query
stringlengths 5
1.23k
| positive
stringlengths 53
15.2k
| id_
int64 0
252k
| task_name
stringlengths 87
242
| negative
listlengths 20
553
|
|---|---|---|---|---|
Prompt the user to continue or not
|
def continue_prompt(message=""):
    """Ask the user to type 'Yes' or 'No'; return True for 'Yes', False for 'No'.

    Keeps prompting until one of the two exact answers is given.
    """
    full_message = message + "\n'Yes' or 'No' to continue: "
    while True:
        reply = prompt(full_message, eventloop=eventloop())
        if reply == "Yes":
            return True
        if reply == "No":
            return False
| 2,000
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/utils.py#L216-L236
|
[
"def",
"put",
"(",
"self",
",",
"store",
",",
"obj",
")",
":",
"spec",
"=",
"self",
".",
"_get_store_spec",
"(",
"store",
")",
"blob",
"=",
"pack",
"(",
"obj",
")",
"blob_hash",
"=",
"long_hash",
"(",
"blob",
")",
"+",
"store",
"[",
"len",
"(",
"'external-'",
")",
":",
"]",
"if",
"spec",
"[",
"'protocol'",
"]",
"==",
"'file'",
":",
"folder",
"=",
"os",
".",
"path",
".",
"join",
"(",
"spec",
"[",
"'location'",
"]",
",",
"self",
".",
"database",
")",
"full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"folder",
",",
"blob_hash",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"full_path",
")",
":",
"try",
":",
"safe_write",
"(",
"full_path",
",",
"blob",
")",
"except",
"FileNotFoundError",
":",
"os",
".",
"makedirs",
"(",
"folder",
")",
"safe_write",
"(",
"full_path",
",",
"blob",
")",
"elif",
"spec",
"[",
"'protocol'",
"]",
"==",
"'s3'",
":",
"S3Folder",
"(",
"database",
"=",
"self",
".",
"database",
",",
"*",
"*",
"spec",
")",
".",
"put",
"(",
"blob_hash",
",",
"blob",
")",
"else",
":",
"raise",
"DataJointError",
"(",
"'Unknown external storage protocol {protocol} for {store}'",
".",
"format",
"(",
"store",
"=",
"store",
",",
"protocol",
"=",
"spec",
"[",
"'protocol'",
"]",
")",
")",
"# insert tracking info",
"self",
".",
"connection",
".",
"query",
"(",
"\"INSERT INTO {tab} (hash, size) VALUES ('{hash}', {size}) \"",
"\"ON DUPLICATE KEY UPDATE timestamp=CURRENT_TIMESTAMP\"",
".",
"format",
"(",
"tab",
"=",
"self",
".",
"full_table_name",
",",
"hash",
"=",
"blob_hash",
",",
"size",
"=",
"len",
"(",
"blob",
")",
")",
")",
"return",
"blob_hash"
] |
Write msg on stdout. If no encoding is specified, the detected encoding of stdout is used. If the encoding can't encode some chars, they are replaced by ?
|
def printo(msg, encoding=None, errors='replace', std_type='stdout'):
    """Write *msg* plus a newline to the stream named by *std_type* on ``sys``.

    If *encoding* is None the stream's detected encoding is used, falling
    back to 'ascii'. Unencodable characters are handled per *errors*
    (default: replaced by '?').
    """
    stream = getattr(sys, std_type, sys.stdout)
    if encoding is None:
        # Streams without an .encoding attribute (or with encoding=None)
        # fall through to the ascii fallback below.
        encoding = getattr(stream, 'encoding', None)
    if encoding is None:
        encoding = 'ascii'
    # https://docs.python.org/3/library/sys.html#sys.stdout
    # On Python 3 write to the underlying binary buffer when available.
    target = getattr(stream, 'buffer', stream)
    target.write(msg.encode(encoding, errors=errors))
    target.write(b'\n')
    target.flush()
| 2,001
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/utils.py#L281-L304
|
[
"def",
"_rnd_date",
"(",
"start",
",",
"end",
")",
":",
"return",
"date",
".",
"fromordinal",
"(",
"random",
".",
"randint",
"(",
"start",
".",
"toordinal",
"(",
")",
",",
"end",
".",
"toordinal",
"(",
")",
")",
")"
] |
Format a python tree structure
|
def format_tree(tree):
    """Format a python tree structure into an ASCII-art table.

    The tree is a dict with a 'node' entry (string or list of cells) and an
    optional 'childs' list of sub-trees. Rows are rendered with box-drawing
    connectors and handed to ``format_table``.
    """
    # NOTE(review): connector string widths reproduced as shown in the
    # source; whitespace inside the literals may have been collapsed by
    # extraction — verify alignment against the upstream file.

    def _annotate(node, parents=None):
        # Record, for every node, whether each ancestor was its parent's
        # last child (drives the │ / blank continuation columns).
        node['parents'] = parents
        childs = node.get('childs', [])
        last_index = len(childs) - 1
        for index, _ in enumerate(childs):
            node['childs'][index] = _annotate(
                node['childs'][index],
                parents=list(parents) + [index == last_index])
        return node

    def _collect(node, rows):
        prefix = ''
        for is_last in node['parents'][:-1]:
            prefix += '│ ' if is_last is False else ' '
        if node['parents']:
            prefix += '└── ' if node['parents'][-1] is True else '├── '
        if isinstance(node['node'], string_types):
            node['node'] = [node['node']]
        rows.append([prefix + node['node'][0]] + node['node'][1:])
        for child in node.get('childs', []):
            rows = _collect(child, rows)
        return rows

    tree = _annotate(tree, parents=[])
    return format_table(_collect(tree, []))
| 2,002
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/utils.py#L345-L434
|
[
"def",
"_sendStatCmd",
"(",
"self",
",",
"cmd",
")",
":",
"try",
":",
"self",
".",
"_conn",
".",
"write",
"(",
"\"%s\\r\\n\"",
"%",
"cmd",
")",
"regex",
"=",
"re",
".",
"compile",
"(",
"'^(END|ERROR)\\r\\n'",
",",
"re",
".",
"MULTILINE",
")",
"(",
"idx",
",",
"mobj",
",",
"text",
")",
"=",
"self",
".",
"_conn",
".",
"expect",
"(",
"[",
"regex",
",",
"]",
",",
"self",
".",
"_timeout",
")",
"#@UnusedVariable",
"except",
":",
"raise",
"Exception",
"(",
"\"Communication with %s failed\"",
"%",
"self",
".",
"_instanceName",
")",
"if",
"mobj",
"is",
"not",
"None",
":",
"if",
"mobj",
".",
"group",
"(",
"1",
")",
"==",
"'END'",
":",
"return",
"text",
".",
"splitlines",
"(",
")",
"[",
":",
"-",
"1",
"]",
"elif",
"mobj",
".",
"group",
"(",
"1",
")",
"==",
"'ERROR'",
":",
"raise",
"Exception",
"(",
"\"Protocol error in communication with %s.\"",
"%",
"self",
".",
"_instanceName",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Connection with %s timed out.\"",
"%",
"self",
".",
"_instanceName",
")"
] |
Map func on a list using gevent greenlets .
|
def parallel_map(func, iterable, args=None, kwargs=None, workers=None):
    """Map func on a list using gevent greenlets.

    Args:
        func: callable applied to each item.
        iterable: items to process.
        args: extra positional args passed to every call.
        kwargs: extra keyword args passed to every call.
        workers: if given, bound concurrency with a Pool of that size;
            otherwise spawn all greenlets in an unbounded Group.

    Returns:
        List of results in input order.

    Raises:
        The first exception instance produced by any greenlet.
    """
    if args is None:
        args = ()
    if kwargs is None:
        kwargs = {}
    pool = Pool(workers) if workers is not None else Group()
    greenlets = [pool.spawn(func, i, *args, **kwargs) for i in iterable]
    pool.join(raise_error=True)
    results = []
    for g in greenlets:
        # Fix: call get() once per greenlet instead of twice.
        value = g.get()
        if isinstance(value, BaseException):
            raise value
        results.append(value)
    return results
| 2,003
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/utils.py#L437-L468
|
[
"def",
"inconclusive_count",
"(",
"self",
")",
":",
"inconc_count",
"=",
"len",
"(",
"[",
"i",
"for",
"i",
",",
"result",
"in",
"enumerate",
"(",
"self",
".",
"data",
")",
"if",
"result",
".",
"inconclusive",
"]",
")",
"unknown_count",
"=",
"len",
"(",
"[",
"i",
"for",
"i",
",",
"result",
"in",
"enumerate",
"(",
"self",
".",
"data",
")",
"if",
"result",
".",
"get_verdict",
"(",
")",
"==",
"\"unknown\"",
"]",
")",
"return",
"inconc_count",
"+",
"unknown_count"
] |
A general method for parsing word-representations of numbers. Supports floats and integers.
|
def parse(self, words):
    """A general method for parsing word-representations of numbers.

    Supports floats and integers. Numeric strings/values are converted
    directly; otherwise a trailing ordinal or fraction word is replaced
    by its numeric value and parsing is delegated to ``parseFloat``.
    """
    def exact(words):
        """If already represented as float or int, convert."""
        try:
            return float(words)
        # Fix: bare `except:` swallowed everything (incl. KeyboardInterrupt);
        # only conversion failures should mean "not a literal number".
        except (TypeError, ValueError):
            return None

    guess = exact(words)
    if guess is not None:
        return guess

    split = words.split(' ')

    # Replace final ordinal/fraction with number
    if split[-1] in self.__fractions__:
        split[-1] = self.__fractions__[split[-1]]
    elif split[-1] in self.__ordinals__:
        split[-1] = self.__ordinals__[split[-1]]

    parsed_ordinals = ' '.join(split)
    return self.parseFloat(parsed_ordinals)
| 2,004
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/numbers.py#L91-L122
|
[
"def",
"configure_url",
"(",
"url",
")",
":",
"app",
".",
"config",
"[",
"'COIL_URL'",
"]",
"=",
"_site",
".",
"config",
"[",
"'SITE_URL'",
"]",
"=",
"_site",
".",
"config",
"[",
"'BASE_URL'",
"]",
"=",
"_site",
".",
"GLOBAL_CONTEXT",
"[",
"'blog_url'",
"]",
"=",
"site",
".",
"config",
"[",
"'SITE_URL'",
"]",
"=",
"site",
".",
"config",
"[",
"'BASE_URL'",
"]",
"=",
"url"
] |
Convert a floating - point number described in words to a double .
|
def parseFloat(self, words):
    """Convert a floating-point number described in words to a double.

    Tries "X point Y Z..." digit-by-digit floats, then "X and <fraction>"
    forms, and finally falls back to integer parsing.
    """
    def pointFloat(words):
        m = re.search(r'(.*) point (.*)', words)
        if m:
            whole = m.group(1)
            frac = m.group(2)
            total = 0.0
            coeff = 0.10
            for digit in frac.split(' '):
                total += coeff * self.parse(digit)
                coeff /= 10.0
            return self.parseInt(whole) + total
        return None

    def fractionFloat(words):
        m = re.search(r'(.*) and (.*)', words)
        if m:
            whole = self.parseInt(m.group(1))
            frac = m.group(2)
            # Replace plurals
            # Fix: replacement templates made raw — '\g<1>' in a non-raw
            # string is an invalid escape (DeprecationWarning, error-bound).
            frac = re.sub(r'(\w+)s(\b)', r'\g<1>\g<2>', frac)
            # Convert 'a' to 'one' (e.g., 'a third' to 'one third')
            frac = re.sub(r'(\b)a(\b)', r'\g<1>one\g<2>', frac)
            split = frac.split(' ')
            # Split fraction into num (regular integer), denom (ordinal)
            num = split[:1]
            denom = split[1:]
            while denom:
                try:
                    # Test for valid num, denom
                    num_value = self.parse(' '.join(num))
                    denom_value = self.parse(' '.join(denom))
                    return whole + float(num_value) / denom_value
                # Fix: narrowed from a bare except.
                except Exception:
                    # Add another word to num
                    num += denom[:1]
                    denom = denom[1:]
        return None

    # Extract "one point two five"-type float
    result = pointFloat(words)
    if result:
        return result

    # Extract "one and a quarter"-type float
    result = fractionFloat(words)
    if result:
        return result

    # Parse as integer
    return self.parseInt(words)
| 2,005
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/numbers.py#L124-L192
|
[
"def",
"override_env_variables",
"(",
")",
":",
"env_vars",
"=",
"(",
"\"LOGNAME\"",
",",
"\"USER\"",
",",
"\"LNAME\"",
",",
"\"USERNAME\"",
")",
"old",
"=",
"[",
"os",
".",
"environ",
"[",
"v",
"]",
"if",
"v",
"in",
"os",
".",
"environ",
"else",
"None",
"for",
"v",
"in",
"env_vars",
"]",
"for",
"v",
"in",
"env_vars",
":",
"os",
".",
"environ",
"[",
"v",
"]",
"=",
"\"test\"",
"yield",
"for",
"i",
",",
"v",
"in",
"enumerate",
"(",
"env_vars",
")",
":",
"if",
"old",
"[",
"i",
"]",
":",
"os",
".",
"environ",
"[",
"v",
"]",
"=",
"old",
"[",
"i",
"]"
] |
Parses words to the integer they describe .
|
def parseInt(self, words):
    """Parses words to the integer they describe.

    Strips 'and', lowercases, normalizes 'a' to 'one', then accumulates
    small-number words, 'hundred' multipliers and magnitude words
    (thousand, million, ...) into an int.

    Raises:
        NumberService.NumberException: on an unrecognized word.
    """
    # Remove 'and', case-sensitivity
    words = words.replace(" and ", " ").lower()
    # 'a' -> 'one'
    # Fix: replacement template made raw — '\g<1>' in a non-raw string is
    # an invalid escape sequence (DeprecationWarning, future error).
    words = re.sub(r'(\b)a(\b)', r'\g<1>one\g<2>', words)

    def textToNumber(s):
        """
        Converts raw number string to an integer.
        Based on text2num.py by Greg Hewill.
        """
        a = re.split(r"[\s-]+", s)
        n = 0
        g = 0
        for w in a:
            x = NumberService.__small__.get(w, None)
            if x is not None:
                g += x
            elif w == "hundred":
                g *= 100
            else:
                x = NumberService.__magnitude__.get(w, None)
                if x is not None:
                    n += g * x
                    g = 0
                else:
                    raise NumberService.NumberException(
                        "Unknown number: " + w)
        return n + g

    return textToNumber(words)
| 2,006
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/numbers.py#L194-L232
|
[
"def",
"get_site_url",
"(",
")",
":",
"site_url",
"=",
"getattr",
"(",
"_THREAD_LOCAL",
",",
"_THREAD_SITE_URL",
",",
"None",
")",
"if",
"site_url",
"is",
"None",
":",
"site_url",
"=",
"SITE_URL",
"or",
"get_site_url_",
"(",
")",
"setattr",
"(",
"_THREAD_LOCAL",
",",
"_THREAD_SITE_URL",
",",
"site_url",
")",
"return",
"site_url"
] |
Parses a number m into a human-ready string representation. For example, crops off floats if they're too accurate.
|
def parseMagnitude(m):
    """Parses a number m into a human-ready string representation.

    For example, crops off floats if they're too accurate, and expands
    scientific notation and minus signs into words.
    """
    m = NumberService().parse(m)

    def toDecimalPrecision(n, k):
        return float("%.*f" % (k, round(n, k)))

    # Cast to two digits of precision
    digits = 2
    magnitude = toDecimalPrecision(m, digits)

    # If value is really small, keep going.
    # Fix: guard against m == 0, which previously looped forever since
    # every rounding of 0 is falsy.
    while not magnitude and m != 0:
        digits += 1
        magnitude = toDecimalPrecision(m, digits)

    # If item is less than one, go one beyond 'necessary' number of digits
    if m < 1.0:
        magnitude = toDecimalPrecision(m, digits + 1)

    # Ignore decimal accuracy if irrelevant
    if int(magnitude) == magnitude:
        magnitude = int(magnitude)

    # Adjust for scientific notation
    # Fix: replacement templates made raw strings — '\g<1>' in a non-raw
    # string is an invalid escape sequence (DeprecationWarning).
    magString = str(magnitude)
    magString = re.sub(r'(\d)e-(\d+)',
                       r'\g<1> times ten to the negative \g<2>', magString)
    magString = re.sub(r'(\d)e\+(\d+)',
                       r'\g<1> times ten to the \g<2>', magString)
    magString = re.sub(r'-(\d+)', r'negative \g<1>', magString)
    magString = re.sub(r'\b0(\d+)', r'\g<1>', magString)
    return magString
| 2,007
|
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/numbers.py#L242-L282
|
[
"def",
"_if",
"(",
"ctx",
",",
"logical_test",
",",
"value_if_true",
"=",
"0",
",",
"value_if_false",
"=",
"False",
")",
":",
"return",
"value_if_true",
"if",
"conversions",
".",
"to_boolean",
"(",
"logical_test",
",",
"ctx",
")",
"else",
"value_if_false"
] |
Encode the private part of the key in a base64 format by default but when raw is True it will return hex encoded bytes .
|
def serialize(self, raw=False):
    """Encode the private part of the key.

    Base64-encoded by default; when *raw* is True, return the key's
    raw encoded bytes instead.
    """
    encoder_args = () if raw else (nacl.encoding.Base64Encoder,)
    return self._key.encode(*encoder_args)
| 2,008
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_keys.py#L37-L44
|
[
"def",
"is_nsphere",
"(",
"points",
")",
":",
"center",
",",
"radius",
",",
"error",
"=",
"fit_nsphere",
"(",
"points",
")",
"check",
"=",
"error",
"<",
"tol",
".",
"merge",
"return",
"check"
] |
Convenient method for GET requests Returns http request status value from a POST request
|
def _do_get(self, url, **kwargs):
    """Convenient method for GET requests.

    Returns the ``requests`` response on HTTP 200; any failure (HTTP
    error status or transport error) is logged and re-raised as
    RuntimeError.
    """
    # TODO:
    # Add error handling. Check for HTTP status here would be much more
    # convenient than in each calling method.
    try:
        response = self._session.get(url)
        if response.status_code == requests.codes.ok:
            self.conn.logger.debug(
                '_do_get() - HTTP response OK, data: %s', response.text)
            return response
        self.conn.logger.error(
            '_do_get() - HTTP response error: %s', response.status_code)
        self.conn.logger.error(
            '_do_get() - HTTP response error, data: %s', response.text)
        # Fix: status_code is an int; the original `str + int` raised
        # TypeError instead of the intended RuntimeError.
        raise RuntimeError(
            "_do_get() - HTTP response error" + str(response.status_code))
    except Exception as e:
        self.conn.logger.error(
            "_do_get() - Unhandled Error Occurred: %s" % str(e))
        raise RuntimeError(
            "_do_get() - Communication error with ScaleIO gateway")
| 2,009
|
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/api/scaleio/common/connection.py#L104-L125
|
[
"def",
"_record_offset",
"(",
"self",
")",
":",
"offset",
"=",
"self",
".",
"blob_file",
".",
"tell",
"(",
")",
"self",
".",
"event_offsets",
".",
"append",
"(",
"offset",
")"
] |
Convenient method for POST requests Returns http request status value from a POST request
|
def _do_post(self, url, **kwargs):
    """Convenient method for POST requests.

    Returns the ``requests`` response on HTTP 200; any failure (HTTP
    error status or transport error) is logged and re-raised as
    RuntimeError.
    """
    # TODO:
    # Add error handling. Check for HTTP status here would be much more
    # convenient than in each calling method.
    scaleioapi_post_headers = {
        'Content-type': 'application/json',
        'Version': '1.0',
    }
    try:
        response = self._session.post(
            url, headers=scaleioapi_post_headers, **kwargs)
        self.conn.logger.debug(
            '_do_post() - HTTP response: %s', response.text)
        if response.status_code == requests.codes.ok:
            self.conn.logger.debug(
                '_do_post() - HTTP response OK, data: %s', response.text)
            return response
        self.conn.logger.error(
            '_do_post() - HTTP response error: %s', response.status_code)
        self.conn.logger.error(
            '_do_post() - HTTP response error, data: %s', response.text)
        # Fix: status_code is an int; the original `str + int` raised
        # TypeError instead of the intended RuntimeError.
        raise RuntimeError(
            "_do_post() - HTTP response error" + str(response.status_code))
    except Exception as e:
        self.conn.logger.error(
            "_do_post() - Unhandled Error Occurred: %s" % str(e))
        raise RuntimeError(
            "_do_post() - Communication error with ScaleIO gateway")
| 2,010
|
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/api/scaleio/common/connection.py#L127-L148
|
[
"def",
"complete_use",
"(",
"self",
",",
"text",
",",
"*",
"_",
")",
":",
"return",
"[",
"t",
"+",
"\" \"",
"for",
"t",
"in",
"REGIONS",
"if",
"t",
".",
"startswith",
"(",
"text",
")",
"]"
] |
Get response content and headers from a discharge macaroons error .
|
def discharge_required_response(macaroon, path, cookie_suffix_name,
                                message=None):
    """Get response content and headers for a discharge-required error.

    Builds the JSON body the bakery protocol expects, embedding the
    serialized macaroon, its path and the cookie name suffix.
    """
    body = {
        'Code': 'macaroon discharge required',
        'Message': message if message is not None else 'discharge required',
        'Info': {
            'Macaroon': macaroon.to_dict(),
            'MacaroonPath': path,
            'CookieNameSuffix': cookie_suffix_name,
        },
    }
    headers = {
        'WWW-Authenticate': 'Macaroon',
        'Content-Type': 'application/json',
    }
    return json.dumps(body).encode('utf-8'), headers
| 2,011
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/_error.py#L35-L65
|
[
"def",
"add",
"(",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"_CONFIG",
".",
"add_section",
"(",
"name",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"_CONFIG",
".",
"set",
"(",
"name",
",",
"key",
",",
"value",
")",
"with",
"open",
"(",
"_CONFIG_FILEPATH",
",",
"'w'",
")",
"as",
"configfile",
":",
"_CONFIG",
".",
"write",
"(",
"configfile",
")",
"info",
"(",
"'Configuration updated at %s'",
"%",
"_JUT_HOME",
")"
] |
Determines the bakery protocol version from a client request . If the protocol cannot be determined or is invalid the original version of the protocol is used . If a later version is found the latest known version is used which is OK because versions are backwardly compatible .
|
def request_version(req_headers):
    """Determine the bakery protocol version from a client request.

    Missing or malformed headers fall back to VERSION_1; versions newer
    than we know are clamped to LATEST_VERSION (safe, since versions are
    backwardly compatible).
    """
    header_value = req_headers.get(BAKERY_PROTOCOL_HEADER)
    if header_value is None:
        # No header - use backward compatibility mode.
        return bakery.VERSION_1
    try:
        version = int(header_value)
    except ValueError:
        # Badly formed header - use backward compatibility mode.
        return bakery.VERSION_1
    # Clamp unknown future versions to the latest one we support.
    return min(version, bakery.LATEST_VERSION)
| 2,012
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/_error.py#L75-L97
|
[
"def",
"start",
"(",
"self",
")",
":",
"t",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_consume",
")",
"t",
".",
"start",
"(",
")"
] |
Create an error from a JSON - deserialized object
|
def from_dict(cls, serialized):
    """Create an Error from a JSON-deserialized object."""
    def _lookup(key):
        # Some servers return lower-case field names for message and
        # code; be tolerant of both, like the Go client.
        return serialized.get(key) or serialized.get(key.lower())

    return Error(
        code=_lookup('Code'),
        message=_lookup('Message'),
        info=ErrorInfo.from_dict(_lookup('Info')),
        version=bakery.LATEST_VERSION,
    )
| 2,013
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/_error.py#L105-L118
|
[
"def",
"Modify",
"(",
"self",
",",
"client_limit",
"=",
"None",
",",
"client_rate",
"=",
"None",
",",
"duration",
"=",
"None",
")",
":",
"args",
"=",
"hunt_pb2",
".",
"ApiModifyHuntArgs",
"(",
"hunt_id",
"=",
"self",
".",
"hunt_id",
")",
"if",
"client_limit",
"is",
"not",
"None",
":",
"args",
".",
"client_limit",
"=",
"client_limit",
"if",
"client_rate",
"is",
"not",
"None",
":",
"args",
".",
"client_rate",
"=",
"client_rate",
"if",
"duration",
"is",
"not",
"None",
":",
"args",
".",
"duration",
"=",
"duration",
"data",
"=",
"self",
".",
"_context",
".",
"SendRequest",
"(",
"\"ModifyHunt\"",
",",
"args",
")",
"return",
"Hunt",
"(",
"data",
"=",
"data",
",",
"context",
"=",
"self",
".",
"_context",
")"
] |
Checks whether the error is an InteractionRequired error that implements the method with the given name and JSON - unmarshals the method - specific data into x by calling its from_dict method with the deserialized JSON object .
|
def interaction_method(self, kind, x):
    """Return *x* populated from this error's interaction-method entry.

    Checks that this is an InteractionRequired error carrying a method
    named *kind*, then JSON-unmarshals the method-specific data into *x*
    via its ``from_dict``.

    Raises:
        InteractionError: if this is not an interaction-required error.
        InteractionMethodNotFound: if *kind* is not offered.
    """
    if self.info is None or self.code != ERR_INTERACTION_REQUIRED:
        raise InteractionError(
            'not an interaction-required error (code {})'.format(self.code))
    entry = self.info.interaction_methods.get(kind)
    if entry is None:
        raise InteractionMethodNotFound(
            'interaction method {} not found'.format(kind))
    return x.from_dict(entry)
| 2,014
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/_error.py#L120-L140
|
[
"def",
"reset_flags",
"(",
"self",
")",
":",
"self",
".",
"C",
"=",
"None",
"self",
".",
"Z",
"=",
"None",
"self",
".",
"P",
"=",
"None",
"self",
".",
"S",
"=",
"None"
] |
Create a new ErrorInfo object from a JSON deserialized dictionary
|
def from_dict(cls, serialized):
    """Create a new ErrorInfo object from a JSON-deserialized dictionary.

    Returns None when there is nothing to deserialize.
    """
    if serialized is None:
        return None
    macaroon = serialized.get('Macaroon')
    if macaroon is not None:
        macaroon = bakery.Macaroon.from_dict(macaroon)
    return ErrorInfo(
        macaroon=macaroon,
        macaroon_path=serialized.get('MacaroonPath'),
        cookie_name_suffix=serialized.get('CookieNameSuffix'),
        visit_url=serialized.get('VisitURL'),
        wait_url=serialized.get('WaitURL'),
        interaction_methods=serialized.get('InteractionMethods'),
    )
| 2,015
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/_error.py#L178-L197
|
[
"def",
"_wait_for_save",
"(",
"nb_name",
",",
"timeout",
"=",
"5",
")",
":",
"modification_time",
"=",
"os",
".",
"path",
".",
"getmtime",
"(",
"nb_name",
")",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"while",
"time",
".",
"time",
"(",
")",
"<",
"start_time",
"+",
"timeout",
":",
"if",
"(",
"os",
".",
"path",
".",
"getmtime",
"(",
"nb_name",
")",
">",
"modification_time",
"and",
"os",
".",
"path",
".",
"getsize",
"(",
"nb_name",
")",
">",
"0",
")",
":",
"return",
"True",
"time",
".",
"sleep",
"(",
"0.2",
")",
"return",
"False"
] |
Dumps blob message . Supports both blob and raw value .
|
async def dump_blob(elem, elem_type=None):
    """Dump a blob message. Supports both blob objects and raw values.

    Returns the base16-encoded bytes, or b'' for empty/None data.

    Raises:
        ValueError: when the data is not bytes-like.
    """
    if isinstance(elem, x.BlobType):
        data = getattr(elem, x.BlobType.DATA_ATTR)
    else:
        data = elem
    if data is None or len(data) == 0:
        return b''
    if isinstance(data, (bytes, bytearray, list)):
        return base64.b16encode(bytes(data))
    raise ValueError('Unknown blob type')
| 2,016
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L70-L88
|
[
"def",
"do_handshake",
"(",
"self",
")",
":",
"_logger",
".",
"debug",
"(",
"\"Initiating handshake...\"",
")",
"try",
":",
"self",
".",
"_wrap_socket_library_call",
"(",
"lambda",
":",
"SSL_do_handshake",
"(",
"self",
".",
"_ssl",
".",
"value",
")",
",",
"ERR_HANDSHAKE_TIMEOUT",
")",
"except",
"openssl_error",
"(",
")",
"as",
"err",
":",
"if",
"err",
".",
"ssl_error",
"==",
"SSL_ERROR_SYSCALL",
"and",
"err",
".",
"result",
"==",
"-",
"1",
":",
"raise_ssl_error",
"(",
"ERR_PORT_UNREACHABLE",
",",
"err",
")",
"raise",
"self",
".",
"_handshake_done",
"=",
"True",
"_logger",
".",
"debug",
"(",
"\"...completed handshake\"",
")"
] |
Serializes container as popo
|
async def dump_container(obj, container, container_type, params=None,
                         field_archiver=None):
    """Serialize a container as a plain list (popo).

    Each element is dumped with *field_archiver* (default: dump_field)
    using the element type from *params* or the container type's
    ELEM_TYPE. Returns None for a None container.
    """
    archiver = field_archiver if field_archiver else dump_field
    elem_type = params[0] if params else None
    if elem_type is None:
        elem_type = container_type.ELEM_TYPE
    obj = [] if obj is None else get_elem(obj)
    if container is None:
        return None
    sub_params = params[1:] if params else None
    for elem in container:
        obj.append(await archiver(None, elem, elem_type, sub_params))
    return obj
| 2,017
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L102-L124
|
[
"def",
"filter_model_items",
"(",
"index_instance",
",",
"model_items",
",",
"model_name",
",",
"start_date",
",",
"end_date",
")",
":",
"if",
"index_instance",
".",
"updated_field",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"\"No updated date field found for {} - not restricting with start and end date\"",
".",
"format",
"(",
"model_name",
")",
")",
"else",
":",
"if",
"start_date",
":",
"model_items",
"=",
"model_items",
".",
"filter",
"(",
"*",
"*",
"{",
"'{}__gte'",
".",
"format",
"(",
"index_instance",
".",
"updated_field",
")",
":",
"__str_to_tzdate__",
"(",
"start_date",
")",
"}",
")",
"if",
"end_date",
":",
"model_items",
"=",
"model_items",
".",
"filter",
"(",
"*",
"*",
"{",
"'{}__lte'",
".",
"format",
"(",
"index_instance",
".",
"updated_field",
")",
":",
"__str_to_tzdate__",
"(",
"end_date",
")",
"}",
")",
"return",
"model_items"
] |
Loads container of elements from the object representation . Supports the container ref . Returns loaded container .
|
async def load_container(obj, container_type, params=None, container=None,
                         field_archiver=None):
    """Load a container of elements from the object representation.

    Supports loading into an existing *container* (via element refs) or
    building a fresh list. Returns the loaded container, or None for a
    None input.
    """
    archiver = field_archiver if field_archiver else load_field
    if obj is None:
        return None
    elem_type = params[0] if params else None
    if elem_type is None:
        elem_type = container_type.ELEM_TYPE
    result = container if container else []
    sub_params = params[1:] if params else None
    for idx in range(len(obj)):
        loaded = await archiver(obj[idx], elem_type, sub_params,
                                eref(result, idx) if container else None)
        if not container:
            result.append(loaded)
    return result
| 2,018
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L127-L155
|
[
"def",
"transform",
"(",
"self",
",",
"x",
",",
"use_spln",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"x",
"=",
"asarray",
"(",
"x",
",",
"dtype",
"=",
"float",
")",
"if",
"use_spln",
":",
"if",
"self",
".",
"spln",
"is",
"None",
":",
"self",
".",
"set_spline",
"(",
"x",
".",
"min",
"(",
")",
",",
"x",
".",
"max",
"(",
")",
",",
"*",
"*",
"kwargs",
")",
"return",
"apply_along_axis",
"(",
"self",
".",
"spln",
",",
"0",
",",
"x",
")",
"else",
":",
"return",
"self",
".",
"tfun",
"(",
"x",
",",
"*",
"self",
".",
"args",
",",
"*",
"*",
"self",
".",
"kwargs",
")"
] |
Dumps a message field to the object . Field is defined by the message field specification .
|
async def dump_message_field(obj, msg, field, field_archiver=None):
    """Dump a message field to the object.

    The field is defined by the message field specification
    (name, type, *params).
    """
    fname = field[0]
    ftype = field[1]
    params = field[2:]
    archiver = field_archiver if field_archiver else dump_field
    value = getattr(msg, fname, None)
    return await archiver(eref(obj, fname, True), value, ftype, params)
| 2,019
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L158-L171
|
[
"def",
"home_wins",
"(",
"self",
")",
":",
"try",
":",
"wins",
",",
"losses",
"=",
"re",
".",
"findall",
"(",
"r'\\d+'",
",",
"self",
".",
"_home_record",
")",
"return",
"wins",
"except",
"ValueError",
":",
"return",
"0"
] |
Loads message field from the object . Field is defined by the message field specification . Returns loaded value supports field reference .
|
async def load_message_field(obj, msg, field, field_archiver=None):
    """Load a message field from the object into *msg*.

    The field is defined by the message field specification
    (name, type, *params); the loaded value is written through a field
    reference on *msg*.
    """
    fname = field[0]
    ftype = field[1]
    params = field[2:]
    archiver = field_archiver if field_archiver else load_field
    await archiver(obj[fname], ftype, params, eref(msg, fname))
| 2,020
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L174-L187
|
[
"def",
"_connect",
"(",
"self",
")",
":",
"try",
":",
"# Open Connection",
"self",
".",
"influx",
"=",
"InfluxDBClient",
"(",
"self",
".",
"hostname",
",",
"self",
".",
"port",
",",
"self",
".",
"username",
",",
"self",
".",
"password",
",",
"self",
".",
"database",
",",
"self",
".",
"ssl",
")",
"# Log",
"self",
".",
"log",
".",
"debug",
"(",
"\"InfluxdbHandler: Established connection to \"",
"\"%s:%d/%s.\"",
",",
"self",
".",
"hostname",
",",
"self",
".",
"port",
",",
"self",
".",
"database",
")",
"except",
"Exception",
"as",
"ex",
":",
"# Log Error",
"self",
".",
"_throttle_error",
"(",
"\"InfluxdbHandler: Failed to connect to \"",
"\"%s:%d/%s. %s\"",
",",
"self",
".",
"hostname",
",",
"self",
".",
"port",
",",
"self",
".",
"database",
",",
"ex",
")",
"# Close Socket",
"self",
".",
"_close",
"(",
")",
"return"
] |
Dumps message to the object . Returns message popo representation .
|
async def dump_message(obj, msg, field_archiver=None):
    """Dump a message to the object; return its popo representation.

    Iterates the message class field specs and dumps each field into an
    OrderedDict (or into the referenced existing object).
    """
    obj = collections.OrderedDict() if obj is None else get_elem(obj)
    for field in msg.__class__.f_specs():
        await dump_message_field(obj, msg=msg, field=field,
                                 field_archiver=field_archiver)
    return obj
| 2,021
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L190-L206
|
[
"def",
"sync_blockchain",
"(",
"working_dir",
",",
"bt_opts",
",",
"last_block",
",",
"server_state",
",",
"expected_snapshots",
"=",
"{",
"}",
",",
"*",
"*",
"virtualchain_args",
")",
":",
"subdomain_index",
"=",
"server_state",
"[",
"'subdomains'",
"]",
"atlas_state",
"=",
"server_state",
"[",
"'atlas'",
"]",
"# make this usable even if we haven't explicitly configured virtualchain ",
"impl",
"=",
"sys",
".",
"modules",
"[",
"__name__",
"]",
"log",
".",
"info",
"(",
"\"Synchronizing database {} up to block {}\"",
".",
"format",
"(",
"working_dir",
",",
"last_block",
")",
")",
"# NOTE: this is the only place where a read-write handle should be created,",
"# since this is the only place where the db should be modified.",
"new_db",
"=",
"BlockstackDB",
".",
"borrow_readwrite_instance",
"(",
"working_dir",
",",
"last_block",
",",
"expected_snapshots",
"=",
"expected_snapshots",
")",
"# propagate runtime state to virtualchain callbacks",
"new_db",
".",
"subdomain_index",
"=",
"subdomain_index",
"new_db",
".",
"atlas_state",
"=",
"atlas_state",
"rc",
"=",
"virtualchain",
".",
"sync_virtualchain",
"(",
"bt_opts",
",",
"last_block",
",",
"new_db",
",",
"expected_snapshots",
"=",
"expected_snapshots",
",",
"*",
"*",
"virtualchain_args",
")",
"BlockstackDB",
".",
"release_readwrite_instance",
"(",
"new_db",
",",
"last_block",
")",
"return",
"rc"
] |
Loads message if the given type from the object . Supports reading directly to existing message .
|
async def load_message(obj, msg_type, msg=None, field_archiver=None):
    """Load a message of the given type from the object representation.

    Supports reading directly into an existing *msg* instance; otherwise
    a fresh *msg_type* instance is created. Returns the loaded message.
    """
    msg = msg_type() if msg is None else msg
    spec_source = msg_type if msg_type else msg.__class__
    for field in spec_source.f_specs():
        await load_message_field(obj, msg, field,
                                 field_archiver=field_archiver)
    return msg
| 2,022
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L209-L226
|
[
"def",
"on_epoch_end",
"(",
"self",
",",
"last_metrics",
":",
"MetricsList",
",",
"iteration",
":",
"int",
",",
"*",
"*",
"kwargs",
")",
"->",
"None",
":",
"self",
".",
"_write_metrics",
"(",
"iteration",
"=",
"iteration",
",",
"last_metrics",
"=",
"last_metrics",
")"
] |
Transform variant to the popo object representation .
|
async def dump_variant ( obj , elem , elem_type = None , params = None , field_archiver = None ) : field_archiver = field_archiver if field_archiver else dump_field if isinstance ( elem , x . VariantType ) or elem_type . WRAPS_VALUE : return { elem . variant_elem : await field_archiver ( None , getattr ( elem , elem . variant_elem ) , elem . variant_elem_type ) } else : fdef = elem_type . find_fdef ( elem_type . f_specs ( ) , elem ) return { fdef [ 0 ] : await field_archiver ( None , elem , fdef [ 1 ] ) }
| 2,023
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L229-L250
|
[
"def",
"_get_initial_request",
"(",
"self",
")",
":",
"# Any ack IDs that are under lease management need to have their",
"# deadline extended immediately.",
"if",
"self",
".",
"_leaser",
"is",
"not",
"None",
":",
"# Explicitly copy the list, as it could be modified by another",
"# thread.",
"lease_ids",
"=",
"list",
"(",
"self",
".",
"_leaser",
".",
"ack_ids",
")",
"else",
":",
"lease_ids",
"=",
"[",
"]",
"# Put the request together.",
"request",
"=",
"types",
".",
"StreamingPullRequest",
"(",
"modify_deadline_ack_ids",
"=",
"list",
"(",
"lease_ids",
")",
",",
"modify_deadline_seconds",
"=",
"[",
"self",
".",
"ack_deadline",
"]",
"*",
"len",
"(",
"lease_ids",
")",
",",
"stream_ack_deadline_seconds",
"=",
"self",
".",
"ack_histogram",
".",
"percentile",
"(",
"99",
")",
",",
"subscription",
"=",
"self",
".",
"_subscription",
",",
")",
"# Return the initial request.",
"return",
"request"
] |
Dumps generic field to the popo object representation according to the element specification . General multiplexer .
|
async def dump_field ( obj , elem , elem_type , params = None ) : if isinstance ( elem , ( int , bool ) ) or issubclass ( elem_type , x . UVarintType ) or issubclass ( elem_type , x . IntType ) : return set_elem ( obj , elem ) elif issubclass ( elem_type , x . BlobType ) or isinstance ( obj , bytes ) or isinstance ( obj , bytearray ) : return set_elem ( obj , await dump_blob ( elem ) ) elif issubclass ( elem_type , x . UnicodeType ) or isinstance ( elem , str ) : return set_elem ( obj , elem ) elif issubclass ( elem_type , x . VariantType ) : return set_elem ( obj , await dump_variant ( None , elem , elem_type , params ) ) elif issubclass ( elem_type , x . ContainerType ) : # container ~ simple list return set_elem ( obj , await dump_container ( None , elem , elem_type , params ) ) elif issubclass ( elem_type , x . MessageType ) : return set_elem ( obj , await dump_message ( None , elem ) ) else : raise TypeError
| 2,024
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L283-L313
|
[
"def",
"_update_offset_file",
"(",
"self",
")",
":",
"if",
"self",
".",
"on_update",
":",
"self",
".",
"on_update",
"(",
")",
"offset",
"=",
"self",
".",
"_filehandle",
"(",
")",
".",
"tell",
"(",
")",
"inode",
"=",
"stat",
"(",
"self",
".",
"filename",
")",
".",
"st_ino",
"fh",
"=",
"open",
"(",
"self",
".",
"_offset_file",
",",
"\"w\"",
")",
"fh",
".",
"write",
"(",
"\"%s\\n%s\\n\"",
"%",
"(",
"inode",
",",
"offset",
")",
")",
"fh",
".",
"close",
"(",
")",
"self",
".",
"_since_update",
"=",
"0"
] |
Loads a field from the reader based on the field type specification . Demultiplexer .
|
async def load_field ( obj , elem_type , params = None , elem = None ) : if issubclass ( elem_type , x . UVarintType ) or issubclass ( elem_type , x . IntType ) or isinstance ( obj , ( int , bool ) ) : return set_elem ( elem , obj ) elif issubclass ( elem_type , x . BlobType ) : fvalue = await load_blob ( obj , elem_type ) return set_elem ( elem , fvalue ) elif issubclass ( elem_type , x . UnicodeType ) or isinstance ( elem , str ) : return set_elem ( elem , obj ) elif issubclass ( elem_type , x . VariantType ) : fvalue = await load_variant ( obj , elem = get_elem ( elem ) , elem_type = elem_type , params = params ) return set_elem ( elem , fvalue ) elif issubclass ( elem_type , x . ContainerType ) : # container ~ simple list fvalue = await load_container ( obj , elem_type , params = params , container = get_elem ( elem ) ) return set_elem ( elem , fvalue ) elif issubclass ( elem_type , x . MessageType ) : fvalue = await load_message ( obj , msg_type = elem_type , msg = get_elem ( elem ) ) return set_elem ( elem , fvalue ) else : raise TypeError
| 2,025
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrobj.py#L316-L349
|
[
"def",
"set_password",
"(",
"self",
",",
"service",
",",
"username",
",",
"password",
")",
":",
"# encrypt the password",
"password_encrypted",
"=",
"_win_crypto",
".",
"encrypt",
"(",
"password",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"# encode with base64",
"password_base64",
"=",
"base64",
".",
"encodestring",
"(",
"password_encrypted",
")",
"# encode again to unicode",
"password_saved",
"=",
"password_base64",
".",
"decode",
"(",
"'ascii'",
")",
"# store the password",
"key_name",
"=",
"self",
".",
"_key_for_service",
"(",
"service",
")",
"hkey",
"=",
"winreg",
".",
"CreateKey",
"(",
"winreg",
".",
"HKEY_CURRENT_USER",
",",
"key_name",
")",
"winreg",
".",
"SetValueEx",
"(",
"hkey",
",",
"username",
",",
"0",
",",
"winreg",
".",
"REG_SZ",
",",
"password_saved",
")"
] |
Instantiate the given data using the blueprinter .
|
def instantiate ( data , blueprint ) : Validator = jsonschema . validators . validator_for ( blueprint ) blueprinter = extend ( Validator ) ( blueprint ) return blueprinter . instantiate ( data )
| 2,026
|
https://github.com/Julian/Seep/blob/57b5f391d0e23afb7777293a9002125967a014ad/seep/core.py#L35-L49
|
[
"def",
"port_channel_vlag_ignore_split",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"name",
"=",
"str",
"(",
"kwargs",
".",
"pop",
"(",
"'name'",
")",
")",
"enabled",
"=",
"bool",
"(",
"kwargs",
".",
"pop",
"(",
"'enabled'",
",",
"True",
")",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"vlag_ignore_args",
"=",
"dict",
"(",
"name",
"=",
"name",
")",
"if",
"not",
"pynos",
".",
"utilities",
".",
"valid_interface",
"(",
"'port_channel'",
",",
"name",
")",
":",
"raise",
"ValueError",
"(",
"\"`name` must match x\"",
")",
"config",
"=",
"getattr",
"(",
"self",
".",
"_interface",
",",
"'interface_port_channel_vlag_ignore_split'",
")",
"(",
"*",
"*",
"vlag_ignore_args",
")",
"if",
"not",
"enabled",
":",
"ignore_split",
"=",
"config",
".",
"find",
"(",
"'.//*ignore-split'",
")",
"ignore_split",
".",
"set",
"(",
"'operation'",
",",
"'delete'",
")",
"return",
"callback",
"(",
"config",
")"
] |
The entry point of the script .
|
def main ( argv = None ) : from vsgen import VSGSuite from vsgen import VSGLogger # Special case to use the sys.argv when main called without a list. if argv is None : argv = sys . argv # Initialize the application logger pylogger = VSGLogger ( ) # Construct a command line parser and parse the command line args = VSGSuite . make_parser ( description = 'Executes the vsgen package as an application.' ) . parse_args ( argv [ 1 : ] ) for s in VSGSuite . from_args ( * * vars ( args ) ) : s . write ( False ) return 0
| 2,027
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/__main__.py#L19-L37
|
[
"def",
"parse_device",
"(",
"lines",
")",
":",
"name",
",",
"status_line",
",",
"device",
"=",
"parse_device_header",
"(",
"lines",
".",
"pop",
"(",
"0",
")",
")",
"# There are edge cases when the device list is empty and the status line is",
"# merged with the header line, in those cases, the status line is returned",
"# from parse_device_header(), the rest of the time, it's the next line.",
"if",
"not",
"status_line",
":",
"status_line",
"=",
"lines",
".",
"pop",
"(",
"0",
")",
"status",
"=",
"parse_device_status",
"(",
"status_line",
",",
"device",
"[",
"\"personality\"",
"]",
")",
"bitmap",
"=",
"None",
"resync",
"=",
"None",
"for",
"line",
"in",
"lines",
":",
"if",
"line",
".",
"startswith",
"(",
"\" bitmap:\"",
")",
":",
"bitmap",
"=",
"parse_device_bitmap",
"(",
"line",
")",
"elif",
"line",
".",
"startswith",
"(",
"\" [\"",
")",
":",
"resync",
"=",
"parse_device_resync_progress",
"(",
"line",
")",
"elif",
"line",
".",
"startswith",
"(",
"\" \\tresync=\"",
")",
":",
"resync",
"=",
"parse_device_resync_standby",
"(",
"line",
")",
"else",
":",
"raise",
"NotImplementedError",
"(",
"\"unknown device line: {0}\"",
".",
"format",
"(",
"line",
")",
")",
"device",
".",
"update",
"(",
"{",
"\"status\"",
":",
"status",
",",
"\"bitmap\"",
":",
"bitmap",
",",
"\"resync\"",
":",
"resync",
",",
"}",
")",
"return",
"(",
"name",
",",
"device",
")"
] |
Parse model fields .
|
def parse_fields ( attributes ) : return tuple ( field . bind_name ( name ) for name , field in six . iteritems ( attributes ) if isinstance ( field , fields . Field ) )
| 2,028
|
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/models.py#L38-L42
|
[
"def",
"_accountForNlinkEquals2",
"(",
"self",
",",
"localFilePath",
")",
":",
"fileStats",
"=",
"os",
".",
"stat",
"(",
"localFilePath",
")",
"assert",
"fileStats",
".",
"st_nlink",
">=",
"self",
".",
"nlinkThreshold",
"with",
"self",
".",
"_CacheState",
".",
"open",
"(",
"self",
")",
"as",
"cacheInfo",
":",
"cacheInfo",
".",
"sigmaJob",
"-=",
"fileStats",
".",
"st_size",
"jobState",
"=",
"self",
".",
"_JobState",
"(",
"cacheInfo",
".",
"jobState",
"[",
"self",
".",
"jobID",
"]",
")",
"jobState",
".",
"updateJobReqs",
"(",
"fileStats",
".",
"st_size",
",",
"'remove'",
")"
] |
Prepare model fields attribute .
|
def prepare_fields_attribute ( attribute_name , attributes , class_name ) : attribute = attributes . get ( attribute_name ) if not attribute : attribute = tuple ( ) elif isinstance ( attribute , std_collections . Iterable ) : attribute = tuple ( attribute ) else : raise errors . Error ( '{0}.{1} is supposed to be a list of {2}, ' 'instead {3} given' , class_name , attribute_name , fields . Field , attribute ) return attribute
| 2,029
|
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/models.py#L50-L61
|
[
"def",
"get_BM_EOS",
"(",
"cryst",
",",
"systems",
")",
":",
"pvdat",
"=",
"array",
"(",
"[",
"[",
"r",
".",
"get_volume",
"(",
")",
",",
"get_pressure",
"(",
"r",
".",
"get_stress",
"(",
")",
")",
",",
"norm",
"(",
"r",
".",
"get_cell",
"(",
")",
"[",
":",
",",
"0",
"]",
")",
",",
"norm",
"(",
"r",
".",
"get_cell",
"(",
")",
"[",
":",
",",
"1",
"]",
")",
",",
"norm",
"(",
"r",
".",
"get_cell",
"(",
")",
"[",
":",
",",
"2",
"]",
")",
"]",
"for",
"r",
"in",
"systems",
"]",
")",
".",
"T",
"# Estimate the initial guess assuming b0p=1",
"# Limiting volumes",
"v1",
"=",
"min",
"(",
"pvdat",
"[",
"0",
"]",
")",
"v2",
"=",
"max",
"(",
"pvdat",
"[",
"0",
"]",
")",
"# The pressure is falling with the growing volume",
"p2",
"=",
"min",
"(",
"pvdat",
"[",
"1",
"]",
")",
"p1",
"=",
"max",
"(",
"pvdat",
"[",
"1",
"]",
")",
"b0",
"=",
"(",
"p1",
"*",
"v1",
"-",
"p2",
"*",
"v2",
")",
"/",
"(",
"v2",
"-",
"v1",
")",
"v0",
"=",
"v1",
"*",
"(",
"p1",
"+",
"b0",
")",
"/",
"b0",
"# Initial guess",
"p0",
"=",
"[",
"v0",
",",
"b0",
",",
"1",
"]",
"# Fitting",
"try",
":",
"p1",
",",
"succ",
"=",
"optimize",
".",
"curve_fit",
"(",
"BMEOS",
",",
"pvdat",
"[",
"0",
"]",
",",
"pvdat",
"[",
"1",
"]",
",",
"p0",
")",
"except",
"(",
"ValueError",
",",
"RuntimeError",
",",
"optimize",
".",
"OptimizeWarning",
")",
"as",
"ex",
":",
"raise",
"RuntimeError",
"(",
"'Calculation failed'",
")",
"cryst",
".",
"bm_eos",
"=",
"p1",
"cryst",
".",
"pv",
"=",
"pvdat",
"return",
"cryst",
".",
"bm_eos"
] |
Bind fields to model class .
|
def bind_fields_to_model_cls ( cls , model_fields ) : return dict ( ( field . name , field . bind_model_cls ( cls ) ) for field in model_fields )
| 2,030
|
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/models.py#L64-L67
|
[
"def",
"mol_supplier",
"(",
"lines",
",",
"no_halt",
",",
"assign_descriptors",
")",
":",
"def",
"sdf_block",
"(",
"lns",
")",
":",
"mol",
"=",
"[",
"]",
"opt",
"=",
"[",
"]",
"is_mol",
"=",
"True",
"for",
"line",
"in",
"lns",
":",
"if",
"line",
".",
"startswith",
"(",
"\"$$$$\"",
")",
":",
"yield",
"mol",
"[",
":",
"]",
",",
"opt",
"[",
":",
"]",
"is_mol",
"=",
"True",
"mol",
".",
"clear",
"(",
")",
"opt",
".",
"clear",
"(",
")",
"elif",
"line",
".",
"startswith",
"(",
"\"M END\"",
")",
":",
"is_mol",
"=",
"False",
"elif",
"is_mol",
":",
"mol",
".",
"append",
"(",
"line",
".",
"rstrip",
"(",
")",
")",
"else",
":",
"opt",
".",
"append",
"(",
"line",
".",
"rstrip",
"(",
")",
")",
"if",
"mol",
":",
"yield",
"mol",
",",
"opt",
"for",
"i",
",",
"(",
"mol",
",",
"opt",
")",
"in",
"enumerate",
"(",
"sdf_block",
"(",
"lines",
")",
")",
":",
"try",
":",
"c",
"=",
"molecule",
"(",
"mol",
")",
"if",
"assign_descriptors",
":",
"molutil",
".",
"assign_descriptors",
"(",
"c",
")",
"except",
"ValueError",
"as",
"err",
":",
"if",
"no_halt",
":",
"print",
"(",
"\"Unsupported symbol: {} (#{} in v2000reader)\"",
".",
"format",
"(",
"err",
",",
"i",
"+",
"1",
")",
")",
"c",
"=",
"molutil",
".",
"null_molecule",
"(",
"assign_descriptors",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Unsupported symbol: {}\"",
".",
"format",
"(",
"err",
")",
")",
"except",
"RuntimeError",
"as",
"err",
":",
"if",
"no_halt",
":",
"print",
"(",
"\"Failed to minimize ring: {} (#{} in v2000reader)\"",
".",
"format",
"(",
"err",
",",
"i",
"+",
"1",
")",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"Failed to minimize ring: {}\"",
".",
"format",
"(",
"err",
")",
")",
"except",
":",
"if",
"no_halt",
":",
"print",
"(",
"\"Unexpected error (#{} in v2000reader)\"",
".",
"format",
"(",
"i",
"+",
"1",
")",
")",
"c",
"=",
"molutil",
".",
"null_molecule",
"(",
"assign_descriptors",
")",
"c",
".",
"data",
"=",
"optional_data",
"(",
"opt",
")",
"yield",
"c",
"continue",
"else",
":",
"print",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"raise",
"Exception",
"(",
"\"Unsupported Error\"",
")",
"c",
".",
"data",
"=",
"optional_data",
"(",
"opt",
")",
"yield",
"c"
] |
Bind collection to model s class .
|
def bind_collection_to_model_cls ( cls ) : cls . Collection = type ( '{0}.Collection' . format ( cls . __name__ ) , ( cls . Collection , ) , { 'value_type' : cls } ) cls . Collection . __module__ = cls . __module__
| 2,031
|
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/models.py#L70-L79
|
[
"def",
"get_stats_display_width",
"(",
"self",
",",
"curse_msg",
",",
"without_option",
"=",
"False",
")",
":",
"try",
":",
"if",
"without_option",
":",
"# Size without options",
"c",
"=",
"len",
"(",
"max",
"(",
"''",
".",
"join",
"(",
"[",
"(",
"u",
"(",
"u",
"(",
"nativestr",
"(",
"i",
"[",
"'msg'",
"]",
")",
")",
".",
"encode",
"(",
"'ascii'",
",",
"'replace'",
")",
")",
"if",
"not",
"i",
"[",
"'optional'",
"]",
"else",
"\"\"",
")",
"for",
"i",
"in",
"curse_msg",
"[",
"'msgdict'",
"]",
"]",
")",
".",
"split",
"(",
"'\\n'",
")",
",",
"key",
"=",
"len",
")",
")",
"else",
":",
"# Size with all options",
"c",
"=",
"len",
"(",
"max",
"(",
"''",
".",
"join",
"(",
"[",
"u",
"(",
"u",
"(",
"nativestr",
"(",
"i",
"[",
"'msg'",
"]",
")",
")",
".",
"encode",
"(",
"'ascii'",
",",
"'replace'",
")",
")",
"for",
"i",
"in",
"curse_msg",
"[",
"'msgdict'",
"]",
"]",
")",
".",
"split",
"(",
"'\\n'",
")",
",",
"key",
"=",
"len",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"debug",
"(",
"'ERROR: Can not compute plugin width ({})'",
".",
"format",
"(",
"e",
")",
")",
"return",
"0",
"else",
":",
"return",
"c"
] |
Checklist for releasing this project .
|
def checklist ( ctx ) : checklist = """PRE-RELEASE CHECKLIST:
[ ] Everything is checked in
[ ] All tests pass w/ tox
RELEASE CHECKLIST:
[{x1}] Bump version to new-version and tag repository (via bump_version)
[{x2}] Build packages (sdist, bdist_wheel via prepare)
[{x3}] Register and upload packages to testpypi repository (first)
[{x4}] Verify release is OK and packages from testpypi are usable
[{x5}] Register and upload packages to pypi repository
[{x6}] Push last changes to Github repository
POST-RELEASE CHECKLIST:
[ ] Bump version to new-develop-version (via bump_version)
[ ] Adapt CHANGES (if necessary)
[ ] Commit latest changes to Github repository
""" steps = dict ( x1 = None , x2 = None , x3 = None , x4 = None , x5 = None , x6 = None ) yesno_map = { True : "x" , False : "_" , None : " " } answers = { name : yesno_map [ value ] for name , value in steps . items ( ) } print ( checklist . format ( * * answers ) )
| 2,032
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/tasks/release.py#L61-L84
|
[
"def",
"process_pdb",
"(",
"pdbfile",
",",
"outpath",
",",
"as_string",
"=",
"False",
",",
"outputprefix",
"=",
"'report'",
")",
":",
"if",
"not",
"as_string",
":",
"startmessage",
"=",
"'\\nStarting analysis of %s\\n'",
"%",
"pdbfile",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"else",
":",
"startmessage",
"=",
"\"Starting analysis from stdin.\\n\"",
"write_message",
"(",
"startmessage",
")",
"write_message",
"(",
"'='",
"*",
"len",
"(",
"startmessage",
")",
"+",
"'\\n'",
")",
"mol",
"=",
"PDBComplex",
"(",
")",
"mol",
".",
"output_path",
"=",
"outpath",
"mol",
".",
"load_pdb",
"(",
"pdbfile",
",",
"as_string",
"=",
"as_string",
")",
"# #@todo Offers possibility for filter function from command line (by ligand chain, position, hetid)",
"for",
"ligand",
"in",
"mol",
".",
"ligands",
":",
"mol",
".",
"characterize_complex",
"(",
"ligand",
")",
"create_folder_if_not_exists",
"(",
"outpath",
")",
"# Generate the report files",
"streport",
"=",
"StructureReport",
"(",
"mol",
",",
"outputprefix",
"=",
"outputprefix",
")",
"config",
".",
"MAXTHREADS",
"=",
"min",
"(",
"config",
".",
"MAXTHREADS",
",",
"len",
"(",
"mol",
".",
"interaction_sets",
")",
")",
"######################################",
"# PyMOL Visualization (parallelized) #",
"######################################",
"if",
"config",
".",
"PYMOL",
"or",
"config",
".",
"PICS",
":",
"try",
":",
"from",
"plip",
".",
"modules",
".",
"visualize",
"import",
"visualize_in_pymol",
"except",
"ImportError",
":",
"from",
"modules",
".",
"visualize",
"import",
"visualize_in_pymol",
"complexes",
"=",
"[",
"VisualizerData",
"(",
"mol",
",",
"site",
")",
"for",
"site",
"in",
"sorted",
"(",
"mol",
".",
"interaction_sets",
")",
"if",
"not",
"len",
"(",
"mol",
".",
"interaction_sets",
"[",
"site",
"]",
".",
"interacting_res",
")",
"==",
"0",
"]",
"if",
"config",
".",
"MAXTHREADS",
">",
"1",
":",
"write_message",
"(",
"'\\nGenerating visualizations in parallel on %i cores ...'",
"%",
"config",
".",
"MAXTHREADS",
")",
"parfn",
"=",
"parallel_fn",
"(",
"visualize_in_pymol",
")",
"parfn",
"(",
"complexes",
",",
"processes",
"=",
"config",
".",
"MAXTHREADS",
")",
"else",
":",
"[",
"visualize_in_pymol",
"(",
"plcomplex",
")",
"for",
"plcomplex",
"in",
"complexes",
"]",
"if",
"config",
".",
"XML",
":",
"# Generate report in xml format",
"streport",
".",
"write_xml",
"(",
"as_string",
"=",
"config",
".",
"STDOUT",
")",
"if",
"config",
".",
"TXT",
":",
"# Generate report in txt (rst) format",
"streport",
".",
"write_txt",
"(",
"as_string",
"=",
"config",
".",
"STDOUT",
")"
] |
Build packages for this release .
|
def build_packages ( ctx , hide = False ) : print ( "build_packages:" ) ctx . run ( "python setup.py sdist bdist_wheel" , echo = True , hide = hide )
| 2,033
|
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/tasks/release.py#L98-L101
|
[
"def",
"get_stores_secrets_volumes",
"(",
"cls",
",",
"stores_secrets",
")",
":",
"volumes",
"=",
"[",
"]",
"volume_mounts",
"=",
"[",
"]",
"for",
"store_secret",
"in",
"stores_secrets",
":",
"store",
"=",
"store_secret",
"[",
"'store'",
"]",
"if",
"store",
"in",
"{",
"GCS",
",",
"S3",
"}",
":",
"secrets_volumes",
",",
"secrets_volume_mounts",
"=",
"get_volume_from_secret",
"(",
"volume_name",
"=",
"cls",
".",
"STORE_SECRET_VOLUME_NAME",
".",
"format",
"(",
"store",
")",
",",
"mount_path",
"=",
"cls",
".",
"STORE_SECRET_KEY_MOUNT_PATH",
".",
"format",
"(",
"store",
")",
",",
"secret_name",
"=",
"store_secret",
"[",
"'persistence_secret'",
"]",
",",
")",
"volumes",
"+=",
"secrets_volumes",
"volume_mounts",
"+=",
"secrets_volume_mounts",
"return",
"volumes",
",",
"volume_mounts"
] |
Register a new thread .
|
def register ( self , name , function , description = None ) : return self . __app . threads . register ( name , function , self . _plugin , description )
| 2,034
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_threads_pattern.py#L60-L69
|
[
"def",
"unbind",
"(",
"self",
",",
"devices_to_unbind",
")",
":",
"if",
"self",
".",
"entity_api_key",
"==",
"\"\"",
":",
"return",
"{",
"'status'",
":",
"'failure'",
",",
"'response'",
":",
"'No API key found in request'",
"}",
"url",
"=",
"self",
".",
"base_url",
"+",
"\"api/0.1.0/subscribe/unbind\"",
"headers",
"=",
"{",
"\"apikey\"",
":",
"self",
".",
"entity_api_key",
"}",
"data",
"=",
"{",
"\"exchange\"",
":",
"\"amq.topic\"",
",",
"\"keys\"",
":",
"devices_to_unbind",
",",
"\"queue\"",
":",
"self",
".",
"entity_id",
"}",
"with",
"self",
".",
"no_ssl_verification",
"(",
")",
":",
"r",
"=",
"requests",
".",
"delete",
"(",
"url",
",",
"json",
"=",
"data",
",",
"headers",
"=",
"headers",
")",
"print",
"(",
"r",
")",
"response",
"=",
"dict",
"(",
")",
"if",
"\"No API key\"",
"in",
"str",
"(",
"r",
".",
"content",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
":",
"response",
"[",
"\"status\"",
"]",
"=",
"\"failure\"",
"r",
"=",
"json",
".",
"loads",
"(",
"r",
".",
"content",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
"[",
"'message'",
"]",
"elif",
"'unbind'",
"in",
"str",
"(",
"r",
".",
"content",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
":",
"response",
"[",
"\"status\"",
"]",
"=",
"\"success\"",
"r",
"=",
"r",
".",
"content",
".",
"decode",
"(",
"\"utf-8\"",
")",
"else",
":",
"response",
"[",
"\"status\"",
"]",
"=",
"\"failure\"",
"r",
"=",
"r",
".",
"content",
".",
"decode",
"(",
"\"utf-8\"",
")",
"response",
"[",
"\"response\"",
"]",
"=",
"str",
"(",
"r",
")",
"return",
"response"
] |
Unregisters an existing thread so that this thread is no longer available .
|
def unregister ( self , thread ) : if thread not in self . threads . keys ( ) : self . log . warning ( "Can not unregister thread %s" % thread ) else : del ( self . threads [ thread ] ) self . __log . debug ( "Thread %s got unregistered" % thread )
| 2,035
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_threads_pattern.py#L111-L123
|
[
"def",
"summary",
"(",
"self",
",",
"featuresCol",
",",
"weightCol",
"=",
"None",
")",
":",
"featuresCol",
",",
"weightCol",
"=",
"Summarizer",
".",
"_check_param",
"(",
"featuresCol",
",",
"weightCol",
")",
"return",
"Column",
"(",
"self",
".",
"_java_obj",
".",
"summary",
"(",
"featuresCol",
".",
"_jc",
",",
"weightCol",
".",
"_jc",
")",
")"
] |
Get one or more threads .
|
def get ( self , thread = None , plugin = None ) : if plugin is not None : if thread is None : threads_list = { } for key in self . threads . keys ( ) : if self . threads [ key ] . plugin == plugin : threads_list [ key ] = self . threads [ key ] return threads_list else : if thread in self . threads . keys ( ) : if self . threads [ thread ] . plugin == plugin : return self . threads [ thread ] else : return None else : return None else : if thread is None : return self . threads else : if thread in self . threads . keys ( ) : return self . threads [ thread ] else : return None
| 2,036
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_threads_pattern.py#L125-L156
|
[
"def",
"deserialize",
"(",
"obj",
")",
":",
"# Be careful of shallow copy here",
"target",
"=",
"dict",
"(",
"obj",
")",
"class_name",
"=",
"None",
"if",
"'__class__'",
"in",
"target",
":",
"class_name",
"=",
"target",
".",
"pop",
"(",
"'__class__'",
")",
"if",
"'__module__'",
"in",
"obj",
":",
"obj",
".",
"pop",
"(",
"'__module__'",
")",
"# Use getattr(module, class_name) for custom types if needed",
"if",
"class_name",
"==",
"'datetime'",
":",
"return",
"datetime",
".",
"datetime",
"(",
"tzinfo",
"=",
"utc",
",",
"*",
"*",
"target",
")",
"if",
"class_name",
"==",
"'StreamingBody'",
":",
"return",
"StringIO",
"(",
"target",
"[",
"'body'",
"]",
")",
"# Return unrecognized structures as-is",
"return",
"obj"
] |
Create and fill the schema from a directory which contains xsd files . It calls fill_schema_from_xsd_file for each xsd file found .
|
def create_schema_from_xsd_directory ( directory , version ) : schema = Schema ( version ) for f in _get_xsd_from_directory ( directory ) : logger . info ( "Loading schema %s" % f ) fill_schema_from_xsd_file ( f , schema ) return schema
| 2,037
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/schema.py#L103-L113
|
[
"def",
"powered_off",
"(",
"name",
",",
"connection",
"=",
"None",
",",
"username",
"=",
"None",
",",
"password",
"=",
"None",
")",
":",
"return",
"_virt_call",
"(",
"name",
",",
"'stop'",
",",
"'unpowered'",
",",
"'Machine has been powered off'",
",",
"connection",
"=",
"connection",
",",
"username",
"=",
"username",
",",
"password",
"=",
"password",
")"
] |
From an xsd file it fills the schema by creating needed Resource . The generateds idl_parser is used to parse ifmap statements in the xsd file .
|
def fill_schema_from_xsd_file ( filename , schema ) : ifmap_statements = _parse_xsd_file ( filename ) properties_all = [ ] for v in ifmap_statements . values ( ) : if ( isinstance ( v [ 0 ] , IDLParser . Link ) ) : src_name = v [ 1 ] target_name = v [ 2 ] src = schema . _get_or_add_resource ( src_name ) target = schema . _get_or_add_resource ( target_name ) if "has" in v [ 3 ] : src . children . append ( target_name ) target . parent = src_name if "ref" in v [ 3 ] : src . refs . append ( target_name ) target . back_refs . append ( src_name ) elif isinstance ( v [ 0 ] , IDLParser . Property ) : target_name = v [ 1 ] [ 0 ] prop = ResourceProperty ( v [ 0 ] . name , is_list = v [ 0 ] . is_list , is_map = v [ 0 ] . is_map ) if target_name != 'all' : target = schema . _get_or_add_resource ( target_name ) target . properties . append ( prop ) else : properties_all . append ( prop ) for r in schema . all_resources ( ) : schema . resource ( r ) . properties += properties_all
| 2,038
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/schema.py#L116-L147
|
[
"def",
"GetElapsedMs",
"(",
"self",
")",
":",
"counter",
"=",
"c_uint64",
"(",
")",
"ret",
"=",
"vmGuestLib",
".",
"VMGuestLib_GetElapsedMs",
"(",
"self",
".",
"handle",
".",
"value",
",",
"byref",
"(",
"counter",
")",
")",
"if",
"ret",
"!=",
"VMGUESTLIB_ERROR_SUCCESS",
":",
"raise",
"VMGuestLibException",
"(",
"ret",
")",
"return",
"counter",
".",
"value"
] |
Decorator to split files into manageable chunks as not to exceed the windows cmd limit
|
def split_ls ( func ) : @ wraps ( func ) def wrapper ( self , files , silent = True , exclude_deleted = False ) : if not isinstance ( files , ( tuple , list ) ) : files = [ files ] counter = 0 index = 0 results = [ ] while files : if index >= len ( files ) : results += func ( self , files , silent , exclude_deleted ) break length = len ( str ( files [ index ] ) ) if length + counter > CHAR_LIMIT : # -- at our limit runfiles = files [ : index ] files = files [ index : ] counter = 0 index = 0 results += func ( self , runfiles , silent , exclude_deleted ) runfiles = None del runfiles else : index += 1 counter += length return results return wrapper
| 2,039
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L69-L105
|
[
"def",
"render_to_json",
"(",
"response",
",",
"request",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"# determine the status code",
"if",
"hasattr",
"(",
"response",
",",
"'status_code'",
")",
":",
"status_code",
"=",
"response",
".",
"status_code",
"elif",
"issubclass",
"(",
"type",
"(",
"response",
")",
",",
"Http404",
")",
":",
"status_code",
"=",
"404",
"elif",
"issubclass",
"(",
"type",
"(",
"response",
")",
",",
"Exception",
")",
":",
"status_code",
"=",
"500",
"logger",
".",
"exception",
"(",
"str",
"(",
"response",
")",
",",
"extra",
"=",
"{",
"'request'",
":",
"request",
"}",
")",
"if",
"settings",
".",
"DEBUG",
":",
"import",
"sys",
"reporter",
"=",
"ExceptionReporter",
"(",
"None",
",",
"*",
"sys",
".",
"exc_info",
"(",
")",
")",
"text",
"=",
"reporter",
".",
"get_traceback_text",
"(",
")",
"response",
"=",
"HttpResponseServerError",
"(",
"text",
",",
"content_type",
"=",
"'text/plain'",
")",
"else",
":",
"response",
"=",
"HttpResponseServerError",
"(",
"\"An error occured while processing an AJAX request.\"",
",",
"content_type",
"=",
"'text/plain'",
")",
"else",
":",
"status_code",
"=",
"200",
"# creating main structure",
"data",
"=",
"{",
"'status'",
":",
"status_code",
",",
"'statusText'",
":",
"REASON_PHRASES",
".",
"get",
"(",
"status_code",
",",
"'UNKNOWN STATUS CODE'",
")",
",",
"'content'",
":",
"response",
"}",
"return",
"JSONResponse",
"(",
"data",
",",
"*",
"*",
"kwargs",
")"
] |
Parses the P4 env vars using set p4
|
def __getVariables ( self ) : try : startupinfo = None if os . name == 'nt' : startupinfo = subprocess . STARTUPINFO ( ) startupinfo . dwFlags |= subprocess . STARTF_USESHOWWINDOW output = subprocess . check_output ( [ 'p4' , 'set' ] , startupinfo = startupinfo ) if six . PY3 : output = str ( output , 'utf8' ) except subprocess . CalledProcessError as err : LOGGER . error ( err ) return p4vars = { } for line in output . splitlines ( ) : if not line : continue try : k , v = line . split ( '=' , 1 ) except ValueError : continue p4vars [ k . strip ( ) ] = v . strip ( ) . split ( ' (' ) [ 0 ] if p4vars [ k . strip ( ) ] . startswith ( '(config' ) : del p4vars [ k . strip ( ) ] self . _port = self . _port or os . getenv ( 'P4PORT' , p4vars . get ( 'P4PORT' ) ) self . _user = self . _user or os . getenv ( 'P4USER' , p4vars . get ( 'P4USER' ) ) self . _client = self . _client or os . getenv ( 'P4CLIENT' , p4vars . get ( 'P4CLIENT' ) )
| 2,040
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L138-L166
|
[
"def",
"get_listing",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'listing'",
")",
":",
"allEvents",
"=",
"self",
".",
"get_allEvents",
"(",
")",
"openEvents",
"=",
"allEvents",
".",
"filter",
"(",
"registrationOpen",
"=",
"True",
")",
"closedEvents",
"=",
"allEvents",
".",
"filter",
"(",
"registrationOpen",
"=",
"False",
")",
"publicEvents",
"=",
"allEvents",
".",
"instance_of",
"(",
"PublicEvent",
")",
"allSeries",
"=",
"allEvents",
".",
"instance_of",
"(",
"Series",
")",
"self",
".",
"listing",
"=",
"{",
"'allEvents'",
":",
"allEvents",
",",
"'openEvents'",
":",
"openEvents",
",",
"'closedEvents'",
":",
"closedEvents",
",",
"'publicEvents'",
":",
"publicEvents",
",",
"'allSeries'",
":",
"allSeries",
",",
"'regOpenEvents'",
":",
"publicEvents",
".",
"filter",
"(",
"registrationOpen",
"=",
"True",
")",
".",
"filter",
"(",
"Q",
"(",
"publicevent__category__isnull",
"=",
"True",
")",
"|",
"Q",
"(",
"publicevent__category__separateOnRegistrationPage",
"=",
"False",
")",
")",
",",
"'regClosedEvents'",
":",
"publicEvents",
".",
"filter",
"(",
"registrationOpen",
"=",
"False",
")",
".",
"filter",
"(",
"Q",
"(",
"publicevent__category__isnull",
"=",
"True",
")",
"|",
"Q",
"(",
"publicevent__category__separateOnRegistrationPage",
"=",
"False",
")",
")",
",",
"'categorySeparateEvents'",
":",
"publicEvents",
".",
"filter",
"(",
"publicevent__category__separateOnRegistrationPage",
"=",
"True",
")",
".",
"order_by",
"(",
"'publicevent__category'",
")",
",",
"'regOpenSeries'",
":",
"allSeries",
".",
"filter",
"(",
"registrationOpen",
"=",
"True",
")",
".",
"filter",
"(",
"Q",
"(",
"series__category__isnull",
"=",
"True",
")",
"|",
"Q",
"(",
"series__category__separateOnRegistrationPage",
"=",
"False",
")",
")",
",",
"'regClosedSeries'",
":",
"allSeries",
".",
"filter",
"(",
"registrationOpen",
"=",
"False",
")",
".",
"filter",
"(",
"Q",
"(",
"series__category__isnull",
"=",
"True",
")",
"|",
"Q",
"(",
"series__category__separateOnRegistrationPage",
"=",
"False",
")",
")",
",",
"'categorySeparateSeries'",
":",
"allSeries",
".",
"filter",
"(",
"series__category__separateOnRegistrationPage",
"=",
"True",
")",
".",
"order_by",
"(",
"'series__category'",
")",
",",
"}",
"return",
"self",
".",
"listing"
] |
The client used in perforce queries
|
def client ( self ) : if isinstance ( self . _client , six . string_types ) : self . _client = Client ( self . _client , self ) return self . _client
| 2,041
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L169-L174
|
[
"def",
"configure",
"(",
"self",
",",
"*",
",",
"hwm",
":",
"int",
"=",
"None",
",",
"rcvtimeo",
":",
"int",
"=",
"None",
",",
"sndtimeo",
":",
"int",
"=",
"None",
",",
"linger",
":",
"int",
"=",
"None",
")",
"->",
"'Socket'",
":",
"if",
"hwm",
"is",
"not",
"None",
":",
"self",
".",
"set_hwm",
"(",
"hwm",
")",
"if",
"rcvtimeo",
"is",
"not",
"None",
":",
"self",
".",
"setsockopt",
"(",
"zmq",
".",
"RCVTIMEO",
",",
"rcvtimeo",
")",
"if",
"sndtimeo",
"is",
"not",
"None",
":",
"self",
".",
"setsockopt",
"(",
"zmq",
".",
"SNDTIMEO",
",",
"sndtimeo",
")",
"if",
"linger",
"is",
"not",
"None",
":",
"self",
".",
"setsockopt",
"(",
"zmq",
".",
"LINGER",
",",
"linger",
")",
"return",
"self"
] |
The status of the connection to perforce
|
def status ( self ) : try : # -- Check client res = self . run ( [ 'info' ] ) if res [ 0 ] [ 'clientName' ] == '*unknown*' : return ConnectionStatus . INVALID_CLIENT # -- Trigger an auth error if not logged in self . run ( [ 'user' , '-o' ] ) except errors . CommandError as err : if 'password (P4PASSWD) invalid or unset' in str ( err . args [ 0 ] ) : return ConnectionStatus . NO_AUTH if 'Connect to server failed' in str ( err . args [ 0 ] ) : return ConnectionStatus . OFFLINE return ConnectionStatus . OK
| 2,042
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L201-L216
|
[
"def",
"_create_application_request",
"(",
"app_metadata",
",",
"template",
")",
":",
"app_metadata",
".",
"validate",
"(",
"[",
"'author'",
",",
"'description'",
",",
"'name'",
"]",
")",
"request",
"=",
"{",
"'Author'",
":",
"app_metadata",
".",
"author",
",",
"'Description'",
":",
"app_metadata",
".",
"description",
",",
"'HomePageUrl'",
":",
"app_metadata",
".",
"home_page_url",
",",
"'Labels'",
":",
"app_metadata",
".",
"labels",
",",
"'LicenseUrl'",
":",
"app_metadata",
".",
"license_url",
",",
"'Name'",
":",
"app_metadata",
".",
"name",
",",
"'ReadmeUrl'",
":",
"app_metadata",
".",
"readme_url",
",",
"'SemanticVersion'",
":",
"app_metadata",
".",
"semantic_version",
",",
"'SourceCodeUrl'",
":",
"app_metadata",
".",
"source_code_url",
",",
"'SpdxLicenseId'",
":",
"app_metadata",
".",
"spdx_license_id",
",",
"'TemplateBody'",
":",
"template",
"}",
"# Remove None values",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"request",
".",
"items",
"(",
")",
"if",
"v",
"}"
] |
Runs a p4 command and returns a list of dictionary objects
|
def run ( self , cmd , stdin = None , marshal_output = True , * * kwargs ) : records = [ ] args = [ self . _executable , "-u" , self . _user , "-p" , self . _port ] if self . _client : args += [ "-c" , str ( self . _client ) ] if marshal_output : args . append ( '-G' ) if isinstance ( cmd , six . string_types ) : raise ValueError ( 'String commands are not supported, please use a list' ) args += cmd command = ' ' . join ( args ) startupinfo = None if os . name == 'nt' : startupinfo = subprocess . STARTUPINFO ( ) startupinfo . dwFlags |= subprocess . STARTF_USESHOWWINDOW proc = subprocess . Popen ( args , stdin = subprocess . PIPE , stdout = subprocess . PIPE , stderr = subprocess . PIPE , startupinfo = startupinfo , * * kwargs ) if stdin : proc . stdin . write ( six . b ( stdin ) ) if marshal_output : try : while True : record = marshal . load ( proc . stdout ) if record . get ( b'code' , '' ) == b'error' and record [ b'severity' ] >= self . _level : proc . stdin . close ( ) proc . stdout . close ( ) raise errors . CommandError ( record [ b'data' ] , record , command ) if isinstance ( record , dict ) : if six . PY2 : records . append ( record ) else : records . append ( { str ( k , 'utf8' ) : str ( v ) if isinstance ( v , int ) else str ( v , 'utf8' , errors = 'ignore' ) for k , v in record . items ( ) } ) except EOFError : pass stdout , stderr = proc . communicate ( ) else : records , stderr = proc . communicate ( ) if stderr : raise errors . CommandError ( stderr , command ) return records
| 2,043
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L218-L287
|
[
"def",
"open_issue",
"(",
"self",
")",
":",
"self",
".",
"github_request",
".",
"update",
"(",
"issue",
"=",
"self",
",",
"state",
"=",
"'open'",
")",
"self",
".",
"state",
"=",
"'open'"
] |
Gets or creates a Changelist object with a description
|
def findChangelist ( self , description = None ) : if description is None : change = Default ( self ) else : if isinstance ( description , six . integer_types ) : change = Changelist ( description , self ) else : pending = self . run ( [ 'changes' , '-l' , '-s' , 'pending' , '-c' , str ( self . _client ) , '-u' , self . _user ] ) for cl in pending : if cl [ 'desc' ] . strip ( ) == description . strip ( ) : LOGGER . debug ( 'Changelist found: {}' . format ( cl [ 'change' ] ) ) change = Changelist ( int ( cl [ 'change' ] ) , self ) break else : LOGGER . debug ( 'No changelist found, creating one' ) change = Changelist . create ( description , self ) change . client = self . _client change . save ( ) return change
| 2,044
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L320-L345
|
[
"def",
"register_write",
"(",
"self",
",",
"reg_index",
",",
"value",
")",
":",
"res",
"=",
"self",
".",
"_dll",
".",
"JLINKARM_WriteReg",
"(",
"reg_index",
",",
"value",
")",
"if",
"res",
"!=",
"0",
":",
"raise",
"errors",
".",
"JLinkException",
"(",
"'Error writing to register %d'",
"%",
"reg_index",
")",
"return",
"value"
] |
Adds a new file to a changelist
|
def add ( self , filename , change = None ) : try : if not self . canAdd ( filename ) : raise errors . RevisionError ( 'File is not under client path' ) if change is None : self . run ( [ 'add' , filename ] ) else : self . run ( [ 'add' , '-c' , str ( change . change ) , filename ] ) data = self . run ( [ 'fstat' , filename ] ) [ 0 ] except errors . CommandError as err : LOGGER . debug ( err ) raise errors . RevisionError ( 'File is not under client path' ) rev = Revision ( data , self ) if isinstance ( change , Changelist ) : change . append ( rev ) return rev
| 2,045
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L347-L375
|
[
"def",
"_report_pods_running",
"(",
"self",
",",
"pods",
",",
"instance_tags",
")",
":",
"pods_tag_counter",
"=",
"defaultdict",
"(",
"int",
")",
"containers_tag_counter",
"=",
"defaultdict",
"(",
"int",
")",
"for",
"pod",
"in",
"pods",
"[",
"'items'",
"]",
":",
"# Containers reporting",
"containers",
"=",
"pod",
".",
"get",
"(",
"'status'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'containerStatuses'",
",",
"[",
"]",
")",
"has_container_running",
"=",
"False",
"for",
"container",
"in",
"containers",
":",
"container_id",
"=",
"container",
".",
"get",
"(",
"'containerID'",
")",
"if",
"not",
"container_id",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'skipping container with no id'",
")",
"continue",
"if",
"\"running\"",
"not",
"in",
"container",
".",
"get",
"(",
"'state'",
",",
"{",
"}",
")",
":",
"continue",
"has_container_running",
"=",
"True",
"tags",
"=",
"tagger",
".",
"tag",
"(",
"container_id",
",",
"tagger",
".",
"LOW",
")",
"or",
"None",
"if",
"not",
"tags",
":",
"continue",
"tags",
"+=",
"instance_tags",
"hash_tags",
"=",
"tuple",
"(",
"sorted",
"(",
"tags",
")",
")",
"containers_tag_counter",
"[",
"hash_tags",
"]",
"+=",
"1",
"# Pod reporting",
"if",
"not",
"has_container_running",
":",
"continue",
"pod_id",
"=",
"pod",
".",
"get",
"(",
"'metadata'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'uid'",
")",
"if",
"not",
"pod_id",
":",
"self",
".",
"log",
".",
"debug",
"(",
"'skipping pod with no uid'",
")",
"continue",
"tags",
"=",
"tagger",
".",
"tag",
"(",
"'kubernetes_pod://%s'",
"%",
"pod_id",
",",
"tagger",
".",
"LOW",
")",
"or",
"None",
"if",
"not",
"tags",
":",
"continue",
"tags",
"+=",
"instance_tags",
"hash_tags",
"=",
"tuple",
"(",
"sorted",
"(",
"tags",
")",
")",
"pods_tag_counter",
"[",
"hash_tags",
"]",
"+=",
"1",
"for",
"tags",
",",
"count",
"in",
"iteritems",
"(",
"pods_tag_counter",
")",
":",
"self",
".",
"gauge",
"(",
"self",
".",
"NAMESPACE",
"+",
"'.pods.running'",
",",
"count",
",",
"list",
"(",
"tags",
")",
")",
"for",
"tags",
",",
"count",
"in",
"iteritems",
"(",
"containers_tag_counter",
")",
":",
"self",
".",
"gauge",
"(",
"self",
".",
"NAMESPACE",
"+",
"'.containers.running'",
",",
"count",
",",
"list",
"(",
"tags",
")",
")"
] |
Determines if a filename can be added to the depot under the current client
|
def canAdd ( self , filename ) : try : result = self . run ( [ 'add' , '-n' , '-t' , 'text' , filename ] ) [ 0 ] except errors . CommandError as err : LOGGER . debug ( err ) return False if result . get ( 'code' ) not in ( 'error' , 'info' ) : return True LOGGER . warn ( 'Unable to add {}: {}' . format ( filename , result [ 'data' ] ) ) return False
| 2,046
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L377-L394
|
[
"def",
"create",
"(",
"cls",
",",
"class_name",
":",
"str",
",",
"monoid_type",
"=",
"Union",
"[",
"Monoid",
",",
"str",
"]",
")",
":",
"def",
"unit",
"(",
"cls",
",",
"value",
")",
":",
"if",
"hasattr",
"(",
"monoid_type",
",",
"\"empty\"",
")",
":",
"log",
"=",
"monoid_type",
".",
"empty",
"(",
")",
"else",
":",
"log",
"=",
"monoid_type",
"(",
")",
"return",
"cls",
"(",
"value",
",",
"log",
")",
"return",
"type",
"(",
"class_name",
",",
"(",
"Writer",
",",
")",
",",
"dict",
"(",
"unit",
"=",
"classmethod",
"(",
"unit",
")",
")",
")"
] |
Queries the depot to get the current status of the changelist
|
def query ( self , files = True ) : if self . _change : cl = str ( self . _change ) self . _p4dict = { camel_case ( k ) : v for k , v in six . iteritems ( self . _connection . run ( [ 'change' , '-o' , cl ] ) [ 0 ] ) } if files : self . _files = [ ] if self . _p4dict . get ( 'status' ) == 'pending' or self . _change == 0 : change = self . _change or 'default' data = self . _connection . run ( [ 'opened' , '-c' , str ( change ) ] ) self . _files = [ Revision ( r , self . _connection ) for r in data ] else : data = self . _connection . run ( [ 'describe' , str ( self . _change ) ] ) [ 0 ] depotfiles = [ ] for k , v in six . iteritems ( data ) : if k . startswith ( 'depotFile' ) : depotfiles . append ( v ) self . _files = self . _connection . ls ( depotfiles )
| 2,047
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L542-L560
|
[
"def",
"_read_columns_file",
"(",
"f",
")",
":",
"try",
":",
"columns",
"=",
"json",
".",
"loads",
"(",
"open",
"(",
"f",
",",
"'r'",
")",
".",
"read",
"(",
")",
",",
"object_pairs_hook",
"=",
"collections",
".",
"OrderedDict",
")",
"except",
"Exception",
"as",
"err",
":",
"raise",
"InvalidColumnsFileError",
"(",
"\"There was an error while reading {0}: {1}\"",
".",
"format",
"(",
"f",
",",
"err",
")",
")",
"# Options are not supported yet:",
"if",
"'__options'",
"in",
"columns",
":",
"del",
"columns",
"[",
"'__options'",
"]",
"return",
"columns"
] |
Removes a revision from this changelist
|
def remove ( self , rev , permanent = False ) : if not isinstance ( rev , Revision ) : raise TypeError ( 'argument needs to be an instance of Revision' ) if rev not in self : raise ValueError ( '{} not in changelist' . format ( rev ) ) self . _files . remove ( rev ) if not permanent : rev . changelist = self . _connection . default
| 2,048
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L585-L601
|
[
"def",
"_add_arg_python",
"(",
"self",
",",
"key",
",",
"value",
"=",
"None",
",",
"mask",
"=",
"False",
")",
":",
"self",
".",
"_data",
"[",
"key",
"]",
"=",
"value",
"if",
"not",
"value",
":",
"# both false boolean values (flags) and empty values should not be added.",
"pass",
"elif",
"value",
"is",
"True",
":",
"# true boolean values are flags and should not contain a value",
"self",
".",
"_args",
".",
"append",
"(",
"'--{}'",
".",
"format",
"(",
"key",
")",
")",
"self",
".",
"_args_quoted",
".",
"append",
"(",
"'--{}'",
".",
"format",
"(",
"key",
")",
")",
"self",
".",
"_args_masked",
".",
"append",
"(",
"'--{}'",
".",
"format",
"(",
"key",
")",
")",
"else",
":",
"self",
".",
"_args",
".",
"append",
"(",
"'--{}={}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")",
"if",
"mask",
":",
"# mask sensitive values",
"value",
"=",
"'x'",
"*",
"len",
"(",
"str",
"(",
"value",
")",
")",
"else",
":",
"# quote all values that would get displayed",
"value",
"=",
"self",
".",
"quote",
"(",
"value",
")",
"self",
".",
"_args_quoted",
".",
"append",
"(",
"'--{}={}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")",
"self",
".",
"_args_masked",
".",
"append",
"(",
"'--{}={}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")"
] |
Revert all files in this changelist
|
def revert ( self , unchanged_only = False ) : if self . _reverted : raise errors . ChangelistError ( 'This changelist has been reverted' ) change = self . _change if self . _change == 0 : change = 'default' cmd = [ 'revert' , '-c' , str ( change ) ] if unchanged_only : cmd . append ( '-a' ) files = [ f . depotFile for f in self . _files ] if files : cmd += files self . _connection . run ( cmd ) self . _files = [ ] self . _reverted = True
| 2,049
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L603-L628
|
[
"def",
"ping",
"(",
"self",
")",
":",
"randomToken",
"=",
"''",
".",
"join",
"(",
"random",
".",
"choice",
"(",
"string",
".",
"ascii_uppercase",
"+",
"string",
".",
"ascii_lowercase",
"+",
"string",
".",
"digits",
")",
"for",
"x",
"in",
"range",
"(",
"32",
")",
")",
"r",
"=",
"self",
".",
"doQuery",
"(",
"'ping?data='",
"+",
"randomToken",
")",
"if",
"r",
".",
"status_code",
"==",
"200",
":",
"# Query ok ?",
"if",
"r",
".",
"json",
"(",
")",
"[",
"'data'",
"]",
"==",
"randomToken",
":",
"# Token equal ?",
"return",
"True",
"return",
"False"
] |
Submits a chagelist to the depot
|
def submit ( self ) : if self . _dirty : self . save ( ) self . _connection . run ( [ 'submit' , '-c' , str ( self . _change ) ] , marshal_output = False )
| 2,050
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L635-L640
|
[
"def",
"as_version",
"(",
"self",
",",
"version",
"=",
"Version",
".",
"latest",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
",",
"list",
")",
":",
"result",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"self",
".",
"iteritems",
"(",
")",
"if",
"isinstance",
"(",
"self",
",",
"dict",
")",
"else",
"vars",
"(",
"self",
")",
".",
"iteritems",
"(",
")",
":",
"k",
"=",
"self",
".",
"_props_corrected",
".",
"get",
"(",
"k",
",",
"k",
")",
"if",
"isinstance",
"(",
"v",
",",
"SerializableBase",
")",
":",
"result",
"[",
"k",
"]",
"=",
"v",
".",
"as_version",
"(",
"version",
")",
"elif",
"isinstance",
"(",
"v",
",",
"list",
")",
":",
"result",
"[",
"k",
"]",
"=",
"[",
"]",
"for",
"val",
"in",
"v",
":",
"if",
"isinstance",
"(",
"val",
",",
"SerializableBase",
")",
":",
"result",
"[",
"k",
"]",
".",
"append",
"(",
"val",
".",
"as_version",
"(",
"version",
")",
")",
"else",
":",
"result",
"[",
"k",
"]",
".",
"append",
"(",
"val",
")",
"elif",
"isinstance",
"(",
"v",
",",
"uuid",
".",
"UUID",
")",
":",
"result",
"[",
"k",
"]",
"=",
"unicode",
"(",
"v",
")",
"elif",
"isinstance",
"(",
"v",
",",
"datetime",
".",
"timedelta",
")",
":",
"result",
"[",
"k",
"]",
"=",
"jsonify_timedelta",
"(",
"v",
")",
"elif",
"isinstance",
"(",
"v",
",",
"datetime",
".",
"datetime",
")",
":",
"result",
"[",
"k",
"]",
"=",
"jsonify_datetime",
"(",
"v",
")",
"else",
":",
"result",
"[",
"k",
"]",
"=",
"v",
"result",
"=",
"self",
".",
"_filter_none",
"(",
"result",
")",
"else",
":",
"result",
"=",
"[",
"]",
"for",
"v",
"in",
"self",
":",
"if",
"isinstance",
"(",
"v",
",",
"SerializableBase",
")",
":",
"result",
".",
"append",
"(",
"v",
".",
"as_version",
"(",
"version",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"v",
")",
"return",
"result"
] |
Reverts all files in this changelist then deletes the changelist from perforce
|
def delete ( self ) : try : self . revert ( ) except errors . ChangelistError : pass self . _connection . run ( [ 'change' , '-d' , str ( self . _change ) ] )
| 2,051
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L642-L649
|
[
"def",
"print_rendered_results",
"(",
"results_dict",
")",
":",
"class",
"_HubComponentEncoder",
"(",
"json",
".",
"JSONEncoder",
")",
":",
"def",
"default",
"(",
"self",
",",
"o",
")",
":",
"if",
"isinstance",
"(",
"o",
",",
"base",
".",
"HubComponent",
")",
":",
"return",
"repr",
"(",
"o",
")",
"return",
"json",
".",
"JSONEncoder",
".",
"default",
"(",
"self",
",",
"o",
")",
"formatted",
"=",
"json",
".",
"dumps",
"(",
"results_dict",
",",
"indent",
"=",
"4",
",",
"cls",
"=",
"_HubComponentEncoder",
")",
"# the returned string contains lines with trailing spaces, which causes",
"# doctests to fail. So fix that here.",
"for",
"s",
"in",
"formatted",
".",
"splitlines",
"(",
")",
":",
"print",
"(",
"s",
".",
"rstrip",
"(",
")",
")"
] |
Creates a new changelist
|
def create ( description = '<Created by Python>' , connection = None ) : connection = connection or Connection ( ) description = description . replace ( '\n' , '\n\t' ) form = NEW_FORMAT . format ( client = str ( connection . client ) , description = description ) result = connection . run ( [ 'change' , '-i' ] , stdin = form , marshal_output = False ) return Changelist ( int ( result . split ( ) [ 1 ] ) , connection )
| 2,052
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L694-L708
|
[
"def",
"_add_arg_python",
"(",
"self",
",",
"key",
",",
"value",
"=",
"None",
",",
"mask",
"=",
"False",
")",
":",
"self",
".",
"_data",
"[",
"key",
"]",
"=",
"value",
"if",
"not",
"value",
":",
"# both false boolean values (flags) and empty values should not be added.",
"pass",
"elif",
"value",
"is",
"True",
":",
"# true boolean values are flags and should not contain a value",
"self",
".",
"_args",
".",
"append",
"(",
"'--{}'",
".",
"format",
"(",
"key",
")",
")",
"self",
".",
"_args_quoted",
".",
"append",
"(",
"'--{}'",
".",
"format",
"(",
"key",
")",
")",
"self",
".",
"_args_masked",
".",
"append",
"(",
"'--{}'",
".",
"format",
"(",
"key",
")",
")",
"else",
":",
"self",
".",
"_args",
".",
"append",
"(",
"'--{}={}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")",
"if",
"mask",
":",
"# mask sensitive values",
"value",
"=",
"'x'",
"*",
"len",
"(",
"str",
"(",
"value",
")",
")",
"else",
":",
"# quote all values that would get displayed",
"value",
"=",
"self",
".",
"quote",
"(",
"value",
")",
"self",
".",
"_args_quoted",
".",
"append",
"(",
"'--{}={}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")",
"self",
".",
"_args_masked",
".",
"append",
"(",
"'--{}={}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")"
] |
Runs an fstat for this file and repopulates the data
|
def query ( self ) : self . _p4dict = self . _connection . run ( [ 'fstat' , '-m' , '1' , self . _p4dict [ 'depotFile' ] ] ) [ 0 ] self . _head = HeadRevision ( self . _p4dict ) self . _filename = self . depotFile
| 2,053
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L770-L776
|
[
"def",
"_is_junction",
"(",
"arg",
")",
":",
"return",
"isinstance",
"(",
"arg",
",",
"dict",
")",
"and",
"len",
"(",
"arg",
")",
"==",
"1",
"and",
"next",
"(",
"six",
".",
"iterkeys",
"(",
"arg",
")",
")",
"==",
"'junction'"
] |
Checks out the file
|
def edit ( self , changelist = 0 ) : command = 'reopen' if self . action in ( 'add' , 'edit' ) else 'edit' if int ( changelist ) : self . _connection . run ( [ command , '-c' , str ( changelist . change ) , self . depotFile ] ) else : self . _connection . run ( [ command , self . depotFile ] ) self . query ( )
| 2,054
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L778-L790
|
[
"def",
"_get_port_speed_price_id",
"(",
"items",
",",
"port_speed",
",",
"no_public",
",",
"location",
")",
":",
"for",
"item",
"in",
"items",
":",
"if",
"utils",
".",
"lookup",
"(",
"item",
",",
"'itemCategory'",
",",
"'categoryCode'",
")",
"!=",
"'port_speed'",
":",
"continue",
"# Check for correct capacity and if the item matches private only",
"if",
"any",
"(",
"[",
"int",
"(",
"utils",
".",
"lookup",
"(",
"item",
",",
"'capacity'",
")",
")",
"!=",
"port_speed",
",",
"_is_private_port_speed_item",
"(",
"item",
")",
"!=",
"no_public",
",",
"not",
"_is_bonded",
"(",
"item",
")",
"]",
")",
":",
"continue",
"for",
"price",
"in",
"item",
"[",
"'prices'",
"]",
":",
"if",
"not",
"_matches_location",
"(",
"price",
",",
"location",
")",
":",
"continue",
"return",
"price",
"[",
"'id'",
"]",
"raise",
"SoftLayer",
".",
"SoftLayerError",
"(",
"\"Could not find valid price for port speed: '%s'\"",
"%",
"port_speed",
")"
] |
Locks or unlocks the file
|
def lock ( self , lock = True , changelist = 0 ) : cmd = 'lock' if lock else 'unlock' if changelist : self . _connection . run ( [ cmd , '-c' , changelist , self . depotFile ] ) else : self . _connection . run ( [ cmd , self . depotFile ] ) self . query ( )
| 2,055
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L792-L807
|
[
"def",
"search",
"(",
")",
":",
"redis_key",
"=",
"'s_%s'",
"%",
"request",
".",
"args",
"[",
"'query'",
"]",
".",
"lower",
"(",
")",
"cached",
"=",
"redis_ro_conn",
".",
"get",
"(",
"redis_key",
")",
"if",
"cached",
":",
"return",
"Response",
"(",
"cached",
")",
"else",
":",
"try",
":",
"found",
"=",
"get_on_tmdb",
"(",
"u'/search/movie'",
",",
"query",
"=",
"request",
".",
"args",
"[",
"'query'",
"]",
")",
"movies",
"=",
"[",
"]",
"for",
"movie",
"in",
"found",
"[",
"'results'",
"]",
":",
"cast",
"=",
"get_on_tmdb",
"(",
"u'/movie/%s/casts'",
"%",
"movie",
"[",
"'id'",
"]",
")",
"year",
"=",
"datetime",
".",
"strptime",
"(",
"movie",
"[",
"'release_date'",
"]",
",",
"'%Y-%m-%d'",
")",
".",
"year",
"if",
"movie",
"[",
"'release_date'",
"]",
"else",
"None",
"movies",
".",
"append",
"(",
"{",
"'title'",
":",
"movie",
"[",
"'original_title'",
"]",
",",
"'directors'",
":",
"[",
"x",
"[",
"'name'",
"]",
"for",
"x",
"in",
"cast",
"[",
"'crew'",
"]",
"if",
"x",
"[",
"'department'",
"]",
"==",
"'Directing'",
"and",
"x",
"[",
"'job'",
"]",
"==",
"'Director'",
"]",
",",
"'year'",
":",
"year",
",",
"'_tmdb_id'",
":",
"movie",
"[",
"'id'",
"]",
"}",
")",
"except",
"requests",
".",
"HTTPError",
"as",
"err",
":",
"return",
"Response",
"(",
"'TMDB API error: %s'",
"%",
"str",
"(",
"err",
")",
",",
"status",
"=",
"err",
".",
"response",
".",
"status_code",
")",
"json_response",
"=",
"json",
".",
"dumps",
"(",
"{",
"'movies'",
":",
"movies",
"}",
")",
"redis_conn",
".",
"setex",
"(",
"redis_key",
",",
"app",
".",
"config",
"[",
"'CACHE_TTL'",
"]",
",",
"json_response",
")",
"return",
"Response",
"(",
"json_response",
")"
] |
Syncs the file at the current revision
|
def sync ( self , force = False , safe = True , revision = 0 , changelist = 0 ) : cmd = [ 'sync' ] if force : cmd . append ( '-f' ) if safe : cmd . append ( '-s' ) if revision : cmd . append ( '{}#{}' . format ( self . depotFile , revision ) ) elif changelist : cmd . append ( '{}@{}' . format ( self . depotFile , changelist ) ) else : cmd . append ( self . depotFile ) self . _connection . run ( cmd ) self . query ( )
| 2,056
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L809-L837
|
[
"def",
"safestr",
"(",
"str_",
")",
":",
"str_",
"=",
"str_",
"or",
"\"\"",
"return",
"\"\"",
".",
"join",
"(",
"x",
"for",
"x",
"in",
"str_",
"if",
"x",
".",
"isalnum",
"(",
")",
")"
] |
Reverts any file changes
|
def revert ( self , unchanged = False ) : cmd = [ 'revert' ] if unchanged : cmd . append ( '-a' ) wasadd = self . action == 'add' cmd . append ( self . depotFile ) self . _connection . run ( cmd ) if 'movedFile' in self . _p4dict : self . _p4dict [ 'depotFile' ] = self . _p4dict [ 'movedFile' ] if not wasadd : self . query ( ) if self . _changelist : self . _changelist . remove ( self , permanent = True )
| 2,057
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L839-L862
|
[
"def",
"get_generated_number",
"(",
"context",
",",
"config",
",",
"variables",
",",
"*",
"*",
"kw",
")",
":",
"# separator where to split the ID",
"separator",
"=",
"kw",
".",
"get",
"(",
"'separator'",
",",
"'-'",
")",
"# allow portal_type override",
"portal_type",
"=",
"get_type_id",
"(",
"context",
",",
"*",
"*",
"kw",
")",
"# The ID format for string interpolation, e.g. WS-{seq:03d}",
"id_template",
"=",
"config",
".",
"get",
"(",
"\"form\"",
",",
"\"\"",
")",
"# The split length defines where the key is splitted from the value",
"split_length",
"=",
"config",
".",
"get",
"(",
"\"split_length\"",
",",
"1",
")",
"# The prefix template is the static part of the ID",
"prefix_template",
"=",
"slice",
"(",
"id_template",
",",
"separator",
"=",
"separator",
",",
"end",
"=",
"split_length",
")",
"# get the number generator",
"number_generator",
"=",
"getUtility",
"(",
"INumberGenerator",
")",
"# generate the key for the number generator storage",
"prefix",
"=",
"prefix_template",
".",
"format",
"(",
"*",
"*",
"variables",
")",
"# normalize out any unicode characters like Ö, É, etc. from the prefix",
"prefix",
"=",
"api",
".",
"normalize_filename",
"(",
"prefix",
")",
"# The key used for the storage",
"key",
"=",
"make_storage_key",
"(",
"portal_type",
",",
"prefix",
")",
"# Handle flushed storage",
"if",
"key",
"not",
"in",
"number_generator",
":",
"max_num",
"=",
"0",
"existing",
"=",
"get_ids_with_prefix",
"(",
"portal_type",
",",
"prefix",
")",
"numbers",
"=",
"map",
"(",
"lambda",
"id",
":",
"get_seq_number_from_id",
"(",
"id",
",",
"id_template",
",",
"prefix",
")",
",",
"existing",
")",
"# figure out the highest number in the sequence",
"if",
"numbers",
":",
"max_num",
"=",
"max",
"(",
"numbers",
")",
"# set the number generator",
"logger",
".",
"info",
"(",
"\"*** SEEDING Prefix '{}' to {}\"",
".",
"format",
"(",
"prefix",
",",
"max_num",
")",
")",
"number_generator",
".",
"set_number",
"(",
"key",
",",
"max_num",
")",
"if",
"not",
"kw",
".",
"get",
"(",
"\"dry_run\"",
",",
"False",
")",
":",
"# Generate a new number",
"# NOTE Even when the number exceeds the given ID sequence format,",
"# it will overflow gracefully, e.g.",
"# >>> {sampleId}-R{seq:03d}'.format(sampleId=\"Water\", seq=999999)",
"# 'Water-R999999‘",
"number",
"=",
"number_generator",
".",
"generate_number",
"(",
"key",
"=",
"key",
")",
"else",
":",
"# => This allows us to \"preview\" the next generated ID in the UI",
"# TODO Show the user the next generated number somewhere in the UI",
"number",
"=",
"number_generator",
".",
"get",
"(",
"key",
",",
"1",
")",
"# Return an int or Alphanumber",
"return",
"get_alpha_or_number",
"(",
"number",
",",
"id_template",
")"
] |
Shelves the file if it is in a changelist
|
def shelve ( self , changelist = None ) : if changelist is None and self . changelist . description == 'default' : raise errors . ShelveError ( 'Unabled to shelve files in the default changelist' ) cmd = [ 'shelve' ] if changelist : cmd += [ '-c' , str ( changelist ) ] cmd . append ( self . depotFile ) self . _connection . run ( cmd ) self . query ( )
| 2,058
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L864-L881
|
[
"def",
"_count_devices",
"(",
"self",
")",
":",
"number_of_devices",
"=",
"ctypes",
".",
"c_uint",
"(",
")",
"if",
"ctypes",
".",
"windll",
".",
"user32",
".",
"GetRawInputDeviceList",
"(",
"ctypes",
".",
"POINTER",
"(",
"ctypes",
".",
"c_int",
")",
"(",
")",
",",
"ctypes",
".",
"byref",
"(",
"number_of_devices",
")",
",",
"ctypes",
".",
"sizeof",
"(",
"RawInputDeviceList",
")",
")",
"==",
"-",
"1",
":",
"warn",
"(",
"\"Call to GetRawInputDeviceList was unsuccessful.\"",
"\"We have no idea if a mouse or keyboard is attached.\"",
",",
"RuntimeWarning",
")",
"return",
"devices_found",
"=",
"(",
"RawInputDeviceList",
"*",
"number_of_devices",
".",
"value",
")",
"(",
")",
"if",
"ctypes",
".",
"windll",
".",
"user32",
".",
"GetRawInputDeviceList",
"(",
"devices_found",
",",
"ctypes",
".",
"byref",
"(",
"number_of_devices",
")",
",",
"ctypes",
".",
"sizeof",
"(",
"RawInputDeviceList",
")",
")",
"==",
"-",
"1",
":",
"warn",
"(",
"\"Call to GetRawInputDeviceList was unsuccessful.\"",
"\"We have no idea if a mouse or keyboard is attached.\"",
",",
"RuntimeWarning",
")",
"return",
"for",
"device",
"in",
"devices_found",
":",
"if",
"device",
".",
"dwType",
"==",
"0",
":",
"self",
".",
"_raw_device_counts",
"[",
"'mice'",
"]",
"+=",
"1",
"elif",
"device",
".",
"dwType",
"==",
"1",
":",
"self",
".",
"_raw_device_counts",
"[",
"'keyboards'",
"]",
"+=",
"1",
"elif",
"device",
".",
"dwType",
"==",
"2",
":",
"self",
".",
"_raw_device_counts",
"[",
"'otherhid'",
"]",
"+=",
"1",
"else",
":",
"self",
".",
"_raw_device_counts",
"[",
"'unknown'",
"]",
"+=",
"1"
] |
Marks the file for delete
|
def delete ( self , changelist = 0 ) : cmd = [ 'delete' ] if changelist : cmd += [ '-c' , str ( changelist ) ] cmd . append ( self . depotFile ) self . _connection . run ( cmd ) self . query ( )
| 2,059
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L909-L923
|
[
"def",
"bind_context",
"(",
"context_filename",
")",
":",
"global",
"PRODUCT_CONTEXT",
"if",
"PRODUCT_CONTEXT",
"is",
"None",
":",
"with",
"open",
"(",
"context_filename",
")",
"as",
"contextfile",
":",
"try",
":",
"context",
"=",
"json",
".",
"loads",
"(",
"contextfile",
".",
"read",
"(",
")",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"ContextParseError",
"(",
"'Error parsing %s: %s'",
"%",
"(",
"context_filename",
",",
"str",
"(",
"e",
")",
")",
")",
"context",
"[",
"'PRODUCT_CONTEXT_FILENAME'",
"]",
"=",
"context_filename",
"context",
"[",
"'PRODUCT_EQUATION_FILENAME'",
"]",
"=",
"os",
".",
"environ",
"[",
"'PRODUCT_EQUATION_FILENAME'",
"]",
"context",
"[",
"'PRODUCT_NAME'",
"]",
"=",
"os",
".",
"environ",
"[",
"'PRODUCT_NAME'",
"]",
"context",
"[",
"'CONTAINER_NAME'",
"]",
"=",
"os",
".",
"environ",
"[",
"'CONTAINER_NAME'",
"]",
"context",
"[",
"'PRODUCT_DIR'",
"]",
"=",
"os",
".",
"environ",
"[",
"'PRODUCT_DIR'",
"]",
"context",
"[",
"'CONTAINER_DIR'",
"]",
"=",
"os",
".",
"environ",
"[",
"'CONTAINER_DIR'",
"]",
"context",
"[",
"'APE_ROOT_DIR'",
"]",
"=",
"os",
".",
"environ",
"[",
"'APE_ROOT_DIR'",
"]",
"context",
"[",
"'APE_GLOBAL_DIR'",
"]",
"=",
"os",
".",
"environ",
"[",
"'APE_GLOBAL_DIR'",
"]",
"PRODUCT_CONTEXT",
"=",
"ContextAccessor",
"(",
"context",
")",
"else",
":",
"# bind_context called but context already bound",
"# harmless rebind (with same file) is ignored",
"# otherwise this is a serious error",
"if",
"PRODUCT_CONTEXT",
".",
"PRODUCT_CONTEXT_FILENAME",
"!=",
"context_filename",
":",
"raise",
"ContextBindingError",
"(",
"'product context bound multiple times using different data!'",
")"
] |
The hash value of the current revision
|
def hash ( self ) : if 'digest' not in self . _p4dict : self . _p4dict = self . _connection . run ( [ 'fstat' , '-m' , '1' , '-Ol' , self . depotFile ] ) [ 0 ] return self . _p4dict [ 'digest' ]
| 2,060
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L926-L931
|
[
"def",
"deletecols",
"(",
"X",
",",
"cols",
")",
":",
"if",
"isinstance",
"(",
"cols",
",",
"str",
")",
":",
"cols",
"=",
"cols",
".",
"split",
"(",
"','",
")",
"retain",
"=",
"[",
"n",
"for",
"n",
"in",
"X",
".",
"dtype",
".",
"names",
"if",
"n",
"not",
"in",
"cols",
"]",
"if",
"len",
"(",
"retain",
")",
">",
"0",
":",
"return",
"X",
"[",
"retain",
"]",
"else",
":",
"return",
"None"
] |
A list of view specs
|
def view ( self ) : spec = [ ] for k , v in six . iteritems ( self . _p4dict ) : if k . startswith ( 'view' ) : match = RE_FILESPEC . search ( v ) if match : spec . append ( FileSpec ( v [ : match . end ( ) - 1 ] , v [ match . end ( ) : ] ) ) return spec
| 2,061
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L1145-L1154
|
[
"def",
"private_messenger",
"(",
")",
":",
"while",
"__websocket_server_running__",
":",
"pipein",
"=",
"open",
"(",
"PRIVATE_PIPE",
",",
"'r'",
")",
"line",
"=",
"pipein",
".",
"readline",
"(",
")",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
".",
"replace",
"(",
"'\\r'",
",",
"''",
")",
"if",
"line",
"!=",
"''",
":",
"message",
"=",
"json",
".",
"loads",
"(",
"line",
")",
"WebSocketHandler",
".",
"send_private_message",
"(",
"user_id",
"=",
"message",
"[",
"'user_id'",
"]",
",",
"message",
"=",
"message",
")",
"print",
"line",
"remaining_lines",
"=",
"pipein",
".",
"read",
"(",
")",
"pipein",
".",
"close",
"(",
")",
"pipeout",
"=",
"open",
"(",
"PRIVATE_PIPE",
",",
"'w'",
")",
"pipeout",
".",
"write",
"(",
"remaining_lines",
")",
"pipeout",
".",
"close",
"(",
")",
"else",
":",
"pipein",
".",
"close",
"(",
")",
"time",
".",
"sleep",
"(",
"0.05",
")"
] |
Which stream if any the client is under
|
def stream ( self ) : stream = self . _p4dict . get ( 'stream' ) if stream : return Stream ( stream , self . _connection )
| 2,062
|
https://github.com/theiviaxx/python-perforce/blob/01a3b01fe5949126fa0097d9a8ad386887823b5a/perforce/models.py#L1167-L1171
|
[
"def",
"get_transaction_status",
"(",
"self",
",",
"transactionId",
")",
":",
"params",
"=",
"{",
"}",
"params",
"[",
"'TransactionId'",
"]",
"=",
"transactionId",
"response",
"=",
"self",
".",
"make_request",
"(",
"\"GetTransactionStatus\"",
",",
"params",
")",
"body",
"=",
"response",
".",
"read",
"(",
")",
"if",
"(",
"response",
".",
"status",
"==",
"200",
")",
":",
"rs",
"=",
"ResultSet",
"(",
")",
"h",
"=",
"handler",
".",
"XmlHandler",
"(",
"rs",
",",
"self",
")",
"xml",
".",
"sax",
".",
"parseString",
"(",
"body",
",",
"h",
")",
"return",
"rs",
"else",
":",
"raise",
"FPSResponseError",
"(",
"response",
".",
"status",
",",
"response",
".",
"reason",
",",
"body",
")"
] |
Stores version to the stream if not stored yet
|
async def set_version ( self , tp , params , version = None , elem = None ) : self . registry . set_tr ( None ) tw = TypeWrapper ( tp , params ) if not tw . is_versioned ( ) : return TypeWrapper . ELEMENTARY_RES # If not in the DB, store to the archive at the current position if not self . version_db . is_versioned ( tw ) : if version is None : version = self . _cur_version ( tw , elem ) await dump_uvarint ( self . iobj , 0 ) await dump_uvarint ( self . iobj , version ) self . version_db . set_version ( tw , 0 , version ) return self . version_db . get_version ( tw ) [ 1 ]
| 2,063
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrboost.py#L206-L230
|
[
"def",
"make_app",
"(",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"default_options",
"=",
"[",
"[",
"'content_path'",
",",
"'.'",
"]",
",",
"[",
"'uri_marker'",
",",
"''",
"]",
"]",
"args",
"=",
"list",
"(",
"args",
")",
"options",
"=",
"dict",
"(",
"default_options",
")",
"options",
".",
"update",
"(",
"kw",
")",
"while",
"default_options",
"and",
"args",
":",
"_d",
"=",
"default_options",
".",
"pop",
"(",
"0",
")",
"_a",
"=",
"args",
".",
"pop",
"(",
"0",
")",
"options",
"[",
"_d",
"[",
"0",
"]",
"]",
"=",
"_a",
"options",
"[",
"'content_path'",
"]",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"options",
"[",
"'content_path'",
"]",
".",
"decode",
"(",
"'utf8'",
")",
")",
"options",
"[",
"'uri_marker'",
"]",
"=",
"options",
"[",
"'uri_marker'",
"]",
".",
"decode",
"(",
"'utf8'",
")",
"selector",
"=",
"WSGIHandlerSelector",
"(",
")",
"git_inforefs_handler",
"=",
"GitHTTPBackendInfoRefs",
"(",
"*",
"*",
"options",
")",
"git_rpc_handler",
"=",
"GitHTTPBackendSmartHTTP",
"(",
"*",
"*",
"options",
")",
"static_handler",
"=",
"StaticServer",
"(",
"*",
"*",
"options",
")",
"file_handler",
"=",
"FileServer",
"(",
"*",
"*",
"options",
")",
"json_handler",
"=",
"JSONServer",
"(",
"*",
"*",
"options",
")",
"ui_handler",
"=",
"UIServer",
"(",
"*",
"*",
"options",
")",
"if",
"options",
"[",
"'uri_marker'",
"]",
":",
"marker_regex",
"=",
"r'(?P<decorative_path>.*?)(?:/'",
"+",
"options",
"[",
"'uri_marker'",
"]",
"+",
"')'",
"else",
":",
"marker_regex",
"=",
"''",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*?)/info/refs\\?.*?service=(?P<git_command>git-[^&]+).*$'",
",",
"GET",
"=",
"git_inforefs_handler",
",",
"HEAD",
"=",
"git_inforefs_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*)/(?P<git_command>git-[^/]+)$'",
",",
"POST",
"=",
"git_rpc_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'/static/(?P<working_path>.*)$'",
",",
"GET",
"=",
"static_handler",
",",
"HEAD",
"=",
"static_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*)/file$'",
",",
"GET",
"=",
"file_handler",
",",
"HEAD",
"=",
"file_handler",
")",
"selector",
".",
"add",
"(",
"marker_regex",
"+",
"r'(?P<working_path>.*)$'",
",",
"GET",
"=",
"ui_handler",
",",
"POST",
"=",
"json_handler",
",",
"HEAD",
"=",
"ui_handler",
")",
"return",
"selector"
] |
Symmetric version management
|
async def version ( self , tp , params , version = None , elem = None ) : if self . writing : return await self . set_version ( tp , params , version , elem ) else : return await self . get_version ( tp , params )
| 2,064
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrboost.py#L232-L244
|
[
"def",
"GenerateGaussianNoise",
"(",
"PSD",
")",
":",
"Noise",
"=",
"np",
".",
"zeros",
"(",
"(",
"N_fd",
")",
",",
"complex",
")",
"# Generate noise from PSD ",
"Real",
"=",
"np",
".",
"random",
".",
"randn",
"(",
"N_fd",
")",
"*",
"np",
".",
"sqrt",
"(",
"PSD",
"/",
"(",
"4.",
"*",
"dF",
")",
")",
"Imag",
"=",
"np",
".",
"random",
".",
"randn",
"(",
"N_fd",
")",
"*",
"np",
".",
"sqrt",
"(",
"PSD",
"/",
"(",
"4.",
"*",
"dF",
")",
")",
"Noise",
"=",
"Real",
"+",
"1j",
"*",
"Imag",
"return",
"Noise"
] |
Root - level message . First entry in the archive . Archive headers processing
|
async def root_message ( self , msg , msg_type = None ) : await self . root ( ) await self . message ( msg , msg_type )
| 2,065
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrboost.py#L675-L683
|
[
"def",
"_write_vmx_file",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"manager",
".",
"write_vmx_file",
"(",
"self",
".",
"_vmx_path",
",",
"self",
".",
"_vmx_pairs",
")",
"except",
"OSError",
"as",
"e",
":",
"raise",
"VMwareError",
"(",
"'Could not write VMware VMX file \"{}\": {}'",
".",
"format",
"(",
"self",
".",
"_vmx_path",
",",
"e",
")",
")"
] |
Dumps message to the writer .
|
async def dump_message ( self , msg , msg_type = None ) : mtype = msg . __class__ if msg_type is None else msg_type fields = mtype . f_specs ( ) for field in fields : await self . message_field ( msg = msg , field = field )
| 2,066
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrboost.py#L753-L764
|
[
"def",
"delete",
"(",
"self",
",",
"path",
")",
":",
"path",
"=",
"sanitize_mount",
"(",
"path",
")",
"val",
"=",
"None",
"if",
"path",
".",
"startswith",
"(",
"'cubbyhole'",
")",
":",
"self",
".",
"token",
"=",
"self",
".",
"initial_token",
"val",
"=",
"super",
"(",
"Client",
",",
"self",
")",
".",
"delete",
"(",
"path",
")",
"self",
".",
"token",
"=",
"self",
".",
"operational_token",
"else",
":",
"super",
"(",
"Client",
",",
"self",
")",
".",
"delete",
"(",
"path",
")",
"return",
"val"
] |
Loads message if the given type from the reader . Supports reading directly to existing message .
|
async def load_message ( self , msg_type , msg = None ) : msg = msg_type ( ) if msg is None else msg fields = msg_type . f_specs ( ) if msg_type else msg . __class__ . f_specs ( ) for field in fields : await self . message_field ( msg , field ) return msg
| 2,067
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrboost.py#L766-L780
|
[
"def",
"on_epoch_end",
"(",
"self",
",",
"last_metrics",
":",
"MetricsList",
",",
"iteration",
":",
"int",
",",
"*",
"*",
"kwargs",
")",
"->",
"None",
":",
"self",
".",
"_write_metrics",
"(",
"iteration",
"=",
"iteration",
",",
"last_metrics",
"=",
"last_metrics",
")"
] |
Handle HTTP errors returned by the API server
|
def contrail_error_handler ( f ) : @ wraps ( f ) def wrapper ( * args , * * kwargs ) : try : return f ( * args , * * kwargs ) except HttpError as e : # Replace message by details to provide a # meaningful message if e . details : e . message , e . details = e . details , e . message e . args = ( "%s (HTTP %s)" % ( e . message , e . http_status ) , ) raise return wrapper
| 2,068
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L15-L29
|
[
"def",
"add_directory",
"(",
"self",
",",
"directory",
":",
"PathLike",
",",
"*",
",",
"load_wdl",
":",
"bool",
"=",
"True",
",",
"load_dtz",
":",
"bool",
"=",
"True",
")",
"->",
"int",
":",
"num",
"=",
"0",
"directory",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"directory",
")",
"for",
"filename",
"in",
"os",
".",
"listdir",
"(",
"directory",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"filename",
")",
"tablename",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"if",
"is_table_name",
"(",
"tablename",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
":",
"if",
"load_wdl",
":",
"if",
"ext",
"==",
"self",
".",
"variant",
".",
"tbw_suffix",
":",
"num",
"+=",
"self",
".",
"_open_table",
"(",
"self",
".",
"wdl",
",",
"WdlTable",
",",
"path",
")",
"elif",
"\"P\"",
"not",
"in",
"tablename",
"and",
"ext",
"==",
"self",
".",
"variant",
".",
"pawnless_tbw_suffix",
":",
"num",
"+=",
"self",
".",
"_open_table",
"(",
"self",
".",
"wdl",
",",
"WdlTable",
",",
"path",
")",
"if",
"load_dtz",
":",
"if",
"ext",
"==",
"self",
".",
"variant",
".",
"tbz_suffix",
":",
"num",
"+=",
"self",
".",
"_open_table",
"(",
"self",
".",
"dtz",
",",
"DtzTable",
",",
"path",
")",
"elif",
"\"P\"",
"not",
"in",
"tablename",
"and",
"ext",
"==",
"self",
".",
"variant",
".",
"pawnless_tbz_suffix",
":",
"num",
"+=",
"self",
".",
"_open_table",
"(",
"self",
".",
"dtz",
",",
"DtzTable",
",",
"path",
")",
"return",
"num"
] |
Initialize a session to Contrail API server
|
def make ( self , host = "localhost" , port = 8082 , protocol = "http" , base_uri = "" , os_auth_type = "http" , * * kwargs ) : loader = loading . base . get_plugin_loader ( os_auth_type ) plugin_options = { opt . dest : kwargs . pop ( "os_%s" % opt . dest ) for opt in loader . get_options ( ) if 'os_%s' % opt . dest in kwargs } plugin = loader . load_from_options ( * * plugin_options ) return self . load_from_argparse_arguments ( Namespace ( * * kwargs ) , host = host , port = port , protocol = protocol , base_uri = base_uri , auth = plugin )
| 2,069
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L38-L57
|
[
"def",
"do_watch",
"(",
"self",
",",
"*",
"args",
")",
":",
"tables",
"=",
"[",
"]",
"if",
"not",
"self",
".",
"engine",
".",
"cached_descriptions",
":",
"self",
".",
"engine",
".",
"describe_all",
"(",
")",
"all_tables",
"=",
"list",
"(",
"self",
".",
"engine",
".",
"cached_descriptions",
")",
"for",
"arg",
"in",
"args",
":",
"candidates",
"=",
"set",
"(",
"(",
"t",
"for",
"t",
"in",
"all_tables",
"if",
"fnmatch",
"(",
"t",
",",
"arg",
")",
")",
")",
"for",
"t",
"in",
"sorted",
"(",
"candidates",
")",
":",
"if",
"t",
"not",
"in",
"tables",
":",
"tables",
".",
"append",
"(",
"t",
")",
"mon",
"=",
"Monitor",
"(",
"self",
".",
"engine",
",",
"tables",
")",
"mon",
".",
"start",
"(",
")"
] |
POST data to the api - server
|
def post_json ( self , url , data , cls = None , * * kwargs ) : kwargs [ 'data' ] = to_json ( data , cls = cls ) kwargs [ 'headers' ] = self . default_headers return self . post ( url , * * kwargs ) . json ( )
| 2,070
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L136-L147
|
[
"def",
"update_volume",
"(",
"self",
",",
"volumeID",
",",
"metadata",
")",
":",
"log",
".",
"debug",
"(",
"'updating volume metadata: {}'",
".",
"format",
"(",
"volumeID",
")",
")",
"rawVolume",
"=",
"self",
".",
"_req_raw_volume",
"(",
"volumeID",
")",
"normalized",
"=",
"self",
".",
"normalize_volume",
"(",
"rawVolume",
")",
"normalized",
"[",
"'metadata'",
"]",
"=",
"metadata",
"_",
",",
"newRawVolume",
"=",
"self",
".",
"denormalize_volume",
"(",
"normalized",
")",
"self",
".",
"_db",
".",
"modify_book",
"(",
"volumeID",
",",
"newRawVolume",
")"
] |
PUT data to the api - server
|
def put_json ( self , url , data , cls = None , * * kwargs ) : kwargs [ 'data' ] = to_json ( data , cls = cls ) kwargs [ 'headers' ] = self . default_headers return self . put ( url , * * kwargs ) . json ( )
| 2,071
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L150-L161
|
[
"def",
"update_volume",
"(",
"self",
",",
"volumeID",
",",
"metadata",
")",
":",
"log",
".",
"debug",
"(",
"'updating volume metadata: {}'",
".",
"format",
"(",
"volumeID",
")",
")",
"rawVolume",
"=",
"self",
".",
"_req_raw_volume",
"(",
"volumeID",
")",
"normalized",
"=",
"self",
".",
"normalize_volume",
"(",
"rawVolume",
")",
"normalized",
"[",
"'metadata'",
"]",
"=",
"metadata",
"_",
",",
"newRawVolume",
"=",
"self",
".",
"denormalize_volume",
"(",
"normalized",
")",
"self",
".",
"_db",
".",
"modify_book",
"(",
"volumeID",
",",
"newRawVolume",
")"
] |
Return uuid for fq_name
|
def fqname_to_id ( self , fq_name , type ) : data = { "type" : type , "fq_name" : list ( fq_name ) } return self . post_json ( self . make_url ( "/fqname-to-id" ) , data ) [ "uuid" ]
| 2,072
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L163-L179
|
[
"def",
"stage",
"(",
"self",
",",
"fileobj",
",",
"creds",
"=",
"None",
",",
"callback",
"=",
"None",
")",
":",
"if",
"not",
"hasattr",
"(",
"fileobj",
",",
"'read'",
")",
":",
"fileobj",
"=",
"open",
"(",
"fileobj",
",",
"'rb'",
")",
"if",
"not",
"creds",
":",
"res",
"=",
"self",
".",
"_get_credentials",
"(",
")",
"creds",
"=",
"res",
".",
"json",
"(",
")",
"session",
"=",
"boto3_session",
"(",
"aws_access_key_id",
"=",
"creds",
"[",
"'accessKeyId'",
"]",
",",
"aws_secret_access_key",
"=",
"creds",
"[",
"'secretAccessKey'",
"]",
",",
"aws_session_token",
"=",
"creds",
"[",
"'sessionToken'",
"]",
",",
"region_name",
"=",
"'us-east-1'",
")",
"s3",
"=",
"session",
".",
"resource",
"(",
"'s3'",
")",
"bucket",
"=",
"s3",
".",
"Bucket",
"(",
"creds",
"[",
"'bucket'",
"]",
")",
"key",
"=",
"creds",
"[",
"'key'",
"]",
"bucket",
".",
"upload_fileobj",
"(",
"fileobj",
",",
"key",
",",
"Callback",
"=",
"callback",
")",
"return",
"creds",
"[",
"'url'",
"]"
] |
Return fq_name and type for uuid
|
def id_to_fqname ( self , uuid , type = None ) : data = { "uuid" : uuid } result = self . post_json ( self . make_url ( "/id-to-fqname" ) , data ) result [ 'fq_name' ] = FQName ( result [ 'fq_name' ] ) if type is not None and not result [ 'type' ] . replace ( '_' , '-' ) == type : raise HttpError ( 'uuid %s not found for type %s' % ( uuid , type ) , http_status = 404 ) return result
| 2,073
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L181-L204
|
[
"def",
"decompress_messages",
"(",
"self",
",",
"partitions_offmsgs",
")",
":",
"for",
"pomsg",
"in",
"partitions_offmsgs",
":",
"if",
"pomsg",
"[",
"'message'",
"]",
":",
"pomsg",
"[",
"'message'",
"]",
"=",
"self",
".",
"decompress_fun",
"(",
"pomsg",
"[",
"'message'",
"]",
")",
"yield",
"pomsg"
] |
Add a key - value store entry .
|
def add_kv_store ( self , key , value ) : data = { 'operation' : 'STORE' , 'key' : key , 'value' : value } return self . post ( self . make_url ( "/useragent-kv" ) , data = to_json ( data ) , headers = self . default_headers ) . text
| 2,074
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L246-L258
|
[
"def",
"get_correlation_table",
"(",
"self",
",",
"chain",
"=",
"0",
",",
"parameters",
"=",
"None",
",",
"caption",
"=",
"\"Parameter Correlations\"",
",",
"label",
"=",
"\"tab:parameter_correlations\"",
")",
":",
"parameters",
",",
"cor",
"=",
"self",
".",
"get_correlations",
"(",
"chain",
"=",
"chain",
",",
"parameters",
"=",
"parameters",
")",
"return",
"self",
".",
"_get_2d_latex_table",
"(",
"parameters",
",",
"cor",
",",
"caption",
",",
"label",
")"
] |
Remove a key - value store entry .
|
def remove_kv_store ( self , key ) : data = { 'operation' : 'DELETE' , 'key' : key } return self . post ( self . make_url ( "/useragent-kv" ) , data = to_json ( data ) , headers = self . default_headers ) . text
| 2,075
|
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/client.py#L260-L270
|
[
"def",
"get_correlation_table",
"(",
"self",
",",
"chain",
"=",
"0",
",",
"parameters",
"=",
"None",
",",
"caption",
"=",
"\"Parameter Correlations\"",
",",
"label",
"=",
"\"tab:parameter_correlations\"",
")",
":",
"parameters",
",",
"cor",
"=",
"self",
".",
"get_correlations",
"(",
"chain",
"=",
"chain",
",",
"parameters",
"=",
"parameters",
")",
"return",
"self",
".",
"_get_2d_latex_table",
"(",
"parameters",
",",
"cor",
",",
"caption",
",",
"label",
")"
] |
Returns the given operations array sorted with duplicates removed .
|
def canonical_ops ( ops ) : new_ops = sorted ( set ( ops ) , key = lambda x : ( x . entity , x . action ) ) return new_ops
| 2,076
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_oven.py#L269-L276
|
[
"def",
"libvlc_media_player_navigate",
"(",
"p_mi",
",",
"navigate",
")",
":",
"f",
"=",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_media_player_navigate'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_media_player_navigate'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
")",
",",
"None",
",",
"None",
",",
"MediaPlayer",
",",
"ctypes",
".",
"c_uint",
")",
"return",
"f",
"(",
"p_mi",
",",
"navigate",
")"
] |
Return operations suitable for serializing as part of a MacaroonId .
|
def _macaroon_id_ops ( ops ) : id_ops = [ ] for entity , entity_ops in itertools . groupby ( ops , lambda x : x . entity ) : actions = map ( lambda x : x . action , entity_ops ) id_ops . append ( id_pb2 . Op ( entity = entity , actions = actions ) ) return id_ops
| 2,077
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_oven.py#L279-L289
|
[
"def",
"get_users",
"(",
"self",
",",
"limit",
"=",
"100",
",",
"offset",
"=",
"0",
")",
":",
"url",
"=",
"self",
".",
"TEAM_USERS_URL",
"+",
"\"?limit=%s&offset=%s\"",
"%",
"(",
"limit",
",",
"offset",
")",
"connection",
"=",
"Connection",
"(",
"self",
".",
"token",
")",
"connection",
".",
"set_url",
"(",
"self",
".",
"production",
",",
"url",
")",
"return",
"connection",
".",
"get_request",
"(",
")"
] |
Takes a macaroon with the given version from the oven associates it with the given operations and attaches the given caveats . There must be at least one operation specified . The macaroon will expire at the given time - a time_before first party caveat will be added with that time .
|
def macaroon ( self , version , expiry , caveats , ops ) : if len ( ops ) == 0 : raise ValueError ( 'cannot mint a macaroon associated ' 'with no operations' ) ops = canonical_ops ( ops ) root_key , storage_id = self . root_keystore_for_ops ( ops ) . root_key ( ) id = self . _new_macaroon_id ( storage_id , expiry , ops ) id_bytes = six . int2byte ( LATEST_VERSION ) + id . SerializeToString ( ) if macaroon_version ( version ) < MACAROON_V2 : # The old macaroon format required valid text for the macaroon id, # so base64-encode it. id_bytes = raw_urlsafe_b64encode ( id_bytes ) m = Macaroon ( root_key , id_bytes , self . location , version , self . namespace , ) m . add_caveat ( checkers . time_before_caveat ( expiry ) , self . key , self . locator ) m . add_caveats ( caveats , self . key , self . locator ) return m
| 2,078
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_oven.py#L81-L117
|
[
"def",
"get_default_config",
"(",
"self",
")",
":",
"config",
"=",
"super",
"(",
"XFSCollector",
",",
"self",
")",
".",
"get_default_config",
"(",
")",
"config",
".",
"update",
"(",
"{",
"'path'",
":",
"'xfs'",
"}",
")",
"return",
"config"
] |
Returns a new multi - op entity name string that represents all the given operations and caveats . It returns the same value regardless of the ordering of the operations . It assumes that the operations have been canonicalized and that there s at least one operation .
|
def ops_entity ( self , ops ) : # Hash the operations, removing duplicates as we go. hash_entity = hashlib . sha256 ( ) for op in ops : hash_entity . update ( '{}\n{}\n' . format ( op . action , op . entity ) . encode ( ) ) hash_encoded = base64 . urlsafe_b64encode ( hash_entity . digest ( ) ) return 'multi-' + hash_encoded . decode ( 'utf-8' ) . rstrip ( '=' )
| 2,079
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_oven.py#L135-L151
|
[
"def",
"set_end_date",
"(",
"self",
",",
"lifetime",
")",
":",
"self",
".",
"end_date",
"=",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"+",
"datetime",
".",
"timedelta",
"(",
"0",
",",
"lifetime",
")",
")"
] |
This method makes the oven satisfy the MacaroonOpStore protocol required by the Checker class .
|
def macaroon_ops ( self , macaroons ) : if len ( macaroons ) == 0 : raise ValueError ( 'no macaroons provided' ) storage_id , ops = _decode_macaroon_id ( macaroons [ 0 ] . identifier_bytes ) root_key = self . root_keystore_for_ops ( ops ) . get ( storage_id ) if root_key is None : raise VerificationError ( 'macaroon key not found in storage' ) v = Verifier ( ) conditions = [ ] def validator ( condition ) : # Verify the macaroon's signature only. Don't check any of the # caveats yet but save them so that we can return them. conditions . append ( condition ) return True v . satisfy_general ( validator ) try : v . verify ( macaroons [ 0 ] , root_key , macaroons [ 1 : ] ) except Exception as exc : # Unfortunately pymacaroons doesn't control # the set of exceptions that can be raised here. # Possible candidates are: # pymacaroons.exceptions.MacaroonUnmetCaveatException # pymacaroons.exceptions.MacaroonInvalidSignatureException # ValueError # nacl.exceptions.CryptoError # # There may be others too, so just catch everything. raise six . raise_from ( VerificationError ( 'verification failed: {}' . format ( str ( exc ) ) ) , exc , ) if ( self . ops_store is not None and len ( ops ) == 1 and ops [ 0 ] . entity . startswith ( 'multi-' ) ) : # It's a multi-op entity, so retrieve the actual operations # it's associated with. ops = self . ops_store . get_ops ( ops [ 0 ] . entity ) return ops , conditions
| 2,080
|
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/bakery/_oven.py#L153-L204
|
[
"def",
"_GetUncompressedStreamSize",
"(",
"self",
")",
":",
"self",
".",
"_file_object",
".",
"seek",
"(",
"0",
",",
"os",
".",
"SEEK_SET",
")",
"self",
".",
"_decompressor",
"=",
"self",
".",
"_GetDecompressor",
"(",
")",
"self",
".",
"_uncompressed_data",
"=",
"b''",
"compressed_data_offset",
"=",
"0",
"compressed_data_size",
"=",
"self",
".",
"_file_object",
".",
"get_size",
"(",
")",
"uncompressed_stream_size",
"=",
"0",
"while",
"compressed_data_offset",
"<",
"compressed_data_size",
":",
"read_count",
"=",
"self",
".",
"_ReadCompressedData",
"(",
"self",
".",
"_COMPRESSED_DATA_BUFFER_SIZE",
")",
"if",
"read_count",
"==",
"0",
":",
"break",
"compressed_data_offset",
"+=",
"read_count",
"uncompressed_stream_size",
"+=",
"self",
".",
"_uncompressed_data_size",
"return",
"uncompressed_stream_size"
] |
Extend the list by appending all the items in the given list .
|
def extend ( self , iterable ) : return super ( Collection , self ) . extend ( self . _ensure_iterable_is_valid ( iterable ) )
| 2,081
|
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/collections.py#L27-L30
|
[
"def",
"getTripInfo",
"(",
"triple",
")",
":",
"col_names",
"=",
"[",
"'mjdate'",
",",
"'filter'",
",",
"'elongation'",
",",
"'discovery'",
",",
"'checkup'",
",",
"'recovery'",
",",
"'iq'",
",",
"'block'",
"]",
"sql",
"=",
"\"SELECT mjdate md,\"",
"sql",
"=",
"sql",
"+",
"\" filter, avg(elongation), d.id, checkup.checkup, recovery.recovery , avg(obs_iq_refccd), b.qname \"",
"sql",
"=",
"sql",
"+",
"\"FROM triple_members t JOIN bucket.exposure e ON t.expnum=e.expnum \"",
"sql",
"=",
"sql",
"+",
"\"JOIN bucket.blocks b ON b.expnum=e.expnum \"",
"sql",
"=",
"sql",
"+",
"\"JOIN bucket.circumstance c on e.expnum=c.expnum \"",
"sql",
"=",
"sql",
"+",
"\"LEFT JOIN discovery d ON t.triple=d.triple \"",
"sql",
"=",
"sql",
"+",
"\"LEFT JOIN checkup ON t.triple=checkup.triple \"",
"sql",
"=",
"sql",
"+",
"\"LEFT JOIN recovery ON t.triple=recovery.triple \"",
"sql",
"=",
"sql",
"+",
"\"WHERE t.triple=%s \"",
"sql",
"=",
"sql",
"+",
"\"GROUP BY t.triple ORDER BY t.triple \"",
"cfeps",
".",
"execute",
"(",
"sql",
",",
"(",
"triple",
",",
")",
")",
"rows",
"=",
"cfeps",
".",
"fetchall",
"(",
")",
"result",
"=",
"{",
"}",
"#import datetime",
"for",
"idx",
"in",
"range",
"(",
"len",
"(",
"rows",
"[",
"0",
"]",
")",
")",
":",
"result",
"[",
"col_names",
"[",
"idx",
"]",
"]",
"=",
"rows",
"[",
"0",
"]",
"[",
"idx",
"]",
"return",
"result"
] |
Insert an item at a given position .
|
def insert ( self , index , value ) : return super ( Collection , self ) . insert ( index , self . _ensure_value_is_valid ( value ) )
| 2,082
|
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/collections.py#L32-L35
|
[
"def",
"start",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"is_running",
"(",
")",
":",
"self",
".",
"websock_url",
"=",
"self",
".",
"chrome",
".",
"start",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"websock",
"=",
"websocket",
".",
"WebSocketApp",
"(",
"self",
".",
"websock_url",
")",
"self",
".",
"websock_thread",
"=",
"WebsockReceiverThread",
"(",
"self",
".",
"websock",
",",
"name",
"=",
"'WebsockThread:%s'",
"%",
"self",
".",
"chrome",
".",
"port",
")",
"self",
".",
"websock_thread",
".",
"start",
"(",
")",
"self",
".",
"_wait_for",
"(",
"lambda",
":",
"self",
".",
"websock_thread",
".",
"is_open",
",",
"timeout",
"=",
"30",
")",
"# tell browser to send us messages we're interested in",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'Network.enable'",
")",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'Page.enable'",
")",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'Console.enable'",
")",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'Runtime.enable'",
")",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'ServiceWorker.enable'",
")",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'ServiceWorker.setForceUpdateOnPageLoad'",
")",
"# disable google analytics",
"self",
".",
"send_to_chrome",
"(",
"method",
"=",
"'Network.setBlockedURLs'",
",",
"params",
"=",
"{",
"'urls'",
":",
"[",
"'*google-analytics.com/analytics.js'",
",",
"'*google-analytics.com/ga.js'",
"]",
"}",
")"
] |
Ensure that value is a valid collection s value .
|
def _ensure_value_is_valid ( self , value ) : if not isinstance ( value , self . __class__ . value_type ) : raise TypeError ( '{0} is not valid collection value, instance ' 'of {1} required' . format ( value , self . __class__ . value_type ) ) return value
| 2,083
|
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/collections.py#L71-L77
|
[
"def",
"wavefunction",
"(",
"self",
",",
"quil_program",
":",
"Program",
",",
"memory_map",
":",
"Any",
"=",
"None",
")",
"->",
"Wavefunction",
":",
"if",
"memory_map",
"is",
"not",
"None",
":",
"quil_program",
"=",
"self",
".",
"augment_program_with_memory_values",
"(",
"quil_program",
",",
"memory_map",
")",
"return",
"self",
".",
"connection",
".",
"_wavefunction",
"(",
"quil_program",
"=",
"quil_program",
",",
"random_seed",
"=",
"self",
".",
"random_seed",
")"
] |
Returns container element type
|
def container_elem_type ( container_type , params ) : elem_type = params [ 0 ] if params else None if elem_type is None : elem_type = container_type . ELEM_TYPE return elem_type
| 2,084
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/core/message_types.py#L153-L164
|
[
"def",
"_mod_repo_in_file",
"(",
"repo",
",",
"repostr",
",",
"filepath",
")",
":",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"filepath",
")",
"as",
"fhandle",
":",
"output",
"=",
"[",
"]",
"for",
"line",
"in",
"fhandle",
":",
"cols",
"=",
"salt",
".",
"utils",
".",
"args",
".",
"shlex_split",
"(",
"salt",
".",
"utils",
".",
"stringutils",
".",
"to_unicode",
"(",
"line",
")",
".",
"strip",
"(",
")",
")",
"if",
"repo",
"not",
"in",
"cols",
":",
"output",
".",
"append",
"(",
"line",
")",
"else",
":",
"output",
".",
"append",
"(",
"salt",
".",
"utils",
".",
"stringutils",
".",
"to_str",
"(",
"repostr",
"+",
"'\\n'",
")",
")",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"filepath",
",",
"'w'",
")",
"as",
"fhandle",
":",
"fhandle",
".",
"writelines",
"(",
"output",
")"
] |
Check that a given DOI is a valid canonical DOI .
|
def is_valid ( doi ) : match = REGEX . match ( doi ) return ( match is not None ) and ( match . group ( 0 ) == doi )
| 2,085
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/doi.py#L26-L58
|
[
"def",
"tags_uuids",
"(",
"self",
",",
"uuids",
",",
"archiver",
"=",
"\"\"",
",",
"timeout",
"=",
"DEFAULT_TIMEOUT",
")",
":",
"if",
"not",
"isinstance",
"(",
"uuids",
",",
"list",
")",
":",
"uuids",
"=",
"[",
"uuids",
"]",
"where",
"=",
"\" or \"",
".",
"join",
"(",
"[",
"'uuid = \"{0}\"'",
".",
"format",
"(",
"uuid",
")",
"for",
"uuid",
"in",
"uuids",
"]",
")",
"return",
"self",
".",
"query",
"(",
"\"select * where {0}\"",
".",
"format",
"(",
"where",
")",
",",
"archiver",
",",
"timeout",
")",
".",
"get",
"(",
"'metadata'",
",",
"{",
"}",
")"
] |
Get an OA version for a given DOI .
|
def get_oa_version(doi):
    """Get an OA (open-access) PDF URL for *doi* via the Dissemin API.

    Returns the PDF URL string, or None when the request fails, the
    response is not well-formed, or the API did not report status "ok".
    """
    try:
        response = requests.get("%s%s" % (DISSEMIN_API, doi))
        response.raise_for_status()
        payload = response.json()
        assert payload["status"] == "ok"
        return payload["paper"]["pdf_url"]
    except (AssertionError, ValueError, KeyError, RequestException):
        # Any transport, parsing or schema problem maps to "no OA version".
        return None
| 2,086
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/doi.py#L116-L137
|
[
"def",
"stop_animation",
"(",
"self",
",",
"sprites",
")",
":",
"if",
"isinstance",
"(",
"sprites",
",",
"list",
")",
"is",
"False",
":",
"sprites",
"=",
"[",
"sprites",
"]",
"for",
"sprite",
"in",
"sprites",
":",
"self",
".",
"tweener",
".",
"kill_tweens",
"(",
"sprite",
")"
] |
Get OA policy for a given DOI .
|
def get_oa_policy(doi):
    """Get the OA policy for *doi* via the Dissemin API.

    Looks up the publication entry whose "doi" matches exactly and
    returns its "policy" value; returns None on any failure (network,
    bad payload, or no matching publication).
    """
    try:
        response = requests.get("%s%s" % (DISSEMIN_API, doi))
        response.raise_for_status()
        payload = response.json()
        assert payload["status"] == "ok"
        matching = [pub for pub in payload["paper"]["publications"]
                    if pub["doi"] == doi]
        # IndexError from an empty match list is caught below.
        return matching[0]["policy"]
    except (AssertionError, ValueError, KeyError,
            RequestException, IndexError):
        return None
| 2,087
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/doi.py#L140-L168
|
[
"def",
"volumes_delete",
"(",
"storage_pool",
",",
"logger",
")",
":",
"try",
":",
"for",
"vol_name",
"in",
"storage_pool",
".",
"listVolumes",
"(",
")",
":",
"try",
":",
"vol",
"=",
"storage_pool",
".",
"storageVolLookupByName",
"(",
"vol_name",
")",
"vol",
".",
"delete",
"(",
"0",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volume %s.\"",
",",
"vol_name",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volumes.\"",
")"
] |
Get the original link behind the DOI .
|
def get_linked_version(doi):
    """Get the original link behind the DOI.

    Issues a HEAD request against the DOI URL and returns the
    "location" redirect header (None when absent or on request error).
    """
    try:
        head = requests.head(to_url(doi))
    except RequestException:
        return None
    return head.headers.get("location")
| 2,088
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/doi.py#L171-L185
|
[
"def",
"get_config_string",
"(",
"params",
",",
"units",
"=",
"None",
")",
":",
"compact_str_items",
"=",
"[",
"]",
"# first make a list of compact strings for each parameter",
"for",
"k",
",",
"v",
"in",
"params",
".",
"items",
"(",
")",
":",
"unit",
"=",
"\"\"",
"if",
"isinstance",
"(",
"units",
",",
"dict",
")",
":",
"#check if not None not enough, units could be mocked which causes errors",
"unit",
"=",
"units",
".",
"get",
"(",
"k",
",",
"\"\"",
")",
"compact_str_items",
".",
"append",
"(",
"k",
"+",
"\"=\"",
"+",
"str",
"(",
"v",
")",
"+",
"unit",
")",
"# and finally join them",
"compact_str",
"=",
"\", \"",
".",
"join",
"(",
"compact_str_items",
")",
"return",
"compact_str"
] |
Get a BibTeX entry for a given DOI .
|
def get_bibtex(doi):
    """Get a BibTeX entry for *doi* via DOI content negotiation.

    Requests the BibTeX MIME type and returns the response text only
    when the server actually answered with BibTeX; otherwise None.
    """
    bibtex_mime = "application/x-bibtex"
    try:
        response = requests.get(to_url(doi),
                                headers={"accept": bibtex_mime})
        response.raise_for_status()
        assert response.headers.get("content-type") == bibtex_mime
        return response.text
    except (RequestException, AssertionError):
        return None
| 2,089
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/doi.py#L188-L209
|
[
"def",
"clear_message_streams",
"(",
"self",
")",
":",
"for",
"client",
"in",
"self",
".",
"_clients",
":",
"client",
".",
"empty",
"(",
")",
"for",
"server",
"in",
"self",
".",
"_servers",
":",
"server",
".",
"empty",
"(",
")"
] |
Configures the logging module with a given dictionary which in most cases was loaded from a configuration file .
|
def _configure_logging ( self , logger_dict = None ) : self . log . debug ( "Configure logging" ) # Let's be sure, that for our log no handlers are registered anymore for handler in self . log . handlers : self . log . removeHandler ( handler ) if logger_dict is None : self . log . debug ( "No logger dictionary defined. Doing default logger configuration" ) formatter = logging . Formatter ( "%(name)s - %(asctime)s - [%(levelname)s] - %(module)s - %(message)s" ) stream_handler = logging . StreamHandler ( sys . stdout ) stream_handler . setLevel ( logging . WARNING ) stream_handler . setFormatter ( formatter ) self . log . addHandler ( stream_handler ) self . log . setLevel ( logging . WARNING ) else : self . log . debug ( "Logger dictionary defined. Loading dictConfig for logging" ) logging . config . dictConfig ( logger_dict ) self . log . debug ( "dictConfig loaded" )
| 2,090
|
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/groundwork.py#L92-L120
|
[
"def",
"_try_free_lease",
"(",
"self",
",",
"shard_state",
",",
"slice_retry",
"=",
"False",
")",
":",
"@",
"db",
".",
"transactional",
"def",
"_tx",
"(",
")",
":",
"fresh_state",
"=",
"model",
".",
"ShardState",
".",
"get_by_shard_id",
"(",
"shard_state",
".",
"shard_id",
")",
"if",
"fresh_state",
"and",
"fresh_state",
".",
"active",
":",
"# Free lease.",
"fresh_state",
".",
"slice_start_time",
"=",
"None",
"fresh_state",
".",
"slice_request_id",
"=",
"None",
"if",
"slice_retry",
":",
"fresh_state",
".",
"slice_retries",
"+=",
"1",
"fresh_state",
".",
"put",
"(",
")",
"try",
":",
"_tx",
"(",
")",
"# pylint: disable=broad-except",
"except",
"Exception",
",",
"e",
":",
"logging",
".",
"warning",
"(",
"e",
")",
"logging",
".",
"warning",
"(",
"\"Release lock for shard %s failed. Wait for lease to expire.\"",
",",
"shard_state",
".",
"shard_id",
")"
] |
Dumps a message field to the writer . Field is defined by the message field specification .
|
async def _dump_message_field ( self , writer , msg , field , fvalue = None ) : fname , ftype , params = field [ 0 ] , field [ 1 ] , field [ 2 : ] fvalue = getattr ( msg , fname , None ) if fvalue is None else fvalue await self . dump_field ( writer , fvalue , ftype , params )
| 2,091
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrserialize.py#L659-L671
|
[
"def",
"home_wins",
"(",
"self",
")",
":",
"try",
":",
"wins",
",",
"losses",
"=",
"re",
".",
"findall",
"(",
"r'\\d+'",
",",
"self",
".",
"_home_record",
")",
"return",
"wins",
"except",
"ValueError",
":",
"return",
"0"
] |
Loads message field from the reader . Field is defined by the message field specification . Returns the loaded value ; supports field reference .
|
async def _load_message_field(self, reader, msg, field):
    """Load one message field from *reader* into *msg*.

    The field is defined by its specification tuple
    ``(name, type, *params)``; the destination is passed as an
    ``eref`` field reference so load_field can assign through it.
    """
    name = field[0]
    ftype = field[1]
    extra = field[2:]
    await self.load_field(reader, ftype, extra, eref(msg, name))
| 2,092
|
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrserialize.py#L673-L684
|
[
"def",
"_dt_to_wmi",
"(",
"self",
",",
"dt",
")",
":",
"return",
"from_time",
"(",
"year",
"=",
"dt",
".",
"year",
",",
"month",
"=",
"dt",
".",
"month",
",",
"day",
"=",
"dt",
".",
"day",
",",
"hours",
"=",
"dt",
".",
"hour",
",",
"minutes",
"=",
"dt",
".",
"minute",
",",
"seconds",
"=",
"dt",
".",
"second",
",",
"microseconds",
"=",
"0",
",",
"timezone",
"=",
"0",
",",
")"
] |
Manually starts timer with the message .
|
def start(self, message):
    """Manually start the timer, logging *message*.

    Uses time.perf_counter() when available; time.clock was deprecated
    in Python 3.3 and removed in 3.8, so it is only used as a fallback
    on legacy interpreters where perf_counter does not exist.
    """
    timer = getattr(time, "perf_counter", None) or time.clock
    self._start = timer()
    VSGLogger.info("{0:<20} - Started".format(message))
| 2,093
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/timer.py#L47-L54
|
[
"def",
"_apply_pax_info",
"(",
"self",
",",
"pax_headers",
",",
"encoding",
",",
"errors",
")",
":",
"for",
"keyword",
",",
"value",
"in",
"pax_headers",
".",
"items",
"(",
")",
":",
"if",
"keyword",
"==",
"\"GNU.sparse.name\"",
":",
"setattr",
"(",
"self",
",",
"\"path\"",
",",
"value",
")",
"elif",
"keyword",
"==",
"\"GNU.sparse.size\"",
":",
"setattr",
"(",
"self",
",",
"\"size\"",
",",
"int",
"(",
"value",
")",
")",
"elif",
"keyword",
"==",
"\"GNU.sparse.realsize\"",
":",
"setattr",
"(",
"self",
",",
"\"size\"",
",",
"int",
"(",
"value",
")",
")",
"elif",
"keyword",
"in",
"PAX_FIELDS",
":",
"if",
"keyword",
"in",
"PAX_NUMBER_FIELDS",
":",
"try",
":",
"value",
"=",
"PAX_NUMBER_FIELDS",
"[",
"keyword",
"]",
"(",
"value",
")",
"except",
"ValueError",
":",
"value",
"=",
"0",
"if",
"keyword",
"==",
"\"path\"",
":",
"value",
"=",
"value",
".",
"rstrip",
"(",
"\"/\"",
")",
"setattr",
"(",
"self",
",",
"keyword",
",",
"value",
")",
"self",
".",
"pax_headers",
"=",
"pax_headers",
".",
"copy",
"(",
")"
] |
Manually stops timer with the message .
|
def stop(self, message):
    """Manually stop the timer, logging *message* and the elapsed time.

    Uses time.perf_counter() when available; time.clock was deprecated
    in Python 3.3 and removed in 3.8, so it is only used as a fallback
    on legacy interpreters where perf_counter does not exist.
    """
    timer = getattr(time, "perf_counter", None) or time.clock
    self._stop = timer()
    VSGLogger.info("{0:<20} - Finished [{1}s]".format(
        message, self.pprint(self._stop - self._start)))
| 2,094
|
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/timer.py#L56-L63
|
[
"def",
"make",
"(",
"self",
")",
":",
"logger",
".",
"debug",
"(",
"\"preparing to add all git files\"",
")",
"num_added",
"=",
"self",
".",
"local_repo",
".",
"add_all_files",
"(",
")",
"if",
"num_added",
":",
"self",
".",
"local_repo",
".",
"commit",
"(",
"\"Initial import from Project Gutenberg\"",
")",
"file_handler",
"=",
"NewFilesHandler",
"(",
"self",
")",
"file_handler",
".",
"add_new_files",
"(",
")",
"num_added",
"=",
"self",
".",
"local_repo",
".",
"add_all_files",
"(",
")",
"if",
"num_added",
":",
"self",
".",
"local_repo",
".",
"commit",
"(",
"\"Updates Readme, contributing, license files, cover, metadata.\"",
")"
] |
Try to fetch BibTeX from a found identifier .
|
def get_bibtex(identifier):
    """Try to fetch BibTeX from a found identifier.

    :param identifier: a ``(type, id)`` tuple; the type must be listed
        in the module's ``__valid_identifiers__``.
    :returns: the BibTeX from the matching ``libbmc.<type>`` module's
        ``get_bibtex``, or None when the type is unknown or the module
        is not loaded.
    """
    id_type, id_value = identifier
    if id_type not in __valid_identifiers__:
        return None
    # Dynamically dispatch to the sibling module handling this type.
    module = sys.modules.get("libbmc.%s" % (id_type,), None)
    if module is None:
        return None
    return module.get_bibtex(id_value)
| 2,095
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/papers/identifiers.py#L72-L92
|
[
"def",
"set_parameters",
"(",
"self",
",",
"samples",
":",
"np",
".",
"ndarray",
",",
"window_size",
",",
"data_min",
",",
"data_max",
")",
"->",
"bool",
":",
"redraw_needed",
"=",
"False",
"if",
"self",
".",
"samples_need_update",
":",
"self",
".",
"spectrogram",
".",
"samples",
"=",
"samples",
"redraw_needed",
"=",
"True",
"self",
".",
"samples_need_update",
"=",
"False",
"if",
"window_size",
"!=",
"self",
".",
"spectrogram",
".",
"window_size",
":",
"self",
".",
"spectrogram",
".",
"window_size",
"=",
"window_size",
"redraw_needed",
"=",
"True",
"if",
"data_min",
"!=",
"self",
".",
"spectrogram",
".",
"data_min",
":",
"self",
".",
"spectrogram",
".",
"data_min",
"=",
"data_min",
"redraw_needed",
"=",
"True",
"if",
"data_max",
"!=",
"self",
".",
"spectrogram",
".",
"data_max",
":",
"self",
".",
"spectrogram",
".",
"data_max",
"=",
"data_max",
"redraw_needed",
"=",
"True",
"return",
"redraw_needed"
] |
Only try to parse as JSON if the JSON content type header is set .
|
def initialize(self, *args, **kwargs):
    """Initialize the handler.

    Only attempts to parse the request body as JSON when the JSON
    content type header is set.
    """
    super(JSONHandler, self).initialize(*args, **kwargs)
    declared = self.request.headers.get('Content-Type', '').lower()
    if 'application/json' in declared:
        self._parse_json_body_arguments()
| 2,096
|
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/handlers/rest.py#L17-L25
|
[
"def",
"cancel_order",
"(",
"self",
",",
"order_id",
",",
"stock",
")",
":",
"url_fragment",
"=",
"'venues/{venue}/stocks/{stock}/orders/{order_id}'",
".",
"format",
"(",
"venue",
"=",
"self",
".",
"venue",
",",
"stock",
"=",
"stock",
",",
"order_id",
"=",
"order_id",
",",
")",
"url",
"=",
"urljoin",
"(",
"self",
".",
"base_url",
",",
"url_fragment",
")",
"return",
"self",
".",
"session",
".",
"delete",
"(",
"url",
")",
".",
"json",
"(",
")"
] |
Parse a BibTeX file to get a clean list of plaintext citations .
|
def get_plaintext_citations(bibtex):
    """Parse BibTeX to get a clean list of plaintext citations.

    :param bibtex: either a path to a ``.bib`` file or the BibTeX
        content itself.
    :returns: list of plaintext citation strings, one per entry.
    """
    parser = BibTexParser()
    parser.customization = convert_to_unicode
    # Accept a file path or raw BibTeX text.
    if os.path.isfile(bibtex):
        with open(bibtex) as handle:
            database = bibtexparser.load(handle, parser=parser)
    else:
        database = bibtexparser.loads(bibtex, parser=parser)
    # Render every entry as a plaintext citation and return them.
    return [bibentry_as_plaintext(entry) for entry in database.entries]
| 2,097
|
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/citations/bibtex.py#L36-L56
|
[
"def",
"concat",
"(",
"self",
",",
"operand",
",",
"start",
"=",
"0",
",",
"end",
"=",
"0",
",",
"offset",
"=",
"0",
")",
":",
"if",
"not",
"Gauged",
".",
"map_concat",
"(",
"self",
".",
"ptr",
",",
"operand",
".",
"ptr",
",",
"start",
",",
"end",
",",
"offset",
")",
":",
"raise",
"MemoryError"
] |
Loads INI configuration into this module s attributes .
|
def init(filename=ConfigPath):
    """Loads INI configuration into this module's attributes.

    *filename* may carry an optional ``path:SECTION`` suffix selecting
    the INI section to read; otherwise DEFAULT is used. Missing files
    are silently ignored; read errors are logged and swallowed.
    """
    section, parts = "DEFAULT", filename.rsplit(":", 1)
    if len(parts) > 1 and os.path.isfile(parts[0]):
        filename, section = parts
    if not os.path.isfile(filename):
        return
    vardict, parser = globals(), configparser.RawConfigParser()
    parser.optionxform = str  # Force case-sensitivity on names
    try:
        def parse_value(raw):
            try:
                return json.loads(raw)  # Try to interpret as JSON
            except ValueError:
                return raw  # JSON failed, fall back to raw
        txt = open(filename).read()
        # Add DEFAULT section if none present
        if not re.search("\\[\\w+\\]", txt):
            txt = "[DEFAULT]\n" + txt
        parser.readfp(StringIO.StringIO(txt), filename)
        # Each parsed option becomes a module-level attribute.
        for name, raw in parser.items(section):
            vardict[name] = parse_value(raw)
    except Exception:
        logging.warn("Error reading config from %s.", filename, exc_info=True)
| 2,098
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/conf.py#L253-L270
|
[
"def",
"fix_vcf_line",
"(",
"parts",
",",
"ref_base",
")",
":",
"swap",
"=",
"{",
"\"1/1\"",
":",
"\"0/0\"",
",",
"\"0/1\"",
":",
"\"0/1\"",
",",
"\"0/0\"",
":",
"\"1/1\"",
",",
"\"./.\"",
":",
"\"./.\"",
"}",
"complements",
"=",
"{",
"\"G\"",
":",
"\"C\"",
",",
"\"A\"",
":",
"\"T\"",
",",
"\"C\"",
":",
"\"G\"",
",",
"\"T\"",
":",
"\"A\"",
",",
"\"N\"",
":",
"\"N\"",
"}",
"varinfo",
",",
"genotypes",
"=",
"fix_line_problems",
"(",
"parts",
")",
"ref",
",",
"var",
"=",
"varinfo",
"[",
"3",
":",
"5",
"]",
"# non-reference regions or non-informative, can't do anything",
"if",
"ref_base",
"in",
"[",
"None",
",",
"\"N\"",
"]",
"or",
"set",
"(",
"genotypes",
")",
"==",
"set",
"(",
"[",
"\"./.\"",
"]",
")",
":",
"varinfo",
"=",
"None",
"# matching reference, all good",
"elif",
"ref_base",
"==",
"ref",
":",
"assert",
"ref_base",
"==",
"ref",
",",
"(",
"ref_base",
",",
"parts",
")",
"# swapped reference and alternate regions",
"elif",
"ref_base",
"==",
"var",
"or",
"ref",
"in",
"[",
"\"N\"",
",",
"\"0\"",
"]",
":",
"varinfo",
"[",
"3",
"]",
"=",
"var",
"varinfo",
"[",
"4",
"]",
"=",
"ref",
"genotypes",
"=",
"[",
"swap",
"[",
"x",
"]",
"for",
"x",
"in",
"genotypes",
"]",
"# reference is on alternate strand",
"elif",
"ref_base",
"!=",
"ref",
"and",
"complements",
".",
"get",
"(",
"ref",
")",
"==",
"ref_base",
":",
"varinfo",
"[",
"3",
"]",
"=",
"complements",
"[",
"ref",
"]",
"varinfo",
"[",
"4",
"]",
"=",
"\",\"",
".",
"join",
"(",
"[",
"complements",
"[",
"v",
"]",
"for",
"v",
"in",
"var",
".",
"split",
"(",
"\",\"",
")",
"]",
")",
"# unspecified alternative base",
"elif",
"ref_base",
"!=",
"ref",
"and",
"var",
"in",
"[",
"\"N\"",
",",
"\"0\"",
"]",
":",
"varinfo",
"[",
"3",
"]",
"=",
"ref_base",
"varinfo",
"[",
"4",
"]",
"=",
"ref",
"genotypes",
"=",
"[",
"swap",
"[",
"x",
"]",
"for",
"x",
"in",
"genotypes",
"]",
"# swapped and on alternate strand",
"elif",
"ref_base",
"!=",
"ref",
"and",
"complements",
".",
"get",
"(",
"var",
")",
"==",
"ref_base",
":",
"varinfo",
"[",
"3",
"]",
"=",
"complements",
"[",
"var",
"]",
"varinfo",
"[",
"4",
"]",
"=",
"\",\"",
".",
"join",
"(",
"[",
"complements",
"[",
"v",
"]",
"for",
"v",
"in",
"ref",
".",
"split",
"(",
"\",\"",
")",
"]",
")",
"genotypes",
"=",
"[",
"swap",
"[",
"x",
"]",
"for",
"x",
"in",
"genotypes",
"]",
"else",
":",
"print",
"\"Did not associate ref {0} with line: {1}\"",
".",
"format",
"(",
"ref_base",
",",
"varinfo",
")",
"if",
"varinfo",
"is",
"not",
"None",
":",
"return",
"varinfo",
"+",
"genotypes"
] |
Saves this module s changed attributes to INI configuration .
|
def save(filename=ConfigPath):
    """Saves this module's changed attributes to INI configuration.

    Only simple-typed, non-underscore module globals that differ from
    their defaults are written (JSON-encoded). When nothing differs,
    the configuration file is deleted instead. Errors are logged and
    swallowed.
    """
    default_values = defaults()
    parser = configparser.RawConfigParser()
    parser.optionxform = str  # Force case-sensitivity on names
    try:
        save_types = basestring, int, float, tuple, list, dict, type(None)
        for k, v in sorted(globals().items()):
            if (not isinstance(v, save_types) or k.startswith("_")
                    or default_values.get(k, parser) == v):
                continue  # for k, v
            try:
                parser.set("DEFAULT", k, json.dumps(v))
            except Exception:
                pass
        if parser.defaults():
            with open(filename, "wb") as f:
                f.write("# %s %s configuration written on %s.\n" % (
                    Title, Version,
                    datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
                parser.write(f)
        else:  # Nothing to write: delete configuration file
            try:
                os.unlink(filename)
            except Exception:
                pass
    except Exception:
        logging.warn("Error writing config to %s.", filename, exc_info=True)
| 2,099
|
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/conf.py#L273-L294
|
[
"def",
"getApplicationsThatSupportMimeType",
"(",
"self",
",",
"pchMimeType",
",",
"pchAppKeysThatSupportBuffer",
",",
"unAppKeysThatSupportBuffer",
")",
":",
"fn",
"=",
"self",
".",
"function_table",
".",
"getApplicationsThatSupportMimeType",
"result",
"=",
"fn",
"(",
"pchMimeType",
",",
"pchAppKeysThatSupportBuffer",
",",
"unAppKeysThatSupportBuffer",
")",
"return",
"result"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.