query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Send an output frame .
def send(self, output_name, frame):
    """Send an output frame.

    Delivers *frame* to every input method connected to the output
    named *output_name*.
    """
    connections = self._component_connections[output_name]
    for deliver in connections:
        deliver(frame)
3,400
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L280-L294
[ "def", "calculate_journal_volume", "(", "pub_date", ",", "year", ")", ":", "try", ":", "volume", "=", "str", "(", "pub_date", ".", "tm_year", "-", "year", "+", "1", ")", "except", "TypeError", ":", "volume", "=", "None", "except", "AttributeError", ":", "volume", "=", "None", "return", "volume" ]
Called by the event loop when it is started .
def start_event(self):
    """Called by the event loop when it is started.

    When output-frame pooling is enabled, refreshes the config and
    creates one ObjectPool per output, then runs the ``on_start`` hook.
    Any exception from the hook is logged and converted to
    ``StopIteration`` to halt the event loop.
    """
    # create object pool for each output
    if self.with_outframe_pool:
        self.update_config()
        pool_len = self.config['outframe_pool_len']
        for output in self.outputs:
            self.outframe_pool[output] = ObjectPool(Frame, self.new_frame, pool_len)
    try:
        self.on_start()
    except Exception as err:
        self.logger.exception(err)
        raise StopIteration()
3,401
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L324-L342
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
Called by the event loop when it is stopped .
def stop_event(self):
    """Called by the event loop when it is stopped.

    Runs the ``on_stop`` hook (logging, but not propagating, any error),
    then sends ``None`` on every output to signal end-of-stream to
    downstream components.
    """
    self.logger.debug('stopping')
    try:
        self.on_stop()
    except Exception as err:
        self.logger.exception(err)
    for output in self.outputs:
        self.send(output, None)
3,402
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L348-L361
[ "def", "generate_context", "(", "force_overwrite", "=", "False", ",", "drop_secret_key", "=", "False", ")", ":", "print", "(", "'... generating context'", ")", "context_fp", "=", "'%s/context.json'", "%", "os", ".", "environ", "[", "'PRODUCT_DIR'", "]", "context", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "context_fp", ")", ":", "print", "(", "'... augment existing context.json'", ")", "with", "open", "(", "context_fp", ",", "'r'", ")", "as", "context_f", ":", "content", "=", "context_f", ".", "read", "(", ")", ".", "strip", "(", ")", "or", "'{}'", "try", ":", "context", "=", "json", ".", "loads", "(", "content", ")", "except", "ValueError", ":", "print", "(", "'ERROR: not valid json in your existing context.json!!!'", ")", "return", "if", "force_overwrite", ":", "print", "(", "'... overwriting existing context.json'", ")", "if", "drop_secret_key", ":", "print", "(", "'... generating new SECRET_KEY'", ")", "context", "=", "{", "}", "else", ":", "print", "(", "'... using existing SECRET_KEY from existing context.json'", ")", "context", "=", "{", "'SECRET_KEY'", ":", "context", "[", "'SECRET_KEY'", "]", "}", "with", "open", "(", "context_fp", ",", "'w'", ")", "as", "context_f", ":", "new_context", "=", "tasks", ".", "get_context_template", "(", ")", "new_context", ".", "update", "(", "context", ")", "context_f", ".", "write", "(", "json", ".", "dumps", "(", "new_context", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", ")", "print", "(", ")", "print", "(", "'*** Successfully generated context.json'", ")" ]
Is component the last one in a pipeline .
def is_pipe_end(self):
    """Is this component the last one in a pipeline?

    True when none of the outputs has any connection to another
    component's input.
    """
    return not any(self._component_connections[name] for name in self.outputs)
3,403
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L363-L380
[ "def", "_check_rest_version", "(", "self", ",", "version", ")", ":", "version", "=", "str", "(", "version", ")", "if", "version", "not", "in", "self", ".", "supported_rest_versions", ":", "msg", "=", "\"Library is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "array_rest_versions", "=", "self", ".", "_list_available_rest_versions", "(", ")", "if", "version", "not", "in", "array_rest_versions", ":", "msg", "=", "\"Array is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "return", "LooseVersion", "(", "version", ")" ]
Called by the event loop when new config is available .
def new_config_event(self):
    """Called by the event loop when new config is available.

    Delegates to the ``on_set_config`` hook; any exception is logged and
    re-raised as ``StopIteration`` to stop the event loop.
    """
    try:
        self.on_set_config()
    except Exception as err:
        self.logger.exception(err)
        raise StopIteration()
3,404
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L389-L397
[ "def", "auto_convert_cell_no_flags", "(", "cell", ",", "units", "=", "None", ",", "parens_as_neg", "=", "True", ")", ":", "units", "=", "units", "if", "units", "!=", "None", "else", "{", "}", "return", "auto_convert_cell", "(", "flagable", "=", "Flagable", "(", ")", ",", "cell", "=", "cell", ",", "position", "=", "None", ",", "worksheet", "=", "0", ",", "flags", "=", "{", "}", ",", "units", "=", "units", ",", "parens_as_neg", "=", "parens_as_neg", ")" ]
Called by the event loop when a new input or output frame is available .
# Called by the event loop when a new input or output frame is available.
# Processing proceeds only when (a) every output pool has a spare frame,
# (b) every input buffer has a frame queued, and (c) all inputs carrying
# real frame numbers (>= 0) agree on the same number.  Inputs with a
# negative frame number are treated as 'static' and only their newest
# frame is kept.  Numbered frames older than the newest common frame are
# discarded since they can never match; a None input frame signals
# end-of-stream and raises StopIteration.  Any error raised by
# process_frame() is logged and converted to StopIteration so the event
# loop shuts the component down.
# NOTE(review): the statement ordering here (availability checks before
# any frame is consumed, discard-then-re-peek loops) is load-bearing;
# code left byte-identical, original flattened formatting preserved.
def new_frame_event ( self ) : # check output frames are available for out_pool in self . outframe_pool . values ( ) : if not out_pool . available ( ) : return # check input frames are available, and get current frame numbers frame_nos = { } for in_buff in self . input_buffer . values ( ) : if not in_buff . available ( ) : return in_frame = in_buff . peek ( ) if in_frame is None : raise StopIteration ( ) if in_frame . frame_no >= 0 : frame_nos [ in_buff ] = in_frame . frame_no else : # discard any superseded 'static' input while in_buff . available ( ) > 1 and in_buff . peek ( 1 ) is not None : in_buff . get ( ) if len ( frame_nos ) > 1 : frame_no = max ( frame_nos . values ( ) ) # discard old frames that can never be used for in_buff in frame_nos : while frame_nos [ in_buff ] < frame_no and in_buff . available ( ) > 1 : in_buff . get ( ) in_frame = in_buff . peek ( ) if in_frame is None : raise StopIteration ( ) frame_nos [ in_buff ] = in_frame . frame_no # check for complete set of matching frame numbers if min ( frame_nos . values ( ) ) != max ( frame_nos . values ( ) ) : return # now have a full set of correlated inputs to process try : self . process_frame ( ) except StopIteration : raise except Exception as ex : self . logger . exception ( ex ) raise StopIteration ( )
3,405
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/base.py#L410-L466
[ "def", "poissonVectorRDD", "(", "sc", ",", "mean", ",", "numRows", ",", "numCols", ",", "numPartitions", "=", "None", ",", "seed", "=", "None", ")", ":", "return", "callMLlibFunc", "(", "\"poissonVectorRDD\"", ",", "sc", ".", "_jsc", ",", "float", "(", "mean", ")", ",", "numRows", ",", "numCols", ",", "numPartitions", ",", "seed", ")" ]
Retrieves all members from this node of the tree down .
def get_tree_members(self):
    """Retrieve all members from this node of the tree down.

    Performs a breadth-first walk over this node and its descendants,
    collecting each node's member info, then projects every (truthy)
    member onto the attributes listed in ``self.attr_list``.
    """
    members = []
    visited = set()
    pending = deque([self])
    while pending:
        node = pending.popleft()
        if node in visited:
            continue
        members.extend(node.get_member_info())
        pending.extendleft(node.get_children())
        visited.add(node)
    return [{attribute: member.get(attribute) for attribute in self.attr_list}
            for member in members if member]
3,406
https://github.com/kavdev/ldap-groups/blob/0dd3a7d9eafa3903127364839b12a4b3dd3ca521/ldap_groups/groups.py#L464-L480
[ "def", "build_agency", "(", "pfeed", ")", ":", "return", "pd", ".", "DataFrame", "(", "{", "'agency_name'", ":", "pfeed", ".", "meta", "[", "'agency_name'", "]", ".", "iat", "[", "0", "]", ",", "'agency_url'", ":", "pfeed", ".", "meta", "[", "'agency_url'", "]", ".", "iat", "[", "0", "]", ",", "'agency_timezone'", ":", "pfeed", ".", "meta", "[", "'agency_timezone'", "]", ".", "iat", "[", "0", "]", ",", "}", ",", "index", "=", "[", "0", "]", ")" ]
recurse down the tree and return a list of the most deeply nested child nodes of the given triple
def deepest_node(triple, graph):
    """Recurse down the tree and return a list of the most deeply nested
    child nodes of the given triple.

    ``triple`` is a ``(subject, predicate, object)`` 3-tuple.

    Fix: Python 2 tuple-unpacking parameters (``def f((a, b, c), g)``)
    are a syntax error in Python 3; the triple is now unpacked in the
    function body instead.  Call sites are unchanged -- they still pass
    a single 3-tuple plus the graph.
    """
    # i don't fully accept the premise that this docstring presents
    # i'm not a docstring literalist
    subj, pred, obj = triple
    to_return = []

    def _deepest_node(triple, graph):
        subj, pred, obj = triple
        children = []
        if isinstance(obj, rt.BNode):
            # children: triples whose subject is this blank node
            for s, p, o in graph:
                if str(s) == str(obj):
                    children.append((s, p, o))
            for s, p, o in children:
                s1, p1, o1 = _deepest_node((s, p, o), graph)
                # coupling *smacks hand with ruler*
                if "rNews" in str(o1) and (s1, p1, o1) not in to_return:
                    to_return.append((s1, p1, o1))
            # NOTE(review): a BNode with no children leaves s1 unbound
            # here (NameError) -- preserved from the original.
            return (s1, p1, o1)
        else:
            return (subj, pred, obj)

    _deepest_node((subj, pred, obj), graph)
    return to_return
3,407
https://github.com/Parsely/schemato/blob/7002316fbcd52f2e669f8372bf1338c572e3df4b/schemato/utils.py#L4-L27
[ "def", "dump", "(", "self", ")", ":", "out", "=", "[", "]", "out", ".", "append", "(", "self", ".", "filetype", ")", "out", ".", "append", "(", "\"Format: {}\"", ".", "format", "(", "self", ".", "version", ")", ")", "out", ".", "append", "(", "\"Type: ASCII\"", ")", "out", ".", "append", "(", "\"\"", ")", "for", "cmd", "in", "self", ".", "commands", ":", "out", ".", "append", "(", "self", ".", "encode", "(", "cmd", ")", ")", "return", "\"\\n\"", ".", "join", "(", "out", ")", "+", "\"\\n\"" ]
Chained lookup of item on model
def getattribute(model, item):
    """Chained lookup of *item* on *model*.

    ``item`` is a dotted path (e.g. ``"author.name"``); each element is
    looked up with ``getattr``, and zero-argument callables are called.
    Returns ``None`` whenever the chain cannot be followed.

    Fix: the bare ``except:`` clauses also swallowed ``SystemExit`` and
    ``KeyboardInterrupt``; they now catch ``Exception`` only.
    """
    elements = item.split('.')
    element = elements.pop(0)
    try:
        # property/descriptor access itself may raise
        attr = getattr(model, element, None)
    except Exception:
        return None
    if attr is None:
        # end of recursion
        return None
    if callable(attr):
        try:
            attr = attr()
        except Exception:
            # couldn't call this method without params
            return None
    if elements:
        return getattribute(attr, '.'.join(elements))
    return attr
3,408
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/templatetags/getter_tags.py#L13-L38
[ "def", "start", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "is_running", "(", ")", ":", "self", ".", "websock_url", "=", "self", ".", "chrome", ".", "start", "(", "*", "*", "kwargs", ")", "self", ".", "websock", "=", "websocket", ".", "WebSocketApp", "(", "self", ".", "websock_url", ")", "self", ".", "websock_thread", "=", "WebsockReceiverThread", "(", "self", ".", "websock", ",", "name", "=", "'WebsockThread:%s'", "%", "self", ".", "chrome", ".", "port", ")", "self", ".", "websock_thread", ".", "start", "(", ")", "self", ".", "_wait_for", "(", "lambda", ":", "self", ".", "websock_thread", ".", "is_open", ",", "timeout", "=", "30", ")", "# tell browser to send us messages we're interested in", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Page.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Console.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Runtime.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.setForceUpdateOnPageLoad'", ")", "# disable google analytics", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.setBlockedURLs'", ",", "params", "=", "{", "'urls'", ":", "[", "'*google-analytics.com/analytics.js'", ",", "'*google-analytics.com/ga.js'", "]", "}", ")" ]
Display a comma - separated list of models for M2M fields
def nice_display(item):
    """Display a comma-separated list of models for M2M fields.

    Anything exposing an ``all()`` manager (a RelatedManager) is
    rendered as a joined string; every other value is returned as-is.
    """
    if not hasattr(item, 'all'):
        return item
    # RelatedManager: display a list
    return ', '.join(map(text_type, item.all()))
3,409
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/templatetags/getter_tags.py#L48-L52
[ "def", "enable_job", "(", "name", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "{", "'comment'", ":", "[", "]", ",", "'result'", ":", "True", "}", "if", "not", "name", ":", "ret", "[", "'comment'", "]", "=", "'Job name is required.'", "ret", "[", "'result'", "]", "=", "False", "if", "'test'", "in", "__opts__", "and", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'comment'", "]", "=", "'Job: {0} would be enabled in schedule.'", ".", "format", "(", "name", ")", "else", ":", "persist", "=", "True", "if", "'persist'", "in", "kwargs", ":", "persist", "=", "kwargs", "[", "'persist'", "]", "if", "name", "in", "list_", "(", "show_all", "=", "True", ",", "where", "=", "'opts'", ",", "return_yaml", "=", "False", ")", ":", "event_data", "=", "{", "'name'", ":", "name", ",", "'func'", ":", "'enable_job'", ",", "'persist'", ":", "persist", "}", "elif", "name", "in", "list_", "(", "show_all", "=", "True", ",", "where", "=", "'pillar'", ",", "return_yaml", "=", "False", ")", ":", "event_data", "=", "{", "'name'", ":", "name", ",", "'where'", ":", "'pillar'", ",", "'func'", ":", "'enable_job'", ",", "'persist'", ":", "False", "}", "else", ":", "ret", "[", "'comment'", "]", "=", "'Job {0} does not exist.'", ".", "format", "(", "name", ")", "ret", "[", "'result'", "]", "=", "False", "return", "ret", "try", ":", "eventer", "=", "salt", ".", "utils", ".", "event", ".", "get_event", "(", "'minion'", ",", "opts", "=", "__opts__", ")", "res", "=", "__salt__", "[", "'event.fire'", "]", "(", "event_data", ",", "'manage_schedule'", ")", "if", "res", ":", "event_ret", "=", "eventer", ".", "get_event", "(", "tag", "=", "'/salt/minion/minion_schedule_enabled_job_complete'", ",", "wait", "=", "30", ")", "if", "event_ret", "and", "event_ret", "[", "'complete'", "]", ":", "schedule", "=", "event_ret", "[", "'schedule'", "]", "# check item exists in schedule and is enabled", "if", "name", "in", "schedule", "and", "schedule", "[", "name", "]", "[", "'enabled'", "]", ":", "ret", "[", 
"'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'Enabled Job {0} in schedule.'", ".", "format", "(", "name", ")", "else", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "'Failed to enable job {0} in schedule.'", ".", "format", "(", "name", ")", "return", "ret", "except", "KeyError", ":", "# Effectively a no-op, since we can't really return without an event system", "ret", "[", "'comment'", "]", "=", "'Event module not available. Schedule enable job failed.'", "return", "ret" ]
Choose the most common item from the list or the first item if all items are unique .
def mostCommonItem(lst):
    """Choose the most common item from the list, or the first item if
    all items are unique (ties go to the earliest item in the list).

    Falsy entries are ignored; an empty (or all-falsy) list yields None.

    Fix: the original used ``max(set(lst), key=lst.count)``; set
    iteration order is arbitrary, so ties -- including the documented
    "all items unique" case -- did not reliably yield the first item.
    Counting once with ``Counter`` and scanning the list in order
    restores the documented behaviour (and is O(n) instead of O(n^2)).
    """
    from collections import Counter  # local import keeps this fix self-contained
    lst = [item for item in lst if item]
    if not lst:
        return None
    counts = Counter(lst)
    # max() returns the first maximal element, so list order breaks ties
    return max(lst, key=counts.__getitem__)
3,410
https://github.com/nicfit/MishMash/blob/8f988936340bf0ffb83ea90ea124efb3c36a1174/mishmash/util.py#L61-L69
[ "def", "write", "(", "self", ",", "data", ")", ":", "begin", ",", "end", ",", "size", "=", "0", ",", "0", ",", "len", "(", "data", ")", "bytes_sent", "=", "0", "raw_write", "=", "super", "(", "USBRawDevice", ",", "self", ")", ".", "write", "while", "not", "end", ">", "size", ":", "begin", "=", "end", "end", "=", "begin", "+", "self", ".", "RECV_CHUNK", "bytes_sent", "+=", "raw_write", "(", "data", "[", "begin", ":", "end", "]", ")", "return", "bytes_sent" ]
Obfuscates password from a database URL .
def safeDbUrl(db_url):
    """Obfuscate the password in a database URL, when one is present."""
    parsed = urlparse(db_url)
    if not parsed.password:
        return db_url
    return db_url.replace(parsed.password, "****")
3,411
https://github.com/nicfit/MishMash/blob/8f988936340bf0ffb83ea90ea124efb3c36a1174/mishmash/util.py#L72-L75
[ "def", "dice", "(", "input", ":", "Tensor", ",", "targs", ":", "Tensor", ",", "iou", ":", "bool", "=", "False", ")", "->", "Rank0Tensor", ":", "n", "=", "targs", ".", "shape", "[", "0", "]", "input", "=", "input", ".", "argmax", "(", "dim", "=", "1", ")", ".", "view", "(", "n", ",", "-", "1", ")", "targs", "=", "targs", ".", "view", "(", "n", ",", "-", "1", ")", "intersect", "=", "(", "input", "*", "targs", ")", ".", "sum", "(", ")", ".", "float", "(", ")", "union", "=", "(", "input", "+", "targs", ")", ".", "sum", "(", ")", ".", "float", "(", ")", "if", "not", "iou", ":", "return", "(", "2.", "*", "intersect", "/", "union", "if", "union", ">", "0", "else", "union", ".", "new", "(", "[", "1.", "]", ")", ".", "squeeze", "(", ")", ")", "else", ":", "return", "intersect", "/", "(", "union", "-", "intersect", "+", "1.0", ")" ]
Read parse and return given Json config file
def loadJson(self, filename):
    """Read, parse and return the given JSON config file.

    Returns an empty dict when *filename* does not exist.

    Fix: the original left the file handle open
    (``open(...).readlines()`` with no close) and stitched lines
    together with ``' '.join``; the file is now read inside a ``with``
    block and parsed directly with ``json.load``.
    """
    jsonConfig = {}
    if os.path.isfile(filename):
        with open(filename, 'r') as config_file:
            jsonConfig = json.load(config_file)
    return jsonConfig
3,412
https://github.com/bear/bearlib/blob/30f9b8ba4b7a8db4cd2f4c6e07966ae51d0a00dd/bearlib/config.py#L203-L209
[ "def", "start_packet_groups", "(", "self", ",", "clear_time_stamps", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "clear_time_stamps", ":", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list", ")", ")" ]
The model can't be changed once the export is created.
def get_readonly_fields(self, request, obj=None):
    """The model can't be changed once the export is created."""
    if obj is not None:
        # editing an existing export: defer to the parent admin
        return super(ExportAdmin, self).get_readonly_fields(request, obj)
    # adding a new export: nothing is read-only yet
    return []
3,413
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/admin.py#L31-L35
[ "def", "Run", "(", "self", ",", "args", ")", ":", "with", "vfs", ".", "VFSOpen", "(", "args", ".", "pathspec", ",", "progress_callback", "=", "self", ".", "Progress", ")", "as", "file_obj", ":", "fingerprinter", "=", "Fingerprinter", "(", "self", ".", "Progress", ",", "file_obj", ")", "response", "=", "rdf_client_action", ".", "FingerprintResponse", "(", ")", "response", ".", "pathspec", "=", "file_obj", ".", "pathspec", "if", "args", ".", "tuples", ":", "tuples", "=", "args", ".", "tuples", "else", ":", "# There are none selected -- we will cover everything", "tuples", "=", "list", "(", ")", "for", "k", "in", "self", ".", "_fingerprint_types", ":", "tuples", ".", "append", "(", "rdf_client_action", ".", "FingerprintTuple", "(", "fp_type", "=", "k", ")", ")", "for", "finger", "in", "tuples", ":", "hashers", "=", "[", "self", ".", "_hash_types", "[", "h", "]", "for", "h", "in", "finger", ".", "hashers", "]", "or", "None", "if", "finger", ".", "fp_type", "in", "self", ".", "_fingerprint_types", ":", "invoke", "=", "self", ".", "_fingerprint_types", "[", "finger", ".", "fp_type", "]", "res", "=", "invoke", "(", "fingerprinter", ",", "hashers", ")", "if", "res", ":", "response", ".", "matching_types", ".", "append", "(", "finger", ".", "fp_type", ")", "else", ":", "raise", "RuntimeError", "(", "\"Encountered unknown fingerprint type. %s\"", "%", "finger", ".", "fp_type", ")", "# Structure of the results is a list of dicts, each containing the", "# name of the hashing method, hashes for enabled hash algorithms,", "# and auxilliary data where present (e.g. 
signature blobs).", "# Also see Fingerprint:HashIt()", "response", ".", "results", "=", "fingerprinter", ".", "HashIt", "(", ")", "# We now return data in a more structured form.", "for", "result", "in", "response", ".", "results", ":", "if", "result", ".", "GetItem", "(", "\"name\"", ")", "==", "\"generic\"", ":", "for", "hash_type", "in", "[", "\"md5\"", ",", "\"sha1\"", ",", "\"sha256\"", "]", ":", "value", "=", "result", ".", "GetItem", "(", "hash_type", ")", "if", "value", "is", "not", "None", ":", "setattr", "(", "response", ".", "hash", ",", "hash_type", ",", "value", ")", "if", "result", "[", "\"name\"", "]", "==", "\"pecoff\"", ":", "for", "hash_type", "in", "[", "\"md5\"", ",", "\"sha1\"", ",", "\"sha256\"", "]", ":", "value", "=", "result", ".", "GetItem", "(", "hash_type", ")", "if", "value", ":", "setattr", "(", "response", ".", "hash", ",", "\"pecoff_\"", "+", "hash_type", ",", "value", ")", "signed_data", "=", "result", ".", "GetItem", "(", "\"SignedData\"", ",", "[", "]", ")", "for", "data", "in", "signed_data", ":", "response", ".", "hash", ".", "signed_data", ".", "Append", "(", "revision", "=", "data", "[", "0", "]", ",", "cert_type", "=", "data", "[", "1", "]", ",", "certificate", "=", "data", "[", "2", "]", ")", "self", ".", "SendReply", "(", "response", ")" ]
If we're adding, 'save' must act as 'save and continue editing'.
def response_add(self, request, obj, post_url_continue=POST_URL_CONTINUE):
    """If we're adding, 'save' must act as 'save and continue editing'."""
    add_another = '_addanother' in request.POST
    is_popup = '_popup' in request.POST
    if not (add_another or is_popup):
        # force the change view to be shown after saving
        request.POST['_continue'] = 1
    return super(ExportAdmin, self).response_add(request, obj, post_url_continue)
3,414
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/admin.py#L45-L57
[ "def", "get_targets_bottom", "(", "modality_type", ",", "value", "=", "None", ")", ":", "if", "modality_type", "==", "ModalityType", ".", "AUDIO", ":", "return", "make_targets_bottom", "(", "audio_bottom", ")", "elif", "modality_type", "==", "ModalityType", ".", "AUDIO_SPECTRAL", ":", "return", "make_targets_bottom", "(", "audio_spectral_bottom", ")", "elif", "modality_type", "in", "(", "ModalityType", ".", "CLASS_LABEL", ",", "ModalityType", ".", "MULTI_LABEL", ",", "ModalityType", ".", "ONE_HOT_CLASS_LABEL", ",", "ModalityType", ".", "SIGMOID_CLASS_LABEL", ",", "ModalityType", ".", "SIGMOID_MAX_POOLING_CLASS_LABEL", ",", "ModalityType", ".", "SOFTMAX_AVERAGE_POOLING_CLASS_LABEL", ",", "ModalityType", ".", "SOFTMAX_LAST_TIMESTEP_CLASS_LABEL", ",", "ModalityType", ".", "SOFTMAX_MAX_POOLING_CLASS_LABEL", ")", ":", "return", "class_label_targets_bottom", "elif", "modality_type", "in", "(", "ModalityType", ".", "CTC_SYMBOL", ",", "ModalityType", ".", "SYMBOL", ",", "ModalityType", ".", "SYMBOL_WEIGHTS_ALL", ")", ":", "return", "symbol_targets_bottom", "elif", "modality_type", "in", "(", "ModalityType", ".", "GENERIC_L2_LOSS", ",", "ModalityType", ".", "IDENTITY_SYMBOL", ")", ":", "return", "identity_bottom", "elif", "modality_type", "==", "ModalityType", ".", "IDENTITY", ":", "return", "make_targets_bottom", "(", "identity_bottom", ")", "elif", "modality_type", "==", "ModalityType", ".", "IMAGE", ":", "return", "image_targets_bottom", "elif", "modality_type", "in", "(", "ModalityType", ".", "IMAGE_CHANNEL_BOTTOM_IDENTITY", ",", "ModalityType", ".", "IMAGE_CHANNEL_COMPRESS", ")", ":", "return", "image_channel_compress_targets_bottom", "elif", "modality_type", "==", "ModalityType", ".", "IMAGE_CHANNEL_EMBEDDINGS_BOTTOM", ":", "return", "image_channel_embeddings_bottom", "elif", "modality_type", "in", "(", "ModalityType", ".", "REAL", ",", "ModalityType", ".", "REAL_L2_LOSS", ",", "ModalityType", ".", "REAL_LOG_POISSON_LOSS", ")", ":", "return", "make_targets_bottom", 
"(", "real_bottom", ")", "elif", "modality_type", "==", "ModalityType", ".", "SPEECH_RECOGNITION", ":", "return", "make_targets_bottom", "(", "speech_recognition_bottom", ")", "elif", "modality_type", "==", "ModalityType", ".", "SYMBOL_ONE_HOT", ":", "return", "symbol_one_hot_bottom", "elif", "modality_type", "in", "(", "ModalityType", ".", "VIDEO", ",", "ModalityType", ".", "VIDEO_L1", ",", "ModalityType", ".", "VIDEO_L2", ")", ":", "return", "video_targets_bottom", "elif", "modality_type", "==", "ModalityType", ".", "VIDEO_BITWISE", ":", "return", "video_bitwise_targets_bottom", "elif", "modality_type", "==", "ModalityType", ".", "VIDEO_IDENTITY", ":", "return", "video_identity_targets_bottom", "elif", "modality_type", "in", "(", "ModalityType", ".", "VIDEO_L1_RAW", ",", "ModalityType", ".", "VIDEO_L2_RAW", ")", ":", "return", "video_raw_targets_bottom", "elif", "modality_type", "==", "ModalityType", ".", "VIDEO_PIXEL_NOISE", ":", "return", "make_targets_bottom", "(", "video_pixel_noise_bottom", ")", "return", "value" ]
Verifies that the pylint score is above a given threshold .
def above_score_threshold(new_data, old_data, strict=False, threshold=None):
    """Verify that the pylint score is above a given threshold.

    :param new_data: dict with ``'average'`` (overall score) and
        ``'scores'`` (list of ``(score, filename)`` tuples).
    :param old_data: previous run's data (unused by this check).
    :param strict: when True every file must meet the threshold;
        otherwise only the average is checked.
    :param threshold: minimum acceptable score; defaults to
        ``PYLINT_SCORE_THRESHOLD`` (resolved at call time).
    :returns: ``(success, score, message)`` tuple; ``score`` is -1 on
        failure, 0 on success.

    Fixes: the non-strict failure message hard-coded "(9)" instead of
    the actual threshold; the default is now resolved lazily so passing
    an explicit ``threshold`` never requires the module constant.
    """
    if threshold is None:
        threshold = PYLINT_SCORE_THRESHOLD
    success = True
    score = 0
    message = ''
    if strict:
        # every file must individually meet the threshold
        for fscore, fname in new_data['scores']:
            if fscore < threshold:
                success = False
                score = -1
                message += "File {} score ({}) below threshold {}\n".format(
                    fname, fscore, threshold)
        return success, score, message
    if new_data['average'] < threshold:
        success = False
        message = ("Failed! Average pylint score ({}) "
                   "below threshold ({})!".format(new_data['average'], threshold))
        score = -1
    return success, score, message
3,415
https://github.com/astraw38/lint/blob/162ceefcb812f07d18544aaa887b9ec4f102cfb1/lint/validators/pylint_validator.py#L72-L99
[ "def", "channeldir_node_to_row", "(", "self", ",", "path_tuple", ")", ":", "row", "=", "dict", "(", ")", "for", "key", "in", "CONTENT_INFO_HEADER", ":", "row", "[", "key", "]", "=", "None", "row", "[", "CONTENT_PATH_KEY", "]", "=", "\"/\"", ".", "join", "(", "path_tuple", ")", "# use / in .csv on Windows and UNIX", "title", "=", "path_tuple", "[", "-", "1", "]", ".", "replace", "(", "'_'", ",", "' '", ")", "for", "ext", "in", "content_kinds", ".", "MAPPING", ".", "keys", "(", ")", ":", "if", "title", ".", "endswith", "(", "ext", ")", ":", "title", "=", "title", ".", "replace", "(", "'.'", "+", "ext", ",", "''", ")", "row", "[", "CONTENT_TITLE_KEY", "]", "=", "title", "row", "[", "CONTENT_SOURCEID_KEY", "]", "=", "path_tuple", "[", "-", "1", "]", "return", "row" ]
Run the new pylint data through given all current checkers including comparisons to old pylint data .
def run(self, new_pylint_data, old_pylint_data):
    """Run the new pylint data through all current checkers, including
    comparisons against the old pylint data.

    Returns ``(score, message)`` from the first failing checker, or the
    default score and formatted default message when all checkers pass.
    """
    for checker in self.checkers:
        success, score, message = checker(new_pylint_data, old_pylint_data)
        if not success:
            return score, message
    return self.default_score, self.default_message.format(new_pylint_data['average'])
3,416
https://github.com/astraw38/lint/blob/162ceefcb812f07d18544aaa887b9ec4f102cfb1/lint/validators/pylint_validator.py#L34-L46
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Helper: open a file or URL and return the content and identifier.
def _get_document(self, source):
    """Helper: open a file or URL and return ``(content, identifier)``.

    Tries *source* as a URL first (prefixing "http://" when no scheme
    is given), then as a local file path.  On total failure the
    original string is returned with ``None`` as the identifier.

    Fixes: the bare ``except:`` clauses (which also swallowed
    ``KeyboardInterrupt``/``SystemExit``) now catch ``Exception`` only,
    and the local file handle is closed via ``with`` instead of leaking.
    """
    scheme_url = source
    if not source.startswith("http"):
        scheme_url = "http://%s" % source
    text = source
    try:
        text = urllib.urlopen(scheme_url).read()
    except Exception:
        pass
    else:
        return (text, scheme_url)
    try:
        with open(source, "r") as handle:
            text = handle.read()
    except Exception:
        pass
    else:
        return (text, source)
    return (text, None)
3,417
https://github.com/Parsely/schemato/blob/7002316fbcd52f2e669f8372bf1338c572e3df4b/schemato/schemato.py#L74-L96
[ "def", "updateSeriesRegistrationStatus", "(", ")", ":", "from", ".", "models", "import", "Series", "if", "not", "getConstant", "(", "'general__enableCronTasks'", ")", ":", "return", "logger", ".", "info", "(", "'Checking status of Series that are open for registration.'", ")", "open_series", "=", "Series", ".", "objects", ".", "filter", "(", ")", ".", "filter", "(", "*", "*", "{", "'registrationOpen'", ":", "True", "}", ")", "for", "series", "in", "open_series", ":", "series", ".", "updateRegistrationStatus", "(", ")" ]
Returns random unused port number .
def select_random(ports=None, exclude_ports=None):
    """Return a random unused port number.

    :param ports: candidate port set (defaults to
        ``available_good_ports()``).
    :param exclude_ports: ports to leave out of consideration.
    :raises PortForException: when no free port can be selected among
        (up to) 100 random candidates.

    Fixes: the candidate set is copied before filtering, so the
    caller's ``ports`` set is no longer mutated in place
    (``difference_update``); sampling is done from a list because
    ``random.sample`` on a set raises ``TypeError`` on Python 3.11+.
    """
    if ports is None:
        ports = available_good_ports()
    if exclude_ports is None:
        exclude_ports = set()
    # work on a copy -- never mutate the caller's set
    candidates = list(set(ports) - set(exclude_ports))
    for port in random.sample(candidates, min(len(candidates), 100)):
        if not port_is_used(port):
            return port
    raise PortForException("Can't select a port")
3,418
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/api.py#L15-L30
[ "def", "from_overlays", "(", "overlays", ")", ":", "jc", "=", "JobConfig", "(", ")", "jc", ".", "comment", "=", "overlays", ".", "get", "(", "'comment'", ")", "if", "'jobConfigOverlays'", "in", "overlays", ":", "if", "len", "(", "overlays", "[", "'jobConfigOverlays'", "]", ")", ">=", "1", ":", "jco", "=", "copy", ".", "deepcopy", "(", "overlays", "[", "'jobConfigOverlays'", "]", "[", "0", "]", ")", "# Now extract the logical information", "if", "'jobConfig'", "in", "jco", ":", "_jc", "=", "jco", "[", "'jobConfig'", "]", "jc", ".", "job_name", "=", "_jc", ".", "pop", "(", "'jobName'", ",", "None", ")", "jc", ".", "job_group", "=", "_jc", ".", "pop", "(", "'jobGroup'", ",", "None", ")", "jc", ".", "preload", "=", "_jc", ".", "pop", "(", "'preloadApplicationBundles'", ",", "False", ")", "jc", ".", "data_directory", "=", "_jc", ".", "pop", "(", "'dataDirectory'", ",", "None", ")", "jc", ".", "tracing", "=", "_jc", ".", "pop", "(", "'tracing'", ",", "None", ")", "for", "sp", "in", "_jc", ".", "pop", "(", "'submissionParameters'", ",", "[", "]", ")", ":", "jc", ".", "submission_parameters", "[", "sp", "[", "'name'", "]", "]", "=", "sp", "[", "'value'", "]", "if", "not", "_jc", ":", "del", "jco", "[", "'jobConfig'", "]", "if", "'deploymentConfig'", "in", "jco", ":", "_dc", "=", "jco", "[", "'deploymentConfig'", "]", "if", "'manual'", "==", "_dc", ".", "get", "(", "'fusionScheme'", ")", ":", "if", "'fusionTargetPeCount'", "in", "_dc", ":", "jc", ".", "target_pe_count", "=", "_dc", ".", "pop", "(", "'fusionTargetPeCount'", ")", "if", "len", "(", "_dc", ")", "==", "1", ":", "del", "jco", "[", "'deploymentConfig'", "]", "if", "jco", ":", "jc", ".", "raw_overlay", "=", "jco", "return", "jc" ]
Returns a list of good port ranges. Such ranges are large and don't contain ephemeral or well-known ports. Range borders are also excluded.
def good_port_ranges(ports=None, min_range_len=20, border=3):
    """Return large contiguous port ranges with their borders trimmed.

    Ranges that would be shorter than ``min_range_len`` after trimming
    ``border`` ports at each end are dropped.  ``ports`` defaults to
    :func:`available_ports`.
    """
    if ports is None:
        ports = available_ports()
    # A range must be long enough to survive losing `border` at each end.
    required_len = min_range_len + 2 * border
    spans = utils.to_ranges(list(ports))
    # Longest ranges first (ties broken by the range bounds, descending).
    spans = sorted(spans, key=lambda span: (span[1] - span[0], span),
                   reverse=True)
    return [
        (low + border, high - border)
        for low, high in spans
        if high - low >= required_len
    ]
3,419
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/api.py#L61-L74
[ "def", "get_duration", "(", "self", ")", ":", "postgame", "=", "self", ".", "get_postgame", "(", ")", "if", "postgame", ":", "return", "postgame", ".", "duration_int", "*", "1000", "duration", "=", "self", ".", "_header", ".", "initial", ".", "restore_time", "try", ":", "while", "self", ".", "_handle", ".", "tell", "(", ")", "<", "self", ".", "size", ":", "operation", "=", "mgz", ".", "body", ".", "operation", ".", "parse_stream", "(", "self", ".", "_handle", ")", "if", "operation", ".", "type", "==", "'sync'", ":", "duration", "+=", "operation", ".", "time_increment", "elif", "operation", ".", "type", "==", "'action'", ":", "if", "operation", ".", "action", ".", "type", "==", "'resign'", ":", "self", ".", "_cache", "[", "'resigned'", "]", ".", "add", "(", "operation", ".", "action", ".", "player_id", ")", "self", ".", "_handle", ".", "seek", "(", "self", ".", "body_position", ")", "except", "(", "construct", ".", "core", ".", "ConstructError", ",", "zlib", ".", "error", ",", "ValueError", ")", ":", "raise", "RuntimeError", "(", "\"invalid mgz file\"", ")", "return", "duration" ]
Returns whether the port is used. A port is considered used if the current process can't bind to it or the port doesn't refuse connections.
def port_is_used(port, host='127.0.0.1'):
    """Return True if *port* is in use on *host*.

    A port counts as free only when this process can bind to it AND the
    port refuses incoming connections; anything else means "used".
    """
    if _can_bind(port, host) and _refuses_connection(port, host):
        return False
    return True
3,420
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/api.py#L83-L89
[ "def", "marvcli_comment_list", "(", "datasets", ")", ":", "app", "=", "create_app", "(", ")", "ids", "=", "parse_setids", "(", "datasets", ",", "dbids", "=", "True", ")", "comments", "=", "db", ".", "session", ".", "query", "(", "Comment", ")", ".", "options", "(", "db", ".", "joinedload", "(", "Comment", ".", "dataset", ")", ")", ".", "filter", "(", "Comment", ".", "dataset_id", ".", "in_", "(", "ids", ")", ")", "for", "comment", "in", "sorted", "(", "comments", ",", "key", "=", "lambda", "x", ":", "(", "x", ".", "dataset", ".", "_setid", ",", "x", ".", "id", ")", ")", ":", "print", "(", "comment", ".", "dataset", ".", "setid", ",", "comment", ".", "id", ",", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "int", "(", "comment", ".", "time_added", "/", "1000", ")", ")", ",", "comment", ".", "author", ",", "repr", "(", "comment", ".", "text", ")", ")" ]
Most recent day, if it's during the Advent of Code. Happy Holidays! Day 1 is assumed otherwise.
def current_day():
    """Return today's puzzle day (1-25) during Advent of Code.

    Outside December a warning is logged and day 1 is assumed.
    """
    now = datetime.datetime.now(tz=AOC_TZ)
    if now.month == 12:
        # Days past the 25th still map to the final puzzle.
        return min(now.day, 25)
    log.warning("current_day is only available in December (EST)")
    return 1
3,421
https://github.com/wimglenn/advent-of-code-data/blob/a3856459d225840f2b6919659fc65aa7a6a74533/aocd/get.py#L57-L67
[ "def", "merge_offsets_metadata", "(", "topics", ",", "*", "offsets_responses", ")", ":", "result", "=", "dict", "(", ")", "for", "topic", "in", "topics", ":", "partition_offsets", "=", "[", "response", "[", "topic", "]", "for", "response", "in", "offsets_responses", "if", "topic", "in", "response", "]", "result", "[", "topic", "]", "=", "merge_partition_offsets", "(", "*", "partition_offsets", ")", "return", "result" ]
Register a Linter class for file verification .
def register_linter(linter):
    """Register a Linter class with the LintFactory plugin list.

    Raises:
        LinterException: if *linter* lacks a ``run`` method or an
            ``EXTS`` attribute.
    """
    is_valid = hasattr(linter, "EXTS") and hasattr(linter, "run")
    if not is_valid:
        raise LinterException("Linter does not have 'run' method or EXTS variable!")
    LintFactory.PLUGINS.append(linter)
3,422
https://github.com/astraw38/lint/blob/162ceefcb812f07d18544aaa887b9ec4f102cfb1/lint/linters/lint_factory.py#L32-L42
[ "def", "get_series", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", ".", "series", "import", "Series", ",", "SeriesDataWrapper", "return", "self", ".", "get_related_resource", "(", "Series", ",", "SeriesDataWrapper", ",", "args", ",", "kwargs", ")" ]
Escapes certain filter characters from an LDAP query .
def escape_query(query):
    """Escape LDAP filter metacharacters in *query* (RFC 4515 style)."""
    # One C-level pass instead of four chained .replace() calls.
    replacements = str.maketrans({
        "\\": r"\5C",
        "*": r"\2A",
        "(": r"\28",
        ")": r"\29",
    })
    return query.translate(replacements)
3,423
https://github.com/kavdev/ldap-groups/blob/0dd3a7d9eafa3903127364839b12a4b3dd3ca521/ldap_groups/utils.py#L23-L26
[ "def", "image_write", "(", "image", ",", "filename", ",", "ri", "=", "False", ")", ":", "if", "filename", ".", "endswith", "(", "'.npy'", ")", ":", "img_array", "=", "image", ".", "numpy", "(", ")", "img_header", "=", "{", "'origin'", ":", "image", ".", "origin", ",", "'spacing'", ":", "image", ".", "spacing", ",", "'direction'", ":", "image", ".", "direction", ".", "tolist", "(", ")", ",", "'components'", ":", "image", ".", "components", "}", "np", ".", "save", "(", "filename", ",", "img_array", ")", "with", "open", "(", "filename", ".", "replace", "(", "'.npy'", ",", "'.json'", ")", ",", "'w'", ")", "as", "outfile", ":", "json", ".", "dump", "(", "img_header", ",", "outfile", ")", "else", ":", "image", ".", "to_file", "(", "filename", ")", "if", "ri", ":", "return", "image" ]
Raises ValueError if 2 arguments are not passed to an XOR
def _validate_xor_args ( self , p ) : if len ( p [ 1 ] ) != 2 : raise ValueError ( 'Invalid syntax: XOR only accepts 2 arguments, got {0}: {1}' . format ( len ( p [ 1 ] ) , p ) )
3,424
https://github.com/ambitioninc/kmatch/blob/22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d/kmatch/kmatch.py#L98-L103
[ "def", "contributors", "(", "lancet", ",", "output", ")", ":", "sorting", "=", "pygit2", ".", "GIT_SORT_TIME", "|", "pygit2", ".", "GIT_SORT_REVERSE", "commits", "=", "lancet", ".", "repo", ".", "walk", "(", "lancet", ".", "repo", ".", "head", ".", "target", ",", "sorting", ")", "contributors", "=", "(", "(", "c", ".", "author", ".", "name", ",", "c", ".", "author", ".", "email", ")", "for", "c", "in", "commits", ")", "contributors", "=", "OrderedDict", "(", "contributors", ")", "template_content", "=", "content_from_path", "(", "lancet", ".", "config", ".", "get", "(", "'packaging'", ",", "'contributors_template'", ")", ")", "template", "=", "Template", "(", "template_content", ")", "output", ".", "write", "(", "template", ".", "render", "(", "contributors", "=", "contributors", ")", ".", "encode", "(", "'utf-8'", ")", ")" ]
Returns True or False if the value in the pattern p matches the filter.
def _match_value_filter ( self , p , value ) : return self . _VALUE_FILTER_MAP [ p [ 0 ] ] ( value [ p [ 1 ] ] , p [ 2 ] )
3,425
https://github.com/ambitioninc/kmatch/blob/22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d/kmatch/kmatch.py#L139-L143
[ "def", "console", "(", "self", ",", "console", ")", ":", "if", "console", "==", "self", ".", "_console", ":", "return", "if", "self", ".", "_console_type", "==", "\"vnc\"", "and", "console", "is", "not", "None", "and", "console", "<", "5900", ":", "raise", "NodeError", "(", "\"VNC console require a port superior or equal to 5900 currently it's {}\"", ".", "format", "(", "console", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "console", "is", "not", "None", ":", "if", "self", ".", "console_type", "==", "\"vnc\"", ":", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "console", ",", "self", ".", "_project", ",", "port_range_start", "=", "5900", ",", "port_range_end", "=", "6000", ")", "else", ":", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "console", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: console port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "console", ")", ")" ]
Builds a set of all field keys used in the pattern including nested fields .
def get_field_keys(self, pattern=None):
    """Return the set of all field keys used in *pattern*, including
    nested fields.  Defaults to this object's own pattern."""
    # Recurse with an explicit pattern, or start from our own.
    pattern = pattern or self.pattern
    # Validation lets us assume the pattern shape below.
    self._validate(pattern)
    # Length-2 patterns that are not key filters are logical operators;
    # key filters put the field key at index 1, just like 3-item patterns.
    if len(pattern) == 2 and pattern[0] not in self._KEY_FILTER_MAP:
        if pattern[0] == '!':
            return self.get_field_keys(pattern[1])
        # '&', '|' or '^': merge keys from every nested sub-pattern.
        keys = set()
        for sub_pattern in pattern[1]:
            keys |= self.get_field_keys(sub_pattern)
        return keys
    return {pattern[1]}
3,426
https://github.com/ambitioninc/kmatch/blob/22bb5f0c1d86d0e4a69bdf18f092f095934ebb0d/kmatch/kmatch.py#L164-L194
[ "def", "getNextSample", "(", "self", ",", "V", ")", ":", "W", ",", "WProb", "=", "self", ".", "drawRankingPlakettLuce", "(", "V", ")", "VProb", "=", "self", ".", "calcProbOfVFromW", "(", "V", ",", "W", ")", "acceptanceRatio", "=", "self", ".", "calcAcceptanceRatio", "(", "V", ",", "W", ")", "prob", "=", "min", "(", "1.0", ",", "acceptanceRatio", "*", "(", "VProb", "/", "WProb", ")", ")", "if", "random", ".", "random", "(", ")", "<=", "prob", ":", "V", "=", "W", "return", "V" ]
Write metadata to an image video or XMP sidecar file .
def to_file ( self , path ) : xmp_path = path + '.xmp' # remove any existing XMP file if os . path . exists ( xmp_path ) : os . unlink ( xmp_path ) # attempt to open image/video file for metadata md_path = path md = GExiv2 . Metadata ( ) try : md . open_path ( md_path ) except GLib . GError : # file type does not support metadata so use XMP sidecar md_path = xmp_path # create empty XMP file with open ( md_path , 'w' ) as of : of . write ( '''<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?> <x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="XMP Core 4.4.0-Exiv2"> <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"> <rdf:Description rdf:about="" xmlns:xmp="http://ns.adobe.com/xap/1.0/" xmp:CreatorTool=""/> </rdf:RDF> </x:xmpmeta> <?xpacket end="w"?>''' ) md = GExiv2 . Metadata ( ) md . open_path ( md_path ) # add our namespace md . register_xmp_namespace ( 'https://github.com/jim-easterbrook/pyctools' , 'pyctools' ) # copy metadata for tag , value in self . data . items ( ) : if md . get_tag_type ( tag ) in ( 'XmpBag' , 'XmpSeq' ) : md . set_tag_multiple ( tag , value ) else : md . set_tag_string ( tag , value ) if self . comment is not None : md . set_comment ( self . comment ) # save file md . save_file ( md_path )
3,427
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L231-L274
[ "def", "NamedPlaceholders", "(", "iterable", ")", ":", "placeholders", "=", "\", \"", ".", "join", "(", "\"%({})s\"", ".", "format", "(", "key", ")", "for", "key", "in", "sorted", "(", "iterable", ")", ")", "return", "\"({})\"", ".", "format", "(", "placeholders", ")" ]
Get image dimensions from metadata .
def image_size(self):
    """Return the image dimensions ``(xlen, ylen)`` from the metadata.

    Several tag names are tried for each axis, most specific first.

    Raises:
        RuntimeError: if either dimension is absent (or zero).
    """
    def first_tag_value(tags):
        # Return the first matching tag's integer value, else None.
        for tag in tags:
            if tag in self.data:
                return int(self.data[tag])
        return None

    xlen = first_tag_value(('Xmp.pyctools.xlen',
                            'Exif.Photo.PixelXDimension',
                            'Exif.Image.ImageWidth',
                            'Xmp.tiff.ImageWidth'))
    ylen = first_tag_value(('Xmp.pyctools.ylen',
                            'Exif.Photo.PixelYDimension',
                            'Exif.Image.ImageLength',
                            'Xmp.tiff.ImageLength'))
    if xlen and ylen:
        return xlen, ylen
    raise RuntimeError('Metadata does not have image dimensions')
3,428
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L295-L322
[ "def", "_replace_auth_key", "(", "user", ",", "key", ",", "enc", "=", "'ssh-rsa'", ",", "comment", "=", "''", ",", "options", "=", "None", ",", "config", "=", "'.ssh/authorized_keys'", ")", ":", "auth_line", "=", "_format_auth_line", "(", "key", ",", "enc", ",", "comment", ",", "options", "or", "[", "]", ")", "lines", "=", "[", "]", "full", "=", "_get_config_file", "(", "user", ",", "config", ")", "try", ":", "# open the file for both reading AND writing", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "full", ",", "'r'", ")", "as", "_fh", ":", "for", "line", "in", "_fh", ":", "# We don't need any whitespace-only containing lines or arbitrary doubled newlines", "line", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ".", "strip", "(", ")", ")", "if", "line", "==", "''", ":", "continue", "line", "+=", "'\\n'", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "# Commented Line", "lines", ".", "append", "(", "line", ")", "continue", "comps", "=", "re", ".", "findall", "(", "r'((.*)\\s)?(ssh-[a-z0-9-]+|ecdsa-[a-z0-9-]+)\\s([a-zA-Z0-9+/]+={0,2})(\\s(.*))?'", ",", "line", ")", "if", "comps", "and", "len", "(", "comps", "[", "0", "]", ")", ">", "3", "and", "comps", "[", "0", "]", "[", "3", "]", "==", "key", ":", "# Found our key, replace it", "lines", ".", "append", "(", "auth_line", ")", "else", ":", "lines", ".", "append", "(", "line", ")", "_fh", ".", "close", "(", ")", "# Re-open the file writable after properly closing it", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "full", ",", "'wb'", ")", "as", "_fh", ":", "# Write out any changes", "_fh", ".", "writelines", "(", "salt", ".", "utils", ".", "data", ".", "encode", "(", "lines", ")", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "exc", ":", "raise", "CommandExecutionError", "(", "'Problem reading or writing to key file: {0}'", ".", "format", "(", "exc", ")", ")" ]
Get a metadata value .
def get(self, tag, default=None):
    """Return the metadata value for *tag* (in the pyctools XMP
    namespace), or *default* if it is not set."""
    return self.data.get('Xmp.pyctools.' + tag, default)
3,429
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L324-L342
[ "def", "gfsep", "(", "targ1", ",", "shape1", ",", "inframe1", ",", "targ2", ",", "shape2", ",", "inframe2", ",", "abcorr", ",", "obsrvr", ",", "relate", ",", "refval", ",", "adjust", ",", "step", ",", "nintvals", ",", "cnfine", ",", "result", "=", "None", ")", ":", "assert", "isinstance", "(", "cnfine", ",", "stypes", ".", "SpiceCell", ")", "assert", "cnfine", ".", "is_double", "(", ")", "if", "result", "is", "None", ":", "result", "=", "stypes", ".", "SPICEDOUBLE_CELL", "(", "2000", ")", "else", ":", "assert", "isinstance", "(", "result", ",", "stypes", ".", "SpiceCell", ")", "assert", "result", ".", "is_double", "(", ")", "targ1", "=", "stypes", ".", "stringToCharP", "(", "targ1", ")", "shape1", "=", "stypes", ".", "stringToCharP", "(", "shape1", ")", "inframe1", "=", "stypes", ".", "stringToCharP", "(", "inframe1", ")", "targ2", "=", "stypes", ".", "stringToCharP", "(", "targ2", ")", "shape2", "=", "stypes", ".", "stringToCharP", "(", "shape2", ")", "inframe2", "=", "stypes", ".", "stringToCharP", "(", "inframe2", ")", "abcorr", "=", "stypes", ".", "stringToCharP", "(", "abcorr", ")", "obsrvr", "=", "stypes", ".", "stringToCharP", "(", "obsrvr", ")", "relate", "=", "stypes", ".", "stringToCharP", "(", "relate", ")", "refval", "=", "ctypes", ".", "c_double", "(", "refval", ")", "adjust", "=", "ctypes", ".", "c_double", "(", "adjust", ")", "step", "=", "ctypes", ".", "c_double", "(", "step", ")", "nintvals", "=", "ctypes", ".", "c_int", "(", "nintvals", ")", "libspice", ".", "gfsep_c", "(", "targ1", ",", "shape1", ",", "inframe1", ",", "targ2", ",", "shape2", ",", "inframe2", ",", "abcorr", ",", "obsrvr", ",", "relate", ",", "refval", ",", "adjust", ",", "step", ",", "nintvals", ",", "ctypes", ".", "byref", "(", "cnfine", ")", ",", "ctypes", ".", "byref", "(", "result", ")", ")", "return", "result" ]
Set a metadata value .
def set(self, tag, value):
    """Store *value* under the pyctools XMP namespace key for *tag*."""
    self.data['Xmp.pyctools.{}'.format(tag)] = value
3,430
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/frame.py#L344-L358
[ "def", "gfsep", "(", "targ1", ",", "shape1", ",", "inframe1", ",", "targ2", ",", "shape2", ",", "inframe2", ",", "abcorr", ",", "obsrvr", ",", "relate", ",", "refval", ",", "adjust", ",", "step", ",", "nintvals", ",", "cnfine", ",", "result", "=", "None", ")", ":", "assert", "isinstance", "(", "cnfine", ",", "stypes", ".", "SpiceCell", ")", "assert", "cnfine", ".", "is_double", "(", ")", "if", "result", "is", "None", ":", "result", "=", "stypes", ".", "SPICEDOUBLE_CELL", "(", "2000", ")", "else", ":", "assert", "isinstance", "(", "result", ",", "stypes", ".", "SpiceCell", ")", "assert", "result", ".", "is_double", "(", ")", "targ1", "=", "stypes", ".", "stringToCharP", "(", "targ1", ")", "shape1", "=", "stypes", ".", "stringToCharP", "(", "shape1", ")", "inframe1", "=", "stypes", ".", "stringToCharP", "(", "inframe1", ")", "targ2", "=", "stypes", ".", "stringToCharP", "(", "targ2", ")", "shape2", "=", "stypes", ".", "stringToCharP", "(", "shape2", ")", "inframe2", "=", "stypes", ".", "stringToCharP", "(", "inframe2", ")", "abcorr", "=", "stypes", ".", "stringToCharP", "(", "abcorr", ")", "obsrvr", "=", "stypes", ".", "stringToCharP", "(", "obsrvr", ")", "relate", "=", "stypes", ".", "stringToCharP", "(", "relate", ")", "refval", "=", "ctypes", ".", "c_double", "(", "refval", ")", "adjust", "=", "ctypes", ".", "c_double", "(", "adjust", ")", "step", "=", "ctypes", ".", "c_double", "(", "step", ")", "nintvals", "=", "ctypes", ".", "c_int", "(", "nintvals", ")", "libspice", ".", "gfsep_c", "(", "targ1", ",", "shape1", ",", "inframe1", ",", "targ2", ",", "shape2", ",", "inframe2", ",", "abcorr", ",", "obsrvr", ",", "relate", ",", "refval", ",", "adjust", ",", "step", ",", "nintvals", ",", "ctypes", ".", "byref", "(", "cnfine", ")", ",", "ctypes", ".", "byref", "(", "result", ")", ")", "return", "result" ]
Gets a StrictRedis connection for normal redis or for redis sentinel based upon redis mode in configuration .
def get_connection(self, is_read_only=False) -> redis.StrictRedis:
    """Return a StrictRedis connection, creating and caching it on
    first use.

    In sentinel mode the connection is resolved through the sentinel
    service (slave for read-only access, master otherwise); in plain
    mode a direct StrictRedis connection is created.
    """
    # Reuse the connection created on a previous call.
    if self.connection is not None:
        return self.connection
    if self.is_sentinel:
        sentinel_kwargs = {}
        if self.password:
            sentinel_kwargs["password"] = self.password
        sentinel = Sentinel([(self.host, self.port)], **sentinel_kwargs)
        resolve = sentinel.slave_for if is_read_only else sentinel.master_for
        connection = resolve(self.sentinel_service, decode_responses=True)
    else:
        connection = redis.StrictRedis(host=self.host, port=self.port,
                                       decode_responses=True,
                                       password=self.password)
    self.connection = connection
    return connection
3,431
https://github.com/biplap-sarkar/pylimit/blob/d2170a8c02a9be083f37c9e4ec1e28700a33d64e/pylimit/redis_helper.py#L18-L43
[ "def", "ttl", "(", "self", ",", "value", ")", ":", "# get timer", "timer", "=", "getattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "None", ")", "# if timer is running, stop the timer", "if", "timer", "is", "not", "None", ":", "timer", ".", "cancel", "(", ")", "# initialize timestamp", "timestamp", "=", "None", "# if value is None", "if", "value", "is", "None", ":", "# nonify timer", "timer", "=", "None", "else", ":", "# else, renew a timer", "# get timestamp", "timestamp", "=", "time", "(", ")", "+", "value", "# start a new timer", "timer", "=", "Timer", "(", "value", ",", "self", ".", "__del__", ")", "timer", ".", "start", "(", ")", "# set/update attributes", "setattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "timer", ")", "setattr", "(", "self", ",", "Annotation", ".", "__TS", ",", "timestamp", ")" ]
Calculates the hash for the given filename .
def calculate_hash_for_file(name):
    """Calculate the OpenSubtitles-style hash for the file *name*.

    The hash is the file size plus the 64-bit sum of the little-endian
    64-bit words in the first and last 64 KiB of the file, truncated to
    64 bits and formatted as 16 lowercase hex digits.

    Raises:
        ValueError: if the file is smaller than 128 KiB (the hash needs
            a full 64 KiB block at each end).  The original code used a
            bare ``assert`` here, which is stripped under ``python -O``.
    """
    # Explicit '<q' (little-endian) instead of native 'q' so the hash is
    # identical on big-endian platforms; Struct pre-compiles the format.
    longlong = struct.Struct('<q')
    bytesize = longlong.size
    chunk_size = 65536
    filesize = os.path.getsize(name)
    minimum_size = chunk_size * 2
    if filesize < minimum_size:
        raise ValueError('Movie {name} must have at least {min} bytes'.format(
            min=minimum_size, name=name))
    hash = filesize
    # Context manager guarantees the file is closed on any error path.
    with open(name, 'rb') as f:
        for _ in range(chunk_size // bytesize):
            (l_value,) = longlong.unpack(f.read(bytesize))
            hash = (hash + l_value) & 0xFFFFFFFFFFFFFFFF  # stay in 64 bits
        f.seek(max(0, filesize - chunk_size), 0)
        for _ in range(chunk_size // bytesize):
            (l_value,) = longlong.unpack(f.read(bytesize))
            hash = (hash + l_value) & 0xFFFFFFFFFFFFFFFF
    return "%016x" % hash
3,432
https://github.com/nicoddemus/ss/blob/df77c745e511f542c456450ed94adff1b969fc92/ss.py#L218-L259
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Update the component's configuration.
def set_config(self, config):
    """Queue a deep copy of *config* for the running component, then
    notify the component that new configuration is available."""
    # Deep copy so the running component never shares the caller's object.
    snapshot = copy.deepcopy(config)
    self._configmixin_queue.append(snapshot)
    # notify component, using thread safe method
    self.new_config()
3,433
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/config.py#L381-L397
[ "def", "delete", "(", "request", ",", "obj_id", "=", "None", ")", ":", "data", "=", "request", ".", "DELETE", "or", "json", ".", "loads", "(", "request", ".", "body", ")", "guids", "=", "data", ".", "get", "(", "'guids'", ")", ".", "split", "(", "','", ")", "objects", "=", "getObjectsFromGuids", "(", "guids", ")", "gallery", "=", "Gallery", ".", "objects", ".", "get", "(", "pk", "=", "obj_id", ")", "LOGGER", ".", "info", "(", "'{} removed {} from {}'", ".", "format", "(", "request", ".", "user", ".", "email", ",", "guids", ",", "gallery", ")", ")", "for", "o", "in", "objects", ":", "if", "isinstance", "(", "o", ",", "Image", ")", ":", "gallery", ".", "images", ".", "remove", "(", "o", ")", "elif", "isinstance", "(", "o", ",", "Video", ")", ":", "gallery", ".", "videos", ".", "remove", "(", "o", ")", "res", "=", "Result", "(", ")", "return", "JsonResponse", "(", "res", ".", "asDict", "(", ")", ")" ]
Gaussian filter generator core .
def GaussianFilterCore(x_sigma=0.0, y_sigma=0.0):
    """Build a 2-D separable Gaussian filter Frame.

    The returned Frame's data is the outer product of a horizontal and a
    vertical 1-D Gaussian (shape ``(y_len, x_len, 1)``, float32), with an
    audit trail recording the sigmas used.  A sigma of 0 gives a single
    unit tap on that axis.
    """
    def filter_1D(sigma):
        # Gaussian exponent scale; sigma clamped so alpha stays finite.
        alpha = 1.0 / (2.0 * (max(sigma, 0.0001) ** 2.0))
        # Generate coefficients outwards from the centre until they
        # fall below 0.0001, which fixes the aperture automatically.
        coefs = []
        coef = 1.0
        while coef > 0.0001:
            coefs.append(coef)
            coef = math.exp(-(alpha * (float(len(coefs) ** 2))))
        fil_dim = len(coefs) - 1
        # Mirror the one-sided coefficients into a symmetric filter.
        result = numpy.zeros(1 + (fil_dim * 2), dtype=numpy.float32)
        for n, coef in enumerate(coefs):
            result[fil_dim - n] = coef
            result[fil_dim + n] = coef
        # normalise result
        result /= result.sum()
        return result

    # Negative sigmas make no sense; clamp to zero (identity filter).
    x_sigma = max(x_sigma, 0.0)
    y_sigma = max(y_sigma, 0.0)
    x_fil = filter_1D(x_sigma)
    y_fil = filter_1D(y_sigma)
    # Separable 2-D filter: outer product of the two 1-D responses.
    result = numpy.empty(
        [y_fil.shape[0], x_fil.shape[0], 1], dtype=numpy.float32)
    for y in range(y_fil.shape[0]):
        for x in range(x_fil.shape[0]):
            result[y, x, 0] = x_fil[x] * y_fil[y]
    out_frame = Frame()
    out_frame.data = result
    out_frame.type = 'fil'
    # Append to the frame's audit trail so downstream components can see
    # how this filter was generated.
    audit = out_frame.metadata.get('audit')
    audit += 'data = GaussianFilter()\n'
    if x_sigma != 0.0:
        audit += ' x_sigma: %g\n' % (x_sigma)
    if y_sigma != 0.0:
        audit += ' y_sigma: %g\n' % (y_sigma)
    out_frame.metadata.set('audit', audit)
    return out_frame
3,434
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/components/interp/gaussianfilter.py#L92-L147
[ "def", "convolve_filter", "(", "signal", ",", "impulse_response", ")", ":", "if", "impulse_response", "is", "not", "None", ":", "# print 'interpolated calibration'#, self.calibration_frequencies", "adjusted_signal", "=", "fftconvolve", "(", "signal", ",", "impulse_response", ")", "adjusted_signal", "=", "adjusted_signal", "[", "len", "(", "impulse_response", ")", "/", "2", ":", "len", "(", "adjusted_signal", ")", "-", "len", "(", "impulse_response", ")", "/", "2", "+", "1", "]", "return", "adjusted_signal", "else", ":", "return", "signal" ]
Put a command on the queue to be called in the component s thread .
def queue_command(self, command):
    """Put *command* on the Qt event queue to be called in the
    component's thread.

    Commands queued before the component has started are buffered and
    (presumably) replayed at start-up from ``self._incoming`` — TODO
    confirm against the start-up code, which is outside this block.
    """
    if self._running:
        # queue event normally; postEvent is safe to call from any thread
        QtCore.QCoreApplication.postEvent(
            self, ActionEvent(command), QtCore.Qt.LowEventPriority)
    else:
        # save event until we are started
        self._incoming.append(command)
3,435
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/qt.py#L72-L86
[ "def", "DIV", "(", "classical_reg", ",", "right", ")", ":", "left", ",", "right", "=", "unpack_reg_val_pair", "(", "classical_reg", ",", "right", ")", "return", "ClassicalDiv", "(", "left", ",", "right", ")" ]
Wait until the event loop terminates or timeout is reached .
def join(self, timeout=3600):
    """Process Qt events until the event loop stops running or *timeout*
    (in seconds) has elapsed."""
    deadline = time.time() + timeout
    while self._running:
        remaining = deadline - time.time()
        if remaining <= 0:
            return
        # Pump events for at most the remaining time (milliseconds).
        QCoreApplication.processEvents(
            QEventLoop.AllEvents, int(remaining * 1000))
3,436
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/core/qt.py#L112-L130
[ "def", "_load", "(", "self", ")", ":", "indexfilename", "=", "os", ".", "path", ".", "join", "(", "self", ".", "__dir", ",", "\"index.dat\"", ")", "if", "os", ".", "path", ".", "exists", "(", "indexfilename", ")", ":", "data", "=", "self", ".", "_read_file", "(", "indexfilename", ")", "self", ".", "__index", "=", "data", "[", "0", "]", "self", ".", "__filename_rep", "=", "data", "[", "1", "]", "if", "self", ".", "__filename_rep", ".", "_sha1_sigs", "!=", "self", ".", "__sha1_sigs", ":", "print", "(", "(", "\"CACHE: Warning: sha1_sigs stored in the cache is set \"", "+", "\"to %s.\"", ")", "%", "self", ".", "__filename_rep", ".", "_sha1_sigs", ")", "print", "(", "\"Please remove the cache to change this setting.\"", ")", "self", ".", "__sha1_sigs", "=", "self", ".", "__filename_rep", ".", "_sha1_sigs", "else", ":", "self", ".", "__index", "=", "{", "}", "self", ".", "__filename_rep", "=", "filename_repository_t", "(", "self", ".", "__sha1_sigs", ")", "self", ".", "__modified_flag", "=", "False" ]
Intra field interlace to sequential converter .
def IntraField(config=None):
    """Intra-field interlace to sequential converter.

    Pipeline: deinterlace each field, then resize with an interpolation
    filter; the filter generator's output is doubled in gain before
    feeding the resizer's 'filter' input.
    """
    # Fix: the original used a mutable default argument (config={}),
    # which is shared across calls.  None + fallback is equivalent for
    # callers and safe.
    if config is None:
        config = {}
    return Compound(
        config=config,
        deint=SimpleDeinterlace(),
        interp=Resize(),
        filgen=FilterGenerator(yaperture=8, ycut=50),
        gain=Arithmetic(func='data * pt_float(2)'),
        linkages={
            ('self',   'input'):  [('deint',  'input')],
            ('deint',  'output'): [('interp', 'input')],
            ('interp', 'output'): [('self',   'output')],
            ('filgen', 'output'): [('gain',   'input')],
            ('gain',   'output'): [('interp', 'filter')],
        }
    )
3,437
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/components/deinterlace/intrafield.py#L28-L52
[ "def", "P_isothermal_critical_flow", "(", "P", ",", "fd", ",", "D", ",", "L", ")", ":", "# Correct branch of lambertw found by trial and error", "lambert_term", "=", "float", "(", "lambertw", "(", "-", "exp", "(", "(", "-", "D", "-", "L", "*", "fd", ")", "/", "D", ")", ",", "-", "1", ")", ".", "real", ")", "return", "P", "*", "exp", "(", "(", "D", "*", "(", "lambert_term", "+", "1.0", ")", "+", "L", "*", "fd", ")", "/", "(", "2.0", "*", "D", ")", ")" ]
Creates a rate limiting rule with rate limiting period and attempt limit
def create(self, period: int, limit: int):
    """Create a rate-limiting rule.

    Args:
        period: length of the rate-limiting window (presumably seconds
            — confirm against the attempt-tracking code).
        limit: maximum number of attempts allowed within the window.
    """
    self.period, self.limit = period, limit
3,438
https://github.com/biplap-sarkar/pylimit/blob/d2170a8c02a9be083f37c9e4ec1e28700a33d64e/pylimit/pyratelimit.py#L39-L51
[ "def", "_read", "(", "self", ",", "directory", ",", "filename", ",", "session", ",", "path", ",", "name", ",", "extension", ",", "spatial", "=", "None", ",", "spatialReferenceID", "=", "None", ",", "replaceParamFile", "=", "None", ")", ":", "yml_events", "=", "[", "]", "with", "open", "(", "path", ")", "as", "fo", ":", "yml_events", "=", "yaml", ".", "load", "(", "fo", ")", "for", "yml_event", "in", "yml_events", ":", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "yml_event", ".", "subfolder", ")", ")", ":", "orm_event", "=", "yml_event", ".", "as_orm", "(", ")", "if", "not", "self", ".", "_similar_event_exists", "(", "orm_event", ".", "subfolder", ")", ":", "session", ".", "add", "(", "orm_event", ")", "self", ".", "events", ".", "append", "(", "orm_event", ")", "session", ".", "commit", "(", ")" ]
Checks if a namespace is already rate limited or not without making any additional attempts
def is_rate_limited(self, namespace: str) -> bool:
    """Report whether *namespace* is currently rate limited.

    The check is read-only: no additional attempt is recorded
    (``add_attempt=False``).
    """
    can_attempt = self.__can_attempt(namespace=namespace, add_attempt=False)
    return not can_attempt
3,439
https://github.com/biplap-sarkar/pylimit/blob/d2170a8c02a9be083f37c9e4ec1e28700a33d64e/pylimit/pyratelimit.py#L96-L105
[ "def", "list_publications", "(", ")", ":", "publications", "=", "search_publications", "(", "DBPublication", "(", "is_public", "=", "True", ")", ")", "return", "SimpleTemplate", "(", "INDEX_TEMPLATE", ")", ".", "render", "(", "publications", "=", "publications", ",", "compose_path", "=", "web_tools", ".", "compose_path", ",", "delimiter", "=", "\":\"", ",", ")" ]
Shows basic usage of the Google Calendar API .
def main():
    """Shows basic usage of the Google Calendar API.

    Authorizes with stored credentials, then prints the start time and
    summary of the next 10 events on the user's primary calendar.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('calendar', 'v3', http=http)
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    print('Getting the upcoming 10 events')
    # singleEvents expands recurring events so orderBy='startTime' works.
    eventsResult = service.events().list(
        calendarId='primary', timeMin=now, maxResults=10,
        singleEvents=True, orderBy='startTime').execute()
    events = eventsResult.get('items', [])
    if not events:
        print('No upcoming events found.')
    for event in events:
        # All-day events carry 'date' instead of 'dateTime'.
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])
3,440
https://github.com/NUAA-Open-Source/NUAA-iCal-Python/blob/1bdc4016e4d8b236a12bba5047a5150f889bc880/NUAAiCal/AddToGCal.py#L54-L75
[ "def", "_unregister_bundle_factories", "(", "self", ",", "bundle", ")", ":", "# type: (Bundle) -> None", "with", "self", ".", "__factories_lock", ":", "# Find out which factories must be removed", "to_remove", "=", "[", "factory_name", "for", "factory_name", "in", "self", ".", "__factories", "if", "self", ".", "get_factory_bundle", "(", "factory_name", ")", "is", "bundle", "]", "# Remove all of them", "for", "factory_name", "in", "to_remove", ":", "try", ":", "self", ".", "unregister_factory", "(", "factory_name", ")", "except", "ValueError", "as", "ex", ":", "_logger", ".", "warning", "(", "\"Error unregistering factory '%s': %s\"", ",", "factory_name", ",", "ex", ")" ]
Find arguments that should accumulate values and fix them .
def fix_list_arguments ( self ) : either = [ list ( c . children ) for c in self . either . children ] for case in either : case = [ c for c in case if case . count ( c ) > 1 ] for a in [ e for e in case if type ( e ) == Argument ] : a . value = [ ] return self
3,441
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/docopt.py#L59-L66
[ "def", "enforce_filetype_file", "(", "form", ",", "field", ")", ":", "if", "form", ".", "_fields", ".", "get", "(", "'filetype'", ")", ".", "data", "!=", "RESOURCE_FILETYPE_FILE", ":", "return", "domain", "=", "urlparse", "(", "field", ".", "data", ")", ".", "netloc", "allowed_domains", "=", "current_app", ".", "config", "[", "'RESOURCES_FILE_ALLOWED_DOMAINS'", "]", "allowed_domains", "+=", "[", "current_app", ".", "config", ".", "get", "(", "'SERVER_NAME'", ")", "]", "if", "current_app", ".", "config", ".", "get", "(", "'CDN_DOMAIN'", ")", ":", "allowed_domains", ".", "append", "(", "current_app", ".", "config", "[", "'CDN_DOMAIN'", "]", ")", "if", "'*'", "in", "allowed_domains", ":", "return", "if", "domain", "and", "domain", "not", "in", "allowed_domains", ":", "message", "=", "_", "(", "'Domain \"{domain}\" not allowed for filetype \"{filetype}\"'", ")", "raise", "validators", ".", "ValidationError", "(", "message", ".", "format", "(", "domain", "=", "domain", ",", "filetype", "=", "RESOURCE_FILETYPE_FILE", ")", ")" ]
Transform pattern into an equivalent with only top - level Either .
def either ( self ) : # Currently the pattern will not be equivalent, but more "narrow", # although good enough to reason about list arguments. if not hasattr ( self , 'children' ) : return Either ( Required ( self ) ) else : ret = [ ] groups = [ [ self ] ] while groups : children = groups . pop ( 0 ) types = [ type ( c ) for c in children ] if Either in types : either = [ c for c in children if type ( c ) is Either ] [ 0 ] children . pop ( children . index ( either ) ) for c in either . children : groups . append ( [ c ] + children ) elif Required in types : required = [ c for c in children if type ( c ) is Required ] [ 0 ] children . pop ( children . index ( required ) ) groups . append ( list ( required . children ) + children ) elif Optional in types : optional = [ c for c in children if type ( c ) is Optional ] [ 0 ] children . pop ( children . index ( optional ) ) groups . append ( list ( optional . children ) + children ) elif OneOrMore in types : oneormore = [ c for c in children if type ( c ) is OneOrMore ] [ 0 ] children . pop ( children . index ( oneormore ) ) groups . append ( list ( oneormore . children ) * 2 + children ) else : ret . append ( children ) return Either ( * [ Required ( * e ) for e in ret ] )
3,442
https://github.com/kmike/port-for/blob/f61ebf3c2caf54eabe8233b40ef67b973176a6f5/port_for/docopt.py#L69-L100
[ "def", "send_data", "(", "data", ")", ":", "datalength", "=", "len", "(", "data", ")", "csm1", "=", "checksum1", "(", "data", ",", "datalength", ")", "csm2", "=", "checksum2", "(", "csm1", ")", "data", ".", "insert", "(", "0", ",", "0xFF", ")", "data", ".", "insert", "(", "1", ",", "0xFF", ")", "data", ".", "insert", "(", "5", ",", "csm1", ")", "data", ".", "insert", "(", "6", ",", "csm2", ")", "stringtosend", "=", "\"\"", "for", "i", "in", "range", "(", "len", "(", "data", ")", ")", ":", "byteformat", "=", "'%02X'", "%", "data", "[", "i", "]", "stringtosend", "=", "stringtosend", "+", "\"\\\\x\"", "+", "byteformat", "try", ":", "SERPORT", ".", "write", "(", "stringtosend", ".", "decode", "(", "'string-escape'", ")", ")", "#print stringtosend", "except", ":", "raise", "HerkulexError", "(", "\"could not communicate with motors\"", ")" ]
Add or updated the Image .
def syncImage ( img , current , session ) : def _img_str ( i ) : return "%s - %s" % ( i . type , i . description ) for db_img in current . images : img_info = ( img . type , img . md5 , img . size ) db_img_info = ( db_img . type , db_img . md5 , db_img . size ) if db_img_info == img_info : img = None break elif ( db_img . type == img . type and db_img . description == img . description ) : if img . md5 != db_img . md5 : # Update image current . images . remove ( db_img ) current . images . append ( img ) session . add ( current ) pout ( Fg . green ( "Updating image" ) + ": " + _img_str ( img ) ) img = None break if img : # Add image current . images . append ( img ) session . add ( current ) pout ( Fg . green ( "Adding image" ) + ": " + _img_str ( img ) )
3,443
https://github.com/nicfit/MishMash/blob/8f988936340bf0ffb83ea90ea124efb3c36a1174/mishmash/commands/sync/utils.py#L64-L92
[ "def", "search_for_port", "(", "port_glob", ",", "req", ",", "expected_res", ")", ":", "# Check that the USB port actually exists, based on the known vendor and", "# product ID.", "if", "usb", ".", "core", ".", "find", "(", "idVendor", "=", "0x0403", ",", "idProduct", "=", "0x6001", ")", "is", "None", ":", "return", "None", "# Find ports matching the supplied glob.", "ports", "=", "glob", ".", "glob", "(", "port_glob", ")", "if", "len", "(", "ports", ")", "==", "0", ":", "return", "None", "for", "port", "in", "ports", ":", "with", "r12_serial_port", "(", "port", ")", "as", "ser", ":", "if", "not", "ser", ".", "isOpen", "(", ")", ":", "ser", ".", "open", "(", ")", "# Write a request out.", "if", "sys", ".", "version_info", "[", "0", "]", "==", "2", ":", "ser", ".", "write", "(", "str", "(", "req", ")", ".", "encode", "(", "'utf-8'", ")", ")", "else", ":", "ser", ".", "write", "(", "bytes", "(", "req", ",", "'utf-8'", ")", ")", "# Wait a short period to allow the connection to generate output.", "time", ".", "sleep", "(", "0.1", ")", "# Read output from the serial connection check if it's what we want.", "res", "=", "ser", ".", "read", "(", "ser", ".", "in_waiting", ")", ".", "decode", "(", "OUTPUT_ENCODING", ")", "if", "expected_res", "in", "res", ":", "return", "port", "raise", "ArmException", "(", "'ST Robotics connection found, but is not responsive.'", "+", "' Is the arm powered on?'", ")", "return", "None" ]
Filter the form s column choices
def add_fields ( self , form , index ) : super ( ColumnFormSet , self ) . add_fields ( form , index ) form . fields [ 'column' ] . choices = self . get_choices ( )
3,444
https://github.com/magopian/django-data-exports/blob/a73db486779d93046ad89c5bf582ff8ae869120f/data_exports/forms.py#L39-L48
[ "def", "_count_devices", "(", "self", ")", ":", "number_of_devices", "=", "ctypes", ".", "c_uint", "(", ")", "if", "ctypes", ".", "windll", ".", "user32", ".", "GetRawInputDeviceList", "(", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_int", ")", "(", ")", ",", "ctypes", ".", "byref", "(", "number_of_devices", ")", ",", "ctypes", ".", "sizeof", "(", "RawInputDeviceList", ")", ")", "==", "-", "1", ":", "warn", "(", "\"Call to GetRawInputDeviceList was unsuccessful.\"", "\"We have no idea if a mouse or keyboard is attached.\"", ",", "RuntimeWarning", ")", "return", "devices_found", "=", "(", "RawInputDeviceList", "*", "number_of_devices", ".", "value", ")", "(", ")", "if", "ctypes", ".", "windll", ".", "user32", ".", "GetRawInputDeviceList", "(", "devices_found", ",", "ctypes", ".", "byref", "(", "number_of_devices", ")", ",", "ctypes", ".", "sizeof", "(", "RawInputDeviceList", ")", ")", "==", "-", "1", ":", "warn", "(", "\"Call to GetRawInputDeviceList was unsuccessful.\"", "\"We have no idea if a mouse or keyboard is attached.\"", ",", "RuntimeWarning", ")", "return", "for", "device", "in", "devices_found", ":", "if", "device", ".", "dwType", "==", "0", ":", "self", ".", "_raw_device_counts", "[", "'mice'", "]", "+=", "1", "elif", "device", ".", "dwType", "==", "1", ":", "self", ".", "_raw_device_counts", "[", "'keyboards'", "]", "+=", "1", "elif", "device", ".", "dwType", "==", "2", ":", "self", ".", "_raw_device_counts", "[", "'otherhid'", "]", "+=", "1", "else", ":", "self", ".", "_raw_device_counts", "[", "'unknown'", "]", "+=", "1" ]
Walk source directory tree and convert each sub directory to a package name .
def find_packages ( ) : packages = [ 'pyctools' ] for root , dirs , files in os . walk ( os . path . join ( 'src' , 'pyctools' ) ) : package = '.' . join ( root . split ( os . sep ) [ 1 : ] ) for name in dirs : packages . append ( package + '.' + name ) return packages
3,445
https://github.com/jim-easterbrook/pyctools/blob/2a958665326892f45f249bebe62c2c23f306732b/src/pyctools/setup.py#L41-L51
[ "def", "_custom_response_edit", "(", "self", ",", "method", ",", "url", ",", "headers", ",", "body", ",", "response", ")", ":", "if", "self", ".", "get_implementation", "(", ")", ".", "is_mock", "(", ")", ":", "delay", "=", "self", ".", "get_setting", "(", "\"MOCKDATA_DELAY\"", ",", "0.0", ")", "time", ".", "sleep", "(", "delay", ")", "self", ".", "_edit_mock_response", "(", "method", ",", "url", ",", "headers", ",", "body", ",", "response", ")" ]
From a list of args extract the one param if supplied returning the value and unused args .
def extract_param ( param , args , type = None ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( '--' + param , type = type ) res , unused = parser . parse_known_args ( args ) return getattr ( res , param ) , unused
3,446
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/cli.py#L4-L19
[ "def", "start", "(", "self", ")", ":", "self", ".", "startTime", "=", "time", ".", "time", "(", ")", "self", ".", "configure", "(", "text", "=", "'{0:<d} s'", ".", "format", "(", "0", ")", ")", "self", ".", "update", "(", ")" ]
Returns a builder with stemmers for all languages added to it .
def get_nltk_builder ( languages ) : all_stemmers = [ ] all_stopwords_filters = [ ] all_word_characters = set ( ) for language in languages : if language == "en" : # use Lunr's defaults all_stemmers . append ( lunr . stemmer . stemmer ) all_stopwords_filters . append ( stop_word_filter ) all_word_characters . update ( { r"\w" } ) else : stopwords , word_characters = _get_stopwords_and_word_characters ( language ) all_stemmers . append ( Pipeline . registered_functions [ "stemmer-{}" . format ( language ) ] ) all_stopwords_filters . append ( generate_stop_word_filter ( stopwords , language = language ) ) all_word_characters . update ( word_characters ) builder = Builder ( ) multi_trimmer = generate_trimmer ( "" . join ( sorted ( all_word_characters ) ) ) Pipeline . register_function ( multi_trimmer , "lunr-multi-trimmer-{}" . format ( "-" . join ( languages ) ) ) builder . pipeline . reset ( ) for fn in chain ( [ multi_trimmer ] , all_stopwords_filters , all_stemmers ) : builder . pipeline . add ( fn ) for fn in all_stemmers : builder . search_pipeline . add ( fn ) return builder
3,447
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/languages/__init__.py#L51-L89
[ "def", "setOverlayTextureColorSpace", "(", "self", ",", "ulOverlayHandle", ",", "eTextureColorSpace", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTextureColorSpace", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTextureColorSpace", ")", "return", "result" ]
Register all supported languages to ensure compatibility .
def register_languages ( ) : for language in set ( SUPPORTED_LANGUAGES ) - { "en" } : language_stemmer = partial ( nltk_stemmer , get_language_stemmer ( language ) ) Pipeline . register_function ( language_stemmer , "stemmer-{}" . format ( language ) )
3,448
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/languages/__init__.py#L92-L96
[ "def", "object", "(", "self", ")", ":", "if", "self", ".", "type", "==", "EntryType", ".", "category", ":", "return", "self", ".", "category", "elif", "self", ".", "type", "==", "EntryType", ".", "event", ":", "return", "self", ".", "event", "elif", "self", ".", "type", "==", "EntryType", ".", "session", ":", "return", "self", ".", "session", "elif", "self", ".", "type", "==", "EntryType", ".", "contribution", ":", "return", "self", ".", "contribution", "elif", "self", ".", "type", "==", "EntryType", ".", "subcontribution", ":", "return", "self", ".", "subcontribution" ]
Converts a number to its ordinal representation .
def ordinal ( value ) : try : value = int ( value ) except ( TypeError , ValueError ) : raise ValueError if value % 100 in ( 11 , 12 , 13 ) : return '%d%s' % ( value , ORDINAL_SUFFIX [ 0 ] ) else : return '%d%s' % ( value , ORDINAL_SUFFIX [ value % 10 ] )
3,449
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/number.py#L45-L73
[ "def", "run_user_process", "(", "self", ",", "program", ",", "args", ",", "env", ")", ":", "stdout_streams", ",", "stderr_streams", "=", "self", ".", "_get_stdout_stderr_streams", "(", ")", "if", "sys", ".", "platform", "==", "\"win32\"", ":", "# PTYs don't work in windows so we use pipes.", "self", ".", "_stdout_tee", "=", "io_wrap", ".", "Tee", ".", "pipe", "(", "*", "stdout_streams", ")", "self", ".", "_stderr_tee", "=", "io_wrap", ".", "Tee", ".", "pipe", "(", "*", "stderr_streams", ")", "# Seems like the following actually isn't necessary on Windows", "# TODO(adrian): we may need to do the following if we use pipes instead of PTYs", "# because Python on Unix doesn't like writing UTF-8 to files", "# tell child python interpreters we accept utf-8", "# env['PYTHONIOENCODING'] = 'UTF-8'", "else", ":", "self", ".", "_stdout_tee", "=", "io_wrap", ".", "Tee", ".", "pty", "(", "*", "stdout_streams", ")", "self", ".", "_stderr_tee", "=", "io_wrap", ".", "Tee", ".", "pty", "(", "*", "stderr_streams", ")", "command", "=", "[", "program", "]", "+", "list", "(", "args", ")", "runner", "=", "util", ".", "find_runner", "(", "program", ")", "if", "runner", ":", "command", "=", "runner", "+", "command", "command", "=", "' '", ".", "join", "(", "six", ".", "moves", ".", "shlex_quote", "(", "arg", ")", "for", "arg", "in", "command", ")", "self", ".", "_stdout_stream", ".", "write_string", "(", "command", "+", "\"\\n\\n\"", ")", "try", ":", "self", ".", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "env", "=", "env", ",", "stdout", "=", "self", ".", "_stdout_tee", ".", "tee_file", ",", "stderr", "=", "self", ".", "_stderr_tee", ".", "tee_file", ",", "shell", "=", "True", ",", ")", "self", ".", "_run", ".", "pid", "=", "self", ".", "proc", ".", "pid", "except", "(", "OSError", ",", "IOError", ")", ":", "raise", "Exception", "(", "'Could not find program: %s'", "%", "command", ")", "self", ".", "_sync_etc", "(", ")" ]
Converts a fraction to a formatted percentage .
def percentage ( value , digits = 2 ) : value = float ( value ) * 100.0 return u'' + '%s %%' % ( _format ( value , digits ) , )
3,450
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/number.py#L117-L134
[ "def", "crtPwBoxCarFn", "(", "varNumVol", ",", "aryPngData", ",", "aryPresOrd", ",", "vecMtDrctn", ")", ":", "print", "(", "'------Create pixel-wise boxcar functions'", ")", "aryBoxCar", "=", "np", ".", "empty", "(", "aryPngData", ".", "shape", "[", "0", ":", "2", "]", "+", "(", "len", "(", "vecMtDrctn", ")", ",", ")", "+", "(", "varNumVol", ",", ")", ",", "dtype", "=", "'int64'", ")", "for", "ind", ",", "num", "in", "enumerate", "(", "vecMtDrctn", ")", ":", "aryCondTemp", "=", "np", ".", "zeros", "(", "(", "aryPngData", ".", "shape", ")", ",", "dtype", "=", "'int64'", ")", "lgcTempMtDrctn", "=", "[", "aryPresOrd", "==", "num", "]", "[", "0", "]", "aryCondTemp", "[", ":", ",", ":", ",", "lgcTempMtDrctn", "]", "=", "np", ".", "copy", "(", "aryPngData", "[", ":", ",", ":", ",", "lgcTempMtDrctn", "]", ")", "aryBoxCar", "[", ":", ",", ":", ",", "ind", ",", ":", "]", "=", "aryCondTemp", "return", "aryBoxCar" ]
Converts a large number to a formatted number containing the textual suffix for that number .
def word ( value , digits = 2 ) : convention = locale . localeconv ( ) decimal_point = convention [ 'decimal_point' ] decimal_zero = re . compile ( r'%s0+' % re . escape ( decimal_point ) ) prefix = value < 0 and '-' or '' value = abs ( int ( value ) ) if value < 1000 : return u'' . join ( [ prefix , decimal_zero . sub ( '' , _format ( value , digits ) ) , ] ) for base , suffix in enumerate ( LARGE_NUMBER_SUFFIX ) : exp = ( base + 2 ) * 3 power = 10 ** exp if value < power : value = value / float ( 10 ** ( exp - 3 ) ) return '' . join ( [ prefix , decimal_zero . sub ( '' , _format ( value , digits ) ) , ' ' , suffix , ] ) raise OverflowError
3,451
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/number.py#L137-L174
[ "def", "remove_server", "(", "self", ",", "server_id", ")", ":", "# Validate server_id", "server", "=", "self", ".", "_get_server", "(", "server_id", ")", "# Delete any instances we recorded to be cleaned up", "if", "server_id", "in", "self", ".", "_owned_subscriptions", ":", "inst_list", "=", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_filters", ":", "inst_list", "=", "self", ".", "_owned_filters", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_filters", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_destinations", ":", "inst_list", "=", "self", ".", "_owned_destinations", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_destinations", "[", "server_id", "]", "# Remove server from this listener", "del", "self", ".", "_servers", "[", "server_id", "]" ]
Return True only if the arguments passed specify exact namespaces and to conduct a rename of every namespace .
def _full_rename ( args ) : return ( args . ns and all ( map ( args . rename . affects , args . ns ) ) )
3,452
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L297-L305
[ "def", "debug", "(", "self", ")", ":", "url", "=", "'{}/debug/status'", ".", "format", "(", "self", ".", "url", ")", "data", "=", "self", ".", "_get", "(", "url", ")", "return", "data", ".", "json", "(", ")" ]
Apply operation in db
def apply ( db , op ) : dbname = op [ 'ns' ] . split ( '.' ) [ 0 ] or "admin" opts = bson . CodecOptions ( uuid_representation = bson . binary . STANDARD ) db [ dbname ] . command ( "applyOps" , [ op ] , codec_options = opts )
3,453
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L445-L451
[ "def", "EnumKey", "(", "key", ",", "index", ")", ":", "regenumkeyex", "=", "advapi32", "[", "\"RegEnumKeyExW\"", "]", "regenumkeyex", ".", "restype", "=", "ctypes", ".", "c_long", "regenumkeyex", ".", "argtypes", "=", "[", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "wintypes", ".", "DWORD", ",", "ctypes", ".", "c_wchar_p", ",", "LPDWORD", ",", "LPDWORD", ",", "ctypes", ".", "c_wchar_p", ",", "LPDWORD", ",", "ctypes", ".", "POINTER", "(", "FileTime", ")", "]", "buf", "=", "ctypes", ".", "create_unicode_buffer", "(", "257", ")", "length", "=", "ctypes", ".", "wintypes", ".", "DWORD", "(", "257", ")", "rc", "=", "regenumkeyex", "(", "key", ".", "handle", ",", "index", ",", "ctypes", ".", "cast", "(", "buf", ",", "ctypes", ".", "c_wchar_p", ")", ",", "ctypes", ".", "byref", "(", "length", ")", ",", "LPDWORD", "(", ")", ",", "ctypes", ".", "c_wchar_p", "(", ")", ",", "LPDWORD", "(", ")", ",", "ctypes", ".", "POINTER", "(", "FileTime", ")", "(", ")", ")", "if", "rc", "!=", "0", ":", "raise", "ctypes", ".", "WinError", "(", "2", ")", "return", "ctypes", ".", "wstring_at", "(", "buf", ",", "length", ".", "value", ")", ".", "rstrip", "(", "u\"\\x00\"", ")" ]
Query the oplog for items since ts and then return
def since ( self , ts ) : spec = { 'ts' : { '$gt' : ts } } cursor = self . query ( spec ) while True : # todo: trap InvalidDocument errors: # except bson.errors.InvalidDocument as e: # logging.info(repr(e)) for doc in cursor : yield doc if not cursor . alive : break time . sleep ( 1 )
3,454
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L472-L486
[ "def", "create", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "super", "(", "ImageMemberManager", ",", "self", ")", ".", "create", "(", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "if", "e", ".", "http_status", "==", "403", ":", "raise", "exc", ".", "UnsharableImage", "(", "\"You cannot share a public image.\"", ")", "else", ":", "raise" ]
Determine if there are any ops before ts
def has_ops_before ( self , ts ) : spec = { 'ts' : { '$lt' : ts } } return bool ( self . coll . find_one ( spec ) )
3,455
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L488-L493
[ "def", "wrap_file_exceptions", "(", ")", ":", "try", ":", "yield", "except", "(", "PermissionError", ",", "FileNotFoundError", ")", ":", "# pragma: only py3", "# In py3, we want to raise Exception unchanged, so there would be no need for this block.", "# BUT (IOError, OSError) - see below - also matches, so we capture it here", "raise", "except", "(", "IOError", ",", "OSError", ")", "as", "e", ":", "# pragma: only py2", "if", "e", ".", "errno", "==", "errno", ".", "EACCES", ":", "raise", "PermissionError", "(", "str", "(", "e", ")", ")", "elif", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "raise", "FileNotFoundError", "(", "str", "(", "e", ")", ")", "raise" ]
Tail the oplog starting from ts .
def since ( self , ts ) : while True : items = super ( TailingOplog , self ) . since ( ts ) for doc in items : yield doc ts = doc [ 'ts' ]
3,456
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L502-L510
[ "def", "bucket", "(", "self", ",", "experiment", ",", "user_id", ",", "bucketing_id", ")", ":", "if", "not", "experiment", ":", "return", "None", "# Determine if experiment is in a mutually exclusive group", "if", "experiment", ".", "groupPolicy", "in", "GROUP_POLICIES", ":", "group", "=", "self", ".", "config", ".", "get_group", "(", "experiment", ".", "groupId", ")", "if", "not", "group", ":", "return", "None", "user_experiment_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "groupId", ",", "group", ".", "trafficAllocation", ")", "if", "not", "user_experiment_id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no experiment.'", "%", "user_id", ")", "return", "None", "if", "user_experiment_id", "!=", "experiment", ".", "id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is not in experiment \"%s\" of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "return", "None", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in experiment %s of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "# Bucket user if not in white-list and in group (if any)", "variation_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "id", ",", "experiment", ".", "trafficAllocation", ")", "if", "variation_id", ":", "variation", "=", "self", ".", "config", ".", "get_variation_from_id", "(", "experiment", ".", "key", ",", "variation_id", ")", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in variation \"%s\" of experiment %s.'", "%", "(", "user_id", ",", "variation", ".", "key", ",", "experiment", ".", "key", ")", ")", "return", "variation", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no variation.'", "%", "user_id", ")", "return", "None" ]
Serialize self to text stream .
def dump ( self , stream ) : items = ( ( 'time' , self . time ) , ( 'inc' , self . inc ) , ) # use ordered dict to retain order ts = collections . OrderedDict ( items ) json . dump ( dict ( ts = ts ) , stream )
3,457
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L524-L535
[ "def", "get", "(", "self", ",", "*", "index", ")", ":", "assert", "self", ".", "wrapFunction", "is", "not", "None", "if", "len", "(", "index", ")", "==", "1", "and", "isinstance", "(", "index", "[", "0", "]", ",", "(", "tuple", ",", "list", ")", ")", ":", "index", "=", "index", "[", "0", "]", "if", "len", "(", "index", ")", "==", "0", ":", "return", "self", ".", "wrapFunction", "(", "self", ".", "_impl", ".", "get", "(", ")", ")", "else", ":", "return", "self", ".", "wrapFunction", "(", "self", ".", "_impl", ".", "get", "(", "Tuple", "(", "index", ")", ".", "_impl", ")", ")" ]
Load a serialized version of self from text stream .
def load ( cls , stream ) : data = json . load ( stream ) [ 'ts' ] return cls ( data [ 'time' ] , data [ 'inc' ] )
3,458
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L538-L544
[ "def", "rmdir", "(", "self", ",", "tid", ")", ":", "pt", "=", "self", ".", "PathType", ".", "get", "(", "tid", ")", "if", "pt", "is", "self", ".", "PathType", ".", "main", ":", "raise", "FuseOSError", "(", "errno", ".", "EINVAL", ")", "elif", "pt", "is", "not", "self", ".", "PathType", ".", "subdir", ":", "raise", "FuseOSError", "(", "errno", ".", "ENOTDIR", ")", "try", ":", "self", ".", "searches", "[", "tid", "[", "0", "]", "]", ".", "clean", "(", ")", "del", "self", ".", "searches", "[", "tid", "[", "0", "]", "]", "except", "KeyError", ":", "raise", "FuseOSError", "(", "errno", ".", "ENOENT", ")", "return", "0" ]
Given a timedelta window return a timestamp representing that time .
def for_window ( cls , window ) : utcnow = datetime . datetime . utcnow ( ) return cls ( utcnow - window , 0 )
3,459
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L547-L553
[ "def", "_initialize_node_agents", "(", "self", ")", ":", "nodes", "=", "_get_dict_from_list", "(", "'nodes'", ",", "self", ".", "cx", ")", "invalid_genes", "=", "[", "]", "for", "node", "in", "nodes", ":", "id", "=", "node", "[", "'@id'", "]", "cx_db_refs", "=", "self", ".", "get_aliases", "(", "node", ")", "up_id", "=", "cx_db_refs", ".", "get", "(", "'UP'", ")", "if", "up_id", ":", "gene_name", "=", "uniprot_client", ".", "get_gene_name", "(", "up_id", ")", "hgnc_id", "=", "hgnc_client", ".", "get_hgnc_id", "(", "gene_name", ")", "db_refs", "=", "{", "'UP'", ":", "up_id", ",", "'HGNC'", ":", "hgnc_id", ",", "'TEXT'", ":", "gene_name", "}", "agent", "=", "Agent", "(", "gene_name", ",", "db_refs", "=", "db_refs", ")", "self", ".", "_node_names", "[", "id", "]", "=", "gene_name", "self", ".", "_node_agents", "[", "id", "]", "=", "agent", "continue", "else", ":", "node_name", "=", "node", "[", "'n'", "]", "self", ".", "_node_names", "[", "id", "]", "=", "node_name", "hgnc_id", "=", "hgnc_client", ".", "get_hgnc_id", "(", "node_name", ")", "db_refs", "=", "{", "'TEXT'", ":", "node_name", "}", "if", "not", "hgnc_id", ":", "if", "not", "self", ".", "require_grounding", ":", "self", ".", "_node_agents", "[", "id", "]", "=", "Agent", "(", "node_name", ",", "db_refs", "=", "db_refs", ")", "invalid_genes", ".", "append", "(", "node_name", ")", "else", ":", "db_refs", ".", "update", "(", "{", "'HGNC'", ":", "hgnc_id", "}", ")", "up_id", "=", "hgnc_client", ".", "get_uniprot_id", "(", "hgnc_id", ")", "# It's possible that a valid HGNC ID will not have a", "# Uniprot ID, as in the case of HOTAIR (HOX transcript", "# antisense RNA, HGNC:33510)", "if", "up_id", ":", "db_refs", ".", "update", "(", "{", "'UP'", ":", "up_id", "}", ")", "self", ".", "_node_agents", "[", "id", "]", "=", "Agent", "(", "node_name", ",", "db_refs", "=", "db_refs", ")", "if", "invalid_genes", ":", "verb", "=", "'Skipped'", "if", "self", ".", "require_grounding", "else", "'Included'", "logger", ".", 
"info", "(", "'%s invalid gene symbols: %s'", "%", "(", "verb", ",", "', '", ".", "join", "(", "invalid_genes", ")", ")", ")" ]
Save timestamp to file .
def save ( self , ts ) : with open ( self , 'w' ) as f : Timestamp . wrap ( ts ) . dump ( f )
3,460
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/oplog.py#L557-L562
[ "def", "AND", "(", "queryArr", ",", "exclude", "=", "None", ")", ":", "assert", "isinstance", "(", "queryArr", ",", "list", ")", ",", "\"provided argument as not a list\"", "assert", "len", "(", "queryArr", ")", ">", "0", ",", "\"queryArr had an empty list\"", "q", "=", "CombinedQuery", "(", ")", "q", ".", "setQueryParam", "(", "\"$and\"", ",", "[", "]", ")", "for", "item", "in", "queryArr", ":", "assert", "isinstance", "(", "item", ",", "(", "CombinedQuery", ",", "BaseQuery", ")", ")", ",", "\"item in the list was not a CombinedQuery or BaseQuery instance\"", "q", ".", "getQuery", "(", ")", "[", "\"$and\"", "]", ".", "append", "(", "item", ".", "getQuery", "(", ")", ")", "if", "exclude", "!=", "None", ":", "assert", "isinstance", "(", "exclude", ",", "(", "CombinedQuery", ",", "BaseQuery", ")", ")", ",", "\"exclude parameter was not a CombinedQuery or BaseQuery instance\"", "q", ".", "setQueryParam", "(", "\"$not\"", ",", "exclude", ".", "getQuery", "(", ")", ")", "return", "q" ]
Splits a string into tokens ready to be inserted into the search index .
def Tokenizer ( obj , metadata = None , separator = SEPARATOR ) : if obj is None : return [ ] metadata = metadata or { } if isinstance ( obj , ( list , tuple ) ) : return [ Token ( as_string ( element ) . lower ( ) , deepcopy ( metadata ) ) for element in obj ] string = str ( obj ) . strip ( ) . lower ( ) length = len ( string ) tokens = [ ] slice_start = 0 for slice_end in range ( length ) : char = string [ slice_end ] slice_length = slice_end - slice_start if separator . match ( char ) or slice_end == length - 1 : if slice_length > 0 : sl = slice ( slice_start , slice_end if slice_end < length - 1 else None ) token_metadata = { } token_metadata [ "position" ] = [ slice_start , slice_length if slice_end < length - 1 else slice_length + 1 , ] token_metadata [ "index" ] = len ( tokens ) token_metadata . update ( metadata ) tokens . append ( Token ( string [ sl ] , token_metadata ) ) slice_start = slice_end + 1 return tokens
3,461
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/tokenizer.py#L14-L59
[ "def", "cross", "(", "self", ",", "vec", ")", ":", "if", "not", "isinstance", "(", "vec", ",", "Vector3Array", ")", ":", "raise", "TypeError", "(", "'Cross product operand must be a Vector3Array'", ")", "if", "self", ".", "nV", "!=", "1", "and", "vec", ".", "nV", "!=", "1", "and", "self", ".", "nV", "!=", "vec", ".", "nV", ":", "raise", "ValueError", "(", "'Cross product operands must have the same '", "'number of elements.'", ")", "return", "Vector3Array", "(", "np", ".", "cross", "(", "self", ",", "vec", ")", ")" ]
Yield all non - sytem collections in db .
def all_collections ( db ) : include_pattern = r'(?!system\.)' return ( db [ name ] for name in db . list_collection_names ( ) if re . match ( include_pattern , name ) )
3,462
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/manage.py#L16-L25
[ "async", "def", "set_max_relative_mod", "(", "self", ",", "max_mod", ",", "timeout", "=", "OTGW_DEFAULT_TIMEOUT", ")", ":", "if", "isinstance", "(", "max_mod", ",", "int", ")", "and", "not", "0", "<=", "max_mod", "<=", "100", ":", "return", "None", "cmd", "=", "OTGW_CMD_MAX_MOD", "status", "=", "{", "}", "ret", "=", "await", "self", ".", "_wait_for_cmd", "(", "cmd", ",", "max_mod", ",", "timeout", ")", "if", "ret", "not", "in", "[", "'-'", ",", "None", "]", ":", "ret", "=", "int", "(", "ret", ")", "if", "ret", "==", "'-'", ":", "status", "[", "DATA_SLAVE_MAX_RELATIVE_MOD", "]", "=", "None", "else", ":", "status", "[", "DATA_SLAVE_MAX_RELATIVE_MOD", "]", "=", "ret", "self", ".", "_update_status", "(", "status", ")", "return", "ret" ]
Cannot remove documents from capped collections in later versions of MongoDB so drop the collection instead .
def safe_purge_collection ( coll ) : op = ( drop_collection if coll . options ( ) . get ( 'capped' , False ) else purge_collection ) return op ( coll )
3,463
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/manage.py#L32-L43
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Builds a stopWordFilter function from the provided list of stop words .
def generate_stop_word_filter ( stop_words , language = None ) : def stop_word_filter ( token , i = None , tokens = None ) : if token and str ( token ) not in stop_words : return token # camelCased for for compatibility with lunr.js label = ( "stopWordFilter-{}" . format ( language ) if language is not None else "stopWordFilter" ) Pipeline . register_function ( stop_word_filter , label ) return stop_word_filter
3,464
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/stop_word_filter.py#L130-L149
[ "def", "_read_config", "(", "config_location", ")", ":", "global", "LOGGING_CONFIG", "with", "open", "(", "config_location", ",", "\"r\"", ")", "as", "config_loc", ":", "cfg_file", "=", "json", ".", "load", "(", "config_loc", ")", "if", "\"logging\"", "in", "cfg_file", ":", "log_dict", "=", "cfg_file", ".", "get", "(", "\"logging\"", ")", "with", "open", "(", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "__file__", ",", "os", ".", "path", ".", "pardir", ",", "'logging_schema.json'", ")", ")", ")", "as", "schema_file", ":", "logging_schema", "=", "json", ".", "load", "(", "schema_file", ")", "jsonschema", ".", "validate", "(", "log_dict", ",", "logging_schema", ")", "merged", "=", "jsonmerge", ".", "merge", "(", "LOGGING_CONFIG", ",", "log_dict", ")", "LOGGING_CONFIG", "=", "merged" ]
Printable Pseudo Electronic Serial Number .
def pesn ( number , separator = u'' ) : number = re . sub ( r'[\s-]' , '' , meid ( number ) ) serial = hashlib . sha1 ( unhexlify ( number [ : 14 ] ) ) return separator . join ( [ '80' , serial . hexdigest ( ) [ - 6 : ] . upper ( ) ] )
3,465
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/phone.py#L234-L246
[ "def", "set", "(", "self", ",", "value", ")", ":", "if", "self", ".", "is_value_set", ":", "# While we want to *allow* re-setting previously set measurements, we'd", "# rather promote the use of multidimensional measurements instead of", "# discarding data, so we make this somewhat chatty.", "_LOG", ".", "warning", "(", "'Overriding previous measurement %s value of %s with %s, the old '", "'value will be lost. Use a dimensioned measurement if you need to '", "'save multiple values.'", ",", "self", ".", "name", ",", "self", ".", "stored_value", ",", "value", ")", "if", "value", "is", "None", ":", "_LOG", ".", "warning", "(", "'Measurement %s is set to None'", ",", "self", ".", "name", ")", "self", ".", "stored_value", "=", "value", "self", ".", "_cached_value", "=", "data", ".", "convert_to_base_types", "(", "value", ")", "self", ".", "is_value_set", "=", "True" ]
Convert a file size into natural readable format . Multiple formats are supported .
def filesize ( value , format = 'decimal' , digits = 2 ) : if format not in FILESIZE_SUFFIX : raise TypeError base = FILESIZE_BASE [ format ] size = int ( value ) sign = size < 0 and u'-' or '' size = abs ( size ) for i , suffix in enumerate ( FILESIZE_SUFFIX [ format ] ) : unit = base ** ( i + 1 ) if size < unit : result = u'' . join ( [ sign , _format ( base * size / float ( unit ) , digits ) , u' ' , suffix , ] ) if format == 'gnu' : result = result . replace ( ' ' , '' ) return result raise OverflowError
3,466
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/size.py#L12-L51
[ "def", "fixed_terms", "(", "self", ")", ":", "return", "{", "k", ":", "v", "for", "(", "k", ",", "v", ")", "in", "self", ".", "terms", ".", "items", "(", ")", "if", "not", "v", ".", "random", "}" ]
Creates filter object for given class name and DN values .
def create_dn_in_filter ( filter_class , filter_value , helper ) : in_filter = FilterFilter ( ) in_filter . AddChild ( create_dn_wcard_filter ( filter_class , filter_value ) ) return in_filter
3,467
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/helper.py#L24-L28
[ "def", "validate_pai_trial_conifg", "(", "experiment_config", ")", ":", "if", "experiment_config", ".", "get", "(", "'trainingServicePlatform'", ")", "==", "'pai'", ":", "if", "experiment_config", ".", "get", "(", "'trial'", ")", ".", "get", "(", "'shmMB'", ")", "and", "experiment_config", "[", "'trial'", "]", "[", "'shmMB'", "]", ">", "experiment_config", "[", "'trial'", "]", "[", "'memoryMB'", "]", ":", "print_error", "(", "'shmMB should be no more than memoryMB!'", ")", "exit", "(", "1", ")" ]
Get the specified MO from UCS Manager .
def get_managed_object ( handle , class_id , params , inMo = None , in_heir = False , dump = False ) : return handle . GetManagedObject ( inMo , class_id , params , inHierarchical = in_heir , dumpXml = dump )
3,468
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/helper.py#L61-L72
[ "def", "_generate_notebook_by_difficulty_body", "(", "notebook_object", ",", "dict_by_difficulty", ")", ":", "difficulty_keys", "=", "list", "(", "dict_by_difficulty", ".", "keys", "(", ")", ")", "difficulty_keys", ".", "sort", "(", ")", "for", "difficulty", "in", "difficulty_keys", ":", "markdown_cell", "=", "STAR_TABLE_HEADER", "markdown_cell", "=", "_set_star_value", "(", "markdown_cell", ",", "int", "(", "difficulty", ")", ")", "for", "notebook_file", "in", "dict_by_difficulty", "[", "str", "(", "difficulty", ")", "]", ":", "split_path", "=", "notebook_file", ".", "split", "(", "\"/\"", ")", "notebook_type", "=", "split_path", "[", "-", "2", "]", "notebook_name", "=", "split_path", "[", "-", "1", "]", ".", "split", "(", "\"&\"", ")", "[", "0", "]", "notebook_title", "=", "split_path", "[", "-", "1", "]", ".", "split", "(", "\"&\"", ")", "[", "1", "]", "markdown_cell", "+=", "\"\\n\\t<tr>\\n\\t\\t<td width='20%' class='header_image_color_\"", "+", "str", "(", "NOTEBOOK_KEYS", "[", "notebook_type", "]", ")", "+", "\"'><img \"", "\"src='../../images/icons/\"", "+", "notebook_type", ".", "title", "(", ")", "+", "\".png' width='15%'>\\n\\t\\t</td>\"", "markdown_cell", "+=", "\"\\n\\t\\t<td width='60%' class='center_cell open_cell_light'>\"", "+", "notebook_title", "+", "\"\\n\\t\\t</td>\"", "markdown_cell", "+=", "\"\\n\\t\\t<td width='20%' class='center_cell'>\\n\\t\\t\\t<a href='\"", "\"../\"", "+", "notebook_type", ".", "title", "(", ")", "+", "\"/\"", "+", "notebook_name", "+", "\"'><div class='file_icon'></div></a>\\n\\t\\t</td>\\n\\t</tr>\"", "markdown_cell", "+=", "\"</table>\"", "# ==================== Insertion of HTML table in a new Notebook cell ======================", "notebook_object", "[", "\"cells\"", "]", ".", "append", "(", "nb", ".", "v4", ".", "new_markdown_cell", "(", "markdown_cell", ")", ")" ]
Configure the specified MO in UCS Manager .
def config_managed_object ( p_dn , p_class_id , class_id , mo_config , mo_dn , handle = None , delete = True ) : if handle is None : handle = self . handle try : result = handle . AddManagedObject ( None , classId = class_id , params = mo_config , modifyPresent = True , dumpXml = YesOrNo . FALSE ) return result except UcsException as ex : print ( _ ( "Cisco client exception: %(msg)s" ) , { 'msg' : ex } ) raise exception . UcsOperationError ( 'config_managed_object' , error = ex )
3,469
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/helper.py#L83-L109
[ "def", "get", "(", "self", ")", ":", "now", "=", "time", ".", "time", "(", ")", "while", "self", ".", "_clients", "and", "self", ".", "_clients", "[", "0", "]", "[", "0", "]", "<", "now", ":", "_", ",", "(", "client", ",", "last_wait", ")", "=", "heapq", ".", "heappop", "(", "self", ".", "_clients", ")", "connect_start", "=", "time", ".", "time", "(", ")", "try", ":", "client", ".", "echo", "(", "\"test\"", ")", "# reconnected if this succeeds.", "self", ".", "_client_ids", ".", "remove", "(", "client", ".", "pool_id", ")", "yield", "client", "except", "(", "ConnectionError", ",", "TimeoutError", ")", ":", "timer", "=", "time", ".", "time", "(", ")", "-", "connect_start", "wait", "=", "min", "(", "int", "(", "last_wait", "*", "self", ".", "_multiplier", ")", ",", "self", ".", "_max_wait", ")", "heapq", ".", "heappush", "(", "self", ".", "_clients", ",", "(", "time", ".", "time", "(", ")", "+", "wait", ",", "(", "client", ",", "wait", ")", ")", ")", "log", ".", "info", "(", "\"%r is still down after a %s second attempt to connect. Retrying in %ss.\"", ",", "client", ",", "timer", ",", "wait", ",", ")" ]
Build a projection for MongoDB .
def project ( * args , * * kwargs ) : projection = dict ( * args , * * kwargs ) return { key : int ( value ) for key , value in six . iteritems ( projection ) }
3,470
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/query.py#L6-L20
[ "def", "_parse_node", "(", "self", ",", "node", ",", "state", ")", ":", "# Processing on entry of node", "state", "=", "self", ".", "_parse_section", "(", "node", ",", "state", ")", "state", "=", "self", ".", "_parse_figure", "(", "node", ",", "state", ")", "if", "self", ".", "tabular", ":", "state", "=", "self", ".", "_parse_table", "(", "node", ",", "state", ")", "state", "=", "self", ".", "_parse_caption", "(", "node", ",", "state", ")", "yield", "from", "self", ".", "_parse_paragraph", "(", "node", ",", "state", ")" ]
Fetch exactly one matching document or upsert the document if not found returning the matching or upserted document .
def upsert_and_fetch ( coll , doc , * * kwargs ) : return coll . find_one_and_update ( doc , { "$setOnInsert" : doc } , upsert = True , return_document = pymongo . ReturnDocument . AFTER , * * kwargs )
3,471
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/query.py#L39-L64
[ "def", "client_end_all", "(", ")", ":", "for", "request", ",", "socket", ",", "context", "in", "CLIENTS", ".", "values", "(", ")", "[", ":", "]", ":", "client_end", "(", "request", ",", "socket", ",", "context", ")" ]
Synchronize state with switch .
def update ( self , retry = 2 ) -> None : try : _LOGGER . debug ( "Updating device state." ) key = ON_KEY if not self . _flip_on_off else OFF_KEY self . state = self . _device . readCharacteristic ( HANDLE ) == key except ( bluepy . btle . BTLEException , AttributeError ) : if retry < 1 or not self . _connect ( ) : self . available = False _LOGGER . error ( "Failed to update device state." , exc_info = True ) return None return self . update ( retry - 1 ) self . available = True return None
3,472
https://github.com/Danielhiversen/pySwitchmate/blob/9563345d35d7dcc0b920b1939eea9a7897223221/switchmate/__init__.py#L56-L69
[ "def", "listrecords", "(", "*", "*", "kwargs", ")", ":", "record_dumper", "=", "serializer", "(", "kwargs", "[", "'metadataPrefix'", "]", ")", "e_tree", ",", "e_listrecords", "=", "verb", "(", "*", "*", "kwargs", ")", "result", "=", "get_records", "(", "*", "*", "kwargs", ")", "for", "record", "in", "result", ".", "items", ":", "pid", "=", "oaiid_fetcher", "(", "record", "[", "'id'", "]", ",", "record", "[", "'json'", "]", "[", "'_source'", "]", ")", "e_record", "=", "SubElement", "(", "e_listrecords", ",", "etree", ".", "QName", "(", "NS_OAIPMH", ",", "'record'", ")", ")", "header", "(", "e_record", ",", "identifier", "=", "pid", ".", "pid_value", ",", "datestamp", "=", "record", "[", "'updated'", "]", ",", "sets", "=", "record", "[", "'json'", "]", "[", "'_source'", "]", ".", "get", "(", "'_oai'", ",", "{", "}", ")", ".", "get", "(", "'sets'", ",", "[", "]", ")", ",", ")", "e_metadata", "=", "SubElement", "(", "e_record", ",", "etree", ".", "QName", "(", "NS_OAIPMH", ",", "'metadata'", ")", ")", "e_metadata", ".", "append", "(", "record_dumper", "(", "pid", ",", "record", "[", "'json'", "]", ")", ")", "resumption_token", "(", "e_listrecords", ",", "result", ",", "*", "*", "kwargs", ")", "return", "e_tree" ]
An instance of lunr . MatchData will be created for every term that matches a document .
def combine ( self , other ) : for term in other . metadata . keys ( ) : if term not in self . metadata : self . metadata [ term ] = { } fields = other . metadata [ term ] . keys ( ) for field in fields : if field not in self . metadata [ term ] : self . metadata [ term ] [ field ] = { } keys = other . metadata [ term ] [ field ] . keys ( ) for key in keys : if key not in self . metadata [ term ] [ field ] : self . metadata [ term ] [ field ] [ key ] = other . metadata [ term ] [ field ] [ key ] else : self . metadata [ term ] [ field ] [ key ] . extend ( other . metadata [ term ] [ field ] [ key ] )
3,473
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/match_data.py#L25-L51
[ "def", "delete_permission", "(", "self", ",", "username", ",", "virtual_host", ")", ":", "virtual_host", "=", "quote", "(", "virtual_host", ",", "''", ")", "return", "self", ".", "http_client", ".", "delete", "(", "API_USER_VIRTUAL_HOST_PERMISSIONS", "%", "(", "virtual_host", ",", "username", ")", ")" ]
Get current power state of this node
def get_power_state ( self ) : rn_array = [ self . helper . service_profile , ManagedObject ( NamingId . LS_POWER ) . MakeRn ( ) ] try : ls_power = ucs_helper . get_managed_object ( self . helper . handle , LsPower . ClassId ( ) , { LsPower . DN : UcsUtils . MakeDn ( rn_array ) } ) if not ls_power : raise exception . UcsOperationError ( "get_power_state" , "Failed to get LsPower MO, configure valid " "service-profile" ) return ls_power [ 0 ] . getattr ( LsPower . STATE ) except UcsException as ex : raise exception . UcsOperationError ( message = ex )
3,474
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/power.py#L30-L52
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Set power state of this node
def set_power_state ( self , desired_state ) : rn_array = [ self . helper . service_profile , ManagedObject ( NamingId . LS_POWER ) . MakeRn ( ) ] try : ls_power = ucs_helper . get_managed_object ( self . helper . handle , LsPower . ClassId ( ) , { LsPower . DN : UcsUtils . MakeDn ( rn_array ) } ) if not ls_power : raise exception . UcsOperationError ( "set_power_state" , "Failed to get power MO," " configure valid service-profile." ) else : ls_power_set = self . helper . handle . SetManagedObject ( ls_power , LsPower . ClassId ( ) , { LsPower . STATE : desired_state } , dumpXml = YesOrNo . TRUE ) if ls_power_set : power = ls_power_set . pop ( ) return power . getattr ( LsPower . STATE ) else : return states . ERROR except Exception as ex : raise exception . UcsOperationError ( "set_power_state" , "Failed to get power MO," "configure valid servie-profile." )
3,475
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/power.py#L54-L88
[ "def", "populateFromDirectory", "(", "self", ",", "vcfDirectory", ")", ":", "pattern", "=", "os", ".", "path", ".", "join", "(", "vcfDirectory", ",", "\"*.vcf.gz\"", ")", "dataFiles", "=", "[", "]", "indexFiles", "=", "[", "]", "for", "vcfFile", "in", "glob", ".", "glob", "(", "pattern", ")", ":", "dataFiles", ".", "append", "(", "vcfFile", ")", "indexFiles", ".", "append", "(", "vcfFile", "+", "\".tbi\"", ")", "self", ".", "populateFromFile", "(", "dataFiles", ",", "indexFiles", ")" ]
Hard reset the power of this node .
def reboot ( self ) : if self . get_power_state ( ) == LsPower . CONST_STATE_DOWN : self . set_power_state ( LsPower . CONST_STATE_UP ) else : self . set_power_state ( LsPower . CONST_STATE_HARD_RESET_IMMEDIATE )
3,476
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/utils/power.py#L90-L96
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Use the factory to establish a connection to uri .
def connect ( uri , factory = pymongo . MongoClient ) : warnings . warn ( "do not use. Just call MongoClient directly." , DeprecationWarning ) return factory ( uri )
3,477
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/helper.py#L18-L24
[ "def", "iter_series", "(", "self", ",", "workbook", ",", "row", ",", "col", ")", ":", "for", "series", "in", "self", ".", "__series", ":", "series", "=", "dict", "(", "series", ")", "series", "[", "\"values\"", "]", "=", "series", "[", "\"values\"", "]", ".", "get_formula", "(", "workbook", ",", "row", ",", "col", ")", "if", "\"categories\"", "in", "series", ":", "series", "[", "\"categories\"", "]", "=", "series", "[", "\"categories\"", "]", ".", "get_formula", "(", "workbook", ",", "row", ",", "col", ")", "yield", "series" ]
Construct a GridFS instance for a MongoDB URI .
def connect_gridfs ( uri , db = None ) : return gridfs . GridFS ( db or connect_db ( uri ) , collection = get_collection ( uri ) or 'fs' , )
3,478
https://github.com/jaraco/jaraco.mongodb/blob/280f17894941f4babf2e97db033dbb1fd2b9f705/jaraco/mongodb/helper.py#L66-L73
[ "def", "get_events_in_both_arrays", "(", "events_one", ",", "events_two", ")", ":", "events_one", "=", "np", ".", "ascontiguousarray", "(", "events_one", ")", "# change memory alignement for c++ library", "events_two", "=", "np", ".", "ascontiguousarray", "(", "events_two", ")", "# change memory alignement for c++ library", "event_result", "=", "np", ".", "empty_like", "(", "events_one", ")", "count", "=", "analysis_functions", ".", "get_events_in_both_arrays", "(", "events_one", ",", "events_two", ",", "event_result", ")", "return", "event_result", "[", ":", "count", "]" ]
Internal method to support CompareManagedObject functionality .
def Compare ( fromMo , toMo , diff ) : from UcsBase import UcsUtils if ( fromMo . classId != toMo . classId ) : return CompareStatus . TypesDifferent for prop in UcsUtils . GetUcsPropertyMetaAttributeList ( str ( fromMo . classId ) ) : propMeta = UcsUtils . IsPropertyInMetaIgnoreCase ( fromMo . classId , prop ) if propMeta != None : if ( ( propMeta . access == UcsPropertyMeta . Internal ) or ( propMeta . access == UcsPropertyMeta . ReadOnly ) or ( prop in toMo . _excludePropList ) ) : continue if ( ( toMo . __dict__ . has_key ( prop ) ) and ( fromMo . getattr ( prop ) != toMo . getattr ( prop ) ) ) : diff . append ( prop ) if ( len ( diff ) > 0 ) : return CompareStatus . PropsDifferent return CompareStatus . Equal
3,479
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L2742-L2761
[ "def", "translate_gitlab_exception", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "_wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "gitlab", ".", "GitlabError", "as", "e", ":", "status_to_exception", "=", "{", "401", ":", "UnauthorizedError", ",", "404", ":", "NotFoundError", ",", "}", "exc_class", "=", "status_to_exception", ".", "get", "(", "e", ".", "response_code", ",", "GitClientError", ")", "raise", "exc_class", "(", "str", "(", "e", ")", ",", "status_code", "=", "e", ".", "response_code", ")", "return", "_wrapper" ]
Method used to translate a managedobject . This method is used in CompareManagedObject .
def TranslateManagedObject ( mObj , xlateOrg , xlateMap ) : from UcsBase import UcsUtils , WriteUcsWarning from Mos import OrgOrg xMO = mObj . Clone ( ) xMO . SetHandle ( mObj . GetHandle ( ) ) if ( xlateOrg != None ) : matchObj = re . match ( r'^(org-[\-\.:_a-zA-Z0-9]{1,16}/)*org-[\-\.:_a-zA-Z0-9]{1,16}' , xMO . Dn ) if matchObj : if UcsUtils . WordL ( xMO . classId ) == OrgOrg . ClassId ( ) : orgMoMeta = UcsUtils . GetUcsPropertyMeta ( UcsUtils . WordU ( OrgOrg . ClassId ( ) ) , "Meta" ) if orgMoMeta == None : # TODO: Add Warning/Error messages in Logger. WriteUcsWarning ( '[Warning]: Could not translate [%s]' % ( xMO . Dn ) ) return xMO # Check for naming property matchObj1 = re . findall ( r'(\[[^\]]+\])' , orgMoMeta . rn ) if matchObj1 : UpdateMoDnAlongWithNamingProperties ( xMO , orgMoMeta , xlateOrg ) else : newDn = re . sub ( "%s" % ( matchObj . group ( 0 ) ) , "%s" % ( xlateOrg ) , xMO . Dn ) # print "Translating", xMO.Dn, " => ", newDn xMO . Dn = newDn else : newDn = re . sub ( "^%s/" % ( matchObj . group ( 0 ) ) , "%s/" % ( xlateOrg ) , xMO . Dn ) # print "Translating", xMO.Dn, " => ", newDn xMO . Dn = newDn if ( xlateMap != None ) : originalDn = xMO . Dn if originalDn in xlateMap : xMoMeta = UcsUtils . GetUcsPropertyMeta ( UcsUtils . WordU ( xMO . classId ) , "Meta" ) if xMoMeta == None : # TODO: Add Warning/Error messages in Logger. WriteUcsWarning ( '[Warning]: Could not translate [%s]' % ( originalDn ) ) return xMO # Check for naming property matchObj = re . findall ( r'(\[[^\]]+\])' , xMoMeta . rn ) if matchObj : UpdateMoDnAlongWithNamingProperties ( xMO , xMoMeta , xlateMap [ originalDn ] ) else : # print "Translating", xMO.Dn, " => ", xlateMap[originalDn] xMO . Dn = xlateMap [ originalDn ] else : originalDn = re . sub ( r'[/]*[^/]+$' , '' , originalDn ) while ( originalDn != None or originalDn == "" ) : if ( not ( originalDn in xlateMap ) ) : originalDn = re . sub ( r'[/]*[^/]+$' , '' , originalDn ) continue newDn = re . 
sub ( "^%s/" % ( originalDn ) , "%s/" % ( xlateMap [ originalDn ] ) , xMO . Dn ) # print "Translating", xMO.Dn, " => ", newDn xMO . Dn = newDn break return xMO
3,480
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L2764-L2822
[ "def", "get_chunked_archive", "(", "self", ",", "*", "*", "kwargs", ")", ":", "chunk_size", "=", "kwargs", ".", "pop", "(", "'chunk_size'", ",", "8192", ")", "stream", "=", "kwargs", ".", "get", "(", "'stream'", ")", "self", ".", "fill_archive", "(", "*", "*", "kwargs", ")", "while", "True", ":", "data", "=", "stream", ".", "read", "(", "chunk_size", ")", "if", "not", "data", ":", "break", "yield", "data" ]
This operation will do a login to each UCS which is present in credential file .
def ImportUcsSession ( filePath , key ) : from UcsBase import UcsUtils , WriteUcsWarning , UcsValidationException # from p3 import p3_encrypt, p3_decrypt if filePath is None : raise UcsValidationException ( "filePath parameter is not provided." ) # raise Exception('[Error]: Please provide filePath') if key is None : raise UcsValidationException ( "key parameter is not provided." ) # raise Exception('[Error]: Please provide key') if not os . path . isfile ( filePath ) or not os . path . exists ( filePath ) : raise UcsValidationException ( '[Error]: File <%s> does not exist ' % ( filePath ) ) # raise Exception('[Error]: File <%s> does not exist ' %(filePath)) doc = xml . dom . minidom . parse ( filePath ) topNode = doc . documentElement # print topNode.localName if topNode is None or topNode . localName != UcsLoginXml . UCS_HANDLES : return None if ( topNode . hasChildNodes ( ) ) : # childList = topNode._get_childNodes() # childCount = childList._get_length() childList = topNode . childNodes childCount = len ( childList ) for i in range ( childCount ) : childNode = childList . item ( i ) if ( childNode . nodeType != Node . ELEMENT_NODE ) : continue if childNode . localName != UcsLoginXml . UCS : continue lName = None lUsername = None lPassword = None lNoSsl = False lPort = None if childNode . hasAttribute ( UcsLoginXml . NAME ) : lName = childNode . getAttribute ( UcsLoginXml . NAME ) if childNode . hasAttribute ( UcsLoginXml . USER_NAME ) : lUsername = childNode . getAttribute ( UcsLoginXml . USER_NAME ) if childNode . hasAttribute ( UcsLoginXml . PASSWORD ) : # lPassword = p3_decrypt(childNode.getAttribute(UcsLoginXml.PASSWORD), key) lPassword = UcsUtils . DecryptPassword ( childNode . getAttribute ( UcsLoginXml . PASSWORD ) , key ) if childNode . hasAttribute ( UcsLoginXml . NO_SSL ) : lNoSsl = childNode . getAttribute ( UcsLoginXml . NO_SSL ) if childNode . hasAttribute ( UcsLoginXml . PORT ) : lPort = childNode . getAttribute ( UcsLoginXml . 
PORT ) # Process Login if ( ( lName is None ) or ( lUsername == None ) or ( lPassword == None ) ) : # WriteUcsWarning("[Warning] Insufficient information for login ...") continue try : handle = UcsHandle ( ) handle . Login ( name = lName , username = lUsername , password = lPassword , noSsl = lNoSsl , port = lPort ) except Exception , err : # TODO: Add Warning/Error messages in Logger. WriteUcsWarning ( "[Connection Error<%s>] %s" % ( lName , str ( err ) ) )
3,481
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L2950-L3026
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
Constructs the connection URI from name noSsl and port instance variables .
def Uri ( self ) : return ( "%s://%s%s" % ( ( "https" , "http" ) [ self . _noSsl == True ] , self . _name , ( ":" + str ( self . _port ) , "" ) [ ( ( ( self . _noSsl == False ) and ( self . _port == 80 ) ) or ( ( self . _noSsl == True ) and ( self . _port == 443 ) ) ) ] ) )
3,482
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L80-L83
[ "def", "_GetResourceTimestamps", "(", "self", ",", "pefile_object", ")", ":", "timestamps", "=", "[", "]", "if", "not", "hasattr", "(", "pefile_object", ",", "'DIRECTORY_ENTRY_RESOURCE'", ")", ":", "return", "timestamps", "for", "entrydata", "in", "pefile_object", ".", "DIRECTORY_ENTRY_RESOURCE", ".", "entries", ":", "directory", "=", "entrydata", ".", "directory", "timestamp", "=", "getattr", "(", "directory", ",", "'TimeDateStamp'", ",", "0", ")", "if", "timestamp", ":", "timestamps", ".", "append", "(", "timestamp", ")", "return", "timestamps" ]
Cancels any running transaction .
def UndoTransaction ( self ) : from Ucs import ConfigMap self . _transactionInProgress = False self . _configMap = ConfigMap ( )
3,483
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L95-L100
[ "def", "handlePortfolio", "(", "self", ",", "msg", ")", ":", "# log handler msg", "self", ".", "log_msg", "(", "\"portfolio\"", ",", "msg", ")", "# contract identifier", "contract_tuple", "=", "self", ".", "contract_to_tuple", "(", "msg", ".", "contract", ")", "contractString", "=", "self", ".", "contractString", "(", "contract_tuple", ")", "# try creating the contract", "self", ".", "registerContract", "(", "msg", ".", "contract", ")", "# new account?", "if", "msg", ".", "accountName", "not", "in", "self", ".", "_portfolios", ".", "keys", "(", ")", ":", "self", ".", "_portfolios", "[", "msg", ".", "accountName", "]", "=", "{", "}", "self", ".", "_portfolios", "[", "msg", ".", "accountName", "]", "[", "contractString", "]", "=", "{", "\"symbol\"", ":", "contractString", ",", "\"position\"", ":", "int", "(", "msg", ".", "position", ")", ",", "\"marketPrice\"", ":", "float", "(", "msg", ".", "marketPrice", ")", ",", "\"marketValue\"", ":", "float", "(", "msg", ".", "marketValue", ")", ",", "\"averageCost\"", ":", "float", "(", "msg", ".", "averageCost", ")", ",", "\"unrealizedPNL\"", ":", "float", "(", "msg", ".", "unrealizedPNL", ")", ",", "\"realizedPNL\"", ":", "float", "(", "msg", ".", "realizedPNL", ")", ",", "\"totalPNL\"", ":", "float", "(", "msg", ".", "realizedPNL", ")", "+", "float", "(", "msg", ".", "unrealizedPNL", ")", ",", "\"account\"", ":", "msg", ".", "accountName", "}", "# fire callback", "self", ".", "ibCallback", "(", "caller", "=", "\"handlePortfolio\"", ",", "msg", "=", "msg", ")" ]
Completes a transaction .
def CompleteTransaction ( self , dumpXml = None ) : from Ucs import ConfigMap , Pair from UcsBase import ManagedObject , WriteUcsWarning , WriteObject , UcsException self . _transactionInProgress = False ccm = self . ConfigConfMos ( self . _configMap , YesOrNo . FALSE , dumpXml ) self . _configMap = ConfigMap ( ) if ccm . errorCode == 0 : moList = [ ] for child in ccm . OutConfigs . GetChild ( ) : if ( isinstance ( child , Pair ) == True ) : for mo in child . GetChild ( ) : moList . append ( mo ) elif ( isinstance ( child , ManagedObject ) == True ) : moList . append ( child ) # WriteObject(moList) return moList else : # raise Exception('[Error]: CompleteTransaction [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr) raise UcsException ( ccm . errorCode , ccm . errorDescr )
3,484
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L102-L127
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Accepts xmlQuery String and returns xml response String . No object manipulation is done in this method .
def XmlRawQuery ( self , xml , dumpXml = None ) : if ( dumpXml == None ) : dumpXml = self . _dumpXml uri = self . Uri ( ) + '/nuova' if ( dumpXml in _AffirmativeList ) : print '%s ====> %s' % ( self . _ucs , xml ) # req = urllib2.Request(url=uri,data=xml) # f = urllib2.urlopen(req) w = xml . dom . minidom . Document ( ) if ( self . _noSsl ) : req = urllib2 . Request ( url = uri , data = w . toxml ( ) ) opener = urllib2 . build_opener ( SmartRedirectHandler ( ) ) f = opener . open ( req ) # print "##", f , "##" if type ( f ) is list : if ( len ( f ) == 2 and f [ 0 ] == 302 ) : # self._noSsl = False # uri = self.Uri() + '/nuova' uri = f [ 1 ] req = urllib2 . Request ( url = uri , data = w . toxml ( ) ) f = urllib2 . urlopen ( req ) # print "status code is:",f[0] # print "location is:", f[1] else : req = urllib2 . Request ( url = uri , data = w . toxml ( ) ) f = urllib2 . urlopen ( req ) rsp = f . read ( ) if ( dumpXml in _AffirmativeList ) : print '%s <==== %s' % ( self . _ucs , rsp ) return rsp
3,485
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L268-L303
[ "def", "create_constraints", "(", "self", ",", "courses", ")", ":", "for", "i", ",", "course1", "in", "enumerate", "(", "courses", ")", ":", "for", "j", ",", "course2", "in", "enumerate", "(", "courses", ")", ":", "if", "i", "<=", "j", ":", "continue", "self", ".", "p", ".", "add_constraint", "(", "self", ".", "section_constraint", ",", "[", "course1", ",", "course2", "]", ")", "self", ".", "p", ".", "add_constraint", "(", "self", ".", "time_conflict", ",", "[", "course1", "]", ")" ]
Logout method disconnects from UCS .
def Logout ( self , dumpXml = None ) : from UcsBase import UcsException if ( self . _cookie == None ) : return True if self . _refreshTimer : self . _refreshTimer . cancel ( ) response = self . AaaLogout ( dumpXml ) self . _cookie = None self . _lastUpdateTime = str ( time . asctime ( ) ) self . _domains = None self . _priv = None self . _sessionId = None self . _version = None if self . _ucs in defaultUcs : del defaultUcs [ self . _ucs ] if ( response . errorCode != 0 ) : raise UcsException ( response . errorCode , response . errorDescr ) # raise Exception('[Error]: Logout [Code]:' + response.errorCode + '[Description]:' + response.errorDescr) return True
3,486
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L397-L422
[ "def", "populateFromDirectory", "(", "self", ",", "vcfDirectory", ")", ":", "pattern", "=", "os", ".", "path", ".", "join", "(", "vcfDirectory", ",", "\"*.vcf.gz\"", ")", "dataFiles", "=", "[", "]", "indexFiles", "=", "[", "]", "for", "vcfFile", "in", "glob", ".", "glob", "(", "pattern", ")", ":", "dataFiles", ".", "append", "(", "vcfFile", ")", "indexFiles", ".", "append", "(", "vcfFile", "+", "\".tbi\"", ")", "self", ".", "populateFromFile", "(", "dataFiles", ",", "indexFiles", ")" ]
Internal method to support auto - refresh functionality .
def _Start_refresh_timer ( self ) : if self . _refreshPeriod > 60 : interval = self . _refreshPeriod - 60 else : interval = 60 self . _refreshTimer = Timer ( self . _refreshPeriod , self . Refresh ) # TODO:handle exit and logout active connections. revert from daemon then self . _refreshTimer . setDaemon ( True ) self . _refreshTimer . start ( )
3,487
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L424-L433
[ "def", "bucket", "(", "self", ",", "experiment", ",", "user_id", ",", "bucketing_id", ")", ":", "if", "not", "experiment", ":", "return", "None", "# Determine if experiment is in a mutually exclusive group", "if", "experiment", ".", "groupPolicy", "in", "GROUP_POLICIES", ":", "group", "=", "self", ".", "config", ".", "get_group", "(", "experiment", ".", "groupId", ")", "if", "not", "group", ":", "return", "None", "user_experiment_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "groupId", ",", "group", ".", "trafficAllocation", ")", "if", "not", "user_experiment_id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no experiment.'", "%", "user_id", ")", "return", "None", "if", "user_experiment_id", "!=", "experiment", ".", "id", ":", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is not in experiment \"%s\" of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "return", "None", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in experiment %s of group %s.'", "%", "(", "user_id", ",", "experiment", ".", "key", ",", "experiment", ".", "groupId", ")", ")", "# Bucket user if not in white-list and in group (if any)", "variation_id", "=", "self", ".", "find_bucket", "(", "bucketing_id", ",", "experiment", ".", "id", ",", "experiment", ".", "trafficAllocation", ")", "if", "variation_id", ":", "variation", "=", "self", ".", "config", ".", "get_variation_from_id", "(", "experiment", ".", "key", ",", "variation_id", ")", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in variation \"%s\" of experiment %s.'", "%", "(", "user_id", ",", "variation", ".", "key", ",", "experiment", ".", "key", ")", ")", "return", "variation", "self", ".", "config", ".", "logger", ".", "info", "(", "'User \"%s\" is in no variation.'", "%", "user_id", ")", "return", "None" ]
Internal method to start the enqueue thread which adds the events in an internal queue .
def _start_enqueue_thread ( self ) : self . _enqueueThreadSignal . acquire ( ) self . _enqueueThread = Thread ( target = self . _enqueue_function ) self . _enqueueThread . daemon = True self . _enqueueThread . start ( ) self . _enqueueThreadSignal . wait ( ) self . _enqueueThreadSignal . release ( )
3,488
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L562-L569
[ "def", "_get_divisions", "(", "taxdump_file", ")", ":", "with", "tarfile", ".", "open", "(", "taxdump_file", ")", "as", "tf", ":", "with", "tf", ".", "extractfile", "(", "'division.dmp'", ")", "as", "fh", ":", "df", "=", "pd", ".", "read_csv", "(", "fh", ",", "header", "=", "None", ",", "sep", "=", "'|'", ",", "encoding", "=", "'ascii'", ")", "# only keep division ids and names", "df", "=", "df", ".", "iloc", "[", ":", ",", "[", "0", ",", "2", "]", "]", "# remove tab characters flanking each division name", "df", ".", "iloc", "[", ":", ",", "1", "]", "=", "df", ".", "iloc", "[", ":", ",", "1", "]", ".", "str", ".", "strip", "(", "'\\t'", ")", "# generate dictionary", "divisions", "=", "{", "}", "for", "_", ",", "row", "in", "df", ".", "iterrows", "(", ")", ":", "divisions", "[", "row", ".", "iloc", "[", "1", "]", "]", "=", "row", ".", "iloc", "[", "0", "]", "return", "divisions" ]
Internal method to add a watch block for starting event monitoring .
def _add_watch_block ( self , params , filterCb , capacity = 500 , cb = None ) : if ( self . _wbslock == None ) : self . _wbslock = Lock ( ) self . _wbslock . acquire ( ) wb = WatchBlock ( params , filterCb , capacity , cb ) # Add a List of Watchers if ( ( wb != None ) and ( wb . cb == None ) ) : wb . cb = wb . _dequeue_default_cb self . _wbs . append ( wb ) self . _wbslock . release ( ) if self . _cookie == None : return None if wb != None and len ( self . _wbs ) == 1 and wb . params [ "pollSec" ] == None : self . _start_enqueue_thread ( ) if self . _enqueueThread == None : return wb self . _enqueueThreadSignal . acquire ( ) self . _enqueueThreadSignal . notify ( ) # Notify self . _enqueueThreadSignal . release ( ) # Release the Lock return wb
3,489
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L571-L595
[ "def", "columnSchema", "(", "self", ")", ":", "if", "self", ".", "_columnSchema", "is", "None", ":", "ctx", "=", "SparkContext", ".", "_active_spark_context", "jschema", "=", "ctx", ".", "_jvm", ".", "org", ".", "apache", ".", "spark", ".", "ml", ".", "image", ".", "ImageSchema", ".", "columnSchema", "(", ")", "self", ".", "_columnSchema", "=", "_parse_datatype_json_string", "(", "jschema", ".", "json", "(", ")", ")", "return", "self", ".", "_columnSchema" ]
Internal method to remove a watch block for stopping event monitoring .
def _remove_watch_block ( self , wb ) : if ( self . _wbslock == None ) : self . _wbslock = Lock ( ) self . _wbslock . acquire ( ) self . _wbs . remove ( wb ) if len ( self . _wbs ) == 0 : self . _stop_enqueue_thread ( ) self . _stop_dequeue_thread ( ) self . _wbslock . release ( )
3,490
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L597-L608
[ "def", "columnSchema", "(", "self", ")", ":", "if", "self", ".", "_columnSchema", "is", "None", ":", "ctx", "=", "SparkContext", ".", "_active_spark_context", "jschema", "=", "ctx", ".", "_jvm", ".", "org", ".", "apache", ".", "spark", ".", "ml", ".", "image", ".", "ImageSchema", ".", "columnSchema", "(", ")", "self", ".", "_columnSchema", "=", "_parse_datatype_json_string", "(", "jschema", ".", "json", "(", ")", ")", "return", "self", ".", "_columnSchema" ]
Removes an event handler .
def RemoveEventHandler ( self , wb ) : from UcsBase import WriteUcsWarning if wb in self . _wbs : self . _remove_watch_block ( wb ) else : WriteUcsWarning ( "Event handler not found" )
3,491
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L718-L725
[ "def", "assignParameters", "(", "self", ",", "solution_next", ",", "IncomeDstn", ",", "LivPrb", ",", "DiscFac", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ",", "BoroCnstArt", ",", "aXtraGrid", ",", "vFuncBool", ",", "CubicBool", ")", ":", "ConsPerfForesightSolver", ".", "assignParameters", "(", "self", ",", "solution_next", ",", "DiscFac", ",", "LivPrb", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ")", "self", ".", "BoroCnstArt", "=", "BoroCnstArt", "self", ".", "IncomeDstn", "=", "IncomeDstn", "self", ".", "aXtraGrid", "=", "aXtraGrid", "self", ".", "vFuncBool", "=", "vFuncBool", "self", ".", "CubicBool", "=", "CubicBool" ]
Internal method to start dequeue thread .
def _start_dequeue_thread ( self ) : self . _dequeueThread = Thread ( target = self . _dequeue_function ) self . _dequeueThread . daemon = True self . _dequeueThread . start ( )
3,492
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L731-L735
[ "def", "_options_dir", "(", "name", ")", ":", "_check_portname", "(", "name", ")", "_root", "=", "'/var/db/ports'", "# New path: /var/db/ports/category_portname", "new_dir", "=", "os", ".", "path", ".", "join", "(", "_root", ",", "name", ".", "replace", "(", "'/'", ",", "'_'", ")", ")", "# Old path: /var/db/ports/portname", "old_dir", "=", "os", ".", "path", ".", "join", "(", "_root", ",", "name", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ")", "if", "os", ".", "path", ".", "isdir", "(", "old_dir", ")", ":", "return", "old_dir", "return", "new_dir" ]
Launches the UCSM GUI via specific UCS handle .
def StartGuiSession ( self ) : from UcsBase import WriteUcsWarning , UcsUtils , UcsValidationException import urllib , tempfile , fileinput , os , subprocess , platform osSupport = [ "Windows" , "Linux" , "Microsoft" ] if platform . system ( ) not in osSupport : raise UcsValidationException ( "Currently works with Windows OS and Ubuntu" ) # raise Exception("Currently works with Windows OS and Ubuntu") try : javawsPath = UcsUtils . GetJavaInstallationPath ( ) # print r"%s" %(javawsPath) if javawsPath != None : url = "%s/ucsm/ucsm.jnlp" % ( self . Uri ( ) ) source = urllib . urlopen ( url ) . read ( ) jnlpdir = tempfile . gettempdir ( ) jnlpfile = os . path . join ( jnlpdir , "temp.jnlp" ) if os . path . exists ( jnlpfile ) : os . remove ( jnlpfile ) jnlpFH = open ( jnlpfile , "w+" ) jnlpFH . write ( source ) jnlpFH . close ( ) for line in fileinput . input ( jnlpfile , inplace = 1 ) : if re . search ( r'^\s*</resources>\s*$' , line ) : print '\t<property name="log.show.encrypted" value="true"/>' print line , subprocess . call ( [ javawsPath , jnlpfile ] ) if os . path . exists ( jnlpfile ) : os . remove ( jnlpfile ) else : return None except Exception , err : fileinput . close ( ) if os . path . exists ( jnlpfile ) : os . remove ( jnlpfile ) raise
3,493
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1064-L1109
[ "def", "make_headers", "(", "headers", ")", ":", "out", "=", "{", "}", "if", "'etag'", "in", "headers", ":", "out", "[", "'if-none-match'", "]", "=", "headers", "[", "'etag'", "]", "if", "'last-modified'", "in", "headers", ":", "out", "[", "'if-modified-since'", "]", "=", "headers", "[", "'last-modified'", "]", "return", "out" ]
Imports backUp . This operation will upload the UCSM backup taken earlier via GUI or BackupUcs operation for all configuration system configuration and logical configuration files . User can perform an import while the system is up and running . - path specifies path of the backup file . - merge specifies whether to merge the backup configuration with the existing UCSM configuration .
def ImportUcsBackup ( self , path = None , merge = False , dumpXml = False ) : from UcsBase import WriteUcsWarning , UcsUtils , ManagedObject , WriteObject , UcsUtils , UcsException , UcsValidationException from Ucs import ConfigConfig from Mos import MgmtImporter from datetime import datetime if ( self . _transactionInProgress ) : raise UcsValidationException ( "UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction." ) # raise Exception("UCS transaction in progress. Cannot execute ImportUcsBackup. Complete or Undo UCS transaction.") if not path : raise UcsValidationException ( "path parameter is not provided." ) # raise Exception("Please provide path") if not os . path . exists ( path ) : raise UcsValidationException ( "Backup File not found <%s>" % ( path ) ) # raise Exception("Backup File not found <%s>" %(path)) dn = None filePath = path localFile = os . path . basename ( filePath ) topSystem = ManagedObject ( NamingId . TOP_SYSTEM ) mgmtImporter = ManagedObject ( NamingId . MGMT_IMPORTER ) mgmtImporter . Hostname = os . environ [ 'COMPUTERNAME' ] . lower ( ) + datetime . now ( ) . strftime ( '%Y%m%d%H%M' ) dn = UcsUtils . MakeDn ( [ topSystem . MakeRn ( ) , mgmtImporter . MakeRn ( ) ] ) mgmtImporter . Dn = dn mgmtImporter . Status = Status . CREATED mgmtImporter . RemoteFile = filePath mgmtImporter . Proto = MgmtImporter . CONST_PROTO_HTTP mgmtImporter . AdminState = MgmtImporter . CONST_ADMIN_STATE_ENABLED if merge : mgmtImporter . Action = MgmtImporter . CONST_ACTION_MERGE else : mgmtImporter . Action = MgmtImporter . CONST_ACTION_REPLACE inConfig = ConfigConfig ( ) inConfig . AddChild ( mgmtImporter ) uri = "%s/operations/file-%s/importconfig.txt" % ( self . Uri ( ) , localFile ) if sys . version_info < ( 2 , 6 ) : uploadFileHandle = open ( filePath , 'rb' ) stream = uploadFileHandle . read ( ) else : progress = Progress ( ) stream = file_with_callback ( filePath , 'rb' , progress . 
update , filePath ) request = urllib2 . Request ( uri ) request . add_header ( 'Cookie' , 'ucsm-cookie=%s' % ( self . _cookie ) ) request . add_data ( stream ) response = urllib2 . urlopen ( request ) . read ( ) if not response : raise UcsValidationException ( "Unable to upload properly." ) # WriteUcsWarning("Unable to upload properly.") ccm = self . ConfigConfMo ( dn = dn , inConfig = inConfig , inHierarchical = YesOrNo . FALSE , dumpXml = dumpXml ) if ( ccm . errorCode != 0 ) : raise UcsException ( ccm . errorCode , ccm . errorDescr ) # raise Exception('[Error]: BackupUcs [Code]:' + ccm.errorCode + ' [Description]:' + ccm.errorDescr) return ccm . OutConfig . GetChild ( )
3,494
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1224-L1301
[ "def", "wait_until", "(", "predicate", ",", "timeout", "=", "30", ")", ":", "not_expired", "=", "Timeout", "(", "timeout", ")", "while", "not_expired", "(", ")", ":", "r", "=", "predicate", "(", ")", "if", "r", ":", "break" ]
Uploads a specific CCO Image on UCS . - path specifies the path of the image to be uploaded .
def SendUcsFirmware ( self , path = None , dumpXml = False ) : from UcsBase import WriteUcsWarning , UcsUtils , ManagedObject , WriteObject , UcsUtils , UcsValidationException , UcsException from Ucs import ConfigConfig from Mos import FirmwareDownloader if ( self . _transactionInProgress ) : raise UcsValidationException ( "UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction." ) # raise Exception("UCS transaction in progress. Cannot execute SendUcsFirmware. Complete or Undo UCS transaction.") if not path : raise UcsValidationException ( "path parameter is not provided." ) # raise Exception("Please provide path") if not os . path . exists ( path ) : raise UcsValidationException ( "Image not found <%s>" % ( path ) ) # raise Exception("Image not found <%s>" %(path)) dn = None filePath = path localFile = os . path . basename ( filePath ) # Exit if image already exist on UCSM topSystem = ManagedObject ( NamingId . TOP_SYSTEM ) firmwareCatalogue = ManagedObject ( NamingId . FIRMWARE_CATALOGUE ) firmwareDistributable = ManagedObject ( NamingId . FIRMWARE_DISTRIBUTABLE ) firmwareDistributable . Name = localFile dn = UcsUtils . MakeDn ( [ topSystem . MakeRn ( ) , firmwareCatalogue . MakeRn ( ) , firmwareDistributable . MakeRn ( ) ] ) crDn = self . ConfigResolveDn ( dn , inHierarchical = YesOrNo . FALSE , dumpXml = dumpXml ) if ( crDn . OutConfig . GetChildCount ( ) > 0 ) : raise UcsValidationException ( "Image file <%s> already exist on FI." % ( filePath ) ) # raise Exception("Image file <%s> already exist on FI." %(filePath)) # Create object of type <firmwareDownloader> firmwareDownloader = ManagedObject ( NamingId . FIRMWARE_DOWNLOADER ) firmwareDownloader . FileName = localFile dn = UcsUtils . MakeDn ( [ topSystem . MakeRn ( ) , firmwareCatalogue . MakeRn ( ) , firmwareDownloader . MakeRn ( ) ] ) firmwareDownloader . Dn = dn firmwareDownloader . Status = Status . CREATED firmwareDownloader . 
FileName = localFile firmwareDownloader . Server = FirmwareDownloader . CONST_PROTOCOL_LOCAL firmwareDownloader . Protocol = FirmwareDownloader . CONST_PROTOCOL_LOCAL inConfig = ConfigConfig ( ) inConfig . AddChild ( firmwareDownloader ) uri = "%s/operations/file-%s/image.txt" % ( self . Uri ( ) , localFile ) progress = Progress ( ) stream = file_with_callback ( filePath , 'rb' , progress . update , filePath ) request = urllib2 . Request ( uri ) request . add_header ( 'Cookie' , 'ucsm-cookie=%s' % ( self . _cookie ) ) request . add_data ( stream ) response = urllib2 . urlopen ( request ) . read ( ) if not response : raise UcsValidationException ( "Unable to upload properly." ) # WriteUcsWarning("Unable to upload properly.") ccm = self . ConfigConfMo ( dn = dn , inConfig = inConfig , inHierarchical = YesOrNo . FALSE , dumpXml = dumpXml ) if ( ccm . errorCode != 0 ) : raise UcsException ( ccm . errorCode , ccm . errorDescr ) return ccm . OutConfig . GetChild ( )
3,495
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1303-L1375
[ "def", "generate_secret_key", "(", ")", ":", "import", "string", "import", "random", "rng", "=", "random", ".", "SystemRandom", "(", ")", "return", "''", ".", "join", "(", "rng", ".", "choice", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", "for", "dummy", "in", "range", "(", "0", ",", "256", ")", ")" ]
Gets Child Managed Object from UCS .
def GetUcsChild ( self , inMo = None , inDn = None , classId = None , inHierarchical = False , dumpXml = None ) : from UcsBase import UcsValidationException , UcsException , UcsUtils if not inDn and not inMo : raise UcsValidationException ( '[Error]: get_ucs_child: Provide in_mo or in_dn.' ) if inMo : parentDn = inMo . getattr ( "Dn" ) elif inDn : parentDn = inDn crc = self . ConfigResolveChildren ( classId , parentDn , None , inHierarchical , dumpXml ) if crc . errorCode == 0 : moList = UcsUtils . extractMolistFromMethodResponse ( crc , inHierarchical ) return moList else : raise UcsException ( crc . errorCode , crc . error_descr )
3,496
https://github.com/CiscoUcs/UcsPythonSDK/blob/bf6b07d6abeacb922c92b198352eda4eb9e4629b/src/UcsSdk/UcsHandle_Edit.py#L1810-L1835
[ "def", "retract", "(", "args", ")", ":", "if", "not", "args", ".", "msg", ":", "return", "\"Syntax: !vote retract <pollnum>\"", "if", "not", "args", ".", "msg", ".", "isdigit", "(", ")", ":", "return", "\"Not A Valid Positive Integer.\"", "response", "=", "get_response", "(", "args", ".", "session", ",", "args", ".", "msg", ",", "args", ".", "nick", ")", "if", "response", "is", "None", ":", "return", "\"You haven't voted on that poll yet!\"", "args", ".", "session", ".", "delete", "(", "response", ")", "return", "\"Vote retracted\"" ]
Transform a sentence using the code spelling alphabet multiple international code alphabets are supported .
def code ( sentence , pad = ' ' , format = 'army' ) : try : return ALPHABET [ 'code' ] [ format ] ( sentence , pad or CODE_PADDING [ format ] ) except KeyError : raise TypeError ( 'Unsupported code alphabet "%s"' % ( format , ) )
3,497
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/text.py#L73-L121
[ "async", "def", "_window_open", "(", "self", ",", "stream_id", ":", "int", ")", ":", "stream", "=", "self", ".", "_get_stream", "(", "stream_id", ")", "return", "await", "stream", ".", "window_open", ".", "wait", "(", ")" ]
Transform a sentence using the NATO spelling alphabet .
def nato ( sentence , pad = ' ' , format = 'telephony' ) : try : return '' + ALPHABET [ 'nato' ] [ format ] ( sentence , pad ) except KeyError : raise TypeError ( 'Unsupported NATO alphabet "%s"' % ( format , ) )
3,498
https://github.com/tehmaze/natural/blob/d7a1fc9de712f9bcf68884a80826a7977df356fb/natural/text.py#L135-L153
[ "def", "remove_server", "(", "self", ",", "server_id", ")", ":", "# Validate server_id", "server", "=", "self", ".", "_get_server", "(", "server_id", ")", "# Delete any instances we recorded to be cleaned up", "if", "server_id", "in", "self", ".", "_owned_subscriptions", ":", "inst_list", "=", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_filters", ":", "inst_list", "=", "self", ".", "_owned_filters", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_filters", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_destinations", ":", "inst_list", "=", "self", ".", "_owned_destinations", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_destinations", "[", "server_id", "]", "# Remove server from this listener", "del", "self", ".", "_servers", "[", "server_id", "]" ]
Adds a lunr . Clause to this query .
def clause ( self , * args , * * kwargs ) : if args and isinstance ( args [ 0 ] , Clause ) : clause = args [ 0 ] else : clause = Clause ( * args , * * kwargs ) if not clause . fields : clause . fields = self . all_fields if ( clause . wildcard & Query . WILDCARD_LEADING ) and ( clause . term [ 0 ] != Query . WILDCARD ) : clause . term = Query . WILDCARD + clause . term if ( clause . wildcard & Query . WILDCARD_TRAILING ) and ( clause . term [ - 1 ] != Query . WILDCARD ) : clause . term = clause . term + Query . WILDCARD self . clauses . append ( clause ) return self
3,499
https://github.com/yeraydiazdiaz/lunr.py/blob/28ec3f6d4888295eed730211ee9617aa488d6ba3/lunr/query.py#L43-L74
[ "def", "validate", "(", "self", ",", "api_key", "=", "None", ")", ":", "local_data", "=", "self", ".", "json_body", "if", "\"id\"", "not", "in", "local_data", "or", "\"livemode\"", "not", "in", "local_data", ":", "return", "False", "if", "self", ".", "is_test_event", ":", "logger", ".", "info", "(", "\"Test webhook received: {}\"", ".", "format", "(", "local_data", ")", ")", "return", "False", "if", "djstripe_settings", ".", "WEBHOOK_VALIDATION", "is", "None", ":", "# validation disabled", "return", "True", "elif", "(", "djstripe_settings", ".", "WEBHOOK_VALIDATION", "==", "\"verify_signature\"", "and", "djstripe_settings", ".", "WEBHOOK_SECRET", ")", ":", "try", ":", "stripe", ".", "WebhookSignature", ".", "verify_header", "(", "self", ".", "body", ",", "self", ".", "headers", ".", "get", "(", "\"stripe-signature\"", ")", ",", "djstripe_settings", ".", "WEBHOOK_SECRET", ",", "djstripe_settings", ".", "WEBHOOK_TOLERANCE", ",", ")", "except", "stripe", ".", "error", ".", "SignatureVerificationError", ":", "return", "False", "else", ":", "return", "True", "livemode", "=", "local_data", "[", "\"livemode\"", "]", "api_key", "=", "api_key", "or", "djstripe_settings", ".", "get_default_api_key", "(", "livemode", ")", "# Retrieve the event using the api_version specified in itself", "with", "stripe_temporary_api_version", "(", "local_data", "[", "\"api_version\"", "]", ",", "validate", "=", "False", ")", ":", "remote_data", "=", "Event", ".", "stripe_class", ".", "retrieve", "(", "id", "=", "local_data", "[", "\"id\"", "]", ",", "api_key", "=", "api_key", ")", "return", "local_data", "[", "\"data\"", "]", "==", "remote_data", "[", "\"data\"", "]" ]