query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Handles presence stanzas
def on_presence(self, session, presence):
    """Handle an incoming presence stanza.

    Tracks the subscription handshake for room members and keeps a
    per-member online counter in sync; unknown stanza types are logged.

    :param session: the XMPP session the stanza arrived on (unused here)
    :param presence: the presence stanza object
    """
    from_jid = presence.getFrom()
    is_member = self.is_member(from_jid.getStripped())
    member = self.get_member(from_jid.getStripped()) if is_member else None
    # hoist getType(): it was called repeatedly in the original
    presence_type = presence.getType()
    logger.info('presence: from=%s is_member=%s type=%s' % (from_jid, is_member, presence_type))
    if presence_type == 'subscribed':
        if is_member:
            logger.info('[%s] accepted their invitation' % (from_jid,))
            member['STATUS'] = 'ACTIVE'
        else:
            # TODO: user accepted, but is no longer be on the roster, unsubscribe?
            pass
    elif presence_type == 'subscribe':
        if is_member:
            logger.info('Acknowledging subscription request from [%s]' % (from_jid,))
            self.client.sendPresence(jid=from_jid, typ='subscribed')
            member['STATUS'] = 'ACTIVE'
            self.broadcast('%s has accepted their invitation!' % (from_jid,))
        else:
            # TODO: show that a user has requested membership?
            pass
    elif presence_type is None:
        # a type-less presence stanza means the contact became available;
        # fixed: original compared with `== None` instead of `is None`
        if is_member:
            member['ONLINE'] += 1
    elif presence_type == 'unavailable':
        if is_member:
            member['ONLINE'] -= 1
    else:
        logger.info('Unhandled presence stanza of type [%s] from [%s]' % (presence_type, from_jid))
10,700
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L179-L213
[ "def", "load_projects", "(", "self", ")", ":", "server_config", "=", "Config", ".", "instance", "(", ")", ".", "get_section_config", "(", "\"Server\"", ")", "projects_path", "=", "os", ".", "path", ".", "expanduser", "(", "server_config", ".", "get", "(", "\"projects_path\"", ",", "\"~/GNS3/projects\"", ")", ")", "os", ".", "makedirs", "(", "projects_path", ",", "exist_ok", "=", "True", ")", "try", ":", "for", "project_path", "in", "os", ".", "listdir", "(", "projects_path", ")", ":", "project_dir", "=", "os", ".", "path", ".", "join", "(", "projects_path", ",", "project_path", ")", "if", "os", ".", "path", ".", "isdir", "(", "project_dir", ")", ":", "for", "file", "in", "os", ".", "listdir", "(", "project_dir", ")", ":", "if", "file", ".", "endswith", "(", "\".gns3\"", ")", ":", "try", ":", "yield", "from", "self", ".", "load_project", "(", "os", ".", "path", ".", "join", "(", "project_dir", ",", "file", ")", ",", "load", "=", "False", ")", "except", "(", "aiohttp", ".", "web_exceptions", ".", "HTTPConflict", ",", "NotImplementedError", ")", ":", "pass", "# Skip not compatible projects", "except", "OSError", "as", "e", ":", "log", ".", "error", "(", "str", "(", "e", ")", ")" ]
Handles message stanzas
def on_message(self, con, event):
    """Handle an incoming message stanza.

    Dispatches registered bot command patterns first, then slash-commands
    (``/cmd`` -> ``do_cmd``), and finally rebroadcasts plain chatter from a
    known member to the rest of the room.

    :param con: the XMPP connection (unused here)
    :param event: the message stanza object
    """
    msg_type = event.getType()
    nick = event.getFrom().getResource()
    from_jid = event.getFrom().getStripped()
    body = event.getBody()
    if msg_type == 'chat' and body is None:
        return
    logger.debug('msg_type[%s] from[%s] nick[%s] body[%s]' % (msg_type, from_jid, nick, body,))
    # list() so len() below works on Python 3, where filter() is an iterator
    sender = list(filter(lambda m: m['JID'] == from_jid, self.params['MEMBERS']))
    should_process = msg_type in ['message', 'chat', None] and body is not None and len(sender) == 1
    if not should_process:
        return
    sender = sender[0]
    try:
        # bot command patterns take precedence over everything else
        for p in self.command_patterns:
            reg, cmd = p
            m = reg.match(body)
            if m:
                logger.info('pattern matched for bot command \'%s\'' % (cmd,))
                function = getattr(self, str(cmd), None)
                if function:
                    return function(sender, body, m)
        # slash-commands: "/foo arg1 arg2" dispatches to self.do_foo
        words = body.split(' ')
        cmd, args = words[0], words[1:]
        if cmd and cmd[0] == '/':
            cmd = cmd[1:]
            command_handler = getattr(self, 'do_' + cmd, None)
            if command_handler:
                return command_handler(sender, body, args)
        broadcast_body = '[%s] %s' % (sender['NICK'], body,)
        return self.broadcast(broadcast_body, exclude=(sender,))
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate
        logger.exception('Error handling message [%s] from [%s]' % (body, sender['JID']))
10,701
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L215-L254
[ "def", "debug_video_writer_factory", "(", "output_dir", ")", ":", "if", "FLAGS", ".", "disable_ffmpeg", ":", "return", "common_video", ".", "IndividualFrameWriter", "(", "output_dir", ")", "else", ":", "output_path", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "\"video.avi\"", ")", "return", "common_video", ".", "WholeVideoWriter", "(", "fps", "=", "10", ",", "output_path", "=", "output_path", ",", "file_format", "=", "\"avi\"", ")" ]
method to activate all activation methods in the shell and its plugins .
def activate(self):
    """Activate all activation methods in the shell and its plugins.

    Collects every attribute named ``shell_activate_*`` first, then every
    ``activate_*`` attribute, records them in ``self.plugins``, and calls
    each one in that order.
    """
    d = dir(self)
    self.plugins = []
    # shell-level activators run before plugin activators
    for key in d:
        if key.startswith("shell_activate_"):
            if self.echo:
                Console.ok("Shell Activate: {0}".format(key))
            self.plugins.append(key)
    for key in d:
        if key.startswith("activate_"):
            if self.echo:
                Console.ok("Activate: {0}".format(key))
            self.plugins.append(key)
    for key in self.plugins:
        if self.echo:
            Console.ok("> {0}".format(key.replace("_", " ", 1)))
        # fixed: call the attribute directly instead of exec() on an
        # interpolated string, which was fragile and unsafe
        getattr(self, key)()
10,702
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/activate.py#L20-L40
[ "def", "upload_benchmark_run", "(", "self", ",", "dataset_name", ",", "table_name", ",", "run_id", ")", ":", "expected_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_logging_dir", ",", "logger", ".", "BENCHMARK_RUN_LOG_FILE_NAME", ")", "with", "tf", ".", "gfile", ".", "GFile", "(", "expected_file", ")", "as", "f", ":", "benchmark_json", "=", "json", ".", "load", "(", "f", ")", "benchmark_json", "[", "\"model_id\"", "]", "=", "run_id", "table_ref", "=", "self", ".", "_bq_client", ".", "dataset", "(", "dataset_name", ")", ".", "table", "(", "table_name", ")", "errors", "=", "self", ".", "_bq_client", ".", "insert_rows_json", "(", "table_ref", ",", "[", "benchmark_json", "]", ")", "if", "errors", ":", "tf", ".", "logging", ".", "error", "(", "\"Failed to upload benchmark info to bigquery: {}\"", ".", "format", "(", "errors", ")", ")" ]
List available commands with help or detailed help with help cmd .
def do_help(self, arg):
    """List available commands with "help" or detailed help with "help cmd"."""
    if arg:
        # XXX check arg syntax
        try:
            func = getattr(self, 'help_' + arg)
        except AttributeError:
            # no dedicated help_<arg> method; fall back to the command's docstring
            try:
                doc = getattr(self, 'do_' + arg).__doc__
                if doc:
                    self.stdout.write("%s\n" % str(doc))
                    return
            except AttributeError:
                pass
            self.stdout.write("%s\n" % str(self.nohelp % (arg,)))
            return
        func()
    else:
        # no argument: print the full command overview
        names = self.get_names()
        cmds_doc = []    # commands with a help_* method or a docstring
        cmds_undoc = []  # commands with neither
        help_page = {}   # topics that have a help_* method
        for name in names:
            if name[:5] == 'help_':
                help_page[name[5:]] = 1
        names.sort()
        # There can be duplicates if routines overridden
        prevname = ''
        for name in names:
            if name[:3] == 'do_':
                if name == prevname:
                    continue
                prevname = name
                cmd = name[3:]
                if cmd in help_page:
                    cmds_doc.append(cmd)
                    # consume the topic so only orphan help_* entries remain
                    del help_page[cmd]
                elif getattr(self, name).__doc__:
                    cmds_doc.append(cmd)
                else:
                    cmds_undoc.append(cmd)
        self.stdout.write("%s\n" % str(self.doc_leader))
        self.print_topics(self.doc_header, cmds_doc, 15, 80)
        self.print_topics(self.misc_header, list(help_page.keys()), 15, 80)
        self.print_topics(self.undoc_header, cmds_undoc, 15, 80)
        # extension over cmd.Cmd: print grouped per-topic command sections
        for topic in self.command_topics:
            topic_cmds = self.command_topics[topic]
            self.print_topics(string.capwords(topic + " commands"), topic_cmds, 15, 80)
10,703
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/activate.py#L42-L92
[ "def", "libvlc_log_set_file", "(", "p_instance", ",", "stream", ")", ":", "f", "=", "_Cfunctions", ".", "get", "(", "'libvlc_log_set_file'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_log_set_file'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "Instance", ",", "FILE_ptr", ")", "return", "f", "(", "p_instance", ",", "stream", ")" ]
Retrieve Ziggo channel information .
def _fetch_channels(self):
    """Retrieve Ziggo channel information.

    Fetches the channel listing from ``self._channels_url`` and stores a
    mapping of channel code -> channel name in ``self._channels``.
    """
    # named 'data' so the local no longer shadows the json module name
    data = requests.get(self._channels_url).json()
    self._channels = {c['channel']['code']: c['channel']['name']
                      for c in data['channels']}
10,704
https://github.com/b10m/ziggo_mediabox_xl/blob/49520ec3e2e3d09339cea667723914b10399249d/ziggo_mediabox_xl.py#L33-L37
[ "def", "begin_commit", "(", ")", ":", "session_token", "=", "request", ".", "headers", "[", "'session_token'", "]", "repository", "=", "request", ".", "headers", "[", "'repository'", "]", "#===", "current_user", "=", "have_authenticated_user", "(", "request", ".", "environ", "[", "'REMOTE_ADDR'", "]", ",", "repository", ",", "session_token", ")", "if", "current_user", "is", "False", ":", "return", "fail", "(", "user_auth_fail_msg", ")", "#===", "repository_path", "=", "config", "[", "'repositories'", "]", "[", "repository", "]", "[", "'path'", "]", "def", "with_exclusive_lock", "(", ")", ":", "# The commit is locked for a given time period to a given session token,", "# a client must hold this lock to use any of push_file(), delete_files() and commit().", "# It does not matter if the user lock technically expires while a client is writing", "# a large file, as the user lock is locked using flock for the duration of any", "# operation and thus cannot be stolen by another client. It is updated to be in", "# the future before returning to the client. The lock only needs to survive until", "# the client owning the lock sends another request and re acquires the flock.", "if", "not", "can_aquire_user_lock", "(", "repository_path", ",", "session_token", ")", ":", "return", "fail", "(", "lock_fail_msg", ")", "# Commits can only take place if the committing user has the latest revision,", "# as committing from an outdated state could cause unexpected results, and may", "# have conflicts. 
Conflicts are resolved during a client update so they are", "# handled by the client, and a server interface for this is not needed.", "data_store", "=", "versioned_storage", "(", "repository_path", ")", "if", "data_store", ".", "get_head", "(", ")", "!=", "request", ".", "headers", "[", "\"previous_revision\"", "]", ":", "return", "fail", "(", "need_to_update_msg", ")", "# Should the lock expire, the client which had the lock previously will be unable", "# to continue the commit it had in progress. When this, or another client attempts", "# to commit again it must do so by first obtaining the lock again by calling begin_commit().", "# Any remaining commit data from failed prior commits is garbage collected here.", "# While it would technically be possible to implement commit resume should the same", "# client resume, I only see commits failing due to a network error and this is so", "# rare I don't think it's worth the trouble.", "if", "data_store", ".", "have_active_commit", "(", ")", ":", "data_store", ".", "rollback", "(", ")", "#------------", "data_store", ".", "begin", "(", ")", "update_user_lock", "(", "repository_path", ",", "session_token", ")", "return", "success", "(", ")", "return", "lock_access", "(", "repository_path", ",", "with_exclusive_lock", ")" ]
Send keys to the device .
def send_keys(self, keys):
    """Send keys to the device.

    Opens the command socket, performs the protocol handshake, then sends a
    press/release packet pair for every recognized key in *keys*.

    :param keys: iterable of key names; unknown names are silently skipped
    :raises socket.error: on any connection or send failure
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(self._timeout)
        sock.connect((self._ip, self._port['cmd']))
        # mandatory dance: echo the version banner, then the fixed preamble
        version_info = sock.recv(15)
        sock.send(version_info)
        sock.recv(2)
        sock.send(bytes.fromhex('01'))
        sock.recv(4)
        sock.recv(24)
        # send our command now! (04 01 = press, 04 00 = release)
        for key in keys:
            if key in self._keys:
                sock.send(bytes.fromhex("04 01 00 00 00 00 " + self._keys[key]))
                sock.send(bytes.fromhex("04 00 00 00 00 00 " + self._keys[key]))
    finally:
        # fixed: the original leaked the socket when an error occurred, and
        # its `except socket.error: raise` was a no-op re-raise
        sock.close()
10,705
https://github.com/b10m/ziggo_mediabox_xl/blob/49520ec3e2e3d09339cea667723914b10399249d/ziggo_mediabox_xl.py#L72-L94
[ "def", "add_copyright", "(", "self", ",", "material", "=", "None", ",", "holder", "=", "None", ",", "statement", "=", "None", ",", "url", "=", "None", ",", "year", "=", "None", ")", ":", "copyright", "=", "{", "}", "for", "key", "in", "(", "'holder'", ",", "'statement'", ",", "'url'", ")", ":", "if", "locals", "(", ")", "[", "key", "]", "is", "not", "None", ":", "copyright", "[", "key", "]", "=", "locals", "(", ")", "[", "key", "]", "if", "material", "is", "not", "None", ":", "copyright", "[", "'material'", "]", "=", "material", ".", "lower", "(", ")", "if", "year", "is", "not", "None", ":", "copyright", "[", "'year'", "]", "=", "int", "(", "year", ")", "self", ".", "_append_to", "(", "'copyright'", ",", "copyright", ")" ]
Construct a subclass of a derived models . Field specific to the type of the EChoice values .
def make_echoicefield(echoices, *args, klass_name=None, **kwargs):
    """Construct a subclass of a derived models.Field specific to the type of the EChoice values.

    Picks the Django field base class matching the EChoice value type,
    merges EChoiceField behavior into it, and returns an instance of the
    dynamically created class.

    :param echoices: an EChoice subclass whose value type drives the base field
    :param klass_name: optional name for the generated class; ignored on Django < 1.9
    :raises NotImplementedError: when the value type has no mapped field class
    """
    assert issubclass(echoices, EChoice)
    value_type = echoices.__getvaluetype__()
    if value_type is str:
        cls_ = models.CharField
    elif value_type is int:
        cls_ = models.IntegerField
    elif value_type is float:
        cls_ = models.FloatField
    elif value_type is bool:
        cls_ = models.BooleanField
    else:
        raise NotImplementedError("Please open an issue if you wish your value type to be supported: "
                                  "https://github.com/mbourqui/django-echoices/issues/new")
    if klass_name and StrictVersion(django_version()) < StrictVersion('1.9.0'):
        warnings.warn("Django < 1.9 throws an 'ImportError' if the class name is not defined in the module. "
                      "The provided klass_name will be replaced by {}".format(EChoiceField.__name__),
                      RuntimeWarning)
    # Django < 1.9 forces the name to EChoiceField (see warning above);
    # otherwise use the caller's klass_name, falling back to "<EChoices>Field"
    klass_name = EChoiceField.__name__ if StrictVersion(django_version()) < StrictVersion('1.9.0') else klass_name if klass_name else "{}Field".format(echoices.__name__)
    # copy the base field's namespace, then overlay EChoiceField's methods
    d = dict(cls_.__dict__)
    d.update(dict(EChoiceField.__dict__))
    return type(klass_name, (cls_,), d)(echoices, *args, **kwargs)
10,706
https://github.com/mbourqui/django-echoices/blob/c57405005ec368ac602bb38a71091a1e03c723bb/echoices/fields/fields.py#L120-L163
[ "def", "set_nvidia_environment_variables", "(", "environment", ",", "gpu_ids", ")", ":", "if", "gpu_ids", ":", "nvidia_visible_devices", "=", "\"\"", "for", "gpu_id", "in", "gpu_ids", ":", "nvidia_visible_devices", "+=", "\"{},\"", ".", "format", "(", "gpu_id", ")", "environment", "[", "\"NVIDIA_VISIBLE_DEVICES\"", "]", "=", "nvidia_visible_devices" ]
Make an instance to look like an empty dummy .
def make_dummy(instance, relations=None,
               datetime_default=dt.strptime('1901-01-01', '%Y-%m-%d'),
               varchar_default="", integer_default=0, numeric_default=0.0,
               *args, **kwargs):
    """Make an instance look like an empty dummy.

    Sets every mapped column on *instance* either to the value given in
    ``kwargs`` (keyed by column name) or to a type-appropriate default.

    :param instance: a declarative-base mapped object (its class must expose
        ``__table__.columns``)
    :param relations: optional mapping of relationship attribute name to a
        tuple whose first element is the related object to assign
    :returns: the same *instance*, mutated in place
    """
    # fixed: `relations={}` was a shared mutable default argument
    if relations is None:
        relations = {}
    # init_data knows how to put an init value depending on data type
    init_data = {
        'DATETIME': datetime_default,
        'VARCHAR': varchar_default,
        'INTEGER': integer_default,
        'NUMERIC(50, 10)': numeric_default,
        'TEXT': varchar_default,
    }
    # the type of the instance is the SQLAlchemy Table
    table = type(instance)
    for col in table.__table__.columns:
        # declarative base tables have a columns property useful for reflection
        try:
            setattr(instance, col.name, kwargs[col.name])
        except KeyError:
            setattr(instance, col.name, init_data[str(col.type)])
    # fixed: iteritems() is Python 2 only; items() works on both
    for k, v in relations.items():
        # set the relationship property with the first element of the tuple
        setattr(instance, k, v[0])
    return instance
10,707
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/orm/schema_dummies.py#L14-L99
[ "def", "releaseNativeOverlayHandle", "(", "self", ",", "ulOverlayHandle", ",", "pNativeTextureHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "releaseNativeOverlayHandle", "result", "=", "fn", "(", "ulOverlayHandle", ",", "pNativeTextureHandle", ")", "return", "result" ]
Set up the network .
def set_up_network(self, genes: List[Gene], gene_filter: bool = False,
                   disease_associations: Optional[Dict] = None) -> None:
    """Set up the network.

    Optionally restricts the graph to the supplied genes, annotates the
    vertices (including optional disease associations), and logs a summary.
    """
    if gene_filter:
        relevant_ids = [gene.entrez_id for gene in genes]
        self.filter_genes(relevant_ids)
    self._add_vertex_attributes(genes, disease_associations)
    self.print_summary("Graph of all genes")
10,708
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L38-L55
[ "def", "revoke_local_roles_for", "(", "brain_or_object", ",", "roles", ",", "user", "=", "None", ")", ":", "user_id", "=", "get_user_id", "(", "user", ")", "obj", "=", "api", ".", "get_object", "(", "brain_or_object", ")", "valid_roles", "=", "get_valid_roles_for", "(", "obj", ")", "to_grant", "=", "list", "(", "get_local_roles_for", "(", "obj", ")", ")", "if", "isinstance", "(", "roles", ",", "basestring", ")", ":", "roles", "=", "[", "roles", "]", "for", "role", "in", "roles", ":", "if", "role", "in", "to_grant", ":", "if", "role", "not", "in", "valid_roles", ":", "raise", "ValueError", "(", "\"The Role '{}' is invalid.\"", ".", "format", "(", "role", ")", ")", "# Remove the role", "to_grant", ".", "remove", "(", "role", ")", "if", "len", "(", "to_grant", ")", ">", "0", ":", "obj", ".", "manage_setLocalRoles", "(", "user_id", ",", "to_grant", ")", "else", ":", "obj", ".", "manage_delLocalRoles", "(", "[", "user_id", "]", ")", "return", "get_local_roles_for", "(", "brain_or_object", ")" ]
Filter out the genes that are not in list relevant_entrez .
def filter_genes(self, relevant_entrez: list) -> None:
    """Filter out the genes that are not in list relevant_entrez.

    Deletes every vertex whose name is not among the given Entrez ids.
    """
    logger.info("In filter_genes()")
    to_remove = self.graph.vs.select(name_notin=relevant_entrez)
    self.graph.delete_vertices(to_remove)
10,709
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L57-L64
[ "def", "_PrintStorageInformationAsJSON", "(", "self", ",", "storage_reader", ")", ":", "serializer", "=", "json_serializer", ".", "JSONAttributeContainerSerializer", "storage_counters", "=", "self", ".", "_CalculateStorageCounters", "(", "storage_reader", ")", "storage_counters_json", "=", "json", ".", "dumps", "(", "storage_counters", ")", "self", ".", "_output_writer", ".", "Write", "(", "'{'", ")", "self", ".", "_output_writer", ".", "Write", "(", "'\"storage_counters\": {0:s}'", ".", "format", "(", "storage_counters_json", ")", ")", "self", ".", "_output_writer", ".", "Write", "(", "',\\n'", ")", "self", ".", "_output_writer", ".", "Write", "(", "' \"sessions\": {'", ")", "for", "index", ",", "session", "in", "enumerate", "(", "storage_reader", ".", "GetSessions", "(", ")", ")", ":", "json_string", "=", "serializer", ".", "WriteSerialized", "(", "session", ")", "if", "index", "!=", "0", ":", "self", ".", "_output_writer", ".", "Write", "(", "',\\n'", ")", "self", ".", "_output_writer", ".", "Write", "(", "'\"session_{0:s}\": {1:s} '", ".", "format", "(", "session", ".", "identifier", ",", "json_string", ")", ")", "self", ".", "_output_writer", ".", "Write", "(", "'}}'", ")" ]
Add attributes to vertices .
def _add_vertex_attributes(self, genes: List[Gene],
                           disease_associations: Optional[dict] = None) -> None:
    """Add attributes to vertices.

    Sets defaults, overlays per-gene values, flags differentially
    expressed genes by direction, and attaches disease associations.
    """
    self._set_default_vertex_attributes()
    self._add_vertex_attributes_by_genes(genes)
    # compute up-regulated and down-regulated genes
    up = self.get_upregulated_genes()
    down = self.get_downregulated_genes()
    # mark direction flags; both directions count as differentially expressed
    self.graph.vs(up.indices)["diff_expressed"] = True
    self.graph.vs(up.indices)["up_regulated"] = True
    self.graph.vs(down.indices)["diff_expressed"] = True
    self.graph.vs(down.indices)["down_regulated"] = True
    # add disease associations
    self._add_disease_associations(disease_associations)
    logger.info("Number of all differentially expressed genes is: {}".format(len(up) + len(down)))
10,710
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L66-L89
[ "def", "parse_topic", "(", "self", ",", "params", ",", "region", ",", "topic", ")", ":", "topic", "[", "'arn'", "]", "=", "topic", ".", "pop", "(", "'TopicArn'", ")", "topic", "[", "'name'", "]", "=", "topic", "[", "'arn'", "]", ".", "split", "(", "':'", ")", "[", "-", "1", "]", "(", "prefix", ",", "partition", ",", "service", ",", "region", ",", "account", ",", "name", ")", "=", "topic", "[", "'arn'", "]", ".", "split", "(", "':'", ")", "api_client", "=", "api_clients", "[", "region", "]", "attributes", "=", "api_client", ".", "get_topic_attributes", "(", "TopicArn", "=", "topic", "[", "'arn'", "]", ")", "[", "'Attributes'", "]", "for", "k", "in", "[", "'Owner'", ",", "'DisplayName'", "]", ":", "topic", "[", "k", "]", "=", "attributes", "[", "k", "]", "if", "k", "in", "attributes", "else", "None", "for", "k", "in", "[", "'Policy'", ",", "'DeliveryPolicy'", ",", "'EffectiveDeliveryPolicy'", "]", ":", "topic", "[", "k", "]", "=", "json", ".", "loads", "(", "attributes", "[", "k", "]", ")", "if", "k", "in", "attributes", "else", "None", "topic", "[", "'name'", "]", "=", "topic", "[", "'arn'", "]", ".", "split", "(", "':'", ")", "[", "-", "1", "]", "manage_dictionary", "(", "topic", ",", "'subscriptions'", ",", "{", "}", ")", "manage_dictionary", "(", "topic", ",", "'subscriptions_count'", ",", "0", ")", "self", ".", "topics", "[", "topic", "[", "'name'", "]", "]", "=", "topic" ]
Assign default values on attributes to all vertices .
def _set_default_vertex_attributes ( self ) -> None : self . graph . vs [ "l2fc" ] = 0 self . graph . vs [ "padj" ] = 0.5 self . graph . vs [ "symbol" ] = self . graph . vs [ "name" ] self . graph . vs [ "diff_expressed" ] = False self . graph . vs [ "up_regulated" ] = False self . graph . vs [ "down_regulated" ] = False
10,711
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L91-L98
[ "def", "update_notes", "(", "self", ",", "xml_file", ",", "new", "=", "False", ")", ":", "if", "new", ":", "create_empty_annotations", "(", "xml_file", ",", "self", ".", "parent", ".", "info", ".", "dataset", ")", "self", ".", "annot", "=", "Annotations", "(", "xml_file", ")", "else", ":", "self", ".", "annot", "=", "Annotations", "(", "xml_file", ")", "self", ".", "enable_events", "(", ")", "self", ".", "parent", ".", "create_menubar", "(", ")", "self", ".", "idx_stage", ".", "clear", "(", ")", "for", "one_stage", "in", "STAGE_NAME", ":", "self", ".", "idx_stage", ".", "addItem", "(", "one_stage", ")", "self", ".", "idx_stage", ".", "setCurrentIndex", "(", "-", "1", ")", "self", ".", "idx_quality", ".", "clear", "(", ")", "for", "one_qual", "in", "QUALIFIERS", ":", "self", ".", "idx_quality", ".", "addItem", "(", "one_qual", ")", "self", ".", "idx_quality", ".", "setCurrentIndex", "(", "-", "1", ")", "w1", "=", "self", ".", "idx_summary", ".", "takeAt", "(", "1", ")", ".", "widget", "(", ")", "w2", "=", "self", ".", "idx_summary", ".", "takeAt", "(", "1", ")", ".", "widget", "(", ")", "self", ".", "idx_summary", ".", "removeWidget", "(", "w1", ")", "self", ".", "idx_summary", ".", "removeWidget", "(", "w2", ")", "w1", ".", "deleteLater", "(", ")", "w2", ".", "deleteLater", "(", ")", "b1", "=", "QGroupBox", "(", "'Staging'", ")", "layout", "=", "QFormLayout", "(", ")", "for", "one_stage", "in", "STAGE_NAME", ":", "layout", ".", "addRow", "(", "one_stage", ",", "QLabel", "(", "''", ")", ")", "b1", ".", "setLayout", "(", "layout", ")", "self", ".", "idx_summary", ".", "addWidget", "(", "b1", ")", "self", ".", "idx_stage_stats", "=", "layout", "b2", "=", "QGroupBox", "(", "'Signal quality'", ")", "layout", "=", "QFormLayout", "(", ")", "for", "one_qual", "in", "QUALIFIERS", ":", "layout", ".", "addRow", "(", "one_qual", ",", "QLabel", "(", "''", ")", ")", "b2", ".", "setLayout", "(", "layout", ")", "self", ".", "idx_summary", ".", "addWidget", "(", "b2", ")", 
"self", ".", "idx_qual_stats", "=", "layout", "self", ".", "display_notes", "(", ")" ]
Assign values to attributes on vertices .
def _add_vertex_attributes_by_genes(self, genes: List[Gene]) -> None:
    """Assign values to attributes on vertices.

    Copies fold change, symbol and adjusted p-value from each gene onto
    the vertex with the matching Entrez id; genes without a matching
    vertex are skipped.
    """
    for gene in genes:
        try:
            idx = self.graph.vs.find(name=str(gene.entrez_id)).index
            vertex = self.graph.vs[idx]
            vertex['l2fc'] = gene.log2_fold_change
            vertex['symbol'] = gene.symbol
            vertex['padj'] = gene.padj
        except ValueError:
            # gene is not present in the network
            pass
10,712
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L100-L112
[ "def", "get_partition", "(", "url", ",", "headers", ",", "source_id", ",", "container", ",", "partition", ")", ":", "accepted_formats", "=", "list", "(", "serializer", ".", "format_registry", ".", "keys", "(", ")", ")", "accepted_compression", "=", "list", "(", "serializer", ".", "compression_registry", ".", "keys", "(", ")", ")", "payload", "=", "dict", "(", "action", "=", "'read'", ",", "source_id", "=", "source_id", ",", "accepted_formats", "=", "accepted_formats", ",", "accepted_compression", "=", "accepted_compression", ")", "if", "partition", "is", "not", "None", ":", "payload", "[", "'partition'", "]", "=", "partition", "try", ":", "resp", "=", "requests", ".", "post", "(", "urljoin", "(", "url", ",", "'/v1/source'", ")", ",", "data", "=", "msgpack", ".", "packb", "(", "payload", ",", "use_bin_type", "=", "True", ")", ",", "*", "*", "headers", ")", "if", "resp", ".", "status_code", "!=", "200", ":", "raise", "Exception", "(", "'Error reading data'", ")", "msg", "=", "msgpack", ".", "unpackb", "(", "resp", ".", "content", ",", "*", "*", "unpack_kwargs", ")", "format", "=", "msg", "[", "'format'", "]", "compression", "=", "msg", "[", "'compression'", "]", "compressor", "=", "serializer", ".", "compression_registry", "[", "compression", "]", "encoder", "=", "serializer", ".", "format_registry", "[", "format", "]", "chunk", "=", "encoder", ".", "decode", "(", "compressor", ".", "decompress", "(", "msg", "[", "'data'", "]", ")", ",", "container", ")", "return", "chunk", "finally", ":", "if", "resp", "is", "not", "None", ":", "resp", ".", "close", "(", ")" ]
Add disease association annotation to the network .
def _add_disease_associations ( self , disease_associations : dict ) -> None : if disease_associations is not None : for target_id , disease_id_list in disease_associations . items ( ) : if target_id in self . graph . vs [ "name" ] : self . graph . vs . find ( name = target_id ) [ "associated_diseases" ] = disease_id_list
10,713
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L114-L122
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Get genes that are up - regulated .
def get_upregulated_genes(self) -> VertexSeq:
    """Get genes that are up-regulated.

    Selects vertices matching the _is_upregulated_gene predicate and
    logs how many survived the overlay onto the network.
    """
    up_regulated = self.graph.vs.select(self._is_upregulated_gene)
    message = f"No. of up-regulated genes after laying on network: {len(up_regulated)}"
    logger.info(message)
    return up_regulated
10,714
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L124-L131
[ "def", "DeleteContainer", "(", "self", ",", "collection_link", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "{", "}", "path", "=", "base", ".", "GetPathFromLink", "(", "collection_link", ")", "collection_id", "=", "base", ".", "GetResourceIdOrFullNameFromLink", "(", "collection_link", ")", "return", "self", ".", "DeleteResource", "(", "path", ",", "'colls'", ",", "collection_id", ",", "None", ",", "options", ")" ]
Get genes that are down - regulated .
def get_downregulated_genes(self) -> VertexSeq:
    """Get genes that are down-regulated.

    Selects vertices matching the _is_downregulated_gene predicate and
    logs how many survived the overlay onto the network.
    """
    down_regulated = self.graph.vs.select(self._is_downregulated_gene)
    message = f"No. of down-regulated genes after laying on network: {len(down_regulated)}"
    logger.info(message)
    return down_regulated
10,715
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L133-L140
[ "def", "_compute_sync_map_file_path", "(", "self", ",", "root", ",", "hierarchy_type", ",", "custom_id", ",", "file_name", ")", ":", "prefix", "=", "root", "if", "hierarchy_type", "==", "HierarchyType", ".", "PAGED", ":", "prefix", "=", "gf", ".", "norm_join", "(", "prefix", ",", "custom_id", ")", "file_name_joined", "=", "gf", ".", "norm_join", "(", "prefix", ",", "file_name", ")", "return", "self", ".", "_replace_placeholder", "(", "file_name_joined", ",", "custom_id", ")" ]
Print the summary of a graph .
def print_summary(self, heading: str) -> None:
    """Print the summary of a graph.

    Logs the heading followed by the node and edge counts.
    """
    node_count = len(self.graph.vs)
    edge_count = len(self.graph.es)
    logger.info(heading)
    logger.info("Number of nodes: {}".format(node_count))
    logger.info("Number of edges: {}".format(edge_count))
10,716
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L151-L158
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Get the differentially expressed genes based on diff_type .
def get_differentially_expressed_genes(self, diff_type: str) -> VertexSeq:
    """Get the differentially expressed genes based on diff_type.

    :param diff_type: "up" for up-regulated, "down" for down-regulated,
        anything else selects all differentially expressed genes
    """
    # map the requested direction onto the vertex attribute to select on
    selector = {
        "up": "up_regulated_eq",
        "down": "down_regulated_eq",
    }.get(diff_type, "diff_expressed_eq")
    return self.graph.vs.select(**{selector: True})
10,717
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L160-L172
[ "def", "clean_registration_ids", "(", "self", ",", "registration_ids", "=", "[", "]", ")", ":", "valid_registration_ids", "=", "[", "]", "for", "registration_id", "in", "registration_ids", ":", "details", "=", "self", ".", "registration_info_request", "(", "registration_id", ")", "if", "details", ".", "status_code", "==", "200", ":", "valid_registration_ids", ".", "append", "(", "registration_id", ")", "return", "valid_registration_ids" ]
Write the network as an adjacency list to a file .
def write_adj_list ( self , path : str ) -> None : adj_list = self . get_adjlist ( ) with open ( path , mode = "w" ) as file : for i , line in enumerate ( adj_list ) : print ( i , * line , file = file )
10,718
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L174-L183
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Get attribute values for the requested indices .
def get_attribute_from_indices ( self , indices : list , attribute_name : str ) : return list ( np . array ( self . graph . vs [ attribute_name ] ) [ indices ] )
10,719
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/network.py#L192-L199
[ "def", "parse_message", "(", "message", ",", "nodata", "=", "False", ")", ":", "header", "=", "read_machine_header", "(", "message", ")", "h_len", "=", "__get_machine_header_length", "(", "header", ")", "meta_raw", "=", "message", "[", "h_len", ":", "h_len", "+", "header", "[", "'meta_len'", "]", "]", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data_start", "=", "h_len", "+", "header", "[", "'meta_len'", "]", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "message", "[", "data_start", ":", "data_start", "+", "header", "[", "'data_len'", "]", "]", ")", "return", "header", ",", "meta", ",", "data" ]
Read headers from the given stream into the given header dict . If hdict is None a new header dict is created . Returns the populated header dict . Headers which are repeated are folded together using a comma if their specification so dictates . This function raises ValueError when the read bytes violate the HTTP spec . You should probably return 400 Bad Request if this happens .
def read_headers ( rfile , hdict = None ) : if hdict is None : hdict = { } while True : line = rfile . readline ( ) if not line : # No more data--illegal end of headers raise ValueError ( "Illegal end of headers." ) if line == CRLF : # Normal end of headers break if not line . endswith ( CRLF ) : raise ValueError ( "HTTP requires CRLF terminators" ) if line [ 0 ] in ' \t' : # It's a continuation line. v = line . strip ( ) else : try : k , v = line . split ( ":" , 1 ) except ValueError : raise ValueError ( "Illegal header line." ) # TODO: what about TE and WWW-Authenticate? k = k . strip ( ) . title ( ) v = v . strip ( ) hname = k if k in comma_separated_headers : existing = hdict . get ( hname ) if existing : v = ", " . join ( ( existing , v ) ) hdict [ hname ] = v return hdict
10,720
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/server/cherrypy/__init__.py#L137-L183
[ "def", "detach_all_classes", "(", "self", ")", ":", "classes", "=", "list", "(", "self", ".", "_observers", ".", "keys", "(", ")", ")", "for", "cls", "in", "classes", ":", "self", ".", "detach_class", "(", "cls", ")" ]
Parse the next HTTP request start - line and message - headers .
def parse_request ( self ) : self . rfile = SizeCheckWrapper ( self . conn . rfile , self . server . max_request_header_size ) try : self . read_request_line ( ) except MaxSizeExceeded : self . simple_response ( "414 Request-URI Too Long" , "The Request-URI sent with the request exceeds the maximum " "allowed bytes." ) return try : success = self . read_request_headers ( ) except MaxSizeExceeded : self . simple_response ( "413 Request Entity Too Large" , "The headers sent with the request exceed the maximum " "allowed bytes." ) return else : if not success : return self . ready = True
10,721
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/server/cherrypy/__init__.py#L513-L536
[ "def", "getOverlayTextureSize", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayTextureSize", "pWidth", "=", "c_uint32", "(", ")", "pHeight", "=", "c_uint32", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "pWidth", ")", ",", "byref", "(", "pHeight", ")", ")", "return", "result", ",", "pWidth", ".", "value", ",", "pHeight", ".", "value" ]
Assert process and send the HTTP response message - headers . You must set self . status and self . outheaders before calling this .
def send_headers ( self ) : hkeys = [ key . lower ( ) for key , value in self . outheaders ] status = int ( self . status [ : 3 ] ) if status == 413 : # Request Entity Too Large. Close conn to avoid garbage. self . close_connection = True elif "content-length" not in hkeys : # "All 1xx (informational), 204 (no content), # and 304 (not modified) responses MUST NOT # include a message-body." So no point chunking. if status < 200 or status in ( 204 , 205 , 304 ) : pass else : if ( self . response_protocol == 'HTTP/1.1' and self . method != 'HEAD' ) : # Use the chunked transfer-coding self . chunked_write = True self . outheaders . append ( ( "Transfer-Encoding" , "chunked" ) ) else : # Closing the conn is the only way to determine len. self . close_connection = True if "connection" not in hkeys : if self . response_protocol == 'HTTP/1.1' : # Both server and client are HTTP/1.1 or better if self . close_connection : self . outheaders . append ( ( "Connection" , "close" ) ) else : # Server and/or client are HTTP/1.0 if not self . close_connection : self . outheaders . append ( ( "Connection" , "Keep-Alive" ) ) if ( not self . close_connection ) and ( not self . chunked_read ) : # Read any remaining request body data on the socket. # "If an origin server receives a request that does not include an # Expect request-header field with the "100-continue" expectation, # the request includes a request body, and the server responds # with a final status code before reading the entire request body # from the transport connection, then the server SHOULD NOT close # the transport connection until it has read the entire request, # or until the client closes the connection. Otherwise, the client # might not reliably receive the response message. However, this # requirement is not be construed as preventing a server from # defending itself against denial-of-service attacks, or from # badly broken client implementations." remaining = getattr ( self . 
rfile , 'remaining' , 0 ) if remaining > 0 : self . rfile . read ( remaining ) if "date" not in hkeys : self . outheaders . append ( ( "Date" , rfc822 . formatdate ( ) ) ) if "server" not in hkeys : self . outheaders . append ( ( "Server" , self . server . server_name ) ) buf = [ self . server . protocol + " " + self . status + CRLF ] for k , v in self . outheaders : buf . append ( k + ": " + v + CRLF ) buf . append ( CRLF ) self . conn . wfile . sendall ( "" . join ( buf ) )
10,722
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/server/cherrypy/__init__.py#L811-L875
[ "def", "StripTableName", "(", "name", ")", ":", "if", "name", ".", "lower", "(", ")", "!=", "name", ":", "warnings", ".", "warn", "(", "\"table name \\\"%s\\\" is not lower case\"", "%", "name", ")", "try", ":", "return", "TablePattern", ".", "search", "(", "name", ")", ".", "group", "(", "\"Name\"", ")", "except", "AttributeError", ":", "return", "name" ]
Start the pool of threads .
def start ( self ) : for i in range ( self . min ) : self . _threads . append ( WorkerThread ( self . server ) ) for worker in self . _threads : worker . setName ( "CP Server " + worker . getName ( ) ) worker . start ( ) for worker in self . _threads : while not worker . ready : time . sleep ( .1 )
10,723
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/server/cherrypy/__init__.py#L1397-L1406
[ "def", "subdir_findall", "(", "dir", ",", "subdir", ")", ":", "strip_n", "=", "len", "(", "dir", ".", "split", "(", "'/'", ")", ")", "path", "=", "'/'", ".", "join", "(", "(", "dir", ",", "subdir", ")", ")", "return", "[", "'/'", ".", "join", "(", "s", ".", "split", "(", "'/'", ")", "[", "strip_n", ":", "]", ")", "for", "s", "in", "setuptools", ".", "findall", "(", "path", ")", "]" ]
Fields that should be considered for our notion of object equality .
def fields ( self ) : return ( self . locus , self . offset_start , self . offset_end , self . alignment_key )
10,724
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup_element.py#L57-L62
[ "def", "exec_start", "(", "self", ",", "exec_id", ",", "detach", "=", "False", ",", "tty", "=", "False", ",", "stream", "=", "False", ",", "socket", "=", "False", ",", "demux", "=", "False", ")", ":", "# we want opened socket if socket == True", "data", "=", "{", "'Tty'", ":", "tty", ",", "'Detach'", ":", "detach", "}", "headers", "=", "{", "}", "if", "detach", "else", "{", "'Connection'", ":", "'Upgrade'", ",", "'Upgrade'", ":", "'tcp'", "}", "res", "=", "self", ".", "_post_json", "(", "self", ".", "_url", "(", "'/exec/{0}/start'", ",", "exec_id", ")", ",", "headers", "=", "headers", ",", "data", "=", "data", ",", "stream", "=", "True", ")", "if", "detach", ":", "return", "self", ".", "_result", "(", "res", ")", "if", "socket", ":", "return", "self", ".", "_get_raw_response_socket", "(", "res", ")", "return", "self", ".", "_read_from_socket", "(", "res", ",", "stream", ",", "tty", "=", "tty", ",", "demux", "=", "demux", ")" ]
The sequenced bases in the alignment that align to this locus in the genome as a string .
def bases ( self ) : sequence = self . alignment . query_sequence assert self . offset_end <= len ( sequence ) , "End offset=%d > sequence length=%d. CIGAR=%s. SEQUENCE=%s" % ( self . offset_end , len ( sequence ) , self . alignment . cigarstring , sequence ) return sequence [ self . offset_start : self . offset_end ]
10,725
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup_element.py#L71-L86
[ "def", "handle_simulation_end", "(", "self", ",", "data_portal", ")", ":", "log", ".", "info", "(", "'Simulated {} trading days\\n'", "'first open: {}\\n'", "'last close: {}'", ",", "self", ".", "_session_count", ",", "self", ".", "_trading_calendar", ".", "session_open", "(", "self", ".", "_first_session", ")", ",", "self", ".", "_trading_calendar", ".", "session_close", "(", "self", ".", "_last_session", ")", ",", ")", "packet", "=", "{", "}", "self", ".", "end_of_simulation", "(", "packet", ",", "self", ".", "_ledger", ",", "self", ".", "_trading_calendar", ",", "self", ".", "_sessions", ",", "data_portal", ",", "self", ".", "_benchmark_source", ",", ")", "return", "packet" ]
The minimum of the base qualities . In the case of a deletion in which case there are no bases in this PileupElement the minimum is taken over the sequenced bases immediately before and after the deletion .
def min_base_quality ( self ) : try : return min ( self . base_qualities ) except ValueError : # We are mid-deletion. We return the minimum of the adjacent bases. assert self . offset_start == self . offset_end adjacent_qualities = [ self . alignment . query_qualities [ offset ] for offset in [ self . offset_start - 1 , self . offset_start ] if 0 <= offset < len ( self . alignment . query_qualities ) ] return min ( adjacent_qualities )
10,726
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup_element.py#L98-L114
[ "def", "shared_dataset_ids", "(", "self", ")", ":", "shared_ids", "=", "set", "(", "self", ".", "scenes", "[", "0", "]", ".", "keys", "(", ")", ")", "for", "scene", "in", "self", ".", "scenes", "[", "1", ":", "]", ":", "shared_ids", "&=", "set", "(", "scene", ".", "keys", "(", ")", ")", "return", "shared_ids" ]
Factory function to create a new PileupElement from a pysam PileupRead .
def from_pysam_alignment ( locus , pileup_read ) : assert not pileup_read . is_refskip , ( "Can't create a PileupElement in a refskip (typically an intronic " "gap in an RNA alignment)" ) # Pysam has an `aligned_pairs` method that gives a list of # (offset, locus) pairs indicating the correspondence between bases in # the alignment and reference loci. Here we use that to compute # offset_start and offset_end. # # This is slightly tricky in the case of insertions and deletions. # Here are examples of the desired logic. # # Target locus = 1000 # # (1) Simple case: matching bases. # # OFFSET LOCUS # 0 999 # 1 1000 # 2 1001 # # DESIRED RESULT: offset_start=1, offset_end=2. # # # (2) A 1 base insertion at offset 2. # # OFFSET LOCUS # 0 999 # 1 1000 # 2 None # 3 1001 # # DESIRED RESULT: offset_start = 1, offset_end=3. # # # (3) A 2 base deletion at loci 1000 and 1001. # # OFFSET LOCUS # 0 999 # None 1000 # None 1001 # 1 1002 # # DESIRED RESULT: offset_start = 1, offset_end=1. # offset_start = None offset_end = len ( pileup_read . alignment . query_sequence ) # TODO: doing this with get_blocks() may be faster. for ( offset , position ) in pileup_read . alignment . aligned_pairs : if offset is not None and position is not None : if position == locus . position : offset_start = offset elif position > locus . position : offset_end = offset break if offset_start is None : offset_start = offset_end assert pileup_read . is_del == ( offset_end - offset_start == 0 ) , "Deletion=%s but | [%d,%d) |=%d for locus %d in: \n%s" % ( pileup_read . is_del , offset_start , offset_end , offset_end - offset_start , locus . position , pileup_read . alignment . aligned_pairs ) assert offset_end >= offset_start result = PileupElement ( locus , offset_start , offset_end , pileup_read . alignment ) return result
10,727
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup_element.py#L117-L206
[ "def", "set_visible", "(", "self", ",", "visible", ")", ":", "v", "=", "View", ".", "VISIBILITY_VISIBLE", "if", "visible", "else", "View", ".", "VISIBILITY_GONE", "self", ".", "widget", ".", "setVisibility", "(", "v", ")" ]
A slightly safer version of request .
def safe_request ( url , method = None , params = None , data = None , json = None , headers = None , allow_redirects = False , timeout = 30 , verify_ssl = True , ) : session = requests . Session ( ) kwargs = { } if json : kwargs [ 'json' ] = json if not headers : headers = { } headers . setdefault ( 'Content-Type' , 'application/json' ) if data : kwargs [ 'data' ] = data if params : kwargs [ 'params' ] = params if headers : kwargs [ 'headers' ] = headers if method is None : method = 'POST' if ( data or json ) else 'GET' response = session . request ( method = method , url = url , allow_redirects = allow_redirects , timeout = timeout , verify = verify_ssl , * * kwargs ) return response
10,728
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/http.py#L12-L56
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Decorator to mark a function as invoking a remote procedure call . When invoked in server mode the function will be called ; when invoked in client mode an RPC will be initiated .
def remote ( func ) : @ functools . wraps ( func ) def wrapper ( self , * args , * * kwargs ) : if self . mode == 'server' : # In server mode, call the function return func ( self , * args , * * kwargs ) # Make sure we're connected if not self . conn : self . connect ( ) # Call the remote function self . conn . send ( 'CALL' , func . __name__ , args , kwargs ) # Receive the response cmd , payload = self . conn . recv ( ) if cmd == 'ERR' : self . close ( ) raise Exception ( "Catastrophic error from server: %s" % payload [ 0 ] ) elif cmd == 'EXC' : exc_type = utils . find_entrypoint ( None , payload [ 0 ] ) raise exc_type ( payload [ 1 ] ) elif cmd != 'RES' : self . close ( ) raise Exception ( "Invalid command response from server: %s" % cmd ) return payload [ 0 ] # Mark it a callable wrapper . _remote = True # Return the wrapped function return wrapper
10,729
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/remote.py#L171-L210
[ "def", "_unscramble_regressor_columns", "(", "parent_data", ",", "data", ")", ":", "matches", "=", "[", "'_power[0-9]+'", ",", "'_derivative[0-9]+'", "]", "var", "=", "OrderedDict", "(", "(", "c", ",", "deque", "(", ")", ")", "for", "c", "in", "parent_data", ".", "columns", ")", "for", "c", "in", "data", ".", "columns", ":", "col", "=", "c", "for", "m", "in", "matches", ":", "col", "=", "re", ".", "sub", "(", "m", ",", "''", ",", "col", ")", "if", "col", "==", "c", ":", "var", "[", "col", "]", ".", "appendleft", "(", "c", ")", "else", ":", "var", "[", "col", "]", ".", "append", "(", "c", ")", "unscrambled", "=", "reduce", "(", "(", "lambda", "x", ",", "y", ":", "x", "+", "y", ")", ",", "var", ".", "values", "(", ")", ")", "return", "data", "[", "[", "*", "unscrambled", "]", "]" ]
Send a command message to the other end .
def send ( self , cmd , * payload ) : # If it's closed, raise an error up front if not self . _sock : raise ConnectionClosed ( "Connection closed" ) # Construct the outgoing message msg = json . dumps ( dict ( cmd = cmd , payload = payload ) ) + '\n' # Send it try : self . _sock . sendall ( msg ) except socket . error : # We'll need to re-raise e_type , e_value , e_tb = sys . exc_info ( ) # Make sure the socket is closed self . close ( ) # Re-raise raise e_type , e_value , e_tb
10,730
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/remote.py#L67-L94
[ "def", "modify", "(", "self", ",", "symbol", ",", "base_value", ")", ":", "self", ".", "_unit_system_id", "=", "None", "if", "symbol", "not", "in", "self", ".", "lut", ":", "raise", "SymbolNotFoundError", "(", "\"Tried to modify the symbol '%s', but it does not exist \"", "\"in this registry.\"", "%", "symbol", ")", "if", "hasattr", "(", "base_value", ",", "\"in_base\"", ")", ":", "new_dimensions", "=", "base_value", ".", "units", ".", "dimensions", "base_value", "=", "base_value", ".", "in_base", "(", "\"mks\"", ")", "base_value", "=", "base_value", ".", "value", "else", ":", "new_dimensions", "=", "self", ".", "lut", "[", "symbol", "]", "[", "1", "]", "self", ".", "lut", "[", "symbol", "]", "=", "(", "float", "(", "base_value", ")", ",", "new_dimensions", ")", "+", "self", ".", "lut", "[", "symbol", "]", "[", "2", ":", "]" ]
Internal helper to pop a message off the receive buffer . If the message is an Exception that exception will be raised ; otherwise a tuple of command and payload will be returned .
def _recvbuf_pop ( self ) : # Pop a message off the recv buffer and return (or raise) it msg = self . _recvbuf . pop ( 0 ) if isinstance ( msg , Exception ) : raise msg return msg [ 'cmd' ] , msg [ 'payload' ]
10,731
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/remote.py#L96-L107
[ "def", "build_synchronize_decorator", "(", ")", ":", "lock", "=", "threading", ".", "Lock", "(", ")", "def", "lock_decorator", "(", "fn", ")", ":", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "lock_decorated", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "lock", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "lock_decorated", "return", "lock_decorator" ]
Ping the server . Returns the time interval in seconds required for the server to respond to the PING message .
def ping ( self ) : # Make sure we're connected if not self . conn : self . connect ( ) # Send the ping and wait for the response self . conn . send ( 'PING' , time . time ( ) ) cmd , payload = self . conn . recv ( ) recv_ts = time . time ( ) # Make sure the response was a PONG if cmd != 'PONG' : raise Exception ( "Invalid response from server" ) # Return the RTT return recv_ts - payload [ 0 ]
10,732
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/remote.py#L288-L308
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
Listen for clients . This method causes the SimpleRPC object to switch to server mode . One thread will be created for each client .
def listen ( self ) : # Make sure we're in server mode if self . mode and self . mode != 'server' : raise ValueError ( "%s is not in server mode" % self . __class__ . __name__ ) self . mode = 'server' # Obtain a listening socket serv = _create_server ( self . host , self . port ) # If we have too many errors, we want to bail out err_thresh = 0 while True : # Accept a connection try : sock , addr = serv . accept ( ) except Exception as exc : err_thresh += 1 if err_thresh >= self . max_err_thresh : LOG . exception ( "Too many errors accepting " "connections: %s" % str ( exc ) ) break continue # Pragma: nocover # Decrement error count on successful connections err_thresh = max ( err_thresh - 1 , 0 ) # Log the connection attempt LOG . info ( "Accepted connection from %s port %s" % ( addr [ 0 ] , addr [ 1 ] ) ) # And handle the connection eventlet . spawn_n ( self . serve , self . connection_class ( sock ) , addr ) # Close the listening socket with utils . ignore_except ( ) : serv . close ( )
10,733
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/remote.py#L360-L402
[ "def", "_extract_axes", "(", "self", ",", "data", ",", "axes", ",", "*", "*", "kwargs", ")", ":", "return", "[", "self", ".", "_extract_axis", "(", "self", ",", "data", ",", "axis", "=", "i", ",", "*", "*", "kwargs", ")", "for", "i", ",", "a", "in", "enumerate", "(", "axes", ")", "]" ]
Handle a single client .
def serve ( self , conn , addr , auth = False ) : try : # Handle data from the client while True : # Get the command try : cmd , payload = conn . recv ( ) except ValueError as exc : # Tell the client about the error conn . send ( 'ERR' , "Failed to parse command: %s" % str ( exc ) ) # If they haven't successfully authenticated yet, # disconnect them if not auth : return continue # Pragma: nocover # Log the command and payload, for debugging purposes LOG . debug ( "Received command %r from %s port %s; payload: %r" % ( cmd , addr [ 0 ] , addr [ 1 ] , payload ) ) # Handle authentication if cmd == 'AUTH' : if auth : conn . send ( 'ERR' , "Already authenticated" ) elif payload [ 0 ] != self . authkey : # Don't give them a second chance conn . send ( 'ERR' , "Invalid authentication key" ) return else : # Authentication successful conn . send ( 'OK' ) auth = True # Handle unauthenticated connections elif not auth : # No second chances conn . send ( 'ERR' , "Not authenticated" ) return # Handle aliveness test elif cmd == 'PING' : conn . send ( 'PONG' , * payload ) # Handle a function call command elif cmd == 'CALL' : try : # Get the call parameters try : funcname , args , kwargs = payload except ValueError as exc : conn . send ( 'ERR' , "Invalid payload for 'CALL' " "command: %s" % str ( exc ) ) continue # Look up the function func = self . _get_remote_method ( funcname ) # Call the function result = func ( * args , * * kwargs ) except Exception as exc : exc_name = '%s:%s' % ( exc . __class__ . __module__ , exc . __class__ . __name__ ) conn . send ( 'EXC' , exc_name , str ( exc ) ) else : # Return the result conn . send ( 'RES' , result ) # Handle all other commands by returning an ERR else : conn . send ( 'ERR' , "Unrecognized command %r" % cmd ) except ConnectionClosed : # Ignore the connection closed error pass except Exception as exc : # Log other exceptions LOG . 
exception ( "Error serving client at %s port %s: %s" % ( addr [ 0 ] , addr [ 1 ] , str ( exc ) ) ) finally : LOG . info ( "Closing connection from %s port %s" % ( addr [ 0 ] , addr [ 1 ] ) ) # Make sure the socket gets closed conn . close ( )
10,734
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/remote.py#L419-L514
[ "def", "body_block_caption_render", "(", "caption_tags", ",", "base_url", "=", "None", ")", ":", "caption_content", "=", "[", "]", "supplementary_material_tags", "=", "[", "]", "for", "block_tag", "in", "remove_doi_paragraph", "(", "caption_tags", ")", ":", "# Note then skip p tags with supplementary-material inside", "if", "raw_parser", ".", "supplementary_material", "(", "block_tag", ")", ":", "for", "supp_tag", "in", "raw_parser", ".", "supplementary_material", "(", "block_tag", ")", ":", "supplementary_material_tags", ".", "append", "(", "supp_tag", ")", "continue", "for", "block_content", "in", "body_block_content_render", "(", "block_tag", ",", "base_url", "=", "base_url", ")", ":", "if", "block_content", "!=", "{", "}", ":", "caption_content", ".", "append", "(", "block_content", ")", "return", "caption_content", ",", "supplementary_material_tags" ]
Retrieve the LimitData object the middleware will use for getting the limits . This implementation returns a RemoteLimitData instance that can access the LimitData stored in the RemoteControlDaemon process .
def get_limits ( self ) : # Set one up if we don't already have it if not self . remote_limits : self . remote_limits = RemoteLimitData ( self . remote ) return self . remote_limits
10,735
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/remote.py#L643-L654
[ "def", "maybe_colored", "(", "msg", ",", "color", ",", "opt", ")", ":", "if", "opt", ".", "monochrome", ":", "return", "msg", "return", "colored", "(", "msg", ",", "color", ")" ]
Popen . waitUpTo - Wait up to a certain number of seconds for the process to end .
def waitUpTo ( self , timeoutSeconds , pollInterval = DEFAULT_POLL_INTERVAL ) : i = 0 numWaits = timeoutSeconds / float ( pollInterval ) ret = self . poll ( ) if ret is None : while i < numWaits : time . sleep ( pollInterval ) ret = self . poll ( ) if ret is not None : break i += 1 return ret
10,736
https://github.com/kata198/python-subprocess2/blob/8544b0b651d8e14de9fdd597baa704182e248b01/subprocess2/__init__.py#L63-L85
[ "def", "get_inconsistent_fieldnames", "(", ")", ":", "field_name_list", "=", "[", "]", "for", "fieldset_title", ",", "fields_list", "in", "settings", ".", "CONFIG_FIELDSETS", ".", "items", "(", ")", ":", "for", "field_name", "in", "fields_list", ":", "field_name_list", ".", "append", "(", "field_name", ")", "if", "not", "field_name_list", ":", "return", "{", "}", "return", "set", "(", "set", "(", "settings", ".", "CONFIG", ".", "keys", "(", ")", ")", "-", "set", "(", "field_name_list", ")", ")" ]
waitOrTerminate - Wait up to a certain number of seconds for the process to end .
def waitOrTerminate ( self , timeoutSeconds , pollInterval = DEFAULT_POLL_INTERVAL , terminateToKillSeconds = SUBPROCESS2_DEFAULT_TERMINATE_TO_KILL_SECONDS ) : returnCode = self . waitUpTo ( timeoutSeconds , pollInterval ) actionTaken = SUBPROCESS2_PROCESS_COMPLETED if returnCode is None : if terminateToKillSeconds is None : self . terminate ( ) actionTaken |= SUBPROCESS2_PROCESS_TERMINATED time . sleep ( pollInterval ) # Give a chance to cleanup returnCode = self . poll ( ) elif terminateToKillSeconds == 0 : self . kill ( ) actionTaken |= SUBPROCESS2_PROCESS_KILLED time . sleep ( .01 ) # Give a chance to happen self . poll ( ) # Don't defunct returnCode = None else : self . terminate ( ) actionTaken |= SUBPROCESS2_PROCESS_TERMINATED returnCode = self . waitUpTo ( terminateToKillSeconds , pollInterval ) if returnCode is None : actionTaken |= SUBPROCESS2_PROCESS_KILLED self . kill ( ) time . sleep ( .01 ) self . poll ( ) # Don't defunct return { 'returnCode' : returnCode , 'actionTaken' : actionTaken }
10,737
https://github.com/kata198/python-subprocess2/blob/8544b0b651d8e14de9fdd597baa704182e248b01/subprocess2/__init__.py#L89-L147
[ "def", "_get_management_client", "(", "self", ",", "client_class", ")", ":", "try", ":", "client", "=", "get_client_from_auth_file", "(", "client_class", ",", "auth_path", "=", "self", ".", "service_account_file", ")", "except", "ValueError", "as", "error", ":", "raise", "AzureCloudException", "(", "'Service account file format is invalid: {0}.'", ".", "format", "(", "error", ")", ")", "except", "KeyError", "as", "error", ":", "raise", "AzureCloudException", "(", "'Service account file missing key: {0}.'", ".", "format", "(", "error", ")", ")", "except", "Exception", "as", "error", ":", "raise", "AzureCloudException", "(", "'Unable to create resource management client: '", "'{0}.'", ".", "format", "(", "error", ")", ")", "return", "client" ]
runInBackground - Create a background thread which will manage this process automatically read from streams and perform any cleanups
def runInBackground ( self , pollInterval = .1 , encoding = False ) : from . BackgroundTask import BackgroundTaskThread taskInfo = BackgroundTaskInfo ( encoding ) thread = BackgroundTaskThread ( self , taskInfo , pollInterval , encoding ) thread . start ( ) #thread.run() # Uncomment to use pdb debug (will not run in background) return taskInfo
10,738
https://github.com/kata198/python-subprocess2/blob/8544b0b651d8e14de9fdd597baa704182e248b01/subprocess2/__init__.py#L152-L172
[ "def", "conversions", "(", "self", ")", ":", "return", "\"\\n\"", ".", "join", "(", "str", "(", "self", ".", "to", "(", "unit", ")", ")", "for", "unit", "in", "self", ".", "supported_units", ")" ]
Adds the clients for this group to a clients field .
def setClients ( self , * args , * * kwargs ) : requests = 0 if 'fullDetails' in kwargs : fullDetails = kwargs [ 'fullDetails' ] kwargs . pop ( 'fullDetails' ) else : fullDetails = True clients = [ ] for m in self [ 'groupMembers' ] : try : client = self . mambuclientclass ( entid = m [ 'clientKey' ] , fullDetails = fullDetails , * args , * * kwargs ) except AttributeError as ae : from . mambuclient import MambuClient self . mambuclientclass = MambuClient client = self . mambuclientclass ( entid = m [ 'clientKey' ] , fullDetails = fullDetails , * args , * * kwargs ) requests += 1 clients . append ( client ) self [ 'clients' ] = clients return requests
10,739
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambugroup.py#L77-L112
[ "def", "free", "(", "self", ",", "lpAddress", ")", ":", "hProcess", "=", "self", ".", "get_handle", "(", "win32", ".", "PROCESS_VM_OPERATION", ")", "win32", ".", "VirtualFreeEx", "(", "hProcess", ",", "lpAddress", ")" ]
Adds the activities for this group to a activities field .
def setActivities ( self , * args , * * kwargs ) : def activityDate ( activity ) : """Util function used for sorting activities according to timestamp""" try : return activity [ 'activity' ] [ 'timestamp' ] except KeyError as kerr : return None try : activities = self . mambuactivitiesclass ( groupId = self [ 'encodedKey' ] , * args , * * kwargs ) except AttributeError as ae : from . mambuactivity import MambuActivities self . mambuactivitiesclass = MambuActivities activities = self . mambuactivitiesclass ( groupId = self [ 'encodedKey' ] , * args , * * kwargs ) activities . attrs = sorted ( activities . attrs , key = activityDate ) self [ 'activities' ] = activities return 1
10,740
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambugroup.py#L142-L167
[ "def", "get_doc", "(", "logger", "=", "None", ",", "plugin", "=", "None", ",", "reporthook", "=", "None", ")", ":", "from", "ginga", ".", "GingaPlugin", "import", "GlobalPlugin", ",", "LocalPlugin", "if", "isinstance", "(", "plugin", ",", "GlobalPlugin", ")", ":", "plugin_page", "=", "'plugins_global'", "plugin_name", "=", "str", "(", "plugin", ")", "elif", "isinstance", "(", "plugin", ",", "LocalPlugin", ")", ":", "plugin_page", "=", "'plugins_local'", "plugin_name", "=", "str", "(", "plugin", ")", "else", ":", "plugin_page", "=", "None", "plugin_name", "=", "None", "try", ":", "index_html", "=", "_download_rtd_zip", "(", "reporthook", "=", "reporthook", ")", "# Download failed, use online resource", "except", "Exception", "as", "e", ":", "url", "=", "'https://ginga.readthedocs.io/en/latest/'", "if", "plugin_name", "is", "not", "None", ":", "if", "toolkit", ".", "family", ".", "startswith", "(", "'qt'", ")", ":", "# This displays plugin docstring.", "url", "=", "None", "else", ":", "# This redirects to online doc.", "url", "+=", "'manual/{}/{}.html'", ".", "format", "(", "plugin_page", ",", "plugin_name", ")", "if", "logger", "is", "not", "None", ":", "logger", ".", "error", "(", "str", "(", "e", ")", ")", "# Use local resource", "else", ":", "pfx", "=", "'file:'", "url", "=", "'{}{}'", ".", "format", "(", "pfx", ",", "index_html", ")", "# https://github.com/rtfd/readthedocs.org/issues/2803", "if", "plugin_name", "is", "not", "None", ":", "url", "+=", "'#{}'", ".", "format", "(", "plugin_name", ")", "return", "url" ]
Set the sensitivity value .
def set_sensitivity ( self , sensitivity = DEFAULT_SENSITIVITY ) : if sensitivity < 31 : self . _mtreg = 31 elif sensitivity > 254 : self . _mtreg = 254 else : self . _mtreg = sensitivity self . _power_on ( ) self . _set_mode ( 0x40 | ( self . _mtreg >> 5 ) ) self . _set_mode ( 0x60 | ( self . _mtreg & 0x1f ) ) self . _power_down ( )
10,741
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bh1750.py#L92-L106
[ "def", "deleteMessages", "(", "self", ",", "message_ids", ")", ":", "message_ids", "=", "require_list", "(", "message_ids", ")", "data", "=", "dict", "(", ")", "for", "i", ",", "message_id", "in", "enumerate", "(", "message_ids", ")", ":", "data", "[", "\"message_ids[{}]\"", ".", "format", "(", "i", ")", "]", "=", "message_id", "r", "=", "self", ".", "_post", "(", "self", ".", "req_url", ".", "DELETE_MESSAGES", ",", "data", ")", "return", "r", ".", "ok" ]
Return current measurement result in lx .
def _get_result ( self ) -> float : try : data = self . _bus . read_word_data ( self . _i2c_add , self . _mode ) self . _ok = True except OSError as exc : self . log_error ( "Bad reading in bus: %s" , exc ) self . _ok = False return - 1 count = data >> 8 | ( data & 0xff ) << 8 mode2coeff = 2 if self . _high_res else 1 ratio = 1 / ( 1.2 * ( self . _mtreg / 69.0 ) * mode2coeff ) return ratio * count
10,742
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bh1750.py#L108-L121
[ "def", "tornado_run", "(", "app", ",", "port", "=", "5000", ",", "address", "=", "\"\"", ",", "use_gevent", "=", "False", ",", "start", "=", "True", ",", "monkey_patch", "=", "None", ",", "Container", "=", "None", ",", "Server", "=", "None", ",", "threadpool", "=", "None", ")", ":", "# pragma: no cover", "if", "Container", "is", "None", ":", "from", "tornado", ".", "wsgi", "import", "WSGIContainer", "Container", "=", "WSGIContainer", "if", "Server", "is", "None", ":", "from", "tornado", ".", "httpserver", "import", "HTTPServer", "Server", "=", "HTTPServer", "if", "monkey_patch", "is", "None", ":", "monkey_patch", "=", "use_gevent", "CustomWSGIContainer", "=", "Container", "if", "use_gevent", ":", "if", "monkey_patch", ":", "from", "gevent", "import", "monkey", "monkey", ".", "patch_all", "(", ")", "import", "gevent", "class", "GeventWSGIContainer", "(", "Container", ")", ":", "def", "__call__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "async_task", "(", ")", ":", "super", "(", "GeventWSGIContainer", ",", "self", ")", ".", "__call__", "(", "*", "args", ",", "*", "*", "kwargs", ")", "gevent", ".", "spawn", "(", "async_task", ")", "CustomWSGIContainer", "=", "GeventWSGIContainer", "if", "threadpool", "is", "not", "None", ":", "from", "multiprocessing", ".", "pool", "import", "ThreadPool", "if", "not", "isinstance", "(", "threadpool", ",", "ThreadPool", ")", ":", "threadpool", "=", "ThreadPool", "(", "threadpool", ")", "class", "ThreadPoolWSGIContainer", "(", "Container", ")", ":", "def", "__call__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "async_task", "(", ")", ":", "super", "(", "ThreadPoolWSGIContainer", ",", "self", ")", ".", "__call__", "(", "*", "args", ",", "*", "*", "kwargs", ")", "threadpool", ".", "apply_async", "(", "async_task", ")", "CustomWSGIContainer", "=", "ThreadPoolWSGIContainer", "http_server", "=", "Server", "(", "CustomWSGIContainer", "(", "app", ")", ")", "http_server", ".", 
"listen", "(", "port", ",", "address", ")", "if", "start", ":", "tornado_start", "(", ")", "return", "http_server" ]
Wait for the sensor to be ready for measurement .
def _wait_for_result ( self ) : basetime = 0.018 if self . _low_res else 0.128 sleep ( basetime * ( self . _mtreg / 69.0 ) + self . _delay )
10,743
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bh1750.py#L123-L126
[ "def", "delete_network_acl", "(", "network_acl_id", "=", "None", ",", "network_acl_name", "=", "None", ",", "disassociate", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "disassociate", ":", "network_acl", "=", "_get_resource", "(", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "network_acl", "and", "network_acl", ".", "associations", ":", "subnet_id", "=", "network_acl", ".", "associations", "[", "0", "]", ".", "subnet_id", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "disassociate_network_acl", "(", "subnet_id", ")", "except", "BotoServerError", ":", "pass", "return", "_delete_resource", "(", "resource", "=", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "resource_id", "=", "network_acl_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")" ]
Update the measured light level in lux .
def update ( self ) : if not self . _continuous_sampling or self . _light_level < 0 or self . _operation_mode != self . _mode : self . _reset ( ) self . _set_mode ( self . _operation_mode ) self . _wait_for_result ( ) self . _light_level = self . _get_result ( ) if not self . _continuous_sampling : self . _power_down ( )
10,744
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bh1750.py#L128-L138
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Make a timestamped one - time - use token that can be used to identifying the user .
def get_token ( user , secret , timestamp = None ) : timestamp = int ( timestamp or time ( ) ) secret = to_bytes ( secret ) key = '|' . join ( [ hashlib . sha1 ( secret ) . hexdigest ( ) , str ( user . id ) , get_hash_extract ( user . password ) , str ( getattr ( user , 'last_sign_in' , 0 ) ) , str ( timestamp ) , ] ) key = key . encode ( 'utf8' , 'ignore' ) mac = hmac . new ( key , msg = None , digestmod = hashlib . sha512 ) mac = mac . hexdigest ( ) [ : 50 ] token = '{0}${1}${2}' . format ( user . id , to36 ( timestamp ) , mac ) return token
10,745
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/utils.py#L63-L90
[ "def", "files_comments_delete", "(", "self", ",", "*", ",", "file", ":", "str", ",", "id", ":", "str", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "kwargs", ".", "update", "(", "{", "\"file\"", ":", "file", ",", "\"id\"", ":", "id", "}", ")", "return", "self", ".", "api_call", "(", "\"files.comments.delete\"", ",", "json", "=", "kwargs", ")" ]
Return the real user object .
def __get_user ( self ) : storage = object . __getattribute__ ( self , '_LazyUser__storage' ) user = getattr ( self . __auth , 'get_user' ) ( ) setattr ( storage , self . __user_name , user ) return user
10,746
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/utils.py#L116-L122
[ "def", "remove_range", "(", "cls", ",", "elem", ",", "end_elem", ",", "delete_end", "=", "True", ")", ":", "while", "elem", "is", "not", "None", "and", "elem", "!=", "end_elem", "and", "end_elem", "not", "in", "elem", ".", "xpath", "(", "\"descendant::*\"", ")", ":", "parent", "=", "elem", ".", "getparent", "(", ")", "nxt", "=", "elem", ".", "getnext", "(", ")", "parent", ".", "remove", "(", "elem", ")", "if", "DEBUG", "==", "True", ":", "print", "(", "etree", ".", "tounicode", "(", "elem", ")", ")", "elem", "=", "nxt", "if", "elem", "==", "end_elem", ":", "if", "delete_end", "==", "True", ":", "cls", ".", "remove", "(", "end_elem", ",", "leave_tail", "=", "True", ")", "elif", "elem", "is", "None", ":", "if", "parent", ".", "tail", "not", "in", "[", "None", ",", "''", "]", ":", "parent", ".", "tail", "=", "''", "cls", ".", "remove_range", "(", "parent", ".", "getnext", "(", ")", ",", "end_elem", ")", "XML", ".", "remove_if_empty", "(", "parent", ")", "elif", "end_elem", "in", "elem", ".", "xpath", "(", "\"descendant::*\"", ")", ":", "if", "DEBUG", "==", "True", ":", "print", "(", "elem", ".", "text", ")", "elem", ".", "text", "=", "''", "cls", ".", "remove_range", "(", "elem", ".", "getchildren", "(", ")", "[", "0", "]", ",", "end_elem", ")", "XML", ".", "remove_if_empty", "(", "elem", ")", "else", ":", "print", "(", "\"LOGIC ERROR\"", ",", "file", "=", "sys", ".", "stderr", ")" ]
expands the filename if there is a . as leading path
def _expand_filename ( self , line ) : # expand . newline = line path = os . getcwd ( ) if newline . startswith ( "." ) : newline = newline . replace ( "." , path , 1 ) # expand ~ newline = os . path . expanduser ( newline ) return newline
10,747
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/browser.py#L14-L23
[ "def", "refresh_datasources", "(", "self", ",", "datasource_name", "=", "None", ",", "merge_flag", "=", "True", ",", "refreshAll", "=", "True", ")", ":", "ds_list", "=", "self", ".", "get_datasources", "(", ")", "blacklist", "=", "conf", ".", "get", "(", "'DRUID_DATA_SOURCE_BLACKLIST'", ",", "[", "]", ")", "ds_refresh", "=", "[", "]", "if", "not", "datasource_name", ":", "ds_refresh", "=", "list", "(", "filter", "(", "lambda", "ds", ":", "ds", "not", "in", "blacklist", ",", "ds_list", ")", ")", "elif", "datasource_name", "not", "in", "blacklist", "and", "datasource_name", "in", "ds_list", ":", "ds_refresh", ".", "append", "(", "datasource_name", ")", "else", ":", "return", "self", ".", "refresh", "(", "ds_refresh", ",", "merge_flag", ",", "refreshAll", ")" ]
Modifies the customField field for the given object with something related to the value of the given field .
def setCustomField ( mambuentity , customfield = "" , * args , * * kwargs ) : from . import mambuuser from . import mambuclient try : customFieldValue = mambuentity [ customfield ] # find the dataType customfield by name or id datatype = [ l [ 'customField' ] [ 'dataType' ] for l in mambuentity [ mambuentity . customFieldName ] if ( l [ 'name' ] == customfield or l [ 'id' ] == customfield ) ] [ 0 ] except IndexError as ierr : # if no customfield found with the given name, assume it is a # grouped custom field, name must have an index suffix that must # be removed try : # find the dataType customfield by name or id datatype = [ l [ 'customField' ] [ 'dataType' ] for l in mambuentity [ mambuentity . customFieldName ] if ( l [ 'name' ] == customfield . split ( '_' ) [ 0 ] or l [ 'id' ] == customfield . split ( '_' ) [ 0 ] ) ] [ 0 ] except IndexError : err = MambuError ( "Object %s has no custom field '%s'" % ( mambuentity [ 'id' ] , customfield ) ) raise err except AttributeError : err = MambuError ( "Object does not have a custom field to set" ) raise err if datatype == "USER_LINK" : mambuentity [ customfield ] = mambuuser . MambuUser ( entid = customFieldValue , * args , * * kwargs ) elif datatype == "CLIENT_LINK" : mambuentity [ customfield ] = mambuclient . MambuClient ( entid = customFieldValue , * args , * * kwargs ) else : mambuentity [ customfield ] = customFieldValue return 0 return 1
10,748
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambustruct.py#L837-L878
[ "def", "plot_entropy", "(", "self", ",", "tmin", ",", "tmax", ",", "ntemp", ",", "ylim", "=", "None", ",", "*", "*", "kwargs", ")", ":", "temperatures", "=", "np", ".", "linspace", "(", "tmin", ",", "tmax", ",", "ntemp", ")", "if", "self", ".", "structure", ":", "ylabel", "=", "r\"$S$ (J/K/mol)\"", "else", ":", "ylabel", "=", "r\"$S$ (J/K/mol-c)\"", "fig", "=", "self", ".", "_plot_thermo", "(", "self", ".", "dos", ".", "entropy", ",", "temperatures", ",", "ylabel", "=", "ylabel", ",", "ylim", "=", "ylim", ",", "*", "*", "kwargs", ")", "return", "fig" ]
Turns every attribute of the Mambu object in to a string representation .
def serializeFields ( data ) : if isinstance ( data , MambuStruct ) : return data . serializeStruct ( ) try : it = iter ( data ) except TypeError as terr : return unicode ( data ) if type ( it ) == type ( iter ( [ ] ) ) : l = [ ] for e in it : l . append ( MambuStruct . serializeFields ( e ) ) return l elif type ( it ) == type ( iter ( { } ) ) : d = { } for k in it : d [ k ] = MambuStruct . serializeFields ( data [ k ] ) return d # elif ... tuples? sets? return unicode ( data )
10,749
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambustruct.py#L121-L150
[ "def", "min_periodic_distance", "(", "self", ",", "xyz0", ",", "xyz1", ")", ":", "d", "=", "np", ".", "abs", "(", "xyz0", "-", "xyz1", ")", "d", "=", "np", ".", "where", "(", "d", ">", "0.5", "*", "self", ".", "periodicity", ",", "self", ".", "periodicity", "-", "d", ",", "d", ")", "return", "np", ".", "sqrt", "(", "(", "d", "**", "2", ")", ".", "sum", "(", "axis", "=", "-", "1", ")", ")" ]
Default initialization from a dictionary responded by Mambu
def init ( self , attrs = { } , * args , * * kwargs ) : self . attrs = attrs self . preprocess ( ) self . convertDict2Attrs ( * args , * * kwargs ) self . postprocess ( ) try : for meth in kwargs [ 'methods' ] : try : getattr ( self , meth ) ( ) except Exception : pass except Exception : pass try : for propname , propval in kwargs [ 'properties' ] . items ( ) : setattr ( self , propname , propval ) except Exception : pass
10,750
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambustruct.py#L299-L355
[ "def", "rate", "(", "self", ")", ":", "return", "float", "(", "self", ".", "_eta", ".", "rate_unstable", "if", "self", ".", "eta_every", ">", "1", "else", "self", ".", "_eta", ".", "rate", ")" ]
Connect to Mambu make the request to the REST API .
def connect ( self , * args , * * kwargs ) : from copy import deepcopy if args : self . __args = deepcopy ( args ) if kwargs : for k , v in kwargs . items ( ) : self . __kwargs [ k ] = deepcopy ( v ) jsresp = { } if not self . __urlfunc : return # Pagination window, Mambu restricts at most 500 elements in response offset = self . __offset window = True jsresp = { } while window : if not self . __limit or self . __limit > OUT_OF_BOUNDS_PAGINATION_LIMIT_VALUE : limit = OUT_OF_BOUNDS_PAGINATION_LIMIT_VALUE else : limit = self . __limit # Retry mechanism, for awful connections retries = 0 while retries < MambuStruct . RETRIES : try : # Basic authentication user = self . __kwargs . get ( 'user' , apiuser ) pwd = self . __kwargs . get ( 'pwd' , apipwd ) if self . __data : headers = { 'content-type' : 'application/json' } data = json . dumps ( encoded_dict ( self . __data ) ) url = iriToUri ( self . __urlfunc ( self . entid , limit = limit , offset = offset , * self . __args , * * self . __kwargs ) ) # PATCH if self . __method == "PATCH" : resp = requests . patch ( url , data = data , headers = headers , auth = ( user , pwd ) ) # POST else : resp = requests . post ( url , data = data , headers = headers , auth = ( user , pwd ) ) # GET else : url = iriToUri ( self . __urlfunc ( self . entid , limit = limit , offset = offset , * self . __args , * * self . __kwargs ) ) resp = requests . get ( url , auth = ( user , pwd ) ) # Always count a new request when done! self . rc . add ( datetime . now ( ) ) try : jsonresp = json . loads ( resp . content ) # Returns list: extend list for offset if type ( jsonresp ) == list : try : jsresp . 
extend ( jsonresp ) except AttributeError : # First window, forget that jsresp was a dict, turn it in to a list jsresp = jsonresp if len ( jsonresp ) < limit : window = False # Returns dict: in theory Mambu REST API doesn't takes limit/offset in to account else : jsresp = jsonresp window = False except ValueError as ex : # json.loads invalid data argument raise ex except Exception as ex : # any other json error raise MambuError ( "JSON Error: %s" % repr ( ex ) ) # if we reach here, we're done and safe break except MambuError as merr : raise merr except requests . exceptions . RequestException : retries += 1 except Exception as ex : raise ex else : raise MambuCommError ( "ERROR I can't communicate with Mambu" ) # next window, moving offset... offset = offset + limit if self . __limit : self . __limit -= limit if self . __limit <= 0 : window = False self . __limit = self . __inilimit try : if u'returnCode' in jsresp and u'returnStatus' in jsresp and jsresp [ u'returnCode' ] != 0 : raise MambuError ( jsresp [ u'returnStatus' ] ) except AttributeError : pass if self . __method != "PATCH" : self . init ( attrs = jsresp , * self . __args , * * self . __kwargs )
10,751
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambustruct.py#L516-L664
[ "def", "get_correlation_table", "(", "self", ",", "chain", "=", "0", ",", "parameters", "=", "None", ",", "caption", "=", "\"Parameter Correlations\"", ",", "label", "=", "\"tab:parameter_correlations\"", ")", ":", "parameters", ",", "cor", "=", "self", ".", "get_correlations", "(", "chain", "=", "chain", ",", "parameters", "=", "parameters", ")", "return", "self", ".", "_get_2d_latex_table", "(", "parameters", ",", "cor", ",", "caption", ",", "label", ")" ]
Each element on the atttrs attribute gest converted to a proper python object depending on type .
def convertDict2Attrs ( self , * args , * * kwargs ) : constantFields = [ 'id' , 'groupName' , 'name' , 'homePhone' , 'mobilePhone1' , 'phoneNumber' , 'postcode' , 'emailAddress' ] def convierte ( data ) : """Recursively convert the fields on the data given to a python object.""" # Iterators, lists and dictionaries # Here comes the recursive calls! try : it = iter ( data ) if type ( it ) == type ( iter ( { } ) ) : d = { } for k in it : if k in constantFields : d [ k ] = data [ k ] else : d [ k ] = convierte ( data [ k ] ) data = d if type ( it ) == type ( iter ( [ ] ) ) : l = [ ] for e in it : l . append ( convierte ( e ) ) data = l except TypeError as terr : pass except Exception as ex : raise ex # Python built-in types: ints, floats, or even datetimes. If it # cannot convert it to a built-in type, leave it as string, or # as-is. There may be nested Mambu objects here! # This are the recursion base cases! try : d = int ( data ) if str ( d ) != data : # if string has trailing 0's, leave it as string, to not lose them return data return d except ( TypeError , ValueError ) as tverr : try : return float ( data ) except ( TypeError , ValueError ) as tverr : try : return self . util_dateFormat ( data ) except ( TypeError , ValueError ) as tverr : return data return data self . attrs = convierte ( self . attrs )
10,752
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambustruct.py#L739-L791
[ "def", "get_registered_configs", "(", "self", ",", "instances", "=", "None", ")", ":", "configs", "=", "self", ".", "state", ".", "get", "(", "'config_files'", ",", "{", "}", ")", "if", "instances", "is", "not", "None", ":", "for", "config_file", ",", "config", "in", "configs", ".", "items", "(", ")", ":", "if", "config", "[", "'instance_name'", "]", "not", "in", "instances", ":", "configs", ".", "pop", "(", "config_file", ")", "return", "configs" ]
Converts a datetime field to a datetime using some specified format .
def util_dateFormat ( self , field , formato = None ) : if not formato : try : formato = self . __formatoFecha except AttributeError : formato = "%Y-%m-%dT%H:%M:%S+0000" return datetime . strptime ( datetime . strptime ( field , "%Y-%m-%dT%H:%M:%S+0000" ) . strftime ( formato ) , formato )
10,753
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambustruct.py#L793-L814
[ "def", "undefine", "(", "self", ")", ":", "if", "lib", ".", "EnvUndefglobal", "(", "self", ".", "_env", ",", "self", ".", "_glb", ")", "!=", "1", ":", "raise", "CLIPSError", "(", "self", ".", "_env", ")", "self", ".", "_env", "=", "None" ]
Creates an entity in Mambu
def create ( self , data , * args , * * kwargs ) : # if module of the function is diferent from the module of the object # that means create is not implemented in child class if self . create . __func__ . __module__ != self . __module__ : raise Exception ( "Child method not implemented" ) self . _MambuStruct__method = "POST" self . _MambuStruct__data = data self . connect ( * args , * * kwargs ) self . _MambuStruct__method = "GET" self . _MambuStruct__data = None
10,754
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambustruct.py#L816-L834
[ "def", "comparelist_view", "(", "self", ",", "request", ",", "object_id", ",", "extra_context", "=", "None", ")", ":", "opts", "=", "self", ".", "model", ".", "_meta", "object_id", "=", "unquote", "(", "object_id", ")", "current", "=", "get_object_or_404", "(", "self", ".", "model", ",", "pk", "=", "object_id", ")", "# As done by reversion's history_view", "action_list", "=", "[", "{", "\"revision\"", ":", "version", ".", "revision", ",", "\"url\"", ":", "reverse", "(", "\"%s:%s_%s_compare\"", "%", "(", "self", ".", "admin_site", ".", "name", ",", "opts", ".", "app_label", ",", "opts", ".", "model_name", ")", ",", "args", "=", "(", "quote", "(", "version", ".", "object_id", ")", ",", "version", ".", "id", ")", ")", ",", "}", "for", "version", "in", "self", ".", "_reversion_order_version_queryset", "(", "Version", ".", "objects", ".", "get_for_object_reference", "(", "self", ".", "model", ",", "object_id", ")", ".", "select_related", "(", "\"revision__user\"", ")", ")", "]", "context", "=", "{", "\"action_list\"", ":", "action_list", ",", "\"opts\"", ":", "opts", ",", "\"object_id\"", ":", "quote", "(", "object_id", ")", ",", "\"original\"", ":", "current", ",", "}", "extra_context", "=", "extra_context", "or", "{", "}", "context", ".", "update", "(", "extra_context", ")", "return", "render", "(", "request", ",", "self", ".", "compare_list_template", "or", "self", ".", "_get_template_list", "(", "\"compare_list.html\"", ")", ",", "context", ")" ]
Runs bcp FORMAT command to create a format file that will assist in creating the bulk data file
def make ( self , cmd_args , db_args ) : with NamedTemporaryFile ( delete = True ) as f : format_file = f . name + '.bcp-format' format_args = cmd_args + [ 'format' , NULL_FILE , '-c' , '-f' , format_file , '-t,' ] + db_args _run_cmd ( format_args ) self . load ( format_file ) return format_file
10,755
https://github.com/ExoticObjects/django-sql-server-bcp/blob/3bfc593a18091cf837a9c31cbbe7025ecc5e3226/django_sql_server_bcp/__init__.py#L110-L119
[ "def", "stop", "(", "self", ",", "wait", "=", "True", ")", ":", "logger", ".", "info", "(", "\"Primitive %s stopped.\"", ",", "self", ")", "StoppableThread", ".", "stop", "(", "self", ",", "wait", ")" ]
Reads a non - XML bcp FORMAT file and parses it into fields list used for creating bulk data file
def load ( self , filename = None ) : fields = [ ] with open ( filename , 'r' ) as f : format_data = f . read ( ) . strip ( ) lines = format_data . split ( '\n' ) self . _sql_version = lines . pop ( 0 ) self . _num_fields = int ( lines . pop ( 0 ) ) for line in lines : # Get rid of mulitple spaces line = re . sub ( ' +' , ' ' , line . strip ( ) ) row_format = BCPFormatRow ( line . split ( ' ' ) ) fields . append ( row_format ) self . fields = fields self . filename = filename
10,756
https://github.com/ExoticObjects/django-sql-server-bcp/blob/3bfc593a18091cf837a9c31cbbe7025ecc5e3226/django_sql_server_bcp/__init__.py#L121-L140
[ "def", "unregister_vm", "(", "vm_ref", ")", ":", "vm_name", "=", "get_managed_object_name", "(", "vm_ref", ")", "log", ".", "trace", "(", "'Destroying vm \\'%s\\''", ",", "vm_name", ")", "try", ":", "vm_ref", ".", "UnregisterVM", "(", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")" ]
Retrieve the content of a resource .
def retrieve_content ( self ) : path = self . _construct_path_to_source_content ( ) res = self . _http . get ( path ) self . _populated_fields [ 'content' ] = res [ 'content' ] return res [ 'content' ]
10,757
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/resources.py#L27-L32
[ "def", "delete_user", "(", "self", ",", "username", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'username'", ":", "username", ",", "}", "api_path", "=", "'/v1/auth/{mount_point}/users/{username}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "username", "=", "username", ",", ")", "return", "self", ".", "_adapter", ".", "delete", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")" ]
Use separate URL for updating the source file .
def _update ( self , * * kwargs ) : if 'content' in kwargs : content = kwargs . pop ( 'content' ) path = self . _construct_path_to_source_content ( ) self . _http . put ( path , json . dumps ( { 'content' : content } ) ) super ( Resource , self ) . _update ( * * kwargs )
10,758
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/resources.py#L34-L40
[ "def", "_get_partition_info", "(", "storage_system", ",", "device_path", ")", ":", "try", ":", "partition_infos", "=", "storage_system", ".", "RetrieveDiskPartitionInfo", "(", "devicePath", "=", "[", "device_path", "]", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "log", ".", "trace", "(", "'partition_info = %s'", ",", "partition_infos", "[", "0", "]", ")", "return", "partition_infos", "[", "0", "]" ]
clean the dis and uninstall cloudmesh
def all ( ) : dir ( ) cmd3 ( ) banner ( "CLEAN PREVIOUS CLOUDMESH INSTALLS" ) r = int ( local ( "pip freeze |fgrep cloudmesh | wc -l" , capture = True ) ) while r > 0 : local ( 'echo "y\n" | pip uninstall cloudmesh' ) r = int ( local ( "pip freeze |fgrep cloudmesh | wc -l" , capture = True ) )
10,759
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/fabfile/clean.py#L24-L32
[ "def", "overview", "(", "index", ",", "start", ",", "end", ")", ":", "results", "=", "{", "\"activity_metrics\"", ":", "[", "Commits", "(", "index", ",", "start", ",", "end", ")", "]", ",", "\"author_metrics\"", ":", "[", "Authors", "(", "index", ",", "start", ",", "end", ")", "]", ",", "\"bmi_metrics\"", ":", "[", "]", ",", "\"time_to_close_metrics\"", ":", "[", "]", ",", "\"projects_metrics\"", ":", "[", "]", "}", "return", "results" ]
This method should return an iterable containing matches of this element .
def find ( cls , text ) : if isinstance ( cls . pattern , string_types ) : cls . pattern = re . compile ( cls . pattern ) return cls . pattern . finditer ( text )
10,760
https://github.com/frostming/marko/blob/1cd030b665fa37bad1f8b3a25a89ce1a7c491dde/marko/inline.py#L39-L43
[ "async", "def", "update_lease_async", "(", "self", ",", "lease", ")", ":", "if", "lease", "is", "None", ":", "return", "False", "if", "not", "lease", ".", "token", ":", "return", "False", "_logger", ".", "debug", "(", "\"Updating lease %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ")", "# First, renew the lease to make sure the update will go through.", "if", "await", "self", ".", "renew_lease_async", "(", "lease", ")", ":", "try", ":", "await", "self", ".", "host", ".", "loop", ".", "run_in_executor", "(", "self", ".", "executor", ",", "functools", ".", "partial", "(", "self", ".", "storage_client", ".", "create_blob_from_text", ",", "self", ".", "lease_container_name", ",", "lease", ".", "partition_id", ",", "json", ".", "dumps", "(", "lease", ".", "serializable", "(", ")", ")", ",", "lease_id", "=", "lease", ".", "token", ")", ")", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_logger", ".", "error", "(", "\"Failed to update lease %r %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ",", "err", ")", "raise", "err", "else", ":", "return", "False", "return", "True" ]
Entry point for running crony .
def main ( ) : parser = argparse . ArgumentParser ( description = 'Monitor your crons with cronitor.io & sentry.io' , epilog = 'https://github.com/youversion/crony' , prog = 'crony' ) parser . add_argument ( '-c' , '--cronitor' , action = 'store' , help = 'Cronitor link identifier. This can be found in your Cronitor unique' ' ping URL right after https://cronitor.link/' ) parser . add_argument ( '-e' , '--venv' , action = 'store' , help = 'Path to virtualenv to source before running script. May be passed' ' as an argument or loaded from an environment variable or config file.' ) parser . add_argument ( '-d' , '--cd' , action = 'store' , help = 'If the script needs ran in a specific directory, than can be passed' ' or cd can be ran prior to running crony.' ) parser . add_argument ( '-l' , '--log' , action = 'store' , help = 'Log file to direct stdout of script run to. Can be passed or ' 'defined in config file with "log_file"' ) parser . add_argument ( '-o' , '--config' , action = 'store' , help = 'Path to a crony config file to use.' ) parser . add_argument ( '-p' , '--path' , action = 'store' , help = 'Paths to append to the PATH environment variable before running. ' ' Can be passed as an argument or loaded from config file.' ) parser . add_argument ( '-s' , '--dsn' , action = 'store' , help = 'Sentry DSN. May be passed or loaded from an environment variable ' 'or a config file.' ) parser . add_argument ( '-t' , '--timeout' , action = 'store' , default = 10 , help = 'Timeout to use when' ' sending requests to Cronitor' , type = int ) parser . add_argument ( '-v' , '--verbose' , action = 'store_true' , help = 'Increase level of verbosity' ' output by crony' ) parser . add_argument ( '--version' , action = 'store_true' , help = 'Output crony version # and exit' ) parser . add_argument ( 'cmd' , nargs = argparse . REMAINDER , help = 'Command to run and monitor' ) cc = CommandCenter ( parser . parse_args ( ) ) sys . exit ( cc . log ( * cc . func ( ) ) )
10,761
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L232-L290
[ "def", "_get_annotation_entries_from_data", "(", "self", ",", "graph", ":", "BELGraph", ",", "data", ":", "EdgeData", ")", "->", "Optional", "[", "List", "[", "NamespaceEntry", "]", "]", ":", "annotations_dict", "=", "data", ".", "get", "(", "ANNOTATIONS", ")", "if", "annotations_dict", "is", "not", "None", ":", "return", "[", "entry", "for", "url", ",", "names", "in", "self", ".", "_iter_from_annotations_dict", "(", "graph", ",", "annotations_dict", "=", "annotations_dict", ")", "for", "entry", "in", "self", ".", "get_annotation_entries_by_names", "(", "url", ",", "names", ")", "]" ]
Wrap run with requests to cronitor .
def cronitor ( self ) : url = f'https://cronitor.link/{self.opts.cronitor}/{{}}' try : run_url = url . format ( 'run' ) self . logger . debug ( f'Pinging {run_url}' ) requests . get ( run_url , timeout = self . opts . timeout ) except requests . exceptions . RequestException as e : self . logger . exception ( e ) # Cronitor may be having an outage, but we still want to run our stuff output , exit_status = self . run ( ) endpoint = 'complete' if exit_status == 0 else 'fail' try : ping_url = url . format ( endpoint ) self . logger . debug ( 'Pinging {}' . format ( ping_url ) ) requests . get ( ping_url , timeout = self . opts . timeout ) except requests . exceptions . RequestException as e : self . logger . exception ( e ) return output , exit_status
10,762
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L76-L100
[ "def", "destroy_volume_snapshot", "(", "volume_id", ",", "snapshot_id", ",", "profile", ",", "*", "*", "libcloud_kwargs", ")", ":", "conn", "=", "_get_driver", "(", "profile", "=", "profile", ")", "libcloud_kwargs", "=", "salt", ".", "utils", ".", "args", ".", "clean_kwargs", "(", "*", "*", "libcloud_kwargs", ")", "volume", "=", "_get_by_id", "(", "conn", ".", "list_volumes", "(", ")", ",", "volume_id", ")", "snapshot", "=", "_get_by_id", "(", "conn", ".", "list_volume_snapshots", "(", "volume", ")", ",", "snapshot_id", ")", "return", "conn", ".", "destroy_volume_snapshot", "(", "snapshot", ",", "*", "*", "libcloud_kwargs", ")" ]
Attempt to load config from file .
def load_config ( self , custom_config ) : self . config = configparser . ConfigParser ( ) if custom_config : self . config . read ( custom_config ) return f'Loading config from file {custom_config}.' home = os . path . expanduser ( '~{}' . format ( getpass . getuser ( ) ) ) home_conf_file = os . path . join ( home , '.cronyrc' ) system_conf_file = '/etc/crony.conf' conf_precedence = ( home_conf_file , system_conf_file ) for conf_file in conf_precedence : if os . path . exists ( conf_file ) : self . config . read ( conf_file ) return f'Loading config from file {conf_file}.' self . config [ 'crony' ] = { } return 'No config file found.'
10,763
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L102-L127
[ "def", "Nu_vertical_cylinder", "(", "Pr", ",", "Gr", ",", "L", "=", "None", ",", "D", "=", "None", ",", "Method", "=", "None", ",", "AvailableMethods", "=", "False", ")", ":", "def", "list_methods", "(", ")", ":", "methods", "=", "[", "]", "for", "key", ",", "values", "in", "vertical_cylinder_correlations", ".", "items", "(", ")", ":", "if", "values", "[", "4", "]", "or", "all", "(", "(", "L", ",", "D", ")", ")", ":", "methods", ".", "append", "(", "key", ")", "if", "'Popiel & Churchill'", "in", "methods", ":", "methods", ".", "remove", "(", "'Popiel & Churchill'", ")", "methods", ".", "insert", "(", "0", ",", "'Popiel & Churchill'", ")", "elif", "'McAdams, Weiss & Saunders'", "in", "methods", ":", "methods", ".", "remove", "(", "'McAdams, Weiss & Saunders'", ")", "methods", ".", "insert", "(", "0", ",", "'McAdams, Weiss & Saunders'", ")", "return", "methods", "if", "AvailableMethods", ":", "return", "list_methods", "(", ")", "if", "not", "Method", ":", "Method", "=", "list_methods", "(", ")", "[", "0", "]", "if", "Method", "in", "vertical_cylinder_correlations", ":", "if", "vertical_cylinder_correlations", "[", "Method", "]", "[", "4", "]", ":", "return", "vertical_cylinder_correlations", "[", "Method", "]", "[", "0", "]", "(", "Pr", "=", "Pr", ",", "Gr", "=", "Gr", ")", "else", ":", "return", "vertical_cylinder_correlations", "[", "Method", "]", "[", "0", "]", "(", "Pr", "=", "Pr", ",", "Gr", "=", "Gr", ",", "L", "=", "L", ",", "D", "=", "D", ")", "else", ":", "raise", "Exception", "(", "\"Correlation name not recognized; see the \"", "\"documentation for the available options.\"", ")" ]
Log given CompletedProcess and return exit status code .
def log ( self , output , exit_status ) : if exit_status != 0 : self . logger . error ( f'Error running command! Exit status: {exit_status}, {output}' ) return exit_status
10,764
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L129-L134
[ "def", "connection_cache", "(", "func", ":", "callable", ")", ":", "cache", "=", "dict", "(", ")", "lock", "=", "RLock", "(", ")", "@", "wraps", "(", "func", ")", "def", "func_wrapper", "(", "host", ":", "str", ",", "username", ":", "str", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "key", "=", "\"{h}-{u}\"", ".", "format", "(", "h", "=", "host", ",", "u", "=", "username", ")", "if", "key", "in", "cache", ":", "# connection exists, check if it is still valid before", "# returning it.", "conn", "=", "cache", "[", "key", "]", "if", "conn", "and", "conn", ".", "is_active", "(", ")", "and", "conn", ".", "is_authenticated", "(", ")", ":", "return", "conn", "else", ":", "# try to close a bad connection and remove it from", "# the cache.", "if", "conn", ":", "try_close", "(", "conn", ")", "del", "cache", "[", "key", "]", "# key is not in the cache, so try to recreate it", "# it may have been removed just above.", "if", "key", "not", "in", "cache", ":", "conn", "=", "func", "(", "host", ",", "username", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "conn", "is", "not", "None", ":", "cache", "[", "key", "]", "=", "conn", "return", "conn", "# not sure how to reach this point, but just in case.", "return", "None", "def", "get_cache", "(", ")", "->", "dict", ":", "return", "cache", "def", "purge", "(", "key", ":", "str", "=", "None", ")", ":", "with", "lock", ":", "if", "key", "is", "None", ":", "conns", "=", "[", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "cache", ".", "items", "(", ")", "]", "elif", "key", "in", "cache", ":", "conns", "=", "(", "(", "key", ",", "cache", "[", "key", "]", ")", ",", ")", "else", ":", "conns", "=", "list", "(", ")", "for", "k", ",", "v", "in", "conns", ":", "try_close", "(", "v", ")", "del", "cache", "[", "k", "]", "func_wrapper", ".", "get_cache", "=", "get_cache", "func_wrapper", ".", "purge", "=", "purge", "return", "func_wrapper" ]
Run command and report errors to Sentry .
def run ( self ) : self . logger . debug ( f'Running command: {self.cmd}' ) def execute ( cmd ) : output = "" popen = subprocess . Popen ( cmd , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , universal_newlines = True , shell = True ) for stdout_line in iter ( popen . stdout . readline , "" ) : stdout_line = stdout_line . strip ( '\n' ) output += stdout_line yield stdout_line popen . stdout . close ( ) return_code = popen . wait ( ) if return_code : raise subprocess . CalledProcessError ( return_code , cmd , output ) try : for out in execute ( self . cmd ) : self . logger . info ( out ) return "" , 0 except subprocess . CalledProcessError as e : return e . output , e . returncode
10,765
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L136-L158
[ "def", "DbGetClassPropertyList", "(", "self", ",", "argin", ")", ":", "self", ".", "_log", ".", "debug", "(", "\"In DbGetClassPropertyList()\"", ")", "if", "not", "argin", ":", "argin", "=", "\"%\"", "else", ":", "argin", "=", "replace_wildcard", "(", "argin", ")", "return", "self", ".", "db", ".", "get_class_property_list", "(", "argin", ")" ]
Change directory for script if necessary .
def setup_dir ( self ) : cd = self . opts . cd or self . config [ 'crony' ] . get ( 'directory' ) if cd : self . logger . debug ( f'Adding cd to {cd}' ) self . cmd = f'cd {cd} && {self.cmd}'
10,766
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L160-L165
[ "def", "delete_topic_groups", "(", "self", ",", "group_id", ",", "topic_id", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - group_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"group_id\"", "]", "=", "group_id", "# REQUIRED - PATH - topic_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"topic_id\"", "]", "=", "topic_id", "self", ".", "logger", ".", "debug", "(", "\"DELETE /api/v1/groups/{group_id}/discussion_topics/{topic_id} with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"DELETE\"", ",", "\"/api/v1/groups/{group_id}/discussion_topics/{topic_id}\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "no_data", "=", "True", ")" ]
Setup python logging handler .
def setup_logging ( self ) : date_format = '%Y-%m-%dT%H:%M:%S' log_format = '%(asctime)s %(levelname)s: %(message)s' if self . opts . verbose : lvl = logging . DEBUG else : lvl = logging . INFO # Requests is a bit chatty logging . getLogger ( 'requests' ) . setLevel ( 'WARNING' ) self . logger . setLevel ( lvl ) stdout = logging . StreamHandler ( sys . stdout ) stdout . setLevel ( lvl ) formatter = logging . Formatter ( log_format , date_format ) stdout . setFormatter ( formatter ) self . logger . addHandler ( stdout ) # Decided not to use stderr # stderr = logging.StreamHandler(sys.stderr) # stderr.setLevel(logging.ERROR) # Error and above go to both stdout & stderr # formatter = logging.Formatter(log_format, date_format) # stderr.setFormatter(formatter) # self.logger.addHandler(stderr) log = self . opts . log or self . config [ 'crony' ] . get ( 'log_file' ) if log : logfile = logging . FileHandler ( log ) logfile . setLevel ( lvl ) formatter = logging . Formatter ( log_format , date_format ) logfile . setFormatter ( formatter ) self . logger . addHandler ( logfile ) if self . sentry_client : sentry = SentryHandler ( self . sentry_client ) sentry . setLevel ( logging . ERROR ) self . logger . addHandler ( sentry ) self . logger . debug ( 'Logging setup complete.' )
10,767
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L167-L207
[ "def", "parse_torrent_properties", "(", "table_datas", ")", ":", "output", "=", "{", "'category'", ":", "table_datas", "[", "0", "]", ".", "text", ",", "'subcategory'", ":", "None", ",", "'quality'", ":", "None", ",", "'language'", ":", "None", "}", "for", "i", "in", "range", "(", "1", ",", "len", "(", "table_datas", ")", ")", ":", "td", "=", "table_datas", "[", "i", "]", "url", "=", "td", ".", "get", "(", "'href'", ")", "params", "=", "Parser", ".", "get_params", "(", "url", ")", "if", "Parser", ".", "is_subcategory", "(", "params", ")", "and", "not", "output", "[", "'subcategory'", "]", ":", "output", "[", "'subcategory'", "]", "=", "td", ".", "text", "elif", "Parser", ".", "is_quality", "(", "params", ")", "and", "not", "output", "[", "'quality'", "]", ":", "output", "[", "'quality'", "]", "=", "td", ".", "text", "elif", "Parser", ".", "is_language", "(", "params", ")", "and", "not", "output", "[", "'language'", "]", ":", "output", "[", "'language'", "]", "=", "td", ".", "text", "return", "output" ]
Setup PATH env var if necessary .
def setup_path ( self ) : path = self . opts . path or self . config [ 'crony' ] . get ( 'path' ) if path : self . logger . debug ( f'Adding {path} to PATH environment variable' ) self . cmd = f'export PATH={path}:$PATH && {self.cmd}'
10,768
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L209-L214
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Setup virtualenv if necessary .
def setup_venv ( self ) : venv = self . opts . venv if not venv : venv = os . environ . get ( 'CRONY_VENV' ) if not venv and self . config [ 'crony' ] : venv = self . config [ 'crony' ] . get ( 'venv' ) if venv : if not venv . endswith ( 'activate' ) : add_path = os . path . join ( 'bin' , 'activate' ) self . logger . debug ( f'Venv directory given, adding {add_path}' ) venv = os . path . join ( venv , add_path ) self . logger . debug ( f'Adding sourcing virtualenv {venv}' ) self . cmd = f'. {venv} && {self.cmd}'
10,769
https://github.com/youversion/crony/blob/c93d14b809a2e878f1b9d6d53d5a04947896583b/crony/crony.py#L216-L229
[ "def", "remove_from_category", "(", "self", ",", "category", ")", ":", "ctype", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "self", ")", "self", ".", "categories", ".", "model", ".", "objects", ".", "filter", "(", "category", "=", "category", ",", "content_type", "=", "ctype", ",", "object_id", "=", "self", ".", "id", ")", ".", "delete", "(", ")" ]
Returns list of found branches .
def get_repos ( path ) : p = str ( path ) ret = [ ] if not os . path . exists ( p ) : return ret for d in os . listdir ( p ) : pd = os . path . join ( p , d ) if os . path . exists ( pd ) and is_repo ( pd ) : ret . append ( Local ( pd ) ) return ret
10,770
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L32-L46
[ "def", "read_avro", "(", "file_path_or_buffer", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "file_path_or_buffer", ",", "six", ".", "string_types", ")", ":", "with", "open", "(", "file_path_or_buffer", ",", "'rb'", ")", "as", "f", ":", "return", "__file_to_dataframe", "(", "f", ",", "schema", ",", "*", "*", "kwargs", ")", "else", ":", "return", "__file_to_dataframe", "(", "file_path_or_buffer", ",", "schema", ",", "*", "*", "kwargs", ")" ]
Returns parent repo or input path if none found .
def get_repo_parent ( path ) : # path is a repository if is_repo ( path ) : return Local ( path ) # path is inside a repository elif not os . path . isdir ( path ) : _rel = '' while path and path != '/' : if is_repo ( path ) : return Local ( path ) else : _rel = os . path . join ( os . path . basename ( path ) , _rel ) path = os . path . dirname ( path ) return path
10,771
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L48-L67
[ "def", "_mmUpdateDutyCycles", "(", "self", ")", ":", "period", "=", "self", ".", "getDutyCyclePeriod", "(", ")", "unionSDRArray", "=", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ")", "unionSDRArray", "[", "list", "(", "self", ".", "_mmTraces", "[", "\"unionSDR\"", "]", ".", "data", "[", "-", "1", "]", ")", "]", "=", "1", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", ",", "unionSDRArray", ",", "period", ")", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", ",", "self", ".", "_poolingActivation", ",", "period", ")" ]
Checkout a version of the repo .
def setVersion ( self , version ) : try : sha = self . versions ( version ) . commit . sha self . git . reset ( "--hard" , sha ) except Exception , e : raise RepoError ( e )
10,772
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L159-L169
[ "def", "get_user_permissions", "(", "user", ")", ":", "permissions", "=", "SeedPermission", ".", "objects", ".", "all", "(", ")", "# User must be on a team that grants the permission", "permissions", "=", "permissions", ".", "filter", "(", "seedteam__users", "=", "user", ")", "# The team must be active", "permissions", "=", "permissions", ".", "filter", "(", "seedteam__archived", "=", "False", ")", "# The organization of that team must be active", "permissions", "=", "permissions", ".", "filter", "(", "seedteam__organization__archived", "=", "False", ")", "return", "permissions" ]
Returns a list of the commits reachable from head .
def _commits ( self , head = 'HEAD' ) : pending_commits = [ head ] history = [ ] while pending_commits != [ ] : head = pending_commits . pop ( 0 ) try : commit = self [ head ] except KeyError : raise KeyError ( head ) if type ( commit ) != Commit : raise TypeError ( commit ) if commit in history : continue i = 0 for known_commit in history : if known_commit . commit_time > commit . commit_time : break i += 1 history . insert ( i , commit ) pending_commits += commit . parents return history
10,773
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L171-L199
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
List of Versions of this repository .
def versions ( self , version = None ) : try : versions = [ Version ( self , c ) for c in self . _commits ( ) ] except Exception , e : log . debug ( 'No versions exist' ) return [ ] if version is not None and versions : try : versions = versions [ version ] except IndexError : raise VersionError ( 'Version %s does not exist' % version ) return versions
10,774
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L201-L220
[ "def", "is_valid", "(", "self", ")", "->", "bool", ":", "if", "self", ".", "bot", ".", "config", ".", "SESSION_EXPIRE_TIMEOUT", "and", "self", ".", "_last_interaction", "and", "datetime", ".", "now", "(", ")", "-", "self", ".", "_last_interaction", ">", "self", ".", "bot", ".", "config", ".", "SESSION_EXPIRE_TIMEOUT", ":", "return", "False", "return", "True" ]
sets repository description
def setDescription ( self , desc = 'No description' ) : try : self . _put_named_file ( 'description' , desc ) except Exception , e : raise RepoError ( e )
10,775
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L229-L234
[ "def", "_assign_numbers", "(", "self", ")", ":", "first", "=", "self", ".", "select_related", "(", "'point_of_sales'", ",", "'receipt_type'", ")", ".", "first", "(", ")", "next_num", "=", "Receipt", ".", "objects", ".", "fetch_last_receipt_number", "(", "first", ".", "point_of_sales", ",", "first", ".", "receipt_type", ",", ")", "+", "1", "for", "receipt", "in", "self", ".", "filter", "(", "receipt_number__isnull", "=", "True", ")", ":", "# Atomically update receipt number", "Receipt", ".", "objects", ".", "filter", "(", "pk", "=", "receipt", ".", "id", ",", "receipt_number__isnull", "=", "True", ",", ")", ".", "update", "(", "receipt_number", "=", "next_num", ",", ")", "next_num", "+=", "1" ]
Create a new bare repo . Local instance .
def new ( self , path , desc = None , bare = True ) : if os . path . exists ( path ) : raise RepoError ( 'Path already exists: %s' % path ) try : os . mkdir ( path ) if bare : Repo . init_bare ( path ) else : Repo . init ( path ) repo = Local ( path ) if desc : repo . setDescription ( desc ) version = repo . addVersion ( ) version . save ( 'Repo Initialization' ) return repo except Exception , e : traceback . print_exc ( ) raise RepoError ( 'Error creating repo' )
10,776
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L246-L272
[ "def", "rest_action", "(", "self", ",", "func", ",", "url", ",", "*", "*", "kwargs", ")", ":", "try", ":", "response", "=", "func", "(", "url", ",", "timeout", "=", "self", ".", "TIMEOUT", ",", "*", "*", "kwargs", ")", "except", "requests", ".", "RequestException", ",", "err", ":", "log", ".", "exception", "(", "\"[PyLmod] Error - connection error in \"", "\"rest_action, err=%s\"", ",", "err", ")", "raise", "err", "try", ":", "return", "response", ".", "json", "(", ")", "except", "ValueError", ",", "err", ":", "log", ".", "exception", "(", "'Unable to decode %s'", ",", "response", ".", "content", ")", "raise", "err" ]
Create a branch of this repo at name .
def branch ( self , name , desc = None ) : return Local . new ( path = os . path . join ( self . path , name ) , desc = desc , bare = True )
10,777
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L274-L283
[ "def", "port_list_compress", "(", "port_list", ")", ":", "if", "not", "port_list", "or", "len", "(", "port_list", ")", "==", "0", ":", "LOGGER", ".", "info", "(", "\"Invalid or empty port list.\"", ")", "return", "''", "port_list", "=", "sorted", "(", "set", "(", "port_list", ")", ")", "compressed_list", "=", "[", "]", "for", "key", ",", "group", "in", "itertools", ".", "groupby", "(", "enumerate", "(", "port_list", ")", ",", "lambda", "t", ":", "t", "[", "1", "]", "-", "t", "[", "0", "]", ")", ":", "group", "=", "list", "(", "group", ")", "if", "group", "[", "0", "]", "[", "1", "]", "==", "group", "[", "-", "1", "]", "[", "1", "]", ":", "compressed_list", ".", "append", "(", "str", "(", "group", "[", "0", "]", "[", "1", "]", ")", ")", "else", ":", "compressed_list", ".", "append", "(", "str", "(", "group", "[", "0", "]", "[", "1", "]", ")", "+", "'-'", "+", "str", "(", "group", "[", "-", "1", "]", "[", "1", "]", ")", ")", "return", "','", ".", "join", "(", "compressed_list", ")" ]
add a new Item class object
def addItem ( self , item , message = None ) : if message is None : message = 'Adding item %s' % item . path try : v = Version . new ( repo = self ) v . addItem ( item ) v . save ( message ) except VersionError , e : raise RepoError ( e )
10,778
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L290-L299
[ "def", "get_program", "(", "self", ",", "n", ",", "timeout", "=", "2.0", ",", "max_retries", "=", "2", ")", ":", "# Send the 'TPROG PROGn' command to read the program.", "response", "=", "self", ".", "driver", ".", "send_command", "(", "'TPROG PROG'", "+", "str", "(", "int", "(", "n", ")", ")", ",", "timeout", "=", "timeout", ",", "immediate", "=", "True", ",", "max_retries", "=", "max_retries", ")", "# If there was an error, then return empty. Otherwise, return", "# the response lines but strip the leading '*' first and the", "# 'END' at the end of the list.", "if", "self", ".", "driver", ".", "command_error", "(", "response", ")", "or", "len", "(", "response", "[", "4", "]", ")", "==", "0", ":", "return", "[", "]", "else", ":", "if", "'*END'", "in", "response", "[", "4", "]", ":", "response", "[", "4", "]", ".", "remove", "(", "'*END'", ")", "return", "[", "line", "[", "1", ":", "]", "for", "line", "in", "response", "[", "4", "]", "]" ]
Returns a list of items .
def items ( self , path = None , version = None ) : if version is None : version = - 1 items = { } for item in self . versions ( version ) . items ( ) : items [ item . path ] = item parent = self . parent # get latest committed items from parents while parent : for item in parent . items ( path = path ) : if item . path not in items . keys ( ) : items [ item . path ] = item parent = parent . parent # filter items matching path regex if path is not None : path += '$' regex = re . compile ( path ) return [ item for path , item in items . items ( ) if regex . match ( path ) ] else : return items . values ( )
10,779
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/local.py#L305-L334
[ "def", "_start_vibration_win", "(", "self", ",", "left_motor", ",", "right_motor", ")", ":", "xinput_set_state", "=", "self", ".", "manager", ".", "xinput", ".", "XInputSetState", "xinput_set_state", ".", "argtypes", "=", "[", "ctypes", ".", "c_uint", ",", "ctypes", ".", "POINTER", "(", "XinputVibration", ")", "]", "xinput_set_state", ".", "restype", "=", "ctypes", ".", "c_uint", "vibration", "=", "XinputVibration", "(", "int", "(", "left_motor", "*", "65535", ")", ",", "int", "(", "right_motor", "*", "65535", ")", ")", "xinput_set_state", "(", "self", ".", "__device_number", ",", "ctypes", ".", "byref", "(", "vibration", ")", ")" ]
Add the reply markup to a message from the layers
async def set_reply_markup ( msg : Dict , request : 'Request' , stack : 'Stack' ) -> None : from bernard . platforms . telegram . layers import InlineKeyboard , ReplyKeyboard , ReplyKeyboardRemove try : keyboard = stack . get_layer ( InlineKeyboard ) except KeyError : pass else : msg [ 'reply_markup' ] = await keyboard . serialize ( request ) try : keyboard = stack . get_layer ( ReplyKeyboard ) except KeyError : pass else : msg [ 'reply_markup' ] = await keyboard . serialize ( request ) try : remove = stack . get_layer ( ReplyKeyboardRemove ) except KeyError : pass else : msg [ 'reply_markup' ] = remove . serialize ( )
10,780
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/_utils.py#L11-L42
[ "def", "build_agency", "(", "pfeed", ")", ":", "return", "pd", ".", "DataFrame", "(", "{", "'agency_name'", ":", "pfeed", ".", "meta", "[", "'agency_name'", "]", ".", "iat", "[", "0", "]", ",", "'agency_url'", ":", "pfeed", ".", "meta", "[", "'agency_url'", "]", ".", "iat", "[", "0", "]", ",", "'agency_timezone'", ":", "pfeed", ".", "meta", "[", "'agency_timezone'", "]", ".", "iat", "[", "0", "]", ",", "}", ",", "index", "=", "[", "0", "]", ")" ]
Decompose the locale into a normalized tuple .
def split_locale ( locale : Text ) -> Tuple [ Text , Optional [ Text ] ] : items = re . split ( r'[_\-]' , locale . lower ( ) , 1 ) try : return items [ 0 ] , items [ 1 ] except IndexError : return items [ 0 ] , None
10,781
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/utils.py#L14-L27
[ "def", "cycle_file", "(", "source_plaintext_filename", ")", ":", "# Create a static random master key provider", "key_id", "=", "os", ".", "urandom", "(", "8", ")", "master_key_provider", "=", "StaticRandomMasterKeyProvider", "(", ")", "master_key_provider", ".", "add_master_key", "(", "key_id", ")", "ciphertext_filename", "=", "source_plaintext_filename", "+", "\".encrypted\"", "cycled_plaintext_filename", "=", "source_plaintext_filename", "+", "\".decrypted\"", "# Encrypt the plaintext source data", "with", "open", "(", "source_plaintext_filename", ",", "\"rb\"", ")", "as", "plaintext", ",", "open", "(", "ciphertext_filename", ",", "\"wb\"", ")", "as", "ciphertext", ":", "with", "aws_encryption_sdk", ".", "stream", "(", "mode", "=", "\"e\"", ",", "source", "=", "plaintext", ",", "key_provider", "=", "master_key_provider", ")", "as", "encryptor", ":", "for", "chunk", "in", "encryptor", ":", "ciphertext", ".", "write", "(", "chunk", ")", "# Decrypt the ciphertext", "with", "open", "(", "ciphertext_filename", ",", "\"rb\"", ")", "as", "ciphertext", ",", "open", "(", "cycled_plaintext_filename", ",", "\"wb\"", ")", "as", "plaintext", ":", "with", "aws_encryption_sdk", ".", "stream", "(", "mode", "=", "\"d\"", ",", "source", "=", "ciphertext", ",", "key_provider", "=", "master_key_provider", ")", "as", "decryptor", ":", "for", "chunk", "in", "decryptor", ":", "plaintext", ".", "write", "(", "chunk", ")", "# Verify that the \"cycled\" (encrypted, then decrypted) plaintext is identical to the source", "# plaintext", "assert", "filecmp", ".", "cmp", "(", "source_plaintext_filename", ",", "cycled_plaintext_filename", ")", "# Verify that the encryption context used in the decrypt operation includes all key pairs from", "# the encrypt operation", "#", "# In production, always use a meaningful encryption context. 
In this sample, we omit the", "# encryption context (no key pairs).", "assert", "all", "(", "pair", "in", "decryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", "for", "pair", "in", "encryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", ")", "return", "ciphertext_filename", ",", "cycled_plaintext_filename" ]
Compares two locales to find the level of compatibility
def compare_locales ( a , b ) : if a is None or b is None : if a == b : return 2 else : return 0 a = split_locale ( a ) b = split_locale ( b ) if a == b : return 2 elif a [ 0 ] == b [ 0 ] : return 1 else : return 0
10,782
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/utils.py#L30-L53
[ "def", "remove_selection", "(", "self", ",", "sel", ")", ":", "self", ".", "_selections", ".", "remove", "(", "sel", ")", "# <artist>.figure will be unset so we save them first.", "figures", "=", "{", "artist", ".", "figure", "for", "artist", "in", "[", "sel", ".", "annotation", "]", "+", "sel", ".", "extras", "}", "# ValueError is raised if the artist has already been removed.", "with", "suppress", "(", "ValueError", ")", ":", "sel", ".", "annotation", ".", "remove", "(", ")", "for", "artist", "in", "sel", ".", "extras", ":", "with", "suppress", "(", "ValueError", ")", ":", "artist", ".", "remove", "(", ")", "for", "cb", "in", "self", ".", "_callbacks", "[", "\"remove\"", "]", ":", "cb", "(", "sel", ")", "for", "figure", "in", "figures", ":", "figure", ".", "canvas", ".", "draw_idle", "(", ")" ]
Returns the list of available locales . The first locale is the default locale to be used . If no locales are known then None will be the first item .
def list_locales ( self ) -> List [ Optional [ Text ] ] : locales = list ( self . dict . keys ( ) ) if not locales : locales . append ( None ) return locales
10,783
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/utils.py#L61-L73
[ "def", "delete_types_s", "(", "s", ",", "types", ")", ":", "patt", "=", "'(?s)'", "+", "'|'", ".", "join", "(", "'(?<=\\n)'", "+", "s", "+", "'\\n.+?\\n(?=\\S+|$)'", "for", "s", "in", "types", ")", "return", "re", ".", "sub", "(", "patt", ",", "''", ",", "'\\n'", "+", "s", ".", "strip", "(", ")", "+", "'\\n'", ",", ")", ".", "strip", "(", ")" ]
Returns the best matching locale in what is available .
def choose_locale ( self , locale : Text ) -> Text : if locale not in self . _choice_cache : locales = self . list_locales ( ) best_choice = locales [ 0 ] best_level = 0 for candidate in locales : cmp = compare_locales ( locale , candidate ) if cmp > best_level : best_choice = candidate best_level = cmp self . _choice_cache [ locale ] = best_choice return self . _choice_cache [ locale ]
10,784
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/utils.py#L75-L98
[ "def", "remove_armor", "(", "armored_data", ")", ":", "stream", "=", "io", ".", "BytesIO", "(", "armored_data", ")", "lines", "=", "stream", ".", "readlines", "(", ")", "[", "3", ":", "-", "1", "]", "data", "=", "base64", ".", "b64decode", "(", "b''", ".", "join", "(", "lines", ")", ")", "payload", ",", "checksum", "=", "data", "[", ":", "-", "3", "]", ",", "data", "[", "-", "3", ":", "]", "assert", "util", ".", "crc24", "(", "payload", ")", "==", "checksum", "return", "payload" ]
Receive an update from a loader .
def update ( self , new_data : Dict [ Text , Dict [ Text , Text ] ] ) : for locale , data in new_data . items ( ) : if locale not in self . dict : self . dict [ locale ] = { } self . dict [ locale ] . update ( data )
10,785
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/utils.py#L109-L120
[ "def", "get_cgi_parameter_bool", "(", "form", ":", "cgi", ".", "FieldStorage", ",", "key", ":", "str", ")", "->", "bool", ":", "return", "is_1", "(", "get_cgi_parameter_str", "(", "form", ",", "key", ")", ")" ]
Signs the URL if needed
async def _make_url ( self , url : Text , request : 'Request' ) -> Text : if self . sign_webview : return await request . sign_url ( url ) return url
10,786
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/helpers.py#L125-L133
[ "def", "magic_register", "(", "self", ",", "line", ")", ":", "message", "=", "\"\"", "for", "reg", "in", "[", "i", ".", "strip", "(", ")", "for", "i", "in", "line", ".", "replace", "(", "','", ",", "''", ")", ".", "split", "(", ")", "]", ":", "if", "'-'", "in", "reg", ":", "# We have a range (Rn-Rk)", "r1", ",", "r2", "=", "reg", ".", "split", "(", "'-'", ")", "# TODO do we want to allow just numbers?", "n1", "=", "re", ".", "search", "(", "self", ".", "interpreter", ".", "REGISTER_REGEX", ",", "r1", ")", ".", "groups", "(", ")", "[", "0", "]", "n2", "=", "re", ".", "search", "(", "self", ".", "interpreter", ".", "REGISTER_REGEX", ",", "r2", ")", ".", "groups", "(", ")", "[", "0", "]", "n1", "=", "self", ".", "interpreter", ".", "convert_to_integer", "(", "n1", ")", "n2", "=", "self", ".", "interpreter", ".", "convert_to_integer", "(", "n2", ")", "for", "i", "in", "range", "(", "n1", ",", "n2", "+", "1", ")", ":", "val", "=", "self", ".", "interpreter", ".", "register", "[", "r1", "[", "0", "]", "+", "str", "(", "i", ")", "]", "val", "=", "self", ".", "convert_representation", "(", "val", ")", "message", "+=", "\"{}: {}\\n\"", ".", "format", "(", "r1", "[", "0", "]", "+", "str", "(", "i", ")", ",", "val", ")", "else", ":", "val", "=", "self", ".", "interpreter", ".", "register", "[", "reg", "]", "val", "=", "self", ".", "convert_representation", "(", "val", ")", "message", "+=", "\"{}: {}\\n\"", ".", "format", "(", "reg", ",", "val", ")", "stream_content", "=", "{", "'name'", ":", "'stdout'", ",", "'text'", ":", "message", "}", "self", ".", "send_response", "(", "self", ".", "iopub_socket", ",", "'stream'", ",", "stream_content", ")" ]
Make sure that nothing inside blocks sharing .
def is_sharable ( self ) : if self . buttons : return ( all ( b . is_sharable ( ) for b in self . buttons ) and self . default_action and self . default_action . is_sharable ( ) )
10,787
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/facebook/helpers.py#L306-L313
[ "def", "read_avro", "(", "file_path_or_buffer", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "file_path_or_buffer", ",", "six", ".", "string_types", ")", ":", "with", "open", "(", "file_path_or_buffer", ",", "'rb'", ")", "as", "f", ":", "return", "__file_to_dataframe", "(", "f", ",", "schema", ",", "*", "*", "kwargs", ")", "else", ":", "return", "__file_to_dataframe", "(", "file_path_or_buffer", ",", "schema", ",", "*", "*", "kwargs", ")" ]
Checks the grid boundary variables .
def check_bounds_variables ( self , dataset ) : recommended_ctx = TestCtx ( BaseCheck . MEDIUM , 'Recommended variables to describe grid boundaries' ) bounds_map = { 'lat_bounds' : { 'units' : 'degrees_north' , 'comment' : 'latitude values at the north and south bounds of each pixel.' } , 'lon_bounds' : { 'units' : 'degrees_east' , 'comment' : 'longitude values at the west and east bounds of each pixel.' } , 'z_bounds' : { 'comment' : 'z bounds for each z value' , } , 'time_bounds' : { 'comment' : 'time bounds for each time value' } } bounds_variables = [ v . bounds for v in dataset . get_variables_by_attributes ( bounds = lambda x : x is not None ) ] for variable in bounds_variables : ncvar = dataset . variables . get ( variable , { } ) recommended_ctx . assert_true ( ncvar != { } , 'a variable {} should exist as indicated by a bounds attribute' . format ( variable ) ) if ncvar == { } : continue units = getattr ( ncvar , 'units' , '' ) if variable in bounds_map and 'units' in bounds_map [ variable ] : recommended_ctx . assert_true ( units == bounds_map [ variable ] [ 'units' ] , 'variable {} should have units {}' . format ( variable , bounds_map [ variable ] [ 'units' ] ) ) else : recommended_ctx . assert_true ( units != '' , 'variable {} should have a units attribute that is not empty' . format ( variable ) ) comment = getattr ( ncvar , 'comment' , '' ) recommended_ctx . assert_true ( comment != '' , 'variable {} should have a comment and not be empty' ) return recommended_ctx . to_result ( )
10,788
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_grid.py#L42-L93
[ "def", "populateFromRow", "(", "self", ",", "readGroupSetRecord", ")", ":", "self", ".", "_dataUrl", "=", "readGroupSetRecord", ".", "dataurl", "self", ".", "_indexFile", "=", "readGroupSetRecord", ".", "indexfile", "self", ".", "_programs", "=", "[", "]", "for", "jsonDict", "in", "json", ".", "loads", "(", "readGroupSetRecord", ".", "programs", ")", ":", "program", "=", "protocol", ".", "fromJson", "(", "json", ".", "dumps", "(", "jsonDict", ")", ",", "protocol", ".", "Program", ")", "self", ".", "_programs", ".", "append", "(", "program", ")", "stats", "=", "protocol", ".", "fromJson", "(", "readGroupSetRecord", ".", "stats", ",", "protocol", ".", "ReadStats", ")", "self", ".", "_numAlignedReads", "=", "stats", ".", "aligned_read_count", "self", ".", "_numUnalignedReads", "=", "stats", ".", "unaligned_read_count" ]
Geocode an address . Pls refer to the Google Maps Web API for the details of the parameters
def geocode ( self , string , bounds = None , region = None , language = None , sensor = False ) : if isinstance ( string , unicode ) : string = string . encode ( 'utf-8' ) params = { 'address' : self . format_string % string , 'sensor' : str ( sensor ) . lower ( ) } if bounds : params [ 'bounds' ] = bounds if region : params [ 'region' ] = region if language : params [ 'language' ] = language if not self . premier : url = self . get_url ( params ) else : url = self . get_signed_url ( params ) return self . GetService_url ( url )
10,789
https://github.com/walter426/Python_GoogleMapsApi/blob/4832b293a0027446941a5f00ecc66256f92ddbce/GoogleMapsApi/geocode.py#L34-L63
[ "def", "set_lease", "(", "self", ",", "new_lease", ")", ":", "if", "self", ".", "partition_context", ":", "self", ".", "partition_context", ".", "lease", "=", "new_lease", "self", ".", "partition_context", ".", "event_processor_context", "=", "new_lease", ".", "event_processor_context" ]
Reverse geocode a point . Pls refer to the Google Maps Web API for the details of the parameters
def reverse ( self , point , language = None , sensor = False ) : params = { 'latlng' : point , 'sensor' : str ( sensor ) . lower ( ) } if language : params [ 'language' ] = language if not self . premier : url = self . get_url ( params ) else : url = self . get_signed_url ( params ) return self . GetService_url ( url )
10,790
https://github.com/walter426/Python_GoogleMapsApi/blob/4832b293a0027446941a5f00ecc66256f92ddbce/GoogleMapsApi/geocode.py#L66-L83
[ "def", "partition_key", "(", "self", ")", ":", "try", ":", "return", "self", ".", "_annotations", "[", "self", ".", "_partition_key", "]", "except", "KeyError", ":", "return", "self", ".", "_annotations", ".", "get", "(", "EventData", ".", "PROP_PARTITION_KEY", ",", "None", ")" ]
Get Directions Service Pls refer to the Google Maps Web API for the details of the remained parameters
def GetDirections ( self , origin , destination , sensor = False , mode = None , waypoints = None , alternatives = None , avoid = None , language = None , units = None , region = None , departure_time = None , arrival_time = None ) : params = { 'origin' : origin , 'destination' : destination , 'sensor' : str ( sensor ) . lower ( ) } if mode : params [ 'mode' ] = mode if waypoints : params [ 'waypoints' ] = waypoints if alternatives : params [ 'alternatives' ] = alternatives if avoid : params [ 'avoid' ] = avoid if language : params [ 'language' ] = language if units : params [ 'units' ] = units if region : params [ 'region' ] = region if departure_time : params [ 'departure_time' ] = departure_time if arrival_time : params [ 'arrival_time' ] = arrival_time if not self . premier : url = self . get_url ( params ) else : url = self . get_signed_url ( params ) return self . GetService_url ( url )
10,791
https://github.com/walter426/Python_GoogleMapsApi/blob/4832b293a0027446941a5f00ecc66256f92ddbce/GoogleMapsApi/directions.py#L33-L79
[ "def", "set_exception", "(", "self", ",", "exception", ")", ":", "with", "self", ".", "__condition", ":", "self", ".", "__exception", "=", "exception", "self", ".", "__state", "=", "FINISHED", "self", ".", "__condition", ".", "notify_all", "(", ")", "self", ".", "_invoke_callbacks", "(", ")" ]
convert json env variable if set to list
def get ( self ) : self . _cast = type ( [ ] ) source_value = os . getenv ( self . env_name ) # set the environment if it is not set if source_value is None : os . environ [ self . env_name ] = json . dumps ( self . default ) return self . default try : val = json . loads ( source_value ) except JSONDecodeError as e : click . secho ( str ( e ) , err = True , color = 'red' ) sys . exit ( 1 ) except ValueError as e : click . secho ( e . message , err = True , color = 'red' ) sys . exit ( 1 ) if self . validator : val = self . validator ( val ) return val
10,792
https://github.com/sthysel/knobs/blob/1d01f50f643068076e38118a93fed9375ea3ac81/src/knobs.py#L225-L250
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Build an undirected graph of gene interactions from edgelist file .
def parse_ppi_graph ( path : str , min_edge_weight : float = 0.0 ) -> Graph : logger . info ( "In parse_ppi_graph()" ) graph = igraph . read ( os . path . expanduser ( path ) , format = "ncol" , directed = False , names = True ) graph . delete_edges ( graph . es . select ( weight_lt = min_edge_weight ) ) graph . delete_vertices ( graph . vs . select ( _degree = 0 ) ) logger . info ( f"Loaded PPI network.\n" f"Number of proteins: {len(graph.vs)}\n" f"Number of interactions: {len(graph.es)}\n" ) return graph
10,793
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/parsers.py#L19-L33
[ "def", "now_heating", "(", "self", ")", ":", "try", ":", "if", "self", ".", "side", "==", "'left'", ":", "heat", "=", "self", ".", "device", ".", "device_data", "[", "'leftNowHeating'", "]", "elif", "self", ".", "side", "==", "'right'", ":", "heat", "=", "self", ".", "device", ".", "device_data", "[", "'rightNowHeating'", "]", "return", "heat", "except", "TypeError", ":", "return", "None" ]
Read an excel file on differential expression values as Gene objects .
def parse_excel ( file_path : str , entrez_id_header , log_fold_change_header , adjusted_p_value_header , entrez_delimiter , base_mean_header = None ) -> List [ Gene ] : logger . info ( "In parse_excel()" ) df = pd . read_excel ( file_path ) return handle_dataframe ( df , entrez_id_name = entrez_id_header , log2_fold_change_name = log_fold_change_header , adjusted_p_value_name = adjusted_p_value_header , entrez_delimiter = entrez_delimiter , base_mean = base_mean_header , )
10,794
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/parsers.py#L36-L59
[ "def", "any", "(", "self", ",", "array", ",", "role", "=", "None", ")", ":", "sum_in_entity", "=", "self", ".", "sum", "(", "array", ",", "role", "=", "role", ")", "return", "(", "sum_in_entity", ">", "0", ")" ]
Read a csv file on differential expression values as Gene objects .
def parse_csv ( file_path : str , entrez_id_header , log_fold_change_header , adjusted_p_value_header , entrez_delimiter , base_mean_header = None , sep = "," ) -> List [ Gene ] : logger . info ( "In parse_csv()" ) df = pd . read_csv ( file_path , sep = sep ) return handle_dataframe ( df , entrez_id_name = entrez_id_header , log2_fold_change_name = log_fold_change_header , adjusted_p_value_name = adjusted_p_value_header , entrez_delimiter = entrez_delimiter , base_mean = base_mean_header , )
10,795
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/parsers.py#L62-L86
[ "def", "getOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayTransformTrackedDeviceRelative", "punTrackedDevice", "=", "TrackedDeviceIndex_t", "(", ")", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "punTrackedDevice", ")", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "punTrackedDevice", ",", "pmatTrackedDeviceToOverlayTransform" ]
Convert data frame on differential expression values as Gene objects .
def handle_dataframe ( df : pd . DataFrame , entrez_id_name , log2_fold_change_name , adjusted_p_value_name , entrez_delimiter , base_mean = None , ) -> List [ Gene ] : logger . info ( "In _handle_df()" ) if base_mean is not None and base_mean in df . columns : df = df [ pd . notnull ( df [ base_mean ] ) ] df = df [ pd . notnull ( df [ entrez_id_name ] ) ] df = df [ pd . notnull ( df [ log2_fold_change_name ] ) ] df = df [ pd . notnull ( df [ adjusted_p_value_name ] ) ] # try: # import bio2bel_hgnc # except ImportError: # logger.debug('skipping mapping') # else: # manager = bio2bel_hgnc.Manager() # # TODO @cthoyt return [ Gene ( entrez_id = entrez_id , log2_fold_change = data [ log2_fold_change_name ] , padj = data [ adjusted_p_value_name ] ) for _ , data in df . iterrows ( ) for entrez_id in str ( data [ entrez_id_name ] ) . split ( entrez_delimiter ) ]
10,796
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/parsers.py#L89-L129
[ "def", "truncate_schema", "(", "self", ")", ":", "assert", "self", ".", "server", "==", "'localhost'", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "self", ".", "_initialize", "(", "con", ")", "cur", "=", "con", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "'DELETE FROM publication;'", ")", "cur", ".", "execute", "(", "'TRUNCATE systems CASCADE;'", ")", "con", ".", "commit", "(", ")", "con", ".", "close", "(", ")", "return" ]
Parse a list of genes and return them if they are in the network .
def parse_gene_list ( path : str , graph : Graph , anno_type : str = "name" ) -> list : # read the file genes = pd . read_csv ( path , header = None ) [ 0 ] . tolist ( ) genes = [ str ( int ( gene ) ) for gene in genes ] # get those genes which are in the network ind = [ ] if anno_type == "name" : ind = graph . vs . select ( name_in = genes ) . indices elif anno_type == "symbol" : ind = graph . vs . select ( symbol_in = genes ) . indices else : raise Exception ( "The type can either be name or symbol, {} is not " "supported" . format ( anno_type ) ) genes = graph . vs [ ind ] [ anno_type ] return genes
10,797
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/parsers.py#L132-L155
[ "def", "getOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayTransformTrackedDeviceRelative", "punTrackedDevice", "=", "TrackedDeviceIndex_t", "(", ")", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "punTrackedDevice", ")", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "punTrackedDevice", ",", "pmatTrackedDeviceToOverlayTransform" ]
Parse the disease identifier file .
def parse_disease_ids ( path : str ) : if os . path . isdir ( path ) or not os . path . exists ( path ) : logger . info ( "Couldn't find the disease identifiers file. Returning empty list." ) return [ ] df = pd . read_csv ( path , names = [ "ID" ] ) return set ( df [ "ID" ] . tolist ( ) )
10,798
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/parsers.py#L158-L169
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Parse the disease - drug target associations file .
def parse_disease_associations ( path : str , excluded_disease_ids : set ) : if os . path . isdir ( path ) or not os . path . exists ( path ) : logger . info ( "Couldn't find the disease associations file. Returning empty list." ) return { } disease_associations = defaultdict ( list ) with open ( path ) as input_file : for line in input_file : target_id , disease_id = line . strip ( ) . split ( " " ) if disease_id not in excluded_disease_ids : disease_associations [ target_id ] . append ( disease_id ) return disease_associations
10,799
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/parsers.py#L172-L189
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]