idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
4,900
def count_by_time(self):
    """Count how many salt bridges occurred in each frame.

    Returns a numpy recarray with 'time' and 'count' fields,
    one row per entry in self.timesteps.
    """
    out = np.empty((len(self.timesteps),),
                   dtype=[('time', float), ('count', int)])
    for cursor, timestep in enumerate(self.timesteps):
        # Sum a generator instead of materialising a throwaway list.
        matches = sum(1 for x in self.timeseries if x.time == timestep)
        out[cursor] = (timestep, matches)
    return out.view(np.recarray)
Count how many salt bridges occurred in each frame . Returns numpy array .
108
16
4,901
def keep_longest(head, update, down_path):
    """Keep the longest field among head and update.

    Returns 'f' to keep the first (head) value or 's' to keep the
    second (update) one; down_path is accepted but unused.
    """
    if update is None:
        return 'f'
    if head is None:
        return 's'
    return 's' if len(update) > len(head) else 'f'
Keep longest field among head and update .
51
8
4,902
def comments(self, case_id=None, variant_id=None, username=None):
    """Return comments for a case or variant."""
    logger.debug("Looking for comments")
    query = self.query(Comment)
    if case_id:
        query = query.filter_by(case_id=case_id)
    if variant_id:
        query = query.filter_by(variant_id=variant_id)
    elif case_id:
        # Case-level comments only: exclude variant-bound ones.
        query = query.filter_by(variant_id=None)
    return query
Return comments for a case or variant .
128
8
4,903
def add_comment(self, case_obj, text, variant_id=None, username=None):
    """Add a comment to a variant or a case."""
    new_comment = Comment(
        text=text,
        username=username or 'Anonymous',
        case=case_obj,
        # variant_id is the md5 sum of chrom, pos, ref, alt
        variant_id=variant_id,
    )
    self.session.add(new_comment)
    self.save()
    return new_comment
Add a comment to a variant or a case
81
9
4,904
def _add_consequences(self, variant_obj, raw_variant_line):
    """Add the consequences found for a variant."""
    variant_obj.consequences = [
        term for term in SO_TERMS if term in raw_variant_line
    ]
Add the consequences found for a variant
57
7
4,905
def collect_appendvars(ap_, cls):
    """Collect elements for the append-var lists.

    Scans ``cls`` for attributes named ``appendvars_<name>`` and extends
    ``ap_.appendvars[<name>]`` with their values, skipping values that
    are already present verbatim.
    """
    prefix = 'appendvars_'
    for attr, value in cls.__dict__.items():
        if not attr.startswith(prefix):
            continue
        varname = attr[len(prefix):]
        bucket = ap_.appendvars.setdefault(varname, [])
        if value in bucket:
            continue
        bucket.extend(value if isinstance(value, list) else [value])
colleziona elementi per le liste .
120
10
4,906
def has_shared(arg, shared):
    """Check whether ``arg`` is among the shared arguments.

    Returns the index of the matching shared argument, or False when
    nothing matches (or the spec is malformed).
    """
    try:
        if isinstance(shared, list):
            candidates = shared
        else:
            candidates = shared.__shared_arguments__
        for position, (args, kwargs) in enumerate(candidates):
            name = kwargs.get('dest', args[-1].lstrip('-').replace('-', '_'))
            if name == arg:
                return position
        return False
    except (ValueError, AttributeError):
        return False
Verifica se ci sono shared .
131
10
4,907
def has_argument(arg, arguments):
    """Return the index of ``arg`` within the argument specs, or False."""
    try:
        if not isinstance(arguments, list):
            arguments = arguments.__arguments__
        for position, (args, kwargs) in enumerate(arguments):
            name = kwargs.get('dest', args[-1].lstrip('-').replace('-', '_'))
            if name == arg:
                return position
        return False
    except (ValueError, AttributeError):
        return False
Verifica se ci sono argument con la classe .
116
14
4,908
def get_functarguments(func):
    """Derive argparse argument specs from the function's own signature.

    Positional parameters become positional CLI arguments; keyword
    parameters become ``--flag`` options. Parameters already covered by
    shared or class-level arguments are skipped. The derived names are
    recorded on ``func.__named__``.
    """
    # inspect.getargspec() was removed in Python 3.11;
    # getfullargspec() exposes the same .args/.defaults fields.
    argspec = inspect.getfullargspec(func)
    if argspec.defaults is not None:
        args = argspec.args[:-len(argspec.defaults)]
        kwargs = dict(zip(argspec.args[-len(argspec.defaults):],
                          argspec.defaults))
    else:
        args = argspec.args
        kwargs = {}
    if args and args[0] == 'self':
        args.pop(0)
    func.__named__ = []
    arguments = []
    shared = get_shared(func)
    for arg in args:
        if has_shared(arg, shared) is not False:
            continue
        if has_argument(arg, func.__cls__) is not False:
            continue
        arguments.append(([arg], {},))
        func.__named__.append(arg)
    for key, val in kwargs.items():
        if has_shared(key, shared) is not False:
            continue
        if has_argument(key, func.__cls__) is not False:
            continue
        if isinstance(val, dict):
            # A dict default describes the option itself: optional short
            # flag, long-flag override and remaining add_argument kwargs.
            flags = [val.pop('lflag', '--%s' % key)]
            short = val.pop('flag', None)
            dest = val.get('dest', key).replace('-', '_')
            if short:
                flags.insert(0, short)
        else:
            flags = ['--%s' % key]
            val = dict(default=val)
            dest = key.replace('-', '_')
        func.__named__.append(dest)
        arguments.append((flags, val,))
    return arguments
Recupera gli argomenti dalla funzione stessa .
370
17
4,909
def get_parser(func, parent):
    """Register a sub-parser for ``func`` and attach its arguments."""
    new_parser = parent.add_parser(func.__cmd_name__, help=func.__doc__)
    for flags, options in func.__arguments__:
        new_parser.add_argument(*flags, **options)
    return new_parser
Imposta il parser .
65
5
4,910
def get_shared(func):
    """Return the shared arguments applicable to ``func``.

    Honours ``func.__no_share__``: True disables all sharing, while a
    collection of names excludes just those shared arguments.
    """
    shared = []
    cls = getattr(func, '__cls__', None)
    if cls is None or not hasattr(cls, '__shared_arguments__'):
        return shared
    if not hasattr(func, '__no_share__'):
        return cls.__shared_arguments__
    if func.__no_share__ is True:
        return shared
    for spec in cls.__shared_arguments__:
        name = spec[0][-1].replace('--', '').replace('-', '_')
        if name not in func.__no_share__:
            shared.append(spec)
    return shared
return shared .
162
3
4,911
def set_subcommands(func, parser):
    """Set subcommands.

    When ``func`` declares subcommands, build a sub-parser per entry and
    attach each one's shared arguments; otherwise attach the shared
    arguments directly to ``parser``.
    """
    subcommands = getattr(func, '__subcommands__', None)
    if subcommands:
        sub_parser = parser.add_subparsers(
            title=SUBCOMMANDS_LIST_TITLE,
            dest='subcommand',
            description=SUBCOMMANDS_LIST_DESCRIPTION.format(func.__cmd_name__),
            help=func.__doc__)
        for sub_func in subcommands.values():
            parser = get_parser(sub_func, sub_parser)
            for args, kwargs in get_shared(sub_func):
                parser.add_argument(*args, **kwargs)
    else:
        for args, kwargs in get_shared(func):
            parser.add_argument(*args, **kwargs)
Set subcommands .
193
5
4,912
def check_help():
    """Return True when argv contains a known help/version flag."""
    known = {'-h', '--help', '-v', '--version'}
    # Set intersection against everything after the program name.
    return bool(known & set(sys.argv[1:]))
check know args in argv .
76
7
4,913
def analysis_of_prot_lig_interactions(self):
    """Run every protein-ligand interaction analysis and store the results."""
    self.hbonds = HBonds(self.topol_data, self.trajectory, self.start,
                         self.end, self.skip, self.analysis_cutoff,
                         distance=3)
    self.pistacking = PiStacking(self.topol_data, self.trajectory,
                                 self.start, self.end, self.skip,
                                 self.analysis_cutoff)
    self.sasa = SASA(self.topol_data, self.trajectory)
    self.lig_descr = LigDescr(self.topol_data)
    if self.trajectory != []:
        # RMSF only makes sense when an actual trajectory was supplied.
        self.rmsf = RMSF_measurements(self.topol_data, self.topology,
                                      self.trajectory, self.ligand,
                                      self.start, self.end, self.skip)
    self.salt_bridges = SaltBridges(self.topol_data, self.trajectory,
                                    self.lig_descr, self.start, self.end,
                                    self.skip, self.analysis_cutoff)
The classes and function that deal with protein - ligand interaction analysis .
238
14
4,914
def save_files(self):
    """Save all output from a LINTools run in a single directory named
    after the output name, and make that directory the working directory.
    """
    while True:
        try:
            os.mkdir(self.output_name)
        except OSError:
            # Directory exists (or cannot be created) - ask for a new name.
            # NOTE(review): raw_input is Python 2 only; switch to input()
            # if this module targets Python 3 - confirm.
            self.output_name = raw_input(
                "This directory already exists - please enter a new name:")
        else:
            break
    self.workdir = os.getcwd()
    # os.path.join is safer than manual "/" concatenation.
    os.chdir(os.path.join(self.workdir, self.output_name))
Saves all output from LINTools run in a single directory named after the output name .
86
19
4,915
def remove_files(self):
    """Remove intermediate files produced during the run."""
    file_list = ["molecule.svg", "lig.pdb", "HIS.pdb", "PHE.pdb",
                 "TRP.pdb", "TYR.pdb", "lig.mol", "test.xtc"]
    # Per-residue SVGs produced for every plotted residue.
    for residue in self.topol_data.dict_of_plotted_res.keys():
        file_list.append(residue[1] + residue[2] + ".svg")
    for filename in file_list:
        # isfile() already returns a bool; no "== True" comparison needed.
        if os.path.isfile(filename):
            os.remove(filename)
Removes intermediate files .
144
5
4,916
def setup(self):
    """Initialize the driver: statistics state, GPIO interrupts and the
    periodic statistics timer. Returns self so calls can be chained."""
    # Statistics counters.
    self.radiation_count = 0
    self.noise_count = 0
    self.count = 0
    # Rolling history of counts.
    self.count_history = [0] * HISTORY_LENGTH
    self.history_index = 0
    # Measurement timing state.
    self.previous_time = millis()
    self.previous_history_time = millis()
    self.duration = 0
    # Configure both input pins with pull-ups.
    for pin in (self.radiation_pin, self.noise_pin):
        GPIO.setup(pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    # Fire the local callbacks on falling edges.
    GPIO.add_event_detect(self.radiation_pin, GPIO.FALLING,
                          callback=self._on_radiation)
    GPIO.add_event_detect(self.noise_pin, GPIO.FALLING,
                          callback=self._on_noise)
    # Enable the timer for processing the statistics periodically.
    self._enable_timer()
    return self
Initialize the driver by setting up GPIO interrupts and periodic statistics processing .
242
14
4,917
def load(ctx, variant_source, family_file, family_type, root):
    """Load a variant source into the database."""
    # Resolve the puzzle root directory: CLI option, context, or ~/.puzzle.
    root = root or ctx.obj.get('root') or os.path.expanduser("~/.puzzle")
    if os.path.isfile(root):
        logger.error("'root' can't be a file")
        ctx.abort()
    logger.info("Root directory is: {}".format(root))
    db_path = os.path.join(root, 'puzzle_db.sqlite3')
    logger.info("db path is: {}".format(db_path))
    if not os.path.exists(db_path):
        logger.warn("database not initialized, run 'puzzle init'")
        ctx.abort()
    if not os.path.isfile(variant_source):
        logger.error("Variant source has to be a file")
        ctx.abort()
    # Determine the backend mode (vcf/gemini/...) from the source file.
    mode = get_file_type(variant_source)
    if mode == 'unknown':
        logger.error("Unknown file type")
        ctx.abort()
    #Test if gemini is installed
    elif mode == 'gemini':
        logger.debug("Initialzing GEMINI plugin")
        if not GEMINI:
            logger.error("Need to have gemini installed to use gemini plugin")
            ctx.abort()
    logger.debug('Set puzzle backend to {0}'.format(mode))
    variant_type = get_variant_type(variant_source)
    logger.debug('Set variant type to {0}'.format(variant_type))
    # Build case objects from the source plus optional family (ped) info.
    cases = get_cases(variant_source=variant_source,
                      case_lines=family_file,
                      case_type=family_type,
                      variant_type=variant_type,
                      variant_mode=mode)
    if len(cases) == 0:
        logger.warning("No cases found")
        ctx.abort()
    logger.info("Initializing sqlite plugin")
    store = SqlStore(db_path)
    for case_obj in cases:
        # Skip cases that are already present in the database.
        if store.case(case_obj.case_id) is not None:
            logger.warn("{} already exists in the database".format(case_obj.case_id))
            continue
        # extract case information
        logger.debug("adding case: {} to puzzle db".format(case_obj.case_id))
        store.add_case(case_obj, vtype=variant_type, mode=mode)
Load a variant source into the database .
551
8
4,918
def list(self, deal_id, **params):
    """Retrieve the deal's associated contacts."""
    response = self.http_client.get(
        "/deals/{deal_id}/associated_contacts".format(deal_id=deal_id),
        params=params)
    return response[2]
Retrieve deal s associated contacts
66
6
4,919
def create(self, deal_id, *args, **kwargs):
    """Create an associated contact.

    Attributes may be given as a single dict or as keyword arguments;
    only whitelisted keys (OPTS_KEYS_TO_PERSIST) are sent to the API.

    :raises Exception: if no attributes were supplied.
    """
    if not args and not kwargs:
        raise Exception('attributes for AssociatedContact are missing')
    attributes = args[0] if args else kwargs
    # iteritems() is Python 2 only; items() works on both 2 and 3
    # (this file already uses f-strings, i.e. Python 3.6+).
    attributes = dict((k, v) for k, v in attributes.items()
                      if k in self.OPTS_KEYS_TO_PERSIST)
    _, _, associated_contact = self.http_client.post(
        "/deals/{deal_id}/associated_contacts".format(deal_id=deal_id),
        body=attributes)
    return associated_contact
Create an associated contact
140
4
4,920
def destroy(self, deal_id, contact_id):
    """Remove an associated contact; True when the API confirms deletion."""
    path = "/deals/{deal_id}/associated_contacts/{contact_id}".format(
        deal_id=deal_id, contact_id=contact_id)
    status_code, _, _ = self.http_client.delete(path)
    return status_code == 204
Remove an associated contact
75
4
4,921
def list ( self , * * params ) : _ , _ , contacts = self . http_client . get ( "/contacts" , params = params ) return contacts
Retrieve all contacts
35
4
4,922
def retrieve(self, id):
    """Retrieve a single contact by id."""
    _, _, payload = self.http_client.get("/contacts/{id}".format(id=id))
    return payload
Retrieve a single contact
39
5
4,923
def list ( self , * * params ) : _ , _ , deals = self . http_client . get ( "/deals" , params = params ) for deal in deals : deal [ 'value' ] = Coercion . to_decimal ( deal [ 'value' ] ) return deals
Retrieve all deals
62
4
4,924
def create(self, *args, **kwargs):
    """Create a deal.

    Attributes may be given as a dict or keyword arguments; only
    whitelisted keys are persisted, and the deal value is round-tripped
    through Coercion (string out, Decimal back).

    :raises Exception: if no attributes were supplied.
    """
    if not args and not kwargs:
        raise Exception('attributes for Deal are missing')
    attributes = args[0] if args else kwargs
    # iteritems() is Python 2 only; items() is portable.
    attributes = dict((k, v) for k, v in attributes.items()
                      if k in self.OPTS_KEYS_TO_PERSIST)
    if "value" in attributes:
        attributes["value"] = Coercion.to_string(attributes["value"])
    _, _, deal = self.http_client.post("/deals", body=attributes)
    deal["value"] = Coercion.to_decimal(deal["value"])
    return deal
Create a deal
161
3
4,925
def retrieve(self, id):
    """Retrieve a single deal; its value is coerced to a Decimal."""
    url = "/deals/{id}".format(id=id)
    _, _, deal = self.http_client.get(url)
    deal["value"] = Coercion.to_decimal(deal["value"])
    return deal
Retrieve a single deal
61
5
4,926
def update ( self , id , * args , * * kwargs ) : if not args and not kwargs : raise Exception ( 'attributes for Deal are missing' ) attributes = args [ 0 ] if args else kwargs attributes = dict ( ( k , v ) for k , v in attributes . iteritems ( ) if k in self . OPTS_KEYS_TO_PERSIST ) if "value" in attributes : attributes [ "value" ] = Coercion . to_string ( attributes [ "value" ] ) _ , _ , deal = self . http_client . put ( "/deals/{id}" . format ( id = id ) , body = attributes ) deal [ "value" ] = Coercion . to_decimal ( deal [ "value" ] ) return deal
Update a deal
173
3
4,927
def update ( self , id , * args , * * kwargs ) : if not args and not kwargs : raise Exception ( 'attributes for DealSource are missing' ) attributes = args [ 0 ] if args else kwargs attributes = dict ( ( k , v ) for k , v in attributes . iteritems ( ) if k in self . OPTS_KEYS_TO_PERSIST ) _ , _ , deal_source = self . http_client . put ( "/deal_sources/{id}" . format ( id = id ) , body = attributes ) return deal_source
Update a source
129
3
4,928
def list ( self , * * params ) : _ , _ , deal_unqualified_reasons = self . http_client . get ( "/deal_unqualified_reasons" , params = params ) return deal_unqualified_reasons
Retrieve all deal unqualified reasons
52
7
4,929
def retrieve(self, id):
    """Retrieve a single deal unqualified reason."""
    _, _, payload = self.http_client.get(
        "/deal_unqualified_reasons/{id}".format(id=id))
    return payload
Retrieve a single deal unqualified reason
54
8
4,930
def list ( self , * * params ) : _ , _ , leads = self . http_client . get ( "/leads" , params = params ) return leads
Retrieve all leads
35
4
4,931
def retrieve(self, id):
    """Retrieve a single lead by id."""
    _, _, payload = self.http_client.get("/leads/{id}".format(id=id))
    return payload
Retrieve a single lead
39
5
4,932
def list ( self , * * params ) : _ , _ , lead_unqualified_reasons = self . http_client . get ( "/lead_unqualified_reasons" , params = params ) return lead_unqualified_reasons
Retrieve all lead unqualified reasons
52
7
4,933
def list(self, order_id, **params):
    """Retrieve the order's line items."""
    response = self.http_client.get(
        "/orders/{order_id}/line_items".format(order_id=order_id),
        params=params)
    return response[2]
Retrieve order s line items
63
6
4,934
def retrieve(self, order_id, id):
    """Retrieve a single line item within an order."""
    _, _, payload = self.http_client.get(
        "/orders/{order_id}/line_items/{id}".format(order_id=order_id, id=id))
    return payload
Retrieve a single line item
64
6
4,935
def list ( self , * * params ) : _ , _ , loss_reasons = self . http_client . get ( "/loss_reasons" , params = params ) return loss_reasons
Retrieve all reasons
43
4
4,936
def retrieve(self, id):
    """Retrieve a single loss reason."""
    _, _, payload = self.http_client.get("/loss_reasons/{id}".format(id=id))
    return payload
Retrieve a single reason
45
5
4,937
def list ( self , * * params ) : _ , _ , notes = self . http_client . get ( "/notes" , params = params ) return notes
Retrieve all notes
34
4
4,938
def retrieve(self, id):
    """Retrieve a single note."""
    _, _, payload = self.http_client.get("/notes/{id}".format(id=id))
    return payload
Retrieve a single note
38
5
4,939
def list ( self , * * params ) : _ , _ , orders = self . http_client . get ( "/orders" , params = params ) return orders
Retrieve all orders
34
4
4,940
def retrieve(self, id):
    """Retrieve a single order."""
    _, _, payload = self.http_client.get("/orders/{id}".format(id=id))
    return payload
Retrieve a single order
38
5
4,941
def list ( self , * * params ) : _ , _ , pipelines = self . http_client . get ( "/pipelines" , params = params ) return pipelines
Retrieve all pipelines
36
4
4,942
def list ( self , * * params ) : _ , _ , products = self . http_client . get ( "/products" , params = params ) return products
Retrieve all products
34
4
4,943
def retrieve(self, id):
    """Retrieve a single product."""
    _, _, payload = self.http_client.get("/products/{id}".format(id=id))
    return payload
Retrieve a single product
38
5
4,944
def list ( self , * * params ) : _ , _ , stages = self . http_client . get ( "/stages" , params = params ) return stages
Retrieve all stages
35
4
4,945
def list ( self , * * params ) : _ , _ , tags = self . http_client . get ( "/tags" , params = params ) return tags
Retrieve all tags
34
4
4,946
def retrieve(self, id):
    """Retrieve a single tag."""
    _, _, payload = self.http_client.get("/tags/{id}".format(id=id))
    return payload
Retrieve a single tag
38
5
4,947
def list ( self , * * params ) : _ , _ , tasks = self . http_client . get ( "/tasks" , params = params ) return tasks
Retrieve all tasks
35
4
4,948
def retrieve(self, id):
    """Retrieve a single task."""
    _, _, payload = self.http_client.get("/tasks/{id}".format(id=id))
    return payload
Retrieve a single task
39
5
4,949
def list ( self , * * params ) : _ , _ , text_messages = self . http_client . get ( "/text_messages" , params = params ) return text_messages
Retrieve text messages
43
4
4,950
def retrieve(self, id):
    """Retrieve a single text message."""
    _, _, payload = self.http_client.get(
        "/text_messages/{id}".format(id=id))
    return payload
Retrieve a single text message
45
6
4,951
def list ( self , * * params ) : _ , _ , users = self . http_client . get ( "/users" , params = params ) return users
Retrieve all users
34
4
4,952
def retrieve(self, id):
    """Retrieve a single user."""
    _, _, payload = self.http_client.get("/users/{id}".format(id=id))
    return payload
Retrieve a single user
38
5
4,953
def list ( self , * * params ) : _ , _ , visit_outcomes = self . http_client . get ( "/visit_outcomes" , params = params ) return visit_outcomes
Retrieve visit outcomes
44
4
4,954
def request(url, *args, **kwargs):
    """Do the HTTP request and return the decoded JSON data.

    ``method`` defaults to GET and ``timeout`` to 10 seconds
    (the Home Assistant default).
    """
    # pop() both keys so they are not forwarded inside **kwargs:
    # the original get('method') left 'method' in kwargs, which makes
    # requests.request() fail with "multiple values for argument 'method'".
    method = kwargs.pop('method', 'GET')
    timeout = kwargs.pop('timeout', 10)  # hass default timeout
    req = requests.request(method, url, *args, timeout=timeout, **kwargs)
    data = req.json()
    _LOGGER.debug(json.dumps(data))
    return data
Do the HTTP Request and return data
95
7
4,955
def message_worker(device):
    """Loop through messages and pass them on to the right device.

    Runs forever: drains ``device.messages`` (a queue of raw UDP
    payloads), decodes each as JSON and dispatches matching events.
    """
    _LOGGER.debug("Starting Worker Thread.")
    msg_q = device.messages
    while True:
        if not msg_q.empty():
            message = msg_q.get()
            data = {}
            try:
                data = json.loads(message.decode("utf-8"))
            except ValueError:
                _LOGGER.error("Received invalid message: %s", message)
            if 'device_id' in data:
                device_id = data.get('device_id')
                if device_id == device.device_id:
                    device.handle_event(data)
                else:
                    _LOGGER.warning("Received message for unknown device.")
            msg_q.task_done()
        # Avoid busy-waiting on an empty queue.
        time.sleep(0.2)
Loop through messages and pass them on to right device
170
10
4,956
def socket_worker(sock, msg_q):
    """Socket loop that fills the message queue.

    Receives UDP datagrams forever and puts the raw payloads on
    ``msg_q`` for the message worker to process.
    """
    _LOGGER.debug("Starting Socket Thread.")
    while True:
        try:
            data, addr = sock.recvfrom(1024)  # buffer size is 1024 bytes
        except OSError as err:
            _LOGGER.error(err)
        else:
            _LOGGER.debug("received message: %s from %s", data, addr)
            msg_q.put(data)
        time.sleep(0.2)
Socket Loop that fills message queue
102
6
4,957
def toposort(graph, pick_first='head'):
    """Topologically sort a list match graph.

    ``graph`` maps node -> BeforeNodes(head_node, update_node). Performs
    a Kahn-style walk starting from the FIRST sentinel.

    :raises ValueError: when the graph contains a cycle.
    """
    # Count incoming edges for every node referenced by the graph.
    in_deg = {}
    for node, next_nodes in six.iteritems(graph):
        for next_node in [next_nodes.head_node, next_nodes.update_node]:
            if next_node is None:
                continue
            in_deg[next_node] = in_deg.get(next_node, 0) + 1
    stk = [FIRST]
    ordered = []
    visited = set()
    while stk:
        node = stk.pop()
        visited.add(node)
        if node != FIRST:
            ordered.append(node)
        # Visit successors in the order dictated by pick_first.
        traversal = _get_traversal(graph.get(node, BeforeNodes()), pick_first)
        for next_node in traversal:
            if next_node is None:
                continue
            if next_node in visited:
                raise ValueError('Graph has a cycle')
            in_deg[next_node] -= 1
            if in_deg[next_node] == 0:
                stk.append(next_node)
    # Nodes may not be walked because they don't reach in degree 0.
    if len(ordered) != len(graph) - 1:
        raise ValueError('Graph has a cycle')
    return ordered
Topologically sorts a list match graph .
272
9
4,958
def sort_cyclic_graph_best_effort(graph, pick_first='head'):
    """Fallback ordering for cases in which the graph has cycles.

    Walks the pick_first chain from the FIRST sentinel, then the other
    chain for anything not yet visited.
    """
    ordered = []
    visited = set()
    # Go first on the pick_first chain then go back again on the others
    # that were not visited. Given the way the graph is built both chains
    # will always contain all the elements.
    if pick_first == 'head':
        fst_attr, snd_attr = ('head_node', 'update_node')
    else:
        fst_attr, snd_attr = ('update_node', 'head_node')
    current = FIRST
    while current is not None:
        visited.add(current)
        current = getattr(graph[current], fst_attr)
        if current not in visited and current is not None:
            ordered.append(current)
    current = FIRST
    while current is not None:
        visited.add(current)
        current = getattr(graph[current], snd_attr)
        if current not in visited and current is not None:
            ordered.append(current)
    return ordered
Fallback for cases in which the graph has cycles .
225
11
4,959
def get(url):
    """Retrieve a url and print the body (or the error status)."""
    writeln("Getting data from url", url)
    response = requests.get(url)
    if response.status_code != 200:
        writeln(str(response.status_code), response.reason)
    else:
        writeln(response.text)
Retrieve an url .
60
5
4,960
def post(url, var):
    """Post data to a url.

    ``var`` is a list of "key=value" strings; values may themselves
    contain '=' characters.
    """
    # split("=", 1): only the first '=' separates key from value, so
    # "a=b=c" becomes {'a': 'b=c'} instead of silently dropping '=c'.
    data = {key: value for key, value in (item.split("=", 1) for item in var)}
    writeln("Sending data to url", url)
    response = requests.post(url, data=data)
    if response.status_code == 200:
        writeln(response.text)
    else:
        writeln(str(response.status_code), response.reason)
Post data to an url .
96
6
4,961
def cast_bytes(s, encoding='utf8', errors='strict'):
    """Cast str or bytes to bytes."""
    if isinstance(s, str):
        return s.encode(encoding, errors)
    if isinstance(s, bytes):
        return s
    raise TypeError("Expected unicode or bytes, got %r" % s)
cast str or bytes to bytes
73
6
4,962
def cast_str(s, encoding='utf8', errors='strict'):
    """Cast bytes or str to str."""
    if isinstance(s, str):
        return s
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    raise TypeError("Expected unicode or bytes, got %r" % s)
cast bytes or str to str
73
6
4,963
def cast_datetime(ts, fmt=None):
    """Cast a timestamp to a datetime, or to a date string when fmt is given."""
    moment = datetime.datetime.fromtimestamp(ts)
    return moment.strftime(fmt) if fmt else moment
cast timestamp to datetime or date str
43
8
4,964
def singleton_init_by(init_fn=None):
    """Decorator factory: replace a class with its single instance.

    >>> from Redy.Magic.Classic import singleton

    ``init_fn`` (if given) runs after the original ``__init__``.
    """
    if not init_fn:
        def wrap_init(origin_init):
            return origin_init
    else:
        def wrap_init(origin_init):
            def __init__(self):
                origin_init(self)
                init_fn(self)
            return __init__

    def inner(cls_def: type):
        # Give the singleton an identity-based instance check unless the
        # class already defines a custom (non-builtin) one.
        if not hasattr(cls_def, '__instancecheck__') or isinstance(
                cls_def.__instancecheck__,
                (types.BuiltinMethodType, _slot_wrapper)):
            def __instancecheck__(self, instance):
                return instance is self
            cls_def.__instancecheck__ = __instancecheck__
        _origin_init = cls_def.__init__
        cls_def.__init__ = wrap_init(_origin_init)
        # Return the instance, not the class: the decorated name binds to it.
        return cls_def()
    return inner
>>> from Redy . Magic . Classic import singleton >>>
197
12
4,965
def const_return(func):
    """Cache the first call's result and return it for every later call.

    >>> from Redy.Magic.Classic import const_return

    Note: later calls ignore their arguments entirely; the first
    computed value is returned forever.
    """
    # Local sentinel distinguishes "not yet computed" from a legitimate
    # None result (self-contained replacement for a module-level _undef).
    _missing = object()
    result = _missing

    def ret_call(*args, **kwargs):
        nonlocal result
        if result is _missing:
            result = func(*args, **kwargs)
        return result
    return ret_call
>>> from Redy . Magic . Classic import const_return >>>
57
13
4,966
def execute(func: types.FunctionType):
    """Immediately invoke ``func``, pulling argument values from the
    function's globals (falling back to the declared defaults in order).

    >>> from Redy.Magic.Classic import execute
    >>> x = 1
    """
    spec = getfullargspec(func)
    default = spec.defaults
    arg_cursor = 0

    def get_item(name):
        nonlocal arg_cursor
        ctx = func.__globals__
        value = ctx.get(name, _undef)
        if value is _undef:
            # Not in globals: consume the next declared default.
            try:
                value = default[arg_cursor]
                arg_cursor += 1
            except (TypeError, IndexError):
                raise ValueError(f"Current context has no variable `{name}`")
        return value
    return func(*(get_item(arg_name) for arg_name in spec.args))
>>> from Redy . Magic . Classic import execute >>> x = 1 >>>
144
15
4,967
def cast(cast_fn):
    """Decorator factory: pipe the wrapped function's result through cast_fn.

    >>> from Redy.Magic.Classic import cast
    """
    def inner(func):
        def call(*args, **kwargs):
            raw = func(*args, **kwargs)
            return cast_fn(raw)
        functools.update_wrapper(call, func)
        return call
    return inner
>>> from Redy . Magic . Classic import cast >>>
60
11
4,968
def insert(self, action: Action, where: 'Union[int, Delegate.Where]'):
    """Add a new action with a specific priority.

    ``where`` is either an index or a callable that derives the index
    from the current action list.
    """
    index = where if isinstance(where, int) else where(self.actions)
    self.actions.insert(index, action)
add a new action with specific priority
61
7
4,969
def patch_to_conflict_set(patch):
    """Translate a dictdiffer patch into a set of json_merger Conflicts."""
    patch_type, patched_key, value = patch
    # Normalize the patched key into a tuple path.
    if isinstance(patched_key, list):
        key_path = tuple(patched_key)
    else:
        key_path = tuple(part for part in patched_key.split('.') if part)
    conflicts = set()
    if patch_type == REMOVE:
        for key, obj in value:
            conflicts.add(Conflict(ConflictType.REMOVE_FIELD,
                                   key_path + (key,), None))
    elif patch_type == CHANGE:
        first_val, second_val = value
        conflicts.add(Conflict(ConflictType.SET_FIELD, key_path, second_val))
    elif patch_type == ADD:
        for key, obj in value:
            conflicts.add(Conflict(ConflictType.SET_FIELD,
                                   key_path + (key,), obj))
    return conflicts
Translates a dictdiffer conflict into a json_merger one .
220
16
4,970
def merge(self):
    """Perform merge of head and update starting from root."""
    both_dicts = isinstance(self.head, dict) and isinstance(self.update, dict)
    if both_dicts:
        if not isinstance(self.root, dict):
            self.root = {}
        self._merge_dicts()
    else:
        self._merge_base_values()
    if self.conflict_set:
        raise MergeError('Dictdiffer Errors', self.conflicts)
Perform merge of head and update starting from root .
91
11
4,971
def chebyshev(point1, point2):
    """Compute the Chebyshev (L-infinity) distance between two points.

    Generalized from 2D to any dimension: the distance is the maximum
    absolute coordinate difference. 2D callers behave exactly as before.
    """
    return max(abs(a - b) for a, b in zip(point1, point2))
Computes distance between 2D points using chebyshev metric
46
13
4,972
def circlescan(x0, y0, r1, r2):
    """Scan pixels in a circle pattern around a center point.

    Yields (x, y) coordinates on circles of radius r1..r2 around
    (x0, y0), stepping the radius outward or inward as needed.

    :raises ValueError: if either radius is negative.
    """
    # Validate inputs
    if r1 < 0:
        raise ValueError("Initial radius must be non-negative")
    if r2 < 0:
        raise ValueError("Final radius must be non-negative")
    # List of pixels visited in previous diameter
    previous = []
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        if distance == 0:
            yield x0, y0
        else:
            # Computes points for first octant and the rotate by multiples of
            # 45 degrees to compute the other octants
            a = 0.707107  # cos(45 degrees) = sin(45 degrees)
            rotations = {0: [[1, 0], [0, 1]],
                         1: [[a, a], [-a, a]],
                         2: [[0, 1], [-1, 0]],
                         3: [[-a, a], [-a, -a]],
                         4: [[-1, 0], [0, -1]],
                         5: [[-a, -a], [a, -a]],
                         6: [[0, -1], [1, 0]],
                         7: [[a, -a], [a, a]]}
            nangles = len(rotations)
            # List of pixels visited in current diameter
            current = []
            for angle in range(nangles):
                # Midpoint circle algorithm over the first octant.
                x = 0
                y = distance
                d = 1 - distance
                while x < y:
                    xr = rotations[angle][0][0] * x + rotations[angle][0][1] * y
                    yr = rotations[angle][1][0] * x + rotations[angle][1][1] * y
                    xr = x0 + xr
                    yr = y0 + yr
                    # First check if point was in previous diameter
                    # since our scan pattern can lead to duplicates in
                    # neighboring diameters
                    point = (int(round(xr)), int(round(yr)))
                    if point not in previous:
                        yield xr, yr
                    current.append(point)
                    # Move pixel according to circle constraint
                    if (d < 0):
                        d += 3 + 2 * x
                    else:
                        d += 5 - 2 * (y - x)
                        y -= 1
                    x += 1
            previous = current
Scan pixels in a circle pattern around a center point
527
10
4,973
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
    """Scan pixels in a grid pattern along the x-coordinate then y-coordinate.

    :raises ValueError: if either step is not positive.
    """
    if stepx <= 0:
        raise ValueError("X-step must be positive")
    if stepy <= 0:
        raise ValueError("Y-step must be positive")
    # Signed steps point from the start corner toward the end corner.
    dx = stepx if xf >= xi else -stepx
    dy = stepy if yf >= yi else -stepy
    rows = range(yi, yf + dy, dy)
    cols = range(xi, xf + dx, dx)
    for y in rows:
        for x in cols:
            yield x, y
Scan pixels in a grid pattern along the x - coordinate then y - coordinate
135
15
4,974
def ringscan(x0, y0, r1, r2, metric=chebyshev):
    """Scan pixels in a ring pattern around a center point, clockwise.

    ``metric`` defines the ring shape (distance function); each ring is
    traced by stepping clockwise while the distance stays constant.

    :raises ValueError: if either radius is negative.
    :raises TypeError: if metric is not callable.
    """
    # Validate inputs
    if r1 < 0:
        raise ValueError("Initial radius must be non-negative")
    if r2 < 0:
        raise ValueError("Final radius must be non-negative")
    if not hasattr(metric, "__call__"):
        raise TypeError("Metric not callable")
    # Define clockwise step directions
    direction = 0
    steps = {0: [1, 0],
             1: [1, -1],
             2: [0, -1],
             3: [-1, -1],
             4: [-1, 0],
             5: [-1, 1],
             6: [0, 1],
             7: [1, 1]}
    nsteps = len(steps)
    center = [x0, y0]
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        # Start each ring directly "above" the center.
        initial = [x0, y0 + distance]
        current = initial
        # Number of tries to find a valid neighrbor
        ntrys = 0
        while True:
            # Short-circuit special case
            if distance == 0:
                yield current[0], current[1]
                break
            # Try and take a step and check if still within distance
            nextpoint = [current[i] + steps[direction][i] for i in range(2)]
            if metric(center, nextpoint) != distance:
                # Check if we tried all step directions and failed
                ntrys += 1
                if ntrys == nsteps:
                    break
                # Try the next direction
                direction = (direction + 1) % nsteps
                continue
            ntrys = 0
            yield current[0], current[1]
            # Check if we have come all the way around
            current = nextpoint
            if current == initial:
                break
        # Check if we tried all step directions and failed
        if ntrys == nsteps:
            break
Scan pixels in a ring pattern around a center point clockwise
433
12
4,975
def snakescan(xi, yi, xf, yf):
    """Scan pixels in a snake pattern along the x-coordinate then y-coordinate."""
    dx = 1 if xf >= xi else -1
    dy = 1 if yf >= yi else -1
    # Walk each row, reversing the x-direction whenever a row ends on a
    # boundary so the scan "snakes" back and forth.
    x, xa, xb = xi, xi, xf
    for y in range(yi, yf + dy, dy):
        for x in range(xa, xb + dx, dx):
            yield x, y
        if x in (xa, xb):
            # Swap x-direction
            dx = -dx
            xa, xb = xb, xa
Scan pixels in a snake pattern along the x - coordinate then y - coordinate
159
15
4,976
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
    """Scan pixels in a random walk pattern with the given step
    probabilities (normalized to sum to 1). Yields positions forever
    unless a skip transformation is used with the stop parameter set or
    a clip transformation is used with the abort parameter set.

    :raises ValueError: if any probability is negative.
    """
    if xn < 0:
        raise ValueError("Negative x probabilty must be non-negative")
    if xp < 0:
        raise ValueError("Positive x probabilty must be non-negative")
    if yn < 0:
        raise ValueError("Negative y probabilty must be non-negative")
    if yp < 0:
        raise ValueError("Positive y probabilty must be non-negative")
    # Normalize, then build the cumulative distribution over the 4 moves.
    total = xp + xn + yp + yn
    xn, xp, yn, yp = (p / total for p in (xn, xp, yn, yp))
    cxn = xn
    cxp = cxn + xp
    cyn = cxp + yn
    x, y = x0, y0
    while True:
        yield x, y
        # Sample one unit step from the cumulative distribution.
        roll = random.random()
        if roll <= cxn:
            x -= 1
        elif roll <= cxp:
            x += 1
        elif roll <= cyn:
            y -= 1
        else:
            y += 1
Scan pixels in a random walk pattern with given step probabilities . The random walk will continue indefinitely unless a skip transformation is used with the stop parameter set or a clip transformation is used with the abort parameter set to True . The probabilities are normalized to sum to 1 .
261
52
4,977
def validate(self):
    """Validate the client configuration.

    Checks that the access token is present, contains no whitespace,
    is exactly 64 characters long, and that base_url matches
    self.URL_REGEXP.

    Returns:
        True when the configuration is valid.

    Raises:
        ConfigurationError: if any check fails.
    """
    if self.access_token is None:
        raise ConfigurationError(
            'No access token provided. '
            'Set your access token during client initialization using: '
            '"basecrm.Client(access_token= <YOUR_PERSONAL_ACCESS_TOKEN>)"')
    if re.search(r'\s', self.access_token):
        raise ConfigurationError(
            'Provided access token is invalid '
            'as it contains disallowed characters. '
            # Fixed typo: "you" -> "your"
            'Please double-check your access token.')
    if len(self.access_token) != 64:
        raise ConfigurationError(
            'Provided access token is invalid '
            'as it has invalid length. '
            'Please double-check your access token.')
    if not self.base_url or not re.match(self.URL_REGEXP, self.base_url):
        raise ConfigurationError(
            'Provided base url is invalid '
            # Fixed grammar/typo: "as it not" -> "as it is not", "incldues" -> "includes"
            'as it is not a valid URI. '
            'Please make sure it includes the schema part, '
            'both http and https are accepted, '
            'and the hierarchical part')
    return True
Validates whether a configuration is valid .
236
8
4,978
def start(self, device_uuid):
    """Start a synchronization flow for the given device.

    Returns the new session payload, or None when the server replies
    204 (nothing to synchronize).
    """
    headers = self.build_headers(device_uuid)
    status_code, _, session = self.http_client.post('/sync/start',
                                                    body=None,
                                                    headers=headers)
    if status_code == 204:
        return None
    return session
Start synchronization flow
61
3
4,979
def fetch(self, device_uuid, session_id):
    """Fetch queued items from the session's main queue.

    Returns the list of queue items, or an empty list when the server
    replies 204 (queue drained).
    """
    url = "/sync/{session_id}/queues/main".format(session_id=session_id)
    status_code, _, root = self.http_client.get(
        url,
        params=None,
        headers=self.build_headers(device_uuid),
        raw=True)
    if status_code == 204:
        return []
    return root['items']
Get data from queue
95
4
4,980
def ack(self, device_uuid, ack_keys):
    """Acknowledge received queue items by their ack keys.

    Returns True when the server accepts the acknowledgement (202).
    """
    payload = {'ack_keys': ack_keys}
    status_code, _, _ = self.http_client.post(
        '/sync/ack',
        body=payload,
        headers=self.build_headers(device_uuid))
    return status_code == 202
Acknowledge received data
77
5
4,981
def fetch(self, callback):
    """Perform a full synchronization flow.

    Starts a session for this device, drains the main queue, calls
    callback(meta, data) for every item in order, and acknowledges
    every item for which the callback returned a truthy value.
    """
    # Set up a new synchronization session for this device's UUID.
    session = self.client.sync.start(self.device_uuid)

    # Nothing to synchronize?
    if session is None or 'id' not in session:
        return

    # Drain the main queue until it is empty.
    while True:
        queue_items = self.client.sync.fetch(self.device_uuid, session['id'])
        if not queue_items:
            break

        # Let the client see both data and meta; collect the ack keys of
        # every item the callback accepted.
        ack_keys = [item['meta']['sync']['ack_key']
                    for item in queue_items
                    if callback(item['meta'], item['data'])]

        # Send acknowledgement keys, if any.
        if ack_keys:
            self.client.sync.ack(self.device_uuid, ack_keys)
Perform a full synchronization flow .
236
7
4,982
def weighted_mean(data, weights=None):
    """Calculate the weighted mean of a list.

    Falls back to the unweighted mean() when no weights are given.
    """
    if weights is None:
        return mean(data)
    # Normalize the weights, then accumulate the weighted sum in the same
    # left-to-right order as a plain index loop (so float results match).
    total_weight = float(sum(weights))
    normalized = [w / total_weight for w in weights]
    result = 0
    for index, w in enumerate(normalized):
        result += w * data[index]
    return result
Calculate the weighted mean of a list .
76
10
4,983
def median(data):
    """Calculate the median of a list.

    The input list is left unmodified: a sorted copy is used instead of
    the previous in-place sort, which mutated the caller's list.

    Returns:
        The middle value for odd-length input, or the mean of the two
        middle values for even-length input.

    Raises:
        IndexError: if data is empty.
    """
    ordered = sorted(data)
    num_values = len(ordered)
    half = num_values // 2
    if num_values % 2:
        return ordered[half]
    return 0.5 * (ordered[half - 1] + ordered[half])
Calculate the median of a list .
56
9
4,984
def weighted_median(data, weights=None):
    """Calculate the weighted median of a list.

    The weighted median is the data point at which the cumulative weight
    first exceeds half of the total weight; when the cumulative weight
    lands exactly on the halfway point, the two bounding data points are
    averaged.  With no weights this falls back to the plain median().
    """
    if weights is None:
        return median(data)
    midpoint = 0.5 * sum(weights)
    # A single dominant weight (more than half the total) decides outright.
    if any(j > midpoint for j in weights):
        return data[weights.index(max(weights))]
    if any(j > 0 for j in weights):
        # Sort data and weights together by data value.
        sorted_data, sorted_weights = zip(*sorted(zip(data, weights)))
        # Walk the sorted weights until the cumulative weight passes the
        # midpoint, then step back to the value just below/at it.
        cumulative_weight = 0
        below_midpoint_index = 0
        while cumulative_weight <= midpoint:
            below_midpoint_index += 1
            cumulative_weight += sorted_weights[below_midpoint_index - 1]
        cumulative_weight -= sorted_weights[below_midpoint_index - 1]
        # BUG FIX: the original tested (cumulative_weight - midpoint),
        # which is never positive at this point, so the exact-tie branch
        # ran unconditionally.  Test the shortfall from the midpoint.
        if midpoint - cumulative_weight < sys.float_info.epsilon:
            # Cumulative weight sits exactly on the midpoint: average the
            # two data points that bound it.
            bounds = sorted_data[below_midpoint_index - 2:below_midpoint_index]
            return sum(bounds) / float(len(bounds))
        return sorted_data[below_midpoint_index - 1]
    # NOTE(review): all-zero weights fall through and implicitly return
    # None -- confirm this is the intended behavior.
Calculate the weighted median of a list .
227
10
4,985
def init_app(self, app, config_prefix=None):
    """Read Redis settings from the app configuration and initialize the
    underlying StrictRedis instance.

    Registers this instance in app.extensions under the lowercased,
    normalized prefix; raises ValueError if that prefix is already
    registered.
    """
    # Normalize the prefix and register this instance on the app.
    prefix = (config_prefix or 'REDIS').rstrip('_').upper()
    if not hasattr(app, 'extensions'):
        app.extensions = dict()
    extension_key = prefix.lower()
    if extension_key in app.extensions:
        raise ValueError(
            'Already registered config prefix {0!r}.'.format(prefix))
    app.extensions[extension_key] = _RedisState(self, app)

    # Read config using the normalized prefix.
    args = read_config(app.config, prefix)

    # Instantiate StrictRedis.
    super(Redis, self).__init__(**args)
Actual method to read Redis settings from app configuration and initialize the StrictRedis instance .
177
20
4,986
def _recursive_remove ( fs , path ) : if not fs . is_link ( path = path ) and fs . is_dir ( path = path ) : for child in fs . children ( path = path ) : _recursive_remove ( fs = fs , path = child ) fs . remove_empty_directory ( path = path ) else : fs . remove_file ( path = path )
A recursive non - atomic directory removal .
86
8
4,987
def create(
    name,
    create_file,
    open_file,
    remove_file,
    create_directory,
    list_directory,
    remove_empty_directory,
    temporary_directory,
    stat,
    lstat,
    link,
    readlink,
    realpath=_realpath,
    remove=_recursive_remove,
):
    """Create a new kind of filesystem.

    Builds an attrs-decorated class named *name* whose methods delegate
    to the supplied primitive operations (file/directory creation and
    removal, listing, stat/lstat, links) and to the shared module-level
    helpers (_get_contents, _exists, _is_dir, ...) for the derived
    operations.  *realpath* and *remove* have module-level defaults and
    may be overridden per filesystem kind.
    """
    methods = dict(
        create=create_file,
        # Wrap open_file so callers get an optional mode defaulting to "r".
        open=lambda fs, path, mode="r": open_file(
            fs=fs,
            path=path,
            mode=mode,
        ),
        remove_file=remove_file,
        create_directory=create_directory,
        list_directory=list_directory,
        remove_empty_directory=remove_empty_directory,
        temporary_directory=temporary_directory,
        # Derived operations come from the shared module-level helpers.
        get_contents=_get_contents,
        set_contents=_set_contents,
        create_with_contents=_create_with_contents,
        remove=remove,
        removing=_removing,
        stat=stat,
        lstat=lstat,
        link=link,
        readlink=readlink,
        realpath=realpath,
        exists=_exists,
        is_dir=_is_dir,
        is_file=_is_file,
        is_link=_is_link,
        touch=_touch,
        children=_children,
        glob_children=_glob_children,
    )
    # attr.s(hash=True) makes instances of the generated class hashable.
    return attr.s(hash=True)(type(name, (object,), methods))
Create a new kind of filesystem .
297
7
4,988
def _exists ( fs , path ) : try : fs . stat ( path ) except ( exceptions . FileNotFound , exceptions . NotADirectory ) : return False return True
Check that the given path exists on the filesystem .
38
10
4,989
def _is_dir ( fs , path ) : try : return stat . S_ISDIR ( fs . stat ( path ) . st_mode ) except exceptions . FileNotFound : return False
Check that the given path is a directory .
41
9
4,990
def _is_file ( fs , path ) : try : return stat . S_ISREG ( fs . stat ( path ) . st_mode ) except exceptions . FileNotFound : return False
Check that the given path is a file .
41
9
4,991
def _is_link ( fs , path ) : try : return stat . S_ISLNK ( fs . lstat ( path ) . st_mode ) except exceptions . FileNotFound : return False
Check that the given path is a symbolic link .
43
10
4,992
def list(self, **kwargs):
    """Fetch a list of resources of this controller's type from the API."""
    raw = self.ghost.execute_get('%s/' % self._type_name, **kwargs)
    return ModelList(raw, self._type_name, self, kwargs,
                     model_type=self._model_type)
Fetch a list of resources from the API .
68
10
4,993
def get(self, id=None, slug=None, **kwargs):
    """Fetch a single resource by either its id or its slug.

    Raises GhostException when neither identifier is given.
    """
    if id:
        url = '%s/%s/' % (self._type_name, id)
    elif slug:
        url = '%s/slug/%s/' % (self._type_name, slug)
    else:
        raise GhostException(
            500, 'Either the ID or the Slug of the resource needs to be specified')
    items = self.ghost.execute_get(url, **kwargs)
    return self._model_type(items[self._type_name][0])
Fetch a resource from the API . Either the id or the slug has to be present .
147
19
4,994
def create ( self , * * kwargs ) : response = self . ghost . execute_post ( '%s/' % self . _type_name , json = { self . _type_name : [ kwargs ] } ) return self . _model_type ( response . get ( self . _type_name ) [ 0 ] )
Creates a new resource .
75
6
4,995
def create ( self , * * kwargs ) : return super ( PostController , self ) . create ( * * self . _with_markdown ( kwargs ) )
Creates a new post . When the markdown property is present it will be automatically converted to mobiledoc on v1 . + of the server .
38
31
4,996
def update(self, id, **kwargs):
    """Update an existing post.

    A markdown property, when present, is automatically converted to
    mobiledoc on v1.+ of the server.
    """
    prepared = self._with_markdown(kwargs)
    return super(PostController, self).update(id, **prepared)
Updates an existing post . When the markdown property is present it will be automatically converted to mobiledoc on v1 . + of the server .
42
31
4,997
def define_residues_for_plotting_traj ( self , analysis_cutoff ) : self . residue_counts_fraction = { } #Calculate the fraction of time a residue spends in each simulation for traj in self . residue_counts : self . residue_counts_fraction [ traj ] = { residue : float ( values ) / len ( self . contacts_per_timeframe [ traj ] ) for residue , values in self . residue_counts [ traj ] . items ( ) } for traj in self . residue_counts_fraction : for residue in self . residue_counts_fraction [ traj ] : self . frequency [ residue ] . append ( self . residue_counts_fraction [ traj ] [ residue ] ) self . topology_data . dict_of_plotted_res = { i : self . frequency [ i ] for i in self . frequency if sum ( self . frequency [ i ] ) > ( int ( len ( self . trajectory ) ) * analysis_cutoff ) } assert len ( self . topology_data . dict_of_plotted_res ) != 0 , "Nothing to draw for this ligand:(residue number: " + str ( self . topology_data . universe . ligand . resids [ 0 ] ) + " on the chain " + str ( self . topology_data . universe . ligand . segids [ 0 ] ) + ") - try reducing the analysis cutoff."
Since plotting all residues that have made contact with the ligand over a lengthy simulation is not always feasible or desirable , only the residues that have been in contact with the ligand for a large fraction of the simulation will be plotted in the final image .
329
51
4,998
def detect_aromatic_rings_in_ligand(self):
    """Detect aromatic rings in the ligand using RDKit ring info.

    Only rings of 5-6 atoms in which every atom is flagged aromatic are
    kept; the atom names of each qualifying ring are stored in
    self.ligrings, keyed by a running index.
    """
    self.ligrings = {}
    try:
        ring_info = self.topology_data.mol.GetRingInfo()
        self.ligand_ring_num = ring_info.NumRings()
    except Exception as e:
        # Fallback: re-read the ligand from disk when the stored molecule
        # cannot provide ring info.
        # NOTE(review): assumes "lig.pdb" exists in the working directory
        # -- confirm against the code that writes it.
        m = Chem.MolFromPDBFile("lig.pdb")
        ring_info = m.GetRingInfo()
        self.ligand_ring_num = ring_info.NumRings()
    i = 0
    for ring in range(self.ligand_ring_num):
        # Narrow ring definition: 5 or 6 atoms, all aromatic.
        if 4 < len(ring_info.AtomRings()[ring]) <= 6 and False not in [self.topology_data.mol.GetAtomWithIdx(x).GetIsAromatic() for x in ring_info.AtomRings()[ring]]:
            # Map RDKit atom indices to MDAnalysis atom names.
            atom_ids_in_ring = []
            for atom in ring_info.AtomRings()[ring]:
                atom_ids_in_ring.append(self.topology_data.universe.ligand.atoms[atom].name)
            self.ligrings[i] = atom_ids_in_ring
            i += 1
Using rdkit to detect aromatic rings in ligand - size 4 - 6 atoms and all atoms are part of the ring . Saves this data in self . ligrings .
278
37
4,999
def define_all_protein_rings(self):
    """Build MDAnalysis atom selections for rings in plotted residues.

    Only residues present in self.topology_data.dict_of_plotted_res are
    considered, since they are the only ones that will be analysed.  The
    selections are stored in self.protein_rings, keyed by a running
    index.
    """
    self.protein_rings = {}
    i = 0
    for residue in self.topology_data.dict_of_plotted_res:
        # residue is a (resname, resid, segid) tuple, as used in the
        # selection string below.
        for ring in self.rings:
            # NOTE(review): ring[0] is presumably the residue name of the
            # ring entry -- confirm against where self.rings is built.
            if ring[0] == residue[0]:
                # Concatenate the ring's atom names (leading space is
                # intentional and preserved in the selection string).
                atom_names = ""
                for atom in self.rings[ring]:
                    atom_names = atom_names + " " + atom
                self.protein_rings[i] = self.topology_data.universe.select_atoms("resname " + residue[0] + " and resid " + residue[1] + " and segid " + residue[2] + " and name " + atom_names)
                i += 1
Make MDAnalysis atom selections for rings in protein residues that will be plotted in the final figure - since they are the only ones that should be analysed . Saves the rings in self . protein_rings dictionary .
154
42