idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
11,600
def send_exception ( self ) : self . compiler . reset ( ) exc = traceback . format_exc ( ) self . writer . write ( exc . encode ( 'utf8' ) ) yield from self . writer . drain ( )
When an exception has occurred write the traceback to the user .
51
13
11,601
def handle_one_command ( self ) : while True : yield from self . write_prompt ( ) codeobj = yield from self . read_command ( ) if codeobj is not None : yield from self . run_command ( codeobj )
Process a single command . May have many lines .
53
10
11,602
def run_command ( self , codeobj ) : try : value , stdout = yield from self . attempt_exec ( codeobj , self . namespace ) except Exception : yield from self . send_exception ( ) return else : yield from self . send_output ( value , stdout )
Execute a compiled code object and write the output back to the client .
62
15
11,603
def read_command ( self ) : reader = self . reader line = yield from reader . readline ( ) if line == b'' : # lost connection raise ConnectionResetError ( ) try : # skip the newline to make CommandCompiler work as advertised codeobj = self . attempt_compile ( line . rstrip ( b'\n' ) ) except SyntaxError : yield from self . send_exception ( ) return return codeobj
Read a command from the user line by line .
95
10
11,604
def send_output ( self , value , stdout ) : writer = self . writer if value is not None : writer . write ( '{!r}\n' . format ( value ) . encode ( 'utf8' ) ) if stdout : writer . write ( stdout . encode ( 'utf8' ) ) yield from writer . drain ( )
Write the output or value of the expression back to user .
75
12
11,605
def call ( self , method , * args ) : try : response = getattr ( self . client . service , method ) ( * args ) except ( URLError , SSLError ) as e : log . exception ( 'Failed to connect to responsys service' ) raise ConnectError ( "Request to service timed out" ) except WebFault as web_fault : fault_name = getattr ( web_fault . fault , 'faultstring' , None ) error = str ( web_fault . fault . detail ) if fault_name == 'TableFault' : raise TableFault ( error ) if fault_name == 'ListFault' : raise ListFault ( error ) if fault_name == 'API_LIMIT_EXCEEDED' : raise ApiLimitError ( error ) if fault_name == 'AccountFault' : raise AccountFault ( error ) raise ServiceError ( web_fault . fault , web_fault . document ) return response
Calls the service method defined with the arguments provided
217
10
11,606
def connect ( self ) : if self . session and self . session . is_expired : # Close the session to avoid max concurrent session errors self . disconnect ( abandon_session = True ) if not self . session : try : login_result = self . login ( self . username , self . password ) except AccountFault : log . error ( 'Login failed, invalid username or password' ) raise else : self . session = login_result . session_id self . connected = time ( ) return self . connected
Connects to the Responsys soap service
108
8
11,607
def disconnect ( self , abandon_session = False ) : self . connected = False if ( self . session and self . session . is_expired ) or abandon_session : try : self . logout ( ) except : log . warning ( 'Logout call to responsys failed, session may have not been terminated' , exc_info = True ) del self . session return True
Disconnects from the Responsys soap service
80
9
11,608
def merge_list_members ( self , list_ , record_data , merge_rule ) : list_ = list_ . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) merge_rule = merge_rule . get_soap_object ( self . client ) return MergeResult ( self . call ( 'mergeListMembers' , list_ , record_data , merge_rule ) )
Responsys . mergeListMembers call
103
8
11,609
def merge_list_members_RIID ( self , list_ , record_data , merge_rule ) : list_ = list_ . get_soap_object ( self . client ) result = self . call ( 'mergeListMembersRIID' , list_ , record_data , merge_rule ) return RecipientResult ( result . recipientResult )
Responsys . mergeListMembersRIID call
77
10
11,610
def delete_list_members ( self , list_ , query_column , ids_to_delete ) : list_ = list_ . get_soap_object ( self . client ) result = self . call ( 'deleteListMembers' , list_ , query_column , ids_to_delete ) if hasattr ( result , '__iter__' ) : return [ DeleteResult ( delete_result ) for delete_result in result ] return [ DeleteResult ( result ) ]
Responsys . deleteListMembers call
104
8
11,611
def retrieve_list_members ( self , list_ , query_column , field_list , ids_to_retrieve ) : list_ = list_ . get_soap_object ( self . client ) result = self . call ( 'retrieveListMembers' , list_ , query_column , field_list , ids_to_retrieve ) return RecordData . from_soap_type ( result . recordData )
Responsys . retrieveListMembers call
94
8
11,612
def create_table ( self , table , fields ) : table = table . get_soap_object ( self . client ) return self . call ( 'createTable' , table , fields )
Responsys . createTable call
41
7
11,613
def create_table_with_pk ( self , table , fields , primary_keys ) : table = table . get_soap_object ( self . client ) return self . call ( 'createTableWithPK' , table , fields , primary_keys )
Responsys . createTableWithPK call
56
9
11,614
def delete_table ( self , table ) : table = table . get_soap_object ( self . client ) return self . call ( 'deleteTable' , table )
Responsys . deleteTable call
37
7
11,615
def delete_profile_extension_members ( self , profile_extension , query_column , ids_to_delete ) : profile_extension = profile_extension . get_soap_object ( self . client ) result = self . call ( 'deleteProfileExtensionMembers' , profile_extension , query_column , ids_to_delete ) if hasattr ( result , '__iter__' ) : return [ DeleteResult ( delete_result ) for delete_result in result ] return [ DeleteResult ( result ) ]
Responsys . deleteProfileExtensionRecords call
117
11
11,616
def retrieve_profile_extension_records ( self , profile_extension , field_list , ids_to_retrieve , query_column = 'RIID' ) : profile_extension = profile_extension . get_soap_object ( self . client ) return RecordData . from_soap_type ( self . call ( 'retrieveProfileExtensionRecords' , profile_extension , query_column , field_list , ids_to_retrieve ) )
Responsys . retrieveProfileExtensionRecords call
108
11
11,617
def truncate_table ( self , table ) : table = table . get_soap_object ( self . client ) return self . call ( 'truncateTable' , table )
Responsys . truncateTable call
40
8
11,618
def delete_table_records ( self , table , query_column , ids_to_delete ) : table = table . get_soap_object ( self . client ) result = self . call ( 'deleteTableRecords' , table , query_column , ids_to_delete ) if hasattr ( result , '__iter__' ) : return [ DeleteResult ( delete_result ) for delete_result in result ] return [ DeleteResult ( result ) ]
Responsys . deleteTableRecords call
102
9
11,619
def merge_table_records ( self , table , record_data , match_column_names ) : table = table . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) return MergeResult ( self . call ( 'mergeTableRecords' , table , record_data , match_column_names ) )
Responsys . mergeTableRecords call
86
9
11,620
def merge_table_records_with_pk ( self , table , record_data , insert_on_no_match , update_on_match ) : table = table . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) return MergeResult ( self . call ( 'mergeTableRecordsWithPK' , table , record_data , insert_on_no_match , update_on_match ) )
Responsys . mergeTableRecordsWithPK call
109
11
11,621
def merge_into_profile_extension ( self , profile_extension , record_data , match_column , insert_on_no_match , update_on_match ) : profile_extension = profile_extension . get_soap_object ( self . client ) record_data = record_data . get_soap_object ( self . client ) results = self . call ( 'mergeIntoProfileExtension' , profile_extension , record_data , match_column , insert_on_no_match , update_on_match ) return [ RecipientResult ( result ) for result in results ]
Responsys . mergeIntoProfileExtension call
136
11
11,622
def retrieve_table_records ( self , table , query_column , field_list , ids_to_retrieve ) : table = table . get_soap_object ( self . client ) return RecordData . from_soap_type ( self . call ( 'retrieveTableRecords' , table , query_column , field_list , ids_to_retrieve ) )
Responsys . retrieveTableRecords call
86
9
11,623
def normalize_docroot ( app , root ) : srcdir = app . env . srcdir default_version = app . config . javalink_default_version if isinstance ( root , basestring ) : ( url , base ) = _parse_docroot_str ( srcdir , root ) return { 'root' : url , 'base' : base , 'version' : default_version } else : normalized = { } normalized [ 'root' ] = _parse_docroot_str ( srcdir , root [ 'root' ] ) [ 0 ] if 'base' in root : normalized [ 'base' ] = _parse_docroot_str ( srcdir , root [ 'base' ] ) [ 1 ] else : normalized [ 'base' ] = _parse_docroot_str ( srcdir , root [ 'root' ] ) [ 1 ] if 'version' in root : normalized [ 'version' ] = root [ 'version' ] else : normalized [ 'version' ] = default_version return normalized
Creates a package - list URL and a link base from a docroot element .
221
17
11,624
def assign_valence ( mol ) : for u , v , bond in mol . bonds_iter ( ) : if bond . order == 2 : mol . atom ( u ) . pi = 1 mol . atom ( v ) . pi = 1 if mol . atom ( u ) . symbol == "O" and not mol . atom ( u ) . charge : mol . atom ( v ) . carbonyl_C = 1 if mol . atom ( v ) . symbol == "O" and not mol . atom ( v ) . charge : mol . atom ( u ) . carbonyl_C = 1 elif bond . order == 3 : mol . atom ( u ) . pi = mol . atom ( v ) . pi = 2 max_nbr = { "C" : 4 , "Si" : 4 , "N" : 3 , "P" : 3 , "As" : 3 , "O" : 2 , "S" : 2 , "Se" : 2 , "F" : 1 , "Cl" : 1 , "Br" : 1 , "I" : 1 } for i , nbrs in mol . neighbors_iter ( ) : atom = mol . atom ( i ) if len ( nbrs ) == 2 and all ( bond . order == 2 for bond in nbrs . values ( ) ) : atom . pi = 2 # sp (allene, ketene) if atom . symbol in max_nbr : h_cnt = max_nbr [ atom . symbol ] - len ( nbrs ) - atom . pi + atom . charge if h_cnt > 0 : mol . atom ( i ) . add_hydrogen ( h_cnt ) mol . descriptors . add ( "Valence" )
Assign pi electron and hydrogens
374
7
11,625
def assign_charge ( mol , force_recalc = False ) : # TODO: not implemented yet mol . require ( "Aromatic" ) for i , nbrs in mol . neighbors_iter ( ) : atom = mol . atom ( i ) nbrcnt = len ( nbrs ) if atom . symbol == "N" : if not atom . pi : # non-conjugated amines are anion mol . atom ( i ) . charge_phys = 1 elif nbrcnt == 1 and atom . pi == 2 : # amidine, guanidine are conjugated cation ni = list ( nbrs . keys ( ) ) [ 0 ] conj = False sp2n = None for nni , nnb in mol . neighbors ( ni ) . items ( ) : if mol . atom ( nni ) . symbol == "N" and nnb . order == 2 and not mol . atom ( nni ) . aromatic : mol . atom ( nni ) . charge_conj = 1 conj = True elif mol . atom ( nni ) . symbol == "N" and nni != i : sp2n = nni if conj : mol . atom ( i ) . charge_phys = 1 if sp2n is not None : mol . atom ( sp2n ) . charge_conj = 1 elif atom . symbol == "O" and nbrcnt == 1 and atom . pi == 2 : # oxoacid are conjugated anion ni = list ( nbrs . keys ( ) ) [ 0 ] conj = False if mol . atom ( ni ) . symbol == "N" : mol . atom ( i ) . n_oxide = True mol . atom ( ni ) . n_oxide = True for nni , nnb in mol . neighbors ( ni ) . items ( ) : if mol . atom ( nni ) . symbol in ( "O" , "S" ) and nnb . order == 2 and not mol . atom ( ni ) . n_oxide : mol . atom ( nni ) . charge_conj = - 1 conj = True if conj : mol . atom ( i ) . charge_phys = - 1 elif atom . symbol == "S" and nbrcnt == 1 : # thiophenols are anion ni = list ( nbrs . keys ( ) ) [ 0 ] if mol . atom ( ni ) . aromatic : mol . atom ( i ) . charge_phys = - 1 mol . charge_assigned = True mol . descriptors . add ( "Phys_charge" )
Assign charges in physiological condition
554
6
11,626
def get_type ( type_name ) : parts = type_name . split ( '.' ) if len ( parts ) < 2 : raise SphinxError ( 'Type must be fully-qualified, ' 'of the form ``module.MyClass``. Got: {}' . format ( type_name ) ) module_name = "." . join ( parts [ 0 : - 1 ] ) name = parts [ - 1 ] return getattr ( import_module ( module_name ) , name )
Get a type given its importable name .
105
9
11,627
def get_task_config_fields ( config_class ) : from lsst . pex . config import Field def is_config_field ( obj ) : return isinstance ( obj , Field ) return _get_alphabetical_members ( config_class , is_config_field )
Get all configuration Fields from a Config class .
62
9
11,628
def get_subtask_fields ( config_class ) : from lsst . pex . config import ConfigurableField , RegistryField def is_subtask_field ( obj ) : return isinstance ( obj , ( ConfigurableField , RegistryField ) ) return _get_alphabetical_members ( config_class , is_subtask_field )
Get all configurable subtask fields from a Config class .
75
12
11,629
def _get_alphabetical_members ( obj , predicate ) : fields = dict ( inspect . getmembers ( obj , predicate ) ) keys = list ( fields . keys ( ) ) keys . sort ( ) return { k : fields [ k ] for k in keys }
Get members of an object sorted alphabetically .
57
9
11,630
def typestring ( obj ) : obj_type = type ( obj ) return '.' . join ( ( obj_type . __module__ , obj_type . __name__ ) )
Make a string for the object s type
40
8
11,631
def get_docstring ( obj ) : docstring = getdoc ( obj , allow_inherited = True ) if docstring is None : logger = getLogger ( __name__ ) logger . warning ( "Object %s doesn't have a docstring." , obj ) docstring = 'Undocumented' # ignore is simply the number of initial lines to ignore when determining # the docstring's baseline indent level. We really want "1" here. return prepare_docstring ( docstring , ignore = 1 )
Extract the docstring from an object as individual lines .
109
12
11,632
def extract_docstring_summary ( docstring ) : summary_lines = [ ] for line in docstring : if line == '' : break else : summary_lines . append ( line ) return ' ' . join ( summary_lines )
Get the first summary sentence from a docstring .
50
10
11,633
def run ( self ) : self . mark_incomplete ( ) session = client . get_client ( ) . create_session ( ) # load CPI data cpi = ConsumerPriceIndexFile ( ) . load ( ) # max year we have CPI data for max_cpi_year = cpi [ 'Year' ] . max ( ) # extract annual average only, index by year cpi = cpi . set_index ( 'Year' ) [ 'Annual' ] # process all movies for movie in session . query ( models . Movie ) . all ( ) : # we can only compute an inflation-adjusted budget if we know the year and budget if movie . year is not None and movie . budget is not None : if movie . year > max_cpi_year : # if movie is too new, don't inflation-adjust movie . budget_inflation_adjusted = movie . budget else : movie . budget_inflation_adjusted = movie . budget * cpi . loc [ max_cpi_year ] / cpi . loc [ movie . year ] # done, save all data, finalize task session . commit ( ) session . close ( ) self . mark_complete ( )
Compute and store inflation - adjusted movie budgets
253
9
11,634
def _argsort ( y_score , k = None ) : ranks = y_score . argsort ( ) argsort = ranks [ : : - 1 ] if k is not None : argsort = argsort [ 0 : k ] return argsort
Returns the indexes in descending order of the top k score or all scores if k is None
53
18
11,635
def count ( y_true , y_score = None , countna = False ) : if not countna : return ( ~ np . isnan ( to_float ( y_true ) ) ) . sum ( ) else : return len ( y_true )
Counts the number of examples . If countna is False then only count labeled examples i . e . those with y_true not NaN
55
29
11,636
def count_series ( y_true , y_score , countna = False ) : y_true , y_score = to_float ( y_true , y_score ) top = _argsort ( y_score ) if not countna : a = ( ~ np . isnan ( y_true [ top ] ) ) . cumsum ( ) else : a = range ( 1 , len ( y_true ) + 1 ) return pd . Series ( a , index = range ( 1 , len ( a ) + 1 ) )
Returns series whose i - th entry is the number of examples in the top i
116
16
11,637
def baseline ( y_true , y_score = None ) : if len ( y_true ) > 0 : return np . nansum ( y_true ) / count ( y_true , countna = False ) else : return 0.0
Number of positive labels divided by number of labels or zero if there are no labels
53
16
11,638
def roc_auc ( y_true , y_score ) : notnull = ~ np . isnan ( y_true ) fpr , tpr , thresholds = sklearn . metrics . roc_curve ( y_true [ notnull ] , y_score [ notnull ] ) return sklearn . metrics . auc ( fpr , tpr )
Returns are under the ROC curve
78
7
11,639
def recall_series ( y_true , y_score , k = None , value = True ) : y_true , y_score = to_float ( y_true , y_score ) top = _argsort ( y_score , k ) if not value : y_true = 1 - y_true a = np . nan_to_num ( y_true [ top ] ) . cumsum ( ) return pd . Series ( a , index = np . arange ( 1 , len ( a ) + 1 ) )
Returns series of length k whose i - th entry is the recall in the top i
115
17
11,640
def autorotate ( image , orientation = None ) : orientation_value = orientation if orientation else image . _getexif ( ) . get ( EXIF_KEYS . get ( 'Orientation' ) ) if orientation_value is None : raise ImDirectException ( "No orientation available in Exif " "tag or given explicitly." ) if orientation_value in ( 1 , 2 ) : i = image elif orientation_value in ( 3 , 4 ) : i = image . transpose ( Image . ROTATE_180 ) elif orientation_value in ( 5 , 6 ) : i = image . transpose ( Image . ROTATE_270 ) elif orientation_value in ( 7 , 8 ) : i = image . transpose ( Image . ROTATE_90 ) else : i = image if orientation_value in ( 2 , 4 , 5 , 7 ) : i = i . transpose ( Image . FLIP_LEFT_RIGHT ) return i
Rotate and return an image according to its Exif information .
209
13
11,641
def imdirect_open ( fp ) : img = pil_open ( fp , 'r' ) if img . format == 'JPEG' : # Read Exif tag on image. if isinstance ( fp , string_types ) : exif = piexif . load ( text_type_to_use ( fp ) ) else : fp . seek ( 0 ) exif = piexif . load ( fp . read ( ) ) # If orientation field is missing or equal to 1, nothing needs to be done. orientation_value = exif . get ( '0th' , { } ) . get ( piexif . ImageIFD . Orientation ) if orientation_value is None or orientation_value == 1 : return img # Otherwise, rotate the image and update the exif accordingly. img_rot = autorotate ( img ) exif = update_exif_for_rotated_image ( exif ) # Now, lets restore the output image to # PIL.JpegImagePlugin.JpegImageFile class with the correct, # updated Exif information. # Save image as JPEG to get a correct byte representation of # the image and then read it back. with io . BytesIO ( ) as bio : img_rot . save ( bio , format = 'jpeg' , exif = piexif . dump ( exif ) ) bio . seek ( 0 ) img_rot_new = pil_open ( bio , 'r' ) # Since we use a BytesIO we need to avoid the lazy # loading of the PIL image. Therefore, we explicitly # load the data here. img_rot_new . load ( ) img = img_rot_new return img
Opens identifies the given image file and rotates it if it is a JPEG .
369
17
11,642
def monkey_patch ( enabled = True ) : if enabled : Image . open = imdirect_open else : Image . open = pil_open
Monkey patching PIL . Image . open method
30
11
11,643
def save_with_exif_info ( img , * args , * * kwargs ) : if 'exif' in kwargs : exif = kwargs . pop ( 'exif' ) else : exif = img . info . get ( 'exif' ) img . save ( * args , exif = exif , * * kwargs )
Saves an image using PIL preserving the exif information .
81
13
11,644
def create ( context , resource , * * kwargs ) : data = utils . sanitize_kwargs ( * * kwargs ) uri = '%s/%s' % ( context . dci_cs_api , resource ) r = context . session . post ( uri , timeout = HTTP_TIMEOUT , json = data ) return r
Create a resource
79
3
11,645
def get ( context , resource , * * kwargs ) : uri = '%s/%s/%s' % ( context . dci_cs_api , resource , kwargs . pop ( 'id' ) ) r = context . session . get ( uri , timeout = HTTP_TIMEOUT , params = kwargs ) return r
List a specific resource
77
4
11,646
def get_data ( context , resource , * * kwargs ) : url_suffix = '' if 'keys' in kwargs and kwargs [ 'keys' ] : url_suffix = '/?keys=%s' % ',' . join ( kwargs . pop ( 'keys' ) ) uri = '%s/%s/%s/data%s' % ( context . dci_cs_api , resource , kwargs . pop ( 'id' ) , url_suffix ) r = context . session . get ( uri , timeout = HTTP_TIMEOUT , params = kwargs ) return r
Retrieve data field from a resource
141
7
11,647
def update ( context , resource , * * kwargs ) : etag = kwargs . pop ( 'etag' ) id = kwargs . pop ( 'id' ) data = utils . sanitize_kwargs ( * * kwargs ) uri = '%s/%s/%s' % ( context . dci_cs_api , resource , id ) r = context . session . put ( uri , timeout = HTTP_TIMEOUT , headers = { 'If-match' : etag } , json = data ) return r
Update a specific resource
123
4
11,648
def delete ( context , resource , id , * * kwargs ) : etag = kwargs . pop ( 'etag' , None ) id = id subresource = kwargs . pop ( 'subresource' , None ) subresource_id = kwargs . pop ( 'subresource_id' , None ) uri = '%s/%s/%s' % ( context . dci_cs_api , resource , id ) if subresource : uri = '%s/%s/%s' % ( uri , subresource , subresource_id ) r = context . session . delete ( uri , timeout = HTTP_TIMEOUT , headers = { 'If-match' : etag } ) return r
Delete a specific resource
162
4
11,649
def purge ( context , resource , * * kwargs ) : uri = '%s/%s/purge' % ( context . dci_cs_api , resource ) if 'force' in kwargs and kwargs [ 'force' ] : r = context . session . post ( uri , timeout = HTTP_TIMEOUT ) else : r = context . session . get ( uri , timeout = HTTP_TIMEOUT ) return r
Purge resource type .
98
5
11,650
def parse_rst_content ( content , state ) : # http://www.sphinx-doc.org/en/master/extdev/markupapi.html # #parsing-directive-content-as-rest container_node = nodes . section ( ) container_node . document = state . document viewlist = ViewList ( ) for i , line in enumerate ( content . splitlines ( ) ) : viewlist . append ( line , source = '' , offset = i ) with switch_source_input ( state , viewlist ) : state . nested_parse ( viewlist , 0 , container_node ) return container_node . children
Parse rST - formatted string content into docutils nodes
142
12
11,651
def make_python_xref_nodes ( py_typestr , state , hide_namespace = False ) : if hide_namespace : template = ':py:obj:`~{}`\n' else : template = ':py:obj:`{}`\n' xref_text = template . format ( py_typestr ) return parse_rst_content ( xref_text , state )
Make docutils nodes containing a cross - reference to a Python object .
96
14
11,652
def make_python_xref_nodes_for_type ( py_type , state , hide_namespace = False ) : if py_type . __module__ == 'builtins' : typestr = py_type . __name__ else : typestr = '.' . join ( ( py_type . __module__ , py_type . __name__ ) ) return make_python_xref_nodes ( typestr , state , hide_namespace = hide_namespace )
Make docutils nodes containing a cross - reference to a Python object given the object s type .
110
19
11,653
def make_section ( section_id = None , contents = None ) : section = nodes . section ( ) section [ 'ids' ] . append ( nodes . make_id ( section_id ) ) section [ 'names' ] . append ( section_id ) if contents is not None : section . extend ( contents ) return section
Make a docutils section node .
70
7
11,654
def split_role_content ( role_rawsource ) : parts = { 'last_component' : False , 'display' : None , 'ref' : None } if role_rawsource . startswith ( '~' ) : # Only the last part of a namespace should be shown. parts [ 'last_component' ] = True # Strip that marker off role_rawsource = role_rawsource . lstrip ( '~' ) match = ROLE_DISPLAY_PATTERN . match ( role_rawsource ) if match : parts [ 'display' ] = match . group ( 'display' ) . strip ( ) parts [ 'ref' ] = match . group ( 'reference' ) . strip ( ) else : # No suggested display parts [ 'display' ] = None parts [ 'ref' ] = role_rawsource . strip ( ) return parts
Split the rawsource of a role into standard components .
186
11
11,655
def largest_graph ( mol ) : mol . require ( "Valence" ) mol . require ( "Topology" ) m = clone ( mol ) # Avoid modification of original object if m . isolated : for k in itertools . chain . from_iterable ( m . isolated ) : m . remove_atom ( k ) return m
Return a molecule which has largest graph in the compound Passing single molecule object will results as same as molutil . clone
72
23
11,656
def H_donor_count ( mol ) : mol . require ( "Valence" ) return sum ( 1 for _ , a in mol . atoms_iter ( ) if a . H_donor )
Hydrogen bond donor count
44
5
11,657
def H_acceptor_count ( mol ) : mol . require ( "Valence" ) return sum ( 1 for _ , a in mol . atoms_iter ( ) if a . H_acceptor )
Hydrogen bond acceptor count
44
6
11,658
def rotatable_count ( mol ) : mol . require ( "Rotatable" ) return sum ( 1 for _ , _ , b in mol . bonds_iter ( ) if b . rotatable )
Rotatable bond count
42
4
11,659
def rule_of_five_violation ( mol ) : v = 0 if mw ( mol ) > 500 : v += 1 if H_donor_count ( mol ) > 5 : v += 1 if H_acceptor_count ( mol ) > 10 : v += 1 try : if wclogp . wclogp ( mol ) > 5 : v += 1 except TypeError : # N/A v += 1 return v
Lipinski s rule of five violation count
94
9
11,660
def formula ( mol ) : mol . require ( "Valence" ) mol . require ( "Topology" ) total_cntr = Counter ( ) for m in sorted ( mols_iter ( mol ) , key = len , reverse = True ) : cntr = Counter ( ) for i in m : cntr += mol . atom ( i ) . composition ( ) text = [ ] Cs = cntr . pop ( "C" , 0 ) if Cs : text . append ( "C" ) if Cs > 1 : text . append ( str ( Cs ) ) Hs = cntr . pop ( "H" , 0 ) if Hs : text . append ( "H" ) if Hs > 1 : text . append ( str ( Hs ) ) heteros = sorted ( cntr . items ( ) , key = lambda x : atom_number ( x [ 0 ] ) ) for k , v in heteros : text . append ( k ) if v > 1 : text . append ( str ( v ) ) total_cntr [ "" . join ( text ) ] += 1 total = sorted ( total_cntr . items ( ) , key = lambda x : len ( x [ 0 ] ) , reverse = True ) total_text = [ ] for k , v in total : if v > 1 : total_text . append ( str ( v ) + k ) else : total_text . append ( k ) return "." . join ( total_text )
Chemical formula . Atoms should be arranged in order of C H and other atoms . Molecules should be arranged in order of length of formula text .
324
31
11,661
def debug ( * args , * * attrs ) : attrs . setdefault ( "is_flag" , True ) attrs . setdefault ( "default" , None ) return option ( debug , * args , * * attrs )
Show debugging information .
51
4
11,662
def dryrun ( * args , * * attrs ) : attrs . setdefault ( "is_flag" , True ) attrs . setdefault ( "default" , None ) return option ( dryrun , * args , * * attrs )
Perform a dryrun .
53
6
11,663
def log ( * args , * * attrs ) : attrs . setdefault ( "metavar" , "PATH" ) attrs . setdefault ( "show_default" , False ) return option ( log , * args , * * attrs )
Override log file location .
55
5
11,664
def version ( * args , * * attrs ) : if hasattr ( sys , "_getframe" ) : package = attrs . pop ( "package" , sys . _getframe ( 1 ) . f_globals . get ( "__package__" ) ) if package : attrs . setdefault ( "version" , get_version ( package ) ) return click . version_option ( * args , * * attrs )
Show the version and exit .
94
6
11,665
def to_rdmol ( mol ) : rwmol = Chem . RWMol ( Chem . MolFromSmiles ( '' ) ) key_to_idx = { } bond_type = { 1 : Chem . BondType . SINGLE , 2 : Chem . BondType . DOUBLE , 3 : Chem . BondType . TRIPLE } conf = Chem . Conformer ( rwmol . GetNumAtoms ( ) ) for k , a in mol . atoms_iter ( ) : i = rwmol . AddAtom ( Chem . Atom ( atom_number ( a . symbol ) ) ) key_to_idx [ k ] = i conf . SetAtomPosition ( i , a . coords ) rwmol . AddConformer ( conf ) for u , v , b in mol . bonds_iter ( ) : ui = key_to_idx [ u ] vi = key_to_idx [ v ] rwmol . AddBond ( ui , vi , bond_type [ b . order ] ) Chem . GetSSSR ( rwmol ) # Ring recognition is required for fingerprint rwmol . UpdatePropertyCache ( strict = False ) return rwmol . GetMol ( )
Convert molecule to RDMol
265
7
11,666
def morgan_sim ( mol1 , mol2 , radius = 2 , digit = 3 ) : rdmol1 = to_rdmol ( mol1 ) rdmol2 = to_rdmol ( mol2 ) fp1 = AllChem . GetMorganFingerprint ( rdmol1 , radius ) fp2 = AllChem . GetMorganFingerprint ( rdmol2 , radius ) return round ( DataStructs . DiceSimilarity ( fp1 , fp2 ) , digit )
Calculate morgan fingerprint similarity by using RDKit radius = 2 roughly equivalent to ECFP4
109
20
11,667
def build ( self , X , Y , w = None , edges = None ) : super ( MergeTree , self ) . build ( X , Y , w , edges ) if self . debug : sys . stdout . write ( "Merge Tree Computation: " ) start = time . clock ( ) self . __tree = MergeTreeFloat ( vectorFloat ( self . Xnorm . flatten ( ) ) , vectorFloat ( self . Y ) , str ( self . gradient ) , self . graph_rep . full_graph ( ) , self . debug , ) self . _internal_build ( ) if self . debug : end = time . clock ( ) sys . stdout . write ( "%f s\n" % ( end - start ) )
Assigns data to this object and builds the Merge Tree
159
12
11,668
def build_for_contour_tree ( self , contour_tree , negate = False ) : if self . debug : tree_type = "Join" if negate : tree_type = "Split" sys . stdout . write ( "{} Tree Computation: " . format ( tree_type ) ) start = time . clock ( ) Y = contour_tree . Y if negate : Y = - Y self . __tree = MergeTreeFloat ( vectorFloat ( contour_tree . Xnorm . flatten ( ) ) , vectorFloat ( Y ) , str ( contour_tree . gradient ) , contour_tree . graph_rep . full_graph ( ) , self . debug , ) self . _internal_build ( ) if self . debug : end = time . clock ( ) sys . stdout . write ( "%f s\n" % ( end - start ) )
A helper function that will reduce duplication of data by reusing the parent contour tree s parameters and data
190
21
11,669
def verify_abort ( func , * args , * * kwargs ) : expected_exception = kwargs . pop ( "expected_exception" , runez . system . AbortException ) with CaptureOutput ( ) as logged : try : value = func ( * args , * * kwargs ) assert False , "%s did not raise, but returned %s" % ( func , value ) except expected_exception : return str ( logged )
Convenient wrapper around functions that should exit or raise an exception
99
12
11,670
def pop ( self , strip = False ) : r = self . contents ( ) self . clear ( ) if r and strip : r = r . strip ( ) return r
Current content popped useful for testing
36
6
11,671
def contents ( self ) : c = self . _header [ : ] c . append ( ' font-weight="{}"' . format ( self . font_weight ) ) c . append ( ' font-family="{}"' . format ( self . font_family ) ) c . append ( ' width="{}" height="{}"' . format ( * self . screen_size ) ) sclw = self . original_size [ 0 ] * self . scale_factor sclh = self . original_size [ 1 ] * self . scale_factor longside = max ( [ sclw , sclh ] ) width = round ( longside + self . margin * 2 , 2 ) height = round ( longside + self . margin * 2 , 2 ) xleft = round ( - self . margin - ( longside - sclw ) / 2 , 2 ) ytop = round ( - self . margin - ( longside - sclh ) / 2 , 2 ) c . append ( ' viewBox="{} {} {} {}">\n' . format ( xleft , ytop , width , height ) ) if self . bgcolor is not None : c . append ( '<rect x="{}", y="{}" width="{}" height="{}" fill="{}" \ />\n' . format ( xleft , ytop , width , height , self . bgcolor ) ) c . extend ( self . _elems ) c . append ( "</svg>" ) return "" . join ( c )
Get svg string
332
4
11,672
def data_url_scheme ( self ) : # TODO: move to web.app or make it function # remove #svg from dataframe encoded = base64 . b64encode ( self . contents ( ) . encode ( ) ) return "data:image/svg+xml;base64," + encoded . decode ( )
Get svg in Data URL Scheme format .
72
9
11,673
def _coords_conv ( self , pos ) : px = ( self . original_size [ 0 ] / 2 + pos [ 0 ] ) * self . scale_factor py = ( self . original_size [ 1 ] / 2 - pos [ 1 ] ) * self . scale_factor return round ( px , 2 ) , round ( py , 2 )
For Svg coordinate system reflect over X axis and translate from center to top - left
78
17
11,674
def get_logger ( self ) : if Global . LOGGER : Global . LOGGER . debug ( 'configuring a logger' ) if self . _logger_instance is not None : return self . _logger_instance self . _logger_instance = logging . getLogger ( "flowsLogger" ) self . _logger_instance . setLevel ( logging . DEBUG ) log_format = '%(asctime)s - [%(levelname)s]|%(thread)d\t%(message)s' log_date_format = '%Y-%m-%d %H:%M:%S' formatter = logging . Formatter ( log_format , log_date_format ) new_log_stream_handler = logging . StreamHandler ( ) new_log_stream_handler . setFormatter ( formatter ) new_log_stream_handler . setLevel ( logging . INFO ) self . _logger_instance . addHandler ( new_log_stream_handler ) return self . _logger_instance
Returns the standard logger
232
4
11,675
def reconfigure_log_level ( self ) : if Global . LOGGER : Global . LOGGER . debug ( 'reconfiguring logger level' ) stream_handlers = filter ( lambda x : type ( x ) is logging . StreamHandler , self . _logger_instance . handlers ) for x in stream_handlers : x . level = Global . CONFIG_MANAGER . log_level return self . get_logger ( )
Returns a new standard logger instance
94
6
11,676
def _build_toctree_node ( parent = None , entries = None , includefiles = None , caption = None ) : # Add the toctree's node itself subnode = sphinx . addnodes . toctree ( ) subnode [ 'parent' ] = parent subnode [ 'entries' ] = entries subnode [ 'includefiles' ] = includefiles subnode [ 'caption' ] = caption # These values are needed for toctree node types. We don't need/want # these to be configurable for module-toctree. subnode [ 'maxdepth' ] = 1 subnode [ 'hidden' ] = False subnode [ 'glob' ] = None subnode [ 'hidden' ] = False subnode [ 'includehidden' ] = False subnode [ 'numbered' ] = 0 subnode [ 'titlesonly' ] = False return subnode
Factory for a toctree node .
199
8
11,677
def _parse_skip_option ( self ) : try : skip_text = self . options [ 'skip' ] except KeyError : return [ ] modules = [ module . strip ( ) for module in skip_text . split ( ',' ) ] return modules
Parse the skip option of skipped module names .
55
10
11,678
def _parse_skip_option ( self ) : try : skip_text = self . options [ 'skip' ] except KeyError : return [ ] packages = [ package . strip ( ) for package in skip_text . split ( ',' ) ] return packages
Parse the skip option of skipped package names .
55
10
11,679
def _set_command_line_arguments ( self , args ) : Global . LOGGER . debug ( "setting command line arguments" ) if args . VERBOSE : Global . LOGGER . debug ( "verbose mode active" ) Global . CONFIG_MANAGER . log_level = logging . DEBUG Global . LOGGER_INSTANCE . reconfigure_log_level ( ) if args . STATS > 0 : Global . LOGGER . debug ( f"stats requested every {args.STATS} seconds" ) Global . CONFIG_MANAGER . show_stats = True Global . CONFIG_MANAGER . stats_timeout = args . STATS if args . INTERVAL > 0 : Global . LOGGER . debug ( f"setting sleep interval to {args.INTERVAL} milliseconds" ) Global . CONFIG_MANAGER . sleep_interval = float ( args . INTERVAL ) / 1000 if args . TRACE : Global . LOGGER . debug ( "tracing mode active" ) Global . CONFIG_MANAGER . tracing_mode = True Global . CONFIG_MANAGER . log_level = logging . DEBUG Global . LOGGER_INSTANCE . reconfigure_log_level ( ) if args . MESSAGEINTERVAL is not None and args . MESSAGEINTERVAL > 0 : Global . LOGGER . debug ( f"setting message fetcher sleep interval to {args.MESSAGEINTERVAL/10} milliseconds" ) Global . CONFIG_MANAGER . message_fetcher_sleep_interval = float ( args . MESSAGEINTERVAL ) / 10000 Global . CONFIG_MANAGER . fixed_message_fetcher_interval = True Global . LOGGER . debug ( f"recipes to be parsed: {args.FILENAME}" ) Global . CONFIG_MANAGER . recipes = ( args . FILENAME )
Set internal configuration variables according to the input parameters
399
9
11,680
def start ( self ) : Global . LOGGER . info ( "starting the flow manager" ) self . _start_actions ( ) self . _start_message_fetcher ( ) Global . LOGGER . debug ( "flow manager started" )
Start all the processes
52
4
11,681
def stop ( self ) : Global . LOGGER . info ( "stopping the flow manager" ) self . _stop_actions ( ) self . isrunning = False Global . LOGGER . debug ( "flow manager stopped" )
Stop all the processes
48
4
11,682
def restart ( self ) : Global . LOGGER . info ( "restarting the flow manager" ) self . _stop_actions ( ) # stop the old actions self . actions = [ ] # clear the action list self . _start_actions ( ) # start the configured actions Global . LOGGER . debug ( "flow manager restarted" )
Restart all the processes
73
5
11,683
def _start_actions ( self ) : Global . LOGGER . info ( "starting actions" ) for recipe in Global . CONFIG_MANAGER . recipes : Global . CONFIG_MANAGER . read_recipe ( recipe ) list ( map ( lambda section : self . _start_action_for_section ( section ) , Global . CONFIG_MANAGER . sections ) )
Start all the actions for the recipes
81
7
11,684
def _start_action_for_section ( self , section ) : if section == "configuration" : return Global . LOGGER . debug ( "starting actions for section " + section ) # read the configuration of the action action_configuration = Global . CONFIG_MANAGER . sections [ section ] if len ( action_configuration ) == 0 : Global . LOGGER . warn ( f"section {section} has no configuration, skipping" ) return action_type = None # action_input = None new_managed_input = [ ] if "type" in action_configuration : action_type = action_configuration [ "type" ] if "input" in action_configuration : action_input = action_configuration [ "input" ] new_managed_input = ( item . strip ( ) for item in action_input . split ( "," ) ) my_action = Action . create_action_for_code ( action_type , section , action_configuration , list ( new_managed_input ) ) if not my_action : Global . LOGGER . warn ( f"can't find a type for action {section}, the action will be skipped" ) return self . actions . append ( my_action ) Global . LOGGER . debug ( "updating the subscriptions table" ) for my_input in my_action . monitored_input : self . subscriptions . setdefault ( my_input , [ ] ) . append ( my_action )
Start all the actions for a particular section
312
8
11,685
def _stop_actions ( self ) : Global . LOGGER . info ( "stopping actions" ) list ( map ( lambda x : x . stop ( ) , self . actions ) ) Global . LOGGER . info ( "actions stopped" )
Stop all the actions
52
4
11,686
def _perform_system_check ( self ) : if Global . CONFIG_MANAGER . tracing_mode : Global . LOGGER . debug ( "performing a system check" ) now = datetime . datetime . now ( ) sent = Global . MESSAGE_DISPATCHER . dispatched received = self . fetched queue_length = sent - received message_sleep_interval = Global . CONFIG_MANAGER . message_fetcher_sleep_interval if Global . CONFIG_MANAGER . show_stats : if ( now - self . last_stats_check_date ) . total_seconds ( ) > Global . CONFIG_MANAGER . stats_timeout : self . last_stats_check_date = now stats_string = f"showing stats\n--- [STATS] ---\nMessage Sent: {sent}\nMessage Received: {received}\nMessage Sleep Interval = {message_sleep_interval}\nQueue length = {queue_length}\n--- [ END ] ---" Global . LOGGER . info ( stats_string ) # if we are accumulating messages, or we have processed at least 5000 messages # since last check, we need to speed up the process messages_limit_reached = sent - self . last_queue_check_count > Global . CONFIG_MANAGER . messages_dispatched_for_system_check queue_limit_reached = queue_length > Global . CONFIG_MANAGER . queue_length_for_system_check time_limit_since_last_check_is_over = ( now - self . last_queue_check_date ) . total_seconds ( ) > Global . CONFIG_MANAGER . seconds_between_queue_check if not Global . CONFIG_MANAGER . fixed_message_fetcher_interval : if ( messages_limit_reached ) or ( queue_limit_reached and time_limit_since_last_check_is_over ) : cause = "messages limit reached" if messages_limit_reached else "queue limit reached" Global . LOGGER . debug ( f"triggering the throttle function due to {cause}" ) self . _adapt_sleep_interval ( sent , received , queue_length , now )
Perform a system check to define if we need to throttle to handle all the incoming messages
488
18
11,687
def _deliver_message ( self , msg ) : my_subscribed_actions = self . subscriptions . get ( msg . sender , [ ] ) for action in my_subscribed_actions : if Global . CONFIG_MANAGER . tracing_mode : Global . LOGGER . debug ( f"delivering message to {action.name}" ) action . on_input_received ( msg )
Deliver the message to the subscripted actions
84
9
11,688
def _fetch_messages ( self ) : try : [ _ , msg ] = self . socket . recv_multipart ( flags = zmq . NOBLOCK ) if Global . CONFIG_MANAGER . tracing_mode : Global . LOGGER . debug ( "fetched a new message" ) self . fetched = self . fetched + 1 obj = pickle . loads ( msg ) self . _deliver_message ( obj ) return obj except zmq . error . Again : return None except Exception as new_exception : Global . LOGGER . error ( new_exception ) raise new_exception
Get an input message from the socket
135
7
11,689
async def message_fetcher_coroutine ( self , loop ) : Global . LOGGER . debug ( 'registering callbacks for message fetcher coroutine' ) self . isrunning = True while self . isrunning : loop . call_soon ( self . _fetch_messages ) loop . call_soon ( self . _perform_system_check ) await asyncio . sleep ( Global . CONFIG_MANAGER . message_fetcher_sleep_interval ) Global . LOGGER . debug ( 'message fetcher stopped' )
Register callback for message fetcher coroutines
117
9
11,690
def _adapt_sleep_interval ( self , sent , received , queue , now ) : Global . LOGGER . debug ( "adjusting sleep interval" ) dispatched_since_last_check = sent - self . last_queue_check_count seconds_since_last_check = ( now - self . last_queue_check_date ) . total_seconds ( ) Global . LOGGER . debug ( str ( dispatched_since_last_check ) + " dispatched in the last " + str ( seconds_since_last_check ) ) sleep_time = ( seconds_since_last_check / ( dispatched_since_last_check + queue + 1 ) ) * 0.75 if sleep_time > 0.5 : sleep_time = 0.5 if sleep_time < 0.0001 : sleep_time = 0.0001 self . last_queue_check_date = now self . last_queue_check_count = sent Global . CONFIG_MANAGER . message_fetcher_sleep_interval = sleep_time sleep_interval_log_string = f"new sleep_interval = {sleep_time}" Global . LOGGER . debug ( sleep_interval_log_string ) if Global . CONFIG_MANAGER . show_stats : Global . LOGGER . info ( sleep_interval_log_string )
Adapt sleep time based on the number of the messages in queue
289
12
11,691
def _parse_input_parameters ( self ) : Global . LOGGER . debug ( "define and parsing command line arguments" ) parser = argparse . ArgumentParser ( description = 'A workflow engine for Pythonistas' , formatter_class = argparse . RawTextHelpFormatter ) parser . add_argument ( 'FILENAME' , nargs = '+' , help = 'name of the recipe file(s)' ) parser . add_argument ( '-i' , '--INTERVAL' , type = int , default = 500 , metavar = ( 'MS' ) , help = 'perform a cycle each [MS] milliseconds. (default = 500)' ) parser . add_argument ( '-m' , '--MESSAGEINTERVAL' , type = int , metavar = ( 'X' ) , help = 'dequeue a message each [X] tenth of milliseconds. (default = auto)' ) parser . add_argument ( '-s' , '--STATS' , type = int , default = 0 , metavar = ( 'SEC' ) , help = 'show stats each [SEC] seconds. (default = NO STATS)' ) parser . add_argument ( '-t' , '--TRACE' , action = 'store_true' , help = 'enable super verbose output, only useful for tracing' ) parser . add_argument ( '-v' , '--VERBOSE' , action = 'store_true' , help = 'enable verbose output' ) parser . add_argument ( '-V' , '--VERSION' , action = "version" , version = __version__ ) args = parser . parse_args ( ) return args
Set the configuration for the Logger
369
7
11,692
def migrate_050_to_051 ( session ) : entries_to_update = session . query ( Entry ) . filter ( Entry . forgot_sign_out . is_ ( True ) ) . filter ( Entry . time_out . isnot ( None ) ) for entry in entries_to_update : entry . time_out = None logging . info ( 'Entry updated {}' . format ( entry . uuid ) ) logging . debug ( entry . uuid ) session . add ( entry )
Set time_out field of all flagged timesheet entries to Null .
106
14
11,693
def get_task_param_string ( task ) : # get dict str -> str from luigi param_dict = task . to_str_params ( ) # sort keys, serialize items = [ ] for key in sorted ( param_dict . keys ( ) ) : items . append ( "'{:s}': '{:s}'" . format ( key , param_dict [ key ] ) ) return "{" + ", " . join ( items ) + "}"
Get all parameters of a task as one string
102
9
11,694
def check_completion ( task , mark_incomplete = False , clear = False , return_stats = False ) : # run recursive task checking, get stats to_clear = dict ( ) is_complete , stats = _check_completion ( task , mark_incomplete = mark_incomplete , clear = clear , stats = { } , visited = dict ( ) , to_clear = to_clear ) # task clearing needs to happen top-down: because of foreign key constraints, a task can # only be cleared once all tasks that require it have been cleared while to_clear : # find all tasks that we can currently clear - tasks not required by other tasks; # iterate over list of keys to be able to modify dict while iterating found_clearable_task = False for task_id in list ( to_clear . keys ( ) ) : v = to_clear [ task_id ] if not v [ 'required_by' ] : # this is a task that can be cleared - no other task requires it found_clearable_task = True task = v [ 'task' ] if isinstance ( task , ORMTask ) : task . mark_incomplete ( ) task . clear ( ) _increment_stats ( stats , 'Cleared' ) config . logger . info ( "Cleared task: " + task_id ) else : config . logger . info ( 'Cannot clear task, not an ORMTask: ' + task_id ) # remove the task from the list of tasks that need clearing, remove references # in the required_by lists of all other tasks; this is not an efficient implementation, # O(n^2), could be made O(n) using lookup tables of the task graph del to_clear [ task_id ] for w in to_clear . values ( ) : w [ 'required_by' ] . discard ( task_id ) if not found_clearable_task : raise RuntimeError ( "Error in recursive task clearing, no clearable task found" ) config . logger . info ( "Task completion checking, summary:\n" + str ( stats ) ) if return_stats : return is_complete , stats else : return is_complete
Recursively check if a task and all its requirements are complete
469
13
11,695
def build ( cls , local_scheduler = True , * * task_params ) : luigi . build ( [ cls ( * * task_params ) ] , local_scheduler = local_scheduler )
Instantiate the task and build it with luigi
51
11
11,696
def clear ( self # type: ORMTask ) : # mark this task as incomplete self . mark_incomplete ( ) # delete objects for object_class in self . object_classes : self . session . query ( object_class ) . delete ( ) self . close_session ( )
Delete all objects created by this task .
61
8
11,697
def complete ( self ) : is_complete = super ( ORMWrapperTask , self ) . complete ( ) for req in self . requires ( ) : is_complete &= req . complete ( ) return is_complete
Task is complete if completion marker is set and all requirements are complete
47
13
11,698
def save ( self , filename = None ) : if filename is None : filename = "morse_smale_complex.json" with open ( filename , "w" ) as fp : fp . write ( self . to_json ( ) )
Saves a constructed Morse - Smale Complex in json file
54
12
11,699
def get_label ( self , indices = None ) : if indices is None : indices = list ( range ( 0 , self . get_sample_size ( ) ) ) elif isinstance ( indices , collections . Iterable ) : indices = sorted ( list ( set ( indices ) ) ) else : indices = [ indices ] if len ( indices ) == 0 : return [ ] partitions = self . get_partitions ( self . persistence ) labels = self . X . shape [ 0 ] * [ None ] for label , partition_indices in partitions . items ( ) : for idx in np . intersect1d ( partition_indices , indices ) : labels [ idx ] = label labels = np . array ( labels ) if len ( indices ) == 1 : return labels [ indices ] [ 0 ] return labels [ indices ]
Returns the label pair indices requested by the user
174
9