idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
7,500
def encode(self, encoding=None):
    """Encode into a multihash-encoded digest.

    The function code and the digest length are packed as a two-byte
    prefix in front of the raw digest bytes.  When *encoding* is given,
    the packed value is additionally run through the matching codec
    encoder from ``CodecReg``.

    :param encoding: optional name of a registered output encoding
    :return: the (possibly re-encoded) multihash bytes
    """
    func = self.func
    # Enum members expose their numeric code via ``.value``;
    # application-specific codes are already plain integers.
    code = getattr(func, 'value', func)
    digest = self.digest
    packed = bytes([code, len(digest)]) + digest
    if not encoding:
        return packed
    return CodecReg.get_encoder(encoding)(packed)
r Encode into a multihash - encoded digest .
81
12
7,501
def verify(self, data):
    """Check whether *data* hashes to the digest held by this Multihash.

    The freshly computed digest is truncated to the stored digest's
    length before comparison, so shortened multihashes still verify.

    :param data: raw data to hash with this multihash's function
    :return: True when the (truncated) digests match
    """
    expected = self.digest
    computed = _do_digest(data, self.func)
    return computed[:len(expected)] == expected
r Does the given data hash to the digest in this Multihash ?
37
15
7,502
def truncate(self, length):
    """Return a new Multihash whose digest is cut down to *length* bytes.

    :param length: target digest length in bytes
    :raises ValueError: when *length* exceeds the current digest length
        (a digest cannot be extended)
    """
    current = len(self.digest)
    if length > current:
        raise ValueError(
            "cannot enlarge the original digest by %d bytes"
            % (length - current))
    return self.__class__(self.func, self.digest[:length])
Return a new Multihash with a shorter digest length .
66
12
7,503
def set(self, key: URIRef, value: Union[Literal, BNode, URIRef, str, int], lang: Optional[str] = None):
    """Set the value for the *key* predicate in the metadata graph.

    Plain Python values are coerced into rdflib ``Literal`` objects;
    ``graph.set`` presumably replaces any existing triple for this
    (node, key) pair rather than appending — confirm against rdflib's
    ``Graph.set`` semantics (contrast with :meth:`add`).

    :param key: predicate URI attached to this node
    :param value: object value; coerced to a ``Literal`` unless it is
        already an rdflib node
    :param lang: optional language tag used when coercing *value*
    """
    if not isinstance(value, Literal) and lang is not None:
        # Language-tagged literals carry no datatype.
        value = Literal(value, lang=lang)
    elif not isinstance(value, (BNode, URIRef)):
        # Coerce plain Python values; the cast also yields the matching
        # XSD datatype when one exists.
        value, _type = term._castPythonToLiteral(value)
        if _type is None:
            value = Literal(value)
        else:
            value = Literal(value, datatype=_type)
    self.graph.set((self.asNode(), key, value))
Set the VALUE for KEY predicate in the Metadata Graph
150
12
7,504
def add(self, key, value, lang=None):
    """Add a triple to the graph related to this node.

    Unlike :meth:`set`, this appends an additional triple instead of
    replacing an existing one.  Plain Python values are coerced into
    rdflib ``Literal`` objects the same way :meth:`set` does.

    :param key: predicate URI attached to this node
    :param value: object value; coerced to a ``Literal`` unless it is
        already an rdflib node
    :param lang: optional language tag used when coercing *value*
    """
    if not isinstance(value, Literal) and lang is not None:
        # Language-tagged literals carry no datatype.
        value = Literal(value, lang=lang)
    elif not isinstance(value, (BNode, URIRef)):
        value, _type = term._castPythonToLiteral(value)
        if _type is None:
            value = Literal(value)
        else:
            value = Literal(value, datatype=_type)
    self.graph.add((self.asNode(), key, value))
Add a triple to the graph related to this node
124
10
7,505
def get(self, key, lang=None):
    """Yield the objects of triples ``(node, key, *)`` for this node.

    :param key: predicate to look up
    :param lang: when given, only objects carrying this language tag
        are yielded
    """
    for obj in self.graph.objects(self.asNode(), key):
        if lang is None or obj.language == lang:
            yield obj
Returns triple related to this node . Can filter on lang
69
11
7,506
def get_single(self, key, lang=None):
    """Return a single object for the *key* predicate on this node.

    With *lang*, the first object carrying that language tag wins;
    otherwise the last object seen is returned as a fallback.  Without
    *lang*, the first object found wins.  Returns ``None`` when no
    triple matches.
    """
    if not isinstance(key, URIRef):
        key = URIRef(key)
    node = self.asNode()
    if lang is None:
        return next(iter(self.graph.objects(node, key)), None)
    fallback = None
    for candidate in self.graph.objects(node, key):
        if candidate.language == lang:
            return candidate
        fallback = candidate
    return fallback
Returns a single triple related to this node .
99
9
7,507
def remove(self, predicate=None, obj=None):
    """Remove triples where this node is the subject.

    ``None`` for *predicate* or *obj* acts as a wildcard for the
    underlying graph's ``remove``.
    """
    triple = (self.asNode(), predicate, obj)
    self.graph.remove(triple)
Remove triple matching the predicate or the object
33
8
7,508
def unlink(self, subj=None, predicate=None):
    """Remove triples where this node is the object.

    ``None`` for *subj* or *predicate* acts as a wildcard for the
    underlying graph's ``remove``.
    """
    triple = (subj, predicate, self.asNode())
    self.graph.remove(triple)
Remove triple where Metadata is the object
36
8
7,509
def getOr(subject, predicate, *args, **kwargs):
    """Retrieve an existing metadata node or build a fresh one.

    When the shared graph already holds a triple for (subject,
    predicate), the first matching object is wrapped in a Metadata
    instance; otherwise a new Metadata is created from the remaining
    arguments.
    """
    graph = get_graph()
    if (subject, predicate, None) in graph:
        existing = next(graph.objects(subject, predicate))
        return Metadata(node=existing)
    return Metadata(*args, **kwargs)
Retrieve a metadata node or generate a new one
71
10
7,510
def forwards_func(apps, schema_editor):
    """Create an on-disk config file for every existing BackupRun.

    Forward step of the migration; counterpart of :func:`reverse_func`.
    Failures per run are reported but do not abort the migration.

    :param apps: historical app registry supplied by Django migrations
    :param schema_editor: unused, required by the migration API
    """
    print("\n")
    create_count = 0
    BackupRun = apps.get_model("backup_app", "BackupRun")  # historical version of BackupRun
    backup_runs = BackupRun.objects.all()
    for backup_run in backup_runs:
        # Use the origin BackupRun model to get access to write_config()
        # (historical models carry no custom methods).
        temp = OriginBackupRun(name=backup_run.name, backup_datetime=backup_run.backup_datetime)
        try:
            temp.write_config()
        except OSError as err:
            print("ERROR creating config file: %s" % err)
        else:
            create_count += 1
            # print("%r created." % config_path.path)
    print("%i config files created.\n" % create_count)
manage migrate backup_app 0004_BackupRun_ini_file_20160203_1415
184
22
7,511
def reverse_func(apps, schema_editor):
    """Delete the on-disk config file of every existing BackupRun.

    Reverse step of the migration; undoes :func:`forwards_func`.
    Failures per run are reported but do not abort the migration.

    :param apps: historical app registry supplied by Django migrations
    :param schema_editor: unused, required by the migration API
    """
    print("\n")
    remove_count = 0
    BackupRun = apps.get_model("backup_app", "BackupRun")
    backup_runs = BackupRun.objects.all()
    for backup_run in backup_runs:
        # Use the origin BackupRun model to get access to get_config_path()
        temp = OriginBackupRun(name=backup_run.name, backup_datetime=backup_run.backup_datetime)
        config_path = temp.get_config_path()
        try:
            config_path.unlink()
        except OSError as err:
            print("ERROR removing config file: %s" % err)
        else:
            remove_count += 1
            # print("%r removed." % config_path.path)
    print("%i config files removed.\n" % remove_count)
manage migrate backup_app 0003_auto_20160127_2002
194
16
7,512
def speziale_grun(v, v0, gamma0, q0, q1):
    """Calculate the Grueneisen parameter for the Speziale equation.

    gamma = gamma0 * exp(q0 / q1 * ((v / v0)**q1 - 1))

    :param v: volume
    :param v0: volume at the reference condition
    :param gamma0: Grueneisen parameter at the reference condition
    :param q0: volume-dependence parameter
    :param q1: volume-dependence parameter
    :return: Grueneisen parameter at volume *v*
    """
    # Pick the uncertainties-aware exp only when an input carries errors.
    exp = unp.exp if isuncertainties([v, v0, gamma0, q0, q1]) else np.exp
    return gamma0 * exp(q0 / q1 * ((v / v0) ** q1 - 1.))
calculate Gruneisen parameter for the Speziale equation
111
14
7,513
def speziale_debyetemp(v, v0, gamma0, q0, q1, theta0):
    """Calculate the Debye temperature for the Speziale equation.

    theta = exp(ln(theta0) - integral(gamma/v dv, v0..v)), with the
    integral evaluated numerically by :func:`integrate_gamma`.

    :param v: volume (scalar or array; vectorized over this argument)
    :param v0: volume at the reference condition
    :param gamma0: Grueneisen parameter at the reference condition
    :param q0: volume-dependence parameter
    :param q1: volume-dependence parameter
    :param theta0: Debye temperature at the reference condition
    :return: Debye temperature at volume *v*
    """
    if isuncertainties([v, v0, gamma0, q0, q1, theta0]):
        # Wrap the scalar integrator so it propagates uncertainties,
        # then vectorize over v only (all other args are excluded).
        f_vu = np.vectorize(uct.wrap(integrate_gamma), excluded=[1, 2, 3, 4, 5, 6])
        integ = f_vu(v, v0, gamma0, q0, q1, theta0)
        theta = unp.exp(unp.log(theta0) - integ)
    else:
        f_v = np.vectorize(integrate_gamma, excluded=[1, 2, 3, 4, 5, 6])
        integ = f_v(v, v0, gamma0, q0, q1, theta0)
        theta = np.exp(np.log(theta0) - integ)
    return theta
calculate Debye temperature for the Speziale equation
212
13
7,514
def integrate_gamma(v, v0, gamma0, q0, q1, theta0):
    """Numerically integrate gamma(x)/x from v0 to v (Speziale form).

    Internal helper used to obtain the Debye temperature.  *theta0*
    is accepted for signature compatibility but unused here.

    :return: value of the definite integral (quad's error estimate is
        discarded)
    """
    def gamma_over_v(x):
        # Speziale Grueneisen parameter divided by volume.
        return gamma0 * np.exp(q0 / q1 * ((x / v0) ** q1 - 1.)) / x

    value, _abserr = quad(gamma_over_v, v0, v)
    return value
internal function to calculate Debye temperature
93
7
7,515
def speziale_pth(v, temp, v0, gamma0, q0, q1, theta0, n, z, t_ref=300., three_r=3. * constants.R):
    """Calculate thermal pressure for the Speziale equation.

    Mie-Grueneisen form: p_th = gamma / V_mol * (E_th(T) - E_th(T_ref)),
    with thermal energies from the Debye model.

    :param v: unit-cell volume
    :param temp: temperature
    :param v0: volume at the reference condition
    :param gamma0, q0, q1: Speziale Grueneisen parameters
    :param theta0: Debye temperature at the reference condition
    :param n: number of atoms per formula unit
    :param z: number of formula units per unit cell
    :param t_ref: reference temperature
    :param three_r: 3R term of the Debye energy
    :return: thermal pressure; the 1.e-9 factor presumably converts
        Pa to GPa — TODO confirm units
    """
    v_mol = vol_uc2mol(v, z)
    gamma = speziale_grun(v, v0, gamma0, q0, q1)
    theta = speziale_debyetemp(v, v0, gamma0, q0, q1, theta0)
    xx = theta / temp
    debye = debye_E(xx)
    # At t_ref == 0 the reference thermal energy vanishes; avoid the
    # division by zero in theta / t_ref.
    if t_ref == 0.:
        debye0 = 0.
    else:
        xx0 = theta / t_ref
        debye0 = debye_E(xx0)
    Eth0 = three_r * n * t_ref * debye0
    Eth = three_r * n * temp * debye
    delEth = Eth - Eth0
    p_th = (gamma / v_mol * delEth) * 1.e-9
    return p_th
calculate thermal pressure for the Speziale equation
233
12
7,516
def text(self) -> str:
    """Plain-text representation of the text.

    Delegates to :meth:`export` with the instance's default exclusion
    list.
    """
    excluded = self.default_exclude
    return self.export(output=Mimetypes.PLAINTEXT, exclude=excluded)
String representation of the text
32
5
7,517
def set_creator(self, value: Union[Literal, Identifier, str], lang: str = None):
    """Set the DC Creator literal value.

    :param value: creator value; plain values are coerced by the
        metadata layer
    :param lang: optional language tag
    """
    # Return the result like the sibling setters (set_title,
    # set_description, set_subject) do, for a consistent API.
    return self.metadata.add(key=DC.creator, value=value, lang=lang)
Set the DC Creator literal value
47
6
7,518
def set_title(self, value: Union[Literal, Identifier, str], lang: str = None):
    """Set the DC Title literal value.

    :param value: title value; plain values are coerced by the
        metadata layer
    :param lang: optional language tag
    """
    return self.metadata.add(key=DC.title, value=value, lang=lang)
Set the DC Title literal value
48
6
7,519
def get_description(self, lang: str = None) -> Literal:
    """Get the DC Description of the object.

    :param lang: optional language tag used to pick a specific literal
    :return: a single matching description literal (or None when absent)
    """
    return self.metadata.get_single(key=DC.description, lang=lang)
Get the description of the object
36
6
7,520
def set_description(self, value: Union[Literal, Identifier, str], lang: str = None):
    """Set the DC Description literal value.

    :param value: description value; plain values are coerced by the
        metadata layer
    :param lang: optional language tag
    """
    return self.metadata.add(key=DC.description, value=value, lang=lang)
Set the DC Description literal value
48
6
7,521
def set_subject(self, value: Union[Literal, Identifier, str], lang: str = None):
    """Set the DC Subject literal value.

    :param value: subject value; plain values are coerced by the
        metadata layer
    :param lang: optional language tag
    """
    return self.metadata.add(key=DC.subject, value=value, lang=lang)
Set the DC Subject literal value
48
6
7,522
def childIds(self) -> BaseReferenceSet:
    """Identifiers of children, resolved lazily and cached.

    The first access calls :meth:`getReffs`; later accesses reuse the
    value cached in ``self._childIds``.
    """
    cached = self._childIds
    if cached is None:
        cached = self.getReffs()
        self._childIds = cached
    return cached
Identifiers of children
44
4
7,523
def firstId(self) -> BaseReference:
    """First child's id of the current TextualNode.

    Returns ``None`` when there are no children; raises
    ``NotImplementedError`` when child ids are unavailable.
    """
    children = self.childIds
    if children is None:
        raise NotImplementedError
    if len(children) > 0:
        return children[0]
    return None
First child's id of the current TextualNode
51
9
7,524
def lastId(self) -> BaseReference:
    """Last child's id of the current TextualNode.

    Returns ``None`` when there are no children; raises
    ``NotImplementedError`` when child ids are unavailable.
    """
    children = self.childIds
    if children is None:
        raise NotImplementedError
    if len(children) > 0:
        return children[-1]
    return None
Last child's id of the current TextualNode
52
9
7,525
def compile_vocab(docs, limit=1e6, verbose=0, tokenizer=Tokenizer(stem=None, lower=None, strip=None)):
    """Build a Dictionary of all words used in a sequence of documents.

    Assigns an integer id to every token seen (presumably a
    gensim-style ``Dictionary`` — confirm against the project's
    import).

    :param docs: iterable of documents; Django querysets are detected
        via ``count()``/``iterator()`` and streamed
    :param limit: maximum number of documents to consume
    :param verbose: when truthy, log progress every 100 documents
    :param tokenizer: tokenizer instance or factory argument for
        ``make_tokenizer``
    :return: the populated Dictionary

    NOTE(review): the ``Tokenizer(...)`` default is evaluated once at
    import time and shared across calls — confirm the instance is
    stateless.
    """
    tokenizer = make_tokenizer(tokenizer)
    d = Dictionary()
    try:
        # Django queryset: bound the limit and stream rows.
        limit = min(limit, docs.count())
        docs = docs.iterator()
    except (AttributeError, TypeError):
        pass
    for i, doc in enumerate(docs):
        # if isinstance(doc, (tuple, list)) and len(doc) == 2 and isinstance(doc[1], int):
        #     doc, score = docs
        try:
            # in case docs is a values() queryset (dicts of records in a DB table)
            doc = doc.values()
        except AttributeError:
            # doc already is a values_list
            if not isinstance(doc, str):
                doc = ' '.join([str(v) for v in doc])
            else:
                doc = str(doc)
        if i >= limit:
            break
        d.add_documents([list(tokenizer(doc))])
        if verbose and not i % 100:
            log.info('{}: {}'.format(i, repr(d)[:120]))
    return d
Get the set of words used anywhere in a sequence of documents and assign an integer id
272
17
7,526
def gen_file_lines(path, mode='rUb', strip_eol=True, ascii=True, eol='\n'):
    """Generate a sequence of documents from the lines in a file.

    :param path: file path (opened with *mode*) or an already-open
        file object
    :param mode: open mode; NOTE(review): 'rUb' is a Python 2 universal-
        newline mode and is invalid on Python 3 — confirm target version
    :param strip_eol: strip the trailing newline from each line
    :param ascii: coerce each line with ``str``; NOTE(review): on a
        binary handle under Python 3 this produces "b'...'" reprs —
        presumably Python 2 code
    :param eol: currently unused (see TODO below)
    """
    if isinstance(path, str):
        path = open(path, mode)
    with path:
        # TODO: read one char at a time looking for the eol char and yielding the interveening chars
        for line in path:
            if ascii:
                line = str(line)
            if strip_eol:
                line = line.rstrip('\n')
            yield line
Generate a sequence of documents from the lines in a file
117
12
7,527
def inventory(self, inventory_name):
    """Decorator registering a filter function for *inventory_name*.

    The decorated function is returned unchanged, so stacking other
    decorators on top keeps working.
    """
    def register(func):
        self.add(func=func, inventory_name=inventory_name)
        return func
    return register
Decorator to register filters for given inventory . For a function abc it has the same effect
38
20
7,528
def dispatch(self, collection, **kwargs):
    """Dispatch a collection using the registered filters.

    Filters are tried most-recently-registered first; the first one
    returning exactly ``True`` wins and re-parents the collection.

    :raises UndispatchedTextError: when no filter accepts the
        collection
    """
    for inventory, accepts in reversed(self.methods):
        if accepts(collection, **kwargs) is True:
            collection.parent = self.collection.children[inventory]
            return
    raise UndispatchedTextError(
        "CapitainsCtsText not dispatched %s" % collection.id)
Dispatch a collection using internal filters
81
6
7,529
def generate_tokens(doc, regex=CRE_TOKEN, strip=True, nonwords=False):
    r"""Yield words/tokens from *doc* by iterating regex matches.

    :param doc: string to tokenize
    :param regex: compiled pattern or pattern string (compiled lazily);
        NOTE(review): the ``basestring`` check implies Python 2
    :param strip: strip surrounding punctuation (-_*`(){}' ) from each
        match
    :param nonwords: when True, also yield tokens matching RE_NONWORD
    """
    if isinstance(regex, basestring):
        regex = re.compile(regex)
    for w in regex.finditer(doc):
        if w:
            w = w.group()
            if strip:
                # Trim wrapping punctuation left by the token pattern.
                w = w.strip(r'-_*`()}{' + r"'")
            if w and (nonwords or not re.match(r'^' + RE_NONWORD + '$', w)):
                yield w
r Return a sequence of words or tokens using a re . match iteratively through the str
122
18
7,530
def financial_float(s, scale_factor=1, typ=float, ignore=FINANCIAL_WHITESPACE, percent_str=PERCENT_SYMBOLS, replace=FINANCIAL_MAPPING, normalize_case=str.lower):
    """Strip dollar signs, commas, etc. from a financial numeric string.

    :param s: value to convert; non-strings are fed to ``typ`` directly
    :param scale_factor: multiplier applied when < 1 (otherwise the
        percent scale factor is used)
    :param typ: output type constructor
    :param ignore: substrings removed (case-normalized) before parsing
    :param percent_str: suffixes that each scale the result by 0.01
    :param replace: (old, new) substring replacement pairs
    :param normalize_case: case normalizer applied to *s* and *ignore*
    :return: the converted number, or *s* unchanged when conversion
        fails.  NOTE(review): ``basestring`` implies Python 2.
    """
    percent_scale_factor = 1
    if isinstance(s, basestring):
        s = normalize_case(s).strip()
        for i in ignore:
            s = s.replace(normalize_case(i), '')
        s = s.strip()
        for old, new in replace:
            s = s.replace(old, new)
        for p in percent_str:
            if s.endswith(p):
                # %% will become 0.0001
                percent_scale_factor *= 0.01
                s = s[:-len(p)]
    try:
        return (scale_factor if scale_factor < 1 else percent_scale_factor) * typ(float(s))
    except (ValueError, TypeError):
        return s
Strip dollar signs and commas from financial numerical string
221
11
7,531
def is_invalid_date(d):
    """Return whether *d* is an invalid date.

    ``False`` when *d* is not a date at all, ``True`` when its year is
    outside [1970, 2100), and ``None`` (implicitly "valid") otherwise —
    callers apparently distinguish the three values.
    """
    if isinstance(d, DATE_TYPES):
        if d.year < 1970 or d.year >= 2100:
            return True
        return None
    return False
Return boolean to indicate whether date is invalid None if valid False if not a date
43
16
7,532
def vocab_freq(docs, limit=1e6, verbose=1, tokenizer=generate_tokens):
    """Count occurrences of every word used in a sequence of documents.

    :param docs: iterable of documents; Django querysets are detected
        via ``count()``/``iterator()`` and streamed
    :param limit: maximum number of documents to consume
    :param verbose: progress-printing frequency control
    :param tokenizer: callable yielding tokens for one document
    :return: a ``Counter`` of token frequencies

    NOTE(review): ``c.keys()[:3]`` only works on Python 2 where
    ``keys()`` returns a list; ``basestring`` confirms the Python 2
    assumption.  The bare ``except`` silently swallows all errors from
    the queryset probe — deliberate best-effort, but worth narrowing.
    """
    total = Counter()
    try:
        limit = min(limit, docs.count())
        docs = docs.iterator()
    except:
        pass
    for i, doc in enumerate(docs):
        try:
            # in case docs is a values() queryset of DB records
            doc = doc.values()
        except AttributeError:
            # doc already is a values_list
            if not isinstance(doc, basestring):
                doc = ' '.join([stringify(v) for v in doc])
            else:
                doc = stringify(doc)
        if i >= limit:
            break
        c = Counter(tokenizer(doc, strip=True, nonwords=False))
        if verbose and (verbose < 1e-3 or not i % int(limit * verbose)):
            print('{}: {} ... {}'.format(i, c.keys()[:3], c.keys()[-3:] if len(c.keys()) > 6 else ''))
        total += c
    return total
Get the set of words used anywhere in a sequence of documents and count occurrences
228
15
7,533
def make_filename(s, allow_whitespace=False, allow_underscore=False, allow_hyphen=False, limit=255, lower=False):
    r"""Sanitize *s* into a usable filename.

    Strips characters matched by ``CRE_BAD_FILENAME``, optionally
    removes whitespace, hyphens and underscores, optionally
    lowercases, and caps the length at *limit*.  Falls back to a
    (truncated) ``'empty'`` when nothing survives.
    """
    name = CRE_BAD_FILENAME.sub('', stringify(s))
    if not allow_whitespace:
        name = CRE_WHITESPACE.sub('', name)
    if lower:
        name = str.lower(name)
    for ch, allowed in (('-', allow_hyphen), ('_', allow_underscore)):
        if not allowed:
            name = name.replace(ch, '')
    if limit is not None:
        name = name[:limit]
    return name or 'empty'[:limit]
r Make sure the provided string is a valid filename and optionally remove whitespace
157
15
7,534
def stem(self, s):
    """Stem *s*, staying picklable by avoiding bound-method references.

    Falls back to identity when no stemmer is configured; when the
    stored object has no ``stem`` (e.g. it is a lemmatizer, or the
    attribute was lost on unpickling) its ``lemmatize`` is used
    instead, defaulting to ``passthrough``.
    """
    if self._stemmer is None:
        return passthrough(s)
    try:
        # try the local attribute `stemmer`, a StemmerI instance first
        # if you use the self.stem method from an unpickled object it may not work
        return getattr(getattr(self, '_stemmer', None), 'stem', None)(s)
    except (AttributeError, TypeError):
        # No usable .stem — fall back to a lemmatizer interface.
        return getattr(getattr(self, '_stemmer', self), 'lemmatize', passthrough)(s)
This should make the Stemmer picklable and unpicklable by not using bound methods
130
20
7,535
def assoc(self, index, value):
    """Return a new vector with *value* stored at *index*.

    The receiver is not modified; the result grows when *index* is at
    or beyond the current length.
    """
    result = ImmutableVector()
    result.tree = self.tree.assoc(index, value)
    result._length = max(self._length, index + 1)
    return result
Return a new vector with value associated at index . The implicit parameter is not modified .
67
17
7,536
def concat(self, tailvec):
    """Return the concatenation of this vector and *tailvec*.

    Neither operand is modified; *tailvec*'s items are re-associated
    at offsets following the receiver's last index.
    """
    offset = self._length
    pairs = [(offset + i, tailvec[i]) for i in range(tailvec._length)]
    result = ImmutableVector()
    result.tree = self.tree.multi_assoc(pairs)
    result._length = offset + tailvec._length
    return result
Returns the result of concatenating tailvec to the implicit parameter
88
13
7,537
def pop(self):
    """Return a new ImmutableVector with the last item removed.

    :raises IndexError: when the vector is empty
    """
    if not self._length:
        raise IndexError()
    last = self._length - 1
    result = ImmutableVector()
    result.tree = self.tree.remove(last)
    result._length = last
    return result
Return a new ImmutableVector with the last item removed .
60
12
7,538
def read(self, identifier, path):
    """Retrieve and parse a text given an identifier.

    :param identifier: URN of the text
    :param path: path of the XML file to parse
    :return: instance of ``self.classes["text"]`` built from the
        parsed resource
    """
    with open(path) as f:
        o = self.classes["text"](urn=identifier, resource=self.xmlparse(f))
    return o
Retrieve and parse a text given an identifier
46
9
7,539
def _parse_textgroup(self, cts_file):
    """Parse a textgroup from a __cts__.xml file.

    :param cts_file: path of the metadata file
    :return: (parsed textgroup, the path it was parsed from)
    """
    with io.open(cts_file) as __xml__:
        return self.classes["textgroup"].parse(resource=__xml__), cts_file
Parses a textgroup from a cts file
54
11
7,540
def _parse_work(self, cts_file, textgroup):
    """Parse a work (and its child texts) from a __cts__.xml file.

    :param cts_file: path of the metadata file
    :param textgroup: parent textgroup object
    :return: (work, list of child texts, directory containing the file)
    """
    with io.open(cts_file) as __xml__:
        work, texts = self.classes["work"].parse(resource=__xml__, parent=textgroup, _with_children=True)
    return work, texts, os.path.dirname(cts_file)
Parses a work from a cts file
82
10
7,541
def _parse_text(self, text, directory):
    """Complete a text's metadata with its citation scheme.

    Resolves the text's expected XML path from its URN, parses the
    document, and rebuilds the citation hierarchy onto the metadata
    object.  Errors are logged rather than raised.

    :param text: text metadata object (``text.id`` is its URN)
    :param directory: directory holding the text's XML file
    :return: True on success, False when the file is missing, parsing
        fails, or no passages are found
    """
    text_id, text_metadata = text.id, text
    text_metadata.path = "{directory}/{textgroup}.{work}.{version}.xml".format(
        directory=directory,
        textgroup=text_metadata.urn.textgroup,
        work=text_metadata.urn.work,
        version=text_metadata.urn.version
    )
    if os.path.isfile(text_metadata.path):
        try:
            text = self.read(text_id, path=text_metadata.path)
            cites = list()
            # Rebuild the citation chain deepest-first so each level
            # can reference the previously built child.
            for cite in [c for c in text.citation][::-1]:
                if len(cites) >= 1:
                    cites.append(self.classes["citation"](
                        xpath=cite.xpath.replace("'", '"'),
                        scope=cite.scope.replace("'", '"'),
                        name=cite.name,
                        child=cites[-1]
                    ))
                else:
                    cites.append(self.classes["citation"](
                        xpath=cite.xpath.replace("'", '"'),
                        scope=cite.scope.replace("'", '"'),
                        name=cite.name
                    ))
            del text
            text_metadata.citation = cites[-1]
            self.logger.info("%s has been parsed ", text_metadata.path)
            if not text_metadata.citation.is_set():
                self.logger.error("%s has no passages", text_metadata.path)
                return False
            return True
        except Exception:
            # Any parsing failure (most probably the citation) is
            # reported and treated as an invalid text.
            self.logger.error("%s does not accept parsing at some level (most probably citation) ", text_metadata.path)
            return False
    else:
        self.logger.error("%s is not present", text_metadata.path)
        return False
Complete the TextMetadata object with its citation scheme by parsing the original text
398
15
7,542
def _dispatch(self, textgroup, directory):
    """Run the dispatcher over a textgroup.

    Known textgroups are merged into the already-dispatched
    collection; new ones are dispatched.  Works already present in the
    collection are updated in place.

    :param textgroup: textgroup object to dispatch
    :param directory: path passed to the dispatcher for new groups
    """
    if textgroup.id in self.dispatcher.collection:
        self.dispatcher.collection[textgroup.id].update(textgroup)
    else:
        self.dispatcher.dispatch(textgroup, path=directory)
    for work_urn, work in textgroup.works.items():
        if work_urn in self.dispatcher.collection[textgroup.id].works:
            self.dispatcher.collection[work_urn].update(work)
Run the dispatcher over a textgroup .
107
8
7,543
def parse(self, resource):
    """Parse a list of directories into the resolver's collection.

    Walks each folder for textgroup ``__cts__.xml`` files, then work
    metadata, then parses each text's citation scheme; texts that fail
    are collected and cleaned afterwards.

    :param resource: iterable of base folders to scan
    :return: the resulting inventory (the dispatcher's collection)
    """
    textgroups = []
    texts = []
    invalids = []
    for folder in resource:
        cts_files = glob("{base_folder}/data/*/__cts__.xml".format(base_folder=folder))
        for cts_file in cts_files:
            textgroup, cts_file = self._parse_textgroup(cts_file)
            textgroups.append((textgroup, cts_file))
    for textgroup, cts_textgroup_file in textgroups:
        cts_work_files = glob("{parent}/*/__cts__.xml".format(parent=os.path.dirname(cts_textgroup_file)))
        for cts_work_file in cts_work_files:
            _, parsed_texts, directory = self._parse_work(cts_work_file, textgroup)
            texts.extend([(text, directory) for text in parsed_texts])
    for text, directory in texts:
        # If text_id is not none, the text parsing errored
        if not self._parse_text(text, directory):
            invalids.append(text)
    # Dispatching routine
    for textgroup, textgroup_path in textgroups:
        self._dispatch_container(textgroup, textgroup_path)
    # Clean invalids if there was a need
    self._clean_invalids(invalids)
    self.inventory = self.dispatcher.collection
    return self.inventory
Parse a list of directories and reads it into a collection
333
12
7,544
def velocities_to_moduli(rho, v_phi, v_s):
    """Convert seismic velocities to elastic moduli (Burnman support).

    :param rho: density
    :param v_phi: bulk sound velocity
    :param v_s: shear velocity
    :return: (K_s, G) = (v_phi**2 * rho, v_s**2 * rho)
    """
    adiabatic_bulk = v_phi * v_phi * rho
    shear = v_s * v_s * rho
    return adiabatic_bulk, shear
convert velocities to moduli mainly to support Burnman operations
44
14
7,545
def moduli_to_velocities(rho, K_s, G):
    """Convert elastic moduli to seismic velocities (Burnman support).

    :param rho: density
    :param K_s: adiabatic bulk modulus
    :param G: shear modulus
    :return: (v_phi, v_s) = (sqrt(K_s/rho), sqrt(G/rho))
    """
    bulk_velocity = np.sqrt(K_s / rho)
    shear_velocity = np.sqrt(G / rho)
    return bulk_velocity, shear_velocity
convert moduli to velocities mainly to support Burnman operations
44
14
7,546
def jamieson_pst(v, v0, c0, s, gamma0, q, theta0, n, z, mass, c_v, three_r=3. * constants.R, t_ref=300.):
    """Calculate static pressure at 300 K from Hugoniot data (const-q).

    Static pressure is the Hugoniot pressure minus the Hugoniot
    thermal pressure at the same volume.

    :param v: unit-cell volume
    :param v0: unit-cell volume at the reference condition
    :param c0, s: linear Hugoniot Us-Up parameters
    :param gamma0, q: Grueneisen parameters (const-q form)
    :param theta0: Debye temperature at the reference condition
    :param n: number of atoms per formula unit
    :param z: number of formula units per unit cell
    :param mass: molar mass
    :param c_v: heat capacity
    :param three_r: 3R term of the Debye energy
    :param t_ref: reference temperature
    :return: static pressure
    """
    # The 1.e-6 factor presumably converts the molar volume into the
    # density unit expected by hugoniot_p — TODO confirm units.
    rho = mass / vol_uc2mol(v, z) * 1.e-6
    rho0 = mass / vol_uc2mol(v0, z) * 1.e-6
    p_h = hugoniot_p(rho, rho0, c0, s)
    p_th_h = jamieson_pth(v, v0, c0, s, gamma0, q, theta0, n, z, mass, c_v, three_r=three_r, t_ref=t_ref)
    p_st = p_h - p_th_h
    return p_st
calculate static pressure at 300 K from Hugoniot data using the constq formulation
197
18
7,547
def jamieson_pth(v, v0, c0, s, gamma0, q, theta0, n, z, mass, c_v, three_r=3. * constants.R, t_ref=300.):
    """Calculate thermal pressure from Hugoniot data (const-q form).

    Derives the Hugoniot temperature at *v* and feeds it into the
    const-q thermal-pressure expression.

    :param v: unit-cell volume
    :param v0: unit-cell volume at the reference condition
    :param c0, s: linear Hugoniot Us-Up parameters
    :param gamma0, q: Grueneisen parameters (const-q form)
    :param theta0: Debye temperature at the reference condition
    :param n: number of atoms per formula unit
    :param z: number of formula units per unit cell
    :param mass: molar mass
    :param c_v: heat capacity
    :param three_r: 3R term of the Debye energy
    :param t_ref: reference temperature
    :return: thermal pressure
    """
    # The 1.e-6 factor presumably converts the molar volume into the
    # density unit expected by hugoniot_t — TODO confirm units.
    rho = mass / vol_uc2mol(v, z) * 1.e-6
    rho0 = mass / vol_uc2mol(v0, z) * 1.e-6
    temp = hugoniot_t(rho, rho0, c0, s, gamma0, q, theta0, n, mass, three_r=three_r, t_ref=t_ref, c_v=c_v)
    pth = constq_pth(v, temp, v0, gamma0, q, theta0, n, z, t_ref=t_ref, three_r=three_r)
    return pth
calculate thermal pressure from Hugoniot data using the constq formulation
205
15
7,548
def hugoniot_p_nlin(rho, rho0, a, b, c):
    """Calculate pressure along a Hugoniot (nonlinear Us-Up relation).

    Solves the quadratic Us = a + b*Up + c*Up**2 together with the
    Rankine-Hugoniot relations (Jamieson 1982).

    :param rho: density array
    :param rho0: reference density
    :param a, b, c: nonlinear Us-Up fit coefficients
    :return: Hugoniot pressure
    """
    eta = 1. - (rho0 / rho)
    # Up stays 0 where eta == 0 to avoid the division by zero below.
    # NOTE(review): with uncertainties inputs, np.zeros_like(eta)
    # yields plain floats, so the masked assignment may drop error
    # propagation for the zero entries — confirm intended.
    Up = np.zeros_like(eta)
    if isuncertainties([rho, rho0, a, b, c]):
        Up[eta != 0.] = ((b * eta - 1.) + unp.sqrt(np.power((1. - b * eta), 2.) - 4. * np.power(eta, 2.) * a * c)) / (-2. * eta * c)
    else:
        Up[eta != 0.] = ((b * eta - 1.) + np.sqrt(np.power((1. - b * eta), 2.) - 4. * np.power(eta, 2.) * a * c)) / (-2. * eta * c)
    Us = a + Up * b + Up * Up * c
    Ph = rho0 * Up * Us
    return Ph
Calculate pressure along a Hugoniot through nonlinear equations presented in Jamieson 1982
244
21
7,549
def generate_address_label(self):
    """Construct the address label and return it as one joined string.

    Builds up ``self.address_label`` from the organisation, PO box and
    premises elements (joining exception-rule elements onto one line),
    pads the list to 7 entries, then places the post town.

    :return: comma-separated label with empty entries dropped
    """
    if self.organisation_name:
        self.address_label.append(self.organisation_name)
    if self.department_name:
        self.address_label.append(self.department_name)
    if self.po_box_number:
        self.address_label.append('PO Box ' + self.po_box_number)
    elements = [
        self.sub_building_name,
        self.building_name,
        self.building_number,
        self.dependent_thoroughfare,
        self.thoroughfare,
        self.double_dependent_locality,
        self.dependent_locality,
    ]
    for element in elements:
        if element:
            self._append_to_label(element)
    # pad label to length of 7 if not already
    if len(self.address_label) < 7:
        for i in range(7 - len(self.address_label)):
            self.address_label.append('')
    # finally, add post town
    # NOTE(review): position 5 is overwritten unconditionally; for
    # labels already holding 7+ elements this discards whatever was
    # there — confirm intended.
    self.address_label[5] = self.post_town
    return ", ".join([f for f in self.address_label if f])
Construct a list for address label .
243
7
7,550
def _is_exception_rule(self, element):
    """Check whether *element* triggers the label-joining exception.

    True for digit...digit spans ("12"), digit...digit+letter forms
    ("12A"), and single letters.
    """
    first, last = element[0], element[-1]
    if first.isdigit() and last.isdigit():
        return True
    if len(element) > 1 and first.isdigit() and element[-2].isdigit() and last.isalpha():
        return True
    return len(element) == 1 and element.isalpha()
Check for exception rule .
99
5
7,551
def _append_to_label(self, element):
    """Append an address element to the label.

    When the previous element matches the exception rule it is joined
    with *element* on one line instead of starting a new one.
    """
    label = self.address_label
    if label and self._is_exception_rule(label[-1]):
        label[-1] += ' ' + element
    else:
        label.append(element)
Append address element to the label .
74
8
7,552
def load_template_source(template_name, template_dirs=None):
    """Template loader that loads templates from a ZIP file.

    Tries every archive listed in ``settings.TEMPLATE_ZIP_FILES`` in
    order and returns ``(source, "archive:template_name")`` for the
    first one that contains *template_name*.

    :param template_name: name of the template inside the archive
    :param template_dirs: unused, required by the loader API
    :raises TemplateDoesNotExist: when no archive holds the template
    """
    template_zipfiles = getattr(settings, "TEMPLATE_ZIP_FILES", [])
    # Try each ZIP file in TEMPLATE_ZIP_FILES.
    for fname in template_zipfiles:
        try:
            # ``with`` guarantees the archive is closed even when
            # read() raises KeyError (the original leaked the handle
            # in that case) or the return is taken.
            with zipfile.ZipFile(fname) as archive:
                source = archive.read(template_name)
        except (IOError, KeyError):
            continue
        # We found a template, so return the source.
        template_path = "%s:%s" % (fname, template_name)
        return (source, template_path)
    # If we reach here, the template couldn't be loaded
    raise TemplateDoesNotExist(template_name)
Template loader that loads templates from a ZIP file .
170
10
7,553
def sanitize_capabilities(caps):
    """Sanitize Selenic capabilities so Browserstack accepts them.

    Mutates *caps* in place and returns it: normalizes the platform
    name, converts Windows 10 to os/os_version fields, rounds Edge
    versions down to ``.0``, and renames ``version`` to
    ``browser_version``.
    """
    platform_uc = caps["platform"].upper()
    if platform_uc.startswith("WINDOWS 8"):
        caps["platform"] = "WIN8"
    elif platform_uc.startswith("OS X "):
        caps["platform"] = "MAC"
    elif platform_uc == "WINDOWS 10":
        # Browserstack wants os/os_version instead of a platform name.
        del caps["platform"]
        caps["os"] = "Windows"
        caps["os_version"] = "10"
    if caps["browserName"].upper() == "MICROSOFTEDGE":
        # Sauce Labs takes complete version numbers like 15.1234.
        # However, Browser Stack takes only .0 numbers like 15.0.
        major = caps["version"].split(".", 1)[0]
        caps["version"] = major + ".0"
    caps["browser_version"] = caps.pop("version")
    return caps
Sanitize the capabilities we pass to Selenic so that they can be consumed by Browserstack .
224
20
7,554
def my_func(version):  # noqa: D202
    """Enclosing function.

    Defines (but does not return) a class whose imported support
    module depends on *version* — presumably a documentation/test
    fixture demonstrating enclosed scopes.
    """
    class MyClass(object):
        """Enclosed class."""
        # Module choice is resolved at class-creation time.
        if version == 2:
            import docs.support.python2_module as pm
        else:
            import docs.support.python3_module as pm

        def __init__(self, value):
            self._value = value

        def _get_value(self):
            return self._value
        # Property wires the local getter with the module's setter.
        value = property(_get_value, pm._set_value, None, "Value property")
Enclosing function .
109
5
7,555
def get_subscriptions ( self , publication_id = None , owner_id = None , since_when = None , limit_to = 200 , max_calls = None , start_record = 0 , verbose = False ) : query = "SELECT Objects() FROM Subscription" # collect all where parameters into a list of # (key, operator, value) tuples where_params = [ ] if owner_id : where_params . append ( ( 'owner' , '=' , "'%s'" % owner_id ) ) if publication_id : where_params . append ( ( 'publication' , '=' , "'%s'" % publication_id ) ) if since_when : d = datetime . date . today ( ) - datetime . timedelta ( days = since_when ) where_params . append ( ( 'LastModifiedDate' , ">" , "'%s 00:00:00'" % d ) ) if where_params : query += " WHERE " query += " AND " . join ( [ "%s %s %s" % ( p [ 0 ] , p [ 1 ] , p [ 2 ] ) for p in where_params ] ) subscription_list = self . get_long_query ( query , limit_to = limit_to , max_calls = max_calls , start_record = start_record , verbose = verbose ) return subscription_list
Fetches all subscriptions from Membersuite of a particular publication_id if set .
305
18
7,556
def get_prep_value(self, value):
    """Convert JSON wrapper values to their database string form.

    The psycopg adaptor returns Python objects, so serialization back
    to a JSON string has to be handled here.  Values of unknown type
    pass through untouched.

    :param value: value coming from the ORM layer
    :return: JSON string for the wrapper types, otherwise *value*
    """
    if isinstance(value, JSON.JsonDict):
        return json.dumps(value, cls=JSON.Encoder)
    if isinstance(value, JSON.JsonList):
        # Lists keep their original serialized representation.
        return value.json_string
    if isinstance(value, JSON.JsonString):
        return json.dumps(value)
    return value
The psycopg adaptor returns Python objects but we also have to handle conversion ourselves
82
17
7,557
def registry(attr, base=type):
    """Generate a metaclass that indexes subclasses by *attr*.

    Every class created through the metaclass registers itself in a
    shared ``__registry__`` dict keyed by ``getattr(cls, attr)``,
    unless that key is ``NotImplemented`` (abstract bases).
    ``__dispatch__(key)`` looks a class back up.

    :param attr: attribute name whose value keys the registry
    :param base: metaclass to derive from (default ``type``)
    :return: the generated metaclass
    """
    class Registry(base):
        def __init__(cls, name, bases, attrs):
            super(Registry, cls).__init__(name, bases, attrs)
            # Lazily create the shared mapping on the first class.
            if not hasattr(cls, '__registry__'):
                cls.__registry__ = {}
            registration_key = getattr(cls, attr)
            if registration_key is not NotImplemented:
                assert registration_key not in cls.__registry__
                cls.__registry__[registration_key] = cls

        def __dispatch__(cls, key):
            """Return the class registered under *key*.

            :raises ValueError: for unknown keys
            """
            try:
                return cls.__registry__[key]
            except KeyError:
                raise ValueError('Unknown %s: %s' % (attr, key))

    return Registry
Generates a meta class to index sub classes by their keys .
173
13
7,558
def debug_generate(self, debug_generator, *gen_args, **gen_kwargs):
    """Lazily build and log a DEBUG message.

    *debug_generator* is only invoked when DEBUG logging is enabled,
    so expensive message construction is skipped otherwise.  Returning
    ``None`` from the generator skips logging (content filtering).
    """
    if not self.isEnabledFor(logging.DEBUG):
        return None
    message = debug_generator(*gen_args, **gen_kwargs)
    if message is None:
        # Content filtering opted out of logging this message.
        return None
    return self.debug(message)
Used for efficient debug logging where the actual message isn't evaluated unless it will actually be accepted by the logger.
77
22
7,559
def verify_token(token, public_key_or_address, signing_algorithm="ES256K"):
    """Validate an individual token.

    Checks the payload structure, verifies that the supplied key or
    address matches the issuer's public key (in any of its compressed,
    uncompressed, or address forms), and verifies the signature.

    :param token: encoded token string
    :param public_key_or_address: value the issuer key must match
    :param signing_algorithm: NOTE(review): accepted but never used in
        this body — confirm whether the verifier should receive it
    :return: the decoded token
    :raises ValueError: on any structural, key-match, or signature
        failure
    """
    decoded_token = decode_token(token)
    decoded_token_payload = decoded_token["payload"]
    # Structural checks: subject/issuer public keys and a claim must
    # all be present before any cryptography happens.
    if "subject" not in decoded_token_payload:
        raise ValueError("Token doesn't have a subject")
    if "publicKey" not in decoded_token_payload["subject"]:
        raise ValueError("Token doesn't have a subject public key")
    if "issuer" not in decoded_token_payload:
        raise ValueError("Token doesn't have an issuer")
    if "publicKey" not in decoded_token_payload["issuer"]:
        raise ValueError("Token doesn't have an issuer public key")
    if "claim" not in decoded_token_payload:
        raise ValueError("Token doesn't have a claim")
    issuer_public_key = str(decoded_token_payload["issuer"]["publicKey"])
    public_key_object = ECPublicKey(issuer_public_key)
    compressed_public_key = compress(issuer_public_key)
    decompressed_public_key = decompress(issuer_public_key)
    # Derive both address forms regardless of how the key was encoded.
    if public_key_object._type == PubkeyType.compressed:
        compressed_address = public_key_object.address()
        uncompressed_address = bin_hash160_to_address(bin_hash160(decompress(public_key_object.to_bin())))
    elif public_key_object._type == PubkeyType.uncompressed:
        compressed_address = bin_hash160_to_address(bin_hash160(compress(public_key_object.to_bin())))
        uncompressed_address = public_key_object.address()
    else:
        raise ValueError("Invalid issuer public key format")
    # The verifying value may be either public-key form or either
    # address form.
    if public_key_or_address == compressed_public_key:
        pass
    elif public_key_or_address == decompressed_public_key:
        pass
    elif public_key_or_address == compressed_address:
        pass
    elif public_key_or_address == uncompressed_address:
        pass
    else:
        raise ValueError("Token public key doesn't match the verifying value")
    token_verifier = TokenVerifier()
    if not token_verifier.verify(token, public_key_object.to_pem()):
        raise ValueError("Token was not signed by the issuer public key")
    return decoded_token
A function for validating an individual token .
568
9
7,560
def verify_token_record(token_record, public_key_or_address, signing_algorithm="ES256K"):
    """Validate a token record and return its decoded token.

    Delegates signature/key validation to :func:`verify_token`, then
    rejects records whose parent key differs from the issuer key
    (keychain-signed tokens are not supported yet).

    :raises ValueError: when the record has no token, fails
        verification, or uses an unsupported keychain signature
    """
    if "token" not in token_record:
        raise ValueError("Token record must have a token inside it")
    decoded_token = verify_token(
        token_record["token"], public_key_or_address,
        signing_algorithm=signing_algorithm)
    payload = decoded_token["payload"]
    issuer_public_key = payload["issuer"]["publicKey"]
    if "parentPublicKey" in token_record:
        if issuer_public_key != token_record["parentPublicKey"]:
            raise ValueError(
                "Verification of tokens signed with keychains is not yet supported")
    return decoded_token
A function for validating an individual token record and extracting the decoded token .
188
16
7,561
def get_profile_from_tokens(token_records, public_key_or_address, hierarchical_keys=False):
    """Assemble a profile dict from a list of token records.

    Records that fail verification are skipped silently; claims from
    the remaining records are merged in order, later records winning.

    :raises NotImplementedError: when *hierarchical_keys* is requested
    """
    if hierarchical_keys:
        raise NotImplementedError("Hierarchical key support not implemented")
    profile = {}
    for record in token_records:
        try:
            decoded = verify_token_record(record, public_key_or_address)
        except ValueError:
            # Invalid record: skip rather than abort the whole profile.
            continue
        if "payload" in decoded and "claim" in decoded["payload"]:
            profile.update(decoded["payload"]["claim"])
    return profile
A function for extracting a profile from a list of tokens .
165
12
7,562
def resolve_zone_file_to_profile(zone_file, address_or_public_key):
    """Resolve a zone file to a profile, verifying token signatures.

    Legacy-format zone files are already profiles and returned as-is.
    Otherwise the token file is fetched over HTTP and its records are
    verified against *address_or_public_key*.

    :raises Exception: with a stage-specific message on any failure.
        NOTE(review): the caught exception ``e`` is discarded; chaining
        with ``raise ... from e`` would preserve the cause.
    """
    if is_profile_in_legacy_format(zone_file):
        return zone_file
    try:
        token_file_url = get_token_file_url_from_zone_file(zone_file)
    except Exception as e:
        raise Exception("Token file URL could not be extracted from zone file")
    try:
        r = requests.get(token_file_url)
    except Exception as e:
        raise Exception("Token could not be acquired from token file URL")
    try:
        profile_token_records = json.loads(r.text)
    except ValueError:
        raise Exception("Token records could not be extracted from token file")
    try:
        profile = get_profile_from_tokens(profile_token_records, address_or_public_key)
    except Exception as e:
        raise Exception("Profile could not be extracted from token records")
    return profile
Resolves a zone file to a profile and checks to makes sure the tokens are signed with a key that corresponds to the address or public key provided .
212
30
7,563
def __dog_started(self):
    """Prepare the watchdog for a scheduled-task start.

    Fetches the task from the watched record and validates its type.

    :raises RuntimeError: when a task is already attached, or when the
        record's task is not a ``WScheduleTask``
    """
    if self.__task is not None:
        raise RuntimeError('Unable to start task. In order to start a new task - at first stop it')
    self.__task = self.record().task()
    if isinstance(self.__task, WScheduleTask) is False:
        task_class = self.__task.__class__.__qualname__
        raise RuntimeError('Unable to start unknown type of task: %s' % task_class)
Prepare watchdog for scheduled task starting
113
7
7,564
def __thread_started(self):
    """Start the scheduled task and wait for its startup event.

    :raises RuntimeError: when no task was prepared beforehand
    """
    if self.__task is None:
        raise RuntimeError('Unable to start thread without "start" method call')
    self.__task.start()
    # Bounded wait so a hung startup cannot block the thread forever.
    self.__task.start_event().wait(self.__scheduled_task_startup_timeout__)
Start a scheduled task
71
4
7,565
def _polling_iteration(self):
    """Single poll step: watch the scheduled task for stop events."""
    task = self.__task
    if task is None:
        # Nothing to watch: signal readiness immediately.
        self.ready_event().set()
    elif task.check_events() is True:
        # The task signalled completion: mark ready and deregister.
        self.ready_event().set()
        self.registry().task_finished(self)
Poll for scheduled task stop events
68
6
7,566
def thread_stopped(self):
    """Stop the scheduled task because the watchdog is stopping."""
    task = self.__task
    if task is None:
        return
    # Only stop the task if it has not already been asked to stop.
    if not task.stop_event().is_set():
        task.stop()
    self.__task = None
Stop scheduled task because of watchdog stop
52
9
7,567
def stop_running_tasks(self):
    """Terminate every currently running task and clear the registry."""
    registry = self.__running_registry
    for running_task in registry:
        running_task.stop()
    registry.clear()
Terminate all the running tasks
38
6
7,568
def add_source(self, task_source):
    """Register a new task source and fold its start time into the schedule."""
    self.__sources[task_source] = task_source.next_start()
    # Recompute the cached "next start" using the new source.
    self.__update(task_source)
Add new tasks source
48
4
7,569
def __update_all(self):
    """Recompute the next start time by re-checking every registered source."""
    self.__next_start = None
    self.__next_sources = []
    for src in self.__sources:
        self.__update(src)
Recheck next start of records from all the sources
43
11
7,570
def __update(self, task_source):
    """Fold a single source's next start time into the cached schedule state.

    Raises ValueError for naive or non-UTC datetimes.
    """
    next_start = task_source.next_start()
    if next_start is None:
        return
    # Only timezone-aware UTC datetimes are comparable here.
    if next_start.tzinfo is None or next_start.tzinfo != timezone.utc:
        raise ValueError('Invalid timezone information')

    if self.__next_start is None or next_start < self.__next_start:
        # Strictly earlier: this source alone defines the next start.
        self.__next_start = next_start
        self.__next_sources = [task_source]
    elif next_start == self.__next_start:
        # Tie: this source starts alongside the current candidates.
        self.__next_sources.append(task_source)
Recheck next start of tasks from the given one only
139
12
7,571
def check(self):
    """Return a tuple of records that are ready to start, or None.

    None is returned when no start time is scheduled, when it has not
    arrived yet, or when the due sources produced no records.
    """
    if self.__next_start is None:
        return None
    if utc_datetime() < self.__next_start:
        return None

    ready = []
    for source in self.__next_sources:
        records = source.has_records()
        if records is not None:
            ready.extend(records)
    # Re-derive the next start time now that due sources were consumed.
    self.__update_all()
    return tuple(ready) if ready else None
Check if there are records that are ready to start and return them if there are any
104
17
7,572
def thread_started(self):
    """Start the record registry, wait for it, then start this scheduler."""
    registry = self.__running_record_registry
    registry.start()
    registry.start_event().wait()
    WPollingThreadTask.thread_started(self)
Start required registries and start this scheduler
52
9
7,573
def dir_contains(dirname, path, exists=True):
    """Check whether *path* is contained in directory *dirname*.

    With ``exists=True`` both paths are resolved to absolute paths and
    compared on the filesystem via ``samefile``; with ``exists=False``
    the check is a pure string-prefix comparison.
    """
    if not exists:
        # Lexical check only: no filesystem access.
        return dirname in osp.commonprefix([dirname, path])

    dirname = osp.abspath(dirname)
    path = osp.abspath(path)
    if six.PY2 or six.PY34:
        # Python 2 lacks os.path.commonpath; emulate it with
        # commonprefix + samefile.  (The PY34 case is kept from the
        # original — TODO confirm why 3.4+ takes this branch too.)
        return osp.exists(path) and osp.samefile(
            osp.commonprefix([dirname, path]), dirname)
    return osp.samefile(osp.commonpath([dirname, path]), dirname)
Check if a file of directory is contained in another .
129
11
7,574
def get_next_name(old, fmt='%i'):
    """Return the name that numerically follows *old*.

    The last run of digits in *old* is incremented by one and all other
    characters are preserved, e.g. ``'file1' -> 'file2'``,
    ``'v9' -> 'v10'``.

    Fix: the regex is now a raw string; the original ``'\\d+'`` relied
    on an invalid escape sequence, which is a SyntaxWarning on modern
    Python.

    Parameters
    ----------
    old : str
        The name to increment; must contain at least one digit.
    fmt : str
        Unused here; kept for backward compatibility with callers.

    Raises
    ------
    ValueError
        If *old* contains no digits.
    """
    nums = re.findall(r'\d+', old)
    if not nums:
        raise ValueError("Could not get the next name because the old name "
                         "has no numbers in it")
    num0 = nums[-1]
    num1 = str(int(num0) + 1)
    # Replace the *last* occurrence of num0: reverse, replace the first
    # (reversed) occurrence, then reverse back.
    return old[::-1].replace(num0[::-1], num1[::-1], 1)[::-1]
Return the next name that numerically follows old
121
9
7,575
def go_through_dict(key, d, setdefault=None):
    """Walk *d* along *key*, split on unescaped dots.

    Returns the final key component together with the innermost mapping,
    so the caller can read or assign ``sub_d[k]``.  When *setdefault* is
    given (a factory such as ``dict``), missing intermediate levels are
    created with it.
    """
    # Split on '.' unless it is escaped as '\.'.
    parts = re.split(r'(?<!\\)\.', key)
    sub_d = d
    for part in parts[:-1]:
        if setdefault is not None:
            sub_d = sub_d.setdefault(part, setdefault())
        else:
            sub_d = sub_d[part]
    return parts[-1], sub_d
Split up the key by '.' and get the value from the base dictionary d
120
15
7,576
def sha1_hmac(secret, document):
    """Return the Base64 encoding of the HMAC-SHA1 of *document* keyed by *secret*.

    Fix: the original used the Python 2-only ``bytes.encode("base64")``
    codec (slicing off its trailing newline), which raises on Python 3.
    ``base64.b64encode`` produces the same digest text (no trailing
    newline) and works on both.
    """
    # Local import keeps the module's import block untouched.
    import base64
    digest = hmac.new(secret, document, hashlib.sha1).digest()
    return base64.b64encode(digest).decode("ascii")
Calculate the Base 64 encoding of the HMAC for the given document .
50
16
7,577
def filter_query_string(query):
    """Return *query* with the signing parameters removed.

    Strips any ``&``-separated parameter beginning with ``_k=``,
    ``_e=`` or ``_s``.
    """
    kept = []
    for param in query.split('&'):
        # startswith accepts a tuple: one call covers all three prefixes.
        if param.startswith(('_k=', '_e=', '_s')):
            continue
        kept.append(param)
    return '&'.join(kept)
Return a version of the query string with the _e _k and _s values removed .
71
19
7,578
def fost_hmac_url_signature(key, secret, host, path, query_string, expires):
    """Return the signature corresponding to a signed URL.

    The signed document is ``host + path`` (plus ``?query_string`` when
    present), a newline, and the expiry value.  ``key`` is accepted for
    interface compatibility but not used here.
    """
    url = '%s%s' % (host, path)
    if query_string:
        document = '%s?%s\n%s' % (url, query_string, expires)
    else:
        document = '%s\n%s' % (url, expires)
    return sha1_hmac(secret, document)
Return a signature that corresponds to the signed URL .
95
10
7,579
def fost_hmac_request_signature(secret, method, path, timestamp, headers={}, body=''):
    """Calculate the request signature for the given secret and arguments.

    Builds the ``X-FOST-Headers`` description line (header names in dict
    order) followed by the header values, and delegates to
    ``fost_hmac_request_signature_with_headers``.

    NOTE(review): the mutable default ``headers={}`` is kept for interface
    compatibility; it is only read, never mutated, here.
    """
    names = list(headers.keys())
    values = [headers[name] for name in names]
    signed_headers = ' '.join(['X-FOST-Headers'] + names)
    return fost_hmac_request_signature_with_headers(
        secret, method, path, timestamp, [signed_headers] + values, body)
Calculate the signature for the given secret and arguments .
111
12
7,580
def fost_hmac_request_signature_with_headers(secret, method, path, timestamp, headers, body):
    """Calculate the signature for the given secret and other arguments.

    Returns a ``(document, signature)`` pair: the exact text that was
    signed and its HMAC-SHA1 signature.
    """
    header_block = '\n'.join(headers)
    document = "%s %s\n%s\n%s\n%s" % (method, path, timestamp, header_block, body)
    signature = sha1_hmac(secret, document)
    # %-style args keep formatting lazy for the logging module.
    logging.info("Calculated signature %s for document\n%s", signature, document)
    return document, signature
Calculate the signature for the given secret and other arguments .
106
13
7,581
def get_order(membersuite_id, client=None):
    """Get an Order by its MemberSuite ID.

    Returns None when *membersuite_id* is falsy.  A client is created
    (and a session requested) when none is supplied.

    Raises:
        ExecuteMSQLError: when the MSQL query reports failure.
    """
    if not membersuite_id:
        return None

    client = client or get_new_client(request_session=True)
    if not client.session_id:
        client.request_session()

    # NOTE(review): the ID is interpolated straight into the MSQL query
    # string — confirm upstream that it can never carry untrusted input.
    query = "SELECT Object() FROM ORDER WHERE ID = '{}'".format(membersuite_id)
    result = client.execute_object_query(query)

    msql_result = result["body"]["ExecuteMSQLResult"]
    if not msql_result["Success"]:
        raise ExecuteMSQLError(result=result)

    return Order(
        membersuite_object_data=msql_result["ResultValue"]["SingleObject"])
Get an Order by ID .
188
6
7,582
def export_private_key(self, password=None):
    """Export the private key in PEM format.

    With *password* (str or bytes) the key is emitted as an encrypted
    PKCS#8 blob; without it, as unencrypted traditional OpenSSL.

    Raises ValueError when no private key is set.
    """
    if self.__private_key is None:
        raise ValueError('Unable to call this method. Private key must be set')

    if password is None:
        return self.__private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption()
        )

    if isinstance(password, str):
        password = password.encode()
    return self.__private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.BestAvailableEncryption(password)
    )
Export a private key in PEM - format
167
9
7,583
def export_public_key(self):
    """Export the public key in PEM (SubjectPublicKeyInfo) format.

    Raises ValueError when no public key is set.
    """
    key = self.__public_key
    if key is None:
        raise ValueError('Unable to call this method. Public key must be set')
    return key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo
    )
Export a public key in PEM - format
75
9
7,584
def import_private_key(self, pem_text, password=None):
    """Import a private key from data in PEM format.

    Both *pem_text* and *password* may be str or bytes; strings are
    encoded (default UTF-8) before being handed to the backend.
    """
    if isinstance(pem_text, str):
        pem_text = pem_text.encode()
    # isinstance(None, str) is False, so the explicit None-check of the
    # original is redundant here.
    if isinstance(password, str):
        password = password.encode()
    self.__set_private_key(serialization.load_pem_private_key(
        pem_text, password=password, backend=default_backend()
    ))
Import a private key from data in PEM - format
109
11
7,585
def decrypt(self, data, oaep_hash_fn_name=None, mgf1_hash_fn_name=None):
    """Decrypt *data* that was encrypted with RSA PKCS#1 OAEP padding.

    Hash function names default to the class attributes
    ``__default_oaep_hash_function_name__`` /
    ``__default_mgf1_hash_function_name__`` and are resolved as
    attributes of the ``hashes`` module.

    Raises ValueError when no private key is set.
    """
    if self.__private_key is None:
        raise ValueError('Unable to call this method. Private key must be set')

    cls = self.__class__
    if oaep_hash_fn_name is None:
        oaep_hash_fn_name = cls.__default_oaep_hash_function_name__
    if mgf1_hash_fn_name is None:
        mgf1_hash_fn_name = cls.__default_mgf1_hash_function_name__

    oaep_algorithm = getattr(hashes, oaep_hash_fn_name)()
    mgf1_algorithm = getattr(hashes, mgf1_hash_fn_name)()
    return self.__private_key.decrypt(
        data,
        padding.OAEP(
            mgf=padding.MGF1(algorithm=mgf1_algorithm),
            algorithm=oaep_algorithm,
            label=None
        )
    )
Decrypt data that was encrypted using the PKCS1 OAEP protocol
251
13
7,586
def validate(self, value, model=None, context=None):
    """Validate the string length of *value* against the min/max settings.

    Returns an empty Error on success, otherwise an Error carrying the
    matching message (too_short / too_long / not_in_range) and the
    min/max params.  *model* and *context* are accepted for interface
    compatibility and unused here.

    NOTE(review): a bound of 0 is treated as "unset" because of the
    truthiness checks — kept from the original.
    """
    length = len(str(value))
    params = dict(min=self.min, max=self.max)

    # Lower bound only
    if self.min and self.max is None:
        if length < self.min:
            return Error(self.too_short, params)
    # Upper bound only
    elif self.max and self.min is None:
        if length > self.max:
            return Error(self.too_long, params)
    # Both bounds
    elif self.min and self.max:
        if length < self.min or length > self.max:
            return Error(self.not_in_range, params)

    # Success otherwise
    return Error()
Validate Perform value validation against validation settings and return simple result object
153
13
7,587
def qteSaveMacroData(self, data, widgetObj: QtGui.QWidget = None):
    """Associate arbitrary *data* with *widgetObj*.

    The data is stored in the widget's applet-specific macro storage
    under this macro's name and can be read back with ``qteMacroData``.
    Defaults to the widget the macro was invoked from.
    """
    # Widgets registered via qteAddWidget carry a _qteAdmin attribute;
    # anything else is rejected.
    if widgetObj is not None and not hasattr(widgetObj, '_qteAdmin'):
        msg = ('<widgetObj> was probably not added with <qteAddWidget>'
               ' method because it lacks the <_qteAdmin> attribute.')
        raise QtmacsOtherError(msg)

    target = widgetObj if widgetObj else self.qteWidget
    target._qteAdmin.macroData[self.qteMacroName()] = data
Associate arbitrary data with widgetObj .
161
8
7,588
def qteMacroData(self, widgetObj: QtGui.QWidget = None):
    """Retrieve data previously stored for *widgetObj* via qteSaveMacroData.

    Returns None (and creates the empty entry) when nothing was stored
    for this macro yet.  Defaults to the widget the macro was invoked
    from.
    """
    if widgetObj is not None and not hasattr(widgetObj, '_qteAdmin'):
        msg = ('<widgetObj> was probably not added with <qteAddWidget>'
               ' method because it lacks the <_qteAdmin> attribute.')
        raise QtmacsOtherError(msg)

    if not widgetObj:
        widgetObj = self.qteWidget

    store = widgetObj._qteAdmin.macroData
    name = self.qteMacroName()
    try:
        store[name]
    except KeyError:
        # First access for this macro: create an empty slot so the
        # read below (and future reads) succeed.
        store[name] = None
    return store[name]
Retrieve widgetObj specific data previously saved with qteSaveMacroData .
230
16
7,589
def qteSetAppletSignature(self, appletSignatures: (str, tuple, list)):
    """Specify the applet signatures this macro is compatible with.

    Accepts a single string or a tuple/list of strings; stored
    internally as a tuple of strings.
    """
    # Normalise a lone string to a 1-tuple.
    if not isinstance(appletSignatures, (tuple, list)):
        appletSignatures = (appletSignatures,)

    # Every element must be a string.
    for val in appletSignatures:
        if not isinstance(val, str):
            args = ('appletSignatures', 'str', inspect.stack()[0][3])
            raise QtmacsArgumentError(*args)

    self._qteAppletSignatures = tuple(appletSignatures)
Specify the applet signatures with which this macro is compatible .
176
13
7,590
def qteSetWidgetSignature(self, widgetSignatures: (str, tuple, list)):
    """Specify the widget signatures this macro is compatible with.

    Accepts a single string or a tuple/list of strings; stored
    internally as a tuple of strings.
    """
    # Normalise a lone string to a 1-tuple.
    if not isinstance(widgetSignatures, (tuple, list)):
        widgetSignatures = (widgetSignatures,)

    # Every element must be a string.
    for val in widgetSignatures:
        if not isinstance(val, str):
            args = ('widgetSignatures', 'str', inspect.stack()[0][3])
            raise QtmacsArgumentError(*args)

    self._qteWidgetSignatures = tuple(widgetSignatures)
Specify the widget signatures with which this macro is compatible .
166
12
7,591
def qtePrepareToRun(self):
    """Run the macro, announcing its progress via Qtmacs signals.

    Called by Qtmacs to prepare the macro for execution.  Emits
    ``qtesigMacroStart`` before running, ``qtesigMacroFinished`` on
    success, and ``qtesigMacroError`` (after re-enabling macro
    processing and logging) when ``qteRun`` raises any exception.
    """
    # Report the execution attempt.
    msgObj = QtmacsMessage((self.qteMacroName(), self.qteWidget), None)
    msgObj.setSignalName('qtesigMacroStart')
    self.qteMain.qtesigMacroStart.emit(msgObj)

    # Try to run the macro and radio the success via the
    # ``qtesigMacroFinished`` signal.
    try:
        self.qteRun()
        self.qteMain.qtesigMacroFinished.emit(msgObj)
    except Exception as err:
        # Gather applet ID/signature for the error message (both None
        # when the macro has no associated applet).
        if self.qteApplet is None:
            appID = appSig = None
        else:
            appID = self.qteApplet.qteAppletID()
            appSig = self.qteApplet.qteAppletSignature()
        msg = ('Macro <b>{}</b> (called from the <b>{}</b> applet'
               ' with ID <b>{}</b>) did not execute properly.')
        msg = msg.format(self.qteMacroName(), appSig, appID)
        # Argument errors carry extra detail worth showing to the user.
        if isinstance(err, QtmacsArgumentError):
            msg += '<br/>' + str(err)
        # Irrespective of the error, log it, enable macro
        # processing (in case it got disabled), and trigger the
        # error signal.
        self.qteMain.qteEnableMacroProcessing()
        self.qteMain.qtesigMacroError.emit(msgObj)
        self.qteLogger.exception(msg, exc_info=True, stack_info=True)
This method is called by Qtmacs to prepare the macro for execution .
372
15
7,592
def all_valid(formsets):
    """Return True when every formset in *formsets* is valid.

    ``is_valid()`` is called on every formset — no short-circuiting —
    matching the original's behaviour of always checking each one.
    """
    results = [formset.is_valid() for formset in formsets]
    return all(results)
Returns true if every formset in formsets is valid .
35
12
7,593
def forms_valid(self, inlines):
    """Save every inline formset, then redirect to the success URL."""
    for inline_formset in inlines:
        inline_formset.save()
    return HttpResponseRedirect(self.get_success_url())
If the form and formsets are valid save the associated models .
41
13
7,594
def post(self, request, *args, **kwargs):
    """Handle POST: build the inline formsets and dispatch on validity."""
    self.object = self.get_object()
    # Invoked for its side effects only; the return value is discarded
    # (as in the original).
    self.get_context_data()
    inlines = self.construct_inlines()
    if not all_valid(inlines):
        return self.forms_invalid(inlines)
    return self.forms_valid(inlines)
Handles POST requests instantiating a form and formset instances with the passed POST variables and then checked for validity .
78
23
7,595
def get_success_url(self):
    """Return the supplied success URL.

    Raises ImproperlyConfigured when ``success_url`` is unset.
    """
    if not self.success_url:
        raise ImproperlyConfigured(
            "No URL to redirect to. Provide a success_url.")
    # force_text resolves a possible reverse_lazy() value.
    return force_text(self.success_url)
Returns the supplied success URL .
63
6
7,596
def displayStatusMessage(self, msgObj):
    """Show *msgObj*'s text in the status label, newline-terminated."""
    text = msgObj.data
    # The status field expects every message to end with a newline.
    if not text.endswith('\n'):
        text += '\n'
    self.qteLabel.setText(text)
Display the last status message and partially completed key sequences .
70
11
7,597
def qteUpdateLogSlot(self):
    """Fetch and display the next batch of log messages.

    Consecutive duplicate messages are collapsed into a single entry
    carrying a repetition count.  If auto-activation was requested the
    log applet is brought to the front afterwards.

    Fix: the original contained the no-op statement
    ``log_formatted + '\\n'`` (result discarded); it is now ``+=`` so a
    newline actually separates the formatted entries.
    """
    # Fetch all log records that have arrived since the last fetch()
    # call and update the record counter.
    log = self.logHandler.fetch(start=self.qteLogCnt)
    self.qteLogCnt += len(log)

    # Return immediately if no log message is available (this case
    # should be impossible).
    if not len(log):
        return

    # Remove all duplicate entries and count their repetitions; num_rep
    # counts repetitions beyond the first occurrence.
    log_pruned = []
    last_entry = log[0]
    num_rep = -1
    for cur_entry in log:
        # If the previous log message is identical to the current one,
        # increase its repetition counter; otherwise flush the finished
        # run and reset the counter.
        if last_entry.msg == cur_entry.msg:
            num_rep += 1
        else:
            log_pruned.append([last_entry, num_rep])
            num_rep = 0
            last_entry = cur_entry
    # The very last entry must be added by hand.
    log_pruned.append([cur_entry, num_rep])

    # Format the log entries (eg. color coding etc.).
    log_formatted = ""
    for cur_entry in log_pruned:
        log_formatted += self.qteFormatMessage(cur_entry[0], cur_entry[1])
        log_formatted += '\n'

    # Insert the formatted text all at once as calls to insertHtml
    # are expensive.
    self.qteText.insertHtml(log_formatted)
    self.qteMoveToEndOfBuffer()

    # If the log contained an error (or something else of interest to
    # the user) then switch to the messages buffer (ie. this applet).
    if self.qteAutoActivate:
        self.qteAutoActivate = False
        self.qteMain.qteMakeAppletActive(self)
Fetch and display the next batch of log messages .
420
11
7,598
def qteMoveToEndOfBuffer(self):
    """Scroll to the end of the buffer so new output stays visible."""
    cursor = self.qteText.textCursor()
    cursor.movePosition(QtGui.QTextCursor.End)
    self.qteText.setTextCursor(cursor)
Move cursor to the end of the buffer to facilitate auto scrolling .
54
13
7,599
def sign_token_records(profile_components, parent_private_key, signing_algorithm="ES256K"):
    """Sign each profile component into an individual token record.

    Every record is signed with *parent_private_key*; the matching
    public key is embedded both as the token subject and as the
    record's ``parentPublicKey``.

    Raises:
        ValueError: for any algorithm other than ES256K.
    """
    if signing_algorithm != "ES256K":
        raise ValueError("Signing algorithm not supported")

    records = []
    for component in profile_components:
        private_key = ECPrivateKey(parent_private_key)
        public_key = private_key.public_key()
        token = sign_token(component, private_key.to_hex(),
                           {"publicKey": public_key.to_hex()},
                           signing_algorithm=signing_algorithm)
        record = wrap_token(token)
        record["parentPublicKey"] = public_key.to_hex()
        records.append(record)
    return records
Function for iterating through a list of profile components and signing separate individual profile tokens .
197
17