idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
16,200
def print_callback(msg):
    """Print callback: write *msg* to stdout as JSON on a single line and flush."""
    line = json.dumps(msg)
    stdout.write(line)
    stdout.write('\n')
    stdout.flush()
Print callback prints message to stdout as JSON in one line .
33
13
16,201
def prog_callback(prog, msg):
    """Program callback: spawn *prog* and feed it *msg* as UTF-8 JSON on stdin.

    NOTE(review): the child process is never waited on — confirm callers
    reap it or accept fire-and-forget semantics.
    """
    payload = json.dumps(msg).encode('utf-8')
    child = Popen(prog, stdin=PIPE)
    child.stdin.write(payload)
    child.stdin.close()
Program callback calls prog with message in stdin
58
9
16,202
def git_tag2eups_tag(git_tag):
    """Convert a git tag to an acceptable eups tag format.

    eups tags should not start with a numeric value, so such tags get a
    leading 'v'; '.' and '-' characters are converted to '_'.
    """
    # prefix 'v' when the tag starts with a digit
    prefixed = ("v{eups_tag}".format(eups_tag=git_tag)
                if re.match(r'\d', git_tag) else git_tag)
    # convert '.'s and '-'s to '_'s in a single pass
    return prefixed.translate(str.maketrans('.-', '__'))
Convert git tag to an acceptable eups tag format
133
11
16,203
def sqrt(n):
    """Return the square root of n in an exact representation.

    Accepts a Rational (promoted to Constructible) or a Constructible;
    raises ValueError for any other type.
    """
    if isinstance(n, Rational):
        n = Constructible(n)
    elif not isinstance(n, Constructible):
        raise ValueError('the square root is not implemented for the type %s' % type(n))
    root = n._try_sqrt()  # pylint: disable=protected-access
    if root is not None:
        return root
    # not expressible in n's field: extend the tower by sqrt(n)
    zero = Constructible.lift_rational_field(0, n.field)
    one = Constructible.lift_rational_field(1, n.field)
    return Constructible(zero, one, (n, n.field))
return the square root of n in an exact representation
131
10
16,204
def _try_sqrt(self):
    """Try to compute the square root in the field itself.

    Returns a Constructible when the root is expressible in the current
    field tower, otherwise None. Raises ValueError for negative values.
    """
    # Base case: rational (empty field tower); the sqrt(r) coefficient b
    # must be zero here.
    if not self.field:
        assert self.b == 0
        root, remainder = fsqrt(self.a)
        if remainder == 1:
            return Constructible(root)
        else:
            return None
    if self._sign() < 0:
        raise ValueError('math domain error %s' % self)
    # nn = a^2 - b^2*r; a root of self can only exist in the field when
    # this quantity has a root (presumably the field norm -- TODO confirm)
    nn = self.a * self.a - self.b * self.b * self.r
    if nn._sign() < 0:
        return None
    n = nn._try_sqrt()
    if n is None:
        return None
    # try the representation sqrt(self) = a' + b'*sqrt(r) with a' from (a+n)/2
    a = ((self.a + n) * Fraction(1, 2))._try_sqrt()
    if a is not None:
        result = Constructible(a, self.b / a * Fraction(1, 2), self.field)
        assert result.field == self.field
        return result
    # otherwise try b' from (a+n)/(2r)
    b = ((self.a + n) / self.r * Fraction(1, 2))._try_sqrt()
    if b is not None:
        result = Constructible(self.b / b * Fraction(1, 2), b, self.field)
        assert result.field == self.field
        return result
    return None
try to compute the square root in the field itself .
269
11
16,205
def print_packet_count():
    """Print the number of packets grouped by packet name."""
    for name in archive.list_packet_names():
        total = sum(rec.count
                    for group in archive.list_packet_histogram(name)
                    for rec in group.records)
        print(' {: <40} {: >20}'.format(name, total))
Print the number of packets grouped by packet name .
81
10
16,206
def print_pp_groups():
    """Print the number of processed parameter frames by group name."""
    for group in archive.list_processed_parameter_groups():
        total = sum(rec.count
                    for pp_group in archive.list_processed_parameter_group_histogram(group)
                    for rec in pp_group.records)
        print(' {: <40} {: >20}'.format(group, total))
Print the number of processed parameter frames by group name .
92
11
16,207
def print_event_count():
    """Print the number of events grouped by source."""
    for source in archive.list_event_sources():
        total = sum(rec.count
                    for group in archive.list_event_histogram(source)
                    for rec in group.records)
        print(' {: <40} {: >20}'.format(source, total))
Print the number of events grouped by source .
79
9
16,208
def print_command_count():
    """Print the number of commands grouped by name."""
    mdb = client.get_mdb(instance='simulator')
    for command in mdb.list_commands():
        total = sum(rec.count
                    for group in archive.list_command_histogram(command.qualified_name)
                    for rec in group.records)
        print(' {: <40} {: >20}'.format(command, total))
Print the number of commands grouped by name .
93
9
16,209
def cmp_dict(d1, d2, ignore_keys=None):
    """Compare two dicts for equality while ignoring selected keys.

    Fix: the old default `ignore_keys=[]` was a shared mutable default
    argument; use None as the sentinel instead (behavior unchanged).
    """
    # https://stackoverflow.com/questions/10480806/compare-dictionaries-ignoring-specific-keys
    if ignore_keys is None:
        ignore_keys = ()
    return ({k: v for k, v in d1.items() if k not in ignore_keys}
            == {k: v for k, v in d2.items() if k not in ignore_keys})
Compare dicts ignoring select keys
97
6
16,210
def cross_reference_products(eups_products, manifest_products,
                             ignore_manifest_versions=False,
                             fail_fast=False,
                             ):
    """Cross reference EupsTag and Manifest data and return a merged result.

    Returns (products, problems): `products` maps product name to the eups
    data merged with the manifest data; `problems` collects the errors
    encountered. When fail_fast is True the first error is raised instead.
    """
    products = {}
    problems = []
    for name, eups_data in eups_products.items():
        try:
            manifest_data = manifest_products[name]
        except KeyError:
            yikes = RuntimeError(textwrap.dedent("""\
                failed to find record in manifest for:
                  {product} {eups_version}\
                """).format(
                product=name,
                eups_version=eups_data['eups_version'],
            ))
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)
            # bug fix: without this `continue`, the code below would touch the
            # unbound name `manifest_data` and raise NameError
            continue
        if ignore_manifest_versions:
            # ignore the manifest eups_version string by simply setting it to
            # the eups tag value. This ensures that the eups tag value will be
            # passed though.
            manifest_data = manifest_data.copy()
            manifest_data['eups_version'] = eups_data['eups_version']
        if eups_data['eups_version'] != manifest_data['eups_version']:
            yikes = RuntimeError(textwrap.dedent("""\
                eups version string mismatch:
                  eups tag: {product} {eups_eups_version}
                  manifest: {product} {manifest_eups_version}\
                """).format(
                product=name,
                eups_eups_version=eups_data['eups_version'],
                manifest_eups_version=manifest_data['eups_version'],
            ))
            if fail_fast:
                raise yikes
            problems.append(yikes)
            error(yikes)
        products[name] = eups_data.copy()
        products[name].update(manifest_data)
    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))
    return products, problems
Cross reference EupsTag and Manifest data and return a merged result
437
13
16,211
def check_existing_git_tag(repo, t_tag, **kwargs):
    """Check for a pre-existing tag in the github repo.

    Returns False when the tag does not exist, True when an identical tag
    already exists, and raises GitTagExistsError when a conflicting tag
    with the same name exists.
    """
    assert isinstance(repo, github.Repository.Repository), type(repo)
    assert isinstance(t_tag, codekit.pygithub.TargetTag), type(t_tag)
    debug("looking for existing tag: {tag} in repo: {repo}".format(
        repo=repo.full_name,
        tag=t_tag.name,
    ))
    # find ref/tag by name
    e_ref = pygithub.find_tag_by_name(repo, t_tag.name)
    if not e_ref:
        debug(" not found: {tag}".format(tag=t_tag.name))
        return False
    # find tag object pointed to by the ref
    try:
        e_tag = repo.get_git_tag(e_ref.object.sha)
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        # bug fix: the original formatted `e_tag` here, but `e_tag` is unbound
        # when get_git_tag() itself fails -- report the requested name/sha
        msg = "error getting tag: {tag} [{sha}]".format(
            tag=t_tag.name,
            sha=e_ref.object.sha,
        )
        raise pygithub.CaughtRepositoryError(repo, e, msg) from None
    debug(" found existing: {tag} [{sha}]".format(
        tag=e_tag.tag,
        sha=e_tag.sha,
    ))
    if cmp_existing_git_tag(t_tag, e_tag, **kwargs):
        return True
    yikes = GitTagExistsError(textwrap.dedent("""\
        tag: {tag} already exists in repo: {repo}
        with conflicting values:
          existing:
            sha: {e_sha}
            message: {e_message}
            tagger: {e_tagger}
          target:
            sha: {t_sha}
            message: {t_message}
            tagger: {t_tagger}\
        """).format(
        tag=t_tag.name,
        repo=repo.full_name,
        e_sha=e_tag.object.sha,
        e_message=e_tag.message,
        e_tagger=e_tag.tagger,
        t_sha=t_tag.sha,
        t_message=t_tag.message,
        t_tagger=t_tag.tagger,
    ))
    raise yikes
Check for a pre-existing tag in the github repo.
515
13
16,212
def can_proceed(self):
    """Return True once `update_interval` days have elapsed since `last_update`."""
    deadline = self.last_update + datetime.timedelta(days=self.update_interval)
    return datetime.datetime.now() >= deadline
Checks whether app can proceed
47
6
16,213
def parse_lock(self):
    """Parse the app lock file, setting `self.last_update`.

    Fix: the original used a bare `except:` which also swallows
    KeyboardInterrupt/SystemExit; catch only the errors a missing or
    malformed lock file can produce. On failure the lock is reset to the
    epoch and re-parsed.
    """
    try:
        with open(self.lock_file, "r") as reader:
            data = json.loads(reader.read())
        self.last_update = datetime.datetime.strptime(
            data["last_update"], AppCronLock.DATETIME_FORMAT)
    except (OSError, ValueError, KeyError, TypeError):
        # missing or malformed lock file: reset and parse the fresh file
        self.write_lock(last_update=datetime.datetime.fromtimestamp(0))
        self.parse_lock()
Parses app lock file
113
6
16,214
def write_lock(self, last_update=None):
    """Write the lock file recording `last_update`.

    Fix: the old default `datetime.datetime.now()` was evaluated once at
    function-definition time, so every default call recorded the process
    start time. None now means "now, at call time".
    """
    if last_update is None:
        last_update = datetime.datetime.now()
    data = {"last_update": last_update.strftime(AppCronLock.DATETIME_FORMAT)}
    with open(self.lock_file, "w") as writer:
        json.dump(data, writer)
Writes lock file
76
4
16,215
def filter_seq(seq):
    """Examine an unreserved sequence to see if it is prone to mutation.

    Returns (n, guide) for candidate sequences, otherwise None.
    This currently ignores solely-power-of-2 guides with class > 3.
    """
    if seq.res:
        return None
    n = nt.Factors(seq.factors)
    guide, s, t = aq.canonical_form(n)
    seq.guide = guide
    # The target_tau for the composite is at most the class minus extant prime factor count
    cls = aq.get_class(guide=guide)
    num_larges = seq.factors.count('P')
    upper_bound_tau = cls - num_larges - len(t)
    if cls < 2 or upper_bound_tau < 2:
        # Cheap tests to eliminate almost all sequences
        return None
    # Next we ignore sequences whose guide is solely a power of 2 greater than 3
    v = nt.Factors({p: a for p, a in guide.items() if p != 2 and a > 0})
    if int(v) == 1 and cls > 3:
        return None
    # This condition greatly reduces fdb load, but excludes a lot of sequences
    if not aq.is_driver(guide=guide):
        return None
    return n, guide
Examines unreserved sequences to see if they are prone to mutation . This currently ignores solely - power - of - 2 guides with b > 3
235
29
16,216
def get_token(self, appname, username, password):
    """Get the security token by connecting to the TouchWorks API.

    Raises TouchWorksException when the response is empty or is not a
    valid version-4 UUID string. NOTE(review): `appname` is accepted but
    never used here -- confirm whether it belongs in the request payload.
    """
    ext_exception = TouchWorksException(
        TouchWorksErrorMessages.GET_TOKEN_FAILED_ERROR)
    data = {'Username': username, 'Password': password}
    resp = self._http_request(TouchWorksEndPoints.GET_TOKEN, data)
    try:
        logger.debug('token : %s' % resp)
        if not resp.text:
            raise ext_exception
        try:
            # the token must be a valid v4 UUID string
            uuid.UUID(resp.text, version=4)
            return SecurityToken(resp.text)
        except ValueError:
            logger.error('response was not valid uuid string. %s' % resp.text)
            raise ext_exception
    except Exception as ex:
        logger.exception(ex)
        raise ext_exception
get the security token by connecting to TouchWorks API
174
10
16,217
def _http_request(self, api, data, headers=None):
    """POST *data* as JSON to the given API endpoint and return the response.

    Refreshes the auth token first when it is no longer valid; raises an
    exception if the HTTP return status code is not a success.
    """
    if not headers:
        headers = {'Content-Type': 'application/json'}
    if not self._token_valid:
        self._token = self.get_token(self._app_name, self._username,
                                     self._password)
    body = json.dumps(data)
    url = self._base_url + '/' + api
    response = requests.post(url, data=body, headers=headers)
    logger.debug(body)
    logger.debug(response.text)
    # raise an exception if the status was not 200
    response.raise_for_status()
    return response
internal method for handling request and response and raising an exception is http return status code is not success
144
19
16,218
def query_item(self, key, abis):
    """Query items based on system call number (decimal or hex) or name."""
    field = 'number'
    try:
        key = int(key)
    except ValueError:
        try:
            # fall back to hexadecimal notation
            key = int(key, 16)
        except ValueError:
            # not a number at all: look the item up by name
            field = 'name'
    condition = and_(getattr(Item, field) == key,
                     or_(Item.abi == abi for abi in abis))
    return self.session.query(Item).filter(condition).all()
Query items based on system call number or name .
105
10
16,219
def query_decl(self, **kwargs):
    """Query declarations matching the given column filters."""
    query = self.session.query(Decl).filter_by(**kwargs)
    return query.all()
Query declarations .
38
3
16,220
def add_data(self, filenames):
    """Add syscall data parsed from the given files to the session.

    `.tbl` files are parsed as tab-separated syscall tables into Item rows;
    anything else is treated as grep output of SYSCALL_DEFINE declarations
    and stored as Decl rows. Commits the session at the end.
    """
    def _parse_table(table):
        # split each numeric-prefixed line on tabs, dropping empty fields
        def _parse_line(line):
            return line.split('\t')
        lines = (_parse_line(one) for one in table.splitlines()
                 if re.match(r'^\d', one))
        return (remove_false(one) for one in lines)

    def _parse_decl(decl):
        # decl looks like SYSCALL_DEFINE<argc>(name, args...)
        index = len('SYSCALL_DEFINE')
        argc = decl[index]
        rest = decl[index + 1:][1:-1].split(',')
        name = rest[0]
        # args = [one.strip() for one in rest[1:]]
        args = ','.join(rest[1:])
        return name, argc, args

    def _parse_line(line):
        # grep output: "<filename>:<declaration>"
        index = line.find(':')
        if index == -1:
            raise RuntimeError('This is unexpected: %s', line)
        filename = line[:index]
        decl = line[index + 1:]
        return filename, _parse_decl(decl)

    def _split_into_lines(grep_output):
        # re-join declarations that grep wrapped across lines
        lines = grep_output.replace('\n\n', '\n')
        lines = lines.replace('\n\t', '').replace('\t', ' ')
        return lines.strip().splitlines()

    for one in filenames:
        if one.name.endswith('.tbl'):
            for item in _parse_table(one.read()):
                args = list(item)
                # pad short rows out to the 5 expected columns
                if len(args) != 5:
                    args += [''] * (5 - len(args))
                self.session.add(Item(name=args[2], abi=args[1],
                                      number=args[0], entry=args[3],
                                      compat=args[4]))
        else:
            for line in _split_into_lines(one.read()):
                filename, rest = (_parse_line(line))
                self.session.add(Decl(name=rest[0], filename=filename,
                                      argc=rest[1], args=rest[2]))
    self.session.commit()
Add data .
498
3
16,221
def start(self, on_exit_callback=None):
    """Start the application: instantiate every registered service class,
    then start the server."""
    # TODO: Support params for services by mapping {servicename: {class,
    # params}}?
    for name in self.services.keys():
        factory = self.services[name]
        self.services[name] = factory()
    self.server.start(on_exit_callback)
Start the Engel application by initializing all registered services and starting an Autobahn IOLoop .
74
19
16,222
def register(self, event, callback, selector=None):
    """Register an event that you want to monitor; delegates to the processor."""
    processor = self.processor
    processor.register(event, callback, selector)
Register an event that you want to monitor.
26
10
16,223
def unregister(self, event, callback, selector=None):
    """Unregister an event that was being monitored; delegates to the processor."""
    processor = self.processor
    processor.unregister(event, callback, selector)
Unregisters an event that was being monitored .
28
10
16,224
def from_api(cls, api):
    """Create an application description for the todo app that, based on the
    api, can use the api, the ux, or the rpc layer for interaction."""
    from .pseudorpc import PseudoRpc
    ux = TodoUX(api)
    rpc = PseudoRpc(api)
    backends = {ViaAPI: api, ViaUX: ux, ViaRPC: rpc}
    return cls(backends)
create an application description for the todo app that, based on the api, can use either the api or the ux for interaction
64
25
16,225
def consequence_level(self):
    """One of NONE, WATCH, WARNING, DISTRESS, CRITICAL or SEVERE; None when unset."""
    if not self._proto.HasField('consequenceLevel'):
        return None
    return mdb_pb2.SignificanceInfo.SignificanceLevelType.Name(
        self._proto.consequenceLevel)
One of NONE WATCH WARNING DISTRESS CRITICAL or SEVERE .
56
16
16,226
def get_documents_count(self):
    """Count documents across every collection in the database."""
    total = 0
    for name in self.get_collection_names():
        total += self.database[name].count()
    return total
Counts documents in database
61
5
16,227
def get_documents_in_collection(self, collection_name, with_id=True):
    """Return all documents in a collection, optionally stripping the "_id" key."""
    documents = list(self.database[collection_name].find())
    if not with_id:
        for document in documents:
            document.pop("_id")  # remove id key
    return documents
Gets all documents in collection
86
6
16,228
def get_documents_in_database(self, with_id=True):
    """Return all documents from every collection in the database."""
    documents = []
    for name in self.get_collection_names():
        documents.extend(
            self.get_documents_in_collection(name, with_id=with_id))
    return documents
Gets all documents in database
61
6
16,229
def check_tags(repos, tags, ignore_existing=False, fail_fast=False):
    """Check whether the given tags already exist in the given repos.

    Returns (present_tags, absent_tags, problems): dicts keyed by repo full
    name describing which tags exist / are missing, plus a list of errors.
    Unless ignore_existing is set, an existing tag is treated as an error
    (raised immediately when fail_fast is True).
    """
    debug("looking for {n} tag(s):".format(n=len(tags)))
    [debug(" {t}".format(t=t)) for t in tags]
    debug("in {n} repo(s):".format(n=len(repos)))
    [debug(" {r}".format(r=r.full_name)) for r in repos]
    # present/missing tags by repo name
    present_tags = {}
    absent_tags = {}
    problems = []
    for r in repos:
        has_tags = find_tags_in_repo(r, tags)
        if has_tags:
            if not ignore_existing:
                yikes = GitTagExistsError(
                    "tag(s) {tag} already exists in repos {r}".format(
                        tag=list(has_tags.keys()), r=r.full_name))
                if fail_fast:
                    raise yikes
                problems.append(yikes)
                error(yikes)
            present_tags[r.full_name] = {
                'repo': r,
                'tags': list(has_tags.values()),
            }
        missing_tags = [x for x in tags if x not in has_tags]
        if missing_tags:
            absent_tags[r.full_name] = {
                'repo': r,
                'need_tags': missing_tags,
            }
    debug(textwrap.dedent("""\
        found: {n_with:>4} repos with tag(s)
               {n_none:>4} repos with no tag(s)
               {errors:>4} repos with error(s)\
        """).format(
        n_with=len(present_tags),
        n_none=len(absent_tags),
        errors=len(problems),
    ))
    return present_tags, absent_tags, problems
check if tags already exist in repos
434
8
16,230
def delete_refs(repo, refs, dry_run=False):
    """Delete git refs from the repo.

    Note that only the ref to a tag can be explicitly removed; the tag
    object lives on until it is garbage collected.
    """
    assert isinstance(repo, github.Repository.Repository), type(repo)
    debug("removing {n} refs from {repo}".format(n=len(refs),
                                                 repo=repo.full_name))
    for ref in refs:
        debug(" deleting {ref}".format(ref=ref.ref))
        if dry_run:
            debug(' (noop)')
            continue
        ref.delete()
Note that only the ref to a tag can be explicitly removed. The tag object will live on until it is garbage collected.
114
26
16,231
def get_by(self, name):
    """Get a todo list ux by name."""
    todo_list = self.app.get_by(name)
    return TodoListUX(ux=self, controlled_list=todo_list)
get a todo list ux by name
39
9
16,232
def create_item(self, name):
    """Create a new named todo list."""
    todo_list = self.app.create_item(name)
    return TodoListUX(ux=self, controlled_list=todo_list)
create a new named todo list
39
7
16,233
def get_by(self, name):
    """Find a todo list element by name; returns None when absent."""
    element = self.controlled_list.get_by(name)
    if element:
        return TodoElementUX(parent=self, controlled_element=element)
find a todo list element by name
43
8
16,234
def create_item(self, name):
    """Create a new todo list item; returns None when creation fails."""
    element = self.controlled_list.create_item(name)
    if element:
        return TodoElementUX(parent=self, controlled_element=element)
create a new todo list item
46
7
16,235
def chose_blacklist ( self , ip ) : blacklist = 'ellis_blacklist{0}' try : address = ipaddress . ip_address ( ip ) except ipaddress . AddressValueError : raise else : if address . version is 6 : # We don't ban private IPv6: if address . is_private : msg = "We don't ban private addresses ({0} given)." . format ( address ) raise ipaddress . AddressValueError ( msg ) else : # Do we have an embedded IPv4 ? if address . ipv4_mapped is not None : address = address . ipv4_mapped elif address . sixtofour is not None : address = address . sixtofour blacklist = blacklist . format ( address . version ) return ( address , blacklist )
Given an IP address figure out the set we have to use .
169
13
16,236
def under_attack(col, queens):
    """Check whether a queen placed in column *col* on the next row is
    attacked by any of the already-placed *queens* (row, column) pairs."""
    left = right = col
    for _, column in reversed(queens):
        left -= 1
        right += 1
        # same column or either diagonal
        if column in (left, col, right):
            return True
    return False
Checks if queen is under attack
51
7
16,237
def _get_bundles_by_type(self, type):
    """Get a dictionary of bundles for the requested type.

    Maps bundle name -> sorted list of absolute file paths matched by the
    configured glob patterns (resolved relative to self.basedir).
    Returns an empty dict when the type has no configuration.
    """
    bundles = {}
    bundle_definitions = self.config.get(type)
    if bundle_definitions is None:
        return bundles
    # bundle name: common
    for bundle_name, paths in bundle_definitions.items():
        bundle_files = []
        # path: static/js/vendor/*.js
        for path in paths:
            # pattern: /tmp/static/js/vendor/*.js
            pattern = abspath = os.path.join(self.basedir, path)
            # assetdir: /tmp/static/js/vendor
            # assetdir contents:
            # - /tmp/static/js/vendor/t1.js
            # - /tmp/static/js/vendor/t2.js
            # - /tmp/static/js/vendor/index.html
            assetdir = os.path.dirname(abspath)
            # expanded_fnames after filtering using the pattern:
            # - /tmp/static/js/vendor/t1.js
            # - /tmp/static/js/vendor/t2.js
            fnames = [os.path.join(assetdir, fname)
                      for fname in os.listdir(assetdir)]
            expanded_fnames = fnmatch.filter(fnames, pattern)
            bundle_files.extend(sorted(expanded_fnames))
        bundles[bundle_name] = bundle_files
    return bundles
Get a dictionary of bundles for requested type .
312
9
16,238
def blastnprep(self):
    """Setup blastn analyses.

    Initialises the per-sample result dictionaries, then queues a blast
    run for every fasta/allele file combination of each sample that has a
    usable assembly.
    """
    # Populate threads for each gene, genome combination
    for sample in self.metadata:
        if sample.general.bestassemblyfile != 'NA':
            # # sample[self.analysistype].alleleresults = GenObject()
            sample[self.analysistype].closealleles = dict()
            sample[self.analysistype].mismatches = dict()
            sample[self.analysistype].alignmentlength = dict()
            sample[self.analysistype].subjectlength = dict()
            sample[self.analysistype].queryid = dict()
            sample[self.analysistype].start = dict()
            sample[self.analysistype].end = dict()
            sample[self.analysistype].queryseq = dict()
            if type(sample[self.analysistype].allelenames) == list:
                for allele in sample[self.analysistype].combinedalleles:
                    # Add each fasta/allele file combination to the threads
                    self.runblast(sample.general.bestassemblyfile, allele, sample)
Setup blastn analyses
240
4
16,239
def strainer(self):
    """Determine whether it is required to run the MLST analyses.

    For each sample, reuses prior results when the final report already
    exists; otherwise primes the sample for analysis. Finally kicks off
    the MLST analyses.
    """
    # Initialise a variable to store whether the analyses need to be performed
    analyse = list()
    for sample in self.runmetadata.samples:
        if sample.general.bestassemblyfile != 'NA':
            try:
                # Try to open the final report from the analyses. If it exists, then the analyses don't need to be
                # performed again.
                if os.path.isfile('{}{}_{}.csv'.format(
                        sample[self.analysistype].reportdir,
                        sample.name,
                        self.analysistype)):
                    if self.analysistype == 'rmlst':
                        # Run the allele updater method
                        updatecall, allelefolder = getrmlsthelper(
                            self.referencefilepath, self.updatedatabases, self.start)
                    else:
                        # referencefilepath, start, organism, update
                        allelefolder = getmlsthelper(
                            self.referencefilepath, self.start,
                            sample.general.referencegenus, self.updatedatabases)
                    # Alleles have a .tfa extension
                    self.alleles = glob('{}/*.tfa'.format(allelefolder))
                    sample[self.analysistype].alleles = self.alleles
                    sample[self.analysistype].allelenames = [
                        os.path.split(x)[1].split('.')[0] for x in self.alleles]
                    # The analyses have already been successfully completed
                    analyse.append(False)
                # Otherwise run the analyses
                else:
                    self.populator(sample)
                    analyse.append(True)
            # If the attribute doesn't exist, then the analyses haven't been performed yet.
            except (KeyError, AttributeError):
                self.populator(sample)
                analyse.append(True)
        else:
            self.populator(sample)
            analyse.append(False)
    # Only run the analyses if they have not completed successfully before
    # if any(analyse):
    # Run the MLST analyses
    MLST(self)
Determine whether it is required to run the MLST analyses
424
13
16,240
def get_seconds(self):
    """Return the raw time converted to a total number of seconds (float)."""
    parsed = self.parse_hh_mm_ss()  # get times
    seconds = parsed.second
    seconds += parsed.minute * 60.0
    seconds += parsed.hour * 60.0 * 60.0
    return seconds
Gets seconds from raw time
61
6
16,241
def get_email_content(file_path):
    """Return the email file's text with blank-line breaks rendered as <br> tags."""
    with open(file_path, "r") as handle:
        raw = str(handle.read())
    return raw.replace("\n\n", "<br>")
Email content in file
55
4
16,242
def set(self, **kwargs):
    """Sets internal settings for acquisition using keywords.

    Player-related settings (acqtime, aifs, trigger) are applied while
    holding player_lock; the remaining keywords simply update attributes.
    """
    self.player_lock.acquire()
    if 'acqtime' in kwargs:
        self.player.set_aidur(kwargs['acqtime'])
    if 'aifs' in kwargs:
        self.player.set_aifs(kwargs['aifs'])
        self.aifs = kwargs['aifs']
    if 'aifs' in kwargs or 'acqtime' in kwargs:
        # rebuild the acquisition time vector from duration and sample rate
        t = kwargs.get('acqtime', self.player.get_aidur())
        npoints = t * float(kwargs.get('aifs', self.player.get_aifs()))
        self.aitimes = np.linspace(0, t, npoints)
    if 'trigger' in kwargs:
        self.player.set_trigger(kwargs['trigger'])
    self.player_lock.release()
    if 'aochan' in kwargs:
        self.aochan = kwargs['aochan']
    if 'aichan' in kwargs:
        self.aichan = kwargs['aichan']
    if 'binsz' in kwargs:
        self.binsz = kwargs['binsz']
    if 'save' in kwargs:
        self.save_data = kwargs['save']
    if 'caldb' in kwargs:
        self.caldb = kwargs['caldb']
    if 'calv' in kwargs:
        self.calv = kwargs['calv']
    if 'calf' in kwargs:
        self.calf = kwargs['calf']
    if 'caldb' in kwargs or 'calv' in kwargs:
        self.update_reference_voltage()
    if 'datafile' in kwargs:
        self.datafile = kwargs['datafile']
    if 'reprate' in kwargs:
        self.reprate = kwargs['reprate']
    # NOTE(review): 'save' is handled twice (here and above) — looks redundant
    if 'save' in kwargs:
        self.save_data = kwargs['save']
    if 'average' in kwargs:
        self.average = kwargs['average']
    if 'reject' in kwargs:
        self.reject = kwargs['reject']
    if 'rejectrate' in kwargs:
        self.rejectrate = kwargs['rejectrate']
Sets an internal setting for acquisition using keywords.
580
12
16,243
def interval_wait(self):
    """Pause the correct amount of time so that successive calls are spaced
    by this acquisition object's interval setting (in milliseconds).
    """
    # calculate time since last interation and wait to acheive desired interval
    now = time.time()
    elapsed = (now - self.last_tick) * 1000
    # print("interval %d, time from start %d \n" % (elapsed, (now - self.start_time)*1000))
    if elapsed < self.interval:
        # print('sleep ', (self.interval-elapsed))
        # self.signals.warning.emit('') # clear previous warning
        time.sleep((self.interval - elapsed) / 1000)
        now = time.time()
    elif elapsed > self.interval:
        pass
        # self.signals.warning.emit("WARNING: PROVIDED INTERVAL EXCEEDED, ELAPSED TIME %d" % (elapsed))
    self.last_tick = now
Pauses the correct amount of time according to this acquisition object's interval setting and the last time this function was called
185
23
16,244
def putnotify(self, name, *args):
    """Put data into the named queue and alert listeners via its event."""
    # self.signals[name][0].send(*args)
    queue, event = self.queues[name]
    queue.put(*args)
    event.set()
Puts data into queue and alerts listeners
55
8
16,245
def loadassignment(self):
    """Load the taxonomic assignment for each read.

    Spawns self.cpus daemon worker threads running assignmentload, feeds
    every sample through loadqueue, then sorts/filters the reads.
    Fix: Thread.setDaemon() is deprecated; assign the .daemon attribute.
    """
    printtime('Finding taxonomic assignments', self.start)
    # Create and start threads
    for i in range(self.cpus):
        # Send the threads to the appropriate destination function
        worker = Thread(target=self.assignmentload, args=())
        # daemon threads are killed with the main program
        worker.daemon = True
        # Start the threading
        worker.start()
    for sample in self.runmetadata.samples:
        self.loadqueue.put(sample)
    self.loadqueue.join()
    # Filter the .fastq files
    self.readlist()
Load the taxonomic assignment for each read
134
8
16,246
def readlist(self):
    """Sort the reads and create lists to be used in creating sorted .fastq files.

    Fix: Thread.setDaemon() is deprecated; assign the .daemon attribute.
    """
    printtime('Sorting reads', self.start)
    # Create and start threads
    for i in range(self.cpus):
        # Send the threads to the appropriate destination function
        worker = Thread(target=self.listread, args=())
        # daemon threads are killed with the main program
        worker.daemon = True
        # Start the threading
        worker.start()
    for sample in self.runmetadata.samples:
        self.listqueue.put(sample)
    self.listqueue.join()
    # Create
    self.fastqfilter()
Sort the reads and create lists to be used in creating sorted . fastq files
128
16
16,247
def fastqfilter(self):
    """Filter the reads into separate files based on taxonomic assignment.

    Fix: Thread.setDaemon() is deprecated; assign the .daemon attribute.
    """
    printtime('Creating filtered .fastqfiles', self.start)
    # Create and start threads
    for i in range(self.cpus):
        # Send the threads to the appropriate destination function
        worker = Thread(target=self.filterfastq, args=())
        # daemon threads are killed with the main program
        worker.daemon = True
        # Start the threading
        worker.start()
    for sample in self.runmetadata.samples:
        self.filterqueue.put(sample)
    self.filterqueue.join()
    # Print the metadata to file
    metadataprinter.MetadataPrinter(self)
Filter the reads into separate files based on taxonomic assignment
144
11
16,248
def remove_escapes(self):
    """Strip backslash escape characters from the string, keeping the
    characters they escape.

    NOTE(review): the flattened original is ambiguous about whether the
    escaped character is kept or dropped — confirm against upstream.
    """
    kept = []
    position = 0
    text = self.string
    while position < len(text):
        current = text[position]
        if current == "\\":
            # skip the escape character itself
            position += 1
        else:
            kept.append(current)
            position += 1
    return "".join(kept)
Removes everything except number and letters from string
62
9
16,249
def convert_accents(self):
    """Remove accents from the text via NFKD decomposition, dropping
    combining marks."""
    decomposed = unicodedata.normalize('NFKD', self.string)
    return "".join(ch for ch in decomposed if not unicodedata.combining(ch))
Removes accents from text
59
5
16,250
def remove_all(self, token):
    """Replace every space with *token*, collapsing runs into a single token."""
    result = self.string.replace(" ", token)  # replace tokens
    doubled = token + token
    while doubled in result:  # while there are tokens
        result = result.replace(doubled, token)
    return result
Removes all occurrences of token
57
6
16,251
def init_logging():
    """Initialize a logger from a configuration file to use throughout the project.

    Fix: yaml.load() without an explicit Loader is deprecated and can
    construct arbitrary Python objects; use yaml.safe_load() instead.
    """
    conf_path = os.path.join(os.path.dirname(__file__), 'logging.conf')
    with open(conf_path, 'r') as yf:
        config = yaml.safe_load(yf)
    logging.config.dictConfig(config)
Initialize a logger from a configuration file to use throughout the project
64
13
16,252
def _clean_page_unique_slug_required(self, slug):
    """See if this slug exists already; raise a ValidationError when it does."""
    slug_qs = Content.objects.filter(body=slug, type="slug")
    if hasattr(self, 'instance') and self.instance.id:
        # exclude the page being edited from the uniqueness check
        slug_qs = Content.objects.exclude(page=self.instance).filter(
            body=slug, type="slug")
    if slug_qs.count():
        raise forms.ValidationError(self.err_dict['another_page_error'])
    return slug
See if this slug exists already
137
6
16,253
def extract_stack(start=0):
    """SNAGGED FROM traceback.py; altered to return data.

    Returns the caller's stack (skipping *start* extra frames) as a list of
    {"line", "file", "method"} dicts, innermost first.
    """
    # raise/catch purely to obtain a traceback anchored at this frame
    try:
        raise ZeroDivisionError
    except ZeroDivisionError:
        trace = sys.exc_info()[2]
    frame = trace.tb_frame.f_back
    for _ in range(start):
        frame = frame.f_back
    stack = []
    while frame is not None:
        stack.append({
            "line": frame.f_lineno,
            "file": frame.f_code.co_filename,
            "method": frame.f_code.co_name,
        })
        frame = frame.f_back
    return stack
SNAGGED FROM traceback . py Altered to return Data
126
14
16,254
def _extract_traceback(start):
    """SNAGGED FROM traceback.py — skip *start* frames of the current
    exception's traceback, then parse the remainder."""
    tb = sys.exc_info()[2]
    for _ in range(start):
        tb = tb.tb_next
    return _parse_traceback(tb)
SNAGGED FROM traceback . py
52
9
16,255
def wrap(cls, e, stack_depth=0):
    """ENSURE THE STACKTRACE AND CAUSAL CHAIN IS CAPTURED, PLUS ADD FEATURES OF Except.

    Normalises any exception-like value into an Except: None -> Null,
    lists/Excepts pass through, data mappings are rebuilt with wrapped
    causes, and real exceptions get a parsed traceback plus the wrapping
    caller's stack appended.
    """
    if e == None:
        return Null
    elif isinstance(e, (list, Except)):
        return e
    elif is_data(e):
        e.cause = unwraplist([Except.wrap(c) for c in listwrap(e.cause)])
        return Except(**e)
    else:
        tb = getattr(e, '__traceback__', None)
        if tb is not None:
            trace = _parse_traceback(tb)
        else:
            trace = _extract_traceback(0)
        cause = Except.wrap(getattr(e, '__cause__', None))
        if hasattr(e, "message") and e.message:
            output = Except(context=ERROR, template=text_type(e.message), trace=trace, cause=cause)
        else:
            output = Except(context=ERROR, template=text_type(e), trace=trace, cause=cause)
        trace = extract_stack(stack_depth + 2)  # +2 = to remove the caller, and it's call to this' Except.wrap()
        output.trace.extend(trace)
        return output
ENSURE THE STACKTRACE AND CAUSAL CHAIN IS CAPTURED PLUS ADD FEATURES OF Except
263
24
16,256
def determine_elected_candidates_in_order(self, candidate_votes):
    """Determine all candidates with at least a quota of votes in
    candidate_votes.

    Returns results in order of decreasing vote count. Any ties are
    resolved within this method (via previous-round totals, or failing
    that, an external decision on the permutation order).
    """
    eligible_by_vote = defaultdict(list)
    for candidate_id, votes in candidate_votes.candidate_votes_iter():
        if candidate_id in self.candidates_elected:
            continue
        if votes < self.quota:
            continue
        eligible_by_vote[votes].append(candidate_id)
    elected = []
    for votes in reversed(sorted(eligible_by_vote)):
        candidate_ids = eligible_by_vote[votes]
        # we sort here to ensure stability, so external callers can hard-coded their response
        candidate_ids.sort(key=self.candidate_order_fn)
        if len(candidate_ids) == 1:
            elected.append(candidate_ids[0])
        else:
            tie_breaker_round = self.find_tie_breaker(candidate_ids)
            if tie_breaker_round is not None:
                self.results.provision_used(ActProvision(
                    "Multiple candidates elected with %d votes. Tie broken from previous totals." % (votes)))
                for candidate_id in reversed(sorted(candidate_ids, key=tie_breaker_round.get_vote_count)):
                    elected.append(candidate_id)
            else:
                self.results.provision_used(ActProvision(
                    "Multiple candidates elected with %d votes. Input required from Australian Electoral Officer." % (votes)))
                permutations = list(itertools.permutations(candidate_ids))
                permutations.sort()
                choice = self.resolve_election_order(permutations)
                for candidate_id in permutations[choice]:
                    elected.append(candidate_id)
    return elected
determine all candidates with at least a quota of votes in candidate_votes . returns results in order of decreasing vote count . Any ties are resolved within this method .
363
34
16,257
def get_initial_totals ( self ) : candidate_votes = { } # initialise to zero for every individual candidate for candidate_id in self . candidate_ids : candidate_votes [ candidate_id ] = 0 for candidate_id in self . candidate_ids : candidate_votes [ candidate_id ] = self . candidate_bundle_transactions . get_paper_count ( candidate_id ) for candidate_id in candidate_votes : candidate_votes [ candidate_id ] = int ( candidate_votes [ candidate_id ] ) return candidate_votes , 0 , 0
determine the initial total for each candidate . only call this at the start of round 1
125
19
16,258
def bundle_to_next_candidate ( self , bundle ) : ticket_state = bundle . ticket_state while True : ticket_state = TicketState ( ticket_state . preferences , ticket_state . up_to + 1 ) candidate_id = get_preference ( ticket_state ) # if the preference passes through an elected or excluded candidate, we # skip over it if candidate_id in self . candidates_elected or candidate_id in self . candidates_excluded : continue return candidate_id , ticket_state
returns the next candidate_it of the next preference expressed in the ticket for this bundle and the next ticket_state after preferences are moved along if the vote exhausts candidate_id will be None
111
40
16,259
def elect ( self , candidate_aggregates , candidate_id ) : # somewhat paranoid cross-check, but we've had this bug before.. assert ( candidate_id not in self . candidates_elected ) elected_no = len ( self . candidates_elected ) + 1 self . candidates_elected [ candidate_id ] = True transfer_value = 0 excess_votes = paper_count = None if len ( self . candidates_elected ) != self . vacancies : excess_votes = max ( candidate_aggregates . get_vote_count ( candidate_id ) - self . quota , 0 ) assert ( excess_votes >= 0 ) paper_count = self . candidate_bundle_transactions . get_paper_count ( candidate_id ) if paper_count > 0 : transfer_value = fractions . Fraction ( excess_votes , paper_count ) assert ( transfer_value >= 0 ) self . election_distributions_pending . append ( ( candidate_id , transfer_value , excess_votes ) ) self . results . candidate_elected ( CandidateElected ( candidate_id = candidate_id , order = elected_no , excess_votes = excess_votes , paper_count = paper_count , transfer_value = transfer_value ) )
Elect a candidate updating internal state to track this . Calculate the paper count to be transferred on to other candidates and if required schedule a distribution fo papers .
269
31
16,260
def find_tie_breaker ( self , candidate_ids ) : for candidate_aggregates in reversed ( self . round_candidate_aggregates ) : candidates_on_vote = defaultdict ( int ) for candidate_id in candidate_ids : votes = candidate_aggregates . get_vote_count ( candidate_id ) candidates_on_vote [ votes ] += 1 if max ( candidates_on_vote . values ( ) ) == 1 : return candidate_aggregates
finds a round in the count history in which the candidate_ids each had different vote counts if no such round exists returns None
105
26
16,261
def get_candidate_notional_votes ( self , candidate_aggregates , adjustment ) : continuing = self . get_continuing_candidates ( candidate_aggregates ) candidates_notional = { } by_votes = self . get_votes_to_candidates ( continuing , candidate_aggregates ) total = adjustment for votes , candidates in sorted ( by_votes . items ( ) , key = lambda x : x [ 0 ] ) : for candidate_id in candidates : candidates_notional [ candidate_id ] = total + votes total += votes * len ( candidates ) return candidates_notional
aggregate of vote received by each candidate and the votes received by any candidate lower in the poll
132
19
16,262
def check ( self , action , page = None , lang = None , method = None ) : if self . user . is_superuser : return True if action == 'change' : return self . has_change_permission ( page , lang , method ) if action == 'delete' : if not self . delete_page ( ) : return False return True if action == 'add' : if not self . add_page ( ) : return False return True if action == 'freeze' : perm = self . user . has_perm ( 'pages.can_freeze' ) if perm : return True return False if action == 'publish' : perm = self . user . has_perm ( 'pages.can_publish' ) if perm : return True return False return False
Return True if the current user has permission on the page .
167
12
16,263
def has_change_permission ( self , page , lang , method = None ) : # the user has always the right to look at a page content # if he doesn't try to modify it. if method != 'POST' : return True # right to change all the pages if self . change_page ( ) : return True if lang : # try the global language permission first perm = self . user . has_perm ( 'pages.can_manage_%s' % lang . replace ( '-' , '_' ) ) if perm : return True # then per object permission perm_func = getattr ( self , 'manage (%s)_page' % lang ) if perm_func ( page ) : return True # last hierarchic permissions because it's more expensive perm_func = getattr ( self , 'manage hierarchy_page' ) if perm_func ( page ) : return True else : for ancestor in page . get_ancestors ( ) : if perm_func ( ancestor ) : return True # everything else failed, no permissions return False
Return True if the current user has permission to change the page .
225
13
16,264
def _join_lines ( txt ) : txt = txt or '' # Handle NoneType input values val = '' lines = txt . split ( '\n' ) for line in lines : stripped = line . strip ( ) if len ( stripped ) == 0 : continue val += stripped + ' ' return val . strip ( )
Remove whitespace from XML input
71
6
16,265
def _parse_desc ( node ) : desc = '' if len ( node ) == 0 : return '<p>' + node . text + '</p>' for n in node : if n . tag == 'p' : desc += '<p>' + _join_lines ( n . text ) + '</p>' elif n . tag == 'ol' or n . tag == 'ul' : desc += '<ul>' for c in n : if c . tag == 'li' : desc += '<li>' + _join_lines ( c . text ) + '</li>' else : raise ParseError ( 'Expected <li> in <%s>, got <%s>' % ( n . tag , c . tag ) ) desc += '</ul>' else : raise ParseError ( 'Expected <p>, <ul>, <ol> in <%s>, got <%s>' % ( node . tag , n . tag ) ) return desc
A quick n dirty description parser
220
6
16,266
def validate_description ( xml_data ) : try : root = ET . fromstring ( '<document>' + xml_data + '</document>' ) except StdlibParseError as e : raise ParseError ( str ( e ) ) return _parse_desc ( root )
Validate the description for validity
63
6
16,267
def import_description ( text ) : xml = '' is_in_ul = False for line in text . split ( '\n' ) : # don't include whitespace line = line . strip ( ) if len ( line ) == 0 : continue # detected as a list element? line_li = _import_description_to_list_element ( line ) if line_li : # first list element if not is_in_ul : xml += '<ul>\n' is_in_ul = True xml += '<li>' + _import_description_sentence_case ( line_li ) + '</li>\n' continue # done with the list if is_in_ul : xml += '</ul>\n' is_in_ul = False # regular paragraph xml += '<p>' + _import_description_sentence_case ( line ) + '</p>\n' # no trailing paragraph if is_in_ul : xml += '</ul>\n' return xml
Convert ASCII text to AppStream markup format
222
9
16,268
def fill_form_field ( self , field_name , field_value ) : self . browser . execute_script ( "document.getElementsByName(\"" + str ( field_name ) + "\")[0].value = \"" + str ( field_value ) + "\"" )
Fills given field with given value
66
7
16,269
def fill_login_form ( self , username , username_field , user_password , user_password_field ) : self . fill_form_field ( username_field , username ) # set username self . fill_form_field ( user_password_field , user_password )
Fills form with login info
61
6
16,270
def open_scene ( f , kwargs = None ) : defaultkwargs = { 'open' : True } if kwargs is None : kwargs = { } kwargs . update ( defaultkwargs ) fp = f . get_fullpath ( ) mayafile = cmds . file ( fp , * * kwargs ) msg = "Successfully opened file %s with arguments: %s" % ( fp , kwargs ) return ActionStatus ( ActionStatus . SUCCESS , msg , returnvalue = mayafile )
Opens the given JB_File
121
8
16,271
def import_all_references ( arg , kwargs = None ) : defaultkwargs = { 'importReference' : True } if kwargs is None : kwargs = { } kwargs . update ( defaultkwargs ) imported = [ ] # list all reference files refs = cmds . file ( query = True , reference = True ) while refs : for rfile in refs : cmds . file ( rfile , * * kwargs ) imported . append ( rfile ) refs = cmds . file ( query = True , reference = True ) msg = "Successfully imported references %s with arguments: %s" % ( imported , kwargs ) return ActionStatus ( ActionStatus . SUCCESS , msg , returnvalue = imported )
Import all references in the currently open scene
166
8
16,272
def update_scenenode ( f ) : n = get_current_scene_node ( ) if not n : msg = "Could not find a scene node." return ActionStatus ( ActionStatus . FAILURE , msg ) # get dbentry for for the given jbfile tfi = f . get_obj ( ) assert tfi tf = dj . taskfiles . get ( task = tfi . task , releasetype = tfi . releasetype , version = tfi . version , descriptor = tfi . descriptor , typ = tfi . typ ) cmds . setAttr ( '%s.taskfile_id' % n , lock = False ) cmds . setAttr ( '%s.taskfile_id' % n , tf . pk ) cmds . setAttr ( '%s.taskfile_id' % n , lock = True ) msg = "Successfully updated scene node to %s" % tf . id return ActionStatus ( ActionStatus . SUCCESS , msg )
Set the id of the current scene node to the id for the given file
222
15
16,273
def call ( args , stdout = PIPE , stderr = PIPE ) : p = Popen ( args , stdout = stdout , stderr = stderr ) out , err = p . communicate ( ) try : return out . decode ( sys . stdout . encoding ) , err . decode ( sys . stdout . encoding ) except Exception : return out , err
Calls the given arguments in a seperate process and returns the contents of standard out .
84
18
16,274
def make ( self ) : eval = self . command . eval ( ) with open ( self . filename , 'w' ) as f : f . write ( eval )
Evaluate the command and write it to a file .
35
12
16,275
def set_default_args ( self , default_args ) : for name , args in default_args . items ( ) : command = self [ name ] command . default_args = default_args . get ( command . name ) or { }
Set default args for commands in collection .
52
8
16,276
def extract_traits ( self , entity ) : traits = getattr ( entity , self . _characteristic ) if traits is not None and isinstance ( traits , Hashable ) : traits = [ traits ] return Trait ( traits , getattr ( entity , self . _characteristic + '_match' , True ) )
Extract data required to classify entity .
69
8
16,277
def add ( self , entity ) : characteristic = self . extract_traits ( entity ) if not characteristic . traits : return if characteristic . is_matching : self . add_match ( entity , * characteristic . traits ) else : self . add_mismatch ( entity , * characteristic . traits )
Add entity to index .
64
5
16,278
def remove ( self , entity ) : empty_traits = set ( ) self . mismatch_unknown . discard ( entity ) for trait , entities in self . index . items ( ) : entities . discard ( entity ) if not entities : empty_traits . add ( trait ) for empty_trait in empty_traits : del self . index [ empty_trait ]
Remove entity from the MatchBox .
79
7
16,279
def get_host_certificate ( host , port = 443 ) : ip_addr = socket . gethostbyname ( host ) sock = socket . socket ( ) context = SSL . Context ( SSL . TLSv1_METHOD ) context . set_options ( SSL . OP_NO_SSLv2 ) context . load_verify_locations ( certifi . where ( ) , None ) ssl_sock = SSL . Connection ( context , sock ) ssl_sock . connect ( ( ip_addr , port ) ) ssl_sock . do_handshake ( ) return ssl_sock . get_peer_certificate ( )
Get a host s certificate .
141
6
16,280
def get_inner_keys ( dictionary ) : keys = [ ] for key in dictionary . keys ( ) : inner_keys = dictionary [ key ] . keys ( ) keys += [ key + " " + inner_key # concatenate for inner_key in inner_keys ] return keys
Gets 2nd - level dictionary keys
61
8
16,281
def get_inner_data ( dictionary ) : out = { } for key in dictionary . keys ( ) : inner_keys = dictionary [ key ] . keys ( ) for inner_key in inner_keys : new_key = key + " " + inner_key # concatenate out [ new_key ] = dictionary [ key ] [ inner_key ] return out
Gets 2nd - level data into 1st - level dictionary
78
13
16,282
def do_use ( self , args ) : self . instance = args self . prompt = self . instance + '> ' archive = self . _client . get_archive ( self . instance ) self . streams = [ s . name for s in archive . list_streams ( ) ] self . tables = [ t . name for t in archive . list_tables ( ) ]
Use another instance provided as argument .
81
7
16,283
def update_label ( self ) : current_file = str ( self . selectedFiles ( ) [ 0 ] ) if not '.' in current_file . split ( os . path . sep ) [ - 1 ] : # add hdf5 extention if none given current_file += '.hdf5' if os . path . isfile ( current_file ) : self . setLabelText ( QtGui . QFileDialog . Accept , 'Reload' ) elif os . path . isdir ( current_file ) : self . setLabelText ( QtGui . QFileDialog . Accept , 'Open' ) else : self . setLabelText ( QtGui . QFileDialog . Accept , 'Create' )
Updates the text on the accept button to reflect if the name of the data file will result in opening an existing file or creating a new one
155
29
16,284
def abs_path ( path , format_kwargs = { } , relative_to = None , keep_slash = False ) : if format_kwargs : path = path . format_map ( format_kwargs ) has_slash = path . endswith ( os . sep ) if os . path . isabs ( path ) : path = os . path . normpath ( path ) elif ':' in path : path = asset_path ( path , keep_slash = False ) else : path = os . path . expanduser ( path ) if relative_to : path = os . path . join ( relative_to , path ) path = os . path . abspath ( path ) path = os . path . normpath ( path ) if has_slash and keep_slash : path = '{path}{slash}' . format ( path = path , slash = os . sep ) return path
Get abs . path for path .
197
7
16,285
def paths_to_str ( paths , format_kwargs = { } , delimiter = os . pathsep , asset_paths = False , check_paths = False ) : if not paths : return '' if isinstance ( paths , str ) : paths = paths . split ( delimiter ) processed_paths = [ ] for path in paths : original = path path = path . format_map ( format_kwargs ) if not os . path . isabs ( path ) : if asset_paths and ':' in path : try : path = asset_path ( path ) except ValueError : path = None if path is not None and os . path . isdir ( path ) : processed_paths . append ( path ) elif check_paths : f = locals ( ) printer . warning ( 'Path does not exist: {path} (from {original})' . format_map ( f ) ) return delimiter . join ( processed_paths )
Convert paths to a single string .
208
8
16,286
def index ( ) : # Reset current index values when the page is refreshed for k , v in current_index . items ( ) : current_index [ k ] = 0 logging . info ( "Dashboard refreshed" ) # render the template (below) that will use JavaScript to read the stream return render_template ( "crystal_dashboard.html" )
Renders the dashboard when the server is initially run .
76
11
16,287
def update ( ) : assert request . method == "POST" , "POST request expected received {}" . format ( request . method ) if request . method == 'POST' : # Get figure stats selected_run = request . form [ 'selected_run' ] variable_names = utils . get_variables ( selected_run ) . items ( ) if len ( current_index ) < 1 : for _ , v_n in variable_names : current_index [ v_n ] = 0 logging . info ( "Current index: {}" . format ( current_index ) ) data = utils . get_variable_update_dicts ( current_index , variable_names , selected_run ) return jsonify ( data )
Called by XMLHTTPrequest function periodically to get new graph data .
156
16
16,288
def get_projects ( ) : assert request . method == "GET" , "GET request expected received {}" . format ( request . method ) try : if request . method == 'GET' : projects = utils . get_projects ( ) return jsonify ( projects ) except Exception as e : logging . error ( e ) return jsonify ( { "0" : "__EMPTY" } )
Send a dictionary of projects that are available on the database .
84
12
16,289
def get_runs ( ) : assert request . method == "POST" , "POST request expected received {}" . format ( request . method ) if request . method == "POST" : try : selected_project = request . form [ "selected_project" ] runs = utils . get_runs ( selected_project ) return jsonify ( runs ) except Exception as e : logging . error ( e ) return jsonify ( { "0" : "__EMPTY" } )
Send a dictionary of runs associated with the selected project .
101
11
16,290
def get_variables ( ) : assert request . method == "POST" , "POST request expected received {}" . format ( request . method ) if request . method == "POST" : try : selected_run = request . form [ "selected_run" ] variables = utils . get_variables ( selected_run ) # Reset current_index when you select a new run variable_names = variables . items ( ) global current_index current_index = { } if len ( current_index ) < 1 : for _ , v_n in variable_names : current_index [ "{}" . format ( v_n ) ] = 0 return jsonify ( variables ) except Exception as e : logging . error ( e ) return jsonify ( { "0" : "__EMPTY" } )
Send a dictionary of variables associated with the selected run .
170
11
16,291
def install_completion ( shell : arg ( choices = ( 'bash' , 'fish' ) , help = 'Shell to install completion for' ) , to : arg ( help = '~/.bashrc.d/runcommands.rc or ~/.config/fish/runcommands.fish' ) = None , overwrite : 'Overwrite if exists' = False ) : if shell == 'bash' : source = 'runcommands:completion/bash/runcommands.rc' to = to or '~/.bashrc.d' elif shell == 'fish' : source = 'runcommands:completion/fish/runcommands.fish' to = to or '~/.config/fish/runcommands.fish' source = asset_path ( source ) destination = os . path . expanduser ( to ) if os . path . isdir ( destination ) : destination = os . path . join ( destination , os . path . basename ( source ) ) printer . info ( 'Installing' , shell , 'completion script to:\n ' , destination ) if os . path . exists ( destination ) : if overwrite : printer . info ( 'Overwriting:\n {destination}' . format_map ( locals ( ) ) ) else : message = 'File exists. Overwrite?' . format_map ( locals ( ) ) overwrite = confirm ( message , abort_on_unconfirmed = True ) copy_file ( source , destination ) printer . info ( 'Installed; remember to:\n source {destination}' . format_map ( locals ( ) ) )
Install command line completion script .
340
6
16,292
def synthesize ( self , modules , use_string , x64 , native ) : # code_opts = CodeOpts( # str.lower, None if use_string else hash_func, # 'reloc_delta', '->', # True) # gen_opts = GenOpts('defs', transformed) print ( hash_func ) groups = group_by ( modules , ends_with_punctuation ) sources = self . make_source ( groups , self . database ) if sources : return stylify_files ( { 'defs.h' : sources [ 0 ] , 'init.c' : sources [ 1 ] } ) else : return ''
Transform sources .
146
3
16,293
def make_source ( self , groups , code_opts , gen_opts ) : modules = self . make_modules ( groups , code_opts ) var_decls = modules . var_decls relocs = AttrsGetter ( modules . relocs ) x86 , x64 = relocs . get_attrs ( 'x86' , 'x64' ) if code_opts . windll : structs , x86_reloc , x64_reloc = make_windll ( modules . structs ) x86 += x86_reloc x64 += x64_reloc else : structs = '' . join ( modules . structs ) c_relocs = reloc_both ( relocs . strings + x86 , x64 ) data = var_decls . strip ( ) c_header = make_c_header ( gen_opts . filename , 'NOTICE' , modules . typedefs + structs + data ) c_source = make_init ( modules . hashes + c_relocs + modules . libprocs , callable ( code_opts . hash_func ) ) return [ c_header , c_source ]
Build the final source code for all modules .
263
9
16,294
def make_modules ( self , groups , code_opts ) : modules = [ ] for raw_module , raw_funcs in groups : module = raw_module [ 0 ] . strip ( ) . strip ( string . punctuation ) funcs = [ func . strip ( ) for func in raw_funcs ] args = [ self . database . query_args ( func , raw = True ) for func in funcs ] if self . generic : args = [ arg if arg else ( 'VOID *' , [ ] ) for arg in args ] else : args = [ arg for arg in args if arg ] if not args : logging . info ( _ ( '%s not found.' ) , module ) continue logging . debug ( module ) module = ModuleSource ( module , zip ( funcs , args ) , code_opts ) modules . append ( module . c_source ( ) ) return AttrsGetter ( modules )
Build shellcoding files for the module .
198
9
16,295
def c_source ( self ) : relocs = Relocs ( '' . join ( self . c_self_relocs ( ) ) , * self . c_module_relocs ( ) ) return Source ( '' . join ( self . c_typedefs ( ) ) , '' if self . opts . no_structs else self . c_struct ( ) , '' . join ( self . c_hashes ( ) ) , '' . join ( self . c_var_decls ( ) ) , relocs , self . c_loadlib ( ) + '' . join ( self . c_getprocs ( ) ) )
Return strings .
143
3
16,296
def c_typedefs ( self ) : defs = [ ] attrs = self . opts . attrs + '\n' if self . opts . attrs else '' for name , args in self . funcs : logging . debug ( 'name: %s args: %s' , name , args ) defs . append ( 'typedef\n{}\n{}{}({});\n' . format ( args [ 0 ] , attrs , self . _c_type_name ( name ) , make_c_args ( args [ 2 ] ) ) ) return defs
Get the typedefs of the module .
131
9
16,297
def c_struct ( self ) : member = '\n' . join ( self . c_member_funcs ( True ) ) if self . opts . windll : return 'struct {{\n{}{} }} {};\n' . format ( self . _c_dll_base ( ) , member , self . name ) return 'typedef\nstruct {2} {{\n{0}\n{1}}}\n{3};\n' . format ( self . _c_dll_base ( ) , member , * self . _c_struct_names ( ) )
Get the struct of the module .
130
7
16,298
def c_hashes ( self ) : if callable ( self . opts . hash_func ) : hashes = [ '# define {}{} {}\n' . format ( self . opts . prefix , name , self . opts . hash_func ( name ) ) for name , dummy_args in self . funcs ] else : hashes = [ make_c_str ( self . opts . prefix + name , name ) for name , dummy_args in self . funcs ] if self . name != 'kernel32' : hashes = [ make_c_str ( self . opts . prefix + self . name , self . name ) ] + hashes return hashes
Get the hashes of the module including functions and DLLs .
145
13
16,299
def c_self_relocs ( self ) : relocs = [ ] if not callable ( self . opts . hash_func ) : relocs = [ reloc_ptr ( self . opts . prefix + name , self . opts . reloc_delta , 'char *' ) for name , dummy_args in self . funcs ] if self . name != 'kernel32' : relocs = [ reloc_ptr ( self . opts . prefix + self . name , self . opts . reloc_delta , 'char *' ) ] + relocs return relocs
Build relocation for strings .
136
5