idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
14,800
def start ( self ) : self . window = _curses . initscr ( ) _curses . savetty ( ) _curses . start_color ( ) _curses . use_default_colors ( ) self . window . leaveok ( 1 ) _curses . raw ( ) self . window . keypad ( 1 ) _curses . noecho ( ) _curses . cbreak ( ) _curses . nonl ( ) _curses . curs_set ( 0 ) if self . blocking : self . window . nodelay ( 0 ) else : self . window . nodelay ( 1 ) self . running = True while self . running : self . cycle ( ) if self . friendly and not self . blocking : time . sleep ( self . delay ) self . stop ( )
Window event loop
172
3
14,801
def stop ( self ) : _curses . nocbreak ( ) self . window . keypad ( 0 ) _curses . echo ( ) _curses . resetty ( ) _curses . endwin ( ) self . running = False
Restore the TTY to its original state .
52
10
14,802
def coordinate ( self , panes = [ ] , index = 0 ) : y = 0 # height for i , element in enumerate ( self . panes ) : x = 0 # width if isinstance ( element , list ) : current_height = 0 for j , pane in enumerate ( element ) : if pane . hidden : continue current_width = pane . width current_height = pane . height upper = ( ( y , x ) , ( y , x + current_width ) ) lower = ( ( y + ( current_height if current_height > 1 else 0 ) , x ) , ( y + ( current_height if current_height > 1 else 0 ) , x + current_width ) ) pane . coords = [ upper , lower ] x += current_width y += ( current_height + 1 if current_height > 1 else 1 ) else : if element . hidden : continue current_width = element . width current_height = element . height upper = ( ( y , x ) , ( y , x + current_width ) ) lower = ( ( y + ( current_height if current_height > 1 else 0 ) , x ) , ( y + ( current_height if current_height > 1 else 0 ) , x + current_width ) ) element . coords = [ upper , lower ] y += ( current_height + 1 if current_height > 1 else 1 ) if self . debug : coordinates = "Coordinates: " + str ( [ p . coords for p in self ] ) if len ( coordinates ) > self . width : coordinates = coordinates [ : self . width - 3 ] coordinates += '...' self . addstr ( self . height - 3 , 0 , coordinates )
Update pane coordinate tuples based on their height and width relative to other panes within the dimensions of the current window .
366
24
14,803
def addstr ( self , h , w , text , attrs = 0 ) : self . update_window_size ( ) if h > self . height or w > self . width : return try : # Python curses addstr doesn't deal with non-ascii characters #self.window.addstr(h, w, text.encode("ascii", "ignore"), attrs) self . window . addstr ( h , w , text , attrs ) except Exception as e : pass
A safe addstr wrapper
107
5
14,804
def update_window_size ( self ) : height , width = self . window . getmaxyx ( ) if self . height != height or self . width != width : self . height , self . width = height , width self . window . clear ( )
Update the current window object with its current height and width and clear the screen if they ve changed .
55
20
14,805
def add ( self , pane ) : if isinstance ( pane , list ) : initialised_panes = [ ] for p in pane : initialised_panes . append ( self . init_pane ( p ) ) self . panes . append ( initialised_panes ) else : pane = self . init_pane ( pane ) self . panes . append ( pane )
Adds new panes to the window
83
7
14,806
def get ( self , name , default = None , cache = False ) : if cache == True : for pane in self . cache : if pane . name == name : return pane return default for pane in self : if pane . name == name : return pane return default
Get a pane by name possibly from the cache . Return None if not found .
55
16
14,807
def process_input ( self , character ) : func = None try : func = getattr ( self , "handle_%s" % chr ( character ) , None ) except : pass if func : func ( )
A subclassable method for dealing with input characters .
46
10
14,808
def cmd ( send , msg , args ) : if args [ 'type' ] == 'privmsg' : send ( 'GCC is a group exercise!' ) return if 'include' in msg : send ( "We're not a terribly inclusive community around here." ) return if 'import' in msg : send ( "I'll have you know that standards compliance is important." ) return tmpfile = tempfile . NamedTemporaryFile ( ) for line in msg . splitlines ( ) : line = line + '\n' tmpfile . write ( line . encode ( ) ) tmpfile . flush ( ) process = subprocess . run ( [ 'gcc' , '-o' , '/dev/null' , '-xc' , tmpfile . name ] , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , timeout = 5 , universal_newlines = True ) tmpfile . close ( ) # Take the last 3 lines to prevent Excess Flood on long error messages output = process . stdout . splitlines ( ) [ : 3 ] for line in output : send ( line , target = args [ 'nick' ] ) if process . returncode == 0 : send ( gen_slogan ( "gcc victory" ) ) else : send ( gen_slogan ( "gcc failed" ) )
Compiles stuff .
287
4
14,809
def get_image_list ( self , page = 1 , per_page = 20 ) : url = self . api_url + '/api/images' params = { 'page' : page , 'per_page' : per_page } response = self . _request_url ( url , 'get' , params = params , with_access_token = True ) headers , result = self . _parse_and_check ( response ) images = ImageList . from_list ( result ) images . set_attributes_from_headers ( headers ) return images
Return a list of user s saved images
120
8
14,810
def upload_image ( self , image_file , referer_url = None , title = None , desc = None , created_at = None , collection_id = None ) : url = self . upload_url + '/api/upload' data = { } if referer_url is not None : data [ 'referer_url' ] = referer_url if title is not None : data [ 'title' ] = title if desc is not None : data [ 'desc' ] = desc if created_at is not None : data [ 'created_at' ] = str ( created_at ) if collection_id is not None : data [ 'collection_id' ] = collection_id files = { 'imagedata' : image_file } response = self . _request_url ( url , 'post' , data = data , files = files , with_access_token = True ) headers , result = self . _parse_and_check ( response ) return Image . from_dict ( result )
Upload an image
218
3
14,811
def get_oembed ( self , url ) : api_url = self . api_url + '/api/oembed' parameters = { 'url' : url } response = self . _request_url ( api_url , 'get' , params = parameters ) headers , result = self . _parse_and_check ( response ) return result
Return an oEmbed format json dictionary
74
8
14,812
def _request_url ( self , url , method , params = None , data = None , files = None , with_client_id = False , with_access_token = False ) : headers = { } # type: Dict[str, Any] if data is None : data = { } if params is None : params = { } if with_client_id and self . _client_id is not None : params [ 'client_id' ] = self . _client_id if with_access_token and self . _access_token is not None : headers [ 'Authorization' ] = "Bearer " + self . _access_token try : return requests . request ( method , url , params = params , data = data , files = files , headers = headers ) except requests . RequestException as e : raise GyazoError ( str ( e ) )
Send HTTP request
186
3
14,813
def validate_rows_length ( data , length , message = None , exception = MatrixError ) : if message is None : message = 'All rows must have the same length (same number of columns)' for row in data : if len ( row ) != length : raise exception ( message )
Validate that all rows have the same length .
60
10
14,814
def validate_square ( data , message = None , exception = MatrixError ) : rows , columns = len ( data ) , len ( data [ 0 ] ) if data else 0 if message is None : message = 'Number of rows: %s != number of columns: %s in matrix' % ( rows , columns ) if rows != columns : raise exception ( message )
Validate that the matrix has equal number of rows and columns .
77
13
14,815
def validate_categories_equal_entities ( categories , entities , message = None , exception = MatrixError ) : nb_categories = len ( categories ) nb_entities = len ( entities ) if message is None : message = 'Number of categories: %s != number of entities: %s' % ( nb_categories , nb_entities ) if categories and nb_categories != nb_entities : raise exception ( message )
Validate that the matrix has equal number of entities and categories .
101
13
14,816
def validate ( self ) : super ( ) . validate ( ) nb_entities = len ( self . entities ) if nb_entities != self . rows : raise self . error ( 'Number of entities: %s != number of rows: %s' % ( nb_entities , self . rows ) )
Base validation + entities = rows .
69
7
14,817
def transitive_closure ( self ) : data = [ [ 1 if j else 0 for j in i ] for i in self . data ] for k in range ( self . rows ) : for i in range ( self . rows ) : for j in range ( self . rows ) : if data [ i ] [ k ] and data [ k ] [ j ] : data [ i ] [ j ] = 1 return data
Compute the transitive closure of the matrix .
88
10
14,818
def validate ( self ) : super ( ) . validate ( ) nb_entities = len ( self . entities ) if nb_entities != self . rows + self . columns : raise self . error ( 'Number of entities: %s != number of rows + ' 'number of columns: %s+%s=%s' % ( nb_entities , self . rows , self . columns , self . rows + self . columns ) )
Base validation + entities = rows + columns .
97
9
14,819
def default_entities ( self ) : return [ str ( i ) for i in range ( self . rows + self . columns ) ]
Return range from 0 to rows + columns .
29
9
14,820
def validate ( self ) : super ( ) . validate ( ) message_dsm = 'Matrix at [%s:%s] is not an instance of ' 'DesignStructureMatrix or MultipleDomainMatrix.' message_ddm = 'Matrix at [%s:%s] is not an instance of ' 'DomainMappingMatrix or MultipleDomainMatrix.' messages = [ ] for i , row in enumerate ( self . data ) : for j , cell in enumerate ( row ) : if i == j : if not isinstance ( cell , ( DesignStructureMatrix , MultipleDomainMatrix ) ) : messages . append ( message_dsm % ( i , j ) ) elif not isinstance ( cell , ( DomainMappingMatrix , MultipleDomainMatrix ) ) : messages . append ( message_ddm % ( i , j ) ) if messages : raise self . error ( '\n' . join ( messages ) )
Base validation + each cell is instance of DSM or MDM .
195
13
14,821
def _calc_distortion ( self ) : m = self . _X . shape [ 0 ] self . distortion = 1 / m * sum ( linalg . norm ( self . _X [ i , : ] - self . centroids [ self . clusters [ i ] ] ) ** 2 for i in range ( m ) ) return self . distortion
Calculates the distortion value of the current clusters
76
10
14,822
def _move_centroids ( self ) : for k in range ( self . n_clusters ) : if k in self . clusters : centroid = np . mean ( self . _X [ self . clusters == k , : ] , axis = 0 ) self . centroids [ k ] = centroid else : self . n_clusters -= 1 self . centroids = self . centroids [ : self . n_clusters ] self . clusters -= 1 k -= 1
Calculate new centroids as the means of the samples in each cluster
105
16
14,823
def _closest_centroid ( self , x ) : closest_centroid = 0 distance = 10 ^ 9 for i in range ( self . n_clusters ) : current_distance = linalg . norm ( x - self . centroids [ i ] ) if current_distance < distance : closest_centroid = i distance = current_distance return closest_centroid
Returns the index of the closest centroid to the sample
82
11
14,824
def _assign_clusters ( self ) : self . clusters = np . array ( [ self . _closest_centroid ( x ) for x in self . _X ] )
Assign the samples to the closest centroids to create clusters
41
13
14,825
def fit ( self , X ) : self . _X = super ( ) . cluster ( X ) candidates = [ ] for _ in range ( self . n_runs ) : self . _init_random_centroids ( ) while True : prev_clusters = self . clusters self . _assign_clusters ( ) self . _move_centroids ( ) if np . all ( prev_clusters == self . clusters ) : break self . _calc_distortion ( ) candidates . append ( ( self . distortion , self . centroids , self . clusters ) ) candidates . sort ( key = lambda x : x [ 0 ] ) self . distortion = candidates [ 0 ] [ 0 ] self . centroids = candidates [ 0 ] [ 1 ] self . clusters = candidates [ 0 ] [ 2 ] return self
The K - Means itself
177
5
14,826
def cmd ( send , * _ ) : words = [ [ verb , noun , abbrev , noun , adj , abbrev , noun ] , [ verb , adj , abbrev , noun ] , [ verb , abbrev , noun , verb , adj , noun ] , [ verb , noun , ingverb , adj , abbrev , noun ] , [ adj , abbrev , noun , verb , adj , noun ] , [ abbrev , noun , verb , adj , noun , verb , abbrev , noun ] , [ ingverb , noun , verb , adj , abbrev , noun ] , [ verb , adj , abbrev , noun , verb , abbrev , noun ] ] msgtype = [ "If we %s the %s, we can get to the %s %s through the %s %s %s!" % tuple ( map ( choice , words [ 0 ] ) ) , "We need to %s the %s %s %s!" % tuple ( map ( choice , words [ 1 ] ) ) , "Try to %s the %s %s, maybe it will %s the %s %s!" % tuple ( map ( choice , words [ 2 ] ) ) , "You can't %s the %s without %s the %s %s %s!" % tuple ( map ( choice , words [ 3 ] ) ) , "Use the %s %s %s, then you can %s the %s %s!" % tuple ( map ( choice , words [ 4 ] ) ) , "The %s %s is down, %s the %s %s so we can %s the %s %s!" % tuple ( map ( choice , words [ 5 ] ) ) , "%s the %s won't do anything, we need to %s the %s %s %s!" % tuple ( map ( choice , words [ 6 ] ) ) , "I'll %s the %s %s %s, that should %s the %s %s!" % tuple ( map ( choice , words [ 7 ] ) ) ] send ( choice ( msgtype ) )
Causes the bot to generate some jargon .
442
9
14,827
def _GenerateStaticsTable ( self , title = 'Current Statistics' ) : if len ( self . __categories . keys ( ) ) < 1 : return '' d = self . __categories keys = sorted ( d . keys ( ) ) cats = ', ' . join ( [ '"%s"' % k for k in keys ] ) vals = ', ' . join ( [ '%d' % d [ k ] for k in keys ] ) return r''' %s %s .. csv-table:: :header: %s %s ''' % ( title , '-' * len ( title ) , cats , vals )
Generates a statics table based on set categories
138
10
14,828
def _ProduceSingleContent ( self , mod , showprivate = False , showinh = False ) : try : all = mod [ 1 ] . __all__ except AttributeError : raise RuntimeError ( 'Module (%s) MUST have `__all__` defined.' % mod [ 1 ] . __name__ ) try : name = mod [ 1 ] . __displayname__ except AttributeError : name = mod [ 0 ] try : category = mod [ 1 ] . __category__ self . __categories . setdefault ( category , 0 ) self . __categories [ category ] += 1 except AttributeError : pass feats = inspect . getmembers ( mod [ 1 ] ) fname = 'content/' + mod [ 1 ] . __name__ . replace ( '.' , '/' ) . replace ( ' ' , '-' ) + '.rst' feats = [ f for f in feats if f [ 0 ] in all and ( showprivate or not f [ 0 ] [ 0 : 1 ] == '_' ) ] with open ( fname , 'w' ) as fid : fid . write ( Classifier . GetModuleText ( name , mod [ 1 ] . __name__ , showprivate = showprivate ) ) for f in feats : # Check for a __displayname__ if inspect . isclass ( f [ 1 ] ) or inspect . isfunction ( f [ 1 ] ) : try : featname = f [ 1 ] . __displayname__ except AttributeError : featname = f [ 1 ] . __name__ try : category = f [ 1 ] . __category__ self . __categories . setdefault ( category , 0 ) self . __categories [ category ] += 1 except AttributeError : pass # Make the auto doc rst if inspect . isclass ( f [ 1 ] ) : fid . write ( Classifier . GetClassText ( featname , '%s.%s' % ( mod [ 1 ] . __name__ , f [ 1 ] . __name__ ) , showprivate = showprivate , showinh = showinh ) ) elif inspect . isfunction ( f [ 1 ] ) : fid . write ( Classifier . GetFunctionText ( featname , '%s.%s' % ( mod [ 1 ] . __name__ , f [ 1 ] . __name__ ) ) ) fid . close ( ) return '\n %s' % ( fname . split ( '/' ) [ - 1 ] )
An internal helper to create a page for a single module . This will automatically generate the needed RSF to document the module and save the module to its own page in its appropriate location .
528
37
14,829
def _ProduceContent ( self , mods , showprivate = False , showinh = False ) : result = '' nestedresult = '' # For each module for mod in mods : # Test to see if module to document has an __all__ variable try : all = mod [ 1 ] . __all__ except AttributeError : raise RuntimeError ( 'Module (%s) MUST have `__all__` defined.' % mod [ 1 ] . __name__ ) if not showprivate and mod [ 0 ] [ 0 : 1 ] == '_' : continue if mod [ 0 ] [ 0 : 2 ] == '__' : #and not showprivate continue result += self . _ProduceSingleContent ( mod , showprivate , showinh ) return result
An internal helper to create pages for several modules that do not have nested modules . This will automatically generate the needed RSF to document each module module and save the module to its own page appropriately .
159
39
14,830
def _MakePackagePages ( self , package , showprivate = False , nested = False , showinh = False ) : def checkNoNested ( mod ) : try : all = mod . __all__ except AttributeError : return False mems = inspect . getmembers ( mod , inspect . ismodule ) mems = [ m for m in mems if m [ 0 ] in mod . __all__ ] if len ( mems ) > 0 : return False return True # Get package module members mods = inspect . getmembers ( package , inspect . ismodule ) # Split into modules and sub-packages nmods , pvt , npkgs = [ ] , [ ] , [ ] for mod in mods : # Deal with private modules if checkNoNested ( mod [ 1 ] ) : if mod [ 0 ] [ 0 ] == '_' : pvt . append ( mod ) else : nmods . append ( mod ) else : npkgs . append ( mod ) if showprivate : nmods += pvt # for each member that has a nested module # recurse and keep track of index files for that package files = [ ] ignore = [ ] for pkg in npkgs : pt = '%s/%s/%s' % ( self . path , package . __name__ . replace ( '.' , '/' ) , pkg [ 1 ] . __name__ . split ( '.' ) [ - 1 ] ) if os . path . exists ( pt ) : shutil . rmtree ( pt ) os . makedirs ( pt ) ignore += inspect . getmembers ( pkg [ 1 ] ) f = self . _MakePackagePages ( pkg [ 1 ] , showprivate = showprivate , nested = True , showinh = showinh ) files . append ( f . split ( package . __name__ . replace ( '.' , '/' ) + '/' ) [ 1 ] ) if nested : try : name = package . __displayname__ except AttributeError : name = package . __name__ # Create index file here index = r''' %s %s .. toctree:: :maxdepth: 5 ''' % ( name , '*' * len ( name ) ) # include sub packages first index += '\n ' . join ( files ) # then include modules index += '\n ' + self . _ProduceContent ( nmods , showprivate = showprivate , showinh = showinh ) findex = 'content/%s/index.rst' % ( package . __name__ . replace ( '.' , '/' ) ) # Write the file with open ( findex , 'w' ) as f : if package . __doc__ : f . write ( package . __doc__ ) f . write ( index ) # return filename for index file at package level return '\n ' + findex # Not nested: return all files names = '\n %s/%s/' % ( self . path , package . __name__ . replace ( '.' , '/' ) ) nmods = [ m for m in nmods if m not in ignore ] return names . join ( self . _ProduceContent ( nmods , showprivate = showprivate , showinh = showinh ) . split ( '\n ' ) + files )
An internal helper to generate all of the pages for a given package
708
13
14,831
def _DocPackageFromTop ( self , packages , showprivate = False , showinh = False ) : appIndex = '' if not isinstance ( packages , list ) : packages = [ packages ] if os . path . exists ( 'content' ) : shutil . rmtree ( 'content' ) os . makedirs ( 'content' ) appIndex += r''' .. toctree:: :maxdepth: 5 :hidden: :caption: %s: ''' % ( 'API Index' ) # Iterate over each package and generate appropriate pages for i in range ( len ( packages ) ) : # The package to document and its path package = packages [ i ] try : name = package . __displayname__ except AttributeError : name = package . __name__ # Make sure paths are ready path = 'content/%s' % package . __name__ if os . path . exists ( path ) : shutil . rmtree ( path ) os . makedirs ( path ) # Check if there is top level documentation # if package.__doc__: # Get metadata meta = 'About %s\n%s\n' % ( name , '=' * len ( 'About ' + name ) ) author = getattr ( package , "__author__" , None ) license = getattr ( package , "__license__" , None ) copyright = getattr ( package , "__copyright__" , None ) version = getattr ( package , "__version__" , None ) if author : meta += '\n* Author: %s' % author if license : meta += '\n* License: %s' % license if copyright : meta += '\n* Copyright: %s' % copyright if version : meta += '\n* Version: %s' % version about = '%s/%s' % ( path , 'index.rst' ) this_toc = r''' .. toctree:: :maxdepth: 5 :caption: %s: ''' % ( name ) this_toc += self . _MakePackagePages ( package , showprivate = showprivate , showinh = showinh ) this_toc = this_toc . replace ( '%s/' % path , '' ) with open ( about , 'w' ) as f : f . write ( '%s\n\n' % meta ) if package . __doc__ : f . write ( package . __doc__ ) f . write ( this_toc ) appIndex += '\n %s' % about # Return the new content to append return appIndex
Generates all of the documentation for given packages and appends new tocrees to the index . All documentation pages will be under the set relative path .
557
31
14,832
def eye ( root = None , zodb_uri = None , port = 8080 ) : if root is not None : root_factory = lambda request : Node ( root ) elif zodb_uri is not None : if '://' not in zodb_uri : # treat it as a file:// zodb_uri = 'file://' + os . path . abspath ( zodb_uri ) from repoze . zodbconn . finder import PersistentApplicationFinder finder = PersistentApplicationFinder ( zodb_uri , appmaker = lambda root : Node ( root ) ) root_factory = lambda request : finder ( request . environ ) else : raise RuntimeError ( "Must specify root object or ZODB URI." ) app = Eye ( root_factory ) if 'DEBUG' in os . environ : from repoze . debug . pdbpm import PostMortemDebug app = PostMortemDebug ( app ) serve ( app , host = '127.0.0.1' , port = port )
Serves a WSGI app to browse objects based on a root object or ZODB URI .
234
20
14,833
def cmd ( send , _ , args ) : if args [ 'target' ] == 'private' : send ( "You're all alone!" ) return with args [ 'handler' ] . data_lock : channel = args [ 'handler' ] . channels [ args [ 'target' ] ] voiced = len ( [ x for x in args [ 'handler' ] . voiced [ args [ 'target' ] ] . values ( ) if x ] ) total = len ( channel . users ( ) ) send ( "%d active users, %d total users, %g%% active" % ( voiced , total , voiced / total * 100 ) )
Returns stats on the active users .
135
7
14,834
def determine_end_point ( http_request , url ) : if url . endswith ( 'aggregates' ) or url . endswith ( 'aggregates/' ) : return 'aggregates' else : return 'detail' if is_detail_url ( http_request , url ) else 'list'
returns detail list or aggregates
71
7
14,835
def run_simulation ( self ) : for _ in range ( self . num_fights ) : # restore health between each fight self . c1 . stats [ 'Health' ] = self . c1 . stats [ 'max_health' ] self . c2 . stats [ 'Health' ] = self . c2 . stats [ 'max_health' ] # run the Battles b = Battle ( self . c1 , self . c2 , self . traits , self . rules , print_console = 'No' ) #print(b) if b . status == self . c1 . name : self . num_c1 += 1 else : self . num_c2 += 1 # tag winner if self . num_c1 > self . num_c2 : self . winner = self . c1 . name else : self . winner = self . c2 . name
runs the simulation
186
3
14,836
def take_damage ( self , c , dmg ) : if c . name == self . c1 . name : self . c1 . stats [ 'Health' ] = self . c1 . stats [ 'Health' ] - dmg else : self . c2 . stats [ 'Health' ] = self . c2 . stats [ 'Health' ] - dmg
wrapper to apply damage taken to a character
76
8
14,837
def show_message ( self , c_attack , c_defend , result , dmg , print_console = 'Yes' ) : perc_health_att = '[' + str ( round ( ( c_attack . stats [ 'Health' ] * 100 ) / c_attack . stats [ 'max_health' ] ) ) + '%]' perc_health_def = '[' + str ( round ( ( c_defend . stats [ 'Health' ] * 100 ) / c_defend . stats [ 'max_health' ] ) ) + '%]' if result == 'Miss' : txt = c_attack . name + ' ' + perc_health_att . rjust ( 6 ) + ' miss ' + c_defend . name + ' ' + perc_health_def . rjust ( 6 ) elif result == 'Crit' : txt = c_attack . name + ' ' + perc_health_att . rjust ( 6 ) + ' CRIT ' + c_defend . name + ' ' + perc_health_def . rjust ( 6 ) txt += ' for ' + str ( dmg ) else : txt = c_attack . name + ' ' + perc_health_att . rjust ( 6 ) + ' hits ' + c_defend . name + ' ' + perc_health_def . rjust ( 6 ) txt += ' for ' + str ( dmg ) if print_console == 'Yes' : print ( txt )
function to wrap the display of the battle messages
332
9
14,838
def iterqueue ( queue , expected ) : while expected > 0 : for item in iter ( queue . get , EXIT ) : yield item expected -= 1
Iterate all value from the queue until the expected number of EXIT elements is received
32
17
14,839
def run ( self ) : input = self . _consume ( ) put_item = self . _que_out . put try : if input is None : # producer res = self . _callable ( * self . _args , * * self . _kwargs ) else : res = self . _callable ( input , * self . _args , * * self . _kwargs ) if res != None : for item in res : put_item ( item ) except Exception as e : # we catch an error, we send on the error que, we consume the input and we exit # consuming the input queue avoid to keep running processes before exiting with # errors self . _que_err . put ( ( self . name , e ) ) if input is not None : for i in input : pass raise finally : for i in range ( self . _num_followers ) : put_item ( EXIT ) self . _que_err . put ( EXIT )
Execute the task on all the input and send the needed number of EXIT at the end
205
19
14,840
def setup ( self , workers = 1 , qsize = 0 ) : if workers <= 0 : raise ValueError ( "workers have to be greater then zero" ) if qsize < 0 : raise ValueError ( "qsize have to be greater or equal zero" ) self . qsize = qsize # output que size self . workers = workers return self
Setup the pool parameters like number of workers and output queue size
74
12
14,841
def processes ( self ) : if self . _processes is None : self . _processes = [ ] for p in range ( self . workers ) : t = Task ( self . _target , self . _args , self . _kwargs ) t . name = "%s-%d" % ( self . target_name , p ) self . _processes . append ( t ) return self . _processes
Initialise and return the list of processes associated with this pool
89
12
14,842
def results ( self ) : tt = None for i , tf in enumerate ( self [ : - 1 ] ) : tt = self [ i + 1 ] q = Queue ( tf . qsize ) tf . set_out ( q , tt . workers ) tt . set_in ( q , tf . workers ) if tt is None : # we have only one pool tt = self [ 0 ] q = Queue ( tt . qsize ) err_q = Queue ( ) tt . set_out ( q , 1 ) for t in self : t . set_err ( err_q ) t . _start ( ) for item in iterqueue ( q , tt . workers ) : yield item errors = list ( iterqueue ( err_q , sum ( t . workers for t in self ) ) ) for t in self : t . _join ( ) if len ( errors ) > 0 : task_name , ex = errors [ 0 ] if len ( errors ) == 1 : msg = 'The task "%s" raised %s' % ( task_name , repr ( ex ) , ) else : msg = '%d tasks raised an exeption. First error reported on task "%s": %s' % ( len ( errors ) , task_name , repr ( ex ) ) raise TaskException ( msg )
Start all the tasks and return data on an iterator
288
10
14,843
def office_content ( self , election_day , office ) : from electionnight . models import PageType office_type = ContentType . objects . get_for_model ( office ) page_type = PageType . objects . get ( model_type = office_type , election_day = election_day , division_level = office . division . level , ) page_content = self . get ( content_type__pk = office_type . pk , object_id = office . pk , election_day = election_day , ) page_type_content = self . get ( content_type = ContentType . objects . get_for_model ( page_type ) , object_id = page_type . pk , election_day = election_day , ) return { "site" : self . site_content ( election_day ) [ "site" ] , "page_type" : self . serialize_content_blocks ( page_type_content ) , "page" : self . serialize_content_blocks ( page_content ) , }
Return serialized content for an office page .
230
9
14,844
def body_content ( self , election_day , body , division = None ) : from electionnight . models import PageType body_type = ContentType . objects . get_for_model ( body ) page_type = PageType . objects . get ( model_type = body_type , election_day = election_day , body = body , jurisdiction = body . jurisdiction , division_level = body . jurisdiction . division . level , ) page_type_content = self . get ( content_type = ContentType . objects . get_for_model ( page_type ) , object_id = page_type . pk , election_day = election_day , ) kwargs = { "content_type__pk" : body_type . pk , "object_id" : body . pk , "election_day" : election_day , } if division : kwargs [ "division" ] = division content = self . get ( * * kwargs ) return { "site" : self . site_content ( election_day ) [ "site" ] , "page_type" : self . serialize_content_blocks ( page_type_content ) , "page" : self . serialize_content_blocks ( content ) , "featured" : [ e . meta . ap_election_id for e in content . featured . all ( ) ] , }
Return serialized content for a body page .
299
9
14,845
def division_content ( self , election_day , division , special = False ) : from electionnight . models import PageType division_type = ContentType . objects . get_for_model ( division ) page_type = PageType . objects . get ( model_type = division_type , election_day = election_day , division_level = division . level , ) page_content = self . get ( content_type__pk = division_type . pk , object_id = division . pk , election_day = election_day , special_election = special , ) page_type_content = self . get ( content_type = ContentType . objects . get_for_model ( page_type ) , object_id = page_type . pk , election_day = election_day , ) return { "site" : self . site_content ( election_day ) [ "site" ] , "page_type" : self . serialize_content_blocks ( page_type_content ) , "page" : self . serialize_content_blocks ( page_content ) , }
Return serialized content for a division page .
238
9
14,846
def site_content ( self , election_day ) : from electionnight . models import PageType page_type = PageType . objects . get ( model_type = ContentType . objects . get ( app_label = "election" , model = "electionday" ) , election_day = election_day , ) site_content = self . get ( content_type = ContentType . objects . get_for_model ( page_type ) , object_id = page_type . pk , election_day = election_day , ) return { "site" : self . serialize_content_blocks ( site_content ) }
Site content represents content for the entire site on a given election day .
135
14
14,847
def get_interesting_members ( base_class , cls ) : base_members = dir ( base_class ) predicate = inspect . ismethod if _py2 else inspect . isfunction all_members = inspect . getmembers ( cls , predicate = predicate ) return [ member for member in all_members if not member [ 0 ] in base_members and ( ( hasattr ( member [ 1 ] , "__self__" ) and not member [ 1 ] . __self__ in inspect . getmro ( cls ) ) if _py2 else True ) and not member [ 0 ] . startswith ( "_" ) and not member [ 0 ] . startswith ( "before_" ) and not member [ 0 ] . startswith ( "after_" ) ]
Returns a list of methods that can be routed to
167
10
14,848
def initialize ( self , value = ( ) ) : if value == ( ) : try : return self . default ( ) except TypeError : return self . default else : return self . clean ( value )
\ initialize returns a cleaned value or the default raising ValueErrors as necessary .
42
16
14,849
def clean ( self , value ) : if not isinstance ( value , self . t ) : value = self . t ( value ) if not self . allow_negative and value < 0 : raise ValueError ( 'value was negative' ) if not self . allow_positive and value > 0 : raise ValueError ( 'values was positive' ) return value
clean a value converting and performing bounds checking
74
8
14,850
def r12_serial_port ( port ) : return serial . Serial ( port , baudrate = BAUD_RATE , parity = PARITY , stopbits = STOP_BITS , bytesize = BYTE_SIZE )
Create a serial connect to the arm .
49
8
14,851
def search_for_port ( port_glob , req , expected_res ) : # Check that the USB port actually exists, based on the known vendor and # product ID. if usb . core . find ( idVendor = 0x0403 , idProduct = 0x6001 ) is None : return None # Find ports matching the supplied glob. ports = glob . glob ( port_glob ) if len ( ports ) == 0 : return None for port in ports : with r12_serial_port ( port ) as ser : if not ser . isOpen ( ) : ser . open ( ) # Write a request out. if sys . version_info [ 0 ] == 2 : ser . write ( str ( req ) . encode ( 'utf-8' ) ) else : ser . write ( bytes ( req , 'utf-8' ) ) # Wait a short period to allow the connection to generate output. time . sleep ( 0.1 ) # Read output from the serial connection check if it's what we want. res = ser . read ( ser . in_waiting ) . decode ( OUTPUT_ENCODING ) if expected_res in res : return port raise ArmException ( 'ST Robotics connection found, but is not responsive.' + ' Is the arm powered on?' ) return None
Find the serial port the arm is connected to .
275
10
14,852
def connect ( self , port = None ) : if port is None : self . port = search_for_port ( '/dev/ttyUSB*' , 'ROBOFORTH\r\n' , 'ROBOFORTH' ) else : self . port = port if self . port is None : raise ArmException ( 'ST Robotics connection not found.' ) self . ser = r12_serial_port ( port ) if not self . ser . isOpen ( ) : self . ser . open ( ) if not self . ser . isOpen ( ) : raise ArmException ( 'Failed to open serial port. Exiting.' ) return self . port
Open a serial connection to the arm .
139
8
14,853
def write ( self , text ) : # Output is converted to bytes with Windows-style line endings. if sys . version_info [ 0 ] == 2 : text_bytes = str ( text . upper ( ) + '\r\n' ) . encode ( 'utf-8' ) else : text_bytes = bytes ( text . upper ( ) + '\r\n' , 'utf-8' ) self . ser . write ( text_bytes )
Write text out to the arm .
98
7
14,854
def read ( self , timeout = READ_TIMEOUT , raw = False ) : time . sleep ( READ_SLEEP_TIME ) raw_out = self . ser . read ( self . ser . in_waiting ) out = raw_out . decode ( OUTPUT_ENCODING ) time_waiting = 0 while len ( out ) == 0 or ending_in ( out . strip ( OUTPUT_STRIP_CHARS ) , RESPONSE_END_WORDS ) is None : time . sleep ( READ_SLEEP_TIME ) time_waiting += READ_SLEEP_TIME raw_out += self . ser . read ( self . ser . in_waiting ) out = raw_out . decode ( OUTPUT_ENCODING ) # TODO how to handle timeouts, if they're now unexpected? if time_waiting >= timeout : break if raw : return raw_out return out
Read data from the arm . Data is returned as a latin_1 encoded string or raw bytes if raw is True .
198
25
14,855
def dump ( self , raw = False ) : raw_out = self . ser . read ( self . ser . in_waiting ) if raw : return raw_out return raw_out . decode ( OUTPUT_ENCODING )
Dump all output currently in the arm s output queue .
50
12
14,856
def get_info ( self ) : return { 'Connected' : self . is_connected ( ) , 'Port' : self . port , 'Bytes Waiting' : self . ser . in_waiting if self . ser else 0 }
Returns status of the robot arm .
51
7
14,857
async def get_data ( self ) : try : async with async_timeout . timeout ( 5 , loop = self . _loop ) : response = await self . _session . get ( self . base_url ) _LOGGER . info ( "Response from OpenSenseMap API: %s" , response . status ) self . data = await response . json ( ) _LOGGER . debug ( self . data ) except ( asyncio . TimeoutError , aiohttp . ClientError , socket . gaierror ) : _LOGGER . error ( "Can not load data from openSenseMap API" ) raise exceptions . OpenSenseMapConnectionError
Get details of OpenSenseMap station .
137
8
14,858
def get_value ( self , key ) : for title in _TITLES . get ( key , ( ) ) + ( key , ) : try : value = [ entry [ 'lastMeasurement' ] [ 'value' ] for entry in self . data [ 'sensors' ] if entry [ 'title' ] == title ] [ 0 ] return value except IndexError : pass return None
Extract a value for a given key .
83
9
14,859
def cmd ( send , msg , args ) : if not args [ 'config' ] [ 'feature' ] . getboolean ( 'hooks' ) : send ( "Hooks are disabled, and this command depends on hooks. Please contact the bot admin(s)." ) return if args [ 'type' ] == 'privmsg' : send ( "Note-passing should be done in public." ) return try : nick , note = msg . split ( maxsplit = 1 ) nicks = set ( x for x in nick . split ( ',' ) if x ) except ValueError : send ( "Not enough arguments." ) return nickregex = args [ 'config' ] [ 'core' ] [ 'nickregex' ] + '+$' successful_nicks = [ ] failed_nicks = [ ] for nick in nicks : if re . match ( nickregex , nick ) : row = Notes ( note = note , submitter = args [ 'nick' ] , nick = nick , time = datetime . now ( ) ) args [ 'db' ] . add ( row ) successful_nicks . append ( nick ) else : failed_nicks . append ( nick ) if successful_nicks : send ( "Note left for %s." % ", " . join ( successful_nicks ) ) if failed_nicks : send ( "Invalid nick(s): %s." % ", " . join ( failed_nicks ) )
Leaves a note for a user or users .
311
10
14,860
def from_frame ( klass , frame , connection ) : event = frame . headers [ 'new' ] data = json . loads ( frame . body ) info = data [ 'info' ] build = Build . fromDict ( info ) build . connection = connection return klass ( build , event )
Create a new BuildStateChange event from a Stompest Frame .
64
14
14,861
def from_frame ( klass , frame , connection ) : event = frame . headers [ 'new' ] data = json . loads ( frame . body ) info = data [ 'info' ] task = Task . fromDict ( info ) task . connection = connection return klass ( task , event )
Create a new TaskStateChange event from a Stompest Frame .
64
14
14,862
def cmd ( send , msg , args ) : if not msg : send ( "Ping what?" ) return channel = args [ 'target' ] if args [ 'target' ] != 'private' else args [ 'nick' ] # CTCP PING if "." not in msg and ":" not in msg : targets = set ( msg . split ( ) ) if len ( targets ) > 3 : send ( "Please specify three or fewer people to ping." ) return for target in targets : if not re . match ( args [ 'config' ] [ 'core' ] [ 'nickregex' ] , target ) : send ( "Invalid nick %s" % target ) else : args [ 'handler' ] . ping_map [ target ] = channel args [ 'handler' ] . connection . ctcp ( "PING" , target , " " . join ( str ( time ( ) ) . split ( '.' ) ) ) return try : answer = subprocess . check_output ( [ args [ 'name' ] , '-W' , '1' , '-c' , '1' , msg ] , stderr = subprocess . STDOUT ) answer = answer . decode ( ) . splitlines ( ) send ( answer [ 0 ] ) send ( answer [ 1 ] ) except subprocess . CalledProcessError as e : if e . returncode == 2 : send ( "ping: unknown host " + msg ) elif e . returncode == 1 : send ( e . output . decode ( ) . splitlines ( ) [ - 2 ] )
Ping something .
333
3
14,863
def execution_duration ( self ) : duration = None if self . execution_start and self . execution_end : delta = self . execution_end - self . execution_start duration = delta . total_seconds ( ) return duration
Returns total BMDS execution time in seconds .
48
9
14,864
def get_exe_path ( cls ) : return os . path . abspath ( os . path . join ( ROOT , cls . bmds_version_dir , cls . exe + ".exe" ) )
Return the full path to the executable .
50
8
14,865
def plot ( self ) : fig = self . dataset . plot ( ) ax = fig . gca ( ) ax . set_title ( "{}\n{}, {}" . format ( self . dataset . _get_dataset_name ( ) , self . name , self . get_bmr_text ( ) ) ) if self . has_successfully_executed : self . _set_x_range ( ax ) ax . plot ( self . _xs , self . get_ys ( self . _xs ) , label = self . name , * * plotting . LINE_FORMAT ) self . _add_bmr_lines ( ax ) else : self . _add_plot_failure ( ax ) ax . legend ( * * settings . LEGEND_OPTS ) return fig
After model execution print the dataset curve - fit BMD and BMDL .
169
16
14,866
def write_dfile ( self ) : f_in = self . tempfiles . get_tempfile ( prefix = "bmds-" , suffix = ".(d)" ) with open ( f_in , "w" ) as f : f . write ( self . as_dfile ( ) ) return f_in
Write the generated d_file to a temporary file .
69
11
14,867
def to_dict ( self , model_index ) : return dict ( name = self . name , model_index = model_index , model_name = self . model_name , model_version = self . version , has_output = self . output_created , dfile = self . as_dfile ( ) , execution_halted = self . execution_halted , stdout = self . stdout , stderr = self . stderr , outfile = getattr ( self , "outfile" , None ) , output = getattr ( self , "output" , None ) , logic_bin = getattr ( self , "logic_bin" , None ) , logic_notes = getattr ( self , "logic_notes" , None ) , recommended = getattr ( self , "recommended" , None ) , recommended_variable = getattr ( self , "recommended_variable" , None ) , )
Return a summary of the model in a dictionary format for serialization .
202
14
14,868
def merge_config ( d1 , d2 ) : result = deepcopy ( d1 ) elements = deque ( ) elements . append ( ( result , d2 ) ) while elements : old , new = elements . popleft ( ) new = OrderedDict ( [ ( k . lower ( ) , ( k , v ) ) for k , v in new . items ( ) ] ) visited_keys = [ ] for k , old_value in old . items ( ) : klow = k . lower ( ) if klow in new : new_key , new_value = new [ klow ] visited_keys . append ( new_key ) if all ( isinstance ( e , MutableMapping ) for e in ( old_value , new_value ) ) : elements . append ( ( old_value , new_value ) ) else : old [ k ] = deepcopy ( new_value ) for k , v in new . values ( ) : if k not in visited_keys : old [ k ] = deepcopy ( v ) return result
Merges to config dicts . Key values are case insensitive for merging but the value of d1 is remembered .
226
23
14,869
def set_if_none ( user_config , config , key , value ) : keys = key . split ( '.' ) for k in keys [ : - 1 ] : try : user_config = user_config [ k ] except KeyError : user_config = { } config = config [ k ] key = keys [ - 1 ] if key not in user_config and not config [ key ] : config [ key ] = value
If the value of the key in is None and doesn t exist on the user config set it to a different value
92
23
14,870
def set_admin ( msg , handler ) : if handler . config [ 'feature' ] [ 'servicestype' ] == "ircservices" : match = re . match ( "STATUS (.*) ([0-3])" , msg ) elif handler . config [ 'feature' ] [ 'servicestype' ] == "atheme" : match = re . match ( "(.*) ACC ([0-3])" , msg ) if match : status = int ( match . group ( 2 ) ) nick = match . group ( 1 ) if status != 3 : return with handler . db . session_scope ( ) as session : admin = session . query ( Permissions ) . filter ( Permissions . nick == nick ) . first ( ) if admin is None : session . add ( Permissions ( nick = nick , role = 'admin' , registered = True , time = datetime . now ( ) ) ) else : admin . registered = True admin . time = datetime . now ( )
Handle admin verification responses from NickServ .
214
8
14,871
def is_tuple ( obj , len_ = None ) : if not isinstance ( obj , tuple ) : return False if len_ is None : return True if not isinstance ( len_ , Integral ) : raise TypeError ( "length must be a number (got %s instead)" % type ( len_ ) . __name__ ) if len_ < 0 : raise ValueError ( "length must be positive (got %s instead)" % len_ ) return len ( obj ) == len_
Checks whether given object is a tuple .
106
9
14,872
def select ( indices , from_ , strict = False ) : ensure_iterable ( indices ) ensure_sequence ( from_ ) if strict : return from_ . __class__ ( from_ [ index ] for index in indices ) else : len_ = len ( from_ ) return from_ . __class__ ( from_ [ index ] for index in indices if 0 <= index < len_ )
Selects a subsequence of given tuple including only specified indices .
83
13
14,873
def omit ( indices , from_ , strict = False ) : from taipan . collections . sets import remove_subset ensure_iterable ( indices ) ensure_sequence ( from_ ) if strict : remaining_indices = set ( xrange ( len ( from_ ) ) ) try : remove_subset ( remaining_indices , indices ) except KeyError as e : raise IndexError ( int ( str ( e ) ) ) else : remaining_indices = set ( xrange ( len ( from_ ) ) ) - set ( indices ) return from_ . __class__ ( from_ [ index ] for index in remaining_indices )
Returns a subsequence from given tuple omitting specified indices .
137
12
14,874
def _describe_type ( arg ) : if isinstance ( arg , tuple ) : return "tuple of length %s" % len ( arg ) else : return type ( arg ) . __name__
Describe given argument including length if it s a tuple .
44
12
14,875
def _create_image_url ( self , file_path , type_ , target_size ) : if self . image_config is None : logger . warning ( 'no image configuration available' ) return return '' . join ( [ self . image_config [ 'secure_base_url' ] , self . _image_size ( self . image_config , type_ , target_size ) , file_path , ] )
The the closest available size for specified image type .
91
10
14,876
def from_json ( cls , json , image_config = None ) : cls . image_config = image_config return cls ( * * { attr : json . get ( attr if key is None else key ) for attr , key in cls . JSON_MAPPING . items ( ) } )
Create a model instance
70
4
14,877
def _image_size ( image_config , type_ , target_size ) : return min ( image_config [ '{}_sizes' . format ( type_ ) ] , key = lambda size : ( abs ( target_size - int ( size [ 1 : ] ) ) if size . startswith ( 'w' ) or size . startswith ( 'h' ) else 999 ) , )
Find the closest available size for specified image type .
88
10
14,878
def getallgroups ( arr , k = - 1 ) : if k < 0 : k = len ( arr ) return itertools . chain . from_iterable ( itertools . combinations ( set ( arr ) , j ) for j in range ( 1 , k + 1 ) )
returns all the subset of
61
6
14,879
def open_get_line ( filename , limit = - 1 , * * kwargs ) : allowed_keys_for_get_line = { 'sep' , 'pw_filter' , 'errors' } for k in list ( kwargs . keys ( ) ) : if k not in allowed_keys_for_get_line : del kwargs [ k ] print ( "After filtering: {}" . format ( kwargs ) ) with open_ ( filename , 'rt' ) as f : for w , c in get_line ( f , limit , * * kwargs ) : yield w , c
Opens the password file named
136
6
14,880
def stop_workers ( self , clean ) : with executor_lock : self . executor . shutdown ( clean ) del self . executor with self . worker_lock : if clean : self . pool . close ( ) else : self . pool . terminate ( ) self . pool . join ( ) del self . pool for x in self . events . values ( ) : x . event . cancel ( ) self . events . clear ( )
Stop workers and deferred events .
92
6
14,881
def extract_translations ( self , string ) : tree = ast . parse ( string ) # ast_visit(tree) visitor = TransVisitor ( self . tranz_functions , self . tranzchoice_functions ) visitor . visit ( tree ) return visitor . translations
Extract messages from Python string .
60
7
14,882
def cmd ( send , msg , args ) : if not msg : send ( "What are you trying to get to?" ) return nick = args [ 'nick' ] isup = get ( "http://isup.me/%s" % msg ) . text if "looks down from here" in isup : send ( "%s: %s is down" % ( nick , msg ) ) elif "like a site on the interwho" in isup : send ( "%s: %s is not a valid url" % ( nick , msg ) ) else : send ( "%s: %s is up" % ( nick , msg ) )
Checks if a website is up .
139
8
14,883
def create_required_directories ( self ) : required = ( self . CACHE_DIR , self . LOG_DIR , self . OUTPUT_DIR , self . ENGINEER . JINJA_CACHE_DIR , ) for folder in required : ensure_exists ( folder , assume_dirs = True )
Creates any directories required for Engineer to function if they don t already exist .
71
16
14,884
def cmd ( send , msg , args ) : if 'livedoc' in args [ 'name' ] : url = 'http://livedoc.tjhsst.edu/w' name = 'livedoc' else : url = 'http://en.wikipedia.org/w' name = 'wikipedia' if not msg : msg = get_rand ( url ) params = { 'format' : 'json' , 'action' : 'query' , 'list' : 'search' , 'srlimit' : '1' , 'srsearch' : msg } data = get ( '%s/api.php' % url , params = params ) . json ( ) try : article = data [ 'query' ] [ 'search' ] [ 0 ] [ 'title' ] except IndexError : send ( "%s isn't important enough to have a %s article." % ( msg , name ) ) return article = article . replace ( ' ' , '_' ) # wikipedia uses /w for api and /wiki for articles url += 'iki' send ( '%s/%s' % ( url , article ) )
Returns the first wikipedia result for the argument .
241
10
14,885
def parse_devices ( self ) : devices = [ ] for device in self . _channel_dict [ "devices" ] : devices . append ( Device ( device , self . _is_sixteen_bit , self . _ignore_list ) ) return devices
Creates an array of Device objects from the channel
55
10
14,886
def update ( self ) : for device in self . devices : device . update ( ) for i in range ( len ( self . _channel_dict [ "devices" ] ) ) : device_dict = self . _channel_dict [ "devices" ] [ i ] for device in self . _devices : if device . name == device_dict [ "common.ALLTYPES_NAME" ] : self . _channel_dict [ "devices" ] [ i ] = device . as_dict ( )
Updates the dictionary of the channel
108
7
14,887
def open_config ( self , type = "shared" ) : try : #attempt to open a configuration output = self . dev . rpc ( "<open-configuration><{0}/></open-configuration>" . format ( type ) ) except Exception as err : #output an error if the configuration is not availble print err
Opens the configuration of the currently connected device
72
9
14,888
def close_config ( self ) : try : self . dev . rpc . close_configuration ( ) except Exception as err : print err
Closes the exiting opened configuration
30
6
14,889
def commit_config ( self ) : try : self . dev . rpc . commit_configuration ( ) except Exception as err : print err
Commits exiting configuration
30
4
14,890
def commit_and_quit ( self ) : try : self . dev . rpc . commit_configuration ( ) self . close_config ( ) except Exception as err : print err
Commits and closes the currently open configration . Saves a step by not needing to manually close the config .
39
23
14,891
def load_local_plugin ( name ) : try : module_name = '.' . join ( name . split ( '.' ) [ : - 1 ] ) module_obj = importlib . import_module ( name = module_name ) obj = getattr ( module_obj , name . split ( '.' ) [ - 1 ] ) return obj except ( ImportError , AttributeError , ValueError ) as e : raise PluginNotFoundError ( e )
Import a local plugin accessible through Python path .
97
9
14,892
def load_installed_plugins ( ) : providers = { } checkers = { } for entry_point in pkg_resources . iter_entry_points ( group = 'archan' ) : obj = entry_point . load ( ) if issubclass ( obj , Provider ) : providers [ entry_point . name ] = obj elif issubclass ( obj , Checker ) : checkers [ entry_point . name ] = obj return collections . namedtuple ( 'Plugins' , 'providers checkers' ) ( providers = providers , checkers = checkers )
Search and load every installed plugin through entry points .
124
10
14,893
def from_file ( path ) : with open ( path ) as stream : obj = yaml . safe_load ( stream ) Config . lint ( obj ) return Config ( config_dict = obj )
Return a Config instance by reading a configuration file .
43
10
14,894
def find ( ) : names = ( 'archan.yml' , 'archan.yaml' , '.archan.yml' , '.archan.yaml' ) current_dir = os . getcwd ( ) configconfig_file = os . path . join ( current_dir , '.configconfig' ) default_config_dir = os . path . join ( current_dir , 'config' ) if os . path . isfile ( configconfig_file ) : logger . debug ( 'Reading %s to get config folder path' , configconfig_file ) with open ( configconfig_file ) as stream : config_dir = os . path . join ( current_dir , stream . read ( ) ) . strip ( ) elif os . path . isdir ( default_config_dir ) : config_dir = default_config_dir else : config_dir = current_dir logger . debug ( 'Config folder = %s' , config_dir ) for name in names : config_file = os . path . join ( config_dir , name ) logger . debug ( 'Searching for config file at %s' , config_file ) if os . path . isfile ( config_file ) : logger . debug ( 'Found %s' , config_file ) return config_file logger . debug ( 'No config file found' ) return None
Find the configuration file if any .
294
7
14,895
def inflate_nd_checker ( identifier , definition ) : if isinstance ( definition , bool ) : return Checker ( name = identifier , passes = definition ) elif isinstance ( definition , dict ) : return Checker ( definition . pop ( 'name' , identifier ) , * * definition ) else : raise ValueError ( '%s type is not supported for no-data checkers, ' 'use bool or dict' % type ( definition ) )
Inflate a no - data checker from a basic definition .
97
14
14,896
def get_plugin ( self , identifier , cls = None ) : if ( ( cls is None or cls == 'provider' ) and identifier in self . available_providers ) : return self . available_providers [ identifier ] elif ( ( cls is None or cls == 'checker' ) and identifier in self . available_checkers ) : return self . available_checkers [ identifier ] return Config . load_local_plugin ( identifier )
Return the plugin corresponding to the given identifier and type .
101
11
14,897
def provider_from_dict ( self , dct ) : provider_identifier = list ( dct . keys ( ) ) [ 0 ] provider_class = self . get_provider ( provider_identifier ) if provider_class : return provider_class ( * * dct [ provider_identifier ] ) return None
Return a provider instance from a dict object .
69
9
14,898
def checker_from_dict ( self , dct ) : checker_identifier = list ( dct . keys ( ) ) [ 0 ] checker_class = self . get_checker ( checker_identifier ) if checker_class : return checker_class ( * * dct [ checker_identifier ] ) return None
Return a checker instance from a dict object .
76
10
14,899
def inflate_plugin ( self , identifier , definition = None , cls = None ) : cls = self . get_plugin ( identifier , cls ) # TODO: implement re-usability of plugins? # same instances shared across analyses (to avoid re-computing stuff) return cls ( * * definition or { } )
Inflate a plugin thanks to it s identifier definition and class .
72
14