idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
20,900
def from_file(self, file, store=current_store, extra_args=None, extra_kwargs=None):
    """Store the file for the image into the store.

    ``file`` may be a ``cgi.FieldStorage`` (its underlying file object is
    used) or any readable binary file-like object.  The whole content is
    copied into an in-memory buffer first, so the source stream does not
    need to be seekable.
    """
    if isinstance(file, cgi.FieldStorage):
        # Unwrap the CGI upload wrapper to get the raw file object.
        file = file.file
    data = io.BytesIO()
    shutil.copyfileobj(file, data)
    data.seek(0)  # rewind so from_raw_file reads from the start
    return self.from_raw_file(data, store, original=True,
                              extra_args=extra_args,
                              extra_kwargs=extra_kwargs)
Stores the file for the image into the store .
108
11
20,901
def clear_database(self) -> None:
    """Remove all Entities and Components from the World.

    Resets the entity id counter and empties every internal storage,
    including the query cache.
    """
    self._next_entity_id = 0
    for storage in (self._dead_entities, self._components, self._entities):
        storage.clear()
    self.clear_cache()
Remove all Entities and Components from the World .
56
10
20,902
def add_processor(self, processor_instance: Processor, priority=0) -> None:
    """Add a Processor instance to the World.

    :param processor_instance: an instance of a ``Processor`` subclass.
    :param priority: higher values run first.
    """
    assert issubclass(processor_instance.__class__, Processor)
    processor_instance.priority = priority
    processor_instance.world = self
    self._processors.append(processor_instance)
    # Keep the list sorted so process() runs highest priority first.
    self._processors.sort(key=lambda proc: proc.priority, reverse=True)
Add a Processor instance to the World .
82
8
20,903
def remove_processor(self, processor_type: Processor) -> None:
    """Remove a Processor from the World, by type.

    :param processor_type: the class of the Processor(s) to remove.
    """
    # BUG FIX: the original removed items from the list it was iterating
    # over, which silently skips the element that follows each removal.
    # Iterate over a shallow copy instead.
    for processor in self._processors[:]:
        if type(processor) == processor_type:
            processor.world = None
            self._processors.remove(processor)
Remove a Processor from the World by type .
50
9
20,904
def get_processor(self, processor_type: Type[P]) -> P:
    """Return the Processor instance matching *processor_type*.

    Returns None when no processor of that exact type is registered.
    """
    matches = (proc for proc in self._processors
               if type(proc) == processor_type)
    return next(matches, None)
Get a Processor instance by type .
40
7
20,905
def create_entity(self, *components) -> int:
    """Create a new Entity, optionally with starting Components.

    :return: the id of the newly created entity.
    """
    self._next_entity_id += 1
    entity = self._next_entity_id
    # TODO: duplicate add_component code here for performance
    for component in components:
        self.add_component(entity, component)
    # self.clear_cache()
    return entity
Create a new Entity .
73
5
20,906
def delete_entity(self, entity: int, immediate=False) -> None:
    """Delete an Entity from the World.

    By default the entity is only marked dead and is purged on the next
    clean-up pass; pass ``immediate=True`` to remove it right away.
    """
    if not immediate:
        self._dead_entities.add(entity)
        return
    for component_type in self._entities[entity]:
        component_set = self._components[component_type]
        component_set.discard(entity)
        if not component_set:
            # Prune empty per-type storage.
            del self._components[component_type]
    del self._entities[entity]
    self.clear_cache()
Delete an Entity from the World .
104
7
20,907
def component_for_entity(self, entity: int, component_type: Type[C]) -> C:
    """Retrieve a Component instance for a specific Entity.

    Raises KeyError when the entity or component type is unknown.
    """
    entity_components = self._entities[entity]
    return entity_components[component_type]
Retrieve a Component instance for a specific Entity .
39
10
20,908
def components_for_entity(self, entity: int) -> Tuple[C, ...]:
    """Return every Component attached to *entity* as a tuple."""
    component_map = self._entities[entity]
    return tuple(component_map.values())
Retrieve all Components for a specific Entity as a Tuple .
38
13
20,909
def has_component(self, entity: int, component_type: Any) -> bool:
    """Return True if *entity* carries a Component of *component_type*."""
    entity_components = self._entities[entity]
    return component_type in entity_components
Check if a specific Entity has a Component of a certain type .
33
13
20,910
def add_component(self, entity: int, component_instance: Any) -> None:
    """Add a Component instance to an Entity.

    Per-type and per-entity storages are created on demand; the query
    cache is invalidated.
    """
    component_type = type(component_instance)
    self._components.setdefault(component_type, set()).add(entity)
    self._entities.setdefault(entity, {})[component_type] = component_instance
    self.clear_cache()
Add a new Component instance to an Entity .
116
9
20,911
def remove_component(self, entity: int, component_type: Any) -> int:
    """Remove a Component from an Entity, by type.

    Empty per-type and per-entity storages are pruned.

    :return: the entity id, for convenience.
    """
    component_set = self._components[component_type]
    component_set.discard(entity)
    if not component_set:
        del self._components[component_type]

    entity_components = self._entities[entity]
    del entity_components[component_type]
    if not entity_components:
        del self._entities[entity]

    self.clear_cache()
    return entity
Remove a Component instance from an Entity by type .
102
10
20,912
def _get_component(self, component_type: Type[C]) -> Iterable[Tuple[int, C]]:
    """Yield ``(entity_id, component)`` pairs for one component type."""
    entities = self._components.get(component_type, ())
    entity_db = self._entities
    for entity_id in entities:
        yield entity_id, entity_db[entity_id][component_type]
Get an iterator for Entity Component pairs .
72
8
20,913
def _get_components(self, *component_types: Type) -> Iterable[Tuple[int, ...]]:
    """Yield ``(entity_id, [components...])`` for entities having all types.

    Note: despite the annotation, components are yielded as a list.
    A KeyError means one of the requested types has no entities at all;
    the intersection is then empty and nothing is yielded.
    """
    entity_db = self._entities
    comp_db = self._components

    try:
        for entity in set.intersection(*[comp_db[ct] for ct in component_types]):
            yield entity, [entity_db[entity][ct] for ct in component_types]
    except KeyError:
        pass
Get an iterator for Entity and multiple Component sets .
102
10
20,914
def try_component(self, entity: int, component_type: Type):
    """Yield the Component of *component_type* for *entity*, if present.

    This is a generator: it yields the component when the entity has
    one, and yields nothing otherwise.  This allows optional Components
    to be accessed without raising.
    """
    entity_components = self._entities[entity]
    if component_type in entity_components:
        yield entity_components[component_type]
Try to get a single component type for an Entity . This method will return the requested Component if it exists but will pass silently if it does not . This allows a way to access optional Components that may or may not exist .
50
45
20,915
def _clear_dead_entities ( self ) : for entity in self . _dead_entities : for component_type in self . _entities [ entity ] : self . _components [ component_type ] . discard ( entity ) if not self . _components [ component_type ] : del self . _components [ component_type ] del self . _entities [ entity ] self . _dead_entities . clear ( ) self . clear_cache ( )
Finalize deletion of any Entities that are marked dead . In the interest of performance this method duplicates code from the delete_entity method . If that method is changed those changes should be duplicated here as well .
103
44
20,916
def _timed_process(self, *args, **kwargs):
    """Run all processors while recording per-processor execution time.

    Times are stored in ``self.process_times`` (milliseconds), keyed by
    processor class name.  Note ``round(..., 2)`` runs before the ``int``
    conversion, so sub-millisecond precision is discarded anyway.
    """
    for processor in self._processors:
        start_time = _time.process_time()
        processor.process(*args, **kwargs)
        process_time = int(round((_time.process_time() - start_time) * 1000, 2))
        self.process_times[processor.__class__.__name__] = process_time
Track Processor execution time for benchmarking .
101
8
20,917
def process(self, *args, **kwargs):
    """Run one frame: purge dead Entities, then call every Processor.

    Extra arguments are forwarded to each processor's ``process``.
    """
    self._clear_dead_entities()
    self._process(*args, **kwargs)
Call the process method on all Processors in order of their priority .
40
14
20,918
def texture_from_image(renderer, image_name):
    """Create an SDL2 Texture from an image file.

    Loads the image into a software surface, uploads it as a texture
    bound to *renderer*, then frees the temporary surface.
    """
    soft_surface = ext.load_image(image_name)
    texture = SDL_CreateTextureFromSurface(renderer.renderer, soft_surface)
    # The surface is no longer needed once the texture has been created.
    SDL_FreeSurface(soft_surface)
    return texture
Create an SDL2 Texture from an image file
61
9
20,919
def setup_tree(ctx, verbose=None, root=None, tree_dir=None, modules_dir=None):
    """Set up the SDSS tree environment by invoking bin/setup_tree.py."""
    print('Setting up the tree')
    command = 'python bin/setup_tree.py -t {0} -r {1} -m {2}'.format(
        tree_dir, root, modules_dir)
    ctx.run(command)
Sets up the SDSS tree environment
83
12
20,920
def set_roots(self, uproot_with=None):
    """Set the roots of the tree in the os environment.

    :param uproot_with: explicit TREE_DIR to use instead of the
        environment variable / the package location.

    Sets ``self.treedir`` and ``self.sasbasedir``, exports TREE_DIR and
    creates the SAS base directory when it does not exist yet.
    """
    # Check for TREE_DIR
    self.treedir = os.environ.get('TREE_DIR', None) if not uproot_with else uproot_with
    if not self.treedir:
        treefilepath = os.path.dirname(os.path.abspath(__file__))
        if 'python/' in treefilepath:
            self.treedir = treefilepath.rsplit('/', 2)[0]
        else:
            self.treedir = treefilepath
        # BUG FIX: a stray duplicate `self.treedir = treefilepath`
        # followed the if/else and unconditionally clobbered the
        # rsplit result above; removed.
    os.environ['TREE_DIR'] = self.treedir

    # Check sas_base_dir
    if 'SAS_BASE_DIR' in os.environ:
        self.sasbasedir = os.environ["SAS_BASE_DIR"]
    else:
        self.sasbasedir = os.path.expanduser('~/sas')

    # make the directories
    if not os.path.isdir(self.sasbasedir):
        os.makedirs(self.sasbasedir)
Set the roots of the tree in the os environment
258
10
20,921
def load_config(self, config=None):
    """Load a tree config file into ``self._cfg`` and ``self.environ``.

    :param config: optional config name; falls back to
        ``self.config_name`` and finally to 'sdsswork'.  A '.cfg'
        extension is appended when missing; the file must live in
        ``<treedir>/data``.
    """
    # Read the config file
    cfgname = (config or self.config_name)
    cfgname = 'sdsswork' if cfgname is None else cfgname
    assert isinstance(cfgname, six.string_types), 'config name must be a string'
    config_name = cfgname if cfgname.endswith('.cfg') else '{0}.cfg'.format(cfgname)
    self.configfile = os.path.join(self.treedir, 'data', config_name)
    assert os.path.isfile(self.configfile) is True, 'configfile {0} must exist in the proper directory'.format(self.configfile)

    self._cfg = SafeConfigParser()
    try:
        # Python 2 compatibility: the path may be bytes and need decoding.
        self._cfg.read(self.configfile.decode('utf-8'))
    except AttributeError:
        self._cfg.read(self.configfile)

    # create the local tree environment
    self.environ = OrderedDict()
    self.environ['default'] = self._cfg.defaults()
    # set the filesystem envvar to sas_base_dir
    self._file_replace = '@FILESYSTEM@'
    if self.environ['default']['filesystem'] == self._file_replace:
        self.environ['default']['filesystem'] = self.sasbasedir
loads a config file
327
4
20,922
def branch_out(self, limb=None):
    """Populate ``self.environ`` sections ("branches") from the config.

    :param limb: optional section name or list of names; when given,
        the 'general' section is always included as well.
    """
    # Filter on sections
    if not limb:
        limbs = self._cfg.sections()
    else:
        # we must have the general always + secton
        limb = limb if isinstance(limb, list) else [limb]
        limbs = ['general']
        limbs.extend(limb)

    # add all limbs into the tree environ
    for leaf in limbs:
        # Sections may be stored upper-case in the config file.
        leaf = leaf if leaf in self._cfg.sections() else leaf.upper()
        self.environ[leaf] = OrderedDict()
        options = self._cfg.options(leaf)

        for opt in options:
            if opt in self.environ['default']:
                # Defaults are kept only in the 'default' section.
                continue
            val = self._cfg.get(leaf, opt)
            if val.find(self._file_replace) == 0:
                # Substitute a leading @FILESYSTEM@ placeholder.
                val = val.replace(self._file_replace, self.sasbasedir)
            self.environ[leaf][opt] = val
Set the individual section branches
208
5
20,923
def add_limbs(self, key=None):
    """Add a new section from the tree into the existing os environment.

    Loads the section into ``self.environ`` and then exports its paths.
    """
    self.branch_out(limb=key)
    self.add_paths_to_os(key=key)
Add a new section from the tree into the existing os environment
38
12
20,924
def get_paths(self, key):
    """Return the environment paths for *key* (case-insensitive).

    Raises KeyError when neither *key* nor its upper-case form is known.
    """
    if key in self.environ:
        return self.environ[key]
    upper = key.upper()
    if upper in self.environ:
        return self.environ[upper]
    raise KeyError('Key {0} not found in tree environment'.format(key))
Retrieve a set of environment paths from the config
77
10
20,925
def add_paths_to_os(self, key=None, update=None):
    """Export the tree environ paths into ``os.environ``.

    :param key: a single section name or a list of names; all sections
        except 'default' when None.
    :param update: forwarded to ``check_paths`` to force overwriting.
    """
    if key is None:
        allpaths = [name for name in self.environ.keys() if 'default' not in name]
    else:
        allpaths = key if isinstance(key, list) else [key]

    for section in allpaths:
        paths = self.get_paths(section)
        self.check_paths(paths, update=update)
Add the paths in tree environ into the os environ
102
12
20,926
def check_paths(self, paths, update=None):
    """Export *paths* into ``os.environ``.

    Existing variables are left untouched unless *update* is true;
    names listed in ``self.exclude`` are never overwritten on update.
    """
    # normalize the exclusion list
    if not self.exclude:
        exclude = []
    elif isinstance(self.exclude, list):
        exclude = self.exclude
    else:
        exclude = [self.exclude]

    # check the path names
    for pathname, path in paths.items():
        envname = pathname.upper()
        if update and envname not in exclude:
            os.environ[envname] = os.path.normpath(path)
        elif envname not in os.environ:
            os.environ[envname] = os.path.normpath(path)
Check if the path is in the os environ and if not add it
139
15
20,927
def replant_tree(self, config=None, exclude=None):
    """Replant the tree with a different config setup.

    Re-runs ``__init__`` on this instance with the new *config* and
    *exclude*, keeping the current key and forcing an os-environment
    update.
    """
    # reinitialize a new Tree with a new config
    self.__init__(key=self.key, config=config, update=True, exclude=exclude)
Replant the tree with a different config setup
52
9
20,928
def print_exception_formatted(type, value, tb):
    """A custom excepthook for printing tracebacks with colours.

    Formats the traceback, highlights it with Pygments' Python-traceback
    lexer, and writes the result to stderr.  The ``type`` parameter
    shadows the builtin of the same name (kept for excepthook signature
    compatibility).
    """
    tbtext = ''.join(traceback.format_exception(type, value, tb))
    lexer = get_lexer_by_name('pytb', stripall=True)
    formatter = TerminalFormatter()
    sys.stderr.write(highlight(tbtext, lexer, formatter))
A custom hook for printing tracebacks with colours .
93
10
20,929
def colored_formatter(record):
    """Print a log record to stdout with a colourised level header.

    Used as a handler ``emit`` replacement: writes directly to
    ``sys.__stdout__`` instead of going through a Formatter.
    'error' records are skipped here.
    """
    colours = {'info': ('blue', 'normal'),
               'debug': ('magenta', 'normal'),
               'warning': ('yellow', 'normal'),
               'print': ('green', 'normal'),
               'error': ('red', 'bold')}

    levelname = record.levelname.lower()

    if levelname == 'error':
        return

    # NOTE(review): `header` is only bound when levelname is a known
    # colour — an unknown level would raise NameError at the final
    # write; confirm all callers use the levels listed above.
    if levelname.lower() in colours:
        levelname_color = colours[levelname][0]
        header = color_text('[{}]: '.format(levelname.upper()), levelname_color)

    message = '{0}'.format(record.msg)

    # Highlight a leading warning category (e.g. 'UserWarning:') in cyan.
    warning_category = re.match(r'^(\w+Warning:).*', message)
    if warning_category is not None:
        warning_category_colour = color_text(warning_category.groups()[0], 'cyan')
        message = message.replace(warning_category.groups()[0], warning_category_colour)

    # Highlight a '[sub]:' prefix in red.
    sub_level = re.match(r'(\[.+\]:)(.*)', message)
    if sub_level is not None:
        sub_level_name = color_text(sub_level.groups()[0], 'red')
        message = '{}{}'.format(sub_level_name, ''.join(sub_level.groups()[1:]))

    # if len(message) > 79:
    #     tw = TextWrapper()
    #     tw.width = 79
    #     tw.subsequent_indent = ' ' * (len(record.levelname) + 2)
    #     tw.break_on_hyphens = False
    #     message = '\n'.join(tw.wrap(message))

    sys.__stdout__.write('{}{}\n'.format(header, message))
    sys.__stdout__.flush()
    return
Prints log messages with colours .
439
7
20,930
def _catch_exceptions(self, exctype, value, tb):
    """sys.excepthook replacement: log uncaught exceptions, then print
    a coloured traceback to stdout."""
    # Now we log it.
    self.error('Uncaught exception', exc_info=(exctype, value, tb))
    # First, we print to stdout with some colouring.
    print_exception_formatted(exctype, value, tb)
Catches all exceptions and logs them .
81
8
20,931
def _set_defaults(self, log_level=logging.INFO, redirect_stdout=False):
    """Reset the logger to its initial state.

    Removes existing handlers, installs a colourised stream handler at
    *log_level*, optionally redirects stdout through the logger, and
    installs the custom exception hook.
    """
    # Remove all previous handlers
    for handler in self.handlers[:]:
        self.removeHandler(handler)

    # Set levels
    self.setLevel(logging.DEBUG)

    # Set up the stdout handler
    self.fh = None
    self.sh = logging.StreamHandler()
    # Replace emit wholesale so records are colour-formatted.
    self.sh.emit = colored_formatter
    self.addHandler(self.sh)
    self.sh.setLevel(log_level)

    # warnings.showwarning = self._show_warning

    # Redirects all stdout to the logger
    if redirect_stdout:
        sys.stdout = LoggerStdout(self._print)

    # Catches exceptions
    sys.excepthook = self._catch_exceptions
Reset logger to its initial state .
173
8
20,932
def start_file_logger(self, name, log_file_level=logging.DEBUG, log_file_path='./'):
    """Start file logging.

    :param name: base name of the log file (``<name>.log``).
    :param log_file_level: level for the file handler.
    :param log_file_path: directory in which to create the log file.

    An existing log file is backed up with a UTC timestamp suffix before
    a midnight-rotating handler is attached.
    """
    # BUG FIX: the original applied pathlib's '/' operator and '.parent'
    # to the plain string returned by os.path.expanduser, which raises
    # TypeError/AttributeError; use os.path consistently instead.
    log_file_path = os.path.join(os.path.expanduser(log_file_path),
                                 '{}.log'.format(name))
    logdir = os.path.dirname(log_file_path)

    try:
        if not os.path.isdir(logdir):
            os.makedirs(logdir)

        # If the log file exists, backs it up before creating a new file handler
        if os.path.exists(log_file_path):
            strtime = datetime.datetime.utcnow().strftime('%Y-%m-%d_%H:%M:%S')
            shutil.move(log_file_path, log_file_path + '.' + strtime)

        self.fh = TimedRotatingFileHandler(str(log_file_path), when='midnight', utc=True)
        self.fh.suffix = '%Y-%m-%d_%H:%M:%S'
    except (IOError, OSError) as ee:
        warnings.warn('log file {0!r} could not be opened for writing: '
                      '{1}'.format(log_file_path, ee), RuntimeWarning)
    else:
        self.fh.setFormatter(fmt)
        self.addHandler(self.fh)
        self.fh.setLevel(log_file_level)

    self.log_filename = log_file_path
Start file logging .
341
4
20,933
def create_index_page(environ, defaults, envdir):
    """Create the env index.html page.

    Builds the index.html page containing a table of symlinks to
    datamodel directories.

    :param environ: the tree environment sections.
    :param defaults: template values (must provide 'url', 'name',
        'location').
    :param envdir: the env directory the table links into.
    :return: the rendered html as a string.
    """
    # header of index file; literal CSS braces are doubled for str.format
    header = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><meta name="viewport" content="width=device-width"/><meta http-equiv="content-type" content="text/html; charset=utf-8"/><style type="text/css">body,html {{background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}}tr:nth-child(even) {{background:#f4f4f4;}}th,td {{padding:0.1em 0.5em;}}th {{text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}}#list {{border:1px solid #aaa;width:100%%;}}a {{color:#a33;}}a:hover {{color:#e33;}}</style>
<link rel="stylesheet" href="{url}/css/sas.css" type="text/css"/>
<title>Index of /sas/{name}/env/</title>
</head><body><h1>Index of /sas/{name}/env/</h1>
"""

    # footer of index file
    footer = """<h3><a href='{url}/sas/'>{location}</a></h3>
<p>This directory contains links to the contents of
environment variables defined by the tree product, version {name}.
To examine the <em>types</em> of files contained in each environment variable
directory, visit <a href="/datamodel/files/">the datamodel.</a></p>
</body></html>
"""

    # create index html file
    index = header.format(**defaults)
    index += create_index_table(environ, envdir)
    index += footer.format(**defaults)

    return index
Create the env index html page . Builds the index.html page containing a table of symlinks to datamodel directories
528
25
20,934
def create_env(environ, mirror=None, verbose=None):
    """Create the env symlink directory structure.

    Creates the env folder filled with symlinks to datamodel directories
    for a given tree config file.  Returns silently when the sas_root
    does not exist or env/ is not writable.
    """
    defaults = environ['default'].copy()
    defaults['url'] = "https://data.mirror.sdss.org" if mirror else "https://data.sdss.org"
    defaults['location'] = "SDSS-IV Science Archive Mirror (SAM)" if mirror else "SDSS-IV Science Archive Server (SAS)"

    if not os.path.exists(environ['general']['sas_root']):
        if verbose:
            print("{0} doesn't exist, skipping env link creation.".format(environ['general']['sas_root']))
        return

    if verbose:
        print("Found {0}.".format(environ['general']['sas_root']))

    # sets and creates envdir
    envdir = os.path.join(environ['general']['sas_root'], 'env')
    if not os.path.exists(envdir):
        os.makedirs(envdir)
    if not os.access(envdir, os.W_OK):
        return

    # create index html
    index = create_index_page(environ, defaults, envdir)

    # write the index file
    indexfile = os.path.join(envdir, 'index.html')
    with open(indexfile, 'w') as f:
        f.write(index)
create the env symlink directory structure Creates the env folder filled with symlinks to datamodel directories for a given tree config file .
330
30
20,935
def check_sas_base_dir(root=None):
    """Ensure SAS_BASE_DIR is set, prompting the user when necessary.

    :param root: explicit path taking precedence over the environment.
    """
    sasbasedir = root or os.getenv("SAS_BASE_DIR")
    if not sasbasedir:
        # Fall back to asking the user interactively.
        sasbasedir = input('Enter a path for SAS_BASE_DIR: ')
    os.environ['SAS_BASE_DIR'] = sasbasedir
Check for the SAS_BASE_DIR environment variable
85
11
20,936
def write_file(environ, term='bash', out_dir=None, tree_dir=None):
    """Write a tree environment file for the given shell flavour.

    :param environ: tree environment sections to export.
    :param term: 'bash', 'tsch' or 'modules'; selects the command syntax
        and the output file extension.
    :param out_dir: directory to write into.
    :param tree_dir: tree directory, passed through to the header.
    """
    # get the proper name, header and file extension
    name = environ['default']['name']
    header = write_header(term=term, name=name, tree_dir=tree_dir)
    exts = {'bash': '.sh', 'tsch': '.csh', 'modules': '.module'}
    ext = exts[term]

    # shell command
    if term == 'bash':
        cmd = 'export {0}={1}\n'
    else:
        cmd = 'setenv {0} {1}\n'

    # write the environment config files
    filename = os.path.join(out_dir, name + ext)
    with open(filename, 'w') as f:
        f.write(header + '\n')
        for key, values in environ.items():
            if key != 'default':
                # write separator
                f.write('#\n# {0}\n#\n'.format(key))
                # write tree names and paths
                for tree_name, tree_path in values.items():
                    f.write(cmd.format(tree_name.upper(), tree_path))

    # write default .version file for modules
    modules_version = write_version(name)
    if term == 'modules' and environ['default']['current']:
        version_name = os.path.join(out_dir, '.version')
        with open(version_name, 'w') as f:
            f.write(modules_version)
Write a tree environment file
358
5
20,937
def get_tree(config=None):
    """Return a Tree instance for a given config.

    Adds the sibling ``python`` directory to sys.path before importing
    the Tree class.  NOTE: also changes the current working directory to
    that path as a side effect.
    """
    path = os.path.dirname(os.path.abspath(__file__))
    pypath = os.path.realpath(os.path.join(path, '..', 'python'))
    if pypath not in sys.path:
        sys.path.append(pypath)
    os.chdir(pypath)
    from tree.tree import Tree
    tree = Tree(config=config)
    return tree
Get the tree for a given config
104
7
20,938
def copy_modules(filespath=None, modules_path=None, verbose=None):
    """Copy the tree module files from *filespath* into the module path.

    When *modules_path* is not given it is derived from $MODULEPATH;
    with multiple entries the function recurses into every entry that
    already contains a tree directory and then returns.
    """
    # find or define a modules path
    if not modules_path:
        modulepath = os.getenv("MODULEPATH")
        if not modulepath:
            modules_path = input('Enter the root path for your module files:')
        else:
            split_mods = modulepath.split(':')
            if len(split_mods) > 1:
                if verbose:
                    print('Multiple module paths found. Finding all that contain a tree directory.')
                for mfile in split_mods:
                    if os.path.exists(os.path.join(mfile, 'tree')):
                        copy_modules(filespath=filespath, modules_path=mfile, verbose=verbose)
                else:
                    # for/else: runs after recursing into every entry —
                    # the recursive calls did the work, so stop here.
                    return
            else:
                modules_path = split_mods[0]

    # check for the tree module directory
    tree_mod = os.path.join(modules_path, 'tree')
    if not os.path.isdir(tree_mod):
        os.makedirs(tree_mod)

    # copy the modules into the tree
    if verbose:
        print('Copying modules from etc/ into {0}'.format(tree_mod))
    module_files = glob.glob(os.path.join(filespath, '*.module'))
    for mfile in module_files:
        base = os.path.splitext(os.path.basename(mfile))[0]
        tree_out = os.path.join(tree_mod, base)
        shutil.copy2(mfile, tree_out)

    # copy the default version into the tree
    version = os.path.join(filespath, '.version')
    if os.path.isfile(version):
        shutil.copy2(version, tree_mod)
Copy over the tree module files into your path
396
9
20,939
def _indent ( text , level = 1 ) : prefix = ' ' * ( 4 * level ) def prefixed_lines ( ) : for line in text . splitlines ( True ) : yield ( prefix + line if line . strip ( ) else line ) return '' . join ( prefixed_lines ( ) )
Does a proper indenting for Sphinx rst
67
10
20,940
def get_requirements(opts):
    """Return install requirements from the proper requirements file.

    :param opts: options object with boolean ``dev`` / ``doc`` attrs
        selecting requirements_dev.txt / requirements_doc.txt.

    Pinned '==' versions are relaxed to '>='; comment and blank lines
    are skipped.
    """
    if opts.dev:
        name = 'requirements_dev.txt'
    elif opts.doc:
        name = 'requirements_doc.txt'
    else:
        name = 'requirements.txt'
    requirements_file = os.path.join(os.path.dirname(__file__), name)
    # BUG FIX: open the file in a context manager so the handle is
    # closed deterministically (the original leaked it).
    with open(requirements_file) as f:
        install_requires = [line.strip().replace('==', '>=') for line in f
                            if not line.strip().startswith('#') and line.strip() != '']
    return install_requires
Get the proper requirements file based on the optional argument
136
10
20,941
def remove_args(parser):
    """Strip this parser's optional arguments (except --help) from sys.argv."""
    option_strings = [opt
                      for action in list(parser._get_optional_actions())
                      if '--help' not in action.option_strings
                      for opt in action.option_strings]
    for opt in option_strings:
        if opt in sys.argv:
            sys.argv.remove(opt)
Remove custom arguments from the parser
71
6
20,942
def _render_log():
    """Totally tap into Towncrier internals to get an in-memory result.

    Loads the towncrier config from ROOT, collects the news fragments
    and renders them with the configured template, returning the
    rendered text instead of writing it to a file.
    """
    config = load_config(ROOT)
    definitions = config['types']
    fragments, fragment_filenames = find_fragments(
        pathlib.Path(config['directory']).absolute(),
        config['sections'],
        None,
        definitions,
    )
    rendered = render_fragments(
        pathlib.Path(config['template']).read_text(encoding='utf-8'),
        config['issue_format'],
        split_fragments(fragments, definitions),
        definitions,
        # Skip the top-level underline; sections start one level down.
        config['underlines'][1:],
    )
    return rendered
Totally tap into Towncrier internals to get an in - memory result .
139
17
20,943
def adjust_name_for_printing(name):
    """Sanitize *name* so it can be printed and used as a variable name.

    Returns '' for None.  Raises NameError when the substitutions do not
    yield a valid python identifier.
    """
    if name is None:
        return ''
    original = name
    # Substitutions applied in a fixed order ('**' before '*').
    for old, new in ((" ", "_"), (".", "_"), ("-", "_m_"),
                     ("+", "_p_"), ("!", "_I_"),
                     ("**", "_xx_"), ("*", "_x_"),
                     ("/", "_l_"), ("@", "_at_"),
                     ("(", "_of_"), (")", "")):
        name = name.replace(old, new)
    if re.match(r'^[a-zA-Z_][a-zA-Z0-9-_]*$', name) is None:
        raise NameError("name {} converted to {} cannot be further converted to valid python variable name!".format(original, name))
    return name
Make sure a name can be printed alongside used as a variable name .
226
14
20,944
def name(self, name):
    """Set the name of this object; tell the parent if the name changed."""
    previous = self.name
    assert isinstance(name, str)
    self._name = name
    if self.has_parent():
        self._parent_._name_changed(self, previous)
Set the name of this object . Tell the parent if the name has changed .
55
16
20,945
def hierarchy_name(self, adjust_for_printing=True):
    """Return this object's name with all parent names attached by dots.

    :param adjust_for_printing: sanitize this component via
        ``adjust_name_for_printing`` when True.
    """
    adjust = adjust_name_for_printing if adjust_for_printing else (lambda x: x)
    if self.has_parent():
        return self._parent_.hierarchy_name() + "." + adjust(self.name)
    return adjust(self.name)
return the name for this object with the parents names attached by dots .
87
14
20,946
def grep_param_names(self, regexp):
    """Return the parameters whose hierarchy name matches *regexp*.

    The match is performed against the hierarchy name with its leading
    component (this object's own name) stripped off.
    """
    if not isinstance(regexp, _pattern_type):
        regexp = compile(regexp)
    found_params = []

    def visit(innerself, regexp):
        # Skip self; match against the name below this node.
        if (innerself is not self) and regexp.match(innerself.hierarchy_name().partition('.')[2]):
            found_params.append(innerself)

    self.traverse(visit, regexp)
    return found_params
create a list of parameters matching regular expression regexp
106
10
20,947
def _setup_observers ( self ) : if self . has_parent ( ) : self . add_observer ( self . _parent_ , self . _parent_ . _pass_through_notify_observers , - np . inf )
Setup the default observers
56
4
20,948
def _repr_html_(self, indices=None, iops=None, lx=None, li=None, lls=None):
    """Representation of the parameter in html for notebook display.

    Builds an html table with one row per flattened parameter value,
    showing its index and the value of each index operation
    (constraints, priors, ...).  Precomputed *indices*, *iops* and *lls*
    may be passed in by a parent container to avoid recomputation.
    """
    filter_ = self._current_slice_
    vals = self.flat
    if indices is None:
        indices = self._indices(filter_)
    if iops is None:
        ravi = self._raveled_index(filter_)
        iops = OrderedDict([name, iop.properties_for(ravi)] for name, iop in self._index_operations.items())
    if lls is None:
        lls = [self._max_len_names(iop, name) for name, iop in iops.items()]

    header_format = """
<tr>
  <th><b>{i}</b></th>
  <th><b>{x}</b></th>
  <th><b>{iops}</b></th>
</tr>"""
    header = header_format.format(x=self.hierarchy_name(), i=__index_name__, iops="</b></th><th><b>".join(list(iops.keys())))  # nice header for printing

    to_print = ["""<style type="text/css">
.tg  {padding:2px 3px;word-break:normal;border-collapse:collapse;border-spacing:0;border-color:#DCDCDC;margin:0px auto;width:100%;}
.tg td{font-family:"Courier New", Courier, monospace !important;font-weight:bold;color:#444;background-color:#F7FDFA;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;}
.tg th{font-family:"Courier New", Courier, monospace !important;font-weight:normal;color:#fff;background-color:#26ADE4;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;}
.tg .tg-left{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:left;}
.tg .tg-right{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:right;}
</style>"""]
    to_print.append('<table class="tg">')
    to_print.append(header)

    # Wrap the text format spec into table cells: index left-aligned,
    # value right-aligned, then one left-aligned cell per index op.
    format_spec = self._format_spec(indices, iops, lx, li, lls, False)
    format_spec[:2] = ["<tr><td class=tg-left>{i}</td>".format(i=format_spec[0]), "<td class=tg-right>{i}</td>".format(i=format_spec[1])]
    for i in range(2, len(format_spec)):
        format_spec[i] = '<td class=tg-left>{c}</td>'.format(c=format_spec[i])
    format_spec = "".join(format_spec) + '</tr>'

    for i in range(self.size):
        to_print.append(format_spec.format(index=indices[i], value="{1:.{0}f}".format(__precision__, vals[i]), **dict((name, ' '.join(map(str, iops[name][i]))) for name in iops)))
    return '\n'.join(to_print)
Representation of the parameter in html for notebook display .
837
11
20,949
def add_observer(self, observer, callble, priority=0):
    """Register *callble* for *observer* at the given *priority* in this
    object's observer list."""
    self.observers.add(priority, observer, callble)
Add an observer observer with the callback callble and priority priority to this observers list .
31
17
20,950
def notify_observers(self, which=None, min_priority=None):
    """Notify all observers.

    *which* is the element that kicked off this notification loop
    (defaults to self); each callback receives ``(self, which=which)``.
    When *min_priority* is given, iteration stops at the first observer
    whose priority is <= min_priority (observers are presumably kept
    sorted by descending priority — confirm in the observers container).
    """
    if not self._update_on:
        return
    if which is None:
        which = self
    if min_priority is None:
        for _, _, callble in self.observers:
            callble(self, which=which)
    else:
        for p, _, callble in self.observers:
            if p <= min_priority:
                break
            callble(self, which=which)
Notifies all observers . Which is the element which kicked off this notification loop . The first argument will be self the second which .
96
26
20,951
def constrain_fixed(self, value=None, warning=True, trigger_parent=True):
    """Constrain this parameter to be fixed to the current value it carries.

    :param value: optional value to assign before fixing.
    :param warning: warn when re-constraining.
    :param trigger_parent: notify up the hierarchy; when False, only
        observers at -inf priority are notified.
    :return: the index of the fixed values.
    """
    if value is not None:
        self[:] = value

    #index = self.unconstrain()
    index = self._add_to_index_operations(self.constraints, np.empty(0), __fixed__, warning)
    self._highest_parent_._set_fixed(self, index)
    self.notify_observers(self, None if trigger_parent else -np.inf)
    return index
Constrain this parameter to be fixed to the current value it carries .
116
15
20,952
def unconstrain_fixed(self):
    """This parameter will no longer be fixed.

    :return: the indices that were unconstrained.
    """
    unconstrained = self.unconstrain(__fixed__)
    self._highest_parent_._set_unfixed(self, unconstrained)
    #if self._default_constraint_ is not None:
    #    return self.constrain(self._default_constraint_)
    return unconstrained
This parameter will no longer be fixed .
79
8
20,953
def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3, df_tolerance=1e-12):
    """Check this parameter's gradient against a numerical estimate.

    The check passes when either the ratio or the difference between the
    numerical and analytical gradient is smaller than *tolerance*.
    Always delegates to the highest parent so fixes held there are
    respected.
    """
    # Make sure we always call the gradcheck on the highest parent
    # This ensures the assumption of the highest parent to hold the fixes
    # In the checkgrad function we take advantage of that, so it needs
    # to be set in place here.
    if self.has_parent():
        return self._highest_parent_._checkgrad(self, verbose=verbose, step=step, tolerance=tolerance, df_tolerance=df_tolerance)
    return self._checkgrad(self, verbose=verbose, step=step, tolerance=tolerance, df_tolerance=df_tolerance)
Check the gradient of this parameter with respect to the highest parent s objective function . This is a three point estimate of the gradient wiggling at the parameters with a stepsize step . The check passes if either the ratio or the difference between numerical and analytical gradient is smaller then tolerance .
166
57
20,954
def opt(self, x_init, f_fp=None, f=None, fp=None):
    """Run the TNC optimizer.

    Requires *f_fp*, a callable returning ``(objective, gradient)``.
    Results are stored on self: ``x_opt``, ``f_opt``, ``funct_eval``
    and ``status``.
    """
    tnc_rcstrings = ['Local minimum', 'Converged', 'XConverged',
                     'Maximum number of f evaluations reached',
                     'Line search failed', 'Function is constant']

    assert f_fp != None, "TNC requires f_fp"

    opt_dict = {}
    if self.xtol is not None:
        opt_dict['xtol'] = self.xtol
    if self.ftol is not None:
        opt_dict['ftol'] = self.ftol
    if self.gtol is not None:
        opt_dict['pgtol'] = self.gtol

    opt_result = optimize.fmin_tnc(f_fp, x_init,
                                   messages=self.messages,
                                   maxfun=self.max_f_eval,
                                   **opt_dict)
    self.x_opt = opt_result[0]
    self.funct_eval = opt_result[1]
    self.f_opt = f_fp(self.x_opt)[0]
    self.status = tnc_rcstrings[opt_result[2]]
Run the TNC optimizer
269
6
20,955
def opt(self, x_init, f_fp=None, f=None, fp=None):
    """Run the simplex (Nelder-Mead) optimizer; no gradients required.

    Results are stored on self: ``x_opt``, ``f_opt``, ``funct_eval``,
    ``status`` and ``trace`` (always None for simplex).
    """
    statuses = ['Converged', 'Maximum number of function evaluations made',
                'Maximum number of iterations reached']

    opt_dict = {}
    if self.xtol is not None:
        opt_dict['xtol'] = self.xtol
    if self.ftol is not None:
        opt_dict['ftol'] = self.ftol
    if self.gtol is not None:
        print("WARNING: simplex doesn't have an gtol arg, so I'm going to ignore it")

    opt_result = optimize.fmin(f, x_init, (), disp=self.messages,
                               maxfun=self.max_f_eval, full_output=True,
                               **opt_dict)

    self.x_opt = opt_result[0]
    self.f_opt = opt_result[1]
    self.funct_eval = opt_result[3]
    self.status = statuses[opt_result[4]]
    self.trace = None
The simplex optimizer does not require gradients .
244
11
20,956
def combine_inputs(self, args, kw, ignore_args):
    """Combine args and kwargs deterministically.

    Keyword values are appended in key-sorted order so kwarg ordering
    cannot trigger a recompute; positions listed in *ignore_args* are
    dropped and therefore never checked.
    """
    sorted_kw_values = tuple(value for _, value in sorted(kw.items()))
    inputs = args + sorted_kw_values
    return [inp for position, inp in enumerate(inputs)
            if position not in ignore_args]
Combines the args and kw in a unique way such that ordering of kwargs does not lead to recompute
90
24
20,957
def ensure_cache_length ( self ) : if len ( self . order ) == self . limit : # we have reached the limit, so lets release one element cache_id = self . order . popleft ( ) combined_args_kw = self . cached_inputs [ cache_id ] for ind in combined_args_kw : ind_id = self . id ( ind ) tmp = self . cached_input_ids . get ( ind_id , None ) if tmp is not None : ref , cache_ids = tmp if len ( cache_ids ) == 1 and ref ( ) is not None : ref ( ) . remove_observer ( self , self . on_cache_changed ) del self . cached_input_ids [ ind_id ] else : cache_ids . remove ( cache_id ) self . cached_input_ids [ ind_id ] = [ ref , cache_ids ] try : del self . cached_outputs [ cache_id ] except KeyError : # Was not cached before, possibly a keyboard interrupt pass try : del self . inputs_changed [ cache_id ] except KeyError : # Was not cached before, possibly a keyboard interrupt pass try : del self . cached_inputs [ cache_id ] except KeyError : # Was not cached before, possibly a keyboard interrupt pass
Ensures the cache is within its limits and has one place free
281
14
20,958
def add_to_cache ( self , cache_id , inputs , output ) : self . inputs_changed [ cache_id ] = False self . cached_outputs [ cache_id ] = output self . order . append ( cache_id ) self . cached_inputs [ cache_id ] = inputs for a in inputs : if a is not None and not isinstance ( a , Number ) and not isinstance ( a , str ) : ind_id = self . id ( a ) v = self . cached_input_ids . get ( ind_id , [ weakref . ref ( a ) , [ ] ] ) v [ 1 ] . append ( cache_id ) if len ( v [ 1 ] ) == 1 : a . add_observer ( self , self . on_cache_changed ) self . cached_input_ids [ ind_id ] = v
This adds cache_id to the cache with inputs and output
187
12
20,959
def on_cache_changed ( self , direct , which = None ) : for what in [ direct , which ] : ind_id = self . id ( what ) _ , cache_ids = self . cached_input_ids . get ( ind_id , [ None , [ ] ] ) for cache_id in cache_ids : self . inputs_changed [ cache_id ] = True
A callback funtion which sets local flags when the elements of some cached inputs change
83
16
20,960
def reset ( self ) : [ a ( ) . remove_observer ( self , self . on_cache_changed ) if ( a ( ) is not None ) else None for [ a , _ ] in self . cached_input_ids . values ( ) ] self . order = collections . deque ( ) self . cached_inputs = { } # point from cache_ids to a list of [ind_ids], which where used in cache cache_id #======================================================================= # point from each ind_id to [ref(obj), cache_ids] # 0: a weak reference to the object itself # 1: the cache_ids in which this ind_id is used (len will be how many times we have seen this ind_id) self . cached_input_ids = { } #======================================================================= self . cached_outputs = { } # point from cache_ids to outputs self . inputs_changed = { }
Totally reset the cache
198
5
20,961
def disable_caching ( self ) : self . caching_enabled = False for c in self . values ( ) : c . disable_cacher ( )
Disable the cache of this object . This also removes previously cached results
33
13
20,962
def enable_caching ( self ) : self . caching_enabled = True for c in self . values ( ) : c . enable_cacher ( )
Enable the cache of this object .
33
7
20,963
def remove ( self , priority , observer , callble ) : self . flush ( ) for i in range ( len ( self ) - 1 , - 1 , - 1 ) : p , o , c = self [ i ] if priority == p and observer == o and callble == c : del self . _poc [ i ]
Remove one observer which had priority and callble .
70
10
20,964
def add ( self , priority , observer , callble ) : #if observer is not None: ins = 0 for pr , _ , _ in self : if priority > pr : break ins += 1 self . _poc . insert ( ins , ( priority , weakref . ref ( observer ) , callble ) )
Add an observer with priority and callble
66
8
20,965
def properties_for ( self , index ) : return vectorize ( lambda i : [ prop for prop in self . properties ( ) if i in self [ prop ] ] , otypes = [ list ] ) ( index )
Returns a list of properties such that each entry in the list corresponds to the element of the index given .
46
21
20,966
def properties_dict_for ( self , index ) : props = self . properties_for ( index ) prop_index = extract_properties_to_index ( index , props ) return prop_index
Return a dictionary containing properties as keys and indices as index Thus the indices for each constraint which is contained will be collected as one dictionary
42
26
20,967
def optimize ( self , optimizer = None , start = None , messages = False , max_iters = 1000 , ipython_notebook = True , clear_after_finish = False , * * kwargs ) : if self . is_fixed or self . size == 0 : print ( 'nothing to optimize' ) return if not self . update_model ( ) : print ( "updates were off, setting updates on again" ) self . update_model ( True ) if start is None : start = self . optimizer_array if optimizer is None : optimizer = self . preferred_optimizer if isinstance ( optimizer , optimization . Optimizer ) : opt = optimizer opt . model = self else : optimizer = optimization . get_optimizer ( optimizer ) opt = optimizer ( max_iters = max_iters , * * kwargs ) with VerboseOptimization ( self , opt , maxiters = max_iters , verbose = messages , ipython_notebook = ipython_notebook , clear_after_finish = clear_after_finish ) as vo : opt . run ( start , f_fp = self . _objective_grads , f = self . _objective , fp = self . _grads ) self . optimizer_array = opt . x_opt self . optimization_runs . append ( opt ) return opt
Optimize the model using self . log_likelihood and self . log_likelihood_gradient as well as self . priors .
303
28
20,968
def optimize_restarts ( self , num_restarts = 10 , robust = False , verbose = True , parallel = False , num_processes = None , * * kwargs ) : initial_length = len ( self . optimization_runs ) initial_parameters = self . optimizer_array . copy ( ) if parallel : #pragma: no cover try : pool = mp . Pool ( processes = num_processes ) obs = [ self . copy ( ) for i in range ( num_restarts ) ] [ obs [ i ] . randomize ( ) for i in range ( num_restarts - 1 ) ] jobs = pool . map ( opt_wrapper , [ ( o , kwargs ) for o in obs ] ) pool . close ( ) pool . join ( ) except KeyboardInterrupt : print ( "Ctrl+c received, terminating and joining pool." ) pool . terminate ( ) pool . join ( ) for i in range ( num_restarts ) : try : if not parallel : if i > 0 : self . randomize ( ) self . optimize ( * * kwargs ) else : #pragma: no cover self . optimization_runs . append ( jobs [ i ] ) if verbose : print ( ( "Optimization restart {0}/{1}, f = {2}" . format ( i + 1 , num_restarts , self . optimization_runs [ - 1 ] . f_opt ) ) ) except Exception as e : if robust : print ( ( "Warning - optimization restart {0}/{1} failed" . format ( i + 1 , num_restarts ) ) ) else : raise e if len ( self . optimization_runs ) > initial_length : # This works, since failed jobs don't get added to the optimization_runs. i = np . argmin ( [ o . f_opt for o in self . optimization_runs [ initial_length : ] ] ) self . optimizer_array = self . optimization_runs [ initial_length + i ] . x_opt else : self . optimizer_array = initial_parameters return self . optimization_runs
Perform random restarts of the model and set the model to the best seen solution .
456
18
20,969
def _grads ( self , x ) : try : # self._set_params_transformed(x) self . optimizer_array = x self . obj_grads = self . _transform_gradients ( self . objective_function_gradients ( ) ) self . _fail_count = 0 except ( LinAlgError , ZeroDivisionError , ValueError ) : #pragma: no cover if self . _fail_count >= self . _allowed_failures : raise self . _fail_count += 1 self . obj_grads = np . clip ( self . _transform_gradients ( self . objective_function_gradients ( ) ) , - 1e100 , 1e100 ) return self . obj_grads
Gets the gradients from the likelihood and the priors .
161
13
20,970
def _objective ( self , x ) : try : self . optimizer_array = x obj = self . objective_function ( ) self . _fail_count = 0 except ( LinAlgError , ZeroDivisionError , ValueError ) : #pragma: no cover if self . _fail_count >= self . _allowed_failures : raise self . _fail_count += 1 return np . inf return obj
The objective function passed to the optimizer . It combines the likelihood and the priors .
91
18
20,971
def _repr_html_ ( self ) : model_details = [ [ '<b>Model</b>' , self . name + '<br>' ] , [ '<b>Objective</b>' , '{}<br>' . format ( float ( self . objective_function ( ) ) ) ] , [ "<b>Number of Parameters</b>" , '{}<br>' . format ( self . size ) ] , [ "<b>Number of Optimization Parameters</b>" , '{}<br>' . format ( self . _size_transformed ( ) ) ] , [ "<b>Updates</b>" , '{}<br>' . format ( self . _update_on ) ] , ] from operator import itemgetter to_print = [ """<style type="text/css"> .pd{ font-family: "Courier New", Courier, monospace !important; width: 100%; padding: 3px; } </style>\n""" ] + [ "<p class=pd>" ] + [ "{}: {}" . format ( name , detail ) for name , detail in model_details ] + [ "</p>" ] to_print . append ( super ( Model , self ) . _repr_html_ ( ) ) return "\n" . join ( to_print )
Representation of the model in html for notebook display .
293
11
20,972
def add_index_operation ( self , name , operations ) : if name not in self . _index_operations : self . _add_io ( name , operations ) else : raise AttributeError ( "An index operation with the name {} was already taken" . format ( name ) )
Add index operation with name to the operations given .
62
10
20,973
def _offset_for ( self , param ) : if param . has_parent ( ) : p = param . _parent_ . _get_original ( param ) if p in self . parameters : return reduce ( lambda a , b : a + b . size , self . parameters [ : p . _parent_index_ ] , 0 ) return self . _offset_for ( param . _parent_ ) + param . _parent_ . _offset_for ( param ) return 0
Return the offset of the param inside this parameterized object . This does not need to account for shaped parameters as it basically just sums up the parameter sizes which come before param .
102
35
20,974
def _raveled_index_for ( self , param ) : from . . param import ParamConcatenation if isinstance ( param , ParamConcatenation ) : return np . hstack ( ( self . _raveled_index_for ( p ) for p in param . params ) ) return param . _raveled_index ( ) + self . _offset_for ( param )
get the raveled index for a param that is an int array containing the indexes for the flattened param inside this parameterized logic .
84
27
20,975
def copy ( self ) : from . lists_and_dicts import ObserverList memo = { } memo [ id ( self ) ] = self memo [ id ( self . observers ) ] = ObserverList ( ) return self . __deepcopy__ ( memo )
Make a copy . This means we delete all observers and return a copy of this array . It will still be an ObsAr!
54
26
20,976
def update_model ( self , updates = None ) : if updates is None : return self . _update_on assert isinstance ( updates , bool ) , "updates are either on (True) or off (False)" p = getattr ( self , '_highest_parent_' , None ) def turn_updates ( s ) : s . _update_on = updates p . traverse ( turn_updates ) self . trigger_update ( )
Get or set whether automatic updates are performed . When updates are off the model might be in a non - working state . To make the model work turn updates on again .
97
34
20,977
def trigger_update ( self , trigger_parent = True ) : if not self . update_model ( ) or ( hasattr ( self , "_in_init_" ) and self . _in_init_ ) : #print "Warning: updates are off, updating the model will do nothing" return self . _trigger_params_changed ( trigger_parent )
Update the model from the current state . Make sure that updates are on otherwise this method will do nothing
77
20
20,978
def optimizer_array ( self ) : if self . __dict__ . get ( '_optimizer_copy_' , None ) is None or self . size != self . _optimizer_copy_ . size : self . _optimizer_copy_ = np . empty ( self . size ) if not self . _optimizer_copy_transformed : self . _optimizer_copy_ . flat = self . param_array . flat #py3 fix #[np.put(self._optimizer_copy_, ind, c.finv(self.param_array[ind])) for c, ind in self.constraints.iteritems() if c != __fixed__] [ np . put ( self . _optimizer_copy_ , ind , c . finv ( self . param_array [ ind ] ) ) for c , ind in self . constraints . items ( ) if c != __fixed__ ] self . _optimizer_copy_transformed = True if self . _has_fixes ( ) : # or self._has_ties()): self . _ensure_fixes ( ) return self . _optimizer_copy_ [ self . _fixes_ ] return self . _optimizer_copy_
Array for the optimizer to work on . This array always lives in the space for the optimizer . Thus it is untransformed going from Transformations .
265
32
20,979
def _trigger_params_changed ( self , trigger_parent = True ) : [ p . _trigger_params_changed ( trigger_parent = False ) for p in self . parameters if not p . is_fixed ] self . notify_observers ( None , None if trigger_parent else - np . inf )
First tell all children to update then update yourself .
68
10
20,980
def _transform_gradients ( self , g ) : #py3 fix #[np.put(g, i, c.gradfactor(self.param_array[i], g[i])) for c, i in self.constraints.iteritems() if c != __fixed__] [ np . put ( g , i , c . gradfactor ( self . param_array [ i ] , g [ i ] ) ) for c , i in self . constraints . items ( ) if c != __fixed__ ] if self . _has_fixes ( ) : return g [ self . _fixes_ ] return g
Transform the gradients by multiplying the gradient factor for each constraint to it .
134
15
20,981
def parameter_names ( self , add_self = False , adjust_for_printing = False , recursive = True , intermediate = False ) : if adjust_for_printing : adjust = adjust_name_for_printing else : adjust = lambda x : x names = [ ] if intermediate or ( not recursive ) : names . extend ( [ adjust ( x . name ) for x in self . parameters ] ) if intermediate or recursive : names . extend ( [ xi for x in self . parameters for xi in x . parameter_names ( add_self = True , adjust_for_printing = adjust_for_printing , recursive = True , intermediate = False ) ] ) if add_self : names = map ( lambda x : adjust ( self . name ) + "." + x , names ) return names
Get the names of all parameters of this model or parameter . It starts from the parameterized object you are calling this method on .
174
26
20,982
def parameter_names_flat ( self , include_fixed = False ) : name_list = [ ] for p in self . flattened_parameters : name = p . hierarchy_name ( ) if p . size > 1 : name_list . extend ( [ "{}[{!s}]" . format ( name , i ) for i in p . _indices ( ) ] ) else : name_list . append ( name ) name_list = np . array ( name_list ) if not include_fixed and self . _has_fixes ( ) : return name_list [ self . _fixes_ ] return name_list
Return the flattened parameter names for all subsequent parameters of this parameter . We do not include the name for self here!
134
23
20,983
def _propagate_param_grad ( self , parray , garray ) : #if self.param_array.size != self.size: # self._param_array_ = np.empty(self.size, dtype=np.float64) #if self.gradient.size != self.size: # self._gradient_array_ = np.empty(self.size, dtype=np.float64) pi_old_size = 0 for pi in self . parameters : pislice = slice ( pi_old_size , pi_old_size + pi . size ) self . param_array [ pislice ] = pi . param_array . flat # , requirements=['C', 'W']).flat self . gradient_full [ pislice ] = pi . gradient_full . flat # , requirements=['C', 'W']).flat pi . param_array . data = parray [ pislice ] . data pi . gradient_full . data = garray [ pislice ] . data pi . _propagate_param_grad ( parray [ pislice ] , garray [ pislice ] ) pi_old_size += pi . size self . _model_initialized_ = True
For propagating the param_array and gradient_array . This ensures the in memory view of each subsequent array .
266
23
20,984
def initialize_parameter ( self ) : #logger.debug("connecting parameters") self . _highest_parent_ . _notify_parent_change ( ) self . _highest_parent_ . _connect_parameters ( ) #logger.debug("calling parameters changed") self . _highest_parent_ . _connect_fixes ( ) self . trigger_update ( )
Call this function to initialize the model if you built it without initialization .
82
14
20,985
def traverse_parents ( self , visit , * args , * * kwargs ) : if self . has_parent ( ) : self . __visited = True self . _parent_ . traverse_parents ( visit , * args , * * kwargs ) self . _parent_ . traverse ( visit , * args , * * kwargs ) self . __visited = False
Traverse the hierarchy upwards visiting all parents and their children except self . See visitor pattern in literature . This is implemented in pre - order fashion .
82
29
20,986
def phi ( self , Xpred , degrees = None ) : assert Xpred . shape [ 1 ] == self . X . shape [ 1 ] , "Need to predict with same shape as training data." if degrees is None : degrees = range ( self . basis . degree + 1 ) tmp_phi = np . empty ( ( len ( degrees ) , Xpred . shape [ 0 ] , Xpred . shape [ 1 ] ) ) for i , w in enumerate ( degrees ) : # Objective function tmpX = self . _phi ( Xpred , w ) tmp_phi [ i ] = tmpX * self . weights [ [ w ] , : ] return tmp_phi
Compute the design matrix for this model using the degrees given by the index array in degrees
142
18
20,987
def consolidate_dependencies ( needs_ipython , child_program , requirement_files , manual_dependencies ) : # We get the logger here because it's not defined at module level logger = logging . getLogger ( 'fades' ) if needs_ipython : logger . debug ( "Adding ipython dependency because --ipython was detected" ) ipython_dep = parsing . parse_manual ( [ 'ipython' ] ) else : ipython_dep = { } if child_program : srcfile_deps = parsing . parse_srcfile ( child_program ) logger . debug ( "Dependencies from source file: %s" , srcfile_deps ) docstring_deps = parsing . parse_docstring ( child_program ) logger . debug ( "Dependencies from docstrings: %s" , docstring_deps ) else : srcfile_deps = { } docstring_deps = { } all_dependencies = [ ipython_dep , srcfile_deps , docstring_deps ] if requirement_files is not None : for rf_path in requirement_files : rf_deps = parsing . parse_reqfile ( rf_path ) logger . debug ( 'Dependencies from requirements file %r: %s' , rf_path , rf_deps ) all_dependencies . append ( rf_deps ) manual_deps = parsing . parse_manual ( manual_dependencies ) logger . debug ( "Dependencies from parameters: %s" , manual_deps ) all_dependencies . append ( manual_deps ) # Merge dependencies indicated_deps = { } for dep in all_dependencies : for repo , info in dep . items ( ) : indicated_deps . setdefault ( repo , set ( ) ) . update ( info ) return indicated_deps
Parse files get deps and merge them . Deps read later overwrite those read earlier .
407
19
20,988
def detect_inside_virtualenv ( prefix , real_prefix , base_prefix ) : if real_prefix is not None : return True if base_prefix is None : return False # if prefix is different than base_prefix, it's a venv return prefix != base_prefix
Tell if fades is running inside a virtualenv .
59
10
20,989
def _get_normalized_args ( parser ) : env = os . environ if '_' in env and env [ '_' ] != sys . argv [ 0 ] and len ( sys . argv ) >= 1 and " " in sys . argv [ 1 ] : return parser . parse_args ( shlex . split ( sys . argv [ 1 ] ) + sys . argv [ 2 : ] ) else : return parser . parse_args ( )
Return the parsed command line arguments .
101
7
20,990
def parse_fade_requirement ( text ) : text = text . strip ( ) if "::" in text : repo_raw , requirement = text . split ( "::" , 1 ) try : repo = { 'pypi' : REPO_PYPI , 'vcs' : REPO_VCS } [ repo_raw ] except KeyError : logger . warning ( "Not understood fades repository: %r" , repo_raw ) return else : if ":" in text and "/" in text : repo = REPO_VCS else : repo = REPO_PYPI requirement = text if repo == REPO_VCS : dependency = VCSDependency ( requirement ) else : dependency = list ( parse_requirements ( requirement ) ) [ 0 ] return repo , dependency
Return a requirement and repo from the given text already parsed and converted .
172
14
20,991
def _parse_content ( fh ) : content = iter ( fh ) deps = { } for line in content : # quickly discard most of the lines if 'fades' not in line : continue # discard other string with 'fades' that isn't a comment if '#' not in line : continue # assure that it's a well commented line and no other stuff line = line . strip ( ) index_of_last_fades = line . rfind ( 'fades' ) index_of_first_hash = line . index ( '#' ) # discard when fades does not appear after # if index_of_first_hash > index_of_last_fades : continue import_part , fades_part = line . rsplit ( "#" , 1 ) # discard other comments in the same line that aren't for fades if "fades" not in fades_part : import_part , fades_part = import_part . rsplit ( "#" , 1 ) fades_part = fades_part . strip ( ) if not fades_part . startswith ( "fades" ) : continue if not import_part : # the fades comment was done at the beginning of the line, # which means that the import info is in the next one import_part = next ( content ) . strip ( ) if import_part . startswith ( '#' ) : continue # get module import_tokens = import_part . split ( ) if import_tokens [ 0 ] == 'import' : module_path = import_tokens [ 1 ] elif import_tokens [ 0 ] == 'from' and import_tokens [ 2 ] == 'import' : module_path = import_tokens [ 1 ] else : logger . debug ( "Not understood import info: %s" , import_tokens ) continue module = module_path . split ( "." ) [ 0 ] # If fades know the real name of the pkg. Replace it! if module in PKG_NAMES_DB : module = PKG_NAMES_DB [ module ] # To match the "safe" name that pkg_resources creates: module = module . replace ( '_' , '-' ) # get the fades info after 'fades' mark, if any if len ( fades_part ) == 5 or fades_part [ 5 : ] . strip ( ) [ 0 ] in "<>=!" 
: # just the 'fades' mark, and maybe a version specification, the requirement is what # was imported (maybe with that version comparison) requirement = module + fades_part [ 5 : ] elif fades_part [ 5 ] != " " : # starts with fades but it's part of a longer weird word logger . warning ( "Not understood fades info: %r" , fades_part ) continue else : # more complex stuff, to be parsed as a normal requirement requirement = fades_part [ 5 : ] # parse and convert the requirement parsed_req = parse_fade_requirement ( requirement ) if parsed_req is None : continue repo , dependency = parsed_req deps . setdefault ( repo , [ ] ) . append ( dependency ) return deps
Parse the content of a script to find marked dependencies .
688
12
20,992
def _parse_docstring ( fh ) : find_fades = re . compile ( r'\b(fades)\b:' ) . search for line in fh : if line . startswith ( "'" ) : quote = "'" break if line . startswith ( '"' ) : quote = '"' break else : return { } if line [ 1 ] == quote : # comment start with triple quotes endquote = quote * 3 else : endquote = quote if endquote in line [ len ( endquote ) : ] : docstring_lines = [ line [ : line . index ( endquote ) ] ] else : docstring_lines = [ line ] for line in fh : if endquote in line : docstring_lines . append ( line [ : line . index ( endquote ) ] ) break docstring_lines . append ( line ) docstring_lines = iter ( docstring_lines ) for doc_line in docstring_lines : if find_fades ( doc_line ) : break else : return { } return _parse_requirement ( list ( docstring_lines ) )
Parse the docstrings of a script to find marked dependencies .
239
13
20,993
def _parse_requirement ( iterable ) : deps = { } for line in iterable : line = line . strip ( ) if not line or line [ 0 ] == '#' : continue parsed_req = parse_fade_requirement ( line ) if parsed_req is None : continue repo , dependency = parsed_req deps . setdefault ( repo , [ ] ) . append ( dependency ) return deps
Actually parse the requirements from file or manually specified .
91
10
20,994
def _read_lines ( filepath ) : with open ( filepath , 'rt' , encoding = 'utf8' ) as fh : for line in fh : line = line . strip ( ) if line . startswith ( "-r" ) : logger . debug ( "Reading deps from nested requirement file: %s" , line ) try : nested_filename = line . split ( ) [ 1 ] except IndexError : logger . warning ( "Invalid format to indicate a nested requirements file: '%r'" , line ) else : nested_filepath = os . path . join ( os . path . dirname ( filepath ) , nested_filename ) yield from _read_lines ( nested_filepath ) else : yield line
Read a req file to a list to support nested requirement files .
158
13
20,995
def create_venv ( requested_deps , interpreter , is_current , options , pip_options ) : # create virtualenv env = _FadesEnvBuilder ( ) env_path , env_bin_path , pip_installed = env . create_env ( interpreter , is_current , options ) venv_data = { } venv_data [ 'env_path' ] = env_path venv_data [ 'env_bin_path' ] = env_bin_path venv_data [ 'pip_installed' ] = pip_installed # install deps installed = { } for repo in requested_deps . keys ( ) : if repo in ( REPO_PYPI , REPO_VCS ) : mgr = PipManager ( env_bin_path , pip_installed = pip_installed , options = pip_options ) else : logger . warning ( "Install from %r not implemented" , repo ) continue installed [ repo ] = { } repo_requested = requested_deps [ repo ] logger . debug ( "Installing dependencies for repo %r: requested=%s" , repo , repo_requested ) for dependency in repo_requested : try : mgr . install ( dependency ) except Exception : logger . debug ( "Installation Step failed, removing virtualenv" ) destroy_venv ( env_path ) raise FadesError ( 'Dependency installation failed' ) if repo == REPO_VCS : # no need to request the installed version, as we'll always compare # to the url itself project = dependency . url version = None else : # always store the installed dependency, as in the future we'll select the venv # based on what is installed, not what used requested (remember that user may # request >, >=, etc!) project = dependency . project_name version = mgr . get_version ( project ) installed [ repo ] [ project ] = version logger . debug ( "Installed dependencies: %s" , installed ) return venv_data , installed
Create a new virtualvenv with the requirements of this script .
434
13
20,996
def destroy_venv ( env_path , venvscache = None ) : # remove the venv itself in disk logger . debug ( "Destroying virtualenv at: %s" , env_path ) shutil . rmtree ( env_path , ignore_errors = True ) # remove venv from cache if venvscache is not None : venvscache . remove ( env_path )
Destroy a venv .
88
5
20,997
def create_with_virtualenv ( self , interpreter , virtualenv_options ) : args = [ 'virtualenv' , '--python' , interpreter , self . env_path ] args . extend ( virtualenv_options ) if not self . pip_installed : args . insert ( 3 , '--no-pip' ) try : helpers . logged_exec ( args ) self . env_bin_path = os . path . join ( self . env_path , 'bin' ) except FileNotFoundError as error : logger . error ( 'Virtualenv is not installed. It is needed to create a virtualenv with ' 'a different python version than fades (got {})' . format ( error ) ) raise FadesError ( 'virtualenv not found' ) except helpers . ExecutionError as error : error . dump_to_log ( logger ) raise FadesError ( 'virtualenv could not be run' ) except Exception as error : logger . exception ( "Error creating virtualenv: %s" , error ) raise FadesError ( 'General error while running virtualenv' )
Create a virtualenv using the virtualenv lib .
229
10
20,998
def create_env ( self , interpreter , is_current , options ) : if is_current : # apply pyvenv options pyvenv_options = options [ 'pyvenv_options' ] if "--system-site-packages" in pyvenv_options : self . system_site_packages = True logger . debug ( "Creating virtualenv with pyvenv. options=%s" , pyvenv_options ) self . create ( self . env_path ) else : virtualenv_options = options [ 'virtualenv_options' ] logger . debug ( "Creating virtualenv with virtualenv" ) self . create_with_virtualenv ( interpreter , virtualenv_options ) logger . debug ( "env_bin_path: %s" , self . env_bin_path ) # Re check if pip was installed (supporting both binary and .exe for Windows) pip_bin = os . path . join ( self . env_bin_path , "pip" ) pip_exe = os . path . join ( self . env_bin_path , "pip.exe" ) if not ( os . path . exists ( pip_bin ) or os . path . exists ( pip_exe ) ) : logger . debug ( "pip isn't installed in the venv, setting pip_installed=False" ) self . pip_installed = False return self . env_path , self . env_bin_path , self . pip_installed
Create the virtualenv and return its info .
313
9
20,999
def store_usage_stat ( self , venv_data , cache ) : with open ( self . stat_file_path , 'at' ) as f : self . _write_venv_usage ( f , venv_data )
Log an usage record for venv_data .
52
10