idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
242,300
def write_other_members ( self , f , catch_all = False ) : if catch_all : names = self . _members . items ( ) else : names = inspect . getmembers ( self . _module ) leftovers = [ ] for name , _ in names : if name in self . _members and name not in self . _documented : leftovers . append ( name ) if leftovers : print ( ...
Writes the leftover members to f .
242,301
def assert_no_leftovers ( self ) : leftovers = [ ] for name in self . _members . keys ( ) : if name in self . _members and name not in self . _documented : leftovers . append ( name ) if leftovers : raise RuntimeError ( "%s: undocumented members: %s" % ( self . _title , ", " . join ( leftovers ) ) )
Generate an error if there are leftover members .
242,302
def start_http_server ( self , port , host = '0.0.0.0' , endpoint = None ) : if self . should_start_http_server ( ) : pc_start_http_server ( port , host , registry = self . registry )
Start an HTTP server for exposing the metrics if the should_start_http_server function says we should otherwise just return . Uses the implementation from prometheus_client rather than a Flask app .
242,303
def init_app ( self , app ) : if self . path : self . register_endpoint ( self . path , app ) if self . _export_defaults : self . export_defaults ( self . buckets , self . group_by , self . _defaults_prefix , app )
This callback can be used to initialize an application for the use with this prometheus reporter setup .
242,304
def register_endpoint ( self , path , app = None ) : if is_running_from_reloader ( ) and not os . environ . get ( 'DEBUG_METRICS' ) : return if app is None : app = self . app or current_app @ app . route ( path ) @ self . do_not_track ( ) def prometheus_metrics ( ) : from prometheus_client import multiprocess , Collect...
Register the metrics endpoint on the Flask application .
242,305
def start_http_server ( self , port , host = '0.0.0.0' , endpoint = '/metrics' ) : if is_running_from_reloader ( ) : return app = Flask ( 'prometheus-flask-exporter-%d' % port ) self . register_endpoint ( endpoint , app ) def run_app ( ) : app . run ( host = host , port = port ) thread = threading . Thread ( target = r...
Start an HTTP server for exposing the metrics . This will be an individual Flask application not the one registered with this class .
242,306
def histogram ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Histogram , lambda metric , time : metric . observe ( time ) , kwargs , name , description , labels , registry = self . registry )
Use a Histogram to track the execution time and invocation count of the method .
242,307
def summary ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Summary , lambda metric , time : metric . observe ( time ) , kwargs , name , description , labels , registry = self . registry )
Use a Summary to track the execution time and invocation count of the method .
242,308
def gauge ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Gauge , lambda metric , time : metric . dec ( ) , kwargs , name , description , labels , registry = self . registry , before = lambda metric : metric . inc ( ) )
Use a Gauge to track the number of invocations in progress for the method .
242,309
def counter ( self , name , description , labels = None , ** kwargs ) : return self . _track ( Counter , lambda metric , time : metric . inc ( ) , kwargs , name , description , labels , registry = self . registry )
Use a Counter to track the total number of invocations of the method .
242,310
def _track ( metric_type , metric_call , metric_kwargs , name , description , labels , registry , before = None ) : if labels is not None and not isinstance ( labels , dict ) : raise TypeError ( 'labels needs to be a dictionary of {labelname: callable}' ) label_names = labels . keys ( ) if labels else tuple ( ) parent_...
Internal method decorator logic .
242,311
def do_not_track ( ) : def decorator ( f ) : @ functools . wraps ( f ) def func ( * args , ** kwargs ) : request . prom_do_not_track = True return f ( * args , ** kwargs ) return func return decorator
Decorator to skip the default metrics collection for the method .
242,312
def info ( self , name , description , labelnames = None , labelvalues = None , ** labels ) : if labels and labelnames : raise ValueError ( 'Cannot have labels defined as `dict` ' 'and collections of names and values' ) if labelnames is None and labels : labelnames = labels . keys ( ) elif labelnames and labelvalues : ...
Report any information as a Prometheus metric . This will create a Gauge with the initial value of 1 .
242,313
def inve ( env , command , * args , ** kwargs ) : with temp_environ ( ) : os . environ [ 'VIRTUAL_ENV' ] = str ( workon_home / env ) os . environ [ 'PATH' ] = compute_path ( env ) unsetenv ( 'PYTHONHOME' ) unsetenv ( '__PYVENV_LAUNCHER__' ) try : return check_call ( [ command ] + list ( args ) , shell = windows , ** kw...
Run a command in the given virtual environment .
242,314
def ls_cmd ( argv ) : parser = argparse . ArgumentParser ( ) p_group = parser . add_mutually_exclusive_group ( ) p_group . add_argument ( '-b' , '--brief' , action = 'store_false' ) p_group . add_argument ( '-l' , '--long' , action = 'store_true' ) args = parser . parse_args ( argv ) lsvirtualenv ( args . long )
List available environments .
242,315
def workon_cmd ( argv ) : parser = argparse . ArgumentParser ( prog = 'pew workon' ) parser . add_argument ( 'envname' , nargs = '?' ) parser . add_argument ( '-n' , '--no-cd' , action = 'store_true' , help = ( 'Do not change working directory to project directory after ' 'activating virtualenv.' ) ) args = parser . pa...
List or change working virtual environments .
242,316
def add_cmd ( argv ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( '-d' , dest = 'remove' , action = 'store_true' ) parser . add_argument ( 'dirs' , nargs = '+' ) args = parser . parse_args ( argv ) extra_paths = sitepackages_dir ( ) / '_virtualenv_path_extensions.pth' new_paths = [ os . path . abspa...
Add the specified directories to the Python path for the currently active virtualenv .
242,317
def lssitepackages_cmd ( argv ) : site = sitepackages_dir ( ) print ( * sorted ( site . iterdir ( ) ) , sep = os . linesep ) extra_paths = site / '_virtualenv_path_extensions.pth' if extra_paths . exists ( ) : print ( 'from _virtualenv_path_extensions.pth:' ) with extra_paths . open ( ) as extra : print ( '' . join ( e...
Show the content of the site - packages directory of the current virtualenv .
242,318
def toggleglobalsitepackages_cmd ( argv ) : quiet = argv == [ '-q' ] site = sitepackages_dir ( ) ngsp_file = site . parent / 'no-global-site-packages.txt' if ngsp_file . exists ( ) : ngsp_file . unlink ( ) if not quiet : print ( 'Enabled global site-packages' ) else : with ngsp_file . open ( 'w' ) : if not quiet : prin...
Toggle the current virtualenv between having and not having access to the global site - packages .
242,319
def cp_cmd ( argv ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( 'source' ) parser . add_argument ( 'target' , nargs = '?' ) parser . add_argument ( '-d' , '--dont-activate' , action = 'store_false' , default = True , dest = 'activate' , help = "After \ creation, continue with...
Duplicate the named virtualenv to make a new one .
242,320
def rename_cmd ( argv ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( 'source' ) parser . add_argument ( 'target' ) pargs = parser . parse_args ( argv ) copy_virtualenv_project ( pargs . source , pargs . target ) return rmvirtualenvs ( [ pargs . source ] )
Rename a virtualenv
242,321
def setproject_cmd ( argv ) : args = dict ( enumerate ( argv ) ) project = os . path . abspath ( args . get ( 1 , '.' ) ) env = args . get ( 0 , os . environ . get ( 'VIRTUAL_ENV' ) ) if not env : sys . exit ( 'pew setproject [virtualenv] [project_path]' ) if not ( workon_home / env ) . exists ( ) : sys . exit ( "Envir...
Given a virtualenv directory and a project directory set the virtualenv up to be associated with the project .
242,322
def getproject_cmd ( argv ) : parser = argparse . ArgumentParser ( description = "Print an environment's project directory." , ) parser . add_argument ( 'envname' , nargs = '?' , default = os . environ . get ( 'VIRTUAL_ENV' ) , help = ( 'The name of the environment to return the project directory ' 'for. If omitted, w...
Print a virtualenv's project directory if set .
242,323
def mkproject_cmd ( argv ) : if '-l' in argv or '--list' in argv : templates = [ t . name [ 9 : ] for t in workon_home . glob ( "template_*" ) ] print ( "Available project templates:" , * templates , sep = '\n' ) return parser = mkvirtualenv_argparser ( ) parser . add_argument ( 'envname' ) parser . add_argument ( '-t'...
Create a new project directory and its associated virtualenv .
242,324
def mktmpenv_cmd ( argv ) : parser = mkvirtualenv_argparser ( ) env = '.' while ( workon_home / env ) . exists ( ) : env = hex ( random . getrandbits ( 64 ) ) [ 2 : - 1 ] args , rest = parser . parse_known_args ( argv ) mkvirtualenv ( env , args . python , args . packages , requirements = args . requirements , rest = r...
Create a temporary virtualenv .
242,325
def inall_cmd ( argv ) : envs = lsenvs ( ) errors = False for env in envs : print ( "\n%s:" % env ) try : inve ( env , * argv ) except CalledProcessError as e : errors = True err ( e ) sys . exit ( errors )
Run a command in each virtualenv .
242,326
def in_cmd ( argv ) : if len ( argv ) == 1 : return workon_cmd ( argv ) parse_envname ( argv , lambda : sys . exit ( 'You must provide a valid virtualenv to target' ) ) return inve ( * argv )
Run a command in the given virtualenv .
242,327
def restore_cmd ( argv ) : if len ( argv ) < 1 : sys . exit ( 'You must provide a valid virtualenv to target' ) env = argv [ 0 ] path = workon_home / env py = path / env_bin_dir / ( 'python.exe' if windows else 'python' ) exact_py = py . resolve ( ) . name return check_call ( [ sys . executable , "-m" , "virtualenv" , ...
Try to restore a broken virtualenv by reinstalling the same python version on top of it
242,328
def dir_cmd ( argv ) : env = parse_envname ( argv , lambda : sys . exit ( 'You must provide a valid virtualenv to target' ) ) print ( workon_home / env )
Print the path for the virtualenv directory
242,329
def install_cmd ( argv ) : installer = InstallCommand ( ) options , versions = installer . parser . parse_args ( argv ) if len ( versions ) != 1 : installer . parser . print_help ( ) sys . exit ( 1 ) else : try : actual_installer = PythonInstaller . get_installer ( versions [ 0 ] , options ) return actual_installer . i...
Use Pythonz to download and build the specified Python version
242,330
def version_cmd ( argv ) : import pkg_resources try : __version__ = pkg_resources . get_distribution ( 'pew' ) . version except pkg_resources . DistributionNotFound : __version__ = 'unknown' print ( 'Setuptools has some issues here, failed to get our own package.' , file = sys . stderr ) print ( __version__ )
Prints current pew version
242,331
def csstext_to_pairs ( csstext ) : with csstext_to_pairs . _lock : return sorted ( [ ( prop . name . strip ( ) , format_value ( prop ) ) for prop in cssutils . parseStyle ( csstext ) ] , key = itemgetter ( 0 ) , )
csstext_to_pairs takes CSS text and turns it into a sorted list of key-value tuples .
242,332
def merge_styles ( inline_style , new_styles , classes , remove_unset_properties = False ) : styles = OrderedDict ( [ ( "" , OrderedDict ( ) ) ] ) for pc in set ( classes ) : styles [ pc ] = OrderedDict ( ) for i , style in enumerate ( new_styles ) : for k , v in style : styles [ classes [ i ] ] [ k ] = v if inline_sty...
This will merge all new styles where the order is important The last one will override the first When that is done it will apply old inline style again The old inline style is always important and override all new ones . The inline style must be valid .
242,333
def make_important ( bulk ) : return ";" . join ( "%s !important" % p if not p . endswith ( "!important" ) else p for p in bulk . split ( ";" ) )
makes every property in a string !important .
242,334
def capitalize_float_margin ( css_body ) : def _capitalize_property ( match ) : return "{0}:{1}{2}" . format ( match . group ( "property" ) . capitalize ( ) , match . group ( "value" ) , match . group ( "terminator" ) , ) return _lowercase_margin_float_rule . sub ( _capitalize_property , css_body )
Capitalize float and margin CSS property names
242,335
def _load_external ( self , url ) : if url . startswith ( "//" ) : if self . base_url and "https://" in self . base_url : url = "https:" + url else : url = "http:" + url if url . startswith ( "http://" ) or url . startswith ( "https://" ) : css_body = self . _load_external_url ( url ) else : stylefile = url if not os ....
loads an external stylesheet from a remote url or local path
242,336
def _css_rules_to_string ( self , rules ) : lines = [ ] for item in rules : if isinstance ( item , tuple ) : k , v = item lines . append ( "%s {%s}" % ( k , make_important ( v ) ) ) else : for rule in item . cssRules : if isinstance ( rule , ( cssutils . css . csscomment . CSSComment , cssutils . css . cssunknownrule ....
given a list of css rules returns a css string
242,337
def check_workers ( self ) : if time . time ( ) - self . _worker_alive_time > 5 : self . _worker_alive_time = time . time ( ) [ worker . join ( ) for worker in self . _workers if not worker . is_alive ( ) ] self . _workers = [ worker for worker in self . _workers if worker . is_alive ( ) ] if len ( self . _workers ) < ...
Kill workers that have been pending for a while and check if all workers are alive .
242,338
def kill_all ( self ) : while self . _num_workers > 0 and self . _worker_backend_socket . poll ( 1000 ) : msg = self . _worker_backend_socket . recv_pyobj ( ) self . _worker_backend_socket . send_pyobj ( None ) self . _num_workers -= 1 self . report ( f'Kill {msg[1:]}' ) [ worker . join ( ) for worker in self . _worker...
Kill all workers
242,339
def _install ( self , name , autoinstall ) : import importlib import pkg_resources spam_spec = importlib . util . find_spec ( name ) reinstall = False if spam_spec is not None : if self . _version : mod = importlib . __import__ ( name ) if hasattr ( mod , '__version__' ) : ver = mod . __version__ else : try : ver = pkg...
Check existence of Python module and install it using command pip install if necessary .
242,340
def execute_task ( task_id , verbosity = None , runmode = 'run' , sigmode = None , monitor_interval = 5 , resource_monitor_interval = 60 ) : tf = TaskFile ( task_id ) tf . status = 'running' try : signal . signal ( signal . SIGTERM , signal_handler ) res = _execute_task ( task_id , verbosity , runmode , sigmode , monit...
Execute single or master task return a dictionary
242,341
def textMD5 ( text ) : m = hash_md5 ( ) if isinstance ( text , str ) : m . update ( text . encode ( ) ) else : m . update ( text ) return m . hexdigest ( )
Get md5 of a piece of text
242,342
def objectMD5 ( obj ) : if hasattr ( obj , 'target_name' ) : return obj . target_name ( ) try : return textMD5 ( pickle . dumps ( obj ) ) except : return ''
Get md5 of an object
242,343
def fileMD5 ( filename , partial = True ) : filesize = os . path . getsize ( filename ) md5 = hash_md5 ( ) block_size = 2 ** 20 try : if ( not partial ) or filesize < 2 ** 24 : with open ( filename , 'rb' ) as f : while True : data = f . read ( block_size ) if not data : break md5 . update ( data ) else : count = 16 wi...
Calculate partial MD5 basically the first and last 8M of the file for large files . This should significantly reduce the time spent on the creation and comparison of file signature when dealing with large bioinformatics datasets .
242,344
def target_signature ( self ) : if self . exists ( ) : if not self . _md5 : self . _md5 = fileMD5 ( self ) return ( os . path . getmtime ( self ) , os . path . getsize ( self ) , self . _md5 ) elif ( self + '.zapped' ) . is_file ( ) : with open ( self + '.zapped' ) as sig : line = sig . readline ( ) _ , mtime , size , ...
Return file signature
242,345
def validate ( self , sig = None ) : if sig is not None : sig_mtime , sig_size , sig_md5 = sig else : try : with open ( self . sig_file ( ) ) as sig : sig_mtime , sig_size , sig_md5 = sig . read ( ) . strip ( ) . split ( ) except : return False if not self . exists ( ) : if ( self + '.zapped' ) . is_file ( ) : with ope...
Check if file matches its signature
242,346
def write_sig ( self ) : if not self . _md5 : self . _md5 = fileMD5 ( self ) with open ( self . sig_file ( ) , 'w' ) as sig : sig . write ( f'{os.path.getmtime(self)}\t{os.path.getsize(self)}\t{self._md5}' )
Write signature to sig store
242,347
def remove_targets ( self , type , kept = None ) : if kept is None : kept = [ i for i , x in enumerate ( self . _targets ) if not isinstance ( x , type ) ] if len ( kept ) == len ( self . _targets ) : return self self . _targets = [ self . _targets [ x ] for x in kept ] self . _labels = [ self . _labels [ x ] for x in ...
Remove targets of certain type
242,348
def resolve_remote ( self ) : for idx , target in enumerate ( self . _targets ) : if isinstance ( target , remote ) : resolved = target . resolve ( ) if isinstance ( resolved , str ) : resolved = interpolate ( resolved , env . sos_dict . dict ( ) ) self . _targets [ idx ] = file_target ( resolved ) . set ( ** target . ...
If target is of remote type resolve it
242,349
def _handle_paired_with ( self , paired_with ) : if paired_with is None or not paired_with : var_name = [ ] var_value = [ ] elif isinstance ( paired_with , str ) : var_name = [ '_' + paired_with ] if paired_with not in env . sos_dict : raise ValueError ( f'Variable {paired_with} does not exist.' ) var_value = [ env . s...
Handle input option paired_with
242,350
def _handle_group_with ( self , group_with ) : if group_with is None or not group_with : var_name = [ ] var_value = [ ] elif isinstance ( group_with , str ) : var_name = [ '_' + group_with ] if group_with not in env . sos_dict : raise ValueError ( f'Variable {group_with} does not exist.' ) var_value = [ env . sos_dict ...
Handle input option group_with
242,351
def _handle_extract_pattern ( self , pattern ) : if pattern is None or not pattern : patterns = [ ] elif isinstance ( pattern , str ) : patterns = [ pattern ] elif isinstance ( pattern , Iterable ) : patterns = pattern else : raise ValueError ( f'Unacceptable value for parameter pattern: {pattern}' ) for pattern in pat...
Handle input option pattern
242,352
def write ( self ) : if not self . output_files . valid ( ) : raise ValueError ( f'Cannot write signature with undetermined output {self.output_files}' ) else : if 'TARGET' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'TARGET' , f'write signature {self.sig_id} with outp...
Write signature file with signature of script input output and dependent files . Because local input and output files can only be determined after the execution of workflow . They are not part of the construction .
242,353
def clear_output ( output = None ) : for target in env . sos_dict [ '_output' ] if output is None else output : if isinstance ( target , file_target ) and target . exists ( ) : try : target . unlink ( ) except Exception as e : env . logger . warning ( f'Failed to remove {target}: {e}' )
Remove file targets in _output when a step fails to complete
242,354
def add_forward_workflow ( self , dag , sections , satisfies = None ) : dag . new_forward_workflow ( ) if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , f'Adding mini-workflow with {len(sections)} sections' ) default_input : sos_targets = sos_targets ( [ ] )...
Add a forward - workflow return number of nodes added
242,355
def initialize_dag ( self , targets : Optional [ List [ str ] ] = [ ] , nested : bool = False ) -> SoS_DAG : self . reset_dict ( ) dag = SoS_DAG ( name = self . md5 ) targets = sos_targets ( targets ) self . add_forward_workflow ( dag , self . workflow . sections ) if self . resolve_dangling_targets ( dag , targets ) =...
Create a DAG by analyzing sections statically .
242,356
def short_repr ( obj , noneAsNA = False ) : if obj is None : return 'unspecified' if noneAsNA else 'None' elif isinstance ( obj , str ) and len ( obj ) > 80 : return '{}...{}' . format ( obj [ : 60 ] . replace ( '\n' , '\\n' ) , obj [ - 20 : ] . replace ( '\n' , '\\n' ) ) elif isinstance ( obj , ( str , int , float , b...
Return a short representation of obj for clarity .
242,357
def tail_of_file ( filename , n , ansi2html = False ) : avg_line_length = 74 to_read = n with open ( filename ) as f : while 1 : try : f . seek ( - ( avg_line_length * to_read ) , 2 ) except IOError : f . seek ( 0 ) pos = f . tell ( ) lines = f . read ( ) . splitlines ( ) if len ( lines ) >= to_read or pos == 0 : if an...
Reads n lines from f with an offset of offset lines .
242,358
def sample_lines ( lines , n ) : if len ( lines ) <= n : return '' . join ( lines ) else : m = len ( lines ) return '' . join ( [ lines [ x * m // n + m // ( 2 * n ) ] for x in range ( n ) ] )
Draw a sample of n lines from filename largely evenly .
242,359
def set ( self , key , value ) : if hasattr ( value , 'labels' ) : if 'VARIABLE' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'VARIABLE' , f"Set {key} to {short_repr(value)} with labels {short_repr(value.labels)}" ) else : if 'VARIABLE' in env . config [ 'SOS_DEBUG' ] o...
A short cut to set value to key without triggering any logging or warning message .
242,360
def update ( self , obj ) : self . _dict . update ( obj ) for k , v in obj . items ( ) : if env . verbosity > 2 : self . _log ( k , v )
Redefine update to trigger logging message
242,361
def execute_substep ( stmt , global_def , global_vars , task = '' , task_params = '' , proc_vars = { } , shared_vars = [ ] , config = { } ) : assert not env . zmq_context . closed assert 'workflow_id' in proc_vars assert 'step_id' in proc_vars assert '_input' in proc_vars assert '_output' in proc_vars assert '_depends'...
Execute a substep with specific input etc
242,362
def files ( self ) : try : cur = self . conn . cursor ( ) cur . execute ( 'SELECT id, item FROM workflows WHERE entry_type = "tracked_files"' ) return [ ( x [ 0 ] , eval ( x [ 1 ] ) ) for x in cur . fetchall ( ) ] except sqlite3 . DatabaseError as e : env . logger . warning ( f'Failed to get files from signature databa...
Listing files related to workflows related to current directory
242,363
def find_executable ( self ) : if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , 'find_executable' ) for node in self . nodes ( ) : if node . _status is None : with_dependency = False for edge in self . in_edges ( node ) : if edge [ 0 ] . _status != 'complet...
Find an executable node which means nodes that has not been completed and has no input dependency .
242,364
def dangling ( self , targets : sos_targets ) : existing = [ ] missing = [ ] if env . config [ 'trace_existing' ] : for x in self . _all_depends_files . keys ( ) : if x not in self . _all_output_files : if x . target_exists ( ) : existing . append ( x ) else : missing . append ( x ) else : missing = [ x for x in self ....
returns 1 . missing targets which are missing from the DAG or from the provided targets 2 . existing targets of provided target list not in DAG
242,365
def subgraph_from ( self , targets : sos_targets ) : if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , 'create subgraph' ) subnodes = [ ] for node in self . nodes ( ) : if node . _output_targets . valid ( ) and any ( x in node . _output_targets for x in targ...
Trim DAG to keep only nodes that produce targets
242,366
def build ( self ) : if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] : env . log_to_file ( 'DAG' , 'build DAG' ) for wf in range ( self . _forward_workflow_id + 1 ) : indexed = [ x for x in self . nodes ( ) if x . _wf_index == wf ] indexed . sort ( key = lambda x : x . _node_index ) fo...
Connect nodes according to status of targets
242,367
def monitor_tasks ( self , tasks = None , status = None , age = None ) : self . engine_ready . wait ( ) if not tasks : tasks = self . task_status . keys ( ) else : tasks = [ x for x in tasks if x in self . task_status ] with threading . Lock ( ) : for task in tasks : if self . task_status [ task ] in ( 'submitted' , 'r...
Start monitoring specified or all tasks
242,368
def _submit_task_with_template ( self , task_ids ) : runtime = self . config runtime . update ( { 'workdir' : os . getcwd ( ) , 'cur_dir' : os . getcwd ( ) , 'verbosity' : env . verbosity , 'sig_mode' : env . config . get ( 'sig_mode' , 'default' ) , 'run_mode' : env . config . get ( 'run_mode' , 'run' ) , 'home_dir' :...
Submit tasks by interpolating a shell script defined in job_template
242,369
def is_type_hint ( stmt : str ) -> bool : if stmt . count ( '=' ) > 1 : return False if ':' not in stmt : return False if not stmt . split ( ':' ) [ 1 ] . strip ( ) : return False if '=' not in stmt : action , par = [ x . strip ( ) for x in stmt . split ( ':' , 1 ) ] else : action , par = [ x . strip ( ) for x in stmt ...
Try to differentiate
242,370
def indented_script ( self ) -> bool : leading = INDENTED . match ( self . _script ) return 0 if leading is None else len ( leading . group ( 2 ) )
check self . _script and see if it is indented
242,371
def category ( self ) -> Optional [ str ] : if self . statements : if self . statements [ - 1 ] [ 0 ] == ':' : def validDirective ( ) : if not self . values : return True if self . values [ - 1 ] . strip ( ) . endswith ( ',' ) : return False try : compile ( 'func(' + '' . join ( self . values ) + ')' , filename = '<str...
Determine the category of existing statement
242,372
def isValid ( self ) -> bool : if not self . values : return True try : if self . category ( ) == 'directive' : if self . values [ - 1 ] . strip ( ) . endswith ( ',' ) : self . error_msg = 'Trailing ,' return False try : compile ( 'func(' + '' . join ( self . values ) + ')' , filename = '<string>' , mode = 'eval' ) exc...
Determine if the statement expression or directive is valid . Otherwise the parser will continue until a valid multi - line expression or statement can be found .
242,373
def extend ( self , line : str ) -> None : if self . category ( ) == 'directive' : self . add_directive ( None , line ) elif self . category ( ) == 'script' : self . _script += line else : self . add_statement ( line )
Extend the current directive expression or script
242,374
def add_statement ( self , line : str , lineno : Optional [ int ] = None ) -> None : if self . category ( ) != 'statements' : self . values = [ line ] else : self . values . append ( line ) if self . statements and self . statements [ - 1 ] [ 0 ] == '!' : self . statements [ - 1 ] [ - 1 ] += line else : self . statemen...
statements are regular python statements
242,375
def get_tokens ( self ) -> str : def _get_tokens ( statement ) : return [ x [ 1 ] for x in generate_tokens ( StringIO ( statement ) . readline ) if x [ 1 ] not in ( '' , '\n' ) ] tokens : List = [ ] for statement in self . statements : tokens . extend ( _get_tokens ( statement [ 2 ] if statement [ 0 ] == ':' else state...
Get tokens after input statement
242,376
def show ( self ) : textWidth = max ( 60 , shutil . get_terminal_size ( ( 80 , 20 ) ) . columns ) text = f' {self.step_name() + ":":<21} ' + self . comment print ( '\n' . join ( textwrap . wrap ( text , width = textWidth , initial_indent = '' , subsequent_indent = ' ' * 24 ) ) ) local_parameters = { x : y for x , y in...
Output for command sos show
242,377
def extend ( self , workflow : 'SoS_Workflow' ) -> None : if not workflow . sections : return if not self . sections : self . sections = workflow . sections return section = workflow . sections [ 0 ] depends_idx = [ idx for idx , stmt in enumerate ( section . statements ) if stmt [ 0 ] == ':' and stmt [ 1 ] == 'depends...
Append another workflow to existing one to created a combined workflow
242,378
def add_comment ( self , line : str ) -> None : self . _last_comment += ( ' ' if self . _last_comment else '' ) + line . lstrip ( '#' ) . strip ( )
Keeping track of last comment for section and parameter
242,379
def workflow ( self , workflow_name : Optional [ str ] = None , use_default : bool = True ) -> SoS_Workflow : if workflow_name is None and not use_default : return SoS_Workflow ( self . content , '' , '' , self . sections , self . global_stmts ) allowed_steps = None if not workflow_name : wf_name = '' else : if '+' in ...
Return a workflow with name_step + name_step specified in wf_name This function might be called recursively because of nested workflow .
242,380
def print_help ( self , script_name : str ) : textWidth = max ( 60 , shutil . get_terminal_size ( ( 80 , 20 ) ) . columns ) if len ( script_name ) > 20 : print ( f'usage: sos run {script_name}' ) print ( ' [workflow_name | -t targets] [options] [workflow_options]' ) else : print ( f'usage: sos run {script...
print a help message from the script
242,381
def glob_wildcards ( pattern : str , files : Optional [ List [ str ] ] = None ) -> Dict [ str , Union [ List [ Any ] , List [ str ] ] ] : pattern = os . path . normpath ( pattern ) if sys . platform == 'win32' : pattern = pattern . replace ( '\\' , '/' ) first_wildcard = re . search ( "{[^{]" , pattern ) dirname = os ....
Glob the values of the wildcards by matching the given pattern to the filesystem . Returns a named tuple with a list of values for each wildcard .
242,382
def extract_pattern ( pattern : str , ifiles : List [ str ] ) -> Dict [ str , any ] : res = glob_wildcards ( pattern , [ ] ) for ifile in ifiles : matched = glob_wildcards ( pattern , [ ifile ] ) for key in matched . keys ( ) : if not matched [ key ] : res [ key ] . append ( None ) else : res [ key ] . extend ( matched...
This function match pattern to a list of input files extract and return pieces of filenames as a list of variables with keys defined by pattern .
242,383
def expand_pattern ( pattern : str ) -> List [ str ] : ofiles = [ ] sz = None res = glob_wildcards ( pattern , [ ] ) sz = None wildcard = [ { } ] for key in res . keys ( ) : if key not in env . sos_dict : raise ValueError ( f'Undefined variable {key} in pattern {pattern}' ) if not isinstance ( env . sos_dict [ key ] , ...
This function expand patterns against the current namespace and return a list of filenames
242,384
def interpolate ( text , global_dict = None , local_dict = None ) : try : return eval ( as_fstring ( text ) , global_dict , local_dict ) except Exception as e : raise ValueError ( f'Failed to interpolate {text}: {e}' )
Evaluate expressions in text
242,385
def SoS_eval ( expr : str , extra_dict : dict = { } ) -> Any : return eval ( expr , env . sos_dict . dict ( ) , extra_dict )
Evaluate an expression with sos dict .
242,386
def SoS_exec ( script : str , _dict : dict = None , return_result : bool = True ) -> None : if _dict is None : _dict = env . sos_dict . dict ( ) if not return_result : exec ( compile ( script , filename = stmtHash . hash ( script ) , mode = 'exec' ) , _dict ) return None try : stmts = list ( ast . iter_child_nodes ( as...
Execute a statement .
242,387
def expand_depends_files ( * args , ** kwargs ) : args = [ x . resolve ( ) if isinstance ( x , dynamic ) else x for x in args ] kwargs = { x : ( y . resolve ( ) if isinstance ( y , dynamic ) else y ) for x , y in kwargs . items ( ) } return sos_targets ( * args , ** kwargs , _verify_existence = True , _undetermined = F...
handle directive depends
242,388
def wait_for_subworkflows(self, workflow_results):
    '''Wait for results from subworkflows.

    Yields the socket once per pending subworkflow so the caller can poll
    it, then reads the reply: None terminates the process, an Exception is
    re-raised, and anything else is treated as success.
    '''
    # flatten the per-result lists of pending workflow ids
    pending = [wf_id for result in workflow_results
               for wf_id in result['pending_workflows']]
    for _ in pending:
        yield self.socket
        reply = self.socket.recv_pyobj()
        if reply is None:
            sys.exit(0)
        if isinstance(reply, Exception):
            raise reply
Wait for results from subworkflows
242,389
def Rmarkdown ( script = None , input = None , output = None , args = '{input:r}, output_file={output:ar}' , ** kwargs ) : if not R_library ( 'rmarkdown' ) . target_exists ( ) : raise RuntimeError ( 'Library rmarkdown does not exist' ) input = sos_targets ( collect_input ( script , input ) ) output = sos_targets ( outp...
Convert input file to output using Rmarkdown
242,390
# NOTE(review): the first argument to check_output is a bare name `f`, which
# looks like a garbled f-string (presumably the docker command that reads
# MemTotal from /proc/meminfo inside an `{image}` container — TODO confirm
# against upstream source). As written it would raise NameError, which the
# broad `except Exception` converts into tot_mem = None; the method caches
# the result (in Kb, per the `ret.split()[1]` parse) on self.tot_mem.
def total_memory ( self , image = 'ubuntu' ) : try : ret = subprocess . check_output ( f , shell = True , stdin = subprocess . DEVNULL ) self . tot_mem = int ( ret . split ( ) [ 1 ] ) except Exception : self . tot_mem = None return self . tot_mem
Get the available RAM of the docker machine in Kb.
242,391
def script(script, interpreter='', suffix='', args='', **kwargs):
    '''Execute `script` with the specified interpreter.

    Thin wrapper that builds a SoS_ExecuteScript action and runs it,
    passing the common action arguments through `kwargs`.
    '''
    action = SoS_ExecuteScript(script, interpreter, suffix, args)
    return action.run(**kwargs)
Execute the specified script using the specified interpreter. This action accepts common action arguments such as input, active, workdir, docker_image and args. In particular, the content of one or more files specified by the option input would be prepended before the specified script.
242,392
def stop_if(expr, msg='', no_output=False):
    '''Abort the current step or input group when `expr` is truthy.

    Raises StopInputGroup carrying `msg`; the group's output is kept
    unless `no_output` is set. Returns 0 when `expr` is falsy.
    '''
    if not expr:
        return 0
    raise StopInputGroup(msg=msg, keep_output=not no_output)
Abort the execution of the current step or loop, with a warning message msg, if expr is True.
242,393
def download ( URLs , dest_dir = '.' , dest_file = None , decompress = False , max_jobs = 5 ) : if env . config [ 'run_mode' ] == 'dryrun' : print ( f'HINT: download\n{URLs}\n' ) return None if isinstance ( URLs , str ) : urls = [ x . strip ( ) for x in URLs . split ( ) if x . strip ( ) ] else : urls = list ( URLs ) if...
Download files from specified URL which should be space tab or newline separated URLs . The files will be downloaded to specified destination . If filename . md5 files are downloaded they are used to validate downloaded filename . Unless otherwise specified compressed files are decompressed . If max_jobs is given a max...
242,394
def run(script, args='', **kwargs):
    '''Execute `script` using bash on POSIX platforms.

    On win32, or when the script carries its own shebang line, no explicit
    interpreter is set; otherwise /bin/bash is used with `-ev` unless the
    caller supplied its own `args`.
    '''
    use_bash = sys.platform != 'win32' and not script.startswith('#!')
    if use_bash:
        interpreter = '/bin/bash'
        if not args:
            args = '-ev {filename:q}'
    else:
        interpreter = ''
    return SoS_ExecuteScript(script, interpreter, '', args).run(**kwargs)
Execute specified script using bash . This action accepts common action arguments such as input active workdir docker_image and args . In particular content of one or more files specified by option input would be prepended before the specified script .
242,395
def pandoc ( script = None , input = None , output = None , args = '{input:q} --output {output:q}' , ** kwargs ) : if not executable ( 'pandoc' ) . target_exists ( ) : raise RuntimeError ( 'pandoc not found' ) input = sos_targets ( collect_input ( script , input ) ) output = sos_targets ( output ) if len ( output ) == ...
Convert input file to output using pandoc
242,396
def get_changed_vars ( section : SoS_Step ) : if 'shared' not in section . options : return set ( ) changed_vars = set ( ) svars = section . options [ 'shared' ] if isinstance ( svars , str ) : changed_vars . add ( svars ) svars = { svars : svars } elif isinstance ( svars , Sequence ) : for item in svars : if isinstanc...
Changed vars are variables that are shared and therefore provided to other steps.
242,397
def get_all_used_vars ( section ) : all_used_vars = set ( ) for statement in section . statements : if statement [ 0 ] == '=' : all_used_vars |= accessed_vars ( '=' . join ( statement [ 1 : 3 ] ) ) elif statement [ 0 ] == '!' : all_used_vars |= accessed_vars ( statement [ 1 ] ) elif statement [ 0 ] == ':' : all_used_va...
Get variables which are variables used by input statement and statements before it
242,398
def get_signature_vars ( section ) : signature_vars = set ( section . parameters . keys ( ) & accessed_vars ( strip_param_defs ( section . global_stmts ) ) ) input_idx = find_statement ( section , 'input' ) after_input_idx = 0 if input_idx is None else input_idx + 1 for statement in section . statements [ after_input_i...
Get signature variables which are variables that will be saved with step signatures
242,399
def get_step_input ( section , default_input ) : step_input : sos_targets = sos_targets ( ) dynamic_input = True input_idx = find_statement ( section , 'input' ) if input_idx is None : return step_input , dynamic_input stmt = section . statements [ input_idx ] [ 2 ] try : svars = [ 'output_from' , 'named_output' , 'sos...
Find step input