Dataset columns: idx (int64, 0 to 252k), question (string, lengths 48 to 5.28k), target (string, lengths 5 to 1.23k).
10,200
def filter ( self , filters ) : new_elements = [ e for e in self . elements if all ( function ( e ) for function in filters ) ] return Pileup ( self . locus , new_elements )
Apply filters to the pileup elements and return a new Pileup with the filtered elements removed .
10,201
def new_task ( func ) : @ wraps ( func ) async def wrapper ( self , * args , ** kwargs ) : loop = get_event_loop ( ) loop . create_task ( func ( self , * args , ** kwargs ) ) return wrapper
Runs the decorated function in a new task
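A minimal, runnable sketch of how this decorator behaves, assuming only asyncio and functools from the standard library; the Worker class and ping coroutine are hypothetical names used for illustration.

import asyncio
from functools import wraps

def new_task(func):
    # Same shape as the snippet above: schedule the wrapped coroutine on the
    # event loop instead of awaiting it, so the caller returns immediately.
    @wraps(func)
    async def wrapper(self, *args, **kwargs):
        loop = asyncio.get_event_loop()
        loop.create_task(func(self, *args, **kwargs))
    return wrapper

class Worker:
    @new_task
    async def ping(self):
        print('pinged in the background')

async def main():
    await Worker().ping()   # returns right away, the work happens as a task
    await asyncio.sleep(0)  # yield control so the scheduled task can run

asyncio.run(main())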
10,202
async def providers ( ) : for provider in settings . ANALYTICS_PROVIDERS : cls : BaseAnalytics = import_class ( provider [ 'class' ] ) yield await cls . instance ( * provider [ 'args' ] )
Iterates over all instances of analytics providers found in the configuration.
10,203
async def page_view ( self , url : str , title : str , user_id : str , user_lang : str = '' ) -> None : raise NotImplementedError
Track the view of a page
10,204
def hash_user_id ( self , user_id : str ) -> str : h = sha256 ( ) h . update ( user_id . encode ( ) ) return h . hexdigest ( )
Anonymize the user identifier before sending it, as required by law.
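A standalone sketch of the same anonymization step, assuming nothing beyond hashlib from the standard library; the example identifier is made up.

from hashlib import sha256

def hash_user_id(user_id: str) -> str:
    # SHA-256 of the UTF-8 encoded identifier, returned as a hex string,
    # mirroring the method above.
    h = sha256()
    h.update(user_id.encode())
    return h.hexdigest()

print(hash_user_id('user-42'))  # deterministic 64-character hex digest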
10,205
def delete ( cls , uuid ) : to_delete = Workflow . query . get ( uuid ) db . session . delete ( to_delete )
Delete a workflow .
10,206
def run_worker ( wname , data , engine_uuid_hex = None , ** kwargs ) : if 'stop_on_halt' not in kwargs : kwargs [ 'stop_on_halt' ] = False if engine_uuid_hex : engine_uuid = uuid . UUID ( hex = engine_uuid_hex ) engine = WorkflowEngine . from_uuid ( uuid = engine_uuid , ** kwargs ) else : engine = WorkflowEngine . with...
Run a workflow by name with list of data objects .
10,207
def restart_worker ( uuid , ** kwargs ) : if 'stop_on_halt' not in kwargs : kwargs [ 'stop_on_halt' ] = False engine = WorkflowEngine . from_uuid ( uuid = uuid , ** kwargs ) if "data" not in kwargs : objects = workflow_object_class . query ( id_workflow = uuid ) else : data = kwargs . pop ( "data" ) if not isinstance (...
Restart workflow from beginning with given engine UUID and any data .
10,208
def get_workflow_object_instances ( data , engine ) : workflow_objects = [ ] data_type = engine . get_default_data_type ( ) for data_object in data : if isinstance ( data_object , workflow_object_class . _get_current_object ( ) ) : if not data_object . data_type : data_object . data_type = data_type if data_object . id...
Analyze data and create corresponding WorkflowObjects .
10,209
def create_data_object_from_data ( data_object , engine , data_type ) : return workflow_object_class . create ( data = data_object , id_workflow = engine . uuid , status = workflow_object_class . known_statuses . INITIAL , data_type = data_type , )
Create a new WorkflowObject from given data and return it .
10,210
def _print_rst ( self , what ) : print print "Command - %s::" % what exec ( "h = self.do_%s.__doc__" % what ) h = textwrap . dedent ( h ) . replace ( "::\n\n" , "" ) h = textwrap . dedent ( h ) . replace ( "\n" , "\n " ) print h
Prints the rst page of the command given by what.
10,211
def load_json ( cls , data , default_rule = None , raise_error = False ) : rules = { k : _parser . parse_rule ( v , raise_error ) for k , v in json . loads ( data ) . items ( ) } return cls ( rules , default_rule )
Allow loading of JSON rule data .
10,212
def from_dict ( cls , rules_dict : dict , default_rule = None , raise_error = False ) : rules = { k : _parser . parse_rule ( v , raise_error ) for k , v in rules_dict . items ( ) } return cls ( rules , default_rule )
Allow loading of rule data from a dictionary .
10,213
def _set_rules ( self , rules : dict , overwrite = True ) : if not isinstance ( rules , dict ) : raise TypeError ( 'rules must be an instance of dict or Rules,' 'got %r instead' % type ( rules ) ) if overwrite : self . rules = Rules ( rules , self . default_rule ) else : self . rules . update ( rules )
Create a new Rules object based on the provided dict of rules.
10,214
def load_rules ( self , force_reload = False , overwrite = True ) : if self . load_once and self . _policy_loaded : return with self . _load_lock : if self . load_once and self . _policy_loaded : return reloaded , data = _cache . read_file ( self . policy_file , force_reload = force_reload ) self . _policy_loaded = Tru...
Load rules from policy file or cache .
10,215
def enforce ( self , rule , target , creds , exc = None , * args , ** kwargs ) : self . load_rules ( ) if isinstance ( rule , checks . BaseCheck ) : result = rule ( target , creds , self , rule ) elif not self . rules : result = False else : try : result = self . rules [ rule ] ( target , creds , self , rule ) except K...
Checks authorization of a rule against the target and credentials .
10,216
def get_flattened_bsp_keys_from_schema ( schema ) : keys = [ ] for key in schema . declared_fields . keys ( ) : field = schema . declared_fields [ key ] if isinstance ( field , mm . fields . Nested ) and isinstance ( field . schema , BoundSpatialPoint ) : keys . append ( "{}.{}" . format ( key , "position" ) ) return k...
Returns the flattened keys of BoundSpatialPoints in a schema
10,217
def lock ( self ) -> asyncio . Lock : if self . lock_key not in self . request . custom_content : self . request . custom_content [ self . lock_key ] = asyncio . Lock ( ) return self . request . custom_content [ self . lock_key ]
Return the lock for this request, generating it if required.
10,218
async def get_value ( self ) : cc = self . request . custom_content async with self . lock : if self . content_key not in cc : cc [ self . content_key ] = await self . call_api ( ) return cc [ self . content_key ]
Get the value from the API. A lock is used so that the value is not fetched twice at the same time.
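A self-contained sketch of the fetch-once-under-a-lock pattern the two methods above implement; the cache dict and fetch_expensive coroutine are illustrative stand-ins for request.custom_content and call_api.

import asyncio

async def fetch_expensive():
    # Stand-in for the real API call; pretend it is slow.
    await asyncio.sleep(0.01)
    return 42

async def get_value(cache, lock, key='value'):
    # Only the first caller performs the fetch, everyone else gets the cached result.
    async with lock:
        if key not in cache:
            cache[key] = await fetch_expensive()
        return cache[key]

async def main():
    cache, lock = {}, asyncio.Lock()
    results = await asyncio.gather(*(get_value(cache, lock) for _ in range(3)))
    print(results)  # [42, 42, 42] -- fetch_expensive() ran only once

asyncio.run(main())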
10,219
async def rank ( self ) -> Optional [ float ] : if not self . request . has_layer ( l . RawText ) : return tl = self . request . get_layer ( l . RawText ) matcher = Matcher ( [ tuple ( Trigram ( y ) for y in x ) for x in await self . intent . strings ( self . request ) ] ) return matcher % Trigram ( tl . text )
If there is a text layer inside the request try to find a matching text in the specified intent .
10,220
def _rank_qr ( self , choices ) : from bernard . platforms . facebook import layers as fbl try : qr = self . request . get_layer ( fbl . QuickReply ) self . chosen = choices [ qr . slug ] self . slug = qr . slug if self . when is None or self . when == qr . slug : return 1.0 except KeyError : pass
Look for the QuickReply layer's slug in the available choices.
10,221
async def _rank_text ( self , choices ) : tl = self . request . get_layer ( l . RawText ) best = 0.0 for slug , params in choices . items ( ) : strings = [ ] if params [ 'intent' ] : intent = getattr ( intents , params [ 'intent' ] ) strings += await intent . strings ( self . request ) if params [ 'text' ] : strings . ...
Try to match the TextLayer with the choices' intents.
10,222
def check_recommended_attributes ( self , dataset ) : results = [ ] recommended_ctx = TestCtx ( BaseCheck . MEDIUM , 'Recommended global attributes' ) for attr in [ 'time_coverage_duration' , 'time_coverage_resolution' ] : attr_value = getattr ( dataset , attr , '' ) try : parse_duration ( attr_value ) recommended_ctx ...
Feature type specific check of global recommended attributes .
10,223
def check_dimensions ( self , dataset ) : required_ctx = TestCtx ( BaseCheck . HIGH , 'All geophysical variables are time-series incomplete feature types' ) message = '{} must be a valid timeseries feature type. It must have dimensions of (timeSeries, time).' message += ' And all coordinates must have dimensions of (ti...
Checks that the feature types of this dataset are consistent with a timeseries-incomplete dataset.
10,224
def read_file ( filename : str , force_reload = False ) : if force_reload : _delete_cached_file ( filename ) reloaded = False mtime = os . path . getmtime ( filename ) cache_info = CACHE . setdefault ( filename , { } ) if not cache_info or mtime > cache_info . get ( 'mtime' , 0 ) : LOG . debug ( 'Reloading cached file ...
Read a file if it has been modified .
10,225
def use_model_attr ( attr ) : def use_model_validator ( instance , attribute , value ) : getattr ( instance , attr ) ( instance , attribute , value ) return use_model_validator
Use the validator set on a separate attribute on the class .
10,226
def is_creation_model ( instance , attribute , value ) : creation_name = value . get ( 'name' ) if not isinstance ( creation_name , str ) : instance_name = instance . __class__ . __name__ err_str = ( "'name' must be given as a string in the '{attr}' " "parameter of a '{cls}'. Given " "'{value}'" ) . format ( attr = att...
Must include at least a name key .
10,227
def is_manifestation_model ( instance , attribute , value ) : instance_name = instance . __class__ . __name__ is_creation_model ( instance , attribute , value ) manifestation_of = value . get ( 'manifestationOfWork' ) if not isinstance ( manifestation_of , str ) : err_str = ( "'manifestationOfWork' must be given as a s...
Must include a manifestationOfWork key .
10,228
def add_preprocessor ( preproc ) : def decorator ( func ) : func = ScriptAdaptor . _wrap ( func ) func . _add_preprocessor ( preproc ) return func return decorator
Define a preprocessor to run after the arguments are parsed and before the function is executed when running in console script mode .
10,229
def add_postprocessor ( postproc ) : def decorator ( func ) : func = ScriptAdaptor . _wrap ( func ) func . _add_postprocessor ( postproc ) return func return decorator
Define a postprocessor to run after the function is executed when running in console script mode .
10,230
def _setup_logging ( args ) : log_conf = getattr ( args , 'logging' , None ) if log_conf : logging . config . fileConfig ( log_conf ) else : logging . basicConfig ( )
Set up logging for the script based on the configuration specified by the logging attribute of the command line arguments .
10,231
def setup_limits ( conf_file , limits_file , do_reload = True , dry_run = False , debug = False ) : if dry_run : debug = True conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) limits_key = conf [ 'control' ] . get ( 'limits_key' , 'limits' ) control_channel = conf [ 'control' ] . get ( 'chan...
Set up or update limits in the Redis database .
10,232
def make_limit_node ( root , limit ) : limit_node = etree . SubElement ( root , 'limit' , { 'class' : limit . _limit_full_name } ) for attr in sorted ( limit . attrs ) : desc = limit . attrs [ attr ] attr_type = desc . get ( 'type' , str ) value = getattr ( limit , attr ) if 'default' in desc : default = ( desc [ 'defa...
Given a Limit object generate an XML node .
10,233
def dump_limits ( conf_file , limits_file , debug = False ) : conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) limits_key = conf [ 'control' ] . get ( 'limits_key' , 'limits' ) lims = [ limits . Limit . hydrate ( db , msgpack . loads ( lim ) ) for lim in db . zrange ( limits_key , 0 , - 1 )...
Dump the current limits from the Redis database .
10,234
def remote_daemon ( conf_file ) : eventlet . monkey_patch ( ) conf = config . Config ( conf_file = conf_file ) daemon = remote . RemoteControlDaemon ( None , conf ) daemon . serve ( )
Run the external control daemon .
10,235
def turnstile_command ( conf_file , command , arguments = [ ] , channel = None , debug = False ) : conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) control_channel = conf [ 'control' ] . get ( 'channel' , 'control' ) command = command . lower ( ) ts_conv = False if command == 'ping' : if ar...
Issue a command to all running control daemons .
10,236
def compactor_daemon ( conf_file ) : eventlet . monkey_patch ( ) conf = config . Config ( conf_file = conf_file ) compactor . compactor ( conf )
Run the compactor daemon .
10,237
def _wrap ( cls , func ) : if isinstance ( func , cls ) : return func return functools . update_wrapper ( cls ( func ) , func )
Ensures that the function is wrapped in a ScriptAdaptor object. If it is not, a new ScriptAdaptor will be returned; if it is, the existing ScriptAdaptor is returned.
10,238
def setup_args ( self , parser ) : for args , kwargs in self . _arguments : parser . add_argument ( * args , ** kwargs )
Set up an argparse.ArgumentParser object by adding all the arguments taken by the function.
10,239
def get_kwargs ( self , args ) : kwargs = { } argspec = inspect . getargspec ( self . _func ) required = set ( argspec . args [ : - len ( argspec . defaults ) ] if argspec . defaults else argspec . args ) for arg_name in argspec . args : try : kwargs [ arg_name ] = getattr ( args , arg_name ) except AttributeError : if...
Given a Namespace object drawn from argparse, determine the keyword arguments to pass to the underlying function. Note that if the underlying function accepts all keyword arguments, the dictionary returned will contain the entire contents of the Namespace object. Also note that an AttributeError will be raised if any ...
10,240
def console ( self ) : parser = argparse . ArgumentParser ( description = self . description ) self . setup_args ( parser ) args = parser . parse_args ( ) for proc in self . _preprocess : try : proc ( args ) except Exception as exc : if getattr ( args , 'debug' , False ) : raise return str ( exc ) result = self . safe_...
Call the function as a console script. Command line arguments are parsed, preprocessors are called, then the function is called. If a debug attribute is set by the command line arguments and it is True, any exception raised by the underlying function will be reraised; otherwise the return value will be either the retur...
10,241
def import_class ( name : Text ) -> Type : parts = name . split ( '.' ) module_name = parts [ : - 1 ] class_name = parts [ - 1 ] module_ = importlib . import_module ( '.' . join ( module_name ) ) return getattr ( module_ , class_name )
Import a class based on its full name .
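A runnable sketch of the same helper; rpartition is used here instead of the original split-and-join, which does not change the behaviour for dotted names.

import importlib
from typing import Type

def import_class(name: str) -> Type:
    # "package.module.ClassName" -> import package.module, then fetch ClassName.
    module_name, _, class_name = name.rpartition('.')
    module_ = importlib.import_module(module_name)
    return getattr(module_, class_name)

OrderedDict = import_class('collections.OrderedDict')
print(OrderedDict)  # <class 'collections.OrderedDict'>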
10,242
def make_ro ( obj : Any , forgive_type = False ) : if isinstance ( obj , ( str , bytes , int , float , bool , RoDict , RoList ) ) or obj is None : return obj elif isinstance ( obj , Mapping ) : return RoDict ( obj , forgive_type ) elif isinstance ( obj , Sequence ) : return RoList ( obj , forgive_type ) elif forgive_ty...
Make a JSON-serializable type recursively read-only.
10,243
def make_rw ( obj : Any ) : if isinstance ( obj , RoDict ) : return { k : make_rw ( v ) for k , v in obj . items ( ) } elif isinstance ( obj , RoList ) : return [ make_rw ( x ) for x in obj ] else : return obj
Copy a RO object into a RW structure made with standard Python classes .
10,244
def patch_qs ( url : Text , data : Dict [ Text , Text ] ) -> Text : qs_id = 4 p = list ( urlparse ( url ) ) qs = parse_qsl ( p [ qs_id ] ) patched_qs = list ( chain ( filter ( lambda x : x [ 0 ] not in data , qs ) , data . items ( ) , ) ) p [ qs_id ] = urlencode ( patched_qs ) return urlunparse ( p )
Given a URL, change the query string to include the values specified in the dictionary.
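A self-contained sketch of the query-string patching above, using only urllib.parse and itertools; the example URL is made up.

from itertools import chain
from urllib.parse import urlparse, urlunparse, parse_qsl, urlencode

def patch_qs(url: str, data: dict) -> str:
    # Index 4 of the urlparse() 6-tuple is the query string.
    parts = list(urlparse(url))
    qs = parse_qsl(parts[4])
    patched = list(chain(
        ((k, v) for k, v in qs if k not in data),  # keep params we are not overriding
        data.items(),                              # then add / override with the new values
    ))
    parts[4] = urlencode(patched)
    return urlunparse(parts)

print(patch_qs('https://example.com/search?a=1&b=2', {'b': '3'}))
# https://example.com/search?a=1&b=3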
10,245
def dict_is_subset ( subset : Any , full_set : Any ) -> bool : if not isinstance ( subset , full_set . __class__ ) : return False elif isinstance ( subset , dict ) : for k , v in subset . items ( ) : if k not in full_set or not dict_is_subset ( v , full_set [ k ] ) : return False return True elif isinstance ( subset , ...
Checks that all keys present in subset are also present and have the same value in full_set. If a key is in full_set but not in subset, True will still be returned.
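A minimal runnable sketch of the check described above, covering nested dicts and scalar leaves only; the original (truncated above) also walks sequences element by element.

from typing import Any

def dict_is_subset(subset: Any, full_set: Any) -> bool:
    if not isinstance(subset, full_set.__class__):
        return False
    if isinstance(subset, dict):
        # Every key in the subset must exist in full_set with a matching value.
        return all(k in full_set and dict_is_subset(v, full_set[k])
                   for k, v in subset.items())
    return subset == full_set

print(dict_is_subset({'a': {'b': 1}}, {'a': {'b': 1, 'c': 2}}))  # True
print(dict_is_subset({'a': {'b': 2}}, {'a': {'b': 1, 'c': 2}}))  # False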
10,246
def _compile ( self , expression ) : x = self . RE_PYTHON_VAR . sub ( '(?:\\1,)' , expression ) x = self . RE_SPACES . sub ( '' , x ) return re . compile ( x )
Transform a class expression into an actual regex.
10,247
def _make_string ( self , objects : List [ Any ] ) -> Text : return '' . join ( x . __class__ . __name__ + ',' for x in objects )
Transforms a list of objects into a matchable string
10,248
def match ( self , objects : List [ Any ] ) -> bool : s = self . _make_string ( objects ) m = self . _compiled_expression . match ( s ) return m is not None
Return True if the list of objects matches the expression .
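The three snippets above belong to one small matcher; here is a condensed, runnable sketch of how they fit together. The class name ClassExpMatcher and the RE_PYTHON_VAR and RE_SPACES patterns are assumptions, since only the attribute names appear in the original.

import re

class ClassExpMatcher:
    RE_PYTHON_VAR = re.compile(r'([A-Za-z_][A-Za-z0-9_]*)')  # assumed pattern
    RE_SPACES = re.compile(r'\s+')

    def __init__(self, expression):
        # "int str*" -> "(?:int,)(?:str,)*": each class name becomes a token
        # followed by a comma, spaces are stripped, the rest stays regex syntax.
        x = self.RE_PYTHON_VAR.sub(r'(?:\1,)', expression)
        self._compiled_expression = re.compile(self.RE_SPACES.sub('', x))

    def _make_string(self, objects):
        # A list of objects becomes "Type0,Type1,..." so the regex can match it.
        return ''.join(x.__class__.__name__ + ',' for x in objects)

    def match(self, objects):
        return self._compiled_expression.match(self._make_string(objects)) is not None

print(ClassExpMatcher('int str*').match([1, 'a', 'b']))  # True
print(ClassExpMatcher('int str*').match(['a', 1]))       # False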
10,249
def get_conf ( conf , sect , opt ) : argu = getattr ( args , "mambupy_" + opt . lower ( ) ) if not argu : envir = os . environ . get ( "MAMBUPY_" + opt . upper ( ) ) if not envir : try : return conf . get ( sect , opt ) except NoSectionError : return default_configs [ opt ] return envir return argu
Gets a config opt from conf file under section sect .
10,250
def iso8601timestamp ( T = None , nanos = True , utc = False ) : T = time . time ( ) if T is None else T Ti = math . floor ( T ) Tn = round ( ( T - Ti ) * 1e9 ) if Tn >= 1e9 : Ti += 1 Tn = 0 s = time . gmtime ( Ti ) if utc else time . localtime ( Ti ) f = time . strftime ( "%Y%m%dT%H%M%S" , s ) n = ".{:09d}" . format (...
Get an ISO8601-formatted timestamp string.
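A runnable sketch of the timestamp logic above, assuming only math and time from the standard library; the handling of any trailing timezone marker in the original is truncated, so this sketch stops at the nanosecond fraction.

import math
import time

def iso8601timestamp(T=None, nanos=True, utc=False):
    T = time.time() if T is None else T
    Ti = math.floor(T)           # whole seconds
    Tn = round((T - Ti) * 1e9)   # nanosecond remainder
    if Tn >= 1e9:                # guard against rounding up to a full second
        Ti += 1
        Tn = 0
    s = time.gmtime(Ti) if utc else time.localtime(Ti)
    out = time.strftime("%Y%m%dT%H%M%S", s)
    if nanos:
        out += ".{:09d}".format(int(Tn))
    return out

print(iso8601timestamp(0, utc=True))  # 19700101T000000.000000000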
10,251
def createWorkDir ( baseDir , projName , expUUID , expNames = [ ] , nanos = True , utc = False ) : projDir = os . path . join ( baseDir , projName ) byuuidDir = os . path . join ( projDir , "by-uuid" ) bytimeDir = os . path . join ( projDir , "by-time" ) bynameDir = os . path . join ( projDir , "by-name" , * expNames )...
Create a working directory for the experiment if it does not already exist.
10,252
def humanize_timesince ( start_time ) : if not start_time : return start_time delta = local_now ( ) - start_time if delta . total_seconds ( ) < 0 : return 'a few seconds ago' num_years = delta . days // 365 if num_years > 0 : return '{} year{} ago' . format ( * ( ( num_years , 's' ) if num_years > 1 else ( num_years , ...
Creates a string representation of time since the given start_time .
10,253
def humanize_timedelta ( seconds ) : hours , remainder = divmod ( seconds , 3600 ) days , hours = divmod ( hours , 24 ) minutes , seconds = divmod ( remainder , 60 ) if days : result = '{}d' . format ( days ) if hours : result += ' {}h' . format ( hours ) if minutes : result += ' {}m' . format ( minutes ) return result...
Creates a string representation of timedelta .
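A runnable sketch of the divmod cascade above; the original's tail is truncated, so the exact formatting of the smallest units is an assumption.

def humanize_timedelta(seconds):
    hours, remainder = divmod(seconds, 3600)
    days, hours = divmod(hours, 24)
    minutes, secs = divmod(remainder, 60)
    if days:
        return '{}d {}h {}m'.format(days, hours, minutes)
    if hours:
        return '{}h {}m'.format(hours, minutes)
    if minutes:
        return '{}m {}s'.format(minutes, secs)
    return '{}s'.format(secs)

print(humanize_timedelta(90061))  # 1d 1h 1m
print(humanize_timedelta(125))    # 2m 5s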
10,254
def start ( self ) : if self . _http_last_send is not None : raise RuntimeError ( 'HttpMock has already been started' ) super ( HttpMock , self ) . start ( ) self . _patch_last_send ( )
Overrides default start behaviour by raising ConnectionError instead of the custom requests_mock.exceptions.NoMockAddress.
10,255
def unhandle ( self , handler ) : h , _ , _ = self . _extract ( handler ) key = hash ( h ) with self . _hlock : if key not in self . handlers : raise ValueError ( 'Handler "%s" was not found' % str ( h ) ) handlers = self . handlers . copy ( ) del handlers [ key ] self . handlers = handlers return self
Unregisters a handler
10,256
def fire ( self , * args , ** kw ) : result = [ ] with self . _hlock : handlers = self . handlers if self . threads == 0 : for k in handlers : h , m , t = handlers [ k ] try : r = self . _memoize ( h , m , t , * args , ** kw ) result . append ( tuple ( r ) ) except : result . append ( ( False , self . _error ( sys . ex...
Stores all registered handlers in a queue for processing
10,257
def clear ( self ) : with self . _hlock : self . handlers . clear ( ) with self . _mlock : self . memoize . clear ( )
Discards all registered handlers and cached results
10,258
def _timeout ( self , timeout , handler , * args , ** kw ) : t = spawn_thread ( target = handler , args = args , kw = kw ) t . daemon = True t . start ( ) t . join ( timeout ) if not t . is_alive ( ) : if t . exc_info : return t . exc_info return t . result else : try : msg = '[%s] Execution was forcefully terminated' ...
Controls the time allocated for the execution of a method
10,259
def _threads ( self , handlers ) : if self . threads < len ( handlers ) : return self . threads return len ( handlers )
Calculates maximum number of threads that will be started
10,260
def update ( self ) : if not self . _ok : self . log_error ( "Trying to restore OK mode w/ soft reset" ) self . _ok = self . _soft_reset ( ) try : self . _bus . write_byte ( self . _i2c_add , CMD_READ_TEMP_NOHOLD ) sleep ( MEASUREMENT_WAIT_TIME ) buf_t = self . _bus . read_i2c_block_data ( self . _i2c_add , CMD_READ_TE...
Read raw data and calculate temperature and humidity .
10,261
def get_owner_access_token ( self ) : from . database import Session db_session = Session . object_session ( self ) owner = db_session . query ( User ) . filter_by ( id_ = self . owner_id ) . first ( ) return owner . access_token
Return workflow owner access token .
10,262
def update_workflow_status ( db_session , workflow_uuid , status , new_logs = '' , message = None ) : try : workflow = db_session . query ( Workflow ) . filter_by ( id_ = workflow_uuid ) . first ( ) if not workflow : raise Exception ( 'Workflow {0} doesn\'t exist in database.' . format ( workflow_uuid ) ) if status : w...
Update database workflow status .
10,263
def parse_server_addr ( str_addr , default_port = 26000 ) : m = ADDR_STR_RE . match ( str_addr ) if m is None : raise ValueError ( 'Bad address string "{0}"' . format ( str_addr ) ) dct = m . groupdict ( ) port = dct . get ( 'port' ) if port is None : port = default_port else : port = int ( port ) if port == 0 : raise ...
Parse an address string and return the host and port.
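A runnable sketch of the parsing above; the real ADDR_STR_RE pattern and the exact error raised for port 0 are not shown in the truncated original, so both are assumptions here.

import re

ADDR_STR_RE = re.compile(r'^(?P<host>[^:]+)(?::(?P<port>\d+))?$')  # assumed pattern

def parse_server_addr(str_addr, default_port=26000):
    m = ADDR_STR_RE.match(str_addr)
    if m is None:
        raise ValueError('Bad address string "{0}"'.format(str_addr))
    dct = m.groupdict()
    port = default_port if dct['port'] is None else int(dct['port'])
    if port == 0:
        raise ValueError('Port cannot be 0')  # assumed message
    return dct['host'], port

print(parse_server_addr('example.com:27015'))  # ('example.com', 27015)
print(parse_server_addr('example.com'))        # ('example.com', 26000)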
10,264
def request_goto ( self , tc = None ) : if not tc : tc = TextHelper ( self . editor ) . word_under_cursor ( select_whole_word = True ) if not self . _definition or isinstance ( self . sender ( ) , QAction ) : self . select_word ( tc ) if self . _definition is not None : QTimer . singleShot ( 100 , self . _goto_def )
Request a go to assignment .
10,265
def get_template ( name ) : path = os . path . join ( base_dir , name ) if path not in templates : try : templates [ path ] = Template ( path ) except IOError : return None return copy . deepcopy ( templates [ path ] )
Return a copy of the template with the specified name. If it is not found or an error occurs during the load, return None.
10,266
def set_value ( self , eid , val , idx = '*' ) : if eid in self . __element_ids : elems = self . __element_ids [ eid ] if type ( val ) in SEQ_TYPES : idx = 0 if idx == '*' : for elem in elems : self . __set_value ( eid , elem , val , idx ) elif idx < len ( elems ) : self . __set_value ( eid , elems [ idx ] , val , idx ...
Set the content of an xml element marked with the matching eid attribute .
10,267
def set_attribute ( self , aid , attrib , val , idx = '*' ) : if aid in self . __attrib_ids : elems = self . __attrib_ids [ aid ] if idx == '*' : for elem in elems : self . __set_attribute ( elem , attrib , val ) elif idx < len ( elems ) : elem = elems [ idx ] self . __set_attribute ( elem , attrib , val )
Set the value of an xml attribute marked with the matching aid attribute .
10,268
def hide ( self , eid , index = 0 ) : elems = None if eid in self . __element_ids : elems = self . __element_ids [ eid ] elif eid in self . __repeat_ids : elems = self . __repeat_ids [ eid ] if elems and index < len ( elems ) : elem = elems [ index ] elem . parent . children . remove ( elem )
Hide the element with the matching eid. If there is no match, look for an element with a matching rid.
10,269
def repeat ( self , rid , count , index = 0 ) : elems = None if rid in self . __repeat_ids : elems = self . __repeat_ids [ rid ] elif rid in self . __element_ids : elems = self . __element_ids if elems and index < len ( elems ) : elem = elems [ index ] self . __repeat ( elem , count )
Repeat an xml element marked with the matching rid .
10,270
def replace ( self , eid , replacement , index = 0 ) : if eid in self . __element_ids : elems = self . __element_ids [ eid ] elif eid in self . __repeat_ids : elems = self . __repeat_ids [ eid ] else : return if index < len ( elems ) : elem = elems [ index ] current_pos = elem . parent . children . index ( elem ) elem ...
Replace an xml element marked with the matching eid. If the replacement value is an Element or TextElement, it's swapped in untouched. If it's a Template, the children of the root element in the template are used. Otherwise the replacement value is wrapped in a TextElement.
10,271
def set_hasher ( self , hash , rounds = None ) : hash = hash . replace ( '-' , '_' ) if hash not in VALID_HASHERS : raise WrongHashAlgorithm ( WRONG_HASH_MESSAGE ) hasher = getattr ( ph , hash ) utils . test_hasher ( hasher ) default_rounds = getattr ( hasher , 'default_rounds' , 1 ) min_rounds = getattr ( hasher , 'mi...
Updates the hash algorithm and, optionally, the number of rounds to use.
10,272
def to_bool ( value , do_raise = True ) : value = value . lower ( ) if value . isdigit ( ) : return bool ( int ( value ) ) if value in _str_true : return True elif value in _str_false : return False if do_raise : raise ValueError ( "invalid literal for to_bool(): %r" % value ) return False
Convert a string to a boolean value .
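A runnable sketch; the _str_true and _str_false literal sets are defined elsewhere in the original module, so the values below are assumptions.

_str_true = {'true', 't', 'yes', 'y', 'on'}      # assumed contents
_str_false = {'false', 'f', 'no', 'n', 'off'}    # assumed contents

def to_bool(value, do_raise=True):
    value = value.lower()
    if value.isdigit():                 # "0" -> False, any other digits -> True
        return bool(int(value))
    if value in _str_true:
        return True
    if value in _str_false:
        return False
    if do_raise:
        raise ValueError("invalid literal for to_bool(): %r" % value)
    return False

print(to_bool('Yes'), to_bool('0'), to_bool('maybe', do_raise=False))  # True False False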
10,273
async def become ( self , layer_type : Type [ L ] , request : 'Request' ) -> L : raise ValueError ( 'Cannot become "{}"' . format ( layer_type . __name__ ) )
Transform this layer into another layer type
10,274
async def become ( self , layer_type : Type [ L ] , request : 'Request' ) : if layer_type != RawText : super ( Text , self ) . become ( layer_type , request ) return RawText ( await render ( self . text , request ) )
Transforms the translatable string into an actual string and puts it inside a RawText.
10,275
def _make_register ( self ) -> BaseRegisterStore : s = settings . REGISTER_STORE store_class = import_class ( s [ 'class' ] ) return store_class ( ** s [ 'params' ] )
Make the register storage .
10,276
def _make_transitions ( self ) -> List [ Transition ] : module_name = settings . TRANSITIONS_MODULE module_ = importlib . import_module ( module_name ) return module_ . transitions
Load the transitions file .
10,277
def _make_allowed_states ( self ) -> Iterator [ Text ] : for trans in self . transitions : yield trans . dest . name ( ) if trans . origin : yield trans . origin . name ( )
Sometimes we load states from the database . In order to avoid loading an arbitrary class we list here the state classes that are allowed .
10,278
async def _find_trigger ( self , request : Request , origin : Optional [ Text ] = None , internal : bool = False ) -> Tuple [ Optional [ BaseTrigger ] , Optional [ Type [ BaseState ] ] , Optional [ bool ] , ] : reg = request . register if not origin : origin = reg . get ( Register . STATE ) logger . debug ( 'From state...
Find the best trigger for this request or go away .
10,279
def _confused_state ( self , request : Request ) -> Type [ BaseState ] : origin = request . register . get ( Register . STATE ) if origin in self . _allowed_states : try : return import_class ( origin ) except ( AttributeError , ImportError ) : pass return import_class ( settings . DEFAULT_STATE )
If we're confused, find which state to call.
10,280
async def _build_state ( self , request : Request , message : BaseMessage , responder : Responder ) -> Tuple [ Optional [ BaseState ] , Optional [ BaseTrigger ] , Optional [ bool ] , ] : trigger , state_class , dnr = await self . _find_trigger ( request ) if trigger is None : if not message . should_confuse ( ) : retur...
Build the state for this request .
10,281
async def _run_state ( self , responder , state , trigger , request ) -> BaseState : user_trigger = trigger try : if trigger : await state . handle ( ) else : await state . confused ( ) for i in range ( 0 , settings . MAX_INTERNAL_JUMPS + 1 ) : if i == settings . MAX_INTERNAL_JUMPS : raise MaxInternalJump ( ) trigger ,...
Execute the state or, if execution fails, handle it.
10,282
async def _build_state_register ( self , state : BaseState , request : Request , responder : Responder ) -> Dict : return { Register . STATE : state . name ( ) , Register . TRANSITION : await responder . make_transition_register ( request ) , }
Build the next register to store .
10,283
def runGetResults ( cmd , stdout = True , stderr = True , encoding = sys . getdefaultencoding ( ) ) : if stderr in ( 'stdout' , subprocess . STDOUT ) : stderr = subprocess . STDOUT elif stderr == True or stderr == subprocess . PIPE : stderr = subprocess . PIPE else : stderr = None if stdout == True or stdout == subproc...
runGetResults - Simple method to run a command and return the results of the execution as a dict .
10,284
def create_context_store ( name = 'default' , ttl = settings . CONTEXT_DEFAULT_TTL , store = settings . CONTEXT_STORE ) -> 'BaseContextStore' : store_class = import_class ( store [ 'class' ] ) return store_class ( name = name , ttl = ttl , ** store [ 'params' ] )
Create a context store. By default the configured context store is used, but you can point the store setting at a custom class if you want.
10,285
def camelcase ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms ) if words : words [ 0 ] = words [ 0 ] . lower ( ) return '' . join ( words )
Return text in camelCase style .
10,286
def dotcase ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms ) return '.' . join ( [ w . lower ( ) for w in words ] )
Return text in dot . case style .
10,287
def separate_words ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms , preserve_case = True ) return ' ' . join ( words )
Return text in separate words style.
10,288
def init_db ( ) : import reana_db . models if not database_exists ( engine . url ) : create_database ( engine . url ) Base . metadata . create_all ( bind = engine )
Initialize the DB .
10,289
def _load_significant_pathways_file ( path_to_file ) : feature_pathway_df = pd . read_table ( path_to_file , header = 0 , usecols = [ "feature" , "side" , "pathway" ] ) feature_pathway_df = feature_pathway_df . sort_values ( by = [ "feature" , "side" ] ) return feature_pathway_df
Read in the significant pathways file as a pandas.DataFrame.
10,290
def _pathway_feature_permutation ( pathway_feature_tuples , permutation_max_iters ) : pathways , features = [ list ( elements_at_position ) for elements_at_position in zip ( * pathway_feature_tuples ) ] original_pathways = pathways [ : ] random . shuffle ( pathways ) feature_block_locations = { } i = 0 while i < len ( ...
Permute the pathways across features for one side in the network . Used in permute_pathways_across_features
10,291
def weight_by_edge_odds_ratios ( self , edges_expected_weight , flag_as_significant ) : for edge_id , expected_weight in edges_expected_weight : edge_obj = self . edges [ edge_id ] edge_obj . weight /= expected_weight if edge_id in flag_as_significant : edge_obj . significant = True else : edge_obj . significant = Fals...
Applied during the permutation test . Update the edges in the network to be weighted by their odds ratios . The odds ratio measures how unexpected the observed edge weight is based on the expected weight .
10,292
def aggregate ( self , merge ) : self . features = set ( ) self . n_features += merge . n_features vertex_id_conversion = self . convert_pathway_mapping ( merge . pathways ) for edge_id , edge in merge . edges . items ( ) : edge_key = self . remapped_edge ( vertex_id_conversion , edge_id ) if edge_key in self . edges :...
Combine this network with another network . The aggregation step takes the union of the edges in the two networks where we take the sum of weights for edges common to both networks .
10,293
def edge_tuple ( self , vertex0_id , vertex1_id ) : pw0 = self . __getitem__ ( vertex0_id ) pw1 = self . __getitem__ ( vertex1_id ) if not pw0 or not pw1 : return None if pw0 < pw1 : return ( vertex0_id , vertex1_id ) elif pw0 > pw1 : return ( vertex1_id , vertex0_id ) else : return None
To avoid duplicate edges where the vertex ids are reversed, we maintain that the vertex ids are ordered so that the corresponding pathway names are alphabetical.
10,294
def add_pathway ( self , pathway ) : if pathway not in self . pathways : self . pathways [ pathway ] = self . n_pathways self . n_pathways += 1 return self . pathways [ pathway ]
Updates self.pathways and self.n_pathways.
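A self-contained sketch of the id-interning pattern used above: each new pathway name is assigned the next integer id, and repeats return the id already assigned. The PathwayIndex wrapper class is illustrative.

class PathwayIndex:
    def __init__(self):
        self.pathways = {}    # name -> integer id
        self.n_pathways = 0

    def add_pathway(self, pathway):
        if pathway not in self.pathways:
            self.pathways[pathway] = self.n_pathways
            self.n_pathways += 1
        return self.pathways[pathway]

idx = PathwayIndex()
print(idx.add_pathway('apoptosis'),
      idx.add_pathway('glycolysis'),
      idx.add_pathway('apoptosis'))  # 0 1 0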
10,295
def get_edge_pathways ( self , edge_id ) : vertex0_id , vertex1_id = edge_id pw0 = self . get_pathway_from_vertex_id ( vertex0_id ) pw1 = self . get_pathway_from_vertex_id ( vertex1_id ) if not pw0 or not pw1 : return None return ( pw0 , pw1 )
Get the pathways associated with an edge .
10,296
def get_vertex_obj_from_pathway ( self , pathway ) : if pathway in self . pathways : vertex_id = self . pathways [ pathway ] return self . vertices [ vertex_id ] else : return None
Get the vertex object that corresponds to a pathway name
10,297
def get_adjacent_pathways ( self , pathway ) : vertex_id = self . pathways [ pathway ] adjacent = self . vertices [ vertex_id ] . get_adjacent_vertex_ids ( ) adjacent_pathways = [ ] for adjacent_id in adjacent : adjacent_pathways . append ( self . get_pathway_from_vertex_id ( adjacent_id ) ) return adjacent_pathways
Get the pathways adjacent to this pathway in the network
10,298
def to_dataframe ( self , drop_weights_below = 0 , whitelist = None ) : network_df_cols = [ "pw0" , "pw1" , "weight" ] if self . features : network_df_cols . append ( "features" ) network_df = pd . DataFrame ( columns = network_df_cols ) idx = 0 edge_pathways = set ( ) for ( v0 , v1 ) , edge_obj in self . edges . items...
Conversion of the network to a pandas.DataFrame.
10,299
def _add_edge_to_vertex ( self , vertex_id , edge ) : connected_to = edge . connected_to ( vertex_id ) if vertex_id not in self . vertices : vertex_obj = Vertex ( vertex_id ) self . vertices [ vertex_id ] = vertex_obj self . vertices [ vertex_id ] . edges [ connected_to ] = edge . weight
Adds the edge to the Vertex object's edges dictionary.