idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
10,400
async def sign_url(self, url, method=HASH):
    """
    Sign a URL with this request's auth token.

    Depending on ``method``, the token is carried either as a
    query-string parameter or as the URL fragment (hash).
    """
    token = await self.get_token()

    if method == self.QUERY:
        # Token travels in the query string.
        return patch_qs(url, {
            settings.WEBVIEW_TOKEN_KEY: token,
        })

    if method == self.HASH:
        # Token travels in the fragment: index 5 of urlparse's 6-tuple.
        parts = list(urlparse(url))
        parts[5] = quote(token)
        return urlunparse(parts)

    raise ValueError(f'Invalid signing method "{method}"')
Sign a URL with this request's auth token.
116
9
10,401
def layers(self, value: List['BaseLayer']):
    """
    Store a copy of ``value`` as the layer list.

    Copying prevents outside mutation of the caller's list from
    silently invalidating the type index, which is rebuilt here along
    with the transformed-layers cache.
    """
    self._layers = list(value)  # type: List[BaseLayer]
    self._transformed = {}
    self._index = self._make_index()
Perform a copy of the layers list in order to avoid the list changing without updating the index .
55
20
10,402
def _make_index ( self ) : out = { } for layer in self . _layers : cls = layer . __class__ out [ cls ] = out . get ( cls , [ ] ) + [ layer ] return out
Perform the index computation . It groups layers by type into a dictionary to allow quick access .
52
19
10,403
def has_layer(self, class_: Type[L], became: bool = True) -> bool:
    """
    Tell whether a layer of type ``class_`` is present.

    When ``became`` is true, layers transformed into ``class_`` also
    count as present.
    """
    if class_ in self._index:
        return True
    return became and class_ in self._transformed
Test the presence of a given layer type .
47
9
10,404
def get_layer(self, class_: Type[L], became: bool = True) -> L:
    """
    Return the first layer of type ``class_``.

    Falls back to transformed layers when ``became`` is true; a
    KeyError propagates when no such layer exists.
    """
    try:
        candidates = self._index[class_]
    except KeyError:
        if not became:
            raise
        candidates = self._transformed[class_]
    return candidates[0]
Return the first layer of a given class . If that layer is not present then raise a KeyError .
61
21
10,405
def get_layers(self, class_: Type[L], became: bool = True) -> List[L]:
    """
    Return the list of layers of type ``class_`` (empty when absent).

    When ``became`` is true, layers transformed into ``class_`` are
    appended after the indexed ones.
    """
    # Copy before extending: the previous code did `out += ...` on the
    # very list object stored in `self._index`, so every call with
    # `became=True` silently grew the index.
    out = list(self._index.get(class_, []))
    if became:
        out += self._transformed.get(class_, [])
    return out
Returns the list of layers of a given class . If no layers are present then the list will be empty .
64
22
10,406
def check_trajectory_id(self, dataset):
    """
    Check that a variable with cf_role="trajectory_id" exists and that
    it carries the recommended attributes.

    :param dataset: open netCDF-like dataset supporting
        ``get_variables_by_attributes``
    :returns: a single Result when the variable is missing, otherwise a
        list of Results. NOTE(review): the return type differs between
        the two paths -- confirm callers handle both.
    """
    results = []
    exists_ctx = TestCtx(BaseCheck.MEDIUM, 'Variable defining "trajectory_id" exists')
    trajectory_ids = dataset.get_variables_by_attributes(cf_role='trajectory_id')
    # No need to check
    exists_ctx.assert_true(trajectory_ids, 'variable defining cf_role="trajectory_id" exists')
    if not trajectory_ids:
        # Nothing else to test: report only the existence failure.
        return exists_ctx.to_result()
    results.append(exists_ctx.to_result())
    # Only the first matching variable is inspected further.
    test_ctx = TestCtx(BaseCheck.MEDIUM, 'Recommended attributes for the {} variable'.format(trajectory_ids[0].name))
    test_ctx.assert_true(
        getattr(trajectory_ids[0], 'long_name', '') != "",
        "long_name attribute should exist and not be empty"
    )
    results.append(test_ctx.to_result())
    return results
Checks that if a variable exists for the trajectory id it has the appropriate attributes
229
16
10,407
def check_required_attributes(self, dataset):
    """
    Feature-type specific check of global required and highly
    recommended attributes for Trajectory datasets.

    :param dataset: open netCDF-like dataset
    :returns: list containing a single Result covering all three checks
    """
    results = []
    required_ctx = TestCtx(BaseCheck.HIGH, 'Required Global Attributes for Trajectory dataset')
    # Template version comparison is case-insensitive against the first
    # accepted template.
    required_ctx.assert_true(
        getattr(dataset, 'nodc_template_version', '').lower() == self.valid_templates[0].lower(),
        'nodc_template_version attribute must be {}'.format(self.valid_templates[0])
    )
    required_ctx.assert_true(
        getattr(dataset, 'cdm_data_type', '') == 'Trajectory',
        'cdm_data_type attribute must be set to Trajectory'
    )
    required_ctx.assert_true(
        getattr(dataset, 'featureType', '') == 'trajectory',
        'featureType attribute must be set to trajectory'
    )
    results.append(required_ctx.to_result())
    return results
Feature type specific check of global required and highly recommended attributes .
214
12
10,408
def login(self, user, remember=True, session=None):
    """
    Store the user's UHMAC token in the session and mark the session
    permanent according to ``remember``.
    """
    if session is None:
        session = self.session
    log = logging.getLogger(__name__)
    log.debug(u'User `{0}` logged in'.format(user.login))
    session['permanent'] = remember
    session[self.session_key] = user.get_uhmac()
    # Some session backends (e.g. Beaker) need an explicit save.
    save = getattr(session, 'save', None)
    if callable(save):
        save()
Sets the current user UID in the session .
109
10
10,409
def index(elem):
    """
    Return the position of ``elem`` among its parent's children, or -1
    when it cannot be found.
    """
    siblings = elem.getparent().getchildren()
    for position, child in enumerate(siblings):
        if child == elem:
            return position
    return -1
Return the index position of an element in the children of a parent .
53
14
10,410
def replaceelement(oldelem, newelem):
    """
    Swap ``oldelem`` for ``newelem`` at the same position under its
    parent. Nothing happens when ``oldelem`` has no parent.
    """
    parent = oldelem.getparent()
    if parent is None:
        return
    total = len(parent.getchildren())
    for slot in range(total):
        if parent.getchildren()[slot] == oldelem:
            parent.remove(oldelem)
            parent.insert(slot, newelem)
Given a parent element replace oldelem with newelem .
83
13
10,411
def parseelement(elem):
    """
    Re-parse an element's text as XML and substitute the parsed tree
    for the element itself. Useful when raw XML markup was stored as
    the text content of an element.
    """
    wrapped = '<%(tag)s>%(content)s</%(tag)s>' % {
        'tag': elem.tag,
        'content': elem.text,
    }
    replaceelement(elem, etree.fromstring(wrapped))
Convert the content of an element into more ElementTree structures . We do this because sometimes we want to set xml as the content of an element .
70
30
10,412
def _check_min_max_range(self, var, test_ctx):
    """
    Check that either a well-formed valid_range exists, or that both
    valid_min and valid_max exist with the variable's dtype.

    :param var: netCDF variable (numpy-dtype backed)
    :param test_ctx: TestCtx that accumulates the assertions
    :returns: the same ``test_ctx``
    """
    if 'valid_range' in var.ncattrs():
        # valid_range wins: must be a 2-vector [min, max] of var's dtype.
        test_ctx.assert_true(var.valid_range.dtype == var.dtype and
                             len(var.valid_range) == 2 and
                             var.valid_range[0] <= var.valid_range[1],
                             "valid_range must be a two element vector of min followed by max with the same data type as {}".format(var.name))
    else:
        for bound in ('valid_min', 'valid_max'):
            v_bound = getattr(var, bound, '')
            warn_msg = '{} attribute should exist, have the same type as {}, and not be empty or valid_range should be defined'.format(bound, var.name)
            # need to special case str attributes since they aren't directly
            # comparable to numpy dtypes
            if isinstance(v_bound, six.string_types):
                # 'S' char: variable itself is a string/char type.
                test_ctx.assert_true(v_bound != '' and var.dtype.char == 'S', warn_msg)
            # otherwise compare the numpy types directly
            else:
                test_ctx.assert_true(v_bound.dtype == var.dtype, warn_msg)
    return test_ctx
Checks that either both valid_min and valid_max exist or valid_range exists .
289
19
10,413
def check_base_required_attributes(self, dataset):
    """
    Check the global required and highly recommended attributes for 1.1
    templates, validating the values rather than mere existence.
    """
    test_ctx = TestCtx(BaseCheck.HIGH, 'Required global attributes')
    conventions = getattr(dataset, 'Conventions', '')
    metadata_conventions = getattr(dataset, 'Metadata_Conventions', '')
    feature_type = getattr(dataset, 'featureType', '')
    cdm_data_type = getattr(dataset, 'cdm_data_type', '')
    standard_name_vocab = getattr(dataset, 'standard_name_vocabulary', '')
    # 1.1 templates require exactly CF-1.6 (no convention lists).
    accepted_conventions = 'CF-1.6'
    test_ctx.assert_true(conventions == accepted_conventions,
                         'Conventions attribute is missing or is not equal to CF-1.6: {}'.format(conventions))
    test_ctx.assert_true(metadata_conventions == 'Unidata Dataset Discovery v1.0',
                         "Metadata_Conventions attribute is required to be 'Unidata Dataset Discovery v1.0': {}".format(metadata_conventions))
    test_ctx.assert_true(feature_type in ['point', 'timeSeries', 'trajectory', 'profile', 'timeSeriesProfile', 'trajectoryProfile'],
                         'Feature type must be one of point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile: {}'.format(feature_type))
    # cdm_data_type is compared case-insensitively.
    test_ctx.assert_true(cdm_data_type.lower() in ['grid', 'image', 'point', 'radial', 'station', 'swath', 'trajectory'],
                         'cdm_data_type must be one of Grid, Image, Point, Radial, Station, Swath, Trajectory: {}'.format(cdm_data_type))
    # Accept any capitalization of "Standard Name Table".
    regex = re.compile(r'[sS]tandard [nN]ame [tT]able')
    test_ctx.assert_true(regex.search(standard_name_vocab),
                         "standard_name_vocabulary doesn't contain 'Standard Name Table': {}".format(standard_name_vocab))
    return test_ctx.to_result()
Check the global required and highly recommended attributes for 1 . 1 templates . These go an extra step besides just checking that they exist .
503
26
10,414
def check_base_required_attributes(self, dataset):
    """
    Check the global required and highly recommended attributes for 2.0
    templates, validating the values rather than mere existence.
    """
    test_ctx = TestCtx(BaseCheck.HIGH, 'Required global attributes')
    conventions = getattr(dataset, 'Conventions', '')
    feature_type = getattr(dataset, 'featureType', '')
    # Define conventions
    accepted_conventions = ['CF-1.6', 'ACDD-1.3']
    # Normalize "CF-1.6, ACDD-1.3" to comparable tokens.
    dataset_conventions = conventions.replace(' ', '').split(',')
    for accepted_convention in accepted_conventions:
        if accepted_convention not in dataset_conventions:
            # One failed assertion is enough; stop at first miss.
            test_ctx.assert_true(False, 'Conventions attribute is missing or is not equal to "CF-1.6, ACDD-1.3": {}'.format(conventions))
            break
    else:
        # for/else: only reached when every convention was present.
        test_ctx.assert_true(True, '')
    # Check feature types
    test_ctx.assert_true(feature_type in ['point', 'timeSeries', 'trajectory', 'profile', 'timeSeriesProfile', 'trajectoryProfile'],
                         'Feature type must be one of point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile: {}'.format(feature_type))
    return test_ctx.to_result()
Check the global required and highly recommended attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
285
26
10,415
def check_recommended_global_attributes(self, dataset):
    """
    Check the global recommended attributes for 2.0 templates,
    validating the values rather than mere existence.
    """
    recommended_ctx = TestCtx(BaseCheck.MEDIUM, 'Recommended global attributes')
    # sea_name: comma-separated; every entry must be an NODC sea name.
    sea_names = [sn.lower() for sn in util.get_sea_names()]
    sea_name = getattr(dataset, 'sea_name', '')
    sea_name = sea_name.replace(', ', ',')
    sea_name = sea_name.split(',') if sea_name else []
    for sea in sea_name:
        recommended_ctx.assert_true(
            sea.lower() in sea_names,
            'sea_name attribute should exist and should be from the NODC sea names list: {} is not a valid sea name'.format(sea)
        )
    # Parse dates, check for ISO 8601
    for attr in ['time_coverage_start', 'time_coverage_end', 'date_created', 'date_modified']:
        attr_value = getattr(dataset, attr, '')
        try:
            parse_datetime(attr_value)
            recommended_ctx.assert_true(True, '')  # Score it True!
        except ISO8601Error:
            recommended_ctx.assert_true(False, '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}'.format(attr, attr_value))
    value = getattr(dataset, 'geospatial_vertical_positive', '')
    recommended_ctx.assert_true(value.lower() in ['up', 'down'], 'geospatial_vertical_positive attribute should be up or down: {}'.format(value))
    # Both US and UK spellings are acceptable.
    ack_exists = any((getattr(dataset, attr, '') != '' for attr in ['acknowledgment', 'acknowledgement']))
    recommended_ctx.assert_true(ack_exists, 'acknowledgement attribute should exist and not be empty')
    standard_name_vocab = getattr(dataset, 'standard_name_vocabulary', '')
    # Accept any capitalization of "Standard Name Table".
    regex = re.compile(r'[sS]tandard [nN]ame [tT]able')
    recommended_ctx.assert_true(regex.search(standard_name_vocab), "standard_name_vocabulary doesn't contain 'Standard Name Table': {}".format(standard_name_vocab))
    # comment is optional, but must not be empty when present.
    if hasattr(dataset, 'comment'):
        recommended_ctx.assert_true(getattr(dataset, 'comment', '') != '', 'comment attribute should not be empty if specified')
    return recommended_ctx.to_result()
Check the global recommended attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
608
23
10,416
def check_base_suggested_attributes(self, dataset):
    """
    Check the global suggested attributes for 2.0 templates, validating
    the values rather than mere existence.
    """
    suggested_ctx = TestCtx(BaseCheck.LOW, 'Suggested global attributes')
    # Do any of the variables define platform ?
    platform_name = getattr(dataset, 'platform', '')
    suggested_ctx.assert_true(platform_name != '', 'platform should exist and point to a term in :platform_vocabulary.')
    cdm_data_type = getattr(dataset, 'cdm_data_type', '')
    suggested_ctx.assert_true(cdm_data_type.lower() in ['grid', 'image', 'point', 'radial', 'station', 'swath', 'trajectory'],
                              'cdm_data_type must be one of Grid, Image, Point, Radial, Station, Swath, Trajectory: {}'.format(cdm_data_type))
    # Parse dates, check for ISO 8601
    for attr in ['date_modified', 'date_issued', 'date_metadata_modified']:
        attr_value = getattr(dataset, attr, '')
        try:
            parse_datetime(attr_value)
            suggested_ctx.assert_true(True, '')  # Score it True!
        except ISO8601Error:
            suggested_ctx.assert_true(False, '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}'.format(attr, attr_value))
    units = getattr(dataset, 'geospatial_lat_units', '').lower()
    suggested_ctx.assert_true(units == 'degrees_north', 'geospatial_lat_units attribute should be degrees_north: {}'.format(units))
    units = getattr(dataset, 'geospatial_lon_units', '').lower()
    suggested_ctx.assert_true(units == 'degrees_east', 'geospatial_lon_units attribute should be degrees_east: {}'.format(units))
    contributor_name = getattr(dataset, 'contributor_name', '')
    contributor_role = getattr(dataset, 'contributor_role', '')
    # BUG FIX: names were previously split from contributor_role, which
    # made the name/role length comparison compare the roles against
    # themselves and pass trivially. The duplicated length assertion
    # has also been removed.
    names = contributor_name.split(',')
    roles = contributor_role.split(',')
    suggested_ctx.assert_true(contributor_name != '', 'contributor_name should exist and not be empty.')
    suggested_ctx.assert_true(contributor_role != '', 'contributor_role should exist and not be empty.')
    suggested_ctx.assert_true(len(names) == len(roles), 'length of contributor names matches length of roles')
    return suggested_ctx.to_result()
Check the global suggested attributes for 2 . 0 templates . These go an extra step besides just checking that they exist .
652
23
10,417
def _configure(self):
    """
    Load the bundled default config (config.yml next to this module),
    merge it with the current config, and configure logging.
    """
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config.yml')
    with open(path) as file:
        # safe_load: the bundled config is plain YAML, and yaml.load
        # without an explicit Loader is deprecated (PyYAML >= 5.1) and
        # can construct arbitrary Python objects.
        defaultconfig = yaml.safe_load(file)
    self.config = merge_dict(self.config, defaultconfig)
    if 'logging' in self.config:
        logging.config.dictConfig(self.config['logging'])
    else:
        logging.getLogger('sirbot').setLevel('INFO')
Configure the core of sirbot
125
7
10,418
def _import_plugins(self) -> None:
    """
    Import each configured plugin module and register it with the
    pluggy plugin manager.

    When a module cannot be found, the current working directory is
    appended to ``sys.path`` once and the import retried (supports
    plugins living next to the config); otherwise the error propagates.
    """
    logger.debug('Importing plugins')
    self._pm = pluggy.PluginManager('sirbot')
    self._pm.add_hookspecs(hookspecs)
    for plugin in self.config['sirbot']['plugins']:
        try:
            p = importlib.import_module(plugin)
        except (ModuleNotFoundError, ):
            if os.getcwd() not in sys.path:
                # Retry with the CWD on the path exactly once.
                sys.path.append(os.getcwd())
                p = importlib.import_module(plugin)
            else:
                raise
        self._pm.register(p)
Import and register plugin in the plugin manager .
145
9
10,419
def _initialize_plugins(self):
    """
    Collect plugins from the ``plugins`` hook and index them by name
    together with their config slice and start priority.

    NOTE(review): a plugin whose config sets ``priority`` to a falsy
    value (e.g. 0) is silently skipped here -- confirm that is intended.
    """
    logger.debug('Initializing plugins')
    plugins = self._pm.hook.plugins(loop=self._loop)
    if plugins:
        for plugin in plugins:
            name = plugin.__name__
            # Fall back to the module name when no registry name is set.
            registry_name = plugin.__registry__ or plugin.__name__
            config = self.config.get(name, {})
            priority = config.get('priority', 50)
            if priority:
                self._plugins[name] = {
                    'plugin': plugin,
                    'config': config,
                    'priority': priority,
                    'factory': registry_name
                }
                self._start_priority[priority].append(name)
    else:
        logger.error('No plugins found')
Initialize the plugins
159
4
10,420
def _register_factory(self):
    """
    Expose each prioritized plugin's ``factory`` callable in the global
    registry, then freeze the registry against further registration.
    """
    for name, info in self._plugins.items():
        if not info['priority']:
            continue
        factory = getattr(info['plugin'], 'factory', None)
        if callable(factory):
            registry[info['factory']] = info['plugin'].factory
    registry.freeze()
Index the available factories
82
4
10,421
async def _configure_plugins(self) -> None:
    """
    Run every plugin's ``configure`` coroutine concurrently, passing
    its config slice, the shared HTTP session and the app router.

    NOTE(review): ``asyncio.gather(..., loop=...)`` was removed in
    Python 3.10 -- confirm the supported interpreter range.
    """
    logger.debug('Configuring plugins')
    funcs = [
        info['plugin'].configure(
            config=info['config'],
            session=self._session,
            router=self.app.router
        )
        for info in self._plugins.values()
    ]
    if funcs:
        await asyncio.gather(*funcs, loop=self._loop)
    logger.debug('Plugins configured')
Configure the plugins
104
4
10,422
async def _start_plugins(self) -> None:
    """
    Start plugins batch by batch, highest priority first.

    Each plugin's ``start()`` is scheduled as a task, then we poll
    until every plugin of the batch reports ``started``, re-raising any
    task failure early via ``task.result()``.
    """
    logger.debug('Starting plugins')
    for priority in sorted(self._start_priority, reverse=True):
        logger.debug('Starting plugins %s', ', '.join(self._start_priority[priority]))
        for name in self._start_priority[priority]:
            plugin = self._plugins[name]
            self._tasks[name] = self._loop.create_task(plugin['plugin'].start())
        while not all(self._plugins[name]['plugin'].started for name in self._tasks):
            # Surface crashes early: .result() re-raises task exceptions.
            for task in self._tasks.values():
                if task.done():
                    task.result()
            await asyncio.sleep(0.2, loop=self._loop)
        else:
            # while/else: reached once the whole batch reports started.
            logger.debug('Plugins %s started', ', '.join(self._start_priority[priority]))
Start the plugins by priority
217
5
10,423
def _create_settings(self):
    """
    Assemble the settings payload consumed by the frontend
    visualization: column definitions, server port and the docs trie.
    """
    columns = [{"Header": field, "accessor": field} for field in self.settings]
    self.settings = {
        "columns": columns,
        "port": self.port,
        "docs": construct_trie(self.docs),
    }
Creates the settings object that will be sent to the frontend vizualization
64
16
10,424
def run_server(self):
    """
    Serve index queries over HTTP on localhost instead of generating
    the JavaScript table.
    """
    application = build_app()
    run(application, host='localhost', port=self.port)
Runs a server to handle queries to the index without creating the javascript table .
31
16
10,425
def strip_spaces(value, sep=None, join=True):
    """
    Trim the string and collapse runs of whitespace (or ``sep``).

    With ``join`` true (the default) the cleaned pieces are joined back
    using ``sep`` (or a single space); otherwise the list of cleaned
    pieces is returned.
    """
    pieces = [chunk.strip() for chunk in value.strip().split(sep)]
    if not join:
        return pieces
    return (sep or ' ').join(pieces)
Cleans trailing whitespaces and replaces also multiple whitespaces with a single space .
65
16
10,426
async def rank(self, request, origin: Optional[Text]) -> Tuple[
        float,
        Optional[BaseTrigger],
        Optional[type],
        Optional[bool],
]:
    """
    Score this transition for the given request.

    A transition from the expected origin gets full weight; an
    origin-less transition takes the jumping penalty; any other origin
    disqualifies the transition outright.
    """
    if self.origin_name == origin:
        base = 1.0
    elif self.origin_name is None:
        base = settings.JUMPING_TRIGGER_PENALTY
    else:
        # Wrong origin: this transition cannot fire at all.
        return 0.0, None, None, None

    trigger = self.factory(request)
    trigger_rank = await run_or_return(trigger.rank())
    score = base * self.weight * (trigger_rank or 0.0)

    return score, trigger, self.dest, self.do_not_register
Computes the rank of this transition for a given request .
145
12
10,427
def check_dimensions(self, dataset):
    """
    Check that all geophysical variables of the dataset are point
    feature types.
    """
    required_ctx = TestCtx(BaseCheck.HIGH, 'All geophysical variables are point feature types')
    t = util.get_time_variable(dataset)
    # Exit prematurely
    if not t:
        required_ctx.assert_true(False, 'A dimension representing time is required for point feature types')
        return required_ctx.to_result()
    t_dims = dataset.variables[t].dimensions
    # First dimension of the time variable (falsy when it has none).
    # The previous `None or (...)` wrapper was a no-op and was removed.
    o = t_dims and t_dims[0]
    message = '{} must be a valid timeseries feature type. It must have dimensions of ({}), and all coordinates must have dimensions of ({})'
    for variable in util.get_geophysical_variables(dataset):
        is_valid = util.is_point(dataset, variable)
        required_ctx.assert_true(is_valid, message.format(variable, o, o))
    return required_ctx.to_result()
Checks that the feature types of this dataset are consistent with a point dataset.
213
16
10,428
def settings(cls):
    """
    Look up this platform class's entry in the platforms configuration
    and return its ``settings`` dict (empty when none is configured).

    Returns None when the class is not listed at all.
    """
    from bernard.platforms.management import get_platform_settings

    for platform in get_platform_settings():
        if import_class(platform['class']) == cls:
            return platform.get('settings', {})
Find the settings for the current class inside the platforms configuration .
63
12
10,429
async def _notify(self, message: BaseMessage, responder: Responder):
    """
    Notify every registered listener that a message was received.

    NOTE(review): when ``fsm_creates_task`` is true, the coroutine
    returned by the callback is never awaited here -- presumably the
    callback schedules its own task in that mode; confirm.
    """
    for cb in self._listeners:
        coro = cb(message, responder, self.fsm_creates_task)
        if not self.fsm_creates_task:
            # Only the last callback's result ends up in _register.
            self._register = await coro
Notify all callbacks that a message was received .
72
11
10,430
async def async_init(self):
    """
    Create the long-lived HTTP session used to keep outgoing
    connections to the platform alive, then kick off deferred
    initialization in the background.
    """
    self.session = aiohttp.ClientSession()
    # Fire-and-forget: _deferred_init runs concurrently on the loop.
    asyncio.get_event_loop().create_task(self._deferred_init())
During async init we just need to create a HTTP session so we can keep outgoing connexions to the platform alive .
47
24
10,431
def accept ( self , stack : Stack ) : for name , pattern in self . PATTERNS . items ( ) : if stack . match_exp ( pattern ) : stack . annotation = name return True return False
Checks that the stack can be accepted according to the PATTERNS .
44
15
10,432
def send(self, request: Request, stack: Stack) -> Coroutine:
    """
    Route a stack to the platform-specific sender.

    The stack is annotated through ``accept`` when needed, then
    dispatched to the matching ``_send_<annotation>`` method.
    """
    if stack.annotation not in self.PATTERNS:
        if not self.accept(stack):
            raise UnacceptableStack('Cannot accept stack {}'.format(stack))
    sender = getattr(self, '_send_' + stack.annotation)
    return sender(request, stack)
Send a stack to the platform .
80
7
10,433
def to_unit_memory(number):
    """
    Render a byte count as a human-readable string in Kb, Mb or Gb.
    """
    step = 1024
    number /= step
    if number < 100:
        return '{} Kb'.format(round(number, 2))
    number /= step
    if number < 300:
        return '{} Mb'.format(round(number, 2))
    return '{} Gb'.format(round(number / step, 2))
Creates a string representation of memory size given number .
85
11
10,434
def to_percentage(number, rounding=2):
    """
    Format a ratio as a percentage string. The value is multiplied by
    100; the integer form is preferred when rounding changes nothing
    (e.g. '50%' rather than '50.0%').
    """
    scaled = float(number) * 100
    truncated = int(scaled)
    rounded = round(scaled, rounding)
    display = truncated if truncated == rounded else rounded
    return '{}%'.format(display)
Creates a percentage string representation from the given number . The number is multiplied by 100 before adding a % character .
64
23
10,435
def set_editor(self, editor):
    """
    Attach this table to ``editor`` (held as a weak proxy) so it
    refreshes whenever the editor's offset calculator emits
    ``pic_infos_available``. Passing a falsy ``editor`` detaches.
    """
    if self._editor is not None:
        # Disconnect from the previous editor's signal first.
        try:
            self._editor.offset_calculator.pic_infos_available.disconnect(self._update)
        except (AttributeError, RuntimeError, ReferenceError):
            # see https://github.com/OpenCobolIDE/OpenCobolIDE/issues/89
            pass
    # A weak proxy avoids keeping a closed editor alive via the signal.
    self._editor = weakref.proxy(editor) if editor else editor
    try:
        self._editor.offset_calculator.pic_infos_available.connect(self._update)
    except AttributeError:
        # editor is None/detached: nothing to connect to.
        pass
Sets the associated editor; when the editor's offset calculator mode emits the pic_infos_available signal, the table is automatically refreshed.
132
27
10,436
def patch_conf(settings_patch=None, settings_file=None):
    """
    Reload the configuration from scratch and overlay ``settings_patch``
    on top of it (generator body: yields once so it can back a context
    manager). Only the default config is loaded unless ``settings_file``
    is provided via the environment variable.
    """
    if settings_patch is None:
        settings_patch = {}
    reload_config()
    os.environ[ENVIRONMENT_VARIABLE] = settings_file if settings_file else ''
    from bernard.conf import settings as l_settings
    # noinspection PyProtectedMember
    r_settings = l_settings._settings
    r_settings.update(settings_patch)
    # i18n caches data derived from the config; refresh only when the
    # module has already been imported.
    if 'bernard.i18n' in modules:
        from bernard.i18n import translate, intents
        translate._regenerate_word_dict()
        intents._refresh_intents_db()
    yield
Reload the configuration from scratch. Only the default config is loaded, not the environment-specified config.
153
20
10,437
def resolve(self, key):
    """
    Look up ``key`` and delegate to its registration to build/return
    the object instance.

    :raises KeyError: when nothing is registered under ``key``
    """
    entry = self._registrations.get(key)
    if entry is None:
        raise KeyError("Unknown key: '{0}'".format(key))
    return entry.resolve(self, key)
Resolves the requested key to an object instance raising a KeyError if the key is missing
54
18
10,438
def dispose(self):
    """
    Dispose every performed registration and reset the container to an
    empty, reusable state.
    """
    for entry in self._registrations.values():
        entry.dispose()
    self._registrations = {}
Disposes every performed registration ; the container can then be used again
34
13
10,439
def build_workspace_path(user_id, workflow_id=None):
    """
    Build the relative workspace path for a user and, optionally, one
    of their workflows.
    """
    parts = ['users', str(user_id), 'workflows']
    if workflow_id:
        parts.append(str(workflow_id))
    return os.path.join(*parts)
Build user s workspace relative path .
75
7
10,440
def _get_workflow_with_uuid_or_name ( uuid_or_name , user_uuid ) : from reana_db . models import Workflow # Check existence if not uuid_or_name : raise ValueError ( 'No Workflow was specified.' ) # Check validity try : uuid_or_name . encode ( 'ascii' ) except UnicodeEncodeError : # `workflow_name` contains something else than just ASCII. raise ValueError ( 'Workflow name {} is not valid.' . format ( uuid_or_name ) ) # Check if UUIDv4 try : # is_uuid = UUID(uuid_or_name, version=4) is_uuid = UUID ( '{' + uuid_or_name + '}' , version = 4 ) except ( TypeError , ValueError ) : is_uuid = None if is_uuid : # `uuid_or_name` is an UUIDv4. # Search with it since it is expected to be unique. return _get_workflow_by_uuid ( uuid_or_name ) else : # `uuid_or_name` is not and UUIDv4. Expect it is a name. # Expect name might be in format 'reana.workflow.123' with arbitrary # number of dot-delimited substring, where last substring specifies # the run_number of the workflow this workflow name refers to. # Possible candidates for names are e.g. : # 'workflow_name' -> ValueError # 'workflow.name' -> True, True # 'workflow.name.123' -> True, True # '123.' -> True, False # '' -> ValueError # '.123' -> False, True # '..' -> False, False # '123.12' -> True, True # '123.12.' -> True, False # Try to split the dot-separated string. try : workflow_name , run_number = uuid_or_name . rsplit ( '.' , maxsplit = 1 ) except ValueError : # Couldn't split. Probably not a dot-separated string. # -> Search with `uuid_or_name` return _get_workflow_by_name ( uuid_or_name , user_uuid ) # Check if `run_number` was specified if not run_number : # No `run_number` specified. # -> Search by `workflow_name` return _get_workflow_by_name ( workflow_name , user_uuid ) # `run_number` was specified. # Check `run_number` is valid. if not run_number . isdigit ( ) : # `uuid_or_name` was split, so it is a dot-separated string # but it didn't contain a valid `run_number`. 
# Assume that this dot-separated string is the name of # the workflow and search with it. return _get_workflow_by_name ( uuid_or_name , user_uuid ) # `run_number` is valid. # Search by `run_number` since it is a primary key. workflow = Workflow . query . filter ( Workflow . name == workflow_name , Workflow . run_number == run_number , Workflow . owner_id == user_uuid ) . one_or_none ( ) if not workflow : raise ValueError ( 'REANA_WORKON is set to {0}, but ' 'that workflow does not exist. ' 'Please set your REANA_WORKON environment ' 'variable appropriately.' . format ( workflow_name , run_number ) ) return workflow
Get Workflow from database with uuid or name .
807
11
10,441
def _get_workflow_by_name(workflow_name, user_uuid):
    """
    Return the user's latest run (highest ``run_number``) among the
    Workflows named ``workflow_name``.

    :raises ValueError: when no such workflow exists
    """
    from reana_db.models import Workflow
    workflow = Workflow.query.filter(
        Workflow.name == workflow_name,
        Workflow.owner_id == user_uuid). \
        order_by(Workflow.run_number.desc()).first()
    if not workflow:
        raise ValueError(
            'REANA_WORKON is set to {0}, but '
            'that workflow does not exist. '
            'Please set your REANA_WORKON environment '
            'variable appropriately.'.format(workflow_name))
    return workflow
From Workflows named as workflow_name the latest run_number .
136
14
10,442
def _get_workflow_by_uuid(workflow_uuid):
    """
    Return the Workflow with the given UUIDv4.

    :raises ValueError: when no workflow with that id exists
    """
    from reana_db.models import Workflow
    workflow = Workflow.query.filter(Workflow.id_ == workflow_uuid).first()
    if not workflow:
        raise ValueError(
            'REANA_WORKON is set to {0}, but '
            'that workflow does not exist. '
            'Please set your REANA_WORKON environment '
            'variable appropriately.'.format(workflow_uuid))
    return workflow
Get Workflow with UUIDv4 .
108
9
10,443
async def _watch(self):
    """
    Watch loop: wait for filesystem events and reload the backing file
    whenever the watched file itself changes.
    """
    file_name = os.path.basename(self._file_path)
    logger.info('Watching %s "%s"', self.THING, self._file_path, )
    while self._running:
        evt = await self._watcher.get_event()
        # Events cover the whole directory; react only to our file.
        if evt.name == file_name:
            await self._load()
            logger.info('Reloading changed %s from "%s"', self.THING, self._file_path)
Start the watching loop .
119
5
10,444
async def start(self, file_path, locale=None, kwargs=None):
    """
    Record the file to load, optionally arm live-reload watching, and
    load the data a first time.
    """
    self._file_path = os.path.realpath(file_path)
    self._locale = locale
    if kwargs:
        self._kwargs = kwargs
    if settings.I18N_LIVE_RELOAD:
        loop = asyncio.get_event_loop()
        self._running = True
        self._watcher = aionotify.Watcher()
        # Watch the directory: editors often replace files via rename
        # (MOVED_TO) rather than writing in place (MODIFY).
        self._watcher.watch(
            path=os.path.dirname(self._file_path),
            flags=aionotify.Flags.MOVED_TO | aionotify.Flags.MODIFY,
        )
        await self._watcher.setup(loop)
        await self._load()
        loop.create_task(self._watch())
    else:
        await self._load()
Setup the watching utilities start the loop and load data a first time .
198
14
10,445
def _update(self, data: TransDict, *args, **kwargs):
    """
    Fan an update out to every registered listener, forwarding any
    extra positional and keyword arguments unchanged.
    """
    for listener in self.listeners:
        listener(data, *args, **kwargs)
Propagate updates to listeners
42
5
10,446
def print_info(self):
    """
    Discover all ``info_*`` methods, record their names in
    ``self.plugins``, and invoke each one, optionally echoing its name
    through the console first.
    """
    self.plugins = [key for key in dir(self) if key.startswith("info_")]
    for key in self.plugins:
        if self.echo:
            Console.ok("> {0}".format(key.replace("_", " ", 1)))
        # getattr instead of exec("self.%s()" % key): same dynamic
        # dispatch without building and executing source strings.
        getattr(self, key)()
prints some info that the user may find useful
96
9
10,447
def load_from_args_as_dataframe(args):
    """
    Given parsed variant-loading arguments, return a pandas DataFrame
    of variants (or None when no variant source was specified).

    :raises ValueError: when the loaded sources mix reference genomes
    """
    if not args.variants and not args.single_variant:
        return None

    if args.variant_source_name:
        variant_source_names = util.expand(
            args.variant_source_name,
            'variant_source_name',
            'variant source',
            len(args.variants))
    else:
        variant_source_names = util.drop_prefix(args.variants)

    variant_to_sources = collections.defaultdict(list)
    dfs = []
    for i in range(len(args.variants)):
        name = variant_source_names[i]
        prefix = ('metadata:' if len(args.variants) == 1 else "metadata:%s:" % name)
        df = load_as_dataframe(
            args.variants[i],
            name=name,
            genome=args.genome,
            max_variants=args.max_variants_per_source,
            only_passing=not args.include_failing_variants,
            metadata_column_prefix=prefix)

        if df.shape[0] == 0:
            # logging.warn is a deprecated alias of logging.warning.
            logging.warning("No variants loaded from: %s" % args.variants[i])
        else:
            for variant in df.variant:
                variant_to_sources[variant].append(name)
            dfs.append(df)

    if args.single_variant:
        variants = []
        extra_args = {}
        if args.genome:
            extra_args = {'ensembl': varcode.reference.infer_genome(args.genome)}
        for (locus_str, ref, alt) in args.single_variant:
            locus = Locus.parse(locus_str)
            variant = varcode.Variant(
                locus.contig,
                locus.inclusive_start,
                ref,
                alt,
                **extra_args)
            variants.append(variant)
            variant_to_sources[variant].append("commandline")
        dfs.append(variants_to_dataframe(variants))

    # Outer-merge all sources on the standard variant columns.
    df = dfs.pop(0)
    for other_df in dfs:
        df = pandas.merge(
            df,
            other_df,
            how='outer',
            on=["variant"] + STANDARD_DATAFRAME_COLUMNS)

    genomes = df["genome"].unique()
    if len(genomes) > 1:
        raise ValueError(
            "Mixing references is not supported. "
            "Reference genomes: %s" % (", ".join(genomes)))

    df["sources"] = [" ".join(variant_to_sources[v]) for v in df.variant]

    # Apply filters. DataFrame.ix was deprecated in pandas 0.20 and
    # removed in 1.0; boolean indexing via .loc is the replacement.
    if args.ref:
        df = df.loc[df.ref.isin(args.ref)]
    if args.alt:
        df = df.loc[df.alt.isin(args.alt)]
    loci = loci_util.load_from_args(
        util.remove_prefix_from_parsed_args(args, "variant"))
    if loci is not None:
        df = df.loc[[
            loci.intersects(pileup_collection.to_locus(v))
            for v in df.variant
        ]]
    return df
Given parsed variant - loading arguments return a pandas DataFrame .
714
13
10,448
def request(self, cmd, *args, **kwargs):
    """
    Issue a command request to the server; ``kwargs`` become extra
    request parameters (and may override ``action``).
    """
    # TODO: serialize the kwargs?
    merged = dict(action=cmd)
    merged.update(kwargs)
    return self.__request(self.url, merged)
Request data from the server.
58
7
10,449
def __request(self, url, params):
    """
    Make an HTTP POST request to the server and return the decoded
    response: raw body for 'data' requests, parsed JSON otherwise.

    :raises ServerError: on type or I/O failures
    """
    log.debug('request: %s %s' % (url, str(params)))
    try:
        response = urlopen(url, urlencode(params)).read()
        # Raw 'data' payloads can be huge; don't log them.
        if params.get('action') != 'data':
            log.debug('response: %s' % response)
        if params.get('action', None) == 'data':
            return response
        else:
            return json.loads(response)
    # `except X, e` is Python 2-only syntax and a SyntaxError on
    # Python 3; `as` works on both 2.6+ and 3.x.
    except TypeError as e:
        log.exception('request error')
        raise ServerError(e)
    except IOError as e:
        log.error('request error: %s' % str(e))
        raise ServerError(e)
Make an HTTP POST request to the server and return JSON data .
157
13
10,450
def position(self):
    """
    Return the position of this locus when it spans exactly one base.

    :raises ValueError: when the locus spans more than one base
    """
    if self.end - self.start != 1:
        raise ValueError("Not a single base: %s" % str(self))
    return self.start
If this locus spans a single base this property gives that position . Otherwise raises a ValueError .
40
20
10,451
def from_interbase_coordinates ( contig , start , end = None ) : typechecks . require_string ( contig ) typechecks . require_integer ( start ) if end is None : end = start + 1 typechecks . require_integer ( end ) contig = pyensembl . locus . normalize_chromosome ( contig ) return Locus ( contig , start , end )
Given coordinates in 0 - based interbase coordinates return a Locus instance .
89
15
10,452
def variant_context ( reference_fasta , contig , inclusive_start , inclusive_end , alt , context_length ) : # Move from 1-base coorindates to 0-base coordinates start = int ( inclusive_start ) - 1 end = int ( inclusive_end ) full_sequence = reference_fasta [ contig ] left = str ( full_sequence [ start - context_length : start ] . seq ) . upper ( ) middle = str ( full_sequence [ start : end ] . seq ) . upper ( ) right = str ( full_sequence [ end : end + context_length ] . seq ) . upper ( ) # Complement and reverse the context if necessary so the ref base is a # pyrmidine (C/T) if middle [ 0 ] in ( 'A' , 'G' ) : context_5prime = pyfaidx . complement ( right ) [ : : - 1 ] context_3prime = pyfaidx . complement ( left ) [ : : - 1 ] context_mutation = "%s>%s" % ( pyfaidx . complement ( middle ) [ : : - 1 ] , pyfaidx . complement ( alt ) [ : : - 1 ] ) else : context_5prime = left context_3prime = right context_mutation = "%s>%s" % ( middle , alt ) return ( context_5prime , context_mutation , context_3prime )
Retrieve the surrounding reference region from a variant .
312
12
10,453
def similarity ( self , other : 'Trigram' ) -> float : if not len ( self . _trigrams ) or not len ( other . _trigrams ) : return 0 count = float ( len ( self . _trigrams & other . _trigrams ) ) len1 = float ( len ( self . _trigrams ) ) len2 = float ( len ( other . _trigrams ) ) return count / ( len1 + len2 - count )
Compute the similarity with the provided other trigram .
104
11
10,454
def _match ( self , local : Tuple [ Trigram , ... ] , other : Trigram ) -> float : pos = local [ 0 ] % other neg = max ( ( x % other for x in local [ 1 : ] ) , default = 0 ) if neg > pos : return 0.0 return pos
Match a trigram with another one . If the negative matching wins the match is rejected and 0 is returned .
68
18
10,455
def similarity ( self , other : Trigram ) -> float : return max ( ( self . _match ( x , other ) for x in self . trigrams ) , default = 0 )
Find the best similarity within known trigrams .
40
9
10,456
def similarity ( self , other : Trigram ) -> Tuple [ float , L ] : return max ( ( ( t % other , l ) for t , l in self . trigrams ) , key = lambda x : x [ 0 ] , )
Returns the best matching score and the associated label .
53
10
10,457
def _exception_for ( self , code ) : if code in self . errors : return self . errors [ code ] elif 500 <= code < 599 : return exceptions . RemoteServerError else : return exceptions . UnknownError
Return the exception class suitable for the specified HTTP status code .
48
12
10,458
def setGroups ( self , * args , * * kwargs ) : requests = 0 groups = [ ] try : for gk in self [ 'groupKeys' ] : try : g = self . mambugroupclass ( entid = gk , * args , * * kwargs ) except AttributeError as ae : from . mambugroup import MambuGroup self . mambugroupclass = MambuGroup g = self . mambugroupclass ( entid = gk , * args , * * kwargs ) requests += 1 groups . append ( g ) except KeyError : pass self [ 'groups' ] = groups return requests
Adds the groups to which this client belongs .
145
9
10,459
def setBranch ( self , * args , * * kwargs ) : try : branch = self . mambubranchclass ( entid = self [ 'assignedBranchKey' ] , * args , * * kwargs ) except AttributeError as ae : from . mambubranch import MambuBranch self . mambubranchclass = MambuBranch branch = self . mambubranchclass ( entid = self [ 'assignedBranchKey' ] , * args , * * kwargs ) self [ 'assignedBranchName' ] = branch [ 'name' ] self [ 'assignedBranch' ] = branch return 1
Adds the branch to which the client belongs .
148
9
10,460
def protected ( self , * tests , * * kwargs ) : _role = kwargs . pop ( 'role' , None ) _roles = kwargs . pop ( 'roles' , None ) or [ ] _csrf = kwargs . pop ( 'csrf' , None ) _url_sign_in = kwargs . pop ( 'url_sign_in' , None ) _request = kwargs . pop ( 'request' , None ) if _role : _roles . append ( _role ) _roles = [ to_unicode ( r ) for r in _roles ] _tests = tests _user_tests = kwargs def decorator ( f ) : @ functools . wraps ( f ) def wrapper ( * args , * * kwargs ) : logger = logging . getLogger ( __name__ ) request = _request or self . request or args and args [ 0 ] url_sign_in = self . _get_url_sign_in ( request , _url_sign_in ) user = self . get_user ( ) if not user : return self . _login_required ( request , url_sign_in ) if hasattr ( user , 'has_role' ) and _roles : if not user . has_role ( * _roles ) : logger . debug ( u'User `{0}`: has_role fail' . format ( user . login ) ) logger . debug ( u'User roles: {0}' . format ( [ r . name for r in user . roles ] ) ) return self . wsgi . raise_forbidden ( ) for test in _tests : test_pass = test ( user , * args , * * kwargs ) if not test_pass : logger . debug ( u'User `{0}`: test fail' . format ( user . login ) ) return self . wsgi . raise_forbidden ( ) for name , value in _user_tests . items ( ) : user_test = getattr ( user , name ) test_pass = user_test ( value , * args , * * kwargs ) if not test_pass : logger . debug ( u'User `{0}`: test fail' . format ( user . login ) ) return self . wsgi . raise_forbidden ( ) disable_csrf = _csrf == False # noqa if ( not self . wsgi . is_idempotent ( request ) and not disable_csrf ) or _csrf : if not self . csrf_token_is_valid ( request ) : logger . debug ( u'User `{0}`: invalid CSFR token' . format ( user . login ) ) return self . wsgi . raise_forbidden ( "CSFR token isn't valid" ) return f ( * args , * * kwargs ) return wrapper return decorator
Factory of decorators for limiting the access to views .
635
11
10,461
def replace_flask_route ( self , bp , * args , * * kwargs ) : protected = self . protected def protected_route ( rule , * * options ) : """Like :meth:`Flask.route` but for a blueprint. The endpoint for the :func:`url_for` function is prefixed with the name of the blueprint. """ def decorator ( f ) : endpoint = options . pop ( "endpoint" , f . __name__ ) protected_f = protected ( * args , * * kwargs ) ( f ) bp . add_url_rule ( rule , endpoint , protected_f , * * options ) return f return decorator bp . route = protected_route
Replace the Flask app . route or blueprint . route with a version that first apply the protected decorator to the view so all views are automatically protected .
157
31
10,462
def parse_query ( self , query ) : tree = pypeg2 . parse ( query , Main , whitespace = "" ) return tree . accept ( self . converter )
Parse query string using given grammar
37
7
10,463
def decode_token ( token ) : if isinstance ( token , ( unicode , str ) ) : return _decode_token_compact ( token ) else : return _decode_token_json ( token )
Top - level method to decode a JWT . Takes either a compact - encoded JWT with a single signature or a multi - sig JWT in the JSON - serialized format .
47
37
10,464
def _verify_multi ( self , token , verifying_keys , num_required = None ) : headers , payload , raw_signatures , signing_inputs = _unpack_token_json ( token ) if num_required is None : num_required = len ( raw_signatures ) if num_required > len ( verifying_keys ) : # not possible return False if len ( headers ) != len ( raw_signatures ) : # invalid raise DecodeError ( 'Header/signature mismatch' ) verifying_keys = [ load_verifying_key ( vk , self . crypto_backend ) for vk in verifying_keys ] # sanity check: only support one type of key :( for vk in verifying_keys : if vk . curve . name != verifying_keys [ 0 ] . curve . name : raise DecodeError ( "TODO: only support using keys from one curve per JWT" ) der_signatures = [ raw_to_der_signature ( rs , verifying_keys [ 0 ] . curve ) for rs in raw_signatures ] # verify until threshold is met num_verified = 0 for ( signing_input , der_sig ) in zip ( signing_inputs , der_signatures ) : for vk in verifying_keys : verifier = self . _get_verifier ( vk , der_sig ) verifier . update ( signing_input ) try : verifier . verify ( ) num_verified += 1 verifying_keys . remove ( vk ) break except InvalidSignature : pass if num_verified >= num_required : break return ( num_verified >= num_required )
Verify a JSON - formatted JWT signed by multiple keys is authentic . Optionally set a threshold of required valid signatures with num_required . Return True if valid Return False if not
355
37
10,465
def verify ( self , token , verifying_key_or_keys , num_required = None ) : if not isinstance ( verifying_key_or_keys , ( list , str , unicode ) ) : raise ValueError ( "Invalid verifying key(s): expected list or string" ) if isinstance ( verifying_key_or_keys , list ) : return self . _verify_multi ( token , verifying_key_or_keys , num_required = num_required ) else : return self . _verify_single ( token , str ( verifying_key_or_keys ) )
Verify a compact - formatted JWT or a JSON - formatted JWT signed by multiple keys . Return True if valid Return False if not valid
128
30
10,466
def activate_script ( self ) : # must be rethought # ./scripts # deploydir/./scripts self . _add_scope ( "script" ) self . scripts = { } self . script_files = [ "./scripts/script_*.txt" , "~/.cloudmesh/scripts/script_*.txt" ] self . _load_scripts ( self . script_files )
activates the script command
84
5
10,467
def touch ( path ) : parentDirPath = os . path . dirname ( path ) PathOperations . safeMakeDirs ( parentDirPath ) with open ( path , "wb" ) : pass
Creates the given path as a file also creating intermediate directories if required .
43
15
10,468
def safeRmTree ( rootPath ) : shutil . rmtree ( rootPath , True ) return not os . path . exists ( rootPath )
Deletes a tree and returns true if it was correctly deleted
33
12
10,469
def linearWalk ( rootPath , currentDirFilter = None ) : for dirTuple in os . walk ( rootPath ) : ( dirPath , dirNames , fileNames ) = dirTuple if currentDirFilter is not None and not currentDirFilter ( dirPath , dirNames , fileNames ) : continue for fileName in fileNames : yield LinearWalkItem ( dirPath , fileName )
Returns a list of LinearWalkItem s one for each file in the tree whose root is rootPath .
83
21
10,470
def init_live_reload ( run ) : from asyncio import get_event_loop from . _live_reload import start_child loop = get_event_loop ( ) if run : loop . run_until_complete ( start_child ( ) ) else : get_event_loop ( ) . create_task ( start_child ( ) )
Start the live reload task
77
5
10,471
def cmp_name ( first_node , second_node ) : if len ( first_node . children ) == len ( second_node . children ) : for first_child , second_child in zip ( first_node . children , second_node . children ) : for key in first_child . __dict__ . keys ( ) : if key . startswith ( '_' ) : continue if first_child . __dict__ [ key ] != second_child . __dict__ [ key ] : return 1 ret_val = cmp_name ( first_child , second_child ) if ret_val != 0 : return 1 else : return 1 return 0
Compare two names recursively .
143
7
10,472
def parse_division ( l , c , line , root_node , last_section_node ) : name = line name = name . replace ( "." , "" ) # trim whitespaces/tabs between XXX and DIVISION tokens = [ t for t in name . split ( ' ' ) if t ] node = Name ( Name . Type . Division , l , c , '%s %s' % ( tokens [ 0 ] , tokens [ 1 ] ) ) root_node . add_child ( node ) last_div_node = node # do not take previous sections into account if last_section_node : last_section_node . end_line = l last_section_node = None return last_div_node , last_section_node
Extracts a division node from a line
161
9
10,473
def parse_section ( l , c , last_div_node , last_vars , line ) : name = line name = name . replace ( "." , "" ) node = Name ( Name . Type . Section , l , c , name ) last_div_node . add_child ( node ) last_section_node = node # do not take previous var into account last_vars . clear ( ) return last_section_node
Extracts a section node from a line .
94
10
10,474
def parse_pic_field ( l , c , last_section_node , last_vars , line ) : parent_node = None raw_tokens = line . split ( " " ) tokens = [ ] for t in raw_tokens : if not t . isspace ( ) and t != "" : tokens . append ( t ) try : if tokens [ 0 ] . upper ( ) == "FD" : lvl = 1 else : lvl = int ( tokens [ 0 ] , 16 ) name = tokens [ 1 ] except ValueError : return None except IndexError : # line not complete return None name = name . replace ( "." , "" ) if name in ALL_KEYWORDS or name in [ '-' , '/' ] : return None m = re . findall ( r'pic.*\.' , line , re . IGNORECASE ) if m : description = ' ' . join ( [ t for t in m [ 0 ] . split ( ' ' ) if t ] ) else : description = line try : index = description . lower ( ) . index ( 'value' ) except ValueError : description = description . replace ( '.' , '' ) else : description = description [ index : ] . replace ( 'value' , '' ) [ : 80 ] if lvl == int ( '78' , 16 ) : lvl = 1 if lvl == 1 : parent_node = last_section_node last_vars . clear ( ) else : # find parent level levels = sorted ( last_vars . keys ( ) , reverse = True ) for lv in levels : if lv < lvl : parent_node = last_vars [ lv ] break if not parent_node : # malformed code return None # todo: enabled this with an option in pyqode 3.0 # if lvl == int('88', 16): # return None if not name or name . upper ( ) . strip ( ) == 'PIC' : name = 'FILLER' node = Name ( Name . Type . Variable , l , c , name , description ) parent_node . add_child ( node ) last_vars [ lvl ] = node # remove closed variables levels = sorted ( last_vars . keys ( ) , reverse = True ) for l in levels : if l > lvl : last_vars . pop ( l ) return node
Parse a pic field line . Return A VariableNode or None in case of malformed code .
502
20
10,475
def parse_paragraph ( l , c , last_div_node , last_section_node , line ) : if not line . endswith ( '.' ) : return None name = line . replace ( "." , "" ) if name . strip ( ) == '' : return None if name . upper ( ) in ALL_KEYWORDS : return None parent_node = last_div_node if last_section_node is not None : parent_node = last_section_node node = Name ( Name . Type . Paragraph , l , c , name ) parent_node . add_child ( node ) return node
Extracts a paragraph node
132
6
10,476
def find ( self , name ) : for c in self . children : if c . name == name : return c result = c . find ( name ) if result : return result
Finds a possible child whose name match the name parameter .
37
12
10,477
def to_definition ( self ) : icon = { Name . Type . Root : icons . ICON_MIMETYPE , Name . Type . Division : icons . ICON_DIVISION , Name . Type . Section : icons . ICON_SECTION , Name . Type . Variable : icons . ICON_VAR , Name . Type . Paragraph : icons . ICON_FUNC } [ self . node_type ] d = Definition ( self . name , self . line , self . column , icon , self . description ) for ch in self . children : d . add_child ( ch . to_definition ( ) ) return d
Converts the name instance to a pyqode . core . share . Definition
137
16
10,478
def connectDb ( engine = dbeng , user = dbuser , password = dbpwd , host = dbhost , port = dbport , database = dbname , params = "?charset=utf8&use_unicode=1" , echoopt = False ) : return create_engine ( '%s://%s:%s@%s:%s/%s%s' % ( engine , user , password , host , port , database , params ) , echo = echoopt )
Connect to database utility function .
108
6
10,479
def getbranchesurl ( idbranch , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass branchidparam = "" if idbranch == "" else "/" + idbranch url = getmambuurl ( * args , * * kwargs ) + "branches" + branchidparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Branches URL .
206
5
10,480
def getcentresurl ( idcentre , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass centreidparam = "" if idcentre == "" else "/" + idcentre url = getmambuurl ( * args , * * kwargs ) + "centres" + centreidparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Centres URL .
206
5
10,481
def getrepaymentsurl ( idcred , * args , * * kwargs ) : url = getmambuurl ( * args , * * kwargs ) + "loans/" + idcred + "/repayments" return url
Request loan Repayments URL .
55
7
10,482
def getloansurl ( idcred , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "accountState=%s" % kwargs [ "accountState" ] ) except Exception as ex : pass try : getparams . append ( "branchId=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "centreId=%s" % kwargs [ "centreId" ] ) except Exception as ex : pass try : getparams . append ( "creditOfficerUsername=%s" % kwargs [ "creditOfficerUsername" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass idcredparam = "" if idcred == "" else "/" + idcred url = getmambuurl ( * args , * * kwargs ) + "loans" + idcredparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Loans URL .
342
4
10,483
def getgroupurl ( idgroup , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "creditOfficerUsername=%s" % kwargs [ "creditOfficerUsername" ] ) except Exception as ex : pass try : getparams . append ( "branchId=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "centreId=%s" % kwargs [ "centreId" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass groupidparam = "" if idgroup == "" else "/" + idgroup url = getmambuurl ( * args , * * kwargs ) + "groups" + groupidparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Groups URL .
304
4
10,484
def getgrouploansurl ( idgroup , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "accountState=%s" % kwargs [ "accountState" ] ) except Exception as ex : pass groupidparam = "/" + idgroup url = getmambuurl ( * args , * * kwargs ) + "groups" + groupidparam + "/loans" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Group loans URL .
176
5
10,485
def getgroupcustominformationurl ( idgroup , customfield = "" , * args , * * kwargs ) : groupidparam = "/" + idgroup url = getmambuurl ( * args , * * kwargs ) + "groups" + groupidparam + "/custominformation" + ( ( "/" + customfield ) if customfield else "" ) return url
Request Group Custom Information URL .
81
6
10,486
def gettransactionsurl ( idcred , * args , * * kwargs ) : getparams = [ ] if kwargs : try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass url = getmambuurl ( * args , * * kwargs ) + "loans/" + idcred + "/transactions" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request loan Transactions URL .
146
5
10,487
def getclienturl ( idclient , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "firstName=%s" % kwargs [ "firstName" ] ) except Exception as ex : pass try : getparams . append ( "lastName=%s" % kwargs [ "lastName" ] ) except Exception as ex : pass try : getparams . append ( "idDocument=%s" % kwargs [ "idDocument" ] ) except Exception as ex : pass try : getparams . append ( "birthdate=%s" % kwargs [ "birthdate" ] ) except Exception as ex : pass try : getparams . append ( "state=%s" % kwargs [ "state" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass clientidparam = "" if idclient == "" else "/" + idclient url = getmambuurl ( * args , * * kwargs ) + "clients" + clientidparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Clients URL .
355
5
10,488
def getclientloansurl ( idclient , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "accountState=%s" % kwargs [ "accountState" ] ) except Exception as ex : pass clientidparam = "/" + idclient url = getmambuurl ( * args , * * kwargs ) + "clients" + clientidparam + "/loans" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Client loans URL .
175
5
10,489
def getclientcustominformationurl ( idclient , customfield = "" , * args , * * kwargs ) : clientidparam = "/" + idclient url = getmambuurl ( * args , * * kwargs ) + "clients" + clientidparam + "/custominformation" + ( ( "/" + customfield ) if customfield else "" ) return url
Request Client Custom Information URL .
82
6
10,490
def getuserurl ( iduser , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "branchId=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass useridparam = "" if iduser == "" else "/" + iduser url = getmambuurl ( * args , * * kwargs ) + "users" + useridparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Users URL .
234
4
10,491
def getproductsurl ( idproduct , * args , * * kwargs ) : productidparam = "" if idproduct == "" else "/" + idproduct url = getmambuurl ( * args , * * kwargs ) + "loanproducts" + productidparam return url
Request loan Products URL .
63
5
10,492
def gettasksurl ( dummyId = '' , * args , * * kwargs ) : getparams = [ ] if kwargs : try : getparams . append ( "username=%s" % kwargs [ "username" ] ) except Exception as ex : pass try : getparams . append ( "clientid=%s" % kwargs [ "clientId" ] ) except Exception as ex : pass try : getparams . append ( "groupid=%s" % kwargs [ "groupId" ] ) except Exception as ex : pass try : getparams . append ( "status=%s" % kwargs [ "status" ] ) except Exception as ex : getparams . append ( "status=OPEN" ) try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass url = getmambuurl ( * args , * * kwargs ) + "tasks" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Tasks URL .
269
5
10,493
def getactivitiesurl ( dummyId = '' , * args , * * kwargs ) : from datetime import datetime getparams = [ ] if kwargs : try : getparams . append ( "from=%s" % kwargs [ "fromDate" ] ) except Exception as ex : getparams . append ( "from=%s" % '1900-01-01' ) try : getparams . append ( "to=%s" % kwargs [ "toDate" ] ) except Exception as ex : hoy = datetime . now ( ) . strftime ( '%Y-%m-%d' ) getparams . append ( "to=%s" % hoy ) try : getparams . append ( "branchID=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "clientID=%s" % kwargs [ "clientId" ] ) except Exception as ex : pass try : getparams . append ( "centreID=%s" % kwargs [ "centreId" ] ) except Exception as ex : pass try : getparams . append ( "userID=%s" % kwargs [ "userId" ] ) except Exception as ex : pass try : getparams . append ( "loanAccountID=%s" % kwargs [ "loanAccountId" ] ) except Exception as ex : pass try : getparams . append ( "groupID=%s" % kwargs [ "groupId" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass url = getmambuurl ( * args , * * kwargs ) + "activities" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
Request Activities URL .
426
4
10,494
def getrolesurl ( idrole = '' , * args , * * kwargs ) : url = getmambuurl ( * args , * * kwargs ) + "userroles" + ( ( "/" + idrole ) if idrole else "" ) return url
Request Roles URL .
61
5
10,495
def strip_tags ( html ) : from html . parser import HTMLParser class MLStripper ( HTMLParser ) : """Aux class for stripping HTML tags. fields on several Mambu entities come with additional HTML tags (they are rich text fields, I guess that's why). Sometimes they are useless, so stripping them is a good idea. """ def __init__ ( self ) : try : super ( ) . __init__ ( ) # required for python3 except TypeError as e : pass # with python2 raises TypeError self . reset ( ) self . fed = [ ] def handle_data ( self , d ) : self . fed . append ( d ) def get_data ( self ) : return '' . join ( self . fed ) s = MLStripper ( ) s . feed ( html . replace ( "&nbsp;" , " " ) ) return s . get_data ( )
Strips HTML tags from text .
191
8
10,496
def strip_consecutive_repeated_char ( s , ch ) : sdest = "" for i , c in enumerate ( s ) : if i != 0 and s [ i ] == ch and s [ i ] == s [ i - 1 ] : continue sdest += s [ i ] return sdest
Strip characters in a string which are consecutively repeated .
67
12
10,497
def encoded_dict ( in_dict ) : out_dict = { } for k , v in in_dict . items ( ) : if isinstance ( v , unicode ) : if sys . version_info < ( 3 , 0 ) : v = v . encode ( 'utf8' ) elif isinstance ( v , str ) : # Must be encoded in UTF-8 if sys . version_info < ( 3 , 0 ) : v . decode ( 'utf8' ) out_dict [ k ] = v return out_dict
Encode every value of a dict to UTF - 8 .
115
12
10,498
def backup_db(callback, bool_func, output_fname, *args, **kwargs):
    """Request, wait for, and download a Mambu database backup.

    Args:
        callback: URL Mambu will hit when the backup is ready.
        bool_func: polled callable returning True once the backup is ready.
        output_fname: path where the downloaded backup is saved.
    Optional kwargs: verbose, retries (-1 = wait forever),
    force_download_latest, user, pwd.

    Returns a dict with the POSTed data plus a 'latest' flag telling
    whether the downloaded backup is certainly the freshly requested one.

    Raises MambuError / MambuCommError on failures or timeout.
    """
    from datetime import datetime
    verbose = kwargs.get('verbose', False)
    retries = kwargs.get('retries', -1)
    force_download_latest = bool(kwargs.get('force_download_latest', False))
    if verbose:
        log = open('/tmp/log_mambu_backup', 'a')
        log.write(datetime.now().strftime('%Y-%m-%d %H:%M:%S') + " - Mambu DB Backup\n")
        log.flush()
    # BUGFIX: these credentials were previously popped but ignored — both
    # HTTP calls used the module-level apiuser/apipwd regardless of the
    # user/pwd kwargs. They are now actually used for authentication.
    user = kwargs.pop('user', apiuser)
    pwd = kwargs.pop('pwd', apipwd)
    data = {'callback': callback}
    try:
        posturl = iriToUri(getmambuurl(*args, **kwargs) + "database/backup")
        if verbose:
            log.write("open url: " + posturl + "\n")
            log.flush()
        resp = requests.post(
            posturl,
            data=data,
            headers={'content-type': 'application/json'},
            auth=(user, pwd))
    except Exception as ex:
        mess = "Error requesting backup: %s" % repr(ex)
        if verbose:
            log.write(mess + "\n")
            log.close()
        raise MambuError(mess)
    if resp.status_code != 200:
        mess = "Error posting request for backup: %s" % resp.content
        if verbose:
            log.write(mess + "\n")
            log.close()
        raise MambuCommError(mess)
    data['latest'] = True
    # Poll until the backup is ready, counting down retries (-1 = forever).
    while retries and not bool_func():
        if verbose:
            log.write("waiting...\n")
            log.flush()
        sleep(10)
        retries -= 1
        if retries < 0:
            retries = -1
    if not retries:
        mess = "Tired of waiting, giving up..."
        if verbose:
            log.write(mess + "\n")
            log.flush()
        if not force_download_latest:
            if verbose:
                log.close()
            raise MambuError(mess)
        else:
            # Fall back to whatever LATEST backup exists on the server.
            data['latest'] = False
    sleep(30)
    geturl = iriToUri(getmambuurl(*args, **kwargs) + "database/backup/LATEST")
    if verbose:
        log.write("open url: " + geturl + "\n")
        log.flush()
    resp = requests.get(geturl, auth=(user, pwd))
    if resp.status_code != 200:
        mess = "Error getting database backup: %s" % resp.content
        if verbose:
            log.write(mess + "\n")
            log.close()
        raise MambuCommError(mess)
    if verbose:
        log.write("saving...\n")
        log.flush()
    # BUGFIX: the backup payload is binary (resp.content is bytes); writing
    # in text mode raised TypeError on Python 3. Use a context manager too.
    with open(output_fname, "wb") as fw:
        fw.write(resp.content)
    if verbose:
        log.write("DONE!\n")
        log.close()
    return data
Backup Mambu Database via REST API .
766
10
10,499
def memoize ( func ) : cache_name = '__CACHED_{}' . format ( func . __name__ ) def wrapper ( self , * args ) : cache = getattr ( self , cache_name , None ) if cache is None : cache = { } setattr ( self , cache_name , cache ) if args not in cache : cache [ args ] = func ( self , * args ) return cache [ args ] return wrapper
Provides memoization for methods on a specific instance . Results are cached for given parameter list .
95
19