query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Returns the best matching score and the associated label.
def similarity(self, other: Trigram) -> Tuple[float, L]:
    """Return the best matching score and the associated label.

    Each stored ``(trigram, label)`` pair is scored against ``other``
    with the trigram ``%`` operator; the pair with the highest score
    wins.  Raises ``ValueError`` when ``self.trigrams`` is empty
    (inherited from ``max`` on an empty sequence).
    """
    scored = [(trigram % other, label) for trigram, label in self.trigrams]
    return max(scored, key=lambda pair: pair[0])
10,500
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/trigram.py#L159-L167
[ "def", "if_sqlserver_disable_constraints_triggers", "(", "session", ":", "SqlASession", ",", "tablename", ":", "str", ")", "->", "None", ":", "with", "if_sqlserver_disable_constraints", "(", "session", ",", "tablename", ")", ":", "with", "if_sqlserver_disable_triggers", "(", "session", ",", "tablename", ")", ":", "yield" ]
Return the exception class suitable for the specified HTTP status code .
def _exception_for(self, code):
    """Return the exception class suitable for the specified HTTP status code.

    Lookup order:
      1. codes explicitly registered in ``self.errors``;
      2. any 5xx server-error code -> ``exceptions.RemoteServerError``;
      3. anything else -> ``exceptions.UnknownError``.
    """
    if code in self.errors:
        return self.errors[code]
    # Fix: the original used `500 <= code < 599`, which wrongly mapped
    # status 599 (a valid, if unofficial, 5xx code) to UnknownError.
    elif 500 <= code <= 599:
        return exceptions.RemoteServerError
    else:
        return exceptions.UnknownError
10,501
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/http/base.py#L86-L98
[ "def", "clear_intersection", "(", "self", ",", "other_dict", ")", ":", "for", "key", ",", "value", "in", "other_dict", ".", "items", "(", ")", ":", "if", "key", "in", "self", ".", "globals", "and", "self", ".", "globals", "[", "key", "]", "is", "value", ":", "del", "self", ".", "globals", "[", "key", "]", "if", "key", "in", "self", ".", "locals", "and", "self", ".", "locals", "[", "key", "]", "is", "value", ":", "del", "self", ".", "locals", "[", "key", "]", "return", "self" ]
Adds the groups to which this client belongs .
def setGroups(self, *args, **kwargs):
    """Adds the groups to which this client belongs.

    Builds one group object per entry in ``self['groupKeys']``, stores
    the resulting list under ``self['groups']`` and returns the number
    of requests made (one per group fetched).

    If the client has no 'groupKeys' field at all, the KeyError is
    swallowed and an empty list is stored.
    """
    requests = 0
    groups = []
    try:
        for gk in self['groupKeys']:
            try:
                # use the cached group class if it was already resolved
                g = self.mambugroupclass(entid=gk, *args, **kwargs)
            except AttributeError as ae:
                # first call: lazily import MambuGroup and cache it on
                # the instance so later iterations skip the import
                from .mambugroup import MambuGroup
                self.mambugroupclass = MambuGroup
                g = self.mambugroupclass(entid=gk, *args, **kwargs)
            requests += 1
            groups.append(g)
    except KeyError:
        # client has no 'groupKeys' field; treat as "belongs to no groups"
        pass
    self['groups'] = groups
    return requests
10,502
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuclient.py#L136-L162
[ "def", "createExternalTable", "(", "self", ",", "tableName", ",", "path", "=", "None", ",", "source", "=", "None", ",", "schema", "=", "None", ",", "*", "*", "options", ")", ":", "return", "self", ".", "sparkSession", ".", "catalog", ".", "createExternalTable", "(", "tableName", ",", "path", ",", "source", ",", "schema", ",", "*", "*", "options", ")" ]
Adds the branch to which the client belongs .
def setBranch(self, *args, **kwargs):
    """Adds the branch to which the client belongs.

    Fetches the branch identified by ``self['assignedBranchKey']``,
    stores it under ``self['assignedBranch']`` (and its name under
    ``self['assignedBranchName']``) and returns 1, the number of
    requests made to the backend.
    """
    try:
        # use the cached branch class if it was already resolved
        branch = self.mambubranchclass(entid=self['assignedBranchKey'], *args, **kwargs)
    except AttributeError as ae:
        # first call: lazily import MambuBranch and cache it on the instance
        # NOTE(review): this except also catches AttributeErrors raised
        # *inside* the constructor — presumably intentional; confirm.
        from .mambubranch import MambuBranch
        self.mambubranchclass = MambuBranch
        branch = self.mambubranchclass(entid=self['assignedBranchKey'], *args, **kwargs)
    self['assignedBranchName'] = branch['name']
    self['assignedBranch'] = branch
    return 1
10,503
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuclient.py#L164-L177
[ "def", "read_avro", "(", "file_path_or_buffer", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "file_path_or_buffer", ",", "six", ".", "string_types", ")", ":", "with", "open", "(", "file_path_or_buffer", ",", "'rb'", ")", "as", "f", ":", "return", "__file_to_dataframe", "(", "f", ",", "schema", ",", "*", "*", "kwargs", ")", "else", ":", "return", "__file_to_dataframe", "(", "file_path_or_buffer", ",", "schema", ",", "*", "*", "kwargs", ")" ]
Factory of decorators for limiting access to views.
def protected(self, *tests, **kwargs):
    """Factory of decorators for limiting access to views.

    Positional ``tests`` are callables ``test(user, *view_args,
    **view_kwargs)`` that must all return truthy. Recognized keyword
    options are popped from ``kwargs``: ``role``/``roles`` (required
    user roles), ``csrf`` (force-enable/disable CSRF checking),
    ``url_sign_in`` and ``request``. Any remaining keyword arguments
    are treated as user-method tests: ``name=value`` calls
    ``user.name(value, *view_args, **view_kwargs)``.

    The returned decorator wraps a view so that it: requires a signed-in
    user, enforces roles and all tests, and validates the CSRF token on
    non-idempotent requests (unless ``csrf=False``), returning a
    forbidden response on any failure.
    """
    # pop configuration options; whatever remains in kwargs becomes
    # per-user method tests
    _role = kwargs.pop('role', None)
    _roles = kwargs.pop('roles', None) or []
    _csrf = kwargs.pop('csrf', None)
    _url_sign_in = kwargs.pop('url_sign_in', None)
    _request = kwargs.pop('request', None)
    if _role:
        _roles.append(_role)
    _roles = [to_unicode(r) for r in _roles]
    _tests = tests
    _user_tests = kwargs

    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            logger = logging.getLogger(__name__)
            # request resolution order: explicit option, auth object's
            # request, else assume the view received it as first arg
            request = _request or self.request or args and args[0]
            url_sign_in = self._get_url_sign_in(request, _url_sign_in)
            user = self.get_user()
            if not user:
                # anonymous: redirect to sign-in
                return self._login_required(request, url_sign_in)
            # role check only applies when the user model supports it
            if hasattr(user, 'has_role') and _roles:
                if not user.has_role(*_roles):
                    logger.debug(u'User `{0}`: has_role fail'.format(user.login))
                    logger.debug(u'User roles: {0}'.format([r.name for r in user.roles]))
                    return self.wsgi.raise_forbidden()
            # free-standing test callables
            for test in _tests:
                test_pass = test(user, *args, **kwargs)
                if not test_pass:
                    logger.debug(u'User `{0}`: test fail'.format(user.login))
                    return self.wsgi.raise_forbidden()
            # tests implemented as methods on the user object
            for name, value in _user_tests.items():
                user_test = getattr(user, name)
                test_pass = user_test(value, *args, **kwargs)
                if not test_pass:
                    logger.debug(u'User `{0}`: test fail'.format(user.login))
                    return self.wsgi.raise_forbidden()
            # CSRF: checked on non-idempotent requests unless csrf=False,
            # or always when csrf is truthy
            disable_csrf = _csrf == False  # noqa
            if (not self.wsgi.is_idempotent(request) and not disable_csrf) or _csrf:
                if not self.csrf_token_is_valid(request):
                    # NOTE(review): "CSFR" below looks like a typo for
                    # "CSRF"; left untouched since it is runtime text.
                    logger.debug(u'User `{0}`: invalid CSFR token'.format(user.login))
                    return self.wsgi.raise_forbidden("CSFR token isn't valid")
            return f(*args, **kwargs)
        return wrapper
    return decorator
10,504
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/auth_authorization_mixin.py#L31-L117
[ "def", "get_waiting_components", "(", "self", ")", ":", "# type: () -> List[Tuple[str, str, Set[str]]]", "with", "self", ".", "__instances_lock", ":", "result", "=", "[", "]", "for", "name", ",", "(", "context", ",", "_", ")", "in", "self", ".", "__waiting_handlers", ".", "items", "(", ")", ":", "# Compute missing handlers", "missing", "=", "set", "(", "context", ".", "factory_context", ".", "get_handlers_ids", "(", ")", ")", "missing", ".", "difference_update", "(", "self", ".", "_handlers", ".", "keys", "(", ")", ")", "result", ".", "append", "(", "(", "name", ",", "context", ".", "factory_context", ".", "name", ",", "missing", ")", ")", "result", ".", "sort", "(", ")", "return", "result" ]
Replace the Flask app.route or blueprint.route with a version that first applies the protected decorator to the view, so all views are automatically protected.
def replace_flask_route(self, bp, *args, **kwargs):
    """Monkey-patch ``bp.route`` so every registered view is wrapped by
    :meth:`protected` (called with the given ``args``/``kwargs``) before
    being added to the blueprint. The decorator still returns the
    original, unwrapped function.
    """
    guard = self.protected

    def guarded_route(rule, **options):
        """Like :meth:`Flask.route` but for a blueprint; the endpoint for
        :func:`url_for` is prefixed with the blueprint name.
        """
        def register(view):
            endpoint = options.pop("endpoint", view.__name__)
            wrapped = guard(*args, **kwargs)(view)
            bp.add_url_rule(rule, endpoint, wrapped, **options)
            return view
        return register

    bp.route = guarded_route
10,505
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/auth_authorization_mixin.py#L119-L136
[ "def", "percent_cb", "(", "name", ",", "complete", ",", "total", ")", ":", "logger", ".", "debug", "(", "\"{}: {} transferred out of {}\"", ".", "format", "(", "name", ",", "sizeof_fmt", "(", "complete", ")", ",", "sizeof_fmt", "(", "total", ")", ")", ")", "progress", ".", "update_target", "(", "name", ",", "complete", ",", "total", ")" ]
Parse query string using given grammar
def parse_query(self, query):
    """Parse a query string using the given grammar.

    Parses ``query`` against the ``Main`` grammar rule (with no implicit
    whitespace skipping) and converts the resulting parse tree via the
    visitor stored in ``self.converter``.
    """
    tree = pypeg2.parse(query, Main, whitespace="")
    return tree.accept(self.converter)
10,506
https://github.com/inveniosoftware/invenio-query-parser/blob/21a2c36318003ff52d2e18e7196bb420db8ecb4b/invenio_query_parser/contrib/spires/converter.py#L39-L42
[ "def", "adapt_single_html", "(", "html", ")", ":", "html_root", "=", "etree", ".", "fromstring", "(", "html", ")", "metadata", "=", "parse_metadata", "(", "html_root", ".", "xpath", "(", "'//*[@data-type=\"metadata\"]'", ")", "[", "0", "]", ")", "id_", "=", "metadata", "[", "'cnx-archive-uri'", "]", "or", "'book'", "binder", "=", "Binder", "(", "id_", ",", "metadata", "=", "metadata", ")", "nav_tree", "=", "parse_navigation_html_to_tree", "(", "html_root", ",", "id_", ")", "body", "=", "html_root", ".", "xpath", "(", "'//xhtml:body'", ",", "namespaces", "=", "HTML_DOCUMENT_NAMESPACES", ")", "_adapt_single_html_tree", "(", "binder", ",", "body", "[", "0", "]", ",", "nav_tree", ",", "top_metadata", "=", "metadata", ")", "return", "binder" ]
Top-level method to decode a JWT. Takes either a compact-encoded JWT with a single signature, or a multi-sig JWT in the JSON-serialized format.
def decode_token(token):
    """Top-level JWT decoding entry point.

    Accepts either a compact-encoded JWT (passed as a string) with a
    single signature, or a multi-sig JWT already parsed into the
    JSON-serialized format, and dispatches to the matching decoder.
    """
    is_compact = isinstance(token, (unicode, str))
    if is_compact:
        return _decode_token_compact(token)
    return _decode_token_json(token)
10,507
https://github.com/blockstack-packages/jsontokens-py/blob/1a4e71ed63456e8381b7d3fd566ce38e6ebfa7d3/jsontokens/token_verifier.py#L164-L177
[ "def", "perturbParams", "(", "self", ",", "pertSize", "=", "1e-3", ")", ":", "params", "=", "self", ".", "getParams", "(", ")", "self", ".", "setParams", "(", "params", "+", "pertSize", "*", "sp", ".", "randn", "(", "params", ".", "shape", "[", "0", "]", ")", ")" ]
Verify that a JSON-formatted JWT signed by multiple keys is authentic. Optionally set a threshold of required valid signatures with num_required. Return True if valid; return False if not.
def _verify_multi(self, token, verifying_keys, num_required=None):
    """Verify a JSON-formatted JWT signed by multiple keys.

    ``num_required`` sets the threshold of signatures that must verify
    (defaults to all of them). Returns True when the threshold is met,
    False otherwise. Raises ``DecodeError`` on a malformed token or on
    keys from mixed curves.
    """
    headers, payload, raw_signatures, signing_inputs = _unpack_token_json(token)
    if num_required is None:
        # default: every signature must verify
        num_required = len(raw_signatures)
    if num_required > len(verifying_keys):
        # not possible
        return False
    if len(headers) != len(raw_signatures):
        # invalid
        raise DecodeError('Header/signature mismatch')
    verifying_keys = [load_verifying_key(vk, self.crypto_backend) for vk in verifying_keys]
    # sanity check: only support one type of key :(
    for vk in verifying_keys:
        if vk.curve.name != verifying_keys[0].curve.name:
            raise DecodeError("TODO: only support using keys from one curve per JWT")
    # convert raw (r||s) signatures to DER so the backend can verify them
    der_signatures = [raw_to_der_signature(rs, verifying_keys[0].curve) for rs in raw_signatures]
    # verify until threshold is met
    num_verified = 0
    for (signing_input, der_sig) in zip(signing_inputs, der_signatures):
        for vk in verifying_keys:
            verifier = self._get_verifier(vk, der_sig)
            verifier.update(signing_input)
            try:
                verifier.verify()
                num_verified += 1
                # each key may count at most once across all signatures
                verifying_keys.remove(vk)
                break
            except InvalidSignature:
                pass
        if num_verified >= num_required:
            break
    return (num_verified >= num_required)
10,508
https://github.com/blockstack-packages/jsontokens-py/blob/1a4e71ed63456e8381b7d3fd566ce38e6ebfa7d3/jsontokens/token_verifier.py#L219-L266
[ "def", "receive_data_chunk", "(", "self", ",", "raw_data", ",", "start", ")", ":", "self", ".", "file", ".", "write", "(", "raw_data", ")", "# CHANGED: This un-hangs us long enough to keep things rolling.", "eventlet", ".", "sleep", "(", "0", ")" ]
Verify a compact-formatted JWT, or a JSON-formatted JWT signed by multiple keys. Return True if valid; return False if not valid.
def verify(self, token, verifying_key_or_keys, num_required=None):
    """Verify a compact-formatted JWT or a JSON-formatted multi-key JWT.

    A list of keys triggers multi-signature verification (with an
    optional ``num_required`` threshold); a single string key triggers
    single-signature verification. Returns True when valid, False
    otherwise; raises ``ValueError`` on an unsupported key argument.
    """
    if not isinstance(verifying_key_or_keys, (list, str, unicode)):
        raise ValueError("Invalid verifying key(s): expected list or string")
    if isinstance(verifying_key_or_keys, list):
        return self._verify_multi(token, verifying_key_or_keys, num_required=num_required)
    return self._verify_single(token, str(verifying_key_or_keys))
10,509
https://github.com/blockstack-packages/jsontokens-py/blob/1a4e71ed63456e8381b7d3fd566ce38e6ebfa7d3/jsontokens/token_verifier.py#L269-L284
[ "def", "future_set_exception_unless_cancelled", "(", "future", ":", "\"Union[futures.Future[_T], Future[_T]]\"", ",", "exc", ":", "BaseException", ")", "->", "None", ":", "if", "not", "future", ".", "cancelled", "(", ")", ":", "future", ".", "set_exception", "(", "exc", ")", "else", ":", "app_log", ".", "error", "(", "\"Exception after Future was cancelled\"", ",", "exc_info", "=", "exc", ")" ]
activates the script command
def activate_script(self):
    """Activates the script command.

    Registers the "script" scope and loads all script definition files
    matching the configured glob patterns (project-local ./scripts and
    the user's ~/.cloudmesh/scripts directory).
    """
    # must be rethought
    # ./scripts
    # deploydir/./scripts
    self._add_scope("script")
    self.scripts = {}
    self.script_files = ["./scripts/script_*.txt", "~/.cloudmesh/scripts/script_*.txt"]
    self._load_scripts(self.script_files)
10,510
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/script.py#L16-L26
[ "def", "not_storable", "(", "_type", ")", ":", "return", "Storable", "(", "_type", ",", "handlers", "=", "StorableHandler", "(", "poke", "=", "fake_poke", ",", "peek", "=", "fail_peek", "(", "_type", ")", ")", ")" ]
Creates the given path as a file also creating intermediate directories if required .
def touch(path):
    """Creates the given path as an empty file, also creating
    intermediate directories if required.

    NOTE(review): unlike POSIX ``touch``, opening with "wb" truncates
    an already-existing file instead of just updating its mtime —
    confirm this is the intended semantics.
    """
    parentDirPath = os.path.dirname(path)
    # presumably creates the directory chain without raising if it
    # already exists — verify against PathOperations.safeMakeDirs
    PathOperations.safeMakeDirs(parentDirPath)
    with open(path, "wb"):
        pass
10,511
https://github.com/giancosta86/Iris/blob/b3d92cca5cce3653519bd032346b211c46a57d05/info/gianlucacosta/iris/io/utils.py#L47-L57
[ "def", "register_keepalive", "(", "self", ",", "cmd", ",", "callback", ")", ":", "regid", "=", "random", ".", "random", "(", ")", "if", "self", ".", "_customkeepalives", "is", "None", ":", "self", ".", "_customkeepalives", "=", "{", "regid", ":", "(", "cmd", ",", "callback", ")", "}", "else", ":", "while", "regid", "in", "self", ".", "_customkeepalives", ":", "regid", "=", "random", ".", "random", "(", ")", "self", ".", "_customkeepalives", "[", "regid", "]", "=", "(", "cmd", ",", "callback", ")", "return", "regid" ]
Deletes a tree and returns true if it was correctly deleted
def safeRmTree(rootPath):
    """Delete the tree rooted at *rootPath*, suppressing removal errors.

    Returns True when the path no longer exists afterwards, False when
    some of it could not be removed.
    """
    # ignore_errors=True: a missing tree or an undeletable entry never raises
    shutil.rmtree(rootPath, ignore_errors=True)
    return not os.path.exists(rootPath)
10,512
https://github.com/giancosta86/Iris/blob/b3d92cca5cce3653519bd032346b211c46a57d05/info/gianlucacosta/iris/io/utils.py#L61-L67
[ "def", "_sleep", "(", "current_sleep", ",", "max_sleep", "=", "_MAX_SLEEP", ",", "multiplier", "=", "_MULTIPLIER", ")", ":", "actual_sleep", "=", "random", ".", "uniform", "(", "0.0", ",", "current_sleep", ")", "time", ".", "sleep", "(", "actual_sleep", ")", "return", "min", "(", "multiplier", "*", "current_sleep", ",", "max_sleep", ")" ]
Returns a sequence of LinearWalkItems, one for each file in the tree whose root is rootPath.
def linearWalk(rootPath, currentDirFilter=None):
    """Yield a LinearWalkItem for every file in the tree rooted at
    *rootPath*.

    ``currentDirFilter(dirPath, dirNames, fileNames)`` may be supplied
    to skip directories: when it returns falsy, that directory's files
    are not yielded. The filter receives os.walk's live ``dirNames``
    list, so it can also prune the walk by mutating it.
    """
    for dirPath, dirNames, fileNames in os.walk(rootPath):
        keep = currentDirFilter is None or currentDirFilter(dirPath, dirNames, fileNames)
        if not keep:
            continue
        for fileName in fileNames:
            yield LinearWalkItem(dirPath, fileName)
10,513
https://github.com/giancosta86/Iris/blob/b3d92cca5cce3653519bd032346b211c46a57d05/info/gianlucacosta/iris/io/utils.py#L71-L99
[ "def", "_merge_meta_data", "(", "cls", ",", "first", ":", "\"HistogramBase\"", ",", "second", ":", "\"HistogramBase\"", ")", "->", "dict", ":", "keys", "=", "set", "(", "first", ".", "_meta_data", ".", "keys", "(", ")", ")", "keys", "=", "keys", ".", "union", "(", "set", "(", "second", ".", "_meta_data", ".", "keys", "(", ")", ")", ")", "return", "{", "key", ":", "(", "first", ".", "_meta_data", ".", "get", "(", "key", ",", "None", ")", "if", "first", ".", "_meta_data", ".", "get", "(", "key", ",", "None", ")", "==", "second", ".", "_meta_data", ".", "get", "(", "key", ",", "None", ")", "else", "None", ")", "for", "key", "in", "keys", "}" ]
Start the live reload task
def init_live_reload(run):
    """Start the live reload task.

    When ``run`` is truthy, blocks until the reload child has started;
    otherwise schedules it as a background task on the current event
    loop and returns immediately.
    """
    from asyncio import get_event_loop
    from ._live_reload import start_child
    loop = get_event_loop()
    if run:
        loop.run_until_complete(start_child())
    else:
        # NOTE(review): fetches the loop a second time instead of using
        # `loop`; same object in practice, but redundant
        get_event_loop().create_task(start_child())
10,514
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/cli/_base.py#L20-L34
[ "def", "catalogFactory", "(", "name", ",", "*", "*", "kwargs", ")", ":", "fn", "=", "lambda", "member", ":", "inspect", ".", "isclass", "(", "member", ")", "and", "member", ".", "__module__", "==", "__name__", "catalogs", "=", "odict", "(", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "fn", ")", ")", "if", "name", "not", "in", "list", "(", "catalogs", ".", "keys", "(", ")", ")", ":", "msg", "=", "\"%s not found in catalogs:\\n %s\"", "%", "(", "name", ",", "list", "(", "kernels", ".", "keys", "(", ")", ")", ")", "logger", ".", "error", "(", "msg", ")", "msg", "=", "\"Unrecognized catalog: %s\"", "%", "name", "raise", "Exception", "(", "msg", ")", "return", "catalogs", "[", "name", "]", "(", "*", "*", "kwargs", ")" ]
Compare two names recursively.
def cmp_name(first_node, second_node):
    """Recursively compare two name nodes by their children.

    Children are compared pairwise on every non-underscore instance
    attribute, then recursed into. Returns 0 when the two subtrees
    match, 1 otherwise. Note that the top-level nodes' own attributes
    are never compared — only their descendants'.
    """
    if len(first_node.children) != len(second_node.children):
        return 1
    for left, right in zip(first_node.children, second_node.children):
        for attr, value in left.__dict__.items():
            if attr.startswith('_'):
                # private attributes do not participate in the comparison
                continue
            if value != right.__dict__[attr]:
                return 1
        if cmp_name(left, right) != 0:
            return 1
    return 0
10,515
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/names.py#L99-L122
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
Extracts a division node from a line
def parse_division(l, c, line, root_node, last_section_node):
    """Extract a division node from *line*.

    Builds a Name node from the first two words of the line (periods
    stripped), attaches it to ``root_node``, and closes any currently
    open section. Returns ``(division_node, last_section_node)`` where
    the section is reset to None once closed.
    """
    cleaned = line.replace(".", "")
    # trim whitespace/tabs between the XXX and DIVISION tokens
    words = [w for w in cleaned.split(' ') if w]
    division = Name(Name.Type.Division, l, c, '%s %s' % (words[0], words[1]))
    root_node.add_child(division)
    # do not take previous sections into account
    if last_section_node:
        last_section_node.end_line = l
        last_section_node = None
    return division, last_section_node
10,516
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/names.py#L125-L150
[ "def", "emit", "(", "_", ")", ":", "if", "not", "initialized", ":", "raise", "NotInitialized", "view", "=", "{", "'version'", ":", "__version__", ",", "'counters'", ":", "{", "}", ",", "'gauges'", ":", "{", "}", ",", "'histograms'", ":", "{", "}", ",", "'meters'", ":", "{", "}", ",", "'timers'", ":", "{", "}", ",", "}", "for", "(", "ty", ",", "module", ",", "name", ")", ",", "metric", "in", "six", ".", "iteritems", "(", "all_metrics", ")", ":", "view", "[", "ty", "]", "[", "'%s.%s'", "%", "(", "module", ",", "name", ")", "]", "=", "metric", ".", "view", "(", ")", "marshalled_view", "=", "marshal", ".", "dumps", "(", "view", ")", "if", "len", "(", "marshalled_view", ")", ">", "MAX_MARSHALLED_VIEW_SIZE", ":", "log", ".", "warn", "(", "'Marshalled length too large, got %d, max %d. '", "'Try recording fewer metrics or increasing '", "'MAX_MARSHALLED_VIEW_SIZE'", "%", "(", "len", "(", "marshalled_view", ")", ",", "MAX_MARSHALLED_VIEW_SIZE", ")", ")", "return", "marshalled_metrics_mmap", ".", "seek", "(", "0", ")", "try", ":", "# Reading and writing to/from an mmap'ed buffer is not guaranteed", "# to be atomic, so we must serialize access to it.", "uwsgi", ".", "lock", "(", ")", "marshalled_metrics_mmap", ".", "write", "(", "marshalled_view", ")", "finally", ":", "uwsgi", ".", "unlock", "(", ")" ]
Extracts a section node from a line .
def parse_section(l, c, last_div_node, last_vars, line):
    """Extract a section node from *line*.

    Attaches a new Name node (periods stripped from the line) under the
    current division and resets the variable table, since variables
    from the previous section are no longer in scope.
    """
    section = Name(Name.Type.Section, l, c, line.replace(".", ""))
    last_div_node.add_child(section)
    # do not take previous variables into account
    last_vars.clear()
    return section
10,517
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/names.py#L153-L174
[ "def", "_convert_asset_timestamp_fields", "(", "dict_", ")", ":", "for", "key", "in", "_asset_timestamp_fields", "&", "viewkeys", "(", "dict_", ")", ":", "value", "=", "pd", ".", "Timestamp", "(", "dict_", "[", "key", "]", ",", "tz", "=", "'UTC'", ")", "dict_", "[", "key", "]", "=", "None", "if", "isnull", "(", "value", ")", "else", "value", "return", "dict_" ]
Parse a pic field line . Return A VariableNode or None in case of malformed code .
def parse_pic_field(l, c, last_section_node, last_vars, line):
    """Parse a pic field line.

    Returns a Variable Name node attached to its level-parent (or the
    current section for level-01/FD entries), or None in case of
    malformed code.
    """
    parent_node = None
    raw_tokens = line.split(" ")
    tokens = []
    for t in raw_tokens:
        if not t.isspace() and t != "":
            tokens.append(t)
    try:
        if tokens[0].upper() == "FD":
            # file descriptor entries behave like level-01 items
            lvl = 1
        else:
            # NOTE(review): level numbers are parsed base-16 here (so
            # '78' -> 120, '88' -> 136) — presumably deliberate, but
            # confirm against the rest of the parser
            lvl = int(tokens[0], 16)
        name = tokens[1]
    except ValueError:
        return None
    except IndexError:
        # line not complete
        return None
    name = name.replace(".", "")
    if name in ALL_KEYWORDS or name in ['-', '/']:
        # keyword or continuation/comment marker: not a variable
        return None
    # use the "PIC ... ." clause as the description when present
    m = re.findall(r'pic.*\.', line, re.IGNORECASE)
    if m:
        description = ' '.join([t for t in m[0].split(' ') if t])
    else:
        description = line
    try:
        index = description.lower().index('value')
    except ValueError:
        description = description.replace('.', '')
    else:
        # keep only the VALUE part, capped at 80 characters
        description = description[index:].replace('value', '')[:80]
    if lvl == int('78', 16):
        # constants (level 78) are treated as top-level items
        lvl = 1
    if lvl == 1:
        parent_node = last_section_node
        last_vars.clear()
    else:
        # find parent level: closest open variable with a smaller level
        levels = sorted(last_vars.keys(), reverse=True)
        for lv in levels:
            if lv < lvl:
                parent_node = last_vars[lv]
                break
        if not parent_node:
            # malformed code
            return None
    # todo: enabled this with an option in pyqode 3.0
    # if lvl == int('88', 16):
    #     return None
    if not name or name.upper().strip() == 'PIC':
        name = 'FILLER'
    node = Name(Name.Type.Variable, l, c, name, description)
    parent_node.add_child(node)
    last_vars[lvl] = node
    # remove closed variables (deeper levels are out of scope now)
    # NOTE(review): this loop reuses `l`, shadowing the line parameter
    levels = sorted(last_vars.keys(), reverse=True)
    for l in levels:
        if l > lvl:
            last_vars.pop(l)
    return node
10,518
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/names.py#L177-L248
[ "def", "_initialized", "(", "self", ",", "partitioner", ")", ":", "self", ".", "_partitioner", "=", "partitioner", "self", ".", "_thimble", "=", "Thimble", "(", "self", ".", "reactor", ",", "self", ".", "pool", ",", "partitioner", ",", "_blocking_partitioner_methods", ")", "self", ".", "_state", "=", "None" ]
Extracts a paragraph node
def parse_paragraph(l, c, last_div_node, last_section_node, line):
    """Extract a paragraph node from *line*.

    A valid paragraph line ends with a period, is non-blank once
    periods are removed, and is not a COBOL keyword. The node is
    attached to the current section when one is open, otherwise to the
    current division. Returns the new node, or None when the line is
    not a paragraph.
    """
    if not line.endswith('.'):
        return None
    name = line.replace(".", "")
    if not name.strip():
        return None
    if name.upper() in ALL_KEYWORDS:
        return None
    parent = last_div_node if last_section_node is None else last_section_node
    paragraph = Name(Name.Type.Paragraph, l, c, name)
    parent.add_child(paragraph)
    return paragraph
10,519
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/names.py#L251-L273
[ "def", "_save_files", "(", "self", ",", "data", ",", "dtype_out_time", ")", ":", "path", "=", "self", ".", "path_out", "[", "dtype_out_time", "]", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "dir_out", ")", ":", "os", ".", "makedirs", "(", "self", ".", "dir_out", ")", "if", "'reg'", "in", "dtype_out_time", ":", "try", ":", "reg_data", "=", "xr", ".", "open_dataset", "(", "path", ")", "except", "(", "EOFError", ",", "RuntimeError", ",", "IOError", ")", ":", "reg_data", "=", "xr", ".", "Dataset", "(", ")", "reg_data", ".", "update", "(", "data", ")", "data_out", "=", "reg_data", "else", ":", "data_out", "=", "data", "if", "isinstance", "(", "data_out", ",", "xr", ".", "DataArray", ")", ":", "data_out", "=", "xr", ".", "Dataset", "(", "{", "self", ".", "name", ":", "data_out", "}", ")", "data_out", ".", "to_netcdf", "(", "path", ",", "engine", "=", "'netcdf4'", ",", "format", "=", "'NETCDF3_64BIT'", ")" ]
Finds a possible child whose name match the name parameter .
def find(self, name):
    """Depth-first search for a descendant whose name matches *name*.

    Returns the first matching node, or None (implicitly) when no
    descendant matches.
    """
    for child in self.children:
        if child.name == name:
            return child
        match = child.find(name)
        if match:
            return match
10,520
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/names.py#L55-L69
[ "def", "urlstate", "(", "self", ",", "encryption_key", ")", ":", "lzma", "=", "LZMACompressor", "(", ")", "urlstate_data", "=", "json", ".", "dumps", "(", "self", ".", "_state_dict", ")", "urlstate_data", "=", "lzma", ".", "compress", "(", "urlstate_data", ".", "encode", "(", "\"UTF-8\"", ")", ")", "urlstate_data", "+=", "lzma", ".", "flush", "(", ")", "urlstate_data", "=", "_AESCipher", "(", "encryption_key", ")", ".", "encrypt", "(", "urlstate_data", ")", "lzma", "=", "LZMACompressor", "(", ")", "urlstate_data", "=", "lzma", ".", "compress", "(", "urlstate_data", ")", "urlstate_data", "+=", "lzma", ".", "flush", "(", ")", "urlstate_data", "=", "base64", ".", "urlsafe_b64encode", "(", "urlstate_data", ")", "return", "urlstate_data", ".", "decode", "(", "\"utf-8\"", ")" ]
Converts the name instance to a pyqode . core . share . Definition
def to_definition(self):
    """Converts the name instance to a pyqode.core.share.Definition.

    Picks an icon from the node type, builds the Definition from this
    node's position/description, and recursively converts all children.
    """
    # map node type -> outline icon
    icon = {
        Name.Type.Root: icons.ICON_MIMETYPE,
        Name.Type.Division: icons.ICON_DIVISION,
        Name.Type.Section: icons.ICON_SECTION,
        Name.Type.Variable: icons.ICON_VAR,
        Name.Type.Paragraph: icons.ICON_FUNC
    }[self.node_type]
    d = Definition(self.name, self.line, self.column, icon, self.description)
    for ch in self.children:
        d.add_child(ch.to_definition())
    return d
10,521
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/names.py#L82-L96
[ "def", "_preprocess_movie_lens", "(", "ratings_df", ")", ":", "ratings_df", "[", "\"data\"", "]", "=", "1.0", "num_timestamps", "=", "ratings_df", "[", "[", "\"userId\"", ",", "\"timestamp\"", "]", "]", ".", "groupby", "(", "\"userId\"", ")", ".", "nunique", "(", ")", "last_user_timestamp", "=", "ratings_df", "[", "[", "\"userId\"", ",", "\"timestamp\"", "]", "]", ".", "groupby", "(", "\"userId\"", ")", ".", "max", "(", ")", "ratings_df", "[", "\"numberOfTimestamps\"", "]", "=", "ratings_df", "[", "\"userId\"", "]", ".", "apply", "(", "lambda", "x", ":", "num_timestamps", "[", "\"timestamp\"", "]", "[", "x", "]", ")", "ratings_df", "[", "\"lastTimestamp\"", "]", "=", "ratings_df", "[", "\"userId\"", "]", ".", "apply", "(", "lambda", "x", ":", "last_user_timestamp", "[", "\"timestamp\"", "]", "[", "x", "]", ")", "ratings_df", "=", "ratings_df", "[", "ratings_df", "[", "\"numberOfTimestamps\"", "]", ">", "2", "]", "ratings_df", "=", "_create_row_col_indices", "(", "ratings_df", ")", "train_ratings_df", "=", "ratings_df", "[", "ratings_df", "[", "\"timestamp\"", "]", "<", "ratings_df", "[", "\"lastTimestamp\"", "]", "]", "test_ratings_df", "=", "ratings_df", "[", "ratings_df", "[", "\"timestamp\"", "]", "==", "ratings_df", "[", "\"lastTimestamp\"", "]", "]", "return", "ratings_df", ",", "train_ratings_df", ",", "test_ratings_df" ]
Connect to database utility function .
def connectDb(engine=dbeng, user=dbuser, password=dbpwd, host=dbhost, port=dbport, database=dbname, params="?charset=utf8&use_unicode=1", echoopt=False):
    """Connect to database utility function.

    Builds an SQLAlchemy URL of the form
    ``engine://user:password@host:port/database?params`` from the
    module-level defaults (dbeng, dbuser, ...) unless overridden, and
    returns the resulting engine. ``echoopt`` enables SQLAlchemy's
    statement echoing.
    """
    return create_engine('%s://%s:%s@%s:%s/%s%s' % (engine, user, password, host, port, database, params), echo=echoopt)
10,522
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L67-L81
[ "def", "_ProcessRegistryKeySource", "(", "self", ",", "source", ")", ":", "keys", "=", "source", ".", "base_source", ".", "attributes", ".", "get", "(", "\"keys\"", ",", "[", "]", ")", "if", "not", "keys", ":", "return", "interpolated_paths", "=", "artifact_utils", ".", "InterpolateListKbAttributes", "(", "input_list", "=", "keys", ",", "knowledge_base", "=", "self", ".", "knowledge_base", ",", "ignore_errors", "=", "self", ".", "ignore_interpolation_errors", ")", "glob_expressions", "=", "map", "(", "rdf_paths", ".", "GlobExpression", ",", "interpolated_paths", ")", "patterns", "=", "[", "]", "for", "pattern", "in", "glob_expressions", ":", "patterns", ".", "extend", "(", "pattern", ".", "Interpolate", "(", "knowledge_base", "=", "self", ".", "knowledge_base", ")", ")", "patterns", ".", "sort", "(", "key", "=", "len", ",", "reverse", "=", "True", ")", "file_finder_action", "=", "rdf_file_finder", ".", "FileFinderAction", ".", "Stat", "(", ")", "request", "=", "rdf_file_finder", ".", "FileFinderArgs", "(", "paths", "=", "patterns", ",", "action", "=", "file_finder_action", ",", "follow_links", "=", "True", ",", "pathtype", "=", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", "action", "=", "vfs_file_finder", ".", "RegistryKeyFromClient", "yield", "action", ",", "request" ]
Request Branches URL .
def getbranchesurl(idbranch, *args, **kwargs):
    """Request Branches URL.

    Builds the Mambu branches endpoint URL, optionally targeting one
    branch (``idbranch``) and honoring the ``fullDetails``, ``offset``
    and ``limit`` keyword options as query-string parameters.
    """
    getparams = []
    if kwargs:
        if "fullDetails" in kwargs:
            getparams.append(
                "fullDetails=true" if kwargs["fullDetails"] == True else "fullDetails=false")  # noqa
        for opt in ("offset", "limit"):
            if opt in kwargs:
                getparams.append("%s=%s" % (opt, kwargs[opt]))
    branchidparam = "" if idbranch == "" else "/" + idbranch
    query = "" if not getparams else "?" + "&".join(getparams)
    return getmambuurl(*args, **kwargs) + "branches" + branchidparam + query
10,523
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L100-L135
[ "def", "create_pgroup_snapshot", "(", "self", ",", "source", ",", "*", "*", "kwargs", ")", ":", "# In REST 1.4, support was added for snapshotting multiple pgroups. As a", "# result, the endpoint response changed from an object to an array of", "# objects. To keep the response type consistent between REST versions,", "# we unbox the response when creating a single snapshot.", "result", "=", "self", ".", "create_pgroup_snapshots", "(", "[", "source", "]", ",", "*", "*", "kwargs", ")", "if", "self", ".", "_rest_version", ">=", "LooseVersion", "(", "\"1.4\"", ")", ":", "headers", "=", "result", ".", "headers", "result", "=", "ResponseDict", "(", "result", "[", "0", "]", ")", "result", ".", "headers", "=", "headers", "return", "result" ]
Request Centres URL .
def getcentresurl(idcentre, *args, **kwargs):
    """Request Centres URL.

    Builds the Mambu centres endpoint URL, optionally targeting one
    centre (``idcentre``) and honoring the ``fullDetails``, ``offset``
    and ``limit`` keyword options as query-string parameters.
    """
    getparams = []
    if kwargs:
        if "fullDetails" in kwargs:
            getparams.append(
                "fullDetails=true" if kwargs["fullDetails"] == True else "fullDetails=false")  # noqa
        for opt in ("offset", "limit"):
            if opt in kwargs:
                getparams.append("%s=%s" % (opt, kwargs[opt]))
    centreidparam = "" if idcentre == "" else "/" + idcentre
    query = "" if not getparams else "?" + "&".join(getparams)
    return getmambuurl(*args, **kwargs) + "centres" + centreidparam + query
10,524
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L137-L172
[ "def", "get_devices", "(", "filename", ",", "bundled", ")", ":", "loader", "=", "Loader", "(", "filename", ",", "bundled", ")", "data", "=", "loader", ".", "data", "devices", "=", "Devices", "(", ")", "# Iterate through the resources and generate each individual device", "# on demand.", "for", "resource_name", ",", "resource_dict", "in", "data", ".", "get", "(", "'resources'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "device_name", "=", "resource_dict", "[", "'device'", "]", "dd", "=", "loader", ".", "get_device_dict", "(", "device_name", ",", "resource_dict", ".", "get", "(", "'filename'", ",", "None", ")", ",", "resource_dict", ".", "get", "(", "'bundled'", ",", "False", ")", ",", "SPEC_VERSION_TUPLE", "[", "0", "]", ")", "devices", ".", "add_device", "(", "resource_name", ",", "get_device", "(", "device_name", ",", "dd", ",", "loader", ",", "resource_dict", ")", ")", "return", "devices" ]
Request loan Repayments URL .
def getrepaymentsurl ( idcred , * args , * * kwargs ) : url = getmambuurl ( * args , * * kwargs ) + "loans/" + idcred + "/repayments" return url
10,525
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L174-L195
[ "def", "to_workspace_value", "(", "self", ",", "result", ",", "assets", ")", ":", "if", "self", ".", "dtype", "==", "int64_dtype", ":", "return", "super", "(", "Classifier", ",", "self", ")", ".", "to_workspace_value", "(", "result", ",", "assets", ")", "assert", "isinstance", "(", "result", ".", "values", ",", "pd", ".", "Categorical", ")", ",", "(", "'Expected a Categorical, got %r.'", "%", "type", "(", "result", ".", "values", ")", ")", "with_missing", "=", "pd", ".", "Series", "(", "data", "=", "pd", ".", "Categorical", "(", "result", ".", "values", ",", "result", ".", "values", ".", "categories", ".", "union", "(", "[", "self", ".", "missing_value", "]", ")", ",", ")", ",", "index", "=", "result", ".", "index", ",", ")", "return", "LabelArray", "(", "super", "(", "Classifier", ",", "self", ")", ".", "to_workspace_value", "(", "with_missing", ",", "assets", ",", ")", ",", "self", ".", "missing_value", ",", ")" ]
Request Loans URL .
def getloansurl ( idcred , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "accountState=%s" % kwargs [ "accountState" ] ) except Exception as ex : pass try : getparams . append ( "branchId=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "centreId=%s" % kwargs [ "centreId" ] ) except Exception as ex : pass try : getparams . append ( "creditOfficerUsername=%s" % kwargs [ "creditOfficerUsername" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass idcredparam = "" if idcred == "" else "/" + idcred url = getmambuurl ( * args , * * kwargs ) + "loans" + idcredparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,526
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L197-L252
[ "def", "_generate_noise_system", "(", "dimensions_tr", ",", "spatial_sd", ",", "temporal_sd", ",", "spatial_noise_type", "=", "'gaussian'", ",", "temporal_noise_type", "=", "'gaussian'", ",", ")", ":", "def", "noise_volume", "(", "dimensions", ",", "noise_type", ",", ")", ":", "if", "noise_type", "==", "'rician'", ":", "# Generate the Rician noise (has an SD of 1)", "noise", "=", "stats", ".", "rice", ".", "rvs", "(", "b", "=", "0", ",", "loc", "=", "0", ",", "scale", "=", "1.527", ",", "size", "=", "dimensions", ")", "elif", "noise_type", "==", "'exponential'", ":", "# Make an exponential distribution (has an SD of 1)", "noise", "=", "stats", ".", "expon", ".", "rvs", "(", "0", ",", "scale", "=", "1", ",", "size", "=", "dimensions", ")", "elif", "noise_type", "==", "'gaussian'", ":", "noise", "=", "np", ".", "random", ".", "randn", "(", "np", ".", "prod", "(", "dimensions", ")", ")", ".", "reshape", "(", "dimensions", ")", "# Return the noise", "return", "noise", "# Get just the xyz coordinates", "dimensions", "=", "np", ".", "asarray", "(", "[", "dimensions_tr", "[", "0", "]", ",", "dimensions_tr", "[", "1", "]", ",", "dimensions_tr", "[", "2", "]", ",", "1", "]", ")", "# Generate noise", "spatial_noise", "=", "noise_volume", "(", "dimensions", ",", "spatial_noise_type", ")", "temporal_noise", "=", "noise_volume", "(", "dimensions_tr", ",", "temporal_noise_type", ")", "# Make the system noise have a specific spatial variability", "spatial_noise", "*=", "spatial_sd", "# Set the size of the noise", "temporal_noise", "*=", "temporal_sd", "# The mean in time of system noise needs to be zero, so subtract the", "# means of the temporal noise in time", "temporal_noise_mean", "=", "np", ".", "mean", "(", "temporal_noise", ",", "3", ")", ".", "reshape", "(", "dimensions", "[", "0", "]", ",", "dimensions", "[", "1", "]", ",", "dimensions", "[", "2", "]", ",", "1", ")", "temporal_noise", "=", "temporal_noise", "-", "temporal_noise_mean", "# Save the combination", 
"system_noise", "=", "spatial_noise", "+", "temporal_noise", "return", "system_noise" ]
Request Groups URL .
def getgroupurl ( idgroup , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "creditOfficerUsername=%s" % kwargs [ "creditOfficerUsername" ] ) except Exception as ex : pass try : getparams . append ( "branchId=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "centreId=%s" % kwargs [ "centreId" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass groupidparam = "" if idgroup == "" else "/" + idgroup url = getmambuurl ( * args , * * kwargs ) + "groups" + groupidparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,527
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L254-L303
[ "def", "delete_everything", "(", "self", ")", ":", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "basebackups", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of a base backup'", ")", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "wal_directory", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of wal logs'", ")", "if", "self", ".", "deleter", ":", "self", ".", "deleter", ".", "close", "(", ")" ]
Request Group loans URL .
def getgrouploansurl ( idgroup , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "accountState=%s" % kwargs [ "accountState" ] ) except Exception as ex : pass groupidparam = "/" + idgroup url = getmambuurl ( * args , * * kwargs ) + "groups" + groupidparam + "/loans" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,528
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L305-L344
[ "def", "create_event_subscription", "(", "self", ",", "instance", ",", "on_data", ",", "timeout", "=", "60", ")", ":", "manager", "=", "WebSocketSubscriptionManager", "(", "self", ",", "resource", "=", "'events'", ")", "# Represent subscription as a future", "subscription", "=", "WebSocketSubscriptionFuture", "(", "manager", ")", "wrapped_callback", "=", "functools", ".", "partial", "(", "_wrap_callback_parse_event", ",", "on_data", ")", "manager", ".", "open", "(", "wrapped_callback", ",", "instance", ")", "# Wait until a reply or exception is received", "subscription", ".", "reply", "(", "timeout", "=", "timeout", ")", "return", "subscription" ]
Request Group Custom Information URL .
def getgroupcustominformationurl ( idgroup , customfield = "" , * args , * * kwargs ) : groupidparam = "/" + idgroup url = getmambuurl ( * args , * * kwargs ) + "groups" + groupidparam + "/custominformation" + ( ( "/" + customfield ) if customfield else "" ) return url
10,529
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L346-L356
[ "def", "include", "(", "context", ",", "bundle_name", ",", "version", ")", ":", "store", "=", "Store", "(", "context", ".", "obj", "[", "'database'", "]", ",", "context", ".", "obj", "[", "'root'", "]", ")", "if", "version", ":", "version_obj", "=", "store", ".", "Version", ".", "get", "(", "version", ")", "if", "version_obj", "is", "None", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "'version not found'", ",", "fg", "=", "'red'", ")", ")", "else", ":", "bundle_obj", "=", "store", ".", "bundle", "(", "bundle_name", ")", "if", "bundle_obj", "is", "None", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "'bundle not found'", ",", "fg", "=", "'red'", ")", ")", "version_obj", "=", "bundle_obj", ".", "versions", "[", "0", "]", "try", ":", "include_version", "(", "context", ".", "obj", "[", "'root'", "]", ",", "version_obj", ")", "except", "VersionIncludedError", "as", "error", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "error", ".", "message", ",", "fg", "=", "'red'", ")", ")", "context", ".", "abort", "(", ")", "version_obj", ".", "included_at", "=", "dt", ".", "datetime", ".", "now", "(", ")", "store", ".", "commit", "(", ")", "click", ".", "echo", "(", "click", ".", "style", "(", "'included all files!'", ",", "fg", "=", "'green'", ")", ")" ]
Request loan Transactions URL .
def gettransactionsurl ( idcred , * args , * * kwargs ) : getparams = [ ] if kwargs : try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass url = getmambuurl ( * args , * * kwargs ) + "loans/" + idcred + "/transactions" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,530
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L358-L391
[ "def", "_set_slots_to_null", "(", "self", ",", "cls", ")", ":", "if", "hasattr", "(", "cls", ",", "\"__slots__\"", ")", ":", "for", "s", "in", "cls", ".", "__slots__", ":", "self", ".", "__setattr__", "(", "s", ",", "Null", ")", "for", "b", "in", "cls", ".", "__bases__", ":", "self", ".", "_set_slots_to_null", "(", "b", ")" ]
Request Clients URL .
def getclienturl ( idclient , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "firstName=%s" % kwargs [ "firstName" ] ) except Exception as ex : pass try : getparams . append ( "lastName=%s" % kwargs [ "lastName" ] ) except Exception as ex : pass try : getparams . append ( "idDocument=%s" % kwargs [ "idDocument" ] ) except Exception as ex : pass try : getparams . append ( "birthdate=%s" % kwargs [ "birthdate" ] ) except Exception as ex : pass try : getparams . append ( "state=%s" % kwargs [ "state" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass clientidparam = "" if idclient == "" else "/" + idclient url = getmambuurl ( * args , * * kwargs ) + "clients" + clientidparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,531
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L393-L452
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Request Client loans URL .
def getclientloansurl ( idclient , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "accountState=%s" % kwargs [ "accountState" ] ) except Exception as ex : pass clientidparam = "/" + idclient url = getmambuurl ( * args , * * kwargs ) + "clients" + clientidparam + "/loans" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,532
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L454-L493
[ "def", "_match_to_morph_parents", "(", "self", ",", "type", ",", "results", ")", ":", "for", "result", "in", "results", ":", "if", "result", ".", "get_key", "(", ")", "in", "self", ".", "_dictionary", ".", "get", "(", "type", ",", "[", "]", ")", ":", "for", "model", "in", "self", ".", "_dictionary", "[", "type", "]", "[", "result", ".", "get_key", "(", ")", "]", ":", "model", ".", "set_relation", "(", "self", ".", "_relation", ",", "Result", "(", "result", ",", "self", ",", "model", ",", "related", "=", "result", ")", ")" ]
Request Client Custom Information URL .
def getclientcustominformationurl ( idclient , customfield = "" , * args , * * kwargs ) : clientidparam = "/" + idclient url = getmambuurl ( * args , * * kwargs ) + "clients" + clientidparam + "/custominformation" + ( ( "/" + customfield ) if customfield else "" ) return url
10,533
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L495-L505
[ "def", "include", "(", "context", ",", "bundle_name", ",", "version", ")", ":", "store", "=", "Store", "(", "context", ".", "obj", "[", "'database'", "]", ",", "context", ".", "obj", "[", "'root'", "]", ")", "if", "version", ":", "version_obj", "=", "store", ".", "Version", ".", "get", "(", "version", ")", "if", "version_obj", "is", "None", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "'version not found'", ",", "fg", "=", "'red'", ")", ")", "else", ":", "bundle_obj", "=", "store", ".", "bundle", "(", "bundle_name", ")", "if", "bundle_obj", "is", "None", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "'bundle not found'", ",", "fg", "=", "'red'", ")", ")", "version_obj", "=", "bundle_obj", ".", "versions", "[", "0", "]", "try", ":", "include_version", "(", "context", ".", "obj", "[", "'root'", "]", ",", "version_obj", ")", "except", "VersionIncludedError", "as", "error", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "error", ".", "message", ",", "fg", "=", "'red'", ")", ")", "context", ".", "abort", "(", ")", "version_obj", ".", "included_at", "=", "dt", ".", "datetime", ".", "now", "(", ")", "store", ".", "commit", "(", ")", "click", ".", "echo", "(", "click", ".", "style", "(", "'included all files!'", ",", "fg", "=", "'green'", ")", ")" ]
Request Users URL .
def getuserurl ( iduser , * args , * * kwargs ) : getparams = [ ] if kwargs : try : if kwargs [ "fullDetails" ] == True : getparams . append ( "fullDetails=true" ) else : getparams . append ( "fullDetails=false" ) except Exception as ex : pass try : getparams . append ( "branchId=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass useridparam = "" if iduser == "" else "/" + iduser url = getmambuurl ( * args , * * kwargs ) + "users" + useridparam + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,534
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L507-L546
[ "def", "_weights_callback", "(", "self", ",", "msg", ")", ":", "# Read weights", "weights", "=", "np", ".", "array", "(", "msg", ".", "data", ")", "# If needed, initialize indiv_weight_buffers", "if", "len", "(", "self", ".", "_weight_buffers", ")", "==", "0", ":", "self", ".", "_weight_buffers", "=", "[", "[", "]", "for", "i", "in", "range", "(", "len", "(", "weights", ")", ")", "]", "# Record individual weights", "for", "i", ",", "w", "in", "enumerate", "(", "weights", ")", ":", "if", "len", "(", "self", ".", "_weight_buffers", "[", "i", "]", ")", "==", "self", ".", "_ntaps", ":", "self", ".", "_weight_buffers", "[", "i", "]", ".", "pop", "(", "0", ")", "self", ".", "_weight_buffers", "[", "i", "]", ".", "append", "(", "w", ")" ]
Request loan Products URL .
def getproductsurl ( idproduct , * args , * * kwargs ) : productidparam = "" if idproduct == "" else "/" + idproduct url = getmambuurl ( * args , * * kwargs ) + "loanproducts" + productidparam return url
10,535
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L548-L562
[ "def", "rerecord", "(", "ctx", ",", "rest", ")", ":", "run", "(", "'tox -e py27 -- --cassette-mode all --record --credentials {0} -s'", ".", "format", "(", "rest", ")", ",", "pty", "=", "True", ")", "run", "(", "'tox -e py27 -- --resave --scrub --credentials test_credentials {0} -s'", ".", "format", "(", "rest", ")", ",", "pty", "=", "True", ")" ]
Request Tasks URL .
def gettasksurl ( dummyId = '' , * args , * * kwargs ) : getparams = [ ] if kwargs : try : getparams . append ( "username=%s" % kwargs [ "username" ] ) except Exception as ex : pass try : getparams . append ( "clientid=%s" % kwargs [ "clientId" ] ) except Exception as ex : pass try : getparams . append ( "groupid=%s" % kwargs [ "groupId" ] ) except Exception as ex : pass try : getparams . append ( "status=%s" % kwargs [ "status" ] ) except Exception as ex : getparams . append ( "status=OPEN" ) try : getparams . append ( "offset=%s" % kwargs [ "offset" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass url = getmambuurl ( * args , * * kwargs ) + "tasks" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,536
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L564-L620
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Request Activities URL .
def getactivitiesurl ( dummyId = '' , * args , * * kwargs ) : from datetime import datetime getparams = [ ] if kwargs : try : getparams . append ( "from=%s" % kwargs [ "fromDate" ] ) except Exception as ex : getparams . append ( "from=%s" % '1900-01-01' ) try : getparams . append ( "to=%s" % kwargs [ "toDate" ] ) except Exception as ex : hoy = datetime . now ( ) . strftime ( '%Y-%m-%d' ) getparams . append ( "to=%s" % hoy ) try : getparams . append ( "branchID=%s" % kwargs [ "branchId" ] ) except Exception as ex : pass try : getparams . append ( "clientID=%s" % kwargs [ "clientId" ] ) except Exception as ex : pass try : getparams . append ( "centreID=%s" % kwargs [ "centreId" ] ) except Exception as ex : pass try : getparams . append ( "userID=%s" % kwargs [ "userId" ] ) except Exception as ex : pass try : getparams . append ( "loanAccountID=%s" % kwargs [ "loanAccountId" ] ) except Exception as ex : pass try : getparams . append ( "groupID=%s" % kwargs [ "groupId" ] ) except Exception as ex : pass try : getparams . append ( "limit=%s" % kwargs [ "limit" ] ) except Exception as ex : pass url = getmambuurl ( * args , * * kwargs ) + "activities" + ( "" if len ( getparams ) == 0 else "?" + "&" . join ( getparams ) ) return url
10,537
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L622-L697
[ "def", "add", "(", "self", ",", "files", ",", "recursive", "=", "False", ",", "pattern", "=", "'**'", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "#PY2: No support for kw-only parameters after glob parameters", "opts", "=", "{", "\"trickle\"", ":", "kwargs", ".", "pop", "(", "\"trickle\"", ",", "False", ")", ",", "\"only-hash\"", ":", "kwargs", ".", "pop", "(", "\"only_hash\"", ",", "False", ")", ",", "\"wrap-with-directory\"", ":", "kwargs", ".", "pop", "(", "\"wrap_with_directory\"", ",", "False", ")", ",", "\"pin\"", ":", "kwargs", ".", "pop", "(", "\"pin\"", ",", "True", ")", "}", "if", "\"chunker\"", "in", "kwargs", ":", "opts", "[", "\"chunker\"", "]", "=", "kwargs", ".", "pop", "(", "\"chunker\"", ")", "kwargs", ".", "setdefault", "(", "\"opts\"", ",", "opts", ")", "body", ",", "headers", "=", "multipart", ".", "stream_filesystem_node", "(", "files", ",", "recursive", ",", "pattern", ",", "self", ".", "chunk_size", ")", "return", "self", ".", "_client", ".", "request", "(", "'/add'", ",", "decoder", "=", "'json'", ",", "data", "=", "body", ",", "headers", "=", "headers", ",", "*", "*", "kwargs", ")" ]
Request Roles URL .
def getrolesurl ( idrole = '' , * args , * * kwargs ) : url = getmambuurl ( * args , * * kwargs ) + "userroles" + ( ( "/" + idrole ) if idrole else "" ) return url
10,538
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L699-L710
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Stripts HTML tags from text .
def strip_tags ( html ) : from html . parser import HTMLParser class MLStripper ( HTMLParser ) : """Aux class for stripping HTML tags. fields on several Mambu entities come with additional HTML tags (they are rich text fields, I guess that's why). Sometimes they are useless, so stripping them is a good idea. """ def __init__ ( self ) : try : super ( ) . __init__ ( ) # required for python3 except TypeError as e : pass # with python2 raises TypeError self . reset ( ) self . fed = [ ] def handle_data ( self , d ) : self . fed . append ( d ) def get_data ( self ) : return '' . join ( self . fed ) s = MLStripper ( ) s . feed ( html . replace ( "&nbsp;" , " " ) ) return s . get_data ( )
10,539
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L715-L744
[ "def", "save_network_to_file", "(", "self", ",", "filename", "=", "\"network0.pkl\"", ")", ":", "import", "cPickle", ",", "os", ",", "re", "if", "filename", "==", "\"network0.pkl\"", ":", "while", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "filename", ")", ")", ":", "filename", "=", "re", ".", "sub", "(", "'\\d(?!\\d)'", ",", "lambda", "x", ":", "str", "(", "int", "(", "x", ".", "group", "(", "0", ")", ")", "+", "1", ")", ",", "filename", ")", "with", "open", "(", "filename", ",", "'wb'", ")", "as", "file", ":", "store_dict", "=", "{", "\"n_inputs\"", ":", "self", ".", "n_inputs", ",", "\"layers\"", ":", "self", ".", "layers", ",", "\"n_weights\"", ":", "self", ".", "n_weights", ",", "\"weights\"", ":", "self", ".", "weights", ",", "}", "cPickle", ".", "dump", "(", "store_dict", ",", "file", ",", "2", ")" ]
Strip characters in a string which are consecutively repeated .
def strip_consecutive_repeated_char ( s , ch ) : sdest = "" for i , c in enumerate ( s ) : if i != 0 and s [ i ] == ch and s [ i ] == s [ i - 1 ] : continue sdest += s [ i ] return sdest
10,540
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L747-L761
[ "def", "clear", "(", "self", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "_root_dir", ",", "topdown", "=", "False", ")", ":", "for", "file", "in", "files", ":", "os", ".", "unlink", "(", "os", ".", "path", ".", "join", "(", "root", ",", "file", ")", ")", "os", ".", "rmdir", "(", "root", ")", "# Get the root dir back and re-initialise to start again", "root_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_root_dir", ",", "os", ".", "pardir", ")", ")", "self", ".", "__init__", "(", "root_dir", ")" ]
Encode every value of a dict to UTF - 8 .
def encoded_dict ( in_dict ) : out_dict = { } for k , v in in_dict . items ( ) : if isinstance ( v , unicode ) : if sys . version_info < ( 3 , 0 ) : v = v . encode ( 'utf8' ) elif isinstance ( v , str ) : # Must be encoded in UTF-8 if sys . version_info < ( 3 , 0 ) : v . decode ( 'utf8' ) out_dict [ k ] = v return out_dict
10,541
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L811-L826
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
Backup Mambu Database via REST API .
def backup_db ( callback , bool_func , output_fname , * args , * * kwargs ) : from datetime import datetime try : verbose = kwargs [ 'verbose' ] except KeyError : verbose = False try : retries = kwargs [ 'retries' ] except KeyError : retries = - 1 try : force_download_latest = bool ( kwargs [ 'force_download_latest' ] ) except KeyError : force_download_latest = False if verbose : log = open ( '/tmp/log_mambu_backup' , 'a' ) log . write ( datetime . now ( ) . strftime ( '%Y-%m-%d %H:%M:%S' ) + " - Mambu DB Backup\n" ) log . flush ( ) user = kwargs . pop ( 'user' , apiuser ) pwd = kwargs . pop ( 'pwd' , apipwd ) data = { 'callback' : callback } try : posturl = iriToUri ( getmambuurl ( * args , * * kwargs ) + "database/backup" ) if verbose : log . write ( "open url: " + posturl + "\n" ) log . flush ( ) resp = requests . post ( posturl , data = data , headers = { 'content-type' : 'application/json' } , auth = ( apiuser , apipwd ) ) except Exception as ex : mess = "Error requesting backup: %s" % repr ( ex ) if verbose : log . write ( mess + "\n" ) log . close ( ) raise MambuError ( mess ) if resp . status_code != 200 : mess = "Error posting request for backup: %s" % resp . content if verbose : log . write ( mess + "\n" ) log . close ( ) raise MambuCommError ( mess ) data [ 'latest' ] = True while retries and not bool_func ( ) : if verbose : log . write ( "waiting...\n" ) log . flush ( ) sleep ( 10 ) retries -= 1 if retries < 0 : retries = - 1 if not retries : mess = "Tired of waiting, giving up..." if verbose : log . write ( mess + "\n" ) log . flush ( ) if not force_download_latest : if verbose : log . close ( ) raise MambuError ( mess ) else : data [ 'latest' ] = False sleep ( 30 ) geturl = iriToUri ( getmambuurl ( * args , * * kwargs ) + "database/backup/LATEST" ) if verbose : log . write ( "open url: " + geturl + "\n" ) log . flush ( ) resp = requests . get ( geturl , auth = ( apiuser , apipwd ) ) if resp . 
status_code != 200 : mess = "Error getting database backup: %s" % resp . content if verbose : log . write ( mess + "\n" ) log . close ( ) raise MambuCommError ( mess ) if verbose : log . write ( "saving...\n" ) log . flush ( ) with open ( output_fname , "w" ) as fw : fw . write ( resp . content ) if verbose : log . write ( "DONE!\n" ) log . close ( ) return data
10,542
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuutil.py#L831-L959
[ "def", "_loop", "(", "self", ")", ":", "self", ".", "stop_flag", ".", "value", "=", "False", "self", ".", "time_started", ".", "value", "=", "time", "(", ")", "self", ".", "time_elapsed", ".", "value", "=", "0", "while", "True", ":", "if", "self", ".", "stop_flag", ".", "value", ":", "break", "self", ".", "update_text", "(", ")", "with", "self", ".", "time_started", ".", "get_lock", "(", ")", ":", "start", "=", "self", ".", "time_started", ".", "value", "with", "self", ".", "time_elapsed", ".", "get_lock", "(", ")", ":", "self", ".", "time_elapsed", ".", "value", "=", "time", "(", ")", "-", "start", "if", "(", "self", ".", "timeout", ".", "value", "and", "(", "self", ".", "time_elapsed", ".", "value", ">", "self", ".", "timeout", ".", "value", ")", ")", ":", "self", ".", "stop", "(", ")", "raise", "ProgressTimedOut", "(", "self", ".", "name", ",", "self", ".", "time_elapsed", ".", "value", ",", ")" ]
Provides memoization for methods on a specific instance . Results are cached for given parameter list .
def memoize ( func ) : cache_name = '__CACHED_{}' . format ( func . __name__ ) def wrapper ( self , * args ) : cache = getattr ( self , cache_name , None ) if cache is None : cache = { } setattr ( self , cache_name , cache ) if args not in cache : cache [ args ] = func ( self , * args ) return cache [ args ] return wrapper
10,543
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/memoize_decorators.py#L1-L34
[ "def", "ProgressBar", "(", "title", "=", "\"RoboFab...\"", ",", "ticks", "=", "None", ",", "label", "=", "\"\"", ")", ":", "return", "dispatcher", "[", "\"ProgressBar\"", "]", "(", "title", "=", "title", ",", "ticks", "=", "ticks", ",", "label", "=", "label", ")" ]
Create the pandas . Series containing the side labels that correspond to each pathway based on the user - specified gene signature definition .
def _pathway_side_information ( pathway_positive_series , pathway_negative_series , index ) : positive_series_label = pd . Series ( [ "pos" ] * len ( pathway_positive_series ) ) negative_series_label = pd . Series ( [ "neg" ] * len ( pathway_negative_series ) ) side_information = positive_series_label . append ( negative_series_label ) side_information . index = index side_information . name = "side" return side_information
10,544
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/feature_pathway_overrepresentation.py#L114-L126
[ "def", "saturation", "(", "self", ",", "value", ")", ":", "value", "=", "clean_float", "(", "value", ")", "if", "value", "is", "None", ":", "return", "try", ":", "unit_moisture_weight", "=", "self", ".", "unit_moist_weight", "-", "self", ".", "unit_dry_weight", "unit_moisture_volume", "=", "unit_moisture_weight", "/", "self", ".", "_pw", "saturation", "=", "unit_moisture_volume", "/", "self", ".", "_calc_unit_void_volume", "(", ")", "if", "saturation", "is", "not", "None", "and", "not", "ct", ".", "isclose", "(", "saturation", ",", "value", ",", "rel_tol", "=", "self", ".", "_tolerance", ")", ":", "raise", "ModelError", "(", "\"New saturation (%.3f) is inconsistent \"", "\"with calculated value (%.3f)\"", "%", "(", "value", ",", "saturation", ")", ")", "except", "TypeError", ":", "pass", "old_value", "=", "self", ".", "saturation", "self", ".", "_saturation", "=", "value", "try", ":", "self", ".", "recompute_all_weights_and_void", "(", ")", "self", ".", "_add_to_stack", "(", "\"saturation\"", ",", "value", ")", "except", "ModelError", "as", "e", ":", "self", ".", "_saturation", "=", "old_value", "raise", "ModelError", "(", "e", ")" ]
Create the significant pathways pandas . DataFrame . Given the p - values corresponding to each pathway in a feature apply the FDR correction for multiple testing and remove those that do not have a q - value of less than alpha .
def _significant_pathways_dataframe ( pvalue_information , side_information , alpha ) : significant_pathways = pd . concat ( [ pvalue_information , side_information ] , axis = 1 ) # fdr_bh: false discovery rate, Benjamini & Hochberg (1995, 2000) below_alpha , qvalues , _ , _ = multipletests ( significant_pathways [ "p-value" ] , alpha = alpha , method = "fdr_bh" ) below_alpha = pd . Series ( below_alpha , index = pvalue_information . index , name = "pass" ) qvalues = pd . Series ( qvalues , index = pvalue_information . index , name = "q-value" ) significant_pathways = pd . concat ( [ significant_pathways , below_alpha , qvalues ] , axis = 1 ) significant_pathways = significant_pathways [ significant_pathways [ "pass" ] ] significant_pathways . drop ( "pass" , axis = 1 , inplace = True ) significant_pathways . loc [ : , "pathway" ] = significant_pathways . index return significant_pathways
10,545
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/feature_pathway_overrepresentation.py#L129-L151
[ "def", "_record_offset", "(", "self", ")", ":", "offset", "=", "self", ".", "blob_file", ".", "tell", "(", ")", "self", ".", "event_offsets", ".", "append", "(", "offset", ")" ]
For all stacks to be sent append a pause after each text layer .
async def flush ( self , request : Request , stacks : List [ Stack ] ) : ns = await self . expand_stacks ( request , stacks ) ns = self . split_stacks ( ns ) ns = self . clean_stacks ( ns ) await self . next ( request , [ Stack ( x ) for x in ns ] )
10,546
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_builtins.py#L45-L54
[ "def", "_to_swagger", "(", "base", "=", "None", ",", "description", "=", "None", ",", "resource", "=", "None", ",", "options", "=", "None", ")", ":", "# type: (Dict[str, str], str, Resource, Dict[str, str]) -> Dict[str, str]", "definition", "=", "dict_filter", "(", "base", "or", "{", "}", ",", "options", "or", "{", "}", ")", "if", "description", ":", "definition", "[", "'description'", "]", "=", "description", ".", "format", "(", "name", "=", "getmeta", "(", "resource", ")", ".", "name", "if", "resource", "else", "\"UNKNOWN\"", ")", "if", "resource", ":", "definition", "[", "'schema'", "]", "=", "{", "'$ref'", ":", "'#/definitions/{}'", ".", "format", "(", "getmeta", "(", "resource", ")", ".", "resource_name", ")", "}", "return", "definition" ]
First step of the stacks cleanup process . We consider that if inside a stack there s a text layer showing up then it s the beginning of a new stack and split upon that .
def split_stacks ( self , stacks : List [ List [ BaseLayer ] ] ) -> List [ List [ BaseLayer ] ] : ns : List [ List [ BaseLayer ] ] = [ ] for stack in stacks : cur : List [ BaseLayer ] = [ ] for layer in stack : if cur and isinstance ( layer , lyr . RawText ) : ns . append ( cur ) cur = [ ] cur . append ( layer ) if cur : ns . append ( cur ) return ns
10,547
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_builtins.py#L70-L93
[ "def", "_stationarystate", "(", "self", ",", "k", ")", ":", "if", "self", ".", "_distributionmodel", ":", "return", "self", ".", "model", ".", "stationarystate", "(", "k", ")", "else", ":", "return", "self", ".", "model", ".", "stationarystate" ]
Expand a layer into a list of layers including the pauses .
async def expand ( self , request : Request , layer : BaseLayer ) : if isinstance ( layer , lyr . RawText ) : t = self . reading_time ( layer . text ) yield layer yield lyr . Sleep ( t ) elif isinstance ( layer , lyr . MultiText ) : texts = await render ( layer . text , request , True ) for text in texts : t = self . reading_time ( text ) yield lyr . RawText ( text ) yield lyr . Sleep ( t ) elif isinstance ( layer , lyr . Text ) : text = await render ( layer . text , request ) t = self . reading_time ( text ) yield lyr . RawText ( text ) yield lyr . Sleep ( t ) else : yield layer
10,548
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_builtins.py#L121-L146
[ "def", "dl_cub", "(", "cub_url", ",", "cub_archive_name", ")", ":", "with", "open", "(", "cub_archive_name", ",", "'wb'", ")", "as", "f", ":", "remote_file", "=", "urllib2", ".", "urlopen", "(", "cub_url", ")", "meta", "=", "remote_file", ".", "info", "(", ")", "# The server may provide us with the size of the file.", "cl_header", "=", "meta", ".", "getheaders", "(", "\"Content-Length\"", ")", "remote_file_size", "=", "int", "(", "cl_header", "[", "0", "]", ")", "if", "len", "(", "cl_header", ")", ">", "0", "else", "None", "# Initialise variables", "local_file_size", "=", "0", "block_size", "=", "128", "*", "1024", "# Do the download", "while", "True", ":", "data", "=", "remote_file", ".", "read", "(", "block_size", ")", "if", "not", "data", ":", "break", "f", ".", "write", "(", "data", ")", "local_file_size", "+=", "len", "(", "data", ")", "if", "(", "remote_file_size", "is", "not", "None", "and", "not", "local_file_size", "==", "remote_file_size", ")", ":", "log", ".", "warn", "(", "\"Local file size '{}' \"", "\"does not match remote '{}'\"", ".", "format", "(", "local_file_size", ",", "remote_file_size", ")", ")", "remote_file", ".", "close", "(", ")" ]
Computes the time in seconds that the user will need to read a bubble containing the text passed as parameter .
def reading_time ( self , text : TextT ) : wc = re . findall ( r'\w+' , text ) period = 60.0 / settings . USERS_READING_SPEED return float ( len ( wc ) ) * period + settings . USERS_READING_BUBBLE_START
10,549
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_builtins.py#L148-L156
[ "def", "disassociate_public_ip", "(", "self", ",", "public_ip_id", ")", ":", "floating_ip", "=", "self", ".", "client", ".", "floating_ips", ".", "get", "(", "public_ip_id", ")", "floating_ip", "=", "floating_ip", ".", "to_dict", "(", ")", "instance_id", "=", "floating_ip", ".", "get", "(", "'instance_id'", ")", "address", "=", "floating_ip", ".", "get", "(", "'ip'", ")", "self", ".", "client", ".", "servers", ".", "remove_floating_ip", "(", "instance_id", ",", "address", ")", "return", "True" ]
Add a typing stack after each stack .
async def flush ( self , request : Request , stacks : List [ Stack ] ) : ns : List [ Stack ] = [ ] for stack in stacks : ns . extend ( self . typify ( stack ) ) if len ( ns ) > 1 and ns [ - 1 ] == Stack ( [ lyr . Typing ( ) ] ) : ns [ - 1 ] . get_layer ( lyr . Typing ) . active = False await self . next ( request , ns )
10,550
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_builtins.py#L165-L178
[ "def", "download_wiod2013", "(", "storage_folder", ",", "years", "=", "None", ",", "overwrite_existing", "=", "False", ",", "satellite_urls", "=", "WIOD_CONFIG", "[", "'satellite_urls'", "]", ")", ":", "try", ":", "os", ".", "makedirs", "(", "storage_folder", ")", "except", "FileExistsError", ":", "pass", "if", "type", "(", "years", ")", "is", "int", "or", "type", "(", "years", ")", "is", "str", ":", "years", "=", "[", "years", "]", "years", "=", "years", "if", "years", "else", "range", "(", "1995", ",", "2012", ")", "years", "=", "[", "str", "(", "yy", ")", ".", "zfill", "(", "2", ")", "[", "-", "2", ":", "]", "for", "yy", "in", "years", "]", "wiod_web_content", "=", "_get_url_datafiles", "(", "url_db_view", "=", "WIOD_CONFIG", "[", "'url_db_view'", "]", ",", "url_db_content", "=", "WIOD_CONFIG", "[", "'url_db_content'", "]", ",", "mrio_regex", "=", "'protected.*?wiot\\d\\d.*?xlsx'", ")", "restricted_wiod_io_urls", "=", "[", "url", "for", "url", "in", "wiod_web_content", ".", "data_urls", "if", "re", ".", "search", "(", "r\"(wiot)(\\d\\d)\"", ",", "os", ".", "path", ".", "basename", "(", "url", ")", ")", ".", "group", "(", "2", ")", "in", "years", "]", "meta", "=", "MRIOMetaData", "(", "location", "=", "storage_folder", ",", "description", "=", "'WIOD metadata file for pymrio'", ",", "name", "=", "'WIOD'", ",", "system", "=", "'ixi'", ",", "version", "=", "'data13'", ")", "meta", "=", "_download_urls", "(", "url_list", "=", "restricted_wiod_io_urls", "+", "satellite_urls", ",", "storage_folder", "=", "storage_folder", ",", "overwrite_existing", "=", "overwrite_existing", ",", "meta_handler", "=", "meta", ")", "meta", ".", "save", "(", ")", "return", "meta" ]
Start typing right when the message is received .
async def pre_handle ( self , request : Request , responder : 'Responder' ) : responder . send ( [ lyr . Typing ( ) ] ) await responder . flush ( request ) responder . clear ( ) await self . next ( request , responder )
10,551
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_builtins.py#L180-L189
[ "def", "cached", "(", "namespace", "=", "None", ",", "service", "=", "\"memory\"", ",", "debug", "=", "False", ")", ":", "_service", "=", "getattr", "(", "debris", ".", "services", ",", "service", ")", "def", "wrapper", "(", "_f", ")", ":", "@", "functools", ".", "wraps", "(", "_f", ")", "def", "_stash", "(", "self", ",", "*", "a", ",", "*", "*", "k", ")", ":", "if", "debug", "is", "False", ":", "# this request is cacheable", "try", ":", "self", ".", "finish", "(", "_service", ".", "get", "(", "namespace", ")", ")", "except", "LookupError", ":", "_replace_finish", "(", "self", ",", "namespace", ",", "_service", ")", "# get the result of this request", "_f", "(", "self", ",", "*", "a", ",", "*", "*", "k", ")", "return", "# request is not cacheable", "_f", "(", "self", ",", "*", "a", ",", "*", "*", "k", ")", "return", "_stash", "return", "wrapper" ]
Let s use the first name of the user as friendly name . In some cases the user object is incomplete and in those cases the full user is fetched .
async def get_friendly_name ( self ) -> Text : if 'first_name' not in self . _user : user = await self . _get_full_user ( ) else : user = self . _user return user . get ( 'first_name' )
10,552
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L152-L164
[ "def", "viewbox", "(", "self", ")", ":", "return", "self", ".", "left", ",", "self", ".", "top", ",", "self", ".", "right", ",", "self", ".", "bottom" ]
As Telegram changes where the chat object is located in the response this method tries to be smart about finding it in the right place .
def _get_chat ( self ) -> Dict : if 'callback_query' in self . _update : query = self . _update [ 'callback_query' ] if 'message' in query : return query [ 'message' ] [ 'chat' ] else : return { 'id' : query [ 'chat_instance' ] } elif 'inline_query' in self . _update : return patch_dict ( self . _update [ 'inline_query' ] [ 'from' ] , is_inline_query = True , ) elif 'message' in self . _update : return self . _update [ 'message' ] [ 'chat' ]
10,553
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L238-L256
[ "def", "select_extended", "(", "cat_table", ")", ":", "try", ":", "l", "=", "[", "len", "(", "row", ".", "strip", "(", ")", ")", ">", "0", "for", "row", "in", "cat_table", "[", "'Extended_Source_Name'", "]", ".", "data", "]", "return", "np", ".", "array", "(", "l", ",", "bool", ")", "except", "KeyError", ":", "return", "cat_table", "[", "'Extended'", "]" ]
Intercept any potential AnswerCallbackQuery before adding the stack to the output buffer .
def send ( self , stack : Layers ) : if not isinstance ( stack , Stack ) : stack = Stack ( stack ) if 'callback_query' in self . _update and stack . has_layer ( Update ) : layer = stack . get_layer ( Update ) try : msg = self . _update [ 'callback_query' ] [ 'message' ] except KeyError : layer . inline_message_id = self . _update [ 'callback_query' ] [ 'inline_message_id' ] else : layer . chat_id = msg [ 'chat' ] [ 'id' ] layer . message_id = msg [ 'message_id' ] if stack . has_layer ( AnswerCallbackQuery ) : self . _acq = stack . get_layer ( AnswerCallbackQuery ) stack = Stack ( [ l for l in stack . layers if not isinstance ( l , AnswerCallbackQuery ) ] ) if stack . has_layer ( Reply ) : layer = stack . get_layer ( Reply ) if 'message' in self . _update : layer . message = self . _update [ 'message' ] elif 'callback_query' in self . _update : layer . message = self . _update [ 'callback_query' ] [ 'message' ] if 'inline_query' in self . _update and stack . has_layer ( AnswerInlineQuery ) : a = stack . get_layer ( AnswerInlineQuery ) a . inline_query_id = self . _update [ 'inline_query' ] [ 'id' ] if stack . layers : return super ( TelegramResponder , self ) . send ( stack )
10,554
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L313-L355
[ "def", "del_restriction", "(", "self", ",", "command", ",", "user", ",", "event_types", ")", ":", "if", "user", ".", "lower", "(", ")", "in", "self", ".", "commands_rights", "[", "command", "]", ":", "for", "event_type", "in", "event_types", ":", "try", ":", "self", ".", "commands_rights", "[", "command", "]", "[", "user", ".", "lower", "(", ")", "]", ".", "remove", "(", "event_type", ")", "except", "ValueError", ":", "pass", "if", "not", "self", ".", "commands_rights", "[", "command", "]", "[", "user", ".", "lower", "(", ")", "]", ":", "self", ".", "commands_rights", "[", "command", "]", ".", "pop", "(", "user", ".", "lower", "(", ")", ")" ]
If there s a AnswerCallbackQuery scheduled for reply place the call before actually flushing the buffer .
async def flush ( self , request : BernardRequest ) : if self . _acq and 'callback_query' in self . _update : try : cbq_id = self . _update [ 'callback_query' ] [ 'id' ] except KeyError : pass else : await self . platform . call ( 'answerCallbackQuery' , * * ( await self . _acq . serialize ( cbq_id ) ) ) return await super ( TelegramResponder , self ) . flush ( request )
10,555
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L357-L374
[ "def", "download_wiod2013", "(", "storage_folder", ",", "years", "=", "None", ",", "overwrite_existing", "=", "False", ",", "satellite_urls", "=", "WIOD_CONFIG", "[", "'satellite_urls'", "]", ")", ":", "try", ":", "os", ".", "makedirs", "(", "storage_folder", ")", "except", "FileExistsError", ":", "pass", "if", "type", "(", "years", ")", "is", "int", "or", "type", "(", "years", ")", "is", "str", ":", "years", "=", "[", "years", "]", "years", "=", "years", "if", "years", "else", "range", "(", "1995", ",", "2012", ")", "years", "=", "[", "str", "(", "yy", ")", ".", "zfill", "(", "2", ")", "[", "-", "2", ":", "]", "for", "yy", "in", "years", "]", "wiod_web_content", "=", "_get_url_datafiles", "(", "url_db_view", "=", "WIOD_CONFIG", "[", "'url_db_view'", "]", ",", "url_db_content", "=", "WIOD_CONFIG", "[", "'url_db_content'", "]", ",", "mrio_regex", "=", "'protected.*?wiot\\d\\d.*?xlsx'", ")", "restricted_wiod_io_urls", "=", "[", "url", "for", "url", "in", "wiod_web_content", ".", "data_urls", "if", "re", ".", "search", "(", "r\"(wiot)(\\d\\d)\"", ",", "os", ".", "path", ".", "basename", "(", "url", ")", ")", ".", "group", "(", "2", ")", "in", "years", "]", "meta", "=", "MRIOMetaData", "(", "location", "=", "storage_folder", ",", "description", "=", "'WIOD metadata file for pymrio'", ",", "name", "=", "'WIOD'", ",", "system", "=", "'ixi'", ",", "version", "=", "'data13'", ")", "meta", "=", "_download_urls", "(", "url_list", "=", "restricted_wiod_io_urls", "+", "satellite_urls", ",", "storage_folder", "=", "storage_folder", ",", "overwrite_existing", "=", "overwrite_existing", ",", "meta_handler", "=", "meta", ")", "meta", ".", "save", "(", ")", "return", "meta" ]
Handle updates from Telegram
async def receive_updates ( self , request : Request ) : body = await request . read ( ) try : content = ujson . loads ( body ) except ValueError : return json_response ( { 'error' : True , 'message' : 'Cannot decode body' , } , status = 400 ) logger . debug ( 'Received from Telegram: %s' , content ) message = TelegramMessage ( content , self ) responder = TelegramResponder ( content , self ) await self . _notify ( message , responder ) return json_response ( { 'error' : False , } )
10,556
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L438-L461
[ "def", "determine_end_idx_for_adjustment", "(", "self", ",", "adjustment_ts", ",", "dates", ",", "upper_bound", ",", "requested_quarter", ",", "sid_estimates", ")", ":", "end_idx", "=", "upper_bound", "# Find the next newest kd that happens on or after", "# the date of this adjustment", "newest_kd_for_qtr", "=", "sid_estimates", "[", "(", "sid_estimates", "[", "NORMALIZED_QUARTERS", "]", "==", "requested_quarter", ")", "&", "(", "sid_estimates", "[", "TS_FIELD_NAME", "]", ">=", "adjustment_ts", ")", "]", "[", "TS_FIELD_NAME", "]", ".", "min", "(", ")", "if", "pd", ".", "notnull", "(", "newest_kd_for_qtr", ")", ":", "newest_kd_idx", "=", "dates", ".", "searchsorted", "(", "newest_kd_for_qtr", ")", "# We have fresh information that comes in", "# before the end of the overwrite and", "# presumably is already split-adjusted to the", "# current split. We should stop applying the", "# adjustment the day before this new", "# information comes in.", "if", "newest_kd_idx", "<=", "upper_bound", ":", "end_idx", "=", "newest_kd_idx", "-", "1", "return", "end_idx" ]
Generate a Telegram URL for this bot .
def make_url ( self , method ) : token = self . settings ( ) [ 'token' ] return TELEGRAM_URL . format ( token = quote ( token ) , method = quote ( method ) , )
10,557
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L503-L513
[ "def", "_set_containable_view", "(", "self", ",", "session", ")", ":", "for", "obj_name", "in", "self", ".", "_containable_views", ":", "if", "self", ".", "_containable_views", "[", "obj_name", "]", "==", "SEQUESTERED", ":", "try", ":", "getattr", "(", "session", ",", "'use_sequestered_'", "+", "obj_name", "+", "'_view'", ")", "(", ")", "except", "AttributeError", ":", "pass", "else", ":", "try", ":", "getattr", "(", "session", ",", "'use_unsequestered_'", "+", "obj_name", "+", "'_view'", ")", "(", ")", "except", "AttributeError", ":", "pass" ]
Call a telegram method
async def call ( self , method : Text , _ignore : Set [ Text ] = None , * * params : Any ) : logger . debug ( 'Calling Telegram %s(%s)' , method , params ) url = self . make_url ( method ) headers = { 'content-type' : 'application/json' , } post = self . session . post ( url , data = ujson . dumps ( params ) , headers = headers , ) async with post as r : out = await self . _handle_telegram_response ( r , _ignore ) logger . debug ( 'Telegram replied: %s' , out ) return out
10,558
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L515-L546
[ "def", "process_orders", "(", "self", ",", "orderbook", ")", ":", "for", "stock", ",", "alloc", "in", "orderbook", ".", "iteritems", "(", ")", ":", "self", ".", "logger", ".", "info", "(", "'{}: Ordered {} {} stocks'", ".", "format", "(", "self", ".", "datetime", ",", "stock", ",", "alloc", ")", ")", "if", "isinstance", "(", "alloc", ",", "int", ")", ":", "self", ".", "order", "(", "stock", ",", "alloc", ")", "elif", "isinstance", "(", "alloc", ",", "float", ")", "and", "alloc", ">=", "-", "1", "and", "alloc", "<=", "1", ":", "self", ".", "order_percent", "(", "stock", ",", "alloc", ")", "else", ":", "self", ".", "logger", ".", "warning", "(", "'{}: invalid order for {}: {})'", ".", "format", "(", "self", ".", "datetime", ",", "stock", ",", "alloc", ")", ")" ]
Parse a response from Telegram . If there s an error an exception will be raised with an explicative message .
async def _handle_telegram_response ( self , response , ignore = None ) : if ignore is None : ignore = set ( ) ok = response . status == 200 try : data = await response . json ( ) if not ok : desc = data [ 'description' ] if desc in ignore : return raise PlatformOperationError ( 'Telegram replied with an error: {}' . format ( desc ) ) except ( ValueError , TypeError , KeyError ) : raise PlatformOperationError ( 'An unknown Telegram error occurred' ) return data
10,559
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L548-L578
[ "def", "with_binaries", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "dict", ")", ":", "self", ".", "_binaries", ".", "update", "(", "arg", ")", "self", ".", "_binaries", ".", "update", "(", "kw", ")", "return", "self" ]
Compute the path to the hook URL
def make_hook_path ( self ) : token = self . settings ( ) [ 'token' ] h = sha256 ( ) h . update ( token . encode ( ) ) key = str ( h . hexdigest ( ) ) return f'/hooks/telegram/{key}'
10,560
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L580-L589
[ "def", "read_tabular", "(", "filename", ",", "dtype_conversion", "=", "None", ")", ":", "if", "dtype_conversion", "is", "None", ":", "dtype_conversion", "=", "{", "}", "name", ",", "ext", "=", "filename", ".", "split", "(", "\".\"", ",", "1", ")", "ext", "=", "ext", ".", "lower", "(", ")", "# Completely empty columns are interpreted as float by default.", "dtype_conversion", "[", "\"comment\"", "]", "=", "str", "if", "\"csv\"", "in", "ext", ":", "df", "=", "pd", ".", "read_csv", "(", "filename", ",", "dtype", "=", "dtype_conversion", ",", "encoding", "=", "\"utf-8\"", ")", "elif", "\"tsv\"", "in", "ext", ":", "df", "=", "pd", ".", "read_table", "(", "filename", ",", "dtype", "=", "dtype_conversion", ",", "encoding", "=", "\"utf-8\"", ")", "elif", "\"xls\"", "in", "ext", "or", "\"xlsx\"", "in", "ext", ":", "df", "=", "pd", ".", "read_excel", "(", "filename", ",", "dtype", "=", "dtype_conversion", ",", "encoding", "=", "\"utf-8\"", ")", "# TODO: Add a function to parse ODS data into a pandas data frame.", "else", ":", "raise", "ValueError", "(", "\"Unknown file format '{}'.\"", ".", "format", "(", "ext", ")", ")", "return", "df" ]
Register the web hook onto which Telegram should send its messages .
async def _deferred_init ( self ) : hook_path = self . make_hook_path ( ) url = urljoin ( settings . BERNARD_BASE_URL , hook_path ) await self . call ( 'setWebhook' , url = url ) logger . info ( 'Setting Telegram webhook to "%s"' , url )
10,561
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L591-L599
[ "def", "run", "(", "self", ")", ":", "self", ".", "_initialize_run", "(", ")", "stimuli", "=", "self", ".", "protocol_model", ".", "allTests", "(", ")", "self", ".", "acq_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_worker", ",", "args", "=", "(", "stimuli", ",", ")", ",", ")", "# save the current calibration to data file doc ", "if", "self", ".", "save_data", ":", "info", "=", "{", "'calibration_used'", ":", "self", ".", "calname", ",", "'calibration_range'", ":", "self", ".", "cal_frange", "}", "self", ".", "datafile", ".", "set_metadata", "(", "self", ".", "current_dataset_name", ",", "info", ")", "# save the start time and set last tick to expired, so first", "# acquisition loop iteration executes immediately", "self", ".", "start_time", "=", "time", ".", "time", "(", ")", "self", ".", "last_tick", "=", "self", ".", "start_time", "-", "(", "self", ".", "interval", "/", "1000", ")", "self", ".", "acq_thread", ".", "start", "(", ")", "return", "self", ".", "acq_thread" ]
Base function for sending text
async def _send_text ( self , request : Request , stack : Stack , parse_mode : Optional [ Text ] = None ) : parts = [ ] chat_id = request . message . get_chat_id ( ) for layer in stack . layers : if isinstance ( layer , ( lyr . Text , lyr . RawText , lyr . Markdown ) ) : text = await render ( layer . text , request ) parts . append ( text ) for part in parts [ : - 1 ] : await self . call ( 'sendMessage' , text = part , chat_id = chat_id , ) msg = { 'text' : parts [ - 1 ] , 'chat_id' : chat_id , } if parse_mode is not None : msg [ 'parse_mode' ] = parse_mode await set_reply_markup ( msg , request , stack ) if stack . has_layer ( Reply ) : reply = stack . get_layer ( Reply ) if reply . message : msg [ 'reply_to_message_id' ] = reply . message [ 'message_id' ] if stack . has_layer ( Update ) : update = stack . get_layer ( Update ) if update . inline_message_id : msg [ 'inline_message_id' ] = update . inline_message_id del msg [ 'chat_id' ] else : msg [ 'message_id' ] = update . message_id await self . call ( 'editMessageText' , { 'Bad Request: message is not modified' } , * * msg ) else : await self . call ( 'sendMessage' , * * msg )
10,562
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L601-L654
[ "def", "extract_notebook_metatab", "(", "nb_path", ":", "Path", ")", ":", "from", "metatab", ".", "rowgenerators", "import", "TextRowGenerator", "import", "nbformat", "with", "nb_path", ".", "open", "(", ")", "as", "f", ":", "nb", "=", "nbformat", ".", "read", "(", "f", ",", "as_version", "=", "4", ")", "lines", "=", "'\\n'", ".", "join", "(", "[", "'Declare: metatab-latest'", "]", "+", "[", "get_cell_source", "(", "nb", ",", "tag", ")", "for", "tag", "in", "[", "'metadata'", ",", "'resources'", ",", "'schema'", "]", "]", ")", "doc", "=", "MetapackDoc", "(", "TextRowGenerator", "(", "lines", ")", ")", "doc", "[", "'Root'", "]", ".", "get_or_new_term", "(", "'Root.Title'", ")", ".", "value", "=", "get_cell_source", "(", "nb", ",", "'Title'", ")", ".", "strip", "(", "'#'", ")", ".", "strip", "(", ")", "doc", "[", "'Root'", "]", ".", "get_or_new_term", "(", "'Root.Description'", ")", ".", "value", "=", "get_cell_source", "(", "nb", ",", "'Description'", ")", "doc", "[", "'Documentation'", "]", ".", "get_or_new_term", "(", "'Root.Readme'", ")", ".", "value", "=", "get_cell_source", "(", "nb", ",", "'readme'", ")", "return", "doc" ]
Sleep for the amount of time specified in the Sleep layer
async def _send_sleep ( self , request : Request , stack : Stack ) : duration = stack . get_layer ( lyr . Sleep ) . duration await sleep ( duration )
10,563
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L670-L676
[ "def", "delete", "(", "self", ",", "file_id", ")", ":", "res", "=", "self", ".", "_files", ".", "delete_one", "(", "{", "\"_id\"", ":", "file_id", "}", ")", "self", ".", "_chunks", ".", "delete_many", "(", "{", "\"files_id\"", ":", "file_id", "}", ")", "if", "not", "res", ".", "deleted_count", ":", "raise", "NoFile", "(", "\"no file could be deleted because none matched %s\"", "%", "file_id", ")" ]
In telegram the typing stops when the message is received . Thus there is no typing stops messages to send . The API is only called when typing must start .
async def _send_typing ( self , request : Request , stack : Stack ) : t = stack . get_layer ( lyr . Typing ) if t . active : await self . call ( 'sendChatAction' , chat_id = request . message . get_chat_id ( ) , action = 'typing' , )
10,564
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/platforms/telegram/platform.py#L683-L697
[ "def", "update", "(", "self", ",", "other", ")", ":", "self", ".", "update_ttl", "(", "other", ".", "ttl", ")", "super", "(", "Rdataset", ",", "self", ")", ".", "update", "(", "other", ")" ]
Return an application instance with processors & pipes setup .
def create_app ( metadata , processors = None , pipes = None ) : instance = Application ( metadata ) # In order to avoid code duplication, we use existing built-in import # processor to import and register built-in processors on the application # instance. This is, to be honest, the main purpose of this factory # function, because otherwise one must create an Application instance # directly. import_processors . process ( instance , [ ] , imports = [ "archive = holocron.processors.archive:process" , "commonmark = holocron.processors.commonmark:process" , "feed = holocron.processors.feed:process" , "frontmatter = holocron.processors.frontmatter:process" , "import-processors = holocron.processors.import_processors:process" , "jinja2 = holocron.processors.jinja2:process" , "markdown = holocron.processors.markdown:process" , "metadata = holocron.processors.metadata:process" , "pipe = holocron.processors.pipe:process" , "prettyuri = holocron.processors.prettyuri:process" , "restructuredtext = holocron.processors.restructuredtext:process" , "save = holocron.processors.save:process" , "sitemap = holocron.processors.sitemap:process" , "source = holocron.processors.source:process" , "todatetime = holocron.processors.todatetime:process" , "when = holocron.processors.when:process" , ] ) for name , processor in ( processors or { } ) . items ( ) : instance . add_processor ( name , processor ) for name , pipeline in ( pipes or { } ) . items ( ) : instance . add_pipe ( name , pipeline ) return instance
10,565
https://github.com/ikalnytskyi/holocron/blob/d202f6bccfeca64162857c6d0ee5bb53e773d2f2/src/holocron/core/factories.py#L7-L42
[ "def", "delete_group", "(", "self", ",", "group_id", ",", "force", "=", "False", ")", ":", "params", "=", "{", "'force'", ":", "force", "}", "response", "=", "self", ".", "_do_request", "(", "'DELETE'", ",", "'/v2/groups/{group_id}'", ".", "format", "(", "group_id", "=", "group_id", ")", ",", "params", "=", "params", ")", "return", "response", ".", "json", "(", ")" ]
Log a page view .
async def page_view ( self , url : str , title : str , user_id : str , user_lang : str = '' ) -> None : ga_url = 'https://www.google-analytics.com/collect' args = { 'v' : '1' , 'ds' : 'web' , 'de' : 'UTF-8' , 'tid' : self . ga_id , 'cid' : self . hash_user_id ( user_id ) , 't' : 'pageview' , 'dh' : self . ga_domain , 'dp' : url , 'dt' : title , } if user_lang : args [ 'ul' ] = user_lang logger . debug ( 'GA settings = %s' , urlencode ( args ) ) async with self . session . post ( ga_url , data = args ) as r : if r . status == 200 : logger . debug ( f'Sent to GA {url} ({title}) for user {user_id}' ) else : logger . warning ( f'Could not contact GA' )
10,566
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/analytics/ga/_ga.py#L37-L74
[ "def", "create_alarm_subscription", "(", "self", ",", "on_data", "=", "None", ",", "timeout", "=", "60", ")", ":", "manager", "=", "WebSocketSubscriptionManager", "(", "self", ".", "_client", ",", "resource", "=", "'alarms'", ")", "# Represent subscription as a future", "subscription", "=", "AlarmSubscription", "(", "manager", ")", "wrapped_callback", "=", "functools", ".", "partial", "(", "_wrap_callback_parse_alarm_data", ",", "subscription", ",", "on_data", ")", "manager", ".", "open", "(", "wrapped_callback", ",", "instance", "=", "self", ".", "_instance", ",", "processor", "=", "self", ".", "_processor", ")", "# Wait until a reply or exception is received", "subscription", ".", "reply", "(", "timeout", "=", "timeout", ")", "return", "subscription" ]
Factory method to get the correct AuthInfo object .
def get ( self , username = None , password = None , headers = { } ) : if all ( ( username , password , ) ) : return BasicAuth ( username , password , headers ) elif not any ( ( username , password , ) ) : return AnonymousAuth ( headers ) else : if username is None : data = ( "username" , username , ) else : data = ( "Password" , password , ) msg = "%s must have a value (instead of '%s')" % ( data [ 0 ] , data [ 1 ] ) raise ValueError ( msg )
10,567
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/http/auth.py#L14-L40
[ "def", "body_block_caption_render", "(", "caption_tags", ",", "base_url", "=", "None", ")", ":", "caption_content", "=", "[", "]", "supplementary_material_tags", "=", "[", "]", "for", "block_tag", "in", "remove_doi_paragraph", "(", "caption_tags", ")", ":", "# Note then skip p tags with supplementary-material inside", "if", "raw_parser", ".", "supplementary_material", "(", "block_tag", ")", ":", "for", "supp_tag", "in", "raw_parser", ".", "supplementary_material", "(", "block_tag", ")", ":", "supplementary_material_tags", ".", "append", "(", "supp_tag", ")", "continue", "for", "block_content", "in", "body_block_content_render", "(", "block_tag", ",", "base_url", "=", "base_url", ")", ":", "if", "block_content", "!=", "{", "}", ":", "caption_content", ".", "append", "(", "block_content", ")", "return", "caption_content", ",", "supplementary_material_tags" ]
Add the authentication info to the supplied dictionary .
def populate_request_data ( self , request_args ) : request_args [ 'auth' ] = HTTPBasicAuth ( self . _username , self . _password ) return request_args
10,568
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/http/auth.py#L74-L86
[ "def", "split", "(", "self", ",", "k", ")", ":", "if", "not", "1", "<=", "k", "<=", "self", ".", "num_rows", "-", "1", ":", "raise", "ValueError", "(", "\"Invalid value of k. k must be between 1 and the\"", "\"number of rows - 1\"", ")", "rows", "=", "np", ".", "random", ".", "permutation", "(", "self", ".", "num_rows", ")", "first", "=", "self", ".", "take", "(", "rows", "[", ":", "k", "]", ")", "rest", "=", "self", ".", "take", "(", "rows", "[", "k", ":", "]", ")", "for", "column_label", "in", "self", ".", "_formats", ":", "first", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "rest", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "return", "first", ",", "rest" ]
Register an workflow to be showed in the workflows list .
def register_workflow ( self , name , workflow ) : assert name not in self . workflows self . workflows [ name ] = workflow
10,569
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/ext.py#L52-L55
[ "def", "NewFd", "(", "self", ",", "fd", ",", "URL", ",", "encoding", ",", "options", ")", ":", "ret", "=", "libxml2mod", ".", "xmlReaderNewFd", "(", "self", ".", "_o", ",", "fd", ",", "URL", ",", "encoding", ",", "options", ")", "return", "ret" ]
Add an additional response header not removing duplicates .
def add_header ( self , name , value ) : self . _headers . setdefault ( _hkey ( name ) , [ ] ) . append ( _hval ( value ) )
10,570
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/bottle.py#L1756-L1758
[ "def", "FromEvent", "(", "cls", ",", "service_event", ")", ":", "_", ",", "_", ",", "name", "=", "service_event", ".", "key_path", ".", "rpartition", "(", "WindowsService", ".", "_REGISTRY_KEY_PATH_SEPARATOR", ")", "service_type", "=", "service_event", ".", "regvalue", ".", "get", "(", "'Type'", ",", "''", ")", "image_path", "=", "service_event", ".", "regvalue", ".", "get", "(", "'ImagePath'", ",", "''", ")", "start_type", "=", "service_event", ".", "regvalue", ".", "get", "(", "'Start'", ",", "''", ")", "service_dll", "=", "service_event", ".", "regvalue", ".", "get", "(", "'ServiceDll'", ",", "''", ")", "object_name", "=", "service_event", ".", "regvalue", ".", "get", "(", "'ObjectName'", ",", "''", ")", "if", "service_event", ".", "pathspec", ":", "source", "=", "(", "service_event", ".", "pathspec", ".", "location", ",", "service_event", ".", "key_path", ")", "else", ":", "source", "=", "(", "'Unknown'", ",", "'Unknown'", ")", "return", "cls", "(", "name", "=", "name", ",", "service_type", "=", "service_type", ",", "image_path", "=", "image_path", ",", "start_type", "=", "start_type", ",", "object_name", "=", "object_name", ",", "source", "=", "source", ",", "service_dll", "=", "service_dll", ")" ]
Load values from a Python module .
def load_module ( self , path , squash = True ) : config_obj = load ( path ) obj = { key : getattr ( config_obj , key ) for key in dir ( config_obj ) if key . isupper ( ) } if squash : self . load_dict ( obj ) else : self . update ( obj ) return self
10,571
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/bottle.py#L2341-L2369
[ "def", "array_equiv", "(", "arr1", ",", "arr2", ")", ":", "arr1", ",", "arr2", "=", "as_like_arrays", "(", "arr1", ",", "arr2", ")", "if", "arr1", ".", "shape", "!=", "arr2", ".", "shape", ":", "return", "False", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "'ignore'", ",", "\"In the future, 'NAT == x'\"", ")", "flag_array", "=", "(", "arr1", "==", "arr2", ")", "flag_array", "|=", "(", "isnull", "(", "arr1", ")", "&", "isnull", "(", "arr2", ")", ")", "return", "bool", "(", "flag_array", ".", "all", "(", ")", ")" ]
Recursively set or update virtual keys . Do nothing if non - virtual value is present .
def _set_virtual ( self , key , value ) : if key in self and key not in self . _virtual_keys : return # Do nothing for non-virtual keys. self . _virtual_keys . add ( key ) if key in self and self [ key ] is not value : self . _on_change ( key , value ) dict . __setitem__ ( self , key , value ) for overlay in self . _iter_overlays ( ) : overlay . _set_virtual ( key , value )
10,572
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/bottle.py#L2484-L2495
[ "def", "read_file", "(", "path", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "with", "wrap_file_exceptions", "(", ")", ":", "with", "open", "(", "path", ",", "'rb'", ")", "as", "stream", ":", "return", "stream", ".", "read", "(", ")", "with", "wrap_file_exceptions", "(", ")", ":", "stream", "=", "ca_storage", ".", "open", "(", "path", ")", "try", ":", "return", "stream", ".", "read", "(", ")", "finally", ":", "stream", ".", "close", "(", ")" ]
Recursively delete virtual entry . Do nothing if key is not virtual .
def _delete_virtual ( self , key ) : if key not in self . _virtual_keys : return # Do nothing for non-virtual keys. if key in self : self . _on_change ( key , None ) dict . __delitem__ ( self , key ) self . _virtual_keys . discard ( key ) for overlay in self . _iter_overlays ( ) : overlay . _delete_virtual ( key )
10,573
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/bottle.py#L2497-L2508
[ "def", "makeSocket", "(", "self", ",", "timeout", "=", "1", ")", ":", "plain_socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "if", "hasattr", "(", "plain_socket", ",", "'settimeout'", ")", ":", "plain_socket", ".", "settimeout", "(", "timeout", ")", "wrapped_socket", "=", "ssl", ".", "wrap_socket", "(", "plain_socket", ",", "ca_certs", "=", "self", ".", "ca_certs", ",", "cert_reqs", "=", "self", ".", "reqs", ",", "keyfile", "=", "self", ".", "keyfile", ",", "certfile", "=", "self", ".", "certfile", ")", "wrapped_socket", ".", "connect", "(", "(", "self", ".", "host", ",", "self", ".", "port", ")", ")", "return", "wrapped_socket" ]
Set the meta field for a key to a new value .
def meta_set ( self , key , metafield , value ) : self . _meta . setdefault ( key , { } ) [ metafield ] = value
10,574
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/bottle.py#L2523-L2525
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "_adb_logcat_process", ":", "return", "try", ":", "utils", ".", "stop_standing_subprocess", "(", "self", ".", "_adb_logcat_process", ")", "except", ":", "self", ".", "_ad", ".", "log", ".", "exception", "(", "'Failed to stop adb logcat.'", ")", "self", ".", "_adb_logcat_process", "=", "None" ]
Name of the file on the client file system but normalized to ensure file system compatibility . An empty filename is returned as empty .
def filename ( self ) : fname = self . raw_filename if not isinstance ( fname , unicode ) : fname = fname . decode ( 'utf8' , 'ignore' ) fname = normalize ( 'NFKD' , fname ) fname = fname . encode ( 'ASCII' , 'ignore' ) . decode ( 'ASCII' ) fname = os . path . basename ( fname . replace ( '\\' , os . path . sep ) ) fname = re . sub ( r'[^a-zA-Z0-9-_.\s]' , '' , fname ) . strip ( ) fname = re . sub ( r'[-\s]+' , '-' , fname ) . strip ( '.-' ) return fname [ : 255 ] or 'empty'
10,575
https://github.com/samghelms/mathviz/blob/30fe89537379faea4de8c8b568ac6e52e4d15353/mathviz_hopper/src/bottle.py#L2743-L2760
[ "def", "get_theming_attribute", "(", "self", ",", "mode", ",", "name", ",", "part", "=", "None", ")", ":", "colours", "=", "int", "(", "self", ".", "_config", ".", "get", "(", "'colourmode'", ")", ")", "return", "self", ".", "_theme", ".", "get_attribute", "(", "colours", ",", "mode", ",", "name", ",", "part", ")" ]
Render either a normal string either a string to translate into an actual string for the specified request .
async def render ( text : TransText , request : Optional [ 'Request' ] , multi_line = False ) -> Union [ Text , List [ Text ] ] : if isinstance ( text , str ) : out = [ text ] elif isinstance ( text , StringToTranslate ) : out = await text . render_list ( request ) else : raise TypeError ( 'Provided text cannot be rendered' ) if multi_line : return out else : return ' ' . join ( out )
10,576
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L607-L626
[ "def", "remove_range", "(", "cls", ",", "elem", ",", "end_elem", ",", "delete_end", "=", "True", ")", ":", "while", "elem", "is", "not", "None", "and", "elem", "!=", "end_elem", "and", "end_elem", "not", "in", "elem", ".", "xpath", "(", "\"descendant::*\"", ")", ":", "parent", "=", "elem", ".", "getparent", "(", ")", "nxt", "=", "elem", ".", "getnext", "(", ")", "parent", ".", "remove", "(", "elem", ")", "if", "DEBUG", "==", "True", ":", "print", "(", "etree", ".", "tounicode", "(", "elem", ")", ")", "elem", "=", "nxt", "if", "elem", "==", "end_elem", ":", "if", "delete_end", "==", "True", ":", "cls", ".", "remove", "(", "end_elem", ",", "leave_tail", "=", "True", ")", "elif", "elem", "is", "None", ":", "if", "parent", ".", "tail", "not", "in", "[", "None", ",", "''", "]", ":", "parent", ".", "tail", "=", "''", "cls", ".", "remove_range", "(", "parent", ".", "getnext", "(", ")", ",", "end_elem", ")", "XML", ".", "remove_if_empty", "(", "parent", ")", "elif", "end_elem", "in", "elem", ".", "xpath", "(", "\"descendant::*\"", ")", ":", "if", "DEBUG", "==", "True", ":", "print", "(", "elem", ".", "text", ")", "elem", ".", "text", "=", "''", "cls", ".", "remove_range", "(", "elem", ".", "getchildren", "(", ")", "[", "0", "]", ",", "end_elem", ")", "XML", ".", "remove_if_empty", "(", "elem", ")", "else", ":", "print", "(", "\"LOGIC ERROR\"", ",", "file", "=", "sys", ".", "stderr", ")" ]
Counts how many of the flags can be matched
def score ( self , flags : Flags ) -> int : score = 0 for k , v in flags . items ( ) : if self . flags . get ( k ) == v : score += 1 return score
10,577
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L97-L108
[ "def", "connect", "(", "self", ",", "server", ")", ":", "if", "'connections'", "not", "in", "session", ":", "session", "[", "'connections'", "]", "=", "{", "}", "session", ".", "save", "(", ")", "conns", "=", "session", "[", "'connections'", "]", "id", "=", "str", "(", "len", "(", "conns", ")", ")", "conn", "=", "Connection", "(", "server", ")", "conns", "[", "id", "]", "=", "conn", "yield", "request", ".", "environ", "[", "'cogen.core'", "]", ".", "events", ".", "AddCoro", "(", "conn", ".", "pull", ")", "yield", "id" ]
Given flags find all items of this sentence that have an equal matching score and put them in a list .
def best_for_flags ( self , flags : Flags ) -> List [ TransItem ] : best_score : int = 0 best_list : List [ TransItem ] = [ ] for item in self . items : score = item . score ( flags ) if score == best_score : best_list . append ( item ) elif score > best_score : best_list = [ item ] best_score = score return best_list
10,578
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L120-L138
[ "def", "request_control", "(", "self", ",", "device_id", ",", "access_mode", "=", "True", ")", ":", "if", "access_mode", ":", "if", "not", "request_control", "(", "self", ".", "corsair_sdk", ",", "device_id", ")", ":", "self", ".", "_raise_corsair_error", "(", ")", "return", "True", "else", ":", "self", ".", "reload", "(", ")" ]
Chooses a random sentence from the list and returns it .
def render ( self , flags : Flags ) -> Text : return random . choice ( self . best_for_flags ( flags ) ) . value
10,579
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L140-L144
[ "def", "registry_key", "(", "self", ",", "key_name", ",", "value_name", ",", "value_type", ",", "*", "*", "kwargs", ")", ":", "indicator_obj", "=", "RegistryKey", "(", "key_name", ",", "value_name", ",", "value_type", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_indicator", "(", "indicator_obj", ")" ]
Erase items with the specified flags and insert the new items from the other sentence instead .
def update ( self , new : 'Sentence' , flags : Flags ) : items = [ i for i in self . items if i . flags != flags ] items . extend ( new . items ) self . items = items
10,580
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L159-L167
[ "def", "global_closeness_centrality", "(", "g", ",", "node", "=", "None", ",", "normalize", "=", "True", ")", ":", "if", "not", "node", ":", "C", "=", "{", "}", "for", "node", "in", "g", ".", "nodes", "(", ")", ":", "C", "[", "node", "]", "=", "global_closeness_centrality", "(", "g", ",", "node", ",", "normalize", "=", "normalize", ")", "return", "C", "values", "=", "nx", ".", "shortest_path_length", "(", "g", ",", "node", ")", ".", "values", "(", ")", "c", "=", "sum", "(", "[", "1.", "/", "pl", "for", "pl", "in", "values", "if", "pl", "!=", "0.", "]", ")", "/", "len", "(", "g", ")", "if", "normalize", ":", "ac", "=", "0", "for", "sg", "in", "nx", ".", "connected_component_subgraphs", "(", "g", ")", ":", "if", "len", "(", "sg", ".", "nodes", "(", ")", ")", ">", "1", ":", "aspl", "=", "nx", ".", "average_shortest_path_length", "(", "sg", ")", "ac", "+=", "(", "1.", "/", "aspl", ")", "*", "(", "float", "(", "len", "(", "sg", ")", ")", "/", "float", "(", "len", "(", "g", ")", ")", "**", "2", ")", "c", "=", "c", "/", "ac", "return", "c" ]
Returns a list of randomly chosen outcomes for each sentence of the list .
def render ( self , flags : Flags ) -> List [ Text ] : return [ x . render ( flags ) for x in self . sentences ]
10,581
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L185-L190
[ "def", "_connect_hive", "(", "self", ",", "hive", ")", ":", "try", ":", "handle", "=", "self", ".", "_remote_hives", "[", "hive", "]", "except", "KeyError", ":", "handle", "=", "win32", ".", "RegConnectRegistry", "(", "self", ".", "_machine", ",", "hive", ")", "self", ".", "_remote_hives", "[", "hive", "]", "=", "handle", "return", "handle" ]
Append an item to the list . If there is not enough sentences in the list then the list is extended as needed .
def append ( self , item : TransItem ) : if not ( 1 <= item . index <= settings . I18N_MAX_SENTENCES_PER_GROUP ) : return if len ( self . sentences ) < item . index : for _ in range ( len ( self . sentences ) , item . index ) : self . sentences . append ( Sentence ( ) ) self . sentences [ item . index - 1 ] . append ( item )
10,582
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L192-L207
[ "def", "set", "(", "self", ",", "key", ",", "value", ")", ":", "target", "=", "self", ".", "registrar", "for", "element", "in", "key", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ":", "target", "=", "target", ".", "setdefault", "(", "element", ",", "dict", "(", ")", ")", "target", "[", "key", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "]", "=", "value" ]
This object is considered to be a global sentence group while the other one is flags - specific . All data related to the specified flags will be overwritten by the content of the specified group .
def update ( self , group : 'SentenceGroup' , flags : Flags ) -> None : to_append = [ ] for old , new in zip_longest ( self . sentences , group . sentences ) : if old is None : old = Sentence ( ) to_append . append ( old ) if new is None : new = Sentence ( ) old . update ( new , flags ) self . sentences . extend ( to_append )
10,583
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L212-L231
[ "def", "export_public_keys", "(", "self", ",", "identities", ")", ":", "public_keys", "=", "[", "]", "with", "self", ".", "device", ":", "for", "i", "in", "identities", ":", "pubkey", "=", "self", ".", "device", ".", "pubkey", "(", "identity", "=", "i", ")", "vk", "=", "formats", ".", "decompress_pubkey", "(", "pubkey", "=", "pubkey", ",", "curve_name", "=", "i", ".", "curve_name", ")", "public_key", "=", "formats", ".", "export_public_key", "(", "vk", "=", "vk", ",", "label", "=", "i", ".", "to_string", "(", ")", ")", "public_keys", ".", "append", "(", "public_key", ")", "return", "public_keys" ]
Extract only the valid sentence groups into a dictionary .
def extract ( self ) : out = { } for key , group in self . data . items ( ) : out [ key ] = group return out
10,584
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L245-L255
[ "def", "setOverlayTextureColorSpace", "(", "self", ",", "ulOverlayHandle", ",", "eTextureColorSpace", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTextureColorSpace", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTextureColorSpace", ")", "return", "result" ]
Append an item to the internal dictionary .
def append ( self , item : TransItem ) : self . data [ item . key ] . append ( item )
10,585
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L257-L262
[ "def", "find_min_required", "(", "path", ")", ":", "found_min_required", "=", "''", "for", "filename", "in", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'*.tf'", ")", ")", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "stream", ":", "tf_config", "=", "hcl", ".", "load", "(", "stream", ")", "if", "tf_config", ".", "get", "(", "'terraform'", ",", "{", "}", ")", ".", "get", "(", "'required_version'", ")", ":", "found_min_required", "=", "tf_config", ".", "get", "(", "'terraform'", ",", "{", "}", ")", ".", "get", "(", "'required_version'", ")", "break", "if", "found_min_required", ":", "if", "re", ".", "match", "(", "r'^!=.+'", ",", "found_min_required", ")", ":", "LOGGER", ".", "error", "(", "'Min required Terraform version is a negation (%s) '", "'- unable to determine required version'", ",", "found_min_required", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "found_min_required", "=", "re", ".", "search", "(", "r'[0-9]*\\.[0-9]*(?:\\.[0-9]*)?'", ",", "found_min_required", ")", ".", "group", "(", "0", ")", "LOGGER", ".", "debug", "(", "\"Detected minimum terraform version is %s\"", ",", "found_min_required", ")", "return", "found_min_required", "LOGGER", ".", "error", "(", "'Terraform version specified as min-required, but unable to '", "'find a specified version requirement in this module\\'s tf '", "'files'", ")", "sys", ".", "exit", "(", "1", ")" ]
This creates the loaders instances and subscribes to their updates .
def _init_loaders ( self ) -> None : for loader in settings . I18N_TRANSLATION_LOADERS : loader_class = import_class ( loader [ 'loader' ] ) instance = loader_class ( ) instance . on_update ( self . update ) run ( instance . load ( * * loader [ 'params' ] ) )
10,586
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L276-L285
[ "def", "_add_sj_index_commands", "(", "fq1", ",", "ref_file", ",", "gtf_file", ")", ":", "if", "_has_sj_index", "(", "ref_file", ")", ":", "return", "\"\"", "else", ":", "rlength", "=", "fastq", ".", "estimate_maximum_read_length", "(", "fq1", ")", "cmd", "=", "\" --sjdbGTFfile %s \"", "%", "gtf_file", "cmd", "+=", "\" --sjdbOverhang %s \"", "%", "str", "(", "rlength", "-", "1", ")", "return", "cmd" ]
Update translations for one specific lang
def update_lang ( self , lang : Optional [ Text ] , data : List [ Tuple [ Text , Text ] ] , flags : Flags ) : sd = SortingDict ( ) for item in ( self . parse_item ( x [ 0 ] , x [ 1 ] , flags ) for x in data ) : if item : sd . append ( item ) if lang not in self . dict : self . dict [ lang ] = { } d = self . dict [ lang ] for k , v in sd . extract ( ) . items ( ) : if k not in d : d [ k ] = SentenceGroup ( ) d [ k ] . update ( v , flags )
10,587
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L315-L338
[ "def", "get_raw_values", "(", "self", ",", "pydict", ",", "recovery_name", "=", "True", ")", ":", "new_dict", "=", "{", "\"id\"", ":", "pydict", "[", "\"id\"", "]", "}", "for", "field", "in", "self", ":", "raw_key", "=", "\"%s_raw\"", "%", "field", ".", "key", "if", "raw_key", "in", "pydict", ":", "if", "recovery_name", ":", "new_dict", "[", "field", ".", "name", "]", "=", "pydict", "[", "raw_key", "]", "else", ":", "new_dict", "[", "field", ".", "key", "]", "=", "pydict", "[", "raw_key", "]", "return", "new_dict" ]
Update all langs at once
def update ( self , data : TransDict , flags : Flags ) : for lang , lang_data in data . items ( ) : self . update_lang ( lang , lang_data , flags )
10,588
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L340-L346
[ "def", "factory", "(", "data", ")", ":", "if", "'object'", "not", "in", "data", ":", "raise", "exceptions", ".", "UnknownAPIResource", "(", "'Missing `object` key in resource.'", ")", "for", "reconstituable_api_resource_type", "in", "ReconstituableAPIResource", ".", "__subclasses__", "(", ")", ":", "if", "reconstituable_api_resource_type", ".", "object_type", "==", "data", "[", "'object'", "]", ":", "return", "reconstituable_api_resource_type", "(", "*", "*", "data", ")", "raise", "exceptions", ".", "UnknownAPIResource", "(", "'Unknown object `'", "+", "data", "[", "'object'", "]", "+", "'`.'", ")" ]
Get the appropriate translation given the specified parameters .
def get ( self , key : Text , count : Optional [ int ] = None , formatter : Formatter = None , locale : Text = None , params : Optional [ Dict [ Text , Any ] ] = None , flags : Optional [ Flags ] = None ) -> List [ Text ] : if params is None : params = { } if count is not None : raise TranslationError ( 'Count parameter is not supported yet' ) locale = self . choose_locale ( locale ) try : group : SentenceGroup = self . dict [ locale ] [ key ] except KeyError : raise MissingTranslationError ( 'Translation "{}" does not exist' . format ( key ) ) try : trans = group . render ( flags or { } ) out = [ ] for line in trans : if not formatter : out . append ( line . format ( * * params ) ) else : out . append ( formatter . format ( line , * * params ) ) except KeyError as e : raise MissingParamError ( 'Parameter "{}" missing to translate "{}"' . format ( e . args [ 0 ] , key ) ) else : return out
10,589
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L348-L395
[ "def", "finish", "(", "self", ")", ":", "log", ".", "debug", "(", "\"Session disconnected.\"", ")", "try", ":", "self", ".", "sock", ".", "shutdown", "(", "socket", ".", "SHUT_RDWR", ")", "except", ":", "pass", "self", ".", "session_end", "(", ")" ]
If any StringToTranslate was passed as parameter then it is rendered at this moment .
async def _resolve_params ( self , params : Dict [ Text , Any ] , request : Optional [ 'Request' ] ) : out = { } for k , v in params . items ( ) : if isinstance ( v , StringToTranslate ) : out [ k ] = await render ( v , request ) else : out [ k ] = v return out
10,590
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L436-L452
[ "def", "on_close_grid", "(", "self", ",", "event", ")", ":", "if", "self", ".", "parent", ".", "grid_frame", ":", "self", ".", "parent", ".", "grid_frame", ".", "onSave", "(", "None", ")", "self", ".", "parent", ".", "grid_frame", ".", "Destroy", "(", ")" ]
Render the translation as a list if there is multiple strings for this single key .
async def render_list ( self , request = None ) -> List [ Text ] : from bernard . middleware import MiddlewareManager if request : tz = await request . user . get_timezone ( ) locale = await request . get_locale ( ) flags = await request . get_trans_flags ( ) else : tz = None locale = self . wd . list_locales ( ) [ 0 ] flags = { } rp = MiddlewareManager . instance ( ) . get ( 'resolve_trans_params' , self . _resolve_params ) resolved_params = await rp ( self . params , request ) f = I18nFormatter ( self . wd . choose_locale ( locale ) , tz ) return self . wd . get ( self . key , self . count , f , locale , resolved_params , flags , )
10,591
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/translator.py#L466-L497
[ "async", "def", "open_session", "(", "self", ",", "request", ":", "BaseRequestWebsocket", ")", "->", "Session", ":", "return", "await", "ensure_coroutine", "(", "self", ".", "session_interface", ".", "open_session", ")", "(", "self", ",", "request", ")" ]
Add a message stack to the send list .
def send ( self , stack : Layers ) : if not isinstance ( stack , Stack ) : stack = Stack ( stack ) if not self . platform . accept ( stack ) : raise UnacceptableStack ( 'The platform does not allow "{}"' . format ( stack . describe ( ) ) ) self . _stacks . append ( stack )
10,592
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/responder.py#L46-L58
[ "def", "generate_citation_counter", "(", "self", ")", ":", "cite_counter", "=", "dict", "(", ")", "filename", "=", "'%s.aux'", "%", "self", ".", "project_name", "with", "open", "(", "filename", ")", "as", "fobj", ":", "main_aux", "=", "fobj", ".", "read", "(", ")", "cite_counter", "[", "filename", "]", "=", "_count_citations", "(", "filename", ")", "for", "match", "in", "re", ".", "finditer", "(", "r'\\\\@input\\{(.*.aux)\\}'", ",", "main_aux", ")", ":", "filename", "=", "match", ".", "groups", "(", ")", "[", "0", "]", "try", ":", "counter", "=", "_count_citations", "(", "filename", ")", "except", "IOError", ":", "pass", "else", ":", "cite_counter", "[", "filename", "]", "=", "counter", "return", "cite_counter" ]
Send all queued messages .
async def flush ( self , request : 'Request' ) : from bernard . middleware import MiddlewareManager for stack in self . _stacks : await stack . convert_media ( self . platform ) func = MiddlewareManager . instance ( ) . get ( 'flush' , self . _flush ) await func ( request , self . _stacks )
10,593
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/responder.py#L67-L80
[ "def", "fingerprint", "(", "self", ",", "option_type", ",", "option_val", ")", ":", "if", "option_val", "is", "None", ":", "return", "None", "# Wrapping all other values in a list here allows us to easily handle single-valued and", "# list-valued options uniformly. For non-list-valued options, this will be a singleton list", "# (with the exception of dict, which is not modified). This dict exception works because we do", "# not currently have any \"list of dict\" type, so there is no ambiguity.", "if", "not", "isinstance", "(", "option_val", ",", "(", "list", ",", "tuple", ",", "dict", ")", ")", ":", "option_val", "=", "[", "option_val", "]", "if", "option_type", "==", "target_option", ":", "return", "self", ".", "_fingerprint_target_specs", "(", "option_val", ")", "elif", "option_type", "==", "dir_option", ":", "return", "self", ".", "_fingerprint_dirs", "(", "option_val", ")", "elif", "option_type", "==", "file_option", ":", "return", "self", ".", "_fingerprint_files", "(", "option_val", ")", "elif", "option_type", "==", "dict_with_files_option", ":", "return", "self", ".", "_fingerprint_dict_with_files", "(", "option_val", ")", "else", ":", "return", "self", ".", "_fingerprint_primitives", "(", "option_val", ")" ]
Use all underlying stacks to generate the next transition register .
async def make_transition_register ( self , request : 'Request' ) : register = { } for stack in self . _stacks : register = await stack . patch_register ( register , request ) return register
10,594
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/responder.py#L91-L101
[ "def", "AddComment", "(", "self", ",", "comment", ")", ":", "if", "not", "comment", ":", "return", "if", "not", "self", ".", "comment", ":", "self", ".", "comment", "=", "comment", "else", ":", "self", ".", "comment", "=", "''", ".", "join", "(", "[", "self", ".", "comment", ",", "comment", "]", ")" ]
adds the banner to the preloop
def preloop ( self ) : lines = textwrap . dedent ( self . banner ) . split ( "\n" ) for line in lines : Console . _print ( "BLUE" , "" , line )
10,595
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/template.py#L21-L25
[ "def", "editDirectory", "(", "self", ",", "directoryName", ",", "physicalPath", ",", "description", ")", ":", "url", "=", "self", ".", "_url", "+", "\"/directories/%s/edit\"", "%", "directoryName", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "\"physicalPath\"", ":", "physicalPath", ",", "\"description\"", ":", "description", "}", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_port", "=", "self", ".", "_proxy_port", ",", "proxy_url", "=", "self", ".", "_proxy_url", ")" ]
Sums up all the balances of the account and returns them .
def getDebt ( self ) : debt = float ( self [ 'principalBalance' ] ) + float ( self [ 'interestBalance' ] ) debt += float ( self [ 'feesBalance' ] ) + float ( self [ 'penaltyBalance' ] ) return debt
10,596
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuloan.py#L43-L49
[ "def", "store_result", "(", "self", ",", "message", ",", "result", ":", "Result", ",", "ttl", ":", "int", ")", "->", "None", ":", "message_key", "=", "self", ".", "build_message_key", "(", "message", ")", "return", "self", ".", "_store", "(", "message_key", ",", "result", ",", "ttl", ")" ]
Adds the repayments for this loan to a repayments field .
def setRepayments ( self , * args , * * kwargs ) : def duedate ( repayment ) : """Util function used for sorting repayments according to due Date""" try : return repayment [ 'dueDate' ] except KeyError as kerr : return datetime . now ( ) try : reps = self . mamburepaymentsclass ( entid = self [ 'id' ] , * args , * * kwargs ) except AttributeError as ae : from . mamburepayment import MambuRepayments self . mamburepaymentsclass = MambuRepayments reps = self . mamburepaymentsclass ( entid = self [ 'id' ] , * args , * * kwargs ) reps . attrs = sorted ( reps . attrs , key = duedate ) self [ 'repayments' ] = reps return 1
10,597
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuloan.py#L69-L100
[ "def", "default_index", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "check_default_index", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_file_path", "!=", "self", ".", "_index_path", "(", ")", ":", "raise", "AssertionError", "(", "\"Cannot call %r on indices that do not represent the default git index\"", "%", "func", ".", "__name__", ")", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "# END wrapper method", "return", "check_default_index" ]
Adds the transactions for this loan to a transactions field .
def setTransactions ( self , * args , * * kwargs ) : def transactionid ( transaction ) : """Util function used for sorting transactions according to id""" try : return transaction [ 'transactionId' ] except KeyError as kerr : return None try : trans = self . mambutransactionsclass ( entid = self [ 'id' ] , * args , * * kwargs ) except AttributeError as ae : from . mambutransaction import MambuTransactions self . mambutransactionsclass = MambuTransactions trans = self . mambutransactionsclass ( entid = self [ 'id' ] , * args , * * kwargs ) trans . attrs = sorted ( trans . attrs , key = transactionid ) self [ 'transactions' ] = trans return 1
10,598
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuloan.py#L103-L134
[ "def", "authenticate_heat_admin", "(", "self", ",", "keystone", ")", ":", "self", ".", "log", ".", "debug", "(", "'Authenticating heat admin...'", ")", "ep", "=", "keystone", ".", "service_catalog", ".", "url_for", "(", "service_type", "=", "'orchestration'", ",", "interface", "=", "'publicURL'", ")", "if", "keystone", ".", "session", ":", "return", "heat_client", ".", "Client", "(", "endpoint", "=", "ep", ",", "session", "=", "keystone", ".", "session", ")", "else", ":", "return", "heat_client", ".", "Client", "(", "endpoint", "=", "ep", ",", "token", "=", "keystone", ".", "auth_token", ")" ]
Adds the centre for this loan to a assignedCentre field .
def setCentre ( self , * args , * * kwargs ) : try : centre = self . mambucentreclass ( entid = self [ 'assignedCentreKey' ] , * args , * * kwargs ) except AttributeError as ae : from . mambucentre import MambuCentre self . mambucentreclass = MambuCentre centre = self . mambucentreclass ( entid = self [ 'assignedCentreKey' ] , * args , * * kwargs ) self [ 'assignedCentreName' ] = centre [ 'name' ] self [ 'assignedCentre' ] = centre return 1
10,599
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuloan.py#L160-L180
[ "def", "write_pdb", "(", "residues", ",", "chain_id", "=", "' '", ",", "alt_states", "=", "False", ",", "strip_states", "=", "False", ")", ":", "pdb_atom_col_dict", "=", "PDB_ATOM_COLUMNS", "out_pdb", "=", "[", "]", "if", "len", "(", "str", "(", "chain_id", ")", ")", ">", "1", ":", "poly_id", "=", "' '", "else", ":", "poly_id", "=", "str", "(", "chain_id", ")", "for", "monomer", "in", "residues", ":", "if", "(", "len", "(", "monomer", ".", "states", ")", ">", "1", ")", "and", "alt_states", "and", "not", "strip_states", ":", "atom_list", "=", "itertools", ".", "chain", "(", "*", "[", "x", "[", "1", "]", ".", "items", "(", ")", "for", "x", "in", "sorted", "(", "monomer", ".", "states", ".", "items", "(", ")", ")", "]", ")", "else", ":", "atom_list", "=", "monomer", ".", "atoms", ".", "items", "(", ")", "if", "'chain_id'", "in", "monomer", ".", "tags", ":", "poly_id", "=", "monomer", ".", "tags", "[", "'chain_id'", "]", "for", "atom_t", ",", "atom", "in", "atom_list", ":", "if", "strip_states", ":", "state_label", "=", "' '", "elif", "(", "atom", ".", "tags", "[", "'state'", "]", "==", "'A'", ")", "and", "(", "len", "(", "monomer", ".", "states", ")", "==", "1", ")", ":", "state_label", "=", "' '", "else", ":", "state_label", "=", "atom", ".", "tags", "[", "'state'", "]", "atom_data", "=", "{", "'atom_number'", ":", "'{:>5}'", ".", "format", "(", "cap", "(", "atom", ".", "id", ",", "5", ")", ")", ",", "'atom_name'", ":", "'{:<4}'", ".", "format", "(", "cap", "(", "pdb_atom_col_dict", "[", "atom_t", "]", ",", "4", ")", ")", ",", "'alt_loc_ind'", ":", "'{:<1}'", ".", "format", "(", "cap", "(", "state_label", ",", "1", ")", ")", ",", "'residue_type'", ":", "'{:<3}'", ".", "format", "(", "cap", "(", "monomer", ".", "mol_code", ",", "3", ")", ")", ",", "'chain_id'", ":", "'{:<1}'", ".", "format", "(", "cap", "(", "poly_id", ",", "1", ")", ")", ",", "'res_num'", ":", "'{:>4}'", ".", "format", "(", "cap", "(", "monomer", ".", "id", ",", "4", ")", ")", ",", "'icode'", ":", 
"'{:<1}'", ".", "format", "(", "cap", "(", "monomer", ".", "insertion_code", ",", "1", ")", ")", ",", "'coord_str'", ":", "'{0:>8.3f}{1:>8.3f}{2:>8.3f}'", ".", "format", "(", "*", "[", "x", "for", "x", "in", "atom", "]", ")", ",", "'occupancy'", ":", "'{:>6.2f}'", ".", "format", "(", "atom", ".", "tags", "[", "'occupancy'", "]", ")", ",", "'temp_factor'", ":", "'{:>6.2f}'", ".", "format", "(", "atom", ".", "tags", "[", "'bfactor'", "]", ")", ",", "'element'", ":", "'{:>2}'", ".", "format", "(", "cap", "(", "atom", ".", "element", ",", "2", ")", ")", ",", "'charge'", ":", "'{:<2}'", ".", "format", "(", "cap", "(", "atom", ".", "tags", "[", "'charge'", "]", ",", "2", ")", ")", "}", "if", "monomer", ".", "is_hetero", ":", "pdb_line_template", "=", "(", "'HETATM{atom_number} {atom_name}{alt_loc_ind}{residue_type}'", "' {chain_id}{res_num}{icode} {coord_str}{occupancy}'", "'{temp_factor} {element}{charge}\\n'", ")", "else", ":", "pdb_line_template", "=", "(", "'ATOM {atom_number} {atom_name}{alt_loc_ind}{residue_type}'", "' {chain_id}{res_num}{icode} {coord_str}{occupancy}'", "'{temp_factor} {element}{charge}\\n'", ")", "out_pdb", ".", "append", "(", "pdb_line_template", ".", "format", "(", "*", "*", "atom_data", ")", ")", "return", "''", ".", "join", "(", "out_pdb", ")" ]