query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Return a description of the key
def key_description ( self ) : vk , scan , flags = self . _get_key_info ( ) desc = '' if vk : if vk in CODE_NAMES : desc = CODE_NAMES [ vk ] else : desc = "VK %d" % vk else : desc = "%s" % self . key return desc
9,100
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L359-L371
[ "def", "rpc_start", "(", "working_dir", ",", "port", ",", "subdomain_index", "=", "None", ",", "thread", "=", "True", ")", ":", "rpc_srv", "=", "BlockstackdRPCServer", "(", "working_dir", ",", "port", ",", "subdomain_index", "=", "subdomain_index", ")", "log", ".", "debug", "(", "\"Starting RPC on port {}\"", ".", "format", "(", "port", ")", ")", "if", "thread", ":", "rpc_srv", ".", "start", "(", ")", "return", "rpc_srv" ]
Virtual keys have extended flag set
def _get_key_info ( self ) : # copied more or less verbatim from # http://www.pinvoke.net/default.aspx/user32.sendinput if ( ( self . key >= 33 and self . key <= 46 ) or ( self . key >= 91 and self . key <= 93 ) ) : flags = KEYEVENTF_EXTENDEDKEY else : flags = 0 # This works for %{F4} - ALT + F4 #return self.key, 0, 0 # this works for Tic Tac Toe i.e. +{RIGHT} SHIFT + RIGHT return self . key , MapVirtualKey ( self . key , 0 ) , flags
9,101
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L389-L404
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
EscapedKeyAction doesn t send it as Unicode and the vk and scan code are generated differently
def _get_key_info ( self ) : vkey_scan = LoByte ( VkKeyScan ( self . key ) ) return ( vkey_scan , MapVirtualKey ( vkey_scan , 0 ) , 0 )
9,102
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L412-L417
[ "def", "balanced_binary_tree", "(", "n_leaves", ")", ":", "def", "_balanced_subtree", "(", "leaves", ")", ":", "if", "len", "(", "leaves", ")", "==", "1", ":", "return", "leaves", "[", "0", "]", "elif", "len", "(", "leaves", ")", "==", "2", ":", "return", "(", "leaves", "[", "0", "]", ",", "leaves", "[", "1", "]", ")", "else", ":", "split", "=", "len", "(", "leaves", ")", "//", "2", "return", "(", "_balanced_subtree", "(", "leaves", "[", ":", "split", "]", ")", ",", "_balanced_subtree", "(", "leaves", "[", "split", ":", "]", ")", ")", "return", "_balanced_subtree", "(", "np", ".", "arange", "(", "n_leaves", ")", ")" ]
Method runs the plugin attaching policies to the user in question
def setup ( self ) : self . template = self . _generate_inline_policy ( ) if self . dry_run is not True : self . client = self . _get_client ( ) username = self . _get_username_for_key ( ) policy_document = self . _generate_inline_policy ( ) self . _attach_inline_policy ( username , policy_document ) pass
9,103
https://github.com/ThreatResponse/aws_ir_plugins/blob/b5128ef5cbd91fc0b5d55615f1c14cb036ae7c73/aws_ir_plugins/revokests_key.py#L27-L35
[ "def", "get_dbs", "(", ")", ":", "url", "=", "posixpath", ".", "join", "(", "config", ".", "db_index_url", ",", "'DBS'", ")", "response", "=", "requests", ".", "get", "(", "url", ")", "dbs", "=", "response", ".", "content", ".", "decode", "(", "'ascii'", ")", ".", "splitlines", "(", ")", "dbs", "=", "[", "re", ".", "sub", "(", "'\\t{2,}'", ",", "'\\t'", ",", "line", ")", ".", "split", "(", "'\\t'", ")", "for", "line", "in", "dbs", "]", "return", "dbs" ]
Returns all the policy names for a given user
def _get_policies ( self ) : username = self . _get_username_for_key ( ) policies = self . client . list_user_policies ( UserName = username ) return policies
9,104
https://github.com/ThreatResponse/aws_ir_plugins/blob/b5128ef5cbd91fc0b5d55615f1c14cb036ae7c73/aws_ir_plugins/revokests_key.py#L53-L59
[ "def", "update_version_descriptor", "(", "self", ",", "task", ",", "releasetype", ",", "descriptor", ",", "verbrowser", ",", "commentbrowser", ")", ":", "if", "task", "is", "None", ":", "null", "=", "treemodel", ".", "TreeItem", "(", "None", ")", "verbrowser", ".", "set_model", "(", "treemodel", ".", "TreeModel", "(", "null", ")", ")", "return", "m", "=", "self", ".", "create_version_model", "(", "task", ",", "releasetype", ",", "descriptor", ")", "verbrowser", ".", "set_model", "(", "m", ")", "commentbrowser", ".", "set_model", "(", "m", ")" ]
Find the user for a given access key
def _get_username_for_key ( self ) : response = self . client . get_access_key_last_used ( AccessKeyId = self . compromised_resource [ 'access_key_id' ] ) username = response [ 'UserName' ] return username
9,105
https://github.com/ThreatResponse/aws_ir_plugins/blob/b5128ef5cbd91fc0b5d55615f1c14cb036ae7c73/aws_ir_plugins/revokests_key.py#L66-L72
[ "def", "interior_nesting", "(", "cls", ",", "elem1", ",", "xpath", ",", "namespaces", "=", "None", ")", ":", "for", "elem2", "in", "elem1", ".", "xpath", "(", "xpath", ",", "namespaces", "=", "namespaces", ")", ":", "child_elem1", "=", "etree", ".", "Element", "(", "elem1", ".", "tag", ")", "for", "k", "in", "elem1", ".", "attrib", ":", "child_elem1", ".", "set", "(", "k", ",", "elem1", ".", "get", "(", "k", ")", ")", "child_elem1", ".", "text", ",", "elem2", ".", "text", "=", "elem2", ".", "text", ",", "''", "for", "ch", "in", "elem2", ".", "getchildren", "(", ")", ":", "child_elem1", ".", "append", "(", "ch", ")", "elem2", ".", "insert", "(", "0", ",", "child_elem1", ")", "XML", ".", "replace_with_contents", "(", "elem1", ")" ]
Renders a policy from a jinja template
def _generate_inline_policy ( self ) : template_name = self . _locate_file ( 'deny-sts-before-time.json.j2' ) template_file = open ( template_name ) template_contents = template_file . read ( ) template_file . close ( ) jinja_template = Template ( template_contents ) policy_document = jinja_template . render ( before_date = self . _get_date ( ) ) return policy_document
9,106
https://github.com/ThreatResponse/aws_ir_plugins/blob/b5128ef5cbd91fc0b5d55615f1c14cb036ae7c73/aws_ir_plugins/revokests_key.py#L74-L84
[ "def", "_strip_ctype", "(", "name", ",", "ctype", ",", "protocol", "=", "2", ")", ":", "# parse channel type from name (e.g. 'L1:GDS-CALIB_STRAIN,reduced')", "try", ":", "name", ",", "ctypestr", "=", "name", ".", "rsplit", "(", "','", ",", "1", ")", "except", "ValueError", ":", "pass", "else", ":", "ctype", "=", "Nds2ChannelType", ".", "find", "(", "ctypestr", ")", ".", "value", "# NDS1 stores channels with trend suffix, so we put it back:", "if", "protocol", "==", "1", "and", "ctype", "in", "(", "Nds2ChannelType", ".", "STREND", ".", "value", ",", "Nds2ChannelType", ".", "MTREND", ".", "value", ")", ":", "name", "+=", "',{0}'", ".", "format", "(", "ctypestr", ")", "return", "name", ",", "ctype" ]
Attaches the policy to the user
def _attach_inline_policy ( self , username , policy_document ) : response = self . client . put_user_policy ( UserName = username , PolicyName = "threatresponse-temporal-key-revocation" , PolicyDocument = policy_document ) logger . info ( 'An inline policy has been attached for' ' {u} revoking sts tokens.' . format ( u = username ) ) return response
9,107
https://github.com/ThreatResponse/aws_ir_plugins/blob/b5128ef5cbd91fc0b5d55615f1c14cb036ae7c73/aws_ir_plugins/revokests_key.py#L86-L97
[ "def", "_gen_ticket", "(", "prefix", "=", "None", ",", "lg", "=", "settings", ".", "CAS_TICKET_LEN", ")", ":", "random_part", "=", "u''", ".", "join", "(", "random", ".", "choice", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", "for", "_", "in", "range", "(", "lg", "-", "len", "(", "prefix", "or", "\"\"", ")", "-", "1", ")", ")", "if", "prefix", "is", "not", "None", ":", "return", "u'%s-%s'", "%", "(", "prefix", ",", "random_part", ")", "else", ":", "return", "random_part" ]
Locate all files matching supplied filename pattern in and below
def _locate_file ( self , pattern , root = os . path . dirname ( 'revokests_key.py' ) ) : for path , dirs , files in os . walk ( os . path . abspath ( root ) ) : for filename in fnmatch . filter ( files , pattern ) : return os . path . join ( path , filename )
9,108
https://github.com/ThreatResponse/aws_ir_plugins/blob/b5128ef5cbd91fc0b5d55615f1c14cb036ae7c73/aws_ir_plugins/revokests_key.py#L99-L107
[ "def", "getByteStatistic", "(", "self", ",", "wanInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wan", ".", "getServiceType", "(", "\"getByteStatistic\"", ")", "+", "str", "(", "wanInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetTotalBytesSent\"", ",", "timeout", "=", "timeout", ")", "results2", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetTotalBytesReceived\"", ",", "timeout", "=", "timeout", ")", "return", "[", "int", "(", "results", "[", "\"NewTotalBytesSent\"", "]", ")", ",", "int", "(", "results2", "[", "\"NewTotalBytesReceived\"", "]", ")", "]" ]
Unlike generate_tsv_lines_multifile this generates tsv lines from multiple files that may have different headers . Yields fn header as well as quant data for each protein quant
def generate_tsv_pep_protein_quants ( fns ) : for fn in fns : header = get_tsv_header ( fn ) for pquant in generate_split_tsv_lines ( fn , header ) : yield os . path . basename ( fn ) , header , pquant
9,109
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/tsv.py#L22-L29
[ "def", "union", "(", "self", ",", "other", ")", ":", "union", "=", "Rect", "(", ")", "lib", ".", "SDL_UnionRect", "(", "self", ".", "_ptr", ",", "other", ".", "_ptr", ",", "union", ".", "_ptr", ")", "return", "union" ]
Generates tuples of spectra filename and corresponding output features from kronik
def mzmlfn_kronikfeature_generator ( mzmlfns , kronikfns ) : for mzmlfn , kronikfn in zip ( mzmlfns , kronikfns ) : for quant_el in generate_kronik_feats ( kronikfn ) : yield os . path . basename ( mzmlfn ) , quant_el
9,110
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/tsv.py#L38-L43
[ "def", "_get_blocks_containing_index", "(", "self", ",", "axis", ",", "index", ")", ":", "if", "not", "axis", ":", "ErrorMessage", ".", "catch_bugs_and_request_email", "(", "index", ">", "sum", "(", "self", ".", "block_widths", ")", ")", "cumulative_column_widths", "=", "np", ".", "array", "(", "self", ".", "block_widths", ")", ".", "cumsum", "(", ")", "block_idx", "=", "int", "(", "np", ".", "digitize", "(", "index", ",", "cumulative_column_widths", ")", ")", "if", "block_idx", "==", "len", "(", "cumulative_column_widths", ")", ":", "block_idx", "-=", "1", "# Compute the internal index based on the previous lengths. This", "# is a global index, so we must subtract the lengths first.", "internal_idx", "=", "(", "index", "if", "not", "block_idx", "else", "index", "-", "cumulative_column_widths", "[", "block_idx", "-", "1", "]", ")", "else", ":", "ErrorMessage", ".", "catch_bugs_and_request_email", "(", "index", ">", "sum", "(", "self", ".", "block_lengths", ")", ")", "cumulative_row_lengths", "=", "np", ".", "array", "(", "self", ".", "block_lengths", ")", ".", "cumsum", "(", ")", "block_idx", "=", "int", "(", "np", ".", "digitize", "(", "index", ",", "cumulative_row_lengths", ")", ")", "# See note above about internal index", "internal_idx", "=", "(", "index", "if", "not", "block_idx", "else", "index", "-", "cumulative_row_lengths", "[", "block_idx", "-", "1", "]", ")", "return", "block_idx", ",", "internal_idx" ]
Returns dicts with header - keys and psm statistic values
def generate_split_tsv_lines ( fn , header ) : for line in generate_tsv_psms_line ( fn ) : yield { x : y for ( x , y ) in zip ( header , line . strip ( ) . split ( '\t' ) ) }
9,111
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/tsv.py#L55-L58
[ "def", "_CreateIndexIfNotExists", "(", "self", ",", "index_name", ",", "mappings", ")", ":", "try", ":", "if", "not", "self", ".", "_client", ".", "indices", ".", "exists", "(", "index_name", ")", ":", "self", ".", "_client", ".", "indices", ".", "create", "(", "body", "=", "{", "'mappings'", ":", "mappings", "}", ",", "index", "=", "index_name", ")", "except", "elasticsearch", ".", "exceptions", ".", "ConnectionError", "as", "exception", ":", "raise", "RuntimeError", "(", "'Unable to create Elasticsearch index with error: {0!s}'", ".", "format", "(", "exception", ")", ")" ]
From a line return list of proteins reported by Mzid2TSV . When unrolled lines are given this returns the single protein from the line .
def get_proteins_from_psm ( line ) : proteins = line [ mzidtsvdata . HEADER_PROTEIN ] . split ( ';' ) outproteins = [ ] for protein in proteins : prepost_protein = re . sub ( '\(pre=.*post=.*\)' , '' , protein ) . strip ( ) outproteins . append ( prepost_protein ) return outproteins
9,112
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/tsv.py#L74-L82
[ "def", "reconnect_redis", "(", "self", ")", ":", "if", "self", ".", "shared_client", "and", "Storage", ".", "storage", ":", "return", "Storage", ".", "storage", "storage", "=", "Redis", "(", "port", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_PORT", ",", "host", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_HOST", ",", "db", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_DB", ",", "password", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_PASSWORD", ")", "if", "self", ".", "shared_client", ":", "Storage", ".", "storage", "=", "storage", "return", "storage" ]
DEBUG FUNC modify argv to look like you ran a command
def aug_sysargv ( cmdstr ) : import shlex argv = shlex . split ( cmdstr ) sys . argv . extend ( argv )
9,113
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L40-L44
[ "def", "union", "(", "self", ",", "other", ")", ":", "union", "=", "Rect", "(", ")", "lib", ".", "SDL_UnionRect", "(", "self", ".", "_ptr", ",", "other", ".", "_ptr", ",", "union", ".", "_ptr", ")", "return", "union" ]
checks for standard flags for enableing module specific verbosity
def get_module_verbosity_flags ( * labels ) : verbose_prefix_list = [ '--verbose-' , '--verb' , '--verb-' ] veryverbose_prefix_list = [ '--veryverbose-' , '--veryverb' , '--veryverb-' ] verbose_flags = tuple ( [ prefix + lbl for prefix , lbl in itertools . product ( verbose_prefix_list , labels ) ] ) veryverbose_flags = tuple ( [ prefix + lbl for prefix , lbl in itertools . product ( veryverbose_prefix_list , labels ) ] ) veryverbose_module = get_argflag ( veryverbose_flags ) or VERYVERBOSE verbose_module = ( get_argflag ( verbose_flags ) or veryverbose_module or VERBOSE ) if veryverbose_module : verbose_module = 2 return verbose_module , veryverbose_module
9,114
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L48-L62
[ "def", "execute_cross_join", "(", "op", ",", "left", ",", "right", ",", "*", "*", "kwargs", ")", ":", "# generate a unique name for the temporary join key", "key", "=", "\"cross_join_{}\"", ".", "format", "(", "ibis", ".", "util", ".", "guid", "(", ")", ")", "join_key", "=", "{", "key", ":", "True", "}", "new_left", "=", "left", ".", "assign", "(", "*", "*", "join_key", ")", "new_right", "=", "right", ".", "assign", "(", "*", "*", "join_key", ")", "# inner/outer doesn't matter because every row matches every other row", "result", "=", "pd", ".", "merge", "(", "new_left", ",", "new_right", ",", "how", "=", "'inner'", ",", "on", "=", "key", ",", "copy", "=", "False", ",", "suffixes", "=", "constants", ".", "JOIN_SUFFIXES", ",", ")", "# remove the generated key", "del", "result", "[", "key", "]", "return", "result" ]
Checks if the commandline has a flag or a corresponding noflag
def get_argflag ( argstr_ , default = False , help_ = '' , return_specified = None , need_prefix = True , return_was_specified = False , argv = None , debug = None , * * kwargs ) : if argv is None : argv = sys . argv assert isinstance ( default , bool ) , 'default must be boolean' argstr_list = meta_util_iter . ensure_iterable ( argstr_ ) #if VERYVERBOSE: # print('[util_arg] checking argstr_list=%r' % (argstr_list,)) # arg registration _register_arg ( argstr_list , bool , default , help_ ) parsed_val = default was_specified = False if debug is None : debug = DEBUG # Check environment variables for default as well as argv import os #""" #set UTOOL_NOCNN=True #export UTOOL_NOCNN True #""" #argv_orig = argv[:] # HACK: make this not happen very time you loop for key , val in os . environ . items ( ) : key = key . upper ( ) sentinal = 'UTOOL_' if key . startswith ( sentinal ) : flag = '--' + key [ len ( sentinal ) : ] . lower ( ) . replace ( '_' , '-' ) if val . upper ( ) in [ 'TRUE' , 'ON' ] : pass elif val . upper ( ) in [ 'FALSE' , 'OFF' ] : continue else : continue #flag += '=False' new_argv = [ flag ] argv = argv [ : ] + new_argv if debug : print ( 'ENV SPECIFIED COMMAND LINE' ) print ( 'argv.extend(new_argv=%r)' % ( new_argv , ) ) for argstr in argstr_list : #if VERYVERBOSE: # print('[util_arg] * checking argstr=%r' % (argstr,)) if not ( argstr . find ( '--' ) == 0 or ( argstr . find ( '-' ) == 0 and len ( argstr ) == 2 ) ) : raise AssertionError ( 'Invalid argstr: %r' % ( argstr , ) ) if not need_prefix : noprefix = argstr . replace ( '--' , '' ) if noprefix in argv : parsed_val = True was_specified = True break #if argstr.find('--no') == 0: #argstr = argstr.replace('--no', '--') noarg = argstr . replace ( '--' , '--no' ) if argstr in argv : parsed_val = True was_specified = True #if VERYVERBOSE: # print('[util_arg] * ...WAS_SPECIFIED. 
AND PARSED') break elif noarg in argv : parsed_val = False was_specified = True #if VERYVERBOSE: # print('[util_arg] * ...WAS_SPECIFIED. AND NOT PARSED') break elif argstr + '=True' in argv : parsed_val = True was_specified = True break elif argstr + '=False' in argv : parsed_val = False was_specified = True break if return_specified is None : return_specified = return_was_specified if return_specified : return parsed_val , was_specified else : return parsed_val
9,115
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L135-L253
[ "def", "_getMostActiveCells", "(", "self", ")", ":", "poolingActivation", "=", "self", ".", "_poolingActivation", "nonZeroCells", "=", "numpy", ".", "argwhere", "(", "poolingActivation", ">", "0", ")", "[", ":", ",", "0", "]", "# include a tie-breaker before sorting", "poolingActivationSubset", "=", "poolingActivation", "[", "nonZeroCells", "]", "+", "self", ".", "_poolingActivation_tieBreaker", "[", "nonZeroCells", "]", "potentialUnionSDR", "=", "nonZeroCells", "[", "numpy", ".", "argsort", "(", "poolingActivationSubset", ")", "[", ":", ":", "-", "1", "]", "]", "topCells", "=", "potentialUnionSDR", "[", "0", ":", "self", ".", "_maxUnionCells", "]", "if", "max", "(", "self", ".", "_poolingTimer", ")", ">", "self", ".", "_minHistory", ":", "self", ".", "_unionSDR", "=", "numpy", ".", "sort", "(", "topCells", ")", ".", "astype", "(", "UINT_DTYPE", ")", "else", ":", "self", ".", "_unionSDR", "=", "[", "]", "return", "self", ".", "_unionSDR" ]
r Yet another way for parsing args
def get_arg_dict ( argv = None , prefix_list = [ '--' ] , type_hints = { } ) : if argv is None : argv = sys . argv arg_dict = { } def startswith_prefix ( arg ) : return any ( [ arg . startswith ( prefix ) for prefix in prefix_list ] ) def argx_has_value ( argv , argx ) : # Check if has a value if argv [ argx ] . find ( '=' ) > - 1 : return True if argx + 1 < len ( argv ) and not startswith_prefix ( argv [ argx + 1 ] ) : return True return False def get_arg_value ( argv , argx , argname ) : if argv [ argx ] . find ( '=' ) > - 1 : return '=' . join ( argv [ argx ] . split ( '=' ) [ 1 : ] ) else : type_ = type_hints . get ( argname , None ) if type_ is None : return argv [ argx + 1 ] else : return parse_arglist_hack ( argx , argv = argv ) for argx in range ( len ( argv ) ) : arg = argv [ argx ] for prefix in prefix_list : if arg . startswith ( prefix ) : argname = arg [ len ( prefix ) : ] if argx_has_value ( argv , argx ) : if arg . find ( '=' ) > - 1 : argname = arg [ len ( prefix ) : arg . find ( '=' ) ] argvalue = get_arg_value ( argv , argx , argname ) arg_dict [ argname ] = argvalue else : arg_dict [ argname ] = True break return arg_dict
9,116
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L602-L671
[ "def", "changes", "(", "request", ",", "slug", ",", "template_name", "=", "'wakawaka/changes.html'", ",", "extra_context", "=", "None", ")", ":", "rev_a_id", "=", "request", ".", "GET", ".", "get", "(", "'a'", ",", "None", ")", "rev_b_id", "=", "request", ".", "GET", ".", "get", "(", "'b'", ",", "None", ")", "# Some stinky fingers manipulated the url", "if", "not", "rev_a_id", "or", "not", "rev_b_id", ":", "return", "HttpResponseBadRequest", "(", "'Bad Request'", ")", "try", ":", "revision_queryset", "=", "Revision", ".", "objects", ".", "all", "(", ")", "wikipage_queryset", "=", "WikiPage", ".", "objects", ".", "all", "(", ")", "rev_a", "=", "revision_queryset", ".", "get", "(", "pk", "=", "rev_a_id", ")", "rev_b", "=", "revision_queryset", ".", "get", "(", "pk", "=", "rev_b_id", ")", "page", "=", "wikipage_queryset", ".", "get", "(", "slug", "=", "slug", ")", "except", "ObjectDoesNotExist", ":", "raise", "Http404", "if", "rev_a", ".", "content", "!=", "rev_b", ".", "content", ":", "d", "=", "difflib", ".", "unified_diff", "(", "rev_b", ".", "content", ".", "splitlines", "(", ")", ",", "rev_a", ".", "content", ".", "splitlines", "(", ")", ",", "'Original'", ",", "'Current'", ",", "lineterm", "=", "''", ",", ")", "difftext", "=", "'\\n'", ".", "join", "(", "d", ")", "else", ":", "difftext", "=", "_", "(", "u'No changes were made between this two files.'", ")", "template_context", "=", "{", "'page'", ":", "page", ",", "'diff'", ":", "difftext", ",", "'rev_a'", ":", "rev_a", ",", "'rev_b'", ":", "rev_b", ",", "}", "template_context", ".", "update", "(", "extra_context", "or", "{", "}", ")", "return", "render", "(", "request", ",", "template_name", ",", "template_context", ")" ]
Decorators which control program flow based on sys . argv the decorated function does not execute without its corresponding flag
def argv_flag_dec ( * argin , * * kwargs ) : kwargs = kwargs . copy ( ) kwargs [ 'default' ] = kwargs . get ( 'default' , False ) from utool import util_decor @ util_decor . ignores_exc_tb ( outer_wrapper = False ) def wrap_argv_flag_dec ( func ) : return __argv_flag_dec ( func , * * kwargs ) assert len ( argin ) < 2 , 'specify 0 or 1 args' if len ( argin ) == 1 and util_type . is_funclike ( argin [ 0 ] ) : func = argin [ 0 ] return wrap_argv_flag_dec ( func ) else : return wrap_argv_flag_dec
9,117
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L853-L878
[ "def", "store_many_vectors", "(", "self", ",", "vs", ",", "data", "=", "None", ")", ":", "# We will store the normalized vector (used during retrieval)", "nvs", "=", "[", "unitvec", "(", "i", ")", "for", "i", "in", "vs", "]", "# Store vector in each bucket of all hashes", "for", "lshash", "in", "self", ".", "lshashes", ":", "bucket_keys", "=", "[", "lshash", ".", "hash_vector", "(", "i", ")", "[", "0", "]", "for", "i", "in", "vs", "]", "self", ".", "storage", ".", "store_many_vectors", "(", "lshash", ".", "hash_name", ",", "bucket_keys", ",", "nvs", ",", "data", ")" ]
Logic for controlling if a function gets called based on command line
def __argv_flag_dec ( func , default = False , quiet = QUIET , indent = False ) : from utool import util_decor flagname = meta_util_six . get_funcname ( func ) if flagname . find ( 'no' ) == 0 : flagname = flagname [ 2 : ] flags = ( '--' + flagname . replace ( '_' , '-' ) , '--' + flagname , ) @ util_decor . ignores_exc_tb ( outer_wrapper = False ) def GaurdWrapper ( * args , * * kwargs ) : from utool import util_print # FIXME: the --print-all is a hack default_ = kwargs . pop ( 'default' , default ) alias_flags = kwargs . pop ( 'alias_flags' , [ ] ) is_flagged = ( get_argflag ( flags , default_ ) or get_argflag ( '--print-all' ) or any ( [ get_argflag ( _ ) for _ in alias_flags ] ) ) if flagname in kwargs : is_flagged = kwargs . pop ( flagname ) if is_flagged : func_label = flags [ 0 ] . replace ( '--' , '' ) . replace ( 'print-' , '' ) # print('') print ( '\n+ --- ' + func_label + ' ___' ) use_indent = indent is not False if indent is True : indent_ = '[%s]' % func_label else : indent_ = indent with util_print . Indenter ( indent_ , enabled = use_indent ) : ret = func ( * args , * * kwargs ) print ( 'L ___ ' + func_label + '___\n' ) return ret else : PRINT_DISABLED_FLAGDEC = not get_argflag ( '--noinform' , help_ = 'does not print disabled flag decorators' ) if not quiet and PRINT_DISABLED_FLAGDEC : #print('\n~~~ %s ~~~' % flag) print ( '~~~ %s ~~~' % flags [ 0 ] ) meta_util_six . set_funcname ( GaurdWrapper , meta_util_six . get_funcname ( func ) ) return GaurdWrapper
9,118
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L885-L930
[ "def", "upload_list", "(", "book_id_list", ",", "rdf_library", "=", "None", ")", ":", "with", "open", "(", "book_id_list", ",", "'r'", ")", "as", "f", ":", "cache", "=", "{", "}", "for", "book_id", "in", "f", ":", "book_id", "=", "book_id", ".", "strip", "(", ")", "try", ":", "if", "int", "(", "book_id", ")", "in", "missing_pgid", ":", "print", "(", "u'missing\\t{}'", ".", "format", "(", "book_id", ")", ")", "continue", "upload_book", "(", "book_id", ",", "rdf_library", "=", "rdf_library", ",", "cache", "=", "cache", ")", "except", "Exception", "as", "e", ":", "print", "(", "u'error\\t{}'", ".", "format", "(", "book_id", ")", ")", "logger", ".", "error", "(", "u\"Error processing: {}\\r{}\"", ".", "format", "(", "book_id", ",", "e", ")", ")" ]
r gets the rest of the arguments after a script has been invoked hack . accounts for python - m scripts .
def get_argv_tail ( scriptname , prefer_main = None , argv = None ) : if argv is None : argv = sys . argv import utool as ut modname = ut . get_argval ( '-m' , help_ = 'specify module name to profile' , argv = argv ) if modname is not None : # hack to account for -m scripts modpath = ut . get_modpath ( modname , prefer_main = prefer_main ) argvx = argv . index ( modname ) + 1 argv_tail = [ modpath ] + argv [ argvx : ] else : try : argvx = argv . index ( scriptname ) except ValueError : for argvx , arg in enumerate ( argv ) : # HACK if scriptname in arg : break argv_tail = argv [ ( argvx + 1 ) : ] return argv_tail
9,119
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L1068-L1127
[ "def", "update_qos_aggregated_configuration", "(", "self", ",", "qos_configuration", ",", "timeout", "=", "-", "1", ")", ":", "uri", "=", "\"{}{}\"", ".", "format", "(", "self", ".", "data", "[", "\"uri\"", "]", ",", "self", ".", "QOS_AGGREGATED_CONFIGURATION", ")", "return", "self", ".", "_helper", ".", "update", "(", "qos_configuration", ",", "uri", "=", "uri", ",", "timeout", "=", "timeout", ")" ]
Returns positional args specified directly after the scriptname and before any args starting with - on the commandline .
def get_cmdline_varargs ( argv = None ) : if argv is None : argv = sys . argv scriptname = argv [ 0 ] if scriptname == '' : # python invoked by iteself pos_start = 0 pos_end = 0 else : pos_start = pos_end = 1 for idx in range ( pos_start , len ( argv ) ) : if argv [ idx ] . startswith ( '-' ) : pos_end = idx break else : pos_end = len ( argv ) cmdline_varargs = argv [ pos_start : pos_end ] return cmdline_varargs
9,120
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L1130-L1151
[ "def", "get_stats", "(", "self", ")", ":", "canRequestBusStatistics", "(", "self", ".", "_write_handle", ")", "stats", "=", "structures", ".", "BusStatistics", "(", ")", "canGetBusStatistics", "(", "self", ".", "_write_handle", ",", "ctypes", ".", "pointer", "(", "stats", ")", ",", "ctypes", ".", "sizeof", "(", "stats", ")", ")", "return", "stats" ]
alias for get_argval
def argval ( key , default = None , type = None , smartcast = True , return_exists = False , argv = None ) : defaultable_types = ( tuple , list , int , float ) if type is None and isinstance ( default , defaultable_types ) : type = builtins . type ( default ) return get_argval ( key , type_ = type , default = default , return_was_specified = return_exists , smartcast = smartcast , argv = argv )
9,121
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_arg.py#L1167-L1189
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Plot the distribution of a real - valued feature conditioned by the target .
def plot_real_feature ( df , feature_name , bins = 50 , figsize = ( 15 , 15 ) ) : ix_negative_target = df [ df . target == 0 ] . index ix_positive_target = df [ df . target == 1 ] . index plt . figure ( figsize = figsize ) ax_overall_dist = plt . subplot2grid ( ( 3 , 2 ) , ( 0 , 0 ) , colspan = 2 ) ax_target_conditional_dist = plt . subplot2grid ( ( 3 , 2 ) , ( 1 , 0 ) , colspan = 2 ) ax_botplot = plt . subplot2grid ( ( 3 , 2 ) , ( 2 , 0 ) ) ax_violin_plot = plt . subplot2grid ( ( 3 , 2 ) , ( 2 , 1 ) ) ax_overall_dist . set_title ( 'Distribution of {}' . format ( feature_name ) , fontsize = 16 ) sns . distplot ( df [ feature_name ] , bins = 50 , ax = ax_overall_dist ) sns . distplot ( df . loc [ ix_positive_target ] [ feature_name ] , bins = bins , ax = ax_target_conditional_dist , label = 'Positive Target' ) sns . distplot ( df . loc [ ix_negative_target ] [ feature_name ] , bins = bins , ax = ax_target_conditional_dist , label = 'Negative Target' ) ax_target_conditional_dist . legend ( loc = 'upper right' , prop = { 'size' : 14 } ) sns . boxplot ( y = feature_name , x = 'target' , data = df , ax = ax_botplot ) sns . violinplot ( y = feature_name , x = 'target' , data = df , ax = ax_violin_plot ) plt . show ( )
9,122
https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L6-L65
[ "def", "pem", "(", "self", ")", ":", "bio", "=", "Membio", "(", ")", "if", "not", "libcrypto", ".", "PEM_write_bio_CMS", "(", "bio", ".", "bio", ",", "self", ".", "ptr", ")", ":", "raise", "CMSError", "(", "\"writing CMS to PEM\"", ")", "return", "str", "(", "bio", ")" ]
Plot a scatterplot of two features against one another and calculate Pearson correlation coefficient .
def plot_pair ( df , feature_name_1 , feature_name_2 , kind = 'scatter' , alpha = 0.01 , * * kwargs ) : plt . figure ( ) sns . jointplot ( feature_name_1 , feature_name_2 , df , alpha = alpha , kind = kind , * * kwargs ) plt . show ( )
9,123
https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L68-L94
[ "def", "flush_devices", "(", "self", ")", ":", "self", ".", "rom", ".", "program", "(", "[", "0", "for", "i", "in", "range", "(", "self", ".", "rom", ".", "size", ")", "]", ")", "self", ".", "flash", ".", "program", "(", "[", "0", "for", "i", "in", "range", "(", "self", ".", "flash", ".", "size", ")", "]", ")", "for", "i", "in", "range", "(", "self", ".", "ram", ".", "size", ")", ":", "self", ".", "ram", ".", "write", "(", "i", ",", "0", ")" ]
Plot a correlation heatmap between every feature pair .
def plot_feature_correlation_heatmap(df, features, font_size=9, figsize=(15, 15), save_filename=None):
    """
    Plot a correlation heatmap between every feature pair (plus the target).

    Args:
        df: DataFrame containing the feature columns and a 'target' column.
        features: List of feature column names; not mutated (a copy is taken).
        font_size: Font size for annotations and tick labels.
        figsize: Size of the figure.
        save_filename: If given, the figure is also saved to this path at 300 dpi.
    """
    features = features[:]  # copy so the caller's list is not mutated
    features += ['target']
    mcorr = df[features].corr()

    # Mask the upper triangle so each pair is shown only once.
    # BUG FIX: np.bool is a removed alias in NumPy >= 1.24; use builtin bool.
    mask = np.zeros_like(mcorr, dtype=bool)
    mask[np.triu_indices_from(mask)] = True

    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    fig = plt.figure(figsize=figsize)
    heatmap = sns.heatmap(
        mcorr,
        mask=mask,
        cmap=cmap,
        square=True,
        annot=True,
        fmt='0.2f',
        annot_kws={'size': font_size},
    )
    heatmap.tick_params(axis='both', which='major', labelsize=font_size)
    heatmap.tick_params(axis='both', which='minor', labelsize=font_size)
    heatmap.set_xticklabels(features, rotation=90)
    heatmap.set_yticklabels(reversed(features))
    plt.show()

    if save_filename is not None:
        fig.savefig(save_filename, dpi=300)
9,124
https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L97-L138
[ "def", "walk", "(", "self", ",", "oid", ",", "host", ",", "port", ",", "community", ")", ":", "# Initialize return value", "ret", "=", "{", "}", "# Convert OID to tuple if necessary", "if", "not", "isinstance", "(", "oid", ",", "tuple", ")", ":", "oid", "=", "self", ".", "_convert_to_oid", "(", "oid", ")", "# Convert Host to IP if necessary", "host", "=", "socket", ".", "gethostbyname", "(", "host", ")", "# Assemble SNMP Auth Data", "snmpAuthData", "=", "cmdgen", ".", "CommunityData", "(", "'agent-{}'", ".", "format", "(", "community", ")", ",", "community", ")", "# Assemble SNMP Transport Data", "snmpTransportData", "=", "cmdgen", ".", "UdpTransportTarget", "(", "(", "host", ",", "port", ")", ",", "int", "(", "self", ".", "config", "[", "'timeout'", "]", ")", ",", "int", "(", "self", ".", "config", "[", "'retries'", "]", ")", ")", "# Assemble SNMP Next Command", "resultTable", "=", "self", ".", "snmpCmdGen", ".", "nextCmd", "(", "snmpAuthData", ",", "snmpTransportData", ",", "oid", ")", "varBindTable", "=", "resultTable", "[", "3", "]", "# TODO: Error Check", "for", "varBindTableRow", "in", "varBindTable", ":", "for", "o", ",", "v", "in", "varBindTableRow", ":", "ret", "[", "str", "(", "o", ")", "]", "=", "v", ".", "prettyPrint", "(", ")", "return", "ret" ]
Plot a scatterplot matrix for a list of features colored by target value .
def scatterplot_matrix(df, features, downsample_frac=None, figsize=(15, 15)):
    """
    Plot a scatterplot matrix for a list of features, colored by target value.

    Args:
        df: DataFrame with the feature columns and a 'target' column.
        features: Feature column names to include in the matrix.
        downsample_frac: If given, plot only this random fraction of rows.
        figsize: Size of the figure.
    """
    frame = df.sample(frac=downsample_frac) if downsample_frac else df
    plt.figure(figsize=figsize)
    sns.pairplot(frame[features], hue='target')
    plt.show()
9,125
https://github.com/YuriyGuts/pygoose/blob/4d9b8827c6d6c4b79949d1cd653393498c0bb3c2/pygoose/kg/eda.py#L141-L159
[ "def", "kernelDriverActive", "(", "self", ",", "interface", ")", ":", "result", "=", "libusb1", ".", "libusb_kernel_driver_active", "(", "self", ".", "__handle", ",", "interface", ")", "if", "result", "==", "0", ":", "return", "False", "elif", "result", "==", "1", ":", "return", "True", "raiseUSBError", "(", "result", ")" ]
Process child tags .
def process_nested_tags(self, node, tag=''):
    """
    Process child tags of the given XML node.

    Each child is pushed onto the parser's XML node stack for the duration
    of its processing (so error breadcrumbs are accurate), dispatched either
    through the tag parse table (known, valid children) or as a generic
    component, then popped again.

    @param node: Node whose children are processed.
    @param tag: Optional tag name to use as the parent context instead of
        node.ltag (lowercased before lookup).
    """
    parent_tag = node.ltag if tag == '' else tag.lower()
    for child in node.children:
        # Push (rebind, not mutate, to match original aliasing behavior).
        self.xml_node_stack = [child] + self.xml_node_stack
        child_tag = child.ltag
        known = (child_tag in self.tag_parse_table
                 and child_tag in self.valid_children[parent_tag])
        if known:
            self.tag_parse_table[child_tag](child)
        else:
            self.parse_component_by_typename(child, child.tag)
        # Pop.
        self.xml_node_stack = self.xml_node_stack[1:]
9,126
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/parser/LEMS.py#L232-L260
[ "def", "set_dwelling_current", "(", "self", ",", "settings", ")", ":", "self", ".", "_dwelling_current_settings", "[", "'now'", "]", ".", "update", "(", "settings", ")", "# if an axis specified in the `settings` is currently dwelling,", "# reset it's current to the new dwelling-current value", "dwelling_axes_to_update", "=", "{", "axis", ":", "amps", "for", "axis", ",", "amps", "in", "self", ".", "_dwelling_current_settings", "[", "'now'", "]", ".", "items", "(", ")", "if", "self", ".", "_active_axes", ".", "get", "(", "axis", ")", "is", "False", "if", "self", ".", "current", "[", "axis", "]", "!=", "amps", "}", "if", "dwelling_axes_to_update", ":", "self", ".", "_save_current", "(", "dwelling_axes_to_update", ",", "axes_active", "=", "False", ")" ]
Parse a string containing LEMS XML text .
def parse(self, xmltext):
    """
    Parse a string containing LEMS XML text.

    @param xmltext: XML source text.
    @raise ParseError: if the root element is neither <Lems> nor <neuroml>.
    """
    root = LEMSXMLNode(xe.XML(xmltext))
    if root.ltag not in ('lems', 'neuroml'):
        raise ParseError('<Lems> expected as root element (or even <neuroml>), '
                         'found: {0}'.format(root.ltag))
    self.process_nested_tags(root)
9,127
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/parser/LEMS.py#L262-L280
[ "def", "find_best_frametype", "(", "channel", ",", "start", ",", "end", ",", "frametype_match", "=", "None", ",", "allow_tape", "=", "True", ",", "connection", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "try", ":", "return", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'error'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "except", "RuntimeError", ":", "# gaps (or something else went wrong)", "ftout", "=", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "return_all", "=", "True", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'ignore'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "try", ":", "if", "isinstance", "(", "ftout", ",", "dict", ")", ":", "return", "{", "key", ":", "ftout", "[", "key", "]", "[", "0", "]", "for", "key", "in", "ftout", "}", "return", "ftout", "[", "0", "]", "except", "IndexError", ":", "raise", "ValueError", "(", "\"Cannot find any valid frametypes for channel(s)\"", ")" ]
Raise a parse error .
def raise_error(self, message, *params, **key_params):
    """
    Raise a ParseError prefixed with a breadcrumb of the current XML ancestry.

    BUG FIX: the original reversed ``self.xml_node_stack`` in place and the
    statement meant to restore it was placed *after* the raise, making it
    unreachable — so the stack was left permanently reversed as a side
    effect of reporting an error. We now build the breadcrumb from a
    reversed copy and never mutate the stack.

    @param message: Error message (may contain format placeholders consumed
        by ParseError via params/key_params).
    @raise ParseError: always.
    """
    def _describe(node):
        # Renders a node as <tag name="..." id="...">, attributes only if present.
        text = '<{0}'.format(node.tag)
        if 'name' in node.lattrib:
            text += ' name="{0}"'.format(node.lattrib['name'])
        if 'id' in node.lattrib:
            text += ' id="{0}"'.format(node.lattrib['id'])
        return text + '>'

    s = 'Parser error in '
    ancestry = list(reversed(self.xml_node_stack))  # oldest ancestor first
    if len(ancestry) > 1:
        s += _describe(ancestry[0])
        for node in ancestry[1:]:
            s += '.' + _describe(node)
    s += ':\n ' + message
    raise ParseError(s, *params, **key_params)
9,128
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/parser/LEMS.py#L283-L312
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
Parses components defined directly by component name .
def parse_component_by_typename(self, node, type_):
    """
    Parses components defined directly by component name.

    The component id comes from the node's 'id' attribute (falling back to
    the tag name), and the type from the 'type' attribute (same fallback).
    The new component is attached to the current component if one is being
    parsed, otherwise added to the model; its remaining XML attributes
    become parameters, and child tags are processed recursively.

    @param node: XML node to parse.
    @param type_: Nominal type name (overridden by the node's own
        'type' attribute or tag, as in the original implementation).
    """
    component_id = node.lattrib['id'] if 'id' in node.lattrib else node.tag
    component_type = node.lattrib['type'] if 'type' in node.lattrib else node.tag

    component = Component(component_id, component_type)
    if self.current_component:
        component.set_parent_id(self.current_component.id)
        self.current_component.add_child(component)
    else:
        self.model.add_component(component)

    # Every attribute except id/type is treated as a parameter.
    for key in node.attrib:
        if key.lower() not in ['id', 'type']:
            component.set_parameter(key, node.attrib[key])

    # Recurse with this component as the current parent, then restore.
    previous_component = self.current_component
    self.current_component = component
    self.process_nested_tags(node, 'component')
    self.current_component = previous_component
9,129
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/parser/LEMS.py#L446-L486
[ "def", "genRandResources", "(", "args", ",", "resources", ")", ":", "randResources", "=", "[", "]", "nEach", "=", "int", "(", "args", ".", "nPatches", "//", "len", "(", "resources", ")", ")", "extras", "=", "int", "(", "args", ".", "nPatches", "%", "len", "(", "resources", ")", ")", "for", "i", "in", "range", "(", "nEach", ")", ":", "for", "res", "in", "resources", ":", "randResources", ".", "append", "(", "res", "+", "str", "(", "i", ")", ")", "additional", "=", "random", ".", "sample", "(", "resources", ",", "extras", ")", "for", "res", "in", "additional", ":", "randResources", ".", "append", "(", "res", "+", "str", "(", "nEach", ")", ")", "random", ".", "shuffle", "(", "randResources", ")", "return", "randResources" ]
Calls xmltag generator for multiple files .
def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None):
    """
    Calls the xmltag generator for multiple files and chains the results.

    @param input_files: Iterable of file names to parse.
    @param tag: Tag (without namespace) whose elements are yielded.
    @param ignore_tags: Tags to clear from memory when encountered.
    @param ns: Optional XML namespace.
    """
    per_file = (generate_xmltags(fn, tag, ignore_tags, ns) for fn in input_files)
    return itertools.chain.from_iterable(per_file)
9,130
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/xml.py#L36-L41
[ "def", "enter_alternate_screen", "(", "self", ")", ":", "if", "not", "self", ".", "_in_alternate_screen", ":", "GENERIC_READ", "=", "0x80000000", "GENERIC_WRITE", "=", "0x40000000", "# Create a new console buffer and activate that one.", "handle", "=", "self", ".", "_winapi", "(", "windll", ".", "kernel32", ".", "CreateConsoleScreenBuffer", ",", "GENERIC_READ", "|", "GENERIC_WRITE", ",", "DWORD", "(", "0", ")", ",", "None", ",", "DWORD", "(", "1", ")", ",", "None", ")", "self", ".", "_winapi", "(", "windll", ".", "kernel32", ".", "SetConsoleActiveScreenBuffer", ",", "handle", ")", "self", ".", "hconsole", "=", "handle", "self", ".", "_in_alternate_screen", "=", "True" ]
Creates stringified xml output of elements with certain tag .
def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags):
    """
    Creates stringified XML output of elements with a certain tag,
    clearing each element after serialization to bound memory use.
    """
    elements = generate_tags_multiple_files(input_files, tag, ignore_tags, ns)
    for element in elements:
        yield formatting.string_and_clear(element, ns)
9,131
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/xml.py#L44-L49
[ "def", "add_login_attempt_to_db", "(", "request", ",", "login_valid", ",", "get_username", "=", "get_username_from_request", ",", "username", "=", "None", ")", ":", "if", "not", "config", ".", "STORE_ACCESS_ATTEMPTS", ":", "# If we don't want to store in the database, then don't proceed.", "return", "username", "=", "username", "or", "get_username", "(", "request", ")", "user_agent", "=", "request", ".", "META", ".", "get", "(", "'HTTP_USER_AGENT'", ",", "'<unknown>'", ")", "[", ":", "255", "]", "ip_address", "=", "get_ip", "(", "request", ")", "http_accept", "=", "request", ".", "META", ".", "get", "(", "'HTTP_ACCEPT'", ",", "'<unknown>'", ")", "path_info", "=", "request", ".", "META", ".", "get", "(", "'PATH_INFO'", ",", "'<unknown>'", ")", "if", "config", ".", "USE_CELERY", ":", "from", ".", "tasks", "import", "add_login_attempt_task", "add_login_attempt_task", ".", "delay", "(", "user_agent", ",", "ip_address", ",", "username", ",", "http_accept", ",", "path_info", ",", "login_valid", ")", "else", ":", "store_login_attempt", "(", "user_agent", ",", "ip_address", ",", "username", ",", "http_accept", ",", "path_info", ",", "login_valid", ")" ]
Base generator for percolator xml psm peptide protein output as well as for mzML mzIdentML . ignore_tags are the ones that are cleared when met by parser .
def generate_xmltags(fn, returntag, ignore_tags, ns=None):
    """
    Base generator for XML parsing: yields elements whose tag matches
    `returntag`; elements matching `ignore_tags` are cleared when met by
    the parser to keep memory bounded.

    @param fn: File name to parse incrementally.
    @param returntag: Tag (without namespace) whose elements are yielded.
    @param ignore_tags: Tags cleared on sight.
    @param ns: Optional XML namespace.
    """
    xmlns = create_namespace(ns)
    wanted = '{0}{1}'.format(xmlns, returntag)
    ignored = {'{0}{1}'.format(xmlns, t) for t in ignore_tags}
    for _action, element in etree.iterparse(fn):
        if element.tag == wanted:
            yield element
        elif element.tag in ignored:
            formatting.clear_el(element)
9,132
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/xml.py#L52-L64
[ "def", "create_binding", "(", "self", ",", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", "=", "None", ",", "args", "=", "None", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "exchange", "=", "quote", "(", "exchange", ",", "''", ")", "queue", "=", "quote", "(", "queue", ",", "''", ")", "body", "=", "json", ".", "dumps", "(", "{", "'routing_key'", ":", "rt_key", ",", "'arguments'", ":", "args", "or", "[", "]", "}", ")", "path", "=", "Client", ".", "urls", "[", "'bindings_between_exch_queue'", "]", "%", "(", "vhost", ",", "exchange", ",", "queue", ")", "binding", "=", "self", ".", "_call", "(", "path", ",", "'POST'", ",", "body", "=", "body", ",", "headers", "=", "Client", ".", "json_headers", ")", "return", "binding" ]
Adds a component type to the model .
def add_component_type(self, component_type):
    """
    Adds a component type to the model, keyed by (sanitized) name.

    Colons in the name (legal in LEMS identifiers) are replaced with
    underscores, and the component type's own name attribute is updated
    to match.

    @param component_type: Component type to register.
    """
    name = component_type.name
    if ':' in name:
        name = name.replace(':', '_')
        component_type.name = name
    self.component_types[name] = component_type
9,133
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L140-L154
[ "def", "start", "(", "self", ")", ":", "if", "self", ".", "_http_last_send", "is", "not", "None", ":", "raise", "RuntimeError", "(", "'HttpMock has already been started'", ")", "# 1) save request.Session.send in self._last_send", "# 2) replace request.Session.send with MockerCore send function", "super", "(", "HttpMock", ",", "self", ")", ".", "start", "(", ")", "# 3) save MockerCore send function in self._http_last_send", "# 4) replace request.Session.send with HttpMock send function", "self", ".", "_patch_last_send", "(", ")" ]
Adds a typed child object to the model .
def add(self, child):
    """
    Adds a typed child object to the model, dispatching on its type.

    The dispatch order matters (e.g. Component is tested before
    FatComponent, matching the original elif chain).

    @param child: Object to add.
    @raise ModelError: if the child's type is not supported.
    """
    dispatch = (
        (Include, self.add_include),
        (Dimension, self.add_dimension),
        (Unit, self.add_unit),
        (ComponentType, self.add_component_type),
        (Component, self.add_component),
        (FatComponent, self.add_fat_component),
        (Constant, self.add_constant),
    )
    for klass, adder in dispatch:
        if isinstance(child, klass):
            adder(child)
            return
    raise ModelError('Unsupported child element')
9,134
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L186-L208
[ "def", "gc_velocity_update", "(", "particle", ",", "social", ",", "state", ")", ":", "gbest", "=", "state", ".", "swarm", "[", "gbest_idx", "(", "state", ".", "swarm", ")", "]", ".", "position", "if", "not", "np", ".", "array_equal", "(", "gbest", ",", "particle", ".", "position", ")", ":", "return", "std_velocity", "(", "particle", ",", "social", ",", "state", ")", "rho", "=", "state", ".", "params", "[", "'rho'", "]", "inertia", "=", "state", ".", "params", "[", "'inertia'", "]", "v_max", "=", "state", ".", "params", "[", "'v_max'", "]", "size", "=", "particle", ".", "position", ".", "size", "r2", "=", "state", ".", "rng", ".", "uniform", "(", "0.0", ",", "1.0", ",", "size", ")", "velocity", "=", "__gc_velocity_equation__", "(", "inertia", ",", "rho", ",", "r2", ",", "particle", ",", "gbest", ")", "return", "__clamp__", "(", "velocity", ",", "v_max", ")" ]
Includes a file into the current model .
def include_file(self, path, include_dirs=None):
    """
    Includes a file into the current model.

    Tries the path as given first, then each include directory; files
    already included are skipped.

    FIXES: the original used a mutable default argument (``[]``) —
    replaced by ``None`` with identical behavior (both are falsy) —
    and leaked file handles via ``open(path).read()`` without closing.

    @param path: Path of the file to include.
    @param include_dirs: Optional list of directories to search
        (defaults to ``self.include_dirs``).
    @raise Exception: if the file cannot be found and
        ``self.fail_on_missing_includes`` is set.
    """
    if not self.include_includes:
        return
    if self.debug:
        print("------------------ Including a file: %s" % path)
    inc_dirs = include_dirs if include_dirs else self.include_dirs
    parser = LEMSFileParser(self, inc_dirs, self.include_includes)

    # Candidate locations: the path itself, then relative to each include dir.
    candidates = [path] + [inc_dir + '/' + path for inc_dir in inc_dirs]
    for candidate in candidates:
        if os.access(candidate, os.F_OK):
            if candidate not in self.included_files:
                with open(candidate) as f:
                    parser.parse(f.read())
                self.included_files.append(candidate)
            elif self.debug:
                print("Already included: %s" % path)
            return

    msg = 'Unable to open ' + path
    if self.fail_on_missing_includes:
        raise Exception(msg)
    if self.debug:
        print(msg)
9,135
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L220-L258
[ "def", "main", "(", ")", ":", "try", ":", "# Retrieve an AD2 device that has been exposed with ser2sock on localhost:10000.", "device", "=", "AlarmDecoder", "(", "SocketDevice", "(", "interface", "=", "(", "HOSTNAME", ",", "PORT", ")", ")", ")", "# Set up an event handler and open the device", "device", ".", "on_message", "+=", "handle_message", "with", "device", ".", "open", "(", ")", ":", "while", "True", ":", "time", ".", "sleep", "(", "1", ")", "except", "Exception", "as", "ex", ":", "print", "(", "'Exception:'", ",", "ex", ")" ]
Import a model from a file .
def import_from_file(self, filepath):
    """
    Import a model from a file.

    The file's own directory is appended to the include search path so
    that relative includes resolve.

    @param filepath: Path of the file to import.
    """
    search_dirs = self.include_directories[:]
    search_dirs.append(dirname(filepath))
    parser = LEMSFileParser(self, search_dirs, self.include_includes)
    with open(filepath) as source:
        parser.parse(source.read())
9,136
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L260-L273
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
Exports this model to a DOM .
def export_to_dom(self):
    """
    Exports this model to a DOM.

    Serializes includes, targets, dimensions, units, constants, component
    types and components (in that order) inside a namespaced <Lems> root,
    then parses the result with minidom.

    @return: xml.dom.minidom Document for the model.
    """
    namespaces = ('xmlns="http://www.neuroml.org/lems/%s" '
                  'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
                  'xsi:schemaLocation="http://www.neuroml.org/lems/%s %s"') % (
        self.target_lems_version, self.target_lems_version, self.schema_location)

    # Accumulate parts in a list and join once, instead of string +=.
    parts = ['<Lems %s>' % namespaces]
    parts.extend(include.toxml() for include in self.includes)
    parts.extend('<Target component="{0}"/>'.format(t) for t in self.targets)
    parts.extend(dimension.toxml() for dimension in self.dimensions)
    parts.extend(unit.toxml() for unit in self.units)
    parts.extend(constant.toxml() for constant in self.constants)
    parts.extend(ct.toxml() for ct in self.component_types)
    parts.extend(component.toxml() for component in self.components)
    parts.append('</Lems>')

    return minidom.parseString(''.join(parts))
9,137
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L275-L311
[ "def", "blend", "(", "self", ",", "cycles", "=", "1", ")", ":", "for", "j", "in", "range", "(", "int", "(", "cycles", ")", ")", ":", "new_colours", "=", "[", "]", "for", "i", ",", "c", "in", "enumerate", "(", "self", ".", "_colours", ")", ":", "if", "i", "!=", "0", ":", "c2", "=", "blend", "(", "c", ",", "self", ".", "_colours", "[", "i", "-", "1", "]", ")", "new_colours", ".", "append", "(", "c2", ")", "new_colours", ".", "append", "(", "c", ")", "self", ".", "_colours", "=", "new_colours" ]
Exports this model to a file .
def export_to_file(self, filepath, level_prefix=' '):
    """
    Exports this model to a file as pretty-printed XML.

    FIX: the original opened the file without a context manager, so the
    handle leaked if the write raised; ``with`` guarantees closure.

    @param filepath: Destination file path.
    @param level_prefix: Indentation prefix per nesting level.
    """
    xmldom = self.export_to_dom()
    xmlstr = xmldom.toprettyxml(level_prefix, '\n')
    with open(filepath, 'w') as out:
        out.write(xmlstr)
9,138
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L313-L326
[ "def", "consume", "(", "self", ",", "timeout", "=", "None", ",", "loop", "=", "None", ")", ":", "if", "self", ".", "_consumer_fn", "is", "None", ":", "raise", "ValueError", "(", "'Consumer function is not defined yet'", ")", "logger", ".", "info", "(", "'Start consuming the stream'", ")", "@", "asyncio", ".", "coroutine", "def", "worker", "(", "conn_url", ")", ":", "extra_headers", "=", "{", "'Connection'", ":", "'upgrade'", ",", "'Upgrade'", ":", "'websocket'", ",", "'Sec-Websocket-Version'", ":", "13", ",", "}", "ws", "=", "yield", "from", "websockets", ".", "connect", "(", "conn_url", ",", "extra_headers", "=", "extra_headers", ")", "if", "ws", "is", "None", ":", "raise", "RuntimeError", "(", "\"Couldn't connect to the '%s'\"", "%", "conn_url", ")", "try", ":", "while", "True", ":", "message", "=", "yield", "from", "ws", ".", "recv", "(", ")", "yield", "from", "self", ".", "_consumer_fn", "(", "message", ")", "finally", ":", "yield", "from", "ws", ".", "close", "(", ")", "if", "loop", "is", "None", ":", "loop", "=", "asyncio", ".", "new_event_loop", "(", ")", "asyncio", ".", "set_event_loop", "(", "loop", ")", "try", ":", "task", "=", "worker", "(", "conn_url", "=", "self", ".", "_conn_url", ")", "if", "timeout", ":", "logger", ".", "info", "(", "'Running task with timeout %s sec'", ",", "timeout", ")", "loop", ".", "run_until_complete", "(", "asyncio", ".", "wait_for", "(", "task", ",", "timeout", "=", "timeout", ")", ")", "else", ":", "loop", ".", "run_until_complete", "(", "task", ")", "except", "asyncio", ".", "TimeoutError", ":", "logger", ".", "info", "(", "'Timeout is reached. Closing the loop'", ")", "loop", ".", "close", "(", ")", "except", "KeyboardInterrupt", ":", "logger", ".", "info", "(", "'Closing the loop'", ")", "loop", ".", "close", "(", ")" ]
Resolves references in this model .
def resolve(self):
    """
    Resolves references in this model.

    Works on a copy of the model: resolves every component type, fattens
    components that are not already fat, promotes constants with numeric
    values, and returns the resolved copy.

    @return: Resolved copy of the model.
    """
    model = self.copy()
    for ct in model.component_types:
        model.resolve_component_type(ct)
    for c in model.components:
        if c.id not in model.fat_components:
            model.add(model.fatten_component(c))
    # NOTE(review): 'ct' below is the leftover loop variable from the
    # first loop, so only the LAST component type's constants are
    # processed here. This looks suspicious — confirm whether this loop
    # was meant to be nested inside the loop over component_types.
    for c in ct.constants:
        c2 = c.copy()
        c2.numeric_value = model.get_numeric_value(c2.value, c2.dimension)
        model.add(c2)
    return model
9,139
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L328-L347
[ "def", "exec_request", "(", "endpoint", ",", "func", ",", "raise_for_status", "=", "False", ",", "*", "*", "kwargs", ")", ":", "try", ":", "endpoint", "=", "'{0}/api/v1/{1}'", ".", "format", "(", "settings", ".", "SEAT_URL", ",", "endpoint", ")", "headers", "=", "{", "'X-Token'", ":", "settings", ".", "SEAT_XTOKEN", ",", "'Accept'", ":", "'application/json'", "}", "logger", ".", "debug", "(", "headers", ")", "logger", ".", "debug", "(", "endpoint", ")", "ret", "=", "getattr", "(", "requests", ",", "func", ")", "(", "endpoint", ",", "headers", "=", "headers", ",", "data", "=", "kwargs", ")", "ret", ".", "raise_for_status", "(", ")", "return", "ret", ".", "json", "(", ")", "except", "requests", ".", "HTTPError", "as", "e", ":", "if", "raise_for_status", ":", "raise", "e", "logger", ".", "exception", "(", "\"Error encountered while performing API request to SeAT with url {}\"", ".", "format", "(", "endpoint", ")", ")", "return", "{", "}" ]
Resolves references in the specified component type .
def resolve_component_type(self, component_type):
    """
    Resolves references in the specified component type.

    If the type extends a base type, the base is resolved first
    (recursively), its members are merged in, the type sets are unioned,
    and the 'extends' link is cleared.

    @param component_type: Component type to resolve.
    @raise ModelError: if the base component type is unknown.
    """
    # Nothing to do unless this type extends another one.
    if not component_type.extends:
        return
    try:
        base_ct = self.component_types[component_type.extends]
    except:
        raise ModelError("Component type '{0}' trying to extend unknown component type '{1}'",
                         component_type.name, component_type.extends)
    # Resolve the base first so its merged members are themselves resolved.
    self.resolve_component_type(base_ct)
    self.merge_component_types(component_type, base_ct)
    component_type.types = set.union(component_type.types, base_ct.types)
    component_type.extends = None
9,140
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L349-L368
[ "def", "split", "(", "self", ",", "k", ")", ":", "if", "not", "1", "<=", "k", "<=", "self", ".", "num_rows", "-", "1", ":", "raise", "ValueError", "(", "\"Invalid value of k. k must be between 1 and the\"", "\"number of rows - 1\"", ")", "rows", "=", "np", ".", "random", ".", "permutation", "(", "self", ".", "num_rows", ")", "first", "=", "self", ".", "take", "(", "rows", "[", ":", "k", "]", ")", "rest", "=", "self", ".", "take", "(", "rows", "[", "k", ":", "]", ")", "for", "column_label", "in", "self", ".", "_formats", ":", "first", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "rest", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "return", "first", ",", "rest" ]
Merge various maps in the given component type from a base component type .
def merge_component_types(self, ct, base_ct):
    """
    Merge various maps in the given component type from a base component
    type (i.e. pull inherited members from base_ct into ct).

    Entries already present in ct are kept (merge_maps/merge_lists add
    only what is missing), except fixed parameters, whose value and
    dimension are refreshed from the base.

    @param ct: Component type to merge into.
    @param base_ct: Base component type to merge from.
    """
    #merge_maps(ct.parameters, base_ct.parameters)
    # Parameters need special handling for fixed values.
    for parameter in base_ct.parameters:
        if parameter.name in ct.parameters:
            p = ct.parameters[parameter.name]
            basep = base_ct.parameters[parameter.name]
            if p.fixed:
                p.value = p.fixed_value
                p.dimension = basep.dimension
        else:
            ct.parameters[parameter.name] = base_ct.parameters[parameter.name]

    # Plain member maps.
    merge_maps(ct.properties, base_ct.properties)
    merge_maps(ct.derived_parameters, base_ct.derived_parameters)
    merge_maps(ct.index_parameters, base_ct.index_parameters)
    merge_maps(ct.constants, base_ct.constants)
    merge_maps(ct.exposures, base_ct.exposures)
    merge_maps(ct.requirements, base_ct.requirements)
    merge_maps(ct.component_requirements, base_ct.component_requirements)
    merge_maps(ct.instance_requirements, base_ct.instance_requirements)
    merge_maps(ct.children, base_ct.children)
    merge_maps(ct.texts, base_ct.texts)
    merge_maps(ct.links, base_ct.links)
    merge_maps(ct.paths, base_ct.paths)
    merge_maps(ct.event_ports, base_ct.event_ports)
    merge_maps(ct.component_references, base_ct.component_references)
    merge_maps(ct.attachments, base_ct.attachments)

    # Dynamics.
    merge_maps(ct.dynamics.state_variables, base_ct.dynamics.state_variables)
    merge_maps(ct.dynamics.derived_variables, base_ct.dynamics.derived_variables)
    merge_maps(ct.dynamics.conditional_derived_variables,
               base_ct.dynamics.conditional_derived_variables)
    merge_maps(ct.dynamics.time_derivatives, base_ct.dynamics.time_derivatives)
    #merge_lists(ct.dynamics.event_handlers, base_ct.dynamics.event_handlers)
    merge_maps(ct.dynamics.kinetic_schemes, base_ct.dynamics.kinetic_schemes)

    # Structure.
    merge_lists(ct.structure.event_connections, base_ct.structure.event_connections)
    merge_lists(ct.structure.child_instances, base_ct.structure.child_instances)
    merge_lists(ct.structure.multi_instantiates, base_ct.structure.multi_instantiates)

    # Simulation.
    merge_maps(ct.simulation.runs, base_ct.simulation.runs)
    merge_maps(ct.simulation.records, base_ct.simulation.records)
    merge_maps(ct.simulation.event_records, base_ct.simulation.event_records)
    merge_maps(ct.simulation.data_displays, base_ct.simulation.data_displays)
    merge_maps(ct.simulation.data_writers, base_ct.simulation.data_writers)
    merge_maps(ct.simulation.event_writers, base_ct.simulation.event_writers)
9,141
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L370-L428
[ "def", "session_ended", "(", "self", ",", "f", ")", ":", "self", ".", "_session_ended_view_func", "=", "f", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "self", ".", "_flask_view_func", "(", "*", "args", ",", "*", "*", "kw", ")", "return", "f" ]
Resolve simulation specifications .
def resolve_simulation(self, fc, ct):
    """
    Resolve simulation specifications: turns the symbolic run/record/
    display/writer declarations of the component type into concrete
    objects on the fat component, looking up parameter values, paths and
    texts from the fat component.

    @param fc: Fat component receiving the resolved simulation objects.
    @param ct: Component type holding the symbolic simulation specs.
    @raise ModelError: if any referenced parameter/path/text cannot be resolved.
    """
    # Runs: component reference + timing parameters.
    for run in ct.simulation.runs:
        try:
            run2 = Run(fc.component_references[run.component].referenced_component,
                       run.variable,
                       fc.parameters[run.increment].numeric_value,
                       fc.parameters[run.total].numeric_value)
        except:
            raise ModelError("Unable to resolve simulation run parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(run2)

    # Records: quantity path, optional scale (default 1) and color (default black).
    for record in ct.simulation.records:
        try:
            record2 = Record(fc.paths[record.quantity].value,
                             fc.parameters[record.scale].numeric_value if record.scale else 1,
                             fc.texts[record.color].value if record.color else '#000000')
        except:
            raise ModelError("Unable to resolve simulation record parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(record2)

    # Event records: quantity path + event port.
    for event_record in ct.simulation.event_records:
        try:
            event_record2 = EventRecord(fc.paths[event_record.quantity].value,
                                        fc.texts[event_record.eventPort].value)
        except:
            raise ModelError("Unable to resolve simulation event_record parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(event_record2)

    # Data displays: title text, optional timeScale parameter.
    for dd in ct.simulation.data_displays:
        try:
            dd2 = DataDisplay(fc.texts[dd.title].value, '')
            if 'timeScale' in fc.parameters:
                dd2.timeScale = fc.parameters['timeScale'].numeric_value
        except:
            raise ModelError("Unable to resolve simulation display parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(dd2)

    # Data writers: path (default '.') + file name.
    for dw in ct.simulation.data_writers:
        try:
            path = '.'
            if fc.texts[dw.path] and fc.texts[dw.path].value:
                path = fc.texts[dw.path].value
            dw2 = DataWriter(path, fc.texts[dw.file_name].value)
        except:
            raise ModelError("Unable to resolve simulation writer parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(dw2)

    # Event writers: path (default '.') + file name + format.
    for ew in ct.simulation.event_writers:
        try:
            path = '.'
            if fc.texts[ew.path] and fc.texts[ew.path].value:
                path = fc.texts[ew.path].value
            ew2 = EventWriter(path, fc.texts[ew.file_name].value, fc.texts[ew.format].value)
        except:
            raise ModelError("Unable to resolve simulation writer parameters in component '{0}'",
                             fc.id)
        fc.simulation.add(ew2)
9,142
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L728-L799
[ "def", "download_next_song", "(", "self", ",", "song", ")", ":", "dl_ydl_opts", "=", "dict", "(", "ydl_opts", ")", "dl_ydl_opts", "[", "\"progress_hooks\"", "]", "=", "[", "self", ".", "ytdl_progress_hook", "]", "dl_ydl_opts", "[", "\"outtmpl\"", "]", "=", "self", ".", "output_format", "# Move the songs from the next cache to the current cache", "self", ".", "move_next_cache", "(", ")", "self", ".", "state", "=", "'ready'", "self", ".", "play_empty", "(", ")", "# Download the file and create the stream", "with", "youtube_dl", ".", "YoutubeDL", "(", "dl_ydl_opts", ")", "as", "ydl", ":", "try", ":", "ydl", ".", "download", "(", "[", "song", "]", ")", "except", "DownloadStreamException", ":", "# This is a livestream, use the appropriate player", "future", "=", "asyncio", ".", "run_coroutine_threadsafe", "(", "self", ".", "create_stream_player", "(", "song", ",", "dl_ydl_opts", ")", ",", "client", ".", "loop", ")", "try", ":", "future", ".", "result", "(", ")", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "e", ")", "self", ".", "vafter_ts", "(", ")", "return", "except", "PermissionError", ":", "# File is still in use, it'll get cleared next time", "pass", "except", "youtube_dl", ".", "utils", ".", "DownloadError", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "self", ".", "statuslog", ".", "error", "(", "e", ")", "self", ".", "vafter_ts", "(", ")", "return", "except", "Exception", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "self", ".", "vafter_ts", "(", ")", "return" ]
Get the numeric value for a parameter value specification .
def get_numeric_value ( self , value_str , dimension = None ) : n = None i = len ( value_str ) while n is None : try : part = value_str [ 0 : i ] nn = float ( part ) n = nn s = value_str [ i : ] except ValueError : i = i - 1 number = n sym = s numeric_value = None if sym == '' : numeric_value = number else : if sym in self . units : unit = self . units [ sym ] if dimension : if dimension != unit . dimension and dimension != '*' : raise SimBuildError ( "Unit symbol '{0}' cannot " "be used for dimension '{1}'" , sym , dimension ) else : dimension = unit . dimension numeric_value = ( number * ( 10 ** unit . power ) * unit . scale ) + unit . offset else : raise SimBuildError ( "Unknown unit symbol '{0}'. Known: {1}" , sym , self . units ) #print("Have converted %s to value: %s, dimension %s"%(value_str, numeric_value, dimension)) return numeric_value
9,143
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/model/model.py#L802-L849
[ "def", "removeAllEntitlements", "(", "self", ",", "appId", ")", ":", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "\"appId\"", ":", "appId", "}", "url", "=", "self", ".", "_url", "+", "\"/licenses/removeAllEntitlements\"", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ")" ]
Passed all drivers of executable checks which command is passed to the executable and then gets the options for a driver parses them from command line and runs the driver
def start_msstitch ( exec_drivers , sysargs ) : parser = populate_parser ( exec_drivers ) args = parser . parse_args ( sysargs [ 1 : ] ) args . func ( * * vars ( args ) )
9,144
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/drivers/startup.py#L47-L53
[ "def", "new_keypair", "(", "key", ",", "value", ",", "ambig", ",", "unambig", ")", ":", "if", "key", "in", "ambig", ":", "return", "if", "key", "in", "unambig", "and", "value", "!=", "unambig", "[", "key", "]", ":", "ambig", ".", "add", "(", "key", ")", "del", "unambig", "[", "key", "]", "return", "unambig", "[", "key", "]", "=", "value", "return" ]
Merge dictionaries . Later keys overwrite .
def merged ( * dicts , * * kwargs ) : if not dicts : return Struct ( ) result = dict ( ) for d in dicts : result . update ( d ) result . update ( kwargs ) struct_type = type ( dicts [ 0 ] ) return struct_type ( * * result )
9,145
https://github.com/TriOptima/tri.struct/blob/8886392da9cd77ce662e0781b0ff0bf82b38b56b/lib/tri/struct/__init__.py#L60-L76
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Finds ordering of derived_parameters .
def order_derived_parameters ( component ) : if len ( component . derived_parameters ) == 0 : return [ ] ordering = [ ] dps = [ ] for dp in component . derived_parameters : dps . append ( dp . name ) maxcount = 5 count = maxcount while count > 0 and dps != [ ] : count = count - 1 for dp1 in dps : #exp_tree = regime.derived_variables[dv1].expression_tree value = component . derived_parameters [ dp1 ] . value found = False for dp2 in dps : if dp1 != dp2 and dp2 in value : found = True if not found : ordering . append ( dp1 ) del dps [ dps . index ( dp1 ) ] count = maxcount break if count == 0 : raise SimBuildError ( ( "Unable to find ordering for derived " "parameter in component '{0}'" ) . format ( component ) ) #return ordering + dvsnoexp return ordering
9,146
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L1059-L1107
[ "def", "update_user_lock", "(", "repository_path", ",", "session_token", ")", ":", "# NOTE ALWAYS use within lock access callback", "# While the user lock file should ALWAYS be written only within a lock_access", "# callback, it is sometimes read asynchronously. Because of this updates to", "# the file must be atomic. Write plus move is used to achieve this.", "real_path", "=", "cpjoin", "(", "repository_path", ",", "'user_file'", ")", "tmp_path", "=", "cpjoin", "(", "repository_path", ",", "'new_user_file'", ")", "with", "open", "(", "tmp_path", ",", "'w'", ")", "as", "fd2", ":", "if", "session_token", "is", "None", ":", "fd2", ".", "write", "(", "''", ")", "else", ":", "fd2", ".", "write", "(", "json", ".", "dumps", "(", "{", "'session_token'", ":", "session_token", ",", "'expires'", ":", "int", "(", "time", ".", "time", "(", ")", ")", "+", "30", "}", ")", ")", "fd2", ".", "flush", "(", ")", "os", ".", "rename", "(", "tmp_path", ",", "real_path", ")" ]
Finds ordering of derived_variables .
def order_derived_variables ( regime ) : ordering = [ ] dvs = [ ] dvsnoexp = [ ] maxcount = 5 for dv in regime . derived_variables : if dv . expression_tree == None : dvsnoexp . append ( dv . name ) else : dvs . append ( dv . name ) for dv in regime . conditional_derived_variables : if len ( dv . cases ) == 0 : dvsnoexp . append ( dv . name ) else : dvs . append ( dv . name ) count = maxcount while count > 0 and dvs != [ ] : count = count - 1 for dv1 in dvs : if dv1 in regime . derived_variables : dv = regime . derived_variables [ dv1 ] else : dv = regime . conditional_derived_variables [ dv1 ] found = False if isinstance ( dv , DerivedVariable ) : exp_tree = dv . expression_tree for dv2 in dvs : if dv1 != dv2 and is_var_in_exp_tree ( dv2 , exp_tree ) : found = True else : for case in dv . cases : for dv2 in dvs : if dv1 != dv2 and ( is_var_in_exp_tree ( dv2 , case . condition_expression_tree ) or is_var_in_exp_tree ( dv2 , case . value_expression_tree ) ) : found = True if not found : ordering . append ( dv1 ) del dvs [ dvs . index ( dv1 ) ] count = maxcount break if count == 0 : raise SimBuildError ( ( "Unable to find ordering for derived " "variables in regime '{0}'" ) . format ( regime . name ) ) #return ordering + dvsnoexp return dvsnoexp + ordering
9,147
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L1110-L1177
[ "def", "insert", "(", "self", ",", "crc", ",", "toc", ")", ":", "if", "self", ".", "_rw_cache", ":", "try", ":", "filename", "=", "'%s/%08X.json'", "%", "(", "self", ".", "_rw_cache", ",", "crc", ")", "cache", "=", "open", "(", "filename", ",", "'w'", ")", "cache", ".", "write", "(", "json", ".", "dumps", "(", "toc", ",", "indent", "=", "2", ",", "default", "=", "self", ".", "_encoder", ")", ")", "cache", ".", "close", "(", ")", "logger", ".", "info", "(", "'Saved cache to [%s]'", ",", "filename", ")", "self", ".", "_cache_files", "+=", "[", "filename", "]", "except", "Exception", "as", "exp", ":", "logger", ".", "warning", "(", "'Could not save cache to file [%s]: %s'", ",", "filename", ",", "str", "(", "exp", ")", ")", "else", ":", "logger", ".", "warning", "(", "'Could not save cache, no writable directory'", ")" ]
Build the simulation components from the model .
def build ( self ) : self . sim = Simulation ( ) for component_id in self . model . targets : if component_id not in self . model . components : raise SimBuildError ( "Unable to find target component '{0}'" , component_id ) component = self . model . fat_components [ component_id ] runnable = self . build_runnable ( component ) self . sim . add_runnable ( runnable ) return self . sim
9,148
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L47-L66
[ "def", "_get_fld2col_widths", "(", "self", ",", "*", "*", "kws", ")", ":", "fld2col_widths", "=", "self", ".", "_init_fld2col_widths", "(", ")", "if", "'fld2col_widths'", "not", "in", "kws", ":", "return", "fld2col_widths", "for", "fld", ",", "val", "in", "kws", "[", "'fld2col_widths'", "]", ".", "items", "(", ")", ":", "fld2col_widths", "[", "fld", "]", "=", "val", "return", "fld2col_widths" ]
Adds event connections to a runnable component based on the structure specifications in the component model .
def build_event_connections ( self , component , runnable , structure ) : if self . debug : print ( "\n++++++++ Calling build_event_connections of %s with runnable %s, parent %s" % ( component . id , runnable . id , runnable . parent ) ) # Process event connections for ec in structure . event_connections : if self . debug : print ( ec . toxml ( ) ) source = runnable . parent . resolve_path ( ec . from_ ) target = runnable . parent . resolve_path ( ec . to ) if ec . receiver : receiver_template = self . build_runnable ( ec . receiver , target ) #receiver = copy.deepcopy(receiver_template) receiver = receiver_template . copy ( ) receiver . id = "{0}__{1}__" . format ( component . id , receiver_template . id ) if ec . receiver_container : target . add_attachment ( receiver , ec . receiver_container ) target . add_child ( receiver_template . id , receiver ) target = receiver else : source = runnable . resolve_path ( ec . from_ ) target = runnable . resolve_path ( ec . to ) source_port = ec . source_port target_port = ec . target_port if not source_port : if len ( source . event_out_ports ) == 1 : source_port = source . event_out_ports [ 0 ] else : raise SimBuildError ( ( "No source event port " "uniquely identifiable" " in '{0}'" ) . format ( source . id ) ) if not target_port : if len ( target . event_in_ports ) == 1 : target_port = target . event_in_ports [ 0 ] else : raise SimBuildError ( ( "No destination event port " "uniquely identifiable " "in '{0}'" ) . format ( target ) ) if self . debug : print ( "register_event_out_callback\n Source: %s, %s (port: %s) \n -> %s, %s (port: %s)" % ( source , id ( source ) , source_port , target , id ( target ) , target_port ) ) source . register_event_out_callback ( source_port , lambda : target . inc_event_in ( target_port ) )
9,149
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L231-L289
[ "def", "array", "(", "self", ")", ":", "if", "self", ".", "bands", "==", "1", ":", "return", "self", ".", "matrix", "(", ")", "else", ":", "arr", "=", "self", ".", "raster", ".", "ReadAsArray", "(", ")", ".", "transpose", "(", "1", ",", "2", ",", "0", ")", "if", "isinstance", "(", "self", ".", "nodata", ",", "list", ")", ":", "for", "i", "in", "range", "(", "0", ",", "self", ".", "bands", ")", ":", "arr", "[", ":", ",", ":", ",", "i", "]", "[", "arr", "[", ":", ",", ":", ",", "i", "]", "==", "self", ".", "nodata", "[", "i", "]", "]", "=", "np", ".", "nan", "else", ":", "arr", "[", "arr", "==", "self", ".", "nodata", "]", "=", "np", ".", "nan", "return", "arr" ]
Adds structure to a runnable component based on the structure specifications in the component model .
def build_structure ( self , component , runnable , structure ) : if self . debug : print ( "\n++++++++ Calling build_structure of %s with runnable %s, parent %s" % ( component . id , runnable . id , runnable . parent ) ) # Process single-child instantiations for ch in structure . child_instances : child_runnable = self . build_runnable ( ch . referenced_component , runnable ) runnable . add_child ( child_runnable . id , child_runnable ) runnable . add_child_typeref ( ch . component , child_runnable ) # Process multi-child instatiantions for mi in structure . multi_instantiates : template = self . build_runnable ( mi . component , runnable ) for i in range ( mi . number ) : #instance = copy.deepcopy(template) instance = template . copy ( ) instance . id = "{0}__{1}__{2}" . format ( component . id , template . id , i ) runnable . array . append ( instance ) # Process foreach statements for fe in structure . for_eachs : self . build_foreach ( component , runnable , fe ) self . build_event_connections ( component , runnable , structure )
9,150
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L293-L334
[ "def", "bundles", "(", ")", ":", "per_page", "=", "int", "(", "request", ".", "args", ".", "get", "(", "'per_page'", ",", "30", ")", ")", "page", "=", "int", "(", "request", ".", "args", ".", "get", "(", "'page'", ",", "1", ")", ")", "query", "=", "store", ".", "bundles", "(", ")", "query_page", "=", "query", ".", "paginate", "(", "page", ",", "per_page", "=", "per_page", ")", "data", "=", "[", "]", "for", "bundle_obj", "in", "query_page", ".", "items", ":", "bundle_data", "=", "bundle_obj", ".", "to_dict", "(", ")", "bundle_data", "[", "'versions'", "]", "=", "[", "version", ".", "to_dict", "(", ")", "for", "version", "in", "bundle_obj", ".", "versions", "]", "data", ".", "append", "(", "bundle_data", ")", "return", "jsonify", "(", "bundles", "=", "data", ")" ]
Iterate over ForEach constructs and process nested elements .
def build_foreach ( self , component , runnable , foreach , name_mappings = { } ) : if self . debug : print ( "\n++++++++ Calling build_foreach of %s with runnable %s, parent %s, name_mappings: %s" % ( component . id , runnable . id , runnable . parent , name_mappings ) ) target_array = runnable . resolve_path ( foreach . instances ) for target_runnable in target_array : if self . debug : print ( "Applying contents of for_each to %s, as %s" % ( target_runnable . id , foreach . as_ ) ) name_mappings [ foreach . as_ ] = target_runnable # Process foreach statements for fe2 in foreach . for_eachs : #print fe2.toxml() target_array2 = runnable . resolve_path ( fe2 . instances ) for target_runnable2 in target_array2 : name_mappings [ fe2 . as_ ] = target_runnable2 self . build_foreach ( component , runnable , fe2 , name_mappings ) # Process event connections for ec in foreach . event_connections : source = name_mappings [ ec . from_ ] target = name_mappings [ ec . to ] source_port = ec . source_port target_port = ec . target_port if not source_port : if len ( source . event_out_ports ) == 1 : source_port = source . event_out_ports [ 0 ] else : raise SimBuildError ( ( "No source event port " "uniquely identifiable" " in '{0}'" ) . format ( source . id ) ) if not target_port : if len ( target . event_in_ports ) == 1 : target_port = target . event_in_ports [ 0 ] else : raise SimBuildError ( ( "No destination event port " "uniquely identifiable " "in '{0}'" ) . format ( target ) ) if self . debug : print ( "register_event_out_callback\n Source: %s, %s (port: %s) \n -> %s, %s (port: %s)" % ( source , id ( source ) , source_port , target , id ( target ) , target_port ) ) source . register_event_out_callback ( source_port , lambda : target . inc_event_in ( target_port ) )
9,151
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L338-L394
[ "def", "get_sync_sql", "(", "self", ",", "field_name", ",", "missing_langs", ",", "model", ")", ":", "qn", "=", "connection", ".", "ops", ".", "quote_name", "style", "=", "no_style", "(", ")", "sql_output", "=", "[", "]", "db_table", "=", "model", ".", "_meta", ".", "db_table", "for", "lang", "in", "missing_langs", ":", "new_field", "=", "build_localized_fieldname", "(", "field_name", ",", "lang", ")", "f", "=", "model", ".", "_meta", ".", "get_field", "(", "new_field", ")", "col_type", "=", "f", ".", "db_type", "(", "connection", "=", "connection", ")", "field_sql", "=", "[", "style", ".", "SQL_FIELD", "(", "qn", "(", "f", ".", "column", ")", ")", ",", "style", ".", "SQL_COLTYPE", "(", "col_type", ")", "]", "# column creation", "stmt", "=", "\"ALTER TABLE %s ADD COLUMN %s\"", "%", "(", "qn", "(", "db_table", ")", ",", "' '", ".", "join", "(", "field_sql", ")", ")", "if", "not", "f", ".", "null", ":", "stmt", "+=", "\" \"", "+", "style", ".", "SQL_KEYWORD", "(", "'NOT NULL'", ")", "sql_output", ".", "append", "(", "stmt", "+", "\";\"", ")", "return", "sql_output" ]
Process simulation - related aspects to a runnable component based on the dynamics specifications in the component model .
def process_simulation_specs ( self , component , runnable , simulation ) : # Process runs for run in simulation . runs : cid = run . component . id + '_' + component . id target = self . build_runnable ( run . component , runnable , cid ) self . sim . add_runnable ( target ) self . current_record_target = target target . configure_time ( run . increment , run . total )
9,152
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L606-L640
[ "def", "_tofile", "(", "self", ",", "fh", ",", "pam", "=", "False", ")", ":", "fh", ".", "seek", "(", "0", ")", "fh", ".", "write", "(", "self", ".", "_header", "(", "pam", ")", ")", "data", "=", "self", ".", "asarray", "(", "copy", "=", "False", ")", "if", "self", ".", "maxval", "==", "1", ":", "data", "=", "numpy", ".", "packbits", "(", "data", ",", "axis", "=", "-", "1", ")", "data", ".", "tofile", "(", "fh", ")" ]
Recursively builds a Python expression from a parsed expression tree .
def build_expression_from_tree ( self , runnable , regime , tree_node ) : component_type = self . model . component_types [ runnable . component . type ] dynamics = component_type . dynamics if tree_node . type == ExprNode . VALUE : if tree_node . value [ 0 ] . isalpha ( ) : if tree_node . value == 't' : return 'self.time_completed' elif tree_node . value in component_type . requirements : var_prefix = 'self' v = tree_node . value r = runnable while ( v not in r . instance_variables and v not in r . derived_variables ) : var_prefix = '{0}.{1}' . format ( var_prefix , 'parent' ) r = r . parent if r == None : raise SimBuildError ( "Unable to resolve required " "variable '{0}'" . format ( v ) ) return '{0}.{1}' . format ( var_prefix , v ) elif ( tree_node . value in dynamics . derived_variables or ( regime is not None and tree_node . value in regime . derived_variables ) ) : return 'self.{0}' . format ( tree_node . value ) else : return 'self.{0}_shadow' . format ( tree_node . value ) else : return tree_node . value elif tree_node . type == ExprNode . FUNC1 : pattern = '({0}({1}))' func = self . convert_func ( tree_node . func ) if 'random.uniform' in func : pattern = '({0}(0,{1}))' return pattern . format ( func , self . build_expression_from_tree ( runnable , regime , tree_node . param ) ) else : return '({0}) {1} ({2})' . format ( self . build_expression_from_tree ( runnable , regime , tree_node . left ) , self . convert_op ( tree_node . op ) , self . build_expression_from_tree ( runnable , regime , tree_node . right ) )
9,153
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L703-L767
[ "def", "open", "(", "self", ",", "uri", ",", "*", "*", "kwargs", ")", ":", "handler", "=", "self", ".", "handler", "(", "uri", ".", "scheme", "(", ")", ")", "if", "handler", "is", "None", ":", "raise", "WSchemeCollection", ".", "NoHandlerFound", "(", "uri", ")", "if", "uri", ".", "scheme", "(", ")", "is", "None", ":", "uri", ".", "component", "(", "'scheme'", ",", "handler", ".", "scheme_specification", "(", ")", ".", "scheme_name", "(", ")", ")", "if", "handler", ".", "scheme_specification", "(", ")", ".", "is_compatible", "(", "uri", ")", "is", "False", ":", "raise", "WSchemeCollection", ".", "SchemeIncompatible", "(", "uri", ")", "return", "handler", ".", "create_handler", "(", "uri", ",", "*", "*", "kwargs", ")" ]
Build event handler code .
def build_event_handler ( self , runnable , regime , event_handler ) : if isinstance ( event_handler , OnCondition ) : return self . build_on_condition ( runnable , regime , event_handler ) elif isinstance ( event_handler , OnEvent ) : return self . build_on_event ( runnable , regime , event_handler ) elif isinstance ( event_handler , OnStart ) : return self . build_on_start ( runnable , regime , event_handler ) elif isinstance ( event_handler , OnEntry ) : return self . build_on_entry ( runnable , regime , event_handler ) else : return [ ]
9,154
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L769-L789
[ "def", "getShocks", "(", "self", ")", ":", "PersistentShockConsumerType", ".", "getShocks", "(", "self", ")", "# Get permanent and transitory income shocks", "MedShkNow", "=", "np", ".", "zeros", "(", "self", ".", "AgentCount", ")", "# Initialize medical shock array", "MedPriceNow", "=", "np", ".", "zeros", "(", "self", ".", "AgentCount", ")", "# Initialize relative price array", "for", "t", "in", "range", "(", "self", ".", "T_cycle", ")", ":", "these", "=", "t", "==", "self", ".", "t_cycle", "N", "=", "np", ".", "sum", "(", "these", ")", "if", "N", ">", "0", ":", "MedShkAvg", "=", "self", ".", "MedShkAvg", "[", "t", "]", "MedShkStd", "=", "self", ".", "MedShkStd", "[", "t", "]", "MedPrice", "=", "self", ".", "MedPrice", "[", "t", "]", "MedShkNow", "[", "these", "]", "=", "self", ".", "RNG", ".", "permutation", "(", "approxLognormal", "(", "N", ",", "mu", "=", "np", ".", "log", "(", "MedShkAvg", ")", "-", "0.5", "*", "MedShkStd", "**", "2", ",", "sigma", "=", "MedShkStd", ")", "[", "1", "]", ")", "MedPriceNow", "[", "these", "]", "=", "MedPrice", "self", ".", "MedShkNow", "=", "MedShkNow", "self", ".", "MedPriceNow", "=", "MedPriceNow" ]
Build OnCondition event handler code .
def build_on_condition ( self , runnable , regime , on_condition ) : on_condition_code = [ ] on_condition_code += [ 'if {0}:' . format ( self . build_expression_from_tree ( runnable , regime , on_condition . expression_tree ) ) ] for action in on_condition . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_condition_code += [ ' ' + line ] return on_condition_code
9,155
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L791-L814
[ "def", "formatday", "(", "self", ",", "day", ",", "weekday", ")", ":", "if", "day", "and", "day", "in", "self", ".", "day_entries", ":", "day_date", "=", "date", "(", "self", ".", "current_year", ",", "self", ".", "current_month", ",", "day", ")", "archive_day_url", "=", "reverse", "(", "'zinnia:entry_archive_day'", ",", "args", "=", "[", "day_date", ".", "strftime", "(", "'%Y'", ")", ",", "day_date", ".", "strftime", "(", "'%m'", ")", ",", "day_date", ".", "strftime", "(", "'%d'", ")", "]", ")", "return", "'<td class=\"%s entry\"><a href=\"%s\" '", "'class=\"archives\">%d</a></td>'", "%", "(", "self", ".", "cssclasses", "[", "weekday", "]", ",", "archive_day_url", ",", "day", ")", "return", "super", "(", "Calendar", ",", "self", ")", ".", "formatday", "(", "day", ",", "weekday", ")" ]
Build OnEvent event handler code .
def build_on_event ( self , runnable , regime , on_event ) : on_event_code = [ ] if self . debug : on_event_code += [ 'print("Maybe handling something for %s ("+str(id(self))+")")' % ( runnable . id ) , 'print("EICs ("+str(id(self))+"): "+str(self.event_in_counters))' ] on_event_code += [ 'count = self.event_in_counters[\'{0}\']' . format ( on_event . port ) , 'while count > 0:' , ' print(" Handling event")' if self . debug else '' , ' count -= 1' ] for action in on_event . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_event_code += [ ' ' + line ] on_event_code += [ 'self.event_in_counters[\'{0}\'] = 0' . format ( on_event . port ) , ] return on_event_code
9,156
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L816-L844
[ "def", "update_or_create", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "keys", "=", "kwargs", ".", "pop", "(", "'keys'", ")", "if", "'keys'", "in", "kwargs", "else", "[", "]", "filter_kwargs", "=", "subdict", "(", "kwargs", ",", "keys", ")", "if", "filter_kwargs", "==", "{", "}", ":", "obj", "=", "None", "else", ":", "obj", "=", "cls", ".", "first", "(", "*", "*", "filter_kwargs", ")", "if", "obj", "is", "not", "None", ":", "for", "key", ",", "value", "in", "kwargs", ".", "iteritems", "(", ")", ":", "if", "(", "key", "not", "in", "keys", "and", "key", "not", "in", "cls", ".", "_no_overwrite_", ")", ":", "setattr", "(", "obj", ",", "key", ",", "value", ")", "try", ":", "cls", ".", "session", ".", "commit", "(", ")", "except", ":", "cls", ".", "session", ".", "rollback", "(", ")", "raise", "else", ":", "obj", "=", "cls", ".", "create", "(", "*", "*", "kwargs", ")", "return", "obj" ]
Build OnStart start handler code .
def build_on_start ( self , runnable , regime , on_start ) : on_start_code = [ ] for action in on_start . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_start_code += [ line ] return on_start_code
9,157
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L846-L864
[ "def", "open_pager", "(", "self", ")", ":", "n_rows", ",", "n_cols", "=", "self", ".", "term", ".", "stdscr", ".", "getmaxyx", "(", ")", "if", "self", ".", "config", "[", "'max_pager_cols'", "]", "is", "not", "None", ":", "n_cols", "=", "min", "(", "n_cols", ",", "self", ".", "config", "[", "'max_pager_cols'", "]", ")", "data", "=", "self", ".", "get_selected_item", "(", ")", "if", "data", "[", "'type'", "]", "==", "'Submission'", ":", "text", "=", "'\\n\\n'", ".", "join", "(", "(", "data", "[", "'permalink'", "]", ",", "data", "[", "'text'", "]", ")", ")", "self", ".", "term", ".", "open_pager", "(", "text", ",", "wrap", "=", "n_cols", ")", "elif", "data", "[", "'type'", "]", "==", "'Comment'", ":", "text", "=", "'\\n\\n'", ".", "join", "(", "(", "data", "[", "'permalink'", "]", ",", "data", "[", "'body'", "]", ")", ")", "self", ".", "term", ".", "open_pager", "(", "text", ",", "wrap", "=", "n_cols", ")", "else", ":", "self", ".", "term", ".", "flash", "(", ")" ]
Build OnEntry start handler code .
def build_on_entry ( self , runnable , regime , on_entry ) : on_entry_code = [ ] on_entry_code += [ 'if self.current_regime != self.last_regime:' ] on_entry_code += [ ' self.last_regime = self.current_regime' ] for action in on_entry . actions : code = self . build_action ( runnable , regime , action ) for line in code : on_entry_code += [ ' ' + line ] return on_entry_code
9,158
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L866-L887
[ "def", "handle_memory", "(", "self", ",", "obj", ")", ":", "if", "obj", ".", "subject", "is", "not", "None", ":", "with", "self", ".", "con", "as", "db", ":", "SchemaBase", ".", "note", "(", "db", ",", "obj", ".", "subject", ",", "obj", ".", "state", ",", "obj", ".", "object", ",", "text", "=", "obj", ".", "text", ",", "html", "=", "obj", ".", "html", ",", ")", "return", "obj" ]
Build event handler action code .
def build_action ( self , runnable , regime , action ) : if isinstance ( action , StateAssignment ) : return self . build_state_assignment ( runnable , regime , action ) if isinstance ( action , EventOut ) : return self . build_event_out ( action ) if isinstance ( action , Transition ) : return self . build_transition ( action ) else : return [ 'pass' ]
9,159
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L889-L907
[ "def", "_generate_overlays", "(", "self", ")", ":", "overlays", "=", "defaultdict", "(", "dict", ")", "for", "handle", "in", "self", ".", "_storage_broker", ".", "iter_item_handles", "(", ")", ":", "identifier", "=", "dtoolcore", ".", "utils", ".", "generate_identifier", "(", "handle", ")", "item_metadata", "=", "self", ".", "_storage_broker", ".", "get_item_metadata", "(", "handle", ")", "for", "k", ",", "v", "in", "item_metadata", ".", "items", "(", ")", ":", "overlays", "[", "k", "]", "[", "identifier", "]", "=", "v", "return", "overlays" ]
Build state assignment code .
def build_state_assignment ( self , runnable , regime , state_assignment ) : return [ 'self.{0} = {1}' . format ( state_assignment . variable , self . build_expression_from_tree ( runnable , regime , state_assignment . expression_tree ) ) ]
9,160
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L909-L924
[ "def", "get_directory_properties", "(", "self", ",", "share_name", ",", "directory_name", ",", "timeout", "=", "None", ",", "snapshot", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "_validate_not_none", "(", "'directory_name'", ",", "directory_name", ")", "request", "=", "HTTPRequest", "(", ")", "request", ".", "method", "=", "'GET'", "request", ".", "host_locations", "=", "self", ".", "_get_host_locations", "(", ")", "request", ".", "path", "=", "_get_path", "(", "share_name", ",", "directory_name", ")", "request", ".", "query", "=", "{", "'restype'", ":", "'directory'", ",", "'timeout'", ":", "_int_to_str", "(", "timeout", ")", ",", "'sharesnapshot'", ":", "_to_str", "(", "snapshot", ")", "}", "return", "self", ".", "_perform_request", "(", "request", ",", "_parse_directory", ",", "[", "directory_name", "]", ")" ]
Build event out code .
def build_event_out ( self , event_out ) : event_out_code = [ 'if "{0}" in self.event_out_callbacks:' . format ( event_out . port ) , ' for c in self.event_out_callbacks[\'{0}\']:' . format ( event_out . port ) , ' c()' ] return event_out_code
9,161
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L926-L941
[ "def", "normalize", "(", "template_dict", ")", ":", "resources", "=", "template_dict", ".", "get", "(", "RESOURCES_KEY", ",", "{", "}", ")", "for", "logical_id", ",", "resource", "in", "resources", ".", "items", "(", ")", ":", "resource_metadata", "=", "resource", ".", "get", "(", "METADATA_KEY", ",", "{", "}", ")", "asset_path", "=", "resource_metadata", ".", "get", "(", "ASSET_PATH_METADATA_KEY", ")", "asset_property", "=", "resource_metadata", ".", "get", "(", "ASSET_PROPERTY_METADATA_KEY", ")", "ResourceMetadataNormalizer", ".", "_replace_property", "(", "asset_property", ",", "asset_path", ",", "resource", ",", "logical_id", ")" ]
Builds a reduce operation on the selected target range .
def build_reduce_code ( self , result , select , reduce ) : select = select . replace ( '/' , '.' ) select = select . replace ( ' ' , '' ) if reduce == 'add' : reduce_op = '+' acc_start = 0 else : reduce_op = '*' acc_start = 1 #bits = select.split('[*]') bits = re . split ( '\[.*\]' , select ) seps = re . findall ( '\[.*\]' , select ) code = [ 'self.{0} = {1}' . format ( result , acc_start ) ] code += [ 'self.{0}_shadow = {1}' . format ( result , acc_start ) ] code += [ 'try:' ] if len ( bits ) == 1 : target = select code += [ ' self.{0} = self.{1}' . format ( result , target ) ] code += [ ' self.{0}_shadow = self.{1}' . format ( result , target ) ] elif len ( bits ) == 2 : sep = seps [ 0 ] [ 1 : - 1 ] if sep == '*' : array = bits [ 0 ] ref = bits [ 1 ] code += [ ' acc = {0}' . format ( acc_start ) ] code += [ ' for o in self.{0}:' . format ( array ) ] code += [ ' acc = acc {0} o{1}' . format ( reduce_op , ref ) ] code += [ ' self.{0} = acc' . format ( result ) ] code += [ ' self.{0}_shadow = acc' . format ( result ) ] else : bits2 = sep . split ( '=' ) if len ( bits2 ) > 1 : array = bits [ 0 ] ref = bits [ 1 ] code += [ ' acc = {0}' . format ( acc_start ) ] code += [ ' for o in self.{0}:' . format ( array ) ] code += [ ' if o.{0} == {1}:' . format ( bits2 [ 0 ] , bits2 [ 1 ] ) ] code += [ ' acc = acc {0} o{1}' . format ( reduce_op , ref ) ] code += [ ' self.{0} = acc' . format ( result ) ] code += [ ' self.{0}_shadow = acc' . format ( result ) ] else : raise SimbuildError ( "Invalid reduce target - '{0}'" . format ( select ) ) else : raise SimbuildError ( "Invalid reduce target - '{0}'" . format ( select ) ) code += [ 'except:' ] code += [ ' pass' ] return code
9,162
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L956-L1015
[ "def", "run_tensorboard", "(", "logdir", ",", "listen_on", "=", "\"0.0.0.0\"", ",", "port", "=", "0", ",", "tensorboard_args", "=", "None", ",", "timeout", "=", "10", ")", ":", "if", "tensorboard_args", "is", "None", ":", "tensorboard_args", "=", "[", "]", "tensorboard_instance", "=", "Process", ".", "create_process", "(", "TENSORBOARD_BINARY", ".", "split", "(", "\" \"", ")", "+", "[", "\"--logdir\"", ",", "logdir", ",", "\"--host\"", ",", "listen_on", ",", "\"--port\"", ",", "str", "(", "port", ")", "]", "+", "tensorboard_args", ")", "try", ":", "tensorboard_instance", ".", "run", "(", ")", "except", "FileNotFoundError", "as", "ex", ":", "raise", "TensorboardNotFoundError", "(", "ex", ")", "# Wait for a message that signaliezes start of Tensorboard", "start", "=", "time", ".", "time", "(", ")", "data", "=", "\"\"", "while", "time", ".", "time", "(", ")", "-", "start", "<", "timeout", ":", "line", "=", "tensorboard_instance", ".", "read_line_stderr", "(", "time_limit", "=", "timeout", ")", "data", "+=", "line", "if", "\"at http://\"", "in", "line", ":", "port", "=", "parse_port_from_tensorboard_output", "(", "line", ")", "# Good case", "return", "port", "elif", "\"TensorBoard attempted to bind to port\"", "in", "line", ":", "break", "tensorboard_instance", ".", "terminate", "(", ")", "raise", "UnexpectedOutputError", "(", "data", ",", "expected", "=", "\"Confirmation that Tensorboard has started\"", ")" ]
Adds recording - related dynamics to a runnable component based on the dynamics specifications in the component model .
def add_recording_behavior ( self , component , runnable ) : simulation = component . simulation for rec in simulation . records : rec . id = runnable . id self . current_record_target . add_variable_recorder ( self . current_data_output , rec )
9,163
https://github.com/LEMS/pylems/blob/4eeb719d2f23650fe16c38626663b69b5c83818b/lems/sim/build.py#L1038-L1055
[ "def", "bucketCSVs", "(", "csvFile", ",", "bucketIdx", "=", "2", ")", ":", "try", ":", "with", "open", "(", "csvFile", ",", "\"rU\"", ")", "as", "f", ":", "reader", "=", "csv", ".", "reader", "(", "f", ")", "headers", "=", "next", "(", "reader", ",", "None", ")", "dataDict", "=", "OrderedDict", "(", ")", "for", "lineNumber", ",", "line", "in", "enumerate", "(", "reader", ")", ":", "if", "line", "[", "bucketIdx", "]", "in", "dataDict", ":", "dataDict", "[", "line", "[", "bucketIdx", "]", "]", ".", "append", "(", "line", ")", "else", ":", "# new bucket", "dataDict", "[", "line", "[", "bucketIdx", "]", "]", "=", "[", "line", "]", "except", "IOError", "as", "e", ":", "print", "e", "filePaths", "=", "[", "]", "for", "i", ",", "(", "_", ",", "lines", ")", "in", "enumerate", "(", "dataDict", ".", "iteritems", "(", ")", ")", ":", "bucketFile", "=", "csvFile", ".", "replace", "(", "\".\"", ",", "\"_\"", "+", "str", "(", "i", ")", "+", "\".\"", ")", "writeCSV", "(", "lines", ",", "headers", ",", "bucketFile", ")", "filePaths", ".", "append", "(", "bucketFile", ")", "return", "filePaths" ]
class_ can either be live object or a classname
def check_static_member_vars ( class_ , fpath = None , only_init = True ) : #import ast #import astor import utool as ut if isinstance ( class_ , six . string_types ) : classname = class_ if fpath is None : raise Exception ( 'must specify fpath' ) else : # We were given a live object if not isinstance ( class_ , type ) : # We were given the class instance not the class class_instance = class_ class_ = class_instance . __class__ classname = class_ . __name__ if fpath is None : module = ut . get_module_from_class ( class_ ) fpath = ut . get_modpath ( module ) sourcecode = ut . readfrom ( fpath ) import redbaron # Pares a FULL syntax tree that keeps blockcomments baron = redbaron . RedBaron ( sourcecode ) for node in baron : if node . type == 'class' and node . name == classname : classnode = node break def find_parent_method ( node ) : par = node . parent_find ( 'def' ) if par is not None and par . parent is not None : if par . parent . type == 'class' : return par else : return find_parent_method ( par ) # TODO: Find inherited attrs #classnode.inherit_from # inhertied_attrs = ['parent'] # inhertied_attrs = [] class_methods = [ ] for node in classnode : if node . type == 'def' : if only_init : if node . name == '__init__' : class_methods . append ( node ) else : class_methods . append ( node ) class_vars = [ ] self_vars = [ ] for method_node in class_methods : self_var = method_node . arguments [ 0 ] . dumps ( ) self_vars . append ( self_var ) for assign in method_node . find_all ( 'assignment' ) : # method_node = find_parent_method(assign) if assign . target . dumps ( ) . startswith ( self_var + '.' ) : class_vars . append ( assign . target . value [ 1 ] . dumps ( ) ) static_attrs = ut . unique ( class_vars ) return static_attrs # class_members = ut.unique(class_vars + class_methods + inhertied_attrs) if False : self_var = self_vars [ 0 ] # Find everything that is used complex_cases = [ ] simple_cases = [ ] all_self_ref = classnode . 
find_all ( 'name_' , value = re . compile ( '.*' + self_var + '\\.*' ) ) for x in all_self_ref : if x . parent . type == 'def_argument' : continue if x . parent . type == 'atomtrailers' : atom = x . parent if ut . depth ( atom . fst ( ) ) <= 3 : simple_cases . append ( atom ) else : complex_cases . append ( atom ) #print(ut.depth(atom.value.data)) #print(atom.value) #print(atom.dumps()) #if len(atom.dumps()) > 200: # break accessed_attrs = [ ] for x in simple_cases : if x . value [ 0 ] . dumps ( ) == self_var : attr = x . value [ 1 ] . dumps ( ) accessed_attrs . append ( attr ) accessed_attrs = ut . unique ( accessed_attrs ) ut . setdiff ( accessed_attrs , class_vars )
9,164
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L194-L296
[ "def", "read_stats", "(", "self", ",", "*", "stats", ")", ":", "self", ".", "statistics", "=", "OrderedDict", "(", ")", "for", "port", "in", "self", ".", "ports", ":", "port_stats", "=", "IxeStatTotal", "(", "port", ")", ".", "get_attributes", "(", "FLAG_RDONLY", ",", "*", "stats", ")", "port_stats", ".", "update", "(", "{", "c", "+", "'_rate'", ":", "v", "for", "c", ",", "v", "in", "IxeStatRate", "(", "port", ")", ".", "get_attributes", "(", "FLAG_RDONLY", ",", "*", "stats", ")", ".", "items", "(", ")", "}", ")", "self", ".", "statistics", "[", "str", "(", "port", ")", "]", "=", "port_stats", "return", "self", ".", "statistics" ]
Get all functions defined in module
def get_funcnames_from_modpath ( modpath , include_methods = True ) : import utool as ut if True : import jedi source = ut . read_from ( modpath ) #script = jedi.Script(source=source, source_path=modpath, line=source.count('\n') + 1) definition_list = jedi . names ( source ) funcname_list = [ definition . name for definition in definition_list if definition . type == 'function' ] if include_methods : classdef_list = [ definition for definition in definition_list if definition . type == 'class' ] defined_methods = ut . flatten ( [ definition . defined_names ( ) for definition in classdef_list ] ) funcname_list += [ method . name for method in defined_methods if method . type == 'function' and not method . name . startswith ( '_' ) ] else : import redbaron # Pares a FULL syntax tree that keeps blockcomments sourcecode = ut . read_from ( modpath ) baron = redbaron . RedBaron ( sourcecode ) funcname_list = [ node . name for node in baron . find_all ( 'def' , recursive = include_methods ) if not node . name . startswith ( '_' ) ] return funcname_list
9,165
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L311-L334
[ "def", "tag_pos_volume", "(", "line", ")", ":", "def", "tagger", "(", "match", ")", ":", "groups", "=", "match", ".", "groupdict", "(", ")", "try", ":", "year", "=", "match", ".", "group", "(", "'year'", ")", "except", "IndexError", ":", "# Extract year from volume name", "# which should always include the year", "g", "=", "re", ".", "search", "(", "re_pos_year_num", ",", "match", ".", "group", "(", "'volume_num'", ")", ",", "re", ".", "UNICODE", ")", "year", "=", "g", ".", "group", "(", "0", ")", "if", "year", ":", "groups", "[", "'year'", "]", "=", "' <cds.YR>(%s)</cds.YR>'", "%", "year", ".", "strip", "(", ")", ".", "strip", "(", "'()'", ")", "else", ":", "groups", "[", "'year'", "]", "=", "''", "return", "'<cds.JOURNAL>PoS</cds.JOURNAL>'", "' <cds.VOL>%(volume_name)s%(volume_num)s</cds.VOL>'", "'%(year)s'", "' <cds.PG>%(page)s</cds.PG>'", "%", "groups", "for", "p", "in", "re_pos", ":", "line", "=", "p", ".", "sub", "(", "tagger", ",", "line", ")", "return", "line" ]
r Inspects members of a class
def help_members ( obj , use_other = False ) : import utool as ut attrnames = dir ( obj ) attr_list = [ getattr ( obj , attrname ) for attrname in attrnames ] attr_types = ut . lmap ( ut . type_str , map ( type , attr_list ) ) unique_types , groupxs = ut . group_indices ( attr_types ) type_to_items = ut . dzip ( unique_types , ut . apply_grouping ( attr_list , groupxs ) ) type_to_itemname = ut . dzip ( unique_types , ut . apply_grouping ( attrnames , groupxs ) ) #if memtypes is None: # memtypes = list(type_to_items.keys()) memtypes = [ 'instancemethod' ] # , 'method-wrapper'] func_mems = ut . dict_subset ( type_to_items , memtypes , [ ] ) func_list = ut . flatten ( func_mems . values ( ) ) defsig_list = [ ] num_unbound_args_list = [ ] num_args_list = [ ] for func in func_list : #args = ut.get_func_argspec(func).args argspec = ut . get_func_argspec ( func ) args = argspec . args unbound_args = get_unbound_args ( argspec ) defsig = ut . func_defsig ( func ) defsig_list . append ( defsig ) num_unbound_args_list . append ( len ( unbound_args ) ) num_args_list . append ( len ( args ) ) group = ut . hierarchical_group_items ( defsig_list , [ num_unbound_args_list , num_args_list ] ) print ( repr ( obj ) ) print ( ut . repr3 ( group , strvals = True ) ) if use_other : other_mems = ut . delete_keys ( type_to_items . copy ( ) , memtypes ) other_mems_attrnames = ut . dict_subset ( type_to_itemname , other_mems . keys ( ) ) named_other_attrs = ut . dict_union_combine ( other_mems_attrnames , other_mems , lambda x , y : list ( zip ( x , y ) ) ) print ( ut . repr4 ( named_other_attrs , nl = 2 , strvals = True ) )
9,166
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L569-L621
[ "def", "save_scatter_table", "(", "self", ",", "fn", ",", "description", "=", "\"\"", ")", ":", "data", "=", "{", "\"description\"", ":", "description", ",", "\"time\"", ":", "datetime", ".", "now", "(", ")", ",", "\"psd_scatter\"", ":", "(", "self", ".", "num_points", ",", "self", ".", "D_max", ",", "self", ".", "_psd_D", ",", "self", ".", "_S_table", ",", "self", ".", "_Z_table", ",", "self", ".", "_angular_table", ",", "self", ".", "_m_table", ",", "self", ".", "geometries", ")", ",", "\"version\"", ":", "tmatrix_aux", ".", "VERSION", "}", "pickle", ".", "dump", "(", "data", ",", "file", "(", "fn", ",", "'w'", ")", ",", "pickle", ".", "HIGHEST_PROTOCOL", ")" ]
Check if item is directly defined by a module . This check may be prone to errors .
def is_defined_by_module ( item , module , parent = None ) : flag = False if isinstance ( item , types . ModuleType ) : if not hasattr ( item , '__file__' ) : try : # hack for cv2 and xfeatures2d import utool as ut name = ut . get_modname_from_modpath ( module . __file__ ) flag = name in str ( item ) except : flag = False else : item_modpath = os . path . realpath ( dirname ( item . __file__ ) ) mod_fpath = module . __file__ . replace ( '.pyc' , '.py' ) if not mod_fpath . endswith ( '__init__.py' ) : flag = False else : modpath = os . path . realpath ( dirname ( mod_fpath ) ) modpath = modpath . replace ( '.pyc' , '.py' ) flag = item_modpath . startswith ( modpath ) elif hasattr ( item , '_utinfo' ) : # Capture case where there is a utool wrapper orig_func = item . _utinfo [ 'orig_func' ] flag = is_defined_by_module ( orig_func , module , parent ) else : if isinstance ( item , staticmethod ) : # static methods are a wrapper around a function item = item . __func__ try : func_globals = meta_util_six . get_funcglobals ( item ) func_module_name = func_globals [ '__name__' ] if func_module_name == 'line_profiler' : valid_names = dir ( module ) if parent is not None : valid_names += dir ( parent ) if item . func_name in valid_names : # hack to prevent small names #if len(item.func_name) > 8: if len ( item . func_name ) > 6 : flag = True elif func_module_name == module . __name__ : flag = True except AttributeError : if hasattr ( item , '__module__' ) : flag = item . __module__ == module . __name__ return flag
9,167
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L1052-L1101
[ "def", "text2vocab", "(", "text", ",", "output_file", ",", "text2wfreq_kwargs", "=", "{", "}", ",", "wfreq2vocab_kwargs", "=", "{", "}", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.wfreq'", ",", "delete", "=", "False", ")", "as", "f", ":", "wfreq_file", "=", "f", ".", "name", "try", ":", "text2wfreq", "(", "text", ",", "wfreq_file", ",", "*", "*", "text2wfreq_kwargs", ")", "wfreq2vocab", "(", "wfreq_file", ",", "output_file", ",", "*", "*", "wfreq2vocab_kwargs", ")", "except", "ConversionError", ":", "raise", "finally", ":", "os", ".", "remove", "(", "wfreq_file", ")" ]
Returns if a value is a python builtin function
def is_bateries_included ( item ) : flag = False if hasattr ( item , '__call__' ) and hasattr ( item , '__module__' ) : if item . __module__ is not None : module = sys . modules [ item . __module__ ] if module == builtins : flag = True elif hasattr ( module , '__file__' ) : flag = LIB_PATH == dirname ( module . __file__ ) return flag
9,168
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L1118-L1150
[ "def", "_parse_parallel_sentences", "(", "f1", ",", "f2", ")", ":", "def", "_parse_text", "(", "path", ")", ":", "\"\"\"Returns the sentences from a single text file, which may be gzipped.\"\"\"", "split_path", "=", "path", ".", "split", "(", "\".\"", ")", "if", "split_path", "[", "-", "1", "]", "==", "\"gz\"", ":", "lang", "=", "split_path", "[", "-", "2", "]", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "path", ")", "as", "f", ",", "gzip", ".", "GzipFile", "(", "fileobj", "=", "f", ")", "as", "g", ":", "return", "g", ".", "read", "(", ")", ".", "split", "(", "\"\\n\"", ")", ",", "lang", "if", "split_path", "[", "-", "1", "]", "==", "\"txt\"", ":", "# CWMT", "lang", "=", "split_path", "[", "-", "2", "]", ".", "split", "(", "\"_\"", ")", "[", "-", "1", "]", "lang", "=", "\"zh\"", "if", "lang", "in", "(", "\"ch\"", ",", "\"cn\"", ")", "else", "lang", "else", ":", "lang", "=", "split_path", "[", "-", "1", "]", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "path", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")", ".", "split", "(", "\"\\n\"", ")", ",", "lang", "def", "_parse_sgm", "(", "path", ")", ":", "\"\"\"Returns sentences from a single SGML file.\"\"\"", "lang", "=", "path", ".", "split", "(", "\".\"", ")", "[", "-", "2", "]", "sentences", "=", "[", "]", "# Note: We can't use the XML parser since some of the files are badly", "# formatted.", "seg_re", "=", "re", ".", "compile", "(", "r\"<seg id=\\\"\\d+\\\">(.*)</seg>\"", ")", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "path", ")", "as", "f", ":", "for", "line", "in", "f", ":", "seg_match", "=", "re", ".", "match", "(", "seg_re", ",", "line", ")", "if", "seg_match", ":", "assert", "len", "(", "seg_match", ".", "groups", "(", ")", ")", "==", "1", "sentences", ".", "append", "(", "seg_match", ".", "groups", "(", ")", "[", "0", "]", ")", "return", "sentences", ",", "lang", "parse_file", "=", "_parse_sgm", "if", "f1", ".", "endswith", "(", "\".sgm\"", ")", "else", 
"_parse_text", "# Some datasets (e.g., CWMT) contain multiple parallel files specified with", "# a wildcard. We sort both sets to align them and parse them one by one.", "f1_files", "=", "tf", ".", "io", ".", "gfile", ".", "glob", "(", "f1", ")", "f2_files", "=", "tf", ".", "io", ".", "gfile", ".", "glob", "(", "f2", ")", "assert", "f1_files", "and", "f2_files", ",", "\"No matching files found: %s, %s.\"", "%", "(", "f1", ",", "f2", ")", "assert", "len", "(", "f1_files", ")", "==", "len", "(", "f2_files", ")", ",", "(", "\"Number of files do not match: %d vs %d for %s vs %s.\"", "%", "(", "len", "(", "f1_files", ")", ",", "len", "(", "f2_files", ")", ",", "f1", ",", "f2", ")", ")", "for", "f1_i", ",", "f2_i", "in", "zip", "(", "sorted", "(", "f1_files", ")", ",", "sorted", "(", "f2_files", ")", ")", ":", "l1_sentences", ",", "l1", "=", "parse_file", "(", "f1_i", ")", "l2_sentences", ",", "l2", "=", "parse_file", "(", "f2_i", ")", "assert", "len", "(", "l1_sentences", ")", "==", "len", "(", "l2_sentences", ")", ",", "(", "\"Sizes do not match: %d vs %d for %s vs %s.\"", "%", "(", "len", "(", "l1_sentences", ")", ",", "len", "(", "l2_sentences", ")", ",", "f1_i", ",", "f2_i", ")", ")", "for", "s1", ",", "s2", "in", "zip", "(", "l1_sentences", ",", "l2_sentences", ")", ":", "yield", "{", "l1", ":", "s1", ",", "l2", ":", "s2", "}" ]
test func for kwargs parseing
def dummy_func ( arg1 , arg2 , arg3 = None , arg4 = [ 1 , 2 , 3 ] , arg5 = { } , * * kwargs ) : foo = kwargs . get ( 'foo' , None ) bar = kwargs . pop ( 'bar' , 4 ) foo2 = kwargs [ 'foo2' ] foobar = str ( foo ) + str ( bar ) + str ( foo2 ) return foobar
9,169
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L1261-L1269
[ "def", "console", "(", "self", ",", "console", ")", ":", "if", "console", "==", "self", ".", "_console", ":", "return", "if", "self", ".", "_console_type", "==", "\"vnc\"", "and", "console", "is", "not", "None", "and", "console", "<", "5900", ":", "raise", "NodeError", "(", "\"VNC console require a port superior or equal to 5900 currently it's {}\"", ".", "format", "(", "console", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "console", "is", "not", "None", ":", "if", "self", ".", "console_type", "==", "\"vnc\"", ":", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "console", ",", "self", ".", "_project", ",", "port_range_start", "=", "5900", ",", "port_range_end", "=", "6000", ")", "else", ":", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "console", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: console port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "console", ")", ")" ]
Get the docstring from a live object
def get_docstr ( func_or_class ) : import utool as ut try : docstr_ = func_or_class . func_doc except AttributeError : docstr_ = func_or_class . __doc__ if docstr_ is None : docstr_ = '' docstr = ut . unindent ( docstr_ ) return docstr
9,170
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L1354-L1364
[ "def", "_format_int", "(", "format", ",", "subtype", ",", "endian", ")", ":", "result", "=", "_check_format", "(", "format", ")", "if", "subtype", "is", "None", ":", "subtype", "=", "default_subtype", "(", "format", ")", "if", "subtype", "is", "None", ":", "raise", "TypeError", "(", "\"No default subtype for major format {0!r}\"", ".", "format", "(", "format", ")", ")", "elif", "not", "isinstance", "(", "subtype", ",", "(", "_unicode", ",", "str", ")", ")", ":", "raise", "TypeError", "(", "\"Invalid subtype: {0!r}\"", ".", "format", "(", "subtype", ")", ")", "try", ":", "result", "|=", "_subtypes", "[", "subtype", ".", "upper", "(", ")", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Unknown subtype: {0!r}\"", ".", "format", "(", "subtype", ")", ")", "if", "endian", "is", "None", ":", "endian", "=", "'FILE'", "elif", "not", "isinstance", "(", "endian", ",", "(", "_unicode", ",", "str", ")", ")", ":", "raise", "TypeError", "(", "\"Invalid endian-ness: {0!r}\"", ".", "format", "(", "endian", ")", ")", "try", ":", "result", "|=", "_endians", "[", "endian", ".", "upper", "(", ")", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"Unknown endian-ness: {0!r}\"", ".", "format", "(", "endian", ")", ")", "info", "=", "_ffi", ".", "new", "(", "\"SF_INFO*\"", ")", "info", ".", "format", "=", "result", "info", ".", "channels", "=", "1", "if", "_snd", ".", "sf_format_check", "(", "info", ")", "==", "_snd", ".", "SF_FALSE", ":", "raise", "ValueError", "(", "\"Invalid combination of format, subtype and endian\"", ")", "return", "result" ]
r Finds functions that are called with the keyword kwargs variable
def find_funcs_called_with_kwargs ( sourcecode , target_kwargs_name = 'kwargs' ) : import ast sourcecode = 'from __future__ import print_function\n' + sourcecode pt = ast . parse ( sourcecode ) child_funcnamess = [ ] debug = False or VERYVERB_INSPECT if debug : print ( '\nInput:' ) print ( 'target_kwargs_name = %r' % ( target_kwargs_name , ) ) print ( '\nSource:' ) print ( sourcecode ) import astor print ( '\nParse:' ) print ( astor . dump ( pt ) ) class KwargParseVisitor ( ast . NodeVisitor ) : """ TODO: understand ut.update_existing and dict update ie, know when kwargs is passed to these functions and then look assume the object that was updated is a dictionary and check wherever that is passed to kwargs as well. """ def visit_FunctionDef ( self , node ) : if debug : print ( '\nVISIT FunctionDef node = %r' % ( node , ) ) print ( 'node.args.kwarg = %r' % ( node . args . kwarg , ) ) if six . PY2 : kwarg_name = node . args . kwarg else : if node . args . kwarg is None : kwarg_name = None else : kwarg_name = node . args . kwarg . arg #import utool as ut #ut.embed() if kwarg_name != target_kwargs_name : # target kwargs is still in scope ast . NodeVisitor . generic_visit ( self , node ) def visit_Call ( self , node ) : if debug : print ( '\nVISIT Call node = %r' % ( node , ) ) #print(ut.repr4(node.__dict__,)) if isinstance ( node . func , ast . Attribute ) : try : funcname = node . func . value . id + '.' + node . func . attr except AttributeError : funcname = None elif isinstance ( node . func , ast . Name ) : funcname = node . func . id else : raise NotImplementedError ( 'do not know how to parse: node.func = %r' % ( node . func , ) ) if six . PY2 : kwargs = node . kwargs kwargs_name = None if kwargs is None else kwargs . id if funcname is not None and kwargs_name == target_kwargs_name : child_funcnamess . append ( funcname ) if debug : print ( 'funcname = %r' % ( funcname , ) ) print ( 'kwargs_name = %r' % ( kwargs_name , ) ) else : if node . 
keywords : for kwargs in node . keywords : if kwargs . arg is None : if hasattr ( kwargs . value , 'id' ) : kwargs_name = kwargs . value . id if funcname is not None and kwargs_name == target_kwargs_name : child_funcnamess . append ( funcname ) if debug : print ( 'funcname = %r' % ( funcname , ) ) print ( 'kwargs_name = %r' % ( kwargs_name , ) ) ast . NodeVisitor . generic_visit ( self , node ) try : KwargParseVisitor ( ) . visit ( pt ) except Exception : raise pass #import utool as ut #if ut.SUPER_STRICT: # raise return child_funcnamess
9,171
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L1683-L1792
[ "def", "_index_audio_cmu", "(", "self", ",", "basename", "=", "None", ",", "replace_already_indexed", "=", "False", ")", ":", "self", ".", "_prepare_audio", "(", "basename", "=", "basename", ",", "replace_already_indexed", "=", "replace_already_indexed", ")", "for", "staging_audio_basename", "in", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", ":", "original_audio_name", "=", "''", ".", "join", "(", "staging_audio_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "[", ":", "-", "3", "]", "pocketsphinx_command", "=", "''", ".", "join", "(", "[", "\"pocketsphinx_continuous\"", ",", "\"-infile\"", ",", "str", "(", "\"{}/staging/{}\"", ".", "format", "(", "self", ".", "src_dir", ",", "staging_audio_basename", ")", ")", ",", "\"-time\"", ",", "\"yes\"", ",", "\"-logfn\"", ",", "\"/dev/null\"", "]", ")", "try", ":", "if", "self", ".", "get_verbosity", "(", ")", ":", "print", "(", "\"Now indexing {}\"", ".", "format", "(", "staging_audio_basename", ")", ")", "output", "=", "subprocess", ".", "check_output", "(", "[", "\"pocketsphinx_continuous\"", ",", "\"-infile\"", ",", "str", "(", "\"{}/staging/{}\"", ".", "format", "(", "self", ".", "src_dir", ",", "staging_audio_basename", ")", ")", ",", "\"-time\"", ",", "\"yes\"", ",", "\"-logfn\"", ",", "\"/dev/null\"", "]", ",", "universal_newlines", "=", "True", ")", ".", "split", "(", "'\\n'", ")", "str_timestamps_with_sil_conf", "=", "list", "(", "map", "(", "lambda", "x", ":", "x", ".", "split", "(", "\" \"", ")", ",", "filter", "(", "None", ",", "output", "[", "1", ":", "]", ")", ")", ")", "# Timestamps are putted in a list of a single element. 
To match", "# Watson's output.", "self", ".", "__timestamps_unregulated", "[", "original_audio_name", "+", "\".wav\"", "]", "=", "[", "(", "self", ".", "_timestamp_extractor_cmu", "(", "staging_audio_basename", ",", "str_timestamps_with_sil_conf", ")", ")", "]", "if", "self", ".", "get_verbosity", "(", ")", ":", "print", "(", "\"Done indexing {}\"", ".", "format", "(", "staging_audio_basename", ")", ")", "except", "OSError", "as", "e", ":", "if", "self", ".", "get_verbosity", "(", ")", ":", "print", "(", "e", ",", "\"The command was: {}\"", ".", "format", "(", "pocketsphinx_command", ")", ")", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "staging_audio_basename", ")", "]", "=", "e", "self", ".", "_timestamp_regulator", "(", ")", "if", "self", ".", "get_verbosity", "(", ")", ":", "print", "(", "\"Finished indexing procedure\"", ")" ]
wrapper around inspect . getargspec but takes into account utool decorators
def get_func_argspec ( func ) : if hasattr ( func , '_utinfo' ) : argspec = func . _utinfo [ 'orig_argspec' ] return argspec if isinstance ( func , property ) : func = func . fget try : argspec = inspect . getargspec ( func ) except Exception : argspec = inspect . getfullargspec ( func ) return argspec
9,172
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L2445-L2458
[ "def", "finish", "(", "self", ")", ":", "log", ".", "debug", "(", "\"Session disconnected.\"", ")", "try", ":", "self", ".", "sock", ".", "shutdown", "(", "socket", ".", "SHUT_RDWR", ")", "except", ":", "pass", "self", ".", "session_end", "(", ")" ]
hacky inference of kwargs keys
def parse_func_kwarg_keys ( func , with_vals = False ) : sourcecode = get_func_sourcecode ( func , strip_docstr = True , strip_comments = True ) kwkeys = parse_kwarg_keys ( sourcecode , with_vals = with_vals ) #ut.get_func_kwargs TODO return kwkeys
9,173
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L2740-L2755
[ "def", "open", "(", "self", ",", "file_path", ")", ":", "if", "self", ".", "is_opened", "(", ")", "and", "self", ".", "workbook", ".", "file_path", "==", "file_path", ":", "self", ".", "_logger", ".", "logger", ".", "debug", "(", "\"workbook already opened: {}\"", ".", "format", "(", "self", ".", "workbook", ".", "file_path", ")", ")", "return", "self", ".", "close", "(", ")", "self", ".", "_open", "(", "file_path", ")" ]
func = ibeis . run_experiment
def get_func_kwargs ( func , recursive = True ) : import utool as ut argspec = ut . get_func_argspec ( func ) if argspec . defaults is None : header_kw = { } else : header_kw = dict ( zip ( argspec . args [ : : - 1 ] , argspec . defaults [ : : - 1 ] ) ) if argspec . keywords is not None : header_kw . update ( dict ( ut . recursive_parse_kwargs ( func ) ) ) return header_kw
9,174
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L2758-L2777
[ "def", "iter_dialogs", "(", "self", ",", "offset_date", ":", "int", "=", "0", ",", "limit", ":", "int", "=", "0", ")", "->", "Generator", "[", "\"pyrogram.Dialog\"", ",", "None", ",", "None", "]", ":", "current", "=", "0", "total", "=", "limit", "or", "(", "1", "<<", "31", ")", "-", "1", "limit", "=", "min", "(", "100", ",", "total", ")", "pinned_dialogs", "=", "self", ".", "get_dialogs", "(", "pinned_only", "=", "True", ")", ".", "dialogs", "for", "dialog", "in", "pinned_dialogs", ":", "yield", "dialog", "current", "+=", "1", "if", "current", ">=", "total", ":", "return", "while", "True", ":", "dialogs", "=", "self", ".", "get_dialogs", "(", "offset_date", "=", "offset_date", ",", "limit", "=", "limit", ")", ".", "dialogs", "if", "not", "dialogs", ":", "return", "offset_date", "=", "dialogs", "[", "-", "1", "]", ".", "top_message", ".", "date", "for", "dialog", "in", "dialogs", ":", "yield", "dialog", "current", "+=", "1", "if", "current", ">=", "total", ":", "return" ]
allows kwargs to be specified on the commandline from testfuncs
def argparse_funckw ( func , defaults = { } , * * kwargs ) : import utool as ut funckw_ = ut . get_funckw ( func , recursive = True ) funckw_ . update ( defaults ) funckw = ut . argparse_dict ( funckw_ , * * kwargs ) return funckw
9,175
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_inspect.py#L3030-L3069
[ "def", "get_analysis_type", "(", "self", ",", "instance", ")", ":", "if", "IDuplicateAnalysis", ".", "providedBy", "(", "instance", ")", ":", "return", "'d'", "elif", "IReferenceAnalysis", ".", "providedBy", "(", "instance", ")", ":", "return", "instance", ".", "getReferenceType", "(", ")", "elif", "IRoutineAnalysis", ".", "providedBy", "(", "instance", ")", ":", "return", "'a'", "return", "None" ]
Sets backend data using QVariants
def _qt_set_leaf_data ( self , qvar ) : if VERBOSE_PREF : print ( '' ) print ( '+--- [pref.qt_set_leaf_data]' ) print ( '[pref.qt_set_leaf_data] qvar = %r' % qvar ) print ( '[pref.qt_set_leaf_data] _intern.name=%r' % self . _intern . name ) print ( '[pref.qt_set_leaf_data] _intern.type_=%r' % self . _intern . get_type ( ) ) print ( '[pref.qt_set_leaf_data] type(_intern.value)=%r' % type ( self . _intern . value ) ) print ( '[pref.qt_set_leaf_data] _intern.value=%r' % self . _intern . value ) #print('[pref.qt_set_leaf_data] qvar.toString()=%s' % six.text_type(qvar.toString())) if self . _tree . parent is None : raise Exception ( '[Pref.qtleaf] Cannot set root preference' ) if self . qt_is_editable ( ) : new_val = '[Pref.qtleaf] BadThingsHappenedInPref' if self . _intern . value == PrefNode : raise Exception ( '[Pref.qtleaf] Qt can only change leafs' ) elif self . _intern . value is None : # None could be a number of types def cast_order ( var , order = [ bool , int , float , six . text_type ] ) : for type_ in order : try : ret = type_ ( var ) return ret except Exception : continue new_val = cast_order ( six . text_type ( qvar ) ) self . _intern . get_type ( ) if isinstance ( self . _intern . value , bool ) : #new_val = bool(qvar.toBool()) print ( 'qvar = %r' % ( qvar , ) ) new_val = util_type . smart_cast ( qvar , bool ) #new_val = bool(eval(qvar, {}, {})) print ( 'new_val = %r' % ( new_val , ) ) elif isinstance ( self . _intern . value , int ) : #new_val = int(qvar.toInt()[0]) new_val = int ( qvar ) # elif isinstance(self._intern.value, float): elif self . _intern . get_type ( ) in util_type . VALID_FLOAT_TYPES : #new_val = float(qvar.toDouble()[0]) new_val = float ( qvar ) elif isinstance ( self . _intern . value , six . string_types ) : #new_val = six.text_type(qvar.toString()) new_val = six . text_type ( qvar ) elif isinstance ( self . _intern . value , PrefChoice ) : #new_val = qvar.toString() new_val = six . text_type ( qvar ) if new_val . 
upper ( ) == 'NONE' : new_val = None else : try : #new_val = six.text_type(qvar.toString()) type_ = self . _intern . get_type ( ) if type_ is not None : new_val = type_ ( six . text_type ( qvar ) ) else : new_val = six . text_type ( qvar ) except Exception : raise NotImplementedError ( ( '[Pref.qtleaf] Unknown internal type. ' 'type(_intern.value) = %r, ' '_intern.get_type() = %r, ' ) % type ( self . _intern . value ) , self . _intern . get_type ( ) ) # Check for a set of None if isinstance ( new_val , six . string_types ) : if new_val . lower ( ) == 'none' : new_val = None elif new_val . lower ( ) == 'true' : new_val = True elif new_val . lower ( ) == 'false' : new_val = False # save to disk after modifying data if VERBOSE_PREF : print ( '---' ) print ( '[pref.qt_set_leaf_data] new_val=%r' % new_val ) print ( '[pref.qt_set_leaf_data] type(new_val)=%r' % type ( new_val ) ) print ( 'L____ [pref.qt_set_leaf_data]' ) # TODO Add ability to set a callback function when certain # preferences are changed. return self . _tree . parent . pref_update ( self . _intern . name , new_val ) return 'PrefNotEditable'
9,176
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L511-L591
[ "def", "read_dependencies", "(", "filename", ")", ":", "dependencies", "=", "[", "]", "filepath", "=", "os", ".", "path", ".", "join", "(", "'requirements'", ",", "filename", ")", "with", "open", "(", "filepath", ",", "'r'", ")", "as", "stream", ":", "for", "line", "in", "stream", ":", "package", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "'#'", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "package", "and", "package", ".", "split", "(", "' '", ")", "[", "0", "]", "!=", "'-r'", ":", "dependencies", ".", "append", "(", "package", ")", "return", "dependencies" ]
Toggles a boolean key
def toggle ( self , key ) : val = self [ key ] assert isinstance ( val , bool ) , 'key[%r] = %r is not a bool' % ( key , val ) self . pref_update ( key , not val )
9,177
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L143-L147
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", 
"word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Checks to see if a selection is a valid index or choice of a combo preference
def change_combo_val ( self , new_val ) : choice_obj = self . _intern . value assert isinstance ( self . _intern . value , PrefChoice ) , 'must be a choice' return choice_obj . get_tuple ( )
9,178
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L149-L156
[ "def", "IsHuntStarted", "(", "self", ")", ":", "state", "=", "self", ".", "hunt_obj", ".", "Get", "(", "self", ".", "hunt_obj", ".", "Schema", ".", "STATE", ")", "if", "state", "!=", "\"STARTED\"", ":", "return", "False", "# Stop the hunt due to expiry.", "if", "self", ".", "CheckExpiry", "(", ")", ":", "return", "False", "return", "True" ]
Wow this class is messed up . I had to overwrite items when moving to python3 just because I haden t called it yet
def iteritems ( self ) : for ( key , val ) in six . iteritems ( self . __dict__ ) : if key in self . _printable_exclude : continue yield ( key , val )
9,179
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L320-L328
[ "def", "_check_registry_type", "(", "folder", "=", "None", ")", ":", "folder", "=", "_registry_folder", "(", "folder", ")", "default_file", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'registry_type.txt'", ")", "try", ":", "with", "open", "(", "default_file", ",", "\"r\"", ")", "as", "infile", ":", "data", "=", "infile", ".", "read", "(", ")", "data", "=", "data", ".", "strip", "(", ")", "ComponentRegistry", ".", "SetBackingStore", "(", "data", ")", "except", "IOError", ":", "pass" ]
Converts prefeters to a dictionary . Children Pref can be optionally separated
def to_dict ( self , split_structs_bit = False ) : pref_dict = { } struct_dict = { } for ( key , val ) in six . iteritems ( self ) : if split_structs_bit and isinstance ( val , Pref ) : struct_dict [ key ] = val continue pref_dict [ key ] = val if split_structs_bit : return ( pref_dict , struct_dict ) return pref_dict
9,180
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L342-L354
[ "def", "_compute_inter_event_std", "(", "self", ",", "C", ",", "C_PGA", ",", "pga1100", ",", "mag", ",", "vs30", ")", ":", "tau_0", "=", "self", ".", "_compute_std_0", "(", "C", "[", "'s3'", "]", ",", "C", "[", "'s4'", "]", ",", "mag", ")", "tau_b_pga", "=", "self", ".", "_compute_std_0", "(", "C_PGA", "[", "'s3'", "]", ",", "C_PGA", "[", "'s4'", "]", ",", "mag", ")", "delta_amp", "=", "self", ".", "_compute_partial_derivative_site_amp", "(", "C", ",", "pga1100", ",", "vs30", ")", "std_inter", "=", "np", ".", "sqrt", "(", "tau_0", "**", "2", "+", "(", "delta_amp", "**", "2", ")", "*", "(", "tau_b_pga", "**", "2", ")", "+", "2", "*", "delta_amp", "*", "tau_0", "*", "tau_b_pga", "*", "C", "[", "'rho'", "]", ")", "return", "std_inter" ]
Saves prefs to disk in dict format
def save ( self ) : fpath = self . get_fpath ( ) if fpath in [ '' , None ] : if self . _tree . parent is not None : if VERBOSE_PREF : print ( '[pref.save] Can my parent save me?' ) # ...to disk return self . _tree . parent . save ( ) if VERBOSE_PREF : print ( '[pref.save] I cannot be saved. I have no parents.' ) return False with open ( fpath , 'wb' ) as f : print ( '[pref] Saving to ' + fpath ) pref_dict = self . to_dict ( ) pickle . dump ( pref_dict , f , protocol = 2 ) # Use protocol 2 to support python2 and 3 return True
9,181
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L358-L373
[ "def", "paginate", "(", "limit", ",", "start_arg", "=", "\"next_token\"", ",", "limit_arg", "=", "\"max_results\"", ")", ":", "default_start", "=", "0", "def", "outer_wrapper", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "start", "=", "int", "(", "default_start", "if", "kwargs", ".", "get", "(", "start_arg", ")", "is", "None", "else", "kwargs", "[", "start_arg", "]", ")", "lim", "=", "int", "(", "limit", "if", "kwargs", ".", "get", "(", "limit_arg", ")", "is", "None", "else", "kwargs", "[", "limit_arg", "]", ")", "stop", "=", "start", "+", "lim", "result", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "limited_results", "=", "list", "(", "itertools", ".", "islice", "(", "result", ",", "start", ",", "stop", ")", ")", "next_token", "=", "stop", "if", "stop", "<", "len", "(", "result", ")", "else", "None", "return", "limited_results", ",", "next_token", "return", "wrapper", "return", "outer_wrapper" ]
Read pref dict stored on disk . Overwriting current values .
def load ( self ) : if VERBOSE_PREF : print ( '[pref.load()]' ) #if not os.path.exists(self._intern.fpath): # msg = '[pref] fpath=%r does not exist' % (self._intern.fpath) # return msg fpath = self . get_fpath ( ) try : with open ( fpath , 'rb' ) as f : if VERBOSE_PREF : print ( 'load: %r' % fpath ) pref_dict = pickle . load ( f ) except EOFError as ex1 : util_dbg . printex ( ex1 , 'did not load pref fpath=%r correctly' % fpath , iswarning = True ) #warnings.warn(msg) raise #return msg except ImportError as ex2 : util_dbg . printex ( ex2 , 'did not load pref fpath=%r correctly' % fpath , iswarning = True ) #warnings.warn(msg) raise #return msg if not util_type . is_dict ( pref_dict ) : raise Exception ( 'Preference file is corrupted' ) self . add_dict ( pref_dict ) return True
9,182
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L378-L404
[ "def", "is_valid_package_name", "(", "name", ",", "raise_error", "=", "False", ")", ":", "is_valid", "=", "PACKAGE_NAME_REGEX", ".", "match", "(", "name", ")", "if", "raise_error", "and", "not", "is_valid", ":", "raise", "PackageRequestError", "(", "\"Not a valid package name: %r\"", "%", "name", ")", "return", "is_valid" ]
returns name all the way up the tree
def full_name ( self ) : if self . _tree . parent is None : return self . _intern . name return self . _tree . parent . full_name ( ) + '.' + self . _intern . name
9,183
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L418-L422
[ "def", "_hcsi_null_range", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "[", "HCSINullField", "(", "'Reserved{:02d}'", ".", "format", "(", "x", ")", ")", "for", "x", "in", "range", "(", "*", "args", ",", "*", "*", "kwargs", ")", "]" ]
Changes a preference value and saves it to disk
def pref_update ( self , key , new_val ) : print ( 'Update and save pref from: %s=%r, to: %s=%r' % ( key , six . text_type ( self [ key ] ) , key , six . text_type ( new_val ) ) ) self . __setattr__ ( key , new_val ) return self . save ( )
9,184
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/Preferences.py#L441-L446
[ "def", "format_title", "(", "self", ")", ":", "def", "asciify", "(", "_title", ")", ":", "_title", "=", "unicodedata", ".", "normalize", "(", "'NFD'", ",", "unicode", "(", "_title", ")", ")", "ascii", "=", "True", "out", "=", "[", "]", "ok", "=", "u\"1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM- ',\"", "for", "ch", "in", "_title", ":", "if", "ch", "in", "ok", ":", "out", ".", "append", "(", "ch", ")", "elif", "unicodedata", ".", "category", "(", "ch", ")", "[", "0", "]", "==", "(", "\"L\"", ")", ":", "#a letter", "out", ".", "append", "(", "hex", "(", "ord", "(", "ch", ")", ")", ")", "ascii", "=", "False", "elif", "ch", "in", "u'\\r\\n\\t'", ":", "out", ".", "append", "(", "u'-'", ")", "return", "(", "ascii", ",", "sub", "(", "\"[ ',-]+\"", ",", "'-'", ",", "\"\"", ".", "join", "(", "out", ")", ")", ")", "(", "ascii", ",", "_title", ")", "=", "asciify", "(", "self", ".", "meta", ".", "title", ")", "if", "not", "ascii", "and", "self", ".", "meta", ".", "alternative_title", ":", "(", "ascii", ",", "_title2", ")", "=", "asciify", "(", "self", ".", "meta", ".", "alternative_title", ")", "if", "ascii", ":", "_title", "=", "_title2", "title_length", "=", "99", "-", "len", "(", "str", "(", "self", ".", "book_id", ")", ")", "-", "1", "if", "len", "(", "_title", ")", ">", "title_length", ":", "# if the title was shortened, replace the trailing _ with an ellipsis", "repo_title", "=", "\"{0}__{1}\"", ".", "format", "(", "_title", "[", ":", "title_length", "]", ",", "self", ".", "book_id", ")", "else", ":", "repo_title", "=", "\"{0}_{1}\"", ".", "format", "(", "_title", "[", ":", "title_length", "]", ",", "self", ".", "book_id", ")", "logger", ".", "debug", "(", "\"%s %s\"", "%", "(", "len", "(", "repo_title", ")", ",", "repo_title", ")", ")", "self", ".", "meta", ".", "metadata", "[", "'_repo'", "]", "=", "repo_title", "return", "repo_title" ]
This call returns current login user s permissions .
def __get_permissions ( self , res , * * kwargs ) : response = res . _ ( * * kwargs ) return response . get ( 'permissions' , None )
9,185
https://github.com/product-definition-center/pdc-client/blob/7236fd8b72e675ebb321bbe337289d9fbeb6119f/pdc_client/plugins/permission.py#L32-L37
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
dynamically injects registered module methods into a class instance
def inject_all_external_modules ( self , classname = None , allow_override = 'override+warn' , strict = True ) : #import utool as ut if classname is None : classname = self . __class__ . __name__ #import utool as ut #ut.embed() NEW = True if NEW : classkey_list = [ key for key in __CLASSTYPE_ATTRIBUTES__ if key [ 0 ] == classname ] else : injected_modules = get_injected_modules ( classname ) # the variable must be named CLASS_INJECT_KEY # and only one class can be specified per module. classkey_list = [ module . CLASS_INJECT_KEY for module in injected_modules ] for classkey in classkey_list : inject_instance ( self , classkey = classkey , allow_override = allow_override , strict = False ) for classkey in classkey_list : postinject_instance ( self , classkey = classkey )
9,186
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L128-L160
[ "def", "get_can_error_counter", "(", "self", ",", "channel", ")", ":", "tx_error_counter", "=", "DWORD", "(", "0", ")", "rx_error_counter", "=", "DWORD", "(", "0", ")", "UcanGetCanErrorCounter", "(", "self", ".", "_handle", ",", "channel", ",", "byref", "(", "tx_error_counter", ")", ",", "byref", "(", "rx_error_counter", ")", ")", "return", "tx_error_counter", ",", "rx_error_counter" ]
Will inject all decorated function as methods of classkey
def decorate_class_method ( func , classkey = None , skipmain = False ) : #import utool as ut global __CLASSTYPE_ATTRIBUTES__ assert classkey is not None , 'must specify classkey' #if not (skipmain and ut.get_caller_modname() == '__main__'): __CLASSTYPE_ATTRIBUTES__ [ classkey ] . append ( func ) return func
9,187
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L413-L426
[ "def", "SearchSeasonDirTable", "(", "self", ",", "showID", ",", "seasonNum", ")", ":", "goodlogging", ".", "Log", ".", "Info", "(", "\"DB\"", ",", "\"Looking up directory for ShowID={0} Season={1} in database\"", ".", "format", "(", "showID", ",", "seasonNum", ")", ",", "verbosity", "=", "self", ".", "logVerbosity", ")", "queryString", "=", "\"SELECT SeasonDir FROM SeasonDir WHERE ShowID=? AND Season=?\"", "queryTuple", "=", "(", "showID", ",", "seasonNum", ")", "result", "=", "self", ".", "_ActionDatabase", "(", "queryString", ",", "queryTuple", ",", "error", "=", "False", ")", "if", "result", "is", "None", ":", "goodlogging", ".", "Log", ".", "Info", "(", "\"DB\"", ",", "\"No match found in database\"", ",", "verbosity", "=", "self", ".", "logVerbosity", ")", "return", "None", "elif", "len", "(", "result", ")", "==", "0", ":", "return", "None", "elif", "len", "(", "result", ")", "==", "1", ":", "goodlogging", ".", "Log", ".", "Info", "(", "\"DB\"", ",", "\"Found database match: {0}\"", ".", "format", "(", "result", ")", ",", "verbosity", "=", "self", ".", "logVerbosity", ")", "return", "result", "[", "0", "]", "[", "0", "]", "elif", "len", "(", "result", ")", ">", "1", ":", "goodlogging", ".", "Log", ".", "Fatal", "(", "\"DB\"", ",", "\"Database corrupted - multiple matches found in database table for: {0}\"", ".", "format", "(", "result", ")", ")" ]
Will perform func with argument self after inject_instance is called on classkey
def decorate_postinject ( func , classkey = None , skipmain = False ) : #import utool as ut global __CLASSTYPE_POSTINJECT_FUNCS__ assert classkey is not None , 'must specify classkey' #if not (skipmain and ut.get_caller_modname() == '__main__'): __CLASSTYPE_POSTINJECT_FUNCS__ [ classkey ] . append ( func ) return func
9,188
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L429-L440
[ "def", "_check_registry_type", "(", "folder", "=", "None", ")", ":", "folder", "=", "_registry_folder", "(", "folder", ")", "default_file", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'registry_type.txt'", ")", "try", ":", "with", "open", "(", "default_file", ",", "\"r\"", ")", "as", "infile", ":", "data", "=", "infile", ".", "read", "(", ")", "data", "=", "data", ".", "strip", "(", ")", "ComponentRegistry", ".", "SetBackingStore", "(", "data", ")", "except", "IOError", ":", "pass" ]
Injects a function into an object as a method
def inject_func_as_method ( self , func , method_name = None , class_ = None , allow_override = False , allow_main = False , verbose = True , override = None , force = False ) : if override is not None : # TODO depcirate allow_override allow_override = override if method_name is None : method_name = get_funcname ( func ) if force : allow_override = True allow_main = True old_method = getattr ( self , method_name , None ) # Bind function to the class instance #new_method = types.MethodType(func, self, self.__class__) new_method = func . __get__ ( self , self . __class__ ) #new_method = profile(func.__get__(self, self.__class__)) if old_method is not None : old_im_func = get_method_func ( old_method ) new_im_func = get_method_func ( new_method ) if not allow_main and old_im_func is not None and ( get_funcglobals ( old_im_func ) [ '__name__' ] != '__main__' and get_funcglobals ( new_im_func ) [ '__name__' ] == '__main__' ) : if True or VERBOSE_CLASS : print ( '[util_class] skipping re-inject of %r from __main__' % method_name ) return if old_method is new_method or old_im_func is new_im_func : #if verbose and util_arg.NOT_QUIET: # print('WARNING: Skipping injecting the same function twice: %r' % new_method) #print('WARNING: Injecting the same function twice: %r' % new_method) return elif allow_override is False : raise AssertionError ( 'Overrides are not allowed. Already have method_name=%r' % ( method_name ) ) elif allow_override == 'warn' : print ( 'WARNING: Overrides are not allowed. Already have method_name=%r. Skipping' % ( method_name ) ) return elif allow_override == 'override+warn' : #import utool as ut #ut.embed() print ( 'WARNING: Overrides are allowed, but dangerous. method_name=%r.' 
% ( method_name ) ) print ( 'old_method = %r, im_func=%s' % ( old_method , str ( old_im_func ) ) ) print ( 'new_method = %r, im_func=%s' % ( new_method , str ( new_im_func ) ) ) print ( get_funcglobals ( old_im_func ) [ '__name__' ] ) print ( get_funcglobals ( new_im_func ) [ '__name__' ] ) # TODO: does this actually decrement the refcount enough? del old_method setattr ( self , method_name , new_method )
9,189
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L454-L520
[ "def", "get_price_id_list", "(", "self", ",", "package_keyname", ",", "item_keynames", ",", "core", "=", "None", ")", ":", "mask", "=", "'id, itemCategory, keyName, prices[categories]'", "items", "=", "self", ".", "list_items", "(", "package_keyname", ",", "mask", "=", "mask", ")", "prices", "=", "[", "]", "category_dict", "=", "{", "\"gpu0\"", ":", "-", "1", ",", "\"pcie_slot0\"", ":", "-", "1", "}", "for", "item_keyname", "in", "item_keynames", ":", "try", ":", "# Need to find the item in the package that has a matching", "# keyName with the current item we are searching for", "matching_item", "=", "[", "i", "for", "i", "in", "items", "if", "i", "[", "'keyName'", "]", "==", "item_keyname", "]", "[", "0", "]", "except", "IndexError", ":", "raise", "exceptions", ".", "SoftLayerError", "(", "\"Item {} does not exist for package {}\"", ".", "format", "(", "item_keyname", ",", "package_keyname", ")", ")", "# we want to get the price ID that has no location attached to it,", "# because that is the most generic price. 
verifyOrder/placeOrder", "# can take that ID and create the proper price for us in the location", "# in which the order is made", "item_category", "=", "matching_item", "[", "'itemCategory'", "]", "[", "'categoryCode'", "]", "if", "item_category", "not", "in", "category_dict", ":", "price_id", "=", "self", ".", "get_item_price_id", "(", "core", ",", "matching_item", "[", "'prices'", "]", ")", "else", ":", "# GPU and PCIe items has two generic prices and they are added to the list", "# according to the number of items in the order.", "category_dict", "[", "item_category", "]", "+=", "1", "category_code", "=", "item_category", "[", ":", "-", "1", "]", "+", "str", "(", "category_dict", "[", "item_category", "]", ")", "price_id", "=", "[", "p", "[", "'id'", "]", "for", "p", "in", "matching_item", "[", "'prices'", "]", "if", "not", "p", "[", "'locationGroupId'", "]", "and", "p", "[", "'categories'", "]", "[", "0", "]", "[", "'categoryCode'", "]", "==", "category_code", "]", "[", "0", "]", "prices", ".", "append", "(", "price_id", ")", "return", "prices" ]
This is actually quite simple
def inject_func_as_unbound_method ( class_ , func , method_name = None ) : if method_name is None : method_name = get_funcname ( func ) setattr ( class_ , method_name , func )
9,190
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L538-L542
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
hack for pyqt
def reloading_meta_metaclass_factory ( BASE_TYPE = type ) : class ReloadingMetaclass2 ( BASE_TYPE ) : def __init__ ( metaself , name , bases , dct ) : super ( ReloadingMetaclass2 , metaself ) . __init__ ( name , bases , dct ) #print('Making rrr for %r' % (name,)) metaself . rrr = reload_class return ReloadingMetaclass2
9,191
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L695-L702
[ "def", "stc_system_info", "(", "stc_addr", ")", ":", "stc", "=", "stchttp", ".", "StcHttp", "(", "stc_addr", ")", "sessions", "=", "stc", ".", "sessions", "(", ")", "if", "sessions", ":", "# If a session already exists, use it to get STC information.", "stc", ".", "join_session", "(", "sessions", "[", "0", "]", ")", "sys_info", "=", "stc", ".", "system_info", "(", ")", "else", ":", "# Create a new session to get STC information.", "stc", ".", "new_session", "(", "'anonymous'", ")", "try", ":", "sys_info", "=", "stc", ".", "system_info", "(", ")", "finally", ":", "# Make sure the temporary session in terminated.", "stc", ".", "end_session", "(", ")", "return", "sys_info" ]
special class reloading function This function is often injected as rrr of classes
def reload_class ( self , verbose = True , reload_module = True ) : import utool as ut verbose = verbose or VERBOSE_CLASS classname = self . __class__ . __name__ try : modname = self . __class__ . __module__ if verbose : print ( '[class] reloading ' + classname + ' from ' + modname ) # --HACK-- if hasattr ( self , '_on_reload' ) : if verbose > 1 : print ( '[class] calling _on_reload for ' + classname ) self . _on_reload ( ) elif verbose > 1 : print ( '[class] ' + classname + ' does not have an _on_reload function' ) # Do for all inheriting classes def find_base_clases ( _class , find_base_clases = None ) : class_list = [ ] for _baseclass in _class . __bases__ : parents = find_base_clases ( _baseclass , find_base_clases ) class_list . extend ( parents ) if _class is not object : class_list . append ( _class ) return class_list head_class = self . __class__ # Determine if parents need reloading class_list = find_base_clases ( head_class , find_base_clases ) # HACK ignore = { HashComparable2 } class_list = [ _class for _class in class_list if _class not in ignore ] for _class in class_list : if verbose : print ( '[class] reloading parent ' + _class . __name__ + ' from ' + _class . __module__ ) if _class . __module__ == '__main__' : # Attempt to find the module that is the main module # This may be very hacky and potentially break main_module_ = sys . modules [ _class . __module__ ] main_modname = ut . get_modname_from_modpath ( main_module_ . __file__ ) module_ = sys . modules [ main_modname ] else : module_ = sys . modules [ _class . __module__ ] if hasattr ( module_ , 'rrr' ) : if reload_module : module_ . rrr ( verbose = verbose ) else : if reload_module : import imp if verbose : print ( '[class] reloading ' + _class . __module__ + ' with imp' ) try : imp . reload ( module_ ) except ( ImportError , AttributeError ) : print ( '[class] fallback reloading ' + _class . __module__ + ' with imp' ) # one last thing to try. 
probably used ut.import_module_from_fpath # when importing this module imp . load_source ( module_ . __name__ , module_ . __file__ ) # Reset class attributes _newclass = getattr ( module_ , _class . __name__ ) reload_class_methods ( self , _newclass , verbose = verbose ) # --HACK-- # TODO: handle injected definitions if hasattr ( self , '_initialize_self' ) : if verbose > 1 : print ( '[class] calling _initialize_self for ' + classname ) self . _initialize_self ( ) elif verbose > 1 : print ( '[class] ' + classname + ' does not have an _initialize_self function' ) except Exception as ex : ut . printex ( ex , 'Error Reloading Class' , keys = [ 'modname' , 'module' , 'class_' , 'class_list' , 'self' , ] ) raise
9,192
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L705-L785
[ "def", "default_bitcoind_opts", "(", "config_file", "=", "None", ",", "prefix", "=", "False", ")", ":", "default_bitcoin_opts", "=", "virtualchain", ".", "get_bitcoind_config", "(", "config_file", "=", "config_file", ")", "# drop dict values that are None", "default_bitcoin_opts", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "default_bitcoin_opts", ".", "items", "(", ")", "if", "v", "is", "not", "None", "}", "# strip 'bitcoind_'", "if", "not", "prefix", ":", "default_bitcoin_opts", "=", "opt_strip", "(", "'bitcoind_'", ",", "default_bitcoin_opts", ")", "return", "default_bitcoin_opts" ]
rebinds all class methods
def reload_class_methods ( self , class_ , verbose = True ) : if verbose : print ( '[util_class] Reloading self=%r as class_=%r' % ( self , class_ ) ) self . __class__ = class_ for key in dir ( class_ ) : # Get unbound reloaded method func = getattr ( class_ , key ) if isinstance ( func , types . MethodType ) : # inject it into the old instance inject_func_as_method ( self , func , class_ = class_ , allow_override = True , verbose = verbose )
9,193
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L788-L814
[ "def", "jpgMin", "(", "file", ",", "force", "=", "False", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "file", "+", "'.original'", ")", "or", "force", ":", "data", "=", "_read", "(", "file", ",", "'rb'", ")", "_save", "(", "file", "+", "'.original'", ",", "data", ",", "'w+b'", ")", "print", "'Optmising JPG {} - {:.2f}kB'", ".", "format", "(", "file", ",", "len", "(", "data", ")", "/", "1024.0", ")", ",", "url", "=", "'http://jpgoptimiser.com/optimise'", "parts", ",", "headers", "=", "encode_multipart", "(", "{", "}", ",", "{", "'input'", ":", "{", "'filename'", ":", "'wherever.jpg'", ",", "'content'", ":", "data", "}", "}", ")", "req", "=", "urllib2", ".", "Request", "(", "url", ",", "data", "=", "parts", ",", "headers", "=", "headers", ")", "try", ":", "f", "=", "urllib2", ".", "urlopen", "(", "req", ")", "response", "=", "f", ".", "read", "(", ")", "f", ".", "close", "(", ")", "print", "' - {:.2f} - {:.1f}%'", ".", "format", "(", "len", "(", "response", ")", "/", "1024.0", ",", "100.0", "*", "len", "(", "response", ")", "/", "len", "(", "data", ")", ")", "_save", "(", "file", ",", "response", ",", "'w+b'", ")", "except", ":", "print", "'Oops!! Failed :('", "return", "1", "else", ":", "print", "'Ignoring file: {}'", ".", "format", "(", "file", ")", "return", "0" ]
removes the python obfuscation of class privates so they can be executed as they appear in class source . Useful when playing with IPython .
def remove_private_obfuscation ( self ) : classname = self . __class__ . __name__ attrlist = [ attr for attr in dir ( self ) if attr . startswith ( '_' + classname + '__' ) ] for attr in attrlist : method = getattr ( self , attr ) truename = attr . replace ( '_' + classname + '__' , '__' ) setattr ( self , truename , method )
9,194
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_class.py#L968-L978
[ "def", "_config_options", "(", "self", ")", ":", "self", ".", "_config_sortable", "(", "self", ".", "_sortable", ")", "self", ".", "_config_drag_cols", "(", "self", ".", "_drag_cols", ")" ]
Calls lower level function to create a peptide quant lookup
def create_peptidequant_lookup ( fns , pqdb , poolnames , pepseq_colnr , ms1_qcolpattern = None , isobqcolpattern = None , psmnrpattern = None , fdrcolpattern = None , pepcolpattern = None ) : patterns = [ ms1_qcolpattern , fdrcolpattern , pepcolpattern ] storefuns = [ pqdb . store_precursor_quants , pqdb . store_fdr , pqdb . store_pep ] create_pep_protein_quant_lookup ( fns , pqdb , poolnames , pepseq_colnr , patterns , storefuns , isobqcolpattern , psmnrpattern )
9,195
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/proteinquant.py#L5-L15
[ "def", "catch_websocket_connection_errors", "(", ")", ":", "try", ":", "yield", "except", "websocket", ".", "WebSocketConnectionClosedException", ":", "raise", "ConnectionError", "(", "\"Connection already closed. SC2 probably crashed. \"", "\"Check the error log.\"", ")", "except", "websocket", ".", "WebSocketTimeoutException", ":", "raise", "ConnectionError", "(", "\"Websocket timed out.\"", ")", "except", "socket", ".", "error", "as", "e", ":", "raise", "ConnectionError", "(", "\"Socket error: %s\"", "%", "e", ")" ]
Calls lower level function to create a protein quant lookup
def create_proteinquant_lookup ( fns , pqdb , poolnames , protacc_colnr , ms1_qcolpattern = None , isobqcolpattern = None , psmnrpattern = None , probcolpattern = None , fdrcolpattern = None , pepcolpattern = None ) : patterns = [ ms1_qcolpattern , probcolpattern , fdrcolpattern , pepcolpattern ] storefuns = [ pqdb . store_precursor_quants , pqdb . store_probability , pqdb . store_fdr , pqdb . store_pep ] create_pep_protein_quant_lookup ( fns , pqdb , poolnames , protacc_colnr , patterns , storefuns , isobqcolpattern , psmnrpattern )
9,196
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/proteinquant.py#L18-L28
[ "def", "catch_websocket_connection_errors", "(", ")", ":", "try", ":", "yield", "except", "websocket", ".", "WebSocketConnectionClosedException", ":", "raise", "ConnectionError", "(", "\"Connection already closed. SC2 probably crashed. \"", "\"Check the error log.\"", ")", "except", "websocket", ".", "WebSocketTimeoutException", ":", "raise", "ConnectionError", "(", "\"Websocket timed out.\"", ")", "except", "socket", ".", "error", "as", "e", ":", "raise", "ConnectionError", "(", "\"Socket error: %s\"", "%", "e", ")" ]
Does the work when creating peptide and protein quant lookups . This loops through storing options and parses columns passing on to the storing functions
def create_pep_protein_quant_lookup ( fns , pqdb , poolnames , featcolnr , patterns , storefuns , isobqcolpattern = None , psmnrpattern = None ) : tablefn_map = create_tablefn_map ( fns , pqdb , poolnames ) feat_map = pqdb . get_feature_map ( ) for pattern , storefun in zip ( patterns , storefuns ) : if pattern is None : continue colmap = get_colmap ( fns , pattern , single_col = True ) if colmap : store_single_col_data ( fns , tablefn_map , feat_map , storefun , featcolnr , colmap ) if isobqcolpattern is not None : isocolmap = get_colmap ( fns , isobqcolpattern , antipattern = psmnrpattern ) else : return if psmnrpattern is not None : psmcolmap = get_colmap ( fns , psmnrpattern ) else : psmcolmap = False create_isobaric_quant_lookup ( fns , tablefn_map , feat_map , pqdb , featcolnr , isocolmap , psmcolmap )
9,197
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/proteinquant.py#L31-L57
[ "def", "set_advanced_configs", "(", "vm_name", ",", "datacenter", ",", "advanced_configs", ",", "service_instance", "=", "None", ")", ":", "current_config", "=", "get_vm_config", "(", "vm_name", ",", "datacenter", "=", "datacenter", ",", "objects", "=", "True", ",", "service_instance", "=", "service_instance", ")", "diffs", "=", "compare_vm_configs", "(", "{", "'name'", ":", "vm_name", ",", "'advanced_configs'", ":", "advanced_configs", "}", ",", "current_config", ")", "datacenter_ref", "=", "salt", ".", "utils", ".", "vmware", ".", "get_datacenter", "(", "service_instance", ",", "datacenter", ")", "vm_ref", "=", "salt", ".", "utils", ".", "vmware", ".", "get_mor_by_property", "(", "service_instance", ",", "vim", ".", "VirtualMachine", ",", "vm_name", ",", "property_name", "=", "'name'", ",", "container_ref", "=", "datacenter_ref", ")", "config_spec", "=", "vim", ".", "vm", ".", "ConfigSpec", "(", ")", "changes", "=", "diffs", "[", "'advanced_configs'", "]", ".", "diffs", "_apply_advanced_config", "(", "config_spec", ",", "diffs", "[", "'advanced_configs'", "]", ".", "new_values", ",", "vm_ref", ".", "config", ".", "extraConfig", ")", "if", "changes", ":", "salt", ".", "utils", ".", "vmware", ".", "update_vm", "(", "vm_ref", ",", "config_spec", ")", "return", "{", "'advanced_config_changes'", ":", "changes", "}" ]
General method to store single column data from protein tables in lookup
def store_single_col_data ( fns , prottable_id_map , pacc_map , pqdbmethod , protacc_colnr , colmap ) : to_store = [ ] for fn , header , pquant in tsvreader . generate_tsv_pep_protein_quants ( fns ) : pacc_id = pacc_map [ pquant [ header [ protacc_colnr ] ] ] pqdata = ( pacc_id , prottable_id_map [ fn ] , pquant [ colmap [ fn ] ] ) to_store . append ( pqdata ) if len ( to_store ) > 10000 : pqdbmethod ( to_store ) to_store = [ ] pqdbmethod ( to_store )
9,198
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/proteinquant.py#L87-L99
[ "def", "_send_unary_request", "(", "self", ",", "request", ")", ":", "if", "request", ".", "ack_ids", ":", "self", ".", "_client", ".", "acknowledge", "(", "subscription", "=", "self", ".", "_subscription", ",", "ack_ids", "=", "list", "(", "request", ".", "ack_ids", ")", ")", "if", "request", ".", "modify_deadline_ack_ids", ":", "# Send ack_ids with the same deadline seconds together.", "deadline_to_ack_ids", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "n", ",", "ack_id", "in", "enumerate", "(", "request", ".", "modify_deadline_ack_ids", ")", ":", "deadline", "=", "request", ".", "modify_deadline_seconds", "[", "n", "]", "deadline_to_ack_ids", "[", "deadline", "]", ".", "append", "(", "ack_id", ")", "for", "deadline", ",", "ack_ids", "in", "six", ".", "iteritems", "(", "deadline_to_ack_ids", ")", ":", "self", ".", "_client", ".", "modify_ack_deadline", "(", "subscription", "=", "self", ".", "_subscription", ",", "ack_ids", "=", "ack_ids", ",", "ack_deadline_seconds", "=", "deadline", ",", ")", "_LOGGER", ".", "debug", "(", "\"Sent request(s) over unary RPC.\"", ")" ]
This function yields tuples of table filename isobaric quant column and if necessary number - of - PSM column
def map_psmnrcol_to_quantcol ( quantcols , psmcols , tablefn_map ) : if not psmcols : for fn in quantcols : for qcol in quantcols [ fn ] : yield ( tablefn_map [ fn ] , qcol ) else : for fn in quantcols : for qcol , psmcol in zip ( quantcols [ fn ] , psmcols [ fn ] ) : yield ( tablefn_map [ fn ] , qcol , psmcol )
9,199
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/proteinquant.py#L122-L132
[ "def", "wrap_get_user", "(", "cls", ",", "response", ")", ":", "json", "=", "response", ".", "json", "(", ")", "u", "=", "cls", ".", "wrap_json", "(", "json", ")", "return", "u" ]