query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Tries to load cached data, but computes it with the given compute function on a cache miss.
def tryload_cache_list_with_compute(use_cache, dpath, fname, cfgstr_list,
                                    compute_fn, *args):
    """
    Tries to load a list of cached values; computes any misses with
    ``compute_fn`` and writes them back to the cache.

    Args:
        use_cache (bool): if False, skip loading and saving entirely and
            compute every value.
        dpath (str): cache directory path
        fname (str): base cache file name
        cfgstr_list (list of str): one configuration string per item
        compute_fn (callable): called as ``compute_fn(ismiss_list, *args)``;
            must return the computed data for the missing entries.
        *args: extra arguments forwarded to ``compute_fn``

    Returns:
        list: data for each entry of ``cfgstr_list``
    """
    if use_cache is False:
        # Don't load or save, just compute everything.
        # (Fix: removed a dead ``data_list = [None] * len(cfgstr_list)``
        # assignment that was immediately overwritten below.)
        ismiss_list = [True] * len(cfgstr_list)
        data_list = compute_fn(ismiss_list, *args)
        return data_list
    else:
        # Load precomputed values
        data_list, ismiss_list = tryload_cache_list(dpath, fname, cfgstr_list,
                                                    verbose=False)
        num_total = len(cfgstr_list)
        if any(ismiss_list):
            # Compute missing values
            newdata_list = compute_fn(ismiss_list, *args)
            newcfgstr_list = util_list.compress(cfgstr_list, ismiss_list)
            index_list = util_list.list_where(ismiss_list)
            print('[cache] %d/%d cache hits for %s in %s' % (
                num_total - len(index_list), num_total, fname,
                util_path.tail(dpath)))
            # Cache write
            for newcfgstr, newdata in zip(newcfgstr_list, newdata_list):
                save_cache(dpath, fname, newcfgstr, newdata, verbose=False)
            # Populate missing result
            for index, newdata in zip(index_list, newdata_list):
                data_list[index] = newdata
        else:
            print('[cache] %d/%d cache hits for %s in %s' % (
                num_total, num_total, fname, util_path.tail(dpath)))
        return data_list
9,000
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L284-L319
[ "def", "value", "(", "self", ",", "name", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "_StructuredElement", ")", ":", "self", ".", "_struct_fields_as_values", "(", "name", ",", "value", ")", "elif", "name", ".", "startswith", "(", "'header:'", ")", ":", "self", ".", "_header_values", "[", "name", ".", "partition", "(", "':'", ")", "[", "-", "1", "]", "]", "=", "value", "else", ":", "self", ".", "_field_values", "[", "name", "]", "=", "value" ]
r Converts a python object to a JSON string using the utool convention
def to_json(val, allow_pickle=False, pretty=False):
    """Convert a Python object to a JSON string using the utool convention.

    Args:
        val: the object to serialize
        allow_pickle (bool): permit pickle-based fallback encoding
        pretty (bool): indent the output for human readability

    Returns:
        str: the JSON representation of ``val``
    """
    encoder_cls = make_utool_json_encoder(allow_pickle)
    json_kw = {'cls': encoder_cls}
    if pretty:
        json_kw['indent'] = 4
        json_kw['separators'] = (',', ': ')
    return json.dumps(val, **json_kw)
9,001
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L532-L600
[ "def", "read_stats", "(", "self", ",", "*", "stats", ")", ":", "self", ".", "statistics", "=", "OrderedDict", "(", ")", "for", "port", "in", "self", ".", "ports", ":", "port_stats", "=", "IxeStatTotal", "(", "port", ")", ".", "get_attributes", "(", "FLAG_RDONLY", ",", "*", "stats", ")", "port_stats", ".", "update", "(", "{", "c", "+", "'_rate'", ":", "v", "for", "c", ",", "v", "in", "IxeStatRate", "(", "port", ")", ".", "get_attributes", "(", "FLAG_RDONLY", ",", "*", "stats", ")", ".", "items", "(", ")", "}", ")", "self", ".", "statistics", "[", "str", "(", "port", ")", "]", "=", "port_stats", "return", "self", ".", "statistics" ]
Decodes a JSON object specified in the utool convention
def from_json(json_str, allow_pickle=False):
    """Decode a JSON string encoded in the utool convention.

    Args:
        json_str (str or bytes): JSON text; bytes are decoded as UTF-8 on PY3
        allow_pickle (bool): permit pickle-based decoding of special objects

    Returns:
        object: the decoded value
    """
    if six.PY3 and isinstance(json_str, bytes):
        json_str = json_str.decode('utf-8')
    # The encoder class also carries the matching object hook for decoding.
    hook = make_utool_json_encoder(allow_pickle)._json_object_hook
    return json.loads(json_str, object_hook=hook)
9,002
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L603-L634
[ "def", "percentile_ranks", "(", "self", ",", "affinities", ",", "allele", "=", "None", ",", "alleles", "=", "None", ",", "throw", "=", "True", ")", ":", "if", "allele", "is", "not", "None", ":", "try", ":", "transform", "=", "self", ".", "allele_to_percent_rank_transform", "[", "allele", "]", "return", "transform", ".", "transform", "(", "affinities", ")", "except", "KeyError", ":", "msg", "=", "\"Allele %s has no percentile rank information\"", "%", "allele", "if", "throw", ":", "raise", "ValueError", "(", "msg", ")", "else", ":", "warnings", ".", "warn", "(", "msg", ")", "# Return NaNs", "return", "numpy", ".", "ones", "(", "len", "(", "affinities", ")", ")", "*", "numpy", ".", "nan", "if", "alleles", "is", "None", ":", "raise", "ValueError", "(", "\"Specify allele or alleles\"", ")", "df", "=", "pandas", ".", "DataFrame", "(", "{", "\"affinity\"", ":", "affinities", "}", ")", "df", "[", "\"allele\"", "]", "=", "alleles", "df", "[", "\"result\"", "]", "=", "numpy", ".", "nan", "for", "(", "allele", ",", "sub_df", ")", "in", "df", ".", "groupby", "(", "\"allele\"", ")", ":", "df", ".", "loc", "[", "sub_df", ".", "index", ",", "\"result\"", "]", "=", "self", ".", "percentile_ranks", "(", "sub_df", ".", "affinity", ",", "allele", "=", "allele", ",", "throw", "=", "throw", ")", "return", "df", ".", "result", ".", "values" ]
Representation of an object as a cache string .
def cachestr_repr(val):
    """Representation of an object as a cache string.

    Tries a raw byte view first, then a JSON encoding, then a last-resort
    special case. NOTE(review): returns None when every strategy fails —
    callers appear to tolerate this; confirm before changing.
    """
    try:
        return memoryview(val).tobytes()
    except Exception:
        pass
    try:
        return to_json(val)
    except Exception:
        # SUPER HACK
        if repr(val.__class__) == "<class 'ibeis.control.IBEISControl.IBEISController'>":
            return val.get_dbname()
9,003
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L678-L691
[ "def", "reassign_comment_to_book", "(", "self", ",", "comment_id", ",", "from_book_id", ",", "to_book_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin", "self", ".", "assign_comment_to_book", "(", "comment_id", ",", "to_book_id", ")", "try", ":", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "from_book_id", ")", "except", ":", "# something went wrong, roll back assignment to to_book_id", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "to_book_id", ")", "raise" ]
r Wraps a function with a Cacher object
def cached_func(fname=None, cache_dir='default', appname='utool',
                key_argx=None, key_kwds=None, use_cache=None, verbose=None):
    r"""
    Wraps a function with a Cacher object.

    Args:
        fname (str): cache file name; defaults to the wrapped function's name
        cache_dir (str): directory key passed to Cacher
        appname (str): application name passed to Cacher
        key_argx: positional-argument indexes used to build the cache key
        key_kwds: keyword names used to build the cache key
        use_cache (bool): force caching on/off; when None the
            ``--nocache-<fname>`` command-line flag decides
        verbose (int): verbosity; defaults to VERBOSE_CACHE

    Returns:
        callable: a decorator that wraps a function with caching
    """
    if verbose is None:
        verbose = VERBOSE_CACHE
    def cached_closure(func):
        from utool import util_decor
        import utool as ut
        fname_ = util_inspect.get_funcname(func) if fname is None else fname
        kwdefaults = util_inspect.get_kwdefaults(func)
        argnames = util_inspect.get_argnames(func)
        if ut.is_method(func):
            # ignore self for methods
            argnames = argnames[1:]
        cacher = Cacher(fname_, cache_dir=cache_dir, appname=appname,
                        verbose=verbose)
        if use_cache is None:
            # caching can be disabled per-function from the command line
            use_cache_ = not util_arg.get_argflag('--nocache-' + fname_)
        else:
            use_cache_ = use_cache
        #_dbgdict = dict(fname_=fname_, key_kwds=key_kwds, appname=appname,
        #                key_argx=key_argx, use_cache_=use_cache_)
        #@functools.wraps(func)
        # NOTE: name is spelled "cached_wraper" (sic) throughout; kept as-is
        # because preserve_sig below rebinds the same identifier.
        def cached_wraper(*args, **kwargs):
            """
            Cached Wrapper Function

            Additional Kwargs:
                use_cache (bool) : enables cache
            """
            try:
                if verbose > 2:
                    print('[util_cache] computing cached function fname_=%s' %
                          (fname_,))
                # Implicitly adds use_cache to kwargs
                cfgstr = get_cfgstr_from_args(func, args, kwargs, key_argx,
                                              key_kwds, kwdefaults, argnames)
                if util_cplat.WIN32:
                    # remove potentially invalid chars
                    cfgstr = '_' + util_hash.hashstr27(cfgstr)
                assert cfgstr is not None, 'cfgstr=%r cannot be None' % (cfgstr,)
                # a per-call use_cache kwarg overrides the decorator default
                use_cache__ = kwargs.pop('use_cache', use_cache_)
                if use_cache__:
                    # Make cfgstr from specified input
                    data = cacher.tryload(cfgstr)
                    if data is not None:
                        return data
                # Cache missed; compute function
                data = func(*args, **kwargs)
                # Cache save
                # NOTE(review): save happens even when use_cache__ is False —
                # the commented-out guard below suggests this may be
                # intentional; confirm before changing.
                #if use_cache__:
                # TODO: save_cache
                cacher.save(data, cfgstr)
                return data
            #except ValueError as ex:  # handle protocal error
            except Exception as ex:
                from utool import util_dbg
                _dbgdict2 = dict(key_argx=key_argx, lenargs=len(args),
                                 lenkw=len(kwargs),)
                msg = '\n'.join([
                    '+--- UTOOL --- ERROR IN CACHED FUNCTION',
                    #'dbgdict = ' + utool.repr4(_dbgdict),
                    'dbgdict2 = ' + util_str.repr4(_dbgdict2),
                ])
                util_dbg.printex(ex, msg)
                raise
        # Give function a handle to the cacher object
        cached_wraper = util_decor.preserve_sig(cached_wraper, func)
        cached_wraper.cacher = cacher
        return cached_wraper
    return cached_closure
9,004
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L778-L884
[ "def", "site_url", "(", "self", ",", "url", ")", ":", "# Regular expression to URL validate", "regex", "=", "re", ".", "compile", "(", "r'^(?:http|https)://'", "# Scheme only HTTP/HTTPS", "r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+(?:[A-Z]{2,6}\\.?| \\\n [A-Z0-9-]{2,}(?<!-)\\.?)|'", "# Domain", "r'localhost|'", "# localhost...", "r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|'", "# or ipv4", "r'\\[?[A-F0-9]*:[A-F0-9:]+\\]?)'", "# or ipv6", "r'(?::\\d+)?'", "# Port", "r'(?:/?|[/?]\\S+)$'", ",", "re", ".", "IGNORECASE", ")", "# Validate URL", "if", "re", ".", "match", "(", "'^(?:http|https)://'", ",", "url", ")", ":", "if", "re", ".", "search", "(", "regex", ",", "url", ")", ":", "self", ".", "__site_url", "=", "url", "else", ":", "raise", "PybooruError", "(", "\"Invalid URL: {0}\"", ".", "format", "(", "url", ")", ")", "else", ":", "raise", "PybooruError", "(", "\"Invalid URL scheme, use HTTP or HTTPS: {0}\"", ".", "format", "(", "url", ")", ")" ]
Returns the filepath to the global shelf
def get_global_shelf_fpath(appname='default', ensure=False):
    """Return the file path of the global shelf for ``appname``.

    Args:
        appname (str): application cache namespace
        ensure (bool): create the cache directory if it does not exist

    Returns:
        str: path to the global shelf file
    """
    cache_dir = get_global_cache_dir(appname, ensure=ensure)
    return join(cache_dir, meta_util_constants.global_cache_fname)
9,005
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L906-L910
[ "def", "_deduce_security", "(", "kwargs", ")", "->", "nmcli", ".", "SECURITY_TYPES", ":", "# Security should be one of our valid strings", "sec_translation", "=", "{", "'wpa-psk'", ":", "nmcli", ".", "SECURITY_TYPES", ".", "WPA_PSK", ",", "'none'", ":", "nmcli", ".", "SECURITY_TYPES", ".", "NONE", ",", "'wpa-eap'", ":", "nmcli", ".", "SECURITY_TYPES", ".", "WPA_EAP", ",", "}", "if", "not", "kwargs", ".", "get", "(", "'securityType'", ")", ":", "if", "kwargs", ".", "get", "(", "'psk'", ")", "and", "kwargs", ".", "get", "(", "'eapConfig'", ")", ":", "raise", "ConfigureArgsError", "(", "'Cannot deduce security type: psk and eap both passed'", ")", "elif", "kwargs", ".", "get", "(", "'psk'", ")", ":", "kwargs", "[", "'securityType'", "]", "=", "'wpa-psk'", "elif", "kwargs", ".", "get", "(", "'eapConfig'", ")", ":", "kwargs", "[", "'securityType'", "]", "=", "'wpa-eap'", "else", ":", "kwargs", "[", "'securityType'", "]", "=", "'none'", "try", ":", "return", "sec_translation", "[", "kwargs", "[", "'securityType'", "]", "]", "except", "KeyError", ":", "raise", "ConfigureArgsError", "(", "'securityType must be one of {}'", ".", "format", "(", "','", ".", "join", "(", "sec_translation", ".", "keys", "(", ")", ")", ")", ")" ]
Writes cache files to a safe place in each operating system
def global_cache_write(key, val, appname='default'):
    """Write ``val`` under ``key`` into the global cache shelf.

    The shelf lives in a per-operating-system cache location resolved by
    GlobalShelfContext; the context manager handles open/close.
    """
    with GlobalShelfContext(appname) as shelf:
        shelf[key] = val
9,006
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L983-L986
[ "def", "column", "(", "self", ",", "column", ",", "option", "=", "None", ",", "*", "*", "kw", ")", ":", "config", "=", "False", "if", "option", "==", "'type'", ":", "return", "self", ".", "_column_types", "[", "column", "]", "elif", "'type'", "in", "kw", ":", "config", "=", "True", "self", ".", "_column_types", "[", "column", "]", "=", "kw", ".", "pop", "(", "'type'", ")", "if", "kw", ":", "self", ".", "_visual_drag", ".", "column", "(", "ttk", ".", "Treeview", ".", "column", "(", "self", ",", "column", ",", "'id'", ")", ",", "option", ",", "*", "*", "kw", ")", "if", "kw", "or", "option", ":", "return", "ttk", ".", "Treeview", ".", "column", "(", "self", ",", "column", ",", "option", ",", "*", "*", "kw", ")", "elif", "not", "config", ":", "res", "=", "ttk", ".", "Treeview", ".", "column", "(", "self", ",", "column", ",", "option", ",", "*", "*", "kw", ")", "res", "[", "'type'", "]", "=", "self", ".", "_column_types", "[", "column", "]", "return", "res" ]
Deletes the global cache file from its per-operating-system location.
def delete_global_cache(appname='default'):
    """Delete the global cache shelf file for ``appname``.

    Removes the shelf file from its per-operating-system cache location.
    """
    #close_global_shelf(appname)
    shelf_fpath = get_global_shelf_fpath(appname)
    util_path.remove_file(shelf_fpath, verbose=True, dryrun=False)
9,007
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L989-L993
[ "def", "column", "(", "self", ",", "column", ",", "option", "=", "None", ",", "*", "*", "kw", ")", ":", "config", "=", "False", "if", "option", "==", "'type'", ":", "return", "self", ".", "_column_types", "[", "column", "]", "elif", "'type'", "in", "kw", ":", "config", "=", "True", "self", ".", "_column_types", "[", "column", "]", "=", "kw", ".", "pop", "(", "'type'", ")", "if", "kw", ":", "self", ".", "_visual_drag", ".", "column", "(", "ttk", ".", "Treeview", ".", "column", "(", "self", ",", "column", ",", "'id'", ")", ",", "option", ",", "*", "*", "kw", ")", "if", "kw", "or", "option", ":", "return", "ttk", ".", "Treeview", ".", "column", "(", "self", ",", "column", ",", "option", ",", "*", "*", "kw", ")", "elif", "not", "config", ":", "res", "=", "ttk", ".", "Treeview", ".", "column", "(", "self", ",", "column", ",", "option", ",", "*", "*", "kw", ")", "res", "[", "'type'", "]", "=", "self", ".", "_column_types", "[", "column", "]", "return", "res" ]
Returns data with different cfgstr values that were previously computed with this cacher .
def existing_versions(self):
    """Yield file paths of results previously computed with this cacher
    under different cfgstr values.

    Yields:
        str: full path of each matching cache file in ``self.dpath``
    """
    import glob
    pattern = self.fname + '_*' + self.ext
    for match in glob.glob1(self.dpath, pattern):
        yield join(self.dpath, match)
9,008
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L345-L354
[ "def", "deauthorize_application", "(", "request", ")", ":", "if", "request", ".", "facebook", ":", "user", "=", "User", ".", "objects", ".", "get", "(", "facebook_id", "=", "request", ".", "facebook", ".", "signed_request", ".", "user", ".", "id", ")", "user", ".", "authorized", "=", "False", "user", ".", "save", "(", ")", "return", "HttpResponse", "(", ")", "else", ":", "return", "HttpResponse", "(", "status", "=", "400", ")" ]
Like load but returns None if the load fails
def tryload(self, cfgstr=None):
    """Like load, but returns None if the load fails.

    Args:
        cfgstr (str): configuration string; falls back to ``self.cfgstr``,
            then to '' (with a warning) when neither is given.

    Returns:
        the cached data, or None when the cacher is disabled or the
        cache misses (IOError).
    """
    if cfgstr is None:
        cfgstr = self.cfgstr
    if cfgstr is None:
        import warnings
        warnings.warn('No cfgstr given in Cacher constructor or call')
        cfgstr = ''
    # assert cfgstr is not None, (
    #     'must specify cfgstr in constructor or call')
    if not self.enabled:
        if self.verbose > 0:
            print('[cache] ... %s Cacher disabled' % (self.fname))
        return None
    try:
        if self.verbose > 1:
            print('[cache] tryload fname=%s' % (self.fname,))
        # if self.verbose > 2:
        #     print('[cache] cfgstr=%r' % (cfgstr,))
        return self.load(cfgstr)
    except IOError:
        # cache miss: fall through and implicitly return None
        if self.verbose > 0:
            print('[cache] ... %s Cacher miss' % (self.fname))
9,009
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L375-L399
[ "def", "move_vobject", "(", "self", ",", "uid", ",", "from_file", ",", "to_file", ")", ":", "if", "from_file", "not", "in", "self", ".", "_reminders", "or", "to_file", "not", "in", "self", ".", "_reminders", ":", "return", "uid", "=", "uid", ".", "split", "(", "'@'", ")", "[", "0", "]", "with", "self", ".", "_lock", ":", "rem", "=", "open", "(", "from_file", ")", ".", "readlines", "(", ")", "for", "(", "index", ",", "line", ")", "in", "enumerate", "(", "rem", ")", ":", "if", "uid", "==", "md5", "(", "line", "[", ":", "-", "1", "]", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", ":", "del", "rem", "[", "index", "]", "open", "(", "from_file", ",", "'w'", ")", ".", "writelines", "(", "rem", ")", "open", "(", "to_file", ",", "'a'", ")", ".", "write", "(", "line", ")", "break" ]
Try and load from a partially specified configuration string
def fuzzyload(self, cachedir=None, partial_cfgstr='', **kwargs):
    """Try to load from a partially specified configuration string.

    Raises:
        ValueError: when the partial cfgstr does not match exactly one
            cached target.
    """
    valid_targets = self.glob_valid_targets(cachedir, partial_cfgstr)
    if len(valid_targets) != 1:
        import utool as ut
        raise ValueError('need to further specify target. valid_targets=%s' %
                         (ut.repr3(valid_targets,)))
    # exactly one match; load it in place (result stored on self)
    self.load(fpath=valid_targets[0], **kwargs)
9,010
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L1108-L1118
[ "def", "__getPeriodUsers", "(", "self", ",", "start_date", ",", "final_date", ")", ":", "self", ".", "__logger", ".", "info", "(", "\"Getting users from \"", "+", "start_date", "+", "\" to \"", "+", "final_date", ")", "url", "=", "self", ".", "__getURL", "(", "1", ",", "start_date", ",", "final_date", ")", "data", "=", "self", ".", "__readAPI", "(", "url", ")", "users", "=", "[", "]", "total_pages", "=", "10000", "page", "=", "1", "while", "total_pages", ">=", "page", ":", "url", "=", "self", ".", "__getURL", "(", "page", ",", "start_date", ",", "final_date", ")", "data", "=", "self", ".", "__readAPI", "(", "url", ")", "self", ".", "__logger", ".", "debug", "(", "str", "(", "len", "(", "data", "[", "'items'", "]", ")", ")", "+", "\" users found\"", ")", "for", "u", "in", "data", "[", "'items'", "]", ":", "users", ".", "append", "(", "u", "[", "\"login\"", "]", ")", "self", ".", "__usersToProccess", ".", "put", "(", "u", "[", "\"login\"", "]", ")", "total_count", "=", "data", "[", "\"total_count\"", "]", "total_pages", "=", "int", "(", "total_count", "/", "100", ")", "+", "1", "page", "+=", "1", "return", "users" ]
Loads the result from the given database
def load(self, cachedir=None, cfgstr=None, fpath=None, verbose=None,
         quiet=QUIET, ignore_keys=None):
    """Load this object's cached result from disk.

    Args:
        cachedir (str): cache directory, used to derive fpath when not given
        cfgstr (str): configuration string, used to derive fpath when not given
        fpath (str): explicit cache file path; overrides cachedir/cfgstr
        verbose: verbosity; defaults to ``self.verbose`` then VERBOSE
        quiet: accepted for interface compatibility (not used in this body)
        ignore_keys: keys to skip when loading, forwarded to _unsafe_load

    Raises:
        ValueError: the cache file is likely corrupt
        zipfile.error: the cache file is a bad zipfile
        IOError: the cache file is missing (cache miss) or unreadable
        Exception: any other load failure (logged, then re-raised)

    Every failure is printed for diagnostics and then re-raised; the
    except-clause ordering (ValueError, zipfile.error, IOError, Exception)
    is significant.
    """
    if verbose is None:
        verbose = getattr(self, 'verbose', VERBOSE)
    if fpath is None:
        fpath = self.get_fpath(cachedir, cfgstr=cfgstr)
    if verbose:
        print('[Cachable] cache tryload: %r' % (basename(fpath),))
    try:
        # loads directly into self's attributes
        self._unsafe_load(fpath, ignore_keys)
        if verbose:
            print('... self cache hit: %r' % (basename(fpath),))
    except ValueError as ex:
        import utool as ut
        msg = '[!Cachable] Cachable(%s) is likely corrupt' % (self.get_cfgstr())
        print('CORRUPT fpath = %s' % (fpath,))
        ut.printex(ex, msg, iswarning=True)
        raise
    #except BadZipFile as ex:
    except zipfile.error as ex:
        import utool as ut
        msg = '[!Cachable] Cachable(%s) has bad zipfile' % (self.get_cfgstr())
        print('CORRUPT fpath = %s' % (fpath,))
        ut.printex(ex, msg, iswarning=True)
        raise
        #if exists(fpath):
        #    #print('[Cachable] Removing corrupted file: %r' % fpath)
        #    #os.remove(fpath)
        #    raise hsexcept.HotsNeedsRecomputeError(msg)
        #else:
        #    raise Exception(msg)
    except IOError as ex:
        import utool as ut
        if not exists(fpath):
            # genuine cache miss (file absent) rather than corruption
            msg = '... self cache miss: %r' % (basename(fpath),)
            if verbose:
                print(msg)
            raise
        print('CORRUPT fpath = %s' % (fpath,))
        msg = '[!Cachable] Cachable(%s) is corrupt' % (self.get_cfgstr())
        ut.printex(ex, msg, iswarning=True)
        raise
    except Exception as ex:
        import utool as ut
        ut.printex(ex, 'unknown exception while loading query result')
        raise
9,011
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cache.py#L1121-L1169
[ "def", "_file_watcher", "(", "state", ")", ":", "conf", "=", "state", ".", "app", ".", "config", "file_path", "=", "conf", ".", "get", "(", "'WAFFLE_WATCHER_FILE'", ",", "'/tmp/waffleconf.txt'", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "# Create watch file", "open", "(", "file_path", ",", "'a'", ")", ".", "close", "(", ")", "while", "True", ":", "tstamp", "=", "os", ".", "path", ".", "getmtime", "(", "file_path", ")", "# Compare timestamps and update config if needed", "if", "tstamp", ">", "state", ".", "_tstamp", ":", "state", ".", "update_conf", "(", ")", "state", ".", "_tstamp", "=", "tstamp", "# Not too critical", "time", ".", "sleep", "(", "10", ")" ]
Normalizes a path and returns it relative to another path (the current working directory by default).
def truepath_relative(path, otherpath=None):
    """Return ``path`` normalized and expressed relative to ``otherpath``
    (the current working directory by default)."""
    if otherpath is None:
        otherpath = os.getcwd()
    base = truepath(otherpath)
    return normpath(relpath(path, base))
9,012
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L109-L137
[ "def", "UpdateUser", "(", "username", ",", "password", "=", "None", ",", "is_admin", "=", "False", ")", ":", "user_type", ",", "password", "=", "_GetUserTypeAndPassword", "(", "username", ",", "password", "=", "password", ",", "is_admin", "=", "is_admin", ")", "grr_api", "=", "maintenance_utils", ".", "InitGRRRootAPI", "(", ")", "grr_user", "=", "grr_api", ".", "GrrUser", "(", "username", ")", ".", "Get", "(", ")", "grr_user", ".", "Modify", "(", "user_type", "=", "user_type", ",", "password", "=", "password", ")" ]
Alias for path_ndir_split
def tail(fpath, n=2, trailing=True):
    """Alias for path_ndir_split: keep only the last ``n`` path components
    of ``fpath`` (with a '.../' prefix when truncated and ``trailing``)."""
    return path_ndir_split(fpath, n=n, trailing=trailing)
9,013
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L140-L142
[ "def", "get_model_fields", "(", "model", ",", "add_reserver_flag", "=", "True", ")", ":", "import", "uliweb", ".", "orm", "as", "orm", "fields", "=", "[", "]", "m", "=", "{", "'type'", ":", "'type_name'", ",", "'hint'", ":", "'hint'", ",", "'default'", ":", "'default'", ",", "'required'", ":", "'required'", "}", "m1", "=", "{", "'index'", ":", "'index'", ",", "'unique'", ":", "'unique'", "}", "for", "name", ",", "prop", "in", "model", ".", "properties", ".", "items", "(", ")", ":", "if", "name", "==", "'id'", ":", "continue", "d", "=", "{", "}", "for", "k", ",", "v", "in", "m", ".", "items", "(", ")", ":", "d", "[", "k", "]", "=", "getattr", "(", "prop", ",", "v", ")", "for", "k", ",", "v", "in", "m1", ".", "items", "(", ")", ":", "d", "[", "k", "]", "=", "bool", "(", "prop", ".", "kwargs", ".", "get", "(", "v", ")", ")", "d", "[", "'name'", "]", "=", "prop", ".", "fieldname", "or", "name", "d", "[", "'verbose_name'", "]", "=", "unicode", "(", "prop", ".", "verbose_name", ")", "d", "[", "'nullable'", "]", "=", "bool", "(", "prop", ".", "kwargs", ".", "get", "(", "'nullable'", ",", "orm", ".", "__nullable__", ")", ")", "if", "d", "[", "'type'", "]", "in", "(", "'VARCHAR'", ",", "'CHAR'", ",", "'BINARY'", ",", "'VARBINARY'", ")", ":", "d", "[", "'max_length'", "]", "=", "prop", ".", "max_length", "if", "d", "[", "'type'", "]", "in", "(", "'Reference'", ",", "'OneToOne'", ",", "'ManyToMany'", ")", ":", "d", "[", "'reference_class'", "]", "=", "prop", ".", "reference_class", "#collection_name will be _collection_name, it the original value", "d", "[", "'collection_name'", "]", "=", "prop", ".", "_collection_name", "d", "[", "'server_default'", "]", "=", "prop", ".", "kwargs", ".", "get", "(", "'server_default'", ")", "d", "[", "'_reserved'", "]", "=", "True", "fields", ".", "append", "(", "d", ")", "return", "fields" ]
r Replaces home directory with ~
def unexpanduser(path):
    r"""Replace the home-directory prefix of ``path`` with '~'.

    Inverse of ``os.path.expanduser``.

    Fix: the original replaced any string prefix, so with home
    '/home/user' the unrelated path '/home/userx' became '~x'. The prefix
    is now only replaced when it ends exactly at the home directory, i.e.
    the path equals the home directory or continues with a path separator.

    Args:
        path (str): path that may start with the home directory

    Returns:
        str: ``path`` with the home directory abbreviated to '~'
    """
    homedir = expanduser('~')
    if path == homedir:
        return '~'
    if path.startswith(homedir) and path[len(homedir)] in '/\\':
        path = '~' + path[len(homedir):]
    return path
9,014
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L145-L152
[ "def", "load", "(", "dbname", ")", ":", "db", "=", "Database", "(", "dbname", ")", "# Get the name of the objects", "tables", "=", "get_table_list", "(", "db", ".", "cur", ")", "# Create a Trace instance for each object", "chains", "=", "0", "for", "name", "in", "tables", ":", "db", ".", "_traces", "[", "name", "]", "=", "Trace", "(", "name", "=", "name", ",", "db", "=", "db", ")", "db", ".", "_traces", "[", "name", "]", ".", "_shape", "=", "get_shape", "(", "db", ".", "cur", ",", "name", ")", "setattr", "(", "db", ",", "name", ",", "db", ".", "_traces", "[", "name", "]", ")", "db", ".", "cur", ".", "execute", "(", "'SELECT MAX(trace) FROM [%s]'", "%", "name", ")", "chains", "=", "max", "(", "chains", ",", "db", ".", "cur", ".", "fetchall", "(", ")", "[", "0", "]", "[", "0", "]", "+", "1", ")", "db", ".", "chains", "=", "chains", "db", ".", "trace_names", "=", "chains", "*", "[", "tables", ",", "]", "db", ".", "_state_", "=", "{", "}", "return", "db" ]
r Shows only a little bit of the path . Up to the n bottom - level directories
def path_ndir_split(path_, n, force_unix=True, winroot='C:', trailing=True):
    r"""Show only the last ``n`` bottom-level directories of a path.

    Args:
        path_ (str): path to abbreviate; non-strings are returned unchanged
        n (int): number of trailing components to keep; None keeps the
            whole (cross-platform-normalized) path; 0 yields ''
        force_unix (bool): join components with '/' instead of os.sep
        winroot (str): present in the signature for interface compatibility;
            not referenced in this body
        trailing (bool): prefix '.../' when the path was truncated

    Returns:
        str: the abbreviated, cross-platform-normalized path
    """
    if not isinstance(path_, six.string_types):
        # Probably given a file pointer
        return path_
    if n is None:
        cplat_path = ensure_crossplat_path(path_)
    elif n == 0:
        cplat_path = ''
    else:
        sep = '/' if force_unix else os.sep
        ndirs_list = []
        head = path_
        reached_end = False
        # Peel off up to n trailing components with os.path.split
        for nx in range(n):
            head, tail = split(head)
            if tail == '':
                # hit the filesystem root (or ran out of components)
                if head == '':
                    reached_end = True
                else:
                    # keep the root itself; strip separators unless it is
                    # the only component collected
                    root = head if len(ndirs_list) == 0 else head.strip('\\/')
                    ndirs_list.append(root)
                    reached_end = True
                break
            else:
                ndirs_list.append(tail)
        if trailing and not reached_end:
            # one extra split to detect whether anything was actually
            # truncated (i.e. whether the '...' prefix is needed)
            head, tail = split(head)
            if len(tail) == 0:
                if len(head) == 0:  # or head == '/':
                    reached_end = True
        # components were collected bottom-up; reverse before joining
        ndirs = sep.join(ndirs_list[::-1])
        cplat_path = ensure_crossplat_path(ndirs)
        #if trailing and not reached_end:
        if trailing and not reached_end:
            cplat_path = '.../' + cplat_path
    return cplat_path
9,015
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L155-L234
[ "def", "ttl", "(", "self", ")", ":", "now", "=", "time", ".", "time", "(", ")", "*", "1000", "if", "self", ".", "_need_update", ":", "ttl", "=", "0", "else", ":", "metadata_age", "=", "now", "-", "self", ".", "_last_successful_refresh_ms", "ttl", "=", "self", ".", "config", "[", "'metadata_max_age_ms'", "]", "-", "metadata_age", "retry_age", "=", "now", "-", "self", ".", "_last_refresh_ms", "next_retry", "=", "self", ".", "config", "[", "'retry_backoff_ms'", "]", "-", "retry_age", "return", "max", "(", "ttl", ",", "next_retry", ",", "0", ")" ]
augments end of path before the extension .
def augpath(path, augsuf='', augext='', augpref='', augdir=None, newext=None,
            newfname=None, ensure=False, prefix=None, suffix=None):
    """Augment the end of a path's filename before its extension.

    Args:
        path (str): original path
        augsuf (str): appended to the stem, before the extension
        augext (str): appended after the extension
        augpref (str): prepended to the stem
        augdir (str): extra directory inserted before the filename
        newext (str): replacement extension (defaults to the original)
        newfname (str): replacement stem (defaults to the original)
        ensure (bool): create ``augdir`` if it does not exist
        prefix (str): alias for ``augpref`` (takes precedence when given)
        suffix (str): alias for ``augsuf`` (takes precedence when given)

    Returns:
        str: the augmented path
    """
    if prefix is not None:
        augpref = prefix
    if suffix is not None:
        augsuf = suffix
    # Break the path into directory, stem, and extension
    directory, basename_ = split(path)
    stem, ext = splitext(basename_)
    if newfname is not None:
        stem = newfname
    if newext is None:
        newext = ext
    # Reassemble the filename with the augmentations applied
    new_basename = ''.join((augpref, stem, augsuf, newext, augext))
    if augdir is None:
        new_directory = directory
    else:
        new_directory = join(directory, augdir)
        if ensure:
            # create the new directory if needed
            ensuredir(new_directory)
    return join(new_directory, new_basename)
9,016
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L308-L368
[ "def", "collapse_nodes", "(", "graph", ",", "survivor_mapping", ":", "Mapping", "[", "BaseEntity", ",", "Set", "[", "BaseEntity", "]", "]", ")", "->", "None", ":", "inconsistencies", "=", "surviors_are_inconsistent", "(", "survivor_mapping", ")", "if", "inconsistencies", ":", "raise", "ValueError", "(", "'survivor mapping is inconsistent: {}'", ".", "format", "(", "inconsistencies", ")", ")", "for", "survivor", ",", "victims", "in", "survivor_mapping", ".", "items", "(", ")", ":", "for", "victim", "in", "victims", ":", "collapse_pair", "(", "graph", ",", "survivor", "=", "survivor", ",", "victim", "=", "victim", ")", "_remove_self_edges", "(", "graph", ")" ]
Removes files matching a pattern from a directory
def remove_files_in_dir(dpath, fname_pattern_list='*', recursive=False,
                        verbose=VERBOSE, dryrun=False, ignore_errors=False):
    """Remove files matching any of the given patterns from a directory.

    Args:
        dpath (str): directory to clean
        fname_pattern_list (str or list): fnmatch pattern(s) to match
        recursive (bool): descend into subdirectories
        verbose (int): verbosity level
        dryrun (bool): report without actually deleting
        ignore_errors (bool): forwarded to remove_file

    Returns:
        bool: always True (kept for interface compatibility)
    """
    if isinstance(fname_pattern_list, six.string_types):
        fname_pattern_list = [fname_pattern_list]
    if verbose > 2:
        print('[util_path] Removing files:')
        print(' * from dpath = %r ' % dpath)
        print(' * with patterns = %r' % fname_pattern_list)
        print(' * recursive = %r' % recursive)
    num_removed, num_matched = (0, 0)
    if not exists(dpath):
        msg = ('!!! dir = %r does not exist!' % dpath)
        if verbose:
            print(msg)
        warnings.warn(msg, category=UserWarning)
    for root, _, fnames in os.walk(dpath):
        for pattern in fname_pattern_list:
            for fname in fnmatch.filter(fnames, pattern):
                num_matched += 1
                num_removed += remove_file(join(root, fname),
                                           ignore_errors=ignore_errors,
                                           dryrun=dryrun,
                                           verbose=verbose > 5)
        if not recursive:
            # only the top-level directory was requested
            break
    if verbose > 0:
        print('[util_path] ... Removed %d/%d files' % (num_removed, num_matched))
    return True
9,017
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L371-L399
[ "def", "create_server", "(", "self", ")", ":", "self", ".", "check_port", "(", "self", ".", "port", ")", "try", ":", "# Establish communication grpc", "self", ".", "server", "=", "grpc", ".", "server", "(", "ThreadPoolExecutor", "(", "max_workers", "=", "10", ")", ")", "self", ".", "unity_to_external", "=", "UnityToExternalServicerImplementation", "(", ")", "add_UnityToExternalServicer_to_server", "(", "self", ".", "unity_to_external", ",", "self", ".", "server", ")", "# Using unspecified address, which means that grpc is communicating on all IPs", "# This is so that the docker container can connect.", "self", ".", "server", ".", "add_insecure_port", "(", "'[::]:'", "+", "str", "(", "self", ".", "port", ")", ")", "self", ".", "server", ".", "start", "(", ")", "self", ".", "is_open", "=", "True", "except", ":", "raise", "UnityWorkerInUseException", "(", "self", ".", "worker_id", ")" ]
Removes a file directory or symlink
def delete(path, dryrun=False, recursive=True, verbose=None, print_exists=True,
           ignore_errors=True):
    """Remove a file, directory, or symlink.

    Args:
        path (str): path to delete
        dryrun (bool): report without actually deleting
        recursive (bool): for directories, delete contents recursively
        verbose: verbosity; defaults to VERBOSE (bumped to 1 unless QUIET)
        print_exists (bool): announce when the path does not exist
        ignore_errors (bool): forwarded to the removal helpers

    Returns:
        bool: True when something was (or would be) removed

    Raises:
        ValueError: when the path exists but is neither link, dir, nor file
    """
    if verbose is None:
        verbose = VERBOSE
        if not QUIET:
            verbose = 1
    if verbose > 0:
        print('[util_path] Deleting path=%r' % path)
    exists_flag = exists(path)
    link_flag = islink(path)
    if not exists_flag and not link_flag:
        if print_exists and verbose:
            print('..does not exist!')
        flag = False
    else:
        common_kw = dict(verbose=verbose > 1, ignore_errors=ignore_errors,
                         dryrun=dryrun)
        if islink(path):
            # symlinks are unlinked directly, never followed
            os.unlink(path)
            flag = True
        elif isdir(path):
            # First remove everything inside the directory,
            # then the directory itself
            flag = remove_files_in_dir(path, recursive=recursive, **common_kw)
            flag = flag and remove_dirs(path, **common_kw)
        elif isfile(path):
            flag = remove_file(path, **common_kw)
        else:
            raise ValueError('Unknown type of path=%r' % (path,))
    if verbose > 0:
        print('[util_path] Finished deleting path=%r' % path)
    return flag
9,018
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L402-L434
[ "def", "group_experiments_greedy", "(", "tomo_expt", ":", "TomographyExperiment", ")", ":", "diag_sets", "=", "_max_tpb_overlap", "(", "tomo_expt", ")", "grouped_expt_settings_list", "=", "list", "(", "diag_sets", ".", "values", "(", ")", ")", "grouped_tomo_expt", "=", "TomographyExperiment", "(", "grouped_expt_settings_list", ",", "program", "=", "tomo_expt", ".", "program", ")", "return", "grouped_tomo_expt" ]
Checks existence before removing, then tries to remove the existing paths.
def remove_existing_fpaths ( fpath_list , verbose = VERBOSE , quiet = QUIET , strict = False , print_caller = PRINT_CALLER , lbl = 'files' ) : import utool as ut if print_caller : print ( util_dbg . get_caller_name ( range ( 1 , 4 ) ) + ' called remove_existing_fpaths' ) fpath_list_ = ut . filter_Nones ( fpath_list ) exists_list = list ( map ( exists , fpath_list_ ) ) if verbose : n_total = len ( fpath_list ) n_valid = len ( fpath_list_ ) n_exist = sum ( exists_list ) print ( '[util_path.remove_existing_fpaths] request delete of %d %s' % ( n_total , lbl ) ) if n_valid != n_total : print ( ( '[util_path.remove_existing_fpaths] ' 'trying to delete %d/%d non None %s ' ) % ( n_valid , n_total , lbl ) ) print ( ( '[util_path.remove_existing_fpaths] ' ' %d/%d exist and need to be deleted' ) % ( n_exist , n_valid ) ) existing_fpath_list = ut . compress ( fpath_list_ , exists_list ) return remove_fpaths ( existing_fpath_list , verbose = verbose , quiet = quiet , strict = strict , print_caller = False , lbl = lbl )
9,019
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L437-L461
[ "def", "saveAsTFRecords", "(", "df", ",", "output_dir", ")", ":", "tf_rdd", "=", "df", ".", "rdd", ".", "mapPartitions", "(", "toTFExample", "(", "df", ".", "dtypes", ")", ")", "tf_rdd", ".", "saveAsNewAPIHadoopFile", "(", "output_dir", ",", "\"org.tensorflow.hadoop.io.TFRecordFileOutputFormat\"", ",", "keyClass", "=", "\"org.apache.hadoop.io.BytesWritable\"", ",", "valueClass", "=", "\"org.apache.hadoop.io.NullWritable\"", ")" ]
Removes multiple file paths
def remove_fpaths ( fpaths , verbose = VERBOSE , quiet = QUIET , strict = False , print_caller = PRINT_CALLER , lbl = 'files' ) : import utool as ut if print_caller : print ( util_dbg . get_caller_name ( range ( 1 , 4 ) ) + ' called remove_fpaths' ) n_total = len ( fpaths ) _verbose = ( not quiet and n_total > 0 ) or VERYVERBOSE if _verbose : print ( '[util_path.remove_fpaths] try removing %d %s' % ( n_total , lbl ) ) n_removed = 0 prog = ut . ProgIter ( fpaths , label = 'removing files' , enabled = verbose ) _iter = iter ( prog ) # Try to be fast at first try : for fpath in _iter : os . remove ( fpath ) n_removed += 1 except OSError as ex : # Buf if we fail put a try in the inner loop if VERYVERBOSE : print ( 'WARNING: Could not remove fpath = %r' % ( fpath , ) ) if strict : util_dbg . printex ( ex , 'Could not remove fpath = %r' % ( fpath , ) , iswarning = False ) raise for fpath in _iter : try : os . remove ( fpath ) n_removed += 1 except OSError as ex : if VERYVERBOSE : print ( 'WARNING: Could not remove fpath = %r' % ( fpath , ) ) if _verbose : print ( '[util_path.remove_fpaths] ... removed %d / %d %s' % ( n_removed , n_total , lbl ) ) return n_removed
9,020
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L464-L502
[ "def", "calibrate_data", "(", "params", ",", "raw_data", ",", "calib_data", ")", ":", "start", "=", "calib_data", ".", "before", "(", "datetime", ".", "max", ")", "if", "start", "is", "None", ":", "start", "=", "datetime", ".", "min", "start", "=", "raw_data", ".", "after", "(", "start", "+", "SECOND", ")", "if", "start", "is", "None", ":", "return", "start", "del", "calib_data", "[", "start", ":", "]", "calibrator", "=", "Calib", "(", "params", ",", "raw_data", ")", "def", "calibgen", "(", "inputdata", ")", ":", "\"\"\"Internal generator function\"\"\"", "count", "=", "0", "for", "data", "in", "inputdata", ":", "idx", "=", "data", "[", "'idx'", "]", "count", "+=", "1", "if", "count", "%", "10000", "==", "0", ":", "logger", ".", "info", "(", "\"calib: %s\"", ",", "idx", ".", "isoformat", "(", "' '", ")", ")", "elif", "count", "%", "500", "==", "0", ":", "logger", ".", "debug", "(", "\"calib: %s\"", ",", "idx", ".", "isoformat", "(", "' '", ")", ")", "for", "key", "in", "(", "'rain'", ",", "'abs_pressure'", ",", "'temp_in'", ")", ":", "if", "data", "[", "key", "]", "is", "None", ":", "logger", ".", "error", "(", "'Ignoring invalid data at %s'", ",", "idx", ".", "isoformat", "(", "' '", ")", ")", "break", "else", ":", "yield", "calibrator", ".", "calib", "(", "data", ")", "calib_data", ".", "update", "(", "calibgen", "(", "raw_data", "[", "start", ":", "]", ")", ")", "return", "start" ]
r Returns the longest root of _path that exists
def longest_existing_path ( _path ) : existing_path = _path while True : _path_new = os . path . dirname ( existing_path ) if exists ( _path_new ) : existing_path = _path_new break if _path_new == existing_path : print ( '!!! [utool] This is a very illformated path indeed.' ) existing_path = '' break existing_path = _path_new return existing_path
9,021
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L508-L543
[ "def", "setGroups", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "requests", "=", "0", "groups", "=", "[", "]", "try", ":", "for", "gk", "in", "self", "[", "'groupKeys'", "]", ":", "try", ":", "g", "=", "self", ".", "mambugroupclass", "(", "entid", "=", "gk", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "AttributeError", "as", "ae", ":", "from", ".", "mambugroup", "import", "MambuGroup", "self", ".", "mambugroupclass", "=", "MambuGroup", "g", "=", "self", ".", "mambugroupclass", "(", "entid", "=", "gk", ",", "*", "args", ",", "*", "*", "kwargs", ")", "requests", "+=", "1", "groups", ".", "append", "(", "g", ")", "except", "KeyError", ":", "pass", "self", "[", "'groups'", "]", "=", "groups", "return", "requests" ]
r returns if a path is a file directory link or mount
def get_path_type ( path_ ) : path_type = '' if isfile ( path_ ) : path_type += 'file' if isdir ( path_ ) : path_type += 'directory' if islink ( path_ ) : path_type += 'link' if ismount ( path_ ) : path_type += 'mount' return path_type
9,022
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L546-L559
[ "def", "GetClientConfig", "(", "filename", ")", ":", "config_lib", ".", "SetPlatformArchContext", "(", ")", "config_lib", ".", "ParseConfigCommandLine", "(", ")", "context", "=", "list", "(", "grr_config", ".", "CONFIG", ".", "context", ")", "context", ".", "append", "(", "\"Client Context\"", ")", "deployer", "=", "build", ".", "ClientRepacker", "(", ")", "# Disable timestamping so we can get a reproducible and cachable config file.", "config_data", "=", "deployer", ".", "GetClientConfig", "(", "context", ",", "validate", "=", "True", ",", "deploy_timestamp", "=", "False", ")", "builder", "=", "build", ".", "ClientBuilder", "(", ")", "with", "open", "(", "filename", ",", "\"w\"", ")", "as", "fd", ":", "fd", ".", "write", "(", "config_data", ")", "builder", ".", "WriteBuildYaml", "(", "fd", ",", "build_timestamp", "=", "False", ")" ]
r verbose wrapper around os . path . exists
def checkpath ( path_ , verbose = VERYVERBOSE , n = None , info = VERYVERBOSE ) : assert isinstance ( path_ , six . string_types ) , ( 'path_=%r is not a string. type(path_) = %r' % ( path_ , type ( path_ ) ) ) path_ = normpath ( path_ ) if sys . platform . startswith ( 'win32' ) : # convert back to windows style path if using unix style if path_ . startswith ( '\\' ) : dirs = path_ . split ( '\\' ) if len ( dirs ) > 1 and len ( dirs [ 0 ] ) == 0 and len ( dirs [ 1 ] ) == 1 : dirs [ 1 ] = dirs [ 1 ] . upper ( ) + ':' path_ = '\\' . join ( dirs [ 1 : ] ) does_exist = exists ( path_ ) if verbose : #print_('[utool] checkpath(%r)' % (path_)) pretty_path = path_ndir_split ( path_ , n ) caller_name = util_dbg . get_caller_name ( allow_genexpr = False ) print ( '[%s] checkpath(%r)' % ( caller_name , pretty_path ) ) if does_exist : path_type = get_path_type ( path_ ) #path_type = 'file' if isfile(path_) else 'directory' print ( '[%s] ...(%s) exists' % ( caller_name , path_type , ) ) else : print ( '[%s] ... does not exist' % ( caller_name ) ) if not does_exist and info : #print('[util_path] ! Does not exist') _longest_path = longest_existing_path ( path_ ) _longest_path_type = get_path_type ( _longest_path ) print ( '[util_path] ... The longest existing path is: %r' % _longest_path ) print ( '[util_path] ... and has type %r' % ( _longest_path_type , ) ) return does_exist
9,023
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L562-L628
[ "def", "new_request_session", "(", "config", ",", "cookies", ")", ":", "session", "=", "requests", ".", "Session", "(", ")", "if", "cookies", ":", "session", ".", "cookies", "=", "cookies", "session", ".", "max_redirects", "=", "config", "[", "\"maxhttpredirects\"", "]", "session", ".", "headers", ".", "update", "(", "{", "\"User-Agent\"", ":", "config", "[", "\"useragent\"", "]", ",", "}", ")", "if", "config", "[", "\"cookiefile\"", "]", ":", "for", "cookie", "in", "cookies", ".", "from_file", "(", "config", "[", "\"cookiefile\"", "]", ")", ":", "session", ".", "cookies", "=", "requests", ".", "cookies", ".", "merge_cookies", "(", "session", ".", "cookies", ",", "cookie", ")", "return", "session" ]
DEPRICATE - alias - use ensuredir instead
def ensurepath ( path_ , verbose = None ) : if verbose is None : verbose = VERYVERBOSE return ensuredir ( path_ , verbose = verbose )
9,024
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L631-L635
[ "def", "complete_experiment", "(", "self", ",", "status", ")", ":", "self", ".", "log", "(", "\"Bot player completing experiment. Status: {}\"", ".", "format", "(", "status", ")", ")", "while", "True", ":", "url", "=", "\"{host}/{status}?participant_id={participant_id}\"", ".", "format", "(", "host", "=", "self", ".", "host", ",", "participant_id", "=", "self", ".", "participant_id", ",", "status", "=", "status", ")", "try", ":", "result", "=", "requests", ".", "get", "(", "url", ")", "result", ".", "raise_for_status", "(", ")", "except", "RequestException", ":", "self", ".", "stochastic_sleep", "(", ")", "continue", "return", "result" ]
r Ensures that directory will exist . creates new dir with sticky bits by default
def ensuredir ( path_ , verbose = None , info = False , mode = 0o1777 ) : if verbose is None : verbose = VERYVERBOSE if isinstance ( path_ , ( list , tuple ) ) : path_ = join ( * path_ ) if HAVE_PATHLIB and isinstance ( path_ , pathlib . Path ) : path_ = str ( path_ ) if not checkpath ( path_ , verbose = verbose , info = info ) : if verbose : print ( '[util_path] mkdir(%r)' % path_ ) try : os . makedirs ( normpath ( path_ ) , mode = mode ) except OSError as ex : util_dbg . printex ( ex , 'check that the longest existing path ' 'is not a bad windows symlink.' , keys = [ 'path_' ] ) raise return path_
9,025
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L638-L669
[ "def", "parse_source", "(", "info", ")", ":", "if", "\"extractor_key\"", "in", "info", ":", "source", "=", "info", "[", "\"extractor_key\"", "]", "lower_source", "=", "source", ".", "lower", "(", ")", "for", "key", "in", "SOURCE_TO_NAME", ":", "lower_key", "=", "key", ".", "lower", "(", ")", "if", "lower_source", "==", "lower_key", ":", "source", "=", "SOURCE_TO_NAME", "[", "lower_key", "]", "if", "source", "!=", "\"Generic\"", ":", "return", "source", "if", "\"url\"", "in", "info", "and", "info", "[", "\"url\"", "]", "is", "not", "None", ":", "p", "=", "urlparse", "(", "info", "[", "\"url\"", "]", ")", "if", "p", "and", "p", ".", "netloc", ":", "return", "p", ".", "netloc", "return", "\"Unknown\"" ]
r Creates file if it doesnt exist
def touch ( fpath , times = None , verbose = True ) : try : if verbose : print ( '[util_path] touching %r' % fpath ) with open ( fpath , 'a' ) : os . utime ( fpath , times ) except Exception as ex : import utool utool . printex ( ex , 'touch %s' % fpath ) raise return fpath
9,026
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L672-L702
[ "def", "parse_event_xml", "(", "self", ",", "event_data", ")", "->", "dict", ":", "event", "=", "{", "}", "event_xml", "=", "event_data", ".", "decode", "(", ")", "message", "=", "MESSAGE", ".", "search", "(", "event_xml", ")", "if", "not", "message", ":", "return", "{", "}", "event", "[", "EVENT_OPERATION", "]", "=", "message", ".", "group", "(", "EVENT_OPERATION", ")", "topic", "=", "TOPIC", ".", "search", "(", "event_xml", ")", "if", "topic", ":", "event", "[", "EVENT_TOPIC", "]", "=", "topic", ".", "group", "(", "EVENT_TOPIC", ")", "source", "=", "SOURCE", ".", "search", "(", "event_xml", ")", "if", "source", ":", "event", "[", "EVENT_SOURCE", "]", "=", "source", ".", "group", "(", "EVENT_SOURCE", ")", "event", "[", "EVENT_SOURCE_IDX", "]", "=", "source", ".", "group", "(", "EVENT_SOURCE_IDX", ")", "data", "=", "DATA", ".", "search", "(", "event_xml", ")", "if", "data", ":", "event", "[", "EVENT_TYPE", "]", "=", "data", ".", "group", "(", "EVENT_TYPE", ")", "event", "[", "EVENT_VALUE", "]", "=", "data", ".", "group", "(", "EVENT_VALUE", ")", "_LOGGER", ".", "debug", "(", "event", ")", "return", "event" ]
Copies all data and stat info
def copy_list ( src_list , dst_list , lbl = 'Copying' , ioerr_ok = False , sherro_ok = False , oserror_ok = False ) : # Feb - 6 - 2014 Copy function task_iter = zip ( src_list , dst_list ) def docopy ( src , dst ) : try : shutil . copy2 ( src , dst ) except OSError : if ioerr_ok : return False raise except shutil . Error : if sherro_ok : return False raise except IOError : if ioerr_ok : return False raise return True progiter = util_progress . ProgIter ( task_iter , adjust = True , lbl = lbl ) success_list = [ docopy ( src , dst ) for ( src , dst ) in progiter ] return success_list
9,027
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L871-L894
[ "def", "create", "(", "self", ",", "image", ",", "geometry", ",", "options", ")", ":", "image", "=", "self", ".", "cropbox", "(", "image", ",", "geometry", ",", "options", ")", "image", "=", "self", ".", "orientation", "(", "image", ",", "geometry", ",", "options", ")", "image", "=", "self", ".", "colorspace", "(", "image", ",", "geometry", ",", "options", ")", "image", "=", "self", ".", "remove_border", "(", "image", ",", "options", ")", "image", "=", "self", ".", "scale", "(", "image", ",", "geometry", ",", "options", ")", "image", "=", "self", ".", "crop", "(", "image", ",", "geometry", ",", "options", ")", "image", "=", "self", ".", "rounded", "(", "image", ",", "geometry", ",", "options", ")", "image", "=", "self", ".", "blur", "(", "image", ",", "geometry", ",", "options", ")", "image", "=", "self", ".", "padding", "(", "image", ",", "geometry", ",", "options", ")", "return", "image" ]
r Globs directory for pattern
def glob ( dpath , pattern = None , recursive = False , with_files = True , with_dirs = True , maxdepth = None , exclude_dirs = [ ] , fullpath = True , * * kwargs ) : gen = iglob ( dpath , pattern , recursive = recursive , with_files = with_files , with_dirs = with_dirs , maxdepth = maxdepth , fullpath = fullpath , exclude_dirs = exclude_dirs , * * kwargs ) path_list = list ( gen ) return path_list
9,028
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L957-L1024
[ "def", "from_input", "(", "cls", ",", "input_file", "=", "sys", ".", "stdin", ",", "modify", "=", "False", ",", "udas", "=", "None", ")", ":", "original_task", "=", "input_file", ".", "readline", "(", ")", ".", "strip", "(", ")", "if", "modify", ":", "modified_task", "=", "input_file", ".", "readline", "(", ")", ".", "strip", "(", ")", "return", "cls", "(", "json", ".", "loads", "(", "modified_task", ")", ",", "udas", "=", "udas", ")", "return", "cls", "(", "json", ".", "loads", "(", "original_task", ")", ",", "udas", "=", "udas", ")" ]
returns the number of images in a directory
def num_images_in_dir ( path ) : num_imgs = 0 for root , dirs , files in os . walk ( path ) : for fname in files : if fpath_has_imgext ( fname ) : num_imgs += 1 return num_imgs
9,029
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1150-L1159
[ "def", "console_type", "(", "self", ",", "console_type", ")", ":", "if", "console_type", "!=", "self", ".", "_console_type", ":", "# get a new port if the console type change", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "if", "console_type", "==", "\"vnc\"", ":", "# VNC is a special case and the range must be 5900-6000", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "get_free_tcp_port", "(", "self", ".", "_project", ",", "5900", ",", "6000", ")", "else", ":", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "get_free_tcp_port", "(", "self", ".", "_project", ")", "self", ".", "_console_type", "=", "console_type", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: console type set to {console_type}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "console_type", "=", "console_type", ")", ")" ]
returns true if the filename has any of the given extensions
def fpath_has_ext ( fname , exts , case_sensitive = False ) : fname_ = fname . lower ( ) if not case_sensitive else fname if case_sensitive : ext_pats = [ '*' + ext for ext in exts ] else : ext_pats = [ '*' + ext . lower ( ) for ext in exts ] return any ( [ fnmatch . fnmatch ( fname_ , pat ) for pat in ext_pats ] )
9,030
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1167-L1174
[ "def", "get_stats", "(", "self", ")", ":", "canRequestBusStatistics", "(", "self", ".", "_write_handle", ")", "stats", "=", "structures", ".", "BusStatistics", "(", ")", "canGetBusStatistics", "(", "self", ".", "_write_handle", ",", "ctypes", ".", "pointer", "(", "stats", ")", ",", "ctypes", ".", "sizeof", "(", "stats", ")", ")", "return", "stats" ]
r Returns path to module
def get_modpath ( modname , prefer_pkg = False , prefer_main = False ) : import importlib if isinstance ( modname , six . string_types ) : module = importlib . import_module ( modname ) else : module = modname # Hack modpath = module . __file__ . replace ( '.pyc' , '.py' ) initname = '__init__.py' mainname = '__main__.py' if prefer_pkg : if modpath . endswith ( initname ) or modpath . endswith ( mainname ) : modpath = dirname ( modpath ) # modpath = modpath[:-len(initname)] if prefer_main : if modpath . endswith ( initname ) : main_modpath = modpath [ : - len ( initname ) ] + mainname if exists ( main_modpath ) : modpath = main_modpath #modname = modname.replace('.__init__', '').strip() #module_dir = get_module_dir(module) return modpath
9,031
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1233-L1305
[ "def", "max_range", "(", "ranges", ",", "combined", "=", "True", ")", ":", "try", ":", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "'ignore'", ",", "r'All-NaN (slice|axis) encountered'", ")", "values", "=", "[", "tuple", "(", "np", ".", "NaN", "if", "v", "is", "None", "else", "v", "for", "v", "in", "r", ")", "for", "r", "in", "ranges", "]", "if", "pd", "and", "any", "(", "isinstance", "(", "v", ",", "datetime_types", ")", "and", "not", "isinstance", "(", "v", ",", "cftime_types", ")", "for", "r", "in", "values", "for", "v", "in", "r", ")", ":", "converted", "=", "[", "]", "for", "l", ",", "h", "in", "values", ":", "if", "isinstance", "(", "l", ",", "datetime_types", ")", "and", "isinstance", "(", "h", ",", "datetime_types", ")", ":", "l", ",", "h", "=", "(", "pd", ".", "Timestamp", "(", "l", ")", ".", "to_datetime64", "(", ")", ",", "pd", ".", "Timestamp", "(", "h", ")", ".", "to_datetime64", "(", ")", ")", "converted", ".", "append", "(", "(", "l", ",", "h", ")", ")", "values", "=", "converted", "arr", "=", "np", ".", "array", "(", "values", ")", "if", "not", "len", "(", "arr", ")", ":", "return", "np", ".", "NaN", ",", "np", ".", "NaN", "elif", "arr", ".", "dtype", ".", "kind", "in", "'OSU'", ":", "arr", "=", "list", "(", "python2sort", "(", "[", "v", "for", "r", "in", "values", "for", "v", "in", "r", "if", "not", "is_nan", "(", "v", ")", "and", "v", "is", "not", "None", "]", ")", ")", "return", "arr", "[", "0", "]", ",", "arr", "[", "-", "1", "]", "elif", "arr", ".", "dtype", ".", "kind", "in", "'M'", ":", "return", "(", "(", "arr", ".", "min", "(", ")", ",", "arr", ".", "max", "(", ")", ")", "if", "combined", "else", "(", "arr", "[", ":", ",", "0", "]", ".", "min", "(", ")", ",", "arr", "[", ":", ",", "1", "]", ".", "min", "(", ")", ")", ")", "if", "combined", ":", "return", "(", "np", ".", "nanmin", "(", "arr", ")", ",", "np", ".", "nanmax", "(", "arr", ")", ")", "else", ":", "return", "(", "np", ".", 
"nanmin", "(", "arr", "[", ":", ",", "0", "]", ")", ",", "np", ".", "nanmax", "(", "arr", "[", ":", ",", "1", "]", ")", ")", "except", ":", "return", "(", "np", ".", "NaN", ",", "np", ".", "NaN", ")" ]
Returns path to module relative to the package root
def get_relative_modpath ( module_fpath ) : modsubdir_list = get_module_subdir_list ( module_fpath ) _ , ext = splitext ( module_fpath ) rel_modpath = join ( * modsubdir_list ) + ext rel_modpath = ensure_crossplat_path ( rel_modpath ) return rel_modpath
9,032
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1350-L1375
[ "def", "_read_columns_file", "(", "f", ")", ":", "try", ":", "columns", "=", "json", ".", "loads", "(", "open", "(", "f", ",", "'r'", ")", ".", "read", "(", ")", ",", "object_pairs_hook", "=", "collections", ".", "OrderedDict", ")", "except", "Exception", "as", "err", ":", "raise", "InvalidColumnsFileError", "(", "\"There was an error while reading {0}: {1}\"", ".", "format", "(", "f", ",", "err", ")", ")", "# Options are not supported yet:", "if", "'__options'", "in", "columns", ":", "del", "columns", "[", "'__options'", "]", "return", "columns" ]
returns importable name from file path
def get_modname_from_modpath ( module_fpath ) : modsubdir_list = get_module_subdir_list ( module_fpath ) modname = '.' . join ( modsubdir_list ) modname = modname . replace ( '.__init__' , '' ) . strip ( ) modname = modname . replace ( '.__main__' , '' ) . strip ( ) return modname
9,033
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1378-L1404
[ "def", "reinit_nested_vars", "(", "variables", ",", "indices", "=", "None", ")", ":", "if", "isinstance", "(", "variables", ",", "(", "tuple", ",", "list", ")", ")", ":", "return", "tf", ".", "group", "(", "*", "[", "reinit_nested_vars", "(", "variable", ",", "indices", ")", "for", "variable", "in", "variables", "]", ")", "if", "indices", "is", "None", ":", "return", "variables", ".", "assign", "(", "tf", ".", "zeros_like", "(", "variables", ")", ")", "else", ":", "zeros", "=", "tf", ".", "zeros", "(", "[", "tf", ".", "shape", "(", "indices", ")", "[", "0", "]", "]", "+", "variables", ".", "shape", "[", "1", ":", "]", ".", "as_list", "(", ")", ")", "return", "tf", ".", "scatter_update", "(", "variables", ",", "indices", ",", "zeros", ")" ]
like unix ls - lists all files and dirs in path
def ls ( path , pattern = '*' ) : path_iter = glob ( path , pattern , recursive = False ) return sorted ( list ( path_iter ) )
9,034
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1437-L1440
[ "def", "create_api_call", "(", "func", ",", "settings", ")", ":", "def", "base_caller", "(", "api_call", ",", "_", ",", "*", "args", ")", ":", "\"\"\"Simply call api_call and ignore settings.\"\"\"", "return", "api_call", "(", "*", "args", ")", "def", "inner", "(", "request", ",", "options", "=", "None", ")", ":", "\"\"\"Invoke with the actual settings.\"\"\"", "this_options", "=", "_merge_options_metadata", "(", "options", ",", "settings", ")", "this_settings", "=", "settings", ".", "merge", "(", "this_options", ")", "if", "this_settings", ".", "retry", "and", "this_settings", ".", "retry", ".", "retry_codes", ":", "api_call", "=", "gax", ".", "retry", ".", "retryable", "(", "func", ",", "this_settings", ".", "retry", ",", "*", "*", "this_settings", ".", "kwargs", ")", "else", ":", "api_call", "=", "gax", ".", "retry", ".", "add_timeout_arg", "(", "func", ",", "this_settings", ".", "timeout", ",", "*", "*", "this_settings", ".", "kwargs", ")", "api_call", "=", "_catch_errors", "(", "api_call", ",", "gax", ".", "config", ".", "API_ERRORS", ")", "return", "api_caller", "(", "api_call", ",", "this_settings", ",", "request", ")", "if", "settings", ".", "page_descriptor", ":", "if", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "raise", "ValueError", "(", "'The API call has incompatible settings: '", "'bundling and page streaming'", ")", "api_caller", "=", "_page_streamable", "(", "settings", ".", "page_descriptor", ")", "elif", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "api_caller", "=", "_bundleable", "(", "settings", ".", "bundle_descriptor", ")", "else", ":", "api_caller", "=", "base_caller", "return", "inner" ]
lists all dirs which are python modules in path
def ls_moduledirs ( path , private = True , full = True ) : dir_list = ls_dirs ( path ) module_dir_iter = filter ( is_module_dir , dir_list ) if not private : module_dir_iter = filterfalse ( is_private_module , module_dir_iter ) if not full : module_dir_iter = map ( basename , module_dir_iter ) return list ( module_dir_iter )
9,035
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1460-L1468
[ "def", "_read_columns_file", "(", "f", ")", ":", "try", ":", "columns", "=", "json", ".", "loads", "(", "open", "(", "f", ",", "'r'", ")", ".", "read", "(", ")", ",", "object_pairs_hook", "=", "collections", ".", "OrderedDict", ")", "except", "Exception", "as", "err", ":", "raise", "InvalidColumnsFileError", "(", "\"There was an error while reading {0}: {1}\"", ".", "format", "(", "f", ",", "err", ")", ")", "# Options are not supported yet:", "if", "'__options'", "in", "columns", ":", "del", "columns", "[", "'__options'", "]", "return", "columns" ]
r Returns a list of images in a directory . By default returns relative paths .
def list_images ( img_dpath_ , ignore_list = [ ] , recursive = False , fullpath = False , full = None , sort = True ) : #if not QUIET: # print(ignore_list) if full is not None : fullpath = fullpath or full img_dpath_ = util_str . ensure_unicode ( img_dpath_ ) img_dpath = realpath ( img_dpath_ ) ignore_set = set ( ignore_list ) gname_list_ = [ ] assertpath ( img_dpath ) # Get all the files in a directory recursively true_imgpath = truepath ( img_dpath ) for root , dlist , flist in os . walk ( true_imgpath ) : root = util_str . ensure_unicode ( root ) rel_dpath = relpath ( root , img_dpath ) # Ignore directories if any ( [ dname in ignore_set for dname in dirsplit ( rel_dpath ) ] ) : continue for fname in iter ( flist ) : fname = util_str . ensure_unicode ( fname ) gname = join ( rel_dpath , fname ) . replace ( '\\' , '/' ) if gname . startswith ( './' ) : gname = gname [ 2 : ] if fpath_has_imgext ( gname ) : # Ignore Files if gname in ignore_set : continue if fullpath : gpath = join ( img_dpath , gname ) gname_list_ . append ( gpath ) else : gname_list_ . append ( gname ) if not recursive : break if sort : gname_list = sorted ( gname_list_ ) return gname_list
9,036
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1502-L1573
[ "def", "aggregate", "(", "self", ",", "val1", ",", "val2", ")", ":", "assert", "val1", "is", "not", "None", "assert", "val2", "is", "not", "None", "return", "self", ".", "_aggregator", "(", "val1", ",", "val2", ")" ]
Asserts that a patha exists
def assertpath ( path_ , msg = '' , * * kwargs ) : if NO_ASSERTS : return if path_ is None : raise AssertionError ( 'path is None! %s' % ( path_ , msg ) ) if path_ == '' : raise AssertionError ( 'path=%r is the empty string! %s' % ( path_ , msg ) ) if not checkpath ( path_ , * * kwargs ) : raise AssertionError ( 'path=%r does not exist! %s' % ( path_ , msg ) )
9,037
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1579-L1588
[ "def", "unindex_layers_with_issues", "(", "self", ",", "use_cache", "=", "False", ")", ":", "from", "hypermap", ".", "aggregator", ".", "models", "import", "Issue", ",", "Layer", ",", "Service", "from", "django", ".", "contrib", ".", "contenttypes", ".", "models", "import", "ContentType", "layer_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Layer", ")", "service_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Service", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "layer_type", ".", "id", ")", ":", "unindex_layer", "(", "issue", ".", "content_object", ".", "id", ",", "use_cache", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "service_type", ".", "id", ")", ":", "for", "layer", "in", "issue", ".", "content_object", ".", "layer_set", ".", "all", "(", ")", ":", "unindex_layer", "(", "layer", ".", "id", ",", "use_cache", ")" ]
r walks dpath lists returning all directories that match the requested pattern .
def matching_fpaths ( dpath_list , include_patterns , exclude_dirs = [ ] , greater_exclude_dirs = [ ] , exclude_patterns = [ ] , recursive = True ) : if isinstance ( dpath_list , six . string_types ) : dpath_list = [ dpath_list ] for dpath in dpath_list : for root , dname_list , fname_list in os . walk ( dpath ) : # Look at all subdirs subdirs = pathsplit_full ( relpath ( root , dpath ) ) # HACK: if any ( [ dir_ in greater_exclude_dirs for dir_ in subdirs ] ) : continue # Look at one subdir if basename ( root ) in exclude_dirs : continue _match = fnmatch . fnmatch for name in fname_list : # yeild filepaths that are included if any ( _match ( name , pat ) for pat in include_patterns ) : # ... and not excluded if not any ( _match ( name , pat ) for pat in exclude_patterns ) : fpath = join ( root , name ) yield fpath if not recursive : break
9,038
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1608-L1660
[ "def", "update", "(", "self", ",", "friendly_name", "=", "values", ".", "unset", ",", "status", "=", "values", ".", "unset", ")", ":", "return", "self", ".", "_proxy", ".", "update", "(", "friendly_name", "=", "friendly_name", ",", "status", "=", "status", ",", ")" ]
Python implementation of sed . NOT FINISHED
def sed ( regexpr , repl , force = False , recursive = False , dpath_list = None , fpath_list = None , verbose = None , include_patterns = None , exclude_patterns = [ ] ) : #_grep(r, [repl], dpath_list=dpath_list, recursive=recursive) if include_patterns is None : include_patterns = [ '*.py' , '*.pyx' , '*.pxi' , '*.cxx' , '*.cpp' , '*.hxx' , '*.hpp' , '*.c' , '*.h' , '*.html' , '*.tex' ] if dpath_list is None : dpath_list = [ os . getcwd ( ) ] if verbose is None : verbose = ut . NOT_QUIET if fpath_list is None : greater_exclude_dirs = get_standard_exclude_dnames ( ) exclude_dirs = [ ] fpath_generator = matching_fpaths ( dpath_list , include_patterns , exclude_dirs , greater_exclude_dirs = greater_exclude_dirs , recursive = recursive , exclude_patterns = exclude_patterns ) else : fpath_generator = fpath_list if verbose : print ( 'sed-ing %r' % ( dpath_list , ) ) print ( ' * regular expression : %r' % ( regexpr , ) ) print ( ' * replacement : %r' % ( repl , ) ) print ( ' * include_patterns : %r' % ( include_patterns , ) ) print ( ' * recursive: %r' % ( recursive , ) ) print ( ' * force: %r' % ( force , ) ) from utool import util_str print ( ' * fpath_list: %s' % ( util_str . repr3 ( fpath_list ) , ) ) regexpr = extend_regex ( regexpr ) #if '\x08' in regexpr: # print('Remember \\x08 != \\b') # print('subsituting for you for you') # regexpr = regexpr.replace('\x08', '\\b') # print(' * regular expression : %r' % (regexpr,)) # Walk through each directory recursively num_changed = 0 num_files_checked = 0 fpaths_changed = [ ] for fpath in fpath_generator : num_files_checked += 1 changed_lines = sedfile ( fpath , regexpr , repl , force , verbose = verbose ) if changed_lines is not None : fpaths_changed . append ( fpath ) num_changed += len ( changed_lines ) import utool as ut print ( 'num_files_checked = %r' % ( num_files_checked , ) ) print ( 'fpaths_changed = %s' % ( ut . repr3 ( sorted ( fpaths_changed ) ) , ) ) print ( 'total lines changed = %r' % ( num_changed , ) )
9,039
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1663-L1723
[ "def", "_get_nic_attachements", "(", "self", ",", "maximum_adapters", ")", ":", "nics", "=", "[", "]", "vm_info", "=", "yield", "from", "self", ".", "_get_vm_info", "(", ")", "for", "adapter_number", "in", "range", "(", "0", ",", "maximum_adapters", ")", ":", "entry", "=", "\"nic{}\"", ".", "format", "(", "adapter_number", "+", "1", ")", "if", "entry", "in", "vm_info", ":", "value", "=", "vm_info", "[", "entry", "]", "nics", ".", "append", "(", "value", ".", "lower", "(", ")", ")", "else", ":", "nics", ".", "append", "(", "None", ")", "return", "nics" ]
r greps for patterns Python implementation of grep . NOT FINISHED
def grep ( regex_list , recursive = True , dpath_list = None , include_patterns = None , exclude_dirs = [ ] , greater_exclude_dirs = None , inverse = False , exclude_patterns = [ ] , verbose = VERBOSE , fpath_list = None , reflags = 0 , cache = None ) : from utool import util_regex # from utool import util_str from utool import util_list if include_patterns is None : include_patterns = [ '*' ] # include_patterns = get_standard_include_patterns() if greater_exclude_dirs is None : greater_exclude_dirs = [ ] # greater_exclude_dirs = get_standard_exclude_dnames() # ensure list input if isinstance ( include_patterns , six . string_types ) : include_patterns = [ include_patterns ] if dpath_list is None : dpath_list = [ os . getcwd ( ) ] if verbose : recursive_stat_str = [ 'flat' , 'recursive' ] [ recursive ] print ( '[util_path] Greping (%s) %r for %r' % ( recursive_stat_str , dpath_list , regex_list ) ) print ( '[util_path] regex_list = %s' % ( regex_list ) ) if isinstance ( regex_list , six . string_types ) : regex_list = [ regex_list ] found_fpath_list = [ ] found_lines_list = [ ] found_lxs_list = [ ] # Walk through each directory recursively if fpath_list is None : fpath_generator = matching_fpaths ( dpath_list = dpath_list , include_patterns = include_patterns , exclude_dirs = exclude_dirs , greater_exclude_dirs = greater_exclude_dirs , exclude_patterns = exclude_patterns , recursive = recursive ) else : fpath_generator = fpath_list # from utool import util_regex # extended_regex_list, reflags = util_regex.extend_regex3(regex_list, reflags) # if verbose: # print('extended_regex_list = %r' % (extended_regex_list,)) # print('reflags = %r' % (reflags,)) _exprs_flags = [ util_regex . extend_regex2 ( expr , reflags ) for expr in regex_list ] extended_regex_list = util_list . take_column ( _exprs_flags , 0 ) reflags_list = util_list . 
take_column ( _exprs_flags , 1 ) # HACK reflags = reflags_list [ 0 ] # For each matching filepath for fpath in fpath_generator : # For each search pattern found_lines , found_lxs = grepfile ( fpath , extended_regex_list , reflags_list , cache = cache ) if inverse : if len ( found_lines ) == 0 : # Append files that the pattern was not found in found_fpath_list . append ( fpath ) found_lines_list . append ( [ ] ) found_lxs_list . append ( [ ] ) elif len ( found_lines ) > 0 : found_fpath_list . append ( fpath ) # regular matching found_lines_list . append ( found_lines ) found_lxs_list . append ( found_lxs ) grep_result = ( found_fpath_list , found_lines_list , found_lxs_list ) if verbose : print ( '==========' ) print ( '==========' ) print ( '[util_path] found matches in %d files' % len ( found_fpath_list ) ) print ( make_grep_resultstr ( grep_result , extended_regex_list , reflags ) ) # print('[util_path] found matches in %d files' % len(found_fpath_list)) # pat = util_regex.regex_or(extended_regex_list) # for fpath, found, lxs in zip(found_fpath_list, found_lines_list, # found_lxs_list): # if len(found) > 0: # print('----------------------') # print('Found %d line(s) in %r: ' % (len(found), fpath)) # name = split(fpath)[1] # max_line = len(lxs) # ndigits = str(len(str(max_line))) # fmt_str = '%s : %' + ndigits + 'd |%s' # for (lx, line) in zip(lxs, found): # # hack # colored_line = util_str.highlight_regex( # line.rstrip('\n'), pat, reflags=reflags) # print(fmt_str % (name, lx, colored_line)) #print('[util_path] found matches in %d files' % len(found_fpath_list)) return grep_result
9,040
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L1974-L2101
[ "def", "validate", "(", "self", ",", "data", ")", ":", "video_id", "=", "self", ".", "context", ".", "get", "(", "'video_id'", ")", "video", "=", "Video", ".", "get_or_none", "(", "edx_video_id", "=", "video_id", ")", "if", "not", "video", ":", "raise", "serializers", ".", "ValidationError", "(", "'Video \"{video_id}\" is not valid.'", ".", "format", "(", "video_id", "=", "video_id", ")", ")", "data", ".", "update", "(", "video", "=", "video", ")", "return", "data" ]
Gets the short path name of a given long path .
def get_win32_short_path_name ( long_name ) : import ctypes from ctypes import wintypes _GetShortPathNameW = ctypes . windll . kernel32 . GetShortPathNameW _GetShortPathNameW . argtypes = [ wintypes . LPCWSTR , wintypes . LPWSTR , wintypes . DWORD ] _GetShortPathNameW . restype = wintypes . DWORD output_buf_size = 0 while True : output_buf = ctypes . create_unicode_buffer ( output_buf_size ) needed = _GetShortPathNameW ( long_name , output_buf , output_buf_size ) if output_buf_size >= needed : short_name = output_buf . value break else : output_buf_size = needed return short_name
9,041
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2129-L2166
[ "def", "is_dtype_union_equal", "(", "source", ",", "target", ")", ":", "source", "=", "_get_dtype", "(", "source", ")", "target", "=", "_get_dtype", "(", "target", ")", "if", "is_categorical_dtype", "(", "source", ")", "and", "is_categorical_dtype", "(", "target", ")", ":", "# ordered False for both", "return", "source", ".", "ordered", "is", "target", ".", "ordered", "return", "is_dtype_equal", "(", "source", ",", "target", ")" ]
r Returns platform specific path for pyinstaller usage
def platform_path ( path ) : try : if path == '' : raise ValueError ( 'path cannot be the empty string' ) # get path relative to cwd path1 = truepath_relative ( path ) if sys . platform . startswith ( 'win32' ) : path2 = expand_win32_shortname ( path1 ) else : path2 = path1 except Exception as ex : util_dbg . printex ( ex , keys = [ 'path' , 'path1' , 'path2' ] ) raise return path2
9,042
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2191-L2238
[ "def", "cache_affected_objects_review_history", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Caching review_history ...\"", ")", "query", "=", "dict", "(", "portal_type", "=", "NEW_SENAITE_WORKFLOW_BINDINGS", ")", "brains", "=", "api", ".", "search", "(", "query", ",", "UID_CATALOG", ")", "total", "=", "len", "(", "brains", ")", "for", "num", ",", "brain", "in", "enumerate", "(", "brains", ")", ":", "if", "num", "%", "100", "==", "0", ":", "logger", ".", "info", "(", "\"Caching review_history: {}/{}\"", ".", "format", "(", "num", ",", "total", ")", ")", "review_history", "=", "get_review_history_for", "(", "brain", ")", "review_history_cache", "[", "api", ".", "get_uid", "(", "brain", ")", "]", "=", "review_history" ]
Search for the library
def find_lib_fpath ( libname , root_dir , recurse_down = True , verbose = False , debug = False ) : def get_lib_fname_list ( libname ) : """ input <libname>: library name (e.g. 'hesaff', not 'libhesaff') returns <libnames>: list of plausible library file names """ if sys . platform . startswith ( 'win32' ) : libnames = [ 'lib' + libname + '.dll' , libname + '.dll' ] elif sys . platform . startswith ( 'darwin' ) : libnames = [ 'lib' + libname + '.dylib' ] elif sys . platform . startswith ( 'linux' ) : libnames = [ 'lib' + libname + '.so' ] else : raise Exception ( 'Unknown operating system: %s' % sys . platform ) return libnames def get_lib_dpath_list ( root_dir ) : """ input <root_dir>: deepest directory to look for a library (dll, so, dylib) returns <libnames>: list of plausible directories to look. """ 'returns possible lib locations' get_lib_dpath_list = [ root_dir , join ( root_dir , 'lib' ) , join ( root_dir , 'build' ) , join ( root_dir , 'build' , 'lib' ) ] return get_lib_dpath_list lib_fname_list = get_lib_fname_list ( libname ) tried_fpaths = [ ] while root_dir is not None : for lib_fname in lib_fname_list : for lib_dpath in get_lib_dpath_list ( root_dir ) : lib_fpath = normpath ( join ( lib_dpath , lib_fname ) ) if exists ( lib_fpath ) : if verbose : print ( '\n[c] Checked: ' . join ( tried_fpaths ) ) if debug : print ( 'using: %r' % lib_fpath ) return lib_fpath else : # Remember which candiate library fpaths did not exist tried_fpaths . append ( lib_fpath ) _new_root = dirname ( root_dir ) if _new_root == root_dir : root_dir = None break else : root_dir = _new_root if not recurse_down : break msg = ( '\n[C!] load_clib(libname=%r root_dir=%r, recurse_down=%r, verbose=%r)' % ( libname , root_dir , recurse_down , verbose ) + '\n[c!] Cannot FIND dynamic library' ) print ( msg ) print ( '\n[c!] Checked: ' . join ( tried_fpaths ) ) raise ImportError ( msg )
9,043
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2320-L2379
[ "def", "query", "(", "self", ",", "domain", ")", ":", "result", "=", "{", "}", "try", ":", "result", "=", "self", ".", "pdns", ".", "query", "(", "domain", ")", "except", ":", "self", ".", "error", "(", "'Exception while querying passiveDNS. Check the domain format.'", ")", "# Clean the datetime problems in order to correct the json serializability", "clean_result", "=", "[", "]", "for", "ind", ",", "resultset", "in", "enumerate", "(", "result", ")", ":", "if", "resultset", ".", "get", "(", "'time_first'", ",", "None", ")", ":", "resultset", "[", "'time_first'", "]", "=", "resultset", ".", "get", "(", "'time_first'", ")", ".", "isoformat", "(", "' '", ")", "if", "resultset", ".", "get", "(", "'time_last'", ",", "None", ")", ":", "resultset", "[", "'time_last'", "]", "=", "resultset", ".", "get", "(", "'time_last'", ")", ".", "isoformat", "(", "' '", ")", "clean_result", ".", "append", "(", "resultset", ")", "return", "clean_result" ]
r replaces windows drives with mingw style drives
def ensure_mingw_drive ( win32_path ) : win32_drive , _path = splitdrive ( win32_path ) mingw_drive = '/' + win32_drive [ : - 1 ] . lower ( ) mingw_path = mingw_drive + _path return mingw_path
9,044
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2382-L2402
[ "def", "get_user_last_submissions", "(", "self", ",", "limit", "=", "5", ",", "request", "=", "None", ")", ":", "if", "request", "is", "None", ":", "request", "=", "{", "}", "request", ".", "update", "(", "{", "\"username\"", ":", "self", ".", "_user_manager", ".", "session_username", "(", ")", "}", ")", "# Before, submissions were first sorted by submission date, then grouped", "# and then resorted by submission date before limiting. Actually, grouping", "# and pushing, keeping the max date, followed by result filtering is much more", "# efficient", "data", "=", "self", ".", "_database", ".", "submissions", ".", "aggregate", "(", "[", "{", "\"$match\"", ":", "request", "}", ",", "{", "\"$group\"", ":", "{", "\"_id\"", ":", "{", "\"courseid\"", ":", "\"$courseid\"", ",", "\"taskid\"", ":", "\"$taskid\"", "}", ",", "\"submitted_on\"", ":", "{", "\"$max\"", ":", "\"$submitted_on\"", "}", ",", "\"submissions\"", ":", "{", "\"$push\"", ":", "{", "\"_id\"", ":", "\"$_id\"", ",", "\"result\"", ":", "\"$result\"", ",", "\"status\"", ":", "\"$status\"", ",", "\"courseid\"", ":", "\"$courseid\"", ",", "\"taskid\"", ":", "\"$taskid\"", ",", "\"submitted_on\"", ":", "\"$submitted_on\"", "}", "}", ",", "}", "}", ",", "{", "\"$project\"", ":", "{", "\"submitted_on\"", ":", "1", ",", "\"submissions\"", ":", "{", "# This could be replaced by $filter if mongo v3.2 is set as dependency", "\"$setDifference\"", ":", "[", "{", "\"$map\"", ":", "{", "\"input\"", ":", "\"$submissions\"", ",", "\"as\"", ":", "\"submission\"", ",", "\"in\"", ":", "{", "\"$cond\"", ":", "[", "{", "\"$eq\"", ":", "[", "\"$submitted_on\"", ",", "\"$$submission.submitted_on\"", "]", "}", ",", "\"$$submission\"", ",", "False", "]", "}", "}", "}", ",", "[", "False", "]", "]", "}", "}", "}", ",", "{", "\"$sort\"", ":", "{", "\"submitted_on\"", ":", "pymongo", ".", "DESCENDING", "}", "}", ",", "{", "\"$limit\"", ":", "limit", "}", "]", ")", "return", "[", "item", "[", "\"submissions\"", "]", "[", "0", "]", 
"for", "item", "in", "data", "]" ]
All paths above you
def ancestor_paths ( start = None , limit = { } ) : import utool as ut limit = ut . ensure_iterable ( limit ) limit = { expanduser ( p ) for p in limit } . union ( set ( limit ) ) if start is None : start = os . getcwd ( ) path = start prev = None while path != prev and prev not in limit : yield path prev = path path = dirname ( path )
9,045
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2436-L2450
[ "def", "HandleMessageBundles", "(", "self", ",", "request_comms", ",", "response_comms", ")", ":", "messages", ",", "source", ",", "timestamp", "=", "self", ".", "_communicator", ".", "DecodeMessages", "(", "request_comms", ")", "now", "=", "time", ".", "time", "(", ")", "if", "messages", ":", "# Receive messages in line.", "self", ".", "ReceiveMessages", "(", "source", ",", "messages", ")", "# We send the client a maximum of self.max_queue_size messages", "required_count", "=", "max", "(", "0", ",", "self", ".", "max_queue_size", "-", "request_comms", ".", "queue_size", ")", "tasks", "=", "[", "]", "message_list", "=", "rdf_flows", ".", "MessageList", "(", ")", "# Only give the client messages if we are able to receive them in a", "# reasonable time.", "if", "time", ".", "time", "(", ")", "-", "now", "<", "10", ":", "tasks", "=", "self", ".", "DrainTaskSchedulerQueueForClient", "(", "source", ",", "required_count", ")", "message_list", ".", "job", "=", "tasks", "# Encode the message_list in the response_comms using the same API version", "# the client used.", "self", ".", "_communicator", ".", "EncodeMessages", "(", "message_list", ",", "response_comms", ",", "destination", "=", "source", ",", "timestamp", "=", "timestamp", ",", "api_version", "=", "request_comms", ".", "api_version", ")", "return", "source", ",", "len", "(", "messages", ")" ]
searches for existing paths that meed a requirement
def search_candidate_paths ( candidate_path_list , candidate_name_list = None , priority_paths = None , required_subpaths = [ ] , verbose = None ) : import utool as ut if verbose is None : verbose = 0 if QUIET else 1 if verbose >= 1 : print ( '[search_candidate_paths] Searching for candidate paths' ) if candidate_name_list is not None : candidate_path_list_ = [ join ( dpath , fname ) for dpath , fname in itertools . product ( candidate_path_list , candidate_name_list ) ] else : candidate_path_list_ = candidate_path_list if priority_paths is not None : candidate_path_list_ = priority_paths + candidate_path_list_ return_path = None for path in candidate_path_list_ : if path is not None and exists ( path ) : if verbose >= 2 : print ( '[search_candidate_paths] Found candidate directory %r' % ( path , ) ) print ( '[search_candidate_paths] ... checking for approprate structure' ) # tomcat directory exists. Make sure it also contains a webapps dir subpath_list = [ join ( path , subpath ) for subpath in required_subpaths ] if all ( ut . checkpath ( path_ , verbose = verbose ) for path_ in subpath_list ) : return_path = path if verbose >= 2 : print ( '[search_candidate_paths] Found acceptable path' ) return return_path break if verbose >= 1 : print ( '[search_candidate_paths] Failed to find acceptable path' ) return return_path
9,046
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2453-L2525
[ "def", "win32_refresh_window", "(", "cls", ")", ":", "# Get console handle", "handle", "=", "windll", ".", "kernel32", ".", "GetConsoleWindow", "(", ")", "RDW_INVALIDATE", "=", "0x0001", "windll", ".", "user32", ".", "RedrawWindow", "(", "handle", ",", "None", ",", "None", ",", "c_uint", "(", "RDW_INVALIDATE", ")", ")" ]
Attempt to create a symbolic link .
def symlink ( real_path , link_path , overwrite = False , on_error = 'raise' , verbose = 2 ) : path = normpath ( real_path ) link = normpath ( link_path ) if verbose : print ( '[util_path] Creating symlink: path={} link={}' . format ( path , link ) ) if os . path . islink ( link ) : if verbose : print ( '[util_path] symlink already exists' ) os_readlink = getattr ( os , "readlink" , None ) if callable ( os_readlink ) : if os_readlink ( link ) == path : if verbose > 1 : print ( '[path] ... and points to the right place' ) return link else : print ( '[util_path] Warning, symlinks are not implemented on windows' ) if verbose > 1 : print ( '[util_path] ... but it points somewhere else' ) if overwrite : delete ( link , verbose > 1 ) elif on_error == 'ignore' : return False try : os_symlink = getattr ( os , "symlink" , None ) if callable ( os_symlink ) : os_symlink ( path , link ) else : win_shortcut ( path , link ) except Exception as ex : import utool as ut checkpath ( link , verbose = True ) checkpath ( path , verbose = True ) do_raise = ( on_error == 'raise' ) ut . printex ( ex , '[util_path] error making symlink' , iswarning = not do_raise ) if do_raise : raise return link
9,047
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2567-L2656
[ "def", "cart_db", "(", ")", ":", "config", "=", "_config_file", "(", ")", "_config_test", "(", "config", ")", "juicer", ".", "utils", ".", "Log", ".", "log_debug", "(", "\"Establishing cart connection:\"", ")", "cart_con", "=", "MongoClient", "(", "dict", "(", "config", ".", "items", "(", "config", ".", "sections", "(", ")", "[", "0", "]", ")", ")", "[", "'cart_host'", "]", ")", "cart_db", "=", "cart_con", ".", "carts", "return", "cart_db" ]
Removes all broken links in a directory
def remove_broken_links ( dpath , verbose = True ) : fname_list = [ join ( dpath , fname ) for fname in os . listdir ( dpath ) ] broken_links = list ( filterfalse ( exists , filter ( islink , fname_list ) ) ) num_broken = len ( broken_links ) if verbose : if verbose > 1 or num_broken > 0 : print ( '[util_path] Removing %d broken links in %r' % ( num_broken , dpath , ) ) for link in broken_links : os . unlink ( link ) return num_broken
9,048
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2659-L2707
[ "def", "create_token_response", "(", "self", ",", "request", ",", "token_handler", ")", ":", "headers", "=", "self", ".", "_get_default_headers", "(", ")", "try", ":", "if", "self", ".", "request_validator", ".", "client_authentication_required", "(", "request", ")", ":", "log", ".", "debug", "(", "'Authenticating client, %r.'", ",", "request", ")", "if", "not", "self", ".", "request_validator", ".", "authenticate_client", "(", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "elif", "not", "self", ".", "request_validator", ".", "authenticate_client_id", "(", "request", ".", "client_id", ",", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "log", ".", "debug", "(", "'Validating access token request, %r.'", ",", "request", ")", "self", ".", "validate_token_request", "(", "request", ")", "except", "errors", ".", "OAuth2Error", "as", "e", ":", "log", ".", "debug", "(", "'Client error in token request, %s.'", ",", "e", ")", "headers", ".", "update", "(", "e", ".", "headers", ")", "return", "headers", ",", "e", ".", "json", ",", "e", ".", "status_code", "token", "=", "token_handler", ".", "create_token", "(", "request", ",", "self", ".", "refresh_token", ")", "for", "modifier", "in", "self", ".", "_token_modifiers", ":", "token", "=", "modifier", "(", "token", ")", "self", ".", "request_validator", ".", "save_token", "(", "token", ",", "request", ")", "log", ".", "debug", "(", "'Issuing token %r to client id %r (%r) and username %s.'", ",", "token", ",", "request", ".", "client_id", ",", "request", ".", "client", ",", "request", ".", "username", ")", "return", "headers", ",", "json", ".", "dumps", "(", "token", ")", ",", "200" ]
r Searches for and finds a path garuenteed to not exist .
def non_existing_path ( path_ , dpath = None , offset = 0 , suffix = None , force_fmt = False ) : import utool as ut from os . path import basename , dirname if dpath is None : dpath = dirname ( path_ ) base_fmtstr = basename ( path_ ) if suffix is not None : base_fmtstr = ut . augpath ( base_fmtstr , suffix ) if '%' not in base_fmtstr : if not force_fmt : # If we have don't have to format, # then try to use the first choice first_choice = join ( dpath , base_fmtstr ) if not exists ( first_choice ) : return first_choice # otherwise we ensure we can format and we continue base_fmtstr = ut . augpath ( base_fmtstr , '%d' ) dname_list = ut . glob ( dpath , pattern = '*' , recursive = False , with_files = True , with_dirs = True ) conflict_set = set ( basename ( dname ) for dname in dname_list ) newname = ut . get_nonconflicting_string ( base_fmtstr , conflict_set , offset = offset ) newpath = join ( dpath , newname ) return newpath
9,049
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_path.py#L2710-L2779
[ "def", "mmGetPlotUnionSDRActivity", "(", "self", ",", "title", "=", "\"Union SDR Activity Raster\"", ",", "showReset", "=", "False", ",", "resetShading", "=", "0.25", ")", ":", "unionSDRTrace", "=", "self", ".", "mmGetTraceUnionSDR", "(", ")", ".", "data", "columnCount", "=", "self", ".", "getNumColumns", "(", ")", "activityType", "=", "\"Union SDR Activity\"", "return", "self", ".", "mmGetCellTracePlot", "(", "unionSDRTrace", ",", "columnCount", ",", "activityType", ",", "title", "=", "title", ",", "showReset", "=", "showReset", ",", "resetShading", "=", "resetShading", ")" ]
Creates an sqlite lookup table of scannrs with quant data .
def create_isobaric_quant_lookup ( quantdb , specfn_consensus_els , channelmap ) : # store quantchannels in lookup and generate a db_id vs channel map channels_store = ( ( name , ) for name , c_id in sorted ( channelmap . items ( ) , key = lambda x : x [ 1 ] ) ) quantdb . store_channelmap ( channels_store ) channelmap_dbid = { channelmap [ ch_name ] : ch_id for ch_id , ch_name in quantdb . get_channelmap ( ) } quants = [ ] mzmlmap = quantdb . get_mzmlfile_map ( ) for specfn , consensus_el in specfn_consensus_els : rt = openmsreader . get_consxml_rt ( consensus_el ) rt = round ( float ( Decimal ( rt ) / 60 ) , 12 ) qdata = get_quant_data ( consensus_el ) spectra_id = quantdb . get_spectra_id ( mzmlmap [ specfn ] , retention_time = rt ) for channel_no in sorted ( qdata . keys ( ) ) : quants . append ( ( spectra_id , channelmap_dbid [ channel_no ] , qdata [ channel_no ] ) ) if len ( quants ) == DB_STORE_CHUNK : quantdb . store_isobaric_quants ( quants ) quantdb . store_isobaric_quants ( quants ) quantdb . index_isobaric_quants ( )
9,050
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/quant.py#L9-L34
[ "def", "folder_shared_message", "(", "self", ",", "request", ",", "user", ",", "folder", ")", ":", "messages", ".", "success", "(", "request", ",", "_", "(", "\"Folder {} is now shared with {}\"", ".", "format", "(", "folder", ",", "user", ")", ")", ")" ]
Returns a dict of a specified amount of features from the ms1 quant database and the highest mz of those features
def get_precursors_from_window ( quantdb , minmz ) : featmap = { } mz = False features = quantdb . get_precursor_quant_window ( FEATURE_ALIGN_WINDOW_AMOUNT , minmz ) for feat_id , fn_id , charge , mz , rt in features : try : featmap [ fn_id ] [ charge ] . append ( ( mz , rt , feat_id ) ) except KeyError : try : featmap [ fn_id ] [ charge ] = [ ( mz , rt , feat_id ) ] except KeyError : featmap [ fn_id ] = { charge : [ ( mz , rt , feat_id ) ] } return featmap , mz
9,051
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/quant.py#L105-L120
[ "def", "on_redis_error", "(", "self", ",", "fname", ",", "exc_type", ",", "exc_value", ")", ":", "if", "self", ".", "shared_client", ":", "Storage", ".", "storage", "=", "None", "else", ":", "self", ".", "storage", "=", "None", "if", "self", ".", "context", ".", "config", ".", "REDIS_STORAGE_IGNORE_ERRORS", "is", "True", ":", "logger", ".", "error", "(", "\"[REDIS_STORAGE] %s\"", "%", "exc_value", ")", "if", "fname", "==", "'_exists'", ":", "return", "False", "return", "None", "else", ":", "raise", "exc_value" ]
Gets quant data from consensusXML element
def get_quant_data ( cons_el ) : quant_out = { } for reporter in cons_el . findall ( './/element' ) : quant_out [ reporter . attrib [ 'map' ] ] = reporter . attrib [ 'it' ] return quant_out
9,052
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/mslookup/quant.py#L139-L144
[ "def", "_remove_pi_crossings", "(", "ts", ")", ":", "orig_ts", "=", "ts", "if", "ts", ".", "ndim", "is", "1", ":", "ts", "=", "ts", "[", ":", ",", "np", ".", "newaxis", ",", "np", ".", "newaxis", "]", "elif", "ts", ".", "ndim", "is", "2", ":", "ts", "=", "ts", "[", ":", ",", "np", ".", "newaxis", "]", "# Get the indices of those variables that have range of approx -pi to pi", "tsmax", "=", "ts", ".", "max", "(", "axis", "=", "0", ")", "tsmin", "=", "ts", ".", "min", "(", "axis", "=", "0", ")", "phase_vars", "=", "np", ".", "transpose", "(", "np", ".", "nonzero", "(", "(", "np", ".", "abs", "(", "tsmax", "-", "np", ".", "pi", ")", "<", "0.01", ")", "&", "(", "np", ".", "abs", "(", "tsmin", "+", "np", ".", "pi", ")", "<", "0.01", ")", ")", ")", "if", "len", "(", "phase_vars", ")", "is", "0", ":", "return", "orig_ts", "else", ":", "ts", "=", "ts", ".", "copy", "(", ")", "for", "v", "in", "phase_vars", ":", "ts1", "=", "np", ".", "asarray", "(", "ts", "[", ":", ",", "v", "[", "0", "]", ",", "v", "[", "1", "]", "]", ")", "# time series of single variable", "ts1a", "=", "ts1", "[", "0", ":", "-", "1", "]", "ts1b", "=", "ts1", "[", "1", ":", "]", "p2", "=", "np", ".", "pi", "/", "2", "# Find time indices where phase crosses pi. Set those values to nan.", "pc", "=", "np", ".", "nonzero", "(", "(", "ts1a", ">", "p2", ")", "&", "(", "ts1b", "<", "-", "p2", ")", "|", "(", "ts1a", "<", "-", "p2", ")", "&", "(", "ts1b", ">", "p2", ")", ")", "[", "0", "]", "+", "1", "ts1", "[", "pc", "]", "=", "np", ".", "nan", "ts", "[", ":", ",", "v", "[", "0", "]", ",", "v", "[", "1", "]", "]", "=", "ts1", "return", "ts" ]
Standard platform specifier used by distutils
def get_plat_specifier ( ) : import setuptools # NOQA import distutils plat_name = distutils . util . get_platform ( ) plat_specifier = ".%s-%s" % ( plat_name , sys . version [ 0 : 3 ] ) if hasattr ( sys , 'gettotalrefcount' ) : plat_specifier += '-pydebug' return plat_specifier
9,053
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L66-L76
[ "def", "_read_config_file", "(", "args", ")", ":", "stage", "=", "args", ".", "stage", "with", "open", "(", "args", ".", "config", ",", "'rt'", ")", "as", "f", ":", "config", "=", "yaml", ".", "safe_load", "(", "f", ".", "read", "(", ")", ")", "STATE", "[", "'stages'", "]", "=", "config", "[", "'stages'", "]", "config", "[", "'config'", "]", "=", "_decrypt_item", "(", "config", "[", "'config'", "]", ",", "stage", "=", "stage", ",", "key", "=", "''", ",", "render", "=", "True", ")", "return", "config", "[", "'stages'", "]", ",", "config", "[", "'config'", "]" ]
FIXME ; hacky way of finding python library . Not cross platform yet .
def get_system_python_library ( ) : import os import utool as ut from os . path import basename , realpath pyname = basename ( realpath ( sys . executable ) ) ld_library_path = os . environ [ 'LD_LIBRARY_PATH' ] libdirs = [ x for x in ld_library_path . split ( os . pathsep ) if x ] + [ '/usr/lib' ] libfiles = ut . flatten ( [ ut . glob ( d , '*' + ut . get_lib_ext ( ) , recursive = True ) for d in libdirs ] ) python_libs = [ realpath ( f ) for f in libfiles if 'lib' + pyname in basename ( f ) ] python_libs = ut . unique_ordered ( python_libs ) assert len ( python_libs ) == 1 , str ( python_libs ) return python_libs [ 0 ]
9,054
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L88-L102
[ "def", "update_qos_aggregated_configuration", "(", "self", ",", "qos_configuration", ",", "timeout", "=", "-", "1", ")", ":", "uri", "=", "\"{}{}\"", ".", "format", "(", "self", ".", "data", "[", "\"uri\"", "]", ",", "self", ".", "QOS_AGGREGATED_CONFIGURATION", ")", "return", "self", ".", "_helper", ".", "update", "(", "qos_configuration", ",", "uri", "=", "uri", ",", "timeout", "=", "timeout", ")" ]
Executes tools for inspecting dynamic library dependencies depending on the current platform .
def get_dynlib_dependencies ( lib_path ) : if LINUX : ldd_fpath = '/usr/bin/ldd' depend_out , depend_err , ret = cmd ( ldd_fpath , lib_path , verbose = False ) elif DARWIN : otool_fpath = '/opt/local/bin/otool' depend_out , depend_err , ret = cmd ( otool_fpath , '-L' , lib_path , verbose = False ) elif WIN32 : depend_out , depend_err , ret = cmd ( 'objdump' , '-p' , lib_path , verbose = False ) #fnmatch.filter(depend_out.split('\n'), '*DLL*') relevant_lines = [ line for line in depend_out . splitlines ( ) if 'DLL Name:' in line ] depend_out = '\n' . join ( relevant_lines ) assert ret == 0 , 'bad dependency check' return depend_out
9,055
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L329-L346
[ "def", "get", "(", "self", ",", "event", ")", ":", "try", ":", "data", ",", "schema", ",", "user", ",", "client", "=", "self", ".", "_get_args", "(", "event", ")", "except", "AttributeError", ":", "return", "object_filter", "=", "self", ".", "_get_filter", "(", "event", ")", "if", "'subscribe'", "in", "data", ":", "do_subscribe", "=", "data", "[", "'subscribe'", "]", "is", "True", "else", ":", "do_subscribe", "=", "False", "try", ":", "uuid", "=", "str", "(", "data", "[", "'uuid'", "]", ")", "except", "(", "KeyError", ",", "TypeError", ")", ":", "uuid", "=", "\"\"", "opts", "=", "schemastore", "[", "schema", "]", ".", "get", "(", "'options'", ",", "{", "}", ")", "hidden", "=", "opts", ".", "get", "(", "'hidden'", ",", "[", "]", ")", "if", "object_filter", "==", "{", "}", ":", "if", "uuid", "==", "\"\"", ":", "self", ".", "log", "(", "'Object with no filter/uuid requested:'", ",", "schema", ",", "data", ",", "lvl", "=", "warn", ")", "return", "object_filter", "=", "{", "'uuid'", ":", "uuid", "}", "storage_object", "=", "None", "storage_object", "=", "objectmodels", "[", "schema", "]", ".", "find_one", "(", "object_filter", ")", "if", "not", "storage_object", ":", "self", ".", "_cancel_by_error", "(", "event", ",", "uuid", "+", "'('", "+", "str", "(", "object_filter", ")", "+", "') of '", "+", "schema", "+", "' unavailable'", ")", "return", "if", "storage_object", ":", "self", ".", "log", "(", "\"Object found, checking permissions: \"", ",", "data", ",", "lvl", "=", "verbose", ")", "if", "not", "self", ".", "_check_permissions", "(", "user", ",", "'read'", ",", "storage_object", ")", ":", "self", ".", "_cancel_by_permission", "(", "schema", ",", "data", ",", "event", ")", "return", "for", "field", "in", "hidden", ":", "storage_object", ".", "_fields", ".", "pop", "(", "field", ",", "None", ")", "if", "do_subscribe", "and", "uuid", "!=", "\"\"", ":", "self", ".", "_add_subscription", "(", "uuid", ",", "event", ")", "result", "=", "{", "'component'", ":", 
"'hfos.events.objectmanager'", ",", "'action'", ":", "'get'", ",", "'data'", ":", "{", "'schema'", ":", "schema", ",", "'uuid'", ":", "uuid", ",", "'object'", ":", "storage_object", ".", "serializablefields", "(", ")", "}", "}", "self", ".", "_respond", "(", "None", ",", "result", ",", "event", ")" ]
Uses default program defined by the system to open a file .
def startfile ( fpath , detatch = True , quote = False , verbose = False , quiet = True ) : print ( '[cplat] startfile(%r)' % fpath ) fpath = normpath ( fpath ) # print('[cplat] fpath=%s' % fpath) if not exists ( fpath ) : raise Exception ( 'Cannot start nonexistant file: %r' % fpath ) #if quote: # fpath = '"%s"' % (fpath,) if not WIN32 : fpath = pipes . quote ( fpath ) if LINUX : #out, err, ret = cmd(['xdg-open', fpath], detatch=True) outtup = cmd ( ( 'xdg-open' , fpath ) , detatch = detatch , verbose = verbose , quiet = quiet ) #outtup = cmd('xdg-open', fpath, detatch=detatch) elif DARWIN : outtup = cmd ( ( 'open' , fpath ) , detatch = detatch , verbose = verbose , quiet = quiet ) elif WIN32 : os . startfile ( fpath ) else : raise RuntimeError ( 'Unknown Platform' ) if outtup is not None : out , err , ret = outtup if not ret : raise Exception ( out + ' -- ' + err ) pass
9,056
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L465-L495
[ "def", "remove_tags", "(", "TagKeys", ",", "DomainName", "=", "None", ",", "ARN", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "ARN", "is", "None", ":", "if", "DomainName", "is", "None", ":", "raise", "SaltInvocationError", "(", "'One (but not both) of ARN or '", "'domain must be specified.'", ")", "domaindata", "=", "status", "(", "DomainName", "=", "DomainName", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "not", "domaindata", "or", "'domain'", "not", "in", "domaindata", ":", "log", ".", "warning", "(", "'Domain tags not updated'", ")", "return", "{", "'tagged'", ":", "False", "}", "ARN", "=", "domaindata", ".", "get", "(", "'domain'", ",", "{", "}", ")", ".", "get", "(", "'ARN'", ")", "elif", "DomainName", "is", "not", "None", ":", "raise", "SaltInvocationError", "(", "'One (but not both) of ARN or '", "'domain must be specified.'", ")", "conn", ".", "remove_tags", "(", "ARN", "=", "domaindata", ".", "get", "(", "'domain'", ",", "{", "}", ")", ".", "get", "(", "'ARN'", ")", ",", "TagKeys", "=", "TagKeys", ")", "return", "{", "'tagged'", ":", "True", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'tagged'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
View a directory in the operating system file browser . Currently supports windows explorer mac open and linux nautilus .
def view_directory ( dname = None , fname = None , verbose = True ) : from utool . util_arg import STRICT from utool . util_path import checkpath # from utool.util_str import SINGLE_QUOTE, DOUBLE_QUOTE if HAVE_PATHLIB and isinstance ( dname , pathlib . Path ) : dname = str ( dname ) if verbose : print ( '[cplat] view_directory(%r) ' % dname ) dname = os . getcwd ( ) if dname is None else dname open_prog = { 'win32' : 'explorer.exe' , 'linux' : 'nautilus' , 'darwin' : 'open' } [ OS_TYPE ] dname = normpath ( dname ) if STRICT : assert checkpath ( dname , verbose = verbose ) , 'directory doesnt exit' if fname is not None and OS_TYPE == 'linux' : arg = join ( dname , fname ) else : arg = dname # if ' ' in dname and not dname.startswith((SINGLE_QUOTE, DOUBLE_QUOTE)): # # Ensure quotations # dname = '"%s"' % dname # if not WIN32: # arg = dname # # arg = subprocess.list2cmdline([dname]) # # arg = pipes.quote(dname) # else: # arg = dname # spawn and detatch process args = ( open_prog , arg ) print ( subprocess . list2cmdline ( args ) ) subprocess . Popen ( args )
9,057
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L544-L622
[ "async", "def", "_wait_exponentially", "(", "self", ",", "exception", ",", "max_wait_time", "=", "300", ")", ":", "wait_time", "=", "min", "(", "(", "2", "**", "self", ".", "_connection_attempts", ")", "+", "random", ".", "random", "(", ")", ",", "max_wait_time", ")", "try", ":", "wait_time", "=", "exception", ".", "response", "[", "\"headers\"", "]", "[", "\"Retry-After\"", "]", "except", "(", "KeyError", ",", "AttributeError", ")", ":", "pass", "self", ".", "_logger", ".", "debug", "(", "\"Waiting %s seconds before reconnecting.\"", ",", "wait_time", ")", "await", "asyncio", ".", "sleep", "(", "float", "(", "wait_time", ")", ")" ]
Returns a directory which should be writable for any application . This should be used for temporary deletable data .
def platform_cache_dir ( ) : if WIN32 : # nocover dpath_ = '~/AppData/Local' elif LINUX : # nocover dpath_ = '~/.cache' elif DARWIN : # nocover dpath_ = '~/Library/Caches' else : # nocover raise NotImplementedError ( 'Unknown Platform %r' % ( sys . platform , ) ) dpath = normpath ( expanduser ( dpath_ ) ) return dpath
9,058
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L633-L647
[ "def", "trace_integration", "(", "tracer", "=", "None", ")", ":", "log", ".", "info", "(", "'Integrated module: {}'", ".", "format", "(", "MODULE_NAME", ")", ")", "# Wrap the httplib request function", "request_func", "=", "getattr", "(", "httplib", ".", "HTTPConnection", ",", "HTTPLIB_REQUEST_FUNC", ")", "wrapped_request", "=", "wrap_httplib_request", "(", "request_func", ")", "setattr", "(", "httplib", ".", "HTTPConnection", ",", "request_func", ".", "__name__", ",", "wrapped_request", ")", "# Wrap the httplib response function", "response_func", "=", "getattr", "(", "httplib", ".", "HTTPConnection", ",", "HTTPLIB_RESPONSE_FUNC", ")", "wrapped_response", "=", "wrap_httplib_response", "(", "response_func", ")", "setattr", "(", "httplib", ".", "HTTPConnection", ",", "response_func", ".", "__name__", ",", "wrapped_response", ")" ]
When shell is True , Popen will only accept strings , no tuples . Shell really should not be True .
def __parse_cmd_args ( args , sudo , shell ) : # Case where tuple is passed in as only argument if isinstance ( args , tuple ) and len ( args ) == 1 and isinstance ( args [ 0 ] , tuple ) : args = args [ 0 ] if shell : # When shell is True, ensure args is a string if isinstance ( args , six . string_types ) : pass elif isinstance ( args , ( list , tuple ) ) and len ( args ) > 1 : args = ' ' . join ( args ) elif isinstance ( args , ( list , tuple ) ) and len ( args ) == 1 : if isinstance ( args [ 0 ] , ( tuple , list ) ) : args = ' ' . join ( args ) elif isinstance ( args [ 0 ] , six . string_types ) : args = args [ 0 ] else : # When shell is False, ensure args is a tuple if isinstance ( args , six . string_types ) : args = shlex . split ( args , posix = not WIN32 ) elif isinstance ( args , ( list , tuple ) ) : if len ( args ) > 1 : args = tuple ( args ) elif len ( args ) == 1 : if isinstance ( args [ 0 ] , ( tuple , list ) ) : args = tuple ( args [ 0 ] ) elif isinstance ( args [ 0 ] , six . string_types ) : args = shlex . split ( args [ 0 ] , posix = not WIN32 ) if sudo is True : if not WIN32 : if shell : args = 'sudo ' + args else : args = tuple ( [ 'sudo' ] ) + tuple ( args ) #if isinstance(args, six.string_types): # args = shlex.split(args) #args = ['sudo'] + args ## using sudo means we need to use a single string I believe #args = ' '.join(args) else : # TODO: strip out sudos pass # HACK FOR WINDOWS AGAIN # makes this command work: # python -c "import utool as ut; ut.cmd('build\\hesaffexe.exe ' + ut.grab_test_imgpath('star.png'))" # and this should still work # python -c "import utool as ut; ut.cmd('build\\hesaffexe.exe', ut.grab_test_imgpath('star.png'))" if WIN32 : if len ( args ) == 1 and isinstance ( args [ 0 ] , six . string_types ) : args = shlex . split ( args [ 0 ] , posix = not WIN32 ) return args
9,059
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L714-L790
[ "def", "get_monitors", "(", "self", ")", "->", "List", "[", "Monitor", "]", ":", "raw_monitors", "=", "self", ".", "_zm_request", "(", "'get'", ",", "ZoneMinder", ".", "MONITOR_URL", ")", "if", "not", "raw_monitors", ":", "_LOGGER", ".", "warning", "(", "\"Could not fetch monitors from ZoneMinder\"", ")", "return", "[", "]", "monitors", "=", "[", "]", "for", "raw_result", "in", "raw_monitors", "[", "'monitors'", "]", ":", "_LOGGER", ".", "debug", "(", "\"Initializing camera %s\"", ",", "raw_result", "[", "'Monitor'", "]", "[", "'Id'", "]", ")", "monitors", ".", "append", "(", "Monitor", "(", "self", ",", "raw_result", ")", ")", "return", "monitors" ]
Trying to clean up cmd
def cmd2 ( command , shell = False , detatch = False , verbose = False , verbout = None ) : import shlex if isinstance ( command , ( list , tuple ) ) : raise ValueError ( 'command tuple not supported yet' ) args = shlex . split ( command , posix = not WIN32 ) if verbose is True : verbose = 2 if verbout is None : verbout = verbose >= 1 if verbose >= 2 : print ( '+=== START CMD2 ===' ) print ( 'Command:' ) print ( command ) if verbout : print ( '----' ) print ( 'Stdout:' ) proc = subprocess . Popen ( args , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , shell = shell , universal_newlines = True ) if detatch : info = { 'proc' : proc } else : write_fn = sys . stdout . write flush_fn = sys . stdout . flush logged_out = [ ] for line in _run_process ( proc ) : #line_ = line if six.PY2 else line.decode('utf-8') line_ = line if six . PY2 else line if len ( line_ ) > 0 : if verbout : write_fn ( line_ ) flush_fn ( ) logged_out . append ( line ) try : from utool import util_str # NOQA # out = '\n'.join(logged_out) out = '' . join ( logged_out ) except UnicodeDecodeError : from utool import util_str # NOQA logged_out = util_str . ensure_unicode_strlist ( logged_out ) # out = '\n'.join(logged_out) out = '' . join ( logged_out ) # print('logged_out = %r' % (logged_out,)) # raise ( out_ , err ) = proc . communicate ( ) ret = proc . wait ( ) info = { 'out' : out , 'err' : err , 'ret' : ret , } if verbose >= 2 : print ( 'L___ END CMD2 ___' ) return info
9,060
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L1005-L1072
[ "def", "get_comments", "(", "self", ",", "sharekey", "=", "None", ")", ":", "if", "not", "sharekey", ":", "raise", "Exception", "(", "\"You must specify a sharekey of the file you\"", "\"want to 'like'.\"", ")", "endpoint", "=", "'/api/sharedfile/{0}/comments'", ".", "format", "(", "sharekey", ")", "data", "=", "self", ".", "_make_request", "(", "\"GET\"", ",", "endpoint", "=", "endpoint", ")", "return", "[", "Comment", ".", "NewFromJSON", "(", "c", ")", "for", "c", "in", "data", "[", "'comments'", "]", "]" ]
Searches your PATH to see if fname exists
def search_env_paths ( fname , key_list = None , verbose = None ) : import utool as ut # from os.path import join if key_list is None : key_list = [ key for key in os . environ if key . find ( 'PATH' ) > - 1 ] print ( 'key_list = %r' % ( key_list , ) ) found = ut . ddict ( list ) for key in key_list : dpath_list = os . environ [ key ] . split ( os . pathsep ) for dpath in dpath_list : #if verbose: # print('dpath = %r' % (dpath,)) # testname = join(dpath, fname) matches = ut . glob ( dpath , fname ) found [ key ] . extend ( matches ) #import fnmatch #import utool #utool.embed() #if ut.checkpath(testname, verbose=False): # if verbose: # print('Found in key=%r' % (key,)) # ut.checkpath(testname, verbose=True, info=True) # found += [testname] return dict ( found )
9,061
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L1187-L1238
[ "def", "create_api_call", "(", "func", ",", "settings", ")", ":", "def", "base_caller", "(", "api_call", ",", "_", ",", "*", "args", ")", ":", "\"\"\"Simply call api_call and ignore settings.\"\"\"", "return", "api_call", "(", "*", "args", ")", "def", "inner", "(", "request", ",", "options", "=", "None", ")", ":", "\"\"\"Invoke with the actual settings.\"\"\"", "this_options", "=", "_merge_options_metadata", "(", "options", ",", "settings", ")", "this_settings", "=", "settings", ".", "merge", "(", "this_options", ")", "if", "this_settings", ".", "retry", "and", "this_settings", ".", "retry", ".", "retry_codes", ":", "api_call", "=", "gax", ".", "retry", ".", "retryable", "(", "func", ",", "this_settings", ".", "retry", ",", "*", "*", "this_settings", ".", "kwargs", ")", "else", ":", "api_call", "=", "gax", ".", "retry", ".", "add_timeout_arg", "(", "func", ",", "this_settings", ".", "timeout", ",", "*", "*", "this_settings", ".", "kwargs", ")", "api_call", "=", "_catch_errors", "(", "api_call", ",", "gax", ".", "config", ".", "API_ERRORS", ")", "return", "api_caller", "(", "api_call", ",", "this_settings", ",", "request", ")", "if", "settings", ".", "page_descriptor", ":", "if", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "raise", "ValueError", "(", "'The API call has incompatible settings: '", "'bundling and page streaming'", ")", "api_caller", "=", "_page_streamable", "(", "settings", ".", "page_descriptor", ")", "elif", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "api_caller", "=", "_bundleable", "(", "settings", ".", "bundle_descriptor", ")", "else", ":", "api_caller", "=", "base_caller", "return", "inner" ]
Only works on unix systems ( only tested on Ubuntu GNOME ) . Changes text on terminal title for identifying debugging tasks .
def change_term_title ( title ) : if True : # Disabled return if not WIN32 : #print("CHANGE TERM TITLE to %r" % (title,)) if title : #os.environ['PS1'] = os.environ['PS1'] + '''"\e]2;\"''' + title + '''\"\a"''' cmd_str = r'''echo -en "\033]0;''' + title + '''\a"''' os . system ( cmd_str )
9,062
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L1264-L1300
[ "def", "_check_rest_version", "(", "self", ",", "version", ")", ":", "version", "=", "str", "(", "version", ")", "if", "version", "not", "in", "self", ".", "supported_rest_versions", ":", "msg", "=", "\"Library is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "array_rest_versions", "=", "self", ".", "_list_available_rest_versions", "(", ")", "if", "version", "not", "in", "array_rest_versions", ":", "msg", "=", "\"Array is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "return", "LooseVersion", "(", "version", ")" ]
WARNING POTENTIALLY DANGEROUS AND MAY NOT WORK
def unload_module ( modname ) : import sys import gc if modname in sys . modules : referrer_list = gc . get_referrers ( sys . modules [ modname ] ) #module = sys.modules[modname] for referer in referrer_list : if referer is not sys . modules : referer [ modname ] = None #del referer[modname] #sys.modules[modname] = module #del module refcount = sys . getrefcount ( sys . modules [ modname ] ) print ( '%s refcount=%r' % ( modname , refcount ) ) del sys . modules [ modname ]
9,063
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_cplat.py#L1450-L1486
[ "def", "complete_upload", "(", "self", ")", ":", "xml", "=", "self", ".", "to_xml", "(", ")", "return", "self", ".", "bucket", ".", "complete_multipart_upload", "(", "self", ".", "key_name", ",", "self", ".", "id", ",", "xml", ")" ]
Generic function that takes a peptide or protein table and adds quant data from ANOTHER such table .
def base_add_isoquant_data ( features , quantfeatures , acc_col , quantacc_col , quantfields ) : quant_map = get_quantmap ( quantfeatures , quantacc_col , quantfields ) for feature in features : feat_acc = feature [ acc_col ] outfeat = { k : v for k , v in feature . items ( ) } try : outfeat . update ( quant_map [ feat_acc ] ) except KeyError : outfeat . update ( { field : 'NA' for field in quantfields } ) yield outfeat
9,064
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/shared/pepprot_isoquant.py#L4-L16
[ "def", "pull_session", "(", "session_id", "=", "None", ",", "url", "=", "'default'", ",", "io_loop", "=", "None", ",", "arguments", "=", "None", ")", ":", "coords", "=", "_SessionCoordinates", "(", "session_id", "=", "session_id", ",", "url", "=", "url", ")", "session", "=", "ClientSession", "(", "session_id", "=", "session_id", ",", "websocket_url", "=", "websocket_url_for_server_url", "(", "coords", ".", "url", ")", ",", "io_loop", "=", "io_loop", ",", "arguments", "=", "arguments", ")", "session", ".", "pull", "(", ")", "return", "session" ]
Runs through proteins that are in a quanted protein table extracts and maps their information based on the quantfields list input . Map is a dict with protein_accessions as keys .
def get_quantmap ( features , acc_col , quantfields ) : qmap = { } for feature in features : feat_acc = feature . pop ( acc_col ) qmap [ feat_acc ] = { qf : feature [ qf ] for qf in quantfields } return qmap
9,065
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/actions/shared/pepprot_isoquant.py#L19-L27
[ "def", "check_repository_url", "(", "self", ")", ":", "repository_url", "=", "self", ".", "repository_config", "[", "'repository'", "]", "if", "repository_url", ".", "startswith", "(", "(", "repository", ".", "LEGACY_PYPI", ",", "repository", ".", "LEGACY_TEST_PYPI", ")", ")", ":", "raise", "exceptions", ".", "UploadToDeprecatedPyPIDetected", ".", "from_args", "(", "repository_url", ",", "utils", ".", "DEFAULT_REPOSITORY", ",", "utils", ".", "TEST_REPOSITORY", ")" ]
Separates varied from non - varied parameters in a list of configs
def partition_varied_cfg_list ( cfg_list , default_cfg = None , recursive = False ) : import utool as ut if default_cfg is None : nonvaried_cfg = reduce ( ut . dict_intersection , cfg_list ) else : nonvaried_cfg = reduce ( ut . dict_intersection , [ default_cfg ] + cfg_list ) nonvaried_keys = list ( nonvaried_cfg . keys ( ) ) varied_cfg_list = [ ut . delete_dict_keys ( cfg . copy ( ) , nonvaried_keys ) for cfg in cfg_list ] if recursive : # Find which varied keys have dict values varied_keys = list ( set ( [ key for cfg in varied_cfg_list for key in cfg ] ) ) varied_vals_list = [ [ cfg [ key ] for cfg in varied_cfg_list if key in cfg ] for key in varied_keys ] for key , varied_vals in zip ( varied_keys , varied_vals_list ) : if len ( varied_vals ) == len ( cfg_list ) : if all ( [ isinstance ( val , dict ) for val in varied_vals ] ) : nonvaried_subdict , varied_subdicts = partition_varied_cfg_list ( varied_vals , recursive = recursive ) nonvaried_cfg [ key ] = nonvaried_subdict for cfg , subdict in zip ( varied_cfg_list , varied_subdicts ) : cfg [ key ] = subdict return nonvaried_cfg , varied_cfg_list
9,066
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L210-L264
[ "def", "create_option_group", "(", "name", ",", "engine_name", ",", "major_engine_version", ",", "option_group_description", ",", "tags", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "res", "=", "__salt__", "[", "'boto_rds.option_group_exists'", "]", "(", "name", ",", "tags", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "if", "res", ".", "get", "(", "'exists'", ")", ":", "return", "{", "'exists'", ":", "bool", "(", "res", ")", "}", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "not", "conn", ":", "return", "{", "'results'", ":", "bool", "(", "conn", ")", "}", "taglist", "=", "_tag_doc", "(", "tags", ")", "rds", "=", "conn", ".", "create_option_group", "(", "OptionGroupName", "=", "name", ",", "EngineName", "=", "engine_name", ",", "MajorEngineVersion", "=", "major_engine_version", ",", "OptionGroupDescription", "=", "option_group_description", ",", "Tags", "=", "taglist", ")", "return", "{", "'exists'", ":", "bool", "(", "rds", ")", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
Formats a flat configuration dict into a short string label . This is useful for re - creating command line strings .
def get_cfg_lbl ( cfg , name = None , nonlbl_keys = INTERNAL_CFGKEYS , key_order = None , with_name = True , default_cfg = None , sep = '' ) : import utool as ut if name is None : name = cfg . get ( '_cfgname' , '' ) if default_cfg is not None : # Remove defaulted labels cfg = ut . partition_varied_cfg_list ( [ cfg ] , default_cfg ) [ 1 ] [ 0 ] # remove keys that should not belong to the label _clean_cfg = ut . delete_keys ( cfg . copy ( ) , nonlbl_keys ) _lbl = ut . repr4 ( _clean_cfg , explicit = True , nl = False , strvals = True , key_order = key_order , itemsep = sep ) # _search = ['dict(', ')', ' '] _search = [ 'dict(' , ')' ] _repl = [ '' ] * len ( _search ) _lbl = ut . multi_replace ( _lbl , _search , _repl ) . rstrip ( ',' ) if not with_name : return _lbl if NAMEVARSEP in name : # hack for when name contains a little bit of the _lbl # VERY HACKY TO PARSE OUT PARTS OF THE GIVEN NAME. hacked_name , _cfgstr , _ = parse_cfgstr_name_options ( name ) _cfgstr_options_list = re . split ( r',\s*' + ut . negative_lookahead ( r'[^\[\]]*\]' ) , _cfgstr ) #cfgstr_options_list = cfgopt_strs.split(',') _cfg_options = ut . parse_cfgstr_list ( _cfgstr_options_list , smartcast = False , oldmode = False ) # ut . delete_keys ( _cfg_options , cfg . keys ( ) ) _preflbl = ut . repr4 ( _cfg_options , explicit = True , nl = False , strvals = True ) _preflbl = ut . multi_replace ( _preflbl , _search , _repl ) . rstrip ( ',' ) hacked_name += NAMEVARSEP + _preflbl ### cfg_lbl = hacked_name + _lbl else : cfg_lbl = name + NAMEVARSEP + _lbl return cfg_lbl
9,067
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L267-L356
[ "def", "set", "(", "self", ",", "data", "=", "None", ")", ":", "self", ".", "__data", "=", "data", "self", ".", "__exception", "=", "None", "self", ".", "__event", ".", "set", "(", ")" ]
Parses config strings . By looking up name in a dict of configs
def parse_cfgstr_list2 ( cfgstr_list , named_defaults_dict = None , cfgtype = None , alias_keys = None , valid_keys = None , expand_nested = True , strict = True , special_join_dict = None , is_nestedcfgtype = False , metadata = None ) : import utool as ut #with ut.Indenter(' '): cfg_combos_list = [ ] cfgstr_list_ = [ ] # special named defaults assignment dyndef_named_defaults = { } for cfgstr in cfgstr_list : if cfgstr . find ( '=:' ) > - 1 : cfgname , cfgopt_strs , subx = parse_cfgstr_name_options ( cfgstr ) assert cfgname . endswith ( '=' ) cfgname = cfgname [ : - 1 ] base_cfg_list = lookup_base_cfg_list ( cfgname , named_defaults_dict , metadata = metadata ) cfg_options = noexpand_parse_cfgstrs ( cfgopt_strs ) dyndef_named_defaults [ cfgname ] = cfg_options else : cfgstr_list_ . append ( cfgstr ) if len ( dyndef_named_defaults ) > 0 and named_defaults_dict is None : named_defaults_dict = dyndef_named_defaults for cfgstr in cfgstr_list_ : cfg_combos = [ ] # Parse special joined cfg case if cfgstr . find ( '::' ) > - 1 : special_cfgstr_list = cfgstr . split ( '::' ) # Recursive call special_combo_list = parse_cfgstr_list2 ( special_cfgstr_list , named_defaults_dict = named_defaults_dict , cfgtype = cfgtype , alias_keys = alias_keys , valid_keys = valid_keys , strict = strict , expand_nested = expand_nested , is_nestedcfgtype = False , metadata = metadata ) if special_join_dict is not None : for special_combo in special_combo_list : for cfg in special_combo : cfg . update ( special_join_dict ) if is_nestedcfgtype : cfg_combo = tuple ( [ combo for combo in special_combo_list ] ) else : # not sure if this is right cfg_combo = special_combo_list # FIXME DUPLICATE CODE if expand_nested : cfg_combos . extend ( cfg_combo ) else : #print('Appending: ' + str(ut.depth_profile(cfg_combo))) #if ut.depth_profile(cfg_combo) == [1, 9]: # ut.embed() cfg_combos_list . 
append ( cfg_combo ) else : # Normal Case cfgname , cfgopt_strs , subx = parse_cfgstr_name_options ( cfgstr ) # -- # Lookup named default settings try : base_cfg_list = lookup_base_cfg_list ( cfgname , named_defaults_dict , metadata = metadata ) except Exception as ex : ut . printex ( ex , keys = [ 'cfgstr_list' , 'cfgstr_list_' ] ) raise # -- for base_cfg in base_cfg_list : print ( 'cfgname = %r' % ( cfgname , ) ) print ( 'cfgopt_strs = %r' % ( cfgopt_strs , ) ) print ( 'base_cfg = %r' % ( base_cfg , ) ) print ( 'alias_keys = %r' % ( alias_keys , ) ) print ( 'cfgtype = %r' % ( cfgtype , ) ) print ( 'offset = %r' % ( len ( cfg_combos ) , ) ) print ( 'valid_keys = %r' % ( valid_keys , ) ) print ( 'strict = %r' % ( strict , ) ) cfg_combo = customize_base_cfg ( cfgname , cfgopt_strs , base_cfg , cfgtype , alias_keys , valid_keys , strict = strict , offset = len ( cfg_combos ) ) if is_nestedcfgtype : cfg_combo = [ cfg_combo ] if expand_nested : cfg_combos . extend ( cfg_combo ) else : cfg_combos_list . append ( cfg_combo ) # SUBX Cannot work here because of acfg hackiness #if subx is not None: # cfg_combo = ut.take(cfg_combo, subx) if expand_nested : cfg_combos_list . append ( cfg_combos ) # print('Updated to: ' + str(ut.depth_profile(cfg_combos_list))) #print('Returning len(cfg_combos_list) = %r' % (len(cfg_combos_list),)) return cfg_combos_list
9,068
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L1078-L1294
[ "def", "is_fresh", "(", "self", ",", "freshness", ")", ":", "if", "self", ".", "expire_after", "is", "None", ":", "return", "True", "return", "self", ".", "freshness", "(", ")", "-", "freshness", "<=", "self", ".", "expire_after" ]
Iteratively yields individual configuration points inside a defined basis .
def grid_search_generator ( grid_basis = [ ] , * args , * * kwargs ) : grid_basis_ = grid_basis + list ( args ) + list ( kwargs . items ( ) ) grid_basis_dict = OrderedDict ( grid_basis_ ) grid_point_iter = util_dict . iter_all_dict_combinations_ordered ( grid_basis_dict ) for grid_point in grid_point_iter : yield grid_point
9,069
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L1837-L1882
[ "async", "def", "register", "(", "self", ",", "request", ")", ":", "session", "=", "await", "get_session", "(", "request", ")", "user_id", "=", "session", ".", "get", "(", "'user_id'", ")", "if", "user_id", ":", "return", "redirect", "(", "request", ",", "'timeline'", ")", "error", "=", "None", "form", "=", "None", "if", "request", ".", "method", "==", "'POST'", ":", "form", "=", "await", "request", ".", "post", "(", ")", "user_id", "=", "await", "db", ".", "get_user_id", "(", "self", ".", "mongo", ".", "user", ",", "form", "[", "'username'", "]", ")", "if", "not", "form", "[", "'username'", "]", ":", "error", "=", "'You have to enter a username'", "elif", "not", "form", "[", "'email'", "]", "or", "'@'", "not", "in", "form", "[", "'email'", "]", ":", "error", "=", "'You have to enter a valid email address'", "elif", "not", "form", "[", "'password'", "]", ":", "error", "=", "'You have to enter a password'", "elif", "form", "[", "'password'", "]", "!=", "form", "[", "'password2'", "]", ":", "error", "=", "'The two passwords do not match'", "elif", "user_id", "is", "not", "None", ":", "error", "=", "'The username is already taken'", "else", ":", "await", "self", ".", "mongo", ".", "user", ".", "insert", "(", "{", "'username'", ":", "form", "[", "'username'", "]", ",", "'email'", ":", "form", "[", "'email'", "]", ",", "'pw_hash'", ":", "generate_password_hash", "(", "form", "[", "'password'", "]", ")", "}", ")", "return", "redirect", "(", "request", ",", "'login'", ")", "return", "{", "\"error\"", ":", "error", ",", "\"form\"", ":", "form", "}" ]
Returns list of unique dictionaries only with keys specified in keys
def get_cfgdict_list_subset ( cfgdict_list , keys ) : import utool as ut cfgdict_sublist_ = [ ut . dict_subset ( cfgdict , keys ) for cfgdict in cfgdict_list ] cfgtups_sublist_ = [ tuple ( ut . dict_to_keyvals ( cfgdict ) ) for cfgdict in cfgdict_sublist_ ] cfgtups_sublist = ut . unique_ordered ( cfgtups_sublist_ ) cfgdict_sublist = list ( map ( dict , cfgtups_sublist ) ) return cfgdict_sublist
9,070
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L1944-L1988
[ "def", "read", "(", "self", ")", ":", "# Read value", "try", ":", "buf", "=", "os", ".", "read", "(", "self", ".", "_fd", ",", "8", ")", "except", "OSError", "as", "e", ":", "raise", "LEDError", "(", "e", ".", "errno", ",", "\"Reading LED brightness: \"", "+", "e", ".", "strerror", ")", "# Rewind", "try", ":", "os", ".", "lseek", "(", "self", ".", "_fd", ",", "0", ",", "os", ".", "SEEK_SET", ")", "except", "OSError", "as", "e", ":", "raise", "LEDError", "(", "e", ".", "errno", ",", "\"Rewinding LED brightness: \"", "+", "e", ".", "strerror", ")", "return", "int", "(", "buf", ")" ]
constrains configurations and removes duplicates
def constrain_cfgdict_list ( cfgdict_list_ , constraint_func ) : cfgdict_list = [ ] for cfg_ in cfgdict_list_ : cfg = cfg_ . copy ( ) if constraint_func ( cfg ) is not False and len ( cfg ) > 0 : if cfg not in cfgdict_list : cfgdict_list . append ( cfg ) return cfgdict_list
9,071
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L1991-L1999
[ "def", "_send_register_payload", "(", "self", ",", "websocket", ")", ":", "file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "HANDSHAKE_FILE_NAME", ")", "data", "=", "codecs", ".", "open", "(", "file", ",", "'r'", ",", "'utf-8'", ")", "raw_handshake", "=", "data", ".", "read", "(", ")", "handshake", "=", "json", ".", "loads", "(", "raw_handshake", ")", "handshake", "[", "'payload'", "]", "[", "'client-key'", "]", "=", "self", ".", "client_key", "yield", "from", "websocket", ".", "send", "(", "json", ".", "dumps", "(", "handshake", ")", ")", "raw_response", "=", "yield", "from", "websocket", ".", "recv", "(", ")", "response", "=", "json", ".", "loads", "(", "raw_response", ")", "if", "response", "[", "'type'", "]", "==", "'response'", "and", "response", "[", "'payload'", "]", "[", "'pairingType'", "]", "==", "'PROMPT'", ":", "raw_response", "=", "yield", "from", "websocket", ".", "recv", "(", ")", "response", "=", "json", ".", "loads", "(", "raw_response", ")", "if", "response", "[", "'type'", "]", "==", "'registered'", ":", "self", ".", "client_key", "=", "response", "[", "'payload'", "]", "[", "'client-key'", "]", "self", ".", "save_key_file", "(", ")" ]
Show only the text in labels that mater from the cfgdict
def make_cfglbls ( cfgdict_list , varied_dict ) : import textwrap wrapper = textwrap . TextWrapper ( width = 50 ) cfglbl_list = [ ] for cfgdict_ in cfgdict_list : cfgdict = cfgdict_ . copy ( ) for key in six . iterkeys ( cfgdict_ ) : try : vals = varied_dict [ key ] # Dont print label if not varied if len ( vals ) == 1 : del cfgdict [ key ] else : # Dont print label if it is None (irrelevant) if cfgdict [ key ] is None : del cfgdict [ key ] except KeyError : # Don't print keys not in varydict del cfgdict [ key ] cfglbl = six . text_type ( cfgdict ) search_repl_list = [ ( '\'' , '' ) , ( '}' , '' ) , ( '{' , '' ) , ( ': ' , '=' ) ] for search , repl in search_repl_list : cfglbl = cfglbl . replace ( search , repl ) #cfglbl = str(cfgdict).replace('\'', '').replace('}', '').replace('{', '').replace(': ', '=') cfglbl = ( '\n' . join ( wrapper . wrap ( cfglbl ) ) ) cfglbl_list . append ( cfglbl ) return cfglbl_list
9,072
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L2002-L2030
[ "def", "finish", "(", "self", ")", ":", "log", ".", "debug", "(", "\"Session disconnected.\"", ")", "try", ":", "self", ".", "sock", ".", "shutdown", "(", "socket", ".", "SHUT_RDWR", ")", "except", ":", "pass", "self", ".", "session_end", "(", ")" ]
Times a series of functions on a series of inputs
def gridsearch_timer ( func_list , args_list , niters = None , * * searchkw ) : import utool as ut timings = ut . ddict ( list ) if niters is None : niters = len ( args_list ) if ut . is_funclike ( args_list ) : get_args = args_list else : get_args = args_list . __getitem__ #func_labels = searchkw.get('func_labels', list(range(len(func_list)))) func_labels = searchkw . get ( 'func_labels' , [ ut . get_funcname ( func ) for func in func_list ] ) use_cache = searchkw . get ( 'use_cache' , not ut . get_argflag ( ( '--nocache' , '--nocache-time' ) ) ) assert_eq = searchkw . get ( 'assert_eq' , True ) count_list = list ( range ( niters ) ) xlabel_list = [ ] cache = ut . ShelfCacher ( 'timeings.shelf' , enabled = use_cache ) for count in ut . ProgressIter ( count_list , lbl = 'Testing Timeings' ) : args_ = get_args ( count ) xlabel_list . append ( args_ ) if True : # HACK # There is an unhandled corner case that will fail if the function expects a tuple. if not isinstance ( args_ , tuple ) : args_ = ( args_ , ) assert isinstance ( args_ , tuple ) , 'args_ should be a tuple so it can be unpacked' ret_list = [ ] for func_ in func_list : try : kwargs_ = { } func_cachekey = ut . get_func_result_cachekey ( func_ , args_ , kwargs_ ) ellapsed = cache . load ( func_cachekey ) except ut . CacheMissException : with ut . Timer ( verbose = False ) as t : ret = func_ ( * args_ ) ret_list . append ( ret ) ellapsed = t . ellapsed cache . save ( func_cachekey , ellapsed ) timings [ func_ ] . append ( ellapsed ) if assert_eq : # Hacky, not guarenteed to work if cache is one ut . assert_all_eq ( list ( map ( ut . cachestr_repr , ret_list ) ) ) cache . close ( ) count_to_xtick = searchkw . get ( 'count_to_xtick' , lambda x , y : x ) xtick_list = [ count_to_xtick ( count , get_args ( count ) ) for count in count_list ] def plot_timings ( ) : import plottool as pt ydata_list = ut . dict_take ( timings , func_list ) xdata = xtick_list ylabel = 'seconds' xlabel = 'input size' pt . 
multi_plot ( xdata , ydata_list , label_list = func_labels , ylabel = ylabel , xlabel = xlabel , * * searchkw ) time_result = { 'plot_timings' : plot_timings , 'timings' : timings , } return time_result
9,073
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/util_gridsearch.py#L2120-L2227
[ "def", "clear", "(", "self", ",", "page_size", "=", "10", ",", "vtimeout", "=", "10", ")", ":", "n", "=", "0", "l", "=", "self", ".", "get_messages", "(", "page_size", ",", "vtimeout", ")", "while", "l", ":", "for", "m", "in", "l", ":", "self", ".", "delete_message", "(", "m", ")", "n", "+=", "1", "l", "=", "self", ".", "get_messages", "(", "page_size", ",", "vtimeout", ")", "return", "n" ]
Return Heroku Connect mapping for the entire project .
def get_mapping ( version = 1 , exported_at = None , app_name = None ) : if exported_at is None : exported_at = timezone . now ( ) app_name = app_name or settings . HEROKU_CONNECT_APP_NAME return { 'version' : version , 'connection' : { 'organization_id' : settings . HEROKU_CONNECT_ORGANIZATION_ID , 'app_name' : app_name , 'exported_at' : exported_at . isoformat ( ) , } , 'mappings' : [ model . get_heroku_connect_mapping ( ) for model in get_heroku_connect_models ( ) ] }
9,074
https://github.com/Thermondo/django-heroku-connect/blob/f390e0fbf256ee79b30bb88f9a8c9576c6c8d9b5/heroku_connect/utils.py#L30-L61
[ "def", "_load_state", "(", "self", ",", "context", ")", ":", "try", ":", "state", "=", "cookie_to_state", "(", "context", ".", "cookie", ",", "self", ".", "config", "[", "\"COOKIE_STATE_NAME\"", "]", ",", "self", ".", "config", "[", "\"STATE_ENCRYPTION_KEY\"", "]", ")", "except", "SATOSAStateError", "as", "e", ":", "msg_tmpl", "=", "'Failed to decrypt state {state} with {error}'", "msg", "=", "msg_tmpl", ".", "format", "(", "state", "=", "context", ".", "cookie", ",", "error", "=", "str", "(", "e", ")", ")", "satosa_logging", "(", "logger", ",", "logging", ".", "WARNING", ",", "msg", ",", "None", ")", "state", "=", "State", "(", ")", "finally", ":", "context", ".", "state", "=", "state" ]
Return all registered Heroku Connect Models .
def get_heroku_connect_models ( ) : from django . apps import apps apps . check_models_ready ( ) from heroku_connect . db . models import HerokuConnectModel return ( model for models in apps . all_models . values ( ) for model in models . values ( ) if issubclass ( model , HerokuConnectModel ) and not model . _meta . managed )
9,075
https://github.com/Thermondo/django-heroku-connect/blob/f390e0fbf256ee79b30bb88f9a8c9576c6c8d9b5/heroku_connect/utils.py#L64-L84
[ "def", "read_cpp_source_file", "(", "self", ",", "source_file", ")", ":", "xml_file", "=", "''", "try", ":", "ffname", "=", "self", ".", "__file_full_name", "(", "source_file", ")", "self", ".", "logger", ".", "debug", "(", "\"Reading source file: [%s].\"", ",", "ffname", ")", "decls", "=", "self", ".", "__dcache", ".", "cached_value", "(", "ffname", ",", "self", ".", "__config", ")", "if", "not", "decls", ":", "self", ".", "logger", ".", "debug", "(", "\"File has not been found in cache, parsing...\"", ")", "xml_file", "=", "self", ".", "create_xml_file", "(", "ffname", ")", "decls", ",", "files", "=", "self", ".", "__parse_xml_file", "(", "xml_file", ")", "self", ".", "__dcache", ".", "update", "(", "ffname", ",", "self", ".", "__config", ",", "decls", ",", "files", ")", "else", ":", "self", ".", "logger", ".", "debug", "(", "(", "\"File has not been changed, reading declarations \"", "+", "\"from cache.\"", ")", ")", "except", "Exception", ":", "if", "xml_file", ":", "utils", ".", "remove_file_no_raise", "(", "xml_file", ",", "self", ".", "__config", ")", "raise", "if", "xml_file", ":", "utils", ".", "remove_file_no_raise", "(", "xml_file", ",", "self", ".", "__config", ")", "return", "decls" ]
Create Heroku Connect schema .
def create_heroku_connect_schema ( using = DEFAULT_DB_ALIAS ) : connection = connections [ using ] with connection . cursor ( ) as cursor : cursor . execute ( _SCHEMA_EXISTS_QUERY , [ settings . HEROKU_CONNECT_SCHEMA ] ) schema_exists = cursor . fetchone ( ) [ 0 ] if schema_exists : return False cursor . execute ( "CREATE SCHEMA %s;" , [ AsIs ( settings . HEROKU_CONNECT_SCHEMA ) ] ) with connection . schema_editor ( ) as editor : for model in get_heroku_connect_models ( ) : editor . create_model ( model ) # Needs PostgreSQL and database superuser privileges (which is the case on Heroku): editor . execute ( 'CREATE EXTENSION IF NOT EXISTS "hstore";' ) from heroku_connect . models import ( TriggerLog , TriggerLogArchive ) for cls in [ TriggerLog , TriggerLogArchive ] : editor . create_model ( cls ) return True
9,076
https://github.com/Thermondo/django-heroku-connect/blob/f390e0fbf256ee79b30bb88f9a8c9576c6c8d9b5/heroku_connect/utils.py#L105-L142
[ "def", "delete_attachments", "(", "self", ",", "volumeID", ",", "attachmentsID", ")", ":", "log", ".", "debug", "(", "\"deleting attachments from volume '{}': {}\"", ".", "format", "(", "volumeID", ",", "attachmentsID", ")", ")", "rawVolume", "=", "self", ".", "_req_raw_volume", "(", "volumeID", ")", "insID", "=", "[", "a", "[", "'id'", "]", "for", "a", "in", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", "]", "# check that all requested file are present", "for", "id", "in", "attachmentsID", ":", "if", "id", "not", "in", "insID", ":", "raise", "NotFoundException", "(", "\"could not found attachment '{}' of the volume '{}'\"", ".", "format", "(", "id", ",", "volumeID", ")", ")", "for", "index", ",", "id", "in", "enumerate", "(", "attachmentsID", ")", ":", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", ".", "pop", "(", "insID", ".", "index", "(", "id", ")", ")", "self", ".", "_db", ".", "modify_book", "(", "volumeID", ",", "rawVolume", "[", "'_source'", "]", ",", "version", "=", "rawVolume", "[", "'_version'", "]", ")" ]
Return all Heroku Connect connections setup with the given application .
def get_connections ( app ) : payload = { 'app' : app } url = os . path . join ( settings . HEROKU_CONNECT_API_ENDPOINT , 'connections' ) response = requests . get ( url , params = payload , headers = _get_authorization_headers ( ) ) response . raise_for_status ( ) return response . json ( ) [ 'results' ]
9,077
https://github.com/Thermondo/django-heroku-connect/blob/f390e0fbf256ee79b30bb88f9a8c9576c6c8d9b5/heroku_connect/utils.py#L151-L186
[ "def", "fetch", "(", "self", ")", ":", "if", "self", ".", "_file_path", "is", "not", "None", ":", "return", "self", ".", "_file_path", "temp_path", "=", "self", ".", "context", ".", "work_path", "if", "self", ".", "_content_hash", "is", "not", "None", ":", "self", ".", "_file_path", "=", "storage", ".", "load_file", "(", "self", ".", "_content_hash", ",", "temp_path", "=", "temp_path", ")", "return", "self", ".", "_file_path", "if", "self", ".", "response", "is", "not", "None", ":", "self", ".", "_file_path", "=", "random_filename", "(", "temp_path", ")", "content_hash", "=", "sha1", "(", ")", "with", "open", "(", "self", ".", "_file_path", ",", "'wb'", ")", "as", "fh", ":", "for", "chunk", "in", "self", ".", "response", ".", "iter_content", "(", "chunk_size", "=", "8192", ")", ":", "content_hash", ".", "update", "(", "chunk", ")", "fh", ".", "write", "(", "chunk", ")", "self", ".", "_remove_file", "=", "True", "chash", "=", "content_hash", ".", "hexdigest", "(", ")", "self", ".", "_content_hash", "=", "storage", ".", "archive_file", "(", "self", ".", "_file_path", ",", "content_hash", "=", "chash", ")", "if", "self", ".", "http", ".", "cache", "and", "self", ".", "ok", ":", "self", ".", "context", ".", "set_tag", "(", "self", ".", "request_id", ",", "self", ".", "serialize", "(", ")", ")", "self", ".", "retrieved_at", "=", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", "return", "self", ".", "_file_path" ]
Get Heroku Connection connection information .
def get_connection ( connection_id , deep = False ) : url = os . path . join ( settings . HEROKU_CONNECT_API_ENDPOINT , 'connections' , connection_id ) payload = { 'deep' : deep } response = requests . get ( url , params = payload , headers = _get_authorization_headers ( ) ) response . raise_for_status ( ) return response . json ( )
9,078
https://github.com/Thermondo/django-heroku-connect/blob/f390e0fbf256ee79b30bb88f9a8c9576c6c8d9b5/heroku_connect/utils.py#L189-L240
[ "def", "ttl", "(", "self", ",", "value", ")", ":", "# get timer", "timer", "=", "getattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "None", ")", "# if timer is running, stop the timer", "if", "timer", "is", "not", "None", ":", "timer", ".", "cancel", "(", ")", "# initialize timestamp", "timestamp", "=", "None", "# if value is None", "if", "value", "is", "None", ":", "# nonify timer", "timer", "=", "None", "else", ":", "# else, renew a timer", "# get timestamp", "timestamp", "=", "time", "(", ")", "+", "value", "# start a new timer", "timer", "=", "Timer", "(", "value", ",", "self", ".", "__del__", ")", "timer", ".", "start", "(", ")", "# set/update attributes", "setattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "timer", ")", "setattr", "(", "self", ",", "Annotation", ".", "__TS", ",", "timestamp", ")" ]
Import Heroku Connection mapping for given connection .
def import_mapping ( connection_id , mapping ) : url = os . path . join ( settings . HEROKU_CONNECT_API_ENDPOINT , 'connections' , connection_id , 'actions' , 'import' ) response = requests . post ( url = url , json = mapping , headers = _get_authorization_headers ( ) ) response . raise_for_status ( )
9,079
https://github.com/Thermondo/django-heroku-connect/blob/f390e0fbf256ee79b30bb88f9a8c9576c6c8d9b5/heroku_connect/utils.py#L243-L264
[ "def", "apply_transaction", "(", "self", ",", "transaction", ":", "BaseTransaction", ")", "->", "Tuple", "[", "BaseBlock", ",", "Receipt", ",", "BaseComputation", "]", ":", "vm", "=", "self", ".", "get_vm", "(", "self", ".", "header", ")", "base_block", "=", "vm", ".", "block", "receipt", ",", "computation", "=", "vm", ".", "apply_transaction", "(", "base_block", ".", "header", ",", "transaction", ")", "header_with_receipt", "=", "vm", ".", "add_receipt_to_header", "(", "base_block", ".", "header", ",", "receipt", ")", "# since we are building the block locally, we have to persist all the incremental state", "vm", ".", "state", ".", "persist", "(", ")", "new_header", "=", "header_with_receipt", ".", "copy", "(", "state_root", "=", "vm", ".", "state", ".", "state_root", ")", "transactions", "=", "base_block", ".", "transactions", "+", "(", "transaction", ",", ")", "receipts", "=", "base_block", ".", "get_receipts", "(", "self", ".", "chaindb", ")", "+", "(", "receipt", ",", ")", "new_block", "=", "vm", ".", "set_block_transactions", "(", "base_block", ",", "new_header", ",", "transactions", ",", "receipts", ")", "self", ".", "header", "=", "new_block", ".", "header", "return", "new_block", ",", "receipt", ",", "computation" ]
Link the connection to your Heroku user account .
def link_connection_to_account ( app ) : url = os . path . join ( settings . HEROKU_CONNECT_API_ENDPOINT , 'users' , 'me' , 'apps' , app , 'auth' ) response = requests . post ( url = url , headers = _get_authorization_headers ( ) ) response . raise_for_status ( )
9,080
https://github.com/Thermondo/django-heroku-connect/blob/f390e0fbf256ee79b30bb88f9a8c9576c6c8d9b5/heroku_connect/utils.py#L267-L278
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
Searches a base element for subelement by name then takes the cvParams of that subelement and returns the values as a list for the paramnames that match . Value order in list equals input paramnames order .
def fetch_cvparams_values_from_subel ( base , subelname , paramnames , ns ) : sub_el = basereader . find_element_xpath ( base , subelname , ns ) cvparams = get_all_cvparams ( sub_el , ns ) output = [ ] for param in paramnames : output . append ( fetch_cvparam_value_by_name ( cvparams , param ) ) return output
9,081
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/readers/spectra.py#L39-L49
[ "def", "generation", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "if", "self", ".", "state", "is", "not", "MemberState", ".", "STABLE", ":", "return", "None", "return", "self", ".", "_generation" ]
Creates database tables in sqlite lookup db
def create_tables ( self , tables ) : cursor = self . get_cursor ( ) for table in tables : columns = mslookup_tables [ table ] try : cursor . execute ( 'CREATE TABLE {0}({1})' . format ( table , ', ' . join ( columns ) ) ) except sqlite3 . OperationalError as error : print ( error ) print ( 'Warning: Table {} already exists in database, will ' 'add to existing tables instead of creating ' 'new.' . format ( table ) ) else : self . conn . commit ( )
9,082
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L368-L382
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
SQLite connect method initialize db
def connect ( self , fn ) : self . conn = sqlite3 . connect ( fn ) cur = self . get_cursor ( ) cur . execute ( 'PRAGMA page_size=4096' ) cur . execute ( 'PRAGMA FOREIGN_KEYS=ON' ) cur . execute ( 'PRAGMA cache_size=10000' ) cur . execute ( 'PRAGMA journal_mode=MEMORY' )
9,083
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L384-L391
[ "def", "show_message", "(", "device", ",", "msg", ",", "y_offset", "=", "0", ",", "fill", "=", "None", ",", "font", "=", "None", ",", "scroll_delay", "=", "0.03", ")", ":", "fps", "=", "0", "if", "scroll_delay", "==", "0", "else", "1.0", "/", "scroll_delay", "regulator", "=", "framerate_regulator", "(", "fps", ")", "font", "=", "font", "or", "DEFAULT_FONT", "with", "canvas", "(", "device", ")", "as", "draw", ":", "w", ",", "h", "=", "textsize", "(", "msg", ",", "font", ")", "x", "=", "device", ".", "width", "virtual", "=", "viewport", "(", "device", ",", "width", "=", "w", "+", "x", "+", "x", ",", "height", "=", "device", ".", "height", ")", "with", "canvas", "(", "virtual", ")", "as", "draw", ":", "text", "(", "draw", ",", "(", "x", ",", "y_offset", ")", ",", "msg", ",", "font", "=", "font", ",", "fill", "=", "fill", ")", "i", "=", "0", "while", "i", "<=", "w", "+", "x", ":", "with", "regulator", ":", "virtual", ".", "set_position", "(", "(", "i", ",", "0", ")", ")", "i", "+=", "1" ]
Called by interfaces to index specific column in table
def index_column ( self , index_name , table , column ) : cursor = self . get_cursor ( ) try : cursor . execute ( 'CREATE INDEX {0} on {1}({2})' . format ( index_name , table , column ) ) except sqlite3 . OperationalError as error : print ( error ) print ( 'Skipping index creation and assuming it exists already' ) else : self . conn . commit ( )
9,084
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L401-L411
[ "def", "svg2rlg", "(", "path", ",", "*", "*", "kwargs", ")", ":", "# unzip .svgz file into .svg", "unzipped", "=", "False", "if", "isinstance", "(", "path", ",", "str", ")", "and", "os", ".", "path", ".", "splitext", "(", "path", ")", "[", "1", "]", ".", "lower", "(", ")", "==", "\".svgz\"", ":", "with", "gzip", ".", "open", "(", "path", ",", "'rb'", ")", "as", "f_in", ",", "open", "(", "path", "[", ":", "-", "1", "]", ",", "'wb'", ")", "as", "f_out", ":", "shutil", ".", "copyfileobj", "(", "f_in", ",", "f_out", ")", "path", "=", "path", "[", ":", "-", "1", "]", "unzipped", "=", "True", "svg_root", "=", "load_svg_file", "(", "path", ")", "if", "svg_root", "is", "None", ":", "return", "# convert to a RLG drawing", "svgRenderer", "=", "SvgRenderer", "(", "path", ",", "*", "*", "kwargs", ")", "drawing", "=", "svgRenderer", ".", "render", "(", "svg_root", ")", "# remove unzipped .svgz file (.svg)", "if", "unzipped", ":", "os", ".", "remove", "(", "path", ")", "return", "drawing" ]
Creates and returns an SQL SELECT statement
def get_sql_select ( self , columns , table , distinct = False ) : sql = 'SELECT {0} {1} FROM {2}' dist = { True : 'DISTINCT' , False : '' } [ distinct ] return sql . format ( dist , ', ' . join ( columns ) , table )
9,085
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L417-L421
[ "def", "write", "(", "self", ",", "name", ",", "*", "*", "data", ")", ":", "data", "[", "\"name\"", "]", "=", "name", "if", "not", "(", "\"timestamp\"", "in", "data", ")", ":", "data", "[", "\"timestamp\"", "]", "=", "datetime", ".", "utcnow", "(", ")", "try", ":", "self", ".", "client", ".", "index", "(", "index", "=", "self", ".", "get_index", "(", ")", ",", "doc_type", "=", "self", ".", "doc_type", ",", "id", "=", "None", ",", "body", "=", "data", ")", "except", "TransportError", "as", "exc", ":", "logger", ".", "warning", "(", "'writing metric %r failure %r'", ",", "data", ",", "exc", ")" ]
Abstraction over executemany method
def store_many ( self , sql , values ) : cursor = self . get_cursor ( ) cursor . executemany ( sql , values ) self . conn . commit ( )
9,086
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L423-L427
[ "async", "def", "write_close_frame", "(", "self", ",", "data", ":", "bytes", "=", "b\"\"", ")", "->", "None", ":", "# Test and set the connection state before sending the close frame to", "# avoid sending two frames in case of concurrent calls.", "if", "self", ".", "state", "is", "State", ".", "OPEN", ":", "# 7.1.3. The WebSocket Closing Handshake is Started", "self", ".", "state", "=", "State", ".", "CLOSING", "logger", ".", "debug", "(", "\"%s - state = CLOSING\"", ",", "self", ".", "side", ")", "# 7.1.2. Start the WebSocket Closing Handshake", "await", "self", ".", "write_frame", "(", "True", ",", "OP_CLOSE", ",", "data", ",", "_expected_state", "=", "State", ".", "CLOSING", ")" ]
Executes SQL and returns cursor for it
def execute_sql ( self , sql ) : cursor = self . get_cursor ( ) cursor . execute ( sql ) return cursor
9,087
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L429-L433
[ "def", "merge_config", "(", "self", ",", "user_config", ")", ":", "# provisioanlly update the default configurations with the user preferences", "temp_data_config", "=", "copy", ".", "deepcopy", "(", "self", ".", "data_config", ")", ".", "update", "(", "user_config", ")", "temp_model_config", "=", "copy", ".", "deepcopy", "(", "self", ".", "model_config", ")", ".", "update", "(", "user_config", ")", "temp_conversation_config", "=", "copy", ".", "deepcopy", "(", "self", ".", "conversation_config", ")", ".", "update", "(", "user_config", ")", "# if the new configurations validate, apply them", "if", "validate_data_config", "(", "temp_data_config", ")", ":", "self", ".", "data_config", "=", "temp_data_config", "if", "validate_model_config", "(", "temp_model_config", ")", ":", "self", ".", "model_config", "=", "temp_model_config", "if", "validate_conversation_config", "(", "temp_conversation_config", ")", ":", "self", ".", "conversation_config", "=", "temp_conversation_config" ]
Returns dict of mzmlfilenames and their db ids
def get_mzmlfile_map ( self ) : cursor = self . get_cursor ( ) cursor . execute ( 'SELECT mzmlfile_id, mzmlfilename FROM mzmlfiles' ) return { fn : fnid for fnid , fn in cursor . fetchall ( ) }
9,088
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L440-L444
[ "def", "dispatch", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "REG_VALIDATION_STR", "not", "in", "request", ".", "session", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'registration'", ")", ")", "try", ":", "self", ".", "temporaryRegistration", "=", "TemporaryRegistration", ".", "objects", ".", "get", "(", "id", "=", "self", ".", "request", ".", "session", "[", "REG_VALIDATION_STR", "]", ".", "get", "(", "'temporaryRegistrationId'", ")", ")", "except", "ObjectDoesNotExist", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "'Invalid registration identifier passed to sign-up form.'", ")", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'registration'", ")", ")", "expiry", "=", "parse_datetime", "(", "self", ".", "request", ".", "session", "[", "REG_VALIDATION_STR", "]", ".", "get", "(", "'temporaryRegistrationExpiry'", ",", "''", ")", ",", ")", "if", "not", "expiry", "or", "expiry", "<", "timezone", ".", "now", "(", ")", ":", "messages", ".", "info", "(", "request", ",", "_", "(", "'Your registration session has expired. Please try again.'", ")", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'registration'", ")", ")", "return", "super", "(", "StudentInfoView", ",", "self", ")", ".", "dispatch", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Returns spectra id for spectra filename and retention time
def get_spectra_id ( self , fn_id , retention_time = None , scan_nr = None ) : cursor = self . get_cursor ( ) sql = 'SELECT spectra_id FROM mzml WHERE mzmlfile_id=? ' values = [ fn_id ] if retention_time is not None : sql = '{0} AND retention_time=?' . format ( sql ) values . append ( retention_time ) if scan_nr is not None : sql = '{0} AND scan_nr=?' . format ( sql ) values . append ( scan_nr ) cursor . execute ( sql , tuple ( values ) ) return cursor . fetchone ( ) [ 0 ]
9,089
https://github.com/glormph/msstitch/blob/ded7e5cbd813d7797dc9d42805778266e59ff042/src/app/lookups/sqlite/base.py#L446-L458
[ "def", "update_sandbox_product", "(", "self", ",", "product_id", ",", "surge_multiplier", "=", "None", ",", "drivers_available", "=", "None", ",", ")", ":", "args", "=", "{", "'surge_multiplier'", ":", "surge_multiplier", ",", "'drivers_available'", ":", "drivers_available", ",", "}", "endpoint", "=", "'v1.2/sandbox/products/{}'", ".", "format", "(", "product_id", ")", "return", "self", ".", "_api_call", "(", "'PUT'", ",", "endpoint", ",", "args", "=", "args", ")" ]
monkey patch to pandas to highlight the maximum value in specified cols of a row
def to_string_monkey ( df , highlight_cols = None , latex = False ) : try : import pandas as pd import utool as ut import numpy as np import six if isinstance ( highlight_cols , six . string_types ) and highlight_cols == 'all' : highlight_cols = np . arange ( len ( df . columns ) ) # kwds = dict(buf=None, columns=None, col_space=None, header=True, # index=True, na_rep='NaN', formatters=None, # float_format=None, sparsify=None, index_names=True, # justify=None, line_width=None, max_rows=None, # max_cols=None, show_dimensions=False) # self = pd.formats.format.DataFrameFormatter(df, **kwds) try : self = pd . formats . format . DataFrameFormatter ( df ) except AttributeError : self = pd . io . formats . format . DataFrameFormatter ( df ) self . highlight_cols = highlight_cols def monkey ( self ) : return monkey_to_str_columns ( self , latex = latex ) ut . inject_func_as_method ( self , monkey , '_to_str_columns' , override = True , force = True ) def strip_ansi ( text ) : import re ansi_escape = re . compile ( r'\x1b[^m]*m' ) return ansi_escape . sub ( '' , text ) def justify_ansi ( self , texts , max_len , mode = 'right' ) : if mode == 'left' : return [ x . ljust ( max_len + ( len ( x ) - len ( strip_ansi ( x ) ) ) ) for x in texts ] elif mode == 'center' : return [ x . center ( max_len + ( len ( x ) - len ( strip_ansi ( x ) ) ) ) for x in texts ] else : return [ x . rjust ( max_len + ( len ( x ) - len ( strip_ansi ( x ) ) ) ) for x in texts ] ut . inject_func_as_method ( self . adj , justify_ansi , 'justify' , override = True , force = True ) def strlen_ansii ( self , text ) : return pd . compat . strlen ( strip_ansi ( text ) , encoding = self . encoding ) ut . inject_func_as_method ( self . adj , strlen_ansii , 'len' , override = True , force = True ) if False : strlen = ut . partial ( strlen_ansii , self . adj ) # NOQA justfunc = ut . partial ( justify_ansi , self . 
adj ) # NOQA # Essentially what to_string does strcols = monkey_to_str_columns ( self ) # texts = strcols[2] space = 1 lists = strcols str_ = self . adj . adjoin ( space , * lists ) print ( str_ ) print ( strip_ansi ( str_ ) ) self . to_string ( ) result = self . buf . getvalue ( ) # hack because adjoin is not working correctly with injected strlen result = '\n' . join ( [ x . rstrip ( ) for x in result . split ( '\n' ) ] ) return result except Exception as ex : ut . printex ( 'pandas monkey-patch is broken: {}' . format ( str ( ex ) ) , tb = True , iswarning = True ) return str ( df )
9,090
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/experimental/pandas_highlight.py#L131-L214
[ "def", "wave_infochunk", "(", "path", ")", ":", "with", "open", "(", "path", ",", "'rb'", ")", "as", "file", ":", "if", "file", ".", "read", "(", "4", ")", "!=", "b\"RIFF\"", ":", "return", "None", "data_size", "=", "file", ".", "read", "(", "4", ")", "# container size", "if", "file", ".", "read", "(", "4", ")", "!=", "b\"WAVE\"", ":", "return", "None", "while", "True", ":", "chunkid", "=", "file", ".", "read", "(", "4", ")", "sizebuf", "=", "file", ".", "read", "(", "4", ")", "if", "len", "(", "sizebuf", ")", "<", "4", "or", "len", "(", "chunkid", ")", "<", "4", ":", "return", "None", "size", "=", "struct", ".", "unpack", "(", "b'<L'", ",", "sizebuf", ")", "[", "0", "]", "if", "chunkid", "[", "0", ":", "3", "]", "!=", "b\"fmt\"", ":", "if", "size", "%", "2", "==", "1", ":", "seek", "=", "size", "+", "1", "else", ":", "seek", "=", "size", "file", ".", "seek", "(", "size", ",", "1", ")", "else", ":", "return", "bytearray", "(", "b\"RIFF\"", "+", "data_size", "+", "b\"WAVE\"", "+", "chunkid", "+", "sizebuf", "+", "file", ".", "read", "(", "size", ")", ")" ]
Translates given schema from pythonic syntax to a validator .
def translate ( value ) : if isinstance ( value , BaseValidator ) : return value if value is None : return Anything ( ) if isinstance ( value , type ) : return IsA ( value ) if type ( value ) in compat . func_types : real_value = value ( ) return IsA ( type ( real_value ) , default = real_value ) if isinstance ( value , list ) : if value == [ ] : # no inner spec, just an empty list as the default value return IsA ( list ) elif len ( value ) == 1 : # the only item as spec for each item of the collection return ListOf ( translate ( value [ 0 ] ) ) else : raise StructureSpecificationError ( 'Expected a list containing exactly 1 item; ' 'got {cnt}: {spec}' . format ( cnt = len ( value ) , spec = value ) ) if isinstance ( value , dict ) : if not value : return IsA ( dict ) items = [ ] for k , v in value . items ( ) : if isinstance ( k , BaseValidator ) : k_validator = k else : k_validator = translate ( k ) default = k_validator . get_default_for ( None ) if default is not None : k_validator = Equals ( default ) v_validator = translate ( v ) items . append ( ( k_validator , v_validator ) ) return DictOf ( items ) return IsA ( type ( value ) , default = value )
9,091
https://github.com/neithere/monk/blob/4b2ee5152b081ac288ce8568422a027b5e7d2b1c/monk/validators.py#L699-L753
[ "def", "update_experiment", "(", ")", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "if", "not", "experiment_dict", ":", "return", "None", "for", "key", "in", "experiment_dict", ".", "keys", "(", ")", ":", "if", "isinstance", "(", "experiment_dict", "[", "key", "]", ",", "dict", ")", ":", "if", "experiment_dict", "[", "key", "]", ".", "get", "(", "'status'", ")", "!=", "'STOPPED'", ":", "nni_config", "=", "Config", "(", "experiment_dict", "[", "key", "]", "[", "'fileName'", "]", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "not", "detect_process", "(", "rest_pid", ")", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "'STOPPED'", ")", "continue", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "startTime", ",", "endTime", "=", "get_experiment_time", "(", "rest_port", ")", "if", "startTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'startTime'", ",", "startTime", ")", "if", "endTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'endTime'", ",", "endTime", ")", "status", "=", "get_experiment_status", "(", "rest_port", ")", "if", "status", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "status", ")" ]
Returns a dictionary based on value with each value recursively merged with spec .
def _merge ( self , value ) : if value is not None and not isinstance ( value , dict ) : # bogus value; will not pass validation but should be preserved return value if not self . _pairs : return { } collected = { } # collected.update(value) for k_validator , v_validator in self . _pairs : k_default = k_validator . get_default_for ( None ) if k_default is None : continue # even None is ok if value : v_for_this_k = value . get ( k_default ) else : v_for_this_k = None v_default = v_validator . get_default_for ( v_for_this_k ) collected . update ( { k_default : v_default } ) if value : for k , v in value . items ( ) : if k not in collected : collected [ k ] = v return collected
9,092
https://github.com/neithere/monk/blob/4b2ee5152b081ac288ce8568422a027b5e7d2b1c/monk/validators.py#L589-L622
[ "def", "_ensure_connection", "(", "self", ")", ":", "conn", "=", "self", ".", "connect", "(", ")", "if", "conn", ".", "recycle", "and", "conn", ".", "recycle", "<", "time", ".", "time", "(", ")", ":", "logger", ".", "debug", "(", "'Client session expired after %is. Recycling.'", ",", "self", ".", "_recycle", ")", "self", ".", "close", "(", ")", "conn", "=", "self", ".", "connect", "(", ")", "return", "conn" ]
Handle a key or sequence of keys in braces
def handle_code ( code ) : code_keys = [ ] # it is a known code (e.g. {DOWN}, {ENTER}, etc) if code in CODES : code_keys . append ( VirtualKeyAction ( CODES [ code ] ) ) # it is an escaped modifier e.g. {%}, {^}, {+} elif len ( code ) == 1 : code_keys . append ( KeyAction ( code ) ) # it is a repetition or a pause {DOWN 5}, {PAUSE 1.3} elif ' ' in code : to_repeat , count = code . rsplit ( None , 1 ) if to_repeat == "PAUSE" : try : pause_time = float ( count ) except ValueError : raise KeySequenceError ( 'invalid pause time %s' % count ) code_keys . append ( PauseAction ( pause_time ) ) else : try : count = int ( count ) except ValueError : raise KeySequenceError ( 'invalid repetition count %s' % count ) # If the value in to_repeat is a VK e.g. DOWN # we need to add the code repeated if to_repeat in CODES : code_keys . extend ( [ VirtualKeyAction ( CODES [ to_repeat ] ) ] * count ) # otherwise parse the keys and we get back a KeyAction else : to_repeat = parse_keys ( to_repeat ) if isinstance ( to_repeat , list ) : keys = to_repeat * count else : keys = [ to_repeat ] * count code_keys . extend ( keys ) else : raise RuntimeError ( "Unknown code: %s" % code ) return code_keys
9,093
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L478-L523
[ "def", "tfidf_weight", "(", "X", ")", ":", "X", "=", "coo_matrix", "(", "X", ")", "# calculate IDF", "N", "=", "float", "(", "X", ".", "shape", "[", "0", "]", ")", "idf", "=", "log", "(", "N", ")", "-", "log1p", "(", "bincount", "(", "X", ".", "col", ")", ")", "# apply TF-IDF adjustment", "X", ".", "data", "=", "sqrt", "(", "X", ".", "data", ")", "*", "idf", "[", "X", ".", "col", "]", "return", "X" ]
Return the parsed keys
def parse_keys ( string , with_spaces = False , with_tabs = False , with_newlines = False , modifiers = None ) : keys = [ ] if not modifiers : modifiers = [ ] index = 0 while index < len ( string ) : c = string [ index ] index += 1 # check if one of CTRL, SHIFT, ALT has been pressed if c in MODIFIERS . keys ( ) : modifier = MODIFIERS [ c ] # remember that we are currently modified modifiers . append ( modifier ) # hold down the modifier key keys . append ( VirtualKeyAction ( modifier , up = False ) ) if DEBUG : print ( "MODS+" , modifiers ) continue # Apply modifiers over a bunch of characters (not just one!) elif c == "(" : # find the end of the bracketed text end_pos = string . find ( ")" , index ) if end_pos == - 1 : raise KeySequenceError ( '`)` not found' ) keys . extend ( parse_keys ( string [ index : end_pos ] , modifiers = modifiers ) ) index = end_pos + 1 # Escape or named key elif c == "{" : # We start searching from index + 1 to account for the case {}} end_pos = string . find ( "}" , index + 1 ) if end_pos == - 1 : raise KeySequenceError ( '`}` not found' ) code = string [ index : end_pos ] index = end_pos + 1 keys . extend ( handle_code ( code ) ) # unmatched ")" elif c == ')' : raise KeySequenceError ( '`)` should be preceeded by `(`' ) # unmatched "}" elif c == '}' : raise KeySequenceError ( '`}` should be preceeded by `{`' ) # so it is a normal character else : # don't output white space unless flags to output have been set if ( c == ' ' and not with_spaces or c == '\t' and not with_tabs or c == '\n' and not with_newlines ) : continue # output nuewline if c in ( '~' , '\n' ) : keys . append ( VirtualKeyAction ( CODES [ "ENTER" ] ) ) # safest are the virtual keys - so if our key is a virtual key # use a VirtualKeyAction #if ord(c) in CODE_NAMES: # keys.append(VirtualKeyAction(ord(c))) elif modifiers : keys . append ( EscapedKeyAction ( c ) ) else : keys . 
append ( KeyAction ( c ) ) # as we have handled the text - release the modifiers while modifiers : if DEBUG : print ( "MODS-" , modifiers ) keys . append ( VirtualKeyAction ( modifiers . pop ( ) , down = False ) ) # just in case there were any modifiers left pressed - release them while modifiers : keys . append ( VirtualKeyAction ( modifiers . pop ( ) , down = False ) ) return keys
9,094
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L526-L614
[ "def", "get_closest_sibling_state", "(", "state_m", ",", "from_logical_port", "=", "None", ")", ":", "if", "not", "state_m", ".", "parent", ":", "logger", ".", "warning", "(", "\"A state can not have a closest sibling state if it has not parent as {0}\"", ".", "format", "(", "state_m", ")", ")", "return", "margin", "=", "cal_margin", "(", "state_m", ".", "parent", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", ")", "pos", "=", "state_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", "size", "=", "state_m", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", "# otherwise measure from reference state itself", "if", "from_logical_port", "in", "[", "\"outcome\"", ",", "\"income\"", "]", ":", "size", "=", "(", "margin", ",", "margin", ")", "if", "from_logical_port", "==", "\"outcome\"", ":", "outcomes_m", "=", "[", "outcome_m", "for", "outcome_m", "in", "state_m", ".", "outcomes", "if", "outcome_m", ".", "outcome", ".", "outcome_id", ">=", "0", "]", "free_outcomes_m", "=", "[", "oc_m", "for", "oc_m", "in", "outcomes_m", "if", "not", "state_m", ".", "state", ".", "parent", ".", "get_transition_for_outcome", "(", "state_m", ".", "state", ",", "oc_m", ".", "outcome", ")", "]", "if", "free_outcomes_m", ":", "outcome_m", "=", "free_outcomes_m", "[", "0", "]", "else", ":", "outcome_m", "=", "outcomes_m", "[", "0", "]", "pos", "=", "add_pos", "(", "pos", ",", "outcome_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", ")", "elif", "from_logical_port", "==", "\"income\"", ":", "pos", "=", "add_pos", "(", "pos", ",", "state_m", ".", "income", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", ")", "min_distance", "=", "None", "for", "sibling_state_m", "in", "state_m", ".", "parent", ".", "states", ".", "values", "(", ")", ":", "if", "sibling_state_m", "is", "state_m", ":", "continue", "sibling_pos", "=", "sibling_state_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", "sibling_size", "=", 
"sibling_state_m", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", "distance", "=", "geometry", ".", "cal_dist_between_2_coord_frame_aligned_boxes", "(", "pos", ",", "size", ",", "sibling_pos", ",", "sibling_size", ")", "if", "not", "min_distance", "or", "min_distance", "[", "0", "]", ">", "distance", ":", "min_distance", "=", "(", "distance", ",", "sibling_state_m", ")", "return", "min_distance" ]
Parse the keys and type them
def SendKeys ( keys , pause = 0.05 , with_spaces = False , with_tabs = False , with_newlines = False , turn_off_numlock = True ) : keys = parse_keys ( keys , with_spaces , with_tabs , with_newlines ) for k in keys : k . Run ( ) time . sleep ( pause )
9,095
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L624-L635
[ "def", "_remove_advices", "(", "target", ",", "advices", ",", "ctx", ")", ":", "# if ctx is not None", "if", "ctx", "is", "not", "None", ":", "# check if intercepted ctx is ctx", "_", ",", "intercepted_ctx", "=", "get_intercepted", "(", "target", ")", "if", "intercepted_ctx", "is", "None", "or", "intercepted_ctx", "is", "not", "ctx", ":", "return", "interception_fn", "=", "_get_function", "(", "target", ")", "target_advices", "=", "getattr", "(", "interception_fn", ",", "_ADVICES", ",", "None", ")", "if", "target_advices", "is", "not", "None", ":", "if", "advices", "is", "None", ":", "target_advices", "=", "[", "]", "else", ":", "target_advices", "=", "[", "advice", "for", "advice", "in", "target_advices", "if", "advice", "not", "in", "advices", "]", "if", "target_advices", ":", "# update target advices", "setattr", "(", "interception_fn", ",", "_ADVICES", ",", "target_advices", ")", "else", ":", "# free target advices if necessary", "delattr", "(", "interception_fn", ",", "_ADVICES", ")", "_unapply_interception", "(", "target", ",", "ctx", "=", "ctx", ")" ]
Send some test strings
def main ( ) : actions = """ {LWIN} {PAUSE .25} r {PAUSE .25} Notepad.exe{ENTER} {PAUSE 1} Hello{SPACE}World! {PAUSE 1} %{F4} {PAUSE .25} n """ SendKeys ( actions , pause = .1 ) keys = parse_keys ( actions ) for k in keys : print ( k ) k . Run ( ) time . sleep ( .1 ) test_strings = [ "\n" "(aa)some text\n" , "(a)some{ }text\n" , "(b)some{{}text\n" , "(c)some{+}text\n" , "(d)so%me{ab 4}text" , "(e)so%me{LEFT 4}text" , "(f)so%me{ENTER 4}text" , "(g)so%me{^aa 4}text" , "(h)some +(asdf)text" , "(i)some %^+(asdf)text" , "(j)some %^+a text+" , "(k)some %^+a tex+{&}" , "(l)some %^+a tex+(dsf)" , "" , ] for s in test_strings : print ( repr ( s ) ) keys = parse_keys ( s , with_newlines = True ) print ( keys ) for k in keys : k . Run ( ) time . sleep ( .1 ) print ( )
9,096
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L638-L688
[ "def", "get_placement_solver", "(", "service_instance", ")", ":", "stub", "=", "salt", ".", "utils", ".", "vmware", ".", "get_new_service_instance_stub", "(", "service_instance", ",", "ns", "=", "'pbm/2.0'", ",", "path", "=", "'/pbm/sdk'", ")", "pbm_si", "=", "pbm", ".", "ServiceInstance", "(", "'ServiceInstance'", ",", "stub", ")", "try", ":", "profile_manager", "=", "pbm_si", ".", "RetrieveContent", "(", ")", ".", "placementSolver", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "return", "profile_manager" ]
Build the INPUT structure for the action
def GetInput ( self ) : actions = 1 # if both up and down if self . up and self . down : actions = 2 inputs = ( INPUT * actions ) ( ) vk , scan , flags = self . _get_key_info ( ) for inp in inputs : inp . type = INPUT_KEYBOARD inp . _ . ki . wVk = vk inp . _ . ki . wScan = scan inp . _ . ki . dwFlags |= flags # if we are releasing - then let it up if self . up : inputs [ - 1 ] . _ . ki . dwFlags |= KEYEVENTF_KEYUP return inputs
9,097
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L312-L334
[ "def", "save_excel", "(", "self", ",", "fd", ")", ":", "from", "pylon", ".", "io", ".", "excel", "import", "ExcelWriter", "ExcelWriter", "(", "self", ")", ".", "write", "(", "fd", ")" ]
Execute the action
def Run ( self ) : inputs = self . GetInput ( ) return SendInput ( len ( inputs ) , ctypes . byref ( inputs ) , ctypes . sizeof ( INPUT ) )
9,098
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L336-L342
[ "def", "untrack", "(", "context", ",", "file_names", ")", ":", "context", ".", "obj", ".", "find_repo_type", "(", ")", "for", "fn", "in", "file_names", ":", "if", "context", ".", "obj", ".", "vc_name", "==", "'git'", ":", "context", ".", "obj", ".", "call", "(", "[", "'git'", ",", "'rm'", ",", "'--cached'", ",", "fn", "]", ")", "elif", "context", ".", "obj", ".", "vc_name", "==", "'hg'", ":", "context", ".", "obj", ".", "call", "(", "[", "'hg'", ",", "'forget'", ",", "fn", "]", ")" ]
Return a string that will show whether the string is up or down
def _get_down_up_string ( self ) : down_up = "" if not ( self . down and self . up ) : if self . down : down_up = "down" elif self . up : down_up = "up" return down_up
9,099
https://github.com/Erotemic/utool/blob/3b27e1f4e6e6fb23cd8744af7b7195b57d99e03a/utool/_internal/win32_send_keys.py#L344-L357
[ "def", "_ParsePage", "(", "self", ",", "parser_mediator", ",", "file_offset", ",", "page_data", ")", ":", "page_header_map", "=", "self", ".", "_GetDataTypeMap", "(", "'binarycookies_page_header'", ")", "try", ":", "page_header", "=", "self", ".", "_ReadStructureFromByteStream", "(", "page_data", ",", "file_offset", ",", "page_header_map", ")", "except", "(", "ValueError", ",", "errors", ".", "ParseError", ")", "as", "exception", ":", "raise", "errors", ".", "ParseError", "(", "(", "'Unable to map page header data at offset: 0x{0:08x} with error: '", "'{1!s}'", ")", ".", "format", "(", "file_offset", ",", "exception", ")", ")", "for", "record_offset", "in", "page_header", ".", "offsets", ":", "if", "parser_mediator", ".", "abort", ":", "break", "self", ".", "_ParseRecord", "(", "parser_mediator", ",", "page_data", ",", "record_offset", ")" ]