idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
8,900
def publish(func):
    """Decorator: publish the return value of the wrapped method as a
    message from this endpoint.

    The wrapped method must return a dict payload; any 'self' entry is
    stripped before publishing. The wrapper itself returns None and is
    tagged with ``is_publish = True`` for discovery.
    """
    @wraps(func)
    def _published(self, *args, **kwargs):
        message = func(self, *args, **kwargs)
        # Drop a leaked 'self' reference so it is never serialized.
        message.pop('self', None)
        self._publish(func.__name__, message)

    _published.is_publish = True
    return _published
publish the return value of this function as a message from this endpoint
8,901
def request ( func = None , timeout = 600 ) : if func is None : return partial ( request , timeout = timeout ) @ wraps ( func ) def wrapper ( self , * args , ** kwargs ) : params = func ( self , * args , ** kwargs ) self = params . pop ( 'self' , None ) entity = params . pop ( 'entity' , None ) app_name = params . pop ...
use to request an api call from a specific endpoint
8,902
def serialize_problem ( req , resp , problem ) : preferred = req . client_prefers ( ( 'application/json' , 'application/problem+json' ) ) if preferred is None : preferred = 'application/json' resp . data = problem . to_json ( ) . encode ( 'utf-8' ) resp . content_type = preferred resp . append_header ( 'Vary' , 'Accept...
Serialize the given instance of Problem .
8,903
def add_psms_to_proteindata ( proteindata , p_acc , pool , psmdata ) : seq , psm_id = psmdata [ 2 ] , psmdata [ 3 ] try : proteindata [ p_acc ] [ 'pools' ] [ pool ] [ 'psms' ] . add ( psm_id ) except KeyError : emptyinfo = { 'psms' : set ( ) , 'peptides' : set ( ) , 'unipeps' : 0 } try : proteindata [ p_acc ] [ 'pools'...
Fill function for create_featuredata_map
8,904
def print_traceback ( with_colors = True ) : import traceback stack = traceback . extract_stack ( ) stack_lines = traceback . format_list ( stack ) tbtext = '' . join ( stack_lines ) if with_colors : try : from pygments import highlight from pygments . lexers import get_lexer_by_name from pygments . formatters import T...
prints current stack
8,905
def is_valid_varname(varname):
    """Checks syntax and validity of a variable name.

    Returns False for non-string input; otherwise the name must match
    the module-level ``varname_regex`` and must not be a Python keyword.
    """
    if not isinstance(varname, six.string_types):
        return False
    has_valid_syntax = re.match(varname_regex, varname) is not None
    is_not_keyword = not keyword.iskeyword(varname)
    return has_valid_syntax and is_not_keyword
Checks syntax and validity of a variable name
8,906
def execstr_dict ( dict_ , local_name = None , exclude_list = None , explicit = False ) : import utool as ut if explicit : expr_list = [ ] for ( key , val ) in sorted ( dict_ . items ( ) ) : assert isinstance ( key , six . string_types ) , 'keys must be strings' expr_list . append ( '%s = %s' % ( key , ut . repr2 ( val...
returns execable python code that declares variables using keys and values
8,907
def embed2 ( ** kwargs ) : config = kwargs . get ( 'config' ) header = kwargs . pop ( 'header' , u'' ) stack_depth = kwargs . pop ( 'stack_depth' , 2 ) compile_flags = kwargs . pop ( 'compile_flags' , None ) import IPython from IPython . core . interactiveshell import InteractiveShell from IPython . terminal . embed im...
Modified from IPython . terminal . embed . embed so I can mess with stack_depth
8,908
def search_stack_for_localvar ( varname ) : curr_frame = inspect . currentframe ( ) print ( ' * Searching parent frames for: ' + six . text_type ( varname ) ) frame_no = 0 while curr_frame . f_back is not None : if varname in curr_frame . f_locals . keys ( ) : print ( ' * Found in frame: ' + six . text_type ( frame_no ...
Finds a local variable somewhere in the stack and returns the value
8,909
def formatex ( ex , msg = '[!?] Caught exception' , prefix = None , key_list = [ ] , locals_ = None , iswarning = False , tb = False , N = 0 , keys = None , colored = None ) : r if prefix is None : prefix = get_caller_prefix ( aserror = True , N = N ) if locals_ is None : locals_ = get_parent_frame ( N = N ) . f_locals...
r Formats an exception with relevant info
8,910
def parse_locals_keylist ( locals_ , key_list , strlist_ = None , prefix = '' ) : from utool import util_str if strlist_ is None : strlist_ = [ ] for key in key_list : try : if key is None : strlist_ . append ( '' ) elif isinstance ( key , tuple ) : tup = key func , key_ = tup val = get_varval_from_locals ( key_ , loca...
For each key in keylist puts its value in locals into a stringlist
8,911
def __send_rdy ( self , connection , command ) : if self . __consumer . original_rdy is None : node_count = self . __consumer . get_node_count_for_topic ( connection . context . topic ) self . __logger_rdy . debug ( "Calculating RDY: max_in_flight=(%d) " "node_count=(%d)" , self . __consumer . max_in_flight , node_coun...
Determine the RDY value and set it . It can either be a static value a callback or None . If it s None we ll calculate the value based on our limits and connection counts .
8,912
def switch_psm_to_peptable_fields ( oldheader ) : return { old : new for old , new in zip ( [ mzidtsvdata . HEADER_PEPTIDE , mzidtsvdata . HEADER_PROTEIN , mzidtsvdata . HEADER_PEPTIDE_Q , mzidtsvdata . HEADER_PEPTIDE_PEP ] , [ peptabledata . HEADER_PEPTIDE , peptabledata . HEADER_PROTEINS , peptabledata . HEADER_QVAL ...
Returns a dict map with old to new header fields
8,913
def add_instruction ( self , instr ) : assert ( isinstance ( instr , Instruction ) ) self . instruction_list . append ( instr ) if instr . lhs not in self . defined_variables : if isinstance ( instr . lhs , Variable ) : self . defined_variables . append ( instr . lhs ) if isinstance ( instr , EqInstruction ) : if isins...
Adds the argument instruction in the list of instructions of this basic block .
8,914
def set_condition ( self , condition , condition_instr = None ) : assert ( isinstance ( condition , Numeric ) ) if condition_instr is not None : assert ( isinstance ( condition_instr , CmpInstruction ) ) self . condition = condition self . condition_instr = condition_instr if condition_instr is not None : if condition_...
Defines the condition which decides how the basic block exits
8,915
def add_basic_block(self, basic_block):
    """Append the given basic block to this function's block list.

    Raises AssertionError if the argument is not a BasicBlock.
    """
    assert isinstance(basic_block, BasicBlock)
    self.basic_block_list.append(basic_block)
Adds the given basic block in the function
8,916
def get_variable(self, var_name):
    """Return the variable named ``var_name`` from this function's
    variable list; if none exists, create a new ``Variable`` with that
    name, register it, and return it.
    """
    assert isinstance(var_name, str)
    # Fix: the original re-checked ``isinstance(var_name, str)`` right
    # after the assert, an always-true branch whose failure path would
    # have silently returned None; the redundant guard is removed.
    for var in self.variable_list:
        if var.name == var_name:
            return var
    new_var = Variable(var_name)
    self.variable_list.append(new_var)
    return new_var
If a variable with the name var_name exists in this function s variable list \ then that variable object is returned ; else a new variable is created \ with the given name and added to the variable list of this function \ and returned back
8,917
def add_input_variable(self, var):
    """Register the argument as one of this function's input variables.

    Raises AssertionError if the argument is not a Variable.
    """
    assert isinstance(var, Variable)
    self.input_variable_list.append(var)
Adds the argument variable as one of the input variable
8,918
def add_output_variable(self, var):
    """Register the argument as one of this function's output variables.

    Raises AssertionError if the argument is not a Variable.
    """
    assert isinstance(var, Variable)
    self.output_variable_list.append(var)
Adds the argument variable as one of the output variable
8,919
def tokenize ( self ) : self . token_list = [ ] ps = self . parse_string . strip ( ) i = 0 last_token = None while i < len ( ps ) and ps [ i ] . isspace ( ) : i += 1 while i < len ( ps ) : token = '' if ps [ i ] . isalpha ( ) : while i < len ( ps ) and ( ps [ i ] . isalnum ( ) or ps [ i ] == '_' ) : token += ps [ i ] i...
Tokenizes the string stored in the parser object into a list of tokens .
8,920
def parse(self):
    """Tokenizes and parses an arithmetic expression into a parse tree.

    Returns the tree produced by ``self.parse2()``; any parse error
    propagates to the caller unchanged.
    """
    self.tokenize()
    if self.debug:
        print("Tokens found: %s" % self.token_list)
    # Fix: removed the no-op ``try/except Exception as e: raise e``
    # wrapper, which added nothing and (on Python 2) discarded the
    # original traceback.
    return self.parse2()
Tokenizes and parses an arithmetic expression into a parse tree .
8,921
def insert_keys ( self , keys ) : start = 0 bulk_insert = self . bulk_insert keys_len = len ( keys ) query = 'INSERT IGNORE INTO gauged_keys (namespace, `key`) VALUES ' execute = self . cursor . execute while start < keys_len : rows = keys [ start : start + bulk_insert ] params = [ param for params in rows for param in...
Insert keys into a table which assigns an ID
8,922
def get_writer_position(self, name):
    """Get the current writer position (stored timestamp) for ``name``.

    Returns 0 when the writer has no history row.
    """
    self.cursor.execute(
        'SELECT timestamp FROM gauged_writer_history '
        'WHERE id = %s', (name,))
    row = self.cursor.fetchone()
    if row:
        return row[0]
    return 0
Get the current writer position
8,923
def get_namespaces(self):
    """Get a list of all distinct namespaces."""
    self.cursor.execute('SELECT DISTINCT namespace FROM gauged_statistics')
    # Each row is a one-tuple; unpack to the bare namespace value.
    return [namespace for (namespace,) in self.cursor]
Get a list of namespaces
8,924
def remove_namespace ( self , namespace ) : params = ( namespace , ) execute = self . cursor . execute execute ( 'DELETE FROM gauged_data WHERE namespace = %s' , params ) execute ( 'DELETE FROM gauged_statistics WHERE namespace = %s' , params ) execute ( 'DELETE FROM gauged_keys WHERE namespace = %s' , params ) self . ...
Remove all data associated with the current namespace
8,925
def remove_cache(self, namespace, key=None):
    """Remove all cached values for the given namespace, optionally
    restricted to a single key.
    """
    if key is None:
        query = 'DELETE FROM gauged_cache WHERE namespace = %s'
        params = (namespace,)
    else:
        query = 'DELETE FROM gauged_cache WHERE namespace = %s and `key` = %s'
        params = (namespace, key)
    self.cursor.execute(query, params)
Remove all cached values for the specified namespace optionally specifying a key
8,926
def clear_schema(self):
    """Clear all gauged data by truncating every gauged table, then
    committing.
    """
    for table in ('gauged_data', 'gauged_keys', 'gauged_writer_history',
                  'gauged_cache', 'gauged_statistics'):
        self.cursor.execute('TRUNCATE TABLE ' + table)
    self.db.commit()
Clear all gauged data
8,927
def quantum_random():
    """Returns a 32-bit unsigned integer quantum random number.

    Fetches two 16-bit values from the quantumrandom service and
    reinterprets their buffer as a single uint32.
    """
    import quantumrandom
    halves = quantumrandom.uint16(array_length=2)
    # The reinterpreting view below requires a C-contiguous buffer.
    assert halves.flags['C_CONTIGUOUS']
    return halves.view(np.dtype('uint32'))[0]
returns a 32 bit unsigned integer quantum random number
8,928
def _npstate_to_pystate ( npstate ) : PY_VERSION = 3 version , keys , pos , has_gauss , cached_gaussian_ = npstate keys_pos = tuple ( map ( int , keys ) ) + ( int ( pos ) , ) cached_gaussian_ = cached_gaussian_ if has_gauss else None pystate = ( PY_VERSION , keys_pos , cached_gaussian_ ) return pystate
Convert state of a NumPy RandomState object to a state that can be used by Python s Random .
8,929
def _pystate_to_npstate ( pystate ) : NP_VERSION = 'MT19937' version , keys_pos_ , cached_gaussian_ = pystate keys , pos = keys_pos_ [ : - 1 ] , keys_pos_ [ - 1 ] keys = np . array ( keys , dtype = np . uint32 ) has_gauss = cached_gaussian_ is not None cached_gaussian = cached_gaussian_ if has_gauss else 0.0 npstate = ...
Convert state of a Python Random object to state usable by NumPy RandomState .
8,930
def ensure_rng ( rng , impl = 'numpy' ) : if impl == 'numpy' : if rng is None : rng = np . random elif isinstance ( rng , int ) : rng = np . random . RandomState ( seed = rng ) elif isinstance ( rng , random . Random ) : py_rng = rng pystate = py_rng . getstate ( ) npstate = _pystate_to_npstate ( pystate ) rng = np_rng...
Returns a random number generator
8,931
def random_indexes(max_index, subset_size=None, seed=None, rng=None):
    """Returns random unrepeated indices into a sequence of length
    ``max_index``, optionally limited to ``subset_size`` of them.
    """
    shuffled = np.arange(0, max_index)
    rng = ensure_rng(seed if rng is None else rng)
    rng.shuffle(shuffled)
    if subset_size is None:
        return shuffled
    return shuffled[0:min(subset_size, max_index)]
random unrepeated indices
8,932
def spaced_indexes ( len_ , n , trunc = False ) : if n is None : return np . arange ( len_ ) all_indexes = np . arange ( len_ ) if trunc : n = min ( len_ , n ) if n == 0 : return np . empty ( 0 ) stride = len_ // n try : indexes = all_indexes [ 0 : - 1 : stride ] except ValueError : raise ValueError ( 'cannot slice lis...
Returns n evenly spaced indexes . Returns as many as possible if trunc is true
8,933
def random_sample ( list_ , nSample , strict = False , rng = None , seed = None ) : rng = ensure_rng ( seed if rng is None else rng ) if isinstance ( list_ , list ) : list2_ = list_ [ : ] else : list2_ = np . copy ( list_ ) if len ( list2_ ) == 0 and not strict : return list2_ rng . shuffle ( list2_ ) if nSample is Non...
Grabs data randomly
8,934
def deterministic_sample(list_, nSample, seed=0, rng=None, strict=False):
    """Grabs data randomly, but repeatably: the seed defaults to 0 so
    repeated calls produce the same sample.
    """
    rng = ensure_rng(seed if rng is None else rng)
    return random_sample(list_, nSample, strict=strict, rng=rng)
Grabs data randomly but in a repeatable way
8,935
def spaced_items(list_, n, **kwargs):
    """Returns n evenly spaced items from ``list_`` (kwargs are passed
    through to ``spaced_indexes``).
    """
    picks = spaced_indexes(len(list_), n, **kwargs)
    return list_[picks]
Returns n evenly spaced items
8,936
def get_servers(self, topic):
    """Yield a ServerNode for every statically configured host.

    We assume the static server list can serve the given topic, since
    we have no preexisting knowledge about the hosts; ``topic`` is
    therefore unused here.
    """
    return (nsq.node.ServerNode(host) for host in self.__server_hosts)
We're assuming that the static list of servers can serve the given topic, since we have no preexisting knowledge about them.
8,937
def tokenizer ( text ) : for entry in text . split ( '$$$$\n' ) : if entry . rstrip ( ) : lines_stream = deque ( entry . split ( '\n' ) ) else : continue for token in _molfile ( stream = lines_stream ) : yield token if len ( lines_stream ) : for token in _sdfile ( stream = lines_stream ) : yield token yield EndOfFile (...
A lexical analyzer for the CTfile formatted files .
8,938
def _ctab_atom_bond_block ( number_of_lines , block_type , stream ) : for _ in range ( int ( number_of_lines ) ) : line = stream . popleft ( ) yield block_type ( * line . split ( ) )
Process atom and bond blocks of Ctab .
8,939
def _ctab_property_block(stream):
    """Process the properties block of a Ctab.

    Consumes lines from the stream until the ``M END`` terminator,
    yielding a CtabPropertiesBlockLine for each property line.
    """
    while True:
        line = stream.popleft()
        if line == 'M END':
            break
        # The property name is the second whitespace-delimited token
        # (e.g. the ``CHG`` in ``M  CHG ...``).
        yield CtabPropertiesBlockLine(line.split()[1], line)
Process properties block of Ctab .
8,940
def set_features ( self ) : self . scores = { } for t_or_d , feats in zip ( [ 'target' , 'decoy' ] , [ self . target , self . decoy ] ) : self . scores [ t_or_d ] = { } self . scores [ t_or_d ] [ 'scores' ] = self . score_get_fun ( feats , self . featuretype , self . prepare_percolator_output ) self . scores [ t_or_d ]...
Creates scorefiles for qvality s target and decoy distributions
8,941
def write(self):
    """Run the qvality program (which must be on PATH) on the prepared
    target/decoy score files, writing output to a derived file path.
    """
    outfn = self.create_outfilepath(self.fn, self.outsuffix)
    command = (['qvality'] + list(self.qvalityoptions) +
               [self.scores['target']['fn'], self.scores['decoy']['fn'],
                '-o', outfn])
    subprocess.call(command)
This actually runs the qvality program from PATH .
8,942
def setup_repo ( ) : r print ( '\n [setup_repo]!' ) from functools import partial import utool as ut code_dpath = ut . truepath ( ut . get_argval ( '--code-dir' , default = '~/code' ) ) _code_dpath = ut . unexpanduser ( code_dpath ) repo_fname = ( ut . get_argval ( ( '--repo' , '--repo-name' ) , type_ = str ) ) repo_dp...
r Creates default structure for a new repo
8,943
def grep_projects ( tofind_list , user_profile = None , verbose = True , new = False , ** kwargs ) : r import utool as ut user_profile = ensure_user_profile ( user_profile ) print ( 'user_profile = {!r}' . format ( user_profile ) ) kwargs = kwargs . copy ( ) colored = kwargs . pop ( 'colored' , True ) grepkw = { } grep...
r Greps the projects defined in the current UserProfile
8,944
def run ( self ) : if sys . platform == "linux" or sys . platform == "linux2" : libname = 'libfaketime.so.1' libnamemt = 'libfaketimeMT.so.1' elif sys . platform == "darwin" : libname = 'libfaketime.1.dylib' libnamemt = 'libfaketimeMT.1.dylib' else : sys . stderr . write ( "WARNING : libfaketime does not support platfo...
Compile libfaketime .
8,945
def generate2 ( func , args_gen , kw_gen = None , ntasks = None , ordered = True , force_serial = False , use_pool = False , chunksize = None , nprocs = None , progkw = { } , nTasks = None , verbose = None ) : r if verbose is None : verbose = 2 if ntasks is None : ntasks = nTasks if ntasks is None : try : ntasks = len ...
r Interfaces to either multiprocessing or futures . Essentially maps args_gen onto func using pool . imap . However args_gen must be a tuple of args that will be unpacked and sent to the function . Thus the function can take multiple args . Also specifying keyword args is supported .
8,946
def _generate_serial2 ( func , args_gen , kw_gen = None , ntasks = None , progkw = { } , verbose = None , nTasks = None ) : if verbose is None : verbose = 2 if ntasks is None : ntasks = nTasks if ntasks is None : ntasks = len ( args_gen ) if verbose > 0 : print ( '[ut._generate_serial2] executing %d %s tasks in serial'...
internal serial generator
8,947
def buffered_generator ( source_gen , buffer_size = 2 , use_multiprocessing = False ) : r if buffer_size < 2 : raise RuntimeError ( "Minimal buffer_ size is 2!" ) if use_multiprocessing : print ( 'WARNING seems to freeze if passed in a generator' ) if False : pool = multiprocessing . Pool ( processes = get_default_nump...
r Generator that runs a slow source generator in a separate process .
8,948
def sort_window_ids(winid_list, order='mru'):
    """Orders window ids by most recently used (or the given order),
    keeping only ids present in ``winid_list``.
    """
    import utool as ut
    ordering = XCtrl.sorted_window_ids(order)
    return ut.isect(ordering, winid_list)
Orders window ids by most recently used
8,949
def focus_window(winhandle, path=None, name=None, sleeptime=.01):
    """Focus the window matching ``winhandle`` via wmctrl, then pause
    briefly so the window manager can react.

    System deps: ``sudo apt-get install xautomation`` and
    ``apt-get install autokey-gtk``. ``path`` and ``name`` are unused.
    """
    import utool as ut
    import time
    print('focus: ' + winhandle)
    ut.cmd('wmctrl', '-xa', winhandle, verbose=False, quiet=True)
    time.sleep(sleeptime)
sudo apt - get install xautomation apt - get install autokey - gtk
8,950
def setup_chmod(setup_fpath, setup_dir, chmod_patterns):
    """Give files matching the patterns the same chmod flags as
    setup.py (``setup_fpath`` itself is unused).
    """
    # 33277 == 0o100775: regular file with rwxrwxr-x permissions.
    st_mode = 33277
    for pattern in chmod_patterns:
        for fpath in util_path.glob(setup_dir, pattern, recursive=True):
            print('[setup] chmod fpath=%r' % fpath)
            os.chmod(fpath, st_mode)
Gives files matching pattern the same chmod flags as setup . py
8,951
def __infer_setup_kwargs ( module , kwargs ) : name = kwargs [ 'name' ] packages = kwargs . get ( 'packages' , [ ] ) if name not in packages : packages . append ( name ) kwargs [ 'packages' ] = packages if 'version' not in kwargs : version = parse_package_for_version ( name ) kwargs [ 'version' ] = version if 'license'...
Implicitly build kwargs based on standard info
8,952
def _replaced ( __values , ** __replacements ) : return tuple ( o for o in ( __replacements . get ( name , name ) for name in __values ) if o )
Replace elements in iterable with values from an alias dict suppressing empty values .
8,953
def _get_admin_route_name ( model_or_instance ) : model = model_or_instance if isinstance ( model_or_instance , type ) else type ( model_or_instance ) return 'admin:{meta.app_label}_{meta.model_name}' . format ( meta = model . _meta )
Get the base name of the admin route for a model or model instance .
8,954
def _build_admin_filter_url(model, filters):
    """Build a filter URL to an admin changelist of all objects with
    similar field values.
    """
    changelist_url = reverse(_get_admin_route_name(model) + '_changelist')
    parts = urlsplit(changelist_url)
    query = parse_qs(parts.query)
    query.update(filters)
    filtered = parts._replace(query=urlencode(query))
    return urlunsplit(filtered)
Build a filter URL to an admin changelist of all objects with similar field values .
8,955
def _make_admin_link_to_similar ( primary_field , * fields , name = None ) : fields = ( primary_field , ) + fields url_template = '<a href="{url}">{name_or_value}</a>' def field_link ( self , obj ) : value = getattr ( obj , primary_field , None ) name_or_value = name or value filters = { field_name : getattr ( obj , fi...
Create a function that links to a changelist of all objects with similar field values .
8,956
def _retry_failed_log(failed_trigger_log):
    """Try to re-apply a failed trigger log action.

    Returns True when the log row was still in the FAILED state (locked
    via select_for_update) and has been redone; False when it no longer
    exists in that state.
    """
    model = type(failed_trigger_log)
    try:
        failed_trigger_log = model.objects.select_for_update().get(
            id=failed_trigger_log.id,
            state=TRIGGER_LOG_STATE['FAILED'],
        )
    except model.DoesNotExist:
        return False
    else:
        failed_trigger_log.redo()
        return True
Try to re - apply a failed trigger log action .
8,957
def ignore_failed_logs_action(self, request, queryset):
    """Set FAILED trigger logs in the queryset to IGNORED and report
    the count to the admin user.
    """
    count = _ignore_failed_logs(queryset)
    message = _('{count} failed trigger logs marked as ignored.')
    self.message_user(request, message.format(count=count))
Set FAILED trigger logs in queryset to IGNORED .
8,958
def retry_failed_logs_action(self, request, queryset):
    """Try to re-apply FAILED trigger log actions in the queryset and
    report how many were retried.
    """
    count = sum(1 for trigger_log in queryset if _retry_failed_log(trigger_log))
    self.message_user(
        request,
        _('{count} failed trigger logs retried.').format(count=count),
    )
Try to re - apply FAILED trigger log actions in the queryset .
8,959
def create_psm_lookup ( fn , fastafn , mapfn , header , pgdb , unroll = False , specfncol = None , decoy = False , fastadelim = None , genefield = None ) : proteins = store_proteins_descriptions ( pgdb , fastafn , fn , mapfn , header , decoy , fastadelim , genefield ) mzmlmap = pgdb . get_mzmlfile_map ( ) sequences = {...
Reads PSMs from file stores them to a database backend in chunked PSMs .
8,960
def store_psm_protein_relations ( fn , header , pgdb , proteins ) : allpsms = OrderedDict ( ) last_id , psmids_to_store = None , set ( ) store_soon = False for psm in tsvreader . generate_tsv_psms ( fn , header ) : psm_id , prots = tsvreader . get_pepproteins ( psm ) prots = [ x for x in prots if x in proteins ] try : ...
Reads PSMs from file extracts their proteins and peptides and passes them to a database backend in chunks .
8,961
def on_exception_report_input ( func_ = None , force = False , keys = None ) : def _closure_onexceptreport ( func ) : if not ONEX_REPORT_INPUT and not force : return func @ ignores_exc_tb ( outer_wrapper = False ) def wrp_onexceptreport ( * args , ** kwargs ) : try : return func ( * args , ** kwargs ) except Exception ...
If an error is thrown in the scope of this function s stack frame then the decorated function name and the arguments passed to it will be printed to the utool print function .
8,962
def _indent_decor ( lbl ) : def closure_indent ( func ) : if util_arg . TRACE : @ ignores_exc_tb ( outer_wrapper = False ) def wrp_indent ( * args , ** kwargs ) : with util_print . Indenter ( lbl ) : print ( ' ...trace[in]' ) ret = func ( * args , ** kwargs ) print ( ' ...trace[out]' ) return ret else : @ ignores...
does the actual work of indent_func
8,963
def indent_func(input_):
    """Takes either no arguments or an alias label.

    Usable as ``@indent_func`` directly on a function, or as
    ``@indent_func('label')`` with an explicit label.
    """
    if isinstance(input_, six.string_types):
        # Explicit label given: return the configured decorator.
        return _indent_decor(input_)
    if isinstance(input_, (bool, tuple)):
        # Sentinel values pass straight through undecorated.
        return input_
    # Applied directly to a function: derive the label from its name.
    lbl = '[' + meta_util_six.get_funcname(input_) + ']'
    return _indent_decor(lbl)(input_)
Takes either no arguments or an alias label
8,964
def tracefunc_xml ( func ) : funcname = meta_util_six . get_funcname ( func ) def wrp_tracefunc2 ( * args , ** kwargs ) : verbose = kwargs . get ( 'verbose' , True ) if verbose : print ( '<%s>' % ( funcname , ) ) with util_print . Indenter ( ' ' ) : ret = func ( * args , ** kwargs ) if verbose : print ( '</%s>' % ( ...
Causes output of function to be printed in an XML style block
8,965
def accepts_scalar_input ( func ) : @ ignores_exc_tb ( outer_wrapper = False ) def wrp_asi ( self , input_ , * args , ** kwargs ) : if util_iter . isiterable ( input_ ) : return func ( self , input_ , * args , ** kwargs ) else : ret = func ( self , [ input_ ] , * args , ** kwargs ) if ret is not None : return ret [ 0 ]...
DEPRECATE in favor of accepts_scalar_input2 ; only accepts one input as vector
8,966
def __assert_param_consistency ( args , argx_list_ ) : if util_arg . NO_ASSERTS : return if len ( argx_list_ ) == 0 : return True argx_flags = [ util_iter . isiterable ( args [ argx ] ) for argx in argx_list_ ] try : assert all ( [ argx_flags [ 0 ] == flag for flag in argx_flags ] ) , ( 'invalid mixing of iterable and ...
debugging function for accepts_scalar_input2 checks to make sure all the iterable inputs are of the same length
8,967
def accepts_scalar_input_vector_output ( func ) : @ ignores_exc_tb ( outer_wrapper = False ) def wrp_asivo ( self , input_ , * args , ** kwargs ) : if util_iter . isiterable ( input_ ) : return func ( self , input_ , * args , ** kwargs ) else : result = func ( self , ( input_ , ) , * args , ** kwargs ) if len ( result ...
DEPRECATE IN FAVOR OF accepts_scalar_input2
8,968
def accepts_numpy ( func ) : def wrp_accepts_numpy ( self , input_ , * args , ** kwargs ) : if not ( util_type . HAVE_NUMPY and isinstance ( input_ , np . ndarray ) ) : return func ( self , input_ , * args , ** kwargs ) else : if UNIQUE_NUMPY : input_list , inverse_unique = np . unique ( input_ , return_inverse = True ...
Allows the first input to be a numpy array and get result in numpy form
8,969
def memoize_nonzero(func):
    """Memoization decorator for functions taking a nonzero number of
    positional arguments.

    Returns a dict subclass: the args tuple is the cache key and
    ``__missing__`` computes entries lazily.
    """
    class _MemoDict(dict):
        def __init__(self, wrapped):
            self.func = wrapped

        def __call__(self, *args):
            return self[args]

        def __missing__(self, key):
            value = self.func(*key)
            self[key] = value
            return value

    return _MemoDict(func)
Memoization decorator for functions taking a nonzero number of arguments .
8,970
def memoize(func):
    """Simple memoization decorator.

    Caches on the string form of args and kwargs; the cache dict is
    exposed both as ``func._util_decor_memoize_cache`` and as
    ``memoizer.cache``. The wrapper's signature is preserved via
    ``preserve_sig``.
    """
    cache = func._util_decor_memoize_cache = {}

    def memoizer(*args, **kwargs):
        key = str(args) + str(kwargs)
        try:
            return cache[key]
        except KeyError:
            cache[key] = func(*args, **kwargs)
            return cache[key]

    memoizer = preserve_sig(memoizer, func)
    memoizer.cache = cache
    return memoizer
simple memoization decorator
8,971
def lazyfunc(func):
    """Returns a memoized version of a function.

    Results are keyed on the repr of the arguments, so unhashable
    arguments work as long as their reprs are stable.
    """
    closuremem_ = [{}]

    def wrapper(*args, **kwargs):
        cache = closuremem_[0]
        key = (repr(args), repr(kwargs))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]

    return wrapper
Returns a memcached version of a function
8,972
def apply_docstr ( docstr_func ) : def docstr_applier ( func ) : if isinstance ( docstr_func , six . string_types ) : olddoc = meta_util_six . get_funcdoc ( func ) if olddoc is None : olddoc = '' newdoc = olddoc + docstr_func meta_util_six . set_funcdoc ( func , newdoc ) return func else : preserved_func = preserve_sig...
Changes docstr of one function to that of another
8,973
def preserve_sig ( wrapper , orig_func , force = False ) : from utool . _internal import meta_util_six from utool import util_str from utool import util_inspect if wrapper is orig_func : return orig_func orig_docstr = meta_util_six . get_funcdoc ( orig_func ) orig_docstr = '' if orig_docstr is None else orig_docstr ori...
Decorates a wrapper function .
8,974
def _sigfigs ( n , sigfigs = 3 ) : 'helper function to round a number to significant figures' n = float ( n ) if n == 0 or math . isnan ( n ) : return n return round ( n , - int ( math . floor ( math . log10 ( abs ( n ) ) ) - sigfigs + 1 ) )
helper function to round a number to significant figures
8,975
def merge_moments ( m_a , m_a2 , m_a3 , m_a4 , n_a , m_b , m_b2 , m_b3 , m_b4 , n_b ) : delta = m_b - m_a delta_2 = delta * delta delta_3 = delta * delta_2 delta_4 = delta * delta_3 n_x = n_a + n_b m_x = m_a + delta * n_b / n_x m_x2 = m_a2 + m_b2 + delta_2 * n_a * n_b / n_x m_x3 = m_a3 + m_b3 + delta_3 * n_a * n_b * ( ...
Merge moments of two samples A and B . parameters are m_a ... m_a4 = first through fourth moment of sample A n_a = size of sample A m_b ... m_b4 = first through fourth moment of sample B n_b = size of sample B
8,976
def _transition ( self , nxt , cur = None , since = None ) : self . transition_intervals [ ( cur , nxt ) ] . tick ( ) if since : self . state_durations [ cur ] . end ( since )
Register that a transition has taken place . nxt is an identifier for the state being entered . cur is an identifier for the state being left . since is the time at which the previous state was entered .
8,977
def _cleanup ( self , ref ) : 'cleanup after a transitor weakref fires' self . transitor_states [ self . _weakref_holder [ ref ] ] -= 1 del self . _weakref_holder [ ref ]
cleanup after a transitor weakref fires
8,978
def _commit ( self , ref ) : 'commit a walkers data after it is collected' path_times = self . _weakref_path_map [ ref ] path_times . append ( nanotime ( ) ) del self . _weakref_path_map [ ref ] path = tuple ( path_times [ 1 : : 2 ] ) times = path_times [ : : 2 ] if path not in self . path_stats : self . path_stats [ p...
commit a walkers data after it is collected
8,979
def pformat ( self , prefix = ( ) ) : nan = float ( "nan" ) def sformat ( segment , stat ) : FMT = "n={0}, mean={1}, p50/95={2}/{3}, max={4}" line_segs = [ segment ] for s in [ stat ] : p = s . get_percentiles ( ) p50 , p95 = p . get ( 0.50 , nan ) , p . get ( 0.95 , nan ) line_segs . append ( FMT . format ( s . n , s ...
Makes a pretty ASCII format of the data suitable for displaying in a console or saving to a text file . Returns a list of lines .
8,980
def specfn_quant_generator(specfiles, quantfiles, tag, ignore_tags):
    """Generates (specfile-basename, quant-element) tuples for general
    formats, pairing each spec file with its quant file positionally.
    """
    for specfn, quantfn in zip(specfiles, quantfiles):
        basename = os.path.basename(specfn)
        for quant_el in basereader.generate_xmltags(quantfn, tag, ignore_tags):
            yield basename, quant_el
Generates tuples of specfile and quant element for general formats
8,981
def get_feature_info ( feature ) : dimensions = feature . findall ( 'position' ) for dim in dimensions : if dim . attrib [ 'dim' ] == '0' : rt = dim . text elif dim . attrib [ 'dim' ] == '1' : mz = dim . text return { 'rt' : float ( rt ) , 'mz' : float ( mz ) , 'charge' : int ( feature . find ( 'charge' ) . text ) , 'i...
Returns a dict with feature information
8,982
def merge_maps(m, base):
    """Merge undefined map entries from ``base`` into ``m`` in place;
    existing keys in ``m`` are left untouched.
    """
    for key, value in base.items():
        m.setdefault(key, value)
Merge in undefined map entries from given map .
8,983
def merge_lists(l, base):
    """Merge entries from ``base`` into ``l`` in place, appending only
    those not already present (preserving ``base`` order).
    """
    for item in base:
        if item not in l:
            l.append(item)
Merge in undefined list entries from given list .
8,984
def generate_top_psms ( psms , protcol ) : top_ms1_psms = { } for psm in psms : protacc = psm [ protcol ] precursor_amount = psm [ mzidtsvdata . HEADER_PRECURSOR_QUANT ] if ';' in protacc or precursor_amount == 'NA' : continue precursor_amount = float ( precursor_amount ) psm_seq = psm [ mzidtsvdata . HEADER_PEPTIDE ] ...
Fed with a psms generator this returns the 3 PSMs with the highest precursor intensities ( or areas or whatever is given in the HEADER_PRECURSOR_QUANT
8,985
def add_ms1_quant_from_top3_mzidtsv ( proteins , psms , headerfields , protcol ) : if not protcol : protcol = mzidtsvdata . HEADER_MASTER_PROT top_ms1_psms = generate_top_psms ( psms , protcol ) for protein in proteins : prot_acc = protein [ prottabledata . HEADER_PROTEIN ] prec_area = calculate_protein_precursor_quant...
Collects PSMs with the highest precursor quant values and adds the sum of the top 3 of these to a protein table
8,986
def toc ( tt , return_msg = False , write_msg = True , verbose = None ) : if verbose is not None : write_msg = verbose ( msg , start_time ) = tt ellapsed = ( default_timer ( ) - start_time ) if ( not return_msg ) and write_msg and msg is not None : sys . stdout . write ( '...toc(%.4fs, ' % ellapsed + '"' + str ( msg ) ...
similar to matlab toc
8,987
def parse_timestamp ( timestamp , zone = 'UTC' , timestamp_format = None ) : r if timestamp is None : return None use_delorean = True or six . PY2 if use_delorean : import delorean if not isinstance ( timestamp , six . string_types ) : raise NotImplementedError ( 'Unknown format: timestamp=%r' % ( timestamp , ) ) if ti...
r pip install delorean
8,988
def date_to_datetime(date, fraction=0.0):
    """Convert a date to a datetime at some fraction through the day.

    fraction=0 gives the start of the day; fraction=1 gives the last
    whole second of the day (23:59:59).
    """
    last_second = (60 * 60 * 24) - 1
    offset = datetime.timedelta(seconds=int(last_second * fraction))
    midnight = datetime.datetime.combine(date, datetime.time())
    return midnight + offset
fraction is how much through the day you are . 0 = start of the day 1 = end of the day .
8,989
def ec2_instances():
    """Use the EC2 API to get a list of all machines in REGION."""
    region = boto.ec2.get_region(REGION)
    reservations = region.connect().get_all_instances()
    machines = []
    for reservation in reservations:
        machines.extend(reservation.instances)
    return machines
Use the EC2 API to get a list of all machines
8,990
def instances(exp=".*"):
    """Filter the list of machines, keeping nodes with an IP whose Name
    tag matches the given regular expression.
    """
    pattern = re.compile(exp)
    matching = []
    for node in ec2_instances():
        if not (node.tags and ip(node)):
            continue
        try:
            if pattern.match(node.tags.get("Name")):
                matching.append(node)
        except TypeError:
            # Name tag missing: match(None) raises TypeError; skip it.
            pass
    return matching
Filter list of machines matching an expression
8,991
def use(node):
    """Set the fabric environment for the specified node."""
    try:
        # Role is the middle segment of a "prefix-role-..." Name tag.
        role = node.tags.get("Name").split('-')[1]
    except IndexError:
        pass
    else:
        env.roledefs[role] += [ip(node)]
    env.nodes += [node]
    env.hosts += [ip(node)]
Set the fabric environment for the specified node
8,992
def build_alias_map ( regex_map , tag_vocab ) : import utool as ut import re alias_map = ut . odict ( [ ] ) for pats , new_tag in reversed ( regex_map ) : pats = ut . ensure_iterable ( pats ) for pat in pats : flags = [ re . match ( pat , t ) for t in tag_vocab ] for old_tag in ut . compress ( tag_vocab , flags ) : ali...
Constructs explicit mapping . Order of items in regex map matters . Items at top are given preference .
8,993
def alias_tags(tags_list, alias_map):
    """Update tags to new values using ``alias_map``.

    Tags mapped to None are dropped; duplicates produced by aliasing
    collapse (result order within each tag list is unspecified).
    """
    def _remap(tags):
        renamed = [alias_map.get(t, t) for t in tags]
        return list(set([t for t in renamed if t is not None]))

    return [_remap(tags) for tags in tags_list]
update tags to new values
8,994
def setup ( self ) : self . client = self . _get_client ( ) sg = self . _create_isolation_security_group ( ) if self . exists is not True : acl = self . _create_network_acl ( ) self . _add_network_acl_entries ( acl ) self . _add_security_group_rule ( sg ) self . _add_security_group_to_instance ( sg ) if self . dry_run ...
Conditions that can not be dry_run
8,995
def _args2_fpath ( dpath , fname , cfgstr , ext ) : r if len ( ext ) > 0 and ext [ 0 ] != '.' : raise ValueError ( 'Please be explicit and use a dot in ext' ) max_len = 128 cfgstr_hashlen = 16 prefix = fname fname_cfgstr = consensed_cfgstr ( prefix , cfgstr , max_len = max_len , cfgstr_hashlen = cfgstr_hashlen ) fpath ...
r Ensures that the filename is not too long
8,996
def save_cache(dpath, fname, cfgstr, data, ext='.cPkl', verbose=None):
    """Saves data using util_io, deriving a length-safe filename from
    the configuration string. Returns the path written.
    """
    fpath = _args2_fpath(dpath, fname, cfgstr, ext)
    util_io.save_data(fpath, data, verbose=verbose)
    return fpath
Saves data using util_io but smartly constructs a filename
8,997
def load_cache ( dpath , fname , cfgstr , ext = '.cPkl' , verbose = None , enabled = True ) : if verbose is None : verbose = VERBOSE_CACHE if not USE_CACHE or not enabled : if verbose > 1 : print ( '[util_cache] ... cache disabled: dpath=%s cfgstr=%r' % ( basename ( dpath ) , cfgstr , ) ) raise IOError ( 3 , 'Cache Loa...
Loads data using util_io but smartly constructs a filename
8,998
def tryload_cache(dpath, fname, cfgstr, verbose=None):
    """Returns the cached data, or None if the cache cannot be loaded
    (load_cache signals a miss with IOError).
    """
    try:
        return load_cache(dpath, fname, cfgstr, verbose=verbose)
    except IOError:
        return None
returns None if cache cannot be loaded
8,999
def tryload_cache_list(dpath, fname, cfgstr_list, verbose=False):
    """Loads a list of similarly cached datas.

    Returns the loaded data list together with flags marking the
    entries that missed the cache and still need to be computed.
    """
    data_list = [tryload_cache(dpath, fname, cfgstr, verbose)
                 for cfgstr in cfgstr_list]
    ismiss_list = [data is None for data in data_list]
    return data_list, ismiss_list
loads a list of similar cached datas . Returns flags that needs to be computed