idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
8,800
def _get_license_description ( license_code ) : req = requests . get ( "{base_url}/licenses/{license_code}" . format ( base_url = BASE_URL , license_code = license_code ) , headers = _HEADERS ) if req . status_code == requests . codes . ok : s = req . json ( ) [ "body" ] search_curly = re . search ( r'\{(.*)\}' , s ) s...
Gets the body for a license based on a license code
8,801
def get_license_summary ( license_code ) : try : abs_file = os . path . join ( _ROOT , "summary.json" ) with open ( abs_file , 'r' ) as f : summary_license = json . loads ( f . read ( ) ) [ license_code ] print ( Fore . YELLOW + 'SUMMARY' ) print ( Style . RESET_ALL ) , print ( summary_license [ 'summary' ] ) print ( S...
Gets the license summary and permitted forbidden and required behaviour
8,802
def main ( ) : arguments = docopt ( __doc__ , version = __version__ ) if arguments [ 'ls' ] or arguments [ 'list' ] : _get_licences ( ) elif arguments [ '--tldr' ] and arguments [ '<NAME>' ] : get_license_summary ( arguments [ '<NAME>' ] . lower ( ) ) elif arguments [ '--export' ] and arguments [ '<NAME>' ] : save_lice...
harvey helps you manage and add license from the command line
8,803
def get ( self , user_id ) : path = '/' . join ( [ 'person' , user_id ] ) return self . rachio . get ( path )
Retrieve the information for a person entity .
8,804
def copy_template ( self , name = None ) : ret = Table ( self . table_name ) ret . _indexes . update ( dict ( ( k , v . copy_template ( ) ) for k , v in self . _indexes . items ( ) ) ) ret ( name ) return ret
Create empty copy of the current table with copies of all index definitions .
8,805
def clone ( self , name = None ) : ret = self . copy_template ( ) . insert_many ( self . obs ) ( name ) return ret
Create full copy of the current table including table contents and index definitions .
8,806
def delete_index ( self , attr ) : if attr in self . _indexes : del self . _indexes [ attr ] self . _uniqueIndexes = [ ind for ind in self . _indexes . values ( ) if ind . is_unique ] return self
Deletes an index from the Table . Can be used to drop and rebuild an index or to convert a non - unique index to a unique index or vice versa .
8,807
def insert_many ( self , it ) : unique_indexes = self . _uniqueIndexes NO_SUCH_ATTR = object ( ) new_objs = list ( it ) if unique_indexes : for ind in unique_indexes : ind_attr = ind . attr new_keys = dict ( ( getattr ( obj , ind_attr , NO_SUCH_ATTR ) , obj ) for obj in new_objs ) if not ind . accept_none and ( None in...
Inserts a collection of objects into the table .
8,808
def remove_many ( self , it ) : to_be_deleted = list ( it ) del_indices = [ ] for i , ob in enumerate ( self . obs ) : try : tbd_index = to_be_deleted . index ( ob ) except ValueError : continue else : del_indices . append ( i ) to_be_deleted . pop ( tbd_index ) if not to_be_deleted : break for i in sorted ( del_indice...
Removes a collection of objects from the table .
8,809
def _query_attr_sort_fn ( self , attr_val ) : attr , v = attr_val if attr in self . _indexes : idx = self . _indexes [ attr ] if v in idx : return len ( idx [ v ] ) else : return 0 else : return 1e9
Used to order where keys by most selective key first
8,810
def delete ( self , ** kwargs ) : if not kwargs : return 0 affected = self . where ( ** kwargs ) self . remove_many ( affected ) return len ( affected )
Deletes matching objects from the table based on given named parameters . If multiple named parameters are given then only objects that satisfy all of the query criteria will be removed .
8,811
def sort ( self , key , reverse = False ) : if isinstance ( key , ( basestring , list , tuple ) ) : if isinstance ( key , basestring ) : attrdefs = [ s . strip ( ) for s in key . split ( ',' ) ] attr_orders = [ ( a . split ( ) + [ 'asc' , ] ) [ : 2 ] for a in attrdefs ] else : if isinstance ( key [ 0 ] , basestring ) :...
Sort Table in place using given fields as sort key .
8,812
def select ( self , fields , ** exprs ) : fields = self . _parse_fields_string ( fields ) def _make_string_callable ( expr ) : if isinstance ( expr , basestring ) : return lambda r : expr % r else : return expr exprs = dict ( ( k , _make_string_callable ( v ) ) for k , v in exprs . items ( ) ) raw_tuples = [ ] for ob i...
Create a new table containing a subset of attributes with optionally newly - added fields computed from each rec in the original table .
8,813
def formatted_table ( self , * fields , ** exprs ) : fields = set ( fields ) select_exprs = ODict ( ( f , lambda r , f = f : str ( getattr , f , None ) ) for f in fields ) for ename , expr in exprs . items ( ) : if isinstance ( expr , basestring ) : if re . match ( r'^[a-zA-Z_][a-zA-Z0-9_]*$' , expr ) : select_exprs [ ...
Create a new table with all string formatted attribute values typically in preparation for formatted output .
8,814
def join_on ( self , attr ) : if attr not in self . _indexes : raise ValueError ( "can only join on indexed attributes" ) return JoinTerm ( self , attr )
Creates a JoinTerm in preparation for joining with another table to indicate what attribute should be used in the join . Only indexed attributes may be used in a join .
8,815
def csv_import ( self , csv_source , encoding = 'utf-8' , transforms = None , row_class = DataObject , ** kwargs ) : reader_args = dict ( ( k , v ) for k , v in kwargs . items ( ) if k not in [ 'encoding' , 'csv_source' , 'transforms' , 'row_class' ] ) reader = lambda src : csv . DictReader ( src , ** reader_args ) ret...
Imports the contents of a CSV - formatted file into this table .
8,816
def tsv_import ( self , xsv_source , encoding = "UTF-8" , transforms = None , row_class = DataObject , ** kwargs ) : return self . _xsv_import ( xsv_source , encoding , transforms = transforms , delimiter = "\t" , row_class = row_class , ** kwargs )
Imports the contents of a tab - separated data file into this table .
8,817
def csv_export ( self , csv_dest , fieldnames = None , encoding = "UTF-8" ) : close_on_exit = False if isinstance ( csv_dest , basestring ) : if PY_3 : csv_dest = open ( csv_dest , 'w' , newline = '' , encoding = encoding ) else : csv_dest = open ( csv_dest , 'wb' ) close_on_exit = True try : if fieldnames is None : fi...
Exports the contents of the table to a CSV - formatted file .
8,818
def json_import ( self , source , encoding = "UTF-8" , transforms = None , row_class = DataObject ) : class _JsonFileReader ( object ) : def __init__ ( self , src ) : self . source = src def __iter__ ( self ) : current = '' for line in self . source : if current : current += ' ' current += line try : yield json . loads...
Imports the contents of a JSON data file into this table .
8,819
def json_export ( self , dest , fieldnames = None , encoding = "UTF-8" ) : close_on_exit = False if isinstance ( dest , basestring ) : if PY_3 : dest = open ( dest , 'w' , encoding = encoding ) else : dest = open ( dest , 'w' ) close_on_exit = True try : if isinstance ( fieldnames , basestring ) : fieldnames = fieldnam...
Exports the contents of the table to a JSON - formatted file .
8,820
def add_field ( self , attrname , fn , default = None ) : def _add_field_to_rec ( rec_ , fn_ = fn , default_ = default ) : try : val = fn_ ( rec_ ) except Exception : val = default_ if isinstance ( rec_ , DataObject ) : rec_ . __dict__ [ attrname ] = val else : setattr ( rec_ , attrname , val ) try : do_all ( _add_fiel...
Computes a new attribute for each object in table or replaces an existing attribute in each record with a computed value
8,821
def groupby ( self , keyexpr , ** outexprs ) : if isinstance ( keyexpr , basestring ) : keyattrs = keyexpr . split ( ) keyfn = lambda o : tuple ( getattr ( o , k ) for k in keyattrs ) elif isinstance ( keyexpr , tuple ) : keyattrs = ( keyexpr [ 0 ] , ) keyfn = keyexpr [ 1 ] else : raise TypeError ( "keyexpr must be str...
simple prototype of group by with support for expressions in the group - by clause and outputs
8,822
def unique ( self , key = None ) : if isinstance ( key , basestring ) : key = lambda r , attr = key : getattr ( r , attr , None ) ret = self . copy_template ( ) seen = set ( ) for ob in self : if key is None : try : ob_dict = vars ( ob ) except TypeError : ob_dict = dict ( ( k , getattr ( ob , k ) ) for k in _object_at...
Create a new table of objects containing no duplicate values .
8,823
def as_html ( self , fields = '*' ) : fields = self . _parse_fields_string ( fields ) def td_value ( v ) : return '<td><div align="{}">{}</div></td>' . format ( ( 'left' , 'right' ) [ isinstance ( v , ( int , float ) ) ] , str ( v ) ) def row_to_tr ( r ) : return "<tr>" + "" . join ( td_value ( getattr ( r , fld ) ) fo...
Output the table as a rudimentary HTML table .
8,824
def dump ( self , out = sys . stdout , row_fn = repr , limit = - 1 , indent = 0 ) : NL = '\n' if indent : out . write ( " " * indent + self . pivot_key_str ( ) ) else : out . write ( "Pivot: %s" % ',' . join ( self . _pivot_attrs ) ) out . write ( NL ) if self . has_subtables ( ) : do_all ( sub . dump ( out , row_fn ,...
Dump out the contents of this table in a nested listing .
8,825
def dump_counts ( self , out = sys . stdout , count_fn = len , colwidth = 10 ) : if len ( self . _pivot_attrs ) == 1 : out . write ( "Pivot: %s\n" % ',' . join ( self . _pivot_attrs ) ) maxkeylen = max ( len ( str ( k ) ) for k in self . keys ( ) ) maxvallen = colwidth keytally = { } for k , sub in self . items ( ) : s...
Dump out the summary counts of entries in this pivot table as a tabular listing .
8,826
def as_table ( self , fn = None , col = None , col_label = None ) : if col_label is None : col_label = col if fn is None : fn = len if col_label is None : col_label = 'count' ret = Table ( ) do_all ( ret . create_index ( attr ) for attr in self . _pivot_attrs ) if len ( self . _pivot_attrs ) == 1 : for sub in self . su...
Dump out the summary counts of this pivot table as a Table .
8,827
def _update_record ( record ) : dt = datetime . fromtimestamp ( record . created ) record . springtime = str ( dt ) [ : - 3 ] record . levelname_spring = "WARN" if record . levelname == "WARNING" else record . levelname record . process_id = str ( os . getpid ( ) ) record . thread_name = ( current_thread ( ) . getName ...
Collates values needed by LOG_FORMAT
8,828
def _tracing_information ( ) : values = b3 . values ( ) if values [ b3 . b3_trace_id ] : return [ current_app . name if current_app . name else " - " , values [ b3 . b3_trace_id ] , values [ b3 . b3_span_id ] , "false" , ]
Gets B3 distributed tracing information if available . This is returned as a list ready to be formatted into Spring Cloud Sleuth compatible format .
8,829
def _authenticate ( self ) : data = { 'username' : self . username , 'password' : self . password } url = '{base}/client/login' . format ( base = self . base_url ) response = self . _session . get ( url , params = data ) print ( response . text ) data = response . json ( ) if not data . get ( 'success' ) : raise Invali...
Authenticates to the api and sets up client information .
8,830
def _logout ( self , reset = True ) : url = '{base}/client/auth/logout' . format ( base = self . base_url ) response = self . _session . get ( url , params = self . _parameters ) if response . ok : if reset : self . _reset ( ) return True else : return False
Log out of the API .
8,831
def _state ( self ) : state = { } required_keys = ( 'deviceStatusInfo' , 'gasUsage' , 'powerUsage' , 'thermostatInfo' , 'thermostatStates' ) try : for _ in range ( self . _state_retries ) : state . update ( self . _get_data ( '/client/auth/retrieveToonState' ) ) except TypeError : self . _logger . exception ( 'Could no...
The internal state of the object .
8,832
def get_smokedetector_by_name ( self , name ) : return next ( ( smokedetector for smokedetector in self . smokedetectors if smokedetector . name . lower ( ) == name . lower ( ) ) , None )
Retrieves a smokedetector object by its name
8,833
def get_light_by_name ( self , name ) : return next ( ( light for light in self . lights if light . name . lower ( ) == name . lower ( ) ) , None )
Retrieves a light object by its name
8,834
def get_smartplug_by_name ( self , name ) : return next ( ( plug for plug in self . smartplugs if plug . name . lower ( ) == name . lower ( ) ) , None )
Retrieves a smartplug object by its name
8,835
def get_thermostat_state_by_name ( self , name ) : self . _validate_thermostat_state_name ( name ) return next ( ( state for state in self . thermostat_states if state . name . lower ( ) == name . lower ( ) ) , None )
Retrieves a thermostat state object by its assigned name
8,836
def get_thermostat_state_by_id ( self , id_ ) : return next ( ( state for state in self . thermostat_states if state . id == id_ ) , None )
Retrieves a thermostat state object by its id
8,837
def thermostat_state ( self ) : current_state = self . thermostat_info . active_state state = self . get_thermostat_state_by_id ( current_state ) if not state : self . _logger . debug ( 'Manually set temperature, no Thermostat ' 'State chosen!' ) return state
The state of the thermostat programming
8,838
def thermostat_state ( self , name ) : self . _validate_thermostat_state_name ( name ) id_ = next ( ( key for key in STATES . keys ( ) if STATES [ key ] . lower ( ) == name . lower ( ) ) , None ) data = copy . copy ( self . _parameters ) data . update ( { 'state' : 2 , 'temperatureState' : id_ } ) response = self . _ge...
Changes the thermostat state to the one passed as an argument as name
8,839
def thermostat ( self , temperature ) : target = int ( temperature * 100 ) data = copy . copy ( self . _parameters ) data . update ( { 'value' : target } ) response = self . _get_data ( '/client/auth/setPoint' , data ) self . _logger . debug ( 'Response received {}' . format ( response ) ) self . _clear_cache ( )
A temperature to set the thermostat to . Requires a float .
8,840
def euler_tour_dfs ( G , source = None ) : if source is None : nodes = G else : nodes = [ source ] yielder = [ ] visited = set ( ) for start in nodes : if start in visited : continue visited . add ( start ) stack = [ ( start , iter ( G [ start ] ) ) ] while stack : parent , children = stack [ - 1 ] try : child = next (...
adaptation of networkx dfs
8,841
def reroot ( self , s ) : o_s1 = self . first_lookup [ s ] splice1 = self . tour [ 1 : o_s1 ] rest = self . tour [ o_s1 + 1 : ] new_tour = [ s ] + rest + splice1 + [ s ] new_tree = TestETT . from_tour ( new_tour , fast = self . fast ) return new_tree
s = 3 s = B
8,842
def remove_edge ( self , u , v ) : print ( 'Dynamically removing uv=(%r, %r)' % ( u , v ) ) self . graph . remove_edge ( u , v ) e = ( u , v ) if not self . forests [ 0 ] . has_edge ( u , v ) : return for i in reversed ( range ( 0 , self . level [ e ] + 1 ) ) : self . forests [ i ] . remove_edge ( u , v ) for i in reve...
Using notation where 0 is top level
8,843
def extend_regex2 ( regexpr , reflags = 0 ) : regexpr = extend_regex ( regexpr ) IGNORE_CASE_PREF = '\\c' if regexpr . startswith ( IGNORE_CASE_PREF ) : regexpr = regexpr [ len ( IGNORE_CASE_PREF ) : ] reflags = reflags | re . IGNORECASE return regexpr , reflags
also preprocesses flags
8,844
def named_field ( key , regex , vim = False ) : if key is None : return r'(%s)' % ( regex , ) if vim : return r'\(%s\)' % ( regex ) else : return r'(?P<%s>%s)' % ( key , regex )
Creates a named regex group that can be referend via a backref . If key is None the backref is referenced by number .
8,845
def regex_replace ( regex , repl , text ) : r return re . sub ( regex , repl , text , ** RE_KWARGS )
r thin wrapper around re . sub regex_replace
8,846
def clear ( prompt = True , cache = None ) : cache = cache or config . cache ( ) if prompt : answer = input ( 'Clear library cache files in %s/? (yN) ' % cache ) if not answer . startswith ( 'y' ) : return False shutil . rmtree ( cache , ignore_errors = True ) return True
Clear loady's cache .
8,847
def create ( gitpath , cache = None ) : if gitpath . startswith ( config . LIBRARY_PREFIX ) : path = gitpath [ len ( config . LIBRARY_PREFIX ) : ] return Library ( * path . split ( '/' ) , cache = cache )
Create a Library from a git path .
8,848
def load ( self ) : if not git : raise EnvironmentError ( MISSING_GIT_ERROR ) if os . path . exists ( self . path ) : if not config . CACHE_DISABLE : return shutil . rmtree ( self . path , ignore_errors = True ) with files . remove_on_exception ( self . path ) : url = self . GIT_URL . format ( ** vars ( self ) ) repo =...
Load the library .
8,849
def check_existens_of_staging_tag_in_remote_repo ( ) : staging_tag = Git . create_git_version_tag ( APISettings . GIT_STAGING_PRE_TAG ) command_git = 'git ls-remote -t' command_awk = 'awk \'{print $2}\'' command_cut_1 = 'cut -d \'/\' -f 3' command_cut_2 = 'cut -d \'^\' -f 1' command_sort = 'sort -b -t . -k 1,1nr -k 2,2...
This method will check if the given tag exists as a staging tag in the remote repository .
8,850
def __debug ( command , dry = False ) : if dry : command . append ( '--dry-run' ) Shell . debug ( command ) if dry : call ( command ) exit ( 1 )
This method will be called if the debug mode is on .
8,851
def __git_add ( args = '' ) : command = [ 'git' , 'add' , '.' ] Shell . msg ( 'Adding files...' ) if APISettings . DEBUG : Git . __debug ( command , True ) for key in args : command . append ( key ) if not call ( command ) : pass return False
Add files to staging . The function call will return 0 if the command success .
8,852
def __git_commit ( git_tag ) : Shell . msg ( 'Commit changes.' ) if APISettings . DEBUG : Shell . debug ( 'Execute "git commit" in dry mode.' ) if not call ( [ 'git' , 'commit' , '-m' , '\'' + git_tag + '\'' , '--dry-run' ] ) : pass return True if not call ( [ 'git' , 'commit' , '-m' , '\'' + git_tag + '\'' ] ) : retur...
Commit files to branch . The function call will return 0 if the command success .
8,853
def __git_tag ( git_tag ) : command = [ 'git' , 'tag' , '-a' , git_tag , '-m' , '\'' + git_tag + '\'' ] Shell . msg ( 'Create tag from version ' + git_tag ) if APISettings . DEBUG : Git . __debug ( command , False ) if not call ( command ) : return True return False
Create new tag . The function call will return 0 if the command success .
8,854
def __git_tag_push ( ) : command = [ 'git' , 'push' , 'origin' , '--tags' ] Shell . msg ( 'Pushing tags...' ) if APISettings . DEBUG : Git . __debug ( command , True ) if not call ( command ) : return True return False
Push all tags . The function call will return 0 if the command success .
8,855
def split_into_batches ( input_list , batch_size , batch_storage_dir , checkpoint = False ) : if checkpoint and not os . path . exists ( batch_storage_dir ) : os . mkdir ( batch_storage_dir ) batches = [ { 'index' : batch_index , 'data' : input_list [ start_index : start_index + batch_size ] , 'input_filename' : os . p...
Break the input data into smaller batches optionally saving each one to disk .
8,856
def map_batch_parallel ( input_list , batch_size , item_mapper = None , batch_mapper = None , flatten = True , n_jobs = - 1 , ** kwargs ) : if item_mapper is None and batch_mapper is None : raise ValueError ( 'You should specify either batch_mapper or item_mapper.' ) if batch_mapper is None : batch_mapper = _default_ba...
Split the data into batches and process each batch in its own thread .
8,857
def get_cfg ( ast_func ) : cfg_func = cfg . Function ( ) for ast_var in ast_func . input_variable_list : cfg_var = cfg_func . get_variable ( ast_var . name ) cfg_func . add_input_variable ( cfg_var ) for ast_var in ast_func . output_variable_list : cfg_var = cfg_func . get_variable ( ast_var . name ) cfg_func . add_out...
Traverses the AST and returns the corresponding CFG
8,858
def overrideable_partial ( func , * args , ** default_kwargs ) : import functools @ functools . wraps ( func ) def partial_wrapper ( * given_args , ** given_kwargs ) : kwargs = default_kwargs . copy ( ) kwargs . update ( given_kwargs ) return func ( * ( args + given_args ) , ** kwargs ) return partial_wrapper
like partial but given kwargs can be overridden at call time
8,859
def get_nonconflicting_string ( base_fmtstr , conflict_set , offset = 0 ) : conflict_set_ = set ( conflict_set ) for count in it . count ( offset ) : base_str = base_fmtstr % count if base_str not in conflict_set_ : return base_str
gets a new string that won't conflict with something that already exists
8,860
def get_nonconflicting_path_old ( base_fmtstr , dpath , offset = 0 ) : r import utool as ut from os . path import basename pattern = '*' dname_list = ut . glob ( dpath , pattern , recursive = False , with_files = True , with_dirs = True ) conflict_set = set ( [ basename ( dname ) for dname in dname_list ] ) newname = u...
r base_fmtstr must have a %d in it
8,861
def are_you_sure ( msg = '' ) : r print ( msg ) from utool import util_arg from utool import util_str override = util_arg . get_argflag ( ( '--yes' , '--y' , '-y' ) ) if override : print ( 'accepting based on command line flag' ) return True valid_ans = [ 'yes' , 'y' ] valid_prompt = util_str . conj_phrase ( valid_ans ...
r Prompts user to accept or checks command line for - y
8,862
def grace_period ( msg = '' , seconds = 10 ) : import time print ( msg ) override = util_arg . get_argflag ( ( '--yes' , '--y' , '-y' ) ) print ( 'starting grace period' ) if override : print ( 'ending based on command line flag' ) return True for count in reversed ( range ( 1 , seconds + 1 ) ) : time . sleep ( 1 ) pri...
Gives user a window to stop a process before it happens
8,863
def delayed_retry_gen ( delay_schedule = [ .1 , 1 , 10 ] , msg = None , timeout = None , raise_ = True ) : import utool as ut import time if not ut . isiterable ( delay_schedule ) : delay_schedule = [ delay_schedule ] tt = ut . tic ( ) yield 0 for count in it . count ( 0 ) : if timeout is not None and ut . toc ( tt ) >...
template code for an infinite retry loop
8,864
def get_stats_str ( list_ = None , newlines = False , keys = None , exclude_keys = [ ] , lbl = None , precision = None , axis = 0 , stat_dict = None , use_nan = False , align = False , use_median = False , ** kwargs ) : from utool . util_str import repr4 import utool as ut if stat_dict is None : stat_dict = get_stats (...
Returns the string version of get_stats
8,865
def make_call_graph ( func , * args , ** kwargs ) : from pycallgraph import PyCallGraph from pycallgraph . output import GraphvizOutput with PyCallGraph ( output = GraphvizOutput ) : func ( * args , ** kwargs )
profile with pycallgraph
8,866
def _memory_profile ( with_gc = False ) : import utool as ut if with_gc : garbage_collect ( ) import guppy hp = guppy . hpy ( ) print ( '[hpy] Waiting for heap output...' ) heap_output = hp . heap ( ) print ( heap_output ) print ( '[hpy] total heap size: ' + ut . byte_str2 ( heap_output . size ) ) ut . util_resources ....
Helper for memory debugging . Mostly just a namespace where I experiment with guppy and heapy .
8,867
def make_object_graph ( obj , fpath = 'sample_graph.png' ) : import objgraph objgraph . show_most_common_types ( ) objgraph . show_refs ( [ obj ] , filename = 'ref_graph.png' ) objgraph . show_backrefs ( [ obj ] , filename = 'backref_graph.png' )
memoryprofile with objgraph
8,868
def inverable_unique_two_lists ( item1_list , item2_list ) : import utool as ut unique_list1 , inverse1 = np . unique ( item1_list , return_inverse = True ) unique_list2 , inverse2 = np . unique ( item2_list , return_inverse = True ) flat_stacked , cumsum = ut . invertible_flatten2 ( ( unique_list1 , unique_list2 ) ) f...
item1_list = aid1_list item2_list = aid2_list
8,869
def uninvert_unique_two_lists ( flat_list , reconstruct_tup ) : import utool as ut ( inverse3 , cumsum , inverse2 , inverse1 ) = reconstruct_tup flat_stacked_ = ut . take ( flat_list , inverse3 ) unique_list1_ , unique_list2_ = ut . unflatten2 ( flat_stacked_ , cumsum ) res_list1_ = ut . take ( unique_list1_ , inverse1...
flat_list = thumb_list
8,870
def search_module ( mod , pat , ignore_case = True , recursive = False , _seen = None ) : r if _seen is not None and mod in _seen : return [ ] import utool as ut reflags = re . IGNORECASE * ignore_case found_list = [ name for name in dir ( mod ) if re . search ( pat , name , flags = reflags ) ] if recursive : if _seen ...
r Searches module functions classes and constants for members matching a pattern .
8,871
def instancelist ( obj_list , check = False , shared_attrs = None ) : class InstanceList_ ( object ) : def __init__ ( self , obj_list , shared_attrs = None ) : self . _obj_list = [ ] self . _shared_public_attrs = [ ] self . _example_type = None if len ( obj_list ) > 0 : import utool as ut self . _obj_list = obj_list ex...
Executes methods and attribute calls on a list of objects of the same type
8,872
def _heappush_max ( heap , item ) : heap . append ( item ) heapq . _siftdown_max ( heap , 0 , len ( heap ) - 1 )
why is this not in heapq
8,873
def take_column ( self , keys , * extra_keys ) : import utool as ut keys = ut . ensure_iterable ( keys ) + list ( extra_keys ) key_to_list = ut . dict_subset ( self . _key_to_list , keys ) newself = self . __class__ ( key_to_list , self . _meta . copy ( ) ) return newself
Takes a subset of columns
8,874
def take ( self , idxs ) : import utool as ut if False : key_to_list = ut . odict ( [ ( key , ut . take ( val , idxs ) ) for key , val in six . iteritems ( self . _key_to_list ) ] ) else : import numpy as np key_to_list = ut . odict ( [ ( key , ut . take ( val , idxs ) ) if not isinstance ( val , np . ndarray ) else va...
Takes a subset of rows
8,875
def remove ( self , idxs ) : import utool as ut keep_idxs = ut . index_complement ( idxs , len ( self ) ) return self . take ( keep_idxs )
Returns a copy with idxs removed
8,876
def group_items ( self , labels ) : import utool as ut unique_labels , groups = self . group ( labels ) label_to_group = ut . odict ( zip ( unique_labels , groups ) ) return label_to_group
group as dict
8,877
def group ( self , labels ) : unique_labels , groupxs = self . group_indicies ( labels ) groups = [ self . take ( idxs ) for idxs in groupxs ] return unique_labels , groups
group as list
8,878
def cast_column ( self , keys , func ) : import utool as ut for key in ut . ensure_iterable ( keys ) : self [ key ] = [ func ( v ) for v in self [ key ] ]
like map column but applies values inplace
8,879
def merge_rows ( self , key , merge_scalars = True ) : import utool as ut unique_labels , groupxs = self . group_indicies ( key ) single_xs = [ xs for xs in groupxs if len ( xs ) == 1 ] multi_xs = [ xs for xs in groupxs if len ( xs ) > 1 ] singles = self . take ( ut . flatten ( single_xs ) ) multis = [ self . take ( id...
Uses key as a unique index an merges all duplicates rows . Use cast_column to modify types of columns before merging to affect behavior of duplicate rectification .
8,880
def peek ( self ) : _heap = self . _heap _dict = self . _dict val , key = _heap [ 0 ] while key not in _dict or _dict [ key ] != val : self . _heappop ( _heap ) val , key = _heap [ 0 ] return key , val
Peek at the next item in the queue
8,881
def peek_many ( self , n ) : if n == 0 : return [ ] elif n == 1 : return [ self . peek ( ) ] else : items = list ( self . pop_many ( n ) ) self . update ( items ) return items
Actually this can be quite inefficient
8,882
def pop ( self , key = util_const . NoParam , default = util_const . NoParam ) : if key is not util_const . NoParam : if default is util_const . NoParam : return ( key , self . _dict . pop ( key ) ) else : return ( key , self . _dict . pop ( key , default ) ) try : _heap = self . _heap _dict = self . _dict val , key = ...
Pop the next item off the queue
8,883
def __execute_fromimport ( module , modname , import_tuples , verbose = False ) : if verbose : print ( '[UTIL_IMPORT] EXECUTING %d FROM IMPORT TUPLES' % ( len ( import_tuples ) , ) ) from_imports = __get_from_imports ( import_tuples ) for name , fromlist in from_imports : full_modname = '.' . join ( ( modname , name ) ...
Module From Imports
8,884
def _initstr ( modname , imports , from_imports , inject_execstr , withheader = True ) : header = _make_module_header ( ) if withheader else '' import_str = _make_imports_str ( imports , modname ) fromimport_str = _make_fromimport_str ( from_imports , modname ) initstr = '\n' . join ( [ str_ for str_ in [ header , impo...
Calls the other string makers
8,885
def _inject_execstr ( modname , import_tuples ) : if modname == 'utool' : injecter = 'util_inject' injecter_import = '' else : injecter_import = 'import utool' injecter = 'utool' injectstr_fmt = textwrap . dedent ( r ) injectstr_fmt = injectstr_fmt . replace ( '# STARTBLOCK' , '' ) injectstr_fmt = injectstr_fmt . repla...
Injection and Reload String Defs
8,886
def dynamic_import ( modname , import_tuples , developing = True , ignore_froms = [ ] , dump = False , ignore_startswith = [ ] , ignore_endswith = [ ] , ignore_list = [ ] , check_not_imported = True , return_initstr = False , verbose = False ) : if verbose : print ( '[UTIL_IMPORT] Running Dynamic Imports for modname=%r...
MAIN ENTRY POINT
8,887
def make_initstr ( modname , import_tuples , verbose = False ) : imports = [ tup [ 0 ] for tup in import_tuples ] from_imports = __get_from_imports ( import_tuples ) inject_execstr = _inject_execstr ( modname , import_tuples ) return _initstr ( modname , imports , from_imports , inject_execstr )
Just creates the string representation . Does no importing .
8,888
def make_import_tuples ( module_path , exclude_modnames = [ ] ) : from utool import util_path kwargs = dict ( private = False , full = False ) module_list = util_path . ls_modulefiles ( module_path , noext = True , ** kwargs ) package_list = util_path . ls_moduledirs ( module_path , ** kwargs ) exclude_set = set ( excl...
Infer the import_tuples from a module_path
8,889
def get_resource_dir ( ) : if WIN32 : dpath_ = '~/AppData/Roaming' elif LINUX : dpath_ = '~/.config' elif DARWIN : dpath_ = '~/Library/Application Support' else : raise AssertionError ( 'unknown os' ) dpath = normpath ( expanduser ( dpath_ ) ) return dpath
Returns a directory which should be writable for any application
8,890
def load_data ( fpath , ** kwargs ) : ext = splitext ( fpath ) [ 1 ] if ext in [ '.pickle' , '.cPkl' , '.pkl' ] : return load_cPkl ( fpath , ** kwargs ) elif ext in [ '.json' ] : return load_json ( fpath , ** kwargs ) elif ext in [ '.hdf5' ] : return load_hdf5 ( fpath , ** kwargs ) elif ext in [ '.txt' ] : return load_...
More generic interface to load data
8,891
def save_data ( fpath , data , ** kwargs ) : ext = splitext ( fpath ) [ 1 ] if ext in [ '.pickle' , '.cPkl' , '.pkl' ] : return save_cPkl ( fpath , data , ** kwargs ) elif ext in [ '.json' ] : return save_json ( fpath , data , ** kwargs ) elif ext in [ '.hdf5' ] : return save_hdf5 ( fpath , data , ** kwargs ) elif ext ...
More generic interface to write data
8,892
def write_to ( fpath , to_write , aslines = False , verbose = None , onlyifdiff = False , mode = 'w' , n = None ) : if onlyifdiff : import utool as ut if ut . hashstr ( read_from ( fpath ) ) == ut . hashstr ( to_write ) : print ( '[util_io] * no difference' ) return verbose = _rectify_verb_write ( verbose ) if verbose ...
Writes text to a file . Automatically encodes text as utf8 .
8,893
def read_from ( fpath , verbose = None , aslines = False , strict = True , n = None , errors = 'replace' ) : r if n is None : n = __READ_TAIL_N__ verbose = _rectify_verb_read ( verbose ) if verbose : print ( '[util_io] * Reading text file: %r ' % util_path . tail ( fpath , n = n ) ) try : if not util_path . checkpath (...
r Reads text from a file . Automatically returns utf8 .
8,894
def save_cPkl ( fpath , data , verbose = None , n = None ) : verbose = _rectify_verb_write ( verbose ) if verbose : print ( '[util_io] * save_cPkl(%r, data)' % ( util_path . tail ( fpath , n = n ) , ) ) with open ( fpath , 'wb' ) as file_ : pickle . dump ( data , file_ , protocol = 2 )
Saves data to a pickled file with optional verbosity
8,895
def load_cPkl ( fpath , verbose = None , n = None ) : verbose = _rectify_verb_read ( verbose ) if verbose : print ( '[util_io] * load_cPkl(%r)' % ( util_path . tail ( fpath , n = n ) , ) ) try : with open ( fpath , 'rb' ) as file_ : data = pickle . load ( file_ ) except UnicodeDecodeError : if six . PY3 : with open ( f...
Loads a pickled file with optional verbosity . Aims for compatibility between python2 and python3 .
8,896
def save_hdf5 ( fpath , data , verbose = None , compression = 'lzf' ) : r import h5py verbose = _rectify_verb_write ( verbose ) if verbose : print ( '[util_io] * save_hdf5(%r, data)' % ( util_path . tail ( fpath ) , ) ) if verbose > 1 : if isinstance ( data , dict ) : print ( '[util_io] ... shapes=%r' % ( [ val . shape...
r Restricted save of data using hdf5 . Can only save ndarrays and dicts of ndarrays .
8,897
def save_pytables ( fpath , data , verbose = False ) : import tables verbose = _rectify_verb_write ( verbose ) if verbose : print ( '[util_io] * save_pytables(%r, data)' % ( util_path . tail ( fpath ) , ) ) with tables . open_file ( fpath , 'w' ) as file_ : atom = tables . Atom . from_dtype ( data . dtype ) filters = t...
sudo pip install numexpr sudo pip install tables
8,898
def start_simple_webserver ( domain = None , port = 5832 ) : r import tornado . ioloop import tornado . web import tornado . httpserver import tornado . wsgi import flask app = flask . Flask ( '__simple__' ) @ app . route ( '/' , methods = [ 'GET' , 'POST' , 'DELETE' , 'PUT' ] ) def echo_args ( * args , ** kwargs ) : f...
r simple webserver that echos its arguments
8,899
def render_html ( html_str ) : import utool as ut from os . path import abspath import webbrowser try : html_str = html_str . decode ( 'utf8' ) except Exception : pass html_dpath = ut . ensure_app_resource_dir ( 'utool' , 'temp_html' ) fpath = abspath ( ut . unixjoin ( html_dpath , 'temp.html' ) ) url = 'file://' + fpa...
makes a temporary html rendering