idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
5,800
def format_name(net: str) -> str:
    """Take care of specifics of the cryptoid naming system.

    Testnet names (leading 't' or containing 'testnet') are rewritten to
    '<name-without-first-char>-test'; all other names pass through unchanged.
    The former ``else: net = net`` branch was a no-op and has been removed.
    """
    if net.startswith('t') or 'testnet' in net:
        net = net[1:] + '-test'
    return net
take care of specifics of cryptoid naming system
50
9
5,801
def get_url(url: str) -> Union[dict, int, float, str]:
    """Perform a GET request for ``url`` and return the JSON-decoded body.

    Raises ``Exception`` carrying the HTTP reason for any non-200 status.
    """
    request = Request(url, headers={"User-Agent": "pypeerassets"})
    response = cast(HTTPResponse, urlopen(request))
    if response.status != 200:
        raise Exception(response.reason)
    body = response.read().decode()
    return json.loads(body)
Perform a GET request for the url and return a dictionary parsed from the JSON response .
89
18
5,802
def _scan_nodes(nodelist, context, instance_types, current_block=None, ignore_blocks=None):
    """Loop through all nodes of a single scope level, collecting instances.

    Recurses into {% include %}, {% extends %}, {{ block.super }} and child
    nodelists; skips blocks listed in ``ignore_blocks``.
    """
    results = []
    for node in nodelist:
        # first check if this is the object instance to look for
        if isinstance(node, instance_types):
            results.append(node)
        # a constant {% include "template_name.html" %}: scan the child template
        elif isinstance(node, IncludeNode):
            # if there's an error in the to-be-included template, node.template becomes None
            if node.template:
                # Required for Django 1.7 but works on older versions too:
                # if it doesn't quack like a template object, presume it is a
                # template path and resolve it
                if not callable(getattr(node.template, 'render', None)):
                    template = get_template(node.template.var)
                else:
                    template = node.template
                if TemplateAdapter is not None and isinstance(template, TemplateAdapter):
                    # Django 1.8: received a new object, take original template
                    template = template.template
                results += _scan_nodes(template.nodelist, context, instance_types, current_block)
        # handle {% extends ... %} tags
        elif isinstance(node, ExtendsNode):
            results += _extend_nodelist(node, context, instance_types)
        # in block nodes we have to scan for super blocks
        elif isinstance(node, VariableNode) and current_block:
            if node.filter_expression.token == 'block.super':
                # found a {{ block.super }} line
                if not hasattr(current_block.parent, 'nodelist'):
                    raise TemplateSyntaxError(
                        "Cannot read {{{{ block.super }}}} for {{% block {0} %}}, "
                        "the parent template doesn't have this block.".format(current_block.name))
                results += _scan_nodes(current_block.parent.nodelist, context,
                                       instance_types, current_block.parent)
        # ignore nested blocks which are already handled
        elif isinstance(node, BlockNode) and ignore_blocks and node.name in ignore_blocks:
            continue
        # scan the 'child_nodelists' attributes (newer Django) and recurse
        elif hasattr(node, 'child_nodelists'):
            for nodelist_name in node.child_nodelists:
                if hasattr(node, nodelist_name):
                    subnodelist = getattr(node, nodelist_name)
                    if isinstance(subnodelist, NodeList):
                        if isinstance(node, BlockNode):
                            current_block = node
                        results += _scan_nodes(subnodelist, context, instance_types, current_block)
        # else just scan the node for nodelist instance attributes
        else:
            for attr in dir(node):
                obj = getattr(node, attr)
                if isinstance(obj, NodeList):
                    if isinstance(node, BlockNode):
                        current_block = node
                    results += _scan_nodes(obj, context, instance_types, current_block)
    return results
Loop through all nodes of a single scope level .
704
10
5,803
def get_node_instances(nodelist, instances):
    """Find the nodes of a given instance type within a template nodelist."""
    context = _get_main_context(nodelist)
    # The Django 1.8 loader returns an adapter class that wraps the original
    # Template to stay API compatible; unwrap it before scanning.
    if TemplateAdapter is not None and isinstance(nodelist, TemplateAdapter):
        nodelist = nodelist.template
    return _scan_nodes(nodelist, context, instances)
Find the nodes of a given instance .
89
8
5,804
def get_config_file():
    # type: () -> AnyStr
    """Parse ``-ini`` from the command line and return the configuration file path.

    Exits the process with status -1 when the given path is not an existing file.
    """
    parser = argparse.ArgumentParser(description="Read configuration file.")
    parser.add_argument('-ini', help="Full path of configuration file")
    args = parser.parse_args()
    ini_file = args.ini
    if not FileClass.is_file_exists(ini_file):
        print("Usage: -ini <full path to the configuration file.>")
        exit(-1)
    return ini_file
Get model configuration file name from argv
116
8
5,805
def isnumerical(x):
    # type: (...) -> bool
    """Check whether ``x`` can be converted to ``float``.

    The original body caught TypeError, ValueError and Exception in three
    separate identical clauses and bound an unused local; both redundancies
    are removed. Any conversion failure still yields False.
    """
    try:
        float(x)
    except Exception:
        return False
    return True
Check the input x is numerical or not .
44
9
5,806
def rsquare(obsvalues,  # type: Union[numpy.ndarray, List[Union[float, int]]]
            simvalues  # type: Union[numpy.ndarray, List[Union[float, int]]]
            ):
    # type: (...) -> Union[float, numpy.ScalarType]
    """Calculate the coefficient of determination (R-square)."""
    if len(obsvalues) != len(simvalues):
        raise ValueError("The size of observed and simulated values must be "
                         "the same for R-square calculation!")
    if not isinstance(obsvalues, numpy.ndarray):
        obsvalues = numpy.array(obsvalues)
    if not isinstance(simvalues, numpy.ndarray):
        simvalues = numpy.array(simvalues)
    mean_obs = numpy.mean(obsvalues)
    mean_sim = numpy.mean(simvalues)
    ss_obs = numpy.sum((obsvalues - mean_obs) ** 2)
    ss_sim = numpy.sum((simvalues - mean_sim) ** 2)
    cross = numpy.sum((obsvalues - mean_obs) * (simvalues - mean_sim))
    # R-square = (covariance / (std_obs * std_sim))^2
    denom = ss_obs ** 0.5 * ss_sim ** 0.5
    if MathClass.floatequal(denom, 0.):
        return 1.
    return (cross / denom) ** 2.
Calculate Coefficient of determination .
342
8
5,807
def rmse(obsvalues,  # type: Union[numpy.ndarray, List[Union[float, int]]]
         simvalues  # type: Union[numpy.ndarray, List[Union[float, int]]]
         ):
    # type: (...) -> Union[float, numpy.ScalarType]
    """Calculate RMSE (root-mean-square error) of observed vs. simulated values.

    Raises ValueError when the two sequences differ in length. The error
    message previously said "R-square" (copy-paste from rsquare); fixed.
    """
    if len(obsvalues) != len(simvalues):
        raise ValueError("The size of observed and simulated values must be "
                         "the same for RMSE calculation!")
    if not isinstance(obsvalues, numpy.ndarray):
        obsvalues = numpy.array(obsvalues)
    if not isinstance(simvalues, numpy.ndarray):
        simvalues = numpy.array(simvalues)
    return numpy.sqrt(numpy.mean((obsvalues - simvalues) ** 2.))
Calculate RMSE .
185
6
5,808
def pbias(obsvalues,  # type: Union[numpy.ndarray, List[Union[float, int]]]
          simvalues  # type: Union[numpy.ndarray, List[Union[float, int]]]
          ):
    # type: (...) -> Union[float, numpy.ScalarType]
    """Calculate PBIAS, i.e., percent model bias."""
    if len(obsvalues) != len(simvalues):
        raise ValueError("The size of observed and simulated values must be"
                         " the same for PBIAS calculation!")
    # summed percent deviation of each pair, relative to the observed total
    return sum((obs - sim) * 100 for obs, sim in zip(obsvalues, simvalues)) / sum(obsvalues)
Calculate PBIAS or percent model bias .
139
11
5,809
def convert_str2num(unicode_str):
    # type: (...) -> Union[AnyStr, int, float, List[Union[AnyStr, float, int]], Tuple[Union[AnyStr, float, int]]]
    """Convert a string to int/float when numerical; recurse into tuples/lists.

    Non-numerical strings are returned as plain ``str``; anything else is
    returned unchanged.
    """
    if MathClass.isnumerical(unicode_str):
        number = float(unicode_str)
        # collapse whole-valued floats to int
        if number % 1. == 0.:
            number = int(number)
        return number
    if is_string(unicode_str):
        return str(unicode_str)
    if isinstance(unicode_str, tuple):
        return tuple(StringClass.convert_str2num(v) for v in unicode_str)
    if isinstance(unicode_str, list):
        return list(StringClass.convert_str2num(v) for v in unicode_str)
    return unicode_str
Convert string to string integer or float . Support tuple or list .
242
14
5,810
def string_in_list(tmp_str, strlist):
    # type: (AnyStr, List[AnyStr]) -> bool
    """Return True when ``tmp_str`` occurs in ``strlist``, case-insensitively.

    The original copied the list and lowered every entry in place; a lazy
    generator avoids the copy and stops at the first match.
    """
    lowered = tmp_str.lower()
    return any(lowered == candidate.lower() for candidate in strlist)
Is tmp_str in strlist case insensitive .
91
10
5,811
def is_file_exists(filename):
    # type: (AnyStr) -> bool
    """Check the existence of a regular file at ``filename``.

    ``os.path.isfile`` already implies ``os.path.exists``, so the original's
    extra exists() test was redundant.
    """
    return filename is not None and os.path.isfile(filename)
Check the existence of file path .
52
7
5,812
def is_dir_exists(dirpath):
    # type: (AnyStr) -> bool
    """Check the existence of a directory at ``dirpath``.

    ``os.path.isdir`` already implies ``os.path.exists``, so the original's
    extra exists() test was redundant.
    """
    return dirpath is not None and os.path.isdir(dirpath)
Check the existence of folder path .
56
7
5,813
def copy_files(filename, dstfilename):
    # type: (AnyStr, AnyStr) -> None
    """Copy every file sharing filename's stem (e.g. an ESRI Shapefile set).

    Existing destination files with the same stem are removed first; each
    sidecar keeps its own extension at the destination stem.
    """
    FileClass.remove_files(dstfilename)
    dst_prefix = os.path.splitext(dstfilename)[0]
    pattern = os.path.splitext(filename)[0] + '.*'
    for src in glob.iglob(pattern):
        ext = os.path.splitext(src)[1]
        copy(src, dst_prefix + ext)
Copy files with the same name and different suffixes such as ESRI Shapefile .
110
17
5,814
def remove_files(filename):
    # type: (AnyStr) -> None
    """Delete every file sharing filename's root, regardless of suffix
    (e.g. all parts of an ESRI Shapefile)."""
    pattern = os.path.splitext(filename)[0] + '.*'
    for matched in glob.iglob(pattern):
        os.remove(matched)
Delete all files with the same root as fileName, i.e. regardless of suffix, such as an ESRI shapefile.
54
22
5,815
def is_up_to_date(outfile, basedatetime):
    # type: (AnyStr, datetime) -> bool
    """Return True if ``outfile`` exists and is no older than ``basedatetime``.

    NOTE(review): ``os.path.getmtime`` returns a float timestamp, so despite
    the ``datetime`` type comment callers presumably pass a timestamp here --
    confirm against call sites.
    """
    return os.path.exists(outfile) and os.path.getmtime(outfile) >= basedatetime
Return true if outfile exists and is no older than base datetime .
61
15
5,816
def get_executable_fullpath(name, dirname=None):
    # type: (AnyStr, Optional[AnyStr]) -> Optional[AnyStr]
    """Return the full path of an executable, searching ``dirname`` first,
    then the system PATH (via ``where``/``which``).

    Exits the process when the executable cannot be found on the PATH.
    """
    if name is None:
        return None
    if is_string(name):
        name = str(name)
    else:
        raise RuntimeError('The input function name or path must be string!')
    if dirname is not None:
        # check the given directory first
        dirname = os.path.abspath(dirname)
        candidate = dirname + os.sep + name
        if os.path.isfile(candidate):
            return candidate
    # dirname missed (or not given): fall back to the environment PATH
    if sysstr == 'Windows':
        findout = UtilClass.run_command('where %s' % name)
    else:
        findout = UtilClass.run_command('which %s' % name)
    if not findout or len(findout) == 0:
        print("%s is not included in the env path" % name)
        exit(-1)
    first_path = findout[0].split('\n')[0]
    if os.path.exists(first_path):
        return first_path
    return None
get the full path of a given executable name
261
9
5,817
def get_file_fullpath(name, dirname=None):
    # type: (AnyStr, Optional[AnyStr]) -> Optional[AnyStr]
    """Return a full path for ``name``; prepend ``dirname`` when ``name`` is
    a bare file name (contains no path separator)."""
    if name is None:
        return None
    if is_string(name):
        name = str(name)
    else:
        raise RuntimeError('The input function name or path must be string!')
    # any separator means name is a full path already
    for sep in ['\\', '/', os.sep]:
        if sep in name:
            return os.path.abspath(name)
    if dirname is not None:
        dirname = os.path.abspath(dirname)
        name = dirname + os.sep + name
    return name
Return full path if available .
151
6
5,818
def get_filename_by_suffixes(dir_src, suffixes):
    # type: (AnyStr, Union[AnyStr, List[AnyStr]]) -> Optional[List[AnyStr]]
    """List file names in ``dir_src`` whose extension matches any of
    ``suffixes`` (case-insensitively); None when suffixes is not str/list."""
    list_files = os.listdir(dir_src)
    re_files = list()
    if is_string(suffixes):
        suffixes = [suffixes]
    if not isinstance(suffixes, list):
        return None
    # normalize suffixes to start with a dot
    for i, suf in enumerate(suffixes):
        if len(suf) >= 1 and suf[0] != '.':
            suffixes[i] = '.' + suf
    for fname in list_files:
        ext = os.path.splitext(fname)[1]
        if StringClass.string_in_list(ext, suffixes):
            re_files.append(fname)
    return re_files
get file names with the given suffixes in the given directory
185
12
5,819
def get_full_filename_by_suffixes(dir_src, suffixes):
    # type: (AnyStr, Union[AnyStr, List[AnyStr]]) -> Optional[List[AnyStr]]
    """Get full file names with the given suffixes in the given directory."""
    file_names = FileClass.get_filename_by_suffixes(dir_src, suffixes)
    if file_names is None:
        return None
    return [dir_src + os.sep + name for name in file_names]
get full file names with the given suffixes in the given directory
99
13
5,820
def get_core_name_without_suffix(file_path):
    # type: (AnyStr) -> AnyStr
    """Return the base file name without its last suffix.

    Backslashes are normalized to forward slashes first so Windows paths work
    on any platform. str.split() always returns a list, so the original's
    isinstance(list) check and its str(core_names) branch were unreachable
    dead code and have been removed.
    """
    if '\\' in file_path:
        file_path = file_path.replace('\\', '/')
    file_name = os.path.basename(file_path)
    core_names = file_name.split('.')
    if len(core_names) > 1:
        # drop only the last dot-separated part (the suffix)
        core_names = core_names[:-1]
    return str('.'.join(core_names))
Return core file name without suffix .
136
7
5,821
def add_postfix(file_path, postfix):
    # type: (AnyStr, AnyStr) -> AnyStr
    """Insert '_<postfix>' between the core name and the suffix of a full path."""
    # detect which separator the incoming path actually uses
    cur_sep = ''
    for sep in ['\\', '/', os.sep]:
        if sep in file_path:
            cur_sep = sep
            break
    corename = FileClass.get_core_name_without_suffix(file_path)
    parts = os.path.basename(file_path).split('.')
    suffix = parts[-1] if len(parts) > 1 else ''
    newname = os.path.dirname(file_path) + cur_sep + corename + '_' + postfix
    if suffix != '':
        newname += '.' + suffix
    return str(newname)
Add postfix for a full file path .
180
9
5,822
def day_of_year(dt):
    # type: (datetime) -> int
    """Day index of year, from 1 to 365 (or 366 in leap years).

    ``datetime.timetuple()`` already carries a correct ``tm_yday``; the
    original round-trip through ``time.mktime``/``time.localtime`` added
    nothing and failed for dates outside the platform's mktime range.
    (The type comment previously said ``int`` but a datetime is required.)
    """
    return dt.timetuple().tm_yday
Day index of year from 1 to 365 or 366
51
10
5,823
def run_command(commands):
    # type: (Union[AnyStr, List[AnyStr]]) -> List[AnyStr]
    """Execute an external command and return its output lines.

    On Windows, crash dialogs and the console window are suppressed; on
    POSIX a string command runs through the shell while a list is executed
    directly. Raises CalledProcessError on a non-zero exit code.
    """
    use_shell = False
    subprocess_flags = 0
    startupinfo = None
    if sysstr == 'Windows':
        if isinstance(commands, list):
            commands = ' '.join(str(c) for c in commands)
        import ctypes
        SEM_NOGPFAULTERRORBOX = 0x0002  # From MSDN
        ctypes.windll.kernel32.SetErrorMode(SEM_NOGPFAULTERRORBOX)
        subprocess_flags = 0x8000000  # win32con.CREATE_NO_WINDOW?
        # this startupinfo structure prevents a console window from popping up
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    else:
        # for Linux/Unix, commands is better passed as a list
        if is_string(commands):
            # https://docs.python.org/2/library/subprocess.html
            # Using shell=True can be a security hazard.
            use_shell = True
        elif isinstance(commands, list):
            # strip quotes enclosing the executable path on non-Windows
            if commands[0][0] == commands[0][-1] == '"' or commands[0][0] == commands[0][-1] == "'":
                commands[0] = commands[0][1:-1]
            for idx, v in enumerate(commands):
                if isinstance(v, int) or isinstance(v, float):
                    # Fix: TypeError: execv() arg 2 must contain only strings
                    commands[idx] = repr(v)
    print(commands)
    process = subprocess.Popen(commands, shell=use_shell, stdout=subprocess.PIPE,
                               stdin=open(os.devnull), stderr=subprocess.STDOUT,
                               universal_newlines=True, startupinfo=startupinfo,
                               creationflags=subprocess_flags)
    out, err = process.communicate()
    recode = process.returncode
    if out is None:
        return ['']
    if recode is not None and recode != 0:
        raise subprocess.CalledProcessError(-1, commands,
                                            "ERROR occurred when running subprocess!")
    if '\n' in out:
        return out.split('\n')
    return [out]
Execute external command and return the output lines list . In windows refers to handling - subprocess - crash - in - windows _ .
574
27
5,824
def current_path(local_function):
    """Return the directory of the file defining ``local_function``.

    Returns None when the source file cannot be determined (e.g. builtins).
    """
    from inspect import getsourcefile
    src = getsourcefile(local_function)
    if src is None:
        return None
    return os.path.dirname(os.path.abspath(src))
Get current path refers to how - do - i - get - the - path - of - the - current - executed - file - in - python _
54
31
5,825
def mkdir(dir_path):
    # type: (AnyStr) -> None
    """Make a directory (including parents) if it does not already exist.

    ``os.path.isdir`` already implies existence, so the original's extra
    ``not os.path.exists`` test was redundant and has been removed.
    """
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)
Make directory if not existed
54
5
5,826
def rmmkdir(dir_path):
    # type: (AnyStr) -> None
    """Create an empty directory, removing it first when it already exists."""
    if os.path.isdir(dir_path):
        # wipe the existing tree (ignore_errors=True), then recreate it empty
        rmtree(dir_path, True)
    os.makedirs(dir_path)
If directory existed then remove and make ; else make it .
78
12
5,827
def print_msg(contentlist):
    # type: (Union[AnyStr, List[AnyStr], Tuple[AnyStr]]) -> AnyStr
    """Concatenate a message list into a single newline-joined string."""
    if isinstance(contentlist, (list, tuple)):
        return '\n'.join(contentlist)
    # plain string: ensure multi-character messages end with a newline
    if len(contentlist) > 1 and contentlist[-1] != '\n':
        contentlist += '\n'
    return contentlist
concatenate message list as single string with line feed .
98
13
5,828
def decode_strs_in_dict(unicode_dict):
    # type: (...) -> Dict[Union[AnyStr, int], Any]
    """Decode strings in a dictionary which may contain unicode strings or
    numeric values, recursing into nested dictionaries."""
    converted = {StringClass.convert_str2num(k): StringClass.convert_str2num(v)
                 for k, v in iteritems(unicode_dict)}
    # second pass: recurse into nested dict values (replacing values only,
    # which is safe while iterating)
    for key, value in iteritems(converted):
        if isinstance(value, dict):
            converted[key] = UtilClass.decode_strs_in_dict(value)
    return converted
Decode strings in dictionary which may contains unicode strings or numeric values .
162
15
5,829
def undo(self):
    """Rewind the game to the previous state and refresh observers."""
    self.undo_manager.undo()
    self.notify_observers()
    # lazy %-formatting: the (private) stack is only rendered when DEBUG is on
    logging.debug('undo_manager undo stack=%s', self.undo_manager._undo_stack)
Rewind the game to the previous state .
52
9
5,830
def redo(self):
    """Redo the latest undone command and refresh observers."""
    self.undo_manager.redo()
    self.notify_observers()
    # lazy %-formatting: the (private) stack is only rendered when DEBUG is on
    logging.debug('undo_manager redo stack=%s', self.undo_manager._redo_stack)
Redo the latest undone command .
55
7
5,831
def net_query(name: str) -> Constants:
    """Find the NetworkParams for a network by its long or short name.

    Raises UnsupportedNetwork if no matching entry exists.
    """
    for params in networks:
        if name in (params.name, params.shortname):
            return params
    raise UnsupportedNetwork
Find the NetworkParams for a network by its long or short name . Raises UnsupportedNetwork if no NetworkParams is found .
47
28
5,832
def get_port_at(self, tile_id, direction):
    """Return the port at (tile_id, direction).

    If no port is found, a new 'none' port is created, registered in
    self.ports and returned.
    """
    for existing in self.ports:
        if existing.tile_id == tile_id and existing.direction == direction:
            return existing
    new_port = Port(tile_id, direction, PortType.none)
    self.ports.append(new_port)
    return new_port
If no port is found a new none port is made and added to self . ports .
67
18
5,833
def rotate_ports(self):
    """Rotate all ports 90 degrees, then notify observers.

    Useful with the default port setup when the spectator views the board at
    a rotated angle from true north.
    """
    for port in self.ports:
        # advance the tile id around the coast (ids are 1-based, hence the +1)
        port.tile_id = ((port.tile_id + 1) % len(hexgrid.coastal_tile_ids())) + 1
        port.direction = hexgrid.rotate_direction(hexgrid.EDGE, port.direction, ccw=True)
    self.notify_observers()
Rotates the ports 90 degrees . Useful when using the default port setup but the spectator is watching at a rotated angle from true north .
83
27
5,834
def intersect_keys(keys, reffile, cache=False, clean_accs=False):
    """Extract SeqRecords from an indexed reference file by matching keys.

    Yields records for keys found in the index; missing keys are logged and
    skipped. ``logging.warn`` is a deprecated alias -> ``logging.warning``.
    """
    # Build/load the index of reference sequences
    index = None
    if cache:
        refcache = reffile + '.sqlite'
        if os.path.exists(refcache):
            if os.stat(refcache).st_mtime < os.stat(reffile).st_mtime:
                logging.warning("Outdated cache; rebuilding index")
            else:
                try:
                    index = (SeqIO.index_db(refcache, key_function=clean_accession)
                             if clean_accs
                             else SeqIO.index_db(refcache))
                except Exception:
                    logging.warning("Skipping corrupted cache; rebuilding index")
                    index = None
    else:
        refcache = ':memory:'
    if index is None:
        # Rebuild the index, for whatever reason
        index = (SeqIO.index_db(refcache, [reffile], 'fasta', key_function=clean_accession)
                 if clean_accs
                 else SeqIO.index_db(refcache, [reffile], 'fasta'))
    # Extract records by key
    if clean_accs:
        keys = (clean_accession(k) for k in keys)
    for key in keys:
        try:
            record = index[key]
        except LookupError:
            # Missing keys are rare, so it's faster not to check every time
            logging.info("No match: %s", repr(key))
            continue
        yield record
Extract SeqRecords from the index by matching keys .
332
13
5,835
def aa_frequencies(seq, gap_chars='-.'):
    """Calculate amino-acid frequencies in a sequence, ignoring gap characters.

    ``Counter.iteritems()`` was Python-2-only and raised AttributeError on
    Python 3; replaced with ``items()``. Note: a sequence consisting only of
    gap characters still raises ZeroDivisionError, as before.
    """
    aa_counts = Counter(seq)
    # Don't count gaps
    for gap_char in gap_chars:
        if gap_char in aa_counts:
            del aa_counts[gap_char]
    # Reduce to frequencies
    scale = 1.0 / sum(aa_counts.values())
    return dict((aa, cnt * scale) for aa, cnt in aa_counts.items())
Calculate the amino acid frequencies in a sequence set .
119
12
5,836
def giving(self):
    """Return (count, resource_type) tuples for the giver->getter side of the trade."""
    # lazy %-formatting instead of eager str.format
    logging.debug('give=%s', self._give)
    # Counter() copies its input itself, so the explicit .copy() was redundant
    counts = Counter(self._give)
    return [(n, t) for t, n in counts.items()]
Returns tuples corresponding to the number and type of each resource in the trade from giver - > getter
55
22
5,837
def getting(self):
    """Return (count, resource_type) tuples for the getter->giver side of the trade."""
    # Counter() copies its input itself, so the explicit .copy() was redundant
    counts = Counter(self._get)
    return [(n, t) for t, n in counts.items()]
Returns tuples corresponding to the number and type of each resource in the trade from getter - > giver
37
22
5,838
def family_check(self):
    """Check whether family members break the family structure.

    Flags affected individuals, validates parents (raising PedigreeError on
    nonexistent parents or wrong parental sex), collects trios/duos (useful
    e.g. for GATK phasing), and annotates siblings.
    """
    # TODO: make some tests for these
    self.logger.info("Checking family relations for {0}".format(self.family_id))
    for individual_id in self.individuals:
        self.logger.debug("Checking individual {0}".format(individual_id))
        individual = self.individuals[individual_id]
        self.logger.debug("Checking if individual {0} is affected".format(individual_id))
        if individual.affected:
            self.logger.debug("Found affected individual {0}".format(individual_id))
            self.affected_individuals.add(individual_id)
        father = individual.father
        mother = individual.mother
        if individual.has_parents:
            self.logger.debug("Individual {0} has parents".format(individual_id))
            self.no_relations = False
            try:
                self.check_parent(father, father=True)
                self.check_parent(mother, father=False)
            except PedigreeError as e:
                self.logger.error(e.message)
                raise e
            # Check if there is a trio
            if individual.has_both_parents:
                self.trios.append(set([individual_id, father, mother]))
            elif father != '0':
                self.duos.append(set([individual_id, father]))
            else:
                self.duos.append(set([individual_id, mother]))
            # TODO: self.check_grandparents(individual)
        # Annotate siblings:
        for individual_2_id in self.individuals:
            if individual_id != individual_2_id:
                if self.check_siblings(individual_id, individual_2_id):
                    individual.siblings.add(individual_2_id)
Check if the family members break the structure of the family . eg . nonexistent parent wrong sex on parent etc . Also extracts all trios found this is of help for many at the moment since GATK can only do phasing of trios and duos .
391
53
5,839
def check_parent(self, parent_id, father=False):
    """Validate one parent id: it must exist in the family with the right sex.

    Raises PedigreeError when the parent is absent or the recorded sex does
    not match the parental role. A parent_id of '0' means 'not recorded'.
    """
    self.logger.debug("Checking parent {0}".format(parent_id))
    if parent_id == '0':
        return
    if parent_id not in self.individuals:
        raise PedigreeError(self.family_id, parent_id, 'Parent is not in family.')
    if father:
        if self.individuals[parent_id].sex != 1:
            raise PedigreeError(self.family_id, parent_id, 'Father is not specified as male.')
    else:
        if self.individuals[parent_id].sex != 2:
            raise PedigreeError(self.family_id, parent_id, 'Mother is not specified as female.')
    return
Check if the parent info is correct . If an individual is not present in file raise exeption .
161
21
5,840
def to_ped(self, outfile=None):
    """Write the family in ped format to ``outfile`` (or stdout).

    The header is the standard ped header plus any recognized extra-info
    headers found on the individuals.
    """
    ped_header = ['#FamilyID', 'IndividualID', 'PaternalID', 'MaternalID', 'Sex', 'Phenotype', ]
    extra_headers = ['InheritanceModel', 'Proband', 'Consultand', 'Alive']
    # collect recognized extra headers present on any individual
    for individual_id in self.individuals:
        individual = self.individuals[individual_id]
        for info in individual.extra_info:
            if info in extra_headers:
                if info not in ped_header:
                    ped_header.append(info)
    self.logger.debug("Ped headers found: {0}".format(', '.join(ped_header)))
    if outfile:
        outfile.write('\t'.join(ped_header) + '\n')
    else:
        print('\t'.join(ped_header))
    for individual in self.to_json():
        ped_info = [individual['family_id'], individual['id'], individual['father'],
                    individual['mother'], individual['sex'], individual['phenotype']]
        if len(ped_header) > 6:
            for header in ped_header[6:]:
                ped_info.append(individual['extra_info'].get(header, '.'))
        if outfile:
            outfile.write('\t'.join(ped_info) + '\n')
        else:
            print('\t'.join(ped_info))
Print the individuals of the family in ped format The header will be the original ped header plus all headers found in extra info of the individuals
385
27
5,841
def find_deck(provider: Provider, key: str, version: int, prod: bool=True) -> Optional[Deck]:
    """Find a specific deck by deck id."""
    pa_params = param_query(provider.network)
    # production decks live under the main P2TH address, test decks under test_P2TH
    p2th = pa_params.P2TH_addr if prod else pa_params.test_P2TH_addr
    rawtx = provider.getrawtransaction(key, 1)
    return deck_parser((provider, rawtx, 1, p2th))
Find specific deck by deck id .
109
7
5,842
def deck_spawn(provider: Provider, deck: Deck, inputs: dict,
               change_address: str, locktime: int=0) -> Transaction:
    """Create the Deck-spawn raw (unsigned) transaction."""
    network_params = net_query(deck.network)
    pa_params = param_query(deck.network)
    p2th_addr = pa_params.P2TH_addr if deck.production else pa_params.test_P2TH_addr
    # first round of txn making is done by presuming minimal fee
    change_sum = Decimal(inputs['total'] - network_params.min_tx_fee - pa_params.P2TH_fee)
    txouts = [
        tx_output(network=deck.network, value=pa_params.P2TH_fee, n=0,
                  script=p2pkh_script(address=p2th_addr,
                                      network=deck.network)),  # p2th
        tx_output(network=deck.network, value=Decimal(0), n=1,
                  script=nulldata_script(deck.metainfo_to_protobuf)),  # op_return
        tx_output(network=deck.network, value=change_sum, n=2,
                  script=p2pkh_script(address=change_address,
                                      network=deck.network))  # change
    ]
    return make_raw_transaction(network=deck.network, inputs=inputs['utxos'],
                                outputs=txouts, locktime=Locktime(locktime))
Creates Deck spawn raw transaction .
342
7
5,843
def get_card_transfer(provider: Provider, deck: Deck, txid: str,
                      debug: bool=False) -> Iterator:
    """Fetch a single card transfer by its transaction id."""
    raw_tx = provider.getrawtransaction(txid, 1)
    bundle = card_bundler(provider, deck, raw_tx)
    return card_bundle_parser(bundle, debug)
Get a single card transfer by its id.
72
9
5,844
def find_all_valid_cards(provider: Provider, deck: Deck) -> Generator:
    """Yield all valid cards on this deck, filtering out cards that don't
    comply with the deck issue mode."""
    # validate_card_issue_modes must receive a full list of cards, not batches
    all_cards = [card
                 for batch in get_card_bundles(provider, deck)
                 for card in batch]
    for card in validate_card_issue_modes(deck.issue_mode, all_cards):
        yield card
Find all the valid cards on this deck, filtering out cards which don't play nice with the deck issue mode.
94
20
5,845
def card_transfer(provider: Provider, card: CardTransfer, inputs: dict,
                  change_address: str, locktime: int=0) -> Transaction:
    """Prepare the CardTransfer Transaction object.

    Builds outputs: deck P2TH marker, OP_RETURN metainfo, one zero-value
    output per receiver, and change.
    """
    network_params = net_query(provider.network)
    pa_params = param_query(provider.network)
    if card.deck_p2th is None:
        raise Exception("card.deck_p2th required for tx_output")
    outs = [
        tx_output(network=provider.network, value=pa_params.P2TH_fee, n=0,
                  script=p2pkh_script(address=card.deck_p2th,
                                      network=provider.network)),  # deck p2th
        tx_output(network=provider.network, value=Decimal(0), n=1,
                  script=nulldata_script(card.metainfo_to_protobuf))  # op_return
    ]
    # one TxOut per receiver; enumerate(start=2) replaces the original
    # zip(card.receiver, range(len(card.receiver))) + index + 2 dance
    for n, addr in enumerate(card.receiver, start=2):
        outs.append(
            tx_output(network=provider.network, value=Decimal(0), n=n,
                      script=p2pkh_script(address=addr, network=provider.network)))
    # first round of txn making is done by presuming minimal fee
    change_sum = Decimal(inputs['total'] - network_params.min_tx_fee - pa_params.P2TH_fee)
    # NOTE(review): n=len(outs)+1 leaves a gap (the next free index is
    # len(outs)); preserved as-is since downstream code may rely on it -- confirm.
    outs.append(
        tx_output(network=provider.network, value=change_sum, n=len(outs) + 1,
                  script=p2pkh_script(address=change_address, network=provider.network)))
    unsigned_tx = make_raw_transaction(network=provider.network,
                                       inputs=inputs['utxos'],
                                       outputs=outs,
                                       locktime=Locktime(locktime))
    return unsigned_tx
Prepare the CardTransfer Transaction object
429
7
5,846
def rfc3339_to_datetime(data):
    """Convert an RFC 3339 date(-time) string into a date/datetime object.

    A date-only string yields ``datetime.date``; a full timestamp yields a
    timezone-aware ``datetime.datetime``. Raises ValueError for anything else.
    """
    # try the date-only form first ('YYYY-MM-DD')
    try:
        ts = time.strptime(data, '%Y-%m-%d')
        return date(*ts[:3])
    except ValueError:
        pass
    try:
        dt, _, tz = data.partition('Z')
        tz = offset(tz) if tz else offset('00:00')
        # allow an optional fractional-seconds part
        if '.' in dt and dt.rsplit('.', 1)[-1].isdigit():
            ts = time.strptime(dt, '%Y-%m-%dT%H:%M:%S.%f')
        else:
            ts = time.strptime(dt, '%Y-%m-%dT%H:%M:%S')
        return datetime(*ts[:6], tzinfo=tz)
    except ValueError:
        raise ValueError('date-time {!r} is not a valid rfc3339 date representation'.format(data))
convert a rfc3339 date representation into a Python datetime
241
14
5,847
def log_config(verbose=1):
    """Configure root logging format and level from a verbosity count.

    0 -> WARNING, 1 -> INFO, anything else -> DEBUG.
    """
    if verbose == 0:
        level, fmt = logging.WARNING, "%(module)s: %(message)s"
    elif verbose == 1:
        level, fmt = logging.INFO, "%(module)s [@%(lineno)s]: %(message)s"
    else:
        level, fmt = logging.DEBUG, "%(module)s [%(lineno)s]: %(levelname)s: %(message)s"
    logging.basicConfig(format=fmt, level=level)
Set up logging the way I like it .
174
9
5,848
def refresh_instruments(self):
    """If self.tree_settings has been expanded, ask instruments for their actual values."""

    def list_access_nested_dict(dictionary, somelist):
        """Use a list of keys to access a nested dictionary, for example
        list_access_nested_dict({'a': {'b': 1}}, ['a', 'b']) returns 1."""
        return reduce(operator.getitem, somelist, dictionary)

    def update(item):
        if item.isExpanded():
            for index in range(item.childCount()):
                child = item.child(index)
                if child.childCount() == 0:
                    instrument, path_to_instrument = child.get_instrument()
                    path_to_instrument.reverse()
                    try:
                        # check if item is in probes
                        value = instrument.read_probes(path_to_instrument[-1])
                    except AssertionError:
                        # if item not in probes, get value from settings instead
                        value = list_access_nested_dict(instrument.settings, path_to_instrument)
                    child.value = value
                else:
                    update(child)

    # block signals during the update so tree.itemChanged doesn't fire and the
    # gui doesn't try to re-update the instruments to their current value
    self.tree_settings.blockSignals(True)
    for index in range(self.tree_settings.topLevelItemCount()):
        update(self.tree_settings.topLevelItem(index))
    self.tree_settings.blockSignals(False)
if self . tree_settings has been expanded ask instruments for their actual values
338
15
5,849
def update_parameters ( self , treeWidget ) : if treeWidget == self . tree_settings : item = treeWidget . currentItem ( ) instrument , path_to_instrument = item . get_instrument ( ) # build nested dictionary to update instrument dictator = item . value for element in path_to_instrument : dictator = { element : dictator } # get old value from instrument old_value = instrument . settings path_to_instrument . reverse ( ) for element in path_to_instrument : old_value = old_value [ element ] # send new value from tree to instrument instrument . update ( dictator ) new_value = item . value if new_value is not old_value : msg = "changed parameter {:s} from {:s} to {:s} on {:s}" . format ( item . name , str ( old_value ) , str ( new_value ) , instrument . name ) else : msg = "did not change parameter {:s} on {:s}" . format ( item . name , instrument . name ) self . log ( msg ) elif treeWidget == self . tree_scripts : item = treeWidget . currentItem ( ) script , path_to_script , _ = item . get_script ( ) # check if changes value is from an instrument instrument , path_to_instrument = item . get_instrument ( ) if instrument is not None : new_value = item . value msg = "changed parameter {:s} to {:s} in {:s}" . format ( item . name , str ( new_value ) , script . name ) else : new_value = item . value msg = "changed parameter {:s} to {:s} in {:s}" . format ( item . name , str ( new_value ) , script . name ) self . log ( msg )
updates the internal dictionaries for scripts and instruments with values from the respective trees
388
16
5,850
def script_finished ( self ) : script = self . current_script script . updateProgress . disconnect ( self . update_status ) self . script_thread . started . disconnect ( ) script . finished . disconnect ( ) self . current_script = None self . plot_script ( script ) self . progressBar . setValue ( 100 ) self . btn_start_script . setEnabled ( True ) self . btn_skip_subscript . setEnabled ( False )
waits for the script to emit the script_finshed signal
100
14
5,851
def update_probes ( self , progress ) : new_values = self . read_probes . probes_values probe_count = len ( self . read_probes . probes ) if probe_count > self . tree_probes . topLevelItemCount ( ) : # when run for the first time, there are no probes in the tree, so we have to fill it first self . fill_treewidget ( self . tree_probes , new_values ) else : for x in range ( probe_count ) : topLvlItem = self . tree_probes . topLevelItem ( x ) for child_id in range ( topLvlItem . childCount ( ) ) : child = topLvlItem . child ( child_id ) child . value = new_values [ topLvlItem . name ] [ child . name ] child . setText ( 1 , str ( child . value ) ) if self . probe_to_plot is not None : self . probe_to_plot . plot ( self . matplotlibwidget_1 . axes ) self . matplotlibwidget_1 . draw ( ) if self . chk_probe_log . isChecked ( ) : data = ',' . join ( list ( np . array ( [ [ str ( p ) for p in list ( p_dict . values ( ) ) ] for instr , p_dict in new_values . items ( ) ] ) . flatten ( ) ) ) self . probe_file . write ( '{:s}\n' . format ( data ) )
update the probe tree
335
4
5,852
def update_script_from_item ( self , item ) : script , path_to_script , script_item = item . get_script ( ) # build dictionary # get full information from script dictator = list ( script_item . to_dict ( ) . values ( ) ) [ 0 ] # there is only one item in the dictionary for instrument in list ( script . instruments . keys ( ) ) : # update instrument script . instruments [ instrument ] [ 'settings' ] = dictator [ instrument ] [ 'settings' ] # remove instrument del dictator [ instrument ] for sub_script_name in list ( script . scripts . keys ( ) ) : sub_script_item = script_item . get_subscript ( sub_script_name ) self . update_script_from_item ( sub_script_item ) del dictator [ sub_script_name ] script . update ( dictator ) # update datefolder path script . data_path = self . gui_settings [ 'data_folder' ]
updates the script based on the information provided in item
211
11
5,853
def message_search ( self , text , on_success , peer = None , min_date = None , max_date = None , max_id = None , offset = 0 , limit = 255 ) : raise TWXUnsupportedMethod ( )
Unsupported in the Bot API
52
6
5,854
def remove ( self , pointer ) : doc = deepcopy ( self . document ) parent , obj = None , doc try : # fetching for token in Pointer ( pointer ) : parent , obj = obj , token . extract ( obj , bypass_ref = True ) # removing if isinstance ( parent , Mapping ) : del parent [ token ] if isinstance ( parent , MutableSequence ) : parent . pop ( int ( token ) ) except Exception as error : raise Error ( * error . args ) return Target ( doc )
Remove element from sequence member from mapping .
112
8
5,855
def _netname ( name : str ) -> dict : try : long = net_query ( name ) . name short = net_query ( name ) . shortname except AttributeError : raise UnsupportedNetwork ( '''This blockchain network is not supported by the pypeerassets, check networks.py for list of supported networks.''' ) return { 'long' : long , 'short' : short }
resolute network name required because some providers use shortnames and other use longnames .
86
17
5,856
def sendrawtransaction ( cls , rawtxn : str ) -> str : if cls . is_testnet : url = 'https://testnet-explorer.peercoin.net/api/sendrawtransaction?hex={0}' . format ( rawtxn ) else : url = 'https://explorer.peercoin.net/api/sendrawtransaction?hex={0}' . format ( rawtxn ) resp = urllib . request . urlopen ( url ) return resp . read ( ) . decode ( 'utf-8' )
sendrawtransaction remote API
124
6
5,857
def validateaddress ( self , address : str ) -> bool : try : Address . from_string ( address , self . network_properties ) except InvalidAddress : return False return True
Returns True if the passed address is valid False otherwise .
37
11
5,858
def chunker ( l , n ) : for i in ranger ( 0 , len ( l ) , n ) : yield l [ i : i + n ]
Generates n - sized chunks from the list l
33
10
5,859
def post ( self , endpoint , data , parallelism = 5 ) : headers = { "Content-Type" : "application/json" , "Accept" : "application/json" , "x-standardize-only" : "true" if self . standardize else "false" , "x-include-invalid" : "true" if self . invalid else "false" , "x-accept-keypair" : "true" if self . accept_keypair else "false" , } if not self . logging : headers [ "x-suppress-logging" ] = "false" params = { "auth-id" : self . auth_id , "auth-token" : self . auth_token } url = self . BASE_URL + endpoint rs = ( grequests . post ( url = url , data = json . dumps ( stringify ( data_chunk ) ) , params = params , headers = headers , ) for data_chunk in chunker ( data , 100 ) ) responses = grequests . imap ( rs , size = parallelism ) status_codes = { } addresses = AddressCollection ( [ ] ) for response in responses : if response . status_code not in status_codes . keys ( ) : status_codes [ response . status_code ] = 1 else : status_codes [ response . status_code ] += 1 if response . status_code == 200 : addresses [ 0 : 0 ] = AddressCollection ( response . json ( ) ) # Fast list insertion # If an auth error is raised, it's safe to say that this is # going to affect every request, so raise the exception immediately.. elif response . status_code == 401 : raise ERROR_CODES [ 401 ] # The return value or exception is simple if it is consistent. if len ( status_codes . keys ( ) ) == 1 : if 200 in status_codes : return addresses , status_codes else : raise ERROR_CODES . get ( status_codes . keys ( ) [ 0 ] , SmartyStreetsError ) # For any other mix not really sure of the best way to handle it. If it's a mix of 200 # and error codes, then returning the resultant addresses and status code dictionary # seems pretty sensible. But if it's a mix of all error codes (could be a mix of payment # error, input error, potentially server error) this will probably require careful # checking in the code using this interface. return addresses , status_codes
Executes most of the request .
529
7
5,860
def _cache_init ( self ) : cache_ = cache . get ( self . CACHE_KEY ) if cache_ is None : cache_ = defaultdict ( dict ) self . _cache = cache_
Initializes local cache from Django cache .
45
8
5,861
def get_contents_static ( self , block_alias , context ) : if 'request' not in context : # No use in further actions as we won't ever know current URL. return '' current_url = context [ 'request' ] . path # Resolve current view name to support view names as block URLs. try : resolver_match = resolve ( current_url ) namespace = '' if resolver_match . namespaces : # More than one namespace, really? Hmm. namespace = resolver_match . namespaces [ 0 ] resolved_view_name = ':%s:%s' % ( namespace , resolver_match . url_name ) except Resolver404 : resolved_view_name = None self . _cache_init ( ) cache_entry_name = cache_get_key ( block_alias ) siteblocks_static = self . _cache_get ( cache_entry_name ) if not siteblocks_static : blocks = Block . objects . filter ( alias = block_alias , hidden = False ) . only ( 'url' , 'contents' ) siteblocks_static = [ defaultdict ( list ) , defaultdict ( list ) ] for block in blocks : if block . url == '*' : url_re = block . url elif block . url . startswith ( ':' ) : url_re = block . url # Normalize URL name to include namespace. if url_re . count ( ':' ) == 1 : url_re = ':%s' % url_re else : url_re = re . compile ( r'%s' % block . url ) if block . access_guest : siteblocks_static [ self . IDX_GUEST ] [ url_re ] . append ( block . contents ) elif block . access_loggedin : siteblocks_static [ self . IDX_AUTH ] [ url_re ] . append ( block . contents ) else : siteblocks_static [ self . IDX_GUEST ] [ url_re ] . append ( block . contents ) siteblocks_static [ self . IDX_AUTH ] [ url_re ] . append ( block . contents ) self . _cache_set ( cache_entry_name , siteblocks_static ) self . _cache_save ( ) user = getattr ( context [ 'request' ] , 'user' , None ) is_authenticated = getattr ( user , 'is_authenticated' , False ) if not DJANGO_2 : is_authenticated = is_authenticated ( ) if is_authenticated : lookup_area = siteblocks_static [ self . IDX_AUTH ] else : lookup_area = siteblocks_static [ self . 
IDX_GUEST ] static_block_contents = '' if '*' in lookup_area : static_block_contents = choice ( lookup_area [ '*' ] ) elif resolved_view_name in lookup_area : static_block_contents = choice ( lookup_area [ resolved_view_name ] ) else : for url , contents in lookup_area . items ( ) : if url . match ( current_url ) : static_block_contents = choice ( contents ) break return static_block_contents
Returns contents of a static block .
703
7
5,862
def get_contents_dynamic ( self , block_alias , context ) : dynamic_block = get_dynamic_blocks ( ) . get ( block_alias , [ ] ) if not dynamic_block : return '' dynamic_block = choice ( dynamic_block ) return dynamic_block ( block_alias = block_alias , block_context = context )
Returns contents of a dynamic block .
77
7
5,863
def hash_full_tree ( self , leaves ) : root_hash , hashes = self . _hash_full ( leaves , 0 , len ( leaves ) ) assert len ( hashes ) == count_bits_set ( len ( leaves ) ) assert ( self . _hash_fold ( hashes ) == root_hash if hashes else root_hash == self . hash_empty ( ) ) return root_hash
Hash a set of leaves representing a valid full tree .
85
11
5,864
def cal_model_performance ( obsl , siml ) : nse = MathClass . nashcoef ( obsl , siml ) r2 = MathClass . rsquare ( obsl , siml ) rmse = MathClass . rmse ( obsl , siml ) pbias = MathClass . pbias ( obsl , siml ) rsr = MathClass . rsr ( obsl , siml ) print ( 'NSE: %.2f, R-square: %.2f, PBIAS: %.2f%%, RMSE: %.2f, RSR: %.2f' % ( nse , r2 , pbias , rmse , rsr ) )
Calculate model performance indexes .
158
7
5,865
def load_features ( self ) : # Loading all loci that # are in self.loci variable defined # when the pyGFE object is created for loc in self . loci : if self . verbose : self . logger . info ( self . logname + "Loading features for " + loc ) # Loading all features for loc from feature service self . all_feats . update ( { loc : self . locus_features ( loc ) } ) if self . verbose : self . logger . info ( self . logname + "Finished loading features for " + loc ) if self . verbose : mem = "{:4.4f}" . format ( sys . getsizeof ( self . all_feats ) / 1000000 ) self . logger . info ( self . logname + "Finished loading all features * all_feats = " + mem + " MB *" )
Loads all the known features from the feature service
194
10
5,866
def locus_features ( self , locus ) : features = self . api . list_features ( locus = locus ) feat_dict = { ":" . join ( [ a . locus , str ( a . rank ) , a . term , a . sequence ] ) : a . accession for a in features } return feat_dict
Returns all features associated with a locus
74
8
5,867
def tarfile_to_pif ( filename , temp_root_dir = '' , verbose = 0 ) : temp_dir = temp_root_dir + str ( uuid . uuid4 ( ) ) os . makedirs ( temp_dir ) try : tar = tarfile . open ( filename , 'r' ) tar . extractall ( path = temp_dir ) tar . close ( ) for i in os . listdir ( temp_dir ) : cur_dir = temp_dir + '/' + i if os . path . isdir ( cur_dir ) : return directory_to_pif ( cur_dir , verbose = verbose ) return directory_to_pif ( temp_dir , verbose = verbose ) finally : shutil . rmtree ( temp_dir )
Process a tar file that contains DFT data .
174
10
5,868
def archive_to_pif ( filename , verbose = 0 ) : if tarfile . is_tarfile ( filename ) : return tarfile_to_pif ( filename , verbose ) raise Exception ( 'Cannot process file type' )
Given a archive file that contains output from a DFT calculation parse the data and return a PIF object .
53
22
5,869
def files_to_pif ( files , verbose = 0 , quality_report = True , inline = True ) : # Look for the first parser compatible with the directory found_parser = False for possible_parser in [ PwscfParser , VaspParser ] : try : parser = possible_parser ( files ) found_parser = True break except InvalidIngesterException : # Constructors fail when they cannot find appropriate files pass if not found_parser : raise Exception ( 'Directory is not in correct format for an existing parser' ) if verbose > 0 : print ( "Found a {} directory" . format ( parser . get_name ( ) ) ) # Get information about the chemical system chem = ChemicalSystem ( ) chem . chemical_formula = parser . get_composition ( ) # Get software information, to list as method software = Software ( name = parser . get_name ( ) , version = parser . get_version_number ( ) ) # Define the DFT method object method = Method ( name = 'Density Functional Theory' , software = [ software ] ) # Get the settings (aka. "conditions") of the DFT calculations conditions = [ ] for name , func in parser . get_setting_functions ( ) . items ( ) : # Get the condition cond = getattr ( parser , func ) ( ) # If the condition is None or False, skip it if cond is None : continue if inline and cond . files is not None : continue # Set the name cond . name = name # Set the types conditions . append ( cond ) # Get the properties of the system chem . properties = [ ] for name , func in parser . get_result_functions ( ) . items ( ) : # Get the property prop = getattr ( parser , func ) ( ) # If the property is None, skip it if prop is None : continue if inline and prop . files is not None : continue # Add name and other data prop . name = name prop . methods = [ method , ] prop . data_type = 'COMPUTATIONAL' if verbose > 0 and isinstance ( prop , Value ) : print ( name ) if prop . conditions is None : prop . conditions = conditions else : if not isinstance ( prop . conditions , list ) : prop . conditions = [ prop . conditions ] prop . 
conditions . extend ( conditions ) # Add it to the output chem . properties . append ( prop ) # Check to see if we should add the quality report if quality_report and isinstance ( parser , VaspParser ) : _add_quality_report ( parser , chem ) return chem
Given a directory that contains output from a DFT calculation parse the data and return a pif object
549
20
5,870
def wait_for_confirmation ( provider , transaction_id ) : while ( True ) : transaction = provider . gettransaction ( transaction_id ) if transaction [ "confirmations" ] > 0 : break time . sleep ( 10 )
Sleep on a loop until we see a confirmation of the transaction .
51
13
5,871
def validate_card_issue_modes ( issue_mode : int , cards : list ) -> list : supported_mask = 63 # sum of all issue_mode values if not bool ( issue_mode & supported_mask ) : return [ ] # return empty list for i in [ 1 << x for x in range ( len ( IssueMode ) ) ] : if bool ( i & issue_mode ) : try : parser_fn = cast ( Callable [ [ list ] , Optional [ list ] ] , parsers [ IssueMode ( i ) . name ] ) except ValueError : continue parsed_cards = parser_fn ( cards ) if not parsed_cards : return [ ] cards = parsed_cards return cards
validate cards against deck_issue modes
150
8
5,872
def p2th_address ( self ) -> Optional [ str ] : if self . id : return Kutil ( network = self . network , privkey = bytearray . fromhex ( self . id ) ) . address else : return None
P2TH address of this deck
52
7
5,873
def p2th_wif ( self ) -> Optional [ str ] : if self . id : return Kutil ( network = self . network , privkey = bytearray . fromhex ( self . id ) ) . wif else : return None
P2TH privkey in WIF format
54
9
5,874
def metainfo_to_dict ( self ) -> dict : r = { "version" : self . version , "name" : self . name , "number_of_decimals" : self . number_of_decimals , "issue_mode" : self . issue_mode } if self . asset_specific_data : r . update ( { 'asset_specific_data' : self . asset_specific_data } ) return r
encode deck into dictionary
99
5
5,875
def to_json ( self ) -> dict : d = self . __dict__ d [ 'p2th_wif' ] = self . p2th_wif return d
export the Deck object to json - ready format
39
9
5,876
def metainfo_to_dict ( self ) -> dict : r = { "version" : self . version , "amount" : self . amount , "number_of_decimals" : self . number_of_decimals } if self . asset_specific_data : r . update ( { 'asset_specific_data' : self . asset_specific_data } ) return r
encode card into dictionary
87
5
5,877
def _sort_cards ( self , cards : Generator ) -> list : return sorted ( [ card . __dict__ for card in cards ] , key = itemgetter ( 'blocknum' , 'blockseq' , 'cardseq' ) )
sort cards by blocknum and blockseq
52
8
5,878
def main ( ) : input_tif = "../tests/data/Jamaica_dem.tif" output_tif = "../tests/data/tmp_results/log_dem.tif" rst = RasterUtilClass . read_raster ( input_tif ) # raster data (with noDataValue as numpy.nan) as numpy array rst_valid = rst . validValues output_data = np . log ( rst_valid ) # write output raster RasterUtilClass . write_gtiff_file ( output_tif , rst . nRows , rst . nCols , output_data , rst . geotrans , rst . srs , rst . noDataValue , rst . dataType )
Read GeoTiff raster data and perform log transformation .
169
12
5,879
def val_factory ( val , datatypes ) : exceptions = [ ] for dt in datatypes : try : if isinstance ( val , dt ) : return val return type_handler_object ( val , dt ) except Exception as e : exceptions . append ( str ( e ) ) # if we get here, we never found a valid value. raise an error raise ValueError ( 'val_factory: Unable to instantiate {val} from types {types}. Exceptions: {excs}' . format ( val = val , types = datatypes , excs = exceptions ) )
return an instance of val that is of type datatype . keep track of exceptions so we can produce meaningful error messages .
130
25
5,880
def handler_for ( obj ) : for handler_type in handlers : if isinstance ( obj , handler_type ) : return handlers [ handler_type ] try : for handler_type in handlers : if issubclass ( obj , handler_type ) : return handlers [ handler_type ] except TypeError : # if obj isn't a class, issubclass will raise a TypeError pass
return the handler for the object type
82
7
5,881
def check_orthogonal ( angle ) : flow_dir_taudem = - 1 flow_dir = - 1 if MathClass . floatequal ( angle , FlowModelConst . e ) : flow_dir_taudem = FlowModelConst . e flow_dir = 1 elif MathClass . floatequal ( angle , FlowModelConst . ne ) : flow_dir_taudem = FlowModelConst . ne flow_dir = 128 elif MathClass . floatequal ( angle , FlowModelConst . n ) : flow_dir_taudem = FlowModelConst . n flow_dir = 64 elif MathClass . floatequal ( angle , FlowModelConst . nw ) : flow_dir_taudem = FlowModelConst . nw flow_dir = 32 elif MathClass . floatequal ( angle , FlowModelConst . w ) : flow_dir_taudem = FlowModelConst . w flow_dir = 16 elif MathClass . floatequal ( angle , FlowModelConst . sw ) : flow_dir_taudem = FlowModelConst . sw flow_dir = 8 elif MathClass . floatequal ( angle , FlowModelConst . s ) : flow_dir_taudem = FlowModelConst . s flow_dir = 4 elif MathClass . floatequal ( angle , FlowModelConst . se ) : flow_dir_taudem = FlowModelConst . se flow_dir = 2 return flow_dir_taudem , flow_dir
Check the given Dinf angle based on D8 flow direction encoding code by ArcGIS
327
18
5,882
def start ( self , * args , * * kwargs ) : self . _stop = False super ( Plant , self ) . start ( * args , * * kwargs )
start the instrument thread
39
4
5,883
def quit ( self , * args , * * kwargs ) : # real signature unknown self . stop ( ) self . _stop = True self . msleep ( 2 * int ( 1e3 / self . settings [ 'update frequency' ] ) ) super ( Plant , self ) . quit ( * args , * * kwargs )
quit the instrument thread
72
4
5,884
def controler_output ( self , current_value ) : set_point = self . settings [ 'set_point' ] Kp = self . settings [ 'gains' ] [ 'proportional' ] Ki = self . settings [ 'gains' ] [ 'integral' ] output_range = self . settings [ 'output_range' ] time_step = self . settings [ 'time_step' ] error_new = set_point - current_value print ( ( 'PD- error:\t' , error_new , Ki , Kp , time_step ) ) #proportional action self . u_P = Kp * error_new * time_step print ( ( 'PD- self.u_P:\t' , self . u_P , self . u_I ) ) #integral action self . u_I += Kp * Ki * ( error_new + self . error ) / 2.0 * time_step self . error = error_new print ( ( 'PD- self.u_P:\t' , self . u_P , self . u_I ) ) # anti-windup if self . u_P + self . u_I > output_range [ 'max' ] : self . u_I = output_range [ 'max' ] - self . u_P if self . u_P + self . u_I < output_range [ 'min' ] : self . u_I = output_range [ 'min' ] - self . u_P output = self . u_P + self . u_I print ( ( 'PD- output:\t' , output ) ) return output
Calculate PI output value for given reference input and feedback
361
12
5,885
def get_opts ( opts ) : defaults = { 'board' : None , 'terrain' : Opt . random , 'numbers' : Opt . preset , 'ports' : Opt . preset , 'pieces' : Opt . preset , 'players' : Opt . preset , } _opts = defaults . copy ( ) if opts is None : opts = dict ( ) try : for key , val in opts . copy ( ) . items ( ) : if key == 'board' : # board is a string, not a regular opt, and gets special handling # in _read_tiles_from_string continue opts [ key ] = Opt ( val ) _opts . update ( opts ) except Exception : raise ValueError ( 'Invalid options={}' . format ( opts ) ) logging . debug ( 'used defaults=\n{}\n on opts=\n{}\nreturned total opts=\n{}' . format ( pprint . pformat ( defaults ) , pprint . pformat ( opts ) , pprint . pformat ( _opts ) ) ) return _opts
Validate options and apply defaults for options not supplied .
245
11
5,886
def _get_tiles ( board = None , terrain = None , numbers = None ) : if board is not None : # we have a board given, ignore the terrain and numbers opts and log warnings # if they were supplied tiles = _read_tiles_from_string ( board ) else : # we are being asked to generate a board tiles = _generate_tiles ( terrain , numbers ) return tiles
Generate a list of tiles using the given terrain and numbers options .
88
14
5,887
def _get_ports ( port_opts ) : if port_opts in [ Opt . preset , Opt . debug ] : _preset_ports = [ ( 1 , 'NW' , catan . board . PortType . any3 ) , ( 2 , 'W' , catan . board . PortType . wood ) , ( 4 , 'W' , catan . board . PortType . brick ) , ( 5 , 'SW' , catan . board . PortType . any3 ) , ( 6 , 'SE' , catan . board . PortType . any3 ) , ( 8 , 'SE' , catan . board . PortType . sheep ) , ( 9 , 'E' , catan . board . PortType . any3 ) , ( 10 , 'NE' , catan . board . PortType . ore ) , ( 12 , 'NE' , catan . board . PortType . wheat ) ] return [ catan . board . Port ( tile , dir , port_type ) for tile , dir , port_type in _preset_ports ] elif port_opts in [ Opt . empty , Opt . random ] : logging . warning ( '{} option not yet implemented' . format ( port_opts ) ) return [ ]
Generate a list of ports using the given options .
274
11
5,888
def _get_pieces ( tiles , ports , players_opts , pieces_opts ) : if pieces_opts == Opt . empty : return dict ( ) elif pieces_opts == Opt . debug : players = catan . game . Game . get_debug_players ( ) return { ( hexgrid . NODE , 0x23 ) : catan . pieces . Piece ( catan . pieces . PieceType . settlement , players [ 0 ] ) , ( hexgrid . EDGE , 0x22 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 0 ] ) , ( hexgrid . NODE , 0x67 ) : catan . pieces . Piece ( catan . pieces . PieceType . settlement , players [ 1 ] ) , ( hexgrid . EDGE , 0x98 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 1 ] ) , ( hexgrid . NODE , 0x87 ) : catan . pieces . Piece ( catan . pieces . PieceType . settlement , players [ 2 ] ) , ( hexgrid . EDGE , 0x89 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 2 ] ) , ( hexgrid . EDGE , 0xA9 ) : catan . pieces . Piece ( catan . pieces . PieceType . road , players [ 3 ] ) , ( hexgrid . TILE , 0x77 ) : catan . pieces . Piece ( catan . pieces . PieceType . robber , None ) , } elif pieces_opts in ( Opt . preset , ) : deserts = filter ( lambda tile : tile . terrain == catan . board . Terrain . desert , tiles ) coord = hexgrid . tile_id_to_coord ( list ( deserts ) [ 0 ] . tile_id ) return { ( hexgrid . TILE , coord ) : catan . pieces . Piece ( catan . pieces . PieceType . robber , None ) } elif pieces_opts in ( Opt . random , ) : logging . warning ( '{} option not yet implemented' . format ( pieces_opts ) )
Generate a dictionary of pieces using the given options .
472
11
5,889
def create_feature ( self , * * kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . create_feature_with_http_info ( * * kwargs ) else : ( data ) = self . create_feature_with_http_info ( * * kwargs ) return data
Create an enumerated sequence feature
88
6
5,890
def list_features ( self , locus , * * kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . list_features_with_http_info ( locus , * * kwargs ) else : ( data ) = self . list_features_with_http_info ( locus , * * kwargs ) return data
List the enumerated sequence features at a locus
97
10
5,891
def list_features_0 ( self , locus , term , * * kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . list_features_0_with_http_info ( locus , term , * * kwargs ) else : ( data ) = self . list_features_0_with_http_info ( locus , term , * * kwargs ) return data
List the enumerated sequence features matching a term at a locus
109
13
5,892
def list_features_1 ( self , locus , term , rank , * * kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . list_features_1_with_http_info ( locus , term , rank , * * kwargs ) else : ( data ) = self . list_features_1_with_http_info ( locus , term , rank , * * kwargs ) return data
List the enumerated sequence features matching a term and rank at a locus
115
15
5,893
def request ( self , method , uri , headers = None , bodyProducer = None ) : if self . _parent_trace is None : trace = Trace ( method ) else : trace = self . _parent_trace . child ( method ) if self . _endpoint is not None : trace . set_endpoint ( self . _endpoint ) if headers is None : headers = Headers ( { } ) # These headers are based on the headers used by finagle's tracing # http Codec. # # https://github.com/twitter/finagle/blob/master/finagle-http/ # # Currently not implemented are X-B3-Sampled and X-B3-Flags # Tryfer's underlying Trace implementation has no notion of a Sampled # trace and I haven't figured out what flags are for. headers . setRawHeaders ( 'X-B3-TraceId' , [ hex_str ( trace . trace_id ) ] ) headers . setRawHeaders ( 'X-B3-SpanId' , [ hex_str ( trace . span_id ) ] ) if trace . parent_span_id is not None : headers . setRawHeaders ( 'X-B3-ParentSpanId' , [ hex_str ( trace . parent_span_id ) ] ) # Similar to the headers above we use the annotation 'http.uri' for # because that is the standard set forth in the finagle http Codec. trace . record ( Annotation . string ( 'http.uri' , uri ) ) trace . record ( Annotation . client_send ( ) ) def _finished ( resp ) : # TODO: It may be advantageous here to return a wrapped response # whose deliverBody can wrap it's protocol and record when the # application has finished reading the contents. trace . record ( Annotation . string ( 'http.responsecode' , '{0} {1}' . format ( resp . code , resp . phrase ) ) ) trace . record ( Annotation . client_recv ( ) ) return resp d = self . _agent . request ( method , uri , headers , bodyProducer ) d . addBoth ( _finished ) return d
Send a client request following HTTP redirects .
474
9
5,894
def stringify ( data ) : def serialize ( k , v ) : if k == "candidates" : return int ( v ) if isinstance ( v , numbers . Number ) : if k == "zipcode" : # If values are presented as integers then leading digits may be cut off, # and these are significant for the zipcode. Add them back. return str ( v ) . zfill ( 5 ) return str ( v ) return v return [ { k : serialize ( k , v ) for k , v in json_dict . items ( ) } for json_dict in data ]
Ensure all values in the dictionary are strings except for the value for candidate which should just be an integer .
127
22
5,895
def post ( self , endpoint , data ) : headers = { "Content-Type" : "application/json" , "Accept" : "application/json" , "x-standardize-only" : "true" if self . standardize else "false" , "x-include-invalid" : "true" if self . invalid else "false" , "x-accept-keypair" : "true" if self . accept_keypair else "false" , } if not self . logging : headers [ "x-suppress-logging" ] = "true" params = { "auth-id" : self . auth_id , "auth-token" : self . auth_token } url = self . BASE_URL + endpoint response = self . session . post ( url , json . dumps ( stringify ( data ) ) , params = params , headers = headers , timeout = self . timeout , ) if response . status_code == 200 : return response . json ( ) raise ERROR_CODES . get ( response . status_code , SmartyStreetsError )
Executes the HTTP POST request
233
6
5,896
def street_addresses ( self , addresses ) : # While it's okay in theory to accept freeform addresses they do need to be submitted in # a dictionary format. if type ( addresses [ 0 ] ) != dict : addresses = [ { "street" : arg for arg in addresses } ] return AddressCollection ( self . post ( "street-address" , data = addresses ) )
API method for verifying street address and geolocating
80
10
5,897
def street_address ( self , address ) : address = self . street_addresses ( [ address ] ) if not len ( address ) : return None return Address ( address [ 0 ] )
Geocode one and only address get a single Address object back
40
13
5,898
def load ( schema , uri = None , spec = None , provider = None ) : factory = Factory ( provider , spec ) return factory ( schema , uri or '#' )
Scaffold a validator against a schema .
39
10
5,899
def sizeHint ( self ) : w , h = self . get_width_height ( ) return QtCore . QSize ( w , h )
gives qt a starting point for widget size during window resizing
32
14