idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
5,600
def call(self, command, **kwargs):
    """Make a request to DynamoDB using the raw botocore API.

    Runs 'precall' hooks, invokes the botocore client method named by
    ``command``, retries throttled requests with exponential backoff,
    runs 'postcall' hooks, then normalizes any ConsumedCapacity in the
    response and feeds it to the 'capacity' hooks.
    """
    for hook in self._hooks['precall']:
        hook(self, command, kwargs)
    op = getattr(self.client, command)
    attempt = 0
    while True:
        try:
            data = op(**kwargs)
            break
        except ClientError as e:
            exc = translate_exception(e, kwargs)
            attempt += 1
            if isinstance(exc, ThroughputException):
                # Retry throttling errors up to request_retries times,
                # sleeping longer on each attempt.
                if attempt > self.request_retries:
                    exc.re_raise()
                self.exponential_sleep(attempt)
            else:
                exc.re_raise()
    for hook in self._hooks['postcall']:
        hook(self, command, kwargs, data)
    if 'ConsumedCapacity' in data:
        is_read = command in READ_COMMANDS
        consumed = data['ConsumedCapacity']
        # Batch operations return a list of capacities, others a single one.
        if isinstance(consumed, list):
            data['consumed_capacity'] = [
                ConsumedCapacity.from_response(cap, is_read)
                for cap in consumed
            ]
        else:
            capacity = ConsumedCapacity.from_response(consumed, is_read)
            data['consumed_capacity'] = capacity
    if 'consumed_capacity' in data:
        if isinstance(data['consumed_capacity'], list):
            all_caps = data['consumed_capacity']
        else:
            all_caps = [data['consumed_capacity']]
        for hook in self._hooks['capacity']:
            for cap in all_caps:
                hook(self, command, kwargs, data, cap)
    return data
Make a request to DynamoDB using the raw botocore API
359
13
5,601
def subscribe(self, event, hook):
    """Subscribe a callback to an event.

    Subscribing the same hook twice is a no-op.
    """
    registered = self._hooks[event]
    if hook not in registered:
        registered.append(hook)
Subscribe a callback to an event
36
6
5,602
def unsubscribe(self, event, hook):
    """Unsubscribe a hook from an event (no-op if not registered)."""
    registered = self._hooks[event]
    if hook in registered:
        registered.remove(hook)
Unsubscribe a hook from an event
36
8
5,603
def add_rate_limit(self, limiter):
    """Add a RateLimit to the connection.

    Registers the limiter's on_capacity callback on the 'capacity' event
    and tracks the limiter; adding the same limiter twice is a no-op.
    """
    if limiter in self.rate_limiters:
        return
    self.subscribe('capacity', limiter.on_capacity)
    self.rate_limiters.append(limiter)
Add a RateLimit to the connection
52
7
5,604
def remove_rate_limit(self, limiter):
    """Remove a RateLimit from the connection (no-op if not added)."""
    if limiter not in self.rate_limiters:
        return
    self.unsubscribe('capacity', limiter.on_capacity)
    self.rate_limiters.remove(limiter)
Remove a RateLimit from the connection
52
7
5,605
def _default_capacity(self, value):
    """Get the value for ReturnConsumedCapacity from the provided value.

    An explicit value wins; otherwise ask for INDEXES-level capacity when
    the connection defaults to returning capacity or has rate limiters,
    and NONE otherwise.
    """
    if value is not None:
        return value
    if self.default_return_capacity or self.rate_limiters:
        return INDEXES
    return NONE
Get the value for ReturnConsumedCapacity from provided value
42
12
5,606
def _count(self, method, limit, keywords):
    """Do a scan or query and aggregate the results into a Count.

    Pages through results until there is no LastEvaluatedKey or the
    limit reports completion.
    """
    # The limit will be mutated, so copy it and leave the original intact.
    limit = limit.copy()
    has_more = True
    count = None
    while has_more:
        limit.set_request_args(keywords)
        response = self.call(method, **keywords)
        limit.post_fetch(response)
        page_count = Count.from_response(response)
        # Make the first-page initialization explicit instead of relying
        # on Count.__radd__ accepting None.
        count = page_count if count is None else count + page_count
        last_evaluated_key = response.get('LastEvaluatedKey')
        has_more = last_evaluated_key is not None and not limit.complete
        if has_more:
            keywords['ExclusiveStartKey'] = last_evaluated_key
    return count
Do a scan or query and aggregate the results into a Count
149
12
5,607
def describe_table(self, tablename):
    """Get the details about a table.

    Returns a Table, or None when the table does not exist.
    """
    try:
        response = self.call('describe_table', TableName=tablename)['Table']
        return Table.from_response(response)
    except DynamoDBError as e:
        if e.kwargs['Code'] == 'ResourceNotFoundException':
            return None
        raise  # pragma: no cover
Get the details about a table
85
6
5,608
def put_item(self, tablename, item, expected=None, returns=NONE,
             return_capacity=None, expect_or=False, **kwargs):
    """Store an item, overwriting existing data.

    Constraint kwargs become an 'Expected' clause; with more than one
    constraint, ``expect_or`` selects OR instead of AND.
    """
    params = {}
    if kwargs:
        params['Expected'] = encode_query_kwargs(self.dynamizer, kwargs)
        if len(params['Expected']) > 1:
            params['ConditionalOperator'] = 'OR' if expect_or else 'AND'
    elif expected is not None:
        params['Expected'] = build_expected(self.dynamizer, expected)
    params['ReturnConsumedCapacity'] = self._default_capacity(return_capacity)
    item = self.dynamizer.encode_keys(item)
    ret = self.call('put_item', TableName=tablename, Item=item,
                    ReturnValues=returns, **params)
    if ret:
        return Result(self.dynamizer, ret, 'Attributes')
Store an item overwriting existing data
219
8
5,609
def delete_item2(self, tablename, key, expr_values=None, alias=None,
                 condition=None, returns=NONE, return_capacity=None,
                 return_item_collection_metrics=NONE, **kwargs):
    """Delete an item from a table (expression-based API)."""
    params = {
        'TableName': tablename,
        'Key': self.dynamizer.encode_keys(key),
        'ReturnValues': returns,
        'ReturnConsumedCapacity': self._default_capacity(return_capacity),
        'ReturnItemCollectionMetrics': return_item_collection_metrics,
    }
    values = build_expression_values(self.dynamizer, expr_values, kwargs)
    if values:
        params['ExpressionAttributeValues'] = values
    if alias:
        params['ExpressionAttributeNames'] = alias
    if condition:
        params['ConditionExpression'] = condition
    result = self.call('delete_item', **params)
    if result:
        return Result(self.dynamizer, result, 'Attributes')
Delete an item from a table
226
6
5,610
def batch_write(self, tablename, return_capacity=None,
                return_item_collection_metrics=NONE):
    """Perform a batch write on a table; returns a BatchWriter."""
    capacity = self._default_capacity(return_capacity)
    return BatchWriter(
        self, tablename, return_capacity=capacity,
        return_item_collection_metrics=return_item_collection_metrics)
Perform a batch write on a table
82
8
5,611
def batch_get(self, tablename, keys, attributes=None, alias=None,
              consistent=False, return_capacity=None):
    """Perform a batch get of many items in a table; returns a GetResultSet."""
    encoded_keys = [self.dynamizer.encode_keys(k) for k in keys]
    capacity = self._default_capacity(return_capacity)
    return GetResultSet(self, tablename, encoded_keys,
                        consistent=consistent, attributes=attributes,
                        alias=alias, return_capacity=capacity)
Perform a batch get of many items in a table
102
11
5,612
def update_item(self, tablename, key, updates, returns=NONE,
                return_capacity=None, expect_or=False, **kwargs):
    """Update a single item in a table.

    ``updates`` supply both attribute updates and expected constraints;
    additional constraints may be passed as kwargs, but each field may
    carry at most one constraint.
    """
    key = self.dynamizer.encode_keys(key)
    attr_updates = {}
    expected = {}
    params = {
        'ReturnConsumedCapacity': self._default_capacity(return_capacity),
    }
    for update in updates:
        attr_updates.update(update.attrs(self.dynamizer))
        expected.update(update.expected(self.dynamizer))
    # Pull the 'expected' constraints from the kwargs
    for field, constraint in six.iteritems(
            encode_query_kwargs(self.dynamizer, kwargs)):
        if field in expected:
            raise ValueError("Cannot have more than one condition on a single field")
        expected[field] = constraint
    if expected:
        params['Expected'] = expected
        if len(expected) > 1:
            params['ConditionalOperator'] = 'OR' if expect_or else 'AND'
    result = self.call('update_item', TableName=tablename, Key=key,
                       AttributeUpdates=attr_updates, ReturnValues=returns,
                       **params)
    if result:
        return Result(self.dynamizer, result, 'Attributes')
Update a single item in a table
296
7
5,613
def query(self, tablename, attributes=None, consistent=False, count=False,
          index=None, limit=None, desc=False, return_capacity=None,
          filter=None, filter_or=False, exclusive_start_key=None, **kwargs):
    """Perform an index query on a table.

    Returns a Count when ``count`` is True, otherwise a lazy ResultSet.
    """
    params = {
        'TableName': tablename,
        'ReturnConsumedCapacity': self._default_capacity(return_capacity),
        'ConsistentRead': consistent,
        'ScanIndexForward': not desc,
        'KeyConditions': encode_query_kwargs(self.dynamizer, kwargs),
    }
    if attributes is not None:
        params['AttributesToGet'] = attributes
    if index is not None:
        params['IndexName'] = index
    if filter is not None:
        if len(filter) > 1:
            params['ConditionalOperator'] = 'OR' if filter_or else 'AND'
        params['QueryFilter'] = encode_query_kwargs(self.dynamizer, filter)
    if exclusive_start_key is not None:
        params['ExclusiveStartKey'] = self.dynamizer.maybe_encode_keys(
            exclusive_start_key)
    if not isinstance(limit, Limit):
        limit = Limit(limit)
    if not count:
        return ResultSet(self, limit, 'query', **params)
    params['Select'] = COUNT
    return self._count('query', limit, params)
Perform an index query on a table
323
8
5,614
def read_logodata(handle):
    """Get weblogo data for a sequence alignment.

    Returns a list of (position, letter_counts, entropy, weight) tuples,
    with positions numbered from 1.
    """
    seqs = weblogolib.read_seq_data(handle,
                                    alphabet=unambiguous_protein_alphabet)
    ldata = weblogolib.LogoData.from_seqs(seqs)
    letters = ldata.alphabet.letters()
    counts = ldata.counts.array
    logodata = []
    for i, (coldata, entropy, weight) in enumerate(
            zip(counts, ldata.entropy, ldata.weight)):
        cnts = dict((let, int(cnt)) for let, cnt in zip(letters, coldata))
        logodata.append((i + 1, cnts, entropy, weight))
    return logodata
Get weblogo data for a sequence alignment .
165
10
5,615
def aln2logodata(aln):
    """Get weblogo data for an alignment object.

    Fix: the StringIO handle is now closed even if read_logodata raises.
    """
    handle = StringIO(aln.format('fasta'))
    try:
        return read_logodata(handle)
    finally:
        handle.close()
Get weblogo data for an alignment object .
49
10
5,616
def letter_scales(counts):
    """Convert letter counts to frequencies, sorted increasing.

    Returns a list of (letter, frequency) pairs for letters with nonzero
    counts. An all-gap column (total count 0) yields an empty list.
    Fix: use ``items()`` instead of the Python-2-only ``iteritems()``.
    """
    try:
        scale = 1.0 / sum(counts.values())
    except ZeroDivisionError:
        # This logo is all gaps, nothing can be done
        return []
    freqs = [(aa, cnt * scale) for aa, cnt in counts.items() if cnt]
    freqs.sort(key=lambda pair: pair[1])
    return freqs
Convert letter counts to frequencies sorted increasing .
93
9
5,617
def replace(doc, pointer, value):
    """Replace an element of a sequence or a member of a mapping,
    returning the resulting document."""
    target = Target(doc).replace(pointer, value)
    return target.document
Replace element from sequence member from mapping .
24
9
5,618
def set_options(self, **kw):
    """Set parser options.

    Only keys already present in the options dict are applied; unknown
    option names are silently ignored.
    Fix: use ``items()`` instead of the Python-2-only ``iteritems()``
    (works on both Python 2 and 3).
    """
    for key, value in kw.items():
        if key in self.__options:
            self.__options[key] = value
Set Parser options.
43
6
5,619
def xml2object(self, content):
    """Convert xml content to a python object.

    Honors the 'strip_attr' and 'strip_root' options: without
    'strip_root' the result is wrapped in a {root_tag: tree} dict, and
    the root attributes are attached unless 'strip_attr' is set.
    """
    content = self.xml_filter(content)
    element = ET.fromstring(content)
    if self.__options['strip_attr']:
        tree = self.parse(element)
    else:
        tree = self.parse_full(element)
    if self.__options['strip_root']:
        return tree
    node = self.get_node(element)
    if not self.__options['strip_attr']:
        tree['attrs'] = node['attr']
    return {node['tag']: tree}
r Convert xml content to python object .
123
8
5,620
def xml_filter(self, content):
    """Filter and preprocess xml content.

    Optionally strips whitespace, guesses the declared encoding when
    none is configured, decodes and strips the XML header for
    non-default encodings, and unescapes HTML entities when requested.
    """
    if self.__options['strip']:
        content = utils.strip_whitespace(content, True)
    else:
        content = content.strip()
    if not self.__options['encoding']:
        encoding = self.guess_xml_encoding(content) or self.__encoding
        self.set_options(encoding=encoding)
    if self.__options['encoding'].lower() != self.__encoding:
        # Convert the encoding and strip the xml header declaration,
        # which would no longer match the decoded text.
        decoded = content.decode(self.__options['encoding'],
                                 errors=self.__options['errors'])
        content = self.strip_xml_header(decoded)
    if self.__options['unescape']:
        content = utils.html_entity_decode(content)
    return content
r Filter and preprocess xml content
192
7
5,621
def guess_xml_encoding(self, content):
    """Guess the encoding from the xml header declaration.

    Returns the lowercased encoding name, or None when the declaration
    does not match.
    """
    matched = self.__regex['xml_encoding'].match(content)
    if matched:
        return matched.group(1).lower()
    return None
r Guess encoding from xml header declaration .
49
8
5,622
def parse(self, element):
    """Parse an xml element into a dict of tag -> value.

    Child elements with text become strings, nested elements become
    dicts, and repeated sibling tags are collected into a list.
    """
    values = {}
    for child in element:
        node = self.get_node(child)
        tag = node['tag']
        sub_values = self.parse(child)
        value = sub_values or node['value']
        if tag not in values:
            values[tag] = value
        else:
            # Repeated tag: promote to a list and append.
            if not isinstance(values[tag], list):
                values[tag] = [values.pop(tag)]
            values[tag].append(value)
    return values
r Parse xml element .
126
6
5,623
def parse_full(self, element):
    """Parse an xml element, keeping node attributes.

    Each child becomes {'values': ..., 'attrs': ...}; repeated sibling
    tags are collected into a list.
    """
    values = collections.defaultdict(dict)
    for child in element:
        node = self.get_node(child)
        tag = node['tag']
        sub_values = self.parse_full(child)
        value = sub_values or {'values': node['value']}
        value['attrs'] = node['attr']
        if tag not in values['values']:
            values['values'][tag] = value
        else:
            # Repeated tag: promote to a list and append.
            if not isinstance(values['values'][tag], list):
                values['values'][tag] = [values['values'].pop(tag)]
            values['values'][tag].append(value)
    return values
r Parse xml element include the node attributes .
185
10
5,624
def get_node(self, element):
    """Get node info: tag, stripped text value, attributes and namespace."""
    namespace, tag = self.split_namespace(element.tag)
    text = element.text or ''
    return {
        'tag': tag,
        'value': text.strip(),
        'attr': element.attrib,
        'namespace': namespace,
    }
r Get node info .
65
5
5,625
def split_namespace(self, tag):
    """Split a tag into (namespace, tag); namespace is '' when absent."""
    matched = self.__regex['xml_ns'].search(tag)
    if matched:
        return matched.groups()
    return '', tag
r Split tag namespace .
47
5
5,626
def instantiate_probes(probes, instruments):
    """Create instances of the requested probes.

    ``probes`` maps an instance name to a dict with 'probe_name',
    'instrument_name' and optional 'probe_info'; ``instruments`` maps
    instrument names to instrument objects. Validation failures raise
    AssertionError, as callers expect.
    """
    probe_instances = {}
    for name, sub_dict in probes.items():
        assert isinstance(sub_dict, dict)
        assert "probe_name" in sub_dict
        assert "instrument_name" in sub_dict
        probe_name = sub_dict['probe_name']
        instrument_name = sub_dict['instrument_name']
        probe_info = sub_dict['probe_info'] if 'probe_info' in sub_dict else ''
        assert instrument_name in instruments, "{:s} not in {:s}".format(
            instrument_name, list(instruments.keys()))
        assert probe_name in instruments[instrument_name]._PROBES
        probe_instances[name] = Probe(instruments[instrument_name],
                                      probe_name, name, probe_info)
    return probe_instances
Creates instances of the probes that were input.
212
9
5,627
def compressed(self):
    """Derive the compressed public key as a hex string."""
    order = ecdsa.SECP256k1.generator.order()
    point = ecdsa.VerifyingKey.from_string(
        compat_bytes(self), curve=ecdsa.SECP256k1).pubkey.point
    x_str = ecdsa.util.number_to_string(point.x(), order)
    # Prefix byte is 0x02 for even y, 0x03 for odd y.
    prefix = chr(2 + (point.y() & 1))
    return hexlify(compat_bytes(prefix, 'ascii') + x_str).decode('ascii')
Derive compressed public key
158
5
5,628
def unCompressed(self):
    """Derive the uncompressed public key as a hex string."""
    public_key = repr(self._pk)
    prefix = public_key[:2]
    if prefix == "04":
        # Already uncompressed.
        return public_key
    assert prefix in ("02", "03")
    x = int(public_key[2:], 16)
    # The prefix encodes the parity of y: 0x02 means even.
    y = self._derive_y_from_x(x, prefix == "02")
    return '04' + '%064x' % x + '%064x' % y
Derive uncompressed key
116
5
5,629
def compressedpubkey(self):
    """Derive the compressed and uncompressed public keys from the WIF.

    Returns ``[compressed_hex, uncompressed_hex]``.
    Fix: the SigningKey was constructed twice from the same secret;
    build it once and reuse it.
    """
    secret = unhexlify(repr(self._wif))
    signing_key = ecdsa.SigningKey.from_string(secret, curve=ecdsa.SECP256k1)
    order = signing_key.curve.generator.order()
    p = signing_key.verifying_key.pubkey.point
    x_str = ecdsa.util.number_to_string(p.x(), order)
    y_str = ecdsa.util.number_to_string(p.y(), order)
    # 0x02/0x03 prefix for compressed (by y parity), 0x04 for uncompressed.
    compressed = hexlify(chr(2 + (p.y() & 1)).encode('ascii') + x_str).decode('ascii')
    uncompressed = hexlify(chr(4).encode('ascii') + x_str + y_str).decode('ascii')
    return [compressed, uncompressed]
Derive uncompressed public key
235
6
5,630
def get_private(self):
    """Derive the private key from the account, role and password."""
    seed = compat_bytes(self.account + self.role + self.password, 'utf8')
    digest = hashlib.sha256(seed).digest()
    return PrivateKey(hexlify(digest).decode('ascii'))
Derive private key from the brain key and the current sequence number
66
13
5,631
def downstream_index(dir_value, i, j, alg='taudem'):
    """Find the downslope (row, col) coordinate for a D8 direction value."""
    alg = alg.lower()
    assert alg in FlowModelConst.d8_deltas
    drow, dcol = FlowModelConst.d8_deltas.get(alg)[int(dir_value)]
    return i + drow, j + dcol
find downslope coordinate for D8 direction .
88
10
5,632
def convert_group(tokens):
    """Convert a parse result into a ConfGroup, requiring unique names."""
    token_pairs = tokens.asList()
    mapping = dict(token_pairs)
    # dict() collapses duplicate keys, so a size mismatch means a repeat.
    if len(mapping) != len(token_pairs):
        raise ParseFatalException("Names in group must be unique: %s" % tokens)
    return ConfGroup(mapping)
Converts parseResult from to ConfGroup type .
69
10
5,633
def load_elements(self, filename):
    """Load elements of this container's type from a .b26 file.

    Returns an empty dict when the file has no such elements.
    """
    input_data = load_b26_file(filename)
    if isinstance(input_data, dict) and self.elements_type in input_data:
        return input_data[self.elements_type]
    return {}
loads the elements from file filename
61
6
5,634
def add_script_sequence(self):
    """Create a script sequence from the selected scripts and the chosen
    iterator type, and add it to the tree ``self.tree_loaded``.
    """
    def empty_tree(tree_model):
        # Drain the Qt tree model: collect the text of the root's direct
        # children (in row order), clear the model, and return the names.
        def add_children_to_list(item, somelist):
            if item.hasChildren():
                for rownum in range(0, item.rowCount()):
                    somelist.append(str(item.child(rownum, 0).text()))
        output_list = []
        root = tree_model.invisibleRootItem()
        add_children_to_list(root, output_list)
        tree_model.clear()
        return output_list

    name = str(self.txt_script_sequence_name.text())
    new_script_list = empty_tree(self.tree_script_sequence_model)
    # Resolve each selected script back to its definition, preferring
    # previously-loaded elements over those freshly read from file.
    new_script_dict = {}
    for script in new_script_list:
        if script in self.elements_old:
            new_script_dict.update({script: self.elements_old[script]})
        elif script in self.elements_from_file:
            new_script_dict.update({script: self.elements_from_file[script]})
    # Record the execution order of each script.
    new_script_parameter_dict = {}
    for index, script in enumerate(new_script_list):
        new_script_parameter_dict.update({script: index})
    # get the module of the current dialogue
    package = get_python_package(inspect.getmodule(self).__file__)
    assert package is not None  # check that we actually find a module
    new_script_dict = {name: {'class': 'ScriptIterator',
                              'package': package,
                              'scripts': new_script_dict,
                              'info': str(self.txt_info.toPlainText()),
                              'settings': {'script_order': new_script_parameter_dict,
                                           'iterator_type': str(self.cmb_looping_variable.currentText())}}}
    self.selected_element_name = name
    self.fill_tree(self.tree_loaded, new_script_dict)
    self.elements_from_file.update(new_script_dict)
creates a script sequence based on the script iterator type selected and the selected scripts and sends it to the tree self . tree_loaded
542
27
5,635
def cli(family_file, family_type, to_json, to_madeline, to_ped, to_dict,
        outfile, logfile, loglevel):
    """Cli for testing the ped parser."""
    from pprint import pprint as pp
    my_parser = FamilyParser(family_file, family_type)

    def emit(line):
        # Write to the outfile when given, otherwise print to stdout.
        if outfile:
            outfile.write(line + '\n')
        else:
            print(line)

    if to_json:
        if outfile:
            outfile.write(my_parser.to_json())
        else:
            print(my_parser.to_json())
    elif to_madeline:
        for line in my_parser.to_madeline():
            emit(line)
    elif to_ped:
        for line in my_parser.to_ped():
            emit(line)
    elif to_dict:
        pp(my_parser.to_dict())
Cli for testing the ped parser .
203
8
5,636
def check_line_length(self, splitted_line, expected_length):
    """Check that a split ped line has the expected number of fields.

    Raises WrongLineFormat when the field count does not match.
    """
    if len(splitted_line) != expected_length:
        raise WrongLineFormat(message='WRONG FORMATED PED LINE!',
                              ped_line='\t'.join(splitted_line))
    return
Check if the line is correctly formatted. Raise WrongLineFormat if it is not.
67
19
5,637
def deck_vote_tag(deck: Deck) -> str:
    """Return the deck's vote tag address."""
    if deck.id is None:
        raise Exception("deck.id is required")
    privkey = sha256(unhexlify(deck.id) + b"vote_init").hexdigest()
    tag_key = Kutil(network=deck.network, privkey=bytearray.fromhex(privkey))
    return tag_key.address
deck vote tag address
116
4
5,638
def parse_vote_info(protobuf: bytes) -> dict:
    """Decode a vote-init OP_RETURN protobuf message and validate it.

    Invalid messages raise AssertionError, which callers catch to skip
    non-vote transactions.
    """
    vote = pavoteproto.Vote()
    vote.ParseFromString(protobuf)
    assert vote.version > 0, {"error": "Vote info incomplete, version can't be 0."}
    assert vote.start_block < vote.end_block, {"error": "vote can't end in the past."}
    return {
        "version": vote.version,
        "description": vote.description,
        "count_mode": vote.MODE.Name(vote.count_mode),
        "choices": vote.choices,
        "start_block": vote.start_block,
        "end_block": vote.end_block,
        "vote_metainfo": vote.vote_metainfo,
    }
decode vote init tx op_return protobuf message and validate it .
177
16
5,639
def vote_init(vote: Vote, inputs: dict, change_address: str) -> bytes:
    """Build a raw vote-init transaction.

    The result must be signed by the deck issuer's private key.
    """
    network_params = net_query(vote.deck.network)
    deck_vote_tag_address = deck_vote_tag(vote.deck)
    tx_fee = network_params.min_tx_fee  # settle for min tx fee for now
    for utxo in inputs['utxos']:
        utxo['txid'] = unhexlify(utxo['txid'])
        utxo['scriptSig'] = unhexlify(utxo['scriptSig'])
    change = float(inputs['total']) - float(tx_fee) - float(0.01)
    outputs = [
        {"redeem": 0.01,
         "outputScript": transactions.monosig_script(deck_vote_tag_address)},
        {"redeem": 0,
         "outputScript": transactions.op_return_script(vote.to_protobuf)},
        {"redeem": change,
         "outputScript": transactions.monosig_script(change_address)},
    ]
    return transactions.make_raw_transaction(inputs['utxos'], outputs)
initialize vote transaction must be signed by the deck_issuer privkey
278
15
5,640
def find_vote_inits(provider: Provider, deck: Deck) -> Iterable[Vote]:
    """Find vote inits on this deck."""
    for txid in provider.listtransactions(deck_vote_tag(deck)):
        try:
            raw_vote = provider.getrawtransaction(txid)
            vote = parse_vote_info(read_tx_opreturn(raw_vote))
            vote["vote_id"] = txid
            vote["sender"] = find_tx_sender(provider, raw_vote)
            vote["deck"] = deck
            yield Vote(**vote)
        except AssertionError:
            # Not a valid vote init; skip it.
            pass
find vote_inits on this deck
140
8
5,641
def vote_cast(vote: Vote, choice_index: int, inputs: dict,
              change_address: str) -> bytes:
    """Build a raw vote-cast transaction for the given choice."""
    network_params = net_query(vote.deck.network)
    vote_cast_addr = vote.vote_choice_address[choice_index]
    tx_fee = network_params.min_tx_fee  # settle for min tx fee for now
    for utxo in inputs['utxos']:
        utxo['txid'] = unhexlify(utxo['txid'])
        utxo['scriptSig'] = unhexlify(utxo['scriptSig'])
    change = float(inputs['total']) - float(tx_fee) - float(0.01)
    outputs = [
        {"redeem": 0.01,
         "outputScript": transactions.monosig_script(vote_cast_addr)},
        {"redeem": change,
         "outputScript": transactions.monosig_script(change_address)},
    ]
    return transactions.make_raw_transaction(inputs['utxos'], outputs)
vote cast transaction
251
3
5,642
def find_vote_casts(provider: Provider, vote: Vote,
                    choice_index: int) -> Iterable[VoteCast]:
    """Find and verify vote casts on this vote choice address."""
    cast_txids = provider.listtransactions(
        vote.vote_choice_address[choice_index])
    for txid in cast_txids:
        raw_tx = provider.getrawtransaction(txid, 1)
        sender = find_tx_sender(provider, raw_tx)
        confirmations = raw_tx["confirmations"]
        blocknum = provider.getblock(raw_tx["blockhash"])["height"]
        yield VoteCast(vote, sender, blocknum, confirmations,
                       raw_tx["blocktime"])
find and verify vote_casts on this vote_choice_address
149
13
5,643
def to_protobuf(self) -> str:
    """Encode this vote into a serialized protobuf message.

    Warns when the payload exceeds the 80-byte OP_RETURN limit.
    """
    vote = pavoteproto.Vote()
    vote.version = self.version
    vote.description = self.description
    vote.count_mode = vote.MODE.Value(self.count_mode)
    vote.start_block = self.start_block
    vote.end_block = self.end_block
    vote.choices.extend(self.choices)
    if isinstance(self.vote_metainfo, bytes):
        vote.vote_metainfo = self.vote_metainfo
    else:
        vote.vote_metainfo = self.vote_metainfo.encode()
    proto = vote.SerializeToString()
    if len(proto) > 80:
        warnings.warn('\nMetainfo size exceeds maximum of 80 bytes allowed by OP_RETURN.')
    return proto
encode vote into protobuf
184
7
5,644
def to_dict(self) -> dict:
    """Return the vote info as a dict."""
    return {
        "version": self.version,
        "description": self.description,
        "count_mode": self.count_mode,
        "start_block": self.start_block,
        "end_block": self.end_block,
        "choices": self.choices,
        "vote_metainfo": self.vote_metainfo,
    }
vote info as dict
89
4
5,645
def vote_choice_address(self) -> List[str]:
    """Calculate the addresses on which the vote is cast, one per choice."""
    if self.vote_id is None:
        raise Exception("vote_id is required")
    addresses = []
    vote_init_txid = unhexlify(self.vote_id)
    for choice in self.choices:
        index = list(self.choices).index(choice)
        # NOTE(review): bytes(index) yields `index` zero bytes, not the
        # index value itself — presumably intentional here (changing it
        # would change every derived address), but worth confirming.
        privkey = sha256(vote_init_txid + bytes(index)).hexdigest()
        addresses.append(Kutil(network=self.deck.network,
                               privkey=bytearray.fromhex(privkey)).address)
    return addresses
calculate the addresses on which the vote is casted .
142
13
5,646
def is_valid(self) -> bool:
    """Check if this VoteCast is valid.

    The cast must fall within the vote's block window and carry at least
    six confirmations.
    """
    in_window = self.vote.start_block <= self.blocknum <= self.vote.end_block
    if not in_window:
        return False
    return self.confirmations >= 6
check if VoteCast is valid
55
6
5,647
def search_refdata(self, seq, locus):
    """Check whether a sequence already exists in the reference data.

    When an exact sequence match is found (in the BioSQL database if a
    server is available, otherwise in the local dat-file lookup), return
    its annotation; otherwise return None.
    """
    # TODO: ONLY MAKE ONE CONNECTION
    # TODO: add try statement
    # TODO: take password from environment variable
    if self.server_avail:
        hla, loc = locus.split('-')
        # NOTE(review): the query is assembled by string concatenation;
        # the interpolated values come from this object's configuration
        # and the input sequence, but parameterized queries would be safer.
        p1 = "SELECT ent.name "
        p2 = "FROM bioentry ent,biosequence seq,biodatabase dbb "
        p3 = "WHERE dbb.biodatabase_id = ent.biodatabase_id AND seq.bioentry_id = ent.bioentry_id "
        p4 = " AND dbb.name = \"" + self.dbversion + "_" + loc + "\""
        p5 = " AND seq.seq = \"" + str(seq.seq) + "\""
        select_stm = p1 + p2 + p3 + p4 + p5
        # TODO: add try statement
        conn = pymysql.connect(host=biosqlhost, port=biosqlport,
                               user=biosqluser, passwd=biosqlpass,
                               db=biosqldb)
        cur = conn.cursor()
        cur.execute(select_stm)
        # Keep the last matching name (queries are expected to return at
        # most one row).
        typ = ''
        for row in cur:
            typ = row[0]
        cur.close()
        conn.close()
        if typ:
            if self.verbose:
                self.logger.info("Exact typing found in BioSQL database")
            seqrecord = self.seqrecord(typ, loc)
            return self.seqannotation(seqrecord, typ, loc)
        else:
            return
    else:
        # No server: fall back to the in-memory sequence lookup table.
        if str(seq.seq) in self.seqref:
            if self.verbose:
                self.logger.info("Exact typing found in dat file")
            seqrec = self.hlaref[self.seqref[str(seq.seq)]]
            return self.seqannotation(seqrec, self.seqref[str(seq.seq)], locus)
        else:
            return
This checks to see if a sequence already exists in the reference data . If it does then it ll return the known annotation .
432
25
5,648
def update(self, *args):
    """Update values from one or more dicts, like dict.update.

    Goes through __setitem__ so any per-key handling there still applies.
    """
    for mapping in args:
        for key, value in mapping.items():
            self.__setitem__(key, value)
updates the values of the parameter just as a regular dictionary
38
12
5,649
def error(msg, log_file=None):
    """Print an error message, optionally append it to a log file, and
    raise RuntimeError."""
    UtilClass.print_msg(msg + os.linesep)
    if log_file is not None:
        UtilClass.writelog(log_file, msg, 'append')
    raise RuntimeError(msg)
Print output error message and raise RuntimeError .
59
9
5,650
def log(lines, log_file=None):
    """Print log lines, optionally appending them to a log file.

    Stops at the first 'BAD TERMINATION' line (an MPI/TauDEM failure)
    and raises via TauDEM.error.
    """
    bad_termination = False
    for line in lines:
        print(line)
        if log_file is not None:
            UtilClass.writelog(log_file, line, 'append')
        if 'BAD TERMINATION' in line.upper():
            bad_termination = True
            break
    if bad_termination:
        TauDEM.error('Error occurred when calling TauDEM function, please check!',
                     log_file)
Output log message .
94
4
5,651
def write_time_log(logfile, time):
    """Append a timing record to a log file.

    Creates the file with a header row when it does not yet exist.
    ``time`` must provide 'name', 'readt', 'computet', 'writet' and
    'totalt' entries.
    Fix: use a ``with`` block so the file is closed even on write errors.
    """
    new_file = not os.path.exists(logfile)
    mode = 'w' if new_file else 'a'
    with open(logfile, mode, encoding='utf-8') as log_status:
        if new_file:
            log_status.write('Function Name\tRead Time\tCompute Time\t'
                             'Write Time\tTotal Time\t\n')
        log_status.write('%s\t%.5f\t%.5f\t%.5f\t%.5f\t\n' %
                         (time['name'], time['readt'], time['computet'],
                          time['writet'], time['totalt']))
Write time log .
192
4
5,652
def check_infile_and_wp(curinf, curwp):
    """Check the existence of the given file and workspace path.

    Raises (via TauDEM.error) when the file cannot be located. When
    ``curwp`` is None it is set to the base folder of ``curinf``.
    Returns the absolute input path and the workspace directory.
    """
    if os.path.exists(curinf):
        curinf = os.path.abspath(curinf)
        if curwp is None:
            curwp = os.path.dirname(curinf)
    else:
        if curwp is None:
            TauDEM.error('You must specify one of the workspace and the '
                         'full path of input file!')
        curinf = os.path.abspath(curwp + os.sep + curinf)
        if not os.path.exists(curinf):
            TauDEM.error('Input files parameter %s is not existed!' % curinf)
    return curinf, curwp
Check the existence of the given file and directory path. 1. Raise a runtime exception if both do not exist. 2. If curwp is None, set it to the base folder of curinf.
155
40
5,653
def pitremove(np, dem, filleddem, workingdir=None, mpiexedir=None,
              exedir=None, log_file=None, runtime_file=None, hostfile=None):
    """Run pit remove using the flooding approach."""
    exe = FileClass.get_executable_fullpath(TauDEM.func_name('pitremove'),
                                            exedir)
    return TauDEM.run(exe, {'-z': dem}, workingdir, None,
                      {'-fel': filleddem},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
Run pit remove using the flooding approach
161
7
5,654
def d8flowdir(np, filleddem, flowdir, slope, workingdir=None, mpiexedir=None,
              exedir=None, log_file=None, runtime_file=None, hostfile=None):
    """Run D8 flow direction."""
    exe = FileClass.get_executable_fullpath(TauDEM.func_name('d8flowdir'),
                                            exedir)
    return TauDEM.run(exe, {'-fel': filleddem}, workingdir, None,
                      {'-p': flowdir, '-sd8': slope},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
Run D8 flow direction
177
5
5,655
def dinfflowdir(np, filleddem, flowangle, slope, workingdir=None,
                mpiexedir=None, exedir=None, log_file=None,
                runtime_file=None, hostfile=None):
    """Run Dinf flow direction."""
    exe = FileClass.get_executable_fullpath(TauDEM.func_name('dinfflowdir'),
                                            exedir)
    return TauDEM.run(exe, {'-fel': filleddem}, workingdir, None,
                      {'-ang': flowangle, '-slp': slope},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
Run Dinf flow direction
177
5
5,656
def aread8(np, flowdir, acc, outlet=None, streamskeleton=None,
           edgecontaimination=False, workingdir=None, mpiexedir=None,
           exedir=None, log_file=None, runtime_file=None, hostfile=None):
    """Run accumulate area according to D8 flow direction."""
    # '-nc' means do NOT consider edge contamination.
    in_params = None if edgecontaimination else {'-nc': None}
    exe = FileClass.get_executable_fullpath(TauDEM.func_name('aread8'),
                                            exedir)
    return TauDEM.run(exe,
                      {'-p': flowdir, '-o': outlet, '-wg': streamskeleton},
                      workingdir, in_params, {'-ad8': acc},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
Run Accumulate area according to D8 flow direction
234
11
5,657
def areadinf(np, angfile, sca, outlet=None, wg=None, edgecontaimination=False,
             workingdir=None, mpiexedir=None, exedir=None, log_file=None,
             runtime_file=None, hostfile=None):
    """Run accumulate area according to Dinf flow direction.

    Fix: the '-nc' flag (skip edge-contamination checking) was passed
    when ``edgecontaimination`` was True — the opposite of the sibling
    ``aread8`` wrapper. It is now passed when edge contamination should
    NOT be considered, matching aread8.
    """
    # '-nc' means do NOT consider edge contamination.
    if not edgecontaimination:
        in_params = {'-nc': None}
    else:
        in_params = None
    fname = TauDEM.func_name('areadinf')
    return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir),
                      {'-ang': angfile, '-o': outlet, '-wg': wg},
                      workingdir, in_params, {'-sca': sca},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
Run Accumulate area according to Dinf flow direction
235
11
5,658
def connectdown(np, p, acc, outlet, wtsd=None, workingdir=None,
                mpiexedir=None, exedir=None, log_file=None,
                runtime_file=None, hostfile=None):
    """Read an ad8 contributing-area file and identify the location of
    the largest ad8 value as the outlet of the largest watershed."""
    if wtsd is None or not os.path.isfile(wtsd):
        # If watershed is not specified, use acc to generate a mask layer.
        p, workingdir = TauDEM.check_infile_and_wp(p, workingdir)
        wtsd = workingdir + os.sep + 'wtsd_default.tif'
        RasterUtilClass.get_mask_from_raster(p, wtsd, True)
    exe = FileClass.get_executable_fullpath(TauDEM.func_name('connectdown'),
                                            exedir)
    return TauDEM.run(exe, {'-p': p, '-ad8': acc, '-w': wtsd},
                      workingdir, None, {'-o': outlet},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
Reads an ad8 contributing area file identifies the location of the largest ad8 value as the outlet of the largest watershed
283
24
5,659
def threshold ( np , acc , stream_raster , threshold = 100. , workingdir = None , mpiexedir = None , exedir = None , log_file = None , runtime_file = None , hostfile = None ) : fname = TauDEM . func_name ( 'threshold' ) return TauDEM . run ( FileClass . get_executable_fullpath ( fname , exedir ) , { '-ssa' : acc } , workingdir , { '-thresh' : threshold } , { '-src' : stream_raster } , { 'mpipath' : mpiexedir , 'hostfile' : hostfile , 'n' : np } , { 'logfile' : log_file , 'runtimefile' : runtime_file } )
Run threshold for stream raster
177
6
5,660
def moveoutletstostrm ( np , flowdir , streamRaster , outlet , modifiedOutlet , workingdir = None , mpiexedir = None , exedir = None , log_file = None , runtime_file = None , hostfile = None ) : fname = TauDEM . func_name ( 'moveoutletstostrm' ) return TauDEM . run ( FileClass . get_executable_fullpath ( fname , exedir ) , { '-p' : flowdir , '-src' : streamRaster , '-o' : outlet } , workingdir , None , { '-om' : modifiedOutlet } , { 'mpipath' : mpiexedir , 'hostfile' : hostfile , 'n' : np } , { 'logfile' : log_file , 'runtimefile' : runtime_file } )
Run move the given outlets to stream
194
7
5,661
def convertdistmethod ( method_str ) : if StringClass . string_match ( method_str , 'Horizontal' ) : return 'h' elif StringClass . string_match ( method_str , 'Vertical' ) : return 'v' elif StringClass . string_match ( method_str , 'Pythagoras' ) : return 'p' elif StringClass . string_match ( method_str , 'Surface' ) : return 's' elif method_str . lower ( ) in [ 'h' , 'v' , 'p' , 's' ] : return method_str . lower ( ) else : return 's'
Convert distance method to h v p and s .
145
11
5,662
def convertstatsmethod ( method_str ) : if StringClass . string_match ( method_str , 'Average' ) : return 'ave' elif StringClass . string_match ( method_str , 'Maximum' ) : return 'max' elif StringClass . string_match ( method_str , 'Minimum' ) : return 'min' elif method_str . lower ( ) in [ 'ave' , 'max' , 'min' ] : return method_str . lower ( ) else : return 'ave'
Convert statistics method to ave min and max .
113
11
5,663
def d8hdisttostrm ( np , p , src , dist , thresh , workingdir = None , mpiexedir = None , exedir = None , log_file = None , runtime_file = None , hostfile = None ) : fname = TauDEM . func_name ( 'd8hdisttostrm' ) return TauDEM . run ( FileClass . get_executable_fullpath ( fname , exedir ) , { '-p' : p , '-src' : src } , workingdir , { '-thresh' : thresh } , { '-dist' : dist } , { 'mpipath' : mpiexedir , 'hostfile' : hostfile , 'n' : np } , { 'logfile' : log_file , 'runtimefile' : runtime_file } )
Run D8 horizontal distance down to stream .
189
9
5,664
def d8distdowntostream ( np , p , fel , src , dist , distancemethod , thresh , workingdir = None , mpiexedir = None , exedir = None , log_file = None , runtime_file = None , hostfile = None ) : fname = TauDEM . func_name ( 'd8distdowntostream' ) return TauDEM . run ( FileClass . get_executable_fullpath ( fname , exedir ) , { '-fel' : fel , '-p' : p , '-src' : src } , workingdir , { '-thresh' : thresh , '-m' : TauDEM . convertdistmethod ( distancemethod ) } , { '-dist' : dist } , { 'mpipath' : mpiexedir , 'hostfile' : hostfile , 'n' : np } , { 'logfile' : log_file , 'runtimefile' : runtime_file } )
Run D8 distance down to stream by different method for distance . This function is extended from d8hdisttostrm by Liangjun .
218
29
5,665
def dinfdistdown ( np , ang , fel , slp , src , statsm , distm , edgecontamination , wg , dist , workingdir = None , mpiexedir = None , exedir = None , log_file = None , runtime_file = None , hostfile = None ) : in_params = { '-m' : '%s %s' % ( TauDEM . convertstatsmethod ( statsm ) , TauDEM . convertdistmethod ( distm ) ) } if StringClass . string_match ( edgecontamination , 'false' ) or edgecontamination is False : in_params [ '-nc' ] = None fname = TauDEM . func_name ( 'dinfdistdown' ) return TauDEM . run ( FileClass . get_executable_fullpath ( fname , exedir ) , { '-fel' : fel , '-slp' : slp , '-ang' : ang , '-src' : src , '-wg' : wg } , workingdir , in_params , { '-dd' : dist } , { 'mpipath' : mpiexedir , 'hostfile' : hostfile , 'n' : np } , { 'logfile' : log_file , 'runtimefile' : runtime_file } )
Run D - inf distance down to stream
292
8
5,666
def peukerdouglas ( np , fel , streamSkeleton , workingdir = None , mpiexedir = None , exedir = None , log_file = None , runtime_file = None , hostfile = None ) : fname = TauDEM . func_name ( 'peukerdouglas' ) return TauDEM . run ( FileClass . get_executable_fullpath ( fname , exedir ) , { '-fel' : fel } , workingdir , None , { '-ss' : streamSkeleton } , { 'mpipath' : mpiexedir , 'hostfile' : hostfile , 'n' : np } , { 'logfile' : log_file , 'runtimefile' : runtime_file } )
Run peuker - douglas function
169
8
5,667
def dropanalysis ( np , fel , p , ad8 , ssa , outlet , minthresh , maxthresh , numthresh , logspace , drp , workingdir = None , mpiexedir = None , exedir = None , log_file = None , runtime_file = None , hostfile = None ) : parstr = '%f %f %f' % ( minthresh , maxthresh , numthresh ) if logspace == 'false' : parstr += ' 1' else : parstr += ' 0' fname = TauDEM . func_name ( 'dropanalysis' ) return TauDEM . run ( FileClass . get_executable_fullpath ( fname , exedir ) , { '-fel' : fel , '-p' : p , '-ad8' : ad8 , '-ssa' : ssa , '-o' : outlet } , workingdir , { '-par' : parstr } , { '-drp' : drp } , { 'mpipath' : mpiexedir , 'hostfile' : hostfile , 'n' : np } , { 'logfile' : log_file , 'runtimefile' : runtime_file } )
Drop analysis for optimal threshold for extracting stream .
274
9
5,668
def resolve ( self , pointer ) : dp = DocumentPointer ( pointer ) obj , fetcher = self . prototype ( dp ) for token in dp . pointer : obj = token . extract ( obj , bypass_ref = True ) reference = ref ( obj ) if reference : obj = fetcher . resolve ( reference ) return obj
Resolve from documents .
71
5
5,669
def count_diffs ( align , feats , inseq , locus , cutoff , verbose = False , verbosity = 0 ) : nfeats = len ( feats . keys ( ) ) mm = 0 insr = 0 dels = 0 gaps = 0 match = 0 lastb = '' l = len ( align [ 0 ] ) if len ( align [ 0 ] ) > len ( align [ 1 ] ) else len ( align [ 1 ] ) # Counting gaps, mismatches and insertions for i in range ( 0 , l ) : if align [ 0 ] [ i ] == "-" or align [ 1 ] [ i ] == "-" : if align [ 0 ] [ i ] == "-" : insr += 1 if lastb != '-' : gaps += 1 lastb = "-" if align [ 1 ] [ i ] == "-" : dels += 1 if lastb != '-' : gaps += 1 lastb = "-" else : lastb = '' if align [ 0 ] [ i ] != align [ 1 ] [ i ] : mm += 1 else : match += 1 gper = gaps / nfeats delper = dels / l iper = insr / l mmper = mm / l mper = match / l mper2 = match / len ( inseq ) logger = logging . getLogger ( "Logger." + __name__ ) if verbose and verbosity > 0 : logger . info ( "Features algined = " + "," . join ( list ( feats . keys ( ) ) ) ) logger . info ( '{:<22}{:<6d}' . format ( "Number of feats: " , nfeats ) ) logger . info ( '{:<22}{:<6d}{:<1.2f}' . format ( "Number of gaps: " , gaps , gper ) ) logger . info ( '{:<22}{:<6d}{:<1.2f}' . format ( "Number of deletions: " , dels , delper ) ) logger . info ( '{:<22}{:<6d}{:<1.2f}' . format ( "Number of insertions: " , insr , iper ) ) logger . info ( '{:<22}{:<6d}{:<1.2f}' . format ( "Number of mismatches: " , mm , mmper ) ) logger . info ( '{:<22}{:<6d}{:<1.2f}' . format ( "Number of matches: " , match , mper ) ) logger . info ( '{:<22}{:<6d}{:<1.2f}' . format ( "Number of matches: " , match , mper2 ) ) indel = iper + delper # ** HARD CODED LOGIC ** # if len ( inseq ) > 6000 and mmper < .10 and mper2 > .80 : if verbose : logger . info ( "Alignment coverage high enough to complete annotation 11" ) return insr , dels else : # TODO: These numbers need to be fine tuned indel_mm = indel + mper2 if ( indel > 0.5 or mmper > 0.05 ) and mper2 < cutoff and indel_mm != 1 : if verbose : logger . 
info ( "Alignment coverage NOT high enough to return annotation" ) return Annotation ( complete_annotation = False ) else : if verbose : logger . info ( "Alignment coverage high enough to complete annotation" ) return insr , dels
count_diffs - Counts the number of mismatches gaps and insertions and then determines if those are within an acceptable range .
779
27
5,670
def cmd_stop ( self , argv , help ) : parser = argparse . ArgumentParser ( prog = "%s stop" % self . progname , description = help , ) instances = self . get_instances ( command = 'stop' ) parser . add_argument ( "instance" , nargs = 1 , metavar = "instance" , help = "Name of the instance from the config." , choices = sorted ( instances ) ) args = parser . parse_args ( argv ) instance = instances [ args . instance [ 0 ] ] instance . stop ( )
Stops the instance
122
4
5,671
def cmd_terminate ( self , argv , help ) : from ploy . common import yesno parser = argparse . ArgumentParser ( prog = "%s terminate" % self . progname , description = help , ) instances = self . get_instances ( command = 'terminate' ) parser . add_argument ( "instance" , nargs = 1 , metavar = "instance" , help = "Name of the instance from the config." , choices = sorted ( instances ) ) args = parser . parse_args ( argv ) instance = instances [ args . instance [ 0 ] ] if not yesno ( "Are you sure you want to terminate '%s'?" % instance . config_id ) : return instance . hooks . before_terminate ( instance ) instance . terminate ( ) instance . hooks . after_terminate ( instance )
Terminates the instance
180
4
5,672
def cmd_start ( self , argv , help ) : parser = argparse . ArgumentParser ( prog = "%s start" % self . progname , description = help , ) instances = self . get_instances ( command = 'start' ) parser . add_argument ( "instance" , nargs = 1 , metavar = "instance" , help = "Name of the instance from the config." , choices = sorted ( instances ) ) parser . add_argument ( "-o" , "--override" , nargs = "*" , type = str , dest = "overrides" , metavar = "OVERRIDE" , help = "Option to override in instance config for startup script (name=value)." ) args = parser . parse_args ( argv ) overrides = self . _parse_overrides ( args ) overrides [ 'instances' ] = self . instances instance = instances [ args . instance [ 0 ] ] instance . hooks . before_start ( instance ) result = instance . start ( overrides ) instance . hooks . after_start ( instance ) if result is None : return instance . status ( )
Starts the instance
246
4
5,673
def cmd_annotate ( self , argv , help ) : parser = argparse . ArgumentParser ( prog = "%s annotate" % self . progname , description = help , ) parser . parse_args ( argv ) list ( self . instances . values ( ) ) # trigger instance augmentation for global_section in sorted ( self . config ) : for sectionname in sorted ( self . config [ global_section ] ) : print ( "[%s:%s]" % ( global_section , sectionname ) ) section = self . config [ global_section ] [ sectionname ] for option , value in sorted ( section . _dict . items ( ) ) : print ( "%s = %s" % ( option , value . value ) ) print ( " %s" % value . src ) print ( )
Prints annotated config
173
5
5,674
def cmd_debug ( self , argv , help ) : parser = argparse . ArgumentParser ( prog = "%s debug" % self . progname , description = help , ) instances = self . instances parser . add_argument ( "instance" , nargs = 1 , metavar = "instance" , help = "Name of the instance from the config." , choices = sorted ( instances ) ) parser . add_argument ( "-v" , "--verbose" , dest = "verbose" , action = "store_true" , help = "Print more info and output the startup script" ) parser . add_argument ( "-c" , "--console-output" , dest = "console_output" , action = "store_true" , help = "Prints the console output of the instance if available" ) parser . add_argument ( "-i" , "--interactive" , dest = "interactive" , action = "store_true" , help = "Creates a connection and drops you into an interactive Python session" ) parser . add_argument ( "-r" , "--raw" , dest = "raw" , action = "store_true" , help = "Outputs the raw possibly compressed startup script" ) parser . add_argument ( "-o" , "--override" , nargs = "*" , type = str , dest = "overrides" , metavar = "OVERRIDE" , help = "Option to override instance config for startup script (name=value)." ) args = parser . parse_args ( argv ) overrides = self . _parse_overrides ( args ) overrides [ 'instances' ] = self . instances instance = instances [ args . instance [ 0 ] ] if hasattr ( instance , 'startup_script' ) : startup_script = instance . startup_script ( overrides = overrides , debug = True ) max_size = getattr ( instance , 'max_startup_script_size' , 16 * 1024 ) log . info ( "Length of startup script: %s/%s" , len ( startup_script [ 'raw' ] ) , max_size ) if args . verbose : if 'startup_script' in instance . config : if startup_script [ 'original' ] == startup_script [ 'raw' ] : log . info ( "Startup script:" ) elif args . raw : log . info ( "Compressed startup script:" ) else : log . info ( "Uncompressed startup script:" ) else : log . info ( "No startup script specified" ) if args . 
raw : print ( startup_script [ 'raw' ] , end = '' ) elif args . verbose : print ( startup_script [ 'original' ] , end = '' ) if args . console_output : if hasattr ( instance , 'get_console_output' ) : print ( instance . get_console_output ( ) ) else : log . error ( "The instance doesn't support console output." ) if args . interactive : # pragma: no cover import readline from pprint import pprint local = dict ( ctrl = self , instances = self . instances , instance = instance , pprint = pprint ) readline . parse_and_bind ( 'tab: complete' ) try : import rlcompleter readline . set_completer ( rlcompleter . Completer ( local ) . complete ) except ImportError : pass __import__ ( "code" ) . interact ( local = local )
Prints some debug info for this script
763
8
5,675
def cmd_list ( self , argv , help ) : parser = argparse . ArgumentParser ( prog = "%s list" % self . progname , description = help , ) parser . add_argument ( "list" , nargs = 1 , metavar = "listname" , help = "Name of list to show." , choices = sorted ( self . list_cmds ) ) parser . add_argument ( "listopts" , metavar = "..." , nargs = argparse . REMAINDER , help = "list command options" ) args = parser . parse_args ( argv ) for name , func in sorted ( self . list_cmds [ args . list [ 0 ] ] ) : func ( args . listopts , func . __doc__ )
Return a list of various things
170
6
5,676
def cmd_ssh ( self , argv , help ) : parser = argparse . ArgumentParser ( prog = "%s ssh" % self . progname , description = help , ) instances = self . get_instances ( command = 'init_ssh_key' ) parser . add_argument ( "instance" , nargs = 1 , metavar = "instance" , help = "Name of the instance from the config." , choices = sorted ( instances ) ) parser . add_argument ( "..." , nargs = argparse . REMAINDER , help = "ssh options" ) iargs = enumerate ( argv ) sid_index = None user = None for i , arg in iargs : if not arg . startswith ( '-' ) : sid_index = i break if arg [ 1 ] in '1246AaCfgKkMNnqsTtVvXxYy' : continue elif arg [ 1 ] in 'bcDeFiLlmOopRSw' : value = iargs . next ( ) if arg [ 1 ] == 'l' : user = value [ 1 ] continue # fake parsing for nice error messages if sid_index is None : parser . parse_args ( [ ] ) else : sid = argv [ sid_index ] if '@' in sid : user , sid = sid . split ( '@' , 1 ) parser . parse_args ( [ sid ] ) instance = instances [ sid ] if user is None : user = instance . config . get ( 'user' ) try : ssh_info = instance . init_ssh_key ( user = user ) except ( instance . paramiko . SSHException , socket . error ) as e : log . error ( "Couldn't validate fingerprint for ssh connection." ) log . error ( unicode ( e ) ) log . error ( "Is the instance finished starting up?" ) sys . exit ( 1 ) client = ssh_info [ 'client' ] client . get_transport ( ) . sock . close ( ) client . close ( ) argv [ sid_index : sid_index + 1 ] = instance . ssh_args_from_info ( ssh_info ) argv [ 0 : 0 ] = [ 'ssh' ] os . execvp ( 'ssh' , argv )
Log into the instance with ssh using the automatically generated known hosts
492
12
5,677
def initialize ( self , store ) : assert isinstance ( store , stores . BaseStore ) self . messages = Queue ( ) self . store = store self . store . register ( self )
Common initialization of handlers happens here . If additional initialization is required this method must either be called with super or the child class must assign the store attribute and register itself with the store .
40
36
5,678
def prepare ( self ) : request_time = 1000.0 * self . request . request_time ( ) access_log . info ( "%d %s %.2fms" , self . get_status ( ) , self . _request_summary ( ) , request_time )
Log access .
61
3
5,679
async def publish ( self , message ) : try : self . write ( 'data: {}\n\n' . format ( message ) ) await self . flush ( ) except StreamClosedError : self . finished = True
Pushes data to a listener .
48
7
5,680
async def open ( self ) : self . store . register ( self ) while not self . finished : message = await self . messages . get ( ) await self . publish ( message )
Register with the publisher .
39
5
5,681
async def publish ( self , message ) : try : self . write_message ( dict ( data = message ) ) except WebSocketClosedError : self . _close ( )
Push a new message to the client . The data will be available as a JSON object with the key data .
38
22
5,682
def _useChunk ( self , index ) -> None : if self . currentChunk is not None : if self . currentChunkIndex == index and not self . currentChunk . closed : return self . currentChunk . close ( ) self . currentChunk = self . _openChunk ( index ) self . currentChunkIndex = index self . itemNum = self . currentChunk . numKeys + 1
Switch to specific chunk
90
4
5,683
def numKeys ( self ) -> int : chunks = self . _listChunks ( ) num_chunks = len ( chunks ) if num_chunks == 0 : return 0 count = ( num_chunks - 1 ) * self . chunkSize last_chunk = self . _openChunk ( chunks [ - 1 ] ) count += sum ( 1 for _ in last_chunk . _lines ( ) ) last_chunk . close ( ) return count
This will iterate only over the last chunk since the name of the last chunk indicates how many lines in total exist in all other chunks
99
27
5,684
def register ( self , subscriber ) : assert isinstance ( subscriber , RequestHandler ) logger . debug ( 'New subscriber' ) self . subscribers . add ( subscriber )
Register a new subscriber . This method should be invoked by listeners to start receiving messages .
34
17
5,685
def deregister ( self , subscriber ) : try : logger . debug ( 'Subscriber left' ) self . subscribers . remove ( subscriber ) except KeyError : logger . debug ( 'Error removing subscriber: ' + str ( subscriber ) )
Stop publishing to a subscriber .
50
6
5,686
def shutdown ( self ) : self . _done . set ( ) self . executor . shutdown ( wait = False )
Stop the publishing loop .
25
5
5,687
def add_alignment ( self , ref_seq , annotation ) -> Annotation : seq_features = get_seqs ( ref_seq ) annoated_align = { } allele = ref_seq . description . split ( "," ) [ 0 ] locus = allele . split ( "*" ) [ 0 ] . split ( "-" ) [ 1 ] for feat in seq_features : if feat in annotation . annotation : if isinstance ( annotation . annotation [ feat ] , DBSeq ) : seq_len = len ( str ( annotation . annotation [ feat ] ) ) ref_len = len ( seq_features [ feat ] ) else : seq_len = len ( str ( annotation . annotation [ feat ] . seq ) ) ref_len = len ( seq_features [ feat ] ) if seq_len == ref_len : seq = list ( annotation . annotation [ feat ] . seq ) gaps = self . refdata . annoated_alignments [ locus ] [ allele ] [ feat ] [ 'Gaps' ] if self . verbose and self . verbosity > 0 : self . logger . info ( self . logname + " Lengths match for " + feat ) self . logger . info ( self . logname + " Gaps at " + feat ) self . logger . info ( self . logname + "-" . join ( [ "," . join ( [ str ( s ) for s in g ] ) for g in gaps ] ) ) for i in range ( 0 , len ( gaps ) ) : for j in gaps [ i ] : loc = j seq . insert ( loc , '-' ) nseq = '' . join ( seq ) annoated_align . update ( { feat : nseq } ) else : in_seq = str ( annotation . annotation [ feat ] . seq ) ref_seq = self . refdata . annoated_alignments [ locus ] [ allele ] [ feat ] [ 'Seq' ] alignment = pairwise2 . align . globalxx ( in_seq , ref_seq ) if self . verbose and self . verbosity > 0 : self . logger . info ( self . logname + " Align2 -> in_seq != ref_len " + feat ) self . logger . info ( self . logname + " " + str ( len ( in_seq ) ) + " == " + str ( ref_len ) ) annoated_align . update ( { feat : alignment [ 0 ] [ 0 ] } ) else : nseq = '' . join ( list ( repeat ( '-' , len ( seq_features [ feat ] ) ) ) ) annoated_align . update ( { feat : nseq } ) annotation . aligned = annoated_align return annotation
add_alignment - method for adding the alignment to an annotation
591
13
5,688
def object2xml ( self , data ) : if not self . __options [ 'encoding' ] : self . set_options ( encoding = self . __encoding ) if self . __options [ 'header_declare' ] : self . __tree . append ( self . build_xml_header ( ) ) root = self . __options [ 'root' ] if not root : assert ( isinstance ( data , utils . DictTypes ) and len ( data ) == 1 ) , 'if root not specified, the data that dict object and length must be one required.' root , data = data . items ( ) [ 0 ] self . build_tree ( data , root ) xml = unicode ( '' . join ( self . __tree ) . strip ( ) ) if self . __options [ 'encoding' ] != self . __encoding : xml = xml . encode ( self . __options [ 'encoding' ] , errors = self . __options [ 'errors' ] ) return xml
r Convert python object to xml string .
214
8
5,689
def build_tree ( self , data , tagname , attrs = None , depth = 0 ) : if data is None : data = '' indent = ( '\n%s' % ( self . __options [ 'indent' ] * depth ) ) if self . __options [ 'indent' ] else '' if isinstance ( data , utils . DictTypes ) : if self . __options [ 'hasattr' ] and self . check_structure ( data . keys ( ) ) : attrs , values = self . pickdata ( data ) self . build_tree ( values , tagname , attrs , depth ) else : self . __tree . append ( '%s%s' % ( indent , self . tag_start ( tagname , attrs ) ) ) iter = data . iteritems ( ) if self . __options [ 'ksort' ] : iter = sorted ( iter , key = lambda x : x [ 0 ] , reverse = self . __options [ 'reverse' ] ) for k , v in iter : attrs = { } if self . __options [ 'hasattr' ] and isinstance ( v , utils . DictTypes ) and self . check_structure ( v . keys ( ) ) : attrs , v = self . pickdata ( v ) self . build_tree ( v , k , attrs , depth + 1 ) self . __tree . append ( '%s%s' % ( indent , self . tag_end ( tagname ) ) ) elif utils . is_iterable ( data ) : for v in data : self . build_tree ( v , tagname , attrs , depth ) else : self . __tree . append ( indent ) data = self . safedata ( data , self . __options [ 'cdata' ] ) self . __tree . append ( self . build_tag ( tagname , data , attrs ) )
r Build xml tree .
413
5
5,690
def check_structure ( self , keys ) : return set ( keys ) <= set ( [ self . __options [ 'attrkey' ] , self . __options [ 'valuekey' ] ] )
r Check structure availability by attrkey and valuekey option .
43
13
5,691
def pickdata ( self , data ) : attrs = data . get ( self . __options [ 'attrkey' ] ) or { } values = data . get ( self . __options [ 'valuekey' ] ) or '' return ( attrs , values )
r Pick data from attrkey and valuekey option .
56
12
5,692
def safedata ( self , data , cdata = True ) : safe = ( '<![CDATA[%s]]>' % data ) if cdata else cgi . escape ( str ( data ) , True ) return safe
r Convert xml special chars to entities .
51
8
5,693
def build_tag ( self , tag , text = '' , attrs = None ) : return '%s%s%s' % ( self . tag_start ( tag , attrs ) , text , self . tag_end ( tag ) )
r Build tag full info include the attributes .
53
9
5,694
def build_attr ( self , attrs ) : attrs = sorted ( attrs . iteritems ( ) , key = lambda x : x [ 0 ] ) return ' ' . join ( map ( lambda x : '%s="%s"' % x , attrs ) )
r Build tag attributes .
59
5
5,695
def tag_start ( self , tag , attrs = None ) : return '<%s %s>' % ( tag , self . build_attr ( attrs ) ) if attrs else '<%s>' % tag
r Build started tag info .
50
6
5,696
def open_file_dialog ( self ) : dialog = QtWidgets . QFileDialog sender = self . sender ( ) if sender == self . btn_open_source : textbox = self . source_path elif sender == self . btn_open_target : textbox = self . target_path folder = dialog . getExistingDirectory ( self , 'Select a file:' , textbox . text ( ) , options = QtWidgets . QFileDialog . ShowDirsOnly ) if str ( folder ) != '' : textbox . setText ( folder ) # load elements from file and display in tree if sender == self . btn_open_source : self . reset_avaliable ( folder )
Opens a file dialog to get the path to a file and put that path in the correct textbox
155
22
5,697
def class_type_changed ( self ) : if self . source_path . text ( ) : self . reset_avaliable ( self . source_path . text ( ) )
Forces a reset if the class type is changed from instruments to scripts or vice versa
38
17
5,698
def select_inputs ( self , address : str , amount : int ) -> dict : utxos = [ ] utxo_sum = Decimal ( 0 ) for tx in sorted ( self . listunspent ( address = address ) , key = itemgetter ( 'confirmations' ) ) : if tx [ "address" ] not in ( self . pa_parameters . P2TH_addr , self . pa_parameters . test_P2TH_addr ) : utxos . append ( MutableTxIn ( txid = tx [ 'txid' ] , txout = tx [ 'vout' ] , sequence = Sequence . max ( ) , script_sig = ScriptSig . empty ( ) ) ) utxo_sum += Decimal ( tx [ "amount" ] ) if utxo_sum >= amount : return { 'utxos' : utxos , 'total' : utxo_sum } if utxo_sum < amount : raise InsufficientFunds ( "Insufficient funds." ) raise Exception ( "undefined behavior :.(" )
finds appropriate utxo s to include in rawtx while being careful to never spend old transactions with a lot of coin age . Argument is integer returns list of appropriate UTXO s
240
45
5,699
def listunspent ( self , address : str = "" , minconf : int = 1 , maxconf : int = 999999 , ) -> list : if address : return self . req ( "listunspent" , [ minconf , maxconf , [ address ] ] ) return self . req ( "listunspent" , [ minconf , maxconf ] )
list UTXOs modified version to allow filtering by address .
80
12