idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
46,400 | def _get_last_cachefile_modification ( self ) : try : mtime = os . path . getmtime ( self . _tld_list_path ) except OSError : return None return datetime . fromtimestamp ( mtime ) | Get last modification of cache file with TLDs . |
46,401 | def _get_after_tld_chars ( self ) : after_tld_chars = set ( string . whitespace ) after_tld_chars |= { '/' , '\"' , '\'' , '<' , '>' , '?' , ':' , '.' , ',' } _ , right_enclosure = zip ( * self . _enclosure ) after_tld_chars |= set ( right_enclosure ) return after_tld_chars | Initialize after tld characters |
46,402 | def update_when_older ( self , days ) : last_cache = self . _get_last_cachefile_modification ( ) if last_cache is None : return self . update ( ) time_to_update = last_cache + timedelta ( days = days ) if datetime . now ( ) >= time_to_update : return self . update ( ) return True | Update TLD list cache file if the list is older than number of days given in parameter days or if does not exist . |
46,403 | def set_stop_chars ( self , stop_chars ) : warnings . warn ( "Method set_stop_chars is deprecated, " "use `set_stop_chars_left` or " "`set_stop_chars_right` instead" , DeprecationWarning ) self . _stop_chars = set ( stop_chars ) self . _stop_chars_left = self . _stop_chars self . _stop_chars_right = self . _stop_chars | Set stop characters used when determining end of URL . |
46,404 | def set_stop_chars_left ( self , stop_chars ) : if not isinstance ( stop_chars , set ) : raise TypeError ( "stop_chars should be type set " "but {} was given" . format ( type ( stop_chars ) ) ) self . _stop_chars_left = stop_chars self . _stop_chars = self . _stop_chars_left | self . _stop_chars_right | Set stop characters for text on left from TLD . Stop characters are used when determining end of URL . |
46,405 | def add_enclosure ( self , left_char , right_char ) : assert len ( left_char ) == 1 , "Parameter left_char must be character not string" assert len ( right_char ) == 1 , "Parameter right_char must be character not string" self . _enclosure . add ( ( left_char , right_char ) ) self . _after_tld_chars = self . _get_after_tld_chars ( ) | Add new enclosure pair of characters . That and should be removed when their presence is detected at beginning and end of found URL |
46,406 | def remove_enclosure ( self , left_char , right_char ) : assert len ( left_char ) == 1 , "Parameter left_char must be character not string" assert len ( right_char ) == 1 , "Parameter right_char must be character not string" rm_enclosure = ( left_char , right_char ) if rm_enclosure in self . _enclosure : self . _enclosure . remove ( rm_enclosure ) self . _after_tld_chars = self . _get_after_tld_chars ( ) | Remove enclosure pair from set of enclosures . |
46,407 | def _complete_url ( self , text , tld_pos , tld ) : left_ok = True right_ok = True max_len = len ( text ) - 1 end_pos = tld_pos start_pos = tld_pos while left_ok or right_ok : if left_ok : if start_pos <= 0 : left_ok = False else : if text [ start_pos - 1 ] not in self . _stop_chars_left : start_pos -= 1 else : left_ok = False if right_ok : if end_pos >= max_len : right_ok = False else : if text [ end_pos + 1 ] not in self . _stop_chars_right : end_pos += 1 else : right_ok = False complete_url = text [ start_pos : end_pos + 1 ] . lstrip ( '/' ) temp_tlds = { tld + c for c in self . _after_tld_chars } if complete_url [ len ( complete_url ) - len ( tld ) - 1 : ] in temp_tlds : complete_url = complete_url [ : - 1 ] complete_url = self . _split_markdown ( complete_url , tld_pos - start_pos ) complete_url = self . _remove_enclosure_from_url ( complete_url , tld_pos - start_pos , tld ) if not self . _is_domain_valid ( complete_url , tld ) : return "" return complete_url | Expand string in both sides to match whole URL . |
46,408 | def _validate_tld_match ( self , text , matched_tld , tld_pos ) : if tld_pos > len ( text ) : return False right_tld_pos = tld_pos + len ( matched_tld ) if len ( text ) > right_tld_pos : if text [ right_tld_pos ] in self . _after_tld_chars : if tld_pos > 0 and text [ tld_pos - 1 ] not in self . _stop_chars_left : return True else : if tld_pos > 0 and text [ tld_pos - 1 ] not in self . _stop_chars_left : return True return False | Validate TLD match - tells if at found position is really TLD . |
46,409 | def _split_markdown ( text_url , tld_pos ) : left_bracket_pos = text_url . find ( '[' ) if left_bracket_pos > tld_pos - 3 : return text_url right_bracket_pos = text_url . find ( ')' ) if right_bracket_pos < tld_pos : return text_url middle_pos = text_url . rfind ( "](" ) if middle_pos > tld_pos : return text_url [ left_bracket_pos + 1 : middle_pos ] return text_url | Split markdown URL . There is an issue wen Markdown URL is found . Parsing of the URL does not stop on right place so wrongly found URL has to be split . |
46,410 | def gen_urls ( self , text ) : tld_pos = 0 matched_tlds = self . _tlds_re . findall ( text ) for tld in matched_tlds : tmp_text = text [ tld_pos : ] offset = tld_pos tld_pos = tmp_text . find ( tld ) validated = self . _validate_tld_match ( text , tld , offset + tld_pos ) if tld_pos != - 1 and validated : tmp_url = self . _complete_url ( text , offset + tld_pos , tld ) if tmp_url : yield tmp_url tld_pos_url = tmp_url . find ( tld ) tld_pos += len ( tld ) + offset tld_pos += len ( tmp_url [ tld_pos_url + len ( tld ) : ] ) continue tld_pos += len ( tld ) + offset | Creates generator over found URLs in given text . |
46,411 | def find_urls ( self , text , only_unique = False ) : urls = self . gen_urls ( text ) urls = OrderedDict . fromkeys ( urls ) if only_unique else urls return list ( urls ) | Find all URLs in given text . |
46,412 | def possibly_award ( self , ** state ) : assert "user" in state if self . async : from . tasks import AsyncBadgeAward state = self . freeze ( ** state ) AsyncBadgeAward . delay ( self , state ) return self . actually_possibly_award ( ** state ) | Will see if the user should be awarded a badge . If this badge is asynchronous it just queues up the badge awarding . |
46,413 | def actually_possibly_award ( self , ** state ) : user = state [ "user" ] force_timestamp = state . pop ( "force_timestamp" , None ) awarded = self . award ( ** state ) if awarded is None : return if awarded . level is None : assert len ( self . levels ) == 1 awarded . level = 1 awarded = awarded . level - 1 assert awarded < len ( self . levels ) if ( not self . multiple and BadgeAward . objects . filter ( user = user , slug = self . slug , level = awarded ) ) : return extra_kwargs = { } if force_timestamp is not None : extra_kwargs [ "awarded_at" ] = force_timestamp badge = BadgeAward . objects . create ( user = user , slug = self . slug , level = awarded , ** extra_kwargs ) self . send_badge_messages ( badge ) badge_awarded . send ( sender = self , badge_award = badge ) | Does the actual work of possibly awarding a badge . |
46,414 | def send_badge_messages ( self , badge_award ) : user_message = getattr ( badge_award . badge , "user_message" , None ) if callable ( user_message ) : message = user_message ( badge_award ) else : message = user_message if message is not None : badge_award . user . message_set . create ( message = message ) | If the Badge class defines a message send it to the user who was just awarded the badge . |
46,415 | def visit_Print ( self , node ) : self . prints_used [ ( node . lineno , node . col_offset ) ] = VIOLATIONS [ "found" ] [ PRINT_FUNCTION_NAME ] | Only exists in python 2 . |
46,416 | def create_lockfile ( self ) : process = subprocess . Popen ( self . pin_command , stdout = subprocess . PIPE , stderr = subprocess . PIPE , ) stdout , stderr = process . communicate ( ) if process . returncode == 0 : self . fix_lockfile ( ) else : logger . critical ( "ERROR executing %s" , ' ' . join ( self . pin_command ) ) logger . critical ( "Exit code: %s" , process . returncode ) logger . critical ( stdout . decode ( 'utf-8' ) ) logger . critical ( stderr . decode ( 'utf-8' ) ) raise RuntimeError ( "Failed to pip-compile {0}" . format ( self . infile ) ) | Write recursive dependencies list to outfile with hard - pinned versions . Then fix it . |
46,417 | def infile ( self ) : return os . path . join ( OPTIONS [ 'base_dir' ] , '{0}.{1}' . format ( self . name , OPTIONS [ 'in_ext' ] ) ) | Path of the input file |
46,418 | def outfile ( self ) : return os . path . join ( OPTIONS [ 'base_dir' ] , '{0}.{1}' . format ( self . name , OPTIONS [ 'out_ext' ] ) ) | Path of the output file |
46,419 | def pin_command ( self ) : parts = [ 'pip-compile' , '--no-header' , '--verbose' , '--rebuild' , '--no-index' , '--output-file' , self . outfile , self . infile , ] if OPTIONS [ 'upgrade' ] : parts . insert ( 3 , '--upgrade' ) if self . add_hashes : parts . insert ( 1 , '--generate-hashes' ) return parts | Compose pip - compile shell command |
46,420 | def fix_lockfile ( self ) : with open ( self . outfile , 'rt' ) as fp : lines = [ self . fix_pin ( line ) for line in self . concatenated ( fp ) ] with open ( self . outfile , 'wt' ) as fp : fp . writelines ( [ line + '\n' for line in lines if line is not None ] ) | Run each line of outfile through fix_pin |
46,421 | def fix_pin ( self , line ) : dep = Dependency ( line ) if dep . valid : if dep . package in self . ignore : ignored_version = self . ignore [ dep . package ] if ignored_version is not None : if dep . version and dep . version != ignored_version : logger . error ( "Package %s was resolved to different " "versions in different environments: %s and %s" , dep . package , dep . version , ignored_version , ) raise RuntimeError ( "Please add constraints for the package " "version listed above" ) return None self . packages [ dep . package ] = dep . version if self . forbid_post or dep . is_compatible : dep . drop_post ( ) return dep . serialize ( ) return line . strip ( ) | Fix dependency by removing post - releases from versions and loosing constraints on internal packages . Drop packages from ignore set |
46,422 | def add_references ( self , other_names ) : if not other_names : return with open ( self . outfile , 'rt' ) as fp : header , body = self . split_header ( fp ) with open ( self . outfile , 'wt' ) as fp : fp . writelines ( header ) fp . writelines ( '-r {0}.{1}\n' . format ( other_name , OPTIONS [ 'out_ext' ] ) for other_name in sorted ( other_names ) ) fp . writelines ( body ) | Add references to other_names in outfile |
46,423 | def replace_header ( self , header_text ) : with open ( self . outfile , 'rt' ) as fp : _ , body = self . split_header ( fp ) with open ( self . outfile , 'wt' ) as fp : fp . write ( header_text ) fp . writelines ( body ) | Replace pip - compile header with custom text |
46,424 | def order_by_refs ( envs ) : topology = { env [ 'name' ] : set ( env [ 'refs' ] ) for env in envs } by_name = { env [ 'name' ] : env for env in envs } return [ by_name [ name ] for name in toposort_flatten ( topology ) ] | Return topologicaly sorted list of environments . I . e . all referenced environments are placed before their references . |
46,425 | def is_compatible ( self ) : for pattern in OPTIONS [ 'compatible_patterns' ] : if fnmatch ( self . package . lower ( ) , pattern ) : return True return False | Check if package name is matched by compatible_patterns |
46,426 | def drop_post ( self ) : post_index = self . version . find ( '.post' ) if post_index >= 0 : self . version = self . version [ : post_index ] | Remove . postXXXX postfix from version |
46,427 | def verify_environments ( ) : env_confs = discover ( os . path . join ( OPTIONS [ 'base_dir' ] , '*.' + OPTIONS [ 'in_ext' ] , ) ) success = True for conf in env_confs : env = Environment ( name = conf [ 'name' ] ) current_comment = generate_hash_comment ( env . infile ) existing_comment = parse_hash_comment ( env . outfile ) if current_comment == existing_comment : logger . info ( "OK - %s was generated from %s." , env . outfile , env . infile ) else : logger . error ( "ERROR! %s was not regenerated after changes in %s." , env . outfile , env . infile ) logger . error ( "Expecting: %s" , current_comment . strip ( ) ) logger . error ( "Found: %s" , existing_comment . strip ( ) ) success = False return success | For each environment verify hash comments and report failures . If any failure occured exit with code 1 . |
46,428 | def generate_hash_comment ( file_path ) : with open ( file_path , 'rb' ) as fp : hexdigest = hashlib . sha1 ( fp . read ( ) . strip ( ) ) . hexdigest ( ) return "# SHA1:{0}\n" . format ( hexdigest ) | Read file with given file_path and return string of format |
46,429 | def parse_value ( key , value ) : default = OPTIONS . get ( key ) if isinstance ( default , collections . Iterable ) : if not isinstance ( default , six . string_types ) : return [ item . strip ( ) for item in value . split ( ',' ) ] return value | Parse value as comma - delimited list if default value for it is list |
46,430 | def python_version_matchers ( ) : version = sys . version_info patterns = [ "{0}" , "{0}{1}" , "{0}.{1}" , ] matchers = [ pattern . format ( * version ) for pattern in patterns ] + [ None ] return set ( matchers ) | Return set of string representations of current python version |
46,431 | def verify ( ctx ) : oks = run_configurations ( skipper ( verify_environments ) , read_sections , ) ctx . exit ( 0 if False not in oks else 1 ) | Upgrade locked dependency versions |
46,432 | def skipper ( func ) : @ functools . wraps ( func ) def wrapped ( ) : key = ( OPTIONS [ 'base_dir' ] , OPTIONS [ 'in_ext' ] , OPTIONS [ 'out_ext' ] ) if key not in seen : seen [ key ] = func ( ) return seen [ key ] seen = { } return wrapped | Decorator that memorizes base_dir in_ext and out_ext from OPTIONS and skips execution for duplicates . |
46,433 | def run_configurations ( callback , sections_reader ) : base = dict ( OPTIONS ) sections = sections_reader ( ) if sections is None : logger . info ( "Configuration not found in .ini files. " "Running with default settings" ) recompile ( ) elif sections == [ ] : logger . info ( "Configuration does not match current runtime. " "Exiting" ) results = [ ] for section , options in sections : OPTIONS . clear ( ) OPTIONS . update ( base ) OPTIONS . update ( options ) logger . debug ( "Running configuration from section \"%s\". OPTIONS: %r" , section , OPTIONS ) results . append ( callback ( ) ) return results | Parse configurations and execute callback for matching . |
46,434 | def recompile ( ) : pinned_packages = { } env_confs = discover ( os . path . join ( OPTIONS [ 'base_dir' ] , '*.' + OPTIONS [ 'in_ext' ] , ) , ) if OPTIONS [ 'header_file' ] : with open ( OPTIONS [ 'header_file' ] ) as fp : base_header_text = fp . read ( ) else : base_header_text = DEFAULT_HEADER hashed_by_reference = set ( ) for name in OPTIONS [ 'add_hashes' ] : hashed_by_reference . update ( reference_cluster ( env_confs , name ) ) included_and_refs = set ( OPTIONS [ 'include_names' ] ) for name in set ( included_and_refs ) : included_and_refs . update ( recursive_refs ( env_confs , name ) ) for conf in env_confs : if included_and_refs : if conf [ 'name' ] not in included_and_refs : continue rrefs = recursive_refs ( env_confs , conf [ 'name' ] ) add_hashes = conf [ 'name' ] in hashed_by_reference env = Environment ( name = conf [ 'name' ] , ignore = merged_packages ( pinned_packages , rrefs ) , forbid_post = conf [ 'name' ] in OPTIONS [ 'forbid_post' ] , add_hashes = add_hashes , ) logger . info ( "Locking %s to %s. References: %r" , env . infile , env . outfile , sorted ( rrefs ) ) env . create_lockfile ( ) header_text = generate_hash_comment ( env . infile ) + base_header_text env . replace_header ( header_text ) env . add_references ( conf [ 'refs' ] ) pinned_packages [ conf [ 'name' ] ] = env . packages | Compile requirements files for all environments . |
46,435 | def merged_packages ( env_packages , names ) : combined_packages = sorted ( itertools . chain . from_iterable ( env_packages [ name ] . items ( ) for name in names ) ) result = { } errors = set ( ) for name , version in combined_packages : if name in result : if result [ name ] != version : errors . add ( ( name , version , result [ name ] ) ) else : result [ name ] = version if errors : for error in sorted ( errors ) : logger . error ( "Package %s was resolved to different " "versions in different environments: %s and %s" , error [ 0 ] , error [ 1 ] , error [ 2 ] , ) raise RuntimeError ( "Please add constraints for the package version listed above" ) return result | Return union set of environment packages with given names |
46,436 | def recursive_refs ( envs , name ) : refs_by_name = { env [ 'name' ] : set ( env [ 'refs' ] ) for env in envs } refs = refs_by_name [ name ] if refs : indirect_refs = set ( itertools . chain . from_iterable ( [ recursive_refs ( envs , ref ) for ref in refs ] ) ) else : indirect_refs = set ( ) return set . union ( refs , indirect_refs ) | Return set of recursive refs for given env name |
46,437 | def reference_cluster ( envs , name ) : edges = [ set ( [ env [ 'name' ] , ref ] ) for env in envs for ref in env [ 'refs' ] ] prev , cluster = set ( ) , set ( [ name ] ) while prev != cluster : prev = set ( cluster ) to_visit = [ ] for edge in edges : if cluster & edge : cluster |= edge else : to_visit . append ( edge ) edges = to_visit return cluster | Return set of all env names referencing or referenced by given name . |
46,438 | def get_requests_session ( ) : session = requests . sessions . Session ( ) session . mount ( 'http://' , HTTPAdapter ( pool_connections = 25 , pool_maxsize = 25 , pool_block = True ) ) session . mount ( 'https://' , HTTPAdapter ( pool_connections = 25 , pool_maxsize = 25 , pool_block = True ) ) return session | Set connection pool maxsize and block value to avoid connection pool full warnings . |
46,439 | def request_tokens ( self , amount , account ) : address = account . address try : tx_hash = self . send_transaction ( 'requestTokens' , ( amount , ) , transact = { 'from' : address , 'passphrase' : account . password } ) logging . debug ( f'{address} requests {amount} tokens, returning receipt' ) try : receipt = Web3Provider . get_web3 ( ) . eth . waitForTransactionReceipt ( tx_hash , timeout = 20 ) logging . debug ( f'requestTokens receipt: {receipt}' ) except Timeout : receipt = None if not receipt : return False if receipt . status == 0 : logging . warning ( f'request tokens failed: Tx-receipt={receipt}' ) logging . warning ( f'request tokens failed: account {address}' ) return False rfe = EventFilter ( 'RequestFrequencyExceeded' , self . events . RequestFrequencyExceeded , argument_filters = { 'requester' : Web3Provider . get_web3 ( ) . toBytes ( hexstr = address ) } , from_block = 'latest' , to_block = 'latest' , ) logs = rfe . get_all_entries ( max_tries = 5 ) if logs : logging . warning ( f'request tokens failed RequestFrequencyExceeded' ) logging . info ( f'RequestFrequencyExceeded event logs: {logs}' ) return False rle = EventFilter ( 'RequestLimitExceeded' , self . events . RequestLimitExceeded , argument_filters = { 'requester' : Web3Provider . get_web3 ( ) . toBytes ( hexstr = address ) } , from_block = 'latest' , to_block = 'latest' , ) logs = rle . get_all_entries ( max_tries = 5 ) if logs : logging . warning ( f'request tokens failed RequestLimitExceeded' ) logging . info ( f'RequestLimitExceeded event logs: {logs}' ) return False return True except ValueError as err : raise OceanInvalidTransaction ( f'Requesting {amount} tokens' f' to {address} failed with error: {err}' ) | Request an amount of tokens for a particular address . This transaction has gas cost |
46,440 | def get_network_name ( network_id ) : if os . environ . get ( 'KEEPER_NETWORK_NAME' ) : logging . debug ( 'keeper network name overridden by an environment variable: {}' . format ( os . environ . get ( 'KEEPER_NETWORK_NAME' ) ) ) return os . environ . get ( 'KEEPER_NETWORK_NAME' ) return Keeper . _network_name_map . get ( network_id , Keeper . DEFAULT_NETWORK_NAME ) | Return the keeper network name based on the current ethereum network id . Return development for every network id that is not mapped . |
46,441 | def unlock_account ( account ) : return Web3Provider . get_web3 ( ) . personal . unlockAccount ( account . address , account . password ) | Unlock the account . |
46,442 | def get_condition_name_by_address ( self , address ) : if self . lock_reward_condition . address == address : return 'lockReward' elif self . access_secret_store_condition . address == address : return 'accessSecretStore' elif self . escrow_reward_condition . address == address : return 'escrowReward' else : logging . error ( f'The current address {address} is not a condition address' ) | Return the condition name for a given address . |
46,443 | def consume_service ( service_agreement_id , service_endpoint , account , files , destination_folder , index = None ) : signature = Keeper . get_instance ( ) . sign_hash ( service_agreement_id , account ) if index is not None : assert isinstance ( index , int ) , logger . error ( 'index has to be an integer.' ) assert index >= 0 , logger . error ( 'index has to be 0 or a positive integer.' ) assert index < len ( files ) , logger . error ( 'index can not be bigger than the number of files' ) consume_url = Brizo . _create_consume_url ( service_endpoint , service_agreement_id , account , None , signature , index ) logger . info ( f'invoke consume endpoint with this url: {consume_url}' ) response = Brizo . _http_client . get ( consume_url , stream = True ) file_name = Brizo . _get_file_name ( response ) Brizo . write_file ( response , destination_folder , file_name ) else : for i , _file in enumerate ( files ) : consume_url = Brizo . _create_consume_url ( service_endpoint , service_agreement_id , account , _file , signature , i ) logger . info ( f'invoke consume endpoint with this url: {consume_url}' ) response = Brizo . _http_client . get ( consume_url , stream = True ) file_name = Brizo . _get_file_name ( response ) Brizo . write_file ( response , destination_folder , file_name ) | Call the brizo endpoint to get access to the different files that form the asset . |
46,444 | def _prepare_consume_payload ( did , service_agreement_id , service_definition_id , signature , consumer_address ) : return json . dumps ( { 'did' : did , 'serviceAgreementId' : service_agreement_id , ServiceAgreement . SERVICE_DEFINITION_ID : service_definition_id , 'signature' : signature , 'consumerAddress' : consumer_address } ) | Prepare a payload to send to Brizo . |
46,445 | def get_brizo_url ( config ) : brizo_url = 'http://localhost:8030' if config . has_option ( 'resources' , 'brizo.url' ) : brizo_url = config . get ( 'resources' , 'brizo.url' ) or brizo_url brizo_path = '/api/v1/brizo' return f'{brizo_url}{brizo_path}' | Return the Brizo component url . |
46,446 | def validate ( metadata ) : for section_key in Metadata . REQUIRED_SECTIONS : if section_key not in metadata or not metadata [ section_key ] or not isinstance ( metadata [ section_key ] , dict ) : return False section = Metadata . MAIN_SECTIONS [ section_key ] section_metadata = metadata [ section_key ] for subkey in section . REQUIRED_VALUES_KEYS : if subkey not in section_metadata or section_metadata [ subkey ] is None : return False return True | Validator of the metadata composition |
46,447 | def get_example ( ) : example = dict ( ) for section_key , section in Metadata . MAIN_SECTIONS . items ( ) : example [ section_key ] = section . EXAMPLE . copy ( ) return example | Retrieve an example of the metadata |
46,448 | def encrypt_document ( self , document_id , content , threshold = 0 ) : return self . _secret_store_client ( self . _account ) . publish_document ( remove_0x_prefix ( document_id ) , content , threshold ) | encrypt string data using the DID as an secret store id if secret store is enabled then return the result from secret store encryption |
46,449 | def download ( service_agreement_id , service_definition_id , ddo , consumer_account , destination , brizo , secret_store , index = None ) : did = ddo . did encrypted_files = ddo . metadata [ 'base' ] [ 'encryptedFiles' ] encrypted_files = ( encrypted_files if isinstance ( encrypted_files , str ) else encrypted_files [ 0 ] ) sa = ServiceAgreement . from_ddo ( service_definition_id , ddo ) consume_url = sa . consume_endpoint if not consume_url : logger . error ( 'Consume asset failed, service definition is missing the "serviceEndpoint".' ) raise AssertionError ( 'Consume asset failed, service definition is missing the "serviceEndpoint".' ) if ddo . get_service ( 'Authorization' ) : secret_store_service = ddo . get_service ( service_type = ServiceTypes . AUTHORIZATION ) secret_store_url = secret_store_service . endpoints . service secret_store . set_secret_store_url ( secret_store_url ) decrypted_content_urls = json . loads ( secret_store . decrypt_document ( did_to_id ( did ) , encrypted_files ) ) if isinstance ( decrypted_content_urls , str ) : decrypted_content_urls = [ decrypted_content_urls ] logger . debug ( f'got decrypted contentUrls: {decrypted_content_urls}' ) if not os . path . isabs ( destination ) : destination = os . path . abspath ( destination ) if not os . path . exists ( destination ) : os . mkdir ( destination ) asset_folder = os . path . join ( destination , f'datafile.{did_to_id(did)}.{sa.service_definition_id}' ) if not os . path . exists ( asset_folder ) : os . mkdir ( asset_folder ) if index is not None : assert isinstance ( index , int ) , logger . error ( 'index has to be an integer.' ) assert index >= 0 , logger . error ( 'index has to be 0 or a positive integer.' ) assert index < len ( decrypted_content_urls ) , logger . error ( 'index can not be bigger than the number of files' ) brizo . 
consume_service ( service_agreement_id , consume_url , consumer_account , decrypted_content_urls , asset_folder , index ) return asset_folder | Download asset data files or result files from a compute job . |
46,450 | def set_key_value ( self , value , store_type = PUBLIC_KEY_STORE_TYPE_BASE64 ) : if isinstance ( value , dict ) : if PUBLIC_KEY_STORE_TYPE_HEX in value : self . set_key_value ( value [ PUBLIC_KEY_STORE_TYPE_HEX ] , PUBLIC_KEY_STORE_TYPE_HEX ) elif PUBLIC_KEY_STORE_TYPE_BASE64 in value : self . set_key_value ( value [ PUBLIC_KEY_STORE_TYPE_BASE64 ] , PUBLIC_KEY_STORE_TYPE_BASE64 ) elif PUBLIC_KEY_STORE_TYPE_BASE85 in value : self . set_key_value ( value [ PUBLIC_KEY_STORE_TYPE_BASE85 ] , PUBLIC_KEY_STORE_TYPE_BASE85 ) elif PUBLIC_KEY_STORE_TYPE_JWK in value : self . set_key_value ( value [ PUBLIC_KEY_STORE_TYPE_JWK ] , PUBLIC_KEY_STORE_TYPE_JWK ) elif PUBLIC_KEY_STORE_TYPE_PEM in value : self . set_key_value ( value [ PUBLIC_KEY_STORE_TYPE_PEM ] , PUBLIC_KEY_STORE_TYPE_PEM ) else : self . _value = value self . _store_type = store_type | Set the key value based on it s storage type . |
46,451 | def set_encode_key_value ( self , value , store_type ) : self . _store_type = store_type if store_type == PUBLIC_KEY_STORE_TYPE_HEX : self . _value = value . hex ( ) elif store_type == PUBLIC_KEY_STORE_TYPE_BASE64 : self . _value = b64encode ( value ) . decode ( ) elif store_type == PUBLIC_KEY_STORE_TYPE_BASE85 : self . _value = b85encode ( value ) . decode ( ) elif store_type == PUBLIC_KEY_STORE_TYPE_JWK : raise NotImplementedError else : self . _value = value return value | Save the key value base on it s storage type . |
46,452 | def get_decode_value ( self ) : if self . _store_type == PUBLIC_KEY_STORE_TYPE_HEX : value = bytes . fromhex ( self . _value ) elif self . _store_type == PUBLIC_KEY_STORE_TYPE_BASE64 : value = b64decode ( self . _value ) elif self . _store_type == PUBLIC_KEY_STORE_TYPE_BASE85 : value = b85decode ( self . _value ) elif self . _store_type == PUBLIC_KEY_STORE_TYPE_JWK : raise NotImplementedError else : value = self . _value return value | Return the key value based on it s storage type . |
46,453 | def as_text ( self , is_pretty = False ) : values = { 'id' : self . _id , 'type' : self . _type } if self . _owner : values [ 'owner' ] = self . _owner if is_pretty : return json . dumps ( values , indent = 4 , separators = ( ',' , ': ' ) ) return json . dumps ( values ) | Return the key as JSON text . |
46,454 | def as_dictionary ( self ) : values = { 'id' : self . _id , 'type' : self . _type } if self . _owner : values [ 'owner' ] = self . _owner return values | Return the key as a python dictionary . |
46,455 | def get_agreement ( self , agreement_id ) : agreement = self . contract_concise . getAgreement ( agreement_id ) if agreement and len ( agreement ) == 6 : agreement = AgreementValues ( * agreement ) did = add_0x_prefix ( agreement . did . hex ( ) ) cond_ids = [ add_0x_prefix ( _id . hex ( ) ) for _id in agreement . condition_ids ] return AgreementValues ( did , agreement . owner , agreement . template_id , cond_ids , agreement . updated_by , agreement . block_number_updated ) return None | Retrieve the agreement for a agreement_id . |
46,456 | def _load ( contract_name ) : contract_definition = ContractHandler . get_contract_dict_by_name ( contract_name ) address = Web3Provider . get_web3 ( ) . toChecksumAddress ( contract_definition [ 'address' ] ) abi = contract_definition [ 'abi' ] contract = Web3Provider . get_web3 ( ) . eth . contract ( address = address , abi = abi ) ContractHandler . _contracts [ contract_name ] = ( contract , ConciseContract ( contract ) ) return ContractHandler . _contracts [ contract_name ] | Retrieve the contract instance for contract_name that represent the smart contract in the keeper network . |
46,457 | def get_contract_dict_by_name ( contract_name ) : network_name = Keeper . get_network_name ( Keeper . get_network_id ( ) ) . lower ( ) artifacts_path = ConfigProvider . get_config ( ) . keeper_path path = ContractHandler . _get_contract_file_path ( artifacts_path , contract_name , network_name ) if not ( path and os . path . exists ( path ) ) : path = ContractHandler . _get_contract_file_path ( artifacts_path , contract_name , network_name . lower ( ) ) if not ( path and os . path . exists ( path ) ) : path = ContractHandler . _get_contract_file_path ( artifacts_path , contract_name , Keeper . DEFAULT_NETWORK_NAME ) if not ( path and os . path . exists ( path ) ) : raise FileNotFoundError ( f'Keeper contract {contract_name} file ' f'not found in {artifacts_path} ' f'using network name {network_name}' ) with open ( path ) as f : contract_dict = json . loads ( f . read ( ) ) return contract_dict | Retrieve the Contract instance for a given contract name . |
46,458 | def buy_asset ( ) : ConfigProvider . set_config ( ExampleConfig . get_config ( ) ) config = ConfigProvider . get_config ( ) ocn = Ocean ( ) acc = get_publisher_account ( config ) if not acc : acc = ( [ acc for acc in ocn . accounts . list ( ) if acc . password ] or ocn . accounts . list ( ) ) [ 0 ] ddo = ocn . assets . create ( Metadata . get_example ( ) , acc , providers = [ acc . address ] , use_secret_store = False ) logging . info ( f'registered ddo: {ddo.did}' ) keeper = Keeper . get_instance ( ) if 'TEST_LOCAL_NILE' in os . environ and os . environ [ 'TEST_LOCAL_NILE' ] == '1' : provider = keeper . did_registry . to_checksum_address ( '0x413c9ba0a05b8a600899b41b0c62dd661e689354' ) keeper . did_registry . add_provider ( ddo . asset_id , provider , acc ) logging . debug ( f'is did provider: ' f'{keeper.did_registry.is_did_provider(ddo.asset_id, provider)}' ) cons_ocn = Ocean ( ) consumer_account = get_account_from_config ( config , 'parity.address1' , 'parity.password1' ) service = ddo . get_service ( service_type = ServiceTypes . ASSET_ACCESS ) cons_ocn . accounts . request_tokens ( consumer_account , 100 ) sa = ServiceAgreement . from_service_dict ( service . as_dictionary ( ) ) agreement_id = cons_ocn . assets . order ( ddo . did , sa . service_definition_id , consumer_account ) logging . info ( 'placed order: %s, %s' , ddo . did , agreement_id ) i = 0 while ocn . agreements . is_access_granted ( agreement_id , ddo . did , consumer_account . address ) is not True and i < 30 : time . sleep ( 1 ) i += 1 assert ocn . agreements . is_access_granted ( agreement_id , ddo . did , consumer_account . address ) ocn . assets . consume ( agreement_id , ddo . did , sa . service_definition_id , consumer_account , config . downloads_path ) logging . info ( 'Success buying asset.' ) | Requires all ocean services running . |
def token_approve(self, spender_address, price, from_account):
    """Approve `spender_address` to spend `price` tokens for `from_account`.

    :return: bool -- True when the approve transaction succeeded.
    """
    web3 = Web3Provider.get_web3()
    if not web3.isChecksumAddress(spender_address):
        spender_address = web3.toChecksumAddress(spender_address)
    tx_hash = self.send_transaction(
        'approve',
        (spender_address, price),
        transact={'from': from_account.address,
                  'passphrase': from_account.password})
    receipt = self.get_tx_receipt(tx_hash)
    return receipt.status == 1
def transfer(self, receiver_address, amount, from_account):
    """Transfer `amount` tokens from `from_account` to `receiver_address`.

    :return: bool -- True when the transfer transaction succeeded.
    """
    tx_hash = self.send_transaction(
        'transfer',
        (receiver_address, amount),
        transact={'from': from_account.address,
                  'passphrase': from_account.password})
    receipt = self.get_tx_receipt(tx_hash)
    return receipt.status == 1
def fulfill_access_secret_store_condition(event, agreement_id, did, service_agreement,
                                          consumer_address, publisher_account):
    """Fulfill the accessSecretStore condition for an agreement.

    :param event: the triggering contract event (logged only)
    :param agreement_id: id of the service agreement, hex str
    :param did: the asset DID
    :param service_agreement: ServiceAgreement holding the condition parameters
    :param consumer_address: ethereum address of the consumer
    :param publisher_account: Account used to sign the fulfill transaction
    """
    logger.debug(f"release reward after event {event}.")
    name_to_parameter = {
        param.name: param
        for param in service_agreement.condition_by_name['accessSecretStore'].parameters}
    document_id = add_0x_prefix(name_to_parameter['_documentId'].value)
    asset_id = add_0x_prefix(did_to_id(did))
    assert document_id == asset_id, f'document_id {document_id} <=> asset_id {asset_id} mismatch.'
    # Bug fix: the original wrapped this in `try/except Exception as e: raise e`,
    # a no-op re-raise that only truncated the traceback context; removed.
    keeper = Keeper.get_instance()
    tx_hash = keeper.access_secret_store_condition.fulfill(
        agreement_id, document_id, consumer_address, publisher_account)
    process_tx_receipt(
        tx_hash,
        keeper.access_secret_store_condition.FULFILLED_EVENT,
        'AccessSecretStoreCondition.Fulfilled')
def retire(self, did):
    """Retire (remove) this DID's DDO record from Aquarius.

    :return: bool -- False when Aquarius reports an error, True otherwise.
    """
    try:
        ddo = self.resolve(did)
        metadata_service = ddo.find_service_by_type(ServiceTypes.METADATA)
        aquarius = self._get_aquarius(metadata_service.endpoints.service)
        aquarius.retire_asset_ddo(did)
    except AquariusGenericError as err:
        logger.error(err)
        return False
    return True
def search(self, text, sort=None, offset=100, page=1, aquarius_url=None):
    """Full-text search for assets in Aquarius.

    :param text: text to search for
    :param sort: optional sort criteria passed through to Aquarius
    :param offset: page size
    :param page: 1-based page number
    :raises AssertionError: when page < 1.
    :return: list of DDO instances matching the query.
    """
    assert page >= 1, f'Invalid page value {page}. Required page >= 1.'
    logger.info(f'Searching asset containing: {text}')
    results = self._get_aquarius(aquarius_url).text_search(text, sort, offset, page)['results']
    return [DDO(dictionary=record) for record in results]
def query(self, query, sort=None, offset=100, page=1, aquarius_url=None):
    """Search for assets in Aquarius using a structured query.

    :return: list of DDO instances matching the query.
    """
    logger.info(f'Searching asset query: {query}')
    aquarius = self._get_aquarius(aquarius_url)
    results = aquarius.query_search(query, sort, offset, page)['results']
    return [DDO(dictionary=record) for record in results]
def order(self, did, service_definition_id, consumer_account, auto_consume=False):
    """Sign a service agreement and request its on-chain creation.

    :param did: the asset DID
    :param service_definition_id: id of the service within the DDO
    :param consumer_account: Account placing the order
    :param auto_consume: whether to consume automatically once access is granted
    :return: str -- the new agreement id.
    """
    # Bug fix: the original message was two f-strings with no placeholders
    # ("... address `consumer_account`"), so the offending address was never shown.
    assert consumer_account.address in self._keeper.accounts, (
        f'Unrecognized consumer address {consumer_account.address}')
    agreement_id, signature = self._agreements.prepare(
        did, service_definition_id, consumer_account)
    logger.debug(f'about to request create agreement: {agreement_id}')
    self._agreements.send(did, agreement_id, service_definition_id, signature,
                          consumer_account, auto_consume=auto_consume)
    return agreement_id
def consume(self, service_agreement_id, did, service_definition_id, consumer_account,
            destination, index=None):
    """Download the asset data files for a fulfilled service agreement.

    :param service_agreement_id: id of the agreement granting access
    :param did: the asset DID
    :param service_definition_id: id of the access service in the DDO
    :param consumer_account: Account consuming the asset
    :param destination: local folder to download into
    :param index: optional int >= 0 selecting a single file to download
    :return: result of the asset consumer download.
    """
    ddo = self.resolve(did)
    if index is not None:
        # Bug fix: the original used `assert cond, logger.error(...)`; logger.error
        # returns None, so failed assertions raised `AssertionError: None` (and
        # only logged on failure). Plain string messages attach the reason.
        assert isinstance(index, int), 'index has to be an integer.'
        assert index >= 0, 'index has to be 0 or a positive integer.'
    return self._asset_consumer.download(
        service_agreement_id, service_definition_id, ddo, consumer_account,
        destination, BrizoProvider.get_brizo(),
        self._get_secret_store(consumer_account), index)
def propose(self, template_address, account):
    """Propose a new agreement template.

    :return: bool -- True when proposed (or already in proposed state).
    """
    try:
        return self._keeper.template_manager.propose_template(template_address, account)
    except ValueError as err:
        template_values = self._keeper.template_manager.get_template(template_address)
        if not template_values:
            logger.warning(f'Propose template failed: {err}')
            return False
        if template_values.state != 1:
            logger.warning(
                f'Propose template failed, current state is set to {template_values.state}')
            return False
        # Already proposed (state == 1): treat as success.
        return True
def approve(self, template_address, account):
    """Approve a proposed template.

    The account must be the owner of the templateManager contract.

    :return: bool -- True when approved (or already approved).
    """
    try:
        return self._keeper.template_manager.approve_template(template_address, account)
    except ValueError as err:
        template_values = self._keeper.template_manager.get_template(template_address)
        if not template_values:
            logger.warning(f'Approve template failed: {err}')
            return False
        state = template_values.state
        if state == 1:
            logger.warning(f'Approve template failed, this template is '
                           f'currently in "proposed" state.')
            return False
        if state == 3:
            logger.warning(f'Approve template failed, this template appears to be '
                           f'revoked.')
            return False
        # state == 2 means already approved; anything else is a failure.
        return state == 2
def revoke(self, template_address, account):
    """Revoke an approved template.

    The account must be the templateManager contract owner or admin.

    :return: bool -- True when the revoke transaction succeeded.
    """
    try:
        return self._keeper.template_manager.revoke_template(template_address, account)
    except ValueError as err:
        template_values = self._keeper.template_manager.get_template(template_address)
        if not template_values:
            logger.warning(f'Cannot revoke template since it does not exist: {err}')
            return False
        logger.warning(f'Only template admin or owner can revoke a template: {err}')
        return False
def get_price(self):
    """Return the value of the first `_amount` condition parameter, or None."""
    return next(
        (param.value
         for condition in self.conditions
         for param in condition.parameters
         if param.name == '_amount'),
        None)
def get_service_agreement_hash(self, agreement_id, asset_id, consumer_address,
                               publisher_address, keeper):
    """Return the hash of the agreement values that a consumer signs."""
    condition_ids = self.generate_agreement_condition_ids(
        agreement_id, asset_id, consumer_address, publisher_address, keeper)
    return ServiceAgreement.generate_service_agreement_hash(
        self.template_id,
        condition_ids,
        self.conditions_timelocks,
        self.conditions_timeouts,
        agreement_id)
def keeper_path(self):
    """Path where the keeper-contracts artifacts are located.

    Resolution order: the configured path if it exists, then the active
    virtualenv's `artifacts` folder, then the site prefix's `artifacts`.
    """
    configured = Path(self.get(self._section_name, NAME_KEEPER_PATH)).expanduser().resolve()
    if os.path.exists(configured):
        return configured
    virtual_env = os.getenv('VIRTUAL_ENV')
    if virtual_env:
        return os.path.join(virtual_env, 'artifacts')
    return os.path.join(site.PREFIXES[0], 'artifacts')
def add_public_key(self, did, public_key):
    """Append a new EthereumECDSAKey public key entry for `did`."""
    logger.debug(f'Adding public key {public_key} to the did {did}')
    key = PublicKeyBase(did, **{"owner": public_key, "type": "EthereumECDSAKey"})
    self._public_keys.append(key)
def add_authentication(self, public_key, authentication_type=None):
    """Append an authentication entry referencing `public_key`.

    When `public_key` is falsy an empty dict is appended (matches the
    original behavior).
    """
    if public_key:
        authentication = {'type': authentication_type, 'publicKey': public_key}
    else:
        authentication = {}
    logger.debug(f'Adding authentication {authentication}')
    self._authentications.append(authentication)
def add_service(self, service_type, service_endpoint=None, values=None):
    """Append a service to the DDO; accepts a Service object or a type string."""
    if isinstance(service_type, Service):
        service = service_type
    else:
        service = Service(service_endpoint, service_type, values, did=self._did)
    logger.debug(f'Adding service with service type {service_type} with did {self._did}')
    self._services.append(service)
def as_text(self, is_proof=True, is_pretty=False):
    """Serialize the DDO to a JSON string, pretty-printed on request."""
    payload = self.as_dictionary(is_proof)
    if is_pretty:
        return json.dumps(payload, indent=2, separators=(',', ': '))
    return json.dumps(payload)
def as_dictionary(self, is_proof=True):
    """Build the JSON-serializable dict form of this DDO.

    :param is_proof: include the proof section when True and present.
    """
    if self._created is None:
        # Stamp a creation time the first time the DDO is serialized.
        self._created = DDO._get_timestamp()
    data = {
        '@context': DID_DDO_CONTEXT_URL,
        'id': self._did,
        'created': self._created,
    }
    if self._public_keys:
        data['publicKey'] = [pk.as_dictionary() for pk in self._public_keys]
    if self._authentications:
        data['authentication'] = list(self._authentications)
    if self._services:
        data['service'] = [srv.as_dictionary() for srv in self._services]
    if self._proof and is_proof:
        data['proof'] = self._proof
    return data
def _read_dict(self, dictionary):
    """Populate this DDO's fields from a JSON-derived dict.

    List entries that are JSON strings are decoded before conversion.
    """
    values = dictionary
    self._did = values['id']
    self._created = values.get('created', None)
    if 'publicKey' in values:
        self._public_keys = [
            DDO.create_public_key_from_json(
                json.loads(entry) if isinstance(entry, str) else entry)
            for entry in values['publicKey']]
    if 'authentication' in values:
        self._authentications = [
            DDO.create_authentication_from_json(
                json.loads(entry) if isinstance(entry, str) else entry)
            for entry in values['authentication']]
    if 'service' in values:
        self._services = []
        for entry in values['service']:
            if isinstance(entry, str):
                entry = json.loads(entry)
            service = Service.from_json(entry)
            service.set_did(self._did)
            self._services.append(service)
    if 'proof' in values:
        self._proof = values['proof']
def get_public_key(self, key_id, is_search_embedded=False):
    """Look up a public key by list index (int) or key id (str).

    :param is_search_embedded: also search keys embedded in authentications.
    :return: the matching public key, or None.
    """
    if isinstance(key_id, int):
        return self._public_keys[key_id]
    for public_key in self._public_keys:
        if public_key.get_id() == key_id:
            return public_key
    if is_search_embedded:
        for authentication in self._authentications:
            if authentication.get_public_key_id() == key_id:
                return authentication.get_public_key()
    return None
46,480 | def _get_public_key_count ( self ) : index = len ( self . _public_keys ) for authentication in self . _authentications : if authentication . is_public_key ( ) : index += 1 return index | Return the count of public keys in the list and embedded . |
46,481 | def _get_authentication_from_public_key_id ( self , key_id ) : for authentication in self . _authentications : if authentication . is_key_id ( key_id ) : return authentication return None | Return the authentication based on it s id . |
def get_service(self, service_type=None):
    """Return the first service of the given type, or None.

    A falsy `service_type` never matches (matches original behavior).
    """
    if not service_type:
        return None
    for service in self._services:
        if service.type == service_type:
            return service
    return None
def find_service_by_id(self, service_id):
    """Find a service by its `serviceDefinitionId` value.

    Numeric ids that match nothing yield None; non-numeric ids fall back
    to a lookup by service type.
    """
    key = 'serviceDefinitionId'
    sid = str(service_id)
    for service in self._services:
        if key in service.values and str(service.values[key]) == sid:
            return service
    try:
        int(sid)
    except ValueError:
        # Not a number: treat the id as a service type instead.
        return self.find_service_by_type(sid)
    return None
def find_service_by_type(self, service_type):
    """Return the first service whose type equals `service_type`, else None."""
    return next((svc for svc in self._services if svc.type == service_type), None)
def create_public_key_from_json(values):
    """Build a public key object from a JSON record.

    RSA-typed records become PublicKeyRSA; everything else becomes a
    PublicKeyBase of type EthereumECDSAKey.
    """
    key_id = values.get('id') or ''
    owner = values.get('owner')
    if values.get('type') == PUBLIC_KEY_TYPE_RSA:
        public_key = PublicKeyRSA(key_id, owner=owner)
    else:
        public_key = PublicKeyBase(key_id, owner=owner, type='EthereumECDSAKey')
    public_key.set_key_value(values)
    return public_key
def create_authentication_from_json(values):
    """Build an authentication dict from a JSON record.

    :raises ValueError: when the record has no `publicKey` entry.
    """
    key_id = values.get('publicKey')
    if not key_id:
        raise ValueError(
            f'Invalid authentication definition, "publicKey" is missing: {values}')
    return {'type': values.get('type'), 'publicKey': key_id}
def generate_checksum(did, metadata):
    """Compute the sha3-256 integrity checksum for an asset.

    Hashes the concatenation of all file checksums, the base name, author,
    license, and the DID, encoded as UTF-8.
    """
    base = metadata['base']
    files_checksum = ''.join(
        entry['checksum'] for entry in base['files'] if 'checksum' in entry)
    payload = files_checksum + base['name'] + base['author'] + base['license'] + did
    return hashlib.sha3_256(payload.encode('UTF-8')).hexdigest()
def register(self, did, checksum, url, account, providers=None):
    """Register or update a DID on-chain via the DIDRegistry contract.

    :param did: DID string to register
    :param checksum: bytes checksum of the asset (defaults to zero bytes)
    :param url: resolver URL associated with the DID
    :param account: Account signing the transaction
    :param providers: optional list of provider addresses
    :raises ValueError: on invalid did/url/checksum or missing account.
    :return: bool -- True when the transaction succeeded.
    """
    did_source_id = did_to_id_bytes(did)
    if not did_source_id:
        raise ValueError(f'{did} must be a valid DID to register')
    if not urlparse(url):
        raise ValueError(f'Invalid URL {url} to register for DID {did}')
    if checksum is None:
        checksum = Web3Provider.get_web3().toBytes(0)
    if not isinstance(checksum, bytes):
        raise ValueError(f'Invalid checksum value {checksum}, must be bytes or string')
    if account is None:
        raise ValueError('You must provide an account to use to register a DID')
    tx_hash = self._register_attribute(
        did_source_id, checksum, url, account, providers or [])
    receipt = self.get_tx_receipt(tx_hash)
    return receipt and receipt.status == 1
def _register_attribute(self, did, checksum, value, account, providers):
    """Send the `registerAttribute` transaction recording a DID attribute."""
    assert isinstance(providers, list), ''
    return self.send_transaction(
        'registerAttribute',
        (did, checksum, providers, value),
        transact={'from': account.address,
                  'passphrase': account.password})
def remove_provider(self, did, provider_address, account):
    """Remove a provider address from the DID's on-chain record.

    :return: the transaction receipt.
    """
    tx_hash = self.send_transaction(
        'removeDIDProvider',
        (did, provider_address),
        transact={'from': account.address,
                  'passphrase': account.password})
    return self.get_tx_receipt(tx_hash)
def get_did_providers(self, did):
    """Return the providers registered on-chain for `did`.

    Returns None when the register record has an unexpected shape.
    """
    register_values = self.contract_concise.getDIDRegister(did)
    if not register_values or len(register_values) != 5:
        return None
    return DIDRegisterValues(*register_values).providers
def get_owner_asset_ids(self, address):
    """List DIDs of assets owned by `address`, read from on-chain events."""
    block_filter = self._get_event_filter(owner=address)
    log_items = block_filter.get_all_entries(max_tries=5)
    return [id_to_did(log_item.args['_did']) for log_item in log_items]
def set_encode_key_value(self, value, store_type=PUBLIC_KEY_STORE_TYPE_BASE64):
    """Store an RSA key, exported as PEM text or DER bytes per `store_type`."""
    if store_type == PUBLIC_KEY_STORE_TYPE_PEM:
        encoded = value.exportKey('PEM').decode()
    else:
        encoded = value.exportKey('DER')
    PublicKeyBase.set_encode_key_value(self, encoded, store_type)
def did_parse(did):
    """Split a DID string into its method and id parts.

    :raises TypeError: when `did` is not a string.
    :raises ValueError: when `did` does not look like a DID.
    :return: dict with 'method' and 'id' keys.
    """
    if not isinstance(did, str):
        raise TypeError(f'Expecting DID of string type, got {did} of {type(did)} type')
    match = re.match('^did:([a-z0-9]+):([a-zA-Z0-9-.]+)(.*)', did)
    if not match:
        raise ValueError(f'DID {did} does not seem to be valid.')
    return {'method': match.group(1), 'id': match.group(2)}
def id_to_did(did_id, method='op'):
    """Build an Ocean DID string from a hex id given as str or bytes.

    :raises TypeError: when `did_id` is neither str nor bytes.
    """
    if isinstance(did_id, bytes):
        did_id = Web3.toHex(did_id)
    if not isinstance(did_id, str):
        raise TypeError("did id must be a hex string or bytes")
    did_id = remove_0x_prefix(did_id)
    # An all-zero / empty id collapses to the single character '0'.
    if Web3.toBytes(hexstr=did_id) == b'':
        did_id = '0'
    return f'did:{method}:{did_id}'
def did_to_id_bytes(did):
    """Convert an Ocean DID (str) or raw bytes to its id as bytes.

    :raises ValueError: for hex strings or invalid DIDs.
    :raises TypeError: for any other input type.
    """
    if isinstance(did, bytes):
        return did
    if not isinstance(did, str):
        raise TypeError(
            f'Unknown did format, expected str or bytes, got {did} of type {type(did)}')
    # Plain hex strings (with or without 0x) are rejected; real DIDs contain ':'.
    if re.match('^[0x]?[0-9A-Za-z]+$', did):
        raise ValueError(f'{did} must be a DID not a hex string')
    did_result = did_parse(did)
    if not did_result:
        raise ValueError(f'{did} is not a valid did')
    if not did_result['id']:
        raise ValueError(f'{did} is not a valid ocean did')
    return Web3.toBytes(hexstr=did_result['id'])
def get_template(self, template_id):
    """Fetch the AgreementTemplate for `template_id`, or None when absent."""
    template = self.contract_concise.getTemplate(template_id)
    if not template or len(template) != 4:
        return None
    return AgreementTemplate(*template)
def propose_template(self, template_id, from_account):
    """Send the `proposeTemplate` transaction.

    :return: bool -- True when the transaction succeeded.
    """
    tx_hash = self.send_transaction(
        'proposeTemplate',
        (template_id,),
        transact={'from': from_account.address,
                  'passphrase': from_account.password})
    receipt = self.get_tx_receipt(tx_hash)
    return receipt.status == 1
def access_service_descriptor(price, consume_endpoint, service_endpoint, timeout, template_id):
    """Build an (ASSET_ACCESS, attributes) service descriptor tuple."""
    attributes = {
        'price': price,
        'consumeEndpoint': consume_endpoint,
        'serviceEndpoint': service_endpoint,
        'timeout': timeout,
        'templateId': template_id,
    }
    return ServiceTypes.ASSET_ACCESS, attributes
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.