docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Returns the value at an address if it was an input to the txn but never set. It returns None if that address was never set in the merkle database, or if the address is not within the context. Args: addresses (list of str): The full 70 character addresses. Returns: ...
def get_if_not_set(self, addresses): with self._lock: results = [] for add in addresses: results.append(self._get_if_not_set(add)) return results
163,861
Create futures needed before starting the process of reading the address's value from the merkle tree. Args: addresses (list of str): addresses in the txn's inputs that aren't in any base context (or any in the chain).
def create_prefetch(self, addresses): with self._lock: for add in addresses: self._state[add] = _ContextFuture(address=add, wait_for_tree=True)
163,864
Create futures from inputs with the current value for that address at the start of that context. Args: address_values (list of tuple): The tuple is string, bytes of the address and value.
def create_initial(self, address_values): with self._lock: for add, val in address_values: self._state[add] = _ContextFuture(address=add, result=val)
163,865
Set the result for each future at the given addresses with the value stored in the merkle database. Args: address_value_dict (dict of str: bytes): The unique full addresses that the bytes values should be set with.
def set_from_tree(self, address_value_dict): for address, value in address_value_dict.items(): if address in self._state: self._state[address].set_result(result=value, from_tree=True)
163,866
Called in the context manager's delete method to either mark an entry for deletion , or create a new future and immediately set it for deletion in the future. Args: address_list (list of str): The unique full addresses. Raises: AuthorizationException
def delete_direct(self, addresses): with self._lock: for address in addresses: self._validate_write(address) if address in self._state: self._state[address].set_deleted() else: fut = _ContextFuture(addr...
163,867
Called in the context manager's set method to either overwrite the value for an address, or create a new future and immediately set a value in the future. Args: address_value_dict (dict of str:bytes): The unique full addresses with bytes to set at that address. ...
def set_direct(self, address_value_dict): with self._lock: for address, value in address_value_dict.items(): self._validate_write(address) if address in self._state: self._state[address].set_result(result=value) else: ...
163,868
Raises an exception if the address is not allowed to be read in this context, based on txn inputs. Args: address (str): An address to be validated. Returns: None Raises: AuthorizationException
def validate_read(self, address): if not any(address.startswith(ns) for ns in self._read_list): raise AuthorizationException(address=address)
163,870
Set the addresses's value unless the future has been declared read only. Args: result (bytes): The value at an address. from_tree (bool): Whether the value is being set by a read from the merkle tree. Returns: None
def set_result(self, result, from_tree=False): if self._read_only: if not from_tree: LOGGER.warning("Tried to set address %s on a" " read-only context.", self.address) return with self._condi...
163,874
Runs the transaction list or show command, printing to the console Args: args: The parsed arguments sent to the command at runtime
def do_transaction(args): rest_client = RestClient(args.url, args.user) if args.subcommand == 'list': transactions = rest_client.list_transactions() keys = ('transaction_id', 'family', 'version', 'size', 'payload') headers = tuple(k.upper() if k != 'version' else 'VERS' for k in ke...
163,893
Returns the TransactionReceipt Args: txn_id (str): the id of the transaction for which the receipt should be retrieved. Returns: TransactionReceipt: The receipt for the given transaction id. Raises: KeyError: if the transaction id is unknown...
def get(self, txn_id): if txn_id not in self._receipt_db: raise KeyError('Unknown transaction id {}'.format(txn_id)) txn_receipt_bytes = self._receipt_db[txn_id] txn_receipt = TransactionReceipt() txn_receipt.ParseFromString(txn_receipt_bytes) return txn_rec...
163,896
Sends a list of batches to the validator. Args: batch_list (:obj:`BatchList`): the list of batches Returns: dict: the json result data, as a dict
def send_batches(self, batch_list): if isinstance(batch_list, BaseMessage): batch_list = batch_list.SerializeToString() return self._post('/batches', batch_list)
163,903
Adds arguments parsers for the block list and block show commands Args: subparsers: Add parsers to this subparser object parent_parser: The parent argparse.ArgumentParser object
def add_block_parser(subparsers, parent_parser): parser = subparsers.add_parser( 'block', description='Provides subcommands to display information about the ' 'blocks in the current blockchain.', help='Displays information on blocks in the current blockchain') grand_parsers...
163,926
Runs the block list or block show command, printing output to the console Args: args: The parsed arguments sent to the command at runtime
def do_block(args): rest_client = RestClient(args.url, args.user) if args.subcommand == 'list': block_generator = rest_client.list_blocks() blocks = [] left = args.count for block in block_generator: blocks.append(block) left -= 1 if left...
163,927
Register a callback for a specific connection state change. Register a callback to be triggered when the connection changes to the specified state, signified by a ConnectionEvent. The callback must be a coroutine. Args: event_type (ConnectionEvent): the connection event to...
def on_connection_state_change(self, event_type, callback): listeners = self._connection_state_listeners.get(event_type, []) listeners.append(callback) self._connection_state_listeners[event_type] = listeners
163,942
Constructor for the LMDBNoLockDatabase class. Args: filename (str): The filename of the database file. flag (str): a flag indicating the mode for opening the database. Refer to the documentation for anydbm.open().
def __init__(self, filename, flag): super(LMDBNoLockDatabase, self).__init__() create = bool(flag == 'c') if flag == 'n': if os.path.isfile(filename): os.remove(filename) create = True self._lmdb = lmdb.Environment( path=fil...
163,950
Removes a key:value from the database Args: key (str): The key to remove.
def delete(self, key): with self._lmdb.begin(write=True, buffers=True) as txn: txn.delete(key.encode())
163,955
Checks if a status enum matches the trigger originally set, and if so, raises the appropriate error. Args: status (int, enum): A protobuf enum response status to check. Raises: AssertionError: If trigger or error were not set. _ApiError: If the statuses don'...
def check(cls, status): assert cls.trigger is not None, 'Invalid ErrorTrap, trigger not set' assert cls.error is not None, 'Invalid ErrorTrap, error not set' if status == cls.trigger: # pylint: disable=not-callable # cls.error will be callable at runtime ...
163,989
Does the tree contain an address. Args: item (str): An address. Returns: (bool): True if it does contain, False otherwise.
def __contains__(self, item): try: _libexec('merkle_db_contains', self.pointer, item.encode()) # No error implies found return True except KeyError: return False
163,994
Do initialization necessary for the consensus to claim a block, this may include initiating voting activates, starting proof of work hash generation, or create a PoET wait timer. Args: block_header (BlockHeader): the BlockHeader to initialize. Returns: True
def initialize_block(self, block_header): # Using the current chain head, we need to create a state view so we # can get our config values. state_view = \ BlockWrapper.state_view_for_block( self._block_cache.block_store.chain_head, self._state...
164,007
The longest chain is selected. If they are equal, then the hash value of the previous block id and publisher signature is computed. The lowest result value is the winning block. Args: cur_fork_head: The current head of the block chain. new_fork_head: The head of the fork ...
def compare_forks(self, cur_fork_head, new_fork_head): # If the new fork head is not DevMode consensus, bail out. This should # never happen, but we need to protect against it. if new_fork_head.consensus != b"Devmode": raise \ TypeError( ...
164,010
Check the public key of a node on the network to see if they are permitted to participate. The roles being checked are the following, from first to last: "network" "default" The first role that is set will be the one used to enforce if the ...
def check_network_role(self, public_key): state_root = self._current_root_func() if state_root == INIT_ROOT_KEY: LOGGER.debug("Chain head is not set yet. Permit all.") return True self._cache.update_view(state_root) role = self._cache.get_role("network",...
164,016
Used to retrieve an identity role. Args: item (string): the name of the role to be fetched state_root(string): The state root of the previous block. from_state (bool): Whether the identity value should be read directly from state, instead of using the cached v...
def get_role(self, item, state_root, from_state=False): if from_state: # if from state use identity_view and do not add to cache if self._identity_view is None: self.update_view(state_root) value = self._identity_view.get_role(item) return...
164,021
Takes a statuses dict and formats it for transmission with Protobuf and ZMQ. Args: statuses (dict of int): Dict with batch ids as the key, status as value batch_ids (list of str): The batch ids in their original order tracker (BatchTracker): A batch tracker with access to invalid info
def _format_batch_statuses(statuses, batch_ids, tracker): proto_statuses = [] for batch_id in batch_ids: if statuses[batch_id] == \ client_batch_submit_pb2.ClientBatchStatus.INVALID: invalid_txns = tracker.get_invalid_txn_info(batch_id) for txn_info in invalid_tx...
164,022
Handles parsing incoming requests, and wrapping the final response. Args: connection_id (str): ZMQ identity sent over ZMQ socket message_content (bytes): Byte encoded request protobuf to be parsed Returns: HandlerResult: result to be sent in response back to client
def handle(self, connection_id, message_content): try: request = self._request_proto() request.ParseFromString(message_content) except DecodeError: LOGGER.info('Protobuf %s failed to deserialize', request) return self._wrap_result(self._status.INT...
164,025
Wraps child's response in a HandlerResult to be sent back to client. Args: response (enum or dict): Either an integer status enum, or a dict of attributes to be added to the protobuf response.
def _wrap_result(self, response): if isinstance(response, int): response = self._wrap_response(response) return HandlerResult( status=HandlerStatus.RETURN, message_out=self._response_proto(**response), message_type=self._response_type)
164,026
Convenience method to wrap a status with any key word args. Args: status (enum): enum response status, defaults to OK Returns: dict: inlcudes a 'status' attribute and any key word arguments
def _wrap_response(self, status=None, **kwargs): kwargs['status'] = status if status is not None else self._status.OK return kwargs
164,027
Fetches the request specified head block, or the chain head. Note: This method will fail if `_block_store` has not been set Args: request (object): The parsed protobuf request object Returns: Block: the block object at the head of the requested chain ...
def _get_head_block(self, request): if request.head_id: if self._id_regex.fullmatch(request.head_id) is None: LOGGER.debug('Invalid head id requested: %s', request.head_id) raise _ResponseFailed(self._status.NO_ROOT) try: return se...
164,028
Sets the root of the merkle tree, returning any head id used. Note: This method will fail if `_tree` has not been set Args: request (object): The parsed protobuf request object Returns: str: the state root of the head block used to specify the root ...
def _set_root(self, request): if request.state_root: root = request.state_root else: head = self._get_chain_head() root = head.state_root_hash try: self._tree.set_merkle_root(root) except KeyError as e: LOGGER.debug('U...
164,030
Validates a list of ids, raising a ResponseFailed error if invalid. Args: resource_id (list of str): The ids to validate Raises: ResponseFailed: The id was invalid, and a status of INVALID_ID will be sent with the response.
def _validate_ids(self, resource_ids): for resource_id in resource_ids: if self._id_regex.fullmatch(resource_id) is None: LOGGER.debug('Invalid resource id requested: %s', resource_id) raise _ResponseFailed(self._status.INVALID_ID)
164,032
Validates a state root, raising a ResponseFailed error if invalid. Args: state_root (str): The state_root to validate Raises: ResponseFailed: The state_root was invalid, and a status of INVALID_ROOT will be sent with the response.
def _validate_state_root(self, state_root): if self._state_root_regex.fullmatch(state_root) is None: LOGGER.debug('Invalid state root: %s', state_root) raise _ResponseFailed(self._status.INVALID_ROOT)
164,033
Validates a namespace, raising a ResponseFailed error if invalid. Args: state_root (str): The state_root to validate Raises: ResponseFailed: The state_root was invalid, and a status of INVALID_ROOT will be sent with the response.
def _validate_namespace(self, namespace): if self._namespace_regex.fullmatch(namespace) is None: LOGGER.debug('Invalid namespace: %s', namespace) raise _ResponseFailed(self._status.INVALID_ADDRESS)
164,034
Truncates a list of resources based on ClientPagingControls Args: request (object): The parsed protobuf request object resources (list of objects): The resources to be paginated Returns: list: The paginated list of resources object: The ClientPagingRespo...
def paginate_resources(cls, request, resources, on_fail_status): if not resources: return (resources, client_list_control_pb2.ClientPagingResponse()) paging = request.paging limit = min(paging.limit, MAX_PAGE_SIZE) or DEFAULT_PAGE_SIZE # Find the start index from th...
164,035
Helper method to fetch the index of a resource by its id or address Args: resources (list of objects): The resources to be paginated target_id (string): The address or header_signature of the resource Returns: integer: The index of the target resource Raise...
def index_by_id(cls, target_id, resources): for index in range(len(resources)): if cls.id_by_index(index, resources) == target_id: return index raise AssertionError
164,036
Helper method to fetch the id or address of a resource by its index Args: resources (list of objects): The resources to be paginated index (integer): The index of the target resource Returns: str: The address or header_signature of the resource, retu...
def id_by_index(index, resources): if index < 0 or index >= len(resources): return '' try: return resources[index].header_signature except AttributeError: return resources[index].address
164,037
Sorts a list of resources based on a list of sort controls Args: request (object): The parsed protobuf request object resources (list of objects): The resources to be sorted fail_enum (int, enum): The enum status to raise with invalid keys header_proto(class): Cl...
def sort_resources(cls, request, resources, fail_enum, header_proto=None): if not request.sorting: return resources value_handlers = cls._get_handler_set(request, fail_enum, header_proto) def sorter(resource_a, resource_b): for handler in value_handlers: ...
164,038
Called by the BatchTracker the _BatchWaiter is observing. Should not be called by handlers. Args: statuses (dict of int): A dict with keys of batch ids, and values of status enums
def notify_batches_finished(self, statuses): with self._wait_condition: self._statuses = statuses self._wait_condition.notify()
164,041
Locks until a list of batch ids is committed to the block chain or a timeout is exceeded. Returns the statuses of those batches. Args: batch_ids (list of str): The ids of the batches to wait for timeout(int): Maximum time in seconds to wait for Returns: list...
def wait_for_batches(self, batch_ids, timeout=None): self._batch_tracker.watch_statuses(self, batch_ids) timeout = timeout or DEFAULT_TIMEOUT start_time = time() with self._wait_condition: while True: if self._statuses is not None: ...
164,042
Verify that the directory exists and is readable and writable. Args: path (str): a directory which should exist and be writable human_readable_name (str): a human readable string for the directory which is used in logging statements Returns: bool: False if an error exists, ...
def check_directory(path, human_readable_name): if not os.path.exists(path): LOGGER.error("%s directory does not exist: %s", human_readable_name, path) return False if not os.path.isdir(path): LOGGER.error("%s directory is not a directory: ...
164,072
Reads the given file as a hex key. Args: key_filename: The filename where the key is stored. If None, defaults to the default key for the current user. Returns: Signer: the signer Raises: CliException: If unable to read the file.
def _read_signer(key_filename): filename = key_filename if filename is None: filename = os.path.join(os.path.expanduser('~'), '.sawtooth', 'keys', getpass.getuser() + '.priv') try: with open...
164,082
Creates a batch from a list of transactions and a public key, and signs the resulting batch with the given signing key. Args: signer (:obj:`Signer`): The cryptographic signer transactions (list of `Transaction`): The transactions to add to the batch. Returns: `Batch`: T...
def _create_batch(signer, transactions): txn_ids = [txn.header_signature for txn in transactions] batch_header = BatchHeader( signer_public_key=signer.get_public_key().as_hex(), transaction_ids=txn_ids).SerializeToString() return Batch( header=batch_header, header_signa...
164,083
Create a ExecutionContext to run a transaction against. Args: state_hash: (str): Merkle root to base state on. base_contexts (list of str): Context ids of contexts that will have their state applied to make this context. inputs (list of str): Addresses that c...
def create_context(self, state_hash, base_contexts, inputs, outputs): for address in inputs: if not self.namespace_is_valid(address): raise CreateContextException( "Address or namespace {} listed in inputs is not " "valid".format(addr...
164,087
Breadth first search through the chain of contexts searching for the bytes values at the addresses in addresses_to_find. Args: base_contexts (list of str): The context ids to start with. addresses_to_find (list of str): Addresses to find values in the chain of co...
def _find_address_values_in_chain(self, base_contexts, addresses_to_find): contexts_in_chain = deque() contexts_in_chain.extend(base_contexts) reads = list(addresses_to_find) address_values = [] context_ids_already_searched = [] context_ids_already_searched.exte...
164,088
Delete contexts from the ContextManager. Args: context_id_list (list): a list of context ids Returns: None
def delete_contexts(self, context_id_list): for c_id in context_id_list: if c_id in self._contexts: del self._contexts[c_id]
164,089
Within a context, append data to the execution result. Args: context_id (str): the context id returned by create_context data (bytes): data to append Returns: (bool): True if the operation is successful, False if the context_id doesn't reference a kn...
def add_execution_data(self, context_id, data): if context_id not in self._contexts: LOGGER.warning("Context_id not in contexts, %s", context_id) return False context = self._contexts.get(context_id) context.add_execution_data(data) return True
164,095
Within a context, append data to the execution result. Args: context_id (str): the context id returned by create_context data_type (str): type of data to append data (bytes): data to append Returns: (bool): True if the operation is successful, False if ...
def add_execution_event(self, context_id, event): if context_id not in self._contexts: LOGGER.warning("Context_id not in contexts, %s", context_id) return False context = self._contexts.get(context_id) context.add_execution_event(event) return True
164,096
Applies the given puts and deletes atomically. Args: puts (:iterable:`tuple`): an iterable of key/value pairs to insert deletes (:iterable:str:) an iterable of keys to delete
def update(self, puts, deletes): with self._lmdb.begin(write=True, buffers=True) as txn: cursor = txn.cursor(self._main_db) # Process deletes first, to handle the case of new items replacing # old index locations for key in deletes: if not...
164,127
Walk to ADDRESS, creating nodes if necessary, and set the data there to UPDATER(data). Arguments: address (str): the address to be updated
def update(self, address, updater, prune=False): node = self._get_or_create(address) node.data = updater(node.data) if prune: node.children.clear()
164,261
Remove all children (and descendants) below ADDRESS. Arguments: address (str): the address to be pruned
def prune(self, address): try: for step in self._walk_to_address(address): node = step except AddressNotInTree: return node.children.clear()
164,262
Returns a stream of pairs of node addresses and data, raising AddressNotInTree if ADDRESS is not in the tree. First the ancestors of ADDRESS (including itself) are yielded, earliest to latest, and then the descendants of ADDRESS are yielded in an unspecified order. Arguments: ...
def walk(self, address): for step in self._walk_to_address(address): node = step yield node.address, node.data to_process = deque() to_process.extendleft( node.children) while to_process: node = to_process.pop() yi...
164,263
Returns all predecessor transaction ids for a write of the provided address. Arguments: address (str): the radix address Returns: a set of transaction ids
def find_write_predecessors(self, address): # A write operation must be preceded by: # - The "enclosing writer", which is the writer at the address or # the nearest writer higher (closer to the root) in the tree. # - The "enclosing readers", which are the readers at the ...
164,268
Add a predecessor-successor relationship between one txn id and a set of predecessors. Args: txn_id (str): The transaction id of the transaction. predecessors (set): The transaction ids of the transaction's predecessors Returns: None
def add_relationship(self, txn_id, predecessors): all_pred = set(predecessors) for pred in predecessors: all_pred.update(self._predecessors_by_id[pred]) self._predecessors_by_id[txn_id] = all_pred
164,269
Returns whether the predecessor is a predecessor or a predecessor of a predecessor...of any of the others. Args: predecessor (str): The txn id of the predecessor. others (list(str)): The txn id of the successor. Returns: (bool)
def is_predecessor_of_other(self, predecessor, others): return any(predecessor in self._predecessors_by_id[o] for o in others)
164,270
Starting with the batch referenced by batch_signature, iterate back through the batches and for each valid batch collect the context_id. At the end remove contexts for txns that are other txn's predecessors. Args: batch_signature (str): The batch to start from, moving back through ...
def _get_contexts_for_squash(self, batch_signature): batch = self._batches_by_id[batch_signature].batch index = self._batches.index(batch) contexts = [] txns_added_predecessors = [] for b in self._batches[index::-1]: batch_is_valid = True context...
164,277
Decide if possible_successor should be replayed. Args: txn_id (str): Id of txn in failed batch. possible_successor (str): Id of txn to possibly replay. already_seen (list): A list of possible_successors that have been replayed. Returns: (...
def _is_txn_to_replay(self, txn_id, possible_successor, already_seen): is_successor = self._is_predecessor_of_possible_successor( txn_id, possible_successor) in_different_batch = not self._is_in_same_batch(txn_id, ...
164,281
Remove transactions from scheduled and txn_results for successors of txns in a failed batch. These transactions will now, or in the future be rescheduled in next_transaction; giving a replay ability. Args: sig (str): Transaction header signature
def _remove_subsequent_result_because_of_batch_failure(self, sig): batch = self._batches_by_txn_id[sig] seen = [] for txn in batch.transactions: txn_id = txn.header_signature for poss_successor in self._scheduled.copy(): if not self.is_transactio...
164,282
Set the first batch id that doesn't have all results. Args: txn_signature (str): The txn identifier of the transaction with results being set.
def _set_least_batch_id(self, txn_signature): batch = self._batches_by_txn_id[txn_signature] least_index = self._index_of_batch( self._batches_by_id[self._least_batch_id_wo_results].batch) current_index = self._index_of_batch(batch) all_prior = False if c...
164,285
Returns whether the transaction is in a valid batch. Args: txn_id (str): The transaction header signature. Returns: (bool): True if the txn's batch is valid, False otherwise.
def _txn_is_in_valid_batch(self, txn_id): batch = self._batches_by_txn_id[txn_id] # Return whether every transaction in the batch with a # transaction result is valid return all( self._txn_results[sig].is_valid for sig in set(self._txn_results).intersec...
164,288
Executes the key generation operation, given the parsed arguments. Args: args (:obj:`Namespace`): The parsed args.
def do_keygen(args): if args.key_name is not None: key_name = args.key_name else: key_name = 'validator' key_dir = get_key_dir() if not os.path.exists(key_dir): raise CliException("Key directory does not exist: {}".format(key_dir)) priv_filename = os.path.join(key_dir...
164,303
Sends a message containing our peers to the connection identified by connection_id. Args: connection_id (str): A unique identifier which identifies an connection on the network server socket.
def send_peers(self, connection_id): with self._lock: # Needs to actually be the list of advertised endpoints of # our peers peer_endpoints = list(self._peers.values()) if self._endpoint: peer_endpoints.append(self._endpoint) p...
164,306
Adds candidate endpoints to the list of endpoints to attempt to peer with. Args: peer_endpoints ([str]): A list of public uri's which the validator can attempt to peer with.
def add_candidate_peer_endpoints(self, peer_endpoints): if self._topology: self._topology.add_candidate_peer_endpoints(peer_endpoints) else: LOGGER.debug("Could not add peer endpoints to topology. " "ConnectionManager does not exist.")
164,307
Registers a connected connection_id. Args: connection_id (str): A unique identifier which identifies an connection on the network server socket. endpoint (str): The publically reachable endpoint of the new peer
def register_peer(self, connection_id, endpoint): with self._lock: if len(self._peers) < self._maximum_peer_connectivity: self._peers[connection_id] = endpoint self._topology.set_connection_status(connection_id, ...
164,309
Removes a connection_id from the registry. Args: connection_id (str): A unique identifier which identifies an connection on the network server socket.
def unregister_peer(self, connection_id): public_key = self.peer_to_public_key(connection_id) if public_key: self._consensus_notifier.notify_peer_disconnected(public_key) with self._lock: if connection_id in self._peers: del self._peers[connectio...
164,310
Sends a message via the network. Args: message_type (str): The type of the message. message (bytes): The message to be sent. connection_id (str): The connection to send it to.
def send(self, message_type, message, connection_id, one_way=False): try: self._network.send(message_type, message, connection_id, one_way=one_way) except ValueError: LOGGER.debug("Connection %s is no longer valid. " ...
164,320
Broadcast gossip messages. Broadcast the message to all peers unless they are in the excluded list. Args: gossip_message: The message to be broadcast. message_type: Type of the message. exclude: A list of connection_ids that should be excluded from this ...
def broadcast(self, gossip_message, message_type, exclude=None): with self._lock: if exclude is None: exclude = [] for connection_id in self._peers.copy(): if connection_id not in exclude and \ self._network.is_connection_h...
164,321
Adds candidate endpoints to the list of endpoints to attempt to peer with. Args: peer_endpoints ([str]): A list of public uri's which the validator can attempt to peer with.
def add_candidate_peer_endpoints(self, peer_endpoints): with self._lock: for endpoint in peer_endpoints: if endpoint not in self._candidate_peer_endpoints: self._candidate_peer_endpoints.append(endpoint)
164,331
Constructs an owned pointer. Initializing the pointer is left to the extending classes Args: drop_ffi_call_fn (str): the name of the FFI function to call on drop or garbage collection. initialized_ptr (ctypes.c_void_p:optional): a preinitialized p...
def __init__(self, drop_ffi_call_fn, initialized_ptr=None): if initialized_ptr is not None: self._ptr = initialized_ptr else: self._ptr = ctypes.c_void_p() self._drop_ffi_fn = drop_ffi_call_fn
164,348
Starts the genesis block creation process. Will call the given `on_done` callback on successful completion. Args: on_done (function): a function called on completion Raises: InvalidGenesisStateError: raises this error if a genesis block is unable to be ...
def start(self, on_done): genesis_file = os.path.join(self._data_dir, 'genesis.batch') try: with open(genesis_file, 'rb') as batch_file: genesis_data = genesis_pb2.GenesisData() genesis_data.ParseFromString(batch_file.read()) LOGGER.info('...
164,361
Returns the block publisher based on the consensus module set by the "sawtooth_settings" transaction family. Args: state_hash (str): The current state root hash for reading settings. Raises: InvalidGenesisStateError: if any errors occur getting the Block...
def _get_block_publisher(self, state_hash): state_view = self._state_view_factory.create_view(state_hash) try: class BatchPublisher: def send(self, transactions): # Consensus implementations are expected to have handling # in p...
164,362
Adds argument parser for the peer command Args: subparsers: Add parsers to this subparser object parent_parser: The parent argparse.ArgumentParser object
def add_peer_parser(subparsers, parent_parser): parser = subparsers.add_parser( 'peer', help='Displays information about validator peers', description="Provides a subcommand to list a validator's peers") grand_parsers = parser.add_subparsers(title='subcommands', ...
164,383
Setup the asyncio event loop. Args: socket_type (int from zmq.*): One of zmq.DEALER or zmq.ROUTER complete_or_error_queue (queue.Queue): A way to propagate errors back to the calling thread. Needed since this function is directly used in Thread. ...
def setup(self, socket_type, complete_or_error_queue): try: if self._secured: if self._server_public_key is None or \ self._server_private_key is None: raise LocalConfigurationError( "Attempting to start soc...
164,416
Adds an outbound connection to the network. Args: uri (str): The zmq-style (e.g. tcp://hostname:port) uri to attempt to connect to.
def add_outbound_connection(self, uri): LOGGER.debug("Adding connection to %s", uri) conn = OutboundConnection( connections=self._connections, endpoint=uri, dispatcher=self._dispatcher, zmq_identity=self._zmq_identity, secured=self._se...
164,428
Returns the connection id associated with a publically reachable endpoint or raises KeyError if the endpoint is not found. Args: endpoint (str): A zmq-style uri which identifies a publically reachable endpoint.
def get_connection_id_by_endpoint(self, endpoint): with self._connections_lock: for connection_id in self._connections: connection_info = self._connections[connection_id] if connection_info.uri == endpoint: return connection_id ...
164,439
Adds the endpoint to the connection definition. When the connection is created by the send/receive thread, we do not yet have the endpoint of the remote node. That is not known until we process the incoming ConnectRequest. Args: connection_id (str): The identifier for the co...
def update_connection_endpoint(self, connection_id, endpoint): if connection_id in self._connections: connection_info = self._connections[connection_id] self._connections[connection_id] = \ ConnectionInfo(connection_info.connection_type, ...
164,440
Adds the public_key to the connection definition. Args: connection_id (str): The identifier for the connection. public_key (str): The public key used to enforce permissions on connections.
def update_connection_public_key(self, connection_id, public_key): if connection_id in self._connections: connection_info = self._connections[connection_id] self._connections[connection_id] = \ ConnectionInfo(connection_info.connection_type, ...
164,441
Sends a message of message_type Args: message_type (validator_pb2.Message): enum value data (bytes): serialized protobuf callback (function): a callback function to call when a response to this message is received Returns: future.Future
def send(self, message_type, data, callback=None, one_way=False): message = validator_pb2.Message( correlation_id=_generate_id(), content=data, message_type=message_type) fut = future.Future(message.correlation_id, message.content, ...
164,449
Returns the state view for an arbitrary block. Args: block_wrapper (BlockWrapper): The block for which a state view is to be returned state_view_factory (StateViewFactory): The state view factory used to create the StateView object Returns: ...
def state_view_for_block(block_wrapper, state_view_factory):
    """Return a state view anchored at the given block's state root.

    Args:
        block_wrapper (BlockWrapper): The block for which a state view is
            to be returned, or None for the default (None) root.
        state_view_factory (StateViewFactory): The factory used to create
            the StateView object.

    Returns:
        StateView: a view over state as of the given block.
    """
    if block_wrapper is None:
        root = None
    else:
        root = block_wrapper.state_root_hash
    return state_view_factory.create_view(root)
164,451
Returns the settings view for an arbitrary block. Args: block_wrapper (BlockWrapper): The block for which a settings view is to be returned settings_view_factory (SettingsViewFactory): The settings view factory used to create the SettingsView object ...
def settings_view_for_block(block_wrapper, settings_view_factory):
    """Return a settings view anchored at the given block's state root.

    Args:
        block_wrapper (BlockWrapper): The block for which a settings view
            is to be returned, or None for the default (None) root.
        settings_view_factory (SettingsViewFactory): The factory used to
            create the SettingsView object.

    Returns:
        SettingsView: a view over the settings as of the given block.
    """
    if block_wrapper is None:
        root = None
    else:
        root = block_wrapper.state_root_hash
    return settings_view_factory.create_settings_view(root)
164,452
Remove spines of axis. Parameters: ax: axes to operate on sides: list of sides: top, left, bottom, right Examples: remove_spines(ax, ['top']) remove_spines(ax, ['top', 'bottom', 'right', 'left'])
def remove_spines(ax, sides):
    """Hide the named spines of an axes.

    Parameters:
        ax: axes to operate on.
        sides: list of spine names: top, left, bottom, right.

    Returns:
        The same axes, so calls can be chained.

    Examples:
        remove_spines(ax, ['top'])
        remove_spines(ax, ['top', 'bottom', 'right', 'left'])
    """
    for name in sides:
        spine = ax.spines[name]
        spine.set_visible(False)
    return ax
165,344
Move the entire spine relative to the figure. Parameters: ax: axes to operate on sides: list of sides to move. Sides: top, left, bottom, right dists: list of float distances to move. Should match sides in length. Example: move_spines(ax, sides=['left', 'bottom'], dists=[-0.02, 0.1])
def move_spines(ax, sides, dists):
    """Move the named spines relative to the axes.

    Parameters:
        ax: axes to operate on.
        sides: list of spine names to move: top, left, bottom, right.
        dists: list of float offsets; should match `sides` in length.

    Returns:
        The same axes, so calls can be chained.

    Example:
        move_spines(ax, sides=['left', 'bottom'], dists=[-0.02, 0.1])
    """
    for name, offset in zip(sides, dists):
        ax.spines[name].set_position(("axes", offset))
    return ax
165,345
Remove ticks from axis. Parameters: ax: axes to work on x: if True, remove xticks. Default False. y: if True, remove yticks. Default False. Examples: remove_ticks(ax, x=True) remove_ticks(ax, x=True, y=True)
def remove_ticks(ax, x=False, y=False):
    """Remove tick marks from an axes.

    Parameters:
        ax: axes to work on.
        x: if True, remove x-axis ticks. Default False.
        y: if True, remove y-axis ticks. Default False.

    Returns:
        The same axes, so calls can be chained.

    Examples:
        remove_ticks(ax, x=True)
        remove_ticks(ax, x=True, y=True)
    """
    targets = []
    if x:
        targets.append(ax.xaxis)
    if y:
        targets.append(ax.yaxis)
    for axis in targets:
        axis.set_ticks_position("none")
    return ax
165,346
Normalize embeddings matrix row-wise. Args: ord: normalization order. Possible values {1, 2, 'inf', '-inf'}
def normalize_words(self, ord=2, inplace=False): if ord == 2: ord = None # numpy uses this flag to indicate l2. vectors = self.vectors.T / np.linalg.norm(self.vectors, ord, axis=1) if inplace: self.vectors = vectors.T return self return Embedding(vectors=vectors.T, vocabulary=self...
165,393
Return the nearest k words to the given `word`. Args: word (string): single word. top_k (integer): decides how many neighbors to report. Returns: A list of words sorted by the distances. The closest is the first. Note: L2 metric is used to calculate distances.
def nearest_neighbors(self, word, top_k=10):
    """Return the nearest `top_k` words to the given `word`.

    Args:
        word (string): single word.
        top_k (integer): how many neighbors to report.

    Returns:
        A list of words sorted by distance; the closest comes first.

    Note:
        The L2 metric is used to calculate distances.
    """
    # TODO(rmyeid): Use scikit ball tree, if scikit is available
    query = self[word]
    dists = np.linalg.norm(self.vectors - query, axis=1)
    order = np.argsort(dists)
    # The nearest row is assumed to be the query word itself, so the
    # slice starts at 1 to skip it.
    neighbor_ids = order[1:top_k + 1]
    id_word = self.vocabulary.id_word
    return [id_word[i] for i in neighbor_ids]
165,394
Calculate Euclidean pairwise distances between `word` and `words`. Args: word (string): single word. words (list): list of strings. Returns: numpy array of the distances. Note: L2 metric is used to calculate distances.
def distances(self, word, words):
    """Calculate Euclidean pairwise distances between `word` and `words`.

    Args:
        word (string): single word.
        words (list): list of strings.

    Returns:
        numpy array of the distances.

    Note:
        The L2 metric is used to calculate distances.
    """
    anchor = self[word]
    others = np.asarray([self[w] for w in words])
    return np.linalg.norm(others - anchor, axis=1)
165,395
Return the collection that represents a specific language or task. Args: lang (string): Language code. task (string): Task name.
def get_collection(self, lang=None, task=None): if lang: id = "{}{}".format(Downloader.LANG_PREFIX, lang) elif task: id = "{}{}".format(Downloader.TASK_PREFIX, task) else: raise ValueError("You should pass either the task or the lang") try: return self.info(id) except ValueError as e: ...
165,435
Return True if polyglot supports the language. Args: lang (string): Language code.
def supported_language(self, lang):
    """Return True if polyglot supports the language.

    Args:
        lang (string): Language code.

    Returns:
        bool: True when a collection exists for `lang`, False when the
        lookup raises LanguageNotSupported.
    """
    # Bug fix: the original signature omitted `self` while the body
    # referenced it, so invoking this as a method raised NameError.
    try:
        self.get_collection(lang=lang)
        return True
    except LanguageNotSupported:
        return False
165,436
Languages that are covered by a specific task. Args: task (string): Task name.
def supported_languages(self, task=None): if task: collection = self.get_collection(task=task) return [isoLangs[x.id.split('.')[1]]["name"] for x in collection.packages] else: return [x.name.split()[0] for x in self.collections() ...
165,437
Tasks that are supported for a specific language. Args: lang (string): Language code name.
def supported_tasks(self, lang=None):
    """Tasks that are supported, optionally restricted to one language.

    Args:
        lang (string): Language code name; when given, only the tasks
            packaged for that language are returned.
    """
    if not lang:
        # No language given: report every task-level collection name.
        return [c.name.split()[0] for c in self.collections()
                if Downloader.TASK_PREFIX in c.id]
    packages = self.get_collection(lang=lang).packages
    # Keep the portion of each package id before the first '.'.
    return [pkg.id.split('.')[0] for pkg in packages]
165,439
Concatenates two text objects the same way Python strings are concatenated. Arguments: - `other`: a string or a text object
def __add__(self, other): if isinstance(other, basestring): return self.__class__(self.raw + other) elif isinstance(other, BaseBlob): return self.__class__(self.raw + other.raw) else: raise TypeError('Operands must be either strings or {0} objects' .format(self.__cla...
165,467
Detector of the language used in `text`. Args: text (string): unicode string.
def __init__(self, text, quiet=False):
    """Detector of the language used in `text`.

    Args:
        text (string): unicode string to analyze.
        quiet (bool): when True, suppress messages about detection
            reliability — presumably consumed by detect(); confirm there.
    """
    self.quiet = quiet
    self.__text = text
    # Optimistic default; detect() is expected to lower this flag when
    # the detection is not trustworthy.
    self.reliable = True
    # Run detection immediately so results are ready after construction.
    self.detect(text)
165,495
Decide which language is used to write the text. The method tries first to detect the language with high reliability. If that is not possible, the method switches to best effort strategy. Args: text (string): A snippet of text, the longer it is the more reliable we can detect t...
def detect(self, text): t = text.encode("utf-8") reliable, index, top_3_choices = cld2.detect(t, bestEffort=False) if not reliable: self.reliable = False reliable, index, top_3_choices = cld2.detect(t, bestEffort=True) if not self.quiet: if not reliable: rais...
165,496
Return filename that contains specific language resource name. Args: name (string): Name of the resource. lang (string): language code to be loaded.
def locate_resource(name, lang, filter=None): task_dir = resource_dir.get(name, name) package_id = u"{}.{}".format(task_dir, lang) p = path.join(polyglot_path, task_dir, lang) if not path.isdir(p): if downloader.status(package_id) != downloader.INSTALLED: raise ValueError("This resource is availabl...
165,507
Return a word embeddings object for `lang` and of type `type` Args: lang (string): language code. task (string): parameters that define task. type (string): skipgram, cw, cbow ... normalize (boolean): returns normalized word embeddings vectors.
def load_embeddings(lang="en", task="embeddings", type="cw", normalize=False): src_dir = "_".join((type, task)) if type else task p = locate_resource(src_dir, lang) e = Embedding.load(p) if type == "cw": e.apply_expansion(CaseExpander) e.apply_expansion(DigitExpander) if type == "sgns": e.apply...
165,508
Return a CountedVocabulary object. Args: lang (string): language code. type (string): wiki,...
def load_vocabulary(lang="en", type="wiki"):
    """Return a CountedVocabulary object for `lang`.

    Args:
        lang (string): language code.
        type (string): vocabulary flavor, e.g. "wiki".
    """
    resource_name = "{}_vocab".format(type)
    vocab_file = locate_resource(resource_name, lang)
    return CountedVocabulary.from_vocabfile(vocab_file)
165,509
Return a named entity extractor parameters for `lang` and of version `version` Args: lang (string): language code. version (string): version of the parameters to be used.
def load_ner_model(lang="en", version="2"):
    """Return named entity extractor parameters for `lang` and `version`.

    Args:
        lang (string): language code.
        version (string): version of the parameters to be used.
    """
    src_dir = "ner{}".format(version)
    p = locate_resource(src_dir, lang)
    fh = _open(p)
    try:
        # NOTE(review): pickle.load on downloaded model files executes
        # arbitrary code if the file is tampered with; the resource must
        # come from a trusted source.
        try:
            return pickle.load(fh)
        except UnicodeDecodeError:
            # Model was pickled under Python 2; retry decoding its str
            # objects as latin-1.
            fh.seek(0)
            return pickle.load(fh, encoding='latin1')
    finally:
        # Bug fix: the original leaked the file handle.
        fh.close()
165,510
Return a part of speech tagger parameters for `lang` and of version `version` Args: lang (string): language code. version (string): version of the parameters to be used.
def load_pos_model(lang="en", version="2"):
    """Return part of speech tagger parameters for `lang` and `version`.

    Args:
        lang (string): language code.
        version (string): version of the parameters to be used.
    """
    src_dir = "pos{}".format(version)
    p = locate_resource(src_dir, lang)
    fh = _open(p)
    try:
        # dict(...) materializes the arrays before the handle is closed.
        return dict(np.load(fh))
    finally:
        # Bug fix: the original leaked the file handle.
        fh.close()
165,511
Return a morfessor model for `lang` and of version `version` Args: lang (string): language code. version (string): version of the parameters to be used.
def load_morfessor_model(lang="en", version="2"): src_dir = "morph{}".format(version) p = locate_resource(src_dir, lang) file_handler = _open(p) tmp_file_ = NamedTemporaryFile(delete=False) tmp_file_.write(file_handler.read()) tmp_file_.close() io = morfessor.MorfessorIO() model = io.read_any_model(t...
165,513
Return a transliteration table for `lang` and of version `version` Args: lang (string): language code. version (string): version of the parameters to be used.
def load_transliteration_table(lang="en", version="2"):
    """Return a transliteration table for `lang` and of version `version`.

    Args:
        lang (string): language code.
        version (string): version of the parameters to be used.
    """
    src_dir = "transliteration{}".format(version)
    p = locate_resource(src_dir, lang)
    file_handler = _open(p)
    try:
        # NOTE(review): pickle on downloaded data executes arbitrary code
        # if the file is tampered with; resource must be trusted.
        return pickle.load(file_handler)
    finally:
        # Bug fix: the original leaked the file handle.
        file_handler.close()
165,514
Build attributes word_id and id_word from input. Args: words (list): list of sorted words according to frequency.
def __init__(self, words=None):
    """Build the word_id and id_word lookup tables.

    Args:
        words (list): list of words sorted according to frequency.
    """
    cleaned = self.sanitize_words(words)
    self.word_id = {word: idx for idx, word in enumerate(cleaned)}
    # Invert word_id so lookups in both directions are O(1).
    self.id_word = {idx: word for word, idx in self.word_id.items()}
165,539
Build attributes word_id and id_word from input. Args: word_count (dictionary): A dictionary of the type word:count or list of tuples of the type (word, count).
def __init__(self, word_count=None): if isinstance(word_count, dict): word_count = iteritems(word_count) sorted_counts = list(sorted(word_count, key=lambda wc: wc[1], reverse=True)) words = [w for w,c in sorted_counts] super(CountedVocabulary, self).__init__(words=words) self.word_count ...
165,540
Returns a vocabulary with the most frequent `k` words. Args: k (integer): specifies the top k most frequent words to be returned.
def most_frequent(self, k):
    """Return a vocabulary restricted to the `k` most frequent words.

    Args:
        k (integer): number of top-frequency words to keep.
    """
    top_words = self.words[:k]
    counts = {word: self.word_count[word] for word in top_words}
    return CountedVocabulary(word_count=counts)
165,543