repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_code_tokens
listlengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
listlengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/manager.py
NodePoolManager.get
def get(self, name: str, config: dict = None) -> NodePool:
    """
    Fetch a node pool object by name, with optional pool configuration.

    :param name: name of configured pool
    :param config: pool configuration with optional 'timeout' int,
        'extended_timeout' int, 'preordered_nodes' array of strings
    :return: node pool
    """
    LOGGER.debug('NodePoolManager.node_pool >>>')

    node_pool = NodePool(name, self.protocol, config)

    LOGGER.debug('NodePoolManager.node_pool <<< %s', node_pool)
    return node_pool
python
def get(self, name: str, config: dict = None) -> NodePool: """ Return node pool in input name and optional configuration. :param name: name of configured pool :param config: pool configuration with optional 'timeout' int, 'extended_timeout' int, 'preordered_nodes' array of strings :return: node pool """ LOGGER.debug('NodePoolManager.node_pool >>>') rv = NodePool(name, self.protocol, config) LOGGER.debug('NodePoolManager.node_pool <<< %s', rv) return rv
[ "def", "get", "(", "self", ",", "name", ":", "str", ",", "config", ":", "dict", "=", "None", ")", "->", "NodePool", ":", "LOGGER", ".", "debug", "(", "'NodePoolManager.node_pool >>>'", ")", "rv", "=", "NodePool", "(", "name", ",", "self", ".", "protoco...
Return node pool in input name and optional configuration. :param name: name of configured pool :param config: pool configuration with optional 'timeout' int, 'extended_timeout' int, 'preordered_nodes' array of strings :return: node pool
[ "Return", "node", "pool", "in", "input", "name", "and", "optional", "configuration", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/manager.py#L120-L135
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/manager.py
NodePoolManager.remove
async def remove(self, name: str) -> None:
    """
    Delete serialized pool configuration for the named pool, if present.

    An open node pool is left alone: indy-sdk refuses to delete it, and this
    method logs that refusal and carries on rather than raising.

    :param name: name of pool whose serialized configuration to remove
    """
    LOGGER.debug('NodePoolManager.remove >>> name: %s', name)

    try:
        await pool.delete_pool_ledger_config(name)
    except IndyError as x_indy:
        LOGGER.info('Abstaining from node pool removal; indy-sdk error code %s', x_indy.error_code)

    LOGGER.debug('NodePool.remove <<<')
python
async def remove(self, name: str) -> None: """ Remove serialized pool info if it exists. Abstain from removing open node pool. """ LOGGER.debug('NodePoolManager.remove >>> name: %s', name) try: await pool.delete_pool_ledger_config(name) except IndyError as x_indy: LOGGER.info('Abstaining from node pool removal; indy-sdk error code %s', x_indy.error_code) LOGGER.debug('NodePool.remove <<<')
[ "async", "def", "remove", "(", "self", ",", "name", ":", "str", ")", "->", "None", ":", "LOGGER", ".", "debug", "(", "'NodePoolManager.remove >>> name: %s'", ",", "name", ")", "try", ":", "await", "pool", ".", "delete_pool_ledger_config", "(", "name", ")", ...
Remove serialized pool info if it exists. Abstain from removing open node pool.
[ "Remove", "serialized", "pool", "info", "if", "it", "exists", ".", "Abstain", "from", "removing", "open", "node", "pool", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/manager.py#L137-L149
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/diddoc.py
DIDDoc.authnkey
def authnkey(self) -> dict:
    """
    Accessor for public keys marked as authentication keys, by identifier.
    """
    return {ident: pubkey for (ident, pubkey) in self._pubkey.items() if pubkey.authn}
python
def authnkey(self) -> dict: """ Accessor for public keys marked as authentication keys, by identifier. """ return {k: self._pubkey[k] for k in self._pubkey if self._pubkey[k].authn}
[ "def", "authnkey", "(", "self", ")", "->", "dict", ":", "return", "{", "k", ":", "self", ".", "_pubkey", "[", "k", "]", "for", "k", "in", "self", ".", "_pubkey", "if", "self", ".", "_pubkey", "[", "k", "]", ".", "authn", "}" ]
Accessor for public keys marked as authentication keys, by identifier.
[ "Accessor", "for", "public", "keys", "marked", "as", "authentication", "keys", "by", "identifier", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/diddoc.py#L81-L86
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/diddoc.py
DIDDoc.set
def set(self, item: Union[Service, PublicKey]) -> 'DIDDoc':
    """
    Add or replace service or public key; return current DIDDoc.

    Raise BadDIDDocItem if input item is neither service nor public key.

    :param item: service or public key to set
    :return: current DIDDoc
    """
    if isinstance(item, Service):
        self.service[item.id] = item
    elif isinstance(item, PublicKey):
        self.pubkey[item.id] = item
    else:
        raise BadDIDDocItem('Cannot add item {} to DIDDoc on DID {}'.format(item, self.did))

    # bug fix: docstring and '-> DIDDoc' annotation promise the current DIDDoc
    # (fluent chaining), but the original fell off the end returning None
    return self
python
def set(self, item: Union[Service, PublicKey]) -> 'DIDDoc': """ Add or replace service or public key; return current DIDDoc. Raise BadDIDDocItem if input item is neither service nor public key. :param item: service or public key to set :return: current DIDDoc """ if isinstance(item, Service): self.service[item.id] = item elif isinstance(item, PublicKey): self.pubkey[item.id] = item else: raise BadDIDDocItem('Cannot add item {} to DIDDoc on DID {}'.format(item, self.did))
[ "def", "set", "(", "self", ",", "item", ":", "Union", "[", "Service", ",", "PublicKey", "]", ")", "->", "'DIDDoc'", ":", "if", "isinstance", "(", "item", ",", "Service", ")", ":", "self", ".", "service", "[", "item", ".", "id", "]", "=", "item", ...
Add or replace service or public key; return current DIDDoc. Raise BadDIDDocItem if input item is neither service nor public key. :param item: service or public key to set :return: current DIDDoc
[ "Add", "or", "replace", "service", "or", "public", "key", ";", "return", "current", "DIDDoc", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/diddoc.py#L96-L111
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/diddoc.py
DIDDoc.serialize
def serialize(self) -> dict:
    """
    Dump current object to a JSON-compatible dictionary.

    Note: return annotation corrected from str to dict — the body builds and
    returns a dict, as the docstring already stated.

    :return: dict representation of current DIDDoc
    """
    return {
        '@context': DIDDoc.CONTEXT,
        'id': canon_ref(self.did, self.did),
        'publicKey': [pubkey.to_dict() for pubkey in self.pubkey.values()],
        'authentication': [{
            'type': pubkey.type.authn_type,
            'publicKey': canon_ref(self.did, pubkey.id)
        } for pubkey in self.pubkey.values() if pubkey.authn],
        'service': [service.to_dict() for service in self.service.values()]
    }
python
def serialize(self) -> str: """ Dump current object to a JSON-compatible dictionary. :return: dict representation of current DIDDoc """ return { '@context': DIDDoc.CONTEXT, 'id': canon_ref(self.did, self.did), 'publicKey': [pubkey.to_dict() for pubkey in self.pubkey.values()], 'authentication': [{ 'type': pubkey.type.authn_type, 'publicKey': canon_ref(self.did, pubkey.id) } for pubkey in self.pubkey.values() if pubkey.authn], 'service': [service.to_dict() for service in self.service.values()] }
[ "def", "serialize", "(", "self", ")", "->", "str", ":", "return", "{", "'@context'", ":", "DIDDoc", ".", "CONTEXT", ",", "'id'", ":", "canon_ref", "(", "self", ".", "did", ",", "self", ".", "did", ")", ",", "'publicKey'", ":", "[", "pubkey", ".", "...
Dump current object to a JSON-compatible dictionary. :return: dict representation of current DIDDoc
[ "Dump", "current", "object", "to", "a", "JSON", "-", "compatible", "dictionary", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/diddoc.py#L113-L129
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/diddoc.py
DIDDoc.add_service_pubkeys
def add_service_pubkeys(self, service: dict, tags: Union[Sequence[str], str]) -> List[PublicKey]:
    """
    Add public keys specified in service. Return public keys so discovered.

    Raise AbsentDIDDocItem for public key reference not present in DID document.

    :param service: service from DID document
    :param tags: potential tags marking public keys of type of interest - the standard is still coalescing
    :return: list of public keys that service specification in DID document identifies.
    """
    rv = []
    # accept a single tag or a sequence of tags uniformly
    for tag in [tags] if isinstance(tags, str) else list(tags):
        for svc_key in service.get(tag, {}):
            canon_key = canon_ref(self.did, svc_key)
            pubkey = None
            if '#' in svc_key:  # fragment: a reference into a DID document
                if canon_key in self.pubkey:
                    pubkey = self.pubkey[canon_key]
                else:  # service key refers to another DID doc
                    LOGGER.debug(
                        'DIDDoc.add_service_pubkeys <!< DID document %s has no public key %s',
                        self.did,
                        svc_key)
                    raise AbsentDIDDocItem('DID document {} has no public key {}'.format(self.did, svc_key))
            else:
                # bare key value: reuse an existing key matching by value,
                # otherwise (for/else) mint and register a new one
                for existing_pubkey in self.pubkey.values():
                    if existing_pubkey.value == svc_key:
                        pubkey = existing_pubkey
                        break
                else:
                    pubkey = PublicKey(
                        self.did,
                        ident=svc_key[-9:-1],  # industrial-grade uniqueness
                        value=svc_key)
                    self._pubkey[pubkey.id] = pubkey
            if pubkey and pubkey not in rv:  # perverse case: could specify same key multiple ways; append once
                rv.append(pubkey)
    return rv
python
def add_service_pubkeys(self, service: dict, tags: Union[Sequence[str], str]) -> List[PublicKey]: """ Add public keys specified in service. Return public keys so discovered. Raise AbsentDIDDocItem for public key reference not present in DID document. :param service: service from DID document :param tags: potential tags marking public keys of type of interest - the standard is still coalescing :return: list of public keys that service specification in DID document identifies. """ rv = [] for tag in [tags] if isinstance(tags, str) else list(tags): for svc_key in service.get(tag, {}): canon_key = canon_ref(self.did, svc_key) pubkey = None if '#' in svc_key: if canon_key in self.pubkey: pubkey = self.pubkey[canon_key] else: # service key refers to another DID doc LOGGER.debug( 'DIDDoc.add_service_pubkeys <!< DID document %s has no public key %s', self.did, svc_key) raise AbsentDIDDocItem('DID document {} has no public key {}'.format(self.did, svc_key)) else: for existing_pubkey in self.pubkey.values(): if existing_pubkey.value == svc_key: pubkey = existing_pubkey break else: pubkey = PublicKey( self.did, ident=svc_key[-9:-1], # industrial-grade uniqueness value=svc_key) self._pubkey[pubkey.id] = pubkey if pubkey and pubkey not in rv: # perverse case: could specify same key multiple ways; append once rv.append(pubkey) return rv
[ "def", "add_service_pubkeys", "(", "self", ",", "service", ":", "dict", ",", "tags", ":", "Union", "[", "Sequence", "[", "str", "]", ",", "str", "]", ")", "->", "List", "[", "PublicKey", "]", ":", "rv", "=", "[", "]", "for", "tag", "in", "[", "ta...
Add public keys specified in service. Return public keys so discovered. Raise AbsentDIDDocItem for public key reference not present in DID document. :param service: service from DID document :param tags: potential tags marking public keys of type of interest - the standard is still coalescing :return: list of public keys that service specification in DID document identifies.
[ "Add", "public", "keys", "specified", "in", "service", ".", "Return", "public", "keys", "so", "discovered", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/diddoc.py#L140-L182
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/diddoc.py
DIDDoc.deserialize
def deserialize(cls, did_doc: dict) -> 'DIDDoc':
    """
    Construct DIDDoc object from dict representation.

    Raise BadIdentifier for bad DID.

    :param did_doc: DIDDoc dict representation.
    :return: DIDDoc from input json.
    """
    rv = None
    if 'id' in did_doc:
        rv = DIDDoc(did_doc['id'])
    else:  # get DID to serve as DID document identifier from first public key
        if 'publicKey' not in did_doc:
            LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document')
            raise AbsentDIDDocItem('No identifier in DID document')
        for pubkey in did_doc['publicKey']:
            pubkey_did = canon_did(resource(pubkey['id']))
            if ok_did(pubkey_did):
                rv = DIDDoc(pubkey_did)
                break
        else:  # for/else: no public key yielded a usable DID either
            LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document')
            raise AbsentDIDDocItem('No identifier in DID document')
    for pubkey in did_doc['publicKey']:  # include public keys and authentication keys by reference
        pubkey_type = PublicKeyType.get(pubkey['type'])
        # a key is an authentication key iff some authentication entry references it by canonical id
        authn = any(
            canon_ref(rv.did, ak.get('publicKey', '')) == canon_ref(rv.did, pubkey['id'])
            for ak in did_doc.get('authentication', {}) if isinstance(ak.get('publicKey', None), str))
        key = PublicKey(  # initialization canonicalizes id
            rv.did,
            pubkey['id'],
            pubkey[pubkey_type.specifier],
            pubkey_type,
            canon_did(pubkey['controller']),
            authn)
        rv.pubkey[key.id] = key
    for akey in did_doc.get('authentication', {}):  # include embedded authentication keys
        pk_ref = akey.get('publicKey', None)
        if pk_ref:
            pass  # got it already with public keys
        else:
            pubkey_type = PublicKeyType.get(akey['type'])
            key = PublicKey(  # initialization canonicalized id
                rv.did,
                akey['id'],
                akey[pubkey_type.specifier],
                pubkey_type,
                canon_did(akey['controller']),
                True)
            rv.pubkey[key.id] = key
    for service in did_doc.get('service', {}):
        endpoint = service['serviceEndpoint']
        svc = Service(  # initialization canonicalizes id
            rv.did,
            service.get('id', canon_ref(rv.did, 'assigned-service-{}'.format(len(rv.service)), ';')),
            service['type'],
            rv.add_service_pubkeys(service, 'recipientKeys'),
            rv.add_service_pubkeys(service, ['mediatorKeys', 'routingKeys']),
            canon_ref(rv.did, endpoint, ';') if ';' in endpoint else endpoint,
            service.get('priority', None))
        rv.service[svc.id] = svc
    return rv
python
def deserialize(cls, did_doc: dict) -> 'DIDDoc': """ Construct DIDDoc object from dict representation. Raise BadIdentifier for bad DID. :param did_doc: DIDDoc dict reprentation. :return: DIDDoc from input json. """ rv = None if 'id' in did_doc: rv = DIDDoc(did_doc['id']) else: # get DID to serve as DID document identifier from first public key if 'publicKey' not in did_doc: LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document') raise AbsentDIDDocItem('No identifier in DID document') for pubkey in did_doc['publicKey']: pubkey_did = canon_did(resource(pubkey['id'])) if ok_did(pubkey_did): rv = DIDDoc(pubkey_did) break else: LOGGER.debug('DIDDoc.deserialize <!< no identifier in DID document') raise AbsentDIDDocItem('No identifier in DID document') for pubkey in did_doc['publicKey']: # include public keys and authentication keys by reference pubkey_type = PublicKeyType.get(pubkey['type']) authn = any( canon_ref(rv.did, ak.get('publicKey', '')) == canon_ref(rv.did, pubkey['id']) for ak in did_doc.get('authentication', {}) if isinstance(ak.get('publicKey', None), str)) key = PublicKey( # initialization canonicalizes id rv.did, pubkey['id'], pubkey[pubkey_type.specifier], pubkey_type, canon_did(pubkey['controller']), authn) rv.pubkey[key.id] = key for akey in did_doc.get('authentication', {}): # include embedded authentication keys pk_ref = akey.get('publicKey', None) if pk_ref: pass # got it already with public keys else: pubkey_type = PublicKeyType.get(akey['type']) key = PublicKey( # initialization canonicalized id rv.did, akey['id'], akey[pubkey_type.specifier], pubkey_type, canon_did(akey['controller']), True) rv.pubkey[key.id] = key for service in did_doc.get('service', {}): endpoint = service['serviceEndpoint'] svc = Service( # initialization canonicalizes id rv.did, service.get('id', canon_ref(rv.did, 'assigned-service-{}'.format(len(rv.service)), ';')), service['type'], rv.add_service_pubkeys(service, 'recipientKeys'), 
rv.add_service_pubkeys(service, ['mediatorKeys', 'routingKeys']), canon_ref(rv.did, endpoint, ';') if ';' in endpoint else endpoint, service.get('priority', None)) rv.service[svc.id] = svc return rv
[ "def", "deserialize", "(", "cls", ",", "did_doc", ":", "dict", ")", "->", "'DIDDoc'", ":", "rv", "=", "None", "if", "'id'", "in", "did_doc", ":", "rv", "=", "DIDDoc", "(", "did_doc", "[", "'id'", "]", ")", "else", ":", "# get DID to serve as DID document...
Construct DIDDoc object from dict representation. Raise BadIdentifier for bad DID. :param did_doc: DIDDoc dict reprentation. :return: DIDDoc from input json.
[ "Construct", "DIDDoc", "object", "from", "dict", "representation", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/diddoc.py#L185-L252
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/protocol.py
Protocol.get
def get(version: str) -> 'Protocol':
    """
    Map a protocol version value ('1.6' etc.) to its Protocol enum instance.

    Any value other than the 1.3 version name maps to the default protocol.
    """
    if version == Protocol.V_13.value.name:
        return Protocol.V_13
    return Protocol.DEFAULT
python
def get(version: str) -> 'Protocol': """ Return enum instance corresponding to input version value ('1.6' etc.) """ return Protocol.V_13 if version == Protocol.V_13.value.name else Protocol.DEFAULT
[ "def", "get", "(", "version", ":", "str", ")", "->", "'Protocol'", ":", "return", "Protocol", ".", "V_13", "if", "version", "==", "Protocol", ".", "V_13", ".", "value", ".", "name", "else", "Protocol", ".", "DEFAULT" ]
Return enum instance corresponding to input version value ('1.6' etc.)
[ "Return", "enum", "instance", "corresponding", "to", "input", "version", "value", "(", "1", ".", "6", "etc", ".", ")" ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/protocol.py#L43-L48
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/protocol.py
Protocol.cd_id_tag
def cd_id_tag(self, for_box_id: bool = False) -> str:
    """
    Return (place-holder) credential definition identifier tag for the current
    version of node protocol. At present, von_anchor always uses the tag of
    'tag' when the protocol calls for one.

    :param for_box_id: whether to prefix a colon, if current protocol uses one,
        in constructing a cred def id or rev reg id.
    :return: cred def id tag
    """
    if not for_box_id:
        return 'tag'
    # protocol 1.3 box ids carry no tag component at all
    return '' if self == Protocol.V_13 else ':tag'
python
def cd_id_tag(self, for_box_id: bool = False) -> str: """ Return (place-holder) credential definition identifier tag for current version of node protocol. At present, von_anchor always uses the tag of 'tag' if the protocol calls for one. :param for_box_id: whether to prefix a colon, if current protocol uses one, in constructing a cred def id or rev reg id. :return: cred def id tag """ if for_box_id: return '' if self == Protocol.V_13 else ':tag' return 'tag'
[ "def", "cd_id_tag", "(", "self", ",", "for_box_id", ":", "bool", "=", "False", ")", "->", "str", ":", "if", "for_box_id", ":", "return", "''", "if", "self", "==", "Protocol", ".", "V_13", "else", "':tag'", "return", "'tag'" ]
Return (place-holder) credential definition identifier tag for current version of node protocol. At present, von_anchor always uses the tag of 'tag' if the protocol calls for one. :param for_box_id: whether to prefix a colon, if current protocol uses one, in constructing a cred def id or rev reg id. :return: cred def id tag
[ "Return", "(", "place", "-", "holder", ")", "credential", "definition", "identifier", "tag", "for", "current", "version", "of", "node", "protocol", ".", "At", "present", "von_anchor", "always", "uses", "the", "tag", "of", "tag", "if", "the", "protocol", "cal...
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/protocol.py#L62-L74
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/protocol.py
Protocol.cred_def_id
def cred_def_id(self, issuer_did: str, schema_seq_no: int) -> str:
    """
    Build credential definition identifier from input issuer DID and schema
    sequence number.

    :param issuer_did: DID of credential definition issuer
    :param schema_seq_no: schema sequence number
    :return: credential definition identifier
    """
    # '3' marks an indy cred def id; 'CL' is the signature type
    box_id_tag = self.cd_id_tag(True)
    return '{}:3:CL:{}{}'.format(issuer_did, schema_seq_no, box_id_tag)
python
def cred_def_id(self, issuer_did: str, schema_seq_no: int) -> str: """ Return credential definition identifier for input issuer DID and schema sequence number. :param issuer_did: DID of credential definition issuer :param schema_seq_no: schema sequence number :return: credential definition identifier """ return '{}:3:CL:{}{}'.format( # 3 marks indy cred def id, CL is sig type issuer_did, schema_seq_no, self.cd_id_tag(True))
[ "def", "cred_def_id", "(", "self", ",", "issuer_did", ":", "str", ",", "schema_seq_no", ":", "int", ")", "->", "str", ":", "return", "'{}:3:CL:{}{}'", ".", "format", "(", "# 3 marks indy cred def id, CL is sig type", "issuer_did", ",", "schema_seq_no", ",", "self"...
Return credential definition identifier for input issuer DID and schema sequence number. :param issuer_did: DID of credential definition issuer :param schema_seq_no: schema sequence number :return: credential definition identifier
[ "Return", "credential", "definition", "identifier", "for", "input", "issuer", "DID", "and", "schema", "sequence", "number", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/protocol.py#L76-L88
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/protocol.py
Protocol.txn_data2schema_key
def txn_data2schema_key(self, txn: dict) -> SchemaKey:
    """
    Extract schema key (origin DID, name, version) from ledger transaction data.

    :param txn: get-schema transaction (by sequence number)
    :return: schema key identified
    """
    if self == Protocol.V_13:
        return SchemaKey(txn['identifier'], txn['data']['name'], txn['data']['version'])

    # later protocols nest the payload under 'txn';
    # the input may have already run through txn2data() below
    payload = txn.get('txn', None) or txn
    return SchemaKey(
        payload['metadata']['from'],
        payload['data']['data']['name'],
        payload['data']['data']['version'])
python
def txn_data2schema_key(self, txn: dict) -> SchemaKey: """ Return schema key from ledger transaction data. :param txn: get-schema transaction (by sequence number) :return: schema key identified """ rv = None if self == Protocol.V_13: rv = SchemaKey(txn['identifier'], txn['data']['name'], txn['data']['version']) else: txn_txn = txn.get('txn', None) or txn # may have already run this txn through txn2data() below rv = SchemaKey( txn_txn['metadata']['from'], txn_txn['data']['data']['name'], txn_txn['data']['data']['version']) return rv
[ "def", "txn_data2schema_key", "(", "self", ",", "txn", ":", "dict", ")", "->", "SchemaKey", ":", "rv", "=", "None", "if", "self", "==", "Protocol", ".", "V_13", ":", "rv", "=", "SchemaKey", "(", "txn", "[", "'identifier'", "]", ",", "txn", "[", "'dat...
Return schema key from ledger transaction data. :param txn: get-schema transaction (by sequence number) :return: schema key identified
[ "Return", "schema", "key", "from", "ledger", "transaction", "data", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/protocol.py#L90-L108
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/protocol.py
Protocol.txn2data
def txn2data(self, txn: dict) -> str:
    """
    Given a ledger transaction, return its data as json.

    :param txn: transaction as dict
    :return: transaction data json
    """
    if self == Protocol.V_13:
        return json.dumps(txn['result'].get('data', {}))

    # ledger answers "data": null for no such txn: coalesce to empty dict
    data = txn['result'].get('data', {}) or {}
    return json.dumps(data.get('txn', {}))
python
def txn2data(self, txn: dict) -> str: """ Given ledger transaction, return its data json. :param txn: transaction as dict :return: transaction data json """ rv_json = json.dumps({}) if self == Protocol.V_13: rv_json = json.dumps(txn['result'].get('data', {})) else: rv_json = json.dumps((txn['result'].get('data', {}) or {}).get('txn', {})) # "data": null for no such txn return rv_json
[ "def", "txn2data", "(", "self", ",", "txn", ":", "dict", ")", "->", "str", ":", "rv_json", "=", "json", ".", "dumps", "(", "{", "}", ")", "if", "self", "==", "Protocol", ".", "V_13", ":", "rv_json", "=", "json", ".", "dumps", "(", "txn", "[", "...
Given ledger transaction, return its data json. :param txn: transaction as dict :return: transaction data json
[ "Given", "ledger", "transaction", "return", "its", "data", "json", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/protocol.py#L110-L124
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/protocol.py
Protocol.txn2epoch
def txn2epoch(self, txn: dict) -> int:
    """
    Given a ledger transaction, return its epoch time.

    :param txn: transaction as dict
    :return: transaction time
    """
    result = txn['result']
    if self == Protocol.V_13:
        return result['txnTime']
    # later protocols bury the timestamp in transaction metadata
    return result['txnMetadata']['txnTime']
python
def txn2epoch(self, txn: dict) -> int: """ Given ledger transaction, return its epoch time. :param txn: transaction as dict :return: transaction time """ rv = None if self == Protocol.V_13: rv = txn['result']['txnTime'] else: rv = txn['result']['txnMetadata']['txnTime'] return rv
[ "def", "txn2epoch", "(", "self", ",", "txn", ":", "dict", ")", "->", "int", ":", "rv", "=", "None", "if", "self", "==", "Protocol", ".", "V_13", ":", "rv", "=", "txn", "[", "'result'", "]", "[", "'txnTime'", "]", "else", ":", "rv", "=", "txn", ...
Given ledger transaction, return its epoch time. :param txn: transaction as dict :return: transaction time
[ "Given", "ledger", "transaction", "return", "its", "epoch", "time", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/protocol.py#L126-L140
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/protocol.py
Protocol.genesis_host_port
def genesis_host_port(self, genesis_txn: dict) -> tuple:
    """
    Given a genesis transaction, return its node host and port.

    :param genesis_txn: genesis transaction as dict
    :return: (node host, node port) tuple
    """
    if self == Protocol.V_13:
        txn_data = genesis_txn['data']
    else:
        txn_data = genesis_txn['txn']['data']['data']
    return (txn_data['node_ip'], txn_data['node_port'])
python
def genesis_host_port(self, genesis_txn: dict) -> tuple: """ Given a genesis transaction, return its node host and port. :param genesis_txn: genesis transaction as dict :return: node host and port """ txn_data = genesis_txn['data'] if self == Protocol.V_13 else genesis_txn['txn']['data']['data'] return (txn_data['node_ip'], txn_data['node_port'])
[ "def", "genesis_host_port", "(", "self", ",", "genesis_txn", ":", "dict", ")", "->", "tuple", ":", "txn_data", "=", "genesis_txn", "[", "'data'", "]", "if", "self", "==", "Protocol", ".", "V_13", "else", "genesis_txn", "[", "'txn'", "]", "[", "'data'", "...
Given a genesis transaction, return its node host and port. :param genesis_txn: genesis transaction as dict :return: node host and port
[ "Given", "a", "genesis", "transaction", "return", "its", "node", "host", "and", "port", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/protocol.py#L142-L151
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.open
async def open(self) -> 'Tails':
    """
    Open a blob-storage reader handle on the tails file; return current object.

    :return: current object
    """
    LOGGER.debug('Tails.open >>>')

    self._reader_handle = await blob_storage.open_reader('default', self._tails_config_json)

    LOGGER.debug('Tails.open <<<')
    return self
python
async def open(self) -> 'Tails': """ Open reader handle and return current object. :return: current object """ LOGGER.debug('Tails.open >>>') self._reader_handle = await blob_storage.open_reader('default', self._tails_config_json) LOGGER.debug('Tails.open <<<') return self
[ "async", "def", "open", "(", "self", ")", "->", "'Tails'", ":", "LOGGER", ".", "debug", "(", "'Tails.open >>>'", ")", "self", ".", "_reader_handle", "=", "await", "blob_storage", ".", "open_reader", "(", "'default'", ",", "self", ".", "_tails_config_json", "...
Open reader handle and return current object. :return: current object
[ "Open", "reader", "handle", "and", "return", "current", "object", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L97-L109
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.ok_hash
def ok_hash(token: str) -> bool:
    """
    Whether input token looks like a valid tails hash: 42-44 characters
    drawn from the B58 alphabet.

    :param token: candidate string
    :return: whether input token looks like a valid tails hash
    """
    LOGGER.debug('Tails.ok_hash >>> token: %s', token)

    pattern = '[{}]{{42,44}}$'.format(B58)
    rv = bool(re.match(pattern, token))

    LOGGER.debug('Tails.ok_hash <<< %s', rv)
    return rv
python
def ok_hash(token: str) -> bool: """ Whether input token looks like a valid tails hash. :param token: candidate string :return: whether input token looks like a valid tails hash """ LOGGER.debug('Tails.ok_hash >>> token: %s', token) rv = re.match('[{}]{{42,44}}$'.format(B58), token) is not None LOGGER.debug('Tails.ok_hash <<< %s', rv) return rv
[ "def", "ok_hash", "(", "token", ":", "str", ")", "->", "bool", ":", "LOGGER", ".", "debug", "(", "'Tails.ok_hash >>> token: %s'", ",", "token", ")", "rv", "=", "re", ".", "match", "(", "'[{}]{{42,44}}$'", ".", "format", "(", "B58", ")", ",", "token", "...
Whether input token looks like a valid tails hash. :param token: candidate string :return: whether input token looks like a valid tails hash
[ "Whether", "input", "token", "looks", "like", "a", "valid", "tails", "hash", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L112-L124
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.associate
def associate(base_dir: str, rr_id: str, tails_hash: str) -> None:
    """
    Create symbolic link to tails file named tails_hash for rev reg id rr_id.

    Raise BadIdentifier on malformed rev reg id or tails hash.

    :param base_dir: base directory for tails files, thereafter split by cred def id
    :param rr_id: rev reg id
    :param tails_hash: hash of tails file, serving as file name
    """
    LOGGER.debug('Tails.associate >>> base_dir: %s, rr_id: %s, tails_hash: %s', base_dir, rr_id, tails_hash)

    if not ok_rev_reg_id(rr_id):
        LOGGER.debug('Tails.associate <!< Bad rev reg id %s', rr_id)
        raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
    if not Tails.ok_hash(tails_hash):
        LOGGER.debug('Tails.associate <!< Bad tails hash %s', tails_hash)
        raise BadIdentifier('Bad tails hash {}'.format(tails_hash))

    cd_id = rev_reg_id2cred_def_id(rr_id)
    directory = join(base_dir, cd_id)
    makedirs(directory, exist_ok=True)

    # bug fix: original did chdir(directory); symlink(...); chdir(cwd) —
    # mutating process-wide cwd and leaving it changed if symlink() raised
    # (e.g. link already exists). Creating the link at its full path with
    # the same relative target yields an identical symlink with no chdir.
    symlink(tails_hash, join(directory, rr_id))

    LOGGER.debug('Tails.associate <<<')
python
def associate(base_dir: str, rr_id: str, tails_hash: str) -> None: """ Create symbolic link to tails file named tails_hash for rev reg id rr_id. :param rr_id: rev reg id :param tails_hash: hash of tails file, serving as file name """ LOGGER.debug('Tails.associate >>> base_dir: %s, rr_id: %s, tails_hash: %s', base_dir, rr_id, tails_hash) if not ok_rev_reg_id(rr_id): LOGGER.debug('Tails.associate <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) if not Tails.ok_hash(tails_hash): LOGGER.debug('Tails.associate <!< Bad tails hash %s', tails_hash) raise BadIdentifier('Bad tails hash {}'.format(tails_hash)) cd_id = rev_reg_id2cred_def_id(rr_id) directory = join(base_dir, cd_id) cwd = getcwd() makedirs(directory, exist_ok=True) chdir(directory) symlink(tails_hash, rr_id) chdir(cwd) LOGGER.debug('Tails.associate <<<')
[ "def", "associate", "(", "base_dir", ":", "str", ",", "rr_id", ":", "str", ",", "tails_hash", ":", "str", ")", "->", "None", ":", "LOGGER", ".", "debug", "(", "'Tails.associate >>> base_dir: %s, rr_id: %s, tails_hash: %s'", ",", "base_dir", ",", "rr_id", ",", ...
Create symbolic link to tails file named tails_hash for rev reg id rr_id. :param rr_id: rev reg id :param tails_hash: hash of tails file, serving as file name
[ "Create", "symbolic", "link", "to", "tails", "file", "named", "tails_hash", "for", "rev", "reg", "id", "rr_id", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L127-L153
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.dir
def dir(base_dir: str, rr_id: str) -> str: """ Return correct subdirectory of input base dir for artifacts corresponding to input rev reg id. :param base_dir: base directory for tails files, thereafter split by cred def id :param rr_id: rev reg id """ LOGGER.debug('Tails.dir >>> base_dir: %s, rr_id: %s', base_dir, rr_id) if not ok_rev_reg_id(rr_id): LOGGER.debug('Tails.dir <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) rv = join(base_dir, rev_reg_id2cred_def_id(rr_id)) LOGGER.debug('Tails.dir <<< %s', rv) return rv
python
def dir(base_dir: str, rr_id: str) -> str: """ Return correct subdirectory of input base dir for artifacts corresponding to input rev reg id. :param base_dir: base directory for tails files, thereafter split by cred def id :param rr_id: rev reg id """ LOGGER.debug('Tails.dir >>> base_dir: %s, rr_id: %s', base_dir, rr_id) if not ok_rev_reg_id(rr_id): LOGGER.debug('Tails.dir <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) rv = join(base_dir, rev_reg_id2cred_def_id(rr_id)) LOGGER.debug('Tails.dir <<< %s', rv) return rv
[ "def", "dir", "(", "base_dir", ":", "str", ",", "rr_id", ":", "str", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Tails.dir >>> base_dir: %s, rr_id: %s'", ",", "base_dir", ",", "rr_id", ")", "if", "not", "ok_rev_reg_id", "(", "rr_id", ")", ":", ...
Return correct subdirectory of input base dir for artifacts corresponding to input rev reg id. :param base_dir: base directory for tails files, thereafter split by cred def id :param rr_id: rev reg id
[ "Return", "correct", "subdirectory", "of", "input", "base", "dir", "for", "artifacts", "corresponding", "to", "input", "rev", "reg", "id", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L156-L172
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.linked
def linked(base_dir: str, rr_id: str) -> str: """ Get, from the specified directory, the path to the tails file associated with the input revocation registry identifier, or None for no such file. :param base_dir: base directory for tails files, thereafter split by cred def id :param rr_id: rev reg id :return: (stringified) path to tails file of interest, or None for no such file. """ LOGGER.debug('Tails.linked >>> base_dir: %s, rr_id: %s', base_dir, rr_id) if not ok_rev_reg_id(rr_id): LOGGER.debug('Tails.linked <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) cd_id = rev_reg_id2cred_def_id(rr_id) link = join(base_dir, cd_id, rr_id) rv = join(base_dir, cd_id, readlink(link)) if islink(link) else None LOGGER.debug('Tails.linked <<< %s', rv) return rv
python
def linked(base_dir: str, rr_id: str) -> str: """ Get, from the specified directory, the path to the tails file associated with the input revocation registry identifier, or None for no such file. :param base_dir: base directory for tails files, thereafter split by cred def id :param rr_id: rev reg id :return: (stringified) path to tails file of interest, or None for no such file. """ LOGGER.debug('Tails.linked >>> base_dir: %s, rr_id: %s', base_dir, rr_id) if not ok_rev_reg_id(rr_id): LOGGER.debug('Tails.linked <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) cd_id = rev_reg_id2cred_def_id(rr_id) link = join(base_dir, cd_id, rr_id) rv = join(base_dir, cd_id, readlink(link)) if islink(link) else None LOGGER.debug('Tails.linked <<< %s', rv) return rv
[ "def", "linked", "(", "base_dir", ":", "str", ",", "rr_id", ":", "str", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Tails.linked >>> base_dir: %s, rr_id: %s'", ",", "base_dir", ",", "rr_id", ")", "if", "not", "ok_rev_reg_id", "(", "rr_id", ")", "...
Get, from the specified directory, the path to the tails file associated with the input revocation registry identifier, or None for no such file. :param base_dir: base directory for tails files, thereafter split by cred def id :param rr_id: rev reg id :return: (stringified) path to tails file of interest, or None for no such file.
[ "Get", "from", "the", "specified", "directory", "the", "path", "to", "the", "tails", "file", "associated", "with", "the", "input", "revocation", "registry", "identifier", "or", "None", "for", "no", "such", "file", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L175-L196
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.links
def links(base_dir: str, issuer_did: str = None) -> set: """ Return set of all paths to symbolic links (rev reg ids) associating their respective tails files, in specified base tails directory recursively (omitting the .hopper subdirectory), on input issuer DID if specified. :param base_dir: base directory for tails files, thereafter split by cred def id :param issuer_did: issuer DID of interest :return: set of paths to symbolic links associating tails files """ LOGGER.debug('Tails.links >>> base_dir: %s, issuer_did: %s', base_dir, issuer_did) if issuer_did and not ok_did(issuer_did): LOGGER.debug('Tails.links <!< Bad DID %s', issuer_did) raise BadIdentifier('Bad DID {}'.format(issuer_did)) rv = set() for dir_path, dir_names, file_names in walk(base_dir, topdown=True): dir_names[:] = [d for d in dir_names if not d.startswith('.')] for file_name in file_names: if islink(join(dir_path, file_name)) and (not issuer_did or ok_rev_reg_id(file_name, issuer_did)): rv.add(join(dir_path, file_name)) LOGGER.debug('Tails.links <<< %s', rv) return rv
python
def links(base_dir: str, issuer_did: str = None) -> set: """ Return set of all paths to symbolic links (rev reg ids) associating their respective tails files, in specified base tails directory recursively (omitting the .hopper subdirectory), on input issuer DID if specified. :param base_dir: base directory for tails files, thereafter split by cred def id :param issuer_did: issuer DID of interest :return: set of paths to symbolic links associating tails files """ LOGGER.debug('Tails.links >>> base_dir: %s, issuer_did: %s', base_dir, issuer_did) if issuer_did and not ok_did(issuer_did): LOGGER.debug('Tails.links <!< Bad DID %s', issuer_did) raise BadIdentifier('Bad DID {}'.format(issuer_did)) rv = set() for dir_path, dir_names, file_names in walk(base_dir, topdown=True): dir_names[:] = [d for d in dir_names if not d.startswith('.')] for file_name in file_names: if islink(join(dir_path, file_name)) and (not issuer_did or ok_rev_reg_id(file_name, issuer_did)): rv.add(join(dir_path, file_name)) LOGGER.debug('Tails.links <<< %s', rv) return rv
[ "def", "links", "(", "base_dir", ":", "str", ",", "issuer_did", ":", "str", "=", "None", ")", "->", "set", ":", "LOGGER", ".", "debug", "(", "'Tails.links >>> base_dir: %s, issuer_did: %s'", ",", "base_dir", ",", "issuer_did", ")", "if", "issuer_did", "and", ...
Return set of all paths to symbolic links (rev reg ids) associating their respective tails files, in specified base tails directory recursively (omitting the .hopper subdirectory), on input issuer DID if specified. :param base_dir: base directory for tails files, thereafter split by cred def id :param issuer_did: issuer DID of interest :return: set of paths to symbolic links associating tails files
[ "Return", "set", "of", "all", "paths", "to", "symbolic", "links", "(", "rev", "reg", "ids", ")", "associating", "their", "respective", "tails", "files", "in", "specified", "base", "tails", "directory", "recursively", "(", "omitting", "the", ".", "hopper", "s...
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L199-L224
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.unlinked
def unlinked(base_dir: str) -> set: """ Return all paths to tails files, in specified tails base directory recursively (omitting the .hopper subdirectory), without symbolic links associating revocation registry identifiers. At an Issuer, tails files should not persist long without revocation registry identifier association via symbolic link. At a HolderProver, a newly downloaded tails file stays unlinked until the anchor stores a credential or creates a proof needing it, or else the anchor restarts. :param base_dir: base directory for tails files, thereafter split by cred def id :return: set of paths to tails files with no local symbolic links to them """ LOGGER.debug('Tails.unlinked >>> base_dir: %s', base_dir) rv = set() for dir_path, dir_names, file_names in walk(base_dir, topdown=True): dir_names[:] = [d for d in dir_names if not d.startswith('.')] for file_name in file_names: if isfile(join(dir_path, file_name)) and Tails.ok_hash(file_name): rv.add(join(dir_path, file_name)) rv -= {join(dirname(path_link), readlink(path_link)) for path_link in Tails.links(base_dir)} LOGGER.debug('Tails.unlinked <<< %s', rv) return rv
python
def unlinked(base_dir: str) -> set: """ Return all paths to tails files, in specified tails base directory recursively (omitting the .hopper subdirectory), without symbolic links associating revocation registry identifiers. At an Issuer, tails files should not persist long without revocation registry identifier association via symbolic link. At a HolderProver, a newly downloaded tails file stays unlinked until the anchor stores a credential or creates a proof needing it, or else the anchor restarts. :param base_dir: base directory for tails files, thereafter split by cred def id :return: set of paths to tails files with no local symbolic links to them """ LOGGER.debug('Tails.unlinked >>> base_dir: %s', base_dir) rv = set() for dir_path, dir_names, file_names in walk(base_dir, topdown=True): dir_names[:] = [d for d in dir_names if not d.startswith('.')] for file_name in file_names: if isfile(join(dir_path, file_name)) and Tails.ok_hash(file_name): rv.add(join(dir_path, file_name)) rv -= {join(dirname(path_link), readlink(path_link)) for path_link in Tails.links(base_dir)} LOGGER.debug('Tails.unlinked <<< %s', rv) return rv
[ "def", "unlinked", "(", "base_dir", ":", "str", ")", "->", "set", ":", "LOGGER", ".", "debug", "(", "'Tails.unlinked >>> base_dir: %s'", ",", "base_dir", ")", "rv", "=", "set", "(", ")", "for", "dir_path", ",", "dir_names", ",", "file_names", "in", "walk",...
Return all paths to tails files, in specified tails base directory recursively (omitting the .hopper subdirectory), without symbolic links associating revocation registry identifiers. At an Issuer, tails files should not persist long without revocation registry identifier association via symbolic link. At a HolderProver, a newly downloaded tails file stays unlinked until the anchor stores a credential or creates a proof needing it, or else the anchor restarts. :param base_dir: base directory for tails files, thereafter split by cred def id :return: set of paths to tails files with no local symbolic links to them
[ "Return", "all", "paths", "to", "tails", "files", "in", "specified", "tails", "base", "directory", "recursively", "(", "omitting", "the", ".", "hopper", "subdirectory", ")", "without", "symbolic", "links", "associating", "revocation", "registry", "identifiers", "....
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L227-L253
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.next_tag
def next_tag(base_dir: str, cd_id: str) -> (str, int): """ Return the next tag name available for a new rev reg id on input cred def id in base directory, and suggested size of associated rev reg. :param base_dir: base directory for tails files, thereafter split by cred def id :param cd_id: credential definition identifier of interest :return: stringified least non-negative integer not yet used in a rev reg id associated with a tails file in base directory, and recommendation for next size to use """ LOGGER.debug('Tails.next_tag >>> base_dir: %s, cd_id: %s', base_dir, cd_id) if not ok_cred_def_id(cd_id): LOGGER.debug('Tails.next_tag <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) tag = 1 + max([int(rev_reg_id2tag(basename(f))) for f in Tails.links(base_dir) if cd_id in basename(f)] + [-1]) # -1: next tag is '0' if no tags so far size = min(2**(tag + 6), Tails.MAX_SIZE) rv = (tag, size) LOGGER.debug('Tails.next_tag <<< %s', rv) return rv
python
def next_tag(base_dir: str, cd_id: str) -> (str, int): """ Return the next tag name available for a new rev reg id on input cred def id in base directory, and suggested size of associated rev reg. :param base_dir: base directory for tails files, thereafter split by cred def id :param cd_id: credential definition identifier of interest :return: stringified least non-negative integer not yet used in a rev reg id associated with a tails file in base directory, and recommendation for next size to use """ LOGGER.debug('Tails.next_tag >>> base_dir: %s, cd_id: %s', base_dir, cd_id) if not ok_cred_def_id(cd_id): LOGGER.debug('Tails.next_tag <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) tag = 1 + max([int(rev_reg_id2tag(basename(f))) for f in Tails.links(base_dir) if cd_id in basename(f)] + [-1]) # -1: next tag is '0' if no tags so far size = min(2**(tag + 6), Tails.MAX_SIZE) rv = (tag, size) LOGGER.debug('Tails.next_tag <<< %s', rv) return rv
[ "def", "next_tag", "(", "base_dir", ":", "str", ",", "cd_id", ":", "str", ")", "->", "(", "str", ",", "int", ")", ":", "LOGGER", ".", "debug", "(", "'Tails.next_tag >>> base_dir: %s, cd_id: %s'", ",", "base_dir", ",", "cd_id", ")", "if", "not", "ok_cred_de...
Return the next tag name available for a new rev reg id on input cred def id in base directory, and suggested size of associated rev reg. :param base_dir: base directory for tails files, thereafter split by cred def id :param cd_id: credential definition identifier of interest :return: stringified least non-negative integer not yet used in a rev reg id associated with a tails file in base directory, and recommendation for next size to use
[ "Return", "the", "next", "tag", "name", "available", "for", "a", "new", "rev", "reg", "id", "on", "input", "cred", "def", "id", "in", "base", "directory", "and", "suggested", "size", "of", "associated", "rev", "reg", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L256-L279
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.current_rev_reg_id
def current_rev_reg_id(base_dir: str, cd_id: str) -> str: """ Return the current revocation registry identifier for input credential definition identifier, in input directory. Raise AbsentTails if no corresponding tails file, signifying no such revocation registry defined. :param base_dir: base directory for tails files, thereafter split by cred def id :param cd_id: credential definition identifier of interest :return: identifier for current revocation registry on input credential definition identifier """ LOGGER.debug('Tails.current_rev_reg_id >>> base_dir: %s, cd_id: %s', base_dir, cd_id) if not ok_cred_def_id(cd_id): LOGGER.debug('Tails.current_rev_reg_id <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) tags = [int(rev_reg_id2tag(basename(f))) for f in Tails.links(base_dir) if cd_id in basename(f)] if not tags: raise AbsentTails('No tails files present for cred def id {}'.format(cd_id)) rv = rev_reg_id(cd_id, str(max(tags))) # ensure 10 > 9, not '9' > '10' LOGGER.debug('Tails.current_rev_reg_id <<< %s', rv) return rv
python
def current_rev_reg_id(base_dir: str, cd_id: str) -> str: """ Return the current revocation registry identifier for input credential definition identifier, in input directory. Raise AbsentTails if no corresponding tails file, signifying no such revocation registry defined. :param base_dir: base directory for tails files, thereafter split by cred def id :param cd_id: credential definition identifier of interest :return: identifier for current revocation registry on input credential definition identifier """ LOGGER.debug('Tails.current_rev_reg_id >>> base_dir: %s, cd_id: %s', base_dir, cd_id) if not ok_cred_def_id(cd_id): LOGGER.debug('Tails.current_rev_reg_id <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) tags = [int(rev_reg_id2tag(basename(f))) for f in Tails.links(base_dir) if cd_id in basename(f)] if not tags: raise AbsentTails('No tails files present for cred def id {}'.format(cd_id)) rv = rev_reg_id(cd_id, str(max(tags))) # ensure 10 > 9, not '9' > '10' LOGGER.debug('Tails.current_rev_reg_id <<< %s', rv) return rv
[ "def", "current_rev_reg_id", "(", "base_dir", ":", "str", ",", "cd_id", ":", "str", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Tails.current_rev_reg_id >>> base_dir: %s, cd_id: %s'", ",", "base_dir", ",", "cd_id", ")", "if", "not", "ok_cred_def_id", "...
Return the current revocation registry identifier for input credential definition identifier, in input directory. Raise AbsentTails if no corresponding tails file, signifying no such revocation registry defined. :param base_dir: base directory for tails files, thereafter split by cred def id :param cd_id: credential definition identifier of interest :return: identifier for current revocation registry on input credential definition identifier
[ "Return", "the", "current", "revocation", "registry", "identifier", "for", "input", "credential", "definition", "identifier", "in", "input", "directory", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L282-L307
PSPC-SPAC-buyandsell/von_anchor
von_anchor/tails.py
Tails.path
def path(self) -> str: """ Accessor for (stringified) path to current tails file. :return: (stringified) path to current tails file. """ config = json.loads(self._tails_config_json) return join(config['base_dir'], config['file'])
python
def path(self) -> str: """ Accessor for (stringified) path to current tails file. :return: (stringified) path to current tails file. """ config = json.loads(self._tails_config_json) return join(config['base_dir'], config['file'])
[ "def", "path", "(", "self", ")", "->", "str", ":", "config", "=", "json", ".", "loads", "(", "self", ".", "_tails_config_json", ")", "return", "join", "(", "config", "[", "'base_dir'", "]", ",", "config", "[", "'file'", "]", ")" ]
Accessor for (stringified) path to current tails file. :return: (stringified) path to current tails file.
[ "Accessor", "for", "(", "stringified", ")", "path", "to", "current", "tails", "file", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/tails.py#L331-L339
PSPC-SPAC-buyandsell/von_anchor
von_anchor/indytween.py
encode
def encode(orig: Any) -> str: """ Encode credential attribute value, purely stringifying any int32 and leaving numeric int32 strings alone, but mapping any other input to a stringified 256-bit (but not 32-bit) integer. Predicates in indy-sdk operate on int32 values properly only when their encoded values match their raw values. :param orig: original value to encode :return: encoded value """ if isinstance(orig, int) and -I32_BOUND <= orig < I32_BOUND: return str(int(orig)) # python bools are ints try: i32orig = int(str(orig)) # don't encode floats as ints if -I32_BOUND <= i32orig < I32_BOUND: return str(i32orig) except (ValueError, TypeError): pass rv = int.from_bytes(sha256(raw(orig).encode()).digest(), 'big') while -I32_BOUND <= rv < I32_BOUND: rv = int.from_bytes(sha256(rv.encode()).digest(), 'big') # sha256 maps no 32-bit int to another: terminates return str(rv)
python
def encode(orig: Any) -> str: """ Encode credential attribute value, purely stringifying any int32 and leaving numeric int32 strings alone, but mapping any other input to a stringified 256-bit (but not 32-bit) integer. Predicates in indy-sdk operate on int32 values properly only when their encoded values match their raw values. :param orig: original value to encode :return: encoded value """ if isinstance(orig, int) and -I32_BOUND <= orig < I32_BOUND: return str(int(orig)) # python bools are ints try: i32orig = int(str(orig)) # don't encode floats as ints if -I32_BOUND <= i32orig < I32_BOUND: return str(i32orig) except (ValueError, TypeError): pass rv = int.from_bytes(sha256(raw(orig).encode()).digest(), 'big') while -I32_BOUND <= rv < I32_BOUND: rv = int.from_bytes(sha256(rv.encode()).digest(), 'big') # sha256 maps no 32-bit int to another: terminates return str(rv)
[ "def", "encode", "(", "orig", ":", "Any", ")", "->", "str", ":", "if", "isinstance", "(", "orig", ",", "int", ")", "and", "-", "I32_BOUND", "<=", "orig", "<", "I32_BOUND", ":", "return", "str", "(", "int", "(", "orig", ")", ")", "# python bools are i...
Encode credential attribute value, purely stringifying any int32 and leaving numeric int32 strings alone, but mapping any other input to a stringified 256-bit (but not 32-bit) integer. Predicates in indy-sdk operate on int32 values properly only when their encoded values match their raw values. :param orig: original value to encode :return: encoded value
[ "Encode", "credential", "attribute", "value", "purely", "stringifying", "any", "int32", "and", "leaving", "numeric", "int32", "strings", "alone", "but", "mapping", "any", "other", "input", "to", "a", "stringified", "256", "-", "bit", "(", "but", "not", "32", ...
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/indytween.py#L32-L56
PSPC-SPAC-buyandsell/von_anchor
von_anchor/indytween.py
Predicate.get
def get(relation: str) -> 'Predicate': """ Return enum instance corresponding to input relation string """ for pred in Predicate: if relation.upper() in (pred.value.fortran, pred.value.wql.upper(), pred.value.math): return pred return None
python
def get(relation: str) -> 'Predicate': """ Return enum instance corresponding to input relation string """ for pred in Predicate: if relation.upper() in (pred.value.fortran, pred.value.wql.upper(), pred.value.math): return pred return None
[ "def", "get", "(", "relation", ":", "str", ")", "->", "'Predicate'", ":", "for", "pred", "in", "Predicate", ":", "if", "relation", ".", "upper", "(", ")", "in", "(", "pred", ".", "value", ".", "fortran", ",", "pred", ".", "value", ".", "wql", ".", ...
Return enum instance corresponding to input relation string
[ "Return", "enum", "instance", "corresponding", "to", "input", "relation", "string" ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/indytween.py#L100-L108
PSPC-SPAC-buyandsell/von_anchor
von_anchor/indytween.py
Predicate.to_int
def to_int(value: Any) -> int: """ Cast a value as its equivalent int for indy predicate argument. Raise ValueError for any input but int, stringified int, or boolean. :param value: value to coerce. """ if isinstance(value, (bool, int)): return int(value) return int(str(value))
python
def to_int(value: Any) -> int: """ Cast a value as its equivalent int for indy predicate argument. Raise ValueError for any input but int, stringified int, or boolean. :param value: value to coerce. """ if isinstance(value, (bool, int)): return int(value) return int(str(value))
[ "def", "to_int", "(", "value", ":", "Any", ")", "->", "int", ":", "if", "isinstance", "(", "value", ",", "(", "bool", ",", "int", ")", ")", ":", "return", "int", "(", "value", ")", "return", "int", "(", "str", "(", "value", ")", ")" ]
Cast a value as its equivalent int for indy predicate argument. Raise ValueError for any input but int, stringified int, or boolean. :param value: value to coerce.
[ "Cast", "a", "value", "as", "its", "equivalent", "int", "for", "indy", "predicate", "argument", ".", "Raise", "ValueError", "for", "any", "input", "but", "int", "stringified", "int", "or", "boolean", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/indytween.py#L111-L121
PSPC-SPAC-buyandsell/von_anchor
von_anchor/indytween.py
Role.get
def get(token: Union[str, int] = None) -> 'Role': """ Return enum instance corresponding to input token. :param token: token identifying role to indy-sdk: 'STEWARD', 'TRUSTEE', 'TRUST_ANCHOR', '' or None :return: enum instance corresponding to input token """ if token is None: return Role.USER for role in Role: if role == Role.ROLE_REMOVE: continue # ROLE_REMOVE is not a sensible role to parse from any configuration if isinstance(token, int) and token in role.value: return role if str(token).upper() == role.name or token in (str(v) for v in role.value): # could be numeric string return role return None
python
def get(token: Union[str, int] = None) -> 'Role': """ Return enum instance corresponding to input token. :param token: token identifying role to indy-sdk: 'STEWARD', 'TRUSTEE', 'TRUST_ANCHOR', '' or None :return: enum instance corresponding to input token """ if token is None: return Role.USER for role in Role: if role == Role.ROLE_REMOVE: continue # ROLE_REMOVE is not a sensible role to parse from any configuration if isinstance(token, int) and token in role.value: return role if str(token).upper() == role.name or token in (str(v) for v in role.value): # could be numeric string return role return None
[ "def", "get", "(", "token", ":", "Union", "[", "str", ",", "int", "]", "=", "None", ")", "->", "'Role'", ":", "if", "token", "is", "None", ":", "return", "Role", ".", "USER", "for", "role", "in", "Role", ":", "if", "role", "==", "Role", ".", "R...
Return enum instance corresponding to input token. :param token: token identifying role to indy-sdk: 'STEWARD', 'TRUSTEE', 'TRUST_ANCHOR', '' or None :return: enum instance corresponding to input token
[ "Return", "enum", "instance", "corresponding", "to", "input", "token", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/indytween.py#L136-L155
PSPC-SPAC-buyandsell/von_anchor
von_anchor/indytween.py
Role.token
def token(self) -> str: """ Return token identifying role to indy-sdk. :return: token: 'STEWARD', 'TRUSTEE', 'TRUST_ANCHOR', or None (for USER) """ return self.value[0] if self in (Role.USER, Role.ROLE_REMOVE) else self.name
python
def token(self) -> str: """ Return token identifying role to indy-sdk. :return: token: 'STEWARD', 'TRUSTEE', 'TRUST_ANCHOR', or None (for USER) """ return self.value[0] if self in (Role.USER, Role.ROLE_REMOVE) else self.name
[ "def", "token", "(", "self", ")", "->", "str", ":", "return", "self", ".", "value", "[", "0", "]", "if", "self", "in", "(", "Role", ".", "USER", ",", "Role", ".", "ROLE_REMOVE", ")", "else", "self", ".", "name" ]
Return token identifying role to indy-sdk. :return: token: 'STEWARD', 'TRUSTEE', 'TRUST_ANCHOR', or None (for USER)
[ "Return", "token", "identifying", "role", "to", "indy", "-", "sdk", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/indytween.py#L167-L174
praekelt/django-ultracache
ultracache/decorators.py
cached_get
def cached_get(timeout, *params): """Decorator applied specifically to a view's get method""" def decorator(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(view_or_request, *args, **kwargs): # The type of the request gets muddled when using a function based # decorator. We must use a function based decorator so it can be # used in urls.py. request = getattr(view_or_request, "request", view_or_request) if not hasattr(_thread_locals, "ultracache_request"): setattr(_thread_locals, "ultracache_request", request) # If request not GET or HEAD never cache if request.method.lower() not in ("get", "head"): return view_func(view_or_request, *args, **kwargs) # If request contains messages never cache l = 0 try: l = len(request._messages) except (AttributeError, TypeError): pass if l: return view_func(view_or_request, *args, **kwargs) # Compute a cache key li = [str(view_or_request.__class__), view_func.__name__] # request.get_full_path is implicitly added it no other request # path is provided. get_full_path includes the querystring and is # the more conservative approach but makes it trivially easy for a # request to bust through the cache. 
if not set(params).intersection(set(( "request.get_full_path()", "request.path", "request.path_info" ))): li.append(request.get_full_path()) if "django.contrib.sites" in settings.INSTALLED_APPS: li.append(get_current_site_pk(request)) # Pre-sort kwargs keys = list(kwargs.keys()) keys.sort() for key in keys: li.append("%s,%s" % (key, kwargs[key])) # Extend cache key with custom variables for param in params: if not isinstance(param, str): param = str(param) li.append(eval(param)) s = ":".join([str(l) for l in li]) hashed = hashlib.md5(s.encode("utf-8")).hexdigest() cache_key = "ucache-get-%s" % hashed cached = cache.get(cache_key, None) if cached is None: # The get view as outermost caller may bluntly set _ultracache request._ultracache = [] response = view_func(view_or_request, *args, **kwargs) content = None if isinstance(response, TemplateResponse): content = response.render().rendered_content elif isinstance(response, HttpResponse): content = response.content if content is not None: headers = getattr(response, "_headers", {}) cache.set( cache_key, {"content": content, "headers": headers}, timeout ) cache_meta(request, cache_key) else: response = HttpResponse(cached["content"]) # Headers has a non-obvious format for k, v in cached["headers"].items(): response[v[0]] = v[1] return response return _wrapped_view return decorator
python
def cached_get(timeout, *params): """Decorator applied specifically to a view's get method""" def decorator(view_func): @wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(view_or_request, *args, **kwargs): # The type of the request gets muddled when using a function based # decorator. We must use a function based decorator so it can be # used in urls.py. request = getattr(view_or_request, "request", view_or_request) if not hasattr(_thread_locals, "ultracache_request"): setattr(_thread_locals, "ultracache_request", request) # If request not GET or HEAD never cache if request.method.lower() not in ("get", "head"): return view_func(view_or_request, *args, **kwargs) # If request contains messages never cache l = 0 try: l = len(request._messages) except (AttributeError, TypeError): pass if l: return view_func(view_or_request, *args, **kwargs) # Compute a cache key li = [str(view_or_request.__class__), view_func.__name__] # request.get_full_path is implicitly added it no other request # path is provided. get_full_path includes the querystring and is # the more conservative approach but makes it trivially easy for a # request to bust through the cache. 
if not set(params).intersection(set(( "request.get_full_path()", "request.path", "request.path_info" ))): li.append(request.get_full_path()) if "django.contrib.sites" in settings.INSTALLED_APPS: li.append(get_current_site_pk(request)) # Pre-sort kwargs keys = list(kwargs.keys()) keys.sort() for key in keys: li.append("%s,%s" % (key, kwargs[key])) # Extend cache key with custom variables for param in params: if not isinstance(param, str): param = str(param) li.append(eval(param)) s = ":".join([str(l) for l in li]) hashed = hashlib.md5(s.encode("utf-8")).hexdigest() cache_key = "ucache-get-%s" % hashed cached = cache.get(cache_key, None) if cached is None: # The get view as outermost caller may bluntly set _ultracache request._ultracache = [] response = view_func(view_or_request, *args, **kwargs) content = None if isinstance(response, TemplateResponse): content = response.render().rendered_content elif isinstance(response, HttpResponse): content = response.content if content is not None: headers = getattr(response, "_headers", {}) cache.set( cache_key, {"content": content, "headers": headers}, timeout ) cache_meta(request, cache_key) else: response = HttpResponse(cached["content"]) # Headers has a non-obvious format for k, v in cached["headers"].items(): response[v[0]] = v[1] return response return _wrapped_view return decorator
[ "def", "cached_get", "(", "timeout", ",", "*", "params", ")", ":", "def", "decorator", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "_wrapped_view", "(", "view_or_re...
Decorator applied specifically to a view's get method
[ "Decorator", "applied", "specifically", "to", "a", "view", "s", "get", "method" ]
train
https://github.com/praekelt/django-ultracache/blob/8898f10e50fc8f8d0a4cb7d3fe4d945bf257bd9f/ultracache/decorators.py#L16-L101
praekelt/django-ultracache
ultracache/decorators.py
ultracache
def ultracache(timeout, *params): """Decorator applied to a view class. The get method is decorated implicitly.""" def decorator(cls): class WrappedClass(cls): def __init__(self, *args, **kwargs): super(WrappedClass, self).__init__(*args, **kwargs) @cached_get(timeout, *params) def get(self, *args, **kwargs): return super(WrappedClass, self).get(*args, **kwargs) return WrappedClass return decorator
python
def ultracache(timeout, *params): """Decorator applied to a view class. The get method is decorated implicitly.""" def decorator(cls): class WrappedClass(cls): def __init__(self, *args, **kwargs): super(WrappedClass, self).__init__(*args, **kwargs) @cached_get(timeout, *params) def get(self, *args, **kwargs): return super(WrappedClass, self).get(*args, **kwargs) return WrappedClass return decorator
[ "def", "ultracache", "(", "timeout", ",", "*", "params", ")", ":", "def", "decorator", "(", "cls", ")", ":", "class", "WrappedClass", "(", "cls", ")", ":", "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "super"...
Decorator applied to a view class. The get method is decorated implicitly.
[ "Decorator", "applied", "to", "a", "view", "class", ".", "The", "get", "method", "is", "decorated", "implicitly", "." ]
train
https://github.com/praekelt/django-ultracache/blob/8898f10e50fc8f8d0a4cb7d3fe4d945bf257bd9f/ultracache/decorators.py#L104-L118
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer.open
async def open(self) -> 'Issuer': """ Explicit entry. Perform ancestor opening operations, then synchronize revocation registry to tails tree content. :return: current object """ LOGGER.debug('Issuer.open >>>') await super().open() for path_rr_id in Tails.links(self.dir_tails, self.did): await self._sync_revoc_for_issue(basename(path_rr_id)) LOGGER.debug('Issuer.open <<<') return self
python
async def open(self) -> 'Issuer': """ Explicit entry. Perform ancestor opening operations, then synchronize revocation registry to tails tree content. :return: current object """ LOGGER.debug('Issuer.open >>>') await super().open() for path_rr_id in Tails.links(self.dir_tails, self.did): await self._sync_revoc_for_issue(basename(path_rr_id)) LOGGER.debug('Issuer.open <<<') return self
[ "async", "def", "open", "(", "self", ")", "->", "'Issuer'", ":", "LOGGER", ".", "debug", "(", "'Issuer.open >>>'", ")", "await", "super", "(", ")", ".", "open", "(", ")", "for", "path_rr_id", "in", "Tails", ".", "links", "(", "self", ".", "dir_tails", ...
Explicit entry. Perform ancestor opening operations, then synchronize revocation registry to tails tree content. :return: current object
[ "Explicit", "entry", ".", "Perform", "ancestor", "opening", "operations", "then", "synchronize", "revocation", "registry", "to", "tails", "tree", "content", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L111-L126
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer._send_rev_reg_def
async def _send_rev_reg_def(self, rr_id: str) -> None: """ Move tails file from hopper; deserialize revocation registry definition and initial entry; send to ledger and cache revocation registry definition. Operation serializes to subdirectory within tails hopper directory; symbolic link presence signals completion. Raise AbsentRevReg if revocation registry is not ready in hopper, or AbsentTails if tails file is not yet linked by its revocation registry identifier. :param rr_id: revocation registry identifier """ LOGGER.debug('Issuer._send_rev_reg_def >>> rr_id: %s', rr_id) dir_tails_rr_id = self.rrb.dir_tails_top(rr_id) dir_target = self.rrb.dir_tails_target(rr_id) if not Tails.linked(dir_tails_rr_id, rr_id): LOGGER.debug( 'Issuer._send_rev_reg_def <!< Tails file for rev reg %s not ready in dir %s', rr_id, dir_target) raise AbsentRevReg('Tails file for rev reg {} not ready in dir {}'.format(rr_id, dir_target)) file_rr_def = join(dir_target, 'rr_def.json') if not isfile(file_rr_def): LOGGER.debug('Issuer._send_rev_reg_def <!< Rev reg def file %s not present', file_rr_def) raise AbsentRevReg('Rev reg def file {} not present'.format(file_rr_def)) with open(file_rr_def, 'r') as fh_rr_def: rr_def_json = fh_rr_def.read() file_rr_ent = join(dir_target, 'rr_ent.json') if not isfile(file_rr_ent): LOGGER.debug('Issuer._send_rev_reg_def <!< Rev reg entry file %s not present', file_rr_ent) raise AbsentRevReg('Rev reg entry file {} not present'.format(file_rr_ent)) with open(file_rr_ent, 'r') as fh_rr_ent: rr_ent_json = fh_rr_ent.read() file_tails = Tails.linked(dir_tails_rr_id, rr_id) if not file_tails: LOGGER.debug('Issuer._send_rev_reg_def <!< Tails link %s not present in dir %s', rr_id, dir_target) raise AbsentTails('Tails link {} not present in dir {}'.format(rr_id, dir_target)) if self.rrbx: dir_cd_id = join(self.dir_tails, rev_reg_id2cred_def_id(rr_id)) makedirs(dir_cd_id, exist_ok=True) rename(file_tails, join(dir_cd_id, basename(file_tails))) with REVO_CACHE.lock: 
rr_def_req_json = await ledger.build_revoc_reg_def_request(self.did, rr_def_json) await self._sign_submit(rr_def_req_json) await self.get_rev_reg_def(rr_id) # add to cache en passant rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did, rr_id, 'CL_ACCUM', rr_ent_json) await self._sign_submit(rr_ent_req_json) if self.rrbx: Tails.associate(self.dir_tails, rr_id, basename(file_tails)) rmtree(dir_tails_rr_id) else: remove(file_rr_def) remove(file_rr_ent) LOGGER.debug('Issuer._send_rev_reg_def <<<')
python
async def _send_rev_reg_def(self, rr_id: str) -> None: """ Move tails file from hopper; deserialize revocation registry definition and initial entry; send to ledger and cache revocation registry definition. Operation serializes to subdirectory within tails hopper directory; symbolic link presence signals completion. Raise AbsentRevReg if revocation registry is not ready in hopper, or AbsentTails if tails file is not yet linked by its revocation registry identifier. :param rr_id: revocation registry identifier """ LOGGER.debug('Issuer._send_rev_reg_def >>> rr_id: %s', rr_id) dir_tails_rr_id = self.rrb.dir_tails_top(rr_id) dir_target = self.rrb.dir_tails_target(rr_id) if not Tails.linked(dir_tails_rr_id, rr_id): LOGGER.debug( 'Issuer._send_rev_reg_def <!< Tails file for rev reg %s not ready in dir %s', rr_id, dir_target) raise AbsentRevReg('Tails file for rev reg {} not ready in dir {}'.format(rr_id, dir_target)) file_rr_def = join(dir_target, 'rr_def.json') if not isfile(file_rr_def): LOGGER.debug('Issuer._send_rev_reg_def <!< Rev reg def file %s not present', file_rr_def) raise AbsentRevReg('Rev reg def file {} not present'.format(file_rr_def)) with open(file_rr_def, 'r') as fh_rr_def: rr_def_json = fh_rr_def.read() file_rr_ent = join(dir_target, 'rr_ent.json') if not isfile(file_rr_ent): LOGGER.debug('Issuer._send_rev_reg_def <!< Rev reg entry file %s not present', file_rr_ent) raise AbsentRevReg('Rev reg entry file {} not present'.format(file_rr_ent)) with open(file_rr_ent, 'r') as fh_rr_ent: rr_ent_json = fh_rr_ent.read() file_tails = Tails.linked(dir_tails_rr_id, rr_id) if not file_tails: LOGGER.debug('Issuer._send_rev_reg_def <!< Tails link %s not present in dir %s', rr_id, dir_target) raise AbsentTails('Tails link {} not present in dir {}'.format(rr_id, dir_target)) if self.rrbx: dir_cd_id = join(self.dir_tails, rev_reg_id2cred_def_id(rr_id)) makedirs(dir_cd_id, exist_ok=True) rename(file_tails, join(dir_cd_id, basename(file_tails))) with REVO_CACHE.lock: 
rr_def_req_json = await ledger.build_revoc_reg_def_request(self.did, rr_def_json) await self._sign_submit(rr_def_req_json) await self.get_rev_reg_def(rr_id) # add to cache en passant rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did, rr_id, 'CL_ACCUM', rr_ent_json) await self._sign_submit(rr_ent_req_json) if self.rrbx: Tails.associate(self.dir_tails, rr_id, basename(file_tails)) rmtree(dir_tails_rr_id) else: remove(file_rr_def) remove(file_rr_ent) LOGGER.debug('Issuer._send_rev_reg_def <<<')
[ "async", "def", "_send_rev_reg_def", "(", "self", ",", "rr_id", ":", "str", ")", "->", "None", ":", "LOGGER", ".", "debug", "(", "'Issuer._send_rev_reg_def >>> rr_id: %s'", ",", "rr_id", ")", "dir_tails_rr_id", "=", "self", ".", "rrb", ".", "dir_tails_top", "(...
Move tails file from hopper; deserialize revocation registry definition and initial entry; send to ledger and cache revocation registry definition. Operation serializes to subdirectory within tails hopper directory; symbolic link presence signals completion. Raise AbsentRevReg if revocation registry is not ready in hopper, or AbsentTails if tails file is not yet linked by its revocation registry identifier. :param rr_id: revocation registry identifier
[ "Move", "tails", "file", "from", "hopper", ";", "deserialize", "revocation", "registry", "definition", "and", "initial", "entry", ";", "send", "to", "ledger", "and", "cache", "revocation", "registry", "definition", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L128-L193
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer._set_rev_reg
async def _set_rev_reg(self, rr_id: str, rr_size: int) -> None: """ Move precomputed revocation registry data from hopper into place within tails directory. :param rr_id: revocation registry identifier :param rr_size: revocation registry size, in case creation required """ LOGGER.debug('Issuer._set_rev_reg >>> rr_id: %s, rr_size: %s', rr_id, rr_size) assert self.rrbx dir_hopper_rr_id = join(self.rrb.dir_tails_hopper, rr_id) while Tails.linked(dir_hopper_rr_id, rr_id) is None: await asyncio.sleep(1) await self._send_rev_reg_def(rr_id) cd_id = rev_reg_id2cred_def_id(rr_id) (next_tag, rr_size_suggested) = Tails.next_tag(self.dir_tails, cd_id) rr_id = rev_reg_id(cd_id, next_tag) self.rrb.mark_in_progress(rr_id, rr_size or rr_size_suggested) LOGGER.debug('Issuer._set_rev_reg <<<')
python
async def _set_rev_reg(self, rr_id: str, rr_size: int) -> None: """ Move precomputed revocation registry data from hopper into place within tails directory. :param rr_id: revocation registry identifier :param rr_size: revocation registry size, in case creation required """ LOGGER.debug('Issuer._set_rev_reg >>> rr_id: %s, rr_size: %s', rr_id, rr_size) assert self.rrbx dir_hopper_rr_id = join(self.rrb.dir_tails_hopper, rr_id) while Tails.linked(dir_hopper_rr_id, rr_id) is None: await asyncio.sleep(1) await self._send_rev_reg_def(rr_id) cd_id = rev_reg_id2cred_def_id(rr_id) (next_tag, rr_size_suggested) = Tails.next_tag(self.dir_tails, cd_id) rr_id = rev_reg_id(cd_id, next_tag) self.rrb.mark_in_progress(rr_id, rr_size or rr_size_suggested) LOGGER.debug('Issuer._set_rev_reg <<<')
[ "async", "def", "_set_rev_reg", "(", "self", ",", "rr_id", ":", "str", ",", "rr_size", ":", "int", ")", "->", "None", ":", "LOGGER", ".", "debug", "(", "'Issuer._set_rev_reg >>> rr_id: %s, rr_size: %s'", ",", "rr_id", ",", "rr_size", ")", "assert", "self", "...
Move precomputed revocation registry data from hopper into place within tails directory. :param rr_id: revocation registry identifier :param rr_size: revocation registry size, in case creation required
[ "Move", "precomputed", "revocation", "registry", "data", "from", "hopper", "into", "place", "within", "tails", "directory", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L195-L217
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer._sync_revoc_for_issue
async def _sync_revoc_for_issue(self, rr_id: str, rr_size: int = None) -> None: """ Create revocation registry if need be for input revocation registry identifier; open and cache tails file reader. :param rr_id: revocation registry identifier :param rr_size: if new revocation registry necessary, its size (default as per RevRegBuilder.create_rev_reg()) """ LOGGER.debug('Issuer._sync_revoc_for_issue >>> rr_id: %s, rr_size: %s', rr_id, rr_size) if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer._sync_revoc_for_issue <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) (cd_id, tag) = rev_reg_id2cred_def_id_tag(rr_id) try: await self.get_cred_def(cd_id) except AbsentCredDef: LOGGER.debug( 'Issuer._sync_revoc_for_issue <!< tails tree %s may be for another ledger; no cred def found on %s', self.dir_tails, cd_id) raise AbsentCredDef('Tails tree {} may be for another ledger; no cred def found on {}'.format( self.dir_tails, cd_id)) with REVO_CACHE.lock: revo_cache_entry = REVO_CACHE.get(rr_id, None) tails = None if revo_cache_entry is None else revo_cache_entry.tails if tails is None: # it's a new revocation registry, or not yet set in cache try: tails = await Tails(self.dir_tails, cd_id, tag).open() except AbsentTails: # it's a new revocation registry if self.rrbx: await self._set_rev_reg(rr_id, rr_size) else: await self.rrb.create_rev_reg(rr_id, rr_size) await self._send_rev_reg_def(rr_id) tails = await Tails(self.dir_tails, cd_id, tag).open() # symlink should exist now if revo_cache_entry is None: REVO_CACHE[rr_id] = RevoCacheEntry(None, tails) else: REVO_CACHE[rr_id].tails = tails LOGGER.debug('Issuer._sync_revoc_for_issue <<<')
python
async def _sync_revoc_for_issue(self, rr_id: str, rr_size: int = None) -> None: """ Create revocation registry if need be for input revocation registry identifier; open and cache tails file reader. :param rr_id: revocation registry identifier :param rr_size: if new revocation registry necessary, its size (default as per RevRegBuilder.create_rev_reg()) """ LOGGER.debug('Issuer._sync_revoc_for_issue >>> rr_id: %s, rr_size: %s', rr_id, rr_size) if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer._sync_revoc_for_issue <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) (cd_id, tag) = rev_reg_id2cred_def_id_tag(rr_id) try: await self.get_cred_def(cd_id) except AbsentCredDef: LOGGER.debug( 'Issuer._sync_revoc_for_issue <!< tails tree %s may be for another ledger; no cred def found on %s', self.dir_tails, cd_id) raise AbsentCredDef('Tails tree {} may be for another ledger; no cred def found on {}'.format( self.dir_tails, cd_id)) with REVO_CACHE.lock: revo_cache_entry = REVO_CACHE.get(rr_id, None) tails = None if revo_cache_entry is None else revo_cache_entry.tails if tails is None: # it's a new revocation registry, or not yet set in cache try: tails = await Tails(self.dir_tails, cd_id, tag).open() except AbsentTails: # it's a new revocation registry if self.rrbx: await self._set_rev_reg(rr_id, rr_size) else: await self.rrb.create_rev_reg(rr_id, rr_size) await self._send_rev_reg_def(rr_id) tails = await Tails(self.dir_tails, cd_id, tag).open() # symlink should exist now if revo_cache_entry is None: REVO_CACHE[rr_id] = RevoCacheEntry(None, tails) else: REVO_CACHE[rr_id].tails = tails LOGGER.debug('Issuer._sync_revoc_for_issue <<<')
[ "async", "def", "_sync_revoc_for_issue", "(", "self", ",", "rr_id", ":", "str", ",", "rr_size", ":", "int", "=", "None", ")", "->", "None", ":", "LOGGER", ".", "debug", "(", "'Issuer._sync_revoc_for_issue >>> rr_id: %s, rr_size: %s'", ",", "rr_id", ",", "rr_size...
Create revocation registry if need be for input revocation registry identifier; open and cache tails file reader. :param rr_id: revocation registry identifier :param rr_size: if new revocation registry necessary, its size (default as per RevRegBuilder.create_rev_reg())
[ "Create", "revocation", "registry", "if", "need", "be", "for", "input", "revocation", "registry", "identifier", ";", "open", "and", "cache", "tails", "file", "reader", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L219-L266
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer.path_tails
def path_tails(self, rr_id: str) -> str: """ Return path to tails file for input revocation registry identifier. :param rr_id: revocation registry identifier of interest :return: path to tails file for input revocation registry identifier """ LOGGER.debug('Issuer.path_tails >>>') if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer.path_tails <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) rv = Tails.linked(self.dir_tails, rr_id) LOGGER.debug('Issuer.path_tails <<< %s', rv) return rv
python
def path_tails(self, rr_id: str) -> str: """ Return path to tails file for input revocation registry identifier. :param rr_id: revocation registry identifier of interest :return: path to tails file for input revocation registry identifier """ LOGGER.debug('Issuer.path_tails >>>') if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer.path_tails <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) rv = Tails.linked(self.dir_tails, rr_id) LOGGER.debug('Issuer.path_tails <<< %s', rv) return rv
[ "def", "path_tails", "(", "self", ",", "rr_id", ":", "str", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Issuer.path_tails >>>'", ")", "if", "not", "ok_rev_reg_id", "(", "rr_id", ")", ":", "LOGGER", ".", "debug", "(", "'Issuer.path_tails <!< Bad rev...
Return path to tails file for input revocation registry identifier. :param rr_id: revocation registry identifier of interest :return: path to tails file for input revocation registry identifier
[ "Return", "path", "to", "tails", "file", "for", "input", "revocation", "registry", "identifier", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L268-L284
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer._create_cred_def
async def _create_cred_def(self, schema: dict, ledger_cred_def: dict, revo: bool) -> (str, bool): """ Create credential definition in wallet as part of the send_cred_def() sequence. Return whether the private key for the cred def is OK to continue with the sequence, propagating the cred def and revocation registry info to the ledger. :param schema: schema on which to create cred def :param ledger_cred_def: credential definition as ledger has it (typically, None) :param revo: whether cred def supports revocation :return: cred def json and whether local cred def private key is OK, hence cred def is OK to send to the ledger """ LOGGER.debug( 'Issuer._create_cred_def >>> schema: %s, ledger_cred_def: %s, revo: %s', schema, ledger_cred_def, revo) cred_def_json = '{}' private_key_ok = True try: (_, cred_def_json) = await anoncreds.issuer_create_and_store_credential_def( self.wallet.handle, self.did, # issuer DID json.dumps(schema), self.pool.protocol.cd_id_tag(False), # expect only one cred def per schema and issuer 'CL', json.dumps({'support_revocation': revo})) if ledger_cred_def: private_key_ok = False LOGGER.warning( 'New cred def on %s in wallet shadows existing one on ledger: private key not usable', cred_def_id(self.did, schema['seqNo'], self.pool.protocol)) # carry on though, this anchor may have other capacities so public key may be good enough except IndyError as x_indy: if x_indy.error_code == ErrorCode.AnoncredsCredDefAlreadyExistsError: if ledger_cred_def: LOGGER.info( 'Issuer wallet %s reusing existing cred def on schema %s version %s', self.name, schema['name'], schema['version']) else: LOGGER.debug('Issuer._create_cred_def <!< corrupt wallet %s', self.name) raise CorruptWallet('Corrupt Issuer wallet {} has cred def on schema {} not on ledger'.format( self.name, schema['id'])) else: LOGGER.debug( 'Issuer._create_cred_def <!< cannot store cred def in wallet %s: indy error code %s', self.name, x_indy.error_code) raise rv = (cred_def_json, private_key_ok) 
LOGGER.debug('Issuer._create_cred_def <<< %s', rv) return rv
python
async def _create_cred_def(self, schema: dict, ledger_cred_def: dict, revo: bool) -> (str, bool): """ Create credential definition in wallet as part of the send_cred_def() sequence. Return whether the private key for the cred def is OK to continue with the sequence, propagating the cred def and revocation registry info to the ledger. :param schema: schema on which to create cred def :param ledger_cred_def: credential definition as ledger has it (typically, None) :param revo: whether cred def supports revocation :return: cred def json and whether local cred def private key is OK, hence cred def is OK to send to the ledger """ LOGGER.debug( 'Issuer._create_cred_def >>> schema: %s, ledger_cred_def: %s, revo: %s', schema, ledger_cred_def, revo) cred_def_json = '{}' private_key_ok = True try: (_, cred_def_json) = await anoncreds.issuer_create_and_store_credential_def( self.wallet.handle, self.did, # issuer DID json.dumps(schema), self.pool.protocol.cd_id_tag(False), # expect only one cred def per schema and issuer 'CL', json.dumps({'support_revocation': revo})) if ledger_cred_def: private_key_ok = False LOGGER.warning( 'New cred def on %s in wallet shadows existing one on ledger: private key not usable', cred_def_id(self.did, schema['seqNo'], self.pool.protocol)) # carry on though, this anchor may have other capacities so public key may be good enough except IndyError as x_indy: if x_indy.error_code == ErrorCode.AnoncredsCredDefAlreadyExistsError: if ledger_cred_def: LOGGER.info( 'Issuer wallet %s reusing existing cred def on schema %s version %s', self.name, schema['name'], schema['version']) else: LOGGER.debug('Issuer._create_cred_def <!< corrupt wallet %s', self.name) raise CorruptWallet('Corrupt Issuer wallet {} has cred def on schema {} not on ledger'.format( self.name, schema['id'])) else: LOGGER.debug( 'Issuer._create_cred_def <!< cannot store cred def in wallet %s: indy error code %s', self.name, x_indy.error_code) raise rv = (cred_def_json, private_key_ok) 
LOGGER.debug('Issuer._create_cred_def <<< %s', rv) return rv
[ "async", "def", "_create_cred_def", "(", "self", ",", "schema", ":", "dict", ",", "ledger_cred_def", ":", "dict", ",", "revo", ":", "bool", ")", "->", "(", "str", ",", "bool", ")", ":", "LOGGER", ".", "debug", "(", "'Issuer._create_cred_def >>> schema: %s, l...
Create credential definition in wallet as part of the send_cred_def() sequence. Return whether the private key for the cred def is OK to continue with the sequence, propagating the cred def and revocation registry info to the ledger. :param schema: schema on which to create cred def :param ledger_cred_def: credential definition as ledger has it (typically, None) :param revo: whether cred def supports revocation :return: cred def json and whether local cred def private key is OK, hence cred def is OK to send to the ledger
[ "Create", "credential", "definition", "in", "wallet", "as", "part", "of", "the", "send_cred_def", "()", "sequence", ".", "Return", "whether", "the", "private", "key", "for", "the", "cred", "def", "is", "OK", "to", "continue", "with", "the", "sequence", "prop...
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L286-L342
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer.send_cred_def
async def send_cred_def(self, s_id: str, revo: bool = True, rr_size: int = None) -> str: """ Create a credential definition as Issuer, store it in its wallet, and send it to the ledger. Raise CorruptWallet for wallet not pertaining to current ledger, BadLedgerTxn on failure to send credential definition to ledger if need be, WalletState for closed wallet, or IndyError for any other failure to create and store credential definition in wallet. :param s_id: schema identifier :param revo: whether to support revocation for cred def :param rr_size: size of initial revocation registry (default as per RevRegBuilder.create_rev_reg()), if revocation supported :return: json credential definition as it appears on ledger """ LOGGER.debug('Issuer.send_cred_def >>> s_id: %s, revo: %s, rr_size: %s', s_id, revo, rr_size) if not ok_schema_id(s_id): LOGGER.debug('Issuer.send_cred_def <!< Bad schema id %s', s_id) raise BadIdentifier('Bad schema id {}'.format(s_id)) if not self.wallet.handle: LOGGER.debug('Issuer.send_cred_def <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not self.pool: LOGGER.debug('Issuer.send_cred_def <!< issuer %s has no pool', self.name) raise AbsentPool('Issuer {} has no pool: cannot send cred def'.format(self.name)) rv_json = json.dumps({}) schema_json = await self.get_schema(schema_key(s_id)) schema = json.loads(schema_json) cd_id = cred_def_id(self.did, schema['seqNo'], self.pool.protocol) private_key_ok = True with CRED_DEF_CACHE.lock: try: rv_json = await self.get_cred_def(cd_id) LOGGER.info( 'Cred def on schema %s version %s already exists on ledger; Issuer %s not sending another', schema['name'], schema['version'], self.name) except AbsentCredDef: pass # OK - about to create, store, and send it (cred_def_json, private_key_ok) = await self._create_cred_def(schema, json.loads(rv_json), revo) if not json.loads(rv_json): # checking the ledger returned no cred def: send it req_json = await 
ledger.build_cred_def_request(self.did, cred_def_json) await self._sign_submit(req_json) for _ in range(16): # reasonable timeout try: rv_json = await self.get_cred_def(cd_id) # adds to cache break except AbsentCredDef: await asyncio.sleep(1) LOGGER.info('Sent cred def %s to ledger, waiting 1s for its appearance', cd_id) if not rv_json: LOGGER.debug('Issuer.send_cred_def <!< timed out waiting on sent cred_def %s', cd_id) raise BadLedgerTxn('Timed out waiting on sent cred_def {}'.format(cd_id)) if revo: # create new rev reg for tag '0' if self.rrbx: (_, rr_size_suggested) = Tails.next_tag(self.dir_tails, cd_id) self.rrb.mark_in_progress(rev_reg_id(cd_id, '0'), rr_size or rr_size_suggested) await self._sync_revoc_for_issue(rev_reg_id(cd_id, '0'), rr_size) # sync rev reg on tag '0' if revo and private_key_ok: for tag in [str(t) for t in range(1, int(Tails.next_tag(self.dir_tails, cd_id)[0]))]: # '1' to next-1 await self._sync_revoc_for_issue(rev_reg_id(cd_id, tag), rr_size if tag == '0' else None) makedirs(join(self.dir_tails, cd_id), exist_ok=True) # dir required for box id collection, revo or not LOGGER.debug('Issuer.send_cred_def <<< %s', rv_json) return rv_json
python
async def send_cred_def(self, s_id: str, revo: bool = True, rr_size: int = None) -> str: """ Create a credential definition as Issuer, store it in its wallet, and send it to the ledger. Raise CorruptWallet for wallet not pertaining to current ledger, BadLedgerTxn on failure to send credential definition to ledger if need be, WalletState for closed wallet, or IndyError for any other failure to create and store credential definition in wallet. :param s_id: schema identifier :param revo: whether to support revocation for cred def :param rr_size: size of initial revocation registry (default as per RevRegBuilder.create_rev_reg()), if revocation supported :return: json credential definition as it appears on ledger """ LOGGER.debug('Issuer.send_cred_def >>> s_id: %s, revo: %s, rr_size: %s', s_id, revo, rr_size) if not ok_schema_id(s_id): LOGGER.debug('Issuer.send_cred_def <!< Bad schema id %s', s_id) raise BadIdentifier('Bad schema id {}'.format(s_id)) if not self.wallet.handle: LOGGER.debug('Issuer.send_cred_def <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not self.pool: LOGGER.debug('Issuer.send_cred_def <!< issuer %s has no pool', self.name) raise AbsentPool('Issuer {} has no pool: cannot send cred def'.format(self.name)) rv_json = json.dumps({}) schema_json = await self.get_schema(schema_key(s_id)) schema = json.loads(schema_json) cd_id = cred_def_id(self.did, schema['seqNo'], self.pool.protocol) private_key_ok = True with CRED_DEF_CACHE.lock: try: rv_json = await self.get_cred_def(cd_id) LOGGER.info( 'Cred def on schema %s version %s already exists on ledger; Issuer %s not sending another', schema['name'], schema['version'], self.name) except AbsentCredDef: pass # OK - about to create, store, and send it (cred_def_json, private_key_ok) = await self._create_cred_def(schema, json.loads(rv_json), revo) if not json.loads(rv_json): # checking the ledger returned no cred def: send it req_json = await 
ledger.build_cred_def_request(self.did, cred_def_json) await self._sign_submit(req_json) for _ in range(16): # reasonable timeout try: rv_json = await self.get_cred_def(cd_id) # adds to cache break except AbsentCredDef: await asyncio.sleep(1) LOGGER.info('Sent cred def %s to ledger, waiting 1s for its appearance', cd_id) if not rv_json: LOGGER.debug('Issuer.send_cred_def <!< timed out waiting on sent cred_def %s', cd_id) raise BadLedgerTxn('Timed out waiting on sent cred_def {}'.format(cd_id)) if revo: # create new rev reg for tag '0' if self.rrbx: (_, rr_size_suggested) = Tails.next_tag(self.dir_tails, cd_id) self.rrb.mark_in_progress(rev_reg_id(cd_id, '0'), rr_size or rr_size_suggested) await self._sync_revoc_for_issue(rev_reg_id(cd_id, '0'), rr_size) # sync rev reg on tag '0' if revo and private_key_ok: for tag in [str(t) for t in range(1, int(Tails.next_tag(self.dir_tails, cd_id)[0]))]: # '1' to next-1 await self._sync_revoc_for_issue(rev_reg_id(cd_id, tag), rr_size if tag == '0' else None) makedirs(join(self.dir_tails, cd_id), exist_ok=True) # dir required for box id collection, revo or not LOGGER.debug('Issuer.send_cred_def <<< %s', rv_json) return rv_json
[ "async", "def", "send_cred_def", "(", "self", ",", "s_id", ":", "str", ",", "revo", ":", "bool", "=", "True", ",", "rr_size", ":", "int", "=", "None", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Issuer.send_cred_def >>> s_id: %s, revo: %s, rr_size:...
Create a credential definition as Issuer, store it in its wallet, and send it to the ledger. Raise CorruptWallet for wallet not pertaining to current ledger, BadLedgerTxn on failure to send credential definition to ledger if need be, WalletState for closed wallet, or IndyError for any other failure to create and store credential definition in wallet. :param s_id: schema identifier :param revo: whether to support revocation for cred def :param rr_size: size of initial revocation registry (default as per RevRegBuilder.create_rev_reg()), if revocation supported :return: json credential definition as it appears on ledger
[ "Create", "a", "credential", "definition", "as", "Issuer", "store", "it", "in", "its", "wallet", "and", "send", "it", "to", "the", "ledger", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L344-L422
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer.create_cred_offer
async def create_cred_offer(self, schema_seq_no: int) -> str: """ Create credential offer as Issuer for given schema. Raise CorruptWallet if the wallet has no private key for the corresponding credential definition. Raise WalletState for closed wallet. :param schema_seq_no: schema sequence number :return: credential offer json for use in storing credentials at HolderProver. """ LOGGER.debug('Issuer.create_cred_offer >>> schema_seq_no: %s', schema_seq_no) if not self.wallet.handle: LOGGER.debug('Issuer.create_cred_offer <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not self.pool: LOGGER.debug('Issuer.create_cred_offer <!< issuer %s has no pool', self.name) raise AbsentPool('Issuer {} has no pool: cannot create cred offer'.format(self.name)) rv = None cd_id = cred_def_id(self.did, schema_seq_no, self.pool.protocol) try: rv = await anoncreds.issuer_create_credential_offer(self.wallet.handle, cd_id) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletNotFoundError: LOGGER.debug( 'Issuer.create_cred_offer <!< did not issue cred definition from wallet %s', self.name) raise CorruptWallet('Cannot create cred offer: did not issue cred definition from wallet {}'.format( self.name)) LOGGER.debug( 'Issuer.create_cred_offer <!< cannot create cred offer, indy error code %s', x_indy.error_code) raise LOGGER.debug('Issuer.create_cred_offer <<< %s', rv) return rv
python
async def create_cred_offer(self, schema_seq_no: int) -> str: """ Create credential offer as Issuer for given schema. Raise CorruptWallet if the wallet has no private key for the corresponding credential definition. Raise WalletState for closed wallet. :param schema_seq_no: schema sequence number :return: credential offer json for use in storing credentials at HolderProver. """ LOGGER.debug('Issuer.create_cred_offer >>> schema_seq_no: %s', schema_seq_no) if not self.wallet.handle: LOGGER.debug('Issuer.create_cred_offer <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not self.pool: LOGGER.debug('Issuer.create_cred_offer <!< issuer %s has no pool', self.name) raise AbsentPool('Issuer {} has no pool: cannot create cred offer'.format(self.name)) rv = None cd_id = cred_def_id(self.did, schema_seq_no, self.pool.protocol) try: rv = await anoncreds.issuer_create_credential_offer(self.wallet.handle, cd_id) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletNotFoundError: LOGGER.debug( 'Issuer.create_cred_offer <!< did not issue cred definition from wallet %s', self.name) raise CorruptWallet('Cannot create cred offer: did not issue cred definition from wallet {}'.format( self.name)) LOGGER.debug( 'Issuer.create_cred_offer <!< cannot create cred offer, indy error code %s', x_indy.error_code) raise LOGGER.debug('Issuer.create_cred_offer <<< %s', rv) return rv
[ "async", "def", "create_cred_offer", "(", "self", ",", "schema_seq_no", ":", "int", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Issuer.create_cred_offer >>> schema_seq_no: %s'", ",", "schema_seq_no", ")", "if", "not", "self", ".", "wallet", ".", "handl...
Create credential offer as Issuer for given schema. Raise CorruptWallet if the wallet has no private key for the corresponding credential definition. Raise WalletState for closed wallet. :param schema_seq_no: schema sequence number :return: credential offer json for use in storing credentials at HolderProver.
[ "Create", "credential", "offer", "as", "Issuer", "for", "given", "schema", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L424-L462
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer.create_cred
async def create_cred( self, cred_offer_json, cred_req_json: str, cred_attrs: dict, rr_size: int = None) -> (str, str): """ Create credential as Issuer out of credential request and dict of key:value (raw, unencoded) entries for attributes. Return credential json, and if cred def supports revocation, credential revocation identifier. Raise WalletState for closed wallet. If the credential definition supports revocation, and the current revocation registry is full, the processing creates a new revocation registry en passant. Depending on the revocation registry size (by default starting at 64 and doubling iteratively through a maximum of 100000) and the revocation registry builder posture (see RevRegBuilder.__init__()), this operation may delay credential creation by several seconds. The use of an external revocation registry builder runs a parallel process, skirting this delay, but is more costly at initialization. :param cred_offer_json: credential offer json as created by Issuer :param cred_req_json: credential request json as created by HolderProver :param cred_attrs: dict mapping each attribute to its original value (the operation encodes it); e.g., :: { 'favourite_drink': 'martini', 'height': 180, 'last_visit_date': '2017-12-31', 'weaknesses': None } :param rr_size: size of new revocation registry (default as per RevRegBuilder.create_rev_reg()) if necessary :return: tuple with newly issued credential json, credential revocation identifier (if cred def supports revocation, None otherwise). 
""" LOGGER.debug( 'Issuer.create_cred >>> cred_offer_json: %s, cred_req_json: %s, cred_attrs: %s, rr_size: %s', cred_offer_json, cred_req_json, cred_attrs, rr_size) if not self.wallet.handle: LOGGER.debug('Issuer.create_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) cd_id = json.loads(cred_offer_json)['cred_def_id'] if not ok_cred_def_id(cd_id): LOGGER.debug('Issuer.create_cred <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) cred_def = json.loads(await self.get_cred_def(cd_id)) # ensure cred def is in cache if 'revocation' in cred_def['value']: with REVO_CACHE.lock: rr_id = Tails.current_rev_reg_id(self.dir_tails, cd_id) tails = REVO_CACHE[rr_id].tails assert tails # at (re)start, at cred def, Issuer sync_revoc_for_issue() sets this index in revo cache try: (cred_json, cred_revoc_id, _) = await anoncreds.issuer_create_credential( # issue by default to rr self.wallet.handle, cred_offer_json, cred_req_json, json.dumps({k: cred_attr_value(cred_attrs[k]) for k in cred_attrs}), rr_id, tails.reader_handle) rv = (cred_json, cred_revoc_id) except IndyError as x_indy: if x_indy.error_code == ErrorCode.AnoncredsRevocationRegistryFullError: (tag, rr_size_suggested) = Tails.next_tag(self.dir_tails, cd_id) rr_id = rev_reg_id(cd_id, tag) if self.rrbx: await self._set_rev_reg(rr_id, rr_size) else: await self.rrb.create_rev_reg(rr_id, rr_size or rr_size_suggested) await self._send_rev_reg_def(rr_id) REVO_CACHE[rr_id].tails = await Tails(self.dir_tails, cd_id).open() # symlink OK now return await self.create_cred(cred_offer_json, cred_req_json, cred_attrs) LOGGER.debug('Issuer.create_cred <!< cannot create cred, indy error code %s', x_indy.error_code) raise else: try: (cred_json, _, _) = await anoncreds.issuer_create_credential( self.wallet.handle, cred_offer_json, cred_req_json, json.dumps({k: cred_attr_value(cred_attrs[k]) for k in cred_attrs}), None, None) rv = (cred_json, None) except 
IndyError as x_indy: LOGGER.debug('Issuer.create_cred <!< cannot create cred, indy error code %s', x_indy.error_code) raise LOGGER.debug('Issuer.create_cred <<< %s', rv) return rv
python
async def create_cred( self, cred_offer_json, cred_req_json: str, cred_attrs: dict, rr_size: int = None) -> (str, str): """ Create credential as Issuer out of credential request and dict of key:value (raw, unencoded) entries for attributes. Return credential json, and if cred def supports revocation, credential revocation identifier. Raise WalletState for closed wallet. If the credential definition supports revocation, and the current revocation registry is full, the processing creates a new revocation registry en passant. Depending on the revocation registry size (by default starting at 64 and doubling iteratively through a maximum of 100000) and the revocation registry builder posture (see RevRegBuilder.__init__()), this operation may delay credential creation by several seconds. The use of an external revocation registry builder runs a parallel process, skirting this delay, but is more costly at initialization. :param cred_offer_json: credential offer json as created by Issuer :param cred_req_json: credential request json as created by HolderProver :param cred_attrs: dict mapping each attribute to its original value (the operation encodes it); e.g., :: { 'favourite_drink': 'martini', 'height': 180, 'last_visit_date': '2017-12-31', 'weaknesses': None } :param rr_size: size of new revocation registry (default as per RevRegBuilder.create_rev_reg()) if necessary :return: tuple with newly issued credential json, credential revocation identifier (if cred def supports revocation, None otherwise). 
""" LOGGER.debug( 'Issuer.create_cred >>> cred_offer_json: %s, cred_req_json: %s, cred_attrs: %s, rr_size: %s', cred_offer_json, cred_req_json, cred_attrs, rr_size) if not self.wallet.handle: LOGGER.debug('Issuer.create_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) cd_id = json.loads(cred_offer_json)['cred_def_id'] if not ok_cred_def_id(cd_id): LOGGER.debug('Issuer.create_cred <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) cred_def = json.loads(await self.get_cred_def(cd_id)) # ensure cred def is in cache if 'revocation' in cred_def['value']: with REVO_CACHE.lock: rr_id = Tails.current_rev_reg_id(self.dir_tails, cd_id) tails = REVO_CACHE[rr_id].tails assert tails # at (re)start, at cred def, Issuer sync_revoc_for_issue() sets this index in revo cache try: (cred_json, cred_revoc_id, _) = await anoncreds.issuer_create_credential( # issue by default to rr self.wallet.handle, cred_offer_json, cred_req_json, json.dumps({k: cred_attr_value(cred_attrs[k]) for k in cred_attrs}), rr_id, tails.reader_handle) rv = (cred_json, cred_revoc_id) except IndyError as x_indy: if x_indy.error_code == ErrorCode.AnoncredsRevocationRegistryFullError: (tag, rr_size_suggested) = Tails.next_tag(self.dir_tails, cd_id) rr_id = rev_reg_id(cd_id, tag) if self.rrbx: await self._set_rev_reg(rr_id, rr_size) else: await self.rrb.create_rev_reg(rr_id, rr_size or rr_size_suggested) await self._send_rev_reg_def(rr_id) REVO_CACHE[rr_id].tails = await Tails(self.dir_tails, cd_id).open() # symlink OK now return await self.create_cred(cred_offer_json, cred_req_json, cred_attrs) LOGGER.debug('Issuer.create_cred <!< cannot create cred, indy error code %s', x_indy.error_code) raise else: try: (cred_json, _, _) = await anoncreds.issuer_create_credential( self.wallet.handle, cred_offer_json, cred_req_json, json.dumps({k: cred_attr_value(cred_attrs[k]) for k in cred_attrs}), None, None) rv = (cred_json, None) except 
IndyError as x_indy: LOGGER.debug('Issuer.create_cred <!< cannot create cred, indy error code %s', x_indy.error_code) raise LOGGER.debug('Issuer.create_cred <<< %s', rv) return rv
[ "async", "def", "create_cred", "(", "self", ",", "cred_offer_json", ",", "cred_req_json", ":", "str", ",", "cred_attrs", ":", "dict", ",", "rr_size", ":", "int", "=", "None", ")", "->", "(", "str", ",", "str", ")", ":", "LOGGER", ".", "debug", "(", "...
Create credential as Issuer out of credential request and dict of key:value (raw, unencoded) entries for attributes. Return credential json, and if cred def supports revocation, credential revocation identifier. Raise WalletState for closed wallet. If the credential definition supports revocation, and the current revocation registry is full, the processing creates a new revocation registry en passant. Depending on the revocation registry size (by default starting at 64 and doubling iteratively through a maximum of 100000) and the revocation registry builder posture (see RevRegBuilder.__init__()), this operation may delay credential creation by several seconds. The use of an external revocation registry builder runs a parallel process, skirting this delay, but is more costly at initialization. :param cred_offer_json: credential offer json as created by Issuer :param cred_req_json: credential request json as created by HolderProver :param cred_attrs: dict mapping each attribute to its original value (the operation encodes it); e.g., :: { 'favourite_drink': 'martini', 'height': 180, 'last_visit_date': '2017-12-31', 'weaknesses': None } :param rr_size: size of new revocation registry (default as per RevRegBuilder.create_rev_reg()) if necessary :return: tuple with newly issued credential json, credential revocation identifier (if cred def supports revocation, None otherwise).
[ "Create", "credential", "as", "Issuer", "out", "of", "credential", "request", "and", "dict", "of", "key", ":", "value", "(", "raw", "unencoded", ")", "entries", "for", "attributes", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L464-L566
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer.revoke_cred
async def revoke_cred(self, rr_id: str, cr_id) -> int: """ Revoke credential that input revocation registry identifier and credential revocation identifier specify. Return (epoch seconds) time of revocation. Raise AbsentTails if no tails file is available for input revocation registry identifier. Raise WalletState for closed wallet. Raise BadRevocation if issuer cannot revoke specified credential for any other reason (e.g., did not issue it, already revoked it). :param rr_id: revocation registry identifier :param cr_id: credential revocation identifier :return: time of revocation, in epoch seconds """ LOGGER.debug('Issuer.revoke_cred >>> rr_id: %s, cr_id: %s', rr_id, cr_id) if not self.wallet.handle: LOGGER.debug('Issuer.revoke_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer.revoke_cred <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) tails_reader_handle = (await Tails( self.dir_tails, *rev_reg_id2cred_def_id_tag(rr_id)).open()).reader_handle try: rrdelta_json = await anoncreds.issuer_revoke_credential( self.wallet.handle, tails_reader_handle, rr_id, cr_id) except IndyError as x_indy: LOGGER.debug( 'Issuer.revoke_cred <!< Could not revoke revoc reg id %s, cred rev id %s: indy error code %s', rr_id, cr_id, x_indy.error_code) raise BadRevocation( 'Could not revoke revoc reg id {}, cred rev id {}: indy error code {}'.format( rr_id, cr_id, x_indy.error_code)) rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did, rr_id, 'CL_ACCUM', rrdelta_json) resp_json = await self._sign_submit(rr_ent_req_json) # raises AbsentPool or ClosedPool if applicable resp = json.loads(resp_json) rv = self.pool.protocol.txn2epoch(resp) LOGGER.debug('Issuer.revoke_cred <<< %s', rv) return rv
python
async def revoke_cred(self, rr_id: str, cr_id) -> int: """ Revoke credential that input revocation registry identifier and credential revocation identifier specify. Return (epoch seconds) time of revocation. Raise AbsentTails if no tails file is available for input revocation registry identifier. Raise WalletState for closed wallet. Raise BadRevocation if issuer cannot revoke specified credential for any other reason (e.g., did not issue it, already revoked it). :param rr_id: revocation registry identifier :param cr_id: credential revocation identifier :return: time of revocation, in epoch seconds """ LOGGER.debug('Issuer.revoke_cred >>> rr_id: %s, cr_id: %s', rr_id, cr_id) if not self.wallet.handle: LOGGER.debug('Issuer.revoke_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not ok_rev_reg_id(rr_id): LOGGER.debug('Issuer.revoke_cred <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) tails_reader_handle = (await Tails( self.dir_tails, *rev_reg_id2cred_def_id_tag(rr_id)).open()).reader_handle try: rrdelta_json = await anoncreds.issuer_revoke_credential( self.wallet.handle, tails_reader_handle, rr_id, cr_id) except IndyError as x_indy: LOGGER.debug( 'Issuer.revoke_cred <!< Could not revoke revoc reg id %s, cred rev id %s: indy error code %s', rr_id, cr_id, x_indy.error_code) raise BadRevocation( 'Could not revoke revoc reg id {}, cred rev id {}: indy error code {}'.format( rr_id, cr_id, x_indy.error_code)) rr_ent_req_json = await ledger.build_revoc_reg_entry_request(self.did, rr_id, 'CL_ACCUM', rrdelta_json) resp_json = await self._sign_submit(rr_ent_req_json) # raises AbsentPool or ClosedPool if applicable resp = json.loads(resp_json) rv = self.pool.protocol.txn2epoch(resp) LOGGER.debug('Issuer.revoke_cred <<< %s', rv) return rv
[ "async", "def", "revoke_cred", "(", "self", ",", "rr_id", ":", "str", ",", "cr_id", ")", "->", "int", ":", "LOGGER", ".", "debug", "(", "'Issuer.revoke_cred >>> rr_id: %s, cr_id: %s'", ",", "rr_id", ",", "cr_id", ")", "if", "not", "self", ".", "wallet", "....
Revoke credential that input revocation registry identifier and credential revocation identifier specify. Return (epoch seconds) time of revocation. Raise AbsentTails if no tails file is available for input revocation registry identifier. Raise WalletState for closed wallet. Raise BadRevocation if issuer cannot revoke specified credential for any other reason (e.g., did not issue it, already revoked it). :param rr_id: revocation registry identifier :param cr_id: credential revocation identifier :return: time of revocation, in epoch seconds
[ "Revoke", "credential", "that", "input", "revocation", "registry", "identifier", "and", "credential", "revocation", "identifier", "specify", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L568-L622
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/issuer.py
Issuer.get_box_ids_issued
async def get_box_ids_issued(self) -> str: """ Return json object on lists of all unique box identifiers (schema identifiers, credential definition identifiers, and revocation registry identifiers) for all credential definitions and credentials issued; e.g., :: { "schema_id": [ "R17v42T4pk...:2:tombstone:1.2", ... ], "cred_def_id": [ "R17v42T4pk...:3:CL:19:tag", ... ] "rev_reg_id": [ "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0", "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1", ... ] } An issuer must issue a credential definition to include its schema identifier in the returned values; the schema identifier in isolation belongs properly to an Origin, not necessarily to an Issuer. The operation may be useful for a Verifier anchor going off-line to seed its cache before doing so. :return: tuple of sets for schema ids, cred def ids, rev reg ids """ LOGGER.debug('Issuer.get_box_ids_issued >>>') cd_ids = [ d for d in listdir(self.dir_tails) if isdir(join(self.dir_tails, d)) and ok_cred_def_id(d, self.did)] s_ids = [] for cd_id in cd_ids: try: s_ids.append(json.loads(await self.get_schema(cred_def_id2seq_no(cd_id)))['id']) except AbsentSchema: LOGGER.error( 'Issuer %s has issued cred def %s but no corresponding schema on ledger', self.name, cd_id) rr_ids = [basename(link) for link in Tails.links(self.dir_tails, self.did)] rv = json.dumps({ 'schema_id': s_ids, 'cred_def_id': cd_ids, 'rev_reg_id': rr_ids }) LOGGER.debug('Issuer.get_box_ids_issued <<< %s', rv) return rv
python
async def get_box_ids_issued(self) -> str: """ Return json object on lists of all unique box identifiers (schema identifiers, credential definition identifiers, and revocation registry identifiers) for all credential definitions and credentials issued; e.g., :: { "schema_id": [ "R17v42T4pk...:2:tombstone:1.2", ... ], "cred_def_id": [ "R17v42T4pk...:3:CL:19:tag", ... ] "rev_reg_id": [ "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0", "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1", ... ] } An issuer must issue a credential definition to include its schema identifier in the returned values; the schema identifier in isolation belongs properly to an Origin, not necessarily to an Issuer. The operation may be useful for a Verifier anchor going off-line to seed its cache before doing so. :return: tuple of sets for schema ids, cred def ids, rev reg ids """ LOGGER.debug('Issuer.get_box_ids_issued >>>') cd_ids = [ d for d in listdir(self.dir_tails) if isdir(join(self.dir_tails, d)) and ok_cred_def_id(d, self.did)] s_ids = [] for cd_id in cd_ids: try: s_ids.append(json.loads(await self.get_schema(cred_def_id2seq_no(cd_id)))['id']) except AbsentSchema: LOGGER.error( 'Issuer %s has issued cred def %s but no corresponding schema on ledger', self.name, cd_id) rr_ids = [basename(link) for link in Tails.links(self.dir_tails, self.did)] rv = json.dumps({ 'schema_id': s_ids, 'cred_def_id': cd_ids, 'rev_reg_id': rr_ids }) LOGGER.debug('Issuer.get_box_ids_issued <<< %s', rv) return rv
[ "async", "def", "get_box_ids_issued", "(", "self", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'Issuer.get_box_ids_issued >>>'", ")", "cd_ids", "=", "[", "d", "for", "d", "in", "listdir", "(", "self", ".", "dir_tails", ")", "if", "isdir", "(", "...
Return json object on lists of all unique box identifiers (schema identifiers, credential definition identifiers, and revocation registry identifiers) for all credential definitions and credentials issued; e.g., :: { "schema_id": [ "R17v42T4pk...:2:tombstone:1.2", ... ], "cred_def_id": [ "R17v42T4pk...:3:CL:19:tag", ... ] "rev_reg_id": [ "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0", "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1", ... ] } An issuer must issue a credential definition to include its schema identifier in the returned values; the schema identifier in isolation belongs properly to an Origin, not necessarily to an Issuer. The operation may be useful for a Verifier anchor going off-line to seed its cache before doing so. :return: tuple of sets for schema ids, cred def ids, rev reg ids
[ "Return", "json", "object", "on", "lists", "of", "all", "unique", "box", "identifiers", "(", "schema", "identifiers", "credential", "definition", "identifiers", "and", "revocation", "registry", "identifiers", ")", "for", "all", "credential", "definitions", "and", ...
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/issuer.py#L624-L679
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/docutil.py
resource
def resource(ref: str, delimiter: str = None) -> str: """ Given a (URI) reference, return up to its delimiter (exclusively), or all of it if there is none. :param ref: reference :param delimiter: delimiter character (default None maps to '#', or ';' introduces identifiers) """ return ref.split(delimiter if delimiter else '#')[0]
python
def resource(ref: str, delimiter: str = None) -> str: """ Given a (URI) reference, return up to its delimiter (exclusively), or all of it if there is none. :param ref: reference :param delimiter: delimiter character (default None maps to '#', or ';' introduces identifiers) """ return ref.split(delimiter if delimiter else '#')[0]
[ "def", "resource", "(", "ref", ":", "str", ",", "delimiter", ":", "str", "=", "None", ")", "->", "str", ":", "return", "ref", ".", "split", "(", "delimiter", "if", "delimiter", "else", "'#'", ")", "[", "0", "]" ]
Given a (URI) reference, return up to its delimiter (exclusively), or all of it if there is none. :param ref: reference :param delimiter: delimiter character (default None maps to '#', or ';' introduces identifiers)
[ "Given", "a", "(", "URI", ")", "reference", "return", "up", "to", "its", "delimiter", "(", "exclusively", ")", "or", "all", "of", "it", "if", "there", "is", "none", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/docutil.py#L24-L32
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/docutil.py
canon_did
def canon_did(uri: str) -> str: """ Convert a URI into a DID if need be, left-stripping 'did:sov:' if present. Return input if already a DID. Raise BadIdentifier for invalid input. :param uri: input URI or DID :return: corresponding DID """ if ok_did(uri): return uri if uri.startswith('did:sov:'): rv = uri[8:] if ok_did(rv): return rv raise BadIdentifier('Bad specification {} does not correspond to a sovrin DID'.format(uri))
python
def canon_did(uri: str) -> str: """ Convert a URI into a DID if need be, left-stripping 'did:sov:' if present. Return input if already a DID. Raise BadIdentifier for invalid input. :param uri: input URI or DID :return: corresponding DID """ if ok_did(uri): return uri if uri.startswith('did:sov:'): rv = uri[8:] if ok_did(rv): return rv raise BadIdentifier('Bad specification {} does not correspond to a sovrin DID'.format(uri))
[ "def", "canon_did", "(", "uri", ":", "str", ")", "->", "str", ":", "if", "ok_did", "(", "uri", ")", ":", "return", "uri", "if", "uri", ".", "startswith", "(", "'did:sov:'", ")", ":", "rv", "=", "uri", "[", "8", ":", "]", "if", "ok_did", "(", "r...
Convert a URI into a DID if need be, left-stripping 'did:sov:' if present. Return input if already a DID. Raise BadIdentifier for invalid input. :param uri: input URI or DID :return: corresponding DID
[ "Convert", "a", "URI", "into", "a", "DID", "if", "need", "be", "left", "-", "stripping", "did", ":", "sov", ":", "if", "present", ".", "Return", "input", "if", "already", "a", "DID", ".", "Raise", "BadIdentifier", "for", "invalid", "input", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/docutil.py#L35-L51
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/docutil.py
canon_ref
def canon_ref(did: str, ref: str, delimiter: str = None): """ Given a reference in a DID document, return it in its canonical form of a URI. :param did: DID acting as the identifier of the DID document :param ref: reference to canonicalize, either a DID or a fragment pointing to a location in the DID doc :param delimiter: delimiter character marking fragment (default '#') or introducing identifier (';') against DID resource """ if not ok_did(did): raise BadIdentifier('Bad DID {} cannot act as DID document identifier'.format(did)) if ok_did(ref): # e.g., LjgpST2rjsoxYegQDRm7EL return 'did:sov:{}'.format(did) if ok_did(resource(ref, delimiter)): # e.g., LjgpST2rjsoxYegQDRm7EL#keys-1 return 'did:sov:{}'.format(ref) if ref.startswith('did:sov:'): # e.g., did:sov:LjgpST2rjsoxYegQDRm7EL, did:sov:LjgpST2rjsoxYegQDRm7EL#3 rv = ref[8:] if ok_did(resource(rv, delimiter)): return ref raise BadIdentifier('Bad URI {} does not correspond to a sovrin DID'.format(ref)) if urlparse(ref).scheme: # e.g., https://example.com/messages/8377464 return ref return 'did:sov:{}{}{}'.format(did, delimiter if delimiter else '#', ref)
python
def canon_ref(did: str, ref: str, delimiter: str = None): """ Given a reference in a DID document, return it in its canonical form of a URI. :param did: DID acting as the identifier of the DID document :param ref: reference to canonicalize, either a DID or a fragment pointing to a location in the DID doc :param delimiter: delimiter character marking fragment (default '#') or introducing identifier (';') against DID resource """ if not ok_did(did): raise BadIdentifier('Bad DID {} cannot act as DID document identifier'.format(did)) if ok_did(ref): # e.g., LjgpST2rjsoxYegQDRm7EL return 'did:sov:{}'.format(did) if ok_did(resource(ref, delimiter)): # e.g., LjgpST2rjsoxYegQDRm7EL#keys-1 return 'did:sov:{}'.format(ref) if ref.startswith('did:sov:'): # e.g., did:sov:LjgpST2rjsoxYegQDRm7EL, did:sov:LjgpST2rjsoxYegQDRm7EL#3 rv = ref[8:] if ok_did(resource(rv, delimiter)): return ref raise BadIdentifier('Bad URI {} does not correspond to a sovrin DID'.format(ref)) if urlparse(ref).scheme: # e.g., https://example.com/messages/8377464 return ref return 'did:sov:{}{}{}'.format(did, delimiter if delimiter else '#', ref)
[ "def", "canon_ref", "(", "did", ":", "str", ",", "ref", ":", "str", ",", "delimiter", ":", "str", "=", "None", ")", ":", "if", "not", "ok_did", "(", "did", ")", ":", "raise", "BadIdentifier", "(", "'Bad DID {} cannot act as DID document identifier'", ".", ...
Given a reference in a DID document, return it in its canonical form of a URI. :param did: DID acting as the identifier of the DID document :param ref: reference to canonicalize, either a DID or a fragment pointing to a location in the DID doc :param delimiter: delimiter character marking fragment (default '#') or introducing identifier (';') against DID resource
[ "Given", "a", "reference", "in", "a", "DID", "document", "return", "it", "in", "its", "canonical", "form", "of", "a", "URI", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/docutil.py#L54-L82
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/record.py
StorageRecord.ok_tags
def ok_tags(tags: dict) -> bool: """ Whether input tags dict is OK as an indy-sdk tags structure (depth=1, string values). """ if not tags: return True depth = 0 queue = [(i, depth+1) for i in tags.values() if isinstance(i, dict)] max_depth = 0 while queue and max_depth < 2: sub, depth = queue.pop() max_depth = max(max_depth, depth) queue = queue + [(i, depth+1) for i in sub.values() if isinstance(i, dict)] return max_depth < 2 and all(isinstance(k, str) and isinstance(tags[k], str) for k in tags)
python
def ok_tags(tags: dict) -> bool: """ Whether input tags dict is OK as an indy-sdk tags structure (depth=1, string values). """ if not tags: return True depth = 0 queue = [(i, depth+1) for i in tags.values() if isinstance(i, dict)] max_depth = 0 while queue and max_depth < 2: sub, depth = queue.pop() max_depth = max(max_depth, depth) queue = queue + [(i, depth+1) for i in sub.values() if isinstance(i, dict)] return max_depth < 2 and all(isinstance(k, str) and isinstance(tags[k], str) for k in tags)
[ "def", "ok_tags", "(", "tags", ":", "dict", ")", "->", "bool", ":", "if", "not", "tags", ":", "return", "True", "depth", "=", "0", "queue", "=", "[", "(", "i", ",", "depth", "+", "1", ")", "for", "i", "in", "tags", ".", "values", "(", ")", "i...
Whether input tags dict is OK as an indy-sdk tags structure (depth=1, string values).
[ "Whether", "input", "tags", "dict", "is", "OK", "as", "an", "indy", "-", "sdk", "tags", "structure", "(", "depth", "=", "1", "string", "values", ")", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/record.py#L57-L72
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/record.py
StorageRecord.tags
def tags(self, val: str) -> None: """ Accessor for record tags (metadata). :param val: record tags """ if not StorageRecord.ok_tags(val): LOGGER.debug('StorageRecord.__init__ <!< Tags %s must map strings to strings', val) raise BadRecord('Tags {} must map strings to strings'.format(val)) self._tags = val or {}
python
def tags(self, val: str) -> None: """ Accessor for record tags (metadata). :param val: record tags """ if not StorageRecord.ok_tags(val): LOGGER.debug('StorageRecord.__init__ <!< Tags %s must map strings to strings', val) raise BadRecord('Tags {} must map strings to strings'.format(val)) self._tags = val or {}
[ "def", "tags", "(", "self", ",", "val", ":", "str", ")", "->", "None", ":", "if", "not", "StorageRecord", ".", "ok_tags", "(", "val", ")", ":", "LOGGER", ".", "debug", "(", "'StorageRecord.__init__ <!< Tags %s must map strings to strings'", ",", "val", ")", ...
Accessor for record tags (metadata). :param val: record tags
[ "Accessor", "for", "record", "tags", "(", "metadata", ")", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/record.py#L135-L146
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/record.py
StorageRecord.clear_tags
def clear_tags(self) -> dict: """ Accessor for record tags (metadata) stored in the clear. :return: record tags stored in the clear """ return {t: self.tags[t] for t in (self.tags or {}) if t.startswith('~')} or None
python
def clear_tags(self) -> dict: """ Accessor for record tags (metadata) stored in the clear. :return: record tags stored in the clear """ return {t: self.tags[t] for t in (self.tags or {}) if t.startswith('~')} or None
[ "def", "clear_tags", "(", "self", ")", "->", "dict", ":", "return", "{", "t", ":", "self", ".", "tags", "[", "t", "]", "for", "t", "in", "(", "self", ".", "tags", "or", "{", "}", ")", "if", "t", ".", "startswith", "(", "'~'", ")", "}", "or", ...
Accessor for record tags (metadata) stored in the clear. :return: record tags stored in the clear
[ "Accessor", "for", "record", "tags", "(", "metadata", ")", "stored", "in", "the", "clear", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/record.py#L149-L156
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/record.py
StorageRecord.encr_tags
def encr_tags(self) -> dict: """ Accessor for record tags (metadata) stored encrypted. :return: record tags stored encrypted """ return {t: self._tags[t] for t in self.tags or {} if not t.startswith('~')} or None
python
def encr_tags(self) -> dict: """ Accessor for record tags (metadata) stored encrypted. :return: record tags stored encrypted """ return {t: self._tags[t] for t in self.tags or {} if not t.startswith('~')} or None
[ "def", "encr_tags", "(", "self", ")", "->", "dict", ":", "return", "{", "t", ":", "self", ".", "_tags", "[", "t", "]", "for", "t", "in", "self", ".", "tags", "or", "{", "}", "if", "not", "t", ".", "startswith", "(", "'~'", ")", "}", "or", "No...
Accessor for record tags (metadata) stored encrypted. :return: record tags stored encrypted
[ "Accessor", "for", "record", "tags", "(", "metadata", ")", "stored", "encrypted", "." ]
train
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/record.py#L159-L166
ubernostrum/pwned-passwords-django
src/pwned_passwords_django/api.py
_get_pwned
def _get_pwned(prefix): """ Fetches a dict of all hash suffixes from Pwned Passwords for a given SHA-1 prefix. """ try: response = requests.get( url=API_ENDPOINT.format(prefix), headers={'User-Agent': USER_AGENT}, timeout=getattr( settings, 'PWNED_PASSWORDS_API_TIMEOUT', REQUEST_TIMEOUT, ), ) response.raise_for_status() except requests.RequestException as e: # Gracefully handle timeouts and HTTP error response codes. log.warning( 'Skipped Pwned Passwords check due to error: %r', e ) return None results = {} for line in response.text.splitlines(): line_suffix, _, times = line.partition(':') results[line_suffix] = int(times) return results
python
def _get_pwned(prefix): """ Fetches a dict of all hash suffixes from Pwned Passwords for a given SHA-1 prefix. """ try: response = requests.get( url=API_ENDPOINT.format(prefix), headers={'User-Agent': USER_AGENT}, timeout=getattr( settings, 'PWNED_PASSWORDS_API_TIMEOUT', REQUEST_TIMEOUT, ), ) response.raise_for_status() except requests.RequestException as e: # Gracefully handle timeouts and HTTP error response codes. log.warning( 'Skipped Pwned Passwords check due to error: %r', e ) return None results = {} for line in response.text.splitlines(): line_suffix, _, times = line.partition(':') results[line_suffix] = int(times) return results
[ "def", "_get_pwned", "(", "prefix", ")", ":", "try", ":", "response", "=", "requests", ".", "get", "(", "url", "=", "API_ENDPOINT", ".", "format", "(", "prefix", ")", ",", "headers", "=", "{", "'User-Agent'", ":", "USER_AGENT", "}", ",", "timeout", "="...
Fetches a dict of all hash suffixes from Pwned Passwords for a given SHA-1 prefix.
[ "Fetches", "a", "dict", "of", "all", "hash", "suffixes", "from", "Pwned", "Passwords", "for", "a", "given", "SHA", "-", "1", "prefix", "." ]
train
https://github.com/ubernostrum/pwned-passwords-django/blob/e61f3ec21c37f1c2b4568bf11ea7eb782e2a5fb5/src/pwned_passwords_django/api.py#L29-L58
ubernostrum/pwned-passwords-django
src/pwned_passwords_django/api.py
pwned_password
def pwned_password(password): """ Checks a password against the Pwned Passwords database. """ if not isinstance(password, text_type): raise TypeError('Password values to check must be Unicode strings.') password_hash = hashlib.sha1(password.encode('utf-8')).hexdigest().upper() prefix, suffix = password_hash[:5], password_hash[5:] results = _get_pwned(prefix) if results is None: # Gracefully handle timeouts and HTTP error response codes. return None return results.get(suffix, 0)
python
def pwned_password(password): """ Checks a password against the Pwned Passwords database. """ if not isinstance(password, text_type): raise TypeError('Password values to check must be Unicode strings.') password_hash = hashlib.sha1(password.encode('utf-8')).hexdigest().upper() prefix, suffix = password_hash[:5], password_hash[5:] results = _get_pwned(prefix) if results is None: # Gracefully handle timeouts and HTTP error response codes. return None return results.get(suffix, 0)
[ "def", "pwned_password", "(", "password", ")", ":", "if", "not", "isinstance", "(", "password", ",", "text_type", ")", ":", "raise", "TypeError", "(", "'Password values to check must be Unicode strings.'", ")", "password_hash", "=", "hashlib", ".", "sha1", "(", "p...
Checks a password against the Pwned Passwords database.
[ "Checks", "a", "password", "against", "the", "Pwned", "Passwords", "database", "." ]
train
https://github.com/ubernostrum/pwned-passwords-django/blob/e61f3ec21c37f1c2b4568bf11ea7eb782e2a5fb5/src/pwned_passwords_django/api.py#L61-L74
ganguli-lab/proxalgs
proxalgs/core.py
Optimizer.add_regularizer
def add_regularizer(self, proxfun, **kwargs): """ Add a regularizer from the operators module to the list of objectives Parameters ---------- proxfun : string or function If a string, then it must be the name of a corresponding function in the `operators` module. If a function, then it must apply a proximal update given an initial point x0, momentum parameter rho, and optional arguments given in `**kwargs`. \\*\\*kwargs : keyword arguments Any optional arguments required for the given function """ # if proxfun is a string, grab the corresponding function from operators.py if isinstance(proxfun, str): try: proxfun_name = proxfun.split(None, 1)[0] # Ignore everything after white space op = getattr(operators, proxfun_name) self.objectives.append(lambda theta, rho: op(theta.copy(), float(rho), **kwargs)) except AttributeError as e: print(str(e) + '\n' + 'Could not find the function ' + proxfun + ' in the operators module!') # if proxfun is a function, add it as its own proximal operator elif hasattr(proxfun, '__call__'): self.objectives.append(lambda theta, rho: proxfun(theta.copy(), float(rho))) # type of proxfun must be a string or a function else: raise TypeError('The argument "proxfun" must be a string or a function!')
python
def add_regularizer(self, proxfun, **kwargs): """ Add a regularizer from the operators module to the list of objectives Parameters ---------- proxfun : string or function If a string, then it must be the name of a corresponding function in the `operators` module. If a function, then it must apply a proximal update given an initial point x0, momentum parameter rho, and optional arguments given in `**kwargs`. \\*\\*kwargs : keyword arguments Any optional arguments required for the given function """ # if proxfun is a string, grab the corresponding function from operators.py if isinstance(proxfun, str): try: proxfun_name = proxfun.split(None, 1)[0] # Ignore everything after white space op = getattr(operators, proxfun_name) self.objectives.append(lambda theta, rho: op(theta.copy(), float(rho), **kwargs)) except AttributeError as e: print(str(e) + '\n' + 'Could not find the function ' + proxfun + ' in the operators module!') # if proxfun is a function, add it as its own proximal operator elif hasattr(proxfun, '__call__'): self.objectives.append(lambda theta, rho: proxfun(theta.copy(), float(rho))) # type of proxfun must be a string or a function else: raise TypeError('The argument "proxfun" must be a string or a function!')
[ "def", "add_regularizer", "(", "self", ",", "proxfun", ",", "*", "*", "kwargs", ")", ":", "# if proxfun is a string, grab the corresponding function from operators.py", "if", "isinstance", "(", "proxfun", ",", "str", ")", ":", "try", ":", "proxfun_name", "=", "proxf...
Add a regularizer from the operators module to the list of objectives Parameters ---------- proxfun : string or function If a string, then it must be the name of a corresponding function in the `operators` module. If a function, then it must apply a proximal update given an initial point x0, momentum parameter rho, and optional arguments given in `**kwargs`. \\*\\*kwargs : keyword arguments Any optional arguments required for the given function
[ "Add", "a", "regularizer", "from", "the", "operators", "module", "to", "the", "list", "of", "objectives" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/core.py#L59-L92
ganguli-lab/proxalgs
proxalgs/core.py
Optimizer.set_regularizers
def set_regularizers(self, regularizers, clear=True): """ Adds a set of regularizers Parameters ---------- regularizers : dict Each key is the name of a corresponding proximal operator, and the value associated with that key is a set of keyword arguments clear : boolean, optional Whether or not to clear the existing regularizers. (Default: True) """ # clear existing operators if clear: self.clear() # add new regularizers list([self.add_regularizer(proxfun, **regularizers[proxfun]) for proxfun in regularizers.keys()])
python
def set_regularizers(self, regularizers, clear=True): """ Adds a set of regularizers Parameters ---------- regularizers : dict Each key is the name of a corresponding proximal operator, and the value associated with that key is a set of keyword arguments clear : boolean, optional Whether or not to clear the existing regularizers. (Default: True) """ # clear existing operators if clear: self.clear() # add new regularizers list([self.add_regularizer(proxfun, **regularizers[proxfun]) for proxfun in regularizers.keys()])
[ "def", "set_regularizers", "(", "self", ",", "regularizers", ",", "clear", "=", "True", ")", ":", "# clear existing operators", "if", "clear", ":", "self", ".", "clear", "(", ")", "# add new regularizers", "list", "(", "[", "self", ".", "add_regularizer", "(",...
Adds a set of regularizers Parameters ---------- regularizers : dict Each key is the name of a corresponding proximal operator, and the value associated with that key is a set of keyword arguments clear : boolean, optional Whether or not to clear the existing regularizers. (Default: True)
[ "Adds", "a", "set", "of", "regularizers" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/core.py#L94-L115
ganguli-lab/proxalgs
proxalgs/core.py
Optimizer.minimize
def minimize(self, theta_init, max_iter=50, callback=None, disp=0, tau=(10., 2., 2.), tol=1e-3): """ Minimize a list of objectives using a proximal consensus algorithm Parameters ---------- theta_init : ndarray Initial parameter vector (numpy array) max_iter : int, optional Maximum number of iterations to run (default: 50) callback : function, optional a function that gets called on each iteration with the following arguments: the current parameter value (ndarray), and a dictionary that contains a information about the status of the algorithm disp : int, optional determines how much information to display when running. Ranges from 0 (nothing) to 3 (lots of information) Returns ------- theta : ndarray The parameters found after running the optimization procedure Other Parameters ---------------- tau : (float, float, float), optional initial, increment and decrement parameters for the momentum scheduler (default: (10, 2, 2)) tol : float, optional residual tolerance for assessing convergence. if both the primal and dual residuals are less than this value, then the algorithm has converged (default: 1e-3) """ # get list of objectives for this parameter num_obj = len(self.objectives) assert num_obj >= 1, "There must be at least one objective!" 
# initialize lists of primal and dual variable copies, one for each objective orig_shape = theta_init.shape primals = [theta_init.flatten() for _ in range(num_obj)] duals = [np.zeros(theta_init.size) for _ in range(num_obj)] theta_avg = np.mean(primals, axis=0).ravel() # initialize penalty parameter tau = namedtuple('tau', ('init', 'inc', 'dec'))(*tau) rho = tau.init # store cumulative runtimes of each iteration, starting now tstart = time.time() # clear metadata self.metadata = defaultdict(list) # run ADMM iterations self.converged = False for cur_iter in range(max_iter): # store the parameters from the previous iteration theta_prev = theta_avg # update each primal variable copy by taking a proximal step via each objective for varidx, dual in enumerate(duals): primals[varidx] = self.objectives[varidx]((theta_prev - dual).reshape(orig_shape), rho).ravel() # average primal copies theta_avg = np.mean(primals, axis=0) # update the dual variables (after primal update has finished) for varidx, primal in enumerate(primals): duals[varidx] += primal - theta_avg # compute primal and dual residuals primal_resid = float(np.sum([np.linalg.norm(primal - theta_avg) for primal in primals])) dual_resid = num_obj * rho ** 2 * np.linalg.norm(theta_avg - theta_prev) # update penalty parameter according to primal and dual residuals # (see sect. 
3.4.1 of the Boyd and Parikh ADMM paper) if primal_resid > tau.init * dual_resid: rho *= float(tau.inc) elif dual_resid > tau.init * primal_resid: rho /= float(tau.dec) # update metadata for this iteration self.metadata['Primal resid'].append(primal_resid) self.metadata['Dual resid'].append(dual_resid) self.metadata['Time (s)'].append(time.time() - tstart) self.metadata['rho'].append(rho) # invoke the callback function with the current parameters and # history if callback is not None: # get the metadata from this iteration data = valmap(last, self.metadata) callback(theta_avg.reshape(orig_shape), data) # update the display self.update_display(cur_iter + 1, disp) # check for convergence if (primal_resid <= tol) & (dual_resid <= tol): self.converged = True break # clean up display self.update_display(-1, disp) # store and return final parameters self.theta = theta_avg.reshape(orig_shape) return self.theta
python
def minimize(self, theta_init, max_iter=50, callback=None, disp=0, tau=(10., 2., 2.), tol=1e-3): """ Minimize a list of objectives using a proximal consensus algorithm Parameters ---------- theta_init : ndarray Initial parameter vector (numpy array) max_iter : int, optional Maximum number of iterations to run (default: 50) callback : function, optional a function that gets called on each iteration with the following arguments: the current parameter value (ndarray), and a dictionary that contains a information about the status of the algorithm disp : int, optional determines how much information to display when running. Ranges from 0 (nothing) to 3 (lots of information) Returns ------- theta : ndarray The parameters found after running the optimization procedure Other Parameters ---------------- tau : (float, float, float), optional initial, increment and decrement parameters for the momentum scheduler (default: (10, 2, 2)) tol : float, optional residual tolerance for assessing convergence. if both the primal and dual residuals are less than this value, then the algorithm has converged (default: 1e-3) """ # get list of objectives for this parameter num_obj = len(self.objectives) assert num_obj >= 1, "There must be at least one objective!" 
# initialize lists of primal and dual variable copies, one for each objective orig_shape = theta_init.shape primals = [theta_init.flatten() for _ in range(num_obj)] duals = [np.zeros(theta_init.size) for _ in range(num_obj)] theta_avg = np.mean(primals, axis=0).ravel() # initialize penalty parameter tau = namedtuple('tau', ('init', 'inc', 'dec'))(*tau) rho = tau.init # store cumulative runtimes of each iteration, starting now tstart = time.time() # clear metadata self.metadata = defaultdict(list) # run ADMM iterations self.converged = False for cur_iter in range(max_iter): # store the parameters from the previous iteration theta_prev = theta_avg # update each primal variable copy by taking a proximal step via each objective for varidx, dual in enumerate(duals): primals[varidx] = self.objectives[varidx]((theta_prev - dual).reshape(orig_shape), rho).ravel() # average primal copies theta_avg = np.mean(primals, axis=0) # update the dual variables (after primal update has finished) for varidx, primal in enumerate(primals): duals[varidx] += primal - theta_avg # compute primal and dual residuals primal_resid = float(np.sum([np.linalg.norm(primal - theta_avg) for primal in primals])) dual_resid = num_obj * rho ** 2 * np.linalg.norm(theta_avg - theta_prev) # update penalty parameter according to primal and dual residuals # (see sect. 
3.4.1 of the Boyd and Parikh ADMM paper) if primal_resid > tau.init * dual_resid: rho *= float(tau.inc) elif dual_resid > tau.init * primal_resid: rho /= float(tau.dec) # update metadata for this iteration self.metadata['Primal resid'].append(primal_resid) self.metadata['Dual resid'].append(dual_resid) self.metadata['Time (s)'].append(time.time() - tstart) self.metadata['rho'].append(rho) # invoke the callback function with the current parameters and # history if callback is not None: # get the metadata from this iteration data = valmap(last, self.metadata) callback(theta_avg.reshape(orig_shape), data) # update the display self.update_display(cur_iter + 1, disp) # check for convergence if (primal_resid <= tol) & (dual_resid <= tol): self.converged = True break # clean up display self.update_display(-1, disp) # store and return final parameters self.theta = theta_avg.reshape(orig_shape) return self.theta
[ "def", "minimize", "(", "self", ",", "theta_init", ",", "max_iter", "=", "50", ",", "callback", "=", "None", ",", "disp", "=", "0", ",", "tau", "=", "(", "10.", ",", "2.", ",", "2.", ")", ",", "tol", "=", "1e-3", ")", ":", "# get list of objectives...
Minimize a list of objectives using a proximal consensus algorithm Parameters ---------- theta_init : ndarray Initial parameter vector (numpy array) max_iter : int, optional Maximum number of iterations to run (default: 50) callback : function, optional a function that gets called on each iteration with the following arguments: the current parameter value (ndarray), and a dictionary that contains a information about the status of the algorithm disp : int, optional determines how much information to display when running. Ranges from 0 (nothing) to 3 (lots of information) Returns ------- theta : ndarray The parameters found after running the optimization procedure Other Parameters ---------------- tau : (float, float, float), optional initial, increment and decrement parameters for the momentum scheduler (default: (10, 2, 2)) tol : float, optional residual tolerance for assessing convergence. if both the primal and dual residuals are less than this value, then the algorithm has converged (default: 1e-3)
[ "Minimize", "a", "list", "of", "objectives", "using", "a", "proximal", "consensus", "algorithm" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/core.py#L121-L232
ganguli-lab/proxalgs
proxalgs/core.py
Optimizer.update_display
def update_display(self, iteration, disp_level, col_width=12): # pragma: no cover """ Prints information about the optimization procedure to standard output Parameters ---------- iteration : int The current iteration. Must either a positive integer or -1, which indicates the end of the algorithm disp_level : int An integer which controls how much information to display, ranging from 0 (nothing) to 3 (lots of stuff) col_width : int The width of each column in the data table, used if disp_level > 1 """ # exit and print nothing if disp_level is zero if disp_level == 0: return else: # simple update, no table if disp_level == 1 and iteration >= 0: print('[Iteration %i]' % iteration) # fancy table updates if disp_level > 1: # get the metadata from this iteration data = valmap(last, self.metadata) # choose what keys to use keys = ['Time (s)', 'Primal resid', 'Dual resid', 'rho'] # initial update. print out table headers if iteration == 1: print(tableprint.header(keys, width=col_width)) # print data print(tableprint.row([data[k] for k in keys], width=col_width, format_spec='4g')) if iteration == -1: print(tableprint.bottom(len(keys), width=col_width) + '\n') # print convergence statement if iteration == -1 and self.converged: print('Converged after %i iterations!' % len(self.metadata['Primal resid']))
python
def update_display(self, iteration, disp_level, col_width=12): # pragma: no cover """ Prints information about the optimization procedure to standard output Parameters ---------- iteration : int The current iteration. Must either a positive integer or -1, which indicates the end of the algorithm disp_level : int An integer which controls how much information to display, ranging from 0 (nothing) to 3 (lots of stuff) col_width : int The width of each column in the data table, used if disp_level > 1 """ # exit and print nothing if disp_level is zero if disp_level == 0: return else: # simple update, no table if disp_level == 1 and iteration >= 0: print('[Iteration %i]' % iteration) # fancy table updates if disp_level > 1: # get the metadata from this iteration data = valmap(last, self.metadata) # choose what keys to use keys = ['Time (s)', 'Primal resid', 'Dual resid', 'rho'] # initial update. print out table headers if iteration == 1: print(tableprint.header(keys, width=col_width)) # print data print(tableprint.row([data[k] for k in keys], width=col_width, format_spec='4g')) if iteration == -1: print(tableprint.bottom(len(keys), width=col_width) + '\n') # print convergence statement if iteration == -1 and self.converged: print('Converged after %i iterations!' % len(self.metadata['Primal resid']))
[ "def", "update_display", "(", "self", ",", "iteration", ",", "disp_level", ",", "col_width", "=", "12", ")", ":", "# pragma: no cover", "# exit and print nothing if disp_level is zero", "if", "disp_level", "==", "0", ":", "return", "else", ":", "# simple update, no ta...
Prints information about the optimization procedure to standard output Parameters ---------- iteration : int The current iteration. Must either a positive integer or -1, which indicates the end of the algorithm disp_level : int An integer which controls how much information to display, ranging from 0 (nothing) to 3 (lots of stuff) col_width : int The width of each column in the data table, used if disp_level > 1
[ "Prints", "information", "about", "the", "optimization", "procedure", "to", "standard", "output" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/core.py#L234-L281
ganguli-lab/proxalgs
proxalgs/tensor.py
susvd
def susvd(x, x_obs, rho, penalties): """ Sequential unfolding SVD Parameters ---------- x : Tensor x_obs : array_like rho : float penalties : array_like penalty for each unfolding of the input tensor """ assert type(x) == Tensor, "Input array must be a Tensor" while True: # proximal operator for the Fro. norm x = squared_error(x, rho, x_obs) # sequential singular value thresholding for ix, penalty in enumerate(penalties): x = x.unfold(ix).svt(penalty / rho).fold() yield x
python
def susvd(x, x_obs, rho, penalties): """ Sequential unfolding SVD Parameters ---------- x : Tensor x_obs : array_like rho : float penalties : array_like penalty for each unfolding of the input tensor """ assert type(x) == Tensor, "Input array must be a Tensor" while True: # proximal operator for the Fro. norm x = squared_error(x, rho, x_obs) # sequential singular value thresholding for ix, penalty in enumerate(penalties): x = x.unfold(ix).svt(penalty / rho).fold() yield x
[ "def", "susvd", "(", "x", ",", "x_obs", ",", "rho", ",", "penalties", ")", ":", "assert", "type", "(", "x", ")", "==", "Tensor", ",", "\"Input array must be a Tensor\"", "while", "True", ":", "# proximal operator for the Fro. norm", "x", "=", "squared_error", ...
Sequential unfolding SVD Parameters ---------- x : Tensor x_obs : array_like rho : float penalties : array_like penalty for each unfolding of the input tensor
[ "Sequential", "unfolding", "SVD" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/tensor.py#L69-L96
malinoff/structures
structures/core.py
Construct.build
def build(self, obj, context=None) -> bytes: """ Build bytes from the python object. :param obj: Python object to build bytes from. :param context: Optional context dictionary. """ stream = BytesIO() self.build_stream(obj, stream, context) return stream.getvalue()
python
def build(self, obj, context=None) -> bytes: """ Build bytes from the python object. :param obj: Python object to build bytes from. :param context: Optional context dictionary. """ stream = BytesIO() self.build_stream(obj, stream, context) return stream.getvalue()
[ "def", "build", "(", "self", ",", "obj", ",", "context", "=", "None", ")", "->", "bytes", ":", "stream", "=", "BytesIO", "(", ")", "self", ".", "build_stream", "(", "obj", ",", "stream", ",", "context", ")", "return", "stream", ".", "getvalue", "(", ...
Build bytes from the python object. :param obj: Python object to build bytes from. :param context: Optional context dictionary.
[ "Build", "bytes", "from", "the", "python", "object", "." ]
train
https://github.com/malinoff/structures/blob/36b1d641d399cd0b2a824704da53d8b5c8bd4f10/structures/core.py#L41-L50
malinoff/structures
structures/core.py
Construct.parse
def parse(self, data: bytes, context=None): """ Parse some python object from the data. :param data: Data to be parsed. :param context: Optional context dictionary. """ stream = BytesIO(data) return self.parse_stream(stream, context)
python
def parse(self, data: bytes, context=None): """ Parse some python object from the data. :param data: Data to be parsed. :param context: Optional context dictionary. """ stream = BytesIO(data) return self.parse_stream(stream, context)
[ "def", "parse", "(", "self", ",", "data", ":", "bytes", ",", "context", "=", "None", ")", ":", "stream", "=", "BytesIO", "(", "data", ")", "return", "self", ".", "parse_stream", "(", "stream", ",", "context", ")" ]
Parse some python object from the data. :param data: Data to be parsed. :param context: Optional context dictionary.
[ "Parse", "some", "python", "object", "from", "the", "data", "." ]
train
https://github.com/malinoff/structures/blob/36b1d641d399cd0b2a824704da53d8b5c8bd4f10/structures/core.py#L52-L60
malinoff/structures
structures/core.py
Construct.build_stream
def build_stream(self, obj, stream: BytesIO, context=None) -> None: """ Build bytes from the python object into the stream. :param obj: Python object to build bytes from. :param stream: A ``io.BytesIO`` instance to write bytes into. :param context: Optional context dictionary. """ if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: self._build_stream(obj, stream, context) except Error: raise except Exception as exc: raise BuildingError(str(exc))
python
def build_stream(self, obj, stream: BytesIO, context=None) -> None: """ Build bytes from the python object into the stream. :param obj: Python object to build bytes from. :param stream: A ``io.BytesIO`` instance to write bytes into. :param context: Optional context dictionary. """ if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: self._build_stream(obj, stream, context) except Error: raise except Exception as exc: raise BuildingError(str(exc))
[ "def", "build_stream", "(", "self", ",", "obj", ",", "stream", ":", "BytesIO", ",", "context", "=", "None", ")", "->", "None", ":", "if", "context", "is", "None", ":", "context", "=", "Context", "(", ")", "if", "not", "isinstance", "(", "context", ",...
Build bytes from the python object into the stream. :param obj: Python object to build bytes from. :param stream: A ``io.BytesIO`` instance to write bytes into. :param context: Optional context dictionary.
[ "Build", "bytes", "from", "the", "python", "object", "into", "the", "stream", "." ]
train
https://github.com/malinoff/structures/blob/36b1d641d399cd0b2a824704da53d8b5c8bd4f10/structures/core.py#L62-L79
malinoff/structures
structures/core.py
Construct.parse_stream
def parse_stream(self, stream: BytesIO, context=None): """ Parse some python object from the stream. :param stream: Stream from which the data is read and parsed. :param context: Optional context dictionary. """ if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: return self._parse_stream(stream, context) except Error: raise except Exception as exc: raise ParsingError(str(exc))
python
def parse_stream(self, stream: BytesIO, context=None): """ Parse some python object from the stream. :param stream: Stream from which the data is read and parsed. :param context: Optional context dictionary. """ if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: return self._parse_stream(stream, context) except Error: raise except Exception as exc: raise ParsingError(str(exc))
[ "def", "parse_stream", "(", "self", ",", "stream", ":", "BytesIO", ",", "context", "=", "None", ")", ":", "if", "context", "is", "None", ":", "context", "=", "Context", "(", ")", "if", "not", "isinstance", "(", "context", ",", "Context", ")", ":", "c...
Parse some python object from the stream. :param stream: Stream from which the data is read and parsed. :param context: Optional context dictionary.
[ "Parse", "some", "python", "object", "from", "the", "stream", "." ]
train
https://github.com/malinoff/structures/blob/36b1d641d399cd0b2a824704da53d8b5c8bd4f10/structures/core.py#L81-L97
malinoff/structures
structures/core.py
Construct.sizeof
def sizeof(self, context=None) -> int: """ Return the size of the construct in bytes. :param context: Optional context dictionary. """ if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: return self._sizeof(context) except Error: raise except Exception as exc: raise SizeofError(str(exc))
python
def sizeof(self, context=None) -> int: """ Return the size of the construct in bytes. :param context: Optional context dictionary. """ if context is None: context = Context() if not isinstance(context, Context): context = Context(context) try: return self._sizeof(context) except Error: raise except Exception as exc: raise SizeofError(str(exc))
[ "def", "sizeof", "(", "self", ",", "context", "=", "None", ")", "->", "int", ":", "if", "context", "is", "None", ":", "context", "=", "Context", "(", ")", "if", "not", "isinstance", "(", "context", ",", "Context", ")", ":", "context", "=", "Context",...
Return the size of the construct in bytes. :param context: Optional context dictionary.
[ "Return", "the", "size", "of", "the", "construct", "in", "bytes", "." ]
train
https://github.com/malinoff/structures/blob/36b1d641d399cd0b2a824704da53d8b5c8bd4f10/structures/core.py#L99-L114
ganguli-lab/proxalgs
proxalgs/operators.py
sfo
def sfo(x0, rho, optimizer, num_steps=50): """ Proximal operator for an arbitrary function minimized via the Sum-of-Functions optimizer (SFO) Notes ----- SFO is a function optimizer for the case where the target function breaks into a sum over minibatches, or a sum over contributing functions. It is described in more detail in [1]_. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) optimizer : SFO instance Instance of the SFO object in `SFO_admm.py` num_steps : int, optional Number of SFO steps to take Returns ------- theta : array_like The parameter vector found after running `num_steps` iterations of the SFO optimizer References ---------- .. [1] Jascha Sohl-Dickstein, Ben Poole, and Surya Ganguli. Fast large-scale optimization by unifying stochastic gradient and quasi-Newton methods. International Conference on Machine Learning (2014). `arXiv preprint arXiv:1311.2115 (2013) <http://arxiv.org/abs/1311.2115>`_. """ # set the current parameter value of SFO to the given value optimizer.set_theta(x0, float(rho)) # set the previous ADMM location as the flattened paramter array optimizer.theta_admm_prev = optimizer.theta_original_to_flat(x0) # run the optimizer for n steps return optimizer.optimize(num_steps=num_steps)
python
def sfo(x0, rho, optimizer, num_steps=50): """ Proximal operator for an arbitrary function minimized via the Sum-of-Functions optimizer (SFO) Notes ----- SFO is a function optimizer for the case where the target function breaks into a sum over minibatches, or a sum over contributing functions. It is described in more detail in [1]_. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) optimizer : SFO instance Instance of the SFO object in `SFO_admm.py` num_steps : int, optional Number of SFO steps to take Returns ------- theta : array_like The parameter vector found after running `num_steps` iterations of the SFO optimizer References ---------- .. [1] Jascha Sohl-Dickstein, Ben Poole, and Surya Ganguli. Fast large-scale optimization by unifying stochastic gradient and quasi-Newton methods. International Conference on Machine Learning (2014). `arXiv preprint arXiv:1311.2115 (2013) <http://arxiv.org/abs/1311.2115>`_. """ # set the current parameter value of SFO to the given value optimizer.set_theta(x0, float(rho)) # set the previous ADMM location as the flattened paramter array optimizer.theta_admm_prev = optimizer.theta_original_to_flat(x0) # run the optimizer for n steps return optimizer.optimize(num_steps=num_steps)
[ "def", "sfo", "(", "x0", ",", "rho", ",", "optimizer", ",", "num_steps", "=", "50", ")", ":", "# set the current parameter value of SFO to the given value", "optimizer", ".", "set_theta", "(", "x0", ",", "float", "(", "rho", ")", ")", "# set the previous ADMM loca...
Proximal operator for an arbitrary function minimized via the Sum-of-Functions optimizer (SFO) Notes ----- SFO is a function optimizer for the case where the target function breaks into a sum over minibatches, or a sum over contributing functions. It is described in more detail in [1]_. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) optimizer : SFO instance Instance of the SFO object in `SFO_admm.py` num_steps : int, optional Number of SFO steps to take Returns ------- theta : array_like The parameter vector found after running `num_steps` iterations of the SFO optimizer References ---------- .. [1] Jascha Sohl-Dickstein, Ben Poole, and Surya Ganguli. Fast large-scale optimization by unifying stochastic gradient and quasi-Newton methods. International Conference on Machine Learning (2014). `arXiv preprint arXiv:1311.2115 (2013) <http://arxiv.org/abs/1311.2115>`_.
[ "Proximal", "operator", "for", "an", "arbitrary", "function", "minimized", "via", "the", "Sum", "-", "of", "-", "Functions", "optimizer", "(", "SFO", ")" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L42-L86
ganguli-lab/proxalgs
proxalgs/operators.py
poissreg
def poissreg(x0, rho, x, y): """ Proximal operator for Poisson regression Computes the proximal operator of the negative log-likelihood loss assumping a Poisson noise distribution. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) x : (n, k) array_like A design matrix consisting of n examples of k-dimensional features (or input). y : (n,) array_like A vector containing the responses (outupt) to the n features given in x. Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # objective and gradient n = float(x.shape[0]) f = lambda w: np.mean(np.exp(x.dot(w)) - y * x.dot(w)) df = lambda w: (x.T.dot(np.exp(x.dot(w))) - x.T.dot(y)) / n # minimize via BFGS return bfgs(x0, rho, f, df)
python
def poissreg(x0, rho, x, y): """ Proximal operator for Poisson regression Computes the proximal operator of the negative log-likelihood loss assumping a Poisson noise distribution. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) x : (n, k) array_like A design matrix consisting of n examples of k-dimensional features (or input). y : (n,) array_like A vector containing the responses (outupt) to the n features given in x. Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # objective and gradient n = float(x.shape[0]) f = lambda w: np.mean(np.exp(x.dot(w)) - y * x.dot(w)) df = lambda w: (x.T.dot(np.exp(x.dot(w))) - x.T.dot(y)) / n # minimize via BFGS return bfgs(x0, rho, f, df)
[ "def", "poissreg", "(", "x0", ",", "rho", ",", "x", ",", "y", ")", ":", "# objective and gradient", "n", "=", "float", "(", "x", ".", "shape", "[", "0", "]", ")", "f", "=", "lambda", "w", ":", "np", ".", "mean", "(", "np", ".", "exp", "(", "x...
Proximal operator for Poisson regression Computes the proximal operator of the negative log-likelihood loss assumping a Poisson noise distribution. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) x : (n, k) array_like A design matrix consisting of n examples of k-dimensional features (or input). y : (n,) array_like A vector containing the responses (outupt) to the n features given in x. Returns ------- theta : array_like The parameter vector found after running the proximal update step
[ "Proximal", "operator", "for", "Poisson", "regression" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L90-L122
ganguli-lab/proxalgs
proxalgs/operators.py
bfgs
def bfgs(x0, rho, f_df, maxiter=50, method='BFGS'): """ Proximal operator for minimizing an arbitrary function using BFGS Uses the BFGS algorithm to find the proximal update for an arbitrary function, `f`, whose gradient is known. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) f_df : function The objective function and gradient maxiter : int, optional Maximum number of iterations to take (default: 50) method : str, optional Which scipy.optimize algorithm to use (default: 'BFGS') Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # keep track of the original shape orig_shape = x0.shape # specify the objective function and gradient for the proximal operator def f_df_augmented(x): xk = x.reshape(orig_shape) obj, grad = f_df(xk) g = obj + (rho / 2.) * np.sum((xk - x0) ** 2) dg = (grad + rho * (xk - x0)).ravel() return g, dg # minimize via BFGS options = {'maxiter': maxiter, 'disp': False} return opt.minimize(f_df_augmented, x0.ravel(), method=method, jac=True, options=options).x.reshape(orig_shape)
python
def bfgs(x0, rho, f_df, maxiter=50, method='BFGS'): """ Proximal operator for minimizing an arbitrary function using BFGS Uses the BFGS algorithm to find the proximal update for an arbitrary function, `f`, whose gradient is known. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) f_df : function The objective function and gradient maxiter : int, optional Maximum number of iterations to take (default: 50) method : str, optional Which scipy.optimize algorithm to use (default: 'BFGS') Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # keep track of the original shape orig_shape = x0.shape # specify the objective function and gradient for the proximal operator def f_df_augmented(x): xk = x.reshape(orig_shape) obj, grad = f_df(xk) g = obj + (rho / 2.) * np.sum((xk - x0) ** 2) dg = (grad + rho * (xk - x0)).ravel() return g, dg # minimize via BFGS options = {'maxiter': maxiter, 'disp': False} return opt.minimize(f_df_augmented, x0.ravel(), method=method, jac=True, options=options).x.reshape(orig_shape)
[ "def", "bfgs", "(", "x0", ",", "rho", ",", "f_df", ",", "maxiter", "=", "50", ",", "method", "=", "'BFGS'", ")", ":", "# keep track of the original shape", "orig_shape", "=", "x0", ".", "shape", "# specify the objective function and gradient for the proximal operator"...
Proximal operator for minimizing an arbitrary function using BFGS Uses the BFGS algorithm to find the proximal update for an arbitrary function, `f`, whose gradient is known. Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) f_df : function The objective function and gradient maxiter : int, optional Maximum number of iterations to take (default: 50) method : str, optional Which scipy.optimize algorithm to use (default: 'BFGS') Returns ------- theta : array_like The parameter vector found after running the proximal update step
[ "Proximal", "operator", "for", "minimizing", "an", "arbitrary", "function", "using", "BFGS" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L126-L176
ganguli-lab/proxalgs
proxalgs/operators.py
smooth
def smooth(x0, rho, gamma, axis=0): """ Proximal operator for a smoothing function enforced via the discrete laplacian operator Notes ----- Currently only works with matrices (2-D arrays) as input Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # Apply Laplacian smoothing n = x0.shape[axis] lap_op = spdiags([(2 + rho / gamma) * np.ones(n), -1 * np.ones(n), -1 * np.ones(n)], [0, -1, 1], n, n, format='csc') x_out = np.rollaxis(spsolve(gamma * lap_op, rho * np.rollaxis(x0, axis, 0)), axis, 0) return x_out
python
def smooth(x0, rho, gamma, axis=0): """ Proximal operator for a smoothing function enforced via the discrete laplacian operator Notes ----- Currently only works with matrices (2-D arrays) as input Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # Apply Laplacian smoothing n = x0.shape[axis] lap_op = spdiags([(2 + rho / gamma) * np.ones(n), -1 * np.ones(n), -1 * np.ones(n)], [0, -1, 1], n, n, format='csc') x_out = np.rollaxis(spsolve(gamma * lap_op, rho * np.rollaxis(x0, axis, 0)), axis, 0) return x_out
[ "def", "smooth", "(", "x0", ",", "rho", ",", "gamma", ",", "axis", "=", "0", ")", ":", "# Apply Laplacian smoothing", "n", "=", "x0", ".", "shape", "[", "axis", "]", "lap_op", "=", "spdiags", "(", "[", "(", "2", "+", "rho", "/", "gamma", ")", "*"...
Proximal operator for a smoothing function enforced via the discrete laplacian operator Notes ----- Currently only works with matrices (2-D arrays) as input Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step
[ "Proximal", "operator", "for", "a", "smoothing", "function", "enforced", "via", "the", "discrete", "laplacian", "operator" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L180-L210
ganguli-lab/proxalgs
proxalgs/operators.py
nucnorm
def nucnorm(x0, rho, gamma): """ Proximal operator for the nuclear norm (sum of the singular values of a matrix) Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # compute SVD u, s, v = np.linalg.svd(x0, full_matrices=False) # soft threshold the singular values sthr = np.maximum(s - (gamma / float(rho)), 0) # reconstruct x_out = (u.dot(np.diag(sthr)).dot(v)) return x_out
python
def nucnorm(x0, rho, gamma): """ Proximal operator for the nuclear norm (sum of the singular values of a matrix) Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step """ # compute SVD u, s, v = np.linalg.svd(x0, full_matrices=False) # soft threshold the singular values sthr = np.maximum(s - (gamma / float(rho)), 0) # reconstruct x_out = (u.dot(np.diag(sthr)).dot(v)) return x_out
[ "def", "nucnorm", "(", "x0", ",", "rho", ",", "gamma", ")", ":", "# compute SVD", "u", ",", "s", ",", "v", "=", "np", ".", "linalg", ".", "svd", "(", "x0", ",", "full_matrices", "=", "False", ")", "# soft threshold the singular values", "sthr", "=", "n...
Proximal operator for the nuclear norm (sum of the singular values of a matrix) Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step
[ "Proximal", "operator", "for", "the", "nuclear", "norm", "(", "sum", "of", "the", "singular", "values", "of", "a", "matrix", ")" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L214-L244
ganguli-lab/proxalgs
proxalgs/operators.py
tvd
def tvd(x0, rho, gamma): """ Proximal operator for the total variation denoising penalty Requires scikit-image be installed Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step Raises ------ ImportError If scikit-image fails to be imported """ try: from skimage.restoration import denoise_tv_bregman except ImportError: print('Error: scikit-image not found. TVD will not work.') return x0 return denoise_tv_bregman(x0, rho / gamma)
python
def tvd(x0, rho, gamma): """ Proximal operator for the total variation denoising penalty Requires scikit-image be installed Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step Raises ------ ImportError If scikit-image fails to be imported """ try: from skimage.restoration import denoise_tv_bregman except ImportError: print('Error: scikit-image not found. TVD will not work.') return x0 return denoise_tv_bregman(x0, rho / gamma)
[ "def", "tvd", "(", "x0", ",", "rho", ",", "gamma", ")", ":", "try", ":", "from", "skimage", ".", "restoration", "import", "denoise_tv_bregman", "except", "ImportError", ":", "print", "(", "'Error: scikit-image not found. TVD will not work.'", ")", "return", "x0", ...
Proximal operator for the total variation denoising penalty Requires scikit-image be installed Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step Raises ------ ImportError If scikit-image fails to be imported
[ "Proximal", "operator", "for", "the", "total", "variation", "denoising", "penalty" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L272-L305
ganguli-lab/proxalgs
proxalgs/operators.py
sparse
def sparse(x0, rho, gamma): """ Proximal operator for the l1 norm (induces sparsity) Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step """ lmbda = float(gamma) / rho return (x0 - lmbda) * (x0 >= lmbda) + (x0 + lmbda) * (x0 <= -lmbda)
python
def sparse(x0, rho, gamma): """ Proximal operator for the l1 norm (induces sparsity) Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step """ lmbda = float(gamma) / rho return (x0 - lmbda) * (x0 >= lmbda) + (x0 + lmbda) * (x0 <= -lmbda)
[ "def", "sparse", "(", "x0", ",", "rho", ",", "gamma", ")", ":", "lmbda", "=", "float", "(", "gamma", ")", "/", "rho", "return", "(", "x0", "-", "lmbda", ")", "*", "(", "x0", ">=", "lmbda", ")", "+", "(", "x0", "+", "lmbda", ")", "*", "(", "...
Proximal operator for the l1 norm (induces sparsity) Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) gamma : float A constant that weights how strongly to enforce the constraint Returns ------- theta : array_like The parameter vector found after running the proximal update step
[ "Proximal", "operator", "for", "the", "l1", "norm", "(", "induces", "sparsity", ")" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L309-L332
ganguli-lab/proxalgs
proxalgs/operators.py
linsys
def linsys(x0, rho, P, q): """ Proximal operator for the linear approximation Ax = b Minimizes the function: .. math:: f(x) = (1/2)||Ax-b||_2^2 = (1/2)x^TA^TAx - (b^TA)x + b^Tb Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) P : array_like The symmetric matrix A^TA, where we are trying to approximate Ax=b q : array_like The vector A^Tb, where we are trying to approximate Ax=b Returns ------- theta : array_like The parameter vector found after running the proximal update step """ return np.linalg.solve(rho * np.eye(q.shape[0]) + P, rho * x0.copy() + q)
python
def linsys(x0, rho, P, q): """ Proximal operator for the linear approximation Ax = b Minimizes the function: .. math:: f(x) = (1/2)||Ax-b||_2^2 = (1/2)x^TA^TAx - (b^TA)x + b^Tb Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) P : array_like The symmetric matrix A^TA, where we are trying to approximate Ax=b q : array_like The vector A^Tb, where we are trying to approximate Ax=b Returns ------- theta : array_like The parameter vector found after running the proximal update step """ return np.linalg.solve(rho * np.eye(q.shape[0]) + P, rho * x0.copy() + q)
[ "def", "linsys", "(", "x0", ",", "rho", ",", "P", ",", "q", ")", ":", "return", "np", ".", "linalg", ".", "solve", "(", "rho", "*", "np", ".", "eye", "(", "q", ".", "shape", "[", "0", "]", ")", "+", "P", ",", "rho", "*", "x0", ".", "copy"...
Proximal operator for the linear approximation Ax = b Minimizes the function: .. math:: f(x) = (1/2)||Ax-b||_2^2 = (1/2)x^TA^TAx - (b^TA)x + b^Tb Parameters ---------- x0 : array_like The starting or initial point used in the proximal update step rho : float Momentum parameter for the proximal step (larger value -> stays closer to x0) P : array_like The symmetric matrix A^TA, where we are trying to approximate Ax=b q : array_like The vector A^Tb, where we are trying to approximate Ax=b Returns ------- theta : array_like The parameter vector found after running the proximal update step
[ "Proximal", "operator", "for", "the", "linear", "approximation", "Ax", "=", "b" ]
train
https://github.com/ganguli-lab/proxalgs/blob/74f54467ad072d3229edea93fa84ddd98dd77c67/proxalgs/operators.py#L358-L385
kobinpy/kobin
kobin/responses.py
BaseResponse.status
def status(self): """ The HTTP status line as a string (e.g. ``404 Not Found``).""" status = _HTTP_STATUS_LINES.get(self._status_code) return str(status or ('{} Unknown'.format(self._status_code)))
python
def status(self): """ The HTTP status line as a string (e.g. ``404 Not Found``).""" status = _HTTP_STATUS_LINES.get(self._status_code) return str(status or ('{} Unknown'.format(self._status_code)))
[ "def", "status", "(", "self", ")", ":", "status", "=", "_HTTP_STATUS_LINES", ".", "get", "(", "self", ".", "_status_code", ")", "return", "str", "(", "status", "or", "(", "'{} Unknown'", ".", "format", "(", "self", ".", "_status_code", ")", ")", ")" ]
The HTTP status line as a string (e.g. ``404 Not Found``).
[ "The", "HTTP", "status", "line", "as", "a", "string", "(", "e", ".", "g", ".", "404", "Not", "Found", ")", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/responses.py#L56-L59
kobinpy/kobin
kobin/responses.py
BaseResponse.headerlist
def headerlist(self): """ WSGI conform list of (header, value) tuples. """ if 'Content-Type' not in self.headers: self.headers.add_header('Content-Type', self.default_content_type) if self._cookies: for c in self._cookies.values(): self.headers.add_header('Set-Cookie', c.OutputString()) return self.headers.items()
python
def headerlist(self): """ WSGI conform list of (header, value) tuples. """ if 'Content-Type' not in self.headers: self.headers.add_header('Content-Type', self.default_content_type) if self._cookies: for c in self._cookies.values(): self.headers.add_header('Set-Cookie', c.OutputString()) return self.headers.items()
[ "def", "headerlist", "(", "self", ")", ":", "if", "'Content-Type'", "not", "in", "self", ".", "headers", ":", "self", ".", "headers", ".", "add_header", "(", "'Content-Type'", ",", "self", ".", "default_content_type", ")", "if", "self", ".", "_cookies", ":...
WSGI conform list of (header, value) tuples.
[ "WSGI", "conform", "list", "of", "(", "header", "value", ")", "tuples", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/responses.py#L68-L75
sunscrapers/djet
djet/assertions.py
StatusCodeAssertionsMixin.assert_redirect
def assert_redirect(self, response, expected_url=None): """ assertRedirects from Django TestCase follows the redirects chains, this assertion does not - which is more like real unit testing """ self.assertIn( response.status_code, self.redirect_codes, self._get_redirect_assertion_message(response), ) if expected_url: location_header = response._headers.get('location', None) self.assertEqual( location_header, ('Location', str(expected_url)), 'Response should redirect to {0}, but it redirects to {1} instead'.format( expected_url, location_header[1], ) )
python
def assert_redirect(self, response, expected_url=None): """ assertRedirects from Django TestCase follows the redirects chains, this assertion does not - which is more like real unit testing """ self.assertIn( response.status_code, self.redirect_codes, self._get_redirect_assertion_message(response), ) if expected_url: location_header = response._headers.get('location', None) self.assertEqual( location_header, ('Location', str(expected_url)), 'Response should redirect to {0}, but it redirects to {1} instead'.format( expected_url, location_header[1], ) )
[ "def", "assert_redirect", "(", "self", ",", "response", ",", "expected_url", "=", "None", ")", ":", "self", ".", "assertIn", "(", "response", ".", "status_code", ",", "self", ".", "redirect_codes", ",", "self", ".", "_get_redirect_assertion_message", "(", "res...
assertRedirects from Django TestCase follows the redirects chains, this assertion does not - which is more like real unit testing
[ "assertRedirects", "from", "Django", "TestCase", "follows", "the", "redirects", "chains", "this", "assertion", "does", "not", "-", "which", "is", "more", "like", "real", "unit", "testing" ]
train
https://github.com/sunscrapers/djet/blob/9e28278404bfeab4b2e2abb22094a0d41f07f805/djet/assertions.py#L40-L59
sunscrapers/djet
djet/assertions.py
InstanceAssertionsMixin.assert_instance_created
def assert_instance_created(self, model_class, **kwargs): """ Checks if a model instance was created in the database. For example:: >>> with self.assert_instance_created(Article, slug='lorem-ipsum'): ... Article.objects.create(slug='lorem-ipsum') """ return _InstanceContext( self.assert_instance_does_not_exist, self.assert_instance_exists, model_class, **kwargs )
python
def assert_instance_created(self, model_class, **kwargs): """ Checks if a model instance was created in the database. For example:: >>> with self.assert_instance_created(Article, slug='lorem-ipsum'): ... Article.objects.create(slug='lorem-ipsum') """ return _InstanceContext( self.assert_instance_does_not_exist, self.assert_instance_exists, model_class, **kwargs )
[ "def", "assert_instance_created", "(", "self", ",", "model_class", ",", "*", "*", "kwargs", ")", ":", "return", "_InstanceContext", "(", "self", ".", "assert_instance_does_not_exist", ",", "self", ".", "assert_instance_exists", ",", "model_class", ",", "*", "*", ...
Checks if a model instance was created in the database. For example:: >>> with self.assert_instance_created(Article, slug='lorem-ipsum'): ... Article.objects.create(slug='lorem-ipsum')
[ "Checks", "if", "a", "model", "instance", "was", "created", "in", "the", "database", "." ]
train
https://github.com/sunscrapers/djet/blob/9e28278404bfeab4b2e2abb22094a0d41f07f805/djet/assertions.py#L174-L188
sunscrapers/djet
djet/assertions.py
InstanceAssertionsMixin.assert_instance_deleted
def assert_instance_deleted(self, model_class, **kwargs): """ Checks if the model instance was deleted from the database. For example:: >>> with self.assert_instance_deleted(Article, slug='lorem-ipsum'): ... Article.objects.get(slug='lorem-ipsum').delete() """ return _InstanceContext( self.assert_instance_exists, self.assert_instance_does_not_exist, model_class, **kwargs )
python
def assert_instance_deleted(self, model_class, **kwargs): """ Checks if the model instance was deleted from the database. For example:: >>> with self.assert_instance_deleted(Article, slug='lorem-ipsum'): ... Article.objects.get(slug='lorem-ipsum').delete() """ return _InstanceContext( self.assert_instance_exists, self.assert_instance_does_not_exist, model_class, **kwargs )
[ "def", "assert_instance_deleted", "(", "self", ",", "model_class", ",", "*", "*", "kwargs", ")", ":", "return", "_InstanceContext", "(", "self", ".", "assert_instance_exists", ",", "self", ".", "assert_instance_does_not_exist", ",", "model_class", ",", "*", "*", ...
Checks if the model instance was deleted from the database. For example:: >>> with self.assert_instance_deleted(Article, slug='lorem-ipsum'): ... Article.objects.get(slug='lorem-ipsum').delete()
[ "Checks", "if", "the", "model", "instance", "was", "deleted", "from", "the", "database", "." ]
train
https://github.com/sunscrapers/djet/blob/9e28278404bfeab4b2e2abb22094a0d41f07f805/djet/assertions.py#L190-L204
kobinpy/kobin
kobin/requests.py
_split_into_mimetype_and_priority
def _split_into_mimetype_and_priority(x): """Split an accept header item into mimetype and priority. >>> _split_into_mimetype_and_priority('text/*') ('text/*', 1.0) >>> _split_into_mimetype_and_priority('application/json;q=0.5') ('application/json', 0.5) """ if ';' in x: content_type, priority = x.split(';') casted_priority = float(priority.split('=')[1]) else: content_type, casted_priority = x, 1.0 content_type = content_type.lstrip().rstrip() # Replace ' text/html' to 'text/html' return content_type, casted_priority
python
def _split_into_mimetype_and_priority(x): """Split an accept header item into mimetype and priority. >>> _split_into_mimetype_and_priority('text/*') ('text/*', 1.0) >>> _split_into_mimetype_and_priority('application/json;q=0.5') ('application/json', 0.5) """ if ';' in x: content_type, priority = x.split(';') casted_priority = float(priority.split('=')[1]) else: content_type, casted_priority = x, 1.0 content_type = content_type.lstrip().rstrip() # Replace ' text/html' to 'text/html' return content_type, casted_priority
[ "def", "_split_into_mimetype_and_priority", "(", "x", ")", ":", "if", "';'", "in", "x", ":", "content_type", ",", "priority", "=", "x", ".", "split", "(", "';'", ")", "casted_priority", "=", "float", "(", "priority", ".", "split", "(", "'='", ")", "[", ...
Split an accept header item into mimetype and priority. >>> _split_into_mimetype_and_priority('text/*') ('text/*', 1.0) >>> _split_into_mimetype_and_priority('application/json;q=0.5') ('application/json', 0.5)
[ "Split", "an", "accept", "header", "item", "into", "mimetype", "and", "priority", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/requests.py#L161-L177
kobinpy/kobin
kobin/requests.py
_parse_and_sort_accept_header
def _parse_and_sort_accept_header(accept_header): """Parse and sort the accept header items. >>> _parse_and_sort_accept_header('application/json;q=0.5, text/*') [('text/*', 1.0), ('application/json', 0.5)] """ return sorted([_split_into_mimetype_and_priority(x) for x in accept_header.split(',')], key=lambda x: x[1], reverse=True)
python
def _parse_and_sort_accept_header(accept_header): """Parse and sort the accept header items. >>> _parse_and_sort_accept_header('application/json;q=0.5, text/*') [('text/*', 1.0), ('application/json', 0.5)] """ return sorted([_split_into_mimetype_and_priority(x) for x in accept_header.split(',')], key=lambda x: x[1], reverse=True)
[ "def", "_parse_and_sort_accept_header", "(", "accept_header", ")", ":", "return", "sorted", "(", "[", "_split_into_mimetype_and_priority", "(", "x", ")", "for", "x", "in", "accept_header", ".", "split", "(", "','", ")", "]", ",", "key", "=", "lambda", "x", "...
Parse and sort the accept header items. >>> _parse_and_sort_accept_header('application/json;q=0.5, text/*') [('text/*', 1.0), ('application/json', 0.5)]
[ "Parse", "and", "sort", "the", "accept", "header", "items", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/requests.py#L180-L187
kobinpy/kobin
kobin/requests.py
accept_best_match
def accept_best_match(accept_header, mimetypes): """Return a mimetype best matched the accept headers. >>> accept_best_match('application/json, text/html', ['application/json', 'text/plain']) 'application/json' >>> accept_best_match('application/json;q=0.5, text/*', ['application/json', 'text/plain']) 'text/plain' """ for mimetype_pattern, _ in _parse_and_sort_accept_header(accept_header): matched_types = fnmatch.filter(mimetypes, mimetype_pattern) if matched_types: return matched_types[0] return mimetypes[0]
python
def accept_best_match(accept_header, mimetypes): """Return a mimetype best matched the accept headers. >>> accept_best_match('application/json, text/html', ['application/json', 'text/plain']) 'application/json' >>> accept_best_match('application/json;q=0.5, text/*', ['application/json', 'text/plain']) 'text/plain' """ for mimetype_pattern, _ in _parse_and_sort_accept_header(accept_header): matched_types = fnmatch.filter(mimetypes, mimetype_pattern) if matched_types: return matched_types[0] return mimetypes[0]
[ "def", "accept_best_match", "(", "accept_header", ",", "mimetypes", ")", ":", "for", "mimetype_pattern", ",", "_", "in", "_parse_and_sort_accept_header", "(", "accept_header", ")", ":", "matched_types", "=", "fnmatch", ".", "filter", "(", "mimetypes", ",", "mimety...
Return a mimetype best matched the accept headers. >>> accept_best_match('application/json, text/html', ['application/json', 'text/plain']) 'application/json' >>> accept_best_match('application/json;q=0.5, text/*', ['application/json', 'text/plain']) 'text/plain'
[ "Return", "a", "mimetype", "best", "matched", "the", "accept", "headers", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/requests.py#L190-L203
kobinpy/kobin
kobin/routes.py
match_url_vars_type
def match_url_vars_type(url_vars, type_hints): """ Match types of url vars. >>> match_url_vars_type({'user_id': '1'}, {'user_id': int}) (True, {'user_id': 1}) >>> match_url_vars_type({'user_id': 'foo'}, {'user_id': int}) (False, {}) """ typed_url_vars = {} try: for k, v in url_vars.items(): arg_type = type_hints.get(k) if arg_type and arg_type != str: typed_url_vars[k] = arg_type(v) else: typed_url_vars[k] = v except ValueError: return False, {} return True, typed_url_vars
python
def match_url_vars_type(url_vars, type_hints): """ Match types of url vars. >>> match_url_vars_type({'user_id': '1'}, {'user_id': int}) (True, {'user_id': 1}) >>> match_url_vars_type({'user_id': 'foo'}, {'user_id': int}) (False, {}) """ typed_url_vars = {} try: for k, v in url_vars.items(): arg_type = type_hints.get(k) if arg_type and arg_type != str: typed_url_vars[k] = arg_type(v) else: typed_url_vars[k] = v except ValueError: return False, {} return True, typed_url_vars
[ "def", "match_url_vars_type", "(", "url_vars", ",", "type_hints", ")", ":", "typed_url_vars", "=", "{", "}", "try", ":", "for", "k", ",", "v", "in", "url_vars", ".", "items", "(", ")", ":", "arg_type", "=", "type_hints", ".", "get", "(", "k", ")", "i...
Match types of url vars. >>> match_url_vars_type({'user_id': '1'}, {'user_id': int}) (True, {'user_id': 1}) >>> match_url_vars_type({'user_id': 'foo'}, {'user_id': int}) (False, {})
[ "Match", "types", "of", "url", "vars", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/routes.py#L84-L102
kobinpy/kobin
kobin/routes.py
match_path
def match_path(rule, path): """ Match path. >>> match_path('/foo', '/foo') (True, {}) >>> match_path('/foo', '/bar') (False, {}) >>> match_path('/users/{user_id}', '/users/1') (True, {'user_id': '1'}) >>> match_path('/users/{user_id}', '/users/not-integer') (True, {'user_id': 'not-integer'}) """ split_rule = split_by_slash(rule) split_path = split_by_slash(path) url_vars = {} if len(split_rule) != len(split_path): return False, {} for r, p in zip(split_rule, split_path): if r.startswith('{') and r.endswith('}'): url_vars[r[1:-1]] = p continue if r != p: return False, {} return True, url_vars
python
def match_path(rule, path): """ Match path. >>> match_path('/foo', '/foo') (True, {}) >>> match_path('/foo', '/bar') (False, {}) >>> match_path('/users/{user_id}', '/users/1') (True, {'user_id': '1'}) >>> match_path('/users/{user_id}', '/users/not-integer') (True, {'user_id': 'not-integer'}) """ split_rule = split_by_slash(rule) split_path = split_by_slash(path) url_vars = {} if len(split_rule) != len(split_path): return False, {} for r, p in zip(split_rule, split_path): if r.startswith('{') and r.endswith('}'): url_vars[r[1:-1]] = p continue if r != p: return False, {} return True, url_vars
[ "def", "match_path", "(", "rule", ",", "path", ")", ":", "split_rule", "=", "split_by_slash", "(", "rule", ")", "split_path", "=", "split_by_slash", "(", "path", ")", "url_vars", "=", "{", "}", "if", "len", "(", "split_rule", ")", "!=", "len", "(", "sp...
Match path. >>> match_path('/foo', '/foo') (True, {}) >>> match_path('/foo', '/bar') (False, {}) >>> match_path('/users/{user_id}', '/users/1') (True, {'user_id': '1'}) >>> match_path('/users/{user_id}', '/users/not-integer') (True, {'user_id': 'not-integer'})
[ "Match", "path", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/routes.py#L105-L130
kobinpy/kobin
kobin/routes.py
Router.match
def match(self, path, method): """ Get callback and url_vars. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> callback, url_vars = r.match('/users/1', 'GET') >>> url_vars {'user_id': 1} >>> response = callback(**url_vars) >>> response.body [b'You are 1'] >>> callback, url_vars = r.match('/notfound', 'GET') Traceback (most recent call last): ... kobin.responses.HTTPError """ if path != '/': path = path.rstrip('/') method = method.upper() status = 404 for p, n, m in self.endpoints: matched, url_vars = match_path(p, path) if not matched: # path: not matched continue if method not in m: # path: matched, method: not matched status = 405 raise HTTPError(status=status, body=f'Method not found: {path} {method}') # it has security issue?? callback, type_hints = m[method] type_matched, typed_url_vars = match_url_vars_type(url_vars, type_hints) if not type_matched: continue # path: not matched (types are different) return callback, typed_url_vars raise HTTPError(status=status, body=f'Not found: {path}')
python
def match(self, path, method): """ Get callback and url_vars. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> callback, url_vars = r.match('/users/1', 'GET') >>> url_vars {'user_id': 1} >>> response = callback(**url_vars) >>> response.body [b'You are 1'] >>> callback, url_vars = r.match('/notfound', 'GET') Traceback (most recent call last): ... kobin.responses.HTTPError """ if path != '/': path = path.rstrip('/') method = method.upper() status = 404 for p, n, m in self.endpoints: matched, url_vars = match_path(p, path) if not matched: # path: not matched continue if method not in m: # path: matched, method: not matched status = 405 raise HTTPError(status=status, body=f'Method not found: {path} {method}') # it has security issue?? callback, type_hints = m[method] type_matched, typed_url_vars = match_url_vars_type(url_vars, type_hints) if not type_matched: continue # path: not matched (types are different) return callback, typed_url_vars raise HTTPError(status=status, body=f'Not found: {path}')
[ "def", "match", "(", "self", ",", "path", ",", "method", ")", ":", "if", "path", "!=", "'/'", ":", "path", "=", "path", ".", "rstrip", "(", "'/'", ")", "method", "=", "method", ".", "upper", "(", ")", "status", "=", "404", "for", "p", ",", "n",...
Get callback and url_vars. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> callback, url_vars = r.match('/users/1', 'GET') >>> url_vars {'user_id': 1} >>> response = callback(**url_vars) >>> response.body [b'You are 1'] >>> callback, url_vars = r.match('/notfound', 'GET') Traceback (most recent call last): ... kobin.responses.HTTPError
[ "Get", "callback", "and", "url_vars", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/routes.py#L137-L178
kobinpy/kobin
kobin/routes.py
Router.add
def add(self, rule, method, name, callback): """ Add a new rule or replace the target for an existing rule. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> path, name, methods = r.endpoints[0] >>> path '/users/{user_id}' >>> name 'user-detail' >>> callback, type_hints = methods['GET'] >>> view == callback True >>> type_hints['user_id'] == int True """ if rule != '/': rule = rule.rstrip('/') method = method.upper() for i, e in enumerate(self.endpoints): r, n, callbacks = e if r == rule: assert name == n and n is not None, ( "A same path should set a same name for reverse routing." ) callbacks[method] = (callback, get_type_hints(callback)) self.endpoints[i] = (r, name, callbacks) break else: e = (rule, name, {method: (callback, get_type_hints(callback))}) self.endpoints.append(e)
python
def add(self, rule, method, name, callback): """ Add a new rule or replace the target for an existing rule. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> path, name, methods = r.endpoints[0] >>> path '/users/{user_id}' >>> name 'user-detail' >>> callback, type_hints = methods['GET'] >>> view == callback True >>> type_hints['user_id'] == int True """ if rule != '/': rule = rule.rstrip('/') method = method.upper() for i, e in enumerate(self.endpoints): r, n, callbacks = e if r == rule: assert name == n and n is not None, ( "A same path should set a same name for reverse routing." ) callbacks[method] = (callback, get_type_hints(callback)) self.endpoints[i] = (r, name, callbacks) break else: e = (rule, name, {method: (callback, get_type_hints(callback))}) self.endpoints.append(e)
[ "def", "add", "(", "self", ",", "rule", ",", "method", ",", "name", ",", "callback", ")", ":", "if", "rule", "!=", "'/'", ":", "rule", "=", "rule", ".", "rstrip", "(", "'/'", ")", "method", "=", "method", ".", "upper", "(", ")", "for", "i", ","...
Add a new rule or replace the target for an existing rule. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> path, name, methods = r.endpoints[0] >>> path '/users/{user_id}' >>> name 'user-detail' >>> callback, type_hints = methods['GET'] >>> view == callback True >>> type_hints['user_id'] == int True
[ "Add", "a", "new", "rule", "or", "replace", "the", "target", "for", "an", "existing", "rule", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/routes.py#L180-L215
kobinpy/kobin
kobin/routes.py
Router.reverse
def reverse(self, name, **kwargs): """ Reverse routing. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> r.reverse('user-detail', user_id=1) '/users/1' """ for p, n, _ in self.endpoints: if name == n: return p.format(**kwargs)
python
def reverse(self, name, **kwargs): """ Reverse routing. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> r.reverse('user-detail', user_id=1) '/users/1' """ for p, n, _ in self.endpoints: if name == n: return p.format(**kwargs)
[ "def", "reverse", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "for", "p", ",", "n", ",", "_", "in", "self", ".", "endpoints", ":", "if", "name", "==", "n", ":", "return", "p", ".", "format", "(", "*", "*", "kwargs", ")" ]
Reverse routing. >>> from kobin import Response >>> r = Router() >>> def view(user_id: int) -> Response: ... return Response(f'You are {user_id}') ... >>> r.add('/users/{user_id}', 'GET', 'user-detail', view) >>> r.reverse('user-detail', user_id=1) '/users/1'
[ "Reverse", "routing", "." ]
train
https://github.com/kobinpy/kobin/blob/e6caff5af05db8a6e511d3de275d262466ab36a6/kobin/routes.py#L217-L231
maxcountryman/atomos
atomos/util.py
synchronized
def synchronized(fn): ''' A decorator which acquires a lock before attempting to execute its wrapped function. Releases the lock in a finally clause. :param fn: The function to wrap. ''' lock = threading.Lock() @functools.wraps(fn) def decorated(*args, **kwargs): lock.acquire() try: return fn(*args, **kwargs) finally: lock.release() return decorated
python
def synchronized(fn): ''' A decorator which acquires a lock before attempting to execute its wrapped function. Releases the lock in a finally clause. :param fn: The function to wrap. ''' lock = threading.Lock() @functools.wraps(fn) def decorated(*args, **kwargs): lock.acquire() try: return fn(*args, **kwargs) finally: lock.release() return decorated
[ "def", "synchronized", "(", "fn", ")", ":", "lock", "=", "threading", ".", "Lock", "(", ")", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "decorated", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "lock", ".", "acquire", "(", ")"...
A decorator which acquires a lock before attempting to execute its wrapped function. Releases the lock in a finally clause. :param fn: The function to wrap.
[ "A", "decorator", "which", "acquires", "a", "lock", "before", "attempting", "to", "execute", "its", "wrapped", "function", ".", "Releases", "the", "lock", "in", "a", "finally", "clause", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/util.py#L21-L38
maxcountryman/atomos
atomos/atomic.py
AtomicReference.set
def set(self, value): ''' Atomically sets the value to `value`. :param value: The value to set. ''' with self._lock.exclusive: self._value = value return value
python
def set(self, value): ''' Atomically sets the value to `value`. :param value: The value to set. ''' with self._lock.exclusive: self._value = value return value
[ "def", "set", "(", "self", ",", "value", ")", ":", "with", "self", ".", "_lock", ".", "exclusive", ":", "self", ".", "_value", "=", "value", "return", "value" ]
Atomically sets the value to `value`. :param value: The value to set.
[ "Atomically", "sets", "the", "value", "to", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atomic.py#L38-L46
maxcountryman/atomos
atomos/atomic.py
AtomicReference.get_and_set
def get_and_set(self, value): ''' Atomically sets the value to `value` and returns the old value. :param value: The value to set. ''' with self._lock.exclusive: oldval = self._value self._value = value return oldval
python
def get_and_set(self, value): ''' Atomically sets the value to `value` and returns the old value. :param value: The value to set. ''' with self._lock.exclusive: oldval = self._value self._value = value return oldval
[ "def", "get_and_set", "(", "self", ",", "value", ")", ":", "with", "self", ".", "_lock", ".", "exclusive", ":", "oldval", "=", "self", ".", "_value", "self", ".", "_value", "=", "value", "return", "oldval" ]
Atomically sets the value to `value` and returns the old value. :param value: The value to set.
[ "Atomically", "sets", "the", "value", "to", "value", "and", "returns", "the", "old", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atomic.py#L48-L57
maxcountryman/atomos
atomos/atomic.py
AtomicReference.compare_and_set
def compare_and_set(self, expect, update): ''' Atomically sets the value to `update` if the current value is equal to `expect`. :param expect: The expected current value. :param update: The value to set if and only if `expect` equals the current value. ''' with self._lock.exclusive: if self._value == expect: self._value = update return True return False
python
def compare_and_set(self, expect, update): ''' Atomically sets the value to `update` if the current value is equal to `expect`. :param expect: The expected current value. :param update: The value to set if and only if `expect` equals the current value. ''' with self._lock.exclusive: if self._value == expect: self._value = update return True return False
[ "def", "compare_and_set", "(", "self", ",", "expect", ",", "update", ")", ":", "with", "self", ".", "_lock", ".", "exclusive", ":", "if", "self", ".", "_value", "==", "expect", ":", "self", ".", "_value", "=", "update", "return", "True", "return", "Fal...
Atomically sets the value to `update` if the current value is equal to `expect`. :param expect: The expected current value. :param update: The value to set if and only if `expect` equals the current value.
[ "Atomically", "sets", "the", "value", "to", "update", "if", "the", "current", "value", "is", "equal", "to", "expect", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atomic.py#L59-L73
maxcountryman/atomos
atomos/atomic.py
AtomicNumber.add_and_get
def add_and_get(self, delta): ''' Atomically adds `delta` to the current value. :param delta: The delta to add. ''' with self._lock.exclusive: self._value += delta return self._value
python
def add_and_get(self, delta): ''' Atomically adds `delta` to the current value. :param delta: The delta to add. ''' with self._lock.exclusive: self._value += delta return self._value
[ "def", "add_and_get", "(", "self", ",", "delta", ")", ":", "with", "self", ".", "_lock", ".", "exclusive", ":", "self", ".", "_value", "+=", "delta", "return", "self", ".", "_value" ]
Atomically adds `delta` to the current value. :param delta: The delta to add.
[ "Atomically", "adds", "delta", "to", "the", "current", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atomic.py#L111-L119
maxcountryman/atomos
atomos/atomic.py
AtomicNumber.get_and_add
def get_and_add(self, delta): ''' Atomically adds `delta` to the current value and returns the old value. :param delta: The delta to add. ''' with self._lock.exclusive: oldval = self._value self._value += delta return oldval
python
def get_and_add(self, delta): ''' Atomically adds `delta` to the current value and returns the old value. :param delta: The delta to add. ''' with self._lock.exclusive: oldval = self._value self._value += delta return oldval
[ "def", "get_and_add", "(", "self", ",", "delta", ")", ":", "with", "self", ".", "_lock", ".", "exclusive", ":", "oldval", "=", "self", ".", "_value", "self", ".", "_value", "+=", "delta", "return", "oldval" ]
Atomically adds `delta` to the current value and returns the old value. :param delta: The delta to add.
[ "Atomically", "adds", "delta", "to", "the", "current", "value", "and", "returns", "the", "old", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atomic.py#L121-L130
maxcountryman/atomos
atomos/atomic.py
AtomicNumber.subtract_and_get
def subtract_and_get(self, delta): ''' Atomically subtracts `delta` from the current value. :param delta: The delta to subtract. ''' with self._lock.exclusive: self._value -= delta return self._value
python
def subtract_and_get(self, delta): ''' Atomically subtracts `delta` from the current value. :param delta: The delta to subtract. ''' with self._lock.exclusive: self._value -= delta return self._value
[ "def", "subtract_and_get", "(", "self", ",", "delta", ")", ":", "with", "self", ".", "_lock", ".", "exclusive", ":", "self", ".", "_value", "-=", "delta", "return", "self", ".", "_value" ]
Atomically subtracts `delta` from the current value. :param delta: The delta to subtract.
[ "Atomically", "subtracts", "delta", "from", "the", "current", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atomic.py#L132-L140
maxcountryman/atomos
atomos/atomic.py
AtomicNumber.get_and_subtract
def get_and_subtract(self, delta): ''' Atomically subtracts `delta` from the current value and returns the old value. :param delta: The delta to subtract. ''' with self._lock.exclusive: oldval = self._value self._value -= delta return oldval
python
def get_and_subtract(self, delta): ''' Atomically subtracts `delta` from the current value and returns the old value. :param delta: The delta to subtract. ''' with self._lock.exclusive: oldval = self._value self._value -= delta return oldval
[ "def", "get_and_subtract", "(", "self", ",", "delta", ")", ":", "with", "self", ".", "_lock", ".", "exclusive", ":", "oldval", "=", "self", ".", "_value", "self", ".", "_value", "-=", "delta", "return", "oldval" ]
Atomically subtracts `delta` from the current value and returns the old value. :param delta: The delta to subtract.
[ "Atomically", "subtracts", "delta", "from", "the", "current", "value", "and", "returns", "the", "old", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atomic.py#L142-L152
maxcountryman/atomos
atomos/atom.py
ARef.notify_watches
def notify_watches(self, oldval, newval): ''' Passes `oldval` and `newval` to each `fn` in the watches dictionary, passing along its respective key and the reference to this object. :param oldval: The old value which will be passed to the watch. :param newval: The new value which will be passed to the watch. ''' watches = self._watches.copy() for k in watches: fn = watches[k] if isinstance(fn, collections.Callable): fn(k, self, oldval, newval)
python
def notify_watches(self, oldval, newval): ''' Passes `oldval` and `newval` to each `fn` in the watches dictionary, passing along its respective key and the reference to this object. :param oldval: The old value which will be passed to the watch. :param newval: The new value which will be passed to the watch. ''' watches = self._watches.copy() for k in watches: fn = watches[k] if isinstance(fn, collections.Callable): fn(k, self, oldval, newval)
[ "def", "notify_watches", "(", "self", ",", "oldval", ",", "newval", ")", ":", "watches", "=", "self", ".", "_watches", ".", "copy", "(", ")", "for", "k", "in", "watches", ":", "fn", "=", "watches", "[", "k", "]", "if", "isinstance", "(", "fn", ",",...
Passes `oldval` and `newval` to each `fn` in the watches dictionary, passing along its respective key and the reference to this object. :param oldval: The old value which will be passed to the watch. :param newval: The new value which will be passed to the watch.
[ "Passes", "oldval", "and", "newval", "to", "each", "fn", "in", "the", "watches", "dictionary", "passing", "along", "its", "respective", "key", "and", "the", "reference", "to", "this", "object", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atom.py#L64-L76
maxcountryman/atomos
atomos/atom.py
Atom.swap
def swap(self, fn, *args, **kwargs): ''' Given a mutator `fn`, calls `fn` with the atom's current state, `args`, and `kwargs`. The return value of this invocation becomes the new value of the atom. Returns the new value. :param fn: A function which will be passed the current state. Should return a new state. This absolutely *MUST NOT* mutate the reference to the current state! If it does, this function may loop indefinitely. :param \*args: Arguments to be passed to `fn`. :param \*\*kwargs: Keyword arguments to be passed to `fn`. ''' while True: oldval = self.deref() newval = fn(oldval, *args, **kwargs) if self._state.compare_and_set(oldval, newval): self.notify_watches(oldval, newval) return newval
python
def swap(self, fn, *args, **kwargs): ''' Given a mutator `fn`, calls `fn` with the atom's current state, `args`, and `kwargs`. The return value of this invocation becomes the new value of the atom. Returns the new value. :param fn: A function which will be passed the current state. Should return a new state. This absolutely *MUST NOT* mutate the reference to the current state! If it does, this function may loop indefinitely. :param \*args: Arguments to be passed to `fn`. :param \*\*kwargs: Keyword arguments to be passed to `fn`. ''' while True: oldval = self.deref() newval = fn(oldval, *args, **kwargs) if self._state.compare_and_set(oldval, newval): self.notify_watches(oldval, newval) return newval
[ "def", "swap", "(", "self", ",", "fn", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "while", "True", ":", "oldval", "=", "self", ".", "deref", "(", ")", "newval", "=", "fn", "(", "oldval", ",", "*", "args", ",", "*", "*", "kwargs", ")...
Given a mutator `fn`, calls `fn` with the atom's current state, `args`, and `kwargs`. The return value of this invocation becomes the new value of the atom. Returns the new value. :param fn: A function which will be passed the current state. Should return a new state. This absolutely *MUST NOT* mutate the reference to the current state! If it does, this function may loop indefinitely. :param \*args: Arguments to be passed to `fn`. :param \*\*kwargs: Keyword arguments to be passed to `fn`.
[ "Given", "a", "mutator", "fn", "calls", "fn", "with", "the", "atom", "s", "current", "state", "args", "and", "kwargs", ".", "The", "return", "value", "of", "this", "invocation", "becomes", "the", "new", "value", "of", "the", "atom", ".", "Returns", "the"...
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atom.py#L154-L172
maxcountryman/atomos
atomos/atom.py
Atom.reset
def reset(self, newval): ''' Resets the atom's value to `newval`, returning `newval`. :param newval: The new value to set. ''' oldval = self._state.get() self._state.set(newval) self.notify_watches(oldval, newval) return newval
python
def reset(self, newval): ''' Resets the atom's value to `newval`, returning `newval`. :param newval: The new value to set. ''' oldval = self._state.get() self._state.set(newval) self.notify_watches(oldval, newval) return newval
[ "def", "reset", "(", "self", ",", "newval", ")", ":", "oldval", "=", "self", ".", "_state", ".", "get", "(", ")", "self", ".", "_state", ".", "set", "(", "newval", ")", "self", ".", "notify_watches", "(", "oldval", ",", "newval", ")", "return", "ne...
Resets the atom's value to `newval`, returning `newval`. :param newval: The new value to set.
[ "Resets", "the", "atom", "s", "value", "to", "newval", "returning", "newval", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atom.py#L174-L183
maxcountryman/atomos
atomos/atom.py
Atom.compare_and_set
def compare_and_set(self, oldval, newval): ''' Given `oldval` and `newval`, sets the atom's value to `newval` if and only if `oldval` is the atom's current value. Returns `True` upon success, otherwise `False`. :param oldval: The old expected value. :param newval: The new value which will be set if and only if `oldval` equals the current value. ''' ret = self._state.compare_and_set(oldval, newval) if ret: self.notify_watches(oldval, newval) return ret
python
def compare_and_set(self, oldval, newval): ''' Given `oldval` and `newval`, sets the atom's value to `newval` if and only if `oldval` is the atom's current value. Returns `True` upon success, otherwise `False`. :param oldval: The old expected value. :param newval: The new value which will be set if and only if `oldval` equals the current value. ''' ret = self._state.compare_and_set(oldval, newval) if ret: self.notify_watches(oldval, newval) return ret
[ "def", "compare_and_set", "(", "self", ",", "oldval", ",", "newval", ")", ":", "ret", "=", "self", ".", "_state", ".", "compare_and_set", "(", "oldval", ",", "newval", ")", "if", "ret", ":", "self", ".", "notify_watches", "(", "oldval", ",", "newval", ...
Given `oldval` and `newval`, sets the atom's value to `newval` if and only if `oldval` is the atom's current value. Returns `True` upon success, otherwise `False`. :param oldval: The old expected value. :param newval: The new value which will be set if and only if `oldval` equals the current value.
[ "Given", "oldval", "and", "newval", "sets", "the", "atom", "s", "value", "to", "newval", "if", "and", "only", "if", "oldval", "is", "the", "atom", "s", "current", "value", ".", "Returns", "True", "upon", "success", "otherwise", "False", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/atom.py#L185-L199
maxcountryman/atomos
atomos/multiprocessing/atomic.py
AtomicCtypesReference.set
def set(self, value): ''' Atomically sets the value to `value`. :param value: The value to set. ''' with self._reference.get_lock(): self._reference.value = value return value
python
def set(self, value): ''' Atomically sets the value to `value`. :param value: The value to set. ''' with self._reference.get_lock(): self._reference.value = value return value
[ "def", "set", "(", "self", ",", "value", ")", ":", "with", "self", ".", "_reference", ".", "get_lock", "(", ")", ":", "self", ".", "_reference", ".", "value", "=", "value", "return", "value" ]
Atomically sets the value to `value`. :param value: The value to set.
[ "Atomically", "sets", "the", "value", "to", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/multiprocessing/atomic.py#L95-L103
maxcountryman/atomos
atomos/multiprocessing/atomic.py
AtomicCtypesReference.get_and_set
def get_and_set(self, value): ''' Atomically sets the value to `value` and returns the old value. :param value: The value to set. ''' with self._reference.get_lock(): oldval = self._reference.value self._reference.value = value return oldval
python
def get_and_set(self, value): ''' Atomically sets the value to `value` and returns the old value. :param value: The value to set. ''' with self._reference.get_lock(): oldval = self._reference.value self._reference.value = value return oldval
[ "def", "get_and_set", "(", "self", ",", "value", ")", ":", "with", "self", ".", "_reference", ".", "get_lock", "(", ")", ":", "oldval", "=", "self", ".", "_reference", ".", "value", "self", ".", "_reference", ".", "value", "=", "value", "return", "oldv...
Atomically sets the value to `value` and returns the old value. :param value: The value to set.
[ "Atomically", "sets", "the", "value", "to", "value", "and", "returns", "the", "old", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/multiprocessing/atomic.py#L105-L114
maxcountryman/atomos
atomos/multiprocessing/atomic.py
AtomicCtypesReference.compare_and_set
def compare_and_set(self, expect, update): ''' Atomically sets the value to `update` if the current value is equal to `expect`. :param expect: The expected current value. :param update: The value to set if and only if `expect` equals the current value. ''' with self._reference.get_lock(): if self._reference.value == expect: self._reference.value = update return True return False
python
def compare_and_set(self, expect, update): ''' Atomically sets the value to `update` if the current value is equal to `expect`. :param expect: The expected current value. :param update: The value to set if and only if `expect` equals the current value. ''' with self._reference.get_lock(): if self._reference.value == expect: self._reference.value = update return True return False
[ "def", "compare_and_set", "(", "self", ",", "expect", ",", "update", ")", ":", "with", "self", ".", "_reference", ".", "get_lock", "(", ")", ":", "if", "self", ".", "_reference", ".", "value", "==", "expect", ":", "self", ".", "_reference", ".", "value...
Atomically sets the value to `update` if the current value is equal to `expect`. :param expect: The expected current value. :param update: The value to set if and only if `expect` equals the current value.
[ "Atomically", "sets", "the", "value", "to", "update", "if", "the", "current", "value", "is", "equal", "to", "expect", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/multiprocessing/atomic.py#L116-L130
maxcountryman/atomos
atomos/multiprocessing/atomic.py
AtomicNumber.add_and_get
def add_and_get(self, delta): ''' Atomically adds `delta` to the current value. :param delta: The delta to add. ''' with self._reference.get_lock(): self._reference.value += delta return self._reference.value
python
def add_and_get(self, delta): ''' Atomically adds `delta` to the current value. :param delta: The delta to add. ''' with self._reference.get_lock(): self._reference.value += delta return self._reference.value
[ "def", "add_and_get", "(", "self", ",", "delta", ")", ":", "with", "self", ".", "_reference", ".", "get_lock", "(", ")", ":", "self", ".", "_reference", ".", "value", "+=", "delta", "return", "self", ".", "_reference", ".", "value" ]
Atomically adds `delta` to the current value. :param delta: The delta to add.
[ "Atomically", "adds", "delta", "to", "the", "current", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/multiprocessing/atomic.py#L169-L177
maxcountryman/atomos
atomos/multiprocessing/atomic.py
AtomicNumber.get_and_add
def get_and_add(self, delta): ''' Atomically adds `delta` to the current value and returns the old value. :param delta: The delta to add. ''' with self._reference.get_lock(): oldval = self._reference.value self._reference.value += delta return oldval
python
def get_and_add(self, delta): ''' Atomically adds `delta` to the current value and returns the old value. :param delta: The delta to add. ''' with self._reference.get_lock(): oldval = self._reference.value self._reference.value += delta return oldval
[ "def", "get_and_add", "(", "self", ",", "delta", ")", ":", "with", "self", ".", "_reference", ".", "get_lock", "(", ")", ":", "oldval", "=", "self", ".", "_reference", ".", "value", "self", ".", "_reference", ".", "value", "+=", "delta", "return", "old...
Atomically adds `delta` to the current value and returns the old value. :param delta: The delta to add.
[ "Atomically", "adds", "delta", "to", "the", "current", "value", "and", "returns", "the", "old", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/multiprocessing/atomic.py#L179-L188
maxcountryman/atomos
atomos/multiprocessing/atomic.py
AtomicNumber.subtract_and_get
def subtract_and_get(self, delta): ''' Atomically subtracts `delta` from the current value. :param delta: The delta to subtract. ''' with self._reference.get_lock(): self._reference.value -= delta return self._reference.value
python
def subtract_and_get(self, delta): ''' Atomically subtracts `delta` from the current value. :param delta: The delta to subtract. ''' with self._reference.get_lock(): self._reference.value -= delta return self._reference.value
[ "def", "subtract_and_get", "(", "self", ",", "delta", ")", ":", "with", "self", ".", "_reference", ".", "get_lock", "(", ")", ":", "self", ".", "_reference", ".", "value", "-=", "delta", "return", "self", ".", "_reference", ".", "value" ]
Atomically subtracts `delta` from the current value. :param delta: The delta to subtract.
[ "Atomically", "subtracts", "delta", "from", "the", "current", "value", "." ]
train
https://github.com/maxcountryman/atomos/blob/418746c69134efba3c4f999405afe9113dee4827/atomos/multiprocessing/atomic.py#L190-L198