repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
extract_subject_from_dn
def extract_subject_from_dn(cert_obj): """Serialize a DN to a DataONE subject string. Args: cert_obj: cryptography.Certificate Returns: str: Primary subject extracted from the certificate DN. The certificate DN (DistinguishedName) is a sequence of RDNs (RelativeDistinguishedName). Each RDN is a set of AVAs (AttributeValueAssertion / AttributeTypeAndValue). A DataONE subject is a plain string. As there is no single standard specifying how to create a string representation of a DN, DataONE selected one of the most common ways, which yield strings such as: CN=Some Name A123,O=Some Organization,C=US,DC=Some Domain,DC=org In particular, the sequence of RDNs is reversed. Attribute values are escaped, attribute type and value pairs are separated by "=", and AVAs are joined together with ",". If an RDN contains an unknown OID, the OID is serialized as a dotted string. As all the information in the DN is preserved, it is not possible to create the same subject with two different DNs, and the DN can be recreated from the subject. """ return ",".join( "{}={}".format( OID_TO_SHORT_NAME_DICT.get(v.oid.dotted_string, v.oid.dotted_string), rdn_escape(v.value), ) for v in reversed(list(cert_obj.subject)) )
python
def extract_subject_from_dn(cert_obj):
    """Serialize a certificate DN to a DataONE subject string.

    Args:
        cert_obj: cryptography.Certificate

    Returns:
        str: Primary subject derived from the certificate's DistinguishedName.

    The DN is a sequence of RDNs, each holding attribute type/value pairs. DataONE
    flattens it to a single string of the common form:

        CN=Some Name A123,O=Some Organization,C=US,DC=Some Domain,DC=org

    The RDN sequence is reversed, values are escaped, type and value are joined
    with "=", and pairs are joined with ",". Unknown OIDs are rendered as dotted
    strings. Since all DN information is preserved, two different DNs cannot map
    to the same subject, and the DN is recoverable from the subject.
    """
    ava_list = []
    for attr in cert_obj.subject:
        oid_str = attr.oid.dotted_string
        # Fall back to the dotted OID string when no short name is registered.
        type_str = OID_TO_SHORT_NAME_DICT.get(oid_str, oid_str)
        ava_list.append("{}={}".format(type_str, rdn_escape(attr.value)))
    ava_list.reverse()
    return ",".join(ava_list)
[ "def", "extract_subject_from_dn", "(", "cert_obj", ")", ":", "return", "\",\"", ".", "join", "(", "\"{}={}\"", ".", "format", "(", "OID_TO_SHORT_NAME_DICT", ".", "get", "(", "v", ".", "oid", ".", "dotted_string", ",", "v", ".", "oid", ".", "dotted_string", ...
Serialize a DN to a DataONE subject string. Args: cert_obj: cryptography.Certificate Returns: str: Primary subject extracted from the certificate DN. The certificate DN (DistinguishedName) is a sequence of RDNs (RelativeDistinguishedName). Each RDN is a set of AVAs (AttributeValueAssertion / AttributeTypeAndValue). A DataONE subject is a plain string. As there is no single standard specifying how to create a string representation of a DN, DataONE selected one of the most common ways, which yield strings such as: CN=Some Name A123,O=Some Organization,C=US,DC=Some Domain,DC=org In particular, the sequence of RDNs is reversed. Attribute values are escaped, attribute type and value pairs are separated by "=", and AVAs are joined together with ",". If an RDN contains an unknown OID, the OID is serialized as a dotted string. As all the information in the DN is preserved, it is not possible to create the same subject with two different DNs, and the DN can be recreated from the subject.
[ "Serialize", "a", "DN", "to", "a", "DataONE", "subject", "string", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L107-L140
train
45,400
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
create_d1_dn_subject
def create_d1_dn_subject(common_name_str): """Create the DN Subject for certificate that will be used in a DataONE environment. The DN is formatted into a DataONE subject, which is used in authentication, authorization and event tracking. Args: common_name_str: str DataONE uses simple DNs without physical location information, so only the ``common_name_str`` (``CommonName``) needs to be specified. For Member Node Client Side certificates or CSRs, ``common_name_str`` is the ``node_id``, e.g., ``urn:node:ABCD`` for production, or ``urn:node:mnTestABCD`` for the test environments. For a local CA, something like ``localCA`` may be used. For a locally trusted client side certificate, something like ``localClient`` may be used. """ return cryptography.x509.Name( [ cryptography.x509.NameAttribute( cryptography.x509.oid.NameOID.COUNTRY_NAME, "US" ), cryptography.x509.NameAttribute( cryptography.x509.oid.NameOID.STATE_OR_PROVINCE_NAME, "California" ), cryptography.x509.NameAttribute( cryptography.x509.oid.NameOID.LOCALITY_NAME, "San Francisco" ), cryptography.x509.NameAttribute( cryptography.x509.oid.NameOID.ORGANIZATION_NAME, "Root CA" ), cryptography.x509.NameAttribute( cryptography.x509.oid.NameOID.COMMON_NAME, "ca.ca.com" ), ] )
python
def create_d1_dn_subject(common_name_str):
    """Create the DN Subject for a certificate that will be used in a DataONE
    environment.

    The DN is formatted into a DataONE subject, which is used in authentication,
    authorization and event tracking.

    Args:
        common_name_str: str
            Value for the ``CommonName`` attribute of the DN. For Member Node
            Client Side certificates or CSRs, this is the ``node_id``, e.g.,
            ``urn:node:ABCD`` for production or ``urn:node:mnTestABCD`` for test
            environments. For a local CA, something like ``localCA`` may be used.
            For a locally trusted client side certificate, something like
            ``localClient`` may be used.

    Returns:
        cryptography.x509.Name
    """
    return cryptography.x509.Name(
        [
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.COUNTRY_NAME, "US"
            ),
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.STATE_OR_PROVINCE_NAME, "California"
            ),
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.LOCALITY_NAME, "San Francisco"
            ),
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.ORGANIZATION_NAME, "Root CA"
            ),
            # Bug fix: CommonName was hard-coded to "ca.ca.com", silently
            # ignoring the common_name_str parameter documented above.
            cryptography.x509.NameAttribute(
                cryptography.x509.oid.NameOID.COMMON_NAME, common_name_str
            ),
        ]
    )
[ "def", "create_d1_dn_subject", "(", "common_name_str", ")", ":", "return", "cryptography", ".", "x509", ".", "Name", "(", "[", "cryptography", ".", "x509", ".", "NameAttribute", "(", "cryptography", ".", "x509", ".", "oid", ".", "NameOID", ".", "COUNTRY_NAME",...
Create the DN Subject for certificate that will be used in a DataONE environment. The DN is formatted into a DataONE subject, which is used in authentication, authorization and event tracking. Args: common_name_str: str DataONE uses simple DNs without physical location information, so only the ``common_name_str`` (``CommonName``) needs to be specified. For Member Node Client Side certificates or CSRs, ``common_name_str`` is the ``node_id``, e.g., ``urn:node:ABCD`` for production, or ``urn:node:mnTestABCD`` for the test environments. For a local CA, something like ``localCA`` may be used. For a locally trusted client side certificate, something like ``localClient`` may be used.
[ "Create", "the", "DN", "Subject", "for", "certificate", "that", "will", "be", "used", "in", "a", "DataONE", "environment", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L143-L181
train
45,401
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
serialize_cert_to_pem
def serialize_cert_to_pem(cert_obj): """Serialize certificate to PEM. The certificate can be also be a Certificate Signing Request (CSR). Args: cert_obj: cryptography.Certificate Returns: bytes: PEM encoded certificate """ return cert_obj.public_bytes( encoding=cryptography.hazmat.primitives.serialization.Encoding.PEM )
python
def serialize_cert_to_pem(cert_obj):
    """Serialize a certificate to PEM.

    Also works for a Certificate Signing Request (CSR).

    Args:
        cert_obj: cryptography.Certificate

    Returns:
        bytes: PEM encoded certificate
    """
    pem_encoding = cryptography.hazmat.primitives.serialization.Encoding.PEM
    return cert_obj.public_bytes(encoding=pem_encoding)
[ "def", "serialize_cert_to_pem", "(", "cert_obj", ")", ":", "return", "cert_obj", ".", "public_bytes", "(", "encoding", "=", "cryptography", ".", "hazmat", ".", "primitives", ".", "serialization", ".", "Encoding", ".", "PEM", ")" ]
Serialize certificate to PEM. The certificate can be also be a Certificate Signing Request (CSR). Args: cert_obj: cryptography.Certificate Returns: bytes: PEM encoded certificate
[ "Serialize", "certificate", "to", "PEM", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L301-L315
train
45,402
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
extract_subject_info_extension
def extract_subject_info_extension(cert_obj): """Extract DataONE SubjectInfo XML doc from certificate. Certificates issued by DataONE may include an embedded XML doc containing additional information about the subject specified in the certificate DN. If present, the doc is stored as an extension with an OID specified by DataONE and formatted as specified in the DataONE SubjectInfo schema definition. Args: cert_obj: cryptography.Certificate Returns: str : SubjectInfo XML doc if present, else None """ try: subject_info_der = cert_obj.extensions.get_extension_for_oid( cryptography.x509.oid.ObjectIdentifier(DATAONE_SUBJECT_INFO_OID) ).value.value return str(pyasn1.codec.der.decoder.decode(subject_info_der)[0]) except Exception as e: logging.debug('SubjectInfo not extracted. reason="{}"'.format(e))
python
def extract_subject_info_extension(cert_obj):
    """Extract the DataONE SubjectInfo XML doc from a certificate.

    Certificates issued by DataONE may carry an embedded XML doc with additional
    information about the subject in the certificate DN. When present, it is
    stored as an extension under a DataONE-specified OID, formatted per the
    DataONE SubjectInfo schema definition.

    Args:
        cert_obj: cryptography.Certificate

    Returns:
        str: SubjectInfo XML doc if present, else None
    """
    try:
        ext = cert_obj.extensions.get_extension_for_oid(
            cryptography.x509.oid.ObjectIdentifier(DATAONE_SUBJECT_INFO_OID)
        )
        der_bytes = ext.value.value
        decoded_tuple = pyasn1.codec.der.decoder.decode(der_bytes)
        return str(decoded_tuple[0])
    except Exception as e:
        # Missing extension or malformed DER both resolve to "not present".
        logging.debug('SubjectInfo not extracted. reason="{}"'.format(e))
[ "def", "extract_subject_info_extension", "(", "cert_obj", ")", ":", "try", ":", "subject_info_der", "=", "cert_obj", ".", "extensions", ".", "get_extension_for_oid", "(", "cryptography", ".", "x509", ".", "oid", ".", "ObjectIdentifier", "(", "DATAONE_SUBJECT_INFO_OID"...
Extract DataONE SubjectInfo XML doc from certificate. Certificates issued by DataONE may include an embedded XML doc containing additional information about the subject specified in the certificate DN. If present, the doc is stored as an extension with an OID specified by DataONE and formatted as specified in the DataONE SubjectInfo schema definition. Args: cert_obj: cryptography.Certificate Returns: str : SubjectInfo XML doc if present, else None
[ "Extract", "DataONE", "SubjectInfo", "XML", "doc", "from", "certificate", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L321-L342
train
45,403
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
decode_der
def decode_der(cert_der): """Decode cert DER string to Certificate object. Args: cert_der : Certificate as a DER encoded string Returns: cryptography.Certificate() """ return cryptography.x509.load_der_x509_certificate( data=cert_der, backend=cryptography.hazmat.backends.default_backend() )
python
def decode_der(cert_der):
    """Decode a DER encoded certificate to a Certificate object.

    Args:
        cert_der: Certificate as a DER encoded byte string

    Returns:
        cryptography.Certificate()
    """
    backend = cryptography.hazmat.backends.default_backend()
    return cryptography.x509.load_der_x509_certificate(data=cert_der, backend=backend)
[ "def", "decode_der", "(", "cert_der", ")", ":", "return", "cryptography", ".", "x509", ".", "load_der_x509_certificate", "(", "data", "=", "cert_der", ",", "backend", "=", "cryptography", ".", "hazmat", ".", "backends", ".", "default_backend", "(", ")", ")" ]
Decode cert DER string to Certificate object. Args: cert_der : Certificate as a DER encoded string Returns: cryptography.Certificate()
[ "Decode", "cert", "DER", "string", "to", "Certificate", "object", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L454-L466
train
45,404
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
disable_cert_validation
def disable_cert_validation(): """Context manager to temporarily disable certificate validation in the standard SSL library. Note: This should not be used in production code but is sometimes useful for troubleshooting certificate validation issues. By design, the standard SSL library does not provide a way to disable verification of the server side certificate. However, a patch to disable validation is described by the library developers. This context manager allows applying the patch for specific sections of code. """ current_context = ssl._create_default_https_context ssl._create_default_https_context = ssl._create_unverified_context try: yield finally: ssl._create_default_https_context = current_context
python
import contextlib


@contextlib.contextmanager
def disable_cert_validation():
    """Context manager to temporarily disable certificate validation in the
    standard SSL library.

    Note: This should not be used in production code but is sometimes useful for
    troubleshooting certificate validation issues.

    By design, the standard SSL library does not provide a way to disable
    verification of the server side certificate. However, a patch to disable
    validation is described by the library developers. This context manager
    allows applying the patch for specific sections of code.

    Bug fix: the ``@contextlib.contextmanager`` decorator was missing, so this
    generator function could not actually be used in a ``with`` statement as
    documented.
    """
    current_context = ssl._create_default_https_context
    ssl._create_default_https_context = ssl._create_unverified_context
    try:
        yield
    finally:
        # Always restore the original context factory, even on error.
        ssl._create_default_https_context = current_context
[ "def", "disable_cert_validation", "(", ")", ":", "current_context", "=", "ssl", ".", "_create_default_https_context", "ssl", ".", "_create_default_https_context", "=", "ssl", ".", "_create_unverified_context", "try", ":", "yield", "finally", ":", "ssl", ".", "_create_...
Context manager to temporarily disable certificate validation in the standard SSL library. Note: This should not be used in production code but is sometimes useful for troubleshooting certificate validation issues. By design, the standard SSL library does not provide a way to disable verification of the server side certificate. However, a patch to disable validation is described by the library developers. This context manager allows applying the patch for specific sections of code.
[ "Context", "manager", "to", "temporarily", "disable", "certificate", "validation", "in", "the", "standard", "SSL", "library", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L471-L489
train
45,405
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
extract_issuer_ca_cert_url
def extract_issuer_ca_cert_url(cert_obj): """Extract issuer CA certificate URL from certificate. Certificates may include a URL where the root certificate for the CA which was used for signing the certificate can be downloaded. This function returns the URL if present. The primary use for this is to fix validation failure due to non-trusted issuer by downloading the root CA certificate from the URL and installing it in the local trust store. Args: cert_obj: cryptography.Certificate Returns: str: Issuer certificate URL if present, else None """ for extension in cert_obj.extensions: if extension.oid.dotted_string == AUTHORITY_INFO_ACCESS_OID: authority_info_access = extension.value for access_description in authority_info_access: if access_description.access_method.dotted_string == CA_ISSUERS_OID: return access_description.access_location.value
python
def extract_issuer_ca_cert_url(cert_obj):
    """Extract the issuer CA certificate URL from a certificate.

    Certificates may include a URL where the root certificate for the CA which
    was used for signing the certificate can be downloaded. This function
    returns the URL if present.

    The primary use for this is to fix validation failure due to non-trusted
    issuer by downloading the root CA certificate from the URL and installing
    it in the local trust store.

    Args:
        cert_obj: cryptography.Certificate

    Returns:
        str: Issuer certificate URL if present, else None
    """
    for ext in cert_obj.extensions:
        if ext.oid.dotted_string != AUTHORITY_INFO_ACCESS_OID:
            continue
        for access_desc in ext.value:
            if access_desc.access_method.dotted_string == CA_ISSUERS_OID:
                return access_desc.access_location.value
[ "def", "extract_issuer_ca_cert_url", "(", "cert_obj", ")", ":", "for", "extension", "in", "cert_obj", ".", "extensions", ":", "if", "extension", ".", "oid", ".", "dotted_string", "==", "AUTHORITY_INFO_ACCESS_OID", ":", "authority_info_access", "=", "extension", ".",...
Extract issuer CA certificate URL from certificate. Certificates may include a URL where the root certificate for the CA which was used for signing the certificate can be downloaded. This function returns the URL if present. The primary use for this is to fix validation failure due to non-trusted issuer by downloading the root CA certificate from the URL and installing it in the local trust store. Args: cert_obj: cryptography.Certificate Returns: str: Issuer certificate URL if present, else None
[ "Extract", "issuer", "CA", "certificate", "URL", "from", "certificate", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L492-L515
train
45,406
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
serialize_private_key_to_pem
def serialize_private_key_to_pem(private_key, passphrase_bytes=None): """Serialize private key to PEM. Args: private_key: passphrase_bytes: Returns: bytes: PEM encoded private key """ return private_key.private_bytes( encoding=cryptography.hazmat.primitives.serialization.Encoding.PEM, format=cryptography.hazmat.primitives.serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=cryptography.hazmat.primitives.serialization.BestAvailableEncryption( passphrase_bytes ) if passphrase_bytes is not None else cryptography.hazmat.primitives.serialization.NoEncryption(), )
python
def serialize_private_key_to_pem(private_key, passphrase_bytes=None):
    """Serialize a private key to PEM.

    Args:
        private_key: Private key object to serialize.
        passphrase_bytes: Optional passphrase. When given, the key is encrypted
            with the best encryption available to the backend.

    Returns:
        bytes: PEM encoded private key
    """
    serialization = cryptography.hazmat.primitives.serialization
    if passphrase_bytes is None:
        encryption_algorithm = serialization.NoEncryption()
    else:
        encryption_algorithm = serialization.BestAvailableEncryption(passphrase_bytes)
    return private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=encryption_algorithm,
    )
[ "def", "serialize_private_key_to_pem", "(", "private_key", ",", "passphrase_bytes", "=", "None", ")", ":", "return", "private_key", ".", "private_bytes", "(", "encoding", "=", "cryptography", ".", "hazmat", ".", "primitives", ".", "serialization", ".", "Encoding", ...
Serialize private key to PEM. Args: private_key: passphrase_bytes: Returns: bytes: PEM encoded private key
[ "Serialize", "private", "key", "to", "PEM", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L521-L540
train
45,407
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
generate_private_key
def generate_private_key(key_size=2048): """Generate a private key""" return cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key( public_exponent=65537, key_size=key_size, backend=cryptography.hazmat.backends.default_backend(), )
python
def generate_private_key(key_size=2048):
    """Generate an RSA private key.

    Args:
        key_size: int
            Key size in bits. Defaults to 2048.

    Returns:
        RSA private key object
    """
    backend = cryptography.hazmat.backends.default_backend()
    return cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key(
        # 65537 is the standard public exponent for RSA.
        public_exponent=65537,
        key_size=key_size,
        backend=backend,
    )
[ "def", "generate_private_key", "(", "key_size", "=", "2048", ")", ":", "return", "cryptography", ".", "hazmat", ".", "primitives", ".", "asymmetric", ".", "rsa", ".", "generate_private_key", "(", "public_exponent", "=", "65537", ",", "key_size", "=", "key_size",...
Generate a private key
[ "Generate", "a", "private", "key" ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L543-L549
train
45,408
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
load_csr
def load_csr(pem_path): """Load CSR from PEM encoded file""" with open(pem_path, "rb") as f: return cryptography.x509.load_pem_x509_csr( data=f.read(), backend=cryptography.hazmat.backends.default_backend() )
python
def load_csr(pem_path):
    """Load a Certificate Signing Request (CSR) from a PEM encoded file.

    Args:
        pem_path: Path to the PEM file.

    Returns:
        cryptography CSR object
    """
    with open(pem_path, "rb") as f:
        pem_bytes = f.read()
    return cryptography.x509.load_pem_x509_csr(
        data=pem_bytes, backend=cryptography.hazmat.backends.default_backend()
    )
[ "def", "load_csr", "(", "pem_path", ")", ":", "with", "open", "(", "pem_path", ",", "\"rb\"", ")", "as", "f", ":", "return", "cryptography", ".", "x509", ".", "load_pem_x509_csr", "(", "data", "=", "f", ".", "read", "(", ")", ",", "backend", "=", "cr...
Load CSR from PEM encoded file
[ "Load", "CSR", "from", "PEM", "encoded", "file" ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L580-L585
train
45,409
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
load_private_key
def load_private_key(pem_path, passphrase_bytes=None): """Load private key from PEM encoded file""" with open(pem_path, "rb") as f: return cryptography.hazmat.primitives.serialization.load_pem_private_key( data=f.read(), password=passphrase_bytes, backend=cryptography.hazmat.backends.default_backend(), )
python
def load_private_key(pem_path, passphrase_bytes=None):
    """Load a private key from a PEM encoded file.

    Args:
        pem_path: Path to the PEM file.
        passphrase_bytes: Optional passphrase for an encrypted key.

    Returns:
        Private key object
    """
    with open(pem_path, "rb") as f:
        pem_bytes = f.read()
    return cryptography.hazmat.primitives.serialization.load_pem_private_key(
        data=pem_bytes,
        password=passphrase_bytes,
        backend=cryptography.hazmat.backends.default_backend(),
    )
[ "def", "load_private_key", "(", "pem_path", ",", "passphrase_bytes", "=", "None", ")", ":", "with", "open", "(", "pem_path", ",", "\"rb\"", ")", "as", "f", ":", "return", "cryptography", ".", "hazmat", ".", "primitives", ".", "serialization", ".", "load_pem_...
Load private key from PEM encoded file
[ "Load", "private", "key", "from", "PEM", "encoded", "file" ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L588-L595
train
45,410
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
serialize_cert_to_der
def serialize_cert_to_der(cert_obj): """Serialize certificate to DER. Args: cert_obj: cryptography.Certificate Returns: bytes: DER encoded certificate """ return cert_obj.public_bytes( cryptography.hazmat.primitives.serialization.Encoding.DER )
python
def serialize_cert_to_der(cert_obj):
    """Serialize a certificate to DER.

    Args:
        cert_obj: cryptography.Certificate

    Returns:
        bytes: DER encoded certificate
    """
    der_encoding = cryptography.hazmat.primitives.serialization.Encoding.DER
    return cert_obj.public_bytes(der_encoding)
[ "def", "serialize_cert_to_der", "(", "cert_obj", ")", ":", "return", "cert_obj", ".", "public_bytes", "(", "cryptography", ".", "hazmat", ".", "primitives", ".", "serialization", ".", "Encoding", ".", "DER", ")" ]
Serialize certificate to DER. Args: cert_obj: cryptography.Certificate Returns: bytes: DER encoded certificate
[ "Serialize", "certificate", "to", "DER", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L631-L643
train
45,411
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
log_cert_info
def log_cert_info(logger, msg_str, cert_obj): """Dump basic certificate values to the log. Args: logger: Logger Logger to which to write the certificate values. msg_str: str A message to write to the log before the certificate values. cert_obj: cryptography.Certificate Certificate containing values to log. Returns: None """ list( map( logger, ["{}:".format(msg_str)] + [ " {}".format(v) for v in [ "Subject: {}".format( _get_val_str(cert_obj, ["subject", "value"], reverse=True) ), "Issuer: {}".format( _get_val_str(cert_obj, ["issuer", "value"], reverse=True) ), "Not Valid Before: {}".format( cert_obj.not_valid_before.isoformat() ), "Not Valid After: {}".format(cert_obj.not_valid_after.isoformat()), "Subject Alt Names: {}".format( _get_ext_val_str( cert_obj, "SUBJECT_ALTERNATIVE_NAME", ["value", "value"] ) ), "CRL Distribution Points: {}".format( _get_ext_val_str( cert_obj, "CRL_DISTRIBUTION_POINTS", ["value", "full_name", "value", "value"], ) ), "Authority Access Location: {}".format( extract_issuer_ca_cert_url(cert_obj) or "<not found>" ), ] ], ) )
python
def log_cert_info(logger, msg_str, cert_obj):
    """Dump basic certificate values to the log.

    Args:
        logger: Logger
            Logger to which to write the certificate values.
        msg_str: str
            A message to write to the log before the certificate values.
        cert_obj: cryptography.Certificate
            Certificate containing values to log.

    Returns:
        None
    """
    detail_list = [
        "Subject: {}".format(
            _get_val_str(cert_obj, ["subject", "value"], reverse=True)
        ),
        "Issuer: {}".format(
            _get_val_str(cert_obj, ["issuer", "value"], reverse=True)
        ),
        "Not Valid Before: {}".format(cert_obj.not_valid_before.isoformat()),
        "Not Valid After: {}".format(cert_obj.not_valid_after.isoformat()),
        "Subject Alt Names: {}".format(
            _get_ext_val_str(cert_obj, "SUBJECT_ALTERNATIVE_NAME", ["value", "value"])
        ),
        "CRL Distribution Points: {}".format(
            _get_ext_val_str(
                cert_obj,
                "CRL_DISTRIBUTION_POINTS",
                ["value", "full_name", "value", "value"],
            )
        ),
        "Authority Access Location: {}".format(
            extract_issuer_ca_cert_url(cert_obj) or "<not found>"
        ),
    ]
    # Idiom fix: the original used list(map(logger, ...)) purely for its side
    # effects, building and discarding a list. A plain loop makes the intent
    # (sequential logging) explicit.
    logger("{}:".format(msg_str))
    for detail_str in detail_list:
        logger(" {}".format(detail_str))
[ "def", "log_cert_info", "(", "logger", ",", "msg_str", ",", "cert_obj", ")", ":", "list", "(", "map", "(", "logger", ",", "[", "\"{}:\"", ".", "format", "(", "msg_str", ")", "]", "+", "[", "\" {}\"", ".", "format", "(", "v", ")", "for", "v", "in",...
Dump basic certificate values to the log. Args: logger: Logger Logger to which to write the certificate values. msg_str: str A message to write to the log before the certificate values. cert_obj: cryptography.Certificate Certificate containing values to log. Returns: None
[ "Dump", "basic", "certificate", "values", "to", "the", "log", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L774-L826
train
45,412
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
get_extension_by_name
def get_extension_by_name(cert_obj, extension_name): """Get a standard certificate extension by attribute name. Args: cert_obj: cryptography.Certificate Certificate containing a standard extension. extension_name : str Extension name. E.g., 'SUBJECT_DIRECTORY_ATTRIBUTES'. Returns: Cryptography.Extension """ try: return cert_obj.extensions.get_extension_for_oid( getattr(cryptography.x509.oid.ExtensionOID, extension_name) ) except cryptography.x509.ExtensionNotFound: pass
python
def get_extension_by_name(cert_obj, extension_name):
    """Get a standard certificate extension by attribute name.

    Args:
        cert_obj: cryptography.Certificate
            Certificate containing a standard extension.
        extension_name : str
            Extension name. E.g., 'SUBJECT_DIRECTORY_ATTRIBUTES'.

    Returns:
        cryptography.Extension if present, else None
    """
    extension_oid = getattr(cryptography.x509.oid.ExtensionOID, extension_name)
    try:
        return cert_obj.extensions.get_extension_for_oid(extension_oid)
    except cryptography.x509.ExtensionNotFound:
        return None
[ "def", "get_extension_by_name", "(", "cert_obj", ",", "extension_name", ")", ":", "try", ":", "return", "cert_obj", ".", "extensions", ".", "get_extension_for_oid", "(", "getattr", "(", "cryptography", ".", "x509", ".", "oid", ".", "ExtensionOID", ",", "extensio...
Get a standard certificate extension by attribute name. Args: cert_obj: cryptography.Certificate Certificate containing a standard extension. extension_name : str Extension name. E.g., 'SUBJECT_DIRECTORY_ATTRIBUTES'. Returns: Cryptography.Extension
[ "Get", "a", "standard", "certificate", "extension", "by", "attribute", "name", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L829-L848
train
45,413
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
_get_val_list
def _get_val_list(obj, path_list, reverse=False): """Extract values from nested objects by attribute names. Objects contain attributes which are named references to objects. This will descend down a tree of nested objects, starting at the given object, following the given path. Args: obj: object Any type of object path_list: list Attribute names reverse: bool Reverse the list of values before concatenation. Returns: list of objects """ try: y = getattr(obj, path_list[0]) except AttributeError: return [] if len(path_list) == 1: return [y] else: val_list = [x for a in y for x in _get_val_list(a, path_list[1:], reverse)] if reverse: val_list.reverse() return val_list
python
def _get_val_list(obj, path_list, reverse=False): """Extract values from nested objects by attribute names. Objects contain attributes which are named references to objects. This will descend down a tree of nested objects, starting at the given object, following the given path. Args: obj: object Any type of object path_list: list Attribute names reverse: bool Reverse the list of values before concatenation. Returns: list of objects """ try: y = getattr(obj, path_list[0]) except AttributeError: return [] if len(path_list) == 1: return [y] else: val_list = [x for a in y for x in _get_val_list(a, path_list[1:], reverse)] if reverse: val_list.reverse() return val_list
[ "def", "_get_val_list", "(", "obj", ",", "path_list", ",", "reverse", "=", "False", ")", ":", "try", ":", "y", "=", "getattr", "(", "obj", ",", "path_list", "[", "0", "]", ")", "except", "AttributeError", ":", "return", "[", "]", "if", "len", "(", ...
Extract values from nested objects by attribute names. Objects contain attributes which are named references to objects. This will descend down a tree of nested objects, starting at the given object, following the given path. Args: obj: object Any type of object path_list: list Attribute names reverse: bool Reverse the list of values before concatenation. Returns: list of objects
[ "Extract", "values", "from", "nested", "objects", "by", "attribute", "names", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L854-L885
train
45,414
DataONEorg/d1_python
lib_common/src/d1_common/cert/x509.py
_get_val_str
def _get_val_str(obj, path_list=None, reverse=False): """Extract values from nested objects by attribute names and concatenate their string representations. Args: obj: object Any type of object path_list: list Attribute names reverse: bool Reverse the list of values before concatenation. Returns: str: Concatenated extracted values. """ val_list = _get_val_list(obj, path_list or [], reverse) return "<not found>" if obj is None else " / ".join(map(str, val_list))
python
def _get_val_str(obj, path_list=None, reverse=False): """Extract values from nested objects by attribute names and concatenate their string representations. Args: obj: object Any type of object path_list: list Attribute names reverse: bool Reverse the list of values before concatenation. Returns: str: Concatenated extracted values. """ val_list = _get_val_list(obj, path_list or [], reverse) return "<not found>" if obj is None else " / ".join(map(str, val_list))
[ "def", "_get_val_str", "(", "obj", ",", "path_list", "=", "None", ",", "reverse", "=", "False", ")", ":", "val_list", "=", "_get_val_list", "(", "obj", ",", "path_list", "or", "[", "]", ",", "reverse", ")", "return", "\"<not found>\"", "if", "obj", "is",...
Extract values from nested objects by attribute names and concatenate their string representations. Args: obj: object Any type of object path_list: list Attribute names reverse: bool Reverse the list of values before concatenation. Returns: str: Concatenated extracted values.
[ "Extract", "values", "from", "nested", "objects", "by", "attribute", "names", "and", "concatenate", "their", "string", "representations", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/x509.py#L888-L907
train
45,415
wilson-eft/wilson
wilson/translate/wet.py
_JMS_to_Bern_II
def _JMS_to_Bern_II(C, udlnu): """From JMS to BernII basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.""" u = uflav[udlnu[0]] d = dflav[udlnu[1]] l = lflav[udlnu[4:udlnu.find('n')]] lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]] ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1 :len(udlnu)] return { '1' + ind : C["VnueduLL"][lp, l, d, u].conj(), '5' + ind : C["SnueduRL"][lp, l, d, u].conj(), '1p' + ind : C["VnueduLR"][lp, l, d, u].conj(), '5p' + ind : C["SnueduRR"][lp, l, d, u].conj(), '7p' + ind : C["TnueduRR"][lp, l, d, u].conj() }
python
def _JMS_to_Bern_II(C, udlnu): """From JMS to BernII basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.""" u = uflav[udlnu[0]] d = dflav[udlnu[1]] l = lflav[udlnu[4:udlnu.find('n')]] lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]] ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1 :len(udlnu)] return { '1' + ind : C["VnueduLL"][lp, l, d, u].conj(), '5' + ind : C["SnueduRL"][lp, l, d, u].conj(), '1p' + ind : C["VnueduLR"][lp, l, d, u].conj(), '5p' + ind : C["SnueduRR"][lp, l, d, u].conj(), '7p' + ind : C["TnueduRR"][lp, l, d, u].conj() }
[ "def", "_JMS_to_Bern_II", "(", "C", ",", "udlnu", ")", ":", "u", "=", "uflav", "[", "udlnu", "[", "0", "]", "]", "d", "=", "dflav", "[", "udlnu", "[", "1", "]", "]", "l", "=", "lflav", "[", "udlnu", "[", "4", ":", "udlnu", ".", "find", "(", ...
From JMS to BernII basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.
[ "From", "JMS", "to", "BernII", "basis", "for", "charged", "current", "process", "semileptonic", "operators", ".", "udlnu", "should", "be", "of", "the", "form", "udl_enu_tau", "cbl_munu_e", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L156-L171
train
45,416
wilson-eft/wilson
wilson/translate/wet.py
_Bern_to_JMS_II
def _Bern_to_JMS_II(C, udlnu): """From BernII to JMS basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.""" u = uflav[udlnu[0]] d = dflav[udlnu[1]] l = lflav[udlnu[4:udlnu.find('n')]] lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]] ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1 :len(udlnu)] return { "VnueduLL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1' + ind].conjugate(), "SnueduRL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5' + ind].conjugate(), "VnueduLR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1p' + ind].conjugate(), "SnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5p' + ind].conjugate(), "TnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['7p' + ind].conjugate() }
python
def _Bern_to_JMS_II(C, udlnu): """From BernII to JMS basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.""" u = uflav[udlnu[0]] d = dflav[udlnu[1]] l = lflav[udlnu[4:udlnu.find('n')]] lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]] ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1 :len(udlnu)] return { "VnueduLL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1' + ind].conjugate(), "SnueduRL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5' + ind].conjugate(), "VnueduLR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1p' + ind].conjugate(), "SnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5p' + ind].conjugate(), "TnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['7p' + ind].conjugate() }
[ "def", "_Bern_to_JMS_II", "(", "C", ",", "udlnu", ")", ":", "u", "=", "uflav", "[", "udlnu", "[", "0", "]", "]", "d", "=", "dflav", "[", "udlnu", "[", "1", "]", "]", "l", "=", "lflav", "[", "udlnu", "[", "4", ":", "udlnu", ".", "find", "(", ...
From BernII to JMS basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.
[ "From", "BernII", "to", "JMS", "basis", "for", "charged", "current", "process", "semileptonic", "operators", ".", "udlnu", "should", "be", "of", "the", "form", "udl_enu_tau", "cbl_munu_e", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L174-L189
train
45,417
wilson-eft/wilson
wilson/translate/wet.py
_BernII_to_Flavio_II
def _BernII_to_Flavio_II(C, udlnu, parameters): """From BernII to FlavioII basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.""" p = parameters u = uflav[udlnu[0]] d = dflav[udlnu[1]] l = lflav[udlnu[4:udlnu.find('n')]] lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]] ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1 :len(udlnu)] ind2 = udlnu[1]+udlnu[0]+udlnu[4:udlnu.find('n')]+'nu'+udlnu[ udlnu.find('_',5)+1:len(udlnu)] dic = { 'CVL_' + ind2 : C['1' + ind], 'CVR_'+ ind2 : C['1p' + ind], 'CSR_'+ ind2 : C['5' + ind], 'CSL_'+ ind2 : C['5p' + ind], 'CT_'+ ind2 : C['7p' + ind] } V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) prefactor = -sqrt(2) / p['GF'] / V[u, d] / 4 return {k: prefactor * v for k, v in dic.items()}
python
def _BernII_to_Flavio_II(C, udlnu, parameters): """From BernII to FlavioII basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.""" p = parameters u = uflav[udlnu[0]] d = dflav[udlnu[1]] l = lflav[udlnu[4:udlnu.find('n')]] lp = lflav[udlnu[udlnu.find('_',5)+1:len(udlnu)]] ind = udlnu[0]+udlnu[1]+udlnu[4:udlnu.find('n')]+udlnu[udlnu.find('_',5)+1 :len(udlnu)] ind2 = udlnu[1]+udlnu[0]+udlnu[4:udlnu.find('n')]+'nu'+udlnu[ udlnu.find('_',5)+1:len(udlnu)] dic = { 'CVL_' + ind2 : C['1' + ind], 'CVR_'+ ind2 : C['1p' + ind], 'CSR_'+ ind2 : C['5' + ind], 'CSL_'+ ind2 : C['5p' + ind], 'CT_'+ ind2 : C['7p' + ind] } V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) prefactor = -sqrt(2) / p['GF'] / V[u, d] / 4 return {k: prefactor * v for k, v in dic.items()}
[ "def", "_BernII_to_Flavio_II", "(", "C", ",", "udlnu", ",", "parameters", ")", ":", "p", "=", "parameters", "u", "=", "uflav", "[", "udlnu", "[", "0", "]", "]", "d", "=", "dflav", "[", "udlnu", "[", "1", "]", "]", "l", "=", "lflav", "[", "udlnu",...
From BernII to FlavioII basis for charged current process semileptonic operators. `udlnu` should be of the form 'udl_enu_tau', 'cbl_munu_e' etc.
[ "From", "BernII", "to", "FlavioII", "basis", "for", "charged", "current", "process", "semileptonic", "operators", ".", "udlnu", "should", "be", "of", "the", "form", "udl_enu_tau", "cbl_munu_e", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L192-L214
train
45,418
wilson-eft/wilson
wilson/translate/wet.py
Flavio_to_Fierz_nunu
def Flavio_to_Fierz_nunu(C, ddll, parameters, norm_gf=True): """From Flavio semileptonic basis to semileptonic Fierz basis for Class V. C should be the corresponding leptonic Fierz basis and `ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) if ddll[:2] == 'sb': xi = V[2, 2] * V[2, 1].conj() elif ddll[:2] == 'db': xi = V[2, 2] * V[2, 0].conj() elif ddll[:2] == 'ds': xi = V[2, 1] * V[2, 0].conj() else: raise ValueError("Unexpected flavours: {}".format(ddll[:2])) q1, q2 = ddll[:2] l1 = ddll[4:ddll.find('n')] l2 = ddll[ddll.find('_', 5) + 1:] ind = q1 + q2 + l1 + l2 # flavio has indices within currents inverted indnu = q2 + q1 + 'nu' + l2 + 'nu' + l1 e = sqrt(4* pi * parameters['alpha_e']) dic = { 'F' + ind + 'nu': C["CL_" + indnu] / ((8 * pi**2) / e**2), 'F' + ind + 'nup': C["CR_" + indnu] / ((8 * pi**2) / e**2), } if norm_gf: prefactor = sqrt(2)/p['GF']/xi/4 else: prefactor = 1 / xi return {k: v / prefactor for k, v in dic.items()}
python
def Flavio_to_Fierz_nunu(C, ddll, parameters, norm_gf=True): """From Flavio semileptonic basis to semileptonic Fierz basis for Class V. C should be the corresponding leptonic Fierz basis and `ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) if ddll[:2] == 'sb': xi = V[2, 2] * V[2, 1].conj() elif ddll[:2] == 'db': xi = V[2, 2] * V[2, 0].conj() elif ddll[:2] == 'ds': xi = V[2, 1] * V[2, 0].conj() else: raise ValueError("Unexpected flavours: {}".format(ddll[:2])) q1, q2 = ddll[:2] l1 = ddll[4:ddll.find('n')] l2 = ddll[ddll.find('_', 5) + 1:] ind = q1 + q2 + l1 + l2 # flavio has indices within currents inverted indnu = q2 + q1 + 'nu' + l2 + 'nu' + l1 e = sqrt(4* pi * parameters['alpha_e']) dic = { 'F' + ind + 'nu': C["CL_" + indnu] / ((8 * pi**2) / e**2), 'F' + ind + 'nup': C["CR_" + indnu] / ((8 * pi**2) / e**2), } if norm_gf: prefactor = sqrt(2)/p['GF']/xi/4 else: prefactor = 1 / xi return {k: v / prefactor for k, v in dic.items()}
[ "def", "Flavio_to_Fierz_nunu", "(", "C", ",", "ddll", ",", "parameters", ",", "norm_gf", "=", "True", ")", ":", "p", "=", "parameters", "V", "=", "ckmutil", ".", "ckm", ".", "ckm_tree", "(", "p", "[", "\"Vus\"", "]", ",", "p", "[", "\"Vub\"", "]", ...
From Flavio semileptonic basis to semileptonic Fierz basis for Class V. C should be the corresponding leptonic Fierz basis and `ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc.
[ "From", "Flavio", "semileptonic", "basis", "to", "semileptonic", "Fierz", "basis", "for", "Class", "V", ".", "C", "should", "be", "the", "corresponding", "leptonic", "Fierz", "basis", "and", "ddll", "should", "be", "of", "the", "form", "sbl_enu_tau", "dbl_munu...
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1224-L1253
train
45,419
wilson-eft/wilson
wilson/translate/wet.py
Fierz_to_EOS_lep
def Fierz_to_EOS_lep(C, ddll, parameters): """From semileptonic Fierz basis to EOS semileptonic basis for Class V. C should be the corresponding leptonic Fierz basis and `ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) Vtb = V[2,2] Vts = V[2,1] ind = ddll.replace('l_','').replace('nu_','') ind2 = ddll.replace('l_','').replace('nu_','')[2::] e = sqrt(4* pi * parameters['alpha_e']) dic = { 'b->s' + ind2 + '::c9' : (16 * pi**2) / e**2 * C['F' + ind + '9'], 'b->s' + ind2 + "::c9'" : (16 * pi**2) / e**2 * C['F' + ind + '9p'], 'b->s' + ind2 + "::c10" : (16 * pi**2) / e**2 * C['F' + ind + '10'], 'b->s' + ind2 + "::c10'" : (16 * pi**2) / e**2 * C['F' + ind + '10p'], 'b->s' + ind2 + "::cS" : (16 * pi**2) / e**2 * C['F' + ind + 'S'], 'b->s' + ind2 + "::cS'" : (16 * pi**2) / e**2 * C['F' + ind + 'Sp'], 'b->s' + ind2 + "::cP" : (16 * pi**2) / e**2 * C['F' + ind + 'P'], 'b->s' + ind2 + "::cP'" : (16 * pi**2) / e**2 * C['F' + ind + 'Pp'], 'b->s' + ind2 + "::cT" : (16 * pi**2) / e**2 * C['F' + ind + 'T'], 'b->s' + ind2 + "::cT5" : (16 * pi**2) / e**2 * C['F' + ind + 'T5'] } prefactor = sqrt(2)/p['GF']/Vtb/Vts.conj()/4 return {k: prefactor * v for k,v in dic.items()}
python
def Fierz_to_EOS_lep(C, ddll, parameters): """From semileptonic Fierz basis to EOS semileptonic basis for Class V. C should be the corresponding leptonic Fierz basis and `ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) Vtb = V[2,2] Vts = V[2,1] ind = ddll.replace('l_','').replace('nu_','') ind2 = ddll.replace('l_','').replace('nu_','')[2::] e = sqrt(4* pi * parameters['alpha_e']) dic = { 'b->s' + ind2 + '::c9' : (16 * pi**2) / e**2 * C['F' + ind + '9'], 'b->s' + ind2 + "::c9'" : (16 * pi**2) / e**2 * C['F' + ind + '9p'], 'b->s' + ind2 + "::c10" : (16 * pi**2) / e**2 * C['F' + ind + '10'], 'b->s' + ind2 + "::c10'" : (16 * pi**2) / e**2 * C['F' + ind + '10p'], 'b->s' + ind2 + "::cS" : (16 * pi**2) / e**2 * C['F' + ind + 'S'], 'b->s' + ind2 + "::cS'" : (16 * pi**2) / e**2 * C['F' + ind + 'Sp'], 'b->s' + ind2 + "::cP" : (16 * pi**2) / e**2 * C['F' + ind + 'P'], 'b->s' + ind2 + "::cP'" : (16 * pi**2) / e**2 * C['F' + ind + 'Pp'], 'b->s' + ind2 + "::cT" : (16 * pi**2) / e**2 * C['F' + ind + 'T'], 'b->s' + ind2 + "::cT5" : (16 * pi**2) / e**2 * C['F' + ind + 'T5'] } prefactor = sqrt(2)/p['GF']/Vtb/Vts.conj()/4 return {k: prefactor * v for k,v in dic.items()}
[ "def", "Fierz_to_EOS_lep", "(", "C", ",", "ddll", ",", "parameters", ")", ":", "p", "=", "parameters", "V", "=", "ckmutil", ".", "ckm", ".", "ckm_tree", "(", "p", "[", "\"Vus\"", "]", ",", "p", "[", "\"Vub\"", "]", ",", "p", "[", "\"Vcb\"", "]", ...
From semileptonic Fierz basis to EOS semileptonic basis for Class V. C should be the corresponding leptonic Fierz basis and `ddll` should be of the form 'sbl_enu_tau', 'dbl_munu_e' etc.
[ "From", "semileptonic", "Fierz", "basis", "to", "EOS", "semileptonic", "basis", "for", "Class", "V", ".", "C", "should", "be", "the", "corresponding", "leptonic", "Fierz", "basis", "and", "ddll", "should", "be", "of", "the", "form", "sbl_enu_tau", "dbl_munu_e"...
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1256-L1280
train
45,420
wilson-eft/wilson
wilson/translate/wet.py
JMS_to_FormFlavor_lep
def JMS_to_FormFlavor_lep(C, dd): """From JMS to semileptonic Fierz basis for Classes V. C should be the JMS basis and `ddll` should be of the form 'sbl_eni_tau', 'dbl_munu_e' etc.""" b = dflav[dd[0]] s = dflav[dd[1]] return { 'CVLL_' + dd + 'mm' : C["VedLL"][1, 1, s, b], 'CVRR_' + dd + 'mm' : C["VedRR"][1, 1, s, b], 'CVLR_' + dd + 'mm' : C["VdeLR"][s, b, 1, 1], 'CVRL_' + dd + 'mm' : C["VedLR"][1, 1, s, b], 'CSLL_' + dd + 'mm' : C["SedRR"][1, 1, b, s].conj(), 'CSRR_' + dd + 'mm' : C["SedRR"][1, 1, s, b], 'CSLR_' + dd + 'mm' : C["SedRL"][1, 1, s, b], 'CSRL_' + dd + 'mm' : C["SedRL"][1, 1, b, s].conj(), 'CTLL_' + dd + 'mm' : C["TedRR"][1, 1, b, s].conj(), 'CTRR_' + dd + 'mm' : C["TedRR"][1, 1, s, b], 'CVLL_sdnn' : 1 / 3 * C["VnudLL"][0, 0, s-1, s] + 1 / 3 * C["VnudLL"][1, 1, s-1, s] + 1 / 3 * C["VnudLL"][2, 2, s-1, s], 'CVRL_sdnn' : 1 / 3 * C["VnudLR"][0, 0, s-1, s] + 1 / 3 * C["VnudLR"][1, 1, s-1, s] + 1 / 3 * C["VnudLR"][2, 2, s-1, s] }
python
def JMS_to_FormFlavor_lep(C, dd): """From JMS to semileptonic Fierz basis for Classes V. C should be the JMS basis and `ddll` should be of the form 'sbl_eni_tau', 'dbl_munu_e' etc.""" b = dflav[dd[0]] s = dflav[dd[1]] return { 'CVLL_' + dd + 'mm' : C["VedLL"][1, 1, s, b], 'CVRR_' + dd + 'mm' : C["VedRR"][1, 1, s, b], 'CVLR_' + dd + 'mm' : C["VdeLR"][s, b, 1, 1], 'CVRL_' + dd + 'mm' : C["VedLR"][1, 1, s, b], 'CSLL_' + dd + 'mm' : C["SedRR"][1, 1, b, s].conj(), 'CSRR_' + dd + 'mm' : C["SedRR"][1, 1, s, b], 'CSLR_' + dd + 'mm' : C["SedRL"][1, 1, s, b], 'CSRL_' + dd + 'mm' : C["SedRL"][1, 1, b, s].conj(), 'CTLL_' + dd + 'mm' : C["TedRR"][1, 1, b, s].conj(), 'CTRR_' + dd + 'mm' : C["TedRR"][1, 1, s, b], 'CVLL_sdnn' : 1 / 3 * C["VnudLL"][0, 0, s-1, s] + 1 / 3 * C["VnudLL"][1, 1, s-1, s] + 1 / 3 * C["VnudLL"][2, 2, s-1, s], 'CVRL_sdnn' : 1 / 3 * C["VnudLR"][0, 0, s-1, s] + 1 / 3 * C["VnudLR"][1, 1, s-1, s] + 1 / 3 * C["VnudLR"][2, 2, s-1, s] }
[ "def", "JMS_to_FormFlavor_lep", "(", "C", ",", "dd", ")", ":", "b", "=", "dflav", "[", "dd", "[", "0", "]", "]", "s", "=", "dflav", "[", "dd", "[", "1", "]", "]", "return", "{", "'CVLL_'", "+", "dd", "+", "'mm'", ":", "C", "[", "\"VedLL\"", "...
From JMS to semileptonic Fierz basis for Classes V. C should be the JMS basis and `ddll` should be of the form 'sbl_eni_tau', 'dbl_munu_e' etc.
[ "From", "JMS", "to", "semileptonic", "Fierz", "basis", "for", "Classes", "V", ".", "C", "should", "be", "the", "JMS", "basis", "and", "ddll", "should", "be", "of", "the", "form", "sbl_eni_tau", "dbl_munu_e", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1282-L1305
train
45,421
wilson-eft/wilson
wilson/translate/wet.py
JMS_to_Fierz_chrom
def JMS_to_Fierz_chrom(C, qq): """From JMS to chromomagnetic Fierz basis for Class V. qq should be of the form 'sb', 'ds' etc.""" if qq[0] in dflav: s = dflav[qq[0]] b = dflav[qq[1]] return { 'F7gamma' + qq : C['dgamma'][s, b], 'F8g' + qq : C['dG'][s, b], 'F7pgamma' + qq : C['dgamma'][b, s].conj(), 'F8pg' + qq : C['dG'][b, s].conj() } else: u = uflav[qq[0]] c = uflav[qq[1]] return { 'F7gamma' + qq : C['ugamma'][u, c], 'F8g' + qq : C['uG'][u, c], 'F7pgamma' + qq : C['ugamma'][c, u].conj(), 'F8pg' + qq : C['uG'][c, u].conj() }
python
def JMS_to_Fierz_chrom(C, qq): """From JMS to chromomagnetic Fierz basis for Class V. qq should be of the form 'sb', 'ds' etc.""" if qq[0] in dflav: s = dflav[qq[0]] b = dflav[qq[1]] return { 'F7gamma' + qq : C['dgamma'][s, b], 'F8g' + qq : C['dG'][s, b], 'F7pgamma' + qq : C['dgamma'][b, s].conj(), 'F8pg' + qq : C['dG'][b, s].conj() } else: u = uflav[qq[0]] c = uflav[qq[1]] return { 'F7gamma' + qq : C['ugamma'][u, c], 'F8g' + qq : C['uG'][u, c], 'F7pgamma' + qq : C['ugamma'][c, u].conj(), 'F8pg' + qq : C['uG'][c, u].conj() }
[ "def", "JMS_to_Fierz_chrom", "(", "C", ",", "qq", ")", ":", "if", "qq", "[", "0", "]", "in", "dflav", ":", "s", "=", "dflav", "[", "qq", "[", "0", "]", "]", "b", "=", "dflav", "[", "qq", "[", "1", "]", "]", "return", "{", "'F7gamma'", "+", ...
From JMS to chromomagnetic Fierz basis for Class V. qq should be of the form 'sb', 'ds' etc.
[ "From", "JMS", "to", "chromomagnetic", "Fierz", "basis", "for", "Class", "V", ".", "qq", "should", "be", "of", "the", "form", "sb", "ds", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1308-L1328
train
45,422
wilson-eft/wilson
wilson/translate/wet.py
Fierz_to_JMS_chrom
def Fierz_to_JMS_chrom(C, qq): """From chromomagnetic Fierz to JMS basis for Class V. qq should be of the form 'sb', 'ds' etc.""" if qq[0] in dflav: s = dflav[qq[0]] + 1 b = dflav[qq[1]] + 1 return {'dgamma_{}{}'.format(s, b): C['F7gamma' + qq], 'dG_{}{}'.format(s, b): C['F8g' + qq], 'dgamma_{}{}'.format(b, s): C['F7pgamma' + qq].conjugate(), 'dG_{}{}'.format(b, s): C['F8pg' + qq].conjugate(), } else: u = uflav[qq[0]] + 1 c = uflav[qq[1]] + 1 return {'ugamma_{}{}'.format(u, c): C['F7gamma' + qq], 'uG_{}{}'.format(u, c): C['F8g' + qq], 'ugamma_{}{}'.format(c, u): C['F7pgamma' + qq].conjugate(), 'uG_{}{}'.format(c, u): C['F8pg' + qq].conjugate(), }
python
def Fierz_to_JMS_chrom(C, qq): """From chromomagnetic Fierz to JMS basis for Class V. qq should be of the form 'sb', 'ds' etc.""" if qq[0] in dflav: s = dflav[qq[0]] + 1 b = dflav[qq[1]] + 1 return {'dgamma_{}{}'.format(s, b): C['F7gamma' + qq], 'dG_{}{}'.format(s, b): C['F8g' + qq], 'dgamma_{}{}'.format(b, s): C['F7pgamma' + qq].conjugate(), 'dG_{}{}'.format(b, s): C['F8pg' + qq].conjugate(), } else: u = uflav[qq[0]] + 1 c = uflav[qq[1]] + 1 return {'ugamma_{}{}'.format(u, c): C['F7gamma' + qq], 'uG_{}{}'.format(u, c): C['F8g' + qq], 'ugamma_{}{}'.format(c, u): C['F7pgamma' + qq].conjugate(), 'uG_{}{}'.format(c, u): C['F8pg' + qq].conjugate(), }
[ "def", "Fierz_to_JMS_chrom", "(", "C", ",", "qq", ")", ":", "if", "qq", "[", "0", "]", "in", "dflav", ":", "s", "=", "dflav", "[", "qq", "[", "0", "]", "]", "+", "1", "b", "=", "dflav", "[", "qq", "[", "1", "]", "]", "+", "1", "return", "...
From chromomagnetic Fierz to JMS basis for Class V. qq should be of the form 'sb', 'ds' etc.
[ "From", "chromomagnetic", "Fierz", "to", "JMS", "basis", "for", "Class", "V", ".", "qq", "should", "be", "of", "the", "form", "sb", "ds", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1331-L1349
train
45,423
wilson-eft/wilson
wilson/translate/wet.py
Fierz_to_Bern_chrom
def Fierz_to_Bern_chrom(C, dd, parameters): """From Fierz to chromomagnetic Bern basis for Class V. dd should be of the form 'sb', 'ds' etc.""" e = sqrt(4 * pi * parameters['alpha_e']) gs = sqrt(4 * pi * parameters['alpha_s']) if dd == 'sb' or dd == 'db': mq = parameters['m_b'] elif dd == 'ds': mq = parameters['m_s'] else: KeyError("Not sure what to do with quark mass for flavour {}".format(dd)) return { '7gamma' + dd : gs**2 / e / mq * C['F7gamma' + dd ], '8g' + dd : gs / mq * C['F8g' + dd ], '7pgamma' + dd : gs**2 / e /mq * C['F7pgamma' + dd], '8pg' + dd : gs / mq * C['F8pg' + dd] }
python
def Fierz_to_Bern_chrom(C, dd, parameters): """From Fierz to chromomagnetic Bern basis for Class V. dd should be of the form 'sb', 'ds' etc.""" e = sqrt(4 * pi * parameters['alpha_e']) gs = sqrt(4 * pi * parameters['alpha_s']) if dd == 'sb' or dd == 'db': mq = parameters['m_b'] elif dd == 'ds': mq = parameters['m_s'] else: KeyError("Not sure what to do with quark mass for flavour {}".format(dd)) return { '7gamma' + dd : gs**2 / e / mq * C['F7gamma' + dd ], '8g' + dd : gs / mq * C['F8g' + dd ], '7pgamma' + dd : gs**2 / e /mq * C['F7pgamma' + dd], '8pg' + dd : gs / mq * C['F8pg' + dd] }
[ "def", "Fierz_to_Bern_chrom", "(", "C", ",", "dd", ",", "parameters", ")", ":", "e", "=", "sqrt", "(", "4", "*", "pi", "*", "parameters", "[", "'alpha_e'", "]", ")", "gs", "=", "sqrt", "(", "4", "*", "pi", "*", "parameters", "[", "'alpha_s'", "]", ...
From Fierz to chromomagnetic Bern basis for Class V. dd should be of the form 'sb', 'ds' etc.
[ "From", "Fierz", "to", "chromomagnetic", "Bern", "basis", "for", "Class", "V", ".", "dd", "should", "be", "of", "the", "form", "sb", "ds", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1352-L1368
train
45,424
wilson-eft/wilson
wilson/translate/wet.py
Flavio_to_Fierz_chrom
def Flavio_to_Fierz_chrom(C, qq, parameters): """From Flavio to chromomagnetic Fierz basis for Class V. qq should be of the form 'sb', 'db' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) if qq == 'sb': xi = V[2, 2] * V[2, 1].conj() elif qq == 'db': xi = V[2, 2] * V[2, 0].conj() elif qq == 'ds': xi = V[2, 1] * V[2, 0].conj() elif qq == 'uc': xi = V[1, 2].conj() * V[0, 2] else: raise ValueError("Unexpected flavours: {}".format(qq)) qqfl = qq[::-1] e = sqrt(4 * pi * parameters['alpha_e']) gs = sqrt(4 * pi * parameters['alpha_s']) if qq == 'sb' or qq == 'db': mq = parameters['m_b'] elif qq == 'ds': mq = parameters['m_s'] elif qq == 'uc': mq = parameters['m_c'] else: KeyError("Not sure what to do with quark mass for flavour {}".format(qq)) dic = { 'F7gamma' + qq: C["C7_" + qqfl] / ((16 * pi**2) / e / mq), 'F8g' + qq: C["C8_" + qqfl] / ((16 * pi**2) / gs / mq), 'F7pgamma' + qq: C["C7p_" + qqfl] / ((16 * pi**2) / e / mq), 'F8pg' + qq: C["C8p_" + qqfl] / ((16 * pi**2) / gs / mq) } prefactor = sqrt(2)/p['GF']/xi/4 return {k: v / prefactor for k, v in dic.items()}
python
def Flavio_to_Fierz_chrom(C, qq, parameters): """From Flavio to chromomagnetic Fierz basis for Class V. qq should be of the form 'sb', 'db' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) if qq == 'sb': xi = V[2, 2] * V[2, 1].conj() elif qq == 'db': xi = V[2, 2] * V[2, 0].conj() elif qq == 'ds': xi = V[2, 1] * V[2, 0].conj() elif qq == 'uc': xi = V[1, 2].conj() * V[0, 2] else: raise ValueError("Unexpected flavours: {}".format(qq)) qqfl = qq[::-1] e = sqrt(4 * pi * parameters['alpha_e']) gs = sqrt(4 * pi * parameters['alpha_s']) if qq == 'sb' or qq == 'db': mq = parameters['m_b'] elif qq == 'ds': mq = parameters['m_s'] elif qq == 'uc': mq = parameters['m_c'] else: KeyError("Not sure what to do with quark mass for flavour {}".format(qq)) dic = { 'F7gamma' + qq: C["C7_" + qqfl] / ((16 * pi**2) / e / mq), 'F8g' + qq: C["C8_" + qqfl] / ((16 * pi**2) / gs / mq), 'F7pgamma' + qq: C["C7p_" + qqfl] / ((16 * pi**2) / e / mq), 'F8pg' + qq: C["C8p_" + qqfl] / ((16 * pi**2) / gs / mq) } prefactor = sqrt(2)/p['GF']/xi/4 return {k: v / prefactor for k, v in dic.items()}
[ "def", "Flavio_to_Fierz_chrom", "(", "C", ",", "qq", ",", "parameters", ")", ":", "p", "=", "parameters", "V", "=", "ckmutil", ".", "ckm", ".", "ckm_tree", "(", "p", "[", "\"Vus\"", "]", ",", "p", "[", "\"Vub\"", "]", ",", "p", "[", "\"Vcb\"", "]",...
From Flavio to chromomagnetic Fierz basis for Class V. qq should be of the form 'sb', 'db' etc.
[ "From", "Flavio", "to", "chromomagnetic", "Fierz", "basis", "for", "Class", "V", ".", "qq", "should", "be", "of", "the", "form", "sb", "db", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1426-L1459
train
45,425
wilson-eft/wilson
wilson/translate/wet.py
Fierz_to_EOS_chrom
def Fierz_to_EOS_chrom(C, dd, parameters): """From Fierz to chromomagnetic EOS basis for Class V. dd should be of the form 'sb', 'ds' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) Vtb = V[2,2] Vts = V[2,1] e = sqrt(4 * pi * parameters['alpha_e']) gs = sqrt(4 * pi * parameters['alpha_s']) mb = parameters['m_b'] dic = {"b->s::c7": 16 * pi**2 / mb / e * C["F7gamma" + dd], "b->s::c7'": 16 * pi**2 / mb / e * C["F7pgamma" + dd], "b->s::c8": 16 * pi**2 / mb / gs * C["F8g" + dd], "b->s::c8'": 16 * pi**2 / mb / gs * C["F8pg" + dd] } prefactor = sqrt(2)/p['GF']/Vtb/Vts.conj()/4 return {k: prefactor * v for k,v in dic.items()}
python
def Fierz_to_EOS_chrom(C, dd, parameters): """From Fierz to chromomagnetic EOS basis for Class V. dd should be of the form 'sb', 'ds' etc.""" p = parameters V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"]) Vtb = V[2,2] Vts = V[2,1] e = sqrt(4 * pi * parameters['alpha_e']) gs = sqrt(4 * pi * parameters['alpha_s']) mb = parameters['m_b'] dic = {"b->s::c7": 16 * pi**2 / mb / e * C["F7gamma" + dd], "b->s::c7'": 16 * pi**2 / mb / e * C["F7pgamma" + dd], "b->s::c8": 16 * pi**2 / mb / gs * C["F8g" + dd], "b->s::c8'": 16 * pi**2 / mb / gs * C["F8pg" + dd] } prefactor = sqrt(2)/p['GF']/Vtb/Vts.conj()/4 return {k: prefactor * v for k,v in dic.items()}
[ "def", "Fierz_to_EOS_chrom", "(", "C", ",", "dd", ",", "parameters", ")", ":", "p", "=", "parameters", "V", "=", "ckmutil", ".", "ckm", ".", "ckm_tree", "(", "p", "[", "\"Vus\"", "]", ",", "p", "[", "\"Vub\"", "]", ",", "p", "[", "\"Vcb\"", "]", ...
From Fierz to chromomagnetic EOS basis for Class V. dd should be of the form 'sb', 'ds' etc.
[ "From", "Fierz", "to", "chromomagnetic", "EOS", "basis", "for", "Class", "V", ".", "dd", "should", "be", "of", "the", "form", "sb", "ds", "etc", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1462-L1478
train
45,426
wilson-eft/wilson
wilson/translate/wet.py
_JMS_to_Flavio_VII
def _JMS_to_Flavio_VII(C, parameters): """From JMS to flavio basis for class VII, i.e. flavour blind operators.""" d = {} dtrans = json.loads(pkgutil.get_data('wilson', 'data/flavio_jms_vii.json').decode('utf8')) for cj, cf in dtrans.items(): d[cf] = C.get(cj, 0) gs = sqrt(4 * pi * parameters['alpha_s']) e = sqrt(4 * pi * parameters['alpha_e']) preC7 = 16 * pi**2 / e preC8 = 16 * pi**2 / gs d['C8_uu'] = preC8 / parameters['m_u'] * C.get('uG_11', 0) d['C8_cc'] = preC8 / parameters['m_c'] * C.get('uG_22', 0) d['C8_dd'] = preC8 / parameters['m_d'] * C.get('dG_11', 0) d['C8_ss'] = preC8 / parameters['m_s'] * C.get('dG_22', 0) d['C8_bb'] = preC8 / parameters['m_b'] * C.get('dG_33', 0) d['C7_uu'] = preC7 / parameters['m_u'] * C.get('ugamma_11', 0) d['C7_cc'] = preC7 / parameters['m_c'] * C.get('ugamma_22', 0) d['C7_dd'] = preC7 / parameters['m_d'] * C.get('dgamma_11', 0) d['C7_ss'] = preC7 / parameters['m_s'] * C.get('dgamma_22', 0) d['C7_bb'] = preC7 / parameters['m_b'] * C.get('dgamma_33', 0) d['C7_ee'] = preC7 / parameters['m_e'] * C.get('egamma_11', 0) d['C7_mumu'] = preC7 / parameters['m_mu'] * C.get('egamma_22', 0) d['C7_tautau'] = preC7 / parameters['m_tau'] * C.get('egamma_33', 0) preGF = sqrt(2) / parameters['GF'] / 4 return {k: preGF * v for k,v in d.items()}
python
def _JMS_to_Flavio_VII(C, parameters): """From JMS to flavio basis for class VII, i.e. flavour blind operators.""" d = {} dtrans = json.loads(pkgutil.get_data('wilson', 'data/flavio_jms_vii.json').decode('utf8')) for cj, cf in dtrans.items(): d[cf] = C.get(cj, 0) gs = sqrt(4 * pi * parameters['alpha_s']) e = sqrt(4 * pi * parameters['alpha_e']) preC7 = 16 * pi**2 / e preC8 = 16 * pi**2 / gs d['C8_uu'] = preC8 / parameters['m_u'] * C.get('uG_11', 0) d['C8_cc'] = preC8 / parameters['m_c'] * C.get('uG_22', 0) d['C8_dd'] = preC8 / parameters['m_d'] * C.get('dG_11', 0) d['C8_ss'] = preC8 / parameters['m_s'] * C.get('dG_22', 0) d['C8_bb'] = preC8 / parameters['m_b'] * C.get('dG_33', 0) d['C7_uu'] = preC7 / parameters['m_u'] * C.get('ugamma_11', 0) d['C7_cc'] = preC7 / parameters['m_c'] * C.get('ugamma_22', 0) d['C7_dd'] = preC7 / parameters['m_d'] * C.get('dgamma_11', 0) d['C7_ss'] = preC7 / parameters['m_s'] * C.get('dgamma_22', 0) d['C7_bb'] = preC7 / parameters['m_b'] * C.get('dgamma_33', 0) d['C7_ee'] = preC7 / parameters['m_e'] * C.get('egamma_11', 0) d['C7_mumu'] = preC7 / parameters['m_mu'] * C.get('egamma_22', 0) d['C7_tautau'] = preC7 / parameters['m_tau'] * C.get('egamma_33', 0) preGF = sqrt(2) / parameters['GF'] / 4 return {k: preGF * v for k,v in d.items()}
[ "def", "_JMS_to_Flavio_VII", "(", "C", ",", "parameters", ")", ":", "d", "=", "{", "}", "dtrans", "=", "json", ".", "loads", "(", "pkgutil", ".", "get_data", "(", "'wilson'", ",", "'data/flavio_jms_vii.json'", ")", ".", "decode", "(", "'utf8'", ")", ")",...
From JMS to flavio basis for class VII, i.e. flavour blind operators.
[ "From", "JMS", "to", "flavio", "basis", "for", "class", "VII", "i", ".", "e", ".", "flavour", "blind", "operators", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/translate/wet.py#L1515-L1539
train
45,427
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
cut_from_chain
def cut_from_chain(sciobj_model): """Remove an object from a revision chain. The object can be at any location in the chain, including the head or tail. Preconditions: - The object with the pid is verified to exist and to be a member of an revision chain. E.g., with: d1_gmn.app.views.asserts.is_existing_object(pid) d1_gmn.app.views.asserts.is_in_revision_chain(pid) Postconditions: - The given object is a standalone object with empty obsoletes, obsoletedBy and seriesId fields. - The previously adjacent objects in the chain are adjusted to close any gap that was created or remove dangling reference at the head or tail. - If the object was the last object in the chain and the chain has a SID, the SID reference is shifted over to the new last object in the chain. """ if _is_head(sciobj_model): old_pid = sciobj_model.obsoletes.did _cut_head_from_chain(sciobj_model) elif _is_tail(sciobj_model): old_pid = sciobj_model.obsoleted_by.did _cut_tail_from_chain(sciobj_model) else: old_pid = sciobj_model.obsoleted_by.did _cut_embedded_from_chain(sciobj_model) _update_sid_to_last_existing_pid_map(old_pid)
python
def cut_from_chain(sciobj_model): """Remove an object from a revision chain. The object can be at any location in the chain, including the head or tail. Preconditions: - The object with the pid is verified to exist and to be a member of an revision chain. E.g., with: d1_gmn.app.views.asserts.is_existing_object(pid) d1_gmn.app.views.asserts.is_in_revision_chain(pid) Postconditions: - The given object is a standalone object with empty obsoletes, obsoletedBy and seriesId fields. - The previously adjacent objects in the chain are adjusted to close any gap that was created or remove dangling reference at the head or tail. - If the object was the last object in the chain and the chain has a SID, the SID reference is shifted over to the new last object in the chain. """ if _is_head(sciobj_model): old_pid = sciobj_model.obsoletes.did _cut_head_from_chain(sciobj_model) elif _is_tail(sciobj_model): old_pid = sciobj_model.obsoleted_by.did _cut_tail_from_chain(sciobj_model) else: old_pid = sciobj_model.obsoleted_by.did _cut_embedded_from_chain(sciobj_model) _update_sid_to_last_existing_pid_map(old_pid)
[ "def", "cut_from_chain", "(", "sciobj_model", ")", ":", "if", "_is_head", "(", "sciobj_model", ")", ":", "old_pid", "=", "sciobj_model", ".", "obsoletes", ".", "did", "_cut_head_from_chain", "(", "sciobj_model", ")", "elif", "_is_tail", "(", "sciobj_model", ")",...
Remove an object from a revision chain. The object can be at any location in the chain, including the head or tail. Preconditions: - The object with the pid is verified to exist and to be a member of an revision chain. E.g., with: d1_gmn.app.views.asserts.is_existing_object(pid) d1_gmn.app.views.asserts.is_in_revision_chain(pid) Postconditions: - The given object is a standalone object with empty obsoletes, obsoletedBy and seriesId fields. - The previously adjacent objects in the chain are adjusted to close any gap that was created or remove dangling reference at the head or tail. - If the object was the last object in the chain and the chain has a SID, the SID reference is shifted over to the new last object in the chain.
[ "Remove", "an", "object", "from", "a", "revision", "chain", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L51-L81
train
45,428
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
resolve_sid
def resolve_sid(sid): """Get the PID to which the ``sid`` currently maps. Preconditions: - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid(). """ return d1_gmn.app.models.Chain.objects.get(sid__did=sid).head_pid.did
python
def resolve_sid(sid): """Get the PID to which the ``sid`` currently maps. Preconditions: - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid(). """ return d1_gmn.app.models.Chain.objects.get(sid__did=sid).head_pid.did
[ "def", "resolve_sid", "(", "sid", ")", ":", "return", "d1_gmn", ".", "app", ".", "models", ".", "Chain", ".", "objects", ".", "get", "(", "sid__did", "=", "sid", ")", ".", "head_pid", ".", "did" ]
Get the PID to which the ``sid`` currently maps. Preconditions: - ``sid`` is verified to exist. E.g., with d1_gmn.app.views.asserts.is_sid().
[ "Get", "the", "PID", "to", "which", "the", "sid", "currently", "maps", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L94-L101
train
45,429
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
is_obsoletes_pid
def is_obsoletes_pid(pid): """Return True if ``pid`` is referenced in the obsoletes field of any object. This will return True even if the PID is in the obsoletes field of an object that does not exist on the local MN, such as replica that is in an incomplete chain. """ return d1_gmn.app.models.ScienceObject.objects.filter(obsoletes__did=pid).exists()
python
def is_obsoletes_pid(pid): """Return True if ``pid`` is referenced in the obsoletes field of any object. This will return True even if the PID is in the obsoletes field of an object that does not exist on the local MN, such as replica that is in an incomplete chain. """ return d1_gmn.app.models.ScienceObject.objects.filter(obsoletes__did=pid).exists()
[ "def", "is_obsoletes_pid", "(", "pid", ")", ":", "return", "d1_gmn", ".", "app", ".", "models", ".", "ScienceObject", ".", "objects", ".", "filter", "(", "obsoletes__did", "=", "pid", ")", ".", "exists", "(", ")" ]
Return True if ``pid`` is referenced in the obsoletes field of any object. This will return True even if the PID is in the obsoletes field of an object that does not exist on the local MN, such as replica that is in an incomplete chain.
[ "Return", "True", "if", "pid", "is", "referenced", "in", "the", "obsoletes", "field", "of", "any", "object", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L132-L139
train
45,430
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
is_obsoleted_by_pid
def is_obsoleted_by_pid(pid): """Return True if ``pid`` is referenced in the obsoletedBy field of any object. This will return True even if the PID is in the obsoletes field of an object that does not exist on the local MN, such as replica that is in an incomplete chain. """ return d1_gmn.app.models.ScienceObject.objects.filter( obsoleted_by__did=pid ).exists()
python
def is_obsoleted_by_pid(pid): """Return True if ``pid`` is referenced in the obsoletedBy field of any object. This will return True even if the PID is in the obsoletes field of an object that does not exist on the local MN, such as replica that is in an incomplete chain. """ return d1_gmn.app.models.ScienceObject.objects.filter( obsoleted_by__did=pid ).exists()
[ "def", "is_obsoleted_by_pid", "(", "pid", ")", ":", "return", "d1_gmn", ".", "app", ".", "models", ".", "ScienceObject", ".", "objects", ".", "filter", "(", "obsoleted_by__did", "=", "pid", ")", ".", "exists", "(", ")" ]
Return True if ``pid`` is referenced in the obsoletedBy field of any object. This will return True even if the PID is in the obsoletes field of an object that does not exist on the local MN, such as replica that is in an incomplete chain.
[ "Return", "True", "if", "pid", "is", "referenced", "in", "the", "obsoletedBy", "field", "of", "any", "object", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L142-L151
train
45,431
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
_merge_chains
def _merge_chains(chain_model_a, chain_model_b): """Merge two chains. For use when it becomes known that two chains that were created separately actually are separate sections of the same chain E.g.: - A obsoleted by X is created. A has no SID. X does not exist yet. A chain is created for A. - B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created for B. - C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the same chain, which means that A and B are in the same chain. At this point, the two chains need to be merged. Merging the chains causes A to take on the SID of B. """ _set_chain_sid( chain_model_a, d1_gmn.app.did.get_did_by_foreign_key(chain_model_b.sid) ) for member_model in _get_all_chain_member_queryset_by_chain(chain_model_b): member_model.chain = chain_model_a member_model.save() chain_model_b.delete()
python
def _merge_chains(chain_model_a, chain_model_b): """Merge two chains. For use when it becomes known that two chains that were created separately actually are separate sections of the same chain E.g.: - A obsoleted by X is created. A has no SID. X does not exist yet. A chain is created for A. - B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created for B. - C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the same chain, which means that A and B are in the same chain. At this point, the two chains need to be merged. Merging the chains causes A to take on the SID of B. """ _set_chain_sid( chain_model_a, d1_gmn.app.did.get_did_by_foreign_key(chain_model_b.sid) ) for member_model in _get_all_chain_member_queryset_by_chain(chain_model_b): member_model.chain = chain_model_a member_model.save() chain_model_b.delete()
[ "def", "_merge_chains", "(", "chain_model_a", ",", "chain_model_b", ")", ":", "_set_chain_sid", "(", "chain_model_a", ",", "d1_gmn", ".", "app", ".", "did", ".", "get_did_by_foreign_key", "(", "chain_model_b", ".", "sid", ")", ")", "for", "member_model", "in", ...
Merge two chains. For use when it becomes known that two chains that were created separately actually are separate sections of the same chain E.g.: - A obsoleted by X is created. A has no SID. X does not exist yet. A chain is created for A. - B obsoleting Y is created. B has SID. Y does not exist yet. A chain is created for B. - C obsoleting X, obsoleted by Y is created. C tells us that X and Y are in the same chain, which means that A and B are in the same chain. At this point, the two chains need to be merged. Merging the chains causes A to take on the SID of B.
[ "Merge", "two", "chains", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L196-L220
train
45,432
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
_set_chain_sid
def _set_chain_sid(chain_model, sid): """Set or update SID for chain. If the chain already has a SID, ``sid`` must either be None or match the existing SID. """ if not sid: return if chain_model.sid and chain_model.sid.did != sid: raise d1_common.types.exceptions.ServiceFailure( 0, 'Attempted to modify existing SID. ' 'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid), ) chain_model.sid = d1_gmn.app.did.get_or_create_did(sid) chain_model.save()
python
def _set_chain_sid(chain_model, sid): """Set or update SID for chain. If the chain already has a SID, ``sid`` must either be None or match the existing SID. """ if not sid: return if chain_model.sid and chain_model.sid.did != sid: raise d1_common.types.exceptions.ServiceFailure( 0, 'Attempted to modify existing SID. ' 'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid), ) chain_model.sid = d1_gmn.app.did.get_or_create_did(sid) chain_model.save()
[ "def", "_set_chain_sid", "(", "chain_model", ",", "sid", ")", ":", "if", "not", "sid", ":", "return", "if", "chain_model", ".", "sid", "and", "chain_model", ".", "sid", ".", "did", "!=", "sid", ":", "raise", "d1_common", ".", "types", ".", "exceptions", ...
Set or update SID for chain. If the chain already has a SID, ``sid`` must either be None or match the existing SID.
[ "Set", "or", "update", "SID", "for", "chain", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L230-L246
train
45,433
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
_get_chain_by_pid
def _get_chain_by_pid(pid): """Find chain by pid. Return None if not found. """ try: return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain except d1_gmn.app.models.ChainMember.DoesNotExist: pass
python
def _get_chain_by_pid(pid): """Find chain by pid. Return None if not found. """ try: return d1_gmn.app.models.ChainMember.objects.get(pid__did=pid).chain except d1_gmn.app.models.ChainMember.DoesNotExist: pass
[ "def", "_get_chain_by_pid", "(", "pid", ")", ":", "try", ":", "return", "d1_gmn", ".", "app", ".", "models", ".", "ChainMember", ".", "objects", ".", "get", "(", "pid__did", "=", "pid", ")", ".", "chain", "except", "d1_gmn", ".", "app", ".", "models", ...
Find chain by pid. Return None if not found.
[ "Find", "chain", "by", "pid", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L281-L290
train
45,434
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
_get_chain_by_sid
def _get_chain_by_sid(sid): """Return None if not found.""" try: return d1_gmn.app.models.Chain.objects.get(sid__did=sid) except d1_gmn.app.models.Chain.DoesNotExist: pass
python
def _get_chain_by_sid(sid): """Return None if not found.""" try: return d1_gmn.app.models.Chain.objects.get(sid__did=sid) except d1_gmn.app.models.Chain.DoesNotExist: pass
[ "def", "_get_chain_by_sid", "(", "sid", ")", ":", "try", ":", "return", "d1_gmn", ".", "app", ".", "models", ".", "Chain", ".", "objects", ".", "get", "(", "sid__did", "=", "sid", ")", "except", "d1_gmn", ".", "app", ".", "models", ".", "Chain", ".",...
Return None if not found.
[ "Return", "None", "if", "not", "found", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L293-L298
train
45,435
DataONEorg/d1_python
gmn/src/d1_gmn/app/revision.py
_update_sid_to_last_existing_pid_map
def _update_sid_to_last_existing_pid_map(pid): """Set chain head PID to the last existing object in the chain to which ``pid`` belongs. If SID has been set for chain, it resolves to chain head PID. Intended to be called in MNStorage.delete() and other chain manipulation. Preconditions: - ``pid`` must exist and be verified to be a PID. d1_gmn.app.views.asserts.is_existing_object() """ last_pid = _find_head_or_latest_connected(pid) chain_model = _get_chain_by_pid(last_pid) if not chain_model: return chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid) chain_model.save()
python
def _update_sid_to_last_existing_pid_map(pid): """Set chain head PID to the last existing object in the chain to which ``pid`` belongs. If SID has been set for chain, it resolves to chain head PID. Intended to be called in MNStorage.delete() and other chain manipulation. Preconditions: - ``pid`` must exist and be verified to be a PID. d1_gmn.app.views.asserts.is_existing_object() """ last_pid = _find_head_or_latest_connected(pid) chain_model = _get_chain_by_pid(last_pid) if not chain_model: return chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid) chain_model.save()
[ "def", "_update_sid_to_last_existing_pid_map", "(", "pid", ")", ":", "last_pid", "=", "_find_head_or_latest_connected", "(", "pid", ")", "chain_model", "=", "_get_chain_by_pid", "(", "last_pid", ")", "if", "not", "chain_model", ":", "return", "chain_model", ".", "he...
Set chain head PID to the last existing object in the chain to which ``pid`` belongs. If SID has been set for chain, it resolves to chain head PID. Intended to be called in MNStorage.delete() and other chain manipulation. Preconditions: - ``pid`` must exist and be verified to be a PID. d1_gmn.app.views.asserts.is_existing_object()
[ "Set", "chain", "head", "PID", "to", "the", "last", "existing", "object", "in", "the", "chain", "to", "which", "pid", "belongs", ".", "If", "SID", "has", "been", "set", "for", "chain", "it", "resolves", "to", "chain", "head", "PID", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/revision.py#L301-L317
train
45,436
genialis/resolwe
resolwe/flow/migrations/0025_entity_type.py
populate_entity_type
def populate_entity_type(apps, schema_editor): """Populate entity type from attached descriptor schema.""" Entity = apps.get_model('flow', 'Entity') for entity in Entity.objects.all(): if entity.descriptor_schema is not None: entity.type = entity.descriptor_schema.slug entity.save()
python
def populate_entity_type(apps, schema_editor): """Populate entity type from attached descriptor schema.""" Entity = apps.get_model('flow', 'Entity') for entity in Entity.objects.all(): if entity.descriptor_schema is not None: entity.type = entity.descriptor_schema.slug entity.save()
[ "def", "populate_entity_type", "(", "apps", ",", "schema_editor", ")", ":", "Entity", "=", "apps", ".", "get_model", "(", "'flow'", ",", "'Entity'", ")", "for", "entity", "in", "Entity", ".", "objects", ".", "all", "(", ")", ":", "if", "entity", ".", "...
Populate entity type from attached descriptor schema.
[ "Populate", "entity", "type", "from", "attached", "descriptor", "schema", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/migrations/0025_entity_type.py#L8-L15
train
45,437
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
deserialize
def deserialize(doc_xml, pyxb_binding=None): """Deserialize DataONE XML types to PyXB. Args: doc_xml: UTF-8 encoded ``bytes`` pyxb_binding: PyXB binding object. If not specified, the correct one should be selected automatically. Returns: PyXB object See Also: ``deserialize_d1_exception()`` for deserializing DataONE Exception types. """ pyxb_binding = pyxb_binding or d1_common.types.dataoneTypes try: return pyxb_binding.CreateFromDocument(doc_xml) except pyxb.ValidationError as e: raise ValueError( 'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format( e.details(), doc_xml ) ) except (pyxb.PyXBException, xml.sax.SAXParseException, Exception) as e: raise ValueError( 'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format( str(e), doc_xml ) )
python
def deserialize(doc_xml, pyxb_binding=None): """Deserialize DataONE XML types to PyXB. Args: doc_xml: UTF-8 encoded ``bytes`` pyxb_binding: PyXB binding object. If not specified, the correct one should be selected automatically. Returns: PyXB object See Also: ``deserialize_d1_exception()`` for deserializing DataONE Exception types. """ pyxb_binding = pyxb_binding or d1_common.types.dataoneTypes try: return pyxb_binding.CreateFromDocument(doc_xml) except pyxb.ValidationError as e: raise ValueError( 'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format( e.details(), doc_xml ) ) except (pyxb.PyXBException, xml.sax.SAXParseException, Exception) as e: raise ValueError( 'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format( str(e), doc_xml ) )
[ "def", "deserialize", "(", "doc_xml", ",", "pyxb_binding", "=", "None", ")", ":", "pyxb_binding", "=", "pyxb_binding", "or", "d1_common", ".", "types", ".", "dataoneTypes", "try", ":", "return", "pyxb_binding", ".", "CreateFromDocument", "(", "doc_xml", ")", "...
Deserialize DataONE XML types to PyXB. Args: doc_xml: UTF-8 encoded ``bytes`` pyxb_binding: PyXB binding object. If not specified, the correct one should be selected automatically. Returns: PyXB object See Also: ``deserialize_d1_exception()`` for deserializing DataONE Exception types.
[ "Deserialize", "DataONE", "XML", "types", "to", "PyXB", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L36-L66
train
45,438
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
serialize_gen
def serialize_gen( obj_pyxb, encoding='utf-8', pretty=False, strip_prolog=False, xslt_url=None ): """Serialize PyXB object to XML. Args: obj_pyxb: PyXB object PyXB object to serialize. encoding: str Encoding to use for XML doc bytes pretty: bool True: Use pretty print formatting for human readability. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: XML document """ assert d1_common.type_conversions.is_pyxb(obj_pyxb) assert encoding in (None, 'utf-8', 'UTF-8') try: obj_dom = obj_pyxb.toDOM() except pyxb.ValidationError as e: raise ValueError( 'Unable to serialize PyXB to XML. error="{}"'.format(e.details()) ) except pyxb.PyXBException as e: raise ValueError('Unable to serialize PyXB to XML. error="{}"'.format(str(e))) if xslt_url: xslt_processing_instruction = obj_dom.createProcessingInstruction( 'xml-stylesheet', 'type="text/xsl" href="{}"'.format(xslt_url) ) root = obj_dom.firstChild obj_dom.insertBefore(xslt_processing_instruction, root) if pretty: xml_str = obj_dom.toprettyxml(indent=' ', encoding=encoding) # Remove empty lines in the result caused by a bug in toprettyxml() if encoding is None: xml_str = re.sub(r'^\s*$\n', r'', xml_str, flags=re.MULTILINE) else: xml_str = re.sub(b'^\s*$\n', b'', xml_str, flags=re.MULTILINE) else: xml_str = obj_dom.toxml(encoding) if strip_prolog: if encoding is None: xml_str = re.sub(r'^<\?(.*)\?>', r'', xml_str) else: xml_str = re.sub(b'^<\?(.*)\?>', b'', xml_str) return xml_str.strip()
python
def serialize_gen( obj_pyxb, encoding='utf-8', pretty=False, strip_prolog=False, xslt_url=None ): """Serialize PyXB object to XML. Args: obj_pyxb: PyXB object PyXB object to serialize. encoding: str Encoding to use for XML doc bytes pretty: bool True: Use pretty print formatting for human readability. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: XML document """ assert d1_common.type_conversions.is_pyxb(obj_pyxb) assert encoding in (None, 'utf-8', 'UTF-8') try: obj_dom = obj_pyxb.toDOM() except pyxb.ValidationError as e: raise ValueError( 'Unable to serialize PyXB to XML. error="{}"'.format(e.details()) ) except pyxb.PyXBException as e: raise ValueError('Unable to serialize PyXB to XML. error="{}"'.format(str(e))) if xslt_url: xslt_processing_instruction = obj_dom.createProcessingInstruction( 'xml-stylesheet', 'type="text/xsl" href="{}"'.format(xslt_url) ) root = obj_dom.firstChild obj_dom.insertBefore(xslt_processing_instruction, root) if pretty: xml_str = obj_dom.toprettyxml(indent=' ', encoding=encoding) # Remove empty lines in the result caused by a bug in toprettyxml() if encoding is None: xml_str = re.sub(r'^\s*$\n', r'', xml_str, flags=re.MULTILINE) else: xml_str = re.sub(b'^\s*$\n', b'', xml_str, flags=re.MULTILINE) else: xml_str = obj_dom.toxml(encoding) if strip_prolog: if encoding is None: xml_str = re.sub(r'^<\?(.*)\?>', r'', xml_str) else: xml_str = re.sub(b'^<\?(.*)\?>', b'', xml_str) return xml_str.strip()
[ "def", "serialize_gen", "(", "obj_pyxb", ",", "encoding", "=", "'utf-8'", ",", "pretty", "=", "False", ",", "strip_prolog", "=", "False", ",", "xslt_url", "=", "None", ")", ":", "assert", "d1_common", ".", "type_conversions", ".", "is_pyxb", "(", "obj_pyxb",...
Serialize PyXB object to XML. Args: obj_pyxb: PyXB object PyXB object to serialize. encoding: str Encoding to use for XML doc bytes pretty: bool True: Use pretty print formatting for human readability. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: XML document
[ "Serialize", "PyXB", "object", "to", "XML", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L79-L139
train
45,439
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
serialize_for_transport
def serialize_for_transport(obj_pyxb, pretty=False, strip_prolog=False, xslt_url=None): """Serialize PyXB object to XML ``bytes`` with UTF-8 encoding for transport over the network, filesystem storage and other machine usage. Args: obj_pyxb: PyXB object PyXB object to serialize. pretty: bool True: Use pretty print formatting for human readability. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: bytes: UTF-8 encoded XML document See Also: ``serialize_for_display()`` """ return serialize_gen(obj_pyxb, 'utf-8', pretty, strip_prolog, xslt_url)
python
def serialize_for_transport(obj_pyxb, pretty=False, strip_prolog=False, xslt_url=None): """Serialize PyXB object to XML ``bytes`` with UTF-8 encoding for transport over the network, filesystem storage and other machine usage. Args: obj_pyxb: PyXB object PyXB object to serialize. pretty: bool True: Use pretty print formatting for human readability. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: bytes: UTF-8 encoded XML document See Also: ``serialize_for_display()`` """ return serialize_gen(obj_pyxb, 'utf-8', pretty, strip_prolog, xslt_url)
[ "def", "serialize_for_transport", "(", "obj_pyxb", ",", "pretty", "=", "False", ",", "strip_prolog", "=", "False", ",", "xslt_url", "=", "None", ")", ":", "return", "serialize_gen", "(", "obj_pyxb", ",", "'utf-8'", ",", "pretty", ",", "strip_prolog", ",", "x...
Serialize PyXB object to XML ``bytes`` with UTF-8 encoding for transport over the network, filesystem storage and other machine usage. Args: obj_pyxb: PyXB object PyXB object to serialize. pretty: bool True: Use pretty print formatting for human readability. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: bytes: UTF-8 encoded XML document See Also: ``serialize_for_display()``
[ "Serialize", "PyXB", "object", "to", "XML", "bytes", "with", "UTF", "-", "8", "encoding", "for", "transport", "over", "the", "network", "filesystem", "storage", "and", "other", "machine", "usage", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L142-L168
train
45,440
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
serialize_to_xml_str
def serialize_to_xml_str(obj_pyxb, pretty=True, strip_prolog=False, xslt_url=None): """Serialize PyXB object to pretty printed XML ``str`` for display. Args: obj_pyxb: PyXB object PyXB object to serialize. pretty: bool False: Disable pretty print formatting. XML will not have line breaks. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: str: Pretty printed XML document """ return serialize_gen(obj_pyxb, None, pretty, strip_prolog, xslt_url)
python
def serialize_to_xml_str(obj_pyxb, pretty=True, strip_prolog=False, xslt_url=None): """Serialize PyXB object to pretty printed XML ``str`` for display. Args: obj_pyxb: PyXB object PyXB object to serialize. pretty: bool False: Disable pretty print formatting. XML will not have line breaks. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: str: Pretty printed XML document """ return serialize_gen(obj_pyxb, None, pretty, strip_prolog, xslt_url)
[ "def", "serialize_to_xml_str", "(", "obj_pyxb", ",", "pretty", "=", "True", ",", "strip_prolog", "=", "False", ",", "xslt_url", "=", "None", ")", ":", "return", "serialize_gen", "(", "obj_pyxb", ",", "None", ",", "pretty", ",", "strip_prolog", ",", "xslt_url...
Serialize PyXB object to pretty printed XML ``str`` for display. Args: obj_pyxb: PyXB object PyXB object to serialize. pretty: bool False: Disable pretty print formatting. XML will not have line breaks. strip_prolog: True: remove any XML prolog (e.g., ``<?xml version="1.0" encoding="utf-8"?>``), from the resulting XML doc. xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: str: Pretty printed XML document
[ "Serialize", "PyXB", "object", "to", "pretty", "printed", "XML", "str", "for", "display", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L172-L194
train
45,441
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
reformat_to_pretty_xml
def reformat_to_pretty_xml(doc_xml): """Pretty print XML doc. Args: doc_xml : str Well formed XML doc Returns: str: Pretty printed XML doc """ assert isinstance(doc_xml, str) dom_obj = xml.dom.minidom.parseString(doc_xml) pretty_xml = dom_obj.toprettyxml(indent=' ') # Remove empty lines in the result caused by a bug in toprettyxml() return re.sub(r'^\s*$\n', r'', pretty_xml, flags=re.MULTILINE)
python
def reformat_to_pretty_xml(doc_xml): """Pretty print XML doc. Args: doc_xml : str Well formed XML doc Returns: str: Pretty printed XML doc """ assert isinstance(doc_xml, str) dom_obj = xml.dom.minidom.parseString(doc_xml) pretty_xml = dom_obj.toprettyxml(indent=' ') # Remove empty lines in the result caused by a bug in toprettyxml() return re.sub(r'^\s*$\n', r'', pretty_xml, flags=re.MULTILINE)
[ "def", "reformat_to_pretty_xml", "(", "doc_xml", ")", ":", "assert", "isinstance", "(", "doc_xml", ",", "str", ")", "dom_obj", "=", "xml", ".", "dom", ".", "minidom", ".", "parseString", "(", "doc_xml", ")", "pretty_xml", "=", "dom_obj", ".", "toprettyxml", ...
Pretty print XML doc. Args: doc_xml : str Well formed XML doc Returns: str: Pretty printed XML doc
[ "Pretty", "print", "XML", "doc", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L197-L212
train
45,442
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
are_equivalent
def are_equivalent(a_xml, b_xml, encoding=None): """Return True if two XML docs are semantically equivalent, else False. - TODO: Include test for tails. Skipped for now because tails are not used in any D1 types. """ assert isinstance(a_xml, str) assert isinstance(b_xml, str) a_tree = str_to_etree(a_xml, encoding) b_tree = str_to_etree(b_xml, encoding) return are_equal_or_superset(a_tree, b_tree) and are_equal_or_superset( b_tree, a_tree )
python
def are_equivalent(a_xml, b_xml, encoding=None): """Return True if two XML docs are semantically equivalent, else False. - TODO: Include test for tails. Skipped for now because tails are not used in any D1 types. """ assert isinstance(a_xml, str) assert isinstance(b_xml, str) a_tree = str_to_etree(a_xml, encoding) b_tree = str_to_etree(b_xml, encoding) return are_equal_or_superset(a_tree, b_tree) and are_equal_or_superset( b_tree, a_tree )
[ "def", "are_equivalent", "(", "a_xml", ",", "b_xml", ",", "encoding", "=", "None", ")", ":", "assert", "isinstance", "(", "a_xml", ",", "str", ")", "assert", "isinstance", "(", "b_xml", ",", "str", ")", "a_tree", "=", "str_to_etree", "(", "a_xml", ",", ...
Return True if two XML docs are semantically equivalent, else False. - TODO: Include test for tails. Skipped for now because tails are not used in any D1 types.
[ "Return", "True", "if", "two", "XML", "docs", "are", "semantically", "equivalent", "else", "False", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L222-L235
train
45,443
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
are_equal_or_superset
def are_equal_or_superset(superset_tree, base_tree): """Return True if ``superset_tree`` is equal to or a superset of ``base_tree`` - Checks that all elements and attributes in ``superset_tree`` are present and contain the same values as in ``base_tree``. For elements, also checks that the order is the same. - Can be used for checking if one XML document is based on another, as long as all the information in ``base_tree`` is also present and unmodified in ``superset_tree``. """ try: _compare_attr(superset_tree, base_tree) _compare_text(superset_tree, base_tree) except CompareError as e: logger.debug(str(e)) return False return True
python
def are_equal_or_superset(superset_tree, base_tree): """Return True if ``superset_tree`` is equal to or a superset of ``base_tree`` - Checks that all elements and attributes in ``superset_tree`` are present and contain the same values as in ``base_tree``. For elements, also checks that the order is the same. - Can be used for checking if one XML document is based on another, as long as all the information in ``base_tree`` is also present and unmodified in ``superset_tree``. """ try: _compare_attr(superset_tree, base_tree) _compare_text(superset_tree, base_tree) except CompareError as e: logger.debug(str(e)) return False return True
[ "def", "are_equal_or_superset", "(", "superset_tree", ",", "base_tree", ")", ":", "try", ":", "_compare_attr", "(", "superset_tree", ",", "base_tree", ")", "_compare_text", "(", "superset_tree", ",", "base_tree", ")", "except", "CompareError", "as", "e", ":", "l...
Return True if ``superset_tree`` is equal to or a superset of ``base_tree`` - Checks that all elements and attributes in ``superset_tree`` are present and contain the same values as in ``base_tree``. For elements, also checks that the order is the same. - Can be used for checking if one XML document is based on another, as long as all the information in ``base_tree`` is also present and unmodified in ``superset_tree``.
[ "Return", "True", "if", "superset_tree", "is", "equal", "to", "or", "a", "superset", "of", "base_tree" ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L238-L255
train
45,444
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
are_equal_xml
def are_equal_xml(a_xml, b_xml): """Normalize and compare XML documents for equality. The document may or may not be a DataONE type. Args: a_xml: str b_xml: str XML documents to compare for equality. Returns: bool: ``True`` if the XML documents are semantically equivalent. """ a_dom = xml.dom.minidom.parseString(a_xml) b_dom = xml.dom.minidom.parseString(b_xml) return are_equal_elements(a_dom.documentElement, b_dom.documentElement)
python
def are_equal_xml(a_xml, b_xml): """Normalize and compare XML documents for equality. The document may or may not be a DataONE type. Args: a_xml: str b_xml: str XML documents to compare for equality. Returns: bool: ``True`` if the XML documents are semantically equivalent. """ a_dom = xml.dom.minidom.parseString(a_xml) b_dom = xml.dom.minidom.parseString(b_xml) return are_equal_elements(a_dom.documentElement, b_dom.documentElement)
[ "def", "are_equal_xml", "(", "a_xml", ",", "b_xml", ")", ":", "a_dom", "=", "xml", ".", "dom", ".", "minidom", ".", "parseString", "(", "a_xml", ")", "b_dom", "=", "xml", ".", "dom", ".", "minidom", ".", "parseString", "(", "b_xml", ")", "return", "a...
Normalize and compare XML documents for equality. The document may or may not be a DataONE type. Args: a_xml: str b_xml: str XML documents to compare for equality. Returns: bool: ``True`` if the XML documents are semantically equivalent.
[ "Normalize", "and", "compare", "XML", "documents", "for", "equality", ".", "The", "document", "may", "or", "may", "not", "be", "a", "DataONE", "type", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L337-L352
train
45,445
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
are_equal_elements
def are_equal_elements(a_el, b_el): """Normalize and compare ElementTrees for equality. Args: a_el: ElementTree b_el: ElementTree ElementTrees to compare for equality. Returns: bool: ``True`` if the ElementTrees are semantically equivalent. """ if a_el.tagName != b_el.tagName: return False if sorted(a_el.attributes.items()) != sorted(b_el.attributes.items()): return False if len(a_el.childNodes) != len(b_el.childNodes): return False for a_child_el, b_child_el in zip(a_el.childNodes, b_el.childNodes): if a_child_el.nodeType != b_child_el.nodeType: return False if ( a_child_el.nodeType == a_child_el.TEXT_NODE and a_child_el.data != b_child_el.data ): return False if a_child_el.nodeType == a_child_el.ELEMENT_NODE and not are_equal_elements( a_child_el, b_child_el ): return False return True
python
def are_equal_elements(a_el, b_el): """Normalize and compare ElementTrees for equality. Args: a_el: ElementTree b_el: ElementTree ElementTrees to compare for equality. Returns: bool: ``True`` if the ElementTrees are semantically equivalent. """ if a_el.tagName != b_el.tagName: return False if sorted(a_el.attributes.items()) != sorted(b_el.attributes.items()): return False if len(a_el.childNodes) != len(b_el.childNodes): return False for a_child_el, b_child_el in zip(a_el.childNodes, b_el.childNodes): if a_child_el.nodeType != b_child_el.nodeType: return False if ( a_child_el.nodeType == a_child_el.TEXT_NODE and a_child_el.data != b_child_el.data ): return False if a_child_el.nodeType == a_child_el.ELEMENT_NODE and not are_equal_elements( a_child_el, b_child_el ): return False return True
[ "def", "are_equal_elements", "(", "a_el", ",", "b_el", ")", ":", "if", "a_el", ".", "tagName", "!=", "b_el", ".", "tagName", ":", "return", "False", "if", "sorted", "(", "a_el", ".", "attributes", ".", "items", "(", ")", ")", "!=", "sorted", "(", "b_...
Normalize and compare ElementTrees for equality. Args: a_el: ElementTree b_el: ElementTree ElementTrees to compare for equality. Returns: bool: ``True`` if the ElementTrees are semantically equivalent.
[ "Normalize", "and", "compare", "ElementTrees", "for", "equality", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L370-L400
train
45,446
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
sort_elements_by_child_values
def sort_elements_by_child_values(obj_pyxb, child_name_list): """In-place sort simple or complex elements in a PyXB object by values they contain in child elements. Args: obj_pyxb: PyXB object child_name_list: list of str List of element names that are direct children of the PyXB object. """ obj_pyxb.sort(key=lambda x: [get_auto(getattr(x, n)) for n in child_name_list])
python
def sort_elements_by_child_values(obj_pyxb, child_name_list): """In-place sort simple or complex elements in a PyXB object by values they contain in child elements. Args: obj_pyxb: PyXB object child_name_list: list of str List of element names that are direct children of the PyXB object. """ obj_pyxb.sort(key=lambda x: [get_auto(getattr(x, n)) for n in child_name_list])
[ "def", "sort_elements_by_child_values", "(", "obj_pyxb", ",", "child_name_list", ")", ":", "obj_pyxb", ".", "sort", "(", "key", "=", "lambda", "x", ":", "[", "get_auto", "(", "getattr", "(", "x", ",", "n", ")", ")", "for", "n", "in", "child_name_list", "...
In-place sort simple or complex elements in a PyXB object by values they contain in child elements. Args: obj_pyxb: PyXB object child_name_list: list of str List of element names that are direct children of the PyXB object.
[ "In", "-", "place", "sort", "simple", "or", "complex", "elements", "in", "a", "PyXB", "object", "by", "values", "they", "contain", "in", "child", "elements", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L412-L423
train
45,447
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
format_diff_pyxb
def format_diff_pyxb(a_pyxb, b_pyxb): """Create a diff between two PyXB objects. Args: a_pyxb: PyXB object b_pyxb: PyXB object Returns: str : `Differ`-style delta """ return '\n'.join( difflib.ndiff( serialize_to_xml_str(a_pyxb).splitlines(), serialize_to_xml_str(b_pyxb).splitlines(), ) )
python
def format_diff_pyxb(a_pyxb, b_pyxb): """Create a diff between two PyXB objects. Args: a_pyxb: PyXB object b_pyxb: PyXB object Returns: str : `Differ`-style delta """ return '\n'.join( difflib.ndiff( serialize_to_xml_str(a_pyxb).splitlines(), serialize_to_xml_str(b_pyxb).splitlines(), ) )
[ "def", "format_diff_pyxb", "(", "a_pyxb", ",", "b_pyxb", ")", ":", "return", "'\\n'", ".", "join", "(", "difflib", ".", "ndiff", "(", "serialize_to_xml_str", "(", "a_pyxb", ")", ".", "splitlines", "(", ")", ",", "serialize_to_xml_str", "(", "b_pyxb", ")", ...
Create a diff between two PyXB objects. Args: a_pyxb: PyXB object b_pyxb: PyXB object Returns: str : `Differ`-style delta
[ "Create", "a", "diff", "between", "two", "PyXB", "objects", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L426-L442
train
45,448
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
format_diff_xml
def format_diff_xml(a_xml, b_xml): """Create a diff between two XML documents. Args: a_xml: str b_xml: str Returns: str : `Differ`-style delta """ return '\n'.join( difflib.ndiff( reformat_to_pretty_xml(a_xml).splitlines(), reformat_to_pretty_xml(b_xml).splitlines(), ) )
python
def format_diff_xml(a_xml, b_xml): """Create a diff between two XML documents. Args: a_xml: str b_xml: str Returns: str : `Differ`-style delta """ return '\n'.join( difflib.ndiff( reformat_to_pretty_xml(a_xml).splitlines(), reformat_to_pretty_xml(b_xml).splitlines(), ) )
[ "def", "format_diff_xml", "(", "a_xml", ",", "b_xml", ")", ":", "return", "'\\n'", ".", "join", "(", "difflib", ".", "ndiff", "(", "reformat_to_pretty_xml", "(", "a_xml", ")", ".", "splitlines", "(", ")", ",", "reformat_to_pretty_xml", "(", "b_xml", ")", "...
Create a diff between two XML documents. Args: a_xml: str b_xml: str Returns: str : `Differ`-style delta
[ "Create", "a", "diff", "between", "two", "XML", "documents", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L445-L461
train
45,449
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
get_opt_attr
def get_opt_attr(obj_pyxb, attr_str, default_val=None): """Get an optional attribute value from a PyXB element. The attributes for elements that are optional according to the schema and not set in the PyXB object are present and set to None. PyXB validation will fail if required elements are missing. Args: obj_pyxb: PyXB object attr_str: str Name of an attribute that the PyXB object may contain. default_val: any object Value to return if the attribute is not present. Returns: str : Value of the attribute if present, else ``default_val``. """ v = getattr(obj_pyxb, attr_str, default_val) return v if v is not None else default_val
python
def get_opt_attr(obj_pyxb, attr_str, default_val=None): """Get an optional attribute value from a PyXB element. The attributes for elements that are optional according to the schema and not set in the PyXB object are present and set to None. PyXB validation will fail if required elements are missing. Args: obj_pyxb: PyXB object attr_str: str Name of an attribute that the PyXB object may contain. default_val: any object Value to return if the attribute is not present. Returns: str : Value of the attribute if present, else ``default_val``. """ v = getattr(obj_pyxb, attr_str, default_val) return v if v is not None else default_val
[ "def", "get_opt_attr", "(", "obj_pyxb", ",", "attr_str", ",", "default_val", "=", "None", ")", ":", "v", "=", "getattr", "(", "obj_pyxb", ",", "attr_str", ",", "default_val", ")", "return", "v", "if", "v", "is", "not", "None", "else", "default_val" ]
Get an optional attribute value from a PyXB element. The attributes for elements that are optional according to the schema and not set in the PyXB object are present and set to None. PyXB validation will fail if required elements are missing. Args: obj_pyxb: PyXB object attr_str: str Name of an attribute that the PyXB object may contain. default_val: any object Value to return if the attribute is not present. Returns: str : Value of the attribute if present, else ``default_val``.
[ "Get", "an", "optional", "attribute", "value", "from", "a", "PyXB", "element", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L508-L529
train
45,450
DataONEorg/d1_python
lib_common/src/d1_common/xml.py
get_opt_val
def get_opt_val(obj_pyxb, attr_str, default_val=None): """Get an optional Simple Content value from a PyXB element. The attributes for elements that are optional according to the schema and not set in the PyXB object are present and set to None. PyXB validation will fail if required elements are missing. Args: obj_pyxb: PyXB object attr_str: str Name of an attribute that the PyXB object may contain. default_val: any object Value to return if the attribute is not present. Returns: str : Value of the attribute if present, else ``default_val``. """ try: return get_req_val(getattr(obj_pyxb, attr_str)) except (ValueError, AttributeError): return default_val
python
def get_opt_val(obj_pyxb, attr_str, default_val=None): """Get an optional Simple Content value from a PyXB element. The attributes for elements that are optional according to the schema and not set in the PyXB object are present and set to None. PyXB validation will fail if required elements are missing. Args: obj_pyxb: PyXB object attr_str: str Name of an attribute that the PyXB object may contain. default_val: any object Value to return if the attribute is not present. Returns: str : Value of the attribute if present, else ``default_val``. """ try: return get_req_val(getattr(obj_pyxb, attr_str)) except (ValueError, AttributeError): return default_val
[ "def", "get_opt_val", "(", "obj_pyxb", ",", "attr_str", ",", "default_val", "=", "None", ")", ":", "try", ":", "return", "get_req_val", "(", "getattr", "(", "obj_pyxb", ",", "attr_str", ")", ")", "except", "(", "ValueError", ",", "AttributeError", ")", ":"...
Get an optional Simple Content value from a PyXB element. The attributes for elements that are optional according to the schema and not set in the PyXB object are present and set to None. PyXB validation will fail if required elements are missing. Args: obj_pyxb: PyXB object attr_str: str Name of an attribute that the PyXB object may contain. default_val: any object Value to return if the attribute is not present. Returns: str : Value of the attribute if present, else ``default_val``.
[ "Get", "an", "optional", "Simple", "Content", "value", "from", "a", "PyXB", "element", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/xml.py#L532-L556
train
45,451
genialis/resolwe
resolwe/flow/utils/exceptions.py
resolwe_exception_handler
def resolwe_exception_handler(exc, context): """Handle exceptions raised in API and make them nicer. To enable this, you have to add it to the settings: .. code:: python REST_FRAMEWORK = { 'EXCEPTION_HANDLER': 'resolwe.flow.utils.exceptions.resolwe_exception_handler', } """ response = exception_handler(exc, context) if isinstance(exc, ValidationError): if response is None: response = Response({}) response.status_code = 400 response.data['error'] = exc.message return response
python
def resolwe_exception_handler(exc, context): """Handle exceptions raised in API and make them nicer. To enable this, you have to add it to the settings: .. code:: python REST_FRAMEWORK = { 'EXCEPTION_HANDLER': 'resolwe.flow.utils.exceptions.resolwe_exception_handler', } """ response = exception_handler(exc, context) if isinstance(exc, ValidationError): if response is None: response = Response({}) response.status_code = 400 response.data['error'] = exc.message return response
[ "def", "resolwe_exception_handler", "(", "exc", ",", "context", ")", ":", "response", "=", "exception_handler", "(", "exc", ",", "context", ")", "if", "isinstance", "(", "exc", ",", "ValidationError", ")", ":", "if", "response", "is", "None", ":", "response"...
Handle exceptions raised in API and make them nicer. To enable this, you have to add it to the settings: .. code:: python REST_FRAMEWORK = { 'EXCEPTION_HANDLER': 'resolwe.flow.utils.exceptions.resolwe_exception_handler', }
[ "Handle", "exceptions", "raised", "in", "API", "and", "make", "them", "nicer", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/utils/exceptions.py#L16-L36
train
45,452
DataONEorg/d1_python
utilities/src/d1_util/jwt_token_tasks.py
validate_and_decode
def validate_and_decode(jwt_bu64, cert_obj): """Example for validating the signature of a JWT using only the cryptography library. Note that this does NOT validate the claims in the claim set. """ public_key = cert_obj.public_key() message = '.'.join(d1_common.cert.jwt.get_bu64_tup(jwt_bu64)[:2]) signature = d1_common.cert.jwt.get_jwt_tup(jwt_bu64)[2] try: public_key.verify( signature, message, cryptography.hazmat.primitives.asymmetric.padding.PKCS1v15(), cryptography.hazmat.primitives.hashes.SHA256(), ) except cryptography.exceptions.InvalidSignature as e: raise Exception('Signature is invalid. error="{}"'.format(str(e))) return d1_common.cert.jwt.get_jwt_dict(jwt_bu64)
python
def validate_and_decode(jwt_bu64, cert_obj): """Example for validating the signature of a JWT using only the cryptography library. Note that this does NOT validate the claims in the claim set. """ public_key = cert_obj.public_key() message = '.'.join(d1_common.cert.jwt.get_bu64_tup(jwt_bu64)[:2]) signature = d1_common.cert.jwt.get_jwt_tup(jwt_bu64)[2] try: public_key.verify( signature, message, cryptography.hazmat.primitives.asymmetric.padding.PKCS1v15(), cryptography.hazmat.primitives.hashes.SHA256(), ) except cryptography.exceptions.InvalidSignature as e: raise Exception('Signature is invalid. error="{}"'.format(str(e))) return d1_common.cert.jwt.get_jwt_dict(jwt_bu64)
[ "def", "validate_and_decode", "(", "jwt_bu64", ",", "cert_obj", ")", ":", "public_key", "=", "cert_obj", ".", "public_key", "(", ")", "message", "=", "'.'", ".", "join", "(", "d1_common", ".", "cert", ".", "jwt", ".", "get_bu64_tup", "(", "jwt_bu64", ")", ...
Example for validating the signature of a JWT using only the cryptography library. Note that this does NOT validate the claims in the claim set.
[ "Example", "for", "validating", "the", "signature", "of", "a", "JWT", "using", "only", "the", "cryptography", "library", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/utilities/src/d1_util/jwt_token_tasks.py#L82-L101
train
45,453
DataONEorg/d1_python
utilities/src/d1_util/jwt_token_tasks.py
find_valid_combinations
def find_valid_combinations(cert_file_name_list, jwt_file_name_list): """Given a list of cert and JWT file names, print a list showing each combination along with indicators for combinations where the JWT signature was successfully validated with the cert.""" for cert_file_name in cert_file_name_list: cert_pem = '' # self.test_files.load_utf8_to_str(cert_file_name) cert_obj = d1_common.cert.x509.deserialize_pem(cert_pem) # d1_common.cert.x509.log_cert_info(logging.info, 'CERT', cert_obj) for jwt_file_name in jwt_file_name_list: jwt_bu64 = '' # self.test_files.load_utf8_to_str(jwt_file_name) # d1_common.cert.jwt.log_jwt_bu64_info(logging.info, 'JWT', jwt_bu64) is_ok = False try: d1_common.cert.jwt.validate_and_decode(jwt_bu64, cert_obj) except d1_common.cert.jwt.JwtException as e: logging.info('Invalid. msg="{}"'.format(str(e))) else: is_ok = True logging.info( '{} {} {}'.format( '***' if is_ok else ' ', cert_file_name, jwt_file_name ) )
python
def find_valid_combinations(cert_file_name_list, jwt_file_name_list): """Given a list of cert and JWT file names, print a list showing each combination along with indicators for combinations where the JWT signature was successfully validated with the cert.""" for cert_file_name in cert_file_name_list: cert_pem = '' # self.test_files.load_utf8_to_str(cert_file_name) cert_obj = d1_common.cert.x509.deserialize_pem(cert_pem) # d1_common.cert.x509.log_cert_info(logging.info, 'CERT', cert_obj) for jwt_file_name in jwt_file_name_list: jwt_bu64 = '' # self.test_files.load_utf8_to_str(jwt_file_name) # d1_common.cert.jwt.log_jwt_bu64_info(logging.info, 'JWT', jwt_bu64) is_ok = False try: d1_common.cert.jwt.validate_and_decode(jwt_bu64, cert_obj) except d1_common.cert.jwt.JwtException as e: logging.info('Invalid. msg="{}"'.format(str(e))) else: is_ok = True logging.info( '{} {} {}'.format( '***' if is_ok else ' ', cert_file_name, jwt_file_name ) )
[ "def", "find_valid_combinations", "(", "cert_file_name_list", ",", "jwt_file_name_list", ")", ":", "for", "cert_file_name", "in", "cert_file_name_list", ":", "cert_pem", "=", "''", "# self.test_files.load_utf8_to_str(cert_file_name)", "cert_obj", "=", "d1_common", ".", "cer...
Given a list of cert and JWT file names, print a list showing each combination along with indicators for combinations where the JWT signature was successfully validated with the cert.
[ "Given", "a", "list", "of", "cert", "and", "JWT", "file", "names", "print", "a", "list", "showing", "each", "combination", "along", "with", "indicators", "for", "combinations", "where", "the", "JWT", "signature", "was", "successfully", "validated", "with", "th...
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/utilities/src/d1_util/jwt_token_tasks.py#L104-L126
train
45,454
DataONEorg/d1_python
lib_common/src/d1_common/url.py
parseUrl
def parseUrl(url): """Return a dict containing scheme, netloc, url, params, query, fragment keys. query is a dict where the values are always lists. If the query key appears only once in the URL, the list will have a single value. """ scheme, netloc, url, params, query, fragment = urllib.parse.urlparse(url) query_dict = { k: sorted(v) if len(v) > 1 else v[0] for k, v in list(urllib.parse.parse_qs(query).items()) } return { 'scheme': scheme, 'netloc': netloc, 'url': url, 'params': params, 'query': query_dict, 'fragment': fragment, }
python
def parseUrl(url): """Return a dict containing scheme, netloc, url, params, query, fragment keys. query is a dict where the values are always lists. If the query key appears only once in the URL, the list will have a single value. """ scheme, netloc, url, params, query, fragment = urllib.parse.urlparse(url) query_dict = { k: sorted(v) if len(v) > 1 else v[0] for k, v in list(urllib.parse.parse_qs(query).items()) } return { 'scheme': scheme, 'netloc': netloc, 'url': url, 'params': params, 'query': query_dict, 'fragment': fragment, }
[ "def", "parseUrl", "(", "url", ")", ":", "scheme", ",", "netloc", ",", "url", ",", "params", ",", "query", ",", "fragment", "=", "urllib", ".", "parse", ".", "urlparse", "(", "url", ")", "query_dict", "=", "{", "k", ":", "sorted", "(", "v", ")", ...
Return a dict containing scheme, netloc, url, params, query, fragment keys. query is a dict where the values are always lists. If the query key appears only once in the URL, the list will have a single value.
[ "Return", "a", "dict", "containing", "scheme", "netloc", "url", "params", "query", "fragment", "keys", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/url.py#L28-L47
train
45,455
DataONEorg/d1_python
lib_common/src/d1_common/url.py
encodePathElement
def encodePathElement(element): """Encode a URL path element according to RFC3986.""" return urllib.parse.quote( ( element.encode('utf-8') if isinstance(element, str) else str(element) if isinstance(element, int) else element ), safe=d1_common.const.URL_PATHELEMENT_SAFE_CHARS, )
python
def encodePathElement(element): """Encode a URL path element according to RFC3986.""" return urllib.parse.quote( ( element.encode('utf-8') if isinstance(element, str) else str(element) if isinstance(element, int) else element ), safe=d1_common.const.URL_PATHELEMENT_SAFE_CHARS, )
[ "def", "encodePathElement", "(", "element", ")", ":", "return", "urllib", ".", "parse", ".", "quote", "(", "(", "element", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "element", ",", "str", ")", "else", "str", "(", "element", ")", "if"...
Encode a URL path element according to RFC3986.
[ "Encode", "a", "URL", "path", "element", "according", "to", "RFC3986", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/url.py#L59-L70
train
45,456
DataONEorg/d1_python
lib_common/src/d1_common/url.py
encodeQueryElement
def encodeQueryElement(element): """Encode a URL query element according to RFC3986.""" return urllib.parse.quote( ( element.encode('utf-8') if isinstance(element, str) else str(element) if isinstance(element, int) else element ), safe=d1_common.const.URL_QUERYELEMENT_SAFE_CHARS, )
python
def encodeQueryElement(element): """Encode a URL query element according to RFC3986.""" return urllib.parse.quote( ( element.encode('utf-8') if isinstance(element, str) else str(element) if isinstance(element, int) else element ), safe=d1_common.const.URL_QUERYELEMENT_SAFE_CHARS, )
[ "def", "encodeQueryElement", "(", "element", ")", ":", "return", "urllib", ".", "parse", ".", "quote", "(", "(", "element", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "element", ",", "str", ")", "else", "str", "(", "element", ")", "if...
Encode a URL query element according to RFC3986.
[ "Encode", "a", "URL", "query", "element", "according", "to", "RFC3986", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/url.py#L78-L89
train
45,457
DataONEorg/d1_python
lib_common/src/d1_common/url.py
urlencode
def urlencode(query, doseq=0): """Modified version of the standard urllib.urlencode that is conforms to RFC3986. The urllib version encodes spaces as '+' which can lead to inconsistency. This version will always encode spaces as '%20'. Encode a sequence of two-element tuples or dictionary into a URL query string. If any values in the query arg are sequences and doseq is true, each sequence element is converted to a separate parameter. If the query arg is a sequence of two-element tuples, the order of the parameters in the output will match the order of parameters in the input. """ if hasattr(query, "items"): # Remove None parameters from query. Dictionaries are mutable, so we can # remove the the items directly. dict.keys() creates a copy of the # dictionary keys, making it safe to remove elements from the dictionary # while iterating. for k in list(query.keys()): if query[k] is None: del query[k] # mapping objects query = list(query.items()) else: # Remove None parameters from query. Tuples are immutable, so we have to # build a new version that does not contain the elements we want to remove, # and replace the original with it. query = list(filter((lambda x: x[1] is not None), query)) # it's a bother at times that strings and string-like objects are # sequences... 
try: # non-sequence items should not work with len() # non-empty strings will fail this if len(query) and not isinstance(query[0], tuple): raise TypeError # zero-length sequences of all types will get here and succeed, # but that's a minor nit - since the original implementation # allowed empty dicts that type of behavior probably should be # preserved for consistency except TypeError: ty, va, tb = sys.exc_info() raise TypeError( "not a valid non-string sequence or mapping object" ).with_traceback(tb) l = [] if not doseq: # preserve old behavior for k, v in query: k = encodeQueryElement(str(k)) v = encodeQueryElement(str(v)) l.append(k + '=' + v) else: for k, v in query: k = encodeQueryElement(str(k)) if isinstance(v, str): v = encodeQueryElement(v) l.append(k + '=' + v) elif isinstance(v, str): # is there a reasonable way to convert to ASCII? # encode generates a string, but "replace" or "ignore" # lose information and "strict" can raise UnicodeError v = encodeQueryElement(v.encode("ASCII", "replace")) l.append(k + '=' + v) else: try: # is this a sufficient test for sequence-ness? len(v) except TypeError: # not a sequence v = encodeQueryElement(str(v)) l.append(k + '=' + v) else: # loop over the sequence for elt in v: l.append(k + '=' + encodeQueryElement(str(elt))) return '&'.join(sorted(l))
python
def urlencode(query, doseq=0): """Modified version of the standard urllib.urlencode that is conforms to RFC3986. The urllib version encodes spaces as '+' which can lead to inconsistency. This version will always encode spaces as '%20'. Encode a sequence of two-element tuples or dictionary into a URL query string. If any values in the query arg are sequences and doseq is true, each sequence element is converted to a separate parameter. If the query arg is a sequence of two-element tuples, the order of the parameters in the output will match the order of parameters in the input. """ if hasattr(query, "items"): # Remove None parameters from query. Dictionaries are mutable, so we can # remove the the items directly. dict.keys() creates a copy of the # dictionary keys, making it safe to remove elements from the dictionary # while iterating. for k in list(query.keys()): if query[k] is None: del query[k] # mapping objects query = list(query.items()) else: # Remove None parameters from query. Tuples are immutable, so we have to # build a new version that does not contain the elements we want to remove, # and replace the original with it. query = list(filter((lambda x: x[1] is not None), query)) # it's a bother at times that strings and string-like objects are # sequences... 
try: # non-sequence items should not work with len() # non-empty strings will fail this if len(query) and not isinstance(query[0], tuple): raise TypeError # zero-length sequences of all types will get here and succeed, # but that's a minor nit - since the original implementation # allowed empty dicts that type of behavior probably should be # preserved for consistency except TypeError: ty, va, tb = sys.exc_info() raise TypeError( "not a valid non-string sequence or mapping object" ).with_traceback(tb) l = [] if not doseq: # preserve old behavior for k, v in query: k = encodeQueryElement(str(k)) v = encodeQueryElement(str(v)) l.append(k + '=' + v) else: for k, v in query: k = encodeQueryElement(str(k)) if isinstance(v, str): v = encodeQueryElement(v) l.append(k + '=' + v) elif isinstance(v, str): # is there a reasonable way to convert to ASCII? # encode generates a string, but "replace" or "ignore" # lose information and "strict" can raise UnicodeError v = encodeQueryElement(v.encode("ASCII", "replace")) l.append(k + '=' + v) else: try: # is this a sufficient test for sequence-ness? len(v) except TypeError: # not a sequence v = encodeQueryElement(str(v)) l.append(k + '=' + v) else: # loop over the sequence for elt in v: l.append(k + '=' + encodeQueryElement(str(elt))) return '&'.join(sorted(l))
[ "def", "urlencode", "(", "query", ",", "doseq", "=", "0", ")", ":", "if", "hasattr", "(", "query", ",", "\"items\"", ")", ":", "# Remove None parameters from query. Dictionaries are mutable, so we can", "# remove the the items directly. dict.keys() creates a copy of the", "# ...
Modified version of the standard urllib.urlencode that is conforms to RFC3986. The urllib version encodes spaces as '+' which can lead to inconsistency. This version will always encode spaces as '%20'. Encode a sequence of two-element tuples or dictionary into a URL query string. If any values in the query arg are sequences and doseq is true, each sequence element is converted to a separate parameter. If the query arg is a sequence of two-element tuples, the order of the parameters in the output will match the order of parameters in the input.
[ "Modified", "version", "of", "the", "standard", "urllib", ".", "urlencode", "that", "is", "conforms", "to", "RFC3986", ".", "The", "urllib", "version", "encodes", "spaces", "as", "+", "which", "can", "lead", "to", "inconsistency", ".", "This", "version", "wi...
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/url.py#L129-L207
train
45,458
DataONEorg/d1_python
lib_common/src/d1_common/url.py
makeCNBaseURL
def makeCNBaseURL(url): """Attempt to create a valid CN BaseURL when one or more sections of the URL are missing.""" o = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_CN_PROTOCOL) if o.netloc and o.path: netloc = o.netloc path = o.path elif o.netloc: netloc = o.netloc path = d1_common.const.DEFAULT_CN_PATH elif o.path: s = o.path.split('/', 1) netloc = s[0] if len(s) == 1: path = d1_common.const.DEFAULT_CN_PATH else: path = s[1] else: netloc = d1_common.const.DEFAULT_CN_HOST path = d1_common.const.DEFAULT_CN_PATH return urllib.parse.urlunparse( (o.scheme, netloc, path, o.params, o.query, o.fragment) )
python
def makeCNBaseURL(url): """Attempt to create a valid CN BaseURL when one or more sections of the URL are missing.""" o = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_CN_PROTOCOL) if o.netloc and o.path: netloc = o.netloc path = o.path elif o.netloc: netloc = o.netloc path = d1_common.const.DEFAULT_CN_PATH elif o.path: s = o.path.split('/', 1) netloc = s[0] if len(s) == 1: path = d1_common.const.DEFAULT_CN_PATH else: path = s[1] else: netloc = d1_common.const.DEFAULT_CN_HOST path = d1_common.const.DEFAULT_CN_PATH return urllib.parse.urlunparse( (o.scheme, netloc, path, o.params, o.query, o.fragment) )
[ "def", "makeCNBaseURL", "(", "url", ")", ":", "o", "=", "urllib", ".", "parse", ".", "urlparse", "(", "url", ",", "scheme", "=", "d1_common", ".", "const", ".", "DEFAULT_CN_PROTOCOL", ")", "if", "o", ".", "netloc", "and", "o", ".", "path", ":", "netl...
Attempt to create a valid CN BaseURL when one or more sections of the URL are missing.
[ "Attempt", "to", "create", "a", "valid", "CN", "BaseURL", "when", "one", "or", "more", "sections", "of", "the", "URL", "are", "missing", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/url.py#L210-L232
train
45,459
DataONEorg/d1_python
lib_common/src/d1_common/url.py
makeMNBaseURL
def makeMNBaseURL(url): """Attempt to create a valid MN BaseURL when one or more sections of the URL are missing.""" o = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_MN_PROTOCOL) if o.netloc and o.path: netloc = o.netloc path = o.path elif o.netloc: netloc = o.netloc path = d1_common.const.DEFAULT_MN_PATH elif o.path: s = o.path.split('/', 1) netloc = s[0] if len(s) == 1: path = d1_common.const.DEFAULT_MN_PATH else: path = s[1] else: netloc = d1_common.const.DEFAULT_MN_HOST path = d1_common.const.DEFAULT_MN_PATH return urllib.parse.urlunparse( (o.scheme, netloc, path, o.params, o.query, o.fragment) )
python
def makeMNBaseURL(url): """Attempt to create a valid MN BaseURL when one or more sections of the URL are missing.""" o = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_MN_PROTOCOL) if o.netloc and o.path: netloc = o.netloc path = o.path elif o.netloc: netloc = o.netloc path = d1_common.const.DEFAULT_MN_PATH elif o.path: s = o.path.split('/', 1) netloc = s[0] if len(s) == 1: path = d1_common.const.DEFAULT_MN_PATH else: path = s[1] else: netloc = d1_common.const.DEFAULT_MN_HOST path = d1_common.const.DEFAULT_MN_PATH return urllib.parse.urlunparse( (o.scheme, netloc, path, o.params, o.query, o.fragment) )
[ "def", "makeMNBaseURL", "(", "url", ")", ":", "o", "=", "urllib", ".", "parse", ".", "urlparse", "(", "url", ",", "scheme", "=", "d1_common", ".", "const", ".", "DEFAULT_MN_PROTOCOL", ")", "if", "o", ".", "netloc", "and", "o", ".", "path", ":", "netl...
Attempt to create a valid MN BaseURL when one or more sections of the URL are missing.
[ "Attempt", "to", "create", "a", "valid", "MN", "BaseURL", "when", "one", "or", "more", "sections", "of", "the", "URL", "are", "missing", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/url.py#L235-L257
train
45,460
DataONEorg/d1_python
lib_common/src/d1_common/url.py
find_url_mismatches
def find_url_mismatches(a_url, b_url): """Given two URLs, return a list of any mismatches. If the list is empty, the URLs are equivalent. Implemented by parsing and comparing the elements. See RFC 1738 for details. """ diff_list = [] a_parts = urllib.parse.urlparse(a_url) b_parts = urllib.parse.urlparse(b_url) # scheme if a_parts.scheme.lower() != b_parts.scheme.lower(): diff_list.append( 'Schemes differ. a="{}" b="{}" differ'.format( a_parts.scheme.lower(), b_parts.scheme.lower() ) ) # netloc if a_parts.netloc.lower() != b_parts.netloc.lower(): diff_list.append( 'Network locations differ. a="{}" b="{}"'.format( a_parts.netloc.lower(), b_parts.netloc.lower ) ) # path if a_parts.path != b_parts.path: diff_list.append( 'Paths differ: a="{}" b="{}"'.format(a_parts.path, b_parts.path) ) # fragment if a_parts.fragment != b_parts.fragment: diff_list.append( 'Fragments differ. a="{}" b="{}"'.format(a_parts.fragment, b_parts.fragment) ) # param a_param_list = sorted(a_parts.params.split(";")) b_param_list = sorted(b_parts.params.split(";")) if a_param_list != b_param_list: diff_list.append( 'Parameters differ. a="{}" b="{}"'.format( ', '.join(a_param_list), ', '.join(b_param_list) ) ) # query a_query_dict = urllib.parse.parse_qs(a_parts.query) b_query_dict = urllib.parse.parse_qs(b_parts.query) if len(list(a_query_dict.keys())) != len(list(b_query_dict.keys())): diff_list.append( 'Number of query keys differs. a={} b={}'.format( len(list(a_query_dict.keys())), len(list(b_query_dict.keys())) ) ) for a_key in b_query_dict: if a_key not in list(b_query_dict.keys()): diff_list.append( 'Query key in first missing in second. a_key="{}"'.format(a_key) ) elif sorted(a_query_dict[a_key]) != sorted(b_query_dict[a_key]): diff_list.append( 'Query values differ. key="{}" a_value="{}" b_value="{}"'.format( a_key, sorted(a_query_dict[a_key]), sorted(b_query_dict[a_key]) ) ) for b_key in b_query_dict: if b_key not in a_query_dict: diff_list.append( 'Query key in second missing in first. 
b_key="{}"'.format(b_key) ) return diff_list
python
def find_url_mismatches(a_url, b_url): """Given two URLs, return a list of any mismatches. If the list is empty, the URLs are equivalent. Implemented by parsing and comparing the elements. See RFC 1738 for details. """ diff_list = [] a_parts = urllib.parse.urlparse(a_url) b_parts = urllib.parse.urlparse(b_url) # scheme if a_parts.scheme.lower() != b_parts.scheme.lower(): diff_list.append( 'Schemes differ. a="{}" b="{}" differ'.format( a_parts.scheme.lower(), b_parts.scheme.lower() ) ) # netloc if a_parts.netloc.lower() != b_parts.netloc.lower(): diff_list.append( 'Network locations differ. a="{}" b="{}"'.format( a_parts.netloc.lower(), b_parts.netloc.lower ) ) # path if a_parts.path != b_parts.path: diff_list.append( 'Paths differ: a="{}" b="{}"'.format(a_parts.path, b_parts.path) ) # fragment if a_parts.fragment != b_parts.fragment: diff_list.append( 'Fragments differ. a="{}" b="{}"'.format(a_parts.fragment, b_parts.fragment) ) # param a_param_list = sorted(a_parts.params.split(";")) b_param_list = sorted(b_parts.params.split(";")) if a_param_list != b_param_list: diff_list.append( 'Parameters differ. a="{}" b="{}"'.format( ', '.join(a_param_list), ', '.join(b_param_list) ) ) # query a_query_dict = urllib.parse.parse_qs(a_parts.query) b_query_dict = urllib.parse.parse_qs(b_parts.query) if len(list(a_query_dict.keys())) != len(list(b_query_dict.keys())): diff_list.append( 'Number of query keys differs. a={} b={}'.format( len(list(a_query_dict.keys())), len(list(b_query_dict.keys())) ) ) for a_key in b_query_dict: if a_key not in list(b_query_dict.keys()): diff_list.append( 'Query key in first missing in second. a_key="{}"'.format(a_key) ) elif sorted(a_query_dict[a_key]) != sorted(b_query_dict[a_key]): diff_list.append( 'Query values differ. key="{}" a_value="{}" b_value="{}"'.format( a_key, sorted(a_query_dict[a_key]), sorted(b_query_dict[a_key]) ) ) for b_key in b_query_dict: if b_key not in a_query_dict: diff_list.append( 'Query key in second missing in first. 
b_key="{}"'.format(b_key) ) return diff_list
[ "def", "find_url_mismatches", "(", "a_url", ",", "b_url", ")", ":", "diff_list", "=", "[", "]", "a_parts", "=", "urllib", ".", "parse", ".", "urlparse", "(", "a_url", ")", "b_parts", "=", "urllib", ".", "parse", ".", "urlparse", "(", "b_url", ")", "# s...
Given two URLs, return a list of any mismatches. If the list is empty, the URLs are equivalent. Implemented by parsing and comparing the elements. See RFC 1738 for details.
[ "Given", "two", "URLs", "return", "a", "list", "of", "any", "mismatches", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/url.py#L260-L328
train
45,461
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
CommandProcessor.search
def search(self, line): """CN search.""" if self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) == "solr": return self._search_solr(line) raise d1_cli.impl.exceptions.InvalidArguments( "Unsupported query engine: {}".format( self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) ) )
python
def search(self, line): """CN search.""" if self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) == "solr": return self._search_solr(line) raise d1_cli.impl.exceptions.InvalidArguments( "Unsupported query engine: {}".format( self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) ) )
[ "def", "search", "(", "self", ",", "line", ")", ":", "if", "self", ".", "_session", ".", "get", "(", "d1_cli", ".", "impl", ".", "session", ".", "QUERY_ENGINE_NAME", ")", "==", "\"solr\"", ":", "return", "self", ".", "_search_solr", "(", "line", ")", ...
CN search.
[ "CN", "search", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_processor.py#L93-L101
train
45,462
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
CommandProcessor.resolve
def resolve(self, pid): """Get Object Locations for Object.""" client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_location_list_pyxb = client.resolve(pid) for location in object_location_list_pyxb.objectLocation: d1_cli.impl.util.print_info(location.url)
python
def resolve(self, pid): """Get Object Locations for Object.""" client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_location_list_pyxb = client.resolve(pid) for location in object_location_list_pyxb.objectLocation: d1_cli.impl.util.print_info(location.url)
[ "def", "resolve", "(", "self", ",", "pid", ")", ":", "client", "=", "d1_cli", ".", "impl", ".", "client", ".", "CLICNClient", "(", "*", "*", "self", ".", "_cn_client_connect_params_from_session", "(", ")", ")", "object_location_list_pyxb", "=", "client", "."...
Get Object Locations for Object.
[ "Get", "Object", "Locations", "for", "Object", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_processor.py#L111-L118
train
45,463
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
CommandProcessor.science_object_get
def science_object_get(self, pid, path): """First try the MN set in the session. Then try to resolve via the CN set in the session. """ mn_client = d1_cli.impl.client.CLIMNClient( **self._mn_client_connect_params_from_session() ) try: response = mn_client.get(pid) except d1_common.types.exceptions.DataONEException: pass else: self._output(response, path) return cn_client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_location_list_pyxb = cn_client.resolve(pid) for location in object_location_list_pyxb.objectLocation: try: params = self._mn_client_connect_params_from_session() params["base_url"] = location.baseURL mn_client = d1_cli.impl.client.CLIMNClient(**params) response = mn_client.get(pid) except d1_common.types.exceptions.DataONEException: pass else: self._output(response, path) return raise d1_cli.impl.exceptions.CLIError("Could not find object: {}".format(pid))
python
def science_object_get(self, pid, path): """First try the MN set in the session. Then try to resolve via the CN set in the session. """ mn_client = d1_cli.impl.client.CLIMNClient( **self._mn_client_connect_params_from_session() ) try: response = mn_client.get(pid) except d1_common.types.exceptions.DataONEException: pass else: self._output(response, path) return cn_client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_location_list_pyxb = cn_client.resolve(pid) for location in object_location_list_pyxb.objectLocation: try: params = self._mn_client_connect_params_from_session() params["base_url"] = location.baseURL mn_client = d1_cli.impl.client.CLIMNClient(**params) response = mn_client.get(pid) except d1_common.types.exceptions.DataONEException: pass else: self._output(response, path) return raise d1_cli.impl.exceptions.CLIError("Could not find object: {}".format(pid))
[ "def", "science_object_get", "(", "self", ",", "pid", ",", "path", ")", ":", "mn_client", "=", "d1_cli", ".", "impl", ".", "client", ".", "CLIMNClient", "(", "*", "*", "self", ".", "_mn_client_connect_params_from_session", "(", ")", ")", "try", ":", "respo...
First try the MN set in the session. Then try to resolve via the CN set in the session.
[ "First", "try", "the", "MN", "set", "in", "the", "session", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_processor.py#L136-L169
train
45,464
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
CommandProcessor.science_object_create
def science_object_create(self, pid, path, format_id=None): """Create a new Science Object on a Member Node.""" self._queue_science_object_create(pid, path, format_id)
python
def science_object_create(self, pid, path, format_id=None): """Create a new Science Object on a Member Node.""" self._queue_science_object_create(pid, path, format_id)
[ "def", "science_object_create", "(", "self", ",", "pid", ",", "path", ",", "format_id", "=", "None", ")", ":", "self", ".", "_queue_science_object_create", "(", "pid", ",", "path", ",", "format_id", ")" ]
Create a new Science Object on a Member Node.
[ "Create", "a", "new", "Science", "Object", "on", "a", "Member", "Node", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_processor.py#L223-L225
train
45,465
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
CommandProcessor.science_object_update
def science_object_update(self, pid_old, path, pid_new, format_id=None): """Obsolete a Science Object on a Member Node with a different one.""" self._queue_science_object_update(pid_old, path, pid_new, format_id)
python
def science_object_update(self, pid_old, path, pid_new, format_id=None): """Obsolete a Science Object on a Member Node with a different one.""" self._queue_science_object_update(pid_old, path, pid_new, format_id)
[ "def", "science_object_update", "(", "self", ",", "pid_old", ",", "path", ",", "pid_new", ",", "format_id", "=", "None", ")", ":", "self", ".", "_queue_science_object_update", "(", "pid_old", ",", "path", ",", "pid_new", ",", "format_id", ")" ]
Obsolete a Science Object on a Member Node with a different one.
[ "Obsolete", "a", "Science", "Object", "on", "a", "Member", "Node", "with", "a", "different", "one", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_processor.py#L227-L229
train
45,466
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
CommandProcessor._search_solr
def _search_solr(self, line): """Perform a SOLR search.""" try: query_str = self._create_solr_query(line) client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_list_pyxb = client.search( queryType=d1_common.const.DEFAULT_SEARCH_ENGINE, query=query_str, start=self._session.get(d1_cli.impl.session.START_NAME), rows=self._session.get(d1_cli.impl.session.COUNT_NAME), ) d1_cli.impl.util.print_info(self._pretty(object_list_pyxb.toxml("utf-8"))) except d1_common.types.exceptions.ServiceFailure as e: e = "%".join(str(e).splitlines()) # Flatten line regexp = re.compile( r"errorCode: (?P<error_code>\d+)%.*%Status code: (?P<status_code>\d+)" ) result = regexp.search(e) if ( (result is not None) and (result.group("error_code") == "500") and (result.group("status_code") == "400") ): # noqa: E129 result = re.search( r"<b>description</b> <u>(?P<description>[^<]+)</u>", e ) msg = re.sub( "&([^;]+);", lambda m: chr(html.entities.name2codepoint[m.group(1)]), result.group("description"), ) d1_cli.impl.util.print_info("Warning: %s" % msg) else: d1_cli.impl.util.print_error("Unexpected error:\n%s" % str(e))
python
def _search_solr(self, line): """Perform a SOLR search.""" try: query_str = self._create_solr_query(line) client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_list_pyxb = client.search( queryType=d1_common.const.DEFAULT_SEARCH_ENGINE, query=query_str, start=self._session.get(d1_cli.impl.session.START_NAME), rows=self._session.get(d1_cli.impl.session.COUNT_NAME), ) d1_cli.impl.util.print_info(self._pretty(object_list_pyxb.toxml("utf-8"))) except d1_common.types.exceptions.ServiceFailure as e: e = "%".join(str(e).splitlines()) # Flatten line regexp = re.compile( r"errorCode: (?P<error_code>\d+)%.*%Status code: (?P<status_code>\d+)" ) result = regexp.search(e) if ( (result is not None) and (result.group("error_code") == "500") and (result.group("status_code") == "400") ): # noqa: E129 result = re.search( r"<b>description</b> <u>(?P<description>[^<]+)</u>", e ) msg = re.sub( "&([^;]+);", lambda m: chr(html.entities.name2codepoint[m.group(1)]), result.group("description"), ) d1_cli.impl.util.print_info("Warning: %s" % msg) else: d1_cli.impl.util.print_error("Unexpected error:\n%s" % str(e))
[ "def", "_search_solr", "(", "self", ",", "line", ")", ":", "try", ":", "query_str", "=", "self", ".", "_create_solr_query", "(", "line", ")", "client", "=", "d1_cli", ".", "impl", ".", "client", ".", "CLICNClient", "(", "*", "*", "self", ".", "_cn_clie...
Perform a SOLR search.
[ "Perform", "a", "SOLR", "search", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_processor.py#L288-L323
train
45,467
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
CommandProcessor._create_solr_query
def _create_solr_query(self, line): """Actual search - easier to test. """ p0 = "" if line: p0 = line.strip() p1 = self._query_string_to_solr_filter(line) p2 = self._object_format_to_solr_filter(line) p3 = self._time_span_to_solr_filter() result = p0 + p1 + p2 + p3 return result.strip()
python
def _create_solr_query(self, line): """Actual search - easier to test. """ p0 = "" if line: p0 = line.strip() p1 = self._query_string_to_solr_filter(line) p2 = self._object_format_to_solr_filter(line) p3 = self._time_span_to_solr_filter() result = p0 + p1 + p2 + p3 return result.strip()
[ "def", "_create_solr_query", "(", "self", ",", "line", ")", ":", "p0", "=", "\"\"", "if", "line", ":", "p0", "=", "line", ".", "strip", "(", ")", "p1", "=", "self", ".", "_query_string_to_solr_filter", "(", "line", ")", "p2", "=", "self", ".", "_obje...
Actual search - easier to test.
[ "Actual", "search", "-", "easier", "to", "test", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_cli/src/d1_cli/impl/command_processor.py#L325-L334
train
45,468
genialis/resolwe
resolwe/flow/expression_engines/jinja/filters.py
apply_filter_list
def apply_filter_list(func, obj): """Apply `func` to list or tuple `obj` element-wise and directly otherwise.""" if isinstance(obj, (list, tuple)): return [func(item) for item in obj] return func(obj)
python
def apply_filter_list(func, obj): """Apply `func` to list or tuple `obj` element-wise and directly otherwise.""" if isinstance(obj, (list, tuple)): return [func(item) for item in obj] return func(obj)
[ "def", "apply_filter_list", "(", "func", ",", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "[", "func", "(", "item", ")", "for", "item", "in", "obj", "]", "return", "func", "(", "obj", ...
Apply `func` to list or tuple `obj` element-wise and directly otherwise.
[ "Apply", "func", "to", "list", "or", "tuple", "obj", "element", "-", "wise", "and", "directly", "otherwise", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/expression_engines/jinja/filters.py#L13-L17
train
45,469
genialis/resolwe
resolwe/flow/expression_engines/jinja/filters.py
_get_data_attr
def _get_data_attr(data, attr): """Get data object field.""" if isinstance(data, dict): # `Data` object's id is hydrated as `__id` in expression engine data = data['__id'] data_obj = Data.objects.get(id=data) return getattr(data_obj, attr)
python
def _get_data_attr(data, attr): """Get data object field.""" if isinstance(data, dict): # `Data` object's id is hydrated as `__id` in expression engine data = data['__id'] data_obj = Data.objects.get(id=data) return getattr(data_obj, attr)
[ "def", "_get_data_attr", "(", "data", ",", "attr", ")", ":", "if", "isinstance", "(", "data", ",", "dict", ")", ":", "# `Data` object's id is hydrated as `__id` in expression engine", "data", "=", "data", "[", "'__id'", "]", "data_obj", "=", "Data", ".", "object...
Get data object field.
[ "Get", "data", "object", "field", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/expression_engines/jinja/filters.py#L20-L28
train
45,470
genialis/resolwe
resolwe/flow/expression_engines/jinja/filters.py
input_
def input_(data, field_path): """Return a hydrated value of the ``input`` field.""" data_obj = Data.objects.get(id=data['__id']) inputs = copy.deepcopy(data_obj.input) # XXX: Optimize by hydrating only the required field (major refactoring). hydrate_input_references(inputs, data_obj.process.input_schema) hydrate_input_uploads(inputs, data_obj.process.input_schema) return dict_dot(inputs, field_path)
python
def input_(data, field_path): """Return a hydrated value of the ``input`` field.""" data_obj = Data.objects.get(id=data['__id']) inputs = copy.deepcopy(data_obj.input) # XXX: Optimize by hydrating only the required field (major refactoring). hydrate_input_references(inputs, data_obj.process.input_schema) hydrate_input_uploads(inputs, data_obj.process.input_schema) return dict_dot(inputs, field_path)
[ "def", "input_", "(", "data", ",", "field_path", ")", ":", "data_obj", "=", "Data", ".", "objects", ".", "get", "(", "id", "=", "data", "[", "'__id'", "]", ")", "inputs", "=", "copy", ".", "deepcopy", "(", "data_obj", ".", "input", ")", "# XXX: Optim...
Return a hydrated value of the ``input`` field.
[ "Return", "a", "hydrated", "value", "of", "the", "input", "field", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/expression_engines/jinja/filters.py#L41-L50
train
45,471
genialis/resolwe
resolwe/flow/expression_engines/jinja/filters.py
_get_hydrated_path
def _get_hydrated_path(field): """Return HydratedPath object for file-type field.""" # Get only file path if whole file object is given. if isinstance(field, str) and hasattr(field, 'file_name'): # field is already actually a HydratedPath object return field if isinstance(field, dict) and 'file' in field: hydrated_path = field['file'] if not hasattr(hydrated_path, 'file_name'): raise TypeError("Filter argument must be a valid file-type field.") return hydrated_path
python
def _get_hydrated_path(field): """Return HydratedPath object for file-type field.""" # Get only file path if whole file object is given. if isinstance(field, str) and hasattr(field, 'file_name'): # field is already actually a HydratedPath object return field if isinstance(field, dict) and 'file' in field: hydrated_path = field['file'] if not hasattr(hydrated_path, 'file_name'): raise TypeError("Filter argument must be a valid file-type field.") return hydrated_path
[ "def", "_get_hydrated_path", "(", "field", ")", ":", "# Get only file path if whole file object is given.", "if", "isinstance", "(", "field", ",", "str", ")", "and", "hasattr", "(", "field", ",", "'file_name'", ")", ":", "# field is already actually a HydratedPath object"...
Return HydratedPath object for file-type field.
[ "Return", "HydratedPath", "object", "for", "file", "-", "type", "field", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/expression_engines/jinja/filters.py#L88-L101
train
45,472
genialis/resolwe
resolwe/flow/expression_engines/jinja/filters.py
get_url
def get_url(field): """Return file's url based on base url set in settings.""" hydrated_path = _get_hydrated_path(field) base_url = getattr(settings, 'RESOLWE_HOST_URL', 'localhost') return "{}/data/{}/{}".format(base_url, hydrated_path.data_id, hydrated_path.file_name)
python
def get_url(field): """Return file's url based on base url set in settings.""" hydrated_path = _get_hydrated_path(field) base_url = getattr(settings, 'RESOLWE_HOST_URL', 'localhost') return "{}/data/{}/{}".format(base_url, hydrated_path.data_id, hydrated_path.file_name)
[ "def", "get_url", "(", "field", ")", ":", "hydrated_path", "=", "_get_hydrated_path", "(", "field", ")", "base_url", "=", "getattr", "(", "settings", ",", "'RESOLWE_HOST_URL'", ",", "'localhost'", ")", "return", "\"{}/data/{}/{}\"", ".", "format", "(", "base_url...
Return file's url based on base url set in settings.
[ "Return", "file", "s", "url", "based", "on", "base", "url", "set", "in", "settings", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/expression_engines/jinja/filters.py#L104-L108
train
45,473
genialis/resolwe
resolwe/flow/expression_engines/jinja/filters.py
descriptor
def descriptor(obj, path=''): """Return descriptor of given object. If ``path`` is specified, only the content on that path is returned. """ if isinstance(obj, dict): # Current object is hydrated, so we need to get descriptor from # dict representation. desc = obj['__descriptor'] else: desc = obj.descriptor resp = dict_dot(desc, path) if isinstance(resp, list) or isinstance(resp, dict): return json.dumps(resp) return resp
python
def descriptor(obj, path=''): """Return descriptor of given object. If ``path`` is specified, only the content on that path is returned. """ if isinstance(obj, dict): # Current object is hydrated, so we need to get descriptor from # dict representation. desc = obj['__descriptor'] else: desc = obj.descriptor resp = dict_dot(desc, path) if isinstance(resp, list) or isinstance(resp, dict): return json.dumps(resp) return resp
[ "def", "descriptor", "(", "obj", ",", "path", "=", "''", ")", ":", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "# Current object is hydrated, so we need to get descriptor from", "# dict representation.", "desc", "=", "obj", "[", "'__descriptor'", "]", "...
Return descriptor of given object. If ``path`` is specified, only the content on that path is returned.
[ "Return", "descriptor", "of", "given", "object", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/expression_engines/jinja/filters.py#L117-L135
train
45,474
DataONEorg/d1_python
client_onedrive/src/d1_onedrive/impl/clients/onedrive_solr_client.py
OneDriveSolrClient._close_open_date_ranges
def _close_open_date_ranges(self, record): """If a date range is missing the start or end date, close it by copying the date from the existing value.""" date_ranges = (('beginDate', 'endDate'),) for begin, end in date_ranges: if begin in record and end in record: return elif begin in record: record[end] = record[begin] elif end in record: record[begin] = record[end]
python
def _close_open_date_ranges(self, record): """If a date range is missing the start or end date, close it by copying the date from the existing value.""" date_ranges = (('beginDate', 'endDate'),) for begin, end in date_ranges: if begin in record and end in record: return elif begin in record: record[end] = record[begin] elif end in record: record[begin] = record[end]
[ "def", "_close_open_date_ranges", "(", "self", ",", "record", ")", ":", "date_ranges", "=", "(", "(", "'beginDate'", ",", "'endDate'", ")", ",", ")", "for", "begin", ",", "end", "in", "date_ranges", ":", "if", "begin", "in", "record", "and", "end", "in",...
If a date range is missing the start or end date, close it by copying the date from the existing value.
[ "If", "a", "date", "range", "is", "missing", "the", "start", "or", "end", "date", "close", "it", "by", "copying", "the", "date", "from", "the", "existing", "value", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_onedrive/src/d1_onedrive/impl/clients/onedrive_solr_client.py#L77-L87
train
45,475
DataONEorg/d1_python
client_onedrive/src/d1_onedrive/impl/drivers/dokan/solrclient.py
SolrConnection.getSolrType
def getSolrType(self, field): """Returns the SOLR type of the specified field name. Assumes the convention of dynamic fields using an underscore + type character code for the field name. """ ftype = 'string' try: ftype = self.fieldtypes[field] return ftype except Exception: pass fta = field.split('_') if len(fta) > 1: ft = fta[len(fta) - 1] try: ftype = self.fieldtypes[ft] # cache the type so it's used next time self.fieldtypes[field] = ftype except Exception: pass return ftype
python
def getSolrType(self, field): """Returns the SOLR type of the specified field name. Assumes the convention of dynamic fields using an underscore + type character code for the field name. """ ftype = 'string' try: ftype = self.fieldtypes[field] return ftype except Exception: pass fta = field.split('_') if len(fta) > 1: ft = fta[len(fta) - 1] try: ftype = self.fieldtypes[ft] # cache the type so it's used next time self.fieldtypes[field] = ftype except Exception: pass return ftype
[ "def", "getSolrType", "(", "self", ",", "field", ")", ":", "ftype", "=", "'string'", "try", ":", "ftype", "=", "self", ".", "fieldtypes", "[", "field", "]", "return", "ftype", "except", "Exception", ":", "pass", "fta", "=", "field", ".", "split", "(", ...
Returns the SOLR type of the specified field name. Assumes the convention of dynamic fields using an underscore + type character code for the field name.
[ "Returns", "the", "SOLR", "type", "of", "the", "specified", "field", "name", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_onedrive/src/d1_onedrive/impl/drivers/dokan/solrclient.py#L316-L338
train
45,476
DataONEorg/d1_python
client_onedrive/src/d1_onedrive/impl/drivers/dokan/solrclient.py
SolrConnection.getftype
def getftype(self, name): """Returns the python type for the specified field name. The field list is cached so multiple calls do not invoke a getFields request each time. @param name(string) The name of the SOLR field @returns Python type of the field. """ fields = self.getFields() try: fld = fields['fields'][name] except Exception: return str if fld['type'] in ['string', 'text', 'stext', 'text_ws']: return str if fld['type'] in ['sint', 'integer', 'long', 'slong']: return int if fld['type'] in ['sdouble', 'double', 'sfloat', 'float']: return float if fld['type'] in ['boolean']: return bool return fld['type']
python
def getftype(self, name): """Returns the python type for the specified field name. The field list is cached so multiple calls do not invoke a getFields request each time. @param name(string) The name of the SOLR field @returns Python type of the field. """ fields = self.getFields() try: fld = fields['fields'][name] except Exception: return str if fld['type'] in ['string', 'text', 'stext', 'text_ws']: return str if fld['type'] in ['sint', 'integer', 'long', 'slong']: return int if fld['type'] in ['sdouble', 'double', 'sfloat', 'float']: return float if fld['type'] in ['boolean']: return bool return fld['type']
[ "def", "getftype", "(", "self", ",", "name", ")", ":", "fields", "=", "self", ".", "getFields", "(", ")", "try", ":", "fld", "=", "fields", "[", "'fields'", "]", "[", "name", "]", "except", "Exception", ":", "return", "str", "if", "fld", "[", "'typ...
Returns the python type for the specified field name. The field list is cached so multiple calls do not invoke a getFields request each time. @param name(string) The name of the SOLR field @returns Python type of the field.
[ "Returns", "the", "python", "type", "for", "the", "specified", "field", "name", ".", "The", "field", "list", "is", "cached", "so", "multiple", "calls", "do", "not", "invoke", "a", "getFields", "request", "each", "time", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/client_onedrive/src/d1_onedrive/impl/drivers/dokan/solrclient.py#L571-L592
train
45,477
genialis/resolwe
resolwe/process/fields.py
Field.clean
def clean(self, value): """Run validators and return the clean value.""" if value is None: value = self.default try: value = self.to_python(value) self.validate(value) except ValidationError as error: raise ValidationError("invalid value for {}: {}".format( self.name, error.args[0] )) return value
python
def clean(self, value): """Run validators and return the clean value.""" if value is None: value = self.default try: value = self.to_python(value) self.validate(value) except ValidationError as error: raise ValidationError("invalid value for {}: {}".format( self.name, error.args[0] )) return value
[ "def", "clean", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", ":", "value", "=", "self", ".", "default", "try", ":", "value", "=", "self", ".", "to_python", "(", "value", ")", "self", ".", "validate", "(", "value", ")", "except"...
Run validators and return the clean value.
[ "Run", "validators", "and", "return", "the", "clean", "value", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/process/fields.py#L92-L105
train
45,478
genialis/resolwe
resolwe/process/fields.py
FileDescriptor.import_file
def import_file(self, imported_format=None, progress_from=0.0, progress_to=None): """Import field source file to working directory. :param imported_format: Import file format (extracted, compressed or both) :param progress_from: Initial progress value :param progress_to: Final progress value :return: Destination file path (if extracted and compressed, extracted path given) """ if not hasattr(resolwe_runtime_utils, 'import_file'): raise RuntimeError('Requires resolwe-runtime-utils >= 2.0.0') if imported_format is None: imported_format = resolwe_runtime_utils.ImportedFormat.BOTH return resolwe_runtime_utils.import_file( src=self.file_temp, file_name=self.path, imported_format=imported_format, progress_from=progress_from, progress_to=progress_to )
python
def import_file(self, imported_format=None, progress_from=0.0, progress_to=None): """Import field source file to working directory. :param imported_format: Import file format (extracted, compressed or both) :param progress_from: Initial progress value :param progress_to: Final progress value :return: Destination file path (if extracted and compressed, extracted path given) """ if not hasattr(resolwe_runtime_utils, 'import_file'): raise RuntimeError('Requires resolwe-runtime-utils >= 2.0.0') if imported_format is None: imported_format = resolwe_runtime_utils.ImportedFormat.BOTH return resolwe_runtime_utils.import_file( src=self.file_temp, file_name=self.path, imported_format=imported_format, progress_from=progress_from, progress_to=progress_to )
[ "def", "import_file", "(", "self", ",", "imported_format", "=", "None", ",", "progress_from", "=", "0.0", ",", "progress_to", "=", "None", ")", ":", "if", "not", "hasattr", "(", "resolwe_runtime_utils", ",", "'import_file'", ")", ":", "raise", "RuntimeError", ...
Import field source file to working directory. :param imported_format: Import file format (extracted, compressed or both) :param progress_from: Initial progress value :param progress_to: Final progress value :return: Destination file path (if extracted and compressed, extracted path given)
[ "Import", "field", "source", "file", "to", "working", "directory", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/process/fields.py#L261-L281
train
45,479
genialis/resolwe
resolwe/process/fields.py
DataDescriptor._get
def _get(self, key): """Return given key from cache.""" self._populate_cache() if key not in self._cache: raise AttributeError("DataField has no member {}".format(key)) return self._cache[key]
python
def _get(self, key): """Return given key from cache.""" self._populate_cache() if key not in self._cache: raise AttributeError("DataField has no member {}".format(key)) return self._cache[key]
[ "def", "_get", "(", "self", ",", "key", ")", ":", "self", ".", "_populate_cache", "(", ")", "if", "key", "not", "in", "self", ".", "_cache", ":", "raise", "AttributeError", "(", "\"DataField has no member {}\"", ".", "format", "(", "key", ")", ")", "retu...
Return given key from cache.
[ "Return", "given", "key", "from", "cache", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/process/fields.py#L522-L528
train
45,480
genialis/resolwe
resolwe/flow/managers/state.py
update_constants
def update_constants(): """Recreate channel name constants with changed settings. This kludge is mostly needed due to the way Django settings are patched for testing and how modules need to be imported throughout the project. On import time, settings are not patched yet, but some of the code needs static values immediately. Updating functions such as this one are then needed to fix dummy values. """ global MANAGER_CONTROL_CHANNEL, MANAGER_EXECUTOR_CHANNELS # pylint: disable=global-statement global MANAGER_LISTENER_STATS, MANAGER_STATE_PREFIX # pylint: disable=global-statement redis_prefix = getattr(settings, 'FLOW_MANAGER', {}).get('REDIS_PREFIX', '') MANAGER_CONTROL_CHANNEL = '{}.control'.format(redis_prefix) MANAGER_EXECUTOR_CHANNELS = ManagerChannelPair( '{}.result_queue'.format(redis_prefix), '{}.result_queue_response'.format(redis_prefix), ) MANAGER_STATE_PREFIX = '{}.state'.format(redis_prefix) MANAGER_LISTENER_STATS = '{}.listener_stats'.format(redis_prefix)
python
def update_constants(): """Recreate channel name constants with changed settings. This kludge is mostly needed due to the way Django settings are patched for testing and how modules need to be imported throughout the project. On import time, settings are not patched yet, but some of the code needs static values immediately. Updating functions such as this one are then needed to fix dummy values. """ global MANAGER_CONTROL_CHANNEL, MANAGER_EXECUTOR_CHANNELS # pylint: disable=global-statement global MANAGER_LISTENER_STATS, MANAGER_STATE_PREFIX # pylint: disable=global-statement redis_prefix = getattr(settings, 'FLOW_MANAGER', {}).get('REDIS_PREFIX', '') MANAGER_CONTROL_CHANNEL = '{}.control'.format(redis_prefix) MANAGER_EXECUTOR_CHANNELS = ManagerChannelPair( '{}.result_queue'.format(redis_prefix), '{}.result_queue_response'.format(redis_prefix), ) MANAGER_STATE_PREFIX = '{}.state'.format(redis_prefix) MANAGER_LISTENER_STATS = '{}.listener_stats'.format(redis_prefix)
[ "def", "update_constants", "(", ")", ":", "global", "MANAGER_CONTROL_CHANNEL", ",", "MANAGER_EXECUTOR_CHANNELS", "# pylint: disable=global-statement", "global", "MANAGER_LISTENER_STATS", ",", "MANAGER_STATE_PREFIX", "# pylint: disable=global-statement", "redis_prefix", "=", "getatt...
Recreate channel name constants with changed settings. This kludge is mostly needed due to the way Django settings are patched for testing and how modules need to be imported throughout the project. On import time, settings are not patched yet, but some of the code needs static values immediately. Updating functions such as this one are then needed to fix dummy values.
[ "Recreate", "channel", "name", "constants", "with", "changed", "settings", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/managers/state.py#L33-L52
train
45,481
genialis/resolwe
resolwe/flow/managers/state.py
ManagerState.destroy_channels
def destroy_channels(self): """Destroy Redis channels managed by this state instance.""" for item_name in dir(self): item = getattr(self, item_name) if isinstance(item, self.RedisAtomicBase): self.redis.delete(item.item_name)
python
def destroy_channels(self): """Destroy Redis channels managed by this state instance.""" for item_name in dir(self): item = getattr(self, item_name) if isinstance(item, self.RedisAtomicBase): self.redis.delete(item.item_name)
[ "def", "destroy_channels", "(", "self", ")", ":", "for", "item_name", "in", "dir", "(", "self", ")", ":", "item", "=", "getattr", "(", "self", ",", "item_name", ")", "if", "isinstance", "(", "item", ",", "self", ".", "RedisAtomicBase", ")", ":", "self"...
Destroy Redis channels managed by this state instance.
[ "Destroy", "Redis", "channels", "managed", "by", "this", "state", "instance", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/managers/state.py#L201-L206
train
45,482
DataONEorg/d1_python
lib_common/src/d1_common/cert/subject_info_renderer.py
SubjectInfoRenderer.render_to_image_file
def render_to_image_file( self, image_out_path, width_pixels=None, height_pixels=None, dpi=90 ): """Render the SubjectInfo to an image file. Args: image_out_path : str Path to where image image will be written. Valid extensions are ``.svg,`` ``.pdf``, and ``.png``. width_pixels : int Width of image to write. height_pixels : int Height of image to write, in pixels. dpi: Dots Per Inch to declare in image file. This does not change the resolution of the image but may change the size of the image when rendered. Returns: None """ self._render_type = "file" self._tree.render( file_name=image_out_path, w=width_pixels, h=height_pixels, dpi=dpi, units="px", tree_style=self._get_tree_style(), )
python
def render_to_image_file( self, image_out_path, width_pixels=None, height_pixels=None, dpi=90 ): """Render the SubjectInfo to an image file. Args: image_out_path : str Path to where image image will be written. Valid extensions are ``.svg,`` ``.pdf``, and ``.png``. width_pixels : int Width of image to write. height_pixels : int Height of image to write, in pixels. dpi: Dots Per Inch to declare in image file. This does not change the resolution of the image but may change the size of the image when rendered. Returns: None """ self._render_type = "file" self._tree.render( file_name=image_out_path, w=width_pixels, h=height_pixels, dpi=dpi, units="px", tree_style=self._get_tree_style(), )
[ "def", "render_to_image_file", "(", "self", ",", "image_out_path", ",", "width_pixels", "=", "None", ",", "height_pixels", "=", "None", ",", "dpi", "=", "90", ")", ":", "self", ".", "_render_type", "=", "\"file\"", "self", ".", "_tree", ".", "render", "(",...
Render the SubjectInfo to an image file. Args: image_out_path : str Path to where image image will be written. Valid extensions are ``.svg,`` ``.pdf``, and ``.png``. width_pixels : int Width of image to write. height_pixels : int Height of image to write, in pixels. dpi: Dots Per Inch to declare in image file. This does not change the resolution of the image but may change the size of the image when rendered. Returns: None
[ "Render", "the", "SubjectInfo", "to", "an", "image", "file", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/subject_info_renderer.py#L72-L105
train
45,483
DataONEorg/d1_python
lib_common/src/d1_common/cert/subject_info_renderer.py
SubjectInfoRenderer.browse_in_qt5_ui
def browse_in_qt5_ui(self): """Browse and edit the SubjectInfo in a simple Qt5 based UI.""" self._render_type = "browse" self._tree.show(tree_style=self._get_tree_style())
python
def browse_in_qt5_ui(self): """Browse and edit the SubjectInfo in a simple Qt5 based UI.""" self._render_type = "browse" self._tree.show(tree_style=self._get_tree_style())
[ "def", "browse_in_qt5_ui", "(", "self", ")", ":", "self", ".", "_render_type", "=", "\"browse\"", "self", ".", "_tree", ".", "show", "(", "tree_style", "=", "self", ".", "_get_tree_style", "(", ")", ")" ]
Browse and edit the SubjectInfo in a simple Qt5 based UI.
[ "Browse", "and", "edit", "the", "SubjectInfo", "in", "a", "simple", "Qt5", "based", "UI", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/subject_info_renderer.py#L107-L110
train
45,484
DataONEorg/d1_python
lib_common/src/d1_common/cert/subject_info_renderer.py
SubjectInfoRenderer._gen_etetoolkit_tree
def _gen_etetoolkit_tree(self, node, subject_info_tree): """Copy SubjectInfoTree to a ETE Tree.""" for si_node in subject_info_tree.child_list: if si_node.type_str == TYPE_NODE_TAG: child = self._add_type_node(node, si_node.label_str) elif si_node.type_str == SUBJECT_NODE_TAG: child = self._add_subject_node(node, si_node.label_str) else: raise AssertionError( 'Unknown node type. type_str="{}"'.format(si_node.type_str) ) self._gen_etetoolkit_tree(child, si_node)
python
def _gen_etetoolkit_tree(self, node, subject_info_tree): """Copy SubjectInfoTree to a ETE Tree.""" for si_node in subject_info_tree.child_list: if si_node.type_str == TYPE_NODE_TAG: child = self._add_type_node(node, si_node.label_str) elif si_node.type_str == SUBJECT_NODE_TAG: child = self._add_subject_node(node, si_node.label_str) else: raise AssertionError( 'Unknown node type. type_str="{}"'.format(si_node.type_str) ) self._gen_etetoolkit_tree(child, si_node)
[ "def", "_gen_etetoolkit_tree", "(", "self", ",", "node", ",", "subject_info_tree", ")", ":", "for", "si_node", "in", "subject_info_tree", ".", "child_list", ":", "if", "si_node", ".", "type_str", "==", "TYPE_NODE_TAG", ":", "child", "=", "self", ".", "_add_typ...
Copy SubjectInfoTree to a ETE Tree.
[ "Copy", "SubjectInfoTree", "to", "a", "ETE", "Tree", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/subject_info_renderer.py#L124-L135
train
45,485
DataONEorg/d1_python
lib_common/src/d1_common/cert/subject_info_renderer.py
SubjectInfoRenderer._add_type_node
def _add_type_node(self, node, label): """Add a node representing a SubjectInfo type.""" child = node.add_child(name=label) child.add_feature(TYPE_NODE_TAG, True) return child
python
def _add_type_node(self, node, label): """Add a node representing a SubjectInfo type.""" child = node.add_child(name=label) child.add_feature(TYPE_NODE_TAG, True) return child
[ "def", "_add_type_node", "(", "self", ",", "node", ",", "label", ")", ":", "child", "=", "node", ".", "add_child", "(", "name", "=", "label", ")", "child", ".", "add_feature", "(", "TYPE_NODE_TAG", ",", "True", ")", "return", "child" ]
Add a node representing a SubjectInfo type.
[ "Add", "a", "node", "representing", "a", "SubjectInfo", "type", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/subject_info_renderer.py#L137-L141
train
45,486
DataONEorg/d1_python
lib_common/src/d1_common/cert/subject_info_renderer.py
SubjectInfoRenderer._add_subject_node
def _add_subject_node(self, node, subj_str): """Add a node containing a subject string.""" child = node.add_child(name=subj_str) child.add_feature(SUBJECT_NODE_TAG, True) return child
python
def _add_subject_node(self, node, subj_str): """Add a node containing a subject string.""" child = node.add_child(name=subj_str) child.add_feature(SUBJECT_NODE_TAG, True) return child
[ "def", "_add_subject_node", "(", "self", ",", "node", ",", "subj_str", ")", ":", "child", "=", "node", ".", "add_child", "(", "name", "=", "subj_str", ")", "child", ".", "add_feature", "(", "SUBJECT_NODE_TAG", ",", "True", ")", "return", "child" ]
Add a node containing a subject string.
[ "Add", "a", "node", "containing", "a", "subject", "string", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/subject_info_renderer.py#L143-L147
train
45,487
DataONEorg/d1_python
lib_common/src/d1_common/cert/subject_info_renderer.py
SubjectInfoRenderer._get_node_path
def _get_node_path(self, node): """Return the path from the root to ``node`` as a list of node names.""" path = [] while node.up: path.append(node.name) node = node.up return list(reversed(path))
python
def _get_node_path(self, node): """Return the path from the root to ``node`` as a list of node names.""" path = [] while node.up: path.append(node.name) node = node.up return list(reversed(path))
[ "def", "_get_node_path", "(", "self", ",", "node", ")", ":", "path", "=", "[", "]", "while", "node", ".", "up", ":", "path", ".", "append", "(", "node", ".", "name", ")", "node", "=", "node", ".", "up", "return", "list", "(", "reversed", "(", "pa...
Return the path from the root to ``node`` as a list of node names.
[ "Return", "the", "path", "from", "the", "root", "to", "node", "as", "a", "list", "of", "node", "names", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/subject_info_renderer.py#L149-L155
train
45,488
DataONEorg/d1_python
lib_common/src/d1_common/cert/subject_info_renderer.py
SubjectInfoRenderer._layout
def _layout(self, node): """ETE calls this function to style each node before rendering. - ETE terms: - A Style is a specification for how to render the node itself - A Face defines extra information that is rendered outside of the node - Face objects are used here to provide more control on how to draw the nodes. """ def set_edge_style(): """Set the style for edges and make the node invisible.""" node_style = ete3.NodeStyle() node_style["vt_line_color"] = EDGE_COLOR node_style["hz_line_color"] = EDGE_COLOR node_style["vt_line_width"] = EDGE_WIDTH node_style["hz_line_width"] = EDGE_WIDTH node_style["size"] = 0 node.set_style(node_style) def style_subject_node(color="Black"): """Specify the appearance of Subject nodes.""" face = ete3.TextFace(node.name, fsize=SUBJECT_NODE_FONT_SIZE, fgcolor=color) set_face_margin(face) node.add_face(face, column=0, position="branch-right") def style_type_node(color="Black"): """Specify the appearance of Type nodes.""" face = ete3.CircleFace( radius=TYPE_NODE_RADIUS, color=TYPE_NODE_COLOR_DICT.get(node.name, "White"), style="circle", label={ "text": node.name, "color": color, "fontsize": ( TYPE_NODE_FONT_SIZE_FILE if self._render_type == "file" else TYPE_NODE_FONT_SIZE_BROWSE ), }, ) set_face_margin(face) node.add_face(face, column=0, position="branch-right") def set_face_margin(face): """Add margins to Face object. - Add space between inner_border and border on TextFace. - Add space outside bounding area of CircleFace. """ face.margin_left = 5 face.margin_right = 5 # face.margin_top = 5 # face.margin_bottom = 5 set_edge_style() if hasattr(node, SUBJECT_NODE_TAG): style_subject_node() elif hasattr(node, TYPE_NODE_TAG): style_type_node() else: raise AssertionError("Unknown node type")
python
def _layout(self, node): """ETE calls this function to style each node before rendering. - ETE terms: - A Style is a specification for how to render the node itself - A Face defines extra information that is rendered outside of the node - Face objects are used here to provide more control on how to draw the nodes. """ def set_edge_style(): """Set the style for edges and make the node invisible.""" node_style = ete3.NodeStyle() node_style["vt_line_color"] = EDGE_COLOR node_style["hz_line_color"] = EDGE_COLOR node_style["vt_line_width"] = EDGE_WIDTH node_style["hz_line_width"] = EDGE_WIDTH node_style["size"] = 0 node.set_style(node_style) def style_subject_node(color="Black"): """Specify the appearance of Subject nodes.""" face = ete3.TextFace(node.name, fsize=SUBJECT_NODE_FONT_SIZE, fgcolor=color) set_face_margin(face) node.add_face(face, column=0, position="branch-right") def style_type_node(color="Black"): """Specify the appearance of Type nodes.""" face = ete3.CircleFace( radius=TYPE_NODE_RADIUS, color=TYPE_NODE_COLOR_DICT.get(node.name, "White"), style="circle", label={ "text": node.name, "color": color, "fontsize": ( TYPE_NODE_FONT_SIZE_FILE if self._render_type == "file" else TYPE_NODE_FONT_SIZE_BROWSE ), }, ) set_face_margin(face) node.add_face(face, column=0, position="branch-right") def set_face_margin(face): """Add margins to Face object. - Add space between inner_border and border on TextFace. - Add space outside bounding area of CircleFace. """ face.margin_left = 5 face.margin_right = 5 # face.margin_top = 5 # face.margin_bottom = 5 set_edge_style() if hasattr(node, SUBJECT_NODE_TAG): style_subject_node() elif hasattr(node, TYPE_NODE_TAG): style_type_node() else: raise AssertionError("Unknown node type")
[ "def", "_layout", "(", "self", ",", "node", ")", ":", "def", "set_edge_style", "(", ")", ":", "\"\"\"Set the style for edges and make the node invisible.\"\"\"", "node_style", "=", "ete3", ".", "NodeStyle", "(", ")", "node_style", "[", "\"vt_line_color\"", "]", "=",...
ETE calls this function to style each node before rendering. - ETE terms: - A Style is a specification for how to render the node itself - A Face defines extra information that is rendered outside of the node - Face objects are used here to provide more control on how to draw the nodes.
[ "ETE", "calls", "this", "function", "to", "style", "each", "node", "before", "rendering", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/cert/subject_info_renderer.py#L167-L231
train
45,489
genialis/resolwe
resolwe/flow/executors/local/prepare.py
FlowExecutorPreparer.extend_settings
def extend_settings(self, data_id, files, secrets): """Prevent processes requiring access to secrets from being run.""" process = Data.objects.get(pk=data_id).process if process.requirements.get('resources', {}).get('secrets', False): raise PermissionDenied( "Process which requires access to secrets cannot be run using the local executor" ) return super().extend_settings(data_id, files, secrets)
python
def extend_settings(self, data_id, files, secrets): """Prevent processes requiring access to secrets from being run.""" process = Data.objects.get(pk=data_id).process if process.requirements.get('resources', {}).get('secrets', False): raise PermissionDenied( "Process which requires access to secrets cannot be run using the local executor" ) return super().extend_settings(data_id, files, secrets)
[ "def", "extend_settings", "(", "self", ",", "data_id", ",", "files", ",", "secrets", ")", ":", "process", "=", "Data", ".", "objects", ".", "get", "(", "pk", "=", "data_id", ")", ".", "process", "if", "process", ".", "requirements", ".", "get", "(", ...
Prevent processes requiring access to secrets from being run.
[ "Prevent", "processes", "requiring", "access", "to", "secrets", "from", "being", "run", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/executors/local/prepare.py#L22-L30
train
45,490
genialis/resolwe
resolwe/flow/finders.py
get_finder
def get_finder(import_path): """Get a process finder.""" finder_class = import_string(import_path) if not issubclass(finder_class, BaseProcessesFinder): raise ImproperlyConfigured( 'Finder "{}" is not a subclass of "{}"'.format(finder_class, BaseProcessesFinder)) return finder_class()
python
def get_finder(import_path): """Get a process finder.""" finder_class = import_string(import_path) if not issubclass(finder_class, BaseProcessesFinder): raise ImproperlyConfigured( 'Finder "{}" is not a subclass of "{}"'.format(finder_class, BaseProcessesFinder)) return finder_class()
[ "def", "get_finder", "(", "import_path", ")", ":", "finder_class", "=", "import_string", "(", "import_path", ")", "if", "not", "issubclass", "(", "finder_class", ",", "BaseProcessesFinder", ")", ":", "raise", "ImproperlyConfigured", "(", "'Finder \"{}\" is not a subcl...
Get a process finder.
[ "Get", "a", "process", "finder", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/finders.py#L67-L73
train
45,491
genialis/resolwe
resolwe/flow/finders.py
AppDirectoriesFinder._find_folders
def _find_folders(self, folder_name): """Return a list of sub-directories.""" found_folders = [] for app_config in apps.get_app_configs(): folder_path = os.path.join(app_config.path, folder_name) if os.path.isdir(folder_path): found_folders.append(folder_path) return found_folders
python
def _find_folders(self, folder_name): """Return a list of sub-directories.""" found_folders = [] for app_config in apps.get_app_configs(): folder_path = os.path.join(app_config.path, folder_name) if os.path.isdir(folder_path): found_folders.append(folder_path) return found_folders
[ "def", "_find_folders", "(", "self", ",", "folder_name", ")", ":", "found_folders", "=", "[", "]", "for", "app_config", "in", "apps", ".", "get_app_configs", "(", ")", ":", "folder_path", "=", "os", ".", "path", ".", "join", "(", "app_config", ".", "path...
Return a list of sub-directories.
[ "Return", "a", "list", "of", "sub", "-", "directories", "." ]
f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86
https://github.com/genialis/resolwe/blob/f7bb54932c81ec0cfc5b5e80d238fceaeaa48d86/resolwe/flow/finders.py#L43-L50
train
45,492
DataONEorg/d1_python
gmn/src/d1_gmn/app/views/assert_sysmeta.py
sanity
def sanity(request, sysmeta_pyxb): """Check that sysmeta_pyxb is suitable for creating a new object and matches the uploaded sciobj bytes.""" _does_not_contain_replica_sections(sysmeta_pyxb) _is_not_archived(sysmeta_pyxb) _obsoleted_by_not_specified(sysmeta_pyxb) if 'HTTP_VENDOR_GMN_REMOTE_URL' in request.META: return _has_correct_file_size(request, sysmeta_pyxb) _is_supported_checksum_algorithm(sysmeta_pyxb) _is_correct_checksum(request, sysmeta_pyxb)
python
def sanity(request, sysmeta_pyxb): """Check that sysmeta_pyxb is suitable for creating a new object and matches the uploaded sciobj bytes.""" _does_not_contain_replica_sections(sysmeta_pyxb) _is_not_archived(sysmeta_pyxb) _obsoleted_by_not_specified(sysmeta_pyxb) if 'HTTP_VENDOR_GMN_REMOTE_URL' in request.META: return _has_correct_file_size(request, sysmeta_pyxb) _is_supported_checksum_algorithm(sysmeta_pyxb) _is_correct_checksum(request, sysmeta_pyxb)
[ "def", "sanity", "(", "request", ",", "sysmeta_pyxb", ")", ":", "_does_not_contain_replica_sections", "(", "sysmeta_pyxb", ")", "_is_not_archived", "(", "sysmeta_pyxb", ")", "_obsoleted_by_not_specified", "(", "sysmeta_pyxb", ")", "if", "'HTTP_VENDOR_GMN_REMOTE_URL'", "in...
Check that sysmeta_pyxb is suitable for creating a new object and matches the uploaded sciobj bytes.
[ "Check", "that", "sysmeta_pyxb", "is", "suitable", "for", "creating", "a", "new", "object", "and", "matches", "the", "uploaded", "sciobj", "bytes", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/views/assert_sysmeta.py#L36-L46
train
45,493
DataONEorg/d1_python
gmn/src/d1_gmn/app/views/assert_sysmeta.py
is_valid_sid_for_new_standalone
def is_valid_sid_for_new_standalone(sysmeta_pyxb): """Assert that any SID in ``sysmeta_pyxb`` can be assigned to a new standalone object.""" sid = d1_common.xml.get_opt_val(sysmeta_pyxb, 'seriesId') if not d1_gmn.app.did.is_valid_sid_for_new_standalone(sid): raise d1_common.types.exceptions.IdentifierNotUnique( 0, 'Identifier is already in use as {}. did="{}"'.format( d1_gmn.app.did.classify_identifier(sid), sid ), identifier=sid, )
python
def is_valid_sid_for_new_standalone(sysmeta_pyxb): """Assert that any SID in ``sysmeta_pyxb`` can be assigned to a new standalone object.""" sid = d1_common.xml.get_opt_val(sysmeta_pyxb, 'seriesId') if not d1_gmn.app.did.is_valid_sid_for_new_standalone(sid): raise d1_common.types.exceptions.IdentifierNotUnique( 0, 'Identifier is already in use as {}. did="{}"'.format( d1_gmn.app.did.classify_identifier(sid), sid ), identifier=sid, )
[ "def", "is_valid_sid_for_new_standalone", "(", "sysmeta_pyxb", ")", ":", "sid", "=", "d1_common", ".", "xml", ".", "get_opt_val", "(", "sysmeta_pyxb", ",", "'seriesId'", ")", "if", "not", "d1_gmn", ".", "app", ".", "did", ".", "is_valid_sid_for_new_standalone", ...
Assert that any SID in ``sysmeta_pyxb`` can be assigned to a new standalone object.
[ "Assert", "that", "any", "SID", "in", "sysmeta_pyxb", "can", "be", "assigned", "to", "a", "new", "standalone", "object", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/views/assert_sysmeta.py#L108-L119
train
45,494
DataONEorg/d1_python
gmn/src/d1_gmn/app/views/assert_sysmeta.py
is_valid_sid_for_chain
def is_valid_sid_for_chain(pid, sid): """Assert that ``sid`` can be assigned to the single object ``pid`` or to the chain to which ``pid`` belongs. - If the chain does not have a SID, the new SID must be previously unused. - If the chain already has a SID, the new SID must match the existing SID. """ if not d1_gmn.app.did.is_valid_sid_for_chain(pid, sid): existing_sid = d1_gmn.app.revision.get_sid_by_pid(pid) raise d1_common.types.exceptions.IdentifierNotUnique( 0, 'A different SID is already assigned to the revision chain to which ' 'the object being created or updated belongs. A SID cannot be changed ' 'once it has been assigned to a chain. ' 'existing_sid="{}", new_sid="{}", pid="{}"'.format(existing_sid, sid, pid), )
python
def is_valid_sid_for_chain(pid, sid): """Assert that ``sid`` can be assigned to the single object ``pid`` or to the chain to which ``pid`` belongs. - If the chain does not have a SID, the new SID must be previously unused. - If the chain already has a SID, the new SID must match the existing SID. """ if not d1_gmn.app.did.is_valid_sid_for_chain(pid, sid): existing_sid = d1_gmn.app.revision.get_sid_by_pid(pid) raise d1_common.types.exceptions.IdentifierNotUnique( 0, 'A different SID is already assigned to the revision chain to which ' 'the object being created or updated belongs. A SID cannot be changed ' 'once it has been assigned to a chain. ' 'existing_sid="{}", new_sid="{}", pid="{}"'.format(existing_sid, sid, pid), )
[ "def", "is_valid_sid_for_chain", "(", "pid", ",", "sid", ")", ":", "if", "not", "d1_gmn", ".", "app", ".", "did", ".", "is_valid_sid_for_chain", "(", "pid", ",", "sid", ")", ":", "existing_sid", "=", "d1_gmn", ".", "app", ".", "revision", ".", "get_sid_b...
Assert that ``sid`` can be assigned to the single object ``pid`` or to the chain to which ``pid`` belongs. - If the chain does not have a SID, the new SID must be previously unused. - If the chain already has a SID, the new SID must match the existing SID.
[ "Assert", "that", "sid", "can", "be", "assigned", "to", "the", "single", "object", "pid", "or", "to", "the", "chain", "to", "which", "pid", "belongs", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/views/assert_sysmeta.py#L122-L138
train
45,495
DataONEorg/d1_python
gmn/src/d1_gmn/app/views/assert_sysmeta.py
_does_not_contain_replica_sections
def _does_not_contain_replica_sections(sysmeta_pyxb): """Assert that ``sysmeta_pyxb`` does not contain any replica information.""" if len(getattr(sysmeta_pyxb, 'replica', [])): raise d1_common.types.exceptions.InvalidSystemMetadata( 0, 'A replica section was included. A new object object created via ' 'create() or update() cannot already have replicas. pid="{}"'.format( d1_common.xml.get_req_val(sysmeta_pyxb.identifier) ), identifier=d1_common.xml.get_req_val(sysmeta_pyxb.identifier), )
python
def _does_not_contain_replica_sections(sysmeta_pyxb): """Assert that ``sysmeta_pyxb`` does not contain any replica information.""" if len(getattr(sysmeta_pyxb, 'replica', [])): raise d1_common.types.exceptions.InvalidSystemMetadata( 0, 'A replica section was included. A new object object created via ' 'create() or update() cannot already have replicas. pid="{}"'.format( d1_common.xml.get_req_val(sysmeta_pyxb.identifier) ), identifier=d1_common.xml.get_req_val(sysmeta_pyxb.identifier), )
[ "def", "_does_not_contain_replica_sections", "(", "sysmeta_pyxb", ")", ":", "if", "len", "(", "getattr", "(", "sysmeta_pyxb", ",", "'replica'", ",", "[", "]", ")", ")", ":", "raise", "d1_common", ".", "types", ".", "exceptions", ".", "InvalidSystemMetadata", "...
Assert that ``sysmeta_pyxb`` does not contain any replica information.
[ "Assert", "that", "sysmeta_pyxb", "does", "not", "contain", "any", "replica", "information", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/views/assert_sysmeta.py#L141-L151
train
45,496
DataONEorg/d1_python
gmn/src/d1_gmn/app/views/assert_sysmeta.py
_is_not_archived
def _is_not_archived(sysmeta_pyxb): """Assert that ``sysmeta_pyxb`` does not have have the archived flag set.""" if _is_archived(sysmeta_pyxb): raise d1_common.types.exceptions.InvalidSystemMetadata( 0, 'Archived flag was set. A new object created via create() or update() ' 'cannot already be archived. pid="{}"'.format( d1_common.xml.get_req_val(sysmeta_pyxb.identifier) ), identifier=d1_common.xml.get_req_val(sysmeta_pyxb.identifier), )
python
def _is_not_archived(sysmeta_pyxb): """Assert that ``sysmeta_pyxb`` does not have have the archived flag set.""" if _is_archived(sysmeta_pyxb): raise d1_common.types.exceptions.InvalidSystemMetadata( 0, 'Archived flag was set. A new object created via create() or update() ' 'cannot already be archived. pid="{}"'.format( d1_common.xml.get_req_val(sysmeta_pyxb.identifier) ), identifier=d1_common.xml.get_req_val(sysmeta_pyxb.identifier), )
[ "def", "_is_not_archived", "(", "sysmeta_pyxb", ")", ":", "if", "_is_archived", "(", "sysmeta_pyxb", ")", ":", "raise", "d1_common", ".", "types", ".", "exceptions", ".", "InvalidSystemMetadata", "(", "0", ",", "'Archived flag was set. A new object created via create() ...
Assert that ``sysmeta_pyxb`` does not have have the archived flag set.
[ "Assert", "that", "sysmeta_pyxb", "does", "not", "have", "have", "the", "archived", "flag", "set", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/gmn/src/d1_gmn/app/views/assert_sysmeta.py#L154-L164
train
45,497
DataONEorg/d1_python
lib_common/src/d1_common/env.py
get_d1_env_by_base_url
def get_d1_env_by_base_url(cn_base_url): """Given the BaseURL for a CN, return the DataONE environment dict for the CN's environemnt.""" for k, v in D1_ENV_DICT: if v['base_url'].startswith(cn_base_url): return D1_ENV_DICT[k]
python
def get_d1_env_by_base_url(cn_base_url): """Given the BaseURL for a CN, return the DataONE environment dict for the CN's environemnt.""" for k, v in D1_ENV_DICT: if v['base_url'].startswith(cn_base_url): return D1_ENV_DICT[k]
[ "def", "get_d1_env_by_base_url", "(", "cn_base_url", ")", ":", "for", "k", ",", "v", "in", "D1_ENV_DICT", ":", "if", "v", "[", "'base_url'", "]", ".", "startswith", "(", "cn_base_url", ")", ":", "return", "D1_ENV_DICT", "[", "k", "]" ]
Given the BaseURL for a CN, return the DataONE environment dict for the CN's environemnt.
[ "Given", "the", "BaseURL", "for", "a", "CN", "return", "the", "DataONE", "environment", "dict", "for", "the", "CN", "s", "environemnt", "." ]
3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d
https://github.com/DataONEorg/d1_python/blob/3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d/lib_common/src/d1_common/env.py#L71-L76
train
45,498
wilson-eft/wilson
wilson/match/smeft.py
match_all
def match_all(d_SMEFT, parameters=None): """Match the SMEFT Warsaw basis onto the WET JMS basis.""" p = default_parameters.copy() if parameters is not None: # if parameters are passed in, overwrite the default values p.update(parameters) C = wilson.util.smeftutil.wcxf2arrays_symmetrized(d_SMEFT) C['vT'] = 246.22 C_WET = match_all_array(C, p) C_WET = wilson.translate.wet.rotate_down(C_WET, p) C_WET = wetutil.unscale_dict_wet(C_WET) d_WET = wilson.util.smeftutil.arrays2wcxf(C_WET) basis = wcxf.Basis['WET', 'JMS'] keys = set(d_WET.keys()) & set(basis.all_wcs) d_WET = {k: d_WET[k] for k in keys} return d_WET
python
def match_all(d_SMEFT, parameters=None): """Match the SMEFT Warsaw basis onto the WET JMS basis.""" p = default_parameters.copy() if parameters is not None: # if parameters are passed in, overwrite the default values p.update(parameters) C = wilson.util.smeftutil.wcxf2arrays_symmetrized(d_SMEFT) C['vT'] = 246.22 C_WET = match_all_array(C, p) C_WET = wilson.translate.wet.rotate_down(C_WET, p) C_WET = wetutil.unscale_dict_wet(C_WET) d_WET = wilson.util.smeftutil.arrays2wcxf(C_WET) basis = wcxf.Basis['WET', 'JMS'] keys = set(d_WET.keys()) & set(basis.all_wcs) d_WET = {k: d_WET[k] for k in keys} return d_WET
[ "def", "match_all", "(", "d_SMEFT", ",", "parameters", "=", "None", ")", ":", "p", "=", "default_parameters", ".", "copy", "(", ")", "if", "parameters", "is", "not", "None", ":", "# if parameters are passed in, overwrite the default values", "p", ".", "update", ...
Match the SMEFT Warsaw basis onto the WET JMS basis.
[ "Match", "the", "SMEFT", "Warsaw", "basis", "onto", "the", "WET", "JMS", "basis", "." ]
4164f55ff663d4f668c6e2b4575fd41562662cc9
https://github.com/wilson-eft/wilson/blob/4164f55ff663d4f668c6e2b4575fd41562662cc9/wilson/match/smeft.py#L206-L221
train
45,499