_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q270600 | ProtocolVersion.read | test | def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the ProtocolVersion struct and decode it into
its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if either the major or minor protocol versions
are missing from the encoding.
"""
# Parent read decodes the TTLV header and sets self.length.
super(ProtocolVersion, self).read(
input_stream,
kmip_version=kmip_version
)
# Buffer exactly self.length bytes so nested reads cannot overrun
# this struct's encoding within the larger stream.
local_stream = utils.BytearrayStream(input_stream.read(self.length))
# Both the major and minor version fields are mandatory.
if self.is_tag_next(enums.Tags.PROTOCOL_VERSION_MAJOR, local_stream):
self._major = primitives.Integer(
tag=enums.Tags.PROTOCOL_VERSION_MAJOR
)
self._major.read(local_stream, kmip_version=kmip_version)
else:
raise ValueError(
"Invalid encoding missing the major protocol version number."
)
if self.is_tag_next(enums.Tags.PROTOCOL_VERSION_MINOR, local_stream):
self._minor = primitives.Integer(
tag=enums.Tags.PROTOCOL_VERSION_MINOR
)
self._minor.read(local_stream, kmip_version=kmip_version)
else:
raise ValueError(
"Invalid encoding missing the minor protocol version number."
)
# Verify no unread bytes remain in the buffered encoding.
self.is_oversized(local_stream) | python | {
"resource": ""
} |
q270601 | ProtocolVersion.write | test | def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the ProtocolVersion struct to a stream.
Args:
output_stream (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is not defined.
"""
# Encode the fields into a local buffer first so the total length is
# known before the TTLV header is written.
local_stream = utils.BytearrayStream()
if self._major:
self._major.write(local_stream, kmip_version=kmip_version)
else:
raise ValueError(
"Invalid struct missing the major protocol version number."
)
if self._minor:
self._minor.write(local_stream, kmip_version=kmip_version)
else:
raise ValueError(
"Invalid struct missing the minor protocol version number."
)
self.length = local_stream.length()
# Parent write emits the header; the buffered payload follows it.
super(ProtocolVersion, self).write(
output_stream,
kmip_version=kmip_version
)
output_stream.write(local_stream.buffer) | python | {
"resource": ""
} |
q270602 | Authentication.read | test | def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the Authentication struct and decode it into
its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.

Raises:
ValueError: Raised if no Credential structures are found in the
encoding.
"""
super(Authentication, self).read(
input_stream,
kmip_version=kmip_version
)
# Buffer exactly self.length bytes for this struct's payload.
local_stream = utils.BytearrayStream(input_stream.read(self.length))
credentials = []
# Consume every consecutive Credential structure in the encoding.
while self.is_tag_next(enums.Tags.CREDENTIAL, local_stream):
credential = objects.Credential()
credential.read(local_stream, kmip_version=kmip_version)
credentials.append(credential)
# At least one credential is required.
if len(credentials) == 0:
raise ValueError("Authentication encoding missing credentials.")
self._credentials = credentials
self.is_oversized(local_stream) | python | {
"resource": ""
} |
q270603 | Authentication.write | test | def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the Authentication struct to a stream.
Args:
output_stream (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.

Raises:
ValueError: Raised if the struct holds no credentials.
"""
local_stream = utils.BytearrayStream()
# NOTE(review): len() assumes _credentials is a list; if it can ever be
# None this raises TypeError, not the ValueError below — confirm the
# constructor's default.
if len(self._credentials) == 0:
raise ValueError("Authentication struct missing credentials.")
for credential in self._credentials:
credential.write(local_stream, kmip_version=kmip_version)
# Length must reflect the encoded payload before the header is written.
self.length = local_stream.length()
super(Authentication, self).write(
output_stream,
kmip_version=kmip_version
)
output_stream.write(local_stream.buffer) | python | {
"resource": ""
} |
q270604 | PollRequestPayload.read | test | def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the Poll request payload and decode it into
its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is missing from the
encoded payload.
"""
super(PollRequestPayload, self).read(
input_stream,
kmip_version=kmip_version
)
# Buffer exactly self.length bytes for this payload's fields.
local_stream = utils.BytearrayStream(input_stream.read(self.length))
# The asynchronous correlation value is optional; leave the attribute
# untouched when the tag is absent.
if self.is_tag_next(
enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE,
local_stream
):
self._asynchronous_correlation_value = primitives.ByteString(
tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE
)
self._asynchronous_correlation_value.read(
local_stream,
kmip_version=kmip_version
)
self.is_oversized(local_stream) | python | {
"resource": ""
} |
q270605 | Certificate.read | test | def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the Certificate object and decode it into its
constituent parts.
Args:
istream (Stream): A data stream containing encoded object data,
supporting a read method; usually a BytearrayStream object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
"""
super(Certificate, self).read(istream, kmip_version=kmip_version)
# Buffer exactly self.length bytes for this object's fields.
tstream = BytearrayStream(istream.read(self.length))
# Both fields are decoded unconditionally; their read calls are
# expected to fail if the encoding is malformed.
self.certificate_type = CertificateType()
self.certificate_value = CertificateValue()
self.certificate_type.read(tstream, kmip_version=kmip_version)
self.certificate_value.read(tstream, kmip_version=kmip_version)
self.is_oversized(tstream) | python | {
"resource": ""
} |
q270606 | Certificate.write | test | def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the Certificate object to a stream.
Args:
ostream (Stream): A data stream in which to encode object data,
supporting a write method; usually a BytearrayStream object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optional,
defaults to KMIP 1.0.
"""
# Encode the fields into a local buffer first so the total length is
# known before the header is written.
tstream = BytearrayStream()
self.certificate_type.write(tstream, kmip_version=kmip_version)
self.certificate_value.write(tstream, kmip_version=kmip_version)
self.length = tstream.length()
super(Certificate, self).write(ostream, kmip_version=kmip_version)
ostream.write(tstream.buffer) | python | {
"resource": ""
} |
q270607 | SLUGSConnector.authenticate | test | def authenticate(self,
connection_certificate=None,
connection_info=None,
request_credentials=None):
"""
Query the configured SLUGS service with the provided credentials.
Args:
connection_certificate (cryptography.x509.Certificate): An X.509
certificate object obtained from the connection being
authenticated. Required for SLUGS authentication.
connection_info (tuple): A tuple of information pertaining to the
connection being authenticated, including the source IP address
and a timestamp (e.g., ('127.0.0.1', 1519759267.467451)).
Optional, defaults to None. Ignored for SLUGS authentication.
request_credentials (list): A list of KMIP Credential structures
containing credential information to use for authentication.
Optional, defaults to None. Ignored for SLUGS authentication.

Returns:
tuple: The authenticated user ID and that user's group list.

Raises:
ConfigurationError: Raised if the SLUGS URLs are unset or the
service cannot be reached.
PermissionDenied: Raised if the user or its groups are unknown
to the SLUGS service.
"""
if (self.users_url is None) or (self.groups_url is None):
raise exceptions.ConfigurationError(
"The SLUGS URL must be specified."
)
# The user ID is derived from the client certificate's identity.
user_id = utils.get_client_identity_from_certificate(
connection_certificate
)
try:
response = requests.get(self.users_url.format(user_id))
except Exception:
raise exceptions.ConfigurationError(
"A connection could not be established using the SLUGS URL."
)
if response.status_code == 404:
raise exceptions.PermissionDenied(
"Unrecognized user ID: {}".format(user_id)
)
# NOTE(review): unlike the users lookup above, this request is not
# wrapped in try/except; a connection failure here propagates raw —
# confirm whether that asymmetry is intentional.
response = requests.get(self.groups_url.format(user_id))
if response.status_code == 404:
raise exceptions.PermissionDenied(
"Group information could not be retrieved for user ID: "
"{}".format(user_id)
)
return user_id, response.json().get('groups') | python | {
"resource": ""
} |
q270608 | ArchiveResponsePayload.read | test | def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the Archive response payload and decode it
into its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is missing from the
encoded payload.
"""
super(ArchiveResponsePayload, self).read(
input_stream,
kmip_version=kmip_version
)
# Buffer exactly self.length bytes for this payload's fields.
local_stream = utils.BytearrayStream(input_stream.read(self.length))
# The unique identifier is optional in this payload; no error is
# raised when the tag is absent.
if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
self._unique_identifier = primitives.TextString(
tag=enums.Tags.UNIQUE_IDENTIFIER
)
self._unique_identifier.read(
local_stream,
kmip_version=kmip_version
)
self.is_oversized(local_stream) | python | {
"resource": ""
} |
q270609 | ArchiveResponsePayload.write | test | def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Write the data encoding the Archive response payload to a stream.
Args:
output_stream (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the data attribute is not defined.
"""
local_stream = utils.BytearrayStream()
# The unique identifier is only written when present; the payload is
# valid without it.
if self._unique_identifier:
self._unique_identifier.write(
local_stream,
kmip_version=kmip_version
)
# Length must reflect the encoded payload before the header is written.
self.length = local_stream.length()
super(ArchiveResponsePayload, self).write(
output_stream,
kmip_version=kmip_version
)
output_stream.write(local_stream.buffer) | python | {
"resource": ""
} |
q270610 | KmipSession.run | test | def run(self):
"""
The main thread routine executed by invoking thread.start.
This method manages the new client connection, running a message
handling loop. Once this method completes, the thread is finished.
"""
self._logger.info("Starting session: {0}".format(self.name))
try:
self._connection.do_handshake()
except Exception as e:
# A failed TLS handshake skips the message loop entirely; the
# connection is still shut down below.
self._logger.info("Failure running TLS handshake")
self._logger.exception(e)
else:
while True:
try:
self._handle_message_loop()
except exceptions.ConnectionClosed as e:
# Normal termination: the peer closed the connection.
break
except Exception as e:
# Unexpected errors are logged but the loop continues so a
# single bad message does not kill the session.
self._logger.info("Failure handling message loop")
self._logger.exception(e)
# NOTE(review): shutdown() on an already-closed socket can raise —
# confirm whether that case is handled by the caller.
self._connection.shutdown(socket.SHUT_RDWR)
self._connection.close()
self._logger.info("Stopping session: {0}".format(self.name)) | python | {
"resource": ""
} |
q270611 | RekeyResponsePayload.read | test | def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
"""
Read the data encoding the Rekey response payload and decode it into
its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be decoded. Optional,
defaults to KMIP 1.0.
Raises:
ValueError: Raised if the unique identifier attribute is missing
from the encoded payload.
"""
super(RekeyResponsePayload, self).read(
input_stream,
kmip_version=kmip_version
)
# Buffer exactly self.length bytes for this payload's fields.
local_stream = utils.BytearrayStream(input_stream.read(self.length))
# The unique identifier is mandatory in a Rekey response.
if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
self._unique_identifier = primitives.TextString(
tag=enums.Tags.UNIQUE_IDENTIFIER
)
self._unique_identifier.read(
local_stream,
kmip_version=kmip_version
)
else:
raise ValueError(
"The Rekey response payload encoding is missing the unique "
"identifier."
)
# The template attribute is optional.
if self.is_tag_next(enums.Tags.TEMPLATE_ATTRIBUTE, local_stream):
self._template_attribute = objects.TemplateAttribute()
self._template_attribute.read(
local_stream,
kmip_version=kmip_version
)
self.is_oversized(local_stream) | python | {
"resource": ""
} |
q270612 | KMIPProxy.is_profile_supported | test | def is_profile_supported(self, conformance_clause, authentication_suite):
"""
Check if a profile is supported by the client.
Args:
conformance_clause (ConformanceClause):
authentication_suite (AuthenticationSuite):
Returns:
bool: True if the profile is supported, False otherwise.
Example:
>>> client.is_profile_supported(
... ConformanceClause.DISCOVER_VERSIONS,
... AuthenticationSuite.BASIC)
True
"""
# A profile is supported only when both its conformance clause and its
# authentication suite are individually supported.
return (self.is_conformance_clause_supported(conformance_clause) and
self.is_authentication_suite_supported(authentication_suite)) | python | {
"resource": ""
} |
q270613 | KMIPProxy.derive_key | test | def derive_key(self,
object_type,
unique_identifiers,
derivation_method,
derivation_parameters,
template_attribute,
credential=None):
"""
Derive a new key or secret data from an existing managed object.
Args:
object_type (ObjectType): An ObjectType enumeration specifying
what type of object to create. Required.
unique_identifiers (list): A list of strings specifying the unique
IDs of the existing managed objects to use for key derivation.
Required.
derivation_method (DerivationMethod): A DerivationMethod
enumeration specifying what key derivation method to use.
Required.
derivation_parameters (DerivationParameters): A
DerivationParameters struct containing the settings and
options to use for key derivation.
template_attribute (TemplateAttribute): A TemplateAttribute struct
containing the attributes to set on the newly derived object.
credential (Credential): A Credential struct containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the derivation operation, containing the
following key/value pairs:
Key | Value
---------------------|-----------------------------------------
'unique_identifier' | (string) The unique ID of the newly
| derived object.
'template_attribute' | (TemplateAttribute) A struct containing
| any attributes set on the newly derived
| object.
'result_status' | (ResultStatus) An enumeration indicating
| the status of the operation result.
'result_reason' | (ResultReason) An enumeration providing
| context for the result status.
'result_message' | (string) A message providing additional
| context for the operation result.
"""
operation = Operation(OperationEnum.DERIVE_KEY)
request_payload = payloads.DeriveKeyRequestPayload(
object_type=object_type,
unique_identifiers=unique_identifiers,
derivation_method=derivation_method,
derivation_parameters=derivation_parameters,
template_attribute=template_attribute
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
# The operation is sent as a single-item batch.
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
# The payload may be absent on failure responses.
if payload:
result['unique_identifier'] = payload.unique_identifier
result['template_attribute'] = payload.template_attribute
result['result_status'] = batch_item.result_status.value
# result_reason / result_message may be None or bare values; fall back
# to the raw attribute when .value is unavailable.
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result | python | {
"resource": ""
} |
q270614 | KMIPProxy.get_attributes | test | def get_attributes(self, uuid=None, attribute_names=None):
"""
Send a GetAttributes request to the server.
Args:
uuid (string): The ID of the managed object with which the
retrieved attributes should be associated. Optional, defaults
to None.
attribute_names (list): A list of AttributeName values indicating
what object attributes the client wants from the server.
Optional, defaults to None.
Returns:
result (GetAttributesResult): A structure containing the results
of the operation.
"""
batch_item = self._build_get_attributes_batch_item(
uuid,
attribute_names
)
# Single-item batch; no credential is attached to this request.
request = self._build_request_message(None, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0] | python | {
"resource": ""
} |
q270615 | KMIPProxy.get_attribute_list | test | def get_attribute_list(self, uid=None):
"""
Send a GetAttributeList request to the server.
Args:
uid (string): The ID of the managed object with which the retrieved
attribute names should be associated.
Returns:
result (GetAttributeListResult): A structure containing the results
of the operation.
"""
batch_item = self._build_get_attribute_list_batch_item(uid)
# Single-item batch; no credential is attached to this request.
request = self._build_request_message(None, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0] | python | {
"resource": ""
} |
q270616 | KMIPProxy.query | test | def query(self, batch=False, query_functions=None, credential=None):
"""
Send a Query request to the server.
Args:
batch (boolean): A flag indicating if the operation should be sent
with a batch of additional operations. Defaults to False.
query_functions (list): A list of QueryFunction enumerations
indicating what information the client wants from the server.
Optional, defaults to None.
credential (Credential): A Credential object containing
authentication information for the server. Optional, defaults
to None.

Returns:
The first processed batch item result, or None when batch=True
(the item is queued instead of sent).
"""
batch_item = self._build_query_batch_item(query_functions)
# TODO (peter-hamilton): Replace this with official client batch mode.
if batch:
# Queue the item for a later combined request; nothing is returned.
self.batch_items.append(batch_item)
else:
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0] | python | {
"resource": ""
} |
q270617 | KMIPProxy.sign | test | def sign(self, data, unique_identifier=None,
cryptographic_parameters=None, credential=None):
"""
Sign specified data using a specified signing key.
Args:
data (bytes): Data to be signed. Required.
unique_identifier (string): The unique ID of the signing
key to be used. Optional, defaults to None.
cryptographic_parameters (CryptographicParameters): A structure
containing various cryptographic settings to be used for
creating the signature. Optional, defaults to None.
credential (Credential): A credential object containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the sign operation, containing the
following key/value pairs:
Key | Value
---------------------|-----------------------------------------
'unique_identifier' | (string) The unique ID of the signing
| key used to create the signature
'signature' | (bytes) The bytes of the signature
'result_status' | (ResultStatus) An enumeration indicating
| the status of the operation result
'result_reason' | (ResultReason) An enumeration providing
| context for the result status.
'result_message' | (string) A message providing additional
| context for the operation result.
"""
operation = Operation(OperationEnum.SIGN)
request_payload = payloads.SignRequestPayload(
unique_identifier=unique_identifier,
cryptographic_parameters=cryptographic_parameters,
data=data
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
# The operation is sent as a single-item batch.
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
# The payload may be absent on failure responses.
if payload:
result['unique_identifier'] = payload.unique_identifier
result['signature'] = payload.signature_data
result['result_status'] = batch_item.result_status.value
# result_reason / result_message may be None or bare values; fall back
# to the raw attribute when .value is unavailable.
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result | python | {
"resource": ""
} |
q270618 | ProxyKmipClient.open | test | def open(self):
"""
Open the client connection.
Raises:
ClientConnectionFailure: if the client connection is already open
Exception: if an error occurs while trying to open the connection
"""
if self._is_open:
raise exceptions.ClientConnectionFailure(
"client connection already open")
else:
try:
self.proxy.open()
# Only mark the connection open after proxy.open() succeeds.
self._is_open = True
except Exception as e:
self.logger.error("could not open client connection: %s", e)
raise | python | {
"resource": ""
} |
q270619 | ProxyKmipClient.close | test | def close(self):
"""
Close the client connection.
Raises:
Exception: if an error occurs while trying to close the connection
"""
# Closing an already-closed connection is a no-op, not an error.
if not self._is_open:
return
else:
try:
self.proxy.close()
# Only mark the connection closed after proxy.close() succeeds.
self._is_open = False
except Exception as e:
self.logger.error("could not close client connection: %s", e)
raise | python | {
"resource": ""
} |
q270620 | ProxyKmipClient.create | test | def create(self, algorithm, length, operation_policy_name=None, name=None,
cryptographic_usage_mask=None):
"""
Create a symmetric key on a KMIP appliance.
Args:
algorithm (CryptographicAlgorithm): An enumeration defining the
algorithm to use to generate the symmetric key.
length (int): The length in bits for the symmetric key.
operation_policy_name (string): The name of the operation policy
to use for the new symmetric key. Optional, defaults to None
name (string): The name to give the key. Optional, defaults to None
cryptographic_usage_mask (list): list of enumerations of crypto
usage mask passing to the symmetric key. Optional, defaults to
None
Returns:
string: The uid of the newly created symmetric key.
Raises:
ClientConnectionNotOpen: if the client connection is unusable
KmipOperationFailure: if the operation result is a failure
TypeError: if the input arguments are invalid
"""
# Check inputs
if not isinstance(algorithm, enums.CryptographicAlgorithm):
raise TypeError(
"algorithm must be a CryptographicAlgorithm enumeration")
elif not isinstance(length, six.integer_types) or length <= 0:
raise TypeError("length must be a positive integer")
if cryptographic_usage_mask is not None:
# Every element of the list must be a CryptographicUsageMask enum.
if not isinstance(cryptographic_usage_mask, list) or \
all(isinstance(item, enums.CryptographicUsageMask)
for item in cryptographic_usage_mask) is False:
raise TypeError(
"cryptographic_usage_mask must be a list of "
"CryptographicUsageMask enumerations")
# Create the template containing the attributes
common_attributes = self._build_common_attributes(
operation_policy_name
)
key_attributes = self._build_key_attributes(
algorithm, length, cryptographic_usage_mask)
key_attributes.extend(common_attributes)
if name:
key_attributes.extend(self._build_name_attribute(name))
template = cobjects.TemplateAttribute(attributes=key_attributes)
# Create the symmetric key and handle the results
result = self.proxy.create(enums.ObjectType.SYMMETRIC_KEY, template)
status = result.result_status.value
if status == enums.ResultStatus.SUCCESS:
return result.uuid
else:
# Surface server-side failures as a single typed exception.
reason = result.result_reason.value
message = result.result_message.value
raise exceptions.KmipOperationFailure(status, reason, message) | python | {
"resource": ""
} |
q270621 | ProxyKmipClient.create_key_pair | test | def create_key_pair(self,
algorithm,
length,
operation_policy_name=None,
public_name=None,
public_usage_mask=None,
private_name=None,
private_usage_mask=None):
"""
Create an asymmetric key pair on a KMIP appliance.
Args:
algorithm (CryptographicAlgorithm): An enumeration defining the
algorithm to use to generate the key pair.
length (int): The length in bits for the key pair.
operation_policy_name (string): The name of the operation policy
to use for the new key pair. Optional, defaults to None.
public_name (string): The name to give the public key. Optional,
defaults to None.
public_usage_mask (list): A list of CryptographicUsageMask
enumerations indicating how the public key should be used.
Optional, defaults to None.
private_name (string): The name to give the public key. Optional,
defaults to None.
private_usage_mask (list): A list of CryptographicUsageMask
enumerations indicating how the private key should be used.
Optional, defaults to None.
Returns:
string: The uid of the newly created public key.
string: The uid of the newly created private key.
Raises:
ClientConnectionNotOpen: if the client connection is unusable
KmipOperationFailure: if the operation result is a failure
TypeError: if the input arguments are invalid
"""
# Check inputs
if not isinstance(algorithm, enums.CryptographicAlgorithm):
raise TypeError(
"algorithm must be a CryptographicAlgorithm enumeration")
elif not isinstance(length, six.integer_types) or length <= 0:
raise TypeError("length must be a positive integer")
# Create the common attributes that are shared
common_attributes = self._build_common_attributes(
operation_policy_name
)
algorithm_attribute = self.attribute_factory.create_attribute(
enums.AttributeType.CRYPTOGRAPHIC_ALGORITHM,
algorithm
)
length_attribute = self.attribute_factory.create_attribute(
enums.AttributeType.CRYPTOGRAPHIC_LENGTH,
length
)
common_attributes.extend([algorithm_attribute, length_attribute])
# Algorithm and length apply to both keys via the common template.
template = cobjects.TemplateAttribute(
attributes=common_attributes,
tag=enums.Tags.COMMON_TEMPLATE_ATTRIBUTE
)
# Create public / private specific attributes
# A per-key template is only built when a name or usage mask was given.
public_template = None
names = None
if public_name:
names = self._build_name_attribute(name=public_name)
attrs = []
if public_usage_mask:
attrs = [
self.attribute_factory.create_attribute(
enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK,
public_usage_mask
)
]
if names or attrs:
public_template = cobjects.TemplateAttribute(
names=names,
attributes=attrs,
tag=enums.Tags.PUBLIC_KEY_TEMPLATE_ATTRIBUTE
)
private_template = None
names = None
if private_name:
names = self._build_name_attribute(name=private_name)
attrs = []
if private_usage_mask:
attrs = [
self.attribute_factory.create_attribute(
enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK,
private_usage_mask
)
]
if names or attrs:
private_template = cobjects.TemplateAttribute(
names=names,
attributes=attrs,
tag=enums.Tags.PRIVATE_KEY_TEMPLATE_ATTRIBUTE
)
# Create the asymmetric key pair and handle the results
result = self.proxy.create_key_pair(
common_template_attribute=template,
private_key_template_attribute=private_template,
public_key_template_attribute=public_template)
status = result.result_status.value
if status == enums.ResultStatus.SUCCESS:
public_uid = result.public_key_uuid
private_uid = result.private_key_uuid
return public_uid, private_uid
else:
# Surface server-side failures as a single typed exception.
reason = result.result_reason.value
message = result.result_message.value
raise exceptions.KmipOperationFailure(status, reason, message) | python | {
"resource": ""
} |
q270622 | ProxyKmipClient.register | test | def register(self, managed_object):
"""
Register a managed object with a KMIP appliance.
Args:
managed_object (ManagedObject): A managed object to register. An
instantiatable subclass of ManagedObject from the Pie API.
Returns:
string: The uid of the newly registered managed object.
Raises:
ClientConnectionNotOpen: if the client connection is unusable
KmipOperationFailure: if the operation result is a failure
TypeError: if the input argument is invalid
"""
# Check input
if not isinstance(managed_object, pobjects.ManagedObject):
raise TypeError("managed object must be a Pie ManagedObject")
# Extract and create attributes
# Each attribute is only translated when the Pie object both defines
# and populates it; hasattr guards cover object types without it.
object_attributes = list()
if hasattr(managed_object, 'cryptographic_usage_masks'):
if managed_object.cryptographic_usage_masks is not None:
mask_attribute = self.attribute_factory.create_attribute(
enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK,
managed_object.cryptographic_usage_masks
)
object_attributes.append(mask_attribute)
if hasattr(managed_object, 'operation_policy_name'):
if managed_object.operation_policy_name is not None:
opn_attribute = self.attribute_factory.create_attribute(
enums.AttributeType.OPERATION_POLICY_NAME,
managed_object.operation_policy_name
)
object_attributes.append(opn_attribute)
if hasattr(managed_object, 'names'):
if managed_object.names:
for name in managed_object.names:
name_attribute = self.attribute_factory.create_attribute(
enums.AttributeType.NAME,
name
)
object_attributes.append(name_attribute)
template = cobjects.TemplateAttribute(attributes=object_attributes)
object_type = managed_object.object_type
# Register the managed object and handle the results
# The Pie object is converted to its core-API equivalent for the wire.
secret = self.object_factory.convert(managed_object)
result = self.proxy.register(object_type, template, secret)
status = result.result_status.value
if status == enums.ResultStatus.SUCCESS:
return result.uuid
else:
# Surface server-side failures as a single typed exception.
reason = result.result_reason.value
message = result.result_message.value
raise exceptions.KmipOperationFailure(status, reason, message) | python | {
"resource": ""
} |
q270623 | ProxyKmipClient.rekey | test | def rekey(self,
uid=None,
offset=None,
**kwargs):
"""
Rekey an existing key.
Args:
uid (string): The unique ID of the symmetric key to rekey.
Optional, defaults to None.
offset (int): The time delta, in seconds, between the new key's
initialization date and activation date. Optional, defaults
to None.
**kwargs (various): A placeholder for object attributes that
should be set on the newly rekeyed key. Currently
supported attributes include:
activation_date (int)
process_start_date (int)
protect_stop_date (int)
deactivation_date (int)
Returns:
string: The unique ID of the newly rekeyed key.
Raises:
ClientConnectionNotOpen: if the client connection is unusable
KmipOperationFailure: if the operation result is a failure
TypeError: if the input arguments are invalid
"""
if uid is not None:
if not isinstance(uid, six.string_types):
raise TypeError("The unique identifier must be a string.")
if offset is not None:
if not isinstance(offset, six.integer_types):
raise TypeError("The offset must be an integer.")
# TODO (peter-hamilton) Unify attribute handling across operations
# Only the recognized date attributes are forwarded; other kwargs are
# silently ignored.
attributes = []
if kwargs.get('activation_date'):
attributes.append(
self.attribute_factory.create_attribute(
enums.AttributeType.ACTIVATION_DATE,
kwargs.get('activation_date')
)
)
if kwargs.get('process_start_date'):
attributes.append(
self.attribute_factory.create_attribute(
enums.AttributeType.PROCESS_START_DATE,
kwargs.get('process_start_date')
)
)
if kwargs.get('protect_stop_date'):
attributes.append(
self.attribute_factory.create_attribute(
enums.AttributeType.PROTECT_STOP_DATE,
kwargs.get('protect_stop_date')
)
)
if kwargs.get('deactivation_date'):
attributes.append(
self.attribute_factory.create_attribute(
enums.AttributeType.DEACTIVATION_DATE,
kwargs.get('deactivation_date')
)
)
template_attribute = cobjects.TemplateAttribute(
attributes=attributes
)
# Derive the new key/data and handle the results
result = self.proxy.rekey(
uuid=uid,
offset=offset,
template_attribute=template_attribute
)
status = result.get('result_status')
if status == enums.ResultStatus.SUCCESS:
return result.get('unique_identifier')
else:
# Surface server-side failures as a single typed exception.
raise exceptions.KmipOperationFailure(
status,
result.get('result_reason'),
result.get('result_message')
) | python | {
"resource": ""
} |
def derive_key(self,
               object_type,
               unique_identifiers,
               derivation_method,
               derivation_parameters,
               **kwargs):
    """
    Derive a new key or secret data from existing managed objects.

    Args:
        object_type (ObjectType): An ObjectType enumeration specifying
            what type of object to derive. Only SymmetricKeys and
            SecretData can be specified. Required.
        unique_identifiers (list): A list of strings specifying the
            unique IDs of the existing managed objects to use for
            derivation. Multiple objects can be specified to fit the
            requirements of the given derivation method. Required.
        derivation_method (DerivationMethod): A DerivationMethod
            enumeration specifying how key derivation should be done.
            Required.
        derivation_parameters (dict): A dictionary containing various
            settings for the key derivation process. See Note below.
            Required.
        **kwargs (various): A placeholder for object attributes that
            should be set on the newly derived object. Currently
            supported attributes include:
                cryptographic_algorithm (enums.CryptographicAlgorithm)
                cryptographic_length (int)
                cryptographic_usage_mask (list)

    Returns:
        string: The unique ID of the newly derived object.

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input arguments are invalid

    Notes:
        The derivation_parameters argument is a dictionary that can
        contain the following key/value pairs:

        Key                        | Value
        ---------------------------|---------------------------------------
        'cryptographic_parameters' | A dictionary containing additional
                                   | cryptographic settings. See the
                                   | decrypt method for more information.
        'initialization_vector'    | Bytes to be used to initialize the key
                                   | derivation function, if needed.
        'derivation_data'          | Bytes to be used as the basis for the
                                   | key derivation process (e.g., the
                                   | bytes to be encrypted, hashed, etc).
        'salt'                     | Bytes to used as a salt value for the
                                   | key derivation function, if needed.
                                   | Usually used with PBKDF2.
        'iteration_count'          | An integer defining how many
                                   | iterations should be used with the key
                                   | derivation function, if needed.
                                   | Usually used with PBKDF2.
    """
    # Check input
    if not isinstance(object_type, enums.ObjectType):
        raise TypeError("Object type must be an ObjectType enumeration.")
    if not isinstance(unique_identifiers, list):
        raise TypeError("Unique identifiers must be a list of strings.")
    for unique_identifier in unique_identifiers:
        if not isinstance(unique_identifier, six.string_types):
            raise TypeError(
                "Unique identifiers must be a list of strings."
            )
    if not isinstance(derivation_method, enums.DerivationMethod):
        raise TypeError(
            "Derivation method must be a DerivationMethod enumeration."
        )
    if not isinstance(derivation_parameters, dict):
        raise TypeError("Derivation parameters must be a dictionary.")

    # Convert the settings dictionary into a DerivationParameters struct.
    derivation_parameters = DerivationParameters(
        cryptographic_parameters=self._build_cryptographic_parameters(
            derivation_parameters.get('cryptographic_parameters')
        ),
        initialization_vector=derivation_parameters.get(
            'initialization_vector'
        ),
        derivation_data=derivation_parameters.get('derivation_data'),
        salt=derivation_parameters.get('salt'),
        iteration_count=derivation_parameters.get('iteration_count')
    )

    # Handle object attributes
    attributes = []
    if kwargs.get('cryptographic_length'):
        attributes.append(
            self.attribute_factory.create_attribute(
                enums.AttributeType.CRYPTOGRAPHIC_LENGTH,
                kwargs.get('cryptographic_length')
            )
        )
    if kwargs.get('cryptographic_algorithm'):
        attributes.append(
            self.attribute_factory.create_attribute(
                enums.AttributeType.CRYPTOGRAPHIC_ALGORITHM,
                kwargs.get('cryptographic_algorithm')
            )
        )
    if kwargs.get('cryptographic_usage_mask'):
        attributes.append(
            self.attribute_factory.create_attribute(
                enums.AttributeType.CRYPTOGRAPHIC_USAGE_MASK,
                kwargs.get('cryptographic_usage_mask')
            )
        )
    template_attribute = cobjects.TemplateAttribute(
        attributes=attributes
    )

    # Derive the new key/data and handle the results
    result = self.proxy.derive_key(
        object_type,
        unique_identifiers,
        derivation_method,
        derivation_parameters,
        template_attribute
    )

    status = result.get('result_status')
    if status == enums.ResultStatus.SUCCESS:
        return result.get('unique_identifier')
    else:
        raise exceptions.KmipOperationFailure(
            status,
            result.get('result_reason'),
            result.get('result_message')
        )
"resource": ""
} |
def locate(self, maximum_items=None, storage_status_mask=None,
           object_group_member=None, attributes=None):
    """
    Search for managed objects, depending on the attributes specified in
    the request.

    Args:
        maximum_items (integer): Maximum number of object identifiers the
            server MAY return.
        storage_status_mask (integer): A bit mask that indicates whether
            on-line or archived objects are to be searched.
        object_group_member (ObjectGroupMember): An enumeration that
            indicates the object group member type.
        attributes (list): Attributes that are REQUIRED to match those in
            a candidate object.

    Returns:
        list: The Unique Identifiers of the located objects

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input arguments are invalid
    """
    # Check inputs
    if maximum_items is not None:
        if not isinstance(maximum_items, six.integer_types):
            raise TypeError("maximum_items must be an integer")
    if storage_status_mask is not None:
        if not isinstance(storage_status_mask, six.integer_types):
            raise TypeError("storage_status_mask must be an integer")
    if object_group_member is not None:
        if not isinstance(object_group_member, enums.ObjectGroupMember):
            # NOTE: the original message was missing a space between
            # "ObjectGroupMember" and "enumeration".
            raise TypeError(
                "object_group_member must be an ObjectGroupMember "
                "enumeration")
    if attributes is not None:
        if not isinstance(attributes, list) or \
                not all(isinstance(item, cobjects.Attribute)
                        for item in attributes):
            raise TypeError(
                "attributes must be a list of attributes")

    # Search for managed objects and handle the results
    result = self.proxy.locate(
        maximum_items, storage_status_mask,
        object_group_member, attributes)

    status = result.result_status.value
    if status == enums.ResultStatus.SUCCESS:
        return result.uuids
    else:
        reason = result.result_reason.value
        message = result.result_message.value
        raise exceptions.KmipOperationFailure(status, reason, message)
"resource": ""
} |
def check(self,
          uid=None,
          usage_limits_count=None,
          cryptographic_usage_mask=None,
          lease_time=None):
    """
    Check the constraints for a managed object.

    Args:
        uid (string): The unique ID of the managed object to check.
            Optional, defaults to None.
        usage_limits_count (int): The number of items that can be secured
            with the specified managed object. Optional, defaults to None.
        cryptographic_usage_mask (list): A list of CryptographicUsageMask
            enumerations specifying the operations possible with the
            specified managed object. Optional, defaults to None.
        lease_time (int): The number of seconds that can be leased for the
            specified managed object. Optional, defaults to None.

    Returns:
        string: The unique ID of the checked managed object.

    Raises:
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input arguments are invalid
    """
    # Validate every argument before building the request.
    if uid is not None and not isinstance(uid, six.string_types):
        raise TypeError("The unique identifier must be a string.")
    if usage_limits_count is not None and \
            not isinstance(usage_limits_count, six.integer_types):
        raise TypeError("The usage limits count must be an integer.")
    if cryptographic_usage_mask is not None:
        masks_valid = isinstance(cryptographic_usage_mask, list) and all(
            isinstance(mask, enums.CryptographicUsageMask)
            for mask in cryptographic_usage_mask
        )
        if not masks_valid:
            raise TypeError(
                "The cryptographic usage mask must be a list of "
                "CryptographicUsageMask enumerations."
            )
    if lease_time is not None and \
            not isinstance(lease_time, six.integer_types):
        raise TypeError("The lease time must be an integer.")

    # Issue the check request and unpack the operation results.
    result = self.proxy.check(
        uid,
        usage_limits_count,
        cryptographic_usage_mask,
        lease_time
    )

    if result.get('result_status') == enums.ResultStatus.SUCCESS:
        return result.get('unique_identifier')
    raise exceptions.KmipOperationFailure(
        result.get('result_status'),
        result.get('result_reason'),
        result.get('result_message')
    )
"resource": ""
} |
def get(self, uid=None, key_wrapping_specification=None):
    """
    Get a managed object from a KMIP appliance.

    Args:
        uid (string): The unique ID of the managed object to retrieve.
        key_wrapping_specification (dict): A dictionary containing various
            settings to be used when wrapping the key during retrieval.
            See Note below. Optional, defaults to None.

    Returns:
        ManagedObject: The retrieved managed object.

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input argument is invalid

    Notes:
        The key_wrapping_specification argument is a dictionary that can
        contain the following key/value pairs:

        Key                             | Value
        --------------------------------|---------------------------------
        'wrapping_method'               | A WrappingMethod enumeration
                                        | that specifies how the object
                                        | should be wrapped.
        'encryption_key_information'    | A dictionary containing the ID
                                        | of the wrapping key and
                                        | associated cryptographic
                                        | parameters.
        'mac_signature_key_information' | A dictionary containing the ID
                                        | of the wrapping key and
                                        | associated cryptographic
                                        | parameters.
        'attribute_names'               | A list of strings representing
                                        | the names of attributes that
                                        | should be included with the
                                        | wrapped object.
        'encoding_option'               | An EncodingOption enumeration
                                        | that specifies the encoding of
                                        | the object before it is wrapped.
    """
    # Check input
    if uid is not None:
        if not isinstance(uid, six.string_types):
            raise TypeError("uid must be a string")
    # NOTE: spec must default to None; the original code only bound it
    # inside the branch below, raising UnboundLocalError whenever no
    # key wrapping specification was supplied.
    spec = None
    if key_wrapping_specification is not None:
        if not isinstance(key_wrapping_specification, dict):
            raise TypeError(
                "Key wrapping specification must be a dictionary."
            )
        spec = self._build_key_wrapping_specification(
            key_wrapping_specification
        )

    # Get the managed object and handle the results
    result = self.proxy.get(uid, key_wrapping_specification=spec)

    status = result.result_status.value
    if status == enums.ResultStatus.SUCCESS:
        managed_object = self.object_factory.convert(result.secret)
        return managed_object
    else:
        reason = result.result_reason.value
        message = result.result_message.value
        raise exceptions.KmipOperationFailure(status, reason, message)
"resource": ""
} |
def get_attributes(self, uid=None, attribute_names=None):
    """
    Get the attributes associated with a managed object.

    If the uid is not specified, the appliance will use the ID placeholder
    by default.

    If the attribute_names list is not specified, the appliance will
    return all viable attributes for the managed object.

    Args:
        uid (string): The unique ID of the managed object with which the
            retrieved attributes should be associated. Optional, defaults
            to None.
        attribute_names (list): A list of string attribute names
            indicating which attributes should be retrieved. Optional,
            defaults to None.
    """
    # Validate inputs before issuing the request.
    if uid is not None and not isinstance(uid, six.string_types):
        raise TypeError("uid must be a string")
    if attribute_names is not None:
        names_valid = isinstance(attribute_names, list) and all(
            isinstance(name, six.string_types) for name in attribute_names
        )
        if not names_valid:
            raise TypeError("attribute_names must be a list of strings")

    # Retrieve the attribute list and unpack the operation results.
    result = self.proxy.get_attributes(uid, attribute_names)

    status = result.result_status.value
    if status != enums.ResultStatus.SUCCESS:
        raise exceptions.KmipOperationFailure(
            status,
            result.result_reason.value,
            result.result_message.value
        )
    return result.uuid, result.attributes
"resource": ""
} |
def activate(self, uid=None):
    """
    Activate a managed object stored by a KMIP appliance.

    Args:
        uid (string): The unique ID of the managed object to activate.
            Optional, defaults to None.

    Returns:
        None

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input argument is invalid
    """
    # Validate input before issuing the request.
    if uid is not None and not isinstance(uid, six.string_types):
        raise TypeError("uid must be a string")

    # Activate the managed object and unpack the operation results.
    result = self.proxy.activate(uid)

    status = result.result_status.value
    if status != enums.ResultStatus.SUCCESS:
        raise exceptions.KmipOperationFailure(
            status,
            result.result_reason.value,
            result.result_message.value
        )
"resource": ""
} |
def revoke(self, revocation_reason, uid=None, revocation_message=None,
           compromise_occurrence_date=None):
    """
    Revoke a managed object stored by a KMIP appliance.

    Args:
        revocation_reason (RevocationReasonCode): An enumeration indicating
            the revocation reason.
        uid (string): The unique ID of the managed object to revoke.
            Optional, defaults to None.
        revocation_message (string): A message regarding the revocation.
            Optional, defaults to None.
        compromise_occurrence_date (int): An integer, the number of seconds
            since the epoch, which will be converted to the Datetime when
            the managed object was first believed to be compromised.
            Optional, defaults to None.

    Returns:
        None

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input argument is invalid
    """
    # Validate inputs before building the request.
    if not isinstance(revocation_reason, enums.RevocationReasonCode):
        raise TypeError(
            "revocation_reason must be a RevocationReasonCode enumeration")
    if uid is not None and not isinstance(uid, six.string_types):
        raise TypeError("uid must be a string")
    if revocation_message is not None and \
            not isinstance(revocation_message, six.string_types):
        raise TypeError("revocation_message must be a string")
    if compromise_occurrence_date is not None:
        if not isinstance(compromise_occurrence_date, six.integer_types):
            raise TypeError(
                "compromise_occurrence_date must be an integer")
        # Wrap the epoch timestamp in a KMIP DateTime primitive.
        compromise_occurrence_date = primitives.DateTime(
            compromise_occurrence_date,
            enums.Tags.COMPROMISE_OCCURRENCE_DATE)

    # Revoke the managed object and unpack the operation results.
    result = self.proxy.revoke(revocation_reason, uid, revocation_message,
                               compromise_occurrence_date)

    status = result.result_status.value
    if status != enums.ResultStatus.SUCCESS:
        raise exceptions.KmipOperationFailure(
            status,
            result.result_reason.value,
            result.result_message.value
        )
"resource": ""
} |
def mac(self, data, uid=None, algorithm=None):
    """
    Get the message authentication code for data.

    Args:
        data (string): The data to be MACed.
        uid (string): The unique ID of the managed object that is the key
            to use for the MAC operation.
        algorithm (CryptographicAlgorithm): An enumeration defining the
            algorithm to use to generate the MAC.

    Returns:
        string: The unique ID of the managed object that is the key
            to use for the MAC operation.
        string: The data MACed

    Raises:
        ClientConnectionNotOpen: if the client connection is unusable
        KmipOperationFailure: if the operation result is a failure
        TypeError: if the input arguments are invalid
    """
    # Validate inputs before building the request.
    if not isinstance(data, six.binary_type):
        raise TypeError("data must be bytes")
    if uid is not None and not isinstance(uid, six.string_types):
        raise TypeError("uid must be a string")
    if algorithm is not None and \
            not isinstance(algorithm, enums.CryptographicAlgorithm):
        raise TypeError(
            "algorithm must be a CryptographicAlgorithm enumeration")

    # The algorithm rides along inside a CryptographicParameters struct.
    parameters_attribute = self._build_cryptographic_parameters(
        {'cryptographic_algorithm': algorithm}
    )

    # Get the message authentication code and unpack the results.
    result = self.proxy.mac(data, uid, parameters_attribute)

    status = result.result_status.value
    if status != enums.ResultStatus.SUCCESS:
        raise exceptions.KmipOperationFailure(
            status,
            result.result_reason.value,
            result.result_message.value
        )
    return result.uuid.value, result.mac_data.value
"resource": ""
} |
def _build_cryptographic_parameters(self, value):
    """
    Build a CryptographicParameters struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for a
            CryptographicParameters struct.

    Returns:
        None: if value is None
        CryptographicParameters: a CryptographicParameters struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        raise TypeError("Cryptographic parameters must be a dictionary.")

    # Pull each supported field out of the dictionary; keys that are
    # absent simply default to None on the resulting struct.
    field_names = (
        'block_cipher_mode',
        'padding_method',
        'hashing_algorithm',
        'key_role_type',
        'digital_signature_algorithm',
        'cryptographic_algorithm',
        'random_iv',
        'iv_length',
        'tag_length',
        'fixed_field_length',
        'invocation_field_length',
        'counter_length',
        'initial_counter_value'
    )
    kwargs = {name: value.get(name) for name in field_names}
    return CryptographicParameters(**kwargs)
"resource": ""
} |
def _build_encryption_key_information(self, value):
    """
    Build an EncryptionKeyInformation struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for a
            EncryptionKeyInformation struct.

    Returns:
        EncryptionKeyInformation: an EncryptionKeyInformation struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        raise TypeError("Encryption key information must be a dictionary.")

    # Expand any nested cryptographic parameters dictionary first.
    params = value.get('cryptographic_parameters')
    if params:
        params = self._build_cryptographic_parameters(params)

    return cobjects.EncryptionKeyInformation(
        unique_identifier=value.get('unique_identifier'),
        cryptographic_parameters=params
    )
"resource": ""
} |
def _build_mac_signature_key_information(self, value):
    """
    Build an MACSignatureKeyInformation struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for a
            MACSignatureKeyInformation struct.

    Returns:
        MACSignatureInformation: a MACSignatureKeyInformation struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        raise TypeError(
            "MAC/signature key information must be a dictionary."
        )

    # Expand any nested cryptographic parameters dictionary first.
    params = value.get('cryptographic_parameters')
    if params:
        params = self._build_cryptographic_parameters(params)

    return cobjects.MACSignatureKeyInformation(
        unique_identifier=value.get('unique_identifier'),
        cryptographic_parameters=params
    )
"resource": ""
} |
def _build_key_wrapping_specification(self, value):
    """
    Build a KeyWrappingSpecification struct from a dictionary.

    Args:
        value (dict): A dictionary containing the key/value pairs for a
            KeyWrappingSpecification struct.

    Returns:
        KeyWrappingSpecification: a KeyWrappingSpecification struct

    Raises:
        TypeError: if the input argument is invalid
    """
    if value is None:
        return None
    if not isinstance(value, dict):
        raise TypeError("Key wrapping specification must be a dictionary.")

    # Build the nested key-information structs, then the wrapper.
    return cobjects.KeyWrappingSpecification(
        wrapping_method=value.get('wrapping_method'),
        encryption_key_information=self._build_encryption_key_information(
            value.get('encryption_key_information')
        ),
        mac_signature_key_information=(
            self._build_mac_signature_key_information(
                value.get('mac_signature_key_information')
            )
        ),
        attribute_names=value.get('attribute_names'),
        encoding_option=value.get('encoding_option')
    )
"resource": ""
} |
def _build_common_attributes(self, operation_policy_name=None):
    """
    Build a list of common attributes that are shared across symmetric
    as well as asymmetric objects.
    """
    if not operation_policy_name:
        return []
    return [
        self.attribute_factory.create_attribute(
            enums.AttributeType.OPERATION_POLICY_NAME,
            operation_policy_name
        )
    ]
"resource": ""
} |
def _build_name_attribute(self, name=None):
    """
    Build a name attribute, returned in a list for ease of use in the
    caller.
    """
    if not name:
        return []
    return [
        self.attribute_factory.create_attribute(
            enums.AttributeType.NAME,
            name
        )
    ]
"resource": ""
} |
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """
    Read the data encoding the QueryRequestPayload object and decode it
    into its constituent parts.

    Args:
        input_buffer (Stream): A data stream containing encoded object
            data, supporting a read method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.

    Raises:
        InvalidKmipEncoding: Raised if the query functions are missing
            from the encoded payload.
    """
    super(QueryRequestPayload, self).read(
        input_buffer,
        kmip_version=kmip_version
    )
    local_buffer = utils.BytearrayStream(input_buffer.read(self.length))

    # Decode every QueryFunction enumeration present in the payload;
    # at least one must be present.
    query_functions = []
    while self.is_tag_next(enums.Tags.QUERY_FUNCTION, local_buffer):
        query_function = primitives.Enumeration(
            enums.QueryFunction,
            tag=enums.Tags.QUERY_FUNCTION
        )
        query_function.read(local_buffer, kmip_version=kmip_version)
        query_functions.append(query_function)

    if not query_functions:
        raise exceptions.InvalidKmipEncoding(
            "The Query request payload encoding is missing the query "
            "functions."
        )
    self._query_functions = query_functions

    self.is_oversized(local_buffer)
"resource": ""
} |
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """
    Write the data encoding the QueryRequestPayload object to a stream.

    Args:
        output_buffer (Stream): A data stream in which to encode object
            data, supporting a write method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.

    Raises:
        InvalidField: Raised if the query functions are not defined.
    """
    local_buffer = utils.BytearrayStream()

    # The query functions are required; refuse to encode without them.
    if not self._query_functions:
        raise exceptions.InvalidField(
            "The Query request payload is missing the query functions "
            "field."
        )
    for query_function in self._query_functions:
        query_function.write(local_buffer, kmip_version=kmip_version)

    # Encode the payload header last, once the body length is known.
    self.length = local_buffer.length()
    super(QueryRequestPayload, self).write(
        output_buffer,
        kmip_version=kmip_version
    )
    output_buffer.write(local_buffer.buffer)
"resource": ""
} |
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """
    Write the data encoding the QueryResponsePayload object to a stream.

    Fields are encoded in specification order, with later fields only
    emitted for the KMIP versions that define them.

    Args:
        output_buffer (Stream): A data stream in which to encode object
            data, supporting a write method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    payload_buffer = utils.BytearrayStream()

    def write_each(items):
        # Encode every struct in the (possibly None) list, in order.
        for item in items or []:
            item.write(payload_buffer, kmip_version=kmip_version)

    # Fields available in every KMIP version.
    write_each(self._operations)
    write_each(self._object_types)
    if self._vendor_identification:
        self._vendor_identification.write(
            payload_buffer,
            kmip_version=kmip_version
        )
    if self._server_information:
        self._server_information.write(
            payload_buffer,
            kmip_version=kmip_version
        )
    write_each(self._application_namespaces)

    # Version-gated fields, introduced incrementally from KMIP 1.1 on.
    if kmip_version >= enums.KMIPVersion.KMIP_1_1:
        write_each(self._extension_information)
    if kmip_version >= enums.KMIPVersion.KMIP_1_2:
        write_each(self._attestation_types)
    if kmip_version >= enums.KMIPVersion.KMIP_1_3:
        write_each(self._rng_parameters)
        write_each(self._profile_information)
        write_each(self._validation_information)
        write_each(self._capability_information)
        write_each(self._client_registration_methods)
    if kmip_version >= enums.KMIPVersion.KMIP_2_0:
        if self._defaults_information:
            self._defaults_information.write(
                payload_buffer,
                kmip_version=kmip_version
            )
        write_each(self._storage_protection_masks)

    # Encode the payload header last, once the body length is known.
    self.length = payload_buffer.length()
    super(QueryResponsePayload, self).write(
        output_buffer,
        kmip_version=kmip_version
    )
    output_buffer.write(payload_buffer.buffer)
"resource": ""
} |
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """
    Read the data encoding the GetAttributes response payload and decode
    it into its constituent parts.

    Args:
        input_buffer (stream): A data stream containing encoded object
            data, supporting a read method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(GetAttributesResponsePayload, self).read(
        input_buffer,
        kmip_version=kmip_version
    )
    local_buffer = utils.BytearrayStream(input_buffer.read(self.length))

    # The unique identifier is mandatory in every KMIP version.
    if not self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_buffer):
        raise exceptions.InvalidKmipEncoding(
            "The GetAttributes response payload encoding is missing the "
            "unique identifier."
        )
    unique_identifier = primitives.TextString(
        tag=enums.Tags.UNIQUE_IDENTIFIER
    )
    unique_identifier.read(local_buffer, kmip_version=kmip_version)
    self.unique_identifier = unique_identifier.value

    if kmip_version < enums.KMIPVersion.KMIP_2_0:
        # Pre-2.0 payloads carry a flat sequence of Attribute structs.
        attributes = list()
        while self.is_tag_next(enums.Tags.ATTRIBUTE, local_buffer):
            attribute = objects.Attribute()
            attribute.read(local_buffer, kmip_version=kmip_version)
            attributes.append(attribute)
        self._attributes = attributes
    else:
        # KMIP 2.0+ wraps everything in a single Attributes struct.
        if not self.is_tag_next(enums.Tags.ATTRIBUTES, local_buffer):
            raise exceptions.InvalidKmipEncoding(
                "The GetAttributes response payload encoding is missing "
                "the attributes structure."
            )
        attributes = objects.Attributes()
        attributes.read(local_buffer, kmip_version=kmip_version)
        # TODO (ph) Add a new utility to avoid using TemplateAttributes
        temp_attr = objects.convert_attributes_to_template_attribute(
            attributes
        )
        self._attributes = temp_attr.attributes

    self.is_oversized(local_buffer)
"resource": ""
} |
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """
    Write the data encoding the GetAttributes response payload to a
    stream.

    Args:
        output_buffer (stream): A data stream in which to encode object
            data, supporting a write method; usually a BytearrayStream
            object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be encoded. Optional,
            defaults to KMIP 1.0.
    """
    local_buffer = utils.BytearrayStream()

    # The unique identifier is required in every KMIP version.
    if not self._unique_identifier:
        raise exceptions.InvalidField(
            "The GetAttributes response payload is missing the unique "
            "identifier field."
        )
    self._unique_identifier.write(
        local_buffer,
        kmip_version=kmip_version
    )

    if kmip_version < enums.KMIPVersion.KMIP_2_0:
        # Pre-2.0 payloads carry a flat sequence of Attribute structs.
        for attribute in self._attributes:
            attribute.write(local_buffer, kmip_version=kmip_version)
    else:
        # KMIP 2.0+ wraps everything in a single Attributes struct.
        if not self._attributes:
            raise exceptions.InvalidField(
                "The GetAttributes response payload is missing the "
                "attributes list."
            )
        # TODO (ph) Add a new utility to avoid using TemplateAttributes
        template_attribute = objects.TemplateAttribute(
            attributes=self.attributes
        )
        attributes = objects.convert_template_attribute_to_attributes(
            template_attribute
        )
        attributes.write(local_buffer, kmip_version=kmip_version)

    # Encode the payload header last, once the body length is known.
    self.length = local_buffer.length()
    super(GetAttributesResponsePayload, self).write(
        output_buffer,
        kmip_version=kmip_version
    )
    output_buffer.write(local_buffer.buffer)
"resource": ""
} |
def get_single(group, name, path=None):
    """Find a single entry point.

    Returns an :class:`EntryPoint` object, or raises :exc:`NoSuchEntryPoint`
    if no match is found.
    """
    # Scan config files in precedence order; first match wins.
    for config, distro in iter_files_distros(path=path):
        try:
            epstr = config[group][name]
        except KeyError:
            continue
        with BadEntryPoint.err_to_warnings():
            return EntryPoint.from_string(epstr, name, distro)

    raise NoSuchEntryPoint(group, name)
"resource": ""
} |
def get_group_named(group, path=None):
    """Find a group of entry points with unique names.

    Returns a dictionary of names to :class:`EntryPoint` objects.
    """
    # The first occurrence of each name wins, preserving path precedence.
    named = {}
    for ep in get_group_all(group, path=path):
        named.setdefault(ep.name, ep)
    return named
} |
def get_group_all(group, path=None):
    """Find all entry points in a group.

    Returns a list of :class:`EntryPoint` objects.
    """
    found = []
    for config, distro in iter_files_distros(path=path):
        if group not in config:
            continue
        for name, epstr in config[group].items():
            with BadEntryPoint.err_to_warnings():
                found.append(EntryPoint.from_string(epstr, name, distro))
    return found
"resource": ""
} |
def load(self):
    """Load and return the object to which this entry point refers."""
    target = import_module(self.module_name)
    if self.object_name:
        # Walk the dotted attribute path, e.g. "Class.method".
        for part in self.object_name.split('.'):
            target = getattr(target, part)
    return target
"resource": ""
} |
def from_string(cls, epstr, name, distro=None):
    """Parse an entry point from the syntax in entry_points.txt

    :param str epstr: The entry point string (not including 'name =')
    :param str name: The name of this entry point
    :param Distribution distro: The distribution in which the entry point was found
    :rtype: EntryPoint
    :raises BadEntryPoint: if *epstr* can't be parsed as an entry point.
    """
    match = entry_point_pattern.match(epstr)
    if not match:
        raise BadEntryPoint(epstr)

    module_name = match.group('modulename')
    object_name = match.group('objectname')
    extras = match.group('extras')
    if extras is not None:
        # Split the comma-separated extras list into individual names.
        extras = re.split(r',\s*', extras)
    return cls(name, module_name, object_name, extras, distro)
"resource": ""
} |
def live():
    """Run a livereload development server for the app."""
    from livereload import Server
    server = Server(app)
    # NOTE: map() is lazy on Python 3, so the original
    # map(server.watch, ...) calls never actually registered any watch
    # paths. Use explicit loops so every path is watched.
    for filepath in glob2.glob("application/pages/**/*.*"):  # pages
        server.watch(filepath)
    for filepath in glob2.glob("application/macros/**/*.html"):  # macros
        server.watch(filepath)
    for filepath in glob2.glob("application/static/**/*.*"):  # public assets
        server.watch(filepath)
    server.serve(port=PORT)
"resource": ""
} |
q270649 | generate_project | test | def generate_project(args):
"""Create a new project from the bundled templates.

Reads the project name from ``args['<project>']``, copies the ``project``
template tree that ships next to this module into ``./<project>``, and
rewrites ``#{project}`` placeholders in known text files along the way.
"""
# Project templates path
src = os.path.join(dirname(abspath(__file__)), 'project')
project_name = args.get('<project>')
if not project_name:
logger.warning('Project name cannot be empty.')
return
# Destination project path
dst = os.path.join(os.getcwd(), project_name)
if os.path.isdir(dst):
logger.warning('Project directory already exists.')
return
logger.info('Start generating project files.')
_mkdir_p(dst)
for src_dir, sub_dirs, filenames in os.walk(src):
# Build and create destination directory path
relative_path = src_dir.split(src)[1].lstrip(os.path.sep)
dst_dir = os.path.join(dst, relative_path)
if src != src_dir:
_mkdir_p(dst_dir)
# Copy, rewrite and move project files
for filename in filenames:
# development.py / production.py are produced from their *_sample
# counterparts below, so skip them here.
if filename in ['development.py', 'production.py']:
continue
src_file = os.path.join(src_dir, filename)
dst_file = os.path.join(dst_dir, filename)
# Text files get placeholder substitution; others are copied verbatim.
if filename.endswith(REWRITE_FILE_EXTS):
_rewrite_and_copy(src_file, dst_file, project_name)
else:
shutil.copy(src_file, dst_file)
logger.info("New: %s" % dst_file)
# Also materialize development.py / production.py from the samples.
if filename in ['development_sample.py', 'production_sample.py']:
dst_file = os.path.join(dst_dir, "%s.py" % filename.split('_')[0])
_rewrite_and_copy(src_file, dst_file, project_name)
logger.info("New: %s" % dst_file)
logger.info('Finish generating project files.') | python | {
"resource": ""
} |
q270650 | generate_controller | test | def generate_controller(args):
"""Generate a controller named ``args['<controller>']``.

Creates the controller module, a matching unit-test file, the per-page
assets directory, and a form module.
"""
controller_template = os.path.join(dirname(abspath(__file__)), 'templates/controller.py')
test_template = os.path.join(dirname(abspath(__file__)), 'templates/unittest.py')
controller_name = args.get('<controller>')
current_path = os.getcwd()
logger.info('Start generating controller.')
if not controller_name:
logger.warning('Controller name cannot be empty.')
return
# controller file: copy the template, substituting the controller name.
with open(controller_template, 'r') as template_file:
controller_file_path = os.path.join(current_path, 'application/controllers',
controller_name + '.py')
with open(controller_file_path, 'w+') as controller_file:
for line in template_file:
new_line = line.replace('#{controller}', controller_name)
controller_file.write(new_line)
logger.info("New: %s" % _relative_path(controller_file_path))
# test file: substitutes both the raw and title-cased controller name.
with open(test_template, 'r') as template_file:
test_file_path = os.path.join(current_path, 'tests',
'test_%s.py' % controller_name)
with open(test_file_path, 'w+') as test_file:
for line in template_file:
new_line = line.replace('#{controller}', controller_name) \
.replace('#{controller|title}', controller_name.title())
test_file.write(new_line)
logger.info("New: %s" % _relative_path(test_file_path))
# assets dir
assets_dir_path = os.path.join(current_path, 'application/pages/%s' % controller_name)
_mkdir_p(assets_dir_path)
# form file
_generate_form(controller_name)
logger.info('Finish generating controller.') | python | {
"resource": ""
} |
q270651 | generate_action | test | def generate_action(args):
"""Append an action to an existing controller.

With the ``-t`` flag also scaffolds the action's template assets
(html/js/less under ``application/pages/<controller>/<action>``).
"""
controller = args.get('<controller>')
action = args.get('<action>')
with_template = args.get('-t')
current_path = os.getcwd()
logger.info('Start generating action.')
controller_file_path = os.path.join(current_path, 'application/controllers', controller + '.py')
if not os.path.exists(controller_file_path):
logger.warning("The controller %s does't exist." % controller)
return
# Pick the action template variant depending on whether a page template
# should be rendered by the new action.
if with_template:
action_source_path = os.path.join(dirname(abspath(__file__)), 'templates/action.py')
else:
action_source_path = os.path.join(dirname(abspath(__file__)), 'templates/action_without_template.py')
# Add action source codes
with open(action_source_path, 'r') as action_source_file:
with open(controller_file_path, 'a') as controller_file:
for action_line in action_source_file:
new_line = action_line.replace('#{controller}', controller). \
replace('#{action}', action)
controller_file.write(new_line)
logger.info("Updated: %s" % _relative_path(controller_file_path))
if with_template:
# assets dir
assets_dir_path = os.path.join(current_path, 'application/pages/%s/%s' % (controller, action))
_mkdir_p(assets_dir_path)
# html: copy the template, substituting controller/action placeholders.
action_html_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.html')
action_html_path = os.path.join(assets_dir_path, '%s.html' % action)
with open(action_html_template_path, 'r') as action_html_template_file:
with open(action_html_path, 'w') as action_html_file:
for line in action_html_template_file:
new_line = line.replace('#{action}', action) \
.replace('#{action|title}', action.title()) \
.replace('#{controller}', controller)
action_html_file.write(new_line)
logger.info("New: %s" % _relative_path(action_html_path))
# js template is copied verbatim (no placeholders substituted).
action_js_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.js')
action_js_path = os.path.join(assets_dir_path, '%s.js' % action)
shutil.copy(action_js_template_path, action_js_path)
logger.info("New: %s" % _relative_path(action_js_path))
# less template is copied verbatim as well.
action_less_template_path = os.path.join(dirname(abspath(__file__)), 'templates/action.less')
action_less_path = os.path.join(assets_dir_path, '%s.less' % action)
shutil.copy(action_less_template_path, action_less_path)
logger.info("New: %s" % _relative_path(action_less_path))
logger.info('Finish generating action.') | python | {
"resource": ""
} |
# q270652 | generate_form | test
def generate_form(args):
    """Generate a form module for ``args['<form>']``."""
    logger.info('Start generating form.')
    _generate_form(args.get('<form>'))
    logger.info('Finish generating form.')
"resource": ""
} |
q270653 | generate_model | test | def generate_model(args):
"""Generate a model module named ``args['<model>']`` and re-export it
from the ``application.models`` package."""
model_name = args.get('<model>')
if not model_name:
logger.warning('Model name cannot be empty.')
return
logger.info('Start generating model.')
model_template = os.path.join(dirname(abspath(__file__)), 'templates/model.py')
current_path = os.getcwd()
# Copy the template, substituting the title-cased model name.
with open(model_template, 'r') as template_file:
model_file_path = os.path.join(current_path, 'application/models',
model_name + '.py')
with open(model_file_path, 'w+') as model_file:
for line in template_file:
new_line = line.replace('#{model|title}', model_name.title())
model_file.write(new_line)
logger.info("New: %s" % _relative_path(model_file_path))
# Make the new model importable via the models package's star import.
with open(os.path.join(current_path, 'application/models/__init__.py'), 'a') as package_file:
package_file.write('\nfrom .%s import *' % model_name)
logger.info('Finish generating model.') | python | {
"resource": ""
} |
q270654 | generate_macro | test | def generate_macro(args):
"""Generate a macro: an html/less/js file triple under
``application/macros``, optionally nested inside ``args['<category>']``.
Dashes in the macro name are normalized to underscores."""
macro = args.get('<macro>').replace('-', '_')
category = args.get('<category>')
if not macro:
logger.warning('Macro name cannot be empty.')
return
logger.info('Start generating macro.')
current_path = os.getcwd()
if category:
macro_root_path = os.path.join(current_path, 'application/macros', category, macro)
else:
macro_root_path = os.path.join(current_path, 'application/macros', macro)
_mkdir_p(macro_root_path)
macro_html_path = os.path.join(macro_root_path, '_%s.html' % macro)
macro_css_path = os.path.join(macro_root_path, '_%s.less' % macro)
macro_js_path = os.path.join(macro_root_path, '_%s.js' % macro)
# html: copy the template, substituting the macro name.
macro_html_template_path = os.path.join(dirname(abspath(__file__)), 'templates/macro.html')
with open(macro_html_template_path, 'r') as template_file:
with open(macro_html_path, 'w+') as html_file:
for line in template_file:
new_line = line.replace('#{macro}', macro)
html_file.write(new_line)
logger.info("New: %s" % _relative_path(macro_html_path))
# css: just touch an empty file.
open(macro_css_path, 'a').close()
logger.info("New: %s" % _relative_path(macro_css_path))
# js: just touch an empty file.
open(macro_js_path, 'a').close()
logger.info("New: %s" % _relative_path(macro_js_path))
logger.info('Finish generating macro.') | python | {
"resource": ""
} |
# q270655 | _mkdir_p | test
def _mkdir_p(path):
    """Create ``path`` like ``mkdir -p``: no error if it already exists."""
    try:
        os.makedirs(path)
    except OSError as exc:
        # Only swallow "already exists" for a directory; re-raise anything else.
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
    else:
        logger.info("New: %s%s", path, os.path.sep)
"resource": ""
} |
q270656 | _rewrite_and_copy | test | def _rewrite_and_copy(src_file, dst_file, project_name):
"""Replace vars and copy."""
# Create temp file
fh, abs_path = mkstemp()
with io.open(abs_path, 'w', encoding='utf-8') as new_file:
with io.open(src_file, 'r', encoding='utf-8') as old_file:
for line in old_file:
new_line = line.replace('#{project}', project_name). \
replace('#{project|title}', project_name.title())
new_file.write(new_line)
# Copy to new file
shutil.copy(abs_path, dst_file)
os.close(fh) | python | {
"resource": ""
} |
# q270657 | timesince | test
def timesince(value):
    """Render a datetime as a coarse, human-friendly "time ago" string.

    Falsy and non-date values pass through unchanged; future dates render
    as "right now".
    """
    if not value:
        return ""
    if not isinstance(value, datetime.date):
        return value
    now = datetime.datetime.now()
    gap = now - value
    if value > now:
        return "right now"
    if gap.days > 365:
        return '%d years ago' % (gap.days // 365)
    if gap.days > 30:
        return '%d months ago' % (gap.days // 30)
    if gap.days > 0:
        return '%d days ago' % gap.days
    if gap.seconds > 3600:
        return '%d hours ago' % (gap.seconds // 3600)
    if gap.seconds > 60:
        return '%d minutes ago' % (gap.seconds // 60)
    return 'right now'
"resource": ""
} |
# q270658 | check_url | test
def check_url(form, field):
    """Form validator: prepend ``http://`` when the URL scheme is missing."""
    raw = field.data.strip()
    if not raw:
        return
    if urlparse(raw).scheme:
        return
    # Strip any leading ':' and '/' runs before prefixing the scheme.
    field.data = "http://%s" % re.sub(r'^:?/*', '', raw)
"resource": ""
} |
# q270659 | encode | test
def encode(something):
    """Serialize ``something`` into a URL-safe token using SECRET_KEY."""
    serializer = URLSafeSerializer(current_app.config.get('SECRET_KEY'))
    return serializer.dumps(something)
"resource": ""
} |
# q270660 | decode | test
def decode(something):
    """Inverse of :func:`encode`; returns None when the signature is invalid."""
    serializer = URLSafeSerializer(current_app.config.get('SECRET_KEY'))
    try:
        return serializer.loads(something)
    except BadSignature:
        return None
"resource": ""
} |
# q270661 | jsonify | test
def jsonify(func):
    """Decorator turning a view's return value into a JSON response.

    The wrapped view may return either ``data`` or a ``(status_code, data)``
    tuple; the status defaults to 200.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        result = func(*args, **kwargs)
        code, data = result if isinstance(result, tuple) else (200, result)
        return Response(json.dumps(data), status=code, mimetype='application/json')
    return wrapper
"resource": ""
} |
# q270662 | absolute_url_for | test
def absolute_url_for(endpoint, **values):
    """Like :func:`url_for` but fully qualified with the SITE_DOMAIN config."""
    site_domain = current_app.config.get('SITE_DOMAIN')
    return join_url(site_domain, url_for(endpoint, **values))
"resource": ""
} |
# q270663 | load_config | test
def load_config():
    """Pick the config class based on the MODE environment variable.

    Falls back to the default Config when the mode-specific module
    cannot be imported.
    """
    mode = os.environ.get('MODE')
    try:
        if mode == 'PRODUCTION':
            from .production import ProductionConfig
            return ProductionConfig
        if mode == 'TESTING':
            from .testing import TestingConfig
            return TestingConfig
        from .development import DevelopmentConfig
        return DevelopmentConfig
    except ImportError:
        from .default import Config
        return Config
"resource": ""
} |
# q270664 | signin_user | test
def signin_user(user, permenent=True):
    """Store the user's id in the session, marking it permanent by default."""
    session['user_id'] = user.id
    session.permanent = permenent
"resource": ""
} |
# q270665 | get_current_user | test
def get_current_user():
    """Return the signed-in User, or None (clearing a stale session)."""
    if 'user_id' not in session:
        return None
    user = User.query.filter(User.id == session['user_id']).first()
    if not user:
        # The session references a user that no longer exists: sign out.
        signout_user()
        return None
    return user
"resource": ""
} |
q270666 | create_app | test | def create_app():
"""Application factory: build and configure the Flask app.

Wires up config, proxy fix, CSRF protection, static file serving
(source tree in debug/testing, built ``output`` tree in production),
error logging/Sentry, and then registers db, routes, jinja helpers,
error pages and request hooks.
"""
config = load_config()
app = Flask(__name__)
app.config.from_object(config)
# Proxy fix
app.wsgi_app = ProxyFix(app.wsgi_app)
# CSRF protect
CsrfProtect(app)
if app.debug or app.testing:
DebugToolbarExtension(app)
# Serve static files
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/pages': os.path.join(app.config.get('PROJECT_PATH'), 'application/pages')
})
else:
# Log errors to stderr in production mode
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.ERROR)
# Enable Sentry
if app.config.get('SENTRY_DSN'):
from .utils.sentry import sentry
sentry.init_app(app, dsn=app.config.get('SENTRY_DSN'))
# Serve static files
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/static': os.path.join(app.config.get('PROJECT_PATH'), 'output/static'),
'/pkg': os.path.join(app.config.get('PROJECT_PATH'), 'output/pkg'),
'/pages': os.path.join(app.config.get('PROJECT_PATH'), 'output/pages')
})
# Register components
register_db(app)
register_routes(app)
register_jinja(app)
register_error_handle(app)
register_hooks(app)
return app | python | {
"resource": ""
} |
# q270667 | register_jinja | test
def register_jinja(app):
    """Register jinja loaders, filters, globals and helper functions."""
    import jinja2
    from .utils import filters, permissions, helpers
    # In debug/testing mode templates come straight from the source tree;
    # in production from the built ``output`` directory.
    if app.debug or app.testing:
        template_dirs = [
            os.path.join(app.config.get('PROJECT_PATH'), 'application/macros'),
            os.path.join(app.config.get('PROJECT_PATH'), 'application/pages')
        ]
    else:
        template_dirs = [
            os.path.join(app.config.get('PROJECT_PATH'), 'output/macros'),
            os.path.join(app.config.get('PROJECT_PATH'), 'output/pages')
        ]
    app.jinja_loader = jinja2.ChoiceLoader([
        app.jinja_loader,
        jinja2.FileSystemLoader(template_dirs)
    ])
    app.jinja_env.filters.update({
        'timesince': filters.timesince
    })
    def url_for_other_page(page):
        """Generate url for pagination."""
        view_args = request.view_args.copy()
        args = request.args.copy().to_dict()
        # BUG FIX: ``dict(view_args.items() + args.items())`` raises
        # TypeError on Python 3, where dict views cannot be concatenated
        # with ``+``; merge with query args taking precedence instead.
        combined_args = dict(view_args, **args)
        combined_args['page'] = page
        return url_for(request.endpoint, **combined_args)
    rules = {}
    for endpoint, _rules in iteritems(app.url_map._rules_by_endpoint):
        # Skip debug-toolbar and static-file endpoints.
        if any(item in endpoint for item in ['_debug_toolbar', 'debugtoolbar', 'static']):
            continue
        rules[endpoint] = [{'rule': rule.rule} for rule in _rules]
    app.jinja_env.globals.update({
        'absolute_url_for': helpers.absolute_url_for,
        'url_for_other_page': url_for_other_page,
        'rules': rules,
        'permissions': permissions
    })
"resource": ""
} |
# q270668 | register_routes | test
def register_routes(app):
    """Auto-register every Blueprint exposed as ``bp`` by a controllers submodule."""
    from . import controllers
    from flask.blueprints import Blueprint
    for module in _import_submodules_from_package(controllers):
        blueprint = getattr(module, 'bp')
        if blueprint and isinstance(blueprint, Blueprint):
            app.register_blueprint(blueprint)
"resource": ""
} |
q270669 | register_error_handle | test | def register_error_handle(app):
"""Register HTTP error pages (403/404/500) rendered from the site templates."""
@app.errorhandler(403)
def page_403(error):
return render_template('site/403/403.html'), 403
@app.errorhandler(404)
def page_404(error):
return render_template('site/404/404.html'), 404
@app.errorhandler(500)
def page_500(error):
return render_template('site/500/500.html'), 500 | python | {
"resource": ""
} |
q270670 | register_hooks | test | def register_hooks(app):
"""Register request hooks.

Loads the current user onto ``g`` before each request and, for admins,
reports the server-side render time via an ``X-Render-Time`` header.
"""
@app.before_request
def before_request():
g.user = get_current_user()
# Only time requests for admins; they receive the X-Render-Time header.
if g.user and g.user.is_admin:
g._before_request_time = time.time()
@app.after_request
def after_request(response):
if hasattr(g, '_before_request_time'):
delta = time.time() - g._before_request_time
# NOTE(review): the value assigned is a float (milliseconds); confirm
# the WSGI stack serializes it as intended.
response.headers['X-Render-Time'] = delta * 1000
return response | python | {
"resource": ""
} |
q270671 | _dataframe_to_csv | test | def _dataframe_to_csv(writer, dataframe, delimiter, with_header):
"""serialize the dataframe with different delimiters"""
encoding_writer = codecs.getwriter('utf-8')(writer)
dataframe.to_csv(
path_or_buf=encoding_writer,
sep=delimiter,
header=with_header,
index=False
) | python | {
"resource": ""
} |
q270672 | _dataframe_from_csv | test | def _dataframe_from_csv(reader, delimiter, with_header, skipspace):
"""Returns csv data as a pandas Dataframe object"""
sep = delimiter
header = 0
if not with_header:
header = None
return pd.read_csv(
reader,
header=header,
sep=sep,
skipinitialspace=skipspace,
encoding='utf-8-sig'
) | python | {
"resource": ""
} |
# q270673 | serialize_dataframe | test
def serialize_dataframe(writer, data_type_id, dataframe):
    """
    Serialize a dataframe.

    Parameters
    ----------
    writer : file
        File-like object to write to. Must be opened in binary mode.
    data_type_id : dict
        Serialization format to use.
        See the azureml.DataTypeIds class for constants.
    dataframe: pandas.DataFrame
        Dataframe to serialize.
    """
    _not_none('writer', writer)
    _not_none_or_empty('data_type_id', data_type_id)
    _not_none('dataframe', dataframe)
    entry = _SERIALIZERS.get(data_type_id)
    if entry is None:
        raise UnsupportedDatasetTypeError(data_type_id)
    # Each registry entry is a (serialize, deserialize) pair.
    serialize, _ = entry
    serialize(writer=writer, dataframe=dataframe)
"resource": ""
} |
# q270674 | deserialize_dataframe | test
def deserialize_dataframe(reader, data_type_id):
    """
    Deserialize a dataframe.

    Parameters
    ----------
    reader : file
        File-like object to read from. Must be opened in binary mode.
    data_type_id : dict
        Serialization format of the raw data.
        See the azureml.DataTypeIds class for constants.

    Returns
    -------
    pandas.DataFrame
        Dataframe object.
    """
    _not_none('reader', reader)
    _not_none_or_empty('data_type_id', data_type_id)
    entry = _SERIALIZERS.get(data_type_id)
    if entry is None:
        raise UnsupportedDatasetTypeError(data_type_id)
    # Each registry entry is a (serialize, deserialize) pair.
    _, deserialize = entry
    return deserialize(reader=reader)
"resource": ""
} |
q270675 | SourceDataset._update_from_dataframe | test | def _update_from_dataframe(self, dataframe, data_type_id=None, name=None,
description=None):
"""
Serialize the specified DataFrame and replace the existing dataset.
Parameters
----------
dataframe : pandas.DataFrame
Data to serialize.
data_type_id : str, optional
Format to serialize to.
If None, the existing format is preserved.
Supported formats are:
'PlainText'
'GenericCSV'
'GenericTSV'
'GenericCSVNoHeader'
'GenericTSVNoHeader'
See the azureml.DataTypeIds class for constants.
name : str, optional
Name for the dataset.
If None, the name of the existing dataset is used.
description : str, optional
Description for the dataset.
If None, the name of the existing dataset is used.
"""
_not_none('dataframe', dataframe)
# Any argument left as None falls back to the dataset's current metadata.
if data_type_id is None:
data_type_id = self.data_type_id
if name is None:
name = self.name
if description is None:
description = self.description
# Serialize into an in-memory buffer, then upload the raw bytes.
try:
output = BytesIO()
serialize_dataframe(output, data_type_id, dataframe)
raw_data = output.getvalue()
finally:
output.close()
self._upload_and_refresh(raw_data, data_type_id, name, description) | python | {
"resource": ""
} |
q270676 | SourceDataset._update_from_raw_data | test | def _update_from_raw_data(self, raw_data, data_type_id=None, name=None,
description=None):
"""
Upload already serialized raw data and replace the existing dataset.
Parameters
----------
raw_data: bytes
Dataset contents to upload.
data_type_id : str
Serialization format of the raw data.
If None, the format of the existing dataset is used.
Supported formats are:
'PlainText'
'GenericCSV'
'GenericTSV'
'GenericCSVNoHeader'
'GenericTSVNoHeader'
'ARFF'
See the azureml.DataTypeIds class for constants.
name : str, optional
Name for the dataset.
If None, the name of the existing dataset is used.
description : str, optional
Description for the dataset.
If None, the name of the existing dataset is used.
"""
_not_none('raw_data', raw_data)
# Any argument left as None falls back to the dataset's current metadata.
if data_type_id is None:
data_type_id = self.data_type_id
if name is None:
name = self.name
if description is None:
description = self.description
# Replace the dataset contents and refresh this object's metadata.
self._upload_and_refresh(raw_data, data_type_id, name, description) | python | {
"resource": ""
} |
# q270677 | SourceDataset.contents_url | test
def contents_url(self):
    """Full URL to the dataset contents (base URI + location + access credential)."""
    location = self.download_location
    url = location.base_uri + location.location
    return url + location.access_credential
"resource": ""
} |
q270678 | Datasets.add_from_dataframe | test | def add_from_dataframe(self, dataframe, data_type_id, name, description):
"""
Serialize the specified DataFrame and upload it as a new dataset.
Parameters
----------
dataframe : pandas.DataFrame
Data to serialize.
data_type_id : str
Format to serialize to.
Supported formats are:
'PlainText'
'GenericCSV'
'GenericTSV'
'GenericCSVNoHeader'
'GenericTSVNoHeader'
See the azureml.DataTypeIds class for constants.
name : str
Name for the new dataset.
description : str
Description for the new dataset.
Returns
-------
SourceDataset
Dataset that was just created.
Use open(), read_as_binary(), read_as_text() or to_dataframe() on
the dataset object to get its contents as a stream, bytes, str or
pandas DataFrame.
"""
_not_none('dataframe', dataframe)
_not_none_or_empty('data_type_id', data_type_id)
_not_none_or_empty('name', name)
_not_none_or_empty('description', description)
# Serialize into an in-memory buffer, then upload the raw bytes.
try:
output = BytesIO()
serialize_dataframe(output, data_type_id, dataframe)
raw_data = output.getvalue()
finally:
output.close()
return self._upload(raw_data, data_type_id, name, description) | python | {
"resource": ""
} |
q270679 | Datasets.add_from_raw_data | test | def add_from_raw_data(self, raw_data, data_type_id, name, description):
"""
Upload already serialized raw data as a new dataset.
Parameters
----------
raw_data: bytes
Dataset contents to upload.
data_type_id : str
Serialization format of the raw data.
Supported formats are:
'PlainText'
'GenericCSV'
'GenericTSV'
'GenericCSVNoHeader'
'GenericTSVNoHeader'
'ARFF'
See the azureml.DataTypeIds class for constants.
name : str
Name for the new dataset.
description : str
Description for the new dataset.
Returns
-------
SourceDataset
Dataset that was just created.
Use open(), read_as_binary(), read_as_text() or to_dataframe() on
the dataset object to get its contents as a stream, bytes, str or
pandas DataFrame.
"""
_not_none('raw_data', raw_data)
_not_none_or_empty('data_type_id', data_type_id)
_not_none_or_empty('name', name)
_not_none_or_empty('description', description)
# Validation passed; upload and return the new SourceDataset.
return self._upload(raw_data, data_type_id, name, description) | python | {
"resource": ""
} |
q270680 | IntermediateDataset.open | test | def open(self):
'''Open and return a stream for the dataset contents.'''
# Contents are fetched for this experiment node's named output port.
return self.workspace._rest.open_intermediate_dataset_contents(
self.workspace.workspace_id,
self.experiment.experiment_id,
self.node_id,
self.port_name
) | python | {
"resource": ""
} |
q270681 | IntermediateDataset.read_as_binary | test | def read_as_binary(self):
'''Read and return the dataset contents as binary.'''
# Contents are fetched for this experiment node's named output port.
return self.workspace._rest.read_intermediate_dataset_contents_binary(
self.workspace.workspace_id,
self.experiment.experiment_id,
self.node_id,
self.port_name
) | python | {
"resource": ""
} |
q270682 | IntermediateDataset.read_as_text | test | def read_as_text(self):
'''Read and return the dataset contents as text.'''
# Contents are fetched for this experiment node's named output port.
return self.workspace._rest.read_intermediate_dataset_contents_text(
self.workspace.workspace_id,
self.experiment.experiment_id,
self.node_id,
self.port_name
) | python | {
"resource": ""
} |
q270683 | IntermediateDataset._to_dataframe | test | def _to_dataframe(self):
"""Read and return the dataset contents as a pandas DataFrame."""
#TODO: figure out why passing in the opened stream directly gives invalid data
# Download the contents fully into memory, then deserialize from BytesIO.
data = self.read_as_binary()
reader = BytesIO(data)
return deserialize_dataframe(reader, self.data_type_id) | python | {
"resource": ""
} |
q270684 | Experiment.get_intermediate_dataset | test | def get_intermediate_dataset(self, node_id, port_name, data_type_id):
"""
Get an intermediate dataset.
Parameters
----------
node_id : str
Module node id from the experiment graph.
port_name : str
Output port of the module.
data_type_id : str
Serialization format of the raw data.
See the azureml.DataTypeIds class for constants.
Returns
-------
IntermediateDataset
Dataset object.
Use open(), read_as_binary(), read_as_text() or to_dataframe() on
the dataset object to get its contents as a stream, bytes, str or
pandas DataFrame.
"""
# Just wraps the reference; use the returned object's read methods to fetch data.
return IntermediateDataset(self.workspace, self, node_id, port_name, data_type_id) | python | {
"resource": ""
} |
# q270685 | _RestClient.get_experiments | test
def get_experiments(self, workspace_id):
    """Runs HTTP GET request to retrieve the list of experiments."""
    return self._send_get_req(self.EXPERIMENTS_URI_FMT.format(workspace_id))
"resource": ""
} |
# q270686 | _RestClient.get_datasets | test
def get_datasets(self, workspace_id):
    """Runs HTTP GET request to retrieve the list of datasets."""
    return self._send_get_req(self.DATASOURCES_URI_FMT.format(workspace_id))
"resource": ""
} |
# q270687 | _RestClient.get_dataset | test
def get_dataset(self, workspace_id, dataset_id):
    """Runs HTTP GET request to retrieve a single dataset."""
    return self._send_get_req(self.DATASOURCE_URI_FMT.format(workspace_id, dataset_id))
"resource": ""
} |
q270688 | publish | test | def publish(func_or_workspace_id, workspace_id_or_token = None, workspace_token_or_none = None, files=(), endpoint=None):
'''publishes a callable function or decorates a function to be published.
Returns a callable, iterable object. Calling the object will invoke the published service.
Iterating the object will give the API URL, API key, and API help url.
To define a function which will be published to Azure you can simply decorate it with
the @publish decorator. This will publish the service, and then future calls to the
function will run against the operationalized version of the service in the cloud.
>>> @publish(workspace_id, workspace_token)
>>> def func(a, b):
>>> return a + b
After publishing you can then invoke the function using:
func.service(1, 2)
Or continue to invoke the function locally:
func(1, 2)
You can also just call publish directly to publish a function:
>>> def func(a, b): return a + b
>>>
>>> res = publish(func, workspace_id, workspace_token)
>>>
>>> url, api_key, help_url = res
>>> res(2, 3)
5
>>> url, api_key, help_url = res.url, res.api_key, res.help_url
The returned result will be the published service.
You can specify a list of files which should be published along with the function.
The resulting files will be stored in a subdirectory called 'Script Bundle'. The
list of files can be one of:
(('file1.txt', None), ) # file is read from disk
(('file1.txt', b'contents'), ) # file contents are provided
('file1.txt', 'file2.txt') # files are read from disk, written with same filename
((('file1.txt', 'destname.txt'), None), ) # file is read from disk, written with different destination name
The various formats for each filename can be freely mixed and matched.
'''
# Decorator form: @publish(workspace_id, token) -- first argument is the
# workspace id, so return a decorator that publishes the function and
# attaches the resulting service as ``func.service``.
if not callable(func_or_workspace_id):
def do_publish(func):
func.service = _publish_worker(func, files, func_or_workspace_id, workspace_id_or_token, endpoint)
return func
return do_publish
# Direct form: publish(func, workspace_id, token) -- publish immediately.
return _publish_worker(func_or_workspace_id, files, workspace_id_or_token, workspace_token_or_none, endpoint) | python | {
"resource": ""
} |
# q270689 | service | test
def service(url, api_key, help_url=None):
    '''Marks a function as having been published and causes all invocations to go to the remote
    operationalized service.

    >>> @service(url, api_key)
    >>> def f(a, b):
    >>>     pass
    '''
    def decorate(func):
        return published(url, api_key, help_url, func, None)
    return decorate
"resource": ""
} |
# q270690 | types | test
def types(**args):
    """Specifies the types used for the arguments of a published service.

    @types(a=int, b=str)
    def f(a, b):
        pass
    """
    def decorate(func):
        # Merge into existing annotations; create the mapping if absent.
        try:
            func.__annotations__.update(args)
        except AttributeError:
            func.__annotations__ = args
        return func
    return decorate
"resource": ""
} |
# q270691 | returns | test
def returns(type):
    """Specifies the return type for a published service.

    @returns(int)
    def f(...):
        pass
    """
    def decorate(func):
        # Set the 'return' annotation; create the mapping if absent.
        try:
            func.__annotations__['return'] = type
        except AttributeError:
            func.__annotations__ = {'return': type}
        return func
    return decorate
"resource": ""
} |
# q270692 | attach | test
def attach(name, contents=None):
    """attaches a file to the payload to be uploaded.

    If contents is omitted the file is read from disk.
    If name is a tuple it specifies the on-disk filename and the destination filename.
    """
    def decorate(func):
        attachments = getattr(func, '__attachments__', None)
        if attachments is None:
            func.__attachments__ = [(name, contents)]
        else:
            attachments.append((name, contents))
        return func
    return decorate
"resource": ""
} |
q270693 | _Serializer.find_globals | test | def find_globals(code):
"""walks the byte code to find the variables which are actually globals"""
# NOTE(review): this indexes co_code with ord(), i.e. it assumes Python 2
# str-typed bytecode; on Python 3 indexing bytes yields ints -- confirm
# before reusing on a modern interpreter.
cur_byte = 0
byte_code = code.co_code
names = set()
while cur_byte < len(byte_code):
op = ord(byte_code[cur_byte])
if op >= dis.HAVE_ARGUMENT:
# Opcodes with arguments carry a two-byte little-endian operand.
if op == _LOAD_GLOBAL:
oparg = ord(byte_code[cur_byte + 1]) + (ord(byte_code[cur_byte + 2]) << 8)
name = code.co_names[oparg]
names.add(name)
cur_byte += 2
cur_byte += 1
return names | python | {
"resource": ""
} |
q270694 | Pen.copy | test | def copy(self):
"""Create a copy of this pen."""
# Shallow copy: attribute values themselves are shared with the original.
pen = Pen()
pen.__dict__ = self.__dict__.copy()
return pen | python | {
"resource": ""
} |
q270695 | lookup_color | test | def lookup_color(c):
"""Return RGBA values of color c
c should be either an X11 color or a brewer color set and index
e.g. "navajowhite", "greens3/2"
"""
import sys
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('PangoCairo', '1.0')
from gi.repository import Gdk
# First try to parse c as an X11/named color via Gdk.
try:
color = Gdk.color_parse(c)
except ValueError:
pass
else:
# Gdk channels are 16-bit; scale down to 0..1 floats.
s = 1.0/65535.0
r = color.red*s
g = color.green*s
b = color.blue*s
a = 1.0
return r, g, b, a
# Otherwise treat c as a '/'-separated brewer reference: <prefix>/<scheme>/<index>.
try:
dummy, scheme, index = c.split('/')
r, g, b = brewer_colors[scheme][int(index)]
except (ValueError, KeyError):
pass
else:
# Brewer entries are 8-bit channel values.
s = 1.0/255.0
r = r*s
g = g*s
b = b*s
a = 1.0
return r, g, b, a
sys.stderr.write("warning: unknown color '%s'\n" % c)
return None | python | {
"resource": ""
} |
q270696 | Shape.draw | test | def draw(self, cr, highlight=False, bounding=None):
"""Draw this shape with the given cairo context"""
# Skip drawing entirely when a bounding box is given and we fall outside it.
if bounding is None or self._intersects(bounding):
self._draw(cr, highlight, bounding) | python | {
"resource": ""
} |
q270697 | BezierShape._cubic_bernstein_extrema | test | def _cubic_bernstein_extrema(p0, p1, p2, p3):
"""
Find extremas of a function of real domain defined by evaluating
a cubic bernstein polynomial of given bernstein coefficients.
"""
# compute coefficients of derivative
a = 3.*(p3-p0+3.*(p1-p2))
b = 6.*(p0+p2-2.*p1)
c = 3.*(p1-p0)
if a == 0:
if b == 0:
return () # constant
return (-c / b,) # linear
# quadratic
# compute discriminant
d = b*b - 4.*a*c
if d < 0:
return ()
k = -2. * a
if d == 0:
return (b / k,)
r = math.sqrt(d)
return ((b + r) / k, (b - r) / k) | python | {
"resource": ""
} |
q270698 | BezierShape._cubic_bernstein | test | def _cubic_bernstein(p0, p1, p2, p3, t):
"""
Evaluate polynomial of given bernstein coefficients
using de Casteljau's algorithm.
"""
u = 1 - t
return p0*(u**3) + 3*t*u*(p1*u + p2*t) + p3*(t**3) | python | {
"resource": ""
} |
q270699 | TreeItemChoiceField._build_choices | test | def _build_choices(self):
"""Build choices list runtime using 'sitetree_tree' tag"""
tree_token = u'sitetree_tree from "%s" template "%s"' % (self.tree, self.template)
context_kwargs = {'current_app': 'admin'}
# Context kwargs handling differs around Django 1.8; pick the right call form.
context = template.Context(context_kwargs) if VERSION >= (1, 8) else template.Context(**context_kwargs)
context.update({'request': object()})
# Render the sitetree template tag; each non-blank output line is an
# ":::"-separated (id, title) pair.
choices_str = sitetree_tree(
Parser(None), Token(token_type=TOKEN_BLOCK, contents=tree_token)
).render(context)
tree_choices = [(ITEMS_FIELD_ROOT_ID, self.root_title)]
for line in choices_str.splitlines():
if line.strip():
splitted = line.split(':::')
tree_choices.append((splitted[0], mark_safe(splitted[1])))
return tree_choices | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.