id int32 0 252k | repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 51 19.8k | code_tokens list | docstring stringlengths 3 17.3k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 87 242 |
|---|---|---|---|---|---|---|---|---|---|---|---|
22,700 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient.refresh_access_token | def refresh_access_token(self, refresh_token):
''' Refreshes the current access token.
Gets a new access token, updates client auth and returns it.
Args:
refresh_token (str): Refresh token to use
Returns:
The new access token
'''
request = self._get_request()
response = request.post(self.OAUTH_TOKEN_URL, {
"grant_type": "refresh_token",
"refresh_token": refresh_token
})
self.auth = HSAccessTokenAuth.from_response(response)
return self.auth.access_token | python | def refresh_access_token(self, refresh_token):
''' Refreshes the current access token.
Gets a new access token, updates client auth and returns it.
Args:
refresh_token (str): Refresh token to use
Returns:
The new access token
'''
request = self._get_request()
response = request.post(self.OAUTH_TOKEN_URL, {
"grant_type": "refresh_token",
"refresh_token": refresh_token
})
self.auth = HSAccessTokenAuth.from_response(response)
return self.auth.access_token | [
"def",
"refresh_access_token",
"(",
"self",
",",
"refresh_token",
")",
":",
"request",
"=",
"self",
".",
"_get_request",
"(",
")",
"response",
"=",
"request",
".",
"post",
"(",
"self",
".",
"OAUTH_TOKEN_URL",
",",
"{",
"\"grant_type\"",
":",
"\"refresh_token\"... | Refreshes the current access token.
Gets a new access token, updates client auth and returns it.
Args:
refresh_token (str): Refresh token to use
Returns:
The new access token | [
"Refreshes",
"the",
"current",
"access",
"token",
"."
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1255-L1273 |
22,701 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._get_request | def _get_request(self, auth=None):
''' Return an http request object
auth: Auth data to use
Returns:
A HSRequest object
'''
self.request = HSRequest(auth or self.auth, self.env)
self.request.response_callback = self.response_callback
return self.request | python | def _get_request(self, auth=None):
''' Return an http request object
auth: Auth data to use
Returns:
A HSRequest object
'''
self.request = HSRequest(auth or self.auth, self.env)
self.request.response_callback = self.response_callback
return self.request | [
"def",
"_get_request",
"(",
"self",
",",
"auth",
"=",
"None",
")",
":",
"self",
".",
"request",
"=",
"HSRequest",
"(",
"auth",
"or",
"self",
".",
"auth",
",",
"self",
".",
"env",
")",
"self",
".",
"request",
".",
"response_callback",
"=",
"self",
"."... | Return an http request object
auth: Auth data to use
Returns:
A HSRequest object | [
"Return",
"an",
"http",
"request",
"object"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1286-L1296 |
22,702 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._authenticate | def _authenticate(self, email_address=None, password=None, api_key=None, access_token=None, access_token_type=None):
''' Create authentication object to send requests
Args:
email_address (str): Email address of the account to make the requests
password (str): Password of the account used with email address
api_key (str): API Key. You can find your API key in https://www.hellosign.com/home/myAccount/current_tab/integrations
access_token (str): OAuth access token
access_token_type (str): Type of OAuth access token
Raises:
NoAuthMethod: If no authentication information found
Returns:
A HTTPBasicAuth or HSAccessTokenAuth object
'''
if access_token_type and access_token:
return HSAccessTokenAuth(access_token, access_token_type)
elif api_key:
return HTTPBasicAuth(api_key, '')
elif email_address and password:
return HTTPBasicAuth(email_address, password)
else:
raise NoAuthMethod("No authentication information found!") | python | def _authenticate(self, email_address=None, password=None, api_key=None, access_token=None, access_token_type=None):
''' Create authentication object to send requests
Args:
email_address (str): Email address of the account to make the requests
password (str): Password of the account used with email address
api_key (str): API Key. You can find your API key in https://www.hellosign.com/home/myAccount/current_tab/integrations
access_token (str): OAuth access token
access_token_type (str): Type of OAuth access token
Raises:
NoAuthMethod: If no authentication information found
Returns:
A HTTPBasicAuth or HSAccessTokenAuth object
'''
if access_token_type and access_token:
return HSAccessTokenAuth(access_token, access_token_type)
elif api_key:
return HTTPBasicAuth(api_key, '')
elif email_address and password:
return HTTPBasicAuth(email_address, password)
else:
raise NoAuthMethod("No authentication information found!") | [
"def",
"_authenticate",
"(",
"self",
",",
"email_address",
"=",
"None",
",",
"password",
"=",
"None",
",",
"api_key",
"=",
"None",
",",
"access_token",
"=",
"None",
",",
"access_token_type",
"=",
"None",
")",
":",
"if",
"access_token_type",
"and",
"access_to... | Create authentication object to send requests
Args:
email_address (str): Email address of the account to make the requests
password (str): Password of the account used with email address
api_key (str): API Key. You can find your API key in https://www.hellosign.com/home/myAccount/current_tab/integrations
access_token (str): OAuth access token
access_token_type (str): Type of OAuth access token
Raises:
NoAuthMethod: If no authentication information found
Returns:
A HTTPBasicAuth or HSAccessTokenAuth object | [
"Create",
"authentication",
"object",
"to",
"send",
"requests"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1298-L1328 |
22,703 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._check_required_fields | def _check_required_fields(self, fields=None, either_fields=None):
''' Check the values of the fields
If no value found in `fields`, an exception will be raised.
`either_fields` are the fields that one of them must have a value
Raises:
HSException: If no value found in at least one item of`fields`, or
no value found in one of the items of `either_fields`
Returns:
None
'''
for (key, value) in fields.items():
# If value is a dict, one of the fields in the dict is required ->
# exception if all are None
if not value:
raise HSException("Field '%s' is required." % key)
if either_fields is not None:
for field in either_fields:
if not any(field.values()):
raise HSException("One of the following fields is required: %s" % ", ".join(field.keys())) | python | def _check_required_fields(self, fields=None, either_fields=None):
''' Check the values of the fields
If no value found in `fields`, an exception will be raised.
`either_fields` are the fields that one of them must have a value
Raises:
HSException: If no value found in at least one item of`fields`, or
no value found in one of the items of `either_fields`
Returns:
None
'''
for (key, value) in fields.items():
# If value is a dict, one of the fields in the dict is required ->
# exception if all are None
if not value:
raise HSException("Field '%s' is required." % key)
if either_fields is not None:
for field in either_fields:
if not any(field.values()):
raise HSException("One of the following fields is required: %s" % ", ".join(field.keys())) | [
"def",
"_check_required_fields",
"(",
"self",
",",
"fields",
"=",
"None",
",",
"either_fields",
"=",
"None",
")",
":",
"for",
"(",
"key",
",",
"value",
")",
"in",
"fields",
".",
"items",
"(",
")",
":",
"# If value is a dict, one of the fields in the dict is requ... | Check the values of the fields
If no value found in `fields`, an exception will be raised.
`either_fields` are the fields that one of them must have a value
Raises:
HSException: If no value found in at least one item of`fields`, or
no value found in one of the items of `either_fields`
Returns:
None | [
"Check",
"the",
"values",
"of",
"the",
"fields"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1330-L1353 |
22,704 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._send_signature_request | def _send_signature_request(self, test_mode=False, client_id=None, files=None, file_urls=None, title=None, subject=None, message=None, signing_redirect_url=None, signers=None, cc_email_addresses=None, form_fields_per_document=None, use_text_tags=False, hide_text_tags=False, metadata=None, ux_version=None, allow_decline=False):
''' To share the same logic between send_signature_request &
send_signature_request_embedded functions
Args:
test_mode (bool, optional): Whether this is a test, the signature request will not be legally binding if set to True. Defaults to False.
client_id (str): Client id of the app you're using to create this embedded signature request. Visit the embedded page to learn more about this parameter (https://www.hellosign.com/api/embeddedSigningWalkthrough)
files (list of str): The uploaded file(s) to send for signature
file_urls (list of str): URLs of the file for HelloSign to download to send for signature. Use either `files` or `file_urls`
title (str, optional): The title you want to assign to the SignatureRequest
subject (str, optional): The subject in the email that will be sent to the signers
message (str, optional): The custom message in the email that will be sent to the signers
signing_redirect_url (str, optional): The URL you want the signer redirected to after they successfully sign
signers (list of dict): A list of signers, which each has the following attributes:
name (str): The name of the signer
email_address (str): Email address of the signer
order (str, optional): The order the signer is required to sign in
pin (str, optional): The 4- to 12-character access code that will secure this signer's signature page
cc_email_addresses (list, optional): A list of email addresses that should be CCed
form_fields_per_document (str): The fields that should appear on the document, expressed as a serialized JSON data structure which is a list of lists of the form fields. Please refer to the API reference of HelloSign for more details (https://www.hellosign.com/api/reference#SignatureRequest)
use_text_tags (bool, optional): Use text tags in the provided file(s) to create form fields
hide_text_tags (bool, optional): Hide text tag areas
metadata (dict, optional): Metadata to associate with the signature request
ux_version (int): UX version, either 1 (default) or 2.
allow_decline (bool, optional); Allows signers to decline to sign a document if set to 1. Defaults to 0.
Returns:
A SignatureRequest object
'''
# Files
files_payload = HSFormat.format_file_params(files)
# File URLs
file_urls_payload = HSFormat.format_file_url_params(file_urls)
# Signers
signers_payload = HSFormat.format_dict_list(signers, 'signers')
# CCs
cc_email_addresses_payload = HSFormat.format_param_list(cc_email_addresses, 'cc_email_addresses')
# Metadata
metadata_payload = HSFormat.format_single_dict(metadata, 'metadata')
payload = {
"test_mode": self._boolean(test_mode),
"client_id": client_id,
"title": title,
"subject": subject,
"message": message,
"signing_redirect_url": signing_redirect_url,
"form_fields_per_document": form_fields_per_document,
"use_text_tags": self._boolean(use_text_tags),
"hide_text_tags": self._boolean(hide_text_tags),
"allow_decline": self._boolean(allow_decline)
}
if ux_version is not None:
payload['ux_version'] = ux_version
# remove attributes with none value
payload = HSFormat.strip_none_values(payload)
url = self.SIGNATURE_REQUEST_CREATE_URL
if client_id:
url = self.SIGNATURE_REQUEST_CREATE_EMBEDDED_URL
data = {}
data.update(payload)
data.update(signers_payload)
data.update(cc_email_addresses_payload)
data.update(file_urls_payload)
data.update(metadata_payload)
request = self._get_request()
response = request.post(url, data=data, files=files_payload)
return response | python | def _send_signature_request(self, test_mode=False, client_id=None, files=None, file_urls=None, title=None, subject=None, message=None, signing_redirect_url=None, signers=None, cc_email_addresses=None, form_fields_per_document=None, use_text_tags=False, hide_text_tags=False, metadata=None, ux_version=None, allow_decline=False):
''' To share the same logic between send_signature_request &
send_signature_request_embedded functions
Args:
test_mode (bool, optional): Whether this is a test, the signature request will not be legally binding if set to True. Defaults to False.
client_id (str): Client id of the app you're using to create this embedded signature request. Visit the embedded page to learn more about this parameter (https://www.hellosign.com/api/embeddedSigningWalkthrough)
files (list of str): The uploaded file(s) to send for signature
file_urls (list of str): URLs of the file for HelloSign to download to send for signature. Use either `files` or `file_urls`
title (str, optional): The title you want to assign to the SignatureRequest
subject (str, optional): The subject in the email that will be sent to the signers
message (str, optional): The custom message in the email that will be sent to the signers
signing_redirect_url (str, optional): The URL you want the signer redirected to after they successfully sign
signers (list of dict): A list of signers, which each has the following attributes:
name (str): The name of the signer
email_address (str): Email address of the signer
order (str, optional): The order the signer is required to sign in
pin (str, optional): The 4- to 12-character access code that will secure this signer's signature page
cc_email_addresses (list, optional): A list of email addresses that should be CCed
form_fields_per_document (str): The fields that should appear on the document, expressed as a serialized JSON data structure which is a list of lists of the form fields. Please refer to the API reference of HelloSign for more details (https://www.hellosign.com/api/reference#SignatureRequest)
use_text_tags (bool, optional): Use text tags in the provided file(s) to create form fields
hide_text_tags (bool, optional): Hide text tag areas
metadata (dict, optional): Metadata to associate with the signature request
ux_version (int): UX version, either 1 (default) or 2.
allow_decline (bool, optional); Allows signers to decline to sign a document if set to 1. Defaults to 0.
Returns:
A SignatureRequest object
'''
# Files
files_payload = HSFormat.format_file_params(files)
# File URLs
file_urls_payload = HSFormat.format_file_url_params(file_urls)
# Signers
signers_payload = HSFormat.format_dict_list(signers, 'signers')
# CCs
cc_email_addresses_payload = HSFormat.format_param_list(cc_email_addresses, 'cc_email_addresses')
# Metadata
metadata_payload = HSFormat.format_single_dict(metadata, 'metadata')
payload = {
"test_mode": self._boolean(test_mode),
"client_id": client_id,
"title": title,
"subject": subject,
"message": message,
"signing_redirect_url": signing_redirect_url,
"form_fields_per_document": form_fields_per_document,
"use_text_tags": self._boolean(use_text_tags),
"hide_text_tags": self._boolean(hide_text_tags),
"allow_decline": self._boolean(allow_decline)
}
if ux_version is not None:
payload['ux_version'] = ux_version
# remove attributes with none value
payload = HSFormat.strip_none_values(payload)
url = self.SIGNATURE_REQUEST_CREATE_URL
if client_id:
url = self.SIGNATURE_REQUEST_CREATE_EMBEDDED_URL
data = {}
data.update(payload)
data.update(signers_payload)
data.update(cc_email_addresses_payload)
data.update(file_urls_payload)
data.update(metadata_payload)
request = self._get_request()
response = request.post(url, data=data, files=files_payload)
return response | [
"def",
"_send_signature_request",
"(",
"self",
",",
"test_mode",
"=",
"False",
",",
"client_id",
"=",
"None",
",",
"files",
"=",
"None",
",",
"file_urls",
"=",
"None",
",",
"title",
"=",
"None",
",",
"subject",
"=",
"None",
",",
"message",
"=",
"None",
... | To share the same logic between send_signature_request &
send_signature_request_embedded functions
Args:
test_mode (bool, optional): Whether this is a test, the signature request will not be legally binding if set to True. Defaults to False.
client_id (str): Client id of the app you're using to create this embedded signature request. Visit the embedded page to learn more about this parameter (https://www.hellosign.com/api/embeddedSigningWalkthrough)
files (list of str): The uploaded file(s) to send for signature
file_urls (list of str): URLs of the file for HelloSign to download to send for signature. Use either `files` or `file_urls`
title (str, optional): The title you want to assign to the SignatureRequest
subject (str, optional): The subject in the email that will be sent to the signers
message (str, optional): The custom message in the email that will be sent to the signers
signing_redirect_url (str, optional): The URL you want the signer redirected to after they successfully sign
signers (list of dict): A list of signers, which each has the following attributes:
name (str): The name of the signer
email_address (str): Email address of the signer
order (str, optional): The order the signer is required to sign in
pin (str, optional): The 4- to 12-character access code that will secure this signer's signature page
cc_email_addresses (list, optional): A list of email addresses that should be CCed
form_fields_per_document (str): The fields that should appear on the document, expressed as a serialized JSON data structure which is a list of lists of the form fields. Please refer to the API reference of HelloSign for more details (https://www.hellosign.com/api/reference#SignatureRequest)
use_text_tags (bool, optional): Use text tags in the provided file(s) to create form fields
hide_text_tags (bool, optional): Hide text tag areas
metadata (dict, optional): Metadata to associate with the signature request
ux_version (int): UX version, either 1 (default) or 2.
allow_decline (bool, optional); Allows signers to decline to sign a document if set to 1. Defaults to 0.
Returns:
A SignatureRequest object | [
"To",
"share",
"the",
"same",
"logic",
"between",
"send_signature_request",
"&",
"send_signature_request_embedded",
"functions"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1356-L1451 |
22,705 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._send_signature_request_with_template | def _send_signature_request_with_template(self, test_mode=False, client_id=None, template_id=None, template_ids=None, title=None, subject=None, message=None, signing_redirect_url=None, signers=None, ccs=None, custom_fields=None, metadata=None, ux_version=None, allow_decline=False):
''' To share the same logic between send_signature_request_with_template
and send_signature_request_embedded_with_template
Args:
test_mode (bool, optional): Whether this is a test, the signature request will not be legally binding if set to True. Defaults to False.
client_id (str): Client id of the app you're using to create this embedded signature request. Visit the embedded page to learn more about this parameter (https://www.hellosign.com/api/embeddedSigningWalkthrough)
template_id (str): The id of the Template to use when creating the SignatureRequest. Mutually exclusive with template_ids.
template_ids (list): The ids of the Templates to use when creating the SignatureRequest. Mutually exclusive with template_id.
title (str, optional): The title you want to assign to the SignatureRequest
subject (str, optional): The subject in the email that will be sent to the signers
message (str, optional): The custom message in the email that will be sent to the signers
signing_redirect_url (str, optional): The URL you want the signer redirected to after they successfully sign.
signers (list of dict): A list of signers, which each has the following attributes:
role_name (str): Role the signer is assigned to
name (str): The name of the signer
email_address (str): Email address of the signer
pin (str, optional): The 4- to 12-character access code that will secure this signer's signature page
ccs (list of dict, optional): The email address of the CC filling the role of RoleName. Required when a CC role exists for the Template. Each dict has the following attributes:
role_name (str): CC role name
email_address (str): CC email address
custom_fields (list of dict, optional): A list of custom fields. Required when a CustomField exists in the Template. An item of the list should look like this: `{'name: value'}`
metadata (dict, optional): Metadata to associate with the signature request
ux_version (int): UX version, either 1 (default) or 2.
allow_decline (bool, optional): Allows signers to decline to sign a document if set to 1. Defaults to 0.
Returns:
A SignatureRequest object
'''
# Signers
signers_payload = HSFormat.format_dict_list(signers, 'signers', 'role_name')
# CCs
ccs_payload = HSFormat.format_dict_list(ccs, 'ccs', 'role_name')
# Custom fields
custom_fields_payload = HSFormat.format_custom_fields(custom_fields)
# Metadata
metadata_payload = HSFormat.format_single_dict(metadata, 'metadata')
# Template ids
template_ids_payload = {}
if template_ids:
for i in range(len(template_ids)):
template_ids_payload["template_ids[%s]" % i] = template_ids[i]
payload = {
"test_mode": self._boolean(test_mode),
"client_id": client_id,
"template_id": template_id,
"title": title,
"subject": subject,
"message": message,
"signing_redirect_url": signing_redirect_url,
"allow_decline": self._boolean(allow_decline)
}
if ux_version is not None:
payload['ux_version'] = ux_version
# remove attributes with empty value
payload = HSFormat.strip_none_values(payload)
url = self.SIGNATURE_REQUEST_CREATE_WITH_TEMPLATE_URL
if client_id:
url = self.SIGNATURE_REQUEST_CREATE_EMBEDDED_WITH_TEMPLATE_URL
data = payload.copy()
data.update(signers_payload)
data.update(ccs_payload)
data.update(custom_fields_payload)
data.update(metadata_payload)
data.update(template_ids_payload)
request = self._get_request()
response = request.post(url, data=data)
return response | python | def _send_signature_request_with_template(self, test_mode=False, client_id=None, template_id=None, template_ids=None, title=None, subject=None, message=None, signing_redirect_url=None, signers=None, ccs=None, custom_fields=None, metadata=None, ux_version=None, allow_decline=False):
''' To share the same logic between send_signature_request_with_template
and send_signature_request_embedded_with_template
Args:
test_mode (bool, optional): Whether this is a test, the signature request will not be legally binding if set to True. Defaults to False.
client_id (str): Client id of the app you're using to create this embedded signature request. Visit the embedded page to learn more about this parameter (https://www.hellosign.com/api/embeddedSigningWalkthrough)
template_id (str): The id of the Template to use when creating the SignatureRequest. Mutually exclusive with template_ids.
template_ids (list): The ids of the Templates to use when creating the SignatureRequest. Mutually exclusive with template_id.
title (str, optional): The title you want to assign to the SignatureRequest
subject (str, optional): The subject in the email that will be sent to the signers
message (str, optional): The custom message in the email that will be sent to the signers
signing_redirect_url (str, optional): The URL you want the signer redirected to after they successfully sign.
signers (list of dict): A list of signers, which each has the following attributes:
role_name (str): Role the signer is assigned to
name (str): The name of the signer
email_address (str): Email address of the signer
pin (str, optional): The 4- to 12-character access code that will secure this signer's signature page
ccs (list of dict, optional): The email address of the CC filling the role of RoleName. Required when a CC role exists for the Template. Each dict has the following attributes:
role_name (str): CC role name
email_address (str): CC email address
custom_fields (list of dict, optional): A list of custom fields. Required when a CustomField exists in the Template. An item of the list should look like this: `{'name: value'}`
metadata (dict, optional): Metadata to associate with the signature request
ux_version (int): UX version, either 1 (default) or 2.
allow_decline (bool, optional): Allows signers to decline to sign a document if set to 1. Defaults to 0.
Returns:
A SignatureRequest object
'''
# Signers
signers_payload = HSFormat.format_dict_list(signers, 'signers', 'role_name')
# CCs
ccs_payload = HSFormat.format_dict_list(ccs, 'ccs', 'role_name')
# Custom fields
custom_fields_payload = HSFormat.format_custom_fields(custom_fields)
# Metadata
metadata_payload = HSFormat.format_single_dict(metadata, 'metadata')
# Template ids
template_ids_payload = {}
if template_ids:
for i in range(len(template_ids)):
template_ids_payload["template_ids[%s]" % i] = template_ids[i]
payload = {
"test_mode": self._boolean(test_mode),
"client_id": client_id,
"template_id": template_id,
"title": title,
"subject": subject,
"message": message,
"signing_redirect_url": signing_redirect_url,
"allow_decline": self._boolean(allow_decline)
}
if ux_version is not None:
payload['ux_version'] = ux_version
# remove attributes with empty value
payload = HSFormat.strip_none_values(payload)
url = self.SIGNATURE_REQUEST_CREATE_WITH_TEMPLATE_URL
if client_id:
url = self.SIGNATURE_REQUEST_CREATE_EMBEDDED_WITH_TEMPLATE_URL
data = payload.copy()
data.update(signers_payload)
data.update(ccs_payload)
data.update(custom_fields_payload)
data.update(metadata_payload)
data.update(template_ids_payload)
request = self._get_request()
response = request.post(url, data=data)
return response | [
"def",
"_send_signature_request_with_template",
"(",
"self",
",",
"test_mode",
"=",
"False",
",",
"client_id",
"=",
"None",
",",
"template_id",
"=",
"None",
",",
"template_ids",
"=",
"None",
",",
"title",
"=",
"None",
",",
"subject",
"=",
"None",
",",
"messa... | To share the same logic between send_signature_request_with_template
and send_signature_request_embedded_with_template
Args:
test_mode (bool, optional): Whether this is a test, the signature request will not be legally binding if set to True. Defaults to False.
client_id (str): Client id of the app you're using to create this embedded signature request. Visit the embedded page to learn more about this parameter (https://www.hellosign.com/api/embeddedSigningWalkthrough)
template_id (str): The id of the Template to use when creating the SignatureRequest. Mutually exclusive with template_ids.
template_ids (list): The ids of the Templates to use when creating the SignatureRequest. Mutually exclusive with template_id.
title (str, optional): The title you want to assign to the SignatureRequest
subject (str, optional): The subject in the email that will be sent to the signers
message (str, optional): The custom message in the email that will be sent to the signers
signing_redirect_url (str, optional): The URL you want the signer redirected to after they successfully sign.
signers (list of dict): A list of signers, which each has the following attributes:
role_name (str): Role the signer is assigned to
name (str): The name of the signer
email_address (str): Email address of the signer
pin (str, optional): The 4- to 12-character access code that will secure this signer's signature page
ccs (list of dict, optional): The email address of the CC filling the role of RoleName. Required when a CC role exists for the Template. Each dict has the following attributes:
role_name (str): CC role name
email_address (str): CC email address
custom_fields (list of dict, optional): A list of custom fields. Required when a CustomField exists in the Template. An item of the list should look like this: `{'name: value'}`
metadata (dict, optional): Metadata to associate with the signature request
ux_version (int): UX version, either 1 (default) or 2.
allow_decline (bool, optional): Allows signers to decline to sign a document if set to 1. Defaults to 0.
Returns:
A SignatureRequest object | [
"To",
"share",
"the",
"same",
"logic",
"between",
"send_signature_request_with_template",
"and",
"send_signature_request_embedded_with_template"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1454-L1550 |
22,706 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._add_remove_user_template | def _add_remove_user_template(self, url, template_id, account_id=None, email_address=None):
''' Add or Remove user from a Template
We use this function for two tasks because they have the same API call
Args:
template_id (str): The id of the template
account_id (str): ID of the account to add/remove access to/from
email_address (str): The email_address of the account to add/remove access to/from
Raises:
HSException: If no email address or account_id specified
Returns:
A Template object
'''
if not email_address and not account_id:
raise HSException("No email address or account_id specified")
data = {}
if account_id is not None:
data = {
"account_id": account_id
}
else:
data = {
"email_address": email_address
}
request = self._get_request()
response = request.post(url + template_id, data)
return response | python | def _add_remove_user_template(self, url, template_id, account_id=None, email_address=None):
''' Add or Remove user from a Template
We use this function for two tasks because they have the same API call
Args:
template_id (str): The id of the template
account_id (str): ID of the account to add/remove access to/from
email_address (str): The email_address of the account to add/remove access to/from
Raises:
HSException: If no email address or account_id specified
Returns:
A Template object
'''
if not email_address and not account_id:
raise HSException("No email address or account_id specified")
data = {}
if account_id is not None:
data = {
"account_id": account_id
}
else:
data = {
"email_address": email_address
}
request = self._get_request()
response = request.post(url + template_id, data)
return response | [
"def",
"_add_remove_user_template",
"(",
"self",
",",
"url",
",",
"template_id",
",",
"account_id",
"=",
"None",
",",
"email_address",
"=",
"None",
")",
":",
"if",
"not",
"email_address",
"and",
"not",
"account_id",
":",
"raise",
"HSException",
"(",
"\"No emai... | Add or Remove user from a Template
We use this function for two tasks because they have the same API call
Args:
template_id (str): The id of the template
account_id (str): ID of the account to add/remove access to/from
email_address (str): The email_address of the account to add/remove access to/from
Raises:
HSException: If no email address or account_id specified
Returns:
A Template object | [
"Add",
"or",
"Remove",
"user",
"from",
"a",
"Template"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1659-L1696 |
22,707 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._add_remove_team_member | def _add_remove_team_member(self, url, email_address=None, account_id=None):
''' Add or Remove a team member
We use this function for two different tasks because they have the same
API call
Args:
email_address (str): Email address of the Account to add/remove
account_id (str): ID of the Account to add/remove
Returns:
A Team object
'''
if not email_address and not account_id:
raise HSException("No email address or account_id specified")
data = {}
if account_id is not None:
data = {
"account_id": account_id
}
else:
data = {
"email_address": email_address
}
request = self._get_request()
response = request.post(url, data)
return response | python | def _add_remove_team_member(self, url, email_address=None, account_id=None):
''' Add or Remove a team member
We use this function for two different tasks because they have the same
API call
Args:
email_address (str): Email address of the Account to add/remove
account_id (str): ID of the Account to add/remove
Returns:
A Team object
'''
if not email_address and not account_id:
raise HSException("No email address or account_id specified")
data = {}
if account_id is not None:
data = {
"account_id": account_id
}
else:
data = {
"email_address": email_address
}
request = self._get_request()
response = request.post(url, data)
return response | [
"def",
"_add_remove_team_member",
"(",
"self",
",",
"url",
",",
"email_address",
"=",
"None",
",",
"account_id",
"=",
"None",
")",
":",
"if",
"not",
"email_address",
"and",
"not",
"account_id",
":",
"raise",
"HSException",
"(",
"\"No email address or account_id sp... | Add or Remove a team member
We use this function for two different tasks because they have the same
API call
Args:
email_address (str): Email address of the Account to add/remove
account_id (str): ID of the Account to add/remove
Returns:
A Team object | [
"Add",
"or",
"Remove",
"a",
"team",
"member"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1699-L1732 |
22,708 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._create_embedded_template_draft | def _create_embedded_template_draft(self, client_id, signer_roles, test_mode=False, files=None, file_urls=None, title=None, subject=None, message=None, cc_roles=None, merge_fields=None, use_preexisting_fields=False):
''' Helper method for creating embedded template drafts.
See public function for params.
'''
url = self.TEMPLATE_CREATE_EMBEDDED_DRAFT_URL
payload = {
'test_mode': self._boolean(test_mode),
'client_id': client_id,
'title': title,
'subject': subject,
'message': message,
'use_preexisting_fields': self._boolean(use_preexisting_fields)
}
# Prep files
files_payload = HSFormat.format_file_params(files)
file_urls_payload = HSFormat.format_file_url_params(file_urls)
# Prep Signer Roles
signer_roles_payload = HSFormat.format_dict_list(signer_roles, 'signer_roles')
# Prep CCs
ccs_payload = HSFormat.format_param_list(cc_roles, 'cc_roles')
# Prep Merge Fields
merge_fields_payload = {
'merge_fields': json.dumps(merge_fields)
}
# Assemble data for sending
data = {}
data.update(payload)
data.update(file_urls_payload)
data.update(signer_roles_payload)
data.update(ccs_payload)
if (merge_fields is not None):
data.update(merge_fields_payload)
data = HSFormat.strip_none_values(data)
request = self._get_request()
response = request.post(url, data=data, files=files_payload)
return response | python | def _create_embedded_template_draft(self, client_id, signer_roles, test_mode=False, files=None, file_urls=None, title=None, subject=None, message=None, cc_roles=None, merge_fields=None, use_preexisting_fields=False):
''' Helper method for creating embedded template drafts.
See public function for params.
'''
url = self.TEMPLATE_CREATE_EMBEDDED_DRAFT_URL
payload = {
'test_mode': self._boolean(test_mode),
'client_id': client_id,
'title': title,
'subject': subject,
'message': message,
'use_preexisting_fields': self._boolean(use_preexisting_fields)
}
# Prep files
files_payload = HSFormat.format_file_params(files)
file_urls_payload = HSFormat.format_file_url_params(file_urls)
# Prep Signer Roles
signer_roles_payload = HSFormat.format_dict_list(signer_roles, 'signer_roles')
# Prep CCs
ccs_payload = HSFormat.format_param_list(cc_roles, 'cc_roles')
# Prep Merge Fields
merge_fields_payload = {
'merge_fields': json.dumps(merge_fields)
}
# Assemble data for sending
data = {}
data.update(payload)
data.update(file_urls_payload)
data.update(signer_roles_payload)
data.update(ccs_payload)
if (merge_fields is not None):
data.update(merge_fields_payload)
data = HSFormat.strip_none_values(data)
request = self._get_request()
response = request.post(url, data=data, files=files_payload)
return response | [
"def",
"_create_embedded_template_draft",
"(",
"self",
",",
"client_id",
",",
"signer_roles",
",",
"test_mode",
"=",
"False",
",",
"files",
"=",
"None",
",",
"file_urls",
"=",
"None",
",",
"title",
"=",
"None",
",",
"subject",
"=",
"None",
",",
"message",
... | Helper method for creating embedded template drafts.
See public function for params. | [
"Helper",
"method",
"for",
"creating",
"embedded",
"template",
"drafts",
".",
"See",
"public",
"function",
"for",
"params",
"."
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1735-L1778 |
22,709 | hellosign/hellosign-python-sdk | hellosign_sdk/hsclient.py | HSClient._create_embedded_unclaimed_draft_with_template | def _create_embedded_unclaimed_draft_with_template(self, test_mode=False, client_id=None, is_for_embedded_signing=False, template_id=None, template_ids=None, requester_email_address=None, title=None, subject=None, message=None, signers=None, ccs=None, signing_redirect_url=None, requesting_redirect_url=None, metadata=None, custom_fields=None, allow_decline=False):
''' Helper method for creating unclaimed drafts from templates
See public function for params.
'''
#single params
payload = {
"test_mode": self._boolean(test_mode),
"client_id": client_id,
"is_for_embedded_signing": self._boolean(is_for_embedded_signing),
"template_id": template_id,
"requester_email_address": requester_email_address,
"title": title,
"subject": subject,
"message": message,
"signing_redirect_url": signing_redirect_url,
"requesting_redirect_url": requesting_redirect_url,
"allow_decline": self._boolean(allow_decline)
}
#format multi params
template_ids_payload = HSFormat.format_param_list(template_ids, 'template_ids')
signers_payload = HSFormat.format_dict_list(signers, 'signers', 'role_name')
ccs_payload = HSFormat.format_dict_list(ccs, 'ccs', 'role_name')
metadata_payload = HSFormat.format_single_dict(metadata, 'metadata')
custom_fields_payload = HSFormat.format_custom_fields(custom_fields)
#assemble payload
data = {}
data.update(payload)
data.update(template_ids_payload)
data.update(signers_payload)
data.update(ccs_payload)
data.update(metadata_payload)
data.update(custom_fields_payload)
data = HSFormat.strip_none_values(data)
#send call
url = self.UNCLAIMED_DRAFT_CREATE_EMBEDDED_WITH_TEMPLATE_URL
request = self._get_request()
response = request.post(url, data=data)
return response | python | def _create_embedded_unclaimed_draft_with_template(self, test_mode=False, client_id=None, is_for_embedded_signing=False, template_id=None, template_ids=None, requester_email_address=None, title=None, subject=None, message=None, signers=None, ccs=None, signing_redirect_url=None, requesting_redirect_url=None, metadata=None, custom_fields=None, allow_decline=False):
''' Helper method for creating unclaimed drafts from templates
See public function for params.
'''
#single params
payload = {
"test_mode": self._boolean(test_mode),
"client_id": client_id,
"is_for_embedded_signing": self._boolean(is_for_embedded_signing),
"template_id": template_id,
"requester_email_address": requester_email_address,
"title": title,
"subject": subject,
"message": message,
"signing_redirect_url": signing_redirect_url,
"requesting_redirect_url": requesting_redirect_url,
"allow_decline": self._boolean(allow_decline)
}
#format multi params
template_ids_payload = HSFormat.format_param_list(template_ids, 'template_ids')
signers_payload = HSFormat.format_dict_list(signers, 'signers', 'role_name')
ccs_payload = HSFormat.format_dict_list(ccs, 'ccs', 'role_name')
metadata_payload = HSFormat.format_single_dict(metadata, 'metadata')
custom_fields_payload = HSFormat.format_custom_fields(custom_fields)
#assemble payload
data = {}
data.update(payload)
data.update(template_ids_payload)
data.update(signers_payload)
data.update(ccs_payload)
data.update(metadata_payload)
data.update(custom_fields_payload)
data = HSFormat.strip_none_values(data)
#send call
url = self.UNCLAIMED_DRAFT_CREATE_EMBEDDED_WITH_TEMPLATE_URL
request = self._get_request()
response = request.post(url, data=data)
return response | [
"def",
"_create_embedded_unclaimed_draft_with_template",
"(",
"self",
",",
"test_mode",
"=",
"False",
",",
"client_id",
"=",
"None",
",",
"is_for_embedded_signing",
"=",
"False",
",",
"template_id",
"=",
"None",
",",
"template_ids",
"=",
"None",
",",
"requester_emai... | Helper method for creating unclaimed drafts from templates
See public function for params. | [
"Helper",
"method",
"for",
"creating",
"unclaimed",
"drafts",
"from",
"templates",
"See",
"public",
"function",
"for",
"params",
"."
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/hsclient.py#L1781-L1823 |
22,710 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/request.py | HSRequest.get_file | def get_file(self, url, path_or_file=None, headers=None, filename=None):
''' Get a file from a url and save it as `filename`
Args:
url (str): URL to send the request to
path_or_file (str or file): A writable File-like object or a path to save the file to.
filename (str): [DEPRECATED] File name to save the file as, this can be either
a full path or a relative path
headers (str, optional): custom headers
Returns:
True if file is downloaded and written successfully, False
otherwise.
'''
path_or_file = path_or_file or filename
if self.debug:
print("GET FILE: %s, headers=%s" % (url, headers))
self.headers = self._get_default_headers()
if headers is not None:
self.headers.update(headers)
response = requests.get(url, headers=self.headers, auth=self.auth, verify=self.verify_ssl)
self.http_status_code = response.status_code
try:
# No need to check for warnings here
self._check_error(response)
try:
path_or_file.write(response.content)
except AttributeError:
fd = os.open(path_or_file, os.O_CREAT | os.O_RDWR)
with os.fdopen(fd, "w+b") as f:
f.write(response.content)
except:
return False
return True | python | def get_file(self, url, path_or_file=None, headers=None, filename=None):
''' Get a file from a url and save it as `filename`
Args:
url (str): URL to send the request to
path_or_file (str or file): A writable File-like object or a path to save the file to.
filename (str): [DEPRECATED] File name to save the file as, this can be either
a full path or a relative path
headers (str, optional): custom headers
Returns:
True if file is downloaded and written successfully, False
otherwise.
'''
path_or_file = path_or_file or filename
if self.debug:
print("GET FILE: %s, headers=%s" % (url, headers))
self.headers = self._get_default_headers()
if headers is not None:
self.headers.update(headers)
response = requests.get(url, headers=self.headers, auth=self.auth, verify=self.verify_ssl)
self.http_status_code = response.status_code
try:
# No need to check for warnings here
self._check_error(response)
try:
path_or_file.write(response.content)
except AttributeError:
fd = os.open(path_or_file, os.O_CREAT | os.O_RDWR)
with os.fdopen(fd, "w+b") as f:
f.write(response.content)
except:
return False
return True | [
"def",
"get_file",
"(",
"self",
",",
"url",
",",
"path_or_file",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"filename",
"=",
"None",
")",
":",
"path_or_file",
"=",
"path_or_file",
"or",
"filename",
"if",
"self",
".",
"debug",
":",
"print",
"(",
"\"... | Get a file from a url and save it as `filename`
Args:
url (str): URL to send the request to
path_or_file (str or file): A writable File-like object or a path to save the file to.
filename (str): [DEPRECATED] File name to save the file as, this can be either
a full path or a relative path
headers (str, optional): custom headers
Returns:
True if file is downloaded and written successfully, False
otherwise. | [
"Get",
"a",
"file",
"from",
"a",
"url",
"and",
"save",
"it",
"as",
"filename"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/request.py#L69-L111 |
22,711 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/request.py | HSRequest.get | def get(self, url, headers=None, parameters=None, get_json=True):
''' Send a GET request with custome headers and parameters
Args:
url (str): URL to send the request to
headers (str, optional): custom headers
parameters (str, optional): optional parameters
Returns:
A JSON object of the returned response if `get_json` is True,
Requests' response object otherwise
'''
if self.debug:
print("GET: %s, headers=%s" % (url, headers))
self.headers = self._get_default_headers()
get_parameters = self.parameters
if get_parameters is None:
# In case self.parameters is still empty
get_parameters = {}
if headers is not None:
self.headers.update(headers)
if parameters is not None:
get_parameters.update(parameters)
response = requests.get(url, headers=self.headers, params=get_parameters, auth=self.auth, verify=self.verify_ssl)
json_response = self._process_json_response(response)
return json_response if get_json is True else response | python | def get(self, url, headers=None, parameters=None, get_json=True):
''' Send a GET request with custome headers and parameters
Args:
url (str): URL to send the request to
headers (str, optional): custom headers
parameters (str, optional): optional parameters
Returns:
A JSON object of the returned response if `get_json` is True,
Requests' response object otherwise
'''
if self.debug:
print("GET: %s, headers=%s" % (url, headers))
self.headers = self._get_default_headers()
get_parameters = self.parameters
if get_parameters is None:
# In case self.parameters is still empty
get_parameters = {}
if headers is not None:
self.headers.update(headers)
if parameters is not None:
get_parameters.update(parameters)
response = requests.get(url, headers=self.headers, params=get_parameters, auth=self.auth, verify=self.verify_ssl)
json_response = self._process_json_response(response)
return json_response if get_json is True else response | [
"def",
"get",
"(",
"self",
",",
"url",
",",
"headers",
"=",
"None",
",",
"parameters",
"=",
"None",
",",
"get_json",
"=",
"True",
")",
":",
"if",
"self",
".",
"debug",
":",
"print",
"(",
"\"GET: %s, headers=%s\"",
"%",
"(",
"url",
",",
"headers",
")"... | Send a GET request with custome headers and parameters
Args:
url (str): URL to send the request to
headers (str, optional): custom headers
parameters (str, optional): optional parameters
Returns:
A JSON object of the returned response if `get_json` is True,
Requests' response object otherwise | [
"Send",
"a",
"GET",
"request",
"with",
"custome",
"headers",
"and",
"parameters"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/request.py#L113-L143 |
22,712 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/request.py | HSRequest.post | def post(self, url, data=None, files=None, headers=None, get_json=True):
''' Make POST request to a url
Args:
url (str): URL to send the request to
data (dict, optional): Data to send
files (dict, optional): Files to send with the request
headers (str, optional): custom headers
Returns:
A JSON object of the returned response if `get_json` is True,
Requests' response object otherwise
'''
if self.debug:
print("POST: %s, headers=%s" % (url, headers))
self.headers = self._get_default_headers()
if headers is not None:
self.headers.update(headers)
response = requests.post(url, headers=self.headers, data=data, auth=self.auth, files=files, verify=self.verify_ssl)
json_response = self._process_json_response(response)
return json_response if get_json is True else response | python | def post(self, url, data=None, files=None, headers=None, get_json=True):
''' Make POST request to a url
Args:
url (str): URL to send the request to
data (dict, optional): Data to send
files (dict, optional): Files to send with the request
headers (str, optional): custom headers
Returns:
A JSON object of the returned response if `get_json` is True,
Requests' response object otherwise
'''
if self.debug:
print("POST: %s, headers=%s" % (url, headers))
self.headers = self._get_default_headers()
if headers is not None:
self.headers.update(headers)
response = requests.post(url, headers=self.headers, data=data, auth=self.auth, files=files, verify=self.verify_ssl)
json_response = self._process_json_response(response)
return json_response if get_json is True else response | [
"def",
"post",
"(",
"self",
",",
"url",
",",
"data",
"=",
"None",
",",
"files",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"get_json",
"=",
"True",
")",
":",
"if",
"self",
".",
"debug",
":",
"print",
"(",
"\"POST: %s, headers=%s\"",
"%",
"(",
"... | Make POST request to a url
Args:
url (str): URL to send the request to
data (dict, optional): Data to send
files (dict, optional): Files to send with the request
headers (str, optional): custom headers
Returns:
A JSON object of the returned response if `get_json` is True,
Requests' response object otherwise | [
"Make",
"POST",
"request",
"to",
"a",
"url"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/request.py#L145-L170 |
22,713 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/request.py | HSRequest._get_json_response | def _get_json_response(self, resp):
''' Parse a JSON response '''
if resp is not None and resp.text is not None:
try:
text = resp.text.strip('\n')
if len(text) > 0:
return json.loads(text)
except ValueError as e:
if self.debug:
print("Could not decode JSON response: \"%s\"" % resp.text)
raise e | python | def _get_json_response(self, resp):
''' Parse a JSON response '''
if resp is not None and resp.text is not None:
try:
text = resp.text.strip('\n')
if len(text) > 0:
return json.loads(text)
except ValueError as e:
if self.debug:
print("Could not decode JSON response: \"%s\"" % resp.text)
raise e | [
"def",
"_get_json_response",
"(",
"self",
",",
"resp",
")",
":",
"if",
"resp",
"is",
"not",
"None",
"and",
"resp",
".",
"text",
"is",
"not",
"None",
":",
"try",
":",
"text",
"=",
"resp",
".",
"text",
".",
"strip",
"(",
"'\\n'",
")",
"if",
"len",
... | Parse a JSON response | [
"Parse",
"a",
"JSON",
"response"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/request.py#L175-L185 |
22,714 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/request.py | HSRequest._process_json_response | def _process_json_response(self, response):
''' Process a given response '''
json_response = self._get_json_response(response)
if self.response_callback is not None:
json_response = self.response_callback(json_response)
response._content = json.dumps(json_response)
self.http_status_code = response.status_code
self._check_error(response, json_response)
self._check_warnings(json_response)
return json_response | python | def _process_json_response(self, response):
''' Process a given response '''
json_response = self._get_json_response(response)
if self.response_callback is not None:
json_response = self.response_callback(json_response)
response._content = json.dumps(json_response)
self.http_status_code = response.status_code
self._check_error(response, json_response)
self._check_warnings(json_response)
return json_response | [
"def",
"_process_json_response",
"(",
"self",
",",
"response",
")",
":",
"json_response",
"=",
"self",
".",
"_get_json_response",
"(",
"response",
")",
"if",
"self",
".",
"response_callback",
"is",
"not",
"None",
":",
"json_response",
"=",
"self",
".",
"respon... | Process a given response | [
"Process",
"a",
"given",
"response"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/request.py#L199-L212 |
22,715 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/request.py | HSRequest._check_error | def _check_error(self, response, json_response=None):
''' Check for HTTP error code from the response, raise exception if there's any
Args:
response (object): Object returned by requests' `get` and `post`
methods
json_response (dict): JSON response, if applicable
Raises:
HTTPError: If the status code of response is either 4xx or 5xx
Returns:
True if status code is not error code
'''
# If status code is 4xx or 5xx, that should be an error
if response.status_code >= 400:
json_response = json_response or self._get_json_response(response)
err_cls = self._check_http_error_code(response.status_code)
try:
raise err_cls("%s error: %s" % (response.status_code, json_response["error"]["error_msg"]), response.status_code)
# This is to catch error when we post get oauth data
except TypeError:
raise err_cls("%s error: %s" % (response.status_code, json_response["error_description"]), response.status_code)
# Return True if everything is OK
return True | python | def _check_error(self, response, json_response=None):
''' Check for HTTP error code from the response, raise exception if there's any
Args:
response (object): Object returned by requests' `get` and `post`
methods
json_response (dict): JSON response, if applicable
Raises:
HTTPError: If the status code of response is either 4xx or 5xx
Returns:
True if status code is not error code
'''
# If status code is 4xx or 5xx, that should be an error
if response.status_code >= 400:
json_response = json_response or self._get_json_response(response)
err_cls = self._check_http_error_code(response.status_code)
try:
raise err_cls("%s error: %s" % (response.status_code, json_response["error"]["error_msg"]), response.status_code)
# This is to catch error when we post get oauth data
except TypeError:
raise err_cls("%s error: %s" % (response.status_code, json_response["error_description"]), response.status_code)
# Return True if everything is OK
return True | [
"def",
"_check_error",
"(",
"self",
",",
"response",
",",
"json_response",
"=",
"None",
")",
":",
"# If status code is 4xx or 5xx, that should be an error",
"if",
"response",
".",
"status_code",
">=",
"400",
":",
"json_response",
"=",
"json_response",
"or",
"self",
... | Check for HTTP error code from the response, raise exception if there's any
Args:
response (object): Object returned by requests' `get` and `post`
methods
json_response (dict): JSON response, if applicable
Raises:
HTTPError: If the status code of response is either 4xx or 5xx
Returns:
True if status code is not error code | [
"Check",
"for",
"HTTP",
"error",
"code",
"from",
"the",
"response",
"raise",
"exception",
"if",
"there",
"s",
"any"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/request.py#L214-L242 |
22,716 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/request.py | HSRequest._check_warnings | def _check_warnings(self, json_response):
''' Extract warnings from the response to make them accessible
Args:
json_response (dict): JSON response
'''
self.warnings = None
if json_response:
self.warnings = json_response.get('warnings')
if self.debug and self.warnings:
for w in self.warnings:
print("WARNING: %s - %s" % (w['warning_name'], w['warning_msg'])) | python | def _check_warnings(self, json_response):
''' Extract warnings from the response to make them accessible
Args:
json_response (dict): JSON response
'''
self.warnings = None
if json_response:
self.warnings = json_response.get('warnings')
if self.debug and self.warnings:
for w in self.warnings:
print("WARNING: %s - %s" % (w['warning_name'], w['warning_msg'])) | [
"def",
"_check_warnings",
"(",
"self",
",",
"json_response",
")",
":",
"self",
".",
"warnings",
"=",
"None",
"if",
"json_response",
":",
"self",
".",
"warnings",
"=",
"json_response",
".",
"get",
"(",
"'warnings'",
")",
"if",
"self",
".",
"debug",
"and",
... | Extract warnings from the response to make them accessible
Args:
json_response (dict): JSON response | [
"Extract",
"warnings",
"from",
"the",
"response",
"to",
"make",
"them",
"accessible"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/request.py#L244-L258 |
22,717 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/hsaccesstokenauth.py | HSAccessTokenAuth.from_response | def from_response(self, response_data):
''' Builds a new HSAccessTokenAuth straight from response data
Args:
response_data (dict): Response data to use
Returns:
A HSAccessTokenAuth objet
'''
return HSAccessTokenAuth(
response_data['access_token'],
response_data['token_type'],
response_data['refresh_token'],
response_data['expires_in'],
response_data.get('state') # Not always here
) | python | def from_response(self, response_data):
''' Builds a new HSAccessTokenAuth straight from response data
Args:
response_data (dict): Response data to use
Returns:
A HSAccessTokenAuth objet
'''
return HSAccessTokenAuth(
response_data['access_token'],
response_data['token_type'],
response_data['refresh_token'],
response_data['expires_in'],
response_data.get('state') # Not always here
) | [
"def",
"from_response",
"(",
"self",
",",
"response_data",
")",
":",
"return",
"HSAccessTokenAuth",
"(",
"response_data",
"[",
"'access_token'",
"]",
",",
"response_data",
"[",
"'token_type'",
"]",
",",
"response_data",
"[",
"'refresh_token'",
"]",
",",
"response_... | Builds a new HSAccessTokenAuth straight from response data
Args:
response_data (dict): Response data to use
Returns:
A HSAccessTokenAuth objet | [
"Builds",
"a",
"new",
"HSAccessTokenAuth",
"straight",
"from",
"response",
"data"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/hsaccesstokenauth.py#L53-L69 |
22,718 | hellosign/hellosign-python-sdk | hellosign_sdk/resource/signature_request.py | SignatureRequest.find_response_component | def find_response_component(self, api_id=None, signature_id=None):
''' Find one or many repsonse components.
Args:
api_id (str): Api id associated with the component(s) to be retrieved.
signature_id (str): Signature id associated with the component(s) to be retrieved.
Returns:
A list of dictionaries containing component data
'''
if not api_id and not signature_id:
raise ValueError('At least one of api_id and signature_id is required')
components = list()
if self.response_data:
for component in self.response_data:
if (api_id and component['api_id']) == api_id or (signature_id and component['signature_id'] == signature_id):
components.append(component)
return components | python | def find_response_component(self, api_id=None, signature_id=None):
''' Find one or many repsonse components.
Args:
api_id (str): Api id associated with the component(s) to be retrieved.
signature_id (str): Signature id associated with the component(s) to be retrieved.
Returns:
A list of dictionaries containing component data
'''
if not api_id and not signature_id:
raise ValueError('At least one of api_id and signature_id is required')
components = list()
if self.response_data:
for component in self.response_data:
if (api_id and component['api_id']) == api_id or (signature_id and component['signature_id'] == signature_id):
components.append(component)
return components | [
"def",
"find_response_component",
"(",
"self",
",",
"api_id",
"=",
"None",
",",
"signature_id",
"=",
"None",
")",
":",
"if",
"not",
"api_id",
"and",
"not",
"signature_id",
":",
"raise",
"ValueError",
"(",
"'At least one of api_id and signature_id is required'",
")",... | Find one or many repsonse components.
Args:
api_id (str): Api id associated with the component(s) to be retrieved.
signature_id (str): Signature id associated with the component(s) to be retrieved.
Returns:
A list of dictionaries containing component data | [
"Find",
"one",
"or",
"many",
"repsonse",
"components",
"."
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/resource/signature_request.py#L114-L137 |
22,719 | hellosign/hellosign-python-sdk | hellosign_sdk/resource/signature_request.py | SignatureRequest.find_signature | def find_signature(self, signature_id=None, signer_email_address=None):
''' Return a signature for the given parameters
Args:
signature_id (str): Id of the signature to retrieve.
signer_email_address (str): Email address of the associated signer for the signature to retrieve.
Returns:
A Signature object or None
'''
if self.signatures:
for signature in self.signatures:
if signature.signature_id == signature_id or signature.signer_email_address == signer_email_address:
return signature | python | def find_signature(self, signature_id=None, signer_email_address=None):
''' Return a signature for the given parameters
Args:
signature_id (str): Id of the signature to retrieve.
signer_email_address (str): Email address of the associated signer for the signature to retrieve.
Returns:
A Signature object or None
'''
if self.signatures:
for signature in self.signatures:
if signature.signature_id == signature_id or signature.signer_email_address == signer_email_address:
return signature | [
"def",
"find_signature",
"(",
"self",
",",
"signature_id",
"=",
"None",
",",
"signer_email_address",
"=",
"None",
")",
":",
"if",
"self",
".",
"signatures",
":",
"for",
"signature",
"in",
"self",
".",
"signatures",
":",
"if",
"signature",
".",
"signature_id"... | Return a signature for the given parameters
Args:
signature_id (str): Id of the signature to retrieve.
signer_email_address (str): Email address of the associated signer for the signature to retrieve.
Returns:
A Signature object or None | [
"Return",
"a",
"signature",
"for",
"the",
"given",
"parameters"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/resource/signature_request.py#L139-L154 |
22,720 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/__init__.py | api_resource._uncamelize | def _uncamelize(self, s):
''' Convert a camel-cased string to using underscores '''
res = ''
if s:
for i in range(len(s)):
if i > 0 and s[i].lower() != s[i]:
res += '_'
res += s[i].lower()
return res | python | def _uncamelize(self, s):
''' Convert a camel-cased string to using underscores '''
res = ''
if s:
for i in range(len(s)):
if i > 0 and s[i].lower() != s[i]:
res += '_'
res += s[i].lower()
return res | [
"def",
"_uncamelize",
"(",
"self",
",",
"s",
")",
":",
"res",
"=",
"''",
"if",
"s",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"s",
")",
")",
":",
"if",
"i",
">",
"0",
"and",
"s",
"[",
"i",
"]",
".",
"lower",
"(",
")",
"!=",
"s",
... | Convert a camel-cased string to using underscores | [
"Convert",
"a",
"camel",
"-",
"cased",
"string",
"to",
"using",
"underscores"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/__init__.py#L53-L61 |
22,721 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/hsformat.py | HSFormat.format_file_params | def format_file_params(files):
'''
Utility method for formatting file parameters for transmission
'''
files_payload = {}
if files:
for idx, filename in enumerate(files):
files_payload["file[" + str(idx) + "]"] = open(filename, 'rb')
return files_payload | python | def format_file_params(files):
'''
Utility method for formatting file parameters for transmission
'''
files_payload = {}
if files:
for idx, filename in enumerate(files):
files_payload["file[" + str(idx) + "]"] = open(filename, 'rb')
return files_payload | [
"def",
"format_file_params",
"(",
"files",
")",
":",
"files_payload",
"=",
"{",
"}",
"if",
"files",
":",
"for",
"idx",
",",
"filename",
"in",
"enumerate",
"(",
"files",
")",
":",
"files_payload",
"[",
"\"file[\"",
"+",
"str",
"(",
"idx",
")",
"+",
"\"]... | Utility method for formatting file parameters for transmission | [
"Utility",
"method",
"for",
"formatting",
"file",
"parameters",
"for",
"transmission"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/hsformat.py#L41-L49 |
22,722 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/hsformat.py | HSFormat.format_file_url_params | def format_file_url_params(file_urls):
'''
Utility method for formatting file URL parameters for transmission
'''
file_urls_payload = {}
if file_urls:
for idx, fileurl in enumerate(file_urls):
file_urls_payload["file_url[" + str(idx) + "]"] = fileurl
return file_urls_payload | python | def format_file_url_params(file_urls):
'''
Utility method for formatting file URL parameters for transmission
'''
file_urls_payload = {}
if file_urls:
for idx, fileurl in enumerate(file_urls):
file_urls_payload["file_url[" + str(idx) + "]"] = fileurl
return file_urls_payload | [
"def",
"format_file_url_params",
"(",
"file_urls",
")",
":",
"file_urls_payload",
"=",
"{",
"}",
"if",
"file_urls",
":",
"for",
"idx",
",",
"fileurl",
"in",
"enumerate",
"(",
"file_urls",
")",
":",
"file_urls_payload",
"[",
"\"file_url[\"",
"+",
"str",
"(",
... | Utility method for formatting file URL parameters for transmission | [
"Utility",
"method",
"for",
"formatting",
"file",
"URL",
"parameters",
"for",
"transmission"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/hsformat.py#L52-L60 |
22,723 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/hsformat.py | HSFormat.format_single_dict | def format_single_dict(dictionary, output_name):
'''
Currently used for metadata fields
'''
output_payload = {}
if dictionary:
for (k, v) in dictionary.items():
output_payload[output_name + '[' + k + ']'] = v
return output_payload | python | def format_single_dict(dictionary, output_name):
'''
Currently used for metadata fields
'''
output_payload = {}
if dictionary:
for (k, v) in dictionary.items():
output_payload[output_name + '[' + k + ']'] = v
return output_payload | [
"def",
"format_single_dict",
"(",
"dictionary",
",",
"output_name",
")",
":",
"output_payload",
"=",
"{",
"}",
"if",
"dictionary",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"dictionary",
".",
"items",
"(",
")",
":",
"output_payload",
"[",
"output_name",
... | Currently used for metadata fields | [
"Currently",
"used",
"for",
"metadata",
"fields"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/hsformat.py#L106-L114 |
22,724 | hellosign/hellosign-python-sdk | hellosign_sdk/utils/hsformat.py | HSFormat.format_custom_fields | def format_custom_fields(list_of_custom_fields):
'''
Custom fields formatting for submission
'''
output_payload = {}
if list_of_custom_fields:
# custom_field: {"name": value}
for custom_field in list_of_custom_fields:
for key, value in custom_field.items():
output_payload["custom_fields[" + key + "]"] = value
return output_payload | python | def format_custom_fields(list_of_custom_fields):
'''
Custom fields formatting for submission
'''
output_payload = {}
if list_of_custom_fields:
# custom_field: {"name": value}
for custom_field in list_of_custom_fields:
for key, value in custom_field.items():
output_payload["custom_fields[" + key + "]"] = value
return output_payload | [
"def",
"format_custom_fields",
"(",
"list_of_custom_fields",
")",
":",
"output_payload",
"=",
"{",
"}",
"if",
"list_of_custom_fields",
":",
"# custom_field: {\"name\": value}",
"for",
"custom_field",
"in",
"list_of_custom_fields",
":",
"for",
"key",
",",
"value",
"in",
... | Custom fields formatting for submission | [
"Custom",
"fields",
"formatting",
"for",
"submission"
] | 4325a29ad5766380a214eac3914511f62f7ecba4 | https://github.com/hellosign/hellosign-python-sdk/blob/4325a29ad5766380a214eac3914511f62f7ecba4/hellosign_sdk/utils/hsformat.py#L117-L127 |
22,725 | GaretJax/django-click | setup.py | Setup.read | def read(fname, fail_silently=False):
"""
Read the content of the given file. The path is evaluated from the
directory containing this file.
"""
try:
filepath = os.path.join(os.path.dirname(__file__), fname)
with io.open(filepath, 'rt', encoding='utf8') as f:
return f.read()
except:
if not fail_silently:
raise
return '' | python | def read(fname, fail_silently=False):
try:
filepath = os.path.join(os.path.dirname(__file__), fname)
with io.open(filepath, 'rt', encoding='utf8') as f:
return f.read()
except:
if not fail_silently:
raise
return '' | [
"def",
"read",
"(",
"fname",
",",
"fail_silently",
"=",
"False",
")",
":",
"try",
":",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"fname",
")",
"with",
"io",
".",
"open",
"(... | Read the content of the given file. The path is evaluated from the
directory containing this file. | [
"Read",
"the",
"content",
"of",
"the",
"given",
"file",
".",
"The",
"path",
"is",
"evaluated",
"from",
"the",
"directory",
"containing",
"this",
"file",
"."
] | 3584bff81cb7891a1aa2d7fe49c1db501f5b0e84 | https://github.com/GaretJax/django-click/blob/3584bff81cb7891a1aa2d7fe49c1db501f5b0e84/setup.py#L35-L47 |
22,726 | GaretJax/django-click | djclick/adapter.py | pass_verbosity | def pass_verbosity(f):
"""
Marks a callback as wanting to receive the verbosity as a keyword argument.
"""
def new_func(*args, **kwargs):
kwargs['verbosity'] = click.get_current_context().verbosity
return f(*args, **kwargs)
return update_wrapper(new_func, f) | python | def pass_verbosity(f):
def new_func(*args, **kwargs):
kwargs['verbosity'] = click.get_current_context().verbosity
return f(*args, **kwargs)
return update_wrapper(new_func, f) | [
"def",
"pass_verbosity",
"(",
"f",
")",
":",
"def",
"new_func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'verbosity'",
"]",
"=",
"click",
".",
"get_current_context",
"(",
")",
".",
"verbosity",
"return",
"f",
"(",
"*",
"ar... | Marks a callback as wanting to receive the verbosity as a keyword argument. | [
"Marks",
"a",
"callback",
"as",
"wanting",
"to",
"receive",
"the",
"verbosity",
"as",
"a",
"keyword",
"argument",
"."
] | 3584bff81cb7891a1aa2d7fe49c1db501f5b0e84 | https://github.com/GaretJax/django-click/blob/3584bff81cb7891a1aa2d7fe49c1db501f5b0e84/djclick/adapter.py#L232-L239 |
22,727 | GaretJax/django-click | djclick/adapter.py | DjangoCommandMixin.run_from_argv | def run_from_argv(self, argv):
"""
Called when run from the command line.
"""
try:
return self.main(args=argv[2:], standalone_mode=False)
except click.ClickException as e:
if getattr(e.ctx, 'traceback', False):
raise
e.show()
sys.exit(e.exit_code) | python | def run_from_argv(self, argv):
try:
return self.main(args=argv[2:], standalone_mode=False)
except click.ClickException as e:
if getattr(e.ctx, 'traceback', False):
raise
e.show()
sys.exit(e.exit_code) | [
"def",
"run_from_argv",
"(",
"self",
",",
"argv",
")",
":",
"try",
":",
"return",
"self",
".",
"main",
"(",
"args",
"=",
"argv",
"[",
"2",
":",
"]",
",",
"standalone_mode",
"=",
"False",
")",
"except",
"click",
".",
"ClickException",
"as",
"e",
":",
... | Called when run from the command line. | [
"Called",
"when",
"run",
"from",
"the",
"command",
"line",
"."
] | 3584bff81cb7891a1aa2d7fe49c1db501f5b0e84 | https://github.com/GaretJax/django-click/blob/3584bff81cb7891a1aa2d7fe49c1db501f5b0e84/djclick/adapter.py#L57-L67 |
22,728 | xxtea/xxtea-python | xxtea/__init__.py | encrypt | def encrypt(data, key):
'''encrypt the data with the key'''
data = __tobytes(data)
data_len = len(data)
data = ffi.from_buffer(data)
key = ffi.from_buffer(__tobytes(key))
out_len = ffi.new('size_t *')
result = lib.xxtea_encrypt(data, data_len, key, out_len)
ret = ffi.buffer(result, out_len[0])[:]
lib.free(result)
return ret | python | def encrypt(data, key):
'''encrypt the data with the key'''
data = __tobytes(data)
data_len = len(data)
data = ffi.from_buffer(data)
key = ffi.from_buffer(__tobytes(key))
out_len = ffi.new('size_t *')
result = lib.xxtea_encrypt(data, data_len, key, out_len)
ret = ffi.buffer(result, out_len[0])[:]
lib.free(result)
return ret | [
"def",
"encrypt",
"(",
"data",
",",
"key",
")",
":",
"data",
"=",
"__tobytes",
"(",
"data",
")",
"data_len",
"=",
"len",
"(",
"data",
")",
"data",
"=",
"ffi",
".",
"from_buffer",
"(",
"data",
")",
"key",
"=",
"ffi",
".",
"from_buffer",
"(",
"__toby... | encrypt the data with the key | [
"encrypt",
"the",
"data",
"with",
"the",
"key"
] | 35bd893cb42dce338631d051be9302fcbc21b7fc | https://github.com/xxtea/xxtea-python/blob/35bd893cb42dce338631d051be9302fcbc21b7fc/xxtea/__init__.py#L30-L40 |
22,729 | xxtea/xxtea-python | xxtea/__init__.py | decrypt | def decrypt(data, key):
'''decrypt the data with the key'''
data_len = len(data)
data = ffi.from_buffer(data)
key = ffi.from_buffer(__tobytes(key))
out_len = ffi.new('size_t *')
result = lib.xxtea_decrypt(data, data_len, key, out_len)
ret = ffi.buffer(result, out_len[0])[:]
lib.free(result)
return ret | python | def decrypt(data, key):
'''decrypt the data with the key'''
data_len = len(data)
data = ffi.from_buffer(data)
key = ffi.from_buffer(__tobytes(key))
out_len = ffi.new('size_t *')
result = lib.xxtea_decrypt(data, data_len, key, out_len)
ret = ffi.buffer(result, out_len[0])[:]
lib.free(result)
return ret | [
"def",
"decrypt",
"(",
"data",
",",
"key",
")",
":",
"data_len",
"=",
"len",
"(",
"data",
")",
"data",
"=",
"ffi",
".",
"from_buffer",
"(",
"data",
")",
"key",
"=",
"ffi",
".",
"from_buffer",
"(",
"__tobytes",
"(",
"key",
")",
")",
"out_len",
"=",
... | decrypt the data with the key | [
"decrypt",
"the",
"data",
"with",
"the",
"key"
] | 35bd893cb42dce338631d051be9302fcbc21b7fc | https://github.com/xxtea/xxtea-python/blob/35bd893cb42dce338631d051be9302fcbc21b7fc/xxtea/__init__.py#L42-L51 |
22,730 | mozilla/taar | taar/flask_app.py | flaskrun | def flaskrun(app, default_host="127.0.0.1", default_port="8000"):
"""
Takes a flask.Flask instance and runs it. Parses
command-line flags to configure the app.
"""
# Set up the command-line options
parser = optparse.OptionParser()
parser.add_option(
"-H",
"--host",
help="Hostname of the Flask app " + "[default %s]" % default_host,
default=default_host,
)
parser.add_option(
"-P",
"--port",
help="Port for the Flask app " + "[default %s]" % default_port,
default=default_port,
)
# Two options useful for debugging purposes, but
# a bit dangerous so not exposed in the help message.
parser.add_option(
"-d", "--debug", action="store_true", dest="debug", help=optparse.SUPPRESS_HELP
)
parser.add_option(
"-p",
"--profile",
action="store_true",
dest="profile",
help=optparse.SUPPRESS_HELP,
)
options, _ = parser.parse_args()
# If the user selects the profiling option, then we need
# to do a little extra setup
if options.profile:
from werkzeug.contrib.profiler import ProfilerMiddleware
app.config["PROFILE"] = True
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
options.debug = True
app.run(debug=options.debug, host=options.host, port=int(options.port)) | python | def flaskrun(app, default_host="127.0.0.1", default_port="8000"):
# Set up the command-line options
parser = optparse.OptionParser()
parser.add_option(
"-H",
"--host",
help="Hostname of the Flask app " + "[default %s]" % default_host,
default=default_host,
)
parser.add_option(
"-P",
"--port",
help="Port for the Flask app " + "[default %s]" % default_port,
default=default_port,
)
# Two options useful for debugging purposes, but
# a bit dangerous so not exposed in the help message.
parser.add_option(
"-d", "--debug", action="store_true", dest="debug", help=optparse.SUPPRESS_HELP
)
parser.add_option(
"-p",
"--profile",
action="store_true",
dest="profile",
help=optparse.SUPPRESS_HELP,
)
options, _ = parser.parse_args()
# If the user selects the profiling option, then we need
# to do a little extra setup
if options.profile:
from werkzeug.contrib.profiler import ProfilerMiddleware
app.config["PROFILE"] = True
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
options.debug = True
app.run(debug=options.debug, host=options.host, port=int(options.port)) | [
"def",
"flaskrun",
"(",
"app",
",",
"default_host",
"=",
"\"127.0.0.1\"",
",",
"default_port",
"=",
"\"8000\"",
")",
":",
"# Set up the command-line options",
"parser",
"=",
"optparse",
".",
"OptionParser",
"(",
")",
"parser",
".",
"add_option",
"(",
"\"-H\"",
"... | Takes a flask.Flask instance and runs it. Parses
command-line flags to configure the app. | [
"Takes",
"a",
"flask",
".",
"Flask",
"instance",
"and",
"runs",
"it",
".",
"Parses",
"command",
"-",
"line",
"flags",
"to",
"configure",
"the",
"app",
"."
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/flask_app.py#L41-L86 |
22,731 | mozilla/taar | taar/recommenders/hybrid_recommender.py | CuratedWhitelistCache.get_randomized_guid_sample | def get_randomized_guid_sample(self, item_count):
""" Fetch a subset of randomzied GUIDs from the whitelist """
dataset = self.get_whitelist()
random.shuffle(dataset)
return dataset[:item_count] | python | def get_randomized_guid_sample(self, item_count):
dataset = self.get_whitelist()
random.shuffle(dataset)
return dataset[:item_count] | [
"def",
"get_randomized_guid_sample",
"(",
"self",
",",
"item_count",
")",
":",
"dataset",
"=",
"self",
".",
"get_whitelist",
"(",
")",
"random",
".",
"shuffle",
"(",
"dataset",
")",
"return",
"dataset",
"[",
":",
"item_count",
"]"
] | Fetch a subset of randomzied GUIDs from the whitelist | [
"Fetch",
"a",
"subset",
"of",
"randomzied",
"GUIDs",
"from",
"the",
"whitelist"
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/hybrid_recommender.py#L28-L32 |
22,732 | mozilla/taar | taar/recommenders/hybrid_recommender.py | CuratedRecommender.can_recommend | def can_recommend(self, client_data, extra_data={}):
"""The Curated recommender will always be able to recommend
something"""
self.logger.info("Curated can_recommend: {}".format(True))
return True | python | def can_recommend(self, client_data, extra_data={}):
self.logger.info("Curated can_recommend: {}".format(True))
return True | [
"def",
"can_recommend",
"(",
"self",
",",
"client_data",
",",
"extra_data",
"=",
"{",
"}",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Curated can_recommend: {}\"",
".",
"format",
"(",
"True",
")",
")",
"return",
"True"
] | The Curated recommender will always be able to recommend
something | [
"The",
"Curated",
"recommender",
"will",
"always",
"be",
"able",
"to",
"recommend",
"something"
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/hybrid_recommender.py#L52-L56 |
22,733 | mozilla/taar | taar/recommenders/hybrid_recommender.py | CuratedRecommender.recommend | def recommend(self, client_data, limit, extra_data={}):
"""
Curated recommendations are just random selections
"""
guids = self._curated_wl.get_randomized_guid_sample(limit)
results = [(guid, 1.0) for guid in guids]
log_data = (client_data["client_id"], str(guids))
self.logger.info(
"Curated recommendations client_id: [%s], guids: [%s]" % log_data
)
return results | python | def recommend(self, client_data, limit, extra_data={}):
guids = self._curated_wl.get_randomized_guid_sample(limit)
results = [(guid, 1.0) for guid in guids]
log_data = (client_data["client_id"], str(guids))
self.logger.info(
"Curated recommendations client_id: [%s], guids: [%s]" % log_data
)
return results | [
"def",
"recommend",
"(",
"self",
",",
"client_data",
",",
"limit",
",",
"extra_data",
"=",
"{",
"}",
")",
":",
"guids",
"=",
"self",
".",
"_curated_wl",
".",
"get_randomized_guid_sample",
"(",
"limit",
")",
"results",
"=",
"[",
"(",
"guid",
",",
"1.0",
... | Curated recommendations are just random selections | [
"Curated",
"recommendations",
"are",
"just",
"random",
"selections"
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/hybrid_recommender.py#L58-L70 |
22,734 | mozilla/taar | taar/recommenders/hybrid_recommender.py | HybridRecommender.recommend | def recommend(self, client_data, limit, extra_data={}):
"""
Hybrid recommendations simply select half recommendations from
the ensemble recommender, and half from the curated one.
Duplicate recommendations are accomodated by rank ordering
by weight.
"""
preinstalled_addon_ids = client_data.get("installed_addons", [])
# Compute an extended limit by adding the length of
# the list of any preinstalled addons.
extended_limit = limit + len(preinstalled_addon_ids)
ensemble_suggestions = self._ensemble_recommender.recommend(
client_data, extended_limit, extra_data
)
curated_suggestions = self._curated_recommender.recommend(
client_data, extended_limit, extra_data
)
# Generate a set of results from each of the composite
# recommenders. We select one item from each recommender
# sequentially so that we do not bias one recommender over the
# other.
merged_results = set()
while (
len(merged_results) < limit
and len(ensemble_suggestions) > 0
and len(curated_suggestions) > 0
):
r1 = ensemble_suggestions.pop()
if r1[0] not in [temp[0] for temp in merged_results]:
merged_results.add(r1)
# Terminate early if we have an odd number for the limit
if not (
len(merged_results) < limit
and len(ensemble_suggestions) > 0
and len(curated_suggestions) > 0
):
break
r2 = curated_suggestions.pop()
if r2[0] not in [temp[0] for temp in merged_results]:
merged_results.add(r2)
if len(merged_results) < limit:
msg = (
"Defaulting to empty results. Insufficient recommendations found for client: %s"
% client_data["client_id"]
)
self.logger.info(msg)
return []
sorted_results = sorted(
list(merged_results), key=op.itemgetter(1), reverse=True
)
log_data = (client_data["client_id"], str([r[0] for r in sorted_results]))
self.logger.info(
"Hybrid recommendations client_id: [%s], guids: [%s]" % log_data
)
return sorted_results | python | def recommend(self, client_data, limit, extra_data={}):
preinstalled_addon_ids = client_data.get("installed_addons", [])
# Compute an extended limit by adding the length of
# the list of any preinstalled addons.
extended_limit = limit + len(preinstalled_addon_ids)
ensemble_suggestions = self._ensemble_recommender.recommend(
client_data, extended_limit, extra_data
)
curated_suggestions = self._curated_recommender.recommend(
client_data, extended_limit, extra_data
)
# Generate a set of results from each of the composite
# recommenders. We select one item from each recommender
# sequentially so that we do not bias one recommender over the
# other.
merged_results = set()
while (
len(merged_results) < limit
and len(ensemble_suggestions) > 0
and len(curated_suggestions) > 0
):
r1 = ensemble_suggestions.pop()
if r1[0] not in [temp[0] for temp in merged_results]:
merged_results.add(r1)
# Terminate early if we have an odd number for the limit
if not (
len(merged_results) < limit
and len(ensemble_suggestions) > 0
and len(curated_suggestions) > 0
):
break
r2 = curated_suggestions.pop()
if r2[0] not in [temp[0] for temp in merged_results]:
merged_results.add(r2)
if len(merged_results) < limit:
msg = (
"Defaulting to empty results. Insufficient recommendations found for client: %s"
% client_data["client_id"]
)
self.logger.info(msg)
return []
sorted_results = sorted(
list(merged_results), key=op.itemgetter(1), reverse=True
)
log_data = (client_data["client_id"], str([r[0] for r in sorted_results]))
self.logger.info(
"Hybrid recommendations client_id: [%s], guids: [%s]" % log_data
)
return sorted_results | [
"def",
"recommend",
"(",
"self",
",",
"client_data",
",",
"limit",
",",
"extra_data",
"=",
"{",
"}",
")",
":",
"preinstalled_addon_ids",
"=",
"client_data",
".",
"get",
"(",
"\"installed_addons\"",
",",
"[",
"]",
")",
"# Compute an extended limit by adding the len... | Hybrid recommendations simply select half recommendations from
the ensemble recommender, and half from the curated one.
Duplicate recommendations are accomodated by rank ordering
by weight. | [
"Hybrid",
"recommendations",
"simply",
"select",
"half",
"recommendations",
"from",
"the",
"ensemble",
"recommender",
"and",
"half",
"from",
"the",
"curated",
"one",
"."
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/hybrid_recommender.py#L102-L169 |
22,735 | mozilla/taar | taar/recommenders/ensemble_recommender.py | EnsembleRecommender._recommend | def _recommend(self, client_data, limit, extra_data={}):
"""
Ensemble recommendations are aggregated from individual
recommenders. The ensemble recommender applies a weight to
the recommendation outputs of each recommender to reorder the
recommendations to be a better fit.
The intuitive understanding is that the total space of
recommended addons across all recommenders will include the
'true' addons that should be recommended better than any
individual recommender. The ensemble method simply needs to
weight each recommender appropriate so that the ordering is
correct.
"""
self.logger.info("Ensemble recommend invoked")
preinstalled_addon_ids = client_data.get("installed_addons", [])
# Compute an extended limit by adding the length of
# the list of any preinstalled addons.
extended_limit = limit + len(preinstalled_addon_ids)
flattened_results = []
ensemble_weights = self._weight_cache.getWeights()
for rkey in self.RECOMMENDER_KEYS:
recommender = self._recommender_map[rkey]
if recommender.can_recommend(client_data):
raw_results = recommender.recommend(
client_data, extended_limit, extra_data
)
reweighted_results = []
for guid, weight in raw_results:
item = (guid, weight * ensemble_weights[rkey])
reweighted_results.append(item)
flattened_results.extend(reweighted_results)
# Sort the results by the GUID
flattened_results.sort(key=lambda item: item[0])
# group by the guid, sum up the weights for recurring GUID
# suggestions across all recommenders
guid_grouper = itertools.groupby(flattened_results, lambda item: item[0])
ensemble_suggestions = []
for (guid, guid_group) in guid_grouper:
weight_sum = sum([v for (g, v) in guid_group])
item = (guid, weight_sum)
ensemble_suggestions.append(item)
# Sort in reverse order (greatest weight to least)
ensemble_suggestions.sort(key=lambda x: -x[1])
filtered_ensemble_suggestions = [
(guid, weight)
for (guid, weight) in ensemble_suggestions
if guid not in preinstalled_addon_ids
]
results = filtered_ensemble_suggestions[:limit]
log_data = (
client_data["client_id"],
str(ensemble_weights),
str([r[0] for r in results]),
)
self.logger.info(
"client_id: [%s], ensemble_weight: [%s], guids: [%s]" % log_data
)
return results | python | def _recommend(self, client_data, limit, extra_data={}):
self.logger.info("Ensemble recommend invoked")
preinstalled_addon_ids = client_data.get("installed_addons", [])
# Compute an extended limit by adding the length of
# the list of any preinstalled addons.
extended_limit = limit + len(preinstalled_addon_ids)
flattened_results = []
ensemble_weights = self._weight_cache.getWeights()
for rkey in self.RECOMMENDER_KEYS:
recommender = self._recommender_map[rkey]
if recommender.can_recommend(client_data):
raw_results = recommender.recommend(
client_data, extended_limit, extra_data
)
reweighted_results = []
for guid, weight in raw_results:
item = (guid, weight * ensemble_weights[rkey])
reweighted_results.append(item)
flattened_results.extend(reweighted_results)
# Sort the results by the GUID
flattened_results.sort(key=lambda item: item[0])
# group by the guid, sum up the weights for recurring GUID
# suggestions across all recommenders
guid_grouper = itertools.groupby(flattened_results, lambda item: item[0])
ensemble_suggestions = []
for (guid, guid_group) in guid_grouper:
weight_sum = sum([v for (g, v) in guid_group])
item = (guid, weight_sum)
ensemble_suggestions.append(item)
# Sort in reverse order (greatest weight to least)
ensemble_suggestions.sort(key=lambda x: -x[1])
filtered_ensemble_suggestions = [
(guid, weight)
for (guid, weight) in ensemble_suggestions
if guid not in preinstalled_addon_ids
]
results = filtered_ensemble_suggestions[:limit]
log_data = (
client_data["client_id"],
str(ensemble_weights),
str([r[0] for r in results]),
)
self.logger.info(
"client_id: [%s], ensemble_weight: [%s], guids: [%s]" % log_data
)
return results | [
"def",
"_recommend",
"(",
"self",
",",
"client_data",
",",
"limit",
",",
"extra_data",
"=",
"{",
"}",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Ensemble recommend invoked\"",
")",
"preinstalled_addon_ids",
"=",
"client_data",
".",
"get",
"(",
"\"... | Ensemble recommendations are aggregated from individual
recommenders. The ensemble recommender applies a weight to
the recommendation outputs of each recommender to reorder the
recommendations to be a better fit.
The intuitive understanding is that the total space of
recommended addons across all recommenders will include the
'true' addons that should be recommended better than any
individual recommender. The ensemble method simply needs to
weight each recommender appropriate so that the ordering is
correct. | [
"Ensemble",
"recommendations",
"are",
"aggregated",
"from",
"individual",
"recommenders",
".",
"The",
"ensemble",
"recommender",
"applies",
"a",
"weight",
"to",
"the",
"recommendation",
"outputs",
"of",
"each",
"recommender",
"to",
"reorder",
"the",
"recommendations",... | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/ensemble_recommender.py#L81-L150 |
22,736 | mozilla/taar | taar/recommenders/lazys3.py | LazyJSONLoader.get | def get(self, transform=None):
"""
Return the JSON defined at the S3 location in the constructor.
The get method will reload the S3 object after the TTL has
expired.
Fetch the JSON object from cache or S3 if necessary
"""
if not self.has_expired() and self._cached_copy is not None:
return self._cached_copy, False
return self._refresh_cache(transform), True | python | def get(self, transform=None):
if not self.has_expired() and self._cached_copy is not None:
return self._cached_copy, False
return self._refresh_cache(transform), True | [
"def",
"get",
"(",
"self",
",",
"transform",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"has_expired",
"(",
")",
"and",
"self",
".",
"_cached_copy",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_cached_copy",
",",
"False",
"return",
"self",
... | Return the JSON defined at the S3 location in the constructor.
The get method will reload the S3 object after the TTL has
expired.
Fetch the JSON object from cache or S3 if necessary | [
"Return",
"the",
"JSON",
"defined",
"at",
"the",
"S3",
"location",
"in",
"the",
"constructor",
"."
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/lazys3.py#L43-L54 |
22,737 | mozilla/taar | bin/pipstrap.py | hashed_download | def hashed_download(url, temp, digest):
"""Download ``url`` to ``temp``, make sure it has the SHA-256 ``digest``,
and return its path."""
# Based on pip 1.4.1's URLOpener but with cert verification removed
def opener():
opener = build_opener(HTTPSHandler())
# Strip out HTTPHandler to prevent MITM spoof:
for handler in opener.handlers:
if isinstance(handler, HTTPHandler):
opener.handlers.remove(handler)
return opener
def read_chunks(response, chunk_size):
while True:
chunk = response.read(chunk_size)
if not chunk:
break
yield chunk
response = opener().open(url)
path = join(temp, urlparse(url).path.split('/')[-1])
actual_hash = sha256()
with open(path, 'wb') as file:
for chunk in read_chunks(response, 4096):
file.write(chunk)
actual_hash.update(chunk)
actual_digest = actual_hash.hexdigest()
if actual_digest != digest:
raise HashError(url, path, actual_digest, digest)
return path | python | def hashed_download(url, temp, digest):
# Based on pip 1.4.1's URLOpener but with cert verification removed
def opener():
opener = build_opener(HTTPSHandler())
# Strip out HTTPHandler to prevent MITM spoof:
for handler in opener.handlers:
if isinstance(handler, HTTPHandler):
opener.handlers.remove(handler)
return opener
def read_chunks(response, chunk_size):
while True:
chunk = response.read(chunk_size)
if not chunk:
break
yield chunk
response = opener().open(url)
path = join(temp, urlparse(url).path.split('/')[-1])
actual_hash = sha256()
with open(path, 'wb') as file:
for chunk in read_chunks(response, 4096):
file.write(chunk)
actual_hash.update(chunk)
actual_digest = actual_hash.hexdigest()
if actual_digest != digest:
raise HashError(url, path, actual_digest, digest)
return path | [
"def",
"hashed_download",
"(",
"url",
",",
"temp",
",",
"digest",
")",
":",
"# Based on pip 1.4.1's URLOpener but with cert verification removed",
"def",
"opener",
"(",
")",
":",
"opener",
"=",
"build_opener",
"(",
"HTTPSHandler",
"(",
")",
")",
"# Strip out HTTPHandl... | Download ``url`` to ``temp``, make sure it has the SHA-256 ``digest``,
and return its path. | [
"Download",
"url",
"to",
"temp",
"make",
"sure",
"it",
"has",
"the",
"SHA",
"-",
"256",
"digest",
"and",
"return",
"its",
"path",
"."
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/bin/pipstrap.py#L65-L95 |
22,738 | mozilla/taar | taar/recommenders/similarity_recommender.py | SimilarityRecommender._build_features_caches | def _build_features_caches(self):
"""This function build two feature cache matrices.
That's the self.categorical_features and
self.continuous_features attributes.
One matrix is for the continuous features and the other is for
the categorical features. This is needed to speed up the similarity
recommendation process."""
_donors_pool = self._donors_pool.get()[0]
_lr_curves = self._lr_curves.get()[0]
if _donors_pool is None or _lr_curves is None:
# We need to have both donors_pool and lr_curves defined
# to reconstruct the matrices
return None
self.num_donors = len(_donors_pool)
# Build a numpy matrix cache for the continuous features.
self.continuous_features = np.zeros((self.num_donors, len(CONTINUOUS_FEATURES)))
for idx, d in enumerate(_donors_pool):
features = [d.get(specified_key) for specified_key in CONTINUOUS_FEATURES]
self.continuous_features[idx] = features
# Build the cache for categorical features.
self.categorical_features = np.zeros(
(self.num_donors, len(CATEGORICAL_FEATURES)), dtype="object"
)
for idx, d in enumerate(_donors_pool):
features = [d.get(specified_key) for specified_key in CATEGORICAL_FEATURES]
self.categorical_features[idx] = np.array([features], dtype="object")
self.logger.info("Reconstructed matrices for similarity recommender") | python | def _build_features_caches(self):
_donors_pool = self._donors_pool.get()[0]
_lr_curves = self._lr_curves.get()[0]
if _donors_pool is None or _lr_curves is None:
# We need to have both donors_pool and lr_curves defined
# to reconstruct the matrices
return None
self.num_donors = len(_donors_pool)
# Build a numpy matrix cache for the continuous features.
self.continuous_features = np.zeros((self.num_donors, len(CONTINUOUS_FEATURES)))
for idx, d in enumerate(_donors_pool):
features = [d.get(specified_key) for specified_key in CONTINUOUS_FEATURES]
self.continuous_features[idx] = features
# Build the cache for categorical features.
self.categorical_features = np.zeros(
(self.num_donors, len(CATEGORICAL_FEATURES)), dtype="object"
)
for idx, d in enumerate(_donors_pool):
features = [d.get(specified_key) for specified_key in CATEGORICAL_FEATURES]
self.categorical_features[idx] = np.array([features], dtype="object")
self.logger.info("Reconstructed matrices for similarity recommender") | [
"def",
"_build_features_caches",
"(",
"self",
")",
":",
"_donors_pool",
"=",
"self",
".",
"_donors_pool",
".",
"get",
"(",
")",
"[",
"0",
"]",
"_lr_curves",
"=",
"self",
".",
"_lr_curves",
".",
"get",
"(",
")",
"[",
"0",
"]",
"if",
"_donors_pool",
"is"... | This function build two feature cache matrices.
That's the self.categorical_features and
self.continuous_features attributes.
One matrix is for the continuous features and the other is for
the categorical features. This is needed to speed up the similarity
recommendation process. | [
"This",
"function",
"build",
"two",
"feature",
"cache",
"matrices",
"."
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/similarity_recommender.py#L103-L136 |
22,739 | mozilla/taar | taar/recommenders/recommendation_manager.py | RecommendationManager.recommend | def recommend(self, client_id, limit, extra_data={}):
"""Return recommendations for the given client.
The recommendation logic will go through each recommender and
pick the first one that "can_recommend".
:param client_id: the client unique id.
:param limit: the maximum number of recommendations to return.
:param extra_data: a dictionary with extra client data.
"""
if client_id in TEST_CLIENT_IDS:
data = self._whitelist_data.get()[0]
random.shuffle(data)
samples = data[:limit]
self.logger.info("Test ID detected [{}]".format(client_id))
return [(s, 1.1) for s in samples]
if client_id in EMPTY_TEST_CLIENT_IDS:
self.logger.info("Empty Test ID detected [{}]".format(client_id))
return []
client_info = self.profile_fetcher.get(client_id)
if client_info is None:
self.logger.info(
"Defaulting to empty results. No client info fetched from dynamo."
)
return []
results = self._ensemble_recommender.recommend(client_info, limit, extra_data)
return results | python | def recommend(self, client_id, limit, extra_data={}):
if client_id in TEST_CLIENT_IDS:
data = self._whitelist_data.get()[0]
random.shuffle(data)
samples = data[:limit]
self.logger.info("Test ID detected [{}]".format(client_id))
return [(s, 1.1) for s in samples]
if client_id in EMPTY_TEST_CLIENT_IDS:
self.logger.info("Empty Test ID detected [{}]".format(client_id))
return []
client_info = self.profile_fetcher.get(client_id)
if client_info is None:
self.logger.info(
"Defaulting to empty results. No client info fetched from dynamo."
)
return []
results = self._ensemble_recommender.recommend(client_info, limit, extra_data)
return results | [
"def",
"recommend",
"(",
"self",
",",
"client_id",
",",
"limit",
",",
"extra_data",
"=",
"{",
"}",
")",
":",
"if",
"client_id",
"in",
"TEST_CLIENT_IDS",
":",
"data",
"=",
"self",
".",
"_whitelist_data",
".",
"get",
"(",
")",
"[",
"0",
"]",
"random",
... | Return recommendations for the given client.
The recommendation logic will go through each recommender and
pick the first one that "can_recommend".
:param client_id: the client unique id.
:param limit: the maximum number of recommendations to return.
:param extra_data: a dictionary with extra client data. | [
"Return",
"recommendations",
"for",
"the",
"given",
"client",
"."
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/recommenders/recommendation_manager.py#L85-L116 |
22,740 | mozilla/taar | taar/profile_fetcher.py | ProfileController.get_client_profile | def get_client_profile(self, client_id):
"""This fetches a single client record out of DynamoDB
"""
try:
response = self._table.get_item(Key={'client_id': client_id})
compressed_bytes = response['Item']['json_payload'].value
json_byte_data = zlib.decompress(compressed_bytes)
json_str_data = json_byte_data.decode('utf8')
return json.loads(json_str_data)
except KeyError:
# No client ID found - not really an error
return None
except Exception as e:
# Return None on error. The caller in ProfileFetcher will
# handle error logging
msg = "Error loading client data for {}. Error: {}"
self.logger.debug(msg.format(client_id, str(e)))
return None | python | def get_client_profile(self, client_id):
try:
response = self._table.get_item(Key={'client_id': client_id})
compressed_bytes = response['Item']['json_payload'].value
json_byte_data = zlib.decompress(compressed_bytes)
json_str_data = json_byte_data.decode('utf8')
return json.loads(json_str_data)
except KeyError:
# No client ID found - not really an error
return None
except Exception as e:
# Return None on error. The caller in ProfileFetcher will
# handle error logging
msg = "Error loading client data for {}. Error: {}"
self.logger.debug(msg.format(client_id, str(e)))
return None | [
"def",
"get_client_profile",
"(",
"self",
",",
"client_id",
")",
":",
"try",
":",
"response",
"=",
"self",
".",
"_table",
".",
"get_item",
"(",
"Key",
"=",
"{",
"'client_id'",
":",
"client_id",
"}",
")",
"compressed_bytes",
"=",
"response",
"[",
"'Item'",
... | This fetches a single client record out of DynamoDB | [
"This",
"fetches",
"a",
"single",
"client",
"record",
"out",
"of",
"DynamoDB"
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/profile_fetcher.py#L33-L50 |
22,741 | mozilla/taar | taar/plugin.py | clean_promoted_guids | def clean_promoted_guids(raw_promoted_guids):
""" Verify that the promoted GUIDs are formatted correctly,
otherwise strip it down into an empty list.
"""
valid = True
for row in raw_promoted_guids:
if len(row) != 2:
valid = False
break
if not (
(isinstance(row[0], str) or isinstance(row[0], unicode))
and (isinstance(row[1], int) or isinstance(row[1], float)) # noqa
):
valid = False
break
if valid:
return raw_promoted_guids
return [] | python | def clean_promoted_guids(raw_promoted_guids):
valid = True
for row in raw_promoted_guids:
if len(row) != 2:
valid = False
break
if not (
(isinstance(row[0], str) or isinstance(row[0], unicode))
and (isinstance(row[1], int) or isinstance(row[1], float)) # noqa
):
valid = False
break
if valid:
return raw_promoted_guids
return [] | [
"def",
"clean_promoted_guids",
"(",
"raw_promoted_guids",
")",
":",
"valid",
"=",
"True",
"for",
"row",
"in",
"raw_promoted_guids",
":",
"if",
"len",
"(",
"row",
")",
"!=",
"2",
":",
"valid",
"=",
"False",
"break",
"if",
"not",
"(",
"(",
"isinstance",
"(... | Verify that the promoted GUIDs are formatted correctly,
otherwise strip it down into an empty list. | [
"Verify",
"that",
"the",
"promoted",
"GUIDs",
"are",
"formatted",
"correctly",
"otherwise",
"strip",
"it",
"down",
"into",
"an",
"empty",
"list",
"."
] | 4002eb395f0b7ad837f1578e92d590e2cf82bdca | https://github.com/mozilla/taar/blob/4002eb395f0b7ad837f1578e92d590e2cf82bdca/taar/plugin.py#L32-L52 |
22,742 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.login | def login(self):
"""Login to Tahoma API."""
if self.__logged_in:
return
login = {'userId': self.__username, 'userPassword': self.__password}
header = BASE_HEADERS.copy()
request = requests.post(BASE_URL + 'login',
data=login,
headers=header,
timeout=10)
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for login, " +
"protocol error: " + request.status_code + ' - ' +
request.reason + "(" + str(error) + ")")
if 'error' in result.keys():
raise Exception("Could not login: " + result['error'])
if request.status_code != 200:
raise Exception(
"Could not login, HTTP code: " +
str(request.status_code) + ' - ' + request.reason)
if 'success' not in result.keys() or not result['success']:
raise Exception("Could not login, no success")
cookie = request.headers.get("set-cookie")
if cookie is None:
raise Exception("Could not login, no cookie set")
self.__cookie = cookie
self.__logged_in = True
return self.__logged_in | python | def login(self):
if self.__logged_in:
return
login = {'userId': self.__username, 'userPassword': self.__password}
header = BASE_HEADERS.copy()
request = requests.post(BASE_URL + 'login',
data=login,
headers=header,
timeout=10)
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for login, " +
"protocol error: " + request.status_code + ' - ' +
request.reason + "(" + str(error) + ")")
if 'error' in result.keys():
raise Exception("Could not login: " + result['error'])
if request.status_code != 200:
raise Exception(
"Could not login, HTTP code: " +
str(request.status_code) + ' - ' + request.reason)
if 'success' not in result.keys() or not result['success']:
raise Exception("Could not login, no success")
cookie = request.headers.get("set-cookie")
if cookie is None:
raise Exception("Could not login, no cookie set")
self.__cookie = cookie
self.__logged_in = True
return self.__logged_in | [
"def",
"login",
"(",
"self",
")",
":",
"if",
"self",
".",
"__logged_in",
":",
"return",
"login",
"=",
"{",
"'userId'",
":",
"self",
".",
"__username",
",",
"'userPassword'",
":",
"self",
".",
"__password",
"}",
"header",
"=",
"BASE_HEADERS",
".",
"copy",... | Login to Tahoma API. | [
"Login",
"to",
"Tahoma",
"API",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L32-L68 |
22,743 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.get_user | def get_user(self):
"""Get the user informations from the server.
:return: a dict with all the informations
:rtype: dict
raises ValueError in case of protocol issues
:Example:
>>> "creationTime": <time>,
>>> "lastUpdateTime": <time>,
>>> "userId": "<email for login>",
>>> "title": 0,
>>> "firstName": "<First>",
>>> "lastName": "<Last>",
>>> "email": "<contact email>",
>>> "phoneNumber": "<phone>",
>>> "mobilePhone": "<mobile>",
>>> "locale": "<two char country code>"
:Warning:
The type and amount of values in the dictionary can change any time.
"""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(BASE_URL + 'getEndUser',
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_user()
return
try:
result = request.json()
except ValueError:
raise Exception(
"Not a valid result for getEndUser, protocol error!")
return result['endUser'] | python | def get_user(self):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(BASE_URL + 'getEndUser',
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_user()
return
try:
result = request.json()
except ValueError:
raise Exception(
"Not a valid result for getEndUser, protocol error!")
return result['endUser'] | [
"def",
"get_user",
"(",
"self",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"request",
"=",
"requests",
".",
"get",
"(",
"BASE_URL",
"+",
"'getEndUser'",
",",
"headers",
... | Get the user informations from the server.
:return: a dict with all the informations
:rtype: dict
raises ValueError in case of protocol issues
:Example:
>>> "creationTime": <time>,
>>> "lastUpdateTime": <time>,
>>> "userId": "<email for login>",
>>> "title": 0,
>>> "firstName": "<First>",
>>> "lastName": "<Last>",
>>> "email": "<contact email>",
>>> "phoneNumber": "<phone>",
>>> "mobilePhone": "<mobile>",
>>> "locale": "<two char country code>"
:Warning:
The type and amount of values in the dictionary can change any time. | [
"Get",
"the",
"user",
"informations",
"from",
"the",
"server",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L70-L114 |
22,744 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi._get_setup | def _get_setup(self, result):
"""Internal method which process the results from the server."""
self.__devices = {}
if ('setup' not in result.keys() or
'devices' not in result['setup'].keys()):
raise Exception(
"Did not find device definition.")
for device_data in result['setup']['devices']:
device = Device(self, device_data)
self.__devices[device.url] = device
self.__location = result['setup']['location']
self.__gateway = result['setup']['gateways'] | python | def _get_setup(self, result):
self.__devices = {}
if ('setup' not in result.keys() or
'devices' not in result['setup'].keys()):
raise Exception(
"Did not find device definition.")
for device_data in result['setup']['devices']:
device = Device(self, device_data)
self.__devices[device.url] = device
self.__location = result['setup']['location']
self.__gateway = result['setup']['gateways'] | [
"def",
"_get_setup",
"(",
"self",
",",
"result",
")",
":",
"self",
".",
"__devices",
"=",
"{",
"}",
"if",
"(",
"'setup'",
"not",
"in",
"result",
".",
"keys",
"(",
")",
"or",
"'devices'",
"not",
"in",
"result",
"[",
"'setup'",
"]",
".",
"keys",
"(",... | Internal method which process the results from the server. | [
"Internal",
"method",
"which",
"process",
"the",
"results",
"from",
"the",
"server",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L156-L170 |
22,745 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.apply_actions | def apply_actions(self, name_of_action, actions):
"""Start to execute an action or a group of actions.
This method takes a bunch of actions and runs them on your
Tahoma box.
:param name_of_action: the label/name for the action
:param actions: an array of Action objects
:return: the execution identifier **************
what if it fails
:rtype: string
raises ValueError in case of protocol issues
:Seealso:
- get_events
- get_current_executions
"""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
actions_serialized = []
for action in actions:
actions_serialized.append(action.serialize())
data = {"label": name_of_action, "actions": actions_serialized}
json_data = json.dumps(data, indent=None, sort_keys=True)
request = requests.post(
BASE_URL + "apply",
headers=header, data=json_data,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.apply_actions(name_of_action, actions)
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for applying an " +
"action, protocol error: " + request.status_code +
' - ' + request.reason + " (" + error + ")")
if 'execId' not in result.keys():
raise Exception("Could not run actions, missing execId.")
return result['execId'] | python | def apply_actions(self, name_of_action, actions):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
actions_serialized = []
for action in actions:
actions_serialized.append(action.serialize())
data = {"label": name_of_action, "actions": actions_serialized}
json_data = json.dumps(data, indent=None, sort_keys=True)
request = requests.post(
BASE_URL + "apply",
headers=header, data=json_data,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.apply_actions(name_of_action, actions)
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for applying an " +
"action, protocol error: " + request.status_code +
' - ' + request.reason + " (" + error + ")")
if 'execId' not in result.keys():
raise Exception("Could not run actions, missing execId.")
return result['execId'] | [
"def",
"apply_actions",
"(",
"self",
",",
"name_of_action",
",",
"actions",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"actions_serialized",
"=",
"[",
"]",
"for",
"action",
... | Start to execute an action or a group of actions.
This method takes a bunch of actions and runs them on your
Tahoma box.
:param name_of_action: the label/name for the action
:param actions: an array of Action objects
:return: the execution identifier **************
what if it fails
:rtype: string
raises ValueError in case of protocol issues
:Seealso:
- get_events
- get_current_executions | [
"Start",
"to",
"execute",
"an",
"action",
"or",
"a",
"group",
"of",
"actions",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L281-L333 |
22,746 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.get_events | def get_events(self):
"""Return a set of events.
Which have been occured since the last call of this method.
This method should be called regulary to get all occuring
Events. There are three different Event types/classes
which can be returned:
- DeviceStateChangedEvent, if any device changed it's state
due to an applied action or just because of other reasons
- CommandExecutionStateChangedEvent, a executed command goes
through several phases which can be followed
- ExecutionStateChangedEvent, ******** todo
:return: an array of Events or empty array
:rtype: list
raises ValueError in case of protocol issues
:Seealso:
- apply_actions
- launch_action_group
- get_history
"""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.post(BASE_URL + 'getEvents',
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_events()
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for getEvent," +
" protocol error: " + error)
return self._get_events(result) | python | def get_events(self):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.post(BASE_URL + 'getEvents',
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_events()
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for getEvent," +
" protocol error: " + error)
return self._get_events(result) | [
"def",
"get_events",
"(",
"self",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"request",
"=",
"requests",
".",
"post",
"(",
"BASE_URL",
"+",
"'getEvents'",
",",
"headers",
... | Return a set of events.
Which have been occured since the last call of this method.
This method should be called regulary to get all occuring
Events. There are three different Event types/classes
which can be returned:
- DeviceStateChangedEvent, if any device changed it's state
due to an applied action or just because of other reasons
- CommandExecutionStateChangedEvent, a executed command goes
through several phases which can be followed
- ExecutionStateChangedEvent, ******** todo
:return: an array of Events or empty array
:rtype: list
raises ValueError in case of protocol issues
:Seealso:
- apply_actions
- launch_action_group
- get_history | [
"Return",
"a",
"set",
"of",
"events",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L335-L381 |
22,747 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi._get_events | def _get_events(self, result):
""""Internal method for being able to run unit tests."""
events = []
for event_data in result:
event = Event.factory(event_data)
if event is not None:
events.append(event)
if isinstance(event, DeviceStateChangedEvent):
# change device state
if self.__devices[event.device_url] is None:
raise Exception(
"Received device change " +
"state for unknown device '" +
event.device_url + "'")
self.__devices[event.device_url].set_active_states(
event.states)
return events | python | def _get_events(self, result):
"events = []
for event_data in result:
event = Event.factory(event_data)
if event is not None:
events.append(event)
if isinstance(event, DeviceStateChangedEvent):
# change device state
if self.__devices[event.device_url] is None:
raise Exception(
"Received device change " +
"state for unknown device '" +
event.device_url + "'")
self.__devices[event.device_url].set_active_states(
event.states)
return events | [
"def",
"_get_events",
"(",
"self",
",",
"result",
")",
":",
"events",
"=",
"[",
"]",
"for",
"event_data",
"in",
"result",
":",
"event",
"=",
"Event",
".",
"factory",
"(",
"event_data",
")",
"if",
"event",
"is",
"not",
"None",
":",
"events",
".",
"app... | Internal method for being able to run unit tests. | [
"Internal",
"method",
"for",
"being",
"able",
"to",
"run",
"unit",
"tests",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L383-L404 |
22,748 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.get_current_executions | def get_current_executions(self):
"""Get all current running executions.
:return: Returns a set of running Executions or empty list.
:rtype: list
raises ValueError in case of protocol issues
:Seealso:
- apply_actions
- launch_action_group
- get_history
"""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(
BASE_URL +
'getCurrentExecutions',
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_current_executions()
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for" +
"get_current_executions, protocol error: " + error)
if 'executions' not in result.keys():
return None
executions = []
for execution_data in result['executions']:
exe = Execution(execution_data)
executions.append(exe)
return executions | python | def get_current_executions(self):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(
BASE_URL +
'getCurrentExecutions',
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_current_executions()
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for" +
"get_current_executions, protocol error: " + error)
if 'executions' not in result.keys():
return None
executions = []
for execution_data in result['executions']:
exe = Execution(execution_data)
executions.append(exe)
return executions | [
"def",
"get_current_executions",
"(",
"self",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"request",
"=",
"requests",
".",
"get",
"(",
"BASE_URL",
"+",
"'getCurrentExecutions'"... | Get all current running executions.
:return: Returns a set of running Executions or empty list.
:rtype: list
raises ValueError in case of protocol issues
:Seealso:
- apply_actions
- launch_action_group
- get_history | [
"Get",
"all",
"current",
"running",
"executions",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L406-L451 |
22,749 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.get_action_groups | def get_action_groups(self):
"""Get all Action Groups.
:return: List of Action Groups
"""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(BASE_URL + "getActionGroups",
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_action_groups()
return
try:
result = request.json()
except ValueError:
raise Exception(
"get_action_groups: Not a valid result for ")
if 'actionGroups' not in result.keys():
return None
groups = []
for group_data in result['actionGroups']:
group = ActionGroup(group_data)
groups.append(group)
return groups | python | def get_action_groups(self):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(BASE_URL + "getActionGroups",
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_action_groups()
return
try:
result = request.json()
except ValueError:
raise Exception(
"get_action_groups: Not a valid result for ")
if 'actionGroups' not in result.keys():
return None
groups = []
for group_data in result['actionGroups']:
group = ActionGroup(group_data)
groups.append(group)
return groups | [
"def",
"get_action_groups",
"(",
"self",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"request",
"=",
"requests",
".",
"get",
"(",
"BASE_URL",
"+",
"\"getActionGroups\"",
",",... | Get all Action Groups.
:return: List of Action Groups | [
"Get",
"all",
"Action",
"Groups",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L494-L527 |
22,750 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.launch_action_group | def launch_action_group(self, action_id):
"""Start action group."""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(
BASE_URL + 'launchActionGroup?oid=' +
action_id,
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.launch_action_group(action_id)
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for launch" +
"action group, protocol error: " +
request.status_code + ' - ' + request.reason +
" (" + error + ")")
if 'actionGroup' not in result.keys():
raise Exception(
"Could not launch action" +
"group, missing execId.")
return result['actionGroup'][0]['execId'] | python | def launch_action_group(self, action_id):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(
BASE_URL + 'launchActionGroup?oid=' +
action_id,
headers=header,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.launch_action_group(action_id)
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for launch" +
"action group, protocol error: " +
request.status_code + ' - ' + request.reason +
" (" + error + ")")
if 'actionGroup' not in result.keys():
raise Exception(
"Could not launch action" +
"group, missing execId.")
return result['actionGroup'][0]['execId'] | [
"def",
"launch_action_group",
"(",
"self",
",",
"action_id",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"request",
"=",
"requests",
".",
"get",
"(",
"BASE_URL",
"+",
"'lau... | Start action group. | [
"Start",
"action",
"group",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L529-L560 |
22,751 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.get_states | def get_states(self, devices):
"""Get States of Devices."""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
json_data = self._create_get_state_request(devices)
request = requests.post(
BASE_URL + 'getStates',
headers=header,
data=json_data,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_states(devices)
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for" +
"getStates, protocol error:" + error)
self._get_states(result) | python | def get_states(self, devices):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
json_data = self._create_get_state_request(devices)
request = requests.post(
BASE_URL + 'getStates',
headers=header,
data=json_data,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.get_states(devices)
return
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for" +
"getStates, protocol error:" + error)
self._get_states(result) | [
"def",
"get_states",
"(",
"self",
",",
"devices",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"json_data",
"=",
"self",
".",
"_create_get_state_request",
"(",
"devices",
")",... | Get States of Devices. | [
"Get",
"States",
"of",
"Devices",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L562-L588 |
22,752 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi._create_get_state_request | def _create_get_state_request(self, given_devices):
"""Create state request."""
dev_list = []
if isinstance(given_devices, list):
devices = given_devices
else:
devices = []
for dev_name, item in self.__devices.items():
if item:
devices.append(self.__devices[dev_name])
for device in devices:
states = []
for state_name in sorted(device.active_states.keys()):
states.append({'name': state_name})
dev_list.append({'deviceURL': device.url, 'states': states})
return json.dumps(
dev_list, indent=None,
sort_keys=True, separators=(',', ': ')) | python | def _create_get_state_request(self, given_devices):
dev_list = []
if isinstance(given_devices, list):
devices = given_devices
else:
devices = []
for dev_name, item in self.__devices.items():
if item:
devices.append(self.__devices[dev_name])
for device in devices:
states = []
for state_name in sorted(device.active_states.keys()):
states.append({'name': state_name})
dev_list.append({'deviceURL': device.url, 'states': states})
return json.dumps(
dev_list, indent=None,
sort_keys=True, separators=(',', ': ')) | [
"def",
"_create_get_state_request",
"(",
"self",
",",
"given_devices",
")",
":",
"dev_list",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"given_devices",
",",
"list",
")",
":",
"devices",
"=",
"given_devices",
"else",
":",
"devices",
"=",
"[",
"]",
"for",
"dev... | Create state request. | [
"Create",
"state",
"request",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L590-L612 |
22,753 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi._get_states | def _get_states(self, result):
"""Get states of devices."""
if 'devices' not in result.keys():
return
for device_states in result['devices']:
device = self.__devices[device_states['deviceURL']]
try:
device.set_active_states(device_states['states'])
except KeyError:
pass | python | def _get_states(self, result):
if 'devices' not in result.keys():
return
for device_states in result['devices']:
device = self.__devices[device_states['deviceURL']]
try:
device.set_active_states(device_states['states'])
except KeyError:
pass | [
"def",
"_get_states",
"(",
"self",
",",
"result",
")",
":",
"if",
"'devices'",
"not",
"in",
"result",
".",
"keys",
"(",
")",
":",
"return",
"for",
"device_states",
"in",
"result",
"[",
"'devices'",
"]",
":",
"device",
"=",
"self",
".",
"__devices",
"["... | Get states of devices. | [
"Get",
"states",
"of",
"devices",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L614-L624 |
22,754 | philklei/tahoma-api | tahoma_api/tahoma_api.py | TahomaApi.refresh_all_states | def refresh_all_states(self):
"""Update all states."""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(
BASE_URL + "refreshAllStates", headers=header, timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.refresh_all_states()
return | python | def refresh_all_states(self):
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
request = requests.get(
BASE_URL + "refreshAllStates", headers=header, timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
self.refresh_all_states()
return | [
"def",
"refresh_all_states",
"(",
"self",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"request",
"=",
"requests",
".",
"get",
"(",
"BASE_URL",
"+",
"\"refreshAllStates\"",
",... | Update all states. | [
"Update",
"all",
"states",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L626-L638 |
22,755 | philklei/tahoma-api | tahoma_api/tahoma_api.py | Device.set_active_state | def set_active_state(self, name, value):
"""Set active state."""
if name not in self.__active_states.keys():
raise ValueError("Can not set unknown state '" + name + "'")
if (isinstance(self.__active_states[name], int) and
isinstance(value, str)):
# we get an update as str but current value is
# an int, try to convert
self.__active_states[name] = int(value)
elif (isinstance(self.__active_states[name], float) and
isinstance(value, str)):
# we get an update as str but current value is
# a float, try to convert
self.__active_states[name] = float(value)
else:
self.__active_states[name] = value | python | def set_active_state(self, name, value):
if name not in self.__active_states.keys():
raise ValueError("Can not set unknown state '" + name + "'")
if (isinstance(self.__active_states[name], int) and
isinstance(value, str)):
# we get an update as str but current value is
# an int, try to convert
self.__active_states[name] = int(value)
elif (isinstance(self.__active_states[name], float) and
isinstance(value, str)):
# we get an update as str but current value is
# a float, try to convert
self.__active_states[name] = float(value)
else:
self.__active_states[name] = value | [
"def",
"set_active_state",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"__active_states",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"Can not set unknown state '\"",
"+",
"name",
"+",
"\"'\"",
")... | Set active state. | [
"Set",
"active",
"state",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L749-L765 |
22,756 | philklei/tahoma-api | tahoma_api/tahoma_api.py | Action.add_command | def add_command(self, cmd_name, *args):
"""Add command to action."""
self.__commands.append(Command(cmd_name, args)) | python | def add_command(self, cmd_name, *args):
self.__commands.append(Command(cmd_name, args)) | [
"def",
"add_command",
"(",
"self",
",",
"cmd_name",
",",
"*",
"args",
")",
":",
"self",
".",
"__commands",
".",
"append",
"(",
"Command",
"(",
"cmd_name",
",",
"args",
")",
")"
] | Add command to action. | [
"Add",
"command",
"to",
"action",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L818-L820 |
22,757 | philklei/tahoma-api | tahoma_api/tahoma_api.py | Action.serialize | def serialize(self):
"""Serialize action."""
commands = []
for cmd in self.commands:
commands.append(cmd.serialize())
out = {'commands': commands, 'deviceURL': self.__device_url}
return out | python | def serialize(self):
commands = []
for cmd in self.commands:
commands.append(cmd.serialize())
out = {'commands': commands, 'deviceURL': self.__device_url}
return out | [
"def",
"serialize",
"(",
"self",
")",
":",
"commands",
"=",
"[",
"]",
"for",
"cmd",
"in",
"self",
".",
"commands",
":",
"commands",
".",
"append",
"(",
"cmd",
".",
"serialize",
"(",
")",
")",
"out",
"=",
"{",
"'commands'",
":",
"commands",
",",
"'d... | Serialize action. | [
"Serialize",
"action",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L827-L836 |
22,758 | philklei/tahoma-api | tahoma_api/tahoma_api.py | Event.factory | def factory(data):
"""Tahoma Event factory."""
if data['name'] is "DeviceStateChangedEvent":
return DeviceStateChangedEvent(data)
elif data['name'] is "ExecutionStateChangedEvent":
return ExecutionStateChangedEvent(data)
elif data['name'] is "CommandExecutionStateChangedEvent":
return CommandExecutionStateChangedEvent(data)
else:
raise ValueError("Unknown event '" + data['name'] + "' occurred.") | python | def factory(data):
if data['name'] is "DeviceStateChangedEvent":
return DeviceStateChangedEvent(data)
elif data['name'] is "ExecutionStateChangedEvent":
return ExecutionStateChangedEvent(data)
elif data['name'] is "CommandExecutionStateChangedEvent":
return CommandExecutionStateChangedEvent(data)
else:
raise ValueError("Unknown event '" + data['name'] + "' occurred.") | [
"def",
"factory",
"(",
"data",
")",
":",
"if",
"data",
"[",
"'name'",
"]",
"is",
"\"DeviceStateChangedEvent\"",
":",
"return",
"DeviceStateChangedEvent",
"(",
"data",
")",
"elif",
"data",
"[",
"'name'",
"]",
"is",
"\"ExecutionStateChangedEvent\"",
":",
"return",... | Tahoma Event factory. | [
"Tahoma",
"Event",
"factory",
"."
] | fc84f6ba3b673d0cd0e9e618777834a74a3c7b64 | https://github.com/philklei/tahoma-api/blob/fc84f6ba3b673d0cd0e9e618777834a74a3c7b64/tahoma_api/tahoma_api.py#L948-L957 |
22,759 | openknowledge-archive/flexidate | flexidate/__init__.py | parse | def parse(date, dayfirst=True):
'''Parse a `date` into a `FlexiDate`.
@param date: the date to parse - may be a string, datetime.date,
datetime.datetime or FlexiDate.
TODO: support for quarters e.g. Q4 1980 or 1954 Q3
TODO: support latin stuff like M.DCC.LIII
TODO: convert '-' to '?' when used that way
e.g. had this date [181-]
'''
if not date:
return None
if isinstance(date, FlexiDate):
return date
if isinstance(date, int):
return FlexiDate(year=date)
elif isinstance(date, datetime.datetime):
parser = PythonDateTimeParser()
return parser.parse(date)
elif isinstance(date, datetime.date):
parser = PythonDateParser()
return parser.parse(date)
else: # assuming its a string
parser = DateutilDateParser()
out = parser.parse(date, **{'dayfirst': dayfirst})
if out is not None:
return out
# msg = 'Unable to parse %s' % date
# raise ValueError(date)
val = 'UNPARSED: %s' % date
val = val.encode('ascii', 'ignore')
return FlexiDate(qualifier=val) | python | def parse(date, dayfirst=True):
'''Parse a `date` into a `FlexiDate`.
@param date: the date to parse - may be a string, datetime.date,
datetime.datetime or FlexiDate.
TODO: support for quarters e.g. Q4 1980 or 1954 Q3
TODO: support latin stuff like M.DCC.LIII
TODO: convert '-' to '?' when used that way
e.g. had this date [181-]
'''
if not date:
return None
if isinstance(date, FlexiDate):
return date
if isinstance(date, int):
return FlexiDate(year=date)
elif isinstance(date, datetime.datetime):
parser = PythonDateTimeParser()
return parser.parse(date)
elif isinstance(date, datetime.date):
parser = PythonDateParser()
return parser.parse(date)
else: # assuming its a string
parser = DateutilDateParser()
out = parser.parse(date, **{'dayfirst': dayfirst})
if out is not None:
return out
# msg = 'Unable to parse %s' % date
# raise ValueError(date)
val = 'UNPARSED: %s' % date
val = val.encode('ascii', 'ignore')
return FlexiDate(qualifier=val) | [
"def",
"parse",
"(",
"date",
",",
"dayfirst",
"=",
"True",
")",
":",
"if",
"not",
"date",
":",
"return",
"None",
"if",
"isinstance",
"(",
"date",
",",
"FlexiDate",
")",
":",
"return",
"date",
"if",
"isinstance",
"(",
"date",
",",
"int",
")",
":",
"... | Parse a `date` into a `FlexiDate`.
@param date: the date to parse - may be a string, datetime.date,
datetime.datetime or FlexiDate.
TODO: support for quarters e.g. Q4 1980 or 1954 Q3
TODO: support latin stuff like M.DCC.LIII
TODO: convert '-' to '?' when used that way
e.g. had this date [181-] | [
"Parse",
"a",
"date",
"into",
"a",
"FlexiDate",
"."
] | d4fb7d6c7786725bd892fbccd8c3837ac45bcb67 | https://github.com/openknowledge-archive/flexidate/blob/d4fb7d6c7786725bd892fbccd8c3837ac45bcb67/flexidate/__init__.py#L162-L194 |
22,760 | openknowledge-archive/flexidate | flexidate/__init__.py | FlexiDate.as_datetime | def as_datetime(self):
'''Get as python datetime.datetime.
Require year to be a valid datetime year. Default month and day to 1 if
do not exist.
@return: datetime.datetime object.
'''
year = int(self.year)
month = int(self.month) if self.month else 1
day = int(self.day) if self.day else 1
hour = int(self.hour) if self.hour else 0
minute = int(self.minute) if self.minute else 0
second = int(self.second) if self.second else 0
microsecond = int(self.microsecond) if self.microsecond else 0
return datetime.datetime(year, month, day, hour, minute, second, microsecond) | python | def as_datetime(self):
'''Get as python datetime.datetime.
Require year to be a valid datetime year. Default month and day to 1 if
do not exist.
@return: datetime.datetime object.
'''
year = int(self.year)
month = int(self.month) if self.month else 1
day = int(self.day) if self.day else 1
hour = int(self.hour) if self.hour else 0
minute = int(self.minute) if self.minute else 0
second = int(self.second) if self.second else 0
microsecond = int(self.microsecond) if self.microsecond else 0
return datetime.datetime(year, month, day, hour, minute, second, microsecond) | [
"def",
"as_datetime",
"(",
"self",
")",
":",
"year",
"=",
"int",
"(",
"self",
".",
"year",
")",
"month",
"=",
"int",
"(",
"self",
".",
"month",
")",
"if",
"self",
".",
"month",
"else",
"1",
"day",
"=",
"int",
"(",
"self",
".",
"day",
")",
"if",... | Get as python datetime.datetime.
Require year to be a valid datetime year. Default month and day to 1 if
do not exist.
@return: datetime.datetime object. | [
"Get",
"as",
"python",
"datetime",
".",
"datetime",
"."
] | d4fb7d6c7786725bd892fbccd8c3837ac45bcb67 | https://github.com/openknowledge-archive/flexidate/blob/d4fb7d6c7786725bd892fbccd8c3837ac45bcb67/flexidate/__init__.py#L144-L159 |
22,761 | bjmorgan/vasppy | vasppy/utils.py | md5sum | def md5sum( string ):
"""
Generate the md5 checksum for a string
Args:
string (Str): The string to be checksummed.
Returns:
(Str): The hex checksum.
"""
h = hashlib.new( 'md5' )
h.update( string.encode( 'utf-8' ) )
return h.hexdigest() | python | def md5sum( string ):
h = hashlib.new( 'md5' )
h.update( string.encode( 'utf-8' ) )
return h.hexdigest() | [
"def",
"md5sum",
"(",
"string",
")",
":",
"h",
"=",
"hashlib",
".",
"new",
"(",
"'md5'",
")",
"h",
".",
"update",
"(",
"string",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"return",
"h",
".",
"hexdigest",
"(",
")"
] | Generate the md5 checksum for a string
Args:
string (Str): The string to be checksummed.
Returns:
(Str): The hex checksum. | [
"Generate",
"the",
"md5",
"checksum",
"for",
"a",
"string"
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/utils.py#L17-L29 |
22,762 | bjmorgan/vasppy | vasppy/utils.py | file_md5 | def file_md5( filename ):
"""
Generate the md5 checksum for a file
Args:
filename (Str): The file to be checksummed.
Returns:
(Str): The hex checksum
Notes:
If the file is gzipped, the md5 checksum returned is
for the uncompressed ASCII file.
"""
with zopen( filename, 'r' ) as f:
file_string = f.read()
try: # attempt to decode byte object
file_string = file_string.decode()
except AttributeError:
pass
return( md5sum( file_string ) ) | python | def file_md5( filename ):
with zopen( filename, 'r' ) as f:
file_string = f.read()
try: # attempt to decode byte object
file_string = file_string.decode()
except AttributeError:
pass
return( md5sum( file_string ) ) | [
"def",
"file_md5",
"(",
"filename",
")",
":",
"with",
"zopen",
"(",
"filename",
",",
"'r'",
")",
"as",
"f",
":",
"file_string",
"=",
"f",
".",
"read",
"(",
")",
"try",
":",
"# attempt to decode byte object",
"file_string",
"=",
"file_string",
".",
"decode"... | Generate the md5 checksum for a file
Args:
filename (Str): The file to be checksummed.
Returns:
(Str): The hex checksum
Notes:
If the file is gzipped, the md5 checksum returned is
for the uncompressed ASCII file. | [
"Generate",
"the",
"md5",
"checksum",
"for",
"a",
"file"
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/utils.py#L31-L51 |
22,763 | bjmorgan/vasppy | vasppy/utils.py | validate_checksum | def validate_checksum( filename, md5sum ):
"""
Compares the md5 checksum of a file with an expected value.
If the calculated and expected checksum values are not equal,
ValueError is raised.
If the filename `foo` is not found, will try to read a gzipped file named
`foo.gz`. In this case, the checksum is calculated for the unzipped file.
Args:
filename (str): Path for the file to be checksummed.
md5sum (str): The expected hex checksum.
Returns:
None
"""
filename = match_filename( filename )
md5_hash = file_md5( filename=filename )
if md5_hash != md5sum:
raise ValueError('md5 checksums are inconsistent: {}'.format( filename )) | python | def validate_checksum( filename, md5sum ):
filename = match_filename( filename )
md5_hash = file_md5( filename=filename )
if md5_hash != md5sum:
raise ValueError('md5 checksums are inconsistent: {}'.format( filename )) | [
"def",
"validate_checksum",
"(",
"filename",
",",
"md5sum",
")",
":",
"filename",
"=",
"match_filename",
"(",
"filename",
")",
"md5_hash",
"=",
"file_md5",
"(",
"filename",
"=",
"filename",
")",
"if",
"md5_hash",
"!=",
"md5sum",
":",
"raise",
"ValueError",
"... | Compares the md5 checksum of a file with an expected value.
If the calculated and expected checksum values are not equal,
ValueError is raised.
If the filename `foo` is not found, will try to read a gzipped file named
`foo.gz`. In this case, the checksum is calculated for the unzipped file.
Args:
filename (str): Path for the file to be checksummed.
md5sum (str): The expected hex checksum.
Returns:
None | [
"Compares",
"the",
"md5",
"checksum",
"of",
"a",
"file",
"with",
"an",
"expected",
"value",
".",
"If",
"the",
"calculated",
"and",
"expected",
"checksum",
"values",
"are",
"not",
"equal",
"ValueError",
"is",
"raised",
".",
"If",
"the",
"filename",
"foo",
"... | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/utils.py#L69-L87 |
22,764 | bjmorgan/vasppy | vasppy/optics.py | to_matrix | def to_matrix( xx, yy, zz, xy, yz, xz ):
"""
Convert a list of matrix components to a symmetric 3x3 matrix.
Inputs should be in the order xx, yy, zz, xy, yz, xz.
Args:
xx (float): xx component of the matrix.
yy (float): yy component of the matrix.
zz (float): zz component of the matrix.
xy (float): xy component of the matrix.
yz (float): yz component of the matrix.
xz (float): xz component of the matrix.
Returns:
(np.array): The matrix, as a 3x3 numpy array.
"""
matrix = np.array( [[xx, xy, xz], [xy, yy, yz], [xz, yz, zz]] )
return matrix | python | def to_matrix( xx, yy, zz, xy, yz, xz ):
matrix = np.array( [[xx, xy, xz], [xy, yy, yz], [xz, yz, zz]] )
return matrix | [
"def",
"to_matrix",
"(",
"xx",
",",
"yy",
",",
"zz",
",",
"xy",
",",
"yz",
",",
"xz",
")",
":",
"matrix",
"=",
"np",
".",
"array",
"(",
"[",
"[",
"xx",
",",
"xy",
",",
"xz",
"]",
",",
"[",
"xy",
",",
"yy",
",",
"yz",
"]",
",",
"[",
"xz"... | Convert a list of matrix components to a symmetric 3x3 matrix.
Inputs should be in the order xx, yy, zz, xy, yz, xz.
Args:
xx (float): xx component of the matrix.
yy (float): yy component of the matrix.
zz (float): zz component of the matrix.
xy (float): xy component of the matrix.
yz (float): yz component of the matrix.
xz (float): xz component of the matrix.
Returns:
(np.array): The matrix, as a 3x3 numpy array. | [
"Convert",
"a",
"list",
"of",
"matrix",
"components",
"to",
"a",
"symmetric",
"3x3",
"matrix",
".",
"Inputs",
"should",
"be",
"in",
"the",
"order",
"xx",
"yy",
"zz",
"xy",
"yz",
"xz",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/optics.py#L24-L41 |
22,765 | bjmorgan/vasppy | vasppy/optics.py | absorption_coefficient | def absorption_coefficient( dielectric ):
"""
Calculate the optical absorption coefficient from an input set of
pymatgen vasprun dielectric constant data.
Args:
dielectric (list): A list containing the dielectric response function
in the pymatgen vasprun format.
| element 0: list of energies
| element 1: real dielectric tensors, in ``[xx, yy, zz, xy, xz, yz]`` format.
| element 2: imaginary dielectric tensors, in ``[xx, yy, zz, xy, xz, yz]`` format.
Returns:
(np.array): absorption coefficient using eV as frequency units (cm^-1).
Notes:
The absorption coefficient is calculated as
.. math:: \\alpha = \\frac{2\sqrt{2} \pi}{\lambda} \sqrt{-\epsilon_1+\sqrt{\epsilon_1^2+\epsilon_2^2}}
"""
energies_in_eV = np.array( dielectric[0] )
real_dielectric = parse_dielectric_data( dielectric[1] )
imag_dielectric = parse_dielectric_data( dielectric[2] )
epsilon_1 = np.mean( real_dielectric, axis=1 )
epsilon_2 = np.mean( imag_dielectric, axis=1 )
return ( 2.0 * np.sqrt(2.0)*pi*eV_to_recip_cm*energies_in_eV
* np.sqrt( -epsilon_1 + np.sqrt( epsilon_1**2 + epsilon_2**2 ) ) ) | python | def absorption_coefficient( dielectric ):
energies_in_eV = np.array( dielectric[0] )
real_dielectric = parse_dielectric_data( dielectric[1] )
imag_dielectric = parse_dielectric_data( dielectric[2] )
epsilon_1 = np.mean( real_dielectric, axis=1 )
epsilon_2 = np.mean( imag_dielectric, axis=1 )
return ( 2.0 * np.sqrt(2.0)*pi*eV_to_recip_cm*energies_in_eV
* np.sqrt( -epsilon_1 + np.sqrt( epsilon_1**2 + epsilon_2**2 ) ) ) | [
"def",
"absorption_coefficient",
"(",
"dielectric",
")",
":",
"energies_in_eV",
"=",
"np",
".",
"array",
"(",
"dielectric",
"[",
"0",
"]",
")",
"real_dielectric",
"=",
"parse_dielectric_data",
"(",
"dielectric",
"[",
"1",
"]",
")",
"imag_dielectric",
"=",
"par... | Calculate the optical absorption coefficient from an input set of
pymatgen vasprun dielectric constant data.
Args:
dielectric (list): A list containing the dielectric response function
in the pymatgen vasprun format.
| element 0: list of energies
| element 1: real dielectric tensors, in ``[xx, yy, zz, xy, xz, yz]`` format.
| element 2: imaginary dielectric tensors, in ``[xx, yy, zz, xy, xz, yz]`` format.
Returns:
(np.array): absorption coefficient using eV as frequency units (cm^-1).
Notes:
The absorption coefficient is calculated as
.. math:: \\alpha = \\frac{2\sqrt{2} \pi}{\lambda} \sqrt{-\epsilon_1+\sqrt{\epsilon_1^2+\epsilon_2^2}} | [
"Calculate",
"the",
"optical",
"absorption",
"coefficient",
"from",
"an",
"input",
"set",
"of",
"pymatgen",
"vasprun",
"dielectric",
"constant",
"data",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/optics.py#L72-L100 |
22,766 | bjmorgan/vasppy | vasppy/configuration.py | Configuration.dr | def dr( self, atom1, atom2 ):
"""
Calculate the distance between two atoms.
Args:
atom1 (vasppy.Atom): Atom 1.
atom2 (vasppy.Atom): Atom 2.
Returns:
(float): The distance between Atom 1 and Atom 2.
"""
return self.cell.dr( atom1.r, atom2.r ) | python | def dr( self, atom1, atom2 ):
return self.cell.dr( atom1.r, atom2.r ) | [
"def",
"dr",
"(",
"self",
",",
"atom1",
",",
"atom2",
")",
":",
"return",
"self",
".",
"cell",
".",
"dr",
"(",
"atom1",
".",
"r",
",",
"atom2",
".",
"r",
")"
] | Calculate the distance between two atoms.
Args:
atom1 (vasppy.Atom): Atom 1.
atom2 (vasppy.Atom): Atom 2.
Returns:
(float): The distance between Atom 1 and Atom 2. | [
"Calculate",
"the",
"distance",
"between",
"two",
"atoms",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/configuration.py#L13-L24 |
22,767 | bjmorgan/vasppy | vasppy/procar.py | area_of_a_triangle_in_cartesian_space | def area_of_a_triangle_in_cartesian_space( a, b, c ):
"""
Returns the area of a triangle defined by three points in Cartesian space.
Args:
a (np.array): Cartesian coordinates of point A.
b (np.array): Cartesian coordinates of point B.
c (np.array): Cartesian coordinates of point C.
Returns:
(float): the area of the triangle.
"""
return 0.5 * np.linalg.norm( np.cross( b-a, c-a ) ) | python | def area_of_a_triangle_in_cartesian_space( a, b, c ):
return 0.5 * np.linalg.norm( np.cross( b-a, c-a ) ) | [
"def",
"area_of_a_triangle_in_cartesian_space",
"(",
"a",
",",
"b",
",",
"c",
")",
":",
"return",
"0.5",
"*",
"np",
".",
"linalg",
".",
"norm",
"(",
"np",
".",
"cross",
"(",
"b",
"-",
"a",
",",
"c",
"-",
"a",
")",
")"
] | Returns the area of a triangle defined by three points in Cartesian space.
Args:
a (np.array): Cartesian coordinates of point A.
b (np.array): Cartesian coordinates of point B.
c (np.array): Cartesian coordinates of point C.
Returns:
(float): the area of the triangle. | [
"Returns",
"the",
"area",
"of",
"a",
"triangle",
"defined",
"by",
"three",
"points",
"in",
"Cartesian",
"space",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/procar.py#L24-L36 |
22,768 | bjmorgan/vasppy | vasppy/procar.py | points_are_in_a_straight_line | def points_are_in_a_straight_line( points, tolerance=1e-7 ):
"""
Check whether a set of points fall on a straight line.
Calculates the areas of triangles formed by triplets of the points.
Returns False is any of these areas are larger than the tolerance.
Args:
points (list(np.array)): list of Cartesian coordinates for each point.
tolerance (optional:float): the maximum triangle size for these points to be considered colinear. Default is 1e-7.
Returns:
(bool): True if all points fall on a straight line (within the allowed tolerance).
"""
a = points[0]
b = points[1]
for c in points[2:]:
if area_of_a_triangle_in_cartesian_space( a, b, c ) > tolerance:
return False
return True | python | def points_are_in_a_straight_line( points, tolerance=1e-7 ):
a = points[0]
b = points[1]
for c in points[2:]:
if area_of_a_triangle_in_cartesian_space( a, b, c ) > tolerance:
return False
return True | [
"def",
"points_are_in_a_straight_line",
"(",
"points",
",",
"tolerance",
"=",
"1e-7",
")",
":",
"a",
"=",
"points",
"[",
"0",
"]",
"b",
"=",
"points",
"[",
"1",
"]",
"for",
"c",
"in",
"points",
"[",
"2",
":",
"]",
":",
"if",
"area_of_a_triangle_in_cart... | Check whether a set of points fall on a straight line.
Calculates the areas of triangles formed by triplets of the points.
Returns False is any of these areas are larger than the tolerance.
Args:
points (list(np.array)): list of Cartesian coordinates for each point.
tolerance (optional:float): the maximum triangle size for these points to be considered colinear. Default is 1e-7.
Returns:
(bool): True if all points fall on a straight line (within the allowed tolerance). | [
"Check",
"whether",
"a",
"set",
"of",
"points",
"fall",
"on",
"a",
"straight",
"line",
".",
"Calculates",
"the",
"areas",
"of",
"triangles",
"formed",
"by",
"triplets",
"of",
"the",
"points",
".",
"Returns",
"False",
"is",
"any",
"of",
"these",
"areas",
... | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/procar.py#L38-L56 |
22,769 | bjmorgan/vasppy | vasppy/procar.py | two_point_effective_mass | def two_point_effective_mass( cartesian_k_points, eigenvalues ):
"""
Calculate the effective mass given eigenvalues at two k-points.
Reimplemented from Aron Walsh's original effective mass Fortran code.
Args:
cartesian_k_points (np.array): 2D numpy array containing the k-points in (reciprocal) Cartesian coordinates.
eigenvalues (np.array): numpy array containing the eigenvalues at each k-point.
Returns:
(float): The effective mass
"""
assert( cartesian_k_points.shape[0] == 2 )
assert( eigenvalues.size == 2 )
dk = cartesian_k_points[ 1 ] - cartesian_k_points[ 0 ]
mod_dk = np.sqrt( np.dot( dk, dk ) )
delta_e = ( eigenvalues[ 1 ] - eigenvalues[ 0 ] ) * ev_to_hartree * 2.0
effective_mass = mod_dk * mod_dk / delta_e
return effective_mass | python | def two_point_effective_mass( cartesian_k_points, eigenvalues ):
assert( cartesian_k_points.shape[0] == 2 )
assert( eigenvalues.size == 2 )
dk = cartesian_k_points[ 1 ] - cartesian_k_points[ 0 ]
mod_dk = np.sqrt( np.dot( dk, dk ) )
delta_e = ( eigenvalues[ 1 ] - eigenvalues[ 0 ] ) * ev_to_hartree * 2.0
effective_mass = mod_dk * mod_dk / delta_e
return effective_mass | [
"def",
"two_point_effective_mass",
"(",
"cartesian_k_points",
",",
"eigenvalues",
")",
":",
"assert",
"(",
"cartesian_k_points",
".",
"shape",
"[",
"0",
"]",
"==",
"2",
")",
"assert",
"(",
"eigenvalues",
".",
"size",
"==",
"2",
")",
"dk",
"=",
"cartesian_k_p... | Calculate the effective mass given eigenvalues at two k-points.
Reimplemented from Aron Walsh's original effective mass Fortran code.
Args:
cartesian_k_points (np.array): 2D numpy array containing the k-points in (reciprocal) Cartesian coordinates.
eigenvalues (np.array): numpy array containing the eigenvalues at each k-point.
Returns:
(float): The effective mass | [
"Calculate",
"the",
"effective",
"mass",
"given",
"eigenvalues",
"at",
"two",
"k",
"-",
"points",
".",
"Reimplemented",
"from",
"Aron",
"Walsh",
"s",
"original",
"effective",
"mass",
"Fortran",
"code",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/procar.py#L58-L76 |
22,770 | bjmorgan/vasppy | vasppy/procar.py | least_squares_effective_mass | def least_squares_effective_mass( cartesian_k_points, eigenvalues ):
"""
Calculate the effective mass using a least squares quadratic fit.
Args:
cartesian_k_points (np.array): Cartesian reciprocal coordinates for the k-points
eigenvalues (np.array): Energy eigenvalues at each k-point to be used in the fit.
Returns:
(float): The fitted effective mass
Notes:
If the k-points do not sit on a straight line a ValueError will be raised.
"""
if not points_are_in_a_straight_line( cartesian_k_points ):
raise ValueError( 'k-points are not collinear' )
dk = cartesian_k_points - cartesian_k_points[0]
mod_dk = np.linalg.norm( dk, axis = 1 )
delta_e = eigenvalues - eigenvalues[0]
effective_mass = 1.0 / ( np.polyfit( mod_dk, eigenvalues, 2 )[0] * ev_to_hartree * 2.0 )
return effective_mass | python | def least_squares_effective_mass( cartesian_k_points, eigenvalues ):
if not points_are_in_a_straight_line( cartesian_k_points ):
raise ValueError( 'k-points are not collinear' )
dk = cartesian_k_points - cartesian_k_points[0]
mod_dk = np.linalg.norm( dk, axis = 1 )
delta_e = eigenvalues - eigenvalues[0]
effective_mass = 1.0 / ( np.polyfit( mod_dk, eigenvalues, 2 )[0] * ev_to_hartree * 2.0 )
return effective_mass | [
"def",
"least_squares_effective_mass",
"(",
"cartesian_k_points",
",",
"eigenvalues",
")",
":",
"if",
"not",
"points_are_in_a_straight_line",
"(",
"cartesian_k_points",
")",
":",
"raise",
"ValueError",
"(",
"'k-points are not collinear'",
")",
"dk",
"=",
"cartesian_k_poin... | Calculate the effective mass using a least squares quadratic fit.
Args:
cartesian_k_points (np.array): Cartesian reciprocal coordinates for the k-points
eigenvalues (np.array): Energy eigenvalues at each k-point to be used in the fit.
Returns:
(float): The fitted effective mass
Notes:
If the k-points do not sit on a straight line a ValueError will be raised. | [
"Calculate",
"the",
"effective",
"mass",
"using",
"a",
"least",
"squares",
"quadratic",
"fit",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/procar.py#L78-L98 |
22,771 | bjmorgan/vasppy | vasppy/procar.py | Procar.read_from_file | def read_from_file( self, filename, negative_occupancies='warn' ):
"""
Reads the projected wavefunction character of each band from a VASP PROCAR file.
Args:
filename (str): Filename of the PROCAR file.
negative_occupancies (:obj:Str, optional): Sets the behaviour for handling
negative occupancies. Default is `warn`.
Returns:
None
Note:
Valid options for `negative_occupancies` are:
`warn` (default): Warn that some partial occupancies are negative,
but do not alter any values.
`raise`: Raise an AttributeError.
`ignore`: Do nothing.
`zero`: Negative partial occupancies will be set to zero.
"""
valid_negative_occupancies = [ 'warn', 'raise', 'ignore', 'zero' ]
if negative_occupancies not in valid_negative_occupancies:
raise ValueError( '"{}" is not a valid value for the keyword `negative_occupancies`.'.format( negative_occupancies ) )
with open( filename, 'r' ) as file_in:
file_in.readline()
self.number_of_k_points, self.number_of_bands, self.number_of_ions = [ int( f ) for f in get_numbers_from_string( file_in.readline() ) ]
self.read_in = file_in.read()
self.parse_k_points()
self.parse_bands()
self.parse_occupancy()
if np.any( self.occupancy[:,1] < 0 ): # Handle negative occupancies
if negative_occupancies == 'warn':
warnings.warn( "One or more occupancies in your PROCAR file are negative." )
elif negative_occupancies == 'raise':
raise ValueError( "One or more occupancies in your PROCAR file are negative." )
elif negative_occupancies == 'zero':
self.occupancy[ self.occupancy < 0 ] = 0.0
self.parse_projections()
self.sanity_check()
self.read_in = None
if self.calculation[ 'spin_polarised' ]:
self.data = self.projection_data.reshape( self.spin_channels, self.number_of_k_points, self.number_of_bands, self.number_of_ions + 1, self.number_of_projections )[:,:,:,:,1:].swapaxes( 0, 1).swapaxes( 1, 2 )
else:
self.data = self.projection_data.reshape( self.number_of_k_points, self.number_of_bands, self.spin_channels, self.number_of_ions + 1, self.number_of_projections )[:,:,:,:,1:] | python | def read_from_file( self, filename, negative_occupancies='warn' ):
"""
Read the projected wavefunction character of each band from a VASP PROCAR file.

Args:
    filename (str): Filename of the PROCAR file.
    negative_occupancies (str, optional): Behaviour for handling negative
        occupancies; one of 'warn' (default), 'raise', 'ignore', 'zero'.

Returns:
    None

Raises:
    ValueError: if `negative_occupancies` is not a recognised option, or if
        it is 'raise' and one or more occupancies are negative.
"""
valid_negative_occupancies = [ 'warn', 'raise', 'ignore', 'zero' ]
if negative_occupancies not in valid_negative_occupancies:
raise ValueError( '"{}" is not a valid value for the keyword `negative_occupancies`.'.format( negative_occupancies ) )
with open( filename, 'r' ) as file_in:
# First header line is skipped; second carries k-point / band / ion counts.
file_in.readline()
self.number_of_k_points, self.number_of_bands, self.number_of_ions = [ int( f ) for f in get_numbers_from_string( file_in.readline() ) ]
self.read_in = file_in.read()
# Parse the raw text into k-points, band energies, and occupancies.
self.parse_k_points()
self.parse_bands()
self.parse_occupancy()
if np.any( self.occupancy[:,1] < 0 ): # Handle negative occupancies
if negative_occupancies == 'warn':
warnings.warn( "One or more occupancies in your PROCAR file are negative." )
elif negative_occupancies == 'raise':
raise ValueError( "One or more occupancies in your PROCAR file are negative." )
elif negative_occupancies == 'zero':
self.occupancy[ self.occupancy < 0 ] = 0.0
self.parse_projections()
self.sanity_check()
# Raw text is no longer needed once parsing is complete.
self.read_in = None
# Reshape the flat projection data; the axis order written by VASP differs
# for spin-polarised calculations, hence the swapaxes calls. The trailing
# [:,:,:,:,1:] slice drops the first projection column (presumably an index
# column — TODO confirm against the PROCAR format).
if self.calculation[ 'spin_polarised' ]:
self.data = self.projection_data.reshape( self.spin_channels, self.number_of_k_points, self.number_of_bands, self.number_of_ions + 1, self.number_of_projections )[:,:,:,:,1:].swapaxes( 0, 1).swapaxes( 1, 2 )
else:
self.data = self.projection_data.reshape( self.number_of_k_points, self.number_of_bands, self.spin_channels, self.number_of_ions + 1, self.number_of_projections )[:,:,:,:,1:] | [
"def",
"read_from_file",
"(",
"self",
",",
"filename",
",",
"negative_occupancies",
"=",
"'warn'",
")",
":",
"valid_negative_occupancies",
"=",
"[",
"'warn'",
",",
"'raise'",
",",
"'ignore'",
",",
"'zero'",
"]",
"if",
"negative_occupancies",
"not",
"in",
"valid_... | Reads the projected wavefunction character of each band from a VASP PROCAR file.
Args:
filename (str): Filename of the PROCAR file.
negative_occupancies (:obj:Str, optional): Sets the behaviour for handling
negative occupancies. Default is `warn`.
Returns:
None
Note:
Valid options for `negative_occupancies` are:
`warn` (default): Warn that some partial occupancies are negative,
but do not alter any values.
`raise`: Raise an AttributeError.
`ignore`: Do nothing.
`zero`: Negative partial occupancies will be set to zero. | [
"Reads",
"the",
"projected",
"wavefunction",
"character",
"of",
"each",
"band",
"from",
"a",
"VASP",
"PROCAR",
"file",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/procar.py#L159-L202 |
22,772 | bjmorgan/vasppy | vasppy/summary.py | load_vasp_summary | def load_vasp_summary( filename ):
"""
Reads a `vasp_summary.yaml` format YAML file and returns
a dictionary of dictionaries. Each YAML document in the file
corresponds to one sub-dictionary, with the corresponding
top-level key given by the `title` value.
Example:
The file:
---
title: foo
data: foo_data
---
title: bar
data: bar_data
is converted to the dictionary
{ 'foo': { 'title': 'foo', 'data': 'foo_data' },
'bar': { 'title': 'bar', 'data': 'bar_data' } }
Args:
filename (str): File path for the `vasp_summary.yaml` file.
Returns:
dict(dict,dict,...): A dictionary of separate YAML documents,
each as dictionaries.
"""
with open( filename, 'r' ) as stream:
docs = yaml.load_all( stream, Loader=yaml.SafeLoader )
data = { d['title']: d for d in docs }
def load_vasp_summary( filename ):
    """
    Read a `vasp_summary.yaml` format multi-document YAML file.

    Each YAML document in the file becomes one sub-dictionary, keyed at the
    top level by that document's `title` value.

    Args:
        filename (str): File path for the `vasp_summary.yaml` file.

    Returns:
        (dict): mapping of each document's title to the document itself.
    """
    with open( filename, 'r' ) as stream:
        # yaml.load_all is lazy, so the comprehension must consume it
        # while the file is still open.
        summary = { document['title']: document
                    for document in yaml.load_all( stream, Loader=yaml.SafeLoader ) }
    return summary
"def",
"load_vasp_summary",
"(",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"stream",
":",
"docs",
"=",
"yaml",
".",
"load_all",
"(",
"stream",
",",
"Loader",
"=",
"yaml",
".",
"SafeLoader",
")",
"data",
"=",
"{",
"... | Reads a `vasp_summary.yaml` format YAML file and returns
a dictionary of dictionaries. Each YAML document in the file
corresponds to one sub-dictionary, with the corresponding
top-level key given by the `title` value.
Example:
The file:
---
title: foo
data: foo_data
---
title: bar
data: bar_data
is converted to the dictionary
{ 'foo': { 'title': 'foo', 'data': 'foo_data' },
'bar': { 'title': 'bar', 'data': 'bar_data' } }
Args:
filename (str): File path for the `vasp_summary.yaml` file.
Returns:
dict(dict,dict,...): A dictionary of separate YAML documents,
each as dictionaries.a | [
"Reads",
"a",
"vasp_summary",
".",
"yaml",
"format",
"YAML",
"file",
"and",
"returns",
"a",
"dictionary",
"of",
"dictionaries",
".",
"Each",
"YAML",
"document",
"in",
"the",
"file",
"corresponds",
"to",
"one",
"sub",
"-",
"dictionary",
"with",
"the",
"corres... | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/summary.py#L18-L51 |
22,773 | bjmorgan/vasppy | vasppy/summary.py | potcar_spec | def potcar_spec( filename ):
"""
Returns a dictionary specifying the pseudopotentials contained in a POTCAR file.
Args:
filename (Str): The name of the POTCAR file to process.
Returns:
(Dict): A dictionary of pseudopotential filename: dataset pairs, e.g.
{ 'Fe_pv': 'PBE_54', 'O', 'PBE_54' }
"""
p_spec = {}
with open( filename, 'r' ) as f:
potcars = re.split('(End of Dataset\n)', f.read() )
potcar_md5sums = [ md5sum( ''.join( pair ) ) for pair in zip( potcars[::2], potcars[1:-1:2] ) ]
for this_md5sum in potcar_md5sums:
for ps in potcar_sets:
for p, p_md5sum in potcar_md5sum_data[ ps ].items():
if this_md5sum == p_md5sum:
p_spec[ p ] = ps
if len( p_spec ) != len( potcar_md5sums ):
raise ValueError( 'One or more POTCARs did not have matching md5 hashes' )
def potcar_spec( filename ):
    """
    Return a dictionary specifying the pseudopotentials contained in a POTCAR file.

    Args:
        filename (str): The name of the POTCAR file to process.

    Returns:
        (dict): pseudopotential filename: dataset pairs, e.g.
            { 'Fe_pv': 'PBE_54', 'O': 'PBE_54' }

    Raises:
        ValueError: if any concatenated POTCAR has an unrecognised md5 hash.
    """
    with open( filename, 'r' ) as f:
        # Split on the dataset terminator, keeping each terminator as its own chunk.
        chunks = re.split('(End of Dataset\n)', f.read())
    # Re-attach each terminator to its POTCAR body before hashing.
    hashes = [ md5sum( body + terminator )
               for body, terminator in zip( chunks[::2], chunks[1:-1:2] ) ]
    spec = {}
    for this_hash in hashes:
        for dataset in potcar_sets:
            for name, known_hash in potcar_md5sum_data[ dataset ].items():
                if this_hash == known_hash:
                    spec[ name ] = dataset
    if len( spec ) != len( hashes ):
        raise ValueError( 'One or more POTCARs did not have matching md5 hashes' )
    return spec
"def",
"potcar_spec",
"(",
"filename",
")",
":",
"p_spec",
"=",
"{",
"}",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"f",
":",
"potcars",
"=",
"re",
".",
"split",
"(",
"'(End of Dataset\\n)'",
",",
"f",
".",
"read",
"(",
")",
")",
"pot... | Returns a dictionary specifying the pseudopotentials contained in a POTCAR file.
Args:
filename (Str): The name of the POTCAR file to process.
Returns:
(Dict): A dictionary of pseudopotential filename: dataset pairs, e.g.
{ 'Fe_pv': 'PBE_54', 'O', 'PBE_54' } | [
"Returns",
"a",
"dictionary",
"specifying",
"the",
"pseudopotentials",
"contained",
"in",
"a",
"POTCAR",
"file",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/summary.py#L53-L75 |
22,774 | bjmorgan/vasppy | vasppy/summary.py | find_vasp_calculations | def find_vasp_calculations():
"""
Returns a list of all subdirectories that contain either a vasprun.xml file
or a compressed vasprun.xml.gz file.
Args:
None
Returns:
(List): list of all VASP calculation subdirectories.
"""
dir_list = [ './' + re.sub( r'vasprun\.xml', '', path ) for path in glob.iglob( '**/vasprun.xml', recursive=True ) ]
gz_dir_list = [ './' + re.sub( r'vasprun\.xml\.gz', '', path ) for path in glob.iglob( '**/vasprun.xml.gz', recursive=True ) ]
def find_vasp_calculations():
    """
    Return a list of all subdirectories (relative to the current directory)
    that contain either a vasprun.xml file or a compressed vasprun.xml.gz file.

    Args:
        None

    Returns:
        (list(str)): all VASP calculation subdirectories, plain-xml hits first.
    """
    found = []
    for pattern, name_regex in ( ( '**/vasprun.xml', r'vasprun\.xml' ),
                                 ( '**/vasprun.xml.gz', r'vasprun\.xml\.gz' ) ):
        for path in glob.iglob( pattern, recursive=True ):
            found.append( './' + re.sub( name_regex, '', path ) )
    return found
"def",
"find_vasp_calculations",
"(",
")",
":",
"dir_list",
"=",
"[",
"'./'",
"+",
"re",
".",
"sub",
"(",
"r'vasprun\\.xml'",
",",
"''",
",",
"path",
")",
"for",
"path",
"in",
"glob",
".",
"iglob",
"(",
"'**/vasprun.xml'",
",",
"recursive",
"=",
"True",
... | Returns a list of all subdirectories that contain either a vasprun.xml file
or a compressed vasprun.xml.gz file.
Args:
None
Returns:
(List): list of all VASP calculation subdirectories. | [
"Returns",
"a",
"list",
"of",
"all",
"subdirectories",
"that",
"contain",
"either",
"a",
"vasprun",
".",
"xml",
"file",
"or",
"a",
"compressed",
"vasprun",
".",
"xml",
".",
"gz",
"file",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/summary.py#L77-L90 |
22,775 | bjmorgan/vasppy | vasppy/summary.py | Summary.parse_vasprun | def parse_vasprun( self ):
"""
Read in `vasprun.xml` as a pymatgen Vasprun object.
Args:
None
Returns:
None
None:
If the vasprun.xml is not well formed this method will catch the ParseError
and set self.vasprun = None.
"""
self.vasprun_filename = match_filename( 'vasprun.xml' )
if not self.vasprun_filename:
raise FileNotFoundError( 'Could not find vasprun.xml or vasprun.xml.gz file' )
try:
self.vasprun = Vasprun( self.vasprun_filename, parse_potcar_file=False )
except ET.ParseError:
self.vasprun = None
except:
def parse_vasprun( self ):
    """
    Read in `vasprun.xml` as a pymatgen Vasprun object.

    Args:
        None

    Returns:
        None

    Raises:
        FileNotFoundError: if neither vasprun.xml nor vasprun.xml.gz is found.

    Note:
        If the vasprun.xml is not well formed this method catches the
        ParseError and sets self.vasprun = None.
    """
    self.vasprun_filename = match_filename( 'vasprun.xml' )
    if not self.vasprun_filename:
        raise FileNotFoundError( 'Could not find vasprun.xml or vasprun.xml.gz file' )
    try:
        self.vasprun = Vasprun( self.vasprun_filename, parse_potcar_file=False )
    except ET.ParseError:
        # Malformed XML: record the failure instead of propagating.
        # (The previous trailing `except: raise` was a no-op and has been removed;
        # any other exception still propagates unchanged.)
        self.vasprun = None
"def",
"parse_vasprun",
"(",
"self",
")",
":",
"self",
".",
"vasprun_filename",
"=",
"match_filename",
"(",
"'vasprun.xml'",
")",
"if",
"not",
"self",
".",
"vasprun_filename",
":",
"raise",
"FileNotFoundError",
"(",
"'Could not find vasprun.xml or vasprun.xml.gz file'",... | Read in `vasprun.xml` as a pymatgen Vasprun object.
Args:
None
Returns:
None
None:
If the vasprun.xml is not well formed this method will catch the ParseError
and set self.vasprun = None. | [
"Read",
"in",
"vasprun",
".",
"xml",
"as",
"a",
"pymatgen",
"Vasprun",
"object",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/summary.py#L160-L182 |
22,776 | bjmorgan/vasppy | vasppy/doscar.py | Doscar.read_projected_dos | def read_projected_dos( self ):
"""
Read the projected density of states data into self.pdos. """
pdos_list = []
for i in range( self.number_of_atoms ):
df = self.read_atomic_dos_as_df( i+1 )
pdos_list.append( df )
self.pdos = np.vstack( [ np.array( df ) for df in pdos_list ] ).reshape(
def read_projected_dos( self ):
    """
    Read the projected density of states data into self.pdos.

    The result is a numpy array of shape
    (number_of_atoms, number_of_data_points, number_of_channels, ispin).
    """
    per_atom = [ np.array( self.read_atomic_dos_as_df( atom_index + 1 ) )
                 for atom_index in range( self.number_of_atoms ) ]
    self.pdos = np.vstack( per_atom ).reshape(
        self.number_of_atoms, self.number_of_data_points,
        self.number_of_channels, self.ispin )
"def",
"read_projected_dos",
"(",
"self",
")",
":",
"pdos_list",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"number_of_atoms",
")",
":",
"df",
"=",
"self",
".",
"read_atomic_dos_as_df",
"(",
"i",
"+",
"1",
")",
"pdos_list",
".",
"appen... | Read the projected density of states data into | [
"Read",
"the",
"projected",
"density",
"of",
"states",
"data",
"into"
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/doscar.py#L108-L116 |
22,777 | bjmorgan/vasppy | vasppy/doscar.py | Doscar.pdos_select | def pdos_select( self, atoms=None, spin=None, l=None, m=None ):
"""
Returns a subset of the projected density of states array.
Args:
atoms (int or list(int)): Atom numbers to include in the selection. Atom numbers count from 1.
Default is to select all atoms.
spin (str): Select up or down, or both spin channels to include in the selection.
Accepted options are 'up', 'down', and 'both'. Default is to select both spins.
l (str): Select one angular momentum to include in the selectrion.
Accepted options are 's', 'p', 'd', and 'f'. Default is to include all l-values.
Setting `l` and not setting `m` will return all projections for that angular momentum value.
m (list(str)): Select one or more m-values. Requires `l` to be set.
The accepted values depend on the value of `l`:
`l='s'`: Only one projection. Not set.
`l='p'`: One or more of [ 'x', 'y', 'z' ]
`l='d'`: One or more of [ 'xy', 'yz', 'z2-r2', 'xz', 'x2-y2' ]
`l='f'`: One or more of [ 'y(3x2-y2)', 'xyz', 'yz2', 'z3', 'xz2', 'z(x2-y2)', 'x(x2-3y2)' ]
Returns:
np.array: A 4-dimensional numpy array containing the selected pdos values.
The array dimensions are [ atom_no, energy_value, lm-projection, spin ]
"""
valid_m_values = { 's': [],
'p': [ 'x', 'y', 'z' ],
'd': [ 'xy', 'yz', 'z2-r2', 'xz', 'x2-y2' ],
'f': [ 'y(3x2-y2)', 'xyz', 'yz2', 'z3', 'xz2', 'z(x2-y2)', 'x(x2-3y2)' ] }
if not atoms:
atom_idx = list(range( self.number_of_atoms ))
else:
atom_idx = atoms
to_return = self.pdos[ atom_idx, :, :, : ]
if not spin:
spin_idx = list(range( self.ispin ))
elif spin is 'up':
spin_idx = [0]
elif spin is 'down':
spin_idx = [1]
elif spin is 'both':
spin_idx = [0,1]
else:
raise ValueError( "valid spin values are 'up', 'down', and 'both'. The default is 'both'" )
to_return = to_return[ :, :, :, spin_idx ]
if not l:
channel_idx = list(range( self.number_of_channels ))
elif l == 's':
channel_idx = [ 0 ]
elif l == 'p':
if not m:
channel_idx = [ 1, 2, 3 ]
else: # TODO this looks like it should be i+1
channel_idx = [ i+1 for i, v in enumerate( valid_m_values['p'] ) if v in m ]
elif l == 'd':
if not m:
channel_idx = [ 4, 5, 6, 7, 8 ]
else: # TODO this looks like it should be i+4
channel_idx = [ i+4 for i, v in enumerate( valid_m_values['d'] ) if v in m ]
elif l == 'f':
if not m:
channel_idx = [ 9, 10, 11, 12, 13, 14, 15 ]
else: # TODO this looks like it should be i+9
channel_idx = [ i+9 for i, v in enumerate( valid_m_values['f'] ) if v in m ]
else:
raise ValueError
def pdos_select( self, atoms=None, spin=None, l=None, m=None ):
    """
    Returns a subset of the projected density of states array.

    Args:
        atoms (list(int), optional): Atom indices to include in the selection.
            Default is to select all atoms.
            NOTE(review): values are used directly as zero-based numpy indices;
            the historical docstring described numbering from 1 — confirm
            against callers.
        spin (str, optional): 'up', 'down', or 'both'. Default selects both
            spin channels.
        l (str, optional): angular momentum channel: 's', 'p', 'd', or 'f'.
            Default includes all l-values. Setting `l` without `m` returns
            all projections for that angular momentum.
        m (list(str), optional): one or more m-labels; requires `l` to be set.
            Valid labels depend on `l`:
            `l='p'`: [ 'x', 'y', 'z' ]
            `l='d'`: [ 'xy', 'yz', 'z2-r2', 'xz', 'x2-y2' ]
            `l='f'`: [ 'y(3x2-y2)', 'xyz', 'yz2', 'z3', 'xz2', 'z(x2-y2)', 'x(x2-3y2)' ]

    Returns:
        np.array: 4-dimensional array of the selected pdos values, with
        dimensions [ atom_no, energy_value, lm-projection, spin ].

    Raises:
        ValueError: if `spin` or `l` is not a recognised option.
    """
    valid_m_values = { 's': [],
                       'p': [ 'x', 'y', 'z' ],
                       'd': [ 'xy', 'yz', 'z2-r2', 'xz', 'x2-y2' ],
                       'f': [ 'y(3x2-y2)', 'xyz', 'yz2', 'z3', 'xz2', 'z(x2-y2)', 'x(x2-3y2)' ] }
    if not atoms:
        atom_idx = list(range( self.number_of_atoms ))
    else:
        atom_idx = atoms
    to_return = self.pdos[ atom_idx, :, :, : ]
    # BUG FIX: string comparisons below previously used `is`, which relies on
    # CPython string interning; `==` is the correct comparison.
    if not spin:
        spin_idx = list(range( self.ispin ))
    elif spin == 'up':
        spin_idx = [0]
    elif spin == 'down':
        spin_idx = [1]
    elif spin == 'both':
        spin_idx = [0,1]
    else:
        raise ValueError( "valid spin values are 'up', 'down', and 'both'. The default is 'both'" )
    to_return = to_return[ :, :, :, spin_idx ]
    # Channels are stored in l-blocks: s at 0, p at 1-3, d at 4-8, f at 9-15;
    # each m-selection offsets into its block, hence the i+1 / i+4 / i+9 below.
    if not l:
        channel_idx = list(range( self.number_of_channels ))
    elif l == 's':
        channel_idx = [ 0 ]
    elif l == 'p':
        if not m:
            channel_idx = [ 1, 2, 3 ]
        else:
            channel_idx = [ i+1 for i, v in enumerate( valid_m_values['p'] ) if v in m ]
    elif l == 'd':
        if not m:
            channel_idx = [ 4, 5, 6, 7, 8 ]
        else:
            channel_idx = [ i+4 for i, v in enumerate( valid_m_values['d'] ) if v in m ]
    elif l == 'f':
        if not m:
            channel_idx = [ 9, 10, 11, 12, 13, 14, 15 ]
        else:
            channel_idx = [ i+9 for i, v in enumerate( valid_m_values['f'] ) if v in m ]
    else:
        raise ValueError( "valid l values are 's', 'p', 'd', and 'f'" )
    return to_return[ :, :, channel_idx, : ]
"def",
"pdos_select",
"(",
"self",
",",
"atoms",
"=",
"None",
",",
"spin",
"=",
"None",
",",
"l",
"=",
"None",
",",
"m",
"=",
"None",
")",
":",
"valid_m_values",
"=",
"{",
"'s'",
":",
"[",
"]",
",",
"'p'",
":",
"[",
"'x'",
",",
"'y'",
",",
"'... | Returns a subset of the projected density of states array.
Args:
atoms (int or list(int)): Atom numbers to include in the selection. Atom numbers count from 1.
Default is to select all atoms.
spin (str): Select up or down, or both spin channels to include in the selection.
Accepted options are 'up', 'down', and 'both'. Default is to select both spins.
l (str): Select one angular momentum to include in the selectrion.
Accepted options are 's', 'p', 'd', and 'f'. Default is to include all l-values.
Setting `l` and not setting `m` will return all projections for that angular momentum value.
m (list(str)): Select one or more m-values. Requires `l` to be set.
The accepted values depend on the value of `l`:
`l='s'`: Only one projection. Not set.
`l='p'`: One or more of [ 'x', 'y', 'z' ]
`l='d'`: One or more of [ 'xy', 'yz', 'z2-r2', 'xz', 'x2-y2' ]
`l='f'`: One or more of [ 'y(3x2-y2)', 'xyz', 'yz2', 'z3', 'xz2', 'z(x2-y2)', 'x(x2-3y2)' ]
Returns:
np.array: A 4-dimensional numpy array containing the selected pdos values.
The array dimensions are [ atom_no, energy_value, lm-projection, spin ] | [
"Returns",
"a",
"subset",
"of",
"the",
"projected",
"density",
"of",
"states",
"array",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/doscar.py#L118-L183 |
22,778 | bjmorgan/vasppy | vasppy/calculation.py | Calculation.scale_stoichiometry | def scale_stoichiometry( self, scaling ):
"""
Scale the Calculation stoichiometry
Returns the stoichiometry, scaled by the argument scaling.
Args:
scaling (float): The scaling factor.
Returns:
(Counter(Str:Int)): The scaled stoichiometry as a Counter of label: stoichiometry pairs
"""
def scale_stoichiometry( self, scaling ):
    """
    Return this Calculation's stoichiometry scaled by a constant factor.

    Args:
        scaling (float): The scaling factor.

    Returns:
        (dict(str:float)): label: scaled-stoichiometry pairs.
    """
    scaled = {}
    for label, amount in self.stoichiometry.items():
        scaled[ label ] = amount * scaling
    return scaled
"def",
"scale_stoichiometry",
"(",
"self",
",",
"scaling",
")",
":",
"return",
"{",
"k",
":",
"v",
"*",
"scaling",
"for",
"k",
",",
"v",
"in",
"self",
".",
"stoichiometry",
".",
"items",
"(",
")",
"}"
] | Scale the Calculation stoichiometry
Returns the stoichiometry, scaled by the argument scaling.
Args:
scaling (float): The scaling factor.
Returns:
(Counter(Str:Int)): The scaled stoichiometry as a Counter of label: stoichiometry pairs | [
"Scale",
"the",
"Calculation",
"stoichiometry",
"Returns",
"the",
"stoichiometry",
"scaled",
"by",
"the",
"argument",
"scaling",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/calculation.py#L54-L65 |
22,779 | bjmorgan/vasppy | vasppy/cell.py | angle | def angle( x, y ):
"""
Calculate the angle between two vectors, in degrees.
Args:
x (np.array): one vector.
y (np.array): the other vector.
Returns:
(float): the angle between x and y in degrees.
"""
dot = np.dot( x, y )
x_mod = np.linalg.norm( x )
y_mod = np.linalg.norm( y )
cos_angle = dot / ( x_mod * y_mod )
def angle( x, y ):
    """
    Calculate the angle between two vectors, in degrees.

    Args:
        x (np.array): one vector.
        y (np.array): the other vector.

    Returns:
        (float): the angle between x and y in degrees.
    """
    cos_theta = np.dot( x, y ) / ( np.linalg.norm( x ) * np.linalg.norm( y ) )
    return np.degrees( np.arccos( cos_theta ) )
"def",
"angle",
"(",
"x",
",",
"y",
")",
":",
"dot",
"=",
"np",
".",
"dot",
"(",
"x",
",",
"y",
")",
"x_mod",
"=",
"np",
".",
"linalg",
".",
"norm",
"(",
"x",
")",
"y_mod",
"=",
"np",
".",
"linalg",
".",
"norm",
"(",
"y",
")",
"cos_angle",
... | Calculate the angle between two vectors, in degrees.
Args:
x (np.array): one vector.
y (np.array): the other vector.
Returns:
(float): the angle between x and y in degrees. | [
"Calculate",
"the",
"angle",
"between",
"two",
"vectors",
"in",
"degrees",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/cell.py#L4-L19 |
22,780 | bjmorgan/vasppy | vasppy/cell.py | Cell.minimum_image | def minimum_image( self, r1, r2 ):
"""
Find the minimum image vector from point r1 to point r2.
Args:
r1 (np.array): fractional coordinates of point r1.
r2 (np.array): fractional coordinates of point r2.
Returns:
(np.array): the fractional coordinate vector from r1 to the nearest image of r2.
"""
delta_r = r2 - r1
delta_r = np.array( [ x - math.copysign( 1.0, x ) if abs(x) > 0.5 else x for x in delta_r ] )
def minimum_image( self, r1, r2 ):
    """
    Find the minimum image vector from point r1 to point r2.

    Args:
        r1 (np.array): fractional coordinates of point r1.
        r2 (np.array): fractional coordinates of point r2.

    Returns:
        (np.array): fractional coordinate vector from r1 to the nearest image of r2.
    """
    separation = r2 - r1
    # Wrap any component whose magnitude exceeds half a cell length.
    wrapped = [ component if abs( component ) <= 0.5
                else component - math.copysign( 1.0, component )
                for component in separation ]
    return np.array( wrapped )
"def",
"minimum_image",
"(",
"self",
",",
"r1",
",",
"r2",
")",
":",
"delta_r",
"=",
"r2",
"-",
"r1",
"delta_r",
"=",
"np",
".",
"array",
"(",
"[",
"x",
"-",
"math",
".",
"copysign",
"(",
"1.0",
",",
"x",
")",
"if",
"abs",
"(",
"x",
")",
">",... | Find the minimum image vector from point r1 to point r2.
Args:
r1 (np.array): fractional coordinates of point r1.
r2 (np.array): fractional coordinates of point r2.
Returns:
(np.array): the fractional coordinate vector from r1 to the nearest image of r2. | [
"Find",
"the",
"minimum",
"image",
"vector",
"from",
"point",
"r1",
"to",
"point",
"r2",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/cell.py#L94-L107 |
22,781 | bjmorgan/vasppy | vasppy/cell.py | Cell.minimum_image_dr | def minimum_image_dr( self, r1, r2, cutoff=None ):
"""
Calculate the shortest distance between two points in the cell,
accounting for periodic boundary conditions.
Args:
r1 (np.array): fractional coordinates of point r1.
r2 (np.array): fractional coordinates of point r2.
cutoff (:obj: `float`, optional): if set, return zero if the minimum distance is greater than `cutoff`. Defaults to None.
Returns:
(float): The distance between r1 and r2.
"""
delta_r_vector = self.minimum_image( r1, r2 )
def minimum_image_dr( self, r1, r2, cutoff=None ):
    """
    Calculate the shortest distance between two points in the cell,
    accounting for periodic boundary conditions.

    Args:
        r1 (np.array): fractional coordinates of point r1.
        r2 (np.array): fractional coordinates of point r2.
        cutoff (float, optional): if set, return zero if the minimum
            distance is greater than `cutoff`. Defaults to None.

    Returns:
        (float): The distance between r1 and r2.
    """
    image_vector = self.minimum_image( r1, r2 )
    return self.dr( np.zeros( 3 ), image_vector, cutoff )
"def",
"minimum_image_dr",
"(",
"self",
",",
"r1",
",",
"r2",
",",
"cutoff",
"=",
"None",
")",
":",
"delta_r_vector",
"=",
"self",
".",
"minimum_image",
"(",
"r1",
",",
"r2",
")",
"return",
"(",
"self",
".",
"dr",
"(",
"np",
".",
"zeros",
"(",
"3",... | Calculate the shortest distance between two points in the cell,
accounting for periodic boundary conditions.
Args:
r1 (np.array): fractional coordinates of point r1.
r2 (np.array): fractional coordinates of point r2.
cutoff (:obj: `float`, optional): if set, return zero if the minimum distance is greater than `cutoff`. Defaults to None.
Returns:
(float): The distance between r1 and r2. | [
"Calculate",
"the",
"shortest",
"distance",
"between",
"two",
"points",
"in",
"the",
"cell",
"accounting",
"for",
"periodic",
"boundary",
"conditions",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/cell.py#L109-L123 |
22,782 | bjmorgan/vasppy | vasppy/cell.py | Cell.lengths | def lengths( self ):
"""
The cell lengths.
Args:
None
Returns:
(np.array(a,b,c)): The cell lengths.
"""
def lengths( self ):
    """
    The cell lengths.

    Args:
        None

    Returns:
        (np.array(a,b,c)): Euclidean length of each lattice vector (row of self.matrix).
    """
    return np.linalg.norm( self.matrix, axis=-1 )
"def",
"lengths",
"(",
"self",
")",
":",
"return",
"(",
"np",
".",
"array",
"(",
"[",
"math",
".",
"sqrt",
"(",
"sum",
"(",
"row",
"**",
"2",
")",
")",
"for",
"row",
"in",
"self",
".",
"matrix",
"]",
")",
")"
] | The cell lengths.
Args:
None
Returns:
(np.array(a,b,c)): The cell lengths. | [
"The",
"cell",
"lengths",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/cell.py#L125-L135 |
22,783 | bjmorgan/vasppy | vasppy/cell.py | Cell.inside_cell | def inside_cell( self, r ):
"""
Given a fractional-coordinate, if this lies outside the cell return the equivalent point inside the cell.
Args:
r (np.array): Fractional coordinates of a point (this may be outside the cell boundaries).
Returns:
(np.array): Fractional coordinates of an equivalent point, inside the cell boundaries.
"""
centre = np.array( [ 0.5, 0.5, 0.5 ] )
new_r = self.nearest_image( centre, r )
def inside_cell( self, r ):
    """
    Map a fractional coordinate that may lie outside the cell onto the
    equivalent point inside the cell boundaries.

    Args:
        r (np.array): fractional coordinates of a point (possibly outside the cell).

    Returns:
        (np.array): fractional coordinates of the equivalent point inside the cell.
    """
    cell_centre = np.array( [ 0.5, 0.5, 0.5 ] )
    return self.nearest_image( cell_centre, r )
"def",
"inside_cell",
"(",
"self",
",",
"r",
")",
":",
"centre",
"=",
"np",
".",
"array",
"(",
"[",
"0.5",
",",
"0.5",
",",
"0.5",
"]",
")",
"new_r",
"=",
"self",
".",
"nearest_image",
"(",
"centre",
",",
"r",
")",
"return",
"new_r"
] | Given a fractional-coordinate, if this lies outside the cell return the equivalent point inside the cell.
Args:
r (np.array): Fractional coordinates of a point (this may be outside the cell boundaries).
Returns:
(np.array): Fractional coordinates of an equivalent point, inside the cell boundaries. | [
"Given",
"a",
"fractional",
"-",
"coordinate",
"if",
"this",
"lies",
"outside",
"the",
"cell",
"return",
"the",
"equivalent",
"point",
"inside",
"the",
"cell",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/cell.py#L174-L186 |
22,784 | bjmorgan/vasppy | vasppy/cell.py | Cell.volume | def volume( self ):
"""
The cell volume.
Args:
None
Returns:
(float): The cell volume.
"""
def volume( self ):
    """
    The cell volume, computed as the scalar triple product of the lattice vectors.

    Args:
        None

    Returns:
        (float): The cell volume.
    """
    a, b, c = self.matrix
    return np.dot( a, np.cross( b, c ) )
"def",
"volume",
"(",
"self",
")",
":",
"return",
"np",
".",
"dot",
"(",
"self",
".",
"matrix",
"[",
"0",
"]",
",",
"np",
".",
"cross",
"(",
"self",
".",
"matrix",
"[",
"1",
"]",
",",
"self",
".",
"matrix",
"[",
"2",
"]",
")",
")"
] | The cell volume.
Args:
None
Returns:
(float): The cell volume. | [
"The",
"cell",
"volume",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/cell.py#L188-L198 |
22,785 | bjmorgan/vasppy | vasppy/vaspmeta.py | VASPMeta.from_file | def from_file( cls, filename ):
"""
Create a VASPMeta object by reading a `vaspmeta.yaml` file
Args:
filename (Str): filename to read in.
Returns:
(vasppy.VASPMeta): the VASPMeta object
"""
with open( filename, 'r' ) as stream:
data = yaml.load( stream, Loader=yaml.SafeLoader )
notes = data.get( 'notes' )
v_type = data.get( 'type' )
track = data.get( 'track' )
xargs = {}
if track:
if type( track ) is str:
track = [ track ]
xargs['track'] = track
vaspmeta = VASPMeta( data['title'],
data['description'],
data['status'],
notes=notes,
type=v_type,
**xargs )
def from_file( cls, filename ):
    """
    Create a VASPMeta object by reading a `vaspmeta.yaml` file.

    Args:
        filename (str): filename to read in.

    Returns:
        (vasppy.VASPMeta): the VASPMeta object (an instance of `cls`,
        so subclasses construct instances of themselves).
    """
    with open( filename, 'r' ) as stream:
        data = yaml.load( stream, Loader=yaml.SafeLoader )
    notes = data.get( 'notes' )
    v_type = data.get( 'type' )
    track = data.get( 'track' )
    xargs = {}
    if track:
        # Accept a single string as shorthand for a one-element list.
        # (isinstance replaces the previous exact-type check, so str
        # subclasses are handled too.)
        if isinstance( track, str ):
            track = [ track ]
        xargs['track'] = track
    # BUG FIX: construct via `cls` rather than the hard-coded VASPMeta name,
    # so this classmethod behaves correctly for subclasses.
    return cls( data['title'],
                data['description'],
                data['status'],
                notes=notes,
                type=v_type,
                **xargs )
"def",
"from_file",
"(",
"cls",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"stream",
":",
"data",
"=",
"yaml",
".",
"load",
"(",
"stream",
",",
"Loader",
"=",
"yaml",
".",
"SafeLoader",
")",
"notes",
"=",
"da... | Create a VASPMeta object by reading a `vaspmeta.yaml` file
Args:
filename (Str): filename to read in.
Returns:
(vasppy.VASPMeta): the VASPMeta object | [
"Create",
"a",
"VASPMeta",
"object",
"by",
"reading",
"a",
"vaspmeta",
".",
"yaml",
"file"
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/vaspmeta.py#L47-L73 |
22,786 | bjmorgan/vasppy | vasppy/outcar.py | vasp_version_from_outcar | def vasp_version_from_outcar( filename='OUTCAR' ):
"""
Returns the first line from a VASP OUTCAR file, to get the VASP source version string.
Args:
filename (Str, optional): OUTCAR filename. Defaults to 'OUTCAR'.
Returns:
(Str): The first line read from the OUTCAR file.
"""
with open( filename ) as f:
line = f.readline().strip()
def vasp_version_from_outcar( filename='OUTCAR' ):
    """
    Return the first line of a VASP OUTCAR file, which carries the VASP
    source version string.

    Args:
        filename (str, optional): OUTCAR filename. Defaults to 'OUTCAR'.

    Returns:
        (str): The first line of the OUTCAR, stripped of surrounding whitespace.
    """
    with open( filename ) as outcar:
        first_line = outcar.readline()
    return first_line.strip()
"def",
"vasp_version_from_outcar",
"(",
"filename",
"=",
"'OUTCAR'",
")",
":",
"with",
"open",
"(",
"filename",
")",
"as",
"f",
":",
"line",
"=",
"f",
".",
"readline",
"(",
")",
".",
"strip",
"(",
")",
"return",
"line"
] | Returns the first line from a VASP OUTCAR file, to get the VASP source version string.
Args:
filename (Str, optional): OUTCAR filename. Defaults to 'OUTCAR'.
Returns:
(Str): The first line read from the OUTCAR file. | [
"Returns",
"the",
"first",
"line",
"from",
"a",
"VASP",
"OUTCAR",
"file",
"to",
"get",
"the",
"VASP",
"source",
"version",
"string",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/outcar.py#L41-L53 |
22,787 | bjmorgan/vasppy | vasppy/outcar.py | potcar_eatom_list_from_outcar | def potcar_eatom_list_from_outcar( filename='OUTCAR' ):
"""
Returns a list of EATOM values for the pseudopotentials used.
Args:
filename (Str, optional): OUTCAR filename. Defaults to 'OUTCAR'.
Returns:
(List(Float)): A list of EATOM values, in the order they appear in the OUTCAR.
"""
with open( filename ) as f:
outcar = f.read()
eatom_re = re.compile( "energy of atom\s+\d+\s+EATOM=\s*([-\d\.]+)" )
eatom = [ float( e ) for e in eatom_re.findall( outcar ) ]
def potcar_eatom_list_from_outcar( filename='OUTCAR' ):
    """
    Returns a list of EATOM values for the pseudopotentials used.

    Args:
        filename (str, optional): OUTCAR filename. Defaults to 'OUTCAR'.

    Returns:
        (list(float)): EATOM values, in the order they appear in the OUTCAR.
    """
    with open( filename ) as f:
        outcar = f.read()
    # BUG FIX: the pattern is now a raw string; \s and \d in a plain string
    # are invalid escape sequences (SyntaxWarning in modern Python).
    eatom_re = re.compile( r"energy of atom\s+\d+\s+EATOM=\s*([-\d\.]+)" )
    return [ float( e ) for e in eatom_re.findall( outcar ) ]
"def",
"potcar_eatom_list_from_outcar",
"(",
"filename",
"=",
"'OUTCAR'",
")",
":",
"with",
"open",
"(",
"filename",
")",
"as",
"f",
":",
"outcar",
"=",
"f",
".",
"read",
"(",
")",
"eatom_re",
"=",
"re",
".",
"compile",
"(",
"\"energy of atom\\s+\\d+\\s+EATO... | Returns a list of EATOM values for the pseudopotentials used.
Args:
filename (Str, optional): OUTCAR filename. Defaults to 'OUTCAR'.
Returns:
(List(Float)): A list of EATOM values, in the order they appear in the OUTCAR. | [
"Returns",
"a",
"list",
"of",
"EATOM",
"values",
"for",
"the",
"pseudopotentials",
"used",
"."
] | cc2d1449697b17ee1c43715a02cddcb1139a6834 | https://github.com/bjmorgan/vasppy/blob/cc2d1449697b17ee1c43715a02cddcb1139a6834/vasppy/outcar.py#L55-L69 |
22,788 | brandon-rhodes/logging_tree | logging_tree/format.py | build_description | def build_description(node=None):
"""Return a multi-line string describing a `logging_tree.nodes.Node`.
If no `node` argument is provided, then the entire tree of currently
active `logging` loggers is printed out.
"""
if node is None:
from logging_tree.nodes import tree
node = tree()
def build_description(node=None):
    """Return a multi-line string describing a `logging_tree.nodes.Node`.

    If no `node` argument is provided, the entire tree of currently
    active `logging` loggers is described.
    """
    if node is None:
        from logging_tree.nodes import tree
        node = tree()
    stripped = [line.rstrip() for line in describe(node)]
    return '\n'.join(stripped) + '\n'
"def",
"build_description",
"(",
"node",
"=",
"None",
")",
":",
"if",
"node",
"is",
"None",
":",
"from",
"logging_tree",
".",
"nodes",
"import",
"tree",
"node",
"=",
"tree",
"(",
")",
"return",
"'\\n'",
".",
"join",
"(",
"[",
"line",
".",
"rstrip",
"... | Return a multi-line string describing a `logging_tree.nodes.Node`.
If no `node` argument is provided, then the entire tree of currently
active `logging` loggers is printed out. | [
"Return",
"a",
"multi",
"-",
"line",
"string",
"describing",
"a",
"logging_tree",
".",
"nodes",
".",
"Node",
"."
] | 8513cf85b3bf8ff1b58e54c73718a41ef6524a4c | https://github.com/brandon-rhodes/logging_tree/blob/8513cf85b3bf8ff1b58e54c73718a41ef6524a4c/logging_tree/format.py#L20-L30 |
22,789 | brandon-rhodes/logging_tree | logging_tree/format.py | _describe | def _describe(node, parent):
"""Generate lines describing the given `node` tuple.
This is the recursive back-end that powers ``describe()``. With its
extra ``parent`` parameter, this routine remembers the nearest
non-placeholder ancestor so that it can compare it against the
actual value of the ``.parent`` attribute of each node.
"""
name, logger, children = node
is_placeholder = isinstance(logger, logging.PlaceHolder)
if is_placeholder:
yield '<--[%s]' % name
else:
parent_is_correct = (parent is None) or (logger.parent is parent)
if not logger.propagate:
arrow = ' '
elif parent_is_correct:
arrow = '<--'
else:
arrow = ' !-'
yield '%s"%s"' % (arrow, name)
if not parent_is_correct:
if logger.parent is None:
yield (' Broken .parent is None, so messages stop here')
else:
yield (' Broken .parent redirects messages to %r instead'
% (logger.parent.name,))
if logger.level == logging.NOTSET:
yield ' Level NOTSET so inherits level ' + logging.getLevelName(
logger.getEffectiveLevel())
else:
yield ' Level ' + logging.getLevelName(logger.level)
if not logger.propagate:
yield ' Propagate OFF'
if logger.disabled:
yield ' Disabled'
# In case someone has defined a custom logger that lacks a
# `filters` or `handlers` attribute, we call getattr() and
# provide an empty sequence as a fallback.
for f in getattr(logger, 'filters', ()):
yield ' Filter %s' % describe_filter(f)
for h in getattr(logger, 'handlers', ()):
g = describe_handler(h)
yield ' Handler %s' % next(g)
for line in g:
yield ' ' + line
if children:
if not is_placeholder:
parent = logger
last_child = children[-1]
for child in children:
g = _describe(child, parent)
yield ' |'
yield ' o' + next(g)
if child is last_child:
prefix = ' '
else:
prefix = ' |'
for line in g:
yield prefix + line | python | def _describe(node, parent):
name, logger, children = node
is_placeholder = isinstance(logger, logging.PlaceHolder)
if is_placeholder:
yield '<--[%s]' % name
else:
parent_is_correct = (parent is None) or (logger.parent is parent)
if not logger.propagate:
arrow = ' '
elif parent_is_correct:
arrow = '<--'
else:
arrow = ' !-'
yield '%s"%s"' % (arrow, name)
if not parent_is_correct:
if logger.parent is None:
yield (' Broken .parent is None, so messages stop here')
else:
yield (' Broken .parent redirects messages to %r instead'
% (logger.parent.name,))
if logger.level == logging.NOTSET:
yield ' Level NOTSET so inherits level ' + logging.getLevelName(
logger.getEffectiveLevel())
else:
yield ' Level ' + logging.getLevelName(logger.level)
if not logger.propagate:
yield ' Propagate OFF'
if logger.disabled:
yield ' Disabled'
# In case someone has defined a custom logger that lacks a
# `filters` or `handlers` attribute, we call getattr() and
# provide an empty sequence as a fallback.
for f in getattr(logger, 'filters', ()):
yield ' Filter %s' % describe_filter(f)
for h in getattr(logger, 'handlers', ()):
g = describe_handler(h)
yield ' Handler %s' % next(g)
for line in g:
yield ' ' + line
if children:
if not is_placeholder:
parent = logger
last_child = children[-1]
for child in children:
g = _describe(child, parent)
yield ' |'
yield ' o' + next(g)
if child is last_child:
prefix = ' '
else:
prefix = ' |'
for line in g:
yield prefix + line | [
"def",
"_describe",
"(",
"node",
",",
"parent",
")",
":",
"name",
",",
"logger",
",",
"children",
"=",
"node",
"is_placeholder",
"=",
"isinstance",
"(",
"logger",
",",
"logging",
".",
"PlaceHolder",
")",
"if",
"is_placeholder",
":",
"yield",
"'<--[%s]'",
"... | Generate lines describing the given `node` tuple.
This is the recursive back-end that powers ``describe()``. With its
extra ``parent`` parameter, this routine remembers the nearest
non-placeholder ancestor so that it can compare it against the
actual value of the ``.parent`` attribute of each node. | [
"Generate",
"lines",
"describing",
"the",
"given",
"node",
"tuple",
"."
] | 8513cf85b3bf8ff1b58e54c73718a41ef6524a4c | https://github.com/brandon-rhodes/logging_tree/blob/8513cf85b3bf8ff1b58e54c73718a41ef6524a4c/logging_tree/format.py#L41-L104 |
22,790 | brandon-rhodes/logging_tree | logging_tree/format.py | describe_filter | def describe_filter(f):
"""Return text describing the logging filter `f`."""
if f.__class__ is logging.Filter: # using type() breaks in Python <= 2.6
return 'name=%r' % f.name
return repr(f) | python | def describe_filter(f):
if f.__class__ is logging.Filter: # using type() breaks in Python <= 2.6
return 'name=%r' % f.name
return repr(f) | [
"def",
"describe_filter",
"(",
"f",
")",
":",
"if",
"f",
".",
"__class__",
"is",
"logging",
".",
"Filter",
":",
"# using type() breaks in Python <= 2.6",
"return",
"'name=%r'",
"%",
"f",
".",
"name",
"return",
"repr",
"(",
"f",
")"
] | Return text describing the logging filter `f`. | [
"Return",
"text",
"describing",
"the",
"logging",
"filter",
"f",
"."
] | 8513cf85b3bf8ff1b58e54c73718a41ef6524a4c | https://github.com/brandon-rhodes/logging_tree/blob/8513cf85b3bf8ff1b58e54c73718a41ef6524a4c/logging_tree/format.py#L112-L116 |
22,791 | brandon-rhodes/logging_tree | logging_tree/format.py | describe_handler | def describe_handler(h):
"""Yield one or more lines describing the logging handler `h`."""
t = h.__class__ # using type() breaks in Python <= 2.6
format = handler_formats.get(t)
if format is not None:
yield format % h.__dict__
else:
yield repr(h)
level = getattr(h, 'level', logging.NOTSET)
if level != logging.NOTSET:
yield ' Level ' + logging.getLevelName(level)
for f in getattr(h, 'filters', ()):
yield ' Filter %s' % describe_filter(f)
formatter = getattr(h, 'formatter', None)
if formatter is not None:
if type(formatter) is logging.Formatter:
yield ' Formatter fmt=%r datefmt=%r' % (
getattr(formatter, '_fmt', None),
getattr(formatter, 'datefmt', None))
else:
yield ' Formatter %r' % (formatter,)
if t is logging.handlers.MemoryHandler and h.target is not None:
yield ' Flushes output to:'
g = describe_handler(h.target)
yield ' Handler ' + next(g)
for line in g:
yield ' ' + line | python | def describe_handler(h):
t = h.__class__ # using type() breaks in Python <= 2.6
format = handler_formats.get(t)
if format is not None:
yield format % h.__dict__
else:
yield repr(h)
level = getattr(h, 'level', logging.NOTSET)
if level != logging.NOTSET:
yield ' Level ' + logging.getLevelName(level)
for f in getattr(h, 'filters', ()):
yield ' Filter %s' % describe_filter(f)
formatter = getattr(h, 'formatter', None)
if formatter is not None:
if type(formatter) is logging.Formatter:
yield ' Formatter fmt=%r datefmt=%r' % (
getattr(formatter, '_fmt', None),
getattr(formatter, 'datefmt', None))
else:
yield ' Formatter %r' % (formatter,)
if t is logging.handlers.MemoryHandler and h.target is not None:
yield ' Flushes output to:'
g = describe_handler(h.target)
yield ' Handler ' + next(g)
for line in g:
yield ' ' + line | [
"def",
"describe_handler",
"(",
"h",
")",
":",
"t",
"=",
"h",
".",
"__class__",
"# using type() breaks in Python <= 2.6",
"format",
"=",
"handler_formats",
".",
"get",
"(",
"t",
")",
"if",
"format",
"is",
"not",
"None",
":",
"yield",
"format",
"%",
"h",
".... | Yield one or more lines describing the logging handler `h`. | [
"Yield",
"one",
"or",
"more",
"lines",
"describing",
"the",
"logging",
"handler",
"h",
"."
] | 8513cf85b3bf8ff1b58e54c73718a41ef6524a4c | https://github.com/brandon-rhodes/logging_tree/blob/8513cf85b3bf8ff1b58e54c73718a41ef6524a4c/logging_tree/format.py#L144-L170 |
22,792 | brandon-rhodes/logging_tree | logging_tree/nodes.py | tree | def tree():
"""Return a tree of tuples representing the logger layout.
Each tuple looks like ``('logger-name', <Logger>, [...])`` where the
third element is a list of zero or more child tuples that share the
same layout.
"""
root = ('', logging.root, [])
nodes = {}
items = list(logging.root.manager.loggerDict.items()) # for Python 2 and 3
items.sort()
for name, logger in items:
nodes[name] = node = (name, logger, [])
i = name.rfind('.', 0, len(name) - 1) # same formula used in `logging`
if i == -1:
parent = root
else:
parent = nodes[name[:i]]
parent[2].append(node)
return root | python | def tree():
root = ('', logging.root, [])
nodes = {}
items = list(logging.root.manager.loggerDict.items()) # for Python 2 and 3
items.sort()
for name, logger in items:
nodes[name] = node = (name, logger, [])
i = name.rfind('.', 0, len(name) - 1) # same formula used in `logging`
if i == -1:
parent = root
else:
parent = nodes[name[:i]]
parent[2].append(node)
return root | [
"def",
"tree",
"(",
")",
":",
"root",
"=",
"(",
"''",
",",
"logging",
".",
"root",
",",
"[",
"]",
")",
"nodes",
"=",
"{",
"}",
"items",
"=",
"list",
"(",
"logging",
".",
"root",
".",
"manager",
".",
"loggerDict",
".",
"items",
"(",
")",
")",
... | Return a tree of tuples representing the logger layout.
Each tuple looks like ``('logger-name', <Logger>, [...])`` where the
third element is a list of zero or more child tuples that share the
same layout. | [
"Return",
"a",
"tree",
"of",
"tuples",
"representing",
"the",
"logger",
"layout",
"."
] | 8513cf85b3bf8ff1b58e54c73718a41ef6524a4c | https://github.com/brandon-rhodes/logging_tree/blob/8513cf85b3bf8ff1b58e54c73718a41ef6524a4c/logging_tree/nodes.py#L5-L25 |
22,793 | signalwire/signalwire-python | signalwire/rest/__init__.py | patched_str | def patched_str(self):
""" Try to pretty-print the exception, if this is going on screen. """
def red(words):
return u("\033[31m\033[49m%s\033[0m") % words
def white(words):
return u("\033[37m\033[49m%s\033[0m") % words
def blue(words):
return u("\033[34m\033[49m%s\033[0m") % words
def teal(words):
return u("\033[36m\033[49m%s\033[0m") % words
def get_uri(code):
return "https://www.signalwire.com/docs/errors/{0}".format(code)
# If it makes sense to print a human readable error message, try to
# do it. The one problem is that someone might catch this error and
# try to display the message from it to an end user.
if hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
msg = (
"\n{red_error} {request_was}\n\n{http_line}"
"\n\n{sw_returned}\n\n{message}\n".format(
red_error=red("HTTP Error"),
request_was=white("Your request was:"),
http_line=teal("%s %s" % (self.method, self.uri)),
sw_returned=white(
"Signalwire returned the following information:"),
message=blue(str(self.msg))
))
if self.code:
msg = "".join([msg, "\n{more_info}\n\n{uri}\n\n".format(
more_info=white("More information may be available here:"),
uri=blue(get_uri(self.code))),
])
return msg
else:
return "HTTP {0} error: {1}".format(self.status, self.msg) | python | def patched_str(self):
def red(words):
return u("\033[31m\033[49m%s\033[0m") % words
def white(words):
return u("\033[37m\033[49m%s\033[0m") % words
def blue(words):
return u("\033[34m\033[49m%s\033[0m") % words
def teal(words):
return u("\033[36m\033[49m%s\033[0m") % words
def get_uri(code):
return "https://www.signalwire.com/docs/errors/{0}".format(code)
# If it makes sense to print a human readable error message, try to
# do it. The one problem is that someone might catch this error and
# try to display the message from it to an end user.
if hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
msg = (
"\n{red_error} {request_was}\n\n{http_line}"
"\n\n{sw_returned}\n\n{message}\n".format(
red_error=red("HTTP Error"),
request_was=white("Your request was:"),
http_line=teal("%s %s" % (self.method, self.uri)),
sw_returned=white(
"Signalwire returned the following information:"),
message=blue(str(self.msg))
))
if self.code:
msg = "".join([msg, "\n{more_info}\n\n{uri}\n\n".format(
more_info=white("More information may be available here:"),
uri=blue(get_uri(self.code))),
])
return msg
else:
return "HTTP {0} error: {1}".format(self.status, self.msg) | [
"def",
"patched_str",
"(",
"self",
")",
":",
"def",
"red",
"(",
"words",
")",
":",
"return",
"u",
"(",
"\"\\033[31m\\033[49m%s\\033[0m\"",
")",
"%",
"words",
"def",
"white",
"(",
"words",
")",
":",
"return",
"u",
"(",
"\"\\033[37m\\033[49m%s\\033[0m\"",
")",... | Try to pretty-print the exception, if this is going on screen. | [
"Try",
"to",
"pretty",
"-",
"print",
"the",
"exception",
"if",
"this",
"is",
"going",
"on",
"screen",
"."
] | 71eebb38d23f39f5de716991ca49128a6084b75d | https://github.com/signalwire/signalwire-python/blob/71eebb38d23f39f5de716991ca49128a6084b75d/signalwire/rest/__init__.py#L27-L66 |
22,794 | QInfer/python-qinfer | src/qinfer/score.py | ScoreMixin.h | def h(self):
r"""
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently.
"""
if np.size(self._h) > 1:
assert np.size(self._h) == self.n_modelparams
return self._h
else:
return self._h * np.ones(self.n_modelparams) | python | def h(self):
r"""
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently.
"""
if np.size(self._h) > 1:
assert np.size(self._h) == self.n_modelparams
return self._h
else:
return self._h * np.ones(self.n_modelparams) | [
"def",
"h",
"(",
"self",
")",
":",
"if",
"np",
".",
"size",
"(",
"self",
".",
"_h",
")",
">",
"1",
":",
"assert",
"np",
".",
"size",
"(",
"self",
".",
"_h",
")",
"==",
"self",
".",
"n_modelparams",
"return",
"self",
".",
"_h",
"else",
":",
"r... | r"""
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently. | [
"r",
"Returns",
"the",
"step",
"size",
"to",
"be",
"used",
"in",
"numerical",
"differentiation",
"with",
"respect",
"to",
"the",
"model",
"parameters",
".",
"The",
"step",
"size",
"is",
"given",
"as",
"a",
"vector",
"with",
"length",
"n_modelparams",
"so",
... | 8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3 | https://github.com/QInfer/python-qinfer/blob/8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3/src/qinfer/score.py#L62-L74 |
22,795 | QInfer/python-qinfer | src/qinfer/parallel.py | DirectViewParallelizedModel.clear_cache | def clear_cache(self):
"""
Clears any cache associated with the serial model and the engines
seen by the direct view.
"""
self.underlying_model.clear_cache()
try:
logger.info('DirectView results has {} items. Clearing.'.format(
len(self._dv.results)
))
self._dv.purge_results('all')
if self._purge_client:
self._dv.client.purge_everything()
except:
pass | python | def clear_cache(self):
self.underlying_model.clear_cache()
try:
logger.info('DirectView results has {} items. Clearing.'.format(
len(self._dv.results)
))
self._dv.purge_results('all')
if self._purge_client:
self._dv.client.purge_everything()
except:
pass | [
"def",
"clear_cache",
"(",
"self",
")",
":",
"self",
".",
"underlying_model",
".",
"clear_cache",
"(",
")",
"try",
":",
"logger",
".",
"info",
"(",
"'DirectView results has {} items. Clearing.'",
".",
"format",
"(",
"len",
"(",
"self",
".",
"_dv",
".",
"resu... | Clears any cache associated with the serial model and the engines
seen by the direct view. | [
"Clears",
"any",
"cache",
"associated",
"with",
"the",
"serial",
"model",
"and",
"the",
"engines",
"seen",
"by",
"the",
"direct",
"view",
"."
] | 8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3 | https://github.com/QInfer/python-qinfer/blob/8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3/src/qinfer/parallel.py#L167-L181 |
22,796 | QInfer/python-qinfer | src/qinfer/smc.py | SMCUpdater._maybe_resample | def _maybe_resample(self):
"""
Checks the resample threshold and conditionally resamples.
"""
ess = self.n_ess
if ess <= 10:
warnings.warn(
"Extremely small n_ess encountered ({}). "
"Resampling is likely to fail. Consider adding particles, or "
"resampling more often.".format(ess),
ApproximationWarning
)
if ess < self.n_particles * self.resample_thresh:
self.resample()
pass | python | def _maybe_resample(self):
ess = self.n_ess
if ess <= 10:
warnings.warn(
"Extremely small n_ess encountered ({}). "
"Resampling is likely to fail. Consider adding particles, or "
"resampling more often.".format(ess),
ApproximationWarning
)
if ess < self.n_particles * self.resample_thresh:
self.resample()
pass | [
"def",
"_maybe_resample",
"(",
"self",
")",
":",
"ess",
"=",
"self",
".",
"n_ess",
"if",
"ess",
"<=",
"10",
":",
"warnings",
".",
"warn",
"(",
"\"Extremely small n_ess encountered ({}). \"",
"\"Resampling is likely to fail. Consider adding particles, or \"",
"\"resampling... | Checks the resample threshold and conditionally resamples. | [
"Checks",
"the",
"resample",
"threshold",
"and",
"conditionally",
"resamples",
"."
] | 8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3 | https://github.com/QInfer/python-qinfer/blob/8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3/src/qinfer/smc.py#L263-L277 |
22,797 | QInfer/python-qinfer | src/qinfer/smc.py | SMCUpdater.reset | def reset(self, n_particles=None, only_params=None, reset_weights=True):
"""
Causes all particle locations and weights to be drawn fresh from the
initial prior.
:param int n_particles: Forces the size of the new particle set. If
`None`, the size of the particle set is not changed.
:param slice only_params: Resets only some of the parameters. Cannot
be set if ``n_particles`` is also given.
:param bool reset_weights: Resets the weights as well as the particles.
"""
# Particles are stored using two arrays, particle_locations and
# particle_weights, such that:
#
# particle_locations[idx_particle, idx_modelparam] is the idx_modelparam
# parameter of the particle idx_particle.
# particle_weights[idx_particle] is the weight of the particle
# idx_particle.
if n_particles is not None and only_params is not None:
raise ValueError("Cannot set both n_particles and only_params.")
if n_particles is None:
n_particles = self.n_particles
if reset_weights:
self.particle_weights = np.ones((n_particles,)) / n_particles
if only_params is None:
sl = np.s_[:, :]
# Might as well make a new array if we're resetting everything.
self.particle_locations = np.zeros((n_particles, self.model.n_modelparams))
else:
sl = np.s_[:, only_params]
self.particle_locations[sl] = self.prior.sample(n=n_particles)[sl]
# Since this changes particle positions, we must recanonicalize.
if self._canonicalize:
self.particle_locations[sl] = self.model.canonicalize(self.particle_locations[sl]) | python | def reset(self, n_particles=None, only_params=None, reset_weights=True):
# Particles are stored using two arrays, particle_locations and
# particle_weights, such that:
#
# particle_locations[idx_particle, idx_modelparam] is the idx_modelparam
# parameter of the particle idx_particle.
# particle_weights[idx_particle] is the weight of the particle
# idx_particle.
if n_particles is not None and only_params is not None:
raise ValueError("Cannot set both n_particles and only_params.")
if n_particles is None:
n_particles = self.n_particles
if reset_weights:
self.particle_weights = np.ones((n_particles,)) / n_particles
if only_params is None:
sl = np.s_[:, :]
# Might as well make a new array if we're resetting everything.
self.particle_locations = np.zeros((n_particles, self.model.n_modelparams))
else:
sl = np.s_[:, only_params]
self.particle_locations[sl] = self.prior.sample(n=n_particles)[sl]
# Since this changes particle positions, we must recanonicalize.
if self._canonicalize:
self.particle_locations[sl] = self.model.canonicalize(self.particle_locations[sl]) | [
"def",
"reset",
"(",
"self",
",",
"n_particles",
"=",
"None",
",",
"only_params",
"=",
"None",
",",
"reset_weights",
"=",
"True",
")",
":",
"# Particles are stored using two arrays, particle_locations and",
"# particle_weights, such that:",
"#",
"# particle_locations[idx_pa... | Causes all particle locations and weights to be drawn fresh from the
initial prior.
:param int n_particles: Forces the size of the new particle set. If
`None`, the size of the particle set is not changed.
:param slice only_params: Resets only some of the parameters. Cannot
be set if ``n_particles`` is also given.
:param bool reset_weights: Resets the weights as well as the particles. | [
"Causes",
"all",
"particle",
"locations",
"and",
"weights",
"to",
"be",
"drawn",
"fresh",
"from",
"the",
"initial",
"prior",
"."
] | 8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3 | https://github.com/QInfer/python-qinfer/blob/8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3/src/qinfer/smc.py#L281-L320 |
22,798 | QInfer/python-qinfer | src/qinfer/smc.py | SMCUpdater.batch_update | def batch_update(self, outcomes, expparams, resample_interval=5):
r"""
Updates based on a batch of outcomes and experiments, rather than just
one.
:param numpy.ndarray outcomes: An array of outcomes of the experiments that
were performed.
:param numpy.ndarray expparams: Either a scalar or record single-index
array of experiments that were performed.
:param int resample_interval: Controls how often to check whether
:math:`N_{\text{ess}}` falls below the resample threshold.
"""
# TODO: write a faster implementation here using vectorized calls to
# likelihood.
# Check that the number of outcomes and experiments is the same.
n_exps = outcomes.shape[0]
if expparams.shape[0] != n_exps:
raise ValueError("The number of outcomes and experiments must match.")
if len(expparams.shape) == 1:
expparams = expparams[:, None]
# Loop over experiments and update one at a time.
for idx_exp, (outcome, experiment) in enumerate(zip(iter(outcomes), iter(expparams))):
self.update(outcome, experiment, check_for_resample=False)
if (idx_exp + 1) % resample_interval == 0:
self._maybe_resample() | python | def batch_update(self, outcomes, expparams, resample_interval=5):
r"""
Updates based on a batch of outcomes and experiments, rather than just
one.
:param numpy.ndarray outcomes: An array of outcomes of the experiments that
were performed.
:param numpy.ndarray expparams: Either a scalar or record single-index
array of experiments that were performed.
:param int resample_interval: Controls how often to check whether
:math:`N_{\text{ess}}` falls below the resample threshold.
"""
# TODO: write a faster implementation here using vectorized calls to
# likelihood.
# Check that the number of outcomes and experiments is the same.
n_exps = outcomes.shape[0]
if expparams.shape[0] != n_exps:
raise ValueError("The number of outcomes and experiments must match.")
if len(expparams.shape) == 1:
expparams = expparams[:, None]
# Loop over experiments and update one at a time.
for idx_exp, (outcome, experiment) in enumerate(zip(iter(outcomes), iter(expparams))):
self.update(outcome, experiment, check_for_resample=False)
if (idx_exp + 1) % resample_interval == 0:
self._maybe_resample() | [
"def",
"batch_update",
"(",
"self",
",",
"outcomes",
",",
"expparams",
",",
"resample_interval",
"=",
"5",
")",
":",
"# TODO: write a faster implementation here using vectorized calls to",
"# likelihood.",
"# Check that the number of outcomes and experiments is the same.",
"n... | r"""
Updates based on a batch of outcomes and experiments, rather than just
one.
:param numpy.ndarray outcomes: An array of outcomes of the experiments that
were performed.
:param numpy.ndarray expparams: Either a scalar or record single-index
array of experiments that were performed.
:param int resample_interval: Controls how often to check whether
:math:`N_{\text{ess}}` falls below the resample threshold. | [
"r",
"Updates",
"based",
"on",
"a",
"batch",
"of",
"outcomes",
"and",
"experiments",
"rather",
"than",
"just",
"one",
"."
] | 8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3 | https://github.com/QInfer/python-qinfer/blob/8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3/src/qinfer/smc.py#L459-L487 |
22,799 | QInfer/python-qinfer | src/qinfer/smc.py | SMCUpdater.resample | def resample(self):
"""
Forces the updater to perform a resampling step immediately.
"""
if self.just_resampled:
warnings.warn(
"Resampling without additional data; this may not perform as "
"desired.",
ResamplerWarning
)
# Record that we have performed a resampling step.
self._just_resampled = True
self._resample_count += 1
# If we're tracking divergences, make a copy of the weights and
# locations.
if self._resampling_divergences is not None:
old_locs = self.particle_locations.copy()
old_weights = self.particle_weights.copy()
# Record the previous mean, cov if needed.
if self._debug_resampling:
old_mean = self.est_mean()
old_cov = self.est_covariance_mtx()
# Find the new particle locations according to the chosen resampling
# algorithm.
# We pass the model so that the resampler can check for validity of
# newly placed particles.
# FIXME This feels fishy. If we update particles elsewwhere
new_distribution = self.resampler(self.model, self)
self.particle_weights = new_distribution.particle_weights
self.particle_locations = new_distribution.particle_locations
# Possibly canonicalize, if we've been asked to do so.
if self._canonicalize:
self.particle_locations[:, :] = self.model.canonicalize(self.particle_locations)
# Instruct the model to clear its cache, demoting any errors to
# warnings.
try:
self.model.clear_cache()
except Exception as e:
warnings.warn("Exception raised when clearing model cache: {}. Ignoring.".format(e))
# Possibly track the new divergence.
if self._resampling_divergences is not None:
self._resampling_divergences.append(
self._kl_divergence(old_locs, old_weights)
)
# Report current and previous mean, cov.
if self._debug_resampling:
new_mean = self.est_mean()
new_cov = self.est_covariance_mtx()
logger.debug("Resampling changed mean by {}. Norm change in cov: {}.".format(
old_mean - new_mean,
np.linalg.norm(new_cov - old_cov)
)) | python | def resample(self):
if self.just_resampled:
warnings.warn(
"Resampling without additional data; this may not perform as "
"desired.",
ResamplerWarning
)
# Record that we have performed a resampling step.
self._just_resampled = True
self._resample_count += 1
# If we're tracking divergences, make a copy of the weights and
# locations.
if self._resampling_divergences is not None:
old_locs = self.particle_locations.copy()
old_weights = self.particle_weights.copy()
# Record the previous mean, cov if needed.
if self._debug_resampling:
old_mean = self.est_mean()
old_cov = self.est_covariance_mtx()
# Find the new particle locations according to the chosen resampling
# algorithm.
# We pass the model so that the resampler can check for validity of
# newly placed particles.
# FIXME This feels fishy. If we update particles elsewwhere
new_distribution = self.resampler(self.model, self)
self.particle_weights = new_distribution.particle_weights
self.particle_locations = new_distribution.particle_locations
# Possibly canonicalize, if we've been asked to do so.
if self._canonicalize:
self.particle_locations[:, :] = self.model.canonicalize(self.particle_locations)
# Instruct the model to clear its cache, demoting any errors to
# warnings.
try:
self.model.clear_cache()
except Exception as e:
warnings.warn("Exception raised when clearing model cache: {}. Ignoring.".format(e))
# Possibly track the new divergence.
if self._resampling_divergences is not None:
self._resampling_divergences.append(
self._kl_divergence(old_locs, old_weights)
)
# Report current and previous mean, cov.
if self._debug_resampling:
new_mean = self.est_mean()
new_cov = self.est_covariance_mtx()
logger.debug("Resampling changed mean by {}. Norm change in cov: {}.".format(
old_mean - new_mean,
np.linalg.norm(new_cov - old_cov)
)) | [
"def",
"resample",
"(",
"self",
")",
":",
"if",
"self",
".",
"just_resampled",
":",
"warnings",
".",
"warn",
"(",
"\"Resampling without additional data; this may not perform as \"",
"\"desired.\"",
",",
"ResamplerWarning",
")",
"# Record that we have performed a resampling st... | Forces the updater to perform a resampling step immediately. | [
"Forces",
"the",
"updater",
"to",
"perform",
"a",
"resampling",
"step",
"immediately",
"."
] | 8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3 | https://github.com/QInfer/python-qinfer/blob/8170c84a0be1723f8c6b09e0d3c7a40a886f1fe3/src/qinfer/smc.py#L491-L551 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.