body_hash
stringlengths 64
64
| body
stringlengths 23
109k
| docstring
stringlengths 1
57k
| path
stringlengths 4
198
| name
stringlengths 1
115
| repository_name
stringlengths 7
111
| repository_stars
float64 0
191k
| lang
stringclasses 1
value | body_without_docstring
stringlengths 14
108k
| unified
stringlengths 45
133k
|
|---|---|---|---|---|---|---|---|---|---|
613012aa992c15396a0a1a0f9bbbb8cdd74dedaaf41a81161170a8ae13c7e62a
|
@distributed_trace
def list_repositories(self, resource_group_name: str, workspace_name: str, repo_type: Union[(str, '_models.RepoType')], **kwargs: Any) -> Iterable['_models.RepoList']:
'Gets a list of repositories metadata.\n\n :param resource_group_name: The name of the resource group. The name is case insensitive.\n :type resource_group_name: str\n :param workspace_name: The name of the workspace.\n :type workspace_name: str\n :param repo_type: The repo type.\n :type repo_type: str or ~azure.mgmt.securityinsight.models.RepoType\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either RepoList or the result of cls(response)\n :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.securityinsight.models.RepoList]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
content_type = kwargs.pop('content_type', 'application/json')
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if (not next_link):
_json = self._serialize.body(repo_type, 'str')
request = build_list_repositories_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.list_repositories.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
_json = self._serialize.body(repo_type, 'str')
request = build_list_repositories_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = 'GET'
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('RepoList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), iter(list_of_elem))
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
|
Gets a list of repositories metadata.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param repo_type: The repo type.
:type repo_type: str or ~azure.mgmt.securityinsight.models.RepoType
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RepoList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.securityinsight.models.RepoList]
:raises: ~azure.core.exceptions.HttpResponseError
|
sdk/securityinsight/azure-mgmt-securityinsight/azure/mgmt/securityinsight/operations/_source_control_operations.py
|
list_repositories
|
NateLehman/azure-sdk-for-python
| 1
|
python
|
@distributed_trace
def list_repositories(self, resource_group_name: str, workspace_name: str, repo_type: Union[(str, '_models.RepoType')], **kwargs: Any) -> Iterable['_models.RepoList']:
'Gets a list of repositories metadata.\n\n :param resource_group_name: The name of the resource group. The name is case insensitive.\n :type resource_group_name: str\n :param workspace_name: The name of the workspace.\n :type workspace_name: str\n :param repo_type: The repo type.\n :type repo_type: str or ~azure.mgmt.securityinsight.models.RepoType\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either RepoList or the result of cls(response)\n :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.securityinsight.models.RepoList]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
content_type = kwargs.pop('content_type', 'application/json')
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if (not next_link):
_json = self._serialize.body(repo_type, 'str')
request = build_list_repositories_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.list_repositories.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
_json = self._serialize.body(repo_type, 'str')
request = build_list_repositories_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = 'GET'
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('RepoList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), iter(list_of_elem))
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
|
@distributed_trace
def list_repositories(self, resource_group_name: str, workspace_name: str, repo_type: Union[(str, '_models.RepoType')], **kwargs: Any) -> Iterable['_models.RepoList']:
'Gets a list of repositories metadata.\n\n :param resource_group_name: The name of the resource group. The name is case insensitive.\n :type resource_group_name: str\n :param workspace_name: The name of the workspace.\n :type workspace_name: str\n :param repo_type: The repo type.\n :type repo_type: str or ~azure.mgmt.securityinsight.models.RepoType\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either RepoList or the result of cls(response)\n :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.securityinsight.models.RepoList]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
content_type = kwargs.pop('content_type', 'application/json')
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if (not next_link):
_json = self._serialize.body(repo_type, 'str')
request = build_list_repositories_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=self.list_repositories.metadata['url'])
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
_json = self._serialize.body(repo_type, 'str')
request = build_list_repositories_request(subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, content_type=content_type, json=_json, template_url=next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = 'GET'
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('RepoList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), iter(list_of_elem))
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)<|docstring|>Gets a list of repositories metadata.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param repo_type: The repo type.
:type repo_type: str or ~azure.mgmt.securityinsight.models.RepoType
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RepoList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.securityinsight.models.RepoList]
:raises: ~azure.core.exceptions.HttpResponseError<|endoftext|>
|
5becd1a16c765404a1c97390193203f844c208bd7686d0ec212e3ad086c4beb7
|
def get_best_seller_titles(url=URL_NON_FICTION):
"Use the NY Times Books API endpoint above to get the titles that are\n on the best seller list for the longest time.\n\n Return a list of (title, weeks_on_list) tuples, e.g. for the nonfiction:\n\n [('BETWEEN THE WORLD AND ME', 86),\n ('EDUCATED', 79),\n ('BECOMING', 41),\n ('THE SECOND MOUNTAIN', 18),\n ... 11 more ...\n ]\n\n Dev docs: https://developer.nytimes.com/docs/books-product/1/overview\n "
with requests.Session() as request:
response = request.get(url)
try:
data = response.json()
except json.JSONDecodeError:
print(f'JSON Decode Error')
outcome = data['results']['books']
sorted_weeks = sorted(outcome, key=(lambda outcome: outcome['weeks_on_list']), reverse=True)
title_week_tuple = [(book['title'], book['weeks_on_list']) for book in sorted_weeks]
return title_week_tuple
|
Use the NY Times Books API endpoint above to get the titles that are
on the best seller list for the longest time.
Return a list of (title, weeks_on_list) tuples, e.g. for the nonfiction:
[('BETWEEN THE WORLD AND ME', 86),
('EDUCATED', 79),
('BECOMING', 41),
('THE SECOND MOUNTAIN', 18),
... 11 more ...
]
Dev docs: https://developer.nytimes.com/docs/books-product/1/overview
|
bites/bite221.py
|
get_best_seller_titles
|
ChidinmaKO/Chobe-bitesofpy
| 0
|
python
|
def get_best_seller_titles(url=URL_NON_FICTION):
"Use the NY Times Books API endpoint above to get the titles that are\n on the best seller list for the longest time.\n\n Return a list of (title, weeks_on_list) tuples, e.g. for the nonfiction:\n\n [('BETWEEN THE WORLD AND ME', 86),\n ('EDUCATED', 79),\n ('BECOMING', 41),\n ('THE SECOND MOUNTAIN', 18),\n ... 11 more ...\n ]\n\n Dev docs: https://developer.nytimes.com/docs/books-product/1/overview\n "
with requests.Session() as request:
response = request.get(url)
try:
data = response.json()
except json.JSONDecodeError:
print(f'JSON Decode Error')
outcome = data['results']['books']
sorted_weeks = sorted(outcome, key=(lambda outcome: outcome['weeks_on_list']), reverse=True)
title_week_tuple = [(book['title'], book['weeks_on_list']) for book in sorted_weeks]
return title_week_tuple
|
def get_best_seller_titles(url=URL_NON_FICTION):
"Use the NY Times Books API endpoint above to get the titles that are\n on the best seller list for the longest time.\n\n Return a list of (title, weeks_on_list) tuples, e.g. for the nonfiction:\n\n [('BETWEEN THE WORLD AND ME', 86),\n ('EDUCATED', 79),\n ('BECOMING', 41),\n ('THE SECOND MOUNTAIN', 18),\n ... 11 more ...\n ]\n\n Dev docs: https://developer.nytimes.com/docs/books-product/1/overview\n "
with requests.Session() as request:
response = request.get(url)
try:
data = response.json()
except json.JSONDecodeError:
print(f'JSON Decode Error')
outcome = data['results']['books']
sorted_weeks = sorted(outcome, key=(lambda outcome: outcome['weeks_on_list']), reverse=True)
title_week_tuple = [(book['title'], book['weeks_on_list']) for book in sorted_weeks]
return title_week_tuple<|docstring|>Use the NY Times Books API endpoint above to get the titles that are
on the best seller list for the longest time.
Return a list of (title, weeks_on_list) tuples, e.g. for the nonfiction:
[('BETWEEN THE WORLD AND ME', 86),
('EDUCATED', 79),
('BECOMING', 41),
('THE SECOND MOUNTAIN', 18),
... 11 more ...
]
Dev docs: https://developer.nytimes.com/docs/books-product/1/overview<|endoftext|>
|
4956ab32a5c4822854fce58e65a8190a35e08641fe25f67565d59e1e4061136a
|
def mocked_requests_get(*args, **kwargs):
'https://stackoverflow.com/a/28507806'
class MockResponse():
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
def json(self):
return self.json_data
url = args[0]
fname = (NON_FICTION if ('nonfiction' in url) else FICTION)
with open(fname) as f:
return MockResponse(json.loads(f.read()), 200)
return MockResponse(None, 404)
|
https://stackoverflow.com/a/28507806
|
bites/bite221.py
|
mocked_requests_get
|
ChidinmaKO/Chobe-bitesofpy
| 0
|
python
|
def mocked_requests_get(*args, **kwargs):
class MockResponse():
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
def json(self):
return self.json_data
url = args[0]
fname = (NON_FICTION if ('nonfiction' in url) else FICTION)
with open(fname) as f:
return MockResponse(json.loads(f.read()), 200)
return MockResponse(None, 404)
|
def mocked_requests_get(*args, **kwargs):
class MockResponse():
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
def json(self):
return self.json_data
url = args[0]
fname = (NON_FICTION if ('nonfiction' in url) else FICTION)
with open(fname) as f:
return MockResponse(json.loads(f.read()), 200)
return MockResponse(None, 404)<|docstring|>https://stackoverflow.com/a/28507806<|endoftext|>
|
8f11860108e3b3c8f9263bf38b67adb7e00c2397652421e937d0e018547446ee
|
def linearInterpolation(x_low: float, x: float, x_hi: float, y_low: float, y_hi: float) -> float:
'\n Helper function to compute linear interpolation of a given value x, and the line i defined by two points\n\n :param x_low: first point x\n :type x_low: float\n :param x: Input value x\n :type x: float\n :param x_hi: second point x\n :type x_hi: float\n :param y_low: first point y\n :type y_low: float\n :param y_hi: second point y\n :type y_hi: float\n :return: the linear interpolation of given x\n :rtype: float\n '
return ((((x - x_low) * (y_hi - y_low)) / (x_hi - x_low)) + y_low)
|
Helper function to compute linear interpolation of a given value x, and the line i defined by two points
:param x_low: first point x
:type x_low: float
:param x: Input value x
:type x: float
:param x_hi: second point x
:type x_hi: float
:param y_low: first point y
:type y_low: float
:param y_hi: second point y
:type y_hi: float
:return: the linear interpolation of given x
:rtype: float
|
src/main/python/programmingtheiot/cda/embedded/I2cHelper.py
|
linearInterpolation
|
Taowyoo/-constrained-device-app
| 0
|
python
|
def linearInterpolation(x_low: float, x: float, x_hi: float, y_low: float, y_hi: float) -> float:
'\n Helper function to compute linear interpolation of a given value x, and the line i defined by two points\n\n :param x_low: first point x\n :type x_low: float\n :param x: Input value x\n :type x: float\n :param x_hi: second point x\n :type x_hi: float\n :param y_low: first point y\n :type y_low: float\n :param y_hi: second point y\n :type y_hi: float\n :return: the linear interpolation of given x\n :rtype: float\n '
return ((((x - x_low) * (y_hi - y_low)) / (x_hi - x_low)) + y_low)
|
def linearInterpolation(x_low: float, x: float, x_hi: float, y_low: float, y_hi: float) -> float:
'\n Helper function to compute linear interpolation of a given value x, and the line i defined by two points\n\n :param x_low: first point x\n :type x_low: float\n :param x: Input value x\n :type x: float\n :param x_hi: second point x\n :type x_hi: float\n :param y_low: first point y\n :type y_low: float\n :param y_hi: second point y\n :type y_hi: float\n :return: the linear interpolation of given x\n :rtype: float\n '
return ((((x - x_low) * (y_hi - y_low)) / (x_hi - x_low)) + y_low)<|docstring|>Helper function to compute linear interpolation of a given value x, and the line i defined by two points
:param x_low: first point x
:type x_low: float
:param x: Input value x
:type x: float
:param x_hi: second point x
:type x_hi: float
:param y_low: first point y
:type y_low: float
:param y_hi: second point y
:type y_hi: float
:return: the linear interpolation of given x
:rtype: float<|endoftext|>
|
4d1514a72c158e8c3024e55904be06f663f15e6f62b61fe7eb898e11f41d8897
|
def read2byte(i2cBus: SMBus, i2cAddr: int, lowByteAddr: int, highByteAddr: int) -> int:
'\n Read two byte from i2c Bus by using SMBus library\n\n :param i2cBus: SMBus instance\n :type i2cBus: SMBus\n :param i2cAddr: i2c address, a one byte address\n :type i2cAddr: int\n :param lowByteAddr: the low byte for input address\n :type lowByteAddr: int\n :param highByteAddr: the high byte for input address\n :type highByteAddr: int\n :return: response data\n :rtype: int\n '
data_l = i2cBus.read_byte_data(i2cAddr, lowByteAddr)
data_h = i2cBus.read_byte_data(i2cAddr, highByteAddr)
data = (data_l | (data_h << 8))
data = struct.unpack('h', struct.pack('H', data))[0]
return data
|
Read two byte from i2c Bus by using SMBus library
:param i2cBus: SMBus instance
:type i2cBus: SMBus
:param i2cAddr: i2c address, a one byte address
:type i2cAddr: int
:param lowByteAddr: the low byte for input address
:type lowByteAddr: int
:param highByteAddr: the high byte for input address
:type highByteAddr: int
:return: response data
:rtype: int
|
src/main/python/programmingtheiot/cda/embedded/I2cHelper.py
|
read2byte
|
Taowyoo/-constrained-device-app
| 0
|
python
|
def read2byte(i2cBus: SMBus, i2cAddr: int, lowByteAddr: int, highByteAddr: int) -> int:
'\n Read two byte from i2c Bus by using SMBus library\n\n :param i2cBus: SMBus instance\n :type i2cBus: SMBus\n :param i2cAddr: i2c address, a one byte address\n :type i2cAddr: int\n :param lowByteAddr: the low byte for input address\n :type lowByteAddr: int\n :param highByteAddr: the high byte for input address\n :type highByteAddr: int\n :return: response data\n :rtype: int\n '
data_l = i2cBus.read_byte_data(i2cAddr, lowByteAddr)
data_h = i2cBus.read_byte_data(i2cAddr, highByteAddr)
data = (data_l | (data_h << 8))
data = struct.unpack('h', struct.pack('H', data))[0]
return data
|
def read2byte(i2cBus: SMBus, i2cAddr: int, lowByteAddr: int, highByteAddr: int) -> int:
'\n Read two byte from i2c Bus by using SMBus library\n\n :param i2cBus: SMBus instance\n :type i2cBus: SMBus\n :param i2cAddr: i2c address, a one byte address\n :type i2cAddr: int\n :param lowByteAddr: the low byte for input address\n :type lowByteAddr: int\n :param highByteAddr: the high byte for input address\n :type highByteAddr: int\n :return: response data\n :rtype: int\n '
data_l = i2cBus.read_byte_data(i2cAddr, lowByteAddr)
data_h = i2cBus.read_byte_data(i2cAddr, highByteAddr)
data = (data_l | (data_h << 8))
data = struct.unpack('h', struct.pack('H', data))[0]
return data<|docstring|>Read two byte from i2c Bus by using SMBus library
:param i2cBus: SMBus instance
:type i2cBus: SMBus
:param i2cAddr: i2c address, a one byte address
:type i2cAddr: int
:param lowByteAddr: the low byte for input address
:type lowByteAddr: int
:param highByteAddr: the high byte for input address
:type highByteAddr: int
:return: response data
:rtype: int<|endoftext|>
|
e1c79012ec713056e87eb6a26ffe701e58e26f1cd368e829c644b34d6539b31f
|
def main():
'Go!'
pgconn = get_dbconn('afos')
acursor = pgconn.cursor()
raw = sys.stdin.read()
data = raw.replace('\r\r\n', 'z')
tokens = re.findall('(K[A-Z0-9]{3} [DM]S.*?[=N]z)', data)
nws = product.TextProduct(raw)
sql = 'INSERT into products (pil, data, source, wmo, entered) values(%s,%s,%s,%s,%s) '
for token in tokens:
sqlargs = (('%s%s' % (sys.argv[1], token[1:4])), token.replace('z', '\n'), nws.source, nws.wmo, nws.valid.strftime('%Y-%m-%d %H:%M+00'))
acursor.execute(sql, sqlargs)
acursor.close()
pgconn.commit()
pgconn.close()
|
Go!
|
parsers/pywwa/workflows/dsm2afos.py
|
main
|
akrherz/pyWWA
| 9
|
python
|
def main():
pgconn = get_dbconn('afos')
acursor = pgconn.cursor()
raw = sys.stdin.read()
data = raw.replace('\r\r\n', 'z')
tokens = re.findall('(K[A-Z0-9]{3} [DM]S.*?[=N]z)', data)
nws = product.TextProduct(raw)
sql = 'INSERT into products (pil, data, source, wmo, entered) values(%s,%s,%s,%s,%s) '
for token in tokens:
sqlargs = (('%s%s' % (sys.argv[1], token[1:4])), token.replace('z', '\n'), nws.source, nws.wmo, nws.valid.strftime('%Y-%m-%d %H:%M+00'))
acursor.execute(sql, sqlargs)
acursor.close()
pgconn.commit()
pgconn.close()
|
def main():
pgconn = get_dbconn('afos')
acursor = pgconn.cursor()
raw = sys.stdin.read()
data = raw.replace('\r\r\n', 'z')
tokens = re.findall('(K[A-Z0-9]{3} [DM]S.*?[=N]z)', data)
nws = product.TextProduct(raw)
sql = 'INSERT into products (pil, data, source, wmo, entered) values(%s,%s,%s,%s,%s) '
for token in tokens:
sqlargs = (('%s%s' % (sys.argv[1], token[1:4])), token.replace('z', '\n'), nws.source, nws.wmo, nws.valid.strftime('%Y-%m-%d %H:%M+00'))
acursor.execute(sql, sqlargs)
acursor.close()
pgconn.commit()
pgconn.close()<|docstring|>Go!<|endoftext|>
|
6791d06f763e3f8a6afc02f0e71f731d681a1abd51ae4d23ae453c24424c3412
|
def callAfterImport(self, f):
'Add f to the list of functions to call on exit'
if (not isinstance(f, types.FunctionType)):
raise TypeError('Argument must be a function!')
self.__funcs.append(f)
|
Add f to the list of functions to call on exit
|
yt/pmods.py
|
callAfterImport
|
ninaMc/yt
| 2
|
python
|
def callAfterImport(self, f):
if (not isinstance(f, types.FunctionType)):
raise TypeError('Argument must be a function!')
self.__funcs.append(f)
|
def callAfterImport(self, f):
if (not isinstance(f, types.FunctionType)):
raise TypeError('Argument must be a function!')
self.__funcs.append(f)<|docstring|>Add f to the list of functions to call on exit<|endoftext|>
|
e4484e9ce6c84bb0a6c368447f6ab77a50e310c765445e76066d53d9b65234a7
|
def draw_piechart(canvas, cx, cy, rx, ry, font, items, title, font_title):
'items is a sequence of [name, quantity]'
total_quant = reduce((lambda s, i: (s + i[1])), items, 0)
items.sort((lambda l, r: cmp(r[1], l[1])))
color = get_color()
color_map = dict([(name, color.next()) for (name, q) in items])
items.reverse()
canvas.color_space('fs', jagpdf.CS_DEVICE_RGB)
angle = (math.pi / 2.0)
color = get_color()
max_str_len = 0.0
canvas.line_join(jagpdf.LINE_JOIN_BEVEL)
for (name, quant) in items:
canvas.color('fs', *color_map[name])
sweep = (((quant * 2) * math.pi) / total_quant)
canvas.arc(cx, cy, rx, ry, angle, sweep)
canvas.line_to(cx, cy)
canvas.path_close()
canvas.path_paint('fs')
angle += sweep
max_str_len = max(max_str_len, font.advance(name))
items.reverse()
(legend_x, legend_y) = ((cx - rx), ((cy + ry) + ((1 + len(items)) * font.height())))
y = 0
box_h = (font.bbox_ymax() - font.bbox_ymin())
box_w = 20
for (name, quant) in items:
canvas.color('f', *color_map[name])
canvas.rectangle(legend_x, ((legend_y - y) + font.bbox_ymin()), box_w, box_h)
canvas.path_paint('f')
y += font.height()
canvas.text_font(font)
canvas.text_start(((legend_x + box_w) + 8), legend_y)
perc_offset = (max_str_len + 10)
canvas.color('f', 0, 0, 0)
for (name, quant) in items:
canvas.text(('%s' % name))
canvas.text_translate_line(perc_offset, 0)
canvas.text(('%.2f%%' % ((100.0 * quant) / total_quant)))
canvas.text_translate_line((- perc_offset), (- font.height()))
canvas.text_end()
canvas.text_font(font_title)
canvas.color('f', 0, 0, 0)
title_w = font_title.advance(title)
canvas.text((legend_x + (((2 * rx) - title_w) / 2.0)), (legend_y + (1.4 * font_title.height())), title)
|
items is a sequence of [name, quantity]
|
code/test/apitest/py/piechart.py
|
draw_piechart
|
jgresula/jagpdf
| 54
|
python
|
def draw_piechart(canvas, cx, cy, rx, ry, font, items, title, font_title):
total_quant = reduce((lambda s, i: (s + i[1])), items, 0)
items.sort((lambda l, r: cmp(r[1], l[1])))
color = get_color()
color_map = dict([(name, color.next()) for (name, q) in items])
items.reverse()
canvas.color_space('fs', jagpdf.CS_DEVICE_RGB)
angle = (math.pi / 2.0)
color = get_color()
max_str_len = 0.0
canvas.line_join(jagpdf.LINE_JOIN_BEVEL)
for (name, quant) in items:
canvas.color('fs', *color_map[name])
sweep = (((quant * 2) * math.pi) / total_quant)
canvas.arc(cx, cy, rx, ry, angle, sweep)
canvas.line_to(cx, cy)
canvas.path_close()
canvas.path_paint('fs')
angle += sweep
max_str_len = max(max_str_len, font.advance(name))
items.reverse()
(legend_x, legend_y) = ((cx - rx), ((cy + ry) + ((1 + len(items)) * font.height())))
y = 0
box_h = (font.bbox_ymax() - font.bbox_ymin())
box_w = 20
for (name, quant) in items:
canvas.color('f', *color_map[name])
canvas.rectangle(legend_x, ((legend_y - y) + font.bbox_ymin()), box_w, box_h)
canvas.path_paint('f')
y += font.height()
canvas.text_font(font)
canvas.text_start(((legend_x + box_w) + 8), legend_y)
perc_offset = (max_str_len + 10)
canvas.color('f', 0, 0, 0)
for (name, quant) in items:
canvas.text(('%s' % name))
canvas.text_translate_line(perc_offset, 0)
canvas.text(('%.2f%%' % ((100.0 * quant) / total_quant)))
canvas.text_translate_line((- perc_offset), (- font.height()))
canvas.text_end()
canvas.text_font(font_title)
canvas.color('f', 0, 0, 0)
title_w = font_title.advance(title)
canvas.text((legend_x + (((2 * rx) - title_w) / 2.0)), (legend_y + (1.4 * font_title.height())), title)
|
def draw_piechart(canvas, cx, cy, rx, ry, font, items, title, font_title):
total_quant = reduce((lambda s, i: (s + i[1])), items, 0)
items.sort((lambda l, r: cmp(r[1], l[1])))
color = get_color()
color_map = dict([(name, color.next()) for (name, q) in items])
items.reverse()
canvas.color_space('fs', jagpdf.CS_DEVICE_RGB)
angle = (math.pi / 2.0)
color = get_color()
max_str_len = 0.0
canvas.line_join(jagpdf.LINE_JOIN_BEVEL)
for (name, quant) in items:
canvas.color('fs', *color_map[name])
sweep = (((quant * 2) * math.pi) / total_quant)
canvas.arc(cx, cy, rx, ry, angle, sweep)
canvas.line_to(cx, cy)
canvas.path_close()
canvas.path_paint('fs')
angle += sweep
max_str_len = max(max_str_len, font.advance(name))
items.reverse()
(legend_x, legend_y) = ((cx - rx), ((cy + ry) + ((1 + len(items)) * font.height())))
y = 0
box_h = (font.bbox_ymax() - font.bbox_ymin())
box_w = 20
for (name, quant) in items:
canvas.color('f', *color_map[name])
canvas.rectangle(legend_x, ((legend_y - y) + font.bbox_ymin()), box_w, box_h)
canvas.path_paint('f')
y += font.height()
canvas.text_font(font)
canvas.text_start(((legend_x + box_w) + 8), legend_y)
perc_offset = (max_str_len + 10)
canvas.color('f', 0, 0, 0)
for (name, quant) in items:
canvas.text(('%s' % name))
canvas.text_translate_line(perc_offset, 0)
canvas.text(('%.2f%%' % ((100.0 * quant) / total_quant)))
canvas.text_translate_line((- perc_offset), (- font.height()))
canvas.text_end()
canvas.text_font(font_title)
canvas.color('f', 0, 0, 0)
title_w = font_title.advance(title)
canvas.text((legend_x + (((2 * rx) - title_w) / 2.0)), (legend_y + (1.4 * font_title.height())), title)<|docstring|>items is a sequence of [name, quantity]<|endoftext|>
|
7b471719ca12923be1bb5aa0f6b4a29e588cdd4fc8f88c0ac794ebe5468e4474
|
def open_clean_bands(band_path, crop_extent, valid_range=(0, 10000)):
'Open and mask a single landsat band using a pixel_qa layer.\n\n Parameters\n -----------\n band_path : string\n A path to the array to be opened\n crop-extent : geopandas.dataframe\n shape file 2d array used to clip tif arrays\n valid_range : tuple (optional)\n A tuple of min and max range of values for the data. Default = None\n\n\n Returns\n -----------\n arr : xarray DataArray\n An xarray DataArray with values that should be masked set to 1 for True (Boolean)\n '
band = rxr.open_rasterio(band_path, masked=True).rio.clip(crop_extent.geometry, from_disk=True).squeeze()
if valid_range:
mask = ((band < valid_range[0]) | (band > valid_range[1]))
band = band.where((~ xr.where(mask, True, False)))
return band
|
Open and mask a single landsat band using a pixel_qa layer.
Parameters
-----------
band_path : string
A path to the array to be opened
crop-extent : geopandas.dataframe
shape file 2d array used to clip tif arrays
valid_range : tuple (optional)
A tuple of min and max range of values for the data. Default = None
Returns
-----------
arr : xarray DataArray
An xarray DataArray with values that should be masked set to 1 for True (Boolean)
|
kraft-jennifer-ndvi-automation.py
|
open_clean_bands
|
gnarledbranches/ea-2021-ndvi-automation-review
| 0
|
python
|
def open_clean_bands(band_path, crop_extent, valid_range=(0, 10000)):
'Open and mask a single landsat band using a pixel_qa layer.\n\n Parameters\n -----------\n band_path : string\n A path to the array to be opened\n crop-extent : geopandas.dataframe\n shape file 2d array used to clip tif arrays\n valid_range : tuple (optional)\n A tuple of min and max range of values for the data. Default = None\n\n\n Returns\n -----------\n arr : xarray DataArray\n An xarray DataArray with values that should be masked set to 1 for True (Boolean)\n '
band = rxr.open_rasterio(band_path, masked=True).rio.clip(crop_extent.geometry, from_disk=True).squeeze()
if valid_range:
mask = ((band < valid_range[0]) | (band > valid_range[1]))
band = band.where((~ xr.where(mask, True, False)))
return band
|
def open_clean_bands(band_path, crop_extent, valid_range=(0, 10000)):
'Open and mask a single landsat band using a pixel_qa layer.\n\n Parameters\n -----------\n band_path : string\n A path to the array to be opened\n crop-extent : geopandas.dataframe\n shape file 2d array used to clip tif arrays\n valid_range : tuple (optional)\n A tuple of min and max range of values for the data. Default = None\n\n\n Returns\n -----------\n arr : xarray DataArray\n An xarray DataArray with values that should be masked set to 1 for True (Boolean)\n '
band = rxr.open_rasterio(band_path, masked=True).rio.clip(crop_extent.geometry, from_disk=True).squeeze()
if valid_range:
mask = ((band < valid_range[0]) | (band > valid_range[1]))
band = band.where((~ xr.where(mask, True, False)))
return band<|docstring|>Open and mask a single landsat band using a pixel_qa layer.
Parameters
-----------
band_path : string
A path to the array to be opened
crop-extent : geopandas.dataframe
shape file 2d array used to clip tif arrays
valid_range : tuple (optional)
A tuple of min and max range of values for the data. Default = None
Returns
-----------
arr : xarray DataArray
An xarray DataArray with values that should be masked set to 1 for True (Boolean)<|endoftext|>
|
615fbe9a01452f045ccc0aae32acc63f24784bbb7f33c03bc360066f95531dd6
|
def mask_crop_ndvi(all_bands, crop_bound, pixel_qa, vals):
'Open and mask a single landsat band using a pixel_qa layer.\n\n Parameters\n -----------\n all_bands : list\n a list containing the xarray objects for landsat bands 4 and 5\n crop_bound: geopandas GeoDataFrame\n A geopandas dataframe to be used to crop the raster data using rasterio mask().\n pixel_qa: xarray DataArray\n An xarray DataArray with pixel qa values that have not yet been turned into a mask (0s and 1s)\n vals: list\n A list of values needed to create the cloud mask\n\n\n Returns\n -----------\n ndvi_crop : Xarray Dataset\n a cropped and masked xarray object containing NDVI values\n '
crop_json = crop_bound.geometry
cl_mask_crop = pixel_qa.rio.clip(crop_json)
ndvi_xr = ((all_bands[1] - all_bands[0]) / (all_bands[1] + all_bands[0]))
ndvi_crop = ndvi_xr.rio.clip(crop_json)
ndvi_crop = ndvi_crop.where((~ cl_mask_crop.isin(vals)))
return ndvi_crop
|
Open and mask a single landsat band using a pixel_qa layer.
Parameters
-----------
all_bands : list
a list containing the xarray objects for landsat bands 4 and 5
crop_bound: geopandas GeoDataFrame
A geopandas dataframe to be used to crop the raster data using rasterio mask().
pixel_qa: xarray DataArray
An xarray DataArray with pixel qa values that have not yet been turned into a mask (0s and 1s)
vals: list
A list of values needed to create the cloud mask
Returns
-----------
ndvi_crop : Xarray Dataset
a cropped and masked xarray object containing NDVI values
|
kraft-jennifer-ndvi-automation.py
|
mask_crop_ndvi
|
gnarledbranches/ea-2021-ndvi-automation-review
| 0
|
python
|
def mask_crop_ndvi(all_bands, crop_bound, pixel_qa, vals):
'Open and mask a single landsat band using a pixel_qa layer.\n\n Parameters\n -----------\n all_bands : list\n a list containing the xarray objects for landsat bands 4 and 5\n crop_bound: geopandas GeoDataFrame\n A geopandas dataframe to be used to crop the raster data using rasterio mask().\n pixel_qa: xarray DataArray\n An xarray DataArray with pixel qa values that have not yet been turned into a mask (0s and 1s)\n vals: list\n A list of values needed to create the cloud mask\n\n\n Returns\n -----------\n ndvi_crop : Xarray Dataset\n a cropped and masked xarray object containing NDVI values\n '
crop_json = crop_bound.geometry
cl_mask_crop = pixel_qa.rio.clip(crop_json)
ndvi_xr = ((all_bands[1] - all_bands[0]) / (all_bands[1] + all_bands[0]))
ndvi_crop = ndvi_xr.rio.clip(crop_json)
ndvi_crop = ndvi_crop.where((~ cl_mask_crop.isin(vals)))
return ndvi_crop
|
def mask_crop_ndvi(all_bands, crop_bound, pixel_qa, vals):
'Open and mask a single landsat band using a pixel_qa layer.\n\n Parameters\n -----------\n all_bands : list\n a list containing the xarray objects for landsat bands 4 and 5\n crop_bound: geopandas GeoDataFrame\n A geopandas dataframe to be used to crop the raster data using rasterio mask().\n pixel_qa: xarray DataArray\n An xarray DataArray with pixel qa values that have not yet been turned into a mask (0s and 1s)\n vals: list\n A list of values needed to create the cloud mask\n\n\n Returns\n -----------\n ndvi_crop : Xarray Dataset\n a cropped and masked xarray object containing NDVI values\n '
crop_json = crop_bound.geometry
cl_mask_crop = pixel_qa.rio.clip(crop_json)
ndvi_xr = ((all_bands[1] - all_bands[0]) / (all_bands[1] + all_bands[0]))
ndvi_crop = ndvi_xr.rio.clip(crop_json)
ndvi_crop = ndvi_crop.where((~ cl_mask_crop.isin(vals)))
return ndvi_crop<|docstring|>Open and mask a single landsat band using a pixel_qa layer.
Parameters
-----------
all_bands : list
a list containing the xarray objects for landsat bands 4 and 5
crop_bound: geopandas GeoDataFrame
A geopandas dataframe to be used to crop the raster data using rasterio mask().
pixel_qa: xarray DataArray
An xarray DataArray with pixel qa values that have not yet been turned into a mask (0s and 1s)
vals: list
A list of values needed to create the cloud mask
Returns
-----------
ndvi_crop : Xarray Dataset
a cropped and masked xarray object containing NDVI values<|endoftext|>
|
5aa55c4a16010b702bb6141ef97276045b361579e00ae599db4fd2eaa24aabd2
|
def _step(self, payload):
' Process next token in sequence and return with:\n ``None`` if it was the last needed exchange\n ``tuple`` tuple with new token and a boolean whether it requires an\n answer token\n '
try:
data = self._authenticator.send(payload)
except StopIteration:
return
else:
return data
|
Process next token in sequence and return with:
``None`` if it was the last needed exchange
``tuple`` tuple with new token and a boolean whether it requires an
answer token
|
aiokafka/conn.py
|
_step
|
fortum-tech/aiokafka
| 731
|
python
|
def _step(self, payload):
' Process next token in sequence and return with:\n ``None`` if it was the last needed exchange\n ``tuple`` tuple with new token and a boolean whether it requires an\n answer token\n '
try:
data = self._authenticator.send(payload)
except StopIteration:
return
else:
return data
|
def _step(self, payload):
' Process next token in sequence and return with:\n ``None`` if it was the last needed exchange\n ``tuple`` tuple with new token and a boolean whether it requires an\n answer token\n '
try:
data = self._authenticator.send(payload)
except StopIteration:
return
else:
return data<|docstring|>Process next token in sequence and return with:
``None`` if it was the last needed exchange
``tuple`` tuple with new token and a boolean whether it requires an
answer token<|endoftext|>
|
ee23b6fef3f607b639e4a9d12bf24c8baf135af589ab489aa60565c5287cffdf
|
def authenticator_plain(self):
' Automaton to authenticate with SASL tokens\n '
data = '\x00'.join([self._sasl_plain_username, self._sasl_plain_username, self._sasl_plain_password]).encode('utf-8')
resp = (yield (data, True))
assert (resp == b''), 'Server should either close or send an empty response'
|
Automaton to authenticate with SASL tokens
|
aiokafka/conn.py
|
authenticator_plain
|
fortum-tech/aiokafka
| 731
|
python
|
def authenticator_plain(self):
' \n '
data = '\x00'.join([self._sasl_plain_username, self._sasl_plain_username, self._sasl_plain_password]).encode('utf-8')
resp = (yield (data, True))
assert (resp == b), 'Server should either close or send an empty response'
|
def authenticator_plain(self):
' \n '
data = '\x00'.join([self._sasl_plain_username, self._sasl_plain_username, self._sasl_plain_password]).encode('utf-8')
resp = (yield (data, True))
assert (resp == b), 'Server should either close or send an empty response'<|docstring|>Automaton to authenticate with SASL tokens<|endoftext|>
|
befe95b0030fb6c5840e78e6847d28a1a208a927612655c6cb018a92059ecc74
|
def _token_extensions(self):
'\n Return a string representation of the OPTIONAL key-value pairs\n that can be sent with an OAUTHBEARER initial request.\n '
if callable(getattr(self._sasl_oauth_token_provider, 'extensions', None)):
extensions = self._sasl_oauth_token_provider.extensions()
if (len(extensions) > 0):
msg = '\x01'.join([f'{k}={v}' for (k, v) in extensions.items()])
return ('\x01' + msg)
return ''
|
Return a string representation of the OPTIONAL key-value pairs
that can be sent with an OAUTHBEARER initial request.
|
aiokafka/conn.py
|
_token_extensions
|
fortum-tech/aiokafka
| 731
|
python
|
def _token_extensions(self):
'\n Return a string representation of the OPTIONAL key-value pairs\n that can be sent with an OAUTHBEARER initial request.\n '
if callable(getattr(self._sasl_oauth_token_provider, 'extensions', None)):
extensions = self._sasl_oauth_token_provider.extensions()
if (len(extensions) > 0):
msg = '\x01'.join([f'{k}={v}' for (k, v) in extensions.items()])
return ('\x01' + msg)
return
|
def _token_extensions(self):
'\n Return a string representation of the OPTIONAL key-value pairs\n that can be sent with an OAUTHBEARER initial request.\n '
if callable(getattr(self._sasl_oauth_token_provider, 'extensions', None)):
extensions = self._sasl_oauth_token_provider.extensions()
if (len(extensions) > 0):
msg = '\x01'.join([f'{k}={v}' for (k, v) in extensions.items()])
return ('\x01' + msg)
return <|docstring|>Return a string representation of the OPTIONAL key-value pairs
that can be sent with an OAUTHBEARER initial request.<|endoftext|>
|
b33f07c630e6221905e2a5a93ee5d3d927c9c405412c718eb315714a713fbc1e
|
def _extract_region(host):
'Extract region from Amazon S3 host.'
tokens = host.split('.')
token = tokens[1]
if (token == 'dualstack'):
token = tokens[2]
if (token == 'amazonaws'):
return None
return token
|
Extract region from Amazon S3 host.
|
minio/definitions.py
|
_extract_region
|
dtaniwaki/minio-py
| 0
|
python
|
def _extract_region(host):
tokens = host.split('.')
token = tokens[1]
if (token == 'dualstack'):
token = tokens[2]
if (token == 'amazonaws'):
return None
return token
|
def _extract_region(host):
tokens = host.split('.')
token = tokens[1]
if (token == 'dualstack'):
token = tokens[2]
if (token == 'amazonaws'):
return None
return token<|docstring|>Extract region from Amazon S3 host.<|endoftext|>
|
2821562f5f6c9234a0abad37d968cc739d8c9a271a858419c75c1d91cd236417
|
@property
def region(self):
'Get region.'
return self._region
|
Get region.
|
minio/definitions.py
|
region
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def region(self):
return self._region
|
@property
def region(self):
return self._region<|docstring|>Get region.<|endoftext|>
|
a3f277ea2ded84b523c52739c1611fd1c8ac3f7e09c537875370ffb191936f44
|
@property
def is_https(self):
'Check if scheme is HTTPS.'
return (self._url.scheme == 'https')
|
Check if scheme is HTTPS.
|
minio/definitions.py
|
is_https
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def is_https(self):
return (self._url.scheme == 'https')
|
@property
def is_https(self):
return (self._url.scheme == 'https')<|docstring|>Check if scheme is HTTPS.<|endoftext|>
|
e7ba514945e822ab38c541ec0be4d7b7115eaaa17b2df312e45ec0fc6a1b702d
|
@property
def host(self):
'Get hostname.'
return self._url.netloc
|
Get hostname.
|
minio/definitions.py
|
host
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def host(self):
return self._url.netloc
|
@property
def host(self):
return self._url.netloc<|docstring|>Get hostname.<|endoftext|>
|
2a8f066e5f679595ccbb97cffe639de97c9d7027799f4a771e6ae94617a3dff5
|
@property
def is_aws_host(self):
'Check if URL points to AWS host.'
return self._is_aws_host
|
Check if URL points to AWS host.
|
minio/definitions.py
|
is_aws_host
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def is_aws_host(self):
return self._is_aws_host
|
@property
def is_aws_host(self):
return self._is_aws_host<|docstring|>Check if URL points to AWS host.<|endoftext|>
|
aef84d12659471f3c411e887357dc5b1afc69d5bc171ed3f1502ea6bea8b9156
|
@property
def accelerate_host_flag(self):
'Check if URL points to AWS accelerate host.'
return self._accelerate_host_flag
|
Check if URL points to AWS accelerate host.
|
minio/definitions.py
|
accelerate_host_flag
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def accelerate_host_flag(self):
return self._accelerate_host_flag
|
@property
def accelerate_host_flag(self):
return self._accelerate_host_flag<|docstring|>Check if URL points to AWS accelerate host.<|endoftext|>
|
b4b5109543c2af6a64fd24d3fe95dc0f1b3b61dc06ffa27d0795a6b55564c552
|
@accelerate_host_flag.setter
def accelerate_host_flag(self, flag):
'Check if URL points to AWS accelerate host.'
if self._is_aws_host:
self._accelerate_host_flag = flag
|
Check if URL points to AWS accelerate host.
|
minio/definitions.py
|
accelerate_host_flag
|
dtaniwaki/minio-py
| 0
|
python
|
@accelerate_host_flag.setter
def accelerate_host_flag(self, flag):
if self._is_aws_host:
self._accelerate_host_flag = flag
|
@accelerate_host_flag.setter
def accelerate_host_flag(self, flag):
if self._is_aws_host:
self._accelerate_host_flag = flag<|docstring|>Check if URL points to AWS accelerate host.<|endoftext|>
|
b3b0b5926809995428524782228740c7b7c4a6cf86db34a4a415fc8d187f85db
|
@property
def dualstack_host_flag(self):
'Check if URL points to AWS dualstack host.'
return self._dualstack_host_flag
|
Check if URL points to AWS dualstack host.
|
minio/definitions.py
|
dualstack_host_flag
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def dualstack_host_flag(self):
return self._dualstack_host_flag
|
@property
def dualstack_host_flag(self):
return self._dualstack_host_flag<|docstring|>Check if URL points to AWS dualstack host.<|endoftext|>
|
b1a96de9ae5951aac80d3ef19b8b362825d63867f4c80b7213e8e4cb68018696
|
@dualstack_host_flag.setter
def dualstack_host_flag(self, flag):
'Check to use virtual style or not.'
if self._is_aws_host:
self._dualstack_host_flag = flag
|
Check to use virtual style or not.
|
minio/definitions.py
|
dualstack_host_flag
|
dtaniwaki/minio-py
| 0
|
python
|
@dualstack_host_flag.setter
def dualstack_host_flag(self, flag):
if self._is_aws_host:
self._dualstack_host_flag = flag
|
@dualstack_host_flag.setter
def dualstack_host_flag(self, flag):
if self._is_aws_host:
self._dualstack_host_flag = flag<|docstring|>Check to use virtual style or not.<|endoftext|>
|
c717d5a1ccf8d4c3331737ff2cf49efef806a412cab9b0400d4e4d67c90d6c55
|
@property
def virtual_style_flag(self):
'Check to use virtual style or not.'
return self._virtual_style_flag
|
Check to use virtual style or not.
|
minio/definitions.py
|
virtual_style_flag
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def virtual_style_flag(self):
return self._virtual_style_flag
|
@property
def virtual_style_flag(self):
return self._virtual_style_flag<|docstring|>Check to use virtual style or not.<|endoftext|>
|
d1f37c345c67861e5446d12e3e21aad654dcb51337885a90588b4d65f3488057
|
@virtual_style_flag.setter
def virtual_style_flag(self, flag):
'Check to use virtual style or not.'
self._virtual_style_flag = flag
|
Check to use virtual style or not.
|
minio/definitions.py
|
virtual_style_flag
|
dtaniwaki/minio-py
| 0
|
python
|
@virtual_style_flag.setter
def virtual_style_flag(self, flag):
self._virtual_style_flag = flag
|
@virtual_style_flag.setter
def virtual_style_flag(self, flag):
self._virtual_style_flag = flag<|docstring|>Check to use virtual style or not.<|endoftext|>
|
e81f847d03bf1de71cadc2443e5aac8ffacb32ae8b2742eeddce2ef98d643293
|
def build(self, method, region, bucket_name=None, object_name=None, query_params=None):
'Build URL for given information.'
if ((not bucket_name) and object_name):
raise ValueError('empty bucket name for object name {0}'.format(object_name))
query = []
for (key, values) in sorted((query_params or {}).items()):
values = (values if isinstance(values, (list, tuple)) else [values])
query += ['{0}={1}'.format(queryencode(key), queryencode(value)) for value in sorted(values)]
url = url_replace(self._url, query='&'.join(query))
host = self._url.netloc
if (not bucket_name):
url = url_replace(url, path='/')
return (url_replace(url, netloc=((('s3.' + region) + '.') + host)) if self._is_aws_host else url)
enforce_path_style = (((method == 'PUT') and (not object_name) and (not query_params)) or (query_params and query_params.get('location')) or (('.' in bucket_name) and (self._url.scheme == 'https')))
if self._is_aws_host:
s3_domain = 's3.'
if self._accelerate_host_flag:
if ('.' in bucket_name):
raise ValueError("bucket name '{0}' with '.' is not allowed for accelerated endpoint".format(bucket_name))
if (not enforce_path_style):
s3_domain = 's3-accelerate.'
dual_stack = ('dualstack.' if self._dualstack_host_flag else '')
endpoint = (s3_domain + dual_stack)
if (enforce_path_style or (not self._accelerate_host_flag)):
endpoint += (region + '.')
host = (endpoint + host)
if (enforce_path_style or (not self._virtual_style_flag)):
url = url_replace(url, netloc=host)
url = url_replace(url, path=('/' + bucket_name))
else:
url = url_replace(url, netloc=((bucket_name + '.') + host), path='/')
if object_name:
path = url.path
path += (('' if path.endswith('/') else '/') + quote(object_name))
url = url_replace(url, path=path)
return url
|
Build URL for given information.
|
minio/definitions.py
|
build
|
dtaniwaki/minio-py
| 0
|
python
|
def build(self, method, region, bucket_name=None, object_name=None, query_params=None):
if ((not bucket_name) and object_name):
raise ValueError('empty bucket name for object name {0}'.format(object_name))
query = []
for (key, values) in sorted((query_params or {}).items()):
values = (values if isinstance(values, (list, tuple)) else [values])
query += ['{0}={1}'.format(queryencode(key), queryencode(value)) for value in sorted(values)]
url = url_replace(self._url, query='&'.join(query))
host = self._url.netloc
if (not bucket_name):
url = url_replace(url, path='/')
return (url_replace(url, netloc=((('s3.' + region) + '.') + host)) if self._is_aws_host else url)
enforce_path_style = (((method == 'PUT') and (not object_name) and (not query_params)) or (query_params and query_params.get('location')) or (('.' in bucket_name) and (self._url.scheme == 'https')))
if self._is_aws_host:
s3_domain = 's3.'
if self._accelerate_host_flag:
if ('.' in bucket_name):
raise ValueError("bucket name '{0}' with '.' is not allowed for accelerated endpoint".format(bucket_name))
if (not enforce_path_style):
s3_domain = 's3-accelerate.'
dual_stack = ('dualstack.' if self._dualstack_host_flag else )
endpoint = (s3_domain + dual_stack)
if (enforce_path_style or (not self._accelerate_host_flag)):
endpoint += (region + '.')
host = (endpoint + host)
if (enforce_path_style or (not self._virtual_style_flag)):
url = url_replace(url, netloc=host)
url = url_replace(url, path=('/' + bucket_name))
else:
url = url_replace(url, netloc=((bucket_name + '.') + host), path='/')
if object_name:
path = url.path
path += (( if path.endswith('/') else '/') + quote(object_name))
url = url_replace(url, path=path)
return url
|
def build(self, method, region, bucket_name=None, object_name=None, query_params=None):
if ((not bucket_name) and object_name):
raise ValueError('empty bucket name for object name {0}'.format(object_name))
query = []
for (key, values) in sorted((query_params or {}).items()):
values = (values if isinstance(values, (list, tuple)) else [values])
query += ['{0}={1}'.format(queryencode(key), queryencode(value)) for value in sorted(values)]
url = url_replace(self._url, query='&'.join(query))
host = self._url.netloc
if (not bucket_name):
url = url_replace(url, path='/')
return (url_replace(url, netloc=((('s3.' + region) + '.') + host)) if self._is_aws_host else url)
enforce_path_style = (((method == 'PUT') and (not object_name) and (not query_params)) or (query_params and query_params.get('location')) or (('.' in bucket_name) and (self._url.scheme == 'https')))
if self._is_aws_host:
s3_domain = 's3.'
if self._accelerate_host_flag:
if ('.' in bucket_name):
raise ValueError("bucket name '{0}' with '.' is not allowed for accelerated endpoint".format(bucket_name))
if (not enforce_path_style):
s3_domain = 's3-accelerate.'
dual_stack = ('dualstack.' if self._dualstack_host_flag else )
endpoint = (s3_domain + dual_stack)
if (enforce_path_style or (not self._accelerate_host_flag)):
endpoint += (region + '.')
host = (endpoint + host)
if (enforce_path_style or (not self._virtual_style_flag)):
url = url_replace(url, netloc=host)
url = url_replace(url, path=('/' + bucket_name))
else:
url = url_replace(url, netloc=((bucket_name + '.') + host), path='/')
if object_name:
path = url.path
path += (( if path.endswith('/') else '/') + quote(object_name))
url = url_replace(url, path=path)
return url<|docstring|>Build URL for given information.<|endoftext|>
|
316a3840f9344132a24e84dee1cc2ea30ee02f5dc8a7d048dcb49a5be77f0ea4
|
@property
def status(self):
'Get status.'
return (self._status or 'Off')
|
Get status.
|
minio/definitions.py
|
status
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def status(self):
return (self._status or 'Off')
|
@property
def status(self):
return (self._status or 'Off')<|docstring|>Get status.<|endoftext|>
|
f90bebac1f0043a2a071e88e8500fee0602bb890a23d7c95cd1a518714b6105b
|
@property
def mfa_delete(self):
'Get MFA delete.'
return self._mfa_delete
|
Get MFA delete.
|
minio/definitions.py
|
mfa_delete
|
dtaniwaki/minio-py
| 0
|
python
|
@property
def mfa_delete(self):
return self._mfa_delete
|
@property
def mfa_delete(self):
return self._mfa_delete<|docstring|>Get MFA delete.<|endoftext|>
|
66774ab84ad2bd4e2e456b94fdb0686ce27d5fda745b895520e5d237641cf6f0
|
@property
def list_id(self):
'Get the id of this list'
return int(self._id)
|
Get the id of this list
|
tatoebatools/user_lists.py
|
list_id
|
eumiro/tatoebatools
| 14
|
python
|
@property
def list_id(self):
return int(self._id)
|
@property
def list_id(self):
return int(self._id)<|docstring|>Get the id of this list<|endoftext|>
|
3c910ed6095e62194f2ecab190a497d97d24063afc0030fd06daa92466b58f38
|
@property
def username(self):
'Get the name of the user that built this list'
return self._usr
|
Get the name of the user that built this list
|
tatoebatools/user_lists.py
|
username
|
eumiro/tatoebatools
| 14
|
python
|
@property
def username(self):
return self._usr
|
@property
def username(self):
return self._usr<|docstring|>Get the name of the user that built this list<|endoftext|>
|
1db0d6be1d2b44173cb81f6c0aeb48070e485d15cb4c170183f5a7ee34c0ea6f
|
@property
def date_created(self):
'Get the date when this list has been created'
try:
dt = datetime.strptime(self._dcr, '%Y-%m-%d %H:%M:%S')
except (ValueError, TypeError):
dt = None
finally:
return dt
|
Get the date when this list has been created
|
tatoebatools/user_lists.py
|
date_created
|
eumiro/tatoebatools
| 14
|
python
|
@property
def date_created(self):
try:
dt = datetime.strptime(self._dcr, '%Y-%m-%d %H:%M:%S')
except (ValueError, TypeError):
dt = None
finally:
return dt
|
@property
def date_created(self):
try:
dt = datetime.strptime(self._dcr, '%Y-%m-%d %H:%M:%S')
except (ValueError, TypeError):
dt = None
finally:
return dt<|docstring|>Get the date when this list has been created<|endoftext|>
|
a30f16e1d3483b821aec86895d6b57f010f8b5becf762b68b9c8941ef5f5b797
|
@property
def date_last_modified(self):
'Get the date when this list has been modified for the last time'
try:
dt = datetime.strptime(self._dlm, '%Y-%m-%d %H:%M:%S')
except (ValueError, TypeError):
dt = None
finally:
return dt
|
Get the date when this list has been modified for the last time
|
tatoebatools/user_lists.py
|
date_last_modified
|
eumiro/tatoebatools
| 14
|
python
|
@property
def date_last_modified(self):
try:
dt = datetime.strptime(self._dlm, '%Y-%m-%d %H:%M:%S')
except (ValueError, TypeError):
dt = None
finally:
return dt
|
@property
def date_last_modified(self):
try:
dt = datetime.strptime(self._dlm, '%Y-%m-%d %H:%M:%S')
except (ValueError, TypeError):
dt = None
finally:
return dt<|docstring|>Get the date when this list has been modified for the last time<|endoftext|>
|
67635f96d37368279c032d32020690fac4d735dcc95b351f0c82955821e8249e
|
@property
def list_name(self):
'Get the name of this list'
return self._nm
|
Get the name of this list
|
tatoebatools/user_lists.py
|
list_name
|
eumiro/tatoebatools
| 14
|
python
|
@property
def list_name(self):
return self._nm
|
@property
def list_name(self):
return self._nm<|docstring|>Get the name of this list<|endoftext|>
|
f65749d4e2f4692bb8f698eea3709420bd1674cf6b656bced00392f8d7af4808
|
@property
def editable_by(self):
'Get the users that can edit this list'
return self._edb
|
Get the users that can edit this list
|
tatoebatools/user_lists.py
|
editable_by
|
eumiro/tatoebatools
| 14
|
python
|
@property
def editable_by(self):
return self._edb
|
@property
def editable_by(self):
return self._edb<|docstring|>Get the users that can edit this list<|endoftext|>
|
3ce08397c17c0a1a50002ffe7f3468efa7da7ca7c4ad6dd247ed882e474185e4
|
def setup_platform(hass, config, add_entities, discovery_info=None):
'Set up the beewi_smartclim platform.'
mac = config[CONF_MAC]
prefix = config[CONF_NAME]
poller = BeewiSmartClimPoller(mac)
sensors = []
for sensor_type in SENSOR_TYPES:
device = sensor_type[0]
name = sensor_type[1]
unit = sensor_type[2]
if prefix:
name = f'{prefix} {name}'
sensors.append(BeewiSmartclimSensor(poller, name, mac, device, unit))
add_entities(sensors)
|
Set up the beewi_smartclim platform.
|
homeassistant/components/beewi_smartclim/sensor.py
|
setup_platform
|
uSpike/home-assistant
| 23
|
python
|
def setup_platform(hass, config, add_entities, discovery_info=None):
mac = config[CONF_MAC]
prefix = config[CONF_NAME]
poller = BeewiSmartClimPoller(mac)
sensors = []
for sensor_type in SENSOR_TYPES:
device = sensor_type[0]
name = sensor_type[1]
unit = sensor_type[2]
if prefix:
name = f'{prefix} {name}'
sensors.append(BeewiSmartclimSensor(poller, name, mac, device, unit))
add_entities(sensors)
|
def setup_platform(hass, config, add_entities, discovery_info=None):
mac = config[CONF_MAC]
prefix = config[CONF_NAME]
poller = BeewiSmartClimPoller(mac)
sensors = []
for sensor_type in SENSOR_TYPES:
device = sensor_type[0]
name = sensor_type[1]
unit = sensor_type[2]
if prefix:
name = f'{prefix} {name}'
sensors.append(BeewiSmartclimSensor(poller, name, mac, device, unit))
add_entities(sensors)<|docstring|>Set up the beewi_smartclim platform.<|endoftext|>
|
9670af109e549a333ea504ac2ce8e118007bcddbf93782bdaf0a5252856dca3e
|
def __init__(self, poller, name, mac, device, unit):
'Initialize the sensor.'
self._poller = poller
self._name = name
self._mac = mac
self._device = device
self._unit = unit
self._state = None
|
Initialize the sensor.
|
homeassistant/components/beewi_smartclim/sensor.py
|
__init__
|
uSpike/home-assistant
| 23
|
python
|
def __init__(self, poller, name, mac, device, unit):
self._poller = poller
self._name = name
self._mac = mac
self._device = device
self._unit = unit
self._state = None
|
def __init__(self, poller, name, mac, device, unit):
self._poller = poller
self._name = name
self._mac = mac
self._device = device
self._unit = unit
self._state = None<|docstring|>Initialize the sensor.<|endoftext|>
|
c2acbec88b5ad13d0f458e2f3155e56fd2fabdb29665addbac450039553aa2e4
|
@property
def name(self):
'Return the name of the sensor.'
return self._name
|
Return the name of the sensor.
|
homeassistant/components/beewi_smartclim/sensor.py
|
name
|
uSpike/home-assistant
| 23
|
python
|
@property
def name(self):
return self._name
|
@property
def name(self):
return self._name<|docstring|>Return the name of the sensor.<|endoftext|>
|
af9383b4f8d846898f0c5ffbc31fb6855b88b0535c65dfc77939cc0fe65062f0
|
@property
def state(self):
'Return the state of the sensor. State is returned in Celsius.'
return self._state
|
Return the state of the sensor. State is returned in Celsius.
|
homeassistant/components/beewi_smartclim/sensor.py
|
state
|
uSpike/home-assistant
| 23
|
python
|
@property
def state(self):
return self._state
|
@property
def state(self):
return self._state<|docstring|>Return the state of the sensor. State is returned in Celsius.<|endoftext|>
|
b3fb604fdbe069422d1a48c645e54a797d924a71640a1f28ad2d42f3340b0be7
|
@property
def device_class(self):
'Device class of this entity.'
return self._device
|
Device class of this entity.
|
homeassistant/components/beewi_smartclim/sensor.py
|
device_class
|
uSpike/home-assistant
| 23
|
python
|
@property
def device_class(self):
return self._device
|
@property
def device_class(self):
return self._device<|docstring|>Device class of this entity.<|endoftext|>
|
82a0cebfc992988ad9e5758af53273a39e24112a8ac10a685d727b977da2ae86
|
@property
def unique_id(self):
'Return a unique, HASS-friendly identifier for this entity.'
return f'{self._mac}_{self._device}'
|
Return a unique, HASS-friendly identifier for this entity.
|
homeassistant/components/beewi_smartclim/sensor.py
|
unique_id
|
uSpike/home-assistant
| 23
|
python
|
@property
def unique_id(self):
return f'{self._mac}_{self._device}'
|
@property
def unique_id(self):
return f'{self._mac}_{self._device}'<|docstring|>Return a unique, HASS-friendly identifier for this entity.<|endoftext|>
|
e52a255383cc13af28340d31ceeb04005ae1e93c32d7c12296ccfaf60339c5f9
|
@property
def unit_of_measurement(self):
'Return the unit of measurement.'
return self._unit
|
Return the unit of measurement.
|
homeassistant/components/beewi_smartclim/sensor.py
|
unit_of_measurement
|
uSpike/home-assistant
| 23
|
python
|
@property
def unit_of_measurement(self):
return self._unit
|
@property
def unit_of_measurement(self):
return self._unit<|docstring|>Return the unit of measurement.<|endoftext|>
|
9844d1bf069ad56a724b6c6b27856fe16cc172a1f3669e06064d926a2373e82c
|
def update(self):
'Fetch new state data from the poller.'
self._poller.update_sensor()
self._state = None
if (self._device == DEVICE_CLASS_TEMPERATURE):
self._state = self._poller.get_temperature()
if (self._device == DEVICE_CLASS_HUMIDITY):
self._state = self._poller.get_humidity()
if (self._device == DEVICE_CLASS_BATTERY):
self._state = self._poller.get_battery()
|
Fetch new state data from the poller.
|
homeassistant/components/beewi_smartclim/sensor.py
|
update
|
uSpike/home-assistant
| 23
|
python
|
def update(self):
self._poller.update_sensor()
self._state = None
if (self._device == DEVICE_CLASS_TEMPERATURE):
self._state = self._poller.get_temperature()
if (self._device == DEVICE_CLASS_HUMIDITY):
self._state = self._poller.get_humidity()
if (self._device == DEVICE_CLASS_BATTERY):
self._state = self._poller.get_battery()
|
def update(self):
self._poller.update_sensor()
self._state = None
if (self._device == DEVICE_CLASS_TEMPERATURE):
self._state = self._poller.get_temperature()
if (self._device == DEVICE_CLASS_HUMIDITY):
self._state = self._poller.get_humidity()
if (self._device == DEVICE_CLASS_BATTERY):
self._state = self._poller.get_battery()<|docstring|>Fetch new state data from the poller.<|endoftext|>
|
9d2c6d985280063e4a2858685f8bcbcffbf12d01b6f459fa04432357c0492324
|
def test_specifiable_on_spaces(self):
'\n Tests complex Container Spaces for being constructable from_spec.\n '
np.random.seed(10)
space = Dict.from_spec(dict(a=Tuple(FloatBox(shape=(1, 1, 2))), b=float, c=dict(type=float, shape=(2,))), add_batch_rank=True)
recursive_assert_almost_equal(space.sample(), dict(a=(np.array([[[0.77132064, 0.02075195]]]),), b=0.6336482349262754, c=np.array([0.74880388, 0.49850701])))
space = Space.from_spec(dict(type='tuple', _args=[Dict(a=bool, b=IntBox(4), c=Dict(d=FloatBox(shape=()))), BoolBox(), FloatBox(shape=(3, 2)), Tuple(bool, BoolBox())]))
recursive_assert_almost_equal(space.sample(), (dict(a=False, b=0, c=dict(d=0.709208009843012)), True, np.array([[0.16911084, 0.08833981], [0.68535982, 0.95339335], [0.00394827, 0.51219226]], dtype=np.float32), (True, False)))
space = Dict.from_spec(dict(a=Tuple(float, FloatBox(shape=(1, 2, 2))), b=FloatBox(shape=(2, 2, 2, 2)), c=dict(type=float, shape=(2,))))
self.assertEqual(space.rank, ((0, 3), 4, 1))
self.assertEqual(space.shape, (((), (1, 2, 2)), (2, 2, 2, 2), (2,)))
self.assertEqual(space.get_shape(with_batch_rank=True), (((), (1, 2, 2)), (2, 2, 2, 2), (2,)))
space = Dict(a=Tuple(int, IntBox(2), FloatBox(shape=(4, 2))), b=FloatBox(shape=(2, 2)), c=dict(type=float, shape=(4,)), add_batch_rank=True, add_time_rank=True)
self.assertEqual(space.rank, ((0, 0, 2), 2, 1))
self.assertEqual(space.shape, (((), (), (4, 2)), (2, 2), (4,)))
self.assertEqual(space.get_shape(with_batch_rank=True), (((None,), (None,), (None, 4, 2)), (None, 2, 2), (None, 4)))
self.assertEqual(space.get_shape(with_time_rank=True), (((None,), (None,), (None, 4, 2)), (None, 2, 2), (None, 4)))
self.assertEqual(space.get_shape(with_batch_rank=True, with_time_rank=True), (((None, None), (None, None), (None, None, 4, 2)), (None, None, 2, 2), (None, None, 4)))
self.assertEqual(space.get_shape(with_batch_rank=True, with_time_rank=10, time_major=True), (((10, None), (10, None), (10, None, 4, 2)), (10, None, 2, 2), (10, None, 4)))
self.assertEqual(space.get_shape(with_batch_rank=5, with_time_rank=10, time_major=False), (((5, 10), (5, 10), (5, 10, 4, 2)), (5, 10, 2, 2), (5, 10, 4)))
|
Tests complex Container Spaces for being constructable from_spec.
|
rlgraph/tests/core/test_specifiables.py
|
test_specifiable_on_spaces
|
hgl71964/rlgraph
| 290
|
python
|
def test_specifiable_on_spaces(self):
    """Tests complex Container Spaces for being constructable from_spec."""
    # Fixed seed makes every space.sample() below deterministic, so exact
    # values can be asserted.
    np.random.seed(10)
    space = Dict.from_spec(dict(a=Tuple(FloatBox(shape=(1, 1, 2))), b=float, c=dict(type=float, shape=(2,))), add_batch_rank=True)
    recursive_assert_almost_equal(space.sample(), dict(a=(np.array([[[0.77132064, 0.02075195]]]),), b=0.6336482349262754, c=np.array([0.74880388, 0.49850701])))
    space = Space.from_spec(dict(type='tuple', _args=[Dict(a=bool, b=IntBox(4), c=Dict(d=FloatBox(shape=()))), BoolBox(), FloatBox(shape=(3, 2)), Tuple(bool, BoolBox())]))
    recursive_assert_almost_equal(space.sample(), (dict(a=False, b=0, c=dict(d=0.709208009843012)), True, np.array([[0.16911084, 0.08833981], [0.68535982, 0.95339335], [0.00394827, 0.51219226]], dtype=np.float32), (True, False)))
    space = Dict.from_spec(dict(a=Tuple(float, FloatBox(shape=(1, 2, 2))), b=FloatBox(shape=(2, 2, 2, 2)), c=dict(type=float, shape=(2,))))
    self.assertEqual(space.rank, ((0, 3), 4, 1))
    self.assertEqual(space.shape, (((), (1, 2, 2)), (2, 2, 2, 2), (2,)))
    self.assertEqual(space.get_shape(with_batch_rank=True), (((), (1, 2, 2)), (2, 2, 2, 2), (2,)))
    # With add_batch_rank/add_time_rank, extra leading None (or fixed-size)
    # dims are prepended by get_shape depending on its arguments.
    space = Dict(a=Tuple(int, IntBox(2), FloatBox(shape=(4, 2))), b=FloatBox(shape=(2, 2)), c=dict(type=float, shape=(4,)), add_batch_rank=True, add_time_rank=True)
    self.assertEqual(space.rank, ((0, 0, 2), 2, 1))
    self.assertEqual(space.shape, (((), (), (4, 2)), (2, 2), (4,)))
    self.assertEqual(space.get_shape(with_batch_rank=True), (((None,), (None,), (None, 4, 2)), (None, 2, 2), (None, 4)))
    self.assertEqual(space.get_shape(with_time_rank=True), (((None,), (None,), (None, 4, 2)), (None, 2, 2), (None, 4)))
    self.assertEqual(space.get_shape(with_batch_rank=True, with_time_rank=True), (((None, None), (None, None), (None, None, 4, 2)), (None, None, 2, 2), (None, None, 4)))
    self.assertEqual(space.get_shape(with_batch_rank=True, with_time_rank=10, time_major=True), (((10, None), (10, None), (10, None, 4, 2)), (10, None, 2, 2), (10, None, 4)))
    self.assertEqual(space.get_shape(with_batch_rank=5, with_time_rank=10, time_major=False), (((5, 10), (5, 10), (5, 10, 4, 2)), (5, 10, 2, 2), (5, 10, 4)))
|
def test_specifiable_on_spaces(self):
'\n \n '
np.random.seed(10)
space = Dict.from_spec(dict(a=Tuple(FloatBox(shape=(1, 1, 2))), b=float, c=dict(type=float, shape=(2,))), add_batch_rank=True)
recursive_assert_almost_equal(space.sample(), dict(a=(np.array([[[0.77132064, 0.02075195]]]),), b=0.6336482349262754, c=np.array([0.74880388, 0.49850701])))
space = Space.from_spec(dict(type='tuple', _args=[Dict(a=bool, b=IntBox(4), c=Dict(d=FloatBox(shape=()))), BoolBox(), FloatBox(shape=(3, 2)), Tuple(bool, BoolBox())]))
recursive_assert_almost_equal(space.sample(), (dict(a=False, b=0, c=dict(d=0.709208009843012)), True, np.array([[0.16911084, 0.08833981], [0.68535982, 0.95339335], [0.00394827, 0.51219226]], dtype=np.float32), (True, False)))
space = Dict.from_spec(dict(a=Tuple(float, FloatBox(shape=(1, 2, 2))), b=FloatBox(shape=(2, 2, 2, 2)), c=dict(type=float, shape=(2,))))
self.assertEqual(space.rank, ((0, 3), 4, 1))
self.assertEqual(space.shape, (((), (1, 2, 2)), (2, 2, 2, 2), (2,)))
self.assertEqual(space.get_shape(with_batch_rank=True), (((), (1, 2, 2)), (2, 2, 2, 2), (2,)))
space = Dict(a=Tuple(int, IntBox(2), FloatBox(shape=(4, 2))), b=FloatBox(shape=(2, 2)), c=dict(type=float, shape=(4,)), add_batch_rank=True, add_time_rank=True)
self.assertEqual(space.rank, ((0, 0, 2), 2, 1))
self.assertEqual(space.shape, (((), (), (4, 2)), (2, 2), (4,)))
self.assertEqual(space.get_shape(with_batch_rank=True), (((None,), (None,), (None, 4, 2)), (None, 2, 2), (None, 4)))
self.assertEqual(space.get_shape(with_time_rank=True), (((None,), (None,), (None, 4, 2)), (None, 2, 2), (None, 4)))
self.assertEqual(space.get_shape(with_batch_rank=True, with_time_rank=True), (((None, None), (None, None), (None, None, 4, 2)), (None, None, 2, 2), (None, None, 4)))
self.assertEqual(space.get_shape(with_batch_rank=True, with_time_rank=10, time_major=True), (((10, None), (10, None), (10, None, 4, 2)), (10, None, 2, 2), (10, None, 4)))
self.assertEqual(space.get_shape(with_batch_rank=5, with_time_rank=10, time_major=False), (((5, 10), (5, 10), (5, 10, 4, 2)), (5, 10, 2, 2), (5, 10, 4)))<|docstring|>Tests complex Container Spaces for being constructable from_spec.<|endoftext|>
|
fd0ea1a7b13a77d303aab97b458b726f4e40cf956e50b1187b43c39e37442c2d
|
def test_cancel_subscription(self):
    'Test case for cancel_subscription\n\n Cancels a subscription # noqa: E501\n '
    # Auto-generated API test stub; no assertions implemented yet.
    pass
|
Test case for cancel_subscription
Cancels a subscription # noqa: E501
|
test/test_subscription_api.py
|
test_cancel_subscription
|
antenny/antenny-py
| 0
|
python
|
def test_cancel_subscription(self):
'Test case for cancel_subscription\n\n Cancels a subscription # noqa: E501\n '
pass
|
def test_cancel_subscription(self):
'Test case for cancel_subscription\n\n Cancels a subscription # noqa: E501\n '
pass<|docstring|>Test case for cancel_subscription
Cancels a subscription # noqa: E501<|endoftext|>
|
74200f74ed32e7e767dc56e76b033d1296ccc2b95171730e58b6cfe4fdee48cb
|
def test_create_subscription(self):
    'Test case for create_subscription\n\n Creates a subscription # noqa: E501\n '
    # Auto-generated API test stub; no assertions implemented yet.
    pass
|
Test case for create_subscription
Creates a subscription # noqa: E501
|
test/test_subscription_api.py
|
test_create_subscription
|
antenny/antenny-py
| 0
|
python
|
def test_create_subscription(self):
'Test case for create_subscription\n\n Creates a subscription # noqa: E501\n '
pass
|
def test_create_subscription(self):
'Test case for create_subscription\n\n Creates a subscription # noqa: E501\n '
pass<|docstring|>Test case for create_subscription
Creates a subscription # noqa: E501<|endoftext|>
|
d74f181d3e05e67256403b91775a83ebc9a1b1e42a961ff2939b5ff010f29112
|
def test_get_subscription(self):
    'Test case for get_subscription\n\n Gets a subscription # noqa: E501\n '
    # Auto-generated API test stub; no assertions implemented yet.
    pass
|
Test case for get_subscription
Gets a subscription # noqa: E501
|
test/test_subscription_api.py
|
test_get_subscription
|
antenny/antenny-py
| 0
|
python
|
def test_get_subscription(self):
'Test case for get_subscription\n\n Gets a subscription # noqa: E501\n '
pass
|
def test_get_subscription(self):
'Test case for get_subscription\n\n Gets a subscription # noqa: E501\n '
pass<|docstring|>Test case for get_subscription
Gets a subscription # noqa: E501<|endoftext|>
|
7a9569f14d186a86b5e2b54b3adf7d3f51206485ddcae9e868aee3b59790f34c
|
def test_list_subscriptions(self):
    'Test case for list_subscriptions\n\n Gets a list of subscriptions # noqa: E501\n '
    # Auto-generated API test stub; no assertions implemented yet.
    pass
|
Test case for list_subscriptions
Gets a list of subscriptions # noqa: E501
|
test/test_subscription_api.py
|
test_list_subscriptions
|
antenny/antenny-py
| 0
|
python
|
def test_list_subscriptions(self):
'Test case for list_subscriptions\n\n Gets a list of subscriptions # noqa: E501\n '
pass
|
def test_list_subscriptions(self):
'Test case for list_subscriptions\n\n Gets a list of subscriptions # noqa: E501\n '
pass<|docstring|>Test case for list_subscriptions
Gets a list of subscriptions # noqa: E501<|endoftext|>
|
a90c29c2df56e3ef3556c6b23c7b02a9e78a2c6f22ead3c2e650e0bc98888e76
|
def __init__(self, binned_spectrum_list, reference_time=0.0, time_intervals=None):
    """
    A set of binned spectra with optional time intervals.

    :param binned_spectrum_list: list of binned spectra
    :param reference_time: reference time for the time intervals
    :param time_intervals: optional time-interval set; must have one
        interval per spectrum
    """
    self._binned_spectrum_list = binned_spectrum_list
    self._reference_time = reference_time
    if (time_intervals is not None):
        # Validate BEFORE storing so a bad call leaves no partial state
        # (the original shifted/assigned first and asserted afterwards,
        # and its message had a typo: "mus be").
        assert (len(time_intervals) == len(binned_spectrum_list)), 'time intervals must be the same length as binned spectra'
        # Express the intervals relative to the reference time.
        self._time_intervals = (time_intervals - reference_time)
    else:
        self._time_intervals = None
|
a set of binned spectra with optional time intervals
:param binned_spectrum_list: list of binned spectra
:param reference_time: reference time for time intervals
:param time_intervals: optional timeinterval set
|
threeML/utils/spectrum/binned_spectrum_set.py
|
__init__
|
domeckert/threeML
| 42
|
python
|
def __init__(self, binned_spectrum_list, reference_time=0.0, time_intervals=None):
'\n a set of binned spectra with optional time intervals\n\n :param binned_spectrum_list: lit of binned spectal\n :param reference_time: reference time for time intervals\n :param time_intervals: optional timeinterval set\n '
self._binned_spectrum_list = binned_spectrum_list
self._reference_time = reference_time
if (time_intervals is not None):
self._time_intervals = (time_intervals - reference_time)
assert (len(time_intervals) == len(binned_spectrum_list)), 'time intervals mus be the same length as binned spectra'
else:
self._time_intervals = None
|
def __init__(self, binned_spectrum_list, reference_time=0.0, time_intervals=None):
'\n a set of binned spectra with optional time intervals\n\n :param binned_spectrum_list: lit of binned spectal\n :param reference_time: reference time for time intervals\n :param time_intervals: optional timeinterval set\n '
self._binned_spectrum_list = binned_spectrum_list
self._reference_time = reference_time
if (time_intervals is not None):
self._time_intervals = (time_intervals - reference_time)
assert (len(time_intervals) == len(binned_spectrum_list)), 'time intervals mus be the same length as binned spectra'
else:
self._time_intervals = None<|docstring|>a set of binned spectra with optional time intervals
:param binned_spectrum_list: lit of binned spectal
:param reference_time: reference time for time intervals
:param time_intervals: optional timeinterval set<|endoftext|>
|
451f4a056dc180ed7c05d51c9ed4e4947d2a39c5a32d730fb1611e842b00f559
|
def time_to_index(self, time):
    """
    Get the index of the time interval containing the input time.

    :param time: time to search for
    :return: integer index into the interval set
    """
    intervals = self._time_intervals
    # A spectrum set built without intervals cannot answer time queries.
    assert (intervals is not None), 'This spectrum set has no time intervals'
    return intervals.containing_bin(time)
|
get the index of the input time
:param time: time to search for
:return: integer
|
threeML/utils/spectrum/binned_spectrum_set.py
|
time_to_index
|
domeckert/threeML
| 42
|
python
|
def time_to_index(self, time):
'\n get the index of the input time\n\n :param time: time to search for\n :return: integer\n '
assert (self._time_intervals is not None), 'This spectrum set has no time intervals'
return self._time_intervals.containing_bin(time)
|
def time_to_index(self, time):
'\n get the index of the input time\n\n :param time: time to search for\n :return: integer\n '
assert (self._time_intervals is not None), 'This spectrum set has no time intervals'
return self._time_intervals.containing_bin(time)<|docstring|>get the index of the input time
:param time: time to search for
:return: integer<|endoftext|>
|
361d7bbe7f7b1cb152cb92930d482d7eebe21a2f83f8c628b324b61a14d0a22b
|
def sort(self):
    """
    Sort the binned spectra in place according to time.
    :return:
    """
    assert (self._time_intervals is not None), 'must have time intervals to do sorting'
    # Reorder the spectra by the intervals' sort order, then sort the
    # intervals themselves so the two stay aligned.
    order = self._time_intervals.argsort()
    self._binned_spectrum_list = self._binned_spectrum_list[order]
    self._time_intervals.sort()
|
sort the bin spectra in place according to time
:return:
|
threeML/utils/spectrum/binned_spectrum_set.py
|
sort
|
domeckert/threeML
| 42
|
python
|
def sort(self):
'\n sort the bin spectra in place according to time\n :return:\n '
assert (self._time_intervals is not None), 'must have time intervals to do sorting'
idx = self._time_intervals.argsort()
self._binned_spectrum_list = self._binned_spectrum_list[idx]
self._time_intervals.sort()
|
def sort(self):
'\n sort the bin spectra in place according to time\n :return:\n '
assert (self._time_intervals is not None), 'must have time intervals to do sorting'
idx = self._time_intervals.argsort()
self._binned_spectrum_list = self._binned_spectrum_list[idx]
self._time_intervals.sort()<|docstring|>sort the bin spectra in place according to time
:return:<|endoftext|>
|
60c31929582e48523a0620cf7950c02734fd8eeceb97885704ebc87cf0903f38
|
def natural_key(self):
    'return the tag natural key. In our case, we will use\n the slug as a natural key. The rationale behind this is\n that slugs, used as part of URLs, are unlikely to change\n '
    # NOTE(review): presumably Django's natural-key serialization hook --
    # it expects a tuple, hence the 1-tuple of the stable slug.
    return (self.slug,)
|
return the tag natural key. In our case, we will use
the slug as a natural key. The rationale behind this is
that slugs, used as part of URLs, are unlikely to change
|
main/models.py
|
natural_key
|
Lumexralph/book-store
| 0
|
python
|
def natural_key(self):
'return the tag natural key. In our case, we will use\n the slug as a natural key. The rationale behind this is\n that slugs, used as part of URLs, are unlikely to change\n '
return (self.slug,)
|
def natural_key(self):
'return the tag natural key. In our case, we will use\n the slug as a natural key. The rationale behind this is\n that slugs, used as part of URLs, are unlikely to change\n '
return (self.slug,)<|docstring|>return the tag natural key. In our case, we will use
the slug as a natural key. The rationale behind this is
that slugs, used as part of URLs, are unlikely to change<|endoftext|>
|
69e8872b89a28c396657a429050c325199d4443b65d7ff1132a426b6245ec96d
|
def build(self, host_target):
    """Build TSan runtime (compiler-rt)."""
    compiler_rt_dir = join_path(self.source_dir, os.pardir, 'compiler-rt')
    toolchain = join_path(self.args.install_destdir, 'usr')
    cc = join_path(toolchain, 'bin', 'clang')
    cxx = join_path(toolchain, 'bin', 'clang++')
    # Configure compiler-rt against the just-installed toolchain.
    cmake_args = [
        'cmake',
        '-GNinja',
        ('-DCMAKE_PREFIX_PATH=%s' % toolchain),
        ('-DCMAKE_C_COMPILER=%s' % cc),
        ('-DCMAKE_CXX_COMPILER=%s' % cxx),
        '-DCMAKE_BUILD_TYPE=Release',
        '-DLLVM_ENABLE_ASSERTIONS=ON',
        '-DCOMPILER_RT_INCLUDE_TESTS=ON',
        '-DCOMPILER_RT_BUILD_XRAY=OFF',
        '-DCOMPILER_RT_INTERCEPT_LIBDISPATCH=ON',
        ('-DCOMPILER_RT_LIBDISPATCH_INSTALL_PATH=%s' % toolchain),
        compiler_rt_dir,
    ]
    # Always start from a clean build tree.
    shell.rmtree(self.build_dir)
    shell.makedirs(self.build_dir)
    with shell.pushd(self.build_dir):
        shell.call(cmake_args)
        shell.call(['ninja', 'tsan'])
|
Build TSan runtime (compiler-rt).
|
utils/swift_build_support/swift_build_support/products/tsan_libdispatch.py
|
build
|
alexbinary/swift
| 5
|
python
|
def build(self, host_target):
rt_source_dir = join_path(self.source_dir, os.pardir, 'compiler-rt')
toolchain_path = join_path(self.args.install_destdir, 'usr')
clang = join_path(toolchain_path, 'bin', 'clang')
clangxx = join_path(toolchain_path, 'bin', 'clang++')
config_cmd = ['cmake', '-GNinja', ('-DCMAKE_PREFIX_PATH=%s' % toolchain_path), ('-DCMAKE_C_COMPILER=%s' % clang), ('-DCMAKE_CXX_COMPILER=%s' % clangxx), '-DCMAKE_BUILD_TYPE=Release', '-DLLVM_ENABLE_ASSERTIONS=ON', '-DCOMPILER_RT_INCLUDE_TESTS=ON', '-DCOMPILER_RT_BUILD_XRAY=OFF', '-DCOMPILER_RT_INTERCEPT_LIBDISPATCH=ON', ('-DCOMPILER_RT_LIBDISPATCH_INSTALL_PATH=%s' % toolchain_path), rt_source_dir]
build_cmd = ['ninja', 'tsan']
shell.rmtree(self.build_dir)
shell.makedirs(self.build_dir)
with shell.pushd(self.build_dir):
shell.call(config_cmd)
shell.call(build_cmd)
|
def build(self, host_target):
rt_source_dir = join_path(self.source_dir, os.pardir, 'compiler-rt')
toolchain_path = join_path(self.args.install_destdir, 'usr')
clang = join_path(toolchain_path, 'bin', 'clang')
clangxx = join_path(toolchain_path, 'bin', 'clang++')
config_cmd = ['cmake', '-GNinja', ('-DCMAKE_PREFIX_PATH=%s' % toolchain_path), ('-DCMAKE_C_COMPILER=%s' % clang), ('-DCMAKE_CXX_COMPILER=%s' % clangxx), '-DCMAKE_BUILD_TYPE=Release', '-DLLVM_ENABLE_ASSERTIONS=ON', '-DCOMPILER_RT_INCLUDE_TESTS=ON', '-DCOMPILER_RT_BUILD_XRAY=OFF', '-DCOMPILER_RT_INTERCEPT_LIBDISPATCH=ON', ('-DCOMPILER_RT_LIBDISPATCH_INSTALL_PATH=%s' % toolchain_path), rt_source_dir]
build_cmd = ['ninja', 'tsan']
shell.rmtree(self.build_dir)
shell.makedirs(self.build_dir)
with shell.pushd(self.build_dir):
shell.call(config_cmd)
shell.call(build_cmd)<|docstring|>Build TSan runtime (compiler-rt).<|endoftext|>
|
ac01cf9f267064039fa2935c40812ae9a27cee578bf59d3e416da42366173a9a
|
def test(self, host_target):
    'Run check-tsan target with a LIT filter for libdispatch.'
    cmd = ['ninja', 'check-tsan']
    # LIT_FILTER restricts the TSan suite to the libdispatch tests only.
    env = {'LIT_FILTER': 'libdispatch'}
    with shell.pushd(self.build_dir):
        shell.call(cmd, env=env)
|
Run check-tsan target with a LIT filter for libdispatch.
|
utils/swift_build_support/swift_build_support/products/tsan_libdispatch.py
|
test
|
alexbinary/swift
| 5
|
python
|
def test(self, host_target):
cmd = ['ninja', 'check-tsan']
env = {'LIT_FILTER': 'libdispatch'}
with shell.pushd(self.build_dir):
shell.call(cmd, env=env)
|
def test(self, host_target):
cmd = ['ninja', 'check-tsan']
env = {'LIT_FILTER': 'libdispatch'}
with shell.pushd(self.build_dir):
shell.call(cmd, env=env)<|docstring|>Run check-tsan target with a LIT filter for libdispatch.<|endoftext|>
|
49f041764bf4173823c1465c339bb3a8f03c262bced9eaf5034b65c5feef21ae
|
def conf_to_pipe(conf):
    """Create Pipe object out of configuration.

    *conf* may be a bare function reference (string) or a dict with at
    least a 'function' key; missing 'name', 'slug' and 'meta' entries are
    filled in with defaults derived from 'function'.

    NOTE(review): when a dict is passed it is normalized in place --
    confirm callers do not reuse the same dict afterwards.
    """
    if isinstance(conf, six.string_types):
        conf = {'function': conf}
    if (not isinstance(conf, dict)):
        raise ImproperlyConfigured(('Dynamicdecorator configuration should be string or dictionay:%s' % conf))
    # Pipes are forced to start disabled here; enabling presumably happens
    # elsewhere at runtime -- verify.
    conf['enabled'] = False
    if ('function' not in conf):
        raise ImproperlyConfigured(('Configuration do not have function item: %s' % conf))
    if ('name' not in conf):
        conf['name'] = conf['function']
    if ('slug' not in conf):
        conf['slug'] = conf['name']
    conf['slug'] = slugify(conf['slug'])
    if ('meta' not in conf):
        conf['meta'] = {}
    return Pipe(**conf)
|
Create Pipe object out of configuration.
|
dynamicdecorators/config.py
|
conf_to_pipe
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def conf_to_pipe(conf):
if isinstance(conf, six.string_types):
conf = {'function': conf}
if (not isinstance(conf, dict)):
raise ImproperlyConfigured(('Dynamicdecorator configuration should be string or dictionay:%s' % conf))
conf['enabled'] = False
if ('function' not in conf):
raise ImproperlyConfigured(('Configuration do not have function item: %s' % conf))
if ('name' not in conf):
conf['name'] = conf['function']
if ('slug' not in conf):
conf['slug'] = conf['name']
conf['slug'] = slugify(conf['slug'])
if ('meta' not in conf):
conf['meta'] = {}
return Pipe(**conf)
|
def conf_to_pipe(conf):
if isinstance(conf, six.string_types):
conf = {'function': conf}
if (not isinstance(conf, dict)):
raise ImproperlyConfigured(('Dynamicdecorator configuration should be string or dictionay:%s' % conf))
conf['enabled'] = False
if ('function' not in conf):
raise ImproperlyConfigured(('Configuration do not have function item: %s' % conf))
if ('name' not in conf):
conf['name'] = conf['function']
if ('slug' not in conf):
conf['slug'] = conf['name']
conf['slug'] = slugify(conf['slug'])
if ('meta' not in conf):
conf['meta'] = {}
return Pipe(**conf)<|docstring|>Create Pipe object out of configuration.<|endoftext|>
|
c79de57d9d6f327b677c55fb1771302e671feb0a0c5ed9c365ba6272356ab491
|
def get_pipes():
    """Get pipes from settings.

    The result is cached in the module-level PIPES list: settings are only
    parsed on the first call that finds PIPES empty.
    """
    if PIPES:
        return PIPES
    for c in settings.DYNAMIC_DECORATORS:
        p = conf_to_pipe(c)
        # Duplicate slugs would make pipes indistinguishable later on.
        if any((e for e in PIPES if (p.slug == e.slug))):
            raise ImproperlyConfigured(('Duplicate name in decorator configuration: %s' % p))
        PIPES.append(p)
    return PIPES
|
Get pipes from settings.
|
dynamicdecorators/config.py
|
get_pipes
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def get_pipes():
if PIPES:
return PIPES
for c in settings.DYNAMIC_DECORATORS:
p = conf_to_pipe(c)
if any((e for e in PIPES if (p.slug == e.slug))):
raise ImproperlyConfigured(('Duplicate name in decorator configuration: %s' % p))
PIPES.append(p)
return PIPES
|
def get_pipes():
if PIPES:
return PIPES
for c in settings.DYNAMIC_DECORATORS:
p = conf_to_pipe(c)
if any((e for e in PIPES if (p.slug == e.slug))):
raise ImproperlyConfigured(('Duplicate name in decorator configuration: %s' % p))
PIPES.append(p)
return PIPES<|docstring|>Get pipes from settings.<|endoftext|>
|
9a50a97c30e084cb47ae06dd86ea227cae610e469bd8b56bc668b2fa18540eb1
|
def get_pipelines():
    """Return the module-level list of registered pipelines."""
    return PIPELINES
|
Get pipelines.
|
dynamicdecorators/config.py
|
get_pipelines
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def get_pipelines():
return PIPELINES
|
def get_pipelines():
return PIPELINES<|docstring|>Get pipelines.<|endoftext|>
|
9cf375cfaedfb233dace9b78d9b6dfb8edb7dd07430c573b158b32ecb6077270
|
def register_pipeline(slug, name, meta):
    """Register given pipeline.

    Returns the newly registered Pipeline, or the already-registered one
    when the slug is taken.
    """
    if (not isinstance(meta, dict)):
        raise ImproperlyConfigured(('Meta value of a decorator must be a dictionay:%s' % meta))
    # Construct first (as the original did), then decide whether to keep it.
    pipeline = Pipeline(slug, name, meta)
    already_registered = any((p.slug == slug) for p in PIPELINES)
    if already_registered:
        logger.info(('[DYNAMIC_DECORATORS] %s is already registered. Ignoring.' % slug))
        return next((p for p in PIPELINES if (p.slug == slug)))
    PIPELINES.append(pipeline)
    return pipeline
|
Register given pipeline.
|
dynamicdecorators/config.py
|
register_pipeline
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def register_pipeline(slug, name, meta):
if (not isinstance(meta, dict)):
raise ImproperlyConfigured(('Meta value of a decorator must be a dictionay:%s' % meta))
pipeline = Pipeline(slug, name, meta)
if (not any(((p.slug == slug) for p in PIPELINES))):
PIPELINES.append(pipeline)
return pipeline
else:
logger.info(('[DYNAMIC_DECORATORS] %s is already registered. Ignoring.' % slug))
return next((p for p in PIPELINES if (p.slug == slug)))
|
def register_pipeline(slug, name, meta):
if (not isinstance(meta, dict)):
raise ImproperlyConfigured(('Meta value of a decorator must be a dictionay:%s' % meta))
pipeline = Pipeline(slug, name, meta)
if (not any(((p.slug == slug) for p in PIPELINES))):
PIPELINES.append(pipeline)
return pipeline
else:
logger.info(('[DYNAMIC_DECORATORS] %s is already registered. Ignoring.' % slug))
return next((p for p in PIPELINES if (p.slug == slug)))<|docstring|>Register given pipeline.<|endoftext|>
|
e296df9131569a0c620953178ef407ffc635faef06613d0c97bee28785bd109d
|
def get_pipeline_by_slug(slug):
    """Search pipeline by slug value.

    Raises StopIteration when no pipeline carries the slug (same as the
    original behaviour).
    """
    matching = (p for p in PIPELINES if (p.slug == slug))
    return next(matching)
|
Search pipeline by slug value.
|
dynamicdecorators/config.py
|
get_pipeline_by_slug
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def get_pipeline_by_slug(slug):
return next((p for p in PIPELINES if (p.slug == slug)))
|
def get_pipeline_by_slug(slug):
return next((p for p in PIPELINES if (p.slug == slug)))<|docstring|>Search pipeline by slug value.<|endoftext|>
|
b1e5fdf35525dcc5baa860a7ca639e7cc96189237bfd0b53108e727671c4f8ca
|
def is_match(pipeline, pipe):
    'Check pipe against pipeline.\n\n    A pipe with no meta matches everything; otherwise every meta key the\n    pipe shares with the pipeline must carry an equal value.\n    '
    if (not pipe.meta):
        return True
    # .items() instead of the Python-2-only .iteritems(): the original
    # raised AttributeError on Python 3 whenever pipe.meta was non-empty.
    return all((pipe.meta[k] == v) for (k, v) in pipeline.meta.items() if (k in pipe.meta))
|
Check pipe against pipeline.
Check if there is any meta property on pipeline that matches with
pipe.
|
dynamicdecorators/config.py
|
is_match
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def is_match(pipeline, pipe):
'Check pipe against pipeline.\n\n Check if there is any meta property on pipeline that matches with\n pipe.\n '
return ((not pipe.meta) or all(((pipe.meta[k] == v) for (k, v) in pipeline.meta.iteritems() if (k in pipe.meta))))
|
def is_match(pipeline, pipe):
'Check pipe against pipeline.\n\n Check if there is any meta property on pipeline that matches with\n pipe.\n '
return ((not pipe.meta) or all(((pipe.meta[k] == v) for (k, v) in pipeline.meta.iteritems() if (k in pipe.meta))))<|docstring|>Check pipe against pipeline.
Check if there is any meta property on pipeline that matches with
pipe.<|endoftext|>
|
92a2c4552a4d6697eacd6157cf5c8d0c2799e3254c31ef1b825f47fb74965b71
|
def filter_pipes(pipeline, pipes):
    """Filter given pipes by meta values of current pipeline.

    Returns a lazy filter object, exactly like the original.
    """
    matcher = partial(is_match, pipeline)
    return filter(matcher, pipes)
|
Filter given pipes by meta values of current pipeline.
|
dynamicdecorators/config.py
|
filter_pipes
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def filter_pipes(pipeline, pipes):
return filter(partial(is_match, pipeline), pipes)
|
def filter_pipes(pipeline, pipes):
return filter(partial(is_match, pipeline), pipes)<|docstring|>Filter given pipes by meta values of current pipeline.<|endoftext|>
|
0294176961968fc6fecc68cd95c6e9a3397889391f1ff44d4dc39f6aa1eb5986
|
def __init__(self, function, name, slug, meta, enabled):
    """Initialize Pipe.

    :param function: the wrapped function reference (string path, per
        conf_to_pipe)
    :param name: human-readable name
    :param slug: slug identifying this pipe
    :param meta: dict of metadata used for pipeline matching
    :param enabled: whether the pipe is active
    """
    self.function = function
    self.name = name
    self.slug = slug
    self.meta = meta
    self.enabled = enabled
|
Initialize Pipe.
|
dynamicdecorators/config.py
|
__init__
|
huseyinyilmaz/django-dynamic-decorators
| 0
|
python
|
def __init__(self, function, name, slug, meta, enabled):
self.function = function
self.name = name
self.slug = slug
self.meta = meta
self.enabled = enabled
|
def __init__(self, function, name, slug, meta, enabled):
self.function = function
self.name = name
self.slug = slug
self.meta = meta
self.enabled = enabled<|docstring|>Initialize Pipe.<|endoftext|>
|
4e742f23a0ef8e177120422cb3117d4aaac7ec3fef834c2bc80a97eabeb5a7b6
|
def make_constant(self, constbox):
    "Replace 'self.box' with a Const box."
    assert isinstance(constbox, Const)
    self.box = constbox
    # Record that this value is now fully known.
    self.setlevel(LEVEL_CONSTANT)
|
Replace 'self.box' with a Const box.
|
rpython/jit/metainterp/optimizeopt/optimizer.py
|
make_constant
|
Qointum/pypy
| 34
|
python
|
def make_constant(self, constbox):
assert isinstance(constbox, Const)
self.box = constbox
self.setlevel(LEVEL_CONSTANT)
|
def make_constant(self, constbox):
assert isinstance(constbox, Const)
self.box = constbox
self.setlevel(LEVEL_CONSTANT)<|docstring|>Replace 'self.box' with a Const box.<|endoftext|>
|
5eacca2ce2033b85e9e9bedb08c03675f305d79202c30d92e4b9d80c7eafd082
|
def make_constant(self, constbox):
    "Replace 'self.box' with a Const box."
    # Int variant: additionally collapses the integer bound to the value.
    assert isinstance(constbox, ConstInt)
    self.box = constbox
    self.setlevel(LEVEL_CONSTANT)
    val = constbox.getint()
    # A degenerate [val, val] bound encodes the single known value.
    self.intbound = IntBound(val, val)
|
Replace 'self.box' with a Const box.
|
rpython/jit/metainterp/optimizeopt/optimizer.py
|
make_constant
|
Qointum/pypy
| 34
|
python
|
def make_constant(self, constbox):
assert isinstance(constbox, ConstInt)
self.box = constbox
self.setlevel(LEVEL_CONSTANT)
val = constbox.getint()
self.intbound = IntBound(val, val)
|
def make_constant(self, constbox):
assert isinstance(constbox, ConstInt)
self.box = constbox
self.setlevel(LEVEL_CONSTANT)
val = constbox.getint()
self.intbound = IntBound(val, val)<|docstring|>Replace 'self.box' with a Const box.<|endoftext|>
|
3b96c45a9b83572c0697867e623d60689f866de5461e609557898007ba9ce73b
|
def _create_user(self, email, password, **extra_fields):
    """Create and save a User with the given email and password.

    :raises ValueError: if *email* is empty.
    """
    # The original contained print(password)/print(user) debug calls that
    # leaked the plaintext password to stdout/logs -- removed.
    if (not email):
        raise ValueError('The given email must be set')
    email = self.normalize_email(email)
    user = self.model(email=email, **extra_fields)
    # set_password stores only the hash, never the raw password.
    user.set_password(password)
    user.save(using=self._db)
    return user
|
Create and save a User with the given email and password.
|
my_backend/establishment/models.py
|
_create_user
|
RodrigoBLima/card-virtual-for-establishment
| 0
|
python
|
def _create_user(self, email, password, **extra_fields):
print(password)
if (not email):
raise ValueError('The given email must be set')
email = self.normalize_email(email)
user = self.model(email=email, **extra_fields)
user.set_password(password)
user.save(using=self._db)
print(user)
print(password)
return user
|
def _create_user(self, email, password, **extra_fields):
print(password)
if (not email):
raise ValueError('The given email must be set')
email = self.normalize_email(email)
user = self.model(email=email, **extra_fields)
user.set_password(password)
user.save(using=self._db)
print(user)
print(password)
return user<|docstring|>Create and save a User with the given email and password.<|endoftext|>
|
332b8c5b696be59ac0a1065a5afe36728af28c70ca2084c12fcde7035afde2e1
|
def create_superuser(self, email, password, **extra_fields):
    """Create and save a SuperUser with the given email and password."""
    extra_fields.setdefault('is_staff', True)
    extra_fields.setdefault('is_superuser', True)
    # Reject callers that explicitly passed a falsy flag.
    checks = (
        ('is_staff', 'Superuser must have is_staff=True.'),
        ('is_superuser', 'Superuser must have is_superuser=True.'),
    )
    for flag, message in checks:
        if (extra_fields.get(flag) is not True):
            raise ValueError(message)
    return self._create_user(email, password, **extra_fields)
|
Create and save a SuperUser with the given email and password.
|
my_backend/establishment/models.py
|
create_superuser
|
RodrigoBLima/card-virtual-for-establishment
| 0
|
python
|
def create_superuser(self, email, password, **extra_fields):
extra_fields.setdefault('is_staff', True)
extra_fields.setdefault('is_superuser', True)
if (extra_fields.get('is_staff') is not True):
raise ValueError('Superuser must have is_staff=True.')
if (extra_fields.get('is_superuser') is not True):
raise ValueError('Superuser must have is_superuser=True.')
return self._create_user(email, password, **extra_fields)
|
def create_superuser(self, email, password, **extra_fields):
extra_fields.setdefault('is_staff', True)
extra_fields.setdefault('is_superuser', True)
if (extra_fields.get('is_staff') is not True):
raise ValueError('Superuser must have is_staff=True.')
if (extra_fields.get('is_superuser') is not True):
raise ValueError('Superuser must have is_superuser=True.')
return self._create_user(email, password, **extra_fields)<|docstring|>Create and save a SuperUser with the given email and password.<|endoftext|>
|
d13f462799e0b637a300af6c8b7262d92a058feb4253e42f9a673807b5fbe3cb
|
def wait_then_open(url):
    '\n    Waits for a bit then opens a URL. Useful for waiting for a proxy to come up, and then open the URL.\n    '
    for _ in range(1, 10):
        try:
            urlopen(url, context=_ssl_context())
        except URLError:
            # Not reachable yet: wait a second and retry (up to ~9 times).
            # The original had an unconditional break after this handler,
            # so the loop never actually retried.
            time.sleep(1)
            continue
        # The URL answered -- stop polling.
        break
    webbrowser.open_new_tab(url)
|
Waits for a bit then opens a URL. Useful for waiting for a proxy to come up, and then open the URL.
|
src/aks-preview/azext_aks_preview/custom.py
|
wait_then_open
|
hsrivast/azure-cli-extensions
| 1
|
python
|
def wait_then_open(url):
'\n \n '
for _ in range(1, 10):
try:
urlopen(url, context=_ssl_context())
except URLError:
time.sleep(1)
break
webbrowser.open_new_tab(url)
|
def wait_then_open(url):
'\n \n '
for _ in range(1, 10):
try:
urlopen(url, context=_ssl_context())
except URLError:
time.sleep(1)
break
webbrowser.open_new_tab(url)<|docstring|>Waits for a bit then opens a URL. Useful for waiting for a proxy to come up, and then open the URL.<|endoftext|>
|
29384d25cdd19fadf980b15e49277475d043e356a43d691a44aa5d22129e3162
|
def wait_then_open_async(url):
    '\n    Spawns a daemon thread that waits for a bit then opens a URL.\n    '
    # Thread args must be a sequence; the original passed a set literal
    # {url}, which only worked by accident for a single element.
    t = threading.Thread(target=wait_then_open, args=(url,))
    t.daemon = True
    t.start()
|
Spawns a thread that waits for a bit then opens a URL.
|
src/aks-preview/azext_aks_preview/custom.py
|
wait_then_open_async
|
hsrivast/azure-cli-extensions
| 1
|
python
|
def wait_then_open_async(url):
'\n \n '
t = threading.Thread(target=wait_then_open, args={url})
t.daemon = True
t.start()
|
def wait_then_open_async(url):
'\n \n '
t = threading.Thread(target=wait_then_open, args={url})
t.daemon = True
t.start()<|docstring|>Spawns a thread that waits for a bit then opens a URL.<|endoftext|>
|
be726a660b3ab2908e143a45957a227928a842ae6027959fd8ed98210c2695d0
|
def _remove_nulls(managed_clusters):
'\n Remove some often-empty fields from a list of ManagedClusters, so the JSON representation\n doesn\'t contain distracting null fields.\n\n This works around a quirk of the SDK for python behavior. These fields are not sent\n by the server, but get recreated by the CLI\'s own "to_dict" serialization.\n '
attrs = ['tags']
ap_attrs = ['os_disk_size_gb', 'vnet_subnet_id']
sp_attrs = ['secret']
for managed_cluster in managed_clusters:
for attr in attrs:
if (getattr(managed_cluster, attr, None) is None):
delattr(managed_cluster, attr)
if (managed_cluster.agent_pool_profiles is not None):
for ap_profile in managed_cluster.agent_pool_profiles:
for attr in ap_attrs:
if (getattr(ap_profile, attr, None) is None):
delattr(ap_profile, attr)
for attr in sp_attrs:
if (getattr(managed_cluster.service_principal_profile, attr, None) is None):
delattr(managed_cluster.service_principal_profile, attr)
return managed_clusters
|
Remove some often-empty fields from a list of ManagedClusters, so the JSON representation
doesn't contain distracting null fields.
This works around a quirk of the SDK for python behavior. These fields are not sent
by the server, but get recreated by the CLI's own "to_dict" serialization.
|
src/aks-preview/azext_aks_preview/custom.py
|
_remove_nulls
|
hsrivast/azure-cli-extensions
| 1
|
python
|
def _remove_nulls(managed_clusters):
'\n Remove some often-empty fields from a list of ManagedClusters, so the JSON representation\n doesn\'t contain distracting null fields.\n\n This works around a quirk of the SDK for python behavior. These fields are not sent\n by the server, but get recreated by the CLI\'s own "to_dict" serialization.\n '
attrs = ['tags']
ap_attrs = ['os_disk_size_gb', 'vnet_subnet_id']
sp_attrs = ['secret']
for managed_cluster in managed_clusters:
for attr in attrs:
if (getattr(managed_cluster, attr, None) is None):
delattr(managed_cluster, attr)
if (managed_cluster.agent_pool_profiles is not None):
for ap_profile in managed_cluster.agent_pool_profiles:
for attr in ap_attrs:
if (getattr(ap_profile, attr, None) is None):
delattr(ap_profile, attr)
for attr in sp_attrs:
if (getattr(managed_cluster.service_principal_profile, attr, None) is None):
delattr(managed_cluster.service_principal_profile, attr)
return managed_clusters
|
def _remove_nulls(managed_clusters):
'\n Remove some often-empty fields from a list of ManagedClusters, so the JSON representation\n doesn\'t contain distracting null fields.\n\n This works around a quirk of the SDK for python behavior. These fields are not sent\n by the server, but get recreated by the CLI\'s own "to_dict" serialization.\n '
attrs = ['tags']
ap_attrs = ['os_disk_size_gb', 'vnet_subnet_id']
sp_attrs = ['secret']
for managed_cluster in managed_clusters:
for attr in attrs:
if (getattr(managed_cluster, attr, None) is None):
delattr(managed_cluster, attr)
if (managed_cluster.agent_pool_profiles is not None):
for ap_profile in managed_cluster.agent_pool_profiles:
for attr in ap_attrs:
if (getattr(ap_profile, attr, None) is None):
delattr(ap_profile, attr)
for attr in sp_attrs:
if (getattr(managed_cluster.service_principal_profile, attr, None) is None):
delattr(managed_cluster.service_principal_profile, attr)
return managed_clusters<|docstring|>Remove some often-empty fields from a list of ManagedClusters, so the JSON representation
doesn't contain distracting null fields.
This works around a quirk of the SDK for python behavior. These fields are not sent
by the server, but get recreated by the CLI's own "to_dict" serialization.<|endoftext|>
|
4eb5f3faa57aad5cafa983fdbad91383e6a5f235b87e9cf5c8e9868cb6f698b9
|
def _print_or_merge_credentials(path, kubeconfig, overwrite_existing, context_name):
    """Merge an unencrypted kubeconfig into the file at *path*, or print it.

    If *path* is "-", the kubeconfig is written to stdout instead.  Otherwise
    the parent directory and target file are created if missing (file mode
    0o600), the new kubeconfig is written to a temp file, and the two configs
    are merged via ``merge_kubernetes_configurations``.  YAML errors are
    logged as warnings rather than raised; the temp file is always removed.
    """
    if (path == '-'):
        print(kubeconfig)
        return
    directory = os.path.dirname(path)
    if (directory and (not os.path.exists(directory))):
        try:
            os.makedirs(directory)
        except OSError as ex:
            # Another process may create the directory between the existence
            # check and makedirs; only re-raise unexpected errors.
            if (ex.errno != errno.EEXIST):
                raise
    if (not os.path.exists(path)):
        # Create an empty file with owner-only permissions (384 == 0o600).
        with os.fdopen(os.open(path, (os.O_CREAT | os.O_WRONLY), 384), 'wt'):
            pass
    # Write the new kubeconfig to a temp file so it can be merged with the
    # config already on disk.
    (fd, temp_path) = tempfile.mkstemp()
    additional_file = os.fdopen(fd, 'w+t')
    try:
        additional_file.write(kubeconfig)
        additional_file.flush()
        merge_kubernetes_configurations(path, temp_path, overwrite_existing, context_name)
    except yaml.YAMLError as ex:
        logger.warning('Failed to merge credentials to kube config file: %s', ex)
    finally:
        additional_file.close()
        os.remove(temp_path)
|
Merge an unencrypted kubeconfig into the file at the specified path, or print it to
stdout if the path is "-".
|
src/aks-preview/azext_aks_preview/custom.py
|
_print_or_merge_credentials
|
hsrivast/azure-cli-extensions
| 1
|
python
|
def _print_or_merge_credentials(path, kubeconfig, overwrite_existing, context_name):
'Merge an unencrypted kubeconfig into the file at the specified path, or print it to\n stdout if the path is "-".\n '
if (path == '-'):
print(kubeconfig)
return
directory = os.path.dirname(path)
if (directory and (not os.path.exists(directory))):
try:
os.makedirs(directory)
except OSError as ex:
if (ex.errno != errno.EEXIST):
raise
if (not os.path.exists(path)):
with os.fdopen(os.open(path, (os.O_CREAT | os.O_WRONLY), 384), 'wt'):
pass
(fd, temp_path) = tempfile.mkstemp()
additional_file = os.fdopen(fd, 'w+t')
try:
additional_file.write(kubeconfig)
additional_file.flush()
merge_kubernetes_configurations(path, temp_path, overwrite_existing, context_name)
except yaml.YAMLError as ex:
logger.warning('Failed to merge credentials to kube config file: %s', ex)
finally:
additional_file.close()
os.remove(temp_path)
|
def _print_or_merge_credentials(path, kubeconfig, overwrite_existing, context_name):
'Merge an unencrypted kubeconfig into the file at the specified path, or print it to\n stdout if the path is "-".\n '
if (path == '-'):
print(kubeconfig)
return
directory = os.path.dirname(path)
if (directory and (not os.path.exists(directory))):
try:
os.makedirs(directory)
except OSError as ex:
if (ex.errno != errno.EEXIST):
raise
if (not os.path.exists(path)):
with os.fdopen(os.open(path, (os.O_CREAT | os.O_WRONLY), 384), 'wt'):
pass
(fd, temp_path) = tempfile.mkstemp()
additional_file = os.fdopen(fd, 'w+t')
try:
additional_file.write(kubeconfig)
additional_file.flush()
merge_kubernetes_configurations(path, temp_path, overwrite_existing, context_name)
except yaml.YAMLError as ex:
logger.warning('Failed to merge credentials to kube config file: %s', ex)
finally:
additional_file.close()
os.remove(temp_path)<|docstring|>Merge an unencrypted kubeconfig into the file at the specified path, or print it to
stdout if the path is "-".<|endoftext|>
|
1034d38f98a52de4e349f3f829aac4bcef7e47d137978b2e9bac406e85b32a7f
|
def get_report_url(args):
    """Return the URL of existing reports, generating them when absent.

    Reads the ``symlink`` marker inside the reports directory under
    ``args.from_dir``; when the marker cannot be read, the evaluation
    reports are built via ``evaluations_report`` instead.
    """
    marker_path = os.path.join(args.from_dir, REPORTS_DIR, 'symlink')
    try:
        with open(marker_path) as marker:
            symlink = marker.read()
    except IOError:
        # No reports yet: generate them and return their URL.
        return evaluations_report(args)
    return os.path.join(os.path.basename(symlink), ANALYZE_DIR, os.path.basename(ANALYZE_TEMPLATE))
|
Checks whether there exist reports in the selected directory and creates
them if they are not there.
|
bigmler/report/dispatcher.py
|
get_report_url
|
bigmlcom/bigmler
| 32
|
python
|
def get_report_url(args):
'Checks whether there exist reports in the selected directory and creates\n them if they are not there.\n\n '
try:
with open(os.path.join(args.from_dir, REPORTS_DIR, 'symlink')) as lnk:
symlink = lnk.read()
return os.path.join(os.path.basename(symlink), ANALYZE_DIR, os.path.basename(ANALYZE_TEMPLATE))
except IOError:
return evaluations_report(args)
|
def get_report_url(args):
'Checks whether there exist reports in the selected directory and creates\n them if they are not there.\n\n '
try:
with open(os.path.join(args.from_dir, REPORTS_DIR, 'symlink')) as lnk:
symlink = lnk.read()
return os.path.join(os.path.basename(symlink), ANALYZE_DIR, os.path.basename(ANALYZE_TEMPLATE))
except IOError:
return evaluations_report(args)<|docstring|>Checks whether there exist reports in the selected directory and creates
them if they are not there.<|endoftext|>
|
a2e89e0ae8087ef1fe450fe9ab5d2f0c99b5726bcac83c3771d26a9f15465153
|
def report_dispatcher(args=sys.argv[1:]):
    """Parse the command line and serve/open the requested report.

    Locates (or generates) the report, then -- unless ``--no-server`` was
    given -- starts a local HTTP server, opens the report URL in a browser,
    and blocks until the user presses RETURN before stopping the server.

    NOTE(review): the default ``sys.argv[1:]`` is evaluated once at import
    time; callers passing explicit *args* are unaffected.
    """
    command = command_handling(args, COMMAND_LOG)
    command_args = a.parse_and_check(command)
    port = (DEFAULT_PORT if (not command_args.port) else command_args.port)
    report_url = get_report_url(command_args)
    if (not command_args.no_server):
        absolute_report_url = ('http://%s:%s/%s' % (DEFAULT_HOST, port, report_url))
        current_directory = os.getcwd()
        # Serve files relative to the report server directory.
        os.chdir(os.path.join(HOME, SERVER_DIRECTORY))
        httpd = None
        try:
            httpd = StoppableHTTPServer((DEFAULT_HOST, port), http.server.SimpleHTTPRequestHandler)
            _thread.start_new_thread(httpd.serve, ())
        except socket.error as exc:
            # e.g. port already in use: report it and fall through -- the
            # browser is opened anyway.
            print(exc)
        webbrowser.open_new(absolute_report_url)
        if httpd:
            input('*********************************\nPress <RETURN> to stop the server\n*********************************\n')
        os.chdir(current_directory)
        if httpd:
            httpd.stop()
|
Parses command line and calls the different report functions
|
bigmler/report/dispatcher.py
|
report_dispatcher
|
bigmlcom/bigmler
| 32
|
python
|
def report_dispatcher(args=sys.argv[1:]):
'\n\n '
command = command_handling(args, COMMAND_LOG)
command_args = a.parse_and_check(command)
port = (DEFAULT_PORT if (not command_args.port) else command_args.port)
report_url = get_report_url(command_args)
if (not command_args.no_server):
absolute_report_url = ('http://%s:%s/%s' % (DEFAULT_HOST, port, report_url))
current_directory = os.getcwd()
os.chdir(os.path.join(HOME, SERVER_DIRECTORY))
httpd = None
try:
httpd = StoppableHTTPServer((DEFAULT_HOST, port), http.server.SimpleHTTPRequestHandler)
_thread.start_new_thread(httpd.serve, ())
except socket.error as exc:
print(exc)
webbrowser.open_new(absolute_report_url)
if httpd:
input('*********************************\nPress <RETURN> to stop the server\n*********************************\n')
os.chdir(current_directory)
if httpd:
httpd.stop()
|
def report_dispatcher(args=sys.argv[1:]):
'\n\n '
command = command_handling(args, COMMAND_LOG)
command_args = a.parse_and_check(command)
port = (DEFAULT_PORT if (not command_args.port) else command_args.port)
report_url = get_report_url(command_args)
if (not command_args.no_server):
absolute_report_url = ('http://%s:%s/%s' % (DEFAULT_HOST, port, report_url))
current_directory = os.getcwd()
os.chdir(os.path.join(HOME, SERVER_DIRECTORY))
httpd = None
try:
httpd = StoppableHTTPServer((DEFAULT_HOST, port), http.server.SimpleHTTPRequestHandler)
_thread.start_new_thread(httpd.serve, ())
except socket.error as exc:
print(exc)
webbrowser.open_new(absolute_report_url)
if httpd:
input('*********************************\nPress <RETURN> to stop the server\n*********************************\n')
os.chdir(current_directory)
if httpd:
httpd.stop()<|docstring|>Parses command line and calls the different report functions<|endoftext|>
|
974f8d767ec3579fabb5d48a9e29e6bd2dada797c070aa54bf5a94134da82740
|
def run_server(self):
    """Run the mock PyOCNI server.

    Blocks serving ``self.app`` over eventlet WSGI on the host/port taken
    from the module configuration; prints a banner before and after serving.
    """
    print(((('\n______________________________________________________________________________________\nThe OCNI server is running at: ' + config.OCNI_IP) + ':') + config.OCNI_PORT))
    wsgi.server(eventlet.listen((config.OCNI_IP, int(config.OCNI_PORT))), self.app)
    print('\n______________________________________________________________________________________\nClosing correctly PyOCNI server ')
|
to run the server
|
pyocni/TDD/fake_Data/server_Mock.py
|
run_server
|
MarouenMechtri/CNG-Manager
| 1
|
python
|
def run_server(self):
'\n\n \n\n '
print(((('\n______________________________________________________________________________________\nThe OCNI server is running at: ' + config.OCNI_IP) + ':') + config.OCNI_PORT))
wsgi.server(eventlet.listen((config.OCNI_IP, int(config.OCNI_PORT))), self.app)
print('\n______________________________________________________________________________________\nClosing correctly PyOCNI server ')
|
def run_server(self):
'\n\n \n\n '
print(((('\n______________________________________________________________________________________\nThe OCNI server is running at: ' + config.OCNI_IP) + ':') + config.OCNI_PORT))
wsgi.server(eventlet.listen((config.OCNI_IP, int(config.OCNI_PORT))), self.app)
print('\n______________________________________________________________________________________\nClosing correctly PyOCNI server ')<|docstring|>to run the server<|endoftext|>
|
5c33583fa38e20f1264c0cb6e2bece3f8a35969ab2b647d14a6e8d58406e3149
|
def is_power_of_two(n):
    """Checks if n is a power of 2.

    Args:
        n: Non-negative integer.

    Raises:
        ValueError: If n is negative.

    Note: by the bit trick used here, 0 is also reported as a power of two.
    """
    if n < 0:
        raise ValueError('Input argument must be >= 0.')
    # A power of two has exactly one set bit; clearing the lowest set bit
    # (n & (n - 1)) therefore yields zero.
    return not (n & (n - 1))
|
Checks if n is a power of 2.
Args:
n: Non-negative integer.
|
src/q0504.py
|
is_power_of_two
|
mirzadm/cracking-python3
| 0
|
python
|
def is_power_of_two(n):
'Checks if n is a power of 2.\n\n Args:\n n: Non-negative integer.\n '
if (n < 0):
raise ValueError('Input argument must be >= 0.')
return ((n & (n - 1)) == 0)
|
def is_power_of_two(n):
'Checks if n is a power of 2.\n\n Args:\n n: Non-negative integer.\n '
if (n < 0):
raise ValueError('Input argument must be >= 0.')
return ((n & (n - 1)) == 0)<|docstring|>Checks if n is a power of 2.
Args:
n: Non-negative integer.<|endoftext|>
|
ef74593faf3e6703cc34ee6b8a2dd81a713f181abcb723154d8bf3030aede201
|
def validate_monday(date: datetime.date) -> None:
    """Validator: raise ValidationError unless *date* falls on a Monday."""
    if date.isoweekday() == 1:
        return
    # Format as e.g. "3 Jan 2022" (leading zero stripped from the day).
    raise ValidationError((_('"%s" is not a Monday') % date.strftime('%d %b %Y').lstrip('0')))
|
Validates that date is a Monday
|
mtp_api/apps/core/models.py
|
validate_monday
|
ministryofjustice/mtp-api
| 5
|
python
|
def validate_monday(date: datetime.date):
'\n \n '
if (date.isoweekday() != 1):
raise ValidationError((_('"%s" is not a Monday') % date.strftime('%d %b %Y').lstrip('0')))
|
def validate_monday(date: datetime.date):
'\n \n '
if (date.isoweekday() != 1):
raise ValidationError((_('"%s" is not a Monday') % date.strftime('%d %b %Y').lstrip('0')))<|docstring|>Validates that date is a Monday<|endoftext|>
|
fa7185ea474d5a9a27a7d95adc739e371f5e7d0951a9683eb60a0dbc637c8922
|
@click.command()
@click.option('-s', '--state', default=os.path.expanduser('~/.wshygiene'), type=click.Path(exists=False, file_okay=False, dir_okay=True, resolve_path=True))
@click.argument('root', nargs=(- 1), type=click.Path(file_okay=False, dir_okay=True, resolve_path=True))
def main(state, root):
    # NOTE: the docstring doubles as the click --help text, so it is kept terse.
    'workspace-hygiene'
    # Echo the resolved state dir and root paths so the user sees what is scanned.
    click.echo(state)
    click.echo(root)
    storage = StorageProxy(state)  # persistent scanner state lives under --state
    scanner = Scanner(storage)
    # The state directory itself is excluded from the scan.
    scanner.scan(root, ignore=state)
|
workspace-hygiene
|
wshygiene/cli.py
|
main
|
doriantaylor/py-workspace-hygiene
| 0
|
python
|
@click.command()
@click.option('-s', '--state', default=os.path.expanduser('~/.wshygiene'), type=click.Path(exists=False, file_okay=False, dir_okay=True, resolve_path=True))
@click.argument('root', nargs=(- 1), type=click.Path(file_okay=False, dir_okay=True, resolve_path=True))
def main(state, root):
click.echo(state)
click.echo(root)
storage = StorageProxy(state)
scanner = Scanner(storage)
scanner.scan(root, ignore=state)
|
@click.command()
@click.option('-s', '--state', default=os.path.expanduser('~/.wshygiene'), type=click.Path(exists=False, file_okay=False, dir_okay=True, resolve_path=True))
@click.argument('root', nargs=(- 1), type=click.Path(file_okay=False, dir_okay=True, resolve_path=True))
def main(state, root):
click.echo(state)
click.echo(root)
storage = StorageProxy(state)
scanner = Scanner(storage)
scanner.scan(root, ignore=state)<|docstring|>workspace-hygiene<|endoftext|>
|
a8e2d6fead491ffe9e6af8658b8d3c87f089962e8a3356cf79fb0adea9de9cf6
|
def getUsernames(self, url):
    """Return the usernames stored for the given site.  Stub, not implemented.

    @param: url QUrl
    @return: QStringList
    """
    pass
|
@param: url QUrl
@return: QStringList
|
mc/autofill/PasswordManager.py
|
getUsernames
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def getUsernames(self, url):
'\n @param: url QUrl\n @return: QStringList\n '
pass
|
def getUsernames(self, url):
'\n @param: url QUrl\n @return: QStringList\n '
pass<|docstring|>@param: url QUrl
@return: QStringList<|endoftext|>
|
d76f511ed4f6d240a484dfee584b498939f691c5dcd4e0ed0b600230a7cccf8c
|
def getEntries(self, url):
    """Return the password entries stored for the given site.  Stub, not implemented.

    @param: url QUrl
    @return: QVector<PasswordEntry>
    """
    pass
|
@param: url QUrl
@return: QVector<PasswordEntry>
|
mc/autofill/PasswordManager.py
|
getEntries
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def getEntries(self, url):
'\n @param: url QUrl\n @return: QVector<PasswordEntry>\n '
pass
|
def getEntries(self, url):
'\n @param: url QUrl\n @return: QVector<PasswordEntry>\n '
pass<|docstring|>@param: url QUrl
@return: QVector<PasswordEntry><|endoftext|>
|
ae1839e3bd992e0554728cdaaf0d6a0dfc6c3375b32aa385da166e0b5c9c4484
|
def getAllEntries(self, url):
    """Return all password entries.  Stub, not implemented.

    @param: url QUrl
    @return: QVector<PasswordEntry>
    """
    pass
|
@param: url QUrl
@return: QVector<PasswordEntry>
|
mc/autofill/PasswordManager.py
|
getAllEntries
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def getAllEntries(self, url):
'\n @param: url QUrl\n @return: QVector<PasswordEntry>\n '
pass
|
def getAllEntries(self, url):
'\n @param: url QUrl\n @return: QVector<PasswordEntry>\n '
pass<|docstring|>@param: url QUrl
@return: QVector<PasswordEntry><|endoftext|>
|
9b7f704617979e6c9be81ff29f5dd1021a65a6b2651b3b5a8644902bf2509362
|
def addEntry(self, entry):
    """Store a new password entry.  Stub, not implemented.

    @param: entry PasswordEntry
    """
    pass
|
@param: entry PasswordEntry
|
mc/autofill/PasswordManager.py
|
addEntry
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def addEntry(self, entry):
'\n \n '
pass
|
def addEntry(self, entry):
'\n \n '
pass<|docstring|>@param: entry PasswordEntry<|endoftext|>
|
e7e81dc3961f403bbf18ecd2d571d0a631a8567c43436d82a3a9c1f0e12daf78
|
def updateEntry(self, entry):
    """Update an existing password entry.  Stub, not implemented.

    @param: entry PasswordEntry
    """
    pass
|
@param: entry PasswordEntry
|
mc/autofill/PasswordManager.py
|
updateEntry
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def updateEntry(self, entry):
'\n \n '
pass
|
def updateEntry(self, entry):
'\n \n '
pass<|docstring|>@param: entry PasswordEntry<|endoftext|>
|
df19737ccc7086c011056267b35c6c630a298550eb11cba011897dde6ad0d79a
|
def updateLastUsed(self, entry):
    """Update the last-used timestamp of an entry.  Stub, not implemented.

    @param: entry PasswordEntry
    """
    pass
|
@param: entry PasswordEntry
|
mc/autofill/PasswordManager.py
|
updateLastUsed
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def updateLastUsed(self, entry):
'\n \n '
pass
|
def updateLastUsed(self, entry):
'\n \n '
pass<|docstring|>@param: entry PasswordEntry<|endoftext|>
|
d41f059f164402530150318be4690175688a499800eae420ad4f379a393b5b0e
|
def removeEntry(self, entry):
    """Remove a stored password entry.  Stub, not implemented.

    @param: entry PasswordEntry
    """
    pass
|
@param: entry PasswordEntry
|
mc/autofill/PasswordManager.py
|
removeEntry
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def removeEntry(self, entry):
'\n \n '
pass
|
def removeEntry(self, entry):
'\n \n '
pass<|docstring|>@param: entry PasswordEntry<|endoftext|>
|
5ed70614b051d35028b742b6f2ef6b662d424fb3af3bbc2b70b5a7f1a94aae4c
|
def availableBackends(self):
    """Return the registered password backends keyed by id.  Stub, not implemented.

    @return: QHash<QString, PasswordBackend>
    """
    pass
|
@return: QHash<QString, PasswordBackend>
|
mc/autofill/PasswordManager.py
|
availableBackends
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def availableBackends(self):
'\n \n '
pass
|
def availableBackends(self):
'\n \n '
pass<|docstring|>@return: QHash<QString, PasswordBackend><|endoftext|>
|
e53d53515f3ca3de0b2b129c3addafd1cac8a02e43938244e1c64c3e6f989232
|
def activeBackend(self):
    """Return the currently active password backend.  Stub, not implemented.

    @return: PasswordBackend
    """
    pass
|
@return: PasswordBackend
|
mc/autofill/PasswordManager.py
|
activeBackend
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def activeBackend(self):
'\n \n '
pass
|
def activeBackend(self):
'\n \n '
pass<|docstring|>@return: PasswordBackend<|endoftext|>
|
f120318ff6a0cc6c6bf35563dfdba4c502128822641cc2a70a1707d5b8f872c3
|
def swtichBackend(self, backendID):
    """Switch the active backend to the one with *backendID*.  Stub, not implemented.

    NOTE(review): method name misspells "switch"; kept for API compatibility.

    @param: backendID QString
    """
    pass
|
@param: backendID QString
|
mc/autofill/PasswordManager.py
|
swtichBackend
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def swtichBackend(self, backendID):
'\n \n '
pass
|
def swtichBackend(self, backendID):
'\n \n '
pass<|docstring|>@param: backendID QString<|endoftext|>
|
c637b2d6e32e810956943f05a5339bb98504755a453cc8eeba0d9dca639331a7
|
def registerBackend(self, id_, backend):
    """Register a password backend under the given id.  Stub, not implemented.

    @param: id_ QString
    @param: backend PasswordBackend
    """
    pass
|
@param: id_ QString
@param: backend PasswordBackend
|
mc/autofill/PasswordManager.py
|
registerBackend
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def registerBackend(self, id_, backend):
'\n @param: id_ QString\n @param: backend PasswordBackend\n '
pass
|
def registerBackend(self, id_, backend):
'\n @param: id_ QString\n @param: backend PasswordBackend\n '
pass<|docstring|>@param: id_ QString
@param: backend PasswordBackend<|endoftext|>
|
a6eb333c2f8e107ee958350af553c2dd62cc51cbf594d2f0e3282249777e9740
|
def unregisterBackend(self, backend):
    """Unregister a previously registered backend.  Stub, not implemented.

    @param: backend PasswordBackend
    """
    pass
|
@param: backend PasswordBackend
|
mc/autofill/PasswordManager.py
|
unregisterBackend
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
def unregisterBackend(self, backend):
'\n \n '
pass
|
def unregisterBackend(self, backend):
'\n \n '
pass<|docstring|>@param: backend PasswordBackend<|endoftext|>
|
6849df233692498edbee10ac131ced8c5765f0390efe58a86d46626f3f000988
|
@classmethod
def createHost(cls, url):
    """Build the host string used as the storage key for *url*.  Stub, not implemented.

    @param: url QUrl
    @return: QString
    """
    pass
|
@param: url QUrl
@return: QString
|
mc/autofill/PasswordManager.py
|
createHost
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
@classmethod
def createHost(cls, url):
'\n @param: url QUrl\n @return: QString\n '
pass
|
@classmethod
def createHost(cls, url):
'\n @param: url QUrl\n @return: QString\n '
pass<|docstring|>@param: url QUrl
@return: QString<|endoftext|>
|
dfc012d48df31cb095b3c4e3e792c520ca5e8dc606234e6d8a716f80300b85a6
|
@classmethod
def urlEncodePassword(cls, password):
    """URL-encode a password for form submission.  Stub, not implemented.

    @param: password QString
    @return: QByteArray
    """
    pass
|
@param: password QString
@return: QByteArray
|
mc/autofill/PasswordManager.py
|
urlEncodePassword
|
zy-sunshine/falkon-pyqt5
| 1
|
python
|
@classmethod
def urlEncodePassword(cls, password):
'\n @param: password QString\n @return: QByteArray\n '
pass
|
@classmethod
def urlEncodePassword(cls, password):
'\n @param: password QString\n @return: QByteArray\n '
pass<|docstring|>@param: password QString
@return: QByteArray<|endoftext|>
|
1f8a026de77877a894337bee8f141c52326c71564cfd3a67473ef97ce4cf44a5
|
def seriescoeff(m=6, lengthScale=1.0, magnSigma2=1.0, true_covariance=False):
    """Compute the coefficients q_j^2 of the cosine-series approximation

        k(tau) = sum_{j=0}^{inf} q_j^2 * cos(j * omega_0 * tau)

    of the standard periodic covariance function, plus their derivatives
    with respect to the lengthscale.

    Reference:
        [1] Arno Solin and Simo Sarkka (2014). Explicit link between
        periodic covariance functions and state space models. AISTATS 2014,
        JMLR: W&CP, volume 33.

    Args:
        m: int, degree (order) of the approximation. Default 6.
        lengthScale: float, lengthscale parameter of the kernel.
        magnSigma2: float, magnitude (variance) multiplier of the kernel.
        true_covariance: bool, use the truncated true-covariance
            coefficients instead of the infinite (Bessel-function)
            approximation.

    Returns:
        (coeffs, coeffs_dl): arrays of shape (m+1,) with the series
        coefficients and their lengthscale derivatives.

    Raises:
        ValueError: if any Bessel-based coefficient is non-finite.
    """
    if true_covariance:
        # Binomial-expansion coefficients b_{j,m} of the truncated series.
        # (sp.misc.factorial was removed in SciPy 1.0, and the sp.floor /
        # sp.exp / sp.mod NumPy aliases were dropped from the scipy
        # namespace; use scipy.special.factorial and np.* instead.)
        bb = (lambda j, m: (((((1.0 + np.array((j != 0), dtype=np.float64)) / (2 ** j)) * sp.special.binom(j, np.floor((((j - m) / 2.0) * np.array((m <= j), dtype=np.float64))))) * np.array((m <= j), dtype=np.float64)) * np.array((np.mod((j - m), 2) == 0), dtype=np.float64)))
        (M, J) = np.meshgrid(range(0, (m + 1)), range(0, (m + 1)))
        coeffs = ((((bb(J, M) / sp.special.factorial(J)) * np.exp((- (lengthScale ** (- 2))))) * ((lengthScale ** (- 2)) ** J)) * magnSigma2)
        coeffs_dl = np.sum(((coeffs * (lengthScale ** (- 3))) * (2.0 - ((2.0 * J) * (lengthScale ** 2)))), 0)
        coeffs = np.sum(coeffs, 0)
    else:
        # Infinite-series coefficients via modified Bessel functions I_j.
        coeffs = (((2 * magnSigma2) * np.exp((- (lengthScale ** (- 2))))) * special.iv(range(0, (m + 1)), (1.0 / (lengthScale ** 2))))
        if np.any((np.isfinite(coeffs) == False)):
            raise ValueError('sde_standard_periodic: Coefficients are not finite!')
        coeffs[0] = (0.5 * coeffs[0])  # the j=0 term appears once, not twice
        coeffs_dl = np.zeros((m + 1))
        coeffs_dl[1:] = (((magnSigma2 * (lengthScale ** (- 3))) * np.exp((- (lengthScale ** (- 2))))) * (((- 4) * special.iv(range(0, m), (lengthScale ** (- 2)))) + ((4 * (1 + (np.arange(1, (m + 1)) * (lengthScale ** 2)))) * special.iv(range(1, (m + 1)), (lengthScale ** (- 2))))))
        coeffs_dl[0] = (((magnSigma2 * (lengthScale ** (- 3))) * np.exp((- (lengthScale ** (- 2))))) * ((2 * special.iv(0, (lengthScale ** (- 2)))) - (2 * special.iv(1, (lengthScale ** (- 2))))))
    return (coeffs.squeeze(), coeffs_dl.squeeze())
|
Calculate the coefficients q_j^2 for the covariance function
approximation:
k( au) = \sum_{j=0}^{+\infty} q_j^2 \cos(j\omega_0 au)
Reference is:
[1] Arno Solin and Simo Särkkä (2014). Explicit link between periodic
covariance functions and state space models. In Proceedings of the
Seventeenth International Conference on Artifcial Intelligence and
Statistics (AISTATS 2014). JMLR: W&CP, volume 33.
Note! Only the infinite approximation (through Bessel function)
is currently implemented.
Input:
----------------
m: int
Degree of approximation. Default 6.
lengthScale: float
Length scale parameter in the kerenl
magnSigma2:float
Multiplier in front of the kernel.
Output:
-----------------
coeffs: array(m+1)
Covariance series coefficients
coeffs_dl: array(m+1)
Derivatives of the coefficients with respect to lengthscale.
|
GPy/kern/src/sde_standard_periodic.py
|
seriescoeff
|
mgrady3/GPy
| 1,685
|
python
|
def seriescoeff(m=6, lengthScale=1.0, magnSigma2=1.0, true_covariance=False):
'\n Calculate the coefficients q_j^2 for the covariance function \n approximation:\n \n k(\tau) = \\sum_{j=0}^{+\\infty} q_j^2 \\cos(j\\omega_0 \tau)\n \n Reference is:\n\n [1] Arno Solin and Simo Särkkä (2014). Explicit link between periodic \n covariance functions and state space models. In Proceedings of the \n Seventeenth International Conference on Artifcial Intelligence and \n Statistics (AISTATS 2014). JMLR: W&CP, volume 33. \n \n Note! Only the infinite approximation (through Bessel function) \n is currently implemented.\n\n Input:\n ----------------\n \n m: int\n Degree of approximation. Default 6.\n lengthScale: float\n Length scale parameter in the kerenl\n magnSigma2:float\n Multiplier in front of the kernel.\n \n \n Output:\n -----------------\n \n coeffs: array(m+1)\n Covariance series coefficients\n \n coeffs_dl: array(m+1)\n Derivatives of the coefficients with respect to lengthscale.\n \n '
if true_covariance:
bb = (lambda j, m: (((((1.0 + np.array((j != 0), dtype=np.float64)) / (2 ** j)) * sp.special.binom(j, sp.floor((((j - m) / 2.0) * np.array((m <= j), dtype=np.float64))))) * np.array((m <= j), dtype=np.float64)) * np.array((sp.mod((j - m), 2) == 0), dtype=np.float64)))
(M, J) = np.meshgrid(range(0, (m + 1)), range(0, (m + 1)))
coeffs = ((((bb(J, M) / sp.misc.factorial(J)) * sp.exp((- (lengthScale ** (- 2))))) * ((lengthScale ** (- 2)) ** J)) * magnSigma2)
coeffs_dl = np.sum(((coeffs * (lengthScale ** (- 3))) * (2.0 - ((2.0 * J) * (lengthScale ** 2)))), 0)
coeffs = np.sum(coeffs, 0)
else:
coeffs = (((2 * magnSigma2) * sp.exp((- (lengthScale ** (- 2))))) * special.iv(range(0, (m + 1)), (1.0 / (lengthScale ** 2))))
if np.any((np.isfinite(coeffs) == False)):
raise ValueError('sde_standard_periodic: Coefficients are not finite!')
coeffs[0] = (0.5 * coeffs[0])
coeffs_dl = np.zeros((m + 1))
coeffs_dl[1:] = (((magnSigma2 * (lengthScale ** (- 3))) * sp.exp((- (lengthScale ** (- 2))))) * (((- 4) * special.iv(range(0, m), (lengthScale ** (- 2)))) + ((4 * (1 + (np.arange(1, (m + 1)) * (lengthScale ** 2)))) * special.iv(range(1, (m + 1)), (lengthScale ** (- 2))))))
coeffs_dl[0] = (((magnSigma2 * (lengthScale ** (- 3))) * np.exp((- (lengthScale ** (- 2))))) * ((2 * special.iv(0, (lengthScale ** (- 2)))) - (2 * special.iv(1, (lengthScale ** (- 2))))))
return (coeffs.squeeze(), coeffs_dl.squeeze())
|
def seriescoeff(m=6, lengthScale=1.0, magnSigma2=1.0, true_covariance=False):
'\n Calculate the coefficients q_j^2 for the covariance function \n approximation:\n \n k(\tau) = \\sum_{j=0}^{+\\infty} q_j^2 \\cos(j\\omega_0 \tau)\n \n Reference is:\n\n [1] Arno Solin and Simo Särkkä (2014). Explicit link between periodic \n covariance functions and state space models. In Proceedings of the \n Seventeenth International Conference on Artifcial Intelligence and \n Statistics (AISTATS 2014). JMLR: W&CP, volume 33. \n \n Note! Only the infinite approximation (through Bessel function) \n is currently implemented.\n\n Input:\n ----------------\n \n m: int\n Degree of approximation. Default 6.\n lengthScale: float\n Length scale parameter in the kerenl\n magnSigma2:float\n Multiplier in front of the kernel.\n \n \n Output:\n -----------------\n \n coeffs: array(m+1)\n Covariance series coefficients\n \n coeffs_dl: array(m+1)\n Derivatives of the coefficients with respect to lengthscale.\n \n '
if true_covariance:
bb = (lambda j, m: (((((1.0 + np.array((j != 0), dtype=np.float64)) / (2 ** j)) * sp.special.binom(j, sp.floor((((j - m) / 2.0) * np.array((m <= j), dtype=np.float64))))) * np.array((m <= j), dtype=np.float64)) * np.array((sp.mod((j - m), 2) == 0), dtype=np.float64)))
(M, J) = np.meshgrid(range(0, (m + 1)), range(0, (m + 1)))
coeffs = ((((bb(J, M) / sp.misc.factorial(J)) * sp.exp((- (lengthScale ** (- 2))))) * ((lengthScale ** (- 2)) ** J)) * magnSigma2)
coeffs_dl = np.sum(((coeffs * (lengthScale ** (- 3))) * (2.0 - ((2.0 * J) * (lengthScale ** 2)))), 0)
coeffs = np.sum(coeffs, 0)
else:
coeffs = (((2 * magnSigma2) * sp.exp((- (lengthScale ** (- 2))))) * special.iv(range(0, (m + 1)), (1.0 / (lengthScale ** 2))))
if np.any((np.isfinite(coeffs) == False)):
raise ValueError('sde_standard_periodic: Coefficients are not finite!')
coeffs[0] = (0.5 * coeffs[0])
coeffs_dl = np.zeros((m + 1))
coeffs_dl[1:] = (((magnSigma2 * (lengthScale ** (- 3))) * sp.exp((- (lengthScale ** (- 2))))) * (((- 4) * special.iv(range(0, m), (lengthScale ** (- 2)))) + ((4 * (1 + (np.arange(1, (m + 1)) * (lengthScale ** 2)))) * special.iv(range(1, (m + 1)), (lengthScale ** (- 2))))))
coeffs_dl[0] = (((magnSigma2 * (lengthScale ** (- 3))) * np.exp((- (lengthScale ** (- 2))))) * ((2 * special.iv(0, (lengthScale ** (- 2)))) - (2 * special.iv(1, (lengthScale ** (- 2))))))
return (coeffs.squeeze(), coeffs_dl.squeeze())<|docstring|>Calculate the coefficients q_j^2 for the covariance function
approximation:
k( au) = \sum_{j=0}^{+\infty} q_j^2 \cos(j\omega_0 au)
Reference is:
[1] Arno Solin and Simo Särkkä (2014). Explicit link between periodic
covariance functions and state space models. In Proceedings of the
Seventeenth International Conference on Artifcial Intelligence and
Statistics (AISTATS 2014). JMLR: W&CP, volume 33.
Note! Only the infinite approximation (through Bessel function)
is currently implemented.
Input:
----------------
m: int
Degree of approximation. Default 6.
lengthScale: float
Length scale parameter in the kerenl
magnSigma2:float
Multiplier in front of the kernel.
Output:
-----------------
coeffs: array(m+1)
Covariance series coefficients
coeffs_dl: array(m+1)
Derivatives of the coefficients with respect to lengthscale.<|endoftext|>
|
5b449f4d98f6c581bcdd3c5af7e2b83d0ba44ab98ef97ea719fc3a2610039d7c
|
def __init__(self, *args, **kwargs):
'\n Init constructior.\n \n Two optinal extra parameters are added in addition to the ones in \n StdPeriodic kernel.\n \n :param approx_order: approximation order for the RBF covariance. (Default 7)\n :type approx_order: int\n \n :param balance: Whether to balance this kernel separately. (Defaulf False). Model has a separate parameter for balancing.\n :type balance: bool\n '
if ('approx_order' in kwargs):
self.approx_order = kwargs.get('approx_order')
del kwargs['approx_order']
else:
self.approx_order = 7
if ('balance' in kwargs):
self.balance = bool(kwargs.get('balance'))
del kwargs['balance']
else:
self.balance = False
super(sde_StdPeriodic, self).__init__(*args, **kwargs)
|
Init constructior.
Two optinal extra parameters are added in addition to the ones in
StdPeriodic kernel.
:param approx_order: approximation order for the RBF covariance. (Default 7)
:type approx_order: int
:param balance: Whether to balance this kernel separately. (Defaulf False). Model has a separate parameter for balancing.
:type balance: bool
|
GPy/kern/src/sde_standard_periodic.py
|
__init__
|
mgrady3/GPy
| 1,685
|
python
|
def __init__(self, *args, **kwargs):
'\n Init constructior.\n \n Two optinal extra parameters are added in addition to the ones in \n StdPeriodic kernel.\n \n :param approx_order: approximation order for the RBF covariance. (Default 7)\n :type approx_order: int\n \n :param balance: Whether to balance this kernel separately. (Defaulf False). Model has a separate parameter for balancing.\n :type balance: bool\n '
if ('approx_order' in kwargs):
self.approx_order = kwargs.get('approx_order')
del kwargs['approx_order']
else:
self.approx_order = 7
if ('balance' in kwargs):
self.balance = bool(kwargs.get('balance'))
del kwargs['balance']
else:
self.balance = False
super(sde_StdPeriodic, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
'\n Init constructior.\n \n Two optinal extra parameters are added in addition to the ones in \n StdPeriodic kernel.\n \n :param approx_order: approximation order for the RBF covariance. (Default 7)\n :type approx_order: int\n \n :param balance: Whether to balance this kernel separately. (Defaulf False). Model has a separate parameter for balancing.\n :type balance: bool\n '
if ('approx_order' in kwargs):
self.approx_order = kwargs.get('approx_order')
del kwargs['approx_order']
else:
self.approx_order = 7
if ('balance' in kwargs):
self.balance = bool(kwargs.get('balance'))
del kwargs['balance']
else:
self.balance = False
super(sde_StdPeriodic, self).__init__(*args, **kwargs)<|docstring|>Init constructior.
Two optinal extra parameters are added in addition to the ones in
StdPeriodic kernel.
:param approx_order: approximation order for the RBF covariance. (Default 7)
:type approx_order: int
:param balance: Whether to balance this kernel separately. (Defaulf False). Model has a separate parameter for balancing.
:type balance: bool<|endoftext|>
|
27e082af41792f13dac71029455b3959e68b8d3515f11eb7400f5308cad00292
|
def sde_update_gradient_full(self, gradients):
'\n Update gradient in the order in which parameters are represented in the\n kernel\n '
self.variance.gradient = gradients[0]
self.period.gradient = gradients[1]
self.lengthscale.gradient = gradients[2]
|
Update gradient in the order in which parameters are represented in the
kernel
|
GPy/kern/src/sde_standard_periodic.py
|
sde_update_gradient_full
|
mgrady3/GPy
| 1,685
|
python
|
def sde_update_gradient_full(self, gradients):
'\n Update gradient in the order in which parameters are represented in the\n kernel\n '
self.variance.gradient = gradients[0]
self.period.gradient = gradients[1]
self.lengthscale.gradient = gradients[2]
|
def sde_update_gradient_full(self, gradients):
'\n Update gradient in the order in which parameters are represented in the\n kernel\n '
self.variance.gradient = gradients[0]
self.period.gradient = gradients[1]
self.lengthscale.gradient = gradients[2]<|docstring|>Update gradient in the order in which parameters are represented in the
kernel<|endoftext|>
|
9f4f427b8bb7bd2a5baa2526e5ac6d3b973d21ce26407e114a41b342ed28cc07
|
def sde(self):
' \n Return the state space representation of the standard periodic covariance.\n \n \n ! Note: one must constrain lengthscale not to drop below 0.2. (independently of approximation order)\n After this Bessel functions of the first becomes NaN. Rescaling\n time variable might help.\n \n ! Note: one must keep period also not very low. Because then\n the gradients wrt wavelength become ustable. \n However this might depend on the data. For test example with\n 300 data points the low limit is 0.15. \n '
if (self.approx_order is not None):
N = int(self.approx_order)
else:
N = 7
p_period = float(self.period)
p_lengthscale = (2 * float(self.lengthscale))
p_variance = float(self.variance)
w0 = ((2 * np.pi) / p_period)
[q2, dq2l] = seriescoeff(N, p_lengthscale, p_variance)
dq2l = (2 * dq2l)
eps = 1e-12
if (np.any((np.isfinite(q2) == False)) or np.any((np.abs(q2) > (1.0 / eps))) or np.any((np.abs(q2) < eps))):
warnings.warn(('sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in q2 :'.format(eps) + q2.__format__('')))
if (np.any((np.isfinite(dq2l) == False)) or np.any((np.abs(dq2l) > (1.0 / eps))) or np.any((np.abs(dq2l) < eps))):
warnings.warn(('sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in dq2l :'.format(eps) + q2.__format__('')))
F = np.kron(np.diag(range(0, (N + 1))), np.array(((0, (- w0)), (w0, 0))))
L = np.eye((2 * (N + 1)))
Qc = np.zeros(((2 * (N + 1)), (2 * (N + 1))))
P_inf = np.kron(np.diag(q2), np.eye(2))
H = np.kron(np.ones((1, (N + 1))), np.array((1, 0)))
P0 = P_inf.copy()
dF = np.empty((F.shape[0], F.shape[1], 3))
dQc = np.empty((Qc.shape[0], Qc.shape[1], 3))
dP_inf = np.empty((P_inf.shape[0], P_inf.shape[1], 3))
dF[(:, :, 0)] = np.zeros(F.shape)
dQc[(:, :, 0)] = np.zeros(Qc.shape)
dP_inf[(:, :, 0)] = (P_inf / p_variance)
dF[(:, :, 1)] = np.kron(np.diag(range(0, (N + 1))), (np.array(((0, w0), ((- w0), 0))) / p_period))
dQc[(:, :, 1)] = np.zeros(Qc.shape)
dP_inf[(:, :, 1)] = np.zeros(P_inf.shape)
dF[(:, :, 2)] = np.zeros(F.shape)
dQc[(:, :, 2)] = np.zeros(Qc.shape)
dP_inf[(:, :, 2)] = np.kron(np.diag(dq2l), np.eye(2))
dP0 = dP_inf.copy()
if self.balance:
import GPy.models.state_space_main as ssm
(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0) = ssm.balance_ss_model(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0)
return (F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0)
|
Return the state space representation of the standard periodic covariance.
! Note: one must constrain lengthscale not to drop below 0.2. (independently of approximation order)
After this Bessel functions of the first becomes NaN. Rescaling
time variable might help.
! Note: one must keep period also not very low. Because then
the gradients wrt wavelength become ustable.
However this might depend on the data. For test example with
300 data points the low limit is 0.15.
|
GPy/kern/src/sde_standard_periodic.py
|
sde
|
mgrady3/GPy
| 1,685
|
python
|
def sde(self):
' \n Return the state space representation of the standard periodic covariance.\n \n \n ! Note: one must constrain lengthscale not to drop below 0.2. (independently of approximation order)\n After this Bessel functions of the first becomes NaN. Rescaling\n time variable might help.\n \n ! Note: one must keep period also not very low. Because then\n the gradients wrt wavelength become ustable. \n However this might depend on the data. For test example with\n 300 data points the low limit is 0.15. \n '
if (self.approx_order is not None):
N = int(self.approx_order)
else:
N = 7
p_period = float(self.period)
p_lengthscale = (2 * float(self.lengthscale))
p_variance = float(self.variance)
w0 = ((2 * np.pi) / p_period)
[q2, dq2l] = seriescoeff(N, p_lengthscale, p_variance)
dq2l = (2 * dq2l)
eps = 1e-12
if (np.any((np.isfinite(q2) == False)) or np.any((np.abs(q2) > (1.0 / eps))) or np.any((np.abs(q2) < eps))):
warnings.warn(('sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in q2 :'.format(eps) + q2.__format__()))
if (np.any((np.isfinite(dq2l) == False)) or np.any((np.abs(dq2l) > (1.0 / eps))) or np.any((np.abs(dq2l) < eps))):
warnings.warn(('sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in dq2l :'.format(eps) + q2.__format__()))
F = np.kron(np.diag(range(0, (N + 1))), np.array(((0, (- w0)), (w0, 0))))
L = np.eye((2 * (N + 1)))
Qc = np.zeros(((2 * (N + 1)), (2 * (N + 1))))
P_inf = np.kron(np.diag(q2), np.eye(2))
H = np.kron(np.ones((1, (N + 1))), np.array((1, 0)))
P0 = P_inf.copy()
dF = np.empty((F.shape[0], F.shape[1], 3))
dQc = np.empty((Qc.shape[0], Qc.shape[1], 3))
dP_inf = np.empty((P_inf.shape[0], P_inf.shape[1], 3))
dF[(:, :, 0)] = np.zeros(F.shape)
dQc[(:, :, 0)] = np.zeros(Qc.shape)
dP_inf[(:, :, 0)] = (P_inf / p_variance)
dF[(:, :, 1)] = np.kron(np.diag(range(0, (N + 1))), (np.array(((0, w0), ((- w0), 0))) / p_period))
dQc[(:, :, 1)] = np.zeros(Qc.shape)
dP_inf[(:, :, 1)] = np.zeros(P_inf.shape)
dF[(:, :, 2)] = np.zeros(F.shape)
dQc[(:, :, 2)] = np.zeros(Qc.shape)
dP_inf[(:, :, 2)] = np.kron(np.diag(dq2l), np.eye(2))
dP0 = dP_inf.copy()
if self.balance:
import GPy.models.state_space_main as ssm
(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0) = ssm.balance_ss_model(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0)
return (F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0)
|
def sde(self):
' \n Return the state space representation of the standard periodic covariance.\n \n \n ! Note: one must constrain lengthscale not to drop below 0.2. (independently of approximation order)\n After this Bessel functions of the first becomes NaN. Rescaling\n time variable might help.\n \n ! Note: one must keep period also not very low. Because then\n the gradients wrt wavelength become ustable. \n However this might depend on the data. For test example with\n 300 data points the low limit is 0.15. \n '
if (self.approx_order is not None):
N = int(self.approx_order)
else:
N = 7
p_period = float(self.period)
p_lengthscale = (2 * float(self.lengthscale))
p_variance = float(self.variance)
w0 = ((2 * np.pi) / p_period)
[q2, dq2l] = seriescoeff(N, p_lengthscale, p_variance)
dq2l = (2 * dq2l)
eps = 1e-12
if (np.any((np.isfinite(q2) == False)) or np.any((np.abs(q2) > (1.0 / eps))) or np.any((np.abs(q2) < eps))):
warnings.warn(('sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in q2 :'.format(eps) + q2.__format__()))
if (np.any((np.isfinite(dq2l) == False)) or np.any((np.abs(dq2l) > (1.0 / eps))) or np.any((np.abs(dq2l) < eps))):
warnings.warn(('sde_Periodic: Infinite, too small, or too large (eps={0:e}) values in dq2l :'.format(eps) + q2.__format__()))
F = np.kron(np.diag(range(0, (N + 1))), np.array(((0, (- w0)), (w0, 0))))
L = np.eye((2 * (N + 1)))
Qc = np.zeros(((2 * (N + 1)), (2 * (N + 1))))
P_inf = np.kron(np.diag(q2), np.eye(2))
H = np.kron(np.ones((1, (N + 1))), np.array((1, 0)))
P0 = P_inf.copy()
dF = np.empty((F.shape[0], F.shape[1], 3))
dQc = np.empty((Qc.shape[0], Qc.shape[1], 3))
dP_inf = np.empty((P_inf.shape[0], P_inf.shape[1], 3))
dF[(:, :, 0)] = np.zeros(F.shape)
dQc[(:, :, 0)] = np.zeros(Qc.shape)
dP_inf[(:, :, 0)] = (P_inf / p_variance)
dF[(:, :, 1)] = np.kron(np.diag(range(0, (N + 1))), (np.array(((0, w0), ((- w0), 0))) / p_period))
dQc[(:, :, 1)] = np.zeros(Qc.shape)
dP_inf[(:, :, 1)] = np.zeros(P_inf.shape)
dF[(:, :, 2)] = np.zeros(F.shape)
dQc[(:, :, 2)] = np.zeros(Qc.shape)
dP_inf[(:, :, 2)] = np.kron(np.diag(dq2l), np.eye(2))
dP0 = dP_inf.copy()
if self.balance:
import GPy.models.state_space_main as ssm
(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0) = ssm.balance_ss_model(F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0)
return (F, L, Qc, H, P_inf, P0, dF, dQc, dP_inf, dP0)<|docstring|>Return the state space representation of the standard periodic covariance.
! Note: one must constrain lengthscale not to drop below 0.2. (independently of approximation order)
After this Bessel functions of the first becomes NaN. Rescaling
time variable might help.
! Note: one must keep period also not very low. Because then
the gradients wrt wavelength become ustable.
However this might depend on the data. For test example with
300 data points the low limit is 0.15.<|endoftext|>
|
7b40ee6f7965e9603bad73fa30c45c98ee0b96f807b78d6418f0d35e93876a6c
|
def get_outermost(self, direction: Tuple[(int, int)]) -> Tuple[(int, int)]:
'\n Calculate the outermost tile in the direction provided\n :param direction: Direction to use\n :return: The position of the outermost tile\n '
coordinates = self.coordinates
center = (sum((c[0] for c in coordinates)), sum((c[1] for c in coordinates)))
transformed = [np.dot((c[0], c[1]), direction) for c in coordinates]
arg_max = np.argwhere((transformed == np.amax(transformed)))
arg_min = np.argmin((np.sqrt((((center[0] - transformed[i[0]][0]) ** 2) + ((center[1] - transformed[i[0]][1]) ** 2))) for i in arg_max))
return (coordinates[arg_max[arg_min][0]][0], coordinates[arg_max[arg_min][0]][1])
|
Calculate the outermost tile in the direction provided
:param direction: Direction to use
:return: The position of the outermost tile
|
scripts/engine/core/component.py
|
get_outermost
|
Snayff/notquiteparadise
| 12
|
python
|
def get_outermost(self, direction: Tuple[(int, int)]) -> Tuple[(int, int)]:
'\n Calculate the outermost tile in the direction provided\n :param direction: Direction to use\n :return: The position of the outermost tile\n '
coordinates = self.coordinates
center = (sum((c[0] for c in coordinates)), sum((c[1] for c in coordinates)))
transformed = [np.dot((c[0], c[1]), direction) for c in coordinates]
arg_max = np.argwhere((transformed == np.amax(transformed)))
arg_min = np.argmin((np.sqrt((((center[0] - transformed[i[0]][0]) ** 2) + ((center[1] - transformed[i[0]][1]) ** 2))) for i in arg_max))
return (coordinates[arg_max[arg_min][0]][0], coordinates[arg_max[arg_min][0]][1])
|
def get_outermost(self, direction: Tuple[(int, int)]) -> Tuple[(int, int)]:
'\n Calculate the outermost tile in the direction provided\n :param direction: Direction to use\n :return: The position of the outermost tile\n '
coordinates = self.coordinates
center = (sum((c[0] for c in coordinates)), sum((c[1] for c in coordinates)))
transformed = [np.dot((c[0], c[1]), direction) for c in coordinates]
arg_max = np.argwhere((transformed == np.amax(transformed)))
arg_min = np.argmin((np.sqrt((((center[0] - transformed[i[0]][0]) ** 2) + ((center[1] - transformed[i[0]][1]) ** 2))) for i in arg_max))
return (coordinates[arg_max[arg_min][0]][0], coordinates[arg_max[arg_min][0]][1])<|docstring|>Calculate the outermost tile in the direction provided
:param direction: Direction to use
:return: The position of the outermost tile<|endoftext|>
|
d651203b9e5ce0aa8c9df3caad00ddac657d6b6bbb57568006a0d276e9b5634a
|
@property
def x(self) -> int:
'\n :return: The x component of the top-left position\n '
return self.reference_position[0]
|
:return: The x component of the top-left position
|
scripts/engine/core/component.py
|
x
|
Snayff/notquiteparadise
| 12
|
python
|
@property
def x(self) -> int:
'\n \n '
return self.reference_position[0]
|
@property
def x(self) -> int:
'\n \n '
return self.reference_position[0]<|docstring|>:return: The x component of the top-left position<|endoftext|>
|
5b4dc7e0ff467c47e9596083d1d4c84278c75d0ea2211f09ede52d6061c5452d
|
@property
def y(self) -> int:
'\n :return: The y component of the top-left position\n '
return self.reference_position[1]
|
:return: The y component of the top-left position
|
scripts/engine/core/component.py
|
y
|
Snayff/notquiteparadise
| 12
|
python
|
@property
def y(self) -> int:
'\n \n '
return self.reference_position[1]
|
@property
def y(self) -> int:
'\n \n '
return self.reference_position[1]<|docstring|>:return: The y component of the top-left position<|endoftext|>
|
2b2e360098b508df002e96313b36cab4cb137ecf1223e90da3ab42b7b9539670
|
@property
def coordinates(self) -> List[Tuple[(int, int)]]:
'\n :return: The list of coordinates that this Position represents\n '
return [((self.x + x), (self.y + y)) for (x, y) in self.offsets]
|
:return: The list of coordinates that this Position represents
|
scripts/engine/core/component.py
|
coordinates
|
Snayff/notquiteparadise
| 12
|
python
|
@property
def coordinates(self) -> List[Tuple[(int, int)]]:
'\n \n '
return [((self.x + x), (self.y + y)) for (x, y) in self.offsets]
|
@property
def coordinates(self) -> List[Tuple[(int, int)]]:
'\n \n '
return [((self.x + x), (self.y + y)) for (x, y) in self.offsets]<|docstring|>:return: The list of coordinates that this Position represents<|endoftext|>
|
33c9bbcd41f09bbdac18bb20c4f551b84ca5681bd8b58226a43454ccd19983b6
|
def __contains__(self, key: Tuple[(int, int)]):
'\n :param key: Coordinate to test against\n :return: A bool that represents if the Position contains the provided coordinates\n '
for coordinate in self.coordinates:
if (coordinate == key):
return True
return False
|
:param key: Coordinate to test against
:return: A bool that represents if the Position contains the provided coordinates
|
scripts/engine/core/component.py
|
__contains__
|
Snayff/notquiteparadise
| 12
|
python
|
def __contains__(self, key: Tuple[(int, int)]):
'\n :param key: Coordinate to test against\n :return: A bool that represents if the Position contains the provided coordinates\n '
for coordinate in self.coordinates:
if (coordinate == key):
return True
return False
|
def __contains__(self, key: Tuple[(int, int)]):
'\n :param key: Coordinate to test against\n :return: A bool that represents if the Position contains the provided coordinates\n '
for coordinate in self.coordinates:
if (coordinate == key):
return True
return False<|docstring|>:param key: Coordinate to test against
:return: A bool that represents if the Position contains the provided coordinates<|endoftext|>
|
0c1b89a2fb91965abd692b527181fd667382f38126e0ef96b7c59775d506dde5
|
def set_current_sprite(self, sprite_category: SpriteCategoryType):
'\n Set the current sprite. Set current sprite duration to 0.\n '
sprite = getattr(self.sprites, sprite_category)
self.current_sprite = sprite
self.current_sprite_category = sprite_category
self.current_sprite_duration = 0
|
Set the current sprite. Set current sprite duration to 0.
|
scripts/engine/core/component.py
|
set_current_sprite
|
Snayff/notquiteparadise
| 12
|
python
|
def set_current_sprite(self, sprite_category: SpriteCategoryType):
'\n \n '
sprite = getattr(self.sprites, sprite_category)
self.current_sprite = sprite
self.current_sprite_category = sprite_category
self.current_sprite_duration = 0
|
def set_current_sprite(self, sprite_category: SpriteCategoryType):
'\n \n '
sprite = getattr(self.sprites, sprite_category)
self.current_sprite = sprite
self.current_sprite_category = sprite_category
self.current_sprite_duration = 0<|docstring|>Set the current sprite. Set current sprite duration to 0.<|endoftext|>
|
1b406c8a7384e59e13248807d7b826f0eed52364b467ab40de9eabb5b2018197
|
def set_draw_to_target(self):
'\n Set draw_x and draw_y to their target values\n '
self.draw_x = self.target_draw_x
self.draw_y = self.target_draw_y
|
Set draw_x and draw_y to their target values
|
scripts/engine/core/component.py
|
set_draw_to_target
|
Snayff/notquiteparadise
| 12
|
python
|
def set_draw_to_target(self):
'\n \n '
self.draw_x = self.target_draw_x
self.draw_y = self.target_draw_y
|
def set_draw_to_target(self):
'\n \n '
self.draw_x = self.target_draw_x
self.draw_y = self.target_draw_y<|docstring|>Set draw_x and draw_y to their target values<|endoftext|>
|
a7d64d65016641cb8d6cc47d4838192fdb56be33083076b31b48da62032b12bd
|
def set_skill_cooldown(self, name: str, value: int):
'\n Sets the cooldown of a skill\n '
self.cooldowns[name] = max(0, value)
|
Sets the cooldown of a skill
|
scripts/engine/core/component.py
|
set_skill_cooldown
|
Snayff/notquiteparadise
| 12
|
python
|
def set_skill_cooldown(self, name: str, value: int):
'\n \n '
self.cooldowns[name] = max(0, value)
|
def set_skill_cooldown(self, name: str, value: int):
'\n \n '
self.cooldowns[name] = max(0, value)<|docstring|>Sets the cooldown of a skill<|endoftext|>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.