| hexsha (string, len 40) | size (int64, 2-1.02M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 4-245) | max_stars_repo_name (string, len 6-130) | max_stars_repo_head_hexsha (string, len 40) | max_stars_repo_licenses (list, len 1-10) | max_stars_count (int64, 1-191k, nullable) | max_stars_repo_stars_event_min_datetime (string, len 24, nullable) | max_stars_repo_stars_event_max_datetime (string, len 24, nullable) | max_issues_repo_path (string, len 4-245) | max_issues_repo_name (string, len 6-130) | max_issues_repo_head_hexsha (string, len 40) | max_issues_repo_licenses (list, len 1-10) | max_issues_count (int64, 1-67k, nullable) | max_issues_repo_issues_event_min_datetime (string, len 24, nullable) | max_issues_repo_issues_event_max_datetime (string, len 24, nullable) | max_forks_repo_path (string, len 4-245) | max_forks_repo_name (string, len 6-130) | max_forks_repo_head_hexsha (string, len 40) | max_forks_repo_licenses (list, len 1-10) | max_forks_count (int64, 1-105k, nullable) | max_forks_repo_forks_event_min_datetime (string, len 24, nullable) | max_forks_repo_forks_event_max_datetime (string, len 24, nullable) | content (string, len 2-1.02M) | avg_line_length (float64, 1-417k) | max_line_length (int64, 1-987k) | alphanum_fraction (float64, 0-1) | content_no_comment (string, len 0-1.01M) | is_comment_constant_removed (bool, 1 class) | is_sharp_comment_removed (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c452c8a6569b75610cdf1914eeb687f42106575
| 196
|
py
|
Python
|
ferminet_ecp/integral/__init__.py
|
bytedance/FermiNet_with_ECP
|
f86874513541866db6b89559698e2078d4e37f7c
|
[
"Apache-2.0"
] | 3
|
2022-01-11T03:31:00.000Z
|
2022-01-18T07:28:24.000Z
|
ferminet_ecp/integral/__init__.py
|
bytedance/FermiNet_with_ECP
|
f86874513541866db6b89559698e2078d4e37f7c
|
[
"Apache-2.0"
] | null | null | null |
ferminet_ecp/integral/__init__.py
|
bytedance/FermiNet_with_ECP
|
f86874513541866db6b89559698e2078d4e37f7c
|
[
"Apache-2.0"
] | 1
|
2022-01-05T17:39:53.000Z
|
2022-01-05T17:39:53.000Z
|
# Copyright (c) ByteDance, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
| 39.2
| 61
| 0.760204
| true
| true
|
|
1c452ced1d02e0be92216546022c19ebe1d5e3d7
| 5,219
|
py
|
Python
|
packages/python/plotly/plotly/graph_objs/scatterternary/unselected/_textfont.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/graph_objs/scatterternary/unselected/_textfont.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
packages/python/plotly/plotly/graph_objs/scatterternary/unselected/_textfont.py
|
labaran1/plotly.py
|
7ec751e8fed4a570c11ea4bea2231806389d62eb
|
[
"MIT"
] | null | null | null |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scatterternary.unselected"
_path_str = "scatterternary.unselected.textfont"
_valid_props = {"color"}
# color
# -----
@property
def color(self):
"""
Sets the text font color of unselected points, applied only
when a selection exists.
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
Sets the text font color of unselected points, applied
only when a selection exists.
"""
def __init__(self, arg=None, color=None, **kwargs):
"""
Construct a new Textfont object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.scatterternary
.unselected.Textfont`
color
Sets the text font color of unselected points, applied
only when a selection exists.
Returns
-------
Textfont
"""
super(Textfont, self).__init__("textfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scatterternary.unselected.Textfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scatterternary.unselected.Textfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
| 36.753521
| 82
| 0.564284
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
_parent_path_str = "scatterternary.unselected"
_path_str = "scatterternary.unselected.textfont"
_valid_props = {"color"}
@property
def color(self):
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
@property
def _prop_descriptions(self):
return """\
color
Sets the text font color of unselected points, applied
only when a selection exists.
"""
def __init__(self, arg=None, color=None, **kwargs):
super(Textfont, self).__init__("textfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scatterternary.unselected.Textfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scatterternary.unselected.Textfont`"""
)
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
self._process_kwargs(**dict(arg, **kwargs))
self._skip_invalid = False
| true
| true
|
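The Textfont class in the row above only takes effect while a selection is active on the trace. As a minimal usage sketch (assuming a standard plotly installation; the figure values are made up), the class is normally reached through the figure API rather than constructed directly:

```python
import plotly.graph_objects as go

# With selectedpoints set, unselected points get the dimmed text color
# defined by scatterternary.unselected.textfont.color.
fig = go.Figure(
    go.Scatterternary(
        a=[0.2, 0.5, 0.3],
        b=[0.3, 0.2, 0.5],
        c=[0.5, 0.3, 0.2],
        mode="markers+text",
        text=["p1", "p2", "p3"],
        selectedpoints=[0],  # a selection must exist for unselected styling to apply
        unselected=dict(textfont=dict(color="lightgray")),
    )
)
fig.show()
```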
1c452d29c806e98640fff6394a519a69547d9461
| 21,953
|
py
|
Python
|
sdk/storage/azure-storage-queue/azure/storage/queue/_shared/utils.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | null | null | null |
sdk/storage/azure-storage-queue/azure/storage/queue/_shared/utils.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | 1
|
2019-06-04T18:12:16.000Z
|
2019-06-04T18:12:16.000Z
|
sdk/storage/azure-storage-queue/azure/storage/queue/_shared/utils.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | 1
|
2019-06-17T22:18:23.000Z
|
2019-06-17T22:18:23.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from typing import ( # pylint: disable=unused-import
Union, Optional, Any, Iterable, Dict, List, Type, Tuple,
TYPE_CHECKING
)
import base64
import hashlib
import hmac
import logging
from os import fstat
from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
try:
from urllib.parse import quote, unquote, parse_qs
except ImportError:
from urlparse import parse_qs # type: ignore
from urllib2 import quote, unquote # type: ignore
import six
import isodate
from azure.core import Configuration
from azure.core.exceptions import raise_with_traceback
from azure.core.pipeline import Pipeline
from azure.core.pipeline.transport import RequestsTransport
from azure.core.pipeline.policies import (
RedirectPolicy,
ContentDecodePolicy,
BearerTokenCredentialPolicy,
ProxyPolicy)
from azure.core.exceptions import (
HttpResponseError,
ResourceNotFoundError,
ResourceModifiedError,
ResourceExistsError,
ClientAuthenticationError,
DecodeError)
from .constants import STORAGE_OAUTH_SCOPE, SERVICE_HOST_BASE, DEFAULT_SOCKET_TIMEOUT
from .models import LocationMode, StorageErrorCode
from .authentication import SharedKeyCredentialPolicy
from .policies import (
StorageBlobSettings,
StorageHeadersPolicy,
StorageUserAgentPolicy,
StorageContentValidation,
StorageRequestHook,
StorageResponseHook,
StorageLoggingPolicy,
StorageHosts,
QueueMessagePolicy,
ExponentialRetry)
if TYPE_CHECKING:
from datetime import datetime
from azure.core.pipeline.transport import HttpTransport
from azure.core.pipeline.policies import HTTPPolicy
from azure.core.exceptions import AzureError
_LOGGER = logging.getLogger(__name__)
class _QueryStringConstants(object):
SIGNED_SIGNATURE = 'sig'
SIGNED_PERMISSION = 'sp'
SIGNED_START = 'st'
SIGNED_EXPIRY = 'se'
SIGNED_RESOURCE = 'sr'
SIGNED_IDENTIFIER = 'si'
SIGNED_IP = 'sip'
SIGNED_PROTOCOL = 'spr'
SIGNED_VERSION = 'sv'
SIGNED_CACHE_CONTROL = 'rscc'
SIGNED_CONTENT_DISPOSITION = 'rscd'
SIGNED_CONTENT_ENCODING = 'rsce'
SIGNED_CONTENT_LANGUAGE = 'rscl'
SIGNED_CONTENT_TYPE = 'rsct'
START_PK = 'spk'
START_RK = 'srk'
END_PK = 'epk'
END_RK = 'erk'
SIGNED_RESOURCE_TYPES = 'srt'
SIGNED_SERVICES = 'ss'
@staticmethod
def to_list():
return [
_QueryStringConstants.SIGNED_SIGNATURE,
_QueryStringConstants.SIGNED_PERMISSION,
_QueryStringConstants.SIGNED_START,
_QueryStringConstants.SIGNED_EXPIRY,
_QueryStringConstants.SIGNED_RESOURCE,
_QueryStringConstants.SIGNED_IDENTIFIER,
_QueryStringConstants.SIGNED_IP,
_QueryStringConstants.SIGNED_PROTOCOL,
_QueryStringConstants.SIGNED_VERSION,
_QueryStringConstants.SIGNED_CACHE_CONTROL,
_QueryStringConstants.SIGNED_CONTENT_DISPOSITION,
_QueryStringConstants.SIGNED_CONTENT_ENCODING,
_QueryStringConstants.SIGNED_CONTENT_LANGUAGE,
_QueryStringConstants.SIGNED_CONTENT_TYPE,
_QueryStringConstants.START_PK,
_QueryStringConstants.START_RK,
_QueryStringConstants.END_PK,
_QueryStringConstants.END_RK,
_QueryStringConstants.SIGNED_RESOURCE_TYPES,
_QueryStringConstants.SIGNED_SERVICES,
]
class StorageAccountHostsMixin(object):
def __init__(
self, parsed_url, # type: Any
service, # type: str
credential=None, # type: Optional[Any]
**kwargs # type: Any
):
# type: (...) -> None
self._location_mode = kwargs.get('_location_mode', LocationMode.PRIMARY)
self._hosts = kwargs.get('_hosts')
self.scheme = parsed_url.scheme
if service not in ['blob', 'queue', 'file']:
raise ValueError("Invalid service: {}".format(service))
account = parsed_url.netloc.split(".{}.core.".format(service))
secondary_hostname = None
self.credential = format_shared_key_credential(account, credential)
if self.scheme.lower() != 'https' and hasattr(self.credential, 'get_token'):
raise ValueError("Token credential is only supported with HTTPS.")
if hasattr(self.credential, 'account_name'):
secondary_hostname = "{}-secondary.{}.{}".format(
self.credential.account_name, service, SERVICE_HOST_BASE)
if not self._hosts:
if len(account) > 1:
secondary_hostname = parsed_url.netloc.replace(
account[0],
account[0] + '-secondary')
if kwargs.get('secondary_hostname'):
secondary_hostname = kwargs['secondary_hostname']
self._hosts = {
LocationMode.PRIMARY: parsed_url.netloc,
LocationMode.SECONDARY: secondary_hostname}
self.require_encryption = kwargs.get('require_encryption', False)
self.key_encryption_key = kwargs.get('key_encryption_key')
self.key_resolver_function = kwargs.get('key_resolver_function')
self._config, self._pipeline = create_pipeline(self.credential, hosts=self._hosts, **kwargs)
def __enter__(self):
self._client.__enter__()
return self
def __exit__(self, *args):
self._client.__exit__(*args)
@property
def url(self):
return self._format_url(self._hosts[self._location_mode])
@property
def primary_endpoint(self):
return self._format_url(self._hosts[LocationMode.PRIMARY])
@property
def primary_hostname(self):
return self._hosts[LocationMode.PRIMARY]
@property
def secondary_endpoint(self):
if not self._hosts[LocationMode.SECONDARY]:
raise ValueError("No secondary host configured.")
return self._format_url(self._hosts[LocationMode.SECONDARY])
@property
def secondary_hostname(self):
return self._hosts[LocationMode.SECONDARY]
@property
def location_mode(self):
return self._location_mode
@location_mode.setter
def location_mode(self, value):
if self._hosts.get(value):
self._location_mode = value
self._client._config.url = self.url # pylint: disable=protected-access
else:
raise ValueError("No host URL for location mode: {}".format(value))
def _format_query_string(self, sas_token, credential, snapshot=None):
query_str = "?"
if snapshot:
query_str += 'snapshot={}&'.format(self.snapshot)
if sas_token and not credential:
query_str += sas_token
elif is_credential_sastoken(credential):
query_str += credential.lstrip('?')
credential = None
return query_str.rstrip('?&'), credential
def format_shared_key_credential(account, credential):
if isinstance(credential, six.string_types):
if len(account) < 2:
raise ValueError("Unable to determine account name for shared key credential.")
credential = {
'account_name': account[0],
'account_key': credential
}
if isinstance(credential, dict):
if 'account_name' not in credential:
raise ValueError("Shared key credential missing 'account_name")
if 'account_key' not in credential:
raise ValueError("Shared key credential missing 'account_key")
return SharedKeyCredentialPolicy(**credential)
return credential
service_connection_params = {
'blob': {'primary': 'BlobEndpoint', 'secondary': 'BlobSecondaryEndpoint'},
'queue': {'primary': 'QueueEndpoint', 'secondary': 'QueueSecondaryEndpoint'},
'file': {'primary': 'FileEndpoint', 'secondary': 'FileSecondaryEndpoint'},
}
def parse_connection_str(conn_str, credential, service):
conn_str = conn_str.rstrip(';')
conn_settings = dict([s.split('=', 1) for s in conn_str.split(';')]) # pylint: disable=consider-using-dict-comprehension
endpoints = service_connection_params[service]
primary = None
secondary = None
if not credential:
try:
credential = {
'account_name': conn_settings['AccountName'],
'account_key': conn_settings['AccountKey']
}
except KeyError:
credential = conn_settings.get('SharedAccessSignature')
if endpoints['primary'] in conn_settings:
primary = conn_settings[endpoints['primary']]
if endpoints['secondary'] in conn_settings:
secondary = conn_settings[endpoints['secondary']]
else:
if endpoints['secondary'] in conn_settings:
raise ValueError("Connection string specifies only secondary endpoint.")
try:
primary = "{}://{}.{}.{}".format(
conn_settings['DefaultEndpointsProtocol'],
conn_settings['AccountName'],
service,
conn_settings['EndpointSuffix']
)
secondary = "{}-secondary.{}.{}".format(
conn_settings['AccountName'],
service,
conn_settings['EndpointSuffix']
)
except KeyError:
pass
if not primary:
try:
primary = "https://{}.{}.{}".format(
conn_settings['AccountName'],
service,
conn_settings.get('EndpointSuffix', SERVICE_HOST_BASE)
)
except KeyError:
raise ValueError("Connection string missing required connection details.")
return primary, secondary, credential
def url_quote(url):
return quote(url)
def url_unquote(url):
return unquote(url)
def encode_base64(data):
if isinstance(data, six.text_type):
data = data.encode('utf-8')
encoded = base64.b64encode(data)
return encoded.decode('utf-8')
def decode_base64(data):
if isinstance(data, six.text_type):
data = data.encode('utf-8')
decoded = base64.b64decode(data)
return decoded.decode('utf-8')
def _decode_base64_to_bytes(data):
if isinstance(data, six.text_type):
data = data.encode('utf-8')
return base64.b64decode(data)
def _sign_string(key, string_to_sign, key_is_base64=True):
if key_is_base64:
key = _decode_base64_to_bytes(key)
else:
if isinstance(key, six.text_type):
key = key.encode('utf-8')
if isinstance(string_to_sign, six.text_type):
string_to_sign = string_to_sign.encode('utf-8')
signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
digest = signed_hmac_sha256.digest()
encoded_digest = encode_base64(digest)
return encoded_digest
def serialize_iso(attr):
"""Serialize Datetime object into ISO-8601 formatted string.
:param Datetime attr: Object to be serialized.
:rtype: str
:raises: ValueError if format invalid.
"""
if not attr:
return None
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
try:
utc = attr.utctimetuple()
if utc.tm_year > 9999 or utc.tm_year < 1:
raise OverflowError("Hit max or min date")
date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
utc.tm_year, utc.tm_mon, utc.tm_mday,
utc.tm_hour, utc.tm_min, utc.tm_sec)
return date + 'Z'
except (ValueError, OverflowError) as err:
msg = "Unable to serialize datetime object."
raise_with_traceback(ValueError, msg, err)
except AttributeError as err:
msg = "ISO-8601 object must be valid Datetime object."
raise_with_traceback(TypeError, msg, err)
def get_length(data):
length = None
# Check if object implements the __len__ method, covers most input cases such as bytearray.
try:
length = len(data)
except: # pylint: disable=bare-except
pass
if not length:
# Check if the stream is a file-like stream object.
# If so, calculate the size using the file descriptor.
try:
fileno = data.fileno()
except (AttributeError, UnsupportedOperation):
pass
else:
return fstat(fileno).st_size
# If the stream is seekable and tell() is implemented, calculate the stream size.
try:
current_position = data.tell()
data.seek(0, SEEK_END)
length = data.tell() - current_position
data.seek(current_position, SEEK_SET)
except (AttributeError, UnsupportedOperation):
pass
return length
def read_length(data):
try:
if hasattr(data, 'read'):
read_data = b''
for chunk in iter(lambda: data.read(4096), b""):
read_data += chunk
return len(read_data), read_data
if hasattr(data, '__iter__'):
read_data = b''
for chunk in data:
read_data += chunk
return len(read_data), read_data
except: # pylint: disable=bare-except
pass
raise ValueError("Unable to calculate content length, please specify.")
def parse_length_from_content_range(content_range):
'''
Parses the blob length from the content range header: bytes 1-3/65537
'''
if content_range is None:
return None
# First, split in space and take the second half: '1-3/65537'
# Next, split on slash and take the second half: '65537'
# Finally, convert to an int: 65537
return int(content_range.split(' ', 1)[1].split('/', 1)[1])
def validate_and_format_range_headers(
start_range, end_range, start_range_required=True,
end_range_required=True, check_content_md5=False, align_to_page=False):
# If end range is provided, start range must be provided
if (start_range_required or end_range is not None) and start_range is None:
raise ValueError("start_range value cannot be None.")
if end_range_required and end_range is None:
raise ValueError("end_range value cannot be None.")
# Page ranges must be 512 aligned
if align_to_page:
if start_range is not None and start_range % 512 != 0:
raise ValueError("Invalid page blob start_range: {0}. "
"The size must be aligned to a 512-byte boundary.".format(start_range))
if end_range is not None and end_range % 512 != 511:
raise ValueError("Invalid page blob end_range: {0}. "
"The size must be aligned to a 512-byte boundary.".format(end_range))
# Format based on whether end_range is present
range_header = None
if end_range is not None:
range_header = 'bytes={0}-{1}'.format(start_range, end_range)
elif start_range is not None:
range_header = "bytes={0}-".format(start_range)
# Content MD5 can only be provided for a complete range less than 4MB in size
range_validation = None
if check_content_md5:
if start_range is None or end_range is None:
raise ValueError("Both start and end range requied for MD5 content validation.")
if end_range - start_range > 4 * 1024 * 1024:
raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
range_validation = 'true'
return range_header, range_validation
def normalize_headers(headers):
normalized = {}
for key, value in headers.items():
if key.startswith('x-ms-'):
key = key[5:]
normalized[key.lower().replace('-', '_')] = value
return normalized
def return_response_headers(response, deserialized, response_headers): # pylint: disable=unused-argument
return normalize_headers(response_headers)
def return_headers_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument
return normalize_headers(response_headers), deserialized
def return_context_and_deserialized(response, deserialized, response_headers): # pylint: disable=unused-argument
return response.location_mode, deserialized
def create_configuration(**kwargs):
# type: (**Any) -> Configuration
config = Configuration(**kwargs)
config.headers_policy = StorageHeadersPolicy(**kwargs)
config.user_agent_policy = StorageUserAgentPolicy(**kwargs)
config.retry_policy = kwargs.get('retry_policy') or ExponentialRetry(**kwargs)
config.redirect_policy = RedirectPolicy(**kwargs)
config.logging_policy = StorageLoggingPolicy(**kwargs)
config.proxy_policy = ProxyPolicy(**kwargs)
config.blob_settings = StorageBlobSettings(**kwargs)
return config
def create_pipeline(credential, **kwargs):
# type: (Any, **Any) -> Tuple[Configuration, Pipeline]
credential_policy = None
if hasattr(credential, 'get_token'):
credential_policy = BearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE)
elif isinstance(credential, SharedKeyCredentialPolicy):
credential_policy = credential
elif credential is not None:
raise TypeError("Unsupported credential: {}".format(credential))
config = kwargs.get('_configuration') or create_configuration(**kwargs)
if kwargs.get('_pipeline'):
return config, kwargs['_pipeline']
transport = kwargs.get('transport') # type: HttpTransport
if 'connection_timeout' not in kwargs:
kwargs['connection_timeout'] = DEFAULT_SOCKET_TIMEOUT
if not transport:
transport = RequestsTransport(**kwargs)
policies = [
QueueMessagePolicy(),
config.headers_policy,
config.user_agent_policy,
StorageContentValidation(),
StorageRequestHook(**kwargs),
credential_policy,
ContentDecodePolicy(),
config.redirect_policy,
StorageHosts(**kwargs),
config.retry_policy,
config.logging_policy,
StorageResponseHook(**kwargs),
]
return config, Pipeline(transport, policies=policies)
def parse_query(query_str):
sas_values = _QueryStringConstants.to_list()
parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()}
sas_params = ["{}={}".format(k, v) for k, v in parsed_query.items() if k in sas_values]
sas_token = None
if sas_params:
sas_token = '&'.join(sas_params)
return parsed_query.get('snapshot'), sas_token
def is_credential_sastoken(credential):
if not credential or not isinstance(credential, six.string_types):
return False
sas_values = _QueryStringConstants.to_list()
parsed_query = parse_qs(credential.lstrip('?'))
if parsed_query and all([k in sas_values for k in parsed_query.keys()]):
return True
return False
def add_metadata_headers(metadata):
headers = {}
if metadata:
for key, value in metadata.items():
headers['x-ms-meta-{}'.format(key)] = value
return headers
def process_storage_error(storage_error):
raise_error = HttpResponseError
error_code = storage_error.response.headers.get('x-ms-error-code')
error_message = storage_error.message
additional_data = {}
try:
error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response)
if error_body:
for info in error_body.iter():
if info.tag.lower() == 'code':
error_code = info.text
elif info.tag.lower() == 'message':
error_message = info.text
else:
additional_data[info.tag] = info.text
except DecodeError:
pass
try:
if error_code:
error_code = StorageErrorCode(error_code)
if error_code in [StorageErrorCode.condition_not_met,
StorageErrorCode.blob_overwritten]:
raise_error = ResourceModifiedError
if error_code in [StorageErrorCode.invalid_authentication_info,
StorageErrorCode.authentication_failed]:
raise_error = ClientAuthenticationError
if error_code in [StorageErrorCode.resource_not_found,
StorageErrorCode.blob_not_found,
StorageErrorCode.queue_not_found,
StorageErrorCode.container_not_found]:
raise_error = ResourceNotFoundError
if error_code in [StorageErrorCode.account_already_exists,
StorageErrorCode.account_being_created,
StorageErrorCode.resource_already_exists,
StorageErrorCode.resource_type_mismatch,
StorageErrorCode.blob_already_exists,
StorageErrorCode.queue_already_exists,
StorageErrorCode.container_already_exists,
StorageErrorCode.container_being_deleted,
StorageErrorCode.queue_being_deleted]:
raise_error = ResourceExistsError
except ValueError:
# Got an unknown error code
pass
try:
error_message += "\nErrorCode:{}".format(error_code.value)
except AttributeError:
error_message += "\nErrorCode:{}".format(error_code)
for name, info in additional_data.items():
error_message += "\n{}:{}".format(name, info)
error = raise_error(message=error_message, response=storage_error.response)
error.error_code = error_code
error.additional_info = additional_data
raise error
| 36.166392
| 125
| 0.651802
|
from typing import (Union, Optional, Any, Iterable, Dict, List, Type, Tuple,
TYPE_CHECKING
)
import base64
import hashlib
import hmac
import logging
from os import fstat
from io import (SEEK_END, SEEK_SET, UnsupportedOperation)
try:
from urllib.parse import quote, unquote, parse_qs
except ImportError:
from urlparse import parse_qs
from urllib2 import quote, unquote
import six
import isodate
from azure.core import Configuration
from azure.core.exceptions import raise_with_traceback
from azure.core.pipeline import Pipeline
from azure.core.pipeline.transport import RequestsTransport
from azure.core.pipeline.policies import (
RedirectPolicy,
ContentDecodePolicy,
BearerTokenCredentialPolicy,
ProxyPolicy)
from azure.core.exceptions import (
HttpResponseError,
ResourceNotFoundError,
ResourceModifiedError,
ResourceExistsError,
ClientAuthenticationError,
DecodeError)
from .constants import STORAGE_OAUTH_SCOPE, SERVICE_HOST_BASE, DEFAULT_SOCKET_TIMEOUT
from .models import LocationMode, StorageErrorCode
from .authentication import SharedKeyCredentialPolicy
from .policies import (
StorageBlobSettings,
StorageHeadersPolicy,
StorageUserAgentPolicy,
StorageContentValidation,
StorageRequestHook,
StorageResponseHook,
StorageLoggingPolicy,
StorageHosts,
QueueMessagePolicy,
ExponentialRetry)
if TYPE_CHECKING:
from datetime import datetime
from azure.core.pipeline.transport import HttpTransport
from azure.core.pipeline.policies import HTTPPolicy
from azure.core.exceptions import AzureError
_LOGGER = logging.getLogger(__name__)
class _QueryStringConstants(object):
SIGNED_SIGNATURE = 'sig'
SIGNED_PERMISSION = 'sp'
SIGNED_START = 'st'
SIGNED_EXPIRY = 'se'
SIGNED_RESOURCE = 'sr'
SIGNED_IDENTIFIER = 'si'
SIGNED_IP = 'sip'
SIGNED_PROTOCOL = 'spr'
SIGNED_VERSION = 'sv'
SIGNED_CACHE_CONTROL = 'rscc'
SIGNED_CONTENT_DISPOSITION = 'rscd'
SIGNED_CONTENT_ENCODING = 'rsce'
SIGNED_CONTENT_LANGUAGE = 'rscl'
SIGNED_CONTENT_TYPE = 'rsct'
START_PK = 'spk'
START_RK = 'srk'
END_PK = 'epk'
END_RK = 'erk'
SIGNED_RESOURCE_TYPES = 'srt'
SIGNED_SERVICES = 'ss'
@staticmethod
def to_list():
return [
_QueryStringConstants.SIGNED_SIGNATURE,
_QueryStringConstants.SIGNED_PERMISSION,
_QueryStringConstants.SIGNED_START,
_QueryStringConstants.SIGNED_EXPIRY,
_QueryStringConstants.SIGNED_RESOURCE,
_QueryStringConstants.SIGNED_IDENTIFIER,
_QueryStringConstants.SIGNED_IP,
_QueryStringConstants.SIGNED_PROTOCOL,
_QueryStringConstants.SIGNED_VERSION,
_QueryStringConstants.SIGNED_CACHE_CONTROL,
_QueryStringConstants.SIGNED_CONTENT_DISPOSITION,
_QueryStringConstants.SIGNED_CONTENT_ENCODING,
_QueryStringConstants.SIGNED_CONTENT_LANGUAGE,
_QueryStringConstants.SIGNED_CONTENT_TYPE,
_QueryStringConstants.START_PK,
_QueryStringConstants.START_RK,
_QueryStringConstants.END_PK,
_QueryStringConstants.END_RK,
_QueryStringConstants.SIGNED_RESOURCE_TYPES,
_QueryStringConstants.SIGNED_SERVICES,
]
class StorageAccountHostsMixin(object):
def __init__(
self, parsed_url, service, credential=None, **kwargs):
self._location_mode = kwargs.get('_location_mode', LocationMode.PRIMARY)
self._hosts = kwargs.get('_hosts')
self.scheme = parsed_url.scheme
if service not in ['blob', 'queue', 'file']:
raise ValueError("Invalid service: {}".format(service))
account = parsed_url.netloc.split(".{}.core.".format(service))
secondary_hostname = None
self.credential = format_shared_key_credential(account, credential)
if self.scheme.lower() != 'https' and hasattr(self.credential, 'get_token'):
raise ValueError("Token credential is only supported with HTTPS.")
if hasattr(self.credential, 'account_name'):
secondary_hostname = "{}-secondary.{}.{}".format(
self.credential.account_name, service, SERVICE_HOST_BASE)
if not self._hosts:
if len(account) > 1:
secondary_hostname = parsed_url.netloc.replace(
account[0],
account[0] + '-secondary')
if kwargs.get('secondary_hostname'):
secondary_hostname = kwargs['secondary_hostname']
self._hosts = {
LocationMode.PRIMARY: parsed_url.netloc,
LocationMode.SECONDARY: secondary_hostname}
self.require_encryption = kwargs.get('require_encryption', False)
self.key_encryption_key = kwargs.get('key_encryption_key')
self.key_resolver_function = kwargs.get('key_resolver_function')
self._config, self._pipeline = create_pipeline(self.credential, hosts=self._hosts, **kwargs)
def __enter__(self):
self._client.__enter__()
return self
def __exit__(self, *args):
self._client.__exit__(*args)
@property
def url(self):
return self._format_url(self._hosts[self._location_mode])
@property
def primary_endpoint(self):
return self._format_url(self._hosts[LocationMode.PRIMARY])
@property
def primary_hostname(self):
return self._hosts[LocationMode.PRIMARY]
@property
def secondary_endpoint(self):
if not self._hosts[LocationMode.SECONDARY]:
raise ValueError("No secondary host configured.")
return self._format_url(self._hosts[LocationMode.SECONDARY])
@property
def secondary_hostname(self):
return self._hosts[LocationMode.SECONDARY]
@property
def location_mode(self):
return self._location_mode
@location_mode.setter
def location_mode(self, value):
if self._hosts.get(value):
self._location_mode = value
self._client._config.url = self.url
else:
raise ValueError("No host URL for location mode: {}".format(value))
def _format_query_string(self, sas_token, credential, snapshot=None):
query_str = "?"
if snapshot:
query_str += 'snapshot={}&'.format(self.snapshot)
if sas_token and not credential:
query_str += sas_token
elif is_credential_sastoken(credential):
query_str += credential.lstrip('?')
credential = None
return query_str.rstrip('?&'), credential
def format_shared_key_credential(account, credential):
if isinstance(credential, six.string_types):
if len(account) < 2:
raise ValueError("Unable to determine account name for shared key credential.")
credential = {
'account_name': account[0],
'account_key': credential
}
if isinstance(credential, dict):
if 'account_name' not in credential:
raise ValueError("Shared key credential missing 'account_name")
if 'account_key' not in credential:
raise ValueError("Shared key credential missing 'account_key")
return SharedKeyCredentialPolicy(**credential)
return credential
service_connection_params = {
'blob': {'primary': 'BlobEndpoint', 'secondary': 'BlobSecondaryEndpoint'},
'queue': {'primary': 'QueueEndpoint', 'secondary': 'QueueSecondaryEndpoint'},
'file': {'primary': 'FileEndpoint', 'secondary': 'FileSecondaryEndpoint'},
}
def parse_connection_str(conn_str, credential, service):
conn_str = conn_str.rstrip(';')
conn_settings = dict([s.split('=', 1) for s in conn_str.split(';')])
endpoints = service_connection_params[service]
primary = None
secondary = None
if not credential:
try:
credential = {
'account_name': conn_settings['AccountName'],
'account_key': conn_settings['AccountKey']
}
except KeyError:
credential = conn_settings.get('SharedAccessSignature')
if endpoints['primary'] in conn_settings:
primary = conn_settings[endpoints['primary']]
if endpoints['secondary'] in conn_settings:
secondary = conn_settings[endpoints['secondary']]
else:
if endpoints['secondary'] in conn_settings:
raise ValueError("Connection string specifies only secondary endpoint.")
try:
primary = "{}://{}.{}.{}".format(
conn_settings['DefaultEndpointsProtocol'],
conn_settings['AccountName'],
service,
conn_settings['EndpointSuffix']
)
secondary = "{}-secondary.{}.{}".format(
conn_settings['AccountName'],
service,
conn_settings['EndpointSuffix']
)
except KeyError:
pass
if not primary:
try:
primary = "https://{}.{}.{}".format(
conn_settings['AccountName'],
service,
conn_settings.get('EndpointSuffix', SERVICE_HOST_BASE)
)
except KeyError:
raise ValueError("Connection string missing required connection details.")
return primary, secondary, credential
def url_quote(url):
return quote(url)
def url_unquote(url):
return unquote(url)
def encode_base64(data):
if isinstance(data, six.text_type):
data = data.encode('utf-8')
encoded = base64.b64encode(data)
return encoded.decode('utf-8')
def decode_base64(data):
if isinstance(data, six.text_type):
data = data.encode('utf-8')
decoded = base64.b64decode(data)
return decoded.decode('utf-8')
def _decode_base64_to_bytes(data):
if isinstance(data, six.text_type):
data = data.encode('utf-8')
return base64.b64decode(data)
def _sign_string(key, string_to_sign, key_is_base64=True):
if key_is_base64:
key = _decode_base64_to_bytes(key)
else:
if isinstance(key, six.text_type):
key = key.encode('utf-8')
if isinstance(string_to_sign, six.text_type):
string_to_sign = string_to_sign.encode('utf-8')
signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
digest = signed_hmac_sha256.digest()
encoded_digest = encode_base64(digest)
return encoded_digest
def serialize_iso(attr):
if not attr:
return None
if isinstance(attr, str):
attr = isodate.parse_datetime(attr)
try:
utc = attr.utctimetuple()
if utc.tm_year > 9999 or utc.tm_year < 1:
raise OverflowError("Hit max or min date")
date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
utc.tm_year, utc.tm_mon, utc.tm_mday,
utc.tm_hour, utc.tm_min, utc.tm_sec)
return date + 'Z'
except (ValueError, OverflowError) as err:
msg = "Unable to serialize datetime object."
raise_with_traceback(ValueError, msg, err)
except AttributeError as err:
msg = "ISO-8601 object must be valid Datetime object."
raise_with_traceback(TypeError, msg, err)
def get_length(data):
length = None
try:
length = len(data)
except:
pass
if not length:
try:
fileno = data.fileno()
except (AttributeError, UnsupportedOperation):
pass
else:
return fstat(fileno).st_size
try:
current_position = data.tell()
data.seek(0, SEEK_END)
length = data.tell() - current_position
data.seek(current_position, SEEK_SET)
except (AttributeError, UnsupportedOperation):
pass
return length
def read_length(data):
try:
if hasattr(data, 'read'):
read_data = b''
for chunk in iter(lambda: data.read(4096), b""):
read_data += chunk
return len(read_data), read_data
if hasattr(data, '__iter__'):
read_data = b''
for chunk in data:
read_data += chunk
return len(read_data), read_data
except:
pass
raise ValueError("Unable to calculate content length, please specify.")
def parse_length_from_content_range(content_range):
if content_range is None:
return None
return int(content_range.split(' ', 1)[1].split('/', 1)[1])
def validate_and_format_range_headers(
start_range, end_range, start_range_required=True,
end_range_required=True, check_content_md5=False, align_to_page=False):
if (start_range_required or end_range is not None) and start_range is None:
raise ValueError("start_range value cannot be None.")
if end_range_required and end_range is None:
raise ValueError("end_range value cannot be None.")
if align_to_page:
if start_range is not None and start_range % 512 != 0:
raise ValueError("Invalid page blob start_range: {0}. "
"The size must be aligned to a 512-byte boundary.".format(start_range))
if end_range is not None and end_range % 512 != 511:
raise ValueError("Invalid page blob end_range: {0}. "
"The size must be aligned to a 512-byte boundary.".format(end_range))
range_header = None
if end_range is not None:
range_header = 'bytes={0}-{1}'.format(start_range, end_range)
elif start_range is not None:
range_header = "bytes={0}-".format(start_range)
range_validation = None
if check_content_md5:
if start_range is None or end_range is None:
raise ValueError("Both start and end range requied for MD5 content validation.")
if end_range - start_range > 4 * 1024 * 1024:
raise ValueError("Getting content MD5 for a range greater than 4MB is not supported.")
range_validation = 'true'
return range_header, range_validation
def normalize_headers(headers):
normalized = {}
for key, value in headers.items():
if key.startswith('x-ms-'):
key = key[5:]
normalized[key.lower().replace('-', '_')] = value
return normalized
def return_response_headers(response, deserialized, response_headers):
return normalize_headers(response_headers)
def return_headers_and_deserialized(response, deserialized, response_headers):
return normalize_headers(response_headers), deserialized
def return_context_and_deserialized(response, deserialized, response_headers):
return response.location_mode, deserialized
def create_configuration(**kwargs):
config = Configuration(**kwargs)
config.headers_policy = StorageHeadersPolicy(**kwargs)
config.user_agent_policy = StorageUserAgentPolicy(**kwargs)
config.retry_policy = kwargs.get('retry_policy') or ExponentialRetry(**kwargs)
config.redirect_policy = RedirectPolicy(**kwargs)
config.logging_policy = StorageLoggingPolicy(**kwargs)
config.proxy_policy = ProxyPolicy(**kwargs)
config.blob_settings = StorageBlobSettings(**kwargs)
return config
def create_pipeline(credential, **kwargs):
credential_policy = None
if hasattr(credential, 'get_token'):
credential_policy = BearerTokenCredentialPolicy(credential, STORAGE_OAUTH_SCOPE)
elif isinstance(credential, SharedKeyCredentialPolicy):
credential_policy = credential
elif credential is not None:
raise TypeError("Unsupported credential: {}".format(credential))
config = kwargs.get('_configuration') or create_configuration(**kwargs)
if kwargs.get('_pipeline'):
return config, kwargs['_pipeline']
transport = kwargs.get('transport')
if 'connection_timeout' not in kwargs:
kwargs['connection_timeout'] = DEFAULT_SOCKET_TIMEOUT
if not transport:
transport = RequestsTransport(**kwargs)
policies = [
QueueMessagePolicy(),
config.headers_policy,
config.user_agent_policy,
StorageContentValidation(),
StorageRequestHook(**kwargs),
credential_policy,
ContentDecodePolicy(),
config.redirect_policy,
StorageHosts(**kwargs),
config.retry_policy,
config.logging_policy,
StorageResponseHook(**kwargs),
]
return config, Pipeline(transport, policies=policies)
def parse_query(query_str):
sas_values = _QueryStringConstants.to_list()
parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()}
sas_params = ["{}={}".format(k, v) for k, v in parsed_query.items() if k in sas_values]
sas_token = None
if sas_params:
sas_token = '&'.join(sas_params)
return parsed_query.get('snapshot'), sas_token
def is_credential_sastoken(credential):
if not credential or not isinstance(credential, six.string_types):
return False
sas_values = _QueryStringConstants.to_list()
parsed_query = parse_qs(credential.lstrip('?'))
if parsed_query and all([k in sas_values for k in parsed_query.keys()]):
return True
return False
def add_metadata_headers(metadata):
headers = {}
if metadata:
for key, value in metadata.items():
headers['x-ms-meta-{}'.format(key)] = value
return headers
def process_storage_error(storage_error):
raise_error = HttpResponseError
error_code = storage_error.response.headers.get('x-ms-error-code')
error_message = storage_error.message
additional_data = {}
try:
error_body = ContentDecodePolicy.deserialize_from_http_generics(storage_error.response)
if error_body:
for info in error_body.iter():
if info.tag.lower() == 'code':
error_code = info.text
elif info.tag.lower() == 'message':
error_message = info.text
else:
additional_data[info.tag] = info.text
except DecodeError:
pass
try:
if error_code:
error_code = StorageErrorCode(error_code)
if error_code in [StorageErrorCode.condition_not_met,
StorageErrorCode.blob_overwritten]:
raise_error = ResourceModifiedError
if error_code in [StorageErrorCode.invalid_authentication_info,
StorageErrorCode.authentication_failed]:
raise_error = ClientAuthenticationError
if error_code in [StorageErrorCode.resource_not_found,
StorageErrorCode.blob_not_found,
StorageErrorCode.queue_not_found,
StorageErrorCode.container_not_found]:
raise_error = ResourceNotFoundError
if error_code in [StorageErrorCode.account_already_exists,
StorageErrorCode.account_being_created,
StorageErrorCode.resource_already_exists,
StorageErrorCode.resource_type_mismatch,
StorageErrorCode.blob_already_exists,
StorageErrorCode.queue_already_exists,
StorageErrorCode.container_already_exists,
StorageErrorCode.container_being_deleted,
StorageErrorCode.queue_being_deleted]:
raise_error = ResourceExistsError
except ValueError:
pass
try:
error_message += "\nErrorCode:{}".format(error_code.value)
except AttributeError:
error_message += "\nErrorCode:{}".format(error_code)
for name, info in additional_data.items():
error_message += "\n{}:{}".format(name, info)
error = raise_error(message=error_message, response=storage_error.response)
error.error_code = error_code
error.additional_info = additional_data
raise error
| true
| true
|
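To make the connection-string handling in the row above concrete, here is a hedged sketch of calling parse_connection_str as dumped; the import path mirrors this row's file path and assumes that preview-era azure-storage-queue layout, and the account name and key are made up (the key is just base64 for "mykey"):

```python
from azure.storage.queue._shared.utils import parse_connection_str

conn_str = (
    "DefaultEndpointsProtocol=https;AccountName=myaccount;"
    "AccountKey=bXlrZXk=;EndpointSuffix=core.windows.net"
)
# No explicit endpoints in the string, so primary/secondary are derived
# from AccountName, the service name, and EndpointSuffix.
primary, secondary, credential = parse_connection_str(conn_str, None, "queue")
print(primary)     # https://myaccount.queue.core.windows.net
print(secondary)   # myaccount-secondary.queue.core.windows.net
print(credential)  # {'account_name': 'myaccount', 'account_key': 'bXlrZXk='}
```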
1c4530276fcdb695658f172383b6ef0f338a039e
| 47
|
py
|
Python
|
deepclaw/utils/success_label/__init__.py
|
Nokkxz/ME336-Yellow-Team-Project
|
5b6d65ecb134049ba3c5d27c37f521ada79a913f
|
[
"MIT"
] | 5
|
2020-06-24T03:47:00.000Z
|
2021-10-13T03:35:38.000Z
|
deepclaw/utils/success_label/__init__.py
|
Nokkxz/ME336-Yellow-Team-Project
|
5b6d65ecb134049ba3c5d27c37f521ada79a913f
|
[
"MIT"
] | null | null | null |
deepclaw/utils/success_label/__init__.py
|
Nokkxz/ME336-Yellow-Team-Project
|
5b6d65ecb134049ba3c5d27c37f521ada79a913f
|
[
"MIT"
] | 3
|
2020-06-18T09:25:39.000Z
|
2021-04-18T03:51:08.000Z
|
__all__ = ['DetectForeground', 'success_label']
| 47
| 47
| 0.765957
|
__all__ = ['DetectForeground', 'success_label']
| true
| true
|
1c453071197a502639b6057e02299e32868cc15f
| 392
|
py
|
Python
|
generate-schema.py
|
akhmetzaki/amplitude-bigquery
|
9c637198aa85518d72acb1184f06279497de3189
|
[
"MIT"
] | null | null | null |
generate-schema.py
|
akhmetzaki/amplitude-bigquery
|
9c637198aa85518d72acb1184f06279497de3189
|
[
"MIT"
] | null | null | null |
generate-schema.py
|
akhmetzaki/amplitude-bigquery
|
9c637198aa85518d72acb1184f06279497de3189
|
[
"MIT"
] | null | null | null |
import pandas as pd
import json
def parse_json():
with open('bigquery-schema-events.json', 'r') as f:
events_df = pd.DataFrame(json.loads(f.read()))
with open('bigquery-schema-events-properties.json') as f:
prop_df = pd.DataFrame(json.loads(f.read()))
events_df.to_csv('events.csv')
prop_df.to_csv('props.csv')
if __name__ == '__main__':
parse_json()
| 23.058824
| 61
| 0.660714
|
import pandas as pd
import json
def parse_json():
with open('bigquery-schema-events.json', 'r') as f:
events_df = pd.DataFrame(json.loads(f.read()))
with open('bigquery-schema-events-properties.json') as f:
prop_df = pd.DataFrame(json.loads(f.read()))
events_df.to_csv('events.csv')
prop_df.to_csv('props.csv')
if __name__ == '__main__':
parse_json()
| true
| true
|
1c4530e1085d4b99291f8b076baa1a404e14de62
| 1,161
|
py
|
Python
|
openpmd_updater/transforms/v2_0_0/Version.py
|
openPMD/openPMD-updater
|
1f334c99093b7175af990d57879ea24ff5b4cb01
|
[
"ISC"
] | 1
|
2019-03-28T13:29:44.000Z
|
2019-03-28T13:29:44.000Z
|
openpmd_updater/transforms/v2_0_0/Version.py
|
openPMD/openPMD-updater
|
1f334c99093b7175af990d57879ea24ff5b4cb01
|
[
"ISC"
] | 3
|
2018-11-06T15:51:42.000Z
|
2020-02-03T16:38:29.000Z
|
openpmd_updater/transforms/v2_0_0/Version.py
|
openPMD/openPMD-updater
|
1f334c99093b7175af990d57879ea24ff5b4cb01
|
[
"ISC"
] | 2
|
2018-11-07T18:10:21.000Z
|
2020-01-22T03:29:02.000Z
|
"""
This file is part of the openPMD-updater.
Copyright 2018 openPMD contributors
Authors: Axel Huebl
License: ISC
"""
from openpmd_updater.transforms.ITransform import ITransform
import numpy as np
class Version(ITransform):
"""
Transforms the openPMD version.
openPMD standard: 1.*.* -> 2.0.0
Related openPMD-standard issues:
https://github.com/openPMD/openPMD-standard/projects/3
"""
"""Name and description of the transformation"""
name = "version", "replace openPMD version identifier with new version"
"""Minimum openPMD standard version that is supported by this transformation"""
min_version = "1.0.0"
"""openPMD standard version is fulfulled by this transformation"""
to_version = "2.0.0"
def __init__(self, backend):
"""Open a file"""
self.fb = backend
def transform(self, in_place=True):
"""Perform transformation"""
if not in_place:
raise NotImplementedError("Only in-place transformation implemented!")
self.fb.cd(None)
self.fb.del_attr("openPMD")
self.fb.add_attr("openPMD", np.string_(Version.to_version))
| 26.386364
| 83
| 0.676141
|
from openpmd_updater.transforms.ITransform import ITransform
import numpy as np
class Version(ITransform):
name = "version", "replace openPMD version identifier with new version"
min_version = "1.0.0"
to_version = "2.0.0"
def __init__(self, backend):
self.fb = backend
def transform(self, in_place=True):
if not in_place:
raise NotImplementedError("Only in-place transformation implemented!")
self.fb.cd(None)
self.fb.del_attr("openPMD")
self.fb.add_attr("openPMD", np.string_(Version.to_version))
| true
| true
|
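The Version transform above only rewrites the root openPMD attribute. A minimal sketch of driving it follows; the InMemoryBackend below is hypothetical (the real updater supplies an HDF5 backend exposing the same cd/del_attr/add_attr interface), and the import path mirrors this row's file path:

```python
from openpmd_updater.transforms.v2_0_0.Version import Version

class InMemoryBackend:
    """Hypothetical stand-in for the updater's file backend."""
    def __init__(self):
        self.attrs = {"openPMD": b"1.1.0"}

    def cd(self, path):
        # the transform calls cd(None) to move to the file root
        assert path is None

    def del_attr(self, name):
        del self.attrs[name]

    def add_attr(self, name, value):
        self.attrs[name] = value

backend = InMemoryBackend()
Version(backend).transform()
print(backend.attrs["openPMD"])  # b'2.0.0' (stored as a numpy bytes_ scalar)
```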
1c4530f68bcd141ac33d9dab2284d8d8685dbe9c
| 250
|
py
|
Python
|
nsd1903/devops/day01/mtprint4.py
|
MrWangwf/nsd2019
|
5e859b4b1926dc098d236be3720779c50d0a55fc
|
[
"Apache-2.0"
] | 1
|
2019-09-19T04:53:22.000Z
|
2019-09-19T04:53:22.000Z
|
nsd1903/devops/day01/mtprint4.py
|
MrWangwf/nsd2019
|
5e859b4b1926dc098d236be3720779c50d0a55fc
|
[
"Apache-2.0"
] | null | null | null |
nsd1903/devops/day01/mtprint4.py
|
MrWangwf/nsd2019
|
5e859b4b1926dc098d236be3720779c50d0a55fc
|
[
"Apache-2.0"
] | 1
|
2021-12-28T04:26:02.000Z
|
2021-12-28T04:26:02.000Z
|
import threading
class MyClass:
def __call__(self, a, b, c):
print('Hello', a, b, c)
if __name__ == '__main__':
for i in range(3):
t = threading.Thread(target=MyClass(), args=(10, 20, 30))
t.start() # target(*args)
| 22.727273
| 65
| 0.572
|
import threading
class MyClass:
def __call__(self, a, b, c):
print('Hello', a, b, c)
if __name__ == '__main__':
for i in range(3):
t = threading.Thread(target=MyClass(), args=(10, 20, 30))
t.start()
| true
| true
|
1c4531039560c0133de2c7dd58e494b440bb52fe
| 3,438
|
py
|
Python
|
d2relay.py
|
seichter/d2relay
|
23e3e727b4de80e6e685964b7339ef0652d9ed8d
|
[
"MIT"
] | null | null | null |
d2relay.py
|
seichter/d2relay
|
23e3e727b4de80e6e685964b7339ef0652d9ed8d
|
[
"MIT"
] | null | null | null |
d2relay.py
|
seichter/d2relay
|
23e3e727b4de80e6e685964b7339ef0652d9ed8d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
D2 is a minimal relay tool to output Leica DISTO D2 data to the console
Copyright (c) 2021 Hartmut Seichter
Distributed under the terms of the MIT License
"""
import gatt
import struct
debug_mode = False
keepalive_hack = True
class DISTOManager(gatt.DeviceManager):
def __init__(self, adapter_name):
super().__init__(adapter_name)
def run(self):
super().run()
def on_idle(self):
pass
class DISTO(gatt.Device):
def connect_succeeded(self):
super().connect_succeeded()
print("[%s] Connected" % (self.mac_address))
def connect_failed(self, error):
super().connect_failed(error)
print("[%s] Connection failed: %s" % (self.mac_address, str(error)))
def disconnect_succeeded(self):
super().disconnect_succeeded()
print("[%s] Disconnected" % (self.mac_address))
# awful hack to keep the DISTO from disconnecting
# this will drain your DISTO's battery!
if keepalive_hack:
self.connect()
def services_resolved(self):
super().services_resolved()
if debug_mode:
print("[%s] Resolved services" % (self.mac_address))
for service in self.services:
if debug_mode:
print("[%s] Service [%s]" % (self.mac_address, service.uuid))
for characteristic in service.characteristics:
if debug_mode:
print("[%s] Characteristic [%s]" % (self.mac_address, characteristic.uuid))
characteristic.read_value()
characteristic.enable_notifications()
def characteristic_value_updated(self, characteristic, value):
if debug_mode:
# this is here for debugging ... there are many more things to implement
# if characteristic.uuid == '3ab10102-f831-4395-b29d-570977d5bf94':
print(characteristic.uuid,':',type(value),len(value)) # ,int.from_bytes(value,byteorder='big', signed=False))
# else:
# print( characteristic.uuid, ":", value.decode("utf-8"))
print('\traw :',value)
if len(value) == 2:
print("\tuint16 :",struct.unpack('>H',value)[0])
if len(value) == 4:
print("\tfloat :",struct.unpack('f',value)[0])
elif len(value) == 8:
print('\tdouble:',struct.unpack('d',value)[0])
elif characteristic.uuid == '3ab10101-f831-4395-b29d-570977d5bf94':
self.report_measurement(value)
# Vendor ID
elif characteristic.uuid == '00002a29-0000-1000-8000-00805f9b34fb':
# for whatever reason the D2 reports itself as the BLE SoC
# that's driving it - a Nordic Semi nRF51822 - a 16 MHz Cortex-M0
is_leica = value.decode('utf-8') == 'nRF51822'
elif characteristic.uuid == '00002a1a-0000-1000-8000-00805f9b34fb':
print('Battery level',value)
def report_measurement(self,value):
float_val = struct.unpack('f',value)[0]
print(round(float_val,3),'m')
# note: Make it configurable
manager = DISTOManager(adapter_name='hci0')
# well this is only for the D2 but other BLE devices by Leica should
# work similar - usually they just have more functions
device = DISTO(mac_address= 'FD:8B:B0:50:BA:A3', manager=manager)
device.connect()
manager.run()
| 33.705882
| 121
| 0.613729
|
import gatt
import struct
debug_mode = False
keepalive_hack = True
class DISTOManager(gatt.DeviceManager):
def __init__(self, adapter_name):
super().__init__(adapter_name)
def run(self):
super().run()
def on_idle(self):
pass
class DISTO(gatt.Device):
def connect_succeeded(self):
super().connect_succeeded()
print("[%s] Connected" % (self.mac_address))
def connect_failed(self, error):
super().connect_failed(error)
print("[%s] Connection failed: %s" % (self.mac_address, str(error)))
def disconnect_succeeded(self):
super().disconnect_succeeded()
print("[%s] Disconnected" % (self.mac_address))
if keepalive_hack:
self.connect()
def services_resolved(self):
super().services_resolved()
if debug_mode:
print("[%s] Resolved services" % (self.mac_address))
for service in self.services:
if debug_mode:
print("[%s] Service [%s]" % (self.mac_address, service.uuid))
for characteristic in service.characteristics:
if debug_mode:
print("[%s] Characteristic [%s]" % (self.mac_address, characteristic.uuid))
characteristic.read_value()
characteristic.enable_notifications()
def characteristic_value_updated(self, characteristic, value):
if debug_mode:
print(characteristic.uuid,':',type(value),len(value))
print('\traw :',value)
if len(value) == 2:
print("\tuint16 :",struct.unpack('>H',value)[0])
if len(value) == 4:
print("\tfloat :",struct.unpack('f',value)[0])
elif len(value) == 8:
print('\tdouble:',struct.unpack('d',value)[0])
elif characteristic.uuid == '3ab10101-f831-4395-b29d-570977d5bf94':
self.report_measurement(value)
elif characteristic.uuid == '00002a29-0000-1000-8000-00805f9b34fb':
is_leica = value.decode('utf-8') == 'nRF51822'
elif characteristic.uuid == '00002a1a-0000-1000-8000-00805f9b34fb':
print('Battery level',value)
def report_measurement(self,value):
float_val = struct.unpack('f',value)[0]
print(round(float_val,3),'m')
manager = DISTOManager(adapter_name='hci0')
device = DISTO(mac_address= 'FD:8B:B0:50:BA:A3', manager=manager)
device.connect()
manager.run()
| true
| true
|
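The measurement decoding in report_measurement above is a plain struct round-trip: the D2 delivers distance readings as 4-byte IEEE-754 floats. A small self-contained example (the packed value is simulated, not real D2 traffic):

```python
import struct

raw = struct.pack('f', 1.234)        # simulate a 4-byte characteristic value
metres = struct.unpack('f', raw)[0]  # same unpacking as report_measurement
print(round(metres, 3), 'm')         # -> 1.234 m
```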
1c4531cb438cfc0c8e48462c8037d07548cfe931
| 44,723
|
py
|
Python
|
nova/api/openstack/wsgi.py
|
nicholaskuechler/nova
|
ff412c3888b234eb123161cc4e6d0d0d69c0004e
|
[
"Apache-2.0"
] | null | null | null |
nova/api/openstack/wsgi.py
|
nicholaskuechler/nova
|
ff412c3888b234eb123161cc4e6d0d0d69c0004e
|
[
"Apache-2.0"
] | 1
|
2021-03-21T11:39:52.000Z
|
2021-03-21T11:39:52.000Z
|
nova/api/openstack/wsgi.py
|
nicholaskuechler/nova
|
ff412c3888b234eb123161cc4e6d0d0d69c0004e
|
[
"Apache-2.0"
] | 1
|
2021-03-21T11:37:33.000Z
|
2021-03-21T11:37:33.000Z
|
# Copyright 2013 IBM Corp.
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import inspect
import math
import time
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import strutils
import six
import webob
from nova.api.openstack import api_version_request as api_version
from nova.api.openstack import versioned_method
from nova import exception
from nova import i18n
from nova.i18n import _
from nova.i18n import _LE
from nova.i18n import _LI
from nova import utils
from nova import wsgi
LOG = logging.getLogger(__name__)
_SUPPORTED_CONTENT_TYPES = (
'application/json',
'application/vnd.openstack.compute+json',
)
_MEDIA_TYPE_MAP = {
'application/vnd.openstack.compute+json': 'json',
'application/json': 'json',
}
# These are typically automatically created by routes as either defaults
# collection or member methods.
_ROUTES_METHODS = [
'create',
'delete',
'show',
'update',
]
_METHODS_WITH_BODY = [
'POST',
'PUT',
]
# The default api version request if none is requested in the headers
# Note(cyeoh): This only applies for the v2.1 API once microversions
# support is fully merged. It does not affect the V2 API.
DEFAULT_API_VERSION = "2.1"
# name of attribute to keep version method information
VER_METHOD_ATTR = 'versioned_methods'
# Name of header used by clients to request a specific version
# of the REST API
API_VERSION_REQUEST_HEADER = 'X-OpenStack-Nova-API-Version'
ENV_LEGACY_V2 = 'openstack.legacy_v2'
def get_supported_content_types():
return _SUPPORTED_CONTENT_TYPES
def get_media_map():
return dict(_MEDIA_TYPE_MAP.items())
class Request(wsgi.Request):
"""Add some OpenStack API-specific logic to the base webob.Request."""
def __init__(self, *args, **kwargs):
super(Request, self).__init__(*args, **kwargs)
self._extension_data = {'db_items': {}}
if not hasattr(self, 'api_version_request'):
self.api_version_request = api_version.APIVersionRequest()
def cache_db_items(self, key, items, item_key='id'):
"""Allow API methods to store objects from a DB query to be
used by API extensions within the same API request.
An instance of this class only lives for the lifetime of a
single API request, so there's no need to implement full
cache management.
"""
db_items = self._extension_data['db_items'].setdefault(key, {})
for item in items:
db_items[item[item_key]] = item
def get_db_items(self, key):
"""Allow an API extension to get previously stored objects within
the same API request.
Note that the object data will be slightly stale.
"""
return self._extension_data['db_items'][key]
def get_db_item(self, key, item_key):
"""Allow an API extension to get a previously stored object
within the same API request.
Note that the object data will be slightly stale.
"""
return self.get_db_items(key).get(item_key)
def cache_db_instances(self, instances):
self.cache_db_items('instances', instances, 'uuid')
def cache_db_instance(self, instance):
self.cache_db_items('instances', [instance], 'uuid')
def get_db_instances(self):
return self.get_db_items('instances')
def get_db_instance(self, instance_uuid):
return self.get_db_item('instances', instance_uuid)
def cache_db_flavors(self, flavors):
self.cache_db_items('flavors', flavors, 'flavorid')
def cache_db_flavor(self, flavor):
self.cache_db_items('flavors', [flavor], 'flavorid')
def get_db_flavors(self):
return self.get_db_items('flavors')
def get_db_flavor(self, flavorid):
return self.get_db_item('flavors', flavorid)
def cache_db_compute_nodes(self, compute_nodes):
self.cache_db_items('compute_nodes', compute_nodes, 'id')
def cache_db_compute_node(self, compute_node):
self.cache_db_items('compute_nodes', [compute_node], 'id')
def get_db_compute_nodes(self):
return self.get_db_items('compute_nodes')
def get_db_compute_node(self, id):
return self.get_db_item('compute_nodes', id)
def best_match_content_type(self):
"""Determine the requested response content-type."""
if 'nova.best_content_type' not in self.environ:
# Calculate the best MIME type
content_type = None
# Check URL path suffix
parts = self.path.rsplit('.', 1)
if len(parts) > 1:
possible_type = 'application/' + parts[1]
if possible_type in get_supported_content_types():
content_type = possible_type
if not content_type:
content_type = self.accept.best_match(
get_supported_content_types())
self.environ['nova.best_content_type'] = (content_type or
'application/json')
return self.environ['nova.best_content_type']
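    # For example, a GET for "/servers/detail.json" resolves to
    # "application/json" via the URL-suffix check above, while a request
    # with no recognized suffix falls back to Accept-header matching.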
def get_content_type(self):
"""Determine content type of the request body.
Does not do any body introspection, only checks header
"""
if "Content-Type" not in self.headers:
return None
content_type = self.content_type
# NOTE(markmc): text/plain is the default for eventlet and
# other webservers which use mimetools.Message.gettype()
# whereas twisted defaults to ''.
if not content_type or content_type == 'text/plain':
return None
if content_type not in get_supported_content_types():
raise exception.InvalidContentType(content_type=content_type)
return content_type
def best_match_language(self):
"""Determine the best available language for the request.
:returns: the best language match or None if the 'Accept-Language'
header was not available in the request.
"""
if not self.accept_language:
return None
return self.accept_language.best_match(
i18n.get_available_languages())
def set_api_version_request(self):
"""Set API version request based on the request header information."""
if API_VERSION_REQUEST_HEADER in self.headers:
hdr_string = self.headers[API_VERSION_REQUEST_HEADER]
# 'latest' is a special keyword which is equivalent to requesting
# the maximum version of the API supported
if hdr_string == 'latest':
self.api_version_request = api_version.max_api_version()
else:
self.api_version_request = api_version.APIVersionRequest(
hdr_string)
# Check that the version requested is within the global
# minimum/maximum of supported API versions
if not self.api_version_request.matches(
api_version.min_api_version(),
api_version.max_api_version()):
raise exception.InvalidGlobalAPIVersion(
req_ver=self.api_version_request.get_string(),
min_ver=api_version.min_api_version().get_string(),
max_ver=api_version.max_api_version().get_string())
else:
self.api_version_request = api_version.APIVersionRequest(
api_version.DEFAULT_API_VERSION)
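    # For example, the header "X-OpenStack-Nova-API-Version: 2.10" selects
    # microversion 2.10, "X-OpenStack-Nova-API-Version: latest" selects the
    # maximum supported version, and a missing header falls back to
    # DEFAULT_API_VERSION.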
def set_legacy_v2(self):
self.environ[ENV_LEGACY_V2] = True
def is_legacy_v2(self):
return self.environ.get(ENV_LEGACY_V2, False)
class ActionDispatcher(object):
"""Maps method name to local methods through action name."""
def dispatch(self, *args, **kwargs):
"""Find and call local method."""
action = kwargs.pop('action', 'default')
action_method = getattr(self, str(action), self.default)
return action_method(*args, **kwargs)
def default(self, data):
raise NotImplementedError()
class TextDeserializer(ActionDispatcher):
"""Default request body deserialization."""
def deserialize(self, datastring, action='default'):
return self.dispatch(datastring, action=action)
def default(self, datastring):
return {}
class JSONDeserializer(TextDeserializer):
def _from_json(self, datastring):
try:
return jsonutils.loads(datastring)
except ValueError:
msg = _("cannot understand JSON")
raise exception.MalformedRequestBody(reason=msg)
def default(self, datastring):
return {'body': self._from_json(datastring)}
class DictSerializer(ActionDispatcher):
"""Default request body serialization."""
def serialize(self, data, action='default'):
return self.dispatch(data, action=action)
def default(self, data):
return ""
class JSONDictSerializer(DictSerializer):
"""Default JSON request body serialization."""
def default(self, data):
return jsonutils.dumps(data)
def serializers(**serializers):
"""Attaches serializers to a method.
This decorator associates a dictionary of serializers with a
method. Note that the function attributes are directly
manipulated; the method is not wrapped.
"""
def decorator(func):
if not hasattr(func, 'wsgi_serializers'):
func.wsgi_serializers = {}
func.wsgi_serializers.update(serializers)
return func
return decorator
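# Illustrative use of @serializers; MyJSONSerializer stands in for a
# hypothetical DictSerializer subclass and is not part of this module:
#
#     @serializers(json=MyJSONSerializer)
#     def show(self, req, id):
#         ...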
def deserializers(**deserializers):
"""Attaches deserializers to a method.
This decorator associates a dictionary of deserializers with a
method. Note that the function attributes are directly
manipulated; the method is not wrapped.
"""
def decorator(func):
if not hasattr(func, 'wsgi_deserializers'):
func.wsgi_deserializers = {}
func.wsgi_deserializers.update(deserializers)
return func
return decorator
def response(code):
"""Attaches response code to a method.
This decorator associates a response code with a method. Note
that the function attributes are directly manipulated; the method
is not wrapped.
"""
def decorator(func):
func.wsgi_code = code
return func
return decorator
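# Illustrative use of @response, marking a method whose successful result
# should be returned with HTTP 202 instead of the default 200:
#
#     @response(202)
#     def create(self, req, body):
#         ...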
class ResponseObject(object):
"""Bundles a response object with appropriate serializers.
Object that app methods may return in order to bind alternate
serializers with a response object to be serialized. Its use is
optional.
"""
def __init__(self, obj, code=None, headers=None, **serializers):
"""Binds serializers with an object.
Takes keyword arguments akin to the @serializer() decorator
for specifying serializers. Serializers specified will be
given preference over default serializers or method-specific
serializers on return.
"""
self.obj = obj
self.serializers = serializers
self._default_code = 200
self._code = code
self._headers = headers or {}
self.serializer = None
self.media_type = None
def __getitem__(self, key):
"""Retrieves a header with the given name."""
return self._headers[key.lower()]
def __setitem__(self, key, value):
"""Sets a header with the given name to the given value."""
self._headers[key.lower()] = value
def __delitem__(self, key):
"""Deletes the header with the given name."""
del self._headers[key.lower()]
def _bind_method_serializers(self, meth_serializers):
"""Binds method serializers with the response object.
Binds the method serializers with the response object.
Serializers specified to the constructor will take precedence
over serializers specified to this method.
:param meth_serializers: A dictionary with keys mapping to
response types and values containing
serializer objects.
"""
# We can't use update because that would be the wrong
# precedence
for mtype, serializer in meth_serializers.items():
self.serializers.setdefault(mtype, serializer)
def get_serializer(self, content_type, default_serializers=None):
"""Returns the serializer for the wrapped object.
Returns the serializer for the wrapped object subject to the
indicated content type. If no serializer matching the content
type is attached, an appropriate serializer drawn from the
default serializers will be used. If no appropriate
serializer is available, raises InvalidContentType.
"""
default_serializers = default_serializers or {}
try:
mtype = get_media_map().get(content_type, content_type)
if mtype in self.serializers:
return mtype, self.serializers[mtype]
else:
return mtype, default_serializers[mtype]
except (KeyError, TypeError):
raise exception.InvalidContentType(content_type=content_type)
def preserialize(self, content_type, default_serializers=None):
"""Prepares the serializer that will be used to serialize.
Determines the serializer that will be used and prepares an
instance of it for later call. This allows the serializer to
be accessed by extensions for, e.g., template extension.
"""
mtype, serializer = self.get_serializer(content_type,
default_serializers)
self.media_type = mtype
self.serializer = serializer()
def attach(self, **kwargs):
"""Attach slave templates to serializers."""
if self.media_type in kwargs:
self.serializer.attach(kwargs[self.media_type])
def serialize(self, request, content_type, default_serializers=None):
"""Serializes the wrapped object.
Utility method for serializing the wrapped object. Returns a
webob.Response object.
"""
if self.serializer:
serializer = self.serializer
else:
_mtype, _serializer = self.get_serializer(content_type,
default_serializers)
serializer = _serializer()
response = webob.Response()
response.status_int = self.code
for hdr, value in self._headers.items():
response.headers[hdr] = utils.utf8(str(value))
response.headers['Content-Type'] = utils.utf8(content_type)
if self.obj is not None:
response.body = serializer.serialize(self.obj)
return response
@property
def code(self):
"""Retrieve the response status."""
return self._code or self._default_code
@property
def headers(self):
"""Retrieve the headers."""
return self._headers.copy()
def action_peek_json(body):
"""Determine action to invoke."""
try:
decoded = jsonutils.loads(body)
except ValueError:
msg = _("cannot understand JSON")
raise exception.MalformedRequestBody(reason=msg)
# Make sure there's exactly one key...
if len(decoded) != 1:
msg = _("too many body keys")
raise exception.MalformedRequestBody(reason=msg)
    # Return the action, i.e. the single top-level key of the decoded body
    return list(decoded.keys())[0]
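# For example, action_peek_json('{"resize": {"flavorRef": "2"}}') returns
# 'resize'; bodies with zero or several top-level keys raise
# MalformedRequestBody.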
class ResourceExceptionHandler(object):
"""Context manager to handle Resource exceptions.
Used when processing exceptions generated by API implementation
methods (or their extensions). Converts most exceptions to Fault
exceptions, with the appropriate logging.
"""
def __enter__(self):
return None
def __exit__(self, ex_type, ex_value, ex_traceback):
if not ex_value:
return True
if isinstance(ex_value, exception.Forbidden):
raise Fault(webob.exc.HTTPForbidden(
explanation=ex_value.format_message()))
elif isinstance(ex_value, exception.VersionNotFoundForAPIMethod):
raise
elif isinstance(ex_value, exception.Invalid):
raise Fault(exception.ConvertedException(
code=ex_value.code,
explanation=ex_value.format_message()))
elif isinstance(ex_value, TypeError):
exc_info = (ex_type, ex_value, ex_traceback)
LOG.error(_LE('Exception handling resource: %s'), ex_value,
exc_info=exc_info)
raise Fault(webob.exc.HTTPBadRequest())
elif isinstance(ex_value, Fault):
LOG.info(_LI("Fault thrown: %s"), ex_value)
raise ex_value
elif isinstance(ex_value, webob.exc.HTTPException):
LOG.info(_LI("HTTP exception thrown: %s"), ex_value)
raise Fault(ex_value)
# We didn't handle the exception
return False
class Resource(wsgi.Application):
"""WSGI app that handles (de)serialization and controller dispatch.
WSGI app that reads routing information supplied by RoutesMiddleware
and calls the requested action method upon its controller. All
controller action methods must accept a 'req' argument, which is the
incoming wsgi.Request. If the operation is a PUT or POST, the controller
method must also accept a 'body' argument (the deserialized request body).
They may raise a webob.exc exception or return a dict, which will be
serialized by requested content type.
Exceptions derived from webob.exc.HTTPException will be automatically
wrapped in Fault() to provide API friendly error responses.
"""
support_api_request_version = False
def __init__(self, controller, action_peek=None, inherits=None,
**deserializers):
""":param controller: object that implement methods created by routes
lib
:param action_peek: dictionary of routines for peeking into an
action request body to determine the
desired action
:param inherits: another resource object that this resource should
inherit extensions from. Any action extensions that
are applied to the parent resource will also apply
to this resource.
"""
self.controller = controller
default_deserializers = dict(json=JSONDeserializer)
default_deserializers.update(deserializers)
self.default_deserializers = default_deserializers
self.default_serializers = dict(json=JSONDictSerializer)
self.action_peek = dict(json=action_peek_json)
self.action_peek.update(action_peek or {})
# Copy over the actions dictionary
self.wsgi_actions = {}
if controller:
self.register_actions(controller)
# Save a mapping of extensions
self.wsgi_extensions = {}
self.wsgi_action_extensions = {}
self.inherits = inherits
def register_actions(self, controller):
"""Registers controller actions with this resource."""
actions = getattr(controller, 'wsgi_actions', {})
for key, method_name in actions.items():
self.wsgi_actions[key] = getattr(controller, method_name)
def register_extensions(self, controller):
"""Registers controller extensions with this resource."""
extensions = getattr(controller, 'wsgi_extensions', [])
for method_name, action_name in extensions:
# Look up the extending method
extension = getattr(controller, method_name)
if action_name:
# Extending an action...
if action_name not in self.wsgi_action_extensions:
self.wsgi_action_extensions[action_name] = []
self.wsgi_action_extensions[action_name].append(extension)
else:
# Extending a regular method
if method_name not in self.wsgi_extensions:
self.wsgi_extensions[method_name] = []
self.wsgi_extensions[method_name].append(extension)
def get_action_args(self, request_environment):
"""Parse dictionary created by routes library."""
# NOTE(Vek): Check for get_action_args() override in the
# controller
if hasattr(self.controller, 'get_action_args'):
return self.controller.get_action_args(request_environment)
try:
args = request_environment['wsgiorg.routing_args'][1].copy()
except (KeyError, IndexError, AttributeError):
return {}
try:
del args['controller']
except KeyError:
pass
try:
del args['format']
except KeyError:
pass
return args
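    # For example, routing args of
    # {'controller': <obj>, 'action': 'show', 'id': '42', 'format': 'json'}
    # reduce to {'action': 'show', 'id': '42'}.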
def get_body(self, request):
try:
content_type = request.get_content_type()
except exception.InvalidContentType:
LOG.debug("Unrecognized Content-Type provided in request")
return None, ''
return content_type, request.body
def deserialize(self, meth, content_type, body):
meth_deserializers = getattr(meth, 'wsgi_deserializers', {})
try:
mtype = get_media_map().get(content_type, content_type)
if mtype in meth_deserializers:
deserializer = meth_deserializers[mtype]
else:
deserializer = self.default_deserializers[mtype]
except (KeyError, TypeError):
raise exception.InvalidContentType(content_type=content_type)
if (hasattr(deserializer, 'want_controller')
and deserializer.want_controller):
return deserializer(self.controller).deserialize(body)
else:
return deserializer().deserialize(body)
def pre_process_extensions(self, extensions, request, action_args):
# List of callables for post-processing extensions
post = []
for ext in extensions:
if inspect.isgeneratorfunction(ext):
response = None
# If it's a generator function, the part before the
# yield is the preprocessing stage
try:
with ResourceExceptionHandler():
gen = ext(req=request, **action_args)
response = next(gen)
except Fault as ex:
response = ex
# We had a response...
if response:
return response, []
# No response, queue up generator for post-processing
post.append(gen)
else:
# Regular functions only perform post-processing
post.append(ext)
# Run post-processing in the reverse order
return None, reversed(post)
def post_process_extensions(self, extensions, resp_obj, request,
action_args):
for ext in extensions:
response = None
if inspect.isgenerator(ext):
# If it's a generator, run the second half of
# processing
try:
with ResourceExceptionHandler():
response = ext.send(resp_obj)
except StopIteration:
# Normal exit of generator
continue
except Fault as ex:
response = ex
else:
# Regular functions get post-processing...
try:
with ResourceExceptionHandler():
response = ext(req=request, resp_obj=resp_obj,
**action_args)
except exception.VersionNotFoundForAPIMethod:
                    # If an attached extension (@wsgi.extends) for the
                    # method has no version match, it's not an error; we
                    # just don't run the extends code
continue
except Fault as ex:
response = ex
# We had a response...
if response:
return response
return None
def _should_have_body(self, request):
return request.method in _METHODS_WITH_BODY
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, request):
"""WSGI method that controls (de)serialization and method dispatch."""
if self.support_api_request_version:
# Set the version of the API requested based on the header
try:
request.set_api_version_request()
except exception.InvalidAPIVersionString as e:
return Fault(webob.exc.HTTPBadRequest(
explanation=e.format_message()))
except exception.InvalidGlobalAPIVersion as e:
return Fault(webob.exc.HTTPNotAcceptable(
explanation=e.format_message()))
# Identify the action, its arguments, and the requested
# content type
action_args = self.get_action_args(request.environ)
action = action_args.pop('action', None)
content_type, body = self.get_body(request)
accept = request.best_match_content_type()
# NOTE(Vek): Splitting the function up this way allows for
# auditing by external tools that wrap the existing
# function. If we try to audit __call__(), we can
# run into troubles due to the @webob.dec.wsgify()
# decorator.
return self._process_stack(request, action, action_args,
content_type, body, accept)
def _process_stack(self, request, action, action_args,
content_type, body, accept):
"""Implement the processing stack."""
# Get the implementing method
try:
meth, extensions = self.get_method(request, action,
content_type, body)
except (AttributeError, TypeError):
return Fault(webob.exc.HTTPNotFound())
except KeyError as ex:
msg = _("There is no such action: %s") % ex.args[0]
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
except exception.MalformedRequestBody:
msg = _("Malformed request body")
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
if body:
msg = _("Action: '%(action)s', calling method: %(meth)s, body: "
"%(body)s") % {'action': action,
'body': six.text_type(body, 'utf-8'),
'meth': str(meth)}
LOG.debug(strutils.mask_password(msg))
else:
LOG.debug("Calling method '%(meth)s'",
{'meth': str(meth)})
# Now, deserialize the request body...
try:
contents = {}
if self._should_have_body(request):
# allow empty body with PUT and POST
if request.content_length == 0:
contents = {'body': None}
else:
contents = self.deserialize(meth, content_type, body)
except exception.InvalidContentType:
msg = _("Unsupported Content-Type")
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
except exception.MalformedRequestBody:
msg = _("Malformed request body")
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
# Update the action args
action_args.update(contents)
project_id = action_args.pop("project_id", None)
context = request.environ.get('nova.context')
if (context and project_id and (project_id != context.project_id)):
msg = _("Malformed request URL: URL's project_id '%(project_id)s'"
" doesn't match Context's project_id"
" '%(context_project_id)s'") % \
{'project_id': project_id,
'context_project_id': context.project_id}
return Fault(webob.exc.HTTPBadRequest(explanation=msg))
# Run pre-processing extensions
response, post = self.pre_process_extensions(extensions,
request, action_args)
if not response:
try:
with ResourceExceptionHandler():
action_result = self.dispatch(meth, request, action_args)
except Fault as ex:
response = ex
if not response:
# No exceptions; convert action_result into a
# ResponseObject
resp_obj = None
if type(action_result) is dict or action_result is None:
resp_obj = ResponseObject(action_result)
elif isinstance(action_result, ResponseObject):
resp_obj = action_result
else:
response = action_result
# Run post-processing extensions
if resp_obj:
# Do a preserialize to set up the response object
serializers = getattr(meth, 'wsgi_serializers', {})
resp_obj._bind_method_serializers(serializers)
if hasattr(meth, 'wsgi_code'):
resp_obj._default_code = meth.wsgi_code
resp_obj.preserialize(accept, self.default_serializers)
# Process post-processing extensions
response = self.post_process_extensions(post, resp_obj,
request, action_args)
if resp_obj and not response:
response = resp_obj.serialize(request, accept,
self.default_serializers)
if hasattr(response, 'headers'):
for hdr, val in response.headers.items():
# Headers must be utf-8 strings
response.headers[hdr] = utils.utf8(str(val))
if not request.api_version_request.is_null():
response.headers[API_VERSION_REQUEST_HEADER] = \
request.api_version_request.get_string()
response.headers['Vary'] = API_VERSION_REQUEST_HEADER
return response
def get_method(self, request, action, content_type, body):
meth, extensions = self._get_method(request,
action,
content_type,
body)
if self.inherits:
_meth, parent_ext = self.inherits.get_method(request,
action,
content_type,
body)
extensions.extend(parent_ext)
return meth, extensions
def _get_method(self, request, action, content_type, body):
"""Look up the action-specific method and its extensions."""
# Look up the method
try:
if not self.controller:
meth = getattr(self, action)
else:
meth = getattr(self.controller, action)
except AttributeError:
if (not self.wsgi_actions or
action not in _ROUTES_METHODS + ['action']):
# Propagate the error
raise
else:
return meth, self.wsgi_extensions.get(action, [])
if action == 'action':
# OK, it's an action; figure out which action...
mtype = get_media_map().get(content_type)
action_name = self.action_peek[mtype](body)
else:
action_name = action
# Look up the action method
return (self.wsgi_actions[action_name],
self.wsgi_action_extensions.get(action_name, []))
def dispatch(self, method, request, action_args):
"""Dispatch a call to the action-specific method."""
try:
return method(req=request, **action_args)
except exception.VersionNotFoundForAPIMethod:
# We deliberately don't return any message information
# about the exception to the user so it looks as if
# the method is simply not implemented.
return Fault(webob.exc.HTTPNotFound())
class ResourceV21(Resource):
support_api_request_version = True
def action(name):
"""Mark a function as an action.
The given name will be taken as the action key in the body.
This is also overloaded to allow extensions to provide
non-extending definitions of create and delete operations.
"""
def decorator(func):
func.wsgi_action = name
return func
return decorator
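# Illustrative use of @action on a controller method; the 'resize' key of
# the {"resize": {...}} request body selects this method:
#
#     @action('resize')
#     def _action_resize(self, req, id, body):
#         ...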
def extends(*args, **kwargs):
"""Indicate a function extends an operation.
Can be used as either::
@extends
def index(...):
pass
or as::
@extends(action='resize')
def _action_resize(...):
pass
"""
def decorator(func):
# Store enough information to find what we're extending
func.wsgi_extends = (func.__name__, kwargs.get('action'))
return func
# If we have positional arguments, call the decorator
if args:
return decorator(*args)
# OK, return the decorator instead
return decorator
class ControllerMetaclass(type):
"""Controller metaclass.
This metaclass automates the task of assembling a dictionary
mapping action keys to method names.
"""
def __new__(mcs, name, bases, cls_dict):
"""Adds the wsgi_actions dictionary to the class."""
# Find all actions
actions = {}
extensions = []
versioned_methods = None
# start with wsgi actions from base classes
for base in bases:
actions.update(getattr(base, 'wsgi_actions', {}))
if base.__name__ == "Controller":
# NOTE(cyeoh): This resets the VER_METHOD_ATTR attribute
# between API controller class creations. This allows us
# to use a class decorator on the API methods that doesn't
# require naming explicitly what method is being versioned as
# it can be implicit based on the method decorated. It is a bit
# ugly.
if VER_METHOD_ATTR in base.__dict__:
versioned_methods = getattr(base, VER_METHOD_ATTR)
delattr(base, VER_METHOD_ATTR)
for key, value in cls_dict.items():
if not callable(value):
continue
if getattr(value, 'wsgi_action', None):
actions[value.wsgi_action] = key
elif getattr(value, 'wsgi_extends', None):
extensions.append(value.wsgi_extends)
# Add the actions and extensions to the class dict
cls_dict['wsgi_actions'] = actions
cls_dict['wsgi_extensions'] = extensions
if versioned_methods:
cls_dict[VER_METHOD_ATTR] = versioned_methods
return super(ControllerMetaclass, mcs).__new__(mcs, name, bases,
cls_dict)
@six.add_metaclass(ControllerMetaclass)
class Controller(object):
"""Default controller."""
_view_builder_class = None
def __init__(self, view_builder=None):
"""Initialize controller with a view builder instance."""
if view_builder:
self._view_builder = view_builder
elif self._view_builder_class:
self._view_builder = self._view_builder_class()
else:
self._view_builder = None
def __getattribute__(self, key):
def version_select(*args, **kwargs):
"""Look for the method which matches the name supplied and version
constraints and calls it with the supplied arguments.
@return: Returns the result of the method called
@raises: VersionNotFoundForAPIMethod if there is no method which
matches the name and version constraints
"""
# The first arg to all versioned methods is always the request
# object. The version for the request is attached to the
# request object
if len(args) == 0:
ver = kwargs['req'].api_version_request
else:
ver = args[0].api_version_request
func_list = self.versioned_methods[key]
for func in func_list:
if ver.matches(func.start_version, func.end_version):
# Update the version_select wrapper function so
# other decorator attributes like wsgi.response
# are still respected.
functools.update_wrapper(version_select, func.func)
return func.func(self, *args, **kwargs)
# No version match
raise exception.VersionNotFoundForAPIMethod(version=ver)
try:
version_meth_dict = object.__getattribute__(self, VER_METHOD_ATTR)
except AttributeError:
# No versioning on this class
return object.__getattribute__(self, key)
if version_meth_dict and \
key in object.__getattribute__(self, VER_METHOD_ATTR):
return version_select
return object.__getattribute__(self, key)
# NOTE(cyeoh): This decorator MUST appear first (the outermost
# decorator) on an API method for it to work correctly
@classmethod
def api_version(cls, min_ver, max_ver=None):
"""Decorator for versioning api methods.
Add the decorator to any method which takes a request object
as the first parameter and belongs to a class which inherits from
wsgi.Controller.
@min_ver: string representing minimum version
@max_ver: optional string representing maximum version
"""
def decorator(f):
obj_min_ver = api_version.APIVersionRequest(min_ver)
if max_ver:
obj_max_ver = api_version.APIVersionRequest(max_ver)
else:
obj_max_ver = api_version.APIVersionRequest()
# Add to list of versioned methods registered
func_name = f.__name__
new_func = versioned_method.VersionedMethod(
func_name, obj_min_ver, obj_max_ver, f)
func_dict = getattr(cls, VER_METHOD_ATTR, {})
if not func_dict:
setattr(cls, VER_METHOD_ATTR, func_dict)
func_list = func_dict.get(func_name, [])
if not func_list:
func_dict[func_name] = func_list
func_list.append(new_func)
# Ensure the list is sorted by minimum version (reversed)
# so later when we work through the list in order we find
# the method which has the latest version which supports
# the version requested.
            # TODO(cyeoh): Add check to ensure that there are no overlapping
            # ranges of valid versions as that is ambiguous
func_list.sort(key=lambda f: f.start_version, reverse=True)
return f
return decorator
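    # Illustrative use of api_version on a subclass method that only serves
    # microversions 2.1 through 2.3 (the method name and signature here are
    # hypothetical):
    #
    #     @wsgi.Controller.api_version("2.1", "2.3")
    #     def show(self, req, id):
    #         ...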
@staticmethod
def is_valid_body(body, entity_name):
if not (body and entity_name in body):
return False
def is_dict(d):
try:
d.get(None)
return True
except AttributeError:
return False
return is_dict(body[entity_name])
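    # For example, is_valid_body({"server": {"name": "vm"}}, "server") is
    # True, while is_valid_body({}, "server") and
    # is_valid_body({"server": "x"}, "server") are both False.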
class Fault(webob.exc.HTTPException):
"""Wrap webob.exc.HTTPException to provide API friendly response."""
_fault_names = {
400: "badRequest",
401: "unauthorized",
403: "forbidden",
404: "itemNotFound",
405: "badMethod",
409: "conflictingRequest",
413: "overLimit",
415: "badMediaType",
429: "overLimit",
501: "notImplemented",
503: "serviceUnavailable"}
def __init__(self, exception):
"""Create a Fault for the given webob.exc.exception."""
self.wrapped_exc = exception
for key, value in self.wrapped_exc.headers.items():
self.wrapped_exc.headers[key] = str(value)
self.status_int = exception.status_int
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
"""Generate a WSGI response based on the exception passed to ctor."""
user_locale = req.best_match_language()
# Replace the body with fault details.
code = self.wrapped_exc.status_int
fault_name = self._fault_names.get(code, "computeFault")
explanation = self.wrapped_exc.explanation
LOG.debug("Returning %(code)s to user: %(explanation)s",
{'code': code, 'explanation': explanation})
explanation = i18n.translate(explanation, user_locale)
fault_data = {
fault_name: {
'code': code,
'message': explanation}}
if code == 413 or code == 429:
retry = self.wrapped_exc.headers.get('Retry-After', None)
if retry:
fault_data[fault_name]['retryAfter'] = retry
if not req.api_version_request.is_null():
self.wrapped_exc.headers[API_VERSION_REQUEST_HEADER] = \
req.api_version_request.get_string()
self.wrapped_exc.headers['Vary'] = \
API_VERSION_REQUEST_HEADER
content_type = req.best_match_content_type()
serializer = {
'application/json': JSONDictSerializer(),
}[content_type]
self.wrapped_exc.body = serializer.serialize(fault_data)
self.wrapped_exc.content_type = content_type
return self.wrapped_exc
def __str__(self):
return self.wrapped_exc.__str__()
class RateLimitFault(webob.exc.HTTPException):
"""Rate-limited request response."""
def __init__(self, message, details, retry_time):
"""Initialize new `RateLimitFault` with relevant information."""
hdrs = RateLimitFault._retry_after(retry_time)
self.wrapped_exc = webob.exc.HTTPTooManyRequests(headers=hdrs)
self.content = {
"overLimit": {
"code": self.wrapped_exc.status_int,
"message": message,
"details": details,
"retryAfter": hdrs['Retry-After'],
},
}
@staticmethod
def _retry_after(retry_time):
delay = int(math.ceil(retry_time - time.time()))
retry_after = delay if delay > 0 else 0
headers = {'Retry-After': '%d' % retry_after}
return headers
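    # For example, a retry_time 4.2 seconds in the future yields
    # {'Retry-After': '5'}: the remaining delay is rounded up and clamped
    # at zero for times already in the past.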
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, request):
"""Return the wrapped exception with a serialized body conforming
to our error format.
"""
user_locale = request.best_match_language()
content_type = request.best_match_content_type()
self.content['overLimit']['message'] = \
i18n.translate(self.content['overLimit']['message'], user_locale)
self.content['overLimit']['details'] = \
i18n.translate(self.content['overLimit']['details'], user_locale)
serializer = {
'application/json': JSONDictSerializer(),
}[content_type]
content = serializer.serialize(self.content)
self.wrapped_exc.body = content
self.wrapped_exc.content_type = content_type
return self.wrapped_exc
avg_line_length: 35.721246 | max_line_length: 79 | alphanum_fraction: 0.612034
| true
| true
|
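The Fault wrapper above is a reusable pattern: a webob HTTPException is caught and its body is replaced with a structured fault document keyed by status code. A minimal, self-contained sketch of just that translation step (wrap_fault and _FAULT_NAMES are illustrative helpers, not part of the source; assumes webob is installed):

    import webob.exc
    _FAULT_NAMES = {404: "itemNotFound", 429: "overLimit"}
    def wrap_fault(exc):
        # Map the status code to a fault name, defaulting to "computeFault".
        name = _FAULT_NAMES.get(exc.status_int, "computeFault")
        return {name: {"code": exc.status_int, "message": exc.explanation}}
    print(wrap_fault(webob.exc.HTTPNotFound()))
    # e.g. {'itemNotFound': {'code': 404, 'message': 'The resource could not be found.'}}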
1c4531d80c6d31acc1cfb7c059317df37a5ce0ad
| 896
|
py
|
Python
|
nally/core/layers/raw_packet.py
|
FreibergVlad/port-scanner
|
432dd9a07cb5118f3a0722fe4908434c76191b3b
|
[
"MIT"
] | null | null | null |
nally/core/layers/raw_packet.py
|
FreibergVlad/port-scanner
|
432dd9a07cb5118f3a0722fe4908434c76191b3b
|
[
"MIT"
] | null | null | null |
nally/core/layers/raw_packet.py
|
FreibergVlad/port-scanner
|
432dd9a07cb5118f3a0722fe4908434c76191b3b
|
[
"MIT"
] | null | null | null |
from nally.core.layers.packet import Packet
class RawPacket(Packet):
"""
Raw implementation of Packet interface.
Actually, just a holder of raw bytes
"""
def __init__(self, raw_packet):
super().__init__()
self.__raw_packet = bytes(raw_packet)
def to_bytes(self):
return self.__raw_packet
@staticmethod
def from_bytes(bytes_packet: bytes):
return RawPacket(bytes_packet)
def is_response(self, packet) -> bool:
return True
@property
def raw_payload(self) -> bytes:
return self.__raw_packet
@Packet.upper_layer.setter
def upper_layer(self, upper_layer):
raise NotImplementedError("Raw packet doesn't support payload")
def __eq__(self, other: object) -> bool:
if isinstance(other, RawPacket):
return self.raw_payload == other.raw_payload
return False
| 24.888889
| 71
| 0.66183
|
from nally.core.layers.packet import Packet
class RawPacket(Packet):
def __init__(self, raw_packet):
super().__init__()
self.__raw_packet = bytes(raw_packet)
def to_bytes(self):
return self.__raw_packet
@staticmethod
def from_bytes(bytes_packet: bytes):
return RawPacket(bytes_packet)
def is_response(self, packet) -> bool:
return True
@property
def raw_payload(self) -> bytes:
return self.__raw_packet
@Packet.upper_layer.setter
def upper_layer(self, upper_layer):
raise NotImplementedError("Raw packet doesn't support payload")
def __eq__(self, other: object) -> bool:
if isinstance(other, RawPacket):
return self.raw_payload == other.raw_payload
return False
| true
| true
|
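A short usage sketch for the RawPacket record above (assumes the nally package is importable; the byte values are arbitrary):

    from nally.core.layers.raw_packet import RawPacket
    pkt = RawPacket.from_bytes(b"\x45\x00\x00\x14")
    assert pkt.to_bytes() == b"\x45\x00\x00\x14"
    assert pkt == RawPacket(b"\x45\x00\x00\x14")  # __eq__ compares raw payload
    assert pkt.is_response(None)                  # raw packets match anything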
1c453226bdc6a0908183e8c5c8f5eb83db899edf
| 17,813
|
py
|
Python
|
ProjectFiles/bin/Release/2.80/scripts/addons/add_advanced_objects_panels/__init__.py
|
BlazesRus/Bforartists
|
126bdd9e47cc984fd97ba5299bfb92ec5278e754
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1
|
2019-07-08T15:51:14.000Z
|
2019-07-08T15:51:14.000Z
|
ProjectFiles/bin/Release/2.80/scripts/addons/add_advanced_objects_panels/__init__.py
|
BlazesRus/Bforartists
|
126bdd9e47cc984fd97ba5299bfb92ec5278e754
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
ProjectFiles/bin/Release/2.80/scripts/addons/add_advanced_objects_panels/__init__.py
|
BlazesRus/Bforartists
|
126bdd9e47cc984fd97ba5299bfb92ec5278e754
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Contributed to by:
# meta-androcto, Bill Currie, Jorge Hernandez - Melenedez Jacob Morris, Oscurart #
# Rebellion, Antonis Karvelas, Eleanor Howick, lijenstina, Daniel Schalla, Domlysz #
# Unnikrishnan(kodemax), Florian Meyer, Omar ahmed, Brian Hinton (Nichod), liero #
# Atom, Dannyboy, Mano-Wii, Kursad Karatas, teldredge, Phil Cote #
bl_info = {
"name": "Add Advanced Object Panels",
"author": "meta-androcto",
"version": (1, 1, 5),
"blender": (2, 7, 7),
"description": "Individual Create Panel Activation List",
"location": "Addons Preferences",
"warning": "",
"wiki_url": "https://wiki.blender.org/index.php/Extensions:2.6"
"/Py/Scripts/Object/Add_Advanced",
"category": "Object"
}
import bpy
from bpy.types import (
AddonPreferences,
PropertyGroup,
)
from bpy.props import (
BoolProperty,
BoolVectorProperty,
EnumProperty,
FloatProperty,
FloatVectorProperty,
IntProperty,
StringProperty,
PointerProperty,
)
sub_modules_names = (
"drop_to_ground",
"object_laplace_lightning",
"object_mangle_tools",
"unfold_transition",
"delaunay_voronoi",
"oscurart_constellation",
)
sub_modules = [__import__(__package__ + "." + submod, {}, {}, submod) for submod in sub_modules_names]
sub_modules.sort(key=lambda mod: (mod.bl_info['category'], mod.bl_info['name']))
# Add-ons Preferences
def _get_pref_class(mod):
import inspect
for obj in vars(mod).values():
if inspect.isclass(obj) and issubclass(obj, PropertyGroup):
if hasattr(obj, 'bl_idname') and obj.bl_idname == mod.__name__:
return obj
def get_addon_preferences(name=''):
"""Acquisition and registration"""
addons = bpy.context.preferences.addons
if __name__ not in addons: # wm.read_factory_settings()
return None
addon_prefs = addons[__name__].preferences
if name:
if not hasattr(addon_prefs, name):
for mod in sub_modules:
if mod.__name__.split('.')[-1] == name:
cls = _get_pref_class(mod)
if cls:
prop = PointerProperty(type=cls)
setattr(AdvancedObjPreferences1, name, prop)
bpy.utils.unregister_class(AdvancedObjPreferences1)
bpy.utils.register_class(AdvancedObjPreferences1)
return getattr(addon_prefs, name, None)
else:
return addon_prefs
def register_submodule(mod):
if not hasattr(mod, '__addon_enabled__'):
mod.__addon_enabled__ = False
if not mod.__addon_enabled__:
mod.register()
mod.__addon_enabled__ = True
def unregister_submodule(mod):
if mod.__addon_enabled__:
mod.unregister()
mod.__addon_enabled__ = False
prefs = get_addon_preferences()
name = mod.__name__.split('.')[-1]
if hasattr(AdvancedObjPreferences1, name):
delattr(AdvancedObjPreferences1, name)
if prefs:
bpy.utils.unregister_class(AdvancedObjPreferences1)
bpy.utils.register_class(AdvancedObjPreferences1)
if name in prefs:
del prefs[name]
def enable_all_modules(self, context):
for mod in sub_modules:
mod_name = mod.__name__.split('.')[-1]
setattr(self, 'use_' + mod_name, False)
if not mod.__addon_enabled__:
setattr(self, 'use_' + mod_name, True)
mod.__addon_enabled__ = True
return None
def disable_all_modules(self, context):
for mod in sub_modules:
mod_name = mod.__name__.split('.')[-1]
if mod.__addon_enabled__:
setattr(self, 'use_' + mod_name, False)
mod.__addon_enabled__ = False
return None
class AdvancedObjPreferences1(AddonPreferences):
bl_idname = __name__
enable_all = BoolProperty(
name="Enable all",
description="Enable all Advanced Objects' Panels",
default=False,
update=enable_all_modules
)
disable_all = BoolProperty(
name="Disable all",
description="Disable all Advanced Objects' Panels",
default=False,
update=disable_all_modules
)
def draw(self, context):
layout = self.layout
split = layout.split(percentage=0.5, align=True)
row = split.row()
row.alignment = "LEFT"
sub_box = row.box()
sub_box.prop(self, "enable_all", emboss=False,
icon="VISIBLE_IPO_ON", icon_only=True)
row.label("Enable All")
row = split.row()
row.alignment = "RIGHT"
row.label("Disable All")
sub_box = row.box()
sub_box.prop(self, "disable_all", emboss=False,
icon="VISIBLE_IPO_OFF", icon_only=True)
for mod in sub_modules:
mod_name = mod.__name__.split('.')[-1]
info = mod.bl_info
column = layout.column()
box = column.box()
# first stage
expand = getattr(self, 'show_expanded_' + mod_name)
icon = 'TRIA_DOWN' if expand else 'TRIA_RIGHT'
col = box.column()
row = col.row()
sub = row.row()
sub.context_pointer_set('addon_prefs', self)
op = sub.operator('wm.context_toggle', text='', icon=icon,
emboss=False)
op.data_path = 'addon_prefs.show_expanded_' + mod_name
sub.label('{}: {}'.format(info['category'], info['name']))
sub = row.row()
sub.alignment = 'RIGHT'
if info.get('warning'):
sub.label('', icon='ERROR')
sub.prop(self, 'use_' + mod_name, text='')
# The second stage
if expand:
if info.get('description'):
split = col.row().split(percentage=0.15)
split.label('Description:')
split.label(info['description'])
if info.get('location'):
split = col.row().split(percentage=0.15)
split.label('Location:')
split.label(info['location'])
if info.get('author'):
split = col.row().split(percentage=0.15)
split.label('Author:')
split.label(info['author'])
if info.get('version'):
split = col.row().split(percentage=0.15)
split.label('Version:')
split.label('.'.join(str(x) for x in info['version']),
translate=False)
if info.get('warning'):
split = col.row().split(percentage=0.15)
split.label('Warning:')
split.label(' ' + info['warning'], icon='ERROR')
tot_row = int(bool(info.get('wiki_url')))
if tot_row:
split = col.row().split(percentage=0.15)
split.label(text='Internet:')
if info.get('wiki_url'):
op = split.operator('wm.url_open',
text='Documentation', icon='HELP')
op.url = info.get('wiki_url')
for i in range(4 - tot_row):
split.separator()
# Details and settings
if getattr(self, 'use_' + mod_name):
prefs = get_addon_preferences(mod_name)
if prefs and hasattr(prefs, 'draw'):
box = box.column()
prefs.layout = box
try:
prefs.draw(context)
except:
import traceback
traceback.print_exc()
box.label(text="Error (see console)", icon="ERROR")
del prefs.layout
row = layout.row()
row.label(text="End of Advanced Object Panels Activations",
icon="FILE_PARENT")
for mod in sub_modules:
info = mod.bl_info
mod_name = mod.__name__.split('.')[-1]
def gen_update(mod):
def update(self, context):
if getattr(self, 'use_' + mod.__name__.split('.')[-1]):
if not mod.__addon_enabled__:
register_submodule(mod)
else:
if mod.__addon_enabled__:
unregister_submodule(mod)
return update
prop = BoolProperty(
name=info['name'],
description=info.get('description', ''),
update=gen_update(mod),
)
setattr(AdvancedObjPreferences1, 'use_' + mod_name, prop)
prop = BoolProperty()
setattr(AdvancedObjPreferences1, 'show_expanded_' + mod_name, prop)
class AdvancedObjProperties1(PropertyGroup):
# object_laplace_lighting props
ORIGIN = FloatVectorProperty(
name="Origin charge"
)
GROUNDZ = IntProperty(
name="Ground Z coordinate"
)
HORDER = IntProperty(
name="Secondary paths orders",
default=1
)
# object_laplace_lighting UI props
TSTEPS = IntProperty(
name="Iterations",
default=350,
description="Number of cells to create\n"
"Will end early if hits ground plane or cloud"
)
GSCALE = FloatProperty(
name="Grid unit size",
default=0.12,
description="scale of cells, .25 = 4 cells per blenderUnit"
)
BIGVAR = FloatProperty(
name="Straightness",
default=6.3,
description="Straightness/branchiness of bolt, \n"
"<2 is mush, >12 is staight line, 6.3 is good"
)
GROUNDBOOL = BoolProperty(
name="Use Ground object",
description="Use ground plane or not",
default=True
)
GROUNDC = IntProperty(
name="Ground charge",
default=-250,
description="Charge of the ground plane"
)
CLOUDBOOL = BoolProperty(
name="Use Cloud object",
default=False,
description="Use cloud object - attracts and terminates like ground but\n"
"any obj instead of z plane\n"
"Can slow down loop if obj is large, overrides ground"
)
CLOUDC = IntProperty(
name="Cloud charge",
default=-1,
description="Charge of a cell in cloud object\n"
"(so total charge also depends on obj size)"
)
VMMESH = BoolProperty(
name="Multi mesh",
default=True,
description="Output to multi-meshes for different materials on main/sec/side branches"
)
VSMESH = BoolProperty(
name="Single mesh",
default=False,
description="Output to single mesh for using build modifier and particles for effects"
)
VCUBE = BoolProperty(
name="Cubes",
default=False,
description="CTRL-J after run to JOIN\n"
"Outputs a bunch of cube objects, mostly for testing"
)
VVOX = BoolProperty(
name="Voxel (experimental)",
default=False,
description="Output to a voxel file to bpy.data.filepath\FSLGvoxels.raw\n"
"(doesn't work well right now)"
)
IBOOL = BoolProperty(
name="Use Insulator object",
default=False,
description="Use insulator mesh object to prevent growth of bolt in areas"
)
OOB = StringProperty(
name="Select",
default="",
description="Origin of bolt, can be an Empty\n"
"if object is a mesh will use all verts as charges")
GOB = StringProperty(
name="Select",
default="",
description="Object to use as ground plane, uses z coord only"
)
COB = StringProperty(
name="Select",
default="",
description="Object to use as cloud, best to use a cube"
)
IOB = StringProperty(
name="Select",
default="",
description="Object to use as insulator, 'voxelized'\n"
"before generating bolt (can be slow)"
)
# object_mangle_tools properties
mangle_constraint_vector = BoolVectorProperty(
name="Mangle Constraint",
default=(True, True, True),
subtype='XYZ',
description="Constrains Mangle Direction"
)
mangle_random_magnitude = IntProperty(
name="Mangle Severity",
default=5,
min=1, max=30,
description="Severity of mangling"
)
mangle_name = StringProperty(
name="Shape Key Name",
default="mangle",
description="Name given for mangled shape keys"
)
# unfold_transition properties
unfold_arm_name = StringProperty(
default=""
)
unfold_modo = EnumProperty(
name="",
items=[("cursor", "3D Cursor", "Use the Distance to 3D Cursor"),
("weight", "Weight Map", "Use a Painted Weight map"),
("index", "Mesh Indices", "Use Faces and Vertices index")],
description="How to Sort Bones for animation", default="cursor"
)
unfold_flip = BoolProperty(
name="Flipping Faces",
default=False,
description="Rotate faces around the Center and skip Scaling - "
"keep checked for both operators"
)
unfold_fold_duration = IntProperty(
name="Total Time",
min=5, soft_min=25,
max=10000, soft_max=2500,
default=200,
description="Total animation length"
)
unfold_sca_time = IntProperty(
name="Scale Time",
min=1,
max=5000, soft_max=500,
default=10,
description="Faces scaling time"
)
unfold_rot_time = IntProperty(
name="Rotation Time",
min=1, soft_min=5,
max=5000, soft_max=500,
default=15,
description="Faces rotation time"
)
unfold_rot_max = IntProperty(
name="Angle",
min=-180,
max=180,
default=135,
description="Faces rotation angle"
)
unfold_fold_noise = IntProperty(
name="Noise",
min=0,
max=500, soft_max=50,
default=0,
description="Offset some faces animation"
)
unfold_bounce = FloatProperty(
name="Bounce",
min=0,
max=10, soft_max=2.5,
default=0,
description="Add some bounce to rotation"
)
unfold_from_point = BoolProperty(
name="Point",
default=False,
description="Scale faces from a Point instead of from an Edge"
)
unfold_wiggle_rot = BoolProperty(
name="Wiggle",
default=False,
description="Use all Axis + Random Rotation instead of X Aligned"
)
# oscurart_constellation
constellation_limit = FloatProperty(
name="Initial Threshold",
description="Edges will be created only if the distance\n"
"between vertices is smaller than this value\n"
"This is a starting value on Operator Invoke",
default=2,
min=0
)
# Class list
classes = (
AdvancedObjPreferences1,
AdvancedObjProperties1,
)
def register():
for cls in classes:
bpy.utils.register_class(cls)
bpy.types.Scene.advanced_objects1 = PointerProperty(
type=AdvancedObjProperties1
)
prefs = get_addon_preferences()
for mod in sub_modules:
if not hasattr(mod, '__addon_enabled__'):
mod.__addon_enabled__ = False
name = mod.__name__.split('.')[-1]
if getattr(prefs, 'use_' + name):
register_submodule(mod)
def unregister():
for mod in sub_modules:
if mod.__addon_enabled__:
unregister_submodule(mod)
del bpy.types.Scene.advanced_objects1
for cls in reversed(classes):
bpy.utils.unregister_class(cls)
if __name__ == "__main__":
register()
| 34.521318
| 102
| 0.547016
|
bl_info = {
"name": "Add Advanced Object Panels",
"author": "meta-androcto",
"version": (1, 1, 5),
"blender": (2, 7, 7),
"description": "Individual Create Panel Activation List",
"location": "Addons Preferences",
"warning": "",
"wiki_url": "https://wiki.blender.org/index.php/Extensions:2.6"
"/Py/Scripts/Object/Add_Advanced",
"category": "Object"
}
import bpy
from bpy.types import (
AddonPreferences,
PropertyGroup,
)
from bpy.props import (
BoolProperty,
BoolVectorProperty,
EnumProperty,
FloatProperty,
FloatVectorProperty,
IntProperty,
StringProperty,
PointerProperty,
)
sub_modules_names = (
"drop_to_ground",
"object_laplace_lightning",
"object_mangle_tools",
"unfold_transition",
"delaunay_voronoi",
"oscurart_constellation",
)
sub_modules = [__import__(__package__ + "." + submod, {}, {}, submod) for submod in sub_modules_names]
sub_modules.sort(key=lambda mod: (mod.bl_info['category'], mod.bl_info['name']))
def _get_pref_class(mod):
import inspect
for obj in vars(mod).values():
if inspect.isclass(obj) and issubclass(obj, PropertyGroup):
if hasattr(obj, 'bl_idname') and obj.bl_idname == mod.__name__:
return obj
def get_addon_preferences(name=''):
addons = bpy.context.preferences.addons
    if __name__ not in addons:
        return None
addon_prefs = addons[__name__].preferences
if name:
if not hasattr(addon_prefs, name):
for mod in sub_modules:
if mod.__name__.split('.')[-1] == name:
cls = _get_pref_class(mod)
if cls:
prop = PointerProperty(type=cls)
setattr(AdvancedObjPreferences1, name, prop)
bpy.utils.unregister_class(AdvancedObjPreferences1)
bpy.utils.register_class(AdvancedObjPreferences1)
return getattr(addon_prefs, name, None)
else:
return addon_prefs
def register_submodule(mod):
if not hasattr(mod, '__addon_enabled__'):
mod.__addon_enabled__ = False
if not mod.__addon_enabled__:
mod.register()
mod.__addon_enabled__ = True
def unregister_submodule(mod):
if mod.__addon_enabled__:
mod.unregister()
mod.__addon_enabled__ = False
prefs = get_addon_preferences()
name = mod.__name__.split('.')[-1]
if hasattr(AdvancedObjPreferences1, name):
delattr(AdvancedObjPreferences1, name)
if prefs:
bpy.utils.unregister_class(AdvancedObjPreferences1)
bpy.utils.register_class(AdvancedObjPreferences1)
if name in prefs:
del prefs[name]
def enable_all_modules(self, context):
for mod in sub_modules:
mod_name = mod.__name__.split('.')[-1]
setattr(self, 'use_' + mod_name, False)
if not mod.__addon_enabled__:
setattr(self, 'use_' + mod_name, True)
mod.__addon_enabled__ = True
return None
def disable_all_modules(self, context):
for mod in sub_modules:
mod_name = mod.__name__.split('.')[-1]
if mod.__addon_enabled__:
setattr(self, 'use_' + mod_name, False)
mod.__addon_enabled__ = False
return None
class AdvancedObjPreferences1(AddonPreferences):
bl_idname = __name__
enable_all = BoolProperty(
name="Enable all",
description="Enable all Advanced Objects' Panels",
default=False,
update=enable_all_modules
)
disable_all = BoolProperty(
name="Disable all",
description="Disable all Advanced Objects' Panels",
default=False,
update=disable_all_modules
)
def draw(self, context):
layout = self.layout
split = layout.split(percentage=0.5, align=True)
row = split.row()
row.alignment = "LEFT"
sub_box = row.box()
sub_box.prop(self, "enable_all", emboss=False,
icon="VISIBLE_IPO_ON", icon_only=True)
row.label("Enable All")
row = split.row()
row.alignment = "RIGHT"
row.label("Disable All")
sub_box = row.box()
sub_box.prop(self, "disable_all", emboss=False,
icon="VISIBLE_IPO_OFF", icon_only=True)
for mod in sub_modules:
mod_name = mod.__name__.split('.')[-1]
info = mod.bl_info
column = layout.column()
box = column.box()
expand = getattr(self, 'show_expanded_' + mod_name)
icon = 'TRIA_DOWN' if expand else 'TRIA_RIGHT'
col = box.column()
row = col.row()
sub = row.row()
sub.context_pointer_set('addon_prefs', self)
op = sub.operator('wm.context_toggle', text='', icon=icon,
emboss=False)
op.data_path = 'addon_prefs.show_expanded_' + mod_name
sub.label('{}: {}'.format(info['category'], info['name']))
sub = row.row()
sub.alignment = 'RIGHT'
if info.get('warning'):
sub.label('', icon='ERROR')
sub.prop(self, 'use_' + mod_name, text='')
if expand:
if info.get('description'):
split = col.row().split(percentage=0.15)
split.label('Description:')
split.label(info['description'])
if info.get('location'):
split = col.row().split(percentage=0.15)
split.label('Location:')
split.label(info['location'])
if info.get('author'):
split = col.row().split(percentage=0.15)
split.label('Author:')
split.label(info['author'])
if info.get('version'):
split = col.row().split(percentage=0.15)
split.label('Version:')
split.label('.'.join(str(x) for x in info['version']),
translate=False)
if info.get('warning'):
split = col.row().split(percentage=0.15)
split.label('Warning:')
split.label(' ' + info['warning'], icon='ERROR')
tot_row = int(bool(info.get('wiki_url')))
if tot_row:
split = col.row().split(percentage=0.15)
split.label(text='Internet:')
if info.get('wiki_url'):
op = split.operator('wm.url_open',
text='Documentation', icon='HELP')
op.url = info.get('wiki_url')
for i in range(4 - tot_row):
split.separator()
if getattr(self, 'use_' + mod_name):
prefs = get_addon_preferences(mod_name)
if prefs and hasattr(prefs, 'draw'):
box = box.column()
prefs.layout = box
try:
prefs.draw(context)
except:
import traceback
traceback.print_exc()
box.label(text="Error (see console)", icon="ERROR")
del prefs.layout
row = layout.row()
row.label(text="End of Advanced Object Panels Activations",
icon="FILE_PARENT")
for mod in sub_modules:
info = mod.bl_info
mod_name = mod.__name__.split('.')[-1]
def gen_update(mod):
def update(self, context):
if getattr(self, 'use_' + mod.__name__.split('.')[-1]):
if not mod.__addon_enabled__:
register_submodule(mod)
else:
if mod.__addon_enabled__:
unregister_submodule(mod)
return update
prop = BoolProperty(
name=info['name'],
description=info.get('description', ''),
update=gen_update(mod),
)
setattr(AdvancedObjPreferences1, 'use_' + mod_name, prop)
prop = BoolProperty()
setattr(AdvancedObjPreferences1, 'show_expanded_' + mod_name, prop)
class AdvancedObjProperties1(PropertyGroup):
ORIGIN = FloatVectorProperty(
name="Origin charge"
)
GROUNDZ = IntProperty(
name="Ground Z coordinate"
)
HORDER = IntProperty(
name="Secondary paths orders",
default=1
)
TSTEPS = IntProperty(
name="Iterations",
default=350,
description="Number of cells to create\n"
"Will end early if hits ground plane or cloud"
)
GSCALE = FloatProperty(
name="Grid unit size",
default=0.12,
description="scale of cells, .25 = 4 cells per blenderUnit"
)
BIGVAR = FloatProperty(
name="Straightness",
default=6.3,
description="Straightness/branchiness of bolt, \n"
"<2 is mush, >12 is staight line, 6.3 is good"
)
GROUNDBOOL = BoolProperty(
name="Use Ground object",
description="Use ground plane or not",
default=True
)
GROUNDC = IntProperty(
name="Ground charge",
default=-250,
description="Charge of the ground plane"
)
CLOUDBOOL = BoolProperty(
name="Use Cloud object",
default=False,
description="Use cloud object - attracts and terminates like ground but\n"
"any obj instead of z plane\n"
"Can slow down loop if obj is large, overrides ground"
)
CLOUDC = IntProperty(
name="Cloud charge",
default=-1,
description="Charge of a cell in cloud object\n"
"(so total charge also depends on obj size)"
)
VMMESH = BoolProperty(
name="Multi mesh",
default=True,
description="Output to multi-meshes for different materials on main/sec/side branches"
)
VSMESH = BoolProperty(
name="Single mesh",
default=False,
description="Output to single mesh for using build modifier and particles for effects"
)
VCUBE = BoolProperty(
name="Cubes",
default=False,
description="CTRL-J after run to JOIN\n"
"Outputs a bunch of cube objects, mostly for testing"
)
VVOX = BoolProperty(
name="Voxel (experimental)",
default=False,
description="Output to a voxel file to bpy.data.filepath\FSLGvoxels.raw\n"
"(doesn't work well right now)"
)
IBOOL = BoolProperty(
name="Use Insulator object",
default=False,
description="Use insulator mesh object to prevent growth of bolt in areas"
)
OOB = StringProperty(
name="Select",
default="",
description="Origin of bolt, can be an Empty\n"
"if object is a mesh will use all verts as charges")
GOB = StringProperty(
name="Select",
default="",
description="Object to use as ground plane, uses z coord only"
)
COB = StringProperty(
name="Select",
default="",
description="Object to use as cloud, best to use a cube"
)
IOB = StringProperty(
name="Select",
default="",
description="Object to use as insulator, 'voxelized'\n"
"before generating bolt (can be slow)"
)
# object_mangle_tools properties
mangle_constraint_vector = BoolVectorProperty(
name="Mangle Constraint",
default=(True, True, True),
subtype='XYZ',
description="Constrains Mangle Direction"
)
mangle_random_magnitude = IntProperty(
name="Mangle Severity",
default=5,
min=1, max=30,
description="Severity of mangling"
)
mangle_name = StringProperty(
name="Shape Key Name",
default="mangle",
description="Name given for mangled shape keys"
)
# unfold_transition properties
unfold_arm_name = StringProperty(
default=""
)
unfold_modo = EnumProperty(
name="",
items=[("cursor", "3D Cursor", "Use the Distance to 3D Cursor"),
("weight", "Weight Map", "Use a Painted Weight map"),
("index", "Mesh Indices", "Use Faces and Vertices index")],
description="How to Sort Bones for animation", default="cursor"
)
unfold_flip = BoolProperty(
name="Flipping Faces",
default=False,
description="Rotate faces around the Center and skip Scaling - "
"keep checked for both operators"
)
unfold_fold_duration = IntProperty(
name="Total Time",
min=5, soft_min=25,
max=10000, soft_max=2500,
default=200,
description="Total animation length"
)
unfold_sca_time = IntProperty(
name="Scale Time",
min=1,
max=5000, soft_max=500,
default=10,
description="Faces scaling time"
)
unfold_rot_time = IntProperty(
name="Rotation Time",
min=1, soft_min=5,
max=5000, soft_max=500,
default=15,
description="Faces rotation time"
)
unfold_rot_max = IntProperty(
name="Angle",
min=-180,
max=180,
default=135,
description="Faces rotation angle"
)
unfold_fold_noise = IntProperty(
name="Noise",
min=0,
max=500, soft_max=50,
default=0,
description="Offset some faces animation"
)
unfold_bounce = FloatProperty(
name="Bounce",
min=0,
max=10, soft_max=2.5,
default=0,
description="Add some bounce to rotation"
)
unfold_from_point = BoolProperty(
name="Point",
default=False,
description="Scale faces from a Point instead of from an Edge"
)
unfold_wiggle_rot = BoolProperty(
name="Wiggle",
default=False,
description="Use all Axis + Random Rotation instead of X Aligned"
)
# oscurart_constellation
constellation_limit = FloatProperty(
name="Initial Threshold",
description="Edges will be created only if the distance\n"
"between vertices is smaller than this value\n"
"This is a starting value on Operator Invoke",
default=2,
min=0
)
# Class list
classes = (
AdvancedObjPreferences1,
AdvancedObjProperties1,
)
def register():
for cls in classes:
bpy.utils.register_class(cls)
bpy.types.Scene.advanced_objects1 = PointerProperty(
type=AdvancedObjProperties1
)
prefs = get_addon_preferences()
for mod in sub_modules:
if not hasattr(mod, '__addon_enabled__'):
mod.__addon_enabled__ = False
name = mod.__name__.split('.')[-1]
if getattr(prefs, 'use_' + name):
register_submodule(mod)
def unregister():
for mod in sub_modules:
if mod.__addon_enabled__:
unregister_submodule(mod)
del bpy.types.Scene.advanced_objects1
for cls in reversed(classes):
bpy.utils.unregister_class(cls)
if __name__ == "__main__":
register()
| true
| true
|
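The core trick in the addon above is gen_update: one dynamically attached BoolProperty per submodule, whose update callback registers or unregisters that submodule. A stripped-down sketch of the same closure-per-name pattern with no bpy dependency (Registry and the module names are illustrative stand-ins):

    class Registry:
        enabled = set()
    def gen_update(name):
        def update(_self, value):
            # Enable on True, disable on False, mirroring the addon's callbacks.
            (Registry.enabled.add if value else Registry.enabled.discard)(name)
        return update
    updaters = {m: gen_update(m) for m in ("drop_to_ground", "unfold_transition")}
    updaters["drop_to_ground"](None, True)
    assert Registry.enabled == {"drop_to_ground"}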
1c4533e70cb3d4cda0a63ef32b57e6f92afbb550
| 433
|
py
|
Python
|
setup_moleval.py
|
MorganCThomas/MolScore
|
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
|
[
"MIT"
] | 28
|
2020-12-11T22:10:16.000Z
|
2022-02-25T05:00:51.000Z
|
setup_moleval.py
|
MorganCThomas/MolScore
|
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
|
[
"MIT"
] | 3
|
2021-08-31T22:50:41.000Z
|
2021-11-04T15:41:01.000Z
|
setup_moleval.py
|
MorganCThomas/MolScore
|
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
|
[
"MIT"
] | 9
|
2021-03-03T12:10:10.000Z
|
2022-02-15T06:53:11.000Z
|
from setuptools import setup, find_packages
setup(
name='moleval',
version='1.0',
packages=['moleval'] + ['moleval.'+p for p in find_packages(where="moleval")],
license='MIT',
author='Morgan Thomas',
author_email='morganthomas263@gmail.com',
    description='An evaluation framework for goal-directed generative models',
include_package_data=True,
package_data={'moleval': ['test/data/sample.smi']}
)
| 30.928571
| 82
| 0.69746
|
from setuptools import setup, find_packages
setup(
name='moleval',
version='1.0',
packages=['moleval'] + ['moleval.'+p for p in find_packages(where="moleval")],
license='MIT',
author='Morgan Thomas',
author_email='morganthomas263@gmail.com',
    description='An evaluation framework for goal-directed generative models',
include_package_data=True,
package_data={'moleval': ['test/data/sample.smi']}
)
| true
| true
|
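The packages expression in setup_moleval.py prefixes every subpackage found under moleval/ with the top-level name, which find_packages(where=...) does not do by itself. The same computation in isolation (the subpackage names here are hypothetical):

    subs = ["metrics", "utils"]  # stand-in for find_packages(where="moleval")
    packages = ["moleval"] + ["moleval." + p for p in subs]
    assert packages == ["moleval", "moleval.metrics", "moleval.utils"]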
1c45356cce2b4f69b3bb2ad77abcc74a79c6c0af
| 272
|
py
|
Python
|
src/openbiolink/gui/tqdmbuf.py
|
jerryhluo/OpenBioLink
|
6fc073af978daec0b0db5938b73beed37f57f495
|
[
"MIT"
] | 97
|
2019-11-26T09:53:18.000Z
|
2022-03-19T10:33:10.000Z
|
src/openbiolink/gui/tqdmbuf.py
|
jerryhluo/OpenBioLink
|
6fc073af978daec0b0db5938b73beed37f57f495
|
[
"MIT"
] | 67
|
2019-12-09T21:01:52.000Z
|
2021-12-21T15:19:41.000Z
|
src/openbiolink/gui/tqdmbuf.py
|
jerryhluo/OpenBioLink
|
6fc073af978daec0b0db5938b73beed37f57f495
|
[
"MIT"
] | 20
|
2020-01-13T23:02:25.000Z
|
2022-03-16T21:43:31.000Z
|
import io
class TqdmBuffer(io.StringIO):
    # Class-level attributes so a GUI thread can poll the latest progress line.
    foo = ""
    buf = ""
    def __init__(self):
        super(TqdmBuffer, self).__init__()
    def write(self, buf):
        # tqdm writes "\r<progress bar>"; keep only the stripped text.
        TqdmBuffer.foo = buf.strip("\r\n\t ")
    def flush(self):
        # Publish the last complete line on flush.
        TqdmBuffer.buf = TqdmBuffer.foo
| 17
| 45
| 0.580882
|
import io
class TqdmBuffer(io.StringIO):
foo = ""
buf = ""
def __init__(self):
super(TqdmBuffer, self).__init__()
def write(self, buf):
TqdmBuffer.foo = buf.strip("\r\n\t ")
def flush(self):
TqdmBuffer.buf = TqdmBuffer.foo
| true
| true
|
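TqdmBuffer exists so tqdm's carriage-return progress line can be captured into a class attribute instead of a terminal. A usage sketch (assumes tqdm is installed):

    from tqdm import tqdm
    buf = TqdmBuffer()
    for _ in tqdm(range(3), file=buf):
        pass
    print(TqdmBuffer.buf)  # last flushed progress line, stripped of "\r\n\t "

tqdm calls write() on each refresh and flush() afterwards, which is why the class publishes the line on flush rather than on write.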
1c453596a670a84db2e2af489075f7f2a241bdfa
| 154
|
py
|
Python
|
arc086_a.py
|
hythof/atc
|
12cb94ebe693e1f469ce0d982bc2924b586552cd
|
[
"CC0-1.0"
] | null | null | null |
arc086_a.py
|
hythof/atc
|
12cb94ebe693e1f469ce0d982bc2924b586552cd
|
[
"CC0-1.0"
] | null | null | null |
arc086_a.py
|
hythof/atc
|
12cb94ebe693e1f469ce0d982bc2924b586552cd
|
[
"CC0-1.0"
] | null | null | null |
from collections import Counter
# N values; keep the K most frequent, remove every occurrence of the rest.
N,K,*A = [int(x) for x in open(0).read().split()]
c=Counter(A)
s=c.most_common()[K:]  # (value, count) pairs outside the top K
ans=sum([n for _,n in s])  # total removals needed
print(ans)
| 22
| 49
| 0.655844
|
from collections import Counter
N,K,*A = [int(x) for x in open(0).read().split()]
c=Counter(A)
s=c.most_common()[K:]
ans=sum([n for _,n in s])
print(ans)
| true
| true
|
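The one-liner works because Counter.most_common() sorts by descending frequency, so the slice [K:] is exactly the set of values that must be deleted, and summing their counts gives the number of removals. A worked example on hypothetical input N=5, K=2, A=[1,1,2,2,3]:

    from collections import Counter
    c = Counter([1, 1, 2, 2, 3])
    removed = c.most_common()[2:]  # [(3, 1)]: everything past the top 2
    assert sum(n for _, n in removed) == 1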
1c4535bc026eff090d7af84d54154e33b7a62cfe
| 2,673
|
py
|
Python
|
rgd/geodata/api/download.py
|
Erotemic/ResonantGeoData
|
ff9aec9daf73353bcc95a9d30e98fcc5cdffc6e0
|
[
"Apache-2.0"
] | null | null | null |
rgd/geodata/api/download.py
|
Erotemic/ResonantGeoData
|
ff9aec9daf73353bcc95a9d30e98fcc5cdffc6e0
|
[
"Apache-2.0"
] | null | null | null |
rgd/geodata/api/download.py
|
Erotemic/ResonantGeoData
|
ff9aec9daf73353bcc95a9d30e98fcc5cdffc6e0
|
[
"Apache-2.0"
] | null | null | null |
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404 # , render
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rgd.geodata import models
from rgd.geodata.permissions import check_read_perm
@swagger_auto_schema(
method='GET',
operation_summary='Download ChecksumFile data directly from S3.',
)
@api_view(['GET'])
def download_checksum_file(request, pk):
instance = models.common.ChecksumFile.objects.get(pk=pk)
check_read_perm(request.user, instance)
return HttpResponseRedirect(instance.get_url())
@swagger_auto_schema(
method='GET',
operation_summary='Download the associated ImageFile data for this ImageEntry directly from S3.',
)
@api_view(['GET'])
def download_image_entry_file(request, pk):
instance = models.imagery.ImageEntry.objects.get(pk=pk)
check_read_perm(request.user, instance)
url = instance.image_file.imagefile.file.get_url()
return HttpResponseRedirect(url)
@swagger_auto_schema(
method='GET',
operation_summary='Download the associated ChecksumFile data for this ConvertedImageFile directly from S3.',
)
@api_view(['GET'])
def download_cog_file(request, pk):
instance = models.imagery.ConvertedImageFile.objects.get(pk=pk)
check_read_perm(request.user, instance)
af_id = instance.converted_file.id
instance = models.common.ChecksumFile.objects.get(pk=af_id)
return HttpResponseRedirect(instance.get_url())
def _get_status_response(request, model, pk):
model_class = ''.join([part[:1].upper() + part[1:] for part in model.split('_')])
if not hasattr(models, model_class):
raise AttributeError(f'No such model ({model})')
instance = get_object_or_404(getattr(models, model_class), pk=pk)
check_read_perm(request.user, instance)
if not hasattr(instance, 'status'):
raise AttributeError(f'Model ({model}) has no attribute (status).')
data = {
'pk': instance.pk,
'model': model,
'status': instance.status,
}
return Response(data)
@swagger_auto_schema(
method='GET',
operation_summary='Check the status.',
)
@api_view(['GET'])
def get_status(request, model, pk):
"""Get the status of any TaskEventMixin model."""
return _get_status_response(request, model, pk)
@swagger_auto_schema(
method='GET',
operation_summary='Check the status of SubsampledImage.',
)
@api_view(['GET'])
def get_status_subsampled_image(request, pk):
"""Get the status of any SubsampledImage model."""
return _get_status_response(request, 'SubsampledImage', pk)
| 33
| 112
| 0.737748
|
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rgd.geodata import models
from rgd.geodata.permissions import check_read_perm
@swagger_auto_schema(
method='GET',
operation_summary='Download ChecksumFile data directly from S3.',
)
@api_view(['GET'])
def download_checksum_file(request, pk):
instance = models.common.ChecksumFile.objects.get(pk=pk)
check_read_perm(request.user, instance)
return HttpResponseRedirect(instance.get_url())
@swagger_auto_schema(
method='GET',
operation_summary='Download the associated ImageFile data for this ImageEntry directly from S3.',
)
@api_view(['GET'])
def download_image_entry_file(request, pk):
instance = models.imagery.ImageEntry.objects.get(pk=pk)
check_read_perm(request.user, instance)
url = instance.image_file.imagefile.file.get_url()
return HttpResponseRedirect(url)
@swagger_auto_schema(
method='GET',
operation_summary='Download the associated ChecksumFile data for this ConvertedImageFile directly from S3.',
)
@api_view(['GET'])
def download_cog_file(request, pk):
instance = models.imagery.ConvertedImageFile.objects.get(pk=pk)
check_read_perm(request.user, instance)
af_id = instance.converted_file.id
instance = models.common.ChecksumFile.objects.get(pk=af_id)
return HttpResponseRedirect(instance.get_url())
def _get_status_response(request, model, pk):
model_class = ''.join([part[:1].upper() + part[1:] for part in model.split('_')])
if not hasattr(models, model_class):
raise AttributeError(f'No such model ({model})')
instance = get_object_or_404(getattr(models, model_class), pk=pk)
check_read_perm(request.user, instance)
if not hasattr(instance, 'status'):
raise AttributeError(f'Model ({model}) has no attribute (status).')
data = {
'pk': instance.pk,
'model': model,
'status': instance.status,
}
return Response(data)
@swagger_auto_schema(
method='GET',
operation_summary='Check the status.',
)
@api_view(['GET'])
def get_status(request, model, pk):
return _get_status_response(request, model, pk)
@swagger_auto_schema(
method='GET',
operation_summary='Check the status of SubsampledImage.',
)
@api_view(['GET'])
def get_status_subsampled_image(request, pk):
return _get_status_response(request, 'SubsampledImage', pk)
| true
| true
|
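_get_status_response derives the model class name by CamelCasing the snake_case token from the URL before the getattr lookup on models. The conversion step in isolation (the input token is arbitrary):

    model = "converted_image_file"
    model_class = "".join(part[:1].upper() + part[1:] for part in model.split("_"))
    assert model_class == "ConvertedImageFile"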
1c45386d87d0096209f4bdb484bfc04696f23f4b
| 15,690
|
py
|
Python
|
python/ccxt/async_support/bit2c.py
|
FullStackHan/ccxt
|
1efa15e162e23bae91215aa5d1e5199ebb7fe39e
|
[
"MIT"
] | null | null | null |
python/ccxt/async_support/bit2c.py
|
FullStackHan/ccxt
|
1efa15e162e23bae91215aa5d1e5199ebb7fe39e
|
[
"MIT"
] | null | null | null |
python/ccxt/async_support/bit2c.py
|
FullStackHan/ccxt
|
1efa15e162e23bae91215aa5d1e5199ebb7fe39e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
basestring # Python 3
except NameError:
basestring = str # Python 2
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ArgumentsRequired
class bit2c (Exchange):
def describe(self):
return self.deep_extend(super(bit2c, self).describe(), {
'id': 'bit2c',
'name': 'Bit2C',
'countries': ['IL'], # Israel
'rateLimit': 3000,
'has': {
'CORS': False,
'fetchOpenOrders': True,
'fetchMyTrades': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766119-3593220e-5ece-11e7-8b3a-5a041f6bcc3f.jpg',
'api': 'https://bit2c.co.il',
'www': 'https://www.bit2c.co.il',
'referral': 'https://bit2c.co.il/Aff/63bfed10-e359-420c-ab5a-ad368dab0baf',
'doc': [
'https://www.bit2c.co.il/home/api',
'https://github.com/OferE/bit2c',
],
},
'api': {
'public': {
'get': [
'Exchanges/{pair}/Ticker',
'Exchanges/{pair}/orderbook',
'Exchanges/{pair}/trades',
'Exchanges/{pair}/lasttrades',
],
},
'private': {
'post': [
'Merchant/CreateCheckout',
'Order/AddCoinFundsRequest',
'Order/AddFund',
'Order/AddOrder',
'Order/AddOrderMarketPriceBuy',
'Order/AddOrderMarketPriceSell',
'Order/CancelOrder',
'Order/AddCoinFundsRequest',
'Order/AddStopOrder',
'Payment/GetMyId',
'Payment/Send',
'Payment/Pay',
],
'get': [
'Account/Balance',
'Account/Balance/v2',
'Order/MyOrders',
'Order/GetById',
'Order/AccountHistory',
'Order/OrderHistory',
],
},
},
'markets': {
'BTC/NIS': {'id': 'BtcNis', 'symbol': 'BTC/NIS', 'base': 'BTC', 'quote': 'NIS', 'baseId': 'Btc', 'quoteId': 'Nis'},
'ETH/NIS': {'id': 'EthNis', 'symbol': 'ETH/NIS', 'base': 'ETH', 'quote': 'NIS', 'baseId': 'Eth', 'quoteId': 'Nis'},
'BCH/NIS': {'id': 'BchabcNis', 'symbol': 'BCH/NIS', 'base': 'BCH', 'quote': 'NIS', 'baseId': 'Bchabc', 'quoteId': 'Nis'},
'LTC/NIS': {'id': 'LtcNis', 'symbol': 'LTC/NIS', 'base': 'LTC', 'quote': 'NIS', 'baseId': 'Ltc', 'quoteId': 'Nis'},
'ETC/NIS': {'id': 'EtcNis', 'symbol': 'ETC/NIS', 'base': 'ETC', 'quote': 'NIS', 'baseId': 'Etc', 'quoteId': 'Nis'},
'BTG/NIS': {'id': 'BtgNis', 'symbol': 'BTG/NIS', 'base': 'BTG', 'quote': 'NIS', 'baseId': 'Btg', 'quoteId': 'Nis'},
'BSV/NIS': {'id': 'BchsvNis', 'symbol': 'BSV/NIS', 'base': 'BSV', 'quote': 'NIS', 'baseId': 'Bchsv', 'quoteId': 'Nis'},
'GRIN/NIS': {'id': 'GrinNis', 'symbol': 'GRIN/NIS', 'base': 'GRIN', 'quote': 'NIS', 'baseId': 'Grin', 'quoteId': 'Nis'},
},
'fees': {
'trading': {
'maker': 0.5 / 100,
'taker': 0.5 / 100,
},
},
'options': {
'fetchTradesMethod': 'public_get_exchanges_pair_lasttrades',
},
'exceptions': {
# {"error" : "Please provide valid APIkey"}
# {"error" : "Please provide valid nonce in Request UInt64.TryParse failed for nonce :"}
},
})
async def fetch_balance(self, params={}):
await self.load_markets()
balance = await self.privateGetAccountBalanceV2(params)
#
# {
# "AVAILABLE_NIS": 0.0,
# "NIS": 0.0,
# "LOCKED_NIS": 0.0,
# "AVAILABLE_BTC": 0.0,
# "BTC": 0.0,
# "LOCKED_BTC": 0.0,
# "AVAILABLE_ETH": 0.0,
# "ETH": 0.0,
# "LOCKED_ETH": 0.0,
# "AVAILABLE_BCHSV": 0.0,
# "BCHSV": 0.0,
# "LOCKED_BCHSV": 0.0,
# "AVAILABLE_BCHABC": 0.0,
# "BCHABC": 0.0,
# "LOCKED_BCHABC": 0.0,
# "AVAILABLE_LTC": 0.0,
# "LTC": 0.0,
# "LOCKED_LTC": 0.0,
# "AVAILABLE_ETC": 0.0,
# "ETC": 0.0,
# "LOCKED_ETC": 0.0,
# "AVAILABLE_BTG": 0.0,
# "BTG": 0.0,
# "LOCKED_BTG": 0.0,
# "AVAILABLE_GRIN": 0.0,
# "GRIN": 0.0,
# "LOCKED_GRIN": 0.0,
# "Fees": {
# "BtcNis": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "EthNis": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "BchabcNis": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "LtcNis": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "EtcNis": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "BtgNis": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "LtcBtc": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "BchsvNis": {"FeeMaker": 1.0, "FeeTaker": 1.0},
# "GrinNis": {"FeeMaker": 1.0, "FeeTaker": 1.0}
# }
# }
#
result = {'info': balance}
codes = list(self.currencies.keys())
for i in range(0, len(codes)):
code = codes[i]
account = self.account()
currency = self.currency(code)
uppercase = currency['id'].upper()
if uppercase in balance:
account['free'] = self.safe_float(balance, 'AVAILABLE_' + uppercase)
account['total'] = self.safe_float(balance, uppercase)
account['used'] = account['total'] - account['free']
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
request = {
'pair': self.market_id(symbol),
}
orderbook = await self.publicGetExchangesPairOrderbook(self.extend(request, params))
return self.parse_order_book(orderbook)
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
request = {
'pair': self.market_id(symbol),
}
ticker = await self.publicGetExchangesPairTicker(self.extend(request, params))
timestamp = self.milliseconds()
averagePrice = self.safe_float(ticker, 'av')
baseVolume = self.safe_float(ticker, 'a')
quoteVolume = None
if baseVolume is not None and averagePrice is not None:
quoteVolume = baseVolume * averagePrice
last = self.safe_float(ticker, 'll')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': self.safe_float(ticker, 'h'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'l'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': averagePrice,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = self.options['fetchTradesMethod']
request = {
'pair': market['id'],
}
response = await getattr(self, method)(self.extend(request, params))
if isinstance(response, basestring):
raise ExchangeError(response)
return self.parse_trades(response, market, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
method = 'privatePostOrderAddOrder'
request = {
'Amount': amount,
'Pair': self.market_id(symbol),
}
if type == 'market':
method += 'MarketPrice' + self.capitalize(side)
else:
request['Price'] = price
request['Total'] = amount * price
request['IsBid'] = (side == 'buy')
response = await getattr(self, method)(self.extend(request, params))
return {
'info': response,
'id': response['NewOrder']['id'],
}
async def cancel_order(self, id, symbol=None, params={}):
request = {
'id': id,
}
return await self.privatePostOrderCancelOrder(self.extend(request, params))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOpenOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.privateGetOrderMyOrders(self.extend(request, params))
orders = self.safe_value(response, market['id'], {})
asks = self.safe_value(orders, 'ask', [])
bids = self.safe_value(orders, 'bid', [])
return self.parse_orders(self.array_concat(asks, bids), market, since, limit)
def parse_order(self, order, market=None):
timestamp = self.safe_integer(order, 'created')
price = self.safe_float(order, 'price')
amount = self.safe_float(order, 'amount')
cost = None
if price is not None:
if amount is not None:
cost = price * amount
symbol = None
if market is not None:
symbol = market['symbol']
side = self.safe_value(order, 'type')
if side == 0:
side = 'buy'
elif side == 1:
side = 'sell'
id = self.safe_string(order, 'id')
status = self.safe_string(order, 'status')
return {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': None,
'side': side,
'price': price,
'amount': amount,
'filled': None,
'remaining': None,
'cost': cost,
'trades': None,
'fee': None,
'info': order,
}
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
method = 'privateGetOrderOrderhistory'
request = {}
if limit is not None:
request['take'] = limit
if since is not None:
request['toTime'] = self.ymd(self.milliseconds(), '.')
request['fromTime'] = self.ymd(since, '.')
if symbol is not None:
market = self.market(symbol)
request['pair'] = market['id']
response = await getattr(self, method)(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def parse_trade(self, trade, market=None):
timestamp = None
id = None
price = None
amount = None
orderId = None
feeCost = None
side = None
reference = self.safe_string(trade, 'reference')
if reference is not None:
timestamp = self.safe_integer(trade, 'ticks') * 1000
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'firstAmount')
reference_parts = reference.split('|') # reference contains: 'pair|orderId|tradeId'
if market is None:
marketId = self.safe_string(trade, 'pair')
                if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
elif reference_parts[0] in self.markets_by_id:
market = self.markets_by_id[reference_parts[0]]
orderId = reference_parts[1]
id = reference_parts[2]
side = self.safe_integer(trade, 'action')
if side == 0:
side = 'buy'
elif side == 1:
side = 'sell'
feeCost = self.safe_float(trade, 'feeAmount')
else:
timestamp = self.safe_integer(trade, 'date') * 1000
id = self.safe_string(trade, 'tid')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'amount')
side = self.safe_value(trade, 'isBid')
if side is not None:
if side:
side = 'buy'
else:
side = 'sell'
symbol = None
if market is not None:
symbol = market['symbol']
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': price * amount,
'fee': {
'cost': feeCost,
'currency': 'NIS',
'rate': None,
},
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + self.implode_params(path, params)
if api == 'public':
# lasttrades is the only endpoint that doesn't require the .json extension/suffix
if path.find('lasttrades') < 0:
url += '.json'
else:
self.check_required_credentials()
nonce = self.nonce()
query = self.extend({
'nonce': nonce,
}, params)
auth = self.urlencode(query)
if method == 'GET':
if query:
url += '?' + auth
else:
body = auth
signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha512, 'base64')
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'key': self.apiKey,
'sign': self.decode(signature),
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
| 39.621212
| 137
| 0.476801
|
from ccxt.async_support.base.exchange import Exchange
try:
    basestring
except NameError:
    basestring = str
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ArgumentsRequired
class bit2c (Exchange):
def describe(self):
return self.deep_extend(super(bit2c, self).describe(), {
'id': 'bit2c',
'name': 'Bit2C',
            'countries': ['IL'],
            'rateLimit': 3000,
'has': {
'CORS': False,
'fetchOpenOrders': True,
'fetchMyTrades': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766119-3593220e-5ece-11e7-8b3a-5a041f6bcc3f.jpg',
'api': 'https://bit2c.co.il',
'www': 'https://www.bit2c.co.il',
'referral': 'https://bit2c.co.il/Aff/63bfed10-e359-420c-ab5a-ad368dab0baf',
'doc': [
'https://www.bit2c.co.il/home/api',
'https://github.com/OferE/bit2c',
],
},
'api': {
'public': {
'get': [
'Exchanges/{pair}/Ticker',
'Exchanges/{pair}/orderbook',
'Exchanges/{pair}/trades',
'Exchanges/{pair}/lasttrades',
],
},
'private': {
'post': [
'Merchant/CreateCheckout',
'Order/AddCoinFundsRequest',
'Order/AddFund',
'Order/AddOrder',
'Order/AddOrderMarketPriceBuy',
'Order/AddOrderMarketPriceSell',
'Order/CancelOrder',
'Order/AddCoinFundsRequest',
'Order/AddStopOrder',
'Payment/GetMyId',
'Payment/Send',
'Payment/Pay',
],
'get': [
'Account/Balance',
'Account/Balance/v2',
'Order/MyOrders',
'Order/GetById',
'Order/AccountHistory',
'Order/OrderHistory',
],
},
},
'markets': {
'BTC/NIS': {'id': 'BtcNis', 'symbol': 'BTC/NIS', 'base': 'BTC', 'quote': 'NIS', 'baseId': 'Btc', 'quoteId': 'Nis'},
'ETH/NIS': {'id': 'EthNis', 'symbol': 'ETH/NIS', 'base': 'ETH', 'quote': 'NIS', 'baseId': 'Eth', 'quoteId': 'Nis'},
'BCH/NIS': {'id': 'BchabcNis', 'symbol': 'BCH/NIS', 'base': 'BCH', 'quote': 'NIS', 'baseId': 'Bchabc', 'quoteId': 'Nis'},
'LTC/NIS': {'id': 'LtcNis', 'symbol': 'LTC/NIS', 'base': 'LTC', 'quote': 'NIS', 'baseId': 'Ltc', 'quoteId': 'Nis'},
'ETC/NIS': {'id': 'EtcNis', 'symbol': 'ETC/NIS', 'base': 'ETC', 'quote': 'NIS', 'baseId': 'Etc', 'quoteId': 'Nis'},
'BTG/NIS': {'id': 'BtgNis', 'symbol': 'BTG/NIS', 'base': 'BTG', 'quote': 'NIS', 'baseId': 'Btg', 'quoteId': 'Nis'},
'BSV/NIS': {'id': 'BchsvNis', 'symbol': 'BSV/NIS', 'base': 'BSV', 'quote': 'NIS', 'baseId': 'Bchsv', 'quoteId': 'Nis'},
'GRIN/NIS': {'id': 'GrinNis', 'symbol': 'GRIN/NIS', 'base': 'GRIN', 'quote': 'NIS', 'baseId': 'Grin', 'quoteId': 'Nis'},
},
'fees': {
'trading': {
'maker': 0.5 / 100,
'taker': 0.5 / 100,
},
},
'options': {
'fetchTradesMethod': 'public_get_exchanges_pair_lasttrades',
},
'exceptions': {
},
})
async def fetch_balance(self, params={}):
await self.load_markets()
balance = await self.privateGetAccountBalanceV2(params)
result = {'info': balance}
codes = list(self.currencies.keys())
for i in range(0, len(codes)):
code = codes[i]
account = self.account()
currency = self.currency(code)
uppercase = currency['id'].upper()
if uppercase in balance:
account['free'] = self.safe_float(balance, 'AVAILABLE_' + uppercase)
account['total'] = self.safe_float(balance, uppercase)
account['used'] = account['total'] - account['free']
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
request = {
'pair': self.market_id(symbol),
}
orderbook = await self.publicGetExchangesPairOrderbook(self.extend(request, params))
return self.parse_order_book(orderbook)
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
request = {
'pair': self.market_id(symbol),
}
ticker = await self.publicGetExchangesPairTicker(self.extend(request, params))
timestamp = self.milliseconds()
averagePrice = self.safe_float(ticker, 'av')
baseVolume = self.safe_float(ticker, 'a')
quoteVolume = None
if baseVolume is not None and averagePrice is not None:
quoteVolume = baseVolume * averagePrice
last = self.safe_float(ticker, 'll')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': self.safe_float(ticker, 'h'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'l'),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': averagePrice,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = self.options['fetchTradesMethod']
request = {
'pair': market['id'],
}
response = await getattr(self, method)(self.extend(request, params))
if isinstance(response, basestring):
raise ExchangeError(response)
return self.parse_trades(response, market, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
method = 'privatePostOrderAddOrder'
request = {
'Amount': amount,
'Pair': self.market_id(symbol),
}
if type == 'market':
method += 'MarketPrice' + self.capitalize(side)
else:
request['Price'] = price
request['Total'] = amount * price
request['IsBid'] = (side == 'buy')
response = await getattr(self, method)(self.extend(request, params))
return {
'info': response,
'id': response['NewOrder']['id'],
}
async def cancel_order(self, id, symbol=None, params={}):
request = {
'id': id,
}
return await self.privatePostOrderCancelOrder(self.extend(request, params))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOpenOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.privateGetOrderMyOrders(self.extend(request, params))
orders = self.safe_value(response, market['id'], {})
asks = self.safe_value(orders, 'ask', [])
bids = self.safe_value(orders, 'bid', [])
return self.parse_orders(self.array_concat(asks, bids), market, since, limit)
def parse_order(self, order, market=None):
timestamp = self.safe_integer(order, 'created')
price = self.safe_float(order, 'price')
amount = self.safe_float(order, 'amount')
cost = None
if price is not None:
if amount is not None:
cost = price * amount
symbol = None
if market is not None:
symbol = market['symbol']
side = self.safe_value(order, 'type')
if side == 0:
side = 'buy'
elif side == 1:
side = 'sell'
id = self.safe_string(order, 'id')
status = self.safe_string(order, 'status')
return {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': None,
'side': side,
'price': price,
'amount': amount,
'filled': None,
'remaining': None,
'cost': cost,
'trades': None,
'fee': None,
'info': order,
}
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
method = 'privateGetOrderOrderhistory'
request = {}
if limit is not None:
request['take'] = limit
if since is not None:
request['toTime'] = self.ymd(self.milliseconds(), '.')
request['fromTime'] = self.ymd(since, '.')
if symbol is not None:
market = self.market(symbol)
request['pair'] = market['id']
response = await getattr(self, method)(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def parse_trade(self, trade, market=None):
timestamp = None
id = None
price = None
amount = None
orderId = None
feeCost = None
side = None
reference = self.safe_string(trade, 'reference')
if reference is not None:
timestamp = self.safe_integer(trade, 'ticks') * 1000
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'firstAmount')
            reference_parts = reference.split('|')
            if market is None:
marketId = self.safe_string(trade, 'pair')
                if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
elif reference_parts[0] in self.markets_by_id:
market = self.markets_by_id[reference_parts[0]]
orderId = reference_parts[1]
id = reference_parts[2]
side = self.safe_integer(trade, 'action')
if side == 0:
side = 'buy'
elif side == 1:
side = 'sell'
feeCost = self.safe_float(trade, 'feeAmount')
else:
timestamp = self.safe_integer(trade, 'date') * 1000
id = self.safe_string(trade, 'tid')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'amount')
side = self.safe_value(trade, 'isBid')
if side is not None:
if side:
side = 'buy'
else:
side = 'sell'
symbol = None
if market is not None:
symbol = market['symbol']
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': price * amount,
'fee': {
'cost': feeCost,
'currency': 'NIS',
'rate': None,
},
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + self.implode_params(path, params)
if api == 'public':
if path.find('lasttrades') < 0:
url += '.json'
else:
self.check_required_credentials()
nonce = self.nonce()
query = self.extend({
'nonce': nonce,
}, params)
auth = self.urlencode(query)
if method == 'GET':
if query:
url += '?' + auth
else:
body = auth
signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha512, 'base64')
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'key': self.apiKey,
'sign': self.decode(signature),
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
| true
| true
|
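The private branch of sign() above is a conventional exchange-auth scheme: urlencode the query (including a nonce), HMAC-SHA512 it with the API secret, and send the base64 signature in a header. A self-contained sketch of that computation (the key, secret and parameters are dummies):

    import base64, hashlib, hmac
    from urllib.parse import urlencode
    secret = b"dummy-secret"
    auth = urlencode({"nonce": 1, "Amount": 0.1})
    sig = base64.b64encode(hmac.new(secret, auth.encode(), hashlib.sha512).digest())
    headers = {"key": "dummy-apikey", "sign": sig.decode()}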
1c4538cc2359718686d6db32146a5a2b4f7febf8
| 801
|
py
|
Python
|
apps/MixMaster/Units/specificEntropy.py
|
VishalKandala/Cantera-1.7
|
750786f9b845a56fc177a9f1d5a9c5bb6ebd87cc
|
[
"BSD-3-Clause"
] | null | null | null |
apps/MixMaster/Units/specificEntropy.py
|
VishalKandala/Cantera-1.7
|
750786f9b845a56fc177a9f1d5a9c5bb6ebd87cc
|
[
"BSD-3-Clause"
] | null | null | null |
apps/MixMaster/Units/specificEntropy.py
|
VishalKandala/Cantera-1.7
|
750786f9b845a56fc177a9f1d5a9c5bb6ebd87cc
|
[
"BSD-3-Clause"
] | null | null | null |
#!/bin/env python
#
#--------------------------------------------------------------------------
#
# $License$
#
#--------------------------------------------------------------------------
# $Log: specificEntropy.py,v $
# Revision 1.1.1.1 2003/04/14 17:57:49 dggoodwin
# Initial import.
#
# Revision 1.1 2002/12/20 13:23:41 dgg
# first commit
#
# Revision 1.1.1.1 2000/01/21 22:59:51 dgg
# dgg Cantera
#
# Revision 1.1 1999/11/27 17:27:35 dgg
# initial import to Cantera repository
#
# Revision 1.1 1999/11/25 19:50:58 aivazis
# Original source
#
import SI, energy, mass
units = ['J__kg_K', 'kJ__kg_K', 'cal__g_K']
J__kg_K = SI.joule/(SI.kilogram * SI.kelvin)
kJ__kg_K = 1000.0*J__kg_K
cal__g_K = energy.calorie/(mass.gram * SI.kelvin)
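# Quick sanity check (a sketch, assuming energy.calorie is the thermochemical
# calorie of 4.184 J and the SI base units above equal 1.0):
#   cal__g_K / J__kg_K -> ~4184.0, i.e. 1 cal/(g*K) ~= 4184 J/(kg*K)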
#
# End of file
| 22.25
| 76
| 0.515605
|
import SI, energy, mass
units = ['J__kg_K', 'kJ__kg_K', 'cal__g_K']
J__kg_K = SI.joule/(SI.kilogram * SI.kelvin)
kJ__kg_K = 1000.0*J__kg_K
cal__g_K = energy.calorie/(mass.gram * SI.kelvin)
| true
| true
|
1c4538dde779970879db26d139380a7fc2409bbd
| 683
|
py
|
Python
|
architectures/arch_dsprite_burgess-g_kyle_d_vae.py
|
christopher-beckham/amr
|
1bd67b9b4fb2fcf07cc8faba3c863f5ad5d4c4c0
|
[
"BSD-3-Clause"
] | 35
|
2019-08-27T08:59:53.000Z
|
2021-09-19T15:55:34.000Z
|
architectures/arch_dsprite_burgess-g_kyle_d_vae.py
|
christopher-beckham/amr
|
1bd67b9b4fb2fcf07cc8faba3c863f5ad5d4c4c0
|
[
"BSD-3-Clause"
] | 4
|
2020-01-14T05:09:31.000Z
|
2020-05-25T20:39:55.000Z
|
architectures/arch_dsprite_burgess-g_kyle_d_vae.py
|
christopher-beckham/amr
|
1bd67b9b4fb2fcf07cc8faba3c863f5ad5d4c4c0
|
[
"BSD-3-Clause"
] | 3
|
2019-12-24T01:29:49.000Z
|
2020-12-06T01:56:19.000Z
|
import torch
from torch import nn
import torch.nn.functional as F
#from .shared import networks
from .konny.model import BetaVAE_B
from . import discriminators
from functools import partial
# TODO: add logging to this
def get_network(n_channels, ndf, **kwargs):
gen = BetaVAE_B(nc=n_channels, vae=True)
disc_x = discriminators.Discriminator(nf=ndf,
input_nc=n_channels,
n_classes=1,
sigmoid=True,
spec_norm=True)
return {
'gen': gen,
'disc_x': disc_x,
'class_mixer': None
}
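# Minimal usage sketch (hypothetical argument values, not from this repo):
#   nets = get_network(n_channels=1, ndf=32)
#   gen, disc_x = nets['gen'], nets['disc_x']
# `gen` is the Burgess-style BetaVAE_B run as a VAE; `disc_x` is a one-class,
# sigmoid-output discriminator with spectral norm, as configured above.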
| 31.045455
| 62
| 0.541728
|
import torch
from torch import nn
import torch.nn.functional as F
from .konny.model import BetaVAE_B
from . import discriminators
from functools import partial
def get_network(n_channels, ndf, **kwargs):
gen = BetaVAE_B(nc=n_channels, vae=True)
disc_x = discriminators.Discriminator(nf=ndf,
input_nc=n_channels,
n_classes=1,
sigmoid=True,
spec_norm=True)
return {
'gen': gen,
'disc_x': disc_x,
'class_mixer': None
}
| true
| true
|
1c4539377f1b84b2035abe30a51c80e4b68b7573
| 4,856
|
py
|
Python
|
.venv/lib/python2.7/site-packages/celery/backends/cache.py
|
MansoorHanif/FYP-web-app
|
918008d3b5eedaa904f3e720296afde9d73ac3f4
|
[
"BSD-3-Clause"
] | 4
|
2018-10-19T04:36:20.000Z
|
2020-02-13T16:14:09.000Z
|
.venv/lib/python2.7/site-packages/celery/backends/cache.py
|
MansoorHanif/FYP-web-app
|
918008d3b5eedaa904f3e720296afde9d73ac3f4
|
[
"BSD-3-Clause"
] | 3
|
2020-02-11T23:03:45.000Z
|
2021-06-10T18:05:11.000Z
|
oo/lib/python3.5/site-packages/celery/backends/cache.py
|
chunky2808/SPOJ-history-Django-App
|
490c58b1593cd3626f0ddc27fdd09c6e8d1c56e1
|
[
"MIT"
] | 1
|
2019-10-26T04:20:52.000Z
|
2019-10-26T04:20:52.000Z
|
# -*- coding: utf-8 -*-
"""Memcached and in-memory cache result backend."""
from __future__ import absolute_import, unicode_literals
import sys
from kombu.utils.encoding import bytes_to_str, ensure_bytes
from kombu.utils.objects import cached_property
from celery.exceptions import ImproperlyConfigured
from celery.utils.functional import LRUCache
from .base import KeyValueStoreBackend
__all__ = ['CacheBackend']
_imp = [None]
PY3 = sys.version_info[0] == 3
REQUIRES_BACKEND = """\
The Memcached backend requires either pylibmc or python-memcached.\
"""
UNKNOWN_BACKEND = """\
The cache backend {0!r} is unknown,
Please use one of the following backends instead: {1}\
"""
def import_best_memcache():
if _imp[0] is None:
is_pylibmc, memcache_key_t = False, ensure_bytes
try:
import pylibmc as memcache
is_pylibmc = True
except ImportError:
try:
import memcache # noqa
except ImportError:
raise ImproperlyConfigured(REQUIRES_BACKEND)
if PY3: # pragma: no cover
memcache_key_t = bytes_to_str
_imp[0] = (is_pylibmc, memcache, memcache_key_t)
return _imp[0]
def get_best_memcache(*args, **kwargs):
# pylint: disable=unpacking-non-sequence
# This is most definitely a sequence, but pylint thinks it's not.
is_pylibmc, memcache, key_t = import_best_memcache()
Client = _Client = memcache.Client
if not is_pylibmc:
def Client(*args, **kwargs): # noqa
kwargs.pop('behaviors', None)
return _Client(*args, **kwargs)
return Client, key_t
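# Usage sketch (illustrative; the server address is hypothetical):
#   Client, key_t = get_best_memcache()
#   client = Client(['127.0.0.1:11211'])
#   client.set(key_t('celery-task-meta-abc'), 'value')
# key_t normalizes keys: ensure_bytes by default, or bytes_to_str when
# falling back to python-memcached on Python 3.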
class DummyClient(object):
def __init__(self, *args, **kwargs):
self.cache = LRUCache(limit=5000)
def get(self, key, *args, **kwargs):
return self.cache.get(key)
def get_multi(self, keys):
cache = self.cache
return {k: cache[k] for k in keys if k in cache}
def set(self, key, value, *args, **kwargs):
self.cache[key] = value
def delete(self, key, *args, **kwargs):
self.cache.pop(key, None)
def incr(self, key, delta=1):
return self.cache.incr(key, delta)
def touch(self, key, expire):
pass
backends = {
'memcache': get_best_memcache,
'memcached': get_best_memcache,
'pylibmc': get_best_memcache,
'memory': lambda: (DummyClient, ensure_bytes),
}
class CacheBackend(KeyValueStoreBackend):
"""Cache result backend."""
servers = None
supports_autoexpire = True
supports_native_join = True
implements_incr = True
def __init__(self, app, expires=None, backend=None,
options={}, url=None, **kwargs):
super(CacheBackend, self).__init__(app, **kwargs)
self.url = url
self.options = dict(self.app.conf.cache_backend_options,
**options)
self.backend = url or backend or self.app.conf.cache_backend
if self.backend:
self.backend, _, servers = self.backend.partition('://')
self.servers = servers.rstrip('/').split(';')
self.expires = self.prepare_expires(expires, type=int)
try:
self.Client, self.key_t = backends[self.backend]()
except KeyError:
raise ImproperlyConfigured(UNKNOWN_BACKEND.format(
self.backend, ', '.join(backends)))
        self._encode_prefixes()  # re-encode the key prefixes
def get(self, key):
return self.client.get(key)
def mget(self, keys):
return self.client.get_multi(keys)
def set(self, key, value):
return self.client.set(key, value, self.expires)
def delete(self, key):
return self.client.delete(key)
def _apply_chord_incr(self, header, partial_args, group_id, body, **opts):
self.client.set(self.get_key_for_chord(group_id), 0, time=self.expires)
return super(CacheBackend, self)._apply_chord_incr(
header, partial_args, group_id, body, **opts)
def incr(self, key):
return self.client.incr(key)
def expire(self, key, value):
return self.client.touch(key, value)
@cached_property
def client(self):
return self.Client(self.servers, **self.options)
def __reduce__(self, args=(), kwargs={}):
servers = ';'.join(self.servers)
backend = '{0}://{1}/'.format(self.backend, servers)
kwargs.update(
dict(backend=backend,
expires=self.expires,
options=self.options))
return super(CacheBackend, self).__reduce__(args, kwargs)
def as_uri(self, *args, **kwargs):
"""Return the backend as an URI.
This properly handles the case of multiple servers.
"""
servers = ';'.join(self.servers)
return '{0}://{1}/'.format(self.backend, servers)
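# Example backend strings CacheBackend parses (sketch; hosts hypothetical):
#   'memcached://127.0.0.1:11211;127.0.0.2:11211/' -> memcached server list
#   'memory://'                                    -> in-process DummyClient
# The part before '://' selects an entry in the `backends` dict above; the
# remainder is stripped of a trailing '/' and split on ';' into self.servers.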
| 29.975309
| 79
| 0.632208
|
from __future__ import absolute_import, unicode_literals
import sys
from kombu.utils.encoding import bytes_to_str, ensure_bytes
from kombu.utils.objects import cached_property
from celery.exceptions import ImproperlyConfigured
from celery.utils.functional import LRUCache
from .base import KeyValueStoreBackend
__all__ = ['CacheBackend']
_imp = [None]
PY3 = sys.version_info[0] == 3
REQUIRES_BACKEND = """\
The Memcached backend requires either pylibmc or python-memcached.\
"""
UNKNOWN_BACKEND = """\
The cache backend {0!r} is unknown,
Please use one of the following backends instead: {1}\
"""
def import_best_memcache():
if _imp[0] is None:
is_pylibmc, memcache_key_t = False, ensure_bytes
try:
import pylibmc as memcache
is_pylibmc = True
except ImportError:
try:
                import memcache
            except ImportError:
raise ImproperlyConfigured(REQUIRES_BACKEND)
        if PY3:
            memcache_key_t = bytes_to_str
_imp[0] = (is_pylibmc, memcache, memcache_key_t)
return _imp[0]
def get_best_memcache(*args, **kwargs):
is_pylibmc, memcache, key_t = import_best_memcache()
Client = _Client = memcache.Client
if not is_pylibmc:
def Client(*args, **kwargs): # noqa
kwargs.pop('behaviors', None)
return _Client(*args, **kwargs)
return Client, key_t
class DummyClient(object):
def __init__(self, *args, **kwargs):
self.cache = LRUCache(limit=5000)
def get(self, key, *args, **kwargs):
return self.cache.get(key)
def get_multi(self, keys):
cache = self.cache
return {k: cache[k] for k in keys if k in cache}
def set(self, key, value, *args, **kwargs):
self.cache[key] = value
def delete(self, key, *args, **kwargs):
self.cache.pop(key, None)
def incr(self, key, delta=1):
return self.cache.incr(key, delta)
def touch(self, key, expire):
pass
backends = {
'memcache': get_best_memcache,
'memcached': get_best_memcache,
'pylibmc': get_best_memcache,
'memory': lambda: (DummyClient, ensure_bytes),
}
class CacheBackend(KeyValueStoreBackend):
servers = None
supports_autoexpire = True
supports_native_join = True
implements_incr = True
def __init__(self, app, expires=None, backend=None,
options={}, url=None, **kwargs):
super(CacheBackend, self).__init__(app, **kwargs)
self.url = url
self.options = dict(self.app.conf.cache_backend_options,
**options)
self.backend = url or backend or self.app.conf.cache_backend
if self.backend:
self.backend, _, servers = self.backend.partition('://')
self.servers = servers.rstrip('/').split(';')
self.expires = self.prepare_expires(expires, type=int)
try:
self.Client, self.key_t = backends[self.backend]()
except KeyError:
raise ImproperlyConfigured(UNKNOWN_BACKEND.format(
self.backend, ', '.join(backends)))
        self._encode_prefixes()  # re-encode the key prefixes
def get(self, key):
return self.client.get(key)
def mget(self, keys):
return self.client.get_multi(keys)
def set(self, key, value):
return self.client.set(key, value, self.expires)
def delete(self, key):
return self.client.delete(key)
def _apply_chord_incr(self, header, partial_args, group_id, body, **opts):
self.client.set(self.get_key_for_chord(group_id), 0, time=self.expires)
return super(CacheBackend, self)._apply_chord_incr(
header, partial_args, group_id, body, **opts)
def incr(self, key):
return self.client.incr(key)
def expire(self, key, value):
return self.client.touch(key, value)
@cached_property
def client(self):
return self.Client(self.servers, **self.options)
def __reduce__(self, args=(), kwargs={}):
servers = ';'.join(self.servers)
backend = '{0}://{1}/'.format(self.backend, servers)
kwargs.update(
dict(backend=backend,
expires=self.expires,
options=self.options))
return super(CacheBackend, self).__reduce__(args, kwargs)
def as_uri(self, *args, **kwargs):
servers = ';'.join(self.servers)
return '{0}://{1}/'.format(self.backend, servers)
| true
| true
|
1c453bfaaf39c5377c870fdd45ad36ff26553b79
| 13,662
|
py
|
Python
|
Behavioral decoding/HPC code/Flint_HPC_BDP_S_test.py
|
Next-Generation-Neural-Interfaces/Hardware-efficient-MUA-compression
|
853c6e0f3d085812e88fd0572ac7c64a172255d7
|
[
"MIT"
] | null | null | null |
Behavioral decoding/HPC code/Flint_HPC_BDP_S_test.py
|
Next-Generation-Neural-Interfaces/Hardware-efficient-MUA-compression
|
853c6e0f3d085812e88fd0572ac7c64a172255d7
|
[
"MIT"
] | null | null | null |
Behavioral decoding/HPC code/Flint_HPC_BDP_S_test.py
|
Next-Generation-Neural-Interfaces/Hardware-efficient-MUA-compression
|
853c6e0f3d085812e88fd0572ac7c64a172255d7
|
[
"MIT"
] | 1
|
2022-02-23T21:52:02.000Z
|
2022-02-23T21:52:02.000Z
|
"""
Evaluating spike-based BMI decoding using Wiener filter
Load Flint data from matlab. HPC version.
"""
# import packages
import numpy as np
from HPC_working_dir.functions.preprocess import input_shaping, split_index
from HPC_working_dir.functions.decoders import WienerCascadeDecoder
from HPC_working_dir.functions.metrics import compute_rmse, compute_pearson
import time as timer
import pickle
from scipy import io
import copy
import os
from os.path import exists
def moving_average(a, n=3) :
a = np.hstack((np.zeros(n-1),a))
ret = np.cumsum(a, dtype=float)
ret[n:] = ret[n:] - ret[:-n]
return ret[n - 1:] / n
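# Causal moving average: the n-1 leading zeros keep the output the same
# length as the input. Illustrative check (not part of the pipeline):
#   moving_average(np.array([1., 1., 1.]), n=2) -> array([0.5, 1., 1.])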
# Create formatted data folder on HPC directory "working_directory", upload
# formatted data (only the .mat files, all_binned_data.pkl not required) to it,
# and add the path below.
# Create a results_test_Flint folder, and add the path below.
# Upload filenames_Flint_test.txt to the HPC "working_directory".
def BDP_for_S_and_BP():
    # Path to HPC working directory (where neural data and results folders are located)
working_directory = ''
if working_directory == '':
print('Fill in path to working directory')
return 0
file_names = working_directory + 'filenames_Flint_test.txt'
# Directories
mat_folder = working_directory + 'neural_data/' # spike features folder
result_folder = working_directory + 'results_test_Flint/' # results folder
delta_time_vec = [0.001, 0.005, 0.01, 0.02, 0.05, 0.1]
time_steps_vec = [5, 10, 15]
lag_values_vec = [0, 5, 10]
window_len_vec = [0, 0.05, 0.1, 0.2]
alpha_vec = [0, 1e-4, 1e-2]
degree_vec = [2,3,4]
regular = 'l2' # regularisation type
num_fold = 5 # number of cross-validation folds
PBS_ARRAY_INDEX = int(os.environ['PBS_ARRAY_INDEX'])
print('PBS_ARRAY_INDEX: ' + str(PBS_ARRAY_INDEX))
print ("Starting simulation")
run_start = timer.time()
# Look at all stored file names, we have it this way so the indexing is
# consistent, always relative to the .txt file
with open(file_names) as f:
lines = f.readlines()
# Rec indexing
PBS_ARRAY_INDEX_rec = PBS_ARRAY_INDEX % len(lines)
count_1 = int(np.floor(PBS_ARRAY_INDEX/(len(lines))))
# BP indexing
PBS_ARRAY_INDEX_BP = count_1 % len(delta_time_vec)
count_2 = int(np.floor(count_1/(len(delta_time_vec))))
# Wdw indexing
PBS_ARRAY_INDEX_wdw = count_2 % len(window_len_vec)
delta_time = delta_time_vec[PBS_ARRAY_INDEX_BP]
file_name = lines[PBS_ARRAY_INDEX_rec].replace('\n','')
wdw_time = window_len_vec[PBS_ARRAY_INDEX_wdw]
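    # Worked example of the index decomposition above (hypothetical sizes):
    # with 12 recordings, 6 bin widths and 4 window lengths, PBS_ARRAY_INDEX
    # = 85 maps to rec 85 % 12 = 1, BP (85 // 12) % 6 = 1, and
    # wdw ((85 // 12) // 6) % 4 = 1.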
print('BP: ' + str(delta_time*1000) + '; Rec-sub: ' + file_name + ' - wdw: ' + str(wdw_time))
# Load neural data
mat_filename = mat_folder+file_name + '_BP_'+ str(int(delta_time*1000))+'_ms.mat'
print ("Loading input features from file: "+mat_filename)
f = io.loadmat(mat_filename)
model = WienerCascadeDecoder() # instantiate model
# Moving average window
wdw_samples = int(np.round(wdw_time / delta_time))
input_feature_1 = f['binned_MUA'][:]
cursor_vel_1 = f['collated_hand_vel'][:] # in mm/s
print('input shape: ' + str(np.shape(input_feature_1)))
print('output shape: ' + str(np.shape(cursor_vel_1)))
for timesteps in time_steps_vec:
input_feature = copy.deepcopy(input_feature_1)
cursor_vel = copy.deepcopy(cursor_vel_1[:,:2]) # ignore z-axis
input_dim = input_feature.shape[1] # input dimension
output_dim = cursor_vel.shape[1] # output dimension NOTE CHANGE FOR ORIG
# Initialise performance scores (RMSE and CC) with nan values
rmse_valid = np.full((num_fold,output_dim),np.nan)
rmse_test = np.copy(rmse_valid)
cc_valid = np.copy(rmse_valid)
cc_test = np.copy(rmse_valid)
time_train = np.full((num_fold),np.nan)
time_test = np.copy(time_train)
print ("Formatting input feature data")
stride = 1 # number of samples to be skipped
X_in = input_shaping(input_feature,timesteps,stride)
X_in = X_in.reshape(X_in.shape[0],(X_in.shape[1]*X_in.shape[2]),order='F')
print ("Formatting output (kinematic) data")
diff_samp = cursor_vel.shape[0]-X_in.shape[0]
Y_out = cursor_vel[diff_samp:,:] # in mm/s (remove it for new corrected velocity)
print ("Splitting input dataset into training, validation, and testing subdataset")
all_train_idx,all_valid_idx,all_test_idx = split_index(X_in,num_fold)
S_vector = np.arange(2,40)
copy_X_in = copy.deepcopy(X_in)
for S in S_vector:
X_in = copy.deepcopy(copy_X_in)
# Clip dynamic range
X_in[X_in>S] = S
print('S: ' + str(S))
# Moving average window
if wdw_samples != 0:
for channel in np.arange(len(X_in[0,:])):
X_in[:,channel] = moving_average(X_in[:,channel],wdw_samples)
for lag_value in lag_values_vec:
                lag = int(-0.004 / delta_time * lag_value) # lag between kinematic and neural data (minus indicates neural input occurs before kinematic)
for alpha in alpha_vec:
for degree in degree_vec:
params = {'timesteps':timesteps, 'regular': regular, 'alpha':alpha, 'degree':degree}
# Storing evaluation results into pkl file
result_filename = result_folder+file_name+\
'_delta_'+str(int(delta_time*1e3))+'ms_S_'+str(int(S))+\
'_wdw_' + str(int(wdw_time*1000)) + '_lag_'+str(lag_value)\
+ '_timestep_'+str(timesteps) +\
'_alpha_' + str(alpha) + '_deg_' \
+ str(degree) + '.pkl'
if exists(result_filename):
print('Results exist \n')
continue
for i in range(num_fold):
train_idx = all_train_idx[i]
valid_idx = all_valid_idx[i]
test_idx = all_test_idx[i]
# specify training dataset
X_train = X_in[train_idx,:]
Y_train = Y_out[train_idx,:]
# specify validation dataset
X_valid = X_in[valid_idx,:]
Y_valid = Y_out[valid_idx,:]
# specify validation dataset
X_test = X_in[test_idx,:]
Y_test = Y_out[test_idx,:]
# Standardise (z-score) input dataset
X_train_mean = np.nanmean(X_train,axis=0)
X_train_std = np.nanstd(X_train,axis=0)
X_train = (X_train - X_train_mean)/X_train_std
X_valid = (X_valid - X_train_mean)/X_train_std
X_test = (X_test - X_train_mean)/X_train_std
# Remove nan columns
remove = np.isnan(X_train[0,:])
X_train = np.delete(X_train,remove,1)
X_valid = np.delete(X_valid,remove,1)
X_test = np.delete(X_test,remove,1)
# Zero mean (centering) output dataset
Y_train_mean = np.nanmean(Y_train,axis=0)
Y_train = Y_train - Y_train_mean
Y_valid = Y_valid - Y_train_mean
Y_test = Y_test - Y_train_mean
                            # Re-align data to take lag into account
if lag < 0:
X_train = X_train[:lag,:] # remove lag first from end (X lag behind Y)
                                Y_train = Y_train[-lag:,:] # remove lag first from beginning
X_valid = X_valid[:lag,:]
Y_valid = Y_valid[-lag:,:]
X_test = X_test[:lag,:]
Y_test = Y_test[-lag:,:]
if lag > 0:
X_train = X_train[lag:,:] # remove lag first from beginning
Y_train = Y_train[:-lag,:] # remove lag first from end (X lead in front of Y)
X_valid = X_valid[lag:,:]
Y_valid = Y_valid[:-lag,:]
X_test = X_test[lag:,:]
Y_test = Y_test[:-lag,:]
print("Instantiating and training model...")
model = WienerCascadeDecoder() # instantiate model
start = timer.time()
model.fit(X_train,Y_train,**params) # train model
end = timer.time()
print("Model training took {:.2f} seconds".format(end - start))
time_train[i] = end - start
print("Evaluating model...")
Y_valid_predict = model.predict(X_valid)
start = timer.time()
Y_test_predict = model.predict(X_test)
end = timer.time()
print("Model testing took {:.2f} seconds".format(end - start))
time_test[i] = end - start
# Compute performance metrics
rmse_vld = compute_rmse(Y_valid,Y_valid_predict)
rmse_tst = compute_rmse(Y_test,Y_test_predict)
cc_vld = compute_pearson(Y_valid,Y_valid_predict)
cc_tst = compute_pearson(Y_test,Y_test_predict)
rmse_valid[i,:] = rmse_vld
rmse_test[i,:] = rmse_tst
cc_valid[i,:] = cc_vld
cc_test[i,:] = cc_tst
print("Fold-{} | Validation RMSE: {:.2f}".format(i,np.mean(rmse_vld)))
print("Fold-{} | Validation CC: {:.2f}".format(i,np.mean(cc_vld)))
print("Fold-{} | Testing RMSE: {:.2f}".format(i,np.mean(rmse_tst)))
print("Fold-{} | Testing CC: {:.2f}".format(i,np.mean(cc_tst)))
run_end = timer.time()
mean_rmse_valid = np.nanmean(rmse_valid,axis=0)
mean_rmse_test = np.nanmean(rmse_test,axis=0)
mean_cc_valid = np.nanmean(cc_valid,axis=0)
mean_cc_test = np.nanmean(cc_test,axis=0)
mean_time = np.nanmean(time_train,axis=0)
print("----------------------------------------------------------------------")
print("Validation Mean RMSE: %.3f " %(np.mean(mean_rmse_valid)))
print("Validation Mean CC: %.3f " %(np.mean(mean_cc_valid)))
print("Testing Mean RMSE: %.3f " %(np.mean(mean_rmse_test)))
print("Testing Mean CC: %.3f " %(np.mean(mean_cc_test)))
print("----------------------------------------------------------------------")
print ("Storing results into file: "+result_filename)
# Store results
with open(result_filename, 'wb') as file:
results = {'rmse_valid': rmse_valid,
'rmse_test': rmse_test,
'cc_valid': cc_valid,
                           'cc_test': cc_test}
#'Y_true': Y_test, # Shows how much of the validation data is used for assignment vs CR
#'Y_predict': Y_test_predict}
# A new file will be created
pickle.dump(results, file)
run_time = run_end - run_start
print ("Finished whole processes within %.2f seconds" % run_time)
print("All done")
| 48.792857
| 155
| 0.480091
|
import numpy as np
from HPC_working_dir.functions.preprocess import input_shaping, split_index
from HPC_working_dir.functions.decoders import WienerCascadeDecoder
from HPC_working_dir.functions.metrics import compute_rmse, compute_pearson
import time as timer
import pickle
from scipy import io
import copy
import os
from os.path import exists
def moving_average(a, n=3) :
a = np.hstack((np.zeros(n-1),a))
ret = np.cumsum(a, dtype=float)
ret[n:] = ret[n:] - ret[:-n]
return ret[n - 1:] / n
def BDP_for_S_and_BP():
working_directory = ''
if working_directory == '':
print('Fill in path to working directory')
return 0
file_names = working_directory + 'filenames_Flint_test.txt'
    mat_folder = working_directory + 'neural_data/'
    result_folder = working_directory + 'results_test_Flint/'
delta_time_vec = [0.001, 0.005, 0.01, 0.02, 0.05, 0.1]
time_steps_vec = [5, 10, 15]
lag_values_vec = [0, 5, 10]
window_len_vec = [0, 0.05, 0.1, 0.2]
alpha_vec = [0, 1e-4, 1e-2]
degree_vec = [2,3,4]
    regular = 'l2'
    num_fold = 5
PBS_ARRAY_INDEX = int(os.environ['PBS_ARRAY_INDEX'])
print('PBS_ARRAY_INDEX: ' + str(PBS_ARRAY_INDEX))
print ("Starting simulation")
run_start = timer.time()
with open(file_names) as f:
lines = f.readlines()
PBS_ARRAY_INDEX_rec = PBS_ARRAY_INDEX % len(lines)
count_1 = int(np.floor(PBS_ARRAY_INDEX/(len(lines))))
PBS_ARRAY_INDEX_BP = count_1 % len(delta_time_vec)
count_2 = int(np.floor(count_1/(len(delta_time_vec))))
PBS_ARRAY_INDEX_wdw = count_2 % len(window_len_vec)
delta_time = delta_time_vec[PBS_ARRAY_INDEX_BP]
file_name = lines[PBS_ARRAY_INDEX_rec].replace('\n','')
wdw_time = window_len_vec[PBS_ARRAY_INDEX_wdw]
print('BP: ' + str(delta_time*1000) + '; Rec-sub: ' + file_name + ' - wdw: ' + str(wdw_time))
mat_filename = mat_folder+file_name + '_BP_'+ str(int(delta_time*1000))+'_ms.mat'
print ("Loading input features from file: "+mat_filename)
f = io.loadmat(mat_filename)
model = WienerCascadeDecoder()
wdw_samples = int(np.round(wdw_time / delta_time))
input_feature_1 = f['binned_MUA'][:]
cursor_vel_1 = f['collated_hand_vel'][:]
print('input shape: ' + str(np.shape(input_feature_1)))
print('output shape: ' + str(np.shape(cursor_vel_1)))
for timesteps in time_steps_vec:
input_feature = copy.deepcopy(input_feature_1)
cursor_vel = copy.deepcopy(cursor_vel_1[:,:2])
        input_dim = input_feature.shape[1]
        output_dim = cursor_vel.shape[1]
rmse_valid = np.full((num_fold,output_dim),np.nan)
rmse_test = np.copy(rmse_valid)
cc_valid = np.copy(rmse_valid)
cc_test = np.copy(rmse_valid)
time_train = np.full((num_fold),np.nan)
time_test = np.copy(time_train)
print ("Formatting input feature data")
        stride = 1
        X_in = input_shaping(input_feature,timesteps,stride)
X_in = X_in.reshape(X_in.shape[0],(X_in.shape[1]*X_in.shape[2]),order='F')
print ("Formatting output (kinematic) data")
diff_samp = cursor_vel.shape[0]-X_in.shape[0]
Y_out = cursor_vel[diff_samp:,:]
print ("Splitting input dataset into training, validation, and testing subdataset")
all_train_idx,all_valid_idx,all_test_idx = split_index(X_in,num_fold)
S_vector = np.arange(2,40)
copy_X_in = copy.deepcopy(X_in)
for S in S_vector:
X_in = copy.deepcopy(copy_X_in)
X_in[X_in>S] = S
print('S: ' + str(S))
if wdw_samples != 0:
for channel in np.arange(len(X_in[0,:])):
X_in[:,channel] = moving_average(X_in[:,channel],wdw_samples)
for lag_value in lag_values_vec:
lag = int(-0.004 / delta_time * lag_value)
for alpha in alpha_vec:
for degree in degree_vec:
params = {'timesteps':timesteps, 'regular': regular, 'alpha':alpha, 'degree':degree}
result_filename = result_folder+file_name+\
'_delta_'+str(int(delta_time*1e3))+'ms_S_'+str(int(S))+\
'_wdw_' + str(int(wdw_time*1000)) + '_lag_'+str(lag_value)\
+ '_timestep_'+str(timesteps) +\
'_alpha_' + str(alpha) + '_deg_' \
+ str(degree) + '.pkl'
if exists(result_filename):
print('Results exist \n')
continue
for i in range(num_fold):
train_idx = all_train_idx[i]
valid_idx = all_valid_idx[i]
test_idx = all_test_idx[i]
X_train = X_in[train_idx,:]
Y_train = Y_out[train_idx,:]
X_valid = X_in[valid_idx,:]
Y_valid = Y_out[valid_idx,:]
X_test = X_in[test_idx,:]
Y_test = Y_out[test_idx,:]
X_train_mean = np.nanmean(X_train,axis=0)
X_train_std = np.nanstd(X_train,axis=0)
X_train = (X_train - X_train_mean)/X_train_std
X_valid = (X_valid - X_train_mean)/X_train_std
X_test = (X_test - X_train_mean)/X_train_std
remove = np.isnan(X_train[0,:])
X_train = np.delete(X_train,remove,1)
X_valid = np.delete(X_valid,remove,1)
X_test = np.delete(X_test,remove,1)
Y_train_mean = np.nanmean(Y_train,axis=0)
Y_train = Y_train - Y_train_mean
Y_valid = Y_valid - Y_train_mean
Y_test = Y_test - Y_train_mean
if lag < 0:
                                X_train = X_train[:lag,:]
                                Y_train = Y_train[-lag:,:]
                                X_valid = X_valid[:lag,:]
Y_valid = Y_valid[-lag:,:]
X_test = X_test[:lag,:]
Y_test = Y_test[-lag:,:]
if lag > 0:
                                X_train = X_train[lag:,:]
                                Y_train = Y_train[:-lag,:]
                                X_valid = X_valid[lag:,:]
Y_valid = Y_valid[:-lag,:]
X_test = X_test[lag:,:]
Y_test = Y_test[:-lag,:]
print("Instantiating and training model...")
                            model = WienerCascadeDecoder()
                            start = timer.time()
                            model.fit(X_train,Y_train,**params)
                            end = timer.time()
print("Model training took {:.2f} seconds".format(end - start))
time_train[i] = end - start
print("Evaluating model...")
Y_valid_predict = model.predict(X_valid)
start = timer.time()
Y_test_predict = model.predict(X_test)
end = timer.time()
print("Model testing took {:.2f} seconds".format(end - start))
time_test[i] = end - start
rmse_vld = compute_rmse(Y_valid,Y_valid_predict)
rmse_tst = compute_rmse(Y_test,Y_test_predict)
cc_vld = compute_pearson(Y_valid,Y_valid_predict)
cc_tst = compute_pearson(Y_test,Y_test_predict)
rmse_valid[i,:] = rmse_vld
rmse_test[i,:] = rmse_tst
cc_valid[i,:] = cc_vld
cc_test[i,:] = cc_tst
print("Fold-{} | Validation RMSE: {:.2f}".format(i,np.mean(rmse_vld)))
print("Fold-{} | Validation CC: {:.2f}".format(i,np.mean(cc_vld)))
print("Fold-{} | Testing RMSE: {:.2f}".format(i,np.mean(rmse_tst)))
print("Fold-{} | Testing CC: {:.2f}".format(i,np.mean(cc_tst)))
run_end = timer.time()
mean_rmse_valid = np.nanmean(rmse_valid,axis=0)
mean_rmse_test = np.nanmean(rmse_test,axis=0)
mean_cc_valid = np.nanmean(cc_valid,axis=0)
mean_cc_test = np.nanmean(cc_test,axis=0)
mean_time = np.nanmean(time_train,axis=0)
print("----------------------------------------------------------------------")
print("Validation Mean RMSE: %.3f " %(np.mean(mean_rmse_valid)))
print("Validation Mean CC: %.3f " %(np.mean(mean_cc_valid)))
print("Testing Mean RMSE: %.3f " %(np.mean(mean_rmse_test)))
print("Testing Mean CC: %.3f " %(np.mean(mean_cc_test)))
print("----------------------------------------------------------------------")
print ("Storing results into file: "+result_filename)
with open(result_filename, 'wb') as file:
results = {'rmse_valid': rmse_valid,
'rmse_test': rmse_test,
'cc_valid': cc_valid,
                       'cc_test': cc_test}
            pickle.dump(results, file)
run_time = run_end - run_start
print ("Finished whole processes within %.2f seconds" % run_time)
print("All done")
| true
| true
|
1c453de5d7c24725e9fd95307eec625076dae59e
| 9,931
|
py
|
Python
|
hchztests/tests/test_container_virtualssh.py
|
codedsk/hubcheck-hubzero-tests
|
89dd7164fed9161a5bf80e0a5635cec3da5be31d
|
[
"MIT"
] | 1
|
2016-01-02T01:36:14.000Z
|
2016-01-02T01:36:14.000Z
|
hchztests/tests/test_container_virtualssh.py
|
codedsk/hubcheck-hubzero-tests
|
89dd7164fed9161a5bf80e0a5635cec3da5be31d
|
[
"MIT"
] | null | null | null |
hchztests/tests/test_container_virtualssh.py
|
codedsk/hubcheck-hubzero-tests
|
89dd7164fed9161a5bf80e0a5635cec3da5be31d
|
[
"MIT"
] | null | null | null |
import hubcheck
import os
import pytest
import re
import sys
pytestmark = [ pytest.mark.container,
pytest.mark.virtualssh,
pytest.mark.reboot
]
class TestToolSession(hubcheck.testcase.TestCase2):
def setup_method(self,method):
self.username,self.userpass = \
self.testdata.find_account_for('registeredworkspace')
hubname = self.testdata.find_url_for('https')
self.session = hubcheck.shell.ToolSession(
host=hubname, username=self.username, password=self.userpass)
self._session_number = -1
self.shell = None
def teardown_method(self,method):
if self.shell is not None:
self.shell.close()
if self._session_number > 0:
self.session.stop(self._session_number)
del(self.session)
def test_session_help(self):
"""test issuing the 'session help' command"""
i,o,e = self.session.help()
output = o.read(1024)
assert output != '', "output is empty, no help data printed"
# def test_session_access_1(self):
# """test issuing 'ssh user@<hub> session'
# interactive shell without pty is not currently supported
# """
# shell = self.session.access(use_pty=False)
# # since there is no pty, there is no prompt and
# # we cannot use the execute() function
# shell.send('echo $SESSION')
# buf = self.get_buffer()
# idx = shell.expect(['(\d+)'])
# assert idx == 0,"echo $SESSION returned '%s'" % (buf)
# def test_session_access_2_1(self):
# """test issuing 'ssh -t user@<hub> session'
# with no other open sessions
# """
#
# self.shell = self.session.access()
# output,es = self.shell.execute('echo hi')
# assert output == 'hi',"output = %s" % (output)
def test_session_access_2_2(self):
"""test issuing 'ssh -t user@<hub> session'
with other open sessions
"""
# start up a session so we can get the session number
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
self.shell = self.session.access()
output,es = self.shell.execute('echo hi')
assert output == 'hi',"output = %s" % (output)
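    # Note (sketch): 'session create' is expected to print the new session
    # number on stdout; these tests recover it with re.search('(\d+)', output).
    # The exact banner text around the number is hub-dependent.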
# def test_session_access_3_1(self):
# """test issuing 'ssh -t user@<hub> session <command>'
# with no other open sessions
# """
#
# i,o,e = self.session.access(command='echo hi')
# output = o.read(1024)
# assert output == 'hi\n',"output = %s" % (output)
def test_session_access_3_2(self):
"""test issuing 'ssh -t user@<hub> session <command>'
with other open sessions
"""
# start up a session so we can get the session number
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
i,o,e = self.session.access(command='echo hi')
output = o.read(1024)
assert output == 'hi\n',"output = %s" % (output)
def test_session_access_4(self):
"""test issuing 'ssh -t user@<hub> session <session #>'
"""
# start up a session so we can get the session number
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
# try to access the newly started session
self.shell = self.session.access(session_number=session_number)
output,es = self.shell.execute('echo $SESSION')
assert int(output) > 0,"output = %s" % (output)
def test_session_access_5(self):
"""test issuing 'ssh -t user@<hub> session <session #> <command>'
"""
# start up a session so we can get the session number
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
# access the newly started session to run a command
i,o,e = self.session.access(
session_number=session_number,
command='echo $SESSION')
output = int(re.search('(\d+)',o.read(1024)).group(0))
assert session_number == output, "output = %s" % (output)
def test_session_list(self):
"""test issuing 'ssh user@<hub> session list'"""
i,o,e = self.session.list()
output = o.read(1024)
assert output != '',"output of list command is empty"
def test_session_create_1(self):
"""test issuing 'ssh user@<hub> session create'"""
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"output = %s\ninvalid session number: %s" % \
(output,session_number)
self._session_number = session_number
def test_session_create_2(self):
"""test issuing 'ssh user@<hub> session create <title>'"""
i,o,e = self.session.create(title="hc_test_workspace")
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"output = %s\ninvalid session number: %s" % \
(output,session_number)
self._session_number = session_number
def test_session_start(self):
"""test issuing 'ssh -t user@<hub> session start'"""
self.shell = self.session.start()
output,es = self.shell.execute('echo $SESSION')
assert int(output) > 0,"output = %s" % (output)
self._session_number = int(output)
def test_session_stop(self):
"""test issuing 'ssh -t user@<hub> session stop <session #>'"""
# start up a session so we can get the session number
i,o,e = self.session.create()
session_number = int(re.search('(\d+)',o.read(1024)).group(0))
assert session_number > 0, "session_number = %s" % (session_number)
self._session_number = session_number
# stop the session
i,o,e = self.session.stop(session_number=session_number)
output = o.read(1024)
        # 'stopping session' message doesn't seem to come across stdout or stderr
# match = re.search("stopping session (\d+)",output)
#
# self.assertTrue(match is not None,"output = %s" % (output))
#
# out_session_number = match.group(0)
# self.assertTrue(out_session_number == session_number,
# "out_session_number = %s\nsession_number=%s" % \
# (out_session_number,session_number))
#
self._session_number = -1
def test_get_open_session_detail_1(self):
"""test that get_open_session_detail, a wrapper for 'session list', returns a dict"""
data = self.session.get_open_session_detail()
def test_get_open_session_detail_2(self):
"""test that opening a new session shows up in get_open_session_detail"""
self.shell = self.session.start()
session_number = int(self.shell.execute('echo $SESSION')[0])
assert session_number > 0, \
"invalid session number: %s" % (session_number)
self._session_number = session_number
# account for the 5 seconds it takes between when the
# session is created to when the 'session list' command
# is updated
import time
time.sleep(5)
data = self.session.get_open_session_detail()
# check if the session number shows up in the
# 'session list' command output
has_session = False
for session_info in data.values():
if int(session_info['session_number']) == session_number:
has_session = True
break
assert has_session, \
"newly opened session number %d does not appear in %s" % \
(session_number,data)
def test_get_session_number_by_title_1(self):
"""test searching the 'session list' command for a session by title
when there is a matching title.
"""
title = 'tstest1'
# start a test session with a title
i,o,e = self.session.create(title)
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"invalid session number: %s\noutput = '%s'" % \
(session_number,output)
self._session_number = session_number
# account for the 5 seconds it takes between when the
# session is created to when the 'session list' command
# is updated
import time
time.sleep(5)
test_sn = int(self.session.get_session_number_by_title(title))
assert session_number == test_sn, \
"session_number = '%s', test_sn = '%s', output = '%s'" % \
(session_number,test_sn,output)
def test_get_session_number_by_title_2(self):
"""test searching the 'session list' command for a session by title
when there is no matching title.
"""
title = 'tstest2'
test_sn = int(self.session.get_session_number_by_title(title))
assert test_sn == -1, "test_sn = '%s'" % (test_sn)
| 32.243506
| 93
| 0.598631
|
import hubcheck
import os
import pytest
import re
import sys
pytestmark = [ pytest.mark.container,
pytest.mark.virtualssh,
pytest.mark.reboot
]
class TestToolSession(hubcheck.testcase.TestCase2):
def setup_method(self,method):
self.username,self.userpass = \
self.testdata.find_account_for('registeredworkspace')
hubname = self.testdata.find_url_for('https')
self.session = hubcheck.shell.ToolSession(
host=hubname, username=self.username, password=self.userpass)
self._session_number = -1
self.shell = None
def teardown_method(self,method):
if self.shell is not None:
self.shell.close()
if self._session_number > 0:
self.session.stop(self._session_number)
del(self.session)
def test_session_help(self):
i,o,e = self.session.help()
output = o.read(1024)
assert output != '', "output is empty, no help data printed"
def test_session_access_2_2(self):
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
self.shell = self.session.access()
output,es = self.shell.execute('echo hi')
assert output == 'hi',"output = %s" % (output)
def test_session_access_3_2(self):
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
i,o,e = self.session.access(command='echo hi')
output = o.read(1024)
assert output == 'hi\n',"output = %s" % (output)
def test_session_access_4(self):
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
self.shell = self.session.access(session_number=session_number)
output,es = self.shell.execute('echo $SESSION')
assert int(output) > 0,"output = %s" % (output)
def test_session_access_5(self):
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"session_number = %s\noutput = %s" % (session_number,output)
self._session_number = session_number
i,o,e = self.session.access(
session_number=session_number,
command='echo $SESSION')
output = int(re.search('(\d+)',o.read(1024)).group(0))
assert session_number == output, "output = %s" % (output)
def test_session_list(self):
i,o,e = self.session.list()
output = o.read(1024)
assert output != '',"output of list command is empty"
def test_session_create_1(self):
i,o,e = self.session.create()
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"output = %s\ninvalid session number: %s" % \
(output,session_number)
self._session_number = session_number
def test_session_create_2(self):
i,o,e = self.session.create(title="hc_test_workspace")
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"output = %s\ninvalid session number: %s" % \
(output,session_number)
self._session_number = session_number
def test_session_start(self):
self.shell = self.session.start()
output,es = self.shell.execute('echo $SESSION')
assert int(output) > 0,"output = %s" % (output)
self._session_number = int(output)
def test_session_stop(self):
i,o,e = self.session.create()
session_number = int(re.search('(\d+)',o.read(1024)).group(0))
assert session_number > 0, "session_number = %s" % (session_number)
self._session_number = session_number
i,o,e = self.session.stop(session_number=session_number)
output = o.read(1024)
self._session_number = -1
def test_get_open_session_detail_1(self):
data = self.session.get_open_session_detail()
def test_get_open_session_detail_2(self):
self.shell = self.session.start()
session_number = int(self.shell.execute('echo $SESSION')[0])
assert session_number > 0, \
"invalid session number: %s" % (session_number)
self._session_number = session_number
import time
time.sleep(5)
data = self.session.get_open_session_detail()
has_session = False
for session_info in data.values():
if int(session_info['session_number']) == session_number:
has_session = True
break
assert has_session, \
"newly opened session number %d does not appear in %s" % \
(session_number,data)
def test_get_session_number_by_title_1(self):
title = 'tstest1'
i,o,e = self.session.create(title)
output = o.read(1024)
session_number = int(re.search('(\d+)',output).group(0))
assert session_number > 0, \
"invalid session number: %s\noutput = '%s'" % \
(session_number,output)
self._session_number = session_number
import time
time.sleep(5)
test_sn = int(self.session.get_session_number_by_title(title))
assert session_number == test_sn, \
"session_number = '%s', test_sn = '%s', output = '%s'" % \
(session_number,test_sn,output)
def test_get_session_number_by_title_2(self):
title = 'tstest2'
test_sn = int(self.session.get_session_number_by_title(title))
assert test_sn == -1, "test_sn = '%s'" % (test_sn)
| true
| true
|
1c453f426ef1cb4b8320f235713f540c29364267
| 14,600
|
py
|
Python
|
shade/tests/unit/test__utils.py
|
obourdon/shade
|
6d9a821d722d270bf9b04827a8a65bddbaa68266
|
[
"Apache-2.0"
] | null | null | null |
shade/tests/unit/test__utils.py
|
obourdon/shade
|
6d9a821d722d270bf9b04827a8a65bddbaa68266
|
[
"Apache-2.0"
] | null | null | null |
shade/tests/unit/test__utils.py
|
obourdon/shade
|
6d9a821d722d270bf9b04827a8a65bddbaa68266
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import string
import tempfile
from uuid import uuid4
import mock
import testtools
from shade import _utils
from shade import exc
from shade.tests.unit import base
RANGE_DATA = [
dict(id=1, key1=1, key2=5),
dict(id=2, key1=1, key2=20),
dict(id=3, key1=2, key2=10),
dict(id=4, key1=2, key2=30),
dict(id=5, key1=3, key2=40),
dict(id=6, key1=3, key2=40),
]
class TestUtils(base.TestCase):
def test__filter_list_name_or_id(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto')
data = [el1, el2]
ret = _utils._filter_list(data, 'donald', None)
self.assertEqual([el1], ret)
def test__filter_list_name_or_id_special(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto[2017-01-10]', None)
self.assertEqual([el2], ret)
def test__filter_list_name_or_id_partial_bad(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto[2017-01]', None)
self.assertEqual([], ret)
def test__filter_list_name_or_id_partial_glob(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto*', None)
self.assertEqual([el2], ret)
def test__filter_list_name_or_id_non_glob_glob(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto', None)
self.assertEqual([], ret)
def test__filter_list_name_or_id_glob(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto')
el3 = dict(id=200, name='pluto-2')
data = [el1, el2, el3]
ret = _utils._filter_list(data, 'pluto*', None)
self.assertEqual([el2, el3], ret)
def test__filter_list_name_or_id_glob_not_found(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto')
el3 = dict(id=200, name='pluto-2')
data = [el1, el2, el3]
ret = _utils._filter_list(data, 'q*', None)
self.assertEqual([], ret)
def test__filter_list_unicode(self):
el1 = dict(id=100, name=u'中文', last='duck',
other=dict(category='duck', financial=dict(status='poor')))
el2 = dict(id=200, name=u'中文', last='trump',
other=dict(category='human', financial=dict(status='rich')))
el3 = dict(id=300, name='donald', last='ronald mac',
other=dict(category='clown', financial=dict(status='rich')))
data = [el1, el2, el3]
ret = _utils._filter_list(
data, u'中文',
{'other': {
'financial': {'status': 'rich'}
}})
self.assertEqual([el2], ret)
def test__filter_list_filter(self):
el1 = dict(id=100, name='donald', other='duck')
el2 = dict(id=200, name='donald', other='trump')
data = [el1, el2]
ret = _utils._filter_list(data, 'donald', {'other': 'duck'})
self.assertEqual([el1], ret)
def test__filter_list_filter_jmespath(self):
el1 = dict(id=100, name='donald', other='duck')
el2 = dict(id=200, name='donald', other='trump')
data = [el1, el2]
ret = _utils._filter_list(data, 'donald', "[?other == `duck`]")
self.assertEqual([el1], ret)
def test__filter_list_dict1(self):
el1 = dict(id=100, name='donald', last='duck',
other=dict(category='duck'))
el2 = dict(id=200, name='donald', last='trump',
other=dict(category='human'))
el3 = dict(id=300, name='donald', last='ronald mac',
other=dict(category='clown'))
data = [el1, el2, el3]
ret = _utils._filter_list(
data, 'donald', {'other': {'category': 'clown'}})
self.assertEqual([el3], ret)
def test__filter_list_dict2(self):
el1 = dict(id=100, name='donald', last='duck',
other=dict(category='duck', financial=dict(status='poor')))
el2 = dict(id=200, name='donald', last='trump',
other=dict(category='human', financial=dict(status='rich')))
el3 = dict(id=300, name='donald', last='ronald mac',
other=dict(category='clown', financial=dict(status='rich')))
data = [el1, el2, el3]
ret = _utils._filter_list(
data, 'donald',
{'other': {
'financial': {'status': 'rich'}
}})
self.assertEqual([el2, el3], ret)
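    # The nested-dict filter above matches only when every leaf value agrees:
    # {'other': {'financial': {'status': 'rich'}}} keeps el2 and el3 (status
    # 'rich') and drops el1 (status 'poor'). The JMESPath string form, e.g.
    # "[?other == `duck`]", is the equivalent spelling exercised in
    # test__filter_list_filter_jmespath above.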
def test_safe_dict_min_ints(self):
"""Test integer comparison"""
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_strs(self):
"""Test integer as strings comparison"""
data = [{'f1': '3'}, {'f1': '2'}, {'f1': '1'}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_None(self):
"""Test None values"""
data = [{'f1': 3}, {'f1': None}, {'f1': 1}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_key_missing(self):
"""Test missing key for an entry still works"""
data = [{'f1': 3}, {'x': 2}, {'f1': 1}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_key_not_found(self):
"""Test key not found in any elements returns None"""
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_min('doesnotexist', data)
self.assertIsNone(retval)
def test_safe_dict_min_not_int(self):
"""Test non-integer key value raises OSCE"""
data = [{'f1': 3}, {'f1': "aaa"}, {'f1': 1}]
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Search for minimum value failed. "
"Value for f1 is not an integer: aaa"
):
_utils.safe_dict_min('f1', data)
def test_safe_dict_max_ints(self):
"""Test integer comparison"""
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_strs(self):
"""Test integer as strings comparison"""
data = [{'f1': '3'}, {'f1': '2'}, {'f1': '1'}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_None(self):
"""Test None values"""
data = [{'f1': 3}, {'f1': None}, {'f1': 1}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_key_missing(self):
"""Test missing key for an entry still works"""
data = [{'f1': 3}, {'x': 2}, {'f1': 1}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_key_not_found(self):
"""Test key not found in any elements returns None"""
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_max('doesnotexist', data)
self.assertIsNone(retval)
def test_safe_dict_max_not_int(self):
"""Test non-integer key value raises OSCE"""
data = [{'f1': 3}, {'f1': "aaa"}, {'f1': 1}]
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Search for maximum value failed. "
"Value for f1 is not an integer: aaa"
):
_utils.safe_dict_max('f1', data)
def test_parse_range_None(self):
self.assertIsNone(_utils.parse_range(None))
def test_parse_range_invalid(self):
self.assertIsNone(_utils.parse_range("<invalid"))
def test_parse_range_int_only(self):
retval = _utils.parse_range("1024")
self.assertIsInstance(retval, tuple)
self.assertIsNone(retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_lt(self):
retval = _utils.parse_range("<1024")
self.assertIsInstance(retval, tuple)
self.assertEqual("<", retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_gt(self):
retval = _utils.parse_range(">1024")
self.assertIsInstance(retval, tuple)
self.assertEqual(">", retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_le(self):
retval = _utils.parse_range("<=1024")
self.assertIsInstance(retval, tuple)
self.assertEqual("<=", retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_ge(self):
retval = _utils.parse_range(">=1024")
self.assertIsInstance(retval, tuple)
self.assertEqual(">=", retval[0])
self.assertEqual(1024, retval[1])
def test_range_filter_min(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "min")
self.assertIsInstance(retval, list)
self.assertEqual(2, len(retval))
self.assertEqual(RANGE_DATA[:2], retval)
def test_range_filter_max(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "max")
self.assertIsInstance(retval, list)
self.assertEqual(2, len(retval))
self.assertEqual(RANGE_DATA[-2:], retval)
def test_range_filter_range(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "<3")
self.assertIsInstance(retval, list)
self.assertEqual(4, len(retval))
self.assertEqual(RANGE_DATA[:4], retval)
def test_range_filter_exact(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "2")
self.assertIsInstance(retval, list)
self.assertEqual(2, len(retval))
self.assertEqual(RANGE_DATA[2:4], retval)
def test_range_filter_invalid_int(self):
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Invalid range value: <1A0"
):
_utils.range_filter(RANGE_DATA, "key1", "<1A0")
def test_range_filter_invalid_op(self):
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Invalid range value: <>100"
):
_utils.range_filter(RANGE_DATA, "key1", "<>100")
def test_file_segment(self):
file_size = 4200
content = ''.join(random.SystemRandom().choice(
string.ascii_uppercase + string.digits)
for _ in range(file_size)).encode('latin-1')
self.imagefile = tempfile.NamedTemporaryFile(delete=False)
self.imagefile.write(content)
self.imagefile.close()
segments = self.cloud._get_file_segments(
endpoint='test_container/test_image',
filename=self.imagefile.name,
file_size=file_size,
segment_size=1000)
self.assertEqual(len(segments), 5)
segment_content = b''
for (index, (name, segment)) in enumerate(segments.items()):
self.assertEqual(
'test_container/test_image/{index:0>6}'.format(index=index),
name)
segment_content += segment.read()
self.assertEqual(content, segment_content)
def test_get_entity_pass_object(self):
obj = mock.Mock(id=uuid4().hex)
self.cloud.use_direct_get = True
self.assertEqual(obj, _utils._get_entity(self.cloud, '', obj, {}))
def test_get_entity_no_use_direct_get(self):
# test we are defaulting to the search_<resource> methods
        # if the use_direct_get flag is set to False (default).
uuid = uuid4().hex
resource = 'network'
func = 'search_%ss' % resource
filters = {}
with mock.patch.object(self.cloud, func) as search:
_utils._get_entity(self.cloud, resource, uuid, filters)
search.assert_called_once_with(uuid, filters)
def test_get_entity_no_uuid_like(self):
# test we are defaulting to the search_<resource> methods
        # if the name_or_id param is a name (string) but not a uuid.
self.cloud.use_direct_get = True
name = 'name_no_uuid'
resource = 'network'
func = 'search_%ss' % resource
filters = {}
with mock.patch.object(self.cloud, func) as search:
_utils._get_entity(self.cloud, resource, name, filters)
search.assert_called_once_with(name, filters)
def test_get_entity_pass_uuid(self):
uuid = uuid4().hex
self.cloud.use_direct_get = True
resources = ['flavor', 'image', 'volume', 'network',
'subnet', 'port', 'floating_ip', 'security_group']
for r in resources:
f = 'get_%s_by_id' % r
with mock.patch.object(self.cloud, f) as get:
_utils._get_entity(self.cloud, r, uuid, {})
get.assert_called_once_with(uuid)
def test_get_entity_pass_search_methods(self):
self.cloud.use_direct_get = True
resources = ['flavor', 'image', 'volume', 'network',
'subnet', 'port', 'floating_ip', 'security_group']
filters = {}
name = 'name_no_uuid'
for r in resources:
f = 'search_%ss' % r
with mock.patch.object(self.cloud, f) as search:
_utils._get_entity(self.cloud, r, name, {})
search.assert_called_once_with(name, filters)
def test_get_entity_get_and_search(self):
resources = ['flavor', 'image', 'volume', 'network',
'subnet', 'port', 'floating_ip', 'security_group']
for r in resources:
self.assertTrue(hasattr(self.cloud, 'get_%s_by_id' % r))
self.assertTrue(hasattr(self.cloud, 'search_%ss' % r))
| 38.32021
| 79
| 0.597603
|
import random
import string
import tempfile
from uuid import uuid4
import mock
import testtools
from shade import _utils
from shade import exc
from shade.tests.unit import base
RANGE_DATA = [
dict(id=1, key1=1, key2=5),
dict(id=2, key1=1, key2=20),
dict(id=3, key1=2, key2=10),
dict(id=4, key1=2, key2=30),
dict(id=5, key1=3, key2=40),
dict(id=6, key1=3, key2=40),
]
class TestUtils(base.TestCase):
def test__filter_list_name_or_id(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto')
data = [el1, el2]
ret = _utils._filter_list(data, 'donald', None)
self.assertEqual([el1], ret)
def test__filter_list_name_or_id_special(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto[2017-01-10]', None)
self.assertEqual([el2], ret)
def test__filter_list_name_or_id_partial_bad(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto[2017-01]', None)
self.assertEqual([], ret)
def test__filter_list_name_or_id_partial_glob(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto*', None)
self.assertEqual([el2], ret)
def test__filter_list_name_or_id_non_glob_glob(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto[2017-01-10]')
data = [el1, el2]
ret = _utils._filter_list(data, 'pluto', None)
self.assertEqual([], ret)
def test__filter_list_name_or_id_glob(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto')
el3 = dict(id=200, name='pluto-2')
data = [el1, el2, el3]
ret = _utils._filter_list(data, 'pluto*', None)
self.assertEqual([el2, el3], ret)
def test__filter_list_name_or_id_glob_not_found(self):
el1 = dict(id=100, name='donald')
el2 = dict(id=200, name='pluto')
el3 = dict(id=200, name='pluto-2')
data = [el1, el2, el3]
ret = _utils._filter_list(data, 'q*', None)
self.assertEqual([], ret)
def test__filter_list_unicode(self):
el1 = dict(id=100, name=u'中文', last='duck',
other=dict(category='duck', financial=dict(status='poor')))
el2 = dict(id=200, name=u'中文', last='trump',
other=dict(category='human', financial=dict(status='rich')))
el3 = dict(id=300, name='donald', last='ronald mac',
other=dict(category='clown', financial=dict(status='rich')))
data = [el1, el2, el3]
ret = _utils._filter_list(
data, u'中文',
{'other': {
'financial': {'status': 'rich'}
}})
self.assertEqual([el2], ret)
def test__filter_list_filter(self):
el1 = dict(id=100, name='donald', other='duck')
el2 = dict(id=200, name='donald', other='trump')
data = [el1, el2]
ret = _utils._filter_list(data, 'donald', {'other': 'duck'})
self.assertEqual([el1], ret)
def test__filter_list_filter_jmespath(self):
el1 = dict(id=100, name='donald', other='duck')
el2 = dict(id=200, name='donald', other='trump')
data = [el1, el2]
ret = _utils._filter_list(data, 'donald', "[?other == `duck`]")
self.assertEqual([el1], ret)
def test__filter_list_dict1(self):
el1 = dict(id=100, name='donald', last='duck',
other=dict(category='duck'))
el2 = dict(id=200, name='donald', last='trump',
other=dict(category='human'))
el3 = dict(id=300, name='donald', last='ronald mac',
other=dict(category='clown'))
data = [el1, el2, el3]
ret = _utils._filter_list(
data, 'donald', {'other': {'category': 'clown'}})
self.assertEqual([el3], ret)
def test__filter_list_dict2(self):
el1 = dict(id=100, name='donald', last='duck',
other=dict(category='duck', financial=dict(status='poor')))
el2 = dict(id=200, name='donald', last='trump',
other=dict(category='human', financial=dict(status='rich')))
el3 = dict(id=300, name='donald', last='ronald mac',
other=dict(category='clown', financial=dict(status='rich')))
data = [el1, el2, el3]
ret = _utils._filter_list(
data, 'donald',
{'other': {
'financial': {'status': 'rich'}
}})
self.assertEqual([el2, el3], ret)
def test_safe_dict_min_ints(self):
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_strs(self):
data = [{'f1': '3'}, {'f1': '2'}, {'f1': '1'}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_None(self):
data = [{'f1': 3}, {'f1': None}, {'f1': 1}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_key_missing(self):
data = [{'f1': 3}, {'x': 2}, {'f1': 1}]
retval = _utils.safe_dict_min('f1', data)
self.assertEqual(1, retval)
def test_safe_dict_min_key_not_found(self):
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_min('doesnotexist', data)
self.assertIsNone(retval)
def test_safe_dict_min_not_int(self):
data = [{'f1': 3}, {'f1': "aaa"}, {'f1': 1}]
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Search for minimum value failed. "
"Value for f1 is not an integer: aaa"
):
_utils.safe_dict_min('f1', data)
def test_safe_dict_max_ints(self):
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_strs(self):
data = [{'f1': '3'}, {'f1': '2'}, {'f1': '1'}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_None(self):
data = [{'f1': 3}, {'f1': None}, {'f1': 1}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_key_missing(self):
data = [{'f1': 3}, {'x': 2}, {'f1': 1}]
retval = _utils.safe_dict_max('f1', data)
self.assertEqual(3, retval)
def test_safe_dict_max_key_not_found(self):
data = [{'f1': 3}, {'f1': 2}, {'f1': 1}]
retval = _utils.safe_dict_max('doesnotexist', data)
self.assertIsNone(retval)
def test_safe_dict_max_not_int(self):
data = [{'f1': 3}, {'f1': "aaa"}, {'f1': 1}]
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Search for maximum value failed. "
"Value for f1 is not an integer: aaa"
):
_utils.safe_dict_max('f1', data)
def test_parse_range_None(self):
self.assertIsNone(_utils.parse_range(None))
def test_parse_range_invalid(self):
self.assertIsNone(_utils.parse_range("<invalid"))
def test_parse_range_int_only(self):
retval = _utils.parse_range("1024")
self.assertIsInstance(retval, tuple)
self.assertIsNone(retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_lt(self):
retval = _utils.parse_range("<1024")
self.assertIsInstance(retval, tuple)
self.assertEqual("<", retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_gt(self):
retval = _utils.parse_range(">1024")
self.assertIsInstance(retval, tuple)
self.assertEqual(">", retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_le(self):
retval = _utils.parse_range("<=1024")
self.assertIsInstance(retval, tuple)
self.assertEqual("<=", retval[0])
self.assertEqual(1024, retval[1])
def test_parse_range_ge(self):
retval = _utils.parse_range(">=1024")
self.assertIsInstance(retval, tuple)
self.assertEqual(">=", retval[0])
self.assertEqual(1024, retval[1])
def test_range_filter_min(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "min")
self.assertIsInstance(retval, list)
self.assertEqual(2, len(retval))
self.assertEqual(RANGE_DATA[:2], retval)
def test_range_filter_max(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "max")
self.assertIsInstance(retval, list)
self.assertEqual(2, len(retval))
self.assertEqual(RANGE_DATA[-2:], retval)
def test_range_filter_range(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "<3")
self.assertIsInstance(retval, list)
self.assertEqual(4, len(retval))
self.assertEqual(RANGE_DATA[:4], retval)
def test_range_filter_exact(self):
retval = _utils.range_filter(RANGE_DATA, "key1", "2")
self.assertIsInstance(retval, list)
self.assertEqual(2, len(retval))
self.assertEqual(RANGE_DATA[2:4], retval)
def test_range_filter_invalid_int(self):
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Invalid range value: <1A0"
):
_utils.range_filter(RANGE_DATA, "key1", "<1A0")
def test_range_filter_invalid_op(self):
with testtools.ExpectedException(
exc.OpenStackCloudException,
"Invalid range value: <>100"
):
_utils.range_filter(RANGE_DATA, "key1", "<>100")
def test_file_segment(self):
file_size = 4200
content = ''.join(random.SystemRandom().choice(
string.ascii_uppercase + string.digits)
for _ in range(file_size)).encode('latin-1')
self.imagefile = tempfile.NamedTemporaryFile(delete=False)
self.imagefile.write(content)
self.imagefile.close()
segments = self.cloud._get_file_segments(
endpoint='test_container/test_image',
filename=self.imagefile.name,
file_size=file_size,
segment_size=1000)
self.assertEqual(len(segments), 5)
segment_content = b''
for (index, (name, segment)) in enumerate(segments.items()):
self.assertEqual(
'test_container/test_image/{index:0>6}'.format(index=index),
name)
segment_content += segment.read()
self.assertEqual(content, segment_content)
def test_get_entity_pass_object(self):
obj = mock.Mock(id=uuid4().hex)
self.cloud.use_direct_get = True
self.assertEqual(obj, _utils._get_entity(self.cloud, '', obj, {}))
def test_get_entity_no_use_direct_get(self):
uuid = uuid4().hex
resource = 'network'
func = 'search_%ss' % resource
filters = {}
with mock.patch.object(self.cloud, func) as search:
_utils._get_entity(self.cloud, resource, uuid, filters)
search.assert_called_once_with(uuid, filters)
def test_get_entity_no_uuid_like(self):
self.cloud.use_direct_get = True
name = 'name_no_uuid'
resource = 'network'
func = 'search_%ss' % resource
filters = {}
with mock.patch.object(self.cloud, func) as search:
_utils._get_entity(self.cloud, resource, name, filters)
search.assert_called_once_with(name, filters)
def test_get_entity_pass_uuid(self):
uuid = uuid4().hex
self.cloud.use_direct_get = True
resources = ['flavor', 'image', 'volume', 'network',
'subnet', 'port', 'floating_ip', 'security_group']
for r in resources:
f = 'get_%s_by_id' % r
with mock.patch.object(self.cloud, f) as get:
_utils._get_entity(self.cloud, r, uuid, {})
get.assert_called_once_with(uuid)
def test_get_entity_pass_search_methods(self):
self.cloud.use_direct_get = True
resources = ['flavor', 'image', 'volume', 'network',
'subnet', 'port', 'floating_ip', 'security_group']
filters = {}
name = 'name_no_uuid'
for r in resources:
f = 'search_%ss' % r
with mock.patch.object(self.cloud, f) as search:
_utils._get_entity(self.cloud, r, name, {})
search.assert_called_once_with(name, filters)
def test_get_entity_get_and_search(self):
resources = ['flavor', 'image', 'volume', 'network',
'subnet', 'port', 'floating_ip', 'security_group']
for r in resources:
self.assertTrue(hasattr(self.cloud, 'get_%s_by_id' % r))
self.assertTrue(hasattr(self.cloud, 'search_%ss' % r))
| true
| true
|
1c453fdbba1ac5b86f5831ece9560ce0e7000157
| 2,630
|
py
|
Python
|
pytext/utils/distributed.py
|
twild-fb/pytext
|
07cadc0d130dac30d71d9da70380f124b3f5ac59
|
[
"BSD-3-Clause"
] | 2
|
2019-06-27T21:32:26.000Z
|
2019-10-16T06:20:25.000Z
|
pytext/utils/distributed.py
|
twild-fb/pytext
|
07cadc0d130dac30d71d9da70380f124b3f5ac59
|
[
"BSD-3-Clause"
] | null | null | null |
pytext/utils/distributed.py
|
twild-fb/pytext
|
07cadc0d130dac30d71d9da70380f124b3f5ac59
|
[
"BSD-3-Clause"
] | 1
|
2019-10-16T06:20:26.000Z
|
2019-10-16T06:20:26.000Z
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import torch
import torch.distributed as dist_c10d
def dist_init(
distributed_rank: int,
world_size: int,
init_method: str,
device_id: int,
backend: str = "nccl",
):
"""
1. After spawn process per GPU, we want all workers to call init_process_group
around the same time or times out.
2. After dist_init, we want all workers to start calling all_reduce/barrier
around the same time or NCCL timeouts.
"""
if init_method and world_size > 1 and torch.cuda.is_available():
dist_c10d.init_process_group(
backend=backend,
init_method=init_method,
world_size=world_size,
rank=distributed_rank,
)
# calling all_reduce to synchronize all workers
dist_tensor = torch.tensor(
[1], dtype=torch.float32, device="cuda:{}".format(device_id)
)
dist_c10d.all_reduce(dist_tensor)
if distributed_rank != 0:
suppress_output()
def suppress_output():
import builtins as __builtin__
builtin_print = __builtin__.print
def print(*args, **kwargs):
# force print the result when kwargs contains force and value is True
if kwargs.pop("force", False):
builtin_print(*args, **kwargs)
__builtin__.print = print
def get_shard_range(dataset_size: int, rank: int, world_size: int):
"""
In case dataset_size is not evenly divided by world_size, we need to pad
one extra example in each shard
shard_len = dataset_size // world_size + 1
Case 1 rank < remainder: each shard start position is rank * shard_len
Case 2 rank >= remainder: without padding, each shard start position is
rank * (shard_len - 1) + remainder = rank * shard_len - (rank - remainder)
But to make sure all shards have the same size, we need to pad one extra example
when rank >= remainder, so start_position = start_position - 1
For example, dataset_size = 21, world_size = 8
rank 0 to 4: [0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11], [12, 13, 14]
rank 5 to 7: [14, 15, 16], [16, 17, 18], [18, 19, 20]
"""
remainder = dataset_size % world_size
shard_len = dataset_size // world_size
if remainder == 0:
shard_offset = rank * shard_len
else:
# take one extra when dataset_size is not evenly divided by world_size
shard_len += 1
shard_offset = rank * shard_len - max(0, rank + 1 - remainder)
shard_end = shard_offset + shard_len - 1
return (shard_offset, shard_end)
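As a sanity check, the docstring's worked example can be reproduced directly; this snippet only calls get_shard_range as defined above:
# dataset_size=21, world_size=8: every shard is padded to exactly 3 items,
# and shards for ranks >= remainder overlap their neighbor by one example.
for rank in range(8):
    start, end = get_shard_range(21, rank, 8)
    assert end - start + 1 == 3
    print(rank, list(range(start, end + 1)))
# rank 0 -> [0, 1, 2] ... rank 5 -> [14, 15, 16] ... rank 7 -> [18, 19, 20]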
| 32.875
| 82
| 0.653232
|
import torch
import torch.distributed as dist_c10d
def dist_init(
distributed_rank: int,
world_size: int,
init_method: str,
device_id: int,
backend: str = "nccl",
):
if init_method and world_size > 1 and torch.cuda.is_available():
dist_c10d.init_process_group(
backend=backend,
init_method=init_method,
world_size=world_size,
rank=distributed_rank,
)
dist_tensor = torch.tensor(
[1], dtype=torch.float32, device="cuda:{}".format(device_id)
)
dist_c10d.all_reduce(dist_tensor)
if distributed_rank != 0:
suppress_output()
def suppress_output():
import builtins as __builtin__
builtin_print = __builtin__.print
def print(*args, **kwargs):
if kwargs.pop("force", False):
builtin_print(*args, **kwargs)
__builtin__.print = print
def get_shard_range(dataset_size: int, rank: int, world_size: int):
remainder = dataset_size % world_size
shard_len = dataset_size // world_size
if remainder == 0:
shard_offset = rank * shard_len
else:
shard_len += 1
shard_offset = rank * shard_len - max(0, rank + 1 - remainder)
shard_end = shard_offset + shard_len - 1
return (shard_offset, shard_end)
| true
| true
|
1c4542bacaf77780be537b1e01682b77026b72b7
| 1,980
|
py
|
Python
|
twitter_bot.py
|
sankarsh98/Twitter-Bot
|
34e7503ef31c452ff596a60aba8bfbb7fecf08b6
|
[
"MIT"
] | 7
|
2021-12-12T16:34:19.000Z
|
2022-02-14T12:07:56.000Z
|
twitter_bot.py
|
sankarsh98/Twitter-Bot
|
34e7503ef31c452ff596a60aba8bfbb7fecf08b6
|
[
"MIT"
] | 1
|
2021-12-12T17:00:04.000Z
|
2021-12-12T17:00:04.000Z
|
twitter_bot.py
|
sankarsh98/Twitter-Bot
|
34e7503ef31c452ff596a60aba8bfbb7fecf08b6
|
[
"MIT"
] | 1
|
2021-12-12T13:21:52.000Z
|
2021-12-12T13:21:52.000Z
|
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
# path to chrome driver, download it from https://sites.google.com/a/chromium.org/chromedriver/downloads
chrome_driver = "/Users/sanky/Downloads/chromedriver"
class TwitterBot:
def __init__(self,username,password,mobile):
self.browser=webdriver.Chrome(chrome_driver)
self.username=username
self.password=password
self.mobile=mobile
# signin to the account
def signIn(self):
self.browser.get("https://www.twitter.com/login")
time.sleep(5)
usernameInput = self.browser.find_element_by_name('text')
usernameInput.send_keys(self.username)
time.sleep(3)
self.browser.maximize_window()
nextButton1 = self.browser.find_element_by_xpath('//span[.="Next"]')
nextButton1.click()
time.sleep(3)
try:
middleInput = self.browser.find_element_by_name('text')
middleInput.send_keys(self.mobile)
time.sleep(3)
nextButton2 = self.browser.find_element_by_xpath('//span[.="Next"]')
nextButton2.click()
time.sleep(3)
except NoSuchElementException:
print ("exception")
passwordInput = self.browser.find_element_by_xpath('//input[@name="password"]')
passwordInput.send_keys(self.password)
# passwordInput.send_keys(Keys.ENTER)
time.sleep(5)
loginButton = self.browser.find_element_by_xpath('//span[.="Log in"]')
loginButton.click()
def TweetSomething(self, message):
time.sleep(5)
tweet_button = self.browser.find_element_by_xpath('//span[.="Tweet"]')
tweet_button.click()
tweet = self.browser.find_element_by_xpath('//div[@aria-label="Tweet text"]')
tweet.send_keys(message)
tweet.send_keys(Keys.COMMAND, Keys.ENTER)
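A hypothetical driver for the class above; the username, password, and phone number are placeholders, and chrome_driver must point at a real chromedriver binary:
if __name__ == "__main__":
    # Placeholder credentials -- substitute real values before running.
    bot = TwitterBot("example_user", "example_password", "0000000000")
    bot.signIn()
    bot.TweetSomething("Hello, world!")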
| 31.935484
| 104
| 0.664646
|
import time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
chrome_driver = "/Users/sanky/Downloads/chromedriver"
class TwitterBot:
def __init__(self,username,password,mobile):
self.browser=webdriver.Chrome(chrome_driver)
self.username=username
self.password=password
self.mobile=mobile
def signIn(self):
self.browser.get("https://www.twitter.com/login")
time.sleep(5)
usernameInput = self.browser.find_element_by_name('text')
usernameInput.send_keys(self.username)
time.sleep(3)
self.browser.maximize_window()
nextButton1 = self.browser.find_element_by_xpath('//span[.="Next"]')
nextButton1.click()
time.sleep(3)
try:
middleInput = self.browser.find_element_by_name('text')
middleInput.send_keys(self.mobile)
time.sleep(3)
nextButton2 = self.browser.find_element_by_xpath('//span[.="Next"]')
nextButton2.click()
time.sleep(3)
except NoSuchElementException:
print ("exception")
passwordInput = self.browser.find_element_by_xpath('//input[@name="password"]')
passwordInput.send_keys(self.password)
time.sleep(5)
loginButton = self.browser.find_element_by_xpath('//span[.="Log in"]')
loginButton.click()
def TweetSomething(self, message):
time.sleep(5)
tweet_button = self.browser.find_element_by_xpath('//span[.="Tweet"]')
tweet_button.click()
tweet = self.browser.find_element_by_xpath('//div[@aria-label="Tweet text"]')
tweet.send_keys(message)
tweet.send_keys(Keys.COMMAND, Keys.ENTER)
| true
| true
|
1c45432e3476eee07bd3665a03b2d5e4c182b9bb
| 6,538
|
py
|
Python
|
selfdrive/manager/manager.py
|
cfranyota/openpilot
|
977a0d829f0c6e373ffc6c3876d2cff77a1a5df2
|
[
"MIT"
] | 2
|
2022-01-07T09:00:14.000Z
|
2022-01-07T10:06:02.000Z
|
selfdrive/manager/manager.py
|
cfranyota/openpilot
|
977a0d829f0c6e373ffc6c3876d2cff77a1a5df2
|
[
"MIT"
] | null | null | null |
selfdrive/manager/manager.py
|
cfranyota/openpilot
|
977a0d829f0c6e373ffc6c3876d2cff77a1a5df2
|
[
"MIT"
] | 3
|
2021-12-18T18:58:35.000Z
|
2022-01-29T05:05:08.000Z
|
#!/usr/bin/env python3
import datetime
import os
import signal
import subprocess
import sys
import traceback
from typing import List, Tuple, Union
import cereal.messaging as messaging
import selfdrive.sentry as sentry
from common.basedir import BASEDIR
from common.params import Params, ParamKeyType
from common.text_window import TextWindow
from selfdrive.boardd.set_time import set_time
from selfdrive.hardware import HARDWARE, PC
from selfdrive.manager.helpers import unblock_stdout
from selfdrive.manager.process import ensure_running
from selfdrive.manager.process_config import managed_processes
from selfdrive.athena.registration import register, UNREGISTERED_DONGLE_ID
from selfdrive.swaglog import cloudlog, add_file_handler
from selfdrive.version import is_dirty, get_commit, get_version, get_origin, get_short_branch, \
terms_version, training_version
sys.path.append(os.path.join(BASEDIR, "pyextra"))
def manager_init() -> None:
# update system time from panda
set_time(cloudlog)
# save boot log
subprocess.call("./bootlog", cwd=os.path.join(BASEDIR, "selfdrive/loggerd"))
params = Params()
params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)
default_params: List[Tuple[str, Union[str, bytes]]] = [
("CompletedTrainingVersion", "0"),
("HasAcceptedTerms", "0"),
("OpenpilotEnabledToggle", "1"),
#("DisableRadar_Allow", "1"),
]
if not PC:
default_params.append(("LastUpdateTime", datetime.datetime.utcnow().isoformat().encode('utf8')))
if params.get_bool("RecordFrontLock"):
params.put_bool("RecordFront", True)
if not params.get_bool("DisableRadar_Allow"):
params.delete("DisableRadar")
# set unset params
for k, v in default_params:
if params.get(k) is None:
params.put(k, v)
# is this dashcam?
if os.getenv("PASSIVE") is not None:
params.put_bool("Passive", bool(int(os.getenv("PASSIVE", "0"))))
if params.get("Passive") is None:
raise Exception("Passive must be set to continue")
# Create folders needed for msgq
try:
os.mkdir("/dev/shm")
except FileExistsError:
pass
except PermissionError:
print("WARNING: failed to make /dev/shm")
# set version params
params.put("Version", get_version())
params.put("TermsVersion", terms_version)
params.put("TrainingVersion", training_version)
params.put("GitCommit", get_commit(default=""))
params.put("GitBranch", get_short_branch(default=""))
params.put("GitRemote", get_origin(default=""))
# set dongle id
reg_res = register(show_spinner=True)
if reg_res:
dongle_id = reg_res
else:
serial = params.get("HardwareSerial")
raise Exception(f"Registration failed for device {serial}")
os.environ['DONGLE_ID'] = dongle_id # Needed for swaglog
if not is_dirty():
os.environ['CLEAN'] = '1'
# init logging
sentry.init(sentry.SentryProject.SELFDRIVE)
cloudlog.bind_global(dongle_id=dongle_id, version=get_version(), dirty=is_dirty(),
device=HARDWARE.get_device_type())
def manager_prepare() -> None:
for p in managed_processes.values():
p.prepare()
def manager_cleanup() -> None:
# send signals to kill all procs
for p in managed_processes.values():
p.stop(block=False)
# ensure all are killed
for p in managed_processes.values():
p.stop(block=True)
cloudlog.info("everything is dead")
def manager_thread() -> None:
cloudlog.bind(daemon="manager")
cloudlog.info("manager start")
cloudlog.info({"environ": os.environ})
params = Params()
ignore: List[str] = []
if params.get("DongleId", encoding='utf8') in (None, UNREGISTERED_DONGLE_ID):
ignore += ["manage_athenad", "uploader"]
if os.getenv("NOBOARD") is not None:
ignore.append("pandad")
ignore += [x for x in os.getenv("BLOCK", "").split(",") if len(x) > 0]
ensure_running(managed_processes.values(), started=False, not_run=ignore)
started_prev = False
sm = messaging.SubMaster(['deviceState'])
pm = messaging.PubMaster(['managerState'])
while True:
sm.update()
not_run = ignore[:]
started = sm['deviceState'].started
driverview = params.get_bool("IsDriverViewEnabled")
ensure_running(managed_processes.values(), started, driverview, not_run)
# trigger an update after going offroad
if started_prev and not started and 'updated' in managed_processes:
os.sync()
managed_processes['updated'].signal(signal.SIGHUP)
started_prev = started
running = ' '.join("%s%s\u001b[0m" % ("\u001b[32m" if p.proc.is_alive() else "\u001b[31m", p.name)
for p in managed_processes.values() if p.proc)
print(running)
cloudlog.debug(running)
# send managerState
msg = messaging.new_message('managerState')
msg.managerState.processes = [p.get_process_state_msg() for p in managed_processes.values()]
pm.send('managerState', msg)
# Exit main loop when uninstall/shutdown/reboot is needed
shutdown = False
for param in ("DoUninstall", "DoShutdown", "DoReboot"):
if params.get_bool(param):
shutdown = True
params.put("LastManagerExitReason", param)
cloudlog.warning(f"Shutting down manager - {param} set")
if shutdown:
break
def main() -> None:
prepare_only = os.getenv("PREPAREONLY") is not None
manager_init()
# Start UI early so prepare can happen in the background
if not prepare_only:
managed_processes['ui'].start()
manager_prepare()
if prepare_only:
return
# SystemExit on sigterm
signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(1))
try:
manager_thread()
except Exception:
traceback.print_exc()
sentry.capture_exception()
finally:
manager_cleanup()
params = Params()
if params.get_bool("DoUninstall"):
cloudlog.warning("uninstalling")
HARDWARE.uninstall()
elif params.get_bool("DoReboot"):
cloudlog.warning("reboot")
HARDWARE.reboot()
elif params.get_bool("DoShutdown"):
cloudlog.warning("shutdown")
HARDWARE.shutdown()
if __name__ == "__main__":
unblock_stdout()
try:
main()
except Exception:
add_file_handler(cloudlog)
cloudlog.exception("Manager failed to start")
try:
managed_processes['ui'].stop()
except Exception:
pass
# Show last 3 lines of traceback
error = traceback.format_exc(-3)
error = "Manager failed to start\n\n" + error
with TextWindow(error) as t:
t.wait_for_exit()
raise
# manual exit because we are forked
sys.exit(0)
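The "set unset params" idiom in manager_init is worth isolating; a distilled sketch with a plain dict standing in for the Params store (not openpilot's actual API):
# Defaults are applied only when a key has never been written, so values
# set by the user (or a previous boot) are never clobbered.
store = {}
defaults = [("HasAcceptedTerms", "0"), ("OpenpilotEnabledToggle", "1")]
for key, value in defaults:
    if store.get(key) is None:
        store[key] = value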
| 27.821277
| 102
| 0.701285
|
import datetime
import os
import signal
import subprocess
import sys
import traceback
from typing import List, Tuple, Union
import cereal.messaging as messaging
import selfdrive.sentry as sentry
from common.basedir import BASEDIR
from common.params import Params, ParamKeyType
from common.text_window import TextWindow
from selfdrive.boardd.set_time import set_time
from selfdrive.hardware import HARDWARE, PC
from selfdrive.manager.helpers import unblock_stdout
from selfdrive.manager.process import ensure_running
from selfdrive.manager.process_config import managed_processes
from selfdrive.athena.registration import register, UNREGISTERED_DONGLE_ID
from selfdrive.swaglog import cloudlog, add_file_handler
from selfdrive.version import is_dirty, get_commit, get_version, get_origin, get_short_branch, \
terms_version, training_version
sys.path.append(os.path.join(BASEDIR, "pyextra"))
def manager_init() -> None:
set_time(cloudlog)
subprocess.call("./bootlog", cwd=os.path.join(BASEDIR, "selfdrive/loggerd"))
params = Params()
params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)
default_params: List[Tuple[str, Union[str, bytes]]] = [
("CompletedTrainingVersion", "0"),
("HasAcceptedTerms", "0"),
("OpenpilotEnabledToggle", "1"),
]
if not PC:
default_params.append(("LastUpdateTime", datetime.datetime.utcnow().isoformat().encode('utf8')))
if params.get_bool("RecordFrontLock"):
params.put_bool("RecordFront", True)
if not params.get_bool("DisableRadar_Allow"):
params.delete("DisableRadar")
for k, v in default_params:
if params.get(k) is None:
params.put(k, v)
if os.getenv("PASSIVE") is not None:
params.put_bool("Passive", bool(int(os.getenv("PASSIVE", "0"))))
if params.get("Passive") is None:
raise Exception("Passive must be set to continue")
try:
os.mkdir("/dev/shm")
except FileExistsError:
pass
except PermissionError:
print("WARNING: failed to make /dev/shm")
params.put("Version", get_version())
params.put("TermsVersion", terms_version)
params.put("TrainingVersion", training_version)
params.put("GitCommit", get_commit(default=""))
params.put("GitBranch", get_short_branch(default=""))
params.put("GitRemote", get_origin(default=""))
reg_res = register(show_spinner=True)
if reg_res:
dongle_id = reg_res
else:
serial = params.get("HardwareSerial")
raise Exception(f"Registration failed for device {serial}")
os.environ['DONGLE_ID'] = dongle_id
if not is_dirty():
os.environ['CLEAN'] = '1'
sentry.init(sentry.SentryProject.SELFDRIVE)
cloudlog.bind_global(dongle_id=dongle_id, version=get_version(), dirty=is_dirty(),
device=HARDWARE.get_device_type())
def manager_prepare() -> None:
for p in managed_processes.values():
p.prepare()
def manager_cleanup() -> None:
for p in managed_processes.values():
p.stop(block=False)
for p in managed_processes.values():
p.stop(block=True)
cloudlog.info("everything is dead")
def manager_thread() -> None:
cloudlog.bind(daemon="manager")
cloudlog.info("manager start")
cloudlog.info({"environ": os.environ})
params = Params()
ignore: List[str] = []
if params.get("DongleId", encoding='utf8') in (None, UNREGISTERED_DONGLE_ID):
ignore += ["manage_athenad", "uploader"]
if os.getenv("NOBOARD") is not None:
ignore.append("pandad")
ignore += [x for x in os.getenv("BLOCK", "").split(",") if len(x) > 0]
ensure_running(managed_processes.values(), started=False, not_run=ignore)
started_prev = False
sm = messaging.SubMaster(['deviceState'])
pm = messaging.PubMaster(['managerState'])
while True:
sm.update()
not_run = ignore[:]
started = sm['deviceState'].started
driverview = params.get_bool("IsDriverViewEnabled")
ensure_running(managed_processes.values(), started, driverview, not_run)
if started_prev and not started and 'updated' in managed_processes:
os.sync()
managed_processes['updated'].signal(signal.SIGHUP)
started_prev = started
running = ' '.join("%s%s\u001b[0m" % ("\u001b[32m" if p.proc.is_alive() else "\u001b[31m", p.name)
for p in managed_processes.values() if p.proc)
print(running)
cloudlog.debug(running)
msg = messaging.new_message('managerState')
msg.managerState.processes = [p.get_process_state_msg() for p in managed_processes.values()]
pm.send('managerState', msg)
shutdown = False
for param in ("DoUninstall", "DoShutdown", "DoReboot"):
if params.get_bool(param):
shutdown = True
params.put("LastManagerExitReason", param)
cloudlog.warning(f"Shutting down manager - {param} set")
if shutdown:
break
def main() -> None:
prepare_only = os.getenv("PREPAREONLY") is not None
manager_init()
if not prepare_only:
managed_processes['ui'].start()
manager_prepare()
if prepare_only:
return
signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(1))
try:
manager_thread()
except Exception:
traceback.print_exc()
sentry.capture_exception()
finally:
manager_cleanup()
params = Params()
if params.get_bool("DoUninstall"):
cloudlog.warning("uninstalling")
HARDWARE.uninstall()
elif params.get_bool("DoReboot"):
cloudlog.warning("reboot")
HARDWARE.reboot()
elif params.get_bool("DoShutdown"):
cloudlog.warning("shutdown")
HARDWARE.shutdown()
if __name__ == "__main__":
unblock_stdout()
try:
main()
except Exception:
add_file_handler(cloudlog)
cloudlog.exception("Manager failed to start")
try:
managed_processes['ui'].stop()
except Exception:
pass
error = traceback.format_exc(-3)
error = "Manager failed to start\n\n" + error
with TextWindow(error) as t:
t.wait_for_exit()
raise
sys.exit(0)
| true
| true
|
1c45433433a647c579569bbb598afc58791b69df
| 402
|
py
|
Python
|
utils/convert.py
|
alanhuang122/skyless-utils
|
773f23c37a95d97a3346948997b19eccbed8cc24
|
[
"MIT"
] | 2
|
2018-09-12T19:47:19.000Z
|
2019-11-07T07:46:10.000Z
|
utils/convert.py
|
alanhuang122/skyless-utils
|
773f23c37a95d97a3346948997b19eccbed8cc24
|
[
"MIT"
] | 11
|
2018-08-29T01:49:29.000Z
|
2019-07-08T18:54:16.000Z
|
utils/convert.py
|
alanhuang122/skyless-utils
|
773f23c37a95d97a3346948997b19eccbed8cc24
|
[
"MIT"
] | null | null | null |
import json
with open('skyless.dat', 'w') as f:
for name in ['areas', 'bargains', 'domiciles', 'events', 'exchanges', 'personae', 'prospects', 'qualities', 'settings', 'ports']:
with open(f'{name}.txt') as g:
data = json.loads(g.read())
for line in data:
f.write(f"{{\"key\": \"{name}:{line['Id']}\", \"value\": {json.dumps(line)}}}\n")
| 44.666667
| 133
| 0.512438
|
import json
with open('skyless.dat', 'w') as f:
for name in ['areas', 'bargains', 'domiciles', 'events', 'exchanges', 'personae', 'prospects', 'qualities', 'settings', 'ports']:
with open(f'{name}.txt') as g:
data = json.loads(g.read())
for line in data:
f.write(f"{{\"key\": \"{name}:{line['Id']}\", \"value\": {json.dumps(line)}}}\n")
| true
| true
|
1c45439138577fd473020f8d53be33fe6f9ba3bf
| 421
|
py
|
Python
|
src/config/config.py
|
burakkkara/SeniorProject
|
1f878a5c007c910f210c900911897ff34c498b12
|
[
"Apache-2.0"
] | 2
|
2022-01-10T07:24:06.000Z
|
2022-01-10T16:19:18.000Z
|
src/config/config.py
|
burakkkara/SeniorProject
|
1f878a5c007c910f210c900911897ff34c498b12
|
[
"Apache-2.0"
] | null | null | null |
src/config/config.py
|
burakkkara/SeniorProject
|
1f878a5c007c910f210c900911897ff34c498b12
|
[
"Apache-2.0"
] | null | null | null |
# Primary connection info
primary = {
'ip': '192.168.1.34',
'port': 8080
}
# Secondary connection info
secondary = {
'ip': '192.168.43.38',
# 'ip': '192.168.43.3854',
'port': 8081
}
# Test server info
# Resides on AWS
server = {
'ip': '3.134.95.115',
'port': 8080
}
# Request info
requested = {
'httpVersion': 'http://',
'httpPort': 80,
'httpsPort': 443
}
use_anonymous = True
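A hypothetical consumer of these dicts (not from the project), composing the test server's base URL from requested and server:
# Builds 'http://3.134.95.115:8080' from the config values above.
base_url = "{}{}:{}".format(requested['httpVersion'], server['ip'], server['port'])
print(base_url)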
| 14.517241
| 30
| 0.565321
|
primary = {
'ip': '192.168.1.34',
'port': 8080
}
secondary = {
'ip': '192.168.43.38',
'port': 8081
}
server = {
'ip': '3.134.95.115',
'port': 8080
}
requested = {
'httpVersion': 'http://',
'httpPort': 80,
'httpsPort': 443
}
use_anonymous = True
| true
| true
|
1c45441e2bef433e828fef7485bbc809e1defc92
| 1,097
|
py
|
Python
|
tests/nark/log_tests.py
|
shadowmint/python-nark
|
7919c544c8386a023f6a9d0ffbc50e1145604528
|
[
"Apache-2.0"
] | null | null | null |
tests/nark/log_tests.py
|
shadowmint/python-nark
|
7919c544c8386a023f6a9d0ffbc50e1145604528
|
[
"Apache-2.0"
] | null | null | null |
tests/nark/log_tests.py
|
shadowmint/python-nark
|
7919c544c8386a023f6a9d0ffbc50e1145604528
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Douglas Linder
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import bootstrap
import nark
class LogTests(unittest.TestCase):
def test_can_create_logger(self):
a = nark.Assert()
i = nark.Logging.get()
a.not_null(i, "Unable to create log instance")
def test_can_log_message(self):
a = nark.Assert()
i = nark.Logging.get()
i.debug("Hello %s", "world")
i.info("Hello %s", "world")
i.warning("Hello %s", "world")
i.error("Hello %s", "world")
i.critical("Hello %s", "world")
if __name__ == "__main__":
unittest.main()
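Outside the test harness, the API exercised above reduces to a couple of calls; a sketch assuming nark.Logging.get() hands back a stdlib-style logger, as the tests imply:
import nark

# %-style format arguments are passed through, exactly as in the tests.
log = nark.Logging.get()
log.info("service started on port %s", 8080)
log.error("request failed: %s", "timeout")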
| 28.868421
| 74
| 0.703737
|
import unittest
import bootstrap
import nark
class LogTests(unittest.TestCase):
def test_can_create_logger(self):
a = nark.Assert()
i = nark.Logging.get()
a.not_null(i, "Unable to create log instance")
def test_can_log_message(self):
a = nark.Assert()
i = nark.Logging.get()
i.debug("Hello %s", "world")
i.info("Hello %s", "world")
i.warning("Hello %s", "world")
i.error("Hello %s", "world")
i.critical("Hello %s", "world")
if __name__ == "__main__":
unittest.main()
| true
| true
|
1c4544f61fe44587878d56854ec0f0a9d3c481cd
| 9,875
|
py
|
Python
|
src/server.py
|
HintikkaKimmo/rasa_nlu
|
cf71683351f86a9880793273c8ce877c411f9de1
|
[
"Apache-2.0"
] | 2
|
2016-12-20T13:19:44.000Z
|
2017-01-16T10:44:35.000Z
|
src/server.py
|
HintikkaKimmo/rasa_nlu
|
cf71683351f86a9880793273c8ce877c411f9de1
|
[
"Apache-2.0"
] | null | null | null |
src/server.py
|
HintikkaKimmo/rasa_nlu
|
cf71683351f86a9880793273c8ce877c411f9de1
|
[
"Apache-2.0"
] | 1
|
2017-01-02T15:16:49.000Z
|
2017-01-02T15:16:49.000Z
|
import argparse
import json
import os
import urllib.parse
import multiprocessing
import glob
import warnings
import logging
from http.server import BaseHTTPRequestHandler, HTTPServer
from rasa_nlu.train import do_train
from rasa_nlu.config import RasaNLUConfig
class RasaNLUServer(object):
def __init__(self, config):
self.server = None
self.config = config
self.logfile = config.write
self.emulator = self.__create_emulator()
self.interpreter = self.__create_interpreter()
self.data_router = DataRouter(config, self.interpreter, self.emulator)
if 'DYNO' in os.environ and config.backend == 'mitie': # running on Heroku
from rasa_nlu.featurizers.mitie_featurizer import MITIEFeaturizer
MITIEFeaturizer(config.mitie_file)
def __create_interpreter(self):
model_dir = self.config.server_model_dir
metadata, backend = None, None
if model_dir is not None:
# download model from S3 if needed
if not os.path.isdir(model_dir):
try:
from rasa_nlu.persistor import Persistor
p = Persistor(self.config.path, self.config.aws_region, self.config.bucket_name)
p.fetch_and_extract('{0}.tar.gz'.format(os.path.basename(model_dir)))
except:
warnings.warn("using default interpreter, couldn't find model dir or fetch it from S3")
metadata = json.loads(open(os.path.join(model_dir, 'metadata.json'), 'rb').read())
backend = metadata["backend"]
if backend is None:
from .interpreters.simple_interpreter import HelloGoodbyeInterpreter
return HelloGoodbyeInterpreter()
elif backend.lower() == 'mitie':
logging.info("using mitie backend")
from .interpreters.mitie_interpreter import MITIEInterpreter
return MITIEInterpreter(**metadata)
elif backend.lower() == 'spacy_sklearn':
logging.info("using spacy + sklearn backend")
from .interpreters.spacy_sklearn_interpreter import SpacySklearnInterpreter
return SpacySklearnInterpreter(**metadata)
else:
raise ValueError("unknown backend : {0}".format(backend))
def __create_emulator(self):
mode = self.config.emulate
if mode is None:
from .emulators import NoEmulator
return NoEmulator()
elif mode.lower() == 'wit':
from .emulators.wit import WitEmulator
return WitEmulator()
elif mode.lower() == 'luis':
from .emulators.luis import LUISEmulator
return LUISEmulator()
elif mode.lower() == 'api':
from .emulators.api import ApiEmulator
return ApiEmulator()
else:
raise ValueError("unknown mode : {0}".format(mode))
def start(self):
self.server = HTTPServer(('', self.config.port), lambda *args: RasaRequestHandler(self.data_router, *args))
logging.info('Started http server on port %s' % self.config.port)
self.server.serve_forever()
def stop(self):
logging.info('^C received. Aborting.')
if len(self.data_router.responses) > 0:
logging.info('saving logs')
self.data_router.write_logs()
if self.server is not None:
logging.info('shutting down server')
self.server.socket.close()
class DataRouter(object):
def __init__(self, config, interpreter, emulator):
self.config = config
self.interpreter = interpreter
self.emulator = emulator
self.logfile = config.write
self.responses = set()
self.train_proc = None
self.model_dir = config.path
self.token = config.token
def extract(self, data):
return self.emulator.normalise_request_json(data)
def parse(self, text):
result = self.interpreter.parse(text)
self.responses.add(json.dumps(result, sort_keys=True))
return result
def format(self, data):
return self.emulator.normalise_response_json(data)
def write_logs(self):
with open(self.logfile, 'w') as f:
responses = [json.loads(r) for r in self.responses]
f.write(json.dumps(responses, indent=2))
def get_status(self):
if self.train_proc is not None:
training = self.train_proc.is_alive()
else:
training = False
models = glob.glob(os.path.join(self.model_dir, 'model*'))
return json.dumps({
"training": training,
"available_models": models
})
def auth(self, path):
if self.token is None:
return True
else:
parsed_path = urllib.parse.urlparse(path)
data = urllib.parse.parse_qs(parsed_path.query)
valid = ("token" in data and data["token"][0] == self.token)
return valid
def start_train_proc(self, data):
logging.info("starting train")
if self.train_proc is not None and self.train_proc.is_alive():
self.train_proc.terminate()
logging.info("training process {0} killed".format(self.train_proc))
fname = 'tmp_training_data.json'
with open(fname, 'w') as f:
f.write(data)
_config = dict(list(self.config.items()))
_config["data"] = fname
train_config = RasaNLUConfig(cmdline_args=_config)
self.train_proc = multiprocessing.Process(target=do_train, args=(train_config,))
self.train_proc.start()
logging.info("training process {0} started".format(self.train_proc))
class RasaRequestHandler(BaseHTTPRequestHandler):
def __init__(self, data_router, *args):
self.data_router = data_router
BaseHTTPRequestHandler.__init__(self, *args)
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
def auth_err(self):
self.send_response(401)
self.wfile.write("unauthorized")
def get_response(self, data_dict):
if 'q' not in data_dict:
return json.dumps({"error": "Invalid parse parameter specified"})
data = self.data_router.extract(data_dict)
result = self.data_router.parse(data["text"])
response = self.data_router.format(result)
return json.dumps(response)
def do_GET(self):
if self.data_router.auth(self.path):
self._set_headers()
if self.path.startswith("/parse"):
parsed_path = urllib.parse.urlparse(urllib.parse.unquote(self.path))
data = urllib.parse.parse_qs(parsed_path.query)
self.wfile.write(self.get_response(data).encode('utf-8'))
elif self.path.startswith("/status"):
response = self.data_router.get_status()
self.wfile.write(response.encode('utf-8'))
else:
self.wfile.write(b"hello")
else:
self.auth_err()
return
def do_POST(self):
if self.data_router.auth(self.path):
if self.path.startswith("/parse"):
self._set_headers()
data_string = self.rfile.read(int(self.headers['Content-Length']))
data_dict = json.loads(data_string.decode("utf-8"))
self.wfile.write(self.get_response(data_dict).encode('utf-8'))
if self.path.startswith("/train"):
self._set_headers()
data_string = self.rfile.read(int(self.headers['Content-Length']))
self.data_router.start_train_proc(data_string.decode("utf-8"))
self.wfile.write(
json.dumps({"info": "training started with pid {0}".format(self.data_router.train_proc.pid)}).encode('utf-8')
)
else:
self.auth_err()
return
def create_argparser():
parser = argparse.ArgumentParser(description='parse incoming text')
parser.add_argument('-c', '--config',
help="config file, all the command line options can also be passed via a (json-formatted) " +
"config file. NB command line args take precedence")
parser.add_argument('-d', '--server_model_dir',
help='directory containing the model for the parser to use')
parser.add_argument('-e', '--emulate', choices=['wit', 'luis', 'api'],
help='which service to emulate (default: None i.e. use simple built in format)')
parser.add_argument('-l', '--language', choices=['de', 'en'], help="model and data language")
parser.add_argument('-m', '--mitie_file',
help='file with mitie total_word_feature_extractor')
parser.add_argument('-p', '--path', help="path where model files will be saved")
parser.add_argument('-P', '--port', type=int, help='port on which to run server')
parser.add_argument('-t', '--token',
help="auth token. If set, reject requests which don't provide this token as a query parameter")
parser.add_argument('-w', '--write', help='file where logs will be saved')
return parser
if __name__ == "__main__":
parser = create_argparser()
cmdline_args = {key: val for key, val in list(vars(parser.parse_args()).items()) if val is not None}
config = RasaNLUConfig(cmdline_args.get("config"), os.environ, cmdline_args)
print((config.view()))
logging.basicConfig(filename=config.log_file, level=config.log_level)
logging.captureWarnings(True)
logging.debug(config.view())
try:
server = RasaNLUServer(config)
server.start()
except KeyboardInterrupt:
server.stop()
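A minimal client sketch for the GET /parse endpoint implemented above. The host, port, and token are placeholders; the token query parameter is only needed when the server was started with -t:
import json
import urllib.request

# Placeholder URL -- adjust host/port/token to match the running server.
url = "http://localhost:5000/parse?q=hello&token=mytoken"
with urllib.request.urlopen(url) as resp:
    print(json.loads(resp.read().decode("utf-8")))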
| 39.342629
| 119
| 0.621165
|
import argparse
import json
import os
import urllib.parse
import multiprocessing
import glob
import warnings
import logging
from http.server import BaseHTTPRequestHandler, HTTPServer
from rasa_nlu.train import do_train
from rasa_nlu.config import RasaNLUConfig
class RasaNLUServer(object):
def __init__(self, config):
self.server = None
self.config = config
self.logfile = config.write
self.emulator = self.__create_emulator()
self.interpreter = self.__create_interpreter()
self.data_router = DataRouter(config, self.interpreter, self.emulator)
if 'DYNO' in os.environ and config.backend == 'mitie':
from rasa_nlu.featurizers.mitie_featurizer import MITIEFeaturizer
MITIEFeaturizer(config.mitie_file)
def __create_interpreter(self):
model_dir = self.config.server_model_dir
metadata, backend = None, None
if model_dir is not None:
if not os.path.isdir(model_dir):
try:
from rasa_nlu.persistor import Persistor
p = Persistor(self.config.path, self.config.aws_region, self.config.bucket_name)
p.fetch_and_extract('{0}.tar.gz'.format(os.path.basename(model_dir)))
except:
warnings.warn("using default interpreter, couldn't find model dir or fetch it from S3")
metadata = json.loads(open(os.path.join(model_dir, 'metadata.json'), 'rb').read())
backend = metadata["backend"]
if backend is None:
from .interpreters.simple_interpreter import HelloGoodbyeInterpreter
return HelloGoodbyeInterpreter()
elif backend.lower() == 'mitie':
logging.info("using mitie backend")
from .interpreters.mitie_interpreter import MITIEInterpreter
return MITIEInterpreter(**metadata)
elif backend.lower() == 'spacy_sklearn':
logging.info("using spacy + sklearn backend")
from .interpreters.spacy_sklearn_interpreter import SpacySklearnInterpreter
return SpacySklearnInterpreter(**metadata)
else:
raise ValueError("unknown backend : {0}".format(backend))
def __create_emulator(self):
mode = self.config.emulate
if mode is None:
from .emulators import NoEmulator
return NoEmulator()
elif mode.lower() == 'wit':
from .emulators.wit import WitEmulator
return WitEmulator()
elif mode.lower() == 'luis':
from .emulators.luis import LUISEmulator
return LUISEmulator()
elif mode.lower() == 'api':
from .emulators.api import ApiEmulator
return ApiEmulator()
else:
raise ValueError("unknown mode : {0}".format(mode))
def start(self):
self.server = HTTPServer(('', self.config.port), lambda *args: RasaRequestHandler(self.data_router, *args))
logging.info('Started http server on port %s' % self.config.port)
self.server.serve_forever()
def stop(self):
logging.info('^C received. Aborting.')
if len(self.data_router.responses) > 0:
logging.info('saving logs')
self.data_router.write_logs()
if self.server is not None:
logging.info('shutting down server')
self.server.socket.close()
class DataRouter(object):
def __init__(self, config, interpreter, emulator):
self.config = config
self.interpreter = interpreter
self.emulator = emulator
self.logfile = config.write
self.responses = set()
self.train_proc = None
self.model_dir = config.path
self.token = config.token
def extract(self, data):
return self.emulator.normalise_request_json(data)
def parse(self, text):
result = self.interpreter.parse(text)
self.responses.add(json.dumps(result, sort_keys=True))
return result
def format(self, data):
return self.emulator.normalise_response_json(data)
def write_logs(self):
with open(self.logfile, 'w') as f:
responses = [json.loads(r) for r in self.responses]
f.write(json.dumps(responses, indent=2))
def get_status(self):
if self.train_proc is not None:
training = self.train_proc.is_alive()
else:
training = False
models = glob.glob(os.path.join(self.model_dir, 'model*'))
return json.dumps({
"training": training,
"available_models": models
})
def auth(self, path):
if self.token is None:
return True
else:
parsed_path = urllib.parse.urlparse(path)
data = urllib.parse.parse_qs(parsed_path.query)
valid = ("token" in data and data["token"][0] == self.token)
return valid
def start_train_proc(self, data):
logging.info("starting train")
if self.train_proc is not None and self.train_proc.is_alive():
self.train_proc.terminate()
logging.info("training process {0} killed".format(self.train_proc))
fname = 'tmp_training_data.json'
with open(fname, 'w') as f:
f.write(data)
_config = dict(list(self.config.items()))
_config["data"] = fname
train_config = RasaNLUConfig(cmdline_args=_config)
self.train_proc = multiprocessing.Process(target=do_train, args=(train_config,))
self.train_proc.start()
logging.info("training process {0} started".format(self.train_proc))
class RasaRequestHandler(BaseHTTPRequestHandler):
def __init__(self, data_router, *args):
self.data_router = data_router
BaseHTTPRequestHandler.__init__(self, *args)
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
def auth_err(self):
self.send_response(401)
self.wfile.write("unauthorized")
def get_response(self, data_dict):
if 'q' not in data_dict:
return json.dumps({"error": "Invalid parse parameter specified"})
data = self.data_router.extract(data_dict)
result = self.data_router.parse(data["text"])
response = self.data_router.format(result)
return json.dumps(response)
def do_GET(self):
if self.data_router.auth(self.path):
self._set_headers()
if self.path.startswith("/parse"):
parsed_path = urllib.parse.urlparse(urllib.parse.unquote(self.path))
data = urllib.parse.parse_qs(parsed_path.query)
self.wfile.write(self.get_response(data).encode('utf-8'))
elif self.path.startswith("/status"):
response = self.data_router.get_status()
self.wfile.write(response.encode('utf-8'))
else:
self.wfile.write(b"hello")
else:
self.auth_err()
return
def do_POST(self):
if self.data_router.auth(self.path):
if self.path.startswith("/parse"):
self._set_headers()
data_string = self.rfile.read(int(self.headers['Content-Length']))
data_dict = json.loads(data_string.decode("utf-8"))
self.wfile.write(self.get_response(data_dict).encode('utf-8'))
if self.path.startswith("/train"):
self._set_headers()
data_string = self.rfile.read(int(self.headers['Content-Length']))
self.data_router.start_train_proc(data_string.decode("utf-8"))
self.wfile.write(
json.dumps({"info": "training started with pid {0}".format(self.data_router.train_proc.pid)}).encode('utf-8')
)
else:
self.auth_err()
return
def create_argparser():
parser = argparse.ArgumentParser(description='parse incoming text')
parser.add_argument('-c', '--config',
help="config file, all the command line options can also be passed via a (json-formatted) " +
"config file. NB command line args take precedence")
parser.add_argument('-d', '--server_model_dir',
help='directory containing the model for the parser to use')
parser.add_argument('-e', '--emulate', choices=['wit', 'luis', 'api'],
help='which service to emulate (default: None i.e. use simple built in format)')
parser.add_argument('-l', '--language', choices=['de', 'en'], help="model and data language")
parser.add_argument('-m', '--mitie_file',
help='file with mitie total_word_feature_extractor')
parser.add_argument('-p', '--path', help="path where model files will be saved")
parser.add_argument('-P', '--port', type=int, help='port on which to run server')
parser.add_argument('-t', '--token',
help="auth token. If set, reject requests which don't provide this token as a query parameter")
parser.add_argument('-w', '--write', help='file where logs will be saved')
return parser
if __name__ == "__main__":
parser = create_argparser()
cmdline_args = {key: val for key, val in list(vars(parser.parse_args()).items()) if val is not None}
config = RasaNLUConfig(cmdline_args.get("config"), os.environ, cmdline_args)
print((config.view()))
logging.basicConfig(filename=config.log_file, level=config.log_level)
logging.captureWarnings(True)
logging.debug(config.view())
try:
server = RasaNLUServer(config)
server.start()
except KeyboardInterrupt:
server.stop()
| true
| true
|
1c4545eeb5ecae107979a9aeead1b4bcf5bbd9f8
| 1,011
|
py
|
Python
|
var/spack/repos/builtin/packages/r-lsei/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2
|
2020-10-15T01:08:42.000Z
|
2021-10-18T01:28:18.000Z
|
var/spack/repos/builtin/packages/r-lsei/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2
|
2019-07-30T10:12:28.000Z
|
2019-12-17T09:02:27.000Z
|
var/spack/repos/builtin/packages/r-lsei/package.py
|
adrianjhpc/spack
|
0a9e4fcee57911f2db586aa50c8873d9cca8de92
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 5
|
2019-07-30T09:42:14.000Z
|
2021-01-25T05:39:20.000Z
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RLsei(RPackage):
"""It contains functions that solve least squares linear regression
problems under linear equality/inequality constraints. Functions for
solving quadratic programming problems are also available, which
transform such problems into least squares ones first. It is developed
based on the 'Fortran' program of Lawson and Hanson (1974, 1995), which
is public domain and available at
<http://www.netlib.org/lawson-hanson>."""
homepage = "https://cloud.r-project.org/package=lsei"
url = "https://cloud.r-project.org/src/contrib/lsei_1.2-0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/lsei"
version('1.2-0', sha256='4781ebd9ef93880260d5d5f23066580ac06061e95c1048fb25e4e838963380f6')
| 43.956522
| 95
| 0.736894
|
from spack import *
class RLsei(RPackage):
homepage = "https://cloud.r-project.org/package=lsei"
url = "https://cloud.r-project.org/src/contrib/lsei_1.2-0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/lsei"
version('1.2-0', sha256='4781ebd9ef93880260d5d5f23066580ac06061e95c1048fb25e4e838963380f6')
| true
| true
|
1c45462adfeb644306dbba088d37aea211810842
| 77,143
|
py
|
Python
|
if/python/hpdf.py
|
yabaud/libharu
|
d84867ebf9f3de6afd661d2cdaff102457fbc371
|
[
"Zlib"
] | 1,118
|
2015-01-09T10:40:33.000Z
|
2022-03-28T08:19:56.000Z
|
if/python/hpdf.py
|
yabaud/libharu
|
d84867ebf9f3de6afd661d2cdaff102457fbc371
|
[
"Zlib"
] | 148
|
2015-01-09T02:47:40.000Z
|
2022-03-31T21:10:14.000Z
|
if/python/hpdf.py
|
yabaud/libharu
|
d84867ebf9f3de6afd661d2cdaff102457fbc371
|
[
"Zlib"
] | 394
|
2015-01-23T17:06:52.000Z
|
2022-03-25T03:50:06.000Z
|
##
## * << Haru Free PDF Library 2.0.8 >> -- hpdf.h
## *
## * URL http://libharu.org/
## *
## * Copyright (c) 1999-2006 Takeshi Kanno
## *
## * Permission to use, copy, modify, distribute and sell this software
## * and its documentation for any purpose is hereby granted without fee,
## * provided that the above copyright notice appear in all copies and
## * that both that copyright notice and this permission notice appear
## * in supporting documentation.
## * It is provided "as is" without express or implied warranty.
## *
##
## port to python by Li Jun
## http://groups.google.com/group/pythoncia
import os
import sys
import types
def setpath():
dllpath='%s/dll' %(os.path.dirname(os.path.realpath(__file__)))
if 'PATH' in os.environ:
if dllpath not in os.environ['PATH']:
os.environ['PATH']='%s%s%s' % (dllpath, os.pathsep, os.environ['PATH'])  # os.pathsep: ';' on Windows, ':' elsewhere
else:
os.environ['PATH']=dllpath
setpath()
from hpdf_consts import *
from hpdf_types import *
if os.sys.platform=='win32':
harudll='libhpdf.dll'
#haru=WinDLL(harudll)
haru=CDLL(harudll)
else:
harudll='libhpdf.so'
haru=CDLL(harudll)
HPDF_HANDLE=c_void_p
HPDF_Doc=HPDF_HANDLE
HPDF_Page=HPDF_HANDLE
HPDF_Pages=HPDF_HANDLE
HPDF_Stream=HPDF_HANDLE
HPDF_Image=HPDF_HANDLE
HPDF_Font=HPDF_HANDLE
HPDF_Outline=HPDF_HANDLE
HPDF_Encoder=HPDF_HANDLE
HPDF_Destination=HPDF_HANDLE
HPDF_XObject=HPDF_HANDLE
HPDF_Annotation=HPDF_HANDLE
HPDF_ExtGState=HPDF_HANDLE
#const char * HPDF_GetVersion (void)
HPDF_GetVersion=haru.HPDF_GetVersion
HPDF_GetVersion.restype=c_char_p
#HPDF_Doc HPDF_NewEx (HPDF_Error_Handler user_error_fn, HPDF_Alloc_Func user_alloc_fn, HPDF_Free_Func user_free_fn, HPDF_UINT mem_pool_buf_size, void *user_data)
HPDF_NewEx=haru.HPDF_NewEx
HPDF_NewEx.restype=HPDF_Doc
#HPDF_Doc HPDF_New (HPDF_Error_Handler user_error_fn, void *user_data)
HPDF_New=haru.HPDF_New
HPDF_New.restype=HPDF_Doc
#HPDF_STATUS HPDF_SetErrorHandler (HPDF_Doc pdf, HPDF_Error_Handler user_error_fn)
HPDF_SetErrorHandler=haru.HPDF_SetErrorHandler
HPDF_SetErrorHandler.restype=HPDF_STATUS
#void HPDF_Free (HPDF_Doc pdf)
HPDF_Free=haru.HPDF_Free
HPDF_Free.restype=None
#HPDF_STATUS HPDF_NewDoc (HPDF_Doc pdf)
HPDF_NewDoc=haru.HPDF_NewDoc
HPDF_NewDoc.restype=HPDF_STATUS
#void HPDF_FreeDoc (HPDF_Doc pdf)
HPDF_FreeDoc=haru.HPDF_FreeDoc
HPDF_FreeDoc.restype=None
#HPDF_BOOL HPDF_HasDoc (HPDF_Doc pdf)
HPDF_HasDoc=haru.HPDF_HasDoc
HPDF_HasDoc.restype=HPDF_BOOL
#void HPDF_FreeDocAll (HPDF_Doc pdf)
HPDF_FreeDocAll=haru.HPDF_FreeDocAll
HPDF_FreeDocAll.restype=None
#HPDF_STATUS HPDF_SaveToStream (HPDF_Doc pdf)
HPDF_SaveToStream=haru.HPDF_SaveToStream
HPDF_SaveToStream.restype=HPDF_STATUS
#HPDF_UINT32 HPDF_GetStreamSize (HPDF_Doc pdf)
HPDF_GetStreamSize=haru.HPDF_GetStreamSize
HPDF_GetStreamSize.restype=HPDF_UINT32
#HPDF_STATUS HPDF_ReadFromStream (HPDF_Doc pdf, HPDF_BYTE *buf, HPDF_UINT32 *size)
_HPDF_ReadFromStream=haru.HPDF_ReadFromStream
_HPDF_ReadFromStream.restype=HPDF_STATUS
def HPDF_ReadFromStream(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
size, #POINTER(HPDF_UINT32)
):
if isinstance(buf, (list, tuple)):  # works on both Python 2 and 3
size=len(buf)
buf=pointer((HPDF_BYTE*size)(*buf))
size=HPDF_UINT32(int(size))
return _HPDF_ReadFromStream(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
size, #POINTER(HPDF_UINT32)
)
#HPDF_STATUS HPDF_ResetStream (HPDF_Doc pdf)
HPDF_ResetStream=haru.HPDF_ResetStream
HPDF_ResetStream.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SaveToFile (HPDF_Doc pdf, const char *file_name)
HPDF_SaveToFile=haru.HPDF_SaveToFile
HPDF_SaveToFile.restype=HPDF_STATUS
#HPDF_STATUS HPDF_GetError (HPDF_Doc pdf)
HPDF_GetError=haru.HPDF_GetError
HPDF_GetError.restype=HPDF_STATUS
#HPDF_STATUS HPDF_GetErrorDetail (HPDF_Doc pdf)
HPDF_GetErrorDetail=haru.HPDF_GetErrorDetail
HPDF_GetErrorDetail.restype=HPDF_STATUS
#void HPDF_ResetError (HPDF_Doc pdf)
HPDF_ResetError=haru.HPDF_ResetError
HPDF_ResetError.restype=None
#HPDF_STATUS HPDF_SetPagesConfiguration (HPDF_Doc pdf, HPDF_UINT page_per_pages)
_HPDF_SetPagesConfiguration=haru.HPDF_SetPagesConfiguration
_HPDF_SetPagesConfiguration.restype=HPDF_STATUS
def HPDF_SetPagesConfiguration(
pdf, #HPDF_Doc
page_per_pages, #HPDF_UINT
):
page_per_pages=HPDF_UINT(int(page_per_pages))
return _HPDF_SetPagesConfiguration(
pdf, #HPDF_Doc
page_per_pages, #HPDF_UINT
)
#HPDF_Page HPDF_GetPageByIndex (HPDF_Doc pdf, HPDF_UINT index)
HPDF_GetPageByIndex=haru.HPDF_GetPageByIndex
HPDF_GetPageByIndex.restype=HPDF_Page
#---------------------------------------------------------------------------
#---------------------------------------------------------------------------
#HPDF_PageLayout HPDF_GetPageLayout (HPDF_Doc pdf)
HPDF_GetPageLayout=haru.HPDF_GetPageLayout
HPDF_GetPageLayout.restype=HPDF_PageLayout
#HPDF_STATUS HPDF_SetPageLayout (HPDF_Doc pdf, HPDF_PageLayout layout)
HPDF_SetPageLayout=haru.HPDF_SetPageLayout
HPDF_SetPageLayout.restype=HPDF_STATUS
#HPDF_PageMode HPDF_GetPageMode (HPDF_Doc pdf)
HPDF_GetPageMode=haru.HPDF_GetPageMode
HPDF_GetPageMode.restype=HPDF_PageMode
#HPDF_STATUS HPDF_SetPageMode (HPDF_Doc pdf, HPDF_PageMode mode)
HPDF_SetPageMode=haru.HPDF_SetPageMode
HPDF_SetPageMode.restype=HPDF_STATUS
#HPDF_UINT HPDF_GetViewerPreference (HPDF_Doc pdf)
HPDF_GetViewerPreference=haru.HPDF_GetViewerPreference
HPDF_GetViewerPreference.restype=HPDF_UINT
#HPDF_STATUS HPDF_SetViewerPreference (HPDF_Doc pdf, HPDF_UINT value)
HPDF_SetViewerPreference=haru.HPDF_SetViewerPreference
HPDF_SetViewerPreference.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SetOpenAction (HPDF_Doc pdf, HPDF_Destination open_action)
HPDF_SetOpenAction=haru.HPDF_SetOpenAction
HPDF_SetOpenAction.restype=HPDF_STATUS
#---------------------------------------------------------------------------
#----- page handling -------------------------------------------------------
#HPDF_Page HPDF_GetCurrentPage (HPDF_Doc pdf)
HPDF_GetCurrentPage=haru.HPDF_GetCurrentPage
HPDF_GetCurrentPage.restype=HPDF_Page
#HPDF_Page HPDF_AddPage (HPDF_Doc pdf)
HPDF_AddPage=haru.HPDF_AddPage
HPDF_AddPage.restype=HPDF_Page
#HPDF_Page HPDF_InsertPage (HPDF_Doc pdf, HPDF_Page page)
HPDF_InsertPage=haru.HPDF_InsertPage
HPDF_InsertPage.restype=HPDF_Page
#HPDF_STATUS HPDF_Page_SetWidth (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetWidth=haru.HPDF_Page_SetWidth
_HPDF_Page_SetWidth.restype=HPDF_STATUS
def HPDF_Page_SetWidth(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetWidth(
page, #HPDF_Page
value, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_SetHeight (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetHeight=haru.HPDF_Page_SetHeight
_HPDF_Page_SetHeight.restype=HPDF_STATUS
def HPDF_Page_SetHeight(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetHeight(
page, #HPDF_Page
value, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_SetSize (HPDF_Page page, HPDF_PageSizes size, HPDF_PageDirection direction)
HPDF_Page_SetSize=haru.HPDF_Page_SetSize
HPDF_Page_SetSize.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Page_SetRotate (HPDF_Page page, HPDF_UINT16 angle)
_HPDF_Page_SetRotate=haru.HPDF_Page_SetRotate
_HPDF_Page_SetRotate.restype=HPDF_STATUS
def HPDF_Page_SetRotate(
page, #HPDF_Page
angle, #HPDF_UINT16
):
angle=HPDF_UINT16(int(angle))
return _HPDF_Page_SetRotate(
page, #HPDF_Page
angle, #HPDF_UINT16
)
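#Usage sketch: page geometry. HPDF_PAGE_SIZE_A4 and HPDF_PAGE_PORTRAIT are
#assumed to be provided by hpdf_consts.
def _example_page_setup(pdf):
    page=HPDF_AddPage(pdf)
    HPDF_Page_SetSize(page, HPDF_PAGE_SIZE_A4, HPDF_PAGE_PORTRAIT)
    HPDF_Page_SetWidth(page, 595)       #points; overrides the preset width
    HPDF_Page_SetRotate(page, 90)       #angle must be a multiple of 90
    return page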
#---------------------------------------------------------------------------
#----- font handling -------------------------------------------------------
#HPDF_Font HPDF_GetFont (HPDF_Doc pdf, const char *font_name, const char *encoding_name)
HPDF_GetFont=haru.HPDF_GetFont
HPDF_GetFont.restype=HPDF_Font
#const char* HPDF_LoadType1FontFromFile (HPDF_Doc pdf, const char *afm_file_name, const char *data_file_name)
HPDF_LoadType1FontFromFile=haru.HPDF_LoadType1FontFromFile
HPDF_LoadType1FontFromFile.restype=c_char_p
#const char* HPDF_LoadTTFontFromFile (HPDF_Doc pdf, const char *file_name, HPDF_BOOL embedding)
HPDF_LoadTTFontFromFile=haru.HPDF_LoadTTFontFromFile
HPDF_LoadTTFontFromFile.restype=c_char_p
#const char* HPDF_LoadTTFontFromFile2 (HPDF_Doc pdf, const char *file_name, HPDF_UINT index, HPDF_BOOL embedding)
HPDF_LoadTTFontFromFile2=haru.HPDF_LoadTTFontFromFile2
HPDF_LoadTTFontFromFile2.restype=c_char_p
#HPDF_STATUS HPDF_AddPageLabel (HPDF_Doc pdf, HPDF_UINT page_num, HPDF_PageNumStyle style, HPDF_UINT first_page, const char *prefix)
_HPDF_AddPageLabel=haru.HPDF_AddPageLabel
_HPDF_AddPageLabel.restype=HPDF_STATUS
def HPDF_AddPageLabel(
pdf, #HPDF_Doc
page_num, #HPDF_UINT
style, #HPDF_PageNumStyle
first_page, #HPDF_UINT
prefix, #c_char_p
):
page_num, first_page=[HPDF_UINT(int(i))for i in (page_num, first_page)]
return _HPDF_AddPageLabel(
pdf, #HPDF_Doc
page_num, #HPDF_UINT
style, #HPDF_PageNumStyle
first_page, #HPDF_UINT
prefix, #c_char_p
)
#HPDF_STATUS HPDF_UseJPFonts (HPDF_Doc pdf)
HPDF_UseJPFonts=haru.HPDF_UseJPFonts
HPDF_UseJPFonts.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseKRFonts (HPDF_Doc pdf)
HPDF_UseKRFonts=haru.HPDF_UseKRFonts
HPDF_UseKRFonts.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNSFonts (HPDF_Doc pdf)
HPDF_UseCNSFonts=haru.HPDF_UseCNSFonts
HPDF_UseCNSFonts.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNTFonts (HPDF_Doc pdf)
HPDF_UseCNTFonts=haru.HPDF_UseCNTFonts
HPDF_UseCNTFonts.restype=HPDF_STATUS
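#Usage sketch: built-in and TrueType fonts. 'font.ttf' is a placeholder path;
#HPDF_LoadTTFontFromFile returns the loaded font's *name*, which is then
#resolved with HPDF_GetFont. HPDF_TRUE is assumed from hpdf_consts.
def _example_fonts(pdf):
    helv=HPDF_GetFont(pdf, 'Helvetica', None)   #standard font, default encoding
    name=HPDF_LoadTTFontFromFile(pdf, 'font.ttf', HPDF_TRUE)    #embed the font
    ttf=HPDF_GetFont(pdf, name, None)
    return helv, ttf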
#--------------------------------------------------------------------------
#----- outline ------------------------------------------------------------
#HPDF_Outline HPDF_CreateOutline (HPDF_Doc pdf, HPDF_Outline parent, const char *title, HPDF_Encoder encoder)
HPDF_CreateOutline=haru.HPDF_CreateOutline
HPDF_CreateOutline.restype=HPDF_Outline
#HPDF_STATUS HPDF_Outline_SetOpened (HPDF_Outline outline, HPDF_BOOL opened)
HPDF_Outline_SetOpened=haru.HPDF_Outline_SetOpened
HPDF_Outline_SetOpened.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Outline_SetDestination (HPDF_Outline outline, HPDF_Destination dst)
HPDF_Outline_SetDestination=haru.HPDF_Outline_SetDestination
HPDF_Outline_SetDestination.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- destination --------------------------------------------------------
#HPDF_Destination HPDF_Page_CreateDestination (HPDF_Page page)
HPDF_Page_CreateDestination=haru.HPDF_Page_CreateDestination
HPDF_Page_CreateDestination.restype=HPDF_Destination
#HPDF_STATUS HPDF_Destination_SetXYZ (HPDF_Destination dst, HPDF_REAL left, HPDF_REAL top, HPDF_REAL zoom)
_HPDF_Destination_SetXYZ=haru.HPDF_Destination_SetXYZ
_HPDF_Destination_SetXYZ.restype=HPDF_STATUS
def HPDF_Destination_SetXYZ(
dst, #HPDF_Destination
left, #HPDF_REAL
top, #HPDF_REAL
zoom, #HPDF_REAL
):
left=HPDF_REAL(left)
top=HPDF_REAL(top)
zoom=HPDF_REAL(zoom)
return _HPDF_Destination_SetXYZ(
dst, #HPDF_Destination
left, #HPDF_REAL
top, #HPDF_REAL
zoom, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFit (HPDF_Destination dst)
HPDF_Destination_SetFit=haru.HPDF_Destination_SetFit
HPDF_Destination_SetFit.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Destination_SetFitH (HPDF_Destination dst, HPDF_REAL top)
_HPDF_Destination_SetFitH=haru.HPDF_Destination_SetFitH
_HPDF_Destination_SetFitH.restype=HPDF_STATUS
def HPDF_Destination_SetFitH(
dst, #HPDF_Destination
top, #HPDF_REAL
):
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitH(
dst, #HPDF_Destination
top, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitV (HPDF_Destination dst, HPDF_REAL left)
_HPDF_Destination_SetFitV=haru.HPDF_Destination_SetFitV
_HPDF_Destination_SetFitV.restype=HPDF_STATUS
def HPDF_Destination_SetFitV(
dst, #HPDF_Destination
left, #HPDF_REAL
):
left=HPDF_REAL(left)
return _HPDF_Destination_SetFitV(
dst, #HPDF_Destination
left, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitR (HPDF_Destination dst, HPDF_REAL left, HPDF_REAL bottom, HPDF_REAL right, HPDF_REAL top)
_HPDF_Destination_SetFitR=haru.HPDF_Destination_SetFitR
_HPDF_Destination_SetFitR.restype=HPDF_STATUS
def HPDF_Destination_SetFitR(
dst, #HPDF_Destination
left, #HPDF_REAL
bottom, #HPDF_REAL
right, #HPDF_REAL
top, #HPDF_REAL
):
left=HPDF_REAL(left)
bottom=HPDF_REAL(bottom)
right=HPDF_REAL(right)
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitR(
dst, #HPDF_Destination
left, #HPDF_REAL
bottom, #HPDF_REAL
right, #HPDF_REAL
top, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitB (HPDF_Destination dst)
HPDF_Destination_SetFitB=haru.HPDF_Destination_SetFitB
HPDF_Destination_SetFitB.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Destination_SetFitBH (HPDF_Destination dst, HPDF_REAL top)
_HPDF_Destination_SetFitBH=haru.HPDF_Destination_SetFitBH
_HPDF_Destination_SetFitBH.restype=HPDF_STATUS
def HPDF_Destination_SetFitBH(
dst, #HPDF_Destination
top, #HPDF_REAL
):
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitBH(
dst, #HPDF_Destination
top, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitBV (HPDF_Destination dst, HPDF_REAL left)
_HPDF_Destination_SetFitBV=haru.HPDF_Destination_SetFitBV
_HPDF_Destination_SetFitBV.restype=HPDF_STATUS
def HPDF_Destination_SetFitBV(
dst, #HPDF_Destination
left, #HPDF_REAL
):
left=HPDF_REAL(left)
return _HPDF_Destination_SetFitBV(
dst, #HPDF_Destination
left, #HPDF_REAL
)
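#Usage sketch tying outlines to destinations: make a destination on a page,
#then point a root-level outline entry at it. HPDF_Page_GetHeight is bound
#further down; HPDF_TRUE is assumed from hpdf_consts.
def _example_outline(pdf, page):
    dst=HPDF_Page_CreateDestination(page)
    HPDF_Destination_SetXYZ(dst, 0, HPDF_Page_GetHeight(page), 1)   #top of page, 100% zoom
    item=HPDF_CreateOutline(pdf, None, 'Chapter 1', None)           #NULL parent and encoder
    HPDF_Outline_SetDestination(item, dst)
    HPDF_Outline_SetOpened(item, HPDF_TRUE)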
#--------------------------------------------------------------------------
#----- encoder ------------------------------------------------------------
#HPDF_Encoder HPDF_GetEncoder (HPDF_Doc pdf, const char *encoding_name)
HPDF_GetEncoder=haru.HPDF_GetEncoder
HPDF_GetEncoder.restype=HPDF_Encoder
#HPDF_Encoder HPDF_GetCurrentEncoder (HPDF_Doc pdf)
HPDF_GetCurrentEncoder=haru.HPDF_GetCurrentEncoder
HPDF_GetCurrentEncoder.restype=HPDF_Encoder
#HPDF_STATUS HPDF_SetCurrentEncoder (HPDF_Doc pdf, const char *encoding_name)
HPDF_SetCurrentEncoder=haru.HPDF_SetCurrentEncoder
HPDF_SetCurrentEncoder.restype=HPDF_STATUS
#HPDF_EncoderType HPDF_Encoder_GetType (HPDF_Encoder encoder)
HPDF_Encoder_GetType=haru.HPDF_Encoder_GetType
HPDF_Encoder_GetType.restype=HPDF_EncoderType
#HPDF_ByteType HPDF_Encoder_GetByteType (HPDF_Encoder encoder, const char *text, HPDF_UINT index)
_HPDF_Encoder_GetByteType=haru.HPDF_Encoder_GetByteType
_HPDF_Encoder_GetByteType.restype=HPDF_ByteType
def HPDF_Encoder_GetByteType(
encoder, #HPDF_Encoder
text, #const char *
index #HPDF_UINT
):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Encoder_GetByteType(
encoder, #HPDF_Encoder
text, #const char *
index #HPDF_UINT
)
#HPDF_UNICODE HPDF_Encoder_GetUnicode (HPDF_Encoder encoder, HPDF_UINT16 code)
HPDF_Encoder_GetUnicode=haru.HPDF_Encoder_GetUnicode
HPDF_Encoder_GetUnicode.restype=HPDF_UNICODE
#HPDF_WritingMode HPDF_Encoder_GetWritingMode (HPDF_Encoder encoder)
HPDF_Encoder_GetWritingMode=haru.HPDF_Encoder_GetWritingMode
HPDF_Encoder_GetWritingMode.restype=HPDF_WritingMode
#HPDF_STATUS HPDF_UseJPEncodings (HPDF_Doc pdf)
HPDF_UseJPEncodings=haru.HPDF_UseJPEncodings
HPDF_UseJPEncodings.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseKREncodings (HPDF_Doc pdf)
HPDF_UseKREncodings=haru.HPDF_UseKREncodings
HPDF_UseKREncodings.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNSEncodings (HPDF_Doc pdf)
HPDF_UseCNSEncodings=haru.HPDF_UseCNSEncodings
HPDF_UseCNSEncodings.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNTEncodings (HPDF_Doc pdf)
HPDF_UseCNTEncodings=haru.HPDF_UseCNTEncodings
HPDF_UseCNTEncodings.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- annotation ---------------------------------------------------------
#HPDF_Annotation HPDF_Page_CreateTextAnnot (HPDF_Page page, HPDF_Rect rect, const char *text, HPDF_Encoder encoder)
HPDF_Page_CreateTextAnnot=haru.HPDF_Page_CreateTextAnnot
HPDF_Page_CreateTextAnnot.restype=HPDF_Annotation
#HPDF_Annotation HPDF_Page_CreateLinkAnnot (HPDF_Page page, HPDF_Rect rect, HPDF_Destination dst)
HPDF_Page_CreateLinkAnnot=haru.HPDF_Page_CreateLinkAnnot
HPDF_Page_CreateLinkAnnot.restype=HPDF_Annotation
#HPDF_Annotation HPDF_Page_CreateURILinkAnnot (HPDF_Page page, HPDF_Rect rect, const char *uri)
HPDF_Page_CreateURILinkAnnot=haru.HPDF_Page_CreateURILinkAnnot
HPDF_Page_CreateURILinkAnnot.restype=HPDF_Annotation
#HPDF_STATUS HPDF_LinkAnnot_SetHighlightMode (HPDF_Annotation annot, HPDF_AnnotHighlightMode mode)
HPDF_LinkAnnot_SetHighlightMode=haru.HPDF_LinkAnnot_SetHighlightMode
HPDF_LinkAnnot_SetHighlightMode.restype=HPDF_STATUS
#HPDF_STATUS HPDF_LinkAnnot_SetBorderStyle (HPDF_Annotation annot, HPDF_REAL width, HPDF_UINT16 dash_on, HPDF_UINT16 dash_off)
_HPDF_LinkAnnot_SetBorderStyle=haru.HPDF_LinkAnnot_SetBorderStyle
_HPDF_LinkAnnot_SetBorderStyle.restype=HPDF_STATUS
def HPDF_LinkAnnot_SetBorderStyle(
annot, #HPDF_Annotation
width, #HPDF_REAL
dash_on, #HPDF_UINT16
dash_off, #HPDF_UINT16
):
width=HPDF_REAL(width)
dash_on=HPDF_UINT16(dash_on)
dash_off=HPDF_UINT16(dash_off)
return _HPDF_LinkAnnot_SetBorderStyle(
annot, #HPDF_Annotation
width, #HPDF_REAL
dash_on, #HPDF_UINT16
dash_off, #HPDF_UINT16
)
#HPDF_STATUS HPDF_TextAnnot_SetIcon (HPDF_Annotation annot, HPDF_AnnotIcon icon)
HPDF_TextAnnot_SetIcon=haru.HPDF_TextAnnot_SetIcon
HPDF_TextAnnot_SetIcon.restype=HPDF_STATUS
#HPDF_STATUS HPDF_TextAnnot_SetOpened (HPDF_Annotation annot, HPDF_BOOL opened)
HPDF_TextAnnot_SetOpened=haru.HPDF_TextAnnot_SetOpened
HPDF_TextAnnot_SetOpened.restype=HPDF_STATUS
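#Usage sketch: a clickable URI annotation. HPDF_Rect is assumed to be the
#ctypes struct from hpdf_types (passed by value, as in the C API);
#HPDF_ANNOT_INVERT_BOX is assumed from hpdf_consts.
def _example_uri_link(page):
    rect=HPDF_Rect(50, 50, 250, 70)     #left, bottom, right, top
    annot=HPDF_Page_CreateURILinkAnnot(page, rect, 'http://libharu.org')
    HPDF_LinkAnnot_SetHighlightMode(annot, HPDF_ANNOT_INVERT_BOX)
    HPDF_LinkAnnot_SetBorderStyle(annot, 1, 3, 2)   #1pt border, dash 3 on / 2 off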
#--------------------------------------------------------------------------
#----- image data ---------------------------------------------------------
#HPDF_Image HPDF_LoadPngImageFromFile (HPDF_Doc pdf, const char *filename)
HPDF_LoadPngImageFromFile=haru.HPDF_LoadPngImageFromFile
HPDF_LoadPngImageFromFile.restype=HPDF_Image
#HPDF_Image HPDF_LoadPngImageFromFile2 (HPDF_Doc pdf, const char *filename)
HPDF_LoadPngImageFromFile2=haru.HPDF_LoadPngImageFromFile2
HPDF_LoadPngImageFromFile2.restype=HPDF_Image
#HPDF_Image HPDF_LoadJpegImageFromFile (HPDF_Doc pdf, const char *filename)
HPDF_LoadJpegImageFromFile=haru.HPDF_LoadJpegImageFromFile
HPDF_LoadJpegImageFromFile.restype=HPDF_Image
#HPDF_Image HPDF_LoadRawImageFromFile (HPDF_Doc pdf, const char *filename, HPDF_UINT width, HPDF_UINT height, HPDF_ColorSpace color_space)
_HPDF_LoadRawImageFromFile=haru.HPDF_LoadRawImageFromFile
_HPDF_LoadRawImageFromFile.restype=HPDF_Image
def HPDF_LoadRawImageFromFile(
pdf, #HPDF_Doc
filename, #c_char_p
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
):
width=HPDF_UINT(width)
height=HPDF_UINT(height)
return _HPDF_LoadRawImageFromFile(
pdf, #HPDF_Doc
filename, #c_char_p
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
)
#HPDF_Image HPDF_LoadRawImageFromMem (HPDF_Doc pdf, const HPDF_BYTE *buf, HPDF_UINT width, HPDF_UINT height, HPDF_ColorSpace color_space, HPDF_UINT bits_per_component)
_HPDF_LoadRawImageFromMem=haru.HPDF_LoadRawImageFromMem
_HPDF_LoadRawImageFromMem.restype=HPDF_Image
def HPDF_LoadRawImageFromMem(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
bits_per_component, #HPDF_UINT
):
if type(buf) in (types.ListType, types.TupleType):
size=len(buf)
buf=pointer((HPDF_BYTE*size)(*buf))
if height in [0, None]:
height=size/width
width=HPDF_UINT(width)
height=HPDF_UINT(height)
bits_per_component=HPDF_UINT(bits_per_component)
return _HPDF_LoadRawImageFromMem(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
bits_per_component, #HPDF_UINT
)
#HPDF_Point HPDF_Image_GetSize (HPDF_Image image)
HPDF_Image_GetSize=haru.HPDF_Image_GetSize
HPDF_Image_GetSize.restype=HPDF_Point
#HPDF_STATUS HPDF_Image_GetSize2 (HPDF_Image image, HPDF_Point *size)
_HPDF_Image_GetSize2=haru.HPDF_Image_GetSize2
_HPDF_Image_GetSize2.restype=HPDF_STATUS
def HPDF_Image_GetSize2(
        image,      #HPDF_Image
        size=None,  #POINTER(HPDF_Point)
        ):
    if size is None:
        size=HPDF_Point()   #was 'size=HPDF_Point' (the class, not an instance)
    ret= _HPDF_Image_GetSize2(
        image,          #HPDF_Image
        pointer(size),  #the C API expects HPDF_Point*
        )
    return ret, size.x, size.y
#HPDF_UINT HPDF_Image_GetWidth (HPDF_Image image)
HPDF_Image_GetWidth=haru.HPDF_Image_GetWidth
HPDF_Image_GetWidth.restype=HPDF_UINT
#HPDF_UINT HPDF_Image_GetHeight (HPDF_Image image)
HPDF_Image_GetHeight=haru.HPDF_Image_GetHeight
HPDF_Image_GetHeight.restype=HPDF_UINT
#HPDF_UINT HPDF_Image_GetBitsPerComponent (HPDF_Image image)
HPDF_Image_GetBitsPerComponent=haru.HPDF_Image_GetBitsPerComponent
HPDF_Image_GetBitsPerComponent.restype=HPDF_UINT
#const char* HPDF_Image_GetColorSpace (HPDF_Image image)
HPDF_Image_GetColorSpace=haru.HPDF_Image_GetColorSpace
HPDF_Image_GetColorSpace.restype=c_char_p
#HPDF_STATUS HPDF_Image_SetColorMask (HPDF_Image image, HPDF_UINT rmin, HPDF_UINT rmax, HPDF_UINT gmin, HPDF_UINT gmax, HPDF_UINT bmin, HPDF_UINT bmax)
_HPDF_Image_SetColorMask=haru.HPDF_Image_SetColorMask
_HPDF_Image_SetColorMask.restype=HPDF_STATUS
def HPDF_Image_SetColorMask(
image, #HPDF_Image
rmin, #HPDF_UINT
rmax, #HPDF_UINT
gmin, #HPDF_UINT
gmax, #HPDF_UINT
bmin, #HPDF_UINT
bmax, #HPDF_UINT
):
rmin=HPDF_UINT(rmin)
rmax=HPDF_UINT(rmax)
gmin=HPDF_UINT(gmin)
gmax=HPDF_UINT(gmax)
bmin=HPDF_UINT(bmin)
bmax=HPDF_UINT(bmax)
return _HPDF_Image_SetColorMask(
image, #HPDF_Image
rmin, #HPDF_UINT
rmax, #HPDF_UINT
gmin, #HPDF_UINT
gmax, #HPDF_UINT
bmin, #HPDF_UINT
bmax, #HPDF_UINT
)
#HPDF_STATUS HPDF_Image_SetMaskImage (HPDF_Image image, HPDF_Image mask_image)
HPDF_Image_SetMaskImage=haru.HPDF_Image_SetMaskImage
HPDF_Image_SetMaskImage.restype=HPDF_STATUS
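#Usage sketch: load a PNG and paint it at its natural size. 'logo.png' is a
#placeholder; HPDF_Page_DrawImage is bound further down in this module.
def _example_draw_image(pdf, page):
    image=HPDF_LoadPngImageFromFile(pdf, 'logo.png')
    w=HPDF_Image_GetWidth(image)
    h=HPDF_Image_GetHeight(image)
    HPDF_Page_DrawImage(page, image, 72, 72, w, h)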
#--------------------------------------------------------------------------
#----- info dictionary ----------------------------------------------------
#HPDF_STATUS HPDF_SetInfoAttr (HPDF_Doc pdf, HPDF_InfoType type, const char *value)
HPDF_SetInfoAttr=haru.HPDF_SetInfoAttr
HPDF_SetInfoAttr.restype=HPDF_STATUS
#const char* HPDF_GetInfoAttr (HPDF_Doc pdf, HPDF_InfoType type)
HPDF_GetInfoAttr=haru.HPDF_GetInfoAttr
HPDF_GetInfoAttr.restype=c_char_p
#HPDF_STATUS HPDF_SetInfoDateAttr (HPDF_Doc pdf, HPDF_InfoType type, HPDF_Date value)
HPDF_SetInfoDateAttr=haru.HPDF_SetInfoDateAttr
HPDF_SetInfoDateAttr.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- encryption ---------------------------------------------------------
#HPDF_STATUS HPDF_SetPassword (HPDF_Doc pdf, const char *owner_passwd, const char *user_passwd)
HPDF_SetPassword=haru.HPDF_SetPassword
HPDF_SetPassword.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SetPermission (HPDF_Doc pdf, HPDF_UINT permission)
_HPDF_SetPermission=haru.HPDF_SetPermission
_HPDF_SetPermission.restype=HPDF_STATUS
def HPDF_SetPermission(
pdf, #HPDF_Doc
permission, #HPDF_UINT
):
permission=HPDF_UINT(int(permission))
return _HPDF_SetPermission(
pdf, #HPDF_Doc
permission, #HPDF_UINT
)
#HPDF_STATUS HPDF_SetEncryptionMode (HPDF_Doc pdf, HPDF_EncryptMode mode, HPDF_UINT key_len)
_HPDF_SetEncryptionMode=haru.HPDF_SetEncryptionMode
_HPDF_SetEncryptionMode.restype=HPDF_STATUS
def HPDF_SetEncryptionMode(
pdf, #HPDF_Doc
mode, #HPDF_EncryptMode
key_len, #HPDF_UINT
):
key_len=HPDF_UINT(int(key_len))
return _HPDF_SetEncryptionMode(
pdf, #HPDF_Doc
mode, #HPDF_EncryptMode
key_len, #HPDF_UINT
)
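#Usage sketch: password protection. HPDF_SetPassword must be called before the
#permission/encryption calls; the HPDF_ENABLE_* and HPDF_ENCRYPT_R3 constants
#are assumed from hpdf_consts.
def _example_encrypt(pdf):
    HPDF_SetPassword(pdf, 'owner-secret', 'user-secret')
    HPDF_SetPermission(pdf, HPDF_ENABLE_READ | HPDF_ENABLE_PRINT)
    HPDF_SetEncryptionMode(pdf, HPDF_ENCRYPT_R3, 16)    #128-bit key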
#--------------------------------------------------------------------------
#----- compression --------------------------------------------------------
#HPDF_STATUS HPDF_SetCompressionMode (HPDF_Doc pdf, HPDF_UINT mode)
HPDF_SetCompressionMode=haru.HPDF_SetCompressionMode
HPDF_SetCompressionMode.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- font ---------------------------------------------------------------
#const char* HPDF_Font_GetFontName (HPDF_Font font)
HPDF_Font_GetFontName=haru.HPDF_Font_GetFontName
HPDF_Font_GetFontName.restype=c_char_p
#const char* HPDF_Font_GetEncodingName (HPDF_Font font)
HPDF_Font_GetEncodingName=haru.HPDF_Font_GetEncodingName
HPDF_Font_GetEncodingName.restype=c_char_p
#HPDF_INT HPDF_Font_GetUnicodeWidth (HPDF_Font font, HPDF_UNICODE code)
HPDF_Font_GetUnicodeWidth=haru.HPDF_Font_GetUnicodeWidth
HPDF_Font_GetUnicodeWidth.restype=HPDF_INT
#HPDF_Box HPDF_Font_GetBBox (HPDF_Font font)
HPDF_Font_GetBBox=haru.HPDF_Font_GetBBox
HPDF_Font_GetBBox.restype=HPDF_Box
#HPDF_INT HPDF_Font_GetAscent (HPDF_Font font)
HPDF_Font_GetAscent=haru.HPDF_Font_GetAscent
HPDF_Font_GetAscent.restype=HPDF_INT
#HPDF_INT HPDF_Font_GetDescent (HPDF_Font font)
HPDF_Font_GetDescent=haru.HPDF_Font_GetDescent
HPDF_Font_GetDescent.restype=HPDF_INT
#HPDF_UINT HPDF_Font_GetXHeight (HPDF_Font font)
HPDF_Font_GetXHeight=haru.HPDF_Font_GetXHeight
HPDF_Font_GetXHeight.restype=HPDF_UINT
#HPDF_UINT HPDF_Font_GetCapHeight (HPDF_Font font)
HPDF_Font_GetCapHeight=haru.HPDF_Font_GetCapHeight
HPDF_Font_GetCapHeight.restype=HPDF_UINT
#HPDF_TextWidth HPDF_Font_TextWidth (HPDF_Font font, const HPDF_BYTE *text, HPDF_UINT len)
HPDF_Font_TextWidth=haru.HPDF_Font_TextWidth
HPDF_Font_TextWidth.restype=HPDF_TextWidth
#HPDF_UINT HPDF_Font_MeasureText (HPDF_Font font, const HPDF_BYTE *text, HPDF_UINT len, HPDF_REAL width, HPDF_REAL font_size, HPDF_REAL char_space, HPDF_REAL word_space, HPDF_BOOL wordwrap, HPDF_REAL *real_width)
_HPDF_Font_MeasureText=haru.HPDF_Font_MeasureText
_HPDF_Font_MeasureText.restype=HPDF_UINT
def HPDF_Font_MeasureText(
        font,       #HPDF_Font
        text,       #POINTER(HPDF_BYTE)
        length,     #HPDF_UINT
        width,      #HPDF_REAL
        font_size,  #HPDF_REAL
        char_space, #HPDF_REAL
        word_space, #HPDF_REAL
        wordwrap,   #HPDF_BOOL
        real_width=None, #POINTER(HPDF_REAL), output
        ):
    if type(text) in (types.TupleType, types.ListType):
        length=len(text)
        text=pointer((HPDF_BYTE*length)(*text))
    length=HPDF_UINT(int(length))
    width=HPDF_REAL(width)
    font_size=HPDF_REAL(font_size)
    char_space=HPDF_REAL(char_space)
    word_space=HPDF_REAL(word_space)
    real_width=pointer(HPDF_REAL(real_width or 0))  #out-parameter: the C API expects HPDF_REAL*, not a value
    return _HPDF_Font_MeasureText(
        font,       #HPDF_Font
        text,       #POINTER(HPDF_BYTE)
        length,     #HPDF_UINT
        width,      #HPDF_REAL
        font_size,  #HPDF_REAL
        char_space, #HPDF_REAL
        word_space, #HPDF_REAL
        wordwrap,   #HPDF_BOOL
        real_width, #POINTER(HPDF_REAL)
        )
#--------------------------------------------------------------------------
#----- extended graphics state --------------------------------------------
#HPDF_ExtGState HPDF_CreateExtGState (HPDF_Doc pdf)
HPDF_CreateExtGState=haru.HPDF_CreateExtGState
HPDF_CreateExtGState.restype=HPDF_ExtGState
#HPDF_STATUS HPDF_ExtGState_SetAlphaStroke (HPDF_ExtGState ext_gstate, HPDF_REAL value)
_HPDF_ExtGState_SetAlphaStroke=haru.HPDF_ExtGState_SetAlphaStroke
_HPDF_ExtGState_SetAlphaStroke.restype=HPDF_STATUS
def HPDF_ExtGState_SetAlphaStroke(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_ExtGState_SetAlphaStroke(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
)
#HPDF_STATUS HPDF_ExtGState_SetAlphaFill (HPDF_ExtGState ext_gstate, HPDF_REAL value)
_HPDF_ExtGState_SetAlphaFill=haru.HPDF_ExtGState_SetAlphaFill
_HPDF_ExtGState_SetAlphaFill.restype=HPDF_STATUS
def HPDF_ExtGState_SetAlphaFill(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_ExtGState_SetAlphaFill(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
)
#HPDF_STATUS HPDF_ExtGState_SetBlendMode (HPDF_ExtGState ext_gstate, HPDF_BlendMode mode)
HPDF_ExtGState_SetBlendMode=haru.HPDF_ExtGState_SetBlendMode
HPDF_ExtGState_SetBlendMode.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
#HPDF_REAL HPDF_Page_TextWidth (HPDF_Page page, const char *text)
_HPDF_Page_TextWidth=haru.HPDF_Page_TextWidth
_HPDF_Page_TextWidth.restype=HPDF_REAL
def HPDF_Page_TextWidth(
page, #HPDF_Page
text, #c_char_p
):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_TextWidth(
page, #HPDF_Page
text, #c_char_p
)
#HPDF_UINT HPDF_Page_MeasureText (HPDF_Page page, const char *text, HPDF_REAL width, HPDF_BOOL wordwrap, HPDF_REAL *real_width)
_HPDF_Page_MeasureText=haru.HPDF_Page_MeasureText
_HPDF_Page_MeasureText.restype=HPDF_UINT
def HPDF_Page_MeasureText(
        page,       #HPDF_Page
        text,       #c_char_p
        width,      #HPDF_REAL
        wordwrap,   #HPDF_BOOL
        real_width=None, #POINTER(HPDF_REAL), output
        ):
    width=HPDF_REAL(width)
    real_width=pointer(HPDF_REAL(real_width or 0))  #out-parameter: the C API expects HPDF_REAL*, not a value
    return _HPDF_Page_MeasureText(
        page,       #HPDF_Page
        text,       #c_char_p
        width,      #HPDF_REAL
        wordwrap,   #HPDF_BOOL
        real_width, #POINTER(HPDF_REAL)
        )
#HPDF_REAL HPDF_Page_GetWidth (HPDF_Page page)
HPDF_Page_GetWidth=haru.HPDF_Page_GetWidth
HPDF_Page_GetWidth.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetHeight (HPDF_Page page)
HPDF_Page_GetHeight=haru.HPDF_Page_GetHeight
HPDF_Page_GetHeight.restype=HPDF_REAL
#HPDF_UINT16 HPDF_Page_GetGMode (HPDF_Page page)
HPDF_Page_GetGMode=haru.HPDF_Page_GetGMode
HPDF_Page_GetGMode.restype=HPDF_UINT16
#HPDF_Point HPDF_Page_GetCurrentPos (HPDF_Page page)
HPDF_Page_GetCurrentPos=haru.HPDF_Page_GetCurrentPos
HPDF_Page_GetCurrentPos.restype=HPDF_Point
#HPDF_STATUS HPDF_Page_GetCurrentPos2 (HPDF_Page page, HPDF_Point *pos)
_HPDF_Page_GetCurrentPos2=haru.HPDF_Page_GetCurrentPos2
_HPDF_Page_GetCurrentPos2.restype=HPDF_STATUS
def HPDF_Page_GetCurrentPos2(
        page,       #HPDF_Page
        pos=None,   #POINTER(HPDF_Point)
        ):
    if pos is None:
        pos=HPDF_Point()
    ret= _HPDF_Page_GetCurrentPos2(
        page,           #HPDF_Page
        pointer(pos),   #the C API expects HPDF_Point*
        )
    return ret, pos.x, pos.y
#HPDF_Point HPDF_Page_GetCurrentTextPos (HPDF_Page page)
HPDF_Page_GetCurrentTextPos=haru.HPDF_Page_GetCurrentTextPos
HPDF_Page_GetCurrentTextPos.restype=HPDF_Point
#HPDF_STATUS HPDF_Page_GetCurrentTextPos2 (HPDF_Page page, HPDF_Point *pos)
_HPDF_Page_GetCurrentTextPos2=haru.HPDF_Page_GetCurrentTextPos2
_HPDF_Page_GetCurrentTextPos2.restype=HPDF_STATUS
def HPDF_Page_GetCurrentTextPos2(
        page,       #HPDF_Page
        pos=None,   #POINTER(HPDF_Point)
        ):
    if pos is None:
        pos=HPDF_Point()
    ret= _HPDF_Page_GetCurrentTextPos2(
        page,           #HPDF_Page
        pointer(pos),   #the C API expects HPDF_Point*
        )
    return ret, pos.x, pos.y
#HPDF_Font HPDF_Page_GetCurrentFont (HPDF_Page page)
HPDF_Page_GetCurrentFont=haru.HPDF_Page_GetCurrentFont
HPDF_Page_GetCurrentFont.restype=HPDF_Font
#HPDF_REAL HPDF_Page_GetCurrentFontSize (HPDF_Page page)
HPDF_Page_GetCurrentFontSize=haru.HPDF_Page_GetCurrentFontSize
HPDF_Page_GetCurrentFontSize.restype=HPDF_REAL
#HPDF_TransMatrix HPDF_Page_GetTransMatrix (HPDF_Page page)
HPDF_Page_GetTransMatrix=haru.HPDF_Page_GetTransMatrix
HPDF_Page_GetTransMatrix.restype=HPDF_TransMatrix
#HPDF_REAL HPDF_Page_GetLineWidth (HPDF_Page page)
HPDF_Page_GetLineWidth=haru.HPDF_Page_GetLineWidth
HPDF_Page_GetLineWidth.restype=HPDF_REAL
#HPDF_LineCap HPDF_Page_GetLineCap (HPDF_Page page)
HPDF_Page_GetLineCap=haru.HPDF_Page_GetLineCap
HPDF_Page_GetLineCap.restype=HPDF_LineCap
#HPDF_LineJoin HPDF_Page_GetLineJoin (HPDF_Page page)
HPDF_Page_GetLineJoin=haru.HPDF_Page_GetLineJoin
HPDF_Page_GetLineJoin.restype=HPDF_LineJoin
#HPDF_REAL HPDF_Page_GetMiterLimit (HPDF_Page page)
HPDF_Page_GetMiterLimit=haru.HPDF_Page_GetMiterLimit
HPDF_Page_GetMiterLimit.restype=HPDF_REAL
#HPDF_DashMode HPDF_Page_GetDash (HPDF_Page page)
HPDF_Page_GetDash=haru.HPDF_Page_GetDash
HPDF_Page_GetDash.restype=HPDF_DashMode
#HPDF_REAL HPDF_Page_GetFlat (HPDF_Page page)
HPDF_Page_GetFlat=haru.HPDF_Page_GetFlat
HPDF_Page_GetFlat.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetCharSpace (HPDF_Page page)
HPDF_Page_GetCharSpace=haru.HPDF_Page_GetCharSpace
HPDF_Page_GetCharSpace.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetWordSpace (HPDF_Page page)
HPDF_Page_GetWordSpace=haru.HPDF_Page_GetWordSpace
HPDF_Page_GetWordSpace.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetHorizontalScalling (HPDF_Page page)
HPDF_Page_GetHorizontalScalling=haru.HPDF_Page_GetHorizontalScalling
HPDF_Page_GetHorizontalScalling.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetTextLeading (HPDF_Page page)
HPDF_Page_GetTextLeading=haru.HPDF_Page_GetTextLeading
HPDF_Page_GetTextLeading.restype=HPDF_REAL
#HPDF_TextRenderingMode HPDF_Page_GetTextRenderingMode (HPDF_Page page)
HPDF_Page_GetTextRenderingMode=haru.HPDF_Page_GetTextRenderingMode
HPDF_Page_GetTextRenderingMode.restype=HPDF_TextRenderingMode
# This function is obsolete. Use HPDF_Page_GetTextRise.
#HPDF_REAL HPDF_Page_GetTextRaise (HPDF_Page page)
HPDF_Page_GetTextRaise=haru.HPDF_Page_GetTextRaise
HPDF_Page_GetTextRaise.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetTextRise (HPDF_Page page)
HPDF_Page_GetTextRise=haru.HPDF_Page_GetTextRise
HPDF_Page_GetTextRise.restype=HPDF_REAL
#HPDF_RGBColor HPDF_Page_GetRGBFill (HPDF_Page page)
HPDF_Page_GetRGBFill=haru.HPDF_Page_GetRGBFill
HPDF_Page_GetRGBFill.restype=HPDF_RGBColor
#HPDF_RGBColor HPDF_Page_GetRGBStroke (HPDF_Page page)
HPDF_Page_GetRGBStroke=haru.HPDF_Page_GetRGBStroke
HPDF_Page_GetRGBStroke.restype=HPDF_RGBColor
#HPDF_CMYKColor HPDF_Page_GetCMYKFill (HPDF_Page page)
HPDF_Page_GetCMYKFill=haru.HPDF_Page_GetCMYKFill
HPDF_Page_GetCMYKFill.restype=HPDF_CMYKColor
#HPDF_CMYKColor HPDF_Page_GetCMYKStroke (HPDF_Page page)
HPDF_Page_GetCMYKStroke=haru.HPDF_Page_GetCMYKStroke
HPDF_Page_GetCMYKStroke.restype=HPDF_CMYKColor
#HPDF_REAL HPDF_Page_GetGrayFill (HPDF_Page page)
HPDF_Page_GetGrayFill=haru.HPDF_Page_GetGrayFill
HPDF_Page_GetGrayFill.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetGrayStroke (HPDF_Page page)
HPDF_Page_GetGrayStroke=haru.HPDF_Page_GetGrayStroke
HPDF_Page_GetGrayStroke.restype=HPDF_REAL
#HPDF_ColorSpace HPDF_Page_GetStrokingColorSpace (HPDF_Page page)
HPDF_Page_GetStrokingColorSpace=haru.HPDF_Page_GetStrokingColorSpace
HPDF_Page_GetStrokingColorSpace.restype=HPDF_ColorSpace
#HPDF_ColorSpace HPDF_Page_GetFillingColorSpace (HPDF_Page page)
HPDF_Page_GetFillingColorSpace=haru.HPDF_Page_GetFillingColorSpace
HPDF_Page_GetFillingColorSpace.restype=HPDF_ColorSpace
#HPDF_TransMatrix HPDF_Page_GetTextMatrix (HPDF_Page page)
HPDF_Page_GetTextMatrix=haru.HPDF_Page_GetTextMatrix
HPDF_Page_GetTextMatrix.restype=HPDF_TransMatrix
#HPDF_UINT HPDF_Page_GetGStateDepth (HPDF_Page page)
HPDF_Page_GetGStateDepth=haru.HPDF_Page_GetGStateDepth
HPDF_Page_GetGStateDepth.restype=HPDF_UINT
#--------------------------------------------------------------------------
#----- GRAPHICS OPERATORS -------------------------------------------------
#--- General graphics state ---------------------------------------------
# w
#HPDF_STATUS HPDF_Page_SetLineWidth (HPDF_Page page, HPDF_REAL line_width)
_HPDF_Page_SetLineWidth=haru.HPDF_Page_SetLineWidth
_HPDF_Page_SetLineWidth.restype=HPDF_STATUS
def HPDF_Page_SetLineWidth(
page, #HPDF_Page
line_width, #HPDF_REAL
):
line_width=HPDF_REAL(line_width)
return _HPDF_Page_SetLineWidth(
page, #HPDF_Page
line_width, #HPDF_REAL
)
# J
#HPDF_STATUS HPDF_Page_SetLineCap (HPDF_Page page, HPDF_LineCap line_cap)
HPDF_Page_SetLineCap=haru.HPDF_Page_SetLineCap
HPDF_Page_SetLineCap.restype=HPDF_STATUS
# j
#HPDF_STATUS HPDF_Page_SetLineJoin (HPDF_Page page, HPDF_LineJoin line_join)
HPDF_Page_SetLineJoin=haru.HPDF_Page_SetLineJoin
HPDF_Page_SetLineJoin.restype=HPDF_STATUS
# M
#HPDF_STATUS HPDF_Page_SetMiterLimit (HPDF_Page page, HPDF_REAL miter_limit)
_HPDF_Page_SetMiterLimit=haru.HPDF_Page_SetMiterLimit
_HPDF_Page_SetMiterLimit.restype=HPDF_STATUS
def HPDF_Page_SetMiterLimit(
page, #HPDF_Page
miter_limit, #HPDF_REAL
):
miter_limit=HPDF_REAL(miter_limit)
return _HPDF_Page_SetMiterLimit(
page, #HPDF_Page
miter_limit, #HPDF_REAL
)
# d
#HPDF_STATUS HPDF_Page_SetDash (HPDF_Page page, const HPDF_UINT16 *dash_ptn, HPDF_UINT num_param, HPDF_UINT phase)
_HPDF_Page_SetDash=haru.HPDF_Page_SetDash
_HPDF_Page_SetDash.restype=HPDF_STATUS
def HPDF_Page_SetDash(
page, #HPDF_Page
dash_ptn, #POINTER(HPDF_UINT16)
num_param, #HPDF_UINT
phase, #HPDF_UINT
):
if type(dash_ptn) in (types.ListType, types.TupleType):
num_param=len(dash_ptn)
dash_ptn=pointer((HPDF_UINT16*num_param)(*dash_ptn))
return _HPDF_Page_SetDash(
page, #HPDF_Page
dash_ptn, #POINTER(HPDF_UINT16)
num_param, #HPDF_UINT
phase, #HPDF_UINT
)
# ri --not implemented yet
# i
#HPDF_STATUS HPDF_Page_SetFlat (HPDF_Page page, HPDF_REAL flatness)
_HPDF_Page_SetFlat=haru.HPDF_Page_SetFlat
_HPDF_Page_SetFlat.restype=HPDF_STATUS
def HPDF_Page_SetFlat(
page, #HPDF_Page
flatness, #HPDF_REAL
):
flatness=HPDF_REAL(flatness)
return _HPDF_Page_SetFlat(
page, #HPDF_Page
flatness, #HPDF_REAL
)
# gs
#HPDF_STATUS HPDF_Page_SetExtGState (HPDF_Page page, HPDF_ExtGState ext_gstate)
HPDF_Page_SetExtGState=haru.HPDF_Page_SetExtGState
HPDF_Page_SetExtGState.restype=HPDF_STATUS
#--- Special graphic state operator --------------------------------------
# q
#HPDF_STATUS HPDF_Page_GSave (HPDF_Page page)
HPDF_Page_GSave=haru.HPDF_Page_GSave
HPDF_Page_GSave.restype=HPDF_STATUS
# Q
#HPDF_STATUS HPDF_Page_GRestore (HPDF_Page page)
HPDF_Page_GRestore=haru.HPDF_Page_GRestore
HPDF_Page_GRestore.restype=HPDF_STATUS
# cm
#HPDF_STATUS HPDF_Page_Concat (HPDF_Page page, HPDF_REAL a, HPDF_REAL b, HPDF_REAL c, HPDF_REAL d, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_Concat=haru.HPDF_Page_Concat
_HPDF_Page_Concat.restype=HPDF_STATUS
def HPDF_Page_Concat(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
):
a=HPDF_REAL(a)
b=HPDF_REAL(b)
c=HPDF_REAL(c)
d=HPDF_REAL(d)
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_Concat(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
)
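#Usage sketch: bracket drawing in GSave/GRestore and shift the coordinate
#system with a translation matrix (a=d=1, b=c=0). Rectangle and Stroke are
#bound further down in this module.
def _example_translated(page):
    HPDF_Page_GSave(page)
    HPDF_Page_Concat(page, 1, 0, 0, 1, 100, 100)    #translate by (100, 100)
    HPDF_Page_Rectangle(page, 0, 0, 50, 50)         #lands at (100, 100) on the page
    HPDF_Page_Stroke(page)
    HPDF_Page_GRestore(page)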
#--- Path construction operator ------------------------------------------
# m
#HPDF_STATUS HPDF_Page_MoveTo (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_MoveTo=haru.HPDF_Page_MoveTo
_HPDF_Page_MoveTo.restype=HPDF_STATUS
def HPDF_Page_MoveTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# l
#HPDF_STATUS HPDF_Page_LineTo (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_LineTo=haru.HPDF_Page_LineTo
_HPDF_Page_LineTo.restype=HPDF_STATUS
def HPDF_Page_LineTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_LineTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# c
#HPDF_STATUS HPDF_Page_CurveTo (HPDF_Page page, HPDF_REAL x1, HPDF_REAL y1, HPDF_REAL x2, HPDF_REAL y2, HPDF_REAL x3, HPDF_REAL y3)
_HPDF_Page_CurveTo=haru.HPDF_Page_CurveTo
_HPDF_Page_CurveTo.restype=HPDF_STATUS
def HPDF_Page_CurveTo(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
):
x1=HPDF_REAL(x1)
y1=HPDF_REAL(y1)
x2=HPDF_REAL(x2)
y2=HPDF_REAL(y2)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
)
# v
#HPDF_STATUS HPDF_Page_CurveTo2 (HPDF_Page page, HPDF_REAL x2, HPDF_REAL y2, HPDF_REAL x3, HPDF_REAL y3)
_HPDF_Page_CurveTo2=haru.HPDF_Page_CurveTo2
_HPDF_Page_CurveTo2.restype=HPDF_STATUS
def HPDF_Page_CurveTo2(
page, #HPDF_Page
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
):
x2=HPDF_REAL(x2)
y2=HPDF_REAL(y2)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo2(
page, #HPDF_Page
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
)
# y
#HPDF_STATUS HPDF_Page_CurveTo3 (HPDF_Page page, HPDF_REAL x1, HPDF_REAL y1, HPDF_REAL x3, HPDF_REAL y3)
_HPDF_Page_CurveTo3=haru.HPDF_Page_CurveTo3
_HPDF_Page_CurveTo3.restype=HPDF_STATUS
def HPDF_Page_CurveTo3(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
):
x1=HPDF_REAL(x1)
y1=HPDF_REAL(y1)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo3(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
)
# h
#HPDF_STATUS HPDF_Page_ClosePath (HPDF_Page page)
HPDF_Page_ClosePath=haru.HPDF_Page_ClosePath
HPDF_Page_ClosePath.restype=HPDF_STATUS
# re
#HPDF_STATUS HPDF_Page_Rectangle (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL width, HPDF_REAL height)
_HPDF_Page_Rectangle=haru.HPDF_Page_Rectangle
_HPDF_Page_Rectangle.restype=HPDF_STATUS
def HPDF_Page_Rectangle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
width=HPDF_REAL(width)
height=HPDF_REAL(height)
return _HPDF_Page_Rectangle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
)
#--- Path painting operator ---------------------------------------------
# S
#HPDF_STATUS HPDF_Page_Stroke (HPDF_Page page)
_HPDF_Page_Stroke=haru.HPDF_Page_Stroke
_HPDF_Page_Stroke.restype=HPDF_STATUS
def HPDF_Page_Stroke(
page, #HPDF_Page
):
return _HPDF_Page_Stroke(
page, #HPDF_Page
)
# s
#HPDF_STATUS HPDF_Page_ClosePathStroke (HPDF_Page page)
HPDF_Page_ClosePathStroke=haru.HPDF_Page_ClosePathStroke
HPDF_Page_ClosePathStroke.restype=HPDF_STATUS
# f
#HPDF_STATUS HPDF_Page_Fill (HPDF_Page page)
HPDF_Page_Fill=haru.HPDF_Page_Fill
HPDF_Page_Fill.restype=HPDF_STATUS
# f*
#HPDF_STATUS HPDF_Page_Eofill (HPDF_Page page)
HPDF_Page_Eofill=haru.HPDF_Page_Eofill
HPDF_Page_Eofill.restype=HPDF_STATUS
# B
#HPDF_STATUS HPDF_Page_FillStroke (HPDF_Page page)
HPDF_Page_FillStroke=haru.HPDF_Page_FillStroke
HPDF_Page_FillStroke.restype=HPDF_STATUS
# B*
#HPDF_STATUS HPDF_Page_EofillStroke (HPDF_Page page)
HPDF_Page_EofillStroke=haru.HPDF_Page_EofillStroke
HPDF_Page_EofillStroke.restype=HPDF_STATUS
# b
#HPDF_STATUS HPDF_Page_ClosePathFillStroke (HPDF_Page page)
HPDF_Page_ClosePathFillStroke=haru.HPDF_Page_ClosePathFillStroke
HPDF_Page_ClosePathFillStroke.restype=HPDF_STATUS
# b*
#HPDF_STATUS HPDF_Page_ClosePathEofillStroke (HPDF_Page page)
HPDF_Page_ClosePathEofillStroke=haru.HPDF_Page_ClosePathEofillStroke
HPDF_Page_ClosePathEofillStroke.restype=HPDF_STATUS
# n
#HPDF_STATUS HPDF_Page_EndPath (HPDF_Page page)
HPDF_Page_EndPath=haru.HPDF_Page_EndPath
HPDF_Page_EndPath.restype=HPDF_STATUS
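#Usage sketch: build a path with the construction operators, then paint it
#exactly once with one painting operator (here Stroke).
def _example_path(page):
    HPDF_Page_SetLineWidth(page, 1.5)
    HPDF_Page_MoveTo(page, 100, 100)
    HPDF_Page_LineTo(page, 200, 100)
    HPDF_Page_CurveTo(page, 220, 120, 220, 180, 200, 200)
    HPDF_Page_ClosePath(page)
    HPDF_Page_Stroke(page)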
#--- Clipping paths operator --------------------------------------------
# W
#HPDF_STATUS HPDF_Page_Clip (HPDF_Page page)
HPDF_Page_Clip=haru.HPDF_Page_Clip
HPDF_Page_Clip.restype=HPDF_STATUS
# W*
#HPDF_STATUS HPDF_Page_Eoclip (HPDF_Page page)
HPDF_Page_Eoclip=haru.HPDF_Page_Eoclip
HPDF_Page_Eoclip.restype=HPDF_STATUS
#--- Text object operator -----------------------------------------------
# BT
#HPDF_STATUS HPDF_Page_BeginText (HPDF_Page page)
HPDF_Page_BeginText=haru.HPDF_Page_BeginText
HPDF_Page_BeginText.restype=HPDF_STATUS
# ET
#HPDF_STATUS HPDF_Page_EndText (HPDF_Page page)
HPDF_Page_EndText=haru.HPDF_Page_EndText
HPDF_Page_EndText.restype=HPDF_STATUS
#--- Text state ---------------------------------------------------------
# Tc
#HPDF_STATUS HPDF_Page_SetCharSpace (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetCharSpace=haru.HPDF_Page_SetCharSpace
_HPDF_Page_SetCharSpace.restype=HPDF_STATUS
def HPDF_Page_SetCharSpace(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetCharSpace(
page, #HPDF_Page
value, #HPDF_REAL
)
# Tw
#HPDF_STATUS HPDF_Page_SetWordSpace (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetWordSpace=haru.HPDF_Page_SetWordSpace
_HPDF_Page_SetWordSpace.restype=HPDF_STATUS
def HPDF_Page_SetWordSpace(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetWordSpace(
page, #HPDF_Page
value, #HPDF_REAL
)
# Tz
#HPDF_STATUS HPDF_Page_SetHorizontalScalling (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetHorizontalScalling=haru.HPDF_Page_SetHorizontalScalling
_HPDF_Page_SetHorizontalScalling.restype=HPDF_STATUS
def HPDF_Page_SetHorizontalScalling(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetHorizontalScalling(
page, #HPDF_Page
value, #HPDF_REAL
)
# TL
#HPDF_STATUS HPDF_Page_SetTextLeading (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetTextLeading=haru.HPDF_Page_SetTextLeading
_HPDF_Page_SetTextLeading.restype=HPDF_STATUS
def HPDF_Page_SetTextLeading(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextLeading(
page, #HPDF_Page
value, #HPDF_REAL
)
# Tf
#HPDF_STATUS HPDF_Page_SetFontAndSize (HPDF_Page page, HPDF_Font font, HPDF_REAL size)
_HPDF_Page_SetFontAndSize=haru.HPDF_Page_SetFontAndSize
_HPDF_Page_SetFontAndSize.restype=HPDF_STATUS
def HPDF_Page_SetFontAndSize(
page, #HPDF_Page
font, #HPDF_Font
size, #HPDF_REAL
):
size=HPDF_REAL(size)
return _HPDF_Page_SetFontAndSize(
page, #HPDF_Page
font, #HPDF_Font
size, #HPDF_REAL
)
# Tr
#HPDF_STATUS HPDF_Page_SetTextRenderingMode (HPDF_Page page, HPDF_TextRenderingMode mode)
HPDF_Page_SetTextRenderingMode=haru.HPDF_Page_SetTextRenderingMode
HPDF_Page_SetTextRenderingMode.restype=HPDF_STATUS
# Ts
#HPDF_STATUS HPDF_Page_SetTextRise (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetTextRise=haru.HPDF_Page_SetTextRise
_HPDF_Page_SetTextRise.restype=HPDF_STATUS
def HPDF_Page_SetTextRise(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextRise(
page, #HPDF_Page
value, #HPDF_REAL
)
# This function is obsolete. Use HPDF_Page_SetTextRise.
#HPDF_STATUS HPDF_Page_SetTextRaise (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetTextRaise=haru.HPDF_Page_SetTextRaise
_HPDF_Page_SetTextRaise.restype=HPDF_STATUS
def HPDF_Page_SetTextRaise(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextRaise(
page, #HPDF_Page
value, #HPDF_REAL
)
#--- Text positioning ---------------------------------------------------
# Td
#HPDF_STATUS HPDF_Page_MoveTextPos (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_MoveTextPos=haru.HPDF_Page_MoveTextPos
_HPDF_Page_MoveTextPos.restype=HPDF_STATUS
def HPDF_Page_MoveTextPos(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTextPos(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# TD
#HPDF_STATUS HPDF_Page_MoveTextPos2 (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_MoveTextPos2=haru.HPDF_Page_MoveTextPos2
_HPDF_Page_MoveTextPos2.restype=HPDF_STATUS
def HPDF_Page_MoveTextPos2(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTextPos2(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# Tm
#HPDF_STATUS HPDF_Page_SetTextMatrix (HPDF_Page page, HPDF_REAL a, HPDF_REAL b, HPDF_REAL c, HPDF_REAL d, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_SetTextMatrix=haru.HPDF_Page_SetTextMatrix
_HPDF_Page_SetTextMatrix.restype=HPDF_STATUS
def HPDF_Page_SetTextMatrix(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
):
a=HPDF_REAL(a)
b=HPDF_REAL(b)
c=HPDF_REAL(c)
d=HPDF_REAL(d)
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_SetTextMatrix(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
)
# T*
#HPDF_STATUS HPDF_Page_MoveToNextLine (HPDF_Page page)
HPDF_Page_MoveToNextLine=haru.HPDF_Page_MoveToNextLine
HPDF_Page_MoveToNextLine.restype=HPDF_STATUS
#--- Text showing -------------------------------------------------------
# Tj
#HPDF_STATUS HPDF_Page_ShowText (HPDF_Page page, const char *text)
_HPDF_Page_ShowText=haru.HPDF_Page_ShowText
_HPDF_Page_ShowText.restype=HPDF_STATUS
def HPDF_Page_ShowText(
        page,       #HPDF_Page
        text,       #c_char_p
        ):
    if type(text) in (types.ListType, types.TupleType):
        if type(text[-1]) != types.StringType:
            text=[chr(i) for i in text]
        text=''.join(text)
    return _HPDF_Page_ShowText(
        page,       #HPDF_Page
        text,       #c_char_p
        )
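#Usage sketch: all text showing must sit between BeginText/EndText.
#HPDF_Page_TextOut is bound further down; MoveTextPos (the Td operator) is
#relative to the start of the current line.
def _example_text(pdf, page):
    font=HPDF_GetFont(pdf, 'Helvetica', None)
    HPDF_Page_BeginText(page)
    HPDF_Page_SetFontAndSize(page, font, 12)
    HPDF_Page_TextOut(page, 72, 720, 'Hello, libharu')
    HPDF_Page_MoveTextPos(page, 0, -20)     #down one line
    HPDF_Page_ShowText(page, 'a second line')
    HPDF_Page_EndText(page)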
# TJ
# '
#HPDF_STATUS HPDF_Page_ShowTextNextLine (HPDF_Page page, const char *text)
HPDF_Page_ShowTextNextLine=haru.HPDF_Page_ShowTextNextLine
HPDF_Page_ShowTextNextLine.restype=HPDF_STATUS
# "
#HPDF_STATUS HPDF_Page_ShowTextNextLineEx (HPDF_Page page, HPDF_REAL word_space, HPDF_REAL char_space, const char *text)
_HPDF_Page_ShowTextNextLineEx=haru.HPDF_Page_ShowTextNextLineEx
_HPDF_Page_ShowTextNextLineEx.restype=HPDF_STATUS
def HPDF_Page_ShowTextNextLineEx(
page, #HPDF_Page
word_space, #HPDF_REAL
char_space, #HPDF_REAL
text, #c_char_p
):
word_space=HPDF_REAL(word_space)
char_space=HPDF_REAL(char_space)
return _HPDF_Page_ShowTextNextLineEx(
page, #HPDF_Page
word_space, #HPDF_REAL
char_space, #HPDF_REAL
text, #c_char_p
)
#--- Color showing ------------------------------------------------------
# cs --not implemented yet
# CS --not implemented yet
# sc --not implemented yet
# scn --not implemented yet
# SC --not implemented yet
# SCN --not implemented yet
# g
#HPDF_STATUS HPDF_Page_SetGrayFill (HPDF_Page page, HPDF_REAL gray)
_HPDF_Page_SetGrayFill=haru.HPDF_Page_SetGrayFill
_HPDF_Page_SetGrayFill.restype=HPDF_STATUS
def HPDF_Page_SetGrayFill(
page, #HPDF_Page
gray, #HPDF_REAL
):
gray=HPDF_REAL(gray)
return _HPDF_Page_SetGrayFill(
page, #HPDF_Page
gray, #HPDF_REAL
)
# G
#HPDF_STATUS HPDF_Page_SetGrayStroke (HPDF_Page page, HPDF_REAL gray)
_HPDF_Page_SetGrayStroke=haru.HPDF_Page_SetGrayStroke
_HPDF_Page_SetGrayStroke.restype=HPDF_STATUS
def HPDF_Page_SetGrayStroke(
page, #HPDF_Page
gray, #HPDF_REAL
):
gray=HPDF_REAL(gray)
return _HPDF_Page_SetGrayStroke(
page, #HPDF_Page
gray, #HPDF_REAL
)
# rg
#HPDF_STATUS HPDF_Page_SetRGBFill (HPDF_Page page, HPDF_REAL r, HPDF_REAL g, HPDF_REAL b)
_HPDF_Page_SetRGBFill=haru.HPDF_Page_SetRGBFill
_HPDF_Page_SetRGBFill.restype=HPDF_STATUS
def HPDF_Page_SetRGBFill(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
):
r=HPDF_REAL(r)
g=HPDF_REAL(g)
b=HPDF_REAL(b)
return _HPDF_Page_SetRGBFill(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
)
# RG
#HPDF_STATUS HPDF_Page_SetRGBStroke (HPDF_Page page, HPDF_REAL r, HPDF_REAL g, HPDF_REAL b)
_HPDF_Page_SetRGBStroke=haru.HPDF_Page_SetRGBStroke
_HPDF_Page_SetRGBStroke.restype=HPDF_STATUS
def HPDF_Page_SetRGBStroke(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
):
r=HPDF_REAL(r)
g=HPDF_REAL(g)
b=HPDF_REAL(b)
return _HPDF_Page_SetRGBStroke(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
)
# k
#HPDF_STATUS HPDF_Page_SetCMYKFill (HPDF_Page page, HPDF_REAL c, HPDF_REAL m, HPDF_REAL y, HPDF_REAL k)
_HPDF_Page_SetCMYKFill=haru.HPDF_Page_SetCMYKFill
_HPDF_Page_SetCMYKFill.restype=HPDF_STATUS
def HPDF_Page_SetCMYKFill(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
):
c=HPDF_REAL(c)
m=HPDF_REAL(m)
y=HPDF_REAL(y)
k=HPDF_REAL(k)
return _HPDF_Page_SetCMYKFill(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
)
# K
#HPDF_STATUS HPDF_Page_SetCMYKStroke (HPDF_Page page, HPDF_REAL c, HPDF_REAL m, HPDF_REAL y, HPDF_REAL k)
_HPDF_Page_SetCMYKStroke=haru.HPDF_Page_SetCMYKStroke
_HPDF_Page_SetCMYKStroke.restype=HPDF_STATUS
def HPDF_Page_SetCMYKStroke(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
):
c=HPDF_REAL(c)
m=HPDF_REAL(m)
y=HPDF_REAL(y)
k=HPDF_REAL(k)
return _HPDF_Page_SetCMYKStroke(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
)
#--- Shading patterns ---------------------------------------------------
# sh --not implemented yet
#--- In-line images -----------------------------------------------------
# BI --not implemented yet
# ID --not implemented yet
# EI --not implemented yet
#--- XObjects -----------------------------------------------------------
# Do
#HPDF_STATUS HPDF_Page_ExecuteXObject (HPDF_Page page, HPDF_XObject obj)
HPDF_Page_ExecuteXObject=haru.HPDF_Page_ExecuteXObject
HPDF_Page_ExecuteXObject.restype=HPDF_STATUS
#--- Marked content -----------------------------------------------------
# BMC --not implemented yet
# BDC --not implemented yet
# EMC --not implemented yet
# MP --not implemented yet
# DP --not implemented yet
#--- Compatibility ------------------------------------------------------
# BX --not implemented yet
# EX --not implemented yet
#HPDF_STATUS HPDF_Page_DrawImage (HPDF_Page page, HPDF_Image image, HPDF_REAL x, HPDF_REAL y, HPDF_REAL width, HPDF_REAL height)
_HPDF_Page_DrawImage=haru.HPDF_Page_DrawImage
_HPDF_Page_DrawImage.restype=HPDF_STATUS
def HPDF_Page_DrawImage(
page, #HPDF_Page
image, #HPDF_Image
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
width=HPDF_REAL(width)
height=HPDF_REAL(height)
return _HPDF_Page_DrawImage(
page, #HPDF_Page
image, #HPDF_Image
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Circle (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL ray)
_HPDF_Page_Circle=haru.HPDF_Page_Circle
_HPDF_Page_Circle.restype=HPDF_STATUS
def HPDF_Page_Circle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
ray=HPDF_REAL(ray)
return _HPDF_Page_Circle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Ellipse (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL xray, HPDF_REAL yray)
_HPDF_Page_Ellipse=haru.HPDF_Page_Ellipse
_HPDF_Page_Ellipse.restype=HPDF_STATUS
def HPDF_Page_Ellipse(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
xray, #HPDF_REAL
yray, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
xray=HPDF_REAL(xray)
yray=HPDF_REAL(yray)
return _HPDF_Page_Ellipse(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
xray, #HPDF_REAL
yray, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Arc (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL ray, HPDF_REAL ang1, HPDF_REAL ang2)
_HPDF_Page_Arc=haru.HPDF_Page_Arc
_HPDF_Page_Arc.restype=HPDF_STATUS
def HPDF_Page_Arc(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
ang1, #HPDF_REAL
ang2, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
ray=HPDF_REAL(ray)
ang1=HPDF_REAL(ang1)
ang2=HPDF_REAL(ang2)
return _HPDF_Page_Arc(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
ang1, #HPDF_REAL
ang2, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_TextOut (HPDF_Page page, HPDF_REAL xpos, HPDF_REAL ypos, const char *text)
_HPDF_Page_TextOut=haru.HPDF_Page_TextOut
_HPDF_Page_TextOut.restype=HPDF_STATUS
def HPDF_Page_TextOut(
page, #HPDF_Page
xpos, #HPDF_REAL
ypos, #HPDF_REAL
text, #c_char_p
):
xpos=HPDF_REAL(xpos)
ypos=HPDF_REAL(ypos)
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_TextOut(
page, #HPDF_Page
xpos, #HPDF_REAL
ypos, #HPDF_REAL
text, #c_char_p
)
#HPDF_STATUS HPDF_Page_TextRect (HPDF_Page page, HPDF_REAL left, HPDF_REAL top, HPDF_REAL right, HPDF_REAL bottom, const char *text, HPDF_TextAlignment align, HPDF_UINT *len)
#note: the trailing 'len' argument is an output pointer (the number of
#characters actually printed), so a plain integer cannot be passed through.
_HPDF_Page_TextRect=haru.HPDF_Page_TextRect
_HPDF_Page_TextRect.restype=HPDF_STATUS
def HPDF_Page_TextRect(
        page,       #HPDF_Page
        left,       #HPDF_REAL
        top,        #HPDF_REAL
        right,      #HPDF_REAL
        bottom,     #HPDF_REAL
        text,       #c_char_p
        align,      #HPDF_TextAlignment
        length=None,#POINTER(HPDF_UINT), output
        ):
    left=HPDF_REAL(left)
    top=HPDF_REAL(top)
    right=HPDF_REAL(right)
    bottom=HPDF_REAL(bottom)
    if type(length) in (types.ListType, types.TupleType):
        size=len(length)
        length=pointer((HPDF_UINT*size)(*length))
    elif length is None:
        length=pointer(HPDF_UINT(0))    #allocate the out-parameter if none was given
    return _HPDF_Page_TextRect(
        page,       #HPDF_Page
        left,       #HPDF_REAL
        top,        #HPDF_REAL
        right,      #HPDF_REAL
        bottom,     #HPDF_REAL
        text,       #c_char_p
        align,      #HPDF_TextAlignment
        length,     #POINTER(HPDF_UINT)
        )
#HPDF_STATUS HPDF_Page_SetSlideShow (HPDF_Page page, HPDF_TransitionStyle type, HPDF_REAL disp_time, HPDF_REAL trans_time)
_HPDF_Page_SetSlideShow=haru.HPDF_Page_SetSlideShow
_HPDF_Page_SetSlideShow.restype=HPDF_STATUS
def HPDF_Page_SetSlideShow(
page, #HPDF_Page
tType, #HPDF_TransitionStyle
disp_time, #HPDF_REAL
trans_time, #HPDF_REAL
):
disp_time=HPDF_REAL(disp_time)
trans_time=HPDF_REAL(trans_time)
return _HPDF_Page_SetSlideShow(
page, #HPDF_Page
tType, #HPDF_TransitionStyle
disp_time, #HPDF_REAL
trans_time, #HPDF_REAL
)
NULL=0
HPDF_NOPNGLIB=False
| 36.664924
| 213
| 0.572962
|
import os
import sys
import types
def setpath():
dllpath='%s/dll' %(os.path.dirname(os.path.realpath(__file__)))
if 'PATH' in os.environ:
if dllpath not in os.environ['PATH']:
os.environ['PATH']='%s;%s' % (dllpath, os.environ['PATH'])
else:
os.environ['PATH']=dllpath
setpath()
from hpdf_consts import *
from hpdf_types import *
if os.sys.platform=='win32':
harudll='libhpdf.dll'
haru=CDLL(harudll)
else:
harudll='libhpdf.so'
haru=CDLL(harudll)
HPDF_HANDLE=c_void_p
HPDF_Doc=HPDF_HANDLE
HPDF_Page=HPDF_HANDLE
HPDF_Pages=HPDF_HANDLE
HPDF_Stream=HPDF_HANDLE
HPDF_Image=HPDF_HANDLE
HPDF_Font=HPDF_HANDLE
HPDF_Outline=HPDF_HANDLE
HPDF_Encoder=HPDF_HANDLE
HPDF_Destination=HPDF_HANDLE
HPDF_XObject=HPDF_HANDLE
HPDF_Annotation=HPDF_HANDLE
HPDF_ExtGState=HPDF_HANDLE
HPDF_GetVersion=haru.HPDF_GetVersion
HPDF_GetVersion.restype=c_char_p
HPDF_NewEx=haru.HPDF_NewEx
HPDF_NewEx.restype=HPDF_Doc
HPDF_New=haru.HPDF_New
HPDF_New.restype=HPDF_Doc
HPDF_SetErrorHandler=haru.HPDF_SetErrorHandler
HPDF_SetErrorHandler.restype=HPDF_STATUS
HPDF_Free=haru.HPDF_Free
HPDF_Free.restype=None
HPDF_NewDoc=haru.HPDF_NewDoc
HPDF_NewDoc.restype=HPDF_STATUS
HPDF_FreeDoc=haru.HPDF_FreeDoc
HPDF_FreeDoc.restype=None
HPDF_HasDoc=haru.HPDF_HasDoc
HPDF_HasDoc.restype=HPDF_BOOL
HPDF_FreeDocAll=haru.HPDF_FreeDocAll
HPDF_FreeDocAll.restype=None
HPDF_SaveToStream=haru.HPDF_SaveToStream
HPDF_SaveToStream.restype=HPDF_STATUS
HPDF_GetStreamSize=haru.HPDF_GetStreamSize
HPDF_GetStreamSize.restype=HPDF_UINT32
_HPDF_ReadFromStream=haru.HPDF_ReadFromStream
_HPDF_ReadFromStream.restype=HPDF_STATUS
def HPDF_ReadFromStream(
pdf, buf, size, ):
if type(buf) in (types.ListType, types.TupleType):
size=len(buf)
buf=pointer((HPDF_BYTE*size)(*buf))
size=HPDF_UINT32(int(size))
return _HPDF_ReadFromStream(
pdf, buf, size, )
HPDF_ResetStream=haru.HPDF_ResetStream
HPDF_ResetStream.restype=HPDF_STATUS
HPDF_SaveToFile=haru.HPDF_SaveToFile
HPDF_SaveToFile.restype=HPDF_STATUS
HPDF_GetError=haru.HPDF_GetError
HPDF_GetError.restype=HPDF_STATUS
HPDF_GetErrorDetail=haru.HPDF_GetErrorDetail
HPDF_GetErrorDetail.restype=HPDF_STATUS
HPDF_ResetError=haru.HPDF_ResetError
HPDF_ResetError.restype=None
_HPDF_SetPagesConfiguration=haru.HPDF_SetPagesConfiguration
_HPDF_SetPagesConfiguration.restype=HPDF_STATUS
def HPDF_SetPagesConfiguration(
pdf, page_per_pages, ):
page_per_pages=HPDF_UINT(int(page_per_pages))
return _HPDF_SetPagesConfiguration(
pdf, page_per_pages, )
HPDF_GetPageByIndex=haru.HPDF_GetPageByIndex
HPDF_GetPageByIndex.restype=HPDF_Page
HPDF_GetPageLayout=haru.HPDF_GetPageLayout
HPDF_GetPageLayout.restype=HPDF_PageLayout
HPDF_SetPageLayout=haru.HPDF_SetPageLayout
HPDF_SetPageLayout.restype=HPDF_STATUS
HPDF_GetPageMode=haru.HPDF_GetPageMode
HPDF_GetPageMode.restype=HPDF_PageMode
HPDF_SetPageMode=haru.HPDF_SetPageMode
HPDF_SetPageMode.restype=HPDF_STATUS
HPDF_GetViewerPreference=haru.HPDF_GetViewerPreference
HPDF_GetViewerPreference.restype=HPDF_UINT
HPDF_SetViewerPreference=haru.HPDF_SetViewerPreference
HPDF_SetViewerPreference.restype=HPDF_STATUS
HPDF_SetOpenAction=haru.HPDF_SetOpenAction
HPDF_SetOpenAction.restype=HPDF_STATUS
HPDF_GetCurrentPage=haru.HPDF_GetCurrentPage
HPDF_GetCurrentPage.restype=HPDF_Page
HPDF_AddPage=haru.HPDF_AddPage
HPDF_AddPage.restype=HPDF_Page
HPDF_InsertPage=haru.HPDF_InsertPage
HPDF_InsertPage.restype=HPDF_Page
_HPDF_Page_SetWidth=haru.HPDF_Page_SetWidth
_HPDF_Page_SetWidth.restype=HPDF_STATUS
def HPDF_Page_SetWidth(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetWidth(
page, value, )
_HPDF_Page_SetHeight=haru.HPDF_Page_SetHeight
_HPDF_Page_SetHeight.restype=HPDF_STATUS
def HPDF_Page_SetHeight(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetHeight(
page, value, )
HPDF_Page_SetSize=haru.HPDF_Page_SetSize
HPDF_Page_SetSize.restype=HPDF_STATUS
_HPDF_Page_SetRotate=haru.HPDF_Page_SetRotate
_HPDF_Page_SetRotate.restype=HPDF_STATUS
def HPDF_Page_SetRotate(
page, angle, ):
angle=HPDF_UINT16(int(angle))
return _HPDF_Page_SetRotate(
page, angle, )
HPDF_GetFont=haru.HPDF_GetFont
HPDF_GetFont.restype=HPDF_Font
HPDF_LoadType1FontFromFile=haru.HPDF_LoadType1FontFromFile
HPDF_LoadType1FontFromFile.restype=c_char_p
HPDF_LoadTTFontFromFile=haru.HPDF_LoadTTFontFromFile
HPDF_LoadTTFontFromFile.restype=c_char_p
HPDF_LoadTTFontFromFile2=haru.HPDF_LoadTTFontFromFile2
HPDF_LoadTTFontFromFile2.restype=c_char_p
_HPDF_AddPageLabel=haru.HPDF_AddPageLabel
_HPDF_AddPageLabel.restype=HPDF_STATUS
def HPDF_AddPageLabel(
pdf, page_num, style, first_page, prefix, ):
page_num, first_page=[HPDF_UINT(int(i))for i in (page_num, first_page)]
return _HPDF_AddPageLabel(
pdf, page_num, style, first_page, prefix, )
HPDF_UseJPFonts=haru.HPDF_UseJPFonts
HPDF_UseJPFonts.restype=HPDF_STATUS
HPDF_UseKRFonts=haru.HPDF_UseKRFonts
HPDF_UseKRFonts.restype=HPDF_STATUS
HPDF_UseCNSFonts=haru.HPDF_UseCNSFonts
HPDF_UseCNSFonts.restype=HPDF_STATUS
HPDF_UseCNTFonts=haru.HPDF_UseCNTFonts
HPDF_UseCNTFonts.restype=HPDF_STATUS
HPDF_CreateOutline=haru.HPDF_CreateOutline
HPDF_CreateOutline.restype=HPDF_Outline
HPDF_Outline_SetOpened=haru.HPDF_Outline_SetOpened
HPDF_Outline_SetOpened.restype=HPDF_STATUS
HPDF_Outline_SetDestination=haru.HPDF_Outline_SetDestination
HPDF_Outline_SetDestination.restype=HPDF_STATUS
HPDF_Page_CreateDestination=haru.HPDF_Page_CreateDestination
HPDF_Page_CreateDestination.restype=HPDF_Destination
_HPDF_Destination_SetXYZ=haru.HPDF_Destination_SetXYZ
_HPDF_Destination_SetXYZ.restype=HPDF_STATUS
def HPDF_Destination_SetXYZ(
dst, left, top, zoom, ):
left=HPDF_REAL(left)
top=HPDF_REAL(top)
zoom=HPDF_REAL(zoom)
return _HPDF_Destination_SetXYZ(
dst, left, top, zoom, )
HPDF_Destination_SetFit=haru.HPDF_Destination_SetFit
HPDF_Destination_SetFit.restype=HPDF_STATUS
_HPDF_Destination_SetFitH=haru.HPDF_Destination_SetFitH
_HPDF_Destination_SetFitH.restype=HPDF_STATUS
def HPDF_Destination_SetFitH(
dst, top, ):
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitH(
dst, top, )
_HPDF_Destination_SetFitV=haru.HPDF_Destination_SetFitV
_HPDF_Destination_SetFitV.restype=HPDF_STATUS
def HPDF_Destination_SetFitV(
dst, left, ):
left=HPDF_REAL(left)
return _HPDF_Destination_SetFitV(
dst, left, )
_HPDF_Destination_SetFitR=haru.HPDF_Destination_SetFitR
_HPDF_Destination_SetFitR.restype=HPDF_STATUS
def HPDF_Destination_SetFitR(
dst, left, bottom, right, top, ):
left=HPDF_REAL(left)
bottom=HPDF_REAL(bottom)
right=HPDF_REAL(right)
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitR(
dst, left, bottom, right, top, )
HPDF_Destination_SetFitB=haru.HPDF_Destination_SetFitB
HPDF_Destination_SetFitB.restype=HPDF_STATUS
_HPDF_Destination_SetFitBH=haru.HPDF_Destination_SetFitBH
_HPDF_Destination_SetFitBH.restype=HPDF_STATUS
def HPDF_Destination_SetFitBH(
dst, top, ):
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitBH(
dst, top, )
_HPDF_Destination_SetFitBV=haru.HPDF_Destination_SetFitBV
_HPDF_Destination_SetFitBV.restype=HPDF_STATUS
def HPDF_Destination_SetFitBV(
dst, left, ):
left=HPDF_REAL(left)
return _HPDF_Destination_SetFitBV(
dst, left, )
HPDF_GetEncoder=haru.HPDF_GetEncoder
HPDF_GetEncoder.restype=HPDF_Encoder
HPDF_GetCurrentEncoder=haru.HPDF_GetCurrentEncoder
HPDF_GetCurrentEncoder.restype=HPDF_Encoder
HPDF_SetCurrentEncoder=haru.HPDF_SetCurrentEncoder
HPDF_SetCurrentEncoder.restype=HPDF_STATUS
HPDF_Encoder_GetType=haru.HPDF_Encoder_GetType
HPDF_Encoder_GetType.restype=HPDF_EncoderType
_HPDF_Encoder_GetByteType=haru.HPDF_Encoder_GetByteType
_HPDF_Encoder_GetByteType.restype=HPDF_ByteType
def HPDF_Encoder_GetByteType(
encoder, text, index ):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Encoder_GetByteType(
encoder, text, index )
HPDF_Encoder_GetUnicode=haru.HPDF_Encoder_GetUnicode
HPDF_Encoder_GetUnicode.restype=HPDF_UNICODE
HPDF_Encoder_GetWritingMode=haru.HPDF_Encoder_GetWritingMode
HPDF_Encoder_GetWritingMode.restype=HPDF_WritingMode
HPDF_UseJPEncodings=haru.HPDF_UseJPEncodings
HPDF_UseJPEncodings.restype=HPDF_STATUS
HPDF_UseKREncodings=haru.HPDF_UseKREncodings
HPDF_UseKREncodings.restype=HPDF_STATUS
HPDF_UseCNSEncodings=haru.HPDF_UseCNSEncodings
HPDF_UseCNSEncodings.restype=HPDF_STATUS
HPDF_UseCNTEncodings=haru.HPDF_UseCNTEncodings
HPDF_UseCNTEncodings.restype=HPDF_STATUS
HPDF_Page_CreateTextAnnot=haru.HPDF_Page_CreateTextAnnot
HPDF_Page_CreateTextAnnot.restype=HPDF_Annotation
HPDF_Page_CreateLinkAnnot=haru.HPDF_Page_CreateLinkAnnot
HPDF_Page_CreateLinkAnnot.restype=HPDF_Annotation
HPDF_Page_CreateURILinkAnnot=haru.HPDF_Page_CreateURILinkAnnot
HPDF_Page_CreateURILinkAnnot.restype=HPDF_Annotation
HPDF_LinkAnnot_SetHighlightMode=haru.HPDF_LinkAnnot_SetHighlightMode
HPDF_LinkAnnot_SetHighlightMode.restype=HPDF_STATUS
_HPDF_LinkAnnot_SetBorderStyle=haru.HPDF_LinkAnnot_SetBorderStyle
_HPDF_LinkAnnot_SetBorderStyle.restype=HPDF_STATUS
def HPDF_LinkAnnot_SetBorderStyle(
annot, width, dash_on, dash_off, ):
width=HPDF_REAL(width)
dash_on=HPDF_UINT16(dash_on)
dash_off=HPDF_UINT16(dash_off)
return _HPDF_LinkAnnot_SetBorderStyle(
annot, width, dash_on, dash_off, )
HPDF_TextAnnot_SetIcon=haru.HPDF_TextAnnot_SetIcon
HPDF_TextAnnot_SetIcon.restype=HPDF_STATUS
HPDF_TextAnnot_SetOpened=haru.HPDF_TextAnnot_SetOpened
HPDF_TextAnnot_SetOpened.restype=HPDF_STATUS
HPDF_LoadPngImageFromFile=haru.HPDF_LoadPngImageFromFile
HPDF_LoadPngImageFromFile.restype=HPDF_Image
HPDF_LoadPngImageFromFile2=haru.HPDF_LoadPngImageFromFile2
HPDF_LoadPngImageFromFile2.restype=HPDF_Image
HPDF_LoadJpegImageFromFile=haru.HPDF_LoadJpegImageFromFile
HPDF_LoadJpegImageFromFile.restype=HPDF_Image
_HPDF_LoadRawImageFromFile=haru.HPDF_LoadRawImageFromFile
_HPDF_LoadRawImageFromFile.restype=HPDF_Image
def HPDF_LoadRawImageFromFile(
pdf, filename, width, height, color_space, ):
width=HPDF_UINT(width)
height=HPDF_UINT(height)
return _HPDF_LoadRawImageFromFile(
pdf, filename, width, height, color_space, )
_HPDF_LoadRawImageFromMem=haru.HPDF_LoadRawImageFromMem
_HPDF_LoadRawImageFromMem.restype=HPDF_Image
def HPDF_LoadRawImageFromMem(
pdf, buf, width, height, color_space, bits_per_component, ):
if type(buf) in (types.ListType, types.TupleType):
size=len(buf)
buf=pointer((HPDF_BYTE*size)(*buf))
if height in [0, None]:
height=size/width
width=HPDF_UINT(width)
height=HPDF_UINT(height)
bits_per_component=HPDF_UINT(bits_per_component)
return _HPDF_LoadRawImageFromMem(
pdf, buf, width, height, color_space, bits_per_component, )
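# A hedged usage sketch for the raw-image helper above (illustration only,
# not part of the original binding): when buf is a plain Python list, the
# wrapper sizes the ctypes buffer itself, and a height of 0/None is derived
# as len(buf)/width. HPDF_CS_DEVICE_GRAY is assumed to be bound earlier in
# this module, as in the libharu headers.
#
#   gray_2x2 = [0x00, 0xFF, 0xFF, 0x00]
#   image = HPDF_LoadRawImageFromMem(pdf, gray_2x2, 2, None, HPDF_CS_DEVICE_GRAY, 8)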
HPDF_Image_GetSize=haru.HPDF_Image_GetSize
HPDF_Image_GetSize.restype=HPDF_Point
_HPDF_Image_GetSize2=haru.HPDF_Image_GetSize2
_HPDF_Image_GetSize2.restype=HPDF_STATUS
def HPDF_Image_GetSize2(
image, size=None, ):
    size=HPDF_Point()
ret= _HPDF_Image_GetSize2(
image, size, )
return ret, size.x, size.y
HPDF_Image_GetWidth=haru.HPDF_Image_GetWidth
HPDF_Image_GetWidth.restype=HPDF_UINT
HPDF_Image_GetHeight=haru.HPDF_Image_GetHeight
HPDF_Image_GetHeight.restype=HPDF_UINT
HPDF_Image_GetBitsPerComponent=haru.HPDF_Image_GetBitsPerComponent
HPDF_Image_GetBitsPerComponent.restype=HPDF_UINT
HPDF_Image_GetColorSpace=haru.HPDF_Image_GetColorSpace
HPDF_Image_GetColorSpace.restype=c_char_p
_HPDF_Image_SetColorMask=haru.HPDF_Image_SetColorMask
_HPDF_Image_SetColorMask.restype=HPDF_STATUS
def HPDF_Image_SetColorMask(
image, rmin, rmax, gmin, gmax, bmin, bmax, ):
rmin=HPDF_UINT(rmin)
rmax=HPDF_UINT(rmax)
gmin=HPDF_UINT(gmin)
gmax=HPDF_UINT(gmax)
bmin=HPDF_UINT(bmin)
bmax=HPDF_UINT(bmax)
return _HPDF_Image_SetColorMask(
image, rmin, rmax, gmin, gmax, bmin, bmax, )
HPDF_Image_SetMaskImage=haru.HPDF_Image_SetMaskImage
HPDF_Image_SetMaskImage.restype=HPDF_STATUS
HPDF_SetInfoAttr=haru.HPDF_SetInfoAttr
HPDF_SetInfoAttr.restype=HPDF_STATUS
HPDF_GetInfoAttr=haru.HPDF_GetInfoAttr
HPDF_GetInfoAttr.restype=c_char_p
HPDF_SetInfoDateAttr=haru.HPDF_SetInfoDateAttr
HPDF_SetInfoDateAttr.restype=HPDF_STATUS
HPDF_SetPassword=haru.HPDF_SetPassword
HPDF_SetPassword.restype=HPDF_STATUS
_HPDF_SetPermission=haru.HPDF_SetPermission
_HPDF_SetPermission.restype=HPDF_STATUS
def HPDF_SetPermission(
pdf, permission, ):
permission=HPDF_UINT(int(permission))
return _HPDF_SetPermission(
pdf, permission, )
_HPDF_SetEncryptionMode=haru.HPDF_SetEncryptionMode
_HPDF_SetEncryptionMode.restype=HPDF_STATUS
def HPDF_SetEncryptionMode(
pdf, mode, key_len, ):
key_len=HPDF_UINT(int(key_len))
return _HPDF_SetEncryptionMode(
pdf, mode, key_len, )
HPDF_SetCompressionMode=haru.HPDF_SetCompressionMode
HPDF_SetCompressionMode.restype=HPDF_STATUS
HPDF_Font_GetFontName=haru.HPDF_Font_GetFontName
HPDF_Font_GetFontName.restype=c_char_p
HPDF_Font_GetEncodingName=haru.HPDF_Font_GetEncodingName
HPDF_Font_GetEncodingName.restype=c_char_p
HPDF_Font_GetUnicodeWidth=haru.HPDF_Font_GetUnicodeWidth
HPDF_Font_GetUnicodeWidth.restype=HPDF_INT
HPDF_Font_GetBBox=haru.HPDF_Font_GetBBox
HPDF_Font_GetBBox.restype=HPDF_Box
HPDF_Font_GetAscent=haru.HPDF_Font_GetAscent
HPDF_Font_GetAscent.restype=HPDF_INT
HPDF_Font_GetDescent=haru.HPDF_Font_GetDescent
HPDF_Font_GetDescent.restype=HPDF_INT
HPDF_Font_GetXHeight=haru.HPDF_Font_GetXHeight
HPDF_Font_GetXHeight.restype=HPDF_UINT
HPDF_Font_GetCapHeight=haru.HPDF_Font_GetCapHeight
HPDF_Font_GetCapHeight.restype=HPDF_UINT
HPDF_Font_TextWidth=haru.HPDF_Font_TextWidth
HPDF_Font_TextWidth.restype=HPDF_TextWidth
_HPDF_Font_MeasureText=haru.HPDF_Font_MeasureText
_HPDF_Font_MeasureText.restype=HPDF_UINT
def HPDF_Font_MeasureText(
font, text, length, width, font_size, char_space, word_space, wordwrap, real_width, ):
if type(text) in (types.TupleType, types.ListType):
length=len(text)
text=pointer((HPDF_BYTE*length)(*text))
length=HPDF_UINT(int(length))
width=HPDF_REAL(width)
font_size=HPDF_REAL(font_size)
char_space=HPDF_REAL(char_space)
word_space=HPDF_REAL(word_space)
real_width=HPDF_REAL(real_width)
return _HPDF_Font_MeasureText(
font, text, length, width, font_size, char_space, word_space, wordwrap, real_width, )
HPDF_CreateExtGState=haru.HPDF_CreateExtGState
HPDF_CreateExtGState.restype=HPDF_ExtGState
_HPDF_ExtGState_SetAlphaStroke=haru.HPDF_ExtGState_SetAlphaStroke
_HPDF_ExtGState_SetAlphaStroke.restype=HPDF_STATUS
def HPDF_ExtGState_SetAlphaStroke(
ext_gstate, value, ):
value=HPDF_REAL(value)
return _HPDF_ExtGState_SetAlphaStroke(
ext_gstate, value, )
_HPDF_ExtGState_SetAlphaFill=haru.HPDF_ExtGState_SetAlphaFill
_HPDF_ExtGState_SetAlphaFill.restype=HPDF_STATUS
def HPDF_ExtGState_SetAlphaFill(
ext_gstate, value, ):
value=HPDF_REAL(value)
return _HPDF_ExtGState_SetAlphaFill(
ext_gstate, value, )
HPDF_ExtGState_SetBlendMode=haru.HPDF_ExtGState_SetBlendMode
HPDF_ExtGState_SetBlendMode.restype=HPDF_STATUS
_HPDF_Page_TextWidth=haru.HPDF_Page_TextWidth
_HPDF_Page_TextWidth.restype=HPDF_REAL
def HPDF_Page_TextWidth(
page, text, ):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_TextWidth(
page, text, )
_HPDF_Page_MeasureText=haru.HPDF_Page_MeasureText
_HPDF_Page_MeasureText.restype=HPDF_UINT
def HPDF_Page_MeasureText(
page, text, width, wordwrap, real_width, ):
width=HPDF_REAL(width)
real_width=HPDF_REAL(real_width)
return _HPDF_Page_MeasureText(
page, text, width, wordwrap, real_width, )
HPDF_Page_GetWidth=haru.HPDF_Page_GetWidth
HPDF_Page_GetWidth.restype=HPDF_REAL
HPDF_Page_GetHeight=haru.HPDF_Page_GetHeight
HPDF_Page_GetHeight.restype=HPDF_REAL
HPDF_Page_GetGMode=haru.HPDF_Page_GetGMode
HPDF_Page_GetGMode.restype=HPDF_UINT16
HPDF_Page_GetCurrentPos=haru.HPDF_Page_GetCurrentPos
HPDF_Page_GetCurrentPos.restype=HPDF_Point
_HPDF_Page_GetCurrentPos2=haru.HPDF_Page_GetCurrentPos2
_HPDF_Page_GetCurrentPos2.restype=HPDF_STATUS
def HPDF_Page_GetCurrentPos2(
page, pos=None, ):
pos=HPDF_Point()
ret= _HPDF_Page_GetCurrentPos2(
page, pos, )
return ret, pos.x, pos.y
HPDF_Page_GetCurrentTextPos=haru.HPDF_Page_GetCurrentTextPos
HPDF_Page_GetCurrentTextPos.restype=HPDF_Point
_HPDF_Page_GetCurrentTextPos2=haru.HPDF_Page_GetCurrentTextPos2
_HPDF_Page_GetCurrentTextPos2.restype=HPDF_STATUS
def HPDF_Page_GetCurrentTextPos2(
page, pos=None, ):
pos=HPDF_Point()
ret= _HPDF_Page_GetCurrentTextPos2(
page, pos, )
return ret, pos.x, pos.y
HPDF_Page_GetCurrentFont=haru.HPDF_Page_GetCurrentFont
HPDF_Page_GetCurrentFont.restype=HPDF_Font
HPDF_Page_GetCurrentFontSize=haru.HPDF_Page_GetCurrentFontSize
HPDF_Page_GetCurrentFontSize.restype=HPDF_REAL
HPDF_Page_GetTransMatrix=haru.HPDF_Page_GetTransMatrix
HPDF_Page_GetTransMatrix.restype=HPDF_TransMatrix
HPDF_Page_GetLineWidth=haru.HPDF_Page_GetLineWidth
HPDF_Page_GetLineWidth.restype=HPDF_REAL
HPDF_Page_GetLineCap=haru.HPDF_Page_GetLineCap
HPDF_Page_GetLineCap.restype=HPDF_LineCap
HPDF_Page_GetLineJoin=haru.HPDF_Page_GetLineJoin
HPDF_Page_GetLineJoin.restype=HPDF_LineJoin
HPDF_Page_GetMiterLimit=haru.HPDF_Page_GetMiterLimit
HPDF_Page_GetMiterLimit.restype=HPDF_REAL
HPDF_Page_GetDash=haru.HPDF_Page_GetDash
HPDF_Page_GetDash.restype=HPDF_DashMode
HPDF_Page_GetFlat=haru.HPDF_Page_GetFlat
HPDF_Page_GetFlat.restype=HPDF_REAL
HPDF_Page_GetCharSpace=haru.HPDF_Page_GetCharSpace
HPDF_Page_GetCharSpace.restype=HPDF_REAL
HPDF_Page_GetWordSpace=haru.HPDF_Page_GetWordSpace
HPDF_Page_GetWordSpace.restype=HPDF_REAL
HPDF_Page_GetHorizontalScalling=haru.HPDF_Page_GetHorizontalScalling
HPDF_Page_GetHorizontalScalling.restype=HPDF_REAL
HPDF_Page_GetTextLeading=haru.HPDF_Page_GetTextLeading
HPDF_Page_GetTextLeading.restype=HPDF_REAL
HPDF_Page_GetTextRenderingMode=haru.HPDF_Page_GetTextRenderingMode
HPDF_Page_GetTextRenderingMode.restype=HPDF_TextRenderingMode
HPDF_Page_GetTextRaise=haru.HPDF_Page_GetTextRaise
HPDF_Page_GetTextRaise.restype=HPDF_REAL
HPDF_Page_GetTextRise=haru.HPDF_Page_GetTextRise
HPDF_Page_GetTextRise.restype=HPDF_REAL
HPDF_Page_GetRGBFill=haru.HPDF_Page_GetRGBFill
HPDF_Page_GetRGBFill.restype=HPDF_RGBColor
HPDF_Page_GetRGBStroke=haru.HPDF_Page_GetRGBStroke
HPDF_Page_GetRGBStroke.restype=HPDF_RGBColor
HPDF_Page_GetCMYKFill=haru.HPDF_Page_GetCMYKFill
HPDF_Page_GetCMYKFill.restype=HPDF_CMYKColor
HPDF_Page_GetCMYKStroke=haru.HPDF_Page_GetCMYKStroke
HPDF_Page_GetCMYKStroke.restype=HPDF_CMYKColor
HPDF_Page_GetGrayFill=haru.HPDF_Page_GetGrayFill
HPDF_Page_GetGrayFill.restype=HPDF_REAL
HPDF_Page_GetGrayStroke=haru.HPDF_Page_GetGrayStroke
HPDF_Page_GetGrayStroke.restype=HPDF_REAL
HPDF_Page_GetStrokingColorSpace=haru.HPDF_Page_GetStrokingColorSpace
HPDF_Page_GetStrokingColorSpace.restype=HPDF_ColorSpace
HPDF_Page_GetFillingColorSpace=haru.HPDF_Page_GetFillingColorSpace
HPDF_Page_GetFillingColorSpace.restype=HPDF_ColorSpace
HPDF_Page_GetTextMatrix=haru.HPDF_Page_GetTextMatrix
HPDF_Page_GetTextMatrix.restype=HPDF_TransMatrix
HPDF_Page_GetGStateDepth=haru.HPDF_Page_GetGStateDepth
HPDF_Page_GetGStateDepth.restype=HPDF_UINT
_HPDF_Page_SetLineWidth=haru.HPDF_Page_SetLineWidth
_HPDF_Page_SetLineWidth.restype=HPDF_STATUS
def HPDF_Page_SetLineWidth(
page, line_width, ):
line_width=HPDF_REAL(line_width)
return _HPDF_Page_SetLineWidth(
page, line_width, )
HPDF_Page_SetLineCap=haru.HPDF_Page_SetLineCap
HPDF_Page_SetLineCap.restype=HPDF_STATUS
HPDF_Page_SetLineJoin=haru.HPDF_Page_SetLineJoin
HPDF_Page_SetLineJoin.restype=HPDF_STATUS
_HPDF_Page_SetMiterLimit=haru.HPDF_Page_SetMiterLimit
_HPDF_Page_SetMiterLimit.restype=HPDF_STATUS
def HPDF_Page_SetMiterLimit(
page, miter_limit, ):
miter_limit=HPDF_REAL(miter_limit)
return _HPDF_Page_SetMiterLimit(
page, miter_limit, )
_HPDF_Page_SetDash=haru.HPDF_Page_SetDash
_HPDF_Page_SetDash.restype=HPDF_STATUS
def HPDF_Page_SetDash(
page, dash_ptn, num_param, phase, ):
if type(dash_ptn) in (types.ListType, types.TupleType):
num_param=len(dash_ptn)
dash_ptn=pointer((HPDF_UINT16*num_param)(*dash_ptn))
return _HPDF_Page_SetDash(
page, dash_ptn, num_param, phase, )
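# Hedged illustration for the dash wrapper above (not part of the original
# binding): when dash_ptn is a list or tuple, num_param is recomputed from
# its length, so the value passed for num_param is ignored in that case.
#
#   HPDF_Page_SetDash(page, [3, 7], None, 0)  # 3 units on, 7 off, phase 0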
_HPDF_Page_SetFlat=haru.HPDF_Page_SetFlat
_HPDF_Page_SetFlat.restype=HPDF_STATUS
def HPDF_Page_SetFlat(
page, flatness, ):
flatness=HPDF_REAL(flatness)
return _HPDF_Page_SetFlat(
page, flatness, )
HPDF_Page_SetExtGState=haru.HPDF_Page_SetExtGState
HPDF_Page_SetExtGState.restype=HPDF_STATUS
HPDF_Page_GSave=haru.HPDF_Page_GSave
HPDF_Page_GSave.restype=HPDF_STATUS
HPDF_Page_GRestore=haru.HPDF_Page_GRestore
HPDF_Page_GRestore.restype=HPDF_STATUS
_HPDF_Page_Concat=haru.HPDF_Page_Concat
_HPDF_Page_Concat.restype=HPDF_STATUS
def HPDF_Page_Concat(
page, a, b, c, d, x, y, ):
a=HPDF_REAL(a)
b=HPDF_REAL(b)
c=HPDF_REAL(c)
d=HPDF_REAL(d)
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_Concat(
page, a, b, c, d, x, y, )
_HPDF_Page_MoveTo=haru.HPDF_Page_MoveTo
_HPDF_Page_MoveTo.restype=HPDF_STATUS
def HPDF_Page_MoveTo(
page, x, y, ):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTo(
page, x, y, )
_HPDF_Page_LineTo=haru.HPDF_Page_LineTo
_HPDF_Page_LineTo.restype=HPDF_STATUS
def HPDF_Page_LineTo(
page, x, y, ):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_LineTo(
page, x, y, )
_HPDF_Page_CurveTo=haru.HPDF_Page_CurveTo
_HPDF_Page_CurveTo.restype=HPDF_STATUS
def HPDF_Page_CurveTo(
page, x1, y1, x2, y2, x3, y3, ):
x1=HPDF_REAL(x1)
y1=HPDF_REAL(y1)
x2=HPDF_REAL(x2)
y2=HPDF_REAL(y2)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo(
page, x1, y1, x2, y2, x3, y3, )
_HPDF_Page_CurveTo2=haru.HPDF_Page_CurveTo2
_HPDF_Page_CurveTo2.restype=HPDF_STATUS
def HPDF_Page_CurveTo2(
page, x2, y2, x3, y3, ):
x2=HPDF_REAL(x2)
y2=HPDF_REAL(y2)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo2(
page, x2, y2, x3, y3, )
_HPDF_Page_CurveTo3=haru.HPDF_Page_CurveTo3
_HPDF_Page_CurveTo3.restype=HPDF_STATUS
def HPDF_Page_CurveTo3(
page, x1, y1, x3, y3, ):
x1=HPDF_REAL(x1)
y1=HPDF_REAL(y1)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo3(
page, x1, y1, x3, y3, )
HPDF_Page_ClosePath=haru.HPDF_Page_ClosePath
HPDF_Page_ClosePath.restype=HPDF_STATUS
_HPDF_Page_Rectangle=haru.HPDF_Page_Rectangle
_HPDF_Page_Rectangle.restype=HPDF_STATUS
def HPDF_Page_Rectangle(
page, x, y, width, height, ):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
width=HPDF_REAL(width)
height=HPDF_REAL(height)
return _HPDF_Page_Rectangle(
page, x, y, width, height, )
_HPDF_Page_Stroke=haru.HPDF_Page_Stroke
_HPDF_Page_Stroke.restype=HPDF_STATUS
def HPDF_Page_Stroke(
page, ):
return _HPDF_Page_Stroke(
page, )
HPDF_Page_ClosePathStroke=haru.HPDF_Page_ClosePathStroke
HPDF_Page_ClosePathStroke.restype=HPDF_STATUS
HPDF_Page_Fill=haru.HPDF_Page_Fill
HPDF_Page_Fill.restype=HPDF_STATUS
HPDF_Page_Eofill=haru.HPDF_Page_Eofill
HPDF_Page_Eofill.restype=HPDF_STATUS
HPDF_Page_FillStroke=haru.HPDF_Page_FillStroke
HPDF_Page_FillStroke.restype=HPDF_STATUS
HPDF_Page_EofillStroke=haru.HPDF_Page_EofillStroke
HPDF_Page_EofillStroke.restype=HPDF_STATUS
HPDF_Page_ClosePathFillStroke=haru.HPDF_Page_ClosePathFillStroke
HPDF_Page_ClosePathFillStroke.restype=HPDF_STATUS
HPDF_Page_ClosePathEofillStroke=haru.HPDF_Page_ClosePathEofillStroke
HPDF_Page_ClosePathEofillStroke.restype=HPDF_STATUS
HPDF_Page_EndPath=haru.HPDF_Page_EndPath
HPDF_Page_EndPath.restype=HPDF_STATUS
HPDF_Page_Clip=haru.HPDF_Page_Clip
HPDF_Page_Clip.restype=HPDF_STATUS
HPDF_Page_Eoclip=haru.HPDF_Page_Eoclip
HPDF_Page_Eoclip.restype=HPDF_STATUS
HPDF_Page_BeginText=haru.HPDF_Page_BeginText
HPDF_Page_BeginText.restype=HPDF_STATUS
HPDF_Page_EndText=haru.HPDF_Page_EndText
HPDF_Page_EndText.restype=HPDF_STATUS
_HPDF_Page_SetCharSpace=haru.HPDF_Page_SetCharSpace
_HPDF_Page_SetCharSpace.restype=HPDF_STATUS
def HPDF_Page_SetCharSpace(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetCharSpace(
page, value, )
_HPDF_Page_SetWordSpace=haru.HPDF_Page_SetWordSpace
_HPDF_Page_SetWordSpace.restype=HPDF_STATUS
def HPDF_Page_SetWordSpace(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetWordSpace(
page, value, )
_HPDF_Page_SetHorizontalScalling=haru.HPDF_Page_SetHorizontalScalling
_HPDF_Page_SetHorizontalScalling.restype=HPDF_STATUS
def HPDF_Page_SetHorizontalScalling(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetHorizontalScalling(
page, value, )
_HPDF_Page_SetTextLeading=haru.HPDF_Page_SetTextLeading
_HPDF_Page_SetTextLeading.restype=HPDF_STATUS
def HPDF_Page_SetTextLeading(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextLeading(
page, value, )
_HPDF_Page_SetFontAndSize=haru.HPDF_Page_SetFontAndSize
_HPDF_Page_SetFontAndSize.restype=HPDF_STATUS
def HPDF_Page_SetFontAndSize(
page, font, size, ):
size=HPDF_REAL(size)
return _HPDF_Page_SetFontAndSize(
page, font, size, )
HPDF_Page_SetTextRenderingMode=haru.HPDF_Page_SetTextRenderingMode
HPDF_Page_SetTextRenderingMode.restype=HPDF_STATUS
_HPDF_Page_SetTextRise=haru.HPDF_Page_SetTextRise
_HPDF_Page_SetTextRise.restype=HPDF_STATUS
def HPDF_Page_SetTextRise(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextRise(
page, value, )
_HPDF_Page_SetTextRaise=haru.HPDF_Page_SetTextRaise
_HPDF_Page_SetTextRaise.restype=HPDF_STATUS
def HPDF_Page_SetTextRaise(
page, value, ):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextRaise(
page, value, )
_HPDF_Page_MoveTextPos=haru.HPDF_Page_MoveTextPos
_HPDF_Page_MoveTextPos.restype=HPDF_STATUS
def HPDF_Page_MoveTextPos(
page, x, y, ):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTextPos(
page, x, y, )
_HPDF_Page_MoveTextPos2=haru.HPDF_Page_MoveTextPos2
_HPDF_Page_MoveTextPos2.restype=HPDF_STATUS
def HPDF_Page_MoveTextPos2(
page, x, y, ):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTextPos2(
page, x, y, )
_HPDF_Page_SetTextMatrix=haru.HPDF_Page_SetTextMatrix
_HPDF_Page_SetTextMatrix.restype=HPDF_STATUS
def HPDF_Page_SetTextMatrix(
page, a, b, c, d, x, y, ):
a=HPDF_REAL(a)
b=HPDF_REAL(b)
c=HPDF_REAL(c)
d=HPDF_REAL(d)
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_SetTextMatrix(
page, a, b, c, d, x, y, )
HPDF_Page_MoveToNextLine=haru.HPDF_Page_MoveToNextLine
HPDF_Page_MoveToNextLine.restype=HPDF_STATUS
_HPDF_Page_ShowText=haru.HPDF_Page_ShowText
_HPDF_Page_ShowText.restype=HPDF_STATUS
def HPDF_Page_ShowText(page,
text
):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_ShowText(page,
text
)
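# Hedged illustration of the text conversion shared by several wrappers in
# this module (ShowText, TextOut, TextWidth, Encoder_GetByteType): a list or
# tuple of byte values is joined into a str before reaching libharu, so the
# two calls below are equivalent.
#
#   HPDF_Page_ShowText(page, "Hi")
#   HPDF_Page_ShowText(page, [0x48, 0x69])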
#HPDF_STATUS HPDF_Page_ShowTextNextLine (HPDF_Page page, const char *text)
HPDF_Page_ShowTextNextLine=haru.HPDF_Page_ShowTextNextLine
HPDF_Page_ShowTextNextLine.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Page_ShowTextNextLineEx (HPDF_Page page, HPDF_REAL word_space, HPDF_REAL char_space, const char *text)
_HPDF_Page_ShowTextNextLineEx=haru.HPDF_Page_ShowTextNextLineEx
_HPDF_Page_ShowTextNextLineEx.restype=HPDF_STATUS
def HPDF_Page_ShowTextNextLineEx(
page, #HPDF_Page
word_space, #HPDF_REAL
char_space, #HPDF_REAL
text, #c_char_p
):
word_space=HPDF_REAL(word_space)
char_space=HPDF_REAL(char_space)
return _HPDF_Page_ShowTextNextLineEx(
page, #HPDF_Page
word_space, #HPDF_REAL
char_space, #HPDF_REAL
text, #c_char_p
)
#--- Color showing ------------------------------------------------------
# cs --not implemented yet
# CS --not implemented yet
# sc --not implemented yet
# scn --not implemented yet
# SC --not implemented yet
# SCN --not implemented yet
# g
#HPDF_STATUS HPDF_Page_SetGrayFill (HPDF_Page page, HPDF_REAL gray)
_HPDF_Page_SetGrayFill=haru.HPDF_Page_SetGrayFill
_HPDF_Page_SetGrayFill.restype=HPDF_STATUS
def HPDF_Page_SetGrayFill(
page, #HPDF_Page
gray, #HPDF_REAL
):
gray=HPDF_REAL(gray)
return _HPDF_Page_SetGrayFill(
page, #HPDF_Page
gray, #HPDF_REAL
)
# G
#HPDF_STATUS HPDF_Page_SetGrayStroke (HPDF_Page page, HPDF_REAL gray)
_HPDF_Page_SetGrayStroke=haru.HPDF_Page_SetGrayStroke
_HPDF_Page_SetGrayStroke.restype=HPDF_STATUS
def HPDF_Page_SetGrayStroke(
page, #HPDF_Page
gray, #HPDF_REAL
):
gray=HPDF_REAL(gray)
return _HPDF_Page_SetGrayStroke(
page, #HPDF_Page
gray, #HPDF_REAL
)
# rg
#HPDF_STATUS HPDF_Page_SetRGBFill (HPDF_Page page, HPDF_REAL r, HPDF_REAL g, HPDF_REAL b)
_HPDF_Page_SetRGBFill=haru.HPDF_Page_SetRGBFill
_HPDF_Page_SetRGBFill.restype=HPDF_STATUS
def HPDF_Page_SetRGBFill(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
):
r=HPDF_REAL(r)
g=HPDF_REAL(g)
b=HPDF_REAL(b)
return _HPDF_Page_SetRGBFill(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
)
# RG
#HPDF_STATUS HPDF_Page_SetRGBStroke (HPDF_Page page, HPDF_REAL r, HPDF_REAL g, HPDF_REAL b)
_HPDF_Page_SetRGBStroke=haru.HPDF_Page_SetRGBStroke
_HPDF_Page_SetRGBStroke.restype=HPDF_STATUS
def HPDF_Page_SetRGBStroke(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
):
r=HPDF_REAL(r)
g=HPDF_REAL(g)
b=HPDF_REAL(b)
return _HPDF_Page_SetRGBStroke(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
)
# k
#HPDF_STATUS HPDF_Page_SetCMYKFill (HPDF_Page page, HPDF_REAL c, HPDF_REAL m, HPDF_REAL y, HPDF_REAL k)
_HPDF_Page_SetCMYKFill=haru.HPDF_Page_SetCMYKFill
_HPDF_Page_SetCMYKFill.restype=HPDF_STATUS
def HPDF_Page_SetCMYKFill(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
):
c=HPDF_REAL(c)
m=HPDF_REAL(m)
y=HPDF_REAL(y)
k=HPDF_REAL(k)
return _HPDF_Page_SetCMYKFill(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
)
# K
#HPDF_STATUS HPDF_Page_SetCMYKStroke (HPDF_Page page, HPDF_REAL c, HPDF_REAL m, HPDF_REAL y, HPDF_REAL k)
_HPDF_Page_SetCMYKStroke=haru.HPDF_Page_SetCMYKStroke
_HPDF_Page_SetCMYKStroke.restype=HPDF_STATUS
def HPDF_Page_SetCMYKStroke(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
):
c=HPDF_REAL(c)
m=HPDF_REAL(m)
y=HPDF_REAL(y)
k=HPDF_REAL(k)
return _HPDF_Page_SetCMYKStroke(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
)
#--- Shading patterns ---------------------------------------------------
# sh --not implemented yet
#--- In-line images -----------------------------------------------------
# BI --not implemented yet
# ID --not implemented yet
# EI --not implemented yet
#--- XObjects -----------------------------------------------------------
# Do
#HPDF_STATUS HPDF_Page_ExecuteXObject (HPDF_Page page, HPDF_XObject obj)
HPDF_Page_ExecuteXObject=haru.HPDF_Page_ExecuteXObject
HPDF_Page_ExecuteXObject.restype=HPDF_STATUS
#--- Marked content -----------------------------------------------------
# BMC --not implemented yet
# BDC --not implemented yet
# EMC --not implemented yet
# MP --not implemented yet
# DP --not implemented yet
#--- Compatibility ------------------------------------------------------
# BX --not implemented yet
# EX --not implemented yet
#HPDF_STATUS HPDF_Page_DrawImage (HPDF_Page page, HPDF_Image image, HPDF_REAL x, HPDF_REAL y, HPDF_REAL width, HPDF_REAL height)
_HPDF_Page_DrawImage=haru.HPDF_Page_DrawImage
_HPDF_Page_DrawImage.restype=HPDF_STATUS
def HPDF_Page_DrawImage(
page, #HPDF_Page
image, #HPDF_Image
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
width=HPDF_REAL(width)
height=HPDF_REAL(height)
return _HPDF_Page_DrawImage(
page, #HPDF_Page
image, #HPDF_Image
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Circle (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL ray)
_HPDF_Page_Circle=haru.HPDF_Page_Circle
_HPDF_Page_Circle.restype=HPDF_STATUS
def HPDF_Page_Circle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
ray=HPDF_REAL(ray)
return _HPDF_Page_Circle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Ellipse (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL xray, HPDF_REAL yray)
_HPDF_Page_Ellipse=haru.HPDF_Page_Ellipse
_HPDF_Page_Ellipse.restype=HPDF_STATUS
def HPDF_Page_Ellipse(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
xray, #HPDF_REAL
yray, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
xray=HPDF_REAL(xray)
yray=HPDF_REAL(yray)
return _HPDF_Page_Ellipse(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
xray, #HPDF_REAL
yray, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Arc (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL ray, HPDF_REAL ang1, HPDF_REAL ang2)
_HPDF_Page_Arc=haru.HPDF_Page_Arc
_HPDF_Page_Arc.restype=HPDF_STATUS
def HPDF_Page_Arc(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
ang1, #HPDF_REAL
ang2, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
ray=HPDF_REAL(ray)
ang1=HPDF_REAL(ang1)
ang2=HPDF_REAL(ang2)
return _HPDF_Page_Arc(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
ang1, #HPDF_REAL
ang2, #HPDF_REAL
)
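# A hedged sketch tying the path operators above together (illustration only;
# assumes a page obtained earlier, e.g. via HPDF_AddPage): build a path with
# the construction operators, then paint it with a painting operator such as
# HPDF_Page_Stroke.
#
#   HPDF_Page_SetLineWidth(page, 1.5)
#   HPDF_Page_Circle(page, 100, 100, 50)
#   HPDF_Page_Rectangle(page, 50, 50, 100, 100)
#   HPDF_Page_Stroke(page)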
#HPDF_STATUS HPDF_Page_TextOut (HPDF_Page page, HPDF_REAL xpos, HPDF_REAL ypos, const char *text)
_HPDF_Page_TextOut=haru.HPDF_Page_TextOut
_HPDF_Page_TextOut.restype=HPDF_STATUS
def HPDF_Page_TextOut(
page, #HPDF_Page
xpos, #HPDF_REAL
ypos, #HPDF_REAL
text, #c_char_p
):
xpos=HPDF_REAL(xpos)
ypos=HPDF_REAL(ypos)
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_TextOut(
page, #HPDF_Page
xpos, #HPDF_REAL
ypos, #HPDF_REAL
text, #c_char_p
)
#HPDF_STATUS HPDF_Page_TextRect (HPDF_Page page, HPDF_REAL left, HPDF_REAL top, HPDF_REAL right, HPDF_REAL bottom, const char *text, HPDF_TextAlignment align, HPDF_UINT *len)
# NOTE: length maps to the HPDF_UINT *len out-parameter; the wrapper below
# only builds a ctypes pointer for it when a list or tuple is passed.
_HPDF_Page_TextRect=haru.HPDF_Page_TextRect
_HPDF_Page_TextRect.restype=HPDF_STATUS
def HPDF_Page_TextRect(
page, #HPDF_Page
left, #HPDF_REAL
top, #HPDF_REAL
right, #HPDF_REAL
bottom, #HPDF_REAL
text, #c_char_p
align, #HPDF_TextAlignment
length, #POINTER(HPDF_UINT)
):
left=HPDF_REAL(left)
top=HPDF_REAL(top)
right=HPDF_REAL(right)
bottom=HPDF_REAL(bottom)
if type(length) in (types.ListType, types.TupleType):
size=len(length)
length=pointer((HPDF_UINT*size)(*length))
return _HPDF_Page_TextRect(
page, #HPDF_Page
left, #HPDF_REAL
top, #HPDF_REAL
right, #HPDF_REAL
bottom, #HPDF_REAL
text, #c_char_p
align, #HPDF_TextAlignment
length, #POINTER(HPDF_UINT)
)
#HPDF_STATUS HPDF_Page_SetSlideShow (HPDF_Page page, HPDF_TransitionStyle type, HPDF_REAL disp_time, HPDF_REAL trans_time)
_HPDF_Page_SetSlideShow=haru.HPDF_Page_SetSlideShow
_HPDF_Page_SetSlideShow.restype=HPDF_STATUS
def HPDF_Page_SetSlideShow(
page, #HPDF_Page
tType, #HPDF_TransitionStyle
disp_time, #HPDF_REAL
trans_time, #HPDF_REAL
):
disp_time=HPDF_REAL(disp_time)
trans_time=HPDF_REAL(trans_time)
return _HPDF_Page_SetSlideShow(
page, #HPDF_Page
tType, #HPDF_TransitionStyle
disp_time, #HPDF_REAL
trans_time, #HPDF_REAL
)
NULL=0
HPDF_NOPNGLIB=False
| true | true |
1c45468befaec83596d4e2e0316330f6b9f1a238 | 1,638 | py | Python | generator/generator_test.py | kwyckmans/nonogram | f4a28027a269a37c9f266a2f0f6db9920b549c05 | ["Unlicense"] | null | null | null | generator/generator_test.py | kwyckmans/nonogram | f4a28027a269a37c9f266a2f0f6db9920b549c05 | ["Unlicense"] | 2 | 2022-01-13T03:53:51.000Z | 2022-03-12T00:59:47.000Z | generator/generator_test.py | kwyckmans/nonogram | f4a28027a269a37c9f266a2f0f6db9920b549c05 | ["Unlicense"] | null | null | null |
import unittest
from generator.generator import CellValue, generate_nonogram
class GeneratorTest(unittest.TestCase):
"""Tests generation of nonograms by providing the generator with various pixel configurations.
TODO: add a single end-to-end test with a very simple image.
"""
def test_generator_for_full_black_row(self):
pixel_data = {0: [CellValue.BLACK, CellValue.BLACK]}
nonogram = generate_nonogram(pixel_data=pixel_data)
self.assertIsNotNone(nonogram)
self.assertEqual(nonogram.row_clues, [(2,)])
self.assertEqual(nonogram.col_clues, [(1,), (1,)])
def test_generator_for_full_white_row(self):
pixel_data = {0: [CellValue.WHITE, CellValue.WHITE]}
nonogram = generate_nonogram(pixel_data=pixel_data)
self.assertIsNotNone(nonogram)
self.assertEqual(nonogram.row_clues, [()])
self.assertEqual(nonogram.col_clues, [(), ()])
def test_generator_for_black_in_back_sequence(self):
pixel_data = {0: [CellValue.WHITE, CellValue.BLACK]}
nonogram = generate_nonogram(pixel_data=pixel_data)
self.assertIsNotNone(nonogram)
self.assertEqual(nonogram.row_clues, [(1,)])
self.assertEqual(nonogram.col_clues, [(), (1,)])
def test_generator_for_black_in_front_sequence(self):
pixel_data = {0: [CellValue.BLACK, CellValue.WHITE]}
nonogram = generate_nonogram(pixel_data=pixel_data)
self.assertIsNotNone(nonogram)
self.assertEqual(nonogram.row_clues, [(1,)])
self.assertEqual(nonogram.col_clues, [(1,), ()])
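    def test_generator_for_multi_row_grid(self):
        # Hedged sketch toward the TODO above: a 2x2 diagonal. This assumes
        # generate_nonogram accepts one dict entry per row index; the
        # existing tests only exercise single-row inputs.
        pixel_data = {
            0: [CellValue.BLACK, CellValue.WHITE],
            1: [CellValue.WHITE, CellValue.BLACK],
        }
        nonogram = generate_nonogram(pixel_data=pixel_data)
        self.assertIsNotNone(nonogram)
        self.assertEqual(nonogram.row_clues, [(1,), (1,)])
        self.assertEqual(nonogram.col_clues, [(1,), (1,)])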
if __name__ == "__main__":
unittest.main()
| 37.227273 | 98 | 0.696581 |
| true | true |
1c45472ed521b60ea9b1462d18e54e9cdaece161 | 96,554 | py | Python | python/ccxt/async_support/bybit.py | dougvanzee/ccxt | ed06a2d180e02d1006f33be6ba65407ecb0a831b | ["MIT"] | 3 | 2021-06-29T16:27:19.000Z | 2021-07-18T08:36:07.000Z | python/ccxt/async_support/bybit.py | Bytedex/ccxt | 3863b5e1d6c77d719ac102b0243964c4946e7abb | ["MIT"] | null | null | null | python/ccxt/async_support/bybit.py | Bytedex/ccxt | 3863b5e1d6c77d719ac102b0243964c4946e7abb | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import InvalidNonce
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class bybit(Exchange):
def describe(self):
return self.deep_extend(super(bybit, self).describe(), {
'id': 'bybit',
'name': 'Bybit',
'countries': ['VG'], # British Virgin Islands
'version': 'v2',
'userAgent': None,
'rateLimit': 100,
'hostname': 'bybit.com', # bybit.com, bytick.com
'has': {
'cancelOrder': True,
'CORS': True,
'cancelAllOrders': True,
'createOrder': True,
'editOrder': True,
'fetchBalance': True,
'fetchClosedOrders': True,
'fetchDeposits': True,
'fetchLedger': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchOrderTrades': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTransactions': False,
'fetchWithdrawals': True,
'fetchPositions': True,
},
'timeframes': {
'1m': '1',
'3m': '3',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'2h': '120',
'4h': '240',
'6h': '360',
'12h': '720',
'1d': 'D',
'1w': 'W',
'1M': 'M',
'1y': 'Y',
},
'urls': {
'test': 'https://api-testnet.{hostname}',
'logo': 'https://user-images.githubusercontent.com/51840849/76547799-daff5b80-649e-11ea-87fb-3be9bac08954.jpg',
'api': 'https://api.{hostname}',
'www': 'https://www.bybit.com',
'doc': [
'https://bybit-exchange.github.io/docs/inverse/',
'https://bybit-exchange.github.io/docs/linear/',
'https://github.com/bybit-exchange',
],
'fees': 'https://help.bybit.com/hc/en-us/articles/360039261154',
'referral': 'https://www.bybit.com/app/register?ref=X7Prm',
},
'api': {
'futures': {
'private': {
'get': [
'position/list',
'order/list',
'order',
'stop-order/list',
'stop-order',
'execution/list',
'trade/closed-pnl/list',
],
'post': [
'order/create',
'order/cancel',
'order/cancelAll',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancelAll',
'stop-order/replace',
'position/change-position-margin',
'position/trading-stop',
'position/leverage/save',
'position/switch-mode',
'position/switch-isolated',
],
},
},
'v2': {
'public': {
'get': [
'orderBook/L2',
'kline/list',
'tickers',
'trading-records',
'symbols',
'liq-records',
'mark-price-kline',
'index-price-kline',
'premium-index-kline',
'open-interest',
'big-deal',
'account-ratio',
'time',
'announcement',
],
},
'private': {
'get': [
'order/list',
'order',
'stop-order/list',
'stop-order',
'position/list',
'execution/list',
'trade/closed-pnl/list',
'funding/prev-funding-rate',
'funding/prev-funding',
'funding/predicted-funding',
'account/api-key',
'account/lcp',
'wallet/balance',
'wallet/fund/records',
'wallet/withdraw/list',
'exchange-order/list',
],
'post': [
'order/create',
'order/cancel',
'order/cancelAll',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancelAll',
'stop-order/replace',
'position/change-position-margin',
'position/trading-stop',
'position/leverage/save',
],
},
},
'public': {
'linear': {
'get': [
'kline',
'recent-trading-records',
'funding/prev-funding-rate',
'mark-price-kline',
'index-price-kline',
'premium-index-kline',
'risk-limit',
],
},
},
'private': {
'linear': {
'get': [
'order/list',
'order/search',
'stop-order/list',
'stop-order/search',
'position/list',
'trade/execution/list',
'trade/closed-pnl/list',
'funding/predicted-funding',
'funding/prev-funding',
],
'post': [
'order/create',
'order/cancel',
'order/cancel-all',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancel-all',
'stop-order/replace',
'position/set-auto-add-margin',
'position/switch-isolated',
'tpsl/switch-mode',
'position/add-margin',
'position/set-leverage',
'position/trading-stop',
],
},
},
'openapi': {
'wallet': {
'get': [
'risk-limit/list',
],
'post': [
'risk-limit',
],
},
},
},
'httpExceptions': {
'403': RateLimitExceeded, # Forbidden -- You request too many times
},
'exceptions': {
'exact': {
'10001': BadRequest, # parameter error
'10002': InvalidNonce, # request expired, check your timestamp and recv_window
'10003': AuthenticationError, # Invalid apikey
'10004': AuthenticationError, # invalid sign
'10005': PermissionDenied, # permission denied for current apikey
'10006': RateLimitExceeded, # too many requests
'10007': AuthenticationError, # api_key not found in your request parameters
'10010': PermissionDenied, # request ip mismatch
'10017': BadRequest, # request path not found or request method is invalid
'20001': OrderNotFound, # Order not exists
'20003': InvalidOrder, # missing parameter side
'20004': InvalidOrder, # invalid parameter side
'20005': InvalidOrder, # missing parameter symbol
'20006': InvalidOrder, # invalid parameter symbol
'20007': InvalidOrder, # missing parameter order_type
'20008': InvalidOrder, # invalid parameter order_type
'20009': InvalidOrder, # missing parameter qty
'20010': InvalidOrder, # qty must be greater than 0
'20011': InvalidOrder, # qty must be an integer
'20012': InvalidOrder, # qty must be greater than zero and less than 1 million
'20013': InvalidOrder, # missing parameter price
'20014': InvalidOrder, # price must be greater than 0
'20015': InvalidOrder, # missing parameter time_in_force
'20016': InvalidOrder, # invalid value for parameter time_in_force
'20017': InvalidOrder, # missing parameter order_id
'20018': InvalidOrder, # invalid date format
'20019': InvalidOrder, # missing parameter stop_px
'20020': InvalidOrder, # missing parameter base_price
'20021': InvalidOrder, # missing parameter stop_order_id
'20022': BadRequest, # missing parameter leverage
'20023': BadRequest, # leverage must be a number
'20031': BadRequest, # leverage must be greater than zero
'20070': BadRequest, # missing parameter margin
'20071': BadRequest, # margin must be greater than zero
'20084': BadRequest, # order_id or order_link_id is required
'30001': BadRequest, # order_link_id is repeated
'30003': InvalidOrder, # qty must be more than the minimum allowed
'30004': InvalidOrder, # qty must be less than the maximum allowed
'30005': InvalidOrder, # price exceeds maximum allowed
'30007': InvalidOrder, # price exceeds minimum allowed
'30008': InvalidOrder, # invalid order_type
'30009': ExchangeError, # no position found
'30010': InsufficientFunds, # insufficient wallet balance
'30011': PermissionDenied, # operation not allowed as position is undergoing liquidation
'30012': PermissionDenied, # operation not allowed as position is undergoing ADL
'30013': PermissionDenied, # position is in liq or adl status
'30014': InvalidOrder, # invalid closing order, qty should not greater than size
'30015': InvalidOrder, # invalid closing order, side should be opposite
'30016': ExchangeError, # TS and SL must be cancelled first while closing position
'30017': InvalidOrder, # estimated fill price cannot be lower than current Buy liq_price
'30018': InvalidOrder, # estimated fill price cannot be higher than current Sell liq_price
'30019': InvalidOrder, # cannot attach TP/SL params for non-zero position when placing non-opening position order
'30020': InvalidOrder, # position already has TP/SL params
'30021': InvalidOrder, # cannot afford estimated position_margin
'30022': InvalidOrder, # estimated buy liq_price cannot be higher than current mark_price
'30023': InvalidOrder, # estimated sell liq_price cannot be lower than current mark_price
'30024': InvalidOrder, # cannot set TP/SL/TS for zero-position
'30025': InvalidOrder, # trigger price should bigger than 10% of last price
'30026': InvalidOrder, # price too high
'30027': InvalidOrder, # price set for Take profit should be higher than Last Traded Price
'30028': InvalidOrder, # price set for Stop loss should be between Liquidation price and Last Traded Price
'30029': InvalidOrder, # price set for Stop loss should be between Last Traded Price and Liquidation price
'30030': InvalidOrder, # price set for Take profit should be lower than Last Traded Price
'30031': InsufficientFunds, # insufficient available balance for order cost
'30032': InvalidOrder, # order has been filled or cancelled
'30033': RateLimitExceeded, # The number of stop orders exceeds maximum limit allowed
'30034': OrderNotFound, # no order found
'30035': RateLimitExceeded, # too fast to cancel
'30036': ExchangeError, # the expected position value after order execution exceeds the current risk limit
'30037': InvalidOrder, # order already cancelled
'30041': ExchangeError, # no position found
'30042': InsufficientFunds, # insufficient wallet balance
'30043': PermissionDenied, # operation not allowed as position is undergoing liquidation
                '30044': PermissionDenied,  # operation not allowed as position is undergoing ADL
'30045': PermissionDenied, # operation not allowed as position is not normal status
'30049': InsufficientFunds, # insufficient available balance
'30050': ExchangeError, # any adjustments made will trigger immediate liquidation
'30051': ExchangeError, # due to risk limit, cannot adjust leverage
'30052': ExchangeError, # leverage can not less than 1
'30054': ExchangeError, # position margin is invalid
'30057': ExchangeError, # requested quantity of contracts exceeds risk limit
'30063': ExchangeError, # reduce-only rule not satisfied
'30067': InsufficientFunds, # insufficient available balance
'30068': ExchangeError, # exit value must be positive
'34026': ExchangeError, # the limit is no change
},
'broad': {
'unknown orderInfo': OrderNotFound, # {"ret_code":-1,"ret_msg":"unknown orderInfo","ext_code":"","ext_info":"","result":null,"time_now":"1584030414.005545","rate_limit_status":99,"rate_limit_reset_ms":1584030414003,"rate_limit":100}
'invalid api_key': AuthenticationError, # {"ret_code":10003,"ret_msg":"invalid api_key","ext_code":"","ext_info":"","result":null,"time_now":"1599547085.415797"}
},
},
'precisionMode': TICK_SIZE,
'options': {
'marketTypes': {
'BTC/USDT': 'linear',
'BCH/USDT': 'linear',
'ETH/USDT': 'linear',
'LTC/USDT': 'linear',
'XTZ/USDT': 'linear',
'LINK/USDT': 'linear',
'ADA/USDT': 'linear',
'DOT/USDT': 'linear',
'UNI/USDT': 'linear',
},
'defaultType': 'linear', # may also be inverse or inverseFuture
'code': 'BTC',
'cancelAllOrders': {
# 'method': 'v2PrivatePostOrderCancelAll', # v2PrivatePostStopOrderCancelAll
},
'recvWindow': 5 * 1000, # 5 sec default
'timeDifference': 0, # the difference between system clock and exchange server clock
'adjustForTimeDifference': False, # controls the adjustment logic upon instantiation
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'taker': 0.00075,
'maker': -0.00025,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {},
'deposit': {},
},
},
})
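    # A hedged usage sketch (illustration only, not part of the generated
    # file): the class above is driven through the standard ccxt async API.
    #
    #   import ccxt.async_support as ccxt
    #   exchange = ccxt.bybit({'apiKey': '...', 'secret': '...'})
    #   ticker = await exchange.fetch_ticker('BTC/USDT')
    #   await exchange.close()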
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
async def load_time_difference(self, params={}):
serverTime = await self.fetch_time(params)
after = self.milliseconds()
self.options['timeDifference'] = after - serverTime
return self.options['timeDifference']
async def fetch_time(self, params={}):
response = await self.v2PublicGetTime(params)
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: {},
# time_now: '1583933682.448826'
# }
#
return self.safe_timestamp(response, 'time_now')
async def fetch_markets(self, params={}):
if self.options['adjustForTimeDifference']:
await self.load_time_difference()
response = await self.v2PublicGetSymbols(params)
#
# {
# "ret_code":0,
# "ret_msg":"OK",
# "ext_code":"",
# "ext_info":"",
# "result":[
# {
# "name":"BTCUSD",
# "alias":"BTCUSD",
# "status":"Trading",
# "base_currency":"BTC",
# "quote_currency":"USD",
# "price_scale":2,
# "taker_fee":"0.00075",
# "maker_fee":"-0.00025",
# "leverage_filter":{"min_leverage":1,"max_leverage":100,"leverage_step":"0.01"},
# "price_filter":{"min_price":"0.5","max_price":"999999.5","tick_size":"0.5"},
# "lot_size_filter":{"max_trading_qty":1000000,"min_trading_qty":1,"qty_step":1}
# },
# {
# "name":"BTCUSDT",
# "alias":"BTCUSDT",
# "status":"Trading",
# "base_currency":"BTC",
# "quote_currency":"USDT",
# "price_scale":2,
# "taker_fee":"0.00075",
# "maker_fee":"-0.00025",
# "leverage_filter":{"min_leverage":1,"max_leverage":100,"leverage_step":"0.01"},
# "price_filter":{"min_price":"0.5","max_price":"999999.5","tick_size":"0.5"},
# "lot_size_filter":{"max_trading_qty":100,"min_trading_qty":0.001,"qty_step":0.001}
# },
# ],
# "time_now":"1610539664.818033"
# }
#
markets = self.safe_value(response, 'result', [])
options = self.safe_value(self.options, 'fetchMarkets', {})
linearQuoteCurrencies = self.safe_value(options, 'linear', {'USDT': True})
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string_2(market, 'name', 'symbol')
baseId = self.safe_string(market, 'base_currency')
quoteId = self.safe_string(market, 'quote_currency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
linear = (quote in linearQuoteCurrencies)
inverse = not linear
symbol = base + '/' + quote
baseQuote = base + quote
if baseQuote != id:
symbol = id
lotSizeFilter = self.safe_value(market, 'lot_size_filter', {})
priceFilter = self.safe_value(market, 'price_filter', {})
precision = {
'amount': self.safe_number(lotSizeFilter, 'qty_step'),
'price': self.safe_number(priceFilter, 'tick_size'),
}
status = self.safe_string(market, 'status')
active = None
if status is not None:
active = (status == 'Trading')
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'active': active,
'precision': precision,
'taker': self.safe_number(market, 'taker_fee'),
'maker': self.safe_number(market, 'maker_fee'),
'type': 'future',
'spot': False,
'future': True,
'option': False,
'linear': linear,
'inverse': inverse,
'limits': {
'amount': {
'min': self.safe_number(lotSizeFilter, 'min_trading_qty'),
'max': self.safe_number(lotSizeFilter, 'max_trading_qty'),
},
'price': {
'min': self.safe_number(priceFilter, 'min_price'),
'max': self.safe_number(priceFilter, 'max_price'),
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
def parse_ticker(self, ticker, market=None):
#
# fetchTicker
#
# {
# symbol: 'BTCUSD',
# bid_price: '7680',
# ask_price: '7680.5',
# last_price: '7680.00',
# last_tick_direction: 'MinusTick',
# prev_price_24h: '7870.50',
# price_24h_pcnt: '-0.024204',
# high_price_24h: '8035.00',
# low_price_24h: '7671.00',
# prev_price_1h: '7780.00',
# price_1h_pcnt: '-0.012853',
# mark_price: '7683.27',
# index_price: '7682.74',
# open_interest: 188829147,
# open_value: '23670.06',
# total_turnover: '25744224.90',
# turnover_24h: '102997.83',
# total_volume: 225448878806,
# volume_24h: 809919408,
# funding_rate: '0.0001',
# predicted_funding_rate: '0.0001',
# next_funding_time: '2020-03-12T00:00:00Z',
# countdown_hour: 7
# }
#
timestamp = None
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market)
last = self.safe_number(ticker, 'last_price')
open = self.safe_number(ticker, 'prev_price_24h')
percentage = self.safe_number(ticker, 'price_24h_pcnt')
if percentage is not None:
percentage *= 100
change = None
average = None
if (last is not None) and (open is not None):
change = last - open
average = self.sum(open, last) / 2
baseVolume = self.safe_number(ticker, 'turnover_24h')
quoteVolume = self.safe_number(ticker, 'volume_24h')
vwap = self.vwap(baseVolume, quoteVolume)
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high_price_24h'),
'low': self.safe_number(ticker, 'low_price_24h'),
'bid': self.safe_number(ticker, 'bid_price'),
'bidVolume': None,
'ask': self.safe_number(ticker, 'ask_price'),
'askVolume': None,
'vwap': vwap,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
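    # Worked example for the derived fields above (illustration only), using
    # the sample ticker in the comment: last=7680.00 and open=7870.50 give
    # change = 7680.00 - 7870.50 = -190.50, average = (7870.50 + 7680.00) / 2
    # = 7775.25, and percentage = -0.024204 * 100 = -2.4204.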
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.v2PublicGetTickers(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {
# symbol: 'BTCUSD',
# bid_price: '7680',
# ask_price: '7680.5',
# last_price: '7680.00',
# last_tick_direction: 'MinusTick',
# prev_price_24h: '7870.50',
# price_24h_pcnt: '-0.024204',
# high_price_24h: '8035.00',
# low_price_24h: '7671.00',
# prev_price_1h: '7780.00',
# price_1h_pcnt: '-0.012853',
# mark_price: '7683.27',
# index_price: '7682.74',
# open_interest: 188829147,
# open_value: '23670.06',
# total_turnover: '25744224.90',
# turnover_24h: '102997.83',
# total_volume: 225448878806,
# volume_24h: 809919408,
# funding_rate: '0.0001',
# predicted_funding_rate: '0.0001',
# next_funding_time: '2020-03-12T00:00:00Z',
# countdown_hour: 7
# }
# ],
# time_now: '1583948195.818255'
# }
#
result = self.safe_value(response, 'result', [])
first = self.safe_value(result, 0)
timestamp = self.safe_timestamp(response, 'time_now')
ticker = self.parse_ticker(first, market)
ticker['timestamp'] = timestamp
ticker['datetime'] = self.iso8601(timestamp)
return ticker
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.v2PublicGetTickers(params)
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {
# symbol: 'BTCUSD',
# bid_price: '7680',
# ask_price: '7680.5',
# last_price: '7680.00',
# last_tick_direction: 'MinusTick',
# prev_price_24h: '7870.50',
# price_24h_pcnt: '-0.024204',
# high_price_24h: '8035.00',
# low_price_24h: '7671.00',
# prev_price_1h: '7780.00',
# price_1h_pcnt: '-0.012853',
# mark_price: '7683.27',
# index_price: '7682.74',
# open_interest: 188829147,
# open_value: '23670.06',
# total_turnover: '25744224.90',
# turnover_24h: '102997.83',
# total_volume: 225448878806,
# volume_24h: 809919408,
# funding_rate: '0.0001',
# predicted_funding_rate: '0.0001',
# next_funding_time: '2020-03-12T00:00:00Z',
# countdown_hour: 7
# }
# ],
# time_now: '1583948195.818255'
# }
#
result = self.safe_value(response, 'result', [])
tickers = {}
for i in range(0, len(result)):
ticker = self.parse_ticker(result[i])
symbol = ticker['symbol']
tickers[symbol] = ticker
return self.filter_by_array(tickers, 'symbol', symbols)
def parse_ohlcv(self, ohlcv, market=None):
#
# inverse perpetual BTC/USD
#
# {
# symbol: 'BTCUSD',
# interval: '1',
# open_time: 1583952540,
# open: '7760.5',
# high: '7764',
# low: '7757',
# close: '7763.5',
# volume: '1259766',
# turnover: '162.32773718999994'
# }
#
# linear perpetual BTC/USDT
#
# {
# "id":143536,
# "symbol":"BTCUSDT",
# "period":"15",
# "start_at":1587883500,
# "volume":1.035,
# "open":7540.5,
# "high":7541,
# "low":7540.5,
# "close":7541
# }
#
return [
self.safe_timestamp_2(ohlcv, 'open_time', 'start_at'),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number_2(ohlcv, 'turnover', 'volume'),
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'interval': self.timeframes[timeframe],
}
duration = self.parse_timeframe(timeframe)
now = self.seconds()
if since is None:
if limit is None:
raise ArgumentsRequired(self.id + ' fetchOHLCV() requires a since argument or a limit argument')
else:
request['from'] = now - limit * duration
else:
request['from'] = int(since / 1000)
if limit is not None:
request['limit'] = limit # max 200, default 200
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'publicLinearGetKline' if (marketType == 'linear') else 'v2PublicGetKlineList'
response = await getattr(self, method)(self.extend(request, params))
#
# inverse perpetual BTC/USD
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {
# symbol: 'BTCUSD',
# interval: '1',
# open_time: 1583952540,
# open: '7760.5',
# high: '7764',
# low: '7757',
# close: '7763.5',
# volume: '1259766',
# turnover: '162.32773718999994'
# },
# ],
# time_now: '1583953082.397330'
# }
#
# linear perpetual BTC/USDT
#
# {
# "ret_code":0,
# "ret_msg":"OK",
# "ext_code":"",
# "ext_info":"",
# "result":[
# {
# "id":143536,
# "symbol":"BTCUSDT",
# "period":"15",
# "start_at":1587883500,
# "volume":1.035,
# "open":7540.5,
# "high":7541,
# "low":7540.5,
# "close":7541
# }
# ],
# "time_now":"1587884120.168077"
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_ohlcvs(result, market, timeframe, since, limit)
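    # Worked example for the "from" computation in fetch_ohlcv() above
    # (illustration only): with timeframe='1h' (duration 3600 seconds),
    # limit=200 and since=None, the request uses
    #   from = now - 200 * 3600
    # i.e. the start of the most recent 200 hourly candles; when since is
    # given, int(since / 1000) converts the millisecond timestamp ccxt uses
    # into the seconds bybit expects.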
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# id: 43785688,
# symbol: 'BTCUSD',
# price: 7786,
# qty: 67,
# side: 'Sell',
# time: '2020-03-11T19:18:30.123Z'
# }
#
# fetchMyTrades, fetchOrderTrades(private)
#
# {
# "closed_size": 0,
# "cross_seq": 277136382,
# "exec_fee": "0.0000001",
# "exec_id": "256e5ef8-abfe-5772-971b-f944e15e0d68",
# "exec_price": "8178.5",
# "exec_qty": 1,
# # the docs say the exec_time field is "abandoned" now
# # the user should use "trade_time_ms"
# "exec_time": "1571676941.70682",
# "exec_type": "Trade", #Exec Type Enum
# "exec_value": "0.00012227",
# "fee_rate": "0.00075",
# "last_liquidity_ind": "RemovedLiquidity", #Liquidity Enum
# "leaves_qty": 0,
# "nth_fill": 2,
# "order_id": "7ad50cb1-9ad0-4f74-804b-d82a516e1029",
# "order_link_id": "",
# "order_price": "8178",
# "order_qty": 1,
# "order_type": "Market", #Order Type Enum
# "side": "Buy", #Side Enum
# "symbol": "BTCUSD", #Symbol Enum
# "user_id": 1,
# "trade_time_ms": 1577480599000
# }
#
id = self.safe_string_2(trade, 'id', 'exec_id')
marketId = self.safe_string(trade, 'symbol')
market = self.safe_market(marketId, market)
symbol = market['symbol']
amountString = self.safe_string_2(trade, 'qty', 'exec_qty')
priceString = self.safe_string_2(trade, 'exec_price', 'price')
cost = self.safe_number(trade, 'exec_value')
amount = self.parse_number(amountString)
price = self.parse_number(priceString)
if cost is None:
cost = self.parse_number(Precise.string_mul(priceString, amountString))
timestamp = self.parse8601(self.safe_string(trade, 'time'))
if timestamp is None:
timestamp = self.safe_integer(trade, 'trade_time_ms')
side = self.safe_string_lower(trade, 'side')
lastLiquidityInd = self.safe_string(trade, 'last_liquidity_ind')
takerOrMaker = 'maker' if (lastLiquidityInd == 'AddedLiquidity') else 'taker'
feeCost = self.safe_number(trade, 'exec_fee')
fee = None
if feeCost is not None:
feeCurrencyCode = market['base'] if market['inverse'] else market['quote']
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
'rate': self.safe_number(trade, 'fee_rate'),
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': self.safe_string(trade, 'order_id'),
'type': self.safe_string_lower(trade, 'order_type'),
'side': side,
'takerOrMaker': takerOrMaker,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'from': 123, # from id
}
if limit is not None:
request['count'] = limit # default 500, max 1000
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'publicLinearGetRecentTradingRecords' if (marketType == 'linear') else 'v2PublicGetTradingRecords'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {
# id: 43785688,
# symbol: 'BTCUSD',
# price: 7786,
# qty: 67,
# side: 'Sell',
# time: '2020-03-11T19:18:30.123Z'
# },
# ],
# time_now: '1583954313.393362'
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_trades(result, market, since, limit)
def parse_order_book(self, orderbook, timestamp=None, bidsKey='Buy', asksKey='Sell', priceKey='price', amountKey='size'):
bids = []
asks = []
for i in range(0, len(orderbook)):
bidask = orderbook[i]
side = self.safe_string(bidask, 'side')
if side == 'Buy':
bids.append(self.parse_bid_ask(bidask, priceKey, amountKey))
elif side == 'Sell':
asks.append(self.parse_bid_ask(bidask, priceKey, amountKey))
else:
raise ExchangeError(self.id + ' parseOrderBook encountered an unrecognized bidask format: ' + self.json(bidask))
return {
'bids': self.sort_by(bids, 0, True),
'asks': self.sort_by(asks, 0),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'nonce': None,
}
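    # Illustration (not part of the generated file): bybit returns one flat
    # list of side-tagged levels, e.g.
    #   [{'symbol': 'BTCUSD', 'price': '7767.5', 'size': 677956, 'side': 'Buy'},
    #    {'symbol': 'BTCUSD', 'price': '7768', 'size': 330847, 'side': 'Sell'}]
    # which parse_order_book() above splits into bids=[[7767.5, 677956]] and
    # asks=[[7768, 330847]], bids sorted descending and asks ascending by price.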
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.v2PublicGetOrderBookL2(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {symbol: 'BTCUSD', price: '7767.5', size: 677956, side: 'Buy'},
# {symbol: 'BTCUSD', price: '7767', size: 580690, side: 'Buy'},
# {symbol: 'BTCUSD', price: '7766.5', size: 475252, side: 'Buy'},
# {symbol: 'BTCUSD', price: '7768', size: 330847, side: 'Sell'},
# {symbol: 'BTCUSD', price: '7768.5', size: 97159, side: 'Sell'},
# {symbol: 'BTCUSD', price: '7769', size: 6508, side: 'Sell'},
# ],
# time_now: '1583954829.874823'
# }
#
result = self.safe_value(response, 'result', [])
timestamp = self.safe_timestamp(response, 'time_now')
return self.parse_order_book(result, timestamp, 'Buy', 'Sell', 'price', 'size')
async def fetch_balance(self, params={}):
await self.load_markets()
request = {}
coin = self.safe_string(params, 'coin')
code = self.safe_string(params, 'code')
if coin is not None:
request['coin'] = coin
elif code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
response = await self.v2PrivateGetWalletBalance(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: {
# BTC: {
# equity: 0,
# available_balance: 0,
# used_margin: 0,
# order_margin: 0,
# position_margin: 0,
# occ_closing_fee: 0,
# occ_funding_fee: 0,
# wallet_balance: 0,
# realised_pnl: 0,
# unrealised_pnl: 0,
# cum_realised_pnl: 0,
# given_cash: 0,
# service_cash: 0
# }
# },
# time_now: '1583937810.370020',
# rate_limit_status: 119,
# rate_limit_reset_ms: 1583937810367,
# rate_limit: 120
# }
#
result = {
'info': response,
}
balances = self.safe_value(response, 'result', {})
currencyIds = list(balances.keys())
for i in range(0, len(currencyIds)):
currencyId = currencyIds[i]
balance = balances[currencyId]
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'available_balance')
account['used'] = self.safe_string(balance, 'used_margin')
account['total'] = self.safe_string(balance, 'equity')
result[code] = account
return self.parse_balance(result, False)
def parse_order_status(self, status):
statuses = {
# basic orders
'Created': 'open',
'Rejected': 'rejected', # order is triggered but failed upon being placed
'New': 'open',
'PartiallyFilled': 'open',
'Filled': 'closed',
'Cancelled': 'canceled',
'PendingCancel': 'canceling', # the engine has received the cancellation but there is no guarantee that it will be successful
# conditional orders
'Active': 'open', # order is triggered and placed successfully
'Untriggered': 'open', # order waits to be triggered
'Triggered': 'closed', # order is triggered
# 'Cancelled': 'canceled', # order is cancelled
# 'Rejected': 'rejected', # order is triggered but fail to be placed
'Deactivated': 'canceled', # conditional order was cancelled before triggering
}
return self.safe_string(statuses, status, status)
def parse_time_in_force(self, timeInForce):
timeInForces = {
'GoodTillCancel': 'GTC',
'ImmediateOrCancel': 'IOC',
'FillOrKill': 'FOK',
'PostOnly': 'PO',
}
return self.safe_string(timeInForces, timeInForce, timeInForce)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "user_id": 1,
# "order_id": "335fd977-e5a5-4781-b6d0-c772d5bfb95b",
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": 8800,
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "order_status": "Created",
# "last_exec_time": 0,
# "last_exec_price": 0,
# "leaves_qty": 1,
# "cum_exec_qty": 0, # in contracts, where 1 contract = 1 quote currency unit(USD for inverse contracts)
# "cum_exec_value": 0, # in contract's underlying currency(BTC for inverse contracts)
# "cum_exec_fee": 0,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-11-30T11:03:43.452Z",
# "updated_at": "2019-11-30T11:03:43.455Z"
# }
#
# fetchOrder
#
# {
# "user_id" : 599946,
# "symbol" : "BTCUSD",
# "side" : "Buy",
# "order_type" : "Limit",
# "price" : "7948",
# "qty" : 10,
# "time_in_force" : "GoodTillCancel",
# "order_status" : "Filled",
# "ext_fields" : {
# "o_req_num" : -1600687220498,
# "xreq_type" : "x_create"
# },
# "last_exec_time" : "1588150113.968422",
# "last_exec_price" : "7948",
# "leaves_qty" : 0,
# "leaves_value" : "0",
# "cum_exec_qty" : 10,
# "cum_exec_value" : "0.00125817",
# "cum_exec_fee" : "-0.00000031",
# "reject_reason" : "",
# "cancel_type" : "",
# "order_link_id" : "",
# "created_at" : "2020-04-29T08:45:24.399146Z",
# "updated_at" : "2020-04-29T08:48:33.968422Z",
# "order_id" : "dd2504b9-0157-406a-99e1-efa522373944"
# }
#
# conditional order
#
# {
# "user_id":##,
# "symbol":"BTCUSD",
# "side":"Buy",
# "order_type":"Market",
# "price":0,
# "qty":10,
# "time_in_force":"GoodTillCancel",
# "stop_order_type":"Stop",
# "trigger_by":"LastPrice",
# "base_price":11833,
# "order_status":"Untriggered",
# "ext_fields":{
# "stop_order_type":"Stop",
# "trigger_by":"LastPrice",
# "base_price":11833,
# "expected_direction":"Rising",
# "trigger_price":12400,
# "close_on_trigger":true,
# "op_from":"api",
# "remark":"145.53.159.48",
# "o_req_num":0
# },
# "leaves_qty":10,
# "leaves_value":0.00080645,
# "reject_reason":null,
# "cross_seq":-1,
# "created_at":"2020-08-21T09:18:48.000Z",
# "updated_at":"2020-08-21T09:18:48.000Z",
# "stop_px":12400,
# "stop_order_id":"3f3b54b1-3379-42c7-8510-44f4d9915be0"
# }
#
marketId = self.safe_string(order, 'symbol')
market = self.safe_market(marketId, market)
symbol = market['symbol']
feeCurrency = None
timestamp = self.parse8601(self.safe_string(order, 'created_at'))
id = self.safe_string_2(order, 'order_id', 'stop_order_id')
type = self.safe_string_lower(order, 'order_type')
price = self.safe_number(order, 'price')
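        # bybit reports unset prices(market orders) as 0, normalize to None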
if price == 0.0:
price = None
average = self.safe_number(order, 'average_price')
amount = self.safe_number(order, 'qty')
cost = self.safe_number(order, 'cum_exec_value')
filled = self.safe_number(order, 'cum_exec_qty')
remaining = self.safe_number(order, 'leaves_qty')
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
if market is not None:
if marketType == 'linear':
feeCurrency = market['quote']
else:
feeCurrency = market['base']
lastTradeTimestamp = self.safe_timestamp(order, 'last_exec_time')
if lastTradeTimestamp == 0:
lastTradeTimestamp = None
status = self.parse_order_status(self.safe_string_2(order, 'order_status', 'stop_order_status'))
side = self.safe_string_lower(order, 'side')
feeCost = self.safe_number(order, 'cum_exec_fee')
fee = None
if feeCost is not None:
feeCost = abs(feeCost)
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
clientOrderId = self.safe_string(order, 'order_link_id')
if (clientOrderId is not None) and (len(clientOrderId) < 1):
clientOrderId = None
timeInForce = self.parse_time_in_force(self.safe_string(order, 'time_in_force'))
stopPrice = self.safe_number(order, 'stop_px')
postOnly = (timeInForce == 'PO')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': postOnly,
'side': side,
'price': price,
'stopPrice': stopPrice,
'amount': amount,
'cost': cost,
'average': average,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': None,
})
async def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'order_link_id': 'string', # one of order_id, stop_order_id or order_link_id is required
# regular orders ---------------------------------------------
# 'order_id': id, # one of order_id or order_link_id is required for regular orders
# conditional orders ---------------------------------------------
# 'stop_order_id': id, # one of stop_order_id or order_link_id is required for conditional orders
}
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'privateLinearGetOrderSearch' if (marketType == 'linear') else 'v2PrivateGetOrder'
stopOrderId = self.safe_string(params, 'stop_order_id')
if stopOrderId is None:
orderLinkId = self.safe_string(params, 'order_link_id')
if orderLinkId is None:
request['order_id'] = id
else:
method = 'privateLinearGetStopOrderSearch' if (marketType == 'linear') else 'v2PrivateGetStopOrder'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "OK",
# "ext_code": "",
# "ext_info": "",
# "result": {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Sell",
# "order_type": "Limit",
# "price": "8083",
# "qty": 10,
# "time_in_force": "GoodTillCancel",
# "order_status": "New",
# "ext_fields": {"o_req_num": -308787, "xreq_type": "x_create", "xreq_offset": 4154640},
# "leaves_qty": 10,
# "leaves_value": "0.00123716",
# "cum_exec_qty": 0,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-10-21T07:28:19.396246Z",
# "updated_at": "2019-10-21T07:28:19.396246Z",
# "order_id": "efa44157-c355-4a98-b6d6-1d846a936b93"
# },
# "time_now": "1571651135.291930",
# "rate_limit_status": 99, # The remaining number of accesses in one minute
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "OK",
# "ext_code": "",
# "ext_info": "",
# "result": {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": "8000",
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "order_status": "Untriggered",
# "ext_fields": {},
# "leaves_qty": 1,
# "leaves_value": "0.00013333",
# "cum_exec_qty": 0,
# "cum_exec_value": null,
# "cum_exec_fee": null,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-12-27T19:56:24.052194Z",
# "updated_at": "2019-12-27T19:56:24.052194Z",
# "order_id": "378a1bbc-a93a-4e75-87f4-502ea754ba36"
# },
# "time_now": "1577476584.386958",
# "rate_limit_status": 99,
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
result = self.safe_value(response, 'result')
return self.parse_order(result, market)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
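        # inverse contracts are sized in whole USD-denominated contracts,
        # linear(USDT) contracts in fractional base-currency amounts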
qty = self.amount_to_precision(symbol, amount)
if market['inverse']:
qty = int(qty)
else:
qty = float(qty)
request = {
# orders ---------------------------------------------------------
'side': self.capitalize(side),
'symbol': market['id'],
'order_type': self.capitalize(type),
'qty': qty, # order quantity in USD, integer only
# 'price': float(self.price_to_precision(symbol, price)), # required for limit orders
'time_in_force': 'GoodTillCancel', # ImmediateOrCancel, FillOrKill, PostOnly
# 'take_profit': 123.45, # take profit price, only take effect upon opening the position
# 'stop_loss': 123.45, # stop loss price, only take effect upon opening the position
# 'reduce_only': False, # reduce only, required for linear orders
# when creating a closing order, bybit recommends a True value for
# close_on_trigger to avoid failing due to insufficient available margin
            # 'close_on_trigger': False, # required for linear orders
# 'order_link_id': 'string', # unique client order id, max 36 characters
# conditional orders ---------------------------------------------
# base_price is used to compare with the value of stop_px, to decide
# whether your conditional order will be triggered by crossing trigger
# price from upper side or lower side, mainly used to identify the
# expected direction of the current conditional order
# 'base_price': 123.45, # required for conditional orders
# 'stop_px': 123.45, # trigger price, required for conditional orders
# 'trigger_by': 'LastPrice', # IndexPrice, MarkPrice
}
priceIsRequired = False
if type == 'limit':
priceIsRequired = True
if priceIsRequired:
if price is not None:
request['price'] = float(self.price_to_precision(symbol, price))
else:
raise ArgumentsRequired(self.id + ' createOrder() requires a price argument for a ' + type + ' order')
clientOrderId = self.safe_string_2(params, 'order_link_id', 'clientOrderId')
if clientOrderId is not None:
request['order_link_id'] = clientOrderId
params = self.omit(params, ['order_link_id', 'clientOrderId'])
stopPx = self.safe_value_2(params, 'stop_px', 'stopPrice')
basePrice = self.safe_value(params, 'base_price')
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'privateLinearPostOrderCreate' if (marketType == 'linear') else 'v2PrivatePostOrderCreate'
if marketType == 'linear':
request['reduce_only'] = False
request['close_on_trigger'] = False
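        # a stop_px param routes the request to the conditional(stop) order
        # endpoints, which also require base_price to infer the trigger direction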
if stopPx is not None:
if basePrice is None:
raise ArgumentsRequired(self.id + ' createOrder() requires both the stop_px and base_price params for a conditional ' + type + ' order')
else:
method = 'privateLinearPostStopOrderCreate' if (marketType == 'linear') else 'v2PrivatePostStopOrderCreate'
request['stop_px'] = float(self.price_to_precision(symbol, stopPx))
request['base_price'] = float(self.price_to_precision(symbol, basePrice))
params = self.omit(params, ['stop_px', 'stopPrice', 'base_price'])
elif basePrice is not None:
raise ArgumentsRequired(self.id + ' createOrder() requires both the stop_px and base_price params for a conditional ' + type + ' order')
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "OK",
# "ext_code": "",
# "ext_info": "",
# "result": {
# "user_id": 1,
# "order_id": "335fd977-e5a5-4781-b6d0-c772d5bfb95b",
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": 8800,
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "order_status": "Created",
# "last_exec_time": 0,
# "last_exec_price": 0,
# "leaves_qty": 1,
# "cum_exec_qty": 0,
# "cum_exec_value": 0,
# "cum_exec_fee": 0,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-11-30T11:03:43.452Z",
# "updated_at": "2019-11-30T11:03:43.455Z"
# },
# "time_now": "1575111823.458705",
# "rate_limit_status": 98,
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": 8000,
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "stop_order_type": "Stop",
# "trigger_by": "LastPrice",
# "base_price": 7000,
# "order_status": "Untriggered",
# "ext_fields": {
# "stop_order_type": "Stop",
# "trigger_by": "LastPrice",
# "base_price": 7000,
# "expected_direction": "Rising",
# "trigger_price": 7500,
# "op_from": "api",
# "remark": "127.0.01",
# "o_req_num": 0
# },
# "leaves_qty": 1,
# "leaves_value": 0.00013333,
# "reject_reason": null,
# "cross_seq": -1,
# "created_at": "2019-12-27T12:48:24.000Z",
# "updated_at": "2019-12-27T12:48:24.000Z",
# "stop_px": 7500,
# "stop_order_id": "a85cd1c0-a9a4-49d3-a1bd-bab5ebe946d5"
# },
# "ext_info": null,
# "time_now": "1577450904.327654",
# "rate_limit_status": 99,
# "rate_limit_reset_ms": 1577450904335,
# "rate_limit": "100"
# }
#
result = self.safe_value(response, 'result')
return self.parse_order(result, market)
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
if symbol is None:
            raise ArgumentsRequired(self.id + ' editOrder() requires a symbol argument')
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
await self.load_markets()
market = self.market(symbol)
request = {
# 'order_id': id, # only for non-conditional orders
'symbol': market['id'],
# 'p_r_qty': self.amount_to_precision(symbol, amount), # new order quantity, optional
            # 'p_r_price': self.price_to_precision(symbol, price), # new order price, optional
# ----------------------------------------------------------------
# conditional orders
# 'stop_order_id': id, # only for conditional orders
# 'p_r_trigger_price': 123.45, # new trigger price also known as stop_px
}
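        # regular and conditional orders are amended through different
        # endpoints, selected by the presence of stop_order_id in params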
method = 'privateLinearPostOrderReplace' if (marketType == 'linear') else 'v2PrivatePostOrderReplace'
stopOrderId = self.safe_string(params, 'stop_order_id')
if stopOrderId is not None:
method = 'privateLinearPostStopOrderReplace' if (marketType == 'linear') else 'v2PrivatePostStopOrderReplace'
request['stop_order_id'] = stopOrderId
params = self.omit(params, ['stop_order_id'])
else:
request['order_id'] = id
if amount is not None:
qty = self.amount_to_precision(symbol, amount)
if market['inverse']:
qty = int(qty)
else:
qty = float(qty)
request['p_r_qty'] = qty
if price is not None:
request['p_r_price'] = float(self.price_to_precision(symbol, price))
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {"order_id": "efa44157-c355-4a98-b6d6-1d846a936b93"},
# "time_now": "1539778407.210858",
# "rate_limit_status": 99, # remaining number of accesses in one minute
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {"stop_order_id": "378a1bbc-a93a-4e75-87f4-502ea754ba36"},
# "ext_info": null,
# "time_now": "1577475760.604942",
# "rate_limit_status": 96,
# "rate_limit_reset_ms": 1577475760612,
# "rate_limit": "100"
# }
#
result = self.safe_value(response, 'result', {})
return {
'info': response,
'id': self.safe_string_2(result, 'order_id', 'stop_order_id'),
'order_id': self.safe_string(result, 'order_id'),
'stop_order_id': self.safe_string(result, 'stop_order_id'),
}
async def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'order_link_id': 'string', # one of order_id, stop_order_id or order_link_id is required
# regular orders ---------------------------------------------
# 'order_id': id, # one of order_id or order_link_id is required for regular orders
# conditional orders ---------------------------------------------
# 'stop_order_id': id, # one of stop_order_id or order_link_id is required for conditional orders
}
marketTypes = self.safe_value(self.options, 'marketTypes', {})
        marketType = self.safe_string(marketTypes, symbol)
method = 'privateLinearPostOrderCancel' if (marketType == 'linear') else 'v2PrivatePostOrderCancel'
stopOrderId = self.safe_string(params, 'stop_order_id')
if stopOrderId is None:
orderLinkId = self.safe_string(params, 'order_link_id')
if orderLinkId is None:
request['order_id'] = id
else:
method = 'privateLinearPostStopOrderCancel' if (marketType == 'linear') else 'v2PrivatePostStopOrderCancel'
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', {})
return self.parse_order(result, market)
async def cancel_all_orders(self, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelAllOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
options = self.safe_value(self.options, 'cancelAllOrders', {})
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
defaultMethod = 'privateLinearPostOrderCancelAll' if (marketType == 'linear') else 'v2PrivatePostOrderCancelAll'
method = self.safe_string(options, 'method', defaultMethod)
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', [])
return self.parse_orders(result, market)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'order_id': 'string'
# 'order_link_id': 'string', # unique client order id, max 36 characters
# 'symbol': market['id'], # default BTCUSD
# 'order': 'desc', # asc
# 'page': 1,
# 'limit': 20, # max 50
# 'order_status': 'Created,New'
# conditional orders ---------------------------------------------
# 'stop_order_id': 'string',
# 'stop_order_status': 'Untriggered',
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['limit'] = limit
options = self.safe_value(self.options, 'fetchOrders', {})
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
defaultMethod = 'privateLinearGetOrderList' if (marketType == 'linear') else 'v2PrivateGetOrderList'
query = params
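        # conditional(stop) orders are listed on a separate endpoint, selected
        # when stop_order_id or stop_order_status is present in params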
if ('stop_order_id' in params) or ('stop_order_status' in params):
            stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is not None:
if isinstance(stopOrderStatus, list):
stopOrderStatus = ','.join(stopOrderStatus)
request['stop_order_status'] = stopOrderStatus
query = self.omit(params, 'stop_order_status')
defaultMethod = 'privateLinearGetStopOrderList' if (marketType == 'linear') else 'v2PrivateGetStopOrderList'
method = self.safe_string(options, 'method', defaultMethod)
response = await getattr(self, method)(self.extend(request, query))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "current_page": 1,
# "last_page": 6,
# "data": [
# {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Sell",
# "order_type": "Market",
# "price": 7074,
# "qty": 2,
# "time_in_force": "ImmediateOrCancel",
# "order_status": "Filled",
# "ext_fields": {
# "close_on_trigger": True,
# "orig_order_type": "BLimit",
# "prior_x_req_price": 5898.5,
# "op_from": "pc",
# "remark": "127.0.0.1",
# "o_req_num": -34799032763,
# "xreq_type": "x_create"
# },
# "last_exec_time": "1577448481.696421",
# "last_exec_price": 7070.5,
# "leaves_qty": 0,
# "leaves_value": 0,
# "cum_exec_qty": 2,
# "cum_exec_value": 0.00028283,
# "cum_exec_fee": 0.00002,
# "reject_reason": "NoError",
# "order_link_id": "",
# "created_at": "2019-12-27T12:08:01.000Z",
# "updated_at": "2019-12-27T12:08:01.000Z",
# "order_id": "f185806b-b801-40ff-adec-52289370ed62"
# }
# ]
# },
# "ext_info": null,
# "time_now": "1577448922.437871",
# "rate_limit_status": 98,
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "current_page": 1,
# "last_page": 1,
# "data": [
# {
# "user_id": 1,
# "stop_order_status": "Untriggered",
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": 8000,
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "stop_order_type": "Stop",
# "trigger_by": "LastPrice",
# "base_price": 7000,
# "order_link_id": "",
# "created_at": "2019-12-27T12:48:24.000Z",
# "updated_at": "2019-12-27T12:48:24.000Z",
# "stop_px": 7500,
# "stop_order_id": "a85cd1c0-a9a4-49d3-a1bd-bab5ebe946d5"
# },
# ]
# },
# "ext_info": null,
# "time_now": "1577451658.755468",
# "rate_limit_status": 599,
# "rate_limit_reset_ms": 1577451658762,
# "rate_limit": 600
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_orders(data, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
defaultStatuses = [
'Rejected',
'Filled',
'Cancelled',
# conditional orders
# 'Active',
# 'Triggered',
# 'Cancelled',
# 'Rejected',
# 'Deactivated',
]
options = self.safe_value(self.options, 'fetchClosedOrders', {})
status = self.safe_value(options, 'order_status', defaultStatuses)
if isinstance(status, list):
status = ','.join(status)
request = {}
stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is None:
request['order_status'] = status
else:
request['stop_order_status'] = stopOrderStatus
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
defaultStatuses = [
'Created',
'New',
'PartiallyFilled',
'PendingCancel',
# conditional orders
# 'Untriggered',
]
options = self.safe_value(self.options, 'fetchOpenOrders', {})
status = self.safe_value(options, 'order_status', defaultStatuses)
if isinstance(status, list):
status = ','.join(status)
request = {}
stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is None:
request['order_status'] = status
else:
request['stop_order_status'] = stopOrderStatus
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
request = {
'order_id': id,
}
return await self.fetch_my_trades(symbol, since, limit, self.extend(request, params))
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'order_id': 'f185806b-b801-40ff-adec-52289370ed62', # if not provided will return user's trading records
# 'symbol': market['id'],
            # 'start_time': since, # in milliseconds
            # 'page': 1,
            # 'limit': 20, # max 50
}
market = None
if symbol is None:
orderId = self.safe_string(params, 'order_id')
if orderId is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument or an order_id param')
else:
request['order_id'] = orderId
params = self.omit(params, 'order_id')
else:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['start_time'] = since
if limit is not None:
request['limit'] = limit # default 20, max 50
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'privateLinearGetTradeExecutionList' if (marketType == 'linear') else 'v2PrivateGetExecutionList'
response = await getattr(self, method)(self.extend(request, params))
#
# inverse
#
# {
# "ret_code": 0,
# "ret_msg": "OK",
# "ext_code": "",
# "ext_info": "",
# "result": {
        #         "order_id": "Abandoned!!", # Abandoned!!
# "trade_list": [
# {
# "closed_size": 0,
# "cross_seq": 277136382,
# "exec_fee": "0.0000001",
# "exec_id": "256e5ef8-abfe-5772-971b-f944e15e0d68",
# "exec_price": "8178.5",
# "exec_qty": 1,
# "exec_time": "1571676941.70682",
# "exec_type": "Trade", #Exec Type Enum
# "exec_value": "0.00012227",
# "fee_rate": "0.00075",
# "last_liquidity_ind": "RemovedLiquidity", #Liquidity Enum
# "leaves_qty": 0,
# "nth_fill": 2,
# "order_id": "7ad50cb1-9ad0-4f74-804b-d82a516e1029",
# "order_link_id": "",
# "order_price": "8178",
# "order_qty": 1,
# "order_type": "Market", #Order Type Enum
# "side": "Buy", #Side Enum
# "symbol": "BTCUSD", #Symbol Enum
# "user_id": 1
# }
# ]
# },
# "time_now": "1577483699.281488",
# "rate_limit_status": 118,
# "rate_limit_reset_ms": 1577483699244737,
# "rate_limit": 120
# }
#
# linear
#
# {
# "ret_code":0,
# "ret_msg":"OK",
# "ext_code":"",
# "ext_info":"",
# "result":{
# "current_page":1,
# "data":[
# {
# "order_id":"b59418ec-14d4-4ef9-b9f4-721d5d576974",
# "order_link_id":"",
# "side":"Sell",
# "symbol":"BTCUSDT",
# "exec_id":"0327284d-faec-5191-bd89-acc5b4fafda9",
# "price":0.5,
# "order_price":0.5,
# "order_qty":0.01,
# "order_type":"Market",
# "fee_rate":0.00075,
# "exec_price":9709.5,
# "exec_type":"Trade",
# "exec_qty":0.01,
# "exec_fee":0.07282125,
# "exec_value":97.095,
# "leaves_qty":0,
# "closed_size":0.01,
# "last_liquidity_ind":"RemovedLiquidity",
# "trade_time":1591648052,
# "trade_time_ms":1591648052861
# }
# ]
# },
# "time_now":"1591736501.979264",
# "rate_limit_status":119,
# "rate_limit_reset_ms":1591736501974,
# "rate_limit":120
# }
#
result = self.safe_value(response, 'result', {})
trades = self.safe_value_2(result, 'trade_list', 'data', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'coin': currency['id'],
# 'currency': currency['id'], # alias
            # 'start_date': self.ymd(since),
            # 'end_date': self.ymd(till),
'wallet_fund_type': 'Deposit', # Deposit, Withdraw, RealisedPNL, Commission, Refund, Prize, ExchangeOrderWithdraw, ExchangeOrderDeposit
# 'page': 1,
# 'limit': 20, # max 50
}
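        # deposits share the wallet fund records endpoint with fetchLedger,
        # here pre-filtered by wallet_fund_type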
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['start_date'] = self.ymd(since)
if limit is not None:
request['limit'] = limit
response = await self.v2PrivateGetWalletFundRecords(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "data": [
# {
# "id": 234467,
# "user_id": 1,
# "coin": "BTC",
# "wallet_id": 27913,
# "type": "Realized P&L",
# "amount": "-0.00000006",
# "tx_id": "",
# "address": "BTCUSD",
# "wallet_balance": "0.03000330",
# "exec_time": "2019-12-09T00:00:25.000Z",
# "cross_seq": 0
# }
# ]
# },
# "ext_info": null,
# "time_now": "1577481867.115552",
# "rate_limit_status": 119,
# "rate_limit_reset_ms": 1577481867122,
# "rate_limit": 120
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_transactions(data, currency, since, limit)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'coin': currency['id'],
            # 'start_date': self.ymd(since),
            # 'end_date': self.ymd(till),
# 'status': 'Pending', # ToBeConfirmed, UnderReview, Pending, Success, CancelByUser, Reject, Expire
# 'page': 1,
# 'limit': 20, # max 50
}
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['start_date'] = self.ymd(since)
if limit is not None:
request['limit'] = limit
response = await self.v2PrivateGetWalletWithdrawList(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "data": [
# {
# "id": 137,
# "user_id": 1,
# "coin": "XRP", # Coin Enum
# "status": "Pending", # Withdraw Status Enum
# "amount": "20.00000000",
# "fee": "0.25000000",
# "address": "rH7H595XYEVTEHU2FySYsWnmfACBnZS9zM",
# "tx_id": "",
# "submited_at": "2019-06-11T02:20:24.000Z",
# "updated_at": "2019-06-11T02:20:24.000Z"
# },
# ],
# "current_page": 1,
# "last_page": 1
# },
# "ext_info": null,
# "time_now": "1577482295.125488",
# "rate_limit_status": 119,
# "rate_limit_reset_ms": 1577482295132,
# "rate_limit": 120
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_transactions(data, currency, since, limit, params)
def parse_transaction_status(self, status):
statuses = {
'ToBeConfirmed': 'pending',
'UnderReview': 'pending',
'Pending': 'pending',
'Success': 'ok',
'CancelByUser': 'canceled',
'Reject': 'rejected',
'Expire': 'expired',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchWithdrawals
#
# {
# "id": 137,
# "user_id": 1,
# "coin": "XRP", # Coin Enum
# "status": "Pending", # Withdraw Status Enum
# "amount": "20.00000000",
# "fee": "0.25000000",
# "address": "rH7H595XYEVTEHU2FySYsWnmfACBnZS9zM",
# "tx_id": "",
# "submited_at": "2019-06-11T02:20:24.000Z",
# "updated_at": "2019-06-11T02:20:24.000Z"
# }
#
# fetchDeposits ledger entries
#
# {
# "id": 234467,
# "user_id": 1,
# "coin": "BTC",
# "wallet_id": 27913,
# "type": "Realized P&L",
# "amount": "-0.00000006",
# "tx_id": "",
# "address": "BTCUSD",
# "wallet_balance": "0.03000330",
# "exec_time": "2019-12-09T00:00:25.000Z",
# "cross_seq": 0
# }
#
currencyId = self.safe_string(transaction, 'coin')
code = self.safe_currency_code(currencyId, currency)
timestamp = self.parse8601(self.safe_string_2(transaction, 'submited_at', 'exec_time'))
updated = self.parse8601(self.safe_string(transaction, 'updated_at'))
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
address = self.safe_string(transaction, 'address')
feeCost = self.safe_number(transaction, 'fee')
type = self.safe_string_lower(transaction, 'type')
fee = None
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': code,
}
return {
'info': transaction,
'id': self.safe_string(transaction, 'id'),
'txid': self.safe_string(transaction, 'tx_id'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'addressTo': None,
'addressFrom': None,
'tag': None,
'tagTo': None,
'tagFrom': None,
'type': type,
'amount': self.safe_number(transaction, 'amount'),
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
async def fetch_ledger(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'coin': currency['id'],
# 'currency': currency['id'], # alias
            # 'start_date': self.ymd(since),
            # 'end_date': self.ymd(till),
# 'wallet_fund_type': 'Deposit', # Withdraw, RealisedPNL, Commission, Refund, Prize, ExchangeOrderWithdraw, ExchangeOrderDeposit
# 'page': 1,
# 'limit': 20, # max 50
}
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['start_date'] = self.ymd(since)
if limit is not None:
request['limit'] = limit
response = await self.v2PrivateGetWalletFundRecords(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "data": [
# {
# "id": 234467,
# "user_id": 1,
# "coin": "BTC",
# "wallet_id": 27913,
# "type": "Realized P&L",
# "amount": "-0.00000006",
# "tx_id": "",
# "address": "BTCUSD",
# "wallet_balance": "0.03000330",
# "exec_time": "2019-12-09T00:00:25.000Z",
# "cross_seq": 0
# }
# ]
# },
# "ext_info": null,
# "time_now": "1577481867.115552",
# "rate_limit_status": 119,
# "rate_limit_reset_ms": 1577481867122,
# "rate_limit": 120
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_ledger(data, currency, since, limit)
def parse_ledger_entry(self, item, currency=None):
#
# {
# "id": 234467,
# "user_id": 1,
# "coin": "BTC",
# "wallet_id": 27913,
# "type": "Realized P&L",
# "amount": "-0.00000006",
# "tx_id": "",
# "address": "BTCUSD",
# "wallet_balance": "0.03000330",
# "exec_time": "2019-12-09T00:00:25.000Z",
# "cross_seq": 0
# }
#
currencyId = self.safe_string(item, 'coin')
code = self.safe_currency_code(currencyId, currency)
amount = self.safe_number(item, 'amount')
after = self.safe_number(item, 'wallet_balance')
direction = 'out' if (amount < 0) else 'in'
before = None
        if after is not None and amount is not None:
            # the endpoint only reports the post-entry balance, so recover the
            # pre-entry balance by subtracting the signed amount from it
            before = self.sum(after, -amount)
timestamp = self.parse8601(self.safe_string(item, 'exec_time'))
type = self.parse_ledger_entry_type(self.safe_string(item, 'type'))
id = self.safe_string(item, 'id')
referenceId = self.safe_string(item, 'tx_id')
return {
'id': id,
'currency': code,
'account': self.safe_string(item, 'wallet_id'),
'referenceAccount': None,
'referenceId': referenceId,
'status': None,
'amount': amount,
'before': before,
'after': after,
'fee': None,
'direction': direction,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'type': type,
'info': item,
}
def parse_ledger_entry_type(self, type):
types = {
'Deposit': 'transaction',
'Withdraw': 'transaction',
'RealisedPNL': 'trade',
'Commission': 'fee',
'Refund': 'cashback',
'Prize': 'prize', # ?
'ExchangeOrderWithdraw': 'transaction',
'ExchangeOrderDeposit': 'transaction',
}
return self.safe_string(types, type, type)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.implode_params(self.urls['api'], {'hostname': self.hostname})
type = self.safe_string(api, 0)
section = self.safe_string(api, 1)
request = '/' + type + '/' + section + '/' + path
# public v2
if section == 'public':
if params:
request += '?' + self.rawencode(params)
elif type == 'public':
if params:
request += '?' + self.rawencode(params)
else:
self.check_required_credentials()
timestamp = self.nonce()
query = self.extend(params, {
'api_key': self.apiKey,
'recv_window': self.options['recvWindow'],
'timestamp': timestamp,
})
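            # bybit expects the parameters sorted alphabetically and signed
            # with HMAC-SHA256 over the raw(unencoded) query string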
sortedQuery = self.keysort(query)
auth = self.rawencode(sortedQuery)
signature = self.hmac(self.encode(auth), self.encode(self.secret))
if method == 'POST':
body = self.json(self.extend(query, {
'sign': signature,
}))
headers = {
'Content-Type': 'application/json',
}
else:
request += '?' + self.urlencode(sortedQuery) + '&sign=' + signature
url += request
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if not response:
return # fallback to default error handler
#
# {
# ret_code: 10001,
# ret_msg: 'ReadMapCB: expect {or n, but found \u0000, error ' +
# 'found in #0 byte of ...||..., bigger context ' +
# '...||...',
# ext_code: '',
# ext_info: '',
# result: null,
# time_now: '1583934106.590436'
# }
#
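        # a ret_code of '0' means success, anything else maps to an exception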
errorCode = self.safe_string(response, 'ret_code')
if errorCode != '0':
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
raise ExchangeError(feedback) # unknown message
async def fetch_positions(self, symbols=None, params={}):
await self.load_markets()
request = {}
if isinstance(symbols, list):
length = len(symbols)
if length != 1:
                raise ArgumentsRequired(self.id + ' fetchPositions() takes exactly one symbol')
request['symbol'] = self.market_id(symbols[0])
defaultType = self.safe_string(self.options, 'defaultType', 'linear')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
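        # positions are split across three endpoints by contract type:
        # linear(USDT), inverse(coin-margined) and inverse futures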
response = None
if type == 'linear':
response = await self.privateLinearGetPositionList(self.extend(request, params))
elif type == 'inverse':
response = await self.v2PrivateGetPositionList(self.extend(request, params))
elif type == 'inverseFuture':
response = await self.futuresPrivateGetPositionList(self.extend(request, params))
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [] or {} depending on the request
# }
return self.safe_value(response, 'result')
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import InvalidNonce
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class bybit(Exchange):
def describe(self):
return self.deep_extend(super(bybit, self).describe(), {
'id': 'bybit',
'name': 'Bybit',
'countries': ['VG'], 'version': 'v2',
'userAgent': None,
'rateLimit': 100,
'hostname': 'bybit.com', 'has': {
'cancelOrder': True,
'CORS': True,
'cancelAllOrders': True,
'createOrder': True,
'editOrder': True,
'fetchBalance': True,
'fetchClosedOrders': True,
'fetchDeposits': True,
'fetchLedger': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchOrderTrades': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTransactions': False,
'fetchWithdrawals': True,
'fetchPositions': True,
},
'timeframes': {
'1m': '1',
'3m': '3',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'2h': '120',
'4h': '240',
'6h': '360',
'12h': '720',
'1d': 'D',
'1w': 'W',
'1M': 'M',
'1y': 'Y',
},
'urls': {
'test': 'https://api-testnet.{hostname}',
'logo': 'https://user-images.githubusercontent.com/51840849/76547799-daff5b80-649e-11ea-87fb-3be9bac08954.jpg',
'api': 'https://api.{hostname}',
'www': 'https://www.bybit.com',
'doc': [
'https://bybit-exchange.github.io/docs/inverse/',
'https://bybit-exchange.github.io/docs/linear/',
'https://github.com/bybit-exchange',
],
'fees': 'https://help.bybit.com/hc/en-us/articles/360039261154',
'referral': 'https://www.bybit.com/app/register?ref=X7Prm',
},
'api': {
'futures': {
'private': {
'get': [
'position/list',
'order/list',
'order',
'stop-order/list',
'stop-order',
'execution/list',
'trade/closed-pnl/list',
],
'post': [
'order/create',
'order/cancel',
'order/cancelAll',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancelAll',
'stop-order/replace',
'position/change-position-margin',
'position/trading-stop',
'position/leverage/save',
'position/switch-mode',
'position/switch-isolated',
],
},
},
'v2': {
'public': {
'get': [
'orderBook/L2',
'kline/list',
'tickers',
'trading-records',
'symbols',
'liq-records',
'mark-price-kline',
'index-price-kline',
'premium-index-kline',
'open-interest',
'big-deal',
'account-ratio',
'time',
'announcement',
],
},
'private': {
'get': [
'order/list',
'order',
'stop-order/list',
'stop-order',
'position/list',
'execution/list',
'trade/closed-pnl/list',
'funding/prev-funding-rate',
'funding/prev-funding',
'funding/predicted-funding',
'account/api-key',
'account/lcp',
'wallet/balance',
'wallet/fund/records',
'wallet/withdraw/list',
'exchange-order/list',
],
'post': [
'order/create',
'order/cancel',
'order/cancelAll',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancelAll',
'stop-order/replace',
'position/change-position-margin',
'position/trading-stop',
'position/leverage/save',
],
},
},
'public': {
'linear': {
'get': [
'kline',
'recent-trading-records',
'funding/prev-funding-rate',
'mark-price-kline',
'index-price-kline',
'premium-index-kline',
'risk-limit',
],
},
},
'private': {
'linear': {
'get': [
'order/list',
'order/search',
'stop-order/list',
'stop-order/search',
'position/list',
'trade/execution/list',
'trade/closed-pnl/list',
'funding/predicted-funding',
'funding/prev-funding',
],
'post': [
'order/create',
'order/cancel',
'order/cancel-all',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancel-all',
'stop-order/replace',
'position/set-auto-add-margin',
'position/switch-isolated',
'tpsl/switch-mode',
'position/add-margin',
'position/set-leverage',
'position/trading-stop',
],
},
},
'openapi': {
'wallet': {
'get': [
'risk-limit/list',
],
'post': [
'risk-limit',
],
},
},
},
'httpExceptions': {
'403': RateLimitExceeded, },
'exceptions': {
'exact': {
'10001': BadRequest, '10002': InvalidNonce, '10003': AuthenticationError, '10004': AuthenticationError, '10005': PermissionDenied, '10006': RateLimitExceeded, '10007': AuthenticationError, '10010': PermissionDenied, '10017': BadRequest, '20001': OrderNotFound, '20003': InvalidOrder, '20004': InvalidOrder, '20005': InvalidOrder, '20006': InvalidOrder, '20007': InvalidOrder, '20008': InvalidOrder, '20009': InvalidOrder, '20010': InvalidOrder, '20011': InvalidOrder, '20012': InvalidOrder, '20013': InvalidOrder, '20014': InvalidOrder, '20015': InvalidOrder, '20016': InvalidOrder, '20017': InvalidOrder, '20018': InvalidOrder, '20019': InvalidOrder, '20020': InvalidOrder, '20021': InvalidOrder, '20022': BadRequest, '20023': BadRequest, '20031': BadRequest, '20070': BadRequest, '20071': BadRequest, '20084': BadRequest, '30001': BadRequest, '30003': InvalidOrder, '30004': InvalidOrder, '30005': InvalidOrder, '30007': InvalidOrder, '30008': InvalidOrder, '30009': ExchangeError, '30010': InsufficientFunds, '30011': PermissionDenied, '30012': PermissionDenied, '30013': PermissionDenied, '30014': InvalidOrder, '30015': InvalidOrder, '30016': ExchangeError, '30017': InvalidOrder, '30018': InvalidOrder, '30019': InvalidOrder, '30020': InvalidOrder, '30021': InvalidOrder, '30022': InvalidOrder, '30023': InvalidOrder, '30024': InvalidOrder, '30025': InvalidOrder, '30026': InvalidOrder, '30027': InvalidOrder, '30028': InvalidOrder, '30029': InvalidOrder, '30030': InvalidOrder, '30031': InsufficientFunds, '30032': InvalidOrder, '30033': RateLimitExceeded, '30034': OrderNotFound, '30035': RateLimitExceeded, '30036': ExchangeError, '30037': InvalidOrder, '30041': ExchangeError, '30042': InsufficientFunds, '30043': PermissionDenied, '30044': PermissionDenied, '30045': PermissionDenied, '30049': InsufficientFunds, '30050': ExchangeError, '30051': ExchangeError, '30052': ExchangeError, '30054': ExchangeError, '30057': ExchangeError, '30063': ExchangeError, '30067': InsufficientFunds, '30068': ExchangeError, '34026': ExchangeError, },
'broad': {
'unknown orderInfo': OrderNotFound, 'invalid api_key': AuthenticationError, },
},
'precisionMode': TICK_SIZE,
'options': {
'marketTypes': {
'BTC/USDT': 'linear',
'BCH/USDT': 'linear',
'ETH/USDT': 'linear',
'LTC/USDT': 'linear',
'XTZ/USDT': 'linear',
'LINK/USDT': 'linear',
'ADA/USDT': 'linear',
'DOT/USDT': 'linear',
'UNI/USDT': 'linear',
},
'defaultType': 'linear', 'code': 'BTC',
'cancelAllOrders': {
},
'recvWindow': 5 * 1000, 'timeDifference': 0, 'adjustForTimeDifference': False, },
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'taker': 0.00075,
'maker': -0.00025,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {},
'deposit': {},
},
},
})
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
async def load_time_difference(self, params={}):
serverTime = await self.fetch_time(params)
after = self.milliseconds()
self.options['timeDifference'] = after - serverTime
return self.options['timeDifference']
async def fetch_time(self, params={}):
response = await self.v2PublicGetTime(params)
return self.safe_timestamp(response, 'time_now')
async def fetch_markets(self, params={}):
if self.options['adjustForTimeDifference']:
await self.load_time_difference()
response = await self.v2PublicGetSymbols(params)
markets = self.safe_value(response, 'result', [])
options = self.safe_value(self.options, 'fetchMarkets', {})
linearQuoteCurrencies = self.safe_value(options, 'linear', {'USDT': True})
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string_2(market, 'name', 'symbol')
baseId = self.safe_string(market, 'base_currency')
quoteId = self.safe_string(market, 'quote_currency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
linear = (quote in linearQuoteCurrencies)
inverse = not linear
symbol = base + '/' + quote
baseQuote = base + quote
if baseQuote != id:
symbol = id
lotSizeFilter = self.safe_value(market, 'lot_size_filter', {})
priceFilter = self.safe_value(market, 'price_filter', {})
precision = {
'amount': self.safe_number(lotSizeFilter, 'qty_step'),
'price': self.safe_number(priceFilter, 'tick_size'),
}
status = self.safe_string(market, 'status')
active = None
if status is not None:
active = (status == 'Trading')
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'active': active,
'precision': precision,
'taker': self.safe_number(market, 'taker_fee'),
'maker': self.safe_number(market, 'maker_fee'),
'type': 'future',
'spot': False,
'future': True,
'option': False,
'linear': linear,
'inverse': inverse,
'limits': {
'amount': {
'min': self.safe_number(lotSizeFilter, 'min_trading_qty'),
'max': self.safe_number(lotSizeFilter, 'max_trading_qty'),
},
'price': {
'min': self.safe_number(priceFilter, 'min_price'),
'max': self.safe_number(priceFilter, 'max_price'),
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
def parse_ticker(self, ticker, market=None):
timestamp = None
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market)
last = self.safe_number(ticker, 'last_price')
open = self.safe_number(ticker, 'prev_price_24h')
percentage = self.safe_number(ticker, 'price_24h_pcnt')
if percentage is not None:
percentage *= 100
change = None
average = None
if (last is not None) and (open is not None):
change = last - open
average = self.sum(open, last) / 2
baseVolume = self.safe_number(ticker, 'turnover_24h')
quoteVolume = self.safe_number(ticker, 'volume_24h')
vwap = self.vwap(baseVolume, quoteVolume)
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high_price_24h'),
'low': self.safe_number(ticker, 'low_price_24h'),
'bid': self.safe_number(ticker, 'bid_price'),
'bidVolume': None,
'ask': self.safe_number(ticker, 'ask_price'),
'askVolume': None,
'vwap': vwap,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.v2PublicGetTickers(self.extend(request, params))
result = self.safe_value(response, 'result', [])
first = self.safe_value(result, 0)
timestamp = self.safe_timestamp(response, 'time_now')
ticker = self.parse_ticker(first, market)
ticker['timestamp'] = timestamp
ticker['datetime'] = self.iso8601(timestamp)
return ticker
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.v2PublicGetTickers(params)
result = self.safe_value(response, 'result', [])
tickers = {}
for i in range(0, len(result)):
ticker = self.parse_ticker(result[i])
symbol = ticker['symbol']
tickers[symbol] = ticker
return self.filter_by_array(tickers, 'symbol', symbols)
def parse_ohlcv(self, ohlcv, market=None):
return [
self.safe_timestamp_2(ohlcv, 'open_time', 'start_at'),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number_2(ohlcv, 'turnover', 'volume'),
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'interval': self.timeframes[timeframe],
}
duration = self.parse_timeframe(timeframe)
now = self.seconds()
if since is None:
if limit is None:
raise ArgumentsRequired(self.id + ' fetchOHLCV() requires a since argument or a limit argument')
else:
request['from'] = now - limit * duration
else:
request['from'] = int(since / 1000)
if limit is not None:
request['limit'] = limit marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'publicLinearGetKline' if (marketType == 'linear') else 'v2PublicGetKlineList'
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', {})
return self.parse_ohlcvs(result, market, timeframe, since, limit)
def parse_trade(self, trade, market=None):
id = self.safe_string_2(trade, 'id', 'exec_id')
marketId = self.safe_string(trade, 'symbol')
market = self.safe_market(marketId, market)
symbol = market['symbol']
amountString = self.safe_string_2(trade, 'qty', 'exec_qty')
priceString = self.safe_string_2(trade, 'exec_price', 'price')
cost = self.safe_number(trade, 'exec_value')
amount = self.parse_number(amountString)
price = self.parse_number(priceString)
if cost is None:
cost = self.parse_number(Precise.string_mul(priceString, amountString))
timestamp = self.parse8601(self.safe_string(trade, 'time'))
if timestamp is None:
timestamp = self.safe_integer(trade, 'trade_time_ms')
side = self.safe_string_lower(trade, 'side')
lastLiquidityInd = self.safe_string(trade, 'last_liquidity_ind')
takerOrMaker = 'maker' if (lastLiquidityInd == 'AddedLiquidity') else 'taker'
feeCost = self.safe_number(trade, 'exec_fee')
fee = None
if feeCost is not None:
feeCurrencyCode = market['base'] if market['inverse'] else market['quote']
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
'rate': self.safe_number(trade, 'fee_rate'),
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': self.safe_string(trade, 'order_id'),
'type': self.safe_string_lower(trade, 'order_type'),
'side': side,
'takerOrMaker': takerOrMaker,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if limit is not None:
request['count'] = limit marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'publicLinearGetRecentTradingRecords' if (marketType == 'linear') else 'v2PublicGetTradingRecords'
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', {})
return self.parse_trades(result, market, since, limit)
def parse_order_book(self, orderbook, timestamp=None, bidsKey='Buy', asksKey='Sell', priceKey='price', amountKey='size'):
bids = []
asks = []
for i in range(0, len(orderbook)):
bidask = orderbook[i]
side = self.safe_string(bidask, 'side')
if side == 'Buy':
bids.append(self.parse_bid_ask(bidask, priceKey, amountKey))
elif side == 'Sell':
asks.append(self.parse_bid_ask(bidask, priceKey, amountKey))
else:
raise ExchangeError(self.id + ' parseOrderBook encountered an unrecognized bidask format: ' + self.json(bidask))
return {
'bids': self.sort_by(bids, 0, True),
'asks': self.sort_by(asks, 0),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'nonce': None,
}
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.v2PublicGetOrderBookL2(self.extend(request, params))
result = self.safe_value(response, 'result', [])
timestamp = self.safe_timestamp(response, 'time_now')
return self.parse_order_book(result, timestamp, 'Buy', 'Sell', 'price', 'size')
async def fetch_balance(self, params={}):
await self.load_markets()
request = {}
coin = self.safe_string(params, 'coin')
code = self.safe_string(params, 'code')
if coin is not None:
request['coin'] = coin
elif code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
response = await self.v2PrivateGetWalletBalance(self.extend(request, params))
result = {
'info': response,
}
balances = self.safe_value(response, 'result', {})
currencyIds = list(balances.keys())
for i in range(0, len(currencyIds)):
currencyId = currencyIds[i]
balance = balances[currencyId]
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'available_balance')
account['used'] = self.safe_string(balance, 'used_margin')
account['total'] = self.safe_string(balance, 'equity')
result[code] = account
return self.parse_balance(result, False)
def parse_order_status(self, status):
statuses = {
'Created': 'open',
'Rejected': 'rejected', 'New': 'open',
'PartiallyFilled': 'open',
'Filled': 'closed',
'Cancelled': 'canceled',
'PendingCancel': 'canceling', 'Active': 'open', 'Untriggered': 'open', 'Triggered': 'closed', 'Deactivated': 'canceled', }
return self.safe_string(statuses, status, status)
def parse_time_in_force(self, timeInForce):
timeInForces = {
'GoodTillCancel': 'GTC',
'ImmediateOrCancel': 'IOC',
'FillOrKill': 'FOK',
'PostOnly': 'PO',
}
return self.safe_string(timeInForces, timeInForce, timeInForce)
def parse_order(self, order, market=None):
# "cum_exec_fee": 0,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-11-30T11:03:43.452Z",
# "updated_at": "2019-11-30T11:03:43.455Z"
# }
#
# fetchOrder
#
# {
# "user_id" : 599946,
# "symbol" : "BTCUSD",
# "side" : "Buy",
# "order_type" : "Limit",
# "price" : "7948",
# "qty" : 10,
# "time_in_force" : "GoodTillCancel",
# "order_status" : "Filled",
# "ext_fields" : {
# "o_req_num" : -1600687220498,
# "xreq_type" : "x_create"
# },
# "last_exec_time" : "1588150113.968422",
# "last_exec_price" : "7948",
# "leaves_qty" : 0,
# "leaves_value" : "0",
# "cum_exec_qty" : 10,
# "cum_exec_value" : "0.00125817",
# "cum_exec_fee" : "-0.00000031",
# "reject_reason" : "",
# "cancel_type" : "",
# "order_link_id" : "",
# "created_at" : "2020-04-29T08:45:24.399146Z",
# "updated_at" : "2020-04-29T08:48:33.968422Z",
# "order_id" : "dd2504b9-0157-406a-99e1-efa522373944"
# }
#
# conditional order
#
# {
# "user_id":##,
# "symbol":"BTCUSD",
# "side":"Buy",
# "order_type":"Market",
# "price":0,
# "qty":10,
# "time_in_force":"GoodTillCancel",
# "stop_order_type":"Stop",
# "trigger_by":"LastPrice",
# "base_price":11833,
# "order_status":"Untriggered",
# "ext_fields":{
# "stop_order_type":"Stop",
# "trigger_by":"LastPrice",
# "base_price":11833,
# "expected_direction":"Rising",
# "trigger_price":12400,
# "close_on_trigger":true,
# "op_from":"api",
# "remark":"145.53.159.48",
# "o_req_num":0
# },
# "leaves_qty":10,
# "leaves_value":0.00080645,
# "reject_reason":null,
# "cross_seq":-1,
# "created_at":"2020-08-21T09:18:48.000Z",
# "updated_at":"2020-08-21T09:18:48.000Z",
# "stop_px":12400,
# "stop_order_id":"3f3b54b1-3379-42c7-8510-44f4d9915be0"
# }
#
marketId = self.safe_string(order, 'symbol')
market = self.safe_market(marketId, market)
symbol = market['symbol']
feeCurrency = None
timestamp = self.parse8601(self.safe_string(order, 'created_at'))
id = self.safe_string_2(order, 'order_id', 'stop_order_id')
type = self.safe_string_lower(order, 'order_type')
price = self.safe_number(order, 'price')
if price == 0.0:
price = None
average = self.safe_number(order, 'average_price')
amount = self.safe_number(order, 'qty')
cost = self.safe_number(order, 'cum_exec_value')
filled = self.safe_number(order, 'cum_exec_qty')
remaining = self.safe_number(order, 'leaves_qty')
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
if market is not None:
if marketType == 'linear':
feeCurrency = market['quote']
else:
feeCurrency = market['base']
lastTradeTimestamp = self.safe_timestamp(order, 'last_exec_time')
if lastTradeTimestamp == 0:
lastTradeTimestamp = None
status = self.parse_order_status(self.safe_string_2(order, 'order_status', 'stop_order_status'))
side = self.safe_string_lower(order, 'side')
feeCost = self.safe_number(order, 'cum_exec_fee')
fee = None
if feeCost is not None:
feeCost = abs(feeCost)
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
clientOrderId = self.safe_string(order, 'order_link_id')
if (clientOrderId is not None) and (len(clientOrderId) < 1):
clientOrderId = None
timeInForce = self.parse_time_in_force(self.safe_string(order, 'time_in_force'))
stopPrice = self.safe_number(order, 'stop_px')
postOnly = (timeInForce == 'PO')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': lastTradeTimestamp,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': postOnly,
'side': side,
'price': price,
'stopPrice': stopPrice,
'amount': amount,
'cost': cost,
'average': average,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': None,
})
async def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'order_link_id': 'string', # one of order_id, stop_order_id or order_link_id is required
# regular orders ---------------------------------------------
# 'order_id': id, # one of order_id or order_link_id is required for regular orders
# conditional orders ---------------------------------------------
# 'stop_order_id': id, # one of stop_order_id or order_link_id is required for conditional orders
}
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'privateLinearGetOrderSearch' if (marketType == 'linear') else 'v2PrivateGetOrder'
stopOrderId = self.safe_string(params, 'stop_order_id')
if stopOrderId is None:
orderLinkId = self.safe_string(params, 'order_link_id')
if orderLinkId is None:
request['order_id'] = id
else:
method = 'privateLinearGetStopOrderSearch' if (marketType == 'linear') else 'v2PrivateGetStopOrder'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "OK",
# "ext_code": "",
# "ext_info": "",
# "result": {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Sell",
# "order_type": "Limit",
# "price": "8083",
# "qty": 10,
# "time_in_force": "GoodTillCancel",
# "order_status": "New",
# "ext_fields": {"o_req_num": -308787, "xreq_type": "x_create", "xreq_offset": 4154640},
# "leaves_qty": 10,
# "leaves_value": "0.00123716",
# "cum_exec_qty": 0,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-10-21T07:28:19.396246Z",
# "updated_at": "2019-10-21T07:28:19.396246Z",
# "order_id": "efa44157-c355-4a98-b6d6-1d846a936b93"
# },
# "time_now": "1571651135.291930",
# "rate_limit_status": 99, # The remaining number of accesses in one minute
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "OK",
# "ext_code": "",
# "ext_info": "",
# "result": {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": "8000",
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "order_status": "Untriggered",
# "ext_fields": {},
# "leaves_qty": 1,
# "leaves_value": "0.00013333",
# "cum_exec_qty": 0,
# "cum_exec_value": null,
# "cum_exec_fee": null,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-12-27T19:56:24.052194Z",
# "updated_at": "2019-12-27T19:56:24.052194Z",
# "order_id": "378a1bbc-a93a-4e75-87f4-502ea754ba36"
# },
# "time_now": "1577476584.386958",
# "rate_limit_status": 99,
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
result = self.safe_value(response, 'result')
return self.parse_order(result, market)
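    # Hypothetical usage (the ids are placeholders): a regular order is looked
    # up by its order_id, while a conditional order is selected by passing
    # stop_order_id through params, which also switches the endpoint above:
    #
    #     await exchange.fetch_order(order_id, 'BTC/USD')
    #     await exchange.fetch_order(stop_id, 'BTC/USD', {'stop_order_id': stop_id})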
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
qty = self.amount_to_precision(symbol, amount)
if market['inverse']:
qty = int(qty)
else:
qty = float(qty)
request = {
# orders ---------------------------------------------------------
'side': self.capitalize(side),
'symbol': market['id'],
'order_type': self.capitalize(type),
'qty': qty, # order quantity in USD, integer only
# 'price': float(self.price_to_precision(symbol, price)), # required for limit orders
'time_in_force': 'GoodTillCancel', # ImmediateOrCancel, FillOrKill, PostOnly
# 'take_profit': 123.45, # take profit price, only take effect upon opening the position
# 'stop_loss': 123.45, # stop loss price, only take effect upon opening the position
# 'reduce_only': False, # reduce only, required for linear orders
# when creating a closing order, bybit recommends a True value for
# close_on_trigger to avoid failing due to insufficient available margin
# 'close_on_trigger': False, required for linear orders
# 'order_link_id': 'string', # unique client order id, max 36 characters
# conditional orders ---------------------------------------------
# base_price is used to compare with the value of stop_px, to decide
# whether your conditional order will be triggered by crossing trigger
# price from upper side or lower side, mainly used to identify the
# expected direction of the current conditional order
# 'base_price': 123.45, # required for conditional orders
# 'stop_px': 123.45, # trigger price, required for conditional orders
# 'trigger_by': 'LastPrice', # IndexPrice, MarkPrice
}
priceIsRequired = False
if type == 'limit':
priceIsRequired = True
if priceIsRequired:
if price is not None:
request['price'] = float(self.price_to_precision(symbol, price))
else:
raise ArgumentsRequired(self.id + ' createOrder() requires a price argument for a ' + type + ' order')
clientOrderId = self.safe_string_2(params, 'order_link_id', 'clientOrderId')
if clientOrderId is not None:
request['order_link_id'] = clientOrderId
params = self.omit(params, ['order_link_id', 'clientOrderId'])
stopPx = self.safe_value_2(params, 'stop_px', 'stopPrice')
basePrice = self.safe_value(params, 'base_price')
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'privateLinearPostOrderCreate' if (marketType == 'linear') else 'v2PrivatePostOrderCreate'
if marketType == 'linear':
request['reduce_only'] = False
request['close_on_trigger'] = False
if stopPx is not None:
if basePrice is None:
raise ArgumentsRequired(self.id + ' createOrder() requires both the stop_px and base_price params for a conditional ' + type + ' order')
else:
method = 'privateLinearPostStopOrderCreate' if (marketType == 'linear') else 'v2PrivatePostStopOrderCreate'
request['stop_px'] = float(self.price_to_precision(symbol, stopPx))
request['base_price'] = float(self.price_to_precision(symbol, basePrice))
params = self.omit(params, ['stop_px', 'stopPrice', 'base_price'])
elif basePrice is not None:
raise ArgumentsRequired(self.id + ' createOrder() requires both the stop_px and base_price params for a conditional ' + type + ' order')
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "OK",
# "ext_code": "",
# "ext_info": "",
# "result": {
# "user_id": 1,
# "order_id": "335fd977-e5a5-4781-b6d0-c772d5bfb95b",
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": 8800,
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "order_status": "Created",
# "last_exec_time": 0,
# "last_exec_price": 0,
# "leaves_qty": 1,
# "cum_exec_qty": 0,
# "cum_exec_value": 0,
# "cum_exec_fee": 0,
# "reject_reason": "",
# "order_link_id": "",
# "created_at": "2019-11-30T11:03:43.452Z",
# "updated_at": "2019-11-30T11:03:43.455Z"
# },
# "time_now": "1575111823.458705",
# "rate_limit_status": 98,
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": 8000,
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "stop_order_type": "Stop",
# "trigger_by": "LastPrice",
# "base_price": 7000,
# "order_status": "Untriggered",
# "ext_fields": {
# "stop_order_type": "Stop",
# "trigger_by": "LastPrice",
# "base_price": 7000,
# "expected_direction": "Rising",
# "trigger_price": 7500,
# "op_from": "api",
# "remark": "127.0.01",
# "o_req_num": 0
# },
# "leaves_qty": 1,
# "leaves_value": 0.00013333,
# "reject_reason": null,
# "cross_seq": -1,
# "created_at": "2019-12-27T12:48:24.000Z",
# "updated_at": "2019-12-27T12:48:24.000Z",
# "stop_px": 7500,
# "stop_order_id": "a85cd1c0-a9a4-49d3-a1bd-bab5ebe946d5"
# },
# "ext_info": null,
# "time_now": "1577450904.327654",
# "rate_limit_status": 99,
# "rate_limit_reset_ms": 1577450904335,
# "rate_limit": "100"
# }
#
result = self.safe_value(response, 'result')
return self.parse_order(result, market)
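    # A minimal sketch of placing a conditional (stop) order through the method
    # above; the prices are made up, and stop_px and base_price must be
    # supplied together or ArgumentsRequired is raised:
    #
    #     await exchange.create_order('BTC/USD', 'limit', 'buy', 1, 8000, {
    #         'stop_px': 7500,     # trigger price
    #         'base_price': 7000,  # reference price used to infer the trigger direction
    #     })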
async def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
if symbol is None:
            raise ArgumentsRequired(self.id + ' editOrder() requires a symbol argument')
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
await self.load_markets()
market = self.market(symbol)
request = {
# 'order_id': id, # only for non-conditional orders
'symbol': market['id'],
# 'p_r_qty': self.amount_to_precision(symbol, amount), # new order quantity, optional
# 'p_r_price' self.priceToprecision(symbol, price), # new order price, optional
# ----------------------------------------------------------------
# conditional orders
# 'stop_order_id': id, # only for conditional orders
# 'p_r_trigger_price': 123.45, # new trigger price also known as stop_px
}
method = 'privateLinearPostOrderReplace' if (marketType == 'linear') else 'v2PrivatePostOrderReplace'
stopOrderId = self.safe_string(params, 'stop_order_id')
if stopOrderId is not None:
method = 'privateLinearPostStopOrderReplace' if (marketType == 'linear') else 'v2PrivatePostStopOrderReplace'
request['stop_order_id'] = stopOrderId
params = self.omit(params, ['stop_order_id'])
else:
request['order_id'] = id
if amount is not None:
qty = self.amount_to_precision(symbol, amount)
if market['inverse']:
qty = int(qty)
else:
qty = float(qty)
request['p_r_qty'] = qty
if price is not None:
request['p_r_price'] = float(self.price_to_precision(symbol, price))
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {"order_id": "efa44157-c355-4a98-b6d6-1d846a936b93"},
# "time_now": "1539778407.210858",
# "rate_limit_status": 99, # remaining number of accesses in one minute
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {"stop_order_id": "378a1bbc-a93a-4e75-87f4-502ea754ba36"},
# "ext_info": null,
# "time_now": "1577475760.604942",
# "rate_limit_status": 96,
# "rate_limit_reset_ms": 1577475760612,
# "rate_limit": "100"
# }
#
result = self.safe_value(response, 'result', {})
return {
'info': response,
'id': self.safe_string_2(result, 'order_id', 'stop_order_id'),
'order_id': self.safe_string(result, 'order_id'),
'stop_order_id': self.safe_string(result, 'stop_order_id'),
}
async def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'order_link_id': 'string', # one of order_id, stop_order_id or order_link_id is required
# regular orders ---------------------------------------------
# 'order_id': id, # one of order_id or order_link_id is required for regular orders
# conditional orders ---------------------------------------------
# 'stop_order_id': id, # one of stop_order_id or order_link_id is required for conditional orders
}
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_value(marketTypes, symbol)
method = 'privateLinearPostOrderCancel' if (marketType == 'linear') else 'v2PrivatePostOrderCancel'
stopOrderId = self.safe_string(params, 'stop_order_id')
if stopOrderId is None:
orderLinkId = self.safe_string(params, 'order_link_id')
if orderLinkId is None:
request['order_id'] = id
else:
method = 'privateLinearPostStopOrderCancel' if (marketType == 'linear') else 'v2PrivatePostStopOrderCancel'
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', {})
return self.parse_order(result, market)
async def cancel_all_orders(self, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelAllOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
options = self.safe_value(self.options, 'cancelAllOrders', {})
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
defaultMethod = 'privateLinearPostOrderCancelAll' if (marketType == 'linear') else 'v2PrivatePostOrderCancelAll'
method = self.safe_string(options, 'method', defaultMethod)
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', [])
return self.parse_orders(result, market)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'order_id': 'string'
# 'order_link_id': 'string', # unique client order id, max 36 characters
# 'symbol': market['id'], # default BTCUSD
# 'order': 'desc', # asc
# 'page': 1,
# 'limit': 20, # max 50
# 'order_status': 'Created,New'
# conditional orders ---------------------------------------------
# 'stop_order_id': 'string',
# 'stop_order_status': 'Untriggered',
}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if limit is not None:
request['limit'] = limit
options = self.safe_value(self.options, 'fetchOrders', {})
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
defaultMethod = 'privateLinearGetOrderList' if (marketType == 'linear') else 'v2PrivateGetOrderList'
query = params
if ('stop_order_id' in params) or ('stop_order_status' in params):
            stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is not None:
if isinstance(stopOrderStatus, list):
stopOrderStatus = ','.join(stopOrderStatus)
request['stop_order_status'] = stopOrderStatus
query = self.omit(params, 'stop_order_status')
defaultMethod = 'privateLinearGetStopOrderList' if (marketType == 'linear') else 'v2PrivateGetStopOrderList'
method = self.safe_string(options, 'method', defaultMethod)
response = await getattr(self, method)(self.extend(request, query))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "current_page": 1,
# "last_page": 6,
# "data": [
# {
# "user_id": 1,
# "symbol": "BTCUSD",
# "side": "Sell",
# "order_type": "Market",
# "price": 7074,
# "qty": 2,
# "time_in_force": "ImmediateOrCancel",
# "order_status": "Filled",
# "ext_fields": {
# "close_on_trigger": True,
# "orig_order_type": "BLimit",
# "prior_x_req_price": 5898.5,
# "op_from": "pc",
# "remark": "127.0.0.1",
# "o_req_num": -34799032763,
# "xreq_type": "x_create"
# },
# "last_exec_time": "1577448481.696421",
# "last_exec_price": 7070.5,
# "leaves_qty": 0,
# "leaves_value": 0,
# "cum_exec_qty": 2,
# "cum_exec_value": 0.00028283,
# "cum_exec_fee": 0.00002,
# "reject_reason": "NoError",
# "order_link_id": "",
# "created_at": "2019-12-27T12:08:01.000Z",
# "updated_at": "2019-12-27T12:08:01.000Z",
# "order_id": "f185806b-b801-40ff-adec-52289370ed62"
# }
# ]
# },
# "ext_info": null,
# "time_now": "1577448922.437871",
# "rate_limit_status": 98,
# "rate_limit_reset_ms": 1580885703683,
# "rate_limit": 100
# }
#
# conditional orders
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "current_page": 1,
# "last_page": 1,
# "data": [
# {
# "user_id": 1,
# "stop_order_status": "Untriggered",
# "symbol": "BTCUSD",
# "side": "Buy",
# "order_type": "Limit",
# "price": 8000,
# "qty": 1,
# "time_in_force": "GoodTillCancel",
# "stop_order_type": "Stop",
# "trigger_by": "LastPrice",
# "base_price": 7000,
# "order_link_id": "",
# "created_at": "2019-12-27T12:48:24.000Z",
# "updated_at": "2019-12-27T12:48:24.000Z",
# "stop_px": 7500,
# "stop_order_id": "a85cd1c0-a9a4-49d3-a1bd-bab5ebe946d5"
# },
# ]
# },
# "ext_info": null,
# "time_now": "1577451658.755468",
# "rate_limit_status": 599,
# "rate_limit_reset_ms": 1577451658762,
# "rate_limit": 600
# }
#
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_orders(data, market, since, limit)
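    # Hypothetical example: the same call is routed to the conditional-order
    # endpoint by passing stop_order_status (a string, or a list that gets
    # joined with commas above):
    #
    #     await exchange.fetch_orders('BTC/USD', None, 20, {
    #         'stop_order_status': ['Untriggered', 'Triggered'],
    #     })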
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
defaultStatuses = [
'Rejected',
'Filled',
'Cancelled',
# conditional orders
# 'Active',
# 'Triggered',
# 'Cancelled',
# 'Rejected',
# 'Deactivated',
]
options = self.safe_value(self.options, 'fetchClosedOrders', {})
status = self.safe_value(options, 'order_status', defaultStatuses)
if isinstance(status, list):
status = ','.join(status)
request = {}
stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is None:
request['order_status'] = status
else:
request['stop_order_status'] = stopOrderStatus
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
defaultStatuses = [
'Created',
'New',
'PartiallyFilled',
'PendingCancel',
# conditional orders
# 'Untriggered',
]
options = self.safe_value(self.options, 'fetchOpenOrders', {})
status = self.safe_value(options, 'order_status', defaultStatuses)
if isinstance(status, list):
status = ','.join(status)
request = {}
stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is None:
request['order_status'] = status
else:
request['stop_order_status'] = stopOrderStatus
return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
request = {
'order_id': id,
}
return await self.fetch_my_trades(symbol, since, limit, self.extend(request, params))
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
# 'order_id': 'f185806b-b801-40ff-adec-52289370ed62', # if not provided will return user's trading records
}
market = None
if symbol is None:
orderId = self.safe_string(params, 'order_id')
if orderId is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument or an order_id param')
else:
request['order_id'] = orderId
params = self.omit(params, 'order_id')
else:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['start_time'] = since
if limit is not None:
            request['limit'] = limit
        marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol)
method = 'privateLinearGetTradeExecutionList' if (marketType == 'linear') else 'v2PrivateGetExecutionList'
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', {})
trades = self.safe_value_2(result, 'trade_list', 'data', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
            'wallet_fund_type': 'Deposit',
        }
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['start_date'] = self.ymd(since)
if limit is not None:
request['limit'] = limit
response = await self.v2PrivateGetWalletFundRecords(self.extend(request, params))
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_transactions(data, currency, since, limit)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
}
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['start_date'] = self.ymd(since)
if limit is not None:
request['limit'] = limit
response = await self.v2PrivateGetWalletWithdrawList(self.extend(request, params))
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_transactions(data, currency, since, limit, params)
def parse_transaction_status(self, status):
statuses = {
'ToBeConfirmed': 'pending',
'UnderReview': 'pending',
'Pending': 'pending',
'Success': 'ok',
'CancelByUser': 'canceled',
'Reject': 'rejected',
'Expire': 'expired',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
currencyId = self.safe_string(transaction, 'coin')
code = self.safe_currency_code(currencyId, currency)
timestamp = self.parse8601(self.safe_string_2(transaction, 'submited_at', 'exec_time'))
updated = self.parse8601(self.safe_string(transaction, 'updated_at'))
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
address = self.safe_string(transaction, 'address')
feeCost = self.safe_number(transaction, 'fee')
type = self.safe_string_lower(transaction, 'type')
fee = None
if feeCost is not None:
fee = {
'cost': feeCost,
'currency': code,
}
return {
'info': transaction,
'id': self.safe_string(transaction, 'id'),
'txid': self.safe_string(transaction, 'tx_id'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'addressTo': None,
'addressFrom': None,
'tag': None,
'tagTo': None,
'tagFrom': None,
'type': type,
'amount': self.safe_number(transaction, 'amount'),
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
async def fetch_ledger(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
}
currency = None
if code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
if since is not None:
request['start_date'] = self.ymd(since)
if limit is not None:
request['limit'] = limit
response = await self.v2PrivateGetWalletFundRecords(self.extend(request, params))
result = self.safe_value(response, 'result', {})
data = self.safe_value(result, 'data', [])
return self.parse_ledger(data, currency, since, limit)
def parse_ledger_entry(self, item, currency=None):
currencyId = self.safe_string(item, 'coin')
code = self.safe_currency_code(currencyId, currency)
amount = self.safe_number(item, 'amount')
after = self.safe_number(item, 'wallet_balance')
direction = 'out' if (amount < 0) else 'in'
before = None
if after is not None and amount is not None:
difference = amount if (direction == 'out') else -amount
before = self.sum(after, difference)
timestamp = self.parse8601(self.safe_string(item, 'exec_time'))
type = self.parse_ledger_entry_type(self.safe_string(item, 'type'))
id = self.safe_string(item, 'id')
referenceId = self.safe_string(item, 'tx_id')
return {
'id': id,
'currency': code,
'account': self.safe_string(item, 'wallet_id'),
'referenceAccount': None,
'referenceId': referenceId,
'status': None,
'amount': amount,
'before': before,
'after': after,
'fee': None,
'direction': direction,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'type': type,
'info': item,
}
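    # Worked example of the balance reconstruction above, describing the code
    # as written: for an entry with amount = -5 (direction 'out') and
    # wallet_balance (after) = 100, difference is the amount itself, so
    # before = self.sum(100, -5) = 95; for amount = +5 (direction 'in'),
    # difference is -5 and before is likewise 95.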
def parse_ledger_entry_type(self, type):
types = {
'Deposit': 'transaction',
'Withdraw': 'transaction',
'RealisedPNL': 'trade',
'Commission': 'fee',
'Refund': 'cashback',
            'Prize': 'prize',
            'ExchangeOrderWithdraw': 'transaction',
'ExchangeOrderDeposit': 'transaction',
}
return self.safe_string(types, type, type)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.implode_params(self.urls['api'], {'hostname': self.hostname})
type = self.safe_string(api, 0)
section = self.safe_string(api, 1)
request = '/' + type + '/' + section + '/' + path
if section == 'public':
if params:
request += '?' + self.rawencode(params)
elif type == 'public':
if params:
request += '?' + self.rawencode(params)
else:
self.check_required_credentials()
timestamp = self.nonce()
query = self.extend(params, {
'api_key': self.apiKey,
'recv_window': self.options['recvWindow'],
'timestamp': timestamp,
})
sortedQuery = self.keysort(query)
auth = self.rawencode(sortedQuery)
signature = self.hmac(self.encode(auth), self.encode(self.secret))
if method == 'POST':
body = self.json(self.extend(query, {
'sign': signature,
}))
headers = {
'Content-Type': 'application/json',
}
else:
request += '?' + self.urlencode(sortedQuery) + '&sign=' + signature
url += request
return {'url': url, 'method': method, 'body': body, 'headers': headers}
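    # A standalone sketch of the same signing scheme, assuming a plain api
    # key/secret pair; note that rawencode above does not URL-escape values,
    # so urlencode is only an approximation for simple parameters:
    #
    #     import hashlib, hmac, time
    #     from urllib.parse import urlencode
    #
    #     def bybit_sign(params, secret):
    #         query = urlencode(sorted(params.items()))
    #         return hmac.new(secret.encode(), query.encode(), hashlib.sha256).hexdigest()
    #
    #     params = {'api_key': 'KEY', 'recv_window': 5000, 'timestamp': int(time.time() * 1000)}
    #     params['sign'] = bybit_sign(params, 'SECRET')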
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if not response:
            return
        errorCode = self.safe_string(response, 'ret_code')
if errorCode != '0':
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
raise ExchangeError(feedback)
async def fetch_positions(self, symbols=None, params={}):
await self.load_markets()
request = {}
if isinstance(symbols, list):
length = len(symbols)
if length != 1:
                raise ArgumentsRequired(self.id + ' fetchPositions() takes exactly one symbol')
request['symbol'] = self.market_id(symbols[0])
defaultType = self.safe_string(self.options, 'defaultType', 'linear')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
response = None
if type == 'linear':
response = await self.privateLinearGetPositionList(self.extend(request, params))
elif type == 'inverse':
response = await self.v2PrivateGetPositionList(self.extend(request, params))
elif type == 'inverseFuture':
response = await self.futuresPrivateGetPositionList(self.extend(request, params))
return self.safe_value(response, 'result')
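    # Hypothetical usage: exactly one symbol must be supplied as a list, and
    # the endpoint is selected by the 'type' param ('linear', 'inverse' or
    # 'inverseFuture'):
    #
    #     await exchange.fetch_positions(['BTC/USDT'], {'type': 'linear'})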
| true
| true
|
1c45498823d147eda5f301807dd3845f13b71b1d
| 5,632
|
py
|
Python
|
tests/python/relax/test_expr.py
|
psrivas2/relax
|
4329af78eb1dc4c4ff8a61d3bf39aa4034e9cb2a
|
[
"Apache-2.0"
] | 90
|
2021-11-30T11:58:10.000Z
|
2022-03-31T02:24:04.000Z
|
tests/python/relax/test_expr.py
|
psrivas2/relax
|
4329af78eb1dc4c4ff8a61d3bf39aa4034e9cb2a
|
[
"Apache-2.0"
] | 64
|
2021-11-22T23:58:23.000Z
|
2022-03-31T03:19:22.000Z
|
tests/python/relax/test_expr.py
|
psrivas2/relax
|
4329af78eb1dc4c4ff8a61d3bf39aa4034e9cb2a
|
[
"Apache-2.0"
] | 27
|
2021-12-09T22:39:27.000Z
|
2022-03-24T23:21:48.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
import tvm
from tvm import tir
from tvm import relax as rx
import numpy as np
def test_var() -> None:
v0 = rx.Var("v0")
assert v0.name_hint == "v0"
assert v0.shape_ is None
assert v0._checked_type_ is None
shape_anno = [54, 96]
type_anno = rx.DynTensorType(2, "float32")
v1 = rx.Var("v1", shape_anno, type_anno)
assert v1.name_hint == "v1"
for s0, s1 in zip(v1.shape_, shape_anno):
assert s0 == s1
assert v1._checked_type_ == type_anno
def test_dataflow_var() -> None:
v0 = rx.DataflowVar("v0")
assert v0.name_hint == "v0"
assert v0.shape_ is None
assert v0._checked_type_ is None
shape_anno = [54, 96]
type_anno = rx.DynTensorType(2, "float16")
v1 = rx.DataflowVar("v1", shape_anno, type_anno)
assert v1.name_hint == "v1"
for s0, s1 in zip(v1.shape_, shape_anno):
assert s0 == s1
assert v1._checked_type_ == type_anno
assert isinstance(v1, rx.DataflowVar)
def test_match_shape() -> None:
# match_shape([16, 8], [m, n])
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
shape = rx.const([16, 8], "int32")
var = rx.Var("v0", type_annotation=rx.ShapeType())
b0 = rx.MatchShape(shape, [m, n], var)
assert b0.value == shape
assert b0.pattern[0] == m
assert b0.pattern[1] == n
assert b0.var is not None
assert b0.var.checked_type == rx.ShapeType()
# var1: Tensor((m, n), "float32") =
# match_shape(var0: Tensor(_, "float32"), [m, n])
type_anno0 = rx.DynTensorType(-1, "float32")
value = rx.Var("value", type_annotation=type_anno0)
shape_anno = [m, n]
type_anno = rx.DynTensorType(2, "float32")
var = rx.Var("v1", shape_anno, type_anno)
b1 = rx.MatchShape(value, [m, n], var)
assert b1.value == value
assert b1.pattern[0] == m
assert b1.pattern[1] == n
assert b1.var is not None
for s0, s1 in zip(b1.var.shape, [m, n]):
assert s0 == s1
assert b1.var.checked_type == rx.DynTensorType(2, "float32")
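# The two MatchShape cases above differ only in what gets bound: the first
# binds the matched dimensions to a ShapeType var, while the second also
# attaches the symbolic shape (m, n) to a tensor var. A compact sketch of the
# same pattern with made-up names:
#
#     k = tir.Var("k", dtype="int32")
#     t = rx.Var("t", type_annotation=rx.DynTensorType(1, "float32"))
#     rx.MatchShape(t, [k], rx.Var("t1", [k], rx.DynTensorType(1, "float32")))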
def test_var_binding() -> None:
v0 = rx.Var("v0")
val = rx.const(np.random.rand(24, 56))
b0 = rx.VarBinding(v0, val)
assert b0.var.name_hint == "v0"
assert b0.value == val
def test_binding_block() -> None:
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
shape = rx.const([16, 8], "int32")
b0 = rx.MatchShape(shape, [m, n], rx.Var("v0"))
v0 = rx.Var("v0")
val = rx.const(np.random.rand(24, 56))
b1 = rx.VarBinding(v0, val)
block0 = rx.BindingBlock([b0, b1])
assert block0.bindings[0] == b0
assert block0.bindings[1] == b1
def test_dataflow_block() -> None:
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
shape = rx.const([16, 8], "int32")
b0 = rx.MatchShape(shape, [m, n], rx.Var("v0"))
v0 = rx.Var("v0")
val = rx.const(np.random.rand(24, 56))
b1 = rx.VarBinding(v0, val)
block0 = rx.DataflowBlock([b0, b1])
assert block0.bindings[0] == b0
assert block0.bindings[1] == b1
assert isinstance(block0, rx.DataflowBlock)
def test_seq_expr() -> None:
x = rx.Var("foo")
bindings = [rx.VarBinding(x, rx.const(1))]
blocks = [rx.BindingBlock(bindings)]
seqe = rx.SeqExpr(blocks, x)
assert seqe.blocks[0] == blocks[0]
assert seqe.body == x
def test_shape_expr() -> None:
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
s = rx.ShapeExpr([m, n])
assert s.values[0] == m
assert s.values[1] == n
def test_func():
type_anno = rx.DynTensorType(2, "float32")
x = rx.Var("foo", type_annotation=type_anno)
bindings = [rx.VarBinding(x, rx.const(1))]
blocks = [rx.BindingBlock(bindings)]
seqe = rx.SeqExpr(blocks, x)
ret_type = rx.DynTensorType(-1, "float32")
func = rx.Function([x], seqe, ret_type)
func = func.with_attr("global_symbol", "func")
assert func.params[0] == x
assert func.body == seqe
assert func.ret_type == ret_type
assert func.attrs["global_symbol"] == "func"
def test_shape_of():
v0 = rx.Var("v0")
s0 = v0.shape
assert isinstance(s0, tvm.relay.Call)
assert s0.op.name == "relax.shape_of"
shape_anno = [96, 54]
v1 = rx.Var("v1", shape_anno)
s1 = v1.shape
for x, y in zip(shape_anno, s1):
assert x == y
def test_shape_expr_const():
shape_expr = rx.ShapeExpr([10, 20])
assert shape_expr.values[0] == 10
assert shape_expr.values[1] == 20
assert shape_expr.checked_type == rx.ShapeType()
assert shape_expr.shape_ is None
x = rx.Var("v0", (10, 20), rx.DynTensorType(2, "float32"))
assert x.shape_.values[0] == 10
assert x.shape_.values[1] == 20
assert x.shape_.checked_type == rx.ShapeType()
assert x.shape_.shape_ is None
if __name__ == "__main__":
pytest.main([__file__])
| 30.608696
| 64
| 0.636719
|
import pytest
import tvm
from tvm import tir
from tvm import relax as rx
import numpy as np
def test_var() -> None:
v0 = rx.Var("v0")
assert v0.name_hint == "v0"
assert v0.shape_ is None
assert v0._checked_type_ is None
shape_anno = [54, 96]
type_anno = rx.DynTensorType(2, "float32")
v1 = rx.Var("v1", shape_anno, type_anno)
assert v1.name_hint == "v1"
for s0, s1 in zip(v1.shape_, shape_anno):
assert s0 == s1
assert v1._checked_type_ == type_anno
def test_dataflow_var() -> None:
v0 = rx.DataflowVar("v0")
assert v0.name_hint == "v0"
assert v0.shape_ is None
assert v0._checked_type_ is None
shape_anno = [54, 96]
type_anno = rx.DynTensorType(2, "float16")
v1 = rx.DataflowVar("v1", shape_anno, type_anno)
assert v1.name_hint == "v1"
for s0, s1 in zip(v1.shape_, shape_anno):
assert s0 == s1
assert v1._checked_type_ == type_anno
assert isinstance(v1, rx.DataflowVar)
def test_match_shape() -> None:
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
shape = rx.const([16, 8], "int32")
var = rx.Var("v0", type_annotation=rx.ShapeType())
b0 = rx.MatchShape(shape, [m, n], var)
assert b0.value == shape
assert b0.pattern[0] == m
assert b0.pattern[1] == n
assert b0.var is not None
assert b0.var.checked_type == rx.ShapeType()
type_anno0 = rx.DynTensorType(-1, "float32")
value = rx.Var("value", type_annotation=type_anno0)
shape_anno = [m, n]
type_anno = rx.DynTensorType(2, "float32")
var = rx.Var("v1", shape_anno, type_anno)
b1 = rx.MatchShape(value, [m, n], var)
assert b1.value == value
assert b1.pattern[0] == m
assert b1.pattern[1] == n
assert b1.var is not None
for s0, s1 in zip(b1.var.shape, [m, n]):
assert s0 == s1
assert b1.var.checked_type == rx.DynTensorType(2, "float32")
def test_var_binding() -> None:
v0 = rx.Var("v0")
val = rx.const(np.random.rand(24, 56))
b0 = rx.VarBinding(v0, val)
assert b0.var.name_hint == "v0"
assert b0.value == val
def test_binding_block() -> None:
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
shape = rx.const([16, 8], "int32")
b0 = rx.MatchShape(shape, [m, n], rx.Var("v0"))
v0 = rx.Var("v0")
val = rx.const(np.random.rand(24, 56))
b1 = rx.VarBinding(v0, val)
block0 = rx.BindingBlock([b0, b1])
assert block0.bindings[0] == b0
assert block0.bindings[1] == b1
def test_dataflow_block() -> None:
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
shape = rx.const([16, 8], "int32")
b0 = rx.MatchShape(shape, [m, n], rx.Var("v0"))
v0 = rx.Var("v0")
val = rx.const(np.random.rand(24, 56))
b1 = rx.VarBinding(v0, val)
block0 = rx.DataflowBlock([b0, b1])
assert block0.bindings[0] == b0
assert block0.bindings[1] == b1
assert isinstance(block0, rx.DataflowBlock)
def test_seq_expr() -> None:
x = rx.Var("foo")
bindings = [rx.VarBinding(x, rx.const(1))]
blocks = [rx.BindingBlock(bindings)]
seqe = rx.SeqExpr(blocks, x)
assert seqe.blocks[0] == blocks[0]
assert seqe.body == x
def test_shape_expr() -> None:
m = tir.Var("m", dtype="int32")
n = tir.Var("n", dtype="int32")
s = rx.ShapeExpr([m, n])
assert s.values[0] == m
assert s.values[1] == n
def test_func():
type_anno = rx.DynTensorType(2, "float32")
x = rx.Var("foo", type_annotation=type_anno)
bindings = [rx.VarBinding(x, rx.const(1))]
blocks = [rx.BindingBlock(bindings)]
seqe = rx.SeqExpr(blocks, x)
ret_type = rx.DynTensorType(-1, "float32")
func = rx.Function([x], seqe, ret_type)
func = func.with_attr("global_symbol", "func")
assert func.params[0] == x
assert func.body == seqe
assert func.ret_type == ret_type
assert func.attrs["global_symbol"] == "func"
def test_shape_of():
v0 = rx.Var("v0")
s0 = v0.shape
assert isinstance(s0, tvm.relay.Call)
assert s0.op.name == "relax.shape_of"
shape_anno = [96, 54]
v1 = rx.Var("v1", shape_anno)
s1 = v1.shape
for x, y in zip(shape_anno, s1):
assert x == y
def test_shape_expr_const():
shape_expr = rx.ShapeExpr([10, 20])
assert shape_expr.values[0] == 10
assert shape_expr.values[1] == 20
assert shape_expr.checked_type == rx.ShapeType()
assert shape_expr.shape_ is None
x = rx.Var("v0", (10, 20), rx.DynTensorType(2, "float32"))
assert x.shape_.values[0] == 10
assert x.shape_.values[1] == 20
assert x.shape_.checked_type == rx.ShapeType()
assert x.shape_.shape_ is None
if __name__ == "__main__":
pytest.main([__file__])
| true
| true
|
1c454a4e8e42c6d6b32f7a64da00659348fb0aba
| 14,119
|
py
|
Python
|
cvat/apps/authentication/auth.py
|
adasdevops/ADAS_UPDATE_STABLE
|
306202b4e291b5876e3dd4fdd201c761e9d182f0
|
[
"Intel",
"MIT"
] | null | null | null |
cvat/apps/authentication/auth.py
|
adasdevops/ADAS_UPDATE_STABLE
|
306202b4e291b5876e3dd4fdd201c761e9d182f0
|
[
"Intel",
"MIT"
] | null | null | null |
cvat/apps/authentication/auth.py
|
adasdevops/ADAS_UPDATE_STABLE
|
306202b4e291b5876e3dd4fdd201c761e9d182f0
|
[
"Intel",
"MIT"
] | 1
|
2022-03-04T09:18:30.000Z
|
2022-03-04T09:18:30.000Z
|
# Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
from django.conf import settings
from django.db.models import Q
import rules
from . import AUTH_ROLE
from . import signature
from rest_framework.permissions import BasePermission
from django.core import signing
from rest_framework import authentication, exceptions
from rest_framework.authentication import TokenAuthentication as _TokenAuthentication
from django.contrib.auth import login
# Even with token authorization it is very important to have a valid session id
# in cookies because in some cases we cannot use token authorization (e.g. when
# we redirect to the server in UI using just a URL). To work around that, we
# override the class to call the `login` method, which restores the session id
# in cookies.
class TokenAuthentication(_TokenAuthentication):
def authenticate(self, request):
auth = super().authenticate(request)
session = getattr(request, 'session')
if auth is not None and session.session_key is None:
login(request, auth[0], 'django.contrib.auth.backends.ModelBackend')
return auth
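# A hypothetical REST_FRAMEWORK setting that would activate the classes in
# this module (the setting values are assumptions, not taken from CVAT's
# actual configuration):
#
#     REST_FRAMEWORK = {
#         'DEFAULT_AUTHENTICATION_CLASSES': [
#             'cvat.apps.authentication.auth.TokenAuthentication',
#             'cvat.apps.authentication.auth.SignatureAuthentication',
#         ],
#     }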
def register_signals():
from django.db.models.signals import post_migrate, post_save
from django.contrib.auth.models import User, Group
def create_groups(sender, **kwargs):
for role in AUTH_ROLE:
db_group, _ = Group.objects.get_or_create(name=role)
db_group.save()
post_migrate.connect(create_groups, weak=False)
if settings.IAM_TYPE == 'BASIC':
from .auth_basic import create_user
post_save.connect(create_user, sender=User)
elif settings.DJANGO_AUTH_TYPE == 'LDAP':
import django_auth_ldap.backend
from .auth_ldap import create_user
django_auth_ldap.backend.populate_user.connect(create_user)
class SignatureAuthentication(authentication.BaseAuthentication):
"""
Authentication backend for signed URLs.
"""
def authenticate(self, request):
"""
Returns authenticated user if URL signature is valid.
"""
signer = signature.Signer()
sign = request.query_params.get(signature.QUERY_PARAM)
if not sign:
return
try:
user = signer.unsign(sign, request.build_absolute_uri())
except signing.SignatureExpired:
raise exceptions.AuthenticationFailed('This URL has expired.')
except signing.BadSignature:
raise exceptions.AuthenticationFailed('Invalid signature.')
if not user.is_active:
raise exceptions.AuthenticationFailed('User inactive or deleted.')
return (user, None)
# AUTH PREDICATES
has_admin_role = rules.is_group_member(str(AUTH_ROLE.ADMIN))
has_user_role = rules.is_group_member(str(AUTH_ROLE.USER))
has_annotator_role = rules.is_group_member(str(AUTH_ROLE.ANNOTATOR))
has_observer_role = rules.is_group_member(str(AUTH_ROLE.OBSERVER))
@rules.predicate
def is_project_owner(db_user, db_project):
    # If the owner is None (null), the project can be accessed/changed/deleted
    # only by an admin. At the moment each project has an owner.
return db_project is not None and db_project.owner == db_user
@rules.predicate
def is_project_assignee(db_user, db_project):
return db_project is not None and db_project.assignee == db_user
@rules.predicate
def is_project_annotator(db_user, db_project):
db_tasks = list(db_project.tasks.prefetch_related('segment_set').all())
return any([is_task_annotator(db_user, db_task) for db_task in db_tasks])
@rules.predicate
def is_project_reviewer(db_user, db_project):
db_tasks = list(db_project.tasks.prefetch_related('segment_set').all())
return any([is_task_reviewer(db_user, db_task) for db_task in db_tasks])
@rules.predicate
def is_task_owner(db_user, db_task):
    # If the owner is None (null), the task can be accessed/changed/deleted
    # only by an admin. At the moment each task has an owner.
return db_task.owner == db_user or is_project_owner(db_user, db_task.project)
@rules.predicate
def is_task_assignee(db_user, db_task):
return db_task.assignee == db_user or is_project_assignee(db_user, db_task.project)
@rules.predicate
def is_task_reviewer(db_user, db_task):
db_segments = list(db_task.segment_set.prefetch_related('job_set__assignee').all())
return any([is_job_reviewer(db_user, db_job)
for db_segment in db_segments for db_job in db_segment.job_set.all()])
@rules.predicate
def is_task_annotator(db_user, db_task):
db_segments = list(db_task.segment_set.prefetch_related('job_set__assignee').all())
return any([is_job_annotator(db_user, db_job)
for db_segment in db_segments for db_job in db_segment.job_set.all()])
@rules.predicate
def is_job_owner(db_user, db_job):
return is_task_owner(db_user, db_job.segment.task)
@rules.predicate
def is_job_annotator(db_user, db_job):
db_task = db_job.segment.task
# A job can be annotated by any user if the task's assignee is None.
has_rights = (db_task.assignee is None and not settings.RESTRICTIONS['reduce_task_visibility']) or is_task_assignee(db_user, db_task)
if db_job.assignee is not None:
has_rights |= (db_user == db_job.assignee)
return has_rights
@rules.predicate
def has_change_permissions(db_user, db_job):
db_task = db_job.segment.task
# A job can be annotated by any user if the task's assignee is None.
has_rights = (db_task.assignee is None and not settings.RESTRICTIONS['reduce_task_visibility']) or is_task_assignee(db_user, db_task)
if db_job.assignee is not None:
has_rights |= (db_user == db_job.assignee) and (db_job.status == 'annotation')
if db_job.reviewer is not None:
has_rights |= (db_user == db_job.reviewer) and (db_job.status == 'validation')
return has_rights
@rules.predicate
def is_job_reviewer(db_user, db_job):
has_rights = db_job.reviewer == db_user
return has_rights
@rules.predicate
def is_issue_owner(db_user, db_issue):
has_rights = db_issue.owner == db_user
return has_rights
@rules.predicate
def is_comment_author(db_user, db_comment):
has_rights = (db_comment.author == db_user)
return has_rights
@rules.predicate
def is_cloud_storage_owner(db_user, db_storage):
return db_storage.owner == db_user
# AUTH PERMISSIONS RULES
rules.add_perm('engine.role.user', has_user_role)
rules.add_perm('engine.role.admin', has_admin_role)
rules.add_perm('engine.role.annotator', has_annotator_role)
rules.add_perm('engine.role.observer', has_observer_role)
rules.add_perm('engine.project.create', has_admin_role | has_user_role)
rules.add_perm('engine.project.access', has_admin_role | has_observer_role |
is_project_owner | is_project_annotator)
rules.add_perm('engine.project.change', has_admin_role | is_project_owner |
is_project_assignee)
rules.add_perm('engine.project.delete', has_admin_role | is_project_owner)
rules.add_perm('engine.task.create', has_admin_role | has_user_role)
rules.add_perm('engine.task.access', has_admin_role | has_observer_role |
is_task_owner | is_task_annotator | is_task_reviewer)
rules.add_perm('engine.task.change', has_admin_role | is_task_owner |
is_task_assignee)
rules.add_perm('engine.task.delete', has_admin_role | is_task_owner)
rules.add_perm('engine.job.access', has_admin_role | has_observer_role |
is_job_owner | is_job_annotator | is_job_reviewer)
rules.add_perm('engine.job.change', has_admin_role | is_job_owner | has_change_permissions)
rules.add_perm('engine.job.review', has_admin_role | (is_job_reviewer & has_change_permissions))
rules.add_perm('engine.issue.change', has_admin_role | is_issue_owner)
rules.add_perm('engine.issue.destroy', has_admin_role | is_issue_owner)
rules.add_perm('engine.comment.change', has_admin_role | is_comment_author)
rules.add_perm('engine.cloudstorage.create', has_admin_role | has_user_role)
rules.add_perm('engine.cloudstorage.change', has_admin_role | is_cloud_storage_owner)
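# Illustrative only (hypothetical permission name): `rules` predicates compose
# lazily with | and &, which is how the permission table above is built:
#
#     rules.add_perm('engine.example.touch', has_admin_role | (is_task_owner & has_user_role))
#     request.user.has_perm('engine.example.touch', db_task)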
class AdminRolePermission(BasePermission):
# pylint: disable=no-self-use
def has_permission(self, request, view):
return request.user.has_perm('engine.role.admin')
class UserRolePermission(BasePermission):
# pylint: disable=no-self-use
def has_permission(self, request, view):
return request.user.has_perm('engine.role.user')
class AnnotatorRolePermission(BasePermission):
# pylint: disable=no-self-use
def has_permission(self, request, view):
return request.user.has_perm('engine.role.annotator')
class ObserverRolePermission(BasePermission):
# pylint: disable=no-self-use
def has_permission(self, request, view):
return request.user.has_perm('engine.role.observer')
class ProjectCreatePermission(BasePermission):
# pylint: disable=no-self-use
def has_permission(self, request, view):
return request.user.has_perm('engine.project.create')
class ProjectAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.project.access', obj)
class ProjectChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.project.change', obj)
class ProjectDeletePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.project.delete', obj)
class TaskCreatePermission(BasePermission):
# pylint: disable=no-self-use
def has_permission(self, request, view):
return request.user.has_perm('engine.task.create')
class TaskAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.task.access', obj)
class ProjectGetQuerySetMixin(object):
def get_queryset(self):
queryset = super().get_queryset()
user = self.request.user
# Don't filter queryset for admin, observer and detail methods
if has_admin_role(user) or has_observer_role(user) or self.detail:
return queryset
else:
return queryset.filter(Q(owner=user) | Q(assignee=user) |
Q(task__owner=user) | Q(task__assignee=user) |
Q(task__segment__job__assignee=user) |
Q(task__segment__job__reviewer=user)).distinct()
def filter_task_queryset(queryset, user):
# Don't filter queryset for admin, observer
if has_admin_role(user) or has_observer_role(user):
return queryset
query_filter = Q(owner=user) | Q(assignee=user) | \
Q(segment__job__assignee=user) | Q(segment__job__reviewer=user)
if not settings.RESTRICTIONS['reduce_task_visibility']:
query_filter |= Q(assignee=None)
return queryset.filter(query_filter).distinct()
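# Hypothetical usage outside the mixin below (the model import is assumed):
#
#     from cvat.apps.engine.models import Task
#     visible_tasks = filter_task_queryset(Task.objects.all(), request.user)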
class TaskGetQuerySetMixin(object):
def get_queryset(self):
queryset = super().get_queryset()
user = self.request.user
# Don't filter queryset for detail methods
if self.detail:
return queryset
else:
return filter_task_queryset(queryset, user)
class TaskChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.task.change', obj)
class TaskDeletePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.task.delete', obj)
class JobAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.job.access', obj)
class JobChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.job.change', obj)
class JobReviewPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.job.review', obj)
class IssueAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
db_job = obj.job
return request.user.has_perm('engine.job.access', db_job)
class IssueDestroyPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.issue.destroy', obj)
class IssueChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
db_job = obj.job
return (request.user.has_perm('engine.job.change', db_job)
or request.user.has_perm('engine.issue.change', obj))
class CommentCreatePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj): # obj is db_job
return request.user.has_perm('engine.job.access', obj)
class CommentChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.comment.change', obj)
class CloudStorageAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm("engine.cloudstorage.change", obj)
class CloudStorageChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm("engine.cloudstorage.change", obj)
class CloudStorageGetQuerySetMixin(object):
def get_queryset(self):
queryset = super().get_queryset()
user = self.request.user
if has_admin_role(user) or self.detail:
return queryset
else:
return queryset.filter(owner=user)
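# A minimal sketch of wiring the mixins and permission classes together in a
# DRF viewset; everything except the classes defined above is an assumption:
#
#     from rest_framework import viewsets
#     from cvat.apps.engine.models import Task
#     from cvat.apps.engine.serializers import TaskSerializer
#
#     class TaskViewSet(TaskGetQuerySetMixin, viewsets.ModelViewSet):
#         queryset = Task.objects.all()
#         serializer_class = TaskSerializer
#         permission_classes = [TaskAccessPermission]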
| 39.54902
| 137
| 0.738154
|
from django.conf import settings
from django.db.models import Q
import rules
from . import AUTH_ROLE
from . import signature
from rest_framework.permissions import BasePermission
from django.core import signing
from rest_framework import authentication, exceptions
from rest_framework.authentication import TokenAuthentication as _TokenAuthentication
from django.contrib.auth import login
class TokenAuthentication(_TokenAuthentication):
def authenticate(self, request):
auth = super().authenticate(request)
session = getattr(request, 'session')
if auth is not None and session.session_key is None:
login(request, auth[0], 'django.contrib.auth.backends.ModelBackend')
return auth
def register_signals():
from django.db.models.signals import post_migrate, post_save
from django.contrib.auth.models import User, Group
def create_groups(sender, **kwargs):
for role in AUTH_ROLE:
db_group, _ = Group.objects.get_or_create(name=role)
db_group.save()
post_migrate.connect(create_groups, weak=False)
if settings.IAM_TYPE == 'BASIC':
from .auth_basic import create_user
post_save.connect(create_user, sender=User)
elif settings.DJANGO_AUTH_TYPE == 'LDAP':
import django_auth_ldap.backend
from .auth_ldap import create_user
django_auth_ldap.backend.populate_user.connect(create_user)
class SignatureAuthentication(authentication.BaseAuthentication):
def authenticate(self, request):
signer = signature.Signer()
sign = request.query_params.get(signature.QUERY_PARAM)
if not sign:
return
try:
user = signer.unsign(sign, request.build_absolute_uri())
except signing.SignatureExpired:
raise exceptions.AuthenticationFailed('This URL has expired.')
except signing.BadSignature:
raise exceptions.AuthenticationFailed('Invalid signature.')
if not user.is_active:
raise exceptions.AuthenticationFailed('User inactive or deleted.')
return (user, None)
has_admin_role = rules.is_group_member(str(AUTH_ROLE.ADMIN))
has_user_role = rules.is_group_member(str(AUTH_ROLE.USER))
has_annotator_role = rules.is_group_member(str(AUTH_ROLE.ANNOTATOR))
has_observer_role = rules.is_group_member(str(AUTH_ROLE.OBSERVER))
@rules.predicate
def is_project_owner(db_user, db_project):
return db_project is not None and db_project.owner == db_user
@rules.predicate
def is_project_assignee(db_user, db_project):
return db_project is not None and db_project.assignee == db_user
@rules.predicate
def is_project_annotator(db_user, db_project):
db_tasks = list(db_project.tasks.prefetch_related('segment_set').all())
return any([is_task_annotator(db_user, db_task) for db_task in db_tasks])
@rules.predicate
def is_project_reviewer(db_user, db_project):
db_tasks = list(db_project.tasks.prefetch_related('segment_set').all())
return any([is_task_reviewer(db_user, db_task) for db_task in db_tasks])
@rules.predicate
def is_task_owner(db_user, db_task):
return db_task.owner == db_user or is_project_owner(db_user, db_task.project)
@rules.predicate
def is_task_assignee(db_user, db_task):
return db_task.assignee == db_user or is_project_assignee(db_user, db_task.project)
@rules.predicate
def is_task_reviewer(db_user, db_task):
db_segments = list(db_task.segment_set.prefetch_related('job_set__assignee').all())
return any([is_job_reviewer(db_user, db_job)
for db_segment in db_segments for db_job in db_segment.job_set.all()])
@rules.predicate
def is_task_annotator(db_user, db_task):
db_segments = list(db_task.segment_set.prefetch_related('job_set__assignee').all())
return any([is_job_annotator(db_user, db_job)
for db_segment in db_segments for db_job in db_segment.job_set.all()])
@rules.predicate
def is_job_owner(db_user, db_job):
return is_task_owner(db_user, db_job.segment.task)
@rules.predicate
def is_job_annotator(db_user, db_job):
db_task = db_job.segment.task
has_rights = (db_task.assignee is None and not settings.RESTRICTIONS['reduce_task_visibility']) or is_task_assignee(db_user, db_task)
if db_job.assignee is not None:
has_rights |= (db_user == db_job.assignee)
return has_rights
@rules.predicate
def has_change_permissions(db_user, db_job):
db_task = db_job.segment.task
has_rights = (db_task.assignee is None and not settings.RESTRICTIONS['reduce_task_visibility']) or is_task_assignee(db_user, db_task)
if db_job.assignee is not None:
has_rights |= (db_user == db_job.assignee) and (db_job.status == 'annotation')
if db_job.reviewer is not None:
has_rights |= (db_user == db_job.reviewer) and (db_job.status == 'validation')
return has_rights
@rules.predicate
def is_job_reviewer(db_user, db_job):
has_rights = db_job.reviewer == db_user
return has_rights
@rules.predicate
def is_issue_owner(db_user, db_issue):
has_rights = db_issue.owner == db_user
return has_rights
@rules.predicate
def is_comment_author(db_user, db_comment):
has_rights = (db_comment.author == db_user)
return has_rights
@rules.predicate
def is_cloud_storage_owner(db_user, db_storage):
return db_storage.owner == db_user
rules.add_perm('engine.role.user', has_user_role)
rules.add_perm('engine.role.admin', has_admin_role)
rules.add_perm('engine.role.annotator', has_annotator_role)
rules.add_perm('engine.role.observer', has_observer_role)
rules.add_perm('engine.project.create', has_admin_role | has_user_role)
rules.add_perm('engine.project.access', has_admin_role | has_observer_role |
is_project_owner | is_project_annotator)
rules.add_perm('engine.project.change', has_admin_role | is_project_owner |
is_project_assignee)
rules.add_perm('engine.project.delete', has_admin_role | is_project_owner)
rules.add_perm('engine.task.create', has_admin_role | has_user_role)
rules.add_perm('engine.task.access', has_admin_role | has_observer_role |
is_task_owner | is_task_annotator | is_task_reviewer)
rules.add_perm('engine.task.change', has_admin_role | is_task_owner |
is_task_assignee)
rules.add_perm('engine.task.delete', has_admin_role | is_task_owner)
rules.add_perm('engine.job.access', has_admin_role | has_observer_role |
is_job_owner | is_job_annotator | is_job_reviewer)
rules.add_perm('engine.job.change', has_admin_role | is_job_owner | has_change_permissions)
rules.add_perm('engine.job.review', has_admin_role | (is_job_reviewer & has_change_permissions))
rules.add_perm('engine.issue.change', has_admin_role | is_issue_owner)
rules.add_perm('engine.issue.destroy', has_admin_role | is_issue_owner)
rules.add_perm('engine.comment.change', has_admin_role | is_comment_author)
rules.add_perm('engine.cloudstorage.create', has_admin_role | has_user_role)
rules.add_perm('engine.cloudstorage.change', has_admin_role | is_cloud_storage_owner)
class AdminRolePermission(BasePermission):
def has_permission(self, request, view):
return request.user.has_perm('engine.role.admin')
class UserRolePermission(BasePermission):
def has_permission(self, request, view):
return request.user.has_perm('engine.role.user')
class AnnotatorRolePermission(BasePermission):
def has_permission(self, request, view):
return request.user.has_perm('engine.role.annotator')
class ObserverRolePermission(BasePermission):
def has_permission(self, request, view):
return request.user.has_perm('engine.role.observer')
class ProjectCreatePermission(BasePermission):
def has_permission(self, request, view):
return request.user.has_perm('engine.project.create')
class ProjectAccessPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.project.access', obj)
class ProjectChangePermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.project.change', obj)
class ProjectDeletePermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.project.delete', obj)
class TaskCreatePermission(BasePermission):
def has_permission(self, request, view):
return request.user.has_perm('engine.task.create')
class TaskAccessPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.task.access', obj)
class ProjectGetQuerySetMixin(object):
def get_queryset(self):
queryset = super().get_queryset()
user = self.request.user
if has_admin_role(user) or has_observer_role(user) or self.detail:
return queryset
else:
return queryset.filter(Q(owner=user) | Q(assignee=user) |
Q(task__owner=user) | Q(task__assignee=user) |
Q(task__segment__job__assignee=user) |
Q(task__segment__job__reviewer=user)).distinct()
def filter_task_queryset(queryset, user):
# Don't filter queryset for admin, observer
if has_admin_role(user) or has_observer_role(user):
return queryset
query_filter = Q(owner=user) | Q(assignee=user) | \
Q(segment__job__assignee=user) | Q(segment__job__reviewer=user)
if not settings.RESTRICTIONS['reduce_task_visibility']:
query_filter |= Q(assignee=None)
return queryset.filter(query_filter).distinct()
class TaskGetQuerySetMixin(object):
def get_queryset(self):
queryset = super().get_queryset()
user = self.request.user
if self.detail:
return queryset
else:
return filter_task_queryset(queryset, user)
class TaskChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.task.change', obj)
class TaskDeletePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.task.delete', obj)
class JobAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.job.access', obj)
class JobChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.job.change', obj)
class JobReviewPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.job.review', obj)
class IssueAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
db_job = obj.job
return request.user.has_perm('engine.job.access', db_job)
class IssueDestroyPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.issue.destroy', obj)
class IssueChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
db_job = obj.job
return (request.user.has_perm('engine.job.change', db_job)
or request.user.has_perm('engine.issue.change', obj))
class CommentCreatePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj): # obj is db_job
return request.user.has_perm('engine.job.access', obj)
class CommentChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm('engine.comment.change', obj)
class CloudStorageAccessPermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm("engine.cloudstorage.change", obj)
class CloudStorageChangePermission(BasePermission):
# pylint: disable=no-self-use
def has_object_permission(self, request, view, obj):
return request.user.has_perm("engine.cloudstorage.change", obj)
class CloudStorageGetQuerySetMixin(object):
def get_queryset(self):
queryset = super().get_queryset()
user = self.request.user
if has_admin_role(user) or self.detail:
return queryset
else:
return queryset.filter(owner=user)
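# --- illustrative sketch, not part of the original module ---------------
# A minimal example (an assumption, not this project's actual view code) of
# how the permission classes and queryset mixin above might be composed in
# a DRF ViewSet; "Task" and "TaskSerializer" are hypothetical stand-ins.
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated

class ExampleTaskViewSet(TaskGetQuerySetMixin, viewsets.ModelViewSet):
    # queryset = Task.objects.all()        # hypothetical model
    # serializer_class = TaskSerializer    # hypothetical serializer

    def get_permissions(self):
        # Everyone must be authenticated; the object-level checks defer to
        # the rules predicates registered above.
        permissions = [IsAuthenticated()]
        if self.action == 'create':
            permissions.append(TaskCreatePermission())
        elif self.action in ('list', 'retrieve'):
            permissions.append(TaskAccessPermission())
        elif self.action in ('update', 'partial_update'):
            permissions.append(TaskChangePermission())
        elif self.action == 'destroy':
            permissions.append(TaskDeletePermission())
        return permissions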
| true
| true
|
1c454b16a8c78878e2eced338b13aad8690829d4
| 3,793
|
py
|
Python
|
modelzoo/hnn.py
|
ml-jku/mc-lstm
|
8bbaece3ecb4187a76c6318d4c6e40c1dcc71303
|
[
"MIT"
] | 18
|
2021-03-27T06:31:59.000Z
|
2022-02-24T09:48:57.000Z
|
modelzoo/hnn.py
|
ml-jku/mc-lstm
|
8bbaece3ecb4187a76c6318d4c6e40c1dcc71303
|
[
"MIT"
] | null | null | null |
modelzoo/hnn.py
|
ml-jku/mc-lstm
|
8bbaece3ecb4187a76c6318d4c6e40c1dcc71303
|
[
"MIT"
] | 7
|
2021-03-27T21:01:23.000Z
|
2021-09-21T14:27:59.000Z
|
import torch
from torch import nn
class HNN(nn.Module):
'''Learn arbitrary vector fields that are sums of conservative and solenoidal fields'''
def __init__(self, input_dim, differentiable_model, field_type='solenoidal',
baseline=False, assume_canonical_coords=True):
super(HNN, self).__init__()
self.baseline = baseline
self.differentiable_model = differentiable_model
self.assume_canonical_coords = assume_canonical_coords
self.M = self.permutation_tensor(input_dim) # Levi-Civita permutation tensor
self.field_type = field_type
def forward(self, x):
# traditional forward pass
if self.baseline:
return self.differentiable_model(x)
y = self.differentiable_model(x)
assert y.dim() == 2 and y.shape[1] == 2, "Output tensor should have shape [batch_size, 2]"
return y.split(1, 1)
def time_derivative(self, x, t=None, separate_fields=False):
        '''NEURAL ODE-STYLE VECTOR FIELD'''
if self.baseline:
return self.differentiable_model(x)
        '''NEURAL HAMILTONIAN-STYLE VECTOR FIELD'''
F1, F2 = self.forward(x) # traditional forward pass
conservative_field = torch.zeros_like(x) # start out with both components set to 0
solenoidal_field = torch.zeros_like(x)
if self.field_type != 'solenoidal':
dF1 = torch.autograd.grad(F1.sum(), x, create_graph=True)[0] # gradients for conservative field
conservative_field = dF1 @ torch.eye(*self.M.shape)
if self.field_type != 'conservative':
dF2 = torch.autograd.grad(F2.sum(), x, create_graph=True)[0] # gradients for solenoidal field
solenoidal_field = dF2 @ self.M.t()
if separate_fields:
return [conservative_field, solenoidal_field]
return conservative_field + solenoidal_field
def permutation_tensor(self, n):
M = None
if self.assume_canonical_coords:
M = torch.eye(n)
M = torch.cat([M[n // 2:], -M[:n // 2]])
else:
'''Constructs the Levi-Civita permutation tensor'''
M = torch.ones(n, n) # matrix of ones
M *= 1 - torch.eye(n) # clear diagonals
M[::2] *= -1 # pattern of signs
M[:, ::2] *= -1
for i in range(n): # make asymmetric
for j in range(i + 1, n):
M[i, j] *= -1
return M
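# Illustrative check (not in the original file): with assume_canonical_coords
# and even n, permutation_tensor() returns the symplectic matrix
# J = [[0, I], [-I, 0]], which time_derivative() uses to rotate the gradient
# of F2 into a divergence-free field. A self-contained equivalent:
def _canonical_J(n):
    eye = torch.eye(n)
    return torch.cat([eye[n // 2:], -eye[:n // 2]])

assert torch.equal(_canonical_J(4), torch.tensor([[0., 0., 1., 0.],
                                                  [0., 0., 0., 1.],
                                                  [-1., 0., 0., 0.],
                                                  [0., -1., 0., 0.]]))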
class MLP(nn.Module):
'''Just a salt-of-the-earth MLP'''
def __init__(self, input_dim, hidden_dim, output_dim, nonlinearity='tanh'):
super(MLP, self).__init__()
self.linear1 = torch.nn.Linear(input_dim, hidden_dim)
self.linear2 = torch.nn.Linear(hidden_dim, hidden_dim)
self.linear3 = torch.nn.Linear(hidden_dim, output_dim, bias=None)
for l in [self.linear1, self.linear2, self.linear3]:
torch.nn.init.orthogonal_(l.weight) # use a principled initialization
self.nonlinearity = choose_nonlinearity(nonlinearity)
def forward(self, x, separate_fields=False):
h = self.nonlinearity(self.linear1(x))
h = self.nonlinearity(self.linear2(h))
return self.linear3(h)
def choose_nonlinearity(name):
nl = None
if name == 'tanh':
nl = torch.tanh
elif name == 'relu':
nl = torch.relu
elif name == 'sigmoid':
nl = torch.sigmoid
elif name == 'softplus':
nl = torch.nn.functional.softplus
elif name == 'selu':
nl = torch.nn.functional.selu
elif name == 'elu':
nl = torch.nn.functional.elu
elif name == 'swish':
nl = lambda x: x * torch.sigmoid(x)
else:
raise ValueError("nonlinearity not recognized")
return nl
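# --- illustrative usage sketch, not part of the original file -----------
# Wiring the pieces together: an MLP maps a state (q, p) to two scalar
# potentials and HNN differentiates them into a vector field. The shapes
# and hyperparameters below are arbitrary demo choices.
if __name__ == '__main__':
    net = MLP(input_dim=2, hidden_dim=32, output_dim=2)
    model = HNN(input_dim=2, differentiable_model=net)
    x = torch.randn(8, 2, requires_grad=True)  # batch of (q, p) states
    dxdt = model.time_derivative(x)            # shape: torch.Size([8, 2])
    print(dxdt.shape)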
| 35.448598
| 108
| 0.613499
|
import torch
from torch import nn
class HNN(nn.Module):
def __init__(self, input_dim, differentiable_model, field_type='solenoidal',
baseline=False, assume_canonical_coords=True):
super(HNN, self).__init__()
self.baseline = baseline
self.differentiable_model = differentiable_model
self.assume_canonical_coords = assume_canonical_coords
        self.M = self.permutation_tensor(input_dim)
        self.field_type = field_type
def forward(self, x):
if self.baseline:
return self.differentiable_model(x)
y = self.differentiable_model(x)
assert y.dim() == 2 and y.shape[1] == 2, "Output tensor should have shape [batch_size, 2]"
return y.split(1, 1)
def time_derivative(self, x, t=None, separate_fields=False):
if self.baseline:
return self.differentiable_model(x)
F1, F2 = self.forward(x)
        conservative_field = torch.zeros_like(x)
        solenoidal_field = torch.zeros_like(x)
if self.field_type != 'solenoidal':
            dF1 = torch.autograd.grad(F1.sum(), x, create_graph=True)[0]
            conservative_field = dF1 @ torch.eye(*self.M.shape)
if self.field_type != 'conservative':
            dF2 = torch.autograd.grad(F2.sum(), x, create_graph=True)[0]
            solenoidal_field = dF2 @ self.M.t()
if separate_fields:
return [conservative_field, solenoidal_field]
return conservative_field + solenoidal_field
def permutation_tensor(self, n):
M = None
if self.assume_canonical_coords:
M = torch.eye(n)
M = torch.cat([M[n // 2:], -M[:n // 2]])
else:
'''Constructs the Levi-Civita permutation tensor'''
            M = torch.ones(n, n)
            M *= 1 - torch.eye(n)
            M[::2] *= -1
            M[:, ::2] *= -1
            for i in range(n):
                for j in range(i + 1, n):
M[i, j] *= -1
return M
class MLP(nn.Module):
def __init__(self, input_dim, hidden_dim, output_dim, nonlinearity='tanh'):
super(MLP, self).__init__()
self.linear1 = torch.nn.Linear(input_dim, hidden_dim)
self.linear2 = torch.nn.Linear(hidden_dim, hidden_dim)
self.linear3 = torch.nn.Linear(hidden_dim, output_dim, bias=None)
for l in [self.linear1, self.linear2, self.linear3]:
torch.nn.init.orthogonal_(l.weight)
self.nonlinearity = choose_nonlinearity(nonlinearity)
def forward(self, x, separate_fields=False):
h = self.nonlinearity(self.linear1(x))
h = self.nonlinearity(self.linear2(h))
return self.linear3(h)
def choose_nonlinearity(name):
nl = None
if name == 'tanh':
nl = torch.tanh
elif name == 'relu':
nl = torch.relu
elif name == 'sigmoid':
nl = torch.sigmoid
elif name == 'softplus':
nl = torch.nn.functional.softplus
elif name == 'selu':
nl = torch.nn.functional.selu
elif name == 'elu':
nl = torch.nn.functional.elu
elif name == 'swish':
nl = lambda x: x * torch.sigmoid(x)
else:
raise ValueError("nonlinearity not recognized")
return nl
| true
| true
|
1c454b8c0ca00f4367578dddf2f1c44be75fbd3e
| 4,182
|
py
|
Python
|
share/seeds/generate-seeds.py
|
enixt/enixt
|
6bc063558ccb1a66e28d26bd6e2903c2d4346633
|
[
"MIT"
] | 1
|
2019-01-15T18:37:03.000Z
|
2019-01-15T18:37:03.000Z
|
share/seeds/generate-seeds.py
|
enixt/enixt
|
6bc063558ccb1a66e28d26bd6e2903c2d4346633
|
[
"MIT"
] | null | null | null |
share/seeds/generate-seeds.py
|
enixt/enixt
|
6bc063558ccb1a66e28d26bd6e2903c2d4346633
|
[
"MIT"
] | 2
|
2018-03-10T19:32:49.000Z
|
2019-12-20T01:32:03.000Z
|
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef H_CHAINPARAMSSEEDS\n')
g.write('#define H_CHAINPARAMSSEEDS\n')
g.write('// List of fixed seed nodes for the bitcoin network\n')
g.write('// AUTOGENERATED by share/seeds/generate-seeds.py\n\n')
g.write('// Each line contains a 16-byte IPv6 address and a port.\n')
g.write('// IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 15111)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 25111)
g.write('#endif\n')
if __name__ == '__main__':
main()
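# --- illustrative sanity check, not part of the original script ---------
# Typical invocation, per the docstring above:
#     python generate-seeds.py share/seeds > src/chainparamsseeds.h
# parse_spec() maps a textual peer spec to a (16-byte address, port) pair;
# IPv4 addresses are embedded in the ::ffff:0:0/96 prefix and a missing
# port falls back to defaultport:
if __name__ == '__main__':
    assert parse_spec('1.2.3.4:8333', 15111) == (pchIPv4 + bytearray([1, 2, 3, 4]), 8333)
    assert parse_spec('1.2.3.4', 15111)[1] == 15111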
| 31.681818
| 98
| 0.587996
|
from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
    elif '.' in addr:
        return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
    elif ':' in addr:
        sub = [[], []]
        x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
                if i == 0 or i == (len(addr)-1):
                    continue
                x += 1
                assert(x < 2)
            else:
                val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
    elif addr.startswith('0x'):
        return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
    if match:
        host = match.group(1)
port = match.group(2)
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef H_CHAINPARAMSSEEDS\n')
g.write('#define H_CHAINPARAMSSEEDS\n')
g.write('// List of fixed seed nodes for the bitcoin network\n')
g.write('// AUTOGENERATED by share/seeds/generate-seeds.py\n\n')
g.write('// Each line contains a 16-byte IPv6 address and a port.\n')
g.write('// IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 15111)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 25111)
g.write('#endif\n')
if __name__ == '__main__':
main()
| true
| true
|
1c454f649b1049f927a2a7419ab2e09e87c4ce45
| 3,531
|
py
|
Python
|
widgets.py/migrations/0001_initial.py
|
deejungx/goalza
|
c9ec93aad13228bccd9f185cfac6ff9e1fc1994a
|
[
"MIT"
] | 1
|
2019-05-04T11:26:14.000Z
|
2019-05-04T11:26:14.000Z
|
widgets.py/migrations/0001_initial.py
|
deejungx/goalza
|
c9ec93aad13228bccd9f185cfac6ff9e1fc1994a
|
[
"MIT"
] | null | null | null |
widgets.py/migrations/0001_initial.py
|
deejungx/goalza
|
c9ec93aad13228bccd9f185cfac6ff9e1fc1994a
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2 on 2019-05-02 11:46
import datetime
from django.db import migrations, models
import django.db.models.deletion
import pages.models
import pages.myFields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='FutsalCompany',
fields=[
('futsal_id', models.AutoField(primary_key=True, serialize=False)),
('futsal_name', models.CharField(max_length=155)),
('opening_time', models.TimeField(default=pages.models.getDefaultOpeningTime)),
('closing_time', models.TimeField(default=pages.models.getDefaultClosingTime)),
],
options={
'verbose_name_plural': 'futsal companies',
'ordering': ['futsal_name'],
},
),
migrations.CreateModel(
name='Ground',
fields=[
('ground_id', models.AutoField(primary_key=True, serialize=False)),
('ground_number', models.IntegerField(unique=True)),
('ground_name', models.CharField(max_length=155, unique=True)),
('futsalCompany', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.FutsalCompany')),
],
),
migrations.CreateModel(
name='Player',
fields=[
('player_id', models.AutoField(primary_key=True, serialize=False)),
('player_name', models.CharField(max_length=155)),
('player_address', models.CharField(blank=True, max_length=220, null=True)),
('player_email', models.EmailField(blank=True, max_length=254, null=True, unique=True)),
('phone_number', models.CharField(max_length=12, unique=True)),
],
),
migrations.CreateModel(
name='GroundPrice',
fields=[
('price_segment_id', models.AutoField(primary_key=True, serialize=False)),
('day_of_week', pages.myFields.DayOfTheWeekField(choices=[('1', 'Monday'), ('2', 'Tuesday'), ('3', 'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'), ('7', 'Sunday')], max_length=1)),
('start_time', models.TimeField()),
('end_time', models.TimeField()),
('price', models.DecimalField(decimal_places=2, max_digits=8)),
('ground', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.Ground')),
],
),
migrations.CreateModel(
name='Booking',
fields=[
('booking_id', models.AutoField(primary_key=True, serialize=False)),
('booking_date', models.DateField()),
('start_time', models.TimeField()),
('duration', models.DurationField(default=datetime.timedelta(seconds=3600))),
('booking_status', models.CharField(choices=[('CONFIRMED', 'CONFIRMED'), ('PLAYING', 'PLAYING'), ('COMPLETE', 'COMPLETE'), ('CANCELED', 'CANCELED')], max_length=50)),
('payment_status', models.CharField(choices=[('PENDING', 'PENDING'), ('COMPLETED', 'COMPLETED')], max_length=50)),
('ground', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.Ground')),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.Player')),
],
),
]
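# --- illustrative sketch, not part of the original migration ------------
# The models.py this migration was generated from would look roughly like
# the sketch below; names are inferred from the operations above, so treat
# it as an assumption rather than the project's real source.
#
#     class FutsalCompany(models.Model):
#         futsal_id = models.AutoField(primary_key=True)
#         futsal_name = models.CharField(max_length=155)
#         opening_time = models.TimeField(default=getDefaultOpeningTime)
#         closing_time = models.TimeField(default=getDefaultClosingTime)
#
#         class Meta:
#             verbose_name_plural = 'futsal companies'
#             ordering = ['futsal_name']
#
#     class Ground(models.Model):
#         ground_id = models.AutoField(primary_key=True)
#         ground_number = models.IntegerField(unique=True)
#         ground_name = models.CharField(max_length=155, unique=True)
#         futsalCompany = models.ForeignKey(FutsalCompany, on_delete=models.CASCADE)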
| 47.08
| 217
| 0.57859
|
import datetime
from django.db import migrations, models
import django.db.models.deletion
import pages.models
import pages.myFields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='FutsalCompany',
fields=[
('futsal_id', models.AutoField(primary_key=True, serialize=False)),
('futsal_name', models.CharField(max_length=155)),
('opening_time', models.TimeField(default=pages.models.getDefaultOpeningTime)),
('closing_time', models.TimeField(default=pages.models.getDefaultClosingTime)),
],
options={
'verbose_name_plural': 'futsal companies',
'ordering': ['futsal_name'],
},
),
migrations.CreateModel(
name='Ground',
fields=[
('ground_id', models.AutoField(primary_key=True, serialize=False)),
('ground_number', models.IntegerField(unique=True)),
('ground_name', models.CharField(max_length=155, unique=True)),
('futsalCompany', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.FutsalCompany')),
],
),
migrations.CreateModel(
name='Player',
fields=[
('player_id', models.AutoField(primary_key=True, serialize=False)),
('player_name', models.CharField(max_length=155)),
('player_address', models.CharField(blank=True, max_length=220, null=True)),
('player_email', models.EmailField(blank=True, max_length=254, null=True, unique=True)),
('phone_number', models.CharField(max_length=12, unique=True)),
],
),
migrations.CreateModel(
name='GroundPrice',
fields=[
('price_segment_id', models.AutoField(primary_key=True, serialize=False)),
('day_of_week', pages.myFields.DayOfTheWeekField(choices=[('1', 'Monday'), ('2', 'Tuesday'), ('3', 'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'), ('7', 'Sunday')], max_length=1)),
('start_time', models.TimeField()),
('end_time', models.TimeField()),
('price', models.DecimalField(decimal_places=2, max_digits=8)),
('ground', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.Ground')),
],
),
migrations.CreateModel(
name='Booking',
fields=[
('booking_id', models.AutoField(primary_key=True, serialize=False)),
('booking_date', models.DateField()),
('start_time', models.TimeField()),
('duration', models.DurationField(default=datetime.timedelta(seconds=3600))),
('booking_status', models.CharField(choices=[('CONFIRMED', 'CONFIRMED'), ('PLAYING', 'PLAYING'), ('COMPLETE', 'COMPLETE'), ('CANCELED', 'CANCELED')], max_length=50)),
('payment_status', models.CharField(choices=[('PENDING', 'PENDING'), ('COMPLETED', 'COMPLETED')], max_length=50)),
('ground', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.Ground')),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pages.Player')),
],
),
]
| true
| true
|
1c454f9b0ae3d0e44d374d4620851100121f28e8
| 70,561
|
py
|
Python
|
django/db/models/base.py
|
yswai/django
|
90c706e44fa1e640245ad4626f0e3ecb176acce2
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/db/models/base.py
|
yswai/django
|
90c706e44fa1e640245ad4626f0e3ecb176acce2
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
django/db/models/base.py
|
yswai/django
|
90c706e44fa1e640245ad4626f0e3ecb176acce2
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
from __future__ import unicode_literals
import copy
import inspect
import warnings
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned,
ObjectDoesNotExist, ValidationError,
)
from django.db import (
DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connections,
router, transaction,
)
from django.db.models import signals
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, Collector
from django.db.models.fields import AutoField
from django.db.models.fields.related import (
ForeignObjectRel, ManyToOneRel, OneToOneField, lazy_related_operation,
resolve_relation,
)
from django.db.models.manager import ensure_default_manager
from django.db.models.options import Options
from django.db.models.query import Q
from django.db.models.query_utils import (
DeferredAttribute, deferred_class_factory,
)
from django.db.models.utils import make_model_tuple
from django.utils import six
from django.utils.encoding import force_str, force_text
from django.utils.functional import curry
from django.utils.six.moves import zip
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext_lazy as _
from django.utils.version import get_version
def subclass_exception(name, parents, module, attached_to=None):
"""
Create exception subclass. Used by ModelBase below.
If 'attached_to' is supplied, the exception will be created in a way that
allows it to be pickled, assuming the returned exception class will be added
as an attribute to the 'attached_to' class.
"""
class_dict = {'__module__': module}
if attached_to is not None:
def __reduce__(self):
# Exceptions are special - they've got state that isn't
# in self.__dict__. We assume it is all in self.args.
return (unpickle_inner_exception, (attached_to, name), self.args)
def __setstate__(self, args):
self.args = args
class_dict['__reduce__'] = __reduce__
class_dict['__setstate__'] = __setstate__
return type(name, parents, class_dict)
class ModelBase(type):
"""
Metaclass for all models.
"""
def __new__(cls, name, bases, attrs):
super_new = super(ModelBase, cls).__new__
# Also ensure initialization is only performed for subclasses of Model
# (excluding Model class itself).
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop('__module__')
new_class = super_new(cls, name, bases, {'__module__': module})
attr_meta = attrs.pop('Meta', None)
abstract = getattr(attr_meta, 'abstract', False)
if not attr_meta:
meta = getattr(new_class, 'Meta', None)
else:
meta = attr_meta
base_meta = getattr(new_class, '_meta', None)
app_label = None
# Look for an application configuration to attach the model to.
app_config = apps.get_containing_app_config(module)
if getattr(meta, 'app_label', None) is None:
if app_config is None:
if not abstract:
raise RuntimeError(
"Model class %s.%s doesn't declare an explicit "
"app_label and isn't in an application in "
"INSTALLED_APPS." % (module, name)
)
else:
app_label = app_config.label
new_class.add_to_class('_meta', Options(meta, app_label))
if not abstract:
new_class.add_to_class(
'DoesNotExist',
subclass_exception(
str('DoesNotExist'),
tuple(
x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (ObjectDoesNotExist,),
module,
attached_to=new_class))
new_class.add_to_class(
'MultipleObjectsReturned',
subclass_exception(
str('MultipleObjectsReturned'),
tuple(
x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (MultipleObjectsReturned,),
module,
attached_to=new_class))
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, 'ordering'):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, 'get_latest_by'):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
# If the model is a proxy, ensure that the base class
# hasn't been swapped out.
if is_proxy and base_meta and base_meta.swapped:
raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped))
if getattr(new_class, '_default_manager', None):
if not is_proxy:
# Multi-table inheritance doesn't inherit default manager from
# parents.
new_class._default_manager = None
new_class._base_manager = None
else:
# Proxy classes do inherit parent's default manager, if none is
# set explicitly.
new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
new_class._base_manager = new_class._base_manager._copy_to_model(new_class)
# Add all attributes to the class.
for obj_name, obj in attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = chain(
new_class._meta.local_fields,
new_class._meta.local_many_to_many,
new_class._meta.private_fields
)
field_names = {f.name for f in new_fields}
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [kls for kls in parents if hasattr(kls, '_meta')]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError(
"Abstract base class containing model fields not "
"permitted for proxy model '%s'." % name
)
else:
continue
if base is None:
base = parent
elif parent._meta.concrete_model is not base._meta.concrete_model:
raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
if base is None:
raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Collect the parent links for multi-table inheritance.
parent_links = {}
for base in reversed([new_class] + parents):
# Conceptually equivalent to `if base is Model`.
if not hasattr(base, '_meta'):
continue
# Skip concrete parent classes.
if base != new_class and not base._meta.abstract:
continue
# Locate OneToOneField instances.
for field in base._meta.local_fields:
if isinstance(field, OneToOneField):
related = resolve_relation(new_class, field.remote_field.model)
parent_links[make_model_tuple(related)] = field
# Do the appropriate setup for any model parents.
for base in parents:
original_base = base
if not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
# Check for clashes between locally declared fields and those
# on the base classes (we cannot handle shadowed fields at the
# moment).
for field in parent_fields:
if field.name in field_names:
raise FieldError(
'Local field %r in class %r clashes '
'with field of similar name from '
'base class %r' % (field.name, name, base.__name__)
)
if not base._meta.abstract:
# Concrete classes...
base = base._meta.concrete_model
base_key = make_model_tuple(base)
if base_key in parent_links:
field = parent_links[base_key]
elif not is_proxy:
attr_name = '%s_ptr' % base._meta.model_name
field = OneToOneField(
base,
on_delete=CASCADE,
name=attr_name,
auto_created=True,
parent_link=True,
)
# Only add the ptr field if it's not already present;
# e.g. migrations will already have it specified
if not hasattr(new_class, attr_name):
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
base_parents = base._meta.parents.copy()
# .. and abstract ones.
for field in parent_fields:
new_field = copy.deepcopy(field)
new_class.add_to_class(field.name, new_field)
# Replace parent links defined on this base by the new
# field as it will be appropriately resolved if required.
if field.one_to_one:
for parent, parent_link in base_parents.items():
if field == parent_link:
base_parents[parent] = new_field
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base_parents)
# Inherit managers from the abstract base classes.
new_class.copy_managers(base._meta.abstract_managers)
# Proxy models inherit the non-abstract managers from their base,
# unless they have redefined any of them.
if is_proxy:
new_class.copy_managers(original_base._meta.concrete_managers)
# Inherit private fields (like GenericForeignKey) from the parent
# class
for field in base._meta.private_fields:
if base._meta.abstract and field.name in field_names:
raise FieldError(
'Local field %r in class %r clashes '
'with field of similar name from '
'abstract base class %r' % (field.name, name, base.__name__)
)
new_class.add_to_class(field.name, copy.deepcopy(field))
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
return new_class
def copy_managers(cls, base_managers):
# This is in-place sorting of an Options attribute, but that's fine.
base_managers.sort()
for _, mgr_name, manager in base_managers: # NOQA (redefinition of _)
val = getattr(cls, mgr_name, None)
if not val or val is manager:
new_manager = manager._copy_to_model(cls)
cls.add_to_class(mgr_name, new_manager)
def add_to_class(cls, name, value):
# We should call the contribute_to_class method only if it's bound
if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""
Creates some methods once self._meta has been populated.
"""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
# Defer creating accessors on the foreign class until it has been
# created and registered. If remote_field is None, we're ordering
# with respect to a GenericForeignKey and don't know what the
# foreign class is - we'll add those accessors later in
# contribute_to_class().
if opts.order_with_respect_to.remote_field:
wrt = opts.order_with_respect_to
remote = wrt.remote_field.model
lazy_related_operation(make_foreign_order_accessors, cls, remote)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields))
get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower)
if get_absolute_url_override:
setattr(cls, 'get_absolute_url', get_absolute_url_override)
ensure_default_manager(cls)
signals.class_prepared.send(sender=cls)
class ModelState(object):
"""
A class for storing instance state
"""
def __init__(self, db=None):
self.db = db
# If true, uniqueness validation checks will consider this a new, as-yet-unsaved object.
# Necessary for correct validation of new instances of objects with explicit (non-auto) PKs.
# This impacts validation only; it has no effect on the actual save.
self.adding = True
class Model(six.with_metaclass(ModelBase)):
_deferred = False
def __init__(self, *args, **kwargs):
signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
# There is a rather weird disparity here; if kwargs, it's set, then args
# overrides it. It should be one or the other; don't duplicate the work
# The reason for the kwargs check is that standard iterator passes in by
# args, and instantiation for iteration is 33% faster.
args_len = len(args)
if args_len > len(self._meta.concrete_fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
if not kwargs:
fields_iter = iter(self._meta.concrete_fields)
# The ordering of the zip calls matter - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
fields_iter = iter(self._meta.fields)
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
kwargs.pop(field.name, None)
# Maintain compatibility with existing calls.
if isinstance(field.remote_field, ManyToOneRel):
kwargs.pop(field.attname, None)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
is_related_object = False
# This slightly odd construct is so that we can access any
# data-descriptor object (DeferredAttribute) without triggering its
# __get__ method.
if (field.attname not in kwargs and
(isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or
field.column is None)):
# This field will be populated on request.
continue
if kwargs:
if isinstance(field.remote_field, ForeignObjectRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
# Object instance was passed in. Special case: You can
# pass in "None" for related objects if it's allowed.
if rel_obj is None and field.null:
val = None
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
# get_default() to be evaluated, and then not used.
# Refs #12057.
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
# If we are passed a related instance, set it using the
# field.name instead of field.attname (e.g. "user" instead of
# "user_id") so that the object gets properly cached (and type
# checked) by the RelatedObjectDescriptor.
setattr(self, field.name, rel_obj)
else:
setattr(self, field.attname, val)
if kwargs:
for prop in list(kwargs):
try:
if isinstance(getattr(self.__class__, prop), property):
setattr(self, prop, kwargs[prop])
del kwargs[prop]
except AttributeError:
pass
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
super(Model, self).__init__()
signals.post_init.send(sender=self.__class__, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
if cls._deferred:
new = cls(**dict(zip(field_names, values)))
else:
new = cls(*values)
new._state.adding = False
new._state.db = db
return new
def __repr__(self):
try:
u = six.text_type(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return force_str('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
if six.PY2 and hasattr(self, '__unicode__'):
return force_text(self).encode('utf-8')
return str('%s object' % self.__class__.__name__)
def __eq__(self, other):
if not isinstance(other, Model):
return False
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self._get_pk_val()
if my_pk is None:
return self is other
return my_pk == other._get_pk_val()
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
if self._get_pk_val() is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self._get_pk_val())
def __reduce__(self):
"""
Provides pickling support. Normally, this just dispatches to Python's
standard handling. However, for models with deferred field loading, we
need to do things manually, as they're dynamically created classes and
only module-level classes can be pickled by the default path.
"""
data = self.__dict__
data[DJANGO_VERSION_PICKLE_KEY] = get_version()
if not self._deferred:
class_id = self._meta.app_label, self._meta.object_name
return model_unpickle, (class_id, [], simple_class_factory), data
defers = []
for field in self._meta.fields:
if isinstance(self.__class__.__dict__.get(field.attname),
DeferredAttribute):
defers.append(field.attname)
model = self._meta.proxy_for_model
class_id = model._meta.app_label, model._meta.object_name
return (model_unpickle, (class_id, defers, deferred_class_factory), data)
def __setstate__(self, state):
msg = None
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
current_version = get_version()
if current_version != pickled_version:
msg = (
"Pickled model instance's Django version %s does not match "
"the current version %s." % (pickled_version, current_version)
)
else:
msg = "Pickled model instance's Django version is not specified."
if msg:
warnings.warn(msg, RuntimeWarning, stacklevel=2)
self.__dict__.update(state)
def _get_pk_val(self, meta=None):
if not meta:
meta = self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def get_deferred_fields(self):
"""
Returns a set containing names of deferred fields for this instance.
"""
return {
f.attname for f in self._meta.concrete_fields
if isinstance(self.__class__.__dict__.get(f.attname), DeferredAttribute)
}
def refresh_from_db(self, using=None, fields=None, **kwargs):
"""
Reloads field values from the database.
By default, the reloading happens from the database this instance was
loaded from, or by the read router if this instance wasn't loaded from
any database. The using parameter will override the default.
Fields can be used to specify which fields to reload. The fields
should be an iterable of field attnames. If fields is None, then
all non-deferred fields are reloaded.
When accessing deferred fields of an instance, the deferred loading
of the field will call this method.
"""
if fields is not None:
if len(fields) == 0:
return
if any(LOOKUP_SEP in f for f in fields):
raise ValueError(
'Found "%s" in fields argument. Relations and transforms '
'are not allowed in fields.' % LOOKUP_SEP)
db = using if using is not None else self._state.db
if self._deferred:
non_deferred_model = self._meta.proxy_for_model
else:
non_deferred_model = self.__class__
db_instance_qs = non_deferred_model._default_manager.using(db).filter(pk=self.pk)
# Use provided fields, if not set then reload all non-deferred fields.
if fields is not None:
fields = list(fields)
db_instance_qs = db_instance_qs.only(*fields)
elif self._deferred:
deferred_fields = self.get_deferred_fields()
fields = [f.attname for f in self._meta.concrete_fields
if f.attname not in deferred_fields]
db_instance_qs = db_instance_qs.only(*fields)
db_instance = db_instance_qs.get()
non_loaded_fields = db_instance.get_deferred_fields()
for field in self._meta.concrete_fields:
if field.attname in non_loaded_fields:
# This field wasn't refreshed - skip ahead.
continue
setattr(self, field.attname, getattr(db_instance, field.attname))
# Throw away stale foreign key references.
if field.is_relation and field.get_cache_name() in self.__dict__:
rel_instance = getattr(self, field.get_cache_name())
local_val = getattr(db_instance, field.attname)
related_val = None if rel_instance is None else getattr(rel_instance, field.target_field.attname)
if local_val != related_val or (local_val is None and related_val is None):
del self.__dict__[field.get_cache_name()]
self._state.db = db_instance._state.db
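    # Illustrative usage (not part of Django's source): reloading a subset
    # of fields issues a query for just those columns, e.g.
    #     obj = SomeModel.objects.get(pk=1)      # hypothetical model
    #     obj.name = 'stale local edit'
    #     obj.refresh_from_db(fields=['name'])   # local change is discarded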
def serializable_value(self, field_name):
"""
Returns the value of the field name for this instance. If the field is
a foreign key, returns the id value, instead of the object. If there's
no Field object with this name on the model, the model attribute's
value is returned directly.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
"""
Saves the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
"""
# Ensure that a model instance without a PK hasn't been assigned to
# a ForeignKey or OneToOneField on this model. If the field is
# nullable, allowing the save() would result in silent data loss.
for field in self._meta.concrete_fields:
if field.is_relation:
# If the related field isn't cached, then an instance hasn't
# been assigned and there's no need to worry about this check.
try:
getattr(self, field.get_cache_name())
except AttributeError:
continue
obj = getattr(self, field.name, None)
# A pk may have been assigned manually to a model instance not
# saved to the database (or auto-generated in a case like
# UUIDField), but we allow the save to proceed and rely on the
# database to raise an IntegrityError if applicable. If
# constraints aren't supported by the database, there's the
# unavoidable risk of data corruption.
if obj and obj.pk is None:
# Remove the object from a related instance cache.
if not field.remote_field.multiple:
delattr(obj, field.remote_field.get_cache_name())
raise ValueError(
"save() prohibited to prevent data loss due to "
"unsaved related object '%s'." % field.name
)
using = using or router.db_for_write(self.__class__, instance=self)
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
if update_fields is not None:
# If update_fields is empty, skip the save. We do also check for
# no-op saves later on for inheritance cases. This bailout is
# still needed for skipping signal sending.
if len(update_fields) == 0:
return
update_fields = frozenset(update_fields)
field_names = set()
for field in self._meta.fields:
if not field.primary_key:
field_names.add(field.name)
if field.name != field.attname:
field_names.add(field.attname)
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError("The following fields do not exist in this "
"model or are m2m fields: %s"
% ', '.join(non_model_fields))
# If saving to the same database, and this model is deferred, then
# automatically do a "update_fields" save on the loaded fields.
elif not force_insert and self._deferred and using == self._state.db:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, 'through'):
field_names.add(field.attname)
deferred_fields = [
f.attname for f in self._meta.fields
if (f.attname not in self.__dict__ and
isinstance(self.__class__.__dict__[f.attname], DeferredAttribute))
]
loaded_fields = field_names.difference(deferred_fields)
if loaded_fields:
update_fields = frozenset(loaded_fields)
self.save_base(using=using, force_insert=force_insert,
force_update=force_update, update_fields=update_fields)
save.alters_data = True
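    # Illustrative usage (not part of Django's source):
    #     article.save(update_fields=['title'])  # UPDATE only the title column
    #     article.save(force_insert=True)        # always INSERT, never UPDATE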
def save_base(self, raw=False, force_insert=False,
force_update=False, using=None, update_fields=None):
"""
Handles the parts of saving which should be done only once per save,
yet need to be done in raw saves, too. This includes some sanity
checks and signal sending.
The 'raw' argument is telling save_base not to save any parent
models and not to do any changes to the values before save. This
is used by fixture loading.
"""
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or len(update_fields) > 0
cls = origin = self.__class__
# Skip proxies, but keep the origin as the proxy model.
if cls._meta.proxy:
cls = cls._meta.concrete_model
meta = cls._meta
if not meta.auto_created:
signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
update_fields=update_fields)
with transaction.atomic(using=using, savepoint=False):
if not raw:
self._save_parents(cls, using, update_fields)
updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if not meta.auto_created:
signals.post_save.send(sender=origin, instance=self, created=(not updated),
update_fields=update_fields, raw=raw, using=using)
save_base.alters_data = True
def _save_parents(self, cls, using, update_fields):
"""
Saves all the parents of cls using values from self.
"""
meta = cls._meta
for parent, field in meta.parents.items():
# Make sure the link fields are synced between parent and self.
if (field and getattr(self, parent._meta.pk.attname) is None and
getattr(self, field.attname) is not None):
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
self._save_parents(cls=parent, using=using, update_fields=update_fields)
self._save_table(cls=parent, using=using, update_fields=update_fields)
# Set the parent's PK value to self.
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
# Since we didn't have an instance of the parent handy set
# attname directly, bypassing the descriptor. Invalidate
# the related object cache, in case it's been accidentally
# populated. A fresh instance will be re-built from the
# database if necessary.
cache_name = field.get_cache_name()
if hasattr(self, cache_name):
delattr(self, cache_name)
def _save_table(self, raw=False, cls=None, force_insert=False,
force_update=False, using=None, update_fields=None):
"""
Does the heavy-lifting involved in saving. Updates or inserts the data
for a single table.
"""
meta = cls._meta
non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
if update_fields:
non_pks = [f for f in non_pks
if f.name in update_fields or f.attname in update_fields]
pk_val = self._get_pk_val(meta)
if pk_val is None:
pk_val = meta.pk.get_pk_value_on_save(self)
setattr(self, meta.pk.attname, pk_val)
pk_set = pk_val is not None
if not pk_set and (force_update or update_fields):
raise ValueError("Cannot force an update in save() with no primary key.")
updated = False
# If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False)))
for f in non_pks]
forced_update = update_fields or force_update
updated = self._do_update(base_qs, using, pk_val, values, update_fields,
forced_update)
if force_update and not updated:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not updated:
raise DatabaseError("Save with update_fields did not affect any rows.")
if not updated:
if meta.order_with_respect_to:
# If this is a model with an order_with_respect_to
# autopopulate the _order field
field = meta.order_with_respect_to
filter_args = field.get_filter_kwargs_for_object(self)
order_value = cls._base_manager.using(using).filter(**filter_args).count()
self._order = order_value
fields = meta.local_concrete_fields
if not pk_set:
fields = [f for f in fields if not isinstance(f, AutoField)]
update_pk = bool(meta.has_auto_field and not pk_set)
result = self._do_insert(cls._base_manager, using, fields, update_pk, raw)
if update_pk:
setattr(self, meta.pk.attname, result)
return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
"""
This method will try to update the model. If the model was updated (in
the sense that an update query was done and a matching row was found
from the DB) the method will return True.
"""
filtered = base_qs.filter(pk=pk_val)
if not values:
# We can end up here when saving a model in inheritance chain where
# update_fields doesn't target any field in current model. In that
# case we just say the update succeeded. Another case ending up here
# is a model with just PK - in that case check that the PK still
# exists.
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
if filtered.exists():
# It may happen that the object is deleted from the DB right after
# this check, causing the subsequent UPDATE to return zero matching
# rows. The same result can occur in some rare cases when the
# database returns zero despite the UPDATE being executed
# successfully (a row is matched and updated). In order to
# distinguish these two cases, the object's existence in the
# database is again checked for if the UPDATE query returns 0.
return filtered._update(values) > 0 or filtered.exists()
else:
return False
return filtered._update(values) > 0
def _do_insert(self, manager, using, fields, update_pk, raw):
"""
Do an INSERT. If update_pk is defined then this method should return
the new pk for the model.
"""
return manager._insert([self], fields=fields, return_id=update_pk,
using=using, raw=raw)
def delete(self, using=None, keep_parents=False):
using = using or router.db_for_write(self.__class__, instance=self)
assert self._get_pk_val() is not None, (
"%s object can't be deleted because its %s attribute is set to None." %
(self._meta.object_name, self._meta.pk.attname)
)
collector = Collector(using=using)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_text(dict(field.flatchoices).get(value, value), strings_only=True)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
op = 'gt' if is_next else 'lt'
order = '' if is_next else '-'
param = force_text(getattr(self, field.attname))
q = Q(**{'%s__%s' % (field.name, op): param})
q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by(
'%s%s' % (order, field.name), '%spk' % order
)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
op = 'gt' if is_next else 'lt'
order = '_order' if is_next else '-_order'
order_field = self._meta.order_with_respect_to
filter_args = order_field.get_filter_kwargs_for_object(self)
obj = self._default_manager.filter(**filter_args).filter(**{
'_order__%s' % op: self._default_manager.values('_order').filter(**{
self._meta.pk.name: self.pk
})
}).order_by(order)[:1].get()
setattr(self, cachename, obj)
return getattr(self, cachename)
def prepare_database_save(self, field):
if self.pk is None:
raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
return getattr(self, field.remote_field.get_related_field().attname)
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
"""
pass
def validate_unique(self, exclude=None):
"""
Checks unique constraints on the model and raises ``ValidationError``
if any failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None):
"""
Gather a list of checks to perform. Since validate_unique could be
called from a ModelForm, some fields may have been excluded; we can't
perform a unique check on a model that is missing fields involved
in that check.
Fields that did not validate should also be excluded, but they need
to be passed in via the exclude argument.
"""
if exclude is None:
exclude = []
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.unique_together:
unique_togethers.append((parent_class, parent_class._meta.unique_together))
for model_class, unique_together in unique_togethers:
for check in unique_together:
for name in check:
# If this is an excluded field, don't add this check.
if name in exclude:
break
else:
unique_checks.append((model_class, tuple(check)))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.get_parent_list():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, 'date', name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, 'year', name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, 'month', name, f.unique_for_month))
return unique_checks, date_checks
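# Illustrative sketch of the return value. For a hypothetical Article model
# whose slug is unique and also declared unique_for_date='pub_date', this
# returns roughly:
#
#     unique_checks == [(Article, ('id',)), (Article, ('slug',))]
#     date_checks   == [(Article, 'date', 'slug', 'pub_date')]
#
# (the pk appears because AutoField is unique; unique_together tuples are
# appended as further (model_class, fields) pairs)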
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
# Try to look up an existing object with the same values as this
# object's values for all the unique fields.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
if lookup_value is None:
# no value, skip the lookup
continue
if f.primary_key and not self._state.adding:
# no need to check for unique primary key when editing
continue
lookup_kwargs[str(field_name)] = lookup_value
# some fields were skipped, no reason to do the check
if len(unique_check) != len(lookup_kwargs):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
# Note that we need to use the pk as defined by model_class, not
# self.pk. These can be different fields because model inheritance
# allows single model to have effectively multiple primary keys.
# Refs #17615.
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
# There's a ticket to add a date lookup; we can remove this special
# case if that makes its way in.
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == 'date':
lookup_kwargs['%s__day' % unique_for] = date.day
lookup_kwargs['%s__month' % unique_for] = date.month
lookup_kwargs['%s__year' % unique_for] = date.year
else:
lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
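# Illustrative sketch: for a hypothetical field declared as
#
#     slug = models.SlugField(unique_for_date='pub_date')
#
# a 'date' check builds lookup kwargs equivalent to
#
#     {'pub_date__day': d.day, 'pub_date__month': d.month,
#      'pub_date__year': d.year, 'slug': value}
#
# i.e. the slug only has to be unique among rows sharing the same calendar day.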
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages['unique_for_date'],
code='unique_for_date',
params={
'model': self,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'lookup_type': lookup_type,
'field': field_name,
'field_label': six.text_type(capfirst(field.verbose_name)),
'date_field': unique_for,
'date_field_label': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
}
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
'model': self,
'model_class': model_class,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'unique_check': unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params['field_label'] = six.text_type(capfirst(field.verbose_name))
return ValidationError(
message=field.error_messages['unique'],
code='unique',
params=params,
)
# unique_together
else:
field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
params['field_labels'] = six.text_type(get_text_list(field_labels, _('and')))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code='unique_together',
params=params,
)
def full_clean(self, exclude=None, validate_unique=True):
"""
Calls clean_fields, clean, and validate_unique, on the model,
and raises a ``ValidationError`` for any errors that occurred.
"""
errors = {}
if exclude is None:
exclude = []
else:
exclude = list(exclude)
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
if validate_unique:
for name in errors.keys():
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.append(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
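# Illustrative usage sketch: full_clean() is not invoked automatically by
# save(); ModelForm validation calls it, and manual code can do the same.
# Assuming a hypothetical Article instance:
#
#     article = Article(title='Hello')
#     try:
#         article.full_clean(exclude=['slug'])
#     except ValidationError as e:
#         errors = e.message_dict
#     else:
#         article.save()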
def clean_fields(self, exclude=None):
"""
Cleans all fields and raises a ValidationError containing a dict
of all validation errors if any occur.
"""
if exclude is None:
exclude = []
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
# Skip validation for empty fields with blank=True. The developer
# is responsible for making sure they have a valid value.
raw_value = getattr(self, f.attname)
if f.blank and raw_value in f.empty_values:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.error_list
if errors:
raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = []
errors.extend(cls._check_swappable())
errors.extend(cls._check_model())
errors.extend(cls._check_managers(**kwargs))
if not cls._meta.swapped:
errors.extend(cls._check_fields(**kwargs))
errors.extend(cls._check_m2m_through_same_relationship())
errors.extend(cls._check_long_column_names())
clash_errors = cls._check_id_field() + cls._check_field_name_clashes()
errors.extend(clash_errors)
# If there are field name clashes, hide consequent column name
# clashes.
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors.extend(cls._check_index_together())
errors.extend(cls._check_unique_together())
errors.extend(cls._check_ordering())
return errors
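# Illustrative usage: this classmethod is what the system check framework
# (and `python manage.py check`) runs for every registered model. It can
# also be invoked directly, e.g. for a hypothetical Article model:
#
#     for error in Article.check():
#         print(error.id, error.msg)  # e.g. models.E015 for a bad 'ordering'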
@classmethod
def _check_swappable(cls):
""" Check if the swapped model exists. """
errors = []
if cls._meta.swapped:
try:
apps.get_model(cls._meta.swapped)
except ValueError:
errors.append(
checks.Error(
"'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable,
id='models.E001',
)
)
except LookupError:
app_label, model_name = cls._meta.swapped.split('.')
errors.append(
checks.Error(
"'%s' references '%s.%s', which has not been "
"installed, or is abstract." % (
cls._meta.swappable, app_label, model_name
),
id='models.E002',
)
)
return errors
@classmethod
def _check_model(cls):
errors = []
if cls._meta.proxy:
if cls._meta.local_fields or cls._meta.local_many_to_many:
errors.append(
checks.Error(
"Proxy model '%s' contains model fields." % cls.__name__,
id='models.E017',
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
""" Perform all manager checks. """
errors = []
for __, manager, __ in cls._meta.managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
""" Perform all field checks. """
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
""" Check if no relationship model is used by more than one m2m field.
"""
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
# Skip when the target model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
for f in fields:
signature = (f.remote_field.model, cls, f.remote_field.through)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
"The model has two many-to-many relations through "
"the intermediate model '%s'." % f.remote_field.through._meta.label,
obj=cls,
id='models.E003',
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
""" Check if `id` field is a primary key. """
fields = list(f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk)
# fields is empty or consists of the invalid "id" field
if fields and not fields[0].primary_key and cls._meta.pk.name == 'id':
return [
checks.Error(
"'id' can only be used as a field name if the field also "
"sets 'primary_key=True'.",
obj=cls,
id='models.E004',
)
]
else:
return []
@classmethod
def _check_field_name_clashes(cls):
""" Ref #17673. """
errors = []
used_fields = {} # name or attname -> field
# Check that multi-inheritance doesn't cause field name shadowing.
for parent in cls._meta.get_parent_list():
for f in parent._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
if clash:
errors.append(
checks.Error(
"The field '%s' from parent model "
"'%s' clashes with the field '%s' "
"from parent model '%s'." % (
clash.name, clash.model._meta,
f.name, f.model._meta
),
obj=cls,
id='models.E005',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
# Check that fields defined in the model don't clash with fields from
# parents, including auto-generated fields like multi-table inheritance
# child accessors.
for parent in cls._meta.get_parent_list():
for f in parent._meta.get_fields():
if f not in used_fields:
used_fields[f.name] = f
for f in cls._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
# Note that we may detect clash between user-defined non-unique
# field "id" and automatically added unique field "id", both
# defined at the same model. This special case is considered in
# _check_id_field and here we ignore it.
id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls
if clash and not id_conflict:
errors.append(
checks.Error(
"The field '%s' clashes with the field '%s' "
"from model '%s'." % (
f.name, clash.name, clash.model._meta
),
obj=f,
id='models.E006',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
return errors
@classmethod
def _check_column_name_clashes(cls):
# Store a list of column names which have already been used by other fields.
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Ensure the column name is not already in use.
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
"Field '%s' has column name '%s' that is used by "
"another field." % (f.name, column_name),
hint="Specify a 'db_column' for the field.",
obj=cls,
id='models.E007'
)
)
else:
used_column_names.append(column_name)
return errors
@classmethod
def _check_index_together(cls):
""" Check the value of "index_together" option. """
if not isinstance(cls._meta.index_together, (tuple, list)):
return [
checks.Error(
"'index_together' must be a list or tuple.",
obj=cls,
id='models.E008',
)
]
elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together):
return [
checks.Error(
"All 'index_together' elements must be lists or tuples.",
obj=cls,
id='models.E009',
)
]
else:
errors = []
for fields in cls._meta.index_together:
errors.extend(cls._check_local_fields(fields, "index_together"))
return errors
@classmethod
def _check_unique_together(cls):
""" Check the value of "unique_together" option. """
if not isinstance(cls._meta.unique_together, (tuple, list)):
return [
checks.Error(
"'unique_together' must be a list or tuple.",
obj=cls,
id='models.E010',
)
]
elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together):
return [
checks.Error(
"All 'unique_together' elements must be lists or tuples.",
obj=cls,
id='models.E011',
)
]
else:
errors = []
for fields in cls._meta.unique_together:
errors.extend(cls._check_local_fields(fields, "unique_together"))
return errors
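# Illustrative sketch of the option being validated. A well-formed
# hypothetical declaration:
#
#     class Meta:
#         unique_together = [('driver', 'restaurant')]
#
# whereas e.g. unique_together = 'driver' (a plain string) triggers
# models.E010, and a list containing a plain string triggers models.E011.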
@classmethod
def _check_local_fields(cls, fields, option):
from django.db import models
# In order to avoid hitting the relation tree prematurely, we use our
# own fields_map instead of using get_field()
forward_fields_map = {
field.name: field for field in cls._meta._get_fields(reverse=False)
}
errors = []
for field_name in fields:
try:
field = forward_fields_map[field_name]
except KeyError:
errors.append(
checks.Error(
"'%s' refers to the non-existent field '%s'." % (
option, field_name,
),
obj=cls,
id='models.E012',
)
)
else:
if isinstance(field.remote_field, models.ManyToManyRel):
errors.append(
checks.Error(
"'%s' refers to a ManyToManyField '%s', but "
"ManyToManyFields are not permitted in '%s'." % (
option, field_name, option,
),
obj=cls,
id='models.E013',
)
)
elif field not in cls._meta.local_fields:
errors.append(
checks.Error(
"'%s' refers to field '%s' which is not local to model '%s'."
% (option, field_name, cls._meta.object_name),
hint="This issue may be caused by multi-table inheritance.",
obj=cls,
id='models.E016',
)
)
return errors
@classmethod
def _check_ordering(cls):
""" Check "ordering" option -- is it a list of strings and do all fields
exist? """
if cls._meta._ordering_clash:
return [
checks.Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=cls,
id='models.E021',
),
]
if cls._meta.order_with_respect_to or not cls._meta.ordering:
return []
if not isinstance(cls._meta.ordering, (list, tuple)):
return [
checks.Error(
"'ordering' must be a tuple or list (even if you want to order by only one field).",
obj=cls,
id='models.E014',
)
]
errors = []
fields = cls._meta.ordering
# Skip '?' fields.
fields = (f for f in fields if f != '?')
# Convert "-field" to "field".
fields = ((f[1:] if f.startswith('-') else f) for f in fields)
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
fields = (f for f in fields if '__' not in f)
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
fields = {f for f in fields if f != 'pk'}
# Check for invalid or non-existent fields in ordering.
invalid_fields = []
# Any field name that is not present in field_names does not exist.
# Also, ordering by m2m fields is not allowed.
opts = cls._meta
valid_fields = set(chain.from_iterable(
(f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),)
for f in chain(opts.fields, opts.related_objects)
))
invalid_fields.extend(fields - valid_fields)
for invalid_field in invalid_fields:
errors.append(
checks.Error(
"'ordering' refers to the non-existent field '%s'." % invalid_field,
obj=cls,
id='models.E015',
)
)
return errors
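# Illustrative sketch of what passes this check. Assuming a hypothetical
# Article model:
#
#     class Meta:
#         ordering = ['-pub_date', 'author__name', '?', 'pk']
#
# '-' means descending and '?' means random; '?', 'pk', and related paths
# such as 'author__name' are deliberately skipped above rather than resolved.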
@classmethod
def _check_long_column_names(cls):
"""
Check that any auto-generated column names are shorter than the limits
for each database in which the model will be created.
"""
errors = []
allowed_len = None
db_alias = None
# Find the minimum max allowed length among all specified db_aliases.
for db in settings.DATABASES.keys():
# skip databases where the model won't be created
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is None or connection.features.truncates_names:
continue
else:
if allowed_len is None:
allowed_len = max_name_length
db_alias = db
elif max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
if allowed_len is None:
return errors
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Check if auto-generated name for the field is too long
# for the database.
if f.db_column is None and column_name is not None and len(column_name) > allowed_len:
errors.append(
checks.Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (column_name, allowed_len, db_alias),
hint="Set the column name manually using 'db_column'.",
obj=cls,
id='models.E018',
)
)
for f in cls._meta.local_many_to_many:
# Check if auto-generated name for the M2M field is too long
# for the database.
for m2m in f.remote_field.through._meta.local_fields:
_, rel_name = m2m.get_attname_column()
if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len:
errors.append(
checks.Error(
'Autogenerated column name too long for M2M field '
'"%s". Maximum length is "%s" for database "%s".'
% (rel_name, allowed_len, db_alias),
hint=(
"Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'."
),
obj=cls,
id='models.E019',
)
)
return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(ordered_obj, self, id_list, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
# FIXME: It would be nice if there was an "update many" version of update
# for situations like this.
with transaction.atomic(using=using, savepoint=False):
for i, j in enumerate(id_list):
ordered_obj.objects.filter(pk=j, **filter_args).update(_order=i)
def method_get_order(ordered_obj, self):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
pk_name = ordered_obj._meta.pk.name
return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True)
def make_foreign_order_accessors(model, related_model):
setattr(
related_model,
'get_%s_order' % model.__name__.lower(),
curry(method_get_order, model)
)
setattr(
related_model,
'set_%s_order' % model.__name__.lower(),
curry(method_set_order, model)
)
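# Illustrative usage: these accessors land on the *related* model when
# Meta.order_with_respect_to is used. Assuming hypothetical Question/Answer
# models with Answer.Meta.order_with_respect_to = 'question':
#
#     question.get_answer_order()           # e.g. [3, 1, 2] (Answer pks)
#     question.set_answer_order([1, 2, 3])  # rewrites _order atomically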
########
# MISC #
########
def simple_class_factory(model, attrs):
"""
Trivial factory used when unpickling models without deferred fields; returns the model class unchanged.
"""
return model
def model_unpickle(model_id, attrs, factory):
"""
Used to unpickle Model subclasses with deferred fields.
"""
if isinstance(model_id, tuple):
model = apps.get_model(*model_id)
else:
# Backwards compat - the model was cached directly in earlier versions.
model = model_id
cls = factory(model, attrs)
return cls.__new__(cls)
model_unpickle.__safe_for_unpickle__ = True
def unpickle_inner_exception(klass, exception_name):
# Get the exception class from the class it is attached to:
exception = getattr(klass, exception_name)
return exception.__new__(exception)
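# Illustrative round-trip sketch: unpickle_inner_exception pairs with the
# __reduce__ hook installed by subclass_exception(), so per-model exception
# classes survive pickling. Assuming a hypothetical Article model:
#
#     import pickle
#     err = Article.DoesNotExist('gone')
#     restored = pickle.loads(pickle.dumps(err))
#     assert isinstance(restored, Article.DoesNotExist)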
from __future__ import unicode_literals
import copy
import inspect
import warnings
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned,
ObjectDoesNotExist, ValidationError,
)
from django.db import (
DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connections,
router, transaction,
)
from django.db.models import signals
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, Collector
from django.db.models.fields import AutoField
from django.db.models.fields.related import (
ForeignObjectRel, ManyToOneRel, OneToOneField, lazy_related_operation,
resolve_relation,
)
from django.db.models.manager import ensure_default_manager
from django.db.models.options import Options
from django.db.models.query import Q
from django.db.models.query_utils import (
DeferredAttribute, deferred_class_factory,
)
from django.db.models.utils import make_model_tuple
from django.utils import six
from django.utils.encoding import force_str, force_text
from django.utils.functional import curry
from django.utils.six.moves import zip
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext_lazy as _
from django.utils.version import get_version
def subclass_exception(name, parents, module, attached_to=None):
class_dict = {'__module__': module}
if attached_to is not None:
def __reduce__(self):
return (unpickle_inner_exception, (attached_to, name), self.args)
def __setstate__(self, args):
self.args = args
class_dict['__reduce__'] = __reduce__
class_dict['__setstate__'] = __setstate__
return type(name, parents, class_dict)
class ModelBase(type):
def __new__(cls, name, bases, attrs):
super_new = super(ModelBase, cls).__new__
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
return super_new(cls, name, bases, attrs)
module = attrs.pop('__module__')
new_class = super_new(cls, name, bases, {'__module__': module})
attr_meta = attrs.pop('Meta', None)
abstract = getattr(attr_meta, 'abstract', False)
if not attr_meta:
meta = getattr(new_class, 'Meta', None)
else:
meta = attr_meta
base_meta = getattr(new_class, '_meta', None)
app_label = None
app_config = apps.get_containing_app_config(module)
if getattr(meta, 'app_label', None) is None:
if app_config is None:
if not abstract:
raise RuntimeError(
"Model class %s.%s doesn't declare an explicit "
"app_label and isn't in an application in "
"INSTALLED_APPS." % (module, name)
)
else:
app_label = app_config.label
new_class.add_to_class('_meta', Options(meta, app_label))
if not abstract:
new_class.add_to_class(
'DoesNotExist',
subclass_exception(
str('DoesNotExist'),
tuple(
x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (ObjectDoesNotExist,),
module,
attached_to=new_class))
new_class.add_to_class(
'MultipleObjectsReturned',
subclass_exception(
str('MultipleObjectsReturned'),
tuple(
x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (MultipleObjectsReturned,),
module,
attached_to=new_class))
if base_meta and not base_meta.abstract:
if not hasattr(meta, 'ordering'):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, 'get_latest_by'):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
if is_proxy and base_meta and base_meta.swapped:
raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped))
if getattr(new_class, '_default_manager', None):
if not is_proxy:
# Multi-table inheritance doesn't inherit default manager from
new_class._default_manager = None
new_class._base_manager = None
else:
# set explicitly.
new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
new_class._base_manager = new_class._base_manager._copy_to_model(new_class)
# Add all attributes to the class.
for obj_name, obj in attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = chain(
new_class._meta.local_fields,
new_class._meta.local_many_to_many,
new_class._meta.private_fields
)
field_names = {f.name for f in new_fields}
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [kls for kls in parents if hasattr(kls, '_meta')]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError(
"Abstract base class containing model fields not "
"permitted for proxy model '%s'." % name
)
else:
continue
if base is None:
base = parent
elif parent._meta.concrete_model is not base._meta.concrete_model:
raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
if base is None:
raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Collect the parent links for multi-table inheritance.
parent_links = {}
for base in reversed([new_class] + parents):
# Conceptually equivalent to `if base is Model`.
if not hasattr(base, '_meta'):
continue
# Skip concrete parent classes.
if base != new_class and not base._meta.abstract:
continue
# Locate OneToOneField instances.
for field in base._meta.local_fields:
if isinstance(field, OneToOneField):
related = resolve_relation(new_class, field.remote_field.model)
parent_links[make_model_tuple(related)] = field
# Do the appropriate setup for any model parents.
for base in parents:
original_base = base
if not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
# Check for clashes between locally declared fields and those
# on the base classes (we cannot handle shadowed fields at the
# moment).
for field in parent_fields:
if field.name in field_names:
raise FieldError(
'Local field %r in class %r clashes '
'with field of similar name from '
'base class %r' % (field.name, name, base.__name__)
)
if not base._meta.abstract:
# Concrete classes...
base = base._meta.concrete_model
base_key = make_model_tuple(base)
if base_key in parent_links:
field = parent_links[base_key]
elif not is_proxy:
attr_name = '%s_ptr' % base._meta.model_name
field = OneToOneField(
base,
on_delete=CASCADE,
name=attr_name,
auto_created=True,
parent_link=True,
)
# Only add the ptr field if it's not already present;
if not hasattr(new_class, attr_name):
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
base_parents = base._meta.parents.copy()
for field in parent_fields:
new_field = copy.deepcopy(field)
new_class.add_to_class(field.name, new_field)
if field.one_to_one:
for parent, parent_link in base_parents.items():
if field == parent_link:
base_parents[parent] = new_field
new_class._meta.parents.update(base_parents)
new_class.copy_managers(base._meta.abstract_managers)
if is_proxy:
new_class.copy_managers(original_base._meta.concrete_managers)
for field in base._meta.private_fields:
if base._meta.abstract and field.name in field_names:
raise FieldError(
'Local field %r in class %r clashes '
'with field of similar name from '
'abstract base class %r' % (field.name, name, base.__name__)
)
new_class.add_to_class(field.name, copy.deepcopy(field))
if abstract:
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
return new_class
def copy_managers(cls, base_managers):
base_managers.sort()
for _, mgr_name, manager in base_managers: # NOQA (redefinition of _)
val = getattr(cls, mgr_name, None)
if not val or val is manager:
new_manager = manager._copy_to_model(cls)
cls.add_to_class(mgr_name, new_manager)
def add_to_class(cls, name, value):
# We should call the contribute_to_class method only if it's bound
if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
# with respect to a GenericForeignKey and don't know what the
# contribute_to_class().
if opts.order_with_respect_to.remote_field:
wrt = opts.order_with_respect_to
remote = wrt.remote_field.model
lazy_related_operation(make_foreign_order_accessors, cls, remote)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields))
get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower)
if get_absolute_url_override:
setattr(cls, 'get_absolute_url', get_absolute_url_override)
ensure_default_manager(cls)
signals.class_prepared.send(sender=cls)
class ModelState(object):
def __init__(self, db=None):
self.db = db
# If true, uniqueness validation checks will consider this a new, as-yet-unsaved object.
# Necessary for correct validation of new instances of objects with explicit (non-auto) PKs.
# This impacts validation only; it has no effect on the actual save.
self.adding = True
class Model(six.with_metaclass(ModelBase)):
_deferred = False
def __init__(self, *args, **kwargs):
signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
# There is a rather weird disparity here; if kwargs, it's set, then args
# The reason for the kwargs check is that standard iterator passes in by
# args, and instantiation for iteration is 33% faster.
args_len = len(args)
if args_len > len(self._meta.concrete_fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
if not kwargs:
fields_iter = iter(self._meta.concrete_fields)
# The ordering of the zip calls matter - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
else:
fields_iter = iter(self._meta.fields)
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
kwargs.pop(field.name, None)
if isinstance(field.remote_field, ManyToOneRel):
kwargs.pop(field.attname, None)
# keywords, or default.
for field in fields_iter:
is_related_object = False
# This slightly odd construct is so that we can access any
# data-descriptor object (DeferredAttribute) without triggering its
# __get__ method.
if (field.attname not in kwargs and
(isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or
field.column is None)):
# This field will be populated on request.
continue
if kwargs:
if isinstance(field.remote_field, ForeignObjectRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
if rel_obj is None and field.null:
val = None
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
setattr(self, field.name, rel_obj)
else:
setattr(self, field.attname, val)
if kwargs:
for prop in list(kwargs):
try:
if isinstance(getattr(self.__class__, prop), property):
setattr(self, prop, kwargs[prop])
del kwargs[prop]
except AttributeError:
pass
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
super(Model, self).__init__()
signals.post_init.send(sender=self.__class__, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
if cls._deferred:
new = cls(**dict(zip(field_names, values)))
else:
new = cls(*values)
new._state.adding = False
new._state.db = db
return new
def __repr__(self):
try:
u = six.text_type(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return force_str('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
if six.PY2 and hasattr(self, '__unicode__'):
return force_text(self).encode('utf-8')
return str('%s object' % self.__class__.__name__)
def __eq__(self, other):
if not isinstance(other, Model):
return False
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self._get_pk_val()
if my_pk is None:
return self is other
return my_pk == other._get_pk_val()
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
if self._get_pk_val() is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self._get_pk_val())
def __reduce__(self):
data = self.__dict__
data[DJANGO_VERSION_PICKLE_KEY] = get_version()
if not self._deferred:
class_id = self._meta.app_label, self._meta.object_name
return model_unpickle, (class_id, [], simple_class_factory), data
defers = []
for field in self._meta.fields:
if isinstance(self.__class__.__dict__.get(field.attname),
DeferredAttribute):
defers.append(field.attname)
model = self._meta.proxy_for_model
class_id = model._meta.app_label, model._meta.object_name
return (model_unpickle, (class_id, defers, deferred_class_factory), data)
def __setstate__(self, state):
msg = None
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
current_version = get_version()
if current_version != pickled_version:
msg = (
"Pickled model instance's Django version %s does not match "
"the current version %s." % (pickled_version, current_version)
)
else:
msg = "Pickled model instance's Django version is not specified."
if msg:
warnings.warn(msg, RuntimeWarning, stacklevel=2)
self.__dict__.update(state)
def _get_pk_val(self, meta=None):
if not meta:
meta = self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def get_deferred_fields(self):
return {
f.attname for f in self._meta.concrete_fields
if isinstance(self.__class__.__dict__.get(f.attname), DeferredAttribute)
}
def refresh_from_db(self, using=None, fields=None, **kwargs):
if fields is not None:
if len(fields) == 0:
return
if any(LOOKUP_SEP in f for f in fields):
raise ValueError(
'Found "%s" in fields argument. Relations and transforms '
'are not allowed in fields.' % LOOKUP_SEP)
db = using if using is not None else self._state.db
if self._deferred:
non_deferred_model = self._meta.proxy_for_model
else:
non_deferred_model = self.__class__
db_instance_qs = non_deferred_model._default_manager.using(db).filter(pk=self.pk)
if fields is not None:
fields = list(fields)
db_instance_qs = db_instance_qs.only(*fields)
elif self._deferred:
deferred_fields = self.get_deferred_fields()
fields = [f.attname for f in self._meta.concrete_fields
if f.attname not in deferred_fields]
db_instance_qs = db_instance_qs.only(*fields)
db_instance = db_instance_qs.get()
non_loaded_fields = db_instance.get_deferred_fields()
for field in self._meta.concrete_fields:
if field.attname in non_loaded_fields:
continue
setattr(self, field.attname, getattr(db_instance, field.attname))
# Throw away stale foreign key references.
if field.is_relation and field.get_cache_name() in self.__dict__:
rel_instance = getattr(self, field.get_cache_name())
local_val = getattr(db_instance, field.attname)
related_val = None if rel_instance is None else getattr(rel_instance, field.target_field.attname)
if local_val != related_val or (local_val is None and related_val is None):
del self.__dict__[field.get_cache_name()]
self._state.db = db_instance._state.db
def serializable_value(self, field_name):
try:
field = self._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
# Ensure that a model instance without a PK hasn't been assigned to
for field in self._meta.concrete_fields:
if field.is_relation:
try:
getattr(self, field.get_cache_name())
except AttributeError:
continue
obj = getattr(self, field.name, None)
# A pk may have been assigned manually to a model instance not
# saved to the database (or auto-generated in a case like
# UUIDField), but we allow the save to proceed and rely on the
# database to raise an IntegrityError if applicable. If
# constraints aren't supported by the database, there's the
# unavoidable risk of data corruption.
if obj and obj.pk is None:
# Remove the object from a related instance cache.
if not field.remote_field.multiple:
delattr(obj, field.remote_field.get_cache_name())
raise ValueError(
"save() prohibited to prevent data loss due to "
"unsaved related object '%s'." % field.name
)
using = using or router.db_for_write(self.__class__, instance=self)
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
if update_fields is not None:
# If update_fields is empty, skip the save. We do also check for
# no-op saves later on for inheritance cases. This bailout is
# still needed for skipping signal sending.
if len(update_fields) == 0:
return
update_fields = frozenset(update_fields)
field_names = set()
for field in self._meta.fields:
if not field.primary_key:
field_names.add(field.name)
if field.name != field.attname:
field_names.add(field.attname)
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError("The following fields do not exist in this "
"model or are m2m fields: %s"
% ', '.join(non_model_fields))
# If saving to the same database, and this model is deferred, then
# automatically do a "update_fields" save on the loaded fields.
elif not force_insert and self._deferred and using == self._state.db:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, 'through'):
field_names.add(field.attname)
deferred_fields = [
f.attname for f in self._meta.fields
if (f.attname not in self.__dict__ and
isinstance(self.__class__.__dict__[f.attname], DeferredAttribute))
]
loaded_fields = field_names.difference(deferred_fields)
if loaded_fields:
update_fields = frozenset(loaded_fields)
self.save_base(using=using, force_insert=force_insert,
force_update=force_update, update_fields=update_fields)
save.alters_data = True
def save_base(self, raw=False, force_insert=False,
force_update=False, using=None, update_fields=None):
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or len(update_fields) > 0
cls = origin = self.__class__
# Skip proxies, but keep the origin as the proxy model.
if cls._meta.proxy:
cls = cls._meta.concrete_model
meta = cls._meta
if not meta.auto_created:
signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
update_fields=update_fields)
with transaction.atomic(using=using, savepoint=False):
if not raw:
self._save_parents(cls, using, update_fields)
updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if not meta.auto_created:
signals.post_save.send(sender=origin, instance=self, created=(not updated),
update_fields=update_fields, raw=raw, using=using)
save_base.alters_data = True
def _save_parents(self, cls, using, update_fields):
meta = cls._meta
for parent, field in meta.parents.items():
# Make sure the link fields are synced between parent and self.
if (field and getattr(self, parent._meta.pk.attname) is None and
getattr(self, field.attname) is not None):
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
self._save_parents(cls=parent, using=using, update_fields=update_fields)
self._save_table(cls=parent, using=using, update_fields=update_fields)
# Set the parent's PK value to self.
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
# attname directly, bypassing the descriptor. Invalidate
# the related object cache, in case it's been accidentally
cache_name = field.get_cache_name()
if hasattr(self, cache_name):
delattr(self, cache_name)
def _save_table(self, raw=False, cls=None, force_insert=False,
force_update=False, using=None, update_fields=None):
meta = cls._meta
non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
if update_fields:
non_pks = [f for f in non_pks
if f.name in update_fields or f.attname in update_fields]
pk_val = self._get_pk_val(meta)
if pk_val is None:
pk_val = meta.pk.get_pk_value_on_save(self)
setattr(self, meta.pk.attname, pk_val)
pk_set = pk_val is not None
if not pk_set and (force_update or update_fields):
raise ValueError("Cannot force an update in save() with no primary key.")
updated = False
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False)))
for f in non_pks]
forced_update = update_fields or force_update
updated = self._do_update(base_qs, using, pk_val, values, update_fields,
forced_update)
if force_update and not updated:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not updated:
raise DatabaseError("Save with update_fields did not affect any rows.")
if not updated:
if meta.order_with_respect_to:
# If this is a model with an order_with_respect_to
# autopopulate the _order field
field = meta.order_with_respect_to
filter_args = field.get_filter_kwargs_for_object(self)
order_value = cls._base_manager.using(using).filter(**filter_args).count()
self._order = order_value
fields = meta.local_concrete_fields
if not pk_set:
fields = [f for f in fields if not isinstance(f, AutoField)]
update_pk = bool(meta.has_auto_field and not pk_set)
result = self._do_insert(cls._base_manager, using, fields, update_pk, raw)
if update_pk:
setattr(self, meta.pk.attname, result)
return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
filtered = base_qs.filter(pk=pk_val)
if not values:
# We can end up here when saving a model in inheritance chain where
# update_fields doesn't target any field in current model. In that
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
if filtered.exists():
# database is again checked for if the UPDATE query returns 0.
return filtered._update(values) > 0 or filtered.exists()
else:
return False
return filtered._update(values) > 0
def _do_insert(self, manager, using, fields, update_pk, raw):
return manager._insert([self], fields=fields, return_id=update_pk,
using=using, raw=raw)
def delete(self, using=None, keep_parents=False):
using = using or router.db_for_write(self.__class__, instance=self)
assert self._get_pk_val() is not None, (
"%s object can't be deleted because its %s attribute is set to None." %
(self._meta.object_name, self._meta.pk.attname)
)
collector = Collector(using=using)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_text(dict(field.flatchoices).get(value, value), strings_only=True)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
op = 'gt' if is_next else 'lt'
order = '' if is_next else '-'
param = force_text(getattr(self, field.attname))
q = Q(**{'%s__%s' % (field.name, op): param})
q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by(
'%s%s' % (order, field.name), '%spk' % order
)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
op = 'gt' if is_next else 'lt'
order = '_order' if is_next else '-_order'
order_field = self._meta.order_with_respect_to
filter_args = order_field.get_filter_kwargs_for_object(self)
obj = self._default_manager.filter(**filter_args).filter(**{
'_order__%s' % op: self._default_manager.values('_order').filter(**{
self._meta.pk.name: self.pk
})
}).order_by(order)[:1].get()
setattr(self, cachename, obj)
return getattr(self, cachename)
def prepare_database_save(self, field):
if self.pk is None:
raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
return getattr(self, field.remote_field.get_related_field().attname)
def clean(self):
pass
def validate_unique(self, exclude=None):
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None):
if exclude is None:
exclude = []
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.unique_together:
unique_togethers.append((parent_class, parent_class._meta.unique_together))
for model_class, unique_together in unique_togethers:
for check in unique_together:
for name in check:
if name in exclude:
break
else:
unique_checks.append((model_class, tuple(check)))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.get_parent_list():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, 'date', name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, 'year', name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, 'month', name, f.unique_for_month))
return unique_checks, date_checks
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
# Try to look up an existing object with the same values as this
# object's values for all the unique field.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
if lookup_value is None:
continue
if f.primary_key and not self._state.adding:
continue
lookup_kwargs[str(field_name)] = lookup_value
if len(unique_check) != len(lookup_kwargs):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
# case if that makes it's way in
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == 'date':
lookup_kwargs['%s__day' % unique_for] = date.day
lookup_kwargs['%s__month' % unique_for] = date.month
lookup_kwargs['%s__year' % unique_for] = date.year
else:
lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages['unique_for_date'],
code='unique_for_date',
params={
'model': self,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'lookup_type': lookup_type,
'field': field_name,
'field_label': six.text_type(capfirst(field.verbose_name)),
'date_field': unique_for,
'date_field_label': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
}
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
'model': self,
'model_class': model_class,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'unique_check': unique_check,
}
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params['field_label'] = six.text_type(capfirst(field.verbose_name))
return ValidationError(
message=field.error_messages['unique'],
code='unique',
params=params,
)
else:
field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
params['field_labels'] = six.text_type(get_text_list(field_labels, _('and')))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code='unique_together',
params=params,
)
def full_clean(self, exclude=None, validate_unique=True):
errors = {}
if exclude is None:
exclude = []
else:
exclude = list(exclude)
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
if validate_unique:
for name in errors.keys():
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.append(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
def clean_fields(self, exclude=None):
if exclude is None:
exclude = []
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
raw_value = getattr(self, f.attname)
if f.blank and raw_value in f.empty_values:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.error_list
if errors:
raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = []
errors.extend(cls._check_swappable())
errors.extend(cls._check_model())
errors.extend(cls._check_managers(**kwargs))
if not cls._meta.swapped:
errors.extend(cls._check_fields(**kwargs))
errors.extend(cls._check_m2m_through_same_relationship())
errors.extend(cls._check_long_column_names())
clash_errors = cls._check_id_field() + cls._check_field_name_clashes()
errors.extend(clash_errors)
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors.extend(cls._check_index_together())
errors.extend(cls._check_unique_together())
errors.extend(cls._check_ordering())
return errors
@classmethod
def _check_swappable(cls):
errors = []
if cls._meta.swapped:
try:
apps.get_model(cls._meta.swapped)
except ValueError:
errors.append(
checks.Error(
"'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable,
id='models.E001',
)
)
except LookupError:
app_label, model_name = cls._meta.swapped.split('.')
errors.append(
checks.Error(
"'%s' references '%s.%s', which has not been "
"installed, or is abstract." % (
cls._meta.swappable, app_label, model_name
),
id='models.E002',
)
)
return errors
@classmethod
def _check_model(cls):
errors = []
if cls._meta.proxy:
if cls._meta.local_fields or cls._meta.local_many_to_many:
errors.append(
checks.Error(
"Proxy model '%s' contains model fields." % cls.__name__,
id='models.E017',
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
errors = []
for __, manager, __ in cls._meta.managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
for f in fields:
signature = (f.remote_field.model, cls, f.remote_field.through)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
"The model has two many-to-many relations through "
"the intermediate model '%s'." % f.remote_field.through._meta.label,
obj=cls,
id='models.E003',
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
fields = list(f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk)
if fields and not fields[0].primary_key and cls._meta.pk.name == 'id':
return [
checks.Error(
"'id' can only be used as a field name if the field also "
"sets 'primary_key=True'.",
obj=cls,
id='models.E004',
)
]
else:
return []
@classmethod
def _check_field_name_clashes(cls):
errors = []
used_fields = {}
for parent in cls._meta.get_parent_list():
for f in parent._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
if clash:
errors.append(
checks.Error(
"The field '%s' from parent model "
"'%s' clashes with the field '%s' "
"from parent model '%s'." % (
clash.name, clash.model._meta,
f.name, f.model._meta
),
obj=cls,
id='models.E005',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
# Check that fields defined in the model don't clash with fields from
for parent in cls._meta.get_parent_list():
for f in parent._meta.get_fields():
if f not in used_fields:
used_fields[f.name] = f
for f in cls._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls
if clash and not id_conflict:
errors.append(
checks.Error(
"The field '%s' clashes with the field '%s' "
"from model '%s'." % (
f.name, clash.name, clash.model._meta
),
obj=f,
id='models.E006',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
return errors
@classmethod
def _check_column_name_clashes(cls):
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
"Field '%s' has column name '%s' that is used by "
"another field." % (f.name, column_name),
hint="Specify a 'db_column' for the field.",
obj=cls,
id='models.E007'
)
)
else:
used_column_names.append(column_name)
return errors
@classmethod
def _check_index_together(cls):
if not isinstance(cls._meta.index_together, (tuple, list)):
return [
checks.Error(
"'index_together' must be a list or tuple.",
obj=cls,
id='models.E008',
)
]
elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together):
return [
checks.Error(
"All 'index_together' elements must be lists or tuples.",
obj=cls,
id='models.E009',
)
]
else:
errors = []
for fields in cls._meta.index_together:
errors.extend(cls._check_local_fields(fields, "index_together"))
return errors
@classmethod
def _check_unique_together(cls):
if not isinstance(cls._meta.unique_together, (tuple, list)):
return [
checks.Error(
"'unique_together' must be a list or tuple.",
obj=cls,
id='models.E010',
)
]
elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together):
return [
checks.Error(
"All 'unique_together' elements must be lists or tuples.",
obj=cls,
id='models.E011',
)
]
else:
errors = []
for fields in cls._meta.unique_together:
errors.extend(cls._check_local_fields(fields, "unique_together"))
return errors
@classmethod
def _check_local_fields(cls, fields, option):
from django.db import models
forward_fields_map = {
field.name: field for field in cls._meta._get_fields(reverse=False)
}
errors = []
for field_name in fields:
try:
field = forward_fields_map[field_name]
except KeyError:
errors.append(
checks.Error(
"'%s' refers to the non-existent field '%s'." % (
option, field_name,
),
obj=cls,
id='models.E012',
)
)
else:
if isinstance(field.remote_field, models.ManyToManyRel):
errors.append(
checks.Error(
"'%s' refers to a ManyToManyField '%s', but "
"ManyToManyFields are not permitted in '%s'." % (
option, field_name, option,
),
obj=cls,
id='models.E013',
)
)
elif field not in cls._meta.local_fields:
errors.append(
checks.Error(
"'%s' refers to field '%s' which is not local to model '%s'."
% (option, field_name, cls._meta.object_name),
hint="This issue may be caused by multi-table inheritance.",
obj=cls,
id='models.E016',
)
)
return errors
@classmethod
def _check_ordering(cls):
if cls._meta._ordering_clash:
return [
checks.Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=cls,
id='models.E021',
),
]
if cls._meta.order_with_respect_to or not cls._meta.ordering:
return []
if not isinstance(cls._meta.ordering, (list, tuple)):
return [
checks.Error(
"'ordering' must be a tuple or list (even if you want to order by only one field).",
obj=cls,
id='models.E014',
)
]
errors = []
fields = cls._meta.ordering
fields = (f for f in fields if f != '?')
fields = ((f[1:] if f.startswith('-') else f) for f in fields)
fields = (f for f in fields if '__' not in f)
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
fields = {f for f in fields if f != 'pk'}
invalid_fields = []
opts = cls._meta
valid_fields = set(chain.from_iterable(
(f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),)
for f in chain(opts.fields, opts.related_objects)
))
invalid_fields.extend(fields - valid_fields)
for invalid_field in invalid_fields:
errors.append(
checks.Error(
"'ordering' refers to the non-existent field '%s'." % invalid_field,
obj=cls,
id='models.E015',
)
)
return errors
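    # --- Illustrative sketch (editor's addition; model is hypothetical) ---
    # A Meta.ordering typo is exactly what the check above catches:
    #
    #   class Article(models.Model):
    #       title = models.CharField(max_length=100)
    #
    #       class Meta:
    #           ordering = ['titel']  # -> models.E015: non-existent field
    #
    # Running Article.check() (or `manage.py check`) surfaces the error.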
@classmethod
def _check_long_column_names(cls):
errors = []
allowed_len = None
db_alias = None
for db in settings.DATABASES.keys():
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is None or connection.features.truncates_names:
continue
else:
if allowed_len is None:
allowed_len = max_name_length
db_alias = db
elif max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
if allowed_len is None:
return errors
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Check if auto-generated name for the field is too long
# for the database.
if f.db_column is None and column_name is not None and len(column_name) > allowed_len:
errors.append(
checks.Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (column_name, allowed_len, db_alias),
hint="Set the column name manually using 'db_column'.",
obj=cls,
id='models.E018',
)
)
for f in cls._meta.local_many_to_many:
# Check if auto-generated name for the M2M field is too long
# for the database.
for m2m in f.remote_field.through._meta.local_fields:
_, rel_name = m2m.get_attname_column()
if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len:
errors.append(
checks.Error(
'Autogenerated column name too long for M2M field '
'"%s". Maximum length is "%s" for database "%s".'
% (rel_name, allowed_len, db_alias),
hint=(
"Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'."
),
obj=cls,
id='models.E019',
)
)
return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(ordered_obj, self, id_list, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
# FIXME: It would be nice if there was an "update many" version of update
# for situations like this.
with transaction.atomic(using=using, savepoint=False):
for i, j in enumerate(id_list):
ordered_obj.objects.filter(pk=j, **filter_args).update(_order=i)
def method_get_order(ordered_obj, self):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
pk_name = ordered_obj._meta.pk.name
return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True)
def make_foreign_order_accessors(model, related_model):
setattr(
related_model,
'get_%s_order' % model.__name__.lower(),
curry(method_get_order, model)
)
setattr(
related_model,
'set_%s_order' % model.__name__.lower(),
curry(method_set_order, model)
)
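# Sketch of the generated accessors (editor's addition; model names are
# hypothetical). For a model Answer declared with
# order_with_respect_to = 'question', Django calls
# make_foreign_order_accessors(Answer, Question), after which a Question
# instance gains:
#
#   question.get_answer_order()          # -> ordered list of Answer pks
#   question.set_answer_order([3, 1, 2])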
########
# MISC #
########
def simple_class_factory(model, attrs):
return model
def model_unpickle(model_id, attrs, factory):
if isinstance(model_id, tuple):
model = apps.get_model(*model_id)
else:
# Backwards compat - the model was cached directly in earlier versions.
model = model_id
cls = factory(model, attrs)
return cls.__new__(cls)
model_unpickle.__safe_for_unpickle__ = True
def unpickle_inner_exception(klass, exception_name):
# Get the exception class from the class it is attached to:
exception = getattr(klass, exception_name)
return exception.__new__(exception)
| true
| true
|
1c4552d4e09eadd115dbba54a37526988ca931f4
| 1,291
|
py
|
Python
|
examples/django/backend/urls.py
|
envoy/polarwind
|
bb9781c00abe2b6dfd96b24e8820c6ec50be3e24
|
[
"MIT"
] | 15
|
2020-06-23T15:23:53.000Z
|
2022-01-12T21:28:06.000Z
|
examples/django/backend/urls.py
|
envoy/polarwind
|
bb9781c00abe2b6dfd96b24e8820c6ec50be3e24
|
[
"MIT"
] | 221
|
2020-06-25T18:25:30.000Z
|
2022-03-24T13:06:20.000Z
|
examples/django/backend/urls.py
|
envoy/polarwind
|
bb9781c00abe2b6dfd96b24e8820c6ec50be3e24
|
[
"MIT"
] | null | null | null |
"""backend URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, re_path, include
from django.conf import settings
from cra_helper.views import proxy_cra_requests
from . import views
urlpatterns = [
path('', views.home_view, name='home'),
path('auth', include('envoy_auth.urls')),
path('admin/', admin.site.urls),
]
# add a reverse-proxy view to help React in the Django view talk to Create-React-App
if settings.DEBUG:
proxy_urls = [
re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_cra_requests),
re_path(r'^(?P<path>.+\.hot-update\.(json|js|js\.map))$', proxy_cra_requests),
]
urlpatterns.extend(proxy_urls)
| 34.891892
| 86
| 0.703331
|
from django.contrib import admin
from django.urls import path, re_path, include
from django.conf import settings
from cra_helper.views import proxy_cra_requests
from . import views
urlpatterns = [
path('', views.home_view, name='home'),
path('auth', include('envoy_auth.urls')),
path('admin/', admin.site.urls),
]
if settings.DEBUG:
proxy_urls = [
re_path(r'^__webpack_dev_server__/(?P<path>.*)$', proxy_cra_requests),
re_path(r'^(?P<path>.+\.hot-update\.(json|js|js\.map))$', proxy_cra_requests),
]
urlpatterns.extend(proxy_urls)
| true
| true
|
1c45542f594a46b1b10eaa45a732b7fa7822eaba
| 11,884
|
py
|
Python
|
virtual_ta/data_conversions.py
|
ilankham/virtual-teaching-assistant
|
105dddee4284209d1deebbfa60793ba7d99c3748
|
[
"MIT"
] | 2
|
2017-11-21T20:42:41.000Z
|
2018-03-05T01:18:30.000Z
|
virtual_ta/data_conversions.py
|
ilankham/virtual-teaching-assistant
|
105dddee4284209d1deebbfa60793ba7d99c3748
|
[
"MIT"
] | null | null | null |
virtual_ta/data_conversions.py
|
ilankham/virtual-teaching-assistant
|
105dddee4284209d1deebbfa60793ba7d99c3748
|
[
"MIT"
] | null | null | null |
"""Creates functions for converting between data formats"""
from calendar import day_name
from collections import OrderedDict
from csv import DictReader
from datetime import date, timedelta
from io import BytesIO, FileIO, StringIO, TextIOWrapper
from typing import BinaryIO, Dict, List, TextIO, Union
from openpyxl import load_workbook
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
# NB: deliberately shadows io.FileIO imported above with a broader alias.
FileIO = Union[BinaryIO, BytesIO, FileIO, StringIO, TextIO, TextIOWrapper]
def convert_csv_to_dict(
data_csv_fp: FileIO,
*,
key: str = None,
) -> Dict[str, Dict[str, str]]:
"""Converts a CSV file to a dictionary of dictionaries
This function inputs a CSV file and a key column (defaulting to the
left-most column, if not specified) and outputs a dictionary keyed by the
specified key column and having as values dictionaries encoding the row
from the CSV file corresponding to the key value
Args:
data_csv_fp: pointer to CSV file or file-like object with columns
headers in its first row and ready to be read from
key: a column header from data_csv_fp, whose values should be used as
keys in the dictionary generated
Returns:
A dictionary keyed by the specified key column and having as values
dictionaries encoding the row from the CSV file corresponding to the
key value
"""
csv_file_reader = DictReader(data_csv_fp)
if key is None:
key = csv_file_reader.fieldnames[0]
return_value = OrderedDict()
for row in csv_file_reader:
return_value[row[key]] = row
return return_value
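# Usage sketch (editor's addition; the sample CSV is hypothetical):
#
#   from io import StringIO
#   roster = StringIO("id,name\n1,Ada\n2,Grace\n")
#   convert_csv_to_dict(roster)
#   # -> OrderedDict([('1', {'id': '1', 'name': 'Ada'}),
#   #                 ('2', {'id': '2', 'name': 'Grace'})])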
def convert_csv_to_multimap(
data_csv_fp: FileIO,
*,
key_column: str = None,
values_column: str = None,
overwrite_values: bool = False,
) -> Dict[str, Union[str, List[str]]]:
"""Converts a CSV file to a dictionary of dictionaries
This function inputs a CSV file, a key column (defaulting to the
left-most column, if not specified), a values column (defaulting to the
second column from the left, if not specified), and a flag for whether
values should be overwritten (defaulting to False), and outputs a
dictionary keyed by the specified key column and having as values the
entries from the specified values column, with values collected into a
list if overwrite_values is False, and the last value found when reading
the file otherwise
Args:
data_csv_fp: pointer to CSV file or file-like object with columns
headers in its first row and ready to be read from
key_column: a column header from data_csv_fp, whose values should be
used as keys in the dictionary generated
values_column: a column header from data_csv_fp, whose values should be
used as values in the dictionary generated
        overwrite_values: determines whether only the last-appearing value
            corresponding to each key is returned; if False, then a list of
            values is returned for each key
Returns:
A dictionary keyed by the specified key column and having as values the
entries from the specified values column, with values collected into a
list if overwrite_values == False, and the last value found when
reading the file otherwise
"""
csv_file_reader = DictReader(data_csv_fp)
if key_column is None:
key_column = csv_file_reader.fieldnames[0]
if values_column is None:
values_column = csv_file_reader.fieldnames[1]
return_value = {}
for row in csv_file_reader:
if overwrite_values:
return_value[row[key_column]] = row[values_column]
elif row[key_column] in return_value.keys():
return_value[row[key_column]].append(row[values_column])
else:
return_value[row[key_column]] = [row[values_column]]
return return_value
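# Usage sketch (editor's addition; data is hypothetical):
#
#   from io import StringIO
#   grades = StringIO("student,course\nada,math\nada,cs\ngrace,cs\n")
#   convert_csv_to_multimap(grades)
#   # -> {'ada': ['math', 'cs'], 'grace': ['cs']}
#   convert_csv_to_multimap(StringIO("student,course\nada,math\nada,cs\n"),
#                           overwrite_values=True)
#   # -> {'ada': 'cs'}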
def convert_xlsx_to_dict(
data_xlsx_fp: FileIO,
*,
key: str = None,
worksheet: str = None,
) -> Dict[str, Dict[str, str]]:
"""Converts an XLSX file to dictionary of dictionaries
This function inputs an XLSX file, a key column (defaulting to the
left-most column, if not specified), and a worksheet name (defaulting to
the first worksheet, if not specified), and outputs a dictionary keyed by
the specified key column and having as values dictionaries encoding the row
from the specified worksheet of the XLSX file corresponding to the key
value
Args:
data_xlsx_fp: pointer to an XLSX file or file-like object with columns
headers in its first row and ready to be read from
key: a column header from data_xlsx_fp, whose values should be used as
keys in the dictionary generated
worksheet: a worksheet name from data_xlsx_fp, whose values should be
used in the dictionary generated
Returns:
A dictionary keyed by the specified key column and having as values
dictionaries encoding the row from the specified worksheet of the XLSX
file corresponding to the key value
"""
xlsx_file_reader = load_workbook(
data_xlsx_fp,
read_only=True,
data_only=True
)
    if worksheet is None:
        # Default to the first worksheet in the workbook.
        worksheet = xlsx_file_reader.sheetnames[0]
    xlsx_worksheet_reader = xlsx_file_reader[worksheet]
    xlsx_worksheet_columns = xlsx_worksheet_reader.rows
    xlsx_worksheet_headers = [
        cell.value
        for cell in next(xlsx_worksheet_columns)
    ]
    if key is None:
        # Default to the left-most column header.
        key = xlsx_worksheet_headers[0]
    key_column_index = xlsx_worksheet_headers.index(key)
return_value = {}
    for row in xlsx_worksheet_columns:
        new_row_to_add = {}
        for j, cell in enumerate(row):
            new_row_to_add[xlsx_worksheet_headers[j]] = cell.value
return_value[row[key_column_index].value] = new_row_to_add
return return_value
def convert_xlsx_to_yaml_calendar(
data_xlsx_fp: FileIO,
start_date: date,
*,
item_delimiter: str = '|',
relative_week_number_column: str = None,
worksheet: str = None,
) -> str:
"""Converts an XLSX file to a YAML string representing a weekly calendar
This function inputs an XLSX file, a start date, an item delimiter for
decomposing Excel-file cell values into lists (defaulting to a vertical
pipe), a key column for week numbers (defaulting to the left-most column,
if not specified), and a worksheet name (defaulting to the first worksheet,
if not specified), and outputs a string containing a YAML representation
of the XLSX file as a dictionary keyed by the specified key column and
having as values dictionaries encoding the row from the specified worksheet
of the XLSX file corresponding to the key value
Args:
data_xlsx_fp: pointer to an XLSX file or file-like object with columns
headers in its first row and ready to be read from; any column
names in data_xlsx_fp corresponding to day names in the current
locale, as identified by the calendar module, are treated as
providing activities for the corresponding calendar date and will
be ordered according to ISO 8601 in output; all other columns are
treated as providing information about the week itself
start_date: specifies the start date for the calendar, which is
adjusted to the Monday of the week that the start_date appears in,
per ISO 8601's specification that weeks run from Monday to Sunday
item_delimiter: a string whose values will be used to split item values
into lists
relative_week_number_column: a column header from data_xlsx_fp, whose
values should be used as key values in the YAML string generated;
the values of relative_week_number_column should be integers, with
the value one (1) representing the week that start_date appears in
worksheet: a worksheet name from data_xlsx_fp, whose values should be
used in the dictionary generated
Returns:
A string containing a YAML representation of the XLSX file as a
dictionary keyed by the specified key column and having as values
dictionaries encoding the row from the specified worksheet of the XLSX
file corresponding to the key value
"""
data_dict = convert_xlsx_to_dict(
data_xlsx_fp,
key=relative_week_number_column,
worksheet=worksheet
)
start_date_adjusted = start_date - timedelta(days=start_date.weekday())
weekdays_lookup_dict = {day.lower(): n for n, day in enumerate(day_name)}
calendar_dict = CommentedMap()
for week_number, week_data in data_dict.items():
week_number = int(week_number)
calendar_dict[week_number] = CommentedMap()
for weekday in week_data:
if (
weekday == relative_week_number_column or
week_data[weekday] is None
):
continue
if weekday.lower() in weekdays_lookup_dict:
weekday_date = (
start_date_adjusted
+
timedelta(
days=7 * (int(week_number) - 1) +
int(weekdays_lookup_dict[weekday.lower()])
)
).strftime('%d%b%Y').upper()
calendar_dict[week_number][weekday] = CommentedMap()
calendar_dict[week_number][weekday]['Date'] = weekday_date
calendar_dict[week_number][weekday]['Activities'] = (
week_data[weekday].split(item_delimiter)
)
else:
calendar_dict[week_number][weekday] = (
week_data[weekday].split(item_delimiter)
)
yaml = YAML()
calendar_yaml = StringIO()
yaml.dump(data=calendar_dict, stream=calendar_yaml)
calendar_yaml.seek(0)
return calendar_yaml.read()
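# Shape sketch (editor's addition; the worksheet row is hypothetical). A row
#
#   Week | Monday          | Notes
#   1    | Lecture|Reading | Bring laptop
#
# with start_date = date(2018, 1, 1) (a Monday) yields YAML along the lines of
#
#   1:
#     Monday:
#       Date: 01JAN2018
#       Activities:
#       - Lecture
#       - Reading
#     Notes:
#     - Bring laptop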
def flatten_dict(
data_items: dict,
key_value_separator: str = '\n',
items_separator: str = '\n',
*,
suppress_keys: bool = False,
sort_keys: bool = True,
**kwargs
) -> str:
"""Converts a dictionary to a string with specified separators
This function converts dictionary data_items to a string, with
key_value_separator used to separate keys from values and items_separator
used to separate items; in addition, keyword arguments can be passed to the
builtin function sorted
Args:
data_items: a dictionary whose keys and items will be treated as or
converted to strings
key_value_separator: used to separate keys from values in the output
string
items_separator: used to separate items in the output string
suppress_keys: Boolean for determining whether to include keys in the
returned string
sort_keys: Boolean for determining whether to sort keys
**kwargs: options passed through to the builtin function sorted if
sort_keys is True
Returns:
A string representation of data_items with key_value_separator used to
separate keys from values and items_separator used to separate items
"""
if sort_keys:
enumeration_order = enumerate(sorted(data_items.keys(), **kwargs))
else:
        # **kwargs are only meaningful for sorted(); enumerate() takes none.
        enumeration_order = enumerate(data_items)
return_value = items_separator
last_record_number = len(data_items)
for n, k in enumeration_order:
if not suppress_keys:
return_value += str(k)
return_value += key_value_separator
return_value += str(data_items[k])
if n < last_record_number - 1:
return_value += items_separator
return return_value
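# Usage sketch (editor's addition; note the leading items_separator that the
# implementation above always prepends):
#
#   flatten_dict({'b': 2, 'a': 1}, key_value_separator=': ',
#                items_separator='; ')
#   # -> '; a: 1; b: 2'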
| 38.089744
| 79
| 0.680326
|
from calendar import day_name
from collections import OrderedDict
from csv import DictReader
from datetime import date, timedelta
from io import BytesIO, FileIO, StringIO, TextIOWrapper
from typing import BinaryIO, Dict, List, TextIO, Union
from openpyxl import load_workbook
from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap
FileIO = Union[BinaryIO, BytesIO, FileIO, StringIO, TextIO, TextIOWrapper]
def convert_csv_to_dict(
data_csv_fp: FileIO,
*,
key: str = None,
) -> Dict[str, Dict[str, str]]:
csv_file_reader = DictReader(data_csv_fp)
if key is None:
key = csv_file_reader.fieldnames[0]
return_value = OrderedDict()
for row in csv_file_reader:
return_value[row[key]] = row
return return_value
def convert_csv_to_multimap(
data_csv_fp: FileIO,
*,
key_column: str = None,
values_column: str = None,
overwrite_values: bool = False,
) -> Dict[str, Union[str, List[str]]]:
csv_file_reader = DictReader(data_csv_fp)
if key_column is None:
key_column = csv_file_reader.fieldnames[0]
if values_column is None:
values_column = csv_file_reader.fieldnames[1]
return_value = {}
for row in csv_file_reader:
if overwrite_values:
return_value[row[key_column]] = row[values_column]
elif row[key_column] in return_value.keys():
return_value[row[key_column]].append(row[values_column])
else:
return_value[row[key_column]] = [row[values_column]]
return return_value
def convert_xlsx_to_dict(
data_xlsx_fp: FileIO,
*,
key: str = None,
worksheet: str = None,
) -> Dict[str, Dict[str, str]]:
xlsx_file_reader = load_workbook(
data_xlsx_fp,
read_only=True,
data_only=True
)
    if worksheet is None:
        worksheet = xlsx_file_reader.sheetnames[0]
    xlsx_worksheet_reader = xlsx_file_reader[worksheet]
    xlsx_worksheet_columns = xlsx_worksheet_reader.rows
    xlsx_worksheet_headers = [
        cell.value
        for cell in next(xlsx_worksheet_columns)
    ]
    if key is None:
        key = xlsx_worksheet_headers[0]
    key_column_index = xlsx_worksheet_headers.index(key)
return_value = {}
    for row in xlsx_worksheet_columns:
        new_row_to_add = {}
        for j, cell in enumerate(row):
            new_row_to_add[xlsx_worksheet_headers[j]] = cell.value
return_value[row[key_column_index].value] = new_row_to_add
return return_value
def convert_xlsx_to_yaml_calendar(
data_xlsx_fp: FileIO,
start_date: date,
*,
item_delimiter: str = '|',
relative_week_number_column: str = None,
worksheet: str = None,
) -> str:
data_dict = convert_xlsx_to_dict(
data_xlsx_fp,
key=relative_week_number_column,
worksheet=worksheet
)
start_date_adjusted = start_date - timedelta(days=start_date.weekday())
weekdays_lookup_dict = {day.lower(): n for n, day in enumerate(day_name)}
calendar_dict = CommentedMap()
for week_number, week_data in data_dict.items():
week_number = int(week_number)
calendar_dict[week_number] = CommentedMap()
for weekday in week_data:
if (
weekday == relative_week_number_column or
week_data[weekday] is None
):
continue
if weekday.lower() in weekdays_lookup_dict:
weekday_date = (
start_date_adjusted
+
timedelta(
days=7 * (int(week_number) - 1) +
int(weekdays_lookup_dict[weekday.lower()])
)
).strftime('%d%b%Y').upper()
calendar_dict[week_number][weekday] = CommentedMap()
calendar_dict[week_number][weekday]['Date'] = weekday_date
calendar_dict[week_number][weekday]['Activities'] = (
week_data[weekday].split(item_delimiter)
)
else:
calendar_dict[week_number][weekday] = (
week_data[weekday].split(item_delimiter)
)
yaml = YAML()
calendar_yaml = StringIO()
yaml.dump(data=calendar_dict, stream=calendar_yaml)
calendar_yaml.seek(0)
return calendar_yaml.read()
def flatten_dict(
data_items: dict,
key_value_separator: str = '\n',
items_separator: str = '\n',
*,
suppress_keys: bool = False,
sort_keys: bool = True,
**kwargs
) -> str:
if sort_keys:
enumeration_order = enumerate(sorted(data_items.keys(), **kwargs))
else:
        enumeration_order = enumerate(data_items)
return_value = items_separator
last_record_number = len(data_items)
for n, k in enumeration_order:
if not suppress_keys:
return_value += str(k)
return_value += key_value_separator
return_value += str(data_items[k])
if n < last_record_number - 1:
return_value += items_separator
return return_value
| true
| true
|
1c45545157e97f9c4e1cc68b6cafb654b5d57282
| 439
|
py
|
Python
|
news/views.py
|
valch85/newssite
|
ef612a7bde4ff1d6e1e35f5cc4ec9407f031270e
|
[
"Apache-2.0"
] | null | null | null |
news/views.py
|
valch85/newssite
|
ef612a7bde4ff1d6e1e35f5cc4ec9407f031270e
|
[
"Apache-2.0"
] | 2
|
2020-02-12T00:16:37.000Z
|
2020-06-05T20:42:49.000Z
|
news/views.py
|
valch85/newssite
|
ef612a7bde4ff1d6e1e35f5cc4ec9407f031270e
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render, get_object_or_404
from .models import News
# Create your views here.
def index(request):
latest_news_list = News.objects.order_by('-pub_date')[:10]
context = {'latest_news_list': latest_news_list}
return render(request, 'news/index.html', context)
def detail(request, news_id):
new = get_object_or_404(News, pk=news_id)
return render(request, 'news/detail.html', {'new': new})
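# Wiring sketch (editor's addition; a hypothetical news/urls.py for the views
# above):
#
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('', views.index, name='index'),
#       path('<int:news_id>/', views.detail, name='detail'),
#   ]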
| 27.4375
| 62
| 0.728929
|
from django.shortcuts import render, get_object_or_404
from .models import News
def index(request):
latest_news_list = News.objects.order_by('-pub_date')[:10]
context = {'latest_news_list': latest_news_list}
return render(request, 'news/index.html', context)
def detail(request, news_id):
new = get_object_or_404(News, pk=news_id)
return render(request, 'news/detail.html', {'new': new})
| true
| true
|
1c4554791df5c97d5a6a036317b43076dea94512
| 9,639
|
py
|
Python
|
celery_manager/static_precompiler/compilers/base.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 45
|
2015-09-30T14:55:33.000Z
|
2021-06-28T02:33:30.000Z
|
celery_manager/static_precompiler/compilers/base.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 261
|
2015-06-03T20:41:56.000Z
|
2022-03-07T08:46:10.000Z
|
celery_manager/static_precompiler/compilers/base.py
|
churchlab/millstone
|
ddb5d003a5b8a7675e5a56bafd5c432d9642b473
|
[
"MIT"
] | 22
|
2015-06-04T20:43:10.000Z
|
2022-02-27T08:27:34.000Z
|
# coding: utf-8
from django.conf import settings
from django.contrib.staticfiles import finders
from static_precompiler.models import Dependency
from static_precompiler.settings import STATIC_ROOT, ROOT, OUTPUT_DIR
from static_precompiler.utils import get_mtime
import logging
import os
logger = logging.getLogger("static_precompiler")
class BaseCompiler(object):
supports_dependencies = False
def is_supported(self, source_path):
""" Return True iff provided source file type is supported by this precompiler.
:param source_path: relative path to a source file
:type source_path: str
:returns: bool
"""
raise NotImplementedError
#noinspection PyMethodMayBeStatic
def get_full_source_path(self, source_path):
""" Return the full path to the given source file.
Check if the source file exists.
:param source_path: relative path to a source file
:type source_path: basestring
:returns: str
:raises: ValueError
"""
full_path = os.path.join(STATIC_ROOT, source_path)
if settings.DEBUG and not os.path.exists(full_path):
            # While developing it is more comfortable to search for the
            # source files rather than running collectstatic all the time.
full_path = finders.find(source_path)
if full_path is None:
raise ValueError("Can't find staticfile named: {0}".format(source_path))
return full_path
def get_output_filename(self, source_filename):
""" Return the name of compiled file based on the name of source file.
:param source_filename: name of a source file
:type source_filename: str
:returns: str
"""
raise NotImplementedError
def get_output_path(self, source_path):
""" Get ralative path to compiled file based for the given source file.
:param source_path: relative path to a source file
:type source_path: str
:returns: str
"""
source_dir = os.path.dirname(source_path)
source_filename = os.path.basename(source_path)
output_filename = self.get_output_filename(source_filename)
return os.path.join(OUTPUT_DIR, source_dir, output_filename)
def get_full_output_path(self, source_path):
""" Get full path to compiled file based for the given source file.
:param source_path: relative path to a source file
:type source_path: str
:returns: str
"""
return os.path.join(ROOT, self.get_output_path(source_path))
def get_source_mtime(self, source_path):
""" Get the modification time of the source file.
:param source_path: relative path to a source file
:type source_path: str
:returns: int
"""
return get_mtime(self.get_full_source_path(source_path))
def get_output_mtime(self, source_path):
""" Get the modification time of the compiled file.
        Return None if the compiled file does not exist.
:param source_path: relative path to a source file
:type source_path: str
:returns: int, None
"""
full_output_path = self.get_full_output_path(source_path)
if not os.path.exists(full_output_path):
return None
return get_mtime(full_output_path)
def should_compile(self, source_path, watch=False):
""" Return True iff provided source file should be compiled.
:param source_path: relative path to a source file
:type source_path: str
:param watch: whether the method was invoked from watch utility
:type watch: bool
:returns: bool
"""
compiled_mtime = self.get_output_mtime(source_path)
if compiled_mtime is None:
return True
source_mtime = self.get_source_mtime(source_path)
if self.supports_dependencies:
for dependency in self.get_dependencies(source_path):
if compiled_mtime <= self.get_source_mtime(dependency):
return True
return compiled_mtime <= source_mtime
def get_source(self, source_path):
""" Get the source code to be compiled.
:param source_path: relative path to a source file
:type source_path: str
:returns: str
"""
return open(self.get_full_source_path(source_path)).read()
def write_output(self, output, source_path):
""" Write the compiled output to a file.
:param output: compiled code
:type output: str
:param source_path: relative path to a source file
:type source_path: str
"""
output_path = self.get_full_output_path(source_path)
output_dir = os.path.dirname(output_path)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
compiled_file = open(output_path, "w+")
compiled_file.write(output)
compiled_file.close()
def compile(self, source_path, watch=False):
""" Compile the given source path and return relative path to the compiled file.
        Raise ValueError if the source file type is not supported.
May raise a StaticCompilationError if something goes wrong with compilation.
:param source_path: relative path to a source file
:type source_path: str
:param watch: whether the method was invoked from watch utility
:type watch: bool
:returns: str
"""
if not self.is_supported(source_path):
raise ValueError("'{0}' file type is not supported by '{1}'".format(
source_path, self.__class__.__name__
))
if self.should_compile(source_path, watch=watch):
compiled = self.compile_file(source_path)
compiled = self.postprocess(compiled, source_path)
self.write_output(compiled, source_path)
if self.supports_dependencies:
self.update_dependencies(source_path, self.find_dependencies(source_path))
            logger.info("Compiled: '{0}'".format(source_path))
return self.get_output_path(source_path)
def compile_file(self, source_path):
""" Compile the source file. Return the compiled code.
May raise a StaticCompilationError if something goes wrong with compilation.
:param source_path: path to the source file
:type source_path: str
:returns: str
"""
raise NotImplementedError
def compile_source(self, source):
""" Compile the source code. May raise a StaticCompilationError
if something goes wrong with compilation.
:param source: source code
:type source: str
:returns: str
"""
raise NotImplementedError
#noinspection PyMethodMayBeStatic,PyUnusedLocal
def postprocess(self, compiled, source_path):
""" Post-process the compiled code.
:param compiled: compiled code
:type compiled: str
:param source_path: relative path to a source file
:type source_path: str
:returns: str
"""
return compiled
def find_dependencies(self, source_path):
""" Find the dependencies for the given source file.
:param source_path: relative path to a source file
:type source_path: str
:returns: list
"""
raise NotImplementedError
#noinspection PyMethodMayBeStatic
def get_dependencies(self, source_path):
""" Get the saved dependencies for the given source file.
:param source_path: relative path to a source file
:type source_path: str
:returns: list of str
"""
return list(Dependency.objects.filter(
source=source_path
).order_by("depends_on").values_list(
"depends_on", flat=True
))
#noinspection PyMethodMayBeStatic
def get_dependents(self, source_path):
""" Get a list of files that depends on the given source file.
:param source_path: relative path to a source file
:type source_path: str
:returns: list of str
"""
return list(Dependency.objects.filter(
depends_on=source_path
).order_by("source").values_list(
"source", flat=True
))
#noinspection PyMethodMayBeStatic
def update_dependencies(self, source_path, dependencies):
""" Updates the saved dependencies for the given source file.
:param source_path: relative path to a source file
:type source_path: str
:param dependencies: list of files that source file depends on
:type dependencies: list of str
"""
if not dependencies:
Dependency.objects.filter(source=source_path).delete()
else:
Dependency.objects.filter(
source=source_path
).exclude(
depends_on__in=dependencies,
).delete()
for dependency in dependencies:
Dependency.objects.get_or_create(
source=source_path,
depends_on=dependency,
)
def handle_changed_file(self, source_path):
""" Handle the modication of the source file.
:param source_path: relative path to a source file
:type source_path: str
"""
self.compile(source_path, watch=True)
for dependent in self.get_dependents(source_path):
self.compile(dependent, watch=True)
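# --- Illustrative subclass (editor's sketch, not part of this package) ---
# A minimal compiler that "compiles" *.foo files by upper-casing them, just
# to show which hooks a concrete BaseCompiler subclass must provide:
#
#   class FooCompiler(BaseCompiler):
#
#       def is_supported(self, source_path):
#           return source_path.endswith(".foo")
#
#       def get_output_filename(self, source_filename):
#           return source_filename[:-len(".foo")] + ".bar"
#
#       def compile_file(self, source_path):
#           return self.compile_source(self.get_source(source_path))
#
#       def compile_source(self, source):
#           return source.upper()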
| 32.674576
| 90
| 0.641768
|
from django.conf import settings
from django.contrib.staticfiles import finders
from static_precompiler.models import Dependency
from static_precompiler.settings import STATIC_ROOT, ROOT, OUTPUT_DIR
from static_precompiler.utils import get_mtime
import logging
import os
logger = logging.getLogger("static_precompiler")
class BaseCompiler(object):
supports_dependencies = False
def is_supported(self, source_path):
raise NotImplementedError
def get_full_source_path(self, source_path):
full_path = os.path.join(STATIC_ROOT, source_path)
if settings.DEBUG and not os.path.exists(full_path):
full_path = finders.find(source_path)
if full_path is None:
raise ValueError("Can't find staticfile named: {0}".format(source_path))
return full_path
def get_output_filename(self, source_filename):
raise NotImplementedError
def get_output_path(self, source_path):
source_dir = os.path.dirname(source_path)
source_filename = os.path.basename(source_path)
output_filename = self.get_output_filename(source_filename)
return os.path.join(OUTPUT_DIR, source_dir, output_filename)
def get_full_output_path(self, source_path):
return os.path.join(ROOT, self.get_output_path(source_path))
def get_source_mtime(self, source_path):
return get_mtime(self.get_full_source_path(source_path))
def get_output_mtime(self, source_path):
full_output_path = self.get_full_output_path(source_path)
if not os.path.exists(full_output_path):
return None
return get_mtime(full_output_path)
def should_compile(self, source_path, watch=False):
compiled_mtime = self.get_output_mtime(source_path)
if compiled_mtime is None:
return True
source_mtime = self.get_source_mtime(source_path)
if self.supports_dependencies:
for dependency in self.get_dependencies(source_path):
if compiled_mtime <= self.get_source_mtime(dependency):
return True
return compiled_mtime <= source_mtime
def get_source(self, source_path):
return open(self.get_full_source_path(source_path)).read()
def write_output(self, output, source_path):
output_path = self.get_full_output_path(source_path)
output_dir = os.path.dirname(output_path)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
compiled_file = open(output_path, "w+")
compiled_file.write(output)
compiled_file.close()
def compile(self, source_path, watch=False):
if not self.is_supported(source_path):
raise ValueError("'{0}' file type is not supported by '{1}'".format(
source_path, self.__class__.__name__
))
if self.should_compile(source_path, watch=watch):
compiled = self.compile_file(source_path)
compiled = self.postprocess(compiled, source_path)
self.write_output(compiled, source_path)
if self.supports_dependencies:
self.update_dependencies(source_path, self.find_dependencies(source_path))
            logger.info("Compiled: '{0}'".format(source_path))
return self.get_output_path(source_path)
def compile_file(self, source_path):
raise NotImplementedError
def compile_source(self, source):
raise NotImplementedError
#noinspection PyMethodMayBeStatic,PyUnusedLocal
def postprocess(self, compiled, source_path):
return compiled
def find_dependencies(self, source_path):
raise NotImplementedError
#noinspection PyMethodMayBeStatic
def get_dependencies(self, source_path):
return list(Dependency.objects.filter(
source=source_path
).order_by("depends_on").values_list(
"depends_on", flat=True
))
#noinspection PyMethodMayBeStatic
def get_dependents(self, source_path):
return list(Dependency.objects.filter(
depends_on=source_path
).order_by("source").values_list(
"source", flat=True
))
#noinspection PyMethodMayBeStatic
def update_dependencies(self, source_path, dependencies):
if not dependencies:
Dependency.objects.filter(source=source_path).delete()
else:
Dependency.objects.filter(
source=source_path
).exclude(
depends_on__in=dependencies,
).delete()
for dependency in dependencies:
Dependency.objects.get_or_create(
source=source_path,
depends_on=dependency,
)
def handle_changed_file(self, source_path):
self.compile(source_path, watch=True)
for dependent in self.get_dependents(source_path):
self.compile(dependent, watch=True)
| true
| true
|
1c4554d8d53932ad17004044f7fe096837dd57b5
| 6,209
|
py
|
Python
|
infra/bots/recipe_modules/core/api.py
|
henry-luo/skia
|
2f2187b66dca1761f590668d3cbdf07453df7b6f
|
[
"BSD-3-Clause"
] | null | null | null |
infra/bots/recipe_modules/core/api.py
|
henry-luo/skia
|
2f2187b66dca1761f590668d3cbdf07453df7b6f
|
[
"BSD-3-Clause"
] | null | null | null |
infra/bots/recipe_modules/core/api.py
|
henry-luo/skia
|
2f2187b66dca1761f590668d3cbdf07453df7b6f
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0201
import json
import os
import re
import sys
from recipe_engine import recipe_api
from recipe_engine import config_types
class SkiaApi(recipe_api.RecipeApi):
def setup(self):
"""Prepare the bot to run."""
# Setup dependencies.
self.m.vars.setup()
# Check out the Skia code.
self.checkout_steps()
if not self.m.path.exists(self.m.vars.tmp_dir):
self.m.run.run_once(self.m.file.makedirs,
'tmp_dir',
self.m.vars.tmp_dir,
infra_step=True)
self.m.flavor.setup()
def update_repo(self, parent_dir, repo):
"""Update an existing repo. This is safe to call without gen_steps."""
repo_path = parent_dir.join(repo.name)
if self.m.path.exists(repo_path): # pragma: nocover
if self.m.platform.is_win:
git = 'git.bat'
else:
git = 'git'
with self.m.step.context({'cwd': repo_path}):
self.m.step('git remote set-url',
cmd=[git, 'remote', 'set-url', 'origin', repo.url],
infra_step=True)
self.m.step('git fetch',
cmd=[git, 'fetch'],
infra_step=True)
self.m.step('git reset',
cmd=[git, 'reset', '--hard', repo.revision],
infra_step=True)
self.m.step('git clean',
cmd=[git, 'clean', '-d', '-f'],
infra_step=True)
def checkout_steps(self):
"""Run the steps to obtain a checkout of Skia."""
cfg_kwargs = {}
if not self.m.vars.persistent_checkout:
# We should've obtained the Skia checkout through isolates, so we don't
# need to perform the checkout ourselves.
return
# Use a persistent gclient cache for Swarming.
cfg_kwargs['CACHE_DIR'] = self.m.vars.gclient_cache
# Create the checkout path if necessary.
if not self.m.path.exists(self.m.vars.checkout_root):
self.m.file.makedirs('checkout_path',
self.m.vars.checkout_root,
infra_step=True)
# Initial cleanup.
gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
main_repo = self.m.properties['repository']
if self.m.vars.need_pdfium_checkout:
main_repo = 'https://pdfium.googlesource.com/pdfium.git'
if self.m.vars.need_flutter_checkout:
main_repo = 'https://github.com/flutter/engine.git'
main_name = self.m.path.basename(main_repo)
if main_name.endswith('.git'):
main_name = main_name[:-len('.git')]
# Special case for flutter because it seems to need a very specific
# directory structure to successfully build.
if self.m.vars.need_flutter_checkout and main_name == 'engine':
main_name = 'src/flutter'
main = gclient_cfg.solutions.add()
main.name = main_name
main.managed = False
main.url = main_repo
main.revision = self.m.properties.get('revision') or 'origin/master'
m = gclient_cfg.got_revision_mapping
m[main_name] = 'got_revision'
patch_root = main_name
if self.m.vars.need_pdfium_checkout:
# Skia is a DEP of PDFium; the 'revision' property is a Skia revision, and
# any patch should be applied to Skia, not PDFium.
main.revision = 'origin/master'
main.managed = True
m[main_name] = 'got_%s_revision' % main_name
skia_dep_path = 'pdfium/third_party/skia'
gclient_cfg.patch_projects['skia'] = (skia_dep_path, 'HEAD')
gclient_cfg.revisions[skia_dep_path] = self.m.properties['revision']
m[skia_dep_path] = 'got_revision'
patch_root = skia_dep_path
if self.m.vars.need_flutter_checkout:
# Skia is a DEP of Flutter; the 'revision' property is a Skia revision,
# and any patch should be applied to Skia, not Flutter.
main.revision = 'origin/master'
main.managed = True
m[main_name] = 'got_flutter_revision'
if 'Android' in self.m.vars.builder_cfg.get('extra_config', ''):
gclient_cfg.target_os.add('android')
skia_dep_path = 'src/third_party/skia'
gclient_cfg.patch_projects['skia'] = (skia_dep_path, 'HEAD')
gclient_cfg.revisions[skia_dep_path] = self.m.properties['revision']
m[skia_dep_path] = 'got_revision'
patch_root = skia_dep_path
self.update_repo(self.m.vars.checkout_root, main)
# TODO(rmistry): Remove the below block after there is a solution for
# crbug.com/616443
entries_file = self.m.vars.checkout_root.join('.gclient_entries')
if self.m.path.exists(entries_file):
self.m.file.remove('remove %s' % entries_file,
entries_file,
infra_step=True) # pragma: no cover
if self.m.vars.need_chromium_checkout:
chromium = gclient_cfg.solutions.add()
chromium.name = 'src'
chromium.managed = False
chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
chromium.revision = 'origin/lkgr'
self.update_repo(self.m.vars.checkout_root, chromium)
# Run bot_update.
checkout_kwargs = {}
checkout_kwargs['env'] = self.m.vars.default_env
# Hack the patch ref if necessary.
if self.m.properties.get('patch_storage', '') == 'gerrit':
if self.m.bot_update._issue and self.m.bot_update._patchset:
self.m.bot_update._gerrit_ref = 'refs/changes/%s/%d/%d' % (
str(self.m.bot_update._issue)[-2:],
self.m.bot_update._issue,
self.m.bot_update._patchset,
)
self.m.gclient.c = gclient_cfg
with self.m.step.context({'cwd': self.m.vars.checkout_root}):
update_step = self.m.bot_update.ensure_checkout(
patch_root=patch_root,
**checkout_kwargs)
self.m.vars.got_revision = (
update_step.presentation.properties['got_revision'])
if self.m.vars.need_chromium_checkout:
with self.m.step.context({'cwd': self.m.vars.checkout_root}):
self.m.gclient.runhooks(env=self.m.vars.gclient_env)
| 36.739645
| 80
| 0.639717
|
import json
import os
import re
import sys
from recipe_engine import recipe_api
from recipe_engine import config_types
class SkiaApi(recipe_api.RecipeApi):
def setup(self):
self.m.vars.setup()
self.checkout_steps()
if not self.m.path.exists(self.m.vars.tmp_dir):
self.m.run.run_once(self.m.file.makedirs,
'tmp_dir',
self.m.vars.tmp_dir,
infra_step=True)
self.m.flavor.setup()
def update_repo(self, parent_dir, repo):
repo_path = parent_dir.join(repo.name)
    if self.m.path.exists(repo_path):
      if self.m.platform.is_win:
git = 'git.bat'
else:
git = 'git'
with self.m.step.context({'cwd': repo_path}):
self.m.step('git remote set-url',
cmd=[git, 'remote', 'set-url', 'origin', repo.url],
infra_step=True)
self.m.step('git fetch',
cmd=[git, 'fetch'],
infra_step=True)
self.m.step('git reset',
cmd=[git, 'reset', '--hard', repo.revision],
infra_step=True)
self.m.step('git clean',
cmd=[git, 'clean', '-d', '-f'],
infra_step=True)
def checkout_steps(self):
cfg_kwargs = {}
if not self.m.vars.persistent_checkout:
return
cfg_kwargs['CACHE_DIR'] = self.m.vars.gclient_cache
if not self.m.path.exists(self.m.vars.checkout_root):
self.m.file.makedirs('checkout_path',
self.m.vars.checkout_root,
infra_step=True)
gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
main_repo = self.m.properties['repository']
if self.m.vars.need_pdfium_checkout:
main_repo = 'https://pdfium.googlesource.com/pdfium.git'
if self.m.vars.need_flutter_checkout:
main_repo = 'https://github.com/flutter/engine.git'
main_name = self.m.path.basename(main_repo)
if main_name.endswith('.git'):
main_name = main_name[:-len('.git')]
if self.m.vars.need_flutter_checkout and main_name == 'engine':
main_name = 'src/flutter'
main = gclient_cfg.solutions.add()
main.name = main_name
main.managed = False
main.url = main_repo
main.revision = self.m.properties.get('revision') or 'origin/master'
m = gclient_cfg.got_revision_mapping
m[main_name] = 'got_revision'
patch_root = main_name
if self.m.vars.need_pdfium_checkout:
main.revision = 'origin/master'
main.managed = True
m[main_name] = 'got_%s_revision' % main_name
skia_dep_path = 'pdfium/third_party/skia'
gclient_cfg.patch_projects['skia'] = (skia_dep_path, 'HEAD')
gclient_cfg.revisions[skia_dep_path] = self.m.properties['revision']
m[skia_dep_path] = 'got_revision'
patch_root = skia_dep_path
if self.m.vars.need_flutter_checkout:
main.revision = 'origin/master'
main.managed = True
m[main_name] = 'got_flutter_revision'
if 'Android' in self.m.vars.builder_cfg.get('extra_config', ''):
gclient_cfg.target_os.add('android')
skia_dep_path = 'src/third_party/skia'
gclient_cfg.patch_projects['skia'] = (skia_dep_path, 'HEAD')
gclient_cfg.revisions[skia_dep_path] = self.m.properties['revision']
m[skia_dep_path] = 'got_revision'
patch_root = skia_dep_path
self.update_repo(self.m.vars.checkout_root, main)
entries_file = self.m.vars.checkout_root.join('.gclient_entries')
if self.m.path.exists(entries_file):
self.m.file.remove('remove %s' % entries_file,
entries_file,
infra_step=True)
if self.m.vars.need_chromium_checkout:
chromium = gclient_cfg.solutions.add()
chromium.name = 'src'
chromium.managed = False
chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
chromium.revision = 'origin/lkgr'
self.update_repo(self.m.vars.checkout_root, chromium)
checkout_kwargs = {}
checkout_kwargs['env'] = self.m.vars.default_env
if self.m.properties.get('patch_storage', '') == 'gerrit':
if self.m.bot_update._issue and self.m.bot_update._patchset:
self.m.bot_update._gerrit_ref = 'refs/changes/%s/%d/%d' % (
str(self.m.bot_update._issue)[-2:],
self.m.bot_update._issue,
self.m.bot_update._patchset,
)
self.m.gclient.c = gclient_cfg
with self.m.step.context({'cwd': self.m.vars.checkout_root}):
update_step = self.m.bot_update.ensure_checkout(
patch_root=patch_root,
**checkout_kwargs)
self.m.vars.got_revision = (
update_step.presentation.properties['got_revision'])
if self.m.vars.need_chromium_checkout:
with self.m.step.context({'cwd': self.m.vars.checkout_root}):
self.m.gclient.runhooks(env=self.m.vars.gclient_env)
| true
| true
|
1c4554eae75d1b7c7c6e5d32aeb944d8689b1f7b
| 345
|
py
|
Python
|
Payment/manage.py
|
silop4all/payment-api
|
c331af421cd882d0b82d291251d1ce1c7f1a7223
|
[
"Apache-2.0"
] | 2
|
2018-02-27T20:51:15.000Z
|
2019-11-25T10:12:06.000Z
|
Payment/manage.py
|
silop4all/payment-api
|
c331af421cd882d0b82d291251d1ce1c7f1a7223
|
[
"Apache-2.0"
] | null | null | null |
Payment/manage.py
|
silop4all/payment-api
|
c331af421cd882d0b82d291251d1ce1c7f1a7223
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
Command-line utility for administrative tasks.
"""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE",
"Payment.settings"
)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 19.166667
| 65
| 0.675362
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE",
"Payment.settings"
)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| true
| true
|
1c45550a7c5f0ce477cb38a1f1274ca4f984e780
| 1,326
|
py
|
Python
|
zerver/management/commands/sync_ldap_user_data.py
|
Supermanu/zulip
|
26f6d708c2e30cfe50d9d61031edb759e8117596
|
[
"Apache-2.0"
] | null | null | null |
zerver/management/commands/sync_ldap_user_data.py
|
Supermanu/zulip
|
26f6d708c2e30cfe50d9d61031edb759e8117596
|
[
"Apache-2.0"
] | 15
|
2020-06-05T18:44:15.000Z
|
2022-03-11T23:26:03.000Z
|
zerver/management/commands/sync_ldap_user_data.py
|
Supermanu/zulip
|
26f6d708c2e30cfe50d9d61031edb759e8117596
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
from typing import Any
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from django.conf import settings
from zproject.backends import ZulipLDAPUserPopulator
from zerver.models import UserProfile
from zerver.lib.logging_util import create_logger
## Setup ##
logger = create_logger(__name__, settings.LDAP_SYNC_LOG_PATH, 'INFO')
# Run this on a cronjob to pick up on name changes.
def sync_ldap_user_data():
# type: () -> None
logger.info("Starting update.")
backend = ZulipLDAPUserPopulator()
for u in UserProfile.objects.select_related().filter(is_active=True, is_bot=False).all():
# This will save the user if relevant, and will do nothing if the user
# does not exist.
try:
if backend.populate_user(backend.django_to_ldap_username(u.email)) is not None:
logger.info("Updated %s." % (u.email,))
else:
logger.warning("Did not find %s in LDAP." % (u.email,))
except IntegrityError:
logger.warning("User populated did not match an existing user.")
logger.info("Finished update.")
class Command(BaseCommand):
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
sync_ldap_user_data()
| 35.837838
| 93
| 0.692308
|
from __future__ import absolute_import
from typing import Any
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from django.conf import settings
from zproject.backends import ZulipLDAPUserPopulator
from zerver.models import UserProfile
from zerver.lib.logging_util import create_logger
logger = create_logger(__name__, settings.LDAP_SYNC_LOG_PATH, 'INFO')
def sync_ldap_user_data():
logger.info("Starting update.")
backend = ZulipLDAPUserPopulator()
for u in UserProfile.objects.select_related().filter(is_active=True, is_bot=False).all():
try:
if backend.populate_user(backend.django_to_ldap_username(u.email)) is not None:
logger.info("Updated %s." % (u.email,))
else:
logger.warning("Did not find %s in LDAP." % (u.email,))
except IntegrityError:
logger.warning("User populated did not match an existing user.")
logger.info("Finished update.")
class Command(BaseCommand):
def handle(self, *args, **options):
sync_ldap_user_data()
| true
| true
|
1c45559ff167fb23cf4728841148d6e890c7d890
| 2,190
|
py
|
Python
|
app.py
|
ODU-Internship/BedSore
|
c9927ce181eb48fc93a3d2adf2330cc0ec412182
|
[
"MIT"
] | null | null | null |
app.py
|
ODU-Internship/BedSore
|
c9927ce181eb48fc93a3d2adf2330cc0ec412182
|
[
"MIT"
] | null | null | null |
app.py
|
ODU-Internship/BedSore
|
c9927ce181eb48fc93a3d2adf2330cc0ec412182
|
[
"MIT"
] | null | null | null |
import os
import plotly.express as px
import plotly.figure_factory as ff
import dash
import dash_core_components as dcc
import dash_html_components as html
import numpy as np
import time
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
#pressure_data = np.random.randint(1000, size=(10, 10))
#a = np.random.randint(1000, size=(5, 5))
t = np.zeros((5, 5))
server = app.server
app.layout = html.Div([
html.H2('Bed Pressure Status'),
html.Div([
dcc.Graph(id='bed_pressure'),
html.Div(id='sen_alerts'),
dcc.Interval(id='graph-update', interval=10000, n_intervals=0)
], style={'width': '100%', 'display': 'inline-block', 'padding': '0 20'}),
])
@app.callback(
[dash.dependencies.Output('bed_pressure', 'figure'),
dash.dependencies.Output('sen_alerts', 'children')],
[dash.dependencies.Input('graph-update', 'n_intervals')])
def update_graph(n):
#pressure_data = np.random.randint(1000, size=(15, 30))
a = np.random.randint(1000, size=(5, 5))
alert_msg = detect(a, t)
sens = html.Ul([html.Li(x) for x in alert_msg])
#fig = px.imshow(a,color_continuous_scale='Hot_r')
#colorscale = [[0, 'white'],[400,'red'] [1000, 'black']]
font_colors = ['black', 'white']
fig = ff.create_annotated_heatmap(
a, colorscale='Hot_r', font_colors=font_colors, showscale=True)
#fig = ff.create_annotated_heatmap(a,color_continuous_scale='Hot_r')
# fig.update_layout(width=int(1000))
#fig = px.imshow(pressure_data)
return fig, sens
def detect(arr, t):
sen_list = []
for (x, y), element in np.ndenumerate(np.array(arr)):
if(element > 400 and t[x][y] == 0.0):
t[x][y] = time.time()
elif(element > 400 and t[x][y] != 0):
if(time.time() - t[x][y] > 9):
sen_list.append("Alert of Sensor placed at " +
str(x) + "," + str(y))
elif(element <= 400):
t[x][y] = 0.0
return sen_list
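# Sanity sketch (editor's addition; values are hypothetical): a reading above
# 400 starts a timer for that cell, and a second high reading more than ~9s
# later raises an alert for it:
#
#   t0 = np.zeros((2, 2))
#   detect([[500, 0], [0, 0]], t0)   # -> [] (timer just started)
#   time.sleep(10)
#   detect([[500, 0], [0, 0]], t0)   # -> ['Alert of Sensor placed at 0,0']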
if __name__ == '__main__':
    # Older Dash releases expose run_server(); run() is a later alias.
    app.run_server(debug=True)
| 32.205882
| 79
| 0.607763
|
import os
import plotly.express as px
import plotly.figure_factory as ff
import dash
import dash_core_components as dcc
import dash_html_components as html
import numpy as np
import time
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
t = np.zeros((5, 5))
server = app.server
app.layout = html.Div([
html.H2('Bed Pressure Status'),
html.Div([
dcc.Graph(id='bed_pressure'),
html.Div(id='sen_alerts'),
dcc.Interval(id='graph-update', interval=10000, n_intervals=0)
], style={'width': '100%', 'display': 'inline-block', 'padding': '0 20'}),
])
@app.callback(
[dash.dependencies.Output('bed_pressure', 'figure'),
dash.dependencies.Output('sen_alerts', 'children')],
[dash.dependencies.Input('graph-update', 'n_intervals')])
def update_graph(n):
a = np.random.randint(1000, size=(5, 5))
alert_msg = detect(a, t)
sens = html.Ul([html.Li(x) for x in alert_msg])
font_colors = ['black', 'white']
fig = ff.create_annotated_heatmap(
a, colorscale='Hot_r', font_colors=font_colors, showscale=True)
return fig, sens
def detect(arr, t):
sen_list = []
for (x, y), element in np.ndenumerate(np.array(arr)):
if(element > 400 and t[x][y] == 0.0):
t[x][y] = time.time()
elif(element > 400 and t[x][y] != 0):
if(time.time() - t[x][y] > 9):
sen_list.append("Alert of Sensor placed at " +
str(x) + "," + str(y))
elif(element <= 400):
t[x][y] = 0.0
return sen_list
if __name__ == '__main__':
    app.run_server(debug=True)
| true
| true
|
1c4558ef8db3c87d9205525a1d0af1de3e5c9a0d
| 1,816
|
py
|
Python
|
digitaltmuseum/controllers/duplicates.py
|
danmichaelo/digimus-commons
|
32a44823dadbe0ae0160ff24fcbb7645d8304ff7
|
[
"Unlicense"
] | 1
|
2020-08-10T15:12:50.000Z
|
2020-08-10T15:12:50.000Z
|
digitaltmuseum/controllers/duplicates.py
|
danmichaelo/digimus-commons
|
32a44823dadbe0ae0160ff24fcbb7645d8304ff7
|
[
"Unlicense"
] | null | null | null |
digitaltmuseum/controllers/duplicates.py
|
danmichaelo/digimus-commons
|
32a44823dadbe0ae0160ff24fcbb7645d8304ff7
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:et:sw=4:ts=4:sts=4
import sqlite3
import urllib.request, urllib.parse, urllib.error
from flask import render_template
from .controller import Controller
class DuplicatesController(Controller):
def __init__(self, app, config):
Controller.__init__(self, app)
self.config = config
def get(self):
db = self.open_db()
cur = db.cursor()
dups = []
for row in cur.execute('SELECT institution,imageid,count(*) FROM files GROUP BY institution,imageid'):
if row[2] > 1:
dups.append([row[0], row[1]])
html = '<ul>'
for dup in dups:
html += '<li>%s/%s er oppgitt som kilde for:\n<ul class="dups">\n' % tuple(dup)
for row in cur.execute('SELECT filename, width, height FROM files WHERE institution=? AND imageid=?', dup):
name = row[0].replace(' ', '_')
name_enc = urllib.parse.quote(name.encode('utf-8'))
thumbmax = 120
if row[1] > row[2]:
thumbw = thumbmax
thumbh = round(float(row[2])/row[1]*thumbmax)
else:
thumbh = thumbmax
thumbw = round(float(row[1])/row[2]*thumbmax)
thumb_url = self.get_thumb_url(name, thumbw)
thumb = '<img src="%s" border="0" alt="%s" width="%d" height="%d"/>' % (thumb_url, name, thumbw, thumbh)
html += '<li><a href="https://commons.wikimedia.org/wiki/File:%s">%s<br />%s</a></li>\n' % (name_enc, thumb, row[0])
html += '</ul>\n'
html += '</ul>\n'
return render_template('dups.html', main=html)
| 39.478261
| 132
| 0.543502
|
import sqlite3
import urllib.request, urllib.parse, urllib.error
from flask import render_template
from .controller import Controller
class DuplicatesController(Controller):
def __init__(self, app, config):
Controller.__init__(self, app)
self.config = config
def get(self):
db = self.open_db()
cur = db.cursor()
dups = []
for row in cur.execute('SELECT institution,imageid,count(*) FROM files GROUP BY institution,imageid'):
if row[2] > 1:
dups.append([row[0], row[1]])
html = '<ul>'
for dup in dups:
html += '<li>%s/%s er oppgitt som kilde for:\n<ul class="dups">\n' % tuple(dup)
for row in cur.execute('SELECT filename, width, height FROM files WHERE institution=? AND imageid=?', dup):
name = row[0].replace(' ', '_')
name_enc = urllib.parse.quote(name.encode('utf-8'))
thumbmax = 120
if row[1] > row[2]:
thumbw = thumbmax
thumbh = round(float(row[2])/row[1]*thumbmax)
else:
thumbh = thumbmax
thumbw = round(float(row[1])/row[2]*thumbmax)
thumb_url = self.get_thumb_url(name, thumbw)
thumb = '<img src="%s" border="0" alt="%s" width="%d" height="%d"/>' % (thumb_url, name, thumbw, thumbh)
html += '<li><a href="https://commons.wikimedia.org/wiki/File:%s">%s<br />%s</a></li>\n' % (name_enc, thumb, row[0])
html += '</ul>\n'
html += '</ul>\n'
return render_template('dups.html', main=html)
| true
| true
|
1c45595dbeb1ee754f37d0d735e5cb42d450fdc5
| 463
|
py
|
Python
|
python3/third_max.py
|
joshiaj7/CodingChallenges
|
f95dd79132f07c296e074d675819031912f6a943
|
[
"MIT"
] | 1
|
2020-10-08T09:17:40.000Z
|
2020-10-08T09:17:40.000Z
|
python3/third_max.py
|
joshiaj7/CodingChallenges
|
f95dd79132f07c296e074d675819031912f6a943
|
[
"MIT"
] | null | null | null |
python3/third_max.py
|
joshiaj7/CodingChallenges
|
f95dd79132f07c296e074d675819031912f6a943
|
[
"MIT"
] | null | null | null |
"""
Space : O(1)
Time : O(n)
"""
from typing import List


class Solution:
def thirdMax(self, nums: List[int]) -> int:
a, b, c = -10**10, -10**10, -10**10
nums = list(set(nums))
n = len(nums)
for i in nums:
a = max(i, a)
if n <= 2:
return a
for j in nums:
if j < a:
b = max(b, j)
for k in nums:
if k < b:
c = max(c, k)
return c
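# Usage sketch (editor's addition):
#
#   Solution().thirdMax([3, 2, 1])     # -> 1
#   Solution().thirdMax([1, 2])        # -> 2  (fewer than 3 distinct -> max)
#   Solution().thirdMax([2, 2, 3, 1])  # -> 1  (duplicates collapse first)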
| 15.965517
| 47
| 0.360691
|
from typing import List


class Solution:
def thirdMax(self, nums: List[int]) -> int:
a, b, c = -10**10, -10**10, -10**10
nums = list(set(nums))
n = len(nums)
for i in nums:
a = max(i, a)
if n <= 2:
return a
for j in nums:
if j < a:
b = max(b, j)
for k in nums:
if k < b:
c = max(c, k)
return c
| true
| true
|
1c4559619debbfab81b5667b6115f6d8185615c5
| 1,229
|
py
|
Python
|
benchmark/generate_libs/jamplus.py
|
chadaustin/ibb
|
ea1e25cc53a1ad7c302a12d95fc704c443924dff
|
[
"MIT"
] | 4
|
2015-04-09T17:24:58.000Z
|
2019-07-02T12:05:56.000Z
|
benchmark/generate_libs/jamplus.py
|
chadaustin/ibb
|
ea1e25cc53a1ad7c302a12d95fc704c443924dff
|
[
"MIT"
] | null | null | null |
benchmark/generate_libs/jamplus.py
|
chadaustin/ibb
|
ea1e25cc53a1ad7c302a12d95fc704c443924dff
|
[
"MIT"
] | 1
|
2019-11-08T15:38:29.000Z
|
2019-11-08T15:38:29.000Z
|
#!/usr/bin/python
import os.path
import cppcodebase
import random
def CreateLibJamfile(lib_number, classes):
os.chdir(cppcodebase.lib_name(lib_number))
handle = file("Jamfile.jam", "w")
handle.write ("SubDir TOP lib_" + str(lib_number) + " ;\n\n")
handle.write ("SubDirHdrs $(INCLUDES) ;\n\n")
handle.write ("Library lib_" + str(lib_number) + " :\n")
for i in xrange(classes):
handle.write(' class_' + str(i) + '.cpp\n')
handle.write (' ;\n')
os.chdir('..')
def CreateFullJamfile(libs):
handle = file("Jamfile.jam", "w")
handle.write ("SubDir TOP ;\n\n")
for i in xrange(libs):
handle.write('SubInclude TOP ' + cppcodebase.lib_name(i) + ' ;\n')
handle.write('\nWorkspace GeneratedLibs :\n')
for i in xrange(libs):
handle.write('\t\t' + cppcodebase.lib_name(i) + '\n')
handle.write(';\n')
handle = file("Jamrules.jam", "w")
handle.write ('INCLUDES = $(TOP) ;\n')
def CreateCodebase(libs, classes, internal_includes, external_includes):
cppcodebase.SetDir('jamplus')
cppcodebase.CreateSetOfLibraries(libs, classes, internal_includes, external_includes, CreateLibJamfile)
CreateFullJamfile(libs)
os.chdir('..')
| 29.261905
| 107
| 0.643613
|
import os.path
import cppcodebase
import random
def CreateLibJamfile(lib_number, classes):
os.chdir(cppcodebase.lib_name(lib_number))
handle = file("Jamfile.jam", "w")
handle.write ("SubDir TOP lib_" + str(lib_number) + " ;\n\n")
handle.write ("SubDirHdrs $(INCLUDES) ;\n\n")
handle.write ("Library lib_" + str(lib_number) + " :\n")
for i in xrange(classes):
handle.write(' class_' + str(i) + '.cpp\n')
handle.write (' ;\n')
os.chdir('..')
def CreateFullJamfile(libs):
handle = file("Jamfile.jam", "w")
handle.write ("SubDir TOP ;\n\n")
for i in xrange(libs):
handle.write('SubInclude TOP ' + cppcodebase.lib_name(i) + ' ;\n')
handle.write('\nWorkspace GeneratedLibs :\n')
for i in xrange(libs):
handle.write('\t\t' + cppcodebase.lib_name(i) + '\n')
handle.write(';\n')
handle = file("Jamrules.jam", "w")
handle.write ('INCLUDES = $(TOP) ;\n')
def CreateCodebase(libs, classes, internal_includes, external_includes):
cppcodebase.SetDir('jamplus')
cppcodebase.CreateSetOfLibraries(libs, classes, internal_includes, external_includes, CreateLibJamfile)
CreateFullJamfile(libs)
os.chdir('..')
| true
| true
|
1c4559ad4582ddb2d757dd3fc5e5fa4bd27dcdd3
| 5,260
|
py
|
Python
|
build/android/pylib/base_test_sharder.py
|
leiferikb/bitpop-private
|
4c967307d228e86f07f2576068a169e846c833ca
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2021-11-15T15:17:43.000Z
|
2021-11-15T15:17:43.000Z
|
build/android/pylib/base_test_sharder.py
|
leiferikb/bitpop-private
|
4c967307d228e86f07f2576068a169e846c833ca
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
build/android/pylib/base_test_sharder.py
|
leiferikb/bitpop-private
|
4c967307d228e86f07f2576068a169e846c833ca
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2019-02-21T16:13:42.000Z
|
2019-02-21T16:13:42.000Z
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import android_commands
import logging
import multiprocessing
from android_commands import errors
from forwarder import Forwarder
from test_result import TestResults
def _ShardedTestRunnable(test):
"""Standalone function needed by multiprocessing.Pool."""
log_format = '[' + test.device + '] # %(asctime)-15s: %(message)s'
if logging.getLogger().handlers:
logging.getLogger().handlers[0].setFormatter(logging.Formatter(log_format))
else:
logging.basicConfig(format=log_format)
  # Handle SystemExit here, since a Python bug can otherwise exit the current process.
try:
return test.Run()
except SystemExit:
return TestResults()
def SetTestsContainer(tests_container):
"""Sets tests container.
multiprocessing.Queue can't be pickled across processes, so we need to set
this as a 'global', per process, via multiprocessing.Pool.
"""
BaseTestSharder.tests_container = tests_container
class BaseTestSharder(object):
"""Base class for sharding tests across multiple devices.
Args:
attached_devices: A list of attached devices.
"""
# See more in SetTestsContainer.
tests_container = None
def __init__(self, attached_devices, build_type='Debug'):
self.attached_devices = attached_devices
# Worst case scenario: a device will drop offline per run, so we need
# to retry until we're out of devices.
self.retries = len(self.attached_devices)
self.tests = []
self.build_type = build_type
def CreateShardedTestRunner(self, device, index):
"""Factory function to create a suite-specific test runner.
Args:
device: Device serial where this shard will run
index: Index of this device in the pool.
Returns:
An object of BaseTestRunner type (that can provide a "Run()" method).
"""
pass
def SetupSharding(self, tests):
"""Called before starting the shards."""
pass
def OnTestsCompleted(self, test_runners, test_results):
"""Notifies that we completed the tests."""
pass
def _KillHostForwarder(self):
Forwarder.KillHost(self.build_type)
def RunShardedTests(self):
"""Runs the tests in all connected devices.
Returns:
A TestResults object.
"""
logging.warning('*' * 80)
logging.warning('Sharding in ' + str(len(self.attached_devices)) +
' devices.')
logging.warning('Note that the output is not synchronized.')
logging.warning('Look for the "Final result" banner in the end.')
logging.warning('*' * 80)
final_results = TestResults()
self._KillHostForwarder()
for retry in xrange(self.retries):
logging.warning('Try %d of %d', retry + 1, self.retries)
self.SetupSharding(self.tests)
test_runners = []
      # Try to create N shards, retrying on failure.
try:
for index, device in enumerate(self.attached_devices):
logging.warning('*' * 80)
logging.warning('Creating shard %d for %s', index, device)
logging.warning('*' * 80)
test_runner = self.CreateShardedTestRunner(device, index)
test_runners += [test_runner]
except errors.DeviceUnresponsiveError as e:
logging.critical('****Failed to create a shard: [%s]', e)
self.attached_devices.remove(device)
continue
logging.warning('Starting...')
pool = multiprocessing.Pool(len(self.attached_devices),
SetTestsContainer,
[BaseTestSharder.tests_container])
# map can't handle KeyboardInterrupt exception. It's a python bug.
# So use map_async instead.
async_results = pool.map_async(_ShardedTestRunnable, test_runners)
try:
results_lists = async_results.get(999999)
except errors.DeviceUnresponsiveError as e:
logging.critical('****Failed to run test: [%s]', e)
self.attached_devices = android_commands.GetAttachedDevices()
continue
test_results = TestResults.FromTestResults(results_lists)
      # Re-check the attached devices, since some may have gone offline.
retry_devices = set(android_commands.GetAttachedDevices())
# Remove devices that had exceptions.
retry_devices -= TestResults.DeviceExceptions(results_lists)
# Retry on devices that didn't have any exception.
self.attached_devices = list(retry_devices)
if (retry == self.retries - 1 or
len(self.attached_devices) == 0):
all_passed = final_results.ok + test_results.ok
final_results = test_results
final_results.ok = all_passed
break
else:
final_results.ok += test_results.ok
self.tests = []
for t in test_results.GetAllBroken():
self.tests += [t.name]
if not self.tests:
break
else:
      # We ran out of retries, possibly out of healthy devices.
# There's no recovery at this point.
raise Exception('Unrecoverable error while retrying test runs.')
self.OnTestsCompleted(test_runners, final_results)
self._KillHostForwarder()
return final_results
| 34.834437
| 79
| 0.680989
|
import android_commands
import logging
import multiprocessing
from android_commands import errors
from forwarder import Forwarder
from test_result import TestResults
def _ShardedTestRunnable(test):
log_format = '[' + test.device + '] # %(asctime)-15s: %(message)s'
if logging.getLogger().handlers:
logging.getLogger().handlers[0].setFormatter(logging.Formatter(log_format))
else:
logging.basicConfig(format=log_format)
try:
return test.Run()
except SystemExit:
return TestResults()
def SetTestsContainer(tests_container):
BaseTestSharder.tests_container = tests_container
class BaseTestSharder(object):
tests_container = None
def __init__(self, attached_devices, build_type='Debug'):
self.attached_devices = attached_devices
self.retries = len(self.attached_devices)
self.tests = []
self.build_type = build_type
def CreateShardedTestRunner(self, device, index):
pass
def SetupSharding(self, tests):
pass
def OnTestsCompleted(self, test_runners, test_results):
pass
def _KillHostForwarder(self):
Forwarder.KillHost(self.build_type)
def RunShardedTests(self):
logging.warning('*' * 80)
logging.warning('Sharding in ' + str(len(self.attached_devices)) +
' devices.')
logging.warning('Note that the output is not synchronized.')
logging.warning('Look for the "Final result" banner in the end.')
logging.warning('*' * 80)
final_results = TestResults()
self._KillHostForwarder()
for retry in xrange(self.retries):
logging.warning('Try %d of %d', retry + 1, self.retries)
self.SetupSharding(self.tests)
test_runners = []
      # Try to create N shards, retrying on failure.
try:
for index, device in enumerate(self.attached_devices):
logging.warning('*' * 80)
logging.warning('Creating shard %d for %s', index, device)
logging.warning('*' * 80)
test_runner = self.CreateShardedTestRunner(device, index)
test_runners += [test_runner]
except errors.DeviceUnresponsiveError as e:
logging.critical('****Failed to create a shard: [%s]', e)
self.attached_devices.remove(device)
continue
logging.warning('Starting...')
pool = multiprocessing.Pool(len(self.attached_devices),
SetTestsContainer,
[BaseTestSharder.tests_container])
# map can't handle KeyboardInterrupt exception. It's a python bug.
# So use map_async instead.
async_results = pool.map_async(_ShardedTestRunnable, test_runners)
try:
results_lists = async_results.get(999999)
except errors.DeviceUnresponsiveError as e:
logging.critical('****Failed to run test: [%s]', e)
self.attached_devices = android_commands.GetAttachedDevices()
continue
test_results = TestResults.FromTestResults(results_lists)
      # Re-check the attached devices, since some may have gone offline.
retry_devices = set(android_commands.GetAttachedDevices())
# Remove devices that had exceptions.
retry_devices -= TestResults.DeviceExceptions(results_lists)
# Retry on devices that didn't have any exception.
self.attached_devices = list(retry_devices)
if (retry == self.retries - 1 or
len(self.attached_devices) == 0):
all_passed = final_results.ok + test_results.ok
final_results = test_results
final_results.ok = all_passed
break
else:
final_results.ok += test_results.ok
self.tests = []
for t in test_results.GetAllBroken():
self.tests += [t.name]
if not self.tests:
break
else:
raise Exception('Unrecoverable error while retrying test runs.')
self.OnTestsCompleted(test_runners, final_results)
self._KillHostForwarder()
return final_results
| true
| true
|
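To make the subclassing contract of BaseTestSharder concrete, here is a minimal hypothetical sketch; DummyRunner, its Run() method, and the factory override are invented for illustration, while BaseTestSharder and TestResults are assumed to come from the record above:
class DummyRunner(object):
  # Hypothetical runner satisfying the Run() contract expected by the pool.
  def __init__(self, device, index):
    self.device = device
    self.index = index
  def Run(self):
    # A real runner would execute tests on self.device and report them here.
    return TestResults()
class DummySharder(BaseTestSharder):
  def CreateShardedTestRunner(self, device, index):
    # Factory hook invoked once per attached device to build a shard.
    return DummyRunner(device, index)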
1c455a3b6c64198e9f6c6b8b99f938134a554603
| 817
|
py
|
Python
|
mmelemental/models/forcefield/bonded/bonds/base.py
|
RlyehAD/mmelemental
|
5f0754356fd2c89e9119cf810f1972430dfa75dc
|
[
"BSD-3-Clause"
] | null | null | null |
mmelemental/models/forcefield/bonded/bonds/base.py
|
RlyehAD/mmelemental
|
5f0754356fd2c89e9119cf810f1972430dfa75dc
|
[
"BSD-3-Clause"
] | null | null | null |
mmelemental/models/forcefield/bonded/bonds/base.py
|
RlyehAD/mmelemental
|
5f0754356fd2c89e9119cf810f1972430dfa75dc
|
[
"BSD-3-Clause"
] | null | null | null |
from pydantic import Field
from mmelemental.models.forcefield.params import Params
from typing import Optional, List, Tuple, Union
from cmselemental.types import Array
import os
import pathlib
__all__ = ["Bonds"]
class Bonds(Params):
lengths: Array[float] = Field(
..., description="Equilibrium bond lengths. Default unit is Angstroms."
)
lengths_units: Optional[str] = Field(
"angstroms", description="Equilibrium bond lengths unit."
)
connectivity: List[Tuple[Union[int, str], Union[int, str], float]] = Field( # type: ignore
...,
description="Particle indices or names e.g. types for each bond and the bond order: (index1, index2, order).",
min_items=1,
)
_path = os.path.join(pathlib.Path(__file__).parent.absolute(), "potentials", "*.py")
| 34.041667
| 119
| 0.684211
|
from pydantic import Field
from mmelemental.models.forcefield.params import Params
from typing import Optional, List, Tuple, Union
from cmselemental.types import Array
import os
import pathlib
__all__ = ["Bonds"]
class Bonds(Params):
lengths: Array[float] = Field(
..., description="Equilibrium bond lengths. Default unit is Angstroms."
)
lengths_units: Optional[str] = Field(
"angstroms", description="Equilibrium bond lengths unit."
)
connectivity: List[Tuple[Union[int, str], Union[int, str], float]] = Field( ...,
description="Particle indices or names e.g. types for each bond and the bond order: (index1, index2, order).",
min_items=1,
)
_path = os.path.join(pathlib.Path(__file__).parent.absolute(), "potentials", "*.py")
| true
| true
|
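A small construction sketch for the Bonds model above; it assumes the class is importable from the module path shown in the record, and the lengths, particle indices, and bond orders are made-up values chosen only to satisfy the field types:
from mmelemental.models.forcefield.bonded.bonds.base import Bonds  # assumed import path
# Two hypothetical single bonds given as (index1, index2, order),
# with equilibrium lengths in the default angstrom unit.
bonds = Bonds(
    lengths=[0.96, 1.09],
    connectivity=[(0, 1, 1.0), (0, 2, 1.0)],
)
print(bonds.lengths_units)  # -> "angstroms"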
1c455aa0f8a6d073be57b1f48b7b42ece8de7bc8
| 22,104
|
py
|
Python
|
cirq/ops/three_qubit_gates.py
|
Hongbo-Miao/Cirq
|
d6c6f9b1ea282e79db4475e5327d0380e6558ba6
|
[
"Apache-2.0"
] | null | null | null |
cirq/ops/three_qubit_gates.py
|
Hongbo-Miao/Cirq
|
d6c6f9b1ea282e79db4475e5327d0380e6558ba6
|
[
"Apache-2.0"
] | null | null | null |
cirq/ops/three_qubit_gates.py
|
Hongbo-Miao/Cirq
|
d6c6f9b1ea282e79db4475e5327d0380e6558ba6
|
[
"Apache-2.0"
] | 1
|
2020-12-24T07:13:40.000Z
|
2020-12-24T07:13:40.000Z
|
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common quantum gates that target three qubits."""
from typing import AbstractSet, Any, List, Optional, Tuple, TYPE_CHECKING
import numpy as np
import sympy
from cirq import linalg, protocols, value
from cirq._compat import proper_repr
from cirq._doc import document
from cirq.ops import (
common_gates,
controlled_gate,
eigen_gate,
gate_features,
pauli_gates,
swap_gates,
)
if TYPE_CHECKING:
# pylint: disable=unused-import
import cirq
class CCZPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
"""A doubly-controlled-Z that can be raised to a power.
The matrix of `CCZ**t` is `diag(1, 1, 1, 1, 1, 1, 1, exp(i pi t))`.
"""
def _eigen_components(self):
return [
(0, np.diag([1, 1, 1, 1, 1, 1, 1, 0])),
(1, np.diag([0, 0, 0, 0, 0, 0, 0, 1])),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIZ': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIZ': global_phase * -c,
'IZZ': global_phase * -c,
'ZZZ': global_phase * c,
}
)
def _decompose_(self, qubits):
"""An adjacency-respecting decomposition.
0: ───p───@──────────────@───────@──────────@──────────
│ │ │ │
1: ───p───X───@───p^-1───X───@───X──────@───X──────@───
│ │ │ │
2: ───p───────X───p──────────X───p^-1───X───p^-1───X───
where p = T**self._exponent
"""
if protocols.is_parameterized(self):
return NotImplemented
a, b, c = qubits
# Hacky magic: avoid the non-adjacent edge.
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
p = common_gates.T ** self._exponent
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
return [
p(a),
p(b),
p(c),
sweep_abc,
p(b) ** -1,
p(c),
sweep_abc,
p(c) ** -1,
sweep_abc,
p(c) ** -1,
sweep_abc,
]
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
ooo = args.subspace_index(0b111)
args.target_tensor[ooo] *= np.exp(1j * self.exponent * np.pi)
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', '@'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
lines = [
args.format('h {0};\n', qubits[2]),
args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2]),
args.format('h {0};\n', qubits[2]),
]
return ''.join(lines)
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
lines = [
formatter.format('H {0}\n', qubits[2]),
formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2]),
formatter.format('H {0}\n', qubits[2]),
]
return ''.join(lines)
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.CCZ'
return '(cirq.CCZ**{})'.format(proper_repr(self._exponent))
return 'cirq.CCZPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'CCZ'
return 'CCZ**{}'.format(self._exponent)
@value.value_equality()
class ThreeQubitDiagonalGate(gate_features.ThreeQubitGate):
"""A gate given by a diagonal 8x8 matrix."""
def __init__(self, diag_angles_radians: List[value.TParamVal]) -> None:
r"""A three qubit gate with only diagonal elements.
This gate's off-diagonal elements are zero and it's on diagonal
elements are all phases.
Args:
diag_angles_radians: The list of angles on the diagonal in radians.
If these values are $(x_0, x_1, \ldots , x_7)$ then the unitary
has diagonal values $(e^{i x_0}, e^{i x_1}, \ldots, e^{i x_7})$.
"""
self._diag_angles_radians: List[value.TParamVal] = diag_angles_radians
def _is_parameterized_(self) -> bool:
return any(protocols.is_parameterized(angle) for angle in self._diag_angles_radians)
def _parameter_names_(self) -> AbstractSet[str]:
return {
name for angle in self._diag_angles_radians for name in protocols.parameter_names(angle)
}
def _resolve_parameters_(
self, resolver: 'cirq.ParamResolverOrSimilarType'
) -> 'ThreeQubitDiagonalGate':
return self.__class__(
[protocols.resolve_parameters(angle, resolver) for angle in self._diag_angles_radians]
)
def _has_unitary_(self) -> bool:
return not self._is_parameterized_()
def _unitary_(self) -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
return np.diag([np.exp(1j * angle) for angle in self._diag_angles_radians])
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
for index, angle in enumerate(self._diag_angles_radians):
little_endian_index = 4 * (index & 1) + 2 * ((index >> 1) & 1) + ((index >> 2) & 1)
subspace_index = args.subspace_index(little_endian_index)
args.target_tensor[subspace_index] *= np.exp(1j * angle)
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
rounded_angles = np.array(self._diag_angles_radians)
if args.precision is not None:
rounded_angles = rounded_angles.round(args.precision)
diag_str = 'diag({})'.format(', '.join(proper_repr(angle) for angle in rounded_angles))
return protocols.CircuitDiagramInfo((diag_str, '#2', '#3'))
def __pow__(self, exponent: Any) -> 'ThreeQubitDiagonalGate':
if not isinstance(exponent, (int, float, sympy.Basic)):
return NotImplemented
return ThreeQubitDiagonalGate(
[protocols.mul(angle, exponent, NotImplemented) for angle in self._diag_angles_radians]
)
def _decompose_(self, qubits):
"""An adjacency-respecting decomposition.
0: ───p_0───@──────────────@───────@──────────@──────────
│ │ │ │
1: ───p_1───X───@───p_3────X───@───X──────@───X──────@───
│ │ │ │
2: ───p_2───────X───p_4────────X───p_5────X───p_6────X───
where p_i = T**(4*x_i) and x_i solve the system of equations
[0, 0, 1, 0, 1, 1, 1][x_0] [r_1]
[0, 1, 0, 1, 1, 0, 1][x_1] [r_2]
[0, 1, 1, 1, 0, 1, 0][x_2] [r_3]
[1, 0, 0, 1, 1, 1, 0][x_3] = [r_4]
[1, 0, 1, 1, 0, 0, 1][x_4] [r_5]
[1, 1, 0, 0, 0, 1, 1][x_5] [r_6]
[1, 1, 1, 0, 1, 0, 0][x_6] [r_7]
where r_i is self._diag_angles_radians[i].
The above system was created by equating the composition of the gates
in the circuit diagram to np.diag(self._diag_angles) (shifted by a
global phase of np.exp(-1j * self._diag_angles[0])).
"""
a, b, c = qubits
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
phase_matrix_inverse = 0.25 * np.array(
[
[-1, -1, -1, 1, 1, 1, 1],
[-1, 1, 1, -1, -1, 1, 1],
[1, -1, 1, -1, 1, -1, 1],
[-1, 1, 1, 1, 1, -1, -1],
[1, 1, -1, 1, -1, -1, 1],
[1, -1, 1, 1, -1, 1, -1],
[1, 1, -1, -1, 1, 1, -1],
]
)
shifted_angles_tail = [
angle - self._diag_angles_radians[0] for angle in self._diag_angles_radians[1:]
]
phase_solutions = phase_matrix_inverse.dot(shifted_angles_tail)
p_gates = [pauli_gates.Z ** (solution / np.pi) for solution in phase_solutions]
return [
p_gates[0](a),
p_gates[1](b),
p_gates[2](c),
sweep_abc,
p_gates[3](b),
p_gates[4](c),
sweep_abc,
p_gates[5](c),
sweep_abc,
p_gates[6](c),
sweep_abc,
]
def _value_equality_values_(self):
return tuple(self._diag_angles_radians)
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
x = [np.exp(1j * angle) for angle in self._diag_angles_radians]
return value.LinearDict(
{
'III': (x[0] + x[1] + x[2] + x[3] + x[4] + x[5] + x[6] + x[7]) / 8,
'IIZ': (x[0] - x[1] + x[2] - x[3] + x[4] - x[5] + x[6] - x[7]) / 8,
'IZI': (x[0] + x[1] - x[2] - x[3] + x[4] + x[5] - x[6] - x[7]) / 8,
'IZZ': (x[0] - x[1] - x[2] + x[3] + x[4] - x[5] - x[6] + x[7]) / 8,
'ZII': (x[0] + x[1] + x[2] + x[3] - x[4] - x[5] - x[6] - x[7]) / 8,
'ZIZ': (x[0] - x[1] + x[2] - x[3] - x[4] + x[5] - x[6] + x[7]) / 8,
'ZZI': (x[0] + x[1] - x[2] - x[3] - x[4] - x[5] + x[6] + x[7]) / 8,
'ZZZ': (x[0] - x[1] - x[2] + x[3] - x[4] + x[5] + x[6] - x[7]) / 8,
}
)
def __repr__(self) -> str:
return 'cirq.ThreeQubitDiagonalGate([{}])'.format(
','.join(proper_repr(angle) for angle in self._diag_angles_radians)
)
class CCXPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
"""A Toffoli (doubly-controlled-NOT) that can be raised to a power.
The matrix of `CCX**t` is an 8x8 identity except the bottom right 2x2 area
is the matrix of `X**t`.
"""
def _eigen_components(self):
return [
(0, linalg.block_diag(np.diag([1, 1, 1, 1, 1, 1]), np.array([[0.5, 0.5], [0.5, 0.5]]))),
(
1,
linalg.block_diag(
np.diag([0, 0, 0, 0, 0, 0]), np.array([[0.5, -0.5], [-0.5, 0.5]])
),
),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIX': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIX': global_phase * -c,
'IZX': global_phase * -c,
'ZZX': global_phase * c,
}
)
def qubit_index_to_equivalence_group_key(self, index):
return index < 2
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return protocols.apply_unitary(
controlled_gate.ControlledGate(
controlled_gate.ControlledGate(pauli_gates.X ** self.exponent)
),
protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
default=NotImplemented,
)
def _decompose_(self, qubits):
c1, c2, t = qubits
yield common_gates.H(t)
yield CCZ(c1, c2, t) ** self._exponent
yield common_gates.H(t)
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', 'X'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
return args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
return formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.TOFFOLI'
return '(cirq.TOFFOLI**{})'.format(proper_repr(self._exponent))
return 'cirq.CCXPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'TOFFOLI'
return 'TOFFOLI**{}'.format(self._exponent)
@value.value_equality()
class CSwapGate(gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate):
"""A controlled swap gate. The Fredkin gate."""
def qubit_index_to_equivalence_group_key(self, index):
return 0 if index == 0 else 1
def _pauli_expansion_(self) -> value.LinearDict[str]:
return value.LinearDict(
{
'III': 3 / 4,
'IXX': 1 / 4,
'IYY': 1 / 4,
'IZZ': 1 / 4,
'ZII': 1 / 4,
'ZXX': -1 / 4,
'ZYY': -1 / 4,
'ZZZ': -1 / 4,
}
)
def _trace_distance_bound_(self) -> float:
return 1.0
def _decompose_(self, qubits):
c, t1, t2 = qubits
# Hacky magic: special case based on adjacency.
if hasattr(t1, 'is_adjacent'):
if not t1.is_adjacent(t2):
# Targets separated by control.
return self._decompose_inside_control(t1, c, t2)
if not t1.is_adjacent(c):
# Control separated from t1 by t2.
return self._decompose_outside_control(c, t2, t1)
return self._decompose_outside_control(c, t1, t2)
def _decompose_inside_control(
self, target1: 'cirq.Qid', control: 'cirq.Qid', target2: 'cirq.Qid'
) -> 'cirq.OP_TREE':
"""A decomposition assuming the control separates the targets.
target1: ─@─X───────T──────@────────@─────────X───@─────X^-0.5─
│ │ │ │ │ │
control: ─X─@─X─────@─T^-1─X─@─T────X─@─X^0.5─@─@─X─@──────────
│ │ │ │ │ │
target2: ─────@─H─T─X─T──────X─T^-1───X─T^-1────X───X─H─S^-1───
"""
a, b, c = target1, control, target2
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, a)
yield common_gates.CNOT(c, b)
yield common_gates.H(c)
yield common_gates.T(c)
yield common_gates.CNOT(b, c)
yield common_gates.T(a)
yield common_gates.T(b) ** -1
yield common_gates.T(c)
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield common_gates.T(b)
yield common_gates.T(c) ** -1
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield pauli_gates.X(b) ** 0.5
yield common_gates.T(c) ** -1
yield common_gates.CNOT(b, a)
yield common_gates.CNOT(b, c)
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield common_gates.H(c)
yield common_gates.S(c) ** -1
yield pauli_gates.X(a) ** -0.5
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
return protocols.apply_unitary(
controlled_gate.ControlledGate(swap_gates.SWAP),
protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
default=NotImplemented,
)
def _decompose_outside_control(
self, control: 'cirq.Qid', near_target: 'cirq.Qid', far_target: 'cirq.Qid'
) -> 'cirq.OP_TREE':
"""A decomposition assuming one of the targets is in the middle.
control: ───T──────@────────@───@────────────@────────────────
│ │ │ │
near: ─X─T──────X─@─T^-1─X─@─X────@─X^0.5─X─@─X^0.5────────
│ │ │ │ │
far: ─@─Y^-0.5─T─X─T──────X─T^-1─X─T^-1────X─S─────X^-0.5─
"""
a, b, c = control, near_target, far_target
t = common_gates.T
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
yield common_gates.CNOT(c, b)
yield pauli_gates.Y(c) ** -0.5
yield t(a), t(b), t(c)
yield sweep_abc
yield t(b) ** -1, t(c)
yield sweep_abc
yield t(c) ** -1
yield sweep_abc
yield t(c) ** -1
yield pauli_gates.X(b) ** 0.5
yield sweep_abc
yield common_gates.S(c)
yield pauli_gates.X(b) ** 0.5
yield pauli_gates.X(c) ** -0.5
def _has_unitary_(self) -> bool:
return True
def _unitary_(self) -> np.ndarray:
return linalg.block_diag(np.diag([1, 1, 1, 1, 1]), np.array([[0, 1], [1, 0]]), np.diag([1]))
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
if not args.use_unicode_characters:
return protocols.CircuitDiagramInfo(('@', 'swap', 'swap'))
return protocols.CircuitDiagramInfo(('@', '×', '×'))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
args.validate_version('2.0')
return args.format('cswap {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
return formatter.format('CSWAP {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
def _value_equality_values_(self):
return ()
def __str__(self) -> str:
return 'FREDKIN'
def __repr__(self) -> str:
return 'cirq.FREDKIN'
CCZ = CCZPowGate()
document(
CCZ,
"""The Controlled-Controlled-Z gate.
The `exponent=1` instance of `cirq.CCZPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . 1 .],
[. . . . . . . -1]]
```
""",
)
CCNotPowGate = CCXPowGate
CCX = TOFFOLI = CCNOT = CCXPowGate()
document(
CCX,
"""The TOFFOLI gate.
The `exponent=1` instance of `cirq.CCXPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . . 1],
[. . . . . . 1 .]]
```
""",
)
CSWAP = FREDKIN = CSwapGate()
document(
CSWAP,
"""The Controlled Swap gate.
An instance of `cirq.CSwapGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . . 1 .],
[. . . . . 1 . .],
[. . . . . . . 1]]
```
""",
)
| 34.864353
| 100
| 0.513346
|
from typing import AbstractSet, Any, List, Optional, Tuple, TYPE_CHECKING
import numpy as np
import sympy
from cirq import linalg, protocols, value
from cirq._compat import proper_repr
from cirq._doc import document
from cirq.ops import (
common_gates,
controlled_gate,
eigen_gate,
gate_features,
pauli_gates,
swap_gates,
)
if TYPE_CHECKING:
import cirq
class CCZPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
def _eigen_components(self):
return [
(0, np.diag([1, 1, 1, 1, 1, 1, 1, 0])),
(1, np.diag([0, 0, 0, 0, 0, 0, 0, 1])),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIZ': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIZ': global_phase * -c,
'IZZ': global_phase * -c,
'ZZZ': global_phase * c,
}
)
def _decompose_(self, qubits):
if protocols.is_parameterized(self):
return NotImplemented
a, b, c = qubits
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
p = common_gates.T ** self._exponent
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
return [
p(a),
p(b),
p(c),
sweep_abc,
p(b) ** -1,
p(c),
sweep_abc,
p(c) ** -1,
sweep_abc,
p(c) ** -1,
sweep_abc,
]
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
ooo = args.subspace_index(0b111)
args.target_tensor[ooo] *= np.exp(1j * self.exponent * np.pi)
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', '@'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
lines = [
args.format('h {0};\n', qubits[2]),
args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2]),
args.format('h {0};\n', qubits[2]),
]
return ''.join(lines)
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
lines = [
formatter.format('H {0}\n', qubits[2]),
formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2]),
formatter.format('H {0}\n', qubits[2]),
]
return ''.join(lines)
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.CCZ'
return '(cirq.CCZ**{})'.format(proper_repr(self._exponent))
return 'cirq.CCZPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'CCZ'
return 'CCZ**{}'.format(self._exponent)
@value.value_equality()
class ThreeQubitDiagonalGate(gate_features.ThreeQubitGate):
def __init__(self, diag_angles_radians: List[value.TParamVal]) -> None:
self._diag_angles_radians: List[value.TParamVal] = diag_angles_radians
def _is_parameterized_(self) -> bool:
return any(protocols.is_parameterized(angle) for angle in self._diag_angles_radians)
def _parameter_names_(self) -> AbstractSet[str]:
return {
name for angle in self._diag_angles_radians for name in protocols.parameter_names(angle)
}
def _resolve_parameters_(
self, resolver: 'cirq.ParamResolverOrSimilarType'
) -> 'ThreeQubitDiagonalGate':
return self.__class__(
[protocols.resolve_parameters(angle, resolver) for angle in self._diag_angles_radians]
)
def _has_unitary_(self) -> bool:
return not self._is_parameterized_()
def _unitary_(self) -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
return np.diag([np.exp(1j * angle) for angle in self._diag_angles_radians])
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
for index, angle in enumerate(self._diag_angles_radians):
little_endian_index = 4 * (index & 1) + 2 * ((index >> 1) & 1) + ((index >> 2) & 1)
subspace_index = args.subspace_index(little_endian_index)
args.target_tensor[subspace_index] *= np.exp(1j * angle)
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
rounded_angles = np.array(self._diag_angles_radians)
if args.precision is not None:
rounded_angles = rounded_angles.round(args.precision)
diag_str = 'diag({})'.format(', '.join(proper_repr(angle) for angle in rounded_angles))
return protocols.CircuitDiagramInfo((diag_str, '#2', '#3'))
def __pow__(self, exponent: Any) -> 'ThreeQubitDiagonalGate':
if not isinstance(exponent, (int, float, sympy.Basic)):
return NotImplemented
return ThreeQubitDiagonalGate(
[protocols.mul(angle, exponent, NotImplemented) for angle in self._diag_angles_radians]
)
def _decompose_(self, qubits):
a, b, c = qubits
if hasattr(b, 'is_adjacent'):
if not b.is_adjacent(a):
b, c = c, b
elif not b.is_adjacent(c):
a, b = b, a
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
phase_matrix_inverse = 0.25 * np.array(
[
[-1, -1, -1, 1, 1, 1, 1],
[-1, 1, 1, -1, -1, 1, 1],
[1, -1, 1, -1, 1, -1, 1],
[-1, 1, 1, 1, 1, -1, -1],
[1, 1, -1, 1, -1, -1, 1],
[1, -1, 1, 1, -1, 1, -1],
[1, 1, -1, -1, 1, 1, -1],
]
)
shifted_angles_tail = [
angle - self._diag_angles_radians[0] for angle in self._diag_angles_radians[1:]
]
phase_solutions = phase_matrix_inverse.dot(shifted_angles_tail)
p_gates = [pauli_gates.Z ** (solution / np.pi) for solution in phase_solutions]
return [
p_gates[0](a),
p_gates[1](b),
p_gates[2](c),
sweep_abc,
p_gates[3](b),
p_gates[4](c),
sweep_abc,
p_gates[5](c),
sweep_abc,
p_gates[6](c),
sweep_abc,
]
def _value_equality_values_(self):
return tuple(self._diag_angles_radians)
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
x = [np.exp(1j * angle) for angle in self._diag_angles_radians]
return value.LinearDict(
{
'III': (x[0] + x[1] + x[2] + x[3] + x[4] + x[5] + x[6] + x[7]) / 8,
'IIZ': (x[0] - x[1] + x[2] - x[3] + x[4] - x[5] + x[6] - x[7]) / 8,
'IZI': (x[0] + x[1] - x[2] - x[3] + x[4] + x[5] - x[6] - x[7]) / 8,
'IZZ': (x[0] - x[1] - x[2] + x[3] + x[4] - x[5] - x[6] + x[7]) / 8,
'ZII': (x[0] + x[1] + x[2] + x[3] - x[4] - x[5] - x[6] - x[7]) / 8,
'ZIZ': (x[0] - x[1] + x[2] - x[3] - x[4] + x[5] - x[6] + x[7]) / 8,
'ZZI': (x[0] + x[1] - x[2] - x[3] - x[4] - x[5] + x[6] + x[7]) / 8,
'ZZZ': (x[0] - x[1] - x[2] + x[3] - x[4] + x[5] + x[6] - x[7]) / 8,
}
)
def __repr__(self) -> str:
return 'cirq.ThreeQubitDiagonalGate([{}])'.format(
','.join(proper_repr(angle) for angle in self._diag_angles_radians)
)
class CCXPowGate(
eigen_gate.EigenGate, gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate
):
def _eigen_components(self):
return [
(0, linalg.block_diag(np.diag([1, 1, 1, 1, 1, 1]), np.array([[0.5, 0.5], [0.5, 0.5]]))),
(
1,
linalg.block_diag(
np.diag([0, 0, 0, 0, 0, 0]), np.array([[0.5, -0.5], [-0.5, 0.5]])
),
),
]
def _trace_distance_bound_(self) -> Optional[float]:
if self._is_parameterized_():
return None
return abs(np.sin(self._exponent * 0.5 * np.pi))
def _pauli_expansion_(self) -> value.LinearDict[str]:
if protocols.is_parameterized(self):
return NotImplemented
global_phase = 1j ** (2 * self._exponent * self._global_shift)
z_phase = 1j ** self._exponent
c = -1j * z_phase * np.sin(np.pi * self._exponent / 2) / 4
return value.LinearDict(
{
'III': global_phase * (1 - c),
'IIX': global_phase * c,
'IZI': global_phase * c,
'ZII': global_phase * c,
'ZZI': global_phase * -c,
'ZIX': global_phase * -c,
'IZX': global_phase * -c,
'ZZX': global_phase * c,
}
)
def qubit_index_to_equivalence_group_key(self, index):
return index < 2
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if protocols.is_parameterized(self):
return NotImplemented
p = 1j ** (2 * self._exponent * self._global_shift)
if p != 1:
args.target_tensor *= p
return protocols.apply_unitary(
controlled_gate.ControlledGate(
controlled_gate.ControlledGate(pauli_gates.X ** self.exponent)
),
protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
default=NotImplemented,
)
def _decompose_(self, qubits):
c1, c2, t = qubits
yield common_gates.H(t)
yield CCZ(c1, c2, t) ** self._exponent
yield common_gates.H(t)
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
return protocols.CircuitDiagramInfo(('@', '@', 'X'), exponent=self._diagram_exponent(args))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
if self._exponent != 1:
return None
args.validate_version('2.0')
return args.format('ccx {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
if self._exponent != 1:
return None
return formatter.format('CCNOT {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
def __repr__(self) -> str:
if self._global_shift == 0:
if self._exponent == 1:
return 'cirq.TOFFOLI'
return '(cirq.TOFFOLI**{})'.format(proper_repr(self._exponent))
return 'cirq.CCXPowGate(exponent={}, global_shift={!r})'.format(
proper_repr(self._exponent), self._global_shift
)
def __str__(self) -> str:
if self._exponent == 1:
return 'TOFFOLI'
return 'TOFFOLI**{}'.format(self._exponent)
@value.value_equality()
class CSwapGate(gate_features.ThreeQubitGate, gate_features.InterchangeableQubitsGate):
def qubit_index_to_equivalence_group_key(self, index):
return 0 if index == 0 else 1
def _pauli_expansion_(self) -> value.LinearDict[str]:
return value.LinearDict(
{
'III': 3 / 4,
'IXX': 1 / 4,
'IYY': 1 / 4,
'IZZ': 1 / 4,
'ZII': 1 / 4,
'ZXX': -1 / 4,
'ZYY': -1 / 4,
'ZZZ': -1 / 4,
}
)
def _trace_distance_bound_(self) -> float:
return 1.0
def _decompose_(self, qubits):
c, t1, t2 = qubits
if hasattr(t1, 'is_adjacent'):
if not t1.is_adjacent(t2):
return self._decompose_inside_control(t1, c, t2)
if not t1.is_adjacent(c):
return self._decompose_outside_control(c, t2, t1)
return self._decompose_outside_control(c, t1, t2)
def _decompose_inside_control(
self, target1: 'cirq.Qid', control: 'cirq.Qid', target2: 'cirq.Qid'
) -> 'cirq.OP_TREE':
a, b, c = target1, control, target2
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, a)
yield common_gates.CNOT(c, b)
yield common_gates.H(c)
yield common_gates.T(c)
yield common_gates.CNOT(b, c)
yield common_gates.T(a)
yield common_gates.T(b) ** -1
yield common_gates.T(c)
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield common_gates.T(b)
yield common_gates.T(c) ** -1
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield pauli_gates.X(b) ** 0.5
yield common_gates.T(c) ** -1
yield common_gates.CNOT(b, a)
yield common_gates.CNOT(b, c)
yield common_gates.CNOT(a, b)
yield common_gates.CNOT(b, c)
yield common_gates.H(c)
yield common_gates.S(c) ** -1
yield pauli_gates.X(a) ** -0.5
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
return protocols.apply_unitary(
controlled_gate.ControlledGate(swap_gates.SWAP),
protocols.ApplyUnitaryArgs(args.target_tensor, args.available_buffer, args.axes),
default=NotImplemented,
)
def _decompose_outside_control(
self, control: 'cirq.Qid', near_target: 'cirq.Qid', far_target: 'cirq.Qid'
) -> 'cirq.OP_TREE':
a, b, c = control, near_target, far_target
t = common_gates.T
sweep_abc = [common_gates.CNOT(a, b), common_gates.CNOT(b, c)]
yield common_gates.CNOT(c, b)
yield pauli_gates.Y(c) ** -0.5
yield t(a), t(b), t(c)
yield sweep_abc
yield t(b) ** -1, t(c)
yield sweep_abc
yield t(c) ** -1
yield sweep_abc
yield t(c) ** -1
yield pauli_gates.X(b) ** 0.5
yield sweep_abc
yield common_gates.S(c)
yield pauli_gates.X(b) ** 0.5
yield pauli_gates.X(c) ** -0.5
def _has_unitary_(self) -> bool:
return True
def _unitary_(self) -> np.ndarray:
return linalg.block_diag(np.diag([1, 1, 1, 1, 1]), np.array([[0, 1], [1, 0]]), np.diag([1]))
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
if not args.use_unicode_characters:
return protocols.CircuitDiagramInfo(('@', 'swap', 'swap'))
return protocols.CircuitDiagramInfo(('@', '×', '×'))
def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
args.validate_version('2.0')
return args.format('cswap {0},{1},{2};\n', qubits[0], qubits[1], qubits[2])
def _quil_(
self, qubits: Tuple['cirq.Qid', ...], formatter: 'cirq.QuilFormatter'
) -> Optional[str]:
return formatter.format('CSWAP {0} {1} {2}\n', qubits[0], qubits[1], qubits[2])
def _value_equality_values_(self):
return ()
def __str__(self) -> str:
return 'FREDKIN'
def __repr__(self) -> str:
return 'cirq.FREDKIN'
CCZ = CCZPowGate()
document(
CCZ,
"""The Controlled-Controlled-Z gate.
The `exponent=1` instance of `cirq.CCZPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . 1 .],
[. . . . . . . -1]]
```
""",
)
CCNotPowGate = CCXPowGate
CCX = TOFFOLI = CCNOT = CCXPowGate()
document(
CCX,
"""The TOFFOLI gate.
The `exponent=1` instance of `cirq.CCXPowGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . 1 . .],
[. . . . . . . 1],
[. . . . . . 1 .]]
```
""",
)
CSWAP = FREDKIN = CSwapGate()
document(
CSWAP,
"""The Controlled Swap gate.
An instance of `cirq.CSwapGate`.
Matrix:
```
[[1 . . . . . . .],
[. 1 . . . . . .],
[. . 1 . . . . .],
[. . . 1 . . . .],
[. . . . 1 . . .],
[. . . . . . 1 .],
[. . . . . 1 . .],
[. . . . . . . 1]]
```
""",
)
| true
| true
|
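A short numerical check of two documented claims above, using only public cirq API calls (cirq.unitary, cirq.LineQubit, cirq.Circuit) and assuming a reasonably recent cirq install:
import numpy as np
import cirq
# CCZ at exponent 1 is diag(1, 1, 1, 1, 1, 1, 1, -1), as its docstring states.
np.testing.assert_allclose(
    cirq.unitary(cirq.CCZ), np.diag([1, 1, 1, 1, 1, 1, 1, -1]), atol=1e-8)
# TOFFOLI decomposes as H(t) CCZ H(t), matching CCXPowGate._decompose_.
a, b, t = cirq.LineQubit.range(3)
circuit = cirq.Circuit([cirq.H(t), cirq.CCZ(a, b, t), cirq.H(t)])
np.testing.assert_allclose(
    circuit.unitary(), cirq.unitary(cirq.TOFFOLI), atol=1e-8)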
1c455c21b416c77406ef8ef3f269649ffa99767d
| 657
|
py
|
Python
|
tests/while/test_while_class_in_body.py
|
sco1/pylox
|
b4820828306c20cee3f8533c2547fafb92c6c1bd
|
[
"MIT"
] | 2
|
2021-12-18T01:52:50.000Z
|
2022-01-17T19:41:52.000Z
|
tests/while/test_while_class_in_body.py
|
sco1/pylox
|
b4820828306c20cee3f8533c2547fafb92c6c1bd
|
[
"MIT"
] | 18
|
2021-11-30T04:05:53.000Z
|
2022-02-01T03:30:04.000Z
|
tests/while/test_while_class_in_body.py
|
sco1/pylox
|
b4820828306c20cee3f8533c2547fafb92c6c1bd
|
[
"MIT"
] | null | null | null |
from textwrap import dedent
import pytest
from pylox.lox import Lox
# Base cases from https://github.com/munificent/craftinginterpreters/blob/master/test/while/class_in_body.lox
TEST_SRC = dedent(
"""\
// [line 2] Error at 'class': Expect expression.
while (true) class Foo {}
"""
)
EXPECTED_STDOUTS = ["2:14: LoxParseError: Expected expression."]
def test_class_in_body(capsys: pytest.CaptureFixture) -> None:
interpreter = Lox()
interpreter.run(TEST_SRC)
assert interpreter.had_error
assert not interpreter.had_runtime_error
all_out = capsys.readouterr().out.splitlines()
assert all_out == EXPECTED_STDOUTS
| 24.333333
| 109
| 0.727549
|
from textwrap import dedent
import pytest
from pylox.lox import Lox
TEST_SRC = dedent(
"""\
// [line 2] Error at 'class': Expect expression.
while (true) class Foo {}
"""
)
EXPECTED_STDOUTS = ["2:14: LoxParseError: Expected expression."]
def test_class_in_body(capsys: pytest.CaptureFixture) -> None:
interpreter = Lox()
interpreter.run(TEST_SRC)
assert interpreter.had_error
assert not interpreter.had_runtime_error
all_out = capsys.readouterr().out.splitlines()
assert all_out == EXPECTED_STDOUTS
| true
| true
|
1c455d557f5e17a71c8823251f3fd837386f7ace
| 2,004
|
py
|
Python
|
launch/mouse_with_lidar.launch.py
|
rt-net/raspimouse_ros2_examples
|
f16aef6c087a6e6325801b9f6a10b272b4d59c91
|
[
"Apache-2.0"
] | 30
|
2020-05-08T12:13:03.000Z
|
2021-12-27T20:14:43.000Z
|
launch/mouse_with_lidar.launch.py
|
rt-net/raspimouse_ros2_examples
|
f16aef6c087a6e6325801b9f6a10b272b4d59c91
|
[
"Apache-2.0"
] | 16
|
2020-05-28T02:35:24.000Z
|
2021-12-10T05:41:31.000Z
|
launch/mouse_with_lidar.launch.py
|
rt-net/raspimouse_ros2_examples
|
f16aef6c087a6e6325801b9f6a10b272b4d59c91
|
[
"Apache-2.0"
] | 4
|
2020-10-02T23:50:02.000Z
|
2021-08-25T14:19:17.000Z
|
# Copyright 2020 RT Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.actions import IncludeLaunchDescription
from launch.actions import OpaqueFunction
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import LifecycleNode
def generate_launch_description():
declare_lidar = DeclareLaunchArgument(
'lidar', default_value='lds',
description='LiDAR: lds only, for now.'
)
mouse_node = LifecycleNode(
name='raspimouse',
package='raspimouse', executable='raspimouse', output='screen',
parameters=[os.path.join(get_package_share_directory(
'raspimouse_ros2_examples'), 'config', 'mouse.yml')]
)
def func_launch_lidar_node(context):
if context.launch_configurations['lidar'] == 'lds':
return [IncludeLaunchDescription(
PythonLaunchDescriptionSource([os.path.join(
get_package_share_directory('hls_lfcd_lds_driver'),
'launch'),
'/hlds_laser.launch.py'
]),)]
launch_lidar_node = OpaqueFunction(function=func_launch_lidar_node)
ld = LaunchDescription()
ld.add_action(declare_lidar)
ld.add_action(mouse_node)
ld.add_action(launch_lidar_node)
return ld
| 35.785714
| 75
| 0.72006
|
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.actions import IncludeLaunchDescription
from launch.actions import OpaqueFunction
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import LifecycleNode
def generate_launch_description():
declare_lidar = DeclareLaunchArgument(
'lidar', default_value='lds',
description='LiDAR: lds only, for now.'
)
mouse_node = LifecycleNode(
name='raspimouse',
package='raspimouse', executable='raspimouse', output='screen',
parameters=[os.path.join(get_package_share_directory(
'raspimouse_ros2_examples'), 'config', 'mouse.yml')]
)
def func_launch_lidar_node(context):
if context.launch_configurations['lidar'] == 'lds':
return [IncludeLaunchDescription(
PythonLaunchDescriptionSource([os.path.join(
get_package_share_directory('hls_lfcd_lds_driver'),
'launch'),
'/hlds_laser.launch.py'
]),)]
launch_lidar_node = OpaqueFunction(function=func_launch_lidar_node)
ld = LaunchDescription()
ld.add_action(declare_lidar)
ld.add_action(mouse_node)
ld.add_action(launch_lidar_node)
return ld
| true
| true
|
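For context on how the launch description above is typically consumed, a hedged sketch that includes it from another ROS 2 launch file; the share-directory lookup mirrors the record's own imports, and 'lds' is just the documented default for the lidar argument:
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import IncludeLaunchDescription
from launch.launch_description_sources import PythonLaunchDescriptionSource
def generate_launch_description():
    # Reuse mouse_with_lidar.launch.py from the installed package share directory.
    mouse_with_lidar = IncludeLaunchDescription(
        PythonLaunchDescriptionSource(os.path.join(
            get_package_share_directory('raspimouse_ros2_examples'),
            'launch', 'mouse_with_lidar.launch.py')),
        launch_arguments={'lidar': 'lds'}.items())
    return LaunchDescription([mouse_with_lidar])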
1c455e490ae1d668471a3db85f33281fb0cacecb
| 1,017
|
py
|
Python
|
CGPA_Claculator.py
|
DharaneeswaranR/CGPA-Calculator
|
ce8390288057ac8e1f79b2d76a233e63d6d7402c
|
[
"BSD-3-Clause"
] | 1
|
2021-11-20T15:42:15.000Z
|
2021-11-20T15:42:15.000Z
|
CGPA_Claculator.py
|
DharaneeswaranR/CGPA-Calculator
|
ce8390288057ac8e1f79b2d76a233e63d6d7402c
|
[
"BSD-3-Clause"
] | null | null | null |
CGPA_Claculator.py
|
DharaneeswaranR/CGPA-Calculator
|
ce8390288057ac8e1f79b2d76a233e63d6d7402c
|
[
"BSD-3-Clause"
] | null | null | null |
def calculate_cgpa(grade_points, credits, num):
mark_sum = list()
for i in range(num):
        mark_sum.append(grade_points[i] * credits[i])
cgpa = sum(mark_sum) / sum(credits)
return cgpa
if __name__ == '__main__':
num = int(input("\nEnter number of subjects : "))
grade_points = list()
credits = list()
for i in range(1, num+1):
mark = int(input("\nEnter marks of Subject " + str(i) + " : "))
credit = int(input("Enter credit of Subject " + str(i) + " : "))
if 90 <= mark <= 100:
grade_points.append(10)
elif 80 <= mark <= 89:
grade_points.append(9)
elif 70 <= mark <= 79:
grade_points.append(8)
elif 60 <= mark <= 69:
grade_points.append(7)
elif 50 <= mark <= 59:
grade_points.append(6)
else:
grade_points.append(0)
credits.append(credit)
print("\nCGPA is {:.2f}\n".format(calculate_cgpa(grade_points, credits, num)))
| 26.763158
| 82
| 0.547689
|
def calculate_cgpa(grade_points, credits, num):
mark_sum = list()
for i in range(num):
        mark_sum.append(grade_points[i] * credits[i])
cgpa = sum(mark_sum) / sum(credits)
return cgpa
if __name__ == '__main__':
num = int(input("\nEnter number of subjects : "))
grade_points = list()
credits = list()
for i in range(1, num+1):
mark = int(input("\nEnter marks of Subject " + str(i) + " : "))
credit = int(input("Enter credit of Subject " + str(i) + " : "))
if 90 <= mark <= 100:
grade_points.append(10)
elif 80 <= mark <= 89:
grade_points.append(9)
elif 70 <= mark <= 79:
grade_points.append(8)
elif 60 <= mark <= 69:
grade_points.append(7)
elif 50 <= mark <= 59:
grade_points.append(6)
else:
grade_points.append(0)
credits.append(credit)
print("\nCGPA is {:.2f}\n".format(calculate_cgpa(grade_points, credits, num)))
| true
| true
|
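A non-interactive worked example for the calculate_cgpa helper above; the grade points and credits are invented, and calling the function directly bypasses the input() prompts:
# (10*4 + 9*3 + 8*3) / (4 + 3 + 3) = 91 / 10 = 9.10
grade_points = [10, 9, 8]
credits = [4, 3, 3]
print("CGPA is {:.2f}".format(calculate_cgpa(grade_points, credits, 3)))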
1c455ea2754f11157926ef47242fd8393fbd2d15
| 9,560
|
py
|
Python
|
mlfromscratch/supervised_learning/regression.py
|
sourcepirate/ML-From-Scratch
|
c6839bf47c360d6fa48861302fd90ccd4a8c38db
|
[
"MIT"
] | null | null | null |
mlfromscratch/supervised_learning/regression.py
|
sourcepirate/ML-From-Scratch
|
c6839bf47c360d6fa48861302fd90ccd4a8c38db
|
[
"MIT"
] | null | null | null |
mlfromscratch/supervised_learning/regression.py
|
sourcepirate/ML-From-Scratch
|
c6839bf47c360d6fa48861302fd90ccd4a8c38db
|
[
"MIT"
] | 2
|
2017-10-03T07:45:16.000Z
|
2018-12-21T01:31:21.000Z
|
from __future__ import print_function, division
import numpy as np
import math
from mlfromscratch.utils import normalize, polynomial_features
class Regression(object):
""" Base regression model. Models the relationship between a scalar dependent variable y and the independent
variables X.
Parameters:
-----------
reg_factor: float
The factor that will determine the amount of regularization and feature
shrinkage.
    n_iterations: int
The number of training iterations the algorithm will tune the weights for.
learning_rate: float
The step length that will be used when updating the weights.
gradient_descent: boolean
True or false depending if gradient descent should be used when training. If
false then we use batch optimization by least squares.
"""
def __init__(self, reg_factor, n_iterations, learning_rate, gradient_descent):
self.w = None
self.n_iterations = n_iterations
self.learning_rate = learning_rate
self.gradient_descent = gradient_descent
self.reg_factor = reg_factor
def initialize_weights(self, n_features):
""" Initialize weights randomly [-1/N, 1/N] """
limit = 1 / math.sqrt(n_features)
self.w = np.random.uniform(-limit, limit, (n_features, ))
def regularization(self):
# No regularization by default
return 0
def regularization_gradient(self):
# No regularization by default
return 0
def fit(self, X, y):
# Insert constant ones as first column (for bias weights)
X = np.insert(X, 0, 1, axis=1)
n_features = np.shape(X)[1]
# Get weights by gradient descent opt.
if self.gradient_descent:
self.training_errors = []
self.initialize_weights(n_features)
# Do gradient descent for n_iterations
for _ in range(self.n_iterations):
y_pred = X.dot(self.w)
# Calculate mean squared error
mse = np.mean(0.5 * (y - y_pred)**2 + self.regularization())
self.training_errors.append(mse)
# Gradient of l2 loss w.r.t w
grad_w = - (y - y_pred).dot(X) + self.regularization_gradient()
# Update the weights
self.w -= self.learning_rate * grad_w
# Get weights by least squares (using Moore-Penrose pseudoinverse)
else:
U, S, V = np.linalg.svd(X.T.dot(X) + self.reg_factor * np.identity(n_features))
S = np.diag(S)
X_sq_reg_inv = V.dot(np.linalg.pinv(S)).dot(U.T)
self.w = X_sq_reg_inv.dot(X.T).dot(y)
def predict(self, X):
# Insert constant ones for bias weights
X = np.insert(X, 0, 1, axis=1)
y_pred = X.dot(self.w)
return y_pred
class LinearRegression(Regression):
"""Linear model.
Parameters:
-----------
    n_iterations: int
The number of training iterations the algorithm will tune the weights for.
learning_rate: float
The step length that will be used when updating the weights.
gradient_descent: boolean
True or false depending if gradient descent should be used when training. If
false then we use batch optimization by least squares.
"""
def __init__(self, n_iterations=100, learning_rate=0.001, gradient_descent=True):
super(LinearRegression, self).__init__(reg_factor=0, n_iterations=n_iterations, \
learning_rate=learning_rate, gradient_descent=gradient_descent)
class PolynomialRegression(Regression):
"""Performs a non-linear transformation of the data before fitting the model
and doing predictions which allows for doing non-linear regression.
Parameters:
-----------
degree: int
The power of the polynomial that the independent variable X will be transformed to.
    n_iterations: int
The number of training iterations the algorithm will tune the weights for.
learning_rate: float
The step length that will be used when updating the weights.
gradient_descent: boolean
True or false depending if gradient descent should be used when training. If
false then we use batch optimization by least squares.
"""
def __init__(self, degree, n_iterations=3000, learning_rate=0.001, gradient_descent=True):
self.degree = degree
super(PolynomialRegression, self).__init__(reg_factor=0, n_iterations=n_iterations, \
learning_rate=learning_rate, gradient_descent=gradient_descent)
def fit(self, X, y):
X_transformed = polynomial_features(X, degree=self.degree)
super(PolynomialRegression, self).fit(X_transformed, y)
def predict(self, X):
X_transformed = polynomial_features(X, degree=self.degree)
return super(PolynomialRegression, self).predict(X_transformed)
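
# Note (added): assuming mlfromscratch.utils.polynomial_features matches the
# upstream ML-From-Scratch helper, it expands each sample with every monomial of
# the inputs up to `degree` (including a bias column), e.g. degree=2 maps (a, b)
# to (1, a, b, a^2, a*b, b^2) before the linear fit is applied.
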
class RidgeRegression(Regression):
"""Also referred to as Tikhonov regularization. Linear regression model with a regularization factor.
Model that tries to balance the fit of the model with respect to the training data and the complexity
    of the model. A large regularization factor decreases the variance of the model.
Parameters:
-----------
reg_factor: float
The factor that will determine the amount of regularization and feature
shrinkage.
    n_iterations: int
        The number of training iterations the algorithm will tune the weights for.
    learning_rate: float
        The step length that will be used when updating the weights.
    gradient_descent: boolean
        True if gradient descent should be used when training. If false,
        the weights are instead fit in one step by batch least squares.
"""
def __init__(self, reg_factor, n_iterations=1000, learning_rate=0.001, gradient_descent=True):
super(RidgeRegression, self).__init__(reg_factor, n_iterations, learning_rate, gradient_descent)
def regularization(self):
return self.reg_factor * self.w.T.dot(self.w)
def regularization_gradient(self):
return self.reg_factor * self.w
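
# Note (added): with gradient_descent=False this is exactly Tikhonov-regularized
# least squares, w = (X^T X + reg_factor * I)^(-1) X^T y, computed in
# Regression.fit(); increasing reg_factor shrinks the weights toward zero and
# lowers the variance of the model.
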
class LassoRegression(Regression):
"""Linear regression model with a regularization factor which does both variable selection
and regularization. Model that tries to balance the fit of the model with respect to the training
    data and the complexity of the model. A large regularization factor
    decreases the variance of the model and shrinks some parameters toward zero.
Parameters:
-----------
degree: int
The power of the polynomial that the independent variable X will be transformed to.
reg_factor: float
The factor that will determine the amount of regularization and feature
shrinkage.
    n_iterations: int
        The number of training iterations the algorithm will tune the weights for.
    learning_rate: float
        The step length that will be used when updating the weights.
    gradient_descent: boolean
        True if gradient descent should be used when training. If false,
        the weights are instead fit in one step by batch least squares.
"""
def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
self.degree = degree
super(LassoRegression, self).__init__(reg_factor, n_iterations, learning_rate, gradient_descent)
def fit(self, X, y):
X_transformed = normalize(polynomial_features(X, degree=self.degree))
super(LassoRegression, self).fit(X_transformed, y)
def predict(self, X):
X_transformed = normalize(polynomial_features(X, degree=self.degree))
return super(LassoRegression, self).predict(X_transformed)
    def regularization(self):
        # L1 penalty: reg_factor * sum of absolute weights
        return self.reg_factor * np.linalg.norm(self.w, 1)
def regularization_gradient(self):
return self.reg_factor * np.sign(self.w)
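
# Note (added): the L1 penalty is non-differentiable at zero, so
# regularization_gradient() returns the subgradient reg_factor * np.sign(w).
# With plain gradient descent the weights therefore hover near zero rather than
# landing exactly on it, unlike coordinate-descent lasso solvers.
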
class PolynomialRidgeRegression(Regression):
"""Similar to regular ridge regression except that the data is transformed to allow
for polynomial regression.
Parameters:
-----------
degree: int
The power of the polynomial that the independent variable X will be transformed to.
reg_factor: float
The factor that will determine the amount of regularization and feature
shrinkage.
    n_iterations: int
        The number of training iterations the algorithm will tune the weights for.
    learning_rate: float
        The step length that will be used when updating the weights.
    gradient_descent: boolean
        True if gradient descent should be used when training. If false,
        the weights are instead fit in one step by batch least squares.
"""
def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
self.degree = degree
super(PolynomialRidgeRegression, self).__init__(reg_factor, n_iterations, learning_rate, gradient_descent)
def fit(self, X, y):
X_transformed = normalize(polynomial_features(X, degree=self.degree))
super(PolynomialRidgeRegression, self).fit(X_transformed, y)
def predict(self, X):
X_transformed = normalize(polynomial_features(X, degree=self.degree))
return super(PolynomialRidgeRegression, self).predict(X_transformed)
def regularization(self):
return self.reg_factor * self.w.T.dot(self.w)
def regularization_gradient(self):
return self.reg_factor * self.w
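

# Minimal usage sketch (added; not part of the original module). The synthetic
# data and hyperparameters are illustrative assumptions; only numpy is needed,
# since LinearRegression and RidgeRegression avoid the mlfromscratch helpers.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    X = rng.uniform(-1, 1, (200, 3))
    true_w = np.array([2.0, -1.0, 0.5])
    y = X.dot(true_w) + 0.1 * rng.randn(200)

    # Gradient-descent fit; the learning rate must stay small because grad_w
    # is a sum (not a mean) over the 200 samples.
    linear = LinearRegression(n_iterations=500, learning_rate=0.001)
    linear.fit(X, y)
    print("linear weights (bias first):", linear.w)

    # Closed-form fit via the SVD branch of Regression.fit().
    ridge = RidgeRegression(reg_factor=0.1, gradient_descent=False)
    ridge.fit(X, y)
    print("ridge weights  (bias first):", ridge.w)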
avg_line_length: 43.853211
max_line_length: 114
alphanum_fraction: 0.688808
is_comment_constant_removed: true
is_sharp_comment_removed: true
hexsha: 1c455f55b99a0f9313f7b61d44989bbe51ff3591
size: 72468
ext: py
lang: Python
max_stars_repo_path: intersight/model/virtualization_vmware_datacenter_relationship.py
max_stars_repo_name: CiscoDevNet/intersight-python
max_stars_repo_head_hexsha: 04b721f37c3044646a91c185c7259edfb991557a
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: 5
max_stars_repo_stars_event_min_datetime: 2021-12-16T15:13:32.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-29T16:09:54.000Z
max_issues_repo_path: intersight/model/virtualization_vmware_datacenter_relationship.py
max_issues_repo_name: CiscoDevNet/intersight-python
max_issues_repo_head_hexsha: 04b721f37c3044646a91c185c7259edfb991557a
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: 4
max_issues_repo_issues_event_min_datetime: 2022-01-25T19:05:51.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-29T20:18:37.000Z
max_forks_repo_path: intersight/model/virtualization_vmware_datacenter_relationship.py
max_forks_repo_name: CiscoDevNet/intersight-python
max_forks_repo_head_hexsha: 04b721f37c3044646a91c185c7259edfb991557a
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2020-07-07T15:01:08.000Z
max_forks_repo_forks_event_max_datetime: 2022-01-31T04:27:35.000Z
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.asset_device_registration_relationship import AssetDeviceRegistrationRelationship
from intersight.model.display_names import DisplayNames
from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
from intersight.model.mo_mo_ref import MoMoRef
from intersight.model.mo_tag import MoTag
from intersight.model.mo_version_context import MoVersionContext
from intersight.model.virtualization_vmware_datacenter import VirtualizationVmwareDatacenter
from intersight.model.virtualization_vmware_folder_relationship import VirtualizationVmwareFolderRelationship
from intersight.model.virtualization_vmware_vcenter_relationship import VirtualizationVmwareVcenterRelationship
globals()['AssetDeviceRegistrationRelationship'] = AssetDeviceRegistrationRelationship
globals()['DisplayNames'] = DisplayNames
globals()['MoBaseMoRelationship'] = MoBaseMoRelationship
globals()['MoMoRef'] = MoMoRef
globals()['MoTag'] = MoTag
globals()['MoVersionContext'] = MoVersionContext
globals()['VirtualizationVmwareDatacenter'] = VirtualizationVmwareDatacenter
globals()['VirtualizationVmwareFolderRelationship'] = VirtualizationVmwareFolderRelationship
globals()['VirtualizationVmwareVcenterRelationship'] = VirtualizationVmwareVcenterRelationship
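
# Note (added): lazy_import() defers these model imports until first use and
# publishes them via globals(); generated OpenAPI clients use this pattern to
# break circular imports between mutually referencing model modules.
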
class VirtualizationVmwareDatacenterRelationship(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
('uuid',): {
'regex': {
'pattern': r'^$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$', # noqa: E501
},
},
}
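    # The 'uuid' pattern accepts either an empty string or a canonical
    # 8-4-4-4-12 hexadecimal UUID, so an unset value passes validation while
    # any non-empty value must be well formed.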
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'moid': (str,), # noqa: E501
'selector': (str,), # noqa: E501
'link': (str,), # noqa: E501
'account_moid': (str,), # noqa: E501
'create_time': (datetime,), # noqa: E501
'domain_group_moid': (str,), # noqa: E501
'mod_time': (datetime,), # noqa: E501
'owners': ([str], none_type,), # noqa: E501
'shared_scope': (str,), # noqa: E501
'tags': ([MoTag], none_type,), # noqa: E501
'version_context': (MoVersionContext,), # noqa: E501
'ancestors': ([MoBaseMoRelationship], none_type,), # noqa: E501
'parent': (MoBaseMoRelationship,), # noqa: E501
'permission_resources': ([MoBaseMoRelationship], none_type,), # noqa: E501
'display_names': (DisplayNames,), # noqa: E501
'registered_device': (AssetDeviceRegistrationRelationship,), # noqa: E501
'name': (str,), # noqa: E501
'uuid': (str,), # noqa: E501
'identity': (str,), # noqa: E501
'cluster_count': (int,), # noqa: E501
'datastore_count': (int,), # noqa: E501
'host_count': (int,), # noqa: E501
'inventory_path': (str,), # noqa: E501
'network_count': (int,), # noqa: E501
'vm_count': (int,), # noqa: E501
'vm_template_count': (int,), # noqa: E501
'hypervisor_manager': (VirtualizationVmwareVcenterRelationship,), # noqa: E501
'parent_folder': (VirtualizationVmwareFolderRelationship,), # noqa: E501
'object_type': (str,), # noqa: E501
}
@cached_property
def discriminator():
lazy_import()
val = {
'mo.MoRef': MoMoRef,
'virtualization.VmwareDatacenter': VirtualizationVmwareDatacenter,
}
if not val:
return None
return {'class_id': val}
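    # The mapping above lets the deserializer pick the concrete class from the
    # payload's 'class_id': a bare reference becomes MoMoRef, while an inlined
    # 'virtualization.VmwareDatacenter' payload becomes the full
    # VirtualizationVmwareDatacenter model.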
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'moid': 'Moid', # noqa: E501
'selector': 'Selector', # noqa: E501
'link': 'link', # noqa: E501
'account_moid': 'AccountMoid', # noqa: E501
'create_time': 'CreateTime', # noqa: E501
'domain_group_moid': 'DomainGroupMoid', # noqa: E501
'mod_time': 'ModTime', # noqa: E501
'owners': 'Owners', # noqa: E501
'shared_scope': 'SharedScope', # noqa: E501
'tags': 'Tags', # noqa: E501
'version_context': 'VersionContext', # noqa: E501
'ancestors': 'Ancestors', # noqa: E501
'parent': 'Parent', # noqa: E501
'permission_resources': 'PermissionResources', # noqa: E501
'display_names': 'DisplayNames', # noqa: E501
'registered_device': 'RegisteredDevice', # noqa: E501
'name': 'Name', # noqa: E501
'uuid': 'Uuid', # noqa: E501
'identity': 'Identity', # noqa: E501
'cluster_count': 'ClusterCount', # noqa: E501
'datastore_count': 'DatastoreCount', # noqa: E501
'host_count': 'HostCount', # noqa: E501
'inventory_path': 'InventoryPath', # noqa: E501
'network_count': 'NetworkCount', # noqa: E501
'vm_count': 'VmCount', # noqa: E501
'vm_template_count': 'VmTemplateCount', # noqa: E501
'hypervisor_manager': 'HypervisorManager', # noqa: E501
'parent_folder': 'ParentFolder', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
}
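    # attribute_map translates the pythonic snake_case attribute names to the
    # PascalCase keys used in the JSON payload (note the lowercase 'link',
    # which is kept as-is on the wire).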
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
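    # Underscore-prefixed names listed here are internal bookkeeping
    # attributes of the model instance; they are set in __init__ below and are
    # not part of the serialized payload.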
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""VirtualizationVmwareDatacenterRelationship - a model defined in OpenAPI
Args:
Keyword Args:
class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "mo.MoRef", must be one of ["mo.MoRef", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
moid (str): The Moid of the referenced REST resource.. [optional] # noqa: E501
selector (str): An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. 1. If 'moid' is set this field is ignored. 1. If 'selector' is set and 'moid' is empty/absent from the request, Intersight determines the Moid of the resource matching the filter expression and populates it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.. [optional] # noqa: E501
link (str): A URL to an instance of the 'mo.MoRef' class.. [optional] # noqa: E501
account_moid (str): The Account ID for this managed object.. [optional] # noqa: E501
create_time (datetime): The time when this managed object was created.. [optional] # noqa: E501
domain_group_moid (str): The DomainGroup ID for this managed object.. [optional] # noqa: E501
mod_time (datetime): The time when this managed object was last modified.. [optional] # noqa: E501
owners ([str], none_type): [optional] # noqa: E501
shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional] # noqa: E501
tags ([MoTag], none_type): [optional] # noqa: E501
version_context (MoVersionContext): [optional] # noqa: E501
ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
parent (MoBaseMoRelationship): [optional] # noqa: E501
permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
display_names (DisplayNames): [optional] # noqa: E501
registered_device (AssetDeviceRegistrationRelationship): [optional] # noqa: E501
name (str): Name of the virtual machine placement. It is the name of the VPC (Virtual Private Cloud) in case of AWS virtual machine, and datacenter name in case of VMware virtual machine.. [optional] # noqa: E501
uuid (str): The uuid of this placement. The uuid is internally generated and not user specified.. [optional] # noqa: E501
identity (str): The internally generated identity of this placement. This entity is not manipulated by users. It aids in uniquely identifying the placement object.. [optional] # noqa: E501
cluster_count (int): Count of all clusters associated with this DC.. [optional] # noqa: E501
datastore_count (int): Count of all datastores associated with this DC.. [optional] # noqa: E501
host_count (int): Count of all hosts associated with this DC.. [optional] # noqa: E501
inventory_path (str): Inventory path of the DC.. [optional] # noqa: E501
network_count (int): Count of all networks associated with this datacenter (DC).. [optional] # noqa: E501
vm_count (int): Count of all virtual machines (VMs) associated with this DC.. [optional] # noqa: E501
            vm_template_count (int): Count of all virtual machine templates associated with this DC.. [optional] # noqa: E501
hypervisor_manager (VirtualizationVmwareVcenterRelationship): [optional] # noqa: E501
parent_folder (VirtualizationVmwareFolderRelationship): [optional] # noqa: E501
object_type (str): The fully-qualified name of the remote type referred by this relationship.. [optional] # noqa: E501
"""
class_id = kwargs.get('class_id', "mo.MoRef")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
required_args = {
'class_id': class_id,
}
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
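        # Validate the combined constant/keyword arguments against the
        # composed (oneOf) schemas and collect the resulting component
        # instances; see _composed_schemas below.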
composed_info = validate_get_composed_info(
constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in kwargs.items():
if var_name in unused_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
not self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
],
'oneOf': [
MoMoRef,
VirtualizationVmwareDatacenter,
none_type,
],
}
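
# A minimal usage sketch, guarded so it never runs on import (the Moid value
# below is hypothetical, not taken from any real deployment): constructing the
# relationship as a bare MoRef payload. The 'class_id' discriminator selects
# MoMoRef, and 'object_type' must be one of the allowed_values listed above.
if __name__ == "__main__":
    rel = VirtualizationVmwareDatacenterRelationship(
        class_id="mo.MoRef",
        object_type="virtualization.VmwareDatacenter",
        moid="6086e7f2b4c6a0d301a0b123",  # hypothetical Moid
    )
    print(rel.moid)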
| 63.070496
| 1,678
| 0.659325
|
import re import sys
from intersight.model_utils import ( ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.asset_device_registration_relationship import AssetDeviceRegistrationRelationship
from intersight.model.display_names import DisplayNames
from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
from intersight.model.mo_mo_ref import MoMoRef
from intersight.model.mo_tag import MoTag
from intersight.model.mo_version_context import MoVersionContext
from intersight.model.virtualization_vmware_datacenter import VirtualizationVmwareDatacenter
from intersight.model.virtualization_vmware_folder_relationship import VirtualizationVmwareFolderRelationship
from intersight.model.virtualization_vmware_vcenter_relationship import VirtualizationVmwareVcenterRelationship
globals()['AssetDeviceRegistrationRelationship'] = AssetDeviceRegistrationRelationship
globals()['DisplayNames'] = DisplayNames
globals()['MoBaseMoRelationship'] = MoBaseMoRelationship
globals()['MoMoRef'] = MoMoRef
globals()['MoTag'] = MoTag
globals()['MoVersionContext'] = MoVersionContext
globals()['VirtualizationVmwareDatacenter'] = VirtualizationVmwareDatacenter
globals()['VirtualizationVmwareFolderRelationship'] = VirtualizationVmwareFolderRelationship
globals()['VirtualizationVmwareVcenterRelationship'] = VirtualizationVmwareVcenterRelationship
class VirtualizationVmwareDatacenterRelationship(ModelComposed):
allowed_values = {
('class_id',): {
'MO.MOREF': "mo.MoRef",
},
('object_type',): {
'AAA.AUDITRECORD': "aaa.AuditRecord",
'AAA.RETENTIONCONFIG': "aaa.RetentionConfig",
'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy",
'ACCESS.POLICY': "access.Policy",
'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy",
'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface",
'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface",
'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface",
'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface",
'ADAPTER.UNIT': "adapter.Unit",
'ADAPTER.UNITEXPANDER': "adapter.UnitExpander",
'APPLIANCE.APPSTATUS': "appliance.AppStatus",
'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy",
'APPLIANCE.BACKUP': "appliance.Backup",
'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy",
'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting",
'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy",
'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate",
'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim",
'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy",
'APPLIANCE.DIAGSETTING': "appliance.DiagSetting",
'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting",
'APPLIANCE.FILEGATEWAY': "appliance.FileGateway",
'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus",
'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus",
'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle",
'APPLIANCE.NODEINFO': "appliance.NodeInfo",
'APPLIANCE.NODESTATUS': "appliance.NodeStatus",
'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote",
'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport",
'APPLIANCE.RESTORE': "appliance.Restore",
'APPLIANCE.SETUPINFO': "appliance.SetupInfo",
'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo",
'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus",
'APPLIANCE.UPGRADE': "appliance.Upgrade",
'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy",
'ASSET.CLUSTERMEMBER': "asset.ClusterMember",
'ASSET.DEPLOYMENT': "asset.Deployment",
'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice",
'ASSET.DEVICECLAIM': "asset.DeviceClaim",
'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration",
'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager",
'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation",
'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification",
'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration",
'ASSET.SUBSCRIPTION': "asset.Subscription",
'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount",
'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation",
'ASSET.TARGET': "asset.Target",
'BIOS.BOOTDEVICE': "bios.BootDevice",
'BIOS.BOOTMODE': "bios.BootMode",
'BIOS.POLICY': "bios.Policy",
'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder",
'BIOS.TOKENSETTINGS': "bios.TokenSettings",
'BIOS.UNIT': "bios.Unit",
'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration",
'BOOT.CDDDEVICE': "boot.CddDevice",
'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode",
'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity",
'BOOT.HDDDEVICE': "boot.HddDevice",
'BOOT.ISCSIDEVICE': "boot.IscsiDevice",
'BOOT.NVMEDEVICE': "boot.NvmeDevice",
'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice",
'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy",
'BOOT.PXEDEVICE': "boot.PxeDevice",
'BOOT.SANDEVICE': "boot.SanDevice",
'BOOT.SDDEVICE': "boot.SdDevice",
'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice",
'BOOT.USBDEVICE': "boot.UsbDevice",
'BOOT.VMEDIADEVICE': "boot.VmediaDevice",
'BULK.EXPORT': "bulk.Export",
'BULK.EXPORTEDITEM': "bulk.ExportedItem",
'BULK.MOCLONER': "bulk.MoCloner",
'BULK.MOMERGER': "bulk.MoMerger",
'BULK.REQUEST': "bulk.Request",
'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj",
'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor",
'CAPABILITY.CATALOG': "capability.Catalog",
'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor",
'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef",
'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor",
'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef",
'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray",
'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor",
'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef",
'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef",
'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor",
'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef",
'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef",
'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor",
'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef",
'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef",
'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor",
'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef",
'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor",
'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef",
'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability",
'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor",
'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef",
'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy",
'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail",
'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport",
'CHASSIS.CONFIGRESULT': "chassis.ConfigResult",
'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry",
'CHASSIS.IOMPROFILE': "chassis.IomProfile",
'CHASSIS.PROFILE': "chassis.Profile",
'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit",
'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair",
'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface",
'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit",
'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup",
'CLOUD.AWSSUBNET': "cloud.AwsSubnet",
'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine",
'CLOUD.AWSVOLUME': "cloud.AwsVolume",
'CLOUD.AWSVPC': "cloud.AwsVpc",
'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory",
'CLOUD.REGIONS': "cloud.Regions",
'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType",
'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType",
'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType",
'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType",
'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards",
'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType",
'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool",
'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization",
'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace",
'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy",
'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy",
'COMPUTE.BLADE': "compute.Blade",
'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity",
'COMPUTE.BOARD': "compute.Board",
'COMPUTE.MAPPING': "compute.Mapping",
'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary",
'COMPUTE.RACKUNIT': "compute.RackUnit",
'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity",
'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy",
'COMPUTE.SERVERSETTING': "compute.ServerSetting",
'COMPUTE.VMEDIA': "compute.Vmedia",
'COND.ALARM': "cond.Alarm",
'COND.ALARMAGGREGATION': "cond.AlarmAggregation",
'COND.HCLSTATUS': "cond.HclStatus",
'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail",
'COND.HCLSTATUSJOB': "cond.HclStatusJob",
'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade",
'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact",
'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition",
'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution",
'CONVERGEDINFRA.POD': "convergedinfra.Pod",
'CRD.CUSTOMRESOURCE': "crd.CustomResource",
'DEVICECONNECTOR.POLICY': "deviceconnector.Policy",
'EQUIPMENT.CHASSIS': "equipment.Chassis",
'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity",
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation",
'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary",
'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule",
'EQUIPMENT.FAN': "equipment.Fan",
'EQUIPMENT.FANCONTROL': "equipment.FanControl",
'EQUIPMENT.FANMODULE': "equipment.FanModule",
'EQUIPMENT.FEX': "equipment.Fex",
'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity",
'EQUIPMENT.FEXOPERATION': "equipment.FexOperation",
'EQUIPMENT.FRU': "equipment.Fru",
'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary",
'EQUIPMENT.IOCARD': "equipment.IoCard",
'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation",
'EQUIPMENT.IOEXPANDER': "equipment.IoExpander",
'EQUIPMENT.LOCATORLED': "equipment.LocatorLed",
'EQUIPMENT.PSU': "equipment.Psu",
'EQUIPMENT.PSUCONTROL': "equipment.PsuControl",
'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure",
'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot",
'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule",
'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard",
'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController",
'EQUIPMENT.TPM': "equipment.Tpm",
'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver",
'ETHER.HOSTPORT': "ether.HostPort",
'ETHER.NETWORKPORT': "ether.NetworkPort",
'ETHER.PHYSICALPORT': "ether.PhysicalPort",
'ETHER.PORTCHANNEL': "ether.PortChannel",
'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization",
'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole",
'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole",
'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail",
'FABRIC.CONFIGRESULT': "fabric.ConfigResult",
'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry",
'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity",
'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact",
'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy",
'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy",
'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy",
'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy",
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole",
'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole",
'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole",
'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole",
'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole",
'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy",
'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy",
'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy",
'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy",
'FABRIC.PCMEMBER': "fabric.PcMember",
'FABRIC.PCOPERATION': "fabric.PcOperation",
'FABRIC.PORTMODE': "fabric.PortMode",
'FABRIC.PORTOPERATION': "fabric.PortOperation",
'FABRIC.PORTPOLICY': "fabric.PortPolicy",
'FABRIC.SERVERROLE': "fabric.ServerRole",
'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile",
'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy",
'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile",
'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy",
'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole",
'FABRIC.UPLINKROLE': "fabric.UplinkRole",
'FABRIC.VLAN': "fabric.Vlan",
'FABRIC.VSAN': "fabric.Vsan",
'FAULT.INSTANCE': "fault.Instance",
'FC.PHYSICALPORT': "fc.PhysicalPort",
'FC.PORTCHANNEL': "fc.PortChannel",
'FCPOOL.FCBLOCK': "fcpool.FcBlock",
'FCPOOL.LEASE': "fcpool.Lease",
'FCPOOL.POOL': "fcpool.Pool",
'FCPOOL.POOLMEMBER': "fcpool.PoolMember",
'FCPOOL.UNIVERSE': "fcpool.Universe",
'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost",
'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor",
'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor",
'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade",
'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor",
'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor",
'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable",
'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta",
'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor",
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable",
'FIRMWARE.EULA': "firmware.Eula",
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary",
'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor",
'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor",
'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor",
'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor",
'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor",
'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor",
'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor",
'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware",
'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor",
'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable",
'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor",
'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade",
'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade",
'FIRMWARE.UPGRADE': "firmware.Upgrade",
'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact",
'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus",
'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus",
'FORECAST.CATALOG': "forecast.Catalog",
'FORECAST.DEFINITION': "forecast.Definition",
'FORECAST.INSTANCE': "forecast.Instance",
'GRAPHICS.CARD': "graphics.Card",
'GRAPHICS.CONTROLLER': "graphics.Controller",
'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus",
'HCL.DRIVERIMAGE': "hcl.DriverImage",
'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog",
'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo",
'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem",
'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor",
'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName",
'HYPERFLEX.ALARM': "hyperflex.Alarm",
'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog",
'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy",
'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster",
'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo",
'HYPERFLEX.CLUSTER': "hyperflex.Cluster",
'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy",
'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment",
'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory",
'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot",
'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy",
'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy",
'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment",
'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy",
'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult",
'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry",
'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer",
'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic",
'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState",
'HYPERFLEX.DRIVE': "hyperflex.Drive",
'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy",
'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy",
'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal",
'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal",
'HYPERFLEX.HEALTH': "hyperflex.Health",
'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition",
'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution",
'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot",
'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum",
'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion",
'HYPERFLEX.LICENSE': "hyperflex.License",
'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy",
'HYPERFLEX.NODE': "hyperflex.Node",
'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy",
'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile",
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster",
'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy",
'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion",
'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry",
'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel",
'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken",
'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent",
'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry",
'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion",
'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy",
'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer",
'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy",
'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy",
'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy",
'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo",
'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation",
'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation",
'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo",
'HYPERFLEX.VOLUME': "hyperflex.Volume",
'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration",
'IAAS.CONNECTORPACK': "iaas.ConnectorPack",
'IAAS.DEVICESTATUS': "iaas.DeviceStatus",
'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages",
'IAAS.LICENSEINFO': "iaas.LicenseInfo",
'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks",
'IAAS.SERVICEREQUEST': "iaas.ServiceRequest",
'IAAS.UCSDINFO': "iaas.UcsdInfo",
'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra",
'IAAS.UCSDMESSAGES': "iaas.UcsdMessages",
'IAM.ACCOUNT': "iam.Account",
'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience",
'IAM.APIKEY': "iam.ApiKey",
'IAM.APPREGISTRATION': "iam.AppRegistration",
'IAM.BANNERMESSAGE': "iam.BannerMessage",
'IAM.CERTIFICATE': "iam.Certificate",
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest",
'IAM.DOMAINGROUP': "iam.DomainGroup",
'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege",
'IAM.ENDPOINTROLE': "iam.EndPointRole",
'IAM.ENDPOINTUSER': "iam.EndPointUser",
'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy",
'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole",
'IAM.IDP': "iam.Idp",
'IAM.IDPREFERENCE': "iam.IdpReference",
'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement",
'IAM.IPADDRESS': "iam.IpAddress",
'IAM.LDAPGROUP': "iam.LdapGroup",
'IAM.LDAPPOLICY': "iam.LdapPolicy",
'IAM.LDAPPROVIDER': "iam.LdapProvider",
'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword",
'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy",
'IAM.OAUTHTOKEN': "iam.OAuthToken",
'IAM.PERMISSION': "iam.Permission",
'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec",
'IAM.PRIVILEGE': "iam.Privilege",
'IAM.PRIVILEGESET': "iam.PrivilegeSet",
'IAM.QUALIFIER': "iam.Qualifier",
'IAM.RESOURCELIMITS': "iam.ResourceLimits",
'IAM.RESOURCEPERMISSION': "iam.ResourcePermission",
'IAM.RESOURCEROLES': "iam.ResourceRoles",
'IAM.ROLE': "iam.Role",
'IAM.SECURITYHOLDER': "iam.SecurityHolder",
'IAM.SERVICEPROVIDER': "iam.ServiceProvider",
'IAM.SESSION': "iam.Session",
'IAM.SESSIONLIMITS': "iam.SessionLimits",
'IAM.SYSTEM': "iam.System",
'IAM.TRUSTPOINT': "iam.TrustPoint",
'IAM.USER': "iam.User",
'IAM.USERGROUP': "iam.UserGroup",
'IAM.USERPREFERENCE': "iam.UserPreference",
'INVENTORY.DEVICEINFO': "inventory.DeviceInfo",
'INVENTORY.DNMOBINDING': "inventory.DnMoBinding",
'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory",
'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder",
'INVENTORY.REQUEST': "inventory.Request",
'IPMIOVERLAN.POLICY': "ipmioverlan.Policy",
'IPPOOL.BLOCKLEASE': "ippool.BlockLease",
'IPPOOL.IPLEASE': "ippool.IpLease",
'IPPOOL.POOL': "ippool.Pool",
'IPPOOL.POOLMEMBER': "ippool.PoolMember",
'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock",
'IPPOOL.SHADOWPOOL': "ippool.ShadowPool",
'IPPOOL.UNIVERSE': "ippool.Universe",
'IQNPOOL.BLOCK': "iqnpool.Block",
'IQNPOOL.LEASE': "iqnpool.Lease",
'IQNPOOL.POOL': "iqnpool.Pool",
'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember",
'IQNPOOL.UNIVERSE': "iqnpool.Universe",
'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus",
'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic",
'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile",
'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation",
'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition",
'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy",
'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository",
'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile",
'KUBERNETES.CATALOG': "kubernetes.Catalog",
'KUBERNETES.CLUSTER': "kubernetes.Cluster",
'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile",
'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile",
'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult",
'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry",
'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy",
'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet",
'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment",
'KUBERNETES.INGRESS': "kubernetes.Ingress",
'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy",
'KUBERNETES.NODE': "kubernetes.Node",
'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile",
'KUBERNETES.POD': "kubernetes.Pod",
'KUBERNETES.SERVICE': "kubernetes.Service",
'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet",
'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy",
'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy",
'KUBERNETES.VERSION': "kubernetes.Version",
'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy",
'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy",
'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider",
'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType",
'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile",
'KVM.POLICY': "kvm.Policy",
'KVM.SESSION': "kvm.Session",
'KVM.TUNNEL': "kvm.Tunnel",
'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData",
'LICENSE.CUSTOMEROP': "license.CustomerOp",
'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp",
'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount",
'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp",
'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount",
'LICENSE.LICENSEINFO': "license.LicenseInfo",
'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp",
'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken",
'LS.SERVICEPROFILE': "ls.ServiceProfile",
'MACPOOL.IDBLOCK': "macpool.IdBlock",
'MACPOOL.LEASE': "macpool.Lease",
'MACPOOL.POOL': "macpool.Pool",
'MACPOOL.POOLMEMBER': "macpool.PoolMember",
'MACPOOL.UNIVERSE': "macpool.Universe",
'MANAGEMENT.CONTROLLER': "management.Controller",
'MANAGEMENT.ENTITY': "management.Entity",
'MANAGEMENT.INTERFACE': "management.Interface",
'MEMORY.ARRAY': "memory.Array",
'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult",
'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration",
'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace",
'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult",
'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy",
'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion",
'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit",
'MEMORY.UNIT': "memory.Unit",
'META.DEFINITION': "meta.Definition",
'NETWORK.ELEMENT': "network.Element",
'NETWORK.ELEMENTSUMMARY': "network.ElementSummary",
'NETWORK.FCZONEINFO': "network.FcZoneInfo",
'NETWORK.VLANPORTINFO': "network.VlanPortInfo",
'NETWORKCONFIG.POLICY': "networkconfig.Policy",
'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost",
'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice",
'NIAAPI.APICHWEOL': "niaapi.ApicHweol",
'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease",
'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend",
'NIAAPI.APICSWEOL': "niaapi.ApicSweol",
'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost",
'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice",
'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol",
'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease",
'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend",
'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol",
'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader",
'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata",
'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader",
'NIAAPI.NIBMETADATA': "niaapi.NibMetadata",
'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex",
'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails",
'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails",
'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails",
'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails",
'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails",
'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest",
'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler",
'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails",
'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails",
'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails",
'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth",
'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails",
'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails",
'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails",
'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails",
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails",
'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails",
'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails",
'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp",
'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc",
'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails",
'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts",
'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails",
'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies",
'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails",
'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails",
'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails",
'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails",
'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails",
'NIATELEMETRY.EPG': "niatelemetry.Epg",
'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails",
'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile",
'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs",
'NIATELEMETRY.FAULT': "niatelemetry.Fault",
'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails",
'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap",
'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap",
'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails",
'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails",
'NIATELEMETRY.LC': "niatelemetry.Lc",
'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails",
'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails",
'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails",
'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails",
'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails",
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails",
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails",
'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails",
'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards",
'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage",
'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory",
'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm",
'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric",
'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState",
'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck",
'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies",
'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies",
'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies",
'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory",
'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc",
'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo",
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest",
'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg",
'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter",
'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails",
'NIATELEMETRY.TENANT': "niatelemetry.Tenant",
'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription",
'NTP.POLICY': "ntp.Policy",
'OAUTH.ACCESSTOKEN': "oauth.AccessToken",
'OAUTH.AUTHORIZATION': "oauth.Authorization",
'OPRS.DEPLOYMENT': "oprs.Deployment",
'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage",
'ORGANIZATION.ORGANIZATION': "organization.Organization",
'OS.BULKINSTALLINFO': "os.BulkInstallInfo",
'OS.CATALOG': "os.Catalog",
'OS.CONFIGURATIONFILE': "os.ConfigurationFile",
'OS.DISTRIBUTION': "os.Distribution",
'OS.INSTALL': "os.Install",
'OS.OSSUPPORT': "os.OsSupport",
'OS.SUPPORTEDVERSION': "os.SupportedVersion",
'OS.TEMPLATEFILE': "os.TemplateFile",
'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget",
'PCI.COPROCESSORCARD': "pci.CoprocessorCard",
'PCI.DEVICE': "pci.Device",
'PCI.LINK': "pci.Link",
'PCI.SWITCH': "pci.Switch",
'PORT.GROUP': "port.Group",
'PORT.MACBINDING': "port.MacBinding",
'PORT.SUBGROUP': "port.SubGroup",
'POWER.CONTROLSTATE': "power.ControlState",
'POWER.POLICY': "power.Policy",
'PROCESSOR.UNIT': "processor.Unit",
'RACK.UNITPERSONALITY': "rack.UnitPersonality",
'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway",
'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem",
'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy",
'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile",
'RECOVERY.CONFIGRESULT': "recovery.ConfigResult",
'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry",
'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup",
'RECOVERY.RESTORE': "recovery.Restore",
'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy",
'RESOURCE.GROUP': "resource.Group",
'RESOURCE.GROUPMEMBER': "resource.GroupMember",
'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount",
'RESOURCE.MEMBERSHIP': "resource.Membership",
'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder",
'RESOURCE.RESERVATION': "resource.Reservation",
'RESOURCEPOOL.LEASE': "resourcepool.Lease",
'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource",
'RESOURCEPOOL.POOL': "resourcepool.Pool",
'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember",
'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe",
'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy",
'SDCARD.POLICY': "sdcard.Policy",
'SDWAN.PROFILE': "sdwan.Profile",
'SDWAN.ROUTERNODE': "sdwan.RouterNode",
'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy",
'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy",
'SEARCH.SEARCHITEM': "search.SearchItem",
'SEARCH.TAGITEM': "search.TagItem",
'SECURITY.UNIT': "security.Unit",
'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail",
'SERVER.CONFIGIMPORT': "server.ConfigImport",
'SERVER.CONFIGRESULT': "server.ConfigResult",
'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry",
'SERVER.PROFILE': "server.Profile",
'SERVER.PROFILETEMPLATE': "server.ProfileTemplate",
'SMTP.POLICY': "smtp.Policy",
'SNMP.POLICY': "snmp.Policy",
'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable",
'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory",
'SOFTWARE.HCLMETA': "software.HclMeta",
'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable",
'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable",
'SOFTWARE.RELEASEMETA': "software.ReleaseMeta",
'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable",
'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable",
'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable",
'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization",
'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage",
'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog",
'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper",
'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel",
'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint",
'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec",
'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile",
'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release",
'SOL.POLICY': "sol.Policy",
'SSH.POLICY': "ssh.Policy",
'STORAGE.CONTROLLER': "storage.Controller",
'STORAGE.DISKGROUP': "storage.DiskGroup",
'STORAGE.DISKSLOT': "storage.DiskSlot",
'STORAGE.DRIVEGROUP': "storage.DriveGroup",
'STORAGE.ENCLOSURE': "storage.Enclosure",
'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk",
'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp",
'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController",
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps",
'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive",
'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive",
'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController",
'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive",
'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive",
'STORAGE.HITACHIARRAY': "storage.HitachiArray",
'STORAGE.HITACHICONTROLLER': "storage.HitachiController",
'STORAGE.HITACHIDISK': "storage.HitachiDisk",
'STORAGE.HITACHIHOST': "storage.HitachiHost",
'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun",
'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup",
'STORAGE.HITACHIPOOL': "storage.HitachiPool",
'STORAGE.HITACHIPORT': "storage.HitachiPort",
'STORAGE.HITACHIVOLUME': "storage.HitachiVolume",
'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer",
'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume",
'STORAGE.ITEM': "storage.Item",
'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate",
'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk",
'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster",
'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort",
'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy",
'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface",
'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort",
'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup",
'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface",
'STORAGE.NETAPPLICENSE': "storage.NetAppLicense",
'STORAGE.NETAPPLUN': "storage.NetAppLun",
'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap",
'STORAGE.NETAPPNODE': "storage.NetAppNode",
'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer",
'STORAGE.NETAPPSENSOR': "storage.NetAppSensor",
'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm",
'STORAGE.NETAPPVOLUME': "storage.NetAppVolume",
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot",
'STORAGE.PHYSICALDISK': "storage.PhysicalDisk",
'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension",
'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage",
'STORAGE.PUREARRAY': "storage.PureArray",
'STORAGE.PURECONTROLLER': "storage.PureController",
'STORAGE.PUREDISK': "storage.PureDisk",
'STORAGE.PUREHOST': "storage.PureHost",
'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup",
'STORAGE.PUREHOSTLUN': "storage.PureHostLun",
'STORAGE.PUREPORT': "storage.PurePort",
'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup",
'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot",
'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule",
'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule",
'STORAGE.PUREVOLUME': "storage.PureVolume",
'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot",
'STORAGE.SASEXPANDER': "storage.SasExpander",
'STORAGE.SASPORT': "storage.SasPort",
'STORAGE.SPAN': "storage.Span",
'STORAGE.STORAGEPOLICY': "storage.StoragePolicy",
'STORAGE.VDMEMBEREP': "storage.VdMemberEp",
'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive",
'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer",
'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension",
'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity",
'SYSLOG.POLICY': "syslog.Policy",
'TAM.ADVISORYCOUNT': "tam.AdvisoryCount",
'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition",
'TAM.ADVISORYINFO': "tam.AdvisoryInfo",
'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance",
'TAM.SECURITYADVISORY': "tam.SecurityAdvisory",
'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory",
'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory",
'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory",
'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory",
'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory",
'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory",
'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory",
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy",
'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle",
'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus",
'TERMINAL.AUDITLOG': "terminal.AuditLog",
'TERRAFORM.EXECUTOR': "terraform.Executor",
'THERMAL.POLICY': "thermal.Policy",
'TOP.SYSTEM': "top.System",
'UCSD.BACKUPINFO': "ucsd.BackupInfo",
'UUIDPOOL.BLOCK': "uuidpool.Block",
'UUIDPOOL.POOL': "uuidpool.Pool",
'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember",
'UUIDPOOL.UNIVERSE': "uuidpool.Universe",
'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease",
'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager",
'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole",
'VIRTUALIZATION.HOST': "virtualization.Host",
'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster",
'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter",
'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink",
'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch",
'VIRTUALIZATION.IWEHOST': "virtualization.IweHost",
'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface",
'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch",
'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork",
'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk",
'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine",
'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface",
'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk",
'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine",
'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork",
'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster",
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter",
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore",
'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster",
'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork",
'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch",
'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder",
'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost",
'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork",
'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork",
'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface",
'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort",
'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter",
'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk",
'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine",
'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot",
'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface",
'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch",
'VMEDIA.POLICY': "vmedia.Policy",
'VMRC.CONSOLE': "vmrc.Console",
'VNC.CONSOLE': "vnc.Console",
'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy",
'VNIC.ETHIF': "vnic.EthIf",
'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy",
'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy",
'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy",
'VNIC.FCIF': "vnic.FcIf",
'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy",
'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy",
'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy",
'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy",
'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy",
'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy",
'VNIC.LCPSTATUS': "vnic.LcpStatus",
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy",
'VNIC.SCPSTATUS': "vnic.ScpStatus",
'VRF.VRF': "vrf.Vrf",
'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor",
'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor",
'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta",
'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner",
'WORKFLOW.CATALOG': "workflow.Catalog",
'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition",
'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler",
'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo",
'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow",
'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition",
'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance",
'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition",
'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance",
'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput",
'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor",
'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog",
'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition",
'WORKFLOW.TASKINFO': "workflow.TaskInfo",
'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata",
'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification",
'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation",
'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta",
'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition",
'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo",
'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta",
'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata",
'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification",
},
}
validations = {
('uuid',): {
'regex': {
'pattern': r'^$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$', },
},
}
@cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,)
_nullable = False
@cached_property
def openapi_types():
lazy_import()
return {
            'class_id': (str,),
            'moid': (str,),
            'selector': (str,),
            'link': (str,),
            'account_moid': (str,),
            'create_time': (datetime,),
            'domain_group_moid': (str,),
            'mod_time': (datetime,),
            'owners': ([str], none_type,),
            'shared_scope': (str,),
            'tags': ([MoTag], none_type,),
            'version_context': (MoVersionContext,),
            'ancestors': ([MoBaseMoRelationship], none_type,),
            'parent': (MoBaseMoRelationship,),
            'permission_resources': ([MoBaseMoRelationship], none_type,),
            'display_names': (DisplayNames,),
            'registered_device': (AssetDeviceRegistrationRelationship,),
            'name': (str,),
            'uuid': (str,),
            'identity': (str,),
            'cluster_count': (int,),
            'datastore_count': (int,),
            'host_count': (int,),
            'inventory_path': (str,),
            'network_count': (int,),
            'vm_count': (int,),
            'vm_template_count': (int,),
            'hypervisor_manager': (VirtualizationVmwareVcenterRelationship,),
            'parent_folder': (VirtualizationVmwareFolderRelationship,),
            'object_type': (str,),
        }
@cached_property
def discriminator():
lazy_import()
val = {
'mo.MoRef': MoMoRef,
'virtualization.VmwareDatacenter': VirtualizationVmwareDatacenter,
}
if not val:
return None
return {'class_id': val}
attribute_map = {
        'class_id': 'ClassId',
        'moid': 'Moid',
        'selector': 'Selector',
        'link': 'link',
        'account_moid': 'AccountMoid',
        'create_time': 'CreateTime',
        'domain_group_moid': 'DomainGroupMoid',
        'mod_time': 'ModTime',
        'owners': 'Owners',
        'shared_scope': 'SharedScope',
        'tags': 'Tags',
        'version_context': 'VersionContext',
        'ancestors': 'Ancestors',
        'parent': 'Parent',
        'permission_resources': 'PermissionResources',
        'display_names': 'DisplayNames',
        'registered_device': 'RegisteredDevice',
        'name': 'Name',
        'uuid': 'Uuid',
        'identity': 'Identity',
        'cluster_count': 'ClusterCount',
        'datastore_count': 'DatastoreCount',
        'host_count': 'HostCount',
        'inventory_path': 'InventoryPath',
        'network_count': 'NetworkCount',
        'vm_count': 'VmCount',
        'vm_template_count': 'VmTemplateCount',
        'hypervisor_manager': 'HypervisorManager',
        'parent_folder': 'ParentFolder',
        'object_type': 'ObjectType',
    }
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
class_id = kwargs.get('class_id', "mo.MoRef")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
required_args = {
'class_id': class_id,
}
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(
constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in kwargs.items():
if var_name in unused_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
not self._additional_properties_model_instances:
continue
setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
        # lazily import the referenced model classes (avoids circular imports)
lazy_import()
return {
'anyOf': [
],
'allOf': [
],
'oneOf': [
MoMoRef,
VirtualizationVmwareDatacenter,
none_type,
],
}
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c455fa5c65ef452e853463256b07757be1b7bac | size: 41,048 | ext: py | lang: Python
max_stars_repo_path: lib/matplotlib/lines.py | max_stars_repo_name: SoftwareDev/mat-plot-lib | max_stars_repo_head_hexsha: abaf94859d5ef6e653a4d8a7ce2c59cea1724a57 | max_stars_repo_licenses: ["MIT", "BSD-3-Clause"] | max_stars_count: 3 | 2015-02-25T21:51:26.000Z | 2020-01-05T14:11:52.000Z
max_issues_repo_path: lib/matplotlib/lines.py | max_issues_repo_name: SoftwareDev/mat-plot-lib | max_issues_repo_head_hexsha: abaf94859d5ef6e653a4d8a7ce2c59cea1724a57 | max_issues_repo_licenses: ["MIT", "BSD-3-Clause"] | max_issues_count: 7 | 2015-05-08T19:36:25.000Z | 2015-06-30T15:32:17.000Z
max_forks_repo_path: lib/matplotlib/lines.py | max_forks_repo_name: OceanWolf/matplotlib | max_forks_repo_head_hexsha: a429c415bdb6e54ccfe004a48fdc034ea8e9d329 | max_forks_repo_licenses: ["MIT", "BSD-3-Clause"] | max_forks_count: 6 | 2015-06-05T03:34:06.000Z | 2022-01-25T09:07:10.000Z
"""
This module contains the 2D line class, which can draw lines with a
variety of line styles, markers and colors.
"""
# TODO: expose cap and join style attrs
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
import numpy as np
from numpy import ma
from matplotlib import verbose
from . import artist
from .artist import Artist
from .cbook import iterable, is_string_like, is_numlike, ls_mapper
from .colors import colorConverter
from .path import Path
from .transforms import Bbox, TransformedPath, IdentityTransform
from matplotlib import rcParams
from .artist import allow_rasterization
from matplotlib import docstring
from matplotlib.markers import MarkerStyle
# Imported here for backward compatibility, even though they don't
# really belong.
from matplotlib.markers import TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN
from matplotlib.markers import CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN
def segment_hits(cx, cy, x, y, radius):
"""
Determine if any line segments are within radius of a
point. Returns the list of line segments that are within that
radius.
"""
# Process single points specially
if len(x) < 2:
res, = np.nonzero((cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2)
return res
# We need to lop the last element off a lot.
xr, yr = x[:-1], y[:-1]
# Only look at line segments whose nearest point to C on the line
# lies within the segment.
dx, dy = x[1:] - xr, y[1:] - yr
Lnorm_sq = dx ** 2 + dy ** 2 # Possibly want to eliminate Lnorm==0
u = ((cx - xr) * dx + (cy - yr) * dy) / Lnorm_sq
candidates = (u >= 0) & (u <= 1)
#if any(candidates): print "candidates",xr[candidates]
# Note that there is a little area near one side of each point
# which will be near neither segment, and another which will
# be near both, depending on the angle of the lines. The
# following radius test eliminates these ambiguities.
point_hits = (cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2
#if any(point_hits): print "points",xr[candidates]
candidates = candidates & ~(point_hits[:-1] | point_hits[1:])
# For those candidates which remain, determine how far they lie away
# from the line.
px, py = xr + u * dx, yr + u * dy
line_hits = (cx - px) ** 2 + (cy - py) ** 2 <= radius ** 2
#if any(line_hits): print "lines",xr[candidates]
line_hits = line_hits & candidates
points, = point_hits.ravel().nonzero()
lines, = line_hits.ravel().nonzero()
#print points,lines
return np.concatenate((points, lines))
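# Editorial usage sketch (hedged; not part of the original module): how
# segment_hits is typically invoked from Line2D.contains, with pixel-space
# coordinates and a pick radius in pixels:
#
#   x = np.array([0.0, 10.0, 20.0])
#   y = np.array([0.0, 0.0, 0.0])
#   idx = segment_hits(5.0, 1.0, x, y, 2.0)
#   # -> indices of the points/segments within 2 pixels of (5, 1)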
class Line2D(Artist):
"""
A line - the line can have both a solid linestyle connecting all
the vertices, and a marker at each vertex. Additionally, the
    drawing of the solid line is influenced by the drawstyle, e.g., one
can create "stepped" lines in various styles.
"""
lineStyles = _lineStyles = { # hidden names deprecated
'-': '_draw_solid',
'--': '_draw_dashed',
'-.': '_draw_dash_dot',
':': '_draw_dotted',
'None': '_draw_nothing',
' ': '_draw_nothing',
'': '_draw_nothing',
}
_drawStyles_l = {
'default': '_draw_lines',
'steps-mid': '_draw_steps_mid',
'steps-pre': '_draw_steps_pre',
'steps-post': '_draw_steps_post',
}
_drawStyles_s = {
'steps': '_draw_steps_pre',
}
drawStyles = {}
drawStyles.update(_drawStyles_l)
drawStyles.update(_drawStyles_s)
# Need a list ordered with long names first:
drawStyleKeys = (list(six.iterkeys(_drawStyles_l)) +
list(six.iterkeys(_drawStyles_s)))
# Referenced here to maintain API. These are defined in
# MarkerStyle
markers = MarkerStyle.markers
filled_markers = MarkerStyle.filled_markers
fillStyles = MarkerStyle.fillstyles
zorder = 2
validCap = ('butt', 'round', 'projecting')
validJoin = ('miter', 'round', 'bevel')
def __str__(self):
if self._label != "":
return "Line2D(%s)" % (self._label)
elif hasattr(self, '_x') and len(self._x) > 3:
return "Line2D((%g,%g),(%g,%g),...,(%g,%g))"\
% (self._x[0], self._y[0], self._x[0],
self._y[0], self._x[-1], self._y[-1])
elif hasattr(self, '_x'):
return "Line2D(%s)"\
% (",".join(["(%g,%g)" % (x, y) for x, y
in zip(self._x, self._y)]))
else:
return "Line2D()"
def __init__(self, xdata, ydata,
linewidth=None, # all Nones default to rc
linestyle=None,
color=None,
marker=None,
markersize=None,
markeredgewidth=None,
markeredgecolor=None,
markerfacecolor=None,
markerfacecoloralt='none',
fillstyle='full',
antialiased=None,
dash_capstyle=None,
solid_capstyle=None,
dash_joinstyle=None,
solid_joinstyle=None,
pickradius=5,
drawstyle=None,
markevery=None,
**kwargs
):
"""
Create a :class:`~matplotlib.lines.Line2D` instance with *x*
and *y* data in sequences *xdata*, *ydata*.
The kwargs are :class:`~matplotlib.lines.Line2D` properties:
%(Line2D)s
        See :meth:`set_linestyle` for a description of the line styles,
:meth:`set_marker` for a description of the markers, and
:meth:`set_drawstyle` for a description of the draw styles.
"""
Artist.__init__(self)
#convert sequences to numpy arrays
if not iterable(xdata):
raise RuntimeError('xdata must be a sequence')
if not iterable(ydata):
raise RuntimeError('ydata must be a sequence')
if linewidth is None:
linewidth = rcParams['lines.linewidth']
if linestyle is None:
linestyle = rcParams['lines.linestyle']
if marker is None:
marker = rcParams['lines.marker']
if color is None:
color = rcParams['lines.color']
if markersize is None:
markersize = rcParams['lines.markersize']
if antialiased is None:
antialiased = rcParams['lines.antialiased']
if dash_capstyle is None:
dash_capstyle = rcParams['lines.dash_capstyle']
if dash_joinstyle is None:
dash_joinstyle = rcParams['lines.dash_joinstyle']
if solid_capstyle is None:
solid_capstyle = rcParams['lines.solid_capstyle']
if solid_joinstyle is None:
solid_joinstyle = rcParams['lines.solid_joinstyle']
if drawstyle is None:
drawstyle = 'default'
self.set_dash_capstyle(dash_capstyle)
self.set_dash_joinstyle(dash_joinstyle)
self.set_solid_capstyle(solid_capstyle)
self.set_solid_joinstyle(solid_joinstyle)
self.set_linestyle(linestyle)
self.set_drawstyle(drawstyle)
self.set_linewidth(linewidth)
self.set_color(color)
self._marker = MarkerStyle()
self.set_marker(marker)
self.set_markevery(markevery)
self.set_antialiased(antialiased)
self.set_markersize(markersize)
self._dashSeq = None
self.set_markerfacecolor(markerfacecolor)
self.set_markerfacecoloralt(markerfacecoloralt)
self.set_markeredgecolor(markeredgecolor)
self.set_markeredgewidth(markeredgewidth)
self.set_fillstyle(fillstyle)
self.verticalOffset = None
# update kwargs before updating data to give the caller a
# chance to init axes (and hence unit support)
self.update(kwargs)
self.pickradius = pickradius
self.ind_offset = 0
if is_numlike(self._picker):
self.pickradius = self._picker
self._xorig = np.asarray([])
self._yorig = np.asarray([])
self._invalidx = True
self._invalidy = True
self.set_data(xdata, ydata)
def contains(self, mouseevent):
"""
Test whether the mouse event occurred on the line. The pick
radius determines the precision of the location test (usually
within five points of the value). Use
:meth:`~matplotlib.lines.Line2D.get_pickradius` or
:meth:`~matplotlib.lines.Line2D.set_pickradius` to view or
modify it.
Returns *True* if any values are within the radius along with
``{'ind': pointlist}``, where *pointlist* is the set of points
within the radius.
TODO: sort returned indices by distance
"""
if six.callable(self._contains):
return self._contains(self, mouseevent)
if not is_numlike(self.pickradius):
raise ValueError("pick radius should be a distance")
# Make sure we have data to plot
if self._invalidy or self._invalidx:
self.recache()
if len(self._xy) == 0:
return False, {}
# Convert points to pixels
transformed_path = self._get_transformed_path()
path, affine = transformed_path.get_transformed_path_and_affine()
path = affine.transform_path(path)
xy = path.vertices
xt = xy[:, 0]
yt = xy[:, 1]
# Convert pick radius from points to pixels
if self.figure is None:
            warnings.warn('no figure set when checking if mouse is on line')
pixels = self.pickradius
else:
pixels = self.figure.dpi / 72. * self.pickradius
# the math involved in checking for containment (here and inside of
# segment_hits) assumes that it is OK to overflow. In case the
# application has set the error flags such that an exception is raised
# on overflow, we temporarily set the appropriate error flags here and
# set them back when we are finished.
olderrflags = np.seterr(all='ignore')
try:
# Check for collision
if self._linestyle in ['None', None]:
# If no line, return the nearby point(s)
d = (xt - mouseevent.x) ** 2 + (yt - mouseevent.y) ** 2
ind, = np.nonzero(np.less_equal(d, pixels ** 2))
else:
# If line, return the nearby segment(s)
ind = segment_hits(mouseevent.x, mouseevent.y, xt, yt, pixels)
finally:
np.seterr(**olderrflags)
ind += self.ind_offset
# Debugging message
if False and self._label != '':
print("Checking line", self._label,
"at", mouseevent.x, mouseevent.y)
print('xt', xt)
print('yt', yt)
#print 'dx,dy', (xt-mouseevent.x)**2., (yt-mouseevent.y)**2.
print('ind', ind)
# Return the point(s) within radius
return len(ind) > 0, dict(ind=ind)
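    # Editorial sketch (hedged): contains() is the basis for pick events;
    # given a mouse event in pixel coordinates it reports nearby indices:
    #
    #   hit, props = line.contains(mouseevent)
    #   if hit:
    #       print('picked indices:', props['ind'])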
def get_pickradius(self):
"""return the pick radius used for containment tests"""
return self.pickradius
def set_pickradius(self, d):
"""Sets the pick radius used for containment tests
ACCEPTS: float distance in points
"""
self.pickradius = d
def get_fillstyle(self):
"""
return the marker fillstyle
"""
return self._marker.get_fillstyle()
def set_fillstyle(self, fs):
"""
Set the marker fill style; 'full' means fill the whole marker.
'none' means no filling; other options are for half-filled markers.
ACCEPTS: ['full' | 'left' | 'right' | 'bottom' | 'top' | 'none']
"""
self._marker.set_fillstyle(fs)
def set_markevery(self, every):
"""
        Set the markevery property to subsample the plot when using
        markers, e.g., if ``markevery=5``, every 5th marker will be
plotted. *every* can be
None
Every point will be plotted
an integer N
Every N-th marker will be plotted starting with marker 0
A length-2 tuple of integers
every=(start, N) will start at point start and plot every N-th
marker
ACCEPTS: None | integer | (startind, stride)
"""
self._markevery = every
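    # Editorial sketch (hedged), matching the accepted forms above:
    #
    #   line.set_markevery(5)        # every 5th vertex, starting at 0
    #   line.set_markevery((2, 10))  # start at vertex 2, then every 10th
    #   line.set_markevery(None)     # mark every vertex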
def get_markevery(self):
"""return the markevery setting"""
return self._markevery
def set_picker(self, p):
"""Sets the event picker details for the line.
ACCEPTS: float distance in points or callable pick function
``fn(artist, event)``
"""
if six.callable(p):
self._contains = p
else:
self.pickradius = p
self._picker = p
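    # Editorial sketch (hedged): a callable picker replaces the default
    # radius test and must return (hit, props) like contains() does:
    #
    #   def my_picker(artist, event):
    #       return artist.contains(event)
    #   line.set_picker(my_picker)   # or line.set_picker(10) for 10 points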
def get_window_extent(self, renderer):
bbox = Bbox([[0, 0], [0, 0]])
trans_data_to_xy = self.get_transform().transform
bbox.update_from_data_xy(trans_data_to_xy(self.get_xydata()),
ignore=True)
# correct for marker size, if any
if self._marker:
ms = (self._markersize / 72.0 * self.figure.dpi) * 0.5
bbox = bbox.padded(ms)
return bbox
def set_axes(self, ax):
Artist.set_axes(self, ax)
if ax.xaxis is not None:
self._xcid = ax.xaxis.callbacks.connect('units',
self.recache_always)
if ax.yaxis is not None:
self._ycid = ax.yaxis.callbacks.connect('units',
self.recache_always)
set_axes.__doc__ = Artist.set_axes.__doc__
def set_data(self, *args):
"""
Set the x and y data
ACCEPTS: 2D array (rows are x, y) or two 1D arrays
"""
if len(args) == 1:
x, y = args[0]
else:
x, y = args
self.set_xdata(x)
self.set_ydata(y)
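    # Editorial sketch (hedged): both accepted calling forms.
    #
    #   line.set_data([1, 2, 3], [4, 5, 6])    # two 1D sequences
    #   line.set_data(([1, 2, 3], [4, 5, 6]))  # a single (x, y) pair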
def recache_always(self):
self.recache(always=True)
def recache(self, always=False):
if always or self._invalidx:
xconv = self.convert_xunits(self._xorig)
if ma.isMaskedArray(self._xorig):
x = ma.asarray(xconv, np.float_)
else:
x = np.asarray(xconv, np.float_)
x = x.ravel()
else:
x = self._x
if always or self._invalidy:
yconv = self.convert_yunits(self._yorig)
if ma.isMaskedArray(self._yorig):
y = ma.asarray(yconv, np.float_)
else:
y = np.asarray(yconv, np.float_)
y = y.ravel()
else:
y = self._y
if len(x) == 1 and len(y) > 1:
x = x * np.ones(y.shape, np.float_)
if len(y) == 1 and len(x) > 1:
y = y * np.ones(x.shape, np.float_)
if len(x) != len(y):
raise RuntimeError('xdata and ydata must be the same length')
x = x.reshape((len(x), 1))
y = y.reshape((len(y), 1))
if ma.isMaskedArray(x) or ma.isMaskedArray(y):
self._xy = ma.concatenate((x, y), 1)
else:
self._xy = np.concatenate((x, y), 1)
self._x = self._xy[:, 0] # just a view
self._y = self._xy[:, 1] # just a view
self._subslice = False
if (self.axes and len(x) > 100 and self._is_sorted(x) and
self.axes.name == 'rectilinear' and
self.axes.get_xscale() == 'linear' and
self._markevery is None and
self.get_clip_on() is True):
self._subslice = True
if hasattr(self, '_path'):
interpolation_steps = self._path._interpolation_steps
else:
interpolation_steps = 1
self._path = Path(self._xy, None, interpolation_steps)
self._transformed_path = None
self._invalidx = False
self._invalidy = False
def _transform_path(self, subslice=None):
"""
Puts a TransformedPath instance at self._transformed_path,
all invalidation of the transform is then handled by the
TransformedPath instance.
"""
# Masked arrays are now handled by the Path class itself
if subslice is not None:
_path = Path(self._xy[subslice, :])
else:
_path = self._path
self._transformed_path = TransformedPath(_path, self.get_transform())
def _get_transformed_path(self):
"""
Return the :class:`~matplotlib.transforms.TransformedPath` instance
of this line.
"""
if self._transformed_path is None:
self._transform_path()
return self._transformed_path
def set_transform(self, t):
"""
set the Transformation instance used by this artist
ACCEPTS: a :class:`matplotlib.transforms.Transform` instance
"""
Artist.set_transform(self, t)
self._invalidx = True
self._invalidy = True
    def _is_sorted(self, x):
        """Return True if x is sorted in non-decreasing order."""
        if len(x) < 2:
            return True
        return np.amin(x[1:] - x[0:-1]) >= 0
@allow_rasterization
def draw(self, renderer):
"""draw the Line with `renderer` unless visibility is False"""
if not self.get_visible():
return
if self._invalidy or self._invalidx:
self.recache()
self.ind_offset = 0 # Needed for contains() method.
if self._subslice and self.axes:
# Need to handle monotonically decreasing case also...
x0, x1 = self.axes.get_xbound()
i0, = self._x.searchsorted([x0], 'left')
i1, = self._x.searchsorted([x1], 'right')
subslice = slice(max(i0 - 1, 0), i1 + 1)
self.ind_offset = subslice.start
self._transform_path(subslice)
transf_path = self._get_transformed_path()
if self.get_path_effects():
from matplotlib.patheffects import PathEffectRenderer
renderer = PathEffectRenderer(self.get_path_effects(), renderer)
renderer.open_group('line2d', self.get_gid())
gc = renderer.new_gc()
self._set_gc_clip(gc)
ln_color_rgba = self._get_rgba_ln_color()
gc.set_foreground(ln_color_rgba, isRGBA=True)
gc.set_alpha(ln_color_rgba[3])
gc.set_antialiased(self._antialiased)
gc.set_linewidth(self._linewidth)
if self.is_dashed():
cap = self._dashcapstyle
join = self._dashjoinstyle
else:
cap = self._solidcapstyle
join = self._solidjoinstyle
gc.set_joinstyle(join)
gc.set_capstyle(cap)
gc.set_snap(self.get_snap())
if self.get_sketch_params() is not None:
gc.set_sketch_params(*self.get_sketch_params())
funcname = self._lineStyles.get(self._linestyle, '_draw_nothing')
if funcname != '_draw_nothing':
tpath, affine = transf_path.get_transformed_path_and_affine()
if len(tpath.vertices):
self._lineFunc = getattr(self, funcname)
funcname = self.drawStyles.get(self._drawstyle, '_draw_lines')
drawFunc = getattr(self, funcname)
drawFunc(renderer, gc, tpath, affine.frozen())
if self._marker:
gc = renderer.new_gc()
self._set_gc_clip(gc)
rgbaFace = self._get_rgba_face()
rgbaFaceAlt = self._get_rgba_face(alt=True)
edgecolor = self.get_markeredgecolor()
if is_string_like(edgecolor) and edgecolor.lower() == 'none':
gc.set_linewidth(0)
gc.set_foreground(rgbaFace, isRGBA=True)
else:
gc.set_foreground(edgecolor)
gc.set_linewidth(self._markeredgewidth)
marker = self._marker
tpath, affine = transf_path.get_transformed_points_and_affine()
if len(tpath.vertices):
# subsample the markers if markevery is not None
markevery = self.get_markevery()
if markevery is not None:
if iterable(markevery):
startind, stride = markevery
else:
startind, stride = 0, markevery
if tpath.codes is not None:
codes = tpath.codes[startind::stride]
else:
codes = None
vertices = tpath.vertices[startind::stride]
subsampled = Path(vertices, codes)
else:
subsampled = tpath
snap = marker.get_snap_threshold()
                if isinstance(snap, float):
snap = renderer.points_to_pixels(self._markersize) >= snap
gc.set_snap(snap)
gc.set_joinstyle(marker.get_joinstyle())
gc.set_capstyle(marker.get_capstyle())
marker_path = marker.get_path()
marker_trans = marker.get_transform()
w = renderer.points_to_pixels(self._markersize)
if marker.get_marker() != ',':
# Don't scale for pixels, and don't stroke them
marker_trans = marker_trans.scale(w)
else:
gc.set_linewidth(0)
if rgbaFace is not None:
gc.set_alpha(rgbaFace[3])
renderer.draw_markers(gc, marker_path, marker_trans,
subsampled, affine.frozen(),
rgbaFace)
alt_marker_path = marker.get_alt_path()
if alt_marker_path:
if rgbaFaceAlt is not None:
gc.set_alpha(rgbaFaceAlt[3])
alt_marker_trans = marker.get_alt_transform()
alt_marker_trans = alt_marker_trans.scale(w)
renderer.draw_markers(
gc, alt_marker_path, alt_marker_trans, subsampled,
affine.frozen(), rgbaFaceAlt)
gc.restore()
gc.restore()
renderer.close_group('line2d')
def get_antialiased(self):
return self._antialiased
def get_color(self):
return self._color
def get_drawstyle(self):
return self._drawstyle
def get_linestyle(self):
return self._linestyle
def get_linewidth(self):
return self._linewidth
def get_marker(self):
return self._marker.get_marker()
def get_markeredgecolor(self):
mec = self._markeredgecolor
if (is_string_like(mec) and mec == 'auto'):
if self._marker.get_marker() in ('.', ','):
return self._color
if self._marker.is_filled() and self.get_fillstyle() != 'none':
return 'k' # Bad hard-wired default...
else:
return self._color
else:
return mec
def get_markeredgewidth(self):
return self._markeredgewidth
def _get_markerfacecolor(self, alt=False):
if alt:
fc = self._markerfacecoloralt
else:
fc = self._markerfacecolor
if (is_string_like(fc) and fc.lower() == 'auto'):
if self.get_fillstyle() == 'none':
return 'none'
else:
return self._color
else:
return fc
def get_markerfacecolor(self):
return self._get_markerfacecolor(alt=False)
def get_markerfacecoloralt(self):
return self._get_markerfacecolor(alt=True)
def get_markersize(self):
return self._markersize
def get_data(self, orig=True):
"""
Return the xdata, ydata.
If *orig* is *True*, return the original data.
"""
return self.get_xdata(orig=orig), self.get_ydata(orig=orig)
def get_xdata(self, orig=True):
"""
Return the xdata.
If *orig* is *True*, return the original data, else the
processed data.
"""
if orig:
return self._xorig
if self._invalidx:
self.recache()
return self._x
def get_ydata(self, orig=True):
"""
Return the ydata.
If *orig* is *True*, return the original data, else the
processed data.
"""
if orig:
return self._yorig
if self._invalidy:
self.recache()
return self._y
def get_path(self):
"""
Return the :class:`~matplotlib.path.Path` object associated
with this line.
"""
if self._invalidy or self._invalidx:
self.recache()
return self._path
def get_xydata(self):
"""
Return the *xy* data as a Nx2 numpy array.
"""
if self._invalidy or self._invalidx:
self.recache()
return self._xy
def set_antialiased(self, b):
"""
        True if line should be drawn with antialiased rendering
ACCEPTS: [True | False]
"""
self._antialiased = b
def set_color(self, color):
"""
Set the color of the line
ACCEPTS: any matplotlib color
"""
self._color = color
def set_drawstyle(self, drawstyle):
"""
Set the drawstyle of the plot
'default' connects the points with lines. The steps variants
produce step-plots. 'steps' is equivalent to 'steps-pre' and
is maintained for backward-compatibility.
ACCEPTS: ['default' | 'steps' | 'steps-pre' | 'steps-mid' |
'steps-post']
"""
self._drawstyle = drawstyle
def set_linewidth(self, w):
"""
Set the line width in points
ACCEPTS: float value in points
"""
self._linewidth = w
def set_linestyle(self, linestyle):
"""
Set the linestyle of the line (also accepts drawstyles)
================ =================
linestyle description
================ =================
``'-'`` solid
``'--'`` dashed
``'-.'`` dash_dot
``':'`` dotted
``'None'`` draw nothing
``' '`` draw nothing
``''`` draw nothing
================ =================
'steps' is equivalent to 'steps-pre' and is maintained for
backward-compatibility.
.. seealso::
:meth:`set_drawstyle`
To set the drawing style (stepping) of the plot.
ACCEPTS: [``'-'`` | ``'--'`` | ``'-.'`` | ``':'`` | ``'None'`` |
``' '`` | ``''``]
and any drawstyle in combination with a linestyle, e.g., ``'steps--'``.
"""
for ds in self.drawStyleKeys: # long names are first in the list
if linestyle.startswith(ds):
self.set_drawstyle(ds)
if len(linestyle) > len(ds):
linestyle = linestyle[len(ds):]
else:
linestyle = '-'
break
if linestyle not in self._lineStyles:
if linestyle in ls_mapper:
linestyle = ls_mapper[linestyle]
else:
verbose.report('Unrecognized line style %s, %s' %
(linestyle, type(linestyle)))
if linestyle in [' ', '']:
linestyle = 'None'
self._linestyle = linestyle
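    # Editorial sketch (hedged): a drawstyle prefix may be fused with the
    # linestyle string and is split off by the loop above.
    #
    #   line.set_linestyle('steps--')  # drawstyle 'steps', linestyle '--'
    #   line.set_linestyle(':')        # dotted line, drawstyle untouched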
@docstring.dedent_interpd
def set_marker(self, marker):
"""
Set the line marker
        Parameters
        ----------
        marker : marker style
            See `~matplotlib.markers` for a full description of the
            possible arguments.
"""
self._marker.set_marker(marker)
def set_markeredgecolor(self, ec):
"""
Set the marker edge color
ACCEPTS: any matplotlib color
"""
if ec is None:
ec = 'auto'
self._markeredgecolor = ec
def set_markeredgewidth(self, ew):
"""
Set the marker edge width in points
ACCEPTS: float value in points
"""
if ew is None:
ew = rcParams['lines.markeredgewidth']
self._markeredgewidth = ew
def set_markerfacecolor(self, fc):
"""
Set the marker face color.
ACCEPTS: any matplotlib color
"""
if fc is None:
fc = 'auto'
self._markerfacecolor = fc
def set_markerfacecoloralt(self, fc):
"""
Set the alternate marker face color.
ACCEPTS: any matplotlib color
"""
if fc is None:
fc = 'auto'
self._markerfacecoloralt = fc
def set_markersize(self, sz):
"""
Set the marker size in points
ACCEPTS: float
"""
self._markersize = sz
def set_xdata(self, x):
"""
Set the data np.array for x
ACCEPTS: 1D array
"""
self._xorig = x
self._invalidx = True
def set_ydata(self, y):
"""
Set the data np.array for y
ACCEPTS: 1D array
"""
self._yorig = y
self._invalidy = True
def set_dashes(self, seq):
"""
        Set the dash sequence, a sequence of alternating on/off ink
        lengths in points.  If seq is empty or if seq = (None, None), the
linestyle will be set to solid.
ACCEPTS: sequence of on/off ink in points
"""
if seq == (None, None) or len(seq) == 0:
self.set_linestyle('-')
else:
self.set_linestyle('--')
self._dashSeq = seq # TODO: offset ignored for now
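    # Editorial sketch (hedged): 3pt of ink followed by a 1pt gap.
    #
    #   line.set_dashes([3, 1])
    #   line.set_dashes([])   # empty sequence -> solid line again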
def _draw_lines(self, renderer, gc, path, trans):
self._lineFunc(renderer, gc, path, trans)
def _draw_steps_pre(self, renderer, gc, path, trans):
vertices = self._xy
steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)
steps[0::2, 0], steps[1::2, 0] = vertices[:, 0], vertices[:-1, 0]
steps[0::2, 1], steps[1:-1:2, 1] = vertices[:, 1], vertices[1:, 1]
path = Path(steps)
path = path.transformed(self.get_transform())
self._lineFunc(renderer, gc, path, IdentityTransform())
def _draw_steps_post(self, renderer, gc, path, trans):
vertices = self._xy
steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)
steps[::2, 0], steps[1:-1:2, 0] = vertices[:, 0], vertices[1:, 0]
steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:-1, 1]
path = Path(steps)
path = path.transformed(self.get_transform())
self._lineFunc(renderer, gc, path, IdentityTransform())
def _draw_steps_mid(self, renderer, gc, path, trans):
vertices = self._xy
steps = ma.zeros((2 * len(vertices), 2), np.float_)
steps[1:-1:2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
steps[2::2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
steps[0, 0] = vertices[0, 0]
steps[-1, 0] = vertices[-1, 0]
steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:, 1]
path = Path(steps)
path = path.transformed(self.get_transform())
self._lineFunc(renderer, gc, path, IdentityTransform())
def _draw_solid(self, renderer, gc, path, trans):
gc.set_linestyle('solid')
renderer.draw_path(gc, path, trans)
def _draw_dashed(self, renderer, gc, path, trans):
gc.set_linestyle('dashed')
if self._dashSeq is not None:
gc.set_dashes(0, self._dashSeq)
renderer.draw_path(gc, path, trans)
def _draw_dash_dot(self, renderer, gc, path, trans):
gc.set_linestyle('dashdot')
renderer.draw_path(gc, path, trans)
def _draw_dotted(self, renderer, gc, path, trans):
gc.set_linestyle('dotted')
renderer.draw_path(gc, path, trans)
def update_from(self, other):
"""copy properties from other to self"""
Artist.update_from(self, other)
self._linestyle = other._linestyle
self._linewidth = other._linewidth
self._color = other._color
self._markersize = other._markersize
self._markerfacecolor = other._markerfacecolor
self._markerfacecoloralt = other._markerfacecoloralt
self._markeredgecolor = other._markeredgecolor
self._markeredgewidth = other._markeredgewidth
self._dashSeq = other._dashSeq
self._dashcapstyle = other._dashcapstyle
self._dashjoinstyle = other._dashjoinstyle
self._solidcapstyle = other._solidcapstyle
self._solidjoinstyle = other._solidjoinstyle
self._linestyle = other._linestyle
self._marker = MarkerStyle(other._marker.get_marker(),
other._marker.get_fillstyle())
self._drawstyle = other._drawstyle
def _get_rgb_face(self, alt=False):
facecolor = self._get_markerfacecolor(alt=alt)
if is_string_like(facecolor) and facecolor.lower() == 'none':
rgbFace = None
else:
rgbFace = colorConverter.to_rgb(facecolor)
return rgbFace
def _get_rgba_face(self, alt=False):
facecolor = self._get_markerfacecolor(alt=alt)
if is_string_like(facecolor) and facecolor.lower() == 'none':
rgbaFace = None
else:
rgbaFace = colorConverter.to_rgba(facecolor, self._alpha)
return rgbaFace
def _get_rgba_ln_color(self, alt=False):
return colorConverter.to_rgba(self._color, self._alpha)
# some aliases....
def set_aa(self, val):
'alias for set_antialiased'
self.set_antialiased(val)
def set_c(self, val):
'alias for set_color'
self.set_color(val)
def set_ls(self, val):
"""alias for set_linestyle"""
self.set_linestyle(val)
def set_lw(self, val):
"""alias for set_linewidth"""
self.set_linewidth(val)
def set_mec(self, val):
"""alias for set_markeredgecolor"""
self.set_markeredgecolor(val)
def set_mew(self, val):
"""alias for set_markeredgewidth"""
self.set_markeredgewidth(val)
def set_mfc(self, val):
"""alias for set_markerfacecolor"""
self.set_markerfacecolor(val)
def set_mfcalt(self, val):
"""alias for set_markerfacecoloralt"""
self.set_markerfacecoloralt(val)
def set_ms(self, val):
"""alias for set_markersize"""
self.set_markersize(val)
def get_aa(self):
"""alias for get_antialiased"""
return self.get_antialiased()
def get_c(self):
"""alias for get_color"""
return self.get_color()
def get_ls(self):
"""alias for get_linestyle"""
return self.get_linestyle()
def get_lw(self):
"""alias for get_linewidth"""
return self.get_linewidth()
def get_mec(self):
"""alias for get_markeredgecolor"""
return self.get_markeredgecolor()
def get_mew(self):
"""alias for get_markeredgewidth"""
return self.get_markeredgewidth()
def get_mfc(self):
"""alias for get_markerfacecolor"""
return self.get_markerfacecolor()
def get_mfcalt(self, alt=False):
"""alias for get_markerfacecoloralt"""
return self.get_markerfacecoloralt()
def get_ms(self):
"""alias for get_markersize"""
return self.get_markersize()
def set_dash_joinstyle(self, s):
"""
Set the join style for dashed linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_dash_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._dashjoinstyle = s
def set_solid_joinstyle(self, s):
"""
Set the join style for solid linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_solid_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._solidjoinstyle = s
def get_dash_joinstyle(self):
"""
Get the join style for dashed linestyles
"""
return self._dashjoinstyle
def get_solid_joinstyle(self):
"""
Get the join style for solid linestyles
"""
return self._solidjoinstyle
def set_dash_capstyle(self, s):
"""
Set the cap style for dashed linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_dash_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._dashcapstyle = s
def set_solid_capstyle(self, s):
"""
Set the cap style for solid linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_solid_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._solidcapstyle = s
def get_dash_capstyle(self):
"""
Get the cap style for dashed linestyles
"""
return self._dashcapstyle
def get_solid_capstyle(self):
"""
Get the cap style for solid linestyles
"""
return self._solidcapstyle
def is_dashed(self):
'return True if line is dashstyle'
return self._linestyle in ('--', '-.', ':')
class VertexSelector(object):
"""
Manage the callbacks to maintain a list of selected vertices for
:class:`matplotlib.lines.Line2D`. Derived classes should override
:meth:`~matplotlib.lines.VertexSelector.process_selected` to do
something with the picks.
Here is an example which highlights the selected verts with red
circles::
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.lines as lines
class HighlightSelected(lines.VertexSelector):
def __init__(self, line, fmt='ro', **kwargs):
lines.VertexSelector.__init__(self, line)
self.markers, = self.axes.plot([], [], fmt, **kwargs)
def process_selected(self, ind, xs, ys):
self.markers.set_data(xs, ys)
self.canvas.draw()
fig = plt.figure()
ax = fig.add_subplot(111)
x, y = np.random.rand(2, 30)
line, = ax.plot(x, y, 'bs-', picker=5)
selector = HighlightSelected(line)
plt.show()
"""
def __init__(self, line):
"""
Initialize the class with a :class:`matplotlib.lines.Line2D`
instance. The line should already be added to some
:class:`matplotlib.axes.Axes` instance and should have the
picker property set.
"""
if not hasattr(line, 'axes'):
raise RuntimeError('You must first add the line to the Axes')
if line.get_picker() is None:
raise RuntimeError('You must first set the picker property '
'of the line')
self.axes = line.axes
self.line = line
self.canvas = self.axes.figure.canvas
self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
self.ind = set()
def process_selected(self, ind, xs, ys):
"""
Default "do nothing" implementation of the
:meth:`process_selected` method.
*ind* are the indices of the selected vertices. *xs* and *ys*
are the coordinates of the selected vertices.
"""
pass
def onpick(self, event):
"""When the line is picked, update the set of selected indicies."""
if event.artist is not self.line:
return
for i in event.ind:
if i in self.ind:
self.ind.remove(i)
else:
self.ind.add(i)
ind = list(self.ind)
ind.sort()
xdata, ydata = self.line.get_data()
self.process_selected(ind, xdata[ind], ydata[ind])
lineStyles = Line2D._lineStyles
lineMarkers = MarkerStyle.markers
drawStyles = Line2D.drawStyles
fillStyles = MarkerStyle.fillstyles
docstring.interpd.update(Line2D=artist.kwdoc(Line2D))
# You can not set the docstring of an instancemethod,
# but you can on the underlying function. Go figure.
docstring.dedent_interpd(Line2D.__init__)
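# Editorial usage sketch (hedged; not part of the original module): a
# minimal, self-contained demo of the Line2D API documented above.  It
# only runs when this file is executed directly; importing is unaffected.
if __name__ == '__main__':
    import matplotlib.pyplot as plt

    fig = plt.figure()
    ax = fig.add_subplot(111)
    demo_line = Line2D([0, 1, 2, 3], [0, 1, 0, 1], linestyle='--',
                       marker='o', markevery=2, color='b')
    ax.add_line(demo_line)
    ax.set_xlim(-0.5, 3.5)
    ax.set_ylim(-0.5, 1.5)
    plt.show()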
avg_line_length: 32.474684 | max_line_length: 79 | alphanum_fraction: 0.568018
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
import numpy as np
from numpy import ma
from matplotlib import verbose
from . import artist
from .artist import Artist
from .cbook import iterable, is_string_like, is_numlike, ls_mapper
from .colors import colorConverter
from .path import Path
from .transforms import Bbox, TransformedPath, IdentityTransform
from matplotlib import rcParams
from .artist import allow_rasterization
from matplotlib import docstring
from matplotlib.markers import MarkerStyle
# really belong.
from matplotlib.markers import TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN
from matplotlib.markers import CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN
def segment_hits(cx, cy, x, y, radius):
# Process single points specially
if len(x) < 2:
res, = np.nonzero((cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2)
return res
# We need to lop the last element off a lot.
xr, yr = x[:-1], y[:-1]
# Only look at line segments whose nearest point to C on the line
# lies within the segment.
dx, dy = x[1:] - xr, y[1:] - yr
Lnorm_sq = dx ** 2 + dy ** 2 # Possibly want to eliminate Lnorm==0
u = ((cx - xr) * dx + (cy - yr) * dy) / Lnorm_sq
candidates = (u >= 0) & (u <= 1)
#if any(candidates): print "candidates",xr[candidates]
# Note that there is a little area near one side of each point
# which will be near neither segment, and another which will
# be near both, depending on the angle of the lines. The
# following radius test eliminates these ambiguities.
point_hits = (cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2
#if any(point_hits): print "points",xr[candidates]
candidates = candidates & ~(point_hits[:-1] | point_hits[1:])
# For those candidates which remain, determine how far they lie away
# from the line.
px, py = xr + u * dx, yr + u * dy
line_hits = (cx - px) ** 2 + (cy - py) ** 2 <= radius ** 2
#if any(line_hits): print "lines",xr[candidates]
line_hits = line_hits & candidates
points, = point_hits.ravel().nonzero()
lines, = line_hits.ravel().nonzero()
#print points,lines
return np.concatenate((points, lines))
class Line2D(Artist):
lineStyles = _lineStyles = { # hidden names deprecated
'-': '_draw_solid',
'--': '_draw_dashed',
'-.': '_draw_dash_dot',
':': '_draw_dotted',
'None': '_draw_nothing',
' ': '_draw_nothing',
'': '_draw_nothing',
}
_drawStyles_l = {
'default': '_draw_lines',
'steps-mid': '_draw_steps_mid',
'steps-pre': '_draw_steps_pre',
'steps-post': '_draw_steps_post',
}
_drawStyles_s = {
'steps': '_draw_steps_pre',
}
drawStyles = {}
drawStyles.update(_drawStyles_l)
drawStyles.update(_drawStyles_s)
# Need a list ordered with long names first:
drawStyleKeys = (list(six.iterkeys(_drawStyles_l)) +
list(six.iterkeys(_drawStyles_s)))
# Referenced here to maintain API. These are defined in
# MarkerStyle
markers = MarkerStyle.markers
filled_markers = MarkerStyle.filled_markers
fillStyles = MarkerStyle.fillstyles
zorder = 2
validCap = ('butt', 'round', 'projecting')
validJoin = ('miter', 'round', 'bevel')
def __str__(self):
if self._label != "":
return "Line2D(%s)" % (self._label)
elif hasattr(self, '_x') and len(self._x) > 3:
return "Line2D((%g,%g),(%g,%g),...,(%g,%g))"\
% (self._x[0], self._y[0], self._x[0],
self._y[0], self._x[-1], self._y[-1])
elif hasattr(self, '_x'):
return "Line2D(%s)"\
% (",".join(["(%g,%g)" % (x, y) for x, y
in zip(self._x, self._y)]))
else:
return "Line2D()"
def __init__(self, xdata, ydata,
linewidth=None, # all Nones default to rc
linestyle=None,
color=None,
marker=None,
markersize=None,
markeredgewidth=None,
markeredgecolor=None,
markerfacecolor=None,
markerfacecoloralt='none',
fillstyle='full',
antialiased=None,
dash_capstyle=None,
solid_capstyle=None,
dash_joinstyle=None,
solid_joinstyle=None,
pickradius=5,
drawstyle=None,
markevery=None,
**kwargs
):
Artist.__init__(self)
#convert sequences to numpy arrays
if not iterable(xdata):
raise RuntimeError('xdata must be a sequence')
if not iterable(ydata):
raise RuntimeError('ydata must be a sequence')
if linewidth is None:
linewidth = rcParams['lines.linewidth']
if linestyle is None:
linestyle = rcParams['lines.linestyle']
if marker is None:
marker = rcParams['lines.marker']
if color is None:
color = rcParams['lines.color']
if markersize is None:
markersize = rcParams['lines.markersize']
if antialiased is None:
antialiased = rcParams['lines.antialiased']
if dash_capstyle is None:
dash_capstyle = rcParams['lines.dash_capstyle']
if dash_joinstyle is None:
dash_joinstyle = rcParams['lines.dash_joinstyle']
if solid_capstyle is None:
solid_capstyle = rcParams['lines.solid_capstyle']
if solid_joinstyle is None:
solid_joinstyle = rcParams['lines.solid_joinstyle']
if drawstyle is None:
drawstyle = 'default'
self.set_dash_capstyle(dash_capstyle)
self.set_dash_joinstyle(dash_joinstyle)
self.set_solid_capstyle(solid_capstyle)
self.set_solid_joinstyle(solid_joinstyle)
self.set_linestyle(linestyle)
self.set_drawstyle(drawstyle)
self.set_linewidth(linewidth)
self.set_color(color)
self._marker = MarkerStyle()
self.set_marker(marker)
self.set_markevery(markevery)
self.set_antialiased(antialiased)
self.set_markersize(markersize)
self._dashSeq = None
self.set_markerfacecolor(markerfacecolor)
self.set_markerfacecoloralt(markerfacecoloralt)
self.set_markeredgecolor(markeredgecolor)
self.set_markeredgewidth(markeredgewidth)
self.set_fillstyle(fillstyle)
self.verticalOffset = None
# update kwargs before updating data to give the caller a
# chance to init axes (and hence unit support)
self.update(kwargs)
self.pickradius = pickradius
self.ind_offset = 0
if is_numlike(self._picker):
self.pickradius = self._picker
self._xorig = np.asarray([])
self._yorig = np.asarray([])
self._invalidx = True
self._invalidy = True
self.set_data(xdata, ydata)
def contains(self, mouseevent):
if six.callable(self._contains):
return self._contains(self, mouseevent)
if not is_numlike(self.pickradius):
raise ValueError("pick radius should be a distance")
# Make sure we have data to plot
if self._invalidy or self._invalidx:
self.recache()
if len(self._xy) == 0:
return False, {}
# Convert points to pixels
transformed_path = self._get_transformed_path()
path, affine = transformed_path.get_transformed_path_and_affine()
path = affine.transform_path(path)
xy = path.vertices
xt = xy[:, 0]
yt = xy[:, 1]
# Convert pick radius from points to pixels
if self.figure is None:
warnings.warn('no figure set when check if mouse is on line')
pixels = self.pickradius
else:
pixels = self.figure.dpi / 72. * self.pickradius
# the math involved in checking for containment (here and inside of
# segment_hits) assumes that it is OK to overflow. In case the
# application has set the error flags such that an exception is raised
# on overflow, we temporarily set the appropriate error flags here and
# set them back when we are finished.
olderrflags = np.seterr(all='ignore')
try:
# Check for collision
if self._linestyle in ['None', None]:
# If no line, return the nearby point(s)
d = (xt - mouseevent.x) ** 2 + (yt - mouseevent.y) ** 2
ind, = np.nonzero(np.less_equal(d, pixels ** 2))
else:
# If line, return the nearby segment(s)
ind = segment_hits(mouseevent.x, mouseevent.y, xt, yt, pixels)
finally:
np.seterr(**olderrflags)
ind += self.ind_offset
# Debugging message
if False and self._label != '':
print("Checking line", self._label,
"at", mouseevent.x, mouseevent.y)
print('xt', xt)
print('yt', yt)
#print 'dx,dy', (xt-mouseevent.x)**2., (yt-mouseevent.y)**2.
print('ind', ind)
# Return the point(s) within radius
return len(ind) > 0, dict(ind=ind)
def get_pickradius(self):
return self.pickradius
def set_pickradius(self, d):
self.pickradius = d
def get_fillstyle(self):
return self._marker.get_fillstyle()
def set_fillstyle(self, fs):
self._marker.set_fillstyle(fs)
def set_markevery(self, every):
self._markevery = every
def get_markevery(self):
return self._markevery
def set_picker(self, p):
if six.callable(p):
self._contains = p
else:
self.pickradius = p
self._picker = p
def get_window_extent(self, renderer):
bbox = Bbox([[0, 0], [0, 0]])
trans_data_to_xy = self.get_transform().transform
bbox.update_from_data_xy(trans_data_to_xy(self.get_xydata()),
ignore=True)
# correct for marker size, if any
if self._marker:
ms = (self._markersize / 72.0 * self.figure.dpi) * 0.5
bbox = bbox.padded(ms)
return bbox
def set_axes(self, ax):
Artist.set_axes(self, ax)
if ax.xaxis is not None:
self._xcid = ax.xaxis.callbacks.connect('units',
self.recache_always)
if ax.yaxis is not None:
self._ycid = ax.yaxis.callbacks.connect('units',
self.recache_always)
set_axes.__doc__ = Artist.set_axes.__doc__
def set_data(self, *args):
if len(args) == 1:
x, y = args[0]
else:
x, y = args
self.set_xdata(x)
self.set_ydata(y)
def recache_always(self):
self.recache(always=True)
def recache(self, always=False):
if always or self._invalidx:
xconv = self.convert_xunits(self._xorig)
if ma.isMaskedArray(self._xorig):
x = ma.asarray(xconv, np.float_)
else:
x = np.asarray(xconv, np.float_)
x = x.ravel()
else:
x = self._x
if always or self._invalidy:
yconv = self.convert_yunits(self._yorig)
if ma.isMaskedArray(self._yorig):
y = ma.asarray(yconv, np.float_)
else:
y = np.asarray(yconv, np.float_)
y = y.ravel()
else:
y = self._y
if len(x) == 1 and len(y) > 1:
x = x * np.ones(y.shape, np.float_)
if len(y) == 1 and len(x) > 1:
y = y * np.ones(x.shape, np.float_)
if len(x) != len(y):
raise RuntimeError('xdata and ydata must be the same length')
x = x.reshape((len(x), 1))
y = y.reshape((len(y), 1))
if ma.isMaskedArray(x) or ma.isMaskedArray(y):
self._xy = ma.concatenate((x, y), 1)
else:
self._xy = np.concatenate((x, y), 1)
self._x = self._xy[:, 0] # just a view
self._y = self._xy[:, 1] # just a view
self._subslice = False
if (self.axes and len(x) > 100 and self._is_sorted(x) and
self.axes.name == 'rectilinear' and
self.axes.get_xscale() == 'linear' and
self._markevery is None and
self.get_clip_on() is True):
self._subslice = True
if hasattr(self, '_path'):
interpolation_steps = self._path._interpolation_steps
else:
interpolation_steps = 1
self._path = Path(self._xy, None, interpolation_steps)
self._transformed_path = None
self._invalidx = False
self._invalidy = False
def _transform_path(self, subslice=None):
# Masked arrays are now handled by the Path class itself
if subslice is not None:
_path = Path(self._xy[subslice, :])
else:
_path = self._path
self._transformed_path = TransformedPath(_path, self.get_transform())
def _get_transformed_path(self):
if self._transformed_path is None:
self._transform_path()
return self._transformed_path
def set_transform(self, t):
Artist.set_transform(self, t)
self._invalidx = True
self._invalidy = True
def _is_sorted(self, x):
if len(x) < 2:
return 1
return np.amin(x[1:] - x[0:-1]) >= 0
@allow_rasterization
def draw(self, renderer):
if not self.get_visible():
return
if self._invalidy or self._invalidx:
self.recache()
self.ind_offset = 0 # Needed for contains() method.
if self._subslice and self.axes:
# Need to handle monotonically decreasing case also...
x0, x1 = self.axes.get_xbound()
i0, = self._x.searchsorted([x0], 'left')
i1, = self._x.searchsorted([x1], 'right')
subslice = slice(max(i0 - 1, 0), i1 + 1)
self.ind_offset = subslice.start
self._transform_path(subslice)
transf_path = self._get_transformed_path()
if self.get_path_effects():
from matplotlib.patheffects import PathEffectRenderer
renderer = PathEffectRenderer(self.get_path_effects(), renderer)
renderer.open_group('line2d', self.get_gid())
gc = renderer.new_gc()
self._set_gc_clip(gc)
ln_color_rgba = self._get_rgba_ln_color()
gc.set_foreground(ln_color_rgba, isRGBA=True)
gc.set_alpha(ln_color_rgba[3])
gc.set_antialiased(self._antialiased)
gc.set_linewidth(self._linewidth)
if self.is_dashed():
cap = self._dashcapstyle
join = self._dashjoinstyle
else:
cap = self._solidcapstyle
join = self._solidjoinstyle
gc.set_joinstyle(join)
gc.set_capstyle(cap)
gc.set_snap(self.get_snap())
if self.get_sketch_params() is not None:
gc.set_sketch_params(*self.get_sketch_params())
funcname = self._lineStyles.get(self._linestyle, '_draw_nothing')
if funcname != '_draw_nothing':
tpath, affine = transf_path.get_transformed_path_and_affine()
if len(tpath.vertices):
self._lineFunc = getattr(self, funcname)
funcname = self.drawStyles.get(self._drawstyle, '_draw_lines')
drawFunc = getattr(self, funcname)
drawFunc(renderer, gc, tpath, affine.frozen())
if self._marker:
gc = renderer.new_gc()
self._set_gc_clip(gc)
rgbaFace = self._get_rgba_face()
rgbaFaceAlt = self._get_rgba_face(alt=True)
edgecolor = self.get_markeredgecolor()
if is_string_like(edgecolor) and edgecolor.lower() == 'none':
gc.set_linewidth(0)
gc.set_foreground(rgbaFace, isRGBA=True)
else:
gc.set_foreground(edgecolor)
gc.set_linewidth(self._markeredgewidth)
marker = self._marker
tpath, affine = transf_path.get_transformed_points_and_affine()
if len(tpath.vertices):
# subsample the markers if markevery is not None
markevery = self.get_markevery()
if markevery is not None:
if iterable(markevery):
startind, stride = markevery
else:
startind, stride = 0, markevery
if tpath.codes is not None:
codes = tpath.codes[startind::stride]
else:
codes = None
vertices = tpath.vertices[startind::stride]
subsampled = Path(vertices, codes)
else:
subsampled = tpath
snap = marker.get_snap_threshold()
if type(snap) == float:
snap = renderer.points_to_pixels(self._markersize) >= snap
gc.set_snap(snap)
gc.set_joinstyle(marker.get_joinstyle())
gc.set_capstyle(marker.get_capstyle())
marker_path = marker.get_path()
marker_trans = marker.get_transform()
w = renderer.points_to_pixels(self._markersize)
if marker.get_marker() != ',':
# Don't scale for pixels, and don't stroke them
marker_trans = marker_trans.scale(w)
else:
gc.set_linewidth(0)
if rgbaFace is not None:
gc.set_alpha(rgbaFace[3])
renderer.draw_markers(gc, marker_path, marker_trans,
subsampled, affine.frozen(),
rgbaFace)
alt_marker_path = marker.get_alt_path()
if alt_marker_path:
if rgbaFaceAlt is not None:
gc.set_alpha(rgbaFaceAlt[3])
alt_marker_trans = marker.get_alt_transform()
alt_marker_trans = alt_marker_trans.scale(w)
renderer.draw_markers(
gc, alt_marker_path, alt_marker_trans, subsampled,
affine.frozen(), rgbaFaceAlt)
gc.restore()
gc.restore()
renderer.close_group('line2d')
def get_antialiased(self):
return self._antialiased
def get_color(self):
return self._color
def get_drawstyle(self):
return self._drawstyle
def get_linestyle(self):
return self._linestyle
def get_linewidth(self):
return self._linewidth
def get_marker(self):
return self._marker.get_marker()
def get_markeredgecolor(self):
mec = self._markeredgecolor
if (is_string_like(mec) and mec == 'auto'):
if self._marker.get_marker() in ('.', ','):
return self._color
if self._marker.is_filled() and self.get_fillstyle() != 'none':
return 'k' # Bad hard-wired default...
else:
return self._color
else:
return mec
def get_markeredgewidth(self):
return self._markeredgewidth
def _get_markerfacecolor(self, alt=False):
if alt:
fc = self._markerfacecoloralt
else:
fc = self._markerfacecolor
if (is_string_like(fc) and fc.lower() == 'auto'):
if self.get_fillstyle() == 'none':
return 'none'
else:
return self._color
else:
return fc
def get_markerfacecolor(self):
return self._get_markerfacecolor(alt=False)
def get_markerfacecoloralt(self):
return self._get_markerfacecolor(alt=True)
def get_markersize(self):
return self._markersize
def get_data(self, orig=True):
return self.get_xdata(orig=orig), self.get_ydata(orig=orig)
def get_xdata(self, orig=True):
if orig:
return self._xorig
if self._invalidx:
self.recache()
return self._x
def get_ydata(self, orig=True):
if orig:
return self._yorig
if self._invalidy:
self.recache()
return self._y
def get_path(self):
if self._invalidy or self._invalidx:
self.recache()
return self._path
def get_xydata(self):
if self._invalidy or self._invalidx:
self.recache()
return self._xy
def set_antialiased(self, b):
self._antialiased = b
def set_color(self, color):
self._color = color
def set_drawstyle(self, drawstyle):
self._drawstyle = drawstyle
def set_linewidth(self, w):
self._linewidth = w
def set_linestyle(self, linestyle):
for ds in self.drawStyleKeys: # long names are first in the list
if linestyle.startswith(ds):
self.set_drawstyle(ds)
if len(linestyle) > len(ds):
linestyle = linestyle[len(ds):]
else:
linestyle = '-'
break
if linestyle not in self._lineStyles:
if linestyle in ls_mapper:
linestyle = ls_mapper[linestyle]
else:
verbose.report('Unrecognized line style %s, %s' %
(linestyle, type(linestyle)))
if linestyle in [' ', '']:
linestyle = 'None'
self._linestyle = linestyle
@docstring.dedent_interpd
def set_marker(self, marker):
self._marker.set_marker(marker)
def set_markeredgecolor(self, ec):
if ec is None:
ec = 'auto'
self._markeredgecolor = ec
def set_markeredgewidth(self, ew):
if ew is None:
ew = rcParams['lines.markeredgewidth']
self._markeredgewidth = ew
def set_markerfacecolor(self, fc):
if fc is None:
fc = 'auto'
self._markerfacecolor = fc
def set_markerfacecoloralt(self, fc):
if fc is None:
fc = 'auto'
self._markerfacecoloralt = fc
def set_markersize(self, sz):
self._markersize = sz
def set_xdata(self, x):
self._xorig = x
self._invalidx = True
def set_ydata(self, y):
self._yorig = y
self._invalidy = True
def set_dashes(self, seq):
if seq == (None, None) or len(seq) == 0:
self.set_linestyle('-')
else:
self.set_linestyle('--')
self._dashSeq = seq # TODO: offset ignored for now
def _draw_lines(self, renderer, gc, path, trans):
self._lineFunc(renderer, gc, path, trans)
def _draw_steps_pre(self, renderer, gc, path, trans):
vertices = self._xy
steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)
steps[0::2, 0], steps[1::2, 0] = vertices[:, 0], vertices[:-1, 0]
steps[0::2, 1], steps[1:-1:2, 1] = vertices[:, 1], vertices[1:, 1]
path = Path(steps)
path = path.transformed(self.get_transform())
self._lineFunc(renderer, gc, path, IdentityTransform())
def _draw_steps_post(self, renderer, gc, path, trans):
vertices = self._xy
steps = ma.zeros((2 * len(vertices) - 1, 2), np.float_)
steps[::2, 0], steps[1:-1:2, 0] = vertices[:, 0], vertices[1:, 0]
steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:-1, 1]
path = Path(steps)
path = path.transformed(self.get_transform())
self._lineFunc(renderer, gc, path, IdentityTransform())
def _draw_steps_mid(self, renderer, gc, path, trans):
vertices = self._xy
steps = ma.zeros((2 * len(vertices), 2), np.float_)
steps[1:-1:2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
steps[2::2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
steps[0, 0] = vertices[0, 0]
steps[-1, 0] = vertices[-1, 0]
steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:, 1]
path = Path(steps)
path = path.transformed(self.get_transform())
self._lineFunc(renderer, gc, path, IdentityTransform())
def _draw_solid(self, renderer, gc, path, trans):
gc.set_linestyle('solid')
renderer.draw_path(gc, path, trans)
def _draw_dashed(self, renderer, gc, path, trans):
gc.set_linestyle('dashed')
if self._dashSeq is not None:
gc.set_dashes(0, self._dashSeq)
renderer.draw_path(gc, path, trans)
def _draw_dash_dot(self, renderer, gc, path, trans):
gc.set_linestyle('dashdot')
renderer.draw_path(gc, path, trans)
def _draw_dotted(self, renderer, gc, path, trans):
gc.set_linestyle('dotted')
renderer.draw_path(gc, path, trans)
def update_from(self, other):
Artist.update_from(self, other)
self._linestyle = other._linestyle
self._linewidth = other._linewidth
self._color = other._color
self._markersize = other._markersize
self._markerfacecolor = other._markerfacecolor
self._markerfacecoloralt = other._markerfacecoloralt
self._markeredgecolor = other._markeredgecolor
self._markeredgewidth = other._markeredgewidth
self._dashSeq = other._dashSeq
self._dashcapstyle = other._dashcapstyle
self._dashjoinstyle = other._dashjoinstyle
self._solidcapstyle = other._solidcapstyle
self._solidjoinstyle = other._solidjoinstyle
self._linestyle = other._linestyle
self._marker = MarkerStyle(other._marker.get_marker(),
other._marker.get_fillstyle())
self._drawstyle = other._drawstyle
def _get_rgb_face(self, alt=False):
facecolor = self._get_markerfacecolor(alt=alt)
if is_string_like(facecolor) and facecolor.lower() == 'none':
rgbFace = None
else:
rgbFace = colorConverter.to_rgb(facecolor)
return rgbFace
def _get_rgba_face(self, alt=False):
facecolor = self._get_markerfacecolor(alt=alt)
if is_string_like(facecolor) and facecolor.lower() == 'none':
rgbaFace = None
else:
rgbaFace = colorConverter.to_rgba(facecolor, self._alpha)
return rgbaFace
def _get_rgba_ln_color(self, alt=False):
return colorConverter.to_rgba(self._color, self._alpha)
# some aliases....
def set_aa(self, val):
self.set_antialiased(val)
def set_c(self, val):
self.set_color(val)
def set_ls(self, val):
self.set_linestyle(val)
def set_lw(self, val):
self.set_linewidth(val)
def set_mec(self, val):
self.set_markeredgecolor(val)
def set_mew(self, val):
self.set_markeredgewidth(val)
def set_mfc(self, val):
self.set_markerfacecolor(val)
def set_mfcalt(self, val):
self.set_markerfacecoloralt(val)
def set_ms(self, val):
self.set_markersize(val)
def get_aa(self):
return self.get_antialiased()
def get_c(self):
return self.get_color()
def get_ls(self):
return self.get_linestyle()
def get_lw(self):
return self.get_linewidth()
def get_mec(self):
return self.get_markeredgecolor()
def get_mew(self):
return self.get_markeredgewidth()
def get_mfc(self):
return self.get_markerfacecolor()
def get_mfcalt(self, alt=False):
return self.get_markerfacecoloralt()
def get_ms(self):
return self.get_markersize()
def set_dash_joinstyle(self, s):
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_dash_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._dashjoinstyle = s
def set_solid_joinstyle(self, s):
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_solid_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
self._solidjoinstyle = s
def get_dash_joinstyle(self):
return self._dashjoinstyle
def get_solid_joinstyle(self):
return self._solidjoinstyle
def set_dash_capstyle(self, s):
s = s.lower()
if s not in self.validCap:
raise ValueError('set_dash_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._dashcapstyle = s
def set_solid_capstyle(self, s):
s = s.lower()
if s not in self.validCap:
raise ValueError('set_solid_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
self._solidcapstyle = s
def get_dash_capstyle(self):
return self._dashcapstyle
def get_solid_capstyle(self):
return self._solidcapstyle
def is_dashed(self):
return self._linestyle in ('--', '-.', ':')
class VertexSelector:
def __init__(self, line):
if not hasattr(line, 'axes'):
raise RuntimeError('You must first add the line to the Axes')
if line.get_picker() is None:
raise RuntimeError('You must first set the picker property '
'of the line')
self.axes = line.axes
self.line = line
self.canvas = self.axes.figure.canvas
self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
self.ind = set()
def process_selected(self, ind, xs, ys):
pass
def onpick(self, event):
if event.artist is not self.line:
return
for i in event.ind:
if i in self.ind:
self.ind.remove(i)
else:
self.ind.add(i)
ind = list(self.ind)
ind.sort()
xdata, ydata = self.line.get_data()
self.process_selected(ind, xdata[ind], ydata[ind])
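# A usage sketch for VertexSelector (illustrative only, not part of this
# module): subclass it, override process_selected, and clicked vertices
# toggle in and out of the selection. The marker format and picker radius
# below are arbitrary example choices.
#
#     import matplotlib.pyplot as plt
#
#     class HighlightSelected(VertexSelector):
#         def __init__(self, line):
#             VertexSelector.__init__(self, line)
#             self.markers, = self.axes.plot([], [], 'ro')
#
#         def process_selected(self, ind, xs, ys):
#             # overlay red circles on the currently selected vertices
#             self.markers.set_data(xs, ys)
#             self.canvas.draw()
#
#     fig, ax = plt.subplots()
#     line, = ax.plot(range(10), 'bs-', picker=5)
#     selector = HighlightSelected(line)
#     plt.show()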
lineStyles = Line2D._lineStyles
lineMarkers = MarkerStyle.markers
drawStyles = Line2D.drawStyles
fillStyles = MarkerStyle.fillstyles
docstring.interpd.update(Line2D=artist.kwdoc(Line2D))
# You cannot set the docstring of an instancemethod,
# but you can on the underlying function. Go figure.
docstring.dedent_interpd(Line2D.__init__)
| true
| true
|
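The three _draw_steps_* helpers in the Line2D code above share one
vertex-doubling trick: every point is duplicated so the drawn line holds a
constant y between x samples. Below is a minimal standalone sketch of the
steps-mid variant, using plain NumPy instead of the module's numpy.ma and the
since-removed np.float_ alias; the sample vertices are arbitrary.

import numpy as np

def steps_mid(vertices):
    # Duplicate every vertex; x jumps happen halfway between neighbouring
    # samples while each y value is held flat on both sides of its sample.
    vertices = np.asarray(vertices, dtype=float)
    steps = np.zeros((2 * len(vertices), 2))
    steps[1:-1:2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
    steps[2::2, 0] = 0.5 * (vertices[:-1, 0] + vertices[1:, 0])
    steps[0, 0] = vertices[0, 0]
    steps[-1, 0] = vertices[-1, 0]
    steps[0::2, 1], steps[1::2, 1] = vertices[:, 1], vertices[:, 1]
    return steps

# x doubles to [0, 0.5, 0.5, 1.5, 1.5, 2]; y to [0, 0, 1, 1, 0, 0]
print(steps_mid([(0, 0), (1, 1), (2, 0)]))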
1c456142bbc95af7e87173cb0cb84afd5f28b013
| 929
|
py
|
Python
|
interprete/src/models/gpt/example.py
|
serjtroshin/PLBART
|
58e5de3041a2fc8b98e54648c6489fb3c23db9cb
|
[
"MIT"
] | null | null | null |
interprete/src/models/gpt/example.py
|
serjtroshin/PLBART
|
58e5de3041a2fc8b98e54648c6489fb3c23db9cb
|
[
"MIT"
] | null | null | null |
interprete/src/models/gpt/example.py
|
serjtroshin/PLBART
|
58e5de3041a2fc8b98e54648c6489fb3c23db9cb
|
[
"MIT"
] | null | null | null |
# from transformers import pipeline
# generator = pipeline('text-generation', model='EleutherAI/gpt-neo-2.7B')
# generator("EleutherAI has", do_sample=True, min_length=50)
# [{'generated_text': 'EleutherAI has made a commitment to create new software packages for each of its major clients and has'}]
from transformers import GPT2Tokenizer, GPT2Model
model_name = "microsoft/CodeGPT-small-java-adaptedGPT2"
# model_name = "./CodeGPT-small-java-adaptedGPT2"
tokenizer = GPT2Tokenizer.from_pretrained(model_name) # CodeGPT-small-java-adaptedGPT2
model = GPT2Model.from_pretrained(model_name)
# tokenizer.save_pretrained(f"./{model_name}")
# model.save_pretrained(f"./{model_name}")
text = "Replace me by any text you'd like."
encoded_input = tokenizer(text, return_tensors='pt')
print(model)
output = model(**encoded_input, output_hidden_states=True)
print(len(output["hidden_states"]))
print(output["hidden_states"][0].shape)
| 42.227273
| 128
| 0.779333
|
from transformers import GPT2Tokenizer, GPT2Model
model_name = "microsoft/CodeGPT-small-java-adaptedGPT2"
tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2Model.from_pretrained(model_name)
text = "Replace me by any text you'd like."
encoded_input = tokenizer(text, return_tensors='pt')
print(model)
output = model(**encoded_input, output_hidden_states=True)
print(len(output["hidden_states"]))
print(output["hidden_states"][0].shape)
| true
| true
|
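The CodeGPT snippet above stops at printing hidden-state shapes. One common
follow-on is pooling the final layer into a single fixed-size vector per
input; the sketch below assumes torch and transformers are installed and the
checkpoint can be downloaded, and mean pooling is just one illustrative
choice, not something the original file prescribes.

import torch
from transformers import GPT2Tokenizer, GPT2Model

model_name = "microsoft/CodeGPT-small-java-adaptedGPT2"  # same checkpoint as above
tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2Model.from_pretrained(model_name)

text = "public static int add(int a, int b) { return a + b; }"
encoded_input = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    output = model(**encoded_input, output_hidden_states=True)

# Mean-pool the last hidden layer over the token axis: one vector per input.
embedding = output.hidden_states[-1].mean(dim=1)
print(embedding.shape)  # torch.Size([1, 768]) for this small GPT-2 variant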
1c456237e48e7b21db5e6e1bb2ccae546249bad4
| 7,590
|
py
|
Python
|
prov/constants.py
|
AndreiFrunze/wrangler
|
076a07de00fc966dcf18ca6b6a6e804be5245ed9
|
[
"Apache-2.0"
] | 2
|
2017-09-07T04:33:18.000Z
|
2019-01-07T13:32:15.000Z
|
prov/constants.py
|
AndreiFrunze/wrangler
|
076a07de00fc966dcf18ca6b6a6e804be5245ed9
|
[
"Apache-2.0"
] | 2
|
2016-10-06T13:07:05.000Z
|
2017-12-20T09:47:08.000Z
|
prov/constants.py
|
AndreiFrunze/wrangler
|
076a07de00fc966dcf18ca6b6a6e804be5245ed9
|
[
"Apache-2.0"
] | 5
|
2016-09-01T08:38:20.000Z
|
2018-08-28T12:08:39.000Z
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
__author__ = 'Trung Dong Huynh'
__email__ = 'trungdong@donggiang.com'
import six
# # PROV record constants - PROV-DM
# Built-in namespaces
from prov.identifier import Namespace
XSD = Namespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
PROV = Namespace('prov', 'http://www.w3.org/ns/prov#')
XSI = Namespace('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
# C1. Entities/Activities
PROV_ENTITY = PROV['Entity']
PROV_ACTIVITY = PROV['Activity']
PROV_GENERATION = PROV['Generation']
PROV_USAGE = PROV['Usage']
PROV_COMMUNICATION = PROV['Communication']
PROV_START = PROV['Start']
PROV_END = PROV['End']
PROV_INVALIDATION = PROV['Invalidation']
# C2. Derivations
PROV_DERIVATION = PROV['Derivation']
# C3. Agents/Responsibility
PROV_AGENT = PROV['Agent']
PROV_ATTRIBUTION = PROV['Attribution']
PROV_ASSOCIATION = PROV['Association']
PROV_DELEGATION = PROV['Delegation']
PROV_INFLUENCE = PROV['Influence']
# C4. Bundles
PROV_BUNDLE = PROV['Bundle']
# C5. Alternate
PROV_ALTERNATE = PROV['Alternate']
PROV_SPECIALIZATION = PROV['Specialization']
PROV_MENTION = PROV['Mention']
# C6. Collections
PROV_MEMBERSHIP = PROV['Membership']
PROV_N_MAP = {
PROV_ENTITY: u'entity',
PROV_ACTIVITY: u'activity',
PROV_GENERATION: u'wasGeneratedBy',
PROV_USAGE: u'used',
PROV_COMMUNICATION: u'wasInformedBy',
PROV_START: u'wasStartedBy',
PROV_END: u'wasEndedBy',
PROV_INVALIDATION: u'wasInvalidatedBy',
PROV_DERIVATION: u'wasDerivedFrom',
PROV_AGENT: u'agent',
PROV_ATTRIBUTION: u'wasAttributedTo',
PROV_ASSOCIATION: u'wasAssociatedWith',
PROV_DELEGATION: u'actedOnBehalfOf',
PROV_INFLUENCE: u'wasInfluencedBy',
PROV_ALTERNATE: u'alternateOf',
PROV_SPECIALIZATION: u'specializationOf',
PROV_MENTION: u'mentionOf',
PROV_MEMBERSHIP: u'hadMember',
PROV_BUNDLE: u'bundle',
}
# Records defined as subtypes in PROV-N but as top-level types in, for
# example, PROV XML also need a mapping.
ADDITIONAL_N_MAP = {
PROV['Revision']: u'wasRevisionOf',
PROV['Quotation']: u'wasQuotedFrom',
PROV['PrimarySource']: u'hadPrimarySource',
PROV['SoftwareAgent']: u'softwareAgent',
PROV['Person']: u'person',
PROV['Organization']: u'organization',
PROV['Plan']: u'plan',
PROV['Collection']: u'collection',
PROV['EmptyCollection']: u'emptyCollection',
}
# Maps qualified names from the PROV namespace to their base class. If a
# record type has no base class it maps to itself. This is needed, for
# example, for the PROV XML (de)serializer where extended types are used a lot.
PROV_BASE_CLS = {
PROV_ENTITY: PROV_ENTITY,
PROV_ACTIVITY: PROV_ACTIVITY,
PROV_GENERATION: PROV_GENERATION,
PROV_USAGE: PROV_USAGE,
PROV_COMMUNICATION: PROV_COMMUNICATION,
PROV_START: PROV_START,
PROV_END: PROV_END,
PROV_INVALIDATION: PROV_INVALIDATION,
PROV_DERIVATION: PROV_DERIVATION,
PROV['Revision']: PROV_DERIVATION,
PROV['Quotation']: PROV_DERIVATION,
PROV['PrimarySource']: PROV_DERIVATION,
PROV_AGENT: PROV_AGENT,
PROV['SoftwareAgent']: PROV_AGENT,
PROV['Person']: PROV_AGENT,
PROV['Organization']: PROV_AGENT,
PROV_ATTRIBUTION: PROV_ATTRIBUTION,
PROV_ASSOCIATION: PROV_ASSOCIATION,
PROV['Plan']: PROV_ENTITY,
PROV_DELEGATION: PROV_DELEGATION,
PROV_INFLUENCE: PROV_INFLUENCE,
PROV_ALTERNATE: PROV_ALTERNATE,
PROV_SPECIALIZATION: PROV_SPECIALIZATION,
PROV_MENTION: PROV_MENTION,
PROV['Collection']: PROV_ENTITY,
PROV['EmptyCollection']: PROV_ENTITY,
PROV_MEMBERSHIP: PROV_MEMBERSHIP,
PROV_BUNDLE: PROV_ENTITY
}
# Identifiers for PROV's attributes
PROV_ATTR_ENTITY = PROV['entity']
PROV_ATTR_ACTIVITY = PROV['activity']
PROV_ATTR_TRIGGER = PROV['trigger']
PROV_ATTR_INFORMED = PROV['informed']
PROV_ATTR_INFORMANT = PROV['informant']
PROV_ATTR_STARTER = PROV['starter']
PROV_ATTR_ENDER = PROV['ender']
PROV_ATTR_AGENT = PROV['agent']
PROV_ATTR_PLAN = PROV['plan']
PROV_ATTR_DELEGATE = PROV['delegate']
PROV_ATTR_RESPONSIBLE = PROV['responsible']
PROV_ATTR_GENERATED_ENTITY = PROV['generatedEntity']
PROV_ATTR_USED_ENTITY = PROV['usedEntity']
PROV_ATTR_GENERATION = PROV['generation']
PROV_ATTR_USAGE = PROV['usage']
PROV_ATTR_SPECIFIC_ENTITY = PROV['specificEntity']
PROV_ATTR_GENERAL_ENTITY = PROV['generalEntity']
PROV_ATTR_ALTERNATE1 = PROV['alternate1']
PROV_ATTR_ALTERNATE2 = PROV['alternate2']
PROV_ATTR_BUNDLE = PROV['bundle']
PROV_ATTR_INFLUENCEE = PROV['influencee']
PROV_ATTR_INFLUENCER = PROV['influencer']
PROV_ATTR_COLLECTION = PROV['collection']
# Literal properties
PROV_ATTR_TIME = PROV['time']
PROV_ATTR_STARTTIME = PROV['startTime']
PROV_ATTR_ENDTIME = PROV['endTime']
PROV_ATTRIBUTE_QNAMES = {
PROV_ATTR_ENTITY,
PROV_ATTR_ACTIVITY,
PROV_ATTR_TRIGGER,
PROV_ATTR_INFORMED,
PROV_ATTR_INFORMANT,
PROV_ATTR_STARTER,
PROV_ATTR_ENDER,
PROV_ATTR_AGENT,
PROV_ATTR_PLAN,
PROV_ATTR_DELEGATE,
PROV_ATTR_RESPONSIBLE,
PROV_ATTR_GENERATED_ENTITY,
PROV_ATTR_USED_ENTITY,
PROV_ATTR_GENERATION,
PROV_ATTR_USAGE,
PROV_ATTR_SPECIFIC_ENTITY,
PROV_ATTR_GENERAL_ENTITY,
PROV_ATTR_ALTERNATE1,
PROV_ATTR_ALTERNATE2,
PROV_ATTR_BUNDLE,
PROV_ATTR_INFLUENCEE,
PROV_ATTR_INFLUENCER,
PROV_ATTR_COLLECTION
}
PROV_ATTRIBUTE_LITERALS = {
PROV_ATTR_TIME, PROV_ATTR_STARTTIME, PROV_ATTR_ENDTIME
}
# Set of formal attributes of PROV records
PROV_ATTRIBUTES = PROV_ATTRIBUTE_QNAMES | PROV_ATTRIBUTE_LITERALS
PROV_RECORD_ATTRIBUTES = list((attr, six.text_type(attr)) for attr in
PROV_ATTRIBUTES)
PROV_RECORD_IDS_MAP = dict(
(PROV_N_MAP[rec_type_id], rec_type_id) for rec_type_id in PROV_N_MAP
)
PROV_ID_ATTRIBUTES_MAP = dict(
(prov_id, attribute) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
PROV_ATTRIBUTES_ID_MAP = dict(
(attribute, prov_id) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
# Extra definition for convenience
PROV_TYPE = PROV['type']
PROV_LABEL = PROV['label']
PROV_VALUE = PROV['value']
PROV_LOCATION = PROV['location']
PROV_ROLE = PROV['role']
PROV_QUALIFIEDNAME = PROV['QUALIFIED_NAME']
# XSD DATA TYPES
XSD_ANYURI = XSD['anyURI']
XSD_QNAME = XSD['QName']
XSD_DATETIME = XSD['dateTime']
XSD_TIME = XSD['time']
XSD_DATE = XSD['date']
XSD_STRING = XSD['string']
XSD_BOOLEAN = XSD['boolean']
# All XSD Integer types
XSD_INTEGER = XSD['integer']
XSD_LONG = XSD['long']
XSD_INT = XSD['int']
XSD_SHORT = XSD['short']
XSD_BYTE = XSD['byte']
XSD_NONNEGATIVEINTEGER = XSD['nonNegativeInteger']
XSD_UNSIGNEDLONG = XSD['unsignedLong']
XSD_UNSIGNEDINT = XSD['unsignedInt']
XSD_UNSIGNEDSHORT = XSD['unsignedShort']
XSD_UNSIGNEDBYTE = XSD['unsignedByte']
XSD_POSITIVEINTEGER = XSD['positiveInteger']
XSD_NONPOSITIVEINTEGER = XSD['nonPositiveInteger']
XSD_NEGATIVEINTEGER = XSD['negativeInteger']
# All XSD real number types
XSD_FLOAT = XSD['float']
XSD_DOUBLE = XSD['double']
XSD_DECIMAL = XSD['decimal']
| 33.584071
| 75
| 0.691304
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
__author__ = 'Trung Dong Huynh'
__email__ = 'trungdong@donggiang.com'
import six
from prov.identifier import Namespace
XSD = Namespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
PROV = Namespace('prov', 'http://www.w3.org/ns/prov#')
XSI = Namespace('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
PROV_ENTITY = PROV['Entity']
PROV_ACTIVITY = PROV['Activity']
PROV_GENERATION = PROV['Generation']
PROV_USAGE = PROV['Usage']
PROV_COMMUNICATION = PROV['Communication']
PROV_START = PROV['Start']
PROV_END = PROV['End']
PROV_INVALIDATION = PROV['Invalidation']
PROV_DERIVATION = PROV['Derivation']
PROV_AGENT = PROV['Agent']
PROV_ATTRIBUTION = PROV['Attribution']
PROV_ASSOCIATION = PROV['Association']
PROV_DELEGATION = PROV['Delegation']
PROV_INFLUENCE = PROV['Influence']
PROV_BUNDLE = PROV['Bundle']
PROV_ALTERNATE = PROV['Alternate']
PROV_SPECIALIZATION = PROV['Specialization']
PROV_MENTION = PROV['Mention']
PROV_MEMBERSHIP = PROV['Membership']
PROV_N_MAP = {
PROV_ENTITY: u'entity',
PROV_ACTIVITY: u'activity',
PROV_GENERATION: u'wasGeneratedBy',
PROV_USAGE: u'used',
PROV_COMMUNICATION: u'wasInformedBy',
PROV_START: u'wasStartedBy',
PROV_END: u'wasEndedBy',
PROV_INVALIDATION: u'wasInvalidatedBy',
PROV_DERIVATION: u'wasDerivedFrom',
PROV_AGENT: u'agent',
PROV_ATTRIBUTION: u'wasAttributedTo',
PROV_ASSOCIATION: u'wasAssociatedWith',
PROV_DELEGATION: u'actedOnBehalfOf',
PROV_INFLUENCE: u'wasInfluencedBy',
PROV_ALTERNATE: u'alternateOf',
PROV_SPECIALIZATION: u'specializationOf',
PROV_MENTION: u'mentionOf',
PROV_MEMBERSHIP: u'hadMember',
PROV_BUNDLE: u'bundle',
}
ADDITIONAL_N_MAP = {
PROV['Revision']: u'wasRevisionOf',
PROV['Quotation']: u'wasQuotedFrom',
PROV['PrimarySource']: u'hadPrimarySource',
PROV['SoftwareAgent']: u'softwareAgent',
PROV['Person']: u'person',
PROV['Organization']: u'organization',
PROV['Plan']: u'plan',
PROV['Collection']: u'collection',
PROV['EmptyCollection']: u'emptyCollection',
}
PROV_BASE_CLS = {
PROV_ENTITY: PROV_ENTITY,
PROV_ACTIVITY: PROV_ACTIVITY,
PROV_GENERATION: PROV_GENERATION,
PROV_USAGE: PROV_USAGE,
PROV_COMMUNICATION: PROV_COMMUNICATION,
PROV_START: PROV_START,
PROV_END: PROV_END,
PROV_INVALIDATION: PROV_INVALIDATION,
PROV_DERIVATION: PROV_DERIVATION,
PROV['Revision']: PROV_DERIVATION,
PROV['Quotation']: PROV_DERIVATION,
PROV['PrimarySource']: PROV_DERIVATION,
PROV_AGENT: PROV_AGENT,
PROV['SoftwareAgent']: PROV_AGENT,
PROV['Person']: PROV_AGENT,
PROV['Organization']: PROV_AGENT,
PROV_ATTRIBUTION: PROV_ATTRIBUTION,
PROV_ASSOCIATION: PROV_ASSOCIATION,
PROV['Plan']: PROV_ENTITY,
PROV_DELEGATION: PROV_DELEGATION,
PROV_INFLUENCE: PROV_INFLUENCE,
PROV_ALTERNATE: PROV_ALTERNATE,
PROV_SPECIALIZATION: PROV_SPECIALIZATION,
PROV_MENTION: PROV_MENTION,
PROV['Collection']: PROV_ENTITY,
PROV['EmptyCollection']: PROV_ENTITY,
PROV_MEMBERSHIP: PROV_MEMBERSHIP,
PROV_BUNDLE: PROV_ENTITY
}
PROV_ATTR_ENTITY = PROV['entity']
PROV_ATTR_ACTIVITY = PROV['activity']
PROV_ATTR_TRIGGER = PROV['trigger']
PROV_ATTR_INFORMED = PROV['informed']
PROV_ATTR_INFORMANT = PROV['informant']
PROV_ATTR_STARTER = PROV['starter']
PROV_ATTR_ENDER = PROV['ender']
PROV_ATTR_AGENT = PROV['agent']
PROV_ATTR_PLAN = PROV['plan']
PROV_ATTR_DELEGATE = PROV['delegate']
PROV_ATTR_RESPONSIBLE = PROV['responsible']
PROV_ATTR_GENERATED_ENTITY = PROV['generatedEntity']
PROV_ATTR_USED_ENTITY = PROV['usedEntity']
PROV_ATTR_GENERATION = PROV['generation']
PROV_ATTR_USAGE = PROV['usage']
PROV_ATTR_SPECIFIC_ENTITY = PROV['specificEntity']
PROV_ATTR_GENERAL_ENTITY = PROV['generalEntity']
PROV_ATTR_ALTERNATE1 = PROV['alternate1']
PROV_ATTR_ALTERNATE2 = PROV['alternate2']
PROV_ATTR_BUNDLE = PROV['bundle']
PROV_ATTR_INFLUENCEE = PROV['influencee']
PROV_ATTR_INFLUENCER = PROV['influencer']
PROV_ATTR_COLLECTION = PROV['collection']
# Literal properties
PROV_ATTR_TIME = PROV['time']
PROV_ATTR_STARTTIME = PROV['startTime']
PROV_ATTR_ENDTIME = PROV['endTime']
PROV_ATTRIBUTE_QNAMES = {
PROV_ATTR_ENTITY,
PROV_ATTR_ACTIVITY,
PROV_ATTR_TRIGGER,
PROV_ATTR_INFORMED,
PROV_ATTR_INFORMANT,
PROV_ATTR_STARTER,
PROV_ATTR_ENDER,
PROV_ATTR_AGENT,
PROV_ATTR_PLAN,
PROV_ATTR_DELEGATE,
PROV_ATTR_RESPONSIBLE,
PROV_ATTR_GENERATED_ENTITY,
PROV_ATTR_USED_ENTITY,
PROV_ATTR_GENERATION,
PROV_ATTR_USAGE,
PROV_ATTR_SPECIFIC_ENTITY,
PROV_ATTR_GENERAL_ENTITY,
PROV_ATTR_ALTERNATE1,
PROV_ATTR_ALTERNATE2,
PROV_ATTR_BUNDLE,
PROV_ATTR_INFLUENCEE,
PROV_ATTR_INFLUENCER,
PROV_ATTR_COLLECTION
}
PROV_ATTRIBUTE_LITERALS = {
PROV_ATTR_TIME, PROV_ATTR_STARTTIME, PROV_ATTR_ENDTIME
}
# Set of formal attributes of PROV records
PROV_ATTRIBUTES = PROV_ATTRIBUTE_QNAMES | PROV_ATTRIBUTE_LITERALS
PROV_RECORD_ATTRIBUTES = list((attr, six.text_type(attr)) for attr in
PROV_ATTRIBUTES)
PROV_RECORD_IDS_MAP = dict(
(PROV_N_MAP[rec_type_id], rec_type_id) for rec_type_id in PROV_N_MAP
)
PROV_ID_ATTRIBUTES_MAP = dict(
(prov_id, attribute) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
PROV_ATTRIBUTES_ID_MAP = dict(
(attribute, prov_id) for (prov_id, attribute) in PROV_RECORD_ATTRIBUTES
)
# Extra definition for convenience
PROV_TYPE = PROV['type']
PROV_LABEL = PROV['label']
PROV_VALUE = PROV['value']
PROV_LOCATION = PROV['location']
PROV_ROLE = PROV['role']
PROV_QUALIFIEDNAME = PROV['QUALIFIED_NAME']
# XSD DATA TYPES
XSD_ANYURI = XSD['anyURI']
XSD_QNAME = XSD['QName']
XSD_DATETIME = XSD['dateTime']
XSD_TIME = XSD['time']
XSD_DATE = XSD['date']
XSD_STRING = XSD['string']
XSD_BOOLEAN = XSD['boolean']
# All XSD Integer types
XSD_INTEGER = XSD['integer']
XSD_LONG = XSD['long']
XSD_INT = XSD['int']
XSD_SHORT = XSD['short']
XSD_BYTE = XSD['byte']
XSD_NONNEGATIVEINTEGER = XSD['nonNegativeInteger']
XSD_UNSIGNEDLONG = XSD['unsignedLong']
XSD_UNSIGNEDINT = XSD['unsignedInt']
XSD_UNSIGNEDSHORT = XSD['unsignedShort']
XSD_UNSIGNEDBYTE = XSD['unsignedByte']
XSD_POSITIVEINTEGER = XSD['positiveInteger']
XSD_NONPOSITIVEINTEGER = XSD['nonPositiveInteger']
XSD_NEGATIVEINTEGER = XSD['negativeInteger']
# All XSD real number types
XSD_FLOAT = XSD['float']
XSD_DOUBLE = XSD['double']
XSD_DECIMAL = XSD['decimal']
| true
| true
|
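The constants above only come alive once records are created through the
package's document API. A minimal sketch, assuming the prov package that
ships this module is installed; the ex namespace and the identifiers are
made up for illustration.

from prov.model import ProvDocument

doc = ProvDocument()
doc.add_namespace('ex', 'http://example.org/')
article = doc.entity('ex:article')      # a prov:Entity record
writing = doc.activity('ex:writing')    # a prov:Activity record
doc.wasGeneratedBy(article, writing)    # serialised via the PROV_N_MAP name
print(doc.get_provn())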
1c45639a0e721d5529cabef77a05e804e7bcec90
| 3,371
|
py
|
Python
|
application/app.py
|
StephenSpicer/airbnb_app
|
ea9b92dbd35d508d2f819255d026b299d63285a8
|
[
"MIT"
] | null | null | null |
application/app.py
|
StephenSpicer/airbnb_app
|
ea9b92dbd35d508d2f819255d026b299d63285a8
|
[
"MIT"
] | null | null | null |
application/app.py
|
StephenSpicer/airbnb_app
|
ea9b92dbd35d508d2f819255d026b299d63285a8
|
[
"MIT"
] | null | null | null |
# Import Statements
import pandas as pd
from flask import Flask, render_template, request
from joblib import load
from .predict import get_prediction
# Instantiate Application
# def create_app():
# app = Flask(__name__)
# @app.route('/')
# def hello_heroku():
# return "hello heroku"
# return app
def create_app():
"""
    Function to deploy the Heroku application.
    Contains an assortment of functions which control the inputs and outputs
    of the interactive web application.
"""
app = Flask(__name__)
load_model = load('application/finalized_model.sav')
    # as easy as changing the path to /form and making a link to it on the main page
@app.route('/')
def form():
return render_template('form.html')
@app.route('/run_model', methods=['POST', 'GET'])
def data():
        # if the user types in /run_model directly they get this error message
        if request.method == 'GET':
            message = "ERROR: The URL /run_model was accessed directly. Go to the home page '/' to submit the form."
        # if the user goes to /form and hits submit, they land on this page!
        # here the user input gets stored in to_predict, which is then run through the model
if request.method == 'POST':
property_type = str(request.values["prop"])
room_type = str(request.values["room_type"])
bathrooms = float(request.values["bathrooms"])
cancellation_policy = str(request.values["cancellation"])
city = str(request.values["city"])
host_since = str(request.values["host_since"])
review_scores_rating = int(request.values["review_rating"])
bedrooms = int(request.values["bedrooms"])
beds = int(request.values["beds"])
# We will be adding a few more dropdowns above
# need to get this to print out T/F list
amenities = request.form.getlist('feature_checkbox')
# basics =
to_predict = [property_type, room_type, bathrooms,
cancellation_policy, city, host_since,
review_scores_rating, bedrooms, beds,
amenities]
message = model_output(to_predict)
return message
def model_output(user_input):
mod_input = []
all_amenities = [
"instant_bookable",
"host_has_profile_pic",
"host_identity_verified",
"cleaning_fee",
"Wireless Internet",
"Air conditioning",
"Kitchen",
"Heating",
"Family/kid friendly",
"Hair dryer",
"Iron",
"Shampoo",
"Fire extinguisher",
"Laptop friendly workspace",
"Indoor fireplace",
"TV",
"Cable TV"]
        # Append the unchanging variables to the list first (check indexing here?)
        mod_input.extend(user_input[:9])
        selected_amenities = user_input[9]  # renamed: avoid shadowing the built-in input()
        # Loop through the conditional (checkbox) variables
        for option in all_amenities:
            if any(option in s for s in selected_amenities):
mod_input.append(1)
else:
mod_input.append(0)
price='${}'.format(get_prediction(mod_input, load_model))
return render_template('results.html', prediction=price)
return app
| 34.752577
| 114
| 0.589143
|
import pandas as pd
from flask import Flask, render_template, request
from joblib import load
from .predict import get_prediction
def create_app():
app = Flask(__name__)
load_model = load('application/finalized_model.sav')
@app.route('/')
def form():
return render_template('form.html')
@app.route('/run_model', methods=['POST', 'GET'])
def data():
if request.method == 'GET':
message = f"ERROR: The URL /run_model is accessed directly. Try going to home page '/' to submit form"
if request.method == 'POST':
property_type = str(request.values["prop"])
room_type = str(request.values["room_type"])
bathrooms = float(request.values["bathrooms"])
cancellation_policy = str(request.values["cancellation"])
city = str(request.values["city"])
host_since = str(request.values["host_since"])
review_scores_rating = int(request.values["review_rating"])
bedrooms = int(request.values["bedrooms"])
beds = int(request.values["beds"])
amenities = request.form.getlist('feature_checkbox')
to_predict = [property_type, room_type, bathrooms,
cancellation_policy, city, host_since,
review_scores_rating, bedrooms, beds,
amenities]
message = model_output(to_predict)
return message
def model_output(user_input):
mod_input = []
all_amenities = [
"instant_bookable",
"host_has_profile_pic",
"host_identity_verified",
"cleaning_fee",
"Wireless Internet",
"Air conditioning",
"Kitchen",
"Heating",
"Family/kid friendly",
"Hair dryer",
"Iron",
"Shampoo",
"Fire extinguisher",
"Laptop friendly workspace",
"Indoor fireplace",
"TV",
"Cable TV"]
mod_input.extend(user_input[:9])
        selected_amenities = user_input[9]
        for option in all_amenities:
            if any(option in s for s in selected_amenities):
mod_input.append(1)
else:
mod_input.append(0)
price='${}'.format(get_prediction(mod_input, load_model))
return render_template('results.html', prediction=price)
return app
| true
| true
|
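The full create_app above cannot run standalone because it loads a pickled
model file and imports a local predict helper. The self-contained sketch
below shows the same app-factory pattern exercised with Flask's built-in
test client; the trimmed-down route and its field names are illustrative
stand-ins for the request.values reads above, not the app's real logic.

from flask import Flask, request

def create_demo_app():
    app = Flask(__name__)

    @app.route('/run_model', methods=['POST'])
    def run_model():
        # echo two of the form fields back, mimicking the POST branch above
        bedrooms = int(request.values["bedrooms"])
        beds = int(request.values["beds"])
        return f"{bedrooms} bedrooms, {beds} beds"

    return app

client = create_demo_app().test_client()
resp = client.post('/run_model', data={"bedrooms": "2", "beds": "3"})
print(resp.get_data(as_text=True))  # "2 bedrooms, 3 beds"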
1c456470d18841deab9b9d0f37410c49dd5f194b
| 8,894
|
py
|
Python
|
dragonfly/opt/multiobjective_optimiser.py
|
hase1128/dragonfly
|
4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c
|
[
"MIT"
] | null | null | null |
dragonfly/opt/multiobjective_optimiser.py
|
hase1128/dragonfly
|
4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c
|
[
"MIT"
] | null | null | null |
dragonfly/opt/multiobjective_optimiser.py
|
hase1128/dragonfly
|
4be7e4c539d3edccc4d243ab9f972b1ffb0d9a5c
|
[
"MIT"
] | null | null | null |
"""
Defines a class for Multi-objective Blackbox Optimisation.
-- bparia@cs.cmu.edu
-- kandasamy@cs.cmu.edu
"""
# NB: In this file, the acronym MOO/moo refers to multi-objective optimisation. --KK
# pylint: disable=abstract-class-little-used
# pylint: disable=invalid-name
from __future__ import division
from argparse import Namespace
import numpy as np
# Local imports
from ..exd.exd_core import ExperimentDesigner, exd_core_args
from ..exd.experiment_caller import MultiFunctionCaller, FunctionCaller
from ..utils.general_utils import update_pareto_set
multiobjective_opt_args = exd_core_args
_NO_MF_FOR_MOO_ERR_MSG = 'Multi-fidelity support has not been implemented yet' + \
' for multi-objective optimisation.'
class MultiObjectiveOptimiser(ExperimentDesigner):
""" Blackbox Optimiser Class. """
# pylint: disable=attribute-defined-outside-init
def __init__(self, multi_func_caller, worker_manager, model=None, options=None,
reporter=None):
""" Constructor. """
assert isinstance(multi_func_caller, MultiFunctionCaller) and not \
isinstance(multi_func_caller, FunctionCaller)
self.multi_func_caller = multi_func_caller
# If it is not a list, computing the non-dominated set is equivalent
# to computing the maximum.
self.domain = self.multi_func_caller.domain
super(MultiObjectiveOptimiser, self).__init__(multi_func_caller, worker_manager,
model, options, reporter)
def _exd_child_set_up(self):
""" Set up for the optimisation. """
if self.multi_func_caller.is_mf():
# self.num_fidel_to_opt_calls = 0
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
self._moo_set_up()
self._multi_opt_method_set_up()
self.prev_eval_vals = [] # for optimiser, prev_eval_vals
def _moo_set_up(self):
""" Set up for black-box optimisation. """
# Initialise optimal values and points
# (Optimal point for MF problems is not defined)
# (Instead a set of pareto optimal points will be maintained)
self.curr_pareto_vals = []
self.curr_pareto_points = []
self.curr_true_pareto_vals = []
self.curr_true_pareto_points = []
# Set up history
self.history.query_vals = []
self.history.query_true_vals = []
self.history.curr_pareto_vals = []
self.history.curr_pareto_points = []
self.history.curr_true_pareto_vals = []
self.history.curr_true_pareto_points = []
if self.multi_func_caller.is_mf():
# self.history.query_at_fidel_to_opts = []
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
# Set up attributes to be copied from history
self.to_copy_from_qinfo_to_history['val'] = 'query_vals'
self.to_copy_from_qinfo_to_history['true_val'] = 'query_true_vals'
# Set up previous evaluations
self.prev_eval_vals = []
self.prev_eval_true_vals = []
self.history.prev_eval_vals = self.prev_eval_vals
self.history.prev_eval_true_vals = self.prev_eval_true_vals
def _multi_opt_method_set_up(self):
""" Any set up for the specific optimisation method. """
raise NotImplementedError('Implement in Optimisation Method class.')
def _get_problem_str(self):
""" Description of the problem. """
return 'Multi-objective Optimisation'
# Book-keeping ----------------------------------------------------------------
def _exd_child_update_history(self, qinfo):
""" Updates to the history specific to optimisation. """
# Update the best point/val
# check fidelity
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
else:
self._update_opt_point_and_val(qinfo)
# Now add to history
self.history.curr_pareto_vals.append(self.curr_pareto_vals)
self.history.curr_pareto_points.append(self.curr_pareto_points)
self.history.curr_true_pareto_vals.append(self.curr_true_pareto_vals)
self.history.curr_true_pareto_points.append(self.curr_true_pareto_points)
# Any method specific updating
self._multi_opt_method_update_history(qinfo)
def _update_opt_point_and_val(self, qinfo, query_is_at_fidel_to_opt=None):
""" Updates the optimum point and value according the data in qinfo.
Can be overridden by a child class if you want to do anything differently.
"""
if query_is_at_fidel_to_opt is not None:
if not query_is_at_fidel_to_opt:
# if the fidelity queried at is not fidel_to_opt, then return
return
# Optimise curr_opt_val and curr_true_opt_val
self.curr_pareto_vals, self.curr_pareto_points = update_pareto_set(
self.curr_pareto_vals, self.curr_pareto_points, qinfo.val, qinfo.point)
self.curr_true_pareto_vals, self.curr_true_pareto_points = update_pareto_set(
self.curr_true_pareto_vals, self.curr_true_pareto_points, qinfo.true_val,
qinfo.point)
def _multi_opt_method_update_history(self, qinfo):
""" Any updates to the history specific to the method. """
pass # Pass by default. Not necessary to override.
def _get_exd_child_header_str(self):
""" Header for black box optimisation. """
ret = '#Pareto=<num_pareto_optimal_points_found>'
ret += self._get_opt_method_header_str()
return ret
@classmethod
def _get_opt_method_header_str(cls):
""" Header for optimisation method. """
return ''
def _get_exd_child_report_results_str(self):
""" Returns a string describing the progress in optimisation. """
best_val_str = '#Pareto: %d'%(len(self.curr_pareto_vals))
opt_method_str = self._get_opt_method_report_results_str()
return best_val_str + opt_method_str + ', '
def _get_opt_method_report_results_str(self):
""" Any details to include in a child method when reporting results.
Can be overridden by a child class.
"""
#pylint: disable=no-self-use
return ''
def _exd_child_handle_prev_evals_in_options(self):
""" Handles pre-evaluations. """
ret = 0
for qinfo in self.options.prev_evaluations.qinfos:
if not hasattr(qinfo, 'true_val'):
qinfo.true_val = [-np.inf] * len(qinfo.val)
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
else:
self._update_opt_point_and_val(qinfo)
self.prev_eval_points.append(qinfo.point)
self.prev_eval_vals.append(qinfo.val)
self.prev_eval_true_vals.append(qinfo.true_val)
ret += 1
return ret
def _child_handle_data_loaded_from_file(self, loaded_data_from_file):
""" Handles evaluations from file. """
query_points = loaded_data_from_file['points']
num_pts_in_file = len(query_points)
query_vals = loaded_data_from_file['vals']
assert num_pts_in_file == len(query_vals)
if 'true_vals' in loaded_data_from_file:
query_true_vals = loaded_data_from_file['true_vals']
      assert num_pts_in_file == len(query_true_vals)
else:
query_true_vals = [[-np.inf] * self.multi_func_caller.num_funcs] * len(query_vals)
# Multi-fidelity
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
# Now Iterate through each point
for pt, val, true_val in zip(query_points, query_vals, query_true_vals):
qinfo = Namespace(point=pt, val=val, true_val=true_val)
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
else:
self._update_opt_point_and_val(qinfo)
self.prev_eval_points.append(qinfo.point)
self.prev_eval_vals.append(qinfo.val)
self.prev_eval_true_vals.append(qinfo.true_val)
return num_pts_in_file
def _exd_child_get_data_to_save(self):
""" Return data to save. """
ret = {'points': self.prev_eval_points + self.history.query_points,
'vals': self.prev_eval_vals + self.history.query_vals,
'true_vals': self.prev_eval_true_vals + self.history.query_true_vals}
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
num_data_saved = len(ret['points'])
return ret, num_data_saved
def _child_run_experiments_initialise(self):
""" Handles any initialisation before running experiments. """
self._opt_method_optimise_initalise()
def _opt_method_optimise_initalise(self):
""" Any routine to run for a method just before optimisation routine. """
pass # Pass by default. Not necessary to override.
def optimise(self, max_capital):
""" Calling optimise with optimise the function. A wrapper for run_experiments from
BlackboxExperimenter. """
ret = self.run_experiments(max_capital)
return ret
def _get_final_return_quantities(self):
""" Return the curr_opt_val, curr_opt_point and history. """
return self.curr_pareto_vals, self.curr_pareto_points, self.history
| 40.798165
| 88
| 0.723746
|
from __future__ import division
from argparse import Namespace
import numpy as np
from ..exd.exd_core import ExperimentDesigner, exd_core_args
from ..exd.experiment_caller import MultiFunctionCaller, FunctionCaller
from ..utils.general_utils import update_pareto_set
multiobjective_opt_args = exd_core_args
_NO_MF_FOR_MOO_ERR_MSG = 'Multi-fidelity support has not been implemented yet' + \
' for multi-objective optimisation.'
class MultiObjectiveOptimiser(ExperimentDesigner):
def __init__(self, multi_func_caller, worker_manager, model=None, options=None,
reporter=None):
assert isinstance(multi_func_caller, MultiFunctionCaller) and not \
isinstance(multi_func_caller, FunctionCaller)
self.multi_func_caller = multi_func_caller
self.domain = self.multi_func_caller.domain
super(MultiObjectiveOptimiser, self).__init__(multi_func_caller, worker_manager,
model, options, reporter)
def _exd_child_set_up(self):
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
self._moo_set_up()
self._multi_opt_method_set_up()
self.prev_eval_vals = []
def _moo_set_up(self):
self.curr_pareto_vals = []
self.curr_pareto_points = []
self.curr_true_pareto_vals = []
self.curr_true_pareto_points = []
self.history.query_vals = []
self.history.query_true_vals = []
self.history.curr_pareto_vals = []
self.history.curr_pareto_points = []
self.history.curr_true_pareto_vals = []
self.history.curr_true_pareto_points = []
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
self.to_copy_from_qinfo_to_history['val'] = 'query_vals'
self.to_copy_from_qinfo_to_history['true_val'] = 'query_true_vals'
self.prev_eval_vals = []
self.prev_eval_true_vals = []
self.history.prev_eval_vals = self.prev_eval_vals
self.history.prev_eval_true_vals = self.prev_eval_true_vals
def _multi_opt_method_set_up(self):
raise NotImplementedError('Implement in Optimisation Method class.')
def _get_problem_str(self):
return 'Multi-objective Optimisation'
def _exd_child_update_history(self, qinfo):
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
else:
self._update_opt_point_and_val(qinfo)
self.history.curr_pareto_vals.append(self.curr_pareto_vals)
self.history.curr_pareto_points.append(self.curr_pareto_points)
self.history.curr_true_pareto_vals.append(self.curr_true_pareto_vals)
self.history.curr_true_pareto_points.append(self.curr_true_pareto_points)
self._multi_opt_method_update_history(qinfo)
def _update_opt_point_and_val(self, qinfo, query_is_at_fidel_to_opt=None):
if query_is_at_fidel_to_opt is not None:
if not query_is_at_fidel_to_opt:
return
self.curr_pareto_vals, self.curr_pareto_points = update_pareto_set(
self.curr_pareto_vals, self.curr_pareto_points, qinfo.val, qinfo.point)
self.curr_true_pareto_vals, self.curr_true_pareto_points = update_pareto_set(
self.curr_true_pareto_vals, self.curr_true_pareto_points, qinfo.true_val,
qinfo.point)
def _multi_opt_method_update_history(self, qinfo):
pass
def _get_exd_child_header_str(self):
ret = '#Pareto=<num_pareto_optimal_points_found>'
ret += self._get_opt_method_header_str()
return ret
@classmethod
def _get_opt_method_header_str(cls):
return ''
def _get_exd_child_report_results_str(self):
best_val_str = '#Pareto: %d'%(len(self.curr_pareto_vals))
opt_method_str = self._get_opt_method_report_results_str()
return best_val_str + opt_method_str + ', '
def _get_opt_method_report_results_str(self):
return ''
def _exd_child_handle_prev_evals_in_options(self):
ret = 0
for qinfo in self.options.prev_evaluations.qinfos:
if not hasattr(qinfo, 'true_val'):
qinfo.true_val = [-np.inf] * len(qinfo.val)
if self.multi_func_caller.is_mf():
raise NotImplementedError(_NO_MF_FOR_MOO_ERR_MSG)
else:
self._update_opt_point_and_val(qinfo)
self.prev_eval_points.append(qinfo.point)
self.prev_eval_vals.append(qinfo.val)
self.prev_eval_true_vals.append(qinfo.true_val)
ret += 1
return ret
def _child_handle_data_loaded_from_file(self, loaded_data_from_file):
query_points = loaded_data_from_file['points']
num_pts_in_file = len(query_points)
query_vals = loaded_data_from_file['vals']
assert num_pts_in_file == len(query_vals)
if 'true_vals' in loaded_data_from_file:
query_true_vals = loaded_data_from_file['true_vals']
      assert num_pts_in_file == len(query_true_vals)
else:
query_true_vals = [[-np.inf] * self.multi_func_caller.num_funcs] * len(query_vals)
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
for pt, val, true_val in zip(query_points, query_vals, query_true_vals):
qinfo = Namespace(point=pt, val=val, true_val=true_val)
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
else:
self._update_opt_point_and_val(qinfo)
self.prev_eval_points.append(qinfo.point)
self.prev_eval_vals.append(qinfo.val)
self.prev_eval_true_vals.append(qinfo.true_val)
return num_pts_in_file
def _exd_child_get_data_to_save(self):
ret = {'points': self.prev_eval_points + self.history.query_points,
'vals': self.prev_eval_vals + self.history.query_vals,
'true_vals': self.prev_eval_true_vals + self.history.query_true_vals}
if self.multi_func_caller.is_mf():
raise NotImplementedError('Not implemented multi-fidelity MOO yet.')
num_data_saved = len(ret['points'])
return ret, num_data_saved
def _child_run_experiments_initialise(self):
self._opt_method_optimise_initalise()
def _opt_method_optimise_initalise(self):
pass
def optimise(self, max_capital):
ret = self.run_experiments(max_capital)
return ret
def _get_final_return_quantities(self):
return self.curr_pareto_vals, self.curr_pareto_points, self.history
| true
| true
|
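update_pareto_set is imported from dragonfly's utils above and never shown.
The following is a rough standalone sketch of the semantics such a helper
implements, assuming maximisation; dragonfly's actual tie-handling and data
layout may differ.

def dominates(u, v):
    # u dominates v if u is at least as good everywhere and better somewhere
    return all(a >= b for a, b in zip(u, v)) and any(a > b for a, b in zip(u, v))

def update_pareto_set(vals, points, new_val, new_point):
    if any(dominates(v, new_val) for v in vals):
        return vals, points  # the new point is dominated: set unchanged
    keep = [i for i, v in enumerate(vals) if not dominates(new_val, v)]
    return ([vals[i] for i in keep] + [new_val],
            [points[i] for i in keep] + [new_point])

vals, points = [], []
for val, pt in [((1, 2), 'a'), ((2, 1), 'b'), ((2, 2), 'c')]:
    vals, points = update_pareto_set(vals, points, val, pt)
print(vals, points)  # [(2, 2)] ['c']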
1c4564ebef61049797c74cdf7ecdeb24f8731ca0
| 2,471
|
py
|
Python
|
tests/test_marmiton.py
|
squat-house/recipe-scrapers
|
72d2f69196f95210d2ea248f3b5cb446f94fd2b2
|
[
"MIT"
] | 2
|
2020-07-28T15:12:10.000Z
|
2020-07-30T18:10:33.000Z
|
tests/test_marmiton.py
|
bfcarpio/recipe-scrapers
|
827ec444bc9d422a98c84c05cc4e4bcd3d084d51
|
[
"MIT"
] | 1
|
2022-01-08T10:49:17.000Z
|
2022-01-08T10:49:30.000Z
|
tests/test_marmiton.py
|
AlexRogalskiy/recipe-scrapers
|
ff378b3ba4ae7ff4cbc113ca13991f887c1c70e7
|
[
"MIT"
] | 1
|
2022-01-08T10:49:09.000Z
|
2022-01-08T10:49:09.000Z
|
from recipe_scrapers.marmiton import Marmiton
from tests import ScraperTest
class TestMarmitonScraper(ScraperTest):
scraper_class = Marmiton
def test_host(self):
self.assertEqual("marmiton.org", self.harvester_class.host())
def test_canonical_url(self):
self.assertEqual(
"https://www.marmiton.org/recettes/recette_ratatouille_23223.aspx",
self.harvester_class.canonical_url(),
)
def test_title(self):
self.assertEqual(self.harvester_class.title(), "Ratatouille")
def test_total_time(self):
self.assertEqual(80, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("4 personnes", self.harvester_class.yields())
def test_ingredients(self):
self.assertCountEqual(
[
"350 g d'aubergine",
"350 g de courgette",
"350 g de poivron de couleur rouge et vert",
"350 g d'oignon",
"500 g de tomate bien mûres",
"3 gousses d'ail",
"6 cuillères à soupe d'huile d'olive",
"1 brin de thym",
"1 feuille de laurier",
"poivre",
"sel",
],
self.harvester_class.ingredients(),
)
    def test_instructions(self):
        self.assertEqual(
"Coupez les tomates pelées en quartiers,\n"
"les aubergines et les courgettes en rondelles.\n"
"Emincez les poivrons en lamelles\n"
"et l'oignon en rouelles.\n"
"Chauffez 2 cuillères à soupe d'huile dans une poêle\n"
"et faites-y fondre les oignons et les poivrons.\n"
"Lorsqu'ils sont tendres, ajoutez les tomates, l'ail haché, le thym et le laurier.\n"
"Salez, poivrez et laissez mijoter doucement à couvert durant 45 minutes.\n"
"Pendant ce temps, préparez les aubergines et les courgettes. "
"Faites les cuire séparemment ou non dans l'huile d'olive pendant 15 minutes.\n"
"Vérifiez la cuisson des légumes pour qu'ils ne soient plus fermes. "
"Ajoutez les alors au mélange de tomates et prolongez la cuisson sur tout petit feu pendant 10 min.\n"
"Salez et poivrez si besoin.",
self.harvester_class.instructions(),
)
def test_ratings(self):
self.assertEqual(4.8, self.harvester_class.ratings())
| 38.015385
| 114
| 0.607446
|
from recipe_scrapers.marmiton import Marmiton
from tests import ScraperTest
class TestMarmitonScraper(ScraperTest):
scraper_class = Marmiton
def test_host(self):
self.assertEqual("marmiton.org", self.harvester_class.host())
def test_canonical_url(self):
self.assertEqual(
"https://www.marmiton.org/recettes/recette_ratatouille_23223.aspx",
self.harvester_class.canonical_url(),
)
def test_title(self):
self.assertEqual(self.harvester_class.title(), "Ratatouille")
def test_total_time(self):
self.assertEqual(80, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("4 personnes", self.harvester_class.yields())
def test_ingredients(self):
self.assertCountEqual(
[
"350 g d'aubergine",
"350 g de courgette",
"350 g de poivron de couleur rouge et vert",
"350 g d'oignon",
"500 g de tomate bien mûres",
"3 gousses d'ail",
"6 cuillères à soupe d'huile d'olive",
"1 brin de thym",
"1 feuille de laurier",
"poivre",
"sel",
],
self.harvester_class.ingredients(),
)
    def test_instructions(self):
        self.assertEqual(
"Coupez les tomates pelées en quartiers,\n"
"les aubergines et les courgettes en rondelles.\n"
"Emincez les poivrons en lamelles\n"
"et l'oignon en rouelles.\n"
"Chauffez 2 cuillères à soupe d'huile dans une poêle\n"
"et faites-y fondre les oignons et les poivrons.\n"
"Lorsqu'ils sont tendres, ajoutez les tomates, l'ail haché, le thym et le laurier.\n"
"Salez, poivrez et laissez mijoter doucement à couvert durant 45 minutes.\n"
"Pendant ce temps, préparez les aubergines et les courgettes. "
"Faites les cuire séparemment ou non dans l'huile d'olive pendant 15 minutes.\n"
"Vérifiez la cuisson des légumes pour qu'ils ne soient plus fermes. "
"Ajoutez les alors au mélange de tomates et prolongez la cuisson sur tout petit feu pendant 10 min.\n"
"Salez et poivrez si besoin.",
self.harvester_class.instructions(),
)
def test_ratings(self):
self.assertEqual(4.8, self.harvester_class.ratings())
| true
| true
|
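Outside the test harness, the same scraper can be checked interactively.
A sketch assuming network access and an older recipe_scrapers release that
exposes scrape_me (newer releases steer towards scrape_html instead):

from recipe_scrapers import scrape_me

scraper = scrape_me("https://www.marmiton.org/recettes/recette_ratatouille_23223.aspx")
print(scraper.title())       # "Ratatouille"
print(scraper.total_time())  # 80
print(scraper.yields())      # "4 personnes"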
1c4564ec45018750930b24c198003bce822d9fed
| 2,333
|
py
|
Python
|
script.py
|
jornix/Stavanger-school-learning-results
|
9974cac4ebb91ea51b0437f8b7750feac3049804
|
[
"MIT"
] | null | null | null |
script.py
|
jornix/Stavanger-school-learning-results
|
9974cac4ebb91ea51b0437f8b7750feac3049804
|
[
"MIT"
] | null | null | null |
script.py
|
jornix/Stavanger-school-learning-results
|
9974cac4ebb91ea51b0437f8b7750feac3049804
|
[
"MIT"
] | null | null | null |
import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
# Read csv and store in dataframe df
df = pd.read_csv("results.csv")
df.drop(["index"], axis=1).reset_index(drop=True)
# Separate fifth grade tests
femte_trinn = df[
(df["statistikk"] == "Nasjonale prøver 5. trinn") & (pd.isna(df["verdi"]) == False)
].reset_index(drop=True)
# Separate the different tests for fifth grade (engelsk, lesing, regning)
femte_trinn_engelsk = femte_trinn[
(femte_trinn["indikator_delskar"] == "Engelsk")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_lesing = femte_trinn[
(femte_trinn["indikator_delskar"] == "Lesing")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_regning = femte_trinn[
(femte_trinn["indikator_delskar"] == "Regning")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
# Set some seaborn aesthetic variables
sns.set_theme(style="ticks", color_codes=True)
sns.set_style("darkgrid")
sns.set_context("paper")
sns.set_palette("PiYG")
# calculate the boxplots and save the figures to files
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_engelsk, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_engelsk,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Engelsk")
plt.savefig("plots/boxplot_femte_trinn_engelsk.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_lesing, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_lesing,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Lesing")
plt.savefig("plots/boxplot_femte_trinn_lesing.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_regning, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_regning,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Regning")
plt.savefig("plots/boxplot_femte_trinn_regning.png")
# sns.despine(offset=10, trim=True)
plt.show()
| 29.910256
| 87
| 0.717531
|
import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
df = pd.read_csv("results.csv")
df.drop(["index"], axis=1).reset_index(drop=True)
femte_trinn = df[
(df["statistikk"] == "Nasjonale prøver 5. trinn") & (pd.isna(df["verdi"]) == False)
].reset_index(drop=True)
femte_trinn_engelsk = femte_trinn[
(femte_trinn["indikator_delskar"] == "Engelsk")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_lesing = femte_trinn[
(femte_trinn["indikator_delskar"] == "Lesing")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
femte_trinn_regning = femte_trinn[
(femte_trinn["indikator_delskar"] == "Regning")
& (femte_trinn["kjonn"] == "Begge kjønn")
].reset_index(drop=True)
sns.set_theme(style="ticks", color_codes=True)
sns.set_style("darkgrid")
sns.set_context("paper")
sns.set_palette("PiYG")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_engelsk, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_engelsk,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Engelsk")
plt.savefig("plots/boxplot_femte_trinn_engelsk.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_lesing, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_lesing,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Lesing")
plt.savefig("plots/boxplot_femte_trinn_lesing.png")
fig, axes = plt.subplots(figsize=(10, 15))
fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
sns.boxplot(data=femte_trinn_regning, x="verdi", y="enhetsnavn", palette="RdYlBu")
sns.stripplot(
data=femte_trinn_regning,
x="verdi",
y="enhetsnavn",
palette="PRGn",
hue="periode",
)
axes.set_title("Regning")
plt.savefig("plots/boxplot_femte_trinn_regning.png")
plt.show()
| true
| true
|
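The three plotting blocks in the script differ only in the subject label and
its DataFrame. Below is a sketch of the same figures produced from one loop;
it reuses the imports and the per-subject dataframes defined in the script
above, and the lower-cased subject doubles as the file-name suffix.

for subject, data in [("Engelsk", femte_trinn_engelsk),
                      ("Lesing", femte_trinn_lesing),
                      ("Regning", femte_trinn_regning)]:
    fig, axes = plt.subplots(figsize=(10, 15))
    fig.suptitle("Nasjonale prøver 5. trinn, spredning i resultater")
    sns.boxplot(data=data, x="verdi", y="enhetsnavn", palette="RdYlBu")
    sns.stripplot(data=data, x="verdi", y="enhetsnavn",
                  palette="PRGn", hue="periode")
    axes.set_title(subject)
    plt.savefig(f"plots/boxplot_femte_trinn_{subject.lower()}.png")
plt.show()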
1c4565b7ff36d1855854ec42200d3c17a32a5c55
| 1,561
|
py
|
Python
|
apps/wagtail/myblog/migrations/0002_auto_20210425_1951.py
|
aadrm/breakoutwagtail
|
cf4ce09153adf2b5e14f15ffbc82bda754d427b3
|
[
"MIT"
] | null | null | null |
apps/wagtail/myblog/migrations/0002_auto_20210425_1951.py
|
aadrm/breakoutwagtail
|
cf4ce09153adf2b5e14f15ffbc82bda754d427b3
|
[
"MIT"
] | null | null | null |
apps/wagtail/myblog/migrations/0002_auto_20210425_1951.py
|
aadrm/breakoutwagtail
|
cf4ce09153adf2b5e14f15ffbc82bda754d427b3
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.4 on 2021-04-25 19:51
import apps.wagtail.streams.blocks
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('myblog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='blogpage',
name='body_de',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
migrations.AddField(
model_name='blogpage',
name='body_en',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
]
| 55.75
| 511
| 0.695708
|
import apps.wagtail.streams.blocks
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('myblog', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='blogpage',
name='body_de',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
migrations.AddField(
model_name='blogpage',
name='body_en',
field=wagtail.core.fields.StreamField([('paragraph', wagtail.core.blocks.RichTextBlock()), ('image_text', wagtail.core.blocks.StructBlock([('reverse', wagtail.core.blocks.BooleanBlock(required=False)), ('text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])), ('mymaps', apps.wagtail.streams.blocks.MyMapsBlock()), ('table', apps.wagtail.streams.blocks.CustomTableBlock()), ('spacer', apps.wagtail.streams.blocks.SpacerBlock())], blank=True, null=True),
),
]
| true
| true
|
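A hedged reconstruction of the models.py side that would generate these two
AddField operations (Wagtail 2.x import paths, matching the migration). The
class name BlogPage is inferred from model_name='blogpage', and the custom
MyMaps, table and spacer blocks from apps.wagtail.streams.blocks are omitted
for brevity; the real app would append them to the block list.

from wagtail.core import blocks
from wagtail.core.fields import StreamField
from wagtail.core.models import Page
from wagtail.images.blocks import ImageChooserBlock

body_blocks = [
    ('paragraph', blocks.RichTextBlock()),
    ('image_text', blocks.StructBlock([
        ('reverse', blocks.BooleanBlock(required=False)),
        ('text', blocks.RichTextBlock()),
        ('image', ImageChooserBlock()),
    ])),
]

class BlogPage(Page):
    body_de = StreamField(body_blocks, blank=True, null=True)
    body_en = StreamField(body_blocks, blank=True, null=True)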
1c4565b95bce497665b69de99a26e49f66b582e5
| 416
|
py
|
Python
|
simple-backend/stave_backend/migrations/0002_document_ontology.py
|
mylibrar/stave
|
43145015253d0577dfc757419ad8b4fa06a04042
|
[
"Apache-2.0"
] | 35
|
2020-01-29T04:21:10.000Z
|
2021-12-13T01:44:28.000Z
|
simple-backend/stave_backend/migrations/0002_document_ontology.py
|
mylibrar/stave
|
43145015253d0577dfc757419ad8b4fa06a04042
|
[
"Apache-2.0"
] | 86
|
2020-04-17T16:36:13.000Z
|
2022-03-25T22:51:34.000Z
|
simple-backend/stave_backend/migrations/0002_document_ontology.py
|
mylibrar/stave
|
43145015253d0577dfc757419ad8b4fa06a04042
|
[
"Apache-2.0"
] | 18
|
2020-02-04T17:40:02.000Z
|
2021-06-17T07:11:42.000Z
|
# Generated by Django 3.0.2 on 2020-01-13 18:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stave_backend', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='document',
name='ontology',
field=models.TextField(default=''),
preserve_default=False,
),
]
| 20.8
| 47
| 0.586538
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stave_backend', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='document',
name='ontology',
field=models.TextField(default=''),
preserve_default=False,
),
]
| true
| true
|
1c4565d7e382b201d1238aef00abd33b71ffbf9f
| 3,402
|
py
|
Python
|
src/tests/config_test.py
|
dixonwhitmire/lib-kafka
|
a29ec606278ad1ed8c5ada418e45593fe71dbaec
|
[
"Apache-2.0"
] | null | null | null |
src/tests/config_test.py
|
dixonwhitmire/lib-kafka
|
a29ec606278ad1ed8c5ada418e45593fe71dbaec
|
[
"Apache-2.0"
] | 6
|
2021-11-03T15:17:29.000Z
|
2021-11-05T14:10:56.000Z
|
src/tests/config_test.py
|
dixonwhitmire/lib-kafka
|
a29ec606278ad1ed8c5ada418e45593fe71dbaec
|
[
"Apache-2.0"
] | 1
|
2021-11-01T12:21:11.000Z
|
2021-11-01T12:21:11.000Z
|
import os
import pytest
from pydantic import ValidationError
from lib_kafka import config as configuration
import importlib
from tests import resources_directory
@pytest.fixture(autouse=True)
def reset():
reset_env_vars()
def reset_env_vars():
for env_var in ("KAFKA_BROKER_CONFIG_FILE", "KAFKA_TOPIC_CONFIG_FILE"):
if env_var in os.environ:
del os.environ[env_var]
def get_sample_config_path(file_name):
return os.path.join(resources_directory, file_name)
def test_kafka_settings_success_object():
settings = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
enable_auto_commit=False,
ssl_ca_location='test-location'
)
assert settings is not None
def test_kafka_settings_success_env_file():
os.environ["KAFKA_BROKER_CONFIG_FILE"] = get_sample_config_path('kafka.env')
importlib.reload(configuration)
settings = configuration.KafkaSettings()
assert settings is not None
settings_dict = settings.dict(by_alias=True)
assert 'bootstrap.servers' in settings_dict.keys() and settings_dict['bootstrap.servers'] == 'localhost:9093'
assert 'group.id' in settings_dict.keys() and settings_dict['group.id'] == 'kafka-listener'
assert 'security.protocol' in settings_dict.keys() and settings_dict['security.protocol'] == 'PLAINTEXT'
assert 'ssl.ca.location' in settings_dict.keys() and settings_dict[
'ssl.ca.location'] == '/var/app/certs/kafka/tls.crt'
assert 'enable.auto.commit' in settings_dict.keys() and not settings_dict['enable.auto.commit']
def test_kafka_settings_failures():
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
)
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
enable_auto_commit=False
)
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id=32,
security_protocol='test_protocol',
enable_auto_commit=False,
ssl_ca_location='test-location'
)
def test_kafka_topics_success_parse():
topics = configuration.KafkaTopics.parse_file(get_sample_config_path('kafka-topic.json'))
assert topics is not None
assert len(topics.dict()['__root__']) == 3
def test_kafka_topic_success_object():
topic = configuration.KafkaTopic(
name='test-topic',
replication_factor=2,
partitions=2,
recreate_topic=False,
operation=configuration.OperationEnum.create
)
assert topic is not None
assert topic.name == 'test-topic'
def test_kafka_topic_failure_object():
with pytest.raises(KeyError):
topic = configuration.KafkaTopic(
name='test-topic',
replication_factor='non-int',
partitions=2,
recreate_topic=False,
operation=configuration.OperationEnum.create
)
| 32.4
| 113
| 0.683422
|
import os
import pytest
from pydantic import ValidationError
from lib_kafka import config as configuration
import importlib
from tests import resources_directory
@pytest.fixture(autouse=True)
def reset():
reset_env_vars()
def reset_env_vars():
for env_var in ("KAFKA_BROKER_CONFIG_FILE", "KAFKA_TOPIC_CONFIG_FILE"):
if env_var in os.environ:
del os.environ[env_var]
def get_sample_config_path(file_name):
return os.path.join(resources_directory, file_name)
def test_kafka_settings_success_object():
settings = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
enable_auto_commit=False,
ssl_ca_location='test-location'
)
assert settings is not None
def test_kafka_settings_success_env_file():
os.environ["KAFKA_BROKER_CONFIG_FILE"] = get_sample_config_path('kafka.env')
importlib.reload(configuration)
settings = configuration.KafkaSettings()
assert settings is not None
settings_dict = settings.dict(by_alias=True)
assert 'bootstrap.servers' in settings_dict.keys() and settings_dict['bootstrap.servers'] == 'localhost:9093'
assert 'group.id' in settings_dict.keys() and settings_dict['group.id'] == 'kafka-listener'
assert 'security.protocol' in settings_dict.keys() and settings_dict['security.protocol'] == 'PLAINTEXT'
assert 'ssl.ca.location' in settings_dict.keys() and settings_dict[
'ssl.ca.location'] == '/var/app/certs/kafka/tls.crt'
assert 'enable.auto.commit' in settings_dict.keys() and not settings_dict['enable.auto.commit']
def test_kafka_settings_failures():
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
)
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id='test_group_id',
security_protocol='test_protocol',
enable_auto_commit=False
)
with pytest.raises(ValidationError):
test_object = configuration.KafkaSettings(
bootstrap_servers='test_server',
group_id=32,
security_protocol='test_protocol',
enable_auto_commit=False,
ssl_ca_location='test-location'
)
def test_kafka_topics_success_parse():
topics = configuration.KafkaTopics.parse_file(get_sample_config_path('kafka-topic.json'))
assert topics is not None
assert len(topics.dict()['__root__']) == 3
def test_kafka_topic_success_object():
topic = configuration.KafkaTopic(
name='test-topic',
replication_factor=2,
partitions=2,
recreate_topic=False,
operation=configuration.OperationEnum.create
)
assert topic is not None
assert topic.name == 'test-topic'
def test_kafka_topic_failure_object():
with pytest.raises(KeyError):
topic = configuration.KafkaTopic(
name='test-topic',
replication_factor='non-int',
partitions=2,
recreate_topic=False,
operation=configuration.OperationEnum.create
)
| true
| true
|
1c4565e0dbfa4053de453a8497b1789732532024
| 1,386
|
py
|
Python
|
beorn_lib/version.py
|
PAntoine/beorn_lib
|
a5bb8859acfb136f33559b6ddbf3bb20f61bd310
|
[
"MIT"
] | null | null | null |
beorn_lib/version.py
|
PAntoine/beorn_lib
|
a5bb8859acfb136f33559b6ddbf3bb20f61bd310
|
[
"MIT"
] | null | null | null |
beorn_lib/version.py
|
PAntoine/beorn_lib
|
a5bb8859acfb136f33559b6ddbf3bb20f61bd310
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------------
#
# ,--.
# | |-. ,---. ,---. ,--.--.,--,--,
# | .-. '| .-. :| .-. || .--'| \
# | `-' |\ --.' '-' '| | | || |
# `---' `----' `---' `--' `--''--'
#
# file: version
# desc: This file contains the versions of the library.
#
# author: Peter Antoine
# date: 23/05/2015
#---------------------------------------------------------------------------------
# Copyright (c) 2015 Peter Antoine
# All rights Reserved.
# Released Under the MIT Licence
#---------------------------------------------------------------------------------
#---------------------------------------------------------------------------------
# Python Standard Header
#---------------------------------------------------------------------------------
__author__ = "Peter Antoine"
__copyright__ = "Copyright 2014-2021, Peter Antoine"
__credits__ = ["Peter Antoine"]
__license__ = "MIT"
__version__ = "1.5.0"
__maintainer__ = "Peter Antoine"
__email__ = "github@peterantoine.me.uk"
__url__ = "https://github.com/PAntoine/BeornLib"
__status__ = "Development"
# vim: ts=4 sw=4 noexpandtab nocin ai
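A common way to consume such a metadata module is to parse the version string at packaging time without importing the package; a small sketch (the file path is an assumption about the repo layout):

# Sketch: extract __version__ from beorn_lib/version.py without importing it,
# as a setup.py typically would (the path is assumed).
import re

def read_version(path="beorn_lib/version.py"):
    with open(path, encoding="utf-8") as handle:
        match = re.search(r'^__version__\s*=\s*"([^"]+)"', handle.read(), re.M)
    if match is None:
        raise RuntimeError("no __version__ found in " + path)
    return match.group(1)

print(read_version())  # expected to print 1.5.0 for the file above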
| 37.459459
| 82
| 0.328283
|
__author__ = "Peter Antoine"
__copyright__ = "Copyright 2014-2021, Peter Antoine"
__credits__ = ["Peter Antoine"]
__license__ = "MIT"
__version__ = "1.5.0"
__maintainer__ = "Peter Antoine"
__email__ = "github@peterantoine.me.uk"
__url__ = "https://github.com/PAntoine/BeornLib"
__status__ = "Development"
| true
| true
|
1c456664268f1c7c8973969844fb625e7b99c69c
| 6,122
|
py
|
Python
|
dmwmclient/restclient.py
|
FernandoGarzon/dmwmclient
|
aa69978b430f6626864718ddfa21951f2bb41d06
|
[
"BSD-3-Clause"
] | 1
|
2022-02-01T16:06:15.000Z
|
2022-02-01T16:06:15.000Z
|
dmwmclient/restclient.py
|
FernandoGarzon/dmwmclient
|
aa69978b430f6626864718ddfa21951f2bb41d06
|
[
"BSD-3-Clause"
] | 3
|
2020-03-04T23:34:39.000Z
|
2020-04-03T22:19:44.000Z
|
dmwmclient/restclient.py
|
nsmith-/dmwmclient
|
726a9400f5270e0521dc9d2c1bffed3c0af410a2
|
[
"BSD-3-Clause"
] | 4
|
2020-03-17T15:39:10.000Z
|
2021-06-10T22:51:11.000Z
|
import os
import logging
import json
import httpx
import asyncio
from lxml import etree
from . import __version__
logger = logging.getLogger(__name__)
def locate_proxycert():
"""Find a user proxy"""
path = os.getenv("X509_USER_PROXY")
if path is not None:
return path
path = "/tmp/x509up_u%d" % os.getuid()
if os.path.exists(path):
return path
return None
def _defaultcert():
"""Find a suitable user certificate from the usual locations
Preference is given to original user certificate over a proxy
as this is necessary for use with CERN SSO.
"""
path = (
os.path.expanduser("~/.globus/usercert.pem"),
os.path.expanduser("~/.globus/userkey.pem"),
)
if os.path.exists(path[0]) and os.path.exists(path[1]):
return path
path = locate_proxycert()
if path is not None:
return path
raise RuntimeError("Could not identify an appropriate default user certificate")
class RESTClient:
defaults = {
# Location of user x509 certificate, key pair
"usercert": _defaultcert(),
# Location of trusted x509 certificates
"certdir": os.getenv("X509_CERT_DIR", "/etc/grid-security/certificates"),
}
def __init__(self, usercert=None, certdir=None):
if usercert is None:
usercert = RESTClient.defaults["usercert"]
if certdir is None:
certdir = RESTClient.defaults["certdir"]
certdir = os.path.expanduser(certdir)
self._ssoevents = {}
self._client = httpx.AsyncClient(
cert=usercert,
verify=certdir,
timeout=httpx.Timeout(10.0, read_timeout=30.0),
headers=httpx.Headers({"User-Agent": f"python-dmwmclient/{__version__}"}),
)
async def cern_sso_check(self, host):
"""Check if this host already has an SSO action in progress, and wait for it"""
try:
await self._ssoevents[host].wait()
return True
except KeyError:
pass
return False
async def cern_sso_follow(self, result, host):
"""Follow CERN SSO redirect, returning the result of the original request"""
html = etree.HTML(result.content)
links = [
link
for link in html.xpath("//a")
if link.text == "Sign in using your CERN Certificate"
]
if len(links) == 1:
link = links.pop()
logger.debug("Running first-time CERN SSO sign-in routine")
self._ssoevents[host] = asyncio.Event()
url = result.url.join(link.attrib["href"])
result = await self._client.get(url)
if not result.status_code == 200:
logger.debug("Return content:\n" + result.text)
raise IOError(
"HTTP status code %d received while following SSO link to %r"
% (result.status_code, url)
)
html = etree.HTML(result.content)
url = result.url.join(html.xpath("body/form")[0].attrib["action"])
data = {
el.attrib["name"]: el.attrib["value"]
for el in html.xpath("body/form/input")
}
result = await self._client.post(url, data=data)
if not result.status_code == 200:
logger.debug("Return content:\n" + result.text)
raise IOError(
"HTTP status code %d received while posting to SSO link %r"
% (result.status_code, url)
)
logger.debug(
"Received SSO cookie for %s: %r"
% (host, dict(result.history[0].cookies))
)
self._ssoevents[host].set()
del self._ssoevents[host]
return result
form = html.xpath("body/form")
if len(form) == 1:
logger.debug("Following CERN SSO redirect")
url = result.url.join(form[0].attrib["action"])
data = {
el.attrib["name"]: el.attrib["value"]
for el in html.xpath("body/form/input")
}
result = await self._client.post(url, data=data)
logger.debug(
"Received SSO cookie for %s: %r"
% (host, dict(result.history[0].cookies))
)
return result
logger.debug("Invalid SSO login page content:\n" + result.text)
raise RuntimeError(
"Could not parse CERN SSO login page (no sign-in link or auto-redirect found)"
)
def build_request(self, **params):
return self._client.build_request(**params)
async def send(self, request, timeout=None, retries=1):
await self.cern_sso_check(request.url.host)
# Looking forward to https://github.com/encode/httpx/pull/784
while retries > 0:
try:
result = await self._client.send(request, timeout=timeout)
if result.status_code == 200 and result.url.host == "login.cern.ch":
if await self.cern_sso_check(request.url.host):
self._client.cookies.set_cookie_header(request)
continue
result = await self.cern_sso_follow(result, request.url.host)
return result
except httpx.TimeoutException:
                logger.warning(
"Timeout encountered while executing request %r" % request
)
retries -= 1
raise IOError(
"Exhausted %d retries while executing request %r" % (retries, request)
)
async def getjson(self, url, params=None, timeout=None, retries=1):
request = self.build_request(method="GET", url=url, params=params)
result = await self.send(request, timeout=timeout, retries=retries)
try:
return result.json()
except json.JSONDecodeError:
logging.debug("Result content: {result.text}")
raise IOError(f"Failed to decode json for request {request}")
| 37.329268
| 90
| 0.571219
|
import os
import logging
import json
import httpx
import asyncio
from lxml import etree
from . import __version__
logger = logging.getLogger(__name__)
def locate_proxycert():
path = os.getenv("X509_USER_PROXY")
if path is not None:
return path
path = "/tmp/x509up_u%d" % os.getuid()
if os.path.exists(path):
return path
return None
def _defaultcert():
path = (
os.path.expanduser("~/.globus/usercert.pem"),
os.path.expanduser("~/.globus/userkey.pem"),
)
if os.path.exists(path[0]) and os.path.exists(path[1]):
return path
path = locate_proxycert()
if path is not None:
return path
raise RuntimeError("Could not identify an appropriate default user certificate")
class RESTClient:
defaults = {
"usercert": _defaultcert(),
"certdir": os.getenv("X509_CERT_DIR", "/etc/grid-security/certificates"),
}
def __init__(self, usercert=None, certdir=None):
if usercert is None:
usercert = RESTClient.defaults["usercert"]
if certdir is None:
certdir = RESTClient.defaults["certdir"]
certdir = os.path.expanduser(certdir)
self._ssoevents = {}
self._client = httpx.AsyncClient(
cert=usercert,
verify=certdir,
timeout=httpx.Timeout(10.0, read_timeout=30.0),
headers=httpx.Headers({"User-Agent": f"python-dmwmclient/{__version__}"}),
)
async def cern_sso_check(self, host):
try:
await self._ssoevents[host].wait()
return True
except KeyError:
pass
return False
async def cern_sso_follow(self, result, host):
html = etree.HTML(result.content)
links = [
link
for link in html.xpath("//a")
if link.text == "Sign in using your CERN Certificate"
]
if len(links) == 1:
link = links.pop()
logger.debug("Running first-time CERN SSO sign-in routine")
self._ssoevents[host] = asyncio.Event()
url = result.url.join(link.attrib["href"])
result = await self._client.get(url)
if not result.status_code == 200:
logger.debug("Return content:\n" + result.text)
raise IOError(
"HTTP status code %d received while following SSO link to %r"
% (result.status_code, url)
)
html = etree.HTML(result.content)
url = result.url.join(html.xpath("body/form")[0].attrib["action"])
data = {
el.attrib["name"]: el.attrib["value"]
for el in html.xpath("body/form/input")
}
result = await self._client.post(url, data=data)
if not result.status_code == 200:
logger.debug("Return content:\n" + result.text)
raise IOError(
"HTTP status code %d received while posting to SSO link %r"
% (result.status_code, url)
)
logger.debug(
"Received SSO cookie for %s: %r"
% (host, dict(result.history[0].cookies))
)
self._ssoevents[host].set()
del self._ssoevents[host]
return result
form = html.xpath("body/form")
if len(form) == 1:
logger.debug("Following CERN SSO redirect")
url = result.url.join(form[0].attrib["action"])
data = {
el.attrib["name"]: el.attrib["value"]
for el in html.xpath("body/form/input")
}
result = await self._client.post(url, data=data)
logger.debug(
"Received SSO cookie for %s: %r"
% (host, dict(result.history[0].cookies))
)
return result
logger.debug("Invalid SSO login page content:\n" + result.text)
raise RuntimeError(
"Could not parse CERN SSO login page (no sign-in link or auto-redirect found)"
)
def build_request(self, **params):
return self._client.build_request(**params)
async def send(self, request, timeout=None, retries=1):
await self.cern_sso_check(request.url.host)
while retries > 0:
try:
result = await self._client.send(request, timeout=timeout)
if result.status_code == 200 and result.url.host == "login.cern.ch":
if await self.cern_sso_check(request.url.host):
self._client.cookies.set_cookie_header(request)
continue
result = await self.cern_sso_follow(result, request.url.host)
return result
except httpx.TimeoutException:
                logger.warning(
"Timeout encountered while executing request %r" % request
)
retries -= 1
raise IOError(
"Exhausted %d retries while executing request %r" % (retries, request)
)
async def getjson(self, url, params=None, timeout=None, retries=1):
request = self.build_request(method="GET", url=url, params=params)
result = await self.send(request, timeout=timeout, retries=retries)
try:
return result.json()
except json.JSONDecodeError:
logging.debug("Result content: {result.text}")
raise IOError(f"Failed to decode json for request {request}")
| true
| true
|
1c4566b1e1aee1b8f0aa8bfb88df64b92879f477
| 7,636
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20180301/get_profile.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20180301/get_profile.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20180301/get_profile.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetProfileResult',
'AwaitableGetProfileResult',
'get_profile',
]
@pulumi.output_type
class GetProfileResult:
"""
Class representing a Traffic Manager profile.
"""
def __init__(__self__, dns_config=None, endpoints=None, id=None, location=None, monitor_config=None, name=None, profile_status=None, tags=None, traffic_routing_method=None, traffic_view_enrollment_status=None, type=None):
if dns_config and not isinstance(dns_config, dict):
raise TypeError("Expected argument 'dns_config' to be a dict")
pulumi.set(__self__, "dns_config", dns_config)
if endpoints and not isinstance(endpoints, list):
raise TypeError("Expected argument 'endpoints' to be a list")
pulumi.set(__self__, "endpoints", endpoints)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if monitor_config and not isinstance(monitor_config, dict):
raise TypeError("Expected argument 'monitor_config' to be a dict")
pulumi.set(__self__, "monitor_config", monitor_config)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if profile_status and not isinstance(profile_status, str):
raise TypeError("Expected argument 'profile_status' to be a str")
pulumi.set(__self__, "profile_status", profile_status)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if traffic_routing_method and not isinstance(traffic_routing_method, str):
raise TypeError("Expected argument 'traffic_routing_method' to be a str")
pulumi.set(__self__, "traffic_routing_method", traffic_routing_method)
if traffic_view_enrollment_status and not isinstance(traffic_view_enrollment_status, str):
raise TypeError("Expected argument 'traffic_view_enrollment_status' to be a str")
pulumi.set(__self__, "traffic_view_enrollment_status", traffic_view_enrollment_status)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="dnsConfig")
def dns_config(self) -> Optional['outputs.DnsConfigResponse']:
"""
The DNS settings of the Traffic Manager profile.
"""
return pulumi.get(self, "dns_config")
@property
@pulumi.getter
def endpoints(self) -> Optional[Sequence['outputs.EndpointResponse']]:
"""
The list of endpoints in the Traffic Manager profile.
"""
return pulumi.get(self, "endpoints")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficManagerProfiles/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
The Azure Region where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="monitorConfig")
def monitor_config(self) -> Optional['outputs.MonitorConfigResponse']:
"""
The endpoint monitoring settings of the Traffic Manager profile.
"""
return pulumi.get(self, "monitor_config")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="profileStatus")
def profile_status(self) -> Optional[str]:
"""
The status of the Traffic Manager profile.
"""
return pulumi.get(self, "profile_status")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="trafficRoutingMethod")
def traffic_routing_method(self) -> Optional[str]:
"""
The traffic routing method of the Traffic Manager profile.
"""
return pulumi.get(self, "traffic_routing_method")
@property
@pulumi.getter(name="trafficViewEnrollmentStatus")
def traffic_view_enrollment_status(self) -> Optional[str]:
"""
        Indicates whether Traffic View is 'Enabled' or 'Disabled' for the Traffic Manager profile. Null indicates 'Disabled'. Enabling this feature will increase the cost of the Traffic Manager profile.
"""
return pulumi.get(self, "traffic_view_enrollment_status")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The type of the resource. Ex- Microsoft.Network/trafficManagerProfiles.
"""
return pulumi.get(self, "type")
class AwaitableGetProfileResult(GetProfileResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetProfileResult(
dns_config=self.dns_config,
endpoints=self.endpoints,
id=self.id,
location=self.location,
monitor_config=self.monitor_config,
name=self.name,
profile_status=self.profile_status,
tags=self.tags,
traffic_routing_method=self.traffic_routing_method,
traffic_view_enrollment_status=self.traffic_view_enrollment_status,
type=self.type)
def get_profile(profile_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetProfileResult:
"""
Class representing a Traffic Manager profile.
:param str profile_name: The name of the Traffic Manager profile.
:param str resource_group_name: The name of the resource group containing the Traffic Manager profile.
"""
__args__ = dict()
__args__['profileName'] = profile_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20180301:getProfile', __args__, opts=opts, typ=GetProfileResult).value
return AwaitableGetProfileResult(
dns_config=__ret__.dns_config,
endpoints=__ret__.endpoints,
id=__ret__.id,
location=__ret__.location,
monitor_config=__ret__.monitor_config,
name=__ret__.name,
profile_status=__ret__.profile_status,
tags=__ret__.tags,
traffic_routing_method=__ret__.traffic_routing_method,
traffic_view_enrollment_status=__ret__.traffic_view_enrollment_status,
type=__ret__.type)
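A short usage sketch inside a Pulumi program; the profile and resource group names are placeholders:

# Usage sketch (names are placeholders; must run inside a Pulumi program):
import pulumi

result = get_profile(profile_name="my-tm-profile",
                     resource_group_name="my-resource-group")
pulumi.export("trafficRoutingMethod", result.traffic_routing_method)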
| 38.761421
| 225
| 0.667234
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetProfileResult',
'AwaitableGetProfileResult',
'get_profile',
]
@pulumi.output_type
class GetProfileResult:
def __init__(__self__, dns_config=None, endpoints=None, id=None, location=None, monitor_config=None, name=None, profile_status=None, tags=None, traffic_routing_method=None, traffic_view_enrollment_status=None, type=None):
if dns_config and not isinstance(dns_config, dict):
raise TypeError("Expected argument 'dns_config' to be a dict")
pulumi.set(__self__, "dns_config", dns_config)
if endpoints and not isinstance(endpoints, list):
raise TypeError("Expected argument 'endpoints' to be a list")
pulumi.set(__self__, "endpoints", endpoints)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if monitor_config and not isinstance(monitor_config, dict):
raise TypeError("Expected argument 'monitor_config' to be a dict")
pulumi.set(__self__, "monitor_config", monitor_config)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if profile_status and not isinstance(profile_status, str):
raise TypeError("Expected argument 'profile_status' to be a str")
pulumi.set(__self__, "profile_status", profile_status)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if traffic_routing_method and not isinstance(traffic_routing_method, str):
raise TypeError("Expected argument 'traffic_routing_method' to be a str")
pulumi.set(__self__, "traffic_routing_method", traffic_routing_method)
if traffic_view_enrollment_status and not isinstance(traffic_view_enrollment_status, str):
raise TypeError("Expected argument 'traffic_view_enrollment_status' to be a str")
pulumi.set(__self__, "traffic_view_enrollment_status", traffic_view_enrollment_status)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="dnsConfig")
def dns_config(self) -> Optional['outputs.DnsConfigResponse']:
return pulumi.get(self, "dns_config")
@property
@pulumi.getter
def endpoints(self) -> Optional[Sequence['outputs.EndpointResponse']]:
return pulumi.get(self, "endpoints")
@property
@pulumi.getter
def id(self) -> Optional[str]:
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
return pulumi.get(self, "location")
@property
@pulumi.getter(name="monitorConfig")
def monitor_config(self) -> Optional['outputs.MonitorConfigResponse']:
return pulumi.get(self, "monitor_config")
@property
@pulumi.getter
def name(self) -> Optional[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="profileStatus")
def profile_status(self) -> Optional[str]:
return pulumi.get(self, "profile_status")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="trafficRoutingMethod")
def traffic_routing_method(self) -> Optional[str]:
return pulumi.get(self, "traffic_routing_method")
@property
@pulumi.getter(name="trafficViewEnrollmentStatus")
def traffic_view_enrollment_status(self) -> Optional[str]:
return pulumi.get(self, "traffic_view_enrollment_status")
@property
@pulumi.getter
def type(self) -> Optional[str]:
return pulumi.get(self, "type")
class AwaitableGetProfileResult(GetProfileResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetProfileResult(
dns_config=self.dns_config,
endpoints=self.endpoints,
id=self.id,
location=self.location,
monitor_config=self.monitor_config,
name=self.name,
profile_status=self.profile_status,
tags=self.tags,
traffic_routing_method=self.traffic_routing_method,
traffic_view_enrollment_status=self.traffic_view_enrollment_status,
type=self.type)
def get_profile(profile_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetProfileResult:
__args__ = dict()
__args__['profileName'] = profile_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20180301:getProfile', __args__, opts=opts, typ=GetProfileResult).value
return AwaitableGetProfileResult(
dns_config=__ret__.dns_config,
endpoints=__ret__.endpoints,
id=__ret__.id,
location=__ret__.location,
monitor_config=__ret__.monitor_config,
name=__ret__.name,
profile_status=__ret__.profile_status,
tags=__ret__.tags,
traffic_routing_method=__ret__.traffic_routing_method,
traffic_view_enrollment_status=__ret__.traffic_view_enrollment_status,
type=__ret__.type)
| true
| true
|
1c45678f9f69f1c3e03546028ce022e51687f2b5
| 48,532
|
py
|
Python
|
hytra/core/hypothesesgraph.py
|
m-novikov/hytra
|
0dc28deaa2571fa8bea63ca178f0e53cc1cd7508
|
[
"MIT"
] | null | null | null |
hytra/core/hypothesesgraph.py
|
m-novikov/hytra
|
0dc28deaa2571fa8bea63ca178f0e53cc1cd7508
|
[
"MIT"
] | null | null | null |
hytra/core/hypothesesgraph.py
|
m-novikov/hytra
|
0dc28deaa2571fa8bea63ca178f0e53cc1cd7508
|
[
"MIT"
] | null | null | null |
import logging
import copy
import networkx as nx
import numpy as np
from sklearn.neighbors import KDTree
import hytra.core.jsongraph
from hytra.core.jsongraph import negLog, listify
from hytra.util.progressbar import DefaultProgressVisitor
logger = logging.getLogger(__name__)
def getTraxelFeatureVector(traxel, featureName, maxNumDimensions=3):
"""
extract a feature vector from a traxel
"""
result = []
for i in range(maxNumDimensions):
try:
result.append(traxel.get_feature_value(str(featureName), i))
except:
if i == 0:
logger.error(
f"Error when accessing feature {featureName}[{i}] for "
f"traxel (Id={traxel.Id},Timestep={traxel.Timestep})"
)
logger.error(traxel.print_available_features())
raise Exception
else:
logger.error(
f"Error: Classifier was trained with less merger than maxNumObjects {maxNumDimensions}."
)
raise Exception
return result
class NodeMap:
"""
To access per node features of the hypotheses graph,
this node map provides the same interface as pgmlink's NodeMaps
"""
def __init__(self, graph: nx.DiGraph, attributeName):
assert isinstance(graph, nx.DiGraph), "Expecting the graph to be directed"
self.__graph = graph
self.__attributeName = attributeName
def __getitem__(self, key):
return self.__graph.nodes[key][self.__attributeName]
class HypothesesGraph:
"""
Replacement for pgmlink's hypotheses graph,
with a similar API so it can be used as drop-in replacement.
Internally it uses [networkx](http://networkx.github.io/) to construct the graph.
Use the insertEnergies() method to populate the nodes and arcs with the energies for different
    configurations (according to DPCT's JSON style), derived from given probability generation functions.
    **Notes:** `self._graph.nodes` are indexed by tuples (int(timestep), int(id)), and contain either a
single `'traxel'` attribute, or a list of traxels in `'tracklet'`.
Nodes also get a unique ID assigned once they are added to the graph.
"""
def __init__(self):
self._graph = nx.DiGraph()
self.withTracklets = False
self.allowLengthOneTracks = True
self._nextNodeUuid = 0
self.progressVisitor = DefaultProgressVisitor()
def nodeIterator(self):
return self._graph.nodes()
def arcIterator(self):
return self._graph.edges()
def countNodes(self):
return self._graph.number_of_nodes()
def countArcs(self):
return self._graph.number_of_edges()
def hasNode(self, node):
return self._graph.has_node(node)
def hasEdge(self, u, v):
return self._graph.has_edge(u, v)
@staticmethod
def source(edge):
return edge[0]
@staticmethod
def target(edge):
return edge[1]
def _findNearestNeighbors(
self, kdtreeObjectPair, traxel, numNeighbors, maxNeighborDist
):
"""
Return a list of object IDs which are the 'numNeighbors' closest elements
in the kdtree less than maxNeighborDist away of the traxel.
"""
kdtree, objectIdList = kdtreeObjectPair
if len(objectIdList) <= numNeighbors:
return objectIdList
distances, neighbors = kdtree.query(
[self._extractCenter(traxel)], k=numNeighbors, return_distance=True
)
return [
objectIdList[index]
for distance, index in zip(distances[0], neighbors[0])
if distance < maxNeighborDist
]
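    # Note on the query above: sklearn's KDTree.query returns a (distances,
    # indices) pair with one row per query point; since a single center is
    # queried, only row 0 of each array is used, and the returned indices are
    # positions in the tree's build order, hence the objectIdList lookup to
    # map them back to object ids.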
def _extractCenter(self, traxel):
try:
# python probabilityGenerator
if "com" in traxel.Features:
return traxel.Features["com"]
else:
return traxel.Features["RegionCenter"]
except:
# C++ pgmlink probabilityGenerator
try:
return getTraxelFeatureVector(traxel, "com")
except:
try:
return getTraxelFeatureVector(traxel, "RegionCenter")
except:
raise ValueError(
"given traxel (t={},id={}) does not have "
'"com" or "RegionCenter"'.format(traxel.Timestep, traxel.Id)
)
def _traxelMightDivide(self, traxel, divisionThreshold):
assert "divProb" in traxel.Features
return traxel.Features["divProb"][0] > divisionThreshold
def _buildFrameKdTree(self, traxelDict):
"""
Collect the centers of all traxels and their ids of this frame's traxels.
Then build a kdtree and return (kdtree, listOfObjectIdsInFrame), where the second argument
is needed to decode the object id of the nearest neighbors in _findNearestNeighbors().
"""
objectIdList = []
features = []
for obj, traxel in traxelDict.items():
if obj == 0:
continue
objectIdList.append(obj)
features.append(list(self._extractCenter(traxel)))
return (KDTree(features, metric="euclidean"), objectIdList)
def _addNodesForFrame(self, frame, traxelDict):
"""
Insert nodes for all objects in this frame, with the attribute "traxel"
"""
for obj, traxel in traxelDict.items():
if obj == 0:
continue
self._graph.add_node((frame, obj), traxel=traxel, id=self._nextNodeUuid)
self._nextNodeUuid += 1
def addNodeFromTraxel(self, traxel, **kwargs):
"""
Insert a single node specified by a traxel.
All keyword arguments are passed to the node as well.
"""
assert traxel is not None
assert not self.withTracklets
self._graph.add_node(
(traxel.Timestep, traxel.Id), traxel=traxel, id=self._nextNodeUuid, **kwargs
)
self._nextNodeUuid += 1
def buildFromProbabilityGenerator(
self,
probabilityGenerator,
maxNeighborDist=200,
numNearestNeighbors=1,
forwardBackwardCheck=True,
withDivisions=True,
divisionThreshold=0.1,
skipLinks=1,
):
"""
Takes a python probabilityGenerator containing traxel features and finds probable links between frames.
        Builds a kdTree per frame to find the 'numNearestNeighbors' closest neighbors and adds the nodes. In the same
        iteration it adds links between nodes that are up to 'skipLinks' frames apart.
"""
assert probabilityGenerator is not None
assert len(probabilityGenerator.TraxelsPerFrame) > 0
assert skipLinks > 0
def checkNodeWhileAddingLinks(frame, obj):
if (frame, obj) not in self._graph:
logger.warning(
"Adding node ({}, {}) when setting up links".format(frame, obj)
)
kdTreeFrames = [None] * (skipLinks + 1)
# len(probabilityGenerator.TraxelsPerFrame.keys()) is NOT an indicator for the total number of frames,
# because an empty frame does not create a key in the dictionary. E.g. for one frame in the middle of the
# dataset, we won't access the last one.
# Idea: take the max key in the dict. Remember, frame numbering starts with 0.
frameMax = max(probabilityGenerator.TraxelsPerFrame.keys())
frameMin = min(probabilityGenerator.TraxelsPerFrame.keys())
numFrames = frameMax - frameMin + 1
self.progressVisitor.showState("Probability Generator")
countFrames = 0
for frame in range(numFrames):
countFrames += 1
self.progressVisitor.showProgress(countFrames / float(numFrames))
if frame > 0:
del kdTreeFrames[0] # this is the current frame
if (
frame + skipLinks < numFrames
and frameMin + frame + skipLinks
in probabilityGenerator.TraxelsPerFrame.keys()
):
kdTreeFrames.append(
self._buildFrameKdTree(
probabilityGenerator.TraxelsPerFrame[
frameMin + frame + skipLinks
]
)
)
self._addNodesForFrame(
frameMin + frame + skipLinks,
probabilityGenerator.TraxelsPerFrame[
frameMin + frame + skipLinks
],
)
else:
for i in range(0, skipLinks + 1):
if (
frameMin + frame + i
in probabilityGenerator.TraxelsPerFrame.keys()
): # empty frame
kdTreeFrames[i] = self._buildFrameKdTree(
probabilityGenerator.TraxelsPerFrame[frameMin + frame + i]
)
self._addNodesForFrame(
frameMin + frame + i,
probabilityGenerator.TraxelsPerFrame[frameMin + frame + i],
)
# find forward links
if (
frameMin + frame in probabilityGenerator.TraxelsPerFrame.keys()
): # 'frame' could be empty
for obj, traxel in probabilityGenerator.TraxelsPerFrame[
frameMin + frame
].items():
divisionPreservingNumNearestNeighbors = numNearestNeighbors
if (
divisionPreservingNumNearestNeighbors < 2
and withDivisions
and self._traxelMightDivide(traxel, divisionThreshold)
):
divisionPreservingNumNearestNeighbors = 2
for i in range(1, skipLinks + 1):
if (
frame + i < numFrames
and frameMin + frame + i
in probabilityGenerator.TraxelsPerFrame.keys()
):
neighbors = self._findNearestNeighbors(
kdTreeFrames[i],
traxel,
divisionPreservingNumNearestNeighbors,
maxNeighborDist,
)
# type(neighbors) is list
for n in neighbors:
edge_start = (frameMin + frame, obj)
edge_end = (frameMin + frame + i, n)
checkNodeWhileAddingLinks(*edge_start)
checkNodeWhileAddingLinks(*edge_end)
self._graph.add_edge(edge_start, edge_end)
self._graph.edges[edge_start, edge_end][
"src"
] = self._graph.nodes[edge_start]["id"]
self._graph.edges[edge_start, edge_end][
"dest"
] = self._graph.nodes[edge_end]["id"]
# find backward links
if forwardBackwardCheck:
for i in range(1, skipLinks + 1):
if frame + i < numFrames:
if (
frameMin + frame + i
in probabilityGenerator.TraxelsPerFrame.keys()
): # empty frame
for obj, traxel in probabilityGenerator.TraxelsPerFrame[
frameMin + frame + i
].items():
if kdTreeFrames[0] is not None:
neighbors = self._findNearestNeighbors(
kdTreeFrames[0],
traxel,
numNearestNeighbors,
maxNeighborDist,
)
for n in neighbors:
edge_start = (frameMin + frame, n)
edge_end = (frameMin + frame + i, obj)
checkNodeWhileAddingLinks(*edge_start)
checkNodeWhileAddingLinks(*edge_end)
self._graph.add_edge(edge_start, edge_end)
self._graph.edges[edge_start, edge_end][
"src"
] = self._graph.nodes[edge_start]["id"]
self._graph.edges[edge_start, edge_end][
"dest"
] = self._graph.nodes[edge_end]["id"]
def generateTrackletGraph(self):
"""
**Return** a new hypotheses graph where chains of detections with only one possible
incoming/outgoing transition are contracted into one node in the graph.
The returned graph will have `withTracklets` set to `True`!
The `'tracklet'` node map contains a list of traxels that each node represents.
"""
logger.info("generating tracklet graph...")
tracklet_graph = copy.copy(self)
tracklet_graph._graph = tracklet_graph._graph.copy()
tracklet_graph.withTracklets = True
tracklet_graph.referenceTraxelGraph = self
tracklet_graph.progressVisitor = self.progressVisitor
self.progressVisitor.showState("Initializing Tracklet Graph")
# initialize tracklet map to contain a list of only one traxel per node
countNodes = 0
numNodes = tracklet_graph.countNodes()
for node in tracklet_graph._graph.nodes():
countNodes += 1
self.progressVisitor.showProgress(countNodes / float(numNodes))
tracklet_graph._graph.nodes[node]["tracklet"] = [
tracklet_graph._graph.nodes[node]["traxel"]
]
del tracklet_graph._graph.nodes[node]["traxel"]
# set up a list of links that indicates whether the target's in- and source's out-degree
# are one, meaning the edge can be contracted
links_to_be_contracted = []
node_remapping = {}
self.progressVisitor.showState("Finding Tracklets in Graph")
countEdges = 0
numEdges = tracklet_graph.countArcs()
for edge in tracklet_graph._graph.edges():
countEdges += 1
self.progressVisitor.showProgress(countEdges / float(numEdges))
if (
tracklet_graph._graph.out_degree(edge[0]) == 1
and tracklet_graph._graph.in_degree(edge[1]) == 1
):
links_to_be_contracted.append(edge)
for i in [0, 1]:
node_remapping[edge[i]] = edge[i]
# apply edge contraction
self.progressVisitor.showState("Contracting Edges in Tracklet Graph")
countLinks = 0
numLinks = len(links_to_be_contracted)
for edge in links_to_be_contracted:
countLinks += 1
self.progressVisitor.showProgress(countLinks / float(numLinks))
src = node_remapping[edge[0]]
dest = node_remapping[edge[1]]
if (
tracklet_graph._graph.in_degree(src) == 0
and tracklet_graph._graph.out_degree(dest) == 0
):
# if this tracklet would contract to a single node without incoming or outgoing edges,
# then do NOT contract, as our tracking cannot handle length-one-tracks
continue
tracklet_graph._graph.nodes[src]["tracklet"].extend(
tracklet_graph._graph.nodes[dest]["tracklet"]
)
# duplicate out arcs with new source
for out_edge in tracklet_graph._graph.out_edges(dest):
tracklet_graph._graph.add_edge(src, out_edge[1])
# adjust node remapping to point to new source for all contracted traxels
for t in tracklet_graph._graph.nodes[dest]["tracklet"]:
node_remapping[(t.Timestep, t.Id)] = src
tracklet_graph._graph.remove_node(dest)
logger.info(
"tracklet graph has {} nodes and {} edges (before {},{})".format(
tracklet_graph.countNodes(),
tracklet_graph.countArcs(),
self.countNodes(),
self.countArcs(),
)
)
return tracklet_graph
def getNodeTraxelMap(self):
return NodeMap(self._graph, "traxel")
def getNodeTrackletMap(self):
return NodeMap(self._graph, "tracklet")
def insertEnergies(
self,
maxNumObjects,
detectionProbabilityFunc,
transitionProbabilityFunc,
boundaryCostMultiplierFunc,
divisionProbabilityFunc,
skipLinksBias,
):
"""
Insert energies for detections, divisions and links into the hypotheses graph,
by transforming the probabilities for certain
events (given by the `*ProbabilityFunc`-functions per traxel) into energies. If the given graph
contained tracklets (`self.withTracklets is True`), then also the probabilities over all contained traxels will be
accumulated for those nodes in the graph.
The energies are stored in the networkx graph under the following attribute names (to match the format for solvers):
* detection energies: `self._graph.nodes[n]['features']`
* division energies: `self._graph.nodes[n]['divisionFeatures']`
* appearance energies: `self._graph.nodes[n]['appearanceFeatures']`
* disappearance energies: `self._graph.nodes[n]['disappearanceFeatures']`
* transition energies: `self._graph.edges[src][dest]['features']`
* additionally we also store the timestep (range for traxels) per node as `timestep` attribute
** Parameters: **
        * `maxNumObjects`: the max number of objects per detection
* `detectionProbabilityFunc`: should take a traxel and return its detection probabilities
([prob0objects, prob1object,...])
* `transitionProbabilityFunc`: should take two traxels and return this link's probabilities
([prob0objectsInTransition, prob1objectsInTransition,...])
        * `boundaryCostMultiplierFunc`: should take a traxel and a boolean that is true when asking for an appearance cost multiplier,
          false for disappearance, and return a scalar multiplier between 0 and 1 for the
          appearance/disappearance cost that depends on the traxel's distance to the spatial and temporal boundary
* `divisionProbabilityFunc`: should take a traxel and return its division probabilities ([probNoDiv, probDiv])
"""
numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
self.progressVisitor.showState("Inserting energies")
# insert detection probabilities for all detections (and some also get a div probability)
countElements = 0
for n in self._graph.nodes():
countElements += 1
if not self.withTracklets:
# only one traxel, but make it a list so everything below works the same
traxels = [self._graph.nodes[n]["traxel"]]
else:
traxels = self._graph.nodes[n]["tracklet"]
# accumulate features over all contained traxels
previousTraxel = None
detectionFeatures = np.zeros(maxNumObjects + 1)
for t in traxels:
detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
if previousTraxel is not None:
detectionFeatures += np.array(
negLog(transitionProbabilityFunc(previousTraxel, t))
)
previousTraxel = t
detectionFeatures = listify(list(detectionFeatures))
# division only if probability is big enough
divisionFeatures = divisionProbabilityFunc(traxels[-1])
if divisionFeatures is not None:
divisionFeatures = listify(negLog(divisionFeatures))
# appearance/disappearance
appearanceFeatures = listify(
[0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects
)
disappearanceFeatures = listify(
[0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects
)
self._graph.nodes[n]["features"] = detectionFeatures
if divisionFeatures is not None:
self._graph.nodes[n]["divisionFeatures"] = divisionFeatures
self._graph.nodes[n]["appearanceFeatures"] = appearanceFeatures
self._graph.nodes[n]["disappearanceFeatures"] = disappearanceFeatures
self._graph.nodes[n]["timestep"] = [
traxels[0].Timestep,
traxels[-1].Timestep,
]
self.progressVisitor.showProgress(countElements / float(numElements))
# insert transition probabilities for all links
for a in self._graph.edges():
countElements += 1
self.progressVisitor.showProgress(countElements / float(numElements))
if not self.withTracklets:
srcTraxel = self._graph.nodes[self.source(a)]["traxel"]
destTraxel = self._graph.nodes[self.target(a)]["traxel"]
else:
srcTraxel = self._graph.nodes[self.source(a)]["tracklet"][
-1
] # src is last of the traxels in source tracklet
destTraxel = self._graph.nodes[self.target(a)]["tracklet"][
0
] # dest is first of traxels in destination tracklet
features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))
            # add a penalty for skip links: since we do not want these edges to be taken preferentially, we bias their energy by the frame gap (currently hard-coded, could become a parameter)
frame_gap = destTraxel.Timestep - srcTraxel.Timestep
# 1. method
if frame_gap > 1:
features[1][0] = features[1][0] + skipLinksBias * frame_gap
# # 2. method
# # introduce a new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first links and
# # [[6], [15]] -> [[23, 6], [23, 15]] for second links, and so on for 3rd order links
# # !!! this will introduce a new weight in the weight.json file. For the 2nd link, comes in 2nd row and so on.
# # drawback: did not manage to adjust parameter to get sensible results.
# for feat in features:
# for i in range(frame_gap):
# feat.append(23)
# if frame_gap > 1:
# feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]
self._graph.edges[a[0], a[1]]["src"] = self._graph.nodes[a[0]]["id"]
self._graph.edges[a[0], a[1]]["dest"] = self._graph.nodes[a[1]]["id"]
self._graph.edges[a[0], a[1]]["features"] = features
def getMappingsBetweenUUIDsAndTraxels(self):
"""
Extract the mapping from UUID to traxel and vice versa from the networkx graph.
** Returns: a tuple of **
* `traxelIdPerTimestepToUniqueIdMap`: a dictionary of the structure `{str(timestep):{str(labelimageId):int(uuid),
str(labelimageId):int(uuid), ...}, str(nextTimestep):{}, ...}`
* `uuidToTraxelMap`: a dictionary with keys = int(uuid), values = list(of timestep-Id-tuples (int(Timestep), int(Id)))
"""
uuidToTraxelMap = {}
traxelIdPerTimestepToUniqueIdMap = {}
for n in self._graph.nodes():
uuid = self._graph.nodes[n]["id"]
traxels = []
if self.withTracklets:
traxels = self._graph.nodes[n]["tracklet"]
else:
traxels = [self._graph.nodes[n]["traxel"]]
uuidToTraxelMap[uuid] = [(t.Timestep, t.Id) for t in traxels]
for t in uuidToTraxelMap[uuid]:
traxelIdPerTimestepToUniqueIdMap.setdefault(str(t[0]), {})[
str(t[1])
] = uuid
# sort the list of traxels per UUID by their timesteps
for v in uuidToTraxelMap.values():
v.sort(key=lambda timestepIdTuple: timestepIdTuple[0])
return traxelIdPerTimestepToUniqueIdMap, uuidToTraxelMap
def toTrackingGraph(self, noFeatures=False):
"""
Create a dictionary representation of this graph which can be passed to the solvers directly.
The resulting graph (=model) is wrapped within a `hytra.jsongraph.JsonTrackingGraph` structure for convenience.
If `noFeatures` is `True`, then only the structure of the graph will be exported.
"""
requiredNodeAttribs = ["id"]
requiredLinkAttribs = ["src", "dest"]
if not noFeatures:
requiredNodeAttribs.append("features")
requiredLinkAttribs.append("features")
def translateNodeToDict(n):
result = {}
attrs = self._graph.nodes[n]
for k in [
"id",
"features",
"appearanceFeatures",
"disappearanceFeatures",
"divisionFeatures",
"timestep",
]:
if k in attrs:
result[k] = attrs[k]
elif k in requiredNodeAttribs:
raise ValueError(
"Cannot use graph nodes without assigned ID and features, run insertEnergies() first"
)
return result
def translateLinkToDict(l):
result = {}
attrs = self._graph.edges[l[0], l[1]]
for k in ["src", "dest", "features"]:
if k in attrs:
result[k] = attrs[k]
elif k in requiredLinkAttribs:
raise ValueError(
"Cannot use graph links without source, target, and features, run insertEnergies() first"
)
return result
traxelIdPerTimestepToUniqueIdMap, _ = self.getMappingsBetweenUUIDsAndTraxels()
model = {
"segmentationHypotheses": [
translateNodeToDict(n) for n in self._graph.nodes()
],
"linkingHypotheses": [translateLinkToDict(e) for e in self._graph.edges()],
"divisionHypotheses": [],
"traxelToUniqueId": traxelIdPerTimestepToUniqueIdMap,
"settings": {
"statesShareWeights": True,
"allowPartialMergerAppearance": False,
"requireSeparateChildrenOfDivision": True,
"optimizerEpGap": 0.01,
"optimizerVerbose": True,
"optimizerNumThreads": 1,
},
}
# extract exclusion sets:
exclusions = set([])
for n in self._graph.nodes():
if self.withTracklets:
traxel = self._graph.nodes[n]["tracklet"][0]
else:
traxel = self._graph.nodes[n]["traxel"]
if traxel.conflictingTraxelIds is not None:
if self.withTracklets:
logger.error(
"Exclusion constraints do not work with tracklets yet!"
)
conflictingIds = [
traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][str(i)]
for i in traxel.conflictingTraxelIds
]
myId = traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][
str(traxel.Id)
]
for ci in conflictingIds:
# insert pairwise exclusion constraints only, and always put the lower id first
if ci < myId:
exclusions.add((ci, myId))
else:
exclusions.add((myId, ci))
model["exclusions"] = [list(t) for t in exclusions]
# TODO: this recomputes the uuidToTraxelMap even though we have it already...
trackingGraph = hytra.core.jsongraph.JsonTrackingGraph(
model=model, progressVisitor=self.progressVisitor
)
return trackingGraph
def insertSolution(self, resultDictionary):
"""
Add solution values to nodes and arcs from dictionary representation of solution.
        The resulting graph (=model) gets an additional property "value" that represents the number of objects inside a detection/arc.
Additionally a division indicator is saved in the node property "divisionValue".
The link also gets a new attribute: the gap that is covered. E.g. 1, if consecutive timeframes, 2 if link skipping one timeframe.
"""
assert isinstance(self._graph, nx.DiGraph), "Expecting the graph to be directed"
_, uuidToTraxelMap = self.getMappingsBetweenUUIDsAndTraxels()
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
# reset all values
for n in traxelgraph._graph.nodes():
traxelgraph._graph.nodes[n]["value"] = 0
traxelgraph._graph.nodes[n]["divisionValue"] = False
for e in traxelgraph._graph.edges():
traxelgraph._graph.edges[e[0], e[1]]["value"] = 0
# store values from dict
for detection in resultDictionary["detectionResults"]:
traxels = uuidToTraxelMap[detection["id"]]
for traxel in traxels:
traxelgraph._graph.nodes[traxel]["value"] = detection["value"]
for internal_edge in zip(traxels, traxels[1:]):
traxelgraph._graph.edges[internal_edge[0], internal_edge[1]][
"value"
] = detection["value"]
if (
"linkingResults" in resultDictionary
and resultDictionary["linkingResults"] is not None
):
for link in resultDictionary["linkingResults"]:
source, dest = (
uuidToTraxelMap[link["src"]][-1],
uuidToTraxelMap[link["dest"]][0],
)
if (source in traxelgraph._graph.predecessors(dest)) and (
dest in traxelgraph._graph.neighbors(source)
):
traxelgraph._graph.edges[source, dest]["value"] = link["value"]
traxelgraph._graph.edges[source, dest]["gap"] = dest[0] - source[0]
if (
"divisionResults" in resultDictionary
and resultDictionary["divisionResults"] is not None
):
for division in resultDictionary["divisionResults"]:
traxelgraph._graph.nodes[uuidToTraxelMap[division["id"]][-1]][
"divisionValue"
] = division["value"]
def getSolutionDictionary(self):
"""
Return the solution encoded in the `value` and `divisionValue` attributes of nodes and edges
as a python dictionary in the style that can be saved to JSON or sent to our solvers as ground truths.
"""
resultDictionary = {}
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
detectionList = []
divisionList = []
linkList = []
def checkAttributeValue(element, attribName, default):
if attribName in element:
return element[attribName]
else:
return default
for n in traxelgraph._graph.nodes():
newDetection = {}
newDetection["id"] = traxelgraph._graph.nodes[n]["id"]
newDetection["value"] = checkAttributeValue(
traxelgraph._graph.nodes[n], "value", 0
)
detectionList.append(newDetection)
if "divisionValue" in traxelgraph._graph.nodes[n]:
                newDivision = {}
                newDivision["id"] = traxelgraph._graph.nodes[n]["id"]
                newDivision["value"] = checkAttributeValue(
                    traxelgraph._graph.nodes[n], "divisionValue", False
                )
                divisionList.append(newDivision)
for a in traxelgraph.arcIterator():
newLink = {}
src = self.source(a)
dest = self.target(a)
newLink["src"] = traxelgraph._graph.nodes[src]["id"]
newLink["dest"] = traxelgraph._graph.nodes[dest]["id"]
newLink["value"] = checkAttributeValue(
traxelgraph._graph.edges[src, dest], "value", 0
)
newLink["gap"] = checkAttributeValue(
traxelgraph._graph.edges[src, dest], "gap", 1
)
linkList.append(newLink)
resultDictionary["detectionResults"] = detectionList
resultDictionary["linkingResults"] = linkList
resultDictionary["divisionResults"] = divisionList
return resultDictionary
def countIncomingObjects(self, node):
"""
Once a solution was written to the graph, this returns the number of
incoming objects of a node, and the number of active incoming edges.
If the latter is greater than 1, this shows that we have a merger.
"""
numberOfIncomingObject = 0
numberOfIncomingEdges = 0
for in_edge in self._graph.in_edges(node):
if "value" in self._graph.edges[in_edge[0], node]:
numberOfIncomingObject += self._graph.edges[in_edge[0], node]["value"]
numberOfIncomingEdges += 1
return numberOfIncomingObject, numberOfIncomingEdges
def countOutgoingObjects(self, node):
"""
Once a solution was written to the graph, this returns the number of
outgoing objects of a node, and the number of active outgoing edges.
If the latter is greater than 1, this shows that we have a merger splitting up, or a division.
"""
numberOfOutgoingObject = 0
numberOfOutgoingEdges = 0
for out_edge in self._graph.out_edges(node):
if (
"value" in self._graph.edges[node, out_edge[1]]
and self._graph.edges[node, out_edge[1]]["value"] > 0
):
numberOfOutgoingObject += self._graph.edges[node, out_edge[1]]["value"]
numberOfOutgoingEdges += 1
return numberOfOutgoingObject, numberOfOutgoingEdges
def computeLineage(self, firstTrackId=2, firstLineageId=2, skipLinks=1):
"""
computes lineage and track id for every node in the graph
"""
update_queue = []
# start lineages / tracks at 2, because 0 means background=black, 1 means misdetection in ilastik
max_lineage_id = firstLineageId
max_track_id = firstTrackId
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
self.progressVisitor.showState("Compute lineage")
# find start of lineages
numElements = 2 * traxelgraph.countNodes()
countElements = 0
for n in traxelgraph.nodeIterator():
countElements += 1
self.progressVisitor.showProgress(countElements / float(numElements))
if (
traxelgraph.countIncomingObjects(n)[0] == 0
and "value" in traxelgraph._graph.nodes[n]
and traxelgraph._graph.nodes[n]["value"] > 0
and (
self.allowLengthOneTracks
or traxelgraph.countOutgoingObjects(n)[0] > 0
)
):
# found start of a track
update_queue.append((n, max_lineage_id, max_track_id))
max_lineage_id += 1
max_track_id += 1
else:
traxelgraph._graph.nodes[n]["lineageId"] = None
traxelgraph._graph.nodes[n]["trackId"] = None
while len(update_queue) > 0:
countElements += 1
current_node, lineage_id, track_id = update_queue.pop()
self.progressVisitor.showProgress(countElements / float(numElements))
# if we did not run merger resolving, it can happen that we reach a node several times,
# and would propagate the new lineage+track IDs to all descendants again! We simply
# stop propagating in that case and just use the lineageID that reached the node first.
if (
traxelgraph._graph.nodes[current_node].get("lineageId", None)
is not None
and traxelgraph._graph.nodes[current_node].get("trackId", None)
is not None
):
logger.debug("Several tracks are merging here, stopping a later one")
continue
# set a new trackID
traxelgraph._graph.nodes[current_node]["lineageId"] = lineage_id
traxelgraph._graph.nodes[current_node]["trackId"] = track_id
numberOfOutgoingObject, numberOfOutgoingEdges = traxelgraph.countOutgoingObjects(
current_node
)
if numberOfOutgoingObject != numberOfOutgoingEdges:
logger.warning(
"running lineage computation on unresolved graphs depends on a race condition"
)
if (
"divisionValue" in traxelgraph._graph.nodes[current_node]
and traxelgraph._graph.nodes[current_node]["divisionValue"]
):
assert traxelgraph.countOutgoingObjects(current_node)[1] == 2
traxelgraph._graph.nodes[current_node]["children"] = []
for a in traxelgraph._graph.out_edges(current_node):
if (
"value" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
):
traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
traxelgraph._graph.nodes[current_node]["children"].append(a[1])
traxelgraph._graph.nodes[a[1]]["parent"] = current_node
update_queue.append(
(traxelgraph.target(a), lineage_id, max_track_id)
)
max_track_id += 1
else:
if traxelgraph.countOutgoingObjects(current_node)[1] > 1:
logger.debug(
"Found merger splitting into several objects, propagating lineage and track to all descendants!"
)
for a in traxelgraph._graph.out_edges(current_node):
if (
"value" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
):
if (
"gap" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["gap"] == 1
) or "gap" not in traxelgraph._graph.edges[current_node, a[1]]:
traxelgraph._graph.nodes[a[1]]["gap"] = 1
update_queue.append(
(traxelgraph.target(a), lineage_id, track_id)
)
if (
"gap" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["gap"] > 1
):
traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
traxelgraph._graph.nodes[a[1]]["gap_parent"] = current_node
update_queue.append(
(traxelgraph.target(a), lineage_id, max_track_id)
)
max_track_id += 1
def pruneGraphToSolution(self, distanceToSolution=0):
"""
        creates a new pruned HypothesesGraph around the result. Assumes that value==0 corresponds
to unlabeled parts of the graph.
distanceToSolution determines how many negative examples are included
distanceToSolution = 0: only include negative edges that connect used objects
distanceToSolution = 1: additionally include edges that connect used objects with unlabeled objects
"""
prunedGraph = HypothesesGraph()
for n in self.nodeIterator():
if "value" in self._graph.nodes[n] and self._graph.nodes[n]["value"] > 0:
prunedGraph._graph.add_node(n, **self._graph.nodes[n])
for e in self.arcIterator():
src = self.source(e)
dest = self.target(e)
if distanceToSolution == 0:
if src in prunedGraph._graph and dest in prunedGraph._graph:
prunedGraph._graph.add_edge(
src, dest, **self._graph.edges[src, dest]
)
# TODO: can be optimized by looping over the pruned graph nodes(might sacrifice readability)
for distance in range(1, distanceToSolution + 1):
for e in self.arcIterator():
src = self.source(e)
dest = self.target(e)
if src in prunedGraph._graph or dest in prunedGraph._graph:
prunedGraph._graph.add_node(src, **self._graph.nodes[src])
prunedGraph._graph.add_node(dest, **self._graph.nodes[dest])
prunedGraph._graph.add_edge(
src, dest, **self._graph.edges[src, dest]
)
# in case a node is NOT an appearance and
# has all the incoming edges with value 0, we remove all these incoming edges
#
# in case a node is NOT a disappearance and
# has all the outgoing edges with value 0, we remove all these outgoing edges
withAppearanceFeatures = True
withDisappearanceFeatures = True
withFeatures = True
correctAppearanceFeatureLength = True
correctDisappearanceFeatureLength = True
correctFeatureLength = True
maxNumObjects = None
maxNumObjectsAppearance = None
maxNumObjectsDisappearance = None
for n in self.nodeIterator():
try:
maxNumObjectsApp = len(self._graph.nodes[n]["appearanceFeatures"]) - 1
if maxNumObjectsAppearance is None:
maxNumObjectsAppearance = maxNumObjectsApp
elif not maxNumObjectsApp == maxNumObjectsAppearance:
correctAppearanceFeatureLength = False
logger.info(
"Appearance/disappearance features have different lengths!"
)
except:
withAppearanceFeatures = False
logger.info("There are no appearance features in node properties!")
break
try:
maxNumObjectsDis = (
len(self._graph.nodes[n]["disappearanceFeatures"]) - 1
)
if maxNumObjectsDisappearance is None:
maxNumObjectsDisappearance = maxNumObjectsDis
elif not maxNumObjectsDis == maxNumObjectsDisappearance:
correctDisappearanceFeatureLength = False
logger.info("Disappearance features have different lengths!")
except:
withDisappearanceFeatures = False
logger.info("There are no disappearance features in node properties!")
break
if withAppearanceFeatures and withDisappearanceFeatures:
if (
correctAppearanceFeatureLength
and correctDisappearanceFeatureLength
and maxNumObjectsAppearance == maxNumObjectsDisappearance
):
maxNumObjects = maxNumObjectsAppearance
else:
correctFeatureLength = False
logger.info(
"Appearance and disappearance features have different lengths!"
)
else:
withFeatures = False
if withFeatures and correctFeatureLength:
for n in self.nodeIterator():
if not (
"appearance" in self._graph.nodes[n].keys()
and self._graph.nodes[n]["appearance"]
):
allArcsWithValueZero = True
in_edges = self._graph.in_edges(n)
for edge in list(in_edges):
if (
"value" in self._graph.edges[edge[0]][edge[1]].keys()
and not self._graph.edges[edge[0]][edge[1]]["value"] == 0
):
allArcsWithValueZero = False
break
self._graph.nodes[n]["appearanceFeatures"] = listify(
[0.0] + [0.0] * maxNumObjects
)
if allArcsWithValueZero:
                        if len(in_edges) > 0:
                            # materialize the view so we do not mutate the
                            # graph while iterating over it
                            self._graph.remove_edges_from(list(in_edges))
if not (
"disappearance" in self._graph.nodes[n].keys()
and self._graph.nodes[n]["disappearance"]
):
allArcsWithValueZero = True
out_edges = self._graph.out_edges(n)
for edge in list(out_edges):
if (
"value" in self._graph.edges[edge[0]][edge[1]].keys()
and not self._graph.edges[edge[0]][edge[1]]["value"] == 0
):
allArcsWithValueZero = False
break
self._graph.nodes[n]["disappearanceFeatures"] = listify(
[0.0] + [0.0] * maxNumObjects
)
if allArcsWithValueZero:
                        if len(out_edges) > 0:
                            # materialize the view so we do not mutate the
                            # graph while iterating over it
                            self._graph.remove_edges_from(list(out_edges))
return prunedGraph
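
    # Hedged usage sketch added for illustration; `_pruneGraphToSolutionExample`
    # is not part of the original hytra API, and the node tuples and 'value'
    # attributes below are invented to demonstrate the distanceToSolution
    # semantics documented above.
    @staticmethod
    def _pruneGraphToSolutionExample():
        g = HypothesesGraph()
        g._graph.add_node((0, 1), value=1)  # used object
        g._graph.add_node((1, 1), value=1)  # used object
        g._graph.add_node((1, 2), value=0)  # unlabeled object
        g._graph.add_edge((0, 1), (1, 1), value=1)
        g._graph.add_edge((0, 1), (1, 2), value=0)
        solution_only = g.pruneGraphToSolution(0)  # used objects only
        with_negatives = g.pruneGraphToSolution(1)  # plus the edge to (1, 2)
        return solution_only, with_negatives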
def _getNodeAttribute(self, timestep, objectId, attribute):
"""
return some attribute of a certain node specified by timestep and objectId
"""
try:
return self._graph.nodes[(int(timestep), int(objectId))][attribute]
except KeyError:
logger.error(
attribute
+ " not found in graph node properties, call computeLineage() first!"
)
raise
def getLineageId(self, timestep, objectId):
"""
return the lineage Id of a certain node specified by timestep and objectId
"""
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
return traxelgraph._getNodeAttribute(timestep, objectId, "lineageId")
def getTrackId(self, timestep, objectId):
"""
return the track Id of a certain node specified by timestep and objectId
"""
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
return traxelgraph._getNodeAttribute(timestep, objectId, "trackId")
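
    # Added usage note: after insertSolution(...) and computeLineage(...)
    # have been run, the assigned IDs can be read back per object, e.g.
    #
    #     lineage = graph.getLineageId(timestep=3, objectId=7)
    #     track = graph.getTrackId(3, 7)
    #
    # (`graph`, 3 and 7 are illustrative; both calls raise via
    # _getNodeAttribute if computeLineage() has not been called.)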
| 43.643885 | 166 | 0.557879 |
import logging
import copy
import networkx as nx
import numpy as np
from sklearn.neighbors import KDTree
import hytra.core.jsongraph
from hytra.core.jsongraph import negLog, listify
from hytra.util.progressbar import DefaultProgressVisitor
logger = logging.getLogger(__name__)
def getTraxelFeatureVector(traxel, featureName, maxNumDimensions=3):
result = []
for i in range(maxNumDimensions):
try:
result.append(traxel.get_feature_value(str(featureName), i))
except:
if i == 0:
logger.error(
f"Error when accessing feature {featureName}[{i}] for "
f"traxel (Id={traxel.Id},Timestep={traxel.Timestep})"
)
logger.error(traxel.print_available_features())
raise Exception
else:
logger.error(
f"Error: Classifier was trained with less merger than maxNumObjects {maxNumDimensions}."
)
raise Exception
return result
class NodeMap:
def __init__(self, graph: nx.DiGraph, attributeName):
assert isinstance(graph, nx.DiGraph), "Expecting the graph to be directed"
self.__graph = graph
self.__attributeName = attributeName
def __getitem__(self, key):
return self.__graph.nodes[key][self.__attributeName]
class HypothesesGraph:
def __init__(self):
self._graph = nx.DiGraph()
self.withTracklets = False
self.allowLengthOneTracks = True
self._nextNodeUuid = 0
self.progressVisitor = DefaultProgressVisitor()
def nodeIterator(self):
return self._graph.nodes()
def arcIterator(self):
return self._graph.edges()
def countNodes(self):
return self._graph.number_of_nodes()
def countArcs(self):
return self._graph.number_of_edges()
def hasNode(self, node):
return self._graph.has_node(node)
def hasEdge(self, u, v):
return self._graph.has_edge(u, v)
@staticmethod
def source(edge):
return edge[0]
@staticmethod
def target(edge):
return edge[1]
def _findNearestNeighbors(
self, kdtreeObjectPair, traxel, numNeighbors, maxNeighborDist
):
kdtree, objectIdList = kdtreeObjectPair
if len(objectIdList) <= numNeighbors:
return objectIdList
distances, neighbors = kdtree.query(
[self._extractCenter(traxel)], k=numNeighbors, return_distance=True
)
return [
objectIdList[index]
for distance, index in zip(distances[0], neighbors[0])
if distance < maxNeighborDist
]
def _extractCenter(self, traxel):
try:
if "com" in traxel.Features:
return traxel.Features["com"]
else:
return traxel.Features["RegionCenter"]
except:
try:
return getTraxelFeatureVector(traxel, "com")
except:
try:
return getTraxelFeatureVector(traxel, "RegionCenter")
except:
raise ValueError(
"given traxel (t={},id={}) does not have "
'"com" or "RegionCenter"'.format(traxel.Timestep, traxel.Id)
)
def _traxelMightDivide(self, traxel, divisionThreshold):
assert "divProb" in traxel.Features
return traxel.Features["divProb"][0] > divisionThreshold
def _buildFrameKdTree(self, traxelDict):
objectIdList = []
features = []
for obj, traxel in traxelDict.items():
if obj == 0:
continue
objectIdList.append(obj)
features.append(list(self._extractCenter(traxel)))
return (KDTree(features, metric="euclidean"), objectIdList)
def _addNodesForFrame(self, frame, traxelDict):
for obj, traxel in traxelDict.items():
if obj == 0:
continue
self._graph.add_node((frame, obj), traxel=traxel, id=self._nextNodeUuid)
self._nextNodeUuid += 1
def addNodeFromTraxel(self, traxel, **kwargs):
assert traxel is not None
assert not self.withTracklets
self._graph.add_node(
(traxel.Timestep, traxel.Id), traxel=traxel, id=self._nextNodeUuid, **kwargs
)
self._nextNodeUuid += 1
def buildFromProbabilityGenerator(
self,
probabilityGenerator,
maxNeighborDist=200,
numNearestNeighbors=1,
forwardBackwardCheck=True,
withDivisions=True,
divisionThreshold=0.1,
skipLinks=1,
):
assert probabilityGenerator is not None
assert len(probabilityGenerator.TraxelsPerFrame) > 0
assert skipLinks > 0
def checkNodeWhileAddingLinks(frame, obj):
if (frame, obj) not in self._graph:
logger.warning(
"Adding node ({}, {}) when setting up links".format(frame, obj)
)
kdTreeFrames = [None] * (skipLinks + 1)
# Idea: take the max key in the dict. Remember, frame numbering starts with 0.
frameMax = max(probabilityGenerator.TraxelsPerFrame.keys())
frameMin = min(probabilityGenerator.TraxelsPerFrame.keys())
numFrames = frameMax - frameMin + 1
self.progressVisitor.showState("Probability Generator")
countFrames = 0
for frame in range(numFrames):
countFrames += 1
self.progressVisitor.showProgress(countFrames / float(numFrames))
if frame > 0:
del kdTreeFrames[0] # this is the current frame
if (
frame + skipLinks < numFrames
and frameMin + frame + skipLinks
in probabilityGenerator.TraxelsPerFrame.keys()
):
kdTreeFrames.append(
self._buildFrameKdTree(
probabilityGenerator.TraxelsPerFrame[
frameMin + frame + skipLinks
]
)
)
self._addNodesForFrame(
frameMin + frame + skipLinks,
probabilityGenerator.TraxelsPerFrame[
frameMin + frame + skipLinks
],
)
else:
for i in range(0, skipLinks + 1):
if (
frameMin + frame + i
in probabilityGenerator.TraxelsPerFrame.keys()
): # empty frame
kdTreeFrames[i] = self._buildFrameKdTree(
probabilityGenerator.TraxelsPerFrame[frameMin + frame + i]
)
self._addNodesForFrame(
frameMin + frame + i,
probabilityGenerator.TraxelsPerFrame[frameMin + frame + i],
)
# find forward links
if (
frameMin + frame in probabilityGenerator.TraxelsPerFrame.keys()
): # 'frame' could be empty
for obj, traxel in probabilityGenerator.TraxelsPerFrame[
frameMin + frame
].items():
divisionPreservingNumNearestNeighbors = numNearestNeighbors
if (
divisionPreservingNumNearestNeighbors < 2
and withDivisions
and self._traxelMightDivide(traxel, divisionThreshold)
):
divisionPreservingNumNearestNeighbors = 2
for i in range(1, skipLinks + 1):
if (
frame + i < numFrames
and frameMin + frame + i
in probabilityGenerator.TraxelsPerFrame.keys()
):
neighbors = self._findNearestNeighbors(
kdTreeFrames[i],
traxel,
divisionPreservingNumNearestNeighbors,
maxNeighborDist,
)
# type(neighbors) is list
for n in neighbors:
edge_start = (frameMin + frame, obj)
edge_end = (frameMin + frame + i, n)
checkNodeWhileAddingLinks(*edge_start)
checkNodeWhileAddingLinks(*edge_end)
self._graph.add_edge(edge_start, edge_end)
self._graph.edges[edge_start, edge_end][
"src"
] = self._graph.nodes[edge_start]["id"]
self._graph.edges[edge_start, edge_end][
"dest"
] = self._graph.nodes[edge_end]["id"]
# find backward links
if forwardBackwardCheck:
for i in range(1, skipLinks + 1):
if frame + i < numFrames:
if (
frameMin + frame + i
in probabilityGenerator.TraxelsPerFrame.keys()
): # empty frame
for obj, traxel in probabilityGenerator.TraxelsPerFrame[
frameMin + frame + i
].items():
if kdTreeFrames[0] is not None:
neighbors = self._findNearestNeighbors(
kdTreeFrames[0],
traxel,
numNearestNeighbors,
maxNeighborDist,
)
for n in neighbors:
edge_start = (frameMin + frame, n)
edge_end = (frameMin + frame + i, obj)
checkNodeWhileAddingLinks(*edge_start)
checkNodeWhileAddingLinks(*edge_end)
self._graph.add_edge(edge_start, edge_end)
self._graph.edges[edge_start, edge_end][
"src"
] = self._graph.nodes[edge_start]["id"]
self._graph.edges[edge_start, edge_end][
"dest"
] = self._graph.nodes[edge_end]["id"]
def generateTrackletGraph(self):
logger.info("generating tracklet graph...")
tracklet_graph = copy.copy(self)
tracklet_graph._graph = tracklet_graph._graph.copy()
tracklet_graph.withTracklets = True
tracklet_graph.referenceTraxelGraph = self
tracklet_graph.progressVisitor = self.progressVisitor
self.progressVisitor.showState("Initializing Tracklet Graph")
# initialize tracklet map to contain a list of only one traxel per node
countNodes = 0
numNodes = tracklet_graph.countNodes()
for node in tracklet_graph._graph.nodes():
countNodes += 1
self.progressVisitor.showProgress(countNodes / float(numNodes))
tracklet_graph._graph.nodes[node]["tracklet"] = [
tracklet_graph._graph.nodes[node]["traxel"]
]
del tracklet_graph._graph.nodes[node]["traxel"]
# set up a list of links that indicates whether the target's in- and source's out-degree
# are one, meaning the edge can be contracted
links_to_be_contracted = []
node_remapping = {}
self.progressVisitor.showState("Finding Tracklets in Graph")
countEdges = 0
numEdges = tracklet_graph.countArcs()
for edge in tracklet_graph._graph.edges():
countEdges += 1
self.progressVisitor.showProgress(countEdges / float(numEdges))
if (
tracklet_graph._graph.out_degree(edge[0]) == 1
and tracklet_graph._graph.in_degree(edge[1]) == 1
):
links_to_be_contracted.append(edge)
for i in [0, 1]:
node_remapping[edge[i]] = edge[i]
# apply edge contraction
self.progressVisitor.showState("Contracting Edges in Tracklet Graph")
countLinks = 0
numLinks = len(links_to_be_contracted)
for edge in links_to_be_contracted:
countLinks += 1
self.progressVisitor.showProgress(countLinks / float(numLinks))
src = node_remapping[edge[0]]
dest = node_remapping[edge[1]]
if (
tracklet_graph._graph.in_degree(src) == 0
and tracklet_graph._graph.out_degree(dest) == 0
):
# if this tracklet would contract to a single node without incoming or outgoing edges,
# then do NOT contract, as our tracking cannot handle length-one-tracks
continue
tracklet_graph._graph.nodes[src]["tracklet"].extend(
tracklet_graph._graph.nodes[dest]["tracklet"]
)
# duplicate out arcs with new source
for out_edge in tracklet_graph._graph.out_edges(dest):
tracklet_graph._graph.add_edge(src, out_edge[1])
# adjust node remapping to point to new source for all contracted traxels
for t in tracklet_graph._graph.nodes[dest]["tracklet"]:
node_remapping[(t.Timestep, t.Id)] = src
tracklet_graph._graph.remove_node(dest)
logger.info(
"tracklet graph has {} nodes and {} edges (before {},{})".format(
tracklet_graph.countNodes(),
tracklet_graph.countArcs(),
self.countNodes(),
self.countArcs(),
)
)
return tracklet_graph
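
    # Added note: an edge (u, v) is contracted only when out_degree(u) == 1
    # and in_degree(v) == 1, i.e. the two detections can only belong to the
    # same track. For example, a chain a -> b -> c with no branches collapses
    # into a single tracklet node [a, b, c], while any division or merger
    # boundary stays a separate node.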
def getNodeTraxelMap(self):
return NodeMap(self._graph, "traxel")
def getNodeTrackletMap(self):
return NodeMap(self._graph, "tracklet")
def insertEnergies(
self,
maxNumObjects,
detectionProbabilityFunc,
transitionProbabilityFunc,
boundaryCostMultiplierFunc,
divisionProbabilityFunc,
skipLinksBias,
):
numElements = self._graph.number_of_nodes() + self._graph.number_of_edges()
self.progressVisitor.showState("Inserting energies")
# insert detection probabilities for all detections (and some also get a div probability)
countElements = 0
for n in self._graph.nodes():
countElements += 1
if not self.withTracklets:
# only one traxel, but make it a list so everything below works the same
traxels = [self._graph.nodes[n]["traxel"]]
else:
traxels = self._graph.nodes[n]["tracklet"]
# accumulate features over all contained traxels
previousTraxel = None
detectionFeatures = np.zeros(maxNumObjects + 1)
for t in traxels:
detectionFeatures += np.array(negLog(detectionProbabilityFunc(t)))
if previousTraxel is not None:
detectionFeatures += np.array(
negLog(transitionProbabilityFunc(previousTraxel, t))
)
previousTraxel = t
detectionFeatures = listify(list(detectionFeatures))
# division only if probability is big enough
divisionFeatures = divisionProbabilityFunc(traxels[-1])
if divisionFeatures is not None:
divisionFeatures = listify(negLog(divisionFeatures))
# appearance/disappearance
appearanceFeatures = listify(
[0.0] + [boundaryCostMultiplierFunc(traxels[0], True)] * maxNumObjects
)
disappearanceFeatures = listify(
[0.0] + [boundaryCostMultiplierFunc(traxels[-1], False)] * maxNumObjects
)
self._graph.nodes[n]["features"] = detectionFeatures
if divisionFeatures is not None:
self._graph.nodes[n]["divisionFeatures"] = divisionFeatures
self._graph.nodes[n]["appearanceFeatures"] = appearanceFeatures
self._graph.nodes[n]["disappearanceFeatures"] = disappearanceFeatures
self._graph.nodes[n]["timestep"] = [
traxels[0].Timestep,
traxels[-1].Timestep,
]
self.progressVisitor.showProgress(countElements / float(numElements))
# insert transition probabilities for all links
for a in self._graph.edges():
countElements += 1
self.progressVisitor.showProgress(countElements / float(numElements))
if not self.withTracklets:
srcTraxel = self._graph.nodes[self.source(a)]["traxel"]
destTraxel = self._graph.nodes[self.target(a)]["traxel"]
else:
srcTraxel = self._graph.nodes[self.source(a)]["tracklet"][
-1
] # src is last of the traxels in source tracklet
destTraxel = self._graph.nodes[self.target(a)]["tracklet"][
0
] # dest is first of traxels in destination tracklet
features = listify(negLog(transitionProbabilityFunc(srcTraxel, destTraxel)))
# add feature for additional Frames. Since we do not want these edges to be primarily taken, we add a bias to the edge. Now: hard coded, future: parameter
frame_gap = destTraxel.Timestep - srcTraxel.Timestep
# 1. method
if frame_gap > 1:
features[1][0] = features[1][0] + skipLinksBias * frame_gap
# # 2. method
# # introduce a new energies like: [[6], [15]] -> [[6, 23], [15, 23]] for first links and
# # [[6], [15]] -> [[23, 6], [23, 15]] for second links, and so on for 3rd order links
# # !!! this will introduce a new weight in the weight.json file. For the 2nd link, comes in 2nd row and so on.
# # drawback: did not manage to adjust parameter to get sensible results.
# for feat in features:
# for i in range(frame_gap):
# feat.append(23)
# if frame_gap > 1:
# feat[frame_gap-1], feat[0] = feat[0], feat[frame_gap-1]
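            # Added worked example (illustrative numbers only): with
            # skipLinksBias = 5 and a skip link spanning frame_gap = 3
            # frames, the neg-log cost of switching that transition on,
            # features[1][0], grows by 15, so the optimizer prefers chains
            # of single-frame links of comparable probability.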
self._graph.edges[a[0], a[1]]["src"] = self._graph.nodes[a[0]]["id"]
self._graph.edges[a[0], a[1]]["dest"] = self._graph.nodes[a[1]]["id"]
self._graph.edges[a[0], a[1]]["features"] = features
def getMappingsBetweenUUIDsAndTraxels(self):
uuidToTraxelMap = {}
traxelIdPerTimestepToUniqueIdMap = {}
for n in self._graph.nodes():
uuid = self._graph.nodes[n]["id"]
traxels = []
if self.withTracklets:
traxels = self._graph.nodes[n]["tracklet"]
else:
traxels = [self._graph.nodes[n]["traxel"]]
uuidToTraxelMap[uuid] = [(t.Timestep, t.Id) for t in traxels]
for t in uuidToTraxelMap[uuid]:
traxelIdPerTimestepToUniqueIdMap.setdefault(str(t[0]), {})[
str(t[1])
] = uuid
# sort the list of traxels per UUID by their timesteps
for v in uuidToTraxelMap.values():
v.sort(key=lambda timestepIdTuple: timestepIdTuple[0])
return traxelIdPerTimestepToUniqueIdMap, uuidToTraxelMap
def toTrackingGraph(self, noFeatures=False):
requiredNodeAttribs = ["id"]
requiredLinkAttribs = ["src", "dest"]
if not noFeatures:
requiredNodeAttribs.append("features")
requiredLinkAttribs.append("features")
def translateNodeToDict(n):
result = {}
attrs = self._graph.nodes[n]
for k in [
"id",
"features",
"appearanceFeatures",
"disappearanceFeatures",
"divisionFeatures",
"timestep",
]:
if k in attrs:
result[k] = attrs[k]
elif k in requiredNodeAttribs:
raise ValueError(
"Cannot use graph nodes without assigned ID and features, run insertEnergies() first"
)
return result
def translateLinkToDict(l):
result = {}
attrs = self._graph.edges[l[0], l[1]]
for k in ["src", "dest", "features"]:
if k in attrs:
result[k] = attrs[k]
elif k in requiredLinkAttribs:
raise ValueError(
"Cannot use graph links without source, target, and features, run insertEnergies() first"
)
return result
traxelIdPerTimestepToUniqueIdMap, _ = self.getMappingsBetweenUUIDsAndTraxels()
model = {
"segmentationHypotheses": [
translateNodeToDict(n) for n in self._graph.nodes()
],
"linkingHypotheses": [translateLinkToDict(e) for e in self._graph.edges()],
"divisionHypotheses": [],
"traxelToUniqueId": traxelIdPerTimestepToUniqueIdMap,
"settings": {
"statesShareWeights": True,
"allowPartialMergerAppearance": False,
"requireSeparateChildrenOfDivision": True,
"optimizerEpGap": 0.01,
"optimizerVerbose": True,
"optimizerNumThreads": 1,
},
}
# extract exclusion sets:
exclusions = set([])
for n in self._graph.nodes():
if self.withTracklets:
traxel = self._graph.nodes[n]["tracklet"][0]
else:
traxel = self._graph.nodes[n]["traxel"]
if traxel.conflictingTraxelIds is not None:
if self.withTracklets:
logger.error(
"Exclusion constraints do not work with tracklets yet!"
)
conflictingIds = [
traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][str(i)]
for i in traxel.conflictingTraxelIds
]
myId = traxelIdPerTimestepToUniqueIdMap[str(traxel.Timestep)][
str(traxel.Id)
]
for ci in conflictingIds:
# insert pairwise exclusion constraints only, and always put the lower id first
if ci < myId:
exclusions.add((ci, myId))
else:
exclusions.add((myId, ci))
model["exclusions"] = [list(t) for t in exclusions]
# TODO: this recomputes the uuidToTraxelMap even though we have it already...
trackingGraph = hytra.core.jsongraph.JsonTrackingGraph(
model=model, progressVisitor=self.progressVisitor
)
return trackingGraph
def insertSolution(self, resultDictionary):
assert isinstance(self._graph, nx.DiGraph), "Expecting the graph to be directed"
_, uuidToTraxelMap = self.getMappingsBetweenUUIDsAndTraxels()
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
# reset all values
for n in traxelgraph._graph.nodes():
traxelgraph._graph.nodes[n]["value"] = 0
traxelgraph._graph.nodes[n]["divisionValue"] = False
for e in traxelgraph._graph.edges():
traxelgraph._graph.edges[e[0], e[1]]["value"] = 0
# store values from dict
for detection in resultDictionary["detectionResults"]:
traxels = uuidToTraxelMap[detection["id"]]
for traxel in traxels:
traxelgraph._graph.nodes[traxel]["value"] = detection["value"]
for internal_edge in zip(traxels, traxels[1:]):
traxelgraph._graph.edges[internal_edge[0], internal_edge[1]][
"value"
] = detection["value"]
if (
"linkingResults" in resultDictionary
and resultDictionary["linkingResults"] is not None
):
for link in resultDictionary["linkingResults"]:
source, dest = (
uuidToTraxelMap[link["src"]][-1],
uuidToTraxelMap[link["dest"]][0],
)
if (source in traxelgraph._graph.predecessors(dest)) and (
dest in traxelgraph._graph.neighbors(source)
):
traxelgraph._graph.edges[source, dest]["value"] = link["value"]
traxelgraph._graph.edges[source, dest]["gap"] = dest[0] - source[0]
if (
"divisionResults" in resultDictionary
and resultDictionary["divisionResults"] is not None
):
for division in resultDictionary["divisionResults"]:
traxelgraph._graph.nodes[uuidToTraxelMap[division["id"]][-1]][
"divisionValue"
] = division["value"]
def getSolutionDictionary(self):
resultDictionary = {}
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
detectionList = []
divisionList = []
linkList = []
def checkAttributeValue(element, attribName, default):
if attribName in element:
return element[attribName]
else:
return default
for n in traxelgraph._graph.nodes():
newDetection = {}
newDetection["id"] = traxelgraph._graph.nodes[n]["id"]
newDetection["value"] = checkAttributeValue(
traxelgraph._graph.nodes[n], "value", 0
)
detectionList.append(newDetection)
if "divisionValue" in traxelgraph._graph.nodes[n]:
                newDivision = {}
                newDivision["id"] = traxelgraph._graph.nodes[n]["id"]
                newDivision["value"] = checkAttributeValue(
                    traxelgraph._graph.nodes[n], "divisionValue", False
                )
                divisionList.append(newDivision)
for a in traxelgraph.arcIterator():
newLink = {}
src = self.source(a)
dest = self.target(a)
newLink["src"] = traxelgraph._graph.nodes[src]["id"]
newLink["dest"] = traxelgraph._graph.nodes[dest]["id"]
newLink["value"] = checkAttributeValue(
traxelgraph._graph.edges[src, dest], "value", 0
)
newLink["gap"] = checkAttributeValue(
traxelgraph._graph.edges[src, dest], "gap", 1
)
linkList.append(newLink)
resultDictionary["detectionResults"] = detectionList
resultDictionary["linkingResults"] = linkList
resultDictionary["divisionResults"] = divisionList
return resultDictionary
def countIncomingObjects(self, node):
numberOfIncomingObject = 0
numberOfIncomingEdges = 0
for in_edge in self._graph.in_edges(node):
if "value" in self._graph.edges[in_edge[0], node]:
numberOfIncomingObject += self._graph.edges[in_edge[0], node]["value"]
numberOfIncomingEdges += 1
return numberOfIncomingObject, numberOfIncomingEdges
def countOutgoingObjects(self, node):
numberOfOutgoingObject = 0
numberOfOutgoingEdges = 0
for out_edge in self._graph.out_edges(node):
if (
"value" in self._graph.edges[node, out_edge[1]]
and self._graph.edges[node, out_edge[1]]["value"] > 0
):
numberOfOutgoingObject += self._graph.edges[node, out_edge[1]]["value"]
numberOfOutgoingEdges += 1
return numberOfOutgoingObject, numberOfOutgoingEdges
def computeLineage(self, firstTrackId=2, firstLineageId=2, skipLinks=1):
update_queue = []
# start lineages / tracks at 2, because 0 means background=black, 1 means misdetection in ilastik
max_lineage_id = firstLineageId
max_track_id = firstTrackId
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
self.progressVisitor.showState("Compute lineage")
# find start of lineages
numElements = 2 * traxelgraph.countNodes()
countElements = 0
for n in traxelgraph.nodeIterator():
countElements += 1
self.progressVisitor.showProgress(countElements / float(numElements))
if (
traxelgraph.countIncomingObjects(n)[0] == 0
and "value" in traxelgraph._graph.nodes[n]
and traxelgraph._graph.nodes[n]["value"] > 0
and (
self.allowLengthOneTracks
or traxelgraph.countOutgoingObjects(n)[0] > 0
)
):
# found start of a track
update_queue.append((n, max_lineage_id, max_track_id))
max_lineage_id += 1
max_track_id += 1
else:
traxelgraph._graph.nodes[n]["lineageId"] = None
traxelgraph._graph.nodes[n]["trackId"] = None
while len(update_queue) > 0:
countElements += 1
current_node, lineage_id, track_id = update_queue.pop()
self.progressVisitor.showProgress(countElements / float(numElements))
# if we did not run merger resolving, it can happen that we reach a node several times,
# and would propagate the new lineage+track IDs to all descendants again! We simply
# stop propagating in that case and just use the lineageID that reached the node first.
if (
traxelgraph._graph.nodes[current_node].get("lineageId", None)
is not None
and traxelgraph._graph.nodes[current_node].get("trackId", None)
is not None
):
logger.debug("Several tracks are merging here, stopping a later one")
continue
# set a new trackID
traxelgraph._graph.nodes[current_node]["lineageId"] = lineage_id
traxelgraph._graph.nodes[current_node]["trackId"] = track_id
numberOfOutgoingObject, numberOfOutgoingEdges = traxelgraph.countOutgoingObjects(
current_node
)
if numberOfOutgoingObject != numberOfOutgoingEdges:
logger.warning(
"running lineage computation on unresolved graphs depends on a race condition"
)
if (
"divisionValue" in traxelgraph._graph.nodes[current_node]
and traxelgraph._graph.nodes[current_node]["divisionValue"]
):
assert traxelgraph.countOutgoingObjects(current_node)[1] == 2
traxelgraph._graph.nodes[current_node]["children"] = []
for a in traxelgraph._graph.out_edges(current_node):
if (
"value" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
):
traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
traxelgraph._graph.nodes[current_node]["children"].append(a[1])
traxelgraph._graph.nodes[a[1]]["parent"] = current_node
update_queue.append(
(traxelgraph.target(a), lineage_id, max_track_id)
)
max_track_id += 1
else:
if traxelgraph.countOutgoingObjects(current_node)[1] > 1:
logger.debug(
"Found merger splitting into several objects, propagating lineage and track to all descendants!"
)
for a in traxelgraph._graph.out_edges(current_node):
if (
"value" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["value"] > 0
):
if (
"gap" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["gap"] == 1
) or "gap" not in traxelgraph._graph.edges[current_node, a[1]]:
traxelgraph._graph.nodes[a[1]]["gap"] = 1
update_queue.append(
(traxelgraph.target(a), lineage_id, track_id)
)
if (
"gap" in traxelgraph._graph.edges[current_node, a[1]]
and traxelgraph._graph.edges[current_node, a[1]]["gap"] > 1
):
traxelgraph._graph.nodes[a[1]]["gap"] = skipLinks
traxelgraph._graph.nodes[a[1]]["gap_parent"] = current_node
update_queue.append(
(traxelgraph.target(a), lineage_id, max_track_id)
)
max_track_id += 1
def pruneGraphToSolution(self, distanceToSolution=0):
prunedGraph = HypothesesGraph()
for n in self.nodeIterator():
if "value" in self._graph.nodes[n] and self._graph.nodes[n]["value"] > 0:
prunedGraph._graph.add_node(n, **self._graph.nodes[n])
for e in self.arcIterator():
src = self.source(e)
dest = self.target(e)
if distanceToSolution == 0:
if src in prunedGraph._graph and dest in prunedGraph._graph:
prunedGraph._graph.add_edge(
src, dest, **self._graph.edges[src, dest]
)
# TODO: can be optimized by looping over the pruned graph nodes(might sacrifice readability)
for distance in range(1, distanceToSolution + 1):
for e in self.arcIterator():
src = self.source(e)
dest = self.target(e)
if src in prunedGraph._graph or dest in prunedGraph._graph:
prunedGraph._graph.add_node(src, **self._graph.nodes[src])
prunedGraph._graph.add_node(dest, **self._graph.nodes[dest])
prunedGraph._graph.add_edge(
src, dest, **self._graph.edges[src, dest]
)
# in case a node is NOT an appearance and
# has all the incoming edges with value 0, we remove all these incoming edges
#
# in case a node is NOT a disappearance and
# has all the outgoing edges with value 0, we remove all these outgoing edges
withAppearanceFeatures = True
withDisappearanceFeatures = True
withFeatures = True
correctAppearanceFeatureLength = True
correctDisappearanceFeatureLength = True
correctFeatureLength = True
maxNumObjects = None
maxNumObjectsAppearance = None
maxNumObjectsDisappearance = None
for n in self.nodeIterator():
try:
maxNumObjectsApp = len(self._graph.nodes[n]["appearanceFeatures"]) - 1
if maxNumObjectsAppearance is None:
maxNumObjectsAppearance = maxNumObjectsApp
elif not maxNumObjectsApp == maxNumObjectsAppearance:
correctAppearanceFeatureLength = False
logger.info(
"Appearance/disappearance features have different lengths!"
)
except:
withAppearanceFeatures = False
logger.info("There are no appearance features in node properties!")
break
try:
maxNumObjectsDis = (
len(self._graph.nodes[n]["disappearanceFeatures"]) - 1
)
if maxNumObjectsDisappearance is None:
maxNumObjectsDisappearance = maxNumObjectsDis
elif not maxNumObjectsDis == maxNumObjectsDisappearance:
correctDisappearanceFeatureLength = False
logger.info("Disappearance features have different lengths!")
except:
withDisappearanceFeatures = False
logger.info("There are no disappearance features in node properties!")
break
if withAppearanceFeatures and withDisappearanceFeatures:
if (
correctAppearanceFeatureLength
and correctDisappearanceFeatureLength
and maxNumObjectsAppearance == maxNumObjectsDisappearance
):
maxNumObjects = maxNumObjectsAppearance
else:
correctFeatureLength = False
logger.info(
"Appearance and disappearance features have different lengths!"
)
else:
withFeatures = False
if withFeatures and correctFeatureLength:
for n in self.nodeIterator():
if not (
"appearance" in self._graph.nodes[n].keys()
and self._graph.nodes[n]["appearance"]
):
allArcsWithValueZero = True
in_edges = self._graph.in_edges(n)
for edge in list(in_edges):
if (
"value" in self._graph.edges[edge[0]][edge[1]].keys()
and not self._graph.edges[edge[0]][edge[1]]["value"] == 0
):
allArcsWithValueZero = False
break
self._graph.nodes[n]["appearanceFeatures"] = listify(
[0.0] + [0.0] * maxNumObjects
)
if allArcsWithValueZero:
                        if len(in_edges) > 0:
                            self._graph.remove_edges_from(list(in_edges))
if not (
"disappearance" in self._graph.nodes[n].keys()
and self._graph.nodes[n]["disappearance"]
):
allArcsWithValueZero = True
out_edges = self._graph.out_edges(n)
for edge in list(out_edges):
if (
"value" in self._graph.edges[edge[0]][edge[1]].keys()
and not self._graph.edges[edge[0]][edge[1]]["value"] == 0
):
allArcsWithValueZero = False
break
self._graph.nodes[n]["disappearanceFeatures"] = listify(
[0.0] + [0.0] * maxNumObjects
)
if allArcsWithValueZero:
                        if len(out_edges) > 0:
                            self._graph.remove_edges_from(list(out_edges))
return prunedGraph
def _getNodeAttribute(self, timestep, objectId, attribute):
try:
return self._graph.nodes[(int(timestep), int(objectId))][attribute]
except KeyError:
logger.error(
attribute
+ " not found in graph node properties, call computeLineage() first!"
)
raise
def getLineageId(self, timestep, objectId):
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
return traxelgraph._getNodeAttribute(timestep, objectId, "lineageId")
def getTrackId(self, timestep, objectId):
if self.withTracklets:
traxelgraph = self.referenceTraxelGraph
else:
traxelgraph = self
return traxelgraph._getNodeAttribute(timestep, objectId, "trackId")
| true | true |
1c4567adc8fd9e1e995c1211c7aa015c0a2da7ee | 4,059 | py | Python
blockchain_client.py | JB1984/TheBlock | 0e0b00034424d7453651b5efc2ef71a13f3719c4 | ["MIT"] | null | null | null
blockchain_client.py | JB1984/TheBlock | 0e0b00034424d7453651b5efc2ef71a13f3719c4 | ["MIT"] | 6 | 2018-09-14T21:00:41.000Z | 2018-09-27T11:05:52.000Z
blockchain_client.py | JB1984/TheBlock | 0e0b00034424d7453651b5efc2ef71a13f3719c4 | ["MIT"] | null | null | null
from collections import OrderedDict
import binascii
import base64
import json
import Crypto
import Crypto.Random
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import requests
from flask import Flask, jsonify, request, render_template
class Transaction:
def __init__(self, sender_address, sender_private_key, recipient_address, value, note, picture):
self.sender_address = sender_address
self.sender_private_key = sender_private_key
self.recipient_address = recipient_address
self.value = value
self.note = note
self.picture = picture
    def __getattr__(self, attr):
        # there is no `data` mapping on this object; looking attributes up
        # via `self.data` would recurse into __getattr__ forever, so fail
        # fast with the standard error instead
        raise AttributeError(attr)
def to_dict(self):
return OrderedDict({'sender_address': self.sender_address,
'recipient_address': self.recipient_address,
'value': self.value,
'note': self.note,
'picture': self.picture})
def sign_transaction(self):
private_key = self.sender_private_key
signer = PKCS1_v1_5.new(private_key)
h = SHA.new(str(self.to_dict()).encode('utf8'))
return binascii.hexlify(signer.sign(h)).decode('ascii')
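
# Hedged companion sketch (added; not part of the original client): how a
# receiving node could verify the signature produced by sign_transaction
# above. `public_key_hex` is assumed to be the hex-encoded DER key that this
# client publishes as its sender_address.
def verify_transaction_signature(public_key_hex, signature_hex, transaction_dict):
    public_key = RSA.importKey(binascii.unhexlify(public_key_hex))
    verifier = PKCS1_v1_5.new(public_key)
    h = SHA.new(str(transaction_dict).encode('utf8'))
    return verifier.verify(h, binascii.unhexlify(signature_hex))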
app = Flask(__name__)
@app.route('/')
def index():
return render_template('./indexClient.html')
@app.route('/make/transaction')
def make_transaction():
return render_template('./make_transaction.html')
@app.route('/view/transactions')
def view_transaction():
return render_template('./view_transactions.html')
@app.route('/view/wallet_balance')
def view_wallet_balance():
return render_template('./view_wallet_balance.html')
@app.route('/wallet/new', methods=['GET'])
def new_wallet():
random_gen = Crypto.Random.new().read
private_key = RSA.generate(1024, random_gen)
public_key = private_key.publickey()
response = {
'private_key': binascii.hexlify(private_key.exportKey(format='DER')).decode('ascii'),
'public_key': binascii.hexlify(public_key.exportKey(format='DER')).decode('ascii')
}
#Save the generated key to the computer for use later
f = open("private.pem", "wb")
f.write(private_key.exportKey('PEM'))
f.close()
return jsonify(response), 200
@app.route('/generate/transaction', methods=['POST'])
def generate_transaction():
p = open("private.pem", "r")
priKey = RSA.import_key(p.read())
pubKey = priKey.publickey()
sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
sender_private_key = priKey
recipient_address = request.form['recipient_address']
value = request.form['amount']
note = request.form['note']
if request.form['picture'] != "":
with open(request.form['picture'], "rb") as imageFile:
pictureString = base64.b64encode(imageFile.read())
            pictureString1 = pictureString.decode('ascii')  # str(bytes) would embed the b'...' wrapper in the payload
print(pictureString)
print(pictureString1)
else:
pictureString1 = ""
transaction = Transaction(sender_address, sender_private_key, recipient_address, value, note, pictureString1)
response = {'transaction': transaction.to_dict(), 'signature': transaction.sign_transaction()}
return jsonify(response), 200
@app.route('/get_pub_key', methods=['GET'])
def get_pub_key():
p = open("private.pem", "r")
priKey = RSA.import_key(p.read())
pubKey = priKey.publickey()
sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
response = {'pub_key': sender_address}
return jsonify(response), 200
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('-p', '--port', default=8080, type=int, help='port to listen on')
args = parser.parse_args()
port = args.port
app.run(host='0.0.0.0', port=port)
| 29.845588 | 114 | 0.649914 |
from collections import OrderedDict
import binascii
import base64
import json
import Crypto
import Crypto.Random
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
import requests
from flask import Flask, jsonify, request, render_template
class Transaction:
def __init__(self, sender_address, sender_private_key, recipient_address, value, note, picture):
self.sender_address = sender_address
self.sender_private_key = sender_private_key
self.recipient_address = recipient_address
self.value = value
self.note = note
self.picture = picture
    def __getattr__(self, attr):
        raise AttributeError(attr)
def to_dict(self):
return OrderedDict({'sender_address': self.sender_address,
'recipient_address': self.recipient_address,
'value': self.value,
'note': self.note,
'picture': self.picture})
def sign_transaction(self):
private_key = self.sender_private_key
signer = PKCS1_v1_5.new(private_key)
h = SHA.new(str(self.to_dict()).encode('utf8'))
return binascii.hexlify(signer.sign(h)).decode('ascii')
app = Flask(__name__)
@app.route('/')
def index():
return render_template('./indexClient.html')
@app.route('/make/transaction')
def make_transaction():
return render_template('./make_transaction.html')
@app.route('/view/transactions')
def view_transaction():
return render_template('./view_transactions.html')
@app.route('/view/wallet_balance')
def view_wallet_balance():
return render_template('./view_wallet_balance.html')
@app.route('/wallet/new', methods=['GET'])
def new_wallet():
random_gen = Crypto.Random.new().read
private_key = RSA.generate(1024, random_gen)
public_key = private_key.publickey()
response = {
'private_key': binascii.hexlify(private_key.exportKey(format='DER')).decode('ascii'),
'public_key': binascii.hexlify(public_key.exportKey(format='DER')).decode('ascii')
}
f = open("private.pem", "wb")
f.write(private_key.exportKey('PEM'))
f.close()
return jsonify(response), 200
@app.route('/generate/transaction', methods=['POST'])
def generate_transaction():
p = open("private.pem", "r")
priKey = RSA.import_key(p.read())
pubKey = priKey.publickey()
sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
sender_private_key = priKey
recipient_address = request.form['recipient_address']
value = request.form['amount']
note = request.form['note']
if request.form['picture'] != "":
with open(request.form['picture'], "rb") as imageFile:
pictureString = base64.b64encode(imageFile.read())
            pictureString1 = pictureString.decode('ascii')
print(pictureString)
print(pictureString1)
else:
pictureString1 = ""
transaction = Transaction(sender_address, sender_private_key, recipient_address, value, note, pictureString1)
response = {'transaction': transaction.to_dict(), 'signature': transaction.sign_transaction()}
return jsonify(response), 200
@app.route('/get_pub_key', methods=['GET'])
def get_pub_key():
p = open("private.pem", "r")
priKey = RSA.import_key(p.read())
pubKey = priKey.publickey()
sender_address = binascii.hexlify(pubKey.exportKey(format='DER')).decode('ascii')
response = {'pub_key': sender_address}
return jsonify(response), 200
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('-p', '--port', default=8080, type=int, help='port to listen on')
args = parser.parse_args()
port = args.port
app.run(host='0.0.0.0', port=port)
| true | true |
1c4567eaf684081be1c5f842fe292f61cc1719a8 | 1,370 | py | Python
dataloader/dataloaders.py | misads/cv_template | 9976ee0ada449a494d26f896c598610f233edc10 | ["MIT"] | 69 | 2020-09-01T11:23:48.000Z | 2022-03-26T08:42:16.000Z
dataloader/dataloaders.py | misads/cv_template | 9976ee0ada449a494d26f896c598610f233edc10 | ["MIT"] | 3 | 2021-02-16T09:22:09.000Z | 2022-01-02T07:54:39.000Z
dataloader/dataloaders.py | misads/cv_template | 9976ee0ada449a494d26f896c598610f233edc10 | ["MIT"] | 15 | 2020-07-07T06:37:33.000Z | 2022-03-21T07:37:33.000Z
# encoding=utf-8
from dataloader.image_list import ListTrainValDataset, ListTestDataset
from dataloader.transforms import get_transform
from torch.utils.data import DataLoader
from options import opt
import pdb
import os
###################
TEST_DATASET_HAS_OPEN = False  # whether a public test set has been released
###################
train_list = os.path.join('datasets', opt.dataset, 'train.txt')
val_list = os.path.join('datasets', opt.dataset, 'val.txt')
max_size = 128 if opt.debug else None
# transforms
transform = get_transform(opt.transform)
train_transform = transform.train_transform
val_transform = transform.val_transform
# datasets and dataloaders
train_dataset = ListTrainValDataset(train_list, transforms=train_transform, max_size=max_size)
train_dataloader = DataLoader(train_dataset, batch_size=opt.batch_size, shuffle=True, num_workers=opt.workers, drop_last=True)
val_dataset = ListTrainValDataset(val_list, transforms=val_transform, max_size=max_size)
val_dataloader = DataLoader(val_dataset, batch_size=1, shuffle=False, num_workers=opt.workers//2)
if TEST_DATASET_HAS_OPEN:
    test_list = os.path.join('datasets', opt.dataset, 'test.txt')  # not available yet
test_dataset = ListTestDataset(test_list, scale=opt.scale, max_size=max_size, norm=opt.norm_input)
test_dataloader = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=1)
else:
test_dataloader = None
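
# Hedged usage sketch (added): how a training script might consume the
# loaders built above; batch contents depend on ListTrainValDataset and the
# chosen transform, so this only shows the loop shape.
def _peek_one_batch():
    for batch in train_dataloader:
        return batch  # grab the first batch as a quick sanity check
    return None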
| 35.128205 | 126 | 0.783212 |
from dataloader.image_list import ListTrainValDataset, ListTestDataset
from dataloader.transforms import get_transform
from torch.utils.data import DataLoader
from options import opt
import pdb
import os
TEST_DATASET_HAS_OPEN = False
train_list = os.path.join('datasets', opt.dataset, 'train.txt')
val_list = os.path.join('datasets', opt.dataset, 'val.txt')
max_size = 128 if opt.debug else None
transform = get_transform(opt.transform)
train_transform = transform.train_transform
val_transform = transform.val_transform
train_dataset = ListTrainValDataset(train_list, transforms=train_transform, max_size=max_size)
train_dataloader = DataLoader(train_dataset, batch_size=opt.batch_size, shuffle=True, num_workers=opt.workers, drop_last=True)
val_dataset = ListTrainValDataset(val_list, transforms=val_transform, max_size=max_size)
val_dataloader = DataLoader(val_dataset, batch_size=1, shuffle=False, num_workers=opt.workers//2)
if TEST_DATASET_HAS_OPEN:
test_list = os.path.join('datasets', opt.dataset, 'test.txt')
test_dataset = ListTestDataset(test_list, scale=opt.scale, max_size=max_size, norm=opt.norm_input)
test_dataloader = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=1)
else:
test_dataloader = None
| true | true |
1c45681e99e7576cdadf4d81110b8dbc5fa1dd25 | 4,730 | py | Python
graph_objs/ohlc/_line.py | wwwidonja/changed_plotly | 1bda35a438539a97c84a3ab3952e95e8848467bd | ["MIT"] | null | null | null
graph_objs/ohlc/_line.py | wwwidonja/changed_plotly | 1bda35a438539a97c84a3ab3952e95e8848467bd | ["MIT"] | null | null | null
graph_objs/ohlc/_line.py | wwwidonja/changed_plotly | 1bda35a438539a97c84a3ab3952e95e8848467bd | ["MIT"] | null | null | null
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "ohlc"
_path_str = "ohlc.line"
_valid_props = {"dash", "width"}
# dash
# ----
@property
def dash(self):
"""
Sets the dash style of lines. Set to a dash type string
("solid", "dot", "dash", "longdash", "dashdot", or
"longdashdot") or a dash length list in px (eg
"5px,10px,2px,2px"). Note that this style setting can also be
set per direction via `increasing.line.dash` and
`decreasing.line.dash`.
The 'dash' property is an enumeration that may be specified as:
- One of the following dash styles:
['solid', 'dot', 'dash', 'longdash', 'dashdot', 'longdashdot']
- A string containing a dash length list in pixels or percentages
(e.g. '5px 10px 2px 2px', '5, 10, 2, 2', '10% 20% 40%', etc.)
Returns
-------
str
"""
return self["dash"]
@dash.setter
def dash(self, val):
self["dash"] = val
# width
# -----
@property
def width(self):
"""
        Sets the width (in px) of the lines. Note that this style setting
        can also be set per direction via `increasing.line.width` and
        `decreasing.line.width`.
The 'width' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["width"]
@width.setter
def width(self, val):
self["width"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
dash
Sets the dash style of lines. Set to a dash type string
("solid", "dot", "dash", "longdash", "dashdot", or
"longdashdot") or a dash length list in px (eg
"5px,10px,2px,2px"). Note that this style setting can
also be set per direction via `increasing.line.dash`
and `decreasing.line.dash`.
width
            Sets the width (in px) of the lines. Note that this style
            setting can also be set per direction via
            `increasing.line.width` and `decreasing.line.width`.
"""
def __init__(self, arg=None, dash=None, width=None, **kwargs):
"""
Construct a new Line object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`new_plotly.graph_objs.ohlc.Line`
dash
Sets the dash style of lines. Set to a dash type string
("solid", "dot", "dash", "longdash", "dashdot", or
"longdashdot") or a dash length list in px (eg
"5px,10px,2px,2px"). Note that this style setting can
also be set per direction via `increasing.line.dash`
and `decreasing.line.dash`.
width
            Sets the width (in px) of the lines. Note that this style
            setting can also be set per direction via
            `increasing.line.width` and `decreasing.line.width`.
Returns
-------
Line
"""
super(Line, self).__init__("line")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the new_plotly.graph_objs.ohlc.Line
constructor must be a dict or
an instance of :class:`new_plotly.graph_objs.ohlc.Line`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("dash", None)
_v = dash if dash is not None else _v
if _v is not None:
self["dash"] = _v
_v = arg.pop("width", None)
_v = width if width is not None else _v
if _v is not None:
self["width"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
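
# Added usage note: in user code this wrapper is normally reached through the
# figure API rather than instantiated directly, e.g. (hypothetical snippet,
# assuming the renamed `new_plotly` package referenced in the error messages
# above exposes the usual top-level API):
#
#     import new_plotly.graph_objects as go
#     fig = go.Figure(go.Ohlc(line=dict(width=2, dash="dot")))
#
# In upstream plotly the same class lives at plotly.graph_objs.ohlc.Line.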
| 31.324503 | 82 | 0.532981 |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
_parent_path_str = "ohlc"
_path_str = "ohlc.line"
_valid_props = {"dash", "width"}
@property
def dash(self):
return self["dash"]
@dash.setter
def dash(self, val):
self["dash"] = val
@property
def width(self):
return self["width"]
@width.setter
def width(self, val):
self["width"] = val
@property
def _prop_descriptions(self):
return """\
dash
Sets the dash style of lines. Set to a dash type string
("solid", "dot", "dash", "longdash", "dashdot", or
"longdashdot") or a dash length list in px (eg
"5px,10px,2px,2px"). Note that this style setting can
also be set per direction via `increasing.line.dash`
and `decreasing.line.dash`.
width
            Sets the width (in px) of the lines. Note that this style
            setting can also be set per direction via
            `increasing.line.width` and `decreasing.line.width`.
"""
def __init__(self, arg=None, dash=None, width=None, **kwargs):
super(Line, self).__init__("line")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the new_plotly.graph_objs.ohlc.Line
constructor must be a dict or
an instance of :class:`new_plotly.graph_objs.ohlc.Line`"""
)
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
_v = arg.pop("dash", None)
_v = dash if dash is not None else _v
if _v is not None:
self["dash"] = _v
_v = arg.pop("width", None)
_v = width if width is not None else _v
if _v is not None:
self["width"] = _v
self._process_kwargs(**dict(arg, **kwargs))
self._skip_invalid = False
| true | true |
1c45687ed1e1d1996e74a33c71010180e5f720d1 | 6,785 | py | Python
Localization/particle_filter/particle_filter.py | MerdanBay/PythonRobotics | 71de5d038f348d347d7b5dc00c914d523cd59f92 | ["MIT"] | 1 | 2021-12-02T01:45:01.000Z | 2021-12-02T01:45:01.000Z
Localization/particle_filter/particle_filter.py | MerdanBay/PythonRobotics | 71de5d038f348d347d7b5dc00c914d523cd59f92 | ["MIT"] | null | null | null
Localization/particle_filter/particle_filter.py | MerdanBay/PythonRobotics | 71de5d038f348d347d7b5dc00c914d523cd59f92 | ["MIT"] | 1 | 2022-01-14T11:11:24.000Z | 2022-01-14T11:11:24.000Z
"""
Particle Filter localization sample
author: Atsushi Sakai (@Atsushi_twi)
"""
import sys
import os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../utils/")
import math
import matplotlib.pyplot as plt
import numpy as np
from utils.angle import rot_mat_2d
# Estimation parameter of PF
Q = np.diag([0.2]) ** 2 # range error
R = np.diag([2.0, np.deg2rad(40.0)]) ** 2 # input error
# Simulation parameter
Q_sim = np.diag([0.2]) ** 2
R_sim = np.diag([1.0, np.deg2rad(30.0)]) ** 2
DT = 0.1 # time tick [s]
SIM_TIME = 50.0 # simulation time [s]
MAX_RANGE = 20.0 # maximum observation range
# Particle filter parameter
NP = 100  # number of particles
NTh = NP / 2.0  # effective-particle threshold below which re-sampling runs
show_animation = True
def calc_input():
v = 1.0 # [m/s]
yaw_rate = 0.1 # [rad/s]
u = np.array([[v, yaw_rate]]).T
return u
def observation(x_true, xd, u, rf_id):
x_true = motion_model(x_true, u)
# add noise to gps x-y
z = np.zeros((0, 3))
for i in range(len(rf_id[:, 0])):
dx = x_true[0, 0] - rf_id[i, 0]
dy = x_true[1, 0] - rf_id[i, 1]
d = math.hypot(dx, dy)
if d <= MAX_RANGE:
dn = d + np.random.randn() * Q_sim[0, 0] ** 0.5 # add noise
zi = np.array([[dn, rf_id[i, 0], rf_id[i, 1]]])
z = np.vstack((z, zi))
# add noise to input
ud1 = u[0, 0] + np.random.randn() * R_sim[0, 0] ** 0.5
ud2 = u[1, 0] + np.random.randn() * R_sim[1, 1] ** 0.5
ud = np.array([[ud1, ud2]]).T
xd = motion_model(xd, ud)
return x_true, z, xd, ud
def motion_model(x, u):
F = np.array([[1.0, 0, 0, 0],
[0, 1.0, 0, 0],
[0, 0, 1.0, 0],
[0, 0, 0, 0]])
B = np.array([[DT * math.cos(x[2, 0]), 0],
[DT * math.sin(x[2, 0]), 0],
[0.0, DT],
[1.0, 0.0]])
x = F.dot(x) + B.dot(u)
return x
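
# Added note: the model above propagates the state vector [x, y, yaw, v]' as
#   x_{k+1}   = x_k + DT * cos(yaw_k) * v
#   y_{k+1}   = y_k + DT * sin(yaw_k) * v
#   yaw_{k+1} = yaw_k + DT * yaw_rate
#   v_{k+1}   = v (row 4 of F is zero, so v is replaced by the commanded speed)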
def gauss_likelihood(x, sigma):
p = 1.0 / math.sqrt(2.0 * math.pi * sigma ** 2) * \
math.exp(-x ** 2 / (2 * sigma ** 2))
return p
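
# Added worked example: gauss_likelihood(0.0, 1.0) evaluates the N(0, 1)
# density at its mean, 1 / sqrt(2 * pi) ~= 0.3989; a particle's weight decays
# as the mismatch dz between predicted and measured range grows.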
def calc_covariance(x_est, px, pw):
"""
calculate covariance matrix
see ipynb doc
"""
cov = np.zeros((3, 3))
n_particle = px.shape[1]
for i in range(n_particle):
dx = (px[:, i:i + 1] - x_est)[0:3]
cov += pw[0, i] * dx @ dx.T
cov *= 1.0 / (1.0 - pw @ pw.T)
return cov
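
# Added note: the loop above implements the weighted sample covariance
#   cov = (1 / (1 - sum_i w_i^2)) * sum_i w_i * (x_i - x_est)(x_i - x_est)^T
# over the normalized particle weights (x, y and yaw components only).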
def pf_localization(px, pw, z, u):
"""
Localization with Particle filter
"""
for ip in range(NP):
x = np.array([px[:, ip]]).T
w = pw[0, ip]
# Predict with random input sampling
ud1 = u[0, 0] + np.random.randn() * R[0, 0] ** 0.5
ud2 = u[1, 0] + np.random.randn() * R[1, 1] ** 0.5
ud = np.array([[ud1, ud2]]).T
x = motion_model(x, ud)
# Calc Importance Weight
for i in range(len(z[:, 0])):
dx = x[0, 0] - z[i, 1]
dy = x[1, 0] - z[i, 2]
pre_z = math.hypot(dx, dy)
dz = pre_z - z[i, 0]
w = w * gauss_likelihood(dz, math.sqrt(Q[0, 0]))
px[:, ip] = x[:, 0]
pw[0, ip] = w
pw = pw / pw.sum() # normalize
x_est = px.dot(pw.T)
p_est = calc_covariance(x_est, px, pw)
N_eff = 1.0 / (pw.dot(pw.T))[0, 0] # Effective particle number
if N_eff < NTh:
px, pw = re_sampling(px, pw)
return x_est, p_est, px, pw
def re_sampling(px, pw):
"""
low variance re-sampling
"""
w_cum = np.cumsum(pw)
base = np.arange(0.0, 1.0, 1 / NP)
re_sample_id = base + np.random.uniform(0, 1 / NP)
indexes = []
ind = 0
for ip in range(NP):
while re_sample_id[ip] > w_cum[ind]:
ind += 1
indexes.append(ind)
px = px[:, indexes]
pw = np.zeros((1, NP)) + 1.0 / NP # init weight
return px, pw
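
# Hedged demo (added; not part of PythonRobotics): with one dominant weight,
# low variance re-sampling duplicates that particle in nearly every slot
# while keeping exactly NP particles and resetting the weights to uniform.
def _demo_re_sampling():
    px = np.arange(4 * NP, dtype=float).reshape(4, NP)
    pw = np.full((1, NP), 1e-6)
    pw[0, 0] = 1.0
    pw = pw / pw.sum()
    px_new, pw_new = re_sampling(px, pw)
    assert px_new.shape == (4, NP) and np.allclose(pw_new, 1.0 / NP)
    return px_new, pw_new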
def plot_covariance_ellipse(x_est, p_est): # pragma: no cover
p_xy = p_est[0:2, 0:2]
eig_val, eig_vec = np.linalg.eig(p_xy)
if eig_val[0] >= eig_val[1]:
big_ind = 0
small_ind = 1
else:
big_ind = 1
small_ind = 0
t = np.arange(0, 2 * math.pi + 0.1, 0.1)
# eig_val[big_ind] or eiq_val[small_ind] were occasionally negative
# numbers extremely close to 0 (~10^-20), catch these cases and set the
# respective variable to 0
try:
a = math.sqrt(eig_val[big_ind])
except ValueError:
a = 0
try:
b = math.sqrt(eig_val[small_ind])
except ValueError:
b = 0
x = [a * math.cos(it) for it in t]
y = [b * math.sin(it) for it in t]
angle = math.atan2(eig_vec[1, big_ind], eig_vec[0, big_ind])
fx = rot_mat_2d(angle) @ np.array([[x, y]])
px = np.array(fx[:, 0] + x_est[0, 0]).flatten()
py = np.array(fx[:, 1] + x_est[1, 0]).flatten()
plt.plot(px, py, "--r")
def main():
print(__file__ + " start!!")
time = 0.0
# RF_ID positions [x, y]
rf_id = np.array([[10.0, 0.0],
[10.0, 10.0],
[0.0, 15.0],
[-5.0, 20.0]])
# State Vector [x y yaw v]'
x_est = np.zeros((4, 1))
x_true = np.zeros((4, 1))
px = np.zeros((4, NP)) # Particle store
pw = np.zeros((1, NP)) + 1.0 / NP # Particle weight
x_dr = np.zeros((4, 1)) # Dead reckoning
# history
h_x_est = x_est
h_x_true = x_true
h_x_dr = x_true
while SIM_TIME >= time:
time += DT
u = calc_input()
x_true, z, x_dr, ud = observation(x_true, x_dr, u, rf_id)
x_est, PEst, px, pw = pf_localization(px, pw, z, ud)
# store data history
h_x_est = np.hstack((h_x_est, x_est))
h_x_dr = np.hstack((h_x_dr, x_dr))
h_x_true = np.hstack((h_x_true, x_true))
if show_animation:
plt.cla()
# for stopping simulation with the esc key.
plt.gcf().canvas.mpl_connect(
'key_release_event',
lambda event: [exit(0) if event.key == 'escape' else None])
for i in range(len(z[:, 0])):
plt.plot([x_true[0, 0], z[i, 1]], [x_true[1, 0], z[i, 2]], "-k")
plt.plot(rf_id[:, 0], rf_id[:, 1], "*k")
plt.plot(px[0, :], px[1, :], ".r")
plt.plot(np.array(h_x_true[0, :]).flatten(),
np.array(h_x_true[1, :]).flatten(), "-b")
plt.plot(np.array(h_x_dr[0, :]).flatten(),
np.array(h_x_dr[1, :]).flatten(), "-k")
plt.plot(np.array(h_x_est[0, :]).flatten(),
np.array(h_x_est[1, :]).flatten(), "-r")
plot_covariance_ellipse(x_est, PEst)
plt.axis("equal")
plt.grid(True)
plt.pause(0.001)
if __name__ == '__main__':
main()
| 25.700758 | 80 | 0.506853 |
import sys
import os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../utils/")
import math
import matplotlib.pyplot as plt
import numpy as np
from utils.angle import rot_mat_2d
Q = np.diag([0.2]) ** 2
R = np.diag([2.0, np.deg2rad(40.0)]) ** 2
Q_sim = np.diag([0.2]) ** 2
R_sim = np.diag([1.0, np.deg2rad(30.0)]) ** 2
DT = 0.1
SIM_TIME = 50.0
MAX_RANGE = 20.0
NP = 100
NTh = NP / 2.0
show_animation = True
def calc_input():
    v = 1.0
    yaw_rate = 0.1
    u = np.array([[v, yaw_rate]]).T
return u
def observation(x_true, xd, u, rf_id):
x_true = motion_model(x_true, u)
z = np.zeros((0, 3))
for i in range(len(rf_id[:, 0])):
dx = x_true[0, 0] - rf_id[i, 0]
dy = x_true[1, 0] - rf_id[i, 1]
d = math.hypot(dx, dy)
if d <= MAX_RANGE:
            dn = d + np.random.randn() * Q_sim[0, 0] ** 0.5
            zi = np.array([[dn, rf_id[i, 0], rf_id[i, 1]]])
z = np.vstack((z, zi))
ud1 = u[0, 0] + np.random.randn() * R_sim[0, 0] ** 0.5
ud2 = u[1, 0] + np.random.randn() * R_sim[1, 1] ** 0.5
ud = np.array([[ud1, ud2]]).T
xd = motion_model(xd, ud)
return x_true, z, xd, ud
def motion_model(x, u):
F = np.array([[1.0, 0, 0, 0],
[0, 1.0, 0, 0],
[0, 0, 1.0, 0],
[0, 0, 0, 0]])
B = np.array([[DT * math.cos(x[2, 0]), 0],
[DT * math.sin(x[2, 0]), 0],
[0.0, DT],
[1.0, 0.0]])
x = F.dot(x) + B.dot(u)
return x
def gauss_likelihood(x, sigma):
p = 1.0 / math.sqrt(2.0 * math.pi * sigma ** 2) * \
math.exp(-x ** 2 / (2 * sigma ** 2))
return p
def calc_covariance(x_est, px, pw):
cov = np.zeros((3, 3))
n_particle = px.shape[1]
for i in range(n_particle):
dx = (px[:, i:i + 1] - x_est)[0:3]
cov += pw[0, i] * dx @ dx.T
cov *= 1.0 / (1.0 - pw @ pw.T)
return cov
def pf_localization(px, pw, z, u):
for ip in range(NP):
x = np.array([px[:, ip]]).T
w = pw[0, ip]
ud1 = u[0, 0] + np.random.randn() * R[0, 0] ** 0.5
ud2 = u[1, 0] + np.random.randn() * R[1, 1] ** 0.5
ud = np.array([[ud1, ud2]]).T
x = motion_model(x, ud)
for i in range(len(z[:, 0])):
dx = x[0, 0] - z[i, 1]
dy = x[1, 0] - z[i, 2]
pre_z = math.hypot(dx, dy)
dz = pre_z - z[i, 0]
w = w * gauss_likelihood(dz, math.sqrt(Q[0, 0]))
px[:, ip] = x[:, 0]
pw[0, ip] = w
pw = pw / pw.sum()
x_est = px.dot(pw.T)
p_est = calc_covariance(x_est, px, pw)
    N_eff = 1.0 / (pw.dot(pw.T))[0, 0]
    if N_eff < NTh:
px, pw = re_sampling(px, pw)
return x_est, p_est, px, pw
def re_sampling(px, pw):
w_cum = np.cumsum(pw)
base = np.arange(0.0, 1.0, 1 / NP)
re_sample_id = base + np.random.uniform(0, 1 / NP)
indexes = []
ind = 0
for ip in range(NP):
while re_sample_id[ip] > w_cum[ind]:
ind += 1
indexes.append(ind)
px = px[:, indexes]
pw = np.zeros((1, NP)) + 1.0 / NP
return px, pw
def plot_covariance_ellipse(x_est, p_est):
    p_xy = p_est[0:2, 0:2]
eig_val, eig_vec = np.linalg.eig(p_xy)
if eig_val[0] >= eig_val[1]:
big_ind = 0
small_ind = 1
else:
big_ind = 1
small_ind = 0
t = np.arange(0, 2 * math.pi + 0.1, 0.1)
try:
a = math.sqrt(eig_val[big_ind])
except ValueError:
a = 0
try:
b = math.sqrt(eig_val[small_ind])
except ValueError:
b = 0
x = [a * math.cos(it) for it in t]
y = [b * math.sin(it) for it in t]
angle = math.atan2(eig_vec[1, big_ind], eig_vec[0, big_ind])
fx = rot_mat_2d(angle) @ np.array([[x, y]])
px = np.array(fx[:, 0] + x_est[0, 0]).flatten()
py = np.array(fx[:, 1] + x_est[1, 0]).flatten()
plt.plot(px, py, "--r")
def main():
print(__file__ + " start!!")
time = 0.0
rf_id = np.array([[10.0, 0.0],
[10.0, 10.0],
[0.0, 15.0],
[-5.0, 20.0]])
x_est = np.zeros((4, 1))
x_true = np.zeros((4, 1))
px = np.zeros((4, NP)) # Particle store
pw = np.zeros((1, NP)) + 1.0 / NP # Particle weight
x_dr = np.zeros((4, 1)) # Dead reckoning
# history
h_x_est = x_est
h_x_true = x_true
h_x_dr = x_true
while SIM_TIME >= time:
time += DT
u = calc_input()
x_true, z, x_dr, ud = observation(x_true, x_dr, u, rf_id)
x_est, PEst, px, pw = pf_localization(px, pw, z, ud)
# store data history
h_x_est = np.hstack((h_x_est, x_est))
h_x_dr = np.hstack((h_x_dr, x_dr))
h_x_true = np.hstack((h_x_true, x_true))
if show_animation:
plt.cla()
# for stopping simulation with the esc key.
plt.gcf().canvas.mpl_connect(
'key_release_event',
lambda event: [exit(0) if event.key == 'escape' else None])
for i in range(len(z[:, 0])):
plt.plot([x_true[0, 0], z[i, 1]], [x_true[1, 0], z[i, 2]], "-k")
plt.plot(rf_id[:, 0], rf_id[:, 1], "*k")
plt.plot(px[0, :], px[1, :], ".r")
plt.plot(np.array(h_x_true[0, :]).flatten(),
np.array(h_x_true[1, :]).flatten(), "-b")
plt.plot(np.array(h_x_dr[0, :]).flatten(),
np.array(h_x_dr[1, :]).flatten(), "-k")
plt.plot(np.array(h_x_est[0, :]).flatten(),
np.array(h_x_est[1, :]).flatten(), "-r")
plot_covariance_ellipse(x_est, PEst)
plt.axis("equal")
plt.grid(True)
plt.pause(0.001)
if __name__ == '__main__':
main()
| true
| true
|
1c45694042710f3d8d3724815e70347ae2585bff
| 1,850
|
py
|
Python
|
thedoctor/tests/test_integration.py
|
hhuuggoo/thedoctor
|
84c11377dc16ef8208480cd2745ce1ffc5614865
|
[
"BSD-2-Clause"
] | 37
|
2015-02-24T21:59:04.000Z
|
2021-07-13T19:04:34.000Z
|
thedoctor/tests/test_integration.py
|
hhuuggoo/thedoctor
|
84c11377dc16ef8208480cd2745ce1ffc5614865
|
[
"BSD-2-Clause"
] | 1
|
2015-03-23T20:23:41.000Z
|
2015-03-23T21:09:58.000Z
|
thedoctor/tests/test_integration.py
|
hhuuggoo/thedoctor
|
84c11377dc16ef8208480cd2745ce1ffc5614865
|
[
"BSD-2-Clause"
] | 7
|
2015-03-17T17:18:27.000Z
|
2020-07-30T13:05:42.000Z
|
from .utils import raises
from .. import ValidationError
from .. import validate
from ..validators import dict_validator, true
def test_integration():
@validate(a=int, b=int)
def func(a, b):
return (a, b)
assert func(1, 2) == (1, 2)
assert raises(ValidationError, func, 1, 'a')
@validate(a=int)
def func(a, b):
return (a, b)
assert func(1, 'a') == (1, 'a')
assert raises(ValidationError, func, 'a', 1)
def test_return_validator():
def return_validator(result):
if result != 3:
raise ValidationError('not 3')
@validate(_return=[int, return_validator])
def func(a, b):
return a + b
assert raises(ValidationError, func, 1, 3)
assert raises(ValidationError, func, 'a', 'b')
assert func(1, 2) == 3
def sums_to_3(all_args):
if all_args['a'] + all_args['b'] != 3:
raise ValidationError('not sum to 3')
@validate(_all=sums_to_3)
def func(a, b):
return a + b
assert raises(ValidationError, func, 1, 3)
assert func(1, 2) == 3
def test_lambda_validator():
@validate(_all=lambda x: true(x['a'] + x['b'] == 3, "must sum to 3"))
def func(a, b):
return a + b
assert raises(ValidationError, func, 1, 3)
assert func(1, 2) == 3
def instance_method_test():
class Test(object):
@validate(_return=lambda x: true(x % 2 == 0, "return must be even"),
a=int, b=int)
def func(self, a, b):
return a + b
t = Test()
assert raises(ValidationError, t.func, 1, 2)
t.func(2, 4)
def dict_validator_integration_test():
@validate(x=dict_validator(
{'name': lambda x: true(x == 'sally', 'must be sally')}))
def func(x):
return x
assert raises(ValidationError, func, {'name': 'bob'})
func({'name': 'sally'})
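# Standalone usage sketch (assumes the package is importable as `thedoctor`,
# mirroring what the tests above exercise):
#
#     from thedoctor import validate, ValidationError
#
#     @validate(a=int, _return=int)
#     def double(a):
#         return 2 * a
#
#     double(3)      # -> 6
#     double('x')    # raises ValidationError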
| 26.428571
| 76
| 0.584324
|
from .utils import raises
from .. import ValidationError
from .. import validate
from ..validators import dict_validator, true
def test_integration():
@validate(a=int, b=int)
def func(a, b):
return (a, b)
assert func(1, 2) == (1, 2)
assert raises(ValidationError, func, 1, 'a')
@validate(a=int)
def func(a, b):
return (a, b)
assert func(1, 'a') == (1, 'a')
assert raises(ValidationError, func, 'a', 1)
def test_return_validator():
def return_validator(result):
if result != 3:
raise ValidationError('not 3')
@validate(_return=[int, return_validator])
def func(a, b):
return a + b
assert raises(ValidationError, func, 1, 3)
assert raises(ValidationError, func, 'a', 'b')
assert func(1, 2) == 3
def sums_to_3(all_args):
if all_args['a'] + all_args['b'] != 3:
raise ValidationError('not sum to 3')
@validate(_all=sums_to_3)
def func(a, b):
return a + b
assert raises(ValidationError, func, 1, 3)
assert func(1, 2) == 3
def test_lambda_validator():
@validate(_all=lambda x: true(x['a'] + x['b'] == 3, "must sum to 3"))
def func(a, b):
return a + b
assert raises(ValidationError, func, 1, 3)
assert func(1, 2) == 3
def instance_method_test():
class Test(object):
@validate(_return=lambda x: true(x % 2 == 0, "return must be even"),
a=int, b=int)
def func(self, a, b):
return a + b
t = Test()
assert raises(ValidationError, t.func, 1, 2)
t.func(2, 4)
def dict_validator_integration_test():
@validate(x=dict_validator(
{'name': lambda x: true(x == 'sally', 'must be sally')}))
def func(x):
return x
assert raises(ValidationError, func, {'name': 'bob'})
func({'name': 'sally'})
| true
| true
|
1c45697bb34e4558711a296700d158a28ef349c2
| 1,054
|
py
|
Python
|
onnxruntime/__init__.py
|
NonStatic2014/onnxruntime
|
bdfd46082a152e9605199b4f01664fc76f97a346
|
[
"MIT"
] | null | null | null |
onnxruntime/__init__.py
|
NonStatic2014/onnxruntime
|
bdfd46082a152e9605199b4f01664fc76f97a346
|
[
"MIT"
] | 10
|
2019-03-25T21:47:46.000Z
|
2019-04-30T02:33:05.000Z
|
onnxruntime/__init__.py
|
NonStatic2014/onnxruntime
|
bdfd46082a152e9605199b4f01664fc76f97a346
|
[
"MIT"
] | 1
|
2019-04-09T16:15:51.000Z
|
2019-04-09T16:15:51.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------------------
"""
ONNX Runtime
enables high-performance evaluation of trained machine learning (ML)
models while keeping resource usage low.
Building on Microsoft's dedication to the
`Open Neural Network Exchange (ONNX) <https://onnx.ai/>`_
community, it supports traditional ML models as well
as Deep Learning algorithms in the
`ONNX-ML format <https://github.com/onnx/onnx/blob/master/docs/IR.md>`_.
"""
__version__ = "0.5.0"
__author__ = "Microsoft"
from onnxruntime.capi._pybind_state import get_all_providers, get_available_providers, get_device, RunOptions, SessionOptions, set_default_logger_severity, NodeArg, ModelMetadata, GraphOptimizationLevel, ExecutionMode
from onnxruntime.capi.session import InferenceSession
from onnxruntime.capi import onnxruntime_validation
onnxruntime_validation.check_distro_info()
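# Usage sketch -- the model path and input shape below are hypothetical,
# not part of this module:
#
#     import numpy as np
#     import onnxruntime
#     sess = onnxruntime.InferenceSession("model.onnx")
#     input_name = sess.get_inputs()[0].name
#     outputs = sess.run(None, {input_name: np.zeros((1, 3), dtype=np.float32)})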
| 47.909091
| 217
| 0.701139
|
__version__ = "0.5.0"
__author__ = "Microsoft"
from onnxruntime.capi._pybind_state import get_all_providers, get_available_providers, get_device, RunOptions, SessionOptions, set_default_logger_severity, NodeArg, ModelMetadata, GraphOptimizationLevel, ExecutionMode
from onnxruntime.capi.session import InferenceSession
from onnxruntime.capi import onnxruntime_validation
onnxruntime_validation.check_distro_info()
| true
| true
|
1c4569ce9ebd3c7cda1043c0df205b8a956f5f5e
| 3,842
|
py
|
Python
|
image_train.py
|
to0mi1/image-categorization-based-cifar10
|
96218f2f60faf424b26112559a170a05463113bb
|
[
"MIT"
] | 1
|
2019-04-16T08:42:32.000Z
|
2019-04-16T08:42:32.000Z
|
image_train.py
|
to0mi1/image-categorization-based-cifar10
|
96218f2f60faf424b26112559a170a05463113bb
|
[
"MIT"
] | null | null | null |
image_train.py
|
to0mi1/image-categorization-based-cifar10
|
96218f2f60faf424b26112559a170a05463113bb
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Build the model and run training.
"""
import os
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Activation, Flatten
from keras.utils import plot_model
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing import image
from keras.callbacks import EarlyStopping
# Parameter definitions
activation = 'relu'
optimizer = 'Adam'
nb_epoch = 30
batch_size = 16
# Directories holding the training and validation data
train_path = './train'
valid_path = './valid'
# Create the directory for storing training results
if not os.path.exists('./result'):
os.mkdir('./result')
result_dir = './result'
# Define the classes used to categorize the images
classes = ['buri', 'katsuo', 'kuromaguro', 'maaji', 'NG']
nb_classes = len(classes)
def image_train():
print('Start model building')
model = Sequential()
model.add(Conv2D(32, (3, 3), padding="same", input_shape=(150, 150, 3)))
model.add(Activation(activation))
model.add(Conv2D(32, (3, 3), padding="same"))
model.add(Activation(activation))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), padding='same'))
model.add(Activation(activation))
model.add(Conv2D(64, (3, 3), padding="same"))
model.add(Activation(activation))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(512))
model.add(Activation(activation))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
model.summary()
    model.compile(loss='categorical_crossentropy',  # multi-class (5-way softmax) labels
optimizer=optimizer,
metrics=['accuracy'])
    # Save the model architecture to a file
model_json = model.to_json()
with open(os.path.join(result_dir, 'model.json'), 'w') as f:
f.write(model_json)
print('start training.')
    # Build the training data generator
    train_datagen = ImageDataGenerator(
        #zca_whitening=True,      # apply ZCA whitening
        rotation_range=40,        # degree range for random rotations
        width_shift_range=0.2,    # range for random horizontal shifts
        height_shift_range=0.2,   # range for random vertical shifts
        shear_range=0.2,          # shear intensity (counter-clockwise shear angle, in radians)
        zoom_range=0.2,           # range for random zoom
        horizontal_flip=True,     # randomly flip inputs horizontally
        rescale=1.0 / 255)        # None or 0 means no rescaling; otherwise the data is multiplied by this value (before the other transformations)
train_generator = train_datagen.flow_from_directory(
train_path,
target_size=(150, 150),
batch_size=batch_size,
classes=classes,
class_mode='categorical')
    # Define the validation data generator
validation_datagen = ImageDataGenerator(rescale=1.0 / 255)
validation_generator = validation_datagen.flow_from_directory(
valid_path,
target_size=(150, 150),
batch_size=batch_size,
classes=classes,
class_mode='categorical')
steps_per_epoch = train_generator.samples
validation_steps = validation_generator.samples
print('steps_per_epoch is set to %s' % steps_per_epoch)
print('validation_steps is set to %s' % validation_steps)
    # Early stopping for training
es_cb = EarlyStopping(monitor='val_loss', patience=2, verbose=1, mode='auto')
    # Start training
history = model.fit_generator(generator=train_generator,
steps_per_epoch=steps_per_epoch,
verbose=1,
callbacks=[es_cb],
validation_data=validation_generator,
validation_steps=validation_steps,
epochs=nb_epoch)
print('Training Complete.')
model.save_weights(os.path.join(result_dir, 'weight.h5'))
# plot_model(model, to_file=os.path.join(result_dir, filename_prefix + '_model.png'), show_shapes=True)
if __name__ == '__main__':
image_train()
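# Reloading sketch (assumes a completed run left model.json and weight.h5
# under ./result):
#
#     from keras.models import model_from_json
#     with open('./result/model.json') as f:
#         model = model_from_json(f.read())
#     model.load_weights('./result/weight.h5')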
| 30.736
| 107
| 0.656689
|
import os
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Activation, Flatten
from keras.utils import plot_model
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing import image
from keras.callbacks import EarlyStopping
activation = 'relu'
optimizer = 'Adam'
nb_epoch = 30
batch_size = 16
train_path = './train'
valid_path = './valid'
if not os.path.exists('./result'):
os.mkdir('./result')
result_dir = './result'
classes = ['buri', 'katsuo', 'kuromaguro', 'maaji', 'NG']
nb_classes = len (classes)
def image_train():
print('Start model building')
model = Sequential()
model.add(Conv2D(32, (3, 3), padding="same", input_shape=(150, 150, 3)))
model.add(Activation(activation))
model.add(Conv2D(32, (3, 3), padding="same"))
model.add(Activation(activation))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), padding='same'))
model.add(Activation(activation))
model.add(Conv2D(64, (3, 3), padding="same"))
model.add(Activation(activation))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(512))
model.add(Activation(activation))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
model.summary()
    model.compile(loss='categorical_crossentropy',
optimizer=optimizer,
metrics=['accuracy'])
model_json = model.to_json()
with open(os.path.join(result_dir, 'model.json'), 'w') as f:
f.write(model_json)
print('start training.')
train_datagen = ImageDataGenerator(
rotation_range=40, width_shift_range=0.2, height_shift_range=0.2, shear_range=0.2, zoom_range=0.2, horizontal_flip=True, rescale=1.0 / 255)
train_generator = train_datagen.flow_from_directory(
train_path,
target_size=(150, 150),
batch_size=batch_size,
classes=classes,
class_mode='categorical')
validation_datagen = ImageDataGenerator(rescale=1.0 / 255)
validation_generator = validation_datagen.flow_from_directory(
valid_path,
target_size=(150, 150),
batch_size=batch_size,
classes=classes,
class_mode='categorical')
steps_per_epoch = train_generator.samples
validation_steps = validation_generator.samples
print('steps_per_epoch is set to %s' % steps_per_epoch)
print('validation_steps is set to %s' % validation_steps)
es_cb = EarlyStopping(monitor='val_loss', patience=2, verbose=1, mode='auto')
history = model.fit_generator(generator=train_generator,
steps_per_epoch=steps_per_epoch,
verbose=1,
callbacks=[es_cb],
validation_data=validation_generator,
validation_steps=validation_steps,
epochs=nb_epoch)
print('Training Complete.')
model.save_weights(os.path.join(result_dir, 'weight.h5'))
if __name__ == '__main__':
image_train()
| true
| true
|
1c456a5b82ba05659c5a11cb9ea95320d3f81903
| 2,818
|
py
|
Python
|
test/functional/rpc_getblockfilter.py
|
CallMeMisterOwl/bitcoin
|
9d2895157ec0ff6e356f40c5fe84d3007fc991c1
|
[
"MIT"
] | 2
|
2020-08-16T16:27:01.000Z
|
2020-08-20T06:19:32.000Z
|
test/functional/rpc_getblockfilter.py
|
CallMeMisterOwl/bitcoin
|
9d2895157ec0ff6e356f40c5fe84d3007fc991c1
|
[
"MIT"
] | 1
|
2022-01-08T14:38:57.000Z
|
2022-01-08T14:38:57.000Z
|
test/functional/rpc_getblockfilter.py
|
CallMeMisterOwl/bitcoin
|
9d2895157ec0ff6e356f40c5fe84d3007fc991c1
|
[
"MIT"
] | 1
|
2022-02-19T19:33:46.000Z
|
2022-02-19T19:33:46.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the getblockfilter RPC."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal, assert_is_hex_string, assert_raises_rpc_error,
)
FILTER_TYPES = ["basic"]
class GetBlockFilterTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [["-blockfilterindex"], []]
def run_test(self):
# Create two chains by disconnecting nodes 0 & 1, mining, then reconnecting
self.disconnect_nodes(0, 1)
self.generate(self.nodes[0], 3, sync_fun=self.no_op)
self.generate(self.nodes[1], 4, sync_fun=self.no_op)
assert_equal(self.nodes[0].getblockcount(), 3)
chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
# Reorg node 0 to a new chain
self.connect_nodes(0, 1)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 4)
chain1_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
# Test getblockfilter returns a filter for all blocks and filter types on active chain
for block_hash in chain1_hashes:
for filter_type in FILTER_TYPES:
result = self.nodes[0].getblockfilter(block_hash, filter_type)
assert_is_hex_string(result['filter'])
# Test getblockfilter returns a filter for all blocks and filter types on stale chain
for block_hash in chain0_hashes:
for filter_type in FILTER_TYPES:
result = self.nodes[0].getblockfilter(block_hash, filter_type)
assert_is_hex_string(result['filter'])
# Test getblockfilter with unknown block
bad_block_hash = "0123456789abcdef" * 4
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockfilter, bad_block_hash, "basic")
# Test getblockfilter with undefined filter type
genesis_hash = self.nodes[0].getblockhash(0)
assert_raises_rpc_error(-5, "Unknown filtertype", self.nodes[0].getblockfilter, genesis_hash, "unknown")
# Test getblockfilter fails on node without compact block filter index
self.restart_node(0, extra_args=["-blockfilterindex=0"])
for filter_type in FILTER_TYPES:
assert_raises_rpc_error(-1, "Index is not enabled for filtertype {}".format(filter_type),
self.nodes[0].getblockfilter, genesis_hash, filter_type)
if __name__ == '__main__':
GetBlockFilterTest().main()
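# Note: like other Bitcoin Core functional tests, this file is meant to be
# executed directly from a built source tree, e.g.
#     test/functional/rpc_getblockfilter.py
# or through the aggregate runner, test/functional/test_runner.py.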
| 43.353846
| 112
| 0.688077
|
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal, assert_is_hex_string, assert_raises_rpc_error,
)
FILTER_TYPES = ["basic"]
class GetBlockFilterTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [["-blockfilterindex"], []]
def run_test(self):
self.disconnect_nodes(0, 1)
self.generate(self.nodes[0], 3, sync_fun=self.no_op)
self.generate(self.nodes[1], 4, sync_fun=self.no_op)
assert_equal(self.nodes[0].getblockcount(), 3)
chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
self.connect_nodes(0, 1)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 4)
chain1_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
for block_hash in chain1_hashes:
for filter_type in FILTER_TYPES:
result = self.nodes[0].getblockfilter(block_hash, filter_type)
assert_is_hex_string(result['filter'])
for block_hash in chain0_hashes:
for filter_type in FILTER_TYPES:
result = self.nodes[0].getblockfilter(block_hash, filter_type)
assert_is_hex_string(result['filter'])
bad_block_hash = "0123456789abcdef" * 4
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockfilter, bad_block_hash, "basic")
genesis_hash = self.nodes[0].getblockhash(0)
assert_raises_rpc_error(-5, "Unknown filtertype", self.nodes[0].getblockfilter, genesis_hash, "unknown")
self.restart_node(0, extra_args=["-blockfilterindex=0"])
for filter_type in FILTER_TYPES:
assert_raises_rpc_error(-1, "Index is not enabled for filtertype {}".format(filter_type),
self.nodes[0].getblockfilter, genesis_hash, filter_type)
if __name__ == '__main__':
GetBlockFilterTest().main()
| true
| true
|
1c456aa4cb7c1dda13e25217b75d8708106ea6d2
| 14,192
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/securityinsights/v20190101preview/get_incident.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/securityinsights/v20190101preview/get_incident.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/securityinsights/v20190101preview/get_incident.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetIncidentResult',
'AwaitableGetIncidentResult',
'get_incident',
]
@pulumi.output_type
class GetIncidentResult:
"""
Represents an incident in Azure Security Insights.
"""
def __init__(__self__, additional_data=None, classification=None, classification_comment=None, classification_reason=None, created_time_utc=None, description=None, etag=None, first_activity_time_utc=None, id=None, incident_number=None, incident_url=None, labels=None, last_activity_time_utc=None, last_modified_time_utc=None, name=None, owner=None, provider_incident_id=None, provider_name=None, related_analytic_rule_ids=None, severity=None, status=None, title=None, type=None):
if additional_data and not isinstance(additional_data, dict):
raise TypeError("Expected argument 'additional_data' to be a dict")
pulumi.set(__self__, "additional_data", additional_data)
if classification and not isinstance(classification, str):
raise TypeError("Expected argument 'classification' to be a str")
pulumi.set(__self__, "classification", classification)
if classification_comment and not isinstance(classification_comment, str):
raise TypeError("Expected argument 'classification_comment' to be a str")
pulumi.set(__self__, "classification_comment", classification_comment)
if classification_reason and not isinstance(classification_reason, str):
raise TypeError("Expected argument 'classification_reason' to be a str")
pulumi.set(__self__, "classification_reason", classification_reason)
if created_time_utc and not isinstance(created_time_utc, str):
raise TypeError("Expected argument 'created_time_utc' to be a str")
pulumi.set(__self__, "created_time_utc", created_time_utc)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if first_activity_time_utc and not isinstance(first_activity_time_utc, str):
raise TypeError("Expected argument 'first_activity_time_utc' to be a str")
pulumi.set(__self__, "first_activity_time_utc", first_activity_time_utc)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if incident_number and not isinstance(incident_number, int):
raise TypeError("Expected argument 'incident_number' to be a int")
pulumi.set(__self__, "incident_number", incident_number)
if incident_url and not isinstance(incident_url, str):
raise TypeError("Expected argument 'incident_url' to be a str")
pulumi.set(__self__, "incident_url", incident_url)
if labels and not isinstance(labels, list):
raise TypeError("Expected argument 'labels' to be a list")
pulumi.set(__self__, "labels", labels)
if last_activity_time_utc and not isinstance(last_activity_time_utc, str):
raise TypeError("Expected argument 'last_activity_time_utc' to be a str")
pulumi.set(__self__, "last_activity_time_utc", last_activity_time_utc)
if last_modified_time_utc and not isinstance(last_modified_time_utc, str):
raise TypeError("Expected argument 'last_modified_time_utc' to be a str")
pulumi.set(__self__, "last_modified_time_utc", last_modified_time_utc)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if owner and not isinstance(owner, dict):
raise TypeError("Expected argument 'owner' to be a dict")
pulumi.set(__self__, "owner", owner)
if provider_incident_id and not isinstance(provider_incident_id, str):
raise TypeError("Expected argument 'provider_incident_id' to be a str")
pulumi.set(__self__, "provider_incident_id", provider_incident_id)
if provider_name and not isinstance(provider_name, str):
raise TypeError("Expected argument 'provider_name' to be a str")
pulumi.set(__self__, "provider_name", provider_name)
if related_analytic_rule_ids and not isinstance(related_analytic_rule_ids, list):
raise TypeError("Expected argument 'related_analytic_rule_ids' to be a list")
pulumi.set(__self__, "related_analytic_rule_ids", related_analytic_rule_ids)
if severity and not isinstance(severity, str):
raise TypeError("Expected argument 'severity' to be a str")
pulumi.set(__self__, "severity", severity)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if title and not isinstance(title, str):
raise TypeError("Expected argument 'title' to be a str")
pulumi.set(__self__, "title", title)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="additionalData")
def additional_data(self) -> 'outputs.IncidentAdditionalDataResponse':
"""
Additional data on the incident
"""
return pulumi.get(self, "additional_data")
@property
@pulumi.getter
def classification(self) -> Optional[str]:
"""
The reason the incident was closed
"""
return pulumi.get(self, "classification")
@property
@pulumi.getter(name="classificationComment")
def classification_comment(self) -> Optional[str]:
"""
Describes the reason the incident was closed
"""
return pulumi.get(self, "classification_comment")
@property
@pulumi.getter(name="classificationReason")
def classification_reason(self) -> Optional[str]:
"""
The classification reason the incident was closed with
"""
return pulumi.get(self, "classification_reason")
@property
@pulumi.getter(name="createdTimeUtc")
def created_time_utc(self) -> str:
"""
The time the incident was created
"""
return pulumi.get(self, "created_time_utc")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
The description of the incident
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
Etag of the azure resource
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="firstActivityTimeUtc")
def first_activity_time_utc(self) -> Optional[str]:
"""
The time of the first activity in the incident
"""
return pulumi.get(self, "first_activity_time_utc")
@property
@pulumi.getter
def id(self) -> str:
"""
Azure resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="incidentNumber")
def incident_number(self) -> int:
"""
A sequential number
"""
return pulumi.get(self, "incident_number")
@property
@pulumi.getter(name="incidentUrl")
def incident_url(self) -> str:
"""
The deep-link url to the incident in Azure portal
"""
return pulumi.get(self, "incident_url")
@property
@pulumi.getter
def labels(self) -> Optional[Sequence['outputs.IncidentLabelResponse']]:
"""
List of labels relevant to this incident
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="lastActivityTimeUtc")
def last_activity_time_utc(self) -> Optional[str]:
"""
The time of the last activity in the incident
"""
return pulumi.get(self, "last_activity_time_utc")
@property
@pulumi.getter(name="lastModifiedTimeUtc")
def last_modified_time_utc(self) -> str:
"""
The last time the incident was updated
"""
return pulumi.get(self, "last_modified_time_utc")
@property
@pulumi.getter
def name(self) -> str:
"""
Azure resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def owner(self) -> Optional['outputs.IncidentOwnerInfoResponse']:
"""
Describes a user that the incident is assigned to
"""
return pulumi.get(self, "owner")
@property
@pulumi.getter(name="providerIncidentId")
def provider_incident_id(self) -> Optional[str]:
"""
The incident ID assigned by the incident provider
"""
return pulumi.get(self, "provider_incident_id")
@property
@pulumi.getter(name="providerName")
def provider_name(self) -> Optional[str]:
"""
The name of the source provider that generated the incident
"""
return pulumi.get(self, "provider_name")
@property
@pulumi.getter(name="relatedAnalyticRuleIds")
def related_analytic_rule_ids(self) -> Sequence[str]:
"""
List of resource ids of Analytic rules related to the incident
"""
return pulumi.get(self, "related_analytic_rule_ids")
@property
@pulumi.getter
def severity(self) -> str:
"""
The severity of the incident
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter
def status(self) -> str:
"""
The status of the incident
"""
return pulumi.get(self, "status")
@property
@pulumi.getter
def title(self) -> str:
"""
The title of the incident
"""
return pulumi.get(self, "title")
@property
@pulumi.getter
def type(self) -> str:
"""
Azure resource type
"""
return pulumi.get(self, "type")
class AwaitableGetIncidentResult(GetIncidentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetIncidentResult(
additional_data=self.additional_data,
classification=self.classification,
classification_comment=self.classification_comment,
classification_reason=self.classification_reason,
created_time_utc=self.created_time_utc,
description=self.description,
etag=self.etag,
first_activity_time_utc=self.first_activity_time_utc,
id=self.id,
incident_number=self.incident_number,
incident_url=self.incident_url,
labels=self.labels,
last_activity_time_utc=self.last_activity_time_utc,
last_modified_time_utc=self.last_modified_time_utc,
name=self.name,
owner=self.owner,
provider_incident_id=self.provider_incident_id,
provider_name=self.provider_name,
related_analytic_rule_ids=self.related_analytic_rule_ids,
severity=self.severity,
status=self.status,
title=self.title,
type=self.type)
def get_incident(incident_id: Optional[str] = None,
operational_insights_resource_provider: Optional[str] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIncidentResult:
"""
Represents an incident in Azure Security Insights.
:param str incident_id: Incident ID
:param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
:param str resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
:param str workspace_name: The name of the workspace.
"""
__args__ = dict()
__args__['incidentId'] = incident_id
__args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:securityinsights/v20190101preview:getIncident', __args__, opts=opts, typ=GetIncidentResult).value
return AwaitableGetIncidentResult(
additional_data=__ret__.additional_data,
classification=__ret__.classification,
classification_comment=__ret__.classification_comment,
classification_reason=__ret__.classification_reason,
created_time_utc=__ret__.created_time_utc,
description=__ret__.description,
etag=__ret__.etag,
first_activity_time_utc=__ret__.first_activity_time_utc,
id=__ret__.id,
incident_number=__ret__.incident_number,
incident_url=__ret__.incident_url,
labels=__ret__.labels,
last_activity_time_utc=__ret__.last_activity_time_utc,
last_modified_time_utc=__ret__.last_modified_time_utc,
name=__ret__.name,
owner=__ret__.owner,
provider_incident_id=__ret__.provider_incident_id,
provider_name=__ret__.provider_name,
related_analytic_rule_ids=__ret__.related_analytic_rule_ids,
severity=__ret__.severity,
status=__ret__.status,
title=__ret__.title,
type=__ret__.type)
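# Usage sketch -- the resource group, workspace, and incident id below are
# hypothetical placeholders:
#
#     result = get_incident(
#         incident_id="00000000-0000-0000-0000-000000000000",
#         operational_insights_resource_provider="Microsoft.OperationalInsights",
#         resource_group_name="my-rg",
#         workspace_name="my-workspace")
#     pulumi.export("incidentTitle", result.title)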
| 39.532033
| 483
| 0.666995
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetIncidentResult',
'AwaitableGetIncidentResult',
'get_incident',
]
@pulumi.output_type
class GetIncidentResult:
def __init__(__self__, additional_data=None, classification=None, classification_comment=None, classification_reason=None, created_time_utc=None, description=None, etag=None, first_activity_time_utc=None, id=None, incident_number=None, incident_url=None, labels=None, last_activity_time_utc=None, last_modified_time_utc=None, name=None, owner=None, provider_incident_id=None, provider_name=None, related_analytic_rule_ids=None, severity=None, status=None, title=None, type=None):
if additional_data and not isinstance(additional_data, dict):
raise TypeError("Expected argument 'additional_data' to be a dict")
pulumi.set(__self__, "additional_data", additional_data)
if classification and not isinstance(classification, str):
raise TypeError("Expected argument 'classification' to be a str")
pulumi.set(__self__, "classification", classification)
if classification_comment and not isinstance(classification_comment, str):
raise TypeError("Expected argument 'classification_comment' to be a str")
pulumi.set(__self__, "classification_comment", classification_comment)
if classification_reason and not isinstance(classification_reason, str):
raise TypeError("Expected argument 'classification_reason' to be a str")
pulumi.set(__self__, "classification_reason", classification_reason)
if created_time_utc and not isinstance(created_time_utc, str):
raise TypeError("Expected argument 'created_time_utc' to be a str")
pulumi.set(__self__, "created_time_utc", created_time_utc)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if first_activity_time_utc and not isinstance(first_activity_time_utc, str):
raise TypeError("Expected argument 'first_activity_time_utc' to be a str")
pulumi.set(__self__, "first_activity_time_utc", first_activity_time_utc)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if incident_number and not isinstance(incident_number, int):
raise TypeError("Expected argument 'incident_number' to be a int")
pulumi.set(__self__, "incident_number", incident_number)
if incident_url and not isinstance(incident_url, str):
raise TypeError("Expected argument 'incident_url' to be a str")
pulumi.set(__self__, "incident_url", incident_url)
if labels and not isinstance(labels, list):
raise TypeError("Expected argument 'labels' to be a list")
pulumi.set(__self__, "labels", labels)
if last_activity_time_utc and not isinstance(last_activity_time_utc, str):
raise TypeError("Expected argument 'last_activity_time_utc' to be a str")
pulumi.set(__self__, "last_activity_time_utc", last_activity_time_utc)
if last_modified_time_utc and not isinstance(last_modified_time_utc, str):
raise TypeError("Expected argument 'last_modified_time_utc' to be a str")
pulumi.set(__self__, "last_modified_time_utc", last_modified_time_utc)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if owner and not isinstance(owner, dict):
raise TypeError("Expected argument 'owner' to be a dict")
pulumi.set(__self__, "owner", owner)
if provider_incident_id and not isinstance(provider_incident_id, str):
raise TypeError("Expected argument 'provider_incident_id' to be a str")
pulumi.set(__self__, "provider_incident_id", provider_incident_id)
if provider_name and not isinstance(provider_name, str):
raise TypeError("Expected argument 'provider_name' to be a str")
pulumi.set(__self__, "provider_name", provider_name)
if related_analytic_rule_ids and not isinstance(related_analytic_rule_ids, list):
raise TypeError("Expected argument 'related_analytic_rule_ids' to be a list")
pulumi.set(__self__, "related_analytic_rule_ids", related_analytic_rule_ids)
if severity and not isinstance(severity, str):
raise TypeError("Expected argument 'severity' to be a str")
pulumi.set(__self__, "severity", severity)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if title and not isinstance(title, str):
raise TypeError("Expected argument 'title' to be a str")
pulumi.set(__self__, "title", title)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="additionalData")
def additional_data(self) -> 'outputs.IncidentAdditionalDataResponse':
return pulumi.get(self, "additional_data")
@property
@pulumi.getter
def classification(self) -> Optional[str]:
return pulumi.get(self, "classification")
@property
@pulumi.getter(name="classificationComment")
def classification_comment(self) -> Optional[str]:
return pulumi.get(self, "classification_comment")
@property
@pulumi.getter(name="classificationReason")
def classification_reason(self) -> Optional[str]:
return pulumi.get(self, "classification_reason")
@property
@pulumi.getter(name="createdTimeUtc")
def created_time_utc(self) -> str:
return pulumi.get(self, "created_time_utc")
@property
@pulumi.getter
def description(self) -> Optional[str]:
return pulumi.get(self, "description")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="firstActivityTimeUtc")
def first_activity_time_utc(self) -> Optional[str]:
return pulumi.get(self, "first_activity_time_utc")
@property
@pulumi.getter
def id(self) -> str:
return pulumi.get(self, "id")
@property
@pulumi.getter(name="incidentNumber")
def incident_number(self) -> int:
return pulumi.get(self, "incident_number")
@property
@pulumi.getter(name="incidentUrl")
def incident_url(self) -> str:
return pulumi.get(self, "incident_url")
@property
@pulumi.getter
def labels(self) -> Optional[Sequence['outputs.IncidentLabelResponse']]:
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="lastActivityTimeUtc")
def last_activity_time_utc(self) -> Optional[str]:
return pulumi.get(self, "last_activity_time_utc")
@property
@pulumi.getter(name="lastModifiedTimeUtc")
def last_modified_time_utc(self) -> str:
return pulumi.get(self, "last_modified_time_utc")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter
def owner(self) -> Optional['outputs.IncidentOwnerInfoResponse']:
return pulumi.get(self, "owner")
@property
@pulumi.getter(name="providerIncidentId")
def provider_incident_id(self) -> Optional[str]:
return pulumi.get(self, "provider_incident_id")
@property
@pulumi.getter(name="providerName")
def provider_name(self) -> Optional[str]:
return pulumi.get(self, "provider_name")
@property
@pulumi.getter(name="relatedAnalyticRuleIds")
def related_analytic_rule_ids(self) -> Sequence[str]:
return pulumi.get(self, "related_analytic_rule_ids")
@property
@pulumi.getter
def severity(self) -> str:
return pulumi.get(self, "severity")
@property
@pulumi.getter
def status(self) -> str:
return pulumi.get(self, "status")
@property
@pulumi.getter
def title(self) -> str:
return pulumi.get(self, "title")
@property
@pulumi.getter
def type(self) -> str:
return pulumi.get(self, "type")
class AwaitableGetIncidentResult(GetIncidentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetIncidentResult(
additional_data=self.additional_data,
classification=self.classification,
classification_comment=self.classification_comment,
classification_reason=self.classification_reason,
created_time_utc=self.created_time_utc,
description=self.description,
etag=self.etag,
first_activity_time_utc=self.first_activity_time_utc,
id=self.id,
incident_number=self.incident_number,
incident_url=self.incident_url,
labels=self.labels,
last_activity_time_utc=self.last_activity_time_utc,
last_modified_time_utc=self.last_modified_time_utc,
name=self.name,
owner=self.owner,
provider_incident_id=self.provider_incident_id,
provider_name=self.provider_name,
related_analytic_rule_ids=self.related_analytic_rule_ids,
severity=self.severity,
status=self.status,
title=self.title,
type=self.type)
def get_incident(incident_id: Optional[str] = None,
operational_insights_resource_provider: Optional[str] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIncidentResult:
__args__ = dict()
__args__['incidentId'] = incident_id
__args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:securityinsights/v20190101preview:getIncident', __args__, opts=opts, typ=GetIncidentResult).value
return AwaitableGetIncidentResult(
additional_data=__ret__.additional_data,
classification=__ret__.classification,
classification_comment=__ret__.classification_comment,
classification_reason=__ret__.classification_reason,
created_time_utc=__ret__.created_time_utc,
description=__ret__.description,
etag=__ret__.etag,
first_activity_time_utc=__ret__.first_activity_time_utc,
id=__ret__.id,
incident_number=__ret__.incident_number,
incident_url=__ret__.incident_url,
labels=__ret__.labels,
last_activity_time_utc=__ret__.last_activity_time_utc,
last_modified_time_utc=__ret__.last_modified_time_utc,
name=__ret__.name,
owner=__ret__.owner,
provider_incident_id=__ret__.provider_incident_id,
provider_name=__ret__.provider_name,
related_analytic_rule_ids=__ret__.related_analytic_rule_ids,
severity=__ret__.severity,
status=__ret__.status,
title=__ret__.title,
type=__ret__.type)
| true
| true
|
1c456b4651c14bf62f1b981a4373ef3876f9cc4a
| 10,375
|
py
|
Python
|
python/tvm/relay/analysis.py
|
Checkmate50/tvm
|
0293f42232ac2506c9cf8914410282c54ee4c0ed
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/relay/analysis.py
|
Checkmate50/tvm
|
0293f42232ac2506c9cf8914410282c54ee4c0ed
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/relay/analysis.py
|
Checkmate50/tvm
|
0293f42232ac2506c9cf8914410282c54ee4c0ed
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=no-else-return
# pylint: disable=unidiomatic-typecheck
"""
This file contains the set of passes for Relay, which exposes an interface for
configuring the passes and scripting them in Python.
"""
from . import _analysis
from . import _make
from .expr import Expr
from .ty import Type
from .module import Module
from .feature import Feature
def post_order_visit(expr, fvisit):
"""Recursively visit the ir in post DFS order node,
apply fvisit. Each node is guaranteed to be visited
only once.
Parameters
----------
expr : tvm.relay.Expr
The input expression.
fvisit : function
The visitor function to be applied.
"""
return _analysis.post_order_visit(expr, fvisit)
def well_formed(expr):
"""Check that each Var is only bound once (well formed).
Parameters
----------
expr : tvm.relay.Expr
The input expression
Returns
-------
well_form : bool
Whether the input expression is well formed
"""
return _analysis.well_formed(expr)
def check_kind(t, mod=None):
"""Check that the type is well kinded and return the kind.
    For example, this means a type cannot contain a tensor of tensors, and
    cannot be a tuple type of two shapes.
Parameters
----------
t : tvm.relay.Type
The type to check
mod : Optional[tvm.relay.Module]
The global module.
Returns
-------
kind : Kind
the kind of t
Examples
--------
.. code:: python
assert check_kind(relay.TupleType([relay.TypeParam('tp1', relay.Kind.Shape)])) == Shape
assert check_kind(relay.TupleType([relay.TypeParam('tp1', relay.Kind.Type)])) == Type
"""
if mod is not None:
return _analysis.check_kind(t, mod)
else:
return _analysis.check_kind(t)
def check_constant(expr):
"""Check whether an expression is constant
Parameters
----------
expr : tvm.relay.Expr
The input expression
Returns
-------
result : bool
Whether the expression is constant.
"""
return _analysis.check_constant(expr)
def free_vars(expr):
"""Get free Vars from expression expr in Post DFS order.
Parameters
----------
expr : tvm.relay.Expr
The input expression
Returns
-------
free : List[tvm.relay.Var]
The list of free variables in post DFS order.
Note
----
    The fact that Vars are post-DFS ordered is useful in
    neural networks: usually this means the weights of earlier
    layers come first.
"""
return _analysis.free_vars(expr)
def bound_vars(expr):
"""Get bound vars from expression expr in post-DFS order.
Parameters
----------
expr : tvm.relay.Expr
The input expression
Returns
-------
free : List[tvm.relay.Var]
The list of bound variables in post-DFS order.
"""
return _analysis.bound_vars(expr)
def all_vars(expr):
"""Get all vars from expression expr in post-DFS order.
Parameters
----------
expr : tvm.relay.Expr
The input expression
Returns
-------
free : List[tvm.relay.Var]
The list of all variables in post-DFS order.
"""
return _analysis.all_vars(expr)
def free_type_vars(expr, mod=None):
"""Get free type variables from expression/type e
Parameters
----------
expr : Union[tvm.relay.Expr,tvm.relay.Type]
The input expression/type
mod : Optional[tvm.relay.Module]
The global module
Returns
-------
free : List[tvm.relay.TypeVar]
The list of free type variables in post-DFS order
"""
use_mod = mod if mod is not None else Module()
return _analysis.free_type_vars(expr, use_mod)
def bound_type_vars(expr, mod=None):
"""Get bound type variables from expression/type e
Parameters
----------
expr : Union[tvm.relay.Expr,tvm.relay.Type]
The input expression/type
mod : Optional[tvm.relay.Module]
The global module
Returns
-------
free : List[tvm.relay.TypeVar]
The list of bound type variables in post-DFS order
"""
use_mod = mod if mod is not None else Module()
return _analysis.bound_type_vars(expr, use_mod)
def all_type_vars(expr, mod=None):
"""Get all type variables from expression/type e
Parameters
----------
expr : Union[tvm.relay.Expr,tvm.relay.Type]
The input expression/type
mod : Optional[tvm.relay.Module]
The global module
Returns
-------
free : List[tvm.relay.TypeVar]
The list of all type variables in post-DFS order
"""
use_mod = mod if mod is not None else Module()
return _analysis.all_type_vars(expr, use_mod)
def alpha_equal(lhs, rhs):
"""Compare two Relay expr for structural equivalence (alpha equivalence).
Parameters
----------
lhs : tvm.relay.Expr
One of the input Expression.
rhs : tvm.relay.Expr
One of the input Expression.
Returns
-------
result : bool
True iff lhs is alpha equal to rhs.
"""
return bool(_make._alpha_equal(lhs, rhs))
def assert_alpha_equal(lhs, rhs):
"""Assert that two Relay expr is structurally equivalent. (alpha equivalence).
Parameters
----------
lhs : tvm.relay.Expr
One of the input Expression.
rhs : tvm.relay.Expr
One of the input Expression.
"""
_make._assert_alpha_equal(lhs, rhs)
def graph_equal(lhs, rhs):
"""Compare two Relay expr for data-flow equivalence.
The difference between this and alpha-equality is that
variables are not expected to match between lhs and rhs;
they are treated as sources and are mapped between each other.
Parameters
----------
lhs : tvm.relay.Expr
One of the input Expression.
rhs : tvm.relay.Expr
One of the input Expression.
Returns
-------
result : bool
True iff lhs is data-flow equivalent to rhs.
"""
return bool(_make._graph_equal(lhs, rhs))
def assert_graph_equal(lhs, rhs):
"""Compare two Relay expr for data-flow equivalence.
The difference between this and alpha-equality is that
variables are not expected to match between lhs and rhs;
they are treated as sources and are mapped between each other.
Parameters
----------
lhs : tvm.relay.Expr
One of the input Expression.
rhs : tvm.relay.Expr
One of the input Expression.
"""
_make._assert_graph_equal(lhs, rhs)
def collect_device_info(expr):
"""Collect the device allocation map for the given expression. The device
ids are propagated from the `device_copy` operators.
Parameters
----------
expr : tvm.relay.Expr
The input expression.
Returns
-------
ret : Dict[tvm.relay.expr, int]
A dictionary mapping tvm.relay.Expr to device type.
"""
return _analysis.CollectDeviceInfo(expr)
def collect_device_annotation_ops(expr):
"""Collect the device annotation ops for the given expression.
Parameters
----------
expr : tvm.relay.Expr
The input expression.
Returns
-------
ret : Dict[tvm.relay.expr, int]
A dictionary mapping tvm.relay.Expr to device type where the keys are
annotation expressions.
"""
return _analysis.CollectDeviceAnnotationOps(expr)
def get_total_mac_number(expr):
"""
Count the number of MACs (multiply-accumulate) of a model
Parameters
----------
expr : tvm.relay.Expr
The input expression.
Returns
-------
result : int64
The number of MACs (multiply-accumulate) of a model
"""
return _analysis.GetTotalMacNumber(expr)
def missing_gradient_check(expr):
"""
Check if there is a missing gradient and print it.
Parameters
----------
expr : tvm.relay.Expr
The input expression.
"""
_analysis.missing_gradient_check(expr)
def unmatched_cases(match, mod=None):
"""
Finds cases that the match expression does not catch, if any.
Parameters
----------
match : tvm.relay.Match
The match expression
mod : Optional[tvm.relay.Module]
The module (defaults to an empty module)
Returns
-------
missing_patterns : [tvm.relay.Pattern]
Patterns that the match expression does not catch.
"""
return _analysis.unmatched_cases(match, mod)
def detect_feature(a, b=None):
"""
Detect the feature used in a relay program.
Parameters
----------
a : Union[tvm.relay.Expr, tvm.relay.Module]
The input expression or module.
b : Optional[Union[tvm.relay.Expr, tvm.relay.Module]]
The input expression or module.
The two arguments cannot both be expression or module.
Returns
-------
features : Set[Feature]
Features used in the program.
"""
if isinstance(a, Module):
a, b = b, a
return set([Feature(int(x)) for x in _analysis.detect_feature(a, b)])
def structural_hash(value):
"""Hash a Relay expression structurally.
Parameters
----------
expr : Union[tvm.relay.Expr, tvm.relay.Type]
The expression to hash.
Returns
-------
result : int
The hash value
"""
if isinstance(value, Expr):
return int(_analysis._expr_hash(value))
elif isinstance(value, Type):
return int(_analysis._type_hash(value))
else:
msg = ("found value of type {0} expected" +
"relay.Expr or relay.Type").format(type(value))
raise TypeError(msg)
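# Usage sketch for the variable helpers (assumes a TVM build with Relay
# available):
#
#     from tvm import relay
#     x = relay.var("x")
#     f = relay.Function([x], x)
#     free_vars(x)       # -> [x]: x is free on its own
#     bound_vars(f)      # -> [x]: the function binds it
#     alpha_equal(f, f)  # -> True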
| 24.585308
| 95
| 0.64212
|
from . import _analysis
from . import _make
from .expr import Expr
from .ty import Type
from .module import Module
from .feature import Feature
def post_order_visit(expr, fvisit):
return _analysis.post_order_visit(expr, fvisit)
def well_formed(expr):
return _analysis.well_formed(expr)
def check_kind(t, mod=None):
if mod is not None:
return _analysis.check_kind(t, mod)
else:
return _analysis.check_kind(t)
def check_constant(expr):
return _analysis.check_constant(expr)
def free_vars(expr):
return _analysis.free_vars(expr)
def bound_vars(expr):
return _analysis.bound_vars(expr)
def all_vars(expr):
return _analysis.all_vars(expr)
def free_type_vars(expr, mod=None):
use_mod = mod if mod is not None else Module()
return _analysis.free_type_vars(expr, use_mod)
def bound_type_vars(expr, mod=None):
use_mod = mod if mod is not None else Module()
return _analysis.bound_type_vars(expr, use_mod)
def all_type_vars(expr, mod=None):
use_mod = mod if mod is not None else Module()
return _analysis.all_type_vars(expr, use_mod)
def alpha_equal(lhs, rhs):
return bool(_make._alpha_equal(lhs, rhs))
def assert_alpha_equal(lhs, rhs):
_make._assert_alpha_equal(lhs, rhs)
def graph_equal(lhs, rhs):
return bool(_make._graph_equal(lhs, rhs))
def assert_graph_equal(lhs, rhs):
_make._assert_graph_equal(lhs, rhs)
def collect_device_info(expr):
return _analysis.CollectDeviceInfo(expr)
def collect_device_annotation_ops(expr):
return _analysis.CollectDeviceAnnotationOps(expr)
def get_total_mac_number(expr):
return _analysis.GetTotalMacNumber(expr)
def missing_gradient_check(expr):
_analysis.missing_gradient_check(expr)
def unmatched_cases(match, mod=None):
return _analysis.unmatched_cases(match, mod)
def detect_feature(a, b=None):
if isinstance(a, Module):
a, b = b, a
return set([Feature(int(x)) for x in _analysis.detect_feature(a, b)])
def structural_hash(value):
if isinstance(value, Expr):
return int(_analysis._expr_hash(value))
elif isinstance(value, Type):
return int(_analysis._type_hash(value))
else:
msg = ("found value of type {0} expected" +
"relay.Expr or relay.Type").format(type(value))
raise TypeError(msg)
| true
| true
|
1c456c74c2cf7a473b376d5f287a9bb1a2b9f3b9
| 5,363
|
py
|
Python
|
netbox/utilities/forms/widgets.py
|
letic/netbox
|
0930745e16330edf00da081150b079d5ed6ecc02
|
[
"Apache-2.0"
] | 2
|
2021-07-08T03:58:12.000Z
|
2022-02-11T21:50:46.000Z
|
netbox/utilities/forms/widgets.py
|
letic/netbox
|
0930745e16330edf00da081150b079d5ed6ecc02
|
[
"Apache-2.0"
] | 25
|
2019-09-17T19:40:50.000Z
|
2022-03-11T04:01:55.000Z
|
netbox/utilities/forms/widgets.py
|
letic/netbox
|
0930745e16330edf00da081150b079d5ed6ecc02
|
[
"Apache-2.0"
] | 1
|
2022-02-11T21:50:58.000Z
|
2022-02-11T21:50:58.000Z
|
import json
from django import forms
from django.conf import settings
from django.contrib.postgres.forms import SimpleArrayField
from utilities.choices import ColorChoices
from .utils import add_blank_choice, parse_numeric_range
__all__ = (
'APISelect',
'APISelectMultiple',
'BulkEditNullBooleanSelect',
'ColorSelect',
'ContentTypeSelect',
'DatePicker',
'DateTimePicker',
'NumericArrayField',
'SelectWithDisabled',
'SelectWithPK',
'SlugWidget',
'SmallTextarea',
'StaticSelect2',
'StaticSelect2Multiple',
'TimePicker',
)
class SmallTextarea(forms.Textarea):
"""
Subclass used for rendering a smaller textarea element.
"""
pass
class SlugWidget(forms.TextInput):
"""
Subclass TextInput and add a slug regeneration button next to the form field.
"""
template_name = 'widgets/sluginput.html'
class ColorSelect(forms.Select):
"""
Extends the built-in Select widget to colorize each <option>.
"""
option_template_name = 'widgets/colorselect_option.html'
def __init__(self, *args, **kwargs):
kwargs['choices'] = add_blank_choice(ColorChoices)
super().__init__(*args, **kwargs)
self.attrs['class'] = 'netbox-select2-color-picker'
class BulkEditNullBooleanSelect(forms.NullBooleanSelect):
"""
A Select widget for NullBooleanFields
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Override the built-in choice labels
self.choices = (
('1', '---------'),
('2', 'Yes'),
('3', 'No'),
)
self.attrs['class'] = 'netbox-select2-static'
class SelectWithDisabled(forms.Select):
"""
    Modifies the stock Select widget to accept choices using a dict() for a label. The dict for each option must include
'label' (string) and 'disabled' (boolean).
"""
option_template_name = 'widgets/selectwithdisabled_option.html'
class StaticSelect2(SelectWithDisabled):
"""
A static <select> form widget using the Select2 library.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'netbox-select2-static'
class StaticSelect2Multiple(StaticSelect2, forms.SelectMultiple):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['data-multiple'] = 1
class SelectWithPK(StaticSelect2):
"""
Include the primary key of each option in the option label (e.g. "Router7 (4721)").
"""
option_template_name = 'widgets/select_option_with_pk.html'
class ContentTypeSelect(StaticSelect2):
"""
Appends an `api-value` attribute equal to the slugified model name for each ContentType. For example:
<option value="37" api-value="console-server-port">console server port</option>
This attribute can be used to reference the relevant API endpoint for a particular ContentType.
"""
option_template_name = 'widgets/select_contenttype.html'
class NumericArrayField(SimpleArrayField):
def to_python(self, value):
if not value:
return []
if isinstance(value, str):
value = ','.join([str(n) for n in parse_numeric_range(value)])
return super().to_python(value)
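    # Note (sketch): a string such as "1-3,5" is expanded by
    # parse_numeric_range() and re-joined as "1,2,3,5" before the parent
    # SimpleArrayField parses it into a list of integers.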
class APISelect(SelectWithDisabled):
"""
A select widget populated via an API call
:param api_url: API endpoint URL. Required if not set automatically by the parent field.
"""
def __init__(self, api_url=None, full=False, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'netbox-select2-api'
if api_url:
self.attrs['data-url'] = '/{}{}'.format(settings.BASE_PATH, api_url.lstrip('/')) # Inject BASE_PATH
def add_query_param(self, name, value):
"""
Add details for an additional query param in the form of a data-* JSON-encoded list attribute.
:param name: The name of the query param
:param value: The value of the query param
"""
key = f'data-query-param-{name}'
values = json.loads(self.attrs.get(key, '[]'))
if type(value) in (list, tuple):
values.extend([str(v) for v in value])
else:
values.append(str(value))
self.attrs[key] = json.dumps(values)
class APISelectMultiple(APISelect, forms.SelectMultiple):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['data-multiple'] = 1
class DatePicker(forms.TextInput):
"""
Date picker using Flatpickr.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'date-picker'
self.attrs['placeholder'] = 'YYYY-MM-DD'
class DateTimePicker(forms.TextInput):
"""
DateTime picker using Flatpickr.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'datetime-picker'
self.attrs['placeholder'] = 'YYYY-MM-DD hh:mm:ss'
class TimePicker(forms.TextInput):
"""
Time picker using Flatpickr.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'time-picker'
self.attrs['placeholder'] = 'hh:mm:ss'
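For context on the widget set above: `APISelect.add_query_param` stores each filter value in a JSON-encoded `data-query-param-*` attribute, so repeated calls accumulate values rather than overwrite them. A minimal sketch of that behaviour (assumes Django is importable; nothing is rendered here):

widget = APISelect()
widget.add_query_param('site_id', 1)
widget.add_query_param('site_id', [2, 3])
# each call extends the JSON list stored on the attribute
assert widget.attrs['data-query-param-site_id'] == '["1", "2", "3"]'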
| 28.078534
| 120
| 0.641618
|
import json
from django import forms
from django.conf import settings
from django.contrib.postgres.forms import SimpleArrayField
from utilities.choices import ColorChoices
from .utils import add_blank_choice, parse_numeric_range
__all__ = (
'APISelect',
'APISelectMultiple',
'BulkEditNullBooleanSelect',
'ColorSelect',
'ContentTypeSelect',
'DatePicker',
'DateTimePicker',
'NumericArrayField',
'SelectWithDisabled',
'SelectWithPK',
'SlugWidget',
'SmallTextarea',
'StaticSelect2',
'StaticSelect2Multiple',
'TimePicker',
)
class SmallTextarea(forms.Textarea):
pass
class SlugWidget(forms.TextInput):
template_name = 'widgets/sluginput.html'
class ColorSelect(forms.Select):
option_template_name = 'widgets/colorselect_option.html'
def __init__(self, *args, **kwargs):
kwargs['choices'] = add_blank_choice(ColorChoices)
super().__init__(*args, **kwargs)
self.attrs['class'] = 'netbox-select2-color-picker'
class BulkEditNullBooleanSelect(forms.NullBooleanSelect):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.choices = (
('1', '---------'),
('2', 'Yes'),
('3', 'No'),
)
self.attrs['class'] = 'netbox-select2-static'
class SelectWithDisabled(forms.Select):
option_template_name = 'widgets/selectwithdisabled_option.html'
class StaticSelect2(SelectWithDisabled):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'netbox-select2-static'
class StaticSelect2Multiple(StaticSelect2, forms.SelectMultiple):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['data-multiple'] = 1
class SelectWithPK(StaticSelect2):
option_template_name = 'widgets/select_option_with_pk.html'
class ContentTypeSelect(StaticSelect2):
option_template_name = 'widgets/select_contenttype.html'
class NumericArrayField(SimpleArrayField):
def to_python(self, value):
if not value:
return []
if isinstance(value, str):
value = ','.join([str(n) for n in parse_numeric_range(value)])
return super().to_python(value)
class APISelect(SelectWithDisabled):
def __init__(self, api_url=None, full=False, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'netbox-select2-api'
if api_url:
self.attrs['data-url'] = '/{}{}'.format(settings.BASE_PATH, api_url.lstrip('/'))
def add_query_param(self, name, value):
key = f'data-query-param-{name}'
values = json.loads(self.attrs.get(key, '[]'))
if type(value) in (list, tuple):
values.extend([str(v) for v in value])
else:
values.append(str(value))
self.attrs[key] = json.dumps(values)
class APISelectMultiple(APISelect, forms.SelectMultiple):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['data-multiple'] = 1
class DatePicker(forms.TextInput):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'date-picker'
self.attrs['placeholder'] = 'YYYY-MM-DD'
class DateTimePicker(forms.TextInput):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'datetime-picker'
self.attrs['placeholder'] = 'YYYY-MM-DD hh:mm:ss'
class TimePicker(forms.TextInput):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.attrs['class'] = 'time-picker'
self.attrs['placeholder'] = 'hh:mm:ss'
| true
| true
|
1c456cd394ed39fe157a450029b8b5c2dcc40bd1
| 598
|
py
|
Python
|
src/modax/training/.ipynb_checkpoints/utils-checkpoint.py
|
GJBoth/modax
|
c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82
|
[
"MIT"
] | 2
|
2021-12-10T14:36:37.000Z
|
2022-02-10T11:47:03.000Z
|
src/modax/training/.ipynb_checkpoints/utils-checkpoint.py
|
GJBoth/modax
|
c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82
|
[
"MIT"
] | null | null | null |
src/modax/training/.ipynb_checkpoints/utils-checkpoint.py
|
GJBoth/modax
|
c7e1c128d4dd48b776f8ec4fa724c2e4b6e13c82
|
[
"MIT"
] | 2
|
2020-12-22T14:49:13.000Z
|
2021-04-09T08:52:08.000Z
|
from jax import jit, value_and_grad
from functools import partial
import jax.profiler
def create_update(loss_fn, loss_fn_args):
def step(opt, state, loss_fn, loss_fn_args):
grad_fn = value_and_grad(loss_fn, argnums=0, has_aux=True)
(loss, (updated_state, metrics, output)), grad = grad_fn(
opt.target, state, *loss_fn_args
)
opt = opt.apply_gradient(grad)
jax.profiler.save_device_memory_profile(f"memory.prof")
return (opt, updated_state), metrics, output
return jit(partial(step, loss_fn=loss_fn, loss_fn_args=loss_fn_args))
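A hedged usage sketch of the contract `create_update` expects: `loss_fn(params, state, *loss_fn_args)` must return `(loss, (state, metrics, output))`, and the optimizer must expose `.target` and `.apply_gradient` (the legacy `flax.optim` API is assumed; all names below are illustrative):

import jax.numpy as jnp
from flax import optim  # legacy flax.optim API, assumed available

def mse_loss(params, state, X, y):
    pred = X @ params
    loss = jnp.mean((pred - y) ** 2)
    return loss, (state, {"mse": loss}, pred)

X, y = jnp.ones((8, 3)), jnp.zeros((8,))
opt = optim.Adam(learning_rate=1e-3).create(jnp.zeros(3))
update = create_update(mse_loss, loss_fn_args=(X, y))
(opt, state), metrics, pred = update(opt, {})  # state is an empty pytree here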
| 35.176471
| 73
| 0.704013
|
from jax import jit, value_and_grad
from functools import partial
import jax.profiler
def create_update(loss_fn, loss_fn_args):
def step(opt, state, loss_fn, loss_fn_args):
grad_fn = value_and_grad(loss_fn, argnums=0, has_aux=True)
(loss, (updated_state, metrics, output)), grad = grad_fn(
opt.target, state, *loss_fn_args
)
opt = opt.apply_gradient(grad)
jax.profiler.save_device_memory_profile(f"memory.prof")
return (opt, updated_state), metrics, output
return jit(partial(step, loss_fn=loss_fn, loss_fn_args=loss_fn_args))
| true
| true
|
1c456f4aeae0fd4829f9f0818a661eb6433b1c8e
| 399
|
py
|
Python
|
tests/multidl/downloaders/test_local_file_downloader.py
|
gazay/chiliad
|
771b3d0f7004f2a03094bad7bcc0103715a6c73f
|
[
"MIT"
] | 16
|
2018-02-12T23:47:26.000Z
|
2021-07-23T12:43:05.000Z
|
tests/multidl/downloaders/test_local_file_downloader.py
|
gazay/chiliad
|
771b3d0f7004f2a03094bad7bcc0103715a6c73f
|
[
"MIT"
] | 6
|
2017-10-14T15:36:52.000Z
|
2022-02-13T17:17:17.000Z
|
tests/multidl/downloaders/test_local_file_downloader.py
|
gazay/chiliad
|
771b3d0f7004f2a03094bad7bcc0103715a6c73f
|
[
"MIT"
] | 6
|
2018-05-11T00:16:00.000Z
|
2021-05-03T02:02:55.000Z
|
# -*- coding: utf-8 -*-
import pytest
from multidl.downloaders.local_file_downloader import LocalFileDownloader
@pytest.mark.parametrize('url, expected', [
('file:///dir/file1.txt', 'file1.txt'),
('file:///file2.txt', 'file2.txt'),
])
def test_get_file_name(tmpdir, url, expected):
downloader = LocalFileDownloader(url, str(tmpdir))
assert downloader.get_file_name() == expected
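The mapping under test can be reproduced with the standard library alone; a hedged sketch of how a `file://` URL yields a file name (the downloader's actual implementation may differ):

import os
from urllib.parse import urlparse

def file_name_from_url(url):
    # basename of the path component of a file:// URL
    return os.path.basename(urlparse(url).path)

assert file_name_from_url('file:///dir/file1.txt') == 'file1.txt'
assert file_name_from_url('file:///file2.txt') == 'file2.txt'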
| 26.6
| 73
| 0.704261
|
import pytest
from multidl.downloaders.local_file_downloader import LocalFileDownloader
@pytest.mark.parametrize('url, expected', [
('file:///dir/file1.txt', 'file1.txt'),
('file:///file2.txt', 'file2.txt'),
])
def test_get_file_name(tmpdir, url, expected):
downloader = LocalFileDownloader(url, str(tmpdir))
assert downloader.get_file_name() == expected
| true
| true
|
1c456f6f57902768c2181ef37ffa76b83cb79aad
| 2,283
|
py
|
Python
|
src/demo/worker_flags.py
|
Ravi-0809/question-generation
|
9065a3b47293b8a69a0548af1f6bedd4a4aa7f9c
|
[
"MIT"
] | 212
|
2018-08-15T11:06:35.000Z
|
2021-11-21T10:21:55.000Z
|
src/demo/worker_flags.py
|
Ravi-0809/question-generation
|
9065a3b47293b8a69a0548af1f6bedd4a4aa7f9c
|
[
"MIT"
] | 44
|
2018-10-15T12:50:31.000Z
|
2020-11-13T18:02:03.000Z
|
src/demo/worker_flags.py
|
Ravi-0809/question-generation
|
9065a3b47293b8a69a0548af1f6bedd4a4aa7f9c
|
[
"MIT"
] | 51
|
2018-08-17T18:17:43.000Z
|
2021-03-04T06:14:52.000Z
|
class FlagsObject(object):
pass
FLAGS = FlagsObject()
# config
FLAGS.testing = False
FLAGS.model_type = 'RL-S2S'
FLAGS.restore = False
FLAGS.restore_path = None
FLAGS.policy_gradient = False
FLAGS.glove_vocab = False
FLAGS.embedding_loss = False
FLAGS.latent_switch = False
FLAGS.combine_vocab = False
FLAGS.lr_schedule = False
FLAGS.eval_freq = 1000
FLAGS.num_epochs = 25
FLAGS.batch_size = 64
FLAGS.eval_batch_size = 16
FLAGS.data_path = '../data/'
FLAGS.log_dir = '../logs/'
FLAGS.model_dir = '../models/'
# hyperparams
FLAGS.filter_window_size_before = 1
FLAGS.filter_window_size_after = 1
FLAGS.filter_max_tokens = 100
FLAGS.max_context_len = 203
FLAGS.max_copy_size = 203
FLAGS.embedding_size = 200
FLAGS.context_encoder_units = 768
FLAGS.answer_encoder_units = 768
FLAGS.full_context_encoding = True
FLAGS.decoder_units = 768
FLAGS.switch_units = 128
FLAGS.ctxt_encoder_depth = 1
FLAGS.ans_encoder_depth = 1
FLAGS.vocab_size = 2000
FLAGS.learning_rate = 2e-4
FLAGS.opt_type = "adam"
FLAGS.entropy_weight = 0.01
FLAGS.suppression_weight = 0.01
FLAGS.dropout_rate = 0.3
FLAGS.context_as_set = True
FLAGS.copy_priority = False
FLAGS.smart_copy = True
FLAGS.separate_copy_mech = False
FLAGS.begin_ans_feat = False
FLAGS.maxout_pointer = False
FLAGS.loc_embeddings = False
FLAGS.out_vocab_cpu = False
FLAGS.advanced_condition_encoding = False
FLAGS.disable_copy = False
FLAGS.disable_shortlist = False
FLAGS.length_penalty = 0.05
FLAGS.pg_burnin = 200
FLAGS.pg_dropout = False
FLAGS.lm_weight = 0.25
FLAGS.qa_weight = 0.5
FLAGS.bleu_weight = 0.0
FLAGS.pg_ml_weight = 1
FLAGS.disc_weight = 0.0
FLAGS.disc_train = False
# QA - MPCM hparams
FLAGS.qa_vocab_size = 20000
FLAGS.qa_encoder_units = 100
FLAGS.qa_match_units = 100
FLAGS.qa_num_epochs = 20
FLAGS.qa_batch_size = 32
FLAGS.qa_learning_rate = 1e-4
# LM hparams
FLAGS.lm_vocab_size = 20000
FLAGS.lm_units = 384
FLAGS.lm_num_epochs = 25
FLAGS.lm_dropout = 0.3
# eval params
FLAGS.beam_width = 16
# FLAGS.num_dev_samples = 4691
FLAGS.num_dev_samples = 10570
# FLAGS.num_eval_samples = 5609
FLAGS.num_eval_samples = 11877
FLAGS.eval_on_dev = True
FLAGS.eval_on_test = False
FLAGS.eval_model_id = ""
FLAGS.eval_metrics = True
FLAGS.diverse_bs = False
FLAGS.beam_groups = 1
FLAGS.beam_diversity = 0.5
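Because `FLAGS` is a bare object rather than an argparse/absl namespace, consumers simply import the module and read or mutate attributes; a minimal sketch (import path hypothetical):

from demo.worker_flags import FLAGS  # hypothetical import path

FLAGS.batch_size = 32      # override a default before building the model
width = FLAGS.beam_width   # read anywhere downstream; no parsing involved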
| 19.852174
| 41
| 0.782742
|
class FlagsObject(object):
pass
FLAGS = FlagsObject()
FLAGS.testing = False
FLAGS.model_type = 'RL-S2S'
FLAGS.restore = False
FLAGS.restore_path = None
FLAGS.policy_gradient = False
FLAGS.glove_vocab = False
FLAGS.embedding_loss = False
FLAGS.latent_switch = False
FLAGS.combine_vocab = False
FLAGS.lr_schedule = False
FLAGS.eval_freq = 1000
FLAGS.num_epochs = 25
FLAGS.batch_size = 64
FLAGS.eval_batch_size = 16
FLAGS.data_path = '../data/'
FLAGS.log_dir = '../logs/'
FLAGS.model_dir = '../models/'
FLAGS.filter_window_size_before = 1
FLAGS.filter_window_size_after = 1
FLAGS.filter_max_tokens = 100
FLAGS.max_context_len = 203
FLAGS.max_copy_size = 203
FLAGS.embedding_size = 200
FLAGS.context_encoder_units = 768
FLAGS.answer_encoder_units = 768
FLAGS.full_context_encoding = True
FLAGS.decoder_units = 768
FLAGS.switch_units = 128
FLAGS.ctxt_encoder_depth = 1
FLAGS.ans_encoder_depth = 1
FLAGS.vocab_size = 2000
FLAGS.learning_rate = 2e-4
FLAGS.opt_type = "adam"
FLAGS.entropy_weight = 0.01
FLAGS.suppression_weight = 0.01
FLAGS.dropout_rate = 0.3
FLAGS.context_as_set = True
FLAGS.copy_priority = False
FLAGS.smart_copy = True
FLAGS.separate_copy_mech = False
FLAGS.begin_ans_feat = False
FLAGS.maxout_pointer = False
FLAGS.loc_embeddings = False
FLAGS.out_vocab_cpu = False
FLAGS.advanced_condition_encoding = False
FLAGS.disable_copy = False
FLAGS.disable_shortlist = False
FLAGS.length_penalty = 0.05
FLAGS.pg_burnin = 200
FLAGS.pg_dropout = False
FLAGS.lm_weight = 0.25
FLAGS.qa_weight = 0.5
FLAGS.bleu_weight = 0.0
FLAGS.pg_ml_weight = 1
FLAGS.disc_weight = 0.0
FLAGS.disc_train = False
FLAGS.qa_vocab_size = 20000
FLAGS.qa_encoder_units = 100
FLAGS.qa_match_units = 100
FLAGS.qa_num_epochs = 20
FLAGS.qa_batch_size = 32
FLAGS.qa_learning_rate = 1e-4
FLAGS.lm_vocab_size = 20000
FLAGS.lm_units = 384
FLAGS.lm_num_epochs = 25
FLAGS.lm_dropout = 0.3
FLAGS.beam_width = 16
FLAGS.num_dev_samples = 10570
FLAGS.num_eval_samples = 11877
FLAGS.eval_on_dev = True
FLAGS.eval_on_test = False
FLAGS.eval_model_id = ""
FLAGS.eval_metrics = True
FLAGS.diverse_bs = False
FLAGS.beam_groups = 1
FLAGS.beam_diversity = 0.5
| true
| true
|
1c4572039e05249fc64e7f7a9e3e39836024d635
| 9,745
|
py
|
Python
|
theseus/geometry/so2.py
|
jeffin07/theseus
|
3498bbddf9cca740c2703d0c1aa3a78a7264cb15
|
[
"MIT"
] | 236
|
2021-12-03T15:59:29.000Z
|
2022-03-30T23:18:33.000Z
|
theseus/geometry/so2.py
|
jeffin07/theseus
|
3498bbddf9cca740c2703d0c1aa3a78a7264cb15
|
[
"MIT"
] | 85
|
2021-12-06T07:04:11.000Z
|
2022-03-31T20:29:26.000Z
|
theseus/geometry/so2.py
|
jeffin07/theseus
|
3498bbddf9cca740c2703d0c1aa3a78a7264cb15
|
[
"MIT"
] | 12
|
2021-12-03T22:02:44.000Z
|
2022-03-20T14:58:27.000Z
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import List, Optional, Tuple, Union, cast
import torch
import theseus.constants
from .lie_group import LieGroup
from .point_types import Point2
class SO2(LieGroup):
def __init__(
self,
theta: Optional[torch.Tensor] = None,
data: Optional[torch.Tensor] = None,
name: Optional[str] = None,
dtype: Optional[torch.dtype] = None,
):
if theta is not None and data is not None:
raise ValueError("Please provide only one of theta or data.")
if theta is not None:
dtype = theta.dtype
super().__init__(data=data, name=name, dtype=dtype)
if theta is not None:
if theta.ndim == 1:
theta = theta.unsqueeze(1)
if theta.ndim != 2 or theta.shape[1] != 1:
raise ValueError(
"Argument theta must be have ndim = 1, or ndim=2 and shape[1] = 1."
)
self.update_from_angle(theta)
@staticmethod
def rand(
*size: int,
generator: Optional[torch.Generator] = None,
dtype: Optional[torch.dtype] = None,
device: Optional[torch.device] = None,
requires_grad: bool = False,
) -> "SO2":
if len(size) != 1:
raise ValueError("The size should be 1D.")
return SO2.exp_map(
2
* theseus.constants.PI
* torch.rand(
size[0],
1,
generator=generator,
dtype=dtype,
device=device,
requires_grad=requires_grad,
)
- theseus.constants.PI
)
@staticmethod
def randn(
*size: int,
generator: Optional[torch.Generator] = None,
dtype: Optional[torch.dtype] = None,
device: Optional[torch.device] = None,
requires_grad: bool = False,
) -> "SO2":
if len(size) != 1:
raise ValueError("The size should be 1D.")
return SO2.exp_map(
theseus.constants.PI
* torch.randn(
size[0],
1,
generator=generator,
dtype=dtype,
device=device,
requires_grad=requires_grad,
)
)
@staticmethod
def _init_data() -> torch.Tensor: # type: ignore
return torch.tensor([1.0, 0.0]).view(1, 2)
def update_from_angle(self, theta: torch.Tensor):
self.update(torch.cat([theta.cos(), theta.sin()], dim=1))
def dof(self) -> int:
return 1
def __repr__(self) -> str:
return f"SO2(data={self.data}, name={self.name})"
def __str__(self) -> str:
with torch.no_grad():
theta = torch.atan2(self[:, 1:], self[:, 0:1])
return f"SO2(theta={theta}, name={self.name})"
def theta(self) -> torch.Tensor:
return self.log_map()
def _adjoint_impl(self) -> torch.Tensor:
return torch.ones(self.shape[0], 1, 1, device=self.device, dtype=self.dtype)
def _project_impl(
self, euclidean_grad: torch.Tensor, is_sparse: bool = False
) -> torch.Tensor:
self._project_check(euclidean_grad, is_sparse)
temp = torch.stack((-self[:, 1], self[:, 0]), dim=1)
if is_sparse:
return torch.einsum("i...k,i...k->i...", euclidean_grad, temp).unsqueeze(-1)
else:
return torch.einsum("...k,...k", euclidean_grad, temp).unsqueeze(-1)
@staticmethod
def exp_map(
tangent_vector: torch.Tensor, jacobians: Optional[List[torch.Tensor]] = None
) -> "SO2":
so2 = SO2(dtype=tangent_vector.dtype)
so2.update_from_angle(tangent_vector)
if jacobians is not None:
SO2._check_jacobians_list(jacobians)
jacobians.append(
torch.ones(
tangent_vector.shape[0],
1,
1,
dtype=tangent_vector.dtype,
device=tangent_vector.device,
)
)
return so2
def _log_map_impl(
self, jacobians: Optional[List[torch.Tensor]] = None
) -> torch.Tensor:
if jacobians is not None:
SO2._check_jacobians_list(jacobians)
jacobians.append(
torch.ones(
self.shape[0],
1,
1,
dtype=self.dtype,
device=self.device,
)
)
cosine, sine = self.to_cos_sin()
return torch.atan2(sine, cosine).unsqueeze(1)
def _compose_impl(self, so2_2: LieGroup) -> "SO2":
so2_2 = cast(SO2, so2_2)
cos_1, sin_1 = self.to_cos_sin()
cos_2, sin_2 = so2_2.to_cos_sin()
new_cos = cos_1 * cos_2 - sin_1 * sin_2
new_sin = sin_1 * cos_2 + cos_1 * sin_2
return SO2(data=torch.stack([new_cos, new_sin], dim=1))
def _inverse_impl(self, get_jacobian: bool = False) -> "SO2":
cosine, sine = self.to_cos_sin()
return SO2(data=torch.stack([cosine, -sine], dim=1))
def _rotate_shape_check(self, point: Union[Point2, torch.Tensor]):
err_msg = (
f"SO2 can only transform vectors of shape [{self.shape[0]}, 2] or [1, 2], "
f"but the input has shape {point.shape}."
)
if isinstance(point, torch.Tensor):
if not point.ndim == 2 or point.shape[1] != 2:
raise ValueError(err_msg)
elif point.dof() != 2:
raise ValueError(err_msg)
if (
point.shape[0] != self.shape[0]
and point.shape[0] != 1
and self.shape[0] != 1
):
raise ValueError(
"Input point batch size is not broadcastable with group batch size."
)
@staticmethod
def _rotate_from_cos_sin(
point: Union[Point2, torch.Tensor],
cosine: torch.Tensor,
sine: torch.Tensor,
) -> Point2:
batch_size = max(point.shape[0], cosine.shape[0])
if isinstance(point, torch.Tensor):
if point.ndim != 2 or point.shape[1] != 2:
raise ValueError(
f"Point tensor must have shape batch_size x 2, "
f"but received {point.shape}."
)
point_data = point
else:
point_data = point.data
px, py = point_data[:, 0], point_data[:, 1]
new_point_data = torch.empty(
batch_size, 2, device=cosine.device, dtype=cosine.dtype
)
new_point_data[:, 0] = cosine * px - sine * py
new_point_data[:, 1] = sine * px + cosine * py
return Point2(data=new_point_data)
def rotate(
self,
point: Union[Point2, torch.Tensor],
jacobians: Optional[List[torch.Tensor]] = None,
) -> Point2:
self._rotate_shape_check(point)
cosine, sine = self.to_cos_sin()
ret = SO2._rotate_from_cos_sin(point, cosine, sine)
if jacobians is not None:
self._check_jacobians_list(jacobians)
Jrot = torch.stack([-ret.y(), ret.x()], dim=1).view(-1, 2, 1)
Jpnt = self.to_matrix().expand(ret.shape[0], -1, -1)
jacobians.extend([Jrot, Jpnt])
return ret
def unrotate(
self,
point: Union[Point2, torch.Tensor],
jacobians: Optional[List[torch.Tensor]] = None,
) -> Point2:
self._rotate_shape_check(point)
cosine, sine = self.to_cos_sin()
ret = SO2._rotate_from_cos_sin(point, cosine, -sine)
if jacobians is not None:
self._check_jacobians_list(jacobians)
Jrot = torch.stack([ret.y(), -ret.x()], dim=1).view(-1, 2, 1)
Jpnt = self.to_matrix().transpose(2, 1).expand(ret.shape[0], -1, -1)
jacobians.extend([Jrot, Jpnt])
return ret
def to_cos_sin(self) -> Tuple[torch.Tensor, torch.Tensor]:
return self[:, 0], self[:, 1]
def to_matrix(self) -> torch.Tensor:
matrix = torch.empty(self.shape[0], 2, 2).to(
device=self.device, dtype=self.dtype
)
cosine, sine = self.to_cos_sin()
matrix[:, 0, 0] = cosine
matrix[:, 0, 1] = -sine
matrix[:, 1, 0] = sine
matrix[:, 1, 1] = cosine
return matrix
@staticmethod
def hat(tangent_vector: torch.Tensor) -> torch.Tensor:
matrix = torch.zeros(tangent_vector.shape[0], 2, 2).to(
dtype=tangent_vector.dtype,
device=tangent_vector.device,
)
matrix[:, 0, 1] = -tangent_vector.view(-1)
matrix[:, 1, 0] = tangent_vector.view(-1)
return matrix
@staticmethod
def vee(matrix: torch.Tensor) -> torch.Tensor:
_check = matrix.ndim == 3 and matrix.shape[1:] == (2, 2)
_check &= matrix[:, 0, 0].abs().max().item() < theseus.constants.EPS
_check &= matrix[:, 1, 1].abs().max().item() < theseus.constants.EPS
_check &= torch.allclose(matrix[:, 0, 1], -matrix[:, 1, 0])
if not _check:
raise ValueError("Invalid hat matrix for SO2.")
return matrix[:, 1, 0].clone().view(-1, 1)
def _copy_impl(self, new_name: Optional[str] = None) -> "SO2":
return SO2(data=self.data.clone(), name=new_name)
# only added to avoid casting downstream
def copy(self, new_name: Optional[str] = None) -> "SO2":
return cast(SO2, super().copy(new_name=new_name))
rand_so2 = SO2.rand
randn_so2 = SO2.randn
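A minimal round-trip sketch for the class above (batch-first shapes, as in the implementation):

import torch

theta = torch.tensor([[0.5], [-1.2]])   # batch of angles, shape (2, 1)
g = SO2.exp_map(theta)                  # internal data holds [cos, sin]
assert torch.allclose(g.log_map(), theta, atol=1e-6)

p = torch.randn(2, 2)                   # batch of 2D points
q = g.rotate(p)                         # returns a Point2
assert torch.allclose(g.unrotate(q).data, p, atol=1e-5)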
| 33.719723
| 88
| 0.548794
|
from typing import List, Optional, Tuple, Union, cast
import torch
import theseus.constants
from .lie_group import LieGroup
from .point_types import Point2
class SO2(LieGroup):
def __init__(
self,
theta: Optional[torch.Tensor] = None,
data: Optional[torch.Tensor] = None,
name: Optional[str] = None,
dtype: Optional[torch.dtype] = None,
):
if theta is not None and data is not None:
raise ValueError("Please provide only one of theta or data.")
if theta is not None:
dtype = theta.dtype
super().__init__(data=data, name=name, dtype=dtype)
if theta is not None:
if theta.ndim == 1:
theta = theta.unsqueeze(1)
if theta.ndim != 2 or theta.shape[1] != 1:
raise ValueError(
"Argument theta must be have ndim = 1, or ndim=2 and shape[1] = 1."
)
self.update_from_angle(theta)
@staticmethod
def rand(
*size: int,
generator: Optional[torch.Generator] = None,
dtype: Optional[torch.dtype] = None,
device: Optional[torch.device] = None,
requires_grad: bool = False,
) -> "SO2":
if len(size) != 1:
raise ValueError("The size should be 1D.")
return SO2.exp_map(
2
* theseus.constants.PI
* torch.rand(
size[0],
1,
generator=generator,
dtype=dtype,
device=device,
requires_grad=requires_grad,
)
- theseus.constants.PI
)
@staticmethod
def randn(
*size: int,
generator: Optional[torch.Generator] = None,
dtype: Optional[torch.dtype] = None,
device: Optional[torch.device] = None,
requires_grad: bool = False,
) -> "SO2":
if len(size) != 1:
raise ValueError("The size should be 1D.")
return SO2.exp_map(
theseus.constants.PI
* torch.randn(
size[0],
1,
generator=generator,
dtype=dtype,
device=device,
requires_grad=requires_grad,
)
)
@staticmethod
    def _init_data() -> torch.Tensor:
        return torch.tensor([1.0, 0.0]).view(1, 2)
def update_from_angle(self, theta: torch.Tensor):
self.update(torch.cat([theta.cos(), theta.sin()], dim=1))
def dof(self) -> int:
return 1
def __repr__(self) -> str:
return f"SO2(data={self.data}, name={self.name})"
def __str__(self) -> str:
with torch.no_grad():
theta = torch.atan2(self[:, 1:], self[:, 0:1])
return f"SO2(theta={theta}, name={self.name})"
def theta(self) -> torch.Tensor:
return self.log_map()
def _adjoint_impl(self) -> torch.Tensor:
return torch.ones(self.shape[0], 1, 1, device=self.device, dtype=self.dtype)
def _project_impl(
self, euclidean_grad: torch.Tensor, is_sparse: bool = False
) -> torch.Tensor:
self._project_check(euclidean_grad, is_sparse)
temp = torch.stack((-self[:, 1], self[:, 0]), dim=1)
if is_sparse:
return torch.einsum("i...k,i...k->i...", euclidean_grad, temp).unsqueeze(-1)
else:
return torch.einsum("...k,...k", euclidean_grad, temp).unsqueeze(-1)
@staticmethod
def exp_map(
tangent_vector: torch.Tensor, jacobians: Optional[List[torch.Tensor]] = None
) -> "SO2":
so2 = SO2(dtype=tangent_vector.dtype)
so2.update_from_angle(tangent_vector)
if jacobians is not None:
SO2._check_jacobians_list(jacobians)
jacobians.append(
torch.ones(
tangent_vector.shape[0],
1,
1,
dtype=tangent_vector.dtype,
device=tangent_vector.device,
)
)
return so2
def _log_map_impl(
self, jacobians: Optional[List[torch.Tensor]] = None
) -> torch.Tensor:
if jacobians is not None:
SO2._check_jacobians_list(jacobians)
jacobians.append(
torch.ones(
self.shape[0],
1,
1,
dtype=self.dtype,
device=self.device,
)
)
cosine, sine = self.to_cos_sin()
return torch.atan2(sine, cosine).unsqueeze(1)
def _compose_impl(self, so2_2: LieGroup) -> "SO2":
so2_2 = cast(SO2, so2_2)
cos_1, sin_1 = self.to_cos_sin()
cos_2, sin_2 = so2_2.to_cos_sin()
new_cos = cos_1 * cos_2 - sin_1 * sin_2
new_sin = sin_1 * cos_2 + cos_1 * sin_2
return SO2(data=torch.stack([new_cos, new_sin], dim=1))
def _inverse_impl(self, get_jacobian: bool = False) -> "SO2":
cosine, sine = self.to_cos_sin()
return SO2(data=torch.stack([cosine, -sine], dim=1))
def _rotate_shape_check(self, point: Union[Point2, torch.Tensor]):
err_msg = (
f"SO2 can only transform vectors of shape [{self.shape[0]}, 2] or [1, 2], "
f"but the input has shape {point.shape}."
)
if isinstance(point, torch.Tensor):
if not point.ndim == 2 or point.shape[1] != 2:
raise ValueError(err_msg)
elif point.dof() != 2:
raise ValueError(err_msg)
if (
point.shape[0] != self.shape[0]
and point.shape[0] != 1
and self.shape[0] != 1
):
raise ValueError(
"Input point batch size is not broadcastable with group batch size."
)
@staticmethod
def _rotate_from_cos_sin(
point: Union[Point2, torch.Tensor],
cosine: torch.Tensor,
sine: torch.Tensor,
) -> Point2:
batch_size = max(point.shape[0], cosine.shape[0])
if isinstance(point, torch.Tensor):
if point.ndim != 2 or point.shape[1] != 2:
raise ValueError(
f"Point tensor must have shape batch_size x 2, "
f"but received {point.shape}."
)
point_data = point
else:
point_data = point.data
px, py = point_data[:, 0], point_data[:, 1]
new_point_data = torch.empty(
batch_size, 2, device=cosine.device, dtype=cosine.dtype
)
new_point_data[:, 0] = cosine * px - sine * py
new_point_data[:, 1] = sine * px + cosine * py
return Point2(data=new_point_data)
def rotate(
self,
point: Union[Point2, torch.Tensor],
jacobians: Optional[List[torch.Tensor]] = None,
) -> Point2:
self._rotate_shape_check(point)
cosine, sine = self.to_cos_sin()
ret = SO2._rotate_from_cos_sin(point, cosine, sine)
if jacobians is not None:
self._check_jacobians_list(jacobians)
Jrot = torch.stack([-ret.y(), ret.x()], dim=1).view(-1, 2, 1)
Jpnt = self.to_matrix().expand(ret.shape[0], -1, -1)
jacobians.extend([Jrot, Jpnt])
return ret
def unrotate(
self,
point: Union[Point2, torch.Tensor],
jacobians: Optional[List[torch.Tensor]] = None,
) -> Point2:
self._rotate_shape_check(point)
cosine, sine = self.to_cos_sin()
ret = SO2._rotate_from_cos_sin(point, cosine, -sine)
if jacobians is not None:
self._check_jacobians_list(jacobians)
Jrot = torch.stack([ret.y(), -ret.x()], dim=1).view(-1, 2, 1)
Jpnt = self.to_matrix().transpose(2, 1).expand(ret.shape[0], -1, -1)
jacobians.extend([Jrot, Jpnt])
return ret
def to_cos_sin(self) -> Tuple[torch.Tensor, torch.Tensor]:
return self[:, 0], self[:, 1]
def to_matrix(self) -> torch.Tensor:
matrix = torch.empty(self.shape[0], 2, 2).to(
device=self.device, dtype=self.dtype
)
cosine, sine = self.to_cos_sin()
matrix[:, 0, 0] = cosine
matrix[:, 0, 1] = -sine
matrix[:, 1, 0] = sine
matrix[:, 1, 1] = cosine
return matrix
@staticmethod
def hat(tangent_vector: torch.Tensor) -> torch.Tensor:
matrix = torch.zeros(tangent_vector.shape[0], 2, 2).to(
dtype=tangent_vector.dtype,
device=tangent_vector.device,
)
matrix[:, 0, 1] = -tangent_vector.view(-1)
matrix[:, 1, 0] = tangent_vector.view(-1)
return matrix
@staticmethod
def vee(matrix: torch.Tensor) -> torch.Tensor:
_check = matrix.ndim == 3 and matrix.shape[1:] == (2, 2)
_check &= matrix[:, 0, 0].abs().max().item() < theseus.constants.EPS
_check &= matrix[:, 1, 1].abs().max().item() < theseus.constants.EPS
_check &= torch.allclose(matrix[:, 0, 1], -matrix[:, 1, 0])
if not _check:
raise ValueError("Invalid hat matrix for SO2.")
return matrix[:, 1, 0].clone().view(-1, 1)
def _copy_impl(self, new_name: Optional[str] = None) -> "SO2":
return SO2(data=self.data.clone(), name=new_name)
def copy(self, new_name: Optional[str] = None) -> "SO2":
return cast(SO2, super().copy(new_name=new_name))
rand_so2 = SO2.rand
randn_so2 = SO2.randn
| true
| true
|
1c457240e1e5b43e46789912d1b54ae8e79edea8
| 1,095
|
py
|
Python
|
clients/python-blueplanet/generated/app/openapi_server/test/test_pal_park_area_controller.py
|
cliffano/pokeapi-clients
|
92af296c68c3e94afac52642ae22057faaf071ee
|
[
"MIT"
] | null | null | null |
clients/python-blueplanet/generated/app/openapi_server/test/test_pal_park_area_controller.py
|
cliffano/pokeapi-clients
|
92af296c68c3e94afac52642ae22057faaf071ee
|
[
"MIT"
] | null | null | null |
clients/python-blueplanet/generated/app/openapi_server/test/test_pal_park_area_controller.py
|
cliffano/pokeapi-clients
|
92af296c68c3e94afac52642ae22057faaf071ee
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
from flask import json
from six import BytesIO
from openapi_server.test import BaseTestCase
class TestPalParkAreaController(BaseTestCase):
"""PalParkAreaController integration test stubs"""
def test_pal_park_area_list(self):
"""Test case for pal_park_area_list
"""
query_string = [('limit', 56),
('offset', 56)]
response = self.client.open(
'/api/v2/pal-park-area/',
method='GET',
query_string=query_string)
self.assert200(response,
'Response body is : ' + response.data.decode('utf-8'))
def test_pal_park_area_read(self):
"""Test case for pal_park_area_read
"""
response = self.client.open(
'/api/v2/pal-park-area/{id}'.format(id=56),
method='GET')
self.assert200(response,
'Response body is : ' + response.data.decode('utf-8'))
if __name__ == '__main__':
import unittest
unittest.main()
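The generated stubs can also be driven programmatically; a hedged sketch (assumes the generated openapi_server package and its BaseTestCase app factory are importable):

import unittest
from openapi_server.test.test_pal_park_area_controller import TestPalParkAreaController

suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestPalParkAreaController)
unittest.TextTestRunner(verbosity=2).run(suite)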
| 25.465116
| 77
| 0.584475
|
from __future__ import absolute_import
from flask import json
from six import BytesIO
from openapi_server.test import BaseTestCase
class TestPalParkAreaController(BaseTestCase):
def test_pal_park_area_list(self):
query_string = [('limit', 56),
('offset', 56)]
response = self.client.open(
'/api/v2/pal-park-area/',
method='GET',
query_string=query_string)
self.assert200(response,
'Response body is : ' + response.data.decode('utf-8'))
def test_pal_park_area_read(self):
response = self.client.open(
'/api/v2/pal-park-area/{id}'.format(id=56),
method='GET')
self.assert200(response,
'Response body is : ' + response.data.decode('utf-8'))
if __name__ == '__main__':
import unittest
unittest.main()
| true
| true
|
1c45730c64130865dfab642c09670520f0db3188
| 5,508
|
py
|
Python
|
gammagl/layers/conv/hetero_wrapper.py
|
BUPT-GAMMA/GammaGL
|
2b9f32e1ac3533cb75a063243e8a2fa654466d18
|
[
"Apache-2.0"
] | null | null | null |
gammagl/layers/conv/hetero_wrapper.py
|
BUPT-GAMMA/GammaGL
|
2b9f32e1ac3533cb75a063243e8a2fa654466d18
|
[
"Apache-2.0"
] | null | null | null |
gammagl/layers/conv/hetero_wrapper.py
|
BUPT-GAMMA/GammaGL
|
2b9f32e1ac3533cb75a063243e8a2fa654466d18
|
[
"Apache-2.0"
] | null | null | null |
import tensorlayerx as tlx
import gammagl.mpops as mpops
import warnings
from collections import defaultdict
from typing import Dict, Optional
def group(xs, aggr):
if len(xs) == 0:
return None
elif aggr is None:
return tlx.stack(xs, axis=1)
elif len(xs) == 1:
return xs[0]
else:
out = tlx.stack(xs, axis=0)
out = getattr(tlx, 'reduce_'+aggr)(out, dim=0)
out = out[0] if isinstance(out, tuple) else out
return out
class HeteroConv(tlx.nn.Module):
r"""A generic wrapper for computing graph convolution on heterogeneous
graphs.
This layer will pass messages from source nodes to target nodes based on
the bipartite GNN layer given for a specific edge type.
If multiple relations point to the same destination, their results will be
aggregated according to :attr:`aggr`.
.. code:: python
>>> hetero_conv = HeteroConv({
('paper', 'cites', 'paper'): GCNConv(64, 16),
('author', 'writes', 'paper'): SAGEConv((128, 64), 64),
('paper', 'written_by', 'author'): GATConv((64, 128), 64),
}, aggr='sum')
>>> out_dict = hetero_conv(x_dict, edge_index_dict)
>>> print(list(out_dict.keys()))
['paper', 'author']
Parameters
----------
convs: Dict[Tuple[str, str, str], Module]
A dictionary
holding a bipartite
:class:`~torch_geometric.nn.conv.MessagePassing` layer for each
individual edge type.
aggr: string, optional
The aggregation scheme to use for grouping
node embeddings generated by different relations.
(:obj:`"sum"`, :obj:`"mean"`, :obj:`"min"`, :obj:`"max"`,
:obj:`None`). (default: :obj:`"sum"`)
"""
def __init__(self, convs: dict,
aggr: Optional[str] = "sum"):
super().__init__()
src_node_types = set([key[0] for key in convs.keys()])
dst_node_types = set([key[-1] for key in convs.keys()])
if len(src_node_types - dst_node_types) > 0:
warnings.warn(
f"There exist node types ({src_node_types - dst_node_types}) "
f"whose representations do not get updated during message "
f"passing as they do not occur as destination type in any "
f"edge type. This may lead to unexpected behaviour.")
        self.convs = tlx.nn.ModuleDict({'__'.join(k): v for k, v in convs.items()})
self.aggr = aggr
def reset_parameters(self):
for conv in self.convs.values():
conv.reset_parameters()
def forward(
self,
x_dict,
edge_index_dict,
*args_dict,
**kwargs_dict,
):
r"""
Parameters
----------
x_dict: Dict[str, Tensor]
A dictionary holding node feature
information for each individual node type.
edge_index_dict: Dict[Tuple[str, str, str], Tensor]
A dictionary
holding graph connectivity information for each individual
edge type.
*args_dict: optional
            Additional forward arguments of individual
:class:`torch_geometric.nn.conv.MessagePassing` layers.
**kwargs_dict: optional
Additional forward arguments of
individual :class:`torch_geometric.nn.conv.MessagePassing`
layers.
For example, if a specific GNN layer at edge type
:obj:`edge_type` expects edge attributes :obj:`edge_attr` as a
forward argument, then you can pass them to
:meth:`~torch_geometric.nn.conv.HeteroConv.forward` via
:obj:`edge_attr_dict = { edge_type: edge_attr }`.
"""
out_dict = defaultdict(list)
for edge_type, edge_index in edge_index_dict.items():
src, rel, dst = edge_type
str_edge_type = '__'.join(edge_type)
if str_edge_type not in self.convs:
continue
args = []
for value_dict in args_dict:
if edge_type in value_dict:
args.append(value_dict[edge_type])
elif src == dst and src in value_dict:
args.append(value_dict[src])
elif src in value_dict or dst in value_dict:
args.append(
(value_dict.get(src, None), value_dict.get(dst, None)))
kwargs = {}
for arg, value_dict in kwargs_dict.items():
arg = arg[:-5] # `{*}_dict`
if edge_type in value_dict:
kwargs[arg] = value_dict[edge_type]
elif src == dst and src in value_dict:
kwargs[arg] = value_dict[src]
elif src in value_dict or dst in value_dict:
kwargs[arg] = (value_dict.get(src, None),
value_dict.get(dst, None))
conv = self.convs[str_edge_type]
if src == dst:
out = conv(x_dict[src], edge_index, *args, **kwargs)
else:
out = conv((x_dict[src], x_dict[dst]), edge_index, *args,
**kwargs)
out_dict[dst].append(out)
for key, value in out_dict.items():
out_dict[key] = group(value, self.aggr)
return out_dict
def __repr__(self) -> str:
return f'{self.__class__.__name__}(num_relations={len(self.convs)})'
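For context, edge-type tuples are flattened with '__' to key the ModuleDict, and `group` merges per-relation outputs for a destination node type; a minimal sketch of the unambiguous cases (tensor constructors assumed per tensorlayerx):

import tensorlayerx as tlx

assert '__'.join(('paper', 'cites', 'paper')) == 'paper__cites__paper'

xs = [tlx.ones((4, 8)), tlx.ones((4, 8))]
stacked = group(xs, None)      # aggr=None stacks relations: shape (4, 2, 8)
single = group(xs[:1], 'sum')  # a single relation is returned unchanged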
| 37.469388
| 79
| 0.564089
|
import tensorlayerx as tlx
import gammagl.mpops as mpops
import warnings
from collections import defaultdict
from typing import Dict, Optional
def group(xs, aggr):
if len(xs) == 0:
return None
elif aggr is None:
return tlx.stack(xs, axis=1)
elif len(xs) == 1:
return xs[0]
else:
out = tlx.stack(xs, axis=0)
out = getattr(tlx, 'reduce_'+aggr)(out, dim=0)
out = out[0] if isinstance(out, tuple) else out
return out
class HeteroConv(tlx.nn.Module):
def __init__(self, convs: dict,
aggr: Optional[str] = "sum"):
super().__init__()
src_node_types = set([key[0] for key in convs.keys()])
dst_node_types = set([key[-1] for key in convs.keys()])
if len(src_node_types - dst_node_types) > 0:
warnings.warn(
f"There exist node types ({src_node_types - dst_node_types}) "
f"whose representations do not get updated during message "
f"passing as they do not occur as destination type in any "
f"edge type. This may lead to unexpected behaviour.")
        self.convs = tlx.nn.ModuleDict({'__'.join(k): v for k, v in convs.items()})
self.aggr = aggr
def reset_parameters(self):
for conv in self.convs.values():
conv.reset_parameters()
def forward(
self,
x_dict,
edge_index_dict,
*args_dict,
**kwargs_dict,
):
out_dict = defaultdict(list)
for edge_type, edge_index in edge_index_dict.items():
src, rel, dst = edge_type
str_edge_type = '__'.join(edge_type)
if str_edge_type not in self.convs:
continue
args = []
for value_dict in args_dict:
if edge_type in value_dict:
args.append(value_dict[edge_type])
elif src == dst and src in value_dict:
args.append(value_dict[src])
elif src in value_dict or dst in value_dict:
args.append(
(value_dict.get(src, None), value_dict.get(dst, None)))
kwargs = {}
for arg, value_dict in kwargs_dict.items():
                arg = arg[:-5]
                if edge_type in value_dict:
kwargs[arg] = value_dict[edge_type]
elif src == dst and src in value_dict:
kwargs[arg] = value_dict[src]
elif src in value_dict or dst in value_dict:
kwargs[arg] = (value_dict.get(src, None),
value_dict.get(dst, None))
conv = self.convs[str_edge_type]
if src == dst:
out = conv(x_dict[src], edge_index, *args, **kwargs)
else:
out = conv((x_dict[src], x_dict[dst]), edge_index, *args,
**kwargs)
out_dict[dst].append(out)
for key, value in out_dict.items():
out_dict[key] = group(value, self.aggr)
return out_dict
def __repr__(self) -> str:
return f'{self.__class__.__name__}(num_relations={len(self.convs)})'
| true
| true
|
1c45736df60cd6ec7b8c468f4bdacbfcf5d93fef
| 10,202
|
py
|
Python
|
9_export_fbx_bitbucket_stable/fbx_deformer.py
|
makehumancommunity/community-plugins-fbx
|
1f78be1723063cee0ae83232587431ab0f2e9894
|
[
"MIT"
] | 1
|
2020-04-17T16:32:06.000Z
|
2020-04-17T16:32:06.000Z
|
9_export_fbx_bitbucket_stable/fbx_deformer.py
|
makehumancommunity/community-plugins-fbx
|
1f78be1723063cee0ae83232587431ab0f2e9894
|
[
"MIT"
] | null | null | null |
9_export_fbx_bitbucket_stable/fbx_deformer.py
|
makehumancommunity/community-plugins-fbx
|
1f78be1723063cee0ae83232587431ab0f2e9894
|
[
"MIT"
] | 5
|
2019-06-01T07:04:17.000Z
|
2022-02-21T14:14:51.000Z
|
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-
"""
**Project Name:** MakeHuman
**Product Home Page:** http://www.makehuman.org/
**Code Home Page:** https://bitbucket.org/MakeHuman/makehuman/
**Authors:** Thomas Larsson, Jonas Hauquier
**Copyright(c):** MakeHuman Team 2001-2017
**Licensing:** AGPL3
This file is part of MakeHuman (www.makehuman.org).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Abstract
--------
Fbx mesh
"""
import math
import transformations as tm
from .fbx_utils import *
#--------------------------------------------------------------------
# Object definitions
#--------------------------------------------------------------------
def getObjectCounts(meshes):
"""
Count the total number of vertex groups and shapes required for all
specified meshes.
"""
nVertexGroups = 0
for mesh in meshes:
if mesh.vertexWeights is None:
continue
for weights in mesh.vertexWeights.data:
if weights:
nVertexGroups += 1
nShapes = 0
for mesh in meshes:
if hasattr(mesh, 'shapes') and mesh.shapes is not None:
for key,shape in mesh.shapes:
if shape:
nShapes += 1
return nVertexGroups, nShapes
def countObjects(meshes, skel):
"""
Count the total number of vertex groups and shapes combined, as required
for all specified meshes. If no skeleton rig is attached to the mesh, no
vertex groups for bone weights are required.
"""
nVertexGroups, nShapes = getObjectCounts(meshes)
if skel:
return (nVertexGroups + 1 + 2*nShapes)
else:
return 2*nShapes
def writeObjectDefs(fp, meshes, skel, config):
nVertexGroups, nShapes = getObjectCounts(meshes)
count = countObjects(meshes, skel)
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Definitions')
if count > 0:
fbx_binary.fbx_template_generate(elem, "Deformer", count)
if skel:
fbx_binary.fbx_template_generate(elem, "Pose", 1)
return
if count > 0:
fp.write(
' ObjectType: "Deformer" {\n' +
' Count: %d' % count +
"""
}
""")
if skel:
fp.write("""
ObjectType: "Pose" {
Count: 1
}
""")
#--------------------------------------------------------------------
# Object properties
#--------------------------------------------------------------------
def writeObjectProps(fp, meshes, skel, config):
if skel:
writeBindPose(fp, meshes, skel, config)
for mesh in meshes:
writeDeformer(fp, mesh.name, config)
for bone in skel.getBones():
try:
weights = mesh.vertexWeights.data[bone.name]
except KeyError:
continue
writeSubDeformer(fp, mesh.name, bone, weights, config)
for mesh in meshes:
# TODO support binary FBX shapekey export
if hasattr(mesh, 'shapes') and mesh.shapes is not None:
for sname,shape in mesh.shapes:
writeShapeGeometry(fp, mesh.name, sname, shape, config)
writeShapeDeformer(fp, mesh.name, sname)
writeShapeSubDeformer(fp, mesh.name, sname)
def writeShapeGeometry(fp, name, sname, shape, config):
id,key = getId("Geometry::%s_%sShape" % (name, sname))
nVerts = len(shape.verts)
fp.write(
' Geometry: %d, "%s", "Shape" {\n' % (id, key) +
' version: 100\n' +
' Indexes: *%d {\n' % nVerts +
' a: ')
string = "".join( ['%d,' % vn for vn in shape.verts] )
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' Vertices: *%d {\n' % (3*nVerts) +
' a: ')
target = config.scale * shape.data + config.offset
string = "".join( ["%.4f,%.4f,%.4f," % tuple(dr) for dr in target] )
fp.write(string[:-1])
# Must use normals for shapekeys
fp.write('\n' +
' }\n' +
' Normals: *%d {\n' % (3*nVerts) +
' a: ')
string = nVerts * "0,0,0,"
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' }\n')
def writeShapeDeformer(fp, name, sname):
id,key = getId("Deformer::%s_%sShape" % (name, sname))
fp.write(
' Deformer: %d, "%s", "BlendShape" {\n' % (id, key) +
' Version: 100\n' +
' }\n')
def writeShapeSubDeformer(fp, name, sname):
sid,skey = getId("SubDeformer::%s_%sShape" % (name, sname))
fp.write(
' Deformer: %d, "%s", "BlendShapeChannel" {' % (sid, skey) +
"""
version: 100
deformpercent: 0.0
FullWeights: *1 {
a: 100
}
}
""")
def writeDeformer(fp, name, config):
id,key = getId("Deformer::%s" % name)
properties = [
("MHName", "p_string", "%sSkin" % name, False, True)
]
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
fbx_binary.fbx_data_deformer(elem, key, id, properties)
return
    from . import fbx_utils
fp.write(
' Deformer: %d, "%s", "Skin" {' % (id, key) +
"""
Version: 101
Properties70: {
""" + fbx_utils.get_ascii_properties(properties, indent=3) + """
}
Link_DeformAcuracy: 50
}
""")
def writeSubDeformer(fp, name, bone, weights, config):
id,key = getId("SubDeformer::%s_%s" % (bone.name, name))
bindmat,bindinv = bone.getBindMatrix(config.offset)
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
fbx_binary.fbx_data_subdeformer(elem, key, id, weights[0], weights[1], bindmat, bindinv)
return
nVertexWeights = len(weights[0])
indexString = ','.join(["%d" % vn for vn in weights[0]])
weightString = ','.join(["%4f" % w for w in weights[1]])
fp.write(
' Deformer: %d, "%s", "Cluster" {\n' % (id, key) +
' Version: 100\n' +
' UserData: "", ""\n' +
' Indexes: *%d {\n' % nVertexWeights +
' a: %s\n' % indexString +
' } \n' +
' Weights: *%d {\n' % nVertexWeights +
' a: %s\n' % weightString +
' }\n')
writeMatrix(fp, 'Transform', bindmat)
writeMatrix(fp, 'TransformLink', bindinv)
fp.write(' }\n')
def writeBindPose(fp, meshes, skel, config):
id,key = getId("Pose::" + skel.name)
nBones = skel.getBoneCount()
nMeshes = len(meshes)
# Skeleton bind matrix
skelbindmat = tm.rotation_matrix(math.pi/2, (1,0,0))
count = 1 + nMeshes + nBones
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
pelem = fbx_binary.fbx_data_bindpose_element(elem, key, id, count)
else:
fp.write(
' Pose: %d, "%s", "BindPose" {\n' % (id, key)+
' Type: "BindPose"\n' +
' Version: 100\n' +
' NbPoseNodes: %d\n' % count)
startLinking()
key = "Model::%s" % skel.name
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
else:
poseNode(fp, key, skelbindmat)
for mesh in meshes:
key = "Model::%sMesh" % mesh.name
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
else:
poseNode(fp, key, skelbindmat)
for bone in skel.getBones():
key = "Model::%s" % bone.name
bindmat,_ = bone.getBindMatrix(config.offset)
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, bindmat)
else:
poseNode(fp, key, bindmat)
stopLinking()
if not config.binary:
fp.write(' }\n')
def poseNode(fp, key, matrix):
pid,_ = getId(key)
matrix[:3,3] = 0
fp.write(
' PoseNode: {\n' +
' Node: %d\n' % pid)
writeMatrix(fp, 'Matrix', matrix, " ")
fp.write(' }\n')
#--------------------------------------------------------------------
# Links
#--------------------------------------------------------------------
def writeLinks(fp, meshes, skel, config):
if skel:
for mesh in meshes:
ooLink(fp, 'Deformer::%s' % mesh.name, 'Geometry::%s' % mesh.name, config)
for bone in skel.getBones():
subdef = 'SubDeformer::%s_%s' % (bone.name, mesh.name)
try:
getId(subdef)
except NameError:
continue
ooLink(fp, subdef, 'Deformer::%s' % mesh.name, config)
ooLink(fp, 'Model::%s' % bone.name, subdef, config)
for mesh in meshes:
if hasattr(mesh, 'shapes') and mesh.shapes is not None:
for sname, shape in mesh.shapes:
deform = "Deformer::%s_%sShape" % (mesh.name, sname)
subdef = "SubDeformer::%s_%sShape" % (mesh.name, sname)
ooLink(fp, "Geometry::%s_%sShape" % (mesh.name, sname), subdef)
ooLink(fp, subdef, deform)
ooLink(fp, deform, "Geometry::%s" % mesh.name)
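The ASCII writers above only need a file-like object with `.write`, so a single section can be inspected in memory; a hedged sketch (getId and the linking state come from fbx_utils and are assumed initialised by the exporter):

import io

fp = io.StringIO()
writeShapeDeformer(fp, 'Body', 'smile')
print(fp.getvalue())
# -> Deformer: <id>, "Deformer::Body_smileShape", "BlendShape" { ... }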
| 29.917889
| 96
| 0.525387
|
import math
import transformations as tm
from .fbx_utils import *
def getObjectCounts(meshes):
nVertexGroups = 0
for mesh in meshes:
if mesh.vertexWeights is None:
continue
for weights in mesh.vertexWeights.data:
if weights:
nVertexGroups += 1
nShapes = 0
for mesh in meshes:
if hasattr(mesh, 'shapes') and mesh.shapes is not None:
for key,shape in mesh.shapes:
if shape:
nShapes += 1
return nVertexGroups, nShapes
def countObjects(meshes, skel):
nVertexGroups, nShapes = getObjectCounts(meshes)
if skel:
return (nVertexGroups + 1 + 2*nShapes)
else:
return 2*nShapes
def writeObjectDefs(fp, meshes, skel, config):
nVertexGroups, nShapes = getObjectCounts(meshes)
count = countObjects(meshes, skel)
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Definitions')
if count > 0:
fbx_binary.fbx_template_generate(elem, "Deformer", count)
if skel:
fbx_binary.fbx_template_generate(elem, "Pose", 1)
return
if count > 0:
fp.write(
' ObjectType: "Deformer" {\n' +
' Count: %d' % count +
"""
}
""")
if skel:
fp.write("""
ObjectType: "Pose" {
Count: 1
}
""")
def writeObjectProps(fp, meshes, skel, config):
if skel:
writeBindPose(fp, meshes, skel, config)
for mesh in meshes:
writeDeformer(fp, mesh.name, config)
for bone in skel.getBones():
try:
weights = mesh.vertexWeights.data[bone.name]
except KeyError:
continue
writeSubDeformer(fp, mesh.name, bone, weights, config)
for mesh in meshes:
if hasattr(mesh, 'shapes') and mesh.shapes is not None:
for sname,shape in mesh.shapes:
writeShapeGeometry(fp, mesh.name, sname, shape, config)
writeShapeDeformer(fp, mesh.name, sname)
writeShapeSubDeformer(fp, mesh.name, sname)
def writeShapeGeometry(fp, name, sname, shape, config):
id,key = getId("Geometry::%s_%sShape" % (name, sname))
nVerts = len(shape.verts)
fp.write(
' Geometry: %d, "%s", "Shape" {\n' % (id, key) +
' version: 100\n' +
' Indexes: *%d {\n' % nVerts +
' a: ')
string = "".join( ['%d,' % vn for vn in shape.verts] )
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' Vertices: *%d {\n' % (3*nVerts) +
' a: ')
target = config.scale * shape.data + config.offset
string = "".join( ["%.4f,%.4f,%.4f," % tuple(dr) for dr in target] )
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' Normals: *%d {\n' % (3*nVerts) +
' a: ')
string = nVerts * "0,0,0,"
fp.write(string[:-1])
fp.write('\n' +
' }\n' +
' }\n')
def writeShapeDeformer(fp, name, sname):
id,key = getId("Deformer::%s_%sShape" % (name, sname))
fp.write(
' Deformer: %d, "%s", "BlendShape" {\n' % (id, key) +
' Version: 100\n' +
' }\n')
def writeShapeSubDeformer(fp, name, sname):
sid,skey = getId("SubDeformer::%s_%sShape" % (name, sname))
fp.write(
' Deformer: %d, "%s", "BlendShapeChannel" {' % (sid, skey) +
"""
version: 100
deformpercent: 0.0
FullWeights: *1 {
a: 100
}
}
""")
def writeDeformer(fp, name, config):
id,key = getId("Deformer::%s" % name)
properties = [
("MHName", "p_string", "%sSkin" % name, False, True)
]
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
fbx_binary.fbx_data_deformer(elem, key, id, properties)
return
    from . import fbx_utils
fp.write(
' Deformer: %d, "%s", "Skin" {' % (id, key) +
"""
Version: 101
Properties70: {
""" + fbx_utils.get_ascii_properties(properties, indent=3) + """
}
Link_DeformAcuracy: 50
}
""")
def writeSubDeformer(fp, name, bone, weights, config):
id,key = getId("SubDeformer::%s_%s" % (bone.name, name))
bindmat,bindinv = bone.getBindMatrix(config.offset)
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
fbx_binary.fbx_data_subdeformer(elem, key, id, weights[0], weights[1], bindmat, bindinv)
return
nVertexWeights = len(weights[0])
indexString = ','.join(["%d" % vn for vn in weights[0]])
weightString = ','.join(["%4f" % w for w in weights[1]])
fp.write(
' Deformer: %d, "%s", "Cluster" {\n' % (id, key) +
' Version: 100\n' +
' UserData: "", ""\n' +
' Indexes: *%d {\n' % nVertexWeights +
' a: %s\n' % indexString +
' } \n' +
' Weights: *%d {\n' % nVertexWeights +
' a: %s\n' % weightString +
' }\n')
writeMatrix(fp, 'Transform', bindmat)
writeMatrix(fp, 'TransformLink', bindinv)
fp.write(' }\n')
def writeBindPose(fp, meshes, skel, config):
id,key = getId("Pose::" + skel.name)
nBones = skel.getBoneCount()
nMeshes = len(meshes)
skelbindmat = tm.rotation_matrix(math.pi/2, (1,0,0))
count = 1 + nMeshes + nBones
if config.binary:
from . import fbx_binary
elem = fbx_binary.get_child_element(fp, 'Objects')
pelem = fbx_binary.fbx_data_bindpose_element(elem, key, id, count)
else:
fp.write(
' Pose: %d, "%s", "BindPose" {\n' % (id, key)+
' Type: "BindPose"\n' +
' Version: 100\n' +
' NbPoseNodes: %d\n' % count)
startLinking()
key = "Model::%s" % skel.name
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
else:
poseNode(fp, key, skelbindmat)
for mesh in meshes:
key = "Model::%sMesh" % mesh.name
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, skelbindmat)
else:
poseNode(fp, key, skelbindmat)
for bone in skel.getBones():
key = "Model::%s" % bone.name
bindmat,_ = bone.getBindMatrix(config.offset)
if config.binary:
id,_ = getId(key)
fbx_binary.fbx_data_pose_node_element(pelem, key, id, bindmat)
else:
poseNode(fp, key, bindmat)
stopLinking()
if not config.binary:
fp.write(' }\n')
def poseNode(fp, key, matrix):
pid,_ = getId(key)
matrix[:3,3] = 0
fp.write(
' PoseNode: {\n' +
' Node: %d\n' % pid)
writeMatrix(fp, 'Matrix', matrix, " ")
fp.write(' }\n')
def writeLinks(fp, meshes, skel, config):
if skel:
for mesh in meshes:
ooLink(fp, 'Deformer::%s' % mesh.name, 'Geometry::%s' % mesh.name, config)
for bone in skel.getBones():
subdef = 'SubDeformer::%s_%s' % (bone.name, mesh.name)
try:
getId(subdef)
except NameError:
continue
ooLink(fp, subdef, 'Deformer::%s' % mesh.name, config)
ooLink(fp, 'Model::%s' % bone.name, subdef, config)
for mesh in meshes:
if hasattr(mesh, 'shapes') and mesh.shapes is not None:
for sname, shape in mesh.shapes:
deform = "Deformer::%s_%sShape" % (mesh.name, sname)
subdef = "SubDeformer::%s_%sShape" % (mesh.name, sname)
ooLink(fp, "Geometry::%s_%sShape" % (mesh.name, sname), subdef)
ooLink(fp, subdef, deform)
ooLink(fp, deform, "Geometry::%s" % mesh.name)
| true
| true
|
1c4573e4d03b690f34aa54ed5a53a2890c2dddaf
| 1,030
|
py
|
Python
|
src/data/425.py
|
NULLCT/LOMC
|
79a16474a8f21310e0fb47e536d527dd5dc6d655
|
[
"MIT"
] | null | null | null |
src/data/425.py
|
NULLCT/LOMC
|
79a16474a8f21310e0fb47e536d527dd5dc6d655
|
[
"MIT"
] | null | null | null |
src/data/425.py
|
NULLCT/LOMC
|
79a16474a8f21310e0fb47e536d527dd5dc6d655
|
[
"MIT"
] | null | null | null |
import sys
def main():
sys.setrecursionlimit(1000000)
N, Q = [int(x) for x in input().split()]
    # Build the graph as an adjacency list.
    # For a weighted graph, store [destination, weight] in each row instead.
graph = [[] for i in range(N)]
for i in range(N - 1):
a, b = [int(x) for x in input().split()]
graph[a - 1].append(b - 1)
graph[b - 1].append(a - 1)
queries = []
for i in range(Q):
c, d = [int(x) for x in input().split()]
queries.append([c - 1, d - 1])
distances = [-1 for x in range(N)]
distances[0] = 0
dfs(graph, distances, 0)
for query in queries:
if (distances[query[0]] - distances[query[1]]) % 2 == 0:
print("Town")
else:
print("Road")
def dfs(graph, distances, current_node):
next_nodes = graph[current_node]
for next_node in next_nodes:
if distances[next_node] < 0:
distances[next_node] = distances[current_node] + 1
dfs(graph, distances, next_node)
if __name__ == "__main__":
main()
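Why depth parity decides the answer: on a tree, the path length between c and d is dist(c) + dist(d) - 2*dist(lca), which is even exactly when dist(c) and dist(d) have the same parity. A minimal sketch on a 3-node path:

graph = [[1], [0, 2], [1]]     # path 1-2-3, 0-indexed
distances = [0, -1, -1]
dfs(graph, distances, 0)
assert distances == [0, 1, 2]
# nodes 0 and 2 share parity -> even path length -> "Town"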
| 23.953488
| 64
| 0.551456
|
import sys
def main():
sys.setrecursionlimit(1000000)
N, Q = [int(x) for x in input().split()]
graph = [[] for i in range(N)]
for i in range(N - 1):
a, b = [int(x) for x in input().split()]
graph[a - 1].append(b - 1)
graph[b - 1].append(a - 1)
queries = []
for i in range(Q):
c, d = [int(x) for x in input().split()]
queries.append([c - 1, d - 1])
distances = [-1 for x in range(N)]
distances[0] = 0
dfs(graph, distances, 0)
for query in queries:
if (distances[query[0]] - distances[query[1]]) % 2 == 0:
print("Town")
else:
print("Road")
def dfs(graph, distances, current_node):
next_nodes = graph[current_node]
for next_node in next_nodes:
if distances[next_node] < 0:
distances[next_node] = distances[current_node] + 1
dfs(graph, distances, next_node)
if __name__ == "__main__":
main()
| true
| true
|
1c4574061d4be29467fa53f9afe975e345de3bfa
| 3,268
|
py
|
Python
|
pyInstaller/dash/dash/resources.py
|
rianawillers/dash-lineplot
|
b72c3e4799d39bdc33bbcae2202fdefd6f4af00e
|
[
"MIT"
] | 2
|
2021-09-17T14:23:28.000Z
|
2021-09-17T22:12:50.000Z
|
pyInstaller/dash/dash/resources.py
|
rianawillers/dash-lineplot
|
b72c3e4799d39bdc33bbcae2202fdefd6f4af00e
|
[
"MIT"
] | null | null | null |
pyInstaller/dash/dash/resources.py
|
rianawillers/dash-lineplot
|
b72c3e4799d39bdc33bbcae2202fdefd6f4af00e
|
[
"MIT"
] | 17
|
2019-11-21T14:11:29.000Z
|
2019-11-21T15:26:23.000Z
|
import json
import warnings
import os
from .development.base_component import ComponentRegistry
from . import exceptions
class Resources:
def __init__(self, resource_name):
self._resources = []
self.resource_name = resource_name
def append_resource(self, resource):
self._resources.append(resource)
def _filter_resources(self, all_resources, dev_bundles=False):
filtered_resources = []
for s in all_resources:
filtered_resource = {}
if 'dynamic' in s:
filtered_resource['dynamic'] = s['dynamic']
if 'namespace' in s:
filtered_resource['namespace'] = s['namespace']
if 'external_url' in s and not self.config.serve_locally:
filtered_resource['external_url'] = s['external_url']
elif 'dev_package_path' in s and dev_bundles:
filtered_resource['relative_package_path'] = (
s['dev_package_path']
)
elif 'relative_package_path' in s:
filtered_resource['relative_package_path'] = (
s['relative_package_path']
)
elif 'absolute_path' in s:
filtered_resource['absolute_path'] = s['absolute_path']
elif 'asset_path' in s:
info = os.stat(s['filepath'])
filtered_resource['asset_path'] = s['asset_path']
filtered_resource['ts'] = info.st_mtime
elif self.config.serve_locally:
warnings.warn(
'A local version of {} is not available'.format(
s['external_url']
)
)
continue
else:
raise exceptions.ResourceException(
'{} does not have a '
'relative_package_path, absolute_path, or an '
'external_url.'.format(
json.dumps(filtered_resource)
)
)
filtered_resources.append(filtered_resource)
return filtered_resources
def get_all_resources(self, dev_bundles=False):
lib_resources = ComponentRegistry.get_resources(self.resource_name)
all_resources = lib_resources + self._resources
return self._filter_resources(all_resources, dev_bundles)
# pylint: disable=too-few-public-methods
class _Config:
def __init__(self, serve_locally):
self.serve_locally = serve_locally
class Css:
def __init__(self, serve_locally):
self._resources = Resources('_css_dist')
self._resources.config = self.config = _Config(serve_locally)
def append_css(self, stylesheet):
self._resources.append_resource(stylesheet)
def get_all_css(self):
return self._resources.get_all_resources()
class Scripts:
def __init__(self, serve_locally):
self._resources = Resources('_js_dist')
self._resources.config = self.config = _Config(serve_locally)
def append_script(self, script):
self._resources.append_resource(script)
def get_all_scripts(self, dev_bundles=False):
return self._resources.get_all_resources(dev_bundles)
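# Hedged usage sketch (dict keys taken from _filter_resources above; the
# stylesheet values are placeholders, and with no components registered the
# ComponentRegistry contribution is assumed to be empty):
#
#     css = Css(serve_locally=True)
#     css.append_css({'namespace': 'mylib',
#                     'relative_package_path': 'mylib/styles.css'})
#     css.get_all_css()
#     # -> [{'namespace': 'mylib',
#     #      'relative_package_path': 'mylib/styles.css'}]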
| 34.041667
| 75
| 0.604651
|
import json
import warnings
import os
from .development.base_component import ComponentRegistry
from . import exceptions
class Resources:
def __init__(self, resource_name):
self._resources = []
self.resource_name = resource_name
def append_resource(self, resource):
self._resources.append(resource)
def _filter_resources(self, all_resources, dev_bundles=False):
filtered_resources = []
for s in all_resources:
filtered_resource = {}
if 'dynamic' in s:
filtered_resource['dynamic'] = s['dynamic']
if 'namespace' in s:
filtered_resource['namespace'] = s['namespace']
if 'external_url' in s and not self.config.serve_locally:
filtered_resource['external_url'] = s['external_url']
elif 'dev_package_path' in s and dev_bundles:
filtered_resource['relative_package_path'] = (
s['dev_package_path']
)
elif 'relative_package_path' in s:
filtered_resource['relative_package_path'] = (
s['relative_package_path']
)
elif 'absolute_path' in s:
filtered_resource['absolute_path'] = s['absolute_path']
elif 'asset_path' in s:
info = os.stat(s['filepath'])
filtered_resource['asset_path'] = s['asset_path']
filtered_resource['ts'] = info.st_mtime
elif self.config.serve_locally:
warnings.warn(
'A local version of {} is not available'.format(
s['external_url']
)
)
continue
else:
raise exceptions.ResourceException(
'{} does not have a '
'relative_package_path, absolute_path, or an '
'external_url.'.format(
json.dumps(filtered_resource)
)
)
filtered_resources.append(filtered_resource)
return filtered_resources
def get_all_resources(self, dev_bundles=False):
lib_resources = ComponentRegistry.get_resources(self.resource_name)
all_resources = lib_resources + self._resources
return self._filter_resources(all_resources, dev_bundles)
class _Config:
def __init__(self, serve_locally):
self.serve_locally = serve_locally
class Css:
def __init__(self, serve_locally):
self._resources = Resources('_css_dist')
self._resources.config = self.config = _Config(serve_locally)
def append_css(self, stylesheet):
self._resources.append_resource(stylesheet)
def get_all_css(self):
return self._resources.get_all_resources()
class Scripts:
def __init__(self, serve_locally):
self._resources = Resources('_js_dist')
self._resources.config = self.config = _Config(serve_locally)
def append_script(self, script):
self._resources.append_resource(script)
def get_all_scripts(self, dev_bundles=False):
return self._resources.get_all_resources(dev_bundles)
| true
| true
|
1c45743ca4e65273720ebd7ad9326b42e0788bfd
| 1,734
|
py
|
Python
|
mergesort.py
|
maurendeviia/pythoncharmers
|
b5775d0f51a6f2e5dc0365345e0436dea4c72c14
|
[
"MIT"
] | 37
|
2020-10-01T15:20:12.000Z
|
2021-10-04T14:17:06.000Z
|
mergesort.py
|
maurendeviia/pythoncharmers
|
b5775d0f51a6f2e5dc0365345e0436dea4c72c14
|
[
"MIT"
] | 27
|
2020-10-01T12:32:41.000Z
|
2021-10-04T11:05:34.000Z
|
mergesort.py
|
maurendeviia/pythoncharmers
|
b5775d0f51a6f2e5dc0365345e0436dea4c72c14
|
[
"MIT"
] | 57
|
2020-10-01T11:24:26.000Z
|
2022-02-16T05:09:50.000Z
|
# Python program for implementation of MergeSort
# Merges two subarrays of arr[].
# First subarray is arr[l..m]
# Second subarray is arr[m+1..r]
def merge(arr, l, m, r):
n1 = m - l + 1
    n2 = r - m
# create temp arrays
L = [0] * (n1)
R = [0] * (n2)
# Copy data to temp arrays L[] and R[]
for i in range(0 , n1):
L[i] = arr[l + i]
for j in range(0 , n2):
R[j] = arr[m + 1 + j]
# Merge the temp arrays back into arr[l..r]
i = 0 # Initial index of first subarray
j = 0 # Initial index of second subarray
k = l # Initial index of merged subarray
while i < n1 and j < n2 :
if L[i] <= R[j]:
arr[k] = L[i]
i += 1
else:
arr[k] = R[j]
j += 1
k += 1
# Copy the remaining elements of L[], if there
# are any
while i < n1:
arr[k] = L[i]
i += 1
k += 1
# Copy the remaining elements of R[], if there
# are any
while j < n2:
arr[k] = R[j]
j += 1
k += 1
# l is for left index and r is right index of the
# sub-array of arr to be sorted
def mergeSort(arr,l,r):
if l < r:
# Same as (l+r)//2, but avoids overflow for
        # large l and r
m = (l+(r-1))//2
# Sort first and second halves
mergeSort(arr, l, m)
mergeSort(arr, m+1, r)
merge(arr, l, m, r)
# Driver code to test above
arr = [12, 11, 13, 5, 6, 7]
n = len(arr)
print ("Given array is")
for i in range(n):
print ("%d" %arr[i]),
mergeSort(arr,0,n-1)
print ("\n\nSorted array is")
for i in range(n):
print ("%d" %arr[i]),
| 23.12
| 52
| 0.474625
|
def merge(arr, l, m, r):
n1 = m - l + 1
    n2 = r - m
L = [0] * (n1)
R = [0] * (n2)
for i in range(0 , n1):
L[i] = arr[l + i]
for j in range(0 , n2):
R[j] = arr[m + 1 + j]
    i = 0
    j = 0
    k = l
while i < n1 and j < n2 :
if L[i] <= R[j]:
arr[k] = L[i]
i += 1
else:
arr[k] = R[j]
j += 1
k += 1
while i < n1:
arr[k] = L[i]
i += 1
k += 1
while j < n2:
arr[k] = R[j]
j += 1
k += 1
def mergeSort(arr,l,r):
if l < r:
m = (l+(r-1))//2
mergeSort(arr, l, m)
mergeSort(arr, m+1, r)
merge(arr, l, m, r)
arr = [12, 11, 13, 5, 6, 7]
n = len(arr)
print ("Given array is")
for i in range(n):
print ("%d" %arr[i]),
mergeSort(arr,0,n-1)
print ("\n\nSorted array is")
for i in range(n):
print ("%d" %arr[i]),
| true
| true
|
1c4574486b85926786807ba74806c14f69cb0642
| 716
|
py
|
Python
|
test/ie.py
|
napoler/Terry-toolkit
|
8b1a607fb6d27801b0441b67f7eb0962794a728a
|
[
"MIT"
] | null | null | null |
test/ie.py
|
napoler/Terry-toolkit
|
8b1a607fb6d27801b0441b67f7eb0962794a728a
|
[
"MIT"
] | 14
|
2019-11-15T14:28:22.000Z
|
2022-02-10T00:24:28.000Z
|
test/ie.py
|
napoler/Terry-toolkit
|
8b1a607fb6d27801b0441b67f7eb0962794a728a
|
[
"MIT"
] | 1
|
2020-04-15T12:58:00.000Z
|
2020-04-15T12:58:00.000Z
|
#encoding=utf-8
from __future__ import unicode_literals
import sys
sys.path.append("../")
import Terry_toolkit as tkit
t= tkit.Text()
text="""
柯基犬是个十足的小狗子
"""
# li = t.summary(text=text)
# print(li)
# li = t.get_keyphrases(text=text)
# print(li)
# li = t.sentence_segmentation(text=text)
# print(li)
# li = t.participle(text=text,dotype='words_all_filters')
# print(li)
ie=tkit.TripleIE(model_path='/mnt/data/dev/model/ltp/ltp_data_v3.4.0')
s=ie.get(text)
# print(s)
#
for item in s:
    if item is None:
pass
else:
print(item[0],item[1],item[2])
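# Each non-None item printed above is treated as a (subject, predicate,
# object) triple; the 3-tuple shape is an assumption read off the
# item[0..2] indexing, not documented TripleIE behavior.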
# extractor = tkit.TripleExtractor()
# svos = extractor.triples_main(text)
# # print(svos)
# for item in svos:
# print("".join(item))
| 16.272727
| 70
| 0.666201
|
from __future__ import unicode_literals
import sys
sys.path.append("../")
import Terry_toolkit as tkit
t= tkit.Text()
text="""
柯基犬是个十足的小狗子
"""
ie=tkit.TripleIE(model_path='/mnt/data/dev/model/ltp/ltp_data_v3.4.0')
s=ie.get(text)
for item in s:
    if item is None:
pass
else:
print(item[0],item[1],item[2])
| true
| true
|
1c45754bd696bde5b9f6046fcb305c9e2b18fb6e
| 8,617
|
py
|
Python
|
make_knockoffs.py
|
wfbradley/snpko
|
abc77349d702915519518eacdf919f06579413d0
|
[
"MIT"
] | null | null | null |
make_knockoffs.py
|
wfbradley/snpko
|
abc77349d702915519518eacdf919f06579413d0
|
[
"MIT"
] | null | null | null |
make_knockoffs.py
|
wfbradley/snpko
|
abc77349d702915519518eacdf919f06579413d0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import pandas as pd
import os
import numpy as np
import SNPknock.fastphase as fp
from SNPknock import knockoffHMM
from joblib import Parallel, delayed
import utils_snpko as utils
logger = utils.logger
def make_knockoff(chromosome=None, grouped_by_chromosome=None, df_SNP=None,
df_geno_experiment=None, df_geno_ensembl=None,
SNP_to_wild_type=None, cache_dir=None, path_to_fp=None,
em_iterations=25, random_seed=123):
# assert chromosome!=None and grouped_by_chromosome!=None and df_SNP!=None
assert chromosome is not None
assert grouped_by_chromosome is not None
assert df_SNP is not None
logger.debug("################")
logger.debug("Chromosome %2d #" % chromosome)
logger.debug("################")
num_experiment_people = len(df_geno_experiment)
num_ensembl_people = len(df_geno_ensembl)
indices = grouped_by_chromosome.groups[chromosome]
df_SNP_chromo = df_SNP.iloc[indices].sort_values('chromosome_position')
SNPs_on_chromosome = df_SNP_chromo['SNP'].values
X_experiment = np.empty((num_experiment_people, len(SNPs_on_chromosome)))
X_ensembl = np.empty((num_ensembl_people, len(SNPs_on_chromosome)))
for X, df in [
(X_experiment, df_geno_experiment),
(X_ensembl, df_geno_ensembl)]:
for j, SNP in enumerate(SNPs_on_chromosome):
X[:, j] = utils.genotype_to_nonwild_type_count(
df[SNP].values, SNP_to_wild_type[SNP])
out_path = '%s/chrom_%d' % (cache_dir, chromosome)
# If all relevant files are found in cache, skip EM recomputation; otherwise,
# redo the whole thing.
target_file_suffix_list = [
'alphahat.txt', 'finallikelihoods', 'origchars', 'rhat.txt', 'thetahat.txt']
already_in_cache = True
for suffix in target_file_suffix_list:
target_path = os.path.join(
cache_dir, 'chrom_%d_%s' % (chromosome, suffix))
if not os.path.exists(target_path):
already_in_cache = False
break
if already_in_cache:
logger.debug("Found chrom %d HMM in cache" % chromosome)
else:
# Write array to file
Xfp_file = '%s/X_%d.inp' % (cache_dir, chromosome)
fp.writeX(X_ensembl, Xfp_file)
# Run fastPhase on data (which runs EM)
fp.runFastPhase(path_to_fp, Xfp_file, out_path,
K=12, numit=em_iterations)
# Read in fastPhase results (i.e., HMM parameters) from file:
r_file = out_path + "_rhat.txt"
alpha_file = out_path + "_alphahat.txt"
theta_file = out_path + "_thetahat.txt"
# Why is X_ensembl[0, :] in the function arguments below?
hmm = fp.loadFit(r_file, theta_file, alpha_file, X_ensembl[0, :])
# Actually produce the knockoffs
knockoffs = knockoffHMM(hmm["pInit"], hmm["Q"], hmm[
"pEmit"], seed=random_seed)
X_knockoffs = knockoffs.sample(X_experiment)
return(X_knockoffs, X_experiment, SNPs_on_chromosome)
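# Hedged call sketch for a single chromosome (the dataframe and mapping
# arguments are assumed to be the pruned inputs that make_all_knockoffs
# below loads from CSV):
#
#     X_ko, X_obs, snps = make_knockoff(
#         chromosome=1, grouped_by_chromosome=df_SNP.groupby('chromosome'),
#         df_SNP=df_SNP, df_geno_experiment=df_exp, df_geno_ensembl=df_ens,
#         SNP_to_wild_type=wild_map, cache_dir='cache', path_to_fp='fastPHASE')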
def make_all_knockoffs(args):
'''
For each chromosome, independently:
Sort SNPs according to position on genome.
Train HMM parameters with EM on ENSEMBL data.
        Generate knockoffs of the experimental SNP data.
    For now, we ignore the sex of individuals, although that is
    available in ENSEMBL.
'''
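    # Hedged outline of the docstring above (step names are descriptive,
    # not functions defined in this file):
    #   1. fit fastPHASE HMM parameters per chromosome on the ENSEMBL panel
    #   2. sample knockoffHMM copies of the experimental genotypes
    #   3. write matched (SNP, SNP_knockoff) columns to knockoffs_*.csv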
logger.info("####################################")
logger.info("Fitting HMM and generating knockoffs")
path_to_fp = os.path.join(args.fastPHASE_path, 'fastPHASE')
if not(os.path.exists(path_to_fp)):
logger.info("Cannot find fastPHASE at %s" % path_to_fp)
raise Exception
cache_dir = os.path.join(args.working_dir, 'fastphase_cache')
utils.safe_mkdir(cache_dir)
df_geno_ensembl = pd.read_csv(os.path.join(
(args.working_dir), 'pruned_ensembl.csv'))
# SNP,wild_type,chromosome,chromosome_position
df_SNP = pd.read_csv(os.path.join(
(args.working_dir), 'pruned_SNP_facts.csv'))
df_wild = pd.read_csv(os.path.join(args.working_dir, 'wild_types.csv'))
SNP_to_wild_type = dict(
zip(df_wild['SNP'].values, df_wild['wild_type'].values))
chromosome_list = np.sort(np.unique(df_SNP['chromosome']))
for chromosome in chromosome_list:
assert chromosome in np.arange(1, 24)
df_geno_experiment = pd.read_csv(os.path.join(
(args.working_dir), 'pruned_experiment.csv'))
# Make sure we have the same SNPs everywhere.
assert (set([c for c in df_geno_ensembl.columns if c.startswith('rs')]) ==
set([c for c in df_geno_experiment.columns if c.startswith('rs')]))
for SNP in df_SNP.SNP.values:
assert SNP in df_geno_ensembl.columns
grouped_by_chromosome = df_SNP.groupby('chromosome')
num_experiment_people = len(df_geno_experiment)
knockoff_SNP_list = []
utils.safe_mkdir(os.path.join(args.working_dir, 'knockoffs'))
em_iterations = 500
logger.info('Number of EM iterations: %d' % em_iterations)
for knockoff_trial_count in xrange(args.num_knockoff_trials):
random_seed = knockoff_trial_count + args.random_seed
if ((args.num_knockoff_trials <= 20) or
knockoff_trial_count % ((args.num_knockoff_trials) // 20) == 0):
logger.info("Knockoff sampling %d of %d" % (
knockoff_trial_count, args.num_knockoff_trials))
if False:
# Serial version; code preserved for debugging purposes
for chromosome in chromosome_list:
knockoff_SNP_list.append(
make_knockoff(
chromosome=chromosome,
grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir,
path_to_fp=path_to_fp, em_iterations=em_iterations, random_seed=random_seed))
else:
knockoff_SNP_list = Parallel(n_jobs=args.num_workers)(
delayed(make_knockoff)(
chromosome=i,
grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir, path_to_fp=path_to_fp,
em_iterations=em_iterations, random_seed=random_seed)
for i in chromosome_list)
# Stitch results for each chromosome back together into a single dataframe
# Knockoff results
SNP_columns = [
x for x in df_geno_ensembl.columns if x.startswith('rs')]
df_knockoffs = pd.DataFrame(
columns=SNP_columns, index=np.arange(num_experiment_people))
# Matched experimental observations + knockoffs in one dataframe
matched_columns = []
data_labels = []
for field in df_geno_experiment.columns:
if field.startswith('rs'):
matched_columns.append(field)
matched_columns.append(field + '_knockoff')
elif field.startswith(args.data_prefix):
data_labels.append(field)
else:
continue
df_matched = pd.DataFrame(columns=matched_columns + data_labels,
index=np.arange(num_experiment_people))
for (X_knockoffs, X_experiment, SNPs_on_chromosome) in knockoff_SNP_list:
for i in xrange(num_experiment_people):
for j, SNP in enumerate(SNPs_on_chromosome):
df_knockoffs[SNP].values[i] = X_knockoffs[i, j]
df_matched[SNP].values[i] = int(X_experiment[i, j])
df_matched[
SNP + '_knockoff'].values[i] = int(X_knockoffs[i, j])
for data_label in data_labels:
df_matched[data_label] = df_geno_experiment[data_label]
# Sanity check that all fields are filled in.
for field in df_knockoffs:
for i in xrange(num_experiment_people):
assert pd.notnull(df_knockoffs[field].values[i])
df_matched.to_csv(os.path.join((args.working_dir), 'knockoffs',
'knockoffs_%03d.csv' % knockoff_trial_count),
index=False)
logger.info("Done making knockoffs!!!")
if __name__ == '__main__':
args = utils.parse_arguments()
utils.initialize_logger(args)
make_all_knockoffs(args)
| 40.455399
| 101
| 0.646049
|
import pandas as pd
import os
import numpy as np
import SNPknock.fastphase as fp
from SNPknock import knockoffHMM
from joblib import Parallel, delayed
import utils_snpko as utils
logger = utils.logger
def make_knockoff(chromosome=None, grouped_by_chromosome=None, df_SNP=None,
df_geno_experiment=None, df_geno_ensembl=None,
SNP_to_wild_type=None, cache_dir=None, path_to_fp=None,
em_iterations=25, random_seed=123):
assert chromosome is not None
assert grouped_by_chromosome is not None
assert df_SNP is not None
logger.debug("################")
logger.debug("Chromosome %2d #" % chromosome)
logger.debug("################")
num_experiment_people = len(df_geno_experiment)
num_ensembl_people = len(df_geno_ensembl)
indices = grouped_by_chromosome.groups[chromosome]
df_SNP_chromo = df_SNP.iloc[indices].sort_values('chromosome_position')
SNPs_on_chromosome = df_SNP_chromo['SNP'].values
X_experiment = np.empty((num_experiment_people, len(SNPs_on_chromosome)))
X_ensembl = np.empty((num_ensembl_people, len(SNPs_on_chromosome)))
for X, df in [
(X_experiment, df_geno_experiment),
(X_ensembl, df_geno_ensembl)]:
for j, SNP in enumerate(SNPs_on_chromosome):
X[:, j] = utils.genotype_to_nonwild_type_count(
df[SNP].values, SNP_to_wild_type[SNP])
out_path = '%s/chrom_%d' % (cache_dir, chromosome)
target_file_suffix_list = [
'alphahat.txt', 'finallikelihoods', 'origchars', 'rhat.txt', 'thetahat.txt']
already_in_cache = True
for suffix in target_file_suffix_list:
target_path = os.path.join(
cache_dir, 'chrom_%d_%s' % (chromosome, suffix))
if not os.path.exists(target_path):
already_in_cache = False
break
if already_in_cache:
logger.debug("Found chrom %d HMM in cache" % chromosome)
else:
Xfp_file = '%s/X_%d.inp' % (cache_dir, chromosome)
fp.writeX(X_ensembl, Xfp_file)
fp.runFastPhase(path_to_fp, Xfp_file, out_path,
K=12, numit=em_iterations)
r_file = out_path + "_rhat.txt"
alpha_file = out_path + "_alphahat.txt"
theta_file = out_path + "_thetahat.txt"
hmm = fp.loadFit(r_file, theta_file, alpha_file, X_ensembl[0, :])
knockoffs = knockoffHMM(hmm["pInit"], hmm["Q"], hmm[
"pEmit"], seed=random_seed)
X_knockoffs = knockoffs.sample(X_experiment)
return(X_knockoffs, X_experiment, SNPs_on_chromosome)
def make_all_knockoffs(args):
logger.info("####################################")
logger.info("Fitting HMM and generating knockoffs")
path_to_fp = os.path.join(args.fastPHASE_path, 'fastPHASE')
if not(os.path.exists(path_to_fp)):
logger.info("Cannot find fastPHASE at %s" % path_to_fp)
raise Exception
cache_dir = os.path.join(args.working_dir, 'fastphase_cache')
utils.safe_mkdir(cache_dir)
df_geno_ensembl = pd.read_csv(os.path.join(
(args.working_dir), 'pruned_ensembl.csv'))
df_SNP = pd.read_csv(os.path.join(
(args.working_dir), 'pruned_SNP_facts.csv'))
df_wild = pd.read_csv(os.path.join(args.working_dir, 'wild_types.csv'))
SNP_to_wild_type = dict(
zip(df_wild['SNP'].values, df_wild['wild_type'].values))
chromosome_list = np.sort(np.unique(df_SNP['chromosome']))
for chromosome in chromosome_list:
assert chromosome in np.arange(1, 24)
df_geno_experiment = pd.read_csv(os.path.join(
(args.working_dir), 'pruned_experiment.csv'))
assert (set([c for c in df_geno_ensembl.columns if c.startswith('rs')]) ==
set([c for c in df_geno_experiment.columns if c.startswith('rs')]))
for SNP in df_SNP.SNP.values:
assert SNP in df_geno_ensembl.columns
grouped_by_chromosome = df_SNP.groupby('chromosome')
num_experiment_people = len(df_geno_experiment)
knockoff_SNP_list = []
utils.safe_mkdir(os.path.join(args.working_dir, 'knockoffs'))
em_iterations = 500
logger.info('Number of EM iterations: %d' % em_iterations)
for knockoff_trial_count in xrange(args.num_knockoff_trials):
random_seed = knockoff_trial_count + args.random_seed
if ((args.num_knockoff_trials <= 20) or
knockoff_trial_count % ((args.num_knockoff_trials) // 20) == 0):
logger.info("Knockoff sampling %d of %d" % (
knockoff_trial_count, args.num_knockoff_trials))
if False:
for chromosome in chromosome_list:
knockoff_SNP_list.append(
make_knockoff(
chromosome=chromosome,
grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir,
path_to_fp=path_to_fp, em_iterations=em_iterations, random_seed=random_seed))
else:
knockoff_SNP_list = Parallel(n_jobs=args.num_workers)(
delayed(make_knockoff)(
chromosome=i,
grouped_by_chromosome=grouped_by_chromosome, df_SNP=df_SNP,
df_geno_experiment=df_geno_experiment, df_geno_ensembl=df_geno_ensembl,
SNP_to_wild_type=SNP_to_wild_type, cache_dir=cache_dir, path_to_fp=path_to_fp,
em_iterations=em_iterations, random_seed=random_seed)
for i in chromosome_list)
SNP_columns = [
x for x in df_geno_ensembl.columns if x.startswith('rs')]
df_knockoffs = pd.DataFrame(
columns=SNP_columns, index=np.arange(num_experiment_people))
matched_columns = []
data_labels = []
for field in df_geno_experiment.columns:
if field.startswith('rs'):
matched_columns.append(field)
matched_columns.append(field + '_knockoff')
elif field.startswith(args.data_prefix):
data_labels.append(field)
else:
continue
df_matched = pd.DataFrame(columns=matched_columns + data_labels,
index=np.arange(num_experiment_people))
for (X_knockoffs, X_experiment, SNPs_on_chromosome) in knockoff_SNP_list:
for i in xrange(num_experiment_people):
for j, SNP in enumerate(SNPs_on_chromosome):
df_knockoffs[SNP].values[i] = X_knockoffs[i, j]
df_matched[SNP].values[i] = int(X_experiment[i, j])
df_matched[
SNP + '_knockoff'].values[i] = int(X_knockoffs[i, j])
for data_label in data_labels:
df_matched[data_label] = df_geno_experiment[data_label]
for field in df_knockoffs:
for i in xrange(num_experiment_people):
assert pd.notnull(df_knockoffs[field].values[i])
df_matched.to_csv(os.path.join((args.working_dir), 'knockoffs',
'knockoffs_%03d.csv' % knockoff_trial_count),
index=False)
logger.info("Done making knockoffs!!!")
if __name__ == '__main__':
args = utils.parse_arguments()
utils.initialize_logger(args)
make_all_knockoffs(args)
| true
| true
|
1c45754f10176c7ca4335379fbf3a06ceffc08ee
| 13,895
|
py
|
Python
|
robots/login.py
|
sicekit/sicekit
|
30d6b665ed083893792579f6640c897e932b4ff0
|
[
"MIT"
] | 11
|
2015-04-13T15:40:00.000Z
|
2021-11-09T14:55:25.000Z
|
robots/login.py
|
sicekit/sicekit
|
30d6b665ed083893792579f6640c897e932b4ff0
|
[
"MIT"
] | null | null | null |
robots/login.py
|
sicekit/sicekit
|
30d6b665ed083893792579f6640c897e932b4ff0
|
[
"MIT"
] | 4
|
2016-06-07T06:39:39.000Z
|
2019-05-11T09:34:20.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Script to log the robot in to a wiki account.
It is suggested to make a special account for robot use only. Make sure
this robot account is well known on your home wikipedia before using it.
Parameters:
-all Try to log in on all sites where a username is defined in
user-config.py.
-pass Useful in combination with -all when you have accounts for
several sites and use the same password for all of them.
Asks you for the password, then logs in on all given sites.
-pass:XXXX Uses XXXX as password. Be careful if you use this
parameter because your password will be shown on your
screen, and will probably be saved in your command line
history. This is NOT RECOMMENDED for use on computers
where others have either physical or remote access.
Use -pass instead.
-sysop Log in with your sysop account.
-force Ignores if the user is already logged in, and tries to log in.
-v -v      (doubly verbose) Shows http requests made when logging in. This
           might leak private data (password, session id), so make sure to
           check the output. Using -log is recommended: this will output a
           lot of data.
If not given as parameter, the script will ask for your username and password
(password entry will be hidden), log in to your home wiki using this
combination, and store the resulting cookies (containing your password hash,
so keep it secured!) in a file in the login-data subdirectory.
All scripts in this library will be looking for this cookie file and will use the
login information if it is present.
To log out, throw away the XX-login.data file that is created in the login-data
subdirectory.
"""
#
# (C) Rob W.W. Hooft, 2003
#
# Distributed under the terms of the MIT license.
#
__version__='$Id: login.py 7034 2009-07-09 10:11:29Z alexsh $'
import re
import urllib2
import wikipedia, config
# On some wikis you are only allowed to run a bot if there is a link to
# the bot's user page in a specific list.
botList = {
'wikipedia': {
'en': u'Wikipedia:Registered bots',
# Disabled because they are now using a template system which
# we can't check with our current code.
#'simple': u'Wikipedia:Bots',
},
'gentoo': {
'en': u'Help:Bots',
}
}
class LoginManager:
def __init__(self, password = None, sysop = False, site = None, username=None, verbose=False):
self.site = site or wikipedia.getSite()
if username:
self.username=username
# perform writeback.
if site.family.name not in config.usernames:
config.usernames[site.family.name]={}
config.usernames[site.family.name][self.site.lang]=username
else:
if sysop:
try:
self.username = config.sysopnames[self.site.family.name][self.site.lang]
                except KeyError:
raise wikipedia.NoUsername(u'ERROR: Sysop username for %s:%s is undefined.\nIf you have a sysop account for that site, please add such a line to user-config.py:\n\nsysopnames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
else:
try:
self.username = config.usernames[self.site.family.name][self.site.lang]
                except KeyError:
raise wikipedia.NoUsername(u'ERROR: Username for %s:%s is undefined.\nIf you have an account for that site, please add such a line to user-config.py:\n\nusernames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
self.password = password
self.verbose = verbose
if getattr(config, 'password_file', ''):
self.readPassword()
def botAllowed(self):
"""
Checks whether the bot is listed on a specific page to comply with
the policy on the respective wiki.
"""
if self.site.family.name in botList and self.site.language() in botList[self.site.family.name]:
botListPageTitle = wikipedia.translate(self.site.language(), botList)
botListPage = wikipedia.Page(self.site, botListPageTitle)
for linkedPage in botListPage.linkedPages():
if linkedPage.titleWithoutNamespace() == self.username:
return True
return False
else:
# No bot policies on other
return True
def getCookie(self, api = config.use_api_login, remember=True, captcha = None):
"""
Login to the site.
remember Remember login (default: True)
captchaId A dictionary containing the captcha id and answer, if any
Returns cookie data if succesful, None otherwise.
"""
if api:
predata = {
'action': 'login',
'lgname': self.username.encode(self.site.encoding()),
'lgpassword': self.password,
'lgdomain': self.site.family.ldapDomain,
}
address = self.site.api_address()
else:
predata = {
"wpName": self.username.encode(self.site.encoding()),
"wpPassword": self.password,
"wpDomain": self.site.family.ldapDomain, # VistaPrint fix
"wpLoginattempt": "Aanmelden & Inschrijven", # dutch button label seems to work for all wikis
"wpRemember": str(int(bool(remember))),
"wpSkipCookieCheck": '1'
}
if captcha:
predata["wpCaptchaId"] = captcha['id']
predata["wpCaptchaWord"] = captcha['answer']
login_address = self.site.login_address()
address = login_address + '&action=submit'
if self.site.hostname() in config.authenticate.keys():
headers = {
"Content-type": "application/x-www-form-urlencoded",
"User-agent": wikipedia.useragent
}
data = self.site.urlEncode(predata)
if self.verbose:
fakepredata = predata
fakepredata['wpPassword'] = u'XXXX'
wikipedia.output(u"urllib2.urlopen(urllib2.Request('%s', %s, %s)):" % (self.site.protocol() + '://' + self.site.hostname() + address, self.site.urlEncode(fakepredata), headers))
response = urllib2.urlopen(urllib2.Request(self.site.protocol() + '://' + self.site.hostname() + address, data, headers))
data = response.read()
if self.verbose:
fakedata = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", data)
trans = config.transliterate
config.transliterate = False #transliteration breaks for some reason
wikipedia.output(fakedata.decode(self.site.encoding()))
config.transliterate = trans
wikipedia.cj.save(wikipedia.COOKIEFILE)
return "Ok"
else:
response, data = self.site.postData(address, self.site.urlEncode(predata))
if self.verbose:
fakepredata = predata
fakepredata['wpPassword'] = fakepredata['lgpassword'] = u'XXXXX'
wikipedia.output(u"self.site.postData(%s, %s)" % (address, self.site.urlEncode(fakepredata)))
fakeresponsemsg = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", response.msg.__str__())
wikipedia.output(u"%s/%s\n%s" % (response.status, response.reason, fakeresponsemsg))
wikipedia.output(u"%s" % data)
Reat=re.compile(': (.*?);')
L = []
for eat in response.msg.getallmatchingheaders('set-cookie'):
m = Reat.search(eat)
if m:
L.append(m.group(1))
got_token = got_user = False
for Ldata in L:
if 'Token=' in Ldata:
got_token = True
if 'User=' in Ldata or 'UserName=' in Ldata:
got_user = True
if got_token and got_user:
return "\n".join(L)
elif not captcha:
solve = self.site.solveCaptcha(data)
if solve:
return self.getCookie(api = api, remember = remember, captcha = solve)
return None
def storecookiedata(self, data):
"""
Stores cookie data.
The argument data is the raw data, as returned by getCookie().
Returns nothing."""
filename = wikipedia.config.datafilepath('login-data',
'%s-%s-%s-login.data'
% (self.site.family.name, self.site.lang, self.username))
f = open(filename, 'w')
f.write(data)
f.close()
def readPassword(self):
"""
Reads passwords from a file. DO NOT FORGET TO REMOVE READ
ACCESS FOR OTHER USERS!!! Use chmod 600 password-file.
All lines below should be valid Python tuples in the form
(code, family, username, password) or (username, password)
to set a default password for an username. Default usernames
should occur above specific usernames.
Example:
("my_username", "my_default_password")
("my_sysop_user", "my_sysop_password")
("en", "wikipedia", "my_en_user", "my_en_pass")
"""
file = open(wikipedia.config.datafilepath(config.password_file))
for line in file:
if not line.strip(): continue
entry = eval(line)
if len(entry) == 2: #for default userinfo
if entry[0] == self.username: self.password = entry[1]
elif len(entry) == 4: #for userinfo included code and family
if entry[0] == self.site.lang and \
entry[1] == self.site.family.name and \
entry[2] == self.username:
self.password = entry[3]
file.close()
def login(self, api = config.use_api_login, retry = False):
if not self.password:
# As we don't want the password to appear on the screen, we set
# password = True
self.password = wikipedia.input(u'Password for user %s on %s:' % (self.username, self.site), password = True)
self.password = self.password.encode(self.site.encoding())
wikipedia.output(u"Logging in to %s as %s" % (self.site, self.username))
try:
cookiedata = self.getCookie(api = api)
except NotImplementedError:
            wikipedia.output('API disabled because this site does not support it.')
config.use_api_login = api = False
cookiedata = self.getCookie(api = api)
if cookiedata:
self.storecookiedata(cookiedata)
wikipedia.output(u"Should be logged in now")
# Show a warning according to the local bot policy
if not self.botAllowed():
wikipedia.output(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.lang])
return True
else:
wikipedia.output(u"Login failed. Wrong password or CAPTCHA answer?")
if api:
wikipedia.output(u"API login failed, retrying using standard webpage.")
return self.login(api = False, retry = retry)
if retry:
self.password = None
return self.login(api = api, retry = True)
else:
return False
def showCaptchaWindow(self, url):
pass
def main():
username = password = None
sysop = False
logall = False
forceLogin = False
verbose = False
for arg in wikipedia.handleArgs():
if arg.startswith("-pass"):
if len(arg) == 5:
password = wikipedia.input(u'Password for all accounts:', password = True)
else:
password = arg[6:]
elif arg == "-sysop":
sysop = True
elif arg == "-all":
logall = True
elif arg == "-force":
forceLogin = True
else:
wikipedia.showHelp('login')
return
if wikipedia.verbose > 1:
wikipedia.output(u"WARNING: Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.")
verbose = True # only use this verbose when running from login.py
if logall:
if sysop:
namedict = config.sysopnames
else:
namedict = config.usernames
for familyName in namedict.iterkeys():
for lang in namedict[familyName].iterkeys():
try:
site = wikipedia.getSite( code=lang, fam=familyName )
if not forceLogin and site.loggedInAs(sysop = sysop) is not None:
wikipedia.output(u'Already logged in on %s' % site)
else:
loginMan = LoginManager(password, sysop = sysop, site = site, verbose=verbose)
loginMan.login()
except wikipedia.NoSuchSite:
wikipedia.output(lang+ u'.' + familyName + u' is not a valid site, please remove it from your config')
else:
loginMan = LoginManager(password, sysop = sysop, verbose=verbose)
loginMan.login()
if __name__ == "__main__":
try:
main()
finally:
wikipedia.stopme()
| 42.362805
| 310
| 0.580784
|
__version__='$Id: login.py 7034 2009-07-09 10:11:29Z alexsh $'
import re
import urllib2
import wikipedia, config
botList = {
'wikipedia': {
'en': u'Wikipedia:Registered bots',
# Disabled because they are now using a template system which
# we can't check with our current code.
},
'gentoo': {
'en': u'Help:Bots',
}
}
class LoginManager:
def __init__(self, password = None, sysop = False, site = None, username=None, verbose=False):
self.site = site or wikipedia.getSite()
if username:
self.username=username
if site.family.name not in config.usernames:
config.usernames[site.family.name]={}
config.usernames[site.family.name][self.site.lang]=username
else:
if sysop:
try:
self.username = config.sysopnames[self.site.family.name][self.site.lang]
                except KeyError:
raise wikipedia.NoUsername(u'ERROR: Sysop username for %s:%s is undefined.\nIf you have a sysop account for that site, please add such a line to user-config.py:\n\nsysopnames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
else:
try:
self.username = config.usernames[self.site.family.name][self.site.lang]
                except KeyError:
raise wikipedia.NoUsername(u'ERROR: Username for %s:%s is undefined.\nIf you have an account for that site, please add such a line to user-config.py:\n\nusernames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
self.password = password
self.verbose = verbose
if getattr(config, 'password_file', ''):
self.readPassword()
def botAllowed(self):
if self.site.family.name in botList and self.site.language() in botList[self.site.family.name]:
botListPageTitle = wikipedia.translate(self.site.language(), botList)
botListPage = wikipedia.Page(self.site, botListPageTitle)
for linkedPage in botListPage.linkedPages():
if linkedPage.titleWithoutNamespace() == self.username:
return True
return False
else:
return True
def getCookie(self, api = config.use_api_login, remember=True, captcha = None):
if api:
predata = {
'action': 'login',
'lgname': self.username.encode(self.site.encoding()),
'lgpassword': self.password,
'lgdomain': self.site.family.ldapDomain,
}
address = self.site.api_address()
else:
predata = {
"wpName": self.username.encode(self.site.encoding()),
"wpPassword": self.password,
"wpDomain": self.site.family.ldapDomain, "wpLoginattempt": "Aanmelden & Inschrijven", "wpRemember": str(int(bool(remember))),
"wpSkipCookieCheck": '1'
}
if captcha:
predata["wpCaptchaId"] = captcha['id']
predata["wpCaptchaWord"] = captcha['answer']
login_address = self.site.login_address()
address = login_address + '&action=submit'
if self.site.hostname() in config.authenticate.keys():
headers = {
"Content-type": "application/x-www-form-urlencoded",
"User-agent": wikipedia.useragent
}
data = self.site.urlEncode(predata)
if self.verbose:
fakepredata = predata
fakepredata['wpPassword'] = u'XXXX'
wikipedia.output(u"urllib2.urlopen(urllib2.Request('%s', %s, %s)):" % (self.site.protocol() + '://' + self.site.hostname() + address, self.site.urlEncode(fakepredata), headers))
response = urllib2.urlopen(urllib2.Request(self.site.protocol() + '://' + self.site.hostname() + address, data, headers))
data = response.read()
if self.verbose:
fakedata = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", data)
trans = config.transliterate
                config.transliterate = False
                wikipedia.output(fakedata.decode(self.site.encoding()))
config.transliterate = trans
wikipedia.cj.save(wikipedia.COOKIEFILE)
return "Ok"
else:
response, data = self.site.postData(address, self.site.urlEncode(predata))
if self.verbose:
fakepredata = predata
fakepredata['wpPassword'] = fakepredata['lgpassword'] = u'XXXXX'
wikipedia.output(u"self.site.postData(%s, %s)" % (address, self.site.urlEncode(fakepredata)))
fakeresponsemsg = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", response.msg.__str__())
wikipedia.output(u"%s/%s\n%s" % (response.status, response.reason, fakeresponsemsg))
wikipedia.output(u"%s" % data)
Reat=re.compile(': (.*?);')
L = []
for eat in response.msg.getallmatchingheaders('set-cookie'):
m = Reat.search(eat)
if m:
L.append(m.group(1))
got_token = got_user = False
for Ldata in L:
if 'Token=' in Ldata:
got_token = True
if 'User=' in Ldata or 'UserName=' in Ldata:
got_user = True
if got_token and got_user:
return "\n".join(L)
elif not captcha:
solve = self.site.solveCaptcha(data)
if solve:
return self.getCookie(api = api, remember = remember, captcha = solve)
return None
def storecookiedata(self, data):
filename = wikipedia.config.datafilepath('login-data',
'%s-%s-%s-login.data'
% (self.site.family.name, self.site.lang, self.username))
f = open(filename, 'w')
f.write(data)
f.close()
def readPassword(self):
file = open(wikipedia.config.datafilepath(config.password_file))
for line in file:
if not line.strip(): continue
entry = eval(line)
            if len(entry) == 2:
                if entry[0] == self.username: self.password = entry[1]
            elif len(entry) == 4:
                if entry[0] == self.site.lang and \
                   entry[1] == self.site.family.name and \
                   entry[2] == self.username:
                    self.password = entry[3]
file.close()
def login(self, api = config.use_api_login, retry = False):
if not self.password:
self.password = wikipedia.input(u'Password for user %s on %s:' % (self.username, self.site), password = True)
self.password = self.password.encode(self.site.encoding())
wikipedia.output(u"Logging in to %s as %s" % (self.site, self.username))
try:
cookiedata = self.getCookie(api = api)
except NotImplementedError:
            wikipedia.output('API disabled because this site does not support it.')
config.use_api_login = api = False
cookiedata = self.getCookie(api = api)
if cookiedata:
self.storecookiedata(cookiedata)
wikipedia.output(u"Should be logged in now")
# Show a warning according to the local bot policy
if not self.botAllowed():
wikipedia.output(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.lang])
return True
else:
wikipedia.output(u"Login failed. Wrong password or CAPTCHA answer?")
if api:
wikipedia.output(u"API login failed, retrying using standard webpage.")
return self.login(api = False, retry = retry)
if retry:
self.password = None
return self.login(api = api, retry = True)
else:
return False
def showCaptchaWindow(self, url):
pass
def main():
username = password = None
sysop = False
logall = False
forceLogin = False
verbose = False
for arg in wikipedia.handleArgs():
if arg.startswith("-pass"):
if len(arg) == 5:
password = wikipedia.input(u'Password for all accounts:', password = True)
else:
password = arg[6:]
elif arg == "-sysop":
sysop = True
elif arg == "-all":
logall = True
elif arg == "-force":
forceLogin = True
else:
wikipedia.showHelp('login')
return
if wikipedia.verbose > 1:
wikipedia.output(u"WARNING: Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.")
verbose = True # only use this verbose when running from login.py
if logall:
if sysop:
namedict = config.sysopnames
else:
namedict = config.usernames
for familyName in namedict.iterkeys():
for lang in namedict[familyName].iterkeys():
try:
site = wikipedia.getSite( code=lang, fam=familyName )
if not forceLogin and site.loggedInAs(sysop = sysop) is not None:
wikipedia.output(u'Already logged in on %s' % site)
else:
loginMan = LoginManager(password, sysop = sysop, site = site, verbose=verbose)
loginMan.login()
except wikipedia.NoSuchSite:
wikipedia.output(lang+ u'.' + familyName + u' is not a valid site, please remove it from your config')
else:
loginMan = LoginManager(password, sysop = sysop, verbose=verbose)
loginMan.login()
if __name__ == "__main__":
try:
main()
finally:
wikipedia.stopme()
| true
| true
|
1c4576baa0e49856245c1d52cad14c426975599c
| 15,984
|
py
|
Python
|
tfx/orchestration/metadata_test.py
|
romiosarkar6991/tfx-romio
|
0703c1dd037c676e1d438c2e5ce831decfc9eed9
|
[
"Apache-2.0"
] | 1
|
2019-10-10T06:06:12.000Z
|
2019-10-10T06:06:12.000Z
|
tfx/orchestration/metadata_test.py
|
romiosarkar6991/tfx-romio
|
0703c1dd037c676e1d438c2e5ce831decfc9eed9
|
[
"Apache-2.0"
] | null | null | null |
tfx/orchestration/metadata_test.py
|
romiosarkar6991/tfx-romio
|
0703c1dd037c676e1d438c2e5ce831decfc9eed9
|
[
"Apache-2.0"
] | 1
|
2019-10-06T03:39:58.000Z
|
2019-10-06T03:39:58.000Z
|
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.metadata."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Standard Imports
import mock
import tensorflow as tf
from ml_metadata.proto import metadata_store_pb2
from tfx import types
from tfx.orchestration import data_types
from tfx.orchestration import metadata
from tfx.types import standard_artifacts
from tfx.types.artifact import ArtifactState
class MetadataTest(tf.test.TestCase):
def setUp(self):
super(MetadataTest, self).setUp()
self._connection_config = metadata_store_pb2.ConnectionConfig()
self._connection_config.sqlite.SetInParent()
self._component_info = data_types.ComponentInfo(
component_type='a.b.c', component_id='my_component')
self._component_info2 = data_types.ComponentInfo(
component_type='a.b.d', component_id='my_component_2')
self._pipeline_info = data_types.PipelineInfo(
pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id')
self._pipeline_info2 = data_types.PipelineInfo(
pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id2')
self._pipeline_info3 = data_types.PipelineInfo(
pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id')
self._pipeline_info4 = data_types.PipelineInfo(
pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id2')
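  # The four PipelineInfo fixtures above differ only in pipeline_name and
  # run_id; the context tests below rely on each (name, run_id) pair
  # mapping to a distinct run context.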
def testEmptyArtifact(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
m.publish_artifacts([])
eid = m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info)
m.publish_execution(eid, {}, {})
[execution] = m.store.get_executions_by_id([eid])
self.assertProtoEquals(
"""
id: 1
type_id: 1
properties {
key: "state"
value {
string_value: "complete"
}
}
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "pipeline_root"
value {
string_value: "/tmp"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
properties {
key: "component_id"
value {
string_value: "my_component"
}
}""", execution)
def testArtifact(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self.assertListEqual([], m.get_all_artifacts())
# Test publish artifact.
artifact = standard_artifacts.Examples()
artifact.uri = 'uri'
m.publish_artifacts([artifact])
[artifact] = m.store.get_artifacts()
self.assertProtoEquals(
"""id: 1
type_id: 1
uri: "uri"
properties {
key: "split"
value {
string_value: ""
}
}
properties {
key: "state"
value {
string_value: "published"
}
}
properties {
key: "type_name"
value {
string_value: "ExamplesPath"
}
}""", artifact)
# Test get artifact.
self.assertListEqual([artifact], m.get_all_artifacts())
self.assertListEqual([artifact], m.get_artifacts_by_uri('uri'))
self.assertListEqual([artifact], m.get_artifacts_by_type('ExamplesPath'))
# Test artifact state.
m.check_artifact_state(artifact, ArtifactState.PUBLISHED)
m.update_artifact_state(artifact, ArtifactState.DELETED)
m.check_artifact_state(artifact, ArtifactState.DELETED)
self.assertRaises(RuntimeError, m.check_artifact_state, artifact,
ArtifactState.PUBLISHED)
def testExecution(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
context_id = m.register_run_context_if_not_exists(self._pipeline_info)
# Test prepare_execution.
exec_properties = {'arg_one': 1}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info,
run_context_id=context_id)
[execution] = m.store.get_executions_by_context(context_id)
self.assertProtoEquals(
"""
id: 1
type_id: 2
properties {
key: "state"
value {
string_value: "new"
}
}
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "pipeline_root"
value {
string_value: "/tmp"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
properties {
key: "component_id"
value {
string_value: "my_component"
}
}
properties {
key: "arg_one"
value {
string_value: "1"
}
}""", execution)
# Test publish_execution.
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = standard_artifacts.Examples()
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(eid, input_dict, output_dict)
# Make sure artifacts in output_dict are published.
self.assertEqual(ArtifactState.PUBLISHED, output_artifact.state)
# Make sure execution state are changed.
[execution] = m.store.get_executions_by_id([eid])
self.assertEqual(metadata.EXECUTION_STATE_COMPLETE,
execution.properties['state'].string_value)
# Make sure events are published.
events = m.store.get_events_by_execution_ids([eid])
self.assertEqual(2, len(events))
self.assertEqual(input_artifact.id, events[0].artifact_id)
self.assertEqual(metadata_store_pb2.Event.INPUT, events[0].type)
self.assertProtoEquals(
"""
steps {
key: "input"
}
steps {
index: 0
}""", events[0].path)
self.assertEqual(output_artifact.id, events[1].artifact_id)
self.assertEqual(metadata_store_pb2.Event.OUTPUT, events[1].type)
self.assertProtoEquals(
"""
steps {
key: "output"
}
steps {
index: 0
}""", events[1].path)
def testFetchPreviousResult(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
# Create an 'previous' execution.
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = standard_artifacts.Examples()
input_artifacts = {'input': [input_artifact]}
output_artifacts = {'output': [output_artifact]}
m.publish_execution(eid, input_artifacts, output_artifacts)
# Test previous_run.
self.assertEqual(
None,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info))
self.assertEqual(
None,
m.previous_execution(
input_artifacts={},
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info))
self.assertEqual(
None,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=data_types.ComponentInfo(
component_id='unique', component_type='a.b.c')))
self.assertEqual(
eid,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info))
# Test fetch_previous_result_artifacts.
new_output_artifact = standard_artifacts.Examples()
self.assertNotEqual(ArtifactState.PUBLISHED,
new_output_artifact.state)
new_output_dict = {'output': [new_output_artifact]}
updated_output_dict = m.fetch_previous_result_artifacts(
new_output_dict, eid)
previous_artifact = output_artifacts['output'][-1].artifact
current_artifact = updated_output_dict['output'][-1].artifact
self.assertEqual(ArtifactState.PUBLISHED,
current_artifact.properties['state'].string_value)
self.assertEqual(previous_artifact.id, current_artifact.id)
self.assertEqual(previous_artifact.type_id, current_artifact.type_id)
def testGetCachedExecutionIds(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
mock_store = mock.Mock()
mock_store.get_events_by_execution_ids.side_effect = [
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT)
],
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=2, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=3, type=metadata_store_pb2.Event.INPUT)
],
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=2, type=metadata_store_pb2.Event.INPUT),
],
]
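      # The three side_effect entries model the input-event sets of the
      # candidate executions, assumed to be looked up in the order [3, 2, 1];
      # only the last ({1, 2}) exactly matches input_dict, so the cached
      # execution id resolves to 1.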
m._store = mock_store
input_one = standard_artifacts.Examples()
input_one.id = 1
input_two = standard_artifacts.Examples()
input_two.id = 2
input_dict = {
'input_one': [input_one],
'input_two': [input_two],
}
self.assertEqual(1, m._get_cached_execution_id(input_dict, [3, 2, 1]))
def testSearchArtifacts(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = types.Artifact(type_name='MyOutputArtifact')
output_artifact.uri = 'my/uri'
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(eid, input_dict, output_dict)
[artifact] = m.search_artifacts(
artifact_name='output',
pipeline_name=self._pipeline_info.pipeline_name,
run_id=self._pipeline_info.run_id,
producer_component_id=self._component_info.component_id)
self.assertEqual(artifact.uri, output_artifact.uri)
def testPublishSkippedExecution(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = types.Artifact(type_name='MyOutputArtifact')
output_artifact.uri = 'my/uri'
[published_artifact] = m.publish_artifacts([output_artifact])
output_artifact.artifact = published_artifact
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(
eid, input_dict, output_dict, state=metadata.EXECUTION_STATE_CACHED)
def testGetExecutionStates(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
context_id = m.register_run_context_if_not_exists(self._pipeline_info)
context_id2 = m.register_run_context_if_not_exists(self._pipeline_info2)
self.assertListEqual(
[self._pipeline_info.run_id, self._pipeline_info2.run_id],
m.get_all_runs('my_pipeline'))
eid = m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info,
run_context_id=context_id)
m.publish_execution(eid, {}, {})
m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info2,
run_context_id=context_id)
m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info2,
component_info=self._component_info,
run_context_id=context_id2)
states = m.get_execution_states(self._pipeline_info)
self.assertDictEqual(
{
self._component_info.component_id:
metadata.EXECUTION_STATE_COMPLETE,
self._component_info2.component_id:
metadata.EXECUTION_STATE_NEW,
}, states)
def testContext(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
cid1 = m.register_run_context_if_not_exists(self._pipeline_info)
cid2 = m.register_run_context_if_not_exists(self._pipeline_info2)
cid3 = m.register_run_context_if_not_exists(self._pipeline_info3)
context_type = m.store.get_context_type('run')
self.assertProtoEquals(
"""
id: 1
name: 'run'
properties {
key: "pipeline_name"
value: STRING
}
properties {
key: "run_id"
value: STRING
}
""", context_type)
[context] = m.store.get_contexts_by_id([cid1])
self.assertProtoEquals(
"""
id: 1
type_id: 1
name: 'my_pipeline.my_run_id'
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
""", context)
self.assertEqual(
cid1, m.register_run_context_if_not_exists(self._pipeline_info))
self.assertEqual(cid1, m._get_run_context_id(self._pipeline_info))
self.assertEqual(cid2, m._get_run_context_id(self._pipeline_info2))
self.assertEqual(cid3, m._get_run_context_id(self._pipeline_info3))
self.assertEqual(None, m._get_run_context_id(self._pipeline_info4))
if __name__ == '__main__':
tf.test.main()
| 35.52
| 80
| 0.635385
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import mock
import tensorflow as tf
from ml_metadata.proto import metadata_store_pb2
from tfx import types
from tfx.orchestration import data_types
from tfx.orchestration import metadata
from tfx.types import standard_artifacts
from tfx.types.artifact import ArtifactState
class MetadataTest(tf.test.TestCase):
def setUp(self):
super(MetadataTest, self).setUp()
self._connection_config = metadata_store_pb2.ConnectionConfig()
self._connection_config.sqlite.SetInParent()
self._component_info = data_types.ComponentInfo(
component_type='a.b.c', component_id='my_component')
self._component_info2 = data_types.ComponentInfo(
component_type='a.b.d', component_id='my_component_2')
self._pipeline_info = data_types.PipelineInfo(
pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id')
self._pipeline_info2 = data_types.PipelineInfo(
pipeline_name='my_pipeline', pipeline_root='/tmp', run_id='my_run_id2')
self._pipeline_info3 = data_types.PipelineInfo(
pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id')
self._pipeline_info4 = data_types.PipelineInfo(
pipeline_name='my_pipeline2', pipeline_root='/tmp', run_id='my_run_id2')
def testEmptyArtifact(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
m.publish_artifacts([])
eid = m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info)
m.publish_execution(eid, {}, {})
[execution] = m.store.get_executions_by_id([eid])
self.assertProtoEquals(
"""
id: 1
type_id: 1
properties {
key: "state"
value {
string_value: "complete"
}
}
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "pipeline_root"
value {
string_value: "/tmp"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
properties {
key: "component_id"
value {
string_value: "my_component"
}
}""", execution)
def testArtifact(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self.assertListEqual([], m.get_all_artifacts())
artifact = standard_artifacts.Examples()
artifact.uri = 'uri'
m.publish_artifacts([artifact])
[artifact] = m.store.get_artifacts()
self.assertProtoEquals(
"""id: 1
type_id: 1
uri: "uri"
properties {
key: "split"
value {
string_value: ""
}
}
properties {
key: "state"
value {
string_value: "published"
}
}
properties {
key: "type_name"
value {
string_value: "ExamplesPath"
}
}""", artifact)
self.assertListEqual([artifact], m.get_all_artifacts())
self.assertListEqual([artifact], m.get_artifacts_by_uri('uri'))
self.assertListEqual([artifact], m.get_artifacts_by_type('ExamplesPath'))
m.check_artifact_state(artifact, ArtifactState.PUBLISHED)
m.update_artifact_state(artifact, ArtifactState.DELETED)
m.check_artifact_state(artifact, ArtifactState.DELETED)
self.assertRaises(RuntimeError, m.check_artifact_state, artifact,
ArtifactState.PUBLISHED)
def testExecution(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
context_id = m.register_run_context_if_not_exists(self._pipeline_info)
exec_properties = {'arg_one': 1}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info,
run_context_id=context_id)
[execution] = m.store.get_executions_by_context(context_id)
self.assertProtoEquals(
"""
id: 1
type_id: 2
properties {
key: "state"
value {
string_value: "new"
}
}
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "pipeline_root"
value {
string_value: "/tmp"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
properties {
key: "component_id"
value {
string_value: "my_component"
}
}
properties {
key: "arg_one"
value {
string_value: "1"
}
}""", execution)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = standard_artifacts.Examples()
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(eid, input_dict, output_dict)
self.assertEqual(ArtifactState.PUBLISHED, output_artifact.state)
[execution] = m.store.get_executions_by_id([eid])
self.assertEqual(metadata.EXECUTION_STATE_COMPLETE,
execution.properties['state'].string_value)
events = m.store.get_events_by_execution_ids([eid])
self.assertEqual(2, len(events))
self.assertEqual(input_artifact.id, events[0].artifact_id)
self.assertEqual(metadata_store_pb2.Event.INPUT, events[0].type)
self.assertProtoEquals(
"""
steps {
key: "input"
}
steps {
index: 0
}""", events[0].path)
self.assertEqual(output_artifact.id, events[1].artifact_id)
self.assertEqual(metadata_store_pb2.Event.OUTPUT, events[1].type)
self.assertProtoEquals(
"""
steps {
key: "output"
}
steps {
index: 0
}""", events[1].path)
def testFetchPreviousResult(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = standard_artifacts.Examples()
input_artifacts = {'input': [input_artifact]}
output_artifacts = {'output': [output_artifact]}
m.publish_execution(eid, input_artifacts, output_artifacts)
self.assertEqual(
None,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info))
self.assertEqual(
None,
m.previous_execution(
input_artifacts={},
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info))
self.assertEqual(
None,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=data_types.ComponentInfo(
component_id='unique', component_type='a.b.c')))
self.assertEqual(
eid,
m.previous_execution(
input_artifacts=input_artifacts,
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info))
new_output_artifact = standard_artifacts.Examples()
self.assertNotEqual(ArtifactState.PUBLISHED,
new_output_artifact.state)
new_output_dict = {'output': [new_output_artifact]}
updated_output_dict = m.fetch_previous_result_artifacts(
new_output_dict, eid)
previous_artifact = output_artifacts['output'][-1].artifact
current_artifact = updated_output_dict['output'][-1].artifact
self.assertEqual(ArtifactState.PUBLISHED,
current_artifact.properties['state'].string_value)
self.assertEqual(previous_artifact.id, current_artifact.id)
self.assertEqual(previous_artifact.type_id, current_artifact.type_id)
def testGetCachedExecutionIds(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
mock_store = mock.Mock()
mock_store.get_events_by_execution_ids.side_effect = [
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT)
],
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=2, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=3, type=metadata_store_pb2.Event.INPUT)
],
[
metadata_store_pb2.Event(
artifact_id=1, type=metadata_store_pb2.Event.INPUT),
metadata_store_pb2.Event(
artifact_id=2, type=metadata_store_pb2.Event.INPUT),
],
]
m._store = mock_store
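# The mocked store returns one INPUT-event list per candidate lookup; only
# the candidate whose recorded input artifacts are exactly {1, 2} qualifies,
# so the assertion below expects execution id 1 as the cache hit.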
input_one = standard_artifacts.Examples()
input_one.id = 1
input_two = standard_artifacts.Examples()
input_two.id = 2
input_dict = {
'input_one': [input_one],
'input_two': [input_two],
}
self.assertEqual(1, m._get_cached_execution_id(input_dict, [3, 2, 1]))
def testSearchArtifacts(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = types.Artifact(type_name='MyOutputArtifact')
output_artifact.uri = 'my/uri'
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(eid, input_dict, output_dict)
[artifact] = m.search_artifacts(
artifact_name='output',
pipeline_name=self._pipeline_info.pipeline_name,
run_id=self._pipeline_info.run_id,
producer_component_id=self._component_info.component_id)
self.assertEqual(artifact.uri, output_artifact.uri)
def testPublishSkippedExecution(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
exec_properties = {'log_root': 'path'}
eid = m.register_execution(
exec_properties=exec_properties,
pipeline_info=self._pipeline_info,
component_info=self._component_info)
input_artifact = standard_artifacts.Examples()
m.publish_artifacts([input_artifact])
output_artifact = types.Artifact(type_name='MyOutputArtifact')
output_artifact.uri = 'my/uri'
[published_artifact] = m.publish_artifacts([output_artifact])
output_artifact.artifact = published_artifact
input_dict = {'input': [input_artifact]}
output_dict = {'output': [output_artifact]}
m.publish_execution(
eid, input_dict, output_dict, state=metadata.EXECUTION_STATE_CACHED)
def testGetExecutionStates(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
context_id = m.register_run_context_if_not_exists(self._pipeline_info)
context_id2 = m.register_run_context_if_not_exists(self._pipeline_info2)
self.assertListEqual(
[self._pipeline_info.run_id, self._pipeline_info2.run_id],
m.get_all_runs('my_pipeline'))
eid = m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info,
run_context_id=context_id)
m.publish_execution(eid, {}, {})
m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info,
component_info=self._component_info2,
run_context_id=context_id)
m.register_execution(
exec_properties={},
pipeline_info=self._pipeline_info2,
component_info=self._component_info,
run_context_id=context_id2)
states = m.get_execution_states(self._pipeline_info)
self.assertDictEqual(
{
self._component_info.component_id:
metadata.EXECUTION_STATE_COMPLETE,
self._component_info2.component_id:
metadata.EXECUTION_STATE_NEW,
}, states)
def testContext(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
cid1 = m.register_run_context_if_not_exists(self._pipeline_info)
cid2 = m.register_run_context_if_not_exists(self._pipeline_info2)
cid3 = m.register_run_context_if_not_exists(self._pipeline_info3)
context_type = m.store.get_context_type('run')
self.assertProtoEquals(
"""
id: 1
name: 'run'
properties {
key: "pipeline_name"
value: STRING
}
properties {
key: "run_id"
value: STRING
}
""", context_type)
[context] = m.store.get_contexts_by_id([cid1])
self.assertProtoEquals(
"""
id: 1
type_id: 1
name: 'my_pipeline.my_run_id'
properties {
key: "pipeline_name"
value {
string_value: "my_pipeline"
}
}
properties {
key: "run_id"
value {
string_value: "my_run_id"
}
}
""", context)
self.assertEqual(
cid1, m.register_run_context_if_not_exists(self._pipeline_info))
self.assertEqual(cid1, m._get_run_context_id(self._pipeline_info))
self.assertEqual(cid2, m._get_run_context_id(self._pipeline_info2))
self.assertEqual(cid3, m._get_run_context_id(self._pipeline_info3))
self.assertEqual(None, m._get_run_context_id(self._pipeline_info4))
if __name__ == '__main__':
tf.test.main()
| true
| true
|
1c4576f26d0bd1dafc5d7d1a3c7a8a3a5b06fec8
| 2,734
|
py
|
Python
|
unsupNFP/train.py
|
pfnet-research/hierarchical-molecular-learning
|
2c88a4737c9268e691e97d92bf2e9e2c7e2c1790
|
[
"MIT"
] | 13
|
2018-06-20T11:16:27.000Z
|
2020-06-23T18:56:20.000Z
|
unsupNFP/train.py
|
pfnet-research/hierarchical-molecular-learning
|
2c88a4737c9268e691e97d92bf2e9e2c7e2c1790
|
[
"MIT"
] | null | null | null |
unsupNFP/train.py
|
pfnet-research/hierarchical-molecular-learning
|
2c88a4737c9268e691e97d92bf2e9e2c7e2c1790
|
[
"MIT"
] | 4
|
2019-09-14T23:53:44.000Z
|
2021-12-09T23:36:27.000Z
|
import argparse
from chainer import optimizers
from chainer import serializers
import numpy as np
import model
import load_mutag
import load_nci1
import classification
n_epoch = 200
n_parts = 5
parser = argparse.ArgumentParser()
parser.add_argument('dataset', type=str, choices=('mutag', 'ptc'))
args = parser.parse_args()
if args.dataset == 'mutag':
mutag_file_name = "MUTAG.mat"
graphs = load_mutag.load_whole_data('MUTAG.mat')
MAX_EDGE_TYPE = load_mutag.MAX_EDGE_TYPE
MAX_NUMBER_ATOM = load_mutag.MAX_NUMBER_ATOM
elif args.dataset == 'ptc':
smile_filename = 'corrected_smiles.txt'
result_filename = 'corrected_results.txt'
graphs = load_nci1.load_ptc(smile_filename, result_filename)
MAX_EDGE_TYPE = load_nci1.MAX_EDGE_TYPE
MAX_NUMBER_ATOM = load_nci1.MAX_NUMBER_ATOM
else:
raise ValueError('Invalid dataset type: {}'.format(args.dataset))
model.MAX_EDGE_TYPE = MAX_EDGE_TYPE
model.MAX_NUMBER_ATOM = MAX_NUMBER_ATOM
indexs_test = np.random.permutation(len(graphs))
n_graphs = len(graphs)
print("num of graphs:", n_graphs)
rep_dim = 101
max_degree = 5
num_levels = 6
neg_size = 10
batchsize = 100
hid_dim = 100
out_dim = 2
softmax = model.SoftmaxCrossEntropy(rep_dim, MAX_NUMBER_ATOM)
print("[CONFIG: representation dim =", rep_dim, "]")
atom2vec = model.Atom2vec(MAX_NUMBER_ATOM, rep_dim, max_degree, softmax)
model = model.Mol2Vec(len(graphs), rep_dim, max_degree,
num_levels, neg_size, atom2vec)
optimizer = optimizers.Adam()
optimizer.setup(model)
print("start training")
for epoch in range(1, n_epoch + 1):
print("epoch:", epoch)
indexes = np.random.permutation(len(graphs))
sum_loss = 0
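# Iterate over the shuffled graphs in mini-batches; each batch is packed
# into dense numpy arrays before a single Adam update on the model.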
for i in range(0, n_graphs, batchsize):
maxid = min(i + batchsize, n_graphs)
ids = indexes[i:maxid]
graphids = []
adjs = []
atom_arrays = []
for id in indexes[i:maxid]:
graphids.append(graphs[id][0])
# indices 1 and 2 need to be changed for the MUTAG or NCI1 datasets
atom_arrays.append(graphs[id][1])
adjs.append(graphs[id][2])
graphids = np.asarray(graphids)
adjs = np.asarray(adjs, dtype=np.float32)
atom_arrays = np.asarray(atom_arrays, dtype=np.int32)
optimizer.update(model, graphids, adjs, atom_arrays)
sum_loss += float(model.loss.data) * len(graphids)
print("-----", float(model.loss.data) * len(graphids))
print("loss: ", sum_loss / n_graphs)
serializers.save_npz(str(rep_dim) + "_model_ptc.npz", model)
# after each epoch, check the result
if epoch % 10 == 0:
classification.MLPClassifier(model, graphs, indexs_test,
rep_dim, batchsize)
| 29.717391
| 73
| 0.685077
|
import argparse
from chainer import optimizers
from chainer import serializers
import numpy as np
import model
import load_mutag
import load_nci1
import classification
n_epoch = 200
n_parts = 5
parser = argparse.ArgumentParser()
parser.add_argument('dataset', type=str, choices=('mutag', 'ptc'))
args = parser.parse_args()
if args.dataset == 'mutag':
mutag_file_name = "MUTAG.mat"
graphs = load_mutag.load_whole_data('MUTAG.mat')
MAX_EDGE_TYPE = load_mutag.MAX_EDGE_TYPE
MAX_NUMBER_ATOM = load_mutag.MAX_NUMBER_ATOM
elif args.dataset == 'ptc':
smile_filename = 'corrected_smiles.txt'
result_filename = 'corrected_results.txt'
graphs = load_nci1.load_ptc(smile_filename, result_filename)
MAX_EDGE_TYPE = load_nci1.MAX_EDGE_TYPE
MAX_NUMBER_ATOM = load_nci1.MAX_NUMBER_ATOM
else:
raise ValueError('Invalid dataset type: {}'.format(args.dataset))
model.MAX_EDGE_TYPE = MAX_EDGE_TYPE
model.MAX_NUMBER_ATOM = MAX_NUMBER_ATOM
indexs_test = np.random.permutation(len(graphs))
n_graphs = len(graphs)
print("num of graphs:", n_graphs)
rep_dim = 101
max_degree = 5
num_levels = 6
neg_size = 10
batchsize = 100
hid_dim = 100
out_dim = 2
softmax = model.SoftmaxCrossEntropy(rep_dim, MAX_NUMBER_ATOM)
print("[CONFIG: representation dim =", rep_dim, "]")
atom2vec = model.Atom2vec(MAX_NUMBER_ATOM, rep_dim, max_degree, softmax)
model = model.Mol2Vec(len(graphs), rep_dim, max_degree,
num_levels, neg_size, atom2vec)
optimizer = optimizers.Adam()
optimizer.setup(model)
print("start training")
for epoch in range(1, n_epoch + 1):
print("epoch:", epoch)
indexes = np.random.permutation(len(graphs))
sum_loss = 0
for i in range(0, n_graphs, batchsize):
maxid = min(i + batchsize, n_graphs)
ids = indexes[i:maxid]
graphids = []
adjs = []
atom_arrays = []
for id in indexes[i:maxid]:
graphids.append(graphs[id][0])
atom_arrays.append(graphs[id][1])
adjs.append(graphs[id][2])
graphids = np.asarray(graphids)
adjs = np.asarray(adjs, dtype=np.float32)
atom_arrays = np.asarray(atom_arrays, dtype=np.int32)
optimizer.update(model, graphids, adjs, atom_arrays)
sum_loss += float(model.loss.data) * len(graphids)
print("-----", float(model.loss.data) * len(graphids))
print("loss: ", sum_loss / n_graphs)
serializers.save_npz(str(rep_dim) + "_model_ptc.npz", model)
if epoch % 10 == 0:
classification.MLPClassifier(model, graphs, indexs_test,
rep_dim, batchsize)
| true
| true
|
1c457730cf5448e958549f79c322f8bde85c2542
| 13,955
|
py
|
Python
|
stumpy/aamp_ostinato.py
|
alvii147/stumpy
|
6dacfcf35ce03255951d70e5dd2f8b3f4e20a27f
|
[
"BSD-3-Clause"
] | 2
|
2022-01-25T22:38:56.000Z
|
2022-01-31T10:59:02.000Z
|
stumpy/aamp_ostinato.py
|
vishalbelsare/stumpy
|
5f192a0a41fbb44f144cc4b676d525f19aaeaa98
|
[
"BSD-3-Clause"
] | null | null | null |
stumpy/aamp_ostinato.py
|
vishalbelsare/stumpy
|
5f192a0a41fbb44f144cc4b676d525f19aaeaa98
|
[
"BSD-3-Clause"
] | null | null | null |
# STUMPY
# Copyright 2019 TD Ameritrade. Released under the terms of the 3-Clause BSD license.
# STUMPY is a trademark of TD Ameritrade IP Company, Inc. All rights reserved.
import numpy as np
from . import core, aamp, aamped
def _aamp_across_series_nearest_neighbors(
Ts, Ts_idx, subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
"""
For multiple time series find, per individual time series, the subsequences closest
to a given query.
Parameters
----------
Ts : list
A list of time series for which to find the nearest neighbor subsequences that
are closest to the query subsequence `Ts[Ts_idx][subseq_idx : subseq_idx + m]`
Ts_idx : int
The index of time series in `Ts` which contains the query subsequence
subseq_idx : int
The subsequence index in the time series `Ts[Ts_idx]` that contains the query
subsequence
m : int
Window size
Ts_squared : list
A list of rolling window `T_squared` for each time series in `Ts`
Ts_subseq_isfinite : list
A list of rolling window `T_subseq_isfinite` for each time series in `Ts`
Returns
-------
nns_radii : numpy.ndarray
Radii to subsequences in each time series of `Ts` that are closest to the
query subsequence `Ts[Ts_idx][subseq_idx : subseq_idx + m]`
nns_subseq_idx : numpy.ndarray
Indices to subsequences in each time series of `Ts` that are closest to the
query subsequence `Ts[Ts_idx][subseq_idx : subseq_idx + m]`
"""
k = len(Ts)
Q = Ts[Ts_idx][subseq_idx : subseq_idx + m]
Q_squared = np.sum(Q * Q)
nns_radii = np.zeros(k, dtype=np.float64)
nns_subseq_idx = np.zeros(k, dtype=np.int64)
for i in range(k):
if np.any(~np.isfinite(Q)): # pragma: no cover
distance_profile = np.empty(Ts[i].shape[0] - m + 1, dtype=np.float64)
distance_profile[:] = np.inf
else:
QT = core.sliding_dot_product(
Ts[Ts_idx][subseq_idx : subseq_idx + m], Ts[i]
)
distance_profile = core._mass_absolute(Q_squared, Ts_squared[i], QT)
distance_profile[~Ts_subseq_isfinite[i]] = np.inf
nns_subseq_idx[i] = np.argmin(distance_profile)
nns_radii[i] = distance_profile[nns_subseq_idx[i]]
return nns_radii, nns_subseq_idx
def _get_aamp_central_motif(
Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
"""
Compare subsequences with the same radius and return the most central motif (i.e.,
having the smallest average nearest neighbor radii)
Parameters
----------
Ts : list
A list of time series for which to find the most central motif
bsf_radius : float
Best-so-far radius found by a consensus search algorithm
bsf_Ts_idx : int
The index of time series in `Ts` where the `bsf_radius` was first observed
bsf_subseq_idx : int
The subsequence index in `Ts[bsf_Ts_idx]` that has radius `bsf_radius`
m : int
Window size
Ts_squared : list
A list of rolling window `T_squared` for each time series in `Ts`
Ts_subseq_isfinite : list
A list of rolling window `T_subseq_isfinite` for each time series in `Ts`
Returns
-------
bsf_radius : float
The updated best-so-far radius of the most central consensus motif
bsf_Ts_idx : int
The updated index of time series in `Ts` which contains the most central
consensus motif
bsf_subseq_idx : int
The updated subsequence index in the time series `Ts[bsf_Ts_idx]` that contains
the most central consensus motif
"""
bsf_nns_radii, bsf_nns_subseq_idx = _aamp_across_series_nearest_neighbors(
Ts, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
bsf_nns_mean_radii = bsf_nns_radii.mean()
candidate_nns_Ts_idx = np.flatnonzero(np.isclose(bsf_nns_radii, bsf_radius))
candidate_nns_subseq_idx = bsf_nns_subseq_idx[candidate_nns_Ts_idx]
for Ts_idx, subseq_idx in zip(candidate_nns_Ts_idx, candidate_nns_subseq_idx):
candidate_nns_radii, _ = _aamp_across_series_nearest_neighbors(
Ts, Ts_idx, subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
if (
np.isclose(candidate_nns_radii.max(), bsf_radius)
and candidate_nns_radii.mean() < bsf_nns_mean_radii
):
bsf_Ts_idx = Ts_idx
bsf_subseq_idx = subseq_idx
bsf_nns_mean_radii = candidate_nns_radii.mean()
return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def _aamp_ostinato(
Ts,
m,
Ts_squared,
Ts_subseq_isfinite,
dask_client=None,
device_id=None,
mp_func=aamp,
):
"""
Find the consensus motif amongst a list of time series
Parameters
----------
Ts : list
A list of time series for which to find the consensus motif
m : int
Window size
Ts_squared : list
A list of rolling window `T_squared` for each time series in `Ts`
Ts_subseq_isfinite : list
A list of rolling window `T_subseq_isfinite` for each time series in `Ts`
dask_client : client, default None
A Dask Distributed client that is connected to a Dask scheduler and
Dask workers. Setting up a Dask distributed cluster is beyond the
scope of this library. Please refer to the Dask Distributed
documentation.
device_id : int or list, default None
The (GPU) device number to use. The default value is `0`. A list of
valid device ids (int) may also be provided for parallel GPU-STUMP
computation. A list of all valid device ids can be obtained by
executing `[device.id for device in numba.cuda.list_devices()]`.
mp_func : object, default aamp
Specify a custom matrix profile function to use for computing matrix profiles
Returns
-------
bsf_radius : float
The (best-so-far) radius of the consensus motif
bsf_Ts_idx : int
The time series index in `Ts` which contains the consensus motif
bsf_subseq_idx : int
The subsequence index within time series `Ts[bsf_Ts_idx]` that contains the
consensus motif
Notes
-----
`DOI: 10.1109/ICDM.2019.00140 \
<https://www.cs.ucr.edu/~eamonn/consensus_Motif_ICDM_Long_version.pdf>`__
See Table 2
The ostinato algorithm proposed in the paper finds the best radius
in `Ts`. Intuitively, the radius is the minimum distance of a
subsequence to encompass at least one nearest neighbor subsequence
from all other time series. The best radius in `Ts` is the minimum
radius amongst all radii. Some data sets might contain multiple
subsequences which have the same optimal radius.
The greedy Ostinato algorithm only finds one of them, which might
not be the most central motif. The most central motif amongst the
subsequences with the best radius is the one with the smallest mean
distance to nearest neighbors in all other time series. To find this
central motif it is necessary to search the subsequences with the
best radius via `stumpy.aamp_ostinato._get_aamp_central_motif`
"""
bsf_radius = np.inf
bsf_Ts_idx = 0
bsf_subseq_idx = 0
partial_mp_func = core._get_partial_mp_func(
mp_func, dask_client=dask_client, device_id=device_id
)
k = len(Ts)
for j in range(k):
if j < (k - 1):
h = j + 1
else:
h = 0
mp = partial_mp_func(Ts[j], m, Ts[h], ignore_trivial=False)
si = np.argsort(mp[:, 0])
for q in si:
Q = Ts[j][q : q + m]
Q_squared = np.sum(Q * Q)
radius = mp[q, 0]
if radius >= bsf_radius:
break
for i in range(k):
if i != j and i != h:
if np.any(~np.isfinite(Q)): # pragma: no cover
distance_profile = np.empty(Ts[i].shape[0] - m + 1)
distance_profile[:] = np.inf
else:
QT = core.sliding_dot_product(Ts[j][q : q + m], Ts[i])
distance_profile = core._mass_absolute(
Q_squared, Ts_squared[i], QT
)
distance_profile[~Ts_subseq_isfinite[i]] = np.inf
radius = np.max((radius, np.min(distance_profile)))
if radius >= bsf_radius:
break
if radius < bsf_radius:
bsf_radius, bsf_Ts_idx, bsf_subseq_idx = radius, j, q
return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def aamp_ostinato(Ts, m):
"""
Find the non-normalized (i.e., without z-normalization) consensus motif of multiple
time series
This is a wrapper around the vanilla version of the ostinato algorithm
which finds the best radius and a helper function that finds the most
central conserved motif.
Parameters
----------
Ts : list
A list of time series for which to find the most central consensus motif
m : int
Window size
Returns
-------
central_radius : float
Radius of the most central consensus motif
central_Ts_idx : int
The time series index in `Ts` which contains the most central consensus motif
central_subseq_idx : int
The subsequence index within time series `Ts[central_Ts_idx]` that contains
the most central consensus motif
Notes
-----
`DOI: 10.1109/ICDM.2019.00140 \
<https://www.cs.ucr.edu/~eamonn/consensus_Motif_ICDM_Long_version.pdf>`__
See Table 2
The ostinato algorithm proposed in the paper finds the best radius
in `Ts`. Intuitively, the radius is the minimum distance of a
subsequence to encompass at least one nearest neighbor subsequence
from all other time series. The best radius in `Ts` is the minimum
radius amongst all radii. Some data sets might contain multiple
subsequences which have the same optimal radius.
The greedy Ostinato algorithm only finds one of them, which might
not be the most central motif. The most central motif amongst the
subsequences with the best radius is the one with the smallest mean
distance to nearest neighbors in all other time series. To find this
central motif it is necessary to search the subsequences with the
best radius via `stumpy.aamp_ostinato._get_aamp_central_motif`
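Examples
--------
An illustrative sketch with random data; the import path, seed, and array
lengths below are assumptions for demonstration only.
>>> import numpy as np
>>> from stumpy import aamp_ostinato
>>> rng = np.random.default_rng(0)
>>> Ts = [rng.random(n) for n in (64, 128, 256)]
>>> radius, ts_idx, subseq_idx = aamp_ostinato(Ts, m=8)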
"""
Ts_squared = [None] * len(Ts)
Ts_subseq_isfinite = [None] * len(Ts)
for i, T in enumerate(Ts):
Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(T, m)
Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)
bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
Ts, m, Ts_squared, Ts_subseq_isfinite
)
(central_radius, central_Ts_idx, central_subseq_idx,) = _get_aamp_central_motif(
Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
return central_radius, central_Ts_idx, central_subseq_idx
def aamp_ostinatoed(dask_client, Ts, m):
"""
Find the non-normalized (i.e., without z-normalization) consensus motif of multiple
time series with a distributed dask cluster
This is a wrapper around the vanilla version of the ostinato algorithm
which finds the best radius and a helper function that finds the most
central conserved motif.
Parameters
----------
dask_client : client
A Dask Distributed client that is connected to a Dask scheduler and
Dask workers. Setting up a Dask distributed cluster is beyond the
scope of this library. Please refer to the Dask Distributed
documentation.
Ts : list
A list of time series for which to find the most central consensus motif
m : int
Window size
Returns
-------
central_radius : float
Radius of the most central consensus motif
central_Ts_idx : int
The time series index in `Ts` which contains the most central consensus motif
central_subseq_idx : int
The subsequence index within time series `Ts[central_Ts_idx]` that contains
the most central consensus motif
Notes
-----
`DOI: 10.1109/ICDM.2019.00140 \
<https://www.cs.ucr.edu/~eamonn/consensus_Motif_ICDM_Long_version.pdf>`__
See Table 2
The ostinato algorithm proposed in the paper finds the best radius
in `Ts`. Intuitively, the radius is the minimum distance of a
subsequence to encompass at least one nearest neighbor subsequence
from all other time series. The best radius in `Ts` is the minimum
radius amongst all radii. Some data sets might contain multiple
subsequences which have the same optimal radius.
The greedy Ostinato algorithm only finds one of them, which might
not be the most central motif. The most central motif amongst the
subsequences with the best radius is the one with the smallest mean
distance to nearest neighbors in all other time series. To find this
central motif it is necessary to search the subsequences with the
best radius via `stumpy.aamp_ostinato._get_aamp_central_motif`
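Examples
--------
An illustrative sketch; the local Dask cluster setup and the random data
below are assumptions for demonstration only.
>>> import numpy as np
>>> from dask.distributed import Client
>>> from stumpy import aamp_ostinatoed
>>> rng = np.random.default_rng(0)
>>> Ts = [rng.random(n) for n in (64, 128, 256)]
>>> with Client() as dask_client:
...     radius, ts_idx, subseq_idx = aamp_ostinatoed(dask_client, Ts, m=8)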
"""
Ts_squared = [None] * len(Ts)
Ts_subseq_isfinite = [None] * len(Ts)
for i, T in enumerate(Ts):
Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(T, m)
Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)
bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
Ts, m, Ts_squared, Ts_subseq_isfinite, dask_client=dask_client, mp_func=aamped
)
(central_radius, central_Ts_idx, central_subseq_idx,) = _get_aamp_central_motif(
Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
return central_radius, central_Ts_idx, central_subseq_idx
| 35.874036
| 88
| 0.668649
|
import numpy as np
from . import core, aamp, aamped
def _aamp_across_series_nearest_neighbors(
Ts, Ts_idx, subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
k = len(Ts)
Q = Ts[Ts_idx][subseq_idx : subseq_idx + m]
Q_squared = np.sum(Q * Q)
nns_radii = np.zeros(k, dtype=np.float64)
nns_subseq_idx = np.zeros(k, dtype=np.int64)
for i in range(k):
if np.any(~np.isfinite(Q)):
distance_profile = np.empty(Ts[i].shape[0] - m + 1, dtype=np.float64)
distance_profile[:] = np.inf
else:
QT = core.sliding_dot_product(
Ts[Ts_idx][subseq_idx : subseq_idx + m], Ts[i]
)
distance_profile = core._mass_absolute(Q_squared, Ts_squared[i], QT)
distance_profile[~Ts_subseq_isfinite[i]] = np.inf
nns_subseq_idx[i] = np.argmin(distance_profile)
nns_radii[i] = distance_profile[nns_subseq_idx[i]]
return nns_radii, nns_subseq_idx
def _get_aamp_central_motif(
Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
):
bsf_nns_radii, bsf_nns_subseq_idx = _aamp_across_series_nearest_neighbors(
Ts, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
bsf_nns_mean_radii = bsf_nns_radii.mean()
candidate_nns_Ts_idx = np.flatnonzero(np.isclose(bsf_nns_radii, bsf_radius))
candidate_nns_subseq_idx = bsf_nns_subseq_idx[candidate_nns_Ts_idx]
for Ts_idx, subseq_idx in zip(candidate_nns_Ts_idx, candidate_nns_subseq_idx):
candidate_nns_radii, _ = _aamp_across_series_nearest_neighbors(
Ts, Ts_idx, subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
if (
np.isclose(candidate_nns_radii.max(), bsf_radius)
and candidate_nns_radii.mean() < bsf_nns_mean_radii
):
bsf_Ts_idx = Ts_idx
bsf_subseq_idx = subseq_idx
bsf_nns_mean_radii = candidate_nns_radii.mean()
return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def _aamp_ostinato(
Ts,
m,
Ts_squared,
Ts_subseq_isfinite,
dask_client=None,
device_id=None,
mp_func=aamp,
):
bsf_radius = np.inf
bsf_Ts_idx = 0
bsf_subseq_idx = 0
partial_mp_func = core._get_partial_mp_func(
mp_func, dask_client=dask_client, device_id=device_id
)
k = len(Ts)
for j in range(k):
if j < (k - 1):
h = j + 1
else:
h = 0
mp = partial_mp_func(Ts[j], m, Ts[h], ignore_trivial=False)
si = np.argsort(mp[:, 0])
for q in si:
Q = Ts[j][q : q + m]
Q_squared = np.sum(Q * Q)
radius = mp[q, 0]
if radius >= bsf_radius:
break
for i in range(k):
if i != j and i != h:
if np.any(~np.isfinite(Q)):
distance_profile = np.empty(Ts[i].shape[0] - m + 1)
distance_profile[:] = np.inf
else:
QT = core.sliding_dot_product(Ts[j][q : q + m], Ts[i])
distance_profile = core._mass_absolute(
Q_squared, Ts_squared[i], QT
)
distance_profile[~Ts_subseq_isfinite[i]] = np.inf
radius = np.max((radius, np.min(distance_profile)))
if radius >= bsf_radius:
break
if radius < bsf_radius:
bsf_radius, bsf_Ts_idx, bsf_subseq_idx = radius, j, q
return bsf_radius, bsf_Ts_idx, bsf_subseq_idx
def aamp_ostinato(Ts, m):
Ts_squared = [None] * len(Ts)
Ts_subseq_isfinite = [None] * len(Ts)
for i, T in enumerate(Ts):
Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(T, m)
Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)
bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
Ts, m, Ts_squared, Ts_subseq_isfinite
)
(central_radius, central_Ts_idx, central_subseq_idx,) = _get_aamp_central_motif(
Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
return central_radius, central_Ts_idx, central_subseq_idx
def aamp_ostinatoed(dask_client, Ts, m):
Ts_squared = [None] * len(Ts)
Ts_subseq_isfinite = [None] * len(Ts)
for i, T in enumerate(Ts):
Ts[i], Ts_subseq_isfinite[i] = core.preprocess_non_normalized(T, m)
Ts_squared[i] = np.sum(core.rolling_window(Ts[i] * Ts[i], m), axis=1)
bsf_radius, bsf_Ts_idx, bsf_subseq_idx = _aamp_ostinato(
Ts, m, Ts_squared, Ts_subseq_isfinite, dask_client=dask_client, mp_func=aamped
)
(central_radius, central_Ts_idx, central_subseq_idx,) = _get_aamp_central_motif(
Ts, bsf_radius, bsf_Ts_idx, bsf_subseq_idx, m, Ts_squared, Ts_subseq_isfinite
)
return central_radius, central_Ts_idx, central_subseq_idx
| true
| true
|
1c4577dd8ddebd2c787183b01c730deb2a42ac5d
| 1,252
|
py
|
Python
|
tests/unit_tests/data/inline_service_integration_test/requester_service.py
|
ZacharyATanenbaum/service_framework
|
b5dde4407998350d1b7ad09284110b986fd4e12a
|
[
"MIT"
] | 1
|
2020-03-20T21:33:56.000Z
|
2020-03-20T21:33:56.000Z
|
tests/unit_tests/data/inline_service_integration_test/requester_service.py
|
ZacharyATanenbaum/service_framework
|
b5dde4407998350d1b7ad09284110b986fd4e12a
|
[
"MIT"
] | 1
|
2020-03-22T03:48:45.000Z
|
2020-03-22T03:48:45.000Z
|
tests/unit_tests/data/inline_service_integration_test/requester_service.py
|
ZacharyATanenbaum/service_framework
|
b5dde4407998350d1b7ad09284110b986fd4e12a
|
[
"MIT"
] | null | null | null |
""" File to house a requester service """
from service_framework.utils.logging_utils import get_logger
LOG = get_logger()
def setup_config(config):
"""
Make config arguments the proper type!
"""
LOG.info('Setting up config!')
config['num_req_to_send'] = int(config.get('num_req_to_send', 2))
config['responses_recieved'] = []
return config
def main(to_send, config):
"""
This function is the main entry point for the Requester Service
"""
for num in range(config['num_req_to_send']):
payload = {'to_echo': 'Hello World - ' + str(num)}
LOG.info('Sending payload: %s', payload)
returned = to_send('request', payload)
LOG.info('Got Response: %s', returned)
config['responses_recieved'].append(returned)
LOG.info('GOT ALL RESPONSES')
config_model = {
'required': {
'num_req_to_send': int,
},
'optional': {
'responses_recieved': str,
}
}
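# Connection model: a single outbound 'requester' connection named
# 'request'; the schemas below describe the outgoing payload and the
# arguments expected back in each response.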
connection_models = {
'out': {
'request': {
'connection_type': 'requester',
'required_arguments': {
'to_echo': str,
},
'required_return_arguments': {
'echoed': str,
}
}
}
}
| 22.357143
| 69
| 0.572684
|
from service_framework.utils.logging_utils import get_logger
LOG = get_logger()
def setup_config(config):
LOG.info('Setting up config!')
config['num_req_to_send'] = int(config.get('num_req_to_send', 2))
config['responses_recieved'] = []
return config
def main(to_send, config):
for num in range(config['num_req_to_send']):
payload = {'to_echo': 'Hello World - ' + str(num)}
LOG.info('Sending payload: %s', payload)
returned = to_send('request', payload)
LOG.info('Got Response: %s', returned)
config['responses_recieved'].append(returned)
LOG.info('GOT ALL RESPONSES')
config_model = {
'required': {
'num_req_to_send': int,
},
'optional': {
'responses_recieved': str,
}
}
connection_models = {
'out': {
'request': {
'connection_type': 'requester',
'required_arguments': {
'to_echo': str,
},
'required_return_arguments': {
'echoed': str,
}
}
}
}
| true
| true
|