input stringlengths 2.65k 237k | output stringclasses 1
value |
|---|---|
change_id: ChangeID, timeout: float = 30.0, delay: float = 0.1,
) -> Change:
"""Wait for the given change to be ready.
If the Pebble server supports the /v1/changes/{id}/wait API endpoint,
use that to avoid polling, otherwise poll /v1/changes/{id} every delay
seconds.
Args:
change_id: Change ID of change to wait for.
timeout: Maximum time in seconds to wait for the change to be
ready. May be None, in which case wait_change never times out.
delay: If polling, this is the delay in seconds between attempts.
Returns:
The Change object being waited on.
Raises:
TimeoutError: If the maximum timeout is reached.
"""
try:
return self._wait_change_using_wait(change_id, timeout)
except NotImplementedError:
# Pebble server doesn't support wait endpoint, fall back to polling
return self._wait_change_using_polling(change_id, timeout, delay)
def _wait_change_using_wait(self, change_id, timeout):
    """Wait for a change to be ready using the wait-change API.

    Repeatedly calls the wait-change endpoint with a bounded per-request
    timeout until the change is ready or the overall deadline passes.

    Args:
        change_id: Change ID of the change to wait for.
        timeout: Maximum overall time in seconds to wait, or None to
            wait indefinitely.

    Returns:
        The Change object being waited on.

    Raises:
        TimeoutError: If the overall timeout is reached.
        NotImplementedError: Propagated from _wait_change when the server
            has no wait-change endpoint (the caller falls back to polling).
    """
    # deadline stays None when the caller wants to wait forever.
    deadline = time.time() + timeout if timeout is not None else None
    # Hit the wait endpoint every Client.timeout-1 seconds to avoid long
    # requests (the -1 is to ensure it wakes up before the socket timeout)
    while True:
        this_timeout = max(self.timeout - 1, 1)  # minimum of 1 second
        if timeout is not None:
            time_remaining = deadline - time.time()
            if time_remaining <= 0:
                break
            # Wait the lesser of the time remaining and Client.timeout-1
            this_timeout = min(time_remaining, this_timeout)
        try:
            return self._wait_change(change_id, this_timeout)
        except TimeoutError:
            # Catch timeout from wait endpoint and loop to check deadline
            pass
    raise TimeoutError('timed out waiting for change {} ({} seconds)'.format(
        change_id, timeout))
def _wait_change(self, change_id: ChangeID, timeout: float = None) -> Change:
    """Call the wait-change API endpoint directly.

    Args:
        change_id: Change ID of the change to wait for.
        timeout: Server-side wait duration in seconds, or None for none.

    Returns:
        The Change object once the server reports it.

    Raises:
        NotImplementedError: If the server lacks the wait-change endpoint
            (it answers 404).
        TimeoutError: If the server-side wait timed out (it answers 504).
        APIError: For any other API failure.
    """
    # Pebble expects the timeout as a duration string, e.g. "4.000s".
    query = {} if timeout is None else {'timeout': '{:.3f}s'.format(timeout)}
    endpoint = '/v1/changes/{}/wait'.format(change_id)
    try:
        resp = self._request('GET', endpoint, query)
    except APIError as e:
        if e.code == 404:
            raise NotImplementedError('server does not implement wait-change endpoint')
        if e.code == 504:
            raise TimeoutError('timed out waiting for change {} ({} seconds)'.format(
                change_id, timeout))
        raise
    return Change.from_dict(resp['result'])
def _wait_change_using_polling(self, change_id, timeout, delay):
    """Wait for a change to be ready by polling the get-change API.

    Polls every ``delay`` seconds until the change reports ready or the
    ``timeout`` deadline (None meaning never) passes.
    """
    deadline = None if timeout is None else time.time() + timeout
    while deadline is None or time.time() < deadline:
        change = self.get_change(change_id)
        if change.ready:
            return change
        time.sleep(delay)
    raise TimeoutError('timed out waiting for change {} ({} seconds)'.format(
        change_id, timeout))
def add_layer(
        self, label: str, layer: typing.Union[str, dict, Layer], *, combine: bool = False):
    """Dynamically add a new layer onto the Pebble configuration layers.

    With combine False (the default), the new layer is appended as the
    top layer under the given label. With combine True, an existing layer
    with the same label is merged with the new one following the layer
    override rules; if no such label exists, the layer is added as usual.
    """
    if not isinstance(label, str):
        raise TypeError('label must be a str, not {}'.format(type(label).__name__))
    # Normalize whatever form the caller gave us down to YAML text.
    if isinstance(layer, Layer):
        layer_yaml = layer.to_yaml()
    elif isinstance(layer, dict):
        layer_yaml = Layer(layer).to_yaml()
    elif isinstance(layer, str):
        layer_yaml = layer
    else:
        raise TypeError('layer must be str, dict, or pebble.Layer, not {}'.format(
            type(layer).__name__))
    body = {
        'action': 'add',
        'combine': combine,
        'label': label,
        'format': 'yaml',
        'layer': layer_yaml,
    }
    self._request('POST', '/v1/layers', body=body)
def get_plan(self) -> Plan:
    """Get the Pebble plan (currently contains only combined services)."""
    raw = self._request('GET', '/v1/plan', {'format': 'yaml'})
    return Plan(raw['result'])
def get_services(self, names: typing.List[str] = None) -> typing.List[ServiceInfo]:
    """Get the service status for the configured services.

    If names is given, only the status of the named services is fetched;
    otherwise all configured services are returned.
    """
    query = {'names': ','.join(names)} if names is not None else None
    resp = self._request('GET', '/v1/services', query)
    return [ServiceInfo.from_dict(entry) for entry in resp['result']]
def pull(self, path: str, *, encoding: str = 'utf-8') -> typing.Union[typing.BinaryIO,
                                                                      typing.TextIO]:
    """Read a file's content from the remote system.

    Args:
        path: Path of the file to read from the remote system.
        encoding: Encoding to use for decoding the file's bytes to str,
            or None to specify no decoding.

    Returns:
        A readable file-like object, whose read() method will return str
        objects decoded according to the specified encoding, or bytes if
        encoding is None.

    Raises:
        ProtocolError: If the response is not well-formed multipart data
            or is missing the expected "response"/"files" parts.
        PathError: If the server reports an error for the requested path.
    """
    query = {
        'action': 'read',
        'path': path,
    }
    headers = {'Accept': 'multipart/form-data'}
    response = self._request_raw('GET', '/v1/files', query, headers)
    options = self._ensure_content_type(response.headers, 'multipart/form-data')
    boundary = options.get('boundary', '')
    if not boundary:
        raise ProtocolError('invalid boundary {!r}'.format(boundary))
    # We have to manually write the Content-Type with boundary, because
    # email.parser expects the entire multipart message with headers.
    parser = email.parser.BytesFeedParser()
    parser.feed(b'Content-Type: multipart/form-data; boundary=' +
                boundary.encode('utf-8') + b'\r\n\r\n')
    # Then read the rest of the response and feed it to the parser in
    # fixed-size chunks.
    while True:
        chunk = response.read(8192)
        if not chunk:
            break
        parser.feed(chunk)
    message = parser.close()
    # Walk over the multipart parts and read content and metadata.
    resp = None
    content = None
    for part in message.walk():
        name = part.get_param('name', header='Content-Disposition')
        if name == 'response':
            # JSON metadata part; checked for per-path errors below.
            resp = _json_loads(part.get_payload())
        elif name == 'files':
            filename = part.get_filename()
            if filename != path:
                raise ProtocolError('path not expected: {}'.format(filename))
            # decode=True, ironically, avoids decoding bytes to str
            content = part.get_payload(decode=True)
    if resp is None:
        raise ProtocolError('no "response" field in multipart body')
    self._raise_on_path_error(resp, path)
    if content is None:
        raise ProtocolError('no file content in multipart response')
    if encoding is not None:
        reader = io.StringIO(content.decode(encoding))
    else:
        reader = io.BytesIO(content)
    return reader
@staticmethod
def _raise_on_path_error(resp, path):
    """Raise ProtocolError/PathError if resp reports a problem for path."""
    entries = resp['result'] or []  # server may send null instead of []
    matching = [item for item in entries if item['path'] == path]
    if not matching:
        raise ProtocolError('path not found in response metadata: {}'.format(resp))
    # Take the last match so duplicate paths behave like the dict build did.
    error = matching[-1].get('error')
    if error:
        raise PathError(error['kind'], error['message'])
def push(
        self, path: str, source: typing.Union[bytes, str, typing.BinaryIO, typing.TextIO], *,
        encoding: str = 'utf-8', make_dirs: bool = False, permissions: int = None,
        user_id: int = None, user: str = None, group_id: int = None, group: str = None):
    """Write content to a given file path on the remote system.

    Args:
        path: Path of the file to write to on the remote system.
        source: Source of data to write: a concrete str or bytes
            instance, or a readable file-like object.
        encoding: Encoding used to turn str data (or strings read from a
            TextIO source) into bytes. Ignored for bytes/BinaryIO input.
        make_dirs: If True, create parent directories if they don't exist.
        permissions: Permissions (mode) to create file with (Pebble
            default is 0o644).
        user_id: User ID (UID) for file.
        user: Username for file. User's UID must match user_id if both
            are specified.
        group_id: Group ID (GID) for file.
        group: Group name for file. Group's GID must match group_id if
            both are specified.
    """
    # Build the per-file metadata entry (ownership/mode plus path flags).
    info = self._make_auth_dict(permissions, user_id, user, group_id, group)
    info['path'] = path
    if make_dirs:
        info['make-dirs'] = True
    metadata = {
        'action': 'write',
        'files': [info],
    }
    # Accept either raw data or a readable object, then coerce to bytes.
    content = source.read() if hasattr(source, 'read') else source
    if isinstance(content, str):
        content = content.encode(encoding)
    data, content_type = self._encode_multipart(metadata, path, content)
    headers = {
        'Accept': 'application/json',
        'Content-Type': content_type,
    }
    response = self._request_raw('POST', '/v1/files', None, headers, data)
    self._ensure_content_type(response.headers, 'application/json')
    self._raise_on_path_error(_json_loads(response.read()), path)
@staticmethod
def _make_auth_dict(permissions, user_id, user, group_id, group) -> typing.Dict:
d = {}
if permissions is not None:
d['permissions'] = format(permissions, '03o')
if user_id is not None:
d['user-id'] = user_id
if user is not None:
d['user'] = user
if group_id is not None:
d['group-id'] = group_id
if group is not None:
d['group'] = group
return d
@staticmethod
def _encode_multipart(metadata, path, content):
# Python's stdlib mime/multipart handling is screwy and doesn't handle
# binary properly, so roll our own.
boundary = binascii.hexlify(os.urandom(16))
path_escaped = path.replace('"', '\\"').encode('utf-8') # NOQA: test_quote_backslashes
parts = []
parts.extend([
b'--', boundary, b'\r\n',
b'Content-Type: application/json\r\n',
b'Content-Disposition: form-data; name="request"\r\n',
b'\r\n',
json.dumps(metadata).encode('utf-8'), b'\r\n',
b'--', boundary, b'\r\n',
b'Content-Type: application/octet-stream\r\n',
b'Content-Disposition: form-data; name="files"; filename="', path_escaped, b'"\r\n',
b'\r\n',
content, b'\r\n',
b'--', boundary, b'--\r\n',
])
content_type = 'multipart/form-data; boundary="' + boundary.decode('utf-8') + '"'
return b''.join(parts), content_type
def list_files(self, path: str, *, pattern: str = None,
itself: bool = False) -> typing.List[FileInfo]:
"""Return list of directory entries from given path on remote system.
Despite the | |
attrCtx_LVL2_IND0.name = 'EmployeeAddress__AddressCount'
attrCtx_LVL2_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND0.definition = 'resolvedFrom/EmployeeAddresses/hasAttributes/EmployeeAddress/resolutionGuidance/countAttribute/AddressCount'
attrCtx_LVL2_IND0.context_strings = []
attrCtx_LVL2_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly/hasAttributes/EmployeeAddress__AddressCount')
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND0)
attrCtx_LVL2_IND1 = AttributeContextExpectedValue()
attrCtx_LVL2_IND1.type = 'generatedRound'
attrCtx_LVL2_IND1.name = '_generatedAttributeRound0'
attrCtx_LVL2_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND1.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'attributeDefinition'
attrCtx_LVL3_IND0.name = 'EmployeeAddress_2_City'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound0'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL3_IND0.context_strings = []
attrCtx_LVL3_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly/hasAttributes/EmployeeAddress_2_City')
attrCtx_LVL2_IND1.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL3_IND1 = AttributeContextExpectedValue()
attrCtx_LVL3_IND1.type = 'attributeDefinition'
attrCtx_LVL3_IND1.name = 'EmployeeAddress_2_State'
attrCtx_LVL3_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound0'
attrCtx_LVL3_IND1.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL3_IND1.context_strings = []
attrCtx_LVL3_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly/hasAttributes/EmployeeAddress_2_State')
attrCtx_LVL2_IND1.contexts.append(attrCtx_LVL3_IND1)
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND1)
attrCtx_LVL2_IND2 = AttributeContextExpectedValue()
attrCtx_LVL2_IND2.type = 'generatedRound'
attrCtx_LVL2_IND2.name = '_generatedAttributeRound1'
attrCtx_LVL2_IND2.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND2.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'attributeDefinition'
attrCtx_LVL3_IND0.name = 'EmployeeAddress_3_City'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound1'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL3_IND0.context_strings = []
attrCtx_LVL3_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly/hasAttributes/EmployeeAddress_3_City')
attrCtx_LVL2_IND2.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL3_IND1 = AttributeContextExpectedValue()
attrCtx_LVL3_IND1.type = 'attributeDefinition'
attrCtx_LVL3_IND1.name = 'EmployeeAddress_3_State'
attrCtx_LVL3_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound1'
attrCtx_LVL3_IND1.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL3_IND1.context_strings = []
attrCtx_LVL3_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly/hasAttributes/EmployeeAddress_3_State')
attrCtx_LVL2_IND2.contexts.append(attrCtx_LVL3_IND1)
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND2)
attrCtx_LVL2_IND3 = AttributeContextExpectedValue()
attrCtx_LVL2_IND3.type = 'generatedRound'
attrCtx_LVL2_IND3.name = '_generatedAttributeRound2'
attrCtx_LVL2_IND3.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND3.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'attributeDefinition'
attrCtx_LVL3_IND0.name = 'EmployeeAddress_4_City'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound2'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL3_IND0.context_strings = []
attrCtx_LVL3_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly/hasAttributes/EmployeeAddress_4_City')
attrCtx_LVL2_IND3.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL3_IND1 = AttributeContextExpectedValue()
attrCtx_LVL3_IND1.type = 'attributeDefinition'
attrCtx_LVL3_IND1.name = 'EmployeeAddress_4_State'
attrCtx_LVL3_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly/attributeContext/EmployeeAddresses_Resolved_referenceOnly/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound2'
attrCtx_LVL3_IND1.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL3_IND1.context_strings = []
attrCtx_LVL3_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly/hasAttributes/EmployeeAddress_4_State')
attrCtx_LVL2_IND3.contexts.append(attrCtx_LVL3_IND1)
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND3)
attrCtx_LVL0_IND1.contexts.append(attrCtx_LVL1_IND1)
expectedContext_referenceOnly.contexts.append(attrCtx_LVL0_IND1)
expectedContext_structured = AttributeContextExpectedValue()
expectedContext_structured.type = 'entity'
expectedContext_structured.name = 'EmployeeAddresses_Resolved_structured'
expectedContext_structured.definition = 'resolvedFrom/EmployeeAddresses'
expectedContext_structured.contexts = []
attrCtx_LVL0_IND0 = AttributeContextExpectedValue()
attrCtx_LVL0_IND0.type = 'entityReferenceExtends'
attrCtx_LVL0_IND0.name = 'extends'
attrCtx_LVL0_IND0.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured'
attrCtx_LVL0_IND0.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'CdmEntity'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured/extends'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL0_IND0.contexts.append(attrCtx_LVL1_IND0)
expectedContext_structured.contexts.append(attrCtx_LVL0_IND0)
attrCtx_LVL0_IND1 = AttributeContextExpectedValue()
attrCtx_LVL0_IND1.type = 'attributeDefinition'
attrCtx_LVL0_IND1.name = 'EmployeeAddress'
attrCtx_LVL0_IND1.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured'
attrCtx_LVL0_IND1.definition = 'resolvedFrom/EmployeeAddresses/hasAttributes/EmployeeAddress'
attrCtx_LVL0_IND1.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'Address'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured/EmployeeAddress'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/Address'
attrCtx_LVL1_IND0.contexts = []
attrCtx_LVL2_IND0 = AttributeContextExpectedValue()
attrCtx_LVL2_IND0.type = 'entityReferenceExtends'
attrCtx_LVL2_IND0.name = 'extends'
attrCtx_LVL2_IND0.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND0.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'entity'
attrCtx_LVL3_IND0.name = 'CdmEntity'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured/EmployeeAddress/Address/extends'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL2_IND0.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND0)
attrCtx_LVL2_IND1 = AttributeContextExpectedValue()
attrCtx_LVL2_IND1.type = 'attributeDefinition'
attrCtx_LVL2_IND1.name = 'City'
attrCtx_LVL2_IND1.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND1.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL2_IND1.context_strings = []
attrCtx_LVL2_IND1.context_strings.append(
'EmployeeAddresses_Resolved_structured/hasAttributes/EmployeeAddress/members/City')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND1)
attrCtx_LVL2_IND2 = AttributeContextExpectedValue()
attrCtx_LVL2_IND2.type = 'attributeDefinition'
attrCtx_LVL2_IND2.name = 'State'
attrCtx_LVL2_IND2.parent = 'EmployeeAddresses_Resolved_structured/attributeContext/EmployeeAddresses_Resolved_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND2.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL2_IND2.context_strings = []
attrCtx_LVL2_IND2.context_strings.append(
'EmployeeAddresses_Resolved_structured/hasAttributes/EmployeeAddress/members/State')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND2)
attrCtx_LVL0_IND1.contexts.append(attrCtx_LVL1_IND0)
expectedContext_structured.contexts.append(attrCtx_LVL0_IND1)
expectedContext_normalized_structured = AttributeContextExpectedValue()
expectedContext_normalized_structured.type = 'entity'
expectedContext_normalized_structured.name = 'EmployeeAddresses_Resolved_normalized_structured'
expectedContext_normalized_structured.definition = 'resolvedFrom/EmployeeAddresses'
expectedContext_normalized_structured.contexts = []
attrCtx_LVL0_IND0 = AttributeContextExpectedValue()
attrCtx_LVL0_IND0.type = 'entityReferenceExtends'
attrCtx_LVL0_IND0.name = 'extends'
attrCtx_LVL0_IND0.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured'
attrCtx_LVL0_IND0.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'CdmEntity'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured/extends'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL0_IND0.contexts.append(attrCtx_LVL1_IND0)
expectedContext_normalized_structured.contexts.append(attrCtx_LVL0_IND0)
attrCtx_LVL0_IND1 = AttributeContextExpectedValue()
attrCtx_LVL0_IND1.type = 'attributeDefinition'
attrCtx_LVL0_IND1.name = 'EmployeeAddress'
attrCtx_LVL0_IND1.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured'
attrCtx_LVL0_IND1.definition = 'resolvedFrom/EmployeeAddresses/hasAttributes/EmployeeAddress'
attrCtx_LVL0_IND1.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'Address'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured/EmployeeAddress'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/Address'
attrCtx_LVL1_IND0.contexts = []
attrCtx_LVL2_IND0 = AttributeContextExpectedValue()
attrCtx_LVL2_IND0.type = 'entityReferenceExtends'
attrCtx_LVL2_IND0.name = 'extends'
attrCtx_LVL2_IND0.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND0.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'entity'
attrCtx_LVL3_IND0.name = 'CdmEntity'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured/EmployeeAddress/Address/extends'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL2_IND0.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND0)
attrCtx_LVL2_IND1 = AttributeContextExpectedValue()
attrCtx_LVL2_IND1.type = 'attributeDefinition'
attrCtx_LVL2_IND1.name = 'City'
attrCtx_LVL2_IND1.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND1.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL2_IND1.context_strings = []
attrCtx_LVL2_IND1.context_strings.append(
'EmployeeAddresses_Resolved_normalized_structured/hasAttributes/EmployeeAddress/members/City')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND1)
attrCtx_LVL2_IND2 = AttributeContextExpectedValue()
attrCtx_LVL2_IND2.type = 'attributeDefinition'
attrCtx_LVL2_IND2.name = 'State'
attrCtx_LVL2_IND2.parent = 'EmployeeAddresses_Resolved_normalized_structured/attributeContext/EmployeeAddresses_Resolved_normalized_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND2.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL2_IND2.context_strings = []
attrCtx_LVL2_IND2.context_strings.append(
'EmployeeAddresses_Resolved_normalized_structured/hasAttributes/EmployeeAddress/members/State')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND2)
attrCtx_LVL0_IND1.contexts.append(attrCtx_LVL1_IND0)
expectedContext_normalized_structured.contexts.append(attrCtx_LVL0_IND1)
expectedContext_referenceOnly_normalized = AttributeContextExpectedValue()
expectedContext_referenceOnly_normalized.type = 'entity'
expectedContext_referenceOnly_normalized.name = 'EmployeeAddresses_Resolved_referenceOnly_normalized'
expectedContext_referenceOnly_normalized.definition = 'resolvedFrom/EmployeeAddresses'
expectedContext_referenceOnly_normalized.contexts = []
attrCtx_LVL0_IND0 = AttributeContextExpectedValue()
attrCtx_LVL0_IND0.type = 'entityReferenceExtends'
attrCtx_LVL0_IND0.name = 'extends'
attrCtx_LVL0_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized'
attrCtx_LVL0_IND0.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'CdmEntity'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/extends'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL0_IND0.contexts.append(attrCtx_LVL1_IND0)
expectedContext_referenceOnly_normalized.contexts.append(attrCtx_LVL0_IND0)
attrCtx_LVL0_IND1 = AttributeContextExpectedValue()
attrCtx_LVL0_IND1.type = 'attributeDefinition'
attrCtx_LVL0_IND1.name = 'EmployeeAddress'
attrCtx_LVL0_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized'
attrCtx_LVL0_IND1.definition = 'resolvedFrom/EmployeeAddresses/hasAttributes/EmployeeAddress'
attrCtx_LVL0_IND1.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'Address'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/Address'
attrCtx_LVL1_IND0.contexts = []
attrCtx_LVL2_IND0 = AttributeContextExpectedValue()
attrCtx_LVL2_IND0.type = 'entityReferenceExtends'
attrCtx_LVL2_IND0.name = 'extends'
attrCtx_LVL2_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/Address'
attrCtx_LVL2_IND0.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'entity'
attrCtx_LVL3_IND0.name = 'CdmEntity'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/Address/extends'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL2_IND0.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND0)
attrCtx_LVL2_IND1 = AttributeContextExpectedValue()
attrCtx_LVL2_IND1.type = 'attributeDefinition'
attrCtx_LVL2_IND1.name = 'City'
attrCtx_LVL2_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/Address'
attrCtx_LVL2_IND1.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND1)
attrCtx_LVL2_IND2 = AttributeContextExpectedValue()
attrCtx_LVL2_IND2.type = 'attributeDefinition'
attrCtx_LVL2_IND2.name = 'State'
attrCtx_LVL2_IND2.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/Address'
attrCtx_LVL2_IND2.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND2)
attrCtx_LVL0_IND1.contexts.append(attrCtx_LVL1_IND0)
attrCtx_LVL1_IND1 = AttributeContextExpectedValue()
attrCtx_LVL1_IND1.type = 'generatedSet'
attrCtx_LVL1_IND1.name = '_generatedAttributeSet'
attrCtx_LVL1_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress'
attrCtx_LVL1_IND1.contexts = []
attrCtx_LVL2_IND0 = AttributeContextExpectedValue()
attrCtx_LVL2_IND0.type = 'addedAttributeExpansionTotal'
attrCtx_LVL2_IND0.name = 'EmployeeAddress__AddressCount'
attrCtx_LVL2_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND0.definition = 'resolvedFrom/EmployeeAddresses/hasAttributes/EmployeeAddress/resolutionGuidance/countAttribute/AddressCount'
attrCtx_LVL2_IND0.context_strings = []
attrCtx_LVL2_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized/hasAttributes/EmployeeAddress__AddressCount')
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND0)
attrCtx_LVL2_IND1 = AttributeContextExpectedValue()
attrCtx_LVL2_IND1.type = 'generatedRound'
attrCtx_LVL2_IND1.name = '_generatedAttributeRound0'
attrCtx_LVL2_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND1.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'attributeDefinition'
attrCtx_LVL3_IND0.name = 'EmployeeAddress_2_City'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound0'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL3_IND0.context_strings = []
attrCtx_LVL3_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized/hasAttributes/EmployeeAddress_2_City')
attrCtx_LVL2_IND1.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL3_IND1 = AttributeContextExpectedValue()
attrCtx_LVL3_IND1.type = 'attributeDefinition'
attrCtx_LVL3_IND1.name = 'EmployeeAddress_2_State'
attrCtx_LVL3_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound0'
attrCtx_LVL3_IND1.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL3_IND1.context_strings = []
attrCtx_LVL3_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized/hasAttributes/EmployeeAddress_2_State')
attrCtx_LVL2_IND1.contexts.append(attrCtx_LVL3_IND1)
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND1)
attrCtx_LVL2_IND2 = AttributeContextExpectedValue()
attrCtx_LVL2_IND2.type = 'generatedRound'
attrCtx_LVL2_IND2.name = '_generatedAttributeRound1'
attrCtx_LVL2_IND2.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND2.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'attributeDefinition'
attrCtx_LVL3_IND0.name = 'EmployeeAddress_3_City'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound1'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL3_IND0.context_strings = []
attrCtx_LVL3_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized/hasAttributes/EmployeeAddress_3_City')
attrCtx_LVL2_IND2.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL3_IND1 = AttributeContextExpectedValue()
attrCtx_LVL3_IND1.type = 'attributeDefinition'
attrCtx_LVL3_IND1.name = 'EmployeeAddress_3_State'
attrCtx_LVL3_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound1'
attrCtx_LVL3_IND1.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL3_IND1.context_strings = []
attrCtx_LVL3_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized/hasAttributes/EmployeeAddress_3_State')
attrCtx_LVL2_IND2.contexts.append(attrCtx_LVL3_IND1)
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND2)
attrCtx_LVL2_IND3 = AttributeContextExpectedValue()
attrCtx_LVL2_IND3.type = 'generatedRound'
attrCtx_LVL2_IND3.name = '_generatedAttributeRound2'
attrCtx_LVL2_IND3.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet'
attrCtx_LVL2_IND3.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'attributeDefinition'
attrCtx_LVL3_IND0.name = 'EmployeeAddress_4_City'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound2'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL3_IND0.context_strings = []
attrCtx_LVL3_IND0.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized/hasAttributes/EmployeeAddress_4_City')
attrCtx_LVL2_IND3.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL3_IND1 = AttributeContextExpectedValue()
attrCtx_LVL3_IND1.type = 'attributeDefinition'
attrCtx_LVL3_IND1.name = 'EmployeeAddress_4_State'
attrCtx_LVL3_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound2'
attrCtx_LVL3_IND1.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL3_IND1.context_strings = []
attrCtx_LVL3_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized/hasAttributes/EmployeeAddress_4_State')
attrCtx_LVL2_IND3.contexts.append(attrCtx_LVL3_IND1)
attrCtx_LVL1_IND1.contexts.append(attrCtx_LVL2_IND3)
attrCtx_LVL0_IND1.contexts.append(attrCtx_LVL1_IND1)
expectedContext_referenceOnly_normalized.contexts.append(attrCtx_LVL0_IND1)
expectedContext_referenceOnly_structured = AttributeContextExpectedValue()
expectedContext_referenceOnly_structured.type = 'entity'
expectedContext_referenceOnly_structured.name = 'EmployeeAddresses_Resolved_referenceOnly_structured'
expectedContext_referenceOnly_structured.definition = 'resolvedFrom/EmployeeAddresses'
expectedContext_referenceOnly_structured.contexts = []
attrCtx_LVL0_IND0 = AttributeContextExpectedValue()
attrCtx_LVL0_IND0.type = 'entityReferenceExtends'
attrCtx_LVL0_IND0.name = 'extends'
attrCtx_LVL0_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured'
attrCtx_LVL0_IND0.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'CdmEntity'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured/extends'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL0_IND0.contexts.append(attrCtx_LVL1_IND0)
expectedContext_referenceOnly_structured.contexts.append(attrCtx_LVL0_IND0)
attrCtx_LVL0_IND1 = AttributeContextExpectedValue()
attrCtx_LVL0_IND1.type = 'attributeDefinition'
attrCtx_LVL0_IND1.name = 'EmployeeAddress'
attrCtx_LVL0_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured'
attrCtx_LVL0_IND1.definition = 'resolvedFrom/EmployeeAddresses/hasAttributes/EmployeeAddress'
attrCtx_LVL0_IND1.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'Address'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured/EmployeeAddress'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/Address'
attrCtx_LVL1_IND0.contexts = []
attrCtx_LVL2_IND0 = AttributeContextExpectedValue()
attrCtx_LVL2_IND0.type = 'entityReferenceExtends'
attrCtx_LVL2_IND0.name = 'extends'
attrCtx_LVL2_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND0.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'entity'
attrCtx_LVL3_IND0.name = 'CdmEntity'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured/EmployeeAddress/Address/extends'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL2_IND0.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND0)
attrCtx_LVL2_IND1 = AttributeContextExpectedValue()
attrCtx_LVL2_IND1.type = 'attributeDefinition'
attrCtx_LVL2_IND1.name = 'City'
attrCtx_LVL2_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND1.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL2_IND1.context_strings = []
attrCtx_LVL2_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_structured/hasAttributes/EmployeeAddress/members/City')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND1)
attrCtx_LVL2_IND2 = AttributeContextExpectedValue()
attrCtx_LVL2_IND2.type = 'attributeDefinition'
attrCtx_LVL2_IND2.name = 'State'
attrCtx_LVL2_IND2.parent = 'EmployeeAddresses_Resolved_referenceOnly_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND2.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL2_IND2.context_strings = []
attrCtx_LVL2_IND2.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_structured/hasAttributes/EmployeeAddress/members/State')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND2)
attrCtx_LVL0_IND1.contexts.append(attrCtx_LVL1_IND0)
expectedContext_referenceOnly_structured.contexts.append(attrCtx_LVL0_IND1)
expectedContext_referenceOnly_normalized_structured = AttributeContextExpectedValue()
expectedContext_referenceOnly_normalized_structured.type = 'entity'
expectedContext_referenceOnly_normalized_structured.name = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured'
expectedContext_referenceOnly_normalized_structured.definition = 'resolvedFrom/EmployeeAddresses'
expectedContext_referenceOnly_normalized_structured.contexts = []
attrCtx_LVL0_IND0 = AttributeContextExpectedValue()
attrCtx_LVL0_IND0.type = 'entityReferenceExtends'
attrCtx_LVL0_IND0.name = 'extends'
attrCtx_LVL0_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured'
attrCtx_LVL0_IND0.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'CdmEntity'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured/extends'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL0_IND0.contexts.append(attrCtx_LVL1_IND0)
expectedContext_referenceOnly_normalized_structured.contexts.append(attrCtx_LVL0_IND0)
attrCtx_LVL0_IND1 = AttributeContextExpectedValue()
attrCtx_LVL0_IND1.type = 'attributeDefinition'
attrCtx_LVL0_IND1.name = 'EmployeeAddress'
attrCtx_LVL0_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured'
attrCtx_LVL0_IND1.definition = 'resolvedFrom/EmployeeAddresses/hasAttributes/EmployeeAddress'
attrCtx_LVL0_IND1.contexts = []
attrCtx_LVL1_IND0 = AttributeContextExpectedValue()
attrCtx_LVL1_IND0.type = 'entity'
attrCtx_LVL1_IND0.name = 'Address'
attrCtx_LVL1_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured/EmployeeAddress'
attrCtx_LVL1_IND0.definition = 'resolvedFrom/Address'
attrCtx_LVL1_IND0.contexts = []
attrCtx_LVL2_IND0 = AttributeContextExpectedValue()
attrCtx_LVL2_IND0.type = 'entityReferenceExtends'
attrCtx_LVL2_IND0.name = 'extends'
attrCtx_LVL2_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND0.contexts = []
attrCtx_LVL3_IND0 = AttributeContextExpectedValue()
attrCtx_LVL3_IND0.type = 'entity'
attrCtx_LVL3_IND0.name = 'CdmEntity'
attrCtx_LVL3_IND0.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured/EmployeeAddress/Address/extends'
attrCtx_LVL3_IND0.definition = 'resolvedFrom/CdmEntity'
attrCtx_LVL2_IND0.contexts.append(attrCtx_LVL3_IND0)
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND0)
attrCtx_LVL2_IND1 = AttributeContextExpectedValue()
attrCtx_LVL2_IND1.type = 'attributeDefinition'
attrCtx_LVL2_IND1.name = 'City'
attrCtx_LVL2_IND1.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND1.definition = 'resolvedFrom/Address/hasAttributes/City'
attrCtx_LVL2_IND1.context_strings = []
attrCtx_LVL2_IND1.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/hasAttributes/EmployeeAddress/members/City')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND1)
attrCtx_LVL2_IND2 = AttributeContextExpectedValue()
attrCtx_LVL2_IND2.type = 'attributeDefinition'
attrCtx_LVL2_IND2.name = 'State'
attrCtx_LVL2_IND2.parent = 'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/attributeContext/EmployeeAddresses_Resolved_referenceOnly_normalized_structured/EmployeeAddress/Address'
attrCtx_LVL2_IND2.definition = 'resolvedFrom/Address/hasAttributes/State'
attrCtx_LVL2_IND2.context_strings = []
attrCtx_LVL2_IND2.context_strings.append(
'EmployeeAddresses_Resolved_referenceOnly_normalized_structured/hasAttributes/EmployeeAddress/members/State')
attrCtx_LVL1_IND0.contexts.append(attrCtx_LVL2_IND2)
attrCtx_LVL0_IND1.contexts.append(attrCtx_LVL1_IND0)
expectedContext_referenceOnly_normalized_structured.contexts.append(attrCtx_LVL0_IND1)
expected_default = []
att = AttributeExpectedValue()
att.attribute_context = 'EmployeeAddresses_Resolved_default/attributeContext/EmployeeAddresses_Resolved_default/EmployeeAddress/_generatedAttributeSet/EmployeeAddress__AddressCount'
att.data_format = 'Int32'
att.name = 'EmployeeAddress__AddressCount'
expected_default.append(att)
att = AttributeExpectedValue()
att.attribute_context = 'EmployeeAddresses_Resolved_default/attributeContext/EmployeeAddresses_Resolved_default/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound0/EmployeeAddress_2_City'
att.data_format = 'String'
att.name = 'EmployeeAddress_2_City'
att.source_name = 'City'
expected_default.append(att)
att = AttributeExpectedValue()
att.attribute_context = 'EmployeeAddresses_Resolved_default/attributeContext/EmployeeAddresses_Resolved_default/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound0/EmployeeAddress_2_State'
att.data_format = 'String'
att.name = 'EmployeeAddress_2_State'
att.source_name = 'State'
expected_default.append(att)
att = AttributeExpectedValue()
att.attribute_context = 'EmployeeAddresses_Resolved_default/attributeContext/EmployeeAddresses_Resolved_default/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound1/EmployeeAddress_3_City'
att.data_format = 'String'
att.name = 'EmployeeAddress_3_City'
att.source_name = 'City'
expected_default.append(att)
att = AttributeExpectedValue()
att.attribute_context = 'EmployeeAddresses_Resolved_default/attributeContext/EmployeeAddresses_Resolved_default/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound1/EmployeeAddress_3_State'
att.data_format = 'String'
att.name = 'EmployeeAddress_3_State'
att.source_name = 'State'
expected_default.append(att)
att = AttributeExpectedValue()
att.attribute_context = 'EmployeeAddresses_Resolved_default/attributeContext/EmployeeAddresses_Resolved_default/EmployeeAddress/_generatedAttributeSet/_generatedAttributeRound2/EmployeeAddress_4_City'
att.data_format = 'String'
att.name = 'EmployeeAddress_4_City'
att.source_name | |
'JLCC', 0x143, 0x13F),
('INT', '27960', 'EPROM', 'PLCC', 0x143, 0x13F),
('INT', '27C010', 'EPROM', 'DIP', 0x05C, 0x0CB),
('INT', '27C010', 'EPROM', 'PLCC', 0x05C, 0x0DE),
('INT', '27C010A', 'EPROM', 'DIP', 0x160, 0x0CB),
('INT', '27C011', 'EPROM', 'DIP', 0x05C, 0x0C9),
('INT', '27C020', 'EPROM', 'DIP', 0x05C, 0x0F5),
('INT', '27C020', 'EPROM', 'PLCC', 0x05C, 0x12D),
('INT', '27C040', 'EPROM', 'DIP', 0x05C, 0x0F6),
('INT', '27C100', 'EPROM', 'DIP', 0x05C, 0x0CC),
('INT', '27C128', 'EPROM', 'DIP', 0x05C, 0x051),
('INT', '27C128', 'EPROM', 'PLCC', 0x05C, 0x0C2),
('INT', '27C202', 'EPROM', 'DIP', 0x07E, 0x0DD),
('INT', '27C202', 'EPROM', 'DIP', 0x07E, 0x0DD),
('INT', '27C202', 'EPROM', 'PLCC', 0x07E, 0x0AB),
('INT', '27C202', 'EPROM', 'JLCC', 0x07E, 0x0AB),
('INT', '27C203', 'EPROM', 'DIP', 0x0A7, 0x04C),
('INT', '27C203', 'EPROM', 'PLCC', 0x0A7, 0x04D),
('INT', '27C203', 'EPROM', 'JLCC', 0x0A7, 0x04D),
('INT', '27C210', 'EPROM', 'DIP', 0x05F, 0x0A8),
('INT', '27C210', 'EPROM', 'PLCC', 0x05F, 0x088),
('INT', '27C213', 'EPROM', 'DIP', 0x136, 0x136),
('INT', '27C220', 'EPROM', 'DIP', 0x05F, 0x0DF),
('INT', '27C220', 'EPROM', 'PLCC', 0x05F, 0x0EC),
('INT', '27C240', 'EPROM', 'DIP', 0x05F, 0x089),
('INT', '27C256', 'EPROM', 'PLCC', 0x05C, 0x0C3),
('INT', '27C256', 'EPROM', 'DIP', 0x05C, 0x032),
('INT', '27C256A', 'EPROM', 'DIP', 0x05C, 0x032),
('INT', '27C256A', 'EPROM', 'PLCC', 0x05C, 0x0C3),
('INT', '27C400', 'EPROM', 'DIP', 0x05F, 0x172),
('INT', '27C512', 'EPROM', 'DIP', 0x05E, 0x0A4),
('INT', '27C513', 'EPROM', 'DIP', 0x1A4, 0x05E),
('INT', '27C64', 'EPROM', 'PLCC', 0x05C, 0x0C1),
('INT', '27C64', 'EPROM', 'DIP', 0x05C, 0x033),
('INT', '27F256', 'FLASH', 'DIP', 0x0A8, 0x109),
('INT', '27F64', 'FLASH', 'DIP', 0x084, 0x033),
('INT', '2815', 'EEPROM', 'DIP', 0x085, 0x023),
('INT', '2816', 'EEPROM', 'DIP', 0x037, 0x023),
('INT', '2816A', 'EEPROM', 'DIP', 0x0A5, 0x096),
('INT', '2817A', 'EEPROM', 'DIP', 0x0BF, 0x0A2),
('INT', '2864A', 'EEPROM', 'DIP', 0x0CC, 0x098),
('INT', '28F001BX-B', 'FLASH', 'DIP', 0x19B, 0x1AB),
('INT', '28F001BX-B', 'FLASH', 'PLCC', 0x19B, 0x1B9),
('INT', '28F001BX-B', 'FLASH', 'PLCC', 0x19B, 0x1B9),
('INT', '28F001BX-T', 'FLASH', 'DIP', 0x19C, 0x1AB),
('INT', '28F001BX-T', 'FLASH', 'PLCC', 0x19C, 0x1B9),
('INT', '28F001BX-T', 'FLASH', 'PLCC', 0x19C, 0x1B9),
('INT', '28F010', 'FLASH', 'DIP', 0x135, 0x118),
('INT', '28F010', 'FLASH', 'PLCC', 0x135, 0x12A),
('INT', '28F020', 'FLASH', 'DIP', 0x135, 0x170),
('INT', '28F020', 'FLASH', 'PLCC', 0x135, 0x177),
('INT', '28F256-P1', 'FLASH', 'DIP', 0x113, 0x10A),
('INT', '28F256-P1', 'FLASH', 'PLCC', 0x113, 0x112),
('INT', '28F256-P2', 'FLASH', 'DIP', 0x0A8, 0x10A),
('INT', '28F256-P2', 'FLASH', 'PLCC', 0x0A8, 0x112),
('INT', '28F256A', 'FLASH', 'DIP', 0x135, 0x10A),
('INT', '28F256A', 'FLASH', 'PLCC', 0x135, 0x112),
('INT', '28F512', 'FLASH', 'DIP', 0x135, 0x117),
('INT', '28F512', 'FLASH', 'PLCC', 0x135, 0x129),
('INT', '68C257', 'EPROM', 'DIP', 0x05C, 0x0E2),
('INT', '68C257', 'EPROM', 'PLCC', 0x05C, 0x0E3),
('INT', '68C257M', 'EPROM', 'DIP', 0x05C, 0x0E2),
('INT', '68C257M', 'EPROM', 'PLCC', 0x05C, 0x0E3),
('INT', '8704', 'EPROM', 'DIP', 0x021, 0x026),
('INT', '8708', 'EPROM', 'DIP', 0x021, 0x027),
('INT', '8741', 'MICRO', 'DIP', 0x056, 0x059),
('INT', '8741A', 'MICRO', 'DIP', 0x056, 0x059),
('INT', '8741AH', 'MICRO', 'DIP', 0x051, 0x01B),
('INT', '8741AH', 'MICRO', 'DIP', 0x051, 0x01B),
('INT', '8742', 'MICRO', 'DIP', 0x050, 0x057),
('INT', '8742AH', 'MICRO', 'DIP', 0x051, 0x03F),
('INT', '8744', 'MICRO', 'DIP', 0x053, 0x058),
('INT', '8744H', 'MICRO', 'DIP', 0x0D5, 0x058),
('INT', '8748', 'MICRO', 'DIP', 0x052, 0x056),
('INT', '8748H', 'MICRO', 'DIP', 0x050, 0x056),
('INT', '8749H', 'MICRO', 'DIP', 0x050, 0x057),
('INT', '8751', 'MICRO', 'DIP', 0x053, 0x058),
('INT', '8751BH', 'MICRO', 'DIP', 0x05A, 0x11C),
('INT', '8751BH', 'MICRO', 'PLCC', 0x05A, 0x126),
('INT', '8751H', 'MICRO', 'DIP', 0x0D5, 0x058),
('INT', '8751H', 'MICRO', 'LCC', 0x0D5, 0x0D4),
('INT', '8752BH', 'MICRO', 'PLCC', 0x05A, 0x00E),
('INT', '8752BH', 'MICRO', 'DIP', 0x05A, 0x00C),
('INT', '8755A', 'MICRO', 'DIP', 0x047, 0x055),
('INT', '87C256', 'EPROM', 'DIP', 0x05C, 0x0C8),
('INT', '87C257', 'EPROM', 'DIP', 0x05C, 0x0E2),
('INT', '87C257', 'EPROM', 'PLCC', 0x05C, 0x0E3),
('INT', '87C257I', 'EPROM', 'DIP', 0x05C, 0x0E2),
('INT', '87C257I', 'EPROM', 'PLCC', 0x05C, 0x0E3),
('INT', '87C42', 'MICRO', 'DIP', 0x1FF, 0x201),
('INT', '87C51', 'MICRO', 'DIP', 0x05A, 0x00B),
('INT', '87C51', 'MICRO', 'PLCC', 0x05A, 0x074),
('INT', '87C51(FX)', 'MICRO', 'DIP', 0x156, 0x1CA),
('INT', '87C51FA', 'MICRO', 'DIP', 0x05A, 0x04F),
('INT', '87C51FA', 'MICRO', 'PLCC', 0x05A, 0x143),
('INT', '87C51FA(FX)', 'MICRO', 'DIP', 0x156, 0x1CC),
('INT', '87C51FA(FX)', 'MICRO', 'PLCC', 0x156, 0x1CD),
('INT', '87C51FB', 'MICRO', 'DIP', 0x05A, 0x073),
('INT', '87C51FB', 'MICRO', 'PLCC', 0x05A, 0x144),
('INT', '87C51FB(FX)', 'MICRO', 'DIP', 0x156, 0x199),
('INT', '87C51FB(FX)', 'MICRO', 'PLCC', 0x156, 0x19A),
('INT', '87C51FC', 'MICRO', 'DIP', 0x156, 0x15E),
('INT', '87C51FC', 'MICRO', 'PLCC', 0x156, 0x15F),
('INT', '87C54', 'MICRO', 'PLCC', 0x156, 0x19A),
('INT', '87C54', 'MICRO', 'DIP', 0x156, 0x199),
('INT', '87C58', 'MICRO', 'DIP', 0x156, 0x15E),
('INT', '87C58', 'MICRO', 'PLCC', 0x156, 0x15F),
('INT', '87C64', 'EPROM', 'PLCC', 0x05C, 0x0C7),
('INT', '87C64', 'EPROM', 'DIP', 0x05C, 0x03A),
('INT', '87C75PF', 'PE', 'DIP', 0x112, 0x107),
('INT', '87C75PF', 'PE', 'PLCC', 0x112, 0x108),
('INT', 'P27128A', 'EPROM', 'DIP', 0x05C, 0x051),
('INT', 'P27256', 'EPROM', 'DIP', 0x05C, 0x032),
('INT', 'P2732A', 'EPROM', 'DIP', 0x04D, 0x024),
('INT', 'P27512', 'EPROM', 'DIP', 0x05E, 0x0A4),
('INT', 'P2764', 'EPROM', 'DIP', 0x079, 0x033),
('INT', 'P2764A', 'EPROM', 'DIP', 0x05C, 0x033),
('INT', 'P27C256', 'EPROM', 'DIP', 0x05C, 0x032),
('INT', 'P8742AH', 'MICRO', 'DIP', 0x051, 0x03F),
('INT', 'P8748H', 'MICRO', 'DIP', 0x050, 0x056),
('ISL', '5600', 'PROM', 'DIP', 0x0D4, 0x002),
('ISL', '5603A', 'PROM', 'DIP', 0x070, 0x001),
('ISL', '5604', 'PROM', 'DIP', 0x070, 0x003),
('ISL', '5610', 'PROM', 'DIP', 0x0D4, 0x002),
('ISL', '5623', 'PROM', 'DIP', 0x070, 0x001),
('ISL', '5624', 'PROM', 'DIP', 0x070, 0x003),
('ISL', '6716', 'EPROM', 'DIP', 0x059, 0x064),
('LAT', 'EE64K8', 'EEPROM', 'DIP', 0x0C3, 0x098),
('MCT', '24C01/A', 'EEPROM', 'DIP', 0x120, 0x18F),
('MCT', '24C02/A', 'EEPROM', 'DIP', 0x120, 0x119),
('MCT', '24C04/A', 'EEPROM', 'DIP', 0x120, 0x11A),
('MCT', '27256', 'EPROM', 'DIP', 0x05C, 0x032),
('MCT', '27C128', 'EPROM', 'DIP', 0x115, 0x051),
('MCT', '27C128', 'EPROM', 'PLCC', 0x115, 0x0C2),
('MCT', '27C128', 'EPROM', 'LCC', 0x115, 0x0C2),
('MCT', '27C256', 'EPROM', 'DIP', 0x187, 0x032),
('MCT', '27C256', 'EPROM', 'PLCC', 0x187, 0x0C3),
('MCT', '27C256', 'EPROM', 'LCC', 0x187, 0x0C3),
('MCT', '27C256', 'EPROM', 'PLCC', 0x115, 0x0C3),
('MCT', '27C512', 'EPROM', 'DIP', 0x188, 0x0A4),
('MCT', '27C512', 'EPROM', 'PLCC', 0x188, 0x0C4),
('MCT', '27C512', 'EPROM', 'LCC', 0x188, 0x0C4),
('MCT', '27C513', 'EPROM', 'DIP', 0x05E, 0x05E),
('MCT', '27C515', 'EPROM', 'DIP', 0x05E, 0x0CA),
('MCT', '27C64', 'EPROM', 'DIP', 0x115, 0x033),
('MCT', '27C64', 'EPROM', 'PLCC', 0x115, 0x0C1),
('MCT', '27C64', 'EPROM', 'LCC', 0x115, 0x0C1),
('MCT', '27HC1616', 'EPROM', 'DIP', 0x191, 0x1A2),
('MCT', '27HC191', 'EPROM', 'DIP', 0x115, 0x021),
('MCT', '27HC256', 'EPROM', 'DIP', 0x115, 0x032),
('MCT', '27HC291', 'EPROM', 'DIP', 0x115, 0x021),
('MCT', '27HC64', 'EPROM', 'DIP', 0x115, 0x033),
('MCT', '27HC641', 'EPROM', 'DIP', 0x115, 0x067),
('MCT', '28C04/A', 'EEPROM', 'DIP', 0x0C4, 0x082),
('MCT', '28C04/A', 'EEPROM', 'PLCC', 0x0C4, 0x18E),
('MCT', '28C16/A', 'EEPROM', 'DIP', 0x0C4, 0x096),
('MCT', '28C16/A', 'EEPROM', 'PLCC', 0x0C4, 0x10D),
('MCT', '28C17/A', 'EEPROM', 'DIP', 0x0C4, 0x0A2),
('MCT', '28C17/A', 'EEPROM', 'PLCC', 0x0C4, 0x10D),
('MCT', '28C256', 'EEPROM', 'DIP', 0x0BA, 0x099),
('MCT', '28C64/A', 'EEPROM', 'DIP', 0x0C4, 0x098),
('MCT', '28C64/A', 'EEPROM', 'PLCC', 0x0C4, 0x05D),
('MCT', '28C64/A', 'EEPROM', 'LCC', 0x0C4, 0x05D),
('MCT', '28CP64', 'EEPROM', 'DIP', 0x0C4, 0x098),
('MCT', '28HC16', 'EEPROM', 'DIP', 0x0C4, 0x096),
('MCT', '28HC17', 'EEPROM', 'DIP', 0x0C4, 0x0A2),
('MCT', '5716', 'EPROM', 'DIP', 0x083, 0x023),
('MCT', '5816', 'EPROM', 'DIP', 0x037, 0x023),
('MCT', '59C11', 'EEPROM', 'DIP', 0x123, 0x11D),
('MCT', '85C72', 'EEPROM', 'DIP', 0x120, 0x18F),
('MCT', '85C82', 'EEPROM', 'DIP', 0x120, 0x119),
('MCT', '93C06', 'EEPROM', 'DIP', 0x118, 0x178),
('MCT', '93C46', 'EEPROM', 'DIP', 0x118, 0x10E),
('MIK', '2716', 'EPROM', 'DIP', 0x019, 0x023),
('MIK', '2732', 'EPROM', 'DIP', 0x019, 0x024),
('MIK', '555', 'EPROM', 'DIP', 0x021, 0x027),
('MIT', '2708', 'EPROM', 'DIP', 0x021, 0x027),
('MIT', '27128', 'EPROM', 'DIP', 0x079, 0x051),
('MIT', '2716', 'EPROM', 'DIP', 0x019, 0x023),
('MIT', '27256', 'EPROM', 'DIP', 0x093, 0x032),
('MIT', '2732', 'EPROM', 'DIP', 0x019, 0x024),
('MIT', '27512', 'EPROM', 'DIP', 0x04B, 0x0A4),
('MIT', '2764', 'EPROM', 'DIP', 0x079, 0x033),
('MIT', '27C100', 'EPROM', 'DIP', 0x091, 0x0CC),
('MIT', '27C100', 'EPROM', 'PLCC', 0x091, 0x127),
('MIT', '27C100', 'EPROM', 'JLCC', 0x091, 0x127),
('MIT', '27C101', 'EPROM', 'DIP', 0x091, 0x0CB),
('MIT', '27C101', 'EPROM', 'PLCC', 0x091, 0x0DE),
('MIT', '27C102', 'EPROM', 'DIP', 0x08E, 0x0A8),
('MIT', '27C102', 'EPROM', 'PLCC', 0x08E, 0x088),
('MIT', '27C102', 'EPROM', 'JLCC', 0x08E, 0x088),
('MIT', '27C128', 'EPROM', 'DIP', 0x077, 0x051),
('MIT', '27C201', 'EPROM', 'DIP', 0x17E, 0x0F5),
('MIT', '27C201', 'EPROM', 'JLCC', 0x17E, 0x12D),
('MIT', '27C202', 'EPROM', 'DIP', 0x08E, 0x0DF),
('MIT', '27C202', 'EPROM', 'JLCC', 0x08E, 0x0EC),
('MIT', '27C256', 'EPROM', 'DIP', 0x08C, 0x032),
('MIT', '27C256A', 'EPROM', 'DIP', 0x08C, 0x032),
('MIT', '27C401', 'EPROM', 'DIP', 0x05C, 0x0F6),
('MIT', '27C402', 'EPROM', 'DIP', 0x05F, 0x089),
('MIT', '27C512A', 'EPROM', 'DIP', 0x04B, 0x0A4),
('MIT', '28F101', 'FLASH', 'DIP', 0x135, 0x118),
('MIT', '54700A', 'PROM', 'DIP', 0x0B5, 0x001),
('MIT', '54701A', 'PROM', 'DIP', 0x0B5, 0x001),
('MIT', '54730A', 'PROM', 'DIP', 0x0B5, 0x002),
('MIT', '54731A', 'PROM', 'DIP', 0x0B5, 0x002),
('MIT', '54740A', 'PROM', 'DIP', 0x0B5, 0x005),
('MIT', '54741A', 'PROM', 'DIP', 0x0B5, 0x005),
('MIT', '8748', 'MICRO', 'DIP', 0x052, 0x056),
('MOS', '2716', 'EPROM', 'DIP', 0x019, 0x023),
('MOT', '2532', 'EPROM', 'DIP', 0x019, 0x025),
('MOT', '2708', 'EPROM', 'DIP', 0x021, 0x027),
('MOT', '2716', 'EPROM', 'DIP', 0x019, 0x023),
('MOT', '2808', 'EEPROM', 'DIP', 0x081, | |
<gh_stars>10-100
import multiprocessing
import os
import codecs
import copy
import time
import tensorflow as tf
from nsm import agent_factory
from nsm import data_utils
from table.utils import init_experiment, FLAGS, get_train_shard_path, get_init_model_path, load_programs
from table.utils import get_experiment_dir, show_samples, collect_traj_for_program
from table.SQL_converter import get_env_trajs
class Actor(multiprocessing.Process):
def __init__(
self, name, actor_id, shard_ids, ckpt_queue, train_queue, eval_queue, replay_queue):
multiprocessing.Process.__init__(self)
self.ckpt_queue = ckpt_queue
self.eval_queue = eval_queue
self.train_queue = train_queue
self.replay_queue = replay_queue
self.name = name
self.shard_ids = shard_ids
self.actor_id = actor_id
def run(self):
agent, envs = init_experiment(
[get_train_shard_path(i) for i in self.shard_ids],
use_gpu=FLAGS.actor_use_gpu,
gpu_id=str(self.actor_id + FLAGS.actor_gpu_start_id))
# only keep the envs that can get oracle traj for a fair compare
#envs, _ = get_env_trajs(envs)
graph = agent.model.graph
current_ckpt = get_init_model_path()
env_dict = dict([(env.name, env) for env in envs])
replay_buffer = agent_factory.AllGoodReplayBuffer(agent, envs[0].de_vocab)
# Load saved programs to warm start the replay buffer.
if FLAGS.load_saved_programs:
load_programs(
envs, replay_buffer, FLAGS.saved_program_file)
if FLAGS.save_replay_buffer_at_end:
replay_buffer_copy = agent_factory.AllGoodReplayBuffer(de_vocab=envs[0].de_vocab)
replay_buffer_copy.program_prob_dict = copy.deepcopy(replay_buffer.program_prob_dict)
i = 0
while True:
# Create the logging files.
if FLAGS.log_samples_every_n_epoch > 0 and i % FLAGS.log_samples_every_n_epoch == 0:
f_replay = codecs.open(os.path.join(
get_experiment_dir(), 'replay_samples_{}_{}.txt'.format(self.name, i)),
'w', encoding='utf-8')
f_policy = codecs.open(os.path.join(
get_experiment_dir(), 'policy_samples_{}_{}.txt'.format(self.name, i)),
'w', encoding='utf-8')
f_train = codecs.open(os.path.join(
get_experiment_dir(), 'train_samples_{}_{}.txt'.format(self.name, i)),
'w', encoding='utf-8')
n_train_samples = 0
if FLAGS.use_replay_samples_in_train:
n_train_samples += FLAGS.n_replay_samples
if FLAGS.use_policy_samples_in_train and FLAGS.use_nonreplay_samples_in_train:
raise ValueError(
'Cannot use both on-policy samples and nonreplay samples for training!')
if FLAGS.use_policy_samples_in_train or FLAGS.use_nonreplay_samples_in_train:
# Note that nonreplay samples are drawn by rejection
# sampling from on-policy samples.
n_train_samples += FLAGS.n_policy_samples
# Make sure that all the samples from the env batch
# fits into one batch for training.
if FLAGS.batch_size < n_train_samples:
raise ValueError(
'One batch have to at least contain samples from one environment.')
env_batch_size = FLAGS.batch_size / n_train_samples
env_iterator = data_utils.BatchIterator(
dict(envs=envs), shuffle=True,
batch_size=env_batch_size)
for j, batch_dict in enumerate(env_iterator):
batch_envs = batch_dict['envs']
tf.logging.info('=' * 50)
tf.logging.info('{} iteration {}, batch {}: {} envs'.format(
self.name, i, j, len(batch_envs)))
t1 = time.time()
# Generate samples with cache and save to replay buffer.
t3 = time.time()
n_explore = 0
for _ in xrange(FLAGS.n_explore_samples):
explore_samples = agent.generate_samples(
batch_envs, n_samples=1, use_cache=FLAGS.use_cache,
greedy=FLAGS.greedy_exploration)
replay_buffer.save(explore_samples)
n_explore += len(explore_samples)
if FLAGS.n_extra_explore_for_hard > 0:
hard_envs = [env for env in batch_envs
if not replay_buffer.has_found_solution(env.name)]
if hard_envs:
for _ in xrange(FLAGS.n_extra_explore_for_hard):
explore_samples = agent.generate_samples(
hard_envs, n_samples=1, use_cache=FLAGS.use_cache,
greedy=FLAGS.greedy_exploration)
replay_buffer.save(explore_samples)
n_explore += len(explore_samples)
t4 = time.time()
tf.logging.info('{} sec used generating {} exploration samples.'.format(
t4 - t3, n_explore))
tf.logging.info('{} samples saved in the replay buffer.'.format(
replay_buffer.size))
t3 = time.time()
replay_samples = replay_buffer.replay(
batch_envs, FLAGS.n_replay_samples,
use_top_k=FLAGS.use_top_k_replay_samples,
agent=None if FLAGS.random_replay_samples else agent,
truncate_at_n=FLAGS.truncate_replay_buffer_at_n)
t4 = time.time()
tf.logging.info('{} sec used selecting {} replay samples.'.format(
t4 - t3, len(replay_samples)))
t3 = time.time()
if FLAGS.use_top_k_policy_samples:
if FLAGS.n_policy_samples == 1:
policy_samples = agent.generate_samples(
batch_envs, n_samples=FLAGS.n_policy_samples,
greedy=True)
else:
policy_samples = agent.beam_search(
batch_envs, beam_size=FLAGS.n_policy_samples)
else:
policy_samples = agent.generate_samples(
batch_envs, n_samples=FLAGS.n_policy_samples,
greedy=False)
t4 = time.time()
tf.logging.info('{} sec used generating {} on-policy samples'.format(
t4-t3, len(policy_samples)))
t2 = time.time()
tf.logging.info(
('{} sec used generating replay and on-policy samples,'
' {} iteration {}, batch {}: {} envs').format(
t2-t1, self.name, i, j, len(batch_envs)))
t1 = time.time()
self.eval_queue.put((policy_samples, len(batch_envs)))
self.replay_queue.put((replay_samples, len(batch_envs)))
assert (FLAGS.fixed_replay_weight >= 0.0 and FLAGS.fixed_replay_weight <= 1.0)
if FLAGS.use_replay_prob_as_weight:
new_samples = []
for sample in replay_samples:
name = sample.traj.env_name
if name in replay_buffer.prob_sum_dict:
replay_prob = max(
replay_buffer.prob_sum_dict[name], FLAGS.min_replay_weight)
else:
replay_prob = 0.0
scale = replay_prob
new_samples.append(
agent_factory.Sample(
traj=sample.traj,
prob=sample.prob * scale))
replay_samples = new_samples
else:
replay_samples = agent_factory.scale_probs(
replay_samples, FLAGS.fixed_replay_weight)
replay_samples = sorted(
replay_samples, key=lambda x: x.traj.env_name)
policy_samples = sorted(
policy_samples, key=lambda x: x.traj.env_name)
if FLAGS.use_nonreplay_samples_in_train:
nonreplay_samples = []
for sample in policy_samples:
if not replay_buffer.contain(sample.traj):
nonreplay_samples.append(sample)
replay_buffer.save(policy_samples)
def weight_samples(samples):
if FLAGS.use_replay_prob_as_weight:
new_samples = []
for sample in samples:
name = sample.traj.env_name
if name in replay_buffer.prob_sum_dict:
replay_prob = max(
replay_buffer.prob_sum_dict[name],
FLAGS.min_replay_weight)
else:
replay_prob = 0.0
scale = 1.0 - replay_prob
new_samples.append(
agent_factory.Sample(
traj=sample.traj,
prob=sample.prob * scale))
else:
new_samples = agent_factory.scale_probs(
samples, 1 - FLAGS.fixed_replay_weight)
return new_samples
train_samples = []
if FLAGS.use_replay_samples_in_train:
if FLAGS.use_trainer_prob:
replay_samples = [
sample._replace(prob=None) for sample in replay_samples]
train_samples += replay_samples
if FLAGS.use_policy_samples_in_train:
train_samples += weight_samples(policy_samples)
if FLAGS.use_nonreplay_samples_in_train:
train_samples += weight_samples(nonreplay_samples)
train_samples = sorted(train_samples, key=lambda x: x.traj.env_name)
tf.logging.info('{} train samples'.format(len(train_samples)))
if FLAGS.use_importance_sampling:
step_logprobs = agent.compute_step_logprobs(
[s.traj for s in train_samples])
else:
step_logprobs = None
if FLAGS.use_replay_prob_as_weight:
n_clip = 0
for env in batch_envs:
name = env.name
if (name in replay_buffer.prob_sum_dict and
replay_buffer.prob_sum_dict[name] < FLAGS.min_replay_weight):
n_clip += 1
clip_frac = float(n_clip) / len(batch_envs)
else:
clip_frac = 0.0
self.train_queue.put((train_samples, step_logprobs, clip_frac))
t2 = time.time()
tf.logging.info(
('{} sec used preparing and enqueuing samples, {}'
' iteration {}, batch {}: {} envs').format(
t2-t1, self.name, i, j, len(batch_envs)))
t1 = time.time()
# Wait for a ckpt that still exist or it is the same
# ckpt (no need to load anything).
while True:
new_ckpt = self.ckpt_queue.get()
new_ckpt_file = new_ckpt + '.meta'
if new_ckpt == current_ckpt or tf.gfile.Exists(new_ckpt_file):
break
t2 = time.time()
tf.logging.info('{} sec waiting {} iteration {}, batch {}'.format(
t2-t1, self.name, i, j))
if new_ckpt != current_ckpt:
# If the ckpt is not the same, then restore the new
# ckpt.
tf.logging.info('{} loading ckpt {}'.format(self.name, new_ckpt))
t1 = time.time()
graph.restore(new_ckpt)
t2 = time.time()
tf.logging.info('{} sec used {} restoring ckpt {}'.format(
t2-t1, self.name, new_ckpt))
current_ckpt = new_ckpt
if FLAGS.log_samples_every_n_epoch > 0 and i % FLAGS.log_samples_every_n_epoch == 0:
f_replay.write(show_samples(replay_samples, envs[0].de_vocab, env_dict))
f_policy.write(show_samples(policy_samples, envs[0].de_vocab, env_dict))
f_train.write(show_samples(train_samples, envs[0].de_vocab, env_dict))
if FLAGS.log_samples_every_n_epoch > 0 and i % FLAGS.log_samples_every_n_epoch == 0:
f_replay.close()
f_policy.close()
f_train.close()
if agent.model.get_global_step() >= FLAGS.n_steps:
if FLAGS.save_replay_buffer_at_end:
all_replay = os.path.join(get_experiment_dir(),
'all_replay_samples_{}.txt'.format(self.name))
with codecs.open(all_replay, 'w', encoding='utf-8') as f:
samples = replay_buffer.all_samples(envs, agent=None)
samples = [s for s in samples if not replay_buffer_copy.contain(s.traj)]
f.write(show_samples(samples, envs[0].de_vocab, None))
tf.logging.info('{} finished'.format(self.name))
return
i += 1
class OracleActor(Actor):
'''
This actor only put oracle examples in the training queue, which means:
1. This actor do not have a replay buffer
2. It does not do exploration
'''
  def __init__(
      self, name, actor_id, shard_ids, ckpt_queue, train_queue, eval_queue, replay_queue):
    """Create an oracle-only actor; all wiring is delegated to Actor.

    The queues connect this actor to the rest of the training system:
    ckpt_queue delivers new checkpoint paths, while train_queue,
    eval_queue and replay_queue carry sample batches out of this actor.
    """
    Actor.__init__(self, name, actor_id, shard_ids, ckpt_queue, train_queue, eval_queue, replay_queue)
    tf.logging.info('actor_{} is oracle actor'.format(actor_id))
def run(self):
agent, all_envs = init_experiment(
[get_train_shard_path(i) for i in self.shard_ids],
use_gpu=FLAGS.actor_use_gpu,
gpu_id=str(self.actor_id + FLAGS.actor_gpu_start_id))
graph = agent.model.graph
current_ckpt = get_init_model_path()
# obtain the oracle of the examples and delete the examples that can not obtain oracle
envs, env_trajs = get_env_trajs(all_envs)
# build a dict to store the oracle trajs
env_oracle_trajs_dict = dict()
for env, env_traj in zip(envs, env_trajs):
env_oracle_trajs_dict[env.name] = env_traj
tf.logging.info('Found oracle for {} envs out of total of {} for actor_{}'.format(len(all_envs), len(envs), self.actor_id))
i = 0
while True:
n_train_samples = 0
n_train_samples += 1
# Make sure that all the samples from the env batch
# fits into one batch for training.
if FLAGS.batch_size < n_train_samples:
raise ValueError(
'One batch have to at least contain samples from one environment.')
env_batch_size = FLAGS.batch_size / n_train_samples
env_iterator = data_utils.BatchIterator(
dict(envs=envs), shuffle=True,
batch_size=env_batch_size)
for j, batch_dict in enumerate(env_iterator):
batch_envs = batch_dict['envs']
tf.logging.info('=' * 50)
tf.logging.info('{} iteration {}, batch {}: {} envs'.format(
self.name, i, j, len(batch_envs)))
t1 = time.time()
# get the oracle samples
oracle_samples = []
for batch_env in batch_envs:
oracle_samples.append(agent_factory.Sample(traj=env_oracle_trajs_dict[batch_env.name], prob=1.0))
self.eval_queue.put((oracle_samples, len(batch_envs)))
self.replay_queue.put((oracle_samples, len(batch_envs)))
assert (FLAGS.fixed_replay_weight >= 0.0 and FLAGS.fixed_replay_weight <= 1.0)
train_samples = []
train_samples += oracle_samples
train_samples = sorted(train_samples, key=lambda x: x.traj.env_name)
tf.logging.info('{} train samples'.format(len(train_samples)))
if FLAGS.use_importance_sampling:
step_logprobs = agent.compute_step_logprobs(
[s.traj for s in train_samples])
else:
step_logprobs = None
# TODO: the clip_factor may be wrong
self.train_queue.put((train_samples, step_logprobs, 0.0))
t2 = time.time()
tf.logging.info(
('{} sec used preparing and enqueuing samples, {}'
' iteration {}, batch {}: {} envs').format(
t2-t1, self.name, i, j, len(batch_envs)))
t1 = time.time()
# Wait for a ckpt that still exist or it is the same
# ckpt (no need to load anything).
while True:
new_ckpt = self.ckpt_queue.get()
new_ckpt_file = new_ckpt + '.meta'
if new_ckpt == current_ckpt or tf.gfile.Exists(new_ckpt_file):
break
t2 = time.time()
tf.logging.info('{} sec waiting {} iteration {}, batch {}'.format(
t2-t1, self.name, i, j))
if new_ckpt != current_ckpt:
# If the ckpt is not the same, then restore the new
# ckpt.
tf.logging.info('{} loading ckpt {}'.format(self.name, new_ckpt))
t1 = time.time()
graph.restore(new_ckpt)
t2 = time.time()
tf.logging.info('{} sec used {} restoring ckpt {}'.format(
| |
pre_avg, post_avg, delta, wait):
librosa.util.peak_pick(x, pre_max, post_max, pre_avg, post_avg, delta, wait)
@pytest.mark.xfail(raises=librosa.ParameterError)
def test_peak_pick_shape_fail():
    """peak_pick must reject multi-dimensional input with ParameterError."""
    # Can't pick peaks on 2d inputs
    librosa.util.peak_pick(np.eye(2), 1, 1, 1, 1, 0.5, 1)
@pytest.mark.xfail(raises=librosa.ParameterError)
@pytest.mark.parametrize("ndim", [3, 4])
def test_sparsify_rows_ndimfail(ndim):
    """sparsify_rows rejects arrays with more than two dimensions."""
    data = np.zeros((2,) * ndim)
    librosa.util.sparsify_rows(data)
@pytest.mark.xfail(raises=librosa.ParameterError)
@pytest.mark.parametrize("quantile", [1.0, -1, 2.0])
@pytest.mark.parametrize("X", [np.ones((3, 3))])
def test_sparsify_rows_badquantile(X, quantile):
    """sparsify_rows rejects quantiles outside the half-open range [0, 1)."""
    librosa.util.sparsify_rows(X, quantile=quantile)
@pytest.mark.parametrize("ndim", [1, 2])
@pytest.mark.parametrize("d", [1, 5, 10, 100])
@pytest.mark.parametrize("q", [0.0, 0.01, 0.25, 0.5, 0.99])
def test_sparsify_rows(ndim, d, q):
    """Sparsified rows keep their nonzeros and at least (1 - q) of their energy."""
    srand()

    dense = np.random.randn(*([d] * ndim)) ** 4
    dense = np.asarray(dense)

    sparse = librosa.util.sparsify_rows(dense, quantile=q)

    if ndim == 1:
        # 1-d input is promoted to a single-row matrix
        dense = dense.reshape((1, -1))

    assert np.allclose(sparse.shape, dense.shape)

    # Surviving entries must agree exactly with the dense input
    densified = np.asarray(sparse.todense())
    for row in range(sparse.shape[0]):
        kept = sparse[row].indices
        assert np.allclose(densified[row, kept], dense[row, kept])

    # Row-wise magnitude: at least a (1 - q) fraction must be retained
    mass_in = np.sum(np.abs(dense), axis=-1)
    mass_out = np.sum(np.abs(densified), axis=-1)
    assert np.all(mass_out >= (1.0 - q) * mass_in)
@pytest.mark.parametrize(
    "searchdir", [os.path.join(os.path.curdir, "tests"), os.path.join(os.path.curdir, "tests", "data")]
)
@pytest.mark.parametrize("ext", [None, "wav", "WAV", ["wav"], ["WAV"]])
@pytest.mark.parametrize("recurse", [True])
@pytest.mark.parametrize("case_sensitive", list({False} | {platform.system() != "Windows"}))
@pytest.mark.parametrize("limit", [None, 1, 2])
@pytest.mark.parametrize("offset", [0, 1, -1])
@pytest.mark.parametrize(
    "output",
    [
        [
            os.path.join(os.path.abspath(os.path.curdir), "tests", "data", s)
            for s in ["test1_22050.mp3", "test1_22050.wav", "test1_44100.wav", "test2_8000.wav"]
        ]
    ],
)
def test_find_files(searchdir, ext, recurse, case_sensitive, limit, offset, output):
    """find_files honors extension filtering, limit, offset and case sensitivity."""
    files = librosa.util.find_files(
        searchdir, ext=ext, recurse=recurse, case_sensitive=case_sensitive, limit=limit, offset=offset
    )
    targets = output
    if ext is not None:
        # If we're only seeking wavs, bump off the mp3 file
        targets = targets[1:]
    # Apply offset first, then limit — mirroring find_files' own ordering
    s1 = slice(offset, None)
    s2 = slice(limit)
    if case_sensitive and ext not in (None, "wav", ["wav"]):
        # A case-sensitive search for upper-case extensions matches nothing
        assert len(files) == 0
    else:
        assert set(files) == set(targets[s1][s2])
def test_find_files_nonrecurse():
    """With recurse=False, files in subdirectories must not be returned."""
    files = librosa.util.find_files(os.path.join(os.path.curdir, "tests"), recurse=False)
    assert len(files) == 0
# fail if ext is not none, we're case-sensitive, and looking for WAV
@pytest.mark.parametrize("ext", ["WAV", ["WAV"]])
def test_find_files_case_sensitive(ext):
    """Upper-case extensions find nothing when matching case-sensitively."""
    files = librosa.util.find_files(os.path.join(os.path.curdir, "tests"), ext=ext, case_sensitive=True)
    # On windows, this test won't work (the filesystem is case-insensitive)
    if platform.system() != "Windows":
        assert len(files) == 0
@pytest.mark.parametrize("x_in", np.linspace(-2, 2, num=6))
@pytest.mark.parametrize("cast", [None, np.floor, np.ceil])
def test_valid_int(x_in, cast):
    """valid_int returns a Python int, flooring when no cast is supplied."""
    result = librosa.util.valid_int(x_in, cast)

    assert isinstance(result, int)
    expected = np.floor(x_in) if cast is None else cast(x_in)
    assert result == int(expected)
@pytest.mark.parametrize("x", np.linspace(-2, 2, num=3))
@pytest.mark.parametrize("cast", [7])
@pytest.mark.xfail(raises=librosa.ParameterError)
def test_valid_int_fail(x, cast):
    """valid_int must raise when `cast` is not callable."""
    # Test with a non-callable cast operator
    librosa.util.valid_int(x, cast)
@pytest.mark.parametrize(
    "ivals", [np.asarray([[0, 1], [1, 2]]), np.asarray([[0, 0], [1, 1]]), np.asarray([[0, 2], [1, 2]])]
)
def test_valid_intervals(ivals):
    """Well-formed (n, 2) interval arrays pass validation without raising."""
    librosa.util.valid_intervals(ivals)
@pytest.mark.xfail(raises=librosa.ParameterError)
@pytest.mark.parametrize(
    "ivals", [np.asarray([]), np.arange(2), np.ones((2, 2, 2)), np.ones((2, 3))]  # ndim=0 # ndim=1 # ndim=3
)  # ndim=2, shape[1] != 2
def test_valid_intervals_badshape(ivals):
    """Intervals of the wrong rank or width must be rejected."""
    # fail if ndim != 2 or shape[1] != 2
    librosa.util.valid_intervals(ivals)
@pytest.mark.xfail(raises=librosa.ParameterError)
@pytest.mark.parametrize("intval", [np.asarray([[0, 1], [2, 1]])])
def test_valid_intervals_fail(intval):
    """Intervals whose end precedes their start must be rejected."""
    # Test for issue #712: intervals must have non-negative duration
    librosa.util.valid_intervals(intval)
def test_warning_deprecated():
    """@deprecated warns with DeprecationWarning yet preserves the return value."""

    @librosa.util.decorators.deprecated("old_version", "new_version")
    def __dummy():
        return True

    with warnings.catch_warnings(record=True) as caught:
        result = __dummy()

    # The wrapped function's value passes through unchanged
    assert result is True
    # The deprecation machinery fired with the right category
    assert len(caught) > 0
    assert caught[0].category is DeprecationWarning
    # And the message says (roughly) the right thing
    assert "deprecated" in str(caught[0].message).lower()
def test_warning_moved():
    """@moved warns with DeprecationWarning yet preserves the return value."""

    @librosa.util.decorators.moved("from", "old_version", "new_version")
    def __dummy():
        return True

    with warnings.catch_warnings(record=True) as caught:
        result = __dummy()

    # The wrapped function's value passes through unchanged
    assert result is True
    # The relocation warning fired with the right category
    assert len(caught) > 0
    assert caught[0].category is DeprecationWarning
    # And the message says (roughly) the right thing
    assert "moved" in str(caught[0].message).lower()
def test_warning_rename_kw_pass():
    """rename_kw silently returns the new value when the old one is Deprecated."""
    warnings.resetwarnings()
    warnings.simplefilter("always")

    old_value = librosa.util.Deprecated()
    new_value = 23

    with warnings.catch_warnings(record=True) as caught:
        result = librosa.util.rename_kw("old", old_value, "new", new_value, "0", "1")

    assert result == new_value
    # No warning should have been issued
    assert len(caught) == 0
def test_warning_rename_kw_fail():
    """rename_kw keeps an explicitly-passed old value and warns about the rename."""
    warnings.resetwarnings()
    warnings.simplefilter("always")

    old_value = 27
    new_value = 23

    with warnings.catch_warnings(record=True) as caught:
        result = librosa.util.rename_kw("old", old_value, "new", new_value, "0", "1")

    # The old (explicit) value wins
    assert result == old_value
    # A rename warning fired with the right category and message
    assert len(caught) > 0
    assert caught[0].category is DeprecationWarning
    assert "renamed" in str(caught[0].message).lower()
@pytest.mark.parametrize("idx", [np.arange(10, 90, 10), np.arange(10, 90, 15)])
@pytest.mark.parametrize("idx_min", [None, 5, 15])
@pytest.mark.parametrize("idx_max", [None, 85, 100])
@pytest.mark.parametrize("step", [None, 2])
@pytest.mark.parametrize("pad", [False, True])
def test_index_to_slice(idx, idx_min, idx_max, step, pad):
    """index_to_slice yields consecutive [idx[k], idx[k+1]) slices within bounds."""
    slices = librosa.util.index_to_slice(idx, idx_min=idx_min, idx_max=idx_max, step=step, pad=pad)
    if pad:
        # With padding, boundary slices are clamped to idx_min/idx_max;
        # strip them so the core comparison below sees only interior slices
        if idx_min is not None:
            assert slices[0].start == idx_min
            if idx.min() != idx_min:
                slices = slices[1:]
        if idx_max is not None:
            assert slices[-1].stop == idx_max
            if idx.max() != idx_max:
                slices = slices[:-1]
    # Restrict the reference indices the same way index_to_slice does
    if idx_min is not None:
        idx = idx[idx >= idx_min]
    if idx_max is not None:
        idx = idx[idx <= idx_max]
    idx = np.unique(idx)
    # Each remaining slice spans exactly one consecutive pair of indices
    assert len(slices) == len(idx) - 1
    for sl, start, stop in zip(slices, idx, idx[1:]):
        assert sl.start == start
        assert sl.stop == stop
        assert sl.step == step
@pytest.mark.parametrize("aggregate", [None, np.mean, np.sum])
@pytest.mark.parametrize("ndim,axis", [(1, 0), (1, -1), (2, 0), (2, 1), (2, -1), (3, 0), (3, 2), (3, -1)])
def test_sync(aggregate, ndim, axis):
    """sync() aggregates data over slices along `axis` and preserves other axes.

    Fixes: ``np.float`` was removed in NumPy 1.24 (use the builtin ``float``),
    and multi-dimensional indexing with a *list* of slices was removed in
    NumPy 1.23 — index with a tuple instead.
    """
    data = np.ones([6] * ndim, dtype=float)

    # Make some slices that don't fill the entire dimension
    slices = [slice(1, 3), slice(3, 4)]
    dsync = librosa.util.sync(data, slices, aggregate=aggregate, axis=axis)

    # Check the axis shapes
    assert dsync.shape[axis] == len(slices)

    s_test = list(dsync.shape)
    del s_test[axis]
    s_orig = list(data.shape)
    del s_orig[axis]
    assert s_test == s_orig

    # The first slice covers two samples: sums to 2, has mean 1
    idx = [slice(None)] * ndim
    idx[axis] = 0
    if aggregate is np.sum:
        assert np.allclose(dsync[tuple(idx)], 2)
    else:
        assert np.allclose(dsync[tuple(idx)], 1)

    # The second slice covers one sample: sums to 1, has mean 1
    idx[axis] = 1
    assert np.allclose(dsync[tuple(idx)], 1)
@pytest.mark.parametrize("aggregate", [np.mean, np.max])
def test_sync_slices(aggregate):
    """Slice-based sync reduces each two-sample window with the aggregator."""
    data = np.arange(8, dtype=float)
    windows = [slice(0, 2), slice(2, 4), slice(4, 6), slice(6, 8)]
    synced = librosa.util.sync(data, windows, aggregate=aggregate)

    expected = {id(np.mean): [0.5, 2.5, 4.5, 6.5], id(np.max): [1, 3, 5, 7]}
    # Any aggregator outside the parametrized pair is a test bug
    assert id(aggregate) in expected
    assert np.allclose(synced, expected[id(aggregate)])
@pytest.mark.parametrize("aggregate", [np.mean, np.max])
@pytest.mark.parametrize("atype", [list, np.asarray])
def test_sync_frames(aggregate, atype):
    """Frame-boundary sync matches the equivalent slice-based aggregation."""
    data = np.arange(8, dtype=float)
    boundaries = atype([0, 2, 4, 6, 8])
    synced = librosa.util.sync(data, boundaries, aggregate=aggregate)

    expected = {id(np.mean): [0.5, 2.5, 4.5, 6.5], id(np.max): [1, 3, 5, 7]}
    # Any aggregator outside the parametrized pair is a test bug
    assert id(aggregate) in expected
    assert np.allclose(synced, expected[id(aggregate)])
@pytest.mark.parametrize("atype", [list, np.asarray])
@pytest.mark.parametrize("pad", [False, True])
def test_sync_frames_pad(atype, pad):
    """pad=True extends the first/last frames to cover the whole signal."""
    data = np.arange(8, dtype=float)
    boundaries = atype([2, 4, 6])
    synced = librosa.util.sync(data, boundaries, pad=pad)

    expected = [0.5, 2.5, 4.5, 6.5] if pad else [2.5, 4.5]
    assert np.allclose(synced, expected)
@pytest.mark.parametrize("data", [np.mod(np.arange(135), 5)])
@pytest.mark.parametrize("idx", [["foo", "bar"], [None], [slice(None), None]])
@pytest.mark.xfail(raises=librosa.ParameterError)
def test_sync_fail(data, idx):
    """sync must reject index lists that are neither ints nor slices."""
    librosa.util.sync(data, idx)
@pytest.mark.parametrize("power", [1, 2, 50, 100, np.inf])
@pytest.mark.parametrize("split_zeros", [False, True])
def test_softmask(power, split_zeros):
    """softmask stays within [0, 1]; all-zero rows follow split_zeros."""
    srand()

    spec = np.abs(np.random.randn(10, 10))
    spec_ref = np.abs(np.random.randn(10, 10))

    # Force one all-zero row in both inputs
    spec[3, :] = 0
    spec_ref[3, :] = 0

    mask = librosa.util.softmask(spec, spec_ref, power=power, split_zeros=split_zeros)

    assert np.all(0 <= mask) and np.all(mask <= 1)

    if split_zeros and np.isfinite(power):
        # Zero rows split the mass evenly
        assert np.allclose(mask[3, :], 0.5)
    else:
        # Otherwise zero rows are masked out entirely
        assert not np.any(mask[3, :]), mask[3]
def test_softmask_int():
    """Complementary soft masks over integer inputs must sum to one."""
    spec = np.full((3, 3), 2, dtype=np.int32)
    spec_ref = np.vander(np.arange(3))

    mask_a = librosa.util.softmask(spec, spec_ref, power=1)
    mask_b = librosa.util.softmask(spec_ref, spec, power=1)

    assert np.allclose(mask_a + mask_b, 1)
@pytest.mark.parametrize(
    "x,x_ref,power,split_zeros",
    [
        (-np.ones(3), np.ones(3), 1, False),
        (np.ones(3), -np.ones(3), 1, False),
        (np.ones(3), np.ones(4), 1, False),
        (np.ones(3), np.ones(3), 0, False),
        (np.ones(3), np.ones(3), -1, False),
    ],
)
@pytest.mark.xfail(raises=librosa.ParameterError)
def test_softmask_fail(x, x_ref, power, split_zeros):
    """softmask must reject negative inputs, shape mismatches and power <= 0."""
    librosa.util.softmask(x, x_ref, power=power, split_zeros=split_zeros)
@pytest.mark.parametrize(
    "x,value",
    [
        (1, np.finfo(np.float32).tiny),
        (np.ones(3, dtype=int), np.finfo(np.float32).tiny),
        (np.ones(3, dtype=np.float32), np.finfo(np.float32).tiny),
        (1.0, np.finfo(np.float64).tiny),
        (np.ones(3, dtype=np.float64), np.finfo(np.float64).tiny),
        (1j, np.finfo(np.complex128).tiny),
        (np.ones(3, dtype=np.complex64), np.finfo(np.complex64).tiny),
        (np.ones(3, dtype=np.complex128), np.finfo(np.complex128).tiny),
    ],
)
def test_tiny(x, value):
    """tiny() returns the smallest positive normal for the matching float width."""
    assert value == librosa.util.tiny(x)
def test_util_fill_off_diagonal_8_8():
    """fill_off_diagonal on a square matrix zeroes everything outside the band."""
    # Case 1: Square matrix (N=M)
    banded = np.ones((8, 8))
    librosa.util.fill_off_diagonal(banded, 0.25)

    # Expected result: ones only on the main, first super- and sub-diagonals
    expected = np.eye(8) + np.eye(8, k=1) + np.eye(8, k=-1)

    assert np.array_equal(banded, expected)
    # Square case is symmetric
    assert np.array_equal(banded, expected.T)
def test_util_fill_off_diagonal_8_12():
# Case 2a: N!=M
mut_x = np.ones((8, 12))
librosa.util.fill_off_diagonal(mut_x, 0.25)
gt_x = np.array(
[
[1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 1, 1, 1, | |
## @ingroup Components-Energy-Networks
# Lift_Cruise.py
#
# Created: Jan 2016, <NAME>
# Modified: Mar 2020, <NAME>
# Apr 2021, <NAME>
# Jul 2021, <NAME>
# Jul 2021, <NAME>
# Aug 2021, <NAME>
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
# suave imports
import SUAVE
# package imports
import numpy as np
from SUAVE.Core import Units, Data
from .Network import Network
from SUAVE.Analyses.Mission.Segments.Conditions import Residuals
from SUAVE.Components.Physical_Component import Container
from SUAVE.Methods.Power.Battery.pack_battery_conditions import pack_battery_conditions
from SUAVE.Methods.Power.Battery.append_initial_battery_conditions import append_initial_battery_conditions
# ----------------------------------------------------------------------
# Lift_Forward
# ----------------------------------------------------------------------
## @ingroup Components-Energy-Networks
class Lift_Cruise(Network):
""" This is a complex version of battery_propeller with a battery powering propellers through
electric motors. In this case we have 2 sets of motors at different motors that can be controlled seperately
This network adds 2 extra unknowns to the mission. The first is
a voltage, to calculate the thevenin voltage drop in the pack.
The second is torque matching between motor and propeller.
We have two inputs, the forward throttle and the lift throttle setting
Since this is an airplane first and foremost, the "throttle" will be for forward thrust
The new unknown will be for lift throttle, because of the assumption on throttle something needs to be done...
Want only 1 residual on voltage
Assumptions:
For any segment using this, body angle can't be an unknown.
Source:
None
"""
    def __defaults__(self):
        """Set the network's default attribute values.

        Assumptions:
        None

        Source:
        N/A

        Inputs:
        None

        Outputs:
        None

        Properties Used:
        N/A
        """
        # Independently-controlled motor/rotor groups for lift and cruise
        self.lift_rotor_motors            = Container()
        self.propeller_motors             = Container()
        self.lift_rotors                  = Container()
        self.propellers                   = Container()
        # One electronic speed controller per group
        self.lift_rotor_esc               = None
        self.propeller_esc                = None
        self.avionics                     = None
        self.payload                      = None
        self.battery                      = None
        self.lift_rotor_engine_length     = None
        self.propeller_engine_length      = None
        self.number_of_lift_rotor_engines = 0
        self.number_of_propeller_engines  = 0
        # Pack voltage; supplied by the vehicle definition
        self.voltage                      = None
        # Collective pitch commands, zero by default
        # NOTE(review): units presumably radians per SUAVE convention — confirm
        self.propeller_pitch_command      = 0.0
        self.lift_rotor_pitch_command     = 0.0
        self.tag                          = 'Lift_Cruise'
        self.generative_design_minimum    = 0
        # When True, evaluate one rotor/motor and scale results by the count
        self.identical_propellers         = True
        self.identical_lift_rotors        = True
        pass
def evaluate_thrust(self,state):
""" Calculate thrust given the current state of the vehicle
Assumptions:
Caps the throttle at 110% and linearly interpolates thrust off that
Source:
N/A
Inputs:
state [state()]
Outputs:
results.thrust_force_vector [Newtons]
results.vehicle_mass_rate [kg/s]
conditions.propulsion:
lift_rotor_rpm [radians/sec]
rpm _forward [radians/sec]
lift_rotor_current_draw [amps]
propeller_current_draw [amps]
battery_power_draw [watts]
battery_energy [joules]
voltage_open_circuit [volts]
voltage_under_load [volts]
lift_rotor_motor_torque [N-M]
propeller_motor_torque [N-M]
lift_rotor_thrust [N]
propeller_thrust [N]
lift_rotor_torque [N-M]
propeller_torque [N-M]
Properties Used:
Defaulted values
"""
# unpack
conditions = state.conditions
numerics = state.numerics
lift_rotor_motors = self.lift_rotor_motors
propeller_motors = self.propeller_motors
lift_rotors = self.lift_rotors
propellers = self.propellers
lift_rotor_esc = self.lift_rotor_esc
propeller_esc = self.propeller_esc
avionics = self.avionics
payload = self.payload
battery = self.battery
num_lift = self.number_of_lift_rotor_engines
num_forward = self.number_of_propeller_engines
#-----------------------------------------------------------------
# SETUP BATTERIES AND ESC's
#-----------------------------------------------------------------
# Set battery energy
battery.current_energy = conditions.propulsion.battery_energy
battery.pack_temperature = conditions.propulsion.battery_pack_temperature
battery.cell_charge_throughput = conditions.propulsion.battery_cell_charge_throughput
battery.age = conditions.propulsion.battery_cycle_day
battery_discharge_flag = conditions.propulsion.battery_discharge_flag
battery.R_growth_factor = conditions.propulsion.battery_resistance_growth_factor
battery.E_growth_factor = conditions.propulsion.battery_capacity_fade_factor
battery.max_energy = conditions.propulsion.battery_max_aged_energy
n_series = battery.pack_config.series
n_parallel = battery.pack_config.parallel
# update ambient temperature based on altitude
battery.ambient_temperature = conditions.freestream.temperature
battery.cooling_fluid.thermal_conductivity = conditions.freestream.thermal_conductivity
battery.cooling_fluid.kinematic_viscosity = conditions.freestream.kinematic_viscosity
battery.cooling_fluid.prandtl_number = conditions.freestream.prandtl_number
battery.cooling_fluid.density = conditions.freestream.density
battery.ambient_pressure = conditions.freestream.pressure
a = conditions.freestream.speed_of_sound
# Predict voltage based on battery
volts = battery.compute_voltage(state)
# --------------------------------------------------------------------------------
# Run Motor, Avionics and Systems (Discharge Model)
# --------------------------------------------------------------------------------
if battery_discharge_flag:
# ESC Voltage
lift_rotor_esc.inputs.voltagein = volts
propeller_esc.inputs.voltagein = volts
#---------------------------------------------------------------
# EVALUATE THRUST FROM FORWARD PROPULSORS
#---------------------------------------------------------------
# Throttle the voltage
propeller_esc.voltageout(conditions)
# How many evaluations to do
if self.identical_propellers:
n_evals = 1
factor = num_forward*1
else:
n_evals = int(num_forward)
factor = 1.
# Setup numbers for iteration
total_prop_motor_current = 0.
total_prop_thrust = 0. * state.ones_row(3)
total_prop_power = 0.
# Iterate over motor/props
for ii in range(n_evals):
# Unpack the motor and props
motor_key = list(propeller_motors.keys())[ii]
prop_key = list(propellers.keys())[ii]
motor = self.propeller_motors[motor_key]
prop = self.propellers[prop_key]
# link
motor.inputs.voltage = propeller_esc.outputs.voltageout
motor.inputs.propeller_CP = np.atleast_2d(conditions.propulsion.propeller_power_coefficient[:,ii]).T
# Run the motor
motor.omega(conditions)
# link
prop.inputs.omega = motor.outputs.omega
prop.inputs.pitch_command = self.propeller_pitch_command
# Run the propeller
F_forward, Q_forward, P_forward, Cp_forward, outputs_forward, etap_forward = prop.spin(conditions)
# Check to see if magic thrust is needed, the ESC caps throttle at 1.1 already
eta = conditions.propulsion.throttle[:,0,None]
P_forward[eta>1.0] = P_forward[eta>1.0]*eta[eta>1.0]
F_forward[eta[:,0]>1.0,:] = F_forward[eta[:,0]>1.0,:]*eta[eta[:,0]>1.0,:]
# Run the motor for current
_, etam_prop = motor.current(conditions)
# Conditions specific to this instantation of motor and propellers
R = prop.tip_radius
rpm = motor.outputs.omega / Units.rpm
F_mag = np.atleast_2d(np.linalg.norm(F_forward, axis=1)).T
total_prop_thrust = total_prop_thrust + F_forward * factor
total_prop_power = total_prop_power + P_forward * factor
total_prop_motor_current = total_prop_motor_current + factor*motor.outputs.current
# Pack specific outputs
conditions.propulsion.propeller_motor_torque[:,ii] = motor.outputs.torque[:,0]
conditions.propulsion.propeller_torque[:,ii] = Q_forward[:,0]
conditions.propulsion.propeller_rpm[:,ii] = rpm[:,0]
conditions.propulsion.propeller_thrust[:,ii] = np.linalg.norm(total_prop_thrust ,axis = 1)
conditions.propulsion.propeller_tip_mach[:,ii] = (R*rpm[:,0]*Units.rpm)/a[:,0]
conditions.propulsion.propeller_disc_loading[:,ii] = (F_mag[:,0])/(np.pi*(R**2)) # N/m^2
conditions.propulsion.propeller_power_loading[:,ii] = (F_mag[:,0])/(P_forward[:,0]) # N/W
conditions.propulsion.propeller_efficiency[:,ii] = etap_forward[:,0]
conditions.propulsion.propeller_motor_efficiency[:,ii] = etam_prop[:,0]
if n_evals==1:
# Append outputs to each identical propeller
for i,p in enumerate(propellers):
conditions.noise.sources.propellers[p.tag] = outputs_forward
else:
conditions.noise.sources.propellers[prop.tag] = outputs_forward
# link
propeller_esc.inputs.currentout = total_prop_motor_current
# Run the esc
propeller_esc.currentin(conditions)
#-------------------------------------------------------------------
# EVALUATE THRUST FROM LIFT PROPULSORS
#-------------------------------------------------------------------
# Make a new set of konditions, since there are differences for the esc and motor
konditions = SUAVE.Analyses.Mission.Segments.Conditions.Aerodynamics()
konditions._size = conditions._size
konditions.propulsion = Data()
konditions.freestream = Data()
konditions.frames = Data()
konditions.frames.inertial = Data()
konditions.frames.body = Data()
konditions.propulsion.throttle = conditions.propulsion.throttle_lift* 1.
konditions.propulsion.propeller_power_coefficient = conditions.propulsion.lift_rotor_power_coefficient * 1.
konditions.freestream.density = conditions.freestream.density * 1.
konditions.freestream.velocity = conditions.freestream.velocity * 1.
konditions.freestream.dynamic_viscosity = conditions.freestream.dynamic_viscosity * 1.
konditions.freestream.speed_of_sound = conditions.freestream.speed_of_sound *1.
konditions.freestream.temperature = conditions.freestream.temperature * 1.
konditions.freestream.altitude = conditions.freestream.altitude * 1.
konditions.frames.inertial.velocity_vector = conditions.frames.inertial.velocity_vector *1.
konditions.frames.body.transform_to_inertial = conditions.frames.body.transform_to_inertial
# Throttle the voltage
lift_rotor_esc.voltageout(konditions)
# How many evaluations to do
if self.identical_lift_rotors:
n_evals = 1
factor = num_lift*1
else:
n_evals = int(num_lift)
factor = 1.
# Setup numbers for iteration
total_lift_rotor_motor_current = 0.
total_lift_rotor_thrust = 0. * state.ones_row(3)
total_lift_rotor_power = 0.
# Iterate over motor/lift_rotors
for ii in range(n_evals):
# Unpack the motor and props
motor_key = list(lift_rotor_motors.keys())[ii]
lift_rotor_key = list(lift_rotors.keys())[ii]
lift_rotor_motor = self.lift_rotor_motors[motor_key]
lift_rotor = self.lift_rotors[lift_rotor_key]
# link
lift_rotor_motor.inputs.voltage = lift_rotor_esc.outputs.voltageout
lift_rotor_motor.inputs.propeller_CP = np.atleast_2d(conditions.propulsion.lift_rotor_power_coefficient[:,ii]).T
# Run the motor
lift_rotor_motor.omega(konditions)
# link
lift_rotor.inputs.omega = lift_rotor_motor.outputs.omega
lift_rotor.inputs.pitch_command = self.lift_rotor_pitch_command
# Run the propeller
F_lift, Q_lift, P_lift, Cp_lift, outputs_lift, etap_lift = lift_rotor.spin(konditions)
# Check to see if magic thrust is needed, the ESC caps throttle at 1.1 already
eta = conditions.propulsion.throttle_lift[:,0,None]
P_lift[eta>1.0] = P_lift[eta>1.0]*eta[eta>1.0]
F_forward[eta[:,0]>1.0,:] = F_lift[eta[:,0]>1.0,:]*eta[eta[:,0]>1.0,:]
# Run the motor for current
_, etam_lift_rotor =lift_rotor_motor.current(konditions)
# Conditions specific to this instantation of motor and propellers
R = lift_rotor.tip_radius
rpm = lift_rotor_motor.outputs.omega / Units.rpm
F_mag = np.atleast_2d(np.linalg.norm(F_lift, axis=1)).T
total_lift_rotor_thrust = total_lift_rotor_thrust + F_lift * factor
total_lift_rotor_power = total_lift_rotor_power + P_lift * factor
total_lift_rotor_motor_current = total_lift_rotor_motor_current + factor*lift_rotor_motor.outputs.current
# Pack specific outputs
conditions.propulsion.lift_rotor_motor_torque[:,ii] = lift_rotor_motor.outputs.torque[:,0]
conditions.propulsion.lift_rotor_torque[:,ii] = Q_lift[:,0]
conditions.propulsion.lift_rotor_rpm[:,ii] = rpm[:,0]
conditions.propulsion.lift_rotor_thrust[:,ii] = np.linalg.norm(total_lift_rotor_thrust ,axis = 1)
conditions.propulsion.lift_rotor_tip_mach[:,ii] = (R*rpm[:,0]*Units.rpm)/a[:,0]
conditions.propulsion.lift_rotor_disc_loading[:,ii] = (F_mag[:,0])/(np.pi*(R**2)) # N/m^2
conditions.propulsion.lift_rotor_power_loading[:,ii] = (F_mag[:,0])/(P_lift[:,0]) # N/W
conditions.propulsion.lift_rotor_efficiency[:,ii] = etap_lift[:,0]
conditions.propulsion.lift_rotor_motor_efficiency[:,ii] = etam_lift_rotor[:,0]
if n_evals==1:
# Append outputs to each identical propeller
for i,p in enumerate(lift_rotors):
conditions.noise.sources.lift_rotors[p.tag] = outputs_lift
else:
conditions.noise.sources.lift_rotors[prop.tag] = outputs_lift
# link
lift_rotor_esc.inputs.currentout = lift_rotor_motor.outputs.current
# Run the lift_rotor esc
lift_rotor_esc.currentin(konditions)
# Run the avionics
avionics.power()
# Run the payload
payload.power()
# Calculate avionics and payload power
avionics_payload_power = avionics.outputs.power + payload.outputs.power
# Calculate avionics and payload current
i_avionics_payload = avionics_payload_power/volts
# Add up the power usages
i_lift = lift_rotor_esc.outputs.currentin
i_forward = propeller_esc.outputs.currentin
current_total = i_lift + i_forward + i_avionics_payload
power_total = current_total * volts
battery.inputs.current = current_total
battery.inputs.power_in = - power_total
# Run the battery
battery.energy_calc(numerics,battery_discharge_flag)
# --------------------------------------------------------------------------------
# Run Charge Model
# --------------------------------------------------------------------------------
else:
# | |
sort1[1]:
sort_again = sorted(fwd, key=lambda i: i["sort2"])
sort_again.insert(1, compound_date_link)
return sort_again
elif order == ["bc", "bc"]:
rev = sorted(final, key=lambda i: i["sort1"], reverse=True)
sort_again = rev
if sort1[0] == sort1[1]:
sort_again = sorted(rev, key=lambda i: i["sort2"])
sort_again.insert(1, compound_date_link)
return sort_again
elif order == ["ad", "bc"]:
right = [final[1], final[0]]
right.insert(1, compound_date_link)
return right
elif order == ["bc", "ad"]:
final.insert(1, compound_date_link)
return final
def make_date_string(final):
def concat_parts(
prefix1="", year1="0000", month1="00", day1="00", suffix1="",
link="", prefix2="", year2="", month2="", day2="", suffix2=""):
date_string = "{}-{}-{}-{}-{}-{}-{}-{}-{}-{}-{}".format(
prefix1, year1, month1, day1, suffix1,
link, prefix2, year2, month2, day2, suffix2)
return date_string
comp1 = final[0]
link = final[1]
comp2 = final[2]
prefix1 = comp1.get("prefix", "")
year1 = comp1.get("year", "")
month1 = comp1.get("month", "")
day1 = comp1.get("day", "")
suffix1 = comp1.get("suffix", "")
if len(link) == 0:
return concat_parts(prefix1, year1, month1, day1, suffix1)
link = link
prefix2 = comp2.get("prefix", "")
year2 = comp2.get("year", "")
month2 = comp2.get("month", "")
day2 = comp2.get("day", "")
suffix2 = comp2.get("suffix", "")
return concat_parts(
prefix1, year1, month1, day1, suffix1,
link, prefix2, year2, month2, day2, suffix2)
def format_stored_date(stored_date, date_prefs=date_prefs):
    ''' Render a stored date string for display. Also used in events_table.py.

    `stored_date` is the 11-field hyphen-joined value written by
    make_date_string(): prefix1-year1-month1-day1-suffix1-link-prefix2-
    year2-month2-day2-suffix2. The sentinel "-0000-00-00-------" means
    "no date stored" and renders as "".

    `date_prefs` supplies the user's display preferences (the default is
    bound to the module-level value at import time):
      [0] format code: "md" or "dm" for month/day order, optionally with
          "abb"/"dot" selecting the month style,
      [1] about, [2] estimated, [3] calculated prefixes,
      [4] "before/after", [5] "BC/AD", [6] "OS/NS" ('/'-separated pairs),
      [7] span link, [8] range link (each stored as "preprefix_link").
    '''
    if stored_date == "-0000-00-00-------":
        return ""
    dateform = date_prefs[0]
    # Display fragments, filled in while walking the stored fields
    formatted_date = ""
    preprefix = ""
    prefix1 = ""
    year1 = ""
    month1 = ""
    day1 = ""
    suffix1 = ""
    link = ""
    prefix2 = ""
    year2 = ""
    month2 = ""
    day2 = ""
    suffix2 = ""
    span = False
    ranje = False
    compound = False
    parts = stored_date.split("-")
    # A 'to'/'and' token in the link slot marks a two-part (compound) date
    if 'to' in parts:
        span = True
        compound = True
    elif 'and' in parts:
        ranje = True
        compound = True
    y = 0
    # y indexes the stored field: 0-4 first date, 5 link, 6-10 second date
    for part in parts:
        if len(part) == 0:
            pass
        elif y in (0, 6):
            # Prefix slots: translate the stored token to display text
            part = find_prefix(part, date_prefs)
            if y == 0:
                prefix1 = part
            elif y == 6:
                prefix2 = part
        elif y in (1, 7):
            # Year slots: drop zero padding
            part = part.lstrip("0")
            if y == 1:
                year1 = part
            elif y == 7:
                year2 = part
        elif y in (2, 8):
            # Month slots: convert "01".."12" per the chosen month style
            part = convert_month(part, dateform)
            if y == 2:
                month1 = part
            elif y == 8:
                month2 = part
        elif y in (3, 9):
            # Day slots: drop zero padding
            part = part.lstrip("0")
            if y == 3:
                day1 = part
            elif y == 9:
                day2 = part
        elif y in (4, 10):
            # Suffix slots (BC/AD, OS/NS)
            part = find_suffix(part, date_prefs)
            if y == 4:
                suffix1 = part
            elif y == 10:
                suffix2 = part
        elif y == 5:
            # Link slot: pick the span or range connector, else stop early
            if compound is False:
                break
            if span is True:
                part = date_prefs[7].split("_")
                preprefix = part[0]
                link = part[1]
            elif ranje is True:
                part = date_prefs[8].split("_")
                preprefix = part[0]
                link = part[1]
        y += 1
    # Years of 3+ digits drop an AD-style suffix (e.g. "1900 AD" -> "1900").
    # NOTE(review): int(year) would raise on an empty year string here;
    # assumes a year is always stored alongside an AD suffix — confirm upstream.
    t = 0
    for tup in ((suffix1, year1), (suffix2, year2)):
        suffix = tup[0]
        year = tup[1]
        if suffix in AD:
            if int(year) > 99:
                suffix = ""
            if t == 0:
                suffix1 = suffix
            elif t == 1:
                suffix2 = suffix
        t += 1
    # Candidate word orders; month-first formats put a comma after the day
    month_first_commas2 = (
        preprefix, prefix1, month1, day1 + ",", year1, suffix1,
        link, prefix2, month2, day2 + ",", year2, suffix2)
    month_first_comma_a = (
        preprefix, prefix1, month1, day1 + ",", year1, suffix1,
        link, prefix2, month2, day2, year2, suffix2)
    month_first_comma_b = (
        preprefix, prefix1, month1, day1, year1, suffix1,
        link, prefix2, month2, day2 + ",", year2, suffix2)
    month_first_no_comma = (
        preprefix, prefix1, month1, day1, year1, suffix1,
        link, prefix2, month2, day2, year2, suffix2)
    day_first = (
        preprefix, prefix1, day1, month1, year1, suffix1,
        link, prefix2, day2, month2, year2, suffix2)
    len1 = len(day1)
    len2 = len(day2)
    # NOTE(review): if dateform contains neither "dm" nor "md", `order` is
    # never bound and the format() call below raises — confirm dateform is
    # validated before storage.
    if "dm" in dateform:
        order = day_first
    elif "md" in dateform:
        if compound is True:
            # Comma placement depends on which components carry a day
            if len1 > 0 and len2 > 0:
                order = month_first_commas2
            elif len1 > 0 and len2 == 0:
                order = month_first_comma_a
            elif len1 == 0 and len2 > 0:
                order = month_first_comma_b
            else:
                order = month_first_no_comma
        else:
            if len1 > 0:
                order = month_first_comma_a
            else:
                order = month_first_no_comma
    formatted_date = "{} {} {} {} {} {} {} {} {} {} {} {}".format(*order)
    # Collapse the runs of spaces left by empty fields
    formatted_date = " ".join(formatted_date.split())
    return formatted_date
def find_prefix(part, date_prefs):
    """Return the user-configured display prefix for a date qualifier token.

    'abt', 'est' and 'cal' map to single preference slots; 'bef'/'aft'
    share one slash-delimited slot. Anything else gets no prefix.
    """
    slot_by_token = {'abt': 1, 'est': 2, 'cal': 3}
    if part in slot_by_token:
        return date_prefs[slot_by_token[part]]
    if part in ('bef', 'aft'):
        # before/after are stored together as e.g. "before/after"
        bef_aft = date_prefs[4].split("/")
        return bef_aft[0] if part == 'bef' else bef_aft[1]
    return ""
def find_suffix(part, date_prefs):
    """Return the user-configured display suffix for an epoch/calendar token.

    'bc'/'ad' share the slash-delimited slot date_prefs[5]; 'os'/'ns'
    share date_prefs[6]. Any other token yields "".

    Bug fixes vs. the original:
    - `part in ("bc, ad")` was a substring test against the single string
      "bc, ad" (so "" matched and `suffix` was never assigned ->
      UnboundLocalError); now a proper tuple membership test.
    - The os/ns branch read the undefined name `bc_ad` (NameError); it now
      splits and reads `os_ns`.
    """
    suffix = ""
    if part in ("bc", "ad"):
        bc_ad = date_prefs[5].split("/")
        suffix = bc_ad[0] if part == "bc" else bc_ad[1]
    elif part in ("os", "ns"):
        os_ns = date_prefs[6].split("/")
        suffix = os_ns[0] if part == "os" else os_ns[1]
    return suffix
def convert_month(part, dateform):
    """Convert a stored month token to its display form per the date format.

    The format string selects which column of MONTH_CONVERSIONS to use:
    index 0 (full name) by default, 1 when 'abb' (abbreviated) appears in
    dateform, 2 when 'dot' (abbreviated with dot) appears.

    Returns "" when the token is not a known month.
    """
    idx = 0
    if 'abb' in dateform:
        idx = 1
    elif 'dot' in dateform:
        idx = 2
    # Direct dict lookup replaces the original linear scan over .items().
    entry = MONTH_CONVERSIONS.get(part)
    month = entry[idx] if entry is not None else ""
    return month
class DatePreferences(Frame):
    def __init__(self, master, *args, **kwargs):
        """Build the date-preferences tab: demo entries on top, preference combos below."""
        Frame.__init__(self, master, *args, **kwargs)
        self.master = master
        # maps preference heading (e.g. 'General') -> its Combobox widget
        self.prefcombos = {}
        self.make_widgets_top()
        self.make_widgets_bottom()
    def revert_to_default(self):
        """Reset the stored date-format rows to the shipped defaults and clear the combos."""
        current_file = get_current_file()[0]
        conn = sqlite3.connect(current_file)
        conn.execute('PRAGMA foreign_keys = 1')
        cur = conn.cursor()
        # wipe the current format rows, then re-insert the defaults
        cur.execute(delete_date_format_all)
        conn.commit()
        cur.execute(insert_date_format_default)
        conn.commit()
        cur.close()
        conn.close()
        # empty every preference combo so stale selections aren't resubmitted
        for combo in self.prefcombos.values():
            combo.entry.delete(0, 'end')
    def get_new_date_prefs(self):
        """Collect every non-empty combo value and hand the batch to set_new_date_prefs.

        For the 'General', 'From...To...' and 'Between...And...' combos, the
        human-readable selection is translated to its stored code via the
        corresponding *_FORMAT_LOOKUP table before being persisted.
        """
        date_form = None
        est_form = None
        abt_form = None
        cal_form = None
        befaft_form = None
        epoch_form = None
        julegreg_form = None
        span_form = None
        range_form = None
        for combo in self.prefcombos.values():
            if len(combo.entry.get()) != 0:
                var_form = combo.entry.get()
                if combo == self.prefcombos['General']:
                    date_form = var_form
                    # translate display text to the stored format code
                    for k,v in DATE_FORMAT_LOOKUP.items():
                        if date_form == k:
                            date_form = v
                elif combo == self.prefcombos['Estimated']:
                    est_form = var_form
                elif combo == self.prefcombos['Approximate']:
                    abt_form = var_form
                elif combo == self.prefcombos['Calculated']:
                    cal_form = var_form
                elif combo == self.prefcombos['Before/After']:
                    befaft_form = var_form
                elif combo == self.prefcombos['Epoch']:
                    epoch_form = var_form
                elif combo == self.prefcombos['Julian/Gregorian']:
                    julegreg_form = var_form
                elif combo == self.prefcombos['From...To...']:
                    span_form = var_form
                    # translate display text to the stored span code
                    for k,v in SPAN_FORMAT_LOOKUP.items():
                        if span_form == k:
                            span_form = v
                elif combo == self.prefcombos['Between...And...']:
                    range_form = var_form
                    # translate display text to the stored range code
                    for k,v in RANGE_FORMAT_LOOKUP.items():
                        if range_form == k:
                            range_form = v
        self.set_new_date_prefs(
            date_form, est_form, abt_form, cal_form, befaft_form, epoch_form,
            julegreg_form, span_form, range_form)
def set_new_date_prefs(self,
date_form, est_form, abt_form, cal_form, befaft_form, epoch_form,
julegreg_form, span_form, range_form):
for combo in self.prefcombos.values():
current_file = get_current_file()[0]
conn = sqlite3.connect(current_file)
conn.execute('PRAGMA foreign_keys = 1')
cur = conn.cursor()
if date_form and combo is self.prefcombos['General']:
cur.execute(update_date_format_date_formats, (date_form,))
elif est_form and combo is self.prefcombos['Estimated']:
cur.execute(update_date_format_est, (est_form,))
elif abt_form and combo is self.prefcombos['Approximate']:
cur.execute(update_date_format_abt, (abt_form,))
elif cal_form and combo is self.prefcombos['Calculated']:
cur.execute(update_date_format_cal, (cal_form,))
elif befaft_form and combo is self.prefcombos['Before/After']:
cur.execute(update_date_format_befaft, (befaft_form,))
elif epoch_form and combo is self.prefcombos['Epoch']:
cur.execute(update_date_format_epoch, (epoch_form,))
elif julegreg_form and combo is self.prefcombos['Julian/Gregorian']:
cur.execute(update_date_format_julegreg, (julegreg_form,))
elif span_form and combo is self.prefcombos['From...To...']:
cur.execute(update_date_format_span, (span_form,))
elif range_form and combo is self.prefcombos['Between...And...']:
cur.execute(update_date_format_range, (range_form,))
conn.commit()
cur.close()
conn.close()
combo.entry.delete(0, 'end')
def show_test_date_formatted(self, evt):
widg = evt.widget
storable_date = validate_date(
self.master,
widg,
widg.get())
date_prefs = get_date_formats(tree_is_open=1)
formatted_date = format_stored_date(
storable_date, date_prefs=date_prefs)
widg.delete(0, 'end')
widg.insert(0, formatted_date)
def make_widgets_top(self):
self.test_frm = Frame(self)
self.tester_head = LabelH3(
self.test_frm,
text="Date Entry Demo (doesn't affect your tree)")
DATE_ENTRIES = ['Date Input I', 'Date Input II', 'Date Input III']
self.date_test = {}
g = 0
for lab in DATE_ENTRIES:
lbl = Label(self.test_frm, text=DATE_ENTRIES[g])
lbl.grid(column=0, row= g+1, padx=24, sticky='e')
dent = EntryAutoHilited(self.test_frm)
dent.grid(column=1, row=g+1, sticky='ew')
dent.config(width=64)
dent.bind("<FocusOut>", self.show_test_date_formatted)
self.date_test[lab] = dent
g += 1
def make_widgets_bottom(self):
prefs_area = Frame(self)
buttonbox = Frame(self)
self.pref_head = LabelH2(
prefs_area, text='Set Date Display Preferences')
pref_head2 = Label(
prefs_area,
text='first value in each dropdown list is default')
pfx_lab = LabelH3(prefs_area, text='Prefixes')
sfx_lab = LabelH3(prefs_area, text='Suffixes')
cmpd_lab = LabelH3(prefs_area, text='Compound Dates')
PREF_HEADS = (
"General", "Estimated", "Approximate", "Calculated",
"Before/After", "Epoch", "Julian/Gregorian",
"From...To...", "Between...And...")
date_pref_heads = {}
p = 0
for heading in PREF_HEADS:
lab = LabelH3(prefs_area, text=PREF_HEADS[p])
date_pref_heads[heading] = lab
combo = Combobox(
prefs_area,
root,
height=300,
values=DATE_PREF_COMBOS[p])
self.prefcombos[heading] = combo
p += 1
self.submit = Button(
buttonbox,
text='SUBMIT PREFERENCES',
command=self.get_new_date_prefs,
width=30)
self.revert = Button(
buttonbox,
text='REVERT TO DEFAULT VALUES',
command=self.revert_to_default,
width=30)
# children of | |
Welcome to PsychoPy2!
v1.83.04
()
################ Running: F:\users\emiwar\edited_new\catAtt1.py ################
COM1
Serial<id=0x552a810, open=True>(port='COM1', baudrate=115200, bytesize=8, parity='N', stopbits=1, timeout=2.5, xonxoff=False, rtscts=False, dsrdtr=False)
492
1 coast [('a', 1.7892104581749066)]
1 finished.
2 coast [('a', 1.0033532573809083)]
2 finished.
3 desert [('b', 2.271176326208149)]
3 finished.
4 bathroom [('b', 1.120374462225186)]
4 finished.
5 False [('a', 1.6484581027571892)]
5 finished.
6 car [('b', 1.09450819429361)]
6 finished.
7 car [('a', 0.727271234388752)]
7 finished.
8 desert [('a', 1.0182946802118522)]
8 finished.
9 desert [('b', 0.9273502932597353)]
9 finished.
10 coast [('b', 1.5391301823368337)]
10 finished.
11 False [('b', 1.1794557195544826)]
11 finished.
12 shoe [('b', 1.3977889908176167)]
12 finished.
13 coast [('b', 1.5871455436866313)]
13 finished.
14 False [('b', 0.9810044640696844)]
14 finished.
15 chair [('a', 0.6492164345094409)]
15 finished.
16 desert [('a', 1.6887869243196008)]
16 finished.
17 ('flower', 'desert') [('', 2.5374563394745735)]
17 finished.
18 False [('b', 0.7137966234003272)]
18 finished.
19 coast [('b', 0.6717895602995441)]
19 finished.
20 False [('a', 0.6759380983116046)]
20 finished.
21 shoe [('a', 1.2980141150582085)]
21 finished.
22 bathroom [('a', 0.9993237768674703)]
22 finished.
23 desert [('a', 0.8226436219050584)]
23 finished.
24 desert [('b', 1.8846705259672945)]
24 finished.
25 False [('b', 0.6576146800828155)]
25 finished.
26 car [('b', 0.9339028543595873)]
26 finished.
27 shoe [('a', 0.7009712569978888)]
27 finished.
28 False [('a', 0.6392176573413622)]
28 finished.
29 shoe [('b', 0.8535278994826285)]
29 finished.
30 flower [('b', 1.0293224543322026)]
30 finished.
31 desert [('a', 0.6053431597801477)]
31 finished.
32 ('shoe', 'coast') [('b', 1.5249567683454757)]
32 finished.
33 False [('b', 0.6930744604950405)]
33 finished.
34 chair [('a', 0.7197946580718622)]
34 finished.
35 woods [('a', 0.43924944510700925)]
35 finished.
36 car [('b', 0.7983866829202952)]
36 finished.
37 chair [('b', 0.7206694081170895)]
37 finished.
38 coast [('a', 0.46105749450822486)]
38 finished.
39 False [('b', 0.6472335113428471)]
39 finished.
40 shoe [('a', 0.5809422374657629)]
40 finished.
41 desert [('a', 0.664918828298255)]
41 finished.
42 shoe [('b', 0.9437913713811099)]
42 finished.
43 False [('a', 0.6602424491593979)]
43 finished.
44 bathroom [('a', 0.7776354887530488)]
44 finished.
45 bathroom [('a', 1.114091393345177)]
45 finished.
46 ('coast', 'car') [('', 2.5262954320922404)]
46 finished.
47 desert [('b', 0.4903611811559472)]
47 finished.
48 bathroom [('a', 0.4585672573697934)]
48 finished.
49 desert [('b', 1.0858055603370644)]
49 finished.
50 False [('b', 0.7461999104429538)]
50 finished.
51 False [('b', 0.6215250092592441)]
51 finished.
52 coast [('b', 0.8745392020273925)]
52 finished.
53 False [('b', 0.6717496789699453)]
53 finished.
54 bathroom [('b', 0.8545117366940644)]
54 finished.
55 shoe [('b', 0.8124140608665584)]
55 finished.
56 False [('b', 0.6569067864826934)]
56 finished.
57 car [('a', 0.6998871299723533)]
57 finished.
58 False [('b', 0.6528881560368518)]
58 finished.
59 flower [('b', 1.2191889603788013)]
59 finished.
60 chair [('a', 1.2329333568320067)]
60 finished.
61 False [('b', 0.6721206339841501)]
61 finished.
62 shoe [('b', 0.7326264758657999)]
62 finished.
63 woods [('a', 0.7180979420940048)]
63 finished.
64 False [('a', 0.7712181136307663)]
64 finished.
65 False [('b', 0.838737204617928)]
65 finished.
66 woods [('a', 0.8452220261063985)]
66 finished.
67 woods [('a', 0.9287288383360419)]
67 finished.
68 coast [('b', 0.842296613283338)]
68 finished.
69 coast [('a', 0.7194524410745089)]
69 finished.
70 bathroom [('a', 0.5159166093128533)]
70 finished.
71 car [('b', 1.1115859074629952)]
71 finished.
72 car [('b', 1.2127654271099573)]
72 finished.
73 bathroom [('a', 0.4333338123002477)]
73 finished.
74 woods [('a', 0.47819561616211104)]
74 finished.
75 flower [('b', 0.8665863957161264)]
75 finished.
76 chair [('a', 0.585090482232772)]
76 finished.
77 woods [('a', 0.9331685687030813)]
77 finished.
78 False [('b', 0.9248580034056886)]
78 finished.
79 False [('b', 0.7366541969087166)]
79 finished.
80 False [('b', 0.6810693005547819)]
80 finished.
81 shoe [('a', 0.6482064984866156)]
81 finished.
82 desert [('b', 0.6948591499939312)]
82 finished.
83 ('woods', 'shoe') [('b', 1.7955817938211567)]
83 finished.
84 False [('a', 0.6937204793853198)]
84 finished.
85 coast [('a', 0.5522004083730963)]
85 finished.
86 chair [('b', 0.6286529171871962)]
86 finished.
87 False [('a', 0.6191626269715016)]
87 finished.
88 coast [('a', 0.7254654312447428)]
88 finished.
89 flower [('a', 0.8287108624132316)]
89 finished.
90 False [('b', 0.6648167790136768)]
90 finished.
91 woods [('a', 0.48405758512092234)]
91 finished.
92 desert [('b', 0.8112533968776461)]
92 finished.
93 flower [('b', 0.8341414678733372)]
93 finished.
94 shoe [('a', 0.7768683596486881)]
94 finished.
95 coast [('b', 0.6934835373683654)]
95 finished.
96 flower [('b', 0.4705020384930094)]
96 finished.
97 bathroom [('a', 0.5580755733600427)]
97 finished.
98 shoe [('b', 0.8924324297377098)]
98 finished.
99 car [('b', 0.7835540540104375)]
99 finished.
100 False [('b', 0.6590515809282351)]
100 finished.
101 car [('a', 0.4724855481497343)]
101 finished.
102 False [('b', 0.563056340882099)]
102 finished.
103 False [('a', 0.5299299114956284)]
103 finished.
104 chair [('a', 1.1166432119484853)]
104 finished.
105 bathroom [('a', 0.42641381512726184)]
105 finished.
106 flower [('a', 0.6222211730565732)]
106 finished.
107 chair [('a', 0.6751510285424729)]
107 finished.
108 car [('b', 0.5639258125161177)]
108 finished.
109 chair [('a', 0.5437432205404775)]
109 finished.
110 bathroom [('a', 0.49331591848613243)]
110 finished.
111 woods [('a', 1.5583233654446076)]
111 finished.
112 flower [('a', 0.6966693518139664)]
112 finished.
113 woods [('b', 1.337857909274021)]
113 finished.
114 flower [('a', 0.5836515286719077)]
114 finished.
115 coast [('b', 1.385307307637845)]
115 finished.
116 flower [('b', 0.670207503144411)]
116 finished.
117 woods [('a', 0.6457259384355893)]
117 finished.
118 car [('b', 0.7969031561086695)]
118 finished.
119 flower [('a', 0.4078807266730564)]
119 finished.
120 flower [('b', 0.8354769059243381)]
120 finished.
121 chair [('b', 0.8723562857227307)]
121 finished.
122 bathroom [('a', 0.6884623020266645)]
122 finished.
123 chair [('b', 0.573508181684133)]
123 finished.
124 desert [('a', 0.7517269935312925)]
124 finished.
125 flower [('a', 0.7619641789416391)]
125 finished.
126 bathroom [('a', 0.7040934372632819)]
126 finished.
127 chair [('b', 0.8324541357378621)]
127 finished.
128 flower [('a', 0.6671876654085054)]
128 finished.
129 woods [('b', 0.5426617327206031)]
129 finished.
130 shoe [('b', 1.2154236936738698)]
130 finished.
131 car [('b', 0.8965185065497963)]
131 finished.
132 False [('a', 0.7515313990692221)]
132 finished.
133 False [('b', 0.6667539559492752)]
133 finished.
134 woods [('b', 0.87848188199996)]
134 finished.
135 woods [('a', 0.7493349934914022)]
135 finished.
136 bathroom [('a', 0.6164518695400147)]
136 finished.
137 desert [('b', 0.8319148580533238)]
137 finished.
138 False [('b', 0.9302466748208644)]
138 finished.
139 bathroom [('a', 0.9685415485744215)]
139 finished.
140 flower [('b', 0.6410070387614724)]
140 finished.
141 woods [('a', 0.6779251269092583)]
141 finished.
142 car [('a', 0.49533256483573496)]
142 finished.
143 coast [('a', 0.6840416325890146)]
143 finished.
144 woods [('b', 1.257557145400142)]
144 finished.
145 desert [('', 2.5296302150336487)]
145 finished.
146 shoe [('a', 1.1271610328913084)]
146 finished.
147 chair [('a', 0.5528701801140414)]
147 finished.
148 False [('b', 0.6553332334344759)]
148 finished.
149 car [('b', 0.7868730017180496)]
149 finished.
150 flower [('a', 0.5338819753105781)]
150 finished.
151 woods [('a', 0.5411230758356851)]
151 finished.
152 bathroom [('b', 0.5961868756996864)]
152 finished.
153 chair [('b', 0.7739458792764253)]
153 finished.
154 False [('b', 1.0872119636951538)]
154 finished.
155 chair [('a', 0.4353648276583044)]
155 finished.
156 car [('a', 0.5328949124032079)]
156 finished.
157 car [('a', 0.5800064924458184)]
157 finished.
158 shoe [('b', 0.3869424421509393)]
158 finished.
159 coast [('a', 0.47046948829029134)]
159 finished.
160 shoe [('b', 0.3879500322129843)]
160 finished.
161 shoe [('a', 0.4181882909003889)]
161 finished.
162 coast [('a', 0.407205676520789)]
162 finished.
163 desert [('a', 0.5108455223091823)]
163 finished.
164 chair [('a', 0.6821883237437305)]
164 finished.
165 coast [('b', 1.07370744170953)]
165 finished.
166 desert [('a', 0.6020787556556115)]
166 finished.
167 coast [('b', 0.7831350068047414)]
167 finished.
168 car [('b', 0.8843981012967106)]
168 finished.
169 flower [('a', 0.5010986426564159)]
169 finished.
170 bathroom [('a', 0.4806750032330456)]
170 finished.
171 chair [('b', 0.5683109992999107)]
171 finished.
172 chair [('a', 0.5274206134276938)]
172 finished.
173 bathroom [('a', 0.6623714083709729)]
173 finished.
174 False [('a', 0.6424905655730981)]
174 finished.
175 False [('a', 0.7074035876187281)]
175 finished.
176 chair [('a', 0.6231683546341173)]
176 finished.
177 coast [('a', 2.0843232065058146)]
177 finished.
178 woods [('a', 0.5183449717414987)]
178 finished.
179 shoe [('a', 0.6815748550563967)]
179 finished.
180 car [('b', 0.44872917850034355)]
180 finished.
181 desert [('a', 0.7298101502090049)]
181 finished.
182 coast [('a', 0.5294404854730601)]
182 finished.
183 coast [('a', 0.6971954334703696)]
183 finished.
184 car [('a', 0.5496037232740036)]
184 finished.
185 woods [('a', 0.7880709078310701)]
185 finished.
186 False [('a', 0.7559010438644691)]
186 finished.
187 car [('a', 0.6214760373325134)]
187 finished.
188 bathroom [('a', 0.5798361170600401)]
188 finished.
189 flower [('b', 0.5907597891803107)]
189 finished.
190 flower [('a', 1.261108343203432)]
190 finished.
191 woods [('b', 0.41345033029642764)]
191 finished.
192 flower [('a', 1.4010067689760035)]
192 finished.
193 False [('a', 0.6573924003193952)]
193 finished.
194 False [('a', 0.6162882387907302)]
194 finished.
195 coast [('b', 0.7841945012444285)]
195 finished.
196 desert [('a', 0.6246932290007408)]
196 finished.
197 bathroom [('b', 0.8856229859561608)]
197 finished.
198 False [('a', 0.8337555573605187)]
198 finished.
199 woods [('a', 0.47196093271872996)]
199 finished.
200 False [('b', 0.6793936982219293)]
200 finished.
201 flower [('a', 0.40112201428860317)]
201 finished.
202 False [('a', 0.6073724156678963)]
202 finished.
203 coast [('a', 0.47929587166640886)]
203 finished.
204 woods [('a', 0.6150457594269483)]
204 finished.
205 False [('b', 0.5919966968874633)]
205 finished.
206 shoe [('b', 0.8072925357107579)]
206 finished.
207 woods [('a', 0.47416994783475275)]
207 finished.
208 desert [('b', 0.6698145547500189)]
208 finished.
209 False [('a', 0.6251817752881834)]
209 finished.
210 ('coast', 'flower') [('b', 1.5678382882465485)]
210 finished.
211 desert [('b', 0.7065259051230441)]
211 finished.
212 flower [('a', 0.47094190609868747)]
212 finished.
213 desert [('a', 0.39585562606794156)]
213 finished.
214 False [('b', 0.6410043995558681)]
214 finished.
215 False [('b', 0.6142539977365686)]
215 finished.
216 chair [('a', 0.7622885079895241)]
216 finished.
217 desert [('a', 0.3677539509642429)]
217 finished.
218 False [('a', 0.6044355662870657)]
218 finished.
219 False [('a', 0.6297590375932032)]
219 finished.
220 car [('a', 0.5528478934886607)]
220 finished.
221 bathroom [('a', 0.4189650970920411)]
221 finished.
222 bathroom [('a', 0.3811490925384078)]
222 finished.
223 coast [('b', 0.39897399414735446)]
223 finished.
224 False [('a', 0.5516969065870398)]
224 finished.
225 woods [('a', 0.7838886466358872)]
225 finished.
226 bathroom [('a', 0.5217859093984316)]
226 finished.
227 False [('a', 0.6562763095810169)]
227 finished.
228 coast [('a', 0.556119335494941)]
228 finished.
229 shoe [('a', 0.3938870719098304)]
229 finished.
230 desert [('a', 0.9941362715708237)]
230 finished.
231 desert [('a', 0.6874294928882136)]
231 finished.
232 False [('a', 0.6480663273430309)]
232 finished.
233 False [('a', 0.6597278040605943)]
233 finished.
234 False [('b', 0.6706983953922645)]
234 finished.
235 car [('b', 0.5697288392159408)]
235 finished.
236 car [('a', 0.5423725930811543)]
236 finished.
237 desert [('a', 0.6907894949058573)]
237 finished.
238 woods [('b', 0.6501284266787479)]
238 finished.
239 chair [('b', 0.659382361367534)]
239 finished.
240 woods [('b', 0.46565762992872806)]
240 finished.
241 desert [('a', 0.9241876451746975)]
241 finished.
242 coast [('b', 0.8265253069030223)]
242 finished.
243 shoe [('a', 0.7894435880059518)]
243 finished.
244 desert [('a', 0.830754780554571)]
244 finished.
245 flower [('a', 1.0421932737206134)]
245 finished.
246 car [('b', 0.622684207022985)]
246 finished.
247 shoe [('a', 0.7361442437313599)]
247 finished.
248 shoe [('b', 0.7497261824153156)]
248 finished.
249 ('coast', 'shoe') [('b', 1.2572762166225857)]
249 finished.
250 car [('b', 0.40709629610933007)]
250 finished.
251 desert [('a', 0.355757295130843)]
251 finished.
252 car [('a', 0.4091663130618599)]
252 finished.
253 bathroom [('a', 0.3853398578405631)]
253 finished.
254 ('car', 'woods') [('b', 0.9601770261840556)]
254 finished.
255 flower [('a', 0.5986527734971787)]
255 finished.
256 coast [('a', 0.3600389664047725)]
256 finished.
257 woods [('b', 0.6638121214023158)]
257 finished.
258 woods [('b', 0.6092905316738779)]
258 finished.
259 flower [('b', 0.5359470070968655)]
259 finished.
260 ('shoe', 'desert') [('a', 0.9151577497193557)]
260 finished.
261 car [('b', 0.6528649896763454)]
261 finished.
262 car [('a', 0.8045715147015926)]
262 finished.
263 chair [('a', 0.6184515076754451)]
263 finished.
264 chair [('a', 0.4860522380904513)]
264 finished.
265 bathroom [('b', 0.6215736879410088)]
265 finished.
266 woods [('b', 0.590448949405527)]
266 finished.
267 chair [('a', 0.4721755881103036)]
267 finished.
268 chair [('a', 0.685262118572382)]
268 finished.
269 False [('a', 0.5959499336825047)]
269 finished.
270 bathroom [('b', 0.9627520111480408)]
270 finished.
271 shoe [('a', 0.9151685997869663)]
271 finished.
272 False [('a', 0.5632311149440739)]
272 finished.
273 flower [('a', 0.34823819827875013)]
273 finished.
274 chair [('a', 0.7410467148195039)]
274 finished.
275 False [('b', 0.6508808935295747)]
275 finished.
276 chair [('a', 0.4708287135017599)]
276 finished.
277 False [('a', 0.5959772054739005)]
277 finished.
278 False [('a', 0.6016324366582921)]
278 finished.
279 coast [('b', 0.5193836457810903)]
279 finished.
280 False [('b', 0.6705186361641609)]
280 finished.
281 chair [('a', 0.7183973453111321)]
281 finished.
282 bathroom [('a', 1.2939007664544988)]
282 finished.
283 car [('a', 0.6943219250247239)]
283 finished.
284 flower [('a', 0.4901881665646215)]
284 finished.
285 desert [('a', 0.4609768521136175)]
285 finished.
286 flower [('a', 0.7201711847424122)]
286 finished.
287 False [('b', 0.637003363814074)]
287 finished.
288 desert [('a', 0.8073001600823773)]
288 finished.
289 chair [('a', 0.45979331500939224)]
289 finished.
290 shoe [('a', 1.1841658218763769)]
290 finished.
291 shoe [('a', 0.6541851789834254)]
291 finished.
292 chair [('a', 1.0134664001266174)]
292 finished.
293 False [('b', 0.8516549432174543)]
293 finished.
294 woods [('a', 0.6158656726443041)]
294 finished.
295 False [('b', 0.6867529765108884)]
295 finished.
296 flower [('b', 1.2006268406626077)]
296 finished.
297 desert [('a', 0.35719888789753895)]
297 finished.
298 flower [('a', 0.4490634778803724)]
298 finished.
299 chair [('b', 0.4204955431146118)]
299 finished.
300 bathroom [('a', 0.5117534090472873)]
300 finished.
301 woods [('b', 0.9847146007014089)]
301 finished.
302 coast [('b', 0.746308117873923)]
302 finished.
303 bathroom [('a', 0.736755073212862)]
303 finished.
304 shoe [('a', 0.5053747422743982)]
304 finished.
305 chair [('a', 0.3790779026057862)]
305 finished.
306 woods [('a', 0.3066246700627744)]
306 finished.
307 car [('b', 0.7232942447431014)]
307 finished.
308 coast [('b', 0.8474240033406204)]
308 finished.
309 car [('b', 0.8878258429253947)]
309 finished.
310 shoe [('b', 0.6738886085140621)]
310 finished.
311 False [('a', 0.6092823208118716)]
311 finished.
312 car [('a', 0.31698032648137087)]
312 finished.
313 shoe [('b', 0.32235316266269365)]
313 finished.
314 bathroom [('a', 0.5138281179206388)]
314 finished.
315 flower | |
<gh_stars>0
# -*- coding: utf-8 -*-
__version__ = 6.0
import sys, os, time, random, threading
from colorama import init, Fore, Back, Style
init()
# Colored status tags used as message prefixes throughout the tool.
warning = "["+Fore.RED+"!"+Fore.RESET+"]"
question = "["+Fore.YELLOW+"?"+Fore.RESET+"]"
information = "["+Fore.BLUE+"I"+Fore.RESET+"]"
wait = "["+Fore.MAGENTA+"*"+Fore.RESET+"]"
found = "["+Fore.GREEN+"+"+Fore.RESET+"]"
tiret = "["+Fore.CYAN+"-"+Fore.RESET+"]"
def checkVersion():
    """Exit with a warning unless the interpreter is Python 3.

    Uses sys.version_info instead of the original sys.version[:1]
    string-slicing, which compares only the first character of the
    version string and would misreport any multi-digit major version.
    """
    if sys.version_info[0] != 3:
        sys.exit(warning+" Veuillez lancer la version 3 de python.")
checkVersion()
def clear():
    """Clear the terminal screen ('cls' on Windows, 'clear' elsewhere)."""
    command = 'cls' if os.name == 'nt' else 'clear'
    return os.system(command)
def loadlib():
    """Import the tool's heavy modules, look up the user's public IP/country,
    and create the 'Watched' data directory.

    Runs on a worker thread (see thread_loading) and publishes everything
    the rest of the program needs through module-level globals.
    """
    import requests, json
    import datetime
    # feature modules
    from core.bssidFinder import bssidFinder
    from core.employee_lookup import employee_lookup
    from core.google import google
    from core.hashDecrypt import hashdecrypt
    from core.ipFinder import ipFinder
    from core.mailToIP import mailToIP
    from core.profilerFunc import profilerFunc
    from core.searchAdresse import searchAdresse
    from core.searchTwitter import searchTwitter
    from core.searchPersonne import searchPersonne
    from core.searchInstagram import searchInstagram
    from core.searchUserName import searchUserName
    from core.searchNumber import searchNumber
    from core.searchEmail import SearchEmail
    from core.Profiler import Profiler
    from core.facebookStalk import facebookStalk
    # export the imported names and lookups for the rest of the module
    global monip, monpays, codemonpays, pathDatabase
    global bssidFinder, employee_lookup, google, hashdecrypt, ipFinder, mailToIP, profilerFunc
    global searchPersonne, SearchEmail, searchInstagram, searchTwitter, searchNumber, searchAdresse, searchUserName, facebookStalk
    global Profiler
    # NOTE(review): needs network access (ipify for the public IP, ip-api
    # for geolocation); there is no error handling if either is unreachable.
    monip = requests.get("https://api.ipify.org/").text
    monpays = requests.get("http://ip-api.com/json/"+monip).text
    value = json.loads(monpays)
    monpays = value['country']
    codemonpays = value['countryCode']
    # NOTE(review): backslash path splitting assumes Windows — confirm on other OSes
    pathDatabase = os.path.abspath(__file__).split("\\")[:-1]
    pathDatabase = "\\".join(pathDatabase)+"\\Watched"
    if not os.path.exists(pathDatabase):
        os.mkdir(pathDatabase)
def loadingHack(importlib):
    """Animate the startup banner with trailing 'glitch' characters while
    the loader thread given by *importlib* is alive."""
    message = "[*]"+' Start LittleBrother...'
    glitch_chars = "$*.X^%_/\\#~!?;"
    while importlib.is_alive():
        revealed = ""
        for ch in message:
            revealed += ch
            noise = random.choice(glitch_chars)+random.choice(glitch_chars)+random.choice(glitch_chars)
            # drop the noise once it would overflow the banner width
            if len(revealed+noise) > len(message):
                noise = ""
            sys.stdout.write('\r'+revealed+noise)
            time.sleep(0.06)
def loadingUpper(importlib):
    """Animate 'Start littlebrother' by rippling one uppercase letter across
    it while the loader thread given by *importlib* is alive."""
    letters = list("Start littlebrother")
    while importlib.is_alive():
        for i in range(len(letters)):
            # capitalize position i, draw the banner, then restore it
            letters[i] = letters[i].upper()
            sys.stdout.write("\r[*] "+''.join(letters)+'...')
            time.sleep(0.1)
            letters[i] = letters[i].lower()
def loadingTextPrint(importlib):
    """Animate typing 'Start littlebrother' at a fake shell prompt while
    the loader thread given by *importlib* is alive."""
    text = "Start littlebrother"
    while importlib.is_alive():
        # blank the line before retyping the message
        sys.stdout.write("\r"+" " * 100)
        for end in range(1, len(text) + 1):
            # random keystroke delay of 0.1 or 0.2 seconds
            delay = "0."+str(random.choice(range(1, 3)))
            sys.stdout.write("\rroot@littlebrother:~$ "+text[:end]+"|")
            time.sleep(float(delay))
def thread_loading():
    """Run loadlib() on a worker thread while a randomly chosen loading
    animation plays on a second thread; wait for both to finish.

    BUG FIX: the original built the animation thread with
    `threading.Thread(target=loadingHack(importlib))`, which *called* the
    animation synchronously on the main thread and handed Thread a None
    target. The callable and its argument must be passed separately.
    """
    animation = random.choice([loadingHack, loadingUpper, loadingTextPrint])
    importlib = threading.Thread(target=loadlib)
    importlib.start()
    load = threading.Thread(target=animation, args=(importlib,))
    load.start()
    importlib.join()
    load.join()
thread_loading()
def times():
    """Return the current local time formatted as 'HH:MM:SS'."""
    return str(time.strftime("%H:%M:%S"))
from datetime import date
# capture today's date once at startup
today_date = date.today()
header1 = """
_ _ _ _ _ ____ _ _
| | (_) | | | | | | _ \ | | | |
| | _| |_| |_| | ___| |_) |_ __ ___ | |_| |__ ___ _ __
| | | | __| __| |/ _ \ _ <| '__/ _ \| __| '_ \ / _ \ '__|
| |____| | |_| |_| | __/ |_) | | | (_) | |_| | | | __/ |
|______|_|\__|\__|_|\___|____/|_| \___/ \__|_| |_|\___|_|
"""
header2 = """
/$$ /$$ /$$ /$$ /$$ /$$$$$$$ /$$ /$$
| $$ |__/ | $$ | $$ | $$ | $$__ $$ | $$ | $$
| $$ /$$ /$$$$$$ /$$$$$$ | $$ /$$$$$$ | $$ \ $$ /$$$$$$ /$$$$$$ /$$$$$$ | $$$$$$$ /$$$$$$ /$$$$$$
| $$ | $$|_ $$_/|_ $$_/ | $$ /$$__ $$| $$$$$$$ /$$__ $$ /$$__ $$|_ $$_/ | $$__ $$ /$$__ $$ /$$__ $$
| $$ | $$ | $$ | $$ | $$| $$$$$$$$| $$__ $$| $$ \__/| $$ \ $$ | $$ | $$ \ $$| $$$$$$$$| $$ \__/
| $$ | $$ | $$ /$$| $$ /$$| $$| $$_____/| $$ \ $$| $$ | $$ | $$ | $$ /$$| $$ | $$| $$_____/| $$
| $$$$$$$$| $$ | $$$$/| $$$$/| $$| $$$$$$$| $$$$$$$/| $$ | $$$$$$/ | $$$$/| $$ | $$| $$$$$$$| $$
|________/|__/ \___/ \___/ |__/ \_______/|_______/ |__/ \______/ \___/ |__/ |__/ \_______/|__/
"""
header5 = """
___ __ ___________ ___________ ___ _______
|" | |" \\(" _ ")(" _ ")|" | /" "|
|| | || |)__/ \\__/ )__/ \\__/ || | (: ______)
|: | |: | \\_ / \\_ / |: | \\/ |
\\ |___ |. | |. | |. | \\ |___ // ___)_
( \\_|: \\ /\\ |\\ \\: | \\: | ( \\_|: \\(: "|
\\_______)(__\\_|_) \\__| \\__| \\_______)\\_______)
_______ _______ ______ ___________ __ __ _______ _______
| _ "\\ /" \\ / " \\(" _ ")/" | | "\\ /" "| /" \\
(. |_) :)|: | // ____ \\)__/ \\__/(: (__) :)(: ______)|: |
|: \\/ |_____/ )/ / ) :) \\_ / \\/ \\/ \\/ | |_____/ )
(| _ \\ // /(: (____/ // |. | // __ \\ // ___)_ // /
|: |_) :)|: __ \\ \\ / \\: | (: ( ) :)(: "||: __ \\
(_______/ |__| \\___) \"_____/ \\__| \\__| |__/ \\_______)|__| \\___)
"""
header6 = """
_ ____ ______ ______ _ ___ ____ ____ ___ ______ __ __ ___ ____
| T l j| T| T| T / _]| \\ | \\ / \\ | T| T T / _]| \\
| | | T | || || | / [_ | o )| D )Y Y| || l | / [_ | D )
| l___ | | l_j l_jl_j l_j| l___ Y _]| T| / | O |l_j l_j| _ |Y _]| /
| T | | | | | | | T| [_ | O || \\ | | | | | | || [_ | \\
| | j l | | | | | || T| || . Yl ! | | | | || T| . Y
l_____j|____j l__j l__j l_____jl_____jl_____jl__j\\_j \\___/ l__j l__j__jl_____jl__j\\_j
"""
header7 = """
_ _ _ _ _ ___ _ _
| | <_> _| |_ _| |_ | | ___ | . > _ _ ___ _| |_ | |_ ___ _ _
| |_ | | | | | | | |/ ._>| . \| '_>/ . \ | | | . |/ ._>| '_>
|___||_| |_| |_| |_|\___.|___/|_| \___/ |_| |_|_|\___.|_|
"""
header8 = """
_ ______
___/__) , /) (, / ) /)
(, / _/__/_ // _ /---( __ ____/_(/ _ __
/ _(_(__(__(/__(/_) / ____)/ (_(_) (__/ )__(/_/ (_
(_____ (_/ (
)
"""
header9 = """
__ _ _ _ _ ___ _ _
/ /(_) |_| |_| | ___ / __\_ __ ___ | |_| |__ ___ _ __
/ / | | __| __| |/ _ \/__\// '__/ _ \| __| '_ \ / _ \ '__|
/ /__| | |_| |_| | __/ \/ \ | | (_) | |_| | | | __/ |
\____/_|\__|\__|_|\___\_____/_| \___/ \__|_| |_|\___|_|
"""
header11 = """
| _) | | | __ ) | |
| | __| __| | _ \ __ \ __| _ \ __| __ \ _ \ __|
| | | | | __/ | | | ( | | | | | __/ |
_____| _| \__| \__| _| \___| ____/ _| \___/ \__| _| |_| \___| _|
"""
header12 = """
__ _ _ _ _ _____ _ _
| | |_| |_| |_| |___| __ |___ ___| |_| |_ ___ ___
| |__| | _| _| | -_| __ -| _| . | _| | -_| _|
|_____|_|_| |_| |_|___|_____|_| |___|_| |_|_|___|_|
\\\\
\\\\_ \\\\
(') \\\\_
LittleBrother -> / )=.---(') <- Privacy
o( )o( )_-\_
"""
header13 = """
__ __ ______ ______ __ ______
/\ \ /\ \ /\__ _\ /\__ _\ /\ \ /\ ___\
\ \ \____ \ \ \ \/_/\ \/ \/_/\ \/ \ \ \____ \ \ __\
\ \_____\ \ \_\ \ \_\ \ \_\ \ \_____\ \ \_____\
\/_____/ \/_/ \/_/ \/_/ \/_____/ \/_____/
______ ______ ______ ______ __ __ ______ ______
/\ == \ /\ == \ /\ __ \ /\__ _\ /\ \_\ \ /\ ___\ /\ == \
\ \ __< \ \ __< \ \ \/\ \ \/_/\ \/ \ \ __ \ \ \ __\ \ \ __<
\ \_____\ \ \_\ \_\ \ \_____\ \ \_\ \ \_\ \_\ \ \_____\ \ \_\ \_\
\/_____/ \/_/ /_/ \/_____/ \/_/ \/_/\/_/ \/_____/ \/_/ /_/
"""
header14 = """
__ _ __ __ __ ____ __ __
/ / (_) /_/ /_/ /__ / __ )_________ / /_/ /_ ___ _____
/ / / / __/ __/ / _ \/ __ / ___/ __ \/ __/ __ \/ _ \/ ___/
/ /___/ / /_/ /_/ / __/ /_/ / / / /_/ / /_/ / / / | |
import numpy as np
import subprocess
import fileinput
import argparse
import random
import shutil
import math
import cv2
import os
import sys
global counter
def createNumpyMatrix(geometricVertices):
    """Parse obj-file geometric-vertex lines into a 3 x numPoints float matrix.

    Args:
        geometricVertices: iterable of strings of the form "v x y z".

    Returns:
        numpy array of shape (3, numPoints); each column is one vertex.
    """
    # list comprehension replaces list(map(lambda x: float(x), ...));
    # [1:] skips the leading "v" token on each line
    rows = [[float(value) for value in line.split()[1:]]
            for line in geometricVertices]
    # transpose so each column holds one (x, y, z) vertex; dtype made explicit
    return np.asarray(rows, dtype=float).T
def getCenterOfMass(geometricVertices):
    """Return the centroid of a 3 x N vertex matrix as a 3x1 column vector."""
    centroid = np.average(geometricVertices, axis=1)
    return centroid.reshape(3, 1)
def centerAndScaleObject(geometricVertices, com, resize, meshIsAreaLight):
    """Center the vertices at the origin; unless the mesh is an area light,
    also scale each axis by its standard deviation divided by *resize*."""
    centered = geometricVertices - com
    if meshIsAreaLight:
        # area-light meshes keep their original size
        return centered
    scale = (np.std(centered, axis=1) / float(resize)).reshape(3, 1)
    return centered / scale
def getRotationMatrix(angleX=0.0, angleY=0.0, angleZ=0.0):
    """Return the 3x3 rotation matrix Rx @ Ry @ Rz for the given Euler angles (radians).

    If all three angles are 0.0, random angles in [0, 2*pi) are drawn
    (rounded to 2 decimals) so callers get a random orientation by default.

    Bug fix: dtype was `np.float`, an alias deprecated in NumPy 1.20 and
    removed in 1.24 (AttributeError on current NumPy); the builtin `float`
    is the documented replacement and is behaviorally identical.
    """
    if angleX == 0.0 and angleY == 0.0 and angleZ == 0.0:
        angleX = round(random.uniform(0, 2*math.pi), 2)
        angleY = round(random.uniform(0, 2*math.pi), 2)
        angleZ = round(random.uniform(0, 2*math.pi), 2)
    Rx = np.array([[1, 0, 0], [0, math.cos(angleX), -math.sin(angleX)], [0, math.sin(angleX), math.cos(angleX)]], dtype=float)
    Ry = np.array([[math.cos(angleY), 0, math.sin(angleY)], [0, 1, 0], [-math.sin(angleY), 0, math.cos(angleY)]], dtype=float)
    Rz = np.array([[math.cos(angleZ), -math.sin(angleZ), 0], [math.sin(angleZ), math.cos(angleZ), 0], [0, 0, 1]], dtype=float)
    R = np.matmul(np.matmul(Rx, Ry), Rz)
    return R
def rotateObject(geometricVertices, rotationMatrix):
    """Rotate the vertex matrix: returns R @ vertices (3 x N in, 3 x N out)."""
    return np.matmul(rotationMatrix, geometricVertices)
def getAxisAlignedBoundingBox(geometricVertices):
    """Return the axis-aligned bounds of a 3 x N vertex matrix.

    Result keys: xmin, xmax, ymin, ymax, zmin, zmax.
    """
    lows = np.amin(geometricVertices, axis=1)
    highs = np.amax(geometricVertices, axis=1)
    keys = ('xmin', 'xmax', 'ymin', 'ymax', 'zmin', 'zmax')
    values = (lows[0], highs[0], lows[1], highs[1], lows[2], highs[2])
    return dict(zip(keys, values))
def positionObjectInTheBox(geometricVertices, bbox, com):
    """Randomly translate the object so its bounding box fits inside the box scene.

    The scene spans x,y in [-10, 10] and z in [20, 30].  Returns
    (translatedVertices, True) on success, or (vertices, False) when the
    object's bbox cannot fit at any position.  `com` is accepted for
    signature compatibility but unused.
    """
    # allowed (min, max) range for the object's center along each axis
    ranges = (
        (-10.0 - bbox['xmin'], 10.0 - bbox['xmax']),
        (-10.0 - bbox['ymin'], 10.0 - bbox['ymax']),
        (20.0 - bbox['zmin'], 30.0 - bbox['zmax']),
    )
    if any(lo > hi for lo, hi in ranges):
        # object is too large for the box scene at any placement
        print("\n\nMisfit\n\n")
        return geometricVertices, False
    # uniformly sample a new center within the allowed ranges (x, y, z order)
    target = [round(random.uniform(lo, hi), 2) for lo, hi in ranges]
    newCom = np.array(target).reshape(3, 1)
    return geometricVertices + newCom, True
def positionLightInTheBox(geometricVertices, bbox, com):
    """Randomly place the area-light mesh on the ceiling of the box scene.

    x and z are sampled uniformly so the light's bbox stays inside the box;
    y is pinned to 9.5 to keep the light on the ceiling.  `com` is unused.

    NOTE(review): the misfit branch returns a (vertices, False) tuple while
    the success path returns just the translated vertex matrix — callers
    must cope with both shapes; consider unifying the return type.
    """
    # create the range tuple in which com of object can lie - (min, max)
    xComRange = (-10.0 - bbox['xmin'], 10.0 - bbox['xmax'])
    yComRange = (-10.0 - bbox['ymin'], 10.0 - bbox['ymax'])
    zComRange = (20.0 - bbox['zmin'], 30.0 - bbox['zmax'])
    # skip this object if it does not fit inside the box scene
    if (xComRange[0] > xComRange[1]) or (yComRange[0] > yComRange[1]) or (zComRange[0] > zComRange[1]):
        return geometricVertices, False
    # generate the position - (x,y,z) for the com of the object within the above computed range
    # assume uniform distribution
    x = round(random.uniform(xComRange[0], xComRange[1]), 2)
    y = 9.5 # do not change y, the area light has to remain on the ceiling only
    z = round(random.uniform(zComRange[0], zComRange[1]), 2)
    # translate the object so that it is now located at the above randomly generated location
    newCom = np.array([x,y,z]).reshape(3,1)
    #newCom = np.array([0,-5,25]).reshape(3,1)
    geometricVertices = geometricVertices + newCom
    return geometricVertices
def npMatrixToStrings(geometricVertices, dataType):
    """Serialize a 3 x N matrix back into obj lines ('v ...' or 'vn ...').

    dataType == 'geometricVertices' emits 'v ' lines; anything else is
    treated as vertex normals and emits 'vn ' lines.
    """
    prefix = 'v ' if dataType == 'geometricVertices' else 'vn '
    serialized = []
    for col in geometricVertices.T:
        serialized.append(prefix + str(col[0]) + " " + str(col[1]) + " " + str(col[2]) + "\n")
    return serialized
def removeTextureVertices(faces):
    """Strip the last '/'-separated component from each face vertex spec.

    NOTE(review): despite its name, the live code truncates each token at
    the LAST slash, turning 'v/vt/vn' into 'v/vt' — it removes the vertex
    normal, not the texture vertex, and is currently identical in behavior
    to removeVertexNormals().  The commented-out lines below show the
    original intent of producing 'v//vn'.
    """
    newFaces = []
    for line in faces:
        elements = line.split()[1:]
        # elements = ['f', 'v/vt/vn', 'v/vt/vn', 'v/vt/vn']
        # elements = ['f', '1231/14134/2341', '12/24/432', '342/345/67']
        # the loop below actually produces
        # elements = ['1231/14134', '12/24', '342/345']
        for index, face in enumerate(elements):
            #startIndex = face.find('/')
            #endIndex = face.rfind('/')+1
            endIndex = face.rfind('/')
            #toReplace = face[startIndex:endIndex]
            #face = face.replace(toReplace, "//")
            face = face[:endIndex]
            elements[index] = face
        newLine = 'f ' + elements[0] + " " + elements[1] + " " + elements[2] + "\n"
        newFaces.append(newLine)
    return newFaces
def removeVertexNormals(faces):
    """Drop the trailing '/vn' component from every face line ('v/vt/vn' -> 'v/vt')."""
    stripped = []
    for face_line in faces:
        tokens = face_line.split()[1:]
        # cut each token at its last slash, removing the normal index
        trimmed = [tok[:tok.rfind('/')] for tok in tokens]
        stripped.append('f ' + trimmed[0] + " " + trimmed[1] + " " + trimmed[2] + "\n")
    return stripped
def printFirstThreeVertices(geometricVertices):
    """Debug helper: print the matrix's leading dimension and the first vertices.

    NOTE(review): despite the name this prints 6 vertices, and len() of a
    3 x N matrix is 3 (the row count), not the vertex count.
    """
    print(len(geometricVertices))
    for i in range(6):
        print(geometricVertices.T[i])
        #print(geometricVertices[i])
def renderImages(lightType):
    """Run nori.exe over the scene XML files matching the light type.

    'point' renders the point-light scene set (depth render currently
    disabled); any other value renders the area-light set.
    """
    if lightType == 'point':
        scenes = ["custom_simple.xml", "custom_light_point.xml",
                  "custom_noShadow_point.xml"]
    else:
        scenes = ["custom_whitted.xml", "custom_light.xml",
                  "custom_depth.xml", "custom_noShadow.xml"]
    for scene in scenes:
        subprocess.run(["nori.exe", scene])
def alignImages(dstFolder, fileName):
    """Concatenate the rendered views side-by-side into one training image.

    Reads the renders produced by renderImages() from the working directory,
    horizontally concatenates [noShadow | lightMap | 8 depth maps | ground
    truth] and writes the strip to dstFolder, numbered with the global
    counter (which is incremented here).

    NOTE(review): `lightType` below is a free variable — it must exist as a
    module-level global when this runs, otherwise this raises NameError.
    NOTE(review): in the 'area' branch depthMapImage is loaded but the
    concatenation still uses depthMapImage0..7 — confirm this is intended.
    """
    global counter
    # these weird names can be changed if nori.exe is updated :)
    # it was helpful when there was only one xml file
    noShadowImage = cv2.imread('custom_noShadow_point_noShadows.png', cv2.IMREAD_COLOR)
    #depthMapImage = cv2.imread('custom_depth_point_depthMap.png', cv2.IMREAD_COLOR)
    depthMapImage0 = cv2.imread('8viewDepthMap_0.png', cv2.IMREAD_COLOR)
    depthMapImage1 = cv2.imread('8viewDepthMap_1.png', cv2.IMREAD_COLOR)
    depthMapImage2 = cv2.imread('8viewDepthMap_2.png', cv2.IMREAD_COLOR)
    depthMapImage3 = cv2.imread('8viewDepthMap_3.png', cv2.IMREAD_COLOR)
    depthMapImage4 = cv2.imread('8viewDepthMap_4.png', cv2.IMREAD_COLOR)
    depthMapImage5 = cv2.imread('8viewDepthMap_5.png', cv2.IMREAD_COLOR)
    depthMapImage6 = cv2.imread('8viewDepthMap_6.png', cv2.IMREAD_COLOR)
    depthMapImage7 = cv2.imread('8viewDepthMap_7.png', cv2.IMREAD_COLOR)
    lightMapImage = cv2.imread('custom_light_point_lightDepth.png', cv2.IMREAD_COLOR)
    groundTruthImage = cv2.imread('custom_simple_simple.png', cv2.IMREAD_COLOR)
    if lightType == 'area':
        noShadowImage = cv2.imread('custom_noShadow.png', cv2.IMREAD_COLOR)
        depthMapImage = cv2.imread('custom_depth.png', cv2.IMREAD_COLOR)
        lightMapImage = cv2.imread('custom_light.png', cv2.IMREAD_COLOR)
        groundTruthImage = cv2.imread('custom_whitted.png', cv2.IMREAD_COLOR)
    alignedImage = np.concatenate((noShadowImage, lightMapImage, depthMapImage0, depthMapImage1, depthMapImage2, depthMapImage3, depthMapImage4, depthMapImage5, depthMapImage6, depthMapImage7, groundTruthImage), axis=1)
    cv2.imwrite(os.path.join(dstFolder, fileName + '_' + str(counter).zfill(4) + '.png'), alignedImage)
    counter += 1
    #cv2.imwrite(os.path.join(dstFolder, fileName + '.png'), groundTruthImage)
def randomChooseK(inList, k):
    """Remove and return k randomly-chosen elements from inList.

    Mutates inList in place (chosen items are popped out of it).
    """
    chosen = []
    for _ in range(k):
        pickIndex = random.choice(range(len(inList)))
        chosen.append(inList.pop(pickIndex))
    return chosen
def splitImages(dstFolder, valCount, testCount, alignedImages):
    """Split the aligned images into train/val/test folders under dstFolder.

    Creates 'train', 'test' and 'val' subfolders (os.mkdir raises if they
    already exist), randomly moves valCount images to val/ and testCount to
    test/, and moves the remainder to train/.  Files are renamed to
    sequential '<index>.png' names.  alignedImages is mutated: the chosen
    val/test items are popped out by randomChooseK.
    """
    os.mkdir(os.path.join(dstFolder, 'train'))
    os.mkdir(os.path.join(dstFolder, 'test'))
    os.mkdir(os.path.join(dstFolder, 'val'))
    # randomly choose images for validation set
    valAlignedImages = randomChooseK(alignedImages, valCount)
    # now randomly choose images for test set
    testAlignedImages = randomChooseK(alignedImages, testCount)
    # remaining images go in train set
    trainAlignedImages = alignedImages
    # move the images to their respective folders
    for index, imagePath in enumerate(valAlignedImages):
        shutil.move(imagePath, os.path.join(dstFolder, os.path.join('val', str(index) + '.png')))
    for index, imagePath in enumerate(testAlignedImages):
        shutil.move(imagePath, os.path.join(dstFolder, os.path.join('test', str(index) + '.png')))
    for index, imagePath in enumerate(trainAlignedImages):
        shutil.move(imagePath, os.path.join(dstFolder, os.path.join('train', str(index) + '.png')))
def randomizeObject(meshFile, resize, meshIsAreaLight=False):
fileName = meshFile
objFile = open(fileName, 'r')
# sort all the strings in their corresponding lists
textureVertices = []
geometricVertices = []
vertexNormals = []
faces = []
for line in objFile:
if line[:2] == 'vt':
# texture vertices
textureVertices.append(line)
elif line[:2] == 'vn':
# vertex normals
vertexNormals.append(line)
elif line[0] == 'v':
# geometricVertices
geometricVertices.append(line)
elif line[0] == 'f':
# faces
faces.append(line)
else:
continue
objFile.close()
# create numpy matrix from the vertices string
geometricVertices = createNumpyMatrix(geometricVertices)
# compute the center of mass of the geometric vertices matrix
com = getCenterOfMass(geometricVertices)
# arrange the vertices around the center of mass
# scale the object so that its vertices have 2 units standard deviation from the mean
geometricVertices = centerAndScaleObject(geometricVertices, com, resize, meshIsAreaLight)
if not meshIsAreaLight:
# ROTATION SHOULD HAPPEN AFTER CENTERING AND SCALING THE OBJECT AND BEFORE TRANSLATING IT
# TO ITS NEW POSITION, IT BECOMES EASIER THAT WAY
# create rotation matrix if needed
rotationMatrix = getRotationMatrix()
# rotate the object
geometricVertices = rotateObject(geometricVertices, rotationMatrix)
# CAUTION! MIGHT NEED TO CHANGE THE VERTEX NORMALS TOO
# it probably was causing problems, so also rotating the vertex normals now!
vertexNormals = createNumpyMatrix(vertexNormals)
vertexNormals = rotateObject(vertexNormals, rotationMatrix)
# get axis aligned bounding box of the object
bbox = getAxisAlignedBoundingBox(geometricVertices)
# bbox will have 6 elements
# xLeft, xRight, yTop, yBottom, | |
# -*- coding: utf-8 -*-
# (C) Copyright 2014 Voyager Search
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import collections
import shutil
import requests
import arcpy
from utils import status
from utils import task_utils
import warnings
from requests.packages.urllib3.exceptions import InsecureRequestWarning
warnings.simplefilter('ignore', InsecureRequestWarning)
# Get SSL trust setting.
verify_ssl = task_utils.get_ssl_mode()
status_writer = status.Writer()
result_count = 0
processed_count = 0.
files_to_package = list()
errors_reasons = {}
skipped_reasons = {}
layer_name = ""
existing_fields = []
new_fields = []
field_values = []
def clip_data(input_items, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format):
    """Clips input results.

    Iterates ``input_items`` (dataset path or service URL -> output name,
    or a list of geometry rows) and clips each item to the clip polygon,
    writing results into ``out_workspace``.  Progress and warnings are
    reported through the module-level ``status_writer``; failures are
    recorded in the module globals ``errors_reasons``/``skipped_reasons``.

    Returns:
        (clipped, errors, skipped) counts as a tuple of ints.

    NOTE(review): this is Python 2 / ArcMap-era code (``row.iteritems()``,
    ``ex.message``, ``arcpy.mapping``); it will not run unmodified on
    Python 3 / ArcGIS Pro.  ``_`` (gettext) is assumed to be installed as a
    builtin by the task runner.
    """
    clipped = 0
    errors = 0
    skipped = 0
    fds = None
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values
    for ds, out_name in input_items.items():
        status_writer.send_status(ds)
        try:
            if not isinstance(out_name, list):
                out_name = ''
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    if out_coordinate_system == 0:
                        service_layer = task_utils.ServiceLayer(ds)
                        wkid = service_layer.wkid
                        out_sr = arcpy.SpatialReference(wkid)
                        arcpy.env.outputCoordinateSystem = out_sr
                    else:
                        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                        arcpy.env.outputCoordinateSystem = out_sr
                    if not out_sr.name == gcs_sr.name:
                        try:
                            geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                            clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                        except (AttributeError, IndexError):
                            try:
                                clip_poly = gcs_clip_poly.projectAs(out_sr)
                            except AttributeError:
                                clip_poly = gcs_clip_poly
                        except ValueError:
                            clip_poly = gcs_clip_poly
                    else:
                        clip_poly = gcs_clip_poly
                    arcpy.env.overwriteOutput = True
                    service_layer = task_utils.ServiceLayer(ds, clip_poly.extent.JSON, 'esriGeometryEnvelope')
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    # Query the service in OID batches and append each clipped batch.
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
                        # NOTE(review): the format string has two placeholders but is
                        # given three arguments — the eval(clip_poly.JSON) result is
                        # silently ignored by str.format, and eval() here is both
                        # needless and unsafe.
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, '*', eval(clip_poly.JSON))
                        feature_set = arcpy.FeatureSet()
                        if not out_name:
                            out_name = service_layer.service_layer_name
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.Clip_analysis(feature_set, clip_poly, out_name)
                        else:
                            clip_features = arcpy.Clip_analysis(feature_set, clip_poly, 'in_memory/features')
                            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(clip_features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt, '', 'clip_data')
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(ds), 'clip_data')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    # NOTE(review): ex.message only exists on Python 2 exceptions.
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue
            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # ------------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()
            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                for row in out_name:
                    try:
                        arcpy.env.overwriteOutput = True
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            # NOTE(review): Python 2 iteritems(); the sibling branch
                            # below uses items() and also guards on existing_fields —
                            # the two branches should be consistent.
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                try:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                except arcpy.ExecuteError:
                                    arcpy.DeleteField_management(layer_name, valid_field)
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.items():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                if not valid_field in existing_fields:
                                    try:
                                        arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                    except arcpy.ExecuteError:
                                        arcpy.DeleteField_management(layer_name, valid_field)
                                        arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        clipped_geometry = arcpy.Clip_analysis(geom, gcs_clip_poly, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(row['name']), 'clip_data')
                        processed_count += 1
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        # NOTE(review): argument order looks wrong — `_` is called with
                        # the status constant plus the message; probably meant
                        # send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds)).
                        status_writer.send_state(_(status.STAT_WARNING, 'Invalid input type: {0}').format(ds))
                        skipped_reasons[ds] = 'Invalid input type'
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue
            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(status.STAT_WARNING, _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass
            # --------------------------------------------------------------------
            # If no output coord. system, get output spatial reference from input.
            # --------------------------------------------------------------------
            if out_coordinate_system == 0:
                try:
                    out_sr = dsc.spatialReference
                    arcpy.env.outputCoordinateSystem = out_sr
                except AttributeError:
                    out_sr = task_utils.get_spatial_reference(4326)
                    arcpy.env.outputCoordinateSystem = out_sr
            else:
                out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                arcpy.env.outputCoordinateSystem = out_sr
            # -------------------------------------------------
            # If the item is not a file, project the clip area.
            # -------------------------------------------------
            if dsc.dataType not in ('File', 'TextFile'):
                if not out_sr.name == gcs_sr.name:
                    try:
                        geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                        clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                    except (AttributeError, IndexError):
                        try:
                            clip_poly = gcs_clip_poly.projectAs(out_sr)
                        except AttributeError:
                            clip_poly = gcs_clip_poly
                    except ValueError:
                        clip_poly = gcs_clip_poly
                else:
                    clip_poly = gcs_clip_poly
                extent = clip_poly.extent
            # -----------------------------
            # Check the data type and clip.
            # -----------------------------
            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the feature dataset if it doesn't exist.
                ws = os.path.dirname(ds)
                # NOTE(review): convoluted test — any('.gdb') just checks a
                # non-empty string, so this is effectively "any ext in ws".
                if [any(ext) for ext in ('.gdb', '.mdb', '.sde') if ext in ws]:
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_poly, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name, dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_poly, os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_poly, name)
            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace
            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_poly, clipping_geometry="ClippingGeometry")
            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_poly, arcpy.env.workspace)
            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_poly, name)
                arcpy.env.workspace = out_workspace
            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer,
                                                gcs_clip_poly,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    # Non-KML files are copied, not clipped.
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count, _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue
            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_poly, arcpy.env.workspace, map_frame_name)
            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count, _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                continue
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = ex.message
            errors += 1
            pass
    return clipped, errors, skipped
def execute(request):
"""Clips selected search results using the clip geometry.
:param request: json as a dict.
"""
clipped = 0
errors = 0
skipped = 0
global result_count
parameters = request['params']
# Retrieve clip geometry.
try:
clip_area = task_utils.get_parameter_value(parameters, 'clip_geometry', 'wkt')
if not clip_area:
clip_area = 'POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'
except KeyError:
clip_area = 'POLYGON ((-180 -90, -180 90, 180 90, 180 -90, -180 -90))'
# Retrieve the coordinate system code.
out_coordinate_system = int(task_utils.get_parameter_value(parameters, 'output_projection', 'code'))
# Retrieve the output format, create mxd parameter and output file name values.
out_format = task_utils.get_parameter_value(parameters, 'output_format', 'value')
create_mxd = task_utils.get_parameter_value(parameters, 'create_mxd', 'value')
output_file_name = task_utils.get_parameter_value(parameters, 'output_file_name', 'value')
if not output_file_name:
output_file_name = 'clip_results'
# Create the temporary workspace if clip_feature_class:
out_workspace = os.path.join(request['folder'], 'temp')
if not os.path.exists(out_workspace):
os.makedirs(out_workspace)
# Set the output coordinate system.
| |
array
beta : NDArray
beta array
moving_mean : NDArray
running mean of input
moving_var : NDArray
running variance of input
eps : double, optional, default=0.0010000000474974513
Epsilon to prevent div 0. Must be no less than CUDNN_BN_MIN_EPSILON
defined in cudnn.h when using cudnn (usually 1e-5)
momentum : float, optional, default=0.899999976
Momentum for moving average
fix_gamma : boolean, optional, default=1
Fix gamma while training
use_global_stats : boolean, optional, default=0
Whether use global moving statistics instead of local batch-norm.
This will force change batch-norm into a scale shift operator.
output_mean_var : boolean, optional, default=0
Output the mean and inverse std
axis : int, optional, default='1'
Specify which shape axis the channel is specified
cudnn_off : boolean, optional, default=0
Do not select CUDNN operator, if available
min_calib_range : float or None, optional, default=None
The minimum scalar value in the form of float32 obtained through calibration.
If present, it will be used to by quantized batch norm op to calculate primitive scale.
Note: this calib_range is to calib bn output.
max_calib_range : float or None, optional, default=None
The maximum scalar value in the form of float32 obtained through calibration.
If present, it will be used to by quantized batch norm op to calculate primitive scale.
Note: this calib_range is to calib bn output.
Returns
-------
out : NDArray or list of NDArrays
The output of this function.
"""
return _mx_nd_npx.batch_norm(x, gamma, beta, running_mean, running_var, eps=eps,
momentum=momentum, fix_gamma=fix_gamma,
use_global_stats=use_global_stats,
output_mean_var=output_mean_var, axis=axis, cudnn_off=cudnn_off,
min_calib_range=min_calib_range, max_calib_range=max_calib_range)
# pylint: disable=too-many-arguments, unused-argument
@set_module('mxnet.numpy_extension')
def fully_connected(x, weight, bias=None, num_hidden=None,
                    no_bias=True, flatten=True, **kwargs):
    r"""Apply a linear transformation: :math:`Y = XW^T + b`.

    With ``flatten=True`` the shapes are:

    - **data**: `(batch_size, x1, x2, ..., xn)`
    - **weight**: `(num_hidden, x1 * x2 * ... * xn)`
    - **bias**: `(num_hidden,)`
    - **out**: `(batch_size, num_hidden)`

    With ``flatten=False`` only the last axis is treated as features:

    - **data**: `(x1, x2, ..., xn, input_dim)`
    - **weight**: `(num_hidden, input_dim)`
    - **bias**: `(num_hidden,)`
    - **out**: `(x1, x2, ..., xn, num_hidden)`

    The learnable parameters are ``weight`` and ``bias``; when ``no_bias``
    is true the bias term is skipped entirely.

    .. Note::

        The sparse support for FullyConnected is limited to forward evaluation
        with `row_sparse` weight and bias, where the length of `weight.indices`
        and `bias.indices` must equal `num_hidden`.  For 'csr' sparse data,
        prefer sparse.dot over sparse.FullyConnected.

    Parameters
    ----------
    x : NDArray
        Input data.
    weight : NDArray
        Weight matrix.
    bias : NDArray, optional
        Bias parameter; ignored when ``no_bias`` is true.
    num_hidden : int, required
        Number of hidden nodes of the output.
    no_bias : boolean, optional, default=True
        Whether to disable the bias parameter.
    flatten : boolean, optional, default=True
        Whether to collapse all but the first axis of the input data tensor.

    Returns
    -------
    out : NDArray or list of NDArrays
        The output of this function.
    """
    result = _mx_nd_npx.fully_connected(x, weight, bias, num_hidden=num_hidden,
                                        no_bias=no_bias, flatten=flatten)
    return result
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def pick(data, index, axis=-1, mode='clip', keepdims=False):
    r"""Pick elements from *data* along *axis* according to *index*.

    For an input of shape ``(d0, d1)`` and indices of shape ``(i0,)`` the
    result has shape ``(i0,)`` with ``output[i] = input[i, indices[i]]``.
    N-dimensional inputs with (n-1)-dimensional index arrays are supported.
    Out-of-bound indices are handled per ``mode`` (default 'clip').

    Parameters
    ----------
    data : NDArray
        The input array.
    index : NDArray
        The index array.
    axis : int or None, optional, default='-1'
        Axis along which to pick; negative values index from the right.
        ``None`` picks w.r.t. the flattened input.
    mode : {'clip', 'wrap'}, optional, default='clip'
        'clip' clamps out-of-range indices to the last valid index along
        the axis; 'wrap' wraps them around.
    keepdims : boolean, optional, default=False
        Keep the picked axis in the result as a size-one dimension.

    Returns
    -------
    out : NDArray or list of NDArrays
        The picked elements.

    Example
    -------
    >>> x = np.array([[1., 2.],[3., 4.],[5., 6.]])
    >>> npx.pick(x, np.array([0, 1]), 0)
    array([1., 4.])
    >>> npx.pick(x, np.array([0, 1, 0]), 1)
    array([1., 4., 5.])
    >>> npx.pick(x, np.array([2, -1, -2]), 1, mode='wrap')
    array([1., 4., 5.])
    >>> npx.pick(x, np.array([[1.], [0.], [2.]]), 1, keepdims=True)
    array([[2.],
           [3.],
           [6.]])
    """
    picked = _mx_nd_npx.pick(data, index, axis, mode, keepdims)
    return picked
# pylint: disable=too-many-arguments
@set_module('mxnet.numpy_extension')
def convolution(data=None, weight=None, bias=None, kernel=None, stride=None, dilate=None,
pad=None, num_filter=1, num_group=1, workspace=1024, no_bias=False,
cudnn_tune=None, cudnn_off=False, layout=None):
r"""Compute *N*-D convolution on *(N+2)*-D input.
In the 2-D convolution, given input data with shape *(batch_size,
channel, height, width)*, the output is computed by
.. math::
out[n,i,:,:] = bias[i] + \sum_{j=0}^{channel} data[n,j,:,:] \star
weight[i,j,:,:]
where :math:`\star` is the 2-D cross-correlation operator.
For general 2-D convolution, the shapes are
- **data**: *(batch_size, channel, height, width)*
- **weight**: *(num_filter, channel, kernel[0], kernel[1])*
- **bias**: *(num_filter,)*
- **out**: *(batch_size, num_filter, out_height, out_width)*.
Define::
f(x,k,p,s,d) = floor((x+2*p-d*(k-1)-1)/s)+1
then we have::
out_height=f(height, kernel[0], pad[0], stride[0], dilate[0])
out_width=f(width, kernel[1], pad[1], stride[1], dilate[1])
If ``no_bias`` is set to be true, then the ``bias`` term is ignored.
The default data ``layout`` is *NCHW*, namely *(batch_size, channel, height,
width)*. We can choose other layouts such as *NWC*.
If ``num_group`` is larger than 1, denoted by *g*, then split the input ``data``
evenly into *g* parts along the channel axis, and also evenly split ``weight``
along the first dimension. Next compute the convolution on the *i*-th part of
the data with the *i*-th weight part. The output is obtained by concatenating all
the *g* results.
1-D convolution does not have *height* dimension but only *width* in space.
- **data**: *(batch_size, channel, width)*
- **weight**: *(num_filter, channel, kernel[0])*
- **bias**: *(num_filter,)*
- **out**: *(batch_size, num_filter, out_width)*.
3-D convolution adds an additional *depth* dimension besides *height* and
*width*. The shapes are
- **data**: *(batch_size, channel, depth, height, width)*
- **weight**: *(num_filter, channel, kernel[0], kernel[1], kernel[2])*
- **bias**: *(num_filter,)*
- **out**: *(batch_size, num_filter, out_depth, out_height, out_width)*.
Both ``weight`` and ``bias`` are learnable parameters.
There are other options to tune the performance.
- **cudnn_tune**: enable this option leads to higher startup time but may give
faster speed. Options are
- **off**: no tuning
- **limited_workspace**:run test and pick the fastest algorithm that doesn't
exceed workspace limit.
- **fastest**: pick the fastest algorithm and ignore workspace limit.
- **None** (default): the behavior is determined by environment variable
``MXNET_CUDNN_AUTOTUNE_DEFAULT``. 0 for off, 1 for limited workspace
(default), 2 for fastest.
- **workspace**: A large number leads to more (GPU) memory usage but may improve
the performance.
Parameters
----------
data : NDArray
Input data to the ConvolutionOp.
weight : NDArray
Weight matrix.
bias : NDArray
Bias parameter.
kernel : Shape(tuple), required
Convolution kernel size: (w,), (h, w) or (d, h, w)
stride : Shape(tuple), optional, default=[]
Convolution stride: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.
dilate : Shape(tuple), optional, default=[]
Convolution dilate: (w,), (h, w) or (d, h, w). Defaults to 1 for each dimension.
pad : Shape(tuple), optional, default=[]
Zero pad for convolution: (w,), (h, w) or (d, h, w). Defaults to no padding.
num_filter : int (non-negative), required
Convolution filter(channel) number
num_group : int (non-negative), | |
'861571686':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'861571687':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'861571688':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'861571689':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'861571690':{'en': 'Handan, Hebei', 'zh': u('\u6cb3\u5317\u7701\u90af\u90f8\u5e02')},
'861571691':{'en': 'Shijiazhuang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u77f3\u5bb6\u5e84\u5e02')},
'861571692':{'en': 'Baoding, Hebei', 'zh': u('\u6cb3\u5317\u7701\u4fdd\u5b9a\u5e02')},
'861571693':{'en': 'Zhangjiakou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5f20\u5bb6\u53e3\u5e02')},
'861571694':{'en': 'Chengde, Hebei', 'zh': u('\u6cb3\u5317\u7701\u627f\u5fb7\u5e02')},
'861571695':{'en': 'Baoding, Hebei', 'zh': u('\u6cb3\u5317\u7701\u4fdd\u5b9a\u5e02')},
'861571696':{'en': 'Langfang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5eca\u574a\u5e02')},
'861571697':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'861571698':{'en': 'Hengshui, Hebei', 'zh': u('\u6cb3\u5317\u7701\u8861\u6c34\u5e02')},
'861571699':{'en': 'Xingtai, Hebei', 'zh': u('\u6cb3\u5317\u7701\u90a2\u53f0\u5e02')},
'861571700':{'en': 'Nanchang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5357\u660c\u5e02')},
'861571701':{'en': 'Yingtan, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u9e70\u6f6d\u5e02')},
'861571702':{'en': 'Jiujiang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u4e5d\u6c5f\u5e02')},
'861571703':{'en': 'Shangrao, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u4e0a\u9976\u5e02')},
'861571704':{'en': 'Fuzhou, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u629a\u5dde\u5e02')},
'861571705':{'en': 'Yichun, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5b9c\u6625\u5e02')},
'861571706':{'en': 'JiAn, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5409\u5b89\u5e02')},
'861571707':{'en': 'Ganzhou, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u8d63\u5dde\u5e02')},
'861571708':{'en': 'Ganzhou, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u8d63\u5dde\u5e02')},
'861571709':{'en': 'Nanchang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5357\u660c\u5e02')},
'86157171':{'en': 'Wuhan, Hubei', 'zh': u('\u6e56\u5317\u7701\u6b66\u6c49\u5e02')},
'861571720':{'en': 'Yichang, Hubei', 'zh': u('\u6e56\u5317\u7701\u5b9c\u660c\u5e02')},
'861571721':{'en': 'Jingzhou, Hubei', 'zh': u('\u6e56\u5317\u7701\u8346\u5dde\u5e02')},
'861571722':{'en': 'Wuhan, Hubei', 'zh': u('\u6e56\u5317\u7701\u6b66\u6c49\u5e02')},
'861571723':{'en': 'Huangshi, Hubei', 'zh': u('\u6e56\u5317\u7701\u9ec4\u77f3\u5e02')},
'861571724':{'en': 'Xianning, Hubei', 'zh': u('\u6e56\u5317\u7701\u54b8\u5b81\u5e02')},
'861571725':{'en': 'Huanggang, Hubei', 'zh': u('\u6e56\u5317\u7701\u9ec4\u5188\u5e02')},
'861571726':{'en': 'Enshi, Hubei', 'zh': u('\u6e56\u5317\u7701\u6069\u65bd\u571f\u5bb6\u65cf\u82d7\u65cf\u81ea\u6cbb\u5dde')},
'861571727':{'en': 'Xiangfan, Hubei', 'zh': u('\u6e56\u5317\u7701\u8944\u6a0a\u5e02')},
'861571728':{'en': 'Shiyan, Hubei', 'zh': u('\u6e56\u5317\u7701\u5341\u5830\u5e02')},
'861571729':{'en': 'Xiaogan, Hubei', 'zh': u('\u6e56\u5317\u7701\u5b5d\u611f\u5e02')},
'861571730':{'en': 'Yueyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u5cb3\u9633\u5e02')},
'861571731':{'en': 'Changsha, Hunan', 'zh': u('\u6e56\u5357\u7701\u957f\u6c99\u5e02')},
'861571732':{'en': 'Xiangtan, Hunan', 'zh': u('\u6e56\u5357\u7701\u6e58\u6f6d\u5e02')},
'861571733':{'en': 'Zhuzhou, Hunan', 'zh': u('\u6e56\u5357\u7701\u682a\u6d32\u5e02')},
'861571734':{'en': 'Hengyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u8861\u9633\u5e02')},
'861571735':{'en': 'Chenzhou, Hunan', 'zh': u('\u6e56\u5357\u7701\u90f4\u5dde\u5e02')},
'861571736':{'en': 'Changde, Hunan', 'zh': u('\u6e56\u5357\u7701\u5e38\u5fb7\u5e02')},
'861571737':{'en': 'Yiyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u76ca\u9633\u5e02')},
'861571738':{'en': 'Loudi, Hunan', 'zh': u('\u6e56\u5357\u7701\u5a04\u5e95\u5e02')},
'861571739':{'en': 'Shaoyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u90b5\u9633\u5e02')},
'861571740':{'en': 'Yueyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u5cb3\u9633\u5e02')},
'861571741':{'en': 'Changsha, Hunan', 'zh': u('\u6e56\u5357\u7701\u957f\u6c99\u5e02')},
'861571742':{'en': 'Xiangtan, Hunan', 'zh': u('\u6e56\u5357\u7701\u6e58\u6f6d\u5e02')},
'861571743':{'en': 'Xiangxi, Hunan', 'zh': u('\u6e56\u5357\u7701\u6e58\u897f\u571f\u5bb6\u65cf\u82d7\u65cf\u81ea\u6cbb\u5dde')},
'861571744':{'en': 'Zhangjiajie, Hunan', 'zh': u('\u6e56\u5357\u7701\u5f20\u5bb6\u754c\u5e02')},
'861571745':{'en': 'Huaihua, Hunan', 'zh': u('\u6e56\u5357\u7701\u6000\u5316\u5e02')},
'861571746':{'en': 'Yongzhou, Hunan', 'zh': u('\u6e56\u5357\u7701\u6c38\u5dde\u5e02')},
'861571747':{'en': 'Yongzhou, Hunan', 'zh': u('\u6e56\u5357\u7701\u6c38\u5dde\u5e02')},
'861571748':{'en': 'Changsha, Hunan', 'zh': u('\u6e56\u5357\u7701\u957f\u6c99\u5e02')},
'861571749':{'en': 'Changsha, Hunan', 'zh': u('\u6e56\u5357\u7701\u957f\u6c99\u5e02')},
'861571750':{'en': 'Changsha, Hunan', 'zh': u('\u6e56\u5357\u7701\u957f\u6c99\u5e02')},
'861571751':{'en': 'Changsha, Hunan', 'zh': u('\u6e56\u5357\u7701\u957f\u6c99\u5e02')},
'861571752':{'en': 'Loudi, Hunan', 'zh': u('\u6e56\u5357\u7701\u5a04\u5e95\u5e02')},
'861571753':{'en': 'Zhuzhou, Hunan', 'zh': u('\u6e56\u5357\u7701\u682a\u6d32\u5e02')},
'861571754':{'en': 'Huaihua, Hunan', 'zh': u('\u6e56\u5357\u7701\u6000\u5316\u5e02')},
'861571755':{'en': 'Chenzhou, Hunan', 'zh': u('\u6e56\u5357\u7701\u90f4\u5dde\u5e02')},
'861571756':{'en': 'Changde, Hunan', 'zh': u('\u6e56\u5357\u7701\u5e38\u5fb7\u5e02')},
'861571757':{'en': 'Yiyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u76ca\u9633\u5e02')},
'861571758':{'en': 'Hengyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u8861\u9633\u5e02')},
'861571759':{'en': 'Shaoyang, Hunan', 'zh': u('\u6e56\u5357\u7701\u90b5\u9633\u5e02')},
'861571760':{'en': 'Shijiazhuang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u77f3\u5bb6\u5e84\u5e02')},
'861571761':{'en': 'Zhangjiakou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5f20\u5bb6\u53e3\u5e02')},
'861571762':{'en': 'Langfang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5eca\u574a\u5e02')},
'861571763':{'en': 'Langfang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5eca\u574a\u5e02')},
'861571764':{'en': 'Langfang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5eca\u574a\u5e02')},
'861571765':{'en': 'Langfang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5eca\u574a\u5e02')},
'861571766':{'en': 'Langfang, Hebei', 'zh': u('\u6cb3\u5317\u7701\u5eca\u574a\u5e02')},
'861571767':{'en': 'Xingtai, Hebei', 'zh': u('\u6cb3\u5317\u7701\u90a2\u53f0\u5e02')},
'861571768':{'en': 'Xingtai, Hebei', 'zh': u('\u6cb3\u5317\u7701\u90a2\u53f0\u5e02')},
'861571769':{'en': 'Xingtai, Hebei', 'zh': u('\u6cb3\u5317\u7701\u90a2\u53f0\u5e02')},
'861571770':{'en': 'Fangchenggang, Guangxi', 'zh': u('\u5e7f\u897f\u9632\u57ce\u6e2f\u5e02')},
'861571771':{'en': 'Nanning, Guangxi', 'zh': u('\u5e7f\u897f\u5357\u5b81\u5e02')},
'861571772':{'en': 'Liuzhou, Guangxi', 'zh': u('\u5e7f\u897f\u67f3\u5dde\u5e02')},
'861571773':{'en': 'Guilin, Guangxi', 'zh': u('\u5e7f\u897f\u6842\u6797\u5e02')},
'861571774':{'en': 'Wuzhou, Guangxi', 'zh': u('\u5e7f\u897f\u68a7\u5dde\u5e02')},
'861571775':{'en': 'Yulin, Guangxi', 'zh': u('\u5e7f\u897f\u7389\u6797\u5e02')},
'861571776':{'en': 'Baise, Guangxi', 'zh': u('\u5e7f\u897f\u767e\u8272\u5e02')},
'861571777':{'en': 'Qinzhou, Guangxi', 'zh': u('\u5e7f\u897f\u94a6\u5dde\u5e02')},
'861571778':{'en': 'Hechi, Guangxi', 'zh': u('\u5e7f\u897f\u6cb3\u6c60\u5e02')},
'861571779':{'en': 'Beihai, Guangxi', 'zh': u('\u5e7f\u897f\u5317\u6d77\u5e02')},
'861571780':{'en': 'Ezhou, Hubei', 'zh': u('\u6e56\u5317\u7701\u9102\u5dde\u5e02')},
'861571781':{'en': 'Suizhou, Hubei', 'zh': u('\u6e56\u5317\u7701\u968f\u5dde\u5e02')},
'861571782':{'en': 'Jingmen, Hubei', 'zh': u('\u6e56\u5317\u7701\u8346\u95e8\u5e02')},
'861571783':{'en': 'Jingzhou, Hubei', 'zh': u('\u6e56\u5317\u7701\u8346\u5dde\u5e02')},
'861571784':{'en': 'Jingzhou, Hubei', 'zh': u('\u6e56\u5317\u7701\u8346\u5dde\u5e02')},
'861571785':{'en': 'Xiangfan, Hubei', 'zh': u('\u6e56\u5317\u7701\u8944\u6a0a\u5e02')},
'861571786':{'en': 'Xiangfan, Hubei', 'zh': u('\u6e56\u5317\u7701\u8944\u6a0a\u5e02')},
'861571787':{'en': 'Huanggang, Hubei', 'zh': u('\u6e56\u5317\u7701\u9ec4\u5188\u5e02')},
'861571788':{'en': 'Yichang, Hubei', 'zh': u('\u6e56\u5317\u7701\u5b9c\u660c\u5e02')},
'861571789':{'en': 'Yichang, Hubei', 'zh': u('\u6e56\u5317\u7701\u5b9c\u660c\u5e02')},
'861571790':{'en': 'Xinyu, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u65b0\u4f59\u5e02')},
'861571791':{'en': 'Nanchang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5357\u660c\u5e02')},
'861571792':{'en': 'Jiujiang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u4e5d\u6c5f\u5e02')},
'861571793':{'en': 'Shangrao, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u4e0a\u9976\u5e02')},
'861571794':{'en': 'Fuzhou, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u629a\u5dde\u5e02')},
'861571795':{'en': 'Yichun, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5b9c\u6625\u5e02')},
'861571796':{'en': 'JiAn, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u5409\u5b89\u5e02')},
'861571797':{'en': 'Ganzhou, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u8d63\u5dde\u5e02')},
'861571798':{'en': 'Jingdezhen, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u666f\u5fb7\u9547\u5e02')},
'861571799':{'en': 'Pingxiang, Jiangxi', 'zh': u('\u6c5f\u897f\u7701\u840d\u4e61\u5e02')},
'86157180':{'en': 'Chengdu, Sichuan', 'zh': u('\u56db\u5ddd\u7701\u6210\u90fd\u5e02')},
'861571810':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'861571811':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'861571812':{'en': 'Shenzhen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6df1\u5733\u5e02')},
'861571813':{'en': 'Dongguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e1c\u839e\u5e02')},
'861571814':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'861571815':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4f5b\u5c71\u5e02')},
'861571816':{'en': 'Shantou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c55\u5934\u5e02')},
'861571817':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861571818':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4f5b\u5c71\u5e02')},
'861571819':{'en': 'Huizhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u60e0\u5dde\u5e02')},
'861571820':{'en': 'Zhongshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e2d\u5c71\u5e02')},
'861571821':{'en': 'Jiangmen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c5f\u95e8\u5e02')},
'861571822':{'en': 'Shenzhen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6df1\u5733\u5e02')},
'861571823':{'en': 'Shaoguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u97f6\u5173\u5e02')},
'861571824':{'en': 'Shenzhen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6df1\u5733\u5e02')},
'861571825':{'en': 'Heyuan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6cb3\u6e90\u5e02')},
'861571826':{'en': 'Meizhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6885\u5dde\u5e02')},
'861571827':{'en': 'Shanwei, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6c55\u5c3e\u5e02')},
'861571828':{'en': 'Huizhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u60e0\u5dde\u5e02')},
'861571829':{'en': 'Yangjiang, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u9633\u6c5f\u5e02')},
'861571830':{'en': 'Zhanjiang, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6e5b\u6c5f\u5e02')},
'861571831':{'en': 'Maoming, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u8302\u540d\u5e02')},
'861571832':{'en': 'Zhaoqing, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u8087\u5e86\u5e02')},
'861571833':{'en': 'Dongguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e1c\u839e\u5e02')},
'861571834':{'en': 'Dongguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e1c\u839e\u5e02')},
'861571835':{'en': 'Qingyuan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6e05\u8fdc\u5e02')},
'861571836':{'en': 'Chaozhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6f6e\u5dde\u5e02')},
'861571837':{'en': 'Jieyang, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u63ed\u9633\u5e02')},
'861571838':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'861571839':{'en': 'Yunfu, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e91\u6d6e\u5e02')},
'861571840':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4f5b\u5c71\u5e02')},
'861571841':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'861571842':{'en': 'Shenzhen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6df1\u5733\u5e02')},
'861571843':{'en': 'Dongguan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e1c\u839e\u5e02')},
'861571844':{'en': 'Guangzhou, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u5e7f\u5dde\u5e02')},
'861571845':{'en': 'Foshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4f5b\u5c71\u5e02')},
'861571846':{'en': 'Zhuhai, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u73e0\u6d77\u5e02')},
'861571847':{'en': 'Shenzhen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6df1\u5733\u5e02')},
'861571848':{'en': 'Shenzhen, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u6df1\u5733\u5e02')},
'861571849':{'en': 'Zhongshan, Guangdong', 'zh': u('\u5e7f\u4e1c\u7701\u4e2d\u5c71\u5e02')},
'861571850':{'en': 'Guiyang, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u8d35\u9633\u5e02')},
'861571851':{'en': 'Guiyang, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u8d35\u9633\u5e02')},
'861571852':{'en': 'Zunyi, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9075\u4e49\u5e02')},
'861571853':{'en': 'Anshun, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u5b89\u987a\u5e02')},
'861571854':{'en': 'Qiannan, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9ed4\u5357\u5e03\u4f9d\u65cf\u82d7\u65cf\u81ea\u6cbb\u5dde')},
'861571855':{'en': 'Qiandongnan, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9ed4\u4e1c\u5357\u82d7\u65cf\u4f97\u65cf\u81ea\u6cbb\u5dde')},
'861571856':{'en': 'Tongren, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u94dc\u4ec1\u5730\u533a')},
'861571857':{'en': 'Bijie, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u6bd5\u8282\u5730\u533a')},
'861571858':{'en': 'Liupanshui, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u516d\u76d8\u6c34\u5e02')},
'861571859':{'en': 'Qianxinan, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9ed4\u897f\u5357\u5e03\u4f9d\u65cf\u82d7\u65cf\u81ea\u6cbb\u5dde')},
'861571860':{'en': 'Guiyang, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u8d35\u9633\u5e02')},
'861571861':{'en': 'Guiyang, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u8d35\u9633\u5e02')},
'861571862':{'en': 'Zunyi, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9075\u4e49\u5e02')},
'861571863':{'en': 'Anshun, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u5b89\u987a\u5e02')},
'861571864':{'en': 'Qiannan, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9ed4\u5357\u5e03\u4f9d\u65cf\u82d7\u65cf\u81ea\u6cbb\u5dde')},
'861571865':{'en': 'Qiandongnan, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9ed4\u4e1c\u5357\u82d7\u65cf\u4f97\u65cf\u81ea\u6cbb\u5dde')},
'861571866':{'en': 'Tongren, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u94dc\u4ec1\u5730\u533a')},
'861571867':{'en': 'Bijie, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u6bd5\u8282\u5730\u533a')},
'861571868':{'en': 'Liupanshui, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u516d\u76d8\u6c34\u5e02')},
'861571869':{'en': 'Qianxinan, Guizhou', 'zh': u('\u8d35\u5dde\u7701\u9ed4\u897f\u5357\u5e03\u4f9d\u65cf\u82d7\u65cf\u81ea\u6cbb\u5dde')},
'861571870':{'en': 'Xishuangbanna, Yunnan', 'zh': u('\u4e91\u5357\u7701\u897f\u53cc\u7248\u7eb3\u50a3\u65cf\u81ea\u6cbb\u5dde')},
'861571871':{'en': 'Kunming, Yunnan', 'zh': u('\u4e91\u5357\u7701\u6606\u660e\u5e02')},
'861571872':{'en': 'Dali, Yunnan', 'zh': u('\u4e91\u5357\u7701\u5927\u7406\u767d\u65cf\u81ea\u6cbb\u5dde')},
'861571873':{'en': 'Honghe, Yunnan', 'zh': u('\u4e91\u5357\u7701\u7ea2\u6cb3\u54c8\u5c3c\u65cf\u5f5d\u65cf\u81ea\u6cbb\u5dde')},
'861571874':{'en': 'Qujing, Yunnan', 'zh': u('\u4e91\u5357\u7701\u66f2\u9756\u5e02')},
'861571875':{'en': 'Baoshan, Yunnan', 'zh': u('\u4e91\u5357\u7701\u4fdd\u5c71\u5e02')},
'861571876':{'en': 'Kunming, Yunnan', 'zh': u('\u4e91\u5357\u7701\u6606\u660e\u5e02')},
'861571877':{'en': 'Yuxi, Yunnan', 'zh': u('\u4e91\u5357\u7701\u7389\u6eaa\u5e02')},
'861571878':{'en': 'Lijiang, Yunnan', 'zh': u('\u4e91\u5357\u7701\u4e3d\u6c5f\u5e02')},
'861571879':{'en': 'Kunming, Yunnan', 'zh': u('\u4e91\u5357\u7701\u6606\u660e\u5e02')},
'86157188':{'en': 'Beijing', 'zh': u('\u5317\u4eac\u5e02')},
'86157189':{'en': 'Haikou, Hainan', 'zh': u('\u6d77\u5357\u7701\u6d77\u53e3\u5e02')},
'861571900':{'en': 'Aksu, Xinjiang', 'zh': u('\u65b0\u7586\u963f\u514b\u82cf\u5730\u533a')},
'861571901':{'en': 'Altay, Xinjiang', 'zh': u('\u65b0\u7586\u963f\u52d2\u6cf0\u5730\u533a')},
'861571902':{'en': 'Kizilsu, Xinjiang', 'zh': u('\u65b0\u7586\u514b\u5b5c\u52d2\u82cf\u67ef\u5c14\u514b\u5b5c\u81ea\u6cbb\u5dde')},
'861571903':{'en': 'Bortala, Xinjiang', 'zh': u('\u65b0\u7586\u535a\u5c14\u5854\u62c9\u8499\u53e4\u81ea\u6cbb\u5dde')},
'861571904':{'en': 'Hami, Xinjiang', 'zh': u('\u65b0\u7586\u54c8\u5bc6\u5730\u533a')},
'861571905':{'en': 'Hotan, Xinjiang', 'zh': u('\u65b0\u7586\u548c\u7530\u5730\u533a')},
'861571906':{'en': 'Kashi, Xinjiang', 'zh': u('\u65b0\u7586\u5580\u4ec0\u5730\u533a')},
'861571907':{'en': 'Karamay, Xinjiang', 'zh': u('\u65b0\u7586\u514b\u62c9\u739b\u4f9d\u5e02')},
'861571908':{'en': 'Bayingolin, Xinjiang', 'zh': u('\u65b0\u7586\u5df4\u97f3\u90ed\u695e\u8499\u53e4\u81ea\u6cbb\u5dde')},
'861571909':{'en': 'Ili, Xinjiang', 'zh': u('\u65b0\u7586\u4f0a\u7281\u54c8\u8428\u514b\u81ea\u6cbb\u5dde')},
'861571910':{'en': 'XiAn, Shaanxi', 'zh': u('\u9655\u897f\u7701\u897f\u5b89\u5e02')},
'861571911':{'en': 'YanAn, Shaanxi', 'zh': u('\u9655\u897f\u7701\u5ef6\u5b89\u5e02')},
'861571912':{'en': 'Yulin, Shaanxi', 'zh': u('\u9655\u897f\u7701\u6986\u6797\u5e02')},
'861571913':{'en': 'Weinan, Shaanxi', 'zh': u('\u9655\u897f\u7701\u6e2d\u5357\u5e02')},
'861571914':{'en': 'Shangluo, Shaanxi', 'zh': u('\u9655\u897f\u7701\u5546\u6d1b\u5e02')},
'861571915':{'en': 'Ankang, Shaanxi', 'zh': u('\u9655\u897f\u7701\u5b89\u5eb7\u5e02')},
'861571916':{'en': 'Hanzhong, Shaanxi', 'zh': u('\u9655\u897f\u7701\u6c49\u4e2d\u5e02')},
'861571917':{'en': 'Baoji, Shaanxi', 'zh': u('\u9655\u897f\u7701\u5b9d\u9e21\u5e02')},
'861571918':{'en': 'XiAn, Shaanxi', 'zh': u('\u9655\u897f\u7701\u897f\u5b89\u5e02')},
'861571919':{'en': 'Tongchuan, Shaanxi', 'zh': u('\u9655\u897f\u7701\u94dc\u5ddd\u5e02')},
'861571920':{'en': 'XiAn, Shaanxi', 'zh': u('\u9655\u897f\u7701\u897f\u5b89\u5e02')},
'861571921':{'en': 'XiAn, Shaanxi', 'zh': u('\u9655\u897f\u7701\u897f\u5b89\u5e02')},
'861571922':{'en': 'Yulin, Shaanxi', 'zh': u('\u9655\u897f\u7701\u6986\u6797\u5e02')},
'861571923':{'en': 'Weinan, Shaanxi', 'zh': u('\u9655\u897f\u7701\u6e2d\u5357\u5e02')},
'861571924':{'en': 'Shangluo, Shaanxi', 'zh': u('\u9655\u897f\u7701\u5546\u6d1b\u5e02')},
'861571925':{'en': 'Weinan, Shaanxi', 'zh': u('\u9655\u897f\u7701\u6e2d\u5357\u5e02')},
'861571926':{'en': 'Hanzhong, Shaanxi', 'zh': u('\u9655\u897f\u7701\u6c49\u4e2d\u5e02')},
'861571927':{'en': 'Baoji, Shaanxi', 'zh': u('\u9655\u897f\u7701\u5b9d\u9e21\u5e02')},
'861571928':{'en': 'XiAn, Shaanxi', 'zh': u('\u9655\u897f\u7701\u897f\u5b89\u5e02')},
'861571929':{'en': 'XiAn, Shaanxi', 'zh': u('\u9655\u897f\u7701\u897f\u5b89\u5e02')},
'861571930':{'en': 'Baiyin, Gansu', 'zh': u('\u7518\u8083\u7701\u767d\u94f6\u5e02')},
'861571931':{'en': 'Lanzhou, Gansu', 'zh': u('\u7518\u8083\u7701\u5170\u5dde\u5e02')},
'861571932':{'en': 'Lanzhou, Gansu', 'zh': u('\u7518\u8083\u7701\u5170\u5dde\u5e02')},
'861571933':{'en': 'Lanzhou, Gansu', 'zh': u('\u7518\u8083\u7701\u5170\u5dde\u5e02')},
'861571934':{'en': 'Lanzhou, Gansu', 'zh': u('\u7518\u8083\u7701\u5170\u5dde\u5e02')},
'861571935':{'en': 'Jinchang, Gansu', 'zh': u('\u7518\u8083\u7701\u91d1\u660c\u5e02')},
'861571936':{'en': 'Jinchang, Gansu', 'zh': u('\u7518\u8083\u7701\u91d1\u660c\u5e02')},
'861571937':{'en': 'Jiuquan, Gansu', 'zh': u('\u7518\u8083\u7701\u9152\u6cc9\u5e02')},
'861571938':{'en': 'Tianshui, Gansu', 'zh': u('\u7518\u8083\u7701\u5929\u6c34\u5e02')},
'861571939':{'en': 'Jiuquan, Gansu', 'zh': u('\u7518\u8083\u7701\u9152\u6cc9\u5e02')},
'86157194':{'en': 'Chengdu, Sichuan', 'zh': u('\u56db\u5ddd\u7701\u6210\u90fd\u5e02')},
'861571950':{'en': 'Yinchuan, Ningxia', 'zh': u('\u5b81\u590f\u94f6\u5ddd\u5e02')},
'861571951':{'en': 'Yinchuan, Ningxia', 'zh': u('\u5b81\u590f\u94f6\u5ddd\u5e02')},
'861571952':{'en': 'Shizuishan, Ningxia', 'zh': u('\u5b81\u590f\u77f3\u5634\u5c71\u5e02')},
'861571953':{'en': 'Wuzhong, Ningxia', 'zh': u('\u5b81\u590f\u5434\u5fe0\u5e02')},
'861571954':{'en': 'Guyuan, Ningxia', 'zh': u('\u5b81\u590f\u56fa\u539f\u5e02')},
'861571955':{'en': 'Zhongwei, Ningxia', 'zh': u('\u5b81\u590f\u4e2d\u536b\u5e02')},
'861571956':{'en': 'Zhongwei, Ningxia', 'zh': u('\u5b81\u590f\u4e2d\u536b\u5e02')},
'861571957':{'en': 'Yinchuan, Ningxia', 'zh': u('\u5b81\u590f\u94f6\u5ddd\u5e02')},
'861571958':{'en': 'Yinchuan, Ningxia', 'zh': u('\u5b81\u590f\u94f6\u5ddd\u5e02')},
'861571959':{'en': 'Yinchuan, Ningxia', 'zh': u('\u5b81\u590f\u94f6\u5ddd\u5e02')},
'861571960':{'en': 'Linxia, Gansu', 'zh': u('\u7518\u8083\u7701\u4e34\u590f\u56de\u65cf\u81ea\u6cbb\u5dde')},
'861571961':{'en': 'Lanzhou, Gansu', 'zh': u('\u7518\u8083\u7701\u5170\u5dde\u5e02')},
'861571962':{'en': 'Dingxi, Gansu', 'zh': u('\u7518\u8083\u7701\u5b9a\u897f\u5e02')},
'861571963':{'en': 'Pingliang, Gansu', 'zh': u('\u7518\u8083\u7701\u5e73\u51c9\u5e02')},
'861571964':{'en': 'Qingyang, Gansu', 'zh': u('\u7518\u8083\u7701\u5e86\u9633\u5e02')},
'861571965':{'en': 'Qingyang, Gansu', 'zh': u('\u7518\u8083\u7701\u5e86\u9633\u5e02')},
'861571966':{'en': 'Zhangye, Gansu', 'zh': u('\u7518\u8083\u7701\u5f20\u6396\u5e02')},
'861571967':{'en': 'Gannan, Gansu', 'zh': u('\u7518\u8083\u7701\u7518\u5357\u85cf\u65cf\u81ea\u6cbb\u5dde')},
'861571968':{'en': 'Tianshui, Gansu', 'zh': u('\u7518\u8083\u7701\u5929\u6c34\u5e02')},
'861571969':{'en': 'Longnan, Gansu', 'zh': u('\u7518\u8083\u7701\u9647\u5357\u5e02')},
'861571970':{'en': 'Haibei, Qinghai', 'zh': u('\u9752\u6d77\u7701\u6d77\u5317\u85cf\u65cf\u81ea\u6cbb\u5dde')},
'861571971':{'en': 'Xining, Qinghai', 'zh': u('\u9752\u6d77\u7701\u897f\u5b81\u5e02')},
'861571972':{'en': 'Haidong, Qinghai', 'zh': u('\u9752\u6d77\u7701\u6d77\u4e1c\u5730\u533a')},
'861571973':{'en': 'Xining, Qinghai', 'zh': u('\u9752\u6d77\u7701\u897f\u5b81\u5e02')},
'861571974':{'en': 'Hainan, Qinghai', 'zh': u('\u9752\u6d77\u7701\u6d77\u5357\u85cf\u65cf\u81ea\u6cbb\u5dde')},
'861571975':{'en': 'Xining, Qinghai', 'zh': u('\u9752\u6d77\u7701\u897f\u5b81\u5e02')},
'861571976':{'en': 'Xining, Qinghai', 'zh': u('\u9752\u6d77\u7701\u897f\u5b81\u5e02')},
'861571977':{'en': 'Haixi, Qinghai', 'zh': u('\u9752\u6d77\u7701\u6d77\u897f\u8499\u53e4\u65cf\u85cf\u65cf\u81ea\u6cbb\u5dde')},
'861571978':{'en': 'Xining, Qinghai', 'zh': u('\u9752\u6d77\u7701\u897f\u5b81\u5e02')},
'861571979':{'en': 'Haixi, Qinghai', 'zh': u('\u9752\u6d77\u7701\u6d77\u897f\u8499\u53e4\u65cf\u85cf\u65cf\u81ea\u6cbb\u5dde')},
'86157198':{'en': 'Haikou, Hainan', 'zh': u('\u6d77\u5357\u7701\u6d77\u53e3\u5e02')},
'861571990':{'en': 'Changji, Xinjiang', 'zh': u('\u65b0\u7586\u660c\u5409\u56de\u65cf\u81ea\u6cbb\u5dde')},
'861571991':{'en': 'Shihezi, Xinjiang', 'zh': u('\u65b0\u7586\u77f3\u6cb3\u5b50\u5e02')},
'861571992':{'en': 'Tacheng, Xinjiang', 'zh': u('\u65b0\u7586\u5854\u57ce\u5730\u533a')},
'861571993':{'en': 'Turpan, Xinjiang', 'zh': u('\u65b0\u7586\u5410\u9c81\u756a\u5730\u533a')},
'861571994':{'en': 'Urumchi, Xinjiang', 'zh': u('\u65b0\u7586\u4e4c\u9c81\u6728\u9f50\u5e02')},
'861571995':{'en': 'Ili, Xinjiang', 'zh': u('\u65b0\u7586\u4f0a\u7281\u54c8\u8428\u514b\u81ea\u6cbb\u5dde')},
'861571996':{'en': 'Karamay, Xinjiang', 'zh': u('\u65b0\u7586\u514b\u62c9\u739b\u4f9d\u5e02')},
'861571997':{'en': 'Bayingolin, Xinjiang', 'zh': u('\u65b0\u7586\u5df4\u97f3\u90ed\u695e\u8499\u53e4\u81ea\u6cbb\u5dde')},
'861571998':{'en': 'Urumchi, Xinjiang', 'zh': u('\u65b0\u7586\u4e4c\u9c81\u6728\u9f50\u5e02')},
'861571999':{'en': 'Urumchi, Xinjiang', 'zh': u('\u65b0\u7586\u4e4c\u9c81\u6728\u9f50\u5e02')},
'86157200':{'en': 'Baoding, Hebei', 'zh': u('\u6cb3\u5317\u7701\u4fdd\u5b9a\u5e02')},
'86157201':{'en': 'Baoding, Hebei', 'zh': u('\u6cb3\u5317\u7701\u4fdd\u5b9a\u5e02')},
'86157202':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'86157203':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'86157204':{'en': 'Cangzhou, Hebei', 'zh': u('\u6cb3\u5317\u7701\u6ca7\u5dde\u5e02')},
'861572050':{'en': 'Chuzhou, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u6ec1\u5dde\u5e02')},
'861572051':{'en': 'Hefei, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u5408\u80a5\u5e02')},
'861572052':{'en': 'Bengbu, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u868c\u57e0\u5e02')},
'861572053':{'en': 'Wuhu, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u829c\u6e56\u5e02')},
'861572054':{'en': 'Huainan, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u6dee\u5357\u5e02')},
'861572055':{'en': 'MaAnshan, Anhui', 'zh': u('\u5b89\u5fbd\u7701\u9a6c\u978d\u5c71\u5e02')},
'861572056':{'en': 'Anqing, Anhui', 'zh': | |
def test_cluster_update_timeout_non_int(self, mock_enforce):
    """A non-integer 'timeout' in the update body raises InvalidParameter."""
    self._mock_enforce_setup(mock_enforce, 'update', True)
    cluster_id = 'aaaa-bbbb-cccc'
    update_body = {'cluster': {'timeout': '10min'}}
    req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cluster_id},
                    json.dumps(update_body))
    engine_update = self.patchobject(rpc_client.EngineClient,
                                     'cluster_update')
    ex = self.assertRaises(senlin_exc.InvalidParameter,
                           self.controller.update,
                           req, tenant_id=self.tenant,
                           cluster_id=cluster_id,
                           body=update_body)
    self.assertEqual(_("Invalid value '10min' specified for 'timeout'"),
                     six.text_type(ex))
    # Validation must fail before any RPC reaches the engine.
    self.assertFalse(engine_update.called)
def test_cluster_update_cluster_notfound(self, mock_enforce):
    """Updating a missing cluster surfaces ClusterNotFound as HTTP 404."""
    self._mock_enforce_setup(mock_enforce, 'update', True)
    cluster_id = 'non-existent-cluster'
    update_body = {'cluster': {'profile_id': 'xxxx-yyyy-zzzz'}}
    # Have the engine report the cluster as missing via a remote error.
    engine_error = senlin_exc.ClusterNotFound(cluster=cluster_id)
    engine_call = self.patchobject(rpc_client.EngineClient, 'call')
    engine_call.side_effect = shared.to_remote_error(engine_error)
    req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cluster_id},
                    json.dumps(update_body))
    resp = shared.request_with_middleware(fault.FaultWrapper,
                                          self.controller.update,
                                          req, tenant_id=self.tenant,
                                          cluster_id=cluster_id,
                                          body=update_body)
    self.assertEqual(404, resp.json['code'])
    self.assertEqual('ClusterNotFound', resp.json['error']['type'])
def test_cluster_update_unsupported_status(self, mock_enforce):
    """An update rejected for the cluster's status maps to 400 NotSupported."""
    self._mock_enforce_setup(mock_enforce, 'update', True)
    cluster_id = 'aaaa-bbbb-cccc'
    update_body = {'cluster': {'profile_id': 'xxxx-yyyy-zzzz'}}
    # Engine refuses the operation in the cluster's current status.
    engine_error = senlin_exc.NotSupported(feature='Wrong status')
    engine_call = self.patchobject(rpc_client.EngineClient, 'call')
    engine_call.side_effect = shared.to_remote_error(engine_error)
    req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cluster_id},
                    json.dumps(update_body))
    resp = shared.request_with_middleware(fault.FaultWrapper,
                                          self.controller.update,
                                          req, tenant_id=self.tenant,
                                          cluster_id=cluster_id,
                                          body=update_body)
    self.assertEqual(400, resp.json['code'])
    self.assertEqual('NotSupported', resp.json['error']['type'])
def test_cluster_update_profile_notfound(self, mock_enforce):
    """Updating to a non-existent profile surfaces ProfileNotFound as 404."""
    self._mock_enforce_setup(mock_enforce, 'update', True)
    cluster_id = 'aaaa-bbbb-cccc'
    update_body = {'cluster': {'profile_id': 'not-a-profile'}}
    # Engine cannot resolve the requested profile.
    engine_error = senlin_exc.ProfileNotFound(profile='not-a-profile')
    engine_call = self.patchobject(rpc_client.EngineClient, 'call')
    engine_call.side_effect = shared.to_remote_error(engine_error)
    req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cluster_id},
                    json.dumps(update_body))
    resp = shared.request_with_middleware(fault.FaultWrapper,
                                          self.controller.update,
                                          req, tenant_id=self.tenant,
                                          cluster_id=cluster_id,
                                          body=update_body)
    self.assertEqual(404, resp.json['code'])
    self.assertEqual('ProfileNotFound', resp.json['error']['type'])
def test_cluster_update_profile_type_mismatch(self, mock_enforce):
    """A profile of a different type yields 400 ProfileTypeNotMatch."""
    self._mock_enforce_setup(mock_enforce, 'update', True)
    cluster_id = 'aaaa-bbbb-cccc'
    update_body = {'cluster': {'profile_id': 'profile-of-diff-type'}}
    # Engine rejects the profile because its type differs from the cluster's.
    engine_error = senlin_exc.ProfileTypeNotMatch(message='not matching')
    engine_call = self.patchobject(rpc_client.EngineClient, 'call')
    engine_call.side_effect = shared.to_remote_error(engine_error)
    req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cluster_id},
                    json.dumps(update_body))
    resp = shared.request_with_middleware(fault.FaultWrapper,
                                          self.controller.update,
                                          req, tenant_id=self.tenant,
                                          cluster_id=cluster_id,
                                          body=update_body)
    self.assertEqual(400, resp.json['code'])
    self.assertEqual('ProfileTypeNotMatch', resp.json['error']['type'])
def test_update_err_denied_policy(self, mock_enforce):
    """An update denied by the policy engine returns 403 Forbidden."""
    self._mock_enforce_setup(mock_enforce, 'update', False)
    cluster_id = 'aaaa-bbbb-cccc'
    update_body = {'cluster': {'profile_id': 'xxxx-yyyy-zzzz'}}
    req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cluster_id},
                    json.dumps(update_body))
    resp = shared.request_with_middleware(fault.FaultWrapper,
                                          self.controller.update,
                                          req, tenant_id=self.tenant,
                                          cluster_id=cluster_id,
                                          body=update_body)
    self.assertEqual(403, resp.status_int)
    self.assertIn('403 Forbidden', six.text_type(resp))
def test_cluster_action_add_nodes(self, mock_enforce):
    """add_nodes forwards the node list to the engine and relays its reply."""
    self._mock_enforce_setup(mock_enforce, 'action', True)
    cluster_id = 'aaaa-bbbb-cccc'
    action_body = {
        'add_nodes': {
            'nodes': ['xxxx-yyyy-zzzz', ],
        }
    }
    engine_resp = {'action': {'id': 'action-id', 'target': cluster_id}}
    req = self._put('/clusters/%(cluster_id)s/action' % {
        'cluster_id': cluster_id}, json.dumps(action_body))
    engine_call = self.patchobject(rpc_client.EngineClient, 'call',
                                   return_value=engine_resp)
    resp = self.controller.action(req, tenant_id=self.tenant,
                                  cluster_id=cluster_id,
                                  body=action_body)
    # Exactly one RPC, carrying the identity and node list unchanged.
    engine_call.assert_called_once_with(
        req.context,
        ('cluster_add_nodes', {
            'identity': cluster_id, 'nodes': ['xxxx-yyyy-zzzz'],
        })
    )
    self.assertEqual(engine_resp, resp)
def test_cluster_action_add_nodes_none(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'add_nodes': {'somearg': 'somevalue'}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual('No node to add', six.text_type(ex))
self.assertFalse(mock_call.called)
def test_cluster_action_add_nodes_empty(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'add_nodes': {'nodes': []}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual('No node to add', six.text_type(ex))
self.assertFalse(mock_call.called)
def test_cluster_action_add_nodes_bad_requests(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'add_nodes': {'nodes': ['bad-node-1']}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
error = senlin_exc.SenlinBadRequest(msg='Nodes not found: bad-node-1')
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual(400, resp.json['code'])
self.assertEqual('SenlinBadRequest', resp.json['error']['type'])
self.assertIn('Nodes not found: bad-node-1',
resp.json['error']['message'])
def test_cluster_action_del_nodes(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {
'del_nodes': {
'nodes': ['xxxx-yyyy-zzzz', ],
}
}
eng_resp = {'action': {'id': 'action-id', 'target': cid}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=eng_resp)
resp = self.controller.action(req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
mock_call.assert_called_once_with(
req.context,
('cluster_del_nodes', {
'identity': cid, 'nodes': ['xxxx-yyyy-zzzz'],
})
)
self.assertEqual(eng_resp, resp)
def test_cluster_action_del_nodes_none(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'del_nodes': {'somearg': 'somevalue'}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual('No node to delete', six.text_type(ex))
self.assertFalse(mock_call.called)
def test_cluster_action_del_nodes_empty(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'del_nodes': {'nodes': []}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual('No node to delete', six.text_type(ex))
self.assertFalse(mock_call.called)
def test_cluster_action_del_nodes_bad_requests(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'del_nodes': {'nodes': ['bad-node-1']}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
error = senlin_exc.SenlinBadRequest(msg='Nodes not found: bad-node-1')
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual(400, resp.json['code'])
self.assertEqual('SenlinBadRequest', resp.json['error']['type'])
self.assertIn('Nodes not found: bad-node-1',
resp.json['error']['message'])
def test_cluster_action_scale_out(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'scale_out': {'count': 1}}
eng_resp = {'action': {'id': 'action-id', 'target': cid}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=eng_resp)
resp = self.controller.action(req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
mock_call.assert_called_once_with(
req.context,
('cluster_scale_out', {
'identity': cid, 'count': 1,
})
)
self.assertEqual(eng_resp, resp)
def test_cluster_action_scale_in(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'scale_in': {'count': 1}}
eng_resp = {'action': {'id': 'action-id', 'target': cid}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=eng_resp)
resp = self.controller.action(req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
mock_call.assert_called_once_with(
req.context,
('cluster_scale_in', {
'identity': cid, 'count': 1,
})
)
self.assertEqual(eng_resp, resp)
def _cluster_action_scale_non_int(self, action, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {action: {'count': 'abc'}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
error = senlin_exc.InvalidParameter(name='count', value='abc')
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual(400, resp.json['code'])
self.assertEqual('InvalidParameter', resp.json['error']['type'])
self.assertIn("Invalid value 'abc' specified for 'count'",
resp.json['error']['message'])
    def test_cluster_action_scale_out_non_int(self, mock_enforce):
        # Delegate to the shared non-integer 'count' check for scale_out.
        self._cluster_action_scale_non_int('scale_out', mock_enforce)
    def test_cluster_action_scale_in_non_int(self, mock_enforce):
        # Delegate to the shared non-integer 'count' check for scale_in.
        self._cluster_action_scale_non_int('scale_in', mock_enforce)
def test_cluster_action_attach_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_attach': {'policy_id': 'xxxx-yyyy'}}
eng_resp = {'action': {'id': 'action-id', 'target': cid}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=eng_resp)
resp = self.controller.action(req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
mock_call.assert_called_once_with(
req.context,
('cluster_policy_attach', {
'identity': cid, 'policy': 'xxxx-yyyy',
'level': 50, 'enabled': True, 'cooldown': 0,
'priority': 50,
})
)
self.assertEqual(eng_resp, resp)
def test_cluster_action_attach_policy_with_fields(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_attach': {
'policy_id': 'xxxx-yyyy',
'priority': 40,
'cooldown': 20,
'level': 30,
'enabled': False,
}}
eng_resp = {'action': {'id': 'action-id', 'target': cid}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=eng_resp)
resp = self.controller.action(req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
mock_call.assert_called_once_with(
req.context,
('cluster_policy_attach', {
'identity': cid, 'policy': 'xxxx-yyyy',
'level': 30, 'enabled': False, 'cooldown': 20,
'priority': 40,
})
)
self.assertEqual(eng_resp, resp)
def test_cluster_action_attach_policy_not_found(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_attach': {'policy_id': 'not-a-policy'}}
req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cid},
json.dumps(body))
error = senlin_exc.PolicyNotFound(policy='not-a-policy')
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual(404, resp.json['code'])
self.assertEqual('PolicyNotFound', resp.json['error']['type'])
def test_cluster_action_detach_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_detach': {'policy_id': 'xxxx-yyyy'}}
eng_resp = {'action': {'id': 'action-id', 'target': cid}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=eng_resp)
resp = self.controller.action(req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
mock_call.assert_called_once_with(
req.context,
('cluster_policy_detach', {
'identity': cid, 'policy': 'xxxx-yyyy',
})
)
self.assertEqual(eng_resp, resp)
def test_cluster_action_detach_policy_not_found(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_detach': {'policy_id': 'not-a-policy'}}
req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cid},
json.dumps(body))
error = senlin_exc.PolicyNotFound(policy='not-a-policy')
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual(404, resp.json['code'])
self.assertEqual('PolicyNotFound', resp.json['error']['type'])
def test_cluster_action_update_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_update': {
'policy_id': 'xxxx-yyyy',
'priority': 99,
}}
eng_resp = {'action': {'id': 'action-id', 'target': cid}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call',
return_value=eng_resp)
resp = self.controller.action(req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
mock_call.assert_called_once_with(
req.context,
('cluster_policy_update', {
'identity': cid, 'policy': 'xxxx-yyyy',
'priority': 99, 'level': None, 'enabled': None,
'cooldown': None,
})
)
self.assertEqual(eng_resp, resp)
def test_cluster_action_update_policy_non_int(self, mock_enforce):
# NOTE: There are other cases of invalid parameter inputs, but
# we only take one of them as the example for testing. The rest
# of them should be tested at the engine side because this test
# case cares only about whether the API layer can respond to such
# 'error's correctly.
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_update': {
'policy_id': 'xxxx-yyyy',
'priority': 'abc',
}}
req = self._put('/clusters/%(cluster_id)s/action' % {
'cluster_id': cid}, json.dumps(body))
error = senlin_exc.InvalidParameter(name='priority', value='abc')
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual(400, resp.json['code'])
self.assertEqual('InvalidParameter', resp.json['error']['type'])
self.assertIn("Invalid value 'abc' specified for 'priority'",
resp.json['error']['message'])
def test_cluster_action_update_policy_not_found(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'action', True)
cid = 'aaaa-bbbb-cccc'
body = {'policy_update': {'policy_id': 'not-a-policy'}}
req = self._put('/clusters/%(cluster_id)s' % {'cluster_id': cid},
json.dumps(body))
error = senlin_exc.PolicyNotFound(policy='not-a-policy')
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
mock_call.side_effect = shared.to_remote_error(error)
resp = shared.request_with_middleware(fault.FaultWrapper,
self.controller.action,
req, tenant_id=self.tenant,
cluster_id=cid,
body=body)
self.assertEqual(404, resp.json['code'])
self.assertEqual('PolicyNotFound', resp.json['error']['type'])
def | |
self._po.ff_fname = newff_name
self._po._init = False
if len(candidates):
if optimize_during_scoring:
# if we are going through the trouble to optimize every
# split, always take the best
candidates = sorted(
candidates, key=lambda x: np.abs(x[7]), reverse=False
)
# but bail if we didn't find any splits below the cutoff
candidates = [
c
for c in candidates
if (c[7] - c[6]) / c[6] < self.split_keep_threshhold
]
if len(candidates) == 0:
return None, np.inf, np.inf, None
elif candidate_mode == "split_gradient_max":
candidates = sorted(candidates, key=lambda x: x[1] - x[9], reverse=True)
elif candidate_mode in [
"sum_difference",
"mag_difference",
"mean_difference",
]:
candidates = (
sorted(candidates, key=lambda x: np.abs(x[4]), reverse=True),
)
grad_new_opt = np.inf
# this means optimize the best we found, and return the objective
# as the score
if optimize_during_typing and not optimize_during_scoring:
n_success = 0
total_candidates = (
self.optimize_candidate_limit
if self.optimize_candidate_limit
else len(candidates)
)
total_candidates = min(len(candidates), total_candidates)
for ii, candidate in enumerate(candidates[:total_candidates], 1):
self.db = candidate[8]
self.node_index = candidate[13]
self._po._forcefield = candidate[11]
self._po.db = candidate[12]
node = candidate[0]
if candidate[14] == "new":
print(
"Add candidate parameter",
node,
"to parent",
self[candidate[2]],
)
# candidate[0] = self.add(candidate[2], candidate[0], index=0)
elif candidate[14] == "delete":
print(
"Remove candidate parameter",
node,
"from parent",
self[candidate[2]],
)
# self.combine_parameter(node)
print(
"Performing tier 3 micro optimization for the best candidate score number {}/{}".format(
ii, total_candidates
)
)
newff_name = "newFF_" + str(i) + "." + str(ii) + ".offxml"
print("Wrote FF to file", newff_name)
self.to_smirnoff_xml(newff_name, renumber=True, verbose=False)
success = self._run_optimizer("OPTIMIZE", keep_state=False)
if success:
obj = self._po.X
grad_new_opt = self._po.G
print("Objective after minimization:", self._po.X)
# self.load_new_parameters(self._po.new_ff)
candidate[7] = obj
candidate[10] = grad_new_opt
candidate[11] = copy.deepcopy(self._po._forcefield)
candidate[12] = copy.deepcopy(self._po.db)
# since we are optimizing, overwrite the SP gradient
candidate[1] = grad_new_opt
n_success += 1
else:
self.logger.info("Optimization failed; assuming bogus split")
# print("Remove candidate parameter", node, "from parent", node.parent, candidate[2], node.parent == candidate[2])
# self[node.parent].children.remove(node.index)
# self.node_index.pop(node.index)
if (
self.optimize_candidate_limit is not None
and n_success == self.optimize_candidate_limit
):
break
candidates = sorted(
candidates, key=lambda x: np.abs(x[7]), reverse=False
)
print("All candidates (top wins; pre cutoff filter):")
for c in candidates:
print(
"{:3d}".format(c[5]),
"{:7.2f}%".format(100.0 * (c[7] - c[6]) / c[6]),
c[14],
oldindex[c[2]].payload,
"->",
c[0].payload,
oldffdb[oldindex[c[2]].payload]["data"]["group"].to_smarts(
atom_universe=self.atom_universe,
bond_universe=self.bond_universe,
),
"->",
c[8][c[0].payload]["data"]["group"].to_smarts(
atom_universe=self.atom_universe,
bond_universe=self.bond_universe,
),
"{:.6e}".format(c[6]),
"{:.6e}".format(c[7]),
)
print(
"Key is index, from_param, new_param, total_grad_ref, total_grad_split, total_grad_opt, grad_split_score initial_obj final_obj percent_change\n"
)
# assume that for geometry based scoring, we keep all higher than eps
candidates = [
c
for c in candidates
if (not use_gradients)
or ((c[7] - c[6]) / c[6] < self.split_keep_threshhold)
]
if len(candidates) == 0:
print(
"No candidates meet cutoff threshhold of {:6.2f}%".format(
self.split_keep_threshhold * 100
)
)
self.node_index = oldindex
self.db = oldffdb
self._po.db = olddb
self._po._forcefield = oldfbff
return None, np.inf, np.inf, None
best = candidates[0]
# only re-add if we did a complete scan, since we terminate that case
# with no new node, and the best has to be re-added
# if we break early, the node is already there
# I think nodes need to be prepended to conserve hierarchy
# for example, if we split a param, do we want it to override
# all children? no, since we were only focused on the parent, so
# we only care that the split node comes after *only* the parent,
# which is true since it is a child
# best[0] = self.add(best[2], best[0], index=0)
self.node_index = best[13]
self.db = best[8]
self._po._forcefield = best[11]
self._po.db = best[12]
print("Best", best[14], "result")
print(best[0])
print("Best split gradient", best[1])
print("Best split score", best[4])
print(
"Best split objective drop {}%".format(
100.0 * (best[7] - best[6]) / best[6]
)
)
# print("Best combine parameter")
# print(best[0])
# breakpoint()
# self.combine_parameter(best[0])
# self.print_label_assignments()
# newff_name = "newFF.offxml"
# self.db = best[8]
# self.to_smirnoff_xml(newff_name, verbose=False)
# self._po._setup.ff_fname = newff_name
# self._po._init = False
return best[0], best[1], best[7], best[14]
    def _guess_periodicities(
        self,
        param_name,
        values,
        report_only=False,
        max_terms=3,
        period_max=30,
        cutoff=0.9,
    ):
        """Guess torsion periodicities and phases from measured angles.

        Projects the measured angles onto cos(n*theta) for n = 1..period_max
        and keeps up to ``max_terms`` periods whose normalized overlap
        exceeds ``cutoff``; the phase of each kept term is set to 0 or 180
        degrees from the sign of the overlap. Only torsion parameter types
        are handled; any other type returns immediately. ``values`` are
        treated as angles in degrees (they are passed through np.radians).

        Args:
            param_name: key into ``self.db`` naming the parameter to edit.
            values: measured torsion angles, in degrees.
            report_only: if True, print what would change without modifying.
            max_terms: maximum number of periodic terms to keep.
            period_max: highest periodicity n to scan.
            cutoff: minimum |overlap| for a period to be accepted.
        """
        current = None
        new = None
        param = self.db[param_name]["data"]["parameter"]
        ptype = type(param)
        modify = not report_only
        # lets not set the phases to the average angle
        # try to determine if need to set to 0 or 180 based on data
        if ptype not in [
            ImproperTorsionHandler.ImproperTorsionType,
            ProperTorsionHandler.ProperTorsionType,
        ]:
            return
        assert max_terms > 0
        stats_fn = self.stats_fn
        stats_fn.set_circular(True)
        # One column per candidate period n; A[n-1] is the mean of
        # cos(n * theta) over all measured angles theta.
        n = np.arange(1, period_max + 1, dtype=float).reshape(1, -1)
        A = np.cos(n * np.radians(np.atleast_2d(values).T)).sum(axis=0) / len(values)
        periods = []
        phases = []
        k = []
        idivf = []
        # Dump the raw measurements and overlaps for offline debugging.
        fname = "measures." + param_name + "." + str(time.time()) + ".p"
        dataset = {"data": list(values), "periods": list(n[0]), "overlap": list(A)}
        pickle.dump(dataset, open(fname, "wb"))
        print("DEBUG: dumped data, periods, and overlap for", param_name, "to", fname)
        # print(json.dumps(list(values)))
        # print("DEBUG: here are fourier values for ", param_name, "(periods, vals)")
        # print(json.dumps(list(n[0])))
        # print(json.dumps(list(A)))
        # print("DEBUG: max_terms is", max_terms)
        for i, val in enumerate(A, 1):
            if len(periods) >= max_terms:
                break
            if np.abs(val) > cutoff:
                # Sign of the overlap picks the phase: negative -> 0 deg,
                # positive -> 180 deg.
                phase = 0 if val < 0 else 180
                # skip=False
                # for per, pha in zip(param.periodicity, param.phase):
                #     if per != i and max(per, i) % min(per, i) == 0 and pha == phase * simtk.unit.degree:
                #         skip=True
                #         break
                # for per, pha in zip(periods, phases):
                #     if max(per, i) % min(per, i) == 0 and pha == phase * simtk.unit.degree:
                #         skip=True
                #         break
                # if skip:
                #     continue
                # Reuse the existing term (with its k/idivf) when the
                # parameter already carries this periodicity; otherwise
                # seed a new term with zero force constant.
                if i in param.periodicity:
                    idx = param.periodicity.index(i)
                    periods.append(param.periodicity[idx])
                    phases.append(param.phase[idx])
                    k.append(param.k[idx])
                    idivf.append(param.idivf[idx])
                else:
                    periods.append(i)
                    phases.append(phase * simtk.unit.degree)
                    k.append(0 * simtk.unit.kilocalories_per_mole)
                    idivf.append(1.0)
        # We choose to only modify if a good guess is found
        if len(periods) > 0:
            # if 1 in param.periodicity:
            #     idx = param.periodicity.index(1)
            #     periods.append(param.periodicity[idx])
            #     phases.append(param.phase[idx])
            #     k.append(param.k[idx])
            #     idivf.append(param.idivf[idx])
            # else:
            #     periods.append(1)
            #     phases.append(0.0 * simtk.unit.degree)
            #     k.append(param.k[0])
            #     idivf.append(1.0)
            current = (param.periodicity, param.phase)
            # n_params = len(param.phase)
            # if issubclass(type(value), list):
            #     new = list(
            #         [x * simtk.unit.degree for x, _ in zip(value, range(n_params))]
            #     )
            # else:
            #     new = list([value * simtk.unit.degree for _ in range(n_params)])
            # NOTE(review): when report_only is set, ``new`` stays None, so
            # the "Would change ..." message prints None instead of the
            # guessed (periods, phases) — confirm whether that is intended.
            if modify:
                param.phase = phases
                param.periodicity = periods
                param.k = k
                param.idivf = idivf
                new = (param.periodicity, param.phase)
            if report_only:
                print(
                    "Would change parameter in ",
                    param_name,
                    "from periodicities =",
                    current,
                    "to",
                    new,
                    "(reporting only; did not change)",
                )
            else:
                print(
                    "Changed periodicities in ", param_name, "from", current, "to", new
                )
        else:
            print(
                f"WARNING: Could not find a good guess for periodicity of parameter {param.id}"
            )
def _set_parameter_spatial(self, param_name, value, report_only=False):
"assumes distances in Bohr"
current = None
new = None
param = self.db[param_name]["data"]["parameter"]
ptype = type(param)
modify = not report_only
if ptype == BondHandler.BondType:
current = param.length
new = value * offsb.tools.const.bohr2angstrom * simtk.unit.angstrom
if modify:
param.length = new
elif ptype == vdWHandler.vdWType:
current = param.rmin_half
new = value * offsb.tools.const.bohr2angstrom * simtk.unit.angstrom
if modify:
param.rmin_half = new
elif ptype == AngleHandler.AngleType:
current = param.angle
new = value * simtk.unit.degree
if modify:
param.angle = new
# lets not set the phases to the average angle
# try to determine if need to set to 0 or 180 based on data
elif ptype in [
ImproperTorsionHandler.ImproperTorsionType,
ProperTorsionHandler.ProperTorsionType,
]:
pass
# current = param.phase
# n_params = len(param.phase)
# if issubclass(type(value), list):
# new = list(
# [x * simtk.unit.degree for x, _ in zip(value, range(n_params))]
# )
# else:
# new = list([value * simtk.unit.degree for _ in range(n_params)])
# if modify:
# param.phase = new
else:
raise NotImplementedError()
if report_only:
print(
"Would change parameter in ",
param_name,
"from",
current,
"to",
new,
"(reporting only; did not change)",
)
else:
print("Changed parameter in ", param_name, "from", current, "to", new)
def _set_parameter_force(
self, param_name, value, report_only=False, guess_periodicities=False
):
param = self.db[param_name]["data"]["parameter"]
current = None
new = None
ptype = type(param)
modify = not report_only
len_unit = (
offsb.tools.const.hartree2kcalmol
* simtk.unit.kilocalorie
/ simtk.unit.mole
/ (offsb.tools.const.bohr2angstrom * simtk.unit.angstrom) ** 2
)
rmin_half_unit = (
offsb.tools.const.hartree2kcalmol * simtk.unit.kilocalorie / simtk.unit.mole
)
angle_unit = (
offsb.tools.const.hartree2kcalmol
* simtk.unit.kilocalorie
/ simtk.unit.mole
/ | |
AttachDecorator.data_base64(
mapping={"bogus": "proof"},
ident="indy",
)
],
),
)
holder = async_mock.MagicMock(IndyHolder, autospec=True)
get_creds = async_mock.CoroutineMock(
return_value=(
{
"cred_info": {"referent": "dummy_reft"},
"attrs": {
"player": "<NAME>",
"screenCapture": "aW1hZ2luZSBhIHNjcmVlbiBjYXB0dXJl",
"highScore": "1234560",
},
},
)
)
holder.get_credentials_for_presentation_request_by_referent = get_creds
holder.create_credential_request = async_mock.CoroutineMock(
return_value=(
json.dumps(TestConfig.indy_cred_req),
json.dumps(TestConfig.cred_req_meta),
)
)
self.profile.context.injector.bind_instance(IndyHolder, holder)
mock_oob_invi = async_mock.MagicMock(
handshake_protocols=[
pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix
],
services=[TestConfig.test_target_did],
requests_attach=[
AttachDecorator.deserialize(TestConfig.req_attach_v2)
],
)
inv_message_cls.deserialize.return_value = mock_oob_invi
conn_rec = await self.manager.receive_invitation(
mock_oob_invi, use_existing_connection=True
)
assert conn_rec is not None
    async def test_req_v2_attach_pres_catch_value_error(self):
        """OOB invitation with a v2 pres-request attachment, no auto-respond.

        With ``debug.auto_respond_presentation_request`` disabled and the
        holder returning no matching credentials, receiving the invitation
        must raise OutOfBandManagerError ("cannot respond automatically")
        rather than building a presentation.
        """
        async with self.profile.session() as session:
            self.profile.context.update_settings({"public_invites": True})
            self.profile.context.update_settings(
                {"debug.auto_respond_presentation_request": False}
            )
            # Pre-existing connection the invitation can reuse.
            test_exist_conn = ConnRecord(
                my_did=TestConfig.test_did,
                their_did=TestConfig.test_target_did,
                their_public_did=TestConfig.test_target_did,
                invitation_msg_id="12345678-0123-4567-1234-567812345678",
                their_role=ConnRecord.Role.REQUESTER,
            )
            await test_exist_conn.save(session)
            await test_exist_conn.metadata_set(session, "reuse_msg_state", "initial")
            await test_exist_conn.metadata_set(session, "reuse_msg_id", "test_123")
            # NOTE(review): ``receipt`` is constructed but never used below.
            receipt = MessageReceipt(
                recipient_did=TestConfig.test_did,
                recipient_did_public=False,
                sender_did=TestConfig.test_target_did,
            )
            px2_rec = test_module.V20PresExRecord(
                auto_present=False,
                pres_request=TestConfig.PRES_REQ_V2.serialize(),
            )
            # Patch out every manager collaborator so only the attachment
            # processing path in receive_invitation is exercised.
            with async_mock.patch.object(
                DIDXManager,
                "receive_invitation",
                autospec=True,
            ) as didx_mgr_receive_invitation, async_mock.patch.object(
                V20PresManager,
                "receive_pres_request",
                autospec=True,
            ) as pres_mgr_receive_pres_req, async_mock.patch(
                "aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage",
                autospec=True,
            ) as inv_message_cls, async_mock.patch.object(
                OutOfBandManager,
                "fetch_connection_targets",
                autospec=True,
            ) as oob_mgr_fetch_conn, async_mock.patch.object(
                OutOfBandManager,
                "find_existing_connection",
                autospec=True,
            ) as oob_mgr_find_existing_conn, async_mock.patch.object(
                OutOfBandManager,
                "check_reuse_msg_state",
                autospec=True,
            ) as oob_mgr_check_reuse_state, async_mock.patch.object(
                OutOfBandManager,
                "create_handshake_reuse_message",
                autospec=True,
            ) as oob_mgr_create_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_message",
                autospec=True,
            ) as oob_mgr_receive_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_accepted_message",
                autospec=True,
            ) as oob_mgr_receive_accept_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_problem_report",
                autospec=True,
            ) as oob_mgr_receive_problem_report, async_mock.patch.object(
                V20PresManager,
                "create_pres",
                autospec=True,
            ) as pres_mgr_create_pres:
                oob_mgr_find_existing_conn.return_value = test_exist_conn
                pres_mgr_receive_pres_req.return_value = px2_rec
                pres_mgr_create_pres.return_value = (
                    px2_rec,
                    V20Pres(
                        formats=[
                            V20PresFormat(
                                attach_id="indy",
                                format_=V20_PRES_ATTACH_FORMAT[PRES_20][
                                    V20PresFormat.Format.INDY.api
                                ],
                            )
                        ],
                        presentations_attach=[
                            AttachDecorator.data_base64(
                                mapping={"bogus": "proof"},
                                ident="indy",
                            )
                        ],
                    ),
                )
                holder = async_mock.MagicMock(IndyHolder, autospec=True)
                # Holder reports no matching credentials, so an automatic
                # presentation cannot be assembled.
                get_creds = async_mock.CoroutineMock(return_value=())
                holder.get_credentials_for_presentation_request_by_referent = get_creds
                holder.create_credential_request = async_mock.CoroutineMock(
                    return_value=(
                        json.dumps(TestConfig.indy_cred_req),
                        json.dumps(TestConfig.cred_req_meta),
                    )
                )
                self.profile.context.injector.bind_instance(IndyHolder, holder)
                mock_oob_invi = async_mock.MagicMock(
                    handshake_protocols=[
                        pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix
                    ],
                    services=[TestConfig.test_target_did],
                    requests_attach=[
                        AttachDecorator.deserialize(TestConfig.req_attach_v2)
                    ],
                )
                inv_message_cls.deserialize.return_value = mock_oob_invi
                with self.assertRaises(OutOfBandManagerError) as context:
                    await self.manager.receive_invitation(
                        mock_oob_invi, use_existing_connection=True
                    )
                assert "cannot respond automatically" in str(context.exception)
    async def test_req_attach_cred_offer_v1(self):
        """OOB invitation carrying a v1 credential offer, auto-respond on.

        With ``debug.auto_respond_credential_offer`` enabled and an active
        existing connection available for reuse, receiving the invitation
        must process the offer and return a connection record.
        """
        async with self.profile.session() as session:
            self.profile.context.update_settings({"public_invites": True})
            self.profile.context.update_settings(
                {"debug.auto_respond_credential_offer": True}
            )
            # Active (COMPLETED) connection the invitation can reuse.
            test_exist_conn = ConnRecord(
                my_did=TestConfig.test_did,
                their_did=TestConfig.test_target_did,
                their_public_did=TestConfig.test_target_did,
                invitation_msg_id="12345678-0123-4567-1234-567812345678",
                their_role=ConnRecord.Role.REQUESTER,
                state=ConnRecord.State.COMPLETED,
            )
            await test_exist_conn.save(session)
            await test_exist_conn.metadata_set(session, "reuse_msg_state", "initial")
            await test_exist_conn.metadata_set(session, "reuse_msg_id", "test_123")
            # NOTE(review): ``receipt`` is constructed but never used below.
            receipt = MessageReceipt(
                recipient_did=TestConfig.test_did,
                recipient_did_public=False,
                sender_did=TestConfig.test_target_did,
            )
            # Swap the template attachment's payload for a v1 cred offer.
            req_attach = deepcopy(TestConfig.req_attach_v1)
            del req_attach["data"]["json"]
            req_attach["data"]["json"] = TestConfig.CRED_OFFER_V1.serialize()
            exchange_rec = V10CredentialExchange()
            exchange_rec.credential_offer = TestConfig.CRED_OFFER_V1
            # Patch out every manager collaborator so only the attachment
            # processing path in receive_invitation is exercised.
            with async_mock.patch.object(
                DIDXManager,
                "receive_invitation",
                autospec=True,
            ) as didx_mgr_receive_invitation, async_mock.patch.object(
                V10CredManager,
                "receive_offer",
                autospec=True,
            ) as cred_mgr_offer_receive, async_mock.patch(
                "aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage",
                autospec=True,
            ) as inv_message_cls, async_mock.patch.object(
                OutOfBandManager,
                "fetch_connection_targets",
                autospec=True,
            ) as oob_mgr_fetch_conn, async_mock.patch.object(
                OutOfBandManager,
                "find_existing_connection",
                autospec=True,
            ) as oob_mgr_find_existing_conn, async_mock.patch.object(
                OutOfBandManager,
                "check_reuse_msg_state",
                autospec=True,
            ) as oob_mgr_check_reuse_state, async_mock.patch.object(
                OutOfBandManager,
                "conn_rec_is_active",
                autospec=True,
            ) as oob_mgr_check_conn_rec_active, async_mock.patch.object(
                OutOfBandManager,
                "create_handshake_reuse_message",
                autospec=True,
            ) as oob_mgr_create_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_message",
                autospec=True,
            ) as oob_mgr_receive_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_accepted_message",
                autospec=True,
            ) as oob_mgr_receive_accept_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_problem_report",
                autospec=True,
            ) as oob_mgr_receive_problem_report, async_mock.patch.object(
                V10CredManager,
                "create_request",
                autospec=True,
            ) as cred_mgr_request_receive:
                oob_mgr_find_existing_conn.return_value = test_exist_conn
                oob_mgr_check_conn_rec_active.return_value = test_exist_conn
                cred_mgr_offer_receive.return_value = exchange_rec
                cred_mgr_request_receive.return_value = (exchange_rec, INDY_CRED_REQ)
                mock_oob_invi = async_mock.MagicMock(
                    handshake_protocols=[
                        pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix
                    ],
                    services=[TestConfig.test_target_did],
                    requests_attach=[AttachDecorator.deserialize(req_attach)],
                )
                inv_message_cls.deserialize.return_value = mock_oob_invi
                conn_rec = await self.manager.receive_invitation(
                    mock_oob_invi, use_existing_connection=True
                )
                assert conn_rec is not None
async def test_req_attach_cred_offer_v1_no_issue(self):
self.profile.context.update_settings({"public_invites": True})
self.profile.context.update_settings(
{"debug.auto_respond_credential_offer": False}
)
test_exist_conn = ConnRecord(
my_did=TestConfig.test_did,
their_did=TestConfig.test_target_did,
their_public_did=TestConfig.test_target_did,
invitation_msg_id="12345678-0123-4567-1234-567812345678",
their_role=ConnRecord.Role.REQUESTER,
state=ConnRecord.State.COMPLETED,
)
await test_exist_conn.save(session)
await test_exist_conn.metadata_set(session, "reuse_msg_state", "initial")
await test_exist_conn.metadata_set(session, "reuse_msg_id", "test_123")
receipt = MessageReceipt(
recipient_did=TestConfig.test_did,
recipient_did_public=False,
sender_did=TestConfig.test_target_did,
)
req_attach = deepcopy(TestConfig.req_attach_v1)
del req_attach["data"]["json"]
req_attach["data"]["json"] = TestConfig.CRED_OFFER_V1.serialize()
exchange_rec = V10CredentialExchange()
exchange_rec.credential_offer = TestConfig.CRED_OFFER_V1
with async_mock.patch.object(
DIDXManager,
"receive_invitation",
autospec=True,
) as didx_mgr_receive_invitation, async_mock.patch.object(
V10CredManager,
"receive_offer",
autospec=True,
) as cred_mgr_offer_receive, async_mock.patch(
"aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage",
autospec=True,
) as inv_message_cls, async_mock.patch.object(
OutOfBandManager,
"fetch_connection_targets",
autospec=True,
) as oob_mgr_fetch_conn, async_mock.patch.object(
OutOfBandManager,
"find_existing_connection",
autospec=True,
) as oob_mgr_find_existing_conn, async_mock.patch.object(
OutOfBandManager,
"check_reuse_msg_state",
autospec=True,
) as oob_mgr_check_reuse_state, async_mock.patch.object(
OutOfBandManager,
"conn_rec_is_active",
autospec=True,
) as oob_mgr_check_conn_rec_active, async_mock.patch.object(
OutOfBandManager,
"create_handshake_reuse_message",
autospec=True,
) as oob_mgr_create_reuse_msg, async_mock.patch.object(
OutOfBandManager,
"receive_reuse_message",
autospec=True,
) as oob_mgr_receive_reuse_msg, async_mock.patch.object(
OutOfBandManager,
"receive_reuse_accepted_message",
autospec=True,
) as oob_mgr_receive_accept_msg, async_mock.patch.object(
OutOfBandManager,
"receive_problem_report",
autospec=True,
) as oob_mgr_receive_problem_report:
oob_mgr_find_existing_conn.return_value = test_exist_conn
cred_mgr_offer_receive.return_value = exchange_rec
mock_oob_invi = async_mock.MagicMock(
handshake_protocols=[
pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix
],
services=[TestConfig.test_target_did],
requests_attach=[AttachDecorator.deserialize(req_attach)],
)
inv_message_cls.deserialize.return_value = mock_oob_invi
with self.assertRaises(OutOfBandManagerError) as context:
await self.manager.receive_invitation(
mock_oob_invi, use_existing_connection=True
)
assert "Configuration sets auto_offer false" in str(context.exception)
    async def test_req_attach_cred_offer_v2(self):
        """Happy path: OOB invitation carrying a v2 credential-offer attachment.

        With ``debug.auto_respond_credential_offer`` enabled and an existing
        active connection, ``receive_invitation`` should hand the offer to the
        v2 credential manager and return the reused connection record.
        """
        async with self.profile.session() as session:
            self.profile.context.update_settings({"public_invites": True})
            self.profile.context.update_settings(
                {"debug.auto_respond_credential_offer": True}
            )
            # Pre-existing completed connection that the invitation will reuse
            test_exist_conn = ConnRecord(
                my_did=TestConfig.test_did,
                their_did=TestConfig.test_target_did,
                their_public_did=TestConfig.test_target_did,
                invitation_msg_id="12345678-0123-4567-1234-567812345678",
                their_role=ConnRecord.Role.REQUESTER,
                state=ConnRecord.State.COMPLETED,
            )
            await test_exist_conn.save(session)
            await test_exist_conn.metadata_set(session, "reuse_msg_state", "initial")
            await test_exist_conn.metadata_set(session, "reuse_msg_id", "test_123")
            # NOTE(review): receipt is constructed but not used below — confirm
            receipt = MessageReceipt(
                recipient_did=TestConfig.test_did,
                recipient_did_public=False,
                sender_did=TestConfig.test_target_did,
            )
            # Swap the template attachment's payload for a serialized v2 offer
            req_attach = deepcopy(TestConfig.req_attach_v1)
            del req_attach["data"]["json"]
            req_attach["data"]["json"] = TestConfig.CRED_OFFER_V2.serialize()
            exchange_rec = V20CredExRecord()
            exchange_rec.cred_offer = TestConfig.CRED_OFFER_V2
            # Patch out DID exchange, credential manager, and all OOB helper
            # methods so only the receive_invitation dispatch logic runs
            with async_mock.patch.object(
                DIDXManager,
                "receive_invitation",
                autospec=True,
            ) as didx_mgr_receive_invitation, async_mock.patch.object(
                V20CredManager,
                "receive_offer",
                autospec=True,
            ) as cred_mgr_offer_receive, async_mock.patch(
                "aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage",
                autospec=True,
            ) as inv_message_cls, async_mock.patch.object(
                OutOfBandManager,
                "fetch_connection_targets",
                autospec=True,
            ) as oob_mgr_fetch_conn, async_mock.patch.object(
                OutOfBandManager,
                "find_existing_connection",
                autospec=True,
            ) as oob_mgr_find_existing_conn, async_mock.patch.object(
                OutOfBandManager,
                "check_reuse_msg_state",
                autospec=True,
            ) as oob_mgr_check_reuse_state, async_mock.patch.object(
                OutOfBandManager,
                "conn_rec_is_active",
                autospec=True,
            ) as oob_mgr_check_conn_rec_active, async_mock.patch.object(
                OutOfBandManager,
                "create_handshake_reuse_message",
                autospec=True,
            ) as oob_mgr_create_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_message",
                autospec=True,
            ) as oob_mgr_receive_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_accepted_message",
                autospec=True,
            ) as oob_mgr_receive_accept_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_problem_report",
                autospec=True,
            ) as oob_mgr_receive_problem_report, async_mock.patch.object(
                V20CredManager,
                "create_request",
                autospec=True,
            ) as cred_mgr_request_receive:
                oob_mgr_find_existing_conn.return_value = test_exist_conn
                oob_mgr_check_conn_rec_active.return_value = test_exist_conn
                cred_mgr_offer_receive.return_value = exchange_rec
                cred_mgr_request_receive.return_value = (exchange_rec, INDY_CRED_REQ)
                # Invitation advertising RFC23 handshake plus the offer attach
                mock_oob_invi = async_mock.MagicMock(
                    handshake_protocols=[
                        pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix
                    ],
                    services=[TestConfig.test_target_did],
                    requests_attach=[AttachDecorator.deserialize(req_attach)],
                )
                inv_message_cls.deserialize.return_value = mock_oob_invi
                conn_rec = await self.manager.receive_invitation(
                    mock_oob_invi, use_existing_connection=True
                )
                assert conn_rec is not None
    async def test_req_attach_cred_offer_v2_no_issue(self):
        """Receiving a v2 cred-offer attachment with auto_offer disabled must raise."""
        async with self.profile.session() as session:
            self.profile.context.update_settings({"public_invites": True})
            # auto-respond disabled: the manager must refuse to process the offer
            self.profile.context.update_settings(
                {"debug.auto_respond_credential_offer": False}
            )
            # Pre-existing completed connection that the invitation will reuse
            test_exist_conn = ConnRecord(
                my_did=TestConfig.test_did,
                their_did=TestConfig.test_target_did,
                their_public_did=TestConfig.test_target_did,
                invitation_msg_id="12345678-0123-4567-1234-567812345678",
                their_role=ConnRecord.Role.REQUESTER,
                state=ConnRecord.State.COMPLETED,
            )
            await test_exist_conn.save(session)
            await test_exist_conn.metadata_set(session, "reuse_msg_state", "initial")
            await test_exist_conn.metadata_set(session, "reuse_msg_id", "test_123")
            # NOTE(review): receipt is constructed but not used below — confirm
            receipt = MessageReceipt(
                recipient_did=TestConfig.test_did,
                recipient_did_public=False,
                sender_did=TestConfig.test_target_did,
            )
            # Swap the template attachment's payload for a serialized v2 offer
            req_attach = deepcopy(TestConfig.req_attach_v1)
            del req_attach["data"]["json"]
            req_attach["data"]["json"] = TestConfig.CRED_OFFER_V2.serialize()
            exchange_rec = V20CredExRecord()
            exchange_rec.cred_offer = TestConfig.CRED_OFFER_V2
            # Patch out DID exchange, credential manager, and OOB helpers; no
            # create_request patch since the offer must never be auto-answered
            with async_mock.patch.object(
                DIDXManager,
                "receive_invitation",
                autospec=True,
            ) as didx_mgr_receive_invitation, async_mock.patch.object(
                V20CredManager,
                "receive_offer",
                autospec=True,
            ) as cred_mgr_offer_receive, async_mock.patch(
                "aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage",
                autospec=True,
            ) as inv_message_cls, async_mock.patch.object(
                OutOfBandManager,
                "fetch_connection_targets",
                autospec=True,
            ) as oob_mgr_fetch_conn, async_mock.patch.object(
                OutOfBandManager,
                "find_existing_connection",
                autospec=True,
            ) as oob_mgr_find_existing_conn, async_mock.patch.object(
                OutOfBandManager,
                "check_reuse_msg_state",
                autospec=True,
            ) as oob_mgr_check_reuse_state, async_mock.patch.object(
                OutOfBandManager,
                "conn_rec_is_active",
                autospec=True,
            ) as oob_mgr_check_conn_rec_active, async_mock.patch.object(
                OutOfBandManager,
                "create_handshake_reuse_message",
                autospec=True,
            ) as oob_mgr_create_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_message",
                autospec=True,
            ) as oob_mgr_receive_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_accepted_message",
                autospec=True,
            ) as oob_mgr_receive_accept_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_problem_report",
                autospec=True,
            ) as oob_mgr_receive_problem_report:
                oob_mgr_find_existing_conn.return_value = test_exist_conn
                cred_mgr_offer_receive.return_value = exchange_rec
                mock_oob_invi = async_mock.MagicMock(
                    handshake_protocols=[
                        pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix
                    ],
                    services=[TestConfig.test_target_did],
                    requests_attach=[AttachDecorator.deserialize(req_attach)],
                )
                inv_message_cls.deserialize.return_value = mock_oob_invi
                with self.assertRaises(OutOfBandManagerError) as context:
                    await self.manager.receive_invitation(
                        mock_oob_invi, use_existing_connection=True
                    )
                assert "Configuration sets auto_offer false" in str(context.exception)
    async def test_catch_unsupported_request_attach(self):
        """An attachment with an unknown @type must raise OutOfBandManagerError."""
        async with self.profile.session() as session:
            self.profile.context.update_settings({"public_invites": True})
            self.profile.context.update_settings(
                {"debug.auto_respond_credential_offer": False}
            )
            # Pre-existing connection that the invitation will reuse
            test_exist_conn = ConnRecord(
                my_did=TestConfig.test_did,
                their_did=TestConfig.test_target_did,
                their_public_did=TestConfig.test_target_did,
                invitation_msg_id="12345678-0123-4567-1234-567812345678",
                their_role=ConnRecord.Role.REQUESTER,
            )
            await test_exist_conn.save(session)
            await test_exist_conn.metadata_set(session, "reuse_msg_state", "initial")
            await test_exist_conn.metadata_set(session, "reuse_msg_id", "test_123")
            # NOTE(review): receipt is constructed but not used below — confirm
            receipt = MessageReceipt(
                recipient_did=TestConfig.test_did,
                recipient_did_public=False,
                sender_did=TestConfig.test_target_did,
            )
            # Build a v1 offer attachment, then corrupt its @type so the
            # manager cannot dispatch it to any handler
            req_attach = deepcopy(TestConfig.req_attach_v1)
            del req_attach["data"]["json"]
            req_attach["data"]["json"] = TestConfig.CRED_OFFER_V1.serialize()
            req_attach["data"]["json"]["@type"] = "test"
            with async_mock.patch.object(
                DIDXManager,
                "receive_invitation",
                autospec=True,
            ) as didx_mgr_receive_invitation, async_mock.patch(
                "aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage",
                autospec=True,
            ) as inv_message_cls, async_mock.patch.object(
                OutOfBandManager,
                "fetch_connection_targets",
                autospec=True,
            ) as oob_mgr_fetch_conn, async_mock.patch.object(
                OutOfBandManager,
                "find_existing_connection",
                autospec=True,
            ) as oob_mgr_find_existing_conn, async_mock.patch.object(
                OutOfBandManager,
                "check_reuse_msg_state",
                autospec=True,
            ) as oob_mgr_check_reuse_state, async_mock.patch.object(
                OutOfBandManager,
                "conn_rec_is_active",
                autospec=True,
            ) as oob_mgr_check_conn_rec_active, async_mock.patch.object(
                OutOfBandManager,
                "create_handshake_reuse_message",
                autospec=True,
            ) as oob_mgr_create_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_message",
                autospec=True,
            ) as oob_mgr_receive_reuse_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_reuse_accepted_message",
                autospec=True,
            ) as oob_mgr_receive_accept_msg, async_mock.patch.object(
                OutOfBandManager,
                "receive_problem_report",
                autospec=True,
            ) as oob_mgr_receive_problem_report:
                oob_mgr_find_existing_conn.return_value = test_exist_conn
                mock_oob_invi = async_mock.MagicMock(
                    handshake_protocols=[
                        pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix
                    ],
                    services=[TestConfig.test_target_did],
                    requests_attach=[AttachDecorator.deserialize(req_attach)],
                )
                inv_message_cls.deserialize.return_value = mock_oob_invi
                with self.assertRaises(OutOfBandManagerError) as context:
                    await self.manager.receive_invitation(
                        mock_oob_invi, use_existing_connection=True
                    )
                assert "Unsupported requests~attach type" in str(context.exception)
    async def test_check_conn_rec_active_a(self):
        """conn_rec_is_active finds the saved record by its connection id."""
        async with self.profile.session() as session:
            # Persist the fixture connection so the manager can retrieve it
            await self.test_conn_rec.save(session)
            conn_rec = await self.manager.conn_rec_is_active(
                self.test_conn_rec.connection_id
            )
            assert conn_rec.connection_id == self.test_conn_rec.connection_id
async def test_check_conn_rec_active_b(self):
connection_id = self.test_conn_rec.connection_id
conn_rec_request = deepcopy(self.test_conn_rec)
conn_rec_request.state = "request"
conn_rec_active = deepcopy(self.test_conn_rec)
conn_rec_active.state = "active"
with async_mock.patch.object(
test_module.ConnRecord,
"retrieve_by_id",
autospec=True,
) as mock_conn_rec_retrieve:
mock_conn_rec_retrieve.side_effect = [conn_rec_request, conn_rec_active]
conn_rec = await self.manager.conn_rec_is_active(connection_id)
assert conn_rec.state == "active"
async def test_request_attach_cred_offer_v1_check_conn_rec_active_timeout(self):
async with self.profile.session() as session:
self.profile.context.update_settings({"public_invites": True})
self.profile.context.update_settings(
{"debug.auto_respond_credential_offer": True}
)
test_exist_conn = ConnRecord(
my_did=TestConfig.test_did,
their_did=TestConfig.test_target_did,
their_public_did=TestConfig.test_target_did,
invitation_msg_id="12345678-0123-4567-1234-567812345678",
their_role=ConnRecord.Role.REQUESTER,
)
await test_exist_conn.save(session)
await test_exist_conn.metadata_set(session, "reuse_msg_state", "initial")
await test_exist_conn.metadata_set(session, "reuse_msg_id", "test_123")
req_attach = deepcopy(TestConfig.req_attach_v1)
del req_attach["data"]["json"]
req_attach["data"]["json"] = TestConfig.CRED_OFFER_V1.serialize()
exchange_rec = V20CredExRecord()
exchange_rec.cred_offer = TestConfig.CRED_OFFER_V1
with async_mock.patch.object(
DIDXManager,
"receive_invitation",
autospec=True,
), async_mock.patch.object(
V10CredManager,
"receive_offer",
autospec=True,
) as cred_mgr_offer_receive, async_mock.patch(
"aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage",
autospec=True,
) as inv_message_cls, async_mock.patch.object(
OutOfBandManager,
"fetch_connection_targets",
autospec=True,
), async_mock.patch.object(
OutOfBandManager,
"find_existing_connection",
autospec=True,
) as oob_mgr_find_existing_conn, async_mock.patch.object(
OutOfBandManager,
"check_reuse_msg_state",
autospec=True,
), async_mock.patch.object(
OutOfBandManager,
"conn_rec_is_active",
autospec=True,
) as oob_mgr_check_conn_rec_active, async_mock.patch.object(
OutOfBandManager,
"create_handshake_reuse_message",
| |
energy units)
- **temperature** (in thermal energy units)
- **pressure** (in pressure units)
- **pressure_xx**, **pressure_xy**, **pressure_xz**, **pressure_yy**, **pressure_yz**, **pressure_zz** (in pressure units)
- The above quantities, tagged with a *_groupname* suffix are automatically available for any group passed to
an integrate command
- Specify a compute.thermo directly to enable additional quantities for user-specified groups.
The following quantities are only available if the command in parentheses has been specified and is active
for logging:
- Pair potentials
- **pair_dpd_energy** (:py:class:`hoomd.md.pair.dpd`) - Total DPD conservative potential energy (in energy units)
- **pair_dpdlj_energy** (:py:class:`hoomd.md.pair.dpdlj`) - Total DPDLJ conservative potential energy (in energy units)
- **pair_eam_energy** (:py:class:`hoomd.metal.pair.eam`) - Total EAM potential energy (in energy units)
- **pair_ewald_energy** (:py:class:`hoomd.md.pair.ewald`) - Short ranged part of the electrostatic energy (in energy units)
- **pair_gauss_energy** (:py:class:`hoomd.md.pair.gauss`) - Total Gaussian potential energy (in energy units)
- **pair_lj_energy** (:py:class:`hoomd.md.pair.lj`) - Total Lennard-Jones potential energy (in energy units)
- **pair_morse_energy** (:py:class:`hoomd.md.pair.morse`) - Total Morse potential energy (in energy units)
- **pair_table_energy** (:py:class:`hoomd.md.pair.table`) - Total potential energy from Tabulated potentials (in energy units)
- **pair_slj_energy** (:py:class:`hoomd.md.pair.slj`) - Total Shifted Lennard-Jones potential energy (in energy units)
- **pair_yukawa_energy** (:py:class:`hoomd.md.pair.yukawa`) - Total Yukawa potential energy (in energy units)
- **pair_force_shifted_lj_energy** (:py:class:`hoomd.md.pair.force_shifted_lj`) - Total Force-shifted Lennard-Jones potential energy (in energy units)
- **pppm_energy** (:py:class:`hoomd.md.charge.pppm`) - Long ranged part of the electrostatic energy (in energy units)
- Bond potentials
- **bond_fene_energy** (:py:class:`hoomd.md.bond.fene`) - Total fene bond potential energy (in energy units)
- **bond_harmonic_energy** (:py:class:`hoomd.md.bond.harmonic`) - Total harmonic bond potential energy (in energy units)
- **bond_table_energy** (:py:class:`hoomd.md.bond.table`) - Total table bond potential energy (in energy units)
- Angle potentials
- **angle_harmonic_energy** (:py:class:`hoomd.md.angle.harmonic`) - Total harmonic angle potential energy (in energy units)
- Dihedral potentials
- **dihedral_harmonic_energy** (:py:class:`hoomd.md.dihedral.harmonic`) - Total harmonic dihedral potential energy (in energy units)
- Special pair interactions
- **special_pair_lj_energy** (:py:class:`hoomd.md.special_pair.lj`) - Total energy of special pair interactions (in energy units)
- External potentials
- **external_periodic_energy** (:py:class:`hoomd.md.external.periodic`) - Total periodic potential energy (in energy units)
- **external_e_field_energy** (:py:class:`hoomd.md.external.e_field`) - Total e_field potential energy (in energy units)
- Wall potentials
- **external_wall_lj_energy** (:py:class:`hoomd.md.wall.lj`) - Total Lennard-Jones wall energy (in energy units)
- **external_wall_gauss_energy** (:py:class:`hoomd.md.wall.gauss`) - Total Gauss wall energy (in energy units)
- **external_wall_slj_energy** (:py:class:`hoomd.md.wall.slj`) - Total Shifted Lennard-Jones wall energy (in energy units)
- **external_wall_yukawa_energy** (:py:class:`hoomd.md.wall.yukawa`) - Total Yukawa wall energy (in energy units)
- **external_wall_mie_energy** (:py:class:`hoomd.md.wall.mie`) - Total Mie wall energy (in energy units)
- Integrators
- **langevin_reservoir_energy_groupname** (:py:class:`hoomd.md.integrate.langevin`) - Energy reservoir for the Langevin integrator (in energy units)
- **nvt_reservoir_energy_groupname** (:py:class:`hoomd.md.integrate.nvt`) - Energy reservoir for the NVT thermostat (in energy units)
- **nvt_mtk_reservoir_energy_groupname** (:py:class:`hoomd.md.integrate.nvt`) - Energy reservoir for the NVT MTK thermostat (in energy units)
- **npt_thermostat_energy** (:py:class:`hoomd.md.integrate.npt`) - Energy of the NPT thermostat
- **npt_barostat_energy** (:py:class:`hoomd.md.integrate.npt` & :py:class:`hoomd.md.integrate.nph`) - Energy of the NPT (or NPH) barostat
Additionally, all pair and bond potentials can be provided user-defined names that are appended as suffixes to the
logged quantity (e.g. with ``pair.lj(r_cut=2.5, name="alpha")``, the logged quantity would be pair_lj_energy_alpha).
By specifying a force, disabling it with the *log=True* option, and then logging it, different energy terms can
be computed while only a subset of them actually drive the simulation. Common use-cases of this capability
include separating out pair energy of given types (shown below) and free energy calculations. Be aware that the
globally chosen *r_cut* value is the largest of all active pair potentials and those with *log=True*, so you will
observe performance degradation if you *disable(log=True)* a potential with a large *r_cut*.
File output from analyze.log is optional. Specify *None* for the file name and no file will be output.
Use this with the :py:meth:`query()` method to query the values of properties without the overhead of writing them
to disk.
You can register custom python callback functions to provide logged quantities with :py:meth:`register_callback()`.
Examples::
lj1 = pair.lj(r_cut=3.0, name="lj1")
lj1.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
lj1.pair_coeff.set('A', 'B', epsilon=1.0, sigma=1.0)
lj1.pair_coeff.set('B', 'B', epsilon=1.0, sigma=1.0)
lj2 = pair.lj(r_cut=3.0, name="lj2")
lj2.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
lj2.pair_coeff.set('A', 'B', epsilon=0.0, sigma=1.0)
lj2.pair_coeff.set('B', 'B', epsilon=0.0, sigma=1.0)
lj2.disable(log=True)
analyze.log(filename='mylog.log', quantities=['pair_lj_energy_lj1', 'pair_lj_energy_lj2'],
period=100, header_prefix='#')
logger = analyze.log(filename='mylog.log', period=100,
quantities=['pair_lj_energy'])
analyze.log(quantities=['pair_lj_energy', 'bond_harmonic_energy',
'kinetic_energy'], period=1000, filename='full.log')
analyze.log(filename='mylog.log', quantities=['pair_lj_energy'],
period=100, header_prefix='#')
analyze.log(filename='mylog.log', quantities=['bond_harmonic_energy'],
period=10, header_prefix='Log of harmonic energy, run 5\\n')
logger = analyze.log(filename='mylog.log', period=100,
quantities=['pair_lj_energy'], overwrite=True)
log = analyze.log(filename=None, quantities=['potential_energy'], period=1)
U = log.query('potential_energy')
By default, columns in the log file are separated by tabs, suitable for importing as a
tab-delimited spreadsheet. The delimiter can be changed to any string using :py:meth:`set_params()`
The *header_prefix* can be used in a number of ways. It specifies a simple string that
will be printed before the header line of the output file. One handy way to use this
is to specify header_prefix='#' so that ``gnuplot`` will ignore the header line
automatically. Another use-case would be to specify a descriptive line containing
details of the current run. Examples of each of these cases are given above.
Warning:
When an existing log is appended to, the header is not printed. For the log to
remain consistent with the header already in the file, you must specify the same quantities
to log and in the same order for all runs of hoomd that append to the same log.
"""
    def __init__(self, filename, quantities, period, header_prefix='', overwrite=False, phase=0):
        """Create a logger analyzer backed by the C++ ``Logger``.

        Args:
            filename (str): Log file name; ``None`` or ``""`` disables file output.
            quantities (list): Names of the quantities to log.
            period (int): Number of time steps between log outputs.
            header_prefix (str): String printed before the header line of the file.
            overwrite (bool): When True, overwrite an existing file instead of appending.
            phase (int): Phase offset forwarded to ``setupAnalyzer``.
        """
        hoomd.util.print_status_line();
        # initialize base class
        _analyzer.__init__(self);
        # No file means query-only mode: force period 1 so query() always
        # reflects the current timestep (see the class docstring)
        if filename is None or filename == "":
            filename = "";
            period = 1;
        # create the c++ mirror class
        self.cpp_analyzer = _hoomd.Logger(hoomd.context.current.system_definition, filename, header_prefix, overwrite);
        self.setupAnalyzer(period, phase);
        # set the logged quantities: copy names into a C++ string vector
        quantity_list = _hoomd.std_vector_string();
        for item in quantities:
            quantity_list.append(str(item));
        self.cpp_analyzer.setLoggedQuantities(quantity_list);
        # add the logger to the list of loggers so computes/updaters register with it
        hoomd.context.current.loggers.append(self);
        # store metadata exposed for run metadata dumps
        self.metadata_fields = ['filename','period']
        self.filename = filename
        self.period = period
def set_params(self, quantities=None, delimiter=None):
R""" Change the parameters of the log.
Args:
quantities (list): New list of quantities to log (if specified)
delimiter (str): New delimiter between columns in the output file (if specified)
Examples::
logger.set_params(quantities=['bond_harmonic_energy'])
logger.set_params(delimiter=',');
logger.set_params(quantities=['bond_harmonic_energy'], delimiter=',');
"""
hoomd.util.print_status_line();
if quantities is not None:
# set the logged quantities
quantity_list = _hoomd.std_vector_string();
for item in quantities:
quantity_list.append(str(item));
self.cpp_analyzer.setLoggedQuantities(quantity_list);
if delimiter:
self.cpp_analyzer.setDelimiter(delimiter);
def query(self, quantity):
R""" Get the current value of a logged quantity.
Args:
quantity (str): Name of the quantity to return.
:py:meth:`query()` works in two different ways depending on how the logger is configured. If the logger is writing
to a file, :py:meth:`query()` returns the last value written to the file.
If filename is *None*, then :py:meth:`query()` returns the value of the quantity computed at the current timestep.
Examples::
logdata = logger.query('pair_lj_energy')
log = analyze.log(filename=None, quantities=['potential_energy'], period=1)
U = log.query('potential_energy')
"""
use_cache=True;
if self.filename == "":
use_cache = False;
return self.cpp_analyzer.getQuantity(quantity, hoomd.context.current.system.getCurrentTimeStep(), use_cache);
    def register_callback(self, name, callback):
        R""" Register a callback to produce a logged quantity.

        Args:
            name (str): Name of the quantity
            callback (callable): A python callable object (i.e. a lambda, function, or class that implements __call__)

        The callback method must take a single argument, the current timestep, and return a single floating point value to
        be logged.

        Note:
            One callback can query the value of another, but logged quantities are evaluated in order from left to right.

        Examples::

            logger = analyze.log(filename='log.dat', quantities=['my_quantity', 'cosm'], period=100)
            logger.register_callback('my_quantity', lambda timestep: timestep**2)
            logger.register_callback('cosm', lambda timestep: math.cos(logger.query('my_quantity')))

        """
        # Delegate to the C++ Logger, which invokes the callable whenever the
        # named quantity is evaluated
        self.cpp_analyzer.registerCallback(name, callback);
    ## \internal
    # \brief Re-registers all computes and updaters with the logger
    def update_quantities(self):
        # remove all quantities currently registered with the C++ logger
        self.cpp_analyzer.removeAll();
        # re-register every compute and updater known to the system
        hoomd.context.current.system.registerLogger(self.cpp_analyzer);
    def disable(self):
        R""" Disable the logger.

        Examples::

            logger.disable()

        Executing the disable command will remove the logger from the system.
        Any :py:func:`hoomd.run()` command executed after disabling the logger will not use that
        logger during the simulation. A disabled logger can be re-enabled
        with :py:meth:`enable()`.
        """
        hoomd.util.print_status_line()
        # quiet/unquiet suppresses duplicate status output from the nested
        # base-class call
        hoomd.util.quiet_status()
        _analyzer.disable(self)
        hoomd.util.unquiet_status()
        # stop receiving compute/updater registrations while disabled
        hoomd.context.current.loggers.remove(self)
    def enable(self):
        R""" Enables the logger

        Examples::

            logger.enable()

        See :py:meth:`disable()`.
        """
        hoomd.util.print_status_line()
        # quiet/unquiet suppresses duplicate status output from the nested
        # base-class call
        hoomd.util.quiet_status()
        _analyzer.enable(self)
        hoomd.util.unquiet_status()
        # resume receiving compute/updater registrations
        hoomd.context.current.loggers.append(self)
class callback(_analyzer):
R""" Callback analyzer.
Args:
callback (callable): The python callback object
period (int): The callback | |
# Source repository: ezedonovan/ReadabiliPy
"""Common HTML cleaning functions."""
from bs4 import Comment, Doctype, NavigableString
from .text import normalise_text
def elements_to_delete():
    """Elements that will be deleted together with their contents."""
    form_elements = [
        'button', 'datalist', 'fieldset', 'form', 'input', 'label',
        'legend', 'meter', 'optgroup', 'option', 'output', 'progress',
        'select', 'textarea',
    ]
    image_elements = ['area', 'img', 'map', 'picture', 'source']
    media_elements = ['audio', 'track', 'video']
    embedded_elements = ['embed', 'iframe', 'math', 'object', 'param', 'svg']
    interactive_elements = ['details', 'dialog', 'summary']
    scripting_elements = ['canvas', 'noscript', 'script', 'template']
    data_elements = ['data', 'link']
    formatting_elements = ['style']
    navigation_elements = ['nav']
    # Concatenate the HTML5 groups in a fixed, documented order
    return (form_elements + image_elements + media_elements
            + embedded_elements + interactive_elements + scripting_elements
            + data_elements + formatting_elements + navigation_elements)
def elements_to_replace_with_contents():
    """Elements that we will discard while keeping their contents."""
    # Fix: 'kbd' (keyboard input) was misspelled 'kbs', which is not an HTML
    # element and therefore never matched anything.
    elements = ['a', 'abbr', 'address', 'b', 'bdi', 'bdo', 'center', 'cite',
                'code', 'del', 'dfn', 'em', 'i', 'ins', 'kbd', 'mark',
                'rb', 'ruby', 'rp', 'rt', 'rtc', 's', 'samp', 'small', 'span',
                'strong', 'time', 'u', 'var', 'wbr']
    return elements
def special_elements():
    """Elements that we will discard while keeping their contents, and whose
    contents also need additional processing (quote/sub/sup markers)."""
    return ['q', 'sub', 'sup']
def block_level_whitelist():
    """Elements that we will always accept."""
    # Block-level structural and table elements, kept as-is
    return [
        'article', 'aside', 'blockquote', 'caption', 'colgroup', 'col',
        'div', 'dl', 'dt', 'dd', 'figure', 'figcaption', 'footer',
        'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'header', 'li', 'main',
        'ol', 'p', 'pre', 'section', 'table', 'tbody', 'thead',
        'tfoot', 'tr', 'td', 'th', 'ul',
    ]
def structural_elements():
    """Structural elements we do no further processing on (though we do
    remove attributes and alter their contents)."""
    return ['html', 'head', 'body']
def metadata_elements():
    """Metadata elements we do no further processing on (though we do
    remove attributes and alter their contents)."""
    return ['meta', 'link', 'base', 'title']
def linebreak_elements():
    """Elements treated as explicit breaks (<br> and <hr>)."""
    return ['br', 'hr']
def known_elements():
    """All elements that we know by name."""
    # Union (with repetition-preserving concatenation) of every category list
    return (
        structural_elements()
        + metadata_elements()
        + linebreak_elements()
        + elements_to_delete()
        + elements_to_replace_with_contents()
        + special_elements()
        + block_level_whitelist()
    )
def remove_metadata(soup):
    """Remove comments, CData and doctype. These are not rendered by browsers.

    The lxml-based parsers automatically convert CData to comments unless it is
    inside <script> tags. CData will therefore be removed either as a comment
    or as part of a <script> but if any other behaviour is desired, the HTML
    will need to be pre-processed before giving it to the BeautifulSoup parser.
    We were a bit worried about potentially removing content here but satisfied
    ourselves it won't be displayed by most browsers in most cases
    (see https://github.com/alan-turing-institute/ReadabiliPy/issues/32)"""
    def _is_metadata_string(text):
        return isinstance(text, (Comment, Doctype))

    for node in soup.find_all(string=_is_metadata_string):
        node.extract()
def strip_attributes(soup):
    """Strip class and style attributes from every element in the tree."""
    for node in soup.find_all():
        for attribute in ("class", "style"):
            node.attrs.pop(attribute, None)
def remove_blacklist(soup):
    """Remove all blacklisted elements, destroying their contents too."""
    for tag_name in elements_to_delete():
        for node in soup.find_all(tag_name):
            node.decompose()
def unwrap_elements(soup):
    """Flatten all elements where we are only interested in their contents.

    There is no need to unwrap from the "bottom up": we only replace elements
    with their contents, so child elements remain discoverable after their
    parent has been unwrapped.
    """
    for tag_name in elements_to_replace_with_contents():
        for node in soup.find_all(tag_name):
            node.unwrap()
def process_special_elements(soup):
    """Flatten special elements while processing their contents.

    Quotes are wrapped in '"' marks; subscripts are prefixed with '_' and
    superscripts with '^'.
    """
    for tag_name in special_elements():
        for node in soup.find_all(tag_name):
            # Insert marker strings before and/or after the contents
            if node.name == 'q':
                node.insert_before(NavigableString('"'))
                node.insert_after(NavigableString('"'))
            elif node.name == 'sub':
                node.insert_before(NavigableString('_'))
            elif node.name == 'sup':
                node.insert_before(NavigableString('^'))
            # Replace the element by its contents
            node.unwrap()
def process_unknown_elements(soup):
    """Replace any unknown elements with their contents.

    Performance fix: build the known-element lookup once as a set. The
    original called known_elements() — seven list concatenations — and did a
    linear membership test for every element in the tree.
    """
    known = set(known_elements())
    for element in soup.find_all():
        if element.name not in known:
            element.unwrap()
def consolidate_text(soup):
    """Join any consecutive NavigableStrings together."""
    # Iterate over all strings in the tree
    for element in soup.find_all(string=True):
        # Exact type comparison (not isinstance): a plain NavigableString is
        # merged only with another plain NavigableString, never with a
        # Comment/Doctype subclass sibling
        if type(element.previous_sibling) is type(element):
            # Fold this string into its previous sibling and drop this node
            text = "".join([str(element.previous_sibling), str(element)])
            element.previous_sibling.replace_with(text)
            element.extract()
def remove_empty_strings_and_elements(soup):
    """Remove any strings which contain only whitespace. Without this,
    consecutive linebreaks may not be identified correctly."""
    # Snapshot the descendants first: extract() mutates the tree while we
    # iterate. NB. str(element) of a Tag is its full markup, so elements whose
    # markup normalises to nothing are removed too, not only bare strings.
    for element in list(soup.descendants):
        if not normalise_text(str(element)):
            element.extract()
def unnest_paragraphs(soup):
    """Split out block-level elements illegally contained inside paragraphs.

    HTML forbids block-level content inside <p>, but parsed trees may still
    contain it. Each offending paragraph is replaced by three parts in
    document order: a <p> holding the content before the nested element,
    the nested element itself, and a <p> holding the content after it.
    The tree is modified in place.

    Fixes:
    - The element list contained the garbled entry "h1>-<h6", which never
      matches any tag; it is expanded to the individual headings h1..h6.
    - ``previous_siblings`` iterates in *reverse* document order, so the
      "before" paragraph was being built backwards; it is now reversed to
      preserve the original document order.
    """
    # Block-level elements that must not appear inside a paragraph
    # (per the HTML specification's block-level content list).
    illegal_elements = ["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset",
                        "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header",
                        "hr", "li", "main", "nav", "noscript", "ol", "p", "pre", "section", "table", "tfoot",
                        "ul", "video"]
    for nested_type in illegal_elements:
        # Search for nested elements that need to be split out
        nested_elements = [e for e in soup.find_all('p') if e.find(nested_type)]
        while nested_elements:
            # Separate this element into the nested element, plus before and after
            elem_nested = nested_elements[0].find(nested_type)
            p_before = soup.new_tag("p")
            # previous_siblings yields nearest-first; reverse to keep document order
            for sibling in reversed(list(elem_nested.previous_siblings)):
                p_before.append(sibling)
            p_after = soup.new_tag("p")
            for sibling in list(elem_nested.next_siblings):
                p_after.append(sibling)
            # Replace element by before/nested/after.
            # NB. this is done in reverse order as we are adding after the current position
            nested_elements[0].insert_after(p_after)
            nested_elements[0].insert_after(elem_nested)
            nested_elements[0].insert_after(p_before)
            nested_elements[0].decompose()
            # Rerun search for nested elements now that we have rewritten the tree
            nested_elements = [e for e in soup.find_all('p') if e.find(nested_type)]
def insert_paragraph_breaks(soup):
    """Identify <br> and <hr> and split their parent element into multiple elements where appropriate.

    A single <br> is treated as an in-paragraph space; runs of two or more
    <br>s, and any <hr>, mark a paragraph break. Breaks are first recorded
    with a placeholder string, then the containing text is split on that
    placeholder: inside a <p> the split fragments become sibling <p>
    elements, elsewhere they become independent NavigableStrings (wrapped
    into paragraphs later). The tree is modified in place.
    """
    # Indicator which is used as a placeholder to mark paragraph breaks
    BREAK_INDICATOR = "|BREAK_HERE|"
    # Find consecutive <br> elements and replace with a break marker
    for element in soup.find_all('br'):
        # When the next element is not another <br> count how long the chain is
        # (i.e. only process each run of <br>s once, from its last element backwards)
        if (element.next_sibling is None) or (element.next_sibling.name != 'br'):
            br_element_chain = [element]
            while (br_element_chain[-1].previous_sibling is not None) and (br_element_chain[-1].previous_sibling.name == 'br'):
                br_element_chain.append(br_element_chain[-1].previous_sibling)
            # If there's only one <br> then we replace it with a space
            if len(br_element_chain) == 1:
                br_element_chain[0].replace_with(' ')
            # If there are multiple <br>s then replace them with BREAK_INDICATOR
            else:
                br_element_chain[0].replace_with(BREAK_INDICATOR)
                for inner_element in br_element_chain[1:]:
                    inner_element.decompose()
    # Find consecutive <hr> elements and replace with a break marker
    # Use a list rather than the generator, since we are altering the tree as we traverse it
    for element in list(soup.find_all('hr')):
        element.replace_with(BREAK_INDICATOR)
    # Consolidate the text again now that we have added strings to the tree
    consolidate_text(soup)
    # Iterate through the tree, splitting string elements which contain BREAK_INDICATOR
    # Use a list rather than the generator, since we are altering the tree as we traverse it
    for element in list(soup.find_all(string=True)):
        if BREAK_INDICATOR in element:
            # Split the text into two or more fragments (there may be multiple BREAK_INDICATORs in the string)
            text_fragments = [s.strip() for s in str(element).split(BREAK_INDICATOR)]
            # Get the parent element
            parent_element = element.parent
            # If the parent is a paragraph then we want to close and reopen by creating a new tag
            if parent_element.name == "p":
                # Iterate in reverse order as we are repeatedly adding new elements directly after the original one
                for text_fragment in text_fragments[:0:-1]:
                    new_p_element = soup.new_tag("p")
                    new_p_element.string = text_fragment
                    parent_element.insert_after(new_p_element)
                # Replace this element by a navigable string containing the first text fragment
                element.replace_with(NavigableString(text_fragments[0]))
            # Otherwise we want to simply include all the text fragments as independent NavigableStrings (that will be wrapped later)
            else:
                # Iterate in reverse order as we are repeatedly adding new elements directly after the original one
                for text_fragment in text_fragments[:0:-1]:
                    element.insert_after(soup.new_string(text_fragment))
                element.string.replace_with(text_fragments[0])
def normalise_strings(soup):
    """Remove extraneous whitespace and fix unicode issues in all strings."""
    # Includes bare strings outside tags; normalisation returns a copy,
    # so each node is replaced rather than edited in place.
    for string_node in soup.find_all(string=True):
        cleaned = normalise_text(str(string_node))
        # Rebuild with the node's exact class so NavigableString subclasses
        # (e.g. comments) keep their type after replacement.
        string_node.replace_with(type(string_node)(cleaned))
def wrap_bare_text(soup):
"""Wrap any remaining bare text in <p> tags.
We do this to ensure that there is a strong, unique correspondance between presentational paragraphs and DOM structure
- all presentational paragraphs should be the only content associated with their immediate parent
- all presentational paragraphs at the same conceptual level should be equally nested
- the string as displayed in the browser should be equivalent to the innerHTML of the parent (so that indexing is equivalent between presentation and source)
The following examples should not be allowed:
1. Two presentational elements at the same DOM level have non-equivalent index levels
<div index="1.1">
text
<p index="1.1.1">more text</p>
</div>
2. Index 1.1 might | |
<gh_stars>0
import datetime
import hashlib
import traceback
import sys
import json
from pathlib import Path
from typing import List, Optional, Tuple, Dict, NamedTuple, Iterable, Set, Any, Callable
import click
import yaml
import openshift as oc
from doozerlib.rpm_utils import parse_nvr
from doozerlib.brew import KojiWrapper
from doozerlib.rhcos import RHCOSBuildInspector
from doozerlib.cli import cli, pass_runtime
from doozerlib.image import ImageMetadata, BrewBuildImageInspector, ArchiveImageInspector
from doozerlib.assembly_inspector import AssemblyInspector
from doozerlib.runtime import Runtime
from doozerlib.util import red_print, go_suffix_for_arch, brew_arch_for_go_arch, isolate_nightly_name_components, convert_remote_git_to_https, go_arch_for_brew_arch
from doozerlib.assembly import AssemblyTypes, assembly_basis, AssemblyIssue, AssemblyIssueCode
from doozerlib import exectools
from doozerlib.model import Model
from doozerlib.exceptions import DoozerFatalError
from doozerlib.util import find_manifest_list_sha
def default_imagestream_base_name(version: str) -> str:
    """Return the default imagestream base name for a minor version (e.g. '4.9-art-latest')."""
    return version + '-art-latest'
def assembly_imagestream_base_name(runtime: Runtime) -> str:
    """Return the imagestream base name for the runtime's assembly.

    The 'stream' assembly (or no assembly) uses the default art-latest
    name; any other assembly gets a dedicated art-assembly name.
    """
    version = runtime.get_minor_version()
    assembly = runtime.assembly
    if assembly in (None, 'stream'):
        return default_imagestream_base_name(version)
    return f'{version}-art-assembly-{assembly}'
def default_imagestream_namespace_base_name() -> str:
    """Return the default base namespace for payload imagestreams."""
    return 'ocp'
def payload_imagestream_name_and_namespace(base_imagestream_name: str, base_namespace: str, brew_arch: str, private: bool) -> Tuple[str, str]:
    """
    :return: Returns the imagestream name and namespace to which images for the specified CPU arch and privacy mode should be synced.
    """
    # Both the name and the namespace carry the same arch + privacy suffixes.
    suffix = go_suffix_for_arch(brew_arch)
    if private:
        suffix += "-priv"
    return f"{base_imagestream_name}{suffix}", f"{base_namespace}{suffix}"
def modify_and_replace_api_object(api_obj: oc.APIObject, modifier_func: Callable[[oc.APIObject], Any], backup_file_path: Path, dry_run: bool):
    """
    Receives an APIObject, archives the current state of that object, runs a modifying method on it,
    archives the new state of the object, and then tries to replace the object on the
    cluster API server.
    :param api_obj: The openshift client APIObject to work with.
    :param modifier_func: A function that will accept the api_obj as its first parameter and make any desired change
        to that object.
    :param backup_file_path: A Path object that can be used to archive pre & post modification states of the object
        before triggering the update.
    :param dry_run: Write archive files but do not actually update the imagestream.
    """
    # Archive the object's state as it existed before modification.
    with backup_file_path.joinpath(f'replacing-{api_obj.kind()}.{api_obj.namespace()}.{api_obj.name()}.before-modify.json').open(mode='w+') as backup_file:
        backup_file.write(api_obj.as_json(indent=4))
    # Mutate the object in place via the caller-supplied function.
    modifier_func(api_obj)
    api_obj_model = api_obj.model
    # Before replacing api objects on the server, make sure to remove aspects that can
    # confuse subsequent CLI interactions with the object.
    # NOTE: truthiness here relies on the oc Model returning a falsy value for
    # missing keys, so absent annotations are skipped without raising.
    if api_obj_model.metadata.annotations['kubectl.kubernetes.io/last-applied-configuration']:
        api_obj_model.metadata.annotations.pop('kubectl.kubernetes.io/last-applied-configuration')
    # If server-side metadata is being passed in, remove it before we try to replace the object.
    if api_obj_model.metadata:
        for md in ['creationTimestamp', 'generation', 'uid']:
            api_obj_model.metadata.pop(md)
    # Drop the server-populated status stanza entirely.
    api_obj_model.pop('status')
    # Archive the object's state after modification (pre-replace).
    with backup_file_path.joinpath(f'replacing-{api_obj.kind()}.{api_obj.namespace()}.{api_obj.name()}.after-modify.json').open(mode='w+') as backup_file:
        backup_file.write(api_obj.as_json(indent=4))
    if not dry_run:
        api_obj.replace()
@cli.command("release:gen-payload", short_help="Generate input files for release mirroring")
@click.option("--is-name", metavar='NAME', required=False,
help="ImageStream .metadata.name value. For example '4.2-art-latest'")
@click.option("--is-namespace", metavar='NAMESPACE', required=False,
help="ImageStream .metadata.namespace value. For example 'ocp'")
@click.option("--organization", metavar='ORGANIZATION', required=False, default='openshift-release-dev',
help="Quay ORGANIZATION to mirror into.\ndefault=openshift-release-dev")
@click.option("--repository", metavar='REPO', required=False, default='ocp-v4.0-art-dev',
help="Quay REPOSITORY in ORGANIZATION to mirror into.\ndefault=ocp-v4.0-art-dev")
@click.option("--release-repository", metavar='REPO', required=False, default='ocp-release',
help="Quay REPOSITORY in ORGANIZATION to push release payloads (used for multi-arch)\ndefault=ocp-release")
@click.option("--output-dir", metavar='DIR', required=False, default='.',
help="Directory into which the mirroring/imagestream artifacts should be written")
@click.option("--skip-gc-tagging", default=False, is_flag=True,
help="By default, for a named assembly, images will be tagged to prevent garbage collection")
@click.option("--exclude-arch", metavar='ARCH', required=False, multiple=True,
help="Architecture (brew nomenclature) to exclude from payload generation")
@click.option("--emergency-ignore-issues", default=False, is_flag=True,
help="If you must get this command to permit an assembly despite issues. Do not use without approval.")
@click.option("--apply", default=False, is_flag=True,
help="If doozer should perform the mirroring and imagestream updates.")
@click.option("--apply-multi-arch", default=False, is_flag=True,
help="If doozer should also create a release payload for multi-arch/heterogeneous clusters.")
@click.option("--moist-run", default=False, is_flag=True,
help="Performing mirroring/etc but to not actually update imagestreams.")
@pass_runtime
def release_gen_payload(runtime: Runtime, is_name: str, is_namespace: str, organization: str,
repository: str, release_repository: str, output_dir: str, exclude_arch: Tuple[str, ...],
skip_gc_tagging: bool, emergency_ignore_issues: bool,
apply: bool, apply_multi_arch: bool, moist_run: bool):
"""Computes a set of imagestream tags which can be assembled
into an OpenShift release for this assembly. The tags will not be
valid unless --apply and is supplied.
Applying the change will cause the OSBS images to be mirrored into the OpenShift release
repositories on quay.
Applying will also directly update the imagestreams relevant to assembly (e.g.
updating 4.9-art-latest for 4.9's stream assembly).
You may provide the namespace and base name for the image streams, or defaults
will be used.
The ORGANIZATION and REPOSITORY options are combined into
ORGANIZATION/REPOSITORY when preparing for mirroring.
Generate files for mirroring from registry-proxy (OSBS storage) to our
quay registry:
\b
$ doozer --group=openshift-4.2 release:gen-payload \\
--is-name=4.2-art-latest
Note that if you use -i to include specific images, you should also include
openshift-enterprise-cli to satisfy any need for the 'cli' tag. The cli image
is used automatically as a stand-in for images when an arch does not build
that particular tag.
## Validation ##
Additionally we want to check that the following conditions are true for each
imagestream being updated:
* For all architectures built, RHCOS builds must have matching versions of any
unshipped RPM they include (per-entry os metadata - the set of RPMs may differ
between arches, but versions should not).
* Any RPMs present in images (including machine-os-content) from unshipped RPM
builds included in one of our candidate tags must exactly version-match the
latest RPM builds in those candidate tags (ONLY; we never flag what we don't
directly ship.)
These checks (and likely more in the future) should run and any failures should
be listed in brief via a "release.openshift.io/inconsistency" annotation on the
relevant image istag (these are publicly visible; ref. https://bit.ly/37cseC1)
and in more detail in state.yaml. The release-controller, per ART-2195, will
read and propagate/expose this annotation in its display of the release image.
"""
runtime.initialize(mode='both', clone_distgits=False, clone_source=False, prevent_cloning=True)
if runtime.assembly not in {None, "stream", "test"} and runtime.assembly not in runtime.releases_config.releases:
raise DoozerFatalError(f"Assembly '{runtime.assembly}' is not explicitly defined.")
logger = runtime.logger
brew_session = runtime.build_retrying_koji_client()
base_imagestream_name: str = is_name if is_name else assembly_imagestream_base_name(runtime)
base_istream_namespace: str = is_namespace if is_namespace else default_imagestream_namespace_base_name()
if runtime.assembly and runtime.assembly != 'stream' and 'art-latest' in base_imagestream_name:
raise ValueError('The art-latest imagestreams should not be used for an assembly other than "stream"')
logger.info(f'Collecting latest information associated with the assembly: {runtime.assembly}')
assembly_inspector = AssemblyInspector(runtime, brew_session)
logger.info('Checking for mismatched siblings...')
mismatched_siblings = PayloadGenerator.find_mismatched_siblings(assembly_inspector.get_group_release_images().values())
# A list of strings that denote inconsistencies across all payloads generated
assembly_issues: List[AssemblyIssue] = list()
for mismatched_bbii, sibling_bbi in mismatched_siblings:
mismatch_issue = AssemblyIssue(f'{mismatched_bbii.get_nvr()} was built from a different upstream source commit ({mismatched_bbii.get_source_git_commit()[:7]}) than one of its siblings {sibling_bbi.get_nvr()} from {sibling_bbi.get_source_git_commit()[:7]}',
component=mismatched_bbii.get_image_meta().distgit_key,
code=AssemblyIssueCode.MISMATCHED_SIBLINGS)
assembly_issues.append(mismatch_issue)
report = dict()
report['non_release_images'] = [image_meta.distgit_key for image_meta in runtime.get_non_release_image_metas()]
report['release_images'] = [image_meta.distgit_key for image_meta in runtime.get_for_release_image_metas()]
report['missing_image_builds'] = [dgk for (dgk, ii) in assembly_inspector.get_group_release_images().items() if ii is None] # A list of metas where the assembly did not find a build
if runtime.assembly_type is AssemblyTypes.STREAM:
# Only nightlies have the concept of private and public payloads
privacy_modes = [False, True]
else:
privacy_modes = [False]
# Structure to record rhcos builds we use so that they can be analyzed for inconsistencies
targeted_rhcos_builds: Dict[bool, List[RHCOSBuildInspector]] = {
False: [],
True: []
}
"""
Collect a list of builds to tag in order to prevent garbage collection.
Note: we also use this list to warm up caches, so don't wrap this section
with `if not skip_gc_tagging`.
To prevent garbage collection for custom
assemblies (which won't normally be released via errata tool, triggering
the traditional garbage collection prevention), we must tag these builds
explicitly to prevent their GC. It is necessary to prevent GC, because
we want to be able to build custom releases off of custom releases, and
so on. If we loose images and builds for custom releases in brew due
to garbage collection, we will not be able to construct derivative
release payloads.
"""
assembly_build_ids: Set[int] = set() # This list of builds associated with the group/assembly will be used to warm up caches
list_tags_tasks: Dict[Tuple[int, str], Any] = dict() # Maps (build_id, tag) tuple to multicall task to list tags
with runtime.pooled_koji_client_session() as pcs:
with pcs.multicall(strict=True) as m:
for bbii in assembly_inspector.get_group_release_images().values():
if bbii:
build_id = bbii.get_brew_build_id()
assembly_build_ids.add(build_id) # Collect up build ids for cache warm up
hotfix_tag = bbii.get_image_meta().hotfix_brew_tag()
list_tags_tasks[(build_id, hotfix_tag)] = m.listTags(build=build_id)
# RPMs can build for multiple versions of RHEL. For example, a single RPM
# metadata can target 7 & 8.
# For each rhel version targeted by our RPMs, build a list of RPMs
# appropriate for the RHEL version with respect to the group/assembly.
rhel_version_scanned_for_rpms: Dict[int, bool] = dict() # Maps rhel version -> bool indicating whether we have processed that rhel version
for rpm_meta in runtime.rpm_metas():
for el_ver in rpm_meta.determine_rhel_targets():
if el_ver in rhel_version_scanned_for_rpms:
# We've already processed this RHEL | |
<filename>src/tests/google/appengine/api/search/QueryLexer.py
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from google.appengine._internal.antlr3 import *
from google.appengine._internal.antlr3.compat import set, frozenset
# Token channel and token-type constants emitted by the ANTLR code generator
# for the Query.g grammar. The numeric values are assigned by ANTLR and must
# stay in sync with the generated parser; do not edit by hand.
HIDDEN = BaseRecognizer.HIDDEN
LT=19
EXPONENT=36
LETTER=40
FUZZY=6
OCTAL_ESC=44
FLOAT=27
NAME_START=38
NOT=33
AND=31
EOF=-1
LPAREN=25
WORD=14
HAS=7
RPAREN=26
NAME=16
ESC_SEQ=37
DIGIT=34
EQ=23
DOT=35
NE=22
GE=20
T__46=46
T__47=47
T__45=45
T__48=48
CONJUNCTION=4
UNICODE_ESC=43
NAME_MID=39
NUMBER=11
HEX_DIGIT=42
UNDERSCORE=41
LITERAL=8
INT=28
VALUE=15
TEXT=30
PHRASE=29
RESTRICTION=12
COLON=24
DISJUNCTION=5
WS=17
NEGATION=9
OR=32
GT=21
GLOBAL=10
LE=18
STRING=13
class QueryLexer(Lexer):
grammarFileName = "blaze-out/host/genfiles/apphosting/api/search/genantlr/Query.g"
antlr_version = version_str_to_tuple("3.1.1")
antlr_version_str = "3.1.1"
    def __init__(self, input=None, state=None):
        """Initialize the lexer and wire up its generated DFA decision tables.

        :param input: ANTLR character stream to tokenize, or None.
        :param state: shared recognizer state; a fresh RecognizerSharedState
            is created when not supplied.
        """
        if state is None:
            state = RecognizerSharedState()
        Lexer.__init__(self, input, state)
        # DFA 8 selects between the three FLOAT alternatives in mFLOAT
        # (digits '.' digits, '.' digits, digits EXPONENT); the table
        # attributes are generated class-level constants.
        self.dfa8 = self.DFA8(
            self, 8,
            eot = self.DFA8_eot,
            eof = self.DFA8_eof,
            min = self.DFA8_min,
            max = self.DFA8_max,
            accept = self.DFA8_accept,
            special = self.DFA8_special,
            transition = self.DFA8_transition
        )
        # DFA 16 — presumably drives the generated top-level token dispatch
        # (mTokens, defined later in the file); confirm against the grammar.
        self.dfa16 = self.DFA16(
            self, 16,
            eot = self.DFA16_eot,
            eof = self.DFA16_eof,
            min = self.DFA16_min,
            max = self.DFA16_max,
            accept = self.DFA16_accept,
            special = self.DFA16_special,
            transition = self.DFA16_transition
        )
def mT__45(self, ):
try:
_type = T__45
_channel = DEFAULT_CHANNEL
pass
self.match(43)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mT__46(self, ):
try:
_type = T__46
_channel = DEFAULT_CHANNEL
pass
self.match(126)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mT__47(self, ):
try:
_type = T__47
_channel = DEFAULT_CHANNEL
pass
self.match(45)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mT__48(self, ):
try:
_type = T__48
_channel = DEFAULT_CHANNEL
pass
self.match(44)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mOR(self, ):
try:
_type = OR
_channel = DEFAULT_CHANNEL
pass
self.match("OR")
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mAND(self, ):
try:
_type = AND
_channel = DEFAULT_CHANNEL
pass
self.match("AND")
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mNOT(self, ):
try:
_type = NOT
_channel = DEFAULT_CHANNEL
pass
self.match("NOT")
self._state.type = _type
self._state.channel = _channel
finally:
pass
    def mINT(self, ):
        """Lex an INT token: one or more decimal digits (DIGIT+)."""
        try:
            _type = INT
            _channel = DEFAULT_CHANNEL
            pass
            # Generated (...)+ loop: consume DIGITs until a non-digit appears;
            # raise EarlyExitException if no digit was consumed at all.
            cnt1 = 0
            while True:
                alt1 = 2
                LA1_0 = self.input.LA(1)
                if ((48 <= LA1_0 <= 57)) :
                    alt1 = 1
                if alt1 == 1:
                    pass
                    self.mDIGIT()
                else:
                    if cnt1 >= 1:
                        break
                    eee = EarlyExitException(1, self.input)
                    raise eee
                cnt1 += 1
            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
def mCOLON(self, ):
try:
_type = COLON
_channel = DEFAULT_CHANNEL
pass
self.match(58)
self._state.type = _type
self._state.channel = _channel
finally:
pass
    def mFLOAT(self, ):
        """Lex a FLOAT token.

        DFA 8 picks one of three alternatives:
        1. DIGIT+ '.' DIGIT* EXPONENT?
        2. '.' DIGIT+ EXPONENT?
        3. DIGIT+ EXPONENT
        """
        try:
            _type = FLOAT
            _channel = DEFAULT_CHANNEL
            alt8 = 3
            alt8 = self.dfa8.predict(self.input)
            if alt8 == 1:
                pass
                # Alternative 1: DIGIT+ '.' DIGIT* (EXPONENT)?
                cnt2 = 0
                while True:
                    alt2 = 2
                    LA2_0 = self.input.LA(1)
                    if ((48 <= LA2_0 <= 57)) :
                        alt2 = 1
                    if alt2 == 1:
                        pass
                        self.mDIGIT()
                    else:
                        if cnt2 >= 1:
                            break
                        eee = EarlyExitException(2, self.input)
                        raise eee
                    cnt2 += 1
                self.mDOT()
                while True:
                    alt3 = 2
                    LA3_0 = self.input.LA(1)
                    if ((48 <= LA3_0 <= 57)) :
                        alt3 = 1
                    if alt3 == 1:
                        pass
                        self.mDIGIT()
                    else:
                        break
                # Optional exponent: 'E' (69) or 'e' (101)
                alt4 = 2
                LA4_0 = self.input.LA(1)
                if (LA4_0 == 69 or LA4_0 == 101) :
                    alt4 = 1
                if alt4 == 1:
                    pass
                    self.mEXPONENT()
            elif alt8 == 2:
                pass
                # Alternative 2: '.' DIGIT+ (EXPONENT)?
                self.mDOT()
                cnt5 = 0
                while True:
                    alt5 = 2
                    LA5_0 = self.input.LA(1)
                    if ((48 <= LA5_0 <= 57)) :
                        alt5 = 1
                    if alt5 == 1:
                        pass
                        self.mDIGIT()
                    else:
                        if cnt5 >= 1:
                            break
                        eee = EarlyExitException(5, self.input)
                        raise eee
                    cnt5 += 1
                alt6 = 2
                LA6_0 = self.input.LA(1)
                if (LA6_0 == 69 or LA6_0 == 101) :
                    alt6 = 1
                if alt6 == 1:
                    pass
                    self.mEXPONENT()
            elif alt8 == 3:
                pass
                # Alternative 3: DIGIT+ EXPONENT (exponent required)
                cnt7 = 0
                while True:
                    alt7 = 2
                    LA7_0 = self.input.LA(1)
                    if ((48 <= LA7_0 <= 57)) :
                        alt7 = 1
                    if alt7 == 1:
                        pass
                        self.mDIGIT()
                    else:
                        if cnt7 >= 1:
                            break
                        eee = EarlyExitException(7, self.input)
                        raise eee
                    cnt7 += 1
                self.mEXPONENT()
            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mWS(self, ):
        """Lex a WS token: a single whitespace character (tab, LF, CR, or space)."""
        try:
            _type = WS
            _channel = DEFAULT_CHANNEL
            pass
            # Match one of: 9 (tab), 10 (LF), 13 (CR), 32 (space).
            if (9 <= self.input.LA(1) <= 10) or self.input.LA(1) == 13 or self.input.LA(1) == 32:
                self.input.consume()
            else:
                mse = MismatchedSetException(None, self.input)
                self.recover(mse)
                raise mse
            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mPHRASE(self, ):
        """Lex a PHRASE token: a double-quoted string allowing escape sequences."""
        try:
            _type = PHRASE
            _channel = DEFAULT_CHANNEL
            pass
            self.match(34)
            # Loop over the phrase body: backslash (92) starts an escape
            # sequence; any character other than '"' (34) is consumed raw.
            while True:
                alt9 = 3
                LA9_0 = self.input.LA(1)
                if (LA9_0 == 92) :
                    alt9 = 1
                elif ((0 <= LA9_0 <= 33) or (35 <= LA9_0 <= 91) or (93 <= LA9_0 <= 65535)) :
                    alt9 = 2
                if alt9 == 1:
                    pass
                    self.mESC_SEQ()
                elif alt9 == 2:
                    pass
                    if (0 <= self.input.LA(1) <= 33) or (35 <= self.input.LA(1) <= 91) or (93 <= self.input.LA(1) <= 65535):
                        self.input.consume()
                    else:
                        mse = MismatchedSetException(None, self.input)
                        self.recover(mse)
                        raise mse
                else:
                    break
            self.match(34)
            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mNAME(self, ):
        """Lex a NAME token: NAME_START followed by zero or more NAME_MID characters."""
        try:
            _type = NAME
            _channel = DEFAULT_CHANNEL
            pass
            self.mNAME_START()
            # Consume NAME_MID characters (digits, ASCII letters, underscore,
            # and several Unicode letter/ideograph ranges) while they match.
            while True:
                alt10 = 2
                LA10_0 = self.input.LA(1)
                if ((48 <= LA10_0 <= 57) or (65 <= LA10_0 <= 90) or LA10_0 == 95 or (97 <= LA10_0 <= 122) or (192 <= LA10_0 <= 214) or (216 <= LA10_0 <= 246) or (248 <= LA10_0 <= 8191) or (12352 <= LA10_0 <= 12687) or (13056 <= LA10_0 <= 13183) or (13312 <= LA10_0 <= 15661) or (19968 <= LA10_0 <= 40959) or (63744 <= LA10_0 <= 64255)) :
                    alt10 = 1
                if alt10 == 1:
                    pass
                    self.mNAME_MID()
                else:
                    break
            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
def mLPAREN(self, ):
try:
_type = LPAREN
_channel = DEFAULT_CHANNEL
pass
self.match(40)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mRPAREN(self, ):
try:
_type = RPAREN
_channel = DEFAULT_CHANNEL
pass
self.match(41)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mLT(self, ):
try:
_type = LT
_channel = DEFAULT_CHANNEL
pass
self.match(60)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mGT(self, ):
try:
_type = GT
_channel = DEFAULT_CHANNEL
pass
self.match(62)
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mGE(self, ):
try:
_type = GE
_channel = DEFAULT_CHANNEL
pass
self.match(">=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mLE(self, ):
try:
_type = LE
_channel = DEFAULT_CHANNEL
pass
self.match("<=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mNE(self, ):
try:
_type = NE
_channel = DEFAULT_CHANNEL
pass
self.match("!=")
self._state.type = _type
self._state.channel = _channel
finally:
pass
def mEQ(self, ):
try:
_type = EQ
_channel = DEFAULT_CHANNEL
pass
self.match(61)
self._state.type = _type
self._state.channel = _channel
finally:
pass
    def mTEXT(self, ):
        """Lex a TEXT token: one or more characters from the generated text set.

        The first-character set is slightly narrower than the continuation set
        (it excludes ',' .. '-' except ',' itself — see the generated ranges).
        """
        try:
            _type = TEXT
            _channel = DEFAULT_CHANNEL
            pass
            # Match the mandatory first character.
            if self.input.LA(1) == 33 or (35 <= self.input.LA(1) <= 39) or self.input.LA(1) == 44 or (46 <= self.input.LA(1) <= 57) or self.input.LA(1) == 59 or (63 <= self.input.LA(1) <= 125) or (256 <= self.input.LA(1) <= 32767):
                self.input.consume()
            else:
                mse = MismatchedSetException(None, self.input)
                self.recover(mse)
                raise mse
            # Greedily consume any further characters from the continuation set.
            while True:
                alt11 = 2
                LA11_0 = self.input.LA(1)
                if (LA11_0 == 33 or (35 <= LA11_0 <= 39) or (44 <= LA11_0 <= 57) or LA11_0 == 59 or (63 <= LA11_0 <= 125) or (256 <= LA11_0 <= 32767)) :
                    alt11 = 1
                if alt11 == 1:
                    pass
                    if self.input.LA(1) == 33 or (35 <= self.input.LA(1) <= 39) or (44 <= self.input.LA(1) <= 57) or self.input.LA(1) == 59 or (63 <= self.input.LA(1) <= 125) or (256 <= self.input.LA(1) <= 32767):
                        self.input.consume()
                    else:
                        mse = MismatchedSetException(None, self.input)
                        self.recover(mse)
                        raise mse
                else:
                    break
            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
def mNAME_START(self, ):
try:
pass
if (65 <= self.input.LA(1) <= 90) or self.input.LA(1) == 95 or (97 <= self.input.LA(1) <= 122) or (192 <= self.input.LA(1) <= 214) or (216 <= self.input.LA(1) <= 246) or (248 <= self.input.LA(1) <= 8191) or (12352 <= self.input.LA(1) <= 12687) or (13056 <= self.input.LA(1) <= 13183) or (13312 <= self.input.LA(1) <= 15661) or (19968 <= self.input.LA(1) <= 40959) or (63744 <= self.input.LA(1) <= 64255):
self.input.consume()
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
| |
EvaluationCacheManager.CacheStatus.valid
else:
metrics.counter_inc(name="anchore_policy_evaluation_cache_misses_invalid")
return EvaluationCacheManager.CacheStatus.invalid
def flush(self):
"""
Flush all cache entries for the given image
:return:
"""
session = get_session()
for entry in session.query(CachedPolicyEvaluation).filter_by(
user_id=self.image.user_id, image_id=self.image.id
):
try:
self._delete_entry(entry)
except:
log.exception("Could not delete eval cache entry: {}".format(entry))
return True
@flask_metrics.do_not_track()
@authorizer.requires_account(with_types=INTERNAL_SERVICE_ALLOWED)
def check_user_image_inline(user_id, image_id, tag, bundle):
    """
    Execute a policy evaluation using the info in the request body including the bundle content.

    Consults the evaluation cache first (when enabled); on a miss or when the
    cache is unusable, builds and executes the bundle, records metrics, and
    best-effort saves the result back to the cache.

    :param user_id: account id owning the image
    :param image_id: id of the image to evaluate
    :param tag: tag to evaluate against; None matches only wildcard mappings
    :param bundle: policy bundle content (dict) to execute
    :return: evaluation result JSON, or an (error_response, status_code) tuple
    """
    timer = time.time()
    db = get_session()
    cache_mgr = None
    try:
        # Input validation
        if tag is None:
            # set tag value to a value that only matches wildcards
            tag = "*/*:*"
        try:
            img_obj = db.query(Image).get((image_id, user_id))
        # NOTE(review): bare except reports every query failure as a 404 —
        # consider narrowing to the expected SQLAlchemy error types.
        except:
            return make_response_error("Image not found", in_httpcode=404), 404
        if not img_obj:
            log.info(
                "Request for evaluation of image that cannot be found: user_id = {}, image_id = {}".format(
                    user_id, image_id
                )
            )
            return make_response_error("Image not found", in_httpcode=404), 404
        if evaluation_cache_enabled:
            # Cache lookup path: any failure here falls through to a full evaluation.
            timer2 = time.time()
            try:
                try:
                    conn_timeout = (
                        ApiRequestContextProxy.get_service().configuration.get(
                            "catalog_client_conn_timeout", DEFAULT_CACHE_CONN_TIMEOUT
                        )
                    )
                    read_timeout = (
                        ApiRequestContextProxy.get_service().configuration.get(
                            "catalog_client_read_timeout", DEFAULT_CACHE_READ_TIMEOUT
                        )
                    )
                    cache_mgr = EvaluationCacheManager(
                        img_obj, tag, bundle, conn_timeout, read_timeout
                    )
                except ValueError as err:
                    log.warn(
                        "Could not leverage cache due to error in bundle data: {}".format(
                            err
                        )
                    )
                    cache_mgr = None
                if cache_mgr is None:
                    log.info(
                        "Could not initialize cache manager for policy evaluation, skipping cache usage"
                    )
                else:
                    cached_result = cache_mgr.refresh()
                    if cached_result:
                        # Cache hit: return the stored evaluation immediately.
                        metrics.counter_inc(name="anchore_policy_evaluation_cache_hits")
                        metrics.histogram_observe(
                            "anchore_policy_evaluation_cache_access_latency",
                            time.time() - timer2,
                            status="hit",
                        )
                        log.info(
                            "Returning cached result of policy evaluation for {}/{}, with tag {} and bundle {} with digest {}. Last evaluation: {}".format(
                                user_id,
                                image_id,
                                tag,
                                cache_mgr.bundle_id,
                                cache_mgr.bundle_digest,
                                cached_result.get("last_modified"),
                            )
                        )
                        return cached_result
                    else:
                        metrics.counter_inc(
                            name="anchore_policy_evaluation_cache_misses"
                        )
                        metrics.histogram_observe(
                            "anchore_policy_evaluation_cache_access_latency",
                            time.time() - timer2,
                            status="miss",
                        )
                        log.info(
                            "Policy evaluation not cached, or invalid, executing evaluation for {}/{} with tag {} and bundle {} with digest {}".format(
                                user_id,
                                image_id,
                                tag,
                                cache_mgr.bundle_id,
                                cache_mgr.bundle_digest,
                            )
                        )
            except Exception as ex:
                log.exception(
                    "Unexpected error operating on policy evaluation cache. Skipping use of cache."
                )
        else:
            log.info("Policy evaluation cache disabled. Executing evaluation")
        # Build bundle exec.
        problems = []
        executable_bundle = None
        try:
            # Allow deprecated gates here to support upgrade cases from old policy bundles.
            executable_bundle = build_bundle(bundle, for_tag=tag, allow_deprecated=True)
            if executable_bundle.init_errors:
                problems = executable_bundle.init_errors
        except InitializationError as e:
            log.exception("Bundle construction and initialization returned errors")
            problems = e.causes
        eval_result = None
        if not problems:
            # Execute bundle
            try:
                eval_result = executable_bundle.execute(
                    img_obj, tag, ExecutionContext(db_session=db, configuration={})
                )
            except Exception as e:
                log.exception(
                    "Error executing policy bundle {} against image {} w/tag {}: {}".format(
                        bundle["id"], image_id, tag, e
                    )
                )
                return (
                    make_response_error(
                        "Internal bundle evaluation error",
                        details={
                            "message": "Cannot execute given policy against the image due to errors executing the policy bundle: {}".format(
                                e
                            )
                        },
                        in_httpcode=500,
                    ),
                    500,
                )
        else:
            # Construct a failure eval with details on the errors and mappings to send to client
            eval_result = build_empty_error_execution(
                img_obj, tag, executable_bundle, errors=problems, warnings=[]
            )
            if (
                executable_bundle
                and executable_bundle.mapping
                and len(executable_bundle.mapping.mapping_rules) == 1
            ):
                eval_result.executed_mapping = executable_bundle.mapping.mapping_rules[
                    0
                ]
        # Assemble the API response object from the evaluation result.
        resp = PolicyEvaluation()
        resp.user_id = user_id
        resp.image_id = image_id
        resp.tag = tag
        resp.bundle = bundle
        resp.matched_mapping_rule = (
            eval_result.executed_mapping.json()
            if eval_result.executed_mapping
            else False
        )
        resp.last_modified = int(time.time())
        resp.final_action = eval_result.bundle_decision.final_decision.name
        resp.final_action_reason = eval_result.bundle_decision.reason
        resp.matched_whitelisted_images_rule = (
            eval_result.bundle_decision.whitelisted_image.json()
            if eval_result.bundle_decision.whitelisted_image
            else False
        )
        resp.matched_blacklisted_images_rule = (
            eval_result.bundle_decision.blacklisted_image.json()
            if eval_result.bundle_decision.blacklisted_image
            else False
        )
        resp.result = eval_result.as_table_json()
        resp.created_at = int(time.time())
        resp.evaluation_problems = [
            problem_from_exception(i) for i in eval_result.errors
        ]
        resp.evaluation_problems += [
            problem_from_exception(i) for i in eval_result.warnings
        ]
        if resp.evaluation_problems:
            for i in resp.evaluation_problems:
                log.warn(
                    "Returning evaluation response for image {}/{} w/tag {} and bundle {} that contains error: {}".format(
                        user_id, image_id, tag, bundle["id"], json.dumps(i.to_json())
                    )
                )
            metrics.histogram_observe(
                "anchore_policy_evaluation_time_seconds",
                time.time() - timer,
                status="fail",
            )
        else:
            metrics.histogram_observe(
                "anchore_policy_evaluation_time_seconds",
                time.time() - timer,
                status="success",
            )
        result = resp.to_json()
        # Never let the cache block returning results
        try:
            if evaluation_cache_enabled and cache_mgr is not None:
                cache_mgr.save(result)
        except Exception as ex:
            log.exception(
                "Failed saving policy result in cache. Skipping and continuing."
            )
        db.commit()
        return result
    except HTTPException as e:
        # Framework errors propagate unchanged after rolling back the session.
        db.rollback()
        log.exception("Caught exception in execution: {}".format(e))
        raise
    except Exception as e:
        db.rollback()
        log.exception("Failed processing bundle evaluation: {}".format(e))
        return (
            make_response_error(
                "Unexpected internal error",
                details={"message": str(e)},
                in_httpcode=500,
            ),
            500,
        )
    finally:
        db.close()
@flask_metrics.do_not_track()
@authorizer.requires_account(with_types=INTERNAL_SERVICE_ALLOWED)
def get_image_vulnerabilities(user_id, image_id, force_refresh=False, vendor_only=True):
    """
    Look up the vulnerability report for an image, optionally recomputing it.

    :param user_id: account id that owns the image
    :param image_id: id of the image to report on
    :param force_refresh: when true, flush and recompute vulnerabilities instead of returning stored values
    :param vendor_only: when true, filter out vulnerabilities that vendors will explicitly not address
    :return: tuple of (report json, http status code)
    """
    session = get_session()
    try:
        image_record = session.query(Image).get((image_id, user_id))
        if image_record is None:
            return make_response_error("Image not found", in_httpcode=404), 404
        vuln_report = get_vulnerabilities_provider().get_image_vulnerabilities_json(
            image=image_record,
            vendor_only=vendor_only,
            db_session=session,
            force_refresh=force_refresh,
            use_store=True,
        )
        session.commit()
        return vuln_report, 200
    except HTTPException:
        # Let the framework translate HTTP errors, but undo any db work first
        session.rollback()
        raise
    except Exception as e:
        log.exception(
            "Error checking image {}, {} for vulnerabiltiies. Rolling back".format(
                user_id, image_id
            )
        )
        session.rollback()
        return make_response_error(e, in_httpcode=500), 500
    finally:
        session.close()
@authorizer.requires_account(with_types=INTERNAL_SERVICE_ALLOWED)
def ingress_image(ingress_request):
    """
    Load an image into the policy engine synchronously.

    :param ingress_request: json object specifying the identity of the image to sync
    :return: tuple of (ImageIngressResponse json, http status code)
    :raises ValueError: if user_id or image_id is missing from the request
    """
    req = ImageIngressRequest.from_json(ingress_request)
    if not req.user_id:
        raise ValueError("user_id")
    if not req.image_id:
        raise ValueError("image_id")
    try:
        # Run the load synchronously for now to see how slow it really is;
        # catalog client timeouts come from the service configuration.
        conn_timeout = ApiRequestContextProxy.get_service().configuration.get(
            "catalog_client_conn_timeout", DEFAULT_CACHE_CONN_TIMEOUT
        )
        read_timeout = ApiRequestContextProxy.get_service().configuration.get(
            "catalog_client_read_timeout", DEFAULT_CACHE_READ_TIMEOUT
        )
        task = ImageLoadTask(
            req.user_id,
            req.image_id,
            url=req.fetch_url,
            content_conn_timeout=conn_timeout,
            content_read_timeout=read_timeout,
        )
        task.execute()
        resp = ImageIngressResponse()
        # Both branches of the old result check set the same value: the call above
        # is synchronous, so the image is always fully loaded by the time we
        # respond. Should become 'accepted' once async ingress is implemented.
        resp.status = "loaded"
        resp.problems = []
        return resp.to_json(), 200
    except Exception as e:
        log.exception("Error loading image into policy engine")
        return make_response_error(e, in_httpcode=500), 500
@authorizer.requires_account(with_types=INTERNAL_SERVICE_ALLOWED)
def validate_bundle(policy_bundle):
    """
    Validate a policy bundle without executing it.

    Always answers 200 OK; the response body carries the pass/fail flag and any
    validation problems that were found.

    :param policy_bundle: the bundle document to validate
    :return: PolicyValidationResponse as json (200 OK)
    """
    try:
        validation_errors = []
        try:
            candidate = build_bundle(policy_bundle, allow_deprecated=False)
            if candidate.init_errors:
                validation_errors = candidate.init_errors
        except ValidationError as e:
            validation_errors.append(e)
        except InitializationError as e:
            # An initialization failure carries the individual validation
            # issues as its causes; surface those instead.
            validation_errors = e.causes
        resp = PolicyValidationResponse()
        resp.valid = not validation_errors
        resp.validation_details = [
            problem_from_exception(err, severity="error") for err in validation_errors
        ]
        return resp.to_json()
    except HTTPException as e:
        log.exception("Caught exception in execution: {}".format(e))
        raise
    except Exception as e:
        log.exception("Failed processing bundle evaluation: {}".format(e))
        return make_response_error(e, in_httpcode=500), 500
def _describe_trigger_param(param):
    """Build the TriggerParamSpec description for one trigger parameter."""
    tps = TriggerParamSpec()
    tps.name = param.name
    tps.description = param.description
    tps.example = param.example
    tps.validator = param.validator.json()
    tps.required = param.required
    tps.superceded_by = getattr(param, "__superceded_by__", None)
    lifecycle = getattr(param, "__lifecycle_state__", None)
    tps.state = lifecycle.name if lifecycle is not None else "active"
    return tps


def _describe_trigger(trigger_cls):
    """Build the TriggerSpec description for one gate trigger class."""
    tr = TriggerSpec()
    tr.name = trigger_cls.__trigger_name__
    tr.description = trigger_cls.__description__ if trigger_cls.__description__ else ""
    tr.superceded_by = getattr(trigger_cls, "__superceded_by__", None)
    lifecycle = getattr(trigger_cls, "__lifecycle_state__", None)
    tr.state = lifecycle.name if lifecycle is not None else "active"
    tr.parameters = []
    params = trigger_cls._parameters()
    if params:
        # Present parameters in their declared display order
        for param in sorted(params.values(), key=lambda x: x.sort_order):
            tr.parameters.append(_describe_trigger_param(param))
    return tr


def _describe_gate(name, gate_cls):
    """Build the GateSpec description for one registered gate class."""
    g = GateSpec()
    g.name = name
    g.description = gate_cls.__description__ if gate_cls.__description__ else ""
    g.superceded_by = getattr(gate_cls, "__superceded_by__", None)
    lifecycle = getattr(gate_cls, "__lifecycle_state__", None)
    g.state = lifecycle.name if lifecycle is not None else "active"
    g.triggers = [_describe_trigger(t) for t in gate_cls.__triggers__]
    return g


@authorizer.requires_account(with_types=INTERNAL_SERVICE_ALLOWED)
def describe_policy():
    """
    Return a dictionary/json description of the set of gates available and their triggers.

    :return: tuple of (list of gate descriptions sorted by lifecycle state, 200)
    """
    try:
        doc = [
            _describe_gate(name, Gate.get_gate_by_name(name)).to_json()
            for name in Gate.registered_gate_names()
        ]
        doc = sorted(doc, key=lambda x: x["state"])
        return doc, 200
    except Exception as e:
        log.exception("Error describing gate system")
        return make_response_error(e, in_httpcode=500), 500
def query_images_by_package(dbsession, request_inputs):
user_auth = request_inputs["auth"]
method = request_inputs["method"]
params = request_inputs["params"]
userId = request_inputs["userId"]
return_object = {}
httpcode = 500
pkg_name = request_inputs.get("params", {}).get("name", None)
pkg_version = request_inputs.get("params", {}).get("version", None)
pkg_type = request_inputs.get("params", {}).get("package_type", None)
| |
255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.shoulder_R0_fk0_ctl.setPalette(palette)
self.shoulder_R0_fk0_ctl.setAutoFillBackground(True)
self.shoulder_R0_fk0_ctl.setObjectName("shoulder_R0_fk0_ctl")
self.legFront_R0_root_ctl = SelectBtn_greenCircle(biped_body)
self.legFront_R0_root_ctl.setGeometry(QtCore.QRect(108, 198, 16, 16))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.legFront_R0_root_ctl.setPalette(palette)
self.legFront_R0_root_ctl.setAutoFillBackground(True)
self.legFront_R0_root_ctl.setObjectName("legFront_R0_root_ctl")
self.legFront_R0_fk0_ctl = SelectBtn_darkGreenBox(biped_body)
self.legFront_R0_fk0_ctl.setGeometry(QtCore.QRect(103, 218, 20, 15))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.legFront_R0_fk0_ctl.setPalette(palette)
self.legFront_R0_fk0_ctl.setAutoFillBackground(True)
self.legFront_R0_fk0_ctl.setObjectName("legFront_R0_fk0_ctl")
self.legFront_R0_fk1_ctl = SelectBtn_darkGreenBox(biped_body)
self.legFront_R0_fk1_ctl.setGeometry(QtCore.QRect(70, 209, 20, 15))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(108, 255, 63, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(29, 127, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(39, 170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(157, 255, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(59, 255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(157, | |
# repo: mwallraf/network-config-parser
# -*- coding: utf-8 -*-
import os
import re
import pprint
#from netaddr import IPNetwork
import logging
import argparse
import json
import sys
from nmsnetlib.parsers.carrierethernet import ERSParser, SAOSParser
from nmsnetlib.parsers.parsercollection import ParserCollection
from operator import itemgetter, attrgetter
import time
from configure import config as AppConfig
from datetime import datetime
## SETUP LOGGING
myconfig = AppConfig.get(os.environ.get("ENV").lower(), "prod")
logger = myconfig.main_logger
today = datetime.now().strftime("%Y-%m-%d")
# required for ascii table parsing:
#import docutils.statemachine
#import docutils.parsers.rst.tableparser
## GLOBAL VARIABLES
VERSION = "1.0"
VERSION_HISTORY = '''\
-- HISTORY --
1.0 - 20170804 - initial version
1.2 - 20200401 - add logging
update saos definition: PORTS section was changed after upgrades and therefore
LAG ports were not detected if they had any traffic-services
configure
'''
#####
#####
##### # define logging options
##### logger = logging.getLogger('blockparser')
##### logger.setLevel(logging.TRACE)
#####
#####
##### ## log formatting
##### screenformatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s : %(message)s')
##### fileformatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s : %(message)s')
##### ## filters
##### ## screen log handlers (disable by default -> Level 100)
##### logprinter = logging.StreamHandler()
##### logprinter.setFormatter(screenformatter)
##### logprinter.setLevel(logger.getEffectiveLevel())
##### ## file log handlers
##### debugprinter = logging.FileHandler(DEBUG_FILE)
##### debugprinter.setFormatter(fileformatter)
##### debugprinter.setLevel(logging.TRACE)
##### ## add handlers to the logger
##### logger.addHandler(logprinter)
##### logger.addHandler(debugprinter)
#####
#####
'''
get the commandline arguments
'''
def get_args(default_config_dir="."):
    """
    Parse command line arguments for the CES backup parser.

    :param default_config_dir: directory searched for backup configs when the
                               -c/--config-dir flag is not given
    :return: argparse.Namespace with config_dir and delim attributes
    """
    description = "Parse all CES backup."
    epilog = "Current version: {}\n\n".format(VERSION) + VERSION_HISTORY
    parser = argparse.ArgumentParser(
        description=description,
        epilog=epilog,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "-c",
        "--config-dir",
        help="Dir where the CES backup configs can be found (default = {})".format(
            default_config_dir
        ),
        default=default_config_dir,
    )
    parser.add_argument(
        "-D",
        "--delim",
        help="Column delimiter used for output (default = |)",
        default="|",
    )
    # NOTE(review): the old verbose/quiet/debug/trace flags and their
    # logger-level wiring were long dead (commented out); logging is now
    # configured by the AppConfig at module import time.
    return parser.parse_args()
'''
-----------------------------------------------------
START OF MAIN SCRIPT
-----------------------------------------------------
'''
# def parseERS():
# ### parse each ERS and print out the result, only the first time we want to see the CSV header
# print_title = True
# for f in configs:
# ers = ERS(hostname=f, configfile=ERSDIR+f, delim=cmdargs.delim)
# ers.parse()
# if cmdargs.debug:
# ers.debug_printer()
# ers.printer_tdi(print_title=print_title, print_info=print_title)
# print_title = False
def append_hostname(saos, db_file="db/hosts.json", filename=""):
    """
    Store hostname + mgmt IP in a JSON db (default: db/hosts.json).

    This can be used to generate a hosts file that is not dependent on
    whether parsing of a device succeeds or fails.

    DB structure:
        { 'hostname': { 'mgmtip': '', 'added': 'timestamp',
                        'lastseen': 'timestamp', 'chassisid': '' } }

    Parameters:
        saos = parsed SAOS object, provides hostname, chassis id and mgmt IP
        db_file = path of the JSON hosts database
        filename = original config file name, used for logging only
    """
    db = {}
    now = str(time.time())
    hostname = saos.model.host.hostname
    chassisid = saos.model.host.chassisid
    mgmtip = saos.model.get_management_ip()
    if not hostname:
        logger.warning("found an invalid hostname in filename: {}".format(filename))
        return
    ## read the hosts db; a missing or corrupt file is not fatal, start fresh
    try:
        with open(db_file) as f:
            db = json.load(f)
    except (OSError, ValueError):
        # OSError: file missing/unreadable; ValueError: invalid JSON
        # (json.JSONDecodeError is a ValueError subclass)
        logger.info("Hosts DB does not exist or is not valid, creating it now")
    ## update the hosts db: refresh existing entry, or add a new one
    if hostname in db:
        db[hostname]['mgmtip'] = mgmtip
        db[hostname]['lastseen'] = now
        db[hostname]['chassisid'] = chassisid
    else:
        db[hostname] = { 'mgmtip': mgmtip, 'added': now, 'lastseen': now, 'chassisid': chassisid }
    ## write the hosts db back to disk (sorted for stable diffs)
    with open(db_file, 'w') as f:
        logger.debug(db)
        json.dump(db, f, indent=4, sort_keys=True)
def _write_raw_report(path, content):
    """Write a single pre-formatted (JSON) string to a debug file."""
    with open(path, 'w') as f:
        f.write(content)


def _write_csv_report(path, rows):
    """Write an iterable of row sequences as comma separated lines."""
    with open(path, 'w') as f:
        for line in rows:
            f.write("{}\n".format(",".join(line)))


def parser(configdir="", configlist=None,
            dbfile="db/hosts.json",
            debug=False,
            stopafter=800,
            max_config_age_days=4,
            filefilter=None,
            output_folder=None,
            debug_folder=None
            ):
    """
    Main parser script: parse each config, link the devices via LLDP and
    generate the inventory reports + hosts file.

    Parameters:
        configdir = directory containing the config files
        configlist = list of config files to parse (default: empty list)
        dbfile = path of the JSON hosts database
        debug = True|False
        stopafter = max number of files to parse
        max_config_age_days = ignore configs older than specified
        filefilter = if specified: only parse files matching the filter
        output_folder = folder where the CSV reports are written
        debug_folder = folder where the raw JSON debug dumps are written
    """
    if configlist is None:
        # avoid a shared mutable default argument
        configlist = []
    saoslist = ParserCollection(debug=debug)
    count = 0
    hosts_db_file = dbfile
    for s in configlist:
        configfile = os.path.join(configdir, s)
        if count > stopafter:
            # logger.warn() is a deprecated alias of warning()
            logger.warning("MAX number of parsed configs has been reached: {}".format(stopafter))
            break
        if filefilter and s not in filefilter:
            logger.info("Config is filtered out - skipping ({})".format(s))
            continue
        logger.info("Parsing {}".format(s))
        saos = SAOSParser(configfile=configfile, debug=debug)
        saos.parse()
        logger.debug("SAOS object: {}".format(saos))
        count += 1
        ### append hostname + mgmt IP to the hosts DB
        ### keep track of historical hostnames because if parsing fails
        ### somehow then the hosts file will not be complete
        append_hostname(saos, hosts_db_file, filename=s)
        if saos.model.system.hwtype:
            config_age = saos.model.get_last_config_date(indays=True)
            if config_age < max_config_age_days:
                saoslist.append(saos)
            else:
                logger.warning("SKIPPING OLD CONFIG: {}".format(configfile))
        else:
            logger.info("SKIPPING UNKNOWN DEVICE: {} (file: {})".format(saos.model.host.hostname, configfile))
    logger.info("finished parsing configs: {}".format(count))
    # connect the configs together based on LLDP
    saoslist.linkCollection()
    ### GENERAL INVENTORY INFO
    _write_raw_report(os.path.join(debug_folder, 'raw.general_inventory.json'),
                      saoslist.get_general_inventory())
    _write_csv_report(os.path.join(output_folder, 'general.inventory.{}.csv'.format(today)),
                      saoslist.report_general_inventory())
    ### CONFIG CHECKS INVENTORY
    _write_csv_report(os.path.join(output_folder, 'general.config_check.{}.csv'.format(today)),
                      saoslist.report_config_check())
    ### GENERATE HOSTS FILE based on the hosts DB
    hosts_db = {}
    try:
        if os.path.isfile(hosts_db_file):
            with open(hosts_db_file) as f:
                hosts_db = json.load(f)
    except (OSError, ValueError):
        # a missing/corrupt hosts DB only means the hosts file stays empty
        pass
    with open(os.path.join(output_folder, 'hosts'), 'w') as f:
        f.write('## CARRIER ETHERNET\n')
        for h in hosts_db:
            f.write("{} {}\n".format(hosts_db[h].get('mgmtip', ''), h))
    ### SWITCHPORT INFO
    _write_raw_report(os.path.join(debug_folder, 'raw.get_switchport_status.json'),
                      saoslist.get_switchport_status())
    _write_csv_report(os.path.join(output_folder, 'switchport-status.inventory.{}.csv'.format(today)),
                      saoslist.report_switchport_status())
    ### LLDP debug info
    _write_raw_report(os.path.join(debug_folder, 'raw.get_netjson_lldp.json'),
                      saoslist.get_netjson_lldp())
    ### LOGICAL RING INFO
    _write_raw_report(os.path.join(debug_folder, 'raw.logical_ring_info.json'),
                      saoslist.get_logical_ring_info())
    _write_csv_report(os.path.join(output_folder, 'logical-ringinfo.inventory.{}.csv'.format(today)),
                      saoslist.report_logical_ring_inventory())
    ### VIRTUAL RING INFO
    _write_raw_report(os.path.join(debug_folder, 'raw.virtual_ring_info.json'),
                      saoslist.get_virtual_ring_info())
    _write_csv_report(os.path.join(output_folder, 'virtual-ringinfo.inventory.{}.csv'.format(today)),
                      saoslist.report_virtual_ring_inventory())
    ### VLAN INFO
    _write_raw_report(os.path.join(debug_folder, 'raw.vlan_info.json'),
                      saoslist.get_vlan_info())
    _write_csv_report(os.path.join(output_folder, 'vlaninfo.inventory.{}.csv'.format(today)),
                      saoslist.report_vlan_inventory())
    ### SERVICE INFO
    _write_raw_report(os.path.join(debug_folder, 'raw.service_info.json'),
                      saoslist.get_service_info())
    _write_csv_report(os.path.join(output_folder, 'services.inventory.{}.csv'.format(today)),
                      saoslist.report_service_inventory())
    ### debug json per host
    for s in saoslist.parserlist:
        _write_raw_report(os.path.join(debug_folder, 'raw.{}.json'.format(s.model.host.hostname)),
                          s.json())
    return
# connect the configs together based on LLDP
#SAOSParser.linkCollection(saoslist.parserlist)
# # generate the logical ringinfo report
# fn_logical_rings = "output/logical-ringinfo"
# _report_logical_ringinfo(saoslist.parserlist, fn_logical_rings)
#
# # generate the virtual ringinfo report
# fn_virtual_rings = "output/virtual-ringinfo"
# _report_virtual_ringinfo(saoslist.parserlist, fn_virtual_rings)
#
# # generate the CES/SVLAN report
# fn_svlans = "output/svlan-info"
# _report_svlans(saoslist.parserlist, fn_svlans)
if __name__ == "__main__":
logger.info("-- start script --")
cmdargs = get_args(myconfig.CARRIERETHERNET_PARSER_CONFIG_FOLDER)
SAOSDIR = cmdargs.config_dir
DBDIR = myconfig.CARRIERETHERNET_PARSER_DB_FOLDER
DEBUG_FOLDER = myconfig.CARRIERETHERNET_PARSER_DEBUG_FOLDER
OUTPUT_FOLDER = myconfig.CARRIERETHERNET_PARSER_OUTPUT_FOLDER
MAX_CONFIG_AGE = myconfig.CARRIERETHERNET_PARSER_MAX_CONFIG_AGE
MAX_PARSED_FILES = myconfig.CARRIERETHERNET_PARSER_MAX_PARSED_FILES
#DEBUG = True if (cmdargs.debug or cmdargs.trace) else False
#SAOSDIR = '/Users/mwallraf/dev/alfie/trops-carrierethernet-parser.git/test_configs/SDS/'
#logger.info("info")
#logger.warning("warn")
#logger.error("error")
#logger.debug("debug")
#logger.trace("trace")
## default config folders, use the first one that exists
## can be overridden by the --config-dir commandline option
#for p in [
# #'/Users/mwallraf/dev/alfie/trops-carrierethernet-parser.git/test_configs/SDS/',
# '/opt/SCRIPTS/exscript-backup/configs/',
# '/Users/mwallraf/Documents/dev/Orange/trops-carrierethernet-parser/test-configs/'
# ]:
# if os.path.exists(p):
# SAOSDIR = p
# break
if not SAOSDIR:
logger.error("no SAOS config dir found")
sys.exit()
logger.info("SAOS config dir: {}".format(SAOSDIR))
saosconfigs = [ f for f in os.listdir(SAOSDIR) if ( ("-SAS" in f.upper() or "-SDS" in f.upper()) and not f.startswith(".")) ]
logger.debug("SAOS configs: {}".format(saosconfigs))
# start the main parser
#max_parsed_files = 1200 # do not parse | |
iqdata_loc2.shape[0]
# Number of raw (I,Q) records actually read from each indoor capture file;
# used below to derive how many fixed-size samples each file yields.
no_of_data_loc3 = iqdata_loc3.shape[0]
no_of_data_loc4 = iqdata_loc4.shape[0]
no_of_data_loc5 = iqdata_loc5.shape[0]
no_of_data_loc6 = iqdata_loc6.shape[0]
no_of_data_loc7 = iqdata_loc7.shape[0]
no_of_data_loc8 = iqdata_loc8.shape[0]
no_of_data_loc9 = iqdata_loc9.shape[0]
no_of_data_loc10 = iqdata_loc10.shape[0]
no_of_data_loc11 = iqdata_loc11.shape[0]
no_of_data_loc12 = iqdata_loc12.shape[0]
################################################################################################################
# CONCATINATING THE I AND Q VALUES VERTICALLY OF (I, Q) SAMPLE. -- note the axis argument is set to 1 (means vertical stacking)
# SIMULATNEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTION
# Each row of xdata_locN is one (I, Q) pair; matmul with W.T projects every
# pair onto the angular directions.  NOTE(review): this assumes W has shape
# (no_of_features, 2) -- confirm against where W is defined.
xdata_loc1= np.concatenate([iqdata_loc1['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc1['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc1 = np.matmul(xdata_loc1, np.transpose(W))
xdata_loc2= np.concatenate([iqdata_loc2['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc2['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc2 = np.matmul(xdata_loc2, np.transpose(W))
xdata_loc3= np.concatenate([iqdata_loc3['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc3['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc3 = np.matmul(xdata_loc3, np.transpose(W))
xdata_loc4= np.concatenate([iqdata_loc4['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc4['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc4 = np.matmul(xdata_loc4, np.transpose(W))
xdata_loc5= np.concatenate([iqdata_loc5['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc5['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc5 = np.matmul(xdata_loc5, np.transpose(W))
xdata_loc6= np.concatenate([iqdata_loc6['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc6['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc6 = np.matmul(xdata_loc6, np.transpose(W))
xdata_loc7= np.concatenate([iqdata_loc7['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc7['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc7 = np.matmul(xdata_loc7, np.transpose(W))
xdata_loc8= np.concatenate([iqdata_loc8['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc8['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc8 = np.matmul(xdata_loc8, np.transpose(W))
xdata_loc9= np.concatenate([iqdata_loc9['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc9['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc9 = np.matmul(xdata_loc9, np.transpose(W))
xdata_loc10= np.concatenate([iqdata_loc10['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc10['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc10 = np.matmul(xdata_loc10, np.transpose(W))
xdata_loc11= np.concatenate([iqdata_loc11['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc11['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc11 = np.matmul(xdata_loc11, np.transpose(W))
xdata_loc12= np.concatenate([iqdata_loc12['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc12['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc12 = np.matmul(xdata_loc12, np.transpose(W))
# RESHAPING THE XDATA
# Pack each indoor capture into (no_of_data_locN // sample_size) rows of
# sample_size * no_of_features values, i.e. one row per model sample.
xdata_loc1_indoor= xdata_loc1.T.reshape(no_of_data_loc1//(sample_size), sample_size*no_of_features)
xdata_loc2_indoor = xdata_loc2.T.reshape(no_of_data_loc2//(sample_size), sample_size*no_of_features)
xdata_loc3_indoor = xdata_loc3.T.reshape(no_of_data_loc3//(sample_size), sample_size*no_of_features)
xdata_loc4_indoor = xdata_loc4.T.reshape(no_of_data_loc4//(sample_size), sample_size*no_of_features)
xdata_loc5_indoor = xdata_loc5.T.reshape(no_of_data_loc5//(sample_size), sample_size*no_of_features)
xdata_loc6_indoor = xdata_loc6.T.reshape(no_of_data_loc6//(sample_size), sample_size*no_of_features)
xdata_loc7_indoor = xdata_loc7.T.reshape(no_of_data_loc7//(sample_size), sample_size*no_of_features)
xdata_loc8_indoor = xdata_loc8.T.reshape(no_of_data_loc8//(sample_size), sample_size*no_of_features)
xdata_loc9_indoor = xdata_loc9.T.reshape(no_of_data_loc9//(sample_size), sample_size*no_of_features)
xdata_loc10_indoor = xdata_loc10.T.reshape(no_of_data_loc10//(sample_size), sample_size*no_of_features)
xdata_loc11_indoor = xdata_loc11.T.reshape(no_of_data_loc11//(sample_size), sample_size*no_of_features)
xdata_loc12_indoor = xdata_loc12.T.reshape(no_of_data_loc12//(sample_size), sample_size*no_of_features)
# OUTDOOR DATA
# Capture files for the +90R receiver pose; one file per transmitter
# rotation in 30-degree steps around the full circle.
# NOTE(review): the LEFT/RIGHT degree annotations on loc8-loc12 do not
# mirror the +T labels above (e.g. -150T annotated "30 DEGREE RIGHT") --
# verify the intended angle convention against the filenames.
data_file_loc1 = folder_path_outdoor + '+90R/0T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER DIRECTLY POINTING TO THE RECEIVER
data_file_loc2 =folder_path_outdoor + '+90R/+30T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE LEFT TO THE RECEIVER
data_file_loc3 = folder_path_outdoor + '+90R/+60T_+90R_5ft_06_16_2020_914MHz_outdoor.dat'# TRANSMITTER ANTENNA IS 60 DEGREE LEFT TO THE RECEIVER
data_file_loc4 = folder_path_outdoor + '+90R/+90T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE LEFT TO THE RECEIVER
data_file_loc5 = folder_path_outdoor + '+90R/+120T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE LEFT TO THE RECEIVER
data_file_loc6 =folder_path_outdoor + '+90R/+150T_+90R_5ft_06_16_2020_914MHz_outdoor.dat'# TRANSMITTER ANTENNA IS 150 DEGREE LEFT TO THE RECEIVER
data_file_loc7 = folder_path_outdoor + '+90R/180T_+90R_5ft_06_16_2020_914MHz_outdoor.dat'# TRANSMITTER ANTENNA IS DIRECTLY POINTED AWAY FROM THE RECEIVER
data_file_loc8 = folder_path_outdoor + '+90R/-150T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 30 DEGREE RIGHT TO THE RECEIVER
data_file_loc9 = folder_path_outdoor + '+90R/-120T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 60 DEGREE RIGHT TO THE RECEIVER
data_file_loc10 =folder_path_outdoor + '+90R/-90T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 90 DEGREE RIGHT TO THE RECEIVER
data_file_loc11 = folder_path_outdoor + '+90R/-60T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 120 DEGREE RIGHT TO THE RECEIVER
data_file_loc12 = folder_path_outdoor + '+90R/-30T_+90R_5ft_06_16_2020_914MHz_outdoor.dat' # TRANSMITTER ANTENNA IS 150 DEGREE RIGHT TO THE RECEIVER
# Read up to sample_size * no_of_samples raw records from each capture file.
# scipy.fromfile was only a deprecated re-export of numpy.fromfile and has
# been removed from modern SciPy, so call numpy directly.  Passing the file
# path (instead of an open() handle that was never closed) also lets numpy
# open and close the file itself, avoiding leaked file descriptors.
iqdata_loc1 = np.fromfile(data_file_loc1, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc2 = np.fromfile(data_file_loc2, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc3 = np.fromfile(data_file_loc3, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc4 = np.fromfile(data_file_loc4, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc5 = np.fromfile(data_file_loc5, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc6 = np.fromfile(data_file_loc6, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc7 = np.fromfile(data_file_loc7, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc8 = np.fromfile(data_file_loc8, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc9 = np.fromfile(data_file_loc9, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc10 = np.fromfile(data_file_loc10, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc11 = np.fromfile(data_file_loc11, dtype=dtype_all, count = sample_size * no_of_samples)
iqdata_loc12 = np.fromfile(data_file_loc12, dtype=dtype_all, count = sample_size * no_of_samples)
# PREPARING THE DATA WITHOUT TIME INFORMATION
# Number of raw (I,Q) records actually read from each outdoor capture file.
no_of_data_loc1 = iqdata_loc1.shape[0]
no_of_data_loc2 = iqdata_loc2.shape[0]
no_of_data_loc3 = iqdata_loc3.shape[0]
no_of_data_loc4 = iqdata_loc4.shape[0]
no_of_data_loc5 = iqdata_loc5.shape[0]
no_of_data_loc6 = iqdata_loc6.shape[0]
no_of_data_loc7 = iqdata_loc7.shape[0]
no_of_data_loc8 = iqdata_loc8.shape[0]
no_of_data_loc9 = iqdata_loc9.shape[0]
no_of_data_loc10 = iqdata_loc10.shape[0]
no_of_data_loc11 = iqdata_loc11.shape[0]
no_of_data_loc12 = iqdata_loc12.shape[0]
################################################################################################################
# CONCATINATING THE I AND Q VALUES VERTICALLY OF (I, Q) SAMPLE. -- note the axis argument is set to 1 (means vertical stacking)
# SIMULATNEOUSLY MULTIPLYING WITH THE WEIGHT MATRIX - TO REFLECT THE MULTI-ANGULAR PROJECTION
# Same pipeline as the indoor section above, applied to the outdoor reads.
xdata_loc1= np.concatenate([iqdata_loc1['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc1['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc1 = np.matmul(xdata_loc1, np.transpose(W))
xdata_loc2= np.concatenate([iqdata_loc2['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc2['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc2 = np.matmul(xdata_loc2, np.transpose(W))
xdata_loc3= np.concatenate([iqdata_loc3['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc3['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc3 = np.matmul(xdata_loc3, np.transpose(W))
xdata_loc4= np.concatenate([iqdata_loc4['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc4['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc4 = np.matmul(xdata_loc4, np.transpose(W))
xdata_loc5= np.concatenate([iqdata_loc5['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc5['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc5 = np.matmul(xdata_loc5, np.transpose(W))
xdata_loc6= np.concatenate([iqdata_loc6['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc6['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc6 = np.matmul(xdata_loc6, np.transpose(W))
xdata_loc7= np.concatenate([iqdata_loc7['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc7['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc7 = np.matmul(xdata_loc7, np.transpose(W))
xdata_loc8= np.concatenate([iqdata_loc8['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc8['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc8 = np.matmul(xdata_loc8, np.transpose(W))
xdata_loc9= np.concatenate([iqdata_loc9['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc9['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc9 = np.matmul(xdata_loc9, np.transpose(W))
xdata_loc10= np.concatenate([iqdata_loc10['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc10['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc10 = np.matmul(xdata_loc10, np.transpose(W))
xdata_loc11= np.concatenate([iqdata_loc11['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc11['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc11 = np.matmul(xdata_loc11, np.transpose(W))
xdata_loc12= np.concatenate([iqdata_loc12['raw-iq'].real.reshape(number_of_data_to_read,1), iqdata_loc12['raw-iq'].imag.reshape(number_of_data_to_read,1)], axis=1)
xdata_loc12 = np.matmul(xdata_loc12, np.transpose(W))
# RESHAPING THE XDATA
# One row per model sample: sample_size consecutive (I,Q) pairs, each
# projected onto no_of_features angular directions.
xdata_loc1= xdata_loc1.T.reshape(no_of_data_loc1//(sample_size), sample_size*no_of_features)
xdata_loc2 = xdata_loc2.T.reshape(no_of_data_loc2//(sample_size), sample_size*no_of_features)
xdata_loc3 = xdata_loc3.T.reshape(no_of_data_loc3//(sample_size), sample_size*no_of_features)
xdata_loc4 = xdata_loc4.T.reshape(no_of_data_loc4//(sample_size), sample_size*no_of_features)
xdata_loc5= xdata_loc5.T.reshape(no_of_data_loc5//(sample_size), sample_size*no_of_features)
xdata_loc6 = xdata_loc6.T.reshape(no_of_data_loc6//(sample_size), sample_size*no_of_features)
xdata_loc7 = xdata_loc7.T.reshape(no_of_data_loc7//(sample_size), sample_size*no_of_features)
xdata_loc8 = xdata_loc8.T.reshape(no_of_data_loc8//(sample_size), sample_size*no_of_features)
xdata_loc9= xdata_loc9.T.reshape(no_of_data_loc9//(sample_size), sample_size*no_of_features)
xdata_loc10 = xdata_loc10.T.reshape(no_of_data_loc10//(sample_size), sample_size*no_of_features)
xdata_loc11 = xdata_loc11.T.reshape(no_of_data_loc11//(sample_size), sample_size*no_of_features)
xdata_loc12 = xdata_loc12.T.reshape(no_of_data_loc12//(sample_size), sample_size*no_of_features)
## CONCATINATE THE INDOOR AND OUTDOOR DATA
# Stack the outdoor samples on top of the indoor samples for each
# transmitter angle, so every class contains both environments.
xdata_loc1 = np.concatenate((xdata_loc1, xdata_loc1_indoor), axis = 0)
xdata_loc2 = np.concatenate((xdata_loc2, xdata_loc2_indoor), axis = 0)
xdata_loc3 = np.concatenate((xdata_loc3, xdata_loc3_indoor), axis = 0)
xdata_loc4 = np.concatenate((xdata_loc4, xdata_loc4_indoor), axis = 0)
xdata_loc5 = np.concatenate((xdata_loc5, xdata_loc5_indoor), axis = 0)
xdata_loc6 = np.concatenate((xdata_loc6, xdata_loc6_indoor), axis = 0)
xdata_loc7 = np.concatenate((xdata_loc7, xdata_loc7_indoor), axis = 0)
xdata_loc8 = np.concatenate((xdata_loc8, xdata_loc8_indoor), axis = 0)
xdata_loc9 = np.concatenate((xdata_loc9, xdata_loc9_indoor), axis = 0)
xdata_loc10 = np.concatenate((xdata_loc10, xdata_loc10_indoor), axis = 0)
xdata_loc11 = np.concatenate((xdata_loc11, xdata_loc11_indoor), axis = 0)
xdata_loc12 = np.concatenate((xdata_loc12, xdata_loc12_indoor), axis = 0)
# CREATING LABEL FOR THE DATASETS
# This +90R receiver pose occupies class ids 36..47 of the 48 total
# classes (see num_classes and the `classes` list further below).
addIndex = 36
ydata_loc1 = np.full(xdata_loc1.shape[0], (addIndex+0), dtype=int)
ydata_loc2 = np.full(xdata_loc2.shape[0], (addIndex+1), dtype=int)
ydata_loc3 = np.full(xdata_loc3.shape[0], (addIndex+2), dtype=int)
ydata_loc4 = np.full(xdata_loc4.shape[0], (addIndex+3), dtype=int)
ydata_loc5 = np.full(xdata_loc5.shape[0], (addIndex+4), dtype=int)
ydata_loc6 = np.full(xdata_loc6.shape[0], (addIndex+5), dtype=int)
ydata_loc7 = np.full(xdata_loc7.shape[0], (addIndex+6), dtype=int)
ydata_loc8 = np.full(xdata_loc8.shape[0], (addIndex+7), dtype=int)
ydata_loc9 = np.full(xdata_loc9.shape[0], (addIndex+8), dtype=int)
ydata_loc10 = np.full(xdata_loc10.shape[0], (addIndex+9), dtype=int)
ydata_loc11 = np.full(xdata_loc11.shape[0], (addIndex+10), dtype=int)
ydata_loc12 = np.full(xdata_loc12.shape[0], (addIndex+11), dtype=int)
#CONCATINATING THE DIFFERENT POSE LABELS HORIZONTALLY (ROWWISE)
ydata_90R = np.concatenate([ydata_loc1, ydata_loc2, ydata_loc3, ydata_loc4, ydata_loc5, ydata_loc6, ydata_loc7, ydata_loc8, ydata_loc9, ydata_loc10, ydata_loc11, ydata_loc12], axis=0)
#CONCATINATING THE DIFFERENT POSE DATA HORIZONTALLY (ROWWISE)
xdata_90R = np.concatenate([xdata_loc1, xdata_loc2, xdata_loc3, xdata_loc4, xdata_loc5, xdata_loc6, xdata_loc7, xdata_loc8, xdata_loc9, xdata_loc10, xdata_loc11, xdata_loc12], axis=0)
# PREPROCESSING X AND Y DATA
# np.float was removed in NumPy 1.24; it was an alias of the builtin
# 64-bit float, so np.float64 is the exact drop-in replacement.
xdata_90R = xdata_90R.astype(np.float64)
# REMOVING THE NANS
xdata_90R = np.nan_to_num(xdata_90R)
################################################################################################################################
#############################################################################################################################
######## #######
######## PREDICTING POSES WITH THE DIRECTIONAL RECEIVER AND TRANSMITTER #######
######## #######
#############################################################################################################################
# Combine all four receiver poses into one dataset.  NOTE(review):
# xdata_0R / xdata_m90R / xdata_180R and their labels are built in
# earlier sections of this script, outside this excerpt.
xdata = np.concatenate([xdata_0R, xdata_m90R, xdata_180R, xdata_90R], axis= 0 )
#CONCATINATING THE DIFFERENT POSE LABELS HORIZONTALLY (ROWWISE)
ydata = np.concatenate([ydata_0R, ydata_m90R, ydata_180R, ydata_90R], axis=0)
#################### NORMALIZE THE X DATA #######################
standard = preprocessing.StandardScaler().fit(xdata) # Normalize the data with zero mean and unit variance for each column
xdata = standard.transform(xdata)
############### SEPARATING TRAIN AND TEST DATA #######################
print("############## STARTING THE TRAINING TO PREDICT THE RELATIVE POSE OF RECEIVER AND TRANSMITTER ##########################")
xtrain, xtest, ytrain, ytest = train_test_split(xdata, ydata, test_size=0.2, shuffle = True, random_state=42) # Randomly shuffling and 80/20 is train/test size
print("XTRAIN AND XTEST SHAPE:", xtrain.shape, xtest.shape)
print("YTRAIN AND YTEST SHAPE:", ytrain.shape, ytest.shape)
# RESHAPING THE DATA FROM 2 DIMENSIONAL TO 4 DIMENSIONAL SHAPE - NEEDED TO APPLY TO USE 2D-CONVOLUTION
# reshape to be [samples][width][height][channels]
xtrain = xtrain.reshape((xtrain.shape[0], no_of_features, sample_size, 1)).astype('float32')
xtest = xtest.reshape((xtest.shape[0], no_of_features, sample_size, 1)).astype('float32')
num_classes = 48 # TOTAL NUMBER OF RANGES (12 transmitter angles x 4 receiver poses)
# Convert labels to categorical one-hot encoding
ytrain_one_hot = to_categorical(ytrain, num_classes=num_classes) # DEFINE THE NUMBER OF TOTAL CLASSES IN LABEL
ytest_one_hot = to_categorical(ytest, num_classes=num_classes)
print("XTRAIN AND XTEST SHAPE:", xtrain.shape, xtest.shape)
print("YTRAIN AND YTEST SHAPE:", ytrain_one_hot.shape, ytest_one_hot.shape)
############################################################
# #
######## Building a 2D Convolutional Neural Network #####
# #
############################################################
dr = 0.6 # dropout rate (%)
batch_size = 128 # Mini batch size
nb_epoch = 100 # Number of Epoch (Give a higher number to get better accuracy)
# Human-readable class names: "<transmitter angle>_<receiver pose>",
# 48 entries matching num_classes above.
classes = ["0_0R", "+30_0R", "+60_0R", "+90_0R", "+120_0R", "+150_0R", "180_0R", "-150_0R", "-120_0R", "-90_0R", "-60_0R", "-30_0R",
"0_-90R", "+30_-90R", "+60_-90R", "+90_-90R", "+120_-90R", "+150_-90R", "180_-90R", "-150_-90R", "-120_-90R", "-90_-90R", "-60_-90R", "-30_-90R",
"0_180R", "+30_180R", "+60_180R", "+90_180R", "+120_180R", "+150_180R", "180_180R", "-150_180R", "-120_180R", "-90_180R", "-60_180R", "-30_180R",
"0_90R", "+30_90R", "+60_90R", "+90_90R", "+120_90R", "+150_90R", "180_90R", "-150_90R", "-120_90R", "-90_90R", "-60_90R", "-30_90R",] # CHANGE LABEL
in_shp = list(xtrain.shape[1:]) # Input Dimension
print(in_shp)
# model = models.Sequential()
# NOTE(review): timesteps/data_dim are not used in this visible section;
# presumably leftovers from an LSTM variant of the model.
timesteps=1
data_dim=xtrain.shape[1]
# print ("AFTER RESHAPE")
# NOTE(review): to_categorical already returns (N, num_classes), so these
# reshapes appear to be no-ops -- confirm before removing.
ytrain_one_hot = np.reshape(ytrain_one_hot, (ytrain_one_hot.shape[0], num_classes)) # Used in training
ytest_one_hot = np.reshape(ytest_one_hot, (ytest_one_hot.shape[0], num_classes)) # Used in training
start_time = time.time() # Taking start time to calculate overall execution time
# Modeling the CNN
model_ranging = Sequential()
# FIRST CONVOLUTIONAL LAYER
model_ranging.add(Conv2D(256, (2, 2), input_shape=(no_of_features, sample_size, 1), activation='relu')) # CHANGE # Stride (1, 1)
model_ranging.add(MaxPooling2D()) # Pool size: (2, 2) and stride (2, 2)
model_ranging.add(Dropout(0.2))
# SECOND CONVOLUTIONAL LAYER
model_ranging.add(Conv2D(128, (2, 2), activation='relu'))
model_ranging.add(MaxPooling2D())
model_ranging.add(Dropout(dr))
model_ranging.add(Flatten())
# FIRST DENSE LAYER
model_ranging.add(Dense(256, activation='relu'))
# SECOND DENSE LAYER
model_ranging.add(Dense(128, activation='relu'))
# # THIRD DENSE LAYER - EXTRA LAYER DID NOT WORK
# model_ranging.add(Dense(64, activation='relu'))
# OUTPUT LAYER
model_ranging.add(Dense(num_classes, activation='softmax'))
# Compile model
# For a multi-class classification problem
# NOTE(review): these configured optimizers are never passed to compile()
# below -- the string 'adam' selects a default-lr Adam instead, so the
# lr/decay settings here have no effect.
sgd = SGD(lr=0.0001, decay=1e-6, momentum=0.9, nesterov=True)
adam = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-8)
# model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # Multiclass classification with rmsprop
#model.compile(optimizer='sgd', loss='categorical_crossentropy',metrics=['acc', f1_m, precision_m, recall_m]) # Multiclass classification with rms adam optimizer # CHANGE
model_ranging.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc', f1_m, precision_m, recall_m])
model_ranging.summary()
filepath = '/Users/debashri/Desktop/DirectionFinding_Plots/InOutdoor/double_direction_2D_CNN.wts.h5'
print("The dropout rate was: ")
print(dr)
# Fit the model: best weights checkpointed to `filepath`, training stops
# early once val_loss has not improved for 5 epochs.
# history= model.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_data = (xtest, ytest_one_hot), callbacks = [keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'), keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])
history = model_ranging.fit(xtrain, ytrain_one_hot, epochs=nb_epoch, batch_size=batch_size, validation_split=0.1, callbacks=[
keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=2, save_best_only=True, mode='auto'),
keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, verbose=2, mode='auto')])
# SAVING THE MODEL FOR TRANSFER LEARNING
saved_file = '/Users/debashri/Desktop/DirectionFinding_Plots/InOutdoor/double_direction_classifier.h5'
model_ranging.save(saved_file) # SAVING THE MODEL FOR TRANSFER LEARNING
# Evaluate the model on the held-out test split
loss, accuracy, f1_score, precision, recall = model_ranging.evaluate(xtest, ytest_one_hot, batch_size=batch_size) # CHANGE
print("\nTest Loss: %s: %.2f%%" % (model_ranging.metrics_names[0], loss * 100)) # CHANGE
print("\nTest Accuracy: %s: %.2f%%" % (model_ranging.metrics_names[1], accuracy * 100)) # CHANGE
print("\nTest F1 Score: %s: %.2f" % (model_ranging.metrics_names[2], f1_score)) # CHANGE
print("\nTest Precision: %s: | |
= building_block._extract_functional_groups(
functional_groups=functional_groups,
)
building_block._with_functional_groups(functional_groups)
building_block._placer_ids = (
building_block._normalize_placer_ids(
placer_ids=placer_ids,
functional_groups=building_block._functional_groups,
)
)
building_block._core_ids = frozenset(
building_block._get_core_ids(
functional_groups=building_block._functional_groups,
)
)
return building_block
@classmethod
def init_from_file(
cls,
path: typing.Union[pathlib.Path, str],
functional_groups: _FunctionalGroups = (),
placer_ids: typing.Optional[abc.Iterable[int]] = None,
) -> BuildingBlock:
"""
Initialize from a file.
Parameters:
path:
The path to a molecular structure file. Supported file
types are:
#. ``.mol``, ``.sdf`` - MDL V3000 MOL file
#. ``.pdb`` - PDB file
functional_groups:
The :class:`.FunctionalGroup` instances the building
block should have, and / or
:class:`.FunctionalGroupFactory` instances used for
creating them.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. All atoms of the molecule will be used for
*placer* ids.
Returns:
The building block.
Raises:
:class:`ValueError`:
If the file type cannot be used for initialization.
"""
_, extension = os.path.splitext(path)
if extension not in cls._init_funcs:
raise ValueError(
f'Unable to initialize from "{extension}" files.'
)
# This remake needs to be here because molecules loaded
# with rdkit often have issues, because rdkit tries to do
# bits of structural analysis like stereocenters. remake
# gets rid of all this problematic metadata.
molecule = _utilities.remake(
mol=cls._init_funcs[extension](str(path)),
)
return cls.init_from_rdkit_mol(
molecule=molecule,
functional_groups=functional_groups,
placer_ids=placer_ids,
)
@classmethod
def init_from_rdkit_mol(
cls,
molecule: rdkit.Mol,
functional_groups: _FunctionalGroups = (),
placer_ids: typing.Optional[abc.Iterable[int]] = None,
) -> BuildingBlock:
"""
Initialize from an :mod:`rdkit` molecule.
Parameters:
molecule:
The molecule.
functional_groups:
The :class:`.FunctionalGroup` instances the building
block should have, and / or
:class:`.FunctionalGroupFactory` instances used for
creating them.
placer_ids:
The ids of *placer* atoms. These are the atoms which
should be used for calculating the position of the
building block. Depending on the values passed to
`placer_ids`, and the functional groups in the building
block, different *placer* ids will be used by the
building block.
#. `placer_ids` is passed to the initializer: the
passed *placer* ids will be used by the building
block.
#. `placer_ids` is ``None`` and the building block has
functional groups: The *placer* ids of the
functional groups will be used as the *placer* ids
of the building block.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. All atoms of the molecule will be used for
*placer* ids.
Returns:
The molecule.
"""
building_block = cls.__new__(cls)
building_block._init_from_rdkit_mol(
molecule=molecule,
functional_groups=functional_groups,
placer_ids=placer_ids,
)
return building_block
    def _init_from_rdkit_mol(
        self,
        molecule: rdkit.Mol,
        functional_groups: _FunctionalGroups,
        placer_ids: typing.Optional[abc.Iterable[int]],
    ) -> None:
        """
        Initialize from an :mod:`rdkit` molecule.
        Parameters:
            molecule:
                The molecule.
            functional_groups:
                The :class:`.FunctionalGroup` instances the building
                block should have, and / or
                :class:`.FunctionalGroupFactory` instances used for
                creating them.
            placer_ids:
                The ids of *placer* atoms. These are the atoms which
                should be used for calculating the position of the
                building block. Depending on the values passed to
                `placer_ids`, and the functional groups in the building
                block, different *placer* ids will be used by the
                building block.
                #. `placer_ids` is passed to the initializer: the
                   passed *placer* ids will be used by the building
                   block.
                #. `placer_ids` is ``None`` and the building block has
                   functional groups: The *placer* ids of the
                   functional groups will be used as the *placer* ids
                   of the building block.
                #. `placer_ids` is ``None`` and `functional_groups` is
                   empty. All atoms of the molecule will be used for
                   *placer* ids.
        """
        # Mirror the rdkit atoms into immutable stk Atom objects,
        # preserving the rdkit atom indices as ids.
        atoms = tuple(
            Atom(
                id=a.GetIdx(),
                atomic_number=a.GetAtomicNum(),
                charge=a.GetFormalCharge(),
            )
            for a in molecule.GetAtoms()
        )
        bonds = tuple(
            Bond(
                atom1=atoms[b.GetBeginAtomIdx()],
                atom2=atoms[b.GetEndAtomIdx()],
                order=(
                    # stk encodes dative bonds with the sentinel order 9.
                    9 if b.GetBondType() == rdkit.BondType.DATIVE
                    else b.GetBondTypeAsDouble()
                )
            )
            for b in molecule.GetBonds()
        )
        # Assumes the molecule has at least one conformer; GetConformer()
        # raises otherwise.
        position_matrix = molecule.GetConformer().GetPositions()
        Molecule.__init__(
            self=self,
            atoms=atoms,
            bonds=bonds,
            position_matrix=position_matrix,
        )
        # Functional groups must be extracted before placer/core ids,
        # because both derivations read self._functional_groups.
        self._with_functional_groups(self._extract_functional_groups(
            functional_groups=functional_groups,
        ))
        self._placer_ids = self._normalize_placer_ids(
            placer_ids=placer_ids,
            functional_groups=self._functional_groups,
        )
        self._core_ids = frozenset(self._get_core_ids(
            functional_groups=self._functional_groups,
        ))
def _normalize_placer_ids(
self,
placer_ids: typing.Optional[abc.Iterable[int]],
functional_groups: abc.Collection[FunctionalGroup],
) -> frozenset[int]:
"""
Get the final *placer* ids.
Parameters:
placer_ids: The ids of *placer* atoms or ``None``.
functional_groups:
The :class:`.FunctionalGroup` instances of the building
block.
Returns:
Depending on the input values, this function will return
different things.
#. `placer_ids` is a :class:`tuple` of :class`int`: the
`placer_ids` will be returned.
#. `placer_ids` is ``None`` and `functional_groups` is not
empty: The *placer* ids of the functional groups will
be returned.
#. `placer_ids` is ``None`` and `functional_groups` is
empty. The ids of all atoms in the building block will
be returned.
"""
if placer_ids is not None:
return frozenset(placer_ids)
if functional_groups:
return frozenset(_utilities.flatten(
functional_group.get_placer_ids()
for functional_group in functional_groups
))
return frozenset(atom.get_id() for atom in self._atoms)
def _get_core_ids(
self,
functional_groups: abc.Iterable[FunctionalGroup],
) -> abc.Iterable[int]:
"""
Get the final *core* ids.
This method may return duplicate ids.
Parameters:
functional_groups:
The :class:`.FunctionalGroup` instances of the building
block.
Yields:
The id of an atom defining the core of the molecule.
"""
functional_group_atom_ids = {
atom_id
for functional_group in functional_groups
for atom_id in functional_group.get_atom_ids()
}
for atom in self._atoms:
atom_id = atom.get_id()
if atom_id not in functional_group_atom_ids:
yield atom_id
for functional_group in functional_groups:
for atom_id in functional_group.get_core_atom_ids():
yield atom_id
def _extract_functional_groups(
self,
functional_groups: _FunctionalGroups,
) -> abc.Iterable[FunctionalGroup]:
"""
Yield functional groups.
The input can be a mixture of :class:`.FunctionalGroup` and
:class:`.FunctionalGroupFactory`. The output yields
:class:`.FunctionalGroup` instances only. Either those
held directly in `functional_groups` or created by the
factories in `functional_groups`.
Parameters:
functional_groups:
The functional groups to yield and the factories
used to produce them.
Yields:
A functional group from `functional_groups`, or created
by a factory in `functional_groups`.
"""
for functional_group in functional_groups:
if isinstance(
functional_group,
FunctionalGroup,
):
yield functional_group
else:
# Else it's a factory.
yield from functional_group.get_functional_groups(self)
def _with_functional_groups(
self: _T,
functional_groups: abc.Iterable[FunctionalGroup],
) -> _T:
"""
Modify the molecule.
"""
self._functional_groups = tuple(functional_groups)
return self
def with_functional_groups(
self,
functional_groups: abc.Iterable[FunctionalGroup],
) -> BuildingBlock:
"""
Return a clone with specific functional groups.
Parameters:
functional_groups:
:class:`.FunctionalGroup` instances which the clone
should have.
Returns:
The clone.
"""
return self.clone()._with_functional_groups(functional_groups)
def _with_canonical_atom_ordering(
self: _T,
) -> _T:
ordering = rdkit.CanonicalRankAtoms(self.to_rdkit_mol())
super()._with_canonical_atom_ordering()
id_map = {
old_id: new_id
for old_id, new_id in enumerate(ordering)
}
self._functional_groups = tuple(
functional_group.with_ids(id_map)
for functional_group in self._functional_groups
)
self._placer_ids = frozenset(
id_map[placer_id]
for placer_id in self._placer_ids
)
self._core_ids = frozenset(
id_map[core_id]
for core_id in self._core_ids
)
return self
def get_num_functional_groups(self) -> int:
"""
Return the number of functional groups.
Returns:
The number of functional groups in the building block.
"""
return len(self._functional_groups)
def get_functional_groups(
self,
fg_ids: typing.Optional[_typing.OneOrMany[int]] = None,
) -> abc.Iterable[FunctionalGroup]:
"""
Yield the functional groups, ordered by id.
Parameters:
fg_ids:
The ids of functional groups yielded. If ``None``, then
all functional groups are yielded. Can be a single
:class:`int`, if a single functional group is
desired.
Yields:
A functional group of the building block.
"""
if fg_ids is None:
fg_ids = range(len(self._functional_groups))
elif isinstance(fg_ids, int):
fg_ids = (fg_ids, )
for fg_id in fg_ids:
yield self._functional_groups[fg_id]
    def _clone(self: _T) -> _T:
        """
        Return a clone, sharing immutable state with the original.
        """
        clone = super()._clone()
        # Tuples/frozensets are immutable, so sharing them with the
        # clone is safe and avoids copying.
        clone._functional_groups = self._functional_groups
        clone._placer_ids = self._placer_ids
        clone._core_ids = self._core_ids
        return clone
    def clone(self) -> BuildingBlock:
        """
        Return a clone of the building block.
        """
        return self._clone()
def get_num_placers(self) -> int:
"""
Return the number of *placer* atoms in the building block.
Returns:
The number of *placer* atoms in the building block.
"""
return len(self._placer_ids)
def get_placer_ids(self) -> abc.Iterable[int]:
"""
Yield the ids of *placer* atoms.
*Placer* atoms are those, which should be used to calculate
the position of the building block.
See Also:
:meth:`.FunctionalGroup.get_placer_ids`
Yields:
The id of a *placer* atom.
"""
yield from self._placer_ids
def get_core_atom_ids(self) -> abc.Iterable[int]:
"""
Yield ids of atoms which form the core of the building block.
This includes all atoms in the building block not part of a
functional group, | |
from .baseservice import IDError
from .sheetsservice import SheetsService
class StatsService(SheetsService):
    def __repr__(self):
        # Fixed representation; deliberately exposes no service state.
        return "<StatsService Object>"
    @property
    def viewer_id(self):
        # Spreadsheet id of the stats-viewer document.
        # NOTE(review): this guards against an id that was *set* to None,
        # but if the setter was never called at all the attribute access
        # raises AttributeError instead of IDError -- confirm that
        # initialize_env() always assigns viewer_id first.
        if self.__viewer_id is None:
            raise IDError("Service id is uninitialized, use .initialize_env(...)")
        return self.__viewer_id
@viewer_id.setter
def viewer_id(self,id):
self.__viewer_id = id
if not self.__viewer_id is None:
self.retrieve_viewer_ids()
    def retrieve_viewer_ids(self):
        """Fetch the viewer spreadsheet's metadata and cache, per sheet
        title, its sheetId and full properties JSON.
        Returns self for chaining.
        """
        s = self.service.spreadsheets().get(spreadsheetId = self.__viewer_id).execute()
        # title -> numeric sheetId (needed by batchUpdate requests).
        self.viewer_sheet_ids = {
            sheet['properties']['title']: sheet['properties']['sheetId']\
            for sheet in s['sheets']
        }
        # title -> full properties object (grid size, index, etc.).
        self.viewer_sheet_properties = {
            sheet['properties']['title']: sheet['properties']\
            for sheet in s['sheets']
        }
        return self
def retrieve_meet_parameters(self, roster_json, draw_json):
self.meet_params = {}
self.meet_params['total_teams'] = len(set([quizzer['team'] for quizzer in roster_json]))
self.meet_params['total_quizzes'] = len(draw_json)
try:
self.meet_params['prelims_per_team_number'] = 3 * sum([quiz['type'] == "P" for quiz in draw_json]) // self.meet_params['total_teams']
except ZeroDivisionError:
self.meet_params['prelims_per_team_number'] = 0
self.meet_params['total_quizzers'] = len(roster_json)
try:
self.meet_params['total_quiz_slots'] = max([int(quiz['slot_num']) for quiz in draw_json])
except ValueError:
self.meet_params['total_quiz_slots'] = 0
try:
self.meet_params['total_rooms'] = max([int(quiz['room_num']) for quiz in draw_json])
except ValueError:
self.meet_params['total_rooms'] = 0
return self
# def generate_all_values(self):
# for sheet in ['DRAW','IND']:
# yield self.get_values(self.id,"'{}'!A1:ZZ300".format(sheet))
def generate_update_sheet_dimension_json(self, sheet_property_json,
column_count = None,
row_count = None):
"""Generate updateSheetPropertiesRequest to change sheet columnCount/rowCount
Note, this also updates the `.sheet_properties` object to reflect change
Parameters
----------
sheet_title : str
Title of the sheet to be modified
column_count : int
The number of columns specified for the sheet
row_count : int
The number of rows specified for the sheet. None means do not change
"""
fields = []
if not column_count is None:
fields.append("gridProperties.columnCount")
if not row_count is None:
fields.append("gridProperties.rowCount")
sheet_property_json = self.update_sheet_properties_json(
sheet_property_json,
grid_properties = self.generate_grid_properties_json(
column_count = column_count,
row_count = row_count
)
)
return self.generate_update_sheet_properties_json(
sheet_property_json,
fields = ",".join(fields)
)
def set_bracket_weights(self,weights_dictionary):
"""Change the ind score weighting of the bracket
weights : dictionary
Dictionary of weight values (floats, ints or None), with any of the following
keys: ["P", "S", "A", "B"]. If "None", then that bracket type does not
contribute to the total weight.
"""
processed_weights = [[1.0],[1.0],[0.7],[0.5]]
for i,k in enumerate("PSAB"):
try:
w = weights_dictionary.pop(k)
except KeyError:
pass
else:
if w is None:
processed_weights[i][0] = "NA"
else:
processed_weights[i][0] = w
value_range_list = [self.generate_value_range_json(
range = "Utils!C2:C5",
values = processed_weights
)]
self.batch_update_value(
file_id = self.id,
value_range_list = value_range_list
)
return self
    def set_roster(self,roster_json):
        """Update the contents of the roster
        Parameters
        ----------
        roster_json : list
            list of dictionaries representing each quizzer. Each dictionary should
            have the keys: ["id", "team", "bib", "name", "moniker", "is_rookie", "is_cap", "is_cc"]
        """
        value_range_list = []
        # Column offset of each quizzer field inside its 6-wide bib slot.
        column_names = {"id":0, "name":1, "moniker":2, "is_rookie":3, "is_cap":4, "is_cc":5}
        team_list = sorted(list(set([quizzer["team"] for quizzer in roster_json])))
        # One row per team: team name followed by 5 bib slots x 6 fields
        # (31 columns total, i.e. A through AE).
        roster_matrix = [[team] + (5 * 6) * [""] for team in team_list]
        for ti, team in enumerate(team_list):
            for quizzer in [quizzer for quizzer in roster_json if quizzer["team"] == team]:
                # Bib numbers are 1-based; each bib owns a 6-column slot.
                offset = 6 * (int(quizzer['bib']) - 1)
                if roster_matrix[ti][1 + offset] != "":
                    # Log if quizzer is overwritten
                    print("Bib numbering error, both {} and {} have bib {}".format(
                        roster_matrix[ti][1 + offset + column_names['name']],
                        quizzer['name'],
                        quizzer['bib']
                    ))
                for k,v in column_names.items():
                    roster_matrix[ti][1 + offset + v] = quizzer[k]
        value_range_list.append(
            self.generate_value_range_json(
                # NOTE(review): rows 3..len+2 would suffice for len(team_list)
                # teams; the +3 end row includes one extra row -- confirm
                # intentional (harmless to the values API).
                range = "Roster!A3:AE" + str(len(team_list) + 3),
                values = roster_matrix
            )
        )
        self.batch_update_value(
            file_id = self.id,
            value_range_list = value_range_list
        )
        return self
    def set_draw(self, draw_json):#, roster_json):
        """Update the contents of the draw
        Parameters
        ----------
        draw_json : list
            list of dictionaries representing each quiz. Each dictionary should
            have the keys: ["quiz_num","slot_num","room_num","slot_time":,"team1","team2","team3", "url", "type"]
        Notes
        -----
        Team entries containing "_" are placeholders resolved by sheet
        formulas: "P..._<rank>" looks up the post-prelim ranking, while
        "<quiz>_<placement>" looks up the finisher of an earlier quiz on
        the Schedule tab.
        """
        # Step 1: Insert draw into DrawLookup
        column_names_left = ["quiz_num","slot_time","room_num","slot_num"]
        column_names_right = ["team1","team2","team3"]
        draw_matrix_left = [[quiz[key] for key in column_names_left] for quiz in draw_json]
        draw_matrix_right = []
        for quiz in draw_json:
            if "_" in quiz['team1']:
                if quiz['team1'][0] == "P":
                    # Calculate post-prelim ranking lookup
                    quiz_row = []
                    for key in column_names_right:
                        # "P..._<rank>": rank n maps to TeamSummary row 2+n.
                        rank = int(quiz[key].split("_")[-1])
                        lookup = "TeamSummary!B{}".format(2+rank)
                        quiz_row.append('={}'.format(lookup))
                else:
                    # Calculate schedule lookup
                    quiz_row = []
                    for key in column_names_right:
                        quiz_num, placement = quiz[key].split("_")
                        # Assumes the referenced quiz number exists in
                        # draw_json; IndexError otherwise.
                        quiz_previous = [q for q in draw_json if q['quiz_num'] == quiz_num][0]
                        # Each slot is a 3-row band, each room a 4-column band.
                        offset_row = 2 + 3 * (int(quiz_previous['slot_num']) - 1)
                        offset_column = 2 + 4 * (int(quiz_previous['room_num']) - 1)
                        team_range = "Schedule!{}:{}".format(
                            self.generate_A1_from_RC(offset_row + 0, offset_column + 1),
                            self.generate_A1_from_RC(offset_row + 2, offset_column + 1)
                        )
                        placement_range = "Schedule!{}:{}".format(
                            self.generate_A1_from_RC(offset_row + 0, offset_column + 3),
                            self.generate_A1_from_RC(offset_row + 2, offset_column + 3)
                        )
                        # Shown while the referenced quiz is unfinished,
                        # e.g. "1st in 12".
                        error_msg = "{placement}{suffix} in {quiz_num}".format(
                            placement = placement,
                            suffix = {"1":"st","2":"nd","3":"rd"}[placement],
                            quiz_num = quiz_num
                        )
                        quiz_row.append(
                            '=IFERROR(INDEX({team_range},MATCH({placement},{placement_range},0),0),"{error_msg}")'.format(
                                team_range = team_range,
                                placement = placement,
                                placement_range = placement_range,
                                error_msg = error_msg
                            )
                        )
            else:
                # Just add the prelim quiz
                quiz_row = [quiz[key] for key in column_names_right]
            draw_matrix_right.append(quiz_row)
        #draw_matrix_right = [[quiz[key] for key in column_names_right] for quiz in draw_json]
        value_range_list = [
            self.generate_value_range_json(
                range = "DrawLookup!B3:E" + str(len(draw_matrix_left) + 3),
                values = draw_matrix_left
            ),
            self.generate_value_range_json(
                range = "DrawLookup!G3:I" + str(len(draw_matrix_right) + 3),
                values = draw_matrix_right
            )
        ]
        # USER_ENTERED makes the sheet parse "=..." strings as formulas.
        self.batch_update_value(
            file_id = self.id,
            value_range_list = value_range_list,
            value_input_option = "USER_ENTERED"
        )
        # Step 2: Prepare the rest of the DrawLookup page
        #TN = self.meet_params['total_teams']#len(set([quizzer['team'] for quizzer in roster_json]))
        #TQN = self.meet_params['total_quizzes']#len(draw_json)
        sheet_id = self.sheet_ids['DrawLookup']
        requests = []
        # Set sheet width to 11 + 'total_teams'
        requests.append(self.generate_update_sheet_dimension_json(
            sheet_property_json = self.sheet_properties['DrawLookup'],
            column_count = 11 + self.meet_params['total_teams']
        ))
        # Copy L2 right 'total_teams' times
        bbox_source = list(self.generate_bbox_from_A1("L2:L2"))
        bbox_dest = 1*bbox_source  # 1*list is a shallow copy
        bbox_dest[1] += 1
        bbox_dest[3] += self.meet_params['total_teams'] - 1
        requests.append(
            self.generate_copy_paste_json(
                sheet_id = sheet_id,
                bbox_source = bbox_source,
                bbox_dest = bbox_dest,
                paste_type = "PASTE_FORMULA"
            )
        )
        # Copy L3 right 'total_teams' times and down 'total_quizzes' times
        bbox_source = list(self.generate_bbox_from_A1("L3:L3"))
        bbox_dest = 1*bbox_source
        bbox_dest[2] += self.meet_params['total_quizzes'] - 1
        bbox_dest[3] += self.meet_params['total_teams'] - 1
        requests.append(
            self.generate_copy_paste_json(
                sheet_id = sheet_id,
                bbox_source = bbox_source,
                bbox_dest = bbox_dest,
                paste_type = "PASTE_FORMULA"
            )
        )
        # Copy F3 down 'total_quizzes' times
        bbox_source = list(self.generate_bbox_from_A1("F3:F3"))
        bbox_dest = 1*bbox_source
        bbox_dest[0] += 1
        bbox_dest[2] += self.meet_params['total_quizzes'] - 1
        requests.append(
            self.generate_copy_paste_json(
                sheet_id = sheet_id,
                bbox_source = bbox_source,
                bbox_dest = bbox_dest,
                paste_type = "PASTE_FORMULA"
            )
        )
        # Copy K3 down 'total_quizzes' times
        bbox_source = list(self.generate_bbox_from_A1("K3:K3"))
        bbox_dest = 1*bbox_source
        bbox_dest[0] += 1
        bbox_dest[2] += self.meet_params['total_quizzes'] - 1
        requests.append(
            self.generate_copy_paste_json(
                sheet_id = sheet_id,
                bbox_source = bbox_source,
                bbox_dest = bbox_dest,
                paste_type = "PASTE_FORMULA"
            )
        )
        # Update QUIZINDEXLOOKUP to be DrawLookup!L3:(L3+TN+TQN)
        bbox = list(self.generate_bbox_from_A1("L3:L3"))
        bbox[2] += self.meet_params['total_quizzes'] - 1
        bbox[3] += self.meet_params['total_teams'] - 1
        requests.append(
            self.generate_update_named_range_json(
                sheet_id = sheet_id,
                named_range_id = self.named_range_ids['QUIZINDEXLOOKUP'],
                name = "QUIZINDEXLOOKUP",
                bbox = bbox,
                fields = "range"
            )
        )
        response = self.batch_update(
            file_id = self.id,
            request_list = requests
        )
        return self
    def initialize_team_summary(self):#, roster_json):
        """Prepares the TeamSummary tab
        Then copies down columns E and F, one row per team.
        Requires retrieve_meet_parameters() to have been called first.
        """
        #TN = len(set([quizzer['team'] for quizzer in roster_json]))
        sheet_id = self.sheet_ids['TeamSummary']
        requests = []
        # Set sheet width to 10
        requests.append(self.generate_update_sheet_dimension_json(
            sheet_property_json = self.sheet_properties['TeamSummary'],
            column_count = 10
        ))
        # Copy down E3:F3
        bbox_source = list(self.generate_bbox_from_A1("E3:F3"))
        bbox_dest = 1*bbox_source  # 1*list is a shallow copy
        bbox_dest[0] += 1
        bbox_dest[2] += self.meet_params['total_teams'] - 1
        requests.append(self.generate_copy_paste_json(
            sheet_id = sheet_id,
            bbox_source = bbox_source,
            bbox_dest = bbox_dest
        ))
        response = self.batch_update(
            file_id = self.id,
            request_list = requests
        )
        return self
    def initialize_schedule(self):#, draw_json):
        """Prepares the schedule tab
        Sizes the grid for total_rooms x total_quiz_slots and tiles the
        template cells down (per slot) and right (per room).
        Requires retrieve_meet_parameters() to have been called first.
        """
        #TSN = max([int(quiz['slot_num']) for quiz in draw_json])
        #TRN = max([int(quiz['room_num']) for quiz in draw_json])
        sheet_id = self.sheet_ids['Schedule']
        requests = []
        # Set sheet width to 2 + 4*TRN
        requests.append(self.generate_update_sheet_dimension_json(
            sheet_property_json = self.sheet_properties['Schedule'],
            column_count = 2 + 4*self.meet_params['total_rooms'],
            row_count = 2 + 3*self.meet_params['total_quiz_slots']
        ))
        # Copy down A3:F5
        bbox_source = list(self.generate_bbox_from_A1("A3:F5"))
        bbox_dest = 1*bbox_source  # 1*list is a shallow copy
        for i in range(1,self.meet_params['total_quiz_slots']):
            # Shift window down 3 rows
            bbox_dest[0] += 3
            bbox_dest[2] += 3
            requests.append(self.generate_copy_paste_json(
                sheet_id = sheet_id,
                bbox_source = bbox_source,
                bbox_dest = bbox_dest
            ))
        # Copy right C1:F
        bbox_source = list(self.generate_bbox_from_A1("C1:F"+str(2+3*self.meet_params['total_quiz_slots'])))
        bbox_dest = 1*bbox_source
        for i in range(1,self.meet_params['total_rooms']):
            # Shift the window right 4 columns
            bbox_dest[1] += 4
            bbox_dest[3] += 4
            requests.append(self.generate_copy_paste_json(
                sheet_id = sheet_id,
                bbox_source = bbox_source,
                bbox_dest = bbox_dest
            ))
        self.batch_update(
            file_id = self.id,
            request_list = requests
        )
        return self
def set_team_parsed(self):#, draw_json, roster_json):
#TN = len(set([quizzer['team'] for quizzer in roster_json]))
#PN = 3 * sum([quiz['type'] == "P" for quiz in draw_json]) // TN
# Update Quiz Total and Average team points formulas
points_cell_string = ", ".join([
| |
from dataclasses import dataclass
from functools import reduce
from typing import Callable, Iterable, Iterator
'''
The first phase of a compiler is called `lexical analysis` implemented by a `scanner` or `lexer`.
It breaks a program into a sequence of `lexemes`:
meaningful substrings of the input.
It also transforms lexemes into `tokens`:
symbolic representations of lexemes with some internalized information.
The classic, state-of-the-art method to specify lexemes is by regular expressions.
'''
'''
1. Representation of regular expressions.
'''
@dataclass
class Regexp:
    """Abstract base class for regular-expression syntax trees."""
    def is_null(self):
        # Only the Null subclass (the empty language) overrides this.
        return False
@dataclass
class Null (Regexp):
    """The empty set of words: {}."""
    def is_null(self):
        # This is the one AST node denoting the empty language.
        return True
@dataclass
class Epsilon (Regexp):
    'empty word: { "" }'
@dataclass
class Symbol (Regexp):
    'single symbol: { "a" }'
    # the literal character this node matches
    sym: str
@dataclass
class Concat(Regexp):
    'concatenation: r1.r2'
    left: Regexp
    right: Regexp
@dataclass
class Alternative(Regexp):
    'alternative: r1|r2'
    left: Regexp
    right: Regexp
@dataclass
class Repeat(Regexp):
    'Kleene star: r*'
    body: Regexp
## smart constructors for regular expressions
## goal: construct regexps in "normal form"
## * avoid Null() subexpressions
## * avoid Epsilon() subexpressions as much as possible
## * nest concatenation and alternative to the right
# Shared singleton instances; the smart constructors below return these
# instead of allocating fresh Null()/Epsilon() nodes.
null = Null()
epsilon = Epsilon()
# Lowercase alias so all constructors read uniformly (symbol('a')).
symbol = Symbol
def concat(r1, r2):
    """Smart constructor for concatenation: simplifies Null/Epsilon
    operands and keeps nesting to the right."""
    if isinstance(r1, Null) or isinstance(r2, Null):
        # Concatenation with the empty language is empty.
        return null
    if isinstance(r1, Epsilon):
        return r2
    if isinstance(r2, Epsilon):
        return r1
    if isinstance(r1, Concat):
        # Re-associate so chains always nest rightwards.
        return Concat(r1.left, concat(r1.right, r2))
    return Concat(r1, r2)
def alternative(r1, r2):
    """Smart constructor for alternatives: drops Null operands and
    keeps nesting to the right."""
    if isinstance(r1, Null):
        return r2
    if isinstance(r2, Null):
        return r1
    if isinstance(r1, Alternative):
        # Re-associate so chains always nest rightwards.
        return Alternative(r1.left, alternative(r1.right, r2))
    return Alternative(r1, r2)
def repeat(r: Regexp) -> Regexp:
    """Smart constructor for the Kleene star."""
    if isinstance(r, (Null, Epsilon)):
        # {}* and ""* both denote just the empty word.
        return epsilon
    if isinstance(r, Repeat):
        # r** == r*
        return r
    return Repeat(r)
## utilities to construct regular expressions
def optional(r : Regexp) -> Regexp:
    """Construct r? — either r or the empty word."""
    return alternative(r, epsilon)
def repeat_one(r : Regexp) -> Regexp:
    """Construct r+ — one occurrence of r followed by any number more."""
    return concat(r, repeat(r))
def concat_list(rs : Iterable[Regexp]) -> Regexp:
    """Concatenate all regexps in rs left to right; empty rs gives epsilon."""
    result = epsilon
    for r in rs:
        result = concat(result, r)
    return result
def alternative_list(rs : Iterable[Regexp]) -> Regexp:
    """Alternate over all regexps in rs; empty rs gives the empty language."""
    result = null
    for r in rs:
        result = alternative(result, r)
    return result
## a few examples for regular expressions (taken from JavaScript definition)
'''
⟨digit⟩ ::= 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9
⟨hexdigit⟩ ::= ⟨digit⟩ | A | B | C | D | E | F | a | b | c | d | e | f
⟨hexprefix⟩ ::= 0x | 0X
⟨sign⟩ ::= ⟨empty⟩ | -
⟨empty⟩ ::=
⟨integer-literal⟩ ::= ⟨sign⟩ ⟨digit⟩+ | ⟨sign⟩ ⟨hexprefix⟩ ⟨hexdigit⟩+
⟨letter⟩ ::= A | B | C | ...| Z | a | b | c | ...| z
⟨identifier-start⟩ ::= ⟨letter⟩ | $ | _
⟨identifier-part⟩ ::= ⟨identifier-start⟩ | ⟨digit⟩
⟨identifier⟩ ::= ⟨identifier-start⟩ ⟨identifier-part⟩*
'''
def class_regexp(s: str) -> Regexp:
    """Regexp matching any single character occurring in s."""
    return alternative_list(symbol(ch) for ch in s)
def string_regexp(s: str) -> Regexp:
    """Regexp matching exactly the string s."""
    return concat_list(symbol(ch) for ch in s)
def char_range_regexp(c1: str, c2: str) -> Regexp:
    """Regexp matching any single character between c1 and c2, inclusive."""
    return alternative_list(
        symbol(chr(code)) for code in range(ord(c1), ord(c2) + 1)
    )
# Building blocks for the JavaScript-style lexeme grammar above.
digit = class_regexp("0123456789")
hexdigit = alternative(digit, class_regexp("ABCDEFabcdef"))
hexprefix = alternative(string_regexp("0x"), string_regexp("0X"))
# optional leading minus
sign = optional(symbol('-'))
integer_literal = concat(sign, repeat_one(digit))
# decimal or hex integer literal
integer_literal_js = alternative( concat(sign, repeat_one(digit)),
                                  concat_list([sign, hexprefix, repeat_one(hexdigit)]))
lc_letter = alternative_list(map(symbol, map(chr, range(ord('a'), ord('z')+1))))
uc_letter = alternative_list(map(symbol, map(chr, range(ord('A'), ord('Z')+1))))
letter = alternative(lc_letter, uc_letter)
identifier_start = alternative_list([letter, symbol('$'), symbol('_')])
identifier_part = alternative(identifier_start, digit)
identifier = concat(identifier_start, repeat(identifier_part))
blank_characters = "\t "
line_end_characters = "\n\r"
# one or more blanks/newlines
white_space = repeat_one(class_regexp(blank_characters + line_end_characters))
'''
2. Executing regular expressions
The standard method to 'execute' regular expressions is to transform them into finite automata.
Here we use a different method to execute them directly using `derivatives`.
This method uses regular expressions themselves as states of an automaton without constructing it.
We consider a regexp a final state if it accepts the empty word "".
This condition can be checked by a simple function on the regexp.
'''
def accepts_empty(r : Regexp) -> bool:
'check if r accepts the empty word'
match r:
case Null() | Symbol(_):
return False
case Epsilon() | Repeat(_):
return True
case Concat(r1, r2):
return accepts_empty(r1) and accepts_empty(r2)
case Alternative(r1, r2):
return accepts_empty(r1) or accepts_empty(r2)
'''
The transition function of a (deterministic) finite automaton maps
state `r0` and symbol `s` to the next state, say, `r1`.
If the state `r0` recognizes any words `w` that start with `s` (w[0] == s),
then state `r1` recognizes all those words `w` with the first letter removed (w[1:]).
This construction is called the `derivative` of a language by symbol `s`:
derivative(L, s) = { w[1:] | w in L and w[0] == s }
If L is the language recognized by regular expression `r0`,
then we can effectively compute a regular expression for derivative(L, s)!
As follows:
'''
def after_symbol(s : str, r : Regexp) -> Regexp:
    'produces regexp after r consumes symbol s (the Brzozowski derivative)'
    match r:
        case Null() | Epsilon():
            # Neither language contains a word starting with s.
            return null
        case Symbol(s_expected):
            return epsilon if s == s_expected else null
        case Alternative(r1, r2):
            # Derivative distributes over alternatives.
            return alternative(after_symbol(s, r1), after_symbol(s, r2))
        case Concat(r1, r2):
            # s is consumed by r1; additionally, if r1 accepts "", s may
            # be consumed directly by r2.
            return alternative(concat(after_symbol(s, r1), r2),
                               after_symbol(s, r2) if accepts_empty(r1) else null)
        case Repeat(r1):
            # Unroll one iteration of the star.
            return concat(after_symbol(s, r1), Repeat(r1))
## matching against a regular expression
def matches(r : Regexp, ss: str) -> bool:
    """Return True iff the whole string ss is in the language of r."""
    for ch in ss:
        r = after_symbol(ch, r)
        if r.is_null():
            # Empty language: no continuation can ever match.
            return False
    # Whole input consumed; accept iff the residual accepts "".
    return accepts_empty(r)
########################################################################
'''
3. Lexer descriptions
A lexer (scanner) is different from a finite automaton in several aspects.
1. The lexer must classify the next lexeme from a choice of several regular expressions.
It cannot match a single regexp, but it has to keep track and manage matching for
several regexps at the same time.
2. The lexer follows the `maximum munch` rule, which says that the next lexeme is
the longest prefix that matches one of the regular expressions.
3. Once a lexeme is identified, the lexer must turn it into a token and attribute.
Re maximum munch consider this input:
ifoundsalvationinapubliclavatory
Suppose that `if` is a keyword, why should the lexer return <identifier> for this input?
Similarly:
returnSegment
would count as an identifier even though starting with the keyword `return`.
These requirements motivate the following definitions.
A lex_action
* takes some (s : str, i : int position in s, j : int pos in s)
* consumes the lexeme sitting at s[i:j]
* returns (token for s[i:j], some k >= j)
'''
class Token: pass # abstract class of tokens
Position = int # input position
# a lex_action returns the produced token and the position after the lexeme
lex_result = tuple[Token, Position]
lex_action = Callable[[str, Position, Position], lex_result]
# a lexer rule attaches a lex_action to a regular expression
@dataclass
class Lex_rule:
    # regexp describing the lexeme
    re : Regexp
    # callback run on the matched lexeme
    action: lex_action
# a lexer tries to match its input to a list of lex rules
Lex_state = list[Lex_rule]
# reading a symbol advances the regular expression of each lex rule
def next_state(state: Lex_state, ss: str, i: int):
    """Advance every rule by the symbol ss[i], dropping dead rules."""
    advanced = [Lex_rule(after_symbol(ss[i], rule.re), rule.action)
                for rule in state]
    # Rules whose regexp collapsed to Null can never match again.
    return [rule for rule in advanced if not rule.re.is_null()]
def initial_state(rules: list[Lex_rule]) -> Lex_state:
    # The start state is simply the full rule list (no copy is made).
    return rules
def matched_rules(state: Lex_state) -> Lex_state:
    """Rules whose regexp currently accepts the consumed lexeme."""
    return list(filter(lambda rule: accepts_empty(rule.re), state))
def is_stuck(state: Lex_state) -> bool:
    # No live rules remain: no further input can produce a match.
    return not state
#####################################################################
# Raised when no rule matches any prefix of the remaining input.
class ScanError (Exception): pass
# Records the most recent (longest) match seen so far.
@dataclass
class Match:
    # action of the matching rule
    action: lex_action
    # input position just past the matched lexeme
    final : Position
@dataclass
class Scan:
    """Scanner driven by a list of lex rules (maximum munch)."""
    spec: Lex_state
    def scan_one(self) -> Callable[[str, Position], lex_result]:
        # Bind the scanner into a plain (input, position) -> result function.
        return lambda ss, i: self.scan_one_token(ss, i)
    def scan_one_token(self, ss: str, i: Position) -> lex_result:
        """Scan the single longest lexeme starting at ss[i].
        Implements maximum munch: keeps consuming while any rule is
        alive and remembers the last (i.e. longest) match; on ties the
        rule listed first in spec wins. Raises ScanError if no prefix
        of ss[i:] matches any rule.
        """
        state = self.spec
        j = i
        last_match = None
        while j < len(ss) and not is_stuck(state):
            state = next_state(state, ss, j); j += 1
            all_matches = matched_rules(state)
            if all_matches:
                # Overwrite: a later (longer) match supersedes earlier ones.
                this_match = all_matches[0]
                last_match = Match(this_match.action, j)
        match last_match:
            case None:
                raise ScanError("no lexeme found:", ss[i:])
            case Match(action, final):
                # Hand the lexeme ss[i:final] to the rule's action.
                return action(ss, i, final)
        # Unreachable: the match above is exhaustive.
        raise ScanError("internal error: last_match=", last_match)
def make_scanner(scan_one: Callable[[str, Position], lex_result], ss: str) -> Iterator[Token]:
    """Generate the token stream for ss by repeatedly applying scan_one."""
    pos = 0
    while pos < len(ss):
        # scan_one reports where the next lexeme ends.
        token, pos = scan_one(ss, pos)
        yield token
## example: excerpt from JavaScript scanner
# backslash escapes: \\ and \"
escaped_char = concat(symbol('\\'), alternative(symbol('\\'), symbol('"')))
# any printable ASCII character except backslash and double quote
content_char = alternative_list([symbol(chr(a))
                                 for a in range(ord(' '), 128)
                                 if a not in [ord('\\'), ord('"')]])
# a full double-quoted string literal
string_literal = concat_list([symbol('"'), repeat(alternative(escaped_char, content_char)), symbol('"')])
# Concrete token classes for the example JavaScript scanner.
@dataclass
class Return(Token): pass
@dataclass
class Intlit(Token):
    # parsed integer value
    value: int
@dataclass
class Ident(Token):
    # identifier text
    name: str
@dataclass
class Lparen(Token): pass
@dataclass
class Rparen(Token): pass
@dataclass
class Slash(Token): pass
@dataclass
class Strlit(Token):
    # unescaped string contents
    value: str
# Subsidiary scanner spec for the *inside* of a string literal: escape
# sequences yield the escaped character, plain characters yield themselves.
string_spec: Lex_state = [
    Lex_rule(escaped_char, lambda ss, i, j: (ss[i+1], j)),
    Lex_rule(content_char, lambda ss, i, j: (ss[i], j))
]
string_token = Scan(string_spec).scan_one()
def strlit(ss: str) -> Strlit:
    "use subsidiary scanner to transform string content"
    # Re-scan the raw lexeme character by character to resolve escapes.
    return Strlit("".join(make_scanner(string_token, ss)))
js_spec: Lex_state = [
Lex_rule(string_regexp("return"), lambda ss, i, j: (Return(), j)),
Lex_rule(integer_literal, lambda ss, i, j: (Intlit(int(ss[i:j])), j)),
Lex_rule(identifier, lambda ss, i, j: (Ident(ss[i:j]), j)),
Lex_rule(white_space, lambda ss, | |
<gh_stars>1-10
import subprocess
import platform
import os
import sys
# Set these to None for compile/link debugging or subprocess.PIPE to silence
# compiler warnings and errors.
STDOUT = subprocess.PIPE
STDERR = subprocess.PIPE
# STDOUT = None
# STDERR = None
# This is the max length that I want a printed line to be.
MAX_LINE_LENGTH = 78
# Major version (2 or 3) of the interpreter running this setup script.
PY_MAJOR_VERSION = sys.version_info[0]
def line_wrap_paragraph(s, max_len=None):
    """Split s into terminal-friendly lines at word boundaries.

    Each line is at most max_len - 1 characters (max_len defaults to
    MAX_LINE_LENGTH). Fixes the previous version, which looped forever
    (and emitted bogus slices) when a segment contained no space:
    rfind() returned -1, so `beginning` never advanced. Such unbroken
    runs are now hard-broken at the width limit.
    """
    if max_len is None:
        max_len = MAX_LINE_LENGTH
    width = max_len - 1
    lines = []
    beginning = 0
    while beginning + width < len(s):
        segment = s[beginning:beginning + width]
        last_space = segment.rfind(' ')
        if last_space == -1:
            # No space to break at: hard-break to guarantee progress.
            lines.append(segment)
            beginning += width
        else:
            lines.append(segment[:last_space])
            beginning += last_space + 1
    # Remainder fits on one line.
    lines.append(s[beginning:])
    return lines
def print_bad_news(value_name, default):
    """Print a starred banner warning that value_name fell back to default."""
    warning = (
        f"Setup can't determine {value_name} on your system, so it will "
        f"default to {default} which may not be correct."
    )
    plea = (
        "Please report this message and your operating system info to the "
        "package maintainer listed in the README file."
    )
    lines = line_wrap_paragraph(warning) + [''] + line_wrap_paragraph(plea)
    border = '*' * MAX_LINE_LENGTH
    print(border + "\n* " + "\n* ".join(lines) + '\n' + border)
def does_build_succeed(filename, linker_options = ""):
    """Return True if ./prober/<filename> compiles and links cleanly.

    Notes kept from the original author:
    - A linker option like -lrt needs to come *after* the C file or
      linking fails on Ubuntu 11.10 (maybe because of the gcc version?).
    - Some Linuxes implement the sem_xxx() functions in libpthread, so
      -lpthread is always passed; it's harmless where it isn't needed.
    """
    command = "cc -Wall -o ./prober/foo ./prober/%s %s -lpthread" % (filename, linker_options)
    build = subprocess.Popen(command, shell=True, stdout=STDOUT, stderr=STDERR)
    # wait() returns the process' exit status; 0 means compile & link succeeded.
    return build.wait() == 0
def compile_and_run(filename, linker_options = ""):
    """Compile ./prober/<filename> and return its stripped stdout output
    as a string, or None if the compile fails."""
    command = "cc -Wall -o ./prober/foo %s ./prober/%s" % (linker_options, filename)
    build = subprocess.Popen(command, shell=True, stdout=STDOUT, stderr=STDERR)
    if build.wait() != 0:
        # Compile failed; nothing to run.
        return None
    output = subprocess.Popen(["./prober/foo"],
                              stdout=subprocess.PIPE).communicate()[0]
    return output.strip().decode()
def get_sysctl_value(name):
    """Given a sysctl name (e.g. 'kern.mqueue.maxmsg'), return sysctl's
    value for that variable as a string, or None if the sysctl call fails
    (unknown name, not a BSD-ish system, etc.).

    Only makes sense on systems that implement sysctl (BSD derivatives).
    """
    s = None
    try:
        # stderr is discarded because if sysctl is available but doesn't
        # know about the particular item queried, it will kvetch with a
        # message like 'second level name mqueue in kern.mqueue.maxmsg is
        # invalid'. This always happens under OS X (which doesn't have any
        # kern.mqueue values) and under FreeBSD when the mqueuefs kernel
        # module isn't loaded.
        #
        # Bug fix: the original opened os.devnull with mode 'rw', which is
        # an invalid mode under Python 3; the resulting ValueError was
        # swallowed by a bare except, so this function always returned
        # None there. Mode 'w' is what is needed, and the handle is now
        # closed deterministically.
        with open(os.devnull, 'w') as devnull:
            s = subprocess.Popen(["sysctl", "-n", name],
                                 stdout=subprocess.PIPE,
                                 stderr=devnull).communicate()[0]
        s = s.strip().decode()
    except Exception:
        # Best-effort probe: missing sysctl binary, failed call, or
        # undecodable output all mean "couldn't determine the value".
        pass
    return s
def sniff_realtime_lib():
    """Return True if -lrt (the realtime libs) is required to build the
    probe, False if not — or False with a printed warning if the probe
    fails both ways and we can't tell."""
    filename = "sniff_realtime_lib.c"
    needs_realtime = None
    if does_build_succeed(filename):
        # Builds without the realtime libs.
        needs_realtime = False
    elif does_build_succeed(filename, "-lrt"):
        # cc failed when not linked to realtime libs, but succeeded with
        # them involved, so they're needed.
        needs_realtime = True
    if needs_realtime is None:
        # Unable to determine whether or not the realtime libs are needed.
        # That's bad! Print a warning, default to False and hope for the best.
        needs_realtime = False
        print_bad_news("if it needs to link to the realtime libraries", "'no'")
    return needs_realtime
def sniff_sem_getvalue(linker_options):
    # True if sem_getvalue() is available on this platform (probe compiles).
    return does_build_succeed("sniff_sem_getvalue.c", linker_options)
def sniff_sem_timedwait(linker_options):
    # True if sem_timedwait() is available on this platform (probe compiles).
    return does_build_succeed("sniff_sem_timedwait.c", linker_options)
def sniff_sem_value_max():
    """Determine SEM_VALUE_MAX for the custom header.

    Returns None when SEM_VALUE_MAX is already #defined in a standard
    header (so nothing needs to be added to the custom header); otherwise
    a value obtained from sysconf() or, as a last resort, the name of the
    POSIX macro to use verbatim.
    """
    if does_build_succeed("sniff_sem_value_max.c"):
        # Already #defined in a standard header; nothing to add.
        return None
    # OpenSolaris 2008.05 doesn't #define SEM_VALUE_MAX. (This may be true
    # elsewhere too.) Ask sysconf() instead if it exists. Note that
    # os.sysconf_names doesn't exist under Cygwin.
    if hasattr(os, "sysconf_names") and "SC_SEM_VALUE_MAX" in os.sysconf_names:
        return os.sysconf("SC_SEM_VALUE_MAX")
    # This value of last resort should be #defined everywhere. What could
    # possibly go wrong?
    return "_POSIX_SEM_VALUE_MAX"
def sniff_page_size():
    """Return the system PAGE_SIZE as reported by the probe, defaulting to
    4096 (with a printed warning) if probing fails."""
    DEFAULT_PAGE_SIZE = 4096
    # Linker options don't matter here: the probe doesn't call any
    # functions, it just reports the value of a #define.
    probed = compile_and_run("sniff_page_size.c")
    if probed is not None:
        return probed
    print_bad_news("the value of PAGE_SIZE", DEFAULT_PAGE_SIZE)
    return DEFAULT_PAGE_SIZE
def sniff_mq_existence(linker_options):
    # True if POSIX message queues are available on this platform (probe compiles).
    return does_build_succeed("sniff_mq_existence.c", linker_options)
def sniff_mq_prio_max():
    """Determine the largest usable message-queue priority, returned as a
    C unsigned int literal string (e.g. "31U").

    MQ_PRIO_MAX is #defined in limits.h on all of the systems checked
    that support message queues at all (2 Linux boxes, OpenSolaris and
    FreeBSD 8.0). 32 is the minimum allowable max priority per POSIX;
    systems are permitted to define a larger value.
    ref: http://www.opengroup.org/onlinepubs/009695399/basedefs/limits.h.html
    """
    DEFAULT_PRIORITY_MAX = 32
    max_priority = None
    # OS X up to and including 10.8 doesn't support POSIX message queues
    # and doesn't define MQ_PRIO_MAX. Maybe this aggravation will cease
    # in 10.9?
    if does_build_succeed("sniff_mq_prio_max.c"):
        output = compile_and_run("sniff_mq_prio_max.c")
        if output:
            try:
                max_priority = int(output)
            except ValueError:
                # Bug fix vs. the original: non-numeric probe output used
                # to survive as a string past this point (empty output
                # skipped the reset entirely) and crash on the arithmetic
                # below; now any unusable output falls through to
                # sysconf().
                max_priority = None
    if max_priority is None:
        # Looking for a #define didn't work; ask sysconf() instead.
        # Note that sys.sysconf_names doesn't exist under Cygwin.
        if hasattr(os, "sysconf_names") and "SC_MQ_PRIO_MAX" in os.sysconf_names:
            max_priority = os.sysconf("SC_MQ_PRIO_MAX")
        else:
            max_priority = DEFAULT_PRIORITY_MAX
            print_bad_news("the value of PRIORITY_MAX", max_priority)
    # Under OS X, os.sysconf("SC_MQ_PRIO_MAX") returns -1.
    if max_priority < 0:
        max_priority = DEFAULT_PRIORITY_MAX
    # Adjust for the fact that these are 0-based values; i.e. permitted
    # priorities range from 0 - (MQ_PRIO_MAX - 1). So why not just make
    # the #define one smaller? Because this one goes up to eleven...
    max_priority -= 1
    # priority is an unsigned int
    return "%dU" % max_priority
def sniff_mq_max_messages():
# This value is not defined by POSIX.
# On most systems I've tested, msg Qs are implemented via mmap-ed files
# or a similar interface, so the only theoretical limits are imposed by the
# file system. In practice, Linux and *BSD impose some fairly tight
# limits.
# On Linux it's available in a /proc file and often defaults to the wimpy
# value of 10.
# On FreeBSD (and other BSDs, I assume), it's available via sysctl as
# kern.mqueue.maxmsg. On my FreeBSD 9.1 test system, it defaults to 100.
# mqueue.h defines mq_attr.mq_maxmsg as a C long, so that's
# a practical limit for this value.
# ref: http://linux.die.net/man/7/mq_overview
# ref: http://www.freebsd.org/cgi/man.cgi?query=mqueuefs&sektion=5&manpath=FreeBSD+7.0-RELEASE
# http://fxr.watson.org/fxr/source/kern/uipc_mqueue.c?v=FREEBSD91#L195
# ref: http://groups.google.com/group/comp.unix.solaris/browse_thread/thread/aa223fc7c91f8c38
# ref: http://cygwin.com/cgi-bin/cvsweb.cgi/src/winsup/cygwin/posix_ipc.cc?cvsroot=src
# ref: http://cygwin.com/cgi-bin/cvsweb.cgi/src/winsup/cygwin/include/mqueue.h?cvsroot=src
mq_max_messages = None
# Try to get the value from where Linux stores it.
try:
mq_max_messages = int(open("/proc/sys/fs/mqueue/msg_max").read())
except:
# Oh well.
pass
if not mq_max_messages:
# Maybe we're on BSD.
mq_max_messages = get_sysctl_value('kern.mqueue.maxmsg')
if mq_max_messages:
mq_max_messages = int(mq_max_messages)
if not mq_max_messages:
# We're on a non-Linux, non-BSD system, or OS X, or BSD with
# the mqueuefs kernel module not loaded (which it's not, by default,
# under FreeBSD 8.x and 9.x which are the only systems I've tested).
#
# If we're on FreeBSD and mqueuefs isn't loaded when this code runs,
# sysctl won't be able to provide mq_max_messages to me. (I assume other
# BSDs behave the same.) If I use too large of a default, then every
# attempt to create a message queue via posix_ipc will fail with
# "ValueError: Invalid parameter(s)" unless the user explicitly sets
# the max_messages param.
if platform.system().endswith("BSD"):
# 100 is the value I see under FreeBSD 9.2. I hope this works
# elsewhere!
mq_max_messages = 100
else:
# We're on a non-Linux, non-BSD system. I take a wild guess at an
# appropriate value. The max possible is > 2 billion, but the
# values used by Linux and FreeBSD suggest that a smaller | |
<gh_stars>1-10
#! /usr/bin/env python
###################################################################################################
# Script to generate specific random streams intended to verify correct load-balance in LAGs.
# An inverse stream is sent from destination to source to perform MAC learning.
# It can operate in two modes:
# 1) Limited: in this mode, the total number of packets that should be sent is known and was
# defined by options -n. First, all inverted packets are sent from destination to source.
# After, if they were all received, the target flow is sent from source to destination.
# We verify that all sent packets were received. In case of errors, TrafficFlow::Monitor
# statistics are displayed.
# 2) Unlimited: in this mode we continually send packets. As we don't know how many we should send,
# normal and inverted flows are sent simultaneously. We can't ensure MAC learning, but as inverted
# stream is started first, in normal conditions MAC learning should be OK.
###################################################################################################
import l2tester
import sys
from scapy.packet import Raw
from scapy.layers.l2 import Ether
from scapy.layers.inet import IP, UDP, TCP, ICMP
from l2tester.packet import MPLS
from optparse import OptionParser
def check_monitor_errors(monitor):
    """ Verify that TrafficFlow_Monitor executed with no errors.
    @param monitor Instance that should be verified.
    Raises Exception describing the first problem found (loop detected,
    nothing received, or traffic interruption).
    """
    stats = l2tester.TrafficFlow_Statistics()
    monitor.get_statistics(stats)
    if stats.loop_detected_intervals > 0:
        raise Exception("Loop detected during {0} ms.".format(stats.loop_detected_ms))
    if stats.received_packets == 0:
        raise Exception("No packets received.")
    if stats.traffic_interruption_intervals > 0:
        raise Exception("Traffic interrupted during {0} ms. Received {1} from {2} sent packets.".format(
            stats.traffic_interruption_ms, stats.received_packets, stats.sent_packets))
def main():
parser = OptionParser(usage="""%prog [options] <src> <dst>
src : Source ethernet interface. Ex: eth0
dst : Destination ethernet interface. Ex: eth1
""")
parser.add_option("-c", "--criteria",
help="""
Load balance criteria: src-mac, dst-mac, src-dst-mac,
src-ip, dst-ip, src-dst-ip,
mpls-top-sec, mpls-top, mpls-sec,
src-tcp, dst-tcp, src-dst-tcp,
src-udp, dst-udp, src-dst-udp
""", default="src-dst-mac",
action="store", type="string", dest="criteria")
parser.add_option("-i", "--interval",
help="Define packet interval in milliseconds.", default=100,
action="store", type="int", dest="interval")
parser.add_option("-n", "--num_packets",
help="Send specified number of packets. Default: send indefinitely.", default=0,
action="store", type="int", dest="num_packets")
parser.add_option("-d", "--debug",
help="Enable L2tester debuging.", default=False,
action="store_true", dest="debug")
(options, args) = parser.parse_args()
if len(args) < 2:
parser.error("must specify interfaces to use")
if options.debug:
l2tester.Logger.config_log_level( l2tester.Logger.L2T_LOG_DEBUG )
if options.criteria not in ["dst-ip", "dst-mac", "src-dst-ip", "src-dst-mac", "src-ip", "src-mac", "src-udp", "dst-udp", "src-dst-udp",
"src-tcp", "dst-tcp", "src-dst-tcp", "mpls-top-sec", "mpls-top", "mpls-sec"]:
parser.error("invalid criteria '{0}'".format(options.criteria))
if_src = args[0]
if_dst = args[1]
try:
if_src_action = l2tester.Action()
if_src_action.type = l2tester.Action.ACTION_RANDOMIZE
if_src_action.seed = 10
if_src_filter = l2tester.EthernetFilter()
if_dst_action = l2tester.Action()
if_dst_action.type = l2tester.Action.ACTION_RANDOMIZE
if_dst_action.seed = 10
if_dst_filter = l2tester.EthernetFilter()
if 'udp' in options.criteria:
if_src_action.mask = 0xFFFF000000000000
if_src_action.range_first = 1
if_src_action.range_last = 0x03FF # Port 1023
if_src_filter.dst_mac = '10:00:01:02:03:FF'
if_src_filter.compile()
if_src_packet = Ether(src = '10:00:01:02:03:04', dst = '10:00:01:02:03:FF') / IP(src='192.168.42.01', dst='192.168.42.02') / UDP(sport=18, dport=50) /Raw( '\0' * 100 )
if_dst_action.mask = 0xFFFF000000000000
if_dst_action.range_first = 1
if_dst_action.range_last = 0x03FF # Port 1023
if_dst_filter.src_mac = '10:00:01:02:03:FF'
if_dst_filter.compile()
if_dst_packet = Ether(src = '10:00:01:02:03:FF', dst = '10:00:01:02:03:04') / IP(src='192.168.42.02', dst='192.168.42.01') / UDP(sport=50, dport=18) /Raw( '\0' * 100 )
if options.criteria == 'src-dst-udp':
# Source Interface generate random source and destination UDP port
if_src_extra_action = l2tester.Action()
if_src_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_src_extra_action.mask = 0xFFFF000000000000
if_src_extra_action.byte = 34 # Source UDP port
if_src_extra_action.range_first = 1
if_src_extra_action.range_last = 0x03FF # Port 1023
if_src_action.byte = 36 # Destination UDP port
if_src_action.chain_action(if_src_extra_action)
# Destination Interface generate random source and destination UDP port (but seeds are inverted)
if_dst_extra_action = l2tester.Action()
if_dst_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_dst_extra_action.mask = 0xFFFF000000000000
if_dst_extra_action.byte = 36 # Destination UDP port
if_dst_extra_action.range_first = 1
if_dst_extra_action.range_last = 0x03FF # Port 1023
if_dst_action.byte = 34 # Source UDP port
if_dst_action.chain_action(if_dst_extra_action)
elif options.criteria == 'src-udp':
if_src_action.byte = 34
if_dst_action.byte = 36
else: # options.criteria == 'dst-udp':
if_src_action.byte = 36
if_dst_action.byte = 34
if 'tcp' in options.criteria:
if_src_action.mask = 0xFFFF000000000000
if_src_action.range_first = 1
if_src_action.range_last = 0x03FF # Port 1023
if_src_filter.dst_mac = '10:00:01:02:03:FF'
if_src_filter.compile()
if_src_packet = Ether(src = '10:00:01:02:03:04', dst = '10:00:01:02:03:FF') / IP(src='192.168.42.01', dst='192.168.42.02') / TCP(sport=21, dport=57) /Raw( '\0' * 100 )
if_dst_action.mask = 0xFFFF000000000000
if_dst_action.range_first = 1
if_dst_action.range_last = 0x03FF # Port 1023
if_dst_filter.src_mac = '10:00:01:02:03:FF'
if_dst_filter.compile()
if_dst_packet = Ether(src = '10:00:01:02:03:FF', dst = '10:00:01:02:03:04') / IP(src='192.168.42.02', dst='192.168.42.01') / TCP(sport=57, dport=21) /Raw( '\0' * 100 )
if options.criteria == 'src-dst-tcp':
# Source Interface generate random source and destination UDP port
if_src_extra_action = l2tester.Action()
if_src_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_src_extra_action.mask = 0xFFFF000000000000
if_src_extra_action.byte = 34 # Source TCP port
if_src_extra_action.range_first = 1
if_src_extra_action.range_last = 0x03FF # Port 1023
if_src_action.byte = 36 # Destination TCP port
if_src_action.chain_action(if_src_extra_action)
# Destination Interface generate random source and destination TCP port (but seeds are inverted)
if_dst_extra_action = l2tester.Action()
if_dst_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_dst_extra_action.mask = 0xFFFF000000000000
if_dst_extra_action.byte = 36 # Destination TCP port
if_dst_extra_action.range_first = 1
if_dst_extra_action.range_last = 0x03FF # Port 1023
if_dst_action.byte = 34 # Source TCP port
if_dst_action.chain_action(if_dst_extra_action)
elif options.criteria == 'src-tcp':
if_src_action.byte = 34
if_dst_action.byte = 36
else: # options.criteria == 'dst-tcp'
if_src_action.byte = 36
if_dst_action.byte = 34
if 'ip' in options.criteria:
checksum_action = l2tester.Action()
checksum_action.byte = 24
checksum_action.type = l2tester.Action.ACTION_IPV4_CHECKSUM
if_src_action.mask = 0xFFFFFFFF00000000
if_src_action.range_first = 1
if_src_action.range_last = 0xE0000000 # Create random IPs smaller than first multicast 224.0.0.0
if_src_filter.dst_mac = '10:00:01:02:03:FF'
if_src_filter.compile()
if_src_packet = Ether(src = '10:00:01:02:03:04', dst = '10:00:01:02:03:FF') / IP(src='192.168.42.01', dst='192.168.42.02') / Raw( '\0' * 100 )
if_dst_action.mask = 0xFFFFFFFF00000000
if_dst_action.range_first = 1
if_dst_action.range_last = 0xE0000000 # Create random IPs smaller than first multicast 224.0.0.0
if_dst_filter.src_mac = '10:00:01:02:03:FF'
if_dst_filter.compile()
if_dst_packet = Ether(src = '10:00:01:02:03:FF', dst = '10:00:01:02:03:04') / IP(src='192.168.42.02', dst='192.168.42.01') / Raw( '\0' * 100 )
if options.criteria == 'src-dst-ip':
# Source Interface generate random source and destination IP
if_src_extra_action = l2tester.Action()
if_src_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_src_extra_action.mask = 0xFFFFFFFF00000000
if_src_extra_action.byte = 26 # Source IP
if_src_extra_action.range_first = 1
if_src_extra_action.range_last = 0xE0000000 # Create random IPs smaller than first multicast 224.0.0.0
if_src_extra_action.chain_action( checksum_action )
if_src_action.byte = 30 # Destination IP
if_src_action.chain_action(if_src_extra_action)
# Destination Interface generate random source and destination IP (but seeds are inverted)
if_dst_extra_action = l2tester.Action()
if_dst_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_dst_extra_action.mask = 0xFFFFFFFF00000000
if_dst_extra_action.byte = 30 # Destination IP
if_dst_extra_action.range_first = 1
if_dst_extra_action.range_last = 0xE0000000 # Create random IPs smaller than first multicast 224.0.0.0
if_dst_extra_action.chain_action( checksum_action )
if_dst_action.byte = 26 # Source IP
if_dst_action.chain_action(if_dst_extra_action)
else:
# For single IP, chain action directly to checksum.
if_src_action.chain_action(checksum_action)
if_dst_action.chain_action(checksum_action)
if options.criteria == 'dst-ip':
if_src_action.byte = 30 # Destination IP
if_dst_action.byte = 26 # Source IP
elif options.criteria == 'src-ip':
if_src_action.byte = 26 # Source IP
if_dst_action.byte = 30 # Destination IP
if 'mpls' in options.criteria:
if_src_action.mask = 0xFFFFF00000000000
if_src_action.range_first = 16 # Labels 0 to 15 are reserved.
if_src_action.range_last = 0xFFFFF #Last valid label (2^20 - 1).
if_src_filter.dst_mac = '10:00:01:02:03:FF'
if_src_filter.compile()
if_src_packet = Ether(src = '10:00:01:02:03:04', dst = '10:00:01:02:03:FF') / MPLS(s=0) / MPLS() / IP(src='192.168.42.01', dst='192.168.42.02') / ICMP() / Raw( '\0' * 100 )
if_dst_action.mask = 0xFFFFF00000000000
if_dst_action.range_first = 0x10 # Labels 0 to 15 are reserved.
if_dst_action.range_last = 0xFFFFF # Last valid label (2^20 - 1).
if_dst_filter.src_mac = '10:00:01:02:03:FF'
if_dst_filter.compile()
if_dst_packet = Ether(src = '10:00:01:02:03:FF', dst = '10:00:01:02:03:04') / MPLS(s=0) / MPLS() / IP(src='192.168.42.02', dst='192.168.42.01') / ICMP() / Raw( '\0' * 100 )
if options.criteria == 'mpls-top-sec':
# Source Interface generates random MPLS top and second labels
if_src_extra_action = l2tester.Action()
if_src_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_src_extra_action.mask = 0xFFFFF00000000000
if_src_extra_action.byte = 14 # Top label
if_src_extra_action.range_first = 0x10 # Labels 0 to 15 are reserved.
if_src_extra_action.range_last = 0xFFFFF # Last valid label (2^20 - 1).
if_src_action.byte = 18 # Second label
if_src_action.chain_action(if_src_extra_action)
# Destination Interface generates random MPLS top and second labels
if_dst_extra_action = l2tester.Action()
if_dst_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_dst_extra_action.mask = 0xFFFFF00000000000
if_dst_extra_action.byte = 14 # Top label
if_dst_extra_action.range_first = 0x10 # Labels 0 to 15 are reserved.
if_dst_extra_action.range_last = 0xFFFFF # Last valid label (2^20 - 1).
if_dst_action.byte = 18 # Second label
if_dst_action.chain_action(if_dst_extra_action)
elif options.criteria == 'mpls-top':
if_src_action.byte = 14
if_dst_action.byte = 14
else: # options.criteria == 'mpls-sec':
if_src_action.byte = 18
if_dst_action.byte = 18
if 'mac' in options.criteria:
if_src_action.mask = 0xFEFFFFFFFFFF0000
if_src_filter.outer_tpid = 0x5010
if_src_filter.compile()
if_src_packet = Ether(src = '10:00:01:02:03:01', dst = '10:00:01:02:03:02', type = 0x5010) / Raw( '\0' * 100 )
if_dst_action.mask = 0xFEFFFFFFFFFF0000
if_dst_filter.outer_tpid = 0x5011
if_dst_filter.compile()
if_dst_packet = Ether(src = '10:00:01:02:03:02', dst = '10:00:01:02:03:01', type = 0x5011) / Raw( '\0' * 100 )
if options.criteria == 'src-dst-mac':
# Source Interface generate random source and destination MAC
if_src_extra_action = l2tester.Action()
if_src_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_src_extra_action.mask = 0xFEFFFFFFFFFF0000
if_src_extra_action.byte = 6 # Source MAC
if_src_action.byte = 0 # Destination MAC
if_src_action.chain_action(if_src_extra_action)
# Destination Interface generate random source and destination MAC (but seeds are inverted)
if_dst_extra_action = l2tester.Action()
if_dst_extra_action.type = l2tester.Action.ACTION_RANDOMIZE
if_dst_extra_action.mask = 0xFEFFFFFFFFFF0000
if_dst_extra_action.byte = 0 # Destination MAC
if_dst_action.byte = 6 # Source MAC
if_dst_action.chain_action(if_dst_extra_action)
elif options.criteria == 'dst-mac':
if_src_action.byte = 0 # Destination MAC
if_dst_action.byte = 6 # Source MAC
else: # options.criteria == 'src-mac'
if_src_action.byte = 6 # Destination MAC
if_dst_action.byte = 0 # Source MAC
src_to_dst_monitor = l2tester.TrafficFlow_Monitor( if_src, if_dst, str(if_src_packet), options.interval, if_src_action, if_src_filter )
if options.num_packets:
# Limited Mode
try:
sys.stdout.write("Learning MACs... ")
dst_to_src_monitor = l2tester.TrafficFlow_Monitor(if_dst, if_src, str(if_dst_packet), options.interval, if_dst_action, if_dst_filter)
dst_to_src_monitor.run(options.num_packets if 'mac' in options.criteria else 1) #Send more than one packet on the other direction only if criteria is by MAC
check_monitor_errors(dst_to_src_monitor)
sys.stdout.write("OK!\n")
sys.stdout.write("Sending random stream... ")
src_to_dst_monitor.run(options.num_packets)
check_monitor_errors(src_to_dst_monitor)
sys.stdout.write("OK!\n")
except Exception as e:
sys.stdout.write(str(e) + "\n")
return
else:
# Unlimited Mode
# Inverted stream is defined as simple sender (and not as monitor) because it will operate concurrently with monitored stream.
dst_to_src_sender = l2tester.Sender( if_dst, str(if_dst_packet) )
dst_to_src_sender.set_action( if_dst_action )
dst_to_src_sender.manual_bandwidth( 1, 1000000 * options.interval )
dst_to_src_sender.start()
print """
===============================================================================
Timestamp (ms) | Delta (ms) | Event
-------------------------------------------------------------------------------"""
last_event_ms = 0
src_to_dst_monitor.start()
while True:
try :
event = src_to_dst_monitor.iterate_event(0, True, 1000);
if event:
print " {0:>16} | {1:>16} | {2}".format(
event.timestamp_ms, event.timestamp_ms - last_event_ms,
l2tester.TrafficFlow_Event.type_to_str(event.type))
last_event_ms = event.timestamp_ms
if event.type == l2tester.TrafficFlow_Event.TEST_FINISHED :
break;
except KeyboardInterrupt:
src_to_dst_monitor.stop()
dst_to_src_sender.stop()
stats = l2tester.TrafficFlow_Statistics()
src_to_dst_monitor.get_statistics(stats)
print """
===============================================================================
Traffic Interruption
Total : {0} ms
Intervals : {1}
Loop Detection
Total : {2} ms
Intervals | |
<reponame>galizia-lab/pyview
'''
Creates .lst file for single wavelength TIFF files (pre-calculated RATIO) in the format used in Trondheim
Author: Giovanni, Dec 2021, based on template in VIEW folder by Ajay
Expected data structure:
In the folder "01_DATA", each animal has a folder, e.g. "190815_h2_El",
and located in that folder are all .tif files for that animal
There is a sister folder "02_LISTS"
Output:
In the folder "02_LISTS":
There will be a file Animal.lst.xlsx, e.g. "190815_h2_El.lst.xlsx"
That file contains one line for each measurement.
What to do next?
In this file, change values that are global
or insert a function that can extract odor name or concentration name from somewhere
In the Animal.lst.xlsx file, correct/complete entries (e.g. odor names, odor concentrations)
Make sure stimulus timing information is correct
Why do I need a .lst.xlsx file?
Load measurements in pyVIEW using this .lst file, so that stimulus information is correct
For off-line analysis, information is taken from this file.
Good to know:
Information, where possible, is taken from the OME header of the incoming tif file.
If that information is wrong, incomplete, or else, modify the code in:
importers.py:P1DualWavelengthTIFSingleFileImporter:parse_metadata
'''
from view.python_core.measurement_list import MeasurementList
from view.python_core.measurement_list.importers import get_importer_class
from view.python_core.flags import FlagsManager
from collections import OrderedDict
import pandas as pd
import logging
import pathlib as pl
logging.basicConfig(level=logging.INFO)
# ------------------- Some parameters about experimental setup, data structure and output file type --------------------
# 3 for single wavelength Till Photonics Measurements
# 4 for two wavelength Till Photonics Measurements
# 20 for Zeiss Confocal Measurements
LE_loadExp = 33 #for Bente, 33 or 35 both work!
# Mother of all Folders of your dataset
# On Windows, if you copy paths from the file explorer, make sure the string below is always of the form r"......"
STG_MotherOfAllFolders = r"/Users/galizia/Nextcloud/VTK_2021/Bente_Test_2021"
# path of the "Data" folder in VIEW organization containing the data
# On Windows, if you copy paths from the file explorer, make sure the string below is always of the form r"......"
STG_Datapath = r""
# path of the "Lists" folder in VIEW organization containing the list files
# On Windows, if you copy paths from the file explorer, make sure the string below is always of the form r"......"
STG_OdorInfoPath = r""
# Choose measurement list output extension among ".lst", ".lst.xlsx", ".settings.xlsx"
# VIEW does not support writing .xls list files anymore (nonetheless, it can read them and revise/update them to .xlsx)
measurement_output_extension = ".lst.xlsx"
# ------------------- A dictionary containing default values for metadata.----------------------------------------------
# ------------------- Only metadata included in this dictionary will be written ----------------------------------------
# ----Note that columns of the output measurement list files will have the same order as below.-------------------------
default_values = OrderedDict()
default_values['Measu'] = 0 # unique identifier for each line, corresponds to item in TILL photonics log file
default_values['Label'] = "none"
default_values['Odour'] = 'odor?' # stimulus name, maybe extracted from label in the function "custom_func" below
default_values['OConc'] = 0 # odor concentration, maybe extracted from label in the function "custom_func" below
default_values['Analyze'] = -1 # whether to analyze in VIEWoff. Default 1
default_values['Cycle'] = 0 # how many ms per frame
default_values['DBB1'] = 'none' # file name of raw data
default_values['UTC'] = 0 # recording time, extracted from file
default_values['PxSzX'] = '0.0' # um per pixel, 1.5625 for 50x air objective, measured by <NAME> July 2017 on Till vision system, with a binning of 8
default_values['PxSzY'] = '0.0' # um per pixel, 1.5625 for 50x air objective, measured by <NAME> July 2017 on Till vision system, with a binning of 8
default_values['Lambda'] = 0 # wavelength of stimulus. In TILL, from .log file, In Zeiss LSM, from .lsm file
# These will be automatically filed for LE_loadExp=4
default_values['dbb2'] = 'none' # file name of raw data in dual wavelength recordings (FURA)
# To include more columns, uncomment entries below and specify a default value.
# #
# block for first stimulus
# default_values['StimON'] = -1 # stimulus onset, unit: frames, count starts at frame 1.
# default_values['StimOFF'] = -1 # stimulus offset, unit: frames, count starts at frame 1.
# default_values['StimLen'] = 0 # stimulus onset in ms from beginning - alternative to StimON
# default_values['StimONms'] = -1 # stimulus length in ms - alternative to StimOFF
# #
# block for second stimulus
# default_values['Stim2ON'] = 0 # stimulus onset, unit: frames, count starts at frame 1.
# default_values['Stim2OFF'] = 0 # stimulus offset, unit: frames, count starts at frame 1.
# default_values['Stim2Len'] = 0 # stimulus onset in ms from beginning - alternative to StimON
# default_values['Stim2ONms'] = -1 # stimulus length in ms - alternative to StimOFF
# #
# default_values['Age'] = -1
# default_values['Sex'] = 'o'
# default_values['Side'] = 'none'
# default_values['Comment'] = 'none'
# #
# default_values['MTime'] = 0
# default_values['Control'] = 0
# default_values['Pharma'] = 'none'
# default_values['PhTime'] = 0
# default_values['PhConc'] = 0
# default_values['ShiftX'] = 0
# default_values['ShiftY'] = 0
# default_values['StimISI'] = 0
# default_values['setting'] = 'none'
# default_values['dbb3'] = 'none'
# default_values['PosZ'] = 0
# default_values['Countl'] = 0
# default_values['slvFlip'] = 0
# ----------------------------------------------------------------------------------------------------------------------
# ----------------- A function used to modify list entries after automatic parsing of metadata -------------------------
# ----------------- This function indicates what needs to be done for a row --------------------------------------------
# ----------------- The same is internally applied to all rows of the measurement list----------------------------------
def get_odorinfo_from_label(label):
    """Extract odor name and concentration from a measurement label.

    The expected file-name format is "odor_concentration_anything_else.tif",
    with underscore as the separating element. When no underscore is
    present the information cannot be extracted and placeholder values
    are returned.

    Returns a two-element list: [odor, concentration].
    """
    parts = label.split("_")
    if len(parts) <= 1:
        # No separator found; fall back to placeholders.
        return ['odor?', 'conc?']
    odor = parts[0]
    concentration = parts[1]
    # In the case the name is "odor_conc.tif", drop the extension from
    # the concentration field.
    if concentration.endswith('.tif'):
        concentration = concentration[:-4]
    return [odor, concentration]
def custom_func(list_row: pd.Series, animal_tag: str) -> pd.Series:
    """Fill in/override metadata for one measurement-list row.

    Sets fixed stimulus timing and bookkeeping columns, then parses odor
    name and concentration out of the row's label. This is applied
    internally to all rows of the measurement list; *animal_tag* is
    available for per-animal lookups but unused here.
    """
    list_row["StimONms"] = '3000'
    list_row["StimLen"] = '2000'
    list_row["Comment"] = 'create_measurement_list_ratio'
    list_row["Line"] = 'ham'
    # Extract odor and concentration from the measurement label.
    (list_row["Odour"], list_row["OConc"]) = get_odorinfo_from_label(list_row["Label"])
    try:
        float(list_row["OConc"])
    except (TypeError, ValueError):
        # Odor concentration is not a number; fall back to a fictive 0.
        # (Bug fix: the bare `except:` also swallowed KeyboardInterrupt
        # and friends; float() only raises TypeError/ValueError.)
        list_row["OConc"] = '0.0'
    if list_row["Label"][-4:] == '.tif':
        # Strip the file extension from the label.
        list_row["Label"] = list_row["Label"][:-4]
    return list_row
# ----------------------------------------------------------------------------------------------------------------------
# ------------------ A function defining the criteria for excluding measurements ---------------------------------------
# ------------------ Currently applicable only for tillvision setups ---------------------------------------------------
def measurement_filter(s):
    """Decide whether a measurement row should be kept.

    A row is kept when its label contains at least one underscore and
    neither 'Snapshot' nor 'Delta', AND its 'Timing_ms' entry is a string
    with at least two space-separated frame times that is not the
    no-calibration marker.

    Args:
        s: mapping-like row with 'Label' and 'Timing_ms' entries.

    Returns:
        bool: True when the measurement is usable.
    """
    name = s["Label"]
    # exclude snapshots, delta blocks and names without any '_'
    label_okay = ('Snapshot' not in name
                  and 'Delta' not in name
                  and '_' in name)
    timing = s["Timing_ms"]
    # exclude blocks with fewer than two frames or no calibration;
    # isinstance replaces the non-idiomatic `type(...) is str` check
    atleast_two_frames = (
        isinstance(timing, str)
        and timing != "(No calibration available)"
        and len(timing.split(' ')) >= 2
    )
    return label_okay and atleast_two_frames
# ______________________________________________________________________________________________________________________
# ------------------ names of columns that will be overwritten by old values -------------------------------------------
# ------ these will only be used if a measurement list file with the same name as current output file exists -----------
# Columns for which values from a pre-existing list file (same name as the
# current output file) take precedence, so manual edits are not lost.
overwrite_old_values = ["Line", "PxSzX", "PxSzY", "Age", "Sex", "Prefer",
                        "Comment", "Analyze", "Odour", "OConc"]
# ______________________________________________________________________________________________________________________
if __name__ == "__main__":
# initialize a FlagsManager object with values specified above
flags = FlagsManager()
flags.update_flags({"STG_MotherOfAllFolders": STG_MotherOfAllFolders,
"STG_OdorInfoPath": STG_OdorInfoPath,
"STG_Datapath": STG_Datapath})
# initialize importer
importer_class = get_importer_class(LE_loadExp)
importer = importer_class(default_values)
# open a dialog for choosing raw data files
# this returns a dictionary where keys are animal tags (STG_ReportTag) and
# values are lists of associated raw data files
animal_tag_raw_data_mapping = importer.ask_for_files(default_dir=flags["STG_Datapath"])
# make sure some files were chosen
assert len(animal_tag_raw_data_mapping) > 0, IOError("No files were chosen!")
for animal_tag, raw_data_files in animal_tag_raw_data_mapping.items():
# automatically parse metadata
metadata_df = importer.import_metadata(raw_data_files=raw_data_files,
measurement_filter=measurement_filter)
# inform user if no usable measurements were found
if metadata_df.shape[0] == 0:
logging.info(f"No usable measurements we found among the files "
f"chosen for the animal {animal_tag}. Not creating a list file")
else:
# create a new Measurement list object from parsed metadata
measurement_list = MeasurementList.create_from_df(LE_loadExp=LE_loadExp,
df=metadata_df)
# apply custom modifications
measurement_list.update_from_custom_func(custom_func=custom_func, animal_tag=animal_tag)
# set anaylze to 0 if raw data files don't exist
flags.update_flags({"STG_ReportTag": animal_tag})
measurement_list.sanitize(flags=flags,
data_file_extensions=importer.movie_data_extensions)
# sort by time as in column "UTC"
#sorted_df = df.sort_values(by=['Column_name'], ascending=True)
# does not work if the list file already existed.
measurement_list.measurement_list_df = measurement_list.measurement_list_df.sort_values(by=['UTC'], ascending=True)
# construct the name of the output file
#AskAjay - what I | |
data = h[0].data.tostring()
headerFields = []
try:
numberOfCols = header['NCOLTBL']
except KeyError as e:
print("Cannot read the DCMP table column headers.")
print(e)
return []
try:
for i in range(numberOfCols):
headerFields.append(header['COLTBL%s'%(i+1)])
except KeyError as e:
print("Cannot read the DCMP table column headers.")
print(e)
return []
headerLine = ' '.join(headerFields)
headerLine += '\n'
asciiData = headerLine + data
dataDictFile = csv.DictReader(io.StringIO(asciiData), delimiter=' ', skipinitialspace = True)
# While we're here, let's add the RA and Dec. Don't forget we can only iterate once,
# so let's create a list of dicts and return it.
#wcs = wcs.WCS(header)
dcmpData = []
if xy2skyConvertByList:
xyList = []
# First scan through the list and add the exptime, mjd and zeropt
# and grabbing the x and y values as we go.
for row in dataDictFile:
# 2015-06-07 KWS Remove None key - I've no idea how it gets there
try:
del row[None]
except KeyError:
print("no None Keys")
row['exptime'] = exptime
row['mjd'] = mjd
row['zeropt'] = zeropt
xyList.append([row['Xpos'], row['Ypos']])
dcmpData.append(row)
# Now pass the list of [x, y] values to the bulk converter
raDecList = xy2skyList(filename, xyList)
if len(raDecList) != len(xyList):
# Something went horribly wrong
dcmpData = []
else:
# OK to continue. Iterate through each row and
# add the RA and Dec to each dict
for i in range(len(xyList)):
dcmpData[i]['RA'] = raDecList[i][0]
dcmpData[i]['Dec'] = raDecList[i][1]
else:
for row in dataDictFile:
#ra, dec = wcs.wcs_pix2sky(float(row['Xpos']),float(row['Ypos']), 1)
ra, dec = xy2sky(filename, row['Xpos'], row['Ypos'])
row['RA'] = ra
row['Dec'] = dec
# 2015-01-12 KWS Adding in exposure time, mjd and zeropoint so that we can do a one-off
# conversion to (e.g.) JSON and read the info from there when doing cross
# matching.
row['exptime'] = exptime
row['mjd'] = mjd
row['zeropt'] = zeropt
dcmpData.append(row)
return header, dcmpData
def createDS9RegionFile(dirName, data, radec = True, size = 0.02, colour = 'cyan'):
    """
    Generic code for creating a DS9 region file, one file per exposure name.
    **Key Arguments:**
        - ``dirName`` -- directory/prefix the region files are written under
          (the exposure name and '.reg' are appended directly)
        - ``data`` -- rows of (expName, coord1, coord2, label); coordinates in
          RA and DEC (decimal degrees) or x and y. Rows are expected to be
          grouped by exposure name — a previously seen name reopens (and
          truncates) its file.
        - ``radec`` -- Boolean value indicating ra and dec (True) or x and y
          (False) - not currently used
        - ``size`` -- size of the markers
        - ``colour`` -- colour of the markers
    **Return:**
        - None
    **Todo**
        - Implement the x, y format
    """
    previousExpName = ''
    rf = None
    for row in data:
        expName = row[0]
        if expName != previousExpName:
            # New exposure: close the previous file and start a new one
            # with the DS9 header.
            if rf:
                rf.close()
            rf = open(dirName + expName + '.reg', 'w')
            rf.write('# Region file format: DS9 version 4.1\n' +
                     'global color=%s dashlist=8 3 width=1 font="helvetica 14 normal" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\n' % (colour) +
                     'linear\n')
            previousExpName = expName
        # One circle marker per row (previously duplicated in both branches).
        rf.write('circle %f %f %.2f # color=%s text={%s}\n' % (row[1], row[2], size, colour, row[3]))
    if rf:
        rf.close()
# 2015-06-03 KWS Calculate Position Angle of body 1 wrt body 2
# 2017-08-30 KWS Finally fixed the PA calculation. It should return a value
# between -90 and +270. (Must be a convention...)
def calculatePositionAngle(ra1, dec1, ra2, dec2):
    """
    Calculate the position angle (bearing) of body 1 w.r.t. body 2.

    Coordinates containing a colon are assumed to be sexagesimal and are
    converted to decimal degrees first. Returns None when any coordinate
    is (or converts to) None.
    """
    converted = []
    for value, is_ra in ((ra1, True), (dec1, False), (ra2, True), (dec2, False)):
        if ':' in str(value):
            value = sexToDec(value, ra=is_ra)
        converted.append(value)
    if any(v is None for v in converted):
        return None
    # Work in radians; inputs may still be strings, hence float().
    raA, decA, raB, decB = (math.radians(float(v)) for v in converted)
    numerator = math.sin(raA - raB)
    denominator = math.cos(decB) * math.tan(decA) - math.sin(decB) * math.cos(raA - raB)
    angle = math.degrees(math.atan(numerator / denominator))
    # atan only covers (-90, 90); flip into the opposite half-plane when
    # the denominator is negative, giving a value between -90 and +270.
    if denominator < 0.0:
        angle += 180.0
    return angle
# 2017-08-30 KWS Are the object coordinates inside an ATLAS footprint?
ATLAS_CONESEARCH_RADIUS = 13888.7 # (i.e. sqrt(5280 * 1.86^2 + 5280 * 1.86^2) )
ATLAS_HALF_WIDTH = 5280 * 1.86 # (also = 13888.7 * cos(45) )
def isObjectInsideATLASFootprint(objectRA, objectDec, fpRA, fpDec, separation = None):
    """Return True if the object lies inside the square ATLAS footprint
    centred on (fpRA, fpDec).

    Args:
        objectRA: object right ascension (decimal degrees).
        objectDec: object declination (decimal degrees).
        fpRA: footprint centre right ascension.
        fpDec: footprint centre declination.
        separation: optional precomputed angular separation; computing it
            is expensive, so pass it in when already available.
    """
    if separation is None:
        separation = getAngularSeparation(objectRA, objectDec, fpRA, fpDec)
    # Bearing of the object w.r.t. the footprint centre, folded into [0, 90).
    bearing = calculatePositionAngle(objectRA, objectDec, fpRA, fpDec) + 90.0
    if 90.0 <= bearing < 180.0:
        bearing -= 90.0
    elif 180.0 <= bearing < 270.0:
        bearing -= 180.0
    elif bearing >= 270.0:
        bearing -= 270.0
    # Bearing only needs to be between -45 and +45 degrees of a square side.
    offAxis = abs(bearing - 45.0)
    # Projected distance towards the nearest footprint edge.
    dist = float(separation) * math.cos(math.radians(45.0 - offAxis))
    return dist <= ATLAS_HALF_WIDTH
# Add a grammatical join. Used by the Transient name server when adding lists of users
# to the comments section.
def grammarJoin(words):
    """Join words for humans: 'a', 'a and b', 'a, b and c'.

    Args:
        words: sequence of strings (may be empty).
    """
    if not words:
        return ''
    head = ', '.join(words[:-1])
    last = words[-1]
    # When everything before the last word is empty, just return the last
    # word (mirrors the truthiness handling of the original reduce form).
    return head + ' and ' + last if head else last
# Free-text coordinate / object-name parsing patterns.
# All patterns are raw strings: they contain escapes such as \- and \. which
# are invalid escape sequences in plain string literals (deprecated since
# Python 3.6). The dead initial `COORDS_DEC_REGEX = ""` assignment and the
# superseded commented-out pattern variants have been removed.
# 2019-04-17 KWS Made the sexagesimal regex a bit more forgiving.
#   h h m m s s . f (+-) d d m m s s . f (radius)
COORDS_SEX_REGEX = r"^([0-2]{0,1}[0-9])[^0-9+\-\.]{0,}([0-5]{0,1}[0-9])[^0-9+\-\.]{0,}([0-5]{0,1}[0-9])(\.[0-9]+){0,1}[^0-9+\-\.]{0,}([+-]){0,1}([0-9]{0,1}[0-9])[^0-9+\-\.]{0,}([0-5]{0,1}[0-9])[^0-9+\-\.]{0,}([0-5]{0,1}[0-9])(\.[0-9]+){0,1}[^0-9+\-\.]{0,}(([0-9][0-9]{0,1})){0,1}$"
COORDS_SEX_REGEX_COMPILED = re.compile(COORDS_SEX_REGEX)
# 2019-04-17 KWS Made the decimal regex a bit more forgiving.
#   d.f (+-) d.f (radius)
COORDS_DEC_REGEX = r"^([0-9]+(\.[0-9]+){0,1})[^0-9+\-]{0,}([+-]{0,1}[0-9]+(\.[0-9]+){0,1})[^0-9]{0,}(([0-9][0-9]{0,1})){0,1}$"
COORDS_DEC_REGEX_COMPILED = re.compile(COORDS_DEC_REGEX)
# 2019-04-17 KWS Made the name search more forgiving and extended it to ZTF.
NAME_REGEX = r"^(AT|SN|ATLAS|ASASSN-|ZTF|PS([1][\-]){0,1}){0,1} {0,1}([2][0]){0,1}([0-9][0-9][a-z]{1,7}|[0-9][0-9][A-Z])$"
NAME_REGEX_COMPILED = re.compile(NAME_REGEX)
def getObjectNamePortion(inputString):
    """Extract the transient-name portion of a free-text string.

    Rebuilds the name from the regex groups (optional survey prefix,
    optional century digits, and the designation), e.g. 'AT 2016ffx'
    becomes 'AT2016ffx'. Returns None when nothing matches.
    """
    match = NAME_REGEX_COMPILED.search(inputString)
    if not match:
        return None
    # Unmatched optional groups come back as None, never as ''.
    prefix = match.group(1) or ''
    century = match.group(3) or ''
    return prefix + century + match.group(4)
# 2019-04-30 KWS Changed the order of the match test. Try decimal then sexagesimal.
# Also check the values of each sexagesimal field.
def getCoordsAndSearchRadius(inputString):
    """Parse coordinates and an optional search radius from free text.

    Tries decimal degrees first, then sexagesimal (matching the 2019-04-30
    ordering change). Returns a dict with 'ra', 'dec' and 'radius' keys
    (radius may be None), or an empty dict when nothing valid is found.
    """
    sex = COORDS_SEX_REGEX_COMPILED.search(inputString)
    decimal = COORDS_DEC_REGEX_COMPILED.search(inputString)
    if decimal:
        ra = decimal.group(1)
        dec = decimal.group(3)
        radius = decimal.group(5)
        try:
            raValid = 0.0 <= float(ra) <= 360.0
            decValid = -90.0 <= float(dec) <= 90.0
        except ValueError:
            return {}
        if not (raValid and decValid):
            return {}
        return {'ra': ra, 'dec': dec, 'radius': radius}
    if sex:
        hh = sex.group(1)
        mm = sex.group(2)
        ss = sex.group(3)
        ffra = sex.group(4) or ''
        sign = sex.group(5) or '+'
        deg = sex.group(6)
        mn = sex.group(7)
        sec = sex.group(8)
        ffdec = sex.group(9) or ''
        try:
            fieldsValid = (int(hh) <= 24 and int(mm) <= 59 and int(ss) <= 59
                           and int(deg) <= 90 and int(mn) <= 59 and int(sec) <= 59)
        except ValueError:
            return {}
        if not fieldsValid:
            return {}
        return {'ra': "%s:%s:%s%s" % (hh, mm, ss, ffra),
                'dec': "%s%s:%s:%s%s" % (sign, deg, mn, sec, ffdec),
                'radius': sex.group(11)}
    return {}
# Return a string representing a float to digits decimal places, truncated.
# Used when we need to truncate an MJD to 3 decimal places for filename.
def truncate(f, digits):
    """Return *f* as a string truncated (not rounded) to `digits` decimals.

    Used e.g. to truncate an MJD to 3 decimal places for a filename.
    Note: `digits` of 0 leaves a trailing '.' (e.g. '2.').
    """
    # Expand to 30 decimal places, then slice off the unwanted tail.
    expanded = "{:.30f}".format(f)
    return expanded[:digits - 30]
# 2017-11-02 KWS Quick and dirty code to clean options dictionary as extracted by docopt.
def cleanOptions(options):
"""cleanOptions.
Args:
| |
import datetime
import pytest
from flask import url_for
from brewlog.models import TastingNote
from . import BrewlogTests
@pytest.mark.usefixtures('client_class')
class TestTastingNoteListView(BrewlogTests):
    """Note listing page: hidden brews must not leak to the wrong viewer."""

    @pytest.fixture(autouse=True)
    def set_up(self, user_factory, brewery_factory):
        self.public_user = user_factory()
        self.public_brewery = brewery_factory(brewer=self.public_user)
        self.hidden_user = user_factory(is_public=False)
        self.hidden_brewery = brewery_factory(brewer=self.hidden_user)
        self.url = url_for('tastingnote.all')

    def _make_noted_brews(self, brew_factory, tasting_note_factory):
        """Create one visible brew and two hidden ones (hidden via the
        brewery and hidden directly), each with tasting notes."""
        visible = brew_factory(brewery=self.public_brewery, name='public_1')
        tasting_note_factory(brew=visible, author=self.public_user)
        tasting_note_factory(brew=visible, author=self.hidden_user)
        hidden_by_brewery = brew_factory(brewery=self.hidden_brewery, name='hidden_1')
        tasting_note_factory(brew=hidden_by_brewery, author=self.hidden_user)
        hidden_directly = brew_factory(
            brewery=self.public_brewery, is_public=False, name='hidden_2'
        )
        tasting_note_factory(brew=hidden_directly, author=self.public_user)
        return visible, hidden_by_brewery, hidden_directly

    def test_get_anon(self, brew_factory, tasting_note_factory):
        """Anonymous visitors see only the fully public brew."""
        visible, hidden_by_brewery, hidden_directly = self._make_noted_brews(
            brew_factory, tasting_note_factory
        )
        resp = self.client.get(self.url)
        assert f'{visible.name}</a>' in resp.text
        assert f'{hidden_by_brewery.name}</a>' not in resp.text
        assert f'{hidden_directly.name}</a>' not in resp.text

    def test_get_authenticated(self, brew_factory, tasting_note_factory):
        """The brewery owner additionally sees their own hidden brew."""
        visible, hidden_by_brewery, hidden_directly = self._make_noted_brews(
            brew_factory, tasting_note_factory
        )
        self.login(self.public_user.email)
        resp = self.client.get(self.url)
        assert f'{visible.name}</a>' in resp.text
        assert f'{hidden_by_brewery.name}</a>' not in resp.text
        assert f'{hidden_directly.name}</a>' in resp.text
@pytest.mark.usefixtures('client_class')
class TestTastingNoteCreateView(BrewlogTests):
    """Access control around adding tasting notes to brews."""

    @pytest.fixture(autouse=True)
    def set_up(self, user_factory, brewery_factory):
        self.public_user = user_factory(is_public=True)
        self.public_brewery = brewery_factory(brewer=self.public_user)
        self.hidden_user = user_factory(is_public=False)
        self.hidden_brewery = brewery_factory(brewer=self.hidden_user)

    @staticmethod
    def _note_payload(text='Nice beer, cheers!'):
        """Form payload for a new tasting note dated today."""
        return {'text': text, 'date': datetime.date.today().isoformat()}

    @pytest.mark.parametrize('public', [
        True, False
    ], ids=['public', 'hidden'])
    def test_get_anon(self, public, brew_factory):
        """Anonymous GET redirects to the login selector."""
        target = brew_factory(brewery=self.public_brewery, is_public=public)
        url = url_for('tastingnote.add', brew_id=target.id)
        resp = self.client.get(url)
        assert resp.status_code == 302
        assert url_for('auth.select') in resp.headers['location']

    def test_get_anon_hidden_indirect(self, brew_factory):
        """Anonymous GET redirects even when only the brewery is hidden."""
        target = brew_factory(brewery=self.hidden_brewery, is_public=True)
        url = url_for('tastingnote.add', brew_id=target.id)
        resp = self.client.get(url)
        assert resp.status_code == 302
        assert url_for('auth.select') in resp.headers['location']

    @pytest.mark.parametrize('public', [
        True, False
    ], ids=['public', 'hidden'])
    def test_post_anon(self, public, brew_factory):
        """Anonymous POST redirects to the login selector."""
        target = brew_factory(brewery=self.public_brewery, is_public=public)
        url = url_for('tastingnote.add', brew_id=target.id)
        resp = self.client.post(url, data=self._note_payload())
        assert resp.status_code == 302
        assert url_for('auth.select') in resp.headers['location']

    def test_post_anon_hidden_indirect(self, brew_factory):
        """Anonymous POST redirects even when only the brewery is hidden."""
        target = brew_factory(brewery=self.hidden_brewery, is_public=True)
        url = url_for('tastingnote.add', brew_id=target.id)
        resp = self.client.post(url, data=self._note_payload())
        assert resp.status_code == 302
        assert url_for('auth.select') in resp.headers['location']

    def test_get_authenticated_to_public(self, brew_factory):
        """Any logged-in user gets the form for a fully public brew."""
        target = brew_factory(brewery=self.public_brewery)
        url = url_for('tastingnote.add', brew_id=target.id)
        self.login(self.hidden_user.email)
        resp = self.client.get(url)
        assert f'action="{url}"' in resp.text

    def test_post_authenticated_to_public(self, brew_factory, user_factory):
        """Any logged-in user may note a fully public brew."""
        target = brew_factory(brewery=self.public_brewery)
        payload = self._note_payload()
        url = url_for('tastingnote.add', brew_id=target.id)
        actor = user_factory()
        self.login(actor.email)
        resp = self.client.post(url, data=payload, follow_redirects=True)
        assert '<p>{}</p>'.format(payload['text']) in resp.text

    @pytest.mark.parametrize('brewer,brew', [
        (True, True),
        (True, False),
        (False, True)
    ], ids=['hidden-hidden', 'hidden-public', 'public-hidden'])
    def test_get_authenticated_to_hidden(
        self, brewer, brew, brew_factory, user_factory
    ):
        """A stranger gets 404 when the brewery or the brew is hidden."""
        brewery = self.hidden_brewery if brewer is True else self.public_brewery
        # `brew` parameter means "brew hidden", so invert for is_public
        brew_is_public = not brew
        target = brew_factory(brewery=brewery, is_public=brew_is_public)
        url = url_for('tastingnote.add', brew_id=target.id)
        actor = user_factory()
        self.login(actor.email)
        resp = self.client.get(url)
        assert resp.status_code == 404

    @pytest.mark.parametrize('brewer,brew', [
        (True, True),
        (True, False),
        (False, True)
    ], ids=['hidden-hidden', 'hidden-public', 'public-hidden'])
    def test_post_authenticated_to_hidden(
        self, brewer, brew, brew_factory, user_factory
    ):
        """A stranger's POST gets 404 when brewery or brew is hidden."""
        brewery = self.hidden_brewery if brewer is True else self.public_brewery
        # `brew` parameter means "brew hidden", so invert for is_public
        brew_is_public = not brew
        target = brew_factory(brewery=brewery, is_public=brew_is_public)
        url = url_for('tastingnote.add', brew_id=target.id)
        actor = user_factory()
        self.login(actor.email)
        resp = self.client.post(url, data=self._note_payload())
        assert resp.status_code == 404
@pytest.mark.usefixtures('client_class')
class TestTastingNoteDeleteView(BrewlogTests):
    """Access control around deleting tasting notes.

    Only the note's author or the brewery owner may delete a note; hidden
    brews and breweries must 404 for everybody else, and anonymous users
    are redirected to login.
    """

    @pytest.fixture(autouse=True)
    def set_up(self, user_factory, brewery_factory):
        self.public_user = user_factory(is_public=True)
        self.public_brewery = brewery_factory(brewer=self.public_user)
        self.hidden_user = user_factory(is_public=False)
        self.hidden_brewery = brewery_factory(brewer=self.hidden_user)
        self.author = user_factory()

    def url(self, note):
        """Delete-view URL for the given note."""
        return url_for('tastingnote.delete', note_id=note.id)

    @pytest.mark.parametrize('public', [
        True, False
    ], ids=['public', 'hidden'])
    def test_get_anon(self, public, brew_factory, tasting_note_factory):
        """Anonymous GET redirects to login."""
        brew = brew_factory(brewery=self.public_brewery, is_public=public)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        rv = self.client.get(url)
        assert rv.status_code == 302
        assert url_for('auth.select') in rv.headers['location']

    @pytest.mark.parametrize('public_brew', [
        True, False
    ], ids=['public', 'hidden'])
    def test_get_anon_hidden_indirect(
        self, public_brew, brew_factory, tasting_note_factory
    ):
        """Anonymous GET redirects when the brewery is hidden."""
        brew = brew_factory(brewery=self.hidden_brewery, is_public=public_brew)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        rv = self.client.get(url)
        assert rv.status_code == 302
        assert url_for('auth.select') in rv.headers['location']

    @pytest.mark.parametrize('public', [
        True, False
    ], ids=['public', 'hidden'])
    def test_post_anon(self, public, brew_factory, tasting_note_factory):
        """Anonymous POST redirects to login."""
        brew = brew_factory(brewery=self.public_brewery, is_public=public)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        data = {'delete_it': True}
        rv = self.client.post(url, data=data)
        assert rv.status_code == 302
        assert url_for('auth.select') in rv.headers['location']

    @pytest.mark.parametrize('public_brew', [
        True, False
    ], ids=['public', 'hidden'])
    def test_post_anon_hidden_indirect(
        self, public_brew, brew_factory, tasting_note_factory
    ):
        """Anonymous POST redirects when the brewery is hidden."""
        brew = brew_factory(brewery=self.hidden_brewery, is_public=public_brew)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        data = {'delete_it': True}
        rv = self.client.post(url, data=data)
        assert rv.status_code == 302
        assert url_for('auth.select') in rv.headers['location']

    def test_get_authenticated_to_public(
        self, user_factory, brew_factory, tasting_note_factory
    ):
        """A user who is neither author nor owner gets 403."""
        brew = brew_factory(brewery=self.public_brewery, is_public=True)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        actor = user_factory()
        url = self.url(note)
        self.login(actor.email)
        rv = self.client.get(url)
        assert rv.status_code == 403

    def test_post_authenticated_to_public(
        self, user_factory, brew_factory, tasting_note_factory
    ):
        """A POST by a non-author, non-owner user gets 403.

        Bug fix: this test previously issued ``client.get(url, data=data)``
        although its name and payload say it tests the POST path; it now
        actually sends the POST request.
        """
        brew = brew_factory(brewery=self.public_brewery, is_public=True)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        actor = user_factory()
        url = self.url(note)
        data = {'delete_it': True}
        self.login(actor.email)
        rv = self.client.post(url, data=data)
        assert rv.status_code == 403

    def test_get_authenticated_to_hidden(
        self, user_factory, brew_factory, tasting_note_factory
    ):
        """A stranger gets 404 for a note on a hidden brew."""
        brew = brew_factory(brewery=self.public_brewery, is_public=False)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        actor = user_factory()
        url = self.url(note)
        self.login(actor.email)
        rv = self.client.get(url)
        assert rv.status_code == 404

    def test_post_authenticated_to_hidden(
        self, user_factory, brew_factory, tasting_note_factory
    ):
        """A stranger's POST gets 404 for a note on a hidden brew."""
        brew = brew_factory(brewery=self.public_brewery, is_public=False)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        actor = user_factory()
        url = self.url(note)
        data = {'delete_it': True}
        self.login(actor.email)
        rv = self.client.post(url, data=data)
        assert rv.status_code == 404

    @pytest.mark.parametrize('public_brew', [
        True, False
    ], ids=['public', 'hidden'])
    def test_post_authenticated_to_hidden_indirect(
        self, public_brew, user_factory, brew_factory, tasting_note_factory
    ):
        """A stranger's POST gets 404 when the brewery is hidden."""
        brew = brew_factory(brewery=self.hidden_brewery, is_public=public_brew)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        actor = user_factory()
        url = self.url(note)
        data = {'delete_it': True}
        self.login(actor.email)
        rv = self.client.post(url, data=data)
        assert rv.status_code == 404

    def test_get_author_to_public(self, brew_factory, tasting_note_factory):
        """The note author gets the delete form for a public brew."""
        brew = brew_factory(brewery=self.public_brewery, is_public=True)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        self.login(self.author.email)
        rv = self.client.get(url)
        assert f'action="{url}"' in rv.text

    def test_post_author_to_public(self, brew_factory, tasting_note_factory):
        """The note author may delete their note on a public brew."""
        brew = brew_factory(brewery=self.public_brewery, is_public=True)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        data = {'delete_it': True}
        self.login(self.author.email)
        rv = self.client.post(url, data=data, follow_redirects=True)
        assert 'has been deleted' in rv.text

    def test_get_author_to_hidden(self, brew_factory, tasting_note_factory):
        """Even the author gets 404 once the brew is hidden."""
        brew = brew_factory(brewery=self.public_brewery, is_public=False)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        self.login(self.author.email)
        rv = self.client.get(url)
        assert rv.status_code == 404

    def test_post_author_to_hidden(self, brew_factory, tasting_note_factory):
        """Even the author's POST gets 404 once the brew is hidden."""
        brew = brew_factory(brewery=self.public_brewery, is_public=False)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        data = {'delete_it': True}
        self.login(self.author.email)
        rv = self.client.post(url, data=data, follow_redirects=True)
        assert rv.status_code == 404

    @pytest.mark.parametrize('public_brew', [
        True, False
    ], ids=['public', 'hidden'])
    def test_get_author_to_hidden_indirect(
        self, public_brew, brew_factory, tasting_note_factory
    ):
        """The author gets 404 when the brewery is hidden."""
        brew = brew_factory(brewery=self.hidden_brewery, is_public=public_brew)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        self.login(self.author.email)
        rv = self.client.get(url)
        assert rv.status_code == 404

    @pytest.mark.parametrize('public_brew', [
        True, False
    ], ids=['public', 'hidden'])
    def test_post_author_to_hidden_indirect(
        self, public_brew, brew_factory, tasting_note_factory
    ):
        """The author's POST gets 404 when the brewery is hidden."""
        brew = brew_factory(brewery=self.hidden_brewery, is_public=public_brew)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        data = {'delete_it': True}
        self.login(self.author.email)
        rv = self.client.post(url, data=data)
        assert rv.status_code == 404

    @pytest.mark.parametrize('public_brewery,public_brew', [
        (True, True),
        (True, False),
        (False, True),
        (False, False)
    ], ids=['public-public', 'public-hidden', 'hidden-public', 'hidden-hidden'])
    def test_get_owner(
        self, public_brewery, public_brew, brew_factory, tasting_note_factory
    ):
        """The brewery owner always gets the delete form."""
        if public_brewery:
            brewery = self.public_brewery
        else:
            brewery = self.hidden_brewery
        brew = brew_factory(brewery=brewery, is_public=public_brew)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        self.login(brewery.brewer.email)
        rv = self.client.get(url)
        assert f'action="{url}"' in rv.text

    @pytest.mark.parametrize('public_brewery,public_brew', [
        (True, True),
        (True, False),
        (False, True),
        (False, False)
    ], ids=['public-public', 'public-hidden', 'hidden-public', 'hidden-hidden'])
    def test_post_owner(
        self, public_brewery, public_brew, brew_factory, tasting_note_factory
    ):
        """The brewery owner may always delete notes on their brews."""
        if public_brewery:
            brewery = self.public_brewery
        else:
            brewery = self.hidden_brewery
        brew = brew_factory(brewery=brewery, is_public=public_brew)
        note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
        url = self.url(note)
        data = {'delete_it': True}
        self.login(brewery.brewer.email)
        rv = self.client.post(url, data=data, follow_redirects=True)
        assert 'has been deleted' in rv.text
@pytest.mark.usefixtures('client_class')
class TestTastingNoteLoadView(BrewlogTests):
    @pytest.fixture(autouse=True)
    def set_up(self, user_factory, brewery_factory):
        # One public and one hidden brewer/brewery pair, plus a dedicated
        # note author; the view under test serves raw note text by id.
        self.public_user = user_factory()
        self.public_brewery = brewery_factory(brewer=self.public_user)
        self.hidden_user = user_factory(is_public=False)
        self.hidden_brewery = brewery_factory(brewer=self.hidden_user)
        self.author = user_factory()
        self.url = url_for('tastingnote.loadtext')
def test_missing_note_id(self):
rv = self.client.get(self.url)
assert rv.status_code == 400
def test_nonexisting_note_id(self):
rv = self.client.get(self.url, query_string={'id': 666})
assert rv.status_code == 404
@pytest.mark.parametrize('public_brewery,public_brew', [
(True, True),
(True, False),
(False, True),
(False, False),
])
def test_get_anon(
self, public_brewery, public_brew, brew_factory, tasting_note_factory
):
if public_brewery:
brewery = self.public_brewery
else:
brewery = self.hidden_brewery
brew = brew_factory(brewery=brewery, is_public=public_brew)
note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
rv = self.client.get(self.url, query_string={'id': note.id})
assert rv.status_code == 200
assert note.text in rv.text
@pytest.mark.parametrize('public_brewery,public_brew', [
(True, True),
(True, False),
(False, True),
(False, False),
])
def test_get_authenticated(
self, public_brewery, public_brew,
user_factory, brew_factory, tasting_note_factory,
):
if public_brewery:
brewery = self.public_brewery
else:
brewery = self.hidden_brewery
brew = brew_factory(brewery=brewery, is_public=public_brew)
note = tasting_note_factory(brew=brew, author=self.author, text='Good stuff')
actor = user_factory()
self.login(actor.email)
rv = self.client.get(self.url, query_string={'id': note.id})
assert rv.status_code == 200
assert note.text in rv.text
@pytest.mark.parametrize('public_brewery,public_brew', [
(True, True),
(True, False),
(False, True),
(False, False),
])
def test_get_author(
self, public_brewery, public_brew, brew_factory, tasting_note_factory
):
if public_brewery:
| |
building.
if split.startswith("test"):
continue
path = self.files_by_split[split]
yield from self.load_data(path)
def count_examples(self):
''' Compute here b/c we're streaming the sentences. '''
example_counts = {}
for split, split_path in self.files_by_split.items():
example_counts[split] = sum(
1 for line in codecs.open(
split_path, 'r', 'utf-8', errors='ignore'))
self.example_counts = example_counts
def process_split(self, split, indexers) -> Iterable[Type[Instance]]:
''' Process split text into a list of AllenNLP Instances. '''
def _make_instance(input, target):
d = {}
d["inputs"] = sentence_to_text_field(input, indexers)
d["targs"] = sentence_to_text_field(target, self.target_indexer) # this line changed
return Instance(d)
for sent1, sent2 in split:
yield _make_instance(sent1, sent2)
def get_metrics(self, reset=False):
'''Get metrics specific to the task'''
avg_nll = self.scorer1.get_metric(reset)
unk_ratio_macroavg = self.scorer3.get_metric(reset)
return {
'perplexity': math.exp(avg_nll),
'bleu_score': 0,
'unk_ratio_macroavg': unk_ratio_macroavg}
@register_task('wmt_debug', rel_path='wmt_debug/', max_targ_v_size=5000)
class MTDebug(MTTask):
    """Small demo MT task (10k training examples)."""

    def __init__(self, path, max_seq_len, max_targ_v_size, name='wmt_debug'):
        ''' Demo task for MT with 10k training examples.'''
        super().__init__(path=path, max_seq_len=max_seq_len,
                         max_targ_v_size=max_targ_v_size, name=name)
        split_files = (("train", "train.txt"), ("val", "valid.txt"), ("test", "test.txt"))
        self.files_by_split = {split: os.path.join(path, fname)
                               for split, fname in split_files}
@register_task('wmt17_en_ru', rel_path='wmt17_en_ru/', max_targ_v_size=20000)
class MTTaskEnRu(MTTask):
    """WMT'17 English-Russian translation task."""

    def __init__(self, path, max_seq_len, max_targ_v_size, name='mt_en_ru'):
        ''' MT En-Ru'''
        super().__init__(path=path, max_seq_len=max_seq_len,
                         max_targ_v_size=max_targ_v_size, name=name)
        split_files = (("train", "train.txt"), ("val", "valid.txt"), ("test", "test.txt"))
        self.files_by_split = {split: os.path.join(path, fname)
                               for split, fname in split_files}
@register_task('wmt14_en_de', rel_path='wmt14_en_de/', max_targ_v_size=20000)
class MTTaskEnDe(MTTask):
    """WMT'14 English-German translation task."""

    def __init__(self, path, max_seq_len, max_targ_v_size, name='mt_en_de'):
        ''' MT En-De'''
        super().__init__(path=path, max_seq_len=max_seq_len,
                         max_targ_v_size=max_targ_v_size, name=name)
        split_files = (("train", "train.txt"), ("val", "valid.txt"), ("test", "test.txt"))
        self.files_by_split = {split: os.path.join(path, fname)
                               for split, fname in split_files}
@register_task('reddit_s2s', rel_path='Reddit_2008/', max_targ_v_size=0)
@register_task('reddit_s2s_3.4G', rel_path='Reddit_3.4G/', max_targ_v_size=0)
@register_task('reddit_s2s_dummy', rel_path='Reddit_2008_TestSample/', max_targ_v_size=0)
class RedditSeq2SeqTask(MTTask):
    ''' Seq2seq task over Reddit comment data.

    Note: max_targ_v_size doesn't do anything here b/c the
    target is in English.'''

    def __init__(self, path, max_seq_len, max_targ_v_size, name='reddit_s2s'):
        super().__init__(path=path, max_seq_len=max_seq_len,
                         max_targ_v_size=max_targ_v_size, name=name)
        self._label_namespace = None
        self.target_indexer = {"words": SingleIdTokenIndexer("tokens")}
        self.files_by_split = {split: os.path.join(path, split + ".csv")
                               for split in ("train", "val", "test")}

    def load_data(self, path):
        ''' Stream (source, target) sentence pairs from a tab-separated file;
        rows without both a source (col 2) and a target (col 3) are skipped. '''
        with codecs.open(path, 'r', 'utf-8', errors='ignore') as txt_fh:
            for line in txt_fh:
                fields = line.strip().split('\t')
                if len(fields) < 4 or not fields[2] or not fields[3]:
                    continue
                src_sent = process_sentence(fields[2], self.max_seq_len)
                tgt_sent = process_sentence(fields[3], self.max_seq_len,
                                            sos_tok=allennlp_util.START_SYMBOL,
                                            eos_tok=allennlp_util.END_SYMBOL,
                                            )
                yield (src_sent, tgt_sent)
@register_task('wiki103_classif', rel_path='WikiText103/')
class Wiki103Classification(PairClassificationTask):
    '''Pair classification task over WikiText103: consecutive sentence pairs
    are streamed as (binary) classification examples; every streamed pair is
    labeled 1 (adjacent).'''

    def __init__(self, path, max_seq_len, name="wiki103_classif"):
        super().__init__(name, 2)
        self.scorer2 = None
        self.val_metric = "%s_accuracy" % self.name
        self.val_metric_decreases = False
        self.files_by_split = {'train': os.path.join(path, "train.sentences.txt"),
                               'val': os.path.join(path, "valid.sentences.txt"),
                               'test': os.path.join(path, "test.sentences.txt")}
        self.max_seq_len = max_seq_len
        self.min_seq_len = 0

    def get_split_text(self, split: str):
        ''' Get split text as iterable of records.
        Split should be one of 'train', 'val', or 'test'.
        '''
        return self.load_data(self.files_by_split[split])

    def load_data(self, path):
        ''' Stream tokenized sentences rather than returning a whole list.
        See WikiTextLMTask for an explanation of the preproc. '''
        nonatomics_toks = [UNK_TOK_ALLENNLP, '<unk>']
        with open(path) as txt_fh:
            for row in txt_fh:
                toks = row.strip()
                if not toks:
                    continue
                sent = _atomic_tokenize(toks, UNK_TOK_ATOMIC, nonatomics_toks, self.max_seq_len)
                # Skip section headings ("= Title =") and overly short lines.
                # NOTE(review): len(toks) is a *character* count of the raw
                # line, not a token count -- confirm this threshold is intended.
                if sent.count("=") >= 2 or len(toks) < self.min_seq_len + 2:
                    continue
                yield sent

    def get_sentences(self) -> Iterable[Sequence[str]]:
        ''' Yield sentences, used to compute vocabulary. '''
        for split in self.files_by_split:
            # Don't use test set for vocab building.
            if split.startswith("test"):
                continue
            path = self.files_by_split[split]
            for sent in self.load_data(path):
                yield sent

    def process_split(self, split, indexers) -> Iterable[Type[Instance]]:
        ''' Process a split: pair each sentence with its predecessor.
        Split is a single iterable of sentences here. '''
        def _make_instance(input1, input2, labels):
            d = {}
            d["input1"] = sentence_to_text_field(input1, indexers)
            d["input2"] = sentence_to_text_field(input2, indexers)
            d["labels"] = LabelField(labels, label_namespace="labels",
                                     skip_indexing=True)
            return Instance(d)
        first = True
        for sent in split:
            if first:
                prev_sent = sent
                first = False
                continue
            # All streamed pairs are adjacent, hence label 1.
            yield _make_instance(prev_sent, sent, 1)
            prev_sent = sent

    def count_examples(self):
        ''' Compute counts here b/c we're streaming the sentences. '''
        example_counts = {}
        for split, split_path in self.files_by_split.items():
            # pair count = sentence count - 1.
            # Use a context manager so the file handle is closed promptly
            # (the original leaked the handle until GC).
            with open(split_path) as split_fh:
                example_counts[split] = sum(1 for _ in split_fh) - 1
        self.example_counts = example_counts
@register_task('wiki103_s2s', rel_path='WikiText103/', max_targ_v_size=0)
class Wiki103Seq2SeqTask(MTTask):
    ''' Skip-thought-style seq2seq objective on WikiText103. '''

    def __init__(self, path, max_seq_len, max_targ_v_size, name='wiki103_mt'):
        ''' Note: max_targ_v_size does nothing here '''
        super().__init__(path, max_seq_len, max_targ_v_size, name)
        # For the skip-thoughts setting, every sentence followed by another
        # sentence is a source (all but the last); likewise for targets.
        self._nonatomic_toks = [UNK_TOK_ALLENNLP, '<unk>']
        self._label_namespace = None
        self.target_indexer = {"words": SingleIdTokenIndexer("tokens")}
        self.files_by_split = {split: os.path.join(path, fname)
                               for split, fname in [("train", "train.sentences.txt"),
                                                    ("val", "valid.sentences.txt"),
                                                    ("test", "test.sentences.txt")]}

    def load_data(self, path):
        ''' Stream (tokenized sentence, []) pairs from a sentences file. '''
        nonatomic_toks = self._nonatomic_toks
        with codecs.open(path, 'r', 'utf-8', errors='ignore') as txt_fh:
            for line in txt_fh:
                stripped = line.strip()
                if not stripped:
                    continue
                yield _atomic_tokenize(stripped, UNK_TOK_ATOMIC, nonatomic_toks,
                                       self.max_seq_len), []

    def get_num_examples(self, split_text):
        ''' Return number of examples in the result of get_split_text.
        Subclass can override this if data is not stored in column format.
        '''
        # pair count = sentence count - 1
        return len(split_text) - 1

    def process_split(self, split, indexers) -> Iterable[Type[Instance]]:
        ''' Pair each sentence with its successor.
        Split is a single iterable of sentences here.
        '''
        target_indexer = self.target_indexer

        def _make_instance(prev_sent, sent):
            return Instance({
                "inputs": sentence_to_text_field(prev_sent, indexers),
                "targs": sentence_to_text_field(sent, target_indexer),
            })

        prev_sent = None
        for sent, _ in split:
            if prev_sent is not None:
                yield _make_instance(prev_sent, sent)
            prev_sent = sent
@register_task('dissentwiki', rel_path='DisSent/wikitext/')
class DisSentTask(PairClassificationTask):
    ''' Task class for DisSent, dataset agnostic.
    Based on Nie, Bennett, and Goodman (2017), but with different datasets.

    NOTE(review): registering this base class directly looks suspicious --
    __init__ requires a `prefix` argument that the registry decorator does
    not supply. Confirm whether the decorator belongs on a subclass.
    '''

    def __init__(self, path, max_seq_len, prefix, name="dissent"):
        ''' There are 8 classes because there are 8 discourse markers in
        the dataset (and, but, because, if, when, before, though, so)
        '''
        super().__init__(name, 8)
        self.max_seq_len = max_seq_len
        self.files_by_split = {"train": os.path.join(path, "%s.train" % prefix),
                               "val": os.path.join(path, "%s.valid" % prefix),
                               "test": os.path.join(path, "%s.test" % prefix)}

    def get_split_text(self, split: str):
        ''' Get split text as iterable of records.
        Split should be one of 'train', 'val', or 'test'.
        '''
        return self.load_data(self.files_by_split[split])

    def load_data(self, path):
        ''' Stream (sentence1, sentence2, label) triples from a TSV file. '''
        with open(path, 'r') as txt_fh:
            for row in txt_fh:
                row = row.strip().split('\t')
                # Expect exactly three non-empty columns; skip malformed rows.
                if len(row) != 3 or not (row[0] and row[1] and row[2]):
                    continue
                sent1 = process_sentence(row[0], self.max_seq_len)
                sent2 = process_sentence(row[1], self.max_seq_len)
                targ = int(row[2])
                yield (sent1, sent2, targ)

    def get_sentences(self) -> Iterable[Sequence[str]]:
        ''' Yield sentences, used to compute vocabulary. '''
        for split in self.files_by_split:
            # Don't use test set for vocab building.
            # (The original used a bare string statement here as a comment.)
            if split.startswith("test"):
                continue
            path = self.files_by_split[split]
            for sent1, sent2, _ in self.load_data(path):
                yield sent1
                yield sent2

    def count_examples(self):
        ''' Compute the counts here b/c we're streaming the sentences. '''
        example_counts = {}
        for split, split_path in self.files_by_split.items():
            # Use a context manager so the file handle is closed promptly
            # (the original leaked the handle until GC).
            with open(split_path) as split_fh:
                example_counts[split] = sum(1 for _ in split_fh)
        self.example_counts = example_counts

    def process_split(self, split, indexers) -> Iterable[Type[Instance]]:
        ''' Process split text into a list of AllenNLP Instances. '''
        def _make_instance(input1, input2, labels):
            d = {}
            d["input1"] = sentence_to_text_field(input1, indexers)
            d["input2"] = sentence_to_text_field(input2, indexers)
            d["labels"] = LabelField(labels, label_namespace="labels",
                                     skip_indexing=True)
            return Instance(d)
        for sent1, sent2, trg in split:
            yield _make_instance(sent1, sent2, trg)
class DisSentWikiSingleTask(DisSentTask):
    ''' DisSent on WikiText103, restricted to clause pairs drawn from within
    a single sentence. Data sets should be prepared as described in Nie,
    Bennett, and Goodman (2017). '''

    def __init__(self, path, max_seq_len, name="dissentwiki"):
        super().__init__(path, max_seq_len,
                         prefix="wikitext.dissent.single_sent", name=name)
@register_task('dissentwikifullbig', rel_path='DisSent/wikitext/')
class DisSentWikiBigFullTask(DisSentTask):
    ''' DisSent on WikiText103, with clause pairs drawn from within a single
    sentence or across two sentences. Data sets should be prepared as
    described in Nie, Bennett, and Goodman (2017). '''

    def __init__(self, path, max_seq_len, name="dissentwikifullbig"):
        super().__init__(path, max_seq_len,
                         prefix="wikitext.dissent.big", name=name)
@register_task('weakgrounded', rel_path='mscoco/weakgrounded/')
class WeakGroundedTask(PairClassificationTask):
    ''' Task class for weakly grounded sentences, i.e. training on pairs of
    captions: positive = captions of the same image, negative = captions of
    different images. '''

    def __init__(self, path, max_seq_len, n_classes, name="weakgrounded"):
        ''' Load the train/val/test TSVs located at `path`. '''
        super(WeakGroundedTask, self).__init__(name, n_classes)
        # Labels in the TSVs are the strings '0'/'1'. (A dead
        # {'negative': 0, 'positive': 1} mapping that was immediately
        # overwritten in the original code has been removed.)
        targ_map = {'0': 0, '1': 1}
        tr_data = load_tsv(os.path.join(path, "train_aug.tsv"), max_seq_len, targ_map=targ_map,
                           s1_idx=0, s2_idx=1, targ_idx=2, skip_rows=0)
        val_data = load_tsv(os.path.join(path, "val.tsv"), max_seq_len, targ_map=targ_map,
                            s1_idx=0, s2_idx=1, targ_idx=2, skip_rows=0)
        te_data = load_tsv(os.path.join(path, "test.tsv"), max_seq_len, targ_map=targ_map,
                           s1_idx=0, s2_idx=1, targ_idx=2, skip_rows=0)
        self.train_data_text = tr_data
        self.val_data_text = val_data
        self.test_data_text = te_data
        self.sentences = self.train_data_text[0] + self.val_data_text[0]
        # NOTE(review): this overrides the `n_classes` constructor argument
        # with a hard-coded 2 -- confirm callers always expect binary labels.
        self.n_classes = 2
        log.info("\tFinished loading MSCOCO data.")
@register_task('grounded', rel_path='mscoco/grounded/')
class GroundedTask(Task):
''' Task class for Grounded Sentences i.e., training on caption->image pair '''
''' Defined new metric function from AllenNLP Average '''
def __init__(self, path, max_seq_len, name="grounded"):
''' Do stuff '''
super(GroundedTask, self).__init__(name)
self.scorer1 = Average()
| |
# Utilities for generating multi-timestep splittings
import itertools
import numpy as np
# To-do: add utilities for estimating the total number of force terms
from simtk import openmm as mm
def generate_solvent_solute_splitting_string(base_integrator="VRORV", K_p=1, K_r=3):
    """Generate a sequence of V0, V1, R, O steps, where force group 1 is
    assumed to contain fast-changing, cheap-to-evaluate forces and force
    group 0 slow-changing, expensive-to-evaluate forces.

    Currently only supports solvent-solute splittings of the VRORV
    (BAOAB / g-BAOAB) integrator, but it should be easy also to support
    splittings of the ABOBA integrator.

    Parameters
    -----------
    base_integrator: string
        Currently only supports VRORV
    K_p: int
        Number of times to evaluate force group 1 per timestep.
    K_r: int
        Number of inner-loop iterations

    Returns
    -------
    splitting_string: string
        Sequence of V0, V1, R, O steps, to be passed to LangevinSplittingIntegrator
    """
    assert base_integrator in ("VRORV", "BAOAB")
    r_block = ["R"] * K_r
    # Inner loop: fast forces wrap K_r position updates around the O step.
    inner = ["V1"] + r_block + ["O"] + r_block + ["V1"]
    # Slow forces bracket K_p repetitions of the inner loop.
    return " ".join(["V0"] + inner * K_p + ["V0"])
def generate_mts_string(groups_of_fgs, R_before=True, n_R=1):
    """Build a multi-timestep (MTS) splitting string.

    groups_of_fgs is a list of (force_group_indices, n_iterations) tuples,
    ordered outermost-first. E.g.

        [([3], 1),
         ([1, 2], 10),
         ([0], 4)]

    executes group [3] once per timestep, group [1, 2] 10 times per
    timestep, and group [0] 4 * 10 = 40 times per timestep.

    Parameters
    ----------
    groups_of_fgs : list of (iterable of int, int)
    R_before : bool
        If True, the R steps precede the V steps within each iteration;
        otherwise the iteration is reversed.
    n_R : int
        Number of R steps per iteration.

    Returns
    -------
    str
        Whitespace-normalized splitting string.
    """
    def group_string(group):
        # One iteration for `group`: n_R position updates plus one V step
        # per force group index.
        steps = ["R"] * n_R + ["V{}".format(i) for i in group]
        if not R_before:
            steps = steps[::-1]
        return " " + " ".join(steps) + " "

    # Build the innermost loop, then wrap outward; each outer group's
    # iteration count multiplies everything nested inside it.
    group, n_iter = groups_of_fgs[-1]
    inner_loop_string = group_string(group) * n_iter
    for group, n_iter in groups_of_fgs[::-1][1:]:
        # (The original had an if/else here whose branches were identical.)
        inner_loop_string = (group_string(group) + inner_loop_string) * n_iter
    # make sure we don't have extraneous white spaces
    return " ".join(inner_loop_string.split())
def generate_from_ratios(bond_steps_per_angle_step=5,
                         angle_steps=7,
                         R_before=True,
                         ):
    """Build an MTS splitting string for a three-level force-group hierarchy:
    all groups once per timestep, groups [0, 1, 2] `angle_steps` times, and
    group [0] `bond_steps_per_angle_step` times per angle step."""
    hierarchy = [
        ([0, 1, 2, 3], 1),
        ([0, 1, 2], angle_steps),
        ([0], bond_steps_per_angle_step),
    ]
    return generate_mts_string(hierarchy, R_before)
def generate_gbaoab_solvent_solute_string(K_p=2, K_r=1, slow_group=(0,), fast_group=(1,)):
    """Following appendix of g-baoab paper.

    Let's say we want to evaluate force group 0 once per timestep and force
    group 1 twice per timestep. Using the default arguments above should give

        V0 (V1 R^K_r O R^K_r V1)^K_p V0
        V0 V1 R O R V1 V1 R O R V1 V0

    Parameters
    ----------
    K_p : int
        Number of inner-loop repetitions (fast-force evaluations come in
        pairs around each O step).
    K_r : int
        Number of R steps on each side of the O step.
    slow_group, fast_group : iterable of int
        Force group indices for the slow / fast forces. Defaults are
        immutable tuples (the original used mutable list defaults, a
        Python anti-pattern); lists are still accepted.

    Notes:
    * will perform K_p velocity randomizations per timestep
    """
    Rs = ["R"] * K_r
    fast_forces = ["V{}".format(i) for i in fast_group]
    slow_forces = ["V{}".format(i) for i in slow_group]
    inner_loop_string = fast_forces + Rs + ["O"] + Rs + fast_forces
    return " ".join(slow_forces + inner_loop_string * K_p + slow_forces)
def generate_baoab_mts_string(groups, K_r=1):
    """Multi timestep generalization of the solvent-solute splitting scheme presented above...
    In the solvent-solute splitting, we have a "fast" group and a "slow" group.
    What if we have more than two groups?
    In the the straightforard generalization of the solvent-solute scheme, we do something like this:
    Accept groups, a list of 2-tuples, where each tuple contains an iterable of force group indices and
    an execution-frequency ratio.
    For example, groups=[([0], 1), ([1], 2), ([2], 2)] should be taken to mean:
    execute V1 twice as often as V0, and execute V2 twices as often as V1....
    To be concrete:
    If groups=[([0], 1), ([1], 2), ([2], 2)], K_r=1 we would have:
    V0 (V1 (V2 R^K_r O R^K_r V2)^2 V1)^2 V0

    NOTE(review): this definition is shadowed (dead code) -- the same
    function is redefined later in this file. It also does not match its
    own docstring example: the innermost loop is not repeated ratios[-1]
    times, and the recursion multiplies by ratios[i - 1] rather than
    ratios[i]. The later redefinition corrects both points.
    """
    Rs = ["R"] * K_r
    ratios = [group[1] for group in groups]
    forces = [["V{}".format(i) for i in group[0]] for group in groups]
    inner_loop_string = forces[-1] + Rs + ["O"] + Rs + forces[-1]
    for i in range(len(ratios))[::-1][1:]:
        inner_loop_string = forces[i] + inner_loop_string * ratios[i - 1] + forces[i]
    return " ".join(inner_loop_string)
def generate_baoab_mts_string_from_ratios(bond_steps_per_angle_step=5, angle_steps=7):
    """Assuming there are just four groups.
    0: Bonds (Cheap)
    1,2: Angles and torsions (~4x more expensive than bonds)
    3: Nonbonded (~wayyyy more expensive than bonds)

    NOTE(review): shadowed (dead code) -- a later redefinition of this
    function in this file replaces it, and passes a different repeat count
    for the bond group (bond_steps_per_angle_step rather than the product
    angle_steps * bond_steps_per_angle_step used here).
    """
    return generate_baoab_mts_string([([3], 1), ([1, 2], angle_steps), ([0], angle_steps * bond_steps_per_angle_step)])
def generate_random_mts_string(n_updates_per_forcegroup, n_R_steps, n_O_steps):
    """Build a randomly ordered splitting string containing
    n_updates_per_forcegroup[i] copies of "V{i}", plus n_R_steps "R" steps
    and n_O_steps "O" steps. Ordering is randomized in place via numpy's
    global RNG.
    """
    ingredients = []
    for fg_index, n_updates in enumerate(n_updates_per_forcegroup):
        ingredients.extend(["V{}".format(fg_index)] * n_updates)
    ingredients.extend(["R"] * n_R_steps)
    ingredients.extend(["O"] * n_O_steps)
    np.random.shuffle(ingredients)
    return " ".join(ingredients)
def generate_gbaoab_string(K_r=1):
    """K_r=1 --> 'V R O R V'
    K_r=2 --> 'V R R O R R V'
    etc.
    """
    # K_r position updates on each side of the O step, bracketed by V steps.
    r_run = "R " * K_r
    return "V " + r_run + "O " + r_run + "V"
def generate_baoab_mts_string(groups, K_r=1):
    """Multi-timestep generalization of the solvent-solute splitting scheme.

    Accepts `groups`, a list of 2-tuples, each containing an iterable of
    force group indices and an execution-frequency ratio. For example,
    groups=[([0], 1), ([1], 2), ([2], 2)] means: execute V1 twice as often
    as V0, and V2 twice as often as V1. Concretely, with K_r=1 that yields:

        V0 (V1 (V2 R^K_r O R^K_r V2)^2 V1)^2 V0
    """
    r_block = ["R"] * K_r
    ratios = [ratio for _, ratio in groups]
    forces = [["V{}".format(i) for i in fg_indices] for fg_indices, _ in groups]
    # Innermost loop: fastest group around the O step, repeated ratios[-1] times.
    loop = (forces[-1] + r_block + ["O"] + r_block + forces[-1]) * ratios[-1]
    # Wrap outward, multiplying each enclosing layer by its execution ratio.
    for i in reversed(range(len(groups) - 1)):
        loop = (forces[i] + loop + forces[i]) * ratios[i]
    return " ".join(loop)
def generate_baoab_mts_string_from_ratios(bond_steps_per_angle_step=5, angle_steps=7):
    """Assuming there are just four force groups:
    0: bonds (cheap)
    1, 2: angles and torsions (roughly 5x more expensive than bonds)
    3: nonbonded (much more expensive than bonded interactions)
    """
    groups = [([3], 1),
              ([1, 2], angle_steps),
              ([0], bond_steps_per_angle_step)]
    return generate_baoab_mts_string(groups)
def condense_splitting(splitting_string):
    """Simplify a splitting by merging steps that commute.

    Within a run of velocity-type updates (O and V{i} steps), repeated steps
    are collapsed to a single occurrence; runs of R steps keep their
    multiplicity.

    Examples:
        "O V R V V V O" condenses to "O V R V O"
        "O V O O V R V" condenses to "O V R V"

    Returns the condensed splitting string.
    """
    # First split into chunks of either position or velocity-type updates:
    # don't collapse position updates, do collapse velocity updates.
    splitting = splitting_string.upper().split()
    # R only merges with R; V and O steps merge with each other.
    equivalence_classes = {"R": {"R"}, "V": {"O", "V"}, "O": {"O", "V"}}

    def collapse_chunk(current_chunk):
        if current_chunk[0] == "R":
            # Position updates must keep their multiplicity.
            return current_chunk
        # De-duplicate velocity-type steps, preserving first-occurrence
        # order. (The original used list(set(...)), whose ordering is
        # nondeterministic across runs due to string hash randomization.)
        return list(dict.fromkeys(current_chunk))

    current_chunk = [splitting[0]]
    collapsed = []
    for i in range(1, len(splitting)):
        # Extend the current chunk while successive steps commute.
        if splitting[i][0] in equivalence_classes[splitting[i - 1][0]]:
            current_chunk.append(splitting[i])
        else:
            collapsed += collapse_chunk(current_chunk)
            current_chunk = [splitting[i]]
    collapsed = collapsed + collapse_chunk(current_chunk)
    collapsed_string = " ".join(collapsed)
    if len(collapsed) < len(splitting):
        print("Shortened the splitting from {} steps to {} steps ({} --> {})".format(
            len(splitting), len(collapsed), splitting_string, collapsed_string
        ))
    return collapsed_string
def generate_sequential_BAOAB_string(force_group_list, symmetric=True):
    """Generate BAOAB-like schemes that break the "V R" step into one
    sequential "V{i} R" update per force group.

    E.g. force_group_list=(0,1,2), symmetric=True -->
        "V0 R V1 R V2 R O R V2 R V1 R V0"
    force_group_list=(0,1,2), symmetric=False -->
        "V0 R V1 R V2 R O V0 R V1 R V2 R"
    """
    half = []
    for fg in force_group_list:
        half.extend(["V{}".format(fg), "R"])
    # Symmetric schemes mirror the first half around the O step.
    tail = half[::-1] if symmetric else half
    return " ".join(half + ["O"] + tail)
def generate_all_BAOAB_permutation_strings(n_force_groups, symmetric=True):
    """Return (permutation, splitting string) pairs for every permutation
    of range(n_force_groups)."""
    results = []
    for perm in itertools.permutations(range(n_force_groups)):
        results.append((perm, generate_sequential_BAOAB_string(perm, symmetric)))
    return results
# Utilities for modifying force groups
# TODO: Valence vs. nonbonded
# TODO: Short-range vs long-range
# TODO: Solute-solvent vs. solvent-solvent
# Kyle's function for splitting up the forces in a system
def valence_vs_nonbonded(system):
    """Split `system`'s forces into valence vs. nonbonded groups.

    TODO: not yet implemented (placeholder).
    """
    pass
def short_range_vs_long_range(system):
    """Not sure the details of what this should do"""
    # TODO: placeholder -- intended to split forces into short-range vs.
    # long-range contributions.
    pass
def clone_nonbonded_parameters(nonbonded_force):
"""Creates a new nonbonded force with the same global parameters,
particle parameters, and exception parameters"""
# call constructor
new_force = nonbonded_force.__class__()
# go through all of the setter and getter methods
new_force.setCutoffDistance(nonbonded_force.getCutoffDistance())
new_force.setEwaldErrorTolerance(nonbonded_force.getEwaldErrorTolerance())
# new_force.setExceptionParameters # this is per-particle-pair property
new_force.setForceGroup(nonbonded_force.getForceGroup())
new_force.setNonbondedMethod(nonbonded_force.getNonbondedMethod())
new_force.setPMEParameters(*nonbonded_force.getPMEParameters())
new_force.setReactionFieldDielectric(nonbonded_force.getReactionFieldDielectric())
new_force.setReciprocalSpaceForceGroup(nonbonded_force.getReciprocalSpaceForceGroup())
new_force.setSwitchingDistance(nonbonded_force.getSwitchingDistance())
new_force.setUseDispersionCorrection(nonbonded_force.getUseDispersionCorrection())
new_force.setUseSwitchingFunction(nonbonded_force.getUseSwitchingFunction())
# | |
"""
348. Design Tic-Tac-Toe
Medium
Assume the following rules are for the tic-tac-toe game on an n x n board between two players:
A move is guaranteed to be valid and is placed on an empty block.
Once a winning condition is reached, no more moves are allowed.
A player who succeeds in placing n of their marks in a horizontal, vertical, or diagonal row wins the game.
Implement the TicTacToe class:
TicTacToe(int n) Initializes the object the size of the board n.
int move(int row, int col, int player) Indicates that the player with id player plays at the cell (row, col) of the board. The move is guaranteed to be a valid move.
Example 1:
Input
["TicTacToe", "move", "move", "move", "move", "move", "move", "move"]
[[3], [0, 0, 1], [0, 2, 2], [2, 2, 1], [1, 1, 2], [2, 0, 1], [1, 0, 2], [2, 1, 1]]
Output
[null, 0, 0, 0, 0, 0, 0, 1]
Explanation
TicTacToe ticTacToe = new TicTacToe(3);
Assume that player 1 is "X" and player 2 is "O" in the board.
ticTacToe.move(0, 0, 1); // return 0 (no one wins)
|X| | |
| | | | // Player 1 makes a move at (0, 0).
| | | |
ticTacToe.move(0, 2, 2); // return 0 (no one wins)
|X| |O|
| | | | // Player 2 makes a move at (0, 2).
| | | |
ticTacToe.move(2, 2, 1); // return 0 (no one wins)
|X| |O|
| | | | // Player 1 makes a move at (2, 2).
| | |X|
ticTacToe.move(1, 1, 2); // return 0 (no one wins)
|X| |O|
| |O| | // Player 2 makes a move at (1, 1).
| | |X|
ticTacToe.move(2, 0, 1); // return 0 (no one wins)
|X| |O|
| |O| | // Player 1 makes a move at (2, 0).
|X| |X|
ticTacToe.move(1, 0, 2); // return 0 (no one wins)
|X| |O|
|O|O| | // Player 2 makes a move at (1, 0).
|X| |X|
ticTacToe.move(2, 1, 1); // return 1 (player 1 wins)
|X| |O|
|O|O| | // Player 1 makes a move at (2, 1).
|X|X|X|
Constraints:
2 <= n <= 100
player is 1 or 2.
0 <= row, col < n
(row, col) are unique for each different call to move.
At most n^2 calls will be made to move.
Follow-up: Could you do better than O(n^2) per move() operation?
"""
# V0
# IDEA : optimize from V0'
class TicTacToe:
    """n x n tic-tac-toe with per-move win detection.

    Fixes over the draft: the grid is initialized with a sentinel empty
    marker (the original seeded it with integers 0..n-1, which only worked
    because ints never compare equal to 'X'/'O'), and the line checks use
    short-circuiting all() instead of summing every cell.
    """

    def __init__(self, n):
        self.n = n
        # '' marks an empty cell; occupied cells hold 'X' or 'O'.
        self.grid = [['' for _ in range(n)] for _ in range(n)]

    def check(self, row, col, player, mark):
        """Return `player` if placing `mark` at (row, col) completed the
        row, the column, or either diagonal; otherwise None."""
        n = self.n
        if all(self.grid[row][c] == mark for c in range(n)):
            return player
        if all(self.grid[r][col] == mark for r in range(n)):
            return player
        if all(self.grid[i][i] == mark for i in range(n)):
            return player
        if all(self.grid[i][n - 1 - i] == mark for i in range(n)):
            return player
        return None

    def move(self, row, col, player):
        """Place player's mark at (row, col); return the winner id or 0."""
        mark = 'X' if player == 1 else 'O'
        self.grid[row][col] = mark
        return player if self.check(row, col, player, mark) else 0
# V0'
# https://github.com/yennanliu/utility_Python/blob/master/game/tic_tac_toe.py
class TicTacToe:
    def __init__(self, n):
        """
        Initialize your data structure here.
        :type n: int
        """
        self.grid = [[' '] * n for _ in range(n)]

    def move(self, row, col, player):
        """
        Player `player` places a mark at (row, col); return the current
        winning condition: 0 (no one wins), 1 (player 1 wins), or
        2 (player 2 wins).
        :type row: int
        :type col: int
        :type player: int
        :rtype: int
        """
        mark = 'X' if player == 1 else 'O'
        self.grid[row][col] = mark
        n = len(self.grid)
        # The four lines through (row, col) that this move could complete.
        lines = [
            [self.grid[row][c] for c in range(n)],        # the row
            [self.grid[r][col] for r in range(n)],        # the column
            [self.grid[i][i] for i in range(n)],          # main diagonal
            [self.grid[i][n - 1 - i] for i in range(n)],  # anti-diagonal
        ]
        for line in lines:
            if all(cell == mark for cell in line):
                return player
        return 0
# t_game = TicTacToe()
# t_game.operate_game()
# V0''
# TODO : validate/fix below
class TicTacToe(object):
    """Fixed version of the draft above (was marked TODO):
    - the column check now uses the move's column (it indexed by the row),
    - the row check no longer reuses a stale loop index,
    - the anti-diagonal is now checked too,
    - the debug print of the whole board was removed.
    """

    def __init__(self, n):
        self.n = n
        # None marks an empty cell; occupied cells hold the player id (1 or 2).
        self.board = [[None for _ in range(n)] for _ in range(n)]

    def check(self, x, y, player):
        """Return True iff `player` completed a line through cell (x, y)."""
        n = self.n
        # column y
        if all(self.board[r][y] == player for r in range(n)):
            return True
        # row x
        if all(cell == player for cell in self.board[x]):
            return True
        # main diagonal (only reachable if the move lies on it)
        if x == y and all(self.board[i][i] == player for i in range(n)):
            return True
        # anti-diagonal
        if x + y == n - 1 and all(self.board[i][n - 1 - i] == player for i in range(n)):
            return True
        return False

    def move(self, row, col, player):
        """Record the move and return the winner id, or 0 if no one wins."""
        self.board[row][col] = player
        return player if self.check(row, col, player) else 0
# n = 3
# t = TicTacToe(n)
# _moves = [[0, 0, 1], [0, 2, 2], [2, 2, 1], [1, 1, 2], [2, 0, 1], [1, 0, 2], [2, 1, 1]]
# for m in _moves:
# r = t.move(m[0], m[1], m[2])
# print (r)
# n = 2
# t = TicTacToe(n)
# _moves = [[0,0,2],[1,1,1],[0,1,2]]
# for m in _moves:
# r = t.move(m[0], m[1], m[2])
# print (r)
# V1
# https://blog.csdn.net/danspace1/article/details/86616981
class TicTacToe:
    def __init__(self, n):
        """
        Initialize your data structure here.
        :type n: int
        """
        self.grid = [[' ' for _ in range(n)] for _ in range(n)]

    def move(self, row, col, player):
        """
        Record player's move at (row, col) and report the outcome:
        0 = no one wins yet, 1 = player 1 wins, 2 = player 2 wins.
        :type row: int
        :type col: int
        :type player: int
        :rtype: int
        """
        mark = 'O' if player == 2 else 'X'
        self.grid[row][col] = mark
        n = len(self.grid)

        def hits(cells):
            # Number of cells on a line carrying the current player's mark.
            return sum(1 for cell in cells if cell == mark)

        won = (hits(self.grid[row]) == n
               or hits(self.grid[r][col] for r in range(n)) == n
               or hits(self.grid[i][i] for i in range(n)) == n
               or hits(self.grid[i][n - 1 - i] for i in range(n)) == n)
        return player if won else 0
# V1
# IDEA : Optimized Brute Force
# https://leetcode.com/problems/design-tic-tac-toe/solution/
# JAVA
# class TicTacToe {
#
# private int[][] board;
# private int n;
#
# public TicTacToe(int n) {
# board = new int[n][n];
# this.n = n;
# }
#
# public int move(int row, int col, int player) {
# board[row][col] = player;
# // check if the player wins
# if ((checkRow(row, player)) ||
# (checkColumn(col, player)) ||
# (row == col && checkDiagonal(player)) ||
# (col == n - row - 1 && checkAntiDiagonal(player))) {
# return player;
# }
# // No one wins
# return 0;
# }
#
# private boolean checkDiagonal(int player) {
# for (int row = 0; row < n; row++) {
# if (board[row][row] != player) {
# return false;
# }
# }
# return true;
# }
#
# private boolean checkAntiDiagonal(int player) {
# for (int row = 0; row < n; row++) {
# if (board[row][n - row - 1] != player) {
# return false;
# }
# }
# return true;
# }
#
# private boolean checkColumn(int col, int player) {
# for (int row = 0; row < n; row++) {
# if (board[row][col] != player) {
# return false;
# }
# }
# return true;
# }
#
# private boolean checkRow(int row, int player) {
# for (int col = 0; col < n; col++) {
# if (board[row][col] != player) {
# return false;
# }
# }
# return true;
# }
# }
# V1
# IDEA : Optimised Approach
# https://leetcode.com/problems/design-tic-tac-toe/solution/
# public class TicTacToe {
# int[] rows;
# int[] cols;
# int diagonal;
# int antiDiagonal;
#
# public TicTacToe(int n) {
# rows = new int[n];
# cols = new int[n];
# }
#
# public int move(int row, int col, int player) {
# int currentPlayer = (player == 1) ? 1 : -1;
# // update currentPlayer in | |
rloc_probe else False )
if 24 - 24: OOooOOo
if 71 - 71: IiII - i1IIi
    def is_smr ( self ) :
        # True when the smr bit is set on this message.
        return ( True if self . smr_bit else False )
if 56 - 56: OoOoOO00 + oO0o
if 74 - 74: iII111i / I1Ii111 / II111iiii - iII111i / oO0o % I11i
    def is_smr_invoked ( self ) :
        # True when the smr-invoked bit is set on this message.
        return ( True if self . smr_invoked_bit else False )
if 19 - 19: IiII % OoooooooOO + OoooooooOO
if 7 - 7: i1IIi
    def is_ddt ( self ) :
        # True when the ddt bit is set on this message.
        return ( True if self . ddt_bit else False )
if 91 - 91: OoOoOO00 - OoOoOO00 . IiII
if 33 - 33: I1Ii111 - iIii1I11I1II1 / Ii1I % O0
    def is_to_etr ( self ) :
        # True when the to-ETR flag is set on this message.
        return ( True if self . to_etr else False )
if 80 - 80: IiII % OoooooooOO - IiII
if 27 - 27: I1Ii111 - o0oOOo0O0Ooo * I1ii11iIi11i - I1IiiI
    def is_to_ms ( self ) :
        # True when the to-MS flag is set on this message.
        return ( True if self . to_ms else False )
if 22 - 22: Oo0Ooo % OoooooooOO - Oo0Ooo - iII111i . Ii1I
if 100 - 100: II111iiii / I1Ii111 / iII111i - I1ii11iIi11i * iIii1I11I1II1
if 7 - 7: i1IIi . IiII % i11iIiiIii * I1ii11iIi11i . I11i % I1ii11iIi11i
if 35 - 35: I1IiiI
if 48 - 48: OoooooooOO % OoooooooOO - OoO0O00 . OoOoOO00
if 22 - 22: ooOoO0o . i11iIiiIii . OoooooooOO . i1IIi
if 12 - 12: OoOoOO00 % OOooOOo + oO0o . O0 % iIii1I11I1II1
if 41 - 41: OoooooooOO
if 13 - 13: I11i + I1Ii111 - I1Ii111 % oO0o / I11i
if 4 - 4: I1IiiI + OOooOOo - IiII + iII111i
if 78 - 78: Ii1I
if 29 - 29: II111iiii
if 79 - 79: iIii1I11I1II1 - i11iIiiIii + ooOoO0o - II111iiii . iIii1I11I1II1
if 84 - 84: Oo0Ooo % I11i * O0 * I11i
if 66 - 66: OOooOOo / iIii1I11I1II1 - OoOoOO00 % O0 . ooOoO0o
if 12 - 12: Oo0Ooo + I1IiiI
if 37 - 37: i1IIi * i11iIiiIii
if 95 - 95: i11iIiiIii % I1Ii111 * Oo0Ooo + i1IIi . O0 + I1ii11iIi11i
if 7 - 7: OoO0O00 * i11iIiiIii * iIii1I11I1II1 / OOooOOo / I1Ii111
if 35 - 35: iII111i * OOooOOo
if 65 - 65: II111iiii % i1IIi
if 13 - 13: OoO0O00 * I1Ii111 + Oo0Ooo - IiII
if 31 - 31: OoO0O00
if 68 - 68: OoO0O00 + i1IIi / iIii1I11I1II1 + II111iiii * iIii1I11I1II1 + I1ii11iIi11i
if 77 - 77: i11iIiiIii - I1Ii111 . I1ii11iIi11i % Oo0Ooo . Ii1I
if 9 - 9: o0oOOo0O0Ooo
if 55 - 55: OOooOOo % iIii1I11I1II1 + I11i . ooOoO0o
if 71 - 71: i11iIiiIii / i1IIi + OoOoOO00
if 23 - 23: i11iIiiIii
if 88 - 88: II111iiii - iII111i / OoooooooOO
if 71 - 71: I1ii11iIi11i
if 19 - 19: Oo0Ooo - OoO0O00 + i11iIiiIii / iIii1I11I1II1
if 1 - 1: IiII % i1IIi
if 41 - 41: OoO0O00 * OoO0O00 / iII111i + I1ii11iIi11i . o0oOOo0O0Ooo
if 84 - 84: i11iIiiIii + OoO0O00 * I1IiiI + I1ii11iIi11i / Ii1I
if 80 - 80: I1ii11iIi11i
if 67 - 67: II111iiii
if 2 - 2: o0oOOo0O0Ooo - O0 * Ii1I % IiII
if 64 - 64: i1IIi . ooOoO0o
if 7 - 7: oO0o . iII111i - iII111i / I1Ii111 % Oo0Ooo
if 61 - 61: oO0o - I1ii11iIi11i / iII111i % I1ii11iIi11i + OoO0O00 / Oo0Ooo
if 10 - 10: i11iIiiIii / OoOoOO00
if 27 - 27: I1IiiI / OoooooooOO
if 74 - 74: I1ii11iIi11i % I1Ii111 - OoO0O00 * I11i . OoooooooOO * OoO0O00
if 99 - 99: OoOoOO00 . iII111i - OoooooooOO - O0
if 6 - 6: OOooOOo
if 3 - 3: O0 - I1Ii111 * Ii1I * OOooOOo / Ii1I
if 58 - 58: Ii1I * iIii1I11I1II1 + ooOoO0o . ooOoO0o
if 74 - 74: ooOoO0o - o0oOOo0O0Ooo * IiII % ooOoO0o
class lisp_map_register ( ) :
    def __init__ ( self ) :
        # Header flag bits of a LISP Map-Register message.
        self . proxy_reply_requested = False
        self . lisp_sec_present = False
        self . xtr_id_present = False
        self . map_notify_requested = False
        self . mobile_node = False
        self . merge_register_requested = False
        self . use_ttl_for_timeout = False
        self . map_register_refresh = False
        # Header fields and authentication material.
        self . record_count = 0
        self . nonce = 0
        self . alg_id = 0
        self . key_id = 0
        self . auth_len = 0
        self . auth_data = 0
        self . xtr_id = 0
        self . site_id = 0
        # NOTE(review): duplicate re-initialization of record_count (already
        # set to 0 above).
        self . record_count = 0
        self . sport = 0
        self . encrypt_bit = 0
        # None means no encryption key id negotiated.
        self . encryption_key_id = None
if 93 - 93: iIii1I11I1II1 / OoOoOO00 % Oo0Ooo * I1Ii111 - OoO0O00 - o0oOOo0O0Ooo
if 44 - 44: OoooooooOO
    def print_map_register ( self ) :
        # Render the xTR-ID as hex for display.
        oO = lisp_hex_string ( self . xtr_id )
        if 48 - 48: iII111i
        # One letter per flag: uppercase when set, lowercase when clear.
        oooOo = ( "{} -> flags: {}{}{}{}{}{}{}{}{}, record-count: " +
            "{}, nonce: 0x{}, key/alg-id: {}/{}{}, auth-len: {}, xtr-id: " +
            "0x{}, site-id: {}" )
        if 85 - 85: I1ii11iIi11i . oO0o . O0
        lprint ( oooOo . format ( bold ( "Map-Register" , False ) , "P" if self . proxy_reply_requested else "p" ,
            # IiII
            "S" if self . lisp_sec_present else "s" ,
            "I" if self . xtr_id_present else "i" ,
            "T" if self . use_ttl_for_timeout else "t" ,
            "R" if self . merge_register_requested else "r" ,
            "M" if self . mobile_node else "m" ,
            "N" if self . map_notify_requested else "n" ,
            "F" if self . map_register_refresh else "f" ,
            "E" if self . encrypt_bit else "e" ,
            self . record_count , lisp_hex_string ( self . nonce ) , self . key_id ,
            self . alg_id , " (sha1)" if ( self . key_id == LISP_SHA_1_96_ALG_ID ) else ( " (sha2)" if ( self . key_id == LISP_SHA_256_128_ALG_ID ) else "" ) , self . auth_len , oO , self . site_id ) )
if 68 - 68: I1ii11iIi11i % I1Ii111 + I11i . Oo0Ooo
if 95 - 95: OOooOOo * i11iIiiIii . I11i + Ii1I / Ii1I
if 43 - 43: IiII . OoooooooOO - II111iiii
if 90 - 90: I1IiiI - iIii1I11I1II1 + I1ii11iIi11i * OOooOOo * oO0o
def encode ( self ) :
oo0I1I1iiI1i = ( LISP_MAP_REGISTER << 28 ) | self . record_count
if ( self . proxy_reply_requested ) : oo0I1I1iiI1i |= 0x08000000
if ( self . lisp_sec_present ) : oo0I1I1iiI1i |= 0x04000000
if ( self . xtr_id_present ) : oo0I1I1iiI1i |= 0x02000000
if ( self . map_register_refresh ) : oo0I1I1iiI1i |= 0x1000
if ( self . use_ttl_for_timeout ) : oo0I1I1iiI1i |= 0x800
if ( self . merge_register_requested ) : oo0I1I1iiI1i |= 0x400
if ( self . mobile_node ) : oo0I1I1iiI1i |= 0x200
if ( self . map_notify_requested ) : oo0I1I1iiI1i |= 0x100
if ( self . encryption_key_id != None ) :
oo0I1I1iiI1i |= 0x2000
oo0I1I1iiI1i |= self . encryption_key_id << 14
if 19 - 19: I1Ii111 * II111iiii % Oo0Ooo - i1IIi
if 27 - 27: OoOoOO00 . O0 / I1ii11iIi11i . iIii1I11I1II1
if 15 - 15: Ii1I + OoO0O00 % iIii1I11I1II1 - I1ii11iIi11i - i1IIi % o0oOOo0O0Ooo
if 54 - 54: IiII - II111iiii . ooOoO0o + Ii1I
if 45 - 45: oO0o + II111iiii . iII111i / I1ii11iIi11i
if ( self . alg_id == LISP_NONE_ALG_ID ) :
self . auth_len = 0
else :
if ( self . alg_id == LISP_SHA_1_96_ALG_ID ) :
self . auth_len = LISP_SHA1_160_AUTH_DATA_LEN
if 76 - 76: Ii1I + iII111i - IiII * iIii1I11I1II1 % i1IIi
if ( self . alg_id == LISP_SHA_256_128_ALG_ID ) :
self . auth_len = LISP_SHA2_256_AUTH_DATA_LEN
if 72 - 72: ooOoO0o + II111iiii . O0 - iII111i / OoooooooOO . I1Ii111
if 28 - 28: iIii1I11I1II1 . O0
if 32 | |
# Repository: tdongsi/calendars
"""
Test classes and functions for retail calendar.
Use unittest module as the main test framework.
All the test cases in this module assume default values of class parameters:
1) fiscal year starts on Aug 1st.
2) Retail calendar's end date is the last Saturday of July.
It is hard to unit-test with general, variable class parameters.
"""
from datetime import date, timedelta
import unittest
from freezegun import freeze_time
from calendars.calendars import RetailDate
class RetailDateTest(unittest.TestCase):
    """Sanity checks for the simple string-formatting properties of RetailDate."""

    def test_string_output(self):
        under_test = RetailDate(date(2015, 12, 31))
        # property name -> expected rendered string
        expected = {
            "year_dates_string": "2016 (26-Jul-2015 - 30-Jul-2016)",
            "year_string": "2015 - 2016",
        }
        for prop, want in expected.items():
            self.assertEqual(getattr(under_test, prop), want)
class RetailQuarterStartEnd(unittest.TestCase):
    """Verify the quarter_start_date and quarter_end_date properties of RetailDate."""

    def _verify_retail_quarter(self, input_date, expected_quarter_start, expected_quarter_end):
        # Helper: build a RetailDate and compare both quarter boundaries.
        retail = RetailDate(input_date)
        self.assertEqual(retail.quarter_start_date, expected_quarter_start)
        self.assertEqual(retail.quarter_end_date, expected_quarter_end)

    def test_year_2004(self):
        # One probe date inside each quarter of retail year 2004, paired
        # with that quarter's expected (start, end) boundaries.
        cases = [
            (date(2003, 7, 27), date(2003, 7, 27), date(2003, 10, 25)),
            (date(2003, 11, 2), date(2003, 10, 26), date(2004, 1, 24)),
            (date(2004, 2, 1), date(2004, 1, 25), date(2004, 4, 24)),
            (date(2004, 5, 2), date(2004, 4, 25), date(2004, 7, 31)),
        ]
        for probe, q_start, q_end in cases:
            self._verify_retail_quarter(probe, q_start, q_end)
class QuarterNumberTest(unittest.TestCase):
    """Verify the RetailDate.quarter property.

    Each test probes the first and last day of every quarter of one
    retail year (plus a few mid-quarter dates).
    """

    def _quarter_number(self, dategiven):
        """Return the quarter number RetailDate computes for *dategiven*.

        Kept as an internal helper to minimize changes in the tests below.
        """
        return RetailDate(dategiven).quarter

    def test_quarter_number_2004(self):
        # 2004: start date and end date of Q1-Q4
        self.assertEqual(1, self._quarter_number(date(2003, 7, 27)))
        self.assertEqual(1, self._quarter_number(date(2003, 10, 25)))
        self.assertEqual(2, self._quarter_number(date(2003, 10, 26)))
        self.assertEqual(2, self._quarter_number(date(2003, 11, 1)))
        # BUG FIX: was date(2003, 1, 24). Q2 of retail year 2004 ends on
        # 2004-01-24 (cf. RetailQuarterStartEnd.test_year_2004), and Q3
        # starts the next day, asserted just below.
        self.assertEqual(2, self._quarter_number(date(2004, 1, 24)))
        self.assertEqual(3, self._quarter_number(date(2004, 1, 25)))
        self.assertEqual(3, self._quarter_number(date(2004, 2, 1)))
        self.assertEqual(3, self._quarter_number(date(2004, 4, 24)))
        self.assertEqual(4, self._quarter_number(date(2004, 4, 25)))
        self.assertEqual(4, self._quarter_number(date(2004, 5, 1)))
        self.assertEqual(4, self._quarter_number(date(2004, 7, 31)))

    def test_quarter_number_2010(self):
        # 2010: start date and end date of Q1-Q4
        self.assertEqual(1, self._quarter_number(date(2009, 7, 26)))
        self.assertEqual(1, self._quarter_number(date(2009, 10, 24)))
        self.assertEqual(2, self._quarter_number(date(2009, 10, 25)))
        self.assertEqual(2, self._quarter_number(date(2009, 11, 1)))
        self.assertEqual(2, self._quarter_number(date(2010, 1, 23)))
        self.assertEqual(3, self._quarter_number(date(2010, 1, 24)))
        self.assertEqual(3, self._quarter_number(date(2010, 1, 31)))
        self.assertEqual(3, self._quarter_number(date(2010, 4, 24)))
        self.assertEqual(4, self._quarter_number(date(2010, 4, 25)))
        self.assertEqual(4, self._quarter_number(date(2010, 5, 2)))
        self.assertEqual(4, self._quarter_number(date(2010, 7, 31)))

    def test_quarter_number_2014(self):
        # 2014: start date and end date of Q1-Q4
        self.assertEqual(1, self._quarter_number(date(2013, 7, 28)))
        self.assertEqual(1, self._quarter_number(date(2013, 10, 26)))
        self.assertEqual(2, self._quarter_number(date(2013, 10, 27)))
        self.assertEqual(2, self._quarter_number(date(2014, 1, 25)))
        self.assertEqual(3, self._quarter_number(date(2014, 1, 26)))
        self.assertEqual(3, self._quarter_number(date(2014, 4, 26)))
        self.assertEqual(4, self._quarter_number(date(2014, 4, 27)))
        self.assertEqual(4, self._quarter_number(date(2014, 7, 26)))
class RetailYearStartEnd(unittest.TestCase):
    """Test RetailDate.year_start_date and RetailDate.year_end_date properties.

    The expected boundary tables below assume the default class parameters:
    FISCAL_START_MONTH = 8
    FISCAL_START_DAY = 1

    BUG FIX: the Python-2-only builtins xrange() and dict.iteritems() were
    replaced with range() and dict.items(), which behave identically here
    on Python 2 and also work on Python 3.
    """

    # dates: list of (month, day) probes used by test_aggr_date_input;
    # the late-July days cover the moving retail-year boundary.
    dates = [(1, 1),
             (8, 1),
             (8, 2),
             (12, 31)]
    dates.extend([(7, day) for day in range(22, 32)])
    dates.sort()

    years = range(2000, 2020)

    # Retail year's start dates from 2000-2020
    retail_start_dates = [
        (1999, 8, 1),
        (2000, 7, 30),
        (2001, 7, 29),
        (2002, 7, 28),
        (2003, 7, 27),
        (2004, 8, 1),
        (2005, 7, 31),
        (2006, 7, 30),
        (2007, 7, 29),
        (2008, 7, 27),
        (2009, 7, 26),
        (2010, 8, 1),
        (2011, 7, 31),
        (2012, 7, 29),
        (2013, 7, 28),
        (2014, 7, 27),
        (2015, 7, 26),
        (2016, 7, 31),
        (2017, 7, 30),
        (2018, 7, 29),
        (2019, 7, 28)
    ]

    # Retail year's end dates from 2000-2020
    retail_end_dates = [
        (2000, 7, 29),
        (2001, 7, 28),
        (2002, 7, 27),
        (2003, 7, 26),
        (2004, 7, 31),
        (2005, 7, 30),
        (2006, 7, 29),
        (2007, 7, 28),
        (2008, 7, 26),
        (2009, 7, 25),
        (2010, 7, 31),
        (2011, 7, 30),
        (2012, 7, 28),
        (2013, 7, 27),
        (2014, 7, 26),
        (2015, 7, 25),
        (2016, 7, 30),
        (2017, 7, 29),
        (2018, 7, 28),
        (2019, 7, 27),
        (2020, 7, 25),
    ]

    def test_start_date_output(self):
        """Sanity tests: if the input date is the start date of the retail year,
        year_start_date should be the same date.
        """
        self.assertEqual(RetailDate.FISCAL_START_MONTH, 8)
        self.assertEqual(RetailDate.FISCAL_START_DAY, 1)
        # map of input date -> expected year_start_date
        input_to_output = {}
        # Construct the dict:
        # {
        #   start_date-1: previous_start_date,
        #   start_date  : start_date,
        #   start_date+1: start_date
        # }
        for idx in range(len(self.years)):
            start_date = date(*self.retail_start_dates[idx])
            if idx != 0:
                input_to_output[start_date - timedelta(1)] = date(*self.retail_start_dates[idx - 1])
            input_to_output[start_date] = start_date
            input_to_output[start_date + timedelta(1)] = date(*self.retail_start_dates[idx])
        # Verify the actual output against the expected output from the dict
        for k, v in input_to_output.items():
            actual = RetailDate(k).year_start_date
            message = "Input: %s, Output: %s, Expected: %s" % (k, actual, v)
            self.assertEqual(actual, v, message)

    def test_end_date_output(self):
        """Boundary tests around each retail year's end date."""
        # map of input date -> expected year_end_date
        input_to_output = {}
        # Construct the dict:
        # {
        #   end_date-1: end_date,
        #   end_date  : end_date,
        #   end_date+1: next_end_date
        # }
        for idx in range(len(self.years)):
            end_date = date(*self.retail_end_dates[idx])
            input_to_output[end_date - timedelta(1)] = date(*self.retail_end_dates[idx])
            input_to_output[end_date] = end_date
            if idx != len(self.years) - 1:
                input_to_output[end_date + timedelta(1)] = date(*self.retail_end_dates[idx + 1])
        for k, v in input_to_output.items():
            actual = RetailDate(k).year_end_date
            message = "Input: %s, Output: %s, Expected: %s" % (k, actual, v)
            self.assertEqual(actual, v, message)

    def test_aggr_date_input(self):
        """Find all retail year start/end dates for many inputs in 2000-2020."""
        actual_start_date = set([])
        actual_end_date = set([])
        for year in self.years:
            for my_date in self.dates:
                input_date = date(year, my_date[0], my_date[1])
                retail_date = RetailDate(input_date)
                actual_start_date.add(retail_date.year_start_date)
                actual_end_date.add(retail_date.year_end_date)
        # Verify the retail start dates
        expected_start = set([date(mTup[0], mTup[1], mTup[2]) for mTup in self.retail_start_dates])
        diff = expected_start.symmetric_difference(actual_start_date)
        self.assertEqual(len(diff), 0, "Diff: " + str(diff))
        # Verify the retail end dates
        expected_end = set([date(mTup[0], mTup[1], mTup[2]) for mTup in self.retail_end_dates])
        diff = expected_end.symmetric_difference(actual_end_date)
        self.assertEqual(len(diff), 0, "Diff: " + str(diff))
class IsCurrentPreviousYearTests(unittest.TestCase):
"""
Test cases for is_current_year and is_previous_year properties of calendars.RetailDate.
"""
def test_retail_date(self):
    """Sanity: today is in the current retail year; distant dates are not."""
    todays_retail = RetailDate(date.today())
    self.assertEqual(todays_retail.is_current_year, True)
    self.assertEqual(todays_retail.is_previous_year, False)
    # ~400 days away in either direction falls outside the current retail year
    for offset in (400, -400):
        probe = date.today() + timedelta(days=offset)
        self.assertEqual(RetailDate(probe).is_current_year, False)
    # a date far in the past is never the current retail year
    self.assertEqual(RetailDate(date(2012, 12, 21)).is_current_year, False)
def test_retail_year_2010(self):
    """Run the 2010 checks with today() frozen at several points inside
    retail year 2010 (2009-07-26 to 2010-07-31).
    """
    frozen_todays = (
        date(2009, 10, 1),   # arbitrary date in the earlier half
        date(2010, 2, 1),    # arbitrary date in the later half
        date(2009, 12, 31),  # last day of calendar year 2009
        date(2010, 1, 1),    # first day of calendar year 2010
        date(2009, 7, 26),   # first day of the retail year
        date(2010, 7, 31),   # last day of the retail year
    )
    for frozen in frozen_todays:
        self._curr_retail_2010_tests(frozen)
        self._prev_retail_2010_tests(frozen)
def _curr_retail_2010_tests(self, today):
    """With today() frozen at *today*, check is_current_year around the
    retail year 2010 boundaries (2009-07-26 to 2010-07-31).
    """
    expectations = [
        # just outside the retail year (False boundary)
        (date(2009, 7, 24), False),
        (date(2009, 7, 25), False),
        (date(2010, 8, 1), False),
        (date(2010, 8, 2), False),
        # just inside the retail year (True boundary)
        (date(2009, 7, 26), True),
        (date(2009, 7, 27), True),
        (date(2010, 7, 29), True),
        (date(2010, 7, 30), True),
        # around the start-month boundary (lower end)
        (date(2009, 7, 31), True),
        (date(2009, 8, 1), True),
        (date(2009, 7, 1), False),
        # around the end-month boundary (higher end)
        (date(2010, 7, 31), True),
        (date(2010, 8, 1), False),
        (date(2010, 8, 31), False),
        # calendar year end
        (date(2008, 12, 31), False),
        (date(2009, 12, 31), True),
        (date(2010, 12, 31), False),
        # calendar year start
        (date(2009, 1, 1), False),
        (date(2010, 1, 1), True),
        (date(2011, 1, 1), False),
    ]
    with freeze_time(today):
        for probe, expected in expectations:
            self.assertEqual(RetailDate(probe).is_current_year, expected)
def _prev_retail_2010_tests(self, today):
with freeze_time(today):
# At False boundary
input_date = date(2010, 8, 1)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2010, 8, 2)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2009, 7, 26)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2009, 7, 27)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2010, 7, 29)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2010, 7, 30)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2008, 7, 26)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2008, 7, 25)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
# At True boundary
input_date = date(2009, 7, 24)
self.assertEqual(RetailDate(input_date).is_previous_year, True)
input_date = date(2009, 7, 25)
self.assertEqual(RetailDate(input_date).is_previous_year, True)
input_date = date(2008, 7, 27)
self.assertEqual(RetailDate(input_date).is_previous_year, True)
input_date = date(2008, 7, 28)
self.assertEqual(RetailDate(input_date).is_previous_year, True)
# Next month lower end
input_date = date(2009, 7, 31)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2009, 8, 1)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2009, 7, 1)
self.assertEqual(RetailDate(input_date).is_previous_year, True)
# Next month higher end
input_date = date(2010, 7, 31)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2010, 8, 1)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
input_date = date(2010, 8, 31)
self.assertEqual(RetailDate(input_date).is_previous_year, False)
# Calendar year | |
'multnum2': r"{\s*(\d+)\s*,\s*(\d*)\s*}",
'multhashnum1': r"#{\s*(\d+)\s*}",
'multhashnum2': r"{\s*(\d+)\s*,\s*(\d*)\s*}"
}
class Sequence(DiagramItem):
    """Railroad-diagram item that lays out its child items left-to-right.

    Children are passed through wrapString(), so bare strings become
    diagram items.  The constructor pre-computes the composite bounding
    box (width, height, up, down); format() emits the paths and children
    at a concrete position.
    """

    def __init__(self, *items):
        DiagramItem.__init__(self, 'g')
        self.items = [wrapString(item) for item in items]
        self.needsSpace = True
        self.up = 0
        self.down = 0
        self.height = 0
        self.width = 0
        # Accumulate the bounding box.  up/down are measured relative to
        # the running baseline (self.height), so the order of these
        # updates within the loop matters.
        for item in self.items:
            # 20px of breathing room around items that request it
            self.width += item.width + (20 if item.needsSpace else 0)
            self.up = max(self.up, item.up - self.height)
            self.height += item.height
            self.down = max(self.down - item.height, item.down)
        # The first/last items only need half their spacing at the edges.
        if self.items[0].needsSpace:
            self.width -= 10
        if self.items[-1].needsSpace:
            self.width -= 10
        addDebug(self)

    def __repr__(self):
        items = ', '.join(map(repr, self.items))
        return 'Sequence(%s)' % items

    def format(self, x, y, width):
        """Draw the sequence at (x, y), centered within *width*."""
        leftGap, rightGap = determineGaps(width, self.width)
        # Entry and exit connector lines.
        Path(x, y).h(leftGap).addTo(self)
        Path(x + leftGap + self.width, y + self.height).h(rightGap).addTo(self)
        x += leftGap
        for i, item in enumerate(self.items):
            # 10px connector before/after spaced items (except at the ends).
            if item.needsSpace and i > 0:
                Path(x, y).h(10).addTo(self)
                x += 10
            item.format(x, y, item.width).addTo(self)
            x += item.width
            y += item.height
            if item.needsSpace and i < len(self.items) - 1:
                Path(x, y).h(10).addTo(self)
                x += 10
        return self
class Stack(DiagramItem):
    """Railroad-diagram item that stacks its children vertically,
    routing the line back to the left between consecutive rows.
    """

    def __init__(self, *items):
        DiagramItem.__init__(self, 'g')
        self.items = [wrapString(item) for item in items]
        self.needsSpace = True
        # The widest child (plus its 20px spacing) dictates the width.
        self.width = max(item.width + (20 if item.needsSpace else 0)
                         for item in self.items)
        # NOTE(review): the original comment said "pretty sure that space
        # calc is totes wrong" — the spacing model here is unverified.
        if len(self.items) > 1:
            self.width += C.AR * 2  # room for the connecting arcs
        self.up = self.items[0].up
        self.down = self.items[-1].down
        self.height = 0
        last = len(self.items) - 1
        for i, item in enumerate(self.items):
            self.height += item.height
            # Vertical gap between rows: at least two arc radii, or enough
            # to clear the neighbouring items plus C.VS padding.
            if i > 0:
                self.height += max(C.AR * 2, item.up + C.VS)
            if i < last:
                self.height += max(C.AR * 2, item.down + C.VS)
        addDebug(self)

    def __repr__(self):
        items = ', '.join(repr(item) for item in self.items)
        return 'Stack(%s)' % items

    def format(self, x, y, width):
        """Draw the stack at (x, y) within *width*."""
        leftGap, rightGap = determineGaps(width, self.width)
        Path(x, y).h(leftGap).addTo(self)
        x += leftGap
        xInitial = x
        if len(self.items) > 1:
            # Lead-in segment before the first row.
            Path(x, y).h(C.AR).addTo(self)
            x += C.AR
            innerWidth = self.width - C.AR * 2
        else:
            innerWidth = self.width
        for i, item in enumerate(self.items):
            item.format(x, y, innerWidth).addTo(self)
            x += innerWidth
            y += item.height
            if i != len(self.items) - 1:
                # S-shaped return path down and back to the left edge
                # for the next row.
                (Path(x, y)
                 .arc('ne').down(max(0, item.down + C.VS - C.AR * 2))
                 .arc('es').left(innerWidth)
                 .arc('nw').down(max(0, self.items[i + 1].up + C.VS - C.AR * 2))
                 .arc('ws').addTo(self))
                y += max(item.down + C.VS, C.AR * 2) + \
                    max(self.items[i + 1].up + C.VS, C.AR * 2)
                x = xInitial + C.AR
        if len(self.items) > 1:
            Path(x, y).h(C.AR).addTo(self)
            x += C.AR
        Path(x, y).h(rightGap).addTo(self)
        return self
class OptionalSequence(DiagramItem):
    """A sequence in which each item may be skipped via a bypass line.

    With zero or one items this degrades to a plain Sequence (handled in
    __new__).  Layout is intricate: each item gets an upper "skip" path
    and (for non-first items) a lower return path.
    """

    def __new__(cls, *items):
        # A single (or empty) item needs no skip machinery.
        if len(items) <= 1:
            return Sequence(*items)
        else:
            return super(OptionalSequence, cls).__new__(cls)

    def __init__(self, *items):
        DiagramItem.__init__(self, 'g')
        self.items = [wrapString(item) for item in items]
        self.needsSpace = False
        self.width = 0
        self.up = 0
        self.height = sum(item.height for item in self.items)
        self.down = self.items[0].down
        heightSoFar = 0
        # Accumulate up/down/width; heightSoFar tracks the baseline drop
        # as items advance, so order matters here.
        for i, item in enumerate(self.items):
            self.up = max(self.up, max(C.AR * 2, item.up + C.VS) - heightSoFar)
            heightSoFar += item.height
            if i > 0:
                self.down = max(self.height + self.down, heightSoFar
                                + max(C.AR * 2, item.down + C.VS)) - self.height
            itemWidth = item.width + (20 if item.needsSpace else 0)
            # First item needs one lead-in arc; later items need arcs on
            # both sides for their skip/return paths.
            if i == 0:
                self.width += C.AR + max(itemWidth, C.AR)
            else:
                self.width += C.AR * 2 + max(itemWidth, C.AR) + C.AR
        addDebug(self)

    def __repr__(self):
        items = ', '.join(repr(item) for item in self.items)
        return 'OptionalSequence(%s)' % items

    def format(self, x, y, width):
        """Draw the optional sequence at (x, y) within *width*."""
        leftGap, rightGap = determineGaps(width, self.width)
        Path(x, y).right(leftGap).addTo(self)
        Path(x + leftGap + self.width, y
             + self.height).right(rightGap).addTo(self)
        x += leftGap
        upperLineY = y - self.up
        last = len(self.items) - 1
        for i, item in enumerate(self.items):
            itemSpace = 10 if item.needsSpace else 0
            itemWidth = item.width + itemSpace
            if i == 0:
                # Upper skip
                (Path(x, y)
                 .arc('se')
                 .up(y - upperLineY - C.AR * 2)
                 .arc('wn')
                 .right(itemWidth - C.AR)
                 .arc('ne')
                 .down(y + item.height - upperLineY - C.AR * 2)
                 .arc('ws')
                 .addTo(self))
                # Straight line
                (Path(x, y)
                 .right(itemSpace + C.AR)
                 .addTo(self))
                item.format(x + itemSpace + C.AR, y, item.width).addTo(self)
                x += itemWidth + C.AR
                y += item.height
            elif i < last:
                # Upper skip
                (Path(x, upperLineY)
                 .right(C.AR * 2 + max(itemWidth, C.AR) + C.AR)
                 .arc('ne')
                 .down(y - upperLineY + item.height - C.AR * 2)
                 .arc('ws')
                 .addTo(self))
                # Straight line
                (Path(x, y)
                 .right(C.AR * 2)
                 .addTo(self))
                item.format(x + C.AR * 2, y, item.width).addTo(self)
                (Path(x + item.width + C.AR * 2, y + item.height)
                 .right(itemSpace + C.AR)
                 .addTo(self))
                # Lower skip
                (Path(x, y)
                 .arc('ne')
                 .down(item.height + max(item.down + C.VS, C.AR * 2) - C.AR * 2)
                 .arc('ws')
                 .right(itemWidth - C.AR)
                 .arc('se')
                 .up(item.down + C.VS - C.AR * 2)
                 .arc('wn')
                 .addTo(self))
                x += C.AR * 2 + max(itemWidth, C.AR) + C.AR
                y += item.height
            else:
                # Straight line
                (Path(x, y)
                 .right(C.AR * 2)
                 .addTo(self))
                item.format(x + C.AR * 2, y, item.width).addTo(self)
                (Path(x + C.AR * 2 + item.width, y + item.height)
                 .right(itemSpace + C.AR)
                 .addTo(self))
                # Lower skip
                (Path(x, y)
                 .arc('ne')
                 .down(item.height + max(item.down + C.VS, C.AR * 2) - C.AR * 2)
                 .arc('ws')
                 .right(itemWidth - C.AR)
                 .arc('se')
                 .up(item.down + C.VS - C.AR * 2)
                 .arc('wn')
                 .addTo(self))
        return self
class AlternatingSequence(DiagramItem):
    """Diagram item for exactly two children drawn in strict alternation,
    connected by a crossover in the middle.
    """

    def __new__(cls, *items):
        if len(items) == 2:
            return super(AlternatingSequence, cls).__new__(cls)
        else:
            # BUG FIX: the original concatenated str + int, which raised
            # TypeError instead of the intended message.
            raise Exception(
                "AlternatingSequence takes exactly two arguments got {0}".format(len(items)))

    def __init__(self, *items):
        DiagramItem.__init__(self, 'g')
        self.items = [wrapString(item) for item in items]
        self.needsSpace = False
        arc = C.AR
        vert = C.VS
        first = self.items[0]
        second = self.items[1]
        # Geometry of the central crossover (two 45-degree arc pairs).
        arcX = 1 / math.sqrt(2) * arc * 2
        arcY = (1 - 1 / math.sqrt(2)) * arc * 2
        crossY = max(arc, vert)
        crossX = (crossY - arcY) + arcX
        # Vertical clearance above/below the centerline for each child.
        firstOut = max(arc + arc, crossY / 2 + arc + arc,
                       crossY / 2 + vert + first.down)
        self.up = firstOut + first.height + first.up
        secondIn = max(arc + arc, crossY / 2 + arc + arc,
                       crossY / 2 + vert + second.up)
        self.down = secondIn + second.height + second.down
        self.height = 0
        firstWidth = (20 if first.needsSpace else 0) + first.width
        secondWidth = (20 if second.needsSpace else 0) + second.width
        self.width = 2 * arc + max(firstWidth, crossX, secondWidth) + 2 * arc
        addDebug(self)

    def __repr__(self):
        items = ', '.join(repr(item) for item in self.items)
        return 'AlternatingSequence(%s)' % items

    def format(self, x, y, width):
        """Draw both children and the central crossover at (x, y)."""
        arc = C.AR
        gaps = determineGaps(width, self.width)
        Path(x, y).right(gaps[0]).addTo(self)
        x += gaps[0]
        Path(x + self.width, y).right(gaps[1]).addTo(self)
        # bounding box
        # Path(x+gaps[0], y).up(self.up).right(self.width).down(self.up+self.down).left(self.width).up(self.down).addTo(self)
        first = self.items[0]
        second = self.items[1]
        # top child
        firstIn = self.up - first.up
        firstOut = self.up - first.up - first.height
        Path(x, y).arc('se').up(firstIn - 2 * arc).arc('wn').addTo(self)
        first.format(
            x
            + 2
            * arc,
            y
            - firstIn,
            self.width
            - 4
            * arc).addTo(self)
        Path(x + self.width - 2 * arc, y
             - firstOut).arc('ne').down(firstOut - 2 * arc).arc('ws').addTo(self)
        # bottom child
        secondIn = self.down - second.down - second.height
        secondOut = self.down - second.down
        Path(x, y).arc('ne').down(secondIn - 2 * arc).arc('ws').addTo(self)
        second.format(
            x
            + 2
            * arc,
            y
            + secondIn,
            self.width
            - 4
            * arc).addTo(self)
        Path(x + self.width - 2 * arc, y
             + secondOut).arc('se').up(secondOut - 2 * arc).arc('wn').addTo(self)
        # crossover
        # BUG FIX: Math.sqrt was a JavaScript leftover (NameError at runtime);
        # __init__ above already correctly uses Python's math.sqrt.
        arcX = 1 / math.sqrt(2) * arc * 2
        arcY = (1 - 1 / math.sqrt(2)) * arc * 2
        crossY = max(arc, C.VS)
        crossX = (crossY - arcY) + arcX
        crossBar = (self.width - 4 * arc - crossX) / 2
        (Path(x + arc, y - crossY / 2 - arc).arc('ws').right(crossBar)
         .arc_8('n', 'cw').ll(crossX - arcX, crossY - arcY).arc_8('sw', 'ccw')
         .right(crossBar).arc('ne').addTo(self))
        (Path(x + arc, y + crossY / 2 + arc).arc('wn').right(crossBar)
         .arc_8('s', 'ccw').ll(crossX - arcX, -(crossY - arcY)).arc_8('nw', 'cw')
         .right(crossBar).arc('se').addTo(self))
        return self
class Choice(DiagramItem):
def __init__(self, default, *items):
DiagramItem.__init__(self, 'g')
assert default < len(items)
self.default = default
self.items = [wrapString(item) for item in items]
self.width = C.AR * 4 + max(item.width for item in self.items)
self.up = self.items[0].up
self.down = self.items[-1].down
self.height = self.items[default].height
for i, item in enumerate(self.items):
if i in [default - 1, default + 1]:
arcs = C.AR * 2
else:
arcs = C.AR
if i < default:
self.up += max(arcs, item.height + item.down
+ C.VS + self.items[i + 1].up)
elif i == default:
continue
else:
self.down += max(arcs, item.up + C.VS
+ self.items[i - 1].down + self.items[i - 1].height)
# already counted in self.height
self.down | |
import pygame
from pygame import mixer
import os
import random
import csv
# --- audio / pygame initialisation ---
mixer.init()
pygame.init()

# --- display setup ---
SCREEN_WIDTH = 800
SCREEN_HEIGHT = int(SCREEN_WIDTH*0.8)
SCREEN = pygame.display.set_mode((SCREEN_WIDTH,SCREEN_HEIGHT))
pygame.display.set_caption('HALLOWEEN GAME')

# SET FRAME RATE
clock = pygame.time.Clock()
FPS = 60

# GAME VARIABLES
GRAVITY = 1
SCROLL_THRESHOLD = 200  # distance from a screen edge at which world scrolling kicks in
ROWS = 16
COLUMNS = 150
TILE_SIZE = SCREEN_HEIGHT // ROWS
# NOTE(review): TILE_TYPES is the list of tile *filenames*; only its length is
# used below, and files are loaded as f'{i}.png' — assumes they are named
# 0.png .. N-1.png; confirm the directory contents.
TILE_TYPES = os.listdir('sprites/tiles')
MAX_LEVEL = 3
screen_scroll = 0  # per-frame horizontal world shift
bg_scroll = 0      # cumulative scroll, drives the parallax background
level = 1
start_game = False
start_intro = False

# PLAYER ACTION VARIABLES
run = True
moving_left = False
moving_right = False
shoot = False
throw = False
grenade_thrown = False

# COLOURS (RGB)
BG = (144,201,120)
RED = (255, 0, 0)
WHITE = (255, 255, 255)
GREEN = (0, 255, 0)
BLACK =(0, 0, 0)
PINK = (235, 65, 54)  # NOTE(review): named PINK but the value is a red shade

# LOADING MUSIC AND SOUND EFFECTS
pygame.mixer.music.load('audio/audio_music2.mp3')
pygame.mixer.music.set_volume(0.6)
pygame.mixer.music.play(-1, 0.0, 5000)  # loop forever, start at 0.0s, 5000ms fade-in
jump_snd = pygame.mixer.Sound('audio/audio_jump.wav')
jump_snd.set_volume(0.5)
shot_snd = pygame.mixer.Sound('audio/audio_shot.wav')
shot_snd.set_volume(0.5)
grenade_snd = pygame.mixer.Sound('audio/audio_thunder.wav')
grenade_snd.set_volume(0.3)
water_snd = pygame.mixer.Sound('audio/audio_water.wav')
water_snd.set_volume(0.5)

# LOADING IMAGES
# menu buttons
start_img = pygame.image.load('sprites/button/start_btn.png').convert_alpha()
restart_img = pygame.image.load('sprites/button/restart_btn.png').convert_alpha()
exit_img = pygame.image.load('sprites/button/exit_btn.png').convert_alpha()
# parallax background layers
pine1_img = pygame.image.load('sprites/background/pine1.png').convert_alpha()
pine2_img = pygame.image.load('sprites/background/pine2.png').convert_alpha()
mountain_img = pygame.image.load('sprites/background/mountain.png').convert_alpha()
sky_img = pygame.image.load('sprites/background/sky_cloud.png').convert_alpha()
# tile images, indexed by the tile number stored in the level data
tile_list = []
for i in range(len(TILE_TYPES)):
    img = pygame.image.load(f'sprites/tiles/{i}.png')
    img = pygame.transform.scale(img, (TILE_SIZE, TILE_SIZE)).convert_alpha()
    tile_list.append(img)
# projectile and pickup icons
bullet_img = pygame.image.load('sprites/icons/slash.png').convert_alpha()
grenade_img = pygame.image.load('sprites/icons/grenade.png').convert_alpha()
health_box_img = pygame.image.load('sprites/icons/health_box.png').convert_alpha()
ammo_box_img = pygame.image.load('sprites/icons/ammo_box.png').convert_alpha()
grenade_box_img = pygame.image.load('sprites/icons/grenade_box.png').convert_alpha()

# font for HUD text
font = pygame.font.SysFont('Futura', 30)
def draw_text(text, font, text_col, x, y):
    """Render *text* with *font* in colour *text_col* and blit it onto SCREEN at (x, y)."""
    rendered = font.render(text, True, text_col)
    SCREEN.blit(rendered, (x, y))
def background_colour(BG):
    """Fill the screen with *BG* and draw the tiled parallax background.

    Each layer scrolls at a different fraction of bg_scroll, the farther
    layers moving more slowly.
    """
    SCREEN.fill(BG)
    layer_width = sky_img.get_width()
    for tile_idx in range(5):
        base_x = tile_idx * layer_width
        SCREEN.blit(sky_img, (base_x - bg_scroll * 0.3, 0))
        SCREEN.blit(mountain_img, (base_x - bg_scroll * 0.4,
                                   SCREEN_HEIGHT - mountain_img.get_height() - 300))
        SCREEN.blit(pine1_img, (base_x - bg_scroll * 0.5,
                                SCREEN_HEIGHT - pine1_img.get_height() - 150))
        SCREEN.blit(pine2_img, (base_x - bg_scroll * 0.7,
                                SCREEN_HEIGHT - pine2_img.get_height()))
# RESET WORLD STATE
def reset_level():
    """Empty every sprite group and return a fresh, empty tile grid.

    Returns a ROWS x COLUMNS list-of-lists filled with -1 (-1 marks
    "no tile" in the level data).
    """
    for group in (enemy_group, bullet_group, grenade_group, item_box_group,
                  explosion_group, decoration_group, water_group, exit_group):
        group.empty()
    return [[-1] * COLUMNS for _ in range(ROWS)]
# Maps an item-box type tag (as used when spawning pickups) to its image.
item_boxes = {
    'Health' : health_box_img,
    'Ammo' : ammo_box_img,
    'Grenade' : grenade_box_img
}
class Button():
    """A clickable image button; draw() reports each click exactly once."""

    def __init__(self, x, y, image, scale):
        scaled_w = int(image.get_width() * scale)
        scaled_h = int(image.get_height() * scale)
        self.image = pygame.transform.scale(image, (scaled_w, scaled_h))
        self.rect = self.image.get_rect()
        self.rect.topleft = (x, y)
        self.clicked = False  # latched while the mouse button stays down

    def draw(self, surface):
        """Blit the button onto *surface*; return True on a fresh left-click over it."""
        was_clicked = False
        mouse_pos = pygame.mouse.get_pos()
        # Only register the press once per click, while hovering the button.
        if self.rect.collidepoint(mouse_pos):
            if pygame.mouse.get_pressed()[0] == 1 and not self.clicked:
                was_clicked = True
                self.clicked = True
        # Releasing the mouse anywhere re-arms the button.
        if pygame.mouse.get_pressed()[0] == 0:
            self.clicked = False
        surface.blit(self.image, (self.rect.x, self.rect.y))
        return was_clicked
class Character(pygame.sprite.Sprite):
    """A player- or AI-controlled sprite ('player', 'reaper', 'skeleton', 'wizard').

    Handles movement with gravity and tile collision, shooting, a simple
    patrol/attack AI for enemies, and frame-based animation.
    """

    def __init__(self, char_type, x, y, scale, speed, ammo, grenades):
        pygame.sprite.Sprite.__init__(self)
        self.alive = True
        self.char_type = char_type  # also the folder name under sprites/
        self.health = 100
        self.max_health = self.health
        self.speed = speed
        self.skill_cooldown = 0  # frames until the next shot is allowed
        self.ammo = ammo
        self.start_ammo = ammo
        self.grenades = grenades
        self.direction = 1  # 1 = facing right, -1 = facing left
        self.vel_y = 0
        self.jump = False
        self.in_air = True
        self.flip = False
        self.animation_list = []
        self.frame_index = 0
        # index into animation_list: 0 idle, 1 run, 2 jump, 3 death, 4 attack
        self.action = 0
        self.update_time = pygame.time.get_ticks()
        # AI-specific state
        self.vision = pygame.Rect(0, 0, 150, 20)  # detection zone in front of an enemy
        self.move_counter = 0
        self.idling = False
        self.idling_counter = 0
        self.score = 0
        if char_type == 'wizard':
            self.health = 200  # the wizard is tougher than the default 100
        # LOADING THE ANIMATIONS
        # Frames are loaded as 0.png .. N-1.png from each animation folder.
        animation_list = ['idle', 'run', 'jump', 'death', 'attack']
        for animation in animation_list:
            temp_list = []
            # number of frame images in this animation's folder
            no_of_frames = len(os.listdir(f'sprites/{self.char_type}/{animation}'))
            for i in range(no_of_frames):
                player_char = pygame.image.load(f'sprites/{self.char_type}/{animation}/{i}.png').convert_alpha()
                char = pygame.transform.scale(player_char, (int(player_char.get_width() * scale), int(player_char.get_height() * scale)))
                temp_list.append(char)
            self.animation_list.append(temp_list)
        self.img = self.animation_list[self.action][self.frame_index]
        self.rect = self.img.get_rect()  # positioning/collision rectangle
        self.rect.center = (x, y)
        self.width = self.img.get_width()
        self.height = self.img.get_height()

    def update(self):
        """Per-frame bookkeeping: animation, death check, cooldown tick."""
        self.update_animation()
        self.death()
        if self.skill_cooldown > 0:
            self.skill_cooldown -= 1

    def move(self, move_left, move_right):
        """Apply input and gravity, resolve collisions and screen scrolling.

        Returns:
            (screen_scroll, level_complete) — the horizontal world shift for
            this frame and whether the exit tile was reached.
        """
        screen_scroll = 0
        dx = 0
        dy = 0
        # horizontal input
        if move_left:
            dx = -self.speed
            self.flip = True
            self.direction = -1
        if move_right:
            dx = self.speed
            self.flip = False
            self.direction = 1
        # jump only when standing on the ground
        if self.jump == True and self.in_air == False:
            self.vel_y = -15
            self.jump = False
            self.in_air = True
        # gravity, clamped to a terminal velocity
        self.vel_y += GRAVITY
        if self.vel_y > 10:
            # BUG FIX: this clamp was a bare `self.vel_y` expression (a
            # no-op), so fall speed grew without bound.
            self.vel_y = 10
        dy += self.vel_y
        # tile collision
        for tile in world.obstacle_list:
            # x direction
            if tile[1].colliderect(self.rect.x + dx, self.rect.y, self.width, self.height):
                dx = 0
                # patrolling enemies turn around when they hit a wall
                if self.char_type == 'reaper' or self.char_type == 'skeleton' or self.char_type == 'wizard':
                    self.direction *= -1
                    self.move_counter = 0
            # y direction
            if tile[1].colliderect(self.rect.x, self.rect.y + dy, self.width, self.height):
                if self.vel_y < 0:
                    # moving up: bumped into a tile above
                    self.vel_y = 0
                    self.in_air = False
                    dy = tile[1].bottom - self.rect.top
                elif self.vel_y >= 0:
                    # falling: landed on a tile below
                    self.vel_y = 0
                    self.in_air = False
                    dy = tile[1].top - self.rect.bottom
        # touching water is fatal
        if pygame.sprite.spritecollide(self, water_group, False):
            self.health = 0
        # reaching the exit completes the level
        level_complete = False
        if pygame.sprite.spritecollide(self, exit_group, False):
            level_complete = True
        # falling off the bottom of the map is fatal
        if self.rect.bottom > SCREEN_HEIGHT:
            self.health = 0
        # keep the player inside the horizontal screen bounds
        if self.char_type == 'player':
            if self.rect.left + dx < 0 or self.rect.right + dx > SCREEN_WIDTH:
                dx = 0
        # apply the resolved movement
        self.rect.x += dx
        self.rect.y += dy
        # scroll the world instead of moving the player near a screen edge
        if self.char_type == 'player':
            # BUG FIX: the left-edge test compared against the local
            # `screen_scroll`, which is always 0 at this point, so the view
            # never scrolled left; it must compare against the global
            # bg_scroll (how far the world has already scrolled right).
            if (self.rect.right > SCREEN_WIDTH - SCROLL_THRESHOLD and bg_scroll < (world.level_length * TILE_SIZE) - SCREEN_WIDTH) \
                    or (self.rect.left < SCROLL_THRESHOLD and bg_scroll > abs(dx)):
                self.rect.x -= dx
                screen_scroll = -dx
        return screen_scroll, level_complete

    def shoot(self):
        """Fire a bullet in the facing direction if off cooldown and ammo remains."""
        if self.skill_cooldown == 0 and self.ammo > 0:
            self.skill_cooldown = 85  # enemies shoot slowly...
            if self.char_type == 'player':
                self.skill_cooldown = 45  # ...the player faster
            bullet = Bullet(self.rect.centerx + (0.75 * self.rect.size[0] * self.direction), self.rect.centery, self.direction, self.flip)
            bullet_group.add(bullet)
            self.ammo -= 1
            shot_snd.play()

    def ai(self):
        """Simple enemy behaviour: patrol, occasionally idle, attack on sight."""
        if self.alive and player.alive:
            # occasionally stop and idle for a while
            if self.idling == False and random.randint(1, 200) == 1:
                self.update_action(0)  # 0: idle
                self.idling = True
                self.idling_counter = 50
            # attack when the player enters the vision rectangle
            if self.vision.colliderect(player.rect):
                self.update_action(4)  # 4: attack
                self.shoot()
            else:
                if self.idling == False:
                    # patrol back and forth
                    if self.direction == 1:
                        ai_moving_right = True
                    else:
                        ai_moving_right = False
                    ai_moving_left = not ai_moving_right
                    self.move(ai_moving_left, ai_moving_right)
                    self.update_action(1)  # 1: run
                    self.move_counter += 1
                    # keep the vision zone in front of the enemy as it moves
                    self.vision.center = (self.rect.centerx + 75 * self.direction, self.rect.centery)
                    if self.move_counter > TILE_SIZE:
                        self.direction *= -1
                        self.move_counter *= -1
                else:
                    self.idling_counter -= 1
                    if self.idling_counter <= 0:
                        self.idling = False
        # enemies ride along with the scrolling world
        self.rect.x += screen_scroll

    def update_animation(self):
        """Advance to the next animation frame when enough time has passed."""
        animation_colldown = 100  # ms between frames
        self.img = self.animation_list[self.action][self.frame_index]
        if pygame.time.get_ticks() - self.update_time > animation_colldown:
            self.update_time = pygame.time.get_ticks()
            self.frame_index += 1
        # loop the animation, except death (3) which freezes on its last frame
        if self.frame_index >= len(self.animation_list[self.action]):
            if self.action == 3:
                self.frame_index = len(self.animation_list[self.action]) - 1
            else:
                self.frame_index = 0

    def update_action(self, new_action):
        """Switch to *new_action*'s animation, restarting it only on change."""
        if new_action != self.action:
            self.action = new_action
            self.frame_index = 0
            self.update_time = pygame.time.get_ticks()

    def death(self):
        """If health is exhausted, freeze the character and play the death animation."""
        if self.health <= 0:
            self.health = 0
            self.speed = 0
            self.alive = False
            self.update_action(3)  # 3: death

    def draw(self):
        # Flip horizontally when facing left.
        SCREEN.blit(pygame.transform.flip(self.img, self.flip, False), self.rect)
class World():
    def __init__(self):
        # Solid tiles as (image, rect) pairs; filled in by process_data().
        self.obstacle_list = []
def process_data(self, data):
self.level_length = len(data[0])
#ITERATE THROUGH DATA FILE TO PROCESS DATA
for y, row in enumerate(data):
for x, tile in enumerate(row):
if tile >= 0:
img = tile_list[tile]
img_rect = img.get_rect()
img_rect.x = x * TILE_SIZE
img_rect.y = y * TILE_SIZE
tile_data = (img, img_rect)
if tile >= 0 and tile <= 8:
self.obstacle_list.append(tile_data)
elif tile >= 9 and tile <= 10:
water = Water(img, x * TILE_SIZE, y * TILE_SIZE)
water_group.add(water)
elif tile >= 11 and tile <= 14:
decoration = Decoration(img, x * TILE_SIZE, y * TILE_SIZE)
decoration_group.add(decoration)
elif tile == 15: #CREATE PLAYER
player = Character('player', x * TILE_SIZE, y * TILE_SIZE, 1.25, 5, 20, 5)
health_bar = HealthBar(10,10, player.health, player.health)
elif tile == 16: #CREATE ENEMIES
enemy1 = Character('reaper', x * TILE_SIZE, y * TILE_SIZE, 1.25, 3, 50, 0)
enemy_group.add(enemy1)
elif tile == 17: #AMMO BOX
item_box = ItemRefill('Ammo', x * TILE_SIZE, y * TILE_SIZE)
item_box_group.add(item_box)
elif tile == 18: #GRENADE BOX
item_box = ItemRefill('Grenade', x * TILE_SIZE, y * TILE_SIZE)
item_box_group.add(item_box)
elif tile == 19: #HEALTHBOX
item_box = ItemRefill('Health', x * TILE_SIZE, y * TILE_SIZE)
item_box_group.add(item_box)
elif tile == 20: #CREATE EXIT
img = pygame.transform.scale(img, (130, 240)).convert_alpha()
exit = Exit(img, x * TILE_SIZE, | |
import sys
import os
import datetime
import logging
from tqdm import tqdm
import copy
import numpy as np
import re
from concurrent import futures
logger = logging.getLogger("sacluster").getChild(os.path.basename(__file__))
path = "../../.."
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(path + "/lib/others")
from API_method import get, post, put, delete
from info_print import printout
sys.path.append(path + "/lib/def_conf")
from config_function import conf_pattern_2
class modify_sacluster:
def __init__(self, cluster_info, cluster_id, auth_res, ext_info, fp = "", info_list = [1,0,0,0], api_index = True, max_workers = 1):
self.cluster_info = cluster_info
self.cluster_id = cluster_id
self.auth_res = auth_res
self.ext_info = ext_info
self.fp = fp
self.info_list = info_list
self.api_index = api_index
self.max_workers = max_workers
#[k for k in self.cluster_info.keys()]
self.zone_list = [zone_list for zone_list in self.cluster_info["clusterparams"]["server"].keys()]
#if "nfs" in self.cluster_info[cluster_id]["cluster_params"]:
if self.cluster_info["clusterparams"]["nfs"] != None:
self.nfs_zones = list(self.cluster_info["clusterparams"]["nfs"].keys())
self.max_node_num = 0
self.proactice_zones = []
for zone in ext_info["Zone"]:
if(self.ext_info["Zone"][zone]["Type"] == "practice" and self.ext_info["Zone"][zone]["maximum"] > 0):
self.max_node_num += self.ext_info["Zone"][zone]["maximum"]
self.proactice_zones.append(zone)
self.current_zone_max_num = 0
self.head_zone_num = 0
for zone in self.zone_list:
self.current_zone_max_num += self.ext_info["Zone"][zone]["maximum"]
if "head" in self.cluster_info["clusterparams"]["server"][zone].keys():
self.head_zone = zone
self.head_zone_num = self.ext_info["Zone"][zone]["maximum"]
self.url_list = {}
for zone in ext_info["Zone"]:
self.url_list[zone] = "https://secure.sakura.ad.jp/cloud/zone/"+ zone +"/api/cloud/1.1"
self.head_url = "https://secure.sakura.ad.jp/cloud/zone/"+ self.head_zone +"/api/cloud/1.1"
self.sub_url = ["/server","/disk","/switch","/interface","/bridge","/tag","/appliance","/power"]
self.date_modified = "Date modified:" + str(datetime.datetime.now().strftime("%Y_%m_%d"))
def __call__(self):
self.show_current_states()
if(self.mod_type == "1"):
self.modify_compute_node_number()
elif(self.mod_type == "2"):
self.modify_back_switch()
elif(self.mod_type == "3"):
self.modify_core_and_memory()
else:
_ = printout("Warning: the input must be a number from 1 to 3.", info_type = 0, info_list = self.info_list, fp = self.fp)
printout("Finished modifying the cluster", info_type = 0, info_list = self.info_list, fp = self.fp)
def show_current_states(self):
printout('' , info_type = 0, info_list = self.info_list, fp = self.fp)
text_len = len('#' *25 + ' current state ' + '#' *25)
printout('#' *25 + ' current state ' + '#' *25, info_type = 0, info_list = self.info_list, fp = self.fp)
compute_number = sum([len(val["compute"]) for key, val in self.cluster_info["clusterparams"]["server"].items()])
printout(' ' * 10 + 'The number of compute node:'.ljust(35, ' ') + str(compute_number), info_type = 0, info_list = self.info_list, fp = self.fp)
switch_back_zone = []
for key, val in self.cluster_info["clusterparams"]["switch"].items():
#if("back" in val):
if(self.cluster_info["clusterparams"]["switch"][key]["back"] != None):
switch_back_zone.append(key)
printout('' , info_type = 0, info_list = self.info_list, fp = self.fp)
if(len(switch_back_zone) == 0):
printout(' ' * 10 +'Switch of back area:'.ljust(35, ' ') + 'False', info_type = 0, info_list = self.info_list, fp = self.fp)
elif(len(self.cluster_info["clusterparams"]["switch"]) == len(switch_back_zone)):
printout(' ' * 10 +'Switch of back area:'.ljust(35, ' ') + 'True', info_type = 0, info_list = self.info_list, fp = self.fp)
elif(len(self.cluster_info["clusterparams"]["switch"]) > len(switch_back_zone)):
printout(' ' * 10 +'Switch of back area:'.ljust(35, ' ') + 'Some zones are True', info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 +'(' + ",".join(switch_back_zone) + ')', info_type = 0, info_list = self.info_list, fp = self.fp)
node_plan = {}
for zone, val in self.cluster_info["clusterparams"]["server"].items():
if("head" in val):
head_zone = zone
head_core = val["head"]["node"]["core"]
head_memory = val["head"]["node"]["memory"]
for num, val_comp in val["compute"].items():
if(str(val_comp["node"]["core"]) + "-" + str(val_comp["node"]["memory"]) not in node_plan):
node_plan[str(val_comp["node"]["core"]) + "-" + str(val_comp["node"]["memory"])] = 0
else:
node_plan[str(val_comp["node"]["core"]) + "-" + str(val_comp["node"]["memory"])] += 1
printout('' , info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 + 'Node information' , info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 + '((Head node))' , info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 + 'Core:'.ljust(35, ' ') + str(head_core), info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 +'Memory:'.ljust(35, ' ') + str(head_memory), info_type = 0, info_list = self.info_list, fp = self.fp)
printout('' , info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 + '((Compute node))' , info_type = 0, info_list = self.info_list, fp = self.fp)
if(len(node_plan) == 1):
for key, val in node_plan.items():
text = ' ' * 10 +'Core:'.ljust(35, ' ') + str(key.split("-")[0])
printout(text, info_type = 0, info_list = self.info_list, fp = self.fp)
text = ' ' * 10 +'Memory:'.ljust(35, ' ') + str(key.split("-")[1])
printout(text, info_type = 0, info_list = self.info_list, fp = self.fp)
else:
count = 0
for key, val in node_plan.items():
printout(' ' * 10 +'compute node type ' + str(count), info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 +'Core:'.ljust(35, ' ') + str(key.split("-")[0]) , info_type = 0, info_list = self.info_list, fp = self.fp)
printout(' ' * 10 +'Memory:'.ljust(35, ' ') + str(key.split("-")[1]), info_type = 0, info_list = self.info_list, fp = self.fp)
count += 1
printout('#' * text_len, info_type = 0, info_list = self.info_list, fp = self.fp)
self.mod_type = self.answer_response(' \n<<Contents to modify>>\n1. The number of compute node\n2. Switch of back area\n3. Core or memory of nodes', ["1", "2", "3"], "1 to 3", input_comment = "Please input a content number", opp = 1)
    def modify_back_switch(self):
        """Add or remove the back-area switch network for the whole cluster.

        Prompts the operator for confirmation, then either creates (switch +
        per-node NIC + optional inter-zone bridge) or tears down the back-area
        network, mutating ``self.cluster_info`` and calling the cloud API as it
        goes. Exits the process via ``sys.exit()`` if the operator declines.
        A mixed state (only some zones have a back switch) is not supported.
        """
        # Zones that currently have a back-area switch.
        switch_back_zone = []
        for key, val in self.cluster_info["clusterparams"]["switch"].items():
            if(self.cluster_info["clusterparams"]["switch"][key]["back"] != None):
                switch_back_zone.append(key)
        printout('' , info_type = 0, info_list = self.info_list, fp = self.fp)
        if(len(switch_back_zone) == 0):
            # No zone has a back switch yet -> offer to create one everywhere.
            printout('Switch of back area is False in the current state', info_type = 0, info_list = self.info_list, fp = self.fp)
            input_val = self.answer_response("Can a switch be installed in the back area?", ["yes", "y", "no", "n"], "yes/y or no/n")
            if(input_val == "yes" or input_val == "y"):
                self.bar = tqdm(total = 100)
                self.bar.set_description('Progress rate')
                self.progress_sum = 0
                if(len(self.zone_list) > 1):
                    progress_val = 20
                else:
                    progress_val = 35
                for zone in self.zone_list:
                    # Create the switch and record its id in the cluster state.
                    self.cluster_info["clusterparams"]["switch"][zone]["back"] = {}
                    self.cluster_info["clusterparams"]["switch"][zone]["back"]["id"] = self.create_switch(zone)
                    self.progress_bar(int(30 / len(self.zone_list)))
                    node_num = len(self.cluster_info["clusterparams"]["server"][zone]["compute"])
                    for key, val in self.cluster_info["clusterparams"]["server"][zone]["compute"].items():
                        # Add a back-area NIC to each compute node and wire it
                        # to the freshly created switch.
                        nic_id = self.add_interface(zone, val["node"]["id"])
                        # NOTE(review): len(zone) is the length of the zone *name
                        # string*; len(self.zone_list) was probably intended.
                        # Affects progress-bar maths only -- confirm and fix.
                        self.progress_bar(int(progress_val / (len(zone) * node_num)))
                        self.cluster_info["clusterparams"]["server"][zone]["compute"][key]["nic"]["back"] = {}
                        self.cluster_info["clusterparams"]["server"][zone]["compute"][key]["nic"]["back"]["id"] = nic_id
                        self.connect_switch(zone, self.cluster_info["clusterparams"]["switch"][zone]["back"]["id"], self.cluster_info["clusterparams"]["server"][zone]["compute"][key]["nic"]["back"]["id"])
                        self.progress_bar(int(progress_val / (len(zone) * node_num)))
                if(len(self.zone_list) > 1):
                    # Multi-zone clusters need a bridge joining each zone's back switch.
                    bridge_id = self.create_bridge()
                    self.progress_bar(10)
                    self.cluster_info["clusterparams"]["bridge"]["back"] = {}
                    self.cluster_info["clusterparams"]["bridge"]["back"]["id"] = bridge_id
                    for zone in self.zone_list:
                        self.progress_bar(int(20/len(self.zone_list)))
                        _ = self.connect_bridge_switch(zone, self.cluster_info["clusterparams"]["switch"][zone]["back"]["id"], self.cluster_info["clusterparams"]["bridge"]["back"]["id"])
                self.bar.update(100 - self.progress_sum)
                self.bar.close()
            else:
                printout('Please start the operation over from the beginning.', info_type = 0, info_list = self.info_list, fp = self.fp)
                sys.exit()
        elif(len(self.cluster_info["clusterparams"]["switch"]) == len(switch_back_zone)):
            # Every zone has a back switch -> offer to tear the back network down.
            printout('Switch of back area is True in the current state', info_type = 0, info_list = self.info_list, fp = self.fp)
            input_val = self.answer_response("Can a switch be deleted in the back area?", ["yes", "y", "no", "n"], "yes/y or no/n")
            if(input_val == "yes" or input_val == "y"):
                self.bar = tqdm(total = 100)
                self.bar.set_description('Progress rate')
                self.progress_sum = 0
                if(len(self.zone_list) > 1):
                    progress_val = 20
                else:
                    progress_val = 50
                for zone in self.zone_list:
                    node_num = len(self.cluster_info["clusterparams"]["server"][zone]["compute"])
                    for key, val in self.cluster_info["clusterparams"]["server"][zone]["compute"].items():
                        # Detach, then delete, each compute node's back-area NIC.
                        self.dis_connect_server_switch(zone, self.cluster_info["clusterparams"]["server"][zone]["compute"][key]["nic"]["back"]["id"])
                        # NOTE(review): same len(zone) suspicion as in the
                        # creation branch above -- progress-bar maths only.
                        self.progress_bar(int(progress_val / (len(zone) * node_num)))
                        self.delete_interface(zone, self.cluster_info["clusterparams"]["server"][zone]["compute"][key]["nic"]["back"]["id"])
                        self.progress_bar(int(progress_val / (len(zone) * node_num)))
                        self.cluster_info["clusterparams"]["server"][zone]["compute"][key]["nic"]["back"] = None
                for zone in self.zone_list:
                    # Unhook the bridge first (multi-zone only), then drop the switch.
                    if(len(self.zone_list) > 1):
                        self.disconnect_bridge_switch(zone, self.cluster_info["clusterparams"]["switch"][zone]["back"]["id"])
                        self.progress_bar(int(20/len(self.zone_list)))
                    self.delete_switch(zone, self.cluster_info["clusterparams"]["switch"][zone]["back"]["id"])
                    self.cluster_info["clusterparams"]["switch"][zone]["back"] = None
                    self.progress_bar(int(30 / len(self.zone_list)))
                if(len(self.zone_list) > 1):
                    self.delete_bridge(self.cluster_info["clusterparams"]["bridge"]["back"]["id"])
                    self.cluster_info["clusterparams"]["bridge"]["back"] = None
                    self.progress_bar(10)
                self.bar.update(100 - self.progress_sum)
                self.bar.close()
            else:
                printout('Please start the operation over from the beginning.', info_type = 0, info_list = self.info_list, fp = self.fp)
                sys.exit()
        elif(len(self.cluster_info["clusterparams"]["switch"]) > len(switch_back_zone)):
            # Mixed state (only some zones have a back switch) is not handled.
            printout('This cluster is not a target of sacluster operation', info_type = 0, info_list = self.info_list, fp = self.fp)
            #input_val = self.mod_type = self.answer_response(" \n<<select option>>\n1. Delete the switch of back area in " + ",".join(switch_back_zone) + "\n2. Add switches in areas other than " + ",".join(switch_back_zone), ["1", "2"], "1 or 2", input_comment = "Please input a opption number", opp = 1)
    # Modify the number of CPU cores and the amount of memory
def modify_core_and_memory(self):
node_type = ["head node", "compute nodes"][int(self.answer_response(' \nPlease select the node type to modify the setting\n1. Head node\n2. Compute nodes', ["1", "2"], "1 or 2", input_comment = "Please input a number", opp = 1)) - 1]
node_plan, core_plan, memory_plan = self.core_memory_setting(node_type)
if(node_type == "head node"):
self.bar = tqdm(total = 100)
self.bar.set_description('Progress rate')
self.progress_sum = 0
self.change_node_plan(self.head_zone, self.cluster_info["clusterparams"]["server"][self.head_zone]["head"]["node"]["id"], node_plan)
self.bar.update(100)
self.bar.close()
else:
while(True):
self.bar = tqdm(total = 100)
self.bar.set_description('Progress rate')
self.progress_sum = 0
future = []
with futures.ThreadPoolExecutor(max_workers = self.max_workers, thread_name_prefix="thread") as executor:
for zone in self.zone_list:
for i in self.cluster_info["clusterparams"]["server"][zone]["compute"].keys():
logger.debug(self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["node"]["id"])
logger.debug(node_plan)
future.append(executor.submit(self.change_node_plan, zone, self.cluster_info["clusterparams"]["server"][zone]["compute"][i]["node"]["id"], node_plan, ind = 1))
| |
<gh_stars>0
# -*- coding: utf-8 -*-
# Copyright (c) 2017-18 <NAME> and contributors
# See LICENSE.rst for details.
"""
Encapsulates sending commands and data over a serial interface, whether that
is I²C, SPI or bit-banging GPIO.
"""
import errno
import time
import luma.core.error
from luma.core import lib
__all__ = ["i2c", "spi", "bitbang", "ftdi_spi", "ftdi_i2c"]
class i2c(object):
    """
    Wrap an `I²C <https://en.wikipedia.org/wiki/I%C2%B2C>`_ (Inter-Integrated
    Circuit) interface to provide :py:func:`data` and :py:func:`command` methods.

    :param bus: A *smbus* implementation, if ``None`` is supplied (default),
        `smbus2 <https://pypi.python.org/pypi/smbus2>`_ is used.
        Typically this is overridden in tests, or if there is a specific
        reason why `pysmbus <https://pypi.python.org/pypi/pysmbus>`_ must be used
        over smbus2.
    :param port: I²C port number, usually 0 or 1 (default).
    :type port: int
    :param address: I²C address, default: ``0x3C``.
    :type address: int
    :raises luma.core.error.DeviceAddressError: I2C device address is invalid.
    :raises luma.core.error.DeviceNotFoundError: I2C device could not be found.
    :raises luma.core.error.DevicePermissionError: Permission to access I2C device
        denied.

    .. note::
       1. Only one of ``bus`` OR ``port`` arguments should be supplied;
          if both are, then ``bus`` takes precedence.
       2. If ``bus`` is provided, there is an implicit expectation
          that it has already been opened.
    """
    def __init__(self, bus=None, port=1, address=0x3C):
        # Function-scope import: smbus2 is only required when an i2c
        # transport is actually instantiated.
        import smbus2
        # Control bytes prefixed to every transfer: 0x00 = command, 0x40 = data.
        self._cmd_mode = 0x00
        self._data_mode = 0x40
        try:
            # int(..., 0) auto-detects the base, so 0x3C, "0x3C" and "60" all work.
            self._addr = int(str(address), 0)
        except ValueError:
            raise luma.core.error.DeviceAddressError(
                'I2C device address invalid: {}'.format(address))
        try:
            # _managed is True when we opened the bus ourselves and therefore
            # own closing it in cleanup(); a caller-supplied bus is left open.
            self._managed = bus is None
            self._i2c_msg_write = smbus2.i2c_msg.write if bus is None else None
            self._bus = bus or smbus2.SMBus(port)
        except (IOError, OSError) as e:
            # Map raw OS errors onto luma's error hierarchy.
            if e.errno == errno.ENOENT:
                # FileNotFoundError
                raise luma.core.error.DeviceNotFoundError(
                    'I2C device not found: {}'.format(e.filename))
            elif e.errno in [errno.EPERM, errno.EACCES]:
                # PermissionError
                raise luma.core.error.DevicePermissionError(
                    'I2C device permission denied: {}'.format(e.filename))
            else:  # pragma: no cover
                raise
    def command(self, *cmd):
        """
        Sends a command or sequence of commands through to the I²C address
        - maximum allowed is 32 bytes in one go.

        :param cmd: A spread of commands.
        :type cmd: int
        :raises luma.core.error.DeviceNotFoundError: I2C device could not be found.
        """
        # SMBus block writes are capped at 32 payload bytes.
        assert(len(cmd) <= 32)
        try:
            self._bus.write_i2c_block_data(self._addr, self._cmd_mode,
                                           list(cmd))
        except (IOError, OSError) as e:
            if e.errno in [errno.EREMOTEIO, errno.EIO]:
                # I/O error
                raise luma.core.error.DeviceNotFoundError(
                    'I2C device not found on address: 0x{0:02X}'.format(self._addr))
            else:  # pragma: no cover
                raise
    def data(self, data):
        """
        Sends a data byte or sequence of data bytes to the I²C address.
        If the bus is in managed mode backed by smbus2, the i2c_rdwr
        method will be used to avoid having to send in chunks.
        For SMBus devices the maximum allowed in one transaction is
        32 bytes, so if data is larger than this, it is sent in chunks.

        :param data: A data sequence.
        :type data: list, bytearray
        """
        # block size is the maximum data payload that will be tolerated.
        # The managed i2c will transfer blocks of upto 4K (using i2c_rdwr)
        # whereas we must use the default 32 byte block size when unmanaged
        if self._managed:
            block_size = 4096
            write = self._write_large_block
        else:
            block_size = 32
            write = self._write_block
        i = 0
        n = len(data)
        while i < n:
            write(list(data[i:i + block_size]))
            i += block_size
    def _write_block(self, data):
        # Unmanaged path: plain SMBus block write, hard 32-byte limit.
        assert len(data) <= 32
        self._bus.write_i2c_block_data(self._addr, self._data_mode, data)
    def _write_large_block(self, data):
        # Managed path: a single i2c_rdwr transaction carries up to 4K,
        # with the data-mode control byte prepended.
        assert len(data) <= 4096
        self._bus.i2c_rdwr(self._i2c_msg_write(self._addr, [self._data_mode] + data))
    def cleanup(self):
        """
        Clean up I²C resources (closes the bus only if this object opened it).
        """
        if self._managed:
            self._bus.close()
@lib.rpi_gpio
class bitbang(object):
    """
    Wraps an `SPI <https://en.wikipedia.org/wiki/Serial_Peripheral_Interface_Bus>`_
    (Serial Peripheral Interface) bus to provide :py:func:`data` and
    :py:func:`command` methods. This is a software implementation and is thus
    a lot slower than the default SPI interface. Don't use this class directly
    unless there is a good reason!

    :param gpio: GPIO interface (must be compatible with `RPi.GPIO <https://pypi.python.org/pypi/RPi.GPIO>`_).
        For slaves that don't need reset or D/C functionality, supply a
        :py:class:`noop` implementation instead.
    :param transfer_size: Max bytes to transfer in one go. Some implementations
        only support maximum of 64 or 128 bytes, whereas RPi/py-spidev supports
        4096 (default).
    :type transfer_size: int
    :param SCLK: The GPIO pin to connect the SPI clock to.
    :type SCLK: int
    :param SDA: The GPIO pin to connect the SPI data (MOSI) line to.
    :type SDA: int
    :param CE: The GPIO pin to connect the SPI chip enable (CE) line to.
    :type CE: int
    :param DC: The GPIO pin to connect data/command select (DC) to.
    :type DC: int
    :param RST: The GPIO pin to connect reset (RES / RST) to.
    :type RST: int
    """
    def __init__(self, gpio=None, transfer_size=4096, **kwargs):
        self._transfer_size = transfer_size
        # _managed: True when we created the GPIO wrapper and must clean it up.
        self._managed = gpio is None
        self._gpio = gpio or self.__rpi_gpio__()
        # _configure() maps each named pin to a gpiod-style line object
        # (or None when the pin was not supplied / not recognised).
        self._SCLK = self._configure(kwargs.get("SCLK"))
        self._SDA = self._configure(kwargs.get("SDA"))
        self._CE = self._configure(kwargs.get("CE"))
        # NOTE(review): debug print left in -- consider logging instead.
        print("clk:{}, sda:{}, ce:{}".format(self._SCLK,self._SDA,self._CE))
        # Pulse the board-wide tri-state control around DC/RST configuration.
        self.set_line("TRI_STATE_ALL", 0)
        self._DC = self._configure(kwargs.get("DC"))
        self._RST = self._configure(kwargs.get("RST"))
        time.sleep(0.1)
        self.set_line("TRI_STATE_ALL", 1)
        self._cmd_mode = 0  # Command mode = Hold low
        self._data_mode = 1  # Data mode = Pull high
        if self._RST is not None:
            line = self._RST
            # NOTE(review): debug print left in -- consider logging instead.
            print("pin - {}, {} / {}".format(self._RST, line.owner().name(), line.offset()))
            # Pulse RST low then high to reset the device; it stays pulled high.
            line.set_value(0)
            time.sleep(0.01)
            line.set_value(1)
    def set_line(self, name, value):
        # Look a line up by name, drive it once, and release it again.
        line = self._gpio.find_line(name)
        line.request(consumer=line.owner().name(), type=self._gpio.LINE_REQ_DIR_OUT)
        line.set_value(value)
        line.release()
    def _configure(self, pin):
        # Translate a board pin number to a gpiochip0 line, enabling the
        # matching mux direction first.
        # NOTE(review): only pins 7/8/9 are mapped (48/49/183) -- this looks
        # board-specific; confirm the mapping table against the hardware docs.
        if pin is not None:
            if pin == 7:
                self.set_line("MUX32_DIR", 1)
                line = self._gpio.Chip('gpiochip0').get_line(48)
                print("mux32")
            elif pin == 8:
                self.set_line("MUX30_DIR", 1)
                line = self._gpio.Chip('gpiochip0').get_line(49)
                print("mux30")
            elif pin == 9:
                self.set_line("MUX28_DIR", 1)
                line = self._gpio.Chip('gpiochip0').get_line(183)
                print("mux28")
            else:
                print("unknow pin mapping !!!!!!!!!!")
                return None
            print("pin - {}, {} / {}".format(pin, line.owner().name(), line.offset()))
            # Claim the line as an output for the lifetime of this object.
            line.request(consumer=line.owner().name(), type=self._gpio.LINE_REQ_DIR_OUT)
            return line
    def command(self, *cmd):
        """
        Sends a command or sequence of commands through to the SPI device.

        :param cmd: A spread of commands.
        :type cmd: int
        """
        if self._DC:
            # Drive D/C low to signal command bytes.
            line = self._DC
            line.set_value(self._cmd_mode)
        self._write_bytes(list(cmd))
    def data(self, data):
        """
        Sends a data byte or sequence of data bytes through to the SPI device.
        If the data is more than :py:attr:`transfer_size` bytes, it is sent in chunks.

        :param data: A data sequence.
        :type data: list, bytearray
        """
        if self._DC:
            line = self._DC
            # NOTE(review): literal 1 used here instead of self._data_mode
            # (same value today) -- align with command() for consistency.
            line.set_value(1)
        i = 0
        n = len(data)
        tx_sz = self._transfer_size
        while i < n:
            self._write_bytes(data[i:i + tx_sz])
            i += tx_sz
    def _write_bytes(self, data):
        # NOTE(review): debug print left in -- consider logging instead.
        print("write ce {},sck {}, sda {}".format(self._CE, self._SCLK, self._SDA))
        gpio = self._gpio
        # NOTE(review): gpio.output() is called with the line objects returned
        # by _configure(), whereas command()/data() call set_value() on them
        # directly -- confirm the gpio wrapper accepts line objects here.
        if self._CE:
            gpio.output(self._CE, gpio.LOW)  # Active low
        # Bit-bang each byte MSB-first: present the bit on SDA, then clock it.
        for byte in data:
            for _ in range(8):
                gpio.output(self._SDA, byte & 0x80)
                gpio.output(self._SCLK, gpio.HIGH)
                byte <<= 1
                gpio.output(self._SCLK, gpio.LOW)
        if self._CE:
            gpio.output(self._CE, gpio.HIGH)
    def cleanup(self):
        """
        Clean up GPIO resources if managed.
        """
        print("clean up")
        # Release the lines we requested in _configure().
        if self._DC:
            self._DC.release()
        if self._RST:
            self._RST.release()
        if self._managed:
            self._gpio.cleanup()
@lib.spidev
class spi(bitbang):
"""
Wraps an `SPI <https://en.wikipedia.org/wiki/Serial_Peripheral_Interface_Bus>`_
(Serial Peripheral Interface) bus to provide :py:func:`data` and
:py:func:`command` methods.
:param spi: SPI implementation (must be compatible with `spidev <https://pypi.python.org/pypi/spidev/>`_)
:param gpio: GPIO interface (must be compatible with `RPi.GPIO <https://pypi.python.org/pypi/RPi.GPIO>`_).
For slaves that don't need reset or D/C functionality, supply a
:py:class:`noop` implementation instead.
:param port: SPI port, usually 0 (default) or 1.
:type port: int
:param device: SPI device, usually 0 (default) or 1.
:type device: int
:param bus_speed_hz: SPI bus speed, defaults to 8MHz.
:type bus_speed_hz: int
:param cs_high: Whether SPI chip select is high, defaults to ``False``.
:type cs_high: bool
:param transfer_size: Maximum amount of bytes to transfer in one go. Some implementations
only support a maximum of 64 or 128 bytes, whereas RPi/py-spidev supports
4096 (default).
:type transfer_size: int
:param gpio_DC: The GPIO pin to connect data/command select (DC) to (defaults to 24).
:type gpio_DC: int
:param gpio_RST: The GPIO pin to connect reset (RES / RST) to (defaults to 25).
:type gpio_RST: int
:raises luma.core.error.DeviceNotFoundError: SPI device could not be found.
:raises luma.core.error.UnsupportedPlatform: GPIO access not available.
"""
def __init__(self, spi=None, gpio=None, port=0, device=0,
bus_speed_hz=8000000, cs_high=False, transfer_size=4096,
gpio_DC=24, gpio_RST=25):
assert(bus_speed_hz | |
#! /usr/bin/python
import os
SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
import sys
# Class containing statistical information gained from a dataset.
# The statistics should be characteristic of a sequencing platform.
# This class is used for importing FASTQSim profiles.
class FQSProfile:
    def __init__(self, name):
        """Create an empty profile for the sequencing platform *name*.

        All tables start empty and are populated by the load_* methods below.
        """
        # Platform label; used in error messages by the loaders.
        self.name = name
        self.delCount = {}
        self.delsByRead = []
        self.delSize = {}
        self.insertCount = {}
        self.insertsByRead = []
        self.insertSize = {}
        self.mutationCount = {}
        self.mutationType = {}
        self.posCount = {}
        self.primerCheck = {}
        self.qualHist = {}
        self.readHist = {}
# load delCount from .csv file
def load_delCount(self, filepath):
self.delCount = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly two elements
elements = line[:-1].split(',') # disregarding \n at the end
if len(elements) != 2:
sys.stderr.write('\n\nInvalid line in %s delCount file\n' % self.name)
exit(1)
if int(elements[0]) in self.delCount.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s delCount file\n' % self.name)
exit(1)
self.delCount[int(elements[0])] = int(elements[1])
return self.delCount
def load_delsByRead(self, filepath):
self.delsByRead = []
with open(filepath) as f:
# there should be only one line in the file, so I'm looking only at the first line
line = f.readline()[:-1] # removing \n from the end
# splitting each line using ','
elements = line.split(',')
for element in elements:
self.delsByRead.append(int(element))
return self.delsByRead
def load_delSize(self, filepath):
self.delSize = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly three elements
elements = line.split(',')
if len(elements) != 3:
sys.stderr.write('\n\nInvalid line in %s delSize file\n' % self.name)
import pdb
pdb.set_trace()
exit(1)
if int(elements[0]) in self.delSize.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s delSize file\n' % self.name)
exit(1)
self.delSize[int(elements[0])] = [int(elements[1]), int(elements[2])]
return self.delSize
def load_insertCount(self, filepath):
self.insertCount = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly two elements
elements = line.split(',')
if len(elements) != 2:
sys.stderr.write('\n\nInvalid line in %s insertCount file\n' % self.name)
exit(1)
if int(elements[0]) in self.insertCount.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s insertCount file\n' % self.name)
exit(1)
self.insertCount[int(elements[0])] = int(elements[1])
return self.insertCount
def load_insertsByRead(self, filepath):
self.insertsByRead = []
with open(filepath) as f:
# there should be only one line in the file, so I'm looking only at the first line
line = f.readline()[:-1] # removing \n from the end
# splitting each line using ','
elements = line.split(',')
for element in elements:
self.insertsByRead.append(int(element))
return self.insertsByRead
def load_insertSize(self, filepath):
self.insertSize = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly three elements
elements = line.split(',')
if len(elements) != 3:
sys.stderr.write('\n\nInvalid line in %s insertSize file\n' % self.name)
exit(1)
if int(elements[0]) in self.insertSize.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s insertSize file\n' % self.name)
exit(1)
self.insertSize[int(elements[0])] = [int(elements[1]), int(elements[2])]
return self.insertSize
def load_mutationCount(self, filepath):
self.mutationCount = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly two elements
elements = line.split(',')
if len(elements) != 2:
sys.stderr.write('\n\nInvalid line in %s mutationCount file\n' % self.name)
exit(1)
if int(elements[0]) in self.mutationCount.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s insertCount file\n' % self.name)
exit(1)
self.mutationCount[int(elements[0])] = int(elements[1])
return self.mutationCount
    # The mutation-type dictionary defines how likely it is for each base
    # to mutate into another base.
    # It is stored as a shallow hierarchy of dictionaries, so the code
    # shouldn't be too complex.
def load_mutationType(self, filepath):
self.mutationType = {}
with open(filepath) as f:
# there should be exactly 5 lines in the file
for line in f:
# splitting each line using ',', there should be exactly two elements
elements = line.split(',')
# each line should contain exactly 9 elements, first defines starting base
# and the following 4 pairs define final base and likelyhood of corresponding mutation
if len(elements) != 9:
sys.stderr.write('\n\nInvalid line in %s mutationType file\n' % self.name)
exit(1)
if elements[0] in self.mutationCount.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s mutationType file\n' % self.name)
exit(1)
mutationDict = {}
mutationDict[elements[1]] = int(elements[2])
mutationDict[elements[3]] = int(elements[4])
mutationDict[elements[5]] = int(elements[6])
mutationDict[elements[7]] = int(elements[8])
self.mutationType[elements[0]] = mutationDict
return self.mutationType
def load_posCount(self, filepath):
self.posCount = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly two elements
elements = line.split(',')
if len(elements) != 2:
sys.stderr.write('\n\nInvalid line in %s posCount file\n' % self.name)
exit(1)
if int(elements[0]) in self.posCount.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s posCount file\n' % self.name)
exit(1)
self.posCount[int(elements[0])] = int(elements[1])
return self.posCount
def load_primerCheck(self, filepath):
self.primerCheck = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly two elements
elements = line.split(',')
if len(elements) != 2:
sys.stderr.write('\n\nInvalid line in %s primerCheck file\n' % self.name)
exit(1)
if elements[0] in self.primerCheck.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s primerCheck file\n' % self.name)
exit(1)
self.primerCheck[elements[0]] = int(elements[1])
return self.primerCheck
def load_qualHist(self, filepath):
self.qualHist = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there is a variable number of elements
# atm I'm not sure what each of them means
# first element is the dictionary key, the reast are the value
elements = line.split(',')
if len(elements) < 2:
sys.stderr.write('\n\nInvalid line in %s qualHist file\n' % self.name)
exit(1)
if int(elements[0]) in self.qualHist.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s qualHist file\n' % self.name)
exit(1)
self.qualHist[int(elements[0])] = elements[1:]
return self.qualHist
def load_readHist(self, filepath):
self.readHist = {}
with open(filepath) as f:
for line in f:
# splitting each line using ',', there should be exactly two elements
elements = line.split(',')
if len(elements) != 2:
sys.stderr.write('\n\nInvalid line in %s readHist file\n' % self.name)
exit(1)
if int(elements[0]) in self.readHist.iterkeys():
sys.stderr.write('\n\nDuplicate element in %s readHist file\n' % self.name)
exit(1)
self.readHist[int(elements[0])] = int(elements[1])
return self.readHist
def printStatistics(self, title = ''):
sys.stdout.write('\n\n')
if title != '':
sys.stdout.write(title + '\n')
sys.stdout.write('Printing statistics for %s sequencing platform:\n' % self.name)
sys.stdout.write('DelCount dictionary contains %d enteries!\n' % len(self.delCount))
sys.stdout.write('DelsByRead list contains %d enteries!\n' % len(self.delsByRead))
sys.stdout.write('DelSize dictionary contains %d enteries!\n' % len(self.delSize))
sys.stdout.write('InsertCount dictionary contains %d enteries!\n' % len(self.insertCount))
sys.stdout.write('InsertsByRead list contains %d enteries!\n' % len(self.insertsByRead))
sys.stdout.write('InsertSize dictionary contains %d enteries!\n' % len(self.insertSize))
sys.stdout.write('MutationCount dictionary contains %d enteries!\n' % len(self.mutationCount))
sys.stdout.write('MutationType dictionary contains %d enteries!\n' % len(self.mutationType))
sys.stdout.write('PosCount dictionary contains %d enteries!\n' % len(self.posCount))
sys.stdout.write('PrimerCheck dictionary contains %d enteries!\n' % len(self.primerCheck))
sys.stdout.write('QualHist dictionary contains %d enteries!\n' % len(self.qualHist))
sys.stdout.write('ReadHist dictionary contains %d enteries!\n' % len(self.readHist))
# Load all statistics data from a folder with corresponding .CSV files
def load_all(self, rootfolder, name=''):
if name != '':
self.name = name
else:
name = self.name
# delCount
filepath = os.path.join(rootfolder, name, name + '_delCount.csv')
self.load_delCount(filepath)
# delsByRead
filepath = os.path.join(rootfolder, name, name + '_delsByRead.csv')
self.load_delsByRead(filepath)
# delSize
filepath = os.path.join(rootfolder, name, name + '_delSize.csv')
self.load_delSize(filepath)
# insertCount
filepath = os.path.join(rootfolder, name, name + '_insertCount.csv')
self.load_insertCount(filepath)
# insertsByRead
filepath = os.path.join(rootfolder, name, name + '_insertsByRead.csv')
self.load_insertsByRead(filepath)
# insertSize
filepath = os.path.join(rootfolder, name, name + '_insertSize.csv')
self.load_insertSize(filepath)
# mutationCount
filepath = os.path.join(rootfolder, name, name + '_mutationCount.csv')
self.load_mutationCount(filepath)
# mutationType
filepath = os.path.join(rootfolder, name, name + '_mutationType.csv')
self.load_mutationType(filepath)
# posCount
filepath = os.path.join(rootfolder, name, name + '_posCount.csv')
self.load_posCount(filepath)
# primerCheck
filepath = os.path.join(rootfolder, name, name + '_primerCheck.csv')
self.load_primerCheck(filepath)
# qualHist
filepath = os.path.join(rootfolder, name, name + '_qualHist.csv')
self.load_qualHist(filepath)
# readHist
filepath = os.path.join(rootfolder, name, name + '_readHist.csv')
self.load_readHist(filepath)
def generate_readsize(self):
total_count = 0
for value in self.readHist.itervalues():
total_count += value
# Generates a single read for the current profile
def generate_read(self, reference):
sys.stderr.write('Function %s not implemented yet!\n\n' % (sys._getframe().f_code.co_name))
exit(1)
pass
# Generates a number of reads for the current profile
def generate_reads_bynumber(self, number, reference):
sys.stderr.write('Function %s not implemented yet!\n\n' % (sys._getframe().f_code.co_name))
exit(1)
pass
# Generates a number of reads for the current profile, resulting in a given coverage
def generate_reads_bycoverage(self, coverage, reference):
sys.stderr.write('Function %s not implemented yet!\n\n' % (sys._getframe().f_code.co_name))
exit(1)
pass
def load_profile(profilefolder):
    """Load a single FQSProfile from the given profile folder.

    The folder's basename is used as the profile name; its parent directory
    is the root folder passed to load_all.

    :param profilefolder: path to the profile's folder
    :return: the loaded FQSProfile instance
    """
    normalized = os.path.normpath(profilefolder)
    name = os.path.basename(normalized)
    rootfolder = os.path.dirname(normalized)
    profile = FQSProfile(name)
    profile.load_all(rootfolder)
    # profile.printStatistics('After loading')
    return profile
def load_folder(profilesfolder):
profiles = []
filenames = os.listdir(profilesfolder)
sys.stdout.write('\n\n')
sys.stdout.write('Loading profiles from %s\n' % profilesfolder)
for filename in filenames:
filepath = os.path.join(profilesfolder, | |
self.cur_player
# If it is a game on lan, no need to show other players tiles on the rack
if not self.lan_mode or self.lan_mode and self.own_mark == self.cur_play_mark:
for letter, tile in zip(player.letters, self.rack):
if letter == '@':
tile.letter.set(' ')
else:
tile.letter.set(letter)
tile['bg'] = '#BE975B'
# Hide the letters on the board if it is a normal game.
# So that players can't see each others letters
if self.norm_mode:
tile['fg'] = '#BE975B'
# If there are no letters left, leave the spots of the used tiles blank
if len(self.bag.bag) == 0:
# Slice the rack array up to the length of player's letters array
for tile in self.rack[len(player.letters):]:
tile.letter.set('')
tile['bg'] = '#cccccc'
# If a player joins a game on lan, display his own letters on the 1st turn.
elif self.joined_lan and self.first_turn:
for letter, tile in zip(self.players[self.own_mark].letters, self.rack):
if letter == '@':
tile.letter.set(' ')
else:
tile.letter.set(letter)
tile['bg'] = '#BE975B'
# Disable board and buttons when it is not a player's turn
# so that they don't mess things up
def disable_board(self):
self.sub.config(state=DISABLED)
self.pas.config(state=DISABLED)
self.sav.config(state=DISABLED)
if self.lan_mode and self.chal_mode:
self.chal.config(state=DISABLED)
for spot in self.gui_board.values():
spot.active = False
# Enable board and buttons when it is player's turn
def enable_board(self):
self.sub.config(state=NORMAL)
self.pas.config(state=NORMAL)
self.sav.config(state=NORMAL)
if self.lan_mode and self.chal_mode:
self.chal.config(state=NORMAL)
for spot in self.gui_board.values():
spot.active = True
def wait_comp(self):
self.disable_board()
self.pl1_info.set('Player: {}'.format(self.cur_player.score))
self.bag_info.set('{} Tiles in Bag'.format(len(self.bag.bag)))
self.status_info.set('... Computer\'s Turn ...')
args = (self.queue, self.opponent, self.bag, self.board, self.dict)
t = threading.Thread(target=self.get_comp_move, args=args)
t.start()
self.process_comp_word()
def get_comp_move(self, queue, opponent, bag, board, dic):
word = opponent.get_move(bag, board, dic)
queue.put(word)
    def process_comp_word(self):
        """Poll the result queue for the computer's move and, once it arrives, apply it to the board and end the turn."""
        if self.queue.empty():
            # Worker thread hasn't finished yet - poll again in 1 second.
            self.master.master.after(1000, self.process_comp_word)
        else:
            word = self.queue.get()
            if self.opponent.is_passing:
                self.pass_num += 1
            else:
                # A real move resets the consecutive-pass counter.
                self.pass_num = 0
                # Paint the move's letters onto the board and retire those spots.
                for spot, letter in zip(word.range, word.word):
                    if self.gui_board.get(spot, False):
                        self.gui_board[spot].letter.set(letter)
                        self.gui_board[spot]['bg'] = '#BE975B'
                        self.gui_board[spot].active = False
                        self.used_spots[spot] = self.gui_board[spot]
                        del self.gui_board[spot]
                self.opponent.update_rack(self.bag)
                self.opponent.update_score()
                self.set_word_info(word.words)
            # NOTE(review): this tail runs on a pass as well - presumably the
            # opponent returns a word object with empty word/range then; confirm.
            self.decorate_rack()
            self.board.place(word.word, word.range)
            self.enable_board()
            self.init_turn()
    def place_tile(self, event):
        """Handle a click-to-move of a letter between rack and board tiles.

        Works as a two-click move: the first click stores the picked-up tile
        in self.start_tile, the second click (this event's widget) is the
        destination. Rack and board bookkeeping (placed_tiles, spots_buffer,
        empty_rack_tiles) is updated accordingly.
        """
        start_t_name = type(self.start_tile).__name__
        end_tile = event.widget
        end_t_name = type(end_tile).__name__
        end_t_letter = end_tile.letter
        # Case 1: moving a letter from the rack.
        if start_t_name == 'RackTile' and self.start_tile.letter.get() != '':
            if end_t_name == 'BoardTile' and end_tile.active:
                if end_t_letter.get() == '':
                    # Empty board spot: place the letter and record the placement.
                    end_t_letter.set(self.start_tile.letter.get())
                    end_tile['bg'] = self.start_tile['bg']
                    self.placed_tiles[end_tile.name] = end_tile
                    self.spots_buffer.append(end_tile.name)
                    self.empty_rack_tiles.append(self.start_tile)
                    self.start_tile['bg'] = '#cccccc'
                    self.start_tile.letter.set('')
                    self.start_tile = None
                else:
                    # Occupied board spot: swap the two letters.
                    temp = end_t_letter.get()
                    end_t_letter.set(self.start_tile.letter.get())
                    self.start_tile.letter.set(temp)
                    self.start_tile = None
            elif end_t_name == 'RackTile':
                # Rack-to-rack: swap letters; extra bookkeeping when the
                # destination slot was empty (colors and empty-slot list).
                temp = end_t_letter.get()
                end_t_letter.set(self.start_tile.letter.get())
                if end_tile in self.empty_rack_tiles:
                    self.empty_rack_tiles.append(self.start_tile)
                    del self.empty_rack_tiles[self.empty_rack_tiles.index(end_tile)]
                    end_tile['bg'] = '#BE975B'
                    self.start_tile['bg'] = '#cccccc'
                self.start_tile.letter.set(temp)
                self.start_tile = None
            else:
                self.start_tile = None
        # Case 2: moving a letter placed on the board this turn.
        elif start_t_name == 'BoardTile' and self.start_tile.letter.get() != '' and self.start_tile.active:
            if end_t_name == 'RackTile' and end_t_letter.get() == '':
                # Take the letter back onto an empty rack slot.
                del self.placed_tiles[self.start_tile.name]
                del self.empty_rack_tiles[self.empty_rack_tiles.index(end_tile)]
                self.spots_buffer.remove(self.start_tile.name)
                end_t_letter.set(self.start_tile.letter.get())
                end_tile['bg'] = '#BE975B'
                self.determine_tile_background(self.start_tile)
                self.start_tile.letter.set('')
                self.start_tile = None
            elif end_t_name == 'BoardTile' and end_tile.active:
                if end_t_letter.get() == '':
                    # Move to another empty board spot.
                    end_t_letter.set(self.start_tile.letter.get())
                    end_tile['bg'] = self.start_tile['bg']
                    self.update_buffer_letters(end_tile)
                    self.determine_tile_background(self.start_tile)
                    del self.placed_tiles[self.start_tile.name]
                    self.placed_tiles[end_tile.name] = end_tile
                    self.start_tile.letter.set('')
                    self.start_tile = None
                elif end_t_letter.get() == self.start_tile.letter.get():
                    # Identical letter: treat as a no-op.
                    self.start_tile = None
                else:
                    # Swap the two board letters.
                    temp = end_t_letter.get()
                    end_t_letter.set(self.start_tile.letter.get())
                    self.start_tile.letter.set(temp)
                    self.update_buffer_letters(end_tile)
                    self.placed_tiles[self.start_tile.name] = self.start_tile
                    self.placed_tiles[end_tile.name] = end_tile
                    self.start_tile = None
        # Case 3: nothing picked up yet - this click selects the tile.
        else:
            self.start_tile = end_tile
def update_buffer_letters(self, tile):
for spot in self.spots_buffer:
if spot == self.start_tile.name:
self.spots_buffer.remove(spot)
self.spots_buffer.append(tile.name)
    def get_lan_move(self):
        """Poll the network queue for the other player's turn pack and apply it locally."""
        if self.queue.empty():
            # Nothing received yet - block progress and check again in a second.
            self.may_proceed = False
            self.master.master.after(1000, self.process_word)
        else:
            pack = self.queue.get()
            # Other player passed letters
            if len(pack) == 3:
                self.bag = pack[1]
                self.pass_num += 1
                self.init_turn()
            # There was a challenge
            elif type(pack[1]) == type(True):
                # In this case, pack[1] is the flag for being challenged successfully or not
                if pack[1]:
                    self.is_challenged = True
                    self.challenge(pack)
                    self.master.master.after(1000, self.process_word)
                else:
                    self.challenge()
            else:
                # A normal move pack: unpack the full game state.
                self.may_proceed = True
                self.is_challenged = False
                # pack[0] is a number significant for create_server and handle_lan_game
                # pack[-1] is the game_online flag
                self.word, self.w_range, received_tiles, self.prev_spots_buffer, self.players, self.bag, self.board = pack[1:-1]
                # It is a new word for the receiver
                self.word.new = True
                # Should populate placed_tiles because there were no clicks on this side.
                for spot, letter in received_tiles.items():
                    self.placed_tiles[spot] = self.gui_board[spot]
                    self.placed_tiles[spot].letter.set(letter)
                    self.placed_tiles[spot].active = False
def determine_direction(self):
# If there is only one letter in the list, find its direction
if len(self.w_range) == 1:
# Get the spots on the right and left side by changing letter value of the spot
r = chr(ord(self.w_range[0][0]) + 1) + self.w_range[0][1:]
l = chr(ord(self.w_range[0][0]) - 1) + self.w_range[0][1:]
# Check the spots on the left and right side.
# If they are occupied, the direction is r. If not, d.
# Also check if they go over the board boundary.
if self.board.board.get(r, False) and re.fullmatch('[A-Z@]', self.board.board[r]):
self.direction = 'r'
elif self.board.board.get(l, False) and re.fullmatch('[A-Z@]', self.board.board[l]):
self.direction = 'r'
else:
self.direction = 'd'
else:
# use letter parts of the first and last spots for checking
check1 = self.w_range[0][0]
check2 = self.w_range[-1][0]
# If letters are the same, direction is down
if check1 == check2:
# Need to sort number parts of the spots as digits for accuracy
digits = sorted([int(x[1:]) for x in self.w_range])
self.w_range = [check1 + str(x) for x in digits]
self.w_range.reverse()
self.direction = 'd'
else:
self.direction = 'r'
    def set_raw_word(self):
        """Assemble the full word (newly placed letters plus adjacent letters already on the board) into self.raw_word."""
        for spot in self.w_range:
            self.raw_word.append(self.placed_tiles[spot].letter.get())
            # Collect adjacent already-on-board letters into self.aob_list.
            self.set_aob_list(spot)
        # offset is necessary because array is mutable and it is dynamically
        # changed as the loop continues
        offset = 0
        length = len(self.w_range)
        for spot, index, letter in self.aob_list:
            # Clamp out-of-range insertion points to the word's boundaries.
            # NOTE(review): the upper bound uses `index > length`, so an index
            # exactly equal to length is used as-is (effectively an append) -
            # confirm this asymmetry with the < 0 clamp is intended.
            if index < 0:
                index = 0
            elif index > length:
                index = length - 1
            self.raw_word.insert(index + offset, letter)
            self.w_range.insert(index + offset, spot)
            offset += 1
        self.raw_word = ''.join(self.raw_word)
        # A blank (' ') means a wild tile was used; ask the player for its letter.
        if ' ' in self.raw_word:
            self.change_wild_tile()
def get_norm_move(self):
self.raw_word = []
self.may_proceed = True
# Array for letters already on board and are included in the word made
self.aob_list = []
self.w_range = sorted(self.placed_tiles)
self.determine_direction()
self.set_raw_word()
# Just the letters are necessary for word object
aob_list = [x[2] for x in self.aob_list]
self.word = Word(self.w_range[0], self.direction, self.raw_word, self.board, self.dict, aob_list, self.chal_mode)
# Check if all the spots are on the same row or column
if not self.valid_sorted_letters():
self.may_proceed = False
    def process_word(self):
        """Validate and commit the current turn's word (lan or local game)."""
        if self.lan_mode and self.own_mark != self.cur_play_mark:
            # Not our turn in a lan game: the move arrives over the network.
            self.get_lan_move()
        elif self.placed_tiles:
            self.get_norm_move()
        if self.may_proceed and type(self.word) != type(None) and self.word.new and self.word.validate():
            self.cur_player.word = self.word
            self.pass_num = 0
            self.wild_tiles = []
            self.prev_words = []
            # Deactivate and disclude used spots
            for spot in self.w_range:
                if spot in self.placed_tiles:
                    self.placed_tiles[spot].active = False
                    self.used_spots[spot] = self.gui_board[spot]
                    del self.gui_board[spot]
            # On lan games, the game object of the current player updates rack and score of its own.
            if not self.lan_mode or self.own_mark == self.cur_play_mark:
                self.cur_player.update_rack(self.bag)
                self.cur_player.update_score()
                self.prev_spots_buffer = self.spots_buffer.copy()
            self.decorate_rack()
            self.board.place(self.word.word, self.w_range)
            self.set_word_info(self.word.words)
            # Remember this turn's words for possible later challenges.
            self.prev_words.append(self.word.word)
            self.prev_words.extend([x[0] for x in self.word.extra_words])
            # Get the background correct for the lan game words
            # as there are no clicks on this side
            if self.lan_mode and self.own_mark != self.cur_play_mark:
                for tile in self.placed_tiles.values():
                    tile['bg'] = '#BE975B'
            # For lan game turn_packs, no tkinter objects, hence no tiles, for pickling.
            # Spots and letters are enough
            if self.lan_mode:
                self.placed_tiles = {spot:tile.letter.get() for spot, tile in self.placed_tiles.items()}
            self.init_turn()
        else:
            # Invalid or missing word: restore any used wild tiles to blanks.
            if self.wild_tiles:
                for tile in self.wild_tiles:
                    tile.letter.set(' ')
                self.wild_tiles = []
def set_aob_list(self, spot):
# Checks are to see if the spot is already on the list
# If aft or bef is not in the gui board, it was previusly placed on the board
# Otherwise, flag is false.
flag = True
if self.direction == 'd':
# Modify the number part of the spots
bef = spot[0] + str(int(spot[1:]) + 1)
aft = spot[0] + str(int(spot[1:]) - 1)
check = [x[0] for x in self.aob_list if x[0] == aft or x[0] == bef]
while flag and not check:
# range(1, 16) because there are 15 rows
if aft not in self.gui_board and int(aft[1:]) in range(1, 16):
self.aob_list.append((aft, self.w_range.index(spot) + 1, self.used_spots[aft].letter.get()))
aft = aft[0] + str(int(aft[1:]) - 1)
elif bef not in self.gui_board and int(bef[1:]) in range(1, 16):
self.aob_list.insert(0, (bef, self.w_range.index(spot) - 1, self.used_spots[bef].letter.get()))
bef = bef[0] + str(int(bef[1:]) + 1)
else:
flag = False
else:
# Modify the letter part of the spots
bef = chr(ord(spot[0]) - 1) + spot[1:]
aft = chr(ord(spot[0]) + 1) + spot[1:]
check = [x[0] for x in | |
path_params['fk'] = params['fk']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def portals_id_invitation_tickets_fk_get(self, id, id2, fk, **kwargs):
"""
Get InvitationTicket by Id for this Portal
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.portals_id_invitation_tickets_fk_get(id, id2, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Portal id (required)
:param str id2: Portal id (required)
:param str fk: InvitationTicket id (required)
:param str filter: Only include changes that match this filter
:return: InvitationTicket
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.portals_id_invitation_tickets_fk_get_with_http_info(id, id2, fk, **kwargs)
else:
(data) = self.portals_id_invitation_tickets_fk_get_with_http_info(id, id2, fk, **kwargs)
return data
    def portals_id_invitation_tickets_fk_get_with_http_info(self, id, id2, fk, **kwargs):
        """
        Get InvitationTicket by Id for this Portal
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.portals_id_invitation_tickets_fk_get_with_http_info(id, id2, fk, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: Portal id (required)
        :param str id2: Portal id (required)
        :param str fk: InvitationTicket id (required)
        :param str filter: Only include changes that match this filter
        :return: InvitationTicket
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'id2', 'fk', 'filter']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # locals() snapshots this method's arguments (including kwargs) so the
        # generated code below can handle them uniformly; renaming any local
        # above this line would change behavior.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method portals_id_invitation_tickets_fk_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `portals_id_invitation_tickets_fk_get`")
        # verify the required parameter 'id2' is set
        if ('id2' not in params) or (params['id2'] is None):
            raise ValueError("Missing the required parameter `id2` when calling `portals_id_invitation_tickets_fk_get`")
        # verify the required parameter 'fk' is set
        if ('fk' not in params) or (params['fk'] is None):
            raise ValueError("Missing the required parameter `fk` when calling `portals_id_invitation_tickets_fk_get`")
        collection_formats = {}
        resource_path = '/Portals/{id}/invitationTickets/{fk}'.replace('{format}', 'json')
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        if 'id2' in params:
            # NOTE(review): this overwrites the 'id' path parameter with id2,
            # so id2 is the value actually substituted into the URL - looks
            # like a code-generation quirk; confirm against the API spec.
            path_params['id'] = params['id2']
        if 'fk' in params:
            path_params['fk'] = params['fk']
        query_params = {}
        if 'filter' in params:
            query_params['filter'] = params['filter']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        # Drop the header entirely when no acceptable type was negotiated.
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='InvitationTicket',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def portals_id_invitation_tickets_get(self, id, id2, **kwargs):
"""
List InvitationTickets for this Portal
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.portals_id_invitation_tickets_get(id, id2, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Portal id (required)
:param str id2: Portal id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: list[InvitationTicket]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.portals_id_invitation_tickets_get_with_http_info(id, id2, **kwargs)
else:
(data) = self.portals_id_invitation_tickets_get_with_http_info(id, id2, **kwargs)
return data
    def portals_id_invitation_tickets_get_with_http_info(self, id, id2, **kwargs):
        """
        List InvitationTickets for this Portal
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.portals_id_invitation_tickets_get_with_http_info(id, id2, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: Portal id (required)
        :param str id2: Portal id (required)
        :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
        :return: list[InvitationTicket]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['id', 'id2', 'filter']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # locals() snapshots this method's arguments (including kwargs) so the
        # generated code below can handle them uniformly; renaming any local
        # above this line would change behavior.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method portals_id_invitation_tickets_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `portals_id_invitation_tickets_get`")
        # verify the required parameter 'id2' is set
        if ('id2' not in params) or (params['id2'] is None):
            raise ValueError("Missing the required parameter `id2` when calling `portals_id_invitation_tickets_get`")
        collection_formats = {}
        resource_path = '/Portals/{id}/invitationTickets'.replace('{format}', 'json')
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        if 'id2' in params:
            # NOTE(review): this overwrites the 'id' path parameter with id2,
            # so id2 is the value actually substituted into the URL - looks
            # like a code-generation quirk; confirm against the API spec.
            path_params['id'] = params['id2']
        query_params = {}
        if 'filter' in params:
            query_params['filter'] = params['filter']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        # Drop the header entirely when no acceptable type was negotiated.
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='list[InvitationTicket]',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def portals_id_logo_put(self, id, id2, data, **kwargs):
"""
Change logo
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.portals_id_logo_put(id, id2, data, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Portal id (required)
:param str id2: Portal id (required)
:param Portal data: Logo (required)
:return: Portal
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.portals_id_logo_put_with_http_info(id, id2, data, **kwargs)
else:
(data) = self.portals_id_logo_put_with_http_info(id, id2, data, **kwargs)
return data
def portals_id_logo_put_with_http_info(self, id, id2, data, **kwargs):
"""
Change logo
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.portals_id_logo_put_with_http_info(id, id2, data, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Portal id (required)
:param str id2: Portal id (required)
:param Portal data: Logo (required)
:return: Portal
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'id2', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method portals_id_logo_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `portals_id_logo_put`")
# verify the required parameter 'id2' is set
if ('id2' not in params) or (params['id2'] is None):
raise ValueError("Missing the required parameter `id2` when calling `portals_id_logo_put`")
# verify the required parameter 'data' is set
if ('data' not in params) or (params['data'] is None):
raise ValueError("Missing the required parameter `data` when calling `portals_id_logo_put`")
collection_formats = {}
resource_path = '/Portals/{id}/logo'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
if 'id2' in params:
path_params['id'] = params['id2']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' | |
"""
A module for building and performing inference with cluster graphs
"""
# Standard imports
import collections
# Third-party imports
import IPython
import graphviz
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
import networkx as nx
import numpy as np
from tqdm.auto import tqdm
# Local imports
from veroku._cg_helpers._cluster import Cluster
import veroku._cg_helpers._animation as cg_animation
from veroku.factors._factor_utils import get_subset_evidence
# TODO: Optimise _pass_message.
# TODO: Improve sepsets selection for less loopiness.
# TODO: Optimisation: messages from clusters that did not receive any new messages in the previous round, do not need
# new messages calculated.
# pylint: disable=protected-access
DEFAULT_FIG_SIZE = [15, 5]
def _sort_almost_sorted(almost_sorted_deque, key):
"""
Sort a deque like that where only the first element is potentially unsorted and should probably be last and the rest
of the deque is sorted in descending order.
:param collections.deque almost_sorted_deque: The deque of size n, where the first n-1 elements are definitely
sorted (in descending order) and where the last element is also probably in the correct place, but needs to be
checked
:param callable key: The key (function) to use for sorting.
:return: The sorted (given that the conditions are met) deque.
:rtype: collections.deque
"""
if key(almost_sorted_deque[0]) < key(almost_sorted_deque[1]):
almost_sorted_deque.append(almost_sorted_deque.popleft())
if key(almost_sorted_deque[-1]) <= key(almost_sorted_deque[-2]):
return almost_sorted_deque
almost_sorted_deque = collections.deque(sorted(almost_sorted_deque, key=key, reverse=True))
return almost_sorted_deque
def _evidence_reduce_factors(factors, evidence):
"""
Observe relevant evidence for each factor.
:param factors: The factors to reduce with the (relevant) evidence.
:type factors: Factor list
:param dict evidence: The evidence (i.e {'a':1.0, 'b':2.0})
:return: The reduced factors.
:rtype factors: Factor list
"""
reduced_factors = []
for factor in factors:
if evidence is not None:
vrs, values = get_subset_evidence(all_evidence_dict=evidence, subset_vars=factor.var_names)
if len(vrs) > 0:
factor = factor.reduce(vrs, values)
reduced_factors.append(factor.copy())
return reduced_factors
def _absorb_subset_factors(factors):
"""
Absorb any factors that has a scope that is a subset of another factor into such a factor.
:param factors: The list of factors to check for subset factors.
:type factors: Factor list
:return: The (potentially reduced) list of factors.
:rtype: Factor list
"""
# TODO: Simplify this, if possible.
factors_absorbtion_dict = {i: [] for i in range(len(factors))}
final_graph_cluster_factors = []
# factors: possibly smaller list of factors after factors which have a scope that is a subset of another factor have
# been absorbed by the larger one.
factor_processed_mask = [0] * len(factors)
for i, factor_i in enumerate(factors):
if not factor_processed_mask[i]:
factor_product = factor_i.copy()
for j, factor_j in enumerate(factors):
if (i != j) and (not factor_processed_mask[j]):
if set(factor_j.var_names) < set(factor_product.var_names):
factor_product = factor_product.multiply(factor_j)
factors_absorbtion_dict[i].append(j)
factor_processed_mask[j] = 1
factor_processed_mask[i] = 1
if factor_processed_mask[i]:
final_graph_cluster_factors.append(factor_product)
for i, factor_i in enumerate(factors): # add remaining factors
if not factor_processed_mask[i]:
factor_processed_mask[i] = 1
final_graph_cluster_factors.append(factor_i)
assert all(
factor_processed_mask
), "Error: Some factors where not included during variable subset processing."
return final_graph_cluster_factors
class ClusterGraph:
"""
A class for building and performing inference with cluster graphs.
"""
# pylint: disable=too-many-instance-attributes
    def __init__(self, factors, evidence=None, special_evidence=None, disable_tqdm=False):
        """
        Construct a Cluster graph from a list of factors.

        The factors are first reduced with the given evidence, factors with subset
        scopes are absorbed into larger ones, and the surviving factors become the
        graph's clusters.

        :param factors: The factors to construct the graph from
        :type factors: factor list
        :param dict evidence: evidence dictionary (mapping variable names to values) that should be used to reduce
            factors before building the cluster graph. Example: {'a': 2, 'b':1}
        :param dict special_evidence: evidence dictionary (mapping variable names to values) that should be used in the
            calculation of messages, and not to reduce factors. This allows factor approximations - such as the
            non-linear Gaussian to be iteratively refined. Example: {'a': 2, 'b':1}
        :param bool disable_tqdm: Disable the tqdm progress bars used in graph construction and processing.
        """
        # TODO: see if evidence and special_evidence can be replaced by a single variable.
        self.num_messages_passed = 0
        self.disable_tqdm = disable_tqdm
        # verbose/debug are always initialised off; callers may toggle them afterwards.
        self.verbose = False
        self.debug = False
        # Per-iteration maximum message distances, filled in during message passing.
        self.sync_message_passing_max_distances = []
        if special_evidence is None:
            special_evidence = dict()
        self.special_evidence = special_evidence
        # Evidence variables (observed or special) are excluded from sepsets later on.
        all_evidence_vars = set(self.special_evidence.keys())
        if evidence is not None:
            evidence_vars = set(evidence.keys())
            all_evidence_vars = all_evidence_vars.union(evidence_vars)
        all_factors_copy = _evidence_reduce_factors(factors, evidence)
        final_graph_cluster_factors = _absorb_subset_factors(all_factors_copy)
        # One cluster per surviving factor; the prefix keeps cluster ids unique.
        clusters = [
            Cluster(factor, cluster_name_prefix=f"c{i}#")
            for i, factor in enumerate(final_graph_cluster_factors)
        ]
        self._set_non_rip_sepsets_dict(clusters, all_evidence_vars)
        self._clusters = clusters
        # Add special evidence to factors
        for cluster in self._clusters:
            cluster_special_evidence_vars, cluster_special_evidence_values = get_subset_evidence(
                self.special_evidence, cluster.var_names
            )
            cluster_special_evidence = dict(
                zip(cluster_special_evidence_vars, cluster_special_evidence_values)
            )
            cluster.add_special_evidence(cluster_special_evidence)
        self.graph_message_paths = collections.deque([])
        self._build_graph()
        # TODO: consolidate these two, if possible
        self.message_passing_animation_frames = []
        self.passed_messages = []
def _set_non_rip_sepsets_dict(self, clusters, all_evidence_vars):
"""
Calculate the preliminary sepsets dict before the RIP property is enforced.
:param clusters: The clusters for which the sepsets should be calculated.
:param all_evidence_vars: The variables for which there is observed evidence.
"""
self._non_rip_sepsets = {}
for i in tqdm(range(len(clusters)), disable=self.disable_tqdm):
vars_i = clusters[i].var_names
for j in range(i + 1, len(clusters)):
vars_j = clusters[j].var_names
sepset = set(vars_j).intersection(set(vars_i)) - all_evidence_vars
self._non_rip_sepsets[(i, j)] = sepset
self._non_rip_sepsets[(j, i)] = sepset
    def _build_graph(self):
        """
        Add the cluster sepsets, graphviz graph and animation graph (for message_passing visualisation).

        Every cluster pair that shares a RIP-enforced sepset is linked as neighbours,
        gets one message path per direction, and is joined in the graphviz graph
        through a rectangular sepset node.
        """
        # Check for non-unique cluster_ids (This should never be the case)
        cluster_ids = [cluster.cluster_id for cluster in self._clusters]
        if len(set(cluster_ids)) != len(cluster_ids):
            raise ValueError(f"Non-unique cluster ids: {cluster_ids}")
        self._conditional_print("Info: Building graph.")
        self._graph = graphviz.Graph(format="png")
        rip_sepsets_dict = self._get_running_intersection_sepsets()
        # TODO: see why this is necessary, remove if not
        for i in tqdm(range(len(self._clusters)), disable=self.disable_tqdm):
            self._clusters[i].remove_all_neighbours()
        self._conditional_print(f"Debug: number of clusters: {len(self._clusters)}")
        for i in tqdm(range(len(self._clusters)), disable=self.disable_tqdm):
            node_i_name = self._clusters[i]._cluster_id
            self._graph.node(
                name=node_i_name, label=node_i_name, style="filled", fillcolor="white", color="black"
            )
            for j in range(i + 1, len(self._clusters)):
                # Only pairs that survived RIP enforcement become neighbours.
                if (i, j) in rip_sepsets_dict:
                    sepset = rip_sepsets_dict[(i, j)]
                    assert len(sepset) > 0, "Error: empty sepset"
                    self._clusters[i].add_neighbour(self._clusters[j], sepset=sepset)
                    self._clusters[j].add_neighbour(self._clusters[i], sepset=sepset)
                    # One directed message path per direction between the two clusters.
                    gmp_ij = _GraphMessagePath(self._clusters[i], self._clusters[j])
                    gmp_ji = _GraphMessagePath(self._clusters[j], self._clusters[i])
                    self.graph_message_paths.append(gmp_ij)
                    self.graph_message_paths.append(gmp_ji)
                    self._clusters[i].add_outward_message_path(gmp_ij)
                    self._clusters[j].add_outward_message_path(gmp_ji)
                    # Graph animation: rectangular sepset node between the two cluster nodes.
                    node_j_name = self._clusters[j]._cluster_id
                    sepset_node_label = ",".join(sepset)
                    sepset_node_name = cg_animation.make_sepset_node_name(node_i_name, node_j_name)
                    self._graph.node(name=sepset_node_name, label=sepset_node_label, shape="rectangle")
                    self._graph.edge(node_i_name, sepset_node_name, color="black", penwidth="2.0")
                    self._graph.edge(sepset_node_name, node_j_name, color="black", penwidth="2.0")
        self._conditional_print(f"num graph message paths: {len(self.graph_message_paths)}")
def _conditional_print(self, message):
"""
Print message if verbose is True.
:param message: The message to print.
"""
if self.verbose:
print(message)
    def plot_next_messages_info_gain(self, legend_on=False, figsize=None):
        """
        Plot the information gained by receiving new messages over subsequent iterations for all message paths in the
        graph.

        One series is drawn per directed message path, taken from that path's
        information_gains_with_iters history.

        :param bool legend_on: Whether or not to show the message paths (specified by connected cluster pairs) in the
            plot legend.
        :param list figsize: The matplotlib figure size.
        """
        if figsize is None:
            figsize = DEFAULT_FIG_SIZE
        plt.figure(figsize=figsize)
        # One information-gain history per directed message path.
        all_paths_information_gains_with_iters = [
            gmp.information_gains_with_iters for gmp in self.graph_message_paths
        ]
        for paths_information_gains_with_iters in all_paths_information_gains_with_iters:
            plt.plot(paths_information_gains_with_iters)
        plt.title("Information Gain of Messages along Graph Message Paths")
        plt.xlabel("iteration")
        plt.ylabel("D_KL(prev_msg||msg)")
        if legend_on:
            # Legend entries are "sender->receiver" cluster id pairs, in plot order.
            legend = [
                f"{gmp.sender_cluster.cluster_id}->{gmp.receiver_cluster.cluster_id}"
                for gmp in self.graph_message_paths
            ]
            plt.legend(legend)
    def plot_message_convergence(self, log=False, figsize=None):
        """
        Plot the KL-divergence between the messages and their previous instances to indicate the message passing
        convergence.

        Infinite distances are re-drawn in red at 1.5 times the largest finite
        distance so that they remain visible on the plot.

        :param bool log: If True, plot the log of the KL-divergence.
        :param list figsize: The matplotlib [width, height] of the figure.
        """
        if figsize is None:
            figsize = DEFAULT_FIG_SIZE
        mp_max_dists = self.sync_message_passing_max_distances
        if log:
            mp_max_dists = np.log(mp_max_dists)
        # here we tile and flatten to prevent the plot omission of values with inf on either side.
        mp_max_dists = np.tile(mp_max_dists, [2, 1]).flatten(order="F")
        num_iterations = len(mp_max_dists)
        iterations = np.array(list(range(num_iterations))) / 2  # divide by 2 to correct for tile and flatten
        # NOTE(review): max() below raises if every distance is inf or the history is
        # empty - presumably message passing always produces at least one finite
        # distance before this is called; confirm against callers.
        non_inf_max_distances = [d for d in mp_max_dists if d != np.inf]
        max_non_inf = max(non_inf_max_distances)
        new_inf_value = max_non_inf * 1.5
        max_distances_replaces_infs = np.array([v if v != np.inf else new_inf_value for v in mp_max_dists])
        # Mask everything that is not a replaced inf, so only the inf markers plot in red.
        inf_values = np.ma.masked_where(
            max_distances_replaces_infs != new_inf_value, max_distances_replaces_infs
        )
        plt.figure(figsize=figsize)
        plt.plot(iterations, max_distances_replaces_infs)
        plt.plot(iterations, inf_values, c="r", linewidth=2)
        # Only show the "infinity" legend entry when at least one inf was replaced.
        if len(non_inf_max_distances) != len(mp_max_dists):
            custom_lines = [Line2D([0], [0], color="r", lw=4)]
            plt.legend(custom_lines, ["infinity"])
        plt.title("Message Passing Convergence")
        plt.xlabel("iteration")
        plt.ylabel("log max D_KL(prev_msg||msg)")
        plt.show()
def _get_unique_vars(self):
"""
Get the set of variables in the graph.
:return: The variables
:rtype: list
"""
all_vars = []
for cluster in self._clusters:
all_vars += cluster.var_names
unique_vars = list(set(all_vars))
return unique_vars
def _get_vars_min_spanning_trees(self):
"""
Get the minimum spanning trees of all the variables in the graph.
"""
all_vars = self._get_unique_vars()
var_graphs = {var: nx.Graph() for var in all_vars}
num_clusters = len(self._clusters)
for i in range(num_clusters):
for j in range(i + 1, num_clusters):
sepset = self._non_rip_sepsets[(i, j)]
for var in sepset:
| |
        #period 4 (5 tiers)
df['energyrate/period4/tier0'] = df['energyratestructure/period4/tier0rate'] + df['energyratestructure/period4/tier0adj'].fillna(0)
df['energyrate/period4/tier1'] = df['energyratestructure/period4/tier1rate'] + df['energyratestructure/period4/tier1adj'].fillna(0)
df['energyrate/period4/tier2'] = df['energyratestructure/period4/tier2rate'] + df['energyratestructure/period4/tier2adj'].fillna(0)
df['energyrate/period4/tier3'] = df['energyratestructure/period4/tier3rate'] + df['energyratestructure/period4/tier3adj'].fillna(0)
df['energyrate/period4/tier4'] = df['energyratestructure/period4/tier4rate'] + df['energyratestructure/period4/tier4adj'].fillna(0)
#period 5 (5 tiers)
df['energyrate/period5/tier0'] = df['energyratestructure/period5/tier0rate'] + df['energyratestructure/period5/tier0adj'].fillna(0)
df['energyrate/period5/tier1'] = df['energyratestructure/period5/tier1rate'] + df['energyratestructure/period5/tier1adj'].fillna(0)
df['energyrate/period5/tier2'] = df['energyratestructure/period5/tier2rate'] + df['energyratestructure/period5/tier2adj'].fillna(0)
df['energyrate/period5/tier3'] = df['energyratestructure/period5/tier3rate'] + df['energyratestructure/period5/tier3adj'].fillna(0)
df['energyrate/period5/tier4'] = df['energyratestructure/period5/tier4rate'] + df['energyratestructure/period5/tier4adj'].fillna(0)
#period 6-23
df['energyrate/period6/tier0'] = df['energyratestructure/period6/tier0rate'] + df['energyratestructure/period6/tier0adj'].fillna(0)
df['energyrate/period7/tier0'] = df['energyratestructure/period7/tier0rate'] + df['energyratestructure/period7/tier0adj'].fillna(0)
df['energyrate/period8/tier0'] = df['energyratestructure/period8/tier0rate'] + df['energyratestructure/period8/tier0adj'].fillna(0)
df['energyrate/period9/tier0'] = df['energyratestructure/period9/tier0rate'] + df['energyratestructure/period9/tier0adj'].fillna(0)
df['energyrate/period10/tier0'] = df['energyratestructure/period10/tier0rate'] + df['energyratestructure/period10/tier0adj'].fillna(0)
df['energyrate/period11/tier0'] = df['energyratestructure/period11/tier0rate'] + df['energyratestructure/period11/tier0adj'].fillna(0)
df['energyrate/period12/tier0'] = df['energyratestructure/period12/tier0rate'] + df['energyratestructure/period12/tier0adj'].fillna(0)
df['energyrate/period13/tier0'] = df['energyratestructure/period13/tier0rate'] + df['energyratestructure/period13/tier0adj'].fillna(0)
df['energyrate/period14/tier0'] = df['energyratestructure/period14/tier0rate'] + df['energyratestructure/period14/tier0adj'].fillna(0)
df['energyrate/period15/tier0'] = df['energyratestructure/period15/tier0rate'] + df['energyratestructure/period15/tier0adj'].fillna(0)
df['energyrate/period16/tier0'] = df['energyratestructure/period16/tier0rate'] + df['energyratestructure/period16/tier0adj'].fillna(0)
df['energyrate/period17/tier0'] = df['energyratestructure/period17/tier0rate'] + df['energyratestructure/period17/tier0adj'].fillna(0)
df['energyrate/period18/tier0'] = df['energyratestructure/period18/tier0rate'] + df['energyratestructure/period18/tier0adj'].fillna(0)
df['energyrate/period19/tier0'] = df['energyratestructure/period19/tier0rate'] + df['energyratestructure/period19/tier0adj'].fillna(0)
df['energyrate/period20/tier0'] = df['energyratestructure/period20/tier0rate'] + df['energyratestructure/period20/tier0adj'].fillna(0)
df['energyrate/period21/tier0'] = df['energyratestructure/period21/tier0rate'] + df['energyratestructure/period21/tier0adj'].fillna(0)
df['energyrate/period22/tier0'] = df['energyratestructure/period22/tier0rate'] + df['energyratestructure/period22/tier0adj'].fillna(0)
df['energyrate/period23/tier0'] = df['energyratestructure/period23/tier0rate'] + df['energyratestructure/period23/tier0adj'].fillna(0)
if industry == 'residential':
self.res_rate_data = df
elif industry == 'commercial':
self.com_rate_data = df
def filter_null_rates(self, industry):
"""
Filters rates with no cost information.
"""
industry = industry.lower()
if industry == 'residential':
df = self.res_rate_data
elif industry == 'commercial':
df = self.com_rate_data
else:
raise ValueError("industry must be 'residential' or 'commercial'!")
df = df.dropna(subset=['energyrate/period0/tier0'])
if industry == 'residential':
self.res_rate_data = df
elif industry == 'commercial':
self.com_rate_data = df
def calculate_annual_energy_cost_residential(self, outpath='outputs/cost-of-electricity/urdb-res-rates/'):
"""
Calculates the annualized energy costs for residential rates. Estimates
account for seasonal, tier, and TOU rate structures. Key assumptions
include: 1) Charging occurs with the same freqency irregardless of
weekday vs. weekend or season (time of year); 2) Charging occurs with
the same frequency across rate tiers; 3) For TOU rates, charging will
always occur when it is cheapest to do so (off-peak). Adds
'electricity_cost_per_kwh' col to self.res_rate_data.
"""
# Fixed Rates - incl. seasonal & TOU
res_rates_fixed = self.res_rate_data[self.res_rate_data.is_tier_rate==0]
avg_costs = []
for i in range(len(res_rates_fixed)):
month_rates = []
#weekday
for month in [ls.replace('[', '').replace(',', '').replace(' ', '') for ls in str(res_rates_fixed.iloc[i]['energyweekdayschedule']).split(']')][:-2]: #seasonal
periods = (list(set(month)))
day_rates = []
for per in periods: #TOU
rate_str = 'energyrate/period{}/tier0'.format(per)
rate = res_rates_fixed.iloc[i][rate_str]
day_rates.append(rate)
min_day_rate = min(np.array(day_rates))
month_rates.extend([min_day_rate]*5)
#weekend
for month in [ls.replace('[', '').replace(',', '').replace(' ', '') for ls in str(res_rates_fixed.iloc[i]['energyweekendschedule']).split(']')][:-2]: #seasonal
periods = (list(set(month)))
day_rates = []
for per in periods: #TOU
rate_str = 'energyrate/period{}/tier0'.format(per)
rate = res_rates_fixed.iloc[i][rate_str]
day_rates.append(rate)
min_day_rate = min(np.array(day_rates))
month_rates.extend([min_day_rate]*2)
avg_cost = np.array(month_rates).mean() #dow-weighted cost
avg_costs.append(avg_cost)
res_rates_fixed['electricity_cost_per_kwh'] = avg_costs
# Tier Rates - incl. seasonal & TOU
res_rates_tier = self.res_rate_data[self.res_rate_data.is_tier_rate==1]
avg_costs = []
for i in range(len(res_rates_tier)): #tier rate = avg of all tiers
avg_tier_rates = []
avg_tier_month_rates = []
for p in range(24):
if p==0:
tier_rates = []
for t in range(11):
rate_str = 'energyrate/period{0}/tier{1}'.format(p,t)
rate = res_rates_tier.iloc[i][rate_str]
tier_rates.append(rate)
with warnings.catch_warnings(): #supress warnings
warnings.simplefilter("ignore", category=RuntimeWarning)
avg_tier_rate = np.nanmean(np.array(tier_rates))
avg_tier_rates.append(avg_tier_rate)
elif p==1:
tier_rates = []
for t in range(8):
rate_str = 'energyrate/period{0}/tier{1}'.format(p,t)
rate = res_rates_tier.iloc[i][rate_str]
tier_rates.append(rate)
with warnings.catch_warnings(): #supress warnings
warnings.simplefilter("ignore", category=RuntimeWarning)
avg_tier_rate = np.nanmean(np.array(tier_rates))
avg_tier_rates.append(avg_tier_rate)
elif p>=2 and p<6:
tier_rates = []
for t in range(5):
rate_str = 'energyrate/period{0}/tier{1}'.format(p,t)
rate = res_rates_tier.iloc[i][rate_str]
tier_rates.append(rate)
with warnings.catch_warnings(): #supress warnings
warnings.simplefilter("ignore", category=RuntimeWarning)
avg_tier_rate = np.nanmean(np.array(tier_rates))
avg_tier_rates.append(avg_tier_rate)
else:
rate_str = 'energyrate/period{0}/tier0'.format(p)
rate = res_rates_tier.iloc[i][rate_str]
avg_tier_rates.append(rate)
#weekday rates
months = [ls.replace('[', '').replace(',', '').replace(' ', '') for ls in str(res_rates_tier.iloc[i]['energyweekdayschedule']).split(']')][:-2]
for month in months: #seasonal
periods = (list(set(month)))
avg_rates = []
for per in periods: #TOU
per = int(per)
avg_tier_rate = avg_tier_rates[per]
avg_rates.append(avg_tier_rate)
min_avg_tier_day_rate = min(np.array(avg_rates))
avg_tier_month_rates.extend([min_avg_tier_day_rate]*5)
#weekend rates
months = [ls.replace('[', '').replace(',', '').replace(' ', '') for ls in str(res_rates_tier.iloc[i]['energyweekendschedule']).split(']')][:-2]
for month in months:
periods = (list(set(month)))
avg_rates = []
for per in periods:
per = int(per)
avg_tier_rate = avg_tier_rates[per]
avg_rates.append(avg_tier_rate)
min_avg_tier_day_rate = min(np.array(avg_rates))
avg_tier_month_rates.extend([min_avg_tier_day_rate]*2)
avg_cost = np.array(avg_tier_month_rates).mean() #dow-weighted cost
avg_costs.append(avg_cost)
res_rates_tier['electricity_cost_per_kwh'] = avg_costs
res_df = pd.concat([res_rates_fixed, res_rates_tier], sort=False)
res_df = res_df[res_df.electricity_cost_per_kwh>=0] #remove negative rates
self.res_rate_data = res_df
self.res_rate_data.to_csv(outpath+'res_rates.csv', index=False)
print("Complete, {} rates included.".format(len(self.res_rate_data)))
def calculate_annual_cost_dcfc(self,
dcfc_load_profiles = config.DCFC_PROFILES_DICT,
outpath = 'outputs/cost-of-electricity/urdb-dcfc-rates/',
log_lvl = 1):
"""
Calculates the annualized average daily cost to charge for
commercial rates under an annual dcfc_load_profile. Estimates account
for demand, seasonal, tier, and TOU rate structures. Due to it's
significant runtime, function outputs a .csv at outpath for each profile
in dcfc_load_profiles. The log_lvl parameter must be in [0,1,2] where higher
levels reflect more verbose logs.
"""
assert log_lvl in [0,1,2], "Unexpected log_lvl, must be in [0,1,2]"
if log_lvl == 0:
log_lbl = logging.WARNING
elif log_lvl == 1:
log_lbl = logging.INFO
elif log_lvl == 2:
log_lbl = logging.DEBUG
logging.basicConfig(level=log_lbl)
for p in dcfc_load_profiles.keys():
# Load profile
profile_path = dcfc_load_profiles[p]
profile_df = pd.read_csv(profile_path, index_col=0, parse_dates=True)
# Deconstruct timestamp
months = profile_df.index.month
days = profile_df.index.day
hours = profile_df.index.hour
minutes = profile_df.index.minute
weekday = profile_df.index.weekday
# Convert load profile -> energy profile
energy_profile_df = pd.DataFrame({'month': months,
'day': days,
'hour': hours,
'minute': minutes,
'weekday': weekday,
'pwr_kw': profile_df['Power, kW']})
energy_profile_df = energy_profile_df.sort_values(by=['month', 'day', 'hour', 'minute'])
energy_profile_df = energy_profile_df.reset_index()
energy_profile_df['energy_kwh'] = energy_profile_df['pwr_kw']/4
# Aggregate 15-min energy profile -> hourly energy profile
hourly_energy_df = energy_profile_df.groupby(['month', 'day', 'hour', 'weekday'])['energy_kwh'].sum()
hourly_energy_df = hourly_energy_df.reset_index()
# Aggregate hourly energy profile -> monthly energy profile
monthly_energy_df = hourly_energy_df.groupby('month')['energy_kwh'].sum()
monthly_energy_df = monthly_energy_df.reset_index()
# Calculate peak power by month
monthly_peak_pwr_df = energy_profile_df.groupby('month')['pwr_kw'].max()
monthly_peak_pwr_df = monthly_peak_pwr_df.reset_index()
# Calculate annual energy
annual_energy_kwh = monthly_energy_df['energy_kwh'].sum()
# Determine times of peak demand
peak_demand_times = []
for month, peak_pwr_kw in zip(range(1,13), monthly_peak_pwr_df['pwr_kw']):
peak_demand_dow = energy_profile_df[(energy_profile_df.month==month)&\
(energy_profile_df.pwr_kw==peak_pwr_kw)]['weekday'].values[0]
peak_demand_hod = energy_profile_df[(energy_profile_df.month==month)&\
(energy_profile_df.pwr_kw==peak_pwr_kw)]['hour'].values[0]
peak_demand_time = (peak_demand_dow, peak_demand_hod)
peak_demand_times.append(peak_demand_time)
# Filter ineligible rates by peak capacity, energy consumption limits
def is_eligible(rates, monthly_energy, monthly_peak_pwr):
eligible = ((rates['peakkwcapacitymin'] <= monthly_peak_pwr.min())&
(rates['peakkwcapacitymax'] >= monthly_peak_pwr.max())&
(rates['peakkwhusagemin'] <= monthly_energy.min())&
(rates['peakkwhusagemax'] >= monthly_energy.max()))
return eligible
eligibility = is_eligible(self.com_rate_data, monthly_energy_df['energy_kwh'], monthly_peak_pwr_df['pwr_kw'])
self.com_rate_data['eligible'] = eligibility
eligible_rates = self.com_rate_data[self.com_rate_data.eligible==True]
print_str = """rates determined to be ineligible for {} (violated peak capacity/energy consumption constraints)""".format(p)
logging.info(len(self.com_rate_data[self.com_rate_data.eligible==False]), print_str)
### ###
## Calculate cost of electricity ##
### ###
# Energy rates == 0 if NULL; Max = inf if NULL
for tier in range(11):
maxim = 'energyratestructure/period0/tier{}max'.format(tier)
rate = 'energyratestructure/period0/tier{}rate'.format(tier)
adj = 'energyratestructure/period0/tier{}adj'.format(tier)
eligible_rates[maxim] = eligible_rates[maxim].fillna(np.inf)
eligible_rates[rate] = eligible_rates[rate].fillna(0)
eligible_rates[adj] = eligible_rates[adj].fillna(0)
for tier in range(8):
maxim = 'energyratestructure/period1/tier{}max'.format(tier)
rate = 'energyratestructure/period1/tier{}rate'.format(tier)
adj = 'energyratestructure/period1/tier{}adj'.format(tier)
eligible_rates[maxim] = eligible_rates[maxim].fillna(np.inf)
eligible_rates[rate] = eligible_rates[rate].fillna(0)
eligible_rates[adj] = eligible_rates[adj].fillna(0)
for period in range(2,6):
for tier in range(5):
maxim = 'energyratestructure/period{0}/tier{1}max'.format(period, tier)
rate = 'energyratestructure/period{0}/tier{1}rate'.format(period, tier)
adj = 'energyratestructure/period{0}/tier{1}adj'.format(period, tier)
eligible_rates[maxim] = eligible_rates[maxim].fillna(np.inf)
eligible_rates[rate] = eligible_rates[rate].fillna(0)
eligible_rates[adj] = eligible_rates[adj].fillna(0)
for period in range(6,24):
maxim = 'energyratestructure/period{}/tier0max'.format(period)
rate = 'energyratestructure/period{}/tier0rate'.format(period)
adj = 'energyratestructure/period{}/tier0adj'.format(period)
eligible_rates[maxim] = eligible_rates[maxim].fillna(np.inf)
eligible_rates[rate] = eligible_rates[rate].fillna(0)
eligible_rates[adj] = eligible_rates[adj].fillna(0)
# Calculate annual fixed cost charge (1st meter)
logging.info("Starting annual fixed cost calculations for {}...".format(p))
eligible_rates['annual_fixed_cost'] = eligible_rates['fixedchargefirstmeter'] * 12
eligible_rates = eligible_rates[eligible_rates.annual_fixed_cost >= 0]
logging.info("Annual fixed cost calculations complete.")
# Characterize rates (demand/no-demand)
flat_dmd_rates = eligible_rates[~eligible_rates['flatdemandstructure/period0/tier0rate'].isnull()]
flat_dmd_rates['demand_type'] = 'flat'
tou_dmd_rates = eligible_rates[(eligible_rates['flatdemandstructure/period0/tier0rate'].isnull())&
(~eligible_rates['demandratestructure/period0/tier0rate'].isnull())]
tou_dmd_rates['demand_type'] = 'tou'
no_dmd_rates = eligible_rates[(eligible_rates['flatdemandstructure/period0/tier0rate'].isnull())&
(eligible_rates['demandratestructure/period0/tier0rate'].isnull())]
no_dmd_rates['demand_type'] = 'none'
# Demand Charge Rates = 0 when NULL; max = inf when NULL
for tier in range(17):
maxim = 'flatdemandstructure/period0/tier{}max'.format(tier)
rate = 'flatdemandstructure/period0/tier{}rate'.format(tier)
adj = 'flatdemandstructure/period0/tier{}adj'.format(tier)
flat_dmd_rates[maxim] = flat_dmd_rates[maxim].fillna(np.inf)
tou_dmd_rates[maxim] = tou_dmd_rates[maxim].fillna(np.inf)
no_dmd_rates[maxim] = no_dmd_rates[maxim].fillna(np.inf)
flat_dmd_rates[rate] = flat_dmd_rates[rate].fillna(0)
tou_dmd_rates[rate] = tou_dmd_rates[rate].fillna(0)
no_dmd_rates[rate] = no_dmd_rates[rate].fillna(0)
flat_dmd_rates[adj] = flat_dmd_rates[adj].fillna(0)
tou_dmd_rates[adj] = tou_dmd_rates[adj].fillna(0)
no_dmd_rates[adj] = no_dmd_rates[adj].fillna(0)
for tier in range(5):
maxim = 'flatdemandstructure/period1/tier{}max'.format(tier)
rate = 'flatdemandstructure/period1/tier{}rate'.format(tier)
adj = 'flatdemandstructure/period1/tier{}adj'.format(tier)
flat_dmd_rates[maxim] = flat_dmd_rates[maxim].fillna(np.inf)
tou_dmd_rates[maxim] = tou_dmd_rates[maxim].fillna(np.inf)
no_dmd_rates[maxim] = no_dmd_rates[maxim].fillna(np.inf)
flat_dmd_rates[rate] = flat_dmd_rates[rate].fillna(0)
tou_dmd_rates[rate] = tou_dmd_rates[rate].fillna(0)
no_dmd_rates[rate] = no_dmd_rates[rate].fillna(0)
flat_dmd_rates[adj] = flat_dmd_rates[adj].fillna(0)
tou_dmd_rates[adj] = tou_dmd_rates[adj].fillna(0)
no_dmd_rates[adj] = no_dmd_rates[adj].fillna(0)
for tier in range(3):
maxim = 'flatdemandstructure/period2/tier{}max'.format(tier)
rate = 'flatdemandstructure/period2/tier{}rate'.format(tier)
adj = 'flatdemandstructure/period2/tier{}adj'.format(tier)
flat_dmd_rates[maxim] = flat_dmd_rates[maxim].fillna(np.inf)
tou_dmd_rates[maxim] = tou_dmd_rates[maxim].fillna(np.inf)
no_dmd_rates[maxim] = no_dmd_rates[maxim].fillna(np.inf)
flat_dmd_rates[rate] = flat_dmd_rates[rate].fillna(0)
tou_dmd_rates[rate] = tou_dmd_rates[rate].fillna(0)
no_dmd_rates[rate] = no_dmd_rates[rate].fillna(0)
flat_dmd_rates[adj] = flat_dmd_rates[adj].fillna(0)
tou_dmd_rates[adj] = tou_dmd_rates[adj].fillna(0)
no_dmd_rates[adj] = no_dmd_rates[adj].fillna(0)
for period in | |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Configuration service utilities
"""
from argparse import ArgumentParser
from configparser import RawConfigParser
from astropy.io import fits
import numpy as np
import sys
__all__ = ['ConfigurationException', 'Configuration']
class ConfigurationException(Exception):
    """Exception raised for configuration-service errors."""

    def __init__(self, msg):
        """
        :param str msg: Human-readable description of the error.
        """
        # Pass the message to Exception so str(exc) and exc.args behave as expected
        # (previously args was empty and str(exc) returned '').
        super().__init__(msg)
        self.msg = msg
class Configuration(ArgumentParser):
"""Configuration initialization if other is not None and its type is
Configuration, its _entries dictionary content is copy to this
Configuration class
Parameters
----------
other : Configuration object, optional
Copies other._entries dictionary's item in self._entries
"""
DEFAULT = "DEFAULT"
FITS = "FITS"
INI = "INI"
DATAIMPL = "DATAIMPL"
HEADERIMPL = "HEADERIMPL"
VALUE_INDEX = 0
COMMENT_INDEX = 1
def __init__(self, other=None):
ArgumentParser.__init__(self)
# Copy other dictionary entries to self._entries
if other is not None and isinstance(other, Configuration):
self._entries = dict(other._entries)
else:
self._entries = dict() # 1 SECTION -> n KEY -> pair(value,comment)
def parse_args(self, args=None, namespace=None):
"""Convert argument strings to objects and assign them as an entry in
self.__dict__['DEFAULT'] Previous calls to add_argument()
determine exactly what objects are created and how they are
assigned. See the documentation for add_argument() for
details. By default, the argument strings are taken from
sys.argv
Parameters
----------
args : object , optional
The argument strings are taken from args instead of
sys.args Warning: args must contains only
arguments(flag/value) no program name
namespace : object , optional
Populates special attribute __dict__ containing
the namespace’s symbol table
Returns
-------
Dictionary containing configuration entries for 'DEFAULT' section
"""
# Read arguments from sys.argv and return those previously
# added result = super(Configuration,
# self).parse_args(args,namespace=namespace)
result = super().parse_args(args, namespace=namespace)
args = vars(result)
# Add arguments(key, value) for DEFAULT section
for key, value in args.items():
self.add(key, value, "From command line arguments", self.DEFAULT)
self.__dict__[key] = value
return self._entries[self.DEFAULT]
def add(self, key, value, comment="", section=DEFAULT):
"""
Create section if not already exist in self._entries
Add configuration variable for corresponding section/key
Into 'DEFAULT' section by default
Parameters:
-----------
key : str
key for the new entry to be add
value : str
value for the new entry to be add
comment : str , optional
comment for the new entry to be add
section : str , optional
section for the new entry to be add
Returns
--------
True is option is added,
False is option already exist
"""
if section not in self._entries:
self._entries[section] = dict()
if key not in self._entries[section]:
self._entries[section][key] = (value, comment)
return True
else:
print(section, key, "already exist", file=sys.stderr)
return False
def has_key(self, key, section=DEFAULT):
"""
Checks if a configuration entry exist
Parameters
----------
key: str
key to search in section
section: str , optional
section to search key ('DEFAULT' section is used by default)
Returns
-------
whether the given option exists in the given section or not.
"""
if section in self._entries:
return key in self._entries[section] # return True is key exists
return False
def get(self, key, section=DEFAULT):
"""
Get a configuration entry value
Parameters
----------
key: str
key to search in section
section: str , optional
section to search key ('DEFAULT' section is used by default)
Returns
--------
value for corresponding section/key pair
None is no suitable value exists for section/key
"""
if not self.has_key(key, section):
return None
return self._entries[section][key][self.VALUE_INDEX]
def get_comment(self, key, section=DEFAULT):
"""
get a configuration entry comment
Parameters:
-----------
key: str
key to search in section
section: str , optional
section to search key ('DEFAULT' section is used by default)
Returns
-------
comment for corresponding section/key pair
None is no suitable value exits for section/key
"""
if not self.has_key(key, section):
return None
return self._entries[section][key][self.COMMENT_INDEX]
def write(self, filename, impl=FITS, implementation=DATAIMPL):
"""
write configuration entries to a file.
Parameters:
-----------
filename: str
Full path name: Save all configuration entries
to this given filename
impl: str , optional
"FITS" -> use Fits format
"INI" -> use windows style ini format
"""
if (impl == self.FITS):
self._write_fits(filename, implementation)
# Write an .ini-format representation of the configuration state.
elif (impl == self.INI):
config_parser = RawConfigParser()
self._fill(config_parser)
with open(filename, 'w') as config_file:
config_parser.write(config_file)
else:
print("Format:", impl, 'not allowed', file=sys.stderr)
def read(self, filenames, impl=FITS, implementation=DATAIMPL,
encoding=None):
"""
Read filename or a list of filenames and parse configuration entries.
Files that cannot be opened are silently ignored; this is
designed so that you can specify a list of potential
configuration file locations (e.g. current directory, user's
home directory, system wide directory), and all existing
configuration files in the list will be read. A single
filename may also be given.
Parameters:
-----------
filename: str
Full path name or list of full path name containing configuration
entries to parse
impl: str , optional
FITS -> use Fits format
INI -> use windows style ini format
implementation: str , optional
DATAIMPL -> Use Fits data table
HEADERIMP -> Use fits header
Returns
-------
list of successfully read files.
"""
if impl == self.INI:
config_parser = RawConfigParser()
config_parser.optionxform = lambda option: option
success_list = config_parser.read(filenames, encoding)
self._addOptionFromParser(config_parser)
return success_list
elif impl == self.FITS:
return self._read_fits(filenames, implementation, encoding)
else:
print("Format:", impl, 'not allowed', file=sys.stderr)
return list()
def list(self, file=sys.stdout, flush=False):
"""
print all options (DEFAULT included)
Parameters:
-----------
file:
file objects used by the interpreter for standard output
and errors:
flush: boolean , optional
flush keyword argument is true, the stream is forcibly flushed.
"""
for section in self._entries.keys():
print("[", section, "]", file=file, flush=flush)
for key, value_comment in self._entries[section].items():
print(key, "=", value_comment[self.VALUE_INDEX], "; ",
value_comment[self.COMMENT_INDEX], file=file,
flush=flush)
def _fill(self, config_parser):
"""
Fills a Config_parser object with self._entries
"""
if not isinstance(config_parser, RawConfigParser):
return None
# set RawConfigParser ro case sensitive
config_parser.optionxform = lambda option: option
sections = self._entries.keys()
for section in sections:
# dico[section]={}
if not section == self.DEFAULT:
config_parser.add_section(section)
for key, value_comment_tuple in self._entries[section].items():
# dico[section][key] = value
value_comment = value_comment_tuple[self.VALUE_INDEX]\
+ " ; " \
+ value_comment_tuple[self.COMMENT_INDEX]
config_parser.set(section, key, value_comment)
return config_parser
    def _write_fits(self, filename, implementation=DATAIMPL):
        """Write an FITS file representation of the configuration state.

        One HDU is emitted per configuration section:
        DATAIMPL   -> an ASCII table with 'key', 'value' and 'comments'
                      columns (256-char strings each);
        HEADERIMPL -> an empty table whose FITS header carries the
                      (value, comment) pairs.
        Any other implementation is reported on stderr and ignored.

        NOTE(review): ``clobber=True`` was deprecated in astropy and
        removed in astropy >= 2.0 in favour of ``overwrite=True`` --
        confirm which astropy version this project pins.
        """
        if implementation != self.DATAIMPL and implementation != self.HEADERIMPL:
            print("Implementation :", implementation,
                  'not allowed', file=sys.stderr)
            return
        # hduList will contain one TableHDU per section
        hduList = fits.HDUList()
        # get all Configuration entries
        # loop over section
        for section in self._entries.keys():
            if implementation == self.DATAIMPL:
                # prepare 3 array (keys, values, comments for this section)
                key_array = []
                value_array = []
                comment_array = []
                # loop over section entries and fill arrays
                for key, value_comment in self._entries[section].items():
                    key_array.append(key)
                    value_array.append(value_comment[self.VALUE_INDEX])
                    comment_array.append(value_comment[self.COMMENT_INDEX])
                # create fits.Column form filled arrays
                ckey = fits.Column(
                    name='key', format='A256', array=np.array(key_array))
                cvalue = fits.Column(
                    name='value', format='A256', array=np.array(value_array))
                ccomment = fits.Column(
                    name='comments', format='A256',
                    array=np.array(comment_array))
                # Create the table; the HDU name is the section name
                hdu = fits.TableHDU.from_columns([ckey, cvalue, ccomment])
                hdu.name = section
                # append table to hduList
                hduList.append(hdu)
            elif (implementation == self.HEADERIMPL):
                # store each entry as a header card: key = (value, comment)
                header = fits.Header()
                for key, value_comments in self._entries[section].items():
                    header[key] = (
                        value_comments[self.VALUE_INDEX],
                        value_comments[self.COMMENT_INDEX])
                table_0 = fits.TableHDU(data=None, header=header, name=section)
                hduList.append(table_0)
        hduList.writeto(filename, clobber=True)
def _read_fits(self, filenames, implementation=DATAIMPL, encoding=None):
"""Read and parse a Fits file or a list of Fits files.
Return list of successfully read files.
"""
if isinstance(filenames, str):
filenames = [filenames]
read_ok = []
if implementation == self.DATAIMPL:
for filename in filenames:
hdulist = fits.open(filename)
for hdu in hdulist:
section = hdu.name
data = hdu.data
if data is not None:
for (key, value, comment) in data:
try:
self.add(key, value, comment=comment,
section=section)
except ConfigurationException as e:
print(e, file=sys.stderr)
except:
pass
read_ok.append(filename)
elif (implementation == self.HEADERIMPL):
for filename in filenames:
hdulist = fits.open(filename)
for hdu in hdulist:
header = hdu.header
section = hdu.name
for key in header:
try:
self.add(key, header[key],
section=section,
comment=header.comments[key])
except ConfigurationException as e:
print(e, file=sys.stderr)
except:
pass
read_ok.append(filename)
read_ok.append(filename)
else:
print("Implementation :", implementation,
'not allowed', file=sys.stderr)
return read_ok
def _addOptionFromParser(self, config_parser):
"""
fill self._entries from a RawConfigParser
"""
if not isinstance(config_parser, RawConfigParser):
return False
for section in config_parser.sections():
for key, value_comment in config_parser.items(section):
foo = value_comment.split(" ;")
value = foo[self.VALUE_INDEX]
comment = foo[self.COMMENT_INDEX]
comment = comment[1:]
self.add(key, value, comment=comment, section=section)
for key, value_comment in config_parser.defaults().items():
foo = value_comment.split(" ;")
value = foo[self.VALUE_INDEX]
comment = foo[self.COMMENT_INDEX]
comment = comment[1:]
self.add(key, value, comment=comment, section=self.DEFAULT)
| |
<gh_stars>1-10
import re
from Queue import Queue
from math import ceil
from types import *
WHITESPACE = ['\n', '\t', ' ', '', u'\u3000']
# from helpers.constants import WHITESPACE
def splitKeepWhitespace(string):
    """Split *string* on whitespace while keeping the delimiters.

    Because the pattern is wrapped in a capture group, re.split()
    returns the whitespace tokens it split on, interleaved with the
    words, in document order.

    Args:
        string: The string to split.

    Returns:
        The split string with the whitespace kept.
    """
    pattern = u'(\u3000|\n| |\t)'
    return re.split(pattern, string)
def countWords(textList):  # Ignores WHITESPACE as being 'not words'
    """Count the "words" in a list of tokens.

    A word is any token that is not in the WHITESPACE global.

    Args:
        textList: A list of tokens in the text.

    Returns:
        The number of words in the list.
    """
    return sum(1 for token in textList if token not in WHITESPACE)
def stripLeadingWhiteSpace(q):
    """Strip the leading whitespace tokens from a queue of tokens.

    Pops items off the front of *q* for as long as the front item is in
    the WHITESPACE global (or until the queue is exhausted).

    Args:
        q: The text in a Queue object.

    Returns:
        None
    """
    while not q.empty() and q.queue[0] in WHITESPACE:
        q.get()
def stripLeadingBlankLines(q):
    """Strip the leading blank-line tokens (empty strings) from a queue.

    Bug fix: the original compared the queue's whole internal deque to
    ``''`` (``while q.queue == '':``), which is never true, so nothing
    was ever stripped.  The front *element* is compared instead, with a
    guard against peeking into an empty queue.

    Args:
        q: The text in a Queue object.

    Returns:
        None
    """
    while not q.empty() and q.queue[0] == '':
        q.get()
def stripLeadingCharacters(charQueue, numChars):
    """Strip the leading *numChars* characters from the queue.

    Portability fix: uses range() instead of the Python-2-only xrange()
    so the helper works on both interpreters.

    Args:
        charQueue: The text in a Queue object.
        numChars: The number of characters to remove.

    Returns:
        None
    """
    for _ in range(numChars):
        charQueue.get()
def stripLeadingWords(wordQueue, numWords):
    """Strip the leading *numWords* words from the queue.

    Whitespace around each removed word is stripped as well, so the
    queue ends up starting at the next word.  Portability fix: range()
    replaces the Python-2-only xrange().

    Args:
        wordQueue: The text in a Queue object.
        numWords: The number of words to remove.

    Returns:
        None
    """
    for _ in range(numWords):
        stripLeadingWhiteSpace(wordQueue)
        wordQueue.get()
    # leave the queue positioned on the next word, not on whitespace
    stripLeadingWhiteSpace(wordQueue)
def stripLeadingLines(lineQueue, numLines):
    """Strip the leading *numLines* lines from the queue.

    Blank lines around each removed line are stripped as well.
    Portability fix: range() replaces the Python-2-only xrange().

    Args:
        lineQueue: The text in a Queue object.
        numLines: The number of lines to remove.

    Returns:
        None
    """
    for _ in range(numLines):
        stripLeadingBlankLines(lineQueue)
        lineQueue.get()
    # leave the queue positioned on the next real line
    stripLeadingBlankLines(lineQueue)
def cutByCharacters(text, chunkSize, overlap, lastProp):
    """Cut the text into equally sized chunks measured in characters.

    A rolling window (queue) of characters advances over the text; every
    time it grows past chunkSize a chunk is emitted and the window start
    moves forward by chunkSize - overlap characters.  A final partial
    chunk smaller than lastProp * chunkSize is merged into the previous
    chunk instead of being emitted on its own.

    Portability fix: isinstance(x, list) replaces the Python-2-only
    ``type(x) is ListType`` check.

    Args:
        text: The string with the contents of the file.
        chunkSize: The size of the chunk, in characters.
        overlap: The number of characters to overlap between chunks.
        lastProp: The minimum proportional size that the last chunk has to be.

    Returns:
        A list of strings that the text has been cut into.
    """
    chunkList = []          # emitted chunks (each a list of 1-char strings)
    chunkSoFar = Queue()    # rolling window holding the current chunk
    currChunkSize = 0       # characters currently counted in the window
    tillNextChunk = chunkSize - overlap  # distance between chunk starts
    for token in text:
        currChunkSize += 1
        if currChunkSize > chunkSize:
            chunkList.append(list(chunkSoFar.queue))
            stripLeadingCharacters(charQueue=chunkSoFar, numChars=tillNextChunk)
            currChunkSize -= tillNextChunk
        chunkSoFar.put(token)
    # Making sure the last chunk is of a sufficient proportion
    lastChunk = list(chunkSoFar.queue)
    if (float(len(lastChunk)) / chunkSize) < lastProp:
        if chunkList:
            chunkList[-1].extend(lastChunk)
        else:
            # no previous chunk: chunkList receives the bare characters
            chunkList.extend(lastChunk)
    else:
        chunkList.append(lastChunk)
    # Make the list of lists of strings into a list of strings
    stringList = [''.join(subList) for subList in chunkList]
    if not any(isinstance(subList, list) for subList in chunkList):
        # chunkList held bare characters (text shorter than one chunk):
        # collapse everything into a single string
        stringList = [''.join(chunkList)]
    return stringList
def cutByWords(text, chunkSize, overlap, lastProp):
    """Cut the text into equally sized chunks measured in words.

    Whitespace tokens ride along inside the window but do not count
    towards the chunk size.  A final partial chunk smaller than
    lastProp * chunkSize is merged into the previous chunk.

    Portability fix: isinstance(x, list) replaces the Python-2-only
    ``type(x) is ListType`` check.

    Args:
        text: The string with the contents of the file.
        chunkSize: The size of the chunk, in words.
        overlap: The number of words to overlap between chunks.
        lastProp: The minimum proportional size that the last chunk has to be.

    Returns:
        A list of strings that the text has been cut into.
    """
    chunkList = []          # emitted chunks (each a list of tokens)
    chunkSoFar = Queue()    # rolling window holding the current chunk
    currChunkSize = 0       # words currently counted in the window
    tillNextChunk = chunkSize - overlap  # distance between chunk starts
    splitText = splitKeepWhitespace(text)
    # Build chunks (lists of words and whitespace) with a rolling window
    for token in splitText:
        if token in WHITESPACE:
            chunkSoFar.put(token)
        else:
            currChunkSize += 1
            if currChunkSize > chunkSize:
                chunkList.append(list(chunkSoFar.queue))
                stripLeadingWords(wordQueue=chunkSoFar, numWords=tillNextChunk)
                currChunkSize -= tillNextChunk
            chunkSoFar.put(token)
    # Making sure the last chunk is of a sufficient proportion
    lastChunk = list(chunkSoFar.queue)
    if (float(countWords(lastChunk)) / chunkSize) < lastProp:
        if chunkList:
            chunkList[-1].extend(lastChunk)
        else:
            # no previous chunk: chunkList receives the bare tokens
            chunkList.extend(lastChunk)
    else:
        chunkList.append(lastChunk)
    # Make the list of lists of strings into a list of strings
    stringList = [''.join(subList) for subList in chunkList]
    if not any(isinstance(subList, list) for subList in chunkList):
        # chunkList held bare tokens (text shorter than one chunk):
        # collapse everything into a single string
        stringList = [''.join(chunkList)]
    return stringList
def cutByLines(text, chunkSize, overlap, lastProp):
    """Cut the text into equally sized chunks measured in lines.

    Lines are produced by splitlines(True) so their trailing newlines
    are preserved inside the chunks; empty-string tokens do not count
    towards the chunk size.  A final partial chunk smaller than
    lastProp * chunkSize is merged into the previous chunk.

    Portability fix: isinstance(x, list) replaces the Python-2-only
    ``type(x) is ListType`` check.

    Args:
        text: The string with the contents of the file.
        chunkSize: The size of the chunk, in lines.
        overlap: The number of lines to overlap between chunks.
        lastProp: The minimum proportional size that the last chunk has to be.

    Returns:
        A list of strings that the text has been cut into.
    """
    chunkList = []          # emitted chunks (each a list of lines)
    chunkSoFar = Queue()    # rolling window holding the current chunk
    currChunkSize = 0       # lines currently counted in the window
    tillNextChunk = chunkSize - overlap  # distance between chunk starts
    splitText = text.splitlines(True)
    # Build chunks (lists of lines) with a rolling window
    for token in splitText:
        if token == '':
            chunkSoFar.put(token)
        else:
            currChunkSize += 1
            if currChunkSize > chunkSize:
                chunkList.append(list(chunkSoFar.queue))
                stripLeadingLines(lineQueue=chunkSoFar, numLines=tillNextChunk)
                currChunkSize -= tillNextChunk
            chunkSoFar.put(token)
    # Making sure the last chunk is of a sufficient proportion
    lastChunk = list(chunkSoFar.queue)
    if (float(countWords(lastChunk)) / chunkSize) < lastProp:
        if chunkList:
            chunkList[-1].extend(lastChunk)
        else:
            # no previous chunk: chunkList receives the bare tokens
            chunkList.extend(lastChunk)
    else:
        chunkList.append(lastChunk)
    # Make the list of lists of strings into a list of strings
    stringList = [''.join(subList) for subList in chunkList]
    if not any(isinstance(subList, list) for subList in chunkList):
        # chunkList held bare tokens (text shorter than one chunk):
        # collapse everything into a single string
        stringList = [''.join(chunkList)]
    return stringList
def cutByNumber(text, numChunks):
"""
Cuts the text into equally sized chunks, where the size of the chunk is determined by the number of desired chunks.
Args:
text: The string with the contents of the file.
numChunks: The number of chunks to cut the text into.
Returns:
A list of string that the text has been cut into.
"""
chunkList = [] # The list of the chunks (a.k.a. a list of list of strings)
chunkSoFar = Queue() # The rolling window representing the (potential) chunk
splitText = splitKeepWhitespace(text)
textLength = countWords(splitText)
chunkSizes = []
for i in xrange(numChunks):
chunkSizes.append(textLength / numChunks)
for i in xrange(textLength % numChunks):
chunkSizes[i] += 1
currChunkSize = 0 # Index keeping track of whether or not it's time to | |
<filename>rotkehlchen/tests/api/test_blockchain.py<gh_stars>0
import logging
from contextlib import ExitStack
from http import HTTPStatus
from unittest.mock import patch
import pytest
import requests
from eth_utils import to_checksum_address
from rotkehlchen.fval import FVal
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.tests.utils.api import (
api_url_for,
assert_error_response,
assert_ok_async_response,
assert_proper_response,
wait_for_async_task,
)
from rotkehlchen.tests.utils.blockchain import (
assert_btc_balances_result,
assert_eth_balances_result,
compare_account_data,
)
from rotkehlchen.tests.utils.constants import A_GNO, A_RDN
from rotkehlchen.tests.utils.factories import (
UNIT_BTC_ADDRESS1,
UNIT_BTC_ADDRESS2,
UNIT_BTC_ADDRESS3,
make_ethereum_address,
)
from rotkehlchen.tests.utils.rotkehlchen import setup_balances
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
@pytest.mark.parametrize('number_of_eth_accounts', [0])
def test_query_empty_blockchain_balances(rotkehlchen_api_server):
    """Make sure that querying balances for all blockchains works when no accounts are tracked

    Regression test for https://github.com/rotki/rotki/issues/848
    """
    # Each per-chain endpoint and the aggregate endpoint must return an
    # empty result (not an error) when no accounts are tracked.  The
    # three identical request/assert stanzas are collapsed into a loop.
    queried_urls = [
        api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='ETH',
        ),
        api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='BTC',
        ),
        api_url_for(
            rotkehlchen_api_server,
            "blockchainbalancesresource",
        ),
    ]
    for url in queried_urls:
        response = requests.get(url)
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert data['result'] == {'per_account': {}, 'totals': {}}
@pytest.mark.parametrize('number_of_eth_accounts', [2])
@pytest.mark.parametrize('btc_accounts', [[UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2]])
@pytest.mark.parametrize('owned_eth_tokens', [[A_RDN]])
def test_query_blockchain_balances(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
        caplog,
):
    """Test that the query blockchain balances endpoint works correctly. That is:

    - Querying only ETH chain returns only ETH and token balances
    - Querying only BTC chain returns only BTC account balances
    - Querying with no chain returns all balances (ETH, tokens and BTC)
    - Querying an unknown chain name returns a proper error
    """
    caplog.set_level(logging.DEBUG)
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    setup = setup_balances(rotki, ethereum_accounts=ethereum_accounts, btc_accounts=btc_accounts)
    # First query only ETH and token balances
    with setup.etherscan_patch, setup.alethio_patch:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='ETH',
        ))
    # setup_balances was called without use_alethio, so the Alethio path
    # is expected to fail and the code must fall back to etherscan
    msg = 'Alethio query should have failed'
    assert 'Alethio accounts token balances query failed' in caplog.text, msg
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_eth_balances_result(
        rotki=rotki,
        json_data=json_data,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=False,
    )
    # Then query only BTC balances
    with setup.bitcoin_patch:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='BTC',
        ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=False,
    )
    # Finally query all balances
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "blockchainbalancesresource",
        ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_eth_balances_result(
        rotki=rotki,
        json_data=json_data,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
    # Try to query not existing blockchain
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "named_blockchain_balances_resource",
        blockchain='NOTEXISTING',
    ))
    assert_error_response(
        response=response,
        contained_in_msg='Unrecognized value NOTEXISTING given for blockchain name',
    )
@pytest.mark.parametrize('number_of_eth_accounts', [0])
@pytest.mark.parametrize('btc_accounts', [[
    UNIT_BTC_ADDRESS1,
    UNIT_BTC_ADDRESS2,
    'bc1qhkje0xfvhmgk6mvanxwy09n45df03tj3h3jtnf',
]])
@pytest.mark.parametrize('owned_eth_tokens', [[]])
def test_query_bitcoin_blockchain_bech32_balances(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
        caplog,
):
    """Test that querying Bech32 bitcoin addresses works fine

    The third parametrized BTC account above is a bech32 (bc1...) address.
    """
    caplog.set_level(logging.DEBUG)
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    # one mocked satoshi balance per parametrized BTC account, in order
    btc_balances = ['111110', '3232223', '555555333']
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        btc_balances=btc_balances,
    )
    # query all balances
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "blockchainbalancesresource",
        ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=False,
    )
@pytest.mark.parametrize('number_of_eth_accounts', [2])
@pytest.mark.parametrize('btc_accounts', [[UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2]])
@pytest.mark.parametrize('owned_eth_tokens', [[A_RDN]])
@pytest.mark.parametrize('mocked_current_prices', [{
    'RDN': FVal('0.1135'),
    'ETH': FVal('212.92'),
    'BTC': FVal('8849.04'),
}])
def test_query_blockchain_balances_async(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
):
    """Test that the query blockchain balances endpoint works when queried asynchronously

    Same three queries as test_query_blockchain_balances, but each request
    passes async_query=True and the result is fetched through the task API.
    """
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    setup = setup_balances(rotki, ethereum_accounts=ethereum_accounts, btc_accounts=btc_accounts)
    # First query only ETH and token balances
    with setup.etherscan_patch, setup.alethio_patch:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='ETH',
        ), json={'async_query': True})
        # the task must be waited on while the patches are still active
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_eth_balances_result(
        rotki=rotki,
        json_data=outcome,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=False,
    )
    # Then query only BTC balances
    with setup.bitcoin_patch:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='BTC',
        ), json={'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_btc_balances_result(
        json_data=outcome,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=False,
    )
    # Finally query all balances
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "blockchainbalancesresource",
        ), json={'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
    assert_eth_balances_result(
        rotki=rotki,
        json_data=outcome,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )
    assert_btc_balances_result(
        json_data=outcome,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
@pytest.mark.parametrize('number_of_eth_accounts', [2])
@pytest.mark.parametrize('owned_eth_tokens', [[A_RDN]])
def test_query_blockchain_balances_ignore_cache(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
):
    """Test that the query blockchain balances endpoint can ignore the cache

    The chain manager's query functions are wrapped with mocks so that
    their call counts reveal whether the cache was used.
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts=ethereum_accounts, btc_accounts=btc_accounts)
    # wraps= keeps the real behavior while recording calls
    eth_query = patch.object(
        rotki.chain_manager,
        'query_ethereum_balances',
        wraps=rotki.chain_manager.query_ethereum_balances,
    )
    tokens_query = patch.object(
        rotki.chain_manager,
        'query_ethereum_tokens',
        wraps=rotki.chain_manager.query_ethereum_tokens,
    )
    with setup.etherscan_patch, setup.alethio_patch, setup.bitcoin_patch, eth_query as eth_mock, tokens_query as tokens_mock:  # noqa: E501
        # Query ETH and token balances once
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='ETH',
        ))
        assert_proper_response(response)
        json_data = response.json()
        assert json_data['message'] == ''
        assert_eth_balances_result(
            rotki=rotki,
            json_data=json_data,
            eth_accounts=ethereum_accounts,
            eth_balances=setup.eth_balances,
            token_balances=setup.token_balances,
            also_btc=False,
        )
        assert eth_mock.call_count == 1
        assert tokens_mock.call_count == 1
        # Query again and make sure this time cache is used
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='ETH',
        ))
        assert_proper_response(response)
        json_data = response.json()
        assert json_data['message'] == ''
        assert_eth_balances_result(
            rotki=rotki,
            json_data=json_data,
            eth_accounts=ethereum_accounts,
            eth_balances=setup.eth_balances,
            token_balances=setup.token_balances,
            also_btc=False,
        )
        # call counts unchanged -> the cached result was served
        assert eth_mock.call_count == 1
        assert tokens_mock.call_count == 1
        # Finally query with ignoring the cache
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='ETH',
        ), json={'ignore_cache': True})
        assert_proper_response(response)
        json_data = response.json()
        assert json_data['message'] == ''
        assert_eth_balances_result(
            rotki=rotki,
            json_data=json_data,
            eth_accounts=ethereum_accounts,
            eth_balances=setup.eth_balances,
            token_balances=setup.token_balances,
            also_btc=False,
        )
        # call counts bumped -> ignore_cache forced a fresh query
        assert eth_mock.call_count == 2
        assert tokens_mock.call_count == 2
@pytest.mark.parametrize('number_of_eth_accounts', [2])
@pytest.mark.parametrize('btc_accounts', [[UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2]])
@pytest.mark.parametrize('owned_eth_tokens', [[A_RDN, A_GNO]])
def test_query_blockchain_balances_alethio(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
        caplog,
):
    """Test that the query blockchain balances endpoint works correctly when used with alethio

    With use_alethio=True the token balances must come from the Alethio
    API, so no Alethio failure may appear in the logs.
    """
    caplog.set_level(logging.DEBUG)
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    # per-token balances for the two parametrized ethereum accounts
    token_balances = {A_RDN: ['0', '4000000'], A_GNO: ['323211111', '343442434']}
    setup = setup_balances(
        rotki=rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        token_balances=token_balances,
        use_alethio=True,
    )
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "blockchainbalancesresource",
        ))
    msg = 'Alethio query should not have failed'
    assert 'Alethio accounts token balances query failed' not in caplog.text, msg
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_eth_balances_result(
        rotki=rotki,
        json_data=json_data,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
def _add_blockchain_accounts_test_start(
        api_server,
        query_balances_before_first_modification,
        ethereum_accounts,
        btc_accounts,
        use_alethio,
        async_query,
):
    """Shared first phase for the add-blockchain-accounts tests.

    Adds two freshly generated ethereum accounts through the API
    (synchronously or asynchronously, per *async_query*), checks the
    returned balances and the DB state, and returns
    (all_eth_accounts, eth_balances, token_balances) so callers can
    continue with further modifications.
    """
    # Disable caching of query results
    rotki = api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    if query_balances_before_first_modification:
        # Also test by having balances queried before adding an account
        eth_balances = ['1000000', '2000000']
        token_balances = {A_RDN: ['0', '4000000']}
        setup = setup_balances(
            rotki,
            ethereum_accounts=ethereum_accounts,
            btc_accounts=btc_accounts,
            eth_balances=eth_balances,
            token_balances=token_balances,
            use_alethio=use_alethio,
        )
        with ExitStack() as stack:
            setup.enter_blockchain_patches(stack)
            response = requests.get(api_url_for(
                api_server,
                "blockchainbalancesresource",
            ))
    new_eth_accounts = [make_ethereum_address(), make_ethereum_address()]
    all_eth_accounts = ethereum_accounts + new_eth_accounts
    # balances for the 2 original + 2 new accounts, in order
    eth_balances = ['1000000', '2000000', '3000000', '4000000']
    token_balances = {A_RDN: ['0', '4000000', '0', '250000000']}
    setup = setup_balances(
        rotki,
        ethereum_accounts=all_eth_accounts,
        btc_accounts=btc_accounts,
        eth_balances=eth_balances,
        token_balances=token_balances,
        use_alethio=use_alethio,
    )
    # The application has started only with 2 ethereum accounts. Let's add two more
    data = {'accounts': [{'address': x} for x in new_eth_accounts]}
    if async_query:
        data['async_query'] = True
    with setup.etherscan_patch, setup.alethio_patch:
        response = requests.put(api_url_for(
            api_server,
            "blockchainsaccountsresource",
            blockchain='ETH',
        ), json=data)
        if async_query:
            task_id = assert_ok_async_response(response)
            json_data = wait_for_async_task(api_server, task_id)
        else:
            assert_proper_response(response)
            json_data = response.json()
            assert json_data['message'] == ''
    assert_eth_balances_result(
        rotki=rotki,
        json_data=json_data,
        eth_accounts=all_eth_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=query_balances_before_first_modification,
    )
    # Also make sure they are added in the DB
    accounts = rotki.data.db.get_blockchain_accounts()
    assert len(accounts.eth) == 4
    assert all(acc in accounts.eth for acc in all_eth_accounts)
    assert len(accounts.btc) == 2
    assert all(acc in accounts.btc for acc in btc_accounts)
    # Now try to query all balances to make sure the result is the stored
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(api_url_for(
            api_server,
            "blockchainbalancesresource",
        ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_eth_balances_result(
        rotki=rotki,
        json_data=json_data,
        eth_accounts=all_eth_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )
    return all_eth_accounts, eth_balances, token_balances
@pytest.mark.parametrize('number_of_eth_accounts', [2])
@pytest.mark.parametrize('btc_accounts', [[UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2]])
@pytest.mark.parametrize('owned_eth_tokens', [[A_RDN]])
@pytest.mark.parametrize('query_balances_before_first_modification', [True, False])
def test_add_blockchain_accounts(
        rotkehlchen_api_server,
        ethereum_accounts,
        btc_accounts,
        query_balances_before_first_modification,
):
    """Test that the endpoint adding blockchain accounts works properly"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # shared phase: add two new ETH accounts and verify balances + DB
    all_eth_accounts, eth_balances, token_balances = _add_blockchain_accounts_test_start(
        api_server=rotkehlchen_api_server,
        query_balances_before_first_modification=query_balances_before_first_modification,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        use_alethio=False,
        async_query=False,
    )
    # Now we will try to add a new BTC account. Setup the mocking infrastructure again
    all_btc_accounts = btc_accounts + [UNIT_BTC_ADDRESS3]
    setup = setup_balances(
        rotki,
        ethereum_accounts=all_eth_accounts,
        btc_accounts=all_btc_accounts,
        eth_balances=eth_balances,
        token_balances=token_balances,
        btc_balances=['3000000', '5000000', '600000000'],
    )
    # add the new BTC account
    with setup.bitcoin_patch:
        response = requests.put(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='BTC',
        ), json={'accounts': [{'address': UNIT_BTC_ADDRESS3}]})
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=all_btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
    # Also make sure it's added in the DB
    accounts = rotki.data.db.get_blockchain_accounts()
    assert len(accounts.eth) == 4
    assert all(acc in accounts.eth for acc in all_eth_accounts)
    assert len(accounts.btc) == 3
    assert all(acc in accounts.btc for acc in all_btc_accounts)
    # Now try to query all balances to make sure the result is also stored
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "blockchainbalancesresource",
        ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    assert_btc_balances_result(
        json_data=json_data,
        btc_accounts=all_btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=True,
    )
@pytest.mark.parametrize('include_etherscan_key', [False])
def test_no_etherscan_is_detected(rotkehlchen_api_server):
"""Make sure that interacting with ethereum | |
from assess import *
from sklearn_extra.cluster import KMedoids
import cx_Oracle
import json
import platform
import numpy as np
import time
# Library functions
# Comparison functions; usings[0] (difference) is the default one.
usings = [difference, ratio, reldifference]  # , absdifference
# labels = [quartile,  # ok for all comparisons
#           likert3, likert5,  # ok for difference and relative difference
#           fixedratio2, fixedratio3, fixedratio5,  # ok for ratio
#           fixeddiff2, fixeddiff3, fixeddiff5,  # ok for difference
#           fixedrel3, fixedrel5]  # ok for relative difference
# Labeling schemes applicable to each comparison function.
# NOTE(review): "reldifference" lists fixeddiff2 where a fixed *relative*
# labeling would be expected -- confirm this is intentional.
labels = {
    "ratio": [quartile, likert3, likert5, fixedratio2, fixedratio3, fixedratio5],  # ok for ratio
    "difference": [quartile, likert3, likert5, fixeddiff2, fixeddiff3, fixeddiff5],  # ok for difference
    "reldifference": [quartile, likert3, likert5, fixeddiff2, fixedrel3, fixedrel5]  # ok for relative difference
}
# Summary statistics computed over the comparison columns.
features = [avg, std, skew]
def clean_sql_df(df):
    """Normalize a dataframe read from SQL.

    Rounds every numeric value to 5 decimals and replaces NaN with 0 so
    downstream comparisons work on well-defined numbers.
    """
    return df.round(5).fillna(0)
def compute_auto_benchmark_sql(sql, k, measure, byclause):
    """Run *sql* through the module-level database connection and compute
    the diversified benchmarks on the cleaned result.

    Exits the process when the query returns no rows, since no benchmark
    can be computed on an empty cube.
    """
    frame = clean_sql_df(pd.read_sql(sql, con=connection))
    if len(frame.index) == 0:
        print("Empty dataframe, did you choose a proper selection predicate?")
        sys.exit(1)
    return compute_auto_benchmarks(frame, k, measure, byclause)
def splitAttr(benchmark):
    """Parse a benchmark descriptor such as "['attr','op','val']" into
    its three comma-separated components, stripped of brackets/quotes."""
    cleaned = benchmark[1:-1].replace("[", "").replace("]", "").replace("'", "")
    attr, op, val = cleaned.split(",")
    return attr, op, val
def diversify(X, benchmarks, k):
    """Pick at most *k* mutually diverse benchmark column names.

    Clusters the column vectors in X with k-medoids and keeps one
    representative (the medoid) per cluster; random_state=0 makes the
    selection deterministic.
    """
    # apply diversification with clustering
    kmedoids = KMedoids(n_clusters=min(k, len(benchmarks)), random_state=0, init='k-medoids++').fit(X)  # , max_iter=300
    # get the names of the diversified columns (one medoid per cluster)
    return [benchmarks[x] for x in kmedoids.medoid_indices_]
def compute_auto_benchmarks(Y, k, measure, byclause):
    """Compute up to *k* diversified enhanced cubes from dataframe Y.

    Every sibling/parent column of *measure* (excluding the group-by
    attributes and property-normalized columns, i.e. names containing
    "_") is a benchmark candidate.  The candidates are diversified with
    k-medoids clustering and, for each survivor, a new dataframe is
    produced holding the comparison of *measure* against that benchmark.

    Bug fix: the comparison used to be computed with the leftover loop
    variable ``slice`` (always the *last* candidate column) instead of
    the benchmark's own column, so every enhanced cube compared against
    the same sibling.

    Returns a list of (dataframe, benchmark_column_name) tuples.
    """
    # pick a default comparison function
    using = usings[0]
    fun_name = str(using.__name__)
    # get all the siblings and parents (which are not normalized by property)
    # E.g., pick RUSSIA but not RUSSIA_population
    candidates = [c for c in Y.columns
                  if c not in byclause and c != measure and "_" not in c]
    for col in candidates:
        # SOL2: copy the raw column now; the comparison itself is computed
        # later, only for the columns that survive diversification
        Y[fun_name + "_" + col] = Y[col]
    # select the column names on which diversification will be applied
    benchmarks = [c for c in Y.columns if fun_name in c]
    # transform such columns into numpy arrays and apply diversification
    X = np.array([Y[c].to_numpy() for c in benchmarks])
    benchmarks = diversify(X, benchmarks, k)
    Ys = []
    # iterate over the diversified columns
    for benchmark in benchmarks:
        # name of the original benchmark column (candidates contain no "_")
        l = benchmark.split("_")[1]
        # generate the new data frame: group-by attributes, the measure,
        # and every column related to this benchmark
        Z = Y[[c for c in byclause + [measure]
               + [x for x in Y.columns if l in x]]].copy(deep=True)
        # SOL2: compute the comparison here, against THIS benchmark's column
        Z[fun_name + "_" + l] = Y.apply(lambda x: using(x[measure], x[l]), axis=1)
        # fix the column names
        Z.columns = [x.replace(benchmark, "comparison").replace(l, "bc_" + measure)
                     for x in Z.columns]
        # append it to the diversified enhanced cubes
        Ys.append((Z, l))
    return Ys
def compute_using(Y, measure, using, compute_property=False):
    """Apply comparison function *using* between *measure* and its benchmark.

    *using* may be a callable or a JSON string {"fun": <name>} resolved
    against the global ``usings`` list. With compute_property=True every
    column containing the benchmark name (property variants included) gets
    its own "comparison_<fun><suffix>" column; otherwise a single
    "comparison" column is produced. The input frame is not mutated.
    """
    Z = Y.copy(deep=True)
    if isinstance(using, str):
        wanted = json.loads(using)["fun"]
        using = [f for f in usings if f.__name__ == wanted][0]
    benchmark_col = [c for c in Y.columns
                     if c.lower() == "bc_" + measure.lower()][0]
    if compute_property:
        for column in [c for c in Y.columns if benchmark_col in c]:
            suffix = column.replace(benchmark_col, "")
            Z["comparison_" + using.__name__ + suffix] = using(Z[measure], Z[column])
    else:
        Z["comparison"] = using(Z[measure], Z[benchmark_col])
    return Z
def compute_auto_using(Y, k, measure, byclause):
    """Enhance *Y* with every comparison function and keep *k* diverse ones.

    Returns a list of (dataframe, comparison-name) pairs where the retained
    column has been renamed to "comparison".
    """
    # Materialize one comparison column per function, property variants
    # included (e.g. both bc_quantity and bc_quantity_population).
    for fn in usings:
        Y = compute_using(Y, measure, fn, compute_property=True)
    candidates = [c for c in Y.columns if "comparison_" in c]
    # Describe every candidate column by its summary statistics.
    X = np.array([[feat(Y[c]) for feat in features] for c in candidates])
    chosen = diversify(X, candidates, k)
    cubes = []
    for column in chosen:
        frame = Y[[c for c in byclause + [measure, column]]].copy(deep=True)
        frame.columns = [c.replace(column, "comparison") for c in frame.columns]
        cubes.append((frame, column.replace("comparison_", "")))
    return cubes
def compute_label(Y, label):
    """Discretize the "comparison" column of *Y* with *label*.

    *label* is either a callable or the name of one of the functions listed
    in the global ``labels`` dict. Returns a copy of *Y* with a "label"
    column appended.
    """
    if isinstance(label, str):
        known = [fn for group in labels.values() for fn in group]
        label = [fn for fn in known if fn.__name__ == label][0]
    Z = Y.copy(deep=True)
    Z["label"] = label(Z["comparison"])
    return Z
# def compute_auto_labels(Y):
# Ys = []
# curlabels = {}
# for l in labels:
# Z = compute_label(Y, l)
# curlabels[l] = Z["label"]
# Ys.append((Z, l))
# # return Ys
# df = pd.DataFrame(data=curlabels)
# n = np.array(df)
# result = rk.center(n, method='kendalltau')
# rankedYs = []
# for Z, l in Ys:
# tau, p = sp.stats.kendalltau(Z["label"], result)
# rankedYs.append((Z, l, tau))
# rankedYs = sorted(rankedYs, key=lambda t: t[2], reverse=True)[:3]
# return [(Z, l) for (Z, l, tau) in rankedYs]
def compute_auto_labels(Y, k, measure, byclause, using):
    """Label *Y* with every labeling admissible for *using*, then keep up to
    *k* labelings that are mutually diverse under Kendall-tau distance.

    Returns a list of (dataframe, label-name) pairs where the retained
    label column has been renamed to "label".
    """
    def kendall_tau_distance(values1, values2):
        """Compute the Kendall tau distance."""
        # O(n^2) pairwise comparison via meshgrid — fine for cube-sized
        # inputs, expensive for long columns.
        n = len(values1)
        assert len(values2) == n, "Both lists have to be of equal length"
        i, j = np.meshgrid(np.arange(n), np.arange(n))
        a = np.argsort(values1)
        b = np.argsort(values2)
        ndisordered = np.logical_or(np.logical_and(a[i] < a[j], b[i] > b[j]), np.logical_and(a[i] > a[j], b[i] < b[j])).sum()
        return ndisordered  # / (n * (n - 1))
    # Pick the label family keyed by the comparison function's name; *using*
    # may be a JSON descriptor or a "fun_prop"-style string.
    for l in labels[json.loads(using)["fun"] if isinstance(using, str) and "{" in using else using.split("_")[0]]:
        Y = compute_label(Y, l)
        Y = Y.rename(columns={"label": "label_" + l.__name__})
    benchmarks = [x for x in Y.columns if "label_" in x]
    # Map every distinct label value to a stable integer rank so the label
    # columns become numeric vectors KMedoids can consume.
    dictionary = {ni: indi for indi, ni in enumerate(sorted(set([item for sublist in [Y[x] for x in benchmarks] for item in sublist])))}
    X = np.array([Y[x].map(dictionary).to_numpy() for x in benchmarks])
    kmedoids = KMedoids(n_clusters=min(k, len(benchmarks)), init='k-medoids++', random_state=0, metric=kendall_tau_distance).fit(X)
    benchmarks = list(set([benchmarks[x] for x in kmedoids.medoid_indices_]))
    Ys = []
    for benchmark in benchmarks:
        # "label_<name>" -> "<name>"
        l = benchmark.split("_")[1]
        Z = Y[[c for c in byclause + [measure, "comparison", benchmark]]].copy(deep=True)
        Z.columns = [x.replace(benchmark, "label") for x in Z.columns]
        Ys.append((Z, l))
    return Ys
def write_to_file(i, byclause, forclause, measure, df, sibling, using, label):
    """Serialize one enhanced cube to ``<args.path>_<i>.json``.

    Args:
        i: index of the cube, used in the output file name.
        byclause: group-by column names.
        forclause: selection-clause column names (removed from the dimensions).
        measure: name of the measure column.
        df: the enhanced cube.
        sibling: the benchmark this cube is compared against.
        using: comparison descriptor (JSON string or "fun_prop" name), or
            None when no comparison was applied yet.
        label: label-function name, or None when no labeling was applied.
    """
    # When no comparison/labeling was requested, apply the defaults so the
    # output always carries "comparison" and "label" columns. (The old code
    # re-tested `using is not None` inside `if using is None`, which could
    # only ever pick the default — simplified accordingly.)
    if using is None:
        df = compute_using(df, measure, difference)
    if label is None:
        df = compute_label(df, quartile)
    byclause = [x for x in byclause if x not in forclause]
    if len(byclause) == 0:
        # Degenerate case: everything was selected on; keep one dimension.
        byclause = [forclause[0]]
    enhcube = {
        "raw": json.loads(df.to_json(orient="records", double_precision=5)),
        "dimensions": byclause,
        "measures": ["comparison"],
        "against": "'" + sibling + "'",
        # "fun_prop"-style names carry the scaling property after the "_".
        "scaled": using.split("_")[1] if using is not None and "_" in using else "",
        "using": {"fun": (json.loads(using)["fun"] if isinstance(using, str) and "{" in using else using.split("_")[0]), "params": [measure, "benchmark." + measure]} if using is not None else {},
        "label": label if label is not None else "",
        "def_using": difference.__name__ + "(" + measure + ", " + "benchmark." + measure + ")",
        "def_label": quartile.__name__
    }
    with open(args.path + "_" + str(i) + ".json", 'w') as f:
        # Strip the backslashes that to_json escaping introduces.
        f.write(json.dumps(enhcube).replace("\\", ""))
if __name__ == '__main__':
###############################################################################
# PARAMETERS SETUP
###############################################################################
toprint = {}
parser = argparse.ArgumentParser()
parser.add_argument("--curid", help="curid", type=str)
parser.add_argument("--credentials", help="credentials", type=str)
parser.add_argument("--sql", help="query to get the cube", type=str)
parser.add_argument("--measure", help="measure", type=str)
parser.add_argument("--byclause", help="group by", type=str)
parser.add_argument("--forclause", help="selection clause", type=str)
parser.add_argument("--cube", help="cube", type=str)
parser.add_argument("--benchmark", help="benchmark", type=str)
parser.add_argument("--using", help="using", type=str)
parser.add_argument("--labels", help="labels", type=str)
parser.add_argument("--k", help="number of diverse clauses", type=int)
parser.add_argument("--path", help="output path", type=str)
args = parser.parse_args()
# print(args)
credentials = json.loads(args.credentials)
sql = args.sql.replace("?", "\"").replace("!", " ")
sql = sql[1:-1] if platform.system() == 'Linux' else sql
# print(sql)
# sys.exit(1)
k = args.k
toprint["curid"] = args.curid
toprint["k"] = k
toprint["cube"] = args.cube.upper()
measure = args.measure.upper()
toprint["measure"] = measure
byclause = [x.upper() for x in args.byclause.split(',')]
toprint["byclause"] = '"' + str(byclause) + '"'
toprint["nparents"] = len([x for x in byclause if x != ""])
forclause = [x.upper() for x in args.forclause[1:-1].split(",")]
toprint["forclause"] = '"' + str(forclause) + '"'
toprint["nsiblings"] = len([x for x in forclause if x != ""])
toprint["id"] = '"' + (str(toprint["measure"]) + "-" + str(toprint["byclause"]) + "-" + str(toprint["forclause"])).replace('"', '') + '"'
benchmark = args.benchmark.replace("!", " ") if args.benchmark != "0" else None
toprint["benchmark_time"] = -1
using = args.using if args.using != "null" | |
# Purpose: painless matplotlib embedding for wxPython
# Author: <NAME> <<EMAIL>>
#
# Copyright 2005-2009 Illinois Institute of Technology
#
# See the file "LICENSE" for information on usage and redistribution
# of this file, and for a DISCLAIMER OF ALL WARRANTIES.
"""
Embedding matplotlib in wxPython applications is straightforward, but the
default plotting widget lacks the capabilities necessary for interactive use.
WxMpl (wxPython+matplotlib) is a library of components that provide these
missing features in the form of a better matplotlib FigureCanvas.
"""
import wx
import sys
import os.path
import weakref
import matplotlib
matplotlib.use('WXAgg')
import numpy as NumPy
from distutils.version import LooseVersion
if LooseVersion(matplotlib.__version__) < LooseVersion('1.4.2'):
from matplotlib.axes import _process_plot_var_args
else:
from matplotlib.axes._base import _process_plot_var_args
from matplotlib.backend_bases import FigureCanvasBase
from matplotlib.backends.backend_agg import FigureCanvasAgg, RendererAgg
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg
from matplotlib.figure import Figure
from matplotlib.font_manager import FontProperties
from matplotlib.projections.polar import PolarAxes
from matplotlib.transforms import Bbox
__version__ = '2.0dev'
# Public API of the wxmpl module.
__all__ = ['PlotPanel', 'PlotFrame', 'PlotApp', 'StripCharter', 'Channel',
    'FigurePrinter', 'PointEvent', 'EVT_POINT', 'SelectionEvent',
    'EVT_SELECTION']
# If you are using wxGtk without libgnomeprint and want to use something other
# than `lpr' to print you will have to specify that command here.
POSTSCRIPT_PRINTING_COMMAND = 'lpr'
# Between 0.98.1 and 0.98.3rc there were some significant API changes:
#   * FigureCanvasWx.draw(repaint=True) became draw(drawDC=None)
#   * The following events were added:
#       - figure_enter_event
#       - figure_leave_event
#       - axes_enter_event
#       - axes_leave_event
# NOTE(review): this is a lexicographic string comparison, not a real version
# comparison — it happens to work for 0.98.x but would misorder versions like
# '0.100.0'. Consider LooseVersion (already imported above) if it matters.
MATPLOTLIB_0_98_3 = '0.98.3' <= matplotlib.__version__
#
# Utility functions and classes
#
def invert_point(x, y, transform):
    """
    Returns a coordinate inverted by the specified C{Transform}.
    """
    inverse = transform.inverted()
    return inverse.transform_point((x, y))
def find_axes(canvas, x, y):
    """
    Finds the C{Axes} within a matplotlib C{FigureCanvas} that contains the
    canvas coordinates C{(x, y)} and returns that axes and the corresponding
    data coordinates C{xdata, ydata} as a 3-tuple.
    If no axes contains the specified point, or more than one does, a 3-tuple
    of C{None} is returned.
    """
    event = matplotlib.backend_bases.MouseEvent('', canvas, x, y)
    hits = [a for a in canvas.get_figure().get_axes() if a.in_axes(event)]
    if len(hits) != 1:
        # Outside every axes, or ambiguous because axes overlap.
        return None, None, None
    axes = hits[0]
    xdata, ydata = invert_point(x, y, axes.transData)
    return axes, xdata, ydata
def get_bbox_lims(bbox):
    """
    Returns the boundaries of the X and Y intervals of a C{Bbox}.
    """
    (x0, y0), (x1, y1) = bbox.min, bbox.max
    return (x0, x1), (y0, y1)
def find_selected_axes(canvas, x1, y1, x2, y2):
    """
    Finds the C{Axes} within a matplotlib C{FigureCanvas} that overlaps with a
    canvas area from C{(x1, y1)} to C{(x2, y2)}.  That axes and the
    corresponding X and Y axes ranges are returned as a 3-tuple.
    If no axes overlaps with the specified area, or more than one axes
    overlaps, a 3-tuple of C{None}s is returned.
    """
    axes = None
    bbox = Bbox.from_extents(x1, y1, x2, y2)
    # Require exactly one overlapping axes; bail out on the second hit.
    for a in canvas.get_figure().get_axes():
        if bbox.overlaps(a.bbox):
            if axes is None:
                axes = a
            else:
                return None, None, None
    if axes is None:
        return None, None, None
    # Clip the selection to the axes area before converting to data coords.
    x1, y1, x2, y2 = limit_selection(bbox, axes)
    # NOTE(review): Bbox.inverse_transformed() was deprecated in matplotlib
    # 3.1 and removed later; on modern matplotlib the equivalent is
    # Bbox.transformed(axes.transData.inverted()). Confirm the supported
    # matplotlib range before changing.
    xrange, yrange = get_bbox_lims(
        Bbox.from_extents(x1, y1, x2, y2).inverse_transformed(axes.transData))
    return axes, xrange, yrange
def limit_selection(bbox, axes):
    """
    Finds the region of a selection C{bbox} which overlaps with the supplied
    C{axes} and returns it as the 4-tuple C{(xmin, ymin, xmax, ymax)}.
    """
    (bx0, bx1), (by0, by1) = get_bbox_lims(bbox)
    (ax0, ax1), (ay0, ay1) = get_bbox_lims(axes.bbox)
    # Intersection of the two rectangles.
    return max(bx0, ax0), max(by0, ay0), min(bx1, ax1), min(by1, ay1)
def format_coord(axes, xdata, ydata):
    """
    A C{None}-safe version of {Axes.format_coord()}.
    """
    if xdata is not None and ydata is not None:
        return axes.format_coord(xdata, ydata)
    return ''
def toplevel_parent_of_window(window):
    """
    Returns the first top-level parent of a wx.Window
    """
    candidate = window
    # Walk up the widget hierarchy until a top-level window is found.
    while not isinstance(candidate, wx.TopLevelWindow):
        candidate = candidate.GetParent()
    return candidate
class AxesLimits:
    """
    Alters the X and Y limits of C{Axes} objects while maintaining a history
    of the changes.
    """

    def __init__(self, autoscaleUnzoom):
        # When True, popping the last history entry autoscales the axes
        # instead of restoring the recorded limits.
        self.autoscaleUnzoom = autoscaleUnzoom
        # Maps each axes to a stack of previous (xlim, ylim) pairs; weak keys
        # so discarded axes do not leak their history.
        self.history = weakref.WeakKeyDictionary()

    def setAutoscaleUnzoom(self, state):
        """
        Enable or disable autoscaling the axes as a result of zooming all the
        way back out.
        """
        # BUG FIX: this used to call C{self.limits.setAutoscaleUnzoom(state)},
        # but AxesLimits has no C{limits} attribute (copy/paste from
        # PlotPanelDirector), so every call raised AttributeError.
        self.autoscaleUnzoom = state

    def _get_history(self, axes):
        """
        Returns the history list of X and Y limits associated with C{axes}.
        """
        return self.history.setdefault(axes, [])

    def zoomed(self, axes):
        """
        Returns a boolean indicating whether C{axes} has had its limits
        altered.
        """
        return bool(self._get_history(axes))

    def set(self, axes, xrange, yrange):
        """
        Changes the X and Y limits of C{axes} to C{xrange} and C{yrange}
        respectively.  A boolean indicating whether or not the axes should be
        redrawn is returned, because polar axes cannot have their limits
        changed sensibly.
        """
        if not axes.can_zoom():
            return False
        # The axes limits must be converted to tuples because MPL 0.98.1
        # returns the underlying array objects
        oldRange = tuple(axes.get_xlim()), tuple(axes.get_ylim())
        self._get_history(axes).append(oldRange)
        axes.set_xlim(xrange)
        axes.set_ylim(yrange)
        return True

    def restore(self, axes):
        """
        Changes the X and Y limits of C{axes} to their previous values.  A
        boolean indicating whether or not the axes should be redrawn is
        returned.
        """
        history = self._get_history(axes)
        if not history:
            return False
        xrange, yrange = history.pop()
        if self.autoscaleUnzoom and not len(history):
            # Popped the last entry: snap back to autoscaled limits.
            axes.autoscale_view()
        else:
            axes.set_xlim(xrange)
            axes.set_ylim(yrange)
        return True
#
# Director of the matplotlib canvas
#
class PlotPanelDirector:
"""
Encapsulates all of the user-interaction logic required by the
C{PlotPanel}, following the Humble Dialog Box pattern proposed by <NAME>:
U{http://www.objectmentor.com/resources/articles/TheHumbleDialogBox.pdf}
"""
# TODO: add a programmatic interface to zooming and user interactions
# TODO: full support for MPL events
    def __init__(self, view, zoom=True, selection=True, rightClickUnzoom=True,
            autoscaleUnzoom=True):
        """
        Create a new director for the C{PlotPanel} C{view}.  The keyword
        arguments C{zoom} and C{selection} have the same meanings as for
        C{PlotPanel}.
        """
        self.view = view
        self.zoomEnabled = zoom
        self.selectionEnabled = selection
        self.rightClickUnzoom = rightClickUnzoom
        # Zoom history and unzoom behaviour are delegated to a helper.
        self.limits = AxesLimits(autoscaleUnzoom)
        # Device coordinates of a left-click in progress; None when no
        # selection drag is active.
        self.leftButtonPoint = None
    def setSelection(self, state):
        """
        Enable or disable left-click area selection.
        """
        # Takes effect on the next left-button press.
        self.selectionEnabled = state
    def setZoomEnabled(self, state):
        """
        Enable or disable zooming as a result of left-click area selection.
        """
        # Takes effect on the next completed selection.
        self.zoomEnabled = state
    def setAutoscaleUnzoom(self, state):
        """
        Enable or disable autoscaling the axes as a result of zooming all the
        way back out.
        """
        # Forwarded to the AxesLimits helper, which owns the zoom history.
        self.limits.setAutoscaleUnzoom(state)
    def setRightClickUnzoom(self, state):
        """
        Enable or disable unzooming as a result of right-clicking.
        """
        # Checked on each right-button release.
        self.rightClickUnzoom = state
    def canDraw(self):
        """
        Indicates if plot may be not redrawn due to the presence of a selection
        box.
        """
        # Redrawing mid-drag would erase the selection rubberband.
        return self.leftButtonPoint is None
    def zoomed(self, axes):
        """
        Returns a boolean indicating whether or not the plot has been zoomed in
        as a result of a left-click area selection.
        """
        # Delegates to the zoom-history helper.
        return self.limits.zoomed(axes)
    def keyDown(self, evt):
        """
        Handles wxPython key-press events.  These events are currently
        skipped.
        """
        # Skip() lets wx continue normal event processing.
        evt.Skip()
    def keyUp(self, evt):
        """
        Handles wxPython key-release events.  These events are currently
        skipped.
        """
        # Skip() lets wx continue normal event processing.
        evt.Skip()
def leftButtonDown(self, evt, x, y):
"""
Handles wxPython left-click events.
"""
self.leftButtonPoint = (x, y)
view = self.view
axes, xdata, ydata = find_axes(view, x, y)
if axes is not None and self.selectionEnabled and axes.can_zoom():
view.cursor.setCross()
view.crosshairs.clear()
    def leftButtonUp(self, evt, x, y):
        """
        Handles wxPython left-click-release events.
        """
        if self.leftButtonPoint is None:
            # No matching button-down (e.g. the click started elsewhere).
            return
        view = self.view
        axes, xdata, ydata = find_axes(view, x, y)
        x0, y0 = self.leftButtonPoint
        self.leftButtonPoint = None
        view.rubberband.clear()
        if x0 == x:
            if y0 == y and axes is not None:
                # Zero-area selection: treat the release as a point click.
                view.notify_point(axes, x, y)
                view.crosshairs.set(x, y)
            return
        elif y0 == y:
            # Degenerate (zero-height) selection: ignore it.
            return
        xdata = ydata = None
        axes, xrange, yrange = find_selected_axes(view, x0, y0, x, y)
        if axes is not None:
            xdata, ydata = invert_point(x, y, axes.transData)
            if self.zoomEnabled:
                # Zoom mode: push the new limits and redraw on success.
                if self.limits.set(axes, xrange, yrange):
                    self.view.draw()
            else:
                # Selection mode: report the clipped selection rectangle.
                bbox = Bbox.from_extents(x0, y0, x, y)
                x1, y1, x2, y2 = limit_selection(bbox, axes)
                self.view.notify_selection(axes, x1, y1, x2, y2)
        if axes is None:
            view.cursor.setNormal()
        elif not axes.can_zoom():
            # Non-zoomable axes (e.g. polar): no crosshairs, just location.
            view.cursor.setNormal()
            view.location.set(format_coord(axes, xdata, ydata))
        else:
            view.crosshairs.set(x, y)
            view.location.set(format_coord(axes, xdata, ydata))
    def rightButtonDown(self, evt, x, y):
        """
        Handles wxPython right-click events.  These events are currently
        skipped.
        """
        # Skip() lets wx continue normal event processing.
        evt.Skip()
def rightButtonUp(self, evt, x, y):
"""
Handles wxPython right-click-release events.
"""
view = self.view
axes, xdata, ydata = find_axes(view, x, y)
if (axes is not None and self.zoomEnabled and self.rightClickUnzoom
and self.limits.restore(axes)):
view.crosshairs.clear()
view.draw()
view.crosshairs.set(x, y)
def mouseMotion(self, evt, x, y):
"""
Handles wxPython mouse motion events, dispatching them based on whether
or not a selection is in process and what the cursor is over.
"""
view = self.view
axes, xdata, ydata = find_axes(view, x, y)
if self.leftButtonPoint is not None:
self.selectionMouseMotion(evt, x, y, axes, xdata, ydata)
else:
if axes is None:
self.canvasMouseMotion(evt, | |
= _messages.StringField(3)
parent = _messages.StringField(4, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesCreateRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesCreateRequest
  object.
  Fields:
    createMessageRequest: A CreateMessageRequest resource to be passed as the
      request body.
    parent: The name of the dataset this message belongs to.
  """
  createMessageRequest = _messages.MessageField('CreateMessageRequest', 1)
  parent = _messages.StringField(2, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesDeleteRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesDeleteRequest
  object.
  Fields:
    name: The resource name of the HL7v2 message to delete.
  """
  name = _messages.StringField(1, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesGetRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesGetRequest
  object.
  Enums:
    ViewValueValuesEnum: Specifies which parts of the Message resource to
      return in the response.
  Fields:
    name: The resource name of the HL7v2 message to retrieve.
    view: Specifies which parts of the Message resource to return in the
      response.
  """
  class ViewValueValuesEnum(_messages.Enum):
    r"""Specifies which parts of the Message resource to return in the
    response.
    Values:
      MESSAGE_VIEW_UNSPECIFIED: <no description>
      RAW_ONLY: <no description>
      PARSED_ONLY: <no description>
      FULL: <no description>
    """
    MESSAGE_VIEW_UNSPECIFIED = 0
    RAW_ONLY = 1
    PARSED_ONLY = 2
    FULL = 3
  name = _messages.StringField(1, required=True)
  view = _messages.EnumField('ViewValueValuesEnum', 2)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesIngestRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesIngestRequest
  object.
  Fields:
    ingestMessageRequest: A IngestMessageRequest resource to be passed as the
      request body.
    parent: The name of the HL7v2 store this message belongs to.
  """
  ingestMessageRequest = _messages.MessageField('IngestMessageRequest', 1)
  parent = _messages.StringField(2, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesListRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesListRequest
  object.
  Fields:
    filter: Restricts messages returned to those matching a filter. Syntax: ht
      tps://cloud.google.com/appengine/docs/standard/python/search/query_strin
      gs Fields/functions available for filtering are: * `message_type`,
      from the MSH-9 segment. For example, `NOT message_type = "ADT"`. *
      `send_date` or `sendDate`, the YYYY-MM-DD date the message was sent in
      the dataset's time_zone, from the MSH-7 segment. For example, `send_date
      < "2017-01-02"`. * `send_time`, the timestamp when the message was
      sent, using the RFC3339 time format for comparisons, from the MSH-7
      segment. For example, `send_time < "2017-01-02T00:00:00-05:00"`. *
      `send_facility`, the care center that the message came from, from the
      MSH-4 segment. For example, `send_facility = "ABC"`. *
      `PatientId(value, type)`, which matches if the message lists a patient
      having an ID of the given value and type in the PID-2, PID-3, or PID-4
      segments. For example, `PatientId("123456", "MRN")`. * `labels.x`, a
      string value of the label with key `x` as set using the Message.labels
      map. For example, `labels."priority"="high"`. The operator `:*` can be
      used to assert the existence of a label. For example,
      `labels."priority":*`. Limitations on conjunctions: * Negation on the
      patient ID function or the labels field is not supported. For example,
      these queries are invalid: `NOT PatientId("123456", "MRN")`, `NOT
      labels."tag1":*`, `NOT labels."tag2"="val2"`. * Conjunction of multiple
      patient ID functions is not supported, for example this query is
      invalid: `PatientId("123456", "MRN") AND PatientId("456789", "MRN")`. *
      Conjunction of multiple labels fields is also not supported, for example
      this query is invalid: `labels."tag1":* AND labels."tag2"="val2"`. *
      Conjunction of one patient ID function, one labels field and conditions
      on other fields is supported. For example, this query is valid:
      `PatientId("123456", "MRN") AND labels."tag1":* AND message_type =
      "ADT"`.
    orderBy: Orders messages returned by the specified order_by clause.
      Syntax:
      https://cloud.google.com/apis/design/design_patterns#sorting_order
      Fields available for ordering are: * `send_time`
    pageSize: Limit on the number of messages to return in a single response.
      If zero the default page size of 100 is used.
    pageToken: The next_page_token value returned from the previous List
      request, if any.
    parent: Name of the HL7v2 store to retrieve messages from.
  """
  filter = _messages.StringField(1)
  orderBy = _messages.StringField(2)
  pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(4)
  parent = _messages.StringField(5, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesPatchRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresMessagesPatchRequest
  object.
  Fields:
    message: A Message resource to be passed as the request body.
    name: Resource name of the Message, of the form `projects/{project_id}/dat
      asets/{dataset_id}/hl7V2Stores/{hl7_v2_store_id}/messages/{message_id}`.
      Assigned by the server.
    updateMask: The update mask applies to the resource. For the `FieldMask`
      definition, see https://developers.google.com/protocol-
      buffers/docs/reference/google.protobuf#fieldmask Only the `labels` field
      is allowed to be updated. The labels in the request are merged with the
      existing set of labels. Existing labels with the same keys are updated.
  """
  message = _messages.MessageField('Message', 1)
  name = _messages.StringField(2, required=True)
  updateMask = _messages.StringField(3)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresPatchRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresPatchRequest object.
  Fields:
    hl7V2Store: A Hl7V2Store resource to be passed as the request body.
    name: Output only. Resource name of the HL7v2 store, of the form `projects
      /{project_id}/datasets/{dataset_id}/hl7V2Stores/{hl7v2_store_id}`.
    updateMask: The update mask applies to the resource. For the `FieldMask`
      definition, see https://developers.google.com/protocol-
      buffers/docs/reference/google.protobuf#fieldmask
  """
  hl7V2Store = _messages.MessageField('Hl7V2Store', 1)
  name = _messages.StringField(2, required=True)
  updateMask = _messages.StringField(3)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresSetIamPolicyRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsHl7V2StoresSetIamPolicyRequest
  object.
  Fields:
    resource: REQUIRED: The resource for which the policy is being specified.
      See the operation documentation for the appropriate value for this
      field.
    setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
      request body.
  """
  resource = _messages.StringField(1, required=True)
  setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsHl7V2StoresTestIamPermissionsRequest(_messages.Message):
  r"""A
  HealthcareProjectsLocationsDatasetsHl7V2StoresTestIamPermissionsRequest
  object.
  Fields:
    resource: REQUIRED: The resource for which the policy detail is being
      requested. See the operation documentation for the appropriate value for
      this field.
    testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
      passed as the request body.
  """
  resource = _messages.StringField(1, required=True)
  testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsListRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsListRequest object.
  Fields:
    pageSize: The maximum number of items to return. Capped to 100 if not
      specified. May not be larger than 1000.
    pageToken: The next_page_token value returned from a previous List
      request, if any.
    parent: The name of the project whose datasets should be listed. For
      example, `projects/{project_id}/locations/{location_id}`.
  """
  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsOperationsGetRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsOperationsGetRequest object.
  Fields:
    name: The name of the operation resource.
  """
  name = _messages.StringField(1, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsOperationsListRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsOperationsListRequest object.
  Fields:
    filter: The standard list filter.
    name: The name of the operation's parent resource.
    pageSize: The standard list page size.
    pageToken: The standard list page token.
  """
  filter = _messages.StringField(1)
  name = _messages.StringField(2, required=True)
  pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(4)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsPatchRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsPatchRequest object.
  Fields:
    dataset: A Dataset resource to be passed as the request body.
    name: Output only. Resource name of the dataset, of the form
      `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`.
    updateMask: The update mask applies to the resource. For the `FieldMask`
      definition, see https://developers.google.com/protocol-
      buffers/docs/reference/google.protobuf#fieldmask
  """
  dataset = _messages.MessageField('Dataset', 1)
  name = _messages.StringField(2, required=True)
  updateMask = _messages.StringField(3)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsSetIamPolicyRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsSetIamPolicyRequest object.
  Fields:
    resource: REQUIRED: The resource for which the policy is being specified.
      See the operation documentation for the appropriate value for this
      field.
    setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
      request body.
  """
  resource = _messages.StringField(1, required=True)
  setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsDatasetsTestIamPermissionsRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsDatasetsTestIamPermissionsRequest object.
  Fields:
    resource: REQUIRED: The resource for which the policy detail is being
      requested. See the operation documentation for the appropriate value for
      this field.
    testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
      passed as the request body.
  """
  resource = _messages.StringField(1, required=True)
  testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsGetRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsGetRequest object.
  Fields:
    name: Resource name for the location.
  """
  name = _messages.StringField(1, required=True)
# NOTE(review): generated-style apitools message; prefer regenerating from
# the API definition over hand-editing.
class HealthcareProjectsLocationsListRequest(_messages.Message):
  r"""A HealthcareProjectsLocationsListRequest object.
  Fields:
    filter: The standard list filter.
    name: The resource that owns the locations collection, if applicable.
    pageSize: The standard list page size.
    pageToken: The standard list page token.
  """
  filter = _messages.StringField(1)
  name = _messages.StringField(2, required=True)
  pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(4)
class Hl7V2Store(_messages.Message):
r"""Represents an HL7v2 store.
Messages:
LabelsValue: User-supplied key-value pairs used to organize HL7v2 stores.
Label keys must be between 1 and 63 characters long, have a UTF-8
encoding of maximum 128 bytes, and must conform to the following PCRE
regular expression: \p{Ll}\p{Lo}{0,62} Label values are optional, must
be between 1 and 63 characters long, have a UTF-8 encoding of maximum
128 bytes, and must conform to the following PCRE regular expression:
[\p{Ll}\p{Lo}\p{N}_-]{0,63} No more than 64 labels can be associated
with a given store.
Fields:
labels: User-supplied key-value pairs used to organize HL7v2 stores.
Label keys must be between 1 and 63 characters long, have a UTF-8
encoding of maximum 128 bytes, and must conform to the following PCRE
regular expression: \p{Ll}\p{Lo}{0,62} Label values are optional, must
be between 1 and 63 characters long, have a UTF-8 encoding of maximum
128 bytes, and must conform to the following PCRE regular expression:
[\p{Ll}\p{Lo}\p{N}_-]{0,63} No more than 64 labels can be associated
with | |
import asyncio
import json
import time
from collections.abc import Mapping
from datetime import datetime, timedelta
from enum import IntEnum, unique
from functools import singledispatch
from logging import getLogger
from pprint import pformat
from typing import Union, Optional, Any, Callable, Dict, Iterable
from warnings import warn
from typing import AsyncContextManager
import aiormq
from aiormq.types import DeliveredMessage
from .exceptions import MessageProcessError
log = getLogger(__name__)
NoneType = type(None)
@unique
class DeliveryMode(IntEnum):
    # AMQP basic-properties delivery modes: 1 = message may be lost on broker
    # restart, 2 = message is persisted to disk.
    NOT_PERSISTENT = 1
    PERSISTENT = 2
# Accepted representations for date-like AMQP property values.
DateType = Union[int, datetime, float, timedelta, None]
# NOTE(review): despite the name, this factor converts seconds to
# *milliseconds* (it is used below to build AMQP expiration strings) —
# confirm the intent before renaming anything.
MICROSECONDS = 1000
def to_microseconds(seconds):
    # Misnomer kept for backward compatibility: returns integer milliseconds.
    return int(seconds * MICROSECONDS)
@singledispatch
def encode_expiration(value) -> Optional[str]:
    """Encode an expiration value into the string form used on the wire.

    Registered overloads accept datetime (encoded relative to now),
    int/float seconds, timedelta, or None; anything else raises ValueError.
    """
    raise ValueError('Invalid timestamp type: %r' % type(value), value)

@encode_expiration.register(datetime)
def _(value):
    # An absolute datetime means "expires at": encode the delta from now.
    # (A datetime in the past yields a negative value.)
    now = datetime.now()
    return str(to_microseconds((value - now).total_seconds()))

@encode_expiration.register(int)
@encode_expiration.register(float)
def _(value):
    # Plain numbers are taken as a duration in seconds.
    return str(to_microseconds(value))

@encode_expiration.register(timedelta)
def _(value):
    # Consistency fix: route through the shared seconds->ms helper instead of
    # a hand-inlined `* 1000` (identical result).
    return str(to_microseconds(value.total_seconds()))

@encode_expiration.register(type(None))
def _(_):
    return None
ZERO_TIME = datetime.utcfromtimestamp(0)
@singledispatch
def decode_expiration(t) -> Optional[float]:
raise ValueError('Invalid expiration type: %r' % type(t), t)
@decode_expiration.register(time.struct_time)
def _(t: time.struct_time) -> float:
return (datetime(*t[:7]) - ZERO_TIME).total_seconds()
@decode_expiration.register(str)
def _(t: str) -> float:
return float(t)
@singledispatch
def encode_timestamp(value) -> Optional[time.struct_time]:
raise ValueError('Invalid timestamp type: %r' % type(value), value)
@encode_timestamp.register(time.struct_time)
def _(value):
return value
@encode_timestamp.register(datetime)
def _(value):
return value.timetuple()
@encode_timestamp.register(float)
@encode_timestamp.register(int)
def _(value):
return datetime.utcfromtimestamp(value).timetuple()
@encode_timestamp.register(timedelta)
def _(value):
return datetime.utcnow() + value
@encode_timestamp.register(type(None))
def _(_):
return None
@singledispatch
def decode_timestamp(value) -> Optional[datetime]:
raise ValueError('Invalid timestamp type: %r' % type(value), value)
@decode_timestamp.register(datetime)
def _(value):
return value
@decode_timestamp.register(float)
@decode_timestamp.register(int)
def _(value):
return datetime.utcfromtimestamp(value)
@decode_timestamp.register(time.struct_time)
def _(value: time.struct_time):
return datetime(*value[:6])
@decode_timestamp.register(type(None))
def _(_):
return None
def optional(value, func: Callable[[Any], Any]=str, default=None):
return func(value) if value else default
class HeaderProxy(Mapping):
def __init__(self, headers: Dict[str, bytes]):
self._headers = headers # type: Dict[str, bytes]
self._cache = {} # type: Dict[str, Any]
def __getitem__(self, k):
if k not in self._headers:
raise KeyError(k)
if k not in self._cache:
value = self._headers[k]
if isinstance(value, bytes):
self._cache[k] = value.decode()
else:
self._cache[k] = value
return self._cache[k]
def __setitem__(self, key, value):
self._headers[key] = format_headers(value)
self._cache.pop(key, None)
def __len__(self) -> int:
return len(self._headers)
def __iter__(self):
for key in self._headers:
yield key
@singledispatch
def header_converter(value: Any) -> bytes:
return json.dumps(
value, separators=(',', ":"),
ensure_ascii=False, default=repr
).encode()
@header_converter.register(bytes)
@header_converter.register(datetime)
@header_converter.register(NoneType)
@header_converter.register(list)
@header_converter.register(int)
def _(v: bytes):
return v
@header_converter.register(bytearray)
def _(v: bytes):
return bytes(v)
@header_converter.register(str)
def _(v):
return v.encode()
@header_converter.register(set)
@header_converter.register(tuple)
@header_converter.register(frozenset)
def _(v: Iterable):
return header_converter(list(v))
def format_headers(d: Dict[str, Any]) -> Dict[str, bytes]:
ret = {}
if not d:
return ret
for key, value in d.items():
ret[key] = header_converter(value)
return ret
class Message:
""" AMQP message abstraction """
__slots__ = (
"app_id", "body", "body_size", "content_encoding", "content_type",
"correlation_id", "delivery_mode", "expiration", "headers",
"headers_raw", "message_id", "priority", "reply_to", "timestamp",
"type", "user_id", "__lock",
)
def __init__(self, body: bytes, *, headers: dict=None,
content_type: str=None, content_encoding: str=None,
delivery_mode: DeliveryMode=None,
priority: int=None, correlation_id=None,
reply_to: str=None, expiration: DateType=None,
message_id: str=None,
timestamp: DateType=None,
type: str=None, user_id: str=None,
app_id: str=None):
""" Creates a new instance of Message
:param body: message body
:param headers: message headers
:param headers_raw: message raw headers
:param content_type: content type
:param content_encoding: content encoding
:param delivery_mode: delivery mode
:param priority: priority
:param correlation_id: correlation id
:param reply_to: reply to
:param expiration: expiration in seconds (or datetime or timedelta)
:param message_id: message id
:param timestamp: timestamp
:param type: type
:param user_id: user id
:param app_id: app id
"""
self.__lock = False
self.body = body if isinstance(body, bytes) else bytes(body)
self.body_size = len(self.body) if self.body else 0
self.headers_raw = format_headers(headers)
self.headers = HeaderProxy(self.headers_raw)
self.content_type = content_type
self.content_encoding = content_encoding
self.delivery_mode = DeliveryMode(
optional(
delivery_mode,
func=int,
default=DeliveryMode.NOT_PERSISTENT)
).value
self.priority = optional(priority, int, 0)
self.correlation_id = optional(correlation_id)
self.reply_to = optional(reply_to)
self.expiration = expiration
self.message_id = optional(message_id)
self.timestamp = encode_timestamp(timestamp)
self.type = optional(type)
self.user_id = optional(user_id)
self.app_id = optional(app_id)
@staticmethod
def _as_bytes(value):
if isinstance(value, bytes):
return value
elif isinstance(value, str):
return value.encode()
elif isinstance(value, NoneType):
return b''
else:
return str(value).encode()
def info(self) -> dict:
""" Create a dict with message attributes
::
{
"body_size": 100,
"headers": {},
"content_type": "text/plain",
"content_encoding": "",
"delivery_mode": DeliveryMode.NOT_PERSISTENT,
"priority": 0,
"correlation_id": "",
"reply_to": "",
"expiration": "",
"message_id": "",
"timestamp": "",
"type": "",
"user_id": "",
"app_id": "",
}
"""
return {
"body_size": self.body_size,
"headers": self.headers_raw,
"content_type": self.content_type,
"content_encoding": self.content_encoding,
"delivery_mode": self.delivery_mode,
"priority": self.priority,
"correlation_id": self.correlation_id,
"reply_to": self.reply_to,
"expiration": self.expiration,
"message_id": self.message_id,
"timestamp": decode_timestamp(self.timestamp),
"type": str(self.type),
"user_id": self.user_id,
"app_id": self.app_id,
}
@property
def locked(self) -> bool:
""" is message locked
:return: :class:`bool`
"""
return bool(self.__lock)
@property
def properties(self) -> aiormq.spec.Basic.Properties:
""" Build :class:`pika.BasicProperties` object """
return aiormq.spec.Basic.Properties(
content_type=self.content_type,
content_encoding=self.content_encoding,
headers=self.headers_raw,
delivery_mode=self.delivery_mode,
priority=self.priority,
correlation_id=self.correlation_id,
reply_to=self.reply_to,
expiration=encode_expiration(self.expiration),
message_id=self.message_id,
timestamp=self.timestamp,
message_type=self.type,
user_id=self.user_id,
app_id=self.app_id
)
def __repr__(self):
return "{name}:{repr}".format(
name=self.__class__.__name__,
repr=pformat(self.info())
)
def __setattr__(self, key, value):
if not key.startswith("_") and self.locked:
raise ValueError("Message is locked")
return super().__setattr__(key, value)
def __iter__(self):
return iter(self.body)
def lock(self):
""" Set lock flag to `True`"""
self.__lock = True
def __copy__(self):
return Message(
body=self.body,
headers={
k: v for k, v in self.headers.items()
} if self.headers else {},
content_encoding=self.content_encoding,
content_type=self.content_type,
delivery_mode=self.delivery_mode,
priority=self.priority,
correlation_id=self.correlation_id,
reply_to=self.reply_to,
expiration=self.expiration,
message_id=self.message_id,
timestamp=self.timestamp,
type=self.type,
user_id=self.user_id,
app_id=self.app_id
)
class IncomingMessage(Message):
""" Incoming message it's seems like Message but has additional methods for
message acknowledgement.
Depending on the acknowledgement mode used, RabbitMQ can consider a
message to be successfully delivered either immediately after it is sent
out (written to a TCP socket) or when an explicit ("manual") client
acknowledgement is received. Manually sent acknowledgements can be
positive or negative and use one of the following protocol methods:
* basic.ack is used for positive acknowledgements
* basic.nack is used for negative acknowledgements (note: this is a RabbitMQ
extension to AMQP 0-9-1)
* basic.reject is used for negative acknowledgements but has one limitations
compared to basic.nack
Positive acknowledgements simply instruct RabbitMQ to record a message as
delivered. Negative acknowledgements with basic.reject have the same effect.
The difference is primarily in the semantics: positive acknowledgements
assume a message was successfully processed while their negative
counterpart suggests that a delivery wasn't processed but still should
be deleted.
"""
__slots__ = (
'_loop', '__channel', 'cluster_id', 'consumer_tag',
'delivery_tag', 'exchange', 'routing_key', 'redelivered',
'__no_ack', '__processed', 'message_count'
)
def __init__(self, message: DeliveredMessage, no_ack: bool=False):
""" Create an instance of :class:`IncomingMessage` """
self.__channel = message.channel
self.__no_ack = no_ack
self.__processed = False
expiration = None # type: time.struct_time
if message.header.properties.expiration:
expiration = decode_expiration(message.header.properties.expiration)
super().__init__(
body=message.body,
content_type=message.header.properties.content_type,
content_encoding=message.header.properties.content_encoding,
headers=message.header.properties.headers,
delivery_mode=message.header.properties.delivery_mode,
priority=message.header.properties.priority,
correlation_id=message.header.properties.correlation_id,
reply_to=message.header.properties.reply_to,
expiration=expiration / 1000. if expiration else None,
message_id=message.header.properties.message_id,
timestamp=decode_timestamp(message.header.properties.timestamp),
type=message.header.properties.message_type,
user_id=message.header.properties.user_id,
app_id=message.header.properties.app_id,
)
self.cluster_id = message.header.properties.cluster_id
self.consumer_tag = None
self.delivery_tag = None
self.redelivered = None
self.message_count = None
if isinstance(message.delivery, aiormq.spec.Basic.GetOk):
self.message_count = message.delivery.message_count
self.delivery_tag = message.delivery.delivery_tag
self.redelivered = message.delivery.redelivered
elif isinstance(message.delivery, aiormq.spec.Basic.Deliver):
self.consumer_tag = message.delivery.consumer_tag
self.delivery_tag = message.delivery.delivery_tag
self.redelivered = message.delivery.redelivered
self.routing_key = message.delivery.routing_key
self.exchange = message.delivery.exchange
if no_ack or not self.delivery_tag:
self.lock()
self.__processed = True
def process(self, requeue=False, reject_on_redelivered=False,
ignore_processed=False):
""" Context manager for processing the message
>>> async def on_message_received(message: IncomingMessage):
... async with message.process():
... # When exception will be raised
... # the message will be rejected
... print(message.body)
Example with ignore_processed=True
>>> async def on_message_received(message: IncomingMessage):
... async with message.process(ignore_processed=True):
... # Now (with ignore_processed=True) you may reject
... # (or ack) message manually too
... if True: # some reasonable condition here
... await message.reject()
... print(message.body)
:param requeue: Requeue message when exception.
:param reject_on_redelivered:
When True message will be rejected only when
message was redelivered.
:param ignore_processed: Do nothing if message already processed
"""
return ProcessContext(
self, requeue=requeue,
reject_on_redelivered=reject_on_redelivered,
ignore_processed=ignore_processed,
)
def ack(self, multiple: bool=False) -> asyncio.Task:
""" Send basic.ack is used for positive acknowledgements
.. note::
This method looks like a blocking-method, but actually it's
just send bytes to the socket and not required any responses
from the broker.
:param multiple: If set to True, the message's delivery tag is
treated as "up to and including", so that multiple
messages can be acknowledged with a single method.
If set to False, the ack refers to a single message.
:return: None
"""
if self.__no_ack:
raise TypeError("Can't ack message with \"no_ack\" flag")
if self.__processed:
raise MessageProcessError("Message already processed")
task = asyncio.ensure_future(
self.__channel.basic_ack(
delivery_tag=self.delivery_tag, multiple=multiple
)
)
self.__processed = True
if not self.locked:
self.lock()
return task
def reject(self, requeue=False) -> asyncio.Task:
""" When `requeue=True` the message will be returned to queue.
Otherwise message will be dropped.
.. note::
This method looks like a blocking-method, but actually it's just
send bytes to the socket and not required any responses from
the broker.
:param requeue: bool
"""
if self.__no_ack:
raise TypeError('This message has "no_ack" flag.')
if self.__processed:
raise MessageProcessError("Message already processed")
task = | |
<reponame>spencerpomme/Public-Transit-Status-with-Apache-Kafka<filename>consumers/venv/lib/python3.7/site-packages/mode/utils/objects.py
"""Object utilities."""
import collections.abc
import sys
import typing
from contextlib import suppress
from decimal import Decimal
from functools import total_ordering
from pathlib import Path
from typing import (
AbstractSet,
Any,
Callable,
ClassVar,
Dict,
FrozenSet,
Generic,
Iterable,
List,
Mapping,
MutableMapping,
MutableSequence,
MutableSet,
Sequence,
Set,
Tuple,
Type,
TypeVar,
cast,
)
from typing import _eval_type, _type_check # type: ignore
try:
from typing import _ClassVar # type: ignore
except ImportError: # pragma: no cover
# CPython 3.7
from typing import _GenericAlias # type: ignore
def _is_class_var(x: Any) -> bool: # noqa
return isinstance(x, _GenericAlias) and x.__origin__ is ClassVar
else: # pragma: no cover
# CPython 3.6
def _is_class_var(x: Any) -> bool:
return type(x) is _ClassVar
try:
# CPython 3.7
from typing import ForwardRef # type: ignore
except ImportError: # pragma: no cover
# CPython 3.6
from typing import _ForwardRef as ForwardRef # type: ignore
__all__ = [
'FieldMapping',
'DefaultsMapping',
'Unordered',
'KeywordReduce',
'InvalidAnnotation',
'qualname',
'shortname',
'canoname',
'canonshortname',
'annotations',
'eval_type',
'iter_mro_reversed',
'guess_polymorphic_type',
'cached_property',
'label',
'shortlabel',
]
_T = TypeVar('_T')
RT = TypeVar('RT')
#: Mapping of attribute name to attribute type.
FieldMapping = Mapping[str, Type]
#: Mapping of attribute name to attributes default value.
DefaultsMapping = Mapping[str, Any]
SET_TYPES: Tuple[Type, ...] = (
AbstractSet,
FrozenSet,
MutableSet,
Set,
collections.abc.Set,
)
LIST_TYPES: Tuple[Type, ...] = (
List,
Sequence,
MutableSequence,
collections.abc.Sequence,
)
DICT_TYPES: Tuple[Type, ...] = (
Dict,
Mapping,
MutableMapping,
collections.abc.Mapping,
)
# XXX cast required for mypy bug
# "expression has type Tuple[_SpecialForm]"
TUPLE_TYPES: Tuple[Type, ...] = cast(Tuple[Type, ...], (Tuple,))
class InvalidAnnotation(Exception):
"""Raised by :func:`annotations` when encountering an invalid type."""
@total_ordering
class Unordered(Generic[_T]):
"""Shield object from being ordered in heapq/``__le__``/etc."""
# Used to put anything inside a heapq, even things that cannot be ordered
# like dicts and lists.
def __init__(self, value: _T) -> None:
self.value = value
def __le__(self, other: Any) -> bool:
return True
def __repr__(self) -> str:
return f'<{type(self).__name__}: {self.value!r}>'
def _restore_from_keywords(typ: Type, kwargs: Dict) -> Any:
# This function is used to restore pickled KeywordReduce object.
return typ(**kwargs)
class KeywordReduce:
"""Mixin class for objects that can be "pickled".
"Pickled" means the object can be serialiazed using the Python binary
serializer -- the :mod:`pickle` module.
Python objects are made pickleable through defining the ``__reduce__``
method, that returns a tuple of:
``(restore_function, function_starargs)``::
class X:
def __init__(self, arg1, kw1=None):
self.arg1 = arg1
self.kw1 = kw1
def __reduce__(self) -> Tuple[Callable, Tuple[Any, ...]]:
return type(self), (self.arg1, self.kw1)
This is *tedious* since this means you cannot accept ``**kwargs`` in the
constructur, so what we do is define a ``__reduce_keywords__``
argument that returns a dict instead::
class X:
def __init__(self, arg1, kw1=None):
self.arg1 = arg1
self.kw1 = kw1
def __reduce_keywords__(self) -> Mapping[str, Any]:
return {
'arg1': self.arg1,
'kw1': self.kw1,
}
"""
def __reduce_keywords__(self) -> Mapping:
raise NotImplementedError()
def __reduce__(self) -> Tuple:
return _restore_from_keywords, (type(self), self.__reduce_keywords__())
def qualname(obj: Any) -> str:
"""Get object qualified name."""
if not hasattr(obj, '__name__') and hasattr(obj, '__class__'):
obj = obj.__class__
name = getattr(obj, '__qualname__', obj.__name__)
return '.'.join((obj.__module__, name))
def shortname(obj: Any) -> str:
"""Get object name (non-qualified)."""
if not hasattr(obj, '__name__') and hasattr(obj, '__class__'):
obj = obj.__class__
return '.'.join((obj.__module__, obj.__name__))
def canoname(obj: Any, *, main_name: str = None) -> str:
"""Get qualname of obj, trying to resolve the real name of ``__main__``."""
name = qualname(obj)
parts = name.split('.')
if parts[0] == '__main__':
return '.'.join([main_name or _detect_main_name()] + parts[1:])
return name
def canonshortname(obj: Any, *, main_name: str = None) -> str:
"""Get non-qualified name of obj, resolve real name of ``__main__``."""
name = shortname(obj)
parts = name.split('.')
if parts[0] == '__main__':
return '.'.join([main_name or _detect_main_name()] + parts[1:])
return name
def _detect_main_name() -> str: # pragma: no cover
try:
filename = sys.modules['__main__'].__file__
except (AttributeError, KeyError): # ipython/REPL
return '__main__'
else:
path = Path(filename).absolute()
node = path.parent
seen = []
while node:
if (node / '__init__.py').exists():
seen.append(node.stem)
node = node.parent
else:
break
return '.'.join(seen + [path.stem])
def annotations(cls: Type,
*,
stop: Type = object,
invalid_types: Set = None,
alias_types: Mapping = None,
skip_classvar: bool = False,
globalns: Dict[str, Any] = None,
localns: Dict[str, Any] = None) -> Tuple[
FieldMapping, DefaultsMapping]:
"""Get class field definition in MRO order.
Arguments:
cls: Class to get field information from.
stop: Base class to stop at (default is ``object``).
invalid_types: Set of types that if encountered should raise
:exc:`InvalidAnnotation` (does not test for subclasses).
alias_types: Mapping of original type to replacement type.
skip_classvar: Skip attributes annotated with
:class:`typing.ClassVar`.
globalns: Global namespace to use when evaluating forward
references (see :class:`typing.ForwardRef`).
localns: Local namespace to use when evaluating forward
references (see :class:`typing.ForwardRef`).
Returns:
Tuple[FieldMapping, DefaultsMapping]: Tuple with two dictionaries,
the first containing a map of field names to their types,
the second containing a map of field names to their default
value. If a field is not in the second map, it means the field
is required.
Raises:
InvalidAnnotation: if a list of invalid types are provided and an
invalid type is encountered.
Examples:
.. sourcecode:: text
>>> class Point:
... x: float
... y: float
>>> class 3DPoint(Point):
... z: float = 0.0
>>> fields, defaults = annotations(3DPoint)
>>> fields
{'x': float, 'y': float, 'z': 'float'}
>>> defaults
{'z': 0.0}
"""
fields: Dict[str, Type] = {}
defaults: Dict[str, Any] = {} # noqa: E704 (flake8 bug)
for subcls in iter_mro_reversed(cls, stop=stop):
defaults.update(subcls.__dict__)
with suppress(AttributeError):
fields.update(local_annotations(
subcls,
invalid_types=invalid_types,
alias_types=alias_types,
skip_classvar=skip_classvar,
globalns=globalns,
localns=localns,
))
return fields, defaults
def local_annotations(
cls: Type,
*,
invalid_types: Set = None,
alias_types: Mapping = None,
skip_classvar: bool = False,
globalns: Dict[str, Any] = None,
localns: Dict[str, Any] = None) -> Iterable[Tuple[str, Type]]:
return _resolve_refs(
cls.__annotations__,
globalns if globalns is not None else _get_globalns(cls),
localns,
invalid_types or set(),
alias_types or {},
skip_classvar,
)
def _resolve_refs(d: Dict[str, Any],
globalns: Dict[str, Any] = None,
localns: Dict[str, Any] = None,
invalid_types: Set = None,
alias_types: Mapping = None,
skip_classvar: bool = False) -> Iterable[Tuple[str, Type]]:
invalid_types = invalid_types or set()
alias_types = alias_types or {}
for k, v in d.items():
v = eval_type(v, globalns, localns, invalid_types, alias_types)
if skip_classvar and _is_class_var(v):
pass
else:
yield k, v
def eval_type(typ: Any,
globalns: Dict[str, Any] = None,
localns: Dict[str, Any] = None,
invalid_types: Set = None,
alias_types: Mapping = None) -> Type:
"""Convert (possible) string annotation to actual type.
Examples:
>>> eval_type('List[int]') == typing.List[int]
"""
invalid_types = invalid_types or set()
alias_types = alias_types or {}
if isinstance(typ, str):
typ = ForwardRef(typ)
if isinstance(typ, ForwardRef):
# On 3.6/3.7 _eval_type crashes if str references ClassVar
typ = _ForwardRef_safe_eval(typ, globalns, localns)
typ = _eval_type(typ, globalns, localns)
if typ in invalid_types:
raise InvalidAnnotation(typ)
return alias_types.get(typ, typ)
def _ForwardRef_safe_eval(ref: ForwardRef,
globalns: Dict[str, Any] = None,
localns: Dict[str, Any] = None) -> Type:
# On 3.6/3.7 ForwardRef._evaluate crashes if str references ClassVar
if not ref.__forward_evaluated__:
if globalns is None and localns is None:
globalns = localns = {}
elif globalns is None:
globalns = localns
elif localns is None:
localns = globalns
val = eval(ref.__forward_code__, globalns, localns)
if not _is_class_var(val):
val = _type_check(val,
'Forward references must evaluate to types.')
ref.__forward_value__ = val
ref.__forward_evaluated__ = True
return ref.__forward_value__
def _get_globalns(typ: Type) -> Dict[str, Any]:
return sys.modules[typ.__module__].__dict__
def iter_mro_reversed(cls: Type, stop: Type) -> Iterable[Type]:
"""Iterate over superclasses, in reverse Method Resolution Order.
The stop argument specifies a base class that when seen will
stop iterating (well actually start, since this is in reverse, see Example
for demonstration).
Arguments:
cls (Type): Target class.
stop (Type): A base class in which we stop iteration.
Notes:
The last item produced will be the class itself (`cls`).
Examples:
>>> class A: ...
>>> class B(A): ...
>>> class C(B): ...
>>> list(iter_mro_reverse(C, object))
[A, B, C]
>>> list(iter_mro_reverse(C, A))
[B, C]
Yields:
Iterable[Type]: every class.
"""
wanted = False
for subcls in reversed(cls.__mro__):
if wanted:
yield cast(Type, subcls)
else:
wanted = subcls == stop
def remove_optional(typ: Type) -> Type:
_, typ = _remove_optional(typ)
return typ
def is_optional(typ: Type) -> bool:
args = getattr(typ, '__args__', ())
if typ.__class__.__name__ == '_GenericAlias':
# Py3.7
if typ.__origin__ is typing.Union:
for arg in args:
if arg is type(None): # noqa
return True
elif typ.__class__.__name__ == '_Union': # pragma: no cover
# Py3.6
# Optional[x] actually returns Union[x, type(None)]
if args and args[1] is type(None): # noqa
return True
| |
An iTIP message has been sent to an attendee by the organizer. We need to update the attendee state
based on the nature of the iTIP message.
"""
# Do security check: ORGANZIER in iTIP MUST match existing resource value
if self.recipient_calendar:
existing_organizer = self.recipient_calendar.getOrganizer()
existing_organizer = normalizeCUAddr(existing_organizer) if existing_organizer else ""
new_organizer = normalizeCUAddr(self.message.getOrganizer())
new_organizer = normalizeCUAddr(new_organizer) if new_organizer else ""
if existing_organizer != new_organizer:
# Additional check - if the existing organizer is missing and the originator
# is local to the server - then allow the change
if not (existing_organizer == "" and self.originator.hosted()):
log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' ignoring UID: '{uid}' - organizer has no copy", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
# If the recipient copy is in the trash, just remove it
if self.recipient_in_trash:
yield self.deleteCalendarResource(self.recipient_calendar_resource)
# Reset state to make it look like a new iTIP being processed
self.recipient_calendar = None
self.recipient_calendar_resource = None
self.recipient_in_trash = False
self.new_resource = True
else:
raise ImplicitProcessorException("5.3;Organizer change not allowed")
# For a missing existing organizer we don't know for sure that the existing event came from the originator - it could be spoofed
# by the originator to have the same UID as the existing event which it would overwrite. Instead what we will do is rename and
# change the UID of the original event to preserve it and let the user resolve the "duplicate" conflict that results.
if not existing_organizer:
# Copy the raw (including per-user) data of the original, change its UID and write out a new resource
changed_calendar = (yield self.recipient_calendar_resource.component()).duplicate()
changed_calendar.replacePropertyInAllComponents(Property("UID", str(uuid.uuid4())))
name = "%s-%s.ics" % (hashlib.md5(changed_calendar.resourceUID()).hexdigest(), str(uuid.uuid4())[:8],)
yield self.recipient_calendar_resource.parentCollection()._createCalendarObjectWithNameInternal(name, changed_calendar, ComponentUpdateState.RAW)
# Delete the original resource
yield self.deleteCalendarResource(self.recipient_calendar_resource)
# Reset state to make it look like a new iTIP being processed
self.recipient_calendar = None
self.recipient_calendar_resource = None
self.new_resource = True
# Handle splitting of data early so we can preserve per-attendee data
if self.message.hasProperty("X-CALENDARSERVER-SPLIT-OLDER-UID"):
if config.Scheduling.Options.Splitting.Enabled:
# Tell the existing resource to split
log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' splitting UID: '{uid}'", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
split = (yield self.doImplicitAttendeeSplit())
if split:
returnValue((True, False, False, None,))
else:
self.message.removeProperty("X-CALENDARSERVER-SPLIT-OLDER-UID")
self.message.removeProperty("X-CALENDARSERVER-SPLIT-RID")
elif self.message.hasProperty("X-CALENDARSERVER-SPLIT-NEWER-UID"):
if config.Scheduling.Options.Splitting.Enabled:
log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' ignoring UID: '{uid}' - split already done", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
returnValue((True, False, False, None,))
else:
self.message.removeProperty("X-CALENDARSERVER-SPLIT-NEWER-UID")
self.message.removeProperty("X-CALENDARSERVER-SPLIT-RID")
# Different based on method
if self.method == "REQUEST":
result = (yield self.doImplicitAttendeeRequest())
elif self.method == "CANCEL":
result = (yield self.doImplicitAttendeeCancel())
elif self.method == "ADD":
# TODO: implement ADD
result = (False, False, False, None)
elif self.method == "POLLSTATUS":
result = (yield self.doImplicitAttendeePollStatus())
else:
# NB We should never get here as we will have rejected unsupported METHODs earlier.
result = (True, True, False, None,)
returnValue(result)
@inlineCallbacks
def doImplicitAttendeeSplit(self):
"""
Handle splitting of the existing calendar data.
"""
olderUID = self.message.propertyValue("X-CALENDARSERVER-SPLIT-OLDER-UID")
split_rid = self.message.propertyValue("X-CALENDARSERVER-SPLIT-RID")
if olderUID is None or split_rid is None:
returnValue(False)
# Split the resource
yield self.recipient_calendar_resource.splitForAttendee(rid=split_rid, olderUID=olderUID)
returnValue(True)
@inlineCallbacks
def doImplicitAttendeeRequest(self):
"""
An iTIP REQUEST message has been sent to an attendee. If there is no existing resource, we will simply
create a new one. If there is an existing resource we need to reconcile the changes between it and the
iTIP message.
@return: C{tuple} of (processed, auto-processed, store inbox item, changes)
"""
# Check if the incoming data has the recipient declined in all instances.
attendees = self.message.getAttendeeProperties((self.recipient.cuaddr,))
all_declined = all([attendee.parameterValue("PARTSTAT", "NEEDS-ACTION") == "DECLINED" for attendee in attendees])
# If we have a recipient item in the trash, and the incoming message has at least one undeclined partstat, then remove the trash
# item right now so that we treat the iTIP message as a new invite.
if not self.new_resource and self.recipient_in_trash and not all_declined:
yield self.deleteCalendarResource(self.recipient_calendar_resource)
# Reset state to make it look like a new iTIP being processed
self.recipient_calendar = None
self.recipient_calendar_resource = None
self.recipient_in_trash = False
self.new_resource = True
# If there is no existing copy, then look for default calendar and copy it here
if self.new_resource:
# Check if the incoming data has the recipient declined in all instances. In that case we will not create
# a new resource as chances are the recipient previously deleted the resource and we want to keep it deleted.
if all_declined:
log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' processing METHOD:REQUEST, UID: '{uid}' - ignoring all declined", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
returnValue((True, False, False, None,))
# Check for default calendar
default = (yield self.recipient.inbox.viewerHome().defaultCalendar(self.message.mainType()))
if default is None:
log.error("No default calendar for recipient: '{recip}'.", recip=self.recipient.cuaddr)
raise ImplicitProcessorException(iTIPRequestStatus.NO_USER_SUPPORT)
log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' processing METHOD:REQUEST, UID: '{uid}' - new processed", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
new_calendar = iTipProcessing.processNewRequest(self.message, self.recipient.cuaddr, creating=True)
# Handle auto-reply behavior
organizer = normalizeCUAddr(self.message.getOrganizer())
if (yield self.recipient.record.canAutoSchedule(organizer=organizer)):
# auto schedule mode can depend on who the organizer is
mode = yield self.recipient.record.getAutoScheduleMode(organizer=organizer)
send_reply, store_inbox, partstat, accounting = (yield self.checkAttendeeAutoReply(new_calendar, mode))
if accounting is not None:
accounting["action"] = "create"
emitAccounting(
"AutoScheduling",
self.recipient.record,
json.dumps(accounting) + "\r\n",
filename=self.uid.encode("base64")[:-1] + ".txt"
)
# Only store inbox item when reply is not sent or always for users
store_inbox = store_inbox or self.recipient.record.getCUType() == "INDIVIDUAL"
else:
send_reply = False
store_inbox = True
new_resource = (yield self.writeCalendarResource(default, None, new_calendar))
if send_reply:
# Track outstanding auto-reply processing
log.debug("ImplicitProcessing - recipient '{recip}' processing UID: '{uid}' - auto-reply queued", recip=self.recipient.cuaddr, uid=self.uid)
yield ScheduleAutoReplyWork.autoReply(self.txn, new_resource, partstat)
# Build the schedule-changes XML element
changes = customxml.ScheduleChanges(
customxml.DTStamp(),
customxml.Action(
customxml.Create(),
),
)
result = (True, send_reply, store_inbox, changes,)
else:
# Processing update to existing event
new_calendar, rids = iTipProcessing.processRequest(self.message, self.recipient_calendar, self.recipient.cuaddr)
if new_calendar:
# Handle auto-reply behavior
organizer = normalizeCUAddr(self.message.getOrganizer())
if (yield self.recipient.record.canAutoSchedule(organizer=organizer)) and not hasattr(self.txn, "doing_attendee_refresh"):
# auto schedule mode can depend on who the organizer is
mode = yield self.recipient.record.getAutoScheduleMode(organizer=organizer)
send_reply, store_inbox, partstat, accounting = (yield self.checkAttendeeAutoReply(new_calendar, mode))
if accounting is not None:
accounting["action"] = "modify"
emitAccounting(
"AutoScheduling",
self.recipient.record,
json.dumps(accounting) + "\r\n",
filename=self.uid.encode("base64")[:-1] + ".txt"
)
# Only store inbox item when reply is not sent or always for users
store_inbox = store_inbox or self.recipient.record.getCUType() == "INDIVIDUAL"
else:
# Do not store inbox item if the resource is remaining in the trash
send_reply = False
store_inbox = not self.recipient_in_trash
# Let the store know that no time-range info has changed for a refresh (assuming that
# no auto-accept changes were made)
if hasattr(self.txn, "doing_attendee_refresh"):
new_calendar.noInstanceIndexing = not send_reply
# Update the attendee's copy of the event
log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' processing METHOD:REQUEST, UID: '{uid}' - updating event", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
new_resource = (yield self.writeCalendarResource(None, self.recipient_calendar_resource, new_calendar))
if send_reply:
# Track outstanding auto-reply processing
log.debug("ImplicitProcessing - recipient '{recip}' processing UID: '{uid}' - auto-reply queued", recip=self.recipient.cuaddr, uid=self.uid)
yield ScheduleAutoReplyWork.autoReply(self.txn, new_resource, partstat)
# Build the schedule-changes XML element
update_details = []
for rid, props_changed in sorted(rids.iteritems(), key=lambda x: x[0]):
# We do not report removals as those will already have been processed via a CANCEL
if props_changed is None:
continue
recurrence = []
if rid is None:
recurrence.append(customxml.Master())
else:
recurrence.append(customxml.RecurrenceID.fromString(rid.getText()))
changes = []
for propName, paramNames in sorted(props_changed.iteritems(), key=lambda x: x[0]):
params = tuple([customxml.ChangedParameter(name=param) for param in paramNames])
changes.append(customxml.ChangedProperty(*params, **{"name": propName}))
recurrence.append(customxml.Changes(*changes))
update_details += (customxml.Recurrence(*recurrence),)
changes = customxml.ScheduleChanges(
customxml.DTStamp(),
customxml.Action(
customxml.Update(*update_details),
),
)
# Refresh from another Attendee should not have Inbox item
if hasattr(self.txn, "doing_attendee_refresh"):
store_inbox = False
result = (True, send_reply, store_inbox, changes,)
else:
# Request needs to be ignored
log.debug("ImplicitProcessing - originator '{orig}' to recipient '{recip}' processing METHOD:REQUEST, UID: '{uid}' - ignoring", orig=self.originator.cuaddr, recip=self.recipient.cuaddr, uid=self.uid)
result = (True, False, False, None,)
returnValue(result)
@inlineCallbacks
def doImplicitAttendeeCancel(self):
"""
An iTIP CANCEL message has been sent to an attendee. If there is no existing resource, we will simply
ignore the message. If there is an existing resource we need to reconcile the changes between it and the
iTIP message.
@return: C{tuple} of (processed, auto-processed, store inbox item, changes)
"""
# Note that we never get here if there is no | |
Generate video only insights. Ignore audio if present. Fails if no video is present.
all_insights = "AllInsights" #: Generate both audio and video insights. Fails if either audio or video Insights fail.
class OnErrorType(str, Enum):
    """Policy for how the job proceeds when this TransformOutput fails."""
    stop_processing_job = "StopProcessingJob" #: Tells the service that if this TransformOutput fails, then any other incomplete TransformOutputs can be stopped.
    continue_job = "ContinueJob" #: Tells the service that if this TransformOutput fails, then allow any other TransformOutput to continue.
class Priority(str, Enum):
    """Relative processing priority of a TransformOutput."""
    low = "Low" #: Used for TransformOutputs that can be generated after Normal and High priority TransformOutputs.
    normal = "Normal" #: Used for TransformOutputs that can be generated at Normal priority.
    high = "High" #: Used for TransformOutputs that should take precedence over others.
class JobErrorCode(str, Enum):
    """Error codes describing why a media-processing Job failed."""
    service_error = "ServiceError" #: Fatal service error, please contact support.
    service_transient_error = "ServiceTransientError" #: Transient error, please retry, if retry is unsuccessful, please contact support.
    download_not_accessible = "DownloadNotAccessible" #: While trying to download the input files, the files were not accessible, please check the availability of the source.
    download_transient_error = "DownloadTransientError" #: While trying to download the input files, there was an issue during transfer (storage service, network errors), see details and check your source.
    upload_not_accessible = "UploadNotAccessible" #: While trying to upload the output files, the destination was not reachable, please check the availability of the destination.
    upload_transient_error = "UploadTransientError" #: While trying to upload the output files, there was an issue during transfer (storage service, network errors), see details and check your destination.
    configuration_unsupported = "ConfigurationUnsupported" #: There was a problem with the combination of input files and the configuration settings applied, fix the configuration settings and retry with the same input, or change input to match the configuration.
    content_malformed = "ContentMalformed" #: There was a problem with the input content (for example: zero byte files, or corrupt/non-decodable files), check the input files.
    content_unsupported = "ContentUnsupported" #: There was a problem with the format of the input (not valid media file, or an unsupported file/codec), check the validity of the input files.
class JobErrorCategory(str, Enum):
    """High-level category of a Job error (which subsystem it relates to)."""
    service = "Service" #: The error is service related.
    download = "Download" #: The error is download related.
    upload = "Upload" #: The error is upload related.
    configuration = "Configuration" #: The error is configuration related.
    content = "Content" #: The error is related to data in the input files.
class JobRetry(str, Enum):
    """Whether a failed Job may be retried as-is or needs investigation first."""
    do_not_retry = "DoNotRetry" #: Issue needs to be investigated and then the job resubmitted with corrections or retried once the underlying issue has been corrected.
    may_retry = "MayRetry" #: Issue may be resolved after waiting for a period of time and resubmitting the same Job.
class JobState(str, Enum):
    """Lifecycle states of a Job; Canceled/Error/Finished are final, the rest transient."""
    canceled = "Canceled" #: The job was canceled. This is a final state for the job.
    canceling = "Canceling" #: The job is in the process of being canceled. This is a transient state for the job.
    error = "Error" #: The job has encountered an error. This is a final state for the job.
    finished = "Finished" #: The job is finished. This is a final state for the job.
    processing = "Processing" #: The job is processing. This is a transient state for the job.
    queued = "Queued" #: The job is in a queued state, waiting for resources to become available. This is a transient state.
    scheduled = "Scheduled" #: The job is being scheduled to run on an available resource. This is a transient state, between queued and processing states.
class TrackPropertyType(str, Enum):
    """Property of a media track that can be matched against."""
    unknown = "Unknown" #: Unknown track property
    four_cc = "FourCC" #: Track FourCC
class TrackPropertyCompareOperation(str, Enum):
    """Comparison operation applied to a track property."""
    unknown = "Unknown" #: Unknown track property compare operation
    equal = "Equal" #: Equal operation
class StreamingLocatorContentKeyType(str, Enum):
    """Encryption type used for a StreamingLocator content key."""
    common_encryption_cenc = "CommonEncryptionCenc" #: Common Encryption using CENC
    common_encryption_cbcs = "CommonEncryptionCbcs" #: Common Encryption using CBCS
    envelope_encryption = "EnvelopeEncryption" #: Envelope Encryption
class StreamingPolicyStreamingProtocol(str, Enum):
    """Streaming protocol covered by a StreamingPolicy."""
    hls = "Hls" #: HLS protocol
    dash = "Dash" #: DASH protocol
    smooth_streaming = "SmoothStreaming" #: SmoothStreaming protocol
    download = "Download" #: Download protocol
class EncryptionScheme(str, Enum):
    """Encryption scheme applied to streamed content."""
    no_encryption = "NoEncryption" #: NoEncryption scheme
    envelope_encryption = "EnvelopeEncryption" #: EnvelopeEncryption scheme
    common_encryption_cenc = "CommonEncryptionCenc" #: CommonEncryptionCenc scheme
    common_encryption_cbcs = "CommonEncryptionCbcs" #: CommonEncryptionCbcs scheme
class LiveOutputResourceState(str, Enum):
    """Lifecycle states of a live output resource."""
    creating = "Creating" #: Live output is being created. No content is archived in the asset until the live output is in running state.
    running = "Running" #: Live output is running and archiving live streaming content to the asset if there is valid input from a contribution encoder.
    deleting = "Deleting" #: Live output is being deleted. The live asset is being converted from live to on-demand asset. Any streaming URLs created on the live output asset continue to work.
class LiveEventInputProtocol(str, Enum):
    """Ingest protocol used by the contribution encoder for a live event."""
    fragmented_mp4 = "FragmentedMP4" #: Smooth Streaming input will be sent by the contribution encoder to the live event.
    rtmp = "RTMP" #: RTMP input will be sent by the contribution encoder to the live event.
class LiveEventEncodingType(str, Enum):
    """Encoding mode of a live event (pass-through or service-side transcoding)."""
    none = "None" #: A contribution live encoder sends a multiple bitrate stream. The ingested stream passes through the live event without any further processing. It is also called the pass-through mode.
    standard = "Standard" #: A contribution live encoder sends a single bitrate stream to the live event and Media Services creates multiple bitrate streams. The output cannot exceed 720p in resolution.
    premium1080p = "Premium1080p" #: A contribution live encoder sends a single bitrate stream to the live event and Media Services creates multiple bitrate streams. The output cannot exceed 1080p in resolution.
class LiveEventResourceState(str, Enum):
    """Lifecycle states of a live event resource (billing applies per member comments)."""
    stopped = "Stopped" #: This is the initial state of the live event after creation (unless autostart was set to true.) No billing occurs in this state. In this state, the live event properties can be updated but streaming is not allowed.
    allocating = "Allocating" #: Allocate action was called on the live event and resources are being provisioned for this live event. Once allocation completes successfully, the live event will transition to StandBy state.
    stand_by = "StandBy" #: Live event resources have been provisioned and is ready to start. Billing occurs in this state. Most properties can still be updated, however ingest or streaming is not allowed during this state.
    starting = "Starting" #: The live event is being started and resources are being allocated. No billing occurs in this state. Updates or streaming are not allowed during this state. If an error occurs, the live event returns to the Stopped state.
    running = "Running" #: The live event resources have been allocated, ingest and preview URLs have been generated, and it is capable of receiving live streams. At this point, billing is active. You must explicitly call Stop on the live event resource to halt further billing.
    stopping = "Stopping" #: The live event is being stopped and resources are being de-provisioned. No billing occurs in this transient state. Updates or streaming are not allowed during this state.
    deleting = "Deleting" #: The live event is being deleted. No billing occurs in this transient state. Updates or streaming are not allowed during this state.
class StreamOptionsFlag(str, Enum):
    """Latency-related streaming options for a live event."""
    default = "Default" #: Live streaming with no special latency optimizations.
    low_latency = "LowLatency" #: The live event provides lower end to end latency by reducing its internal buffers. This could result in more client buffering during playback if network bandwidth is low.
class StreamingEndpointResourceState(str, Enum):
stopped = "Stopped" #: The initial state of a streaming endpoint after creation. Content is not ready to be streamed from this endpoint.
starting = "Starting" #: The streaming endpoint is transitioning to the running state.
running = "Running" #: The streaming endpoint is running. It is able to stream content to clients
stopping = "Stopping" #: The streaming endpoint is transitioning to the stopped state.
deleting = "Deleting" #: The streaming endpoint is being deleted.
scaling = "Scaling" #: The | |
each layer conditioned on the predicted class and candidate true classes
test_stats_pred = np.zeros((n_test, self.n_layers))
pvalues_pred = np.zeros((n_test, self.n_layers))
test_stats_true = {c: np.zeros((n_test, self.n_layers)) for c in self.labels_unique}
pvalues_true = {c: np.zeros((n_test, self.n_layers)) for c in self.labels_unique}
for i in range(self.n_layers):
if self.transform_models:
# Dimension reduction
data_proj = transform_data_from_model(layer_embeddings[i], self.transform_models[i])
else:
data_proj = layer_embeddings[i]
# Test statistics and negative log p-values for layer `i`
test_stats_temp, pvalues_temp = self.test_stats_models[i].score(data_proj, labels_pred, is_train=is_train,
bootstrap=bootstrap)
# `test_stats_temp` and `pvalues_temp` will have shape `(n_test, self.n_classes + 1)`
test_stats_pred[:, i] = test_stats_temp[:, 0]
pvalues_pred[:, i] = pvalues_temp[:, 0]
for j, c in enumerate(self.labels_unique):
test_stats_true[c][:, i] = test_stats_temp[:, j + 1]
pvalues_true[c][:, i] = pvalues_temp[:, j + 1]
if self.use_top_ranked:
# For the test statistics conditioned on the predicted class, take the largest `self.num_top_ranked`
# negative log p-values across the layers
test_stats_pred, pvalues_pred = self._get_top_ranked(test_stats_pred, pvalues_pred, reverse=True)
# For the test statistics conditioned on the true class, take the smallest `self.num_top_ranked`
# negative log p-values across the layers
for c in self.labels_unique:
test_stats_true[c], pvalues_true[c] = self._get_top_ranked(test_stats_true[c], pvalues_true[c])
# Adversarial or OOD scores for the test samples and the corrected class predictions
if self.score_type == 'density':
scores_adver, scores_ood, corrected_classes = self._score_density_based(
labels_pred, test_stats_pred, test_stats_true,
return_corrected_predictions=return_corrected_predictions
)
elif self.score_type == 'pvalue':
if test_layer_pairs:
n_pairs = int(0.5 * self.n_layers * (self.n_layers - 1))
# logger.info("Estimating p-values for the test statistics from {:d} layer pairs.".format(n_pairs))
pvalues_pred_pairs = np.zeros((n_test, n_pairs))
pvalues_true_pairs = dict()
for c in self.labels_unique:
# Samples predicted into class `c`
ind = np.where(labels_pred == c)[0]
pvalues_pred_pairs[ind, :] = pvalue_score_all_pairs(
self.test_stats_pred_null[c], test_stats_pred[ind, :], log_transform=True, bootstrap=bootstrap
)
pvalues_true_pairs[c] = pvalue_score_all_pairs(
self.test_stats_true_null[c], test_stats_true[c], log_transform=True, bootstrap=bootstrap
)
# Append columns corresponding to the p-values from the layer pairs
pvalues_true[c] = np.hstack((pvalues_true[c], pvalues_true_pairs[c]))
# Append columns corresponding to the p-values from the layer pairs
pvalues_pred = np.hstack((pvalues_pred, pvalues_pred_pairs))
scores_adver, scores_ood, corrected_classes = self._score_pvalue_based(
labels_pred, pvalues_pred, pvalues_true,
return_corrected_predictions=return_corrected_predictions, start_layer=start_layer
)
elif self.score_type == 'klpe':
scores_adver, scores_ood, corrected_classes = self._score_klpe(
labels_pred, test_stats_pred, test_stats_true,
return_corrected_predictions=return_corrected_predictions
)
else:
raise ValueError("Invalid score type '{}'".format(self.score_type))
if return_corrected_predictions:
if self.ood_detection:
return scores_ood, corrected_classes
else:
return scores_adver, corrected_classes
else:
if self.ood_detection:
return scores_ood
else:
return scores_adver
def _score_density_based(self, labels_pred, test_stats_pred, test_stats_true,
                         return_corrected_predictions=False):
    """
    Scoring method based on modeling the joint probability density of the test statistics, conditioned on the
    predicted and true class.

    :param labels_pred: Same as the method `score`.
    :param test_stats_pred: numpy array with the test statistics from the different layers, conditioned on the
                            predicted class of the test samples. Is a numpy array of shape `(n_test, n_layers)`,
                            where `n_test` and `n_layers` are the number of test samples and number of layers
                            respectively.
    :param test_stats_true: dict with the test statistics from the different layers, conditioned on each candidate
                            true class (since this is unknown at test time). The class labels are the keys of the
                            dict and the values are numpy arrays of shape `(n_test, n_layers)` similar to
                            `test_stats_pred`.
    :param return_corrected_predictions: Same as the method `score`.
    :return: tuple `(scores_adver, scores_ood, corrected_classes)` where the first two are numpy arrays of
             shape `(n_test, )` and the last is an array of class labels like `labels_pred`.
    """
    n_test = labels_pred.shape[0]
    # Log of the multivariate p-value estimate of the test statistics under the distribution of each
    # candidate true class
    log_pvalues_true = np.zeros((n_test, self.n_classes))
    for i, c in enumerate(self.labels_unique):
        v = -1. * score_log_normal_mixture(test_stats_true[c], self.density_models_true[c])
        log_pvalues_true[:, i] = np.log(
            pvalue_score(self.samples_neg_log_dens_true[c], v, log_transform=False, bootstrap=False)
        )
    # Adversarial or OOD scores for the test samples and the corrected class predictions
    scores_adver = np.zeros(n_test)
    scores_ood = np.zeros(n_test)
    corrected_classes = copy.copy(labels_pred)
    preds_unique = self.labels_unique if (n_test > 1) else [labels_pred[0]]
    cnt_par = 0
    for c in preds_unique:
        # Scoring samples that are predicted into class `c`
        ind = np.where(labels_pred == c)[0]
        n_pred = ind.shape[0]
        if n_pred == 0:
            continue
        # Score for OOD detection
        v = -1. * score_log_normal_mixture(test_stats_pred[ind, :], self.density_models_pred[c])
        # `pvalue_score` returns negative log of the p-values
        scores_ood[ind] = pvalue_score(self.samples_neg_log_dens_pred[c], v, log_transform=True, bootstrap=False)
        # Mask to include all classes, except the predicted class `c`.
        # Use the builtin `bool` dtype: the alias `np.bool` was deprecated in NumPy 1.20
        # and removed in NumPy 1.24, so `dtype=np.bool` raises AttributeError there.
        i = np.where(self.labels_unique == c)[0][0]
        mask_excl = np.ones(self.n_classes, dtype=bool)
        mask_excl[i] = False
        # Score for adversarial detection
        tmp_arr = log_pvalues_true[ind, :]
        scores_adver[ind] = np.max(tmp_arr[:, mask_excl], axis=1) + scores_ood[ind]
        # Corrected prediction is the class corresponding to the maximum log p-value conditioned that class
        # being the true class
        if return_corrected_predictions:
            corrected_classes[ind] = [self.labels_unique[j] for j in np.argmax(tmp_arr, axis=1)]
        # Break if we have already covered all the test samples
        cnt_par += n_pred
        if cnt_par >= n_test:
            break
    return scores_adver, scores_ood, corrected_classes
def _score_pvalue_based(self, labels_pred, pvalues_pred, pvalues_true, return_corrected_predictions=False,
                        start_layer=0):
    """
    Scoring method based on combining the p-values of the test statistics calculated from the layer embeddings.

    :param labels_pred: Same as the method `score`.
    :param pvalues_pred: numpy array with the negative log p-values from the different layers and layer pairs,
                         conditioned on the predicted class of the test samples. Is a numpy array of shape
                         `(n_test, n_layers)`, where `n_test` and `n_layers` are the number of test samples
                         and number of layers (layer pairs) respectively.
    :param pvalues_true: dict with the negative log p-values from the different layers, conditioned on each
                         candidate true class (since this is unknown at test time). The class labels are the keys
                         of the dict and the values are numpy arrays of shape `(n_test, n_layers)` similar to
                         `pvalues_pred`.
    :param return_corrected_predictions: Same as the method `score`.
    :param start_layer: Starting index of the layers to include in the p-value fusion. Set to 0 to include all
                        the layers. Set to negative values such as -1, -2, -3 using the same convention as
                        python indexing. For example, a value of `-3` implies the last 3 layers are included.
    :return: tuple `(scores_adver, scores_ood, corrected_classes)`.
    :raises ValueError: if `self.pvalue_fusion` is not one of 'fisher' or 'harmonic_mean'.
    """
    n_test, nl = pvalues_pred.shape
    # Equal weight to all the layers or layer pairs
    weights = (1. / nl) * np.ones(nl)
    log_weights = np.log(weights)
    # Boolean mask selecting the layers included in the fusion.
    # Use the builtin `bool` dtype: the alias `np.bool` was deprecated in NumPy 1.20
    # and removed in NumPy 1.24, so `dtype=np.bool` raises AttributeError there.
    mask_layers = np.zeros(nl, dtype=bool)
    mask_layers[start_layer:] = True
    # Log of the combined p-values
    pvalues_comb_pred = np.zeros(n_test)
    pvalues_comb_true = np.zeros((n_test, self.n_classes))
    if self.pvalue_fusion == 'fisher':
        # Fisher's method: sum of the log p-values (inputs are negative log p-values, hence the -1)
        pvalues_comb_pred = -1 * np.sum(pvalues_pred[:, mask_layers], axis=1)
        for i, c in enumerate(self.labels_unique):
            pvalues_comb_true[:, i] = -1 * np.sum(pvalues_true[c][:, mask_layers], axis=1)
    elif self.pvalue_fusion == 'harmonic_mean':
        # log of the combined p-values (weighted harmonic mean, computed in the log domain for stability)
        arr_temp = log_weights + pvalues_pred
        offset = np.log(np.sum(weights[mask_layers]))
        pvalues_comb_pred = offset - log_sum_exp(arr_temp[:, mask_layers])
        for i, c in enumerate(self.labels_unique):
            arr_temp = log_weights + pvalues_true[c]
            pvalues_comb_true[:, i] = offset - log_sum_exp(arr_temp[:, mask_layers])
    else:
        raise ValueError("Invalid value '{}' for the input argument 'pvalue_fusion'.".format(self.pvalue_fusion))
    # Adversarial or OOD scores for the test samples and the corrected class predictions
    scores_adver = np.zeros(n_test)
    scores_ood = np.zeros(n_test)
    corrected_classes = copy.copy(labels_pred)
    preds_unique = self.labels_unique if (n_test > 1) else [labels_pred[0]]
    cnt_par = 0
    for c in preds_unique:
        # Scoring samples that are predicted into class `c`
        ind = np.where(labels_pred == c)[0]
        n_pred = ind.shape[0]
        if n_pred == 0:
            continue
        # OOD score
        scores_ood[ind] = -1 * pvalues_comb_pred[ind]
        # Adversarial score.
        # Mask to include all classes, except the predicted class `c` (builtin `bool`
        # dtype for the same NumPy 1.24 compatibility reason as `mask_layers` above).
        i = np.where(self.labels_unique == c)[0][0]
        mask_excl = np.ones(self.n_classes, dtype=bool)
        mask_excl[i] = False
        arr_temp = pvalues_comb_true[ind, :]
        scores_adver[ind] = np.max(arr_temp[:, mask_excl], axis=1) - pvalues_comb_pred[ind]
        # Corrected class prediction based on the maximum p-value conditioned on the candidate true class
        if return_corrected_predictions:
            corrected_classes[ind] = [self.labels_unique[j] for j in np.argmax(arr_temp, axis=1)]
        # Break if we have already covered all the test samples
        cnt_par += n_pred
        if cnt_par >= n_test:
            break
    return scores_adver, scores_ood, corrected_classes
def _score_klpe(self, labels_pred, test_stats_pred, test_stats_true, return_corrected_predictions=False):
"""
Scoring method based on the averaged localized p-value estimation method, which estimates the p-value of
the joint (multivariate) distribution of the test statistics across the layers conditioned on the
predicted and true class.
:param labels_pred: Same as the method `score`.
:param test_stats_pred: numpy array with the test statistics from the different layers, conditioned on the
predicted class of the test samples. Is a numpy array of shape `(n_test, n_layers)`,
where `n_test` and `n_layers` are the number of test samples and number of layers
respectively.
:param test_stats_true: dict with the test statistics from the different layers, conditioned on each candidate
true class (since this is unknown at test time). The class | |
<filename>deep_verify/src/layers/layers.py
# coding=utf-8
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines wrappers to easily propagate bounds."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
from deep_verify.src import common
from interval_bound_propagation import layer_utils
import six
import sonnet as snt
import tensorflow as tf
# Holds objective weights (w, b) of the verification objective.
# The last layer can be combined with the target vector `c`.
ObjectiveWeights = collections.namedtuple('ObjectiveWeights', ['w', 'b'])
@six.add_metaclass(abc.ABCMeta)
class VerifiableLayer(object):
  """Abstract class for dual layers.

  Wraps a layer of the underlying network together with its bound-propagation
  nodes (`input_node` / `output_node`), and manages the shape and projection of
  the dual variables associated with the layer's outputs.
  """

  def __init__(self):
    # Set to True via `set_no_duals` for layers without dual variables.
    self._no_duals = False

  @property
  def branches(self):
    """Returns list of (name, sub-layers list) pairs, e.g. for ResNet block."""
    return []

  @abc.abstractproperty
  def input_node(self):
    """Returns an `ibp.VerifiableWrapper` for the previous layer's outputs."""

  @abc.abstractproperty
  def output_node(self):
    """Returns an `ibp.VerifiableWrapper` for this layer's outputs."""

  @property
  def input_shape(self):
    # Shape without the leading batch dimension.
    return self.input_bounds.shape[1:]

  @property
  def output_shape(self):
    # Shape without the leading batch dimension.
    return self.output_bounds.shape[1:]

  @property
  def inputs(self):
    """Nominal (unperturbed) input tensor."""
    return self.input_bounds.nominal

  @property
  def outputs(self):
    """Nominal (unperturbed) output tensor."""
    return self.output_bounds.nominal

  @property
  def input_bounds(self):
    """Concretized bounds on this layer's inputs."""
    return self.input_node.output_bounds.concretize()

  @property
  def output_bounds(self):
    """Concretized bounds on this layer's outputs."""
    return self.output_node.output_bounds.concretize()

  def dual_shape(self):
    """Returns shape of the dual variable, or possibly nested dict thereof."""
    # By default, there is one dual variable for each output.
    return None if self._no_duals else tuple(self.output_shape)

  def set_no_duals(self):
    """Declares that this layer has no dual variables of its own."""
    self._no_duals = True

  def reshape_duals_forwards(self, next_layer, dual_vars):
    """Reshapes `dual_vars` to match `next_layer`'s input shape if needed."""
    if next_layer.reshape:
      # There was a reshape prior to the next layer.
      dual_vars = snt.BatchReshape(next_layer.input_shape,
                                   preserve_dims=2)(dual_vars)
    return dual_vars

  def project_duals_op(self, dual_vars):  # pylint:disable=unused-argument
    """Projects duals into their regional constraints.

    Args:
      dual_vars: Dual variable tensor.

    Returns:
      Assignment op to modify `dual_vars`, clamping the dual variable
      values to their admissible ranges.
    """
    # By default, do no projection.
    return tf.no_op()
@six.add_metaclass(abc.ABCMeta)
class CustomOp(object):
  """Function or operation with a different implementation for each layer type.

  Each `visit_xxx` method is a call-back invoked via
  `SingleVerifiableLayer.custom_op`. They have implementation-specific *args and
  **kwargs, passed through by `SingleVerifiableLayer.custom_op`, for convenience
  so that the same visitor instance can be used multiple times with different
  arguments.
  """

  def visit_linear(self, layer, w, b, *args, **kwargs):
    """Callback for `Linear`."""
    raise NotImplementedError()

  def visit_conv(self, layer, w, b, padding, strides, *args, **kwargs):
    """Callback for `Conv`."""
    raise NotImplementedError()

  def visit_avgpool(self, layer, *args, **kwargs):
    """Callback for `AvgPool`."""
    raise NotImplementedError('AvgPool layers are not supported')

  def visit_maxpool(self, layer, *args, **kwargs):
    """Callback for `MaxPool`."""
    raise NotImplementedError('MaxPool layers are not supported')

  @abc.abstractmethod
  def visit_activation(self, layer, *args, **kwargs):
    """Callback for `Activation`."""
@six.add_metaclass(abc.ABCMeta)
class SingleVerifiableLayer(VerifiableLayer):
  """Dual layer for a single layer of the underlying network.

  Holds the wrapped Sonnet module, an optional batch-norm module fused with it,
  and whether a reshape occurred immediately before this layer.
  """

  def __init__(self, input_node, output_node, module,
               batch_norm=None, reshape=False):
    """Constructor.

    Args:
      input_node: `ibp.VerifiableWrapper` for the previous layer's outputs.
      output_node: `ibp.VerifiableWrapper` for this layer's outputs.
      module: Underlying network module wrapped by this dual layer.
      batch_norm: Optional batch normalization module fused with this layer.
      reshape: Whether a reshape occurred prior to this layer.
    """
    super(SingleVerifiableLayer, self).__init__()
    self._module = module
    self._batch_norm = batch_norm
    self._reshape = reshape
    self._input_node = input_node
    self._output_node = output_node

  @property
  def input_node(self):
    return self._input_node

  @property
  def output_node(self):
    return self._output_node

  @abc.abstractproperty
  def is_activation(self):
    """Returns whether an activation layer, as opposed to linear/conv."""

  @property
  def module(self):
    return self._module

  @property
  def batch_norm(self):
    return self._batch_norm

  @property
  def reshape(self):
    return self._reshape

  def backward_prop_batchnorm(self, y):
    """Backward-propagates `y` through the batch-norm scale, if present."""
    if self.batch_norm is not None:
      w, _ = layer_utils.decode_batchnorm(self.batch_norm)
      y = y * tf.cast(w, y.dtype)
    return y

  def backward_prop_batchnorm_bias(self, y, bias):
    """Backward-propagates `y` through batch-norm, accumulating its bias term.

    Returns:
      Tuple of (scaled y, updated bias).
    """
    if self.batch_norm is not None:
      w, b = layer_utils.decode_batchnorm(self.batch_norm)
      # Fold the batch-norm offset into the running bias accumulator.
      bias = bias + tf.reduce_sum(y * tf.cast(b, y.dtype),
                                  axis=list(range(2, y.shape.ndims)))
      y = y * tf.cast(w, y.dtype)
    return y, bias

  @abc.abstractmethod
  def custom_op(self, op, *args, **kwargs):
    """Double-dispatch: invokes a `visit_xxx` method on `op`."""
@six.add_metaclass(abc.ABCMeta)
class AffineLayer(SingleVerifiableLayer):
  """Layer that acts as an affine transform, e.g. linear or convolution."""

  @property
  def is_activation(self):
    # Affine layers are by definition not activations.
    return False

  @abc.abstractmethod
  def forward_prop(self, x, apply_bias=False, w_fn=None):
    """Applies the affine transform to a tensor.

    Args:
      x: Tensor of shape (num_targets, batch_size, input_shape...).
      apply_bias: whether to include the `b` contribution.
      w_fn: Optional elementwise preprocessing function to apply to `w`,
        for example `tf.abs`.

    Returns:
      Tensor of shape (num_targets, batch_size, output_shape...),
      containing w x + b .
    """

  @abc.abstractmethod
  def backward_prop(self, y, w_fn=None):
    """Applies the transpose of the affine transform to a tensor.

    Args:
      y: Tensor of shape (num_targets, batch_size, output_shape...).
      w_fn: Optional elementwise preprocessing function to apply to `w`,
        for example `tf.abs`.

    Returns:
      Tensor of shape (num_targets, batch_size, input_shape...),
      containing w^T y .
    """

  @abc.abstractmethod
  def backward_prop_bias(self, y):
    """Takes the scalar product of the bias with a tensor.

    Args:
      y: Tensor of shape (num_targets, batch_size, output_shape...).

    Returns:
      Tensor of shape (num_targets, batch_size),
      containing b^T y .
    """

  @abc.abstractmethod
  def flatten(self):
    """Flattens the affine transform, materialising it as fully connected.

    Returns:
      w_flat:
        2D tensor of shape (input_size, output_size).
      b_flat:
        1D tensor of shape (output_size).
    """
class Conv(AffineLayer):
  """Dual-layer wrapper around a convolutional module."""

  def __init__(self, input_node, output_node, module, batch_norm=None,
               reshape=False):
    super(Conv, self).__init__(input_node, output_node, module,
                               batch_norm=batch_norm, reshape=reshape)
    # Cache the module's parameters; the bias may be absent.
    self._w = module.w
    self._b = module.b if module.has_bias else None
    self._padding = module.padding
    # Drop the batch and channel entries of the stride.
    self._strides = module.stride[1:-1]

  def forward_prop(self, x, apply_bias=False, w_fn=None):
    """Convolves `x` with the kernel (optionally preprocessed by `w_fn`)."""
    kernel = self._w if w_fn is None else w_fn(self._w)
    out = common.convolution(x, tf.cast(kernel, x.dtype),
                             padding=self._padding, strides=self._strides)
    if apply_bias and self._b is not None:
      out += tf.cast(self._b, x.dtype)
    return out

  def backward_prop(self, y, w_fn=None):
    """Applies the transposed convolution to `y`."""
    kernel = self._w if w_fn is None else w_fn(self._w)
    return common.conv_transpose(y, tf.cast(kernel, y.dtype),
                                 result_shape=self.input_shape,
                                 padding=self._padding, strides=self._strides)

  def backward_prop_bias(self, y):
    """Returns b^T y, or zeros if this convolution has no bias."""
    if self._b is None:
      return tf.zeros(tf.shape(y)[:2], dtype=y.dtype)
    return tf.reduce_sum(y * tf.cast(self._b, y.dtype),
                         axis=list(range(2, y.shape.ndims)))

  def flatten(self):
    """Materialises the convolution as an equivalent fully connected layer."""
    return layer_utils.materialise_conv(
        self._w, self._b, input_shape=self.input_shape,
        padding=self._padding, strides=self._strides)

  def custom_op(self, op, *args, **kwargs):
    """Double-dispatch: invokes `op.visit_conv`."""
    return op.visit_conv(self, self._w, self._b,
                         self._padding, self._strides, *args, **kwargs)
class Linear(AffineLayer):
  """Dual-layer wrapper around a fully connected (linear) module."""

  def __init__(self, input_node, output_node, module, batch_norm=None,
               reshape=False):
    super(Linear, self).__init__(input_node, output_node, module,
                                 batch_norm=batch_norm, reshape=reshape)
    # Cache the module's parameters; the bias may be absent.
    self._w = module.w
    self._b = module.b if module.has_bias else None

  def forward_prop(self, x, apply_bias=False, w_fn=None):
    """Returns w x (optionally + b), with `w_fn` applied to w if given."""
    weights = self._w if w_fn is None else w_fn(self._w)
    result = tf.tensordot(x, tf.cast(weights, x.dtype), axes=1)
    if apply_bias and self._b is not None:
      result += tf.cast(self._b, x.dtype)
    return result

  def backward_prop(self, y, w_fn=None):
    """Returns w^T y, with `w_fn` applied to w if given."""
    weights = self._w if w_fn is None else w_fn(self._w)
    transposed = tf.transpose(tf.cast(weights, y.dtype))
    return tf.tensordot(y, transposed, axes=1)

  def backward_prop_bias(self, y):
    """Returns b^T y, or zeros if this layer has no bias."""
    if self._b is None:
      return tf.zeros(tf.shape(y)[:2], dtype=y.dtype)
    return tf.tensordot(y, tf.cast(self._b, y.dtype), axes=1)

  def flatten(self):
    """Already fully connected; returns the raw (w, b) pair."""
    return self._w, self._b

  def custom_op(self, op, *args, **kwargs):
    """Double-dispatch: invokes `op.visit_linear`."""
    return op.visit_linear(self, self._w, self._b, *args, **kwargs)

  def get_objective_weights(self, labels, target_strategy=None):
    """Elides the verification objective with this (final) linear layer."""
    assert self._b is not None, 'Last layer must have a bias.'
    if target_strategy is None:
      w, b = common.targeted_objective(self._w, self._b, labels)
    else:
      w, b = target_strategy.target_objective(self._w, self._b, labels)
    return ObjectiveWeights(w, b)
class AvgPool(AffineLayer):
  """Wraps an average-pool layer."""

  def __init__(self, input_node, output_node,
               kernel_shape, strides, reshape=False):
    """Constructor.

    Args:
      input_node: `ibp.VerifiableWrapper` for the previous layer's outputs.
      output_node: `ibp.VerifiableWrapper` for this layer's outputs.
      kernel_shape: Pooling window shape (spatial dims), or falsy for None.
      strides: Pooling strides (spatial dims), or falsy for None.
      reshape: Whether a reshape occurred prior to this layer.
    """
    super(AvgPool, self).__init__(input_node, output_node,
                                  module=None,
                                  reshape=reshape)
    self._kernel_shape = list(kernel_shape) if kernel_shape else None
    self._strides = list(strides) if strides else None

  @property
  def kernel_shape(self):
    return self._kernel_shape

  @property
  def strides(self):
    return self._strides

  def forward_prop(self, x, apply_bias=False, w_fn=None):
    # NOTE(review): `self._module` is None (passed as `module=None` in
    # __init__ above), so calling it here will raise TypeError. Confirm
    # whether `forward_prop` is ever invoked for AvgPool layers.
    return self._module(x)

  def backward_prop(self, y, w_fn=None):
    # `w_fn` is unused: average pooling has no weights to preprocess.
    del w_fn
    return common.avgpool_transpose(y, result_shape=self.input_shape,
                                    kernel_shape=self.kernel_shape,
                                    strides=self.strides)

  def backward_prop_bias(self, y):
    # Average pooling has no bias term.
    return tf.zeros(tf.shape(y)[:2], dtype=y.dtype)

  def flatten(self):
    raise NotImplementedError()

  def custom_op(self, op, *args, **kwargs):
    """Double-dispatch: invokes `op.visit_avgpool`."""
    return op.visit_avgpool(self, *args, **kwargs)
class MaxPool(SingleVerifiableLayer):
  """Wraps a max-pool layer, optionally fused with a preceding ReLU."""

  def __init__(self, input_node, output_node,
               kernel_shape, strides, with_relu=False, reshape=False):
    """Constructor.

    Args:
      input_node: `ibp.VerifiableWrapper` for the previous layer's outputs.
      output_node: `ibp.VerifiableWrapper` for this layer's outputs.
      kernel_shape: Pooling window shape (spatial dims), or falsy for None.
      strides: Pooling strides (spatial dims), or falsy for None.
      with_relu: Whether a ReLU is fused with this max-pool.
      reshape: Whether a reshape occurred prior to this layer.
    """
    super(MaxPool, self).__init__(input_node, output_node,
                                  module=None,
                                  reshape=reshape)
    self._kernel_shape = list(kernel_shape) if kernel_shape else None
    self._strides = list(strides) if strides else None
    self._with_relu = with_relu

  @property
  def kernel_shape(self):
    return self._kernel_shape

  @property
  def strides(self):
    return self._strides

  @property
  def with_relu(self):
    return self._with_relu

  @property
  def is_activation(self):
    # Max-pool is treated as a (non-affine) activation layer.
    return True

  def custom_op(self, op, *args, **kwargs):
    """Double-dispatch: invokes `op.visit_maxpool`."""
    return op.visit_maxpool(self, *args, **kwargs)
class Activation(SingleVerifiableLayer):
  """Wraps an activation."""

  def __init__(self, input_node, output_node, module,
               reshape=False, parameters=None):
    """Constructor.

    Args:
      input_node: `ibp.VerifiableWrapper` for the previous layer's outputs.
      output_node: `ibp.VerifiableWrapper` for this layer's outputs.
      module: Activation function; its `__name__` identifies the activation.
      reshape: Whether a reshape occurred prior to this layer.
      parameters: Optional extra parameters of the activation.
    """
    super(Activation, self).__init__(input_node, output_node, module,
                                     reshape=reshape)
    self._activation = module.__name__  # Convert to string.
    self._parameters = parameters

  @property
  def is_activation(self):
    return True

  @property
  def activation(self):
    # Name of the activation function, e.g. 'relu'.
    return self._activation

  @property
  def parameters(self):
    return self._parameters

  def custom_op(self, op, *args, **kwargs):
    """Double-dispatch: invokes `op.visit_activation`."""
    return op.visit_activation(self, *args, **kwargs)
def create_verifiable_layer(input_node, output_node, module,
batch_norm=None, reshape=False,
parameters=None):
"""Returns an instance of `SingleVerifiableLayer` for the specified module."""
if isinstance(module, snt.Conv2D) or isinstance(module, snt.Conv1D):
return Conv(input_node, output_node, module, batch_norm, reshape)
elif isinstance(module, snt.Linear):
return Linear(input_node, output_node, module, batch_norm, reshape)
else:
if batch_norm is not None:
raise ValueError('Cannot add a batch normalization layer to an '
'activation.')
return Activation(input_node, output_node, | |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division
import argparse
import os
import platform
import re
import shutil
import subprocess
import sys
import warnings
import setuptools
from distutils.core import Command
from Cython.Distutils import build_ext as _build_ext
# package name (also used to derive the directory layout below)
__package__ = 'pydriver'
# set to True to skip automatic PCL_HELPER compilation (in this case you have to compile it manually before invoking setup.py)
SKIP_PCL_HELPER = False
if platform.system() == 'Windows':
    # requires manual compilation on Windows
    SKIP_PCL_HELPER = True
# current working directory (directory of setup.py)
cwd = os.path.abspath(os.path.dirname(__file__))
# pcl_helper directories (source, CMake build output, compiled library)
pcl_helper_dir = os.path.join(__package__, 'pcl', 'pcl_helper')
pcl_helper_dir_build = os.path.join(pcl_helper_dir, 'build')
pcl_helper_dir_lib = os.path.join(pcl_helper_dir, 'lib')
# version.py file path
version_py_path = os.path.join(cwd, __package__, 'version.py')
# source code template for version.py (filled in by update_version_py())
version_py_src = """# this file was created automatically by setup.py
__version__ = '{version}'
__version_info__ = {{
    'full': __version__,
    'short': '.'.join(__version__.split('.')[:2])
}}
"""
def read(fname):
    """Return the contents of *fname*, resolved relative to setup.py's directory.

    Uses a context manager so the file handle is closed promptly (the
    original leaked the handle until garbage collection).
    """
    with open(os.path.join(cwd, fname)) as f:
        return f.read()
def update_version_py():
    """Update version.py using the ``git describe`` command.

    Builds a PEP 440 version string from the most recent Git tag
    (``vX.Y[.Z]<segments>``), appending ``.devN+<hash>`` when there are
    commits past the tag, and writes it to ``version_py_path`` using the
    ``version_py_src`` template.

    Returns:
        True when version.py was (re)written, False when this is not a
        Git checkout or Git could not be run.
    """
    if not os.path.isdir('.git'):
        print('This does not appear to be a Git repository, leaving version.py unchanged.')
        return False
    try:
        describe_output = subprocess.check_output(['git', 'describe', '--long', '--dirty']).decode('ascii').strip()
    except (OSError, subprocess.CalledProcessError):
        # was a bare ``except:``; narrowed so ^C / SystemExit and real bugs
        # still propagate while missing-git and non-zero exits are handled
        print('Unable to run Git, leaving version.py unchanged.')
        return False
    # output looks like <version tag>-<commits since tag>-g<hash> and can end with '-dirty', e.g. v0.1.0-14-gd9f10e2-dirty
    # our version tags look like 'v0.1.0' or 'v0.1' and optionally additional segments (e.g. v0.1.0rc1), see PEP 0440
    # (raw strings: the original patterns relied on '\.' surviving as an
    # invalid string escape, which is deprecated)
    describe_parts = re.match(r'^v([0-9]+\.[0-9]+(?:\.[0-9]+)?\S*)-([0-9]+)-g([0-9a-f]+)(?:-(dirty))?$', describe_output)
    assert describe_parts is not None, 'Unexpected output from "git describe": {}'.format(describe_output)
    version_tag, n_commits, commit_hash, dirty_flag = describe_parts.groups()
    version_parts = re.match(r'^([0-9]+)\.([0-9]+)(?:\.([0-9]+))?(\S*)$', version_tag)
    assert version_parts is not None, 'Unexpected version format: {}'.format(version_tag)
    version_major, version_minor, version_micro, version_segments = version_parts.groups()
    version_major = int(version_major)
    version_minor = int(version_minor)
    version_micro = int(version_micro) if version_micro is not None else 0
    n_commits = int(n_commits)
    if dirty_flag is not None:
        print('WARNING: Uncommitted changes detected.')
    if n_commits > 0:
        # non-exact tag match: bump micro and mark as a dev version
        version_micro += 1
        version_segments += '.dev{}+{}'.format(n_commits, commit_hash)
    # final version string (omit micro when it is 0)
    if version_micro > 0:
        version = '{}.{}.{}{}'.format(version_major, version_minor, version_micro, version_segments)
    else:
        version = '{}.{}{}'.format(version_major, version_minor, version_segments)
    with open(version_py_path, 'w') as f:
        f.write(version_py_src.format(version=version))
    print('Set version to: {}'.format(version))
    # success
    return True
# update version.py (if we're in a Git repository)
update_version_py()
# "import" version information without importing the package; use a context
# manager so the file handle is not leaked (the original used a bare
# exec(open(...).read())).
with open(version_py_path) as _version_file:
    exec(_version_file.read())
class build_pcl_helper(Command):
    """Build the native pcl_helper library in-place (cmake + make)."""

    description = 'build pcl_helper library (inplace)'
    user_options = []

    def initialize_options(self):
        self.cwd_pcl_helper_dir_build = None

    def finalize_options(self):
        # build inplace
        self.cwd_pcl_helper_dir_build = os.path.join(cwd, pcl_helper_dir_build)

    def run(self):
        build_dir = self.cwd_pcl_helper_dir_build
        # create build dir if it doesn't exist
        if not os.path.exists(build_dir):
            os.makedirs(build_dir)
        # build pcl_helper with the platform-appropriate strategy
        if platform.system() == 'Windows':
            self._build_pcl_helper_windows(build_dir)
        else:
            self._build_pcl_helper_linux(build_dir)

    def _build_pcl_helper_linux(self, build_dir):
        # configure with CMake, then compile with make
        subprocess.check_call(['cmake', '..'], cwd=build_dir)
        subprocess.check_call('make', cwd=build_dir)

    def _build_pcl_helper_windows(self, build_dir):
        # automatic Windows builds are unsupported; compile manually
        raise NotImplementedError
class build_ext(_build_ext):
    """Cython build_ext that also builds and bundles the native pcl_helper.

    Adds a ``--skip-pcl-helper`` option for users who compile pcl_helper
    by hand before running setup.py.
    """

    user_options = _build_ext.user_options + [
        ('skip-pcl-helper', None, 'skip pcl_helper compilation (assume manual compilation)'),
    ]
    boolean_options = _build_ext.boolean_options + ['skip-pcl-helper']

    def initialize_options(self):
        _build_ext.initialize_options(self)
        # don't skip pcl helper by default
        self.skip_pcl_helper = False
        # pcl_helper location in source directory
        self.cwd_pcl_helper_dir_lib = None
        # pcl_helper location in package build directory
        self.build_pcl_helper_dir_lib = None

    def finalize_options(self):
        _build_ext.finalize_options(self)
        # prevent numpy from thinking it is still in its setup process:
        __builtins__.__NUMPY_SETUP__ = False
        import numpy as np
        self.include_dirs.append(np.get_include())
        # finalize pcl_helper directories
        self.cwd_pcl_helper_dir_lib = os.path.join(cwd, pcl_helper_dir_lib)
        self.build_pcl_helper_dir_lib = os.path.join(self.build_lib, pcl_helper_dir_lib)
        # check global flag SKIP_PCL_HELPER
        self.skip_pcl_helper = self.skip_pcl_helper or SKIP_PCL_HELPER

    def build_extensions(self, *args, **kwargs):
        # Merge the per-compiler global compile/link flags into every
        # extension.  NOTE(review): ``extra_args`` is a module-level mapping
        # defined elsewhere in this file; confirm its keys ('unix', ...).
        compiler_type = self.compiler.compiler_type
        if compiler_type not in extra_args:
            compiler_type = 'unix' # probably some unix-like compiler
        # merge compile and link arguments with global arguments for current compiler
        # (set() deduplicates flags; note this does not preserve flag order)
        for e in self.extensions:
            e.extra_compile_args = list(set(e.extra_compile_args + extra_args[compiler_type]['extra_compile_args']))
            e.extra_link_args = list(set(e.extra_link_args + extra_args[compiler_type]['extra_link_args']))
        _build_ext.build_extensions(self, *args, **kwargs)

    def run(self):
        if not self.skip_pcl_helper:
            # build pcl_helper first
            try:
                self.run_command('build_pcl_helper')
            except:  # deliberately broad: print a hint, then re-raise unchanged
                print('Error: pcl_helper could not be compiled automatically')
                print('Please compile pcl_helper manually (see %s/pcl/pcl_helper/README.rst for instructions)' % __package__ + \
                    ' and set SKIP_PCL_HELPER in setup.py to True.')
                raise
        # copy pcl_helper library to package build directory
        self.copy_tree(self.cwd_pcl_helper_dir_lib, self.build_pcl_helper_dir_lib)
        _build_ext.run(self)

    def get_outputs(self):
        # add contents of pcl_helper library directory to outputs (so they can be uninstalled)
        outputs = []
        for dirpath, dirnames, filenames in os.walk(self.build_pcl_helper_dir_lib):
            outputs.extend([os.path.join(dirpath, f) for f in filenames])
        return _build_ext.get_outputs(self) + outputs
class CleanCommand(Command):
    """Custom clean command to tidy up the project root.

    Removes build artefacts (build/, build_c/, dist/, .eggs/, egg-info,
    pcl_helper build output) and generated files (*.pyc, *.pyo, *.pyd,
    *.so).  Every removal is sanity-checked to stay inside the setup.py
    directory before anything is deleted.
    """

    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # remove every __pycache__ directory anywhere under the project root
        self._remove_dirs('__pycache__')
        self._remove_dir(cwd, 'build')
        self._remove_dir(cwd, 'build_c')
        self._remove_dir(cwd, 'dist')
        self._remove_dir(cwd, '.eggs')
        self._remove_dir(cwd, '{}.egg-info'.format(__package__))
        self._remove_dir(cwd, pcl_helper_dir_build)
        self._remove_dir(cwd, pcl_helper_dir_lib)
        # remove generated bytecode and compiled extension files
        self._remove_files('pyc')
        self._remove_files('pyo')
        self._remove_files('pyd')
        self._remove_files('so')

    def _remove_dirs(self, dirname, parent_dir=None):
        # Remove every directory named `dirname` below `parent_dir`
        # (the project root when parent_dir is None).
        if parent_dir is None:
            full_parent_dir = cwd
        else:
            full_parent_dir = os.path.join(cwd, parent_dir)
        matches = []
        for dirpath, dirnames, filenames in os.walk(full_parent_dir):
            matches.extend([os.path.join(dirpath, d) for d in dirnames if d==dirname])
        for d in matches:
            self._remove_dir(d)

    def _remove_dir(self, *args):
        # Join `args` into one path and delete that directory tree after
        # verifying it really is a directory inside the project root.
        # NOTE: aborts the whole process via sys.exit() on a failed check.
        dirpath = os.path.abspath(os.path.join(*args))
        # sanity checks
        if not os.path.exists(dirpath):
            # nothing to do
            return
        if not os.path.isdir(dirpath):
            print('"{}" is not a directory, aborting...'.format(dirpath))
            sys.exit()
        path_check = True
        if not dirpath.startswith(cwd):
            path_check = False
        if path_check and len(dirpath) > len(cwd):
            # first character after cwd should be a slash or a backslash
            if dirpath[len(cwd)] != os.sep:
                path_check = False
        if not path_check:
            print('The directory "{}" appears to be outside of main directory ({}), aborting...'.format(dirpath, cwd))
            sys.exit()
        # all sanity checks ok; never follow symlinks when deleting
        if not os.path.islink(dirpath):
            print('Removing directory: ' + dirpath)
            shutil.rmtree(dirpath, ignore_errors=True)
        else:
            print("Can't remove symlink to directory: " + dirpath)

    def _remove_files(self, ext, parent_dir=None):
        # Remove every '*.<ext>' file below `parent_dir` (project root when None).
        if parent_dir is None:
            full_parent_dir = cwd
        else:
            full_parent_dir = os.path.join(cwd, parent_dir)
        matches = []
        for dirpath, dirnames, filenames in os.walk(full_parent_dir):
            matches.extend([os.path.join(dirpath, f) for f in filenames if f.endswith('.'+ext)])
        for f in matches:
            self._remove_file(f)

    def _remove_file(self, *args):
        # Join `args` into one path and delete that file after verifying it
        # is a regular file whose directory lies inside the project root.
        filepath = os.path.abspath(os.path.join(*args))
        # sanity checks
        if not os.path.exists(filepath):
            # nothing to do
            return
        if not os.path.isfile(filepath):
            print('"{}" is not a file, aborting...'.format(filepath))
            sys.exit()
        filepath_dir = os.path.abspath(os.path.dirname(filepath))
        path_check = True
        if not filepath_dir.startswith(cwd):
            path_check = False
        if path_check and len(filepath_dir) > len(cwd):
            # first character after cwd should be a slash or a backslash
            if filepath_dir[len(cwd)] != os.sep:
                path_check = False
        if not path_check:
            print('The file "{}" appears to be outside of main directory ({}), aborting...'.format(filepath, cwd))
            sys.exit()
        # all sanity checks ok
        print('Removing file: ' + filepath)
        os.remove(filepath)
class lazy_cythonize(list):
    """Defer cythonize() until the extension list is actually accessed.

    This keeps commands that never build extensions (e.g. "clean") from
    importing and running Cython at all.
    """

    def __init__(self, extensions, *args, **kwargs):
        # cache of the cythonized extension list; None until first access
        self._list = None
        self.extensions = extensions
        self.args = args
        self.kwargs = kwargs

    def c_list(self):
        """Cythonize on first access only, then serve the cached result."""
        if self._list is None:
            self._list = self._cythonize()
        return self._list

    def __iter__(self):
        return iter(self.c_list())

    def __getitem__(self, ii):
        return self.c_list()[ii]

    def __len__(self):
        return len(self.c_list())

    def _cythonize(self):
        # imported lazily on purpose -- see class docstring
        from Cython.Build import cythonize
        return cythonize(self.extensions, *self.args, **self.kwargs)
# setup argument parser
# We parse a handful of Cython/pcl_helper-specific options ourselves and
# forward everything else (via a rewritten sys.argv) to setuptools.
parser = argparse.ArgumentParser(
    description = '%s setup script, basic install: python setup.py install' % __package__,
    epilog = 'Other arguments will be passed to setuptools, use --help-setup for more information.',
)
# add arguments which we will parse and pass to setuptools
parser.add_argument('command', nargs = '?',
    help = 'command to pass to setuptools, use "install" to install package')
parser.add_argument('--debug', '-g', action = 'store_true',
    help = 'compile/link with debugging information')
parser.add_argument('--force', '-f', action = 'store_true',
    help = 'forcibly build everything (ignore file timestamps)')
parser.add_argument('--help-setup', action = 'store_true',
    help = 'show setuptools help and exit')
# add own arguments (consumed here, not passed on to setuptools)
parser.add_argument('--annotate', action = 'store_true',
    help = 'let Cython generate HTML files with performance information')
parser.add_argument('--cython-build-dir', default = 'build_c',
    help = 'directory for C/C++ sources and HTML files generated by Cython (default: build_c)')
parser.add_argument('--inplace', action = 'store_true',
    help = 'build inplace')
parser.add_argument('--no-openmp', dest = 'openmp', action = 'store_false',
    help = 'compile/link without OpenMP support')
parser.add_argument('--profile', action = 'store_true',
    help = 'enable profiling with cProfile')
parser.add_argument('--skip-pcl-helper', action = 'store_true',
    help = 'skip pcl_helper compilation (assume manual compilation)')
# parse command line arguments (unknown ones are forwarded to setuptools)
cmdargs, unknown_args = parser.parse_known_args()
if cmdargs.help_setup:
    # show setuptools help and exit
    sys.argv = [sys.argv[0], '--help']
    setuptools.setup()
    sys.exit()
# construct new command line arguments for setuptools
# leave the script name
setuptools_argv = [sys.argv[0]]
# pass setuptools options which we already have parsed
if cmdargs.command: setuptools_argv.append(cmdargs.command)
if cmdargs.force: setuptools_argv.append('--force')
if cmdargs.debug: setuptools_argv.append('--debug')
# add all unknown args
setuptools_argv += unknown_args
# replace sys.argv by arguments which will be passed to setuptools
sys.argv = setuptools_argv
# initialize setuptools arguments
setup_args = {
'name': __package__,
'version': __version__,
'url': 'http://github.com/lpltk/pydriver',
'license': 'MIT',
'author': '<NAME>',
'author_email': '<EMAIL>',
'description': 'A framework for training and evaluating object detectors and classifiers in road traffic environment.',
'long_description': read('README.rst'),
'zip_safe': False,
'package_dir': {__package__: __package__},
'packages': setuptools.find_packages(),
'package_data': {__package__+'.pcl': ['pcl_helper/lib/*']},
'include_package_data': True,
'platforms': 'any',
'setup_requires': [
'numpy>=1.8.1',
'cython>=0.22.1',
],
'install_requires': [
'numpy>=1.8.1',
'cython>=0.22.1',
'scipy>=0.13.3',
'scikit-image',
'scikit-learn',
],
'classifiers': [
'Development | |
DL_name_setting, redirectValue[tempB]-offset, int((self.uvValue[objCounter][tempB][0])*textureSize[0]), int((self.uvValue[objCounter][tempB][1])*textureSize[1]), DL_end_setting, self.uvValue[objCounter][tempB][0], self.uvValue[objCounter][tempB][1]))
modifiedUVValues[objCounter][redirectValue[tempB]][0] = self.uvValue[objCounter][tempB][0]
modifiedUVValues[objCounter][redirectValue[tempB]][1] = self.uvValue[objCounter][tempB][1]
modifyVertexCompare[gSP2TriangleCheck].append(redirectValue[tempB]) #to see if both tri1 and tri2 are using and modifying the same vertex
if roundtoquarter(self.uvValue[objCounter][tempC][0]) != roundtoquarter(modifiedUVValues[objCounter][redirectValue[tempC]][0]) or roundtoquarter(self.uvValue[objCounter][tempC][1]) != roundtoquarter(modifiedUVValues[objCounter][redirectValue[tempC]][1]):
modify = True
S_Coordinate = checknegativehex(int((self.uvValue[objCounter][tempC][0])*textureSize[0]))
T_Coordinate = checknegativehex(int((self.uvValue[objCounter][tempC][1])*textureSize[1]))
modifyVertexCommands[gSP2TriangleCheck] += (" %sSPModifyVertex(%s %d, G_MWO_POINT_ST, 0x%0004x%0004x%s /*New Coords: %.2f, %.2f*/" % (DL_start_setting, DL_name_setting, redirectValue[tempC]-offset, S_Coordinate, T_Coordinate, DL_end_setting, self.uvValue[objCounter][tempC][0], self.uvValue[objCounter][tempC][1]))
#modifyVertexCommands[gSP2TriangleCheck] += (" %sSPModifyVertex(%s %d, G_MWO_POINT_ST, 0x%0004x%0004x%s /*New Coords: %.2f, %.2f*/" % (DL_start_setting, DL_name_setting, redirectValue[tempC]-offset, int((self.uvValue[objCounter][tempC][0])*textureSize[0]), int((self.uvValue[objCounter][tempC][1])*textureSize[1]), DL_end_setting, self.uvValue[objCounter][tempC][0], self.uvValue[objCounter][tempC][1]))
modifiedUVValues[objCounter][redirectValue[tempC]][0] = self.uvValue[objCounter][tempC][0]
modifiedUVValues[objCounter][redirectValue[tempC]][1] = self.uvValue[objCounter][tempC][1]
modifyVertexCompare[gSP2TriangleCheck].append(redirectValue[tempC]) #to see if both tri1 and tri2 are using and modifying the same vertex
if modify == True:
if faceOptions[gSP2TriangleCheck] == OutputOptions.UPDATE_GSPVERTEX:
faceOptions[gSP2TriangleCheck] = OutputOptions.UPDATE_GSPVERTEX_AND_UVCOORDS
else:
faceOptions[gSP2TriangleCheck] = OutputOptions.UPDATE_UVCOORDS
#use1Triangle = False
if gSP2TriangleCheck >= 1: #after gathering commands, compare them for optimization...
# compareSort = [[0 for x in range(4)] for y in range(2)]
# compareSort[0] = [gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3]]
# compareSort[1] = [gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3]]
#o.write("\n//modifyVertexCompare[0][%i,%i,%i]: \n" %(modifyVertexCompare[0][0],modifyVertexCompare[0][1],modifyVertexCompare[0][2]) )
loop = 0
while loop < len(modifyVertexCompare[0]):
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("\n//modifyVertexCompare[0][%i]: Tri 1 is modifying vertex %i \n" %(modifyVertexCompare[0][loop], modifyVertexCompare[0][loop]))
loop +=1
loop = 0
while loop < len(modifyVertexCompare[1]):
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("\n//modifyVertexCompare[1][%i]: Tri 2 is modifying vertex %i \n" % (modifyVertexCompare[1][loop], modifyVertexCompare[1][loop]))
loop +=1
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("\n//Testing for use1Triangle\n" )
#o.write("//NOTE ----- If texture coordinates for a specific face are warped or shifted, it is likely that tri 2 is modifying a vertex used by tri 1. This shouldn't happen and is a bug. Let me know! \n")
loopOuter = 0
loopInner = 0
#NOTE ---- Make sure that tri1 and tri2 are not modifying the same vertex
while loopOuter < len(modifyVertexCompare[0]):
while loopInner < len(modifyVertexCompare[1]):
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("//Outer: %i Inner: %i\n" % (redirectValue[loopOuter]-offset, redirectValue[loopInner]-offset) )
if faceOptions[0] != OutputOptions.FORCE_1TRIANGLE:
if modifyVertexCompare[0][loopOuter] == modifyVertexCompare[1][loopInner]:
use1Triangle = True
#faceOptions[0] = OutputOptions.FORCE_1TRIANGLE
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("//Split into 2 separate 1Triangle Calls\n" )
loopOuter = tempC+3
loopInnter = tempA
break
# if DEBUG == True: #DEBUG CHECK VALUES:---
# o.write("//NOTE ---- Increase loopInner: %i " % loopInner)
loopInner += 1
loopOuter += 1
# if DEBUG == True: #DEBUG CHECK VALUES:---
# o.write("//NOTE ---- Increase loopOuter: %i " % loopOuter)
loopInner = 0
#NOTE ---- Make sure that tri2 is not modifying a vertex used in tri1
loopOuter = 0
if use1Triangle == False:
while loopOuter < len(modifyVertexCompare[1]):
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("//NOTE ---- tri1:(%i, %i, %i) \n" % (gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2]))
# NOTE ----- Check to make sure that the modified vertex in tri2 (modifyVertexCompare[1][*]) is not altering an unmodified vertex in tri1 (gSP2TriangleHold[0][*]+offset)
if (modifyVertexCompare[1][loopOuter] == (gSP2TriangleHold[0][0]+offset)) or (modifyVertexCompare[1][loopOuter] == (gSP2TriangleHold[0][1] + offset)) or (modifyVertexCompare[1][loopOuter] == (gSP2TriangleHold[0][2] + offset)) :
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("//NOTE ---- tri2:(%i) is modifying a value used in tri1:(%i, %i, %i) \n" % (modifyVertexCompare[1][loopOuter], gSP2TriangleHold[0][0]+ offset, gSP2TriangleHold[0][1]+ offset, gSP2TriangleHold[0][2]+ offset))
use1Triangle = True
loopOuter +=1
loopOuter = 0
while loopOuter < len(modifyVertexCompare[0]):
if (modifyVertexCompare[0][loopOuter] == (gSP2TriangleHold[1][0]+offset)) or (modifyVertexCompare[0][loopOuter] == (gSP2TriangleHold[1][1] + offset)) or (modifyVertexCompare[0][loopOuter] == (gSP2TriangleHold[1][2] + offset)) :
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("//NOTE ---- tri1:(%i) is modifying a value used in tri2:(%i, %i, %i) \n" % (modifyVertexCompare[0][loopOuter], gSP2TriangleHold[1][0]+ offset, gSP2TriangleHold[1][1]+ offset, gSP2TriangleHold[1][2]+ offset))
use1Triangle = True
loopOuter +=1
if use1Triangle == True:
if SHOWTIPS == True or DEBUG == True:
o.write("//NOTE ---- Split into separate 1Triangle commands \n")
if faceOptions[0] == OutputOptions.UPDATE_NONE and faceOptions[1] == OutputOptions.UPDATE_UVCOORDS:
faceOptions[0] = OutputOptions.FORCE_1TRIANGLE
modifyVertexCommands[0] += ("\n")
modifyVertexCommands[1] += ("\n")
elif use1Triangle == False:
if SHOWTIPS == True or DEBUG == True:
o.write("//NOTE ---- Combine both UVCoords updates into 2Triangles commands \n")
if (faceOptions[0] == OutputOptions.UPDATE_UVCOORDS and faceOptions[1] == OutputOptions.UPDATE_UVCOORDS) or (faceOptions[0] == OutputOptions.UPDATE_GSPVERTEX_AND_UVCOORDS and faceOptions[1] == OutputOptions.UPDATE_UVCOORDS):
modifyVertexCommands[0] += modifyVertexCommands[1]
faceOptions[1] = OutputOptions.UPDATE_NONE
modifyVertexCommands[0] += ("\n")
else:
modifyVertexCommands[0] += ("\n")
modifyVertexCommands[1] += ("\n")
#NOTE: The following section beginning with 'if gSP2TriangleCheck >= 2: ' is the previous working version...
if gSP2TriangleCheck >= 1: #after gathering information on 2 triangle commands...
#o.write("//Testing current gSP2TriangleCheck... %i \n" % gSP2TriangleCheck)
#after setting both faceOptions:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("\n//Tri 1 of 2: faceOptions[0] %s\n" % faceOptions[0])
o.write("//Tri 2 of 2: faceOptions[1] %s\n" % faceOptions[1])
if faceOptions[0] == OutputOptions.UPDATE_GSPVERTEX and faceOptions[1] == OutputOptions.UPDATE_NONE:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet1...\n")
o.write("%s" % gSPVertexCommands[0])
#o.write(" %sSPVertex(%s %s%s+%d,%d,0 %s\n" % (DL_start_setting, DL_name_setting, name, VertList_setting, offset, loadlim+1, DL_end_setting)) #load the first x number of vertices
o.write(" %sSP2Triangles(%s %i, %i, %i, %i, %i, %i, %i, %i %s\n" %
(
DL_start_setting, DL_name_setting,
gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3],
gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3],
DL_end_setting)
)
elif faceOptions[0] == OutputOptions.UPDATE_NONE and faceOptions[1] == OutputOptions.UPDATE_GSPVERTEX:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet2...\n")
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3], DL_end_setting))
o.write("%s" % gSPVertexCommands[1])
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3], DL_end_setting))
elif faceOptions[0] == OutputOptions.UPDATE_NONE and faceOptions[1] == OutputOptions.UPDATE_NONE:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet3...\n")
o.write(" %sSP2Triangles(%s %i, %i, %i, %i, %i, %i, %i, %i %s\n" %
(
DL_start_setting, DL_name_setting,
gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3],
gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3],
DL_end_setting)
)
elif faceOptions[0] == OutputOptions.UPDATE_NONE and faceOptions[1] == OutputOptions.UPDATE_UVCOORDS:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet4...\n")
o.write("%s" % modifyVertexCommands[1] )
o.write(" %sSP2Triangles(%s %i, %i, %i, %i, %i, %i, %i, %i %s\n" %
(
DL_start_setting, DL_name_setting,
gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3],
gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3],
DL_end_setting)
)
#FORCE_1TRIANGLE
elif faceOptions[0] == OutputOptions.FORCE_1TRIANGLE and faceOptions[1] == OutputOptions.UPDATE_UVCOORDS:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet4 1/2...\n")
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3], DL_end_setting))
o.write("%s" % modifyVertexCommands[1] )
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3], DL_end_setting))
elif faceOptions[0] == OutputOptions.UPDATE_NONE and faceOptions[1] == OutputOptions.UPDATE_GSPVERTEX_AND_UVCOORDS:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet9...\n")
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3], DL_end_setting))
o.write("%s" % gSPVertexCommands[1])
o.write("%s" % modifyVertexCommands[1] )
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3], DL_end_setting))
#delayVertexCall = False
elif faceOptions[0] == OutputOptions.UPDATE_UVCOORDS and faceOptions[1] == OutputOptions.UPDATE_NONE:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet5...\n")
o.write("%s" % modifyVertexCommands[0] )
o.write(" %sSP2Triangles(%s %i, %i, %i, %i, %i, %i, %i, %i %s\n" %
(
DL_start_setting, DL_name_setting,
gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3],
gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3],
DL_end_setting)
)
elif faceOptions[0] == OutputOptions.UPDATE_UVCOORDS and faceOptions[1] == OutputOptions.UPDATE_UVCOORDS:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet6...\n" )
o.write("%s" % modifyVertexCommands[0] )
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3], DL_end_setting))
o.write("%s" % modifyVertexCommands[1] )
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3], DL_end_setting))
elif faceOptions[0] == OutputOptions.UPDATE_GSPVERTEX_AND_UVCOORDS and faceOptions[1] == OutputOptions.UPDATE_GSPVERTEX_AND_UVCOORDS:
if DEBUG == True: #DEBUG CHECK VALUES:---
o.write("// OptionSet7...\n")
o.write("%s" % gSPVertexCommands[0])
o.write("%s" % modifyVertexCommands[0] )
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[0][0], gSP2TriangleHold[0][1], gSP2TriangleHold[0][2], gSP2TriangleHold[0][3], DL_end_setting))
o.write("%s" % gSPVertexCommands[1])
o.write("%s" % modifyVertexCommands[1] )
o.write(" %sSP1Triangle(%s %d, %d, %d, %d %s\n" % (DL_start_setting, DL_name_setting, gSP2TriangleHold[1][0], gSP2TriangleHold[1][1], gSP2TriangleHold[1][2], gSP2TriangleHold[1][3], DL_end_setting))
#delayVertexCall = False
elif faceOptions[0] == OutputOptions.UPDATE_GSPVERTEX and faceOptions[1] == OutputOptions.UPDATE_GSPVERTEX:
if DEBUG == | |
# By <NAME>
#
# Honeybee started by <NAME> is licensed
# under a Creative Commons Attribution-ShareAlike 3.0 Unported License.
"""
Grizzlybear exports Honeybee zones to gbXML file
-
Provided by Honeybee 0.0.55
Args:
EquipRange: reserved for future use
LPDRange: reserved for future use
bldgType: reserved for future use
epwFileAddress: location of the EnergyPlus weather file
rhinolocation: will be replaced with LadyBug location
_HBZones: Input your honeybee zones
HBContext: Input your honeybee context
meshSettings_: Custom mesh setting. Use Grasshopper mesh setting components
_writegbXML: Set to true to create gbxml
workingDir: C:\gbXML by default
fileName: choose a filename, no need to add the xml extension.
Returns:
readMe!: ...
resultFileAddress: ...
"""
ghenv.Component.Name = "Honeybee_GrizzlyBear"
ghenv.Component.NickName = 'grizzlyBear'
ghenv.Component.Message = 'VER 0.0.55\nOCT_31_2014'
ghenv.Component.Category = "Honeybee"
ghenv.Component.SubCategory = "12 | WIP"
#compatibleHBVersion = VER 0.0.55\nAUG_25_2014
#compatibleLBVersion = VER 0.0.58\nAUG_20_2014
try: ghenv.Component.AdditionalHelpFromDocStrings = "1"
except: pass
import os
import traceback
import re
import logging
import Grasshopper.Kernel as gh
import datetime
# folder where the native Grizzly Bear dependencies must live
gbXMLLibFolder = "C:\\gbXML"
gbXMLIsReady = True
# Verify that the required DLLs (VectorMath.dll, gbXMLSerializer.dll) exist;
# otherwise warn the user with download links and disable the component.
if os.path.isdir(gbXMLLibFolder):
    if os.path.isfile(os.path.join(gbXMLLibFolder, "VectorMath.dll")):
        # vectormath library present
        logging.info('vector math present.')
    else:
        msg = "Cannot find Grizzly Bear Vector Math Dependency. You can download the libraries from the link below. " + \
            "Copy the file to C:\\gbXML"
        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
        link = "https://www.dropbox.com/sh/vaklarrhw9tylg4/AABQdgKCb4qRdlI16ik8WqUya"
        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, link)
        gbXMLIsReady = False
    if os.path.isfile(os.path.join(gbXMLLibFolder, "gbXMLSerializer.dll")):
        # gbXML serializer library present
        logging.info('gbXML serializer present.')
    else:
        msg = "Cannot find Grizzly Bear Serializer Dependency. You can download the libraries from the link below. " + \
            "Copy the file to C:\gbXML"
        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
        link = 'https://www.dropbox.com/sh/vaklarrhw9tylg4/AABQdgKCb4qRdlI16ik8WqUya'
        ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, link)
        gbXMLIsReady = False
else:
    gbXMLIsReady = False
    # let the user know that they need to download OpenStudio libraries
    msg = "Cannot find a gbXML folder or any dependencies. Create a folder at C:\gbXML." \
        'Then click on the link below to download dependencies. Copy these into this folder after downloading.'
    ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
    link = "https://www.dropbox.com/sh/vaklarrhw9tylg4/AACBaYtBPIHkNj2QC82E7jgSa"
    ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, link)
if gbXMLIsReady:
    # configure file logging next to the DLLs; logging failures are non-fatal
    try:
        logging.basicConfig(filename=os.path.join(gbXMLLibFolder,'GBlog.txt'),filemode='a',format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',datefmt='%H:%M:%S',level=logging.DEBUG)
        # NOTE(review): `filename` is presumably the component's fileName
        # input, not the log path -- confirm this print is intentional
        print filename
    except:
        print 'not logging'
    import shutil
    #shutil.copy2(gbXMLLibFolder+"\\gbXMLSerializer.dll","C:\\CHPrograms\\gbXMLSerializer.dll")
    # load the .NET assemblies via IronPython's clr bridge
    import clr
    clr.AddReferenceToFileAndPath(os.path.join(gbXMLLibFolder, "gbXMLSerializer.dll"))
    #clr.AddReferenceToFileAndPath("C:\\Program Files (x86)\\OpenStudio 1.3.0\\CSharp\\openstudio\\OpenStudio.dll")
    clr.AddReferenceToFileAndPath(os.path.join(gbXMLLibFolder, "VectorMath.dll"))
    clr.AddReference("System.Xml")
    clr.AddReference("System.Core")
    clr.AddReference("System.Runtime.Remoting")
    import System
    import System.Xml
    import System.Xml.Serialization
    clr.ImportExtensions(System.Linq)
    clr.ImportExtensions(System.Globalization)
    from System.Collections.Generic import List
    from System import DateTime
    from System.Xml import XmlConvert
    from System.Globalization import CultureInfo
    import sys
    import uuid
    import math
    import copy
    import gbXMLSerializer as gbx
    import scriptcontext as sc
    import Rhino as rc
    import VectorMath as v
    #import OpenStudio as os
class WritegbXML(object):
def __init__(self, location, zipCode):
# import location
locationStr = _location.split('\n')
newLocStr = ""
#clean the coments
for line in locationStr:
if '!' in line: newLocStr += line.split('!')[0].strip()
else: newLocStr += line
newLocStr = newLocStr.replace(';', "")
site, self.locationName, self.latitude, self.longitude, timeZone, elevation = newLocStr.split(',')
# zipCode
if zipCode!=None: self.zipCode = zipCode
else: self.zipCode = "00000"
    # NOTE(review): this method is missing ``self`` -- when invoked as
    # ``instance.point3DtoMemorySafeCoord(pt)`` the instance would be bound
    # to ``pt3d``.  No call site is visible here; confirm it is unused or
    # only called unbound/through the class.
    def point3DtoMemorySafeCoord(pt3d):
        """Convert a point with .X/.Y/.Z attributes into a VectorMath MemorySafe_CartCoord."""
        x = pt3d.X
        y = pt3d.Y
        z = pt3d.Z
        gbmemSafeCoord = v.Vector.MemorySafe_CartCoord(x,y,z)
        logging.debug('Convert point3d to VectorMemorySafe Coord Success.')
        return gbmemSafeCoord
#makes a list of list of memorysafecoordinates, for polyloops
    def point3DListtoMemorySafeCoordList(self,coordinateList):
        """Convert lists of points into a .NET List of MemorySafe_CartCoord lists (for polyloops).

        If iterating `coordinateList` directly fails (input nested one level
        deeper than expected), retries with `coordinateList[0]`.
        """
        try:
            logging.debug('Creating Memsafecoord list from point3D list.')
            memsafelist = List[List[v.Vector.MemorySafe_CartCoord]]()
            for listcount, coordinates in enumerate(coordinateList):
                memsafepts = List[v.Vector.MemorySafe_CartCoord]()
                for pointcount,pt in enumerate(coordinates):
                    memsafept = v.Vector.MemorySafe_CartCoord(pt.X,pt.Y,pt.Z)
                    memsafepts.Add(memsafept)
                memsafelist.Add(memsafepts)
            logging.info('point3d List successfully converted to MemSafe Coord List.')
        except:
            # NOTE(review): bare except also hides genuine errors; it appears
            # intended only for the extra-nesting case above -- confirm.
            logging.debug('Creating Memsafecoord list from point3D list.')
            memsafelist = List[List[v.Vector.MemorySafe_CartCoord]]()
            for listcount, coordinates in enumerate(coordinateList[0]):
                memsafepts = List[v.Vector.MemorySafe_CartCoord]()
                for pointcount,pt in enumerate(coordinates):
                    memsafept = v.Vector.MemorySafe_CartCoord(pt.X,pt.Y,pt.Z)
                    memsafepts.Add(memsafept)
                memsafelist.Add(memsafepts)
            logging.info('point3d List successfully converted to MemSafe Coord List.')
        return memsafelist
#required to count up the shades in case they are meshed
def getShadeCount(self,shades):
shdct = 0
for surfnum,shade in enumerate(shades):
#coordinateList contains all the shade points for all shades
coordinatesList = shade.extractPoints()
#print coordinatesList
try:
len(coordinatesList[0][0])
print 'meshed surface'
for subcount,ss in enumerate(coordinatesList):
shdct+=1
except:
shdct+=1
return shdct
#takes a Honeybee space, finds the floor, then finds its area
    def findZoneFloorArea(self,surfaces):
        """Find the first floor surface (tilt == 180) and return (area, z).

        Falls through (implicitly returning None) when no surface qualifies.
        NOTE(review): uses the module-level 'wgb' instance rather than self —
        presumably both refer to the same WritegbXML object; confirm.
        """
        logging.debug('Finding floor area of all surfaces.')
        for ct,surface in enumerate(surfaces):
            print ct, surface
            coordlist = surface.coordinates ##extractPoints()
            # try the wrapped shape first; fall back to the raw list
            try:
                memsafelist = wgb.point3DListtoMemorySafeCoordList([coordlist])
            except:
                memsafelist = wgb.point3DListtoMemorySafeCoordList(coordlist)
            # outward normal of the first loop, used to derive the tilt
            normal = v.Vector.GetMemRHR(memsafelist[0])
            #get tilt
            tilt=gbx.prod.FindTilt(normal)
            if(tilt == 180):
                print 'found a floor!'
                # floor elevation taken from the first coordinate's z value
                z = coordlist[0][2]
                print z
                #then we have found the floor
                area = v.Vector.GetAreaofMemSafeCoords(memsafelist[0])
                logging.info('Successfully found floor and calculated area.')
                return area, z
def makeLevelCoords(self,zheight):
coordinates = []
coordinate1=[0,0,zheight]
coordinates.append(coordinate1)
coordinate2=[-10,0,zheight]
coordinates.append(coordinate2)
coordinate3=[-10,-10,zheight]
coordinates.append(coordinate3)
return coordinates
    def isItSquare(self,memsafelist):
        """Test whether a 4-point loop has all right-angle corners.

        Returns (sqrnt, ht, wid): sqrnt is True when every tested corner is
        perpendicular; ht/wid are the magnitudes of the first two distinct
        edge vectors (both 0 when the loop does not have exactly 4 points).
        NOTE(review): despite the name this checks rectangularity, not equal
        side lengths.
        """
        logging.info('Finding if the surface is a square.')
        sqrnt = False
        perps = []
        ht = 0
        wid = 0
        if len(memsafelist) != 4:
            return sqrnt,ht,wid
        else:
            for ct,coord in enumerate(memsafelist):
                if(ct < len(memsafelist)-2):
                    v1 = v.Vector.CreateMemorySafe_Vector(memsafelist[ct],memsafelist[ct+1])
                    if ht != v.Vector.VectorMagnitude(v1):
                        ht = v.Vector.VectorMagnitude(v1)
                    v2 = v.Vector.CreateMemorySafe_Vector(memsafelist[ct+1],memsafelist[ct+2])
                    if wid != v.Vector.VectorMagnitude(v2):
                        wid = v.Vector.VectorMagnitude(v2)
                    # zero dot product => this corner is a right angle
                    dot = v.Vector.DotProductMag(v1,v2)
                    if(dot == 0):
                        perps.append(1)
                    else:
                        perps.append(2)
                elif (ct == len(memsafelist) - 2):
                    # closing corner: last edge against the edge back to point 0
                    v1 = v.Vector.CreateMemorySafe_Vector(memsafelist[ct],memsafelist[ct+1])
                    v2 = v.Vector.CreateMemorySafe_Vector(memsafelist[ct+1],memsafelist[0])
                    dot = v.Vector.DotProductMag(v1,v2)
                    if(dot == 0):
                        perps.append(1)
                    else:
                        perps.append(2)
            if 2 in perps:
                # at least one corner was not perpendicular
                return sqrnt, ht, wid
            else:
                sqrnt = True
                return sqrnt, ht, wid
    def writeShellGeo(self, surfaces, space, namingMethod = 0):
        """Build the gbXML ShellGeometry/ClosedShell for *space* from *surfaces*.

        Each surface is assumed to be a single (non-meshed), zero-thickness
        polygon; one PolyLoop is emitted per surface.  Returns the space with
        its ShellGeo attribute populated.
        """
        logging.info('Writing gb shell geometry.')
        # generate gbXML Shell Geometry (for now assume zero thickness
        # assume that each surface defines a single surface (not meshed)
        sg = gbx.ShellGeometry()
        sg.unit = gbx.lengthUnitEnum.Meters
        sg.id = "sg"+space.Name
        print sg.id
        #make closed shell
        cs = gbx.ClosedShell()
        #put polyloops in closed shell
        totsurfcount = 0
        totsurfaces=[]
        for surfcount, surface in enumerate(surfaces):
            #get list of point for the surface from the HoneyBee Surface
            coordinatesList = surface.coordinates ##extractPoints()
            #not meshed: wrap so each entry is a list of loops
            coordinatesList = [coordinatesList]
            totsurfcount+=1
            totsurfaces.append(coordinatesList)
            #print "notmeshed"
            #print len(coordinatesList)
        #print totsurfcount
        #print len(totsurfaces)
        # one PolyLoop slot per collected surface
        cs.PolyLoops = gbx.prod.makePolyLoopArray(totsurfcount)
        for plcount, allsurf in enumerate(totsurfaces):
            #get list of point for the surface from the HoneyBee Surface
            coordinatesList = allsurf
            # print coordinatesList
            if not isinstance(coordinatesList[0], list) and not isinstance(coordinatesList[0], tuple):
                coordinatesList = [coordinatesList]
            for count, coordinates in enumerate(coordinatesList):
                #print "coords",coordinates
                cs.PolyLoops[plcount].Points = gbx.BasicSerialization.makeCartesianPtArray(len(coordinates));
                for ptcount,pt in enumerate(coordinates):
                    #print pt
                    cp = wgb.makegbCartesianPt(pt)
                    #for the list holding all surface polyloops, 1 point = cp
                    cs.PolyLoops[plcount].Points[ptcount] = cp
        sg.ClosedShell = cs
        space.ShellGeo = sg
        logging.debug('Successfully created shell geometry for space.'+sg.id)
        return space
def EPSCHStr(self, gb, scheduleName,ct,wknmdict):
logging.debug('Making schedules for gb node')
scheduleName = scheduleName.upper()
#try:
scheduleData = None
if scheduleName in sc.sticky ["honeybee_ScheduleLib"].keys():
scheduleData = sc.sticky ["honeybee_ScheduleLib"][scheduleName]
elif scheduleName in sc.sticky ["honeybee_ScheduleTypeLimitsLib"].keys():
scheduleData = sc.sticky["honeybee_ScheduleTypeLimitsLib"][scheduleName]
if scheduleData!=None:
numberOfLayers = len(scheduleData.keys())
scheduleStr = scheduleData[0] + ",\n"
#break this down with a regex to figure out if it is a year, or what
m = re.match('(.*)(:)(.*)',scheduleData[0])
if m:
if (m.group(3) == "Year"):
logging.info('Found a regex match for year:'+m.group(3))
yrs = gbx.Schedule()
gb.Schedule[ct] = yrs
yrs.id = scheduleName.replace(" ","_")
startdate = ''
enddate = ''
yrarr = []
marr = []
mar = []
for layer in range(1, numberOfLayers):
d = scheduleData[layer][1]
if(d == '- Schedule Type Limits Name'):
#assign schedule type
mt = re.match('(Temperature)(\d*)',scheduleData[layer][0])
if mt:
stype = mt.group(1)
stype = stype.strip()
yrs.type = wgb.assignScheduleTypes(stype)
else:
stype = scheduleData[layer][0]
yrs.type = wgb.assignScheduleTypes(stype)
elif (re.match("(- Start Month)(.*)",d)):
logging.info('Found Startmonth of honeybee object string.')
startdate = scheduleData[layer][0]+'-'
elif (re.match("(- Start Day)(.*)",d)):
startdate = startdate + scheduleData[layer][0]
marr.append(startdate)
elif (re.match("(- End Month)(.*)",d)):
logging.info('Found Endmonth of honeybee object string')
enddate = scheduleData[layer][0]+'-'
elif (re.match("(- End Day)(.*)",d)):
enddate = enddate + scheduleData[layer][0]
marr.append(enddate)
mar = copy.deepcopy(marr)
yrarr.append(mar)
marr=[]
elif(re.match('(.*)(:)(.*)',scheduleData[layer][1])):
m = re.match('(.*)(:)(.*)',scheduleData[layer][1])
wk = re.match('(Week Name)(.*)',m.group(3))
if wk:
logging.info('Found weekly sch id associated with start and stops.')
wknum = wk.group(2)
wks = re.match('(.*)({)(.*)(})',scheduleData[layer][0])
if wks:
ws=gbx.WeekScheduleId()
#print wks.group(3)
wknms.append(wks.group(3))
ws.weekScheduleIdRef = 'Week-'+str(wknms.index(wks.group(3)))
#need this to properly assign the week schedule id
marr.append(ws)
yearsched = gbx.BasicSerialization.setYearScheduleArray(len(yrarr))
yrs.YearSchedule = yearsched
yrschnames = []
uniqueint=0
"""
for i,y in enumerate(yrarr):
#these had to be removed because dates cannot be strings
#bd.val = y[1]
#ed.val = y[2]
yrsch = gbx.YearSchedule()
yrs.YearSchedule[i] = yrsch
schednm = scheduleName.replace(" ","_")+str(uniqueint)
if schednm in yrschnames:
uniqueint=uniqueint+1
schednm = scheduleName.replace(" ","_")+str(uniqueint)
yrsch.id = schednm
yrschnames.append(schednm)
bd = gbx.BeginDate()
ed = gbx.EndDate()
begmatch = re.match(r'(\d+)(-)(\d+)',y[1])
if begmatch:
provider = CultureInfo.InvariantCulture
month = begmatch.group(1)
day = begmatch.group(3)
yr | |
# -*- coding: utf-8 -*-
""" Edit history
Author : yda
Date : 2020-11-12
Package name changed - asammdf to mdfstudio
"""
import logging
from textwrap import fill
import numpy as np
from numpy.core.defchararray import encode
from .blocks import v2_v3_blocks as v3b
from .blocks import v4_blocks as v4b
from .blocks.conversion_utils import from_dict
from .blocks.source_utils import Source
from .blocks.utils import extract_cncomment_xml, MdfException
from .version import __version__
logger = logging.getLogger("mdfstudio")
class Signal(object):
"""
The *Signal* represents a channel described by it's samples and timestamps.
It can perform arithmetic operations against other *Signal* or numeric types.
The operations are computed in respect to the timestamps (time correct).
The non-float signals are not interpolated, instead the last value relative
to the current timestamp is used.
*samples*, *timestamps* and *name* are mandatory arguments.
Parameters
----------
samples : numpy.array | list | tuple
signal samples
timestamps : numpy.array | list | tuple
signal timestamps
unit : str
signal unit
name : str
signal name
conversion : dict | channel conversion block
dict that contains extra conversion information about the signal ,
default *None*
comment : str
signal comment, default ''
raw : bool
signal samples are raw values, with no physical conversion applied
master_metadata : list
master name and sync type
display_name : str
display name used by mdf version 3
attachment : bytes, name
channel attachment and name from MDF version 4
source : Source
source information named tuple
bit_count : int
bit count; useful for integer channels
stream_sync : bool
the channel is a synchronisation for the attachment stream (mdf v4 only)
invalidation_bits : numpy.array | None
channel invalidation bits, default *None*
encoding : str | None
encoding for string signals; default *None*
"""
def __init__(
self,
samples=None,
timestamps=None,
unit="",
name="",
conversion=None,
comment="",
raw=True,
master_metadata=None,
display_name="",
attachment=(),
source=None,
bit_count=None,
stream_sync=False,
invalidation_bits=None,
encoding=None,
group_index=-1,
channel_index=-1,
):
if samples is None or timestamps is None or not name:
message = (
'"samples", "timestamps" and "name" are mandatory '
"for Signal class __init__: samples={samples}\n"
"timestamps={timestamps}\nname={name}"
)
raise MdfException(message)
else:
if not isinstance(samples, np.ndarray):
samples = np.array(samples)
if samples.dtype.kind == "U":
if encoding is None:
encodings = ["utf-8", "latin-1"]
else:
encodings = [encoding, "utf-8", "latin-1"]
for encoding in encodings:
try:
samples = encode(samples, encoding)
break
except:
continue
else:
samples = encode(samples, encodings[0], errors="ignore")
if not isinstance(timestamps, np.ndarray):
timestamps = np.array(timestamps, dtype=np.float64)
if samples.shape[0] != timestamps.shape[0]:
message = "{} samples and timestamps length mismatch ({} vs {})"
message = message.format(name, samples.shape[0], timestamps.shape[0])
logger.exception(message)
raise MdfException(message)
self.samples = samples
self.timestamps = timestamps
self.unit = unit
self.name = name
self.comment = comment
self._plot_axis = None
self.raw = raw
self.master_metadata = master_metadata
self.display_name = display_name
self.attachment = attachment
self.encoding = encoding
self.group_index = group_index
self.channel_index = channel_index
self.color = None
if source:
if not isinstance(source, Source):
source = Source.from_source(source)
self.source = source
if bit_count is None:
self.bit_count = samples.dtype.itemsize * 8
else:
self.bit_count = bit_count
self.stream_sync = stream_sync
if invalidation_bits is not None:
if not isinstance(invalidation_bits, np.ndarray):
invalidation_bits = np.array(invalidation_bits)
if invalidation_bits.shape[0] != samples.shape[0]:
message = (
"{} samples and invalidation bits length mismatch ({} vs {})"
)
message = message.format(
name, samples.shape[0], invalidation_bits.shape[0]
)
logger.exception(message)
raise MdfException(message)
self.invalidation_bits = invalidation_bits
if conversion:
if not isinstance(
conversion, (v4b.ChannelConversion, v3b.ChannelConversion)
):
conversion = from_dict(conversion)
self.conversion = conversion
    def __repr__(self):
        """Return a multi-line, human-readable dump of the signal's fields."""
        return f"""<Signal {self.name}:
\tsamples={self.samples}
\ttimestamps={self.timestamps}
\tinvalidation_bits={self.invalidation_bits}
\tunit="{self.unit}"
\tconversion={self.conversion}
\tsource={self.source}
\tcomment="{self.comment}"
\tmastermeta="{self.master_metadata}"
\traw={self.raw}
\tdisplay_name={self.display_name}
\tattachment={self.attachment}>
    """
def plot(self, validate=True, index_only=False):
"""plot Signal samples. Pyqtgraph is used if it is available; in this
case see the GUI plot documentation to see the available commands
Parameters
----------
validate (True): bool
apply the invalidation bits
index_only (False) : bool
use index based X axis. This can be useful if the master (usually
time based) is corrupted with NaN, inf or if it is not strictly
increasing
"""
try:
from .gui.plot import plot
plot(self, validate=True, index_only=False)
return
except:
try:
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider
except ImportError:
logging.warning("Signal plotting requires pyqtgraph or matplotlib")
return
if len(self.samples.shape) <= 1 and self.samples.dtype.names is None:
fig = plt.figure()
fig.canvas.set_window_title(self.name)
fig.text(
0.95,
0.05,
f"mdfstudio {__version__}",
fontsize=8,
color="red",
ha="right",
va="top",
alpha=0.5,
)
name = self.name
if self.comment:
comment = self.comment.replace("$", "")
comment = extract_cncomment_xml(comment)
comment = fill(comment, 120).replace("\\n", " ")
title = f"{name}\n({comment})"
plt.title(title)
else:
plt.title(name)
try:
if not self.master_metadata:
plt.xlabel("Time [s]")
plt.ylabel(f"[{self.unit}]")
plt.plot(self.timestamps, self.samples, "b")
plt.plot(self.timestamps, self.samples, "b.")
plt.grid(True)
plt.show()
else:
master_name, sync_type = self.master_metadata
if sync_type in (0, 1):
plt.xlabel(f"{master_name} [s]")
elif sync_type == 2:
plt.xlabel(f"{master_name} [deg]")
elif sync_type == 3:
plt.xlabel(f"{master_name} [m]")
elif sync_type == 4:
plt.xlabel(f"{master_name} [index]")
plt.ylabel(f"[{self.unit}]")
plt.plot(self.timestamps, self.samples, "b")
plt.plot(self.timestamps, self.samples, "b.")
plt.grid(True)
plt.show()
except ValueError:
plt.close(fig)
else:
try:
names = self.samples.dtype.names
if self.samples.dtype.names is None or len(names) == 1:
if names:
samples = self.samples[names[0]]
else:
samples = self.samples
shape = samples.shape[1:]
fig = plt.figure()
fig.canvas.set_window_title(self.name)
fig.text(
0.95,
0.05,
f"mdfstudio {__version__}",
fontsize=8,
color="red",
ha="right",
va="top",
alpha=0.5,
)
if self.comment:
comment = self.comment.replace("$", "")
plt.title(f"{self.name}\n({comment})")
else:
plt.title(self.name)
ax = fig.add_subplot(111, projection="3d")
# Grab some test data.
X = np.array(range(shape[1]))
Y = np.array(range(shape[0]))
X, Y = np.meshgrid(X, Y)
Z = samples[0]
# Plot a basic wireframe.
self._plot_axis = ax.plot_wireframe(X, Y, Z, rstride=1, cstride=1)
# Place Sliders on Graph
ax_a = plt.axes([0.25, 0.1, 0.65, 0.03])
# Create Sliders & Determine Range
sa = Slider(
ax_a,
"Time [s]",
self.timestamps[0],
self.timestamps[-1],
valinit=self.timestamps[0],
)
def update(val):
self._plot_axis.remove()
idx = np.searchsorted(self.timestamps, sa.val, side="right")
Z = samples[idx - 1]
self._plot_axis = ax.plot_wireframe(
X, Y, Z, rstride=1, cstride=1
)
fig.canvas.draw_idle()
sa.on_changed(update)
plt.show()
else:
fig = plt.figure()
fig.canvas.set_window_title(self.name)
fig.text(
0.95,
0.05,
f"mdfstudio {__version__}",
fontsize=8,
color="red",
ha="right",
va="top",
alpha=0.5,
)
if self.comment:
comment = self.comment.replace("$", "")
plt.title(f"{self.name}\n({comment})")
else:
plt.title(self.name)
ax = fig.add_subplot(111, projection="3d")
samples = self.samples[names[0]]
axis1 = self.samples[names[1]]
axis2 = self.samples[names[2]]
# Grab some test data.
X, Y = np.meshgrid(axis2[0], axis1[0])
Z = samples[0]
# Plot a basic wireframe.
self._plot_axis = ax.plot_wireframe(X, Y, Z, rstride=1, cstride=1)
# Place Sliders on Graph
ax_a = plt.axes([0.25, 0.1, 0.65, 0.03])
# Create Sliders & Determine Range
sa = Slider(
ax_a,
"Time [s]",
self.timestamps[0],
self.timestamps[-1],
valinit=self.timestamps[0],
)
def update(val):
self._plot_axis.remove()
idx = np.searchsorted(self.timestamps, sa.val, side="right")
Z = samples[idx - 1]
X, Y = np.meshgrid(axis2[idx - 1], axis1[idx - 1])
self._plot_axis = ax.plot_wireframe(
X, Y, Z, rstride=1, cstride=1
)
fig.canvas.draw_idle()
sa.on_changed(update)
plt.show()
except Exception as err:
print(err)
def cut(self, start=None, stop=None, include_ends=True, interpolation_mode=0):
"""
Cuts the signal according to the *start* and *stop* values, by using
the insertion indexes in the signal's *time* axis.
Parameters
----------
start : float
start timestamp for cutting
stop : float
stop timestamp for cutting
include_ends : bool
include the *start* and *stop* timestamps after cutting the signal.
If *start* and *stop* are found in the original timestamps, then
the new samples will be computed using interpolation. Default *True*
interpolation_mode : int
interpolation mode for integer signals; default 0
* 0 - repeat previous samples
* 1 - linear interpolation
Returns
-------
result : Signal
new *Signal* cut from the original
Examples
--------
>>> new_sig = old_sig.cut(1.0, 10.5)
>>> new_sig.timestamps[0], new_sig.timestamps[-1]
0.98, 10.48
"""
ends = (start, stop)
if len(self) == 0:
result = Signal(
np.array([], dtype=self.samples.dtype),
np.array([], dtype=self.timestamps.dtype),
self.unit,
self.name,
self.conversion,
self.comment,
self.raw,
self.master_metadata,
self.display_name,
self.attachment,
self.source,
self.bit_count,
self.stream_sync,
encoding=self.encoding,
group_index=self.group_index,
channel_index=self.channel_index,
)
elif start is None and stop is None:
# return the channel uncut
result = Signal(
self.samples.copy(),
self.timestamps.copy(),
self.unit,
self.name,
self.conversion,
self.comment,
self.raw,
self.master_metadata,
self.display_name,
self.attachment,
self.source,
self.bit_count,
self.stream_sync,
invalidation_bits=self.invalidation_bits.copy()
if self.invalidation_bits is not None
else None,
encoding=self.encoding,
group_index=self.group_index,
channel_index=self.channel_index,
)
else:
if start is None:
# cut from begining to stop
if stop < self.timestamps[0]:
result = Signal(
np.array([], dtype=self.samples.dtype),
np.array([], dtype=self.timestamps.dtype),
self.unit,
self.name,
self.conversion,
self.comment,
self.raw,
self.master_metadata,
self.display_name,
self.attachment,
self.source,
self.bit_count,
self.stream_sync,
encoding=self.encoding,
group_index=self.group_index,
channel_index=self.channel_index,
)
else:
stop = np.searchsorted(self.timestamps, stop, side="right")
if (
include_ends
and ends[-1] not in self.timestamps
and ends[-1] < self.timestamps[-1]
):
interpolated = self.interp(
[ends[1]], interpolation_mode=interpolation_mode
)
samples = np.append(
self.samples[:stop], interpolated.samples, axis=0
)
timestamps = np.append(self.timestamps[:stop], ends[1])
if self.invalidation_bits is not None:
invalidation_bits = np.append(
self.invalidation_bits[:stop],
interpolated.invalidation_bits,
)
else:
invalidation_bits = None
else:
samples = self.samples[:stop].copy()
timestamps = self.timestamps[:stop].copy()
if self.invalidation_bits | |
# Repository: ssebastianj/ia2013-tpi-rl
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
try:
import cdecimal as decimal
except ImportError:
import decimal
import csv
import logging
import multiprocessing
import psutil
import Queue
import random
import threading
import time
from PyQt4 import QtCore, QtGui
from info import app_info
# GUI
from gui.aboutdialog import AboutDialog
from gui.qtgen.mainwindow import Ui_MainWindow
from gui.codetailsdialog import ShowCODetailsDialog
from gui.gwgenrndestadosdialog import GWGenRndEstadosDialog
from gui.gwopcionesdialog import GWOpcionesDialog
from gui.matrizdialog import ShowMatrizDialog
# Core
from core.estado.estado import TIPOESTADO, TipoEstado
from core.gridworld.gridworld import GridWorld
from core.qlearning.qlearning import QLearning
from core.tecnicas.aleatorio import Aleatorio
from core.tecnicas.egreedy import EGreedy, Greedy
from core.tecnicas.softmax import Softmax
# Graphs
from graphs.avgrwds.worker import GraphRecompensasPromedioWorker
from graphs.sucessfuleps.worker import GraphSucessfulEpisodesWorker
# from graphs.itersep.worker import GraphIteracionesXEpisodioWorker
from graphs.matdiffs.worker import GraphMatrizDiffsWorker
from graphs.heatmaps.matrizr import ShowMatrizRHeatMap
from graphs.heatmaps.matrizq import ShowMatrizQHeatMap
# Tools
from tools.queue import get_item_from_queue
from tools.taskbar import taskbar
# Prefer PyQt4's QString.fromUtf8 for translatable strings; fall back to an
# identity function when the attribute is unavailable.
try:
    _tr = QtCore.QString.fromUtf8
except AttributeError:
    def _tr(s):
        return s
class MainWindow(QtGui.QMainWindow):
u"""
Clase heredada de QMainWindow encargada de mostrar la ventana principal de
la aplicación.
"""
def __init__(self):
super(MainWindow, self).__init__()
self.WMainWindow = Ui_MainWindow()
self.WMainWindow.setupUi(self)
title = QtCore.QString("Hola Mundo", fieldWidth='100')
self.setWindowTitle(title)
# Logging Config
logging.basicConfig(level=logging.DEBUG,
format="[%(levelname)s] – %(threadName)-10s : %(message)s")
self._logger = logging.getLogger()
self._logger.disabled = not app_info.__DEBUG__
# Freeze Support
self._logger.debug("Activar Freeze Support")
multiprocessing.freeze_support()
self._logger.debug("Cantidad de CPUs: {0}"
.format(multiprocessing.cpu_count()))
self._init_vars()
self._initialize_window()
    def _init_vars(self):
        u"""
        Initialize the window-wide ('global') state variables.
        """
        self.estado_inicial = None
        self.estado_final = None
        self.pre_estado_inicial = None
        self.pre_estado_final = None
        self.matriz_q = None
        self.mat_est_acc = None
        self.wnd_timer = None
        self.ql_entrenar_error_q = None
        self.ql_entrenar_out_q = None
        self.ql_recorrer_error_q = None
        self.ql_recorrer_out_q = None
        self.working_process = None
        self.worker_queues_list = None
        self.entrenar_is_running = False
        self.recorrer_is_running = False
        self.wnd_taskbar = None
        self.last_state_bkp = None
        self.last_state_bg = None
        self.last_state_text = None
        self.camino_optimo = None
        self.camino_optimo_active = False
        self.camino_optimo_start = None
        self.camino_optimo_end = None
        self.camino_optimo_cursor = None
        self.worker_paused = False
        self.working_process_mng = None
        self.q_vals_co = None
        # Variables used by the graph workers
        self.graph_recompensas_promedio = None
        self.graph_episodios_finalizados = None
        # self.graph_iters_por_episodio = None
        self.graph_mat_diff = None
        # Action-selection policies (index -> display name)
        self.tecnicas = { # 0: "Greedy",
                         1: "ε-Greedy",
                         2: "Softmax",
                         # 3: "Aleatorio"
                         }
        # Available GridWorld dimensions
        self.gw_dimensiones = [ # "3 x 3", "4 x 4", "5 x 5",
                               "6 x 6", "7 x 7", "8 x 8", "9 x 9", "10 x 10"]
        # General configuration (item rendering, state types, optimal path,
        # heatmap options)
        self.window_config = {"item":
                              {"show_tooltip": True,
                               "menu_estado":
                               {"ocultar_tipos":
                                [TIPOESTADO.AGENTE],
                                "enabled": True
                                },
                               "size": 40},
                              "gw":
                              {"entrenamiento": {"actual_state": {"show": True, "color": "#000000", "icono": None},
                                                 "recompfinalauto": True,
                                                 "maxitersreached": {"action": 1, "warn": False}
                                                 },
                               "recorrido": {"actual_state": {"show": True, "color": "#000000", "icono": None},
                                             "maxitersreached": {"action": 0, "warn": False}},
                               },
                              "tipos_estados":
                              {0: TipoEstado(0, None, _tr("Inicial"), _tr("I"), "#FF5500", None),
                               1: TipoEstado(1, 1000, _tr("Final"), _tr("F"), "#00AB00", None),
                               2: TipoEstado(2, None, _tr("Agente"), _tr("A"), "#474747",
                                             QtGui.QIcon(QtGui.QPixmap(":/iconos/Agente_1.png"))),
                               3: TipoEstado(3, 0, _tr("Neutro"), _tr("N"), "#FFFFFF", None),
                               4: TipoEstado(4, 100, _tr("Excelente"), _tr("E"), "#BB0011", None),
                               5: TipoEstado(5, 50, _tr("Bueno"), _tr("B"), "#4F0ACC", None),
                               6: TipoEstado(6, -50, _tr("Malo"), _tr("M"), "#EB00A1", None),
                               7: TipoEstado(7, None, _tr("Pared"), _tr("P"), "#000000", None),
                               },
                              "opt_path":
                              {"color": "#55FF00",
                               "pintar_inicial": False,
                               "pintar_final": False,
                               "delay": 0,
                               "show_icon": False
                               },
                              "exponentes_final": {6: 13,
                                                   7: 18,
                                                   8: 20,
                                                   9: 29,
                                                   10: 32
                                                   },
                              "heatmap": {"interpolation": "nearest"}
                              }
    def _initialize_window(self):
        u"""
        Initialize the window's appearance and widget state.
        """
        # Main window geometry
        screen_geometry = QtGui.QApplication.desktop().screenGeometry()
        y_wnd = (screen_geometry.height() - self.height()) / 2.0
        x_wnd = (screen_geometry.width() - self.width()) / 2.0
        # Center the window on screen
        self.move(x_wnd, y_wnd)
        # NOTE(review): this replaces *all* window flags with
        # WindowSoftkeysVisibleHint (a mobile-platform hint) — confirm intended.
        self.setWindowFlags(QtCore.Qt.WindowSoftkeysVisibleHint)
        # Configure the status bar
        # ---------------------------------------------------------------------
        self.lbl_process_stat = QtGui.QLabel()
        self.lbl_process_stat.setFixedWidth(80)
        self.lbl_process_stat.setAlignment(QtCore.Qt.AlignHCenter)
        self.WMainWindow.statusBar.addPermanentWidget(self.lbl_process_stat)
        # Add the training progress bar
        self._ent_progress_bar = QtGui.QProgressBar()
        self.WMainWindow.statusBar.addPermanentWidget(self._ent_progress_bar)
        self._ent_progress_bar.setFixedSize(340, 14)
        self._ent_progress_bar.setFormat(_tr(" %p% / %m episodios"))
        self._ent_progress_bar.setVisible(False)
        self.lbl_nro_estado = QtGui.QLabel()
        self.lbl_nro_estado.setFixedWidth(40)
        self.lbl_nro_estado.setAlignment(QtCore.Qt.AlignHCenter)
        self.WMainWindow.statusBar.addPermanentWidget(self.lbl_nro_estado)
        # Add a label showing the current coordinates
        self.lbl_item_actual = QtGui.QLabel()
        self.lbl_item_actual.setFixedWidth(110)
        self.lbl_item_actual.setAlignment(QtCore.Qt.AlignHCenter)
        self.WMainWindow.statusBar.addPermanentWidget(self.lbl_item_actual)
        self.lbl_tipo_est_actual = QtGui.QLabel()
        self.lbl_tipo_est_actual.setFixedWidth(80)
        self.lbl_tipo_est_actual.setAlignment(QtCore.Qt.AlignHCenter)
        self.WMainWindow.statusBar.addPermanentWidget(self.lbl_tipo_est_actual)
        self.lbl_rec_estado = QtGui.QLabel()
        self.lbl_rec_estado.setFixedWidth(80)
        self.lbl_rec_estado.setAlignment(QtCore.Qt.AlignHCenter)
        self.WMainWindow.statusBar.addPermanentWidget(self.lbl_rec_estado)
        # ---------------------------------------------------------------------
        self.WMainWindow.tblGridWorld.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.WMainWindow.tblGridWorld.setSortingEnabled(False)
        self.WMainWindow.tblGridWorld.setMouseTracking(True)
        # Disable every control that needs a trained agent or running worker
        self.WMainWindow.btnTerminarProceso.setEnabled(False)
        self.WMainWindow.btnRecorrer.setEnabled(False)
        self.WMainWindow.actionAgenteRecorrer.setDisabled(True)
        self.WMainWindow.actionAgenteCancelar.setDisabled(True)
        self.WMainWindow.btnMostrarMatrizQ.setDisabled(True)
        self.WMainWindow.btnMatrizQVerHM.setDisabled(True)
        self.WMainWindow.btnMostrarMatrizR.setDisabled(True)
        self.WMainWindow.btnMatrizRVerHM.setDisabled(True)
        self.WMainWindow.lblCantMaxIteraciones.setDisabled(True)
        self.WMainWindow.sbCantMaxIteraciones.setDisabled(True)
        self.WMainWindow.gbCOAcciones.setDisabled(True)
        self.WMainWindow.lblMatQDiff.setDisabled(True)
        self.WMainWindow.lblMatQIntervalo.setDisabled(True)
        self.WMainWindow.sbIntervaloDiffCalc.setDisabled(True)
        self.WMainWindow.sbMatricesMinDiff.setDisabled(True)
        self.WMainWindow.btnGWGenerarEstados.setVisible(False)
        self.WMainWindow.btnPausar.setDisabled(True)
        self.WMainWindow.actionAgentePausar.setDisabled(True)
        self.WMainWindow.btnCOAdelante.setDisabled(True)
        self.WMainWindow.btnCOAtras.setDisabled(True)
        # Assign shortcuts
        entrenar_shortcut = "F5"
        recorrer_shortcut = "F6"
        pausar_shortcut = "F7"
        cancelar_shortcut = "Esc"
        self.WMainWindow.btnEntrenar.setShortcut(QtGui.QKeySequence(entrenar_shortcut))
        self.WMainWindow.btnRecorrer.setShortcut(QtGui.QKeySequence(recorrer_shortcut))
        self.WMainWindow.btnPausar.setShortcut(QtGui.QKeySequence(pausar_shortcut))
        self.WMainWindow.btnTerminarProceso.setShortcut(QtGui.QKeySequence(cancelar_shortcut))
        self.WMainWindow.btnEntrenar.setToolTip("<html><head/><body><p>\
                                        Entrenar agente \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(entrenar_shortcut))
        self.WMainWindow.btnRecorrer.setToolTip("<html><head/><body><p>\
                                        Explotar conocimiento \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(recorrer_shortcut))
        self.WMainWindow.btnTerminarProceso.setToolTip("<html><head/><body><p>\
                                        Detener proceso \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(cancelar_shortcut))
        self.WMainWindow.btnPausar.setToolTip("<html><head/><body><p>\
                                        Pausar proceso \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(pausar_shortcut))
        # Assign shortcuts
        mostrar_mat_r_sc = "Ctrl+R"
        mostrar_mat_q_sc = "Ctrl+Q"
        self.WMainWindow.btnMostrarMatrizR.setShortcut(QtGui.QKeySequence(mostrar_mat_r_sc))
        self.WMainWindow.btnMostrarMatrizQ.setShortcut(QtGui.QKeySequence(mostrar_mat_q_sc))
        self.WMainWindow.btnMostrarMatrizR.setToolTip("<html><head/><body><p>\
                                        Mostrar matriz de recompensas \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(mostrar_mat_r_sc))
        self.WMainWindow.btnMostrarMatrizQ.setToolTip("<html><head/><body><p>\
                                        Mostrar matriz Q \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(mostrar_mat_q_sc))
        generar_estados_rnd_sc = "Ctrl+G"
        generar_estados_rnd_fast_sc = "Ctrl+Shift+G"
        self.WMainWindow.btnGWGenerarEstados.setShortcut(QtGui.QKeySequence(generar_estados_rnd_sc))
        self.WMainWindow.btnGenEstRndRapida.setShortcut(QtGui.QKeySequence(generar_estados_rnd_fast_sc))
        self.WMainWindow.btnGWGenerarEstados.setToolTip(u"<html><head/><body><p>\
                                        Generar estados aleatorios \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(generar_estados_rnd_sc))
        self.WMainWindow.btnGenEstRndRapida.setToolTip(u"<html><head/><body><p>\
                                        Generar estados y dimensión aleatorios (incluyendo al Estado Final) \
                                        <span style='font-size:7pt;'>\
                                        {0}</span></p></body></html>"
                                        .format(generar_estados_rnd_fast_sc))
        self.setMouseTracking(True)
        # Build the menus
        self.generar_menu_pruebas()
        self.generar_menu_hm_ip()
        self.generar_menu_bloqueo()
        # self.generar_menu_edicion()
        # self.generar_menu_estadisticas()
        self.inicializar_todo()
        # Connect signals
        self._set_window_signals()
def convert_dimension(self, dim_str):
u"""
Devuelve una tupla conteniendo el ancho y alto del GridWorld.
:param dim_str: Cadena en forma {Ancho} x {Alto} representando la dimensión
"""
dimension = str(dim_str)
dimension = dimension.lower()
dimension = dimension.split("x")
return (int(dimension[0]), int(dimension[1]))
    def set_gw_dimension(self, dimension):
        u"""
        Resize tblGridWorld to the selected dimension and reset all states
        to Neutral.

        :param dimension: GridWorld dimension, as a "{width} x {height}" string.
        """
        # Disable the R matrix views
        self.WMainWindow.btnMostrarMatrizR.setDisabled(True)
        self.WMainWindow.btnMatrizRVerHM.setDisabled(True)
        # Disable the Q matrix views and the play mode
        self.WMainWindow.btnMostrarMatrizQ.setDisabled(True)
        self.WMainWindow.btnMatrizQVerHM.setDisabled(True)
        self.WMainWindow.btnRecorrer.setDisabled(True)
        # Disable the optimal-path controls
        self.WMainWindow.gbCOAcciones.setDisabled(True)
        self.WMainWindow.gbCOAvance.setDisabled(True)
        # Get the GridWorld width and height
        self._logger.debug("Dimensión: {0}".format(dimension))
        ancho_gw, alto_gw = self.convert_dimension(dimension)
        # Create a new GridWorld with the given width and height
        self.gridworld = GridWorld(ancho_gw,
                                   alto_gw,
                                   self.window_config["tipos_estados"],
                                   None,
                                   [TIPOESTADO.PARED]
                                   )
        # Refresh the final reward value
        self.calcular_recompensa_final()
        idx_tecnica = self.WMainWindow.cbQLTecnicas.currentIndex()
        # Refresh the minimum gamma value (technique id 2 = Softmax)
        if self.WMainWindow.cbQLTecnicas.itemData(idx_tecnica).toInt()[0] == 2:
            self.calcular_gamma_minimo()
        ancho_estado_px = self.window_config["item"]["size"]
        ancho_gw_px = ancho_estado_px * ancho_gw
        # Set the table's visual properties
        self.WMainWindow.tblGridWorld.setRowCount(alto_gw)
        self.WMainWindow.tblGridWorld.setColumnCount(ancho_gw)
        self.WMainWindow.tblGridWorld.horizontalHeader().setDefaultSectionSize(ancho_estado_px)
        self.WMainWindow.tblGridWorld.horizontalHeader().setResizeMode(QtGui.QHeaderView.Fixed)
        self.WMainWindow.tblGridWorld.verticalHeader().setDefaultSectionSize(ancho_estado_px)
        self.WMainWindow.tblGridWorld.verticalHeader().setResizeMode(QtGui.QHeaderView.Fixed)
        self.WMainWindow.tblGridWorld.setCursor(QtCore.Qt.PointingHandCursor)
        ancho_contenedor = ancho_gw_px + self.WMainWindow.tblGridWorld.verticalHeader().width() + 1
        alto_contenedor = ancho_gw_px + self.WMainWindow.tblGridWorld.horizontalHeader().height() + 1
        self.WMainWindow.tblGridWorld.setFixedSize(ancho_contenedor, alto_contenedor)
        # self.WMainWindow.tblGridWorld.setSelectionMode(QtGui.QAbstractItemView.MultiSelection)
        # Reset the start/end states
        self.estado_final = None
        self.estado_inicial = None
        # Redraw the GridWorld states on screen
        self.recargar_estados()
        self.WMainWindow.btnMostrarMatrizR.setEnabled(True)
        self.WMainWindow.btnMatrizRVerHM.setEnabled(True)
def _set_window_signals(self):
u"""
Establece las señales correspondientes a los controles
"""
self.WMainWindow.actionAppSalir.triggered.connect(self.exit)
# Cambia la Dimensión del GridWorld al seleccionar la dimensión en el ComboBox
self.WMainWindow.cbGWDimension.currentIndexChanged.connect(self.set_gw_dimension_cb)
# Cambia el Tipo de Estado al clickear un casillero del tblGridWorld
self.WMainWindow.tblGridWorld.cellClicked[int, int].connect(self.switch_tipo_estado)
# Empieza el Entrenamiento al clickear el btnEntrenar
self.WMainWindow.btnEntrenar.clicked.connect(self.entrenar)
# Interrumpe el Entrenamiento al clickear el btnTerminarTraining
self.WMainWindow.btnTerminarProceso.clicked.connect(self.terminar_proceso)
# Muestra sólo los parámetros utilizados en la técnica seleccionada en el ComboBox
self.WMainWindow.cbQLTecnicas.currentIndexChanged.connect(self.parametros_segun_tecnica)
# Al hacer clic derecho sobre un item del GridWorld
self.WMainWindow.tblGridWorld.customContextMenuRequested.connect(self.show_item_menu)
self.WMainWindow.btnInicializarTodo.clicked.connect(self.inicializar_todo)
self.WMainWindow.btnRecorrer.clicked.connect(self.recorrer_gw)
# Emite cuando se coloca el cursor del mouse sobre un ítem
self.WMainWindow.tblGridWorld.itemEntered.connect(self.mostrar_info_est_status_bar)
self.WMainWindow.menuGridWorld.aboutToShow.connect(self.generar_menu_dimensiones)
self.WMainWindow.menuQLearning.aboutToShow.connect(self.generar_menu_tecnicas)
self.WMainWindow.menuGridWorld.triggered.connect(self.set_gw_dimension_menu)
self.WMainWindow.menuQLearning.triggered.connect(self.parametros_segun_tecnica_menu)
self.WMainWindow.actionAcercaDe.triggered.connect(self.mostrar_dialogo_acerca)
self.WMainWindow.btnGWGenerarEstados.clicked.connect(self.mostrar_gen_rnd_estados_dialog)
self.WMainWindow.btnInicializarGW.clicked.connect(self.refresh_gw)
self.WMainWindow.btnInicializarValoresQL.clicked.connect(self.inicializar_ql_vals)
self.WMainWindow.actionInicializarTodo.triggered.connect(self.inicializar_todo)
self.WMainWindow.btnGWOpciones.clicked.connect(self.mostrar_opciones_gw)
self.WMainWindow.actionAgenteEntrenar.triggered.connect(self.entrenar)
self.WMainWindow.actionAgenteRecorrer.triggered.connect(self.recorrer_gw)
self.WMainWindow.actionAgenteCancelar.triggered.connect(self.terminar_proceso)
self.WMainWindow.btnMostrarMatrizQ.clicked.connect(self.show_matriz_q)
self.WMainWindow.btnMostrarMatrizR.clicked.connect(self.show_matriz_r)
self.WMainWindow.btnCOShowHide.clicked.connect(self.show_hide_camino_optimo)
self.WMainWindow.btnGenEstRndRapida.clicked.connect(lambda: self.refresh_gw_random(True, True))
self.WMainWindow.sbQLGamma.valueChanged.connect(self.calcular_recompensa_final)
self.WMainWindow.menuEstadisticas.triggered.connect(self.show_estadisticas)
self.WMainWindow.menuEstadisticas.aboutToShow.connect(self.generar_menu_estadisticas)
self.WMainWindow.sbQLTau.editingFinished.connect(self.calcular_gamma_minimo)
self.WMainWindow.btnPausar.clicked.connect(self.pausar_reanudar_proceso)
self.WMainWindow.actionAgentePausar.triggered.connect(self.pausar_reanudar_proceso)
self.WMainWindow.btnCOAdelante.clicked.connect(self.estado_co_next)
self.WMainWindow.btnCOAtras.clicked.connect(self.estado_co_back)
self.WMainWindow.btnMatrizQVerHM.clicked.connect(self.mostrar_matrizq_hm)
self.WMainWindow.btnMatrizRVerHM.clicked.connect(self.mostrar_matrizr_hm)
self.WMainWindow.menuInterpolacion.triggered.connect(self.set_hm_interpolation)
self.WMainWindow.menuAnteBloqueo.triggered.connect(self.set_stop_action)
self.WMainWindow.btnCOVerDetalles.clicked.connect(self.mostrar_detalles_co)
def parametros_segun_tecnica(self, indice):
u"""
Muestra u oculta los parámetros en función de la técnica seleccionada.
:param tecnica: Técnica seleccionada
"""
# Obtener valor asociado al item seleccionado
key = self.WMainWindow.cbQLTecnicas.itemData(indice).toInt()[0]
if key == 0:
# Greedy
self.WMainWindow.lblTau.hide()
self.WMainWindow.sbQLTau.hide()
self.WMainWindow.lblEpsilon.show()
self.WMainWindow.sbQLEpsilon.show()
self.WMainWindow.sbQLEpsilon.setMinimum(0.00)
self.WMainWindow.sbQLEpsilon.setValue(0.00)
self.WMainWindow.sbQLEpsilon.setEnabled(False)
self.WMainWindow.chkDecrementarParam.setEnabled(True)
if self.WMainWindow.chkDecrementarParam.isChecked():
self.WMainWindow.sbCantEpisodiosDec.setEnabled(True)
self.WMainWindow.sbDecrementoVal.setEnabled(True)
elif key == 1:
# E-Greedy
self.WMainWindow.lblTau.hide()
self.WMainWindow.sbQLTau.hide()
self.WMainWindow.lblEpsilon.show()
self.WMainWindow.sbQLEpsilon.show()
self.WMainWindow.sbQLEpsilon.setMinimum(0.01)
self.WMainWindow.sbQLEpsilon.setEnabled(True)
self.WMainWindow.chkDecrementarParam.setEnabled(True)
if self.WMainWindow.chkDecrementarParam.isChecked():
self.WMainWindow.sbCantEpisodiosDec.setEnabled(True)
self.WMainWindow.sbDecrementoVal.setEnabled(True)
self.WMainWindow.sbDecrementoVal.setMaximum(0.99)
self.WMainWindow.sbDecrementoVal.setValue(0.01)
self.WMainWindow.sbQLGamma.setMinimum(0.01)
elif key == 2:
# Softmax
self.WMainWindow.lblEpsilon.hide()
self.WMainWindow.sbQLEpsilon.hide()
self.WMainWindow.lblTau.show()
self.WMainWindow.sbQLTau.show()
self.WMainWindow.chkDecrementarParam.setEnabled(True)
if self.WMainWindow.chkDecrementarParam.isChecked():
self.WMainWindow.sbCantEpisodiosDec.setEnabled(True)
self.WMainWindow.sbDecrementoVal.setEnabled(True)
self.WMainWindow.sbDecrementoVal.setMaximum(1000000000)
self.WMainWindow.sbDecrementoVal.setValue(20)
# Hack para calcular el gamma mínimo de acuerdo al hardware
self.calcular_gamma_minimo()
elif key == 3:
# Aleatorio
self.WMainWindow.lblTau.hide()
self.WMainWindow.sbQLTau.hide()
self.WMainWindow.lblEpsilon.show()
self.WMainWindow.sbQLEpsilon.show()
self.WMainWindow.sbQLEpsilon.setMinimum(1.00)
self.WMainWindow.sbQLEpsilon.setValue(1.00)
self.WMainWindow.sbQLEpsilon.setEnabled(False)
self.WMainWindow.chkDecrementarParam.setDisabled(True)
self.WMainWindow.sbCantEpisodiosDec.setDisabled(True)
self.WMainWindow.sbDecrementoVal.setDisabled(True)
def show_item_menu(self, posicion):
u"""
Muestra un menú contextual al hacer clic derecho sobre un item de la tabla
:param posicion: Posición relativa del item clickeado
"""
if not self.window_config["item"]["menu_estado"]["enabled"]:
return None
# Cachear acceso a métodos y atributos
ocultar_tipos_list = self.window_config["item"]["menu_estado"]["ocultar_tipos"]
alto = self.gridworld.alto
selected_items = self.WMainWindow.tblGridWorld.selectedItems()
cant_selected = len(selected_items)
tipos_estados = self.gridworld.tipos_estados
item_actual = self.WMainWindow.tblGridWorld.itemAt(posicion)
estado_actual = self.gridworld.get_estado(item_actual.row() + 1,
item_actual.column() + 1)
# Crear menu contextual para los items de la tabla
self.menu_item = QtGui.QMenu("Tipo de estado")
tipos_estados_group = QtGui.QActionGroup(self.WMainWindow.tblGridWorld)
for tipo in tipos_estados.values():
if tipo.ide not in ocultar_tipos_list:
# Verificar si el tipo de estado posee un ícono
if tipo.icono is None:
action = QtGui.QAction(tipo.nombre,
| |
# SWIG-generated registration for Handle_TopTools_HArray1OfListOfShape
# (class defined above this chunk). Do not edit by hand.
new_instancemethod(_TopTools.Handle_TopTools_HArray1OfListOfShape__kill_pointed,None,Handle_TopTools_HArray1OfListOfShape)
Handle_TopTools_HArray1OfListOfShape_swigregister = _TopTools.Handle_TopTools_HArray1OfListOfShape_swigregister
Handle_TopTools_HArray1OfListOfShape_swigregister(Handle_TopTools_HArray1OfListOfShape)
def Handle_TopTools_HArray1OfListOfShape_DownCast(*args):
  # NOTE(review): this def is immediately rebound to the raw C function on
  # the next line, so the Python wrapper is effectively dead code (generated).
  return _TopTools.Handle_TopTools_HArray1OfListOfShape_DownCast(*args)
Handle_TopTools_HArray1OfListOfShape_DownCast = _TopTools.Handle_TopTools_HArray1OfListOfShape_DownCast
class TopTools_HArray1OfShape(OCC.MMgt.MMgt_TShared):
    # SWIG-generated proxy for the C++ class TopTools_HArray1OfShape: a
    # handle-managed 1-D array of TopoDS_Shape indexed from Lower() to
    # Upper(). All methods delegate to the compiled _TopTools extension
    # module. Do not edit by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :rtype: None
        :param Low:
        :type Low: int
        :param Up:
        :type Up: int
        :param V:
        :type V: TopoDS_Shape &
        :rtype: None
        """
        _TopTools.TopTools_HArray1OfShape_swiginit(self,_TopTools.new_TopTools_HArray1OfShape(*args))
    def Init(self, *args):
        """
        :param V:
        :type V: TopoDS_Shape &
        :rtype: None
        """
        return _TopTools.TopTools_HArray1OfShape_Init(self, *args)
    def Length(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray1OfShape_Length(self, *args)
    def Lower(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray1OfShape_Lower(self, *args)
    def Upper(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray1OfShape_Upper(self, *args)
    def SetValue(self, *args):
        """
        :param Index:
        :type Index: int
        :param Value:
        :type Value: TopoDS_Shape &
        :rtype: None
        """
        return _TopTools.TopTools_HArray1OfShape_SetValue(self, *args)
    def Value(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: TopoDS_Shape
        """
        return _TopTools.TopTools_HArray1OfShape_Value(self, *args)
    def ChangeValue(self, *args):
        """
        :param Index:
        :type Index: int
        :rtype: TopoDS_Shape
        """
        return _TopTools.TopTools_HArray1OfShape_ChangeValue(self, *args)
    def Array1(self, *args):
        """
        :rtype: TopTools_Array1OfShape
        """
        return _TopTools.TopTools_HArray1OfShape_Array1(self, *args)
    def ChangeArray1(self, *args):
        """
        :rtype: TopTools_Array1OfShape
        """
        return _TopTools.TopTools_HArray1OfShape_ChangeArray1(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(TopTools_HArray1OfShape self)"""
        return _TopTools.TopTools_HArray1OfShape__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(TopTools_HArray1OfShape self) -> Handle_TopTools_HArray1OfShape"""
        return _TopTools.TopTools_HArray1OfShape_GetHandle(self)
    def __del__(self):
        # Bare except kept as generated: __del__ may run during interpreter
        # shutdown when module globals are gone — presumably deliberate.
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated: attach the C-level implementations as bound methods of
# TopTools_HArray1OfShape, then register the proxy with the runtime.
TopTools_HArray1OfShape.Init = new_instancemethod(_TopTools.TopTools_HArray1OfShape_Init,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.Length = new_instancemethod(_TopTools.TopTools_HArray1OfShape_Length,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.Lower = new_instancemethod(_TopTools.TopTools_HArray1OfShape_Lower,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.Upper = new_instancemethod(_TopTools.TopTools_HArray1OfShape_Upper,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.SetValue = new_instancemethod(_TopTools.TopTools_HArray1OfShape_SetValue,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.Value = new_instancemethod(_TopTools.TopTools_HArray1OfShape_Value,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.ChangeValue = new_instancemethod(_TopTools.TopTools_HArray1OfShape_ChangeValue,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.Array1 = new_instancemethod(_TopTools.TopTools_HArray1OfShape_Array1,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.ChangeArray1 = new_instancemethod(_TopTools.TopTools_HArray1OfShape_ChangeArray1,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape._kill_pointed = new_instancemethod(_TopTools.TopTools_HArray1OfShape__kill_pointed,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape.GetHandle = new_instancemethod(_TopTools.TopTools_HArray1OfShape_GetHandle,None,TopTools_HArray1OfShape)
TopTools_HArray1OfShape_swigregister = _TopTools.TopTools_HArray1OfShape_swigregister
TopTools_HArray1OfShape_swigregister(TopTools_HArray1OfShape)
class Handle_TopTools_HArray1OfShape(OCC.MMgt.Handle_MMgt_TShared):
    # SWIG-generated proxy for the OCC Handle wrapping TopTools_HArray1OfShape.
    # Do not edit by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _TopTools.Handle_TopTools_HArray1OfShape_swiginit(self,_TopTools.new_Handle_TopTools_HArray1OfShape(*args))
    DownCast = staticmethod(_TopTools.Handle_TopTools_HArray1OfShape_DownCast)
    def __del__(self):
        # Bare except kept as generated (tolerates interpreter-shutdown state).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated registration for Handle_TopTools_HArray1OfShape.
Handle_TopTools_HArray1OfShape.Nullify = new_instancemethod(_TopTools.Handle_TopTools_HArray1OfShape_Nullify,None,Handle_TopTools_HArray1OfShape)
Handle_TopTools_HArray1OfShape.IsNull = new_instancemethod(_TopTools.Handle_TopTools_HArray1OfShape_IsNull,None,Handle_TopTools_HArray1OfShape)
Handle_TopTools_HArray1OfShape.GetObject = new_instancemethod(_TopTools.Handle_TopTools_HArray1OfShape_GetObject,None,Handle_TopTools_HArray1OfShape)
Handle_TopTools_HArray1OfShape._kill_pointed = new_instancemethod(_TopTools.Handle_TopTools_HArray1OfShape__kill_pointed,None,Handle_TopTools_HArray1OfShape)
Handle_TopTools_HArray1OfShape_swigregister = _TopTools.Handle_TopTools_HArray1OfShape_swigregister
Handle_TopTools_HArray1OfShape_swigregister(Handle_TopTools_HArray1OfShape)
def Handle_TopTools_HArray1OfShape_DownCast(*args):
  # NOTE(review): rebound to the raw C function on the next line (generated).
  return _TopTools.Handle_TopTools_HArray1OfShape_DownCast(*args)
Handle_TopTools_HArray1OfShape_DownCast = _TopTools.Handle_TopTools_HArray1OfShape_DownCast
class TopTools_HArray2OfShape(OCC.MMgt.MMgt_TShared):
    # SWIG-generated proxy for the C++ class TopTools_HArray2OfShape: a
    # handle-managed 2-D array of TopoDS_Shape addressed by (Row, Col).
    # All methods delegate to the compiled _TopTools extension module.
    # Do not edit by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param R1:
        :type R1: int
        :param R2:
        :type R2: int
        :param C1:
        :type C1: int
        :param C2:
        :type C2: int
        :rtype: None
        :param R1:
        :type R1: int
        :param R2:
        :type R2: int
        :param C1:
        :type C1: int
        :param C2:
        :type C2: int
        :param V:
        :type V: TopoDS_Shape &
        :rtype: None
        """
        _TopTools.TopTools_HArray2OfShape_swiginit(self,_TopTools.new_TopTools_HArray2OfShape(*args))
    def Init(self, *args):
        """
        :param V:
        :type V: TopoDS_Shape &
        :rtype: None
        """
        return _TopTools.TopTools_HArray2OfShape_Init(self, *args)
    def ColLength(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray2OfShape_ColLength(self, *args)
    def RowLength(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray2OfShape_RowLength(self, *args)
    def LowerCol(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray2OfShape_LowerCol(self, *args)
    def LowerRow(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray2OfShape_LowerRow(self, *args)
    def UpperCol(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray2OfShape_UpperCol(self, *args)
    def UpperRow(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HArray2OfShape_UpperRow(self, *args)
    def SetValue(self, *args):
        """
        :param Row:
        :type Row: int
        :param Col:
        :type Col: int
        :param Value:
        :type Value: TopoDS_Shape &
        :rtype: None
        """
        return _TopTools.TopTools_HArray2OfShape_SetValue(self, *args)
    def Value(self, *args):
        """
        :param Row:
        :type Row: int
        :param Col:
        :type Col: int
        :rtype: TopoDS_Shape
        """
        return _TopTools.TopTools_HArray2OfShape_Value(self, *args)
    def ChangeValue(self, *args):
        """
        :param Row:
        :type Row: int
        :param Col:
        :type Col: int
        :rtype: TopoDS_Shape
        """
        return _TopTools.TopTools_HArray2OfShape_ChangeValue(self, *args)
    def Array2(self, *args):
        """
        :rtype: TopTools_Array2OfShape
        """
        return _TopTools.TopTools_HArray2OfShape_Array2(self, *args)
    def ChangeArray2(self, *args):
        """
        :rtype: TopTools_Array2OfShape
        """
        return _TopTools.TopTools_HArray2OfShape_ChangeArray2(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(TopTools_HArray2OfShape self)"""
        return _TopTools.TopTools_HArray2OfShape__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(TopTools_HArray2OfShape self) -> Handle_TopTools_HArray2OfShape"""
        return _TopTools.TopTools_HArray2OfShape_GetHandle(self)
    def __del__(self):
        # Bare except kept as generated (tolerates interpreter-shutdown state).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated: attach the C-level implementations as bound methods of
# TopTools_HArray2OfShape, then register the proxy with the runtime.
TopTools_HArray2OfShape.Init = new_instancemethod(_TopTools.TopTools_HArray2OfShape_Init,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.ColLength = new_instancemethod(_TopTools.TopTools_HArray2OfShape_ColLength,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.RowLength = new_instancemethod(_TopTools.TopTools_HArray2OfShape_RowLength,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.LowerCol = new_instancemethod(_TopTools.TopTools_HArray2OfShape_LowerCol,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.LowerRow = new_instancemethod(_TopTools.TopTools_HArray2OfShape_LowerRow,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.UpperCol = new_instancemethod(_TopTools.TopTools_HArray2OfShape_UpperCol,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.UpperRow = new_instancemethod(_TopTools.TopTools_HArray2OfShape_UpperRow,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.SetValue = new_instancemethod(_TopTools.TopTools_HArray2OfShape_SetValue,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.Value = new_instancemethod(_TopTools.TopTools_HArray2OfShape_Value,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.ChangeValue = new_instancemethod(_TopTools.TopTools_HArray2OfShape_ChangeValue,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.Array2 = new_instancemethod(_TopTools.TopTools_HArray2OfShape_Array2,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.ChangeArray2 = new_instancemethod(_TopTools.TopTools_HArray2OfShape_ChangeArray2,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape._kill_pointed = new_instancemethod(_TopTools.TopTools_HArray2OfShape__kill_pointed,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape.GetHandle = new_instancemethod(_TopTools.TopTools_HArray2OfShape_GetHandle,None,TopTools_HArray2OfShape)
TopTools_HArray2OfShape_swigregister = _TopTools.TopTools_HArray2OfShape_swigregister
TopTools_HArray2OfShape_swigregister(TopTools_HArray2OfShape)
class Handle_TopTools_HArray2OfShape(OCC.MMgt.Handle_MMgt_TShared):
    # SWIG-generated proxy for the OCC Handle wrapping TopTools_HArray2OfShape.
    # Do not edit by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _TopTools.Handle_TopTools_HArray2OfShape_swiginit(self,_TopTools.new_Handle_TopTools_HArray2OfShape(*args))
    DownCast = staticmethod(_TopTools.Handle_TopTools_HArray2OfShape_DownCast)
    def __del__(self):
        # Bare except kept as generated (tolerates interpreter-shutdown state).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated registration for Handle_TopTools_HArray2OfShape.
Handle_TopTools_HArray2OfShape.Nullify = new_instancemethod(_TopTools.Handle_TopTools_HArray2OfShape_Nullify,None,Handle_TopTools_HArray2OfShape)
Handle_TopTools_HArray2OfShape.IsNull = new_instancemethod(_TopTools.Handle_TopTools_HArray2OfShape_IsNull,None,Handle_TopTools_HArray2OfShape)
Handle_TopTools_HArray2OfShape.GetObject = new_instancemethod(_TopTools.Handle_TopTools_HArray2OfShape_GetObject,None,Handle_TopTools_HArray2OfShape)
Handle_TopTools_HArray2OfShape._kill_pointed = new_instancemethod(_TopTools.Handle_TopTools_HArray2OfShape__kill_pointed,None,Handle_TopTools_HArray2OfShape)
Handle_TopTools_HArray2OfShape_swigregister = _TopTools.Handle_TopTools_HArray2OfShape_swigregister
Handle_TopTools_HArray2OfShape_swigregister(Handle_TopTools_HArray2OfShape)
def Handle_TopTools_HArray2OfShape_DownCast(*args):
  # NOTE(review): rebound to the raw C function on the next line (generated).
  return _TopTools.Handle_TopTools_HArray2OfShape_DownCast(*args)
Handle_TopTools_HArray2OfShape_DownCast = _TopTools.Handle_TopTools_HArray2OfShape_DownCast
class TopTools_HSequenceOfShape(OCC.MMgt.MMgt_TShared):
    # SWIG-generated proxy for the C++ class TopTools_HSequenceOfShape: a
    # handle-managed, 1-based sequence of TopoDS_Shape supporting insertion,
    # removal and splitting. All methods delegate to the compiled _TopTools
    # extension module. Do not edit by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _TopTools.TopTools_HSequenceOfShape_swiginit(self,_TopTools.new_TopTools_HSequenceOfShape(*args))
    def IsEmpty(self, *args):
        """
        :rtype: bool
        """
        return _TopTools.TopTools_HSequenceOfShape_IsEmpty(self, *args)
    def Length(self, *args):
        """
        :rtype: int
        """
        return _TopTools.TopTools_HSequenceOfShape_Length(self, *args)
    def Clear(self, *args):
        """
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_Clear(self, *args)
    def Append(self, *args):
        """
        :param anItem:
        :type anItem: TopoDS_Shape &
        :rtype: None
        :param aSequence:
        :type aSequence: Handle_TopTools_HSequenceOfShape &
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_Append(self, *args)
    def Prepend(self, *args):
        """
        :param anItem:
        :type anItem: TopoDS_Shape &
        :rtype: None
        :param aSequence:
        :type aSequence: Handle_TopTools_HSequenceOfShape &
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_Prepend(self, *args)
    def Reverse(self, *args):
        """
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_Reverse(self, *args)
    def InsertBefore(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: TopoDS_Shape &
        :rtype: None
        :param anIndex:
        :type anIndex: int
        :param aSequence:
        :type aSequence: Handle_TopTools_HSequenceOfShape &
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_InsertBefore(self, *args)
    def InsertAfter(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: TopoDS_Shape &
        :rtype: None
        :param anIndex:
        :type anIndex: int
        :param aSequence:
        :type aSequence: Handle_TopTools_HSequenceOfShape &
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_InsertAfter(self, *args)
    def Exchange(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anOtherIndex:
        :type anOtherIndex: int
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_Exchange(self, *args)
    def Split(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: Handle_TopTools_HSequenceOfShape
        """
        return _TopTools.TopTools_HSequenceOfShape_Split(self, *args)
    def SetValue(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :param anItem:
        :type anItem: TopoDS_Shape &
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_SetValue(self, *args)
    def Value(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: TopoDS_Shape
        """
        return _TopTools.TopTools_HSequenceOfShape_Value(self, *args)
    def ChangeValue(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: TopoDS_Shape
        """
        return _TopTools.TopTools_HSequenceOfShape_ChangeValue(self, *args)
    def Remove(self, *args):
        """
        :param anIndex:
        :type anIndex: int
        :rtype: None
        :param fromIndex:
        :type fromIndex: int
        :param toIndex:
        :type toIndex: int
        :rtype: None
        """
        return _TopTools.TopTools_HSequenceOfShape_Remove(self, *args)
    def Sequence(self, *args):
        """
        :rtype: TopTools_SequenceOfShape
        """
        return _TopTools.TopTools_HSequenceOfShape_Sequence(self, *args)
    def ChangeSequence(self, *args):
        """
        :rtype: TopTools_SequenceOfShape
        """
        return _TopTools.TopTools_HSequenceOfShape_ChangeSequence(self, *args)
    def ShallowCopy(self, *args):
        """
        :rtype: Handle_TopTools_HSequenceOfShape
        """
        return _TopTools.TopTools_HSequenceOfShape_ShallowCopy(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(TopTools_HSequenceOfShape self)"""
        return _TopTools.TopTools_HSequenceOfShape__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(TopTools_HSequenceOfShape self) -> Handle_TopTools_HSequenceOfShape"""
        return _TopTools.TopTools_HSequenceOfShape_GetHandle(self)
    def __del__(self):
        # Bare except kept as generated (tolerates interpreter-shutdown state).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated: attach the C-level implementations as bound methods of
# TopTools_HSequenceOfShape, then register the proxy with the runtime.
TopTools_HSequenceOfShape.IsEmpty = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_IsEmpty,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Length = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Length,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Clear = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Clear,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Append = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Append,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Prepend = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Prepend,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Reverse = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Reverse,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.InsertBefore = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_InsertBefore,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.InsertAfter = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_InsertAfter,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Exchange = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Exchange,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Split = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Split,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.SetValue = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_SetValue,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Value = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Value,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.ChangeValue = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_ChangeValue,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Remove = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Remove,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.Sequence = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_Sequence,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.ChangeSequence = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_ChangeSequence,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.ShallowCopy = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_ShallowCopy,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape._kill_pointed = new_instancemethod(_TopTools.TopTools_HSequenceOfShape__kill_pointed,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape.GetHandle = new_instancemethod(_TopTools.TopTools_HSequenceOfShape_GetHandle,None,TopTools_HSequenceOfShape)
TopTools_HSequenceOfShape_swigregister = _TopTools.TopTools_HSequenceOfShape_swigregister
TopTools_HSequenceOfShape_swigregister(TopTools_HSequenceOfShape)
class Handle_TopTools_HSequenceOfShape(OCC.MMgt.Handle_MMgt_TShared):
    # SWIG-generated proxy for the OCC Handle wrapping TopTools_HSequenceOfShape.
    # Do not edit by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _TopTools.Handle_TopTools_HSequenceOfShape_swiginit(self,_TopTools.new_Handle_TopTools_HSequenceOfShape(*args))
    DownCast = staticmethod(_TopTools.Handle_TopTools_HSequenceOfShape_DownCast)
    def __del__(self):
        # Bare except kept as generated (tolerates interpreter-shutdown state).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated registration for Handle_TopTools_HSequenceOfShape.
Handle_TopTools_HSequenceOfShape.Nullify = new_instancemethod(_TopTools.Handle_TopTools_HSequenceOfShape_Nullify,None,Handle_TopTools_HSequenceOfShape)
Handle_TopTools_HSequenceOfShape.IsNull = new_instancemethod(_TopTools.Handle_TopTools_HSequenceOfShape_IsNull,None,Handle_TopTools_HSequenceOfShape)
Handle_TopTools_HSequenceOfShape.GetObject = new_instancemethod(_TopTools.Handle_TopTools_HSequenceOfShape_GetObject,None,Handle_TopTools_HSequenceOfShape)
Handle_TopTools_HSequenceOfShape._kill_pointed = new_instancemethod(_TopTools.Handle_TopTools_HSequenceOfShape__kill_pointed,None,Handle_TopTools_HSequenceOfShape)
Handle_TopTools_HSequenceOfShape_swigregister = _TopTools.Handle_TopTools_HSequenceOfShape_swigregister
Handle_TopTools_HSequenceOfShape_swigregister(Handle_TopTools_HSequenceOfShape)
def Handle_TopTools_HSequenceOfShape_DownCast(*args):
  # NOTE(review): rebound to the raw C function on the next line (generated).
  return _TopTools.Handle_TopTools_HSequenceOfShape_DownCast(*args)
Handle_TopTools_HSequenceOfShape_DownCast = _TopTools.Handle_TopTools_HSequenceOfShape_DownCast
class TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress(OCC.TCollection.TCollection_MapNode):
    # SWIG-generated proxy for an internal node of the indexed data map
    # Shape -> Standard_Address: Key1 is the TopoDS_Shape key, Key2 an int
    # index, Value a Standard_Address. All methods delegate to the compiled
    # _TopTools extension module. Do not edit by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :param K1:
        :type K1: TopoDS_Shape &
        :param K2:
        :type K2: int
        :param I:
        :type I: Standard_Address &
        :param n1:
        :type n1: TCollection_MapNodePtr &
        :param n2:
        :type n2: TCollection_MapNodePtr &
        :rtype: None
        """
        _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swiginit(self,_TopTools.new_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress(*args))
    def Key1(self, *args):
        """
        :rtype: TopoDS_Shape
        """
        return _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_Key1(self, *args)
    def GetKey2(self):
        """GetKey2(TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress self) -> Standard_Integer"""
        return _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_GetKey2(self)
    def SetKey2(self, *args):
        """SetKey2(TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress self, Standard_Integer value)"""
        return _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_SetKey2(self, *args)
    def Next2(self, *args):
        """
        :rtype: TCollection_MapNodePtr
        """
        return _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_Next2(self, *args)
    def Value(self, *args):
        """
        :rtype: Standard_Address
        """
        return _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_Value(self, *args)
    def _kill_pointed(self):
        """_kill_pointed(TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress self)"""
        return _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress__kill_pointed(self)
    def GetHandle(self):
        """GetHandle(TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress self) -> Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress"""
        return _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_GetHandle(self)
    def __del__(self):
        # Bare except kept as generated (tolerates interpreter-shutdown state).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated: attach the C-level implementations as bound methods, then
# register the proxy with the runtime.
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.Key1 = new_instancemethod(_TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_Key1,None,TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.GetKey2 = new_instancemethod(_TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_GetKey2,None,TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.SetKey2 = new_instancemethod(_TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_SetKey2,None,TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.Next2 = new_instancemethod(_TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_Next2,None,TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.Value = new_instancemethod(_TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_Value,None,TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress._kill_pointed = new_instancemethod(_TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress__kill_pointed,None,TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.GetHandle = new_instancemethod(_TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_GetHandle,None,TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swigregister = _TopTools.TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swigregister
TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swigregister(TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
class Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress(OCC.TCollection.Handle_TCollection_MapNode):
    # SWIG-generated proxy for the OCC Handle wrapping
    # TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress. Do not edit
    # by hand.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swiginit(self,_TopTools.new_Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress(*args))
    DownCast = staticmethod(_TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_DownCast)
    def __del__(self):
        # Bare except kept as generated (tolerates interpreter-shutdown state).
        try:
            self.thisown = False
            GarbageCollector.garbage.collect_object(self)
        except:
            pass
# SWIG-generated registration for the handle proxy defined above.
Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.Nullify = new_instancemethod(_TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_Nullify,None,Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.IsNull = new_instancemethod(_TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_IsNull,None,Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress.GetObject = new_instancemethod(_TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_GetObject,None,Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress._kill_pointed = new_instancemethod(_TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress__kill_pointed,None,Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swigregister = _TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swigregister
Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_swigregister(Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress)
def Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_DownCast(*args):
  # NOTE(review): rebound to the raw C function on the next line (generated).
  return _TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_DownCast(*args)
Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_DownCast = _TopTools.Handle_TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeAddress_DownCast
class TopTools_IndexedDataMapNodeOfIndexedDataMapOfShapeListOfShape(OCC.TCollection.TCollection_MapNode):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
"""
:param K1:
:type K1: TopoDS_Shape &
:param K2:
:type K2: int
:param I:
:type I: TopTools_ListOfShape &
:param n1:
:type n1: TCollection_MapNodePtr &
:param n2:
:type | |
except that the new cluster is created with the default cluster security and parameter groups. After Amazon Redshift creates the cluster, you can use the ModifyCluster API to associate a different security group and different parameter group with the restored cluster. If you are using a DS node type, you can also choose to change to another DS node type of the same size during restore.
If you restore a cluster into a VPC, you must provide a cluster subnet group where you want the cluster restored.
For more information about working with snapshots, go to Amazon Redshift Snapshots in the Amazon Redshift Cluster Management Guide .
See also: AWS API Documentation
:example: response = client.restore_from_cluster_snapshot(
ClusterIdentifier='string',
SnapshotIdentifier='string',
SnapshotClusterIdentifier='string',
Port=123,
AvailabilityZone='string',
AllowVersionUpgrade=True|False,
ClusterSubnetGroupName='string',
PubliclyAccessible=True|False,
OwnerAccount='string',
HsmClientCertificateIdentifier='string',
HsmConfigurationIdentifier='string',
ElasticIp='string',
ClusterParameterGroupName='string',
ClusterSecurityGroups=[
'string',
],
VpcSecurityGroupIds=[
'string',
],
PreferredMaintenanceWindow='string',
AutomatedSnapshotRetentionPeriod=123,
KmsKeyId='string',
NodeType='string',
EnhancedVpcRouting=True|False,
AdditionalInfo='string',
IamRoles=[
'string',
]
)
:type ClusterIdentifier: string
:param ClusterIdentifier: [REQUIRED]
The identifier of the cluster that will be created from restoring the snapshot.
Constraints:
Must contain from 1 to 63 alphanumeric characters or hyphens.
Alphabetic characters must be lowercase.
First character must be a letter.
Cannot end with a hyphen or contain two consecutive hyphens.
Must be unique for all clusters within an AWS account.
:type SnapshotIdentifier: string
:param SnapshotIdentifier: [REQUIRED]
The name of the snapshot from which to create the new cluster. This parameter isn't case sensitive.
Example: my-snapshot-id
:type SnapshotClusterIdentifier: string
:param SnapshotClusterIdentifier: The name of the cluster the source snapshot was created from. This parameter is required if your IAM user has a policy containing a snapshot resource element that specifies anything other than * for the cluster name.
:type Port: integer
:param Port: The port number on which the cluster accepts connections.
Default: The same port as the original cluster.
Constraints: Must be between 1115 and 65535 .
:type AvailabilityZone: string
:param AvailabilityZone: The Amazon EC2 Availability Zone in which to restore the cluster.
Default: A random, system-chosen Availability Zone.
Example: us-east-1a
:type AllowVersionUpgrade: boolean
:param AllowVersionUpgrade: If true , major version upgrades can be applied during the maintenance window to the Amazon Redshift engine that is running on the cluster.
Default: true
:type ClusterSubnetGroupName: string
:param ClusterSubnetGroupName: The name of the subnet group where you want the cluster restored.
A snapshot of a cluster in a VPC can be restored only in a VPC. Therefore, you must provide the name of the subnet group where you want the cluster restored.
:type PubliclyAccessible: boolean
:param PubliclyAccessible: If true , the cluster can be accessed from a public network.
:type OwnerAccount: string
:param OwnerAccount: The AWS customer account used to create or copy the snapshot. Required if you are restoring a snapshot you do not own, optional if you own the snapshot.
:type HsmClientCertificateIdentifier: string
:param HsmClientCertificateIdentifier: Specifies the name of the HSM client certificate the Amazon Redshift cluster uses to retrieve the data encryption keys stored in an HSM.
:type HsmConfigurationIdentifier: string
:param HsmConfigurationIdentifier: Specifies the name of the HSM configuration that contains the information the Amazon Redshift cluster can use to retrieve and store keys in an HSM.
:type ElasticIp: string
:param ElasticIp: The elastic IP (EIP) address for the cluster.
:type ClusterParameterGroupName: string
:param ClusterParameterGroupName: The name of the parameter group to be associated with this cluster.
Default: The default Amazon Redshift cluster parameter group. For information about the default parameter group, go to Working with Amazon Redshift Parameter Groups .
Constraints:
Must be 1 to 255 alphanumeric characters or hyphens.
First character must be a letter.
Cannot end with a hyphen or contain two consecutive hyphens.
:type ClusterSecurityGroups: list
:param ClusterSecurityGroups: A list of security groups to be associated with this cluster.
Default: The default cluster security group for Amazon Redshift.
Cluster security groups only apply to clusters outside of VPCs.
(string) --
:type VpcSecurityGroupIds: list
:param VpcSecurityGroupIds: A list of Virtual Private Cloud (VPC) security groups to be associated with the cluster.
Default: The default VPC security group is associated with the cluster.
VPC security groups only apply to clusters in VPCs.
(string) --
:type PreferredMaintenanceWindow: string
:param PreferredMaintenanceWindow: The weekly time range (in UTC) during which automated cluster maintenance can occur.
Format: ddd:hh24:mi-ddd:hh24:mi
Default: The value selected for the cluster from which the snapshot was taken. For more information about the time blocks for each region, see Maintenance Windows in Amazon Redshift Cluster Management Guide.
Valid Days: Mon | Tue | Wed | Thu | Fri | Sat | Sun
Constraints: Minimum 30-minute window.
:type AutomatedSnapshotRetentionPeriod: integer
:param AutomatedSnapshotRetentionPeriod: The number of days that automated snapshots are retained. If the value is 0, automated snapshots are disabled. Even if automated snapshots are disabled, you can still create manual snapshots when you want with CreateClusterSnapshot .
Default: The value selected for the cluster from which the snapshot was taken.
Constraints: Must be a value from 0 to 35.
:type KmsKeyId: string
:param KmsKeyId: The AWS Key Management Service (KMS) key ID of the encryption key that you want to use to encrypt data in the cluster that you restore from a shared snapshot.
:type NodeType: string
:param NodeType: The node type that the restored cluster will be provisioned with.
Default: The node type of the cluster from which the snapshot was taken. You can modify this if you are using any DS node type. In that case, you can choose to restore into another DS node type of the same size. For example, you can restore ds1.8xlarge into ds2.8xlarge, or ds2.xlarge into ds1.xlarge. If you have a DC instance type, you must restore into that same instance type and size. In other words, you can only restore a dc1.large instance type into another dc1.large instance type. For more information about node types, see About Clusters and Nodes in the Amazon Redshift Cluster Management Guide
:type EnhancedVpcRouting: boolean
:param EnhancedVpcRouting: An option that specifies whether to create the cluster with enhanced VPC routing enabled. To create a cluster that uses enhanced VPC routing, the cluster must be in a VPC. For more information, see Enhanced VPC Routing in the Amazon Redshift Cluster Management Guide.
If this option is true , enhanced VPC routing is enabled.
Default: false
:type AdditionalInfo: string
:param AdditionalInfo: Reserved.
:type IamRoles: list
:param IamRoles: A list of AWS Identity and Access Management (IAM) roles that can be used by the cluster to access other AWS services. You must supply the IAM roles in their Amazon Resource Name (ARN) format. You can supply up to 10 IAM roles in a single request.
A cluster can have up to 10 IAM roles associated at any time.
(string) --
:rtype: dict
:return: {
'Cluster': {
'ClusterIdentifier': 'string',
'NodeType': 'string',
'ClusterStatus': 'string',
'ModifyStatus': 'string',
'MasterUsername': 'string',
'DBName': 'string',
'Endpoint': {
'Address': 'string',
'Port': 123
},
'ClusterCreateTime': datetime(2015, 1, 1),
'AutomatedSnapshotRetentionPeriod': 123,
'ClusterSecurityGroups': [
{
'ClusterSecurityGroupName': 'string',
'Status': 'string'
},
],
'VpcSecurityGroups': [
{
'VpcSecurityGroupId': 'string',
'Status': 'string'
},
],
'ClusterParameterGroups': [
{
'ParameterGroupName': 'string',
'ParameterApplyStatus': 'string',
'ClusterParameterStatusList': [
{
'ParameterName': 'string',
'ParameterApplyStatus': 'string',
'ParameterApplyErrorDescription': 'string'
},
]
},
],
'ClusterSubnetGroupName': 'string',
'VpcId': 'string',
'AvailabilityZone': 'string',
'PreferredMaintenanceWindow': 'string',
'PendingModifiedValues': {
'MasterUserPassword': 'string',
'NodeType': 'string',
'NumberOfNodes': 123,
'ClusterType': 'string',
'ClusterVersion': 'string',
'AutomatedSnapshotRetentionPeriod': 123,
'ClusterIdentifier': 'string',
'PubliclyAccessible': True|False,
'EnhancedVpcRouting': True|False
},
'ClusterVersion': 'string',
'AllowVersionUpgrade': True|False,
'NumberOfNodes': 123,
'PubliclyAccessible': True|False,
'Encrypted': True|False,
'RestoreStatus': {
'Status': 'string',
'CurrentRestoreRateInMegaBytesPerSecond': 123.0,
'SnapshotSizeInMegaBytes': 123,
'ProgressInMegaBytes': 123,
'ElapsedTimeInSeconds': 123,
'EstimatedTimeToCompletionInSeconds': 123
},
'HsmStatus': {
'HsmClientCertificateIdentifier': 'string',
'HsmConfigurationIdentifier': 'string',
'Status': 'string'
},
'ClusterSnapshotCopyStatus': {
'DestinationRegion': 'string',
'RetentionPeriod': 123,
'SnapshotCopyGrantName': 'string'
},
'ClusterPublicKey': 'string',
'ClusterNodes': [
{
'NodeRole': 'string',
'PrivateIPAddress': 'string',
'PublicIPAddress': 'string'
},
],
'ElasticIpStatus': {
'ElasticIp': 'string',
'Status': 'string'
},
'ClusterRevisionNumber': 'string',
'Tags': [
{
'Key': 'string',
'Value': 'string'
| |
# python/quick_der/classes.py
# classes.py -- The various classes in the ASN.1 supportive hierarchy
import _quickder
from packstx import *
import primitive
import format
import builder
# Python 2 exposes intern() as a builtin; Python 3 moved it to sys.intern.
# If the builtin is absent, import it from sys; as a last resort fall back
# to an identity function, since interning is only a lookup optimisation.
if 'intern' not in dir (globals () ['__builtins__']):
    try:
        from sys import intern
    except ImportError:
        # No intern available at all: degrade gracefully to identity.
        def intern (s):
            return s
# The ASN1Object is the abstract base class for ASN.1 objects.
# Data from subclasses is stored here, so subclasses can override the
# __getattr__() and __setattr__() methods, allowing obj.field notation.
class ASN1Object (object):
    """The ASN1Object is an abstract base class for all the value holders
    of ASN.1 data.  It has no value on its own.  Subclasses are the
    following generic classes:

      * `ASN1ConstructedType`
      * `ASN1SequenceOf`
      * `ASN1SetOf`
      * `ASN1Atom`

    The `asn2quickder` compiler creates further subclasses of these.
    This means that all the data objects derived from unpacking DER
    data are indirect subclasses of `ASN1Object`.
    """

    # Typing information; normally overridden by generated subclasses:
    _der_packer = None      # DER_PACK_ instruction sequence for _quickder
    _recipe = None          # nested structure describing how to build instances
    _numcursori = None      # number of _bindata slots (cursors) this type uses

    def __init__ (self, derblob=None, bindata=None, offset=0, der_packer=None, recipe=None, context=None):
        """Initialise the current object; abstract classes require
        parameters with typing information (der_packer, recipe,
        numcursori).  Instance data may be supplied through bindata
        and a possible offset, with a fallback to derblob that
        will use the subclasses' _der_unpack() methods to form the
        _bindata values.  If neither bindata nor derblob are
        supplied, then an empty instance is delivered.  The optional
        context defines the globals() map in which type references
        should be resolved.
        """
        #TODO:OLD# assert der_packer is not None or self._der_packer is not None, 'You or a class from asn2quickder must supply a DER_PACK_ sequence for use with Quick DER'
        assert (bindata is not None and recipe is not None) or der_packer is not None or self._der_packer is not None, 'You or a class from asn2quickder must supply a DER_PACK_ sequence for use with Quick DER'
        assert recipe is not None or self._recipe is not None, 'You or a class from asn2quickder must supply a recipe for instantiating object structures'
        #TODO:OLD# assert bindata is not None or derblob is not None or self._numcursori is not None, 'When no binary data is supplied, you or a class from asn2quickder must supply how many DER cursors are used'
        #TODO:NEW:MAYBENOTNEEDED# assert self._numcursori is not None, 'You should always indicate how many values will be stored'
        assert context is not None or getattr(self, "_context", None) is not None, 'You or a subclass definition should provide a context for symbol resolution'
        # Construct the type if so desired: explicit arguments override the
        # class-level typing attributes set by generated subclasses.
        if der_packer:
            self._der_packer = der_packer
        if recipe:
            self._recipe = recipe
        if context:
            self._context = context
        # Ensure presence of all typing data
        # Fill the instance data as supplied, or else make it empty.
        # Exactly one of three initialisation paths runs; each ends by
        # invoking the subclass hook __init_bindata__().
        if bindata:
            # Pre-unpacked values shared with an enclosing object
            self._bindata = bindata
            self._offset = offset
            self.__init_bindata__ ()
        elif derblob:
            # Raw DER bytes: unpack them via the C extension
            self._bindata = _quickder.der_unpack (self._der_packer, derblob, self._numcursori)
            self._offset = 0
            assert len (self._bindata) == self._numcursori, 'Wrong number of values returned from der_unpack()'
            assert offset == 0, 'You supplied a derblob, so you cannot request any offset but 0'
            self.__init_bindata__ ()
        elif self._numcursori:
            # Empty instance: allocate all-None cursor slots
            self._bindata = [ None ] * self._numcursori
            self._offset = offset
            assert offset == 0, 'You supplied no initialisation data, so you cannot request any offset but 0'
            self.__init_bindata__ ()

    def __init_bindata__ (self):
        # Abstract hook: concrete subclasses must install their instance data
        # into self._bindata; reaching this base implementation is an error.
        assert False, 'Expected __init_bindata__() method not found in ' + self.__class__.__name__
# The ASN1ConstructedType is a nested structure of named fields.
# Nesting instances share the bindata list structures, which they modify
# to retain sharing. The reason for this is that the _der_pack() on the
# class must use changes made in the nested objects as well as the main one.
#SHARED IN LOWEST CLASS: ._recipe and ._der_packer
#STORED IN OBJECTS: ._fields, ._offset, ._bindata, ._numcursori
class ASN1ConstructedType (ASN1Object):
    """The ASN.1 constructed types are `SEQUENCE`, `SET` and `CHOICE`.
    Note that `SEQUENCE OF` and `SET OF` are not considered
    constructed types.

    Elements of constructed types can be addressed by their field name,
    and the Python representation makes just that possible.  The result
    of updates will automatically be incorporated into the binary data
    that is used in upcoming _der_pack() invocations.

    Construct subclasses of this class, with the following attributes:
      * `_der_packer`
      * `_recipe` is a dictionary that maps field names to one of
          - an integer index into `bindata[]`
          - a subdictionary shaped like `_recipe`
          - singleton list capturing the element type of SEQUENCE OF
          - singleton set capturing the element type of SET OF
          - `(class,offset)` tuples referencing an `ASN1Object` subclass
    These recipes are also built by the `asn2quickder` compiler.
    """

    def __init_bindata__ (self):
        """The object has been setup with structural information in
        _der_packer and _recipe, as well as instance data in
        _bindata and _offset.  We now iterate over all the fields
        in the _recipe to replace some or all entries in _bindata
        with an ASN1Object subclass instance.
        """
        # Fix: removed debug block that did `import sys; sys.exit(1)` on a
        # recipe mismatch -- killing the host process from a library is
        # wrong, and the assert below performs the same check with a message.
        assert self._recipe [0] == '_NAMED', 'ASN1ConstructedType instances must have a dictionary in their _recipe'
        (_NAMED, recp) = self._recipe
        self._fields = {}
        # Static recipe is generated from the ASN.1 grammar;
        # iterate over this recipe to form the instance data.
        for (subfld, subrcp) in recp.items ():
            if type (subfld) != str:
                raise Exception ("ASN.1 recipe keys can only be strings")
            # Interned strings yield faster dictionary lookups
            # (field names in Python are always interned)
            subfld = intern (subfld.replace ('-', '_'))
            self._fields [subfld] = self._offset  # fallback
            subval = builder.build_asn1 (self._context, subrcp, self._bindata, self._offset)
            if type (subval) == int:
                # Primitive: relative index into _bindata; store the
                # absolute index in _fields
                self._fields [subfld] += subval
            elif subval.__class__ == ASN1Atom:
                # An ASN1Atom registers itself into _bindata [_offset]
                # during its own __init_bindata__(); nothing to do here.
                # Native types may be assigned instead of subval.
                # (Fix: removed stray debug print statement.)
                pass
            elif isinstance (subval, ASN1Object):
                # Composite value: store the object itself in _fields
                self._fields [subfld] = subval

    def _name2idx (self, name):
        """Map a field name to its _bindata index or sub-object,
        tolerating extra leading underscores; raise AttributeError
        when the field does not exist.
        """
        # Fix: dict.has_key() was removed in Python 3; `in` works in both.
        while name not in self._fields:
            if name [:1] == '_':
                name = name [1:]
                continue
            raise AttributeError (name)
        return self._fields [name]

    def __setattr__ (self, name, val):
        # Names starting with '_' are plain instance attributes; any other
        # name is an ASN.1 field routed through _bindata or a sub-object.
        if name [0] != '_':
            idx = self._name2idx (name)
            if type (idx) == int:
                self._bindata [idx] = val
            else:
                idx.set (val)
        else:
            self.__dict__ [name] = val

    def __delattr__ (self, name):
        # Deleting a field marks its DER value as absent (None)
        idx = self._name2idx (name)
        self._bindata [idx] = None

    def __getattr__ (self, name):
        # Return the raw _bindata value for primitive fields, or the
        # nested ASN1Object for composite fields.
        idx = self._name2idx (name)
        if type (idx) == int:
            return self._bindata [idx]
        else:
            return idx

    def _der_pack (self):
        """Pack the current ASN1ConstructedType using DER notation.
        Follow the syntax that was setup when this object
        was created, usually after a der_unpack() operation
        or a der_unpack (ClassName, derblob) or empty(ClassName)
        call.  Return the bytes with the packed data.
        """
        bindata = []
        for bd in self._bindata [self._offset:self._offset+self._numcursori]:
            #TODO# set, list, atomic...
            # (Fix: removed debug print of every element's type.)
            if bd is not None and type (bd) != str:
                # Hope to map the value to DER without hints
                #TODO# Currently fails on ASN1Objects
                bd = format.der_format (bd)
            bindata.append (bd)
        return _quickder.der_pack (self._der_packer, bindata)

    def _der_format (self):
        """Format the current ASN1ConstructedType using DER notation,
        but withhold the DER header consisting of the outer tag
        and length.  This format is comparable to what is stored
        in bindata array elements.  To be able to produce proper
        DER, it needs some contextual information (specifically,
        the tag to prefix before the body).
        """
        # Fix: _der_pack() is a bound method; the old code called
        # self._der_pack (self), passing self twice and raising
        # TypeError on every invocation.
        packed = self._der_pack ()
        (tag, ilen, hlen) = _quickder.der_header (packed)
        return packed [hlen : hlen+ilen]

    def __str__ (self):
        # Render as a brace-enclosed, comma-separated list of the fields
        # that currently hold a value; nested values are re-indented.
        retval = '{\n '
        comma = ''
        for (name, value) in self._fields.items ():
            if type (value) == int:
                value = self._bindata [value]
            if value is None:
                continue
            if isinstance (value, ASN1Atom) and value.get () is None:
                continue
            newval = str (value).replace ('\n', '\n ')
            retval = retval + comma + name + ' ' + newval
            comma = ',\n '
        retval = retval + ' }'
        return retval
class ASN1SequenceOf (ASN1Object,list):
"""An ASN.1 representation for a SEQUENCE OF other ASN1Object values.
The instances of this class can be manipulated just like Python's
native list type.
TODO: Need to _der_pack() and get the result back into a context.
"""
_der_packer = chr (DER_PACK_STORE | DER_TAG_SEQUENCE) + chr (DER_PACK_END)
_numcursori = 1
def __init_bindata__ (self):
"""The object has been setup with structural information in
_der_packer and _recipe, as well as instance data in
_bindata [_offset]. We now split the instance data into
list elements that we each instantiate from the class in
the _recipe.
The last step of this procedure is to self-register into
_bindata [_offset], so as to support future _der_pack()
calls.
"""
assert self._recipe [0] == '_SEQOF', 'ASN1SequenceOf instances must have a _recipe tuple (\'_SEQOF\',...)'
(_SEQOF,allidx,subpck,subnum,subrcp) = self._recipe
#TODO:DEBUG# print 'SEQUENCE OF from', self._offset, 'to', allidx, 'element recipe =', subrcp
#TODO:DEBUG# print 'len(_bindata) =', len (self._bindata), '_offset =', self._offset, 'allidx =', allidx
derblob = self._bindata [self._offset] or ''
while len (derblob) > 0:
#TODO:DEBUG# print 'Getting the header from ' + ' '.join | |
lcl_8:
lcl_8 = rbnf_named__check_3
else:
lcl_9 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_9
lcl_9 = rbnf_tmp_0.lineno
lcl_10 = rbnf_tmp_0.value
lcl_10 = unwrap(lcl_10)
lcl_11 = rbnf_tmp_1.value
lcl_11 = unwrap(lcl_11)
lcl_10 = Prod(lcl_10, lcl_11, rbnf_tmp_2, rbnf_tmp_3)
lcl_10 = (lcl_9, lcl_10)
rbnf_tmp_1_ = lcl_10
lcl_10 = (True, rbnf_tmp_1_)
lcl_8 = lcl_10
lcl_6 = lcl_8
elif (lcl_7 == 4):
lcl_10 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_10
lcl_10 = rbnf_named__check_3[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_3
else:
lcl_11 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_11
lcl_11 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_8 = unwrap(lcl_8)
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_11 = (lcl_11, lcl_8)
rbnf_tmp_1_ = lcl_11
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_6 = lcl_10
elif (lcl_7 == 6):
lcl_10 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_10
lcl_10 = rbnf_named__check_3[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_3
else:
lcl_11 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_11
lcl_11 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_8 = unwrap(lcl_8)
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_11 = (lcl_11, lcl_8)
rbnf_tmp_1_ = lcl_11
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_6 = lcl_10
elif (lcl_7 == 5):
lcl_10 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_10
lcl_10 = rbnf_named__check_3[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_3
else:
lcl_11 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_11
lcl_11 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_8 = unwrap(lcl_8)
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_11 = (lcl_11, lcl_8)
rbnf_tmp_1_ = lcl_11
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_6 = lcl_10
elif (lcl_7 == 2):
lcl_10 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_10
lcl_10 = rbnf_named__check_3[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_3
else:
lcl_11 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_11
lcl_11 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_8 = unwrap(lcl_8)
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_11 = (lcl_11, lcl_8)
rbnf_tmp_1_ = lcl_11
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_6 = lcl_10
elif (lcl_7 == 1):
lcl_10 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_10
lcl_10 = rbnf_named__check_3[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_3
else:
lcl_11 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_11
lcl_11 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_8 = unwrap(lcl_8)
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_11 = (lcl_11, lcl_8)
rbnf_tmp_1_ = lcl_11
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_6 = lcl_10
elif (lcl_7 == 0):
lcl_10 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_10
lcl_10 = rbnf_named__check_3[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_3
else:
lcl_11 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_11
lcl_11 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_8 = unwrap(lcl_8)
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_11 = (lcl_11, lcl_8)
rbnf_tmp_1_ = lcl_11
lcl_11 = (True, rbnf_tmp_1_)
lcl_10 = lcl_11
lcl_6 = lcl_10
else:
lcl_10 = rbnf_tmp_0.lineno
lcl_11 = rbnf_tmp_0.value
lcl_11 = unwrap(lcl_11)
lcl_8 = rbnf_tmp_1.value
lcl_8 = unwrap(lcl_8)
lcl_11 = Decl(lcl_11, lcl_8, rbnf_tmp_2)
lcl_10 = (lcl_10, lcl_11)
rbnf_tmp_1_ = lcl_10
lcl_10 = (True, rbnf_tmp_1_)
lcl_6 = lcl_10
lcl_5 = lcl_6
else:
lcl_10 = (rbnf_named__off_2, 'singleprod got EOF')
lcl_10 = builtin_cons(lcl_10, builtin_nil)
lcl_10 = (False, lcl_10)
lcl_5 = lcl_10
lcl_4 = lcl_5
lcl_3 = lcl_4
lcl_1 = lcl_3
elif (lcl_2 == 1):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_10 = _rbnf_cur_token
rbnf_tmp_0 = lcl_10
try:
_rbnf_cur_token = builtin_tokens.array[builtin_tokens.offset]
if (_rbnf_cur_token.idint is 0):
builtin_tokens.offset += 1
else:
_rbnf_cur_token = None
except IndexError:
_rbnf_cur_token = None
lcl_10 = _rbnf_cur_token
rbnf_tmp_1 = lcl_10
lcl_10 = (rbnf_tmp_1 is None)
if lcl_10:
lcl_11 = builtin_tokens.offset
lcl_11 = (lcl_11, 'NonTerm not match')
lcl_11 = builtin_cons(lcl_11, builtin_nil)
lcl_11 = (False, lcl_11)
lcl_10 = lcl_11
else:
lcl_11 = rbnf_named_parse_type(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_11
lcl_11 = rbnf_named__check_2[0]
lcl_11 = (lcl_11 == False)
if lcl_11:
lcl_11 = rbnf_named__check_2
else:
lcl_3 = rbnf_named__check_2[1]
rbnf_tmp_2 = lcl_3
lcl_3 = builtin_tokens.offset
rbnf_named__off_2 = lcl_3
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_3 = _rbnf_peek_tmp
if lcl_3:
lcl_5 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_5 = lcl_5.idint
if (lcl_5 == 3):
lcl_6 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_6
lcl_6 = rbnf_named__check_3[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_3
else:
lcl_7 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_7
lcl_7 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_7 = (lcl_7, lcl_8)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 4):
lcl_6 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_6
lcl_6 = rbnf_named__check_3[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_3
else:
lcl_7 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_7
lcl_7 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_7 = (lcl_7, lcl_8)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 6):
lcl_6 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_6
lcl_6 = rbnf_named__check_3[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_3
else:
lcl_7 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_7
lcl_7 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_7 = (lcl_7, lcl_8)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 5):
lcl_6 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_6
lcl_6 = rbnf_named__check_3[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_3
else:
lcl_7 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_7
lcl_7 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_7 = (lcl_7, lcl_8)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 2):
lcl_6 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_6
lcl_6 = rbnf_named__check_3[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_3
else:
lcl_7 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_7
lcl_7 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_7 = (lcl_7, lcl_8)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 1):
lcl_6 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_6
lcl_6 = rbnf_named__check_3[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_3
else:
lcl_7 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_7
lcl_7 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_7 = (lcl_7, lcl_8)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
elif (lcl_5 == 0):
lcl_6 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_3 = lcl_6
lcl_6 = rbnf_named__check_3[0]
lcl_6 = (lcl_6 == False)
if lcl_6:
lcl_6 = rbnf_named__check_3
else:
lcl_7 = rbnf_named__check_3[1]
rbnf_tmp_3 = lcl_7
lcl_7 = rbnf_tmp_0.lineno
lcl_8 = rbnf_tmp_0.value
lcl_9 = rbnf_tmp_1.value
lcl_9 = unwrap(lcl_9)
lcl_8 = Prod(lcl_8, lcl_9, rbnf_tmp_2, rbnf_tmp_3)
lcl_7 = (lcl_7, lcl_8)
rbnf_tmp_1_ = lcl_7
lcl_7 = (True, rbnf_tmp_1_)
lcl_6 = lcl_7
lcl_4 = lcl_6
else:
lcl_6 = rbnf_tmp_0.lineno
lcl_7 = rbnf_tmp_0.value
lcl_8 = rbnf_tmp_1.value
lcl_8 = unwrap(lcl_8)
lcl_7 = Decl(lcl_7, lcl_8, rbnf_tmp_2)
lcl_6 = (lcl_6, lcl_7)
rbnf_tmp_1_ = lcl_6
lcl_6 = (True, rbnf_tmp_1_)
lcl_4 = lcl_6
lcl_3 = lcl_4
else:
lcl_4 = (rbnf_named__off_2, 'singleprod got EOF')
lcl_4 = builtin_cons(lcl_4, builtin_nil)
lcl_4 = (False, lcl_4)
lcl_3 = lcl_4
lcl_11 = lcl_3
lcl_10 = lcl_11
lcl_1 = lcl_10
elif (lcl_2 == 0):
_rbnf_old_offset = builtin_tokens.offset
_rbnf_cur_token = builtin_tokens.array[_rbnf_old_offset]
builtin_tokens.offset = (_rbnf_old_offset + 1)
lcl_10 = _rbnf_cur_token
rbnf_tmp_0 = lcl_10
lcl_10 = rbnf_named_parse_type(builtin_state, builtin_tokens)
rbnf_named__check_1 = lcl_10
lcl_10 = rbnf_named__check_1[0]
lcl_10 = (lcl_10 == False)
if lcl_10:
lcl_10 = rbnf_named__check_1
else:
lcl_11 = rbnf_named__check_1[1]
rbnf_tmp_1 = lcl_11
lcl_11 = builtin_tokens.offset
rbnf_named__off_1 = lcl_11
try:
builtin_tokens.array[(builtin_tokens.offset + 0)]
_rbnf_peek_tmp = True
except IndexError:
_rbnf_peek_tmp = False
lcl_11 = _rbnf_peek_tmp
if lcl_11:
lcl_4 = builtin_tokens.array[(builtin_tokens.offset + 0)]
lcl_4 = lcl_4.idint
if (lcl_4 == 3):
lcl_5 = rbnf_named_parse_rule(builtin_state, builtin_tokens)
rbnf_named__check_2 = lcl_5
lcl_5 = rbnf_named__check_2[0]
lcl_5 = | |
"""Module for various abstractions to common actions that may involve \
controlling multiple components."""
from kinetic import components as Components
class DualMotor:
    """Abstraction for dual motor drive train."""

    def __init__(self, motor_left: Components.Kinetics.Motor,
                 motor_right: Components.Kinetics.Motor):
        """
        Class initialization, creating instance variables.

        Takes two Components.Kinetics.Motor instances.

        :param motor_left: left-side motor
        :type motor_left: Components.Kinetics.Motor
        :param motor_right: right-side motor
        :type motor_right: Components.Kinetics.Motor
        """
        self.motor_left = motor_left
        self.motor_right = motor_right

    def forward(self, speed: float = 1) -> None:
        """
        Bi-motor control to move forward.

        Safe to use regardless if direction is disabled.

        :param speed: absolute, 0 < x =< 1 indicating motor speed, return None
            if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
            is safely ignored, default 1
        :type speed: float, optional
        """
        # Both motors drive in the same (forward) direction.
        self.motor_left.forward(speed)
        self.motor_right.forward(speed)

    def backward(self, speed: float = 1) -> None:
        """
        Bi-motor control to move backward.

        Safe to use regardless if direction is disabled, however if direction \
        is disabled, is effectively the same as calling DualMotor.forward.

        :param speed: absolute, 0 < x =< 1 indicating motor speed, return None
            if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
            is safely ignored, default 1
        :type speed: float, optional
        """
        # Both motors drive in the same (backward) direction.
        self.motor_left.backward(speed)
        self.motor_right.backward(speed)

    def clockwise(self, speed: float = 1) -> None:
        """
        Bi-motor control to spin clockwise, effectively turning right.

        Requires at least direction control, otherwise is effectively the \
        same as calling DualMotor.forward.

        :param speed: absolute, 0 < x =< 1 indicating motor speed, return None
            if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
            is safely ignored, default 1
        :type speed: float, optional
        """
        # Opposed directions (left forward, right backward) yield a
        # clockwise spin about the robot's center.
        self.motor_left.forward(speed)
        self.motor_right.backward(speed)

    def counterclockwise(self, speed: float = 1) -> None:
        """
        Bi-motor control to spin counterclockwise, effectively turning left.

        Requires at least direction control, otherwise is effectively the \
        same as calling DualMotor.forward.

        :param speed: absolute, 0 < x =< 1 indicating motor speed, return None
            if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
            is safely ignored, default 1
        :type speed: float, optional
        """
        # Mirror of clockwise(): left backward, right forward.
        self.motor_left.backward(speed)
        self.motor_right.forward(speed)
class QuadMotor:
    """Abstraction for a quad motor drive train.

    Drives the four motors together to provide forward/backward movement
    and in-place clockwise/counterclockwise rotation.
    """

    def __init__(self, motor_front_left: 'Components.Kinetics.Motor',
                 motor_front_right: 'Components.Kinetics.Motor',
                 motor_back_left: 'Components.Kinetics.Motor',
                 motor_back_right: 'Components.Kinetics.Motor'):
        """
        Class initialization, creating instance variables.

        Takes four Components.Kinetics.Motor instances.  The annotations are
        PEP 484 string forward references so they are not evaluated at
        definition time.

        :param motor_front_left: front-left-side motor
        :type motor_front_left: Components.Kinetics.Motor
        :param motor_front_right: front-right-side motor
        :type motor_front_right: Components.Kinetics.Motor
        :param motor_back_left: back-left-side motor
        :type motor_back_left: Components.Kinetics.Motor
        :param motor_back_right: back-right-side motor
        :type motor_back_right: Components.Kinetics.Motor
        """
        self.motor_front_left = motor_front_left
        self.motor_front_right = motor_front_right
        self.motor_back_left = motor_back_left
        self.motor_back_right = motor_back_right

    def forward(self, speed: float = 1) -> None:
        """
        Quad-motor control to move forward.

        Safe to use regardless if direction is disabled.

        :param speed: motor speed in (0, 1]; a value of 0 makes the motors
            return None, x > 1 results in x becoming 1, and the value is
            safely ignored if PWM is disabled; default 1
        :type speed: float, optional
        """
        self.motor_front_left.forward(speed)
        self.motor_front_right.forward(speed)
        self.motor_back_left.forward(speed)
        self.motor_back_right.forward(speed)

    def backward(self, speed: float = 1) -> None:
        """
        Quad-motor control to move backward.

        Safe to use regardless if direction is disabled; however if direction
        is disabled, is effectively the same as calling QuadMotor.forward.

        :param speed: motor speed in (0, 1]; see QuadMotor.forward
        :type speed: float, optional
        """
        self.motor_front_left.backward(speed)
        self.motor_front_right.backward(speed)
        self.motor_back_left.backward(speed)
        self.motor_back_right.backward(speed)

    def clockwise(self, speed: float = 1) -> None:
        """
        Quad-motor control to spin clockwise, effectively turning right.

        Requires at least direction control, otherwise is effectively the
        same as calling QuadMotor.forward.

        :param speed: motor speed in (0, 1]; see QuadMotor.forward
        :type speed: float, optional
        """
        # Left side forward, right side backward -> in-place clockwise spin.
        self.motor_front_left.forward(speed)
        self.motor_back_left.forward(speed)
        self.motor_front_right.backward(speed)
        self.motor_back_right.backward(speed)

    def counterclockwise(self, speed: float = 1) -> None:
        """
        Quad-motor control to spin counterclockwise, effectively turning left.

        Requires at least direction control, otherwise is effectively the
        same as calling QuadMotor.forward.

        :param speed: motor speed in (0, 1]; see QuadMotor.forward
        :type speed: float, optional
        """
        self.motor_front_left.backward(speed)
        self.motor_back_left.backward(speed)
        self.motor_front_right.forward(speed)
        self.motor_back_right.forward(speed)
class MecanumQuadMotor(QuadMotor):
"""Extends QuadMotor drive train with mecanum-wheel strafing."""
    def __init__(self, *args):
        """Class initialization.

        Redundant pass-through to QuadMotor.__init__ (takes the same four
        Components.Kinetics.Motor arguments); kept so the subclass has an
        explicit constructor.
        """
        super().__init__(*args)
def left(self, speed: int = 1) -> None:
"""
Quad-motor control to strafe left.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.forward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
is safely ignored, default 1
:type speed: int, optional
"""
self.motor_front_left.backward(speed)
self.motor_back_left.forward(speed)
self.motor_front_right.forward(speed)
self.motor_back_right.backward(speed)
def right(self, speed: int = 1) -> None:
"""
Quad-motor control to strafe right.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.forward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
is safely ignored, default 1
:type speed: int, optional
"""
self.motor_front_left.forward(speed)
self.motor_back_left.backward(speed)
self.motor_front_right.backward(speed)
self.motor_back_right.forward(speed)
def diagonal_forward_left(self, speed: int = 1) -> None:
"""
Quad-motor control to move diagonally forward-left.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.forward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
is safely ignored, default 1
:type speed: int, optional
"""
self.motor_back_left.forward(speed)
self.motor_front_right.forward(speed)
def diagonal_forward_right(self, speed: int = 1) -> None:
"""
Quad-motor control to move diagonally forward-right.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.forward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
is safely ignored, default 1
:type speed: int, optional
"""
self.motor_front_left.forward(speed)
self.motor_back_right.forward(speed)
def diagonal_backward_left(self, speed: int = 1) -> None:
"""
Quad-motor control to move diagonally backward-left.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.backward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
is safely ignored, default 1
:type speed: int, optional
"""
self.motor_back_left.backward(speed)
self.motor_front_right.backward(speed)
def diagonal_backward_right(self, speed: int = 1) -> None:
"""
Quad-motor control to move diagonally backward-right.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.backward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
is safely ignored, default 1
:type speed: int, optional
"""
self.motor_front_left.backward(speed)
self.motor_back_right.backward(speed)
def back_right_clockwise(self, speed: int = 1) -> None:
"""
Quad-motor control to spin clockwise around the back right corner, \
effectively turning right.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.forward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, if PWM is disabled speed
is safely ignored, default 1
:type speed: int, optional
"""
self.motor_front_left.forward(speed)
self.motor_back_left.forward(speed)
def back_right_counterclockwise(self, speed: int = 1) -> None:
"""
Quad-motor control to spin counterclockwise around the back right \
corner, effectively turning left.
Requires at least direction control, otherwise is effectively the \
same as calling QuadMotor.forward.
:param speed: absolute, 0 < x =< 1 indicating motor speed, return None
if 0, x > 1 will result in x becoming 1, | |
"""
Some species in the dataframe have observed as well as climatology values.
For these species, plot up X/Y and latitudinal comparisons
"""
import seaborn as sns
sns.set(color_codes=True)
current_palette = sns.color_palette("colorblind")
sns.set_style("darkgrid")
# Get the observational data
if isinstance(df, type(None)):
df = obs.get_processed_df_obs_mod() # NOTE this df contains values >400nM
# - Map observational variables to their shared extracted variables
all_vars = df.columns.tolist()
# Dictionary
obs_var_dict = {
# Temperature
'WOA_TEMP': 'Temperature',
# Chlorophyll-a
'SeaWIFs_ChlrA': 'Chl-a',
# Nitrate
'WOA_Nitrate': 'Nitrate',
# Salinity
'WOA_Salinity': 'Salinity'
# There is also 'Nitrite' and 'Ammonium'
}
# Dict of units for variables
units_dict = {
'SeaWIFs_ChlrA': "mg m$^{-3}$", # Chance et al uses micro g/L
'WOA_Salinity': 'PSU', # https://en.wikipedia.org/wiki/Salinity
'WOA_Nitrate': "$\mu$M",
'WOA_TEMP': '$^{o}$C',
}
# sort dataframe by latitude
# df = df.sort_values('Latitude', axis=0, ascending=True)
# set the order the dict keys are accessed
vars_sorted = list(sorted(obs_var_dict.keys()))[::-1]
# Setup pdf
if save2pdf:
savetitle = 'Oi_prj_Chance2014_Obs_params_vs_NEW_extracted_params'
pdff = AC.plot2pdfmulti(title=savetitle, open=True, dpi=dpi)
# - Get variables and confirm which datasets are being used for plot
dfs = {}
for key_ in vars_sorted:
print(obs_var_dict[key_], key_)
# drop nans...
index2use = df[[obs_var_dict[key_], key_]].dropna().index
dfs[key_] = df.loc[index2use, :]
# Check which datasets are being used
ptr_str = 'For variable: {} (#={})- using: {} \n'
for key_ in vars_sorted:
datasets = list(set(dfs[key_]['Data_Key']))
dataset_str = ', '.join(datasets)
print(ptr_str.format(key_, len(datasets), dataset_str))
# - Loop variables and plot as a scatter plot...
for key_ in vars_sorted:
print(obs_var_dict[key_], key_)
# new figure
fig = plt.figure()
# drop nans...
df_tmp = df[[obs_var_dict[key_], key_]].dropna()
N_ = int(df_tmp[[key_]].shape[0])
print(N_)
# Plot up
sns.regplot(x=obs_var_dict[key_], y=key_, data=df_tmp)
# Add title
plt.title('X-Y plot of {} (N={})'.format(obs_var_dict[key_], N_))
plt.ylabel('Extracted ({}, {})'.format(key_, units_dict[key_]))
plt.xlabel('Obs. ({}, {})'.format(
obs_var_dict[key_], units_dict[key_]))
# Save out figure &/or show?
if save2pdf:
AC.plot2pdfmulti(pdff, savetitle, dpi=dpi)
if show:
plt.show()
plt.close()
# - Loop variables and plot verus lat (with difference)
for key_ in vars_sorted:
print(obs_var_dict[key_], key_)
# New figure
fig = plt.figure()
# Drop nans...
df_tmp = df[[obs_var_dict[key_], key_, 'Latitude']].dropna()
N_ = int(df_tmp[[key_]].shape[0])
print(N_)
# Get data to analyse
obs = df_tmp[obs_var_dict[key_]].values
climate = df_tmp[key_].values
X = df_tmp['Latitude'].values
# Plot up
plt.scatter(X, obs, label=obs_var_dict[key_], color='red',
marker="o")
plt.scatter(X, climate, label=key_, color='blue',
marker="o")
plt.scatter(X, climate-obs, label='diff', color='green',
marker="o")
# Athesetics of plot?
plt.legend()
plt.xlim(-90, 90)
plt.ylabel('{} ({})'.format(obs_var_dict[key_], units_dict[key_]))
plt.xlabel('Latitude ($^{o}$N)')
plt.title('{} (N={}) vs. latitude'.format(obs_var_dict[key_], N_))
# Save out figure &/or show?
if save2pdf:
AC.plot2pdfmulti(pdff, savetitle, dpi=dpi)
if show:
plt.show()
plt.close()
# Save entire pdf
if save2pdf:
AC.plot2pdfmulti(pdff, savetitle, close=True, dpi=dpi)
def plot_up_lat_STT_var(restrict_data_max=True, restrict_min_salinity=True):
    """
    Plot up a "pretty" plot of STT vs Lat, with scatter sizes and color by var.

    NOTE(review): 'STT' presumably means sea-surface temperature (the
    'WOA_TEMP' column is plotted) - confirm.

    Parameters
    -------
    restrict_data_max (bool): restrict iodide observations to values < 400 nM
    restrict_min_salinity (bool): restrict to observations with salinity > 30
    """
    # - Get data as a DataFrame
    df = obs.get_processed_df_obs_mod()
    if restrict_data_max:
        # df = df[ df['Iodide']< 450. ]
        df = df[df['Iodide'] < 400.]  # Updated to use 400 nM as upper value
    if restrict_min_salinity:
        df = df[df['WOA_Salinity'] > 30.]
    # Add modulus (|latitude|, i.e. absolute distance from the equator)
    df["Latitude (Modulus)"] = np.sqrt(df["Latitude"].copy()**2)
    # - Local vars (columns used for x/y position, marker size and colour)
    X_varname = "Latitude (Modulus)"
    Y_varname = "WOA_TEMP"
    S_varname = 'Iodide'
    S_label = S_varname
    C_varname = S_varname
    # - plot: scatter of temperature vs |lat|, sized and coloured by iodide
    fig, ax = plt.subplots(facecolor='w', edgecolor='w')
    df.plot(kind="scatter", x=X_varname, y=Y_varname, alpha=0.4,
            s=df[S_varname], label=S_label, figsize=(10, 7),
            c=S_varname, cmap=plt.get_cmap("jet"), colorbar=True,
            sharex=False, ax=ax, fig=fig)
    plt.show()
def plot_up_lat_varI_varII(restrict_data_max=True, restrict_min_salinity=True):
    """
    Plot up a "pretty" plot of iodide (varI) vs Lat, with scatter sizes and
    color given by temperature (varII).

    Parameters
    -------
    restrict_data_max (bool): restrict iodide observations to values < 400 nM
    restrict_min_salinity (bool): restrict to observations with salinity > 30
    """
    # - Get data as a DataFrame
    df = obs.get_processed_df_obs_mod()
    if restrict_data_max:
        # df = df[ df['Iodide']< 450. ]
        df = df[df['Iodide'] < 400.]  # Updated to use 400 nM as upper value
    if restrict_min_salinity:
        df = df[df['WOA_Salinity'] > 30.]
    # Add modulus (|latitude|, i.e. absolute distance from the equator)
    df["Latitude (Modulus)"] = np.sqrt(df["Latitude"].copy()**2)
    # - Local variables
    # override? (unhashed)
    varI = 'Iodide'
    varII = "WOA_TEMP"
    # name local vars
    X_varname = "Latitude (Modulus)"
    Y_varname = varI
    S_varname = varII
    S_label = S_varname
    C_varname = S_varname
    # - plot up: scatter of varI vs |lat|, sized and coloured by varII
    fig, ax = plt.subplots(facecolor='w', edgecolor='w')
    df.plot(kind="scatter", x=X_varname, y=Y_varname, alpha=0.4,
            s=df[S_varname], label=S_label, figsize=(10, 7),
            c=S_varname, cmap=plt.get_cmap("jet"), colorbar=True,
            sharex=False, ax=ax, fig=fig)
    plt.ylim(-5, 500)
    plt.show()
def plot_chance_param(df=None, X_var='Temperature', Y_var='Iodide',
                      data_str='(Obs.) data'):
    """
    Plot up chance et al (2014) param vs. data in DataFrame

    Parameters
    -------
    df (pd.DataFrame): data to plot, with X_var and Y_var columns
    X_var (str): column to use for the x axis (squared for the linear fit)
    Y_var (str): column to use for the y axis
    data_str (str): label describing the dataset (used in title/filename)
    """
    # Only include finite data points for temp
    # ( NOTE: down to 1/3 of data of obs. data?! )
    # .copy() so the column added below doesn't trigger SettingWithCopyWarning
    df = df[np.isfinite(df[X_var])].copy()
    # Add a variable for C**2 fit
    Xvar2plot = X_var+'($^{2}$)'
    df[Xvar2plot] = df[X_var].loc[:].values**2
    # Plot up data and param.
    fig, ax = plt.subplots(facecolor='w', edgecolor='w')
    # Plot up
    df.plot(kind='scatter', x=Xvar2plot, y=Y_var, ax=ax)
    # Add a line of best fit reported param.
    # (slope/intercept from Chance et al (2014) linear parameterisation)
    actual_data = df[Xvar2plot].values
    test_data = np.linspace(AC.myround(actual_data.min()),
                            AC.myround(actual_data.max()), 20)
    m = 0.225
    c = 19.0
    plt.plot(test_data, ((test_data*m)+c), color='green', ls='--',
             label='Chance et al (2014) param.')
    # Limit axis to data
    plt.xlim(-50, AC.myround(df[Xvar2plot].values.max(), 1000))
    plt.ylim(-20, AC.myround(df[Y_var].values.max(), 50, round_up=True))
    # Add title and axis labels
    N = actual_data.shape[0]
    title = 'Linear param vs. {} (N={})'.format(data_str, N)
    plt.title(title)
    plt.xlabel(X_var + ' ($^{o}$C$^{2}$)')
    plt.ylabel(Y_var + ' (nM)')
    plt.legend(loc='upper left')
    # And show/save
    tmp_str = data_str.replace(" ", '_').replace("(", "_").replace(")", "_")
    savetitle = 'Chance_param_vs_{}.png'.format(tmp_str)
    plt.savefig(savetitle)
    plt.show()
def plot_macdonald_param(df=None, X_var='Temperature', Y_var='Iodide',
                         data_str='(Obs.) data'):
    """
    Plot up MacDonald et al (2014) param vs. data in DataFrame

    Parameters
    -------
    df (pd.DataFrame): data to plot, with X_var and Y_var columns
    X_var (str): temperature column in degrees C (converted to 1/K here)
    Y_var (str): column to use for the y axis (log-transformed here)
    data_str (str): label describing the dataset (used in title/filename)
    """
    # Only include finite data points for temp
    # ( NOTE: down to 1/3 of data of obs. data?! )
    # .copy() so the columns added below don't trigger SettingWithCopyWarning
    df = df[np.isfinite(df[X_var])].copy()
    # Add a variable for the Arrhenius-style fit (1/T in Kelvin)
    Xvar2plot = '1/'+X_var
    df[Xvar2plot] = 1. / (df[X_var].loc[:].values+273.15)
    Y_var2plot = 'ln({})'.format(Y_var)
    df[Y_var2plot] = np.log(df[Y_var].values)
    # Plot up data and param.
    fig, ax = plt.subplots(facecolor='w', edgecolor='w')
    df.plot(kind='scatter', x=Xvar2plot, y=Y_var2plot, ax=ax)
    # Add a line of best fit reported param.
    # (run some numbers through this equation... )
    actual_data = df[X_var].values + 273.15
    test_data = np.linspace(actual_data.min(), actual_data.max(), 20)
    # MacDonald et al (2014) Arrhenius parameterisation, scaled to nM
    test_data_Y = 1.46E6*(np.exp((-9134./test_data))) * 1E9
    plt.plot(1./test_data, np.log(test_data_Y),
             color='green', ls='--', label='MacDonald et al (2014) param.')
    # Limit axis to data
    plt.xlim(df[Xvar2plot].values.min()-0.000025,
             df[Xvar2plot].values.max()+0.000025)
    plt.ylim(0, 7)
    # Add title and axis labels
    N = actual_data.shape[0]
    title = 'Arrhenius param vs. {} (N={})'.format(data_str, N)
    plt.title(title)
    # NOTE(review): axis is 1/T so units are arguably K^-1, not 'K' - confirm
    plt.xlabel(Xvar2plot + ' ($^{o}$K)')
    plt.ylabel(Y_var2plot + ' (nM)')
    plt.legend(loc='lower left')
    # And show/save
    tmp_str = data_str.replace(" ", '_').replace("(", "_").replace(")", "_")
    savetitle = 'MacDonald_parameterisation_vs_{}.png'.format(tmp_str)
    plt.savefig(savetitle)
    plt.show()
def plot_current_parameterisations():
    """
    Plot up a comparison of Chance et al 2014 and MacDonald et al 2014 params.

    Plots each parameterisation against both the raw Chance2014 observations
    and the processed/extracted ancillary data (several alternative plots are
    left commented out below).
    """
    # - Get obs and processed data
    # get raw obs
    raw_df = get_core_Chance2014_obs()
    # don't consider iodide values above 30
    raw_df = raw_df[raw_df['Iodide'] > 30.]
    # - get processed obs.
    pro_df = obs.get_processed_df_obs_mod()
    restrict_data_max, restrict_min_salinity = True, True
    if restrict_data_max:
        # pro_df = pro_df[ pro_df['Iodide'] < 450. ] # used for July Oi! mtg.
        # restrict below 400 (per. com. RJC)
        pro_df = pro_df[pro_df['Iodide'] < 400.]
    if restrict_min_salinity:
        pro_df = pro_df[pro_df['WOA_Salinity'] > 30.]
    # - Plots with raw obs.
    # Plot up "linear" fit of iodide and temperature. (Chance et al 2014)
    # plot up Chance
    # plot_chance_param(df=raw_df.copy())
    # Plot up "Arrhenius" fit of iodide and temperature. ( MacDonald et al 2014)
    plot_macdonald_param(df=raw_df.copy())
    # - Plots with extract Vars.
    # Plot up "linear" fit of iodide and temperature. (Chance et al 2014)
    # plot_chance_param(df=pro_df.copy(), data_str='Extracted data',
    #                   X_var='WOA_TEMP')
    # Plot up "Arrhenius" fit of iodide and temperature. ( MacDonald et al 2014)
    plot_macdonald_param(df=pro_df.copy(), data_str='Extracted data',
                         X_var='WOA_TEMP')
# ---------------------------------------------------------------------------
# ---------------- Misc. Support for iodide project ------------------------
# ---------------------------------------------------------------------------
def explore_diferences_for_Skagerak():
"""
Explore how the Skagerak data differs from the dataset as a whole
"""
# - Get the observations and model output
folder = utils.get_file_locations('data_root')
filename = 'Iodine_obs_WOA_v8_5_1_ENSEMBLE_csv__avg_nSkag_nOutliers.csv'
dfA = pd.read_csv(folder+filename, encoding='utf-8')
# - Local variables
diffvar = 'Salinity diff'
ds_str = 'Truesdale_2003_I'
obs_var_dict = {
# Temperature
'WOA_TEMP': 'Temperature',
# Chlorophyll-a
'SeaWIFs_ChlrA': 'Chl-a',
# Nitrate
'WOA_Nitrate': 'Nitrate',
# Salinity
'WOA_Salinity': 'Salinity'
# There is also 'Nitrite' and 'Ammonium'
}
# - Analysis / updates to DataFrames
dfA[diffvar] = dfA['WOA_Salinity'].values - dfA['diffvar'].values
# - Get just the Skagerak dataset
df = dfA.loc[dfA['Data_Key'] == ds_str]
prt_str = 'The general stats on the Skagerak dataset ({}) are: '
print(prt_str.format(ds_str))
# general stats on the iodide numbers
stats = df['Iodide'].describe()
for idx in stats.index.tolist():
vals = stats[stats.index == idx].values[0]
print('{:<10}: {:<10}'.format(idx, vals))
# - stats on the in-situ data
print('\n')
prt_str = 'The | |
timeout_after(0.05) as ta:
await coro1()
results.append('coro1 success')
except TaskTimeout:
results.append('coro1 timeout')
except TimeoutCancellationError:
results.append('coro1 timeout cancel')
assert not ta.expired
await coro2()
results.append('coro2 success')
async def parent():
try:
async with timeout_after(0.01) as ta:
await child()
except TaskTimeout:
results.append('parent timeout')
assert ta.expired
await parent()
assert results == [
'coro1 start',
'coro1 timeout cancel',
'coro2 start',
'parent timeout'
]
@pytest.mark.asyncio
async def test_nested_context_timeout2():
    """Outer deadline fires first: each nested timeout_after context is
    cancelled with TimeoutCancellationError (its own deadline unexpired),
    while the outermost context raises TaskTimeout and reports expired."""
    async def coro1():
        try:
            async with timeout_after(1) as ta:
                await sleep(5)
        except CancelledError as e:
            # Cancelled by an enclosing timeout, not our own deadline.
            assert isinstance(e, TimeoutCancellationError)
            assert not ta.expired
            raise
        else:
            assert False

    async def coro2():
        try:
            async with timeout_after(1.5) as ta:
                await coro1()
        except CancelledError as e:
            assert isinstance(e, TimeoutCancellationError)
            assert not ta.expired
            raise
        else:
            assert False

    async def parent():
        try:
            async with timeout_after(0.01) as ta:
                await coro2()
        except (Exception, CancelledError) as e:
            # The owner of the expired deadline sees a plain TaskTimeout.
            assert isinstance(e, TaskTimeout)
        else:
            assert False
        assert ta.expired

    await parent()
@pytest.mark.asyncio
async def test_nested_context_timeout3():
    """Same as test_nested_context_timeout2, but using the function-call form
    timeout_after(secs, coro, *args) instead of the context-manager form."""
    async def coro1():
        try:
            await timeout_after(1, sleep, 5)
        except CancelledError as e:
            # Cancelled by an enclosing timeout, not our own deadline.
            assert isinstance(e, TimeoutCancellationError)
            raise
        else:
            assert False

    async def coro2():
        try:
            await timeout_after(1.5, coro1)
        except CancelledError as e:
            assert isinstance(e, TimeoutCancellationError)
            raise
        else:
            assert False

    async def parent():
        try:
            await timeout_after(0.001, coro2)
        except (Exception, CancelledError) as e:
            # The owner of the expired deadline sees a plain TaskTimeout.
            assert isinstance(e, TaskTimeout)
        else:
            assert False

    await parent()
@pytest.mark.asyncio
async def test_nested_timeout_again():
    """A TaskTimeout raised manually inside a timeout_after context is still
    catchable as TaskTimeout by the surrounding handler."""
    try:
        async with timeout_after(0.01):
            raise TaskTimeout(1.0)
    except TaskTimeout:
        pass
@pytest.mark.asyncio
async def test_nested_timeout_uncaught():
    """A TaskTimeout that escapes an inner timeout_after surfaces in the outer
    timeout_after as UncaughtTimeoutError, not as the outer's TaskTimeout."""
    results = []

    async def coro1():
        results.append('coro1 start')
        await sleep(0.5)
        results.append('coro1 done')

    async def child():
        # This will cause a TaskTimeout, but it's uncaught
        await timeout_after(0.001, coro1())

    async def parent():
        try:
            await timeout_after(1, child())
        except TaskTimeout:
            results.append('parent timeout')
        except UncaughtTimeoutError:
            results.append('uncaught timeout')

    await parent()
    assert results == [
        'coro1 start',
        'uncaught timeout'
    ]
@pytest.mark.asyncio
async def test_nested_context_timeout_uncaught():
    """Context-manager form of test_nested_timeout_uncaught: an uncaught inner
    TaskTimeout becomes UncaughtTimeoutError in the outer context."""
    results = []

    async def coro1():
        results.append('coro1 start')
        await sleep(0.5)
        results.append('coro1 done')

    async def child():
        # This will cause a TaskTimeout, but it's uncaught
        async with timeout_after(0.001):
            await coro1()

    async def parent():
        try:
            async with timeout_after(1):
                await child()
        except TaskTimeout:
            results.append('parent timeout')
        except UncaughtTimeoutError:
            results.append('uncaught timeout')

    await parent()
    assert results == [
        'coro1 start',
        'uncaught timeout'
    ]
@pytest.mark.asyncio
async def test_timeout_at_time():
    """A deadline of "right now" still lets a non-blocking coroutine finish."""
    async def summed(*values):
        return 1 + sum(values)

    loop = get_event_loop()
    for args, expected in (((), 1), ((2, 8), 11)):
        assert await timeout_at(loop.time(), summed, *args) == expected
@pytest.mark.asyncio
async def test_timeout_at_expires():
    """timeout_at raises TaskTimeout once the absolute deadline passes."""
    async def slow():
        await sleep(0.02)
        return 2

    loop = get_event_loop()
    try:
        # Deadline (now + 1ms) expires before slow() finishes its 20ms sleep.
        await timeout_at(loop.time() + 0.001, slow)
    except TaskTimeout:
        return
    assert False
@pytest.mark.asyncio
async def test_timeout_at_context():
    """The timeout_at context-manager form raises TaskTimeout at the
    absolute deadline."""
    loop = get_event_loop()
    try:
        async with timeout_at(loop.time() + 0.001):
            await sleep(0.02)
    except TaskTimeout:
        return
    assert False
# Tests for ignore_after / ignore_at (timeouts that are suppressed)
@pytest.mark.asyncio
async def test_ignore_after_coro_callstyles():
    """ignore_after accepts a callable plus args or an already-created
    coroutine object; all call styles give the same result."""
    async def summed(*values):
        return 1 + sum(values)

    results = [
        await ignore_after(0.001, summed),
        await ignore_after(0.001, summed()),
        await ignore_after(0.001, summed(2, 8)),
        await ignore_after(0.001, summed, 2, 8),
    ]
    assert results == [1, 1, 11, 11]
@pytest.mark.asyncio
async def test_ignore_after_timeout_result():
    """On expiry, ignore_after returns the caller-supplied timeout_result."""
    async def slow_sum(*values):
        await sleep(0.01)
        return 1 + sum(values)

    sentinel = all
    assert await ignore_after(0.005, slow_sum, timeout_result=100) == 100
    assert await ignore_after(0.005, slow_sum, timeout_result=sentinel) is sentinel
@pytest.mark.asyncio
async def test_ignore_after_zero():
    """A zero timeout still allows an immediately-ready coroutine to finish."""
    async def summed(*values):
        return 1 + sum(values)

    for args, expected in (((), 1), ((2,), 3), ((2, 8), 11)):
        assert await ignore_after(0, summed, *args) == expected
@pytest.mark.asyncio
async def test_ignore_after_no_expire():
    """When the coroutine beats the deadline, ignore_after returns its
    result unchanged."""
    async def t1(*values):
        # Completes after 1ms, well inside the 5ms allowance.
        return await return_after_sleep(1 + sum(values), 0.001)

    assert await ignore_after(0.005, t1, 1) == 2
    # Sleep past the (already unused) deadline to check nothing stray fires.
    await sleep(0.002)
@pytest.mark.asyncio
async def test_ignore_after_no_expire_nested():
    """Nested ignore_after calls where neither deadline fires return the
    inner coroutine's result unchanged and leave no stray cancellation."""
    async def coro1():
        return 2

    async def child():
        return await ignore_after(0.001, coro1())

    async def parent():
        return await ignore_after(0.003, child())

    try:
        result = await parent()
        # Sleep past both deadlines to check no stale cancellation fires.
        await sleep(0.005)
    except BaseException:  # was a bare `except:`; nothing should escape here
        assert False
    else:
        assert result == 2
@pytest.mark.asyncio
async def test_ignore_after_no_expire_nested2():
    """The outer ignore_after expires while the child is sleeping after its
    own (unexpired) inner ignore_after, so the parent receives None."""
    async def coro1():
        return 5

    async def child():
        result = await ignore_after(0.001, coro1(), timeout_result=1)
        await sleep(0.005)
        return result

    async def parent():
        try:
            result = await ignore_after(0.003, child())
        except BaseException:  # was a bare `except:`; expiry must be silent
            assert False
        assert result is None

    await parent()
@pytest.mark.asyncio
async def test_ignore_after_raises_KeyError():
    """Ordinary exceptions from the wrapped coroutine propagate through
    ignore_after rather than being suppressed."""
    try:
        await ignore_after(0.01, my_raises, KeyError)
    except KeyError:
        return
    assert False
@pytest.mark.asyncio
async def test_ignore_after_raises_CancelledError():
    """A CancelledError raised by the wrapped coroutine itself (not by the
    timeout machinery) propagates through ignore_after."""
    try:
        await ignore_after(0.01, my_raises, CancelledError)
    except CancelledError:
        return
    assert False
@pytest.mark.asyncio
async def test_nested_ignore():
    """A parent ignore_after that expires first cancels the child's inner
    ignore_after with TimeoutCancellationError; the child keeps running past
    its expired parent deadline, and the parent itself returns normally."""
    results = []

    async def coro1():
        results.append('coro1 start')
        await sleep(1)
        results.append('coro1 done')

    async def coro2():
        results.append('coro2 start')
        await sleep(1)
        results.append('coro2 done')

    # Parent should cause a ignore before the child.
    # Results in a TimeoutCancellationError instead of a normal TaskTimeout
    async def child():
        try:
            await ignore_after(0.005, coro1())
            results.append('coro1 success')
        except TaskTimeout:
            results.append('coro1 timeout')
        except TimeoutCancellationError:
            results.append('coro1 timeout cancel')
        await coro2()
        results.append('coro2 success')

    async def parent():
        try:
            await ignore_after(0.001, child())
            results.append('parent success')
        except TaskTimeout:
            results.append('parent timeout')

    await parent()
    # Note: coro2 is started but its sleep is cancelled by the already-expired
    # parent deadline, so neither 'coro2 done' nor 'coro2 success' appears.
    assert results == [
        'coro1 start',
        'coro1 timeout cancel',
        'coro2 start',
        'parent success'
    ]
@pytest.mark.asyncio
async def test_nested_ignore_context_timeout():
    """Context-manager form of test_nested_ignore: the earlier parent
    deadline cancels the child's context with TimeoutCancellationError and
    the parent exits normally."""
    results = []

    async def coro1():
        results.append('coro1 start')
        await sleep(1)
        results.append('coro1 done')

    async def coro2():
        results.append('coro2 start')
        await sleep(1)
        results.append('coro2 done')

    # Parent should cause a timeout before the child.
    # Results in a TimeoutCancellationError instead of a normal TaskTimeout
    async def child():
        try:
            async with ignore_after(0.005):
                await coro1()
            results.append('coro1 success')
        except TaskTimeout:
            results.append('coro1 timeout')
        except TimeoutCancellationError:
            results.append('coro1 timeout cancel')
        await coro2()
        results.append('coro2 success')

    async def parent():
        try:
            async with ignore_after(0.001):
                await child()
            results.append('parent success')
        except TaskTimeout:
            results.append('parent timeout')

    await parent()
    # coro2's sleep is cancelled by the already-expired parent deadline.
    assert results == [
        'coro1 start',
        'coro1 timeout cancel',
        'coro2 start',
        'parent success'
    ]
@pytest.mark.asyncio
async def test_nested_ignore_context_timeout2():
    """An outer ignore_after cancels nested ignore_after contexts: each inner
    context observes TimeoutCancellationError and re-raises, while the outer
    context silently absorbs its own expiry so parent() exits normally."""
    async def coro1():
        try:
            async with ignore_after(1):
                await sleep(5)
            assert False
        except CancelledError as e:
            assert isinstance(e, TimeoutCancellationError)
            raise

    async def coro2():
        try:
            async with ignore_after(1.5):
                await coro1()
            assert False
        except CancelledError as e:
            assert isinstance(e, TimeoutCancellationError)
            raise

    async def parent():
        try:
            async with ignore_after(0.001):
                await coro2()
        except BaseException:  # was a bare `except:`; nothing should escape
            assert False

    await parent()
@pytest.mark.asyncio
async def test_nested_ignore_context_timeout3():
    """Function-call form: the outer ignore_after cancels the nested calls
    (each sees TimeoutCancellationError and re-raises) and itself returns
    None rather than raising."""
    async def coro1():
        try:
            await ignore_after(1, sleep, 5)
        except CancelledError as e:
            assert isinstance(e, TimeoutCancellationError)
            raise
        else:
            assert False

    async def coro2():
        try:
            await ignore_after(1.5, coro1)
            return 3
        except CancelledError as e:
            assert isinstance(e, TimeoutCancellationError)
            raise
        else:
            assert False

    async def parent():
        try:
            result = await ignore_after(0.001, coro2)
        except Exception as e:
            assert False
        else:
            # Expired ignore_after with no timeout_result yields None.
            assert result is None

    await parent()
@pytest.mark.asyncio
async def test_nested_ignore_timeout_uncaught():
    """An expired inner ignore_after is not an error: the child simply carries
    on and returns a value, so the outer ignore_after reports success."""
    results = []

    async def coro1():
        results.append('coro1 start')
        await sleep(0.5)
        results.append('coro1 done')

    async def child():
        # This will do nothing
        await ignore_after(0.001, coro1())
        results.append('coro1 ignored')
        return 1

    async def parent():
        try:
            if await ignore_after(0.002, child()) is None:
                results.append('child ignored')
            else:
                results.append('child succeeded')
        except TaskTimeout:
            results.append('parent timeout')
        except UncaughtTimeoutError:
            results.append('uncaught timeout')

    await parent()
    assert results == [
        'coro1 start',
        'coro1 ignored',
        'child succeeded'
    ]
@pytest.mark.asyncio
async def test_nested_ignore_context_timeout_uncaught():
    """Context-manager form of test_nested_ignore_timeout_uncaught: both the
    inner expiry and the unexpired outer context complete silently."""
    results = []

    async def coro1():
        results.append('coro1 start')
        await sleep(0.05)
        results.append('coro1 done')

    async def child():
        # This will be ignored
        async with ignore_after(0.001):
            await coro1()
        results.append('child succeeded')

    async def parent():
        try:
            async with ignore_after(0.1):
                await child()
            results.append('parent succeeded')
        except TaskTimeout:
            results.append('parent timeout')
        except UncaughtTimeoutError:
            results.append('uncaught timeout')

    await parent()
    assert results == [
        'coro1 start',
        'child succeeded',
        'parent succeeded'
    ]
@pytest.mark.asyncio
async def test_ignore_at_time():
    """A deadline of "right now" still lets a non-blocking coroutine finish."""
    async def summed(*values):
        return 1 + sum(values)

    loop = get_event_loop()
    for args, expected in (((), 1), ((2, 8), 11)):
        assert await ignore_at(loop.time(), summed, *args) == expected
@pytest.mark.asyncio
async def test_ignore_at_expires():
    """On an expired absolute deadline, ignore_at returns None (or the
    caller-supplied timeout_result) instead of raising."""
    async def slow():
        await sleep(0.02)
        return 2

    loop = get_event_loop()
    try:
        result = await ignore_at(loop.time() + 0.001, slow())
    except BaseException:  # was a bare `except:`; expiry must be silent
        assert False
    assert result is None
    try:
        result = await ignore_at(loop.time() + 0.001, slow, timeout_result=1)
    except BaseException:
        assert False
    assert result == 1
@pytest.mark.asyncio
async def test_ignore_at_context():
    """The ignore_at context manager silently absorbs an expired deadline."""
    loop = get_event_loop()
    try:
        async with ignore_at(loop.time() + 0.001):
            await sleep(0.02)
            # The sleep is cancelled at the deadline, so this is unreachable.
            assert False
    except BaseException:  # was a bare `except:`; nothing should escape
        assert False
#
# Task group tests snitched from curio
#
@pytest.mark.asyncio
async def test_task_group():
    """Tasks spawned inside a TaskGroup are all complete, with their results
    available, once the group's context exits."""
    async def add(x, y):
        return x + y

    async def main():
        async with TaskGroup() as group:
            tasks = [await group.spawn(add, n, n) for n in (1, 2, 3)]
        assert [t.result() for t in tasks] == [2, 4, 6]

    await main()
@pytest.mark.asyncio
async def test_task_group_existing():
    """A TaskGroup can adopt pre-spawned tasks (including already-finished
    ones) at construction and via add_task, and waits for all of them."""
    evt = Event()

    async def child(x, y):
        return x + y

    async def child2(x, y):
        await evt.wait()
        return x + y

    async def main():
        t1 = await spawn(child, 1, 1)
        t2 = await spawn(child2, 2, 2)
        t3 = await spawn(child2, 3, 3)
        t4 = await spawn(child, 4, 4)
        # t1 and t4 are complete before the group even starts.
        await t1
        await t4
        async with TaskGroup([t1, t2, t3]) as g:
            # Releasing the event lets t2/t3 finish inside the group.
            evt.set()
            await g.add_task(t4)
        assert t1.result() == 2
        assert t2.result() == 4
        assert t3.result() == 6
        assert t4.result() == 8

    await main()
@pytest.mark.asyncio
async def test_task_any_cancel():
evt = Event()
async def child(x, y):
return x + y
async def child2(x, y):
await evt.wait()
return x + y
async def main():
async | |
Any) -> str:
"""Builds the PCE request endpoint."""
endpoint = self.endpoint
if parent: # e.g. /sec_policy/active/rulesets/1/sec_rules
parent_draft_href = convert_active_href_to_draft(href_from(parent))
endpoint = '{}/{}'.format(parent_draft_href, endpoint)
else: # mutually exclusive as the parent HREF will have the sec_policy and orgs prefix already
if self.is_sec_policy:
if policy_version not in [ACTIVE, DRAFT]:
raise IllumioApiException("Invalid policy_version passed to get: {}".format(policy_version))
endpoint = '/sec_policy/{}/{}'.format(policy_version, endpoint)
if not self.is_global:
endpoint = '/orgs/{}/{}'.format(self.pce.org_id, endpoint)
return endpoint.replace('//', '/')
    def get_by_reference(self, reference: Union[str, Reference, dict], **kwargs) -> IllumioObject:
        """Retrieves an object from the PCE using its HREF.

        Usage:
            >>> ip_list = pce.ip_lists.get_by_reference('/orgs/1/sec_policy/active/ip_lists/1')
            >>> ip_list
            IPList(
                name='Any (0.0.0.0/0 and ::/0)',
                ...
            )

        Args:
            reference (Union[str, Reference, dict]): the HREF of the object
                to fetch, or a Reference/dict it can be extracted from.

        Returns:
            IllumioObject: the object json, decoded to its IllumioObject equivalent.
        """
        # HREFs already include the org prefix, so skip default org prefixing.
        response = self.pce.get(href_from(reference), include_org=False, **kwargs)
        return self.object_cls.from_json(response.json())
def get(self, policy_version: str = DRAFT, parent: Union[str, Reference, dict] = None, **kwargs) -> List[IllumioObject]:
    """Retrieves objects from the PCE matching the given query parameters.

    Keyword arguments are forwarded to the underlying `requests.get` call;
    see https://docs.illumio.com/core/21.5/API-Reference/index.html for the
    filter parameters supported by each collection endpoint.

    Usage:
        >>> virtual_services = pce.virtual_services.get(
        ...     policy_version='active',
        ...     params={'name': 'VS-'}
        ... )
        >>> virtual_services
        [
            VirtualService(
                href='/orgs/1/sec_policy/active/virtual_services/9177c75f-7b21-4bf0-8c16-2c47c1ca3252',
                name='VS-LAB-SERVICES'
                ...
            ),
            ...
        ]

    Args:
        policy_version (str, optional): for security policy objects, whether
            to fetch 'draft' or 'active' versions. Defaults to 'draft'.
        parent (Union[str, Reference, dict], optional): reference to the
            object's parent. Required for object types (such as Security
            Rules) that only exist as children of another object.

    Returns:
        List[IllumioObject]: the decoded objects.
    """
    endpoint = self._build_endpoint(policy_version, parent)
    # the built endpoint already carries the org prefix
    response = self.pce.get(endpoint, include_org=False, **kwargs)
    return list(map(self.object_cls.from_json, response.json()))
def get_all(self, policy_version: str = DRAFT, parent: Union[str, Reference, dict] = None, **kwargs) -> List[IllumioObject]:
    """Retrieves all objects of a given type from the PCE.

    Unless the caller already set `max_results`, a probe request is sent
    with `max_results=0` and the `X-Total-Count` response header is used to
    size the follow-up request so that every object is returned.

    Args:
        policy_version (str, optional): for security policy objects, whether
            to fetch 'draft' or 'active' versions. Defaults to 'draft'.
        parent (Union[str, Reference, dict], optional): reference to the
            object's parent. Required for object types (such as Security
            Rules) that only exist as children of another object.

    Returns:
        List[IllumioObject]: the decoded objects.
    """
    params = kwargs.get('params', {})
    endpoint = self._build_endpoint(policy_version, parent)
    if 'max_results' not in params:
        kwargs['params'] = {**params, **{'max_results': 0}}
        # FIX: pass include_org=False like every other call in this class;
        # the built endpoint already contains the /orgs/{id} prefix, so
        # letting the client re-add it would double the org segment.
        response = self.pce.get(endpoint, include_org=False, **kwargs)
        if len(response.json()) > 0:  # for endpoints that don't support max_results
            return [self.object_cls.from_json(o) for o in response.json()]
        filtered_object_count = response.headers['X-Total-Count']
        kwargs['params'] = {**params, **{'max_results': int(filtered_object_count)}}
    response = self.pce.get(endpoint, include_org=False, **kwargs)
    return [self.object_cls.from_json(o) for o in response.json()]
def get_async(self, policy_version: str = DRAFT, parent: Union[str, Reference, dict] = None, **kwargs) -> List[IllumioObject]:
    """Retrieves objects via the PCE's asynchronous collection API.

    Args:
        policy_version (str, optional): for security policy objects, whether
            to fetch 'draft' or 'active' versions. Defaults to 'draft'.
        parent (Union[str, Reference, dict], optional): reference to the
            object's parent. Required for object types (such as Security
            Rules) that only exist as children of another object.

    Returns:
        List[IllumioObject]: the decoded objects.
    """
    endpoint = self._build_endpoint(policy_version, parent)
    collection = self.pce.get_collection(endpoint, **kwargs)
    return list(map(self.object_cls.from_json, collection.json()))
def create(self, body: Any, parent: Union[str, Reference, dict] = None, **kwargs) -> IllumioObject:
    """Creates an object in the PCE.

    See https://docs.illumio.com/core/21.5/API-Reference/index.html for the
    POST body parameters accepted by each object type.

    Usage:
        >>> from illumio.policyobjects import Label
        >>> label = pce.labels.create(Label(key='role', value='R-DB'))
        >>> label
        Label(
            'href': '/orgs/1/labels/14',
            'key': 'role',
            'value': 'R-DB'
        )

    Args:
        body (Any): the parameters for the newly created object.
        parent (Union[str, Reference, dict], optional): reference to the
            object's parent. Required for object types (such as Security
            Rules) that only exist as children of another object.

    Returns:
        IllumioObject: the created object (or, for bulk-style endpoints
        that answer with a list, a results dict — see _parse_response_body).
    """
    # new objects are always created against the draft policy version
    endpoint = self._build_endpoint(DRAFT, parent)
    kwargs['json'] = body
    response = self.pce.post(endpoint, include_org=False, **kwargs)
    return self._parse_response_body(response.json())
def _parse_response_body(self, json_response):
    """Decodes a POST response body into IllumioObject instance(s).

    Some endpoints (e.g. Service Bindings) accept multiple objects in a
    single POST and answer with a list; in that case the list is split into
    successfully created objects (entries carrying an 'href') and per-entry
    errors.

    Args:
        json_response: decoded JSON response body (dict or list).

    Returns:
        The decoded IllumioObject for a dict response, or a dict of the
        form {self.name: [objects], 'errors': [errors]} for a list response.
    """
    # isinstance instead of `type(...) is list` so list subclasses decode too
    if isinstance(json_response, list):
        results = {self.name: [], 'errors': []}
        for entry in json_response:
            if 'href' in entry:
                # an HREF marks a successfully created object
                results[self.name].append(self.object_cls.from_json(entry))
            else:
                results['errors'].append(entry)
        return results
    return self.object_cls.from_json(json_response)
def update(self, reference: Union[str, Reference, dict], body: Any, **kwargs) -> None:
    """Updates an existing object in the PCE.

    A successful PUT returns 204 No Content, so nothing is returned.

    Usage:
        >>> from illumio.workloads import PairingProfile
        >>> pairing_profiles = pce.pairing_profile.get(
        ...     params={'name': 'PP-DATABASE', 'max_results': 1}
        ... )
        >>> existing_profile = pairing_profiles[0]
        >>> update = PairingProfile(
        ...     name='PP-DATABASE-VENS',
        ...     enabled=False  # disable this profile
        ... )
        >>> pce.pairing_profile.update(existing_profile['href'], update)

    Args:
        reference (Union[str, Reference, dict]): HREF (or object carrying
            one) of the object to update.
        body (Any): the update data.
    """
    href = href_from(reference)
    kwargs['json'] = body
    # HREFs already embed the org prefix
    self.pce.put(href, include_org=False, **kwargs)
def delete(self, reference: Union[str, Reference, dict], **kwargs) -> None:
    """Deletes an object from the PCE.

    A successful DELETE returns 204 No Content, so nothing is returned.

    Args:
        reference (Union[str, Reference, dict]): HREF (or object carrying
            one) of the object to delete.
    """
    # HREFs already embed the org prefix
    self.pce.delete(href_from(reference), include_org=False, **kwargs)
def _bulk_change(self, objects: List[IllumioObject], method: str, success_status: str, **kwargs) -> List[dict]:
    """Issues a bulk create/update/delete operation in chunks.

    The PCE caps how many objects one bulk request may carry, so the input
    is sent in slices of BULK_CHANGE_LIMIT, one PUT per slice.

    Args:
        objects: the objects to send.
        method: bulk endpoint suffix ('bulk_create', 'bulk_update', 'bulk_delete').
        success_status: expected per-object status; any other status is
            recorded as an error. Pass None to skip the status check.

    Returns:
        List[dict]: one {'href': ..., 'errors': [...]} entry per object.
    """
    # the endpoint is invariant across chunks, so build it once
    bulk_endpoint = '{}/{}'.format(self._build_endpoint(DRAFT, None), method)
    results = []
    for start in range(0, len(objects), BULK_CHANGE_LIMIT):
        kwargs['json'] = objects[start:start + BULK_CHANGE_LIMIT]
        response = self.pce.put(bulk_endpoint, include_org=False, **kwargs)
        for entry in response.json():
            errors = entry.get('errors', [])
            if success_status and entry['status'] != success_status:
                errors.append({'token': entry['token'], 'message': entry['message']})
            results.append({'href': entry['href'], 'errors': errors})
    return results
def bulk_create(self, objects_to_create: List[IllumioObject], **kwargs) -> List[dict]:
    """Creates multiple objects in the PCE with a single bulk operation.

    NOTE: the PCE only supports bulk creation for Security Principals,
    Virtual Services and Workloads.

    Args:
        objects_to_create (List[IllumioObject]): the objects to create.

    Returns:
        List[dict]: one entry per object, each of the form::

            {
                'href': {object_href},
                'errors': [
                    {'token': {error_type}, 'message': {error_message}}
                ]
            }
    """
    return self._bulk_change(objects_to_create, method='bulk_create', success_status='created', **kwargs)
def bulk_update(self, objects_to_update: List[IllumioObject], **kwargs) -> List[dict]:
    """Updates multiple objects in the PCE with a single bulk operation.

    NOTE: the PCE only supports bulk updates for Virtual Services and
    Workloads.

    Args:
        objects_to_update (List[IllumioObject]): the objects to update.

    Returns:
        List[dict]: one entry per object, each of the form::

            {
                'href': {object_href},
                'errors': [
                    {'token': {error_type}, 'message': {error_message}}
                ]
            }
    """
    return self._bulk_change(objects_to_update, method='bulk_update', success_status='updated', **kwargs)
def bulk_delete(self, refs: List[Union[str, Reference, dict]], **kwargs) -> List[dict]:
    """Deletes multiple objects from the PCE with a single bulk operation.

    NOTE: the PCE only supports bulk deletion for Workloads.

    Args:
        refs (List[Union[str, Reference, dict]]): references to the objects
            to delete.

    Returns:
        List[dict]: one entry per object listing any errors, each of the
        form::

            {
                'href': {object_href},
                'errors': [
                    {'token': {error_type}, 'message': {error_message}}
                ]
            }
    """
    # normalize every reference down to a bare HREF wrapper for the PUT body
    targets = [Reference(href=href_from(entry)) for entry in refs]
    return self._bulk_change(targets, method='bulk_delete', success_status=None, **kwargs)
def __getattr__(self, name: str) -> _PCEObjectAPI:
    """Lazily instantiates (and caches) a generic API for registered PCE objects.

    Inspired by the Zabbix API: https://pypi.org/project/zabbix-api/
    """
    # serve a previously built API object from the cache if we have one
    try:
        return self._apis[name]
    except KeyError:
        pass
    try:
        api_data = PCE_APIS[name]
    except KeyError:
        raise AttributeError("No such PCE API object: {}".format(name))
    api = self._PCEObjectAPI(pce=self, api_data=api_data)
    self._apis[name] = api
    return api
def get_default_ip_list(self, **kwargs) -> IPList:
    """Retrieves the "Any (0.0.0.0/0 and ::/0)" default global IP list.

    Returns:
        IPList: decoded object representing the default global IP list.
    """
    # the default list's ID differs per org, so look it up by its fixed name
    query_params = dict(kwargs.get('params', {}))
    query_params['name'] = ANY_IP_LIST_NAME
    kwargs['params'] = query_params
    response = self.get('/sec_policy/active/ip_lists', **kwargs)
    return IPList.from_json(response.json()[0])
def generate_pairing_key(self, pairing_profile_href: str, **kwargs) -> str:
    """Generates a pairing key using a pairing profile.

    Args:
        pairing_profile_href (str): HREF of the pairing profile to use.

    Returns:
        str: the pairing key value.
    """
    kwargs['json'] = {}  # the POST body must be present, even if empty
    endpoint = '{}/pairing_key'.format(pairing_profile_href)
    response = self.post(endpoint, include_org=False, **kwargs)
    return response.json().get('activation_code')
@deprecated(deprecated_in='1.0.0')
def get_traffic_flows(self, traffic_query: TrafficQuery, | |
from hdfs import InsecureClient
from io import StringIO
import json
import math
import numpy as np
import os
import pandas as pd
from pyspark import SparkConf, SparkContext, StorageLevel
import random
import re
import shutil
from sklearn.ensemble import RandomForestRegressor
from sklearn.impute import SimpleImputer
from sklearn.linear_model import LinearRegression, SGDRegressor
from sklearn.metrics import mean_absolute_error, mean_squared_error, \
mean_squared_log_error, median_absolute_error, r2_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import sys
import time
import uuid
from xgboost import XGBRegressor
# regex to take care of XGBoost ValueError
regex = re.compile(r"\[|\]|<", re.IGNORECASE)
# number of additional candidates per query
NUMBER_ADDITIONAL_DATASETS = 175
def read_file(file_path, hdfs_client=None, use_hdfs=False):
    """Reads a file and returns its content as a string.

    Args:
        file_path: path to the file, local or on HDFS.
        hdfs_client: hdfs.InsecureClient used when use_hdfs is True.
        use_hdfs: when True, read through hdfs_client; otherwise local disk.

    Returns:
        The file content, or None when the file does not exist.
    """
    if use_hdfs:
        # strict=False makes status() return None instead of raising
        if not hdfs_client.status(file_path, strict=False):
            return None
        with hdfs_client.read(file_path) as reader:
            return reader.read().decode()
    if not os.path.exists(file_path):
        return None
    with open(file_path) as reader:
        return reader.read()
def create_dir(file_path, hdfs_client=None, use_hdfs=False):
    """Creates (or re-creates) the directory at file_path.

    Any pre-existing directory is removed first, so the result is always a
    fresh, empty directory.

    Returns:
        True on success.
    """
    if use_hdfs:
        if hdfs_client.status(file_path, strict=False):
            print('[WARNING] Directory already exists: %s' % file_path)
            hdfs_client.delete(file_path, recursive=True, skip_trash=True)
        hdfs_client.makedirs(file_path)
        return True
    if os.path.exists(file_path):
        print('[WARNING] Directory already exists: %s' % file_path)
        shutil.rmtree(file_path)
    os.makedirs(file_path)
    return True
def save_file(file_path, content, hdfs_client=None, use_hdfs=False):
    """Writes content to file_path, overwriting any existing file.

    Args:
        file_path: destination path, local or on HDFS.
        content: string content to write.
        hdfs_client: hdfs.InsecureClient used when use_hdfs is True.
        use_hdfs: when True, write through hdfs_client; otherwise local disk.
    """
    if use_hdfs:
        if hdfs_client.status(file_path, strict=False):
            print('[WARNING] File already exists: %s' % file_path)
        with hdfs_client.write(file_path) as writer:
            writer.write(content.encode())
        return
    if os.path.exists(file_path):
        print('[WARNING] File already exists: %s' % file_path)
    with open(file_path, 'w') as writer:
        writer.write(content)
def delete_dir(file_path, hdfs_client=None, use_hdfs=False):
    """Removes the directory at file_path if it exists (no-op otherwise)."""
    if use_hdfs:
        if hdfs_client.status(file_path, strict=False):
            hdfs_client.delete(file_path, recursive=True, skip_trash=True)
        return
    if os.path.exists(file_path):
        shutil.rmtree(file_path)
def generate_candidate_datasets_negative_examples(query_dataset, target_variable, candidate_datasets, params):
    """Generates candidate datasets for negative examples.

    This is necessary because query and candidate datasets must match for the join;
    therefore, we need to re-create the key column.

    Args:
        query_dataset: path to the query dataset CSV (must contain a
            'key-for-ranking' column).
        target_variable: name of the target column in the query dataset.
        candidate_datasets: paths to the candidate dataset CSVs.
        params: job configuration dict (output directory, cluster flag,
            HDFS address/user).

    Returns:
        Tuple (saved query dataset path, target variable, list of saved
        candidate dataset paths).
    """
    # Spark accumulator counting new query/candidate combinations across workers
    global new_combinations_counter
    new_candidate_datasets = list()
    # params
    output_dir = params['new_datasets_directory']
    files_dir = os.path.join(output_dir, 'files-test-data')
    cluster_execution = params['cluster']
    hdfs_address = params['hdfs_address']
    hdfs_user = params['hdfs_user']
    # HDFS Client (only needed when running on the cluster)
    hdfs_client = None
    if cluster_execution:
        hdfs_client = InsecureClient(hdfs_address, user=hdfs_user)
    # each query gets its own uniquely named output directory
    identifier = str(uuid.uuid4())
    identifier_dir = os.path.join(files_dir, identifier)
    create_dir(identifier_dir, hdfs_client, cluster_execution)
    # reading query dataset
    query_data_str = read_file(query_dataset, hdfs_client, cluster_execution)
    query_data = pd.read_csv(StringIO(query_data_str))
    query_data_key_column = list(query_data['key-for-ranking'])
    new_combinations_counter += len(candidate_datasets)
    id_ = 0
    for i in range(len(candidate_datasets)):
        candidate_dataset = candidate_datasets[i]
        # reading candidate dataset
        candidate_data_str = read_file(candidate_dataset, hdfs_client, cluster_execution)
        candidate_data = pd.read_csv(StringIO(candidate_data_str))
        # drop the candidate's own key column; it is rebuilt from the query's
        candidate_data.drop(columns=['key-for-ranking'], inplace=True)
        # generating extra key column entries, if necessary:
        # candidate rows beyond the query's length get fresh UUID keys, so
        # they can never match a query row in the join (negative examples)
        extra_key_column = list()
        if query_data.shape[0] < candidate_data.shape[0]:
            extra_key_column = [
                str(uuid.uuid4()) for _ in range(candidate_data.shape[0] - query_data.shape[0])
            ]
        # adding the key column to the candidate data
        min_size = min(query_data.shape[0], candidate_data.shape[0])
        candidate_data.insert(
            0,
            'key-for-ranking',
            query_data_key_column[:min_size] + extra_key_column
        )
        # saving candidate dataset; suffix id_ keeps names unique within this dir
        dataset_name = "%s_%d.csv" % (os.path.splitext(os.path.basename(candidate_dataset))[0], id_)
        file_path = os.path.join(identifier_dir, dataset_name)
        save_file(
            file_path,
            candidate_data.to_csv(index=False),
            hdfs_client,
            cluster_execution
        )
        new_candidate_datasets.append(file_path)
        id_ += 1
    # saving query dataset alongside its candidates
    query_dataset_path = os.path.join(identifier_dir, os.path.basename(query_dataset))
    save_file(
        query_dataset_path,
        query_data.to_csv(index=False),
        hdfs_client,
        cluster_execution
    )
    # saving target information in a hidden '.target' marker file
    save_file(
        os.path.join(identifier_dir, '.target'),
        target_variable,
        hdfs_client,
        cluster_execution
    )
    return (query_dataset_path, target_variable, new_candidate_datasets)
def generate_performance_scores(query_dataset, target_variable, candidate_datasets, params):
    """Generates all the performance scores.

    Trains a model on the query dataset alone (baseline), then on the query
    dataset joined on 'key-for-ranking' with each candidate dataset, and
    emits one JSON record per candidate comparing before/after metrics.

    Args:
        query_dataset: path to the query dataset CSV.
        target_variable: name of the column to predict.
        candidate_datasets: paths to the candidate dataset CSVs.
        params: job configuration dict (regression algorithm, cluster flag,
            HDFS settings, inner_join flag).

    Returns:
        List of JSON strings, one per candidate dataset.
    """
    performance_scores = list()
    # params
    algorithm = params['regression_algorithm']
    cluster_execution = params['cluster']
    hdfs_address = params['hdfs_address']
    hdfs_user = params['hdfs_user']
    inner_join = params['inner_join']
    # HDFS Client (only needed when running on the cluster)
    hdfs_client = None
    if cluster_execution:
        # time.sleep(np.random.randint(1, 120))  # avoid opening multiple sockets at the same time
        hdfs_client = InsecureClient(hdfs_address, user=hdfs_user)
    # reading query dataset
    query_data_str = read_file(query_dataset, hdfs_client, cluster_execution)
    query_data = pd.read_csv(StringIO(query_data_str))
    query_data.set_index(
        'key-for-ranking',
        drop=True,
        inplace=True
    )
    # build model on query data only (baseline scores; no imputation needed)
    _, scores_before = get_performance_scores(
        query_data,
        target_variable,
        algorithm,
        False
    )
    for candidate_dataset in candidate_datasets:
        # reading candidate dataset
        candidate_data_str = read_file(candidate_dataset, hdfs_client, cluster_execution)
        candidate_data = pd.read_csv(StringIO(candidate_data_str))
        candidate_data.set_index(
            'key-for-ranking',
            drop=True,
            inplace=True
        )
        # left join keeps every query row; unmatched candidate columns are NaN
        join_ = query_data.join(
            candidate_data,
            how='left',
            rsuffix='_r'
        )
        if inner_join:
            # emulate an inner join by dropping rows with missing values
            join_.dropna(inplace=True)
        # build model on joined data; impute NaNs only when NaN rows were kept
        imputation_strategy, scores_after = get_performance_scores(
            join_,
            target_variable,
            algorithm,
            not(inner_join)
        )
        performance_scores.append(
            generate_output_performance_data(
                query_dataset=query_dataset,
                target=target_variable,
                candidate_dataset=candidate_dataset,
                scores_before=scores_before,
                scores_after=scores_after,
                imputation_strategy=imputation_strategy
            )
        )
    return performance_scores
def get_performance_scores(data, target_variable_name, algorithm, missing_value_imputation):
    """Builds a model on data to predict the target variable and reports metrics.

    Args:
        data: pandas DataFrame with features and the target column.
        target_variable_name: name of the target column.
        algorithm: regression algorithm name passed to train_and_test_model.
        missing_value_imputation: when True, impute NaNs before training and
            pick the best imputation strategy.

    Returns:
        Tuple (imputation_strategy, scores): the winning SimpleImputer
        strategy (None when no imputation was done) and the metric list
        from train_and_test_model.
    """
    if not missing_value_imputation:
        return (None, train_and_test_model(data, target_variable_name, algorithm))
    # strategies = ['mean', 'median', 'most_frequent']
    strategies = ['mean']  # using only mean for now
    best_scores = list()
    best_error = math.inf
    best_strategy = ''
    for strategy in strategies:
        # impute missing values before training
        imputer = SimpleImputer(missing_values=np.nan, strategy=strategy)
        imputed = pd.DataFrame(imputer.fit_transform(data))
        imputed.columns = data.columns
        imputed.index = data.index
        strategy_scores = train_and_test_model(imputed, target_variable_name, algorithm)
        # keep the strategy with the smallest mean absolute error (index 0)
        if strategy_scores[0] < best_error:
            best_error = strategy_scores[0]
            best_strategy = strategy
            best_scores = list(strategy_scores)
    return (best_strategy, best_scores)
def train_and_test_model(data, target_variable_name, algorithm):
    """Builds a regression model on data to predict the target variable.

    Args:
        data: pandas DataFrame containing features and the target column.
        target_variable_name: name of the target column in data.
        algorithm: one of 'random forest', 'linear', 'sgd', 'xgboost'.

    Returns:
        [mean_absolute_error, mean_squared_error, median_absolute_error,
        r2_score] computed on a held-out 33% test split. NOTE: for the
        scaled algorithms ('random forest', 'sgd') the metrics are in
        standardized target units, not the original scale.
    """
    X_train, X_test, y_train, y_test = train_test_split(
        data.drop(target_variable_name, axis=1),
        data[target_variable_name],
        test_size=0.33,
        random_state=42
    )
    yfit = None
    if algorithm in ('random forest', 'sgd'):
        # both estimators are trained on standardized data; fit the scalers
        # on the training split only to avoid leaking test statistics
        scaler_X = StandardScaler().fit(X_train)
        scaler_y = StandardScaler().fit(y_train.values.reshape(-1, 1))
        X_train = scaler_X.transform(X_train)
        y_train = scaler_y.transform(y_train.values.reshape(-1, 1))
        X_test = scaler_X.transform(X_test)
        y_test = scaler_y.transform(y_test.values.reshape(-1, 1))
        if algorithm == 'random forest':
            model = RandomForestRegressor(
                n_estimators=100,
                random_state=42,
                n_jobs=-1,
                max_depth=len(data.columns) - 1
            )
        else:
            model = SGDRegressor()
        model.fit(X_train, y_train.ravel())
        yfit = model.predict(X_test)
    elif algorithm == 'linear':
        # FIX: `normalize=True` was deprecated in scikit-learn 0.24 and
        # removed in 1.2 (TypeError on current versions). For ordinary
        # least squares, normalization only affects coefficient
        # conditioning, not the fitted predictions.
        linear_r = LinearRegression()
        linear_r.fit(X_train, y_train)
        yfit = linear_r.predict(X_test)
    elif algorithm == 'xgboost':
        # taking care of 'ValueError: feature_names may not contain [, ] or <'
        X_train = replace_invalid_characters(X_train)
        X_test = replace_invalid_characters(X_test)
        xgboost_r = XGBRegressor(max_depth=5, objective='reg:squarederror', random_state=42)
        xgboost_r.fit(X_train, y_train)
        yfit = xgboost_r.predict(X_test)
    return [
        mean_absolute_error(y_test, yfit),
        mean_squared_error(y_test, yfit),
        median_absolute_error(y_test, yfit),
        r2_score(y_test, yfit),
    ]
def generate_output_performance_data(query_dataset, target, candidate_dataset,
                                     scores_before, scores_after, imputation_strategy=None):
    """Serializes one training-data record as a JSON string.

    Each metric field pairs the score before augmentation with the score
    after joining the candidate dataset. Dataset paths are trimmed to their
    last two components (parent directory + file name) so records are
    location-independent.
    """
    def _tail(path):
        # keep only '<dir>/<file>'
        return os.path.sep.join(path.split(os.path.sep)[-2:])

    record = dict(
        query_dataset=_tail(query_dataset),
        target=target,
        candidate_dataset=_tail(candidate_dataset),
        imputation_strategy=imputation_strategy,
        mean_absolute_error=[scores_before[0], scores_after[0]],
        mean_squared_error=[scores_before[1], scores_after[1]],
        median_absolute_error=[scores_before[2], scores_after[2]],
        r2_score=[scores_before[3], scores_after[3]]
    )
    return json.dumps(record)
def replace_invalid_characters(data):
    """Replaces '[', ']' and '<' in column names with '_'.

    Works around XGBoost's
    ``ValueError: feature_names may not contain [, ] or <``.
    From: https://stackoverflow.com/questions/48645846/pythons-xgoost-valueerrorfeature-names-may-not-contain-or/50633571

    Returns:
        The same DataFrame, with its columns renamed in place.
    """
    bad_chars = ('[', ']', '<')
    renamed = []
    for col in data.columns:
        # only touch columns that actually contain an offending character
        if any(ch in str(col) for ch in bad_chars):
            renamed.append(regex.sub("_", col))
        else:
            renamed.append(col)
    data.columns = renamed
    return data
if __name__ == '__main__':
start_time = time.time()
# Spark context
conf = SparkConf().setAppName("Data Generation (Scores) for Test Data")
sc = SparkContext(conf=conf)
# counters
existing_combinations_counter = 0
new_combinations_counter = sc.accumulator(0)
# parameters
params = json.load(open(".params.json"))
output_dir = params['new_datasets_directory']
files_dir = os.path.join(output_dir, 'files-test-data')
cluster_execution = params['cluster']
hdfs_address = params['hdfs_address']
hdfs_user = params['hdfs_user']
# HDFS Client
hdfs_client = None
if cluster_execution:
hdfs_client = InsecureClient(hdfs_address, user=hdfs_user)
create_dir(files_dir, hdfs_client, cluster_execution)
# reading test data
# assuming filename is 'test-data.csv'
if not os.path.exists("test-data.csv"):
print("Test data does not exist: test-data.csv")
sys.exit(0)
test_data = sc.parallelize(open("test-data.csv").readlines()).map(
lambda x: x.split(',')
).map(
lambda x: (x[0], x[1], x[2])
).persist(StorageLevel.MEMORY_AND_DISK)
existing_combinations_counter = test_data.count()
new_combinations = test_data.cartesian(test_data).filter(
# filtering same query candidate
lambda x: x[0][0] != x[1][0]
).map(
# key => (query dataset, target variable)
# val => [candidate dataset]
lambda x: ((x[0][0], x[0][1]), [x[1][2]])
).reduceByKey(
# concatenating lists of candidate datasets
lambda x, y: x + y
).map(
# (query dataset, target variable, random candidate datasets)
lambda x: (x[0][0], x[0][1], list(
np.random.choice(x[1], size=NUMBER_ADDITIONAL_DATASETS, replace=False)))
).persist(StorageLevel.MEMORY_AND_DISK)
if not new_combinations.isEmpty():
# getting performance scores
performance_scores = new_combinations.map(
lambda x: generate_candidate_datasets_negative_examples(x[0], x[1], x[2], params)
).flatMap(
lambda x: generate_performance_scores(x[0], x[1], x[2], params)
)
# saving scores
algorithm_name = params['regression_algorithm']
if params['regression_algorithm'] == 'random forest':
algorithm_name = 'random-forest'
filename = os.path.join(output_dir, 'new-test-data-' + algorithm_name)
delete_dir(filename, hdfs_client, cluster_execution)
if not cluster_execution:
filename = 'file://' + filename
performance_scores.saveAsTextFile(filename)
print('Duration: %.4f seconds' % (time.time() - start_time))
print(' -- Configuration:')
print(' . original_datasets_directory: %s' % params['original_datasets_directory'])
print(' . new_datasets_directory: %s' % params['new_datasets_directory'])
print(' . cluster: %s' % str(params['cluster']))
print(' . hdfs_address: | |
and
output RA and DEC vectors are in radians rather than degrees
RESTRICTIONS:
Accuracy of precession decreases for declination values near 90
degrees. PRECESS should not be used more than 2.5 centuries from
2000 on the FK5 system (1950.0 on the FK4 system).
EXAMPLES:
(1) The Pole Star has J2000.0 coordinates (2h, 31m, 46.3s,
89d 15' 50.6"); compute its coordinates at J1985.0
IDL> precess, ten(2,31,46.3)*15, ten(89,15,50.6), 2000, 1985, /PRINT
====> 2h 16m 22.73s, 89d 11' 47.3"
(2) Precess the B1950 coordinates of Eps Ind (RA = 21h 59m,33.053s,
DEC = (-56d, 59', 33.053") to equinox B1975.
IDL> ra = ten(21, 59, 33.053)*15
IDL> dec = ten(-56, 59, 33.053)
IDL> precess, ra, dec ,1950, 1975, /fk4
PROCEDURE:
Algorithm from Computational Spherical Astronomy by Taff (1983),
p. 24. (FK4). FK5 constants from "Astronomical Almanac Explanatory
Supplement 1992, page 104 Table 3.211.1.
PROCEDURE CALLED:
Function PREMAT - computes precession matrix
REVISION HISTORY
Written, <NAME>, STI Corporation August 1986
Correct negative output RA values February 1989
Added /PRINT keyword <NAME> November, 1991
Provided FK5 (J2000.0) <NAME> January 1994
Precession Matrix computation now in PREMAT <NAME> June 1994
Added /RADIAN keyword <NAME> June 1997
Converted to IDL V5.0 <NAME> September 1997
Correct negative output RA values when /RADIAN used March 1999
Work for arrays, not just vectors <NAME> September 2003
"""
deg_to_rad = numpy.pi / 180.0
if not radian:
# ra, dec are given in degrees
ra_rad = ra * deg_to_rad # Convert to double precision if not already
dec_rad = dec * deg_to_rad
else:
ra_rad = ra
dec_rad = dec
a = cos(dec_rad)
x = [a * cos(ra_rad), a * sin(ra_rad), sin(dec_rad)] # input direction
sec_to_rad = deg_to_rad / 3600.0
# Use PREMAT function to get precession matrix from Equinox1 to Equinox2
r = premat(equinox1, equinox2, FK4=FK4)
x2 = numpy.dot(r, x) # rotate to get output direction cosines
ra_rad = numpy.arctan2(x2[1], x2[0])
dec_rad = arcsin(x2[2])
if not radian:
ra = ra_rad / deg_to_rad
# RA between 0 and 360 degrees
ra = ra + int(ra < 0.0) * 360.0
dec = dec_rad / deg_to_rad
else:
ra = ra_rad
dec = dec_rad
ra = ra + int(ra < 0.0) * 2.0 * numpy.pi
return [ra, dec]
def precess_xyz(x, y, z, equinox1, equinox2):
    """
    Precess equatorial geocentric rectangular coordinates.

    Parameters
    ----------
    x, y, z : float
        Rectangular coordinates.
    equinox1 : float
        Equinox of the input coordinates.
    equinox2 : float
        Equinox of the output coordinates.

    Returns
    -------
    list
        The precessed ``[x, y, z]`` values.

    Notes
    -----
    .. note:: Ported from the IDL Astronomy User's Library (PRECESS_XYZ,
       written by P. Plait/ACC, March 24 1999).

    The rectangular coordinates are converted to spherical RA/Dec,
    precessed in the normal way via :func:`precess`, then rebuilt from
    unit vectors scaled by the original distance.
    """
    # convert rectangular input to spherical RA/Dec (radians)
    right_ascension = numpy.arctan2(y, x)
    dist = sqrt(x * x + y * y + z * z)  # magnitude of the position vector
    declination = arcsin(z / dist)
    # precess the spherical coordinates between the two equinoxes
    right_ascension, declination = precess(
        right_ascension, declination, equinox1, equinox2, radian=True)
    # rebuild rectangular coordinates from unit vectors, preserving distance
    x = cos(right_ascension) * cos(declination) * dist
    y = sin(right_ascension) * cos(declination) * dist
    z = sin(declination) * dist
    return [x, y, z]
def xyz(date, velocity=False, equinox=1950.0):
"""
Calculate geocentric X,Y, and Z and velocity coordinates of the Sun.
Parameters
----------
date : float
Julian date
equinox : float
Equinox of output. If None, Equinox will be 1950.
velocity : boolean
If False, the velocity of the Sun will not be calculated
Returns
-------
Sun position and velocity : list
A list of the from [X, Y, Z, XVEL, YVEL, ZVEL]. Last three values are None
if `velocity` flag is set to False.
Notes
-----
.. note:: This function was ported from the IDL Astronomy User's Library.
:IDL - Documentation:
NAME:
XYZ
PURPOSE:
Calculate geocentric X,Y, and Z and velocity coordinates of the Sun
EXPLANATION:
Calculates geocentric X,Y, and Z vectors and velocity coordinates
(dx, dy and dz) of the Sun. (The positive X axis is directed towards
the equinox, the y-axis, towards the point on the equator at right
ascension 6h, and the z axis toward the north pole of the equator).
Typical position accuracy is <1e-4 AU (15000 km).
CALLING SEQUENCE:
XYZ, date, x, y, z, [ xvel, yvel, zvel, EQUINOX = ]
INPUT:
date: reduced julian date (=JD - 2400000), scalar or vector
OUTPUT:
x,y,z: scalars or vectors giving heliocentric rectangular coordinates
(in A.U) for each date supplied. Note that sqrt(x^2 + y^2
+ z^2) gives the Earth-Sun distance for the given date.
xvel, yvel, zvel: velocity vectors corresponding to X, Y and Z.
OPTIONAL KEYWORD INPUT:
EQUINOX: equinox of output. Default is 1950.
EXAMPLE:
What were the rectangular coordinates and velocities of the Sun on
Jan 22, 1999 0h UT (= JD 2451200.5) in J2000 coords? NOTE:
Astronomical Almanac (AA) is in TDT, so add 64 seconds to
UT to convert.
IDL> xyz,51200.5+64.d/86400.d,x,y,z,xv,yv,zv,equinox = 2000
Compare to Astronomical Almanac (1999 page C20)
X (AU) Y (AU) Z (AU)
XYZ: 0.51456871 -0.76963263 -0.33376880
AA: 0.51453130 -0.7697110 -0.3337152
abs(err): 0.00003739 0.00007839 0.00005360
abs(err)
(km): 5609 11759 8040
NOTE: Velocities in AA are for Earth/Moon barycenter
(a very minor offset) see AA 1999 page E3
X VEL (AU/DAY) YVEL (AU/DAY) Z VEL (AU/DAY)
XYZ: -0.014947268 -0.0083148382 -0.0036068577
AA: -0.01494574 -0.00831185 -0.00360365
abs(err): 0.000001583 0.0000029886 0.0000032077
abs(err)
(km/sec): 0.00265 0.00519 0.00557
PROCEDURE CALLS:
PRECESS_XYZ
REVISION HISTORY
Original algorithm from Almanac for Computers, Doggett et al. USNO 1978
Adapted from the book Astronomical Photometry by <NAME>
Written <NAME> STX June 1989
Correct error in X coefficient W. Landsman HSTX January 1995
Added velocities, more terms to positions and EQUINOX keyword,
some minor adjustments to calculations
P. Plait/ACC March 24, 1999
"""
picon = numpy.pi / 180.0
t = (date - 15020.0) / 36525.0 # Relative Julian century from 1900
# NOTE: longitude arguments below are given in *equinox* of date.
# Precess these to equinox 1950 to give everything an even footing.
# Compute argument of precession from equinox of date back to 1950
pp = (1.396041 + 0.000308 * (t + 0.5)) * (t - 0.499998)
# Compute mean solar longitude, precessed back to 1950
el = 279.696678 + 36000.76892 * t + 0.000303 * t * t - pp
# Compute Mean longitude of the Moon
c = 270.434164 + 480960. * t + 307.883142 * t - 0.001133 * t * t - pp
# Compute longitude of Moon's ascending node
n = 259.183275 - 1800. * t - 134.142008 * t + 0.002078 * t * t - pp
# Compute mean solar anomaly
g = 358.475833 + 35999.04975 * t - 0.00015 * t * t
# Compute the mean jupiter anomaly
j = 225.444651 + 2880.0 * t + 154.906654 * t * t
# Compute mean anomaly of Venus
v = 212.603219 + 58320. * t + 197.803875 * t + 0.001286 * t * t
# Compute mean anomaly of Mars
m = 319.529425 + 19080. * t | |
<reponame>lewisl9029/pulumi
# Copyright 2016-2021, Pulumi Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import os
import traceback
from typing import Optional, Any, Callable, List, NamedTuple, Dict, Set, Tuple, Union, TYPE_CHECKING, cast, Mapping, Sequence, Iterable
from google.protobuf import struct_pb2
import grpc
from . import rpc, settings, known_types
from .. import log
from ..runtime.proto import provider_pb2, resource_pb2
from .rpc_manager import RPC_MANAGER
from .settings import handle_grpc_error
from ..output import Output
from .. import _types
if TYPE_CHECKING:
from .. import Resource, ComponentResource, CustomResource, Inputs, ProviderResource
from ..resource import ResourceOptions
class ResourceResolverOperations(NamedTuple):
    """
    The set of properties resulting from a successful call to prepare_resource.

    Field order matters: prepare_resource constructs this tuple positionally.
    """
    parent_urn: Optional[str]
    """
    This resource's parent URN.
    """
    serialized_props: struct_pb2.Struct
    """
    This resource's input properties, serialized into protobuf structures.
    """
    dependencies: Set[str]
    """
    The set of URNs, corresponding to the resources that this resource depends on.
    """
    provider_ref: Optional[str]
    """
    An optional reference to a provider that should be used for this resource's CRUD operations.
    """
    provider_refs: Dict[str, Optional[str]]
    """
    An optional dict of references to providers that should be used for this resource's CRUD operations.
    """
    property_dependencies: Dict[str, List[Optional[str]]]
    """
    A map from property name to the URNs of the resources the property depends on.
    """
    aliases: List[Optional[str]]
    """
    A list of aliases applied to this resource.
    """
# Prepares for an RPC that will manufacture a resource, and hence deals with input and output properties.
# pylint: disable=too-many-locals
async def prepare_resource(res: 'Resource',
                           ty: str,
                           custom: bool,
                           remote: bool,
                           props: 'Inputs',
                           opts: Optional['ResourceOptions'],
                           typ: Optional[type] = None) -> ResourceResolverOperations:
    """Await and serialize everything needed to register `res` with the engine.

    Resolves the parent URN, serializes input properties (collecting implicit
    dependencies along the way), builds provider reference strings, and awaits
    all aliases. Returns the results bundled as a ResourceResolverOperations.

    Args:
        res: The resource being prepared.
        ty: The resource's Pulumi type token (e.g. "pulumi:pulumi:Stack").
        custom: True if this is a custom (provider-managed) resource.
        remote: True if this is a remote component resource.
        props: The resource's raw input properties.
        opts: Optional resource options (parent, provider(s), depends_on, ...).
        typ: Optional typed-resource metadata used for property-name
            translation; when given, it overrides translate_input_property.
    """
    # Before we can proceed, all our dependencies must be finished.
    explicit_urn_dependencies: Set[str] = set()
    if opts is not None and opts.depends_on is not None:
        explicit_urn_dependencies = await _resolve_depends_on_urns(opts)
    # Serialize out all our props to their final values. In doing so, we'll also collect all
    # the Resources pointed to by any Dependency objects we encounter, adding them to 'implicit_dependencies'.
    property_dependencies_resources: Dict[str, List['Resource']] = {}
    # If we have type information, we'll use it for translations rather than the resource's translate_input_property.
    translate: Optional[Callable[[str], str]] = res.translate_input_property
    if typ is not None:
        translate = None
    serialized_props = await rpc.serialize_properties(props, property_dependencies_resources, translate, typ)
    # Wait for our parent to resolve
    parent_urn: Optional[str] = ""
    if opts is not None and opts.parent is not None:
        parent_urn = await opts.parent.urn.future()
    # TODO(sean) is it necessary to check the type here?
    elif ty != "pulumi:pulumi:Stack":
        # If no parent was provided, parent to the root resource.
        parent = settings.get_root_resource()
        if parent is not None:
            parent_urn = await parent.urn.future()
    # Construct the provider reference, if we were given a provider to use.
    provider_ref = None
    if custom and opts is not None and opts.provider is not None:
        provider = opts.provider
        # If we were given a provider, wait for it to resolve and construct a provider reference from it.
        # A provider reference is a well-known string (two ::-separated values) that the engine interprets.
        provider_urn = await provider.urn.future()
        provider_id = await provider.id.future() or rpc.UNKNOWN
        provider_ref = f"{provider_urn}::{provider_id}"
    # For remote resources, merge any provider opts into a single dict, and then create a new dict with all of the
    # resolved provider refs.
    provider_refs: Dict[str, Optional[str]] = {}
    if remote and opts is not None:
        providers = convert_providers(opts.provider, opts.providers)
        for name, provider in providers.items():
            # If we were given providers, wait for them to resolve and construct provider references from them.
            # A provider reference is a well-known string (two ::-separated values) that the engine interprets.
            urn = await provider.urn.future()
            id_ = await provider.id.future() or rpc.UNKNOWN
            ref = f"{urn}::{id_}"
            provider_refs[name] = ref
    # Union of explicit depends_on URNs and every URN discovered during
    # property serialization.
    dependencies: Set[str] = set(explicit_urn_dependencies)
    property_dependencies: Dict[str, List[Optional[str]]] = {}
    for key, deps in property_dependencies_resources.items():
        urns = await _expand_dependencies(deps)
        dependencies |= urns
        property_dependencies[key] = list(urns)
    # Wait for all aliases. Note that we use `res._aliases` instead of `opts.aliases` as the
    # former has been processed in the Resource constructor prior to calling
    # `register_resource` - both adding new inherited aliases and simplifying aliases down
    # to URNs.
    aliases: List[Optional[str]] = []
    for alias in res._aliases:
        alias_val = await Output.from_input(alias).future()
        # De-duplicate while preserving first-seen order.
        if not alias_val in aliases:
            aliases.append(alias_val)
    return ResourceResolverOperations(
        parent_urn,
        serialized_props,
        dependencies,
        provider_ref,
        provider_refs,
        property_dependencies,
        aliases,
    )
def resource_output(res: 'Resource') -> Tuple[Callable[[Any, bool, bool, Optional[Exception]], None], 'Output']:
    """Create an Output for *res* backed by three fresh futures (value, known,
    secret), plus a resolver callback that settles all three at once.

    Calling the resolver with an exception propagates that exception through
    every future; otherwise each future receives its corresponding result.
    """
    fut_value: asyncio.Future[Any] = asyncio.Future()
    fut_known: asyncio.Future[bool] = asyncio.Future()
    fut_secret: asyncio.Future[bool] = asyncio.Future()

    def resolve(value: Any, known: bool, secret: bool, exn: Optional[Exception]):
        if exn is None:
            fut_value.set_result(value)
            fut_known.set_result(known)
            fut_secret.set_result(secret)
        else:
            # Failure: fail every future so all awaiters observe the error.
            fut_value.set_exception(exn)
            fut_known.set_exception(exn)
            fut_secret.set_exception(exn)

    return resolve, Output({res}, fut_value, fut_known, fut_secret)
def get_resource(res: 'Resource',
                 props: 'Inputs',
                 custom: bool,
                 urn: str,
                 typ: Optional[type] = None) -> None:
    """Read an existing resource's state from the engine by URN and resolve
    `res`'s outputs from it.

    Invokes the well-known "pulumi:pulumi:getResource" token on the monitor
    and resolves the resource's urn, id (for custom resources) and output
    properties from the response. Runs asynchronously via the RPC manager.
    """
    log.debug(f"getting resource: urn={urn}")
    # If we have type information, we'll use its and the resource's type/name metadata
    # for name translations rather than using the resource's translation methods.
    transform_using_type_metadata = typ is not None
    # Extract the resource type from the URN.
    # URN layout: urn:pulumi:<stack>::<project>::<qualified type>::<name>.
    urn_parts = urn.split("::")
    qualified_type = urn_parts[2]
    ty = qualified_type.split("$")[-1]
    # Initialize the URN property on the resource.
    (resolve_urn, res.__dict__["urn"]) = resource_output(res)
    # If this is a custom resource, initialize its ID property.
    resolve_id: Optional[Callable[[Any, bool, bool, Optional[Exception]], None]] = None
    if custom:
        (resolve_id, res.__dict__["id"]) = resource_output(res)
    # Like the other resource functions, "transfer" all input properties onto unresolved futures on res.
    resolvers = rpc.transfer_properties(res, props)
    async def do_get():
        try:
            resolver = await prepare_resource(res, ty, custom, False, props, None, typ)
            monitor = settings.get_monitor()
            inputs = await rpc.serialize_properties({"urn": urn}, {})
            req = provider_pb2.InvokeRequest(tok="pulumi:pulumi:getResource", args=inputs, provider="", version="")
            def do_invoke():
                try:
                    return monitor.Invoke(req)
                except grpc.RpcError as exn:
                    # handle_grpc_error converts/raises the gRPC error.
                    handle_grpc_error(exn)
                    return None
            # Run the blocking gRPC call off the event loop.
            resp = await asyncio.get_event_loop().run_in_executor(None, do_invoke)
            # If the invoke failed, raise an error.
            if resp.failures:
                raise Exception(f"getResource failed: {resp.failures[0].reason} ({resp.failures[0].property})")
        except Exception as exn:
            # Fail every outstanding future so awaiters don't hang, then
            # re-raise for the RPC manager to record.
            log.debug(
                f"exception when preparing or executing rpc: {traceback.format_exc()}")
            rpc.resolve_outputs_due_to_exception(resolvers, exn)
            resolve_urn(None, True, False, exn)
            if resolve_id is not None:
                resolve_id(None, True, False, exn)
            raise
        # Otherwise, grab the URN, ID, and output properties and resolve all of them.
        # ('return' is a Python keyword, hence getattr instead of resp.return.)
        resp = getattr(resp, 'return')
        log.debug(f"getResource completed successfully: ty={ty}, urn={resp['urn']}")
        resolve_urn(resp["urn"], True, False, None)
        if resolve_id:
            # The ID is known if (and only if) it is a non-empty string. If it's either None or an
            # empty string, we should treat it as unknown. TFBridge in particular is known to send
            # the empty string as an ID when doing a preview.
            is_known = bool(resp["id"])
            resolve_id(resp["id"], is_known, False, None)
        rpc.resolve_outputs(res, resolver.serialized_props, resp["state"], {}, resolvers, transform_using_type_metadata)
    asyncio.ensure_future(RPC_MANAGER.do_rpc("get resource", do_get)())
def _translate_ignore_changes(res: 'Resource',
                              typ: Optional[type],
                              ignore_changes: Optional[List[str]]) -> Optional[List[str]]:
    """Translate Python-side property names in ``ignore_changes`` to their
    Pulumi (provider) names.

    When ``typ`` is given, its type metadata drives the translation; otherwise
    the resource's ``translate_input_property`` is used, if present. Names
    with no known translation pass through unchanged.
    """
    if ignore_changes is None:
        return None
    if typ is not None:
        # Type metadata takes precedence over the resource's translation method.
        pulumi_names = _types.input_type_py_to_pulumi_names(typ)
        return [pulumi_names.get(name) or name for name in ignore_changes]
    if res.translate_input_property is not None:
        return [res.translate_input_property(name) for name in ignore_changes]
    return ignore_changes
def _translate_additional_secret_outputs(res: 'Resource',
                                         typ: Optional[type],
                                         additional_secret_outputs: Optional[List[str]]) -> Optional[List[str]]:
    """Translate Python-side property names in ``additional_secret_outputs``
    to their Pulumi (provider) names.
    """
    if additional_secret_outputs is None:
        return None
    if typ is not None:
        # A given `typ` opts into metadata-based translation: use the resource
        # class's name metadata rather than translate_input_property.
        pulumi_names = _types.resource_py_to_pulumi_names(type(res))
        return [pulumi_names.get(name) or name for name in additional_secret_outputs]
    if res.translate_input_property is not None:
        # Although these are *output* property names, translate_input_property
        # is the method that maps language-projection names to provider names,
        # which is exactly the direction needed here.
        return [res.translate_input_property(name) for name in additional_secret_outputs]
    return additional_secret_outputs
def _translate_replace_on_changes(res: 'Resource',
typ: Optional[type],
replace_on_changes: Optional[List[str]]) -> Optional[List[str]]:
if replace_on_changes is not None:
if typ is not None:
# If `typ` is specified, use its type/name metadata for translation.
input_names = _types.input_type_py_to_pulumi_names(typ)
replace_on_changes = list(map(lambda k: input_names.get(k) or k, | |
<reponame>luoyu-123/ezdxf
# Copyright (c) 2019-2020 <NAME>
# License: MIT License
from typing import TYPE_CHECKING, Iterable, List, Union
from contextlib import contextmanager
from ezdxf.lldxf.attributes import DXFAttr, DXFAttributes, DefSubclass, XType
from ezdxf.lldxf.const import SUBCLASS_MARKER, DXF2000, DXFTypeError, DXF2013, DXFStructureError
from ezdxf.lldxf.tags import Tags, DXFTag
from ezdxf.math.matrix44 import Matrix44
from ezdxf.tools import crypt
from .dxfentity import base_class, SubclassProcessor
from .dxfgfx import DXFGraphic, acdb_entity
from .factory import register_entity
if TYPE_CHECKING:
from ezdxf.eztypes import TagWriter, DXFNamespace
__all__ = [
'Body', 'Solid3d', 'Region', 'Surface', 'ExtrudedSurface', 'LoftedSurface',
'RevolvedSurface', 'SweptSurface',
]
# Common DXF subclass for ACIS-based entities: stores the ACIS format version
# and, for DXF R2013+, the flags/uid pair (group codes 290 and 2) that refers
# to binary ACIS data stored in the ACDSDATA section (see comment below).
acdb_modeler_geometry = DefSubclass('AcDbModelerGeometry', {
    'version': DXFAttr(70, default=1),
    'flags': DXFAttr(290, dxfversion=DXF2013),
    'uid': DXFAttr(2, dxfversion=DXF2013),
})
# with R2013/AC1027 Modeler Geometry of ACIS data is stored in the ACDSDATA
# section as binary encoded information detection:
# group code 70, 1, 3 is missing
# group code 290, 2 present
#
# 0
# ACDSRECORD
# 90
# 1
# 2
# AcDbDs::ID
# 280
# 10
# 320
# 19B <<< handle of associated 3DSOLID entity in model space
# 2
# ASM_Data
# 280
# 15
# 94
# 7197 <<< size in bytes ???
# 310
# 414349532042696E61727946696C6...
@register_entity
class Body(DXFGraphic):
    """ DXF BODY entity - container entity for embedded ACIS data.

    For DXF R2000-R2010 the ACIS data is stored as encrypted text inside the
    entity itself; for DXF R2013+ it lives as binary data in the document's
    ACDSDATA section and is accessed through the document.
    """
    DXFTYPE = 'BODY'
    DXFATTRIBS = DXFAttributes(base_class, acdb_entity, acdb_modeler_geometry)
    MIN_DXF_VERSION_FOR_EXPORT = DXF2000
    def __init__(self):
        super().__init__()
        # Text form of the ACIS data; unused for R2013+ (binary storage).
        self._acis_data: List[Union[str, bytes]] = []
    @property
    def acis_data(self) -> List[Union[str, bytes]]:
        """ Get ACIS text data as list of strings for DXF R2000 to DXF R2010 and binary encoded ACIS data for DXF R2013
        and later as list of bytes.
        """
        if self.has_binary_data:
            # R2013+: data lives in the ACDSDATA section, keyed by handle.
            return self.doc.acdsdata.get_acis_data(self.dxf.handle)
        else:
            return self._acis_data
    @acis_data.setter
    def acis_data(self, lines: Iterable[str]):
        """ Set ACIS data as list of strings for DXF R2000 to DXF R2010. In case of DXF R2013 and later, setting ACIS
        data as binary data is not supported.
        """
        if self.has_binary_data:
            raise DXFTypeError('Setting ACIS data not supported for DXF R2013 and later.')
        else:
            self._acis_data = list(lines)
    @property
    def has_binary_data(self):
        """ Returns ``True`` if ACIS data is of type ``List[bytes]``, ``False``
        if data is of type ``List[str]``.
        """
        if self.doc:
            return self.doc.dxfversion >= DXF2013
        else:
            # Without a document the DXF version is unknown; assume text data.
            return False
    def copy(self):
        """ Prevent copying. (internal interface)"""
        raise DXFTypeError('Copying of ACIS data not supported.')
    def load_dxf_attribs(
            self, processor: SubclassProcessor = None) -> 'DXFNamespace':
        """ Loading interface. (internal API)"""
        dxf = super().load_dxf_attribs(processor)
        if processor:
            processor.load_dxfattribs_into_namespace(dxf, acdb_modeler_geometry)
            if not self.has_binary_data:
                # subclasses[2] is the AcDbModelerGeometry subclass containing
                # the code 1/3 text tags.
                self.load_acis_data(processor.subclasses[2])
        return dxf
    def load_acis_data(self, tags: Tags):
        """ Loading interface. (internal API)"""
        # ACIS text is stored "encrypted" and split across code 1/3 tags.
        text_lines = tags2textlines(tag for tag in tags if tag.code in (1, 3))
        self.acis_data = crypt.decode(text_lines)
    def export_entity(self, tagwriter: 'TagWriter') -> None:
        """ Export entity specific data as DXF tags. (internal API)"""
        super().export_entity(tagwriter)
        tagwriter.write_tag2(SUBCLASS_MARKER, acdb_modeler_geometry.name)
        if tagwriter.dxfversion >= DXF2013:
            # ACIS data stored in the ACDSDATA section as binary encoded
            # information.
            if self.dxf.hasattr('version'):
                tagwriter.write_tag2(70, self.dxf.version)
            self.dxf.export_dxf_attribs(tagwriter, ['flags', 'uid'])
        else:
            # DXF R2000 - R2013 stores ACIS data as text in entity
            self.dxf.export_dxf_attribs(tagwriter, 'version')
            self.export_acis_data(tagwriter)
    def export_acis_data(self, tagwriter: 'TagWriter') -> None:
        """ Export ACIS data as DXF tags. (internal API)"""
        def cleanup(lines):
            # Strip trailing whitespace and embedded newlines before encoding.
            for line in lines:
                yield line.rstrip().replace('\n', '')
        tags = Tags(textlines2tags(crypt.encode(cleanup(self.acis_data))))
        tagwriter.write_tags(tags)
    def set_text(self, text: str, sep: str = '\n') -> None:
        """ Set ACIS data from one string. """
        self.acis_data = text.split(sep)
    def tostring(self) -> str:
        """ Returns ACIS data as one string for DXF R2000 to R2010. """
        if self.has_binary_data:
            return ""
        else:
            return "\n".join(self.acis_data)
    def tobytes(self) -> bytes:
        """ Returns ACIS data as joined bytes for DXF R2013 and later. """
        if self.has_binary_data:
            return b"".join(self.acis_data)
        else:
            return b""
    def get_acis_data(self):
        """ Get the ACIS source code as a list of strings. """
        # for backward compatibility
        return self.acis_data
    def set_acis_data(self, text_lines: Iterable[str]) -> None:
        """ Set the ACIS source code as a list of strings **without** line
        endings.
        """
        # for backward compatibility
        self.acis_data = text_lines
    @contextmanager
    def edit_data(self) -> 'ModelerGeometry':
        """ Context manager yielding a ModelerGeometry wrapper; the (possibly
        modified) text lines are written back on exit. (backward compatibility)
        """
        # for backward compatibility
        data = ModelerGeometry(self)
        yield data
        self.acis_data = data.text_lines
class ModelerGeometry:
    """Legacy wrapper exposing the ACIS text lines of a :class:`Body`.

    Kept for backward compatibility with the old ``edit_data()`` interface.
    """

    def __init__(self, body: 'Body'):
        # Snapshot of the body's ACIS source code as a list of text lines.
        self.text_lines = body.acis_data

    def __str__(self) -> str:
        """Return the ACIS source code as a single newline-joined string."""
        return "\n".join(self.text_lines)

    def set_text(self, text: str, sep: str = '\n') -> None:
        """Replace the stored lines by splitting *text* at *sep*."""
        self.text_lines = text.split(sep)
def tags2textlines(tags: Iterable) -> Iterable[str]:
    """Reassemble text lines from (code, value) tags: a code 1 tag starts a
    new line, subsequent code 3 tags are continuations appended to it.
    """
    current = None
    for code, value in tags:
        if code == 3:
            # Continuation of the current line.
            current += value
        elif code == 1:
            # New line starts: emit the previously collected one first.
            if current is not None:
                yield current
            current = value
    if current is not None:
        yield current
def textlines2tags(lines: Iterable[str]) -> Iterable[DXFTag]:
    """Convert text lines into DXF tags, splitting lines longer than 255
    characters into a code 1 tag followed by code 3 continuation tags.
    """
    for line in lines:
        # First chunk always goes out as a code 1 tag (even for empty lines).
        yield DXFTag(1, line[:255])
        rest = line[255:]
        while rest:
            yield DXFTag(3, rest[:255])
            rest = rest[255:]
@register_entity
class Region(Body):
    """ DXF REGION entity - container entity for embedded ACIS data. """
    # Inherits all behavior and DXFATTRIBS from Body; only the type differs.
    DXFTYPE = 'REGION'
# Additional subclass for 3DSOLID: handle of an associated history object
# (group code 350); '0' means no history.
acdb_3dsolid = DefSubclass('AcDb3dSolid', {
    'history_handle': DXFAttr(350, default='0'),
})
@register_entity
class Solid3d(Body):
    """ DXF 3DSOLID entity - container entity for embedded ACIS data. """
    DXFTYPE = '3DSOLID'
    DXFATTRIBS = DXFAttributes(
        base_class, acdb_entity, acdb_modeler_geometry, acdb_3dsolid)
    def load_dxf_attribs(
            self, processor: SubclassProcessor = None) -> 'DXFNamespace':
        """ Loading interface. (internal API)"""
        dxf = super().load_dxf_attribs(processor)
        if processor:
            # Body loads the AcDbModelerGeometry subclass; add AcDb3dSolid.
            processor.load_dxfattribs_into_namespace(dxf, acdb_3dsolid)
        return dxf
    def export_entity(self, tagwriter: 'TagWriter') -> None:
        """ Export entity specific data as DXF tags. """
        # base class export is done by parent class
        super().export_entity(tagwriter)
        # AcDbEntity export is done by parent class
        # AcDbModelerGeometry export is done by parent class
        tagwriter.write_tag2(SUBCLASS_MARKER, acdb_3dsolid.name)
        self.dxf.export_dxf_attribs(tagwriter, 'history_handle')
def load_matrix(subclass: 'Tags', code: int) -> Matrix44:
    """Collect all tags with group *code* from *subclass* and build a Matrix44.

    Raises:
        DXFStructureError: if not exactly 16 matrix components are present.
    """
    components = [tag.value for tag in subclass.find_all(code)]
    if len(components) != 16:
        raise DXFStructureError('Invalid transformation matrix.')
    return Matrix44(components)
def export_matrix(tagwriter: 'TagWriter', code: int, matrix: Matrix44) -> None:
    """Write every component of *matrix* as an individual tag with group *code*."""
    write = tagwriter.write_tag2
    for component in matrix.matrix:
        write(code, component)
# Subclass for SURFACE entities: counts in u and v direction (group codes
# 71/72; presumably isoline display counts — not specified here).
acdb_surface = DefSubclass('AcDbSurface', {
    'u_count': DXFAttr(71),
    'v_count': DXFAttr(72),
})
@register_entity
class Surface(Body):
    """ DXF SURFACE entity - container entity for embedded ACIS data. """
    DXFTYPE = 'SURFACE'
    DXFATTRIBS = DXFAttributes(
        base_class, acdb_entity, acdb_modeler_geometry, acdb_surface)
    def load_dxf_attribs(
            self, processor: SubclassProcessor = None) -> 'DXFNamespace':
        """ Loading interface. (internal API)"""
        dxf = super().load_dxf_attribs(processor)
        if processor:
            # Body loads the AcDbModelerGeometry subclass; add AcDbSurface.
            processor.load_dxfattribs_into_namespace(dxf, acdb_surface)
        return dxf
    def export_entity(self, tagwriter: 'TagWriter') -> None:
        """ Export entity specific data as DXF tags. """
        # base class export is done by parent class
        super().export_entity(tagwriter)
        # AcDbEntity export is done by parent class
        # AcDbModelerGeometry export is done by parent class
        tagwriter.write_tag2(SUBCLASS_MARKER, acdb_surface.name)
        self.dxf.export_dxf_attribs(tagwriter, ['u_count', 'v_count'])
# Subclass for EXTRUDEDSURFACE: sweep/draft parameters plus three 16-float
# transformation matrices (group codes 40, 46, 47) which are handled
# separately by ExtrudedSurface.load_matrices() / export_entity().
acdb_extruded_surface = DefSubclass('AcDbExtrudedSurface', {
    'class_id': DXFAttr(90),
    'sweep_vector': DXFAttr(10, xtype=XType.point3d),
    # 16x group code 40: Transform matrix of extruded entity (16 floats;
    # row major format; default = identity matrix)
    'draft_angle': DXFAttr(42, default=0.),  # in radians
    'draft_start_distance': DXFAttr(43, default=0.),
    'draft_end_distance': DXFAttr(44, default=0.),
    'twist_angle': DXFAttr(45, default=0.),  # in radians?
    'scale_factor': DXFAttr(48, default=0.),
    'align_angle': DXFAttr(49, default=0.),  # in radians
    # 16x group code 46: Transform matrix of sweep entity (16 floats;
    # row major format; default = identity matrix)
    # 16x group code 47: Transform matrix of path entity (16 floats;
    # row major format; default = identity matrix)
    'solid': DXFAttr(290, default=0),  # bool
    # 0=No alignment; 1=Align sweep entity to path:
    'sweep_alignment_flags': DXFAttr(70, default=0),
    'unknown1': DXFAttr(71, default=0),
    # 2=Translate sweep entity to path; 3=Translate path to sweep entity:
    'align_start': DXFAttr(292, default=0),  # bool
    'bank': DXFAttr(293, default=0),  # bool
    'base_point_set': DXFAttr(294, default=0),  # bool
    'sweep_entity_transform_computed': DXFAttr(295, default=0),  # bool
    'path_entity_transform_computed': DXFAttr(296, default=0),  # bool
    'reference_vector_for_controlling_twist': DXFAttr(11, xtype=XType.point3d),
})
@register_entity
class ExtrudedSurface(Surface):
    """ DXF EXTRUDEDSURFACE entity - container entity for embedded ACIS data. """
    DXFTYPE = 'EXTRUDEDSURFACE'
    DXFATTRIBS = DXFAttributes(
        base_class, acdb_entity, acdb_modeler_geometry, acdb_surface,
        acdb_extruded_surface
    )
    def __init__(self):
        super().__init__()
        # The three 16-float matrices are stored outside the DXF namespace
        # because they are encoded as repeated tags (codes 40/46/47).
        self.transformation_matrix_extruded_entity = Matrix44()
        self.sweep_entity_transformation_matrix = Matrix44()
        self.path_entity_transformation_matrix = Matrix44()
    def load_dxf_attribs(
            self, processor: SubclassProcessor = None) -> 'DXFNamespace':
        """ Loading interface. (internal API)"""
        dxf = super().load_dxf_attribs(processor)
        if processor:
            processor.load_dxfattribs_into_namespace(dxf, acdb_extruded_surface)
            # subclasses[4] is the AcDbExtrudedSurface subclass.
            self.load_matrices(processor.subclasses[4])
        return dxf
    def load_matrices(self, tags: Tags):
        """ Load the three transformation matrices from repeated tags. (internal API)"""
        self.transformation_matrix_extruded_entity = load_matrix(tags, code=40)
        self.sweep_entity_transformation_matrix = load_matrix(tags, code=46)
        self.path_entity_transformation_matrix = load_matrix(tags, code=47)
    def export_entity(self, tagwriter: 'TagWriter') -> None:
        """ Export entity specific data as DXF tags. """
        # base class export is done by parent class
        super().export_entity(tagwriter)
        # AcDbEntity export is done by parent class
        # AcDbModelerGeometry export is done by parent class
        tagwriter.write_tag2(SUBCLASS_MARKER, acdb_extruded_surface.name)
        self.dxf.export_dxf_attribs(tagwriter, ['class_id', 'sweep_vector'])
        export_matrix(tagwriter, code=40,
                      matrix=self.transformation_matrix_extruded_entity)
        self.dxf.export_dxf_attribs(tagwriter, [
            'draft_angle', 'draft_start_distance', 'draft_end_distance',
            'twist_angle', 'scale_factor', 'align_angle',
        ])
        export_matrix(tagwriter, code=46, matrix=self.sweep_entity_transformation_matrix)
        export_matrix(tagwriter, code=47, matrix=self.path_entity_transformation_matrix)
        self.dxf.export_dxf_attribs(tagwriter, [
            'solid', 'sweep_alignment_flags', 'unknown1', 'align_start', 'bank',
            'base_point_set', 'sweep_entity_transform_computed',
            'path_entity_transform_computed',
            'reference_vector_for_controlling_twist'
        ])
acdb_lofted_surface = DefSubclass('AcDbLoftedSurface', {
# 16x group code 40: Transform matrix of loft entity | |
<gh_stars>0
import math
import warnings
import numpy
import six
from chainer import configuration
from chainer import cuda
from chainer.functions.math import identity
from chainer import testing
from chainer import variable
def _copy_arrays(xs):
    """Return float64, C-ordered deep copies of the given arrays.

    The array module (NumPy or CuPy) is selected from the inputs, so CPU and
    GPU arrays are both supported.
    """
    backend = cuda.get_array_module(*xs)
    return [backend.array(x, dtype=numpy.float64, order='C', copy=True)
            for x in xs]
def numerical_grad(f, inputs, grad_outputs, eps=1e-3):
    """Computes numerical gradient by finite differences.
    This function is used to implement gradient check. For usage example, see
    unit tests of :mod:`chainer.functions`.
    Args:
        f (function): Python function with no arguments that runs forward
            computation and returns the result.
        inputs (tuple of arrays): Tuple of arrays that should be treated as
            inputs. Each element of them is slightly modified to realize
            numerical gradient by finite differences.
        grad_outputs (tuple of arrays): Tuple of arrays that are treated as
            output gradients.
        eps (float): Epsilon value of finite differences.
    Returns:
        tuple: Numerical gradient arrays corresponding to ``inputs``.
    """
    assert eps > 0
    # Finite differences are meaningless for non-float inputs.
    for x in inputs:
        if x.dtype.kind != 'f':
            raise RuntimeError(
                'The dtype of input arrays must be kind of float')
    inputs = tuple(inputs)
    grad_outputs = tuple(grad_outputs)
    gpu = any(isinstance(x, cuda.ndarray) for x in inputs + grad_outputs)
    cpu = any(isinstance(x, numpy.ndarray) for x in inputs + grad_outputs)
    if gpu and cpu:
        raise RuntimeError('Do not mix GPU and CPU arrays in `numerical_grad`')
    if gpu:
        xp = cuda.cupy
        # Fused kernel computing sum((y1 - y2) * gy) / (2 * eps) on device.
        numerical_grad_kernel = cuda.reduce(
            'T y1, T y2, U gy, T eps', 'V gxi',
            '(y1 - y2) * gy', 'a + b', 'gxi += a / (eps * 2)', '0',
            'numerical_grad_kernel'
        )
    else:
        xp = numpy
    # Accumulate in float64 for accuracy; cast back to input dtype at the end.
    grads = [xp.zeros(x.shape, numpy.float64) for x in inputs]
    # Disable type checking: f() is called with temporarily perturbed inputs.
    with configuration.using_config('type_check', False):
        for x, gx in six.moves.zip(inputs, grads):
            orig_x = x.copy()  # hold original value
            # Central difference per element: (f(x+eps) - f(x-eps)) / (2*eps).
            for i in numpy.ndindex(x.shape):
                orig = orig_x[i]
                x[i] = orig + eps
                ys1 = _copy_arrays(f())
                x[i] = orig - eps
                ys2 = _copy_arrays(f())
                x[i] = orig
                for y1, y2, gy in six.moves.zip(ys1, ys2, grad_outputs):
                    if gy is not None:
                        if (gpu and isinstance(y1, cuda.ndarray) and
                                isinstance(y2, cuda.ndarray) and
                                isinstance(gy, cuda.ndarray)):
                            numerical_grad_kernel(y1, y2, gy, eps, gx[i])
                        else:
                            # Chain rule: fold output gradient into the slope.
                            dot = ((y1 - y2) * gy).sum()
                            gx[i] += dot / (2 * eps)
    return [g.astype(x.dtype, copy=False)
            for g, x in six.moves.zip(grads, inputs)]
def assert_allclose(x, y, atol=1e-5, rtol=1e-4, verbose=True):
    """Asserts if some corresponding element of x and y differs too much.

    This function can handle both CPU and GPU arrays simultaneously.

    .. deprecated::
        Use :func:`chainer.testing.assert_allclose` instead.

    Args:
        x: Left-hand-side array.
        y: Right-hand-side array.
        atol (float): Absolute tolerance.
        rtol (float): Relative tolerance.
        verbose (bool): If ``True``, it outputs verbose messages on error.
    """
    # Bug fix: the original two string literals were concatenated without a
    # separating space, producing "...deprecated.Use chainer...".
    warnings.warn(
        'chainer.gradient_check.assert_allclose is deprecated. '
        'Use chainer.testing.assert_allclose instead.',
        DeprecationWarning)
    testing.assert_allclose(x, y, atol, rtol, verbose)
def _as_tuple(x):
if isinstance(x, tuple):
return x
elif isinstance(x, list):
return tuple(x)
else:
return x,
def _filter_list(lst, ignore_list):
    """Return the elements of *lst* whose corresponding flag in *ignore_list*
    is falsy (i.e. drop the ignored entries).
    """
    kept = []
    for item, ignored in six.moves.zip(lst, ignore_list):
        if not ignored:
            kept.append(item)
    return kept
def check_backward(func, x_data, y_grad, params=(),
eps=1e-3, atol=1e-5, rtol=1e-4, no_grads=None, dtype=None):
"""Test backward procedure of a given function.
This function automatically checks backward-process of a given function.
For example, when you have a :class:`~chainer.Function` class ``MyFunc``,
that gets two arguments and returns one value, you can make its test like
this::
>> def test_my_func(self):
>> func = MyFunc()
>> x1_data = xp.array(...)
>> x2_data = xp.array(...)
>> gy_data = xp.array(...)
>> check_backward(func, (x1_data, x2_data), gy_data)
This method creates :class:`~chainer.Variable` objects with ``x_data``
and calls ``func`` with the :class:`~chainer.Variable` s to get its result
as :class:`~chainer.Variable`.
Then, it sets ``y_grad`` array to ``grad`` attribute of the result and
calls ``backward`` method to get gradients of the inputs.
To check correctness of the gradients, the function calls
:func:`numerical_grad` to calculate numerically the gradients and compares
the types of gradients with :func:`chainer.testing.assert_allclose`.
To reduce computational time, it uses directional derivative along a
random vector. A function
:math:`g: \\mathbb{R} \\rightarrow \\mathbb{R}^n` is defined as
:math:`g(\\delta) = f(x + \\delta r)`, where
:math:`\\delta \\in \\mathbb{R}`, :math:`r \\in \\mathbb{R}^n`
is a random vector
and :math:`f` is a function which actually you want to test.
Its gradient is
.. math::
g'(\\delta) = f'(x + \\delta r) \\cdot r.
Therefore, :math:`g'(0) = f'(x) \\cdot r`.
So we can check the correctness of back propagation of :math:`f` indirectly
by comparing this equation with the gradient of :math:`g` numerically
calculated and that of :math:`f` computed by backprop.
If :math:`r` is chosen from uniform distribution, we can conclude with
high probability that the gradient of :math:`f` itself is correct.
If input objects (``x1_data`` or/and ``x2_data`` in this example) represent
integer variables, their gradients are ignored.
You can simplify a test when ``MyFunc`` gets only one argument::
>> check_backward(func, x1_data, gy_data)
If ``MyFunc`` is a loss function which returns a zero-dimensional
array, pass ``None`` to ``gy_data``. In this case, it sets ``1`` to
``grad`` attribute of the result::
>> check_backward(my_loss_func, (x1_data, x2_data), None)
If ``MyFunc`` returns multiple outputs, pass all gradients for outputs
as a tuple::
>> gy1_data = xp.array(...)
>> gy2_data = xp.array(...)
>> check_backward(func, x1_data, (gy1_data, gy2_data))
You can also test a :class:`~chainer.Link`.
To check gradients of parameters of the link, set a tuple of the parameters
to ``params`` arguments::
>> check_backward(my_link, (x1_data, x2_data), gy_data,
>> (my_link.W, my_link.b))
Note that ``params`` are not ``ndarray`` s,
but :class:`~chainer.Variables` s.
Function objects are acceptable as ``func`` argument::
>> check_backward(lambda x1, x2: f(x1, x2),
>> (x1_data, x2_data), gy_data)
.. note::
``func`` is called many times to get numerical gradients for all inputs.
This function doesn't work correctly when ``func`` behaves randomly as
it gets different gradients.
Args:
func (callable): A function which gets :class:`~chainer.Variable` s
and returns :class:`~chainer.Variable` s. ``func`` must returns
a tuple of :class:`~chainer.Variable` s or one
:class:`~chainer.Variable`. You can use :class:`~chainer.Function`
object, :class:`~chainer.Link` object or a function satisfying the
condition.
x_data (ndarray or tuple of ndarrays): A set of ``ndarray`` s to be
passed to ``func``. If ``x_data`` is one ``ndarray`` object, it is
treated as ``(x_data,)``.
y_grad (ndarray or tuple of ndarrays or None):
A set of ``ndarray`` s representing gradients of return-values of
``func``. If ``y_grad`` is one ``ndarray`` object, it is
treated as ``(y_grad,)``. If ``func`` is a loss-function,
``y_grad`` should be set to ``None``.
params (~chainer.Variable or tuple of ~chainder.Variable):
A set of :class:`~chainer.Variable` s whose gradients are checked.
When ``func`` is a :class:`~chainer.Link` object,
set its parameters as ``params``.
If ``params`` is one :class:`~chainer.Variable` object,
it is treated as ``(params,)``.
eps (float): Epsilon value to be passed to :func:`numerical_grad`.
atol (float): Absolute tolerance to be passed to
:func:`chainer.testing.assert_allclose`.
rtol (float): Relative tolerance to be passed to
:func:`chainer.testing.assert_allclose`.
no_grads (list of bool): Flag to skip variable for gradient assertion.
It should be same length as ``x_data``.
dtype (~numpy.dtype): ``x_data``, ``y_grad`` and ``params`` are casted
to this dtype when calculating numerical gradients. Only float
types and ``None`` are allowed.
.. seealso::
:func:`numerical_grad`
"""
x_data = _as_tuple(x_data)
if y_grad is not None:
y_grad = _as_tuple(y_grad)
params = _as_tuple(params)
xs = [variable.Variable(x) for x in x_data]
y = func(*xs)
y = _as_tuple(y)
# All creators of `y` need to be the same because we only call
# `y[0].backward` to call `backward` method of the creator.
# To do so we need to insert a dummy function `Ident` to the
# computational graph.
# Note that `func` may not be a `Function` object.
y = identity.Identity().apply(y)
y_grad = _set_y_grad(y, y_grad)
# Clear gradients which may exist if func calls backward inside of itself.
_clear_grads(xs)
_clear_grads(params)
# We only need to call `backward` for one result `Variable`.
# `Variable.backward` method calls `Function.backward` of its creator.
y[0].backward()
if no_grads is None:
no_grads = [x.dtype.kind != 'f' for x in xs]
else:
if len(no_grads) != len(xs):
raise ValueError(
'Length of no_grads param and xs should be same.')
for skip, x in six.moves.zip(no_grads, xs):
if skip:
if x.grad is not None:
raise RuntimeError(
'gradient of int variable must be None')
else:
if x.grad is None:
raise RuntimeError(
'gradients of some arguments are not calculated')
if len(xs) - len(no_grads) + len(params) == 0:
# When there is no float variables, we need not to check gradient
| |
<gh_stars>100-1000
# Copyright (c) 2016, <NAME>, www.az2000.de
# All rights reserved.
# file created 2011-04-15
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
https://github.com/albertz/py_better_exchook
This is a simple replacement for the standard Python exception handler (sys.excepthook).
In addition to what the standard handler does, it also prints all referenced variables
(no matter if local, global or builtin) of the code line of each stack frame.
See below for some examples and some example output.
See these functions:
- better_exchook
- format_tb / print_tb
- iter_traceback
- get_current_frame
- dump_all_thread_tracebacks
- install
- replace_traceback_format_tb
Although there might be a few more useful functions, thus we export all of them.
Also see the demo/tests at the end.
"""
from __future__ import print_function
import sys
import os
import os.path
import threading
import keyword
import inspect
try:
    # StackSummary/FrameSummary are only available on newer Python versions.
    from traceback import StackSummary, FrameSummary
except ImportError:
    class _Dummy:
        # Placeholder type so later isinstance checks against these names are valid.
        pass
    StackSummary = FrameSummary = _Dummy
# noinspection PySetFunctionToLiteral,SpellCheckingInspection
# All Python keywords, plus None/True/False (pseudo-keywords on Python 2).
pykeywords = set(keyword.kwlist) | set(["None", "True", "False"])
# Working directory at import time; used later to resolve relative source paths.
_cur_pwd = os.getcwd()
_threading_main_thread = threading.main_thread() if hasattr(threading, "main_thread") else None
try:
    # noinspection PyUnresolvedReferences,PyUnboundLocalVariable
    unicode  # exists on Python 2 only
except NameError:  # Python3
    unicode = str  # Python 3 compatibility
try:
    # noinspection PyUnresolvedReferences,PyUnboundLocalVariable
    raw_input  # exists on Python 2 only
except NameError:  # Python3
    raw_input = input
def parse_py_statement(line):
    """Tokenize a single line of Python source code.

    Yields ``(type, content)`` tuples where type is one of
    ``"op"``, ``"str"``, ``"id"`` or ``"comment"``.
    Implemented as a character-driven state machine; the numeric
    states are documented inline below.
    """
    state = 0
    curtoken = ""
    spaces = " \t\n"
    ops = ".,;:+-*/%&!=|(){}[]^<>"
    i = 0
    def _escape_char(c):
        # Minimal backslash-escape handling inside string literals:
        # only \n and \t are translated, everything else maps to itself.
        if c == "n": return "\n"
        elif c == "t": return "\t"
        else: return c
    while i < len(line):
        c = line[i]
        i += 1
        if state == 0:  # default state, between tokens
            if c in spaces: pass
            elif c in ops: yield ("op", c)
            elif c == "#": state = 6
            elif c == "\"": state = 1
            elif c == "'": state = 2
            else:
                curtoken = c
                state = 3
        elif state == 1:  # string via "
            if c == "\\": state = 4
            elif c == "\"":
                yield ("str", curtoken)
                curtoken = ""
                state = 0
            else: curtoken += c
        elif state == 2:  # string via '
            if c == "\\": state = 5
            elif c == "'":
                yield ("str", curtoken)
                curtoken = ""
                state = 0
            else: curtoken += c
        elif state == 3:  # identifier
            if c in spaces + ops + "#\"'":
                yield ("id", curtoken)
                curtoken = ""
                state = 0
                i -= 1  # reprocess this character in the default state
            else: curtoken += c
        elif state == 4:  # escape in "
            curtoken += _escape_char(c)
            state = 1
        elif state == 5:  # escape in '
            curtoken += _escape_char(c)
            state = 2
        elif state == 6:  # comment
            curtoken += c
    # Flush any token still being collected at the end of the line.
    if state == 3: yield ("id", curtoken)
    elif state == 6: yield ("comment", curtoken)
def parse_py_statements(source_code):
    """Tokenize every line of *source_code* and yield all tokens in order."""
    source_lines = source_code.splitlines()
    for source_line in source_lines:
        for token in parse_py_statement(source_line):
            yield token
def grep_full_py_identifiers(tokens):
    """Yield fully dotted identifiers (e.g. ``a.b.c``) from a token stream.

    Keywords, empty tokens, and tokens starting with a digit or dot
    are skipped.
    """
    token_list = list(tokens)
    total = len(token_list)
    idx = 0
    while idx < total:
        token_type, name = token_list[idx]
        idx += 1
        if token_type != "id":
            continue
        # Merge "a" "." "b" sequences into a single dotted identifier.
        while idx + 1 < total and token_list[idx] == ("op", ".") and token_list[idx + 1][0] == "id":
            name = name + "." + token_list[idx + 1][1]
            idx += 2
        if not name:
            continue
        if name in pykeywords:
            continue
        if name[0] in ".0123456789":
            continue
        yield name
def set_linecache(filename, source):
    """Register *source* under *filename* in the linecache, so that
    tracebacks can display lines from dynamically compiled code."""
    import linecache
    cached_lines = [text + '\n' for text in source.splitlines()]
    linecache.cache[filename] = (None, None, cached_lines, filename)
def simple_debug_shell(globals, locals):
    """Minimal interactive read-eval-print loop (fallback when IPython is unavailable).

    Note: the parameter names intentionally shadow the builtins; they are
    the globals/locals dicts used to evaluate the entered statements.
    Exits on Ctrl-C / EOF at the prompt, or on KeyboardInterrupt/SystemExit
    raised by evaluated code.
    """
    try: import readline  # enable line editing / history when available
    except ImportError: pass  # ignore
    COMPILE_STRING_FN = "<simple_debug_shell input>"
    while True:
        try:
            s = raw_input("> ")
        except (KeyboardInterrupt, EOFError):
            print("breaked debug shell: " + sys.exc_info()[0].__name__)
            break
        if s.strip() == "": continue
        try:
            c = compile(s, COMPILE_STRING_FN, "single")
        except Exception as e:
            # Syntax (or other compile) error: report and keep the shell alive.
            print("%s : %s in %r" % (e.__class__.__name__, str(e), s))
        else:
            # Register the input with linecache so tracebacks can show it.
            set_linecache(COMPILE_STRING_FN, s)
            try:
                ret = eval(c, globals, locals)
            except (KeyboardInterrupt, SystemExit):
                print("debug shell exit: " + sys.exc_info()[0].__name__)
                break
            except Exception:
                print("Error executing %r" % s)
                better_exchook(*sys.exc_info(), autodebugshell=False)
            else:
                try:
                    # Print the result if there is one.
                    if ret is not None: print(ret)
                except Exception:
                    print("Error printing return value of %r" % s)
                    better_exchook(*sys.exc_info(), autodebugshell=False)
def debug_shell(user_ns, user_global_ns, traceback=None, execWrapper=None):
    """Start an interactive debug shell, preferring IPython when installed.

    Args:
        user_ns: locals namespace for the shell.
        user_global_ns: globals namespace for the shell.
        traceback: optional traceback object; if given, a post-mortem
            debugger is started on it.
        execWrapper: optional callable that wraps the execution of each
            shell command (only used with the embedded IPython shell).
    """
    ipshell = None
    try:
        import IPython
        have_ipython = True
    except ImportError:
        have_ipython = False
    if not ipshell and traceback and have_ipython:
        # Preferred: IPython's terminal Pdb for post-mortem on the traceback.
        try:
            from IPython.core.debugger import Pdb
            from IPython.terminal.debugger import TerminalPdb
            from IPython.terminal.ipapp import TerminalIPythonApp
            ipapp = TerminalIPythonApp.instance()
            ipapp.interact = False  # Avoid output (banner, prints)
            ipapp.initialize(argv=[])
            def_colors = ipapp.shell.colors
            pdb_obj = TerminalPdb(def_colors)
            pdb_obj.botframe = None  # not sure. exception otherwise at quit
            ipshell = lambda: pdb_obj.interaction(None, traceback=traceback)
        except Exception:
            print("IPython Pdb exception:")
            better_exchook(*sys.exc_info(), autodebugshell=False)
    if not ipshell and have_ipython:
        # Otherwise embed a plain IPython shell in the given namespaces.
        try:
            import IPython
            import IPython.terminal.embed
            class DummyMod(object): pass
            # IPython expects a module object; fake one around user_global_ns.
            module = DummyMod()
            module.__dict__ = user_global_ns
            module.__name__ = "_DummyMod"
            if "__name__" not in user_ns:
                user_ns = user_ns.copy()
                user_ns["__name__"] = "_DummyUserNsMod"
            ipshell = IPython.terminal.embed.InteractiveShellEmbed.instance(
                user_ns=user_ns, user_module=module)
        except Exception:
            print("IPython not available:")
            better_exchook(*sys.exc_info(), autodebugshell=False)
        else:
            # try/except "else": only runs when the embedded shell was created.
            if execWrapper:
                old = ipshell.run_code
                ipshell.run_code = lambda code: execWrapper(lambda: old(code))
    if ipshell:
        ipshell()
    else:
        # No IPython at all: fall back to pdb or the simple shell.
        print("Use simple debug shell:")
        if traceback:
            import pdb
            pdb.post_mortem(traceback)
        else:
            simple_debug_shell(user_global_ns, user_ns)
def output_limit():
    """Maximum number of characters printed for a single value."""
    return 300
def pp_extra_info(obj, depthlimit=3):
    """Return a short info string about *obj* (length, first-item info).

    Recurses into the first item up to *depthlimit* levels deep.
    Returns an empty string when there is nothing interesting to report.
    """
    parts = []
    if hasattr(obj, "__len__"):
        try:
            # For short builtin containers the repr already shows everything.
            if type(obj) in (str, unicode, list, tuple, dict) and len(obj) <= 5:
                pass
            else:
                parts.append("len = " + str(obj.__len__()))
        except Exception:
            pass
    if depthlimit > 0 and hasattr(obj, "__getitem__"):
        try:
            if type(obj) in (str, unicode):
                pass  # sub-items of strings are not interesting
            else:
                first_item = obj.__getitem__(0)
                sub_info = pp_extra_info(first_item, depthlimit - 1)
                if sub_info != "":
                    parts.append("_[0]: {" + sub_info + "}")
        except Exception:
            pass
    return ", ".join(parts)
def pretty_print(obj):
    """Return a possibly shortened repr of *obj*, annotated with extra info."""
    text = repr(obj)
    limit = output_limit()
    if len(text) > limit:
        text = text[:limit - 3] + "..."
    info = pp_extra_info(obj)
    if info != "":
        text += ", " + info
    return text
def fallback_findfile(filename):
    """Try to locate *filename* via the loaded modules.

    Used when the file from a traceback cannot be read directly, e.g.
    because the working directory changed or only a ``.pyc`` path is known.
    Returns a best-effort path, or None when no module matches.
    """
    candidates = [m for m in sys.modules.values()
                  if m and hasattr(m, "__file__") and filename in m.__file__]
    if not candidates:
        return None
    altfn = candidates[0].__file__
    if altfn[-4:-1] == ".py":
        altfn = altfn[:-1]  # strip the trailing char of *.pyc or similar
    if not os.path.exists(altfn) and altfn.startswith("./"):
        # Maybe the current directory changed since the module was loaded.
        candidate = _cur_pwd + altfn[1:]
        if os.path.exists(candidate):
            return candidate
        # Also try the directories of some well-known modules.
        for mod_name in ["__main__", "better_exchook"]:
            mod = sys.modules.get(mod_name)
            if hasattr(mod, "__file__"):
                candidate = os.path.dirname(mod.__file__) + altfn[1:]
                if os.path.exists(candidate):
                    return candidate
    return altfn
def is_source_code_missing_open_brackets(source_code):
    """Return True if *source_code* contains closing brackets whose matching
    opening brackets are missing (i.e. the statement started on an earlier line)."""
    openers = "[{("
    closers = "]})"
    pending = [-1]  # stack of bracket kinds still waiting for their opener
    balance = [0] * len(openers)
    # Scan the tokens in reverse order: closers are seen before their openers.
    for token_type, token_text in reversed(list(parse_py_statements(source_code))):
        if token_type != "op":
            continue  # only operators (including brackets) matter here
        if token_text in openers:
            kind = openers.index(token_text)
            # Only count this opener if we already saw the matching closer.
            if pending[-1] == kind:
                balance[kind] -= 1
                pending.pop()
        elif token_text in closers:
            kind = closers.index(token_text)
            balance[kind] += 1
            pending.append(kind)
    return any(count != 0 for count in balance)
def get_source_code(filename, lineno, module_globals):
import linecache
linecache.checkcache(filename)
source_code = linecache.getline(filename, lineno, module_globals)
# In case of a multi-line statement, lineno is usually the last line.
# We are checking for missing open brackets and add earlier code lines.
while is_source_code_missing_open_brackets(source_code):
if lineno <= 0: break
lineno -= 1
source_code = "".join([linecache.getline(filename, lineno, module_globals), source_code])
| |
be the retention policy.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param frequency_interval: Required. How often the backup should be executed (e.g. for weekly
backup, this should be set to 7 and FrequencyUnit should be set to Day).
:type frequency_interval: int
:param frequency_unit: Required. The unit of time for how often the backup should be executed
(e.g. for weekly backup, this should be set to Day and FrequencyInterval should be set to 7).
Possible values include: "Day", "Hour". Default value: "Day".
:type frequency_unit: str or ~azure.mgmt.web.v2020_06_01.models.FrequencyUnit
:param keep_at_least_one_backup: Required. True if the retention policy should always keep at
least one backup in the storage account, regardless how old it is; false otherwise.
:type keep_at_least_one_backup: bool
:param retention_period_in_days: Required. After how many days backups should be deleted.
:type retention_period_in_days: int
:param start_time: When the schedule should start working.
:type start_time: ~datetime.datetime
:ivar last_execution_time: Last time when this schedule was triggered.
:vartype last_execution_time: ~datetime.datetime
"""
_validation = {
'frequency_interval': {'required': True},
'frequency_unit': {'required': True},
'keep_at_least_one_backup': {'required': True},
'retention_period_in_days': {'required': True},
'last_execution_time': {'readonly': True},
}
_attribute_map = {
'frequency_interval': {'key': 'frequencyInterval', 'type': 'int'},
'frequency_unit': {'key': 'frequencyUnit', 'type': 'str'},
'keep_at_least_one_backup': {'key': 'keepAtLeastOneBackup', 'type': 'bool'},
'retention_period_in_days': {'key': 'retentionPeriodInDays', 'type': 'int'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'last_execution_time': {'key': 'lastExecutionTime', 'type': 'iso-8601'},
}
    def __init__(
        self,
        *,
        frequency_interval: int = 7,
        frequency_unit: Union[str, "FrequencyUnit"] = "Day",
        keep_at_least_one_backup: bool = True,
        retention_period_in_days: int = 30,
        start_time: Optional[datetime.datetime] = None,
        **kwargs
    ):
        # The defaults describe a weekly backup (every 7 days) kept for 30 days.
        super(BackupSchedule, self).__init__(**kwargs)
        self.frequency_interval = frequency_interval
        self.frequency_unit = frequency_unit
        self.keep_at_least_one_backup = keep_at_least_one_backup
        self.retention_period_in_days = retention_period_in_days
        self.start_time = start_time
        # Read-only; populated by the service, never sent in requests.
        self.last_execution_time = None
class BillingMeter(ProxyOnlyResource):
    """App Service billing entity that contains information about meter which the Azure billing system utilizes to charge users for services.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param meter_id: Meter GUID onboarded in Commerce.
    :type meter_id: str
    :param billing_location: Azure Location of billable resource.
    :type billing_location: str
    :param short_name: Short Name from App Service Azure pricing Page.
    :type short_name: str
    :param friendly_name: Friendly name of the meter.
    :type friendly_name: str
    :param resource_type: App Service ResourceType meter used for.
    :type resource_type: str
    :param os_type: App Service OS type meter used for.
    :type os_type: str
    """

    # id/name/type are server-populated and therefore read-only.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # Maps Python attribute names to JSON wire keys.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'meter_id': {'key': 'properties.meterId', 'type': 'str'},
        'billing_location': {'key': 'properties.billingLocation', 'type': 'str'},
        'short_name': {'key': 'properties.shortName', 'type': 'str'},
        'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
        'resource_type': {'key': 'properties.resourceType', 'type': 'str'},
        'os_type': {'key': 'properties.osType', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        kind: Optional[str] = None,
        meter_id: Optional[str] = None,
        billing_location: Optional[str] = None,
        short_name: Optional[str] = None,
        friendly_name: Optional[str] = None,
        resource_type: Optional[str] = None,
        os_type: Optional[str] = None,
        **kwargs
    ):
        super().__init__(kind=kind, **kwargs)
        self.meter_id = meter_id
        self.billing_location = billing_location
        self.short_name = short_name
        self.friendly_name = friendly_name
        self.resource_type = resource_type
        self.os_type = os_type
class BillingMeterCollection(msrest.serialization.Model):
    """Collection of Billing Meters.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param value: Required. Collection of resources.
    :type value: list[~azure.mgmt.web.v2020_06_01.models.BillingMeter]
    :ivar next_link: Link to next page of resources.
    :vartype next_link: str
    """

    _validation = {
        'value': {'required': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[BillingMeter]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value: List["BillingMeter"], **kwargs):
        super().__init__(**kwargs)
        self.value = value
        self.next_link = None  # server-populated pagination link
class BlobStorageTokenStore(ProxyOnlyResource):
    """BlobStorageTokenStore.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param sas_url_setting_name:
    :type sas_url_setting_name: str
    """

    # id/name/type are server-populated and therefore read-only.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'sas_url_setting_name': {'key': 'properties.sasUrlSettingName', 'type': 'str'},
    }

    def __init__(self, *, kind: Optional[str] = None, sas_url_setting_name: Optional[str] = None, **kwargs):
        super().__init__(kind=kind, **kwargs)
        self.sas_url_setting_name = sas_url_setting_name
class Capability(msrest.serialization.Model):
    """Describes the capabilities/features allowed for a specific SKU.

    :param name: Name of the SKU capability.
    :type name: str
    :param value: Value of the SKU capability.
    :type value: str
    :param reason: Reason of the SKU capability.
    :type reason: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, name: Optional[str] = None, value: Optional[str] = None,
                 reason: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.name = name
        self.value = value
        self.reason = reason
class Certificate(Resource):
"""SSL certificate for an app.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar friendly_name: Friendly name of the certificate.
:vartype friendly_name: str
:ivar subject_name: Subject name of the certificate.
:vartype subject_name: str
:param host_names: Host names the certificate applies to.
:type host_names: list[str]
:param pfx_blob: Pfx blob.
:type pfx_blob: bytearray
:ivar site_name: App name.
:vartype site_name: str
:ivar self_link: Self link.
:vartype self_link: str
:ivar issuer: Certificate issuer.
:vartype issuer: str
:ivar issue_date: Certificate issue Date.
:vartype issue_date: ~datetime.datetime
:ivar expiration_date: Certificate expiration date.
:vartype expiration_date: ~datetime.datetime
:param password: <PASSWORD>.
:type password: str
:ivar thumbprint: Certificate thumbprint.
:vartype thumbprint: str
:ivar valid: Is the certificate valid?.
:vartype valid: bool
:ivar cer_blob: Raw bytes of .cer file.
:vartype cer_blob: bytearray
:ivar public_key_hash: Public key hash.
:vartype public_key_hash: str
:ivar hosting_environment_profile: Specification for the App Service Environment to use for the
certificate.
:vartype hosting_environment_profile:
~azure.mgmt.web.v2020_06_01.models.HostingEnvironmentProfile
:param key_vault_id: Key Vault Csm resource Id.
:type key_vault_id: str
:param key_vault_secret_name: Key Vault secret name.
:type key_vault_secret_name: str
:ivar key_vault_secret_status: Status of the Key Vault secret. Possible values include:
"Initialized", "WaitingOnCertificateOrder", "Succeeded", "CertificateOrderFailed",
"OperationNotPermittedOnKeyVault", "AzureServiceUnauthorizedToAccessKeyVault",
"KeyVaultDoesNotExist", "KeyVaultSecretDoesNotExist", "UnknownError", "ExternalPrivateKey",
"Unknown".
:vartype key_vault_secret_status: str or
~azure.mgmt.web.v2020_06_01.models.KeyVaultSecretStatus
:param server_farm_id: Resource ID of the associated App Service plan, formatted as:
"/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
:type server_farm_id: str
:param canonical_name: CNAME of the certificate to be issued via free certificate.
:type canonical_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
'friendly_name': {'readonly': True},
'subject_name': {'readonly': True},
'site_name': {'readonly': True},
'self_link': {'readonly': True},
'issuer': {'readonly': True},
'issue_date': {'readonly': True},
'expiration_date': {'readonly': True},
'thumbprint': {'readonly': True},
'valid': {'readonly': True},
'cer_blob': {'readonly': True},
'public_key_hash': {'readonly': True},
'hosting_environment_profile': {'readonly': True},
'key_vault_secret_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
'subject_name': {'key': 'properties.subjectName', 'type': 'str'},
'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
'pfx_blob': {'key': 'properties.pfxBlob', 'type': 'bytearray'},
'site_name': {'key': 'properties.siteName', 'type': 'str'},
'self_link': {'key': 'properties.selfLink', 'type': 'str'},
'issuer': {'key': 'properties.issuer', 'type': 'str'},
'issue_date': {'key': 'properties.issueDate', 'type': 'iso-8601'},
'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
'password': {'key': 'properties.password', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
'valid': {'key': 'properties.valid', 'type': 'bool'},
'cer_blob': {'key': 'properties.cerBlob', 'type': 'bytearray'},
'public_key_hash': {'key': 'properties.publicKeyHash', 'type': 'str'},
'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
'key_vault_id': {'key': 'properties.keyVaultId', 'type': | |
plot.fig.axes[0].autoscale(tight=True)
plot.fig.axes[1].autoscale(tight=True)
return alldata
#%%
def sweepgate(scanjob):
    """Return the name of the sweep gate in a scanjob.

    Checks 'param' (string or qcodes Parameter) first, then falls
    back to 'gate' and finally the first entry of 'gates'.
    """
    param = scanjob['sweepdata'].get('param', None)
    if isinstance(param, str):
        return param
    if isinstance(param, qcodes.Parameter):
        return param.name
    gate = scanjob['sweepdata'].get('gate', None)
    if gate is None:
        gate = scanjob['sweepdata'].get('gates', [None])[0]
    return gate
def stepgate(scanjob):
    """Return the name of the step gate in a scanjob.

    Checks 'param' (string or qcodes Parameter) first, then falls
    back to 'gate' and finally the first entry of 'gates'.
    """
    param = scanjob['stepdata'].get('param', None)
    if isinstance(param, str):
        return param
    if isinstance(param, qcodes.Parameter):
        return param.name
    gate = scanjob['stepdata'].get('gate', None)
    if gate is None:
        gate = scanjob['stepdata'].get('gates', [None])[0]
    return gate
def show2D(dd, impixel=None, im=None, fig=101, verbose=1, dy=None, sigma=None, colorbar=False, title=None, midx=2, units=None):
    """ Show result of a 2D scan

    Args:
        dd (DataSet): dataset containing the 2D scan data
        impixel (array or None): pre-transformed image in pixel coordinates
        im (array or None): raw image data; transformed when impixel is None
        fig (int or None): figure handle; if None nothing is plotted
        verbose (int): verbosity level
        dy: differentiation option passed to diffImageSmooth
        sigma: smoothing option passed to diffImageSmooth
        colorbar (bool): if True, add a colorbar to the plot
        title (str or None): optional plot title
        midx: unused in this function -- TODO confirm and remove
        units (str or None): units appended to the axis labels
    Returns:
        (extent, vstep, vsweep) tuple, or None when dd is None
    """
    if dd is None:
        return None
    extent, g0, g1, vstep, vsweep, arrayname = dataset2Dmetadata(dd)
    tr = image_transform(dd, mode='pixel')
    array = getattr(dd, arrayname)
    if impixel is None:
        if im is None:
            # Neither image given: take the raw data and transform it.
            im = np.array(array)
            impixel = tr._transform(im)
        else:
            pass
    else:
        pass
    labels = [s.name for s in array.set_arrays]
    xx = extent
    xx = tr.matplotlib_image_extent()  # overrides the scan extent with image extent
    ny = vstep.size
    nx = vsweep.size
    # Optionally differentiate/smooth the image for display.
    im = qtt.utilities.tools.diffImageSmooth(impixel, dy=dy, sigma=sigma)
    if verbose:
        print('show2D: nx %d, ny %d' % (nx, ny,))
    if verbose >= 2:
        print('extent: %s' % xx)
    if units is None:
        unitstr = ''
    else:
        unitstr = ' (%s)' % units
    if fig is not None:
        scanjob = dd.metadata.get('scanjob', dict())
        pgeometry.cfigure(fig)
        plt.clf()
        if impixel is None:
            if verbose >= 2:
                print('show2D: show raw image')
            plt.pcolormesh(vstep, vsweep, im)
        else:
            if verbose >= 2:
                print('show2D: show image')
            plt.imshow(impixel, extent=xx, interpolation='nearest')
        labelx = labels[1]
        labely = labels[0]  # NOTE(review): currently unused
        if scanjob.get('sweepdata', None) is not None:
            labelx = sweepgate(scanjob)
            plt.xlabel('%s' % labelx + unitstr)
        else:
            pass
        if scanjob.get('stepdata', None) is not None:
            if units is None:
                plt.ylabel('%s' % stepgate(scanjob))
            else:
                plt.ylabel('%s (%s)' % (stepgate(scanjob), units))
        if not title is None:
            plt.title(title)
        if colorbar:
            plt.colorbar()
        if verbose >= 2:
            print('show2D: at show')
        try:
            plt.show(block=False)
        except:
            # ipython backend does not know about block keyword...
            plt.show()
    return xx, vstep, vsweep
#%% Extract metadata
def dataset1Dmetadata(alldata, arrayname=None, verbose=0):
    """ Extract metadata from a 1D scan

    Args:
        alldata (DataSet): dataset to extract the metadata from
        arrayname (str or None): name of the main array; the dataset's
            default parameter is used when None
        verbose (int): verbosity level
    Returns:
        extent (list): x1,x2
        g0 (string): step gate
        vstep (array): step values
        istep (float): mean absolute step size
        arrayname (string): identifier of the main array
    """
    if arrayname is None:
        arrayname = alldata.default_parameter_name()
    A = alldata.arrays[arrayname]
    g0 = A.set_arrays[0].name
    vstep = np.array(A.set_arrays[0])
    extent = [vstep[0], vstep[-1]]  # in scan order, possibly descending
    istep = np.abs(np.mean(np.diff(vstep)))
    if verbose:
        # Bug fix: the original format string had two %s placeholders for a
        # single value, raising TypeError whenever verbose was set.
        print('1D scan: gate %s' % (g0,))
    return extent, g0, vstep, istep, arrayname
def dataset2Dmetadata(alldata, arrayname=None, verbose=0):
    """ Extract metadata from a 2D scan

    Returns:
        extent (list): x1,x2,y1,y2
        g0 (str): step gate (array_id)
        g1 (str): sweep gate (array_id)
        vstep (array): step values
        vsweep (array): sweep values
        arrayname (string): identifier of the main array
    """
    if arrayname is None:
        arrayname = alldata.default_parameter_name()
    main_array = alldata.arrays[arrayname]
    step_axis, sweep_axis = main_array.set_arrays[0], main_array.set_arrays[1]
    g0 = step_axis.array_id
    g1 = sweep_axis.array_id
    vstep = np.array(step_axis)
    # The sweep values are repeated per step; take the first row.
    vsweep = np.array(sweep_axis)[0]
    extent = [vsweep[0], vsweep[-1], vstep[0], vstep[-1]]  # change order?
    if verbose:
        print('2D scan: gates %s %s' % (g0, g1))
    return extent, g0, g1, vstep, vsweep, arrayname
# Manual test entry point; the `and 0` permanently disables it (kept for reference).
if __name__ == '__main__' and 0:
    test_dataset()
#%%
class image_transform:

    def __init__(self, dataset=None, arrayname=None, mode='pixel', unitsperpixel=None, verbose=0):
        """ Class to convert scan coordinates to image coordinates

        Args:
            dataset (DataSet):
            arrayname (str or None): name of array to select from dataset
            mode (str): 'pixel' or 'raw'
            unitsperpixel (None, number or 2-list): scan units per pixel,
                used to rescale the image
            verbose (int): verbosity level
        """
        self.H = np.eye(3)  # raw image to pixel image transformation
        self.extent = None  # image extent in pixel
        self.verbose = verbose
        self.dataset = dataset
        self.arrayname = arrayname
        extentscan, g0, g2, vstep, vsweep, arrayname = dataset2Dmetadata(
            dataset, arrayname=self.arrayname)
        self.vstep = vstep
        self.vsweep = vsweep
        self._istep = dataset_get_istep(dataset)
        if self.verbose:
            print('image_transform: istep %.2f, unitsperpixel %s' % (self._istep, unitsperpixel))
        nx = len(vsweep)
        ny = len(vstep)
        self.flipX = False
        self.flipY = False
        # Homogeneous transforms mirroring the image horizontally/vertically.
        Hx = np.diag([-1, 1, 1])
        Hx[0, -1] = nx - 1
        Hy = np.diag([1, -1, 1])
        Hy[1, -1] = ny - 1
        if self.verbose:
            print('image_transform: vsweep[0] %s' % vsweep[0])
        if mode == 'raw':
            pass
        else:
            # Flip axes so that the pixel image has increasing scan coordinates.
            if vsweep[0] > vsweep[1]:
                self.flipX = True
                self.H = Hx.dot(self.H)
            if vstep[0] < vstep[1]:
                self.flipY = True
                self.H = Hy.dot(self.H)
        self._imraw = np.array(dataset.arrays[arrayname])
        if isinstance(unitsperpixel, (float, int)):
            unitsperpixel = [unitsperpixel, unitsperpixel]
        self.unitsperpixel = unitsperpixel
        if self.unitsperpixel is not None:
            imextent = self.scan_image_extent()
            if self.verbose:
                print('image_transform: unitsperpixel %s' %
                      (self.unitsperpixel, ))
            ims, Hs, _ = qtt.algorithms.generic.rescaleImage(
                self._imraw, imextent, mvx=unitsperpixel[0], mvy=unitsperpixel[1])
            self._im = ims
            self.H = Hs @ self.H
        if verbose:
            # NOTE(review): when unitsperpixel is None, self._im is not yet set
            # at this point, so verbose output would fail here -- confirm.
            print('image_transform: tr._imraw.shape %s' % (self._imraw.shape, ))
            print('image_transform: tr._im.shape %s' % (self._im.shape, ))
        # Recompute the pixel image (flips and optional rescaling) from the raw data.
        self._im = self._transform(self._imraw)
        self.Hi = np.linalg.inv(self.H)

    def image(self):
        """Return the transformed (pixel-coordinate) image."""
        return self._im

    def istep_sweep(self):
        """Mean increment of the sweep axis values."""
        return np.mean(np.diff(self.vsweep))

    def istep_step(self):
        """Mean increment of the step axis values."""
        return np.mean(np.diff(self.vstep))

    def istep(self):
        """Scan resolution as determined from the dataset."""
        return self._istep

    def scan_image_extent(self):
        """ Scan extent

        Returns:
            extentImage (list): x0, x1, y0, y1
                        x0, y0 is top left
        """
        vsweep = self.vsweep
        vstep = self.vstep
        extentImage = [vsweep[0], vsweep[-1], vstep[0], vstep[-1]]
        if self.flipX:
            extentImage = [extentImage[1], extentImage[
                0], extentImage[2], extentImage[3]]
        if self.flipY:
            extentImage = [extentImage[0], extentImage[
                1], extentImage[3], extentImage[2]]
        self.extent = extentImage
        return extentImage

    def matplotlib_image_extent(self):
        """ Return matplotlib style image extent

        Returns:
            extentImage (4 floats): x1, x2, y1, y2
                        the y1 value is bottom left
        """
        vsweep = self.vsweep
        vstep = self.vstep
        extentImage = [vsweep[0], vsweep[-1], vstep[-1], vstep[0]]
        if self.flipX:
            extentImage = [extentImage[1], extentImage[
                0], extentImage[2], extentImage[3]]
        if self.flipY:
            extentImage = [extentImage[0], extentImage[
                1], extentImage[3], extentImage[2]]
        self.extent = extentImage
        return extentImage

    def transform_image(self, im):
        """Transform a raw image to pixel coordinates."""
        return self._transform(im)

    def _transform(self, im):
        """ Transform raw image to image in pixel coordinates such that the imageExtent is increasing
        """
        if self.flipX:
            im = im[::, ::-1]
        if self.flipY:
            im = im[::-1, ::]
        if self.unitsperpixel is not None:
            imextent = self.scan_image_extent()
            # Bug fix: rescale the (flipped) input image instead of always
            # rescaling the raw dataset image (self._imraw); otherwise
            # transform_image() silently ignored its argument.
            ims, _, _ = qtt.algorithms.generic.rescaleImage(
                im, imextent, mvx=self.unitsperpixel[0], mvy=self.unitsperpixel[1])
        else:
            ims = im
        return ims

    def _itransform(self, im):
        # Inverse of the flip part of the transform (no rescaling).
        if self.flipX:
            im = im[::, ::-1]
        if self.flipY:
            im = im[::-1, ::]
        return im

    def pixel2scan(self, pt):
        """ Convert pixels coordinates to scan coordinates (mV)
        Arguments
        ---------
        pt : array
            points in pixel coordinates (x,y)
        Returns
        -------
          ptx (array): point in scan coordinates (sweep, step)
        """
        ptx = pgeometry.projectiveTransformation(
            self.Hi, np.array(pt).astype(float))
        extent, g0, g1, vstep, vsweep, arrayname = dataset2Dmetadata(
            self.dataset, arrayname=self.arrayname, verbose=0)
        nx = vsweep.size
        ny = vstep.size
        xx = extent
        x = ptx
        nn = pt.shape[1]
        ptx = np.zeros((2, nn))
        # Interpolate pixel indices to scan values for both axes.
        f = scipy.interpolate.interp1d(
            [0, ny - 1], [xx[2], xx[3]], assume_sorted=False, fill_value='extrapolate')
        ptx[1, :] = f(x[1, :])  # step
        f = scipy.interpolate.interp1d(
            [0, nx - 1], [xx[0], xx[1]], assume_sorted=False, fill_value='extrapolate')
        ptx[0, :] = f(x[0, :])  # sweep
        return ptx

    def scan2pixel(self, pt):
        """ Convert scan coordinates to pixel coordinates
        Arguments
        ---------
        pt : array
            points in scan coordinates
        Returns:
            ptpixel (ndaray): points in pixel coordinates
        """
        extent, g0, g1, vstep, vsweep, arrayname = dataset2Dmetadata(
            self.dataset, arrayname=self.arrayname, verbose=0)
        nx = vsweep.size
        ny = vstep.size
        xx = extent
        x = pt
        nn = pt.shape[1]
        ptpixel = np.zeros((2, nn))
        # Interpolate scan values to pixel indices for both axes.
        f = scipy.interpolate.interp1d(
            [xx[2], xx[3]], [0, ny - 1], assume_sorted=False)
        ptpixel[1, :] = f(x[1, :])
        f = scipy.interpolate.interp1d(
            [xx[0], xx[1]], [0, nx - 1], assume_sorted=False)
        ptpixel[0, :] = f(x[0, :])  # sweep to pixel x
        ptpixel = pgeometry.projectiveTransformation(
            self.H, np.array(ptpixel).astype(float))
        return ptpixel
def test_image_transform(verbose=0):
    """Smoke test for the image_transform class on a mock 2D dataset."""
    from qcodes.tests.data_mocks import DataSet2D
    dataset = DataSet2D()
    transform = image_transform(dataset)
    image = transform.image()
    if verbose:
        print('transform: im.shape %s' % (str(image.shape),))
    transform = image_transform(dataset, unitsperpixel=[None, 2])
    image = transform.image()
    if verbose:
        print('transform: im.shape %s' % (str(image.shape),))
# Run the smoke test when this module is executed directly.
if __name__ == '__main__':
    test_image_transform()
#%%
def pickleload(pkl_file):
    """ Load objects from file with pickle

    Args:
        pkl_file (str): path to the pickle file
    Returns:
        The unpickled data; None when loading fails under Python 2.
    """
    try:
        with open(pkl_file, 'rb') as input_file:
            data2 = pickle.load(input_file)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        if sys.version_info.major >= 3:
            # if pickle file was saved in python2 we might fix issues with a
            # different encoding
            with open(pkl_file, 'rb') as input_file:
                data2 = pickle.load(input_file, encoding='latin')
        else:
            data2 = None
    return data2
def _data_extension():
return 'pickle'
def load_data(mfile: str):
    """Load data from the specified file, appending the default
    data extension when it is missing."""
    ext = _data_extension()
    if ext is not None and not mfile.endswith(ext):
        mfile = mfile + '.' + ext
    return pickleload(mfile)
def write_data(mfile: str, data):
''' Write data to specified file '''
ext = _data_extension()
if ext is not None:
if not mfile.endswith(ext):
mfile = mfile + '.' + ext
| |
<filename>nautobot/core/api/views.py
import logging
import platform
from collections import OrderedDict
from django import __version__ as DJANGO_VERSION
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.http.response import HttpResponseBadRequest
from django.db import transaction
from django.db.models import ProtectedError
from django_rq.queues import get_connection
from rest_framework import status
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework.viewsets import ModelViewSet as ModelViewSet_
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.exceptions import PermissionDenied, ParseError
from drf_yasg.openapi import Schema, TYPE_OBJECT, TYPE_ARRAY
from drf_yasg.utils import swagger_auto_schema
from rq.worker import Worker
from graphql import get_default_backend
from graphql.execution import ExecutionResult
from graphql.type.schema import GraphQLSchema
from graphql.execution.middleware import MiddlewareManager
from graphene_django.settings import graphene_settings
from graphene_django.views import GraphQLView, instantiate_middleware, HttpError
from nautobot.core.api import BulkOperationSerializer
from nautobot.core.api.exceptions import SerializerNotFound
from nautobot.utilities.api import get_serializer_for_model
from . import serializers
# Maps each HTTP method to the object-permission action it requires
# (None means no permission check is performed for that method).
HTTP_ACTIONS = {
    "GET": "view",
    "OPTIONS": None,
    "HEAD": "view",
    "POST": "add",
    "PUT": "change",
    "PATCH": "change",
    "DELETE": "delete",
}
#
# Mixins
#
class BulkUpdateModelMixin:
    """
    Support bulk modification of objects using the list endpoint for a model. Accepts a PATCH action with a list of one
    or more JSON objects, each specifying the UUID of an object to be updated as well as the attributes to be set.
    For example:

    PATCH /api/dcim/sites/
    [
        {
            "id": "1f554d07-d099-437d-8d48-7d6e35ec8fa3",
            "name": "Renamed site"
        },
        {
            "id": "1f554d07-d099-437d-8d48-7d6e65ec8fa3",
            "status": "planned"
        }
    ]
    """

    def bulk_update(self, request, *args, **kwargs):
        # Validate the overall request shape (a list of objects with "id" keys).
        partial = kwargs.pop("partial", False)
        serializer = BulkOperationSerializer(data=request.data, many=True)
        serializer.is_valid(raise_exception=True)
        qs = self.get_queryset().filter(pk__in=[o["id"] for o in serializer.data])
        # Map update data by object ID
        # NOTE(review): IDs not present in the queryset are silently skipped --
        # confirm this is the intended behavior. Also note obj.pop("id")
        # mutates request.data in place.
        update_data = {obj.pop("id"): obj for obj in request.data}
        data = self.perform_bulk_update(qs, update_data, partial=partial)
        return Response(data, status=status.HTTP_200_OK)

    def perform_bulk_update(self, objects, update_data, partial):
        # All updates are applied atomically: any failure rolls back the batch.
        with transaction.atomic():
            data_list = []
            for obj in objects:
                data = update_data.get(str(obj.id))
                serializer = self.get_serializer(obj, data=data, partial=partial)
                serializer.is_valid(raise_exception=True)
                self.perform_update(serializer)
                data_list.append(serializer.data)
            return data_list

    def bulk_partial_update(self, request, *args, **kwargs):
        # PATCH variant: only the provided fields are updated on each object.
        kwargs["partial"] = True
        return self.bulk_update(request, *args, **kwargs)
class BulkDestroyModelMixin:
    """
    Allow DELETE against a model's list endpoint to remove several objects in
    one request. The body is a list of JSON objects, each naming the UUID of
    an object to delete, e.g.:

    DELETE /api/dcim/sites/
    [
        {"id": "3f01f169-49b9-42d5-a526-df9118635d62"},
        {"id": "c27d6c5b-7ea8-41e7-b9dd-c065efd5d9cd"}
    ]
    """

    def bulk_destroy(self, request, *args, **kwargs):
        # Validate that every payload entry carries an "id" key.
        serializer = BulkOperationSerializer(data=request.data, many=True)
        serializer.is_valid(raise_exception=True)
        requested_ids = [entry["id"] for entry in serializer.data]
        doomed = self.get_queryset().filter(pk__in=requested_ids)
        self.perform_bulk_destroy(doomed)
        return Response(status=status.HTTP_204_NO_CONTENT)

    def perform_bulk_destroy(self, objects):
        # Delete atomically so a failure part-way through removes nothing.
        with transaction.atomic():
            for obj in objects:
                self.perform_destroy(obj)
#
# Viewsets
#
class ModelViewSet(BulkUpdateModelMixin, BulkDestroyModelMixin, ModelViewSet_):
    """
    Extend DRF's ModelViewSet to support bulk update and delete functions.
    """

    # Flipped to True per-request by initialize_request() when ?brief= is passed.
    brief = False
    # Prefetches applied in brief mode in place of the queryset's own prefetches.
    brief_prefetch_fields = []

    def get_serializer(self, *args, **kwargs):
        # If a list of objects has been provided, initialize the serializer with many=True
        if isinstance(kwargs.get("data", {}), list):
            kwargs["many"] = True
        return super().get_serializer(*args, **kwargs)

    def get_serializer_class(self):
        logger = logging.getLogger("nautobot.core.api.views.ModelViewSet")
        # If using 'brief' mode, find and return the nested serializer for this model, if one exists
        if self.brief:
            logger.debug("Request is for 'brief' format; initializing nested serializer")
            try:
                serializer = get_serializer_for_model(self.queryset.model, prefix="Nested")
                logger.debug(f"Using serializer {serializer}")
                return serializer
            except SerializerNotFound:
                logger.debug(f"Nested serializer for {self.queryset.model} not found!")
        # Fall back to the hard-coded serializer class
        logger.debug(f"Using serializer {self.serializer_class}")
        return self.serializer_class

    def get_queryset(self):
        # If using brief mode, clear all prefetches from the queryset and append only brief_prefetch_fields (if any)
        if self.brief:
            return super().get_queryset().prefetch_related(None).prefetch_related(*self.brief_prefetch_fields)
        return super().get_queryset()

    def initialize_request(self, request, *args, **kwargs):
        # Check if brief=True has been passed
        if request.method == "GET" and request.GET.get("brief"):
            self.brief = True
        return super().initialize_request(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        super().initial(request, *args, **kwargs)
        if not request.user.is_authenticated:
            return
        # Restrict the view's QuerySet to allow only the permitted objects
        action = HTTP_ACTIONS[request.method]
        if action:
            self.queryset = self.queryset.restrict(request.user, action)

    def dispatch(self, request, *args, **kwargs):
        logger = logging.getLogger("nautobot.core.api.views.ModelViewSet")
        try:
            return super().dispatch(request, *args, **kwargs)
        except ProtectedError as e:
            # Deleting an object still referenced through PROTECT foreign keys
            # yields a 409 Conflict listing the blocking objects.
            protected_objects = list(e.protected_objects)
            msg = f"Unable to delete object. {len(protected_objects)} dependent objects were found: "
            msg += ", ".join([f"{obj} ({obj.pk})" for obj in protected_objects])
            logger.warning(msg)
            return self.finalize_response(request, Response({"detail": msg}, status=409), *args, **kwargs)

    def _validate_objects(self, instance):
        """
        Check that the provided instance or list of instances are matched by the current queryset. This confirms that
        any newly created or modified objects abide by the attributes granted by any applicable ObjectPermissions.
        """
        if type(instance) is list:
            # Check that all instances are still included in the view's queryset
            conforming_count = self.queryset.filter(pk__in=[obj.pk for obj in instance]).count()
            if conforming_count != len(instance):
                raise ObjectDoesNotExist
        else:
            # Check that the instance is matched by the view's queryset
            self.queryset.get(pk=instance.pk)

    def perform_create(self, serializer):
        model = self.queryset.model
        logger = logging.getLogger("nautobot.core.api.views.ModelViewSet")
        logger.info(f"Creating new {model._meta.verbose_name}")
        # Enforce object-level permissions on save()
        try:
            with transaction.atomic():
                instance = serializer.save()
                self._validate_objects(instance)
        except ObjectDoesNotExist:
            # Saved object fell outside the restricted queryset: deny and roll back.
            raise PermissionDenied()

    def perform_update(self, serializer):
        model = self.queryset.model
        logger = logging.getLogger("nautobot.core.api.views.ModelViewSet")
        logger.info(f"Updating {model._meta.verbose_name} {serializer.instance} (PK: {serializer.instance.pk})")
        # Enforce object-level permissions on save()
        try:
            with transaction.atomic():
                instance = serializer.save()
                self._validate_objects(instance)
        except ObjectDoesNotExist:
            raise PermissionDenied()

    def perform_destroy(self, instance):
        model = self.queryset.model
        logger = logging.getLogger("nautobot.core.api.views.ModelViewSet")
        logger.info(f"Deleting {model._meta.verbose_name} {instance} (PK: {instance.pk})")
        return super().perform_destroy(instance)
#
# Views
#
class APIRootView(APIView):
    """
    This is the root of Nautobot's REST API. API endpoints are arranged by app and model name; e.g. `/api/dcim/sites/`.
    """

    _ignore_model_permissions = True
    exclude_from_schema = True
    swagger_schema = None

    def get_view_name(self):
        return "API Root"

    def get(self, request, format=None):
        # (label, URL name) pairs, rendered in this fixed order.
        named_urls = (
            ("circuits", "circuits-api:api-root"),
            ("dcim", "dcim-api:api-root"),
            ("extras", "extras-api:api-root"),
            ("graphql", "graphql-api"),
            ("ipam", "ipam-api:api-root"),
            ("plugins", "plugins-api:api-root"),
            ("status", "api-status"),
            ("tenancy", "tenancy-api:api-root"),
            ("users", "users-api:api-root"),
            ("virtualization", "virtualization-api:api-root"),
        )
        return Response(
            OrderedDict(
                (label, reverse(url_name, request=request, format=format))
                for label, url_name in named_urls
            )
        )
class StatusView(APIView):
    """
    A lightweight read-only endpoint for conveying Nautobot's current operational status.
    """

    permission_classes = [IsAuthenticated]

    def get(self, request):
        # Collect the version of every installed Django app that advertises
        # one, via either a VERSION attribute or the conventional __version__.
        installed_apps = {}
        for app_config in apps.get_app_configs():
            module = app_config.module
            version = getattr(module, "VERSION", getattr(module, "__version__", None))
            if version:
                if type(version) is tuple:
                    # e.g. (1, 2, 3) -> "1.2.3"
                    version = ".".join(str(part) for part in version)
                installed_apps[app_config.name] = version
        installed_apps = dict(sorted(installed_apps.items()))

        # Collect installed plugin versions, keyed by the plugin's app label.
        plugins = {}
        for plugin_name in settings.PLUGINS:
            plugin_name = plugin_name.rsplit(".", 1)[-1]
            plugin_config = apps.get_app_config(plugin_name)
            plugins[plugin_name] = getattr(plugin_config, "version", None)
        plugins = dict(sorted(plugins.items()))

        return Response(
            {
                "django-version": DJANGO_VERSION,
                "installed-apps": installed_apps,
                "nautobot-version": settings.VERSION,
                "plugins": plugins,
                "python-version": platform.python_version(),
                "rq-workers-running": Worker.count(get_connection("default")),
            }
        )
#
# GraphQL
#
class GraphQLDRFAPIView(APIView):
"""
API View for GraphQL to integrate properly with DRF authentication mecanism.
The code is a stripped down version of graphene-django default View
https://github.com/graphql-python/graphene-django/blob/main/graphene_django/views.py#L57
"""
permission_classes = [AllowAny]
graphql_schema = None
executor = None
backend = None
middleware = None
root_value = None
def __init__(self, schema=None, executor=None, middleware=None, root_value=None, backend=None):
    # Fall back to the globally configured graphene settings when the caller
    # does not supply an explicit schema / backend / middleware.
    if not schema:
        schema = graphene_settings.SCHEMA
    if backend is None:
        backend = get_default_backend()
    if middleware is None:
        middleware = graphene_settings.MIDDLEWARE
    self.graphql_schema = self.graphql_schema or schema
    if middleware is not None:
        # Accept either an already-built MiddlewareManager or an iterable of
        # middleware classes/instances that still needs instantiation.
        if isinstance(middleware, MiddlewareManager):
            self.middleware = middleware
        else:
            self.middleware = list(instantiate_middleware(middleware))
    self.executor = executor
    self.root_value = root_value
    self.backend = backend
    assert isinstance(self.graphql_schema, GraphQLSchema), "A Schema is required to be provided to GraphQLAPIView."
def get_root_value(self, request):
    # Root object handed to the top-level GraphQL resolvers (None by default).
    return self.root_value

def get_middleware(self, request):
    # Middleware chain built in __init__ (MiddlewareManager or list).
    return self.middleware

def get_context(self, request):
    # The incoming request doubles as the GraphQL execution context.
    return request

def get_backend(self, request):
    return self.backend
@swagger_auto_schema(
    request_body=serializers.GraphQLAPISerializer,
    operation_description="Query the database using a GraphQL query",
    responses={
        200: Schema(type=TYPE_OBJECT, properties={"data": Schema(type=TYPE_OBJECT)}),
        400: Schema(
            type=TYPE_OBJECT,
            properties={"errors": Schema(type=TYPE_ARRAY, items={"type": TYPE_OBJECT})},
        ),
    },
)
def post(self, request, *args, **kwargs):
    # Parse the body, execute the query and return the GraphQL result.
    # Malformed requests surface as a 400 with a GraphQL-style error list.
    try:
        data = self.parse_body(request)
        result, status_code = self.get_response(request, data)
        return Response(
            result,
            status=status_code,
        )
    except HttpError as e:
        return Response(
            {"errors": [GraphQLView.format_error(e)]},
            status=status.HTTP_400_BAD_REQUEST,
        )
def get_response(self, request, data):
    """Execute the GraphQL query extracted from the request and build the payload.

    Args:
        request (HttpRequest): Request Object from Django
        data (dict): Parsed content of the body of the request.

    Returns:
        response (dict), status_code (int): Payload of the response to send and the status code.
    """
    query, variables, operation_name, _id = GraphQLView.get_graphql_params(request, data)
    execution_result = self.execute_graphql_request(request, data, query, variables, operation_name)
    if not execution_result:
        return None, 200
    payload = {}
    status_code = 200
    if execution_result.errors:
        payload["errors"] = [GraphQLView.format_error(err) for err in execution_result.errors]
    if execution_result.invalid:
        # Invalid query: report the errors only, with a 400 status.
        status_code = 400
    else:
        payload["data"] = execution_result.data
    return payload, status_code
def parse_body(self, request):
"""Analyze the request and based on the content type,
extract the query from the body as a string or as a JSON | |
# tests/execute_w3c_tests.py
#!/usr/bin/env python3
#
# Copyright (c), 2018-2021, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author <NAME> <<EMAIL>>
# @author <NAME> <<EMAIL>>
#
"""
Tests script for running W3C XPath tests on elementpath. This is a
reworking of https://github.com/tjeb/elementpath_w3c_tests project
that uses ElementTree for default and collapses the essential parts
into only one module.
"""
import argparse
import contextlib
import decimal
import re
import json
import math
import os
import pathlib
import sys
import traceback
from collections import OrderedDict
from urllib.parse import urlsplit
from xml.etree import ElementTree
import lxml.etree
import elementpath
import xmlschema
from elementpath import ElementPathError, XPath2Parser, XPathContext, XPathNode
from elementpath.namespaces import get_expanded_name
from elementpath.xpath_token import XPathFunction
from elementpath.datatypes import AnyAtomicType
from elementpath.xpath31 import XPath31Parser
# True on Python 3.8+, where ElementTree can retain comments/PIs while parsing.
PY38_PLUS = sys.version_info > (3, 8)

# Dependency types that a test-case may declare in the W3C QT3 catalog.
DEPENDENCY_TYPES = {'spec', 'feature', 'calendar', 'default-language',
                    'format-integer-sequence', 'language', 'limits',
                    'xml-version', 'xsd-version', 'unicode-version',
                    'unicode-normalization-form'}
SKIP_TESTS = {
'fn-subsequence__cbcl-subsequence-010',
'fn-subsequence__cbcl-subsequence-011',
'fn-subsequence__cbcl-subsequence-012',
'fn-subsequence__cbcl-subsequence-013',
'fn-subsequence__cbcl-subsequence-014',
'prod-NameTest__NodeTest004',
# Unsupported collations
'fn-compare__compare-010',
'fn-substring-after__fn-substring-after-24',
'fn-substring-before__fn-substring-before-24',
'fn-deep-equal__K-SeqDeepEqualFunc-57',
'fn-deep-equal__K-SeqDeepEqualFunc-56',
# Unsupported language
'fn-format-integer__format-integer-032',
'fn-format-integer__format-integer-032-fr',
'fn-format-integer__format-integer-052',
'fn-format-integer__format-integer-065',
# Processing-instructions (tests on env "auction")
'fn-local-name__fn-local-name-78',
'fn-name__fn-name-28',
'fn-string__fn-string-28',
# Require XML 1.1
'fn-codepoints-to-string__K-CodepointToStringFunc-8a',
'fn-codepoints-to-string__K-CodepointToStringFunc-11b',
'fn-codepoints-to-string__K-CodepointToStringFunc-12b',
# Require unicode version "7.0"
'fn-lower-case__fn-lower-case-19',
'fn-upper-case__fn-upper-case-19',
'fn-matches.re__re00506',
'fn-matches.re__re00984',
# Very large number fault (interpreter crashes or float rounding)
'op-to__RangeExpr-409d',
'fn-format-number__numberformat60a',
'fn-format-number__cbcl-fn-format-number-035',
# For XQuery??
'fn-deep-equal__K2-SeqDeepEqualFunc-43', # includes a '!' symbol
# For XP30+
'fn-root__K-NodeRootFunc-2', # includes a XPath 3.0 fn:generate-id()
'fn-codepoints-to-string__cbcl-codepoints-to-string-021', # Too long ...
'fn-unparsed-text__fn-unparsed-text-038', # Typo in filename
'fn-unparsed-text-lines__fn-unparsed-text-lines-038', # Typo in filename
'fn-serialize__serialize-xml-015b', # Do not raise, attribute is good
'fn-parse-xml-fragment__parse-xml-fragment-022-st', # conflict with parse-xml-fragment-022
'fn-for-each-pair__fn-for-each-pair-017', # Requires PI and comments parsing
'fn-function-lookup__fn-function-lookup-522', # xs:dateTimeStamp for XSD 1.1 only
# Unicode FULLY-NORMALIZATION not supported in Python's unicodedata
'fn-normalize-unicode__cbcl-fn-normalize-unicode-001',
'fn-normalize-unicode__cbcl-fn-normalize-unicode-006',
# 'เจมส์' does not match xs:NCName (maybe due to Python re module limitation)
'prod-CastExpr__K2-SeqExprCast-488',
'prod-CastExpr__K2-SeqExprCast-504',
# IMHO incorrect tests
'fn-resolve-uri__fn-resolve-uri-9', # URI scheme names are lowercase
'fn-format-number__numberformat82', # result may be '12.340,00' instead of '0.012,34'
'fn-format-number__numberformat83', # (idem)
}
# Tests that can be run only with lxml.etree
LXML_ONLY = {
# parse of comments or PIs required
'fn-string__fn-string-30',
'prod-AxisStep__Axes003-4',
'prod-AxisStep__Axes006-4',
'prod-AxisStep__Axes033-4',
'prod-AxisStep__Axes037-2',
'prod-AxisStep__Axes046-2',
'prod-AxisStep__Axes049-2',
'prod-AxisStep__Axes058-2',
'prod-AxisStep__Axes058-3',
'prod-AxisStep__Axes061-1',
'prod-AxisStep__Axes061-2',
'prod-AxisStep__Axes064-2',
'prod-AxisStep__Axes064-3',
'prod-AxisStep__Axes067-2',
'prod-AxisStep__Axes067-3',
'prod-AxisStep__Axes073-1',
'prod-AxisStep__Axes073-2',
'prod-AxisStep__Axes076-4',
'prod-AxisStep__Axes079-4',
'fn-path__path007',
'fn-path__path009',
'fn-generate-id__generate-id-005',
'fn-parse-xml-fragment__parse-xml-fragment-010',
# in-scope namespaces required
'prod-AxisStep__Axes118',
'prod-AxisStep__Axes120',
'prod-AxisStep__Axes126',
'fn-resolve-QName__fn-resolve-qname-26',
'fn-in-scope-prefixes__fn-in-scope-prefixes-21',
'fn-in-scope-prefixes__fn-in-scope-prefixes-22',
'fn-in-scope-prefixes__fn-in-scope-prefixes-24',
'fn-in-scope-prefixes__fn-in-scope-prefixes-25',
'fn-in-scope-prefixes__fn-in-scope-prefixes-26',
'fn-innermost__fn-innermost-017',
'fn-innermost__fn-innermost-018',
'fn-innermost__fn-innermost-019',
'fn-innermost__fn-innermost-020',
'fn-innermost__fn-innermost-021',
'fn-outermost__fn-outermost-017',
'fn-outermost__fn-outermost-018',
'fn-outermost__fn-outermost-019',
'fn-outermost__fn-outermost-021',
'fn-local-name__fn-local-name-77',
'fn-local-name__fn-local-name-79',
'fn-name__fn-name-27',
'fn-name__fn-name-29',
'fn-string__fn-string-27',
'fn-format-number__numberformat87',
'fn-format-number__numberformat88',
'fn-path__path010',
'fn-path__path011',
'fn-path__path012',
'fn-path__path013',
'fn-function-lookup__fn-function-lookup-262',
'fn-generate-id__generate-id-007',
'fn-serialize__serialize-xml-012',
'prod-EQName__eqname-018',
'prod-EQName__eqname-023',
'prod-NamedFunctionRef__function-literal-262',
# XML declaration
'fn-serialize__serialize-xml-029b',
'fn-serialize__serialize-xml-030b',
# require external ENTITY parsing
'fn-parse-xml__parse-xml-010',
}
# Parser class used to run the tests (XPath 2.0 by default).
# NOTE(review): appears to be rebound elsewhere (e.g. to XPath31Parser) — confirm.
xpath_parser = XPath2Parser

# Spec identifiers whose dependent test-cases are ignored by default.
ignore_specs = {'XQ10', 'XQ10+', 'XP30', 'XP30+', 'XQ30', 'XQ30+',
                'XP31', 'XP31+', 'XQ31', 'XQ31+', 'XT30+'}

# Namespace of the W3C QT3 test-suite catalog files.
QT3_NAMESPACE = "http://www.w3.org/2010/09/qt-fots-catalog"
namespaces = {'': QT3_NAMESPACE}

# Base URL referenced by some unparsed-text tests.
INVALID_BASE_URL = 'http://www.w3.org/fots/unparsed-text/'
# NOTE(review): presumably assigned later as a local substitute for
# INVALID_BASE_URL — confirm against the rest of the script.
effective_base_url = None
@contextlib.contextmanager
def working_directory(dirpath):
    """Temporarily make *dirpath* the process working directory."""
    previous = os.getcwd()
    os.chdir(dirpath)
    try:
        yield
    finally:
        # Always restore the original directory, even if the body raised.
        os.chdir(previous)
def etree_is_equal(root1, root2, strict=True):
    """Compare two XML trees node-by-node in document order.

    Each pair of nodes is compared on tag, attributes, text and tail.
    With strict=False, text and tail are compared after stripping
    leading/trailing whitespace (but a None on only one side still differs).

    :param root1: root element of the first tree.
    :param root2: root element of the second tree.
    :param strict: if False, ignore surrounding whitespace in text/tail.
    :return: True if the trees are equal, False otherwise.
    """
    nodes1 = root1.iter()
    nodes2 = root2.iter()
    for e1 in nodes1:
        e2 = next(nodes2, None)
        if e2 is None:
            # The second tree has fewer nodes than the first.
            return False
        if e1.tail != e2.tail:
            if strict or e1.tail is None or e2.tail is None:
                return False
            if e1.tail.strip() != e2.tail.strip():
                return False
        # A callable tag marks a comment/PI node: both must be the same kind.
        if callable(e1.tag) ^ callable(e2.tag):
            return False
        elif not callable(e1.tag):
            # Bugfix: these previously compared e1 with itself (always equal),
            # so differing tags/attributes were never detected.
            if e1.tag != e2.tag:
                return False
            if e1.attrib != e2.attrib:
                return False
        if e1.text != e2.text:
            if strict or e1.text is None or e2.text is None:
                return False
            if e1.text.strip() != e2.text.strip():
                return False
    # Equal only if the second tree is exhausted too.
    return next(nodes2, None) is None
# Script-specific exception hierarchy, rooted at ExecutionError.
class ExecutionError(Exception):
    """Common class for W3C XPath tests execution script."""


class ParseError(ExecutionError):
    """Other error generated by XPath expression parsing and static evaluation."""


class EvaluateError(ExecutionError):
    """Other error generated by XPath token evaluation with dynamic context."""
class Schema(object):
    """Represents an XSD schema used in XML environment settings."""

    def __init__(self, elem):
        assert elem.tag == '{%s}schema' % QT3_NAMESPACE
        self.uri = elem.get('uri')
        self.file = elem.get('file')
        description = elem.find('description', namespaces)
        # Missing <description> element -> empty string; an empty element keeps
        # its .text value (possibly None), matching the previous behavior.
        self.description = description.text if description is not None else ''
        self.filepath = os.path.abspath(self.file) if self.file else self.file

    def __repr__(self):
        return '%s(uri=%r, file=%s)' % (self.__class__.__name__, self.uri, self.file)
class Source(object):
    """Represents a source file as used in XML environment settings."""

    # Prefix-to-URI mapping parsed from the source document (None until loaded).
    namespaces = None

    def __init__(self, elem, use_lxml=False):
        assert elem.tag == '{%s}source' % QT3_NAMESPACE
        self.file = elem.attrib['file']
        self.role = elem.attrib.get('role', '')
        self.uri = elem.attrib.get('uri', self.file)
        # Turn relative URIs into absolute file:// URIs.
        if not urlsplit(self.uri).scheme:
            self.uri = pathlib.Path(self.uri).absolute().as_uri()
        # Sources are keyed by role when one is given, otherwise by filename.
        self.key = self.role or self.file
        try:
            self.description = elem.find('description', namespaces).text
        except AttributeError:
            self.description = ''
        if use_lxml:
            iterparse = lxml.etree.iterparse
            parser = lxml.etree.XMLParser(collect_ids=False)
            try:
                self.xml = lxml.etree.parse(self.file, parser=parser)
            except lxml.etree.XMLSyntaxError:
                # Unparsable source: keep None, tests using it will see no tree.
                self.xml = None
        else:
            iterparse = ElementTree.iterparse
            if PY38_PLUS:
                # Python 3.8+ can retain comments and PIs in the parsed tree.
                tree_builder = ElementTree.TreeBuilder(insert_comments=True, insert_pis=True)
                parser = ElementTree.XMLParser(target=tree_builder)
            else:
                parser = None
            try:
                self.xml = ElementTree.parse(self.file, parser=parser)
            except ElementTree.ParseError:
                self.xml = None
        try:
            # Collect namespace declarations, renaming duplicated prefixes
            # ('p' -> 'p1', default '' -> 'default1', ...) so none are lost.
            self.namespaces = {}
            dup_index = 1
            for _, (prefix, uri) in iterparse(self.file, events=('start-ns',)):
                if prefix not in self.namespaces:
                    self.namespaces[prefix] = uri
                elif prefix:
                    self.namespaces[f'{prefix}{dup_index}'] = uri
                    dup_index += 1
                else:
                    self.namespaces[f'default{dup_index}'] = uri
                    dup_index += 1
        except (ElementTree.ParseError, lxml.etree.XMLSyntaxError):
            pass

    def __repr__(self):
        return '%s(file=%r)' % (self.__class__.__name__, self.file)
class Collection(object):
    """Represents a collection of source files as used in XML environment settings."""

    def __init__(self, elem, use_lxml=False):
        assert elem.tag == '{%s}collection' % QT3_NAMESPACE
        self.uri = elem.attrib.get('uri')
        # The <query> child is only meaningful for XQuery; kept but unused.
        self.query = elem.find('query', namespaces)
        self.sources = [
            Source(child, use_lxml) for child in elem.iterfind('source', namespaces)
        ]

    def __repr__(self):
        return '%s(uri=%r)' % (self.__class__.__name__, self.uri)
class Environment(object):
    """
    The XML environment definition for a test case.

    :param elem: the XML Element that contains the environment definition.
    :param use_lxml: use lxml.etree for loading XML sources.
    """
    collection = None
    schema = None
    static_base_uri = None
    decimal_formats = None

    def __init__(self, elem, use_lxml=False):
        assert elem.tag == '{%s}environment' % QT3_NAMESPACE
        self.name = elem.get('name', 'anonymous')
        # Prefix -> URI mapping declared by the <namespace> children.
        self.namespaces = {
            namespace.attrib['prefix']: namespace.attrib['uri']
            for namespace in elem.iterfind('namespace', namespaces)
        }
        child = elem.find('decimal-format', namespaces)
        if child is not None:
            name = child.get('name')
            if name is not None and ':' in name:
                # Expand a prefixed decimal-format name to its extended form.
                if use_lxml:
                    name = get_expanded_name(name, child.nsmap)
                else:
                    try:
                        name = get_expanded_name(name, self.namespaces)
                    except KeyError:
                        # Unknown prefix: keep the prefixed name as-is.
                        pass
            self.decimal_formats = {name: child.attrib}
        child = elem.find('collection', namespaces)
        if child is not None:
            self.collection = Collection(child, use_lxml)
        child = elem.find('schema', namespaces)
        if child is not None:
            self.schema = Schema(child)
        child = elem.find('static-base-uri', namespaces)
        if child is not None:
            self.static_base_uri = child.get('uri')
        self.params = [e.attrib for e in elem.iterfind('param', namespaces)]
        # Sources keyed by role (or filename) -- see Source.key.
        self.sources = {}
        for child in elem.iterfind('source', namespaces):
            source = Source(child, use_lxml)
            self.sources[source.key] = source

    def __repr__(self):
        return '%s(name=%r)' % (self.__class__.__name__, self.name)

    def __str__(self):
        # Render a simplified XML snippet for diagnostic output.
        children = []
        for prefix, uri in self.namespaces.items():
            children.append('<namespace prefix="{}" uri="{}"/>'.format(prefix, uri))
        if self.schema is not None:
            children.append('<schema uri="{}" file="{}"/>'.format(
                self.schema.uri or '', self.schema.file or ''
            ))
        for role, source in self.sources.items():
            children.append('<source role="{}" uri="{}" file="{}"/>'.format(
                role, source.uri or '', source.file
            ))
        return '<environment name="{}">\n {}\n</environment>'.format(
            self.name, '\n '.join(children)
        )
class TestSet(object):
    """
    Represents a test-set as read from the catalog file and the test-set XML file itself.

    :param elem: the XML Element that contains the test-set definitions.
    :param pattern: the regex pattern for selecting test-cases to load.
    :param use_lxml: use lxml.etree for loading environment XML sources.
    :param environments: the global environments.
    """
    def __init__(self, elem, pattern, use_lxml=False, environments=None):
        assert elem.tag == '{%s}test-set' % QT3_NAMESPACE
        self.name = elem.attrib['name']
        self.file = elem.attrib['file']
        # Start from a copy of the global environments; local definitions in
        # the test-set file may add to or override them.
        self.environments = {} if environments is None else environments.copy()
        self.test_cases = []
        self.specs = []
        self.features = []
        self.xsd_version = None
        self.use_lxml = use_lxml
        self.etree = lxml.etree if use_lxml else ElementTree
        full_path = os.path.abspath(self.file)
        filename = os.path.basename(full_path)
        self.workdir = os.path.dirname(full_path)
        # Relative paths inside the test-set file resolve from its directory.
        with working_directory(self.workdir):
            xml_root = self.etree.parse(filename).getroot()
            self.description = xml_root.find('description', namespaces).text
            for child in xml_root.findall('dependency', namespaces):
                dep_type = child.attrib['type']
                value = child.attrib['value']
                if dep_type == 'spec':
                    self.specs.extend(value.split(' '))
                elif dep_type == 'feature':
                    self.features.append(value)
                elif dep_type == 'xsd-version':
                    self.xsd_version = value
                else:
                    print("unexpected dependency type %s for test-set %r" % (dep_type, self.name))
            for child in xml_root.findall('environment', namespaces):
                environment = Environment(child, use_lxml)
                self.environments[environment.name] = environment
            # Load only the test-cases whose qualified name matches the pattern.
            test_case_template = self.name + '__%s'
            for child in xml_root.findall('test-case', namespaces):
                if pattern.search(test_case_template % child.attrib['name']) is not None:
                    self.test_cases.append(TestCase(child, self, use_lxml))

    def __repr__(self):
        return '%s(name=%r)' % (self.__class__.__name__, self.name)
class TestCase(object):
"""
Represents a test case as read from a test-set file.
:param elem: the XML Element that contains the test-case definition.
:param test_set: the test-set that the test-case belongs to.
:param use_lxml: use lxml.etree for loading environment XML sources.
"""
# Single value dependencies
calendar = None
default_language = None
format_integer_sequence = None
language = None
limits = None
unicode_version = None
unicode_normalization_form | |
data required
def test_create_gps_device_log_with_driver(self):
    # Minimum payload plus an explicit GPS device reference should create (201).
    self.minimum_valid_data["device"] = self.gps_device.id
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)

def test_create_gps_device_log_with_full_valid_data(self):
    # A fully populated payload should also create successfully (201).
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.post(self.create_url, self.gps_device_log_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_create_gps_device_log_invalid_data(self):
    """Each individually invalid field makes the create endpoint answer 400."""
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    invalid_fields = (
        ("datetime", "10-09-2015"),
        ("vehicle_status", "InvalidChoice"),
        ("latitude", "InvalidLatitude"),
        ("device", -1),
        ("device", "asdsad"),
        ("device", self.gps_device.id * 100),
    )
    for field, bad_value in invalid_fields:
        data = self.gps_device_log_data.copy()
        data[field] = bad_value
        response = self.client.post(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_all_4_function_without_token(self):
    # Without credentials, create/update/patch/retrieve must all return 401.
    response = self.client.post(self.create_url, self.gps_device_log_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    response = self.client.put(self.update_url, self.gps_device_log_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    response = self.client.patch(self.partial_update_url, self.gps_device_log_data,
                                 format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    response = self.client.get(self.retrieve_url)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

def test_all_4_functions_with_invalid_token(self):
    # A corrupted token must be rejected just like a missing one.
    self.client.credentials(HTTP_AUTHORIZATION=self.token + "invalidToken")
    response = self.client.post(self.create_url, self.gps_device_log_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    response = self.client.put(self.update_url, self.gps_device_log_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    response = self.client.patch(self.partial_update_url, self.gps_device_log_data,
                                 format='json')
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    response = self.client.get(self.retrieve_url)
    self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_gps_device_log_with_invalid_request(self):
    # Only POST is allowed on the create endpoint: GET/PUT/PATCH must 405.
    data = self.minimum_valid_data.copy()
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.get(self.create_url, format='json')
    self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.put(self.create_url, data, format='json')
    self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.patch(self.create_url, data, format='json')
    self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_update_gps_device_log_with_minimum_valid_date(self):
    # Full update (PUT) with just the minimum fields is accepted (202).
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.put(self.update_url, self.minimum_valid_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

def test_update_gps_device_log_with_full_valid_data(self):
    # Full update (PUT) with the complete payload is accepted (202).
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.put(self.update_url, self.gps_device_log_data, format='json')
    self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

def test_partial_update_gps_device_log_with_valid_data(self):
    # PATCH with one valid field at a time is accepted (202).
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    data = {"driver_number": "1234567890"}
    response = self.client.patch(self.partial_update_url, data, format='json')
    self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
    data = {"datetime": "2017-01-28T22:22:30.792000"}
    response = self.client.patch(self.partial_update_url, data, format='json')
    self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
    data = {"device": self.gps_device.id}
    response = self.client.patch(self.partial_update_url, data, format='json')
    self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_retrieve_existing_gps_device_log(self):
    # An existing record is retrievable with valid credentials (200).
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    response = self.client.get(self.retrieve_url, format='json')
    self.assertEqual(response.status_code, status.HTTP_200_OK)

def test_retrieve_non_existing_gps_device_log(self):
    # A wildly out-of-range pk must yield 404 rather than an error.
    self.client.credentials(HTTP_AUTHORIZATION=self.token)
    bad_retrieve_url = reverse("driver_gps_device_log_retrieve",
                               kwargs={"pk": self.gps_device_log.id * 1000})
    response = self.client.get(bad_retrieve_url, format='json')
    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class TracknovateGPSDeviceTest(DriverSetup):
"""
Test cases for Tracknovate GPS Device
"""
def setUp(self):
    """Build payloads and endpoint URLs for the Tracknovate GPS device tests."""
    super().setUp()
    # Full representative payload for a Tracknovate GPS device record.
    self.tracknovate_gps_device_data = {
        "updated_on": "2017-07-03T09:30:13.410522",
        "deleted": False,
        "deleted_on": None,
        "phone": "7340660256",
        "sim_number": "351608080820397",
        "vehicle_id": "cg07aw279",
        "driver_name": None,
        "driver_number": None,
        "number_verified": False,
        "current_duration": "<strong style='color:red;'>Halted from last 18 Min </strong>",
        "current_vstatus": "<strong style='color:red;'>Halted</strong>",
        "driving_licence_number": None,
        "vehicle_number": "cg07aw279",
        "vehicle_type": None,
        "vehicle_status": "unloaded",
        "location_time": None,
        "is_active": True,
        "latitude": None,
        "longitude": None,
        "inactive_sms_sent_at": None,
        "driver": self.driver.id,
    }
    # Smallest payload the create endpoint should accept.
    self.minimum_valid_data = {
        "vehicle_id": "cg07aw279",
        "phone": "7340660256",
        "sim_number": "351608080820397"
    }
    self.create_url = reverse("driver_tracknovate_gps_device_create")
    self.update_url = reverse("driver_tracknovate_gps_device_update", kwargs={"pk": self.tracknovate_gps_device.id})
    self.partial_update_url = reverse("driver_tracknovate_gps_device_partial_update",
                                      kwargs={"pk": self.tracknovate_gps_device.id})
    self.retrieve_url = reverse("driver_tracknovate_gps_device_retrieve",
                                kwargs={"pk": self.tracknovate_gps_device.id})
def test_create_tracknovate_gps_device_with_minimum_valid_data(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding latitude field to minimum valid data required
def test_create_tracknovate_gps_device_with_latitude(self):
self.minimum_valid_data["latitude"] = "21.9200000763"
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding vehicle_status field to minimum valid data required
def test_create_tracknovate_gps_device_with_vehicle_status(self):
self.minimum_valid_data["vehicle_status"] = "unloaded"
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding is_active field to minimum valid data required
def test_create_tracknovate_gps_device_with_is_active(self):
self.minimum_valid_data["is_active"] = True
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding driver field to minimum valid data required
def test_create_tracknovate_gps_device_with_driver(self):
self.minimum_valid_data["driver"] = self.driver.id
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_create_tracknovate_gps_device_with_full_valid_data(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.tracknovate_gps_device_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_create_tracknovate_gps_device_invalid_data(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
data = self.tracknovate_gps_device_data.copy()
data["location_time"] = "10-09-2015"
response = self.client.post(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data = self.tracknovate_gps_device_data.copy()
data["vehicle_status"] = "InvalidChoice"
response = self.client.post(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data = self.tracknovate_gps_device_data.copy()
data["latitude"] = "InvalidLatitude"
response = self.client.post(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data = self.tracknovate_gps_device_data.copy()
data["driver"] = -1
response = self.client.post(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data["driver"] = "asdsad"
response = self.client.post(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data["driver"] = self.driver.id * 100
response = self.client.post(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_all_4_function_without_token(self):
response = self.client.post(self.create_url, self.tracknovate_gps_device_data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
response = self.client.put(self.update_url, self.tracknovate_gps_device_data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
response = self.client.patch(self.partial_update_url, self.tracknovate_gps_device_data,
format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
response = self.client.get(self.retrieve_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_all_4_functions_with_invalid_token(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token + "invalidToken")
response = self.client.post(self.create_url, self.tracknovate_gps_device_data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
response = self.client.put(self.update_url, self.tracknovate_gps_device_data, format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
response = self.client.patch(self.partial_update_url, self.tracknovate_gps_device_data,
format='json')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
response = self.client.get(self.retrieve_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_tracknovate_gps_device_invalid_request(self):
data = self.minimum_valid_data.copy()
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.get(self.create_url,data, format='json')
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.put(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.patch(self.create_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_update_tracknovate_gps_device_with_minimum_valid_date(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.put(self.update_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_update_tracknovate_gps_device_with_full_valid_data(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.put(self.update_url, self.tracknovate_gps_device_data, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_partial_update_tracknovate_gps_device_with_valid_data(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
data = {"phone": "1234567890"}
response = self.client.patch(self.partial_update_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
data = {"location_time": "2017-01-28T22:22:30.792000"}
response = self.client.patch(self.partial_update_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
data = {"driver": self.driver.id}
response = self.client.patch(self.partial_update_url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
def test_retrieve_existing_tracknovate_gps_device(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.get(self.retrieve_url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_retrieve_non_existing_tracknovate_gps_device(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
bad_retrieve_url = reverse("driver_tracknovate_gps_device_retrieve",
kwargs={"pk": self.tracknovate_gps_device.id * 1000})
response = self.client.get(bad_retrieve_url, format='json')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class TracknovateGPSDeviceLogTest(DriverSetup):
    """
    Test cases for Tracknovate GPS device Log

    Covers the create/update/partial-update/retrieve endpoints of the device
    log, including authentication failures, invalid payloads, and HTTP
    methods not allowed on the create endpoint.
    """
    def setUp(self):
        super().setUp()
        # Full payload accepted by the create endpoint. self.tracknovate_gps_device
        # and self.tracknovate_gps_device_log are referenced but not created here,
        # so they are presumably provided by DriverSetup.setUp().
        self.tracknovate_gps_device_log_data = {
            "datetime": "2017-11-02T11:13:18",
            "vehicle_id": "cg07aw279",
            "latitude": "20.6679560000",
            "longitude": "81.4918930000",
            "altitude": "23.2",
            "speed": 0,
            "course": 23.2,
            "accuracy": 12.1,
            "engine_on": False,
            "driver_name": None,
            "driver_number": None,
            "driving_licence_number": None,
            "vehicle_number": "cg07aw279",
            "vehicle_type": None,
            "vehicle_status": None,
            "device": self.tracknovate_gps_device.id,
        }
        # Smallest payload the creation tests treat as valid.
        self.minimum_valid_data = {
            "vehicle_id": "0234a3e8-a74e-4d29-ad55-c8d428dd6151",
            "datetime": "2017-05-06T11:36:30"
        }
        self.create_url = reverse("driver_tracknovate_gps_device_log_create")
        self.update_url = reverse("driver_tracknovate_gps_device_log_update",
                                  kwargs={"pk": self.tracknovate_gps_device_log.id})
        self.partial_update_url = reverse("driver_tracknovate_gps_device_log_partial_update",
                                         kwargs={"pk": self.tracknovate_gps_device_log.id})
        self.retrieve_url = reverse("driver_tracknovate_gps_device_log_retrieve",
                                    kwargs={"pk": self.tracknovate_gps_device_log.id})
    def test_create_tracknovate_gps_device_log_with_minimum_valid_data(self):
        """POST with only the required fields creates the log (201)."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # Adding latitude field to minimum valid data required
    def test_create_tracknovate_gps_device_log_with_latitude(self):
        """Creation still succeeds when an optional latitude is supplied."""
        self.minimum_valid_data["latitude"] = "21.9200000763"
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # Adding vehicle_status field to minimum valid data required
    def test_create_tracknovate_gps_device_log_with_vehicle_status(self):
        """Creation still succeeds with a valid vehicle_status choice."""
        self.minimum_valid_data["vehicle_status"] = "unloaded"
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # Adding device field to minimum valid data required
    def test_create_tracknovate_gps_device_log_with_driver(self):
        """Creation still succeeds when linked to an existing device."""
        self.minimum_valid_data["device"] = self.tracknovate_gps_device.id
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_create_tracknovate_gps_device_log_with_full_valid_data(self):
        """POST with the complete payload creates the log (201)."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.post(self.create_url, self.tracknovate_gps_device_log_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_create_tracknovate_gps_device_log_invalid_data(self):
        """Each invalid field value is rejected with 400 Bad Request."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        # Malformed datetime.
        data = self.tracknovate_gps_device_log_data.copy()
        data["datetime"] = "10-09-2015"
        response = self.client.post(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # Not a valid choice for vehicle_status.
        data = self.tracknovate_gps_device_log_data.copy()
        data["vehicle_status"] = "InvalidChoice"
        response = self.client.post(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # Non-numeric latitude.
        data = self.tracknovate_gps_device_log_data.copy()
        data["latitude"] = "InvalidLatitude"
        response = self.client.post(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # Device FK: negative id, non-integer, and a non-existent id.
        data = self.tracknovate_gps_device_log_data.copy()
        data["device"] = -1
        response = self.client.post(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        data["device"] = "asdsad"
        response = self.client.post(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        data["device"] = self.tracknovate_gps_device.id * 100
        response = self.client.post(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    def test_all_4_function_without_token(self):
        """All four endpoints require authentication (401 without a token)."""
        response = self.client.post(self.create_url, self.tracknovate_gps_device_log_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.put(self.update_url, self.tracknovate_gps_device_log_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.patch(self.partial_update_url, self.tracknovate_gps_device_log_data,
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.get(self.retrieve_url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_all_4_functions_with_invalid_token(self):
        """A corrupted token is rejected on all four endpoints (401)."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token + "invalidToken")
        response = self.client.post(self.create_url, self.tracknovate_gps_device_log_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.put(self.update_url, self.tracknovate_gps_device_log_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.patch(self.partial_update_url, self.tracknovate_gps_device_log_data,
                                     format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.get(self.retrieve_url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_create_tracknovate_gps_device_log_invalid_request(self):
        """GET/PUT/PATCH on the create endpoint return 405 Method Not Allowed."""
        data = self.minimum_valid_data.copy()
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.get(self.create_url,data, format='json')
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.put(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.patch(self.create_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
    def test_update_tracknovate_gps_device_log_with_minimum_valid_date(self):
        """PUT with the minimal payload is accepted (202)."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.put(self.update_url, self.minimum_valid_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
    def test_update_tracknovate_gps_device_log_with_full_valid_data(self):
        """PUT with the complete payload is accepted (202)."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.put(self.update_url, self.tracknovate_gps_device_log_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
    def test_partial_update_tracknovate_gps_device_log_with_valid_data(self):
        """PATCH accepts single-field updates (202) for several field types."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        data = {"driver_number": "1234567890"}
        response = self.client.patch(self.partial_update_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
        data = {"datetime": "2017-01-28T22:22:30.792000"}
        response = self.client.patch(self.partial_update_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
        data = {"device": self.tracknovate_gps_device.id}
        response = self.client.patch(self.partial_update_url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
    def test_retrieve_existing_tracknovate_gps_device_log(self):
        """GET on an existing log returns 200 OK."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        response = self.client.get(self.retrieve_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_retrieve_non_existing_tracknovate_gps_device_log(self):
        """GET with a pk that does not exist returns 404."""
        self.client.credentials(HTTP_AUTHORIZATION=self.token)
        bad_retrieve_url = reverse("driver_tracknovate_gps_device_log_retrieve",
                                   kwargs={"pk": self.tracknovate_gps_device_log.id * 1000})
        response = self.client.get(bad_retrieve_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class WaytrackerGPSDeviceTest(DriverSetup):
"""
Test cases for Waytracker GPS Device
"""
    def setUp(self):
        """Build payloads and endpoint URLs for the Waytracker device tests."""
        super().setUp()
        # Full payload accepted by the create endpoint. self.waytracker_gps_device
        # and self.driver are referenced but not created here, so they are
        # presumably provided by DriverSetup.setUp().
        self.waytracker_gps_device_data = {
            "deleted": False,
            "deleted_on": None,
            "vehicle_id": "cg04mb8168",
            "driver_name": "sakura",
            "driver_number": None,
            "number_verified": False,
            "driving_licence_number": None,
            "vehicle_number": "cg04mb8168",
            "vehicle_type": None,
            "vehicle_status": "unloaded",
            "location_time": "2018-04-02T14:17:00",
            "latitude": "21.6293100000",
            "longitude": "81.7553280000",
            "is_active": True,
            "inactive_sms_sent_at": None,
            "driver": self.driver.id,
        }
        # Smallest payload the creation tests treat as valid.
        self.minimum_valid_data = {
            "vehicle_id": "cg07aw279"
        }
        self.create_url = reverse("driver_waytracker_gps_device_create")
        self.update_url = reverse("driver_waytracker_gps_device_update", kwargs={"pk": self.waytracker_gps_device.id})
        self.partial_update_url = reverse("driver_waytracker_gps_device_partial_update",
                                          kwargs={"pk": self.waytracker_gps_device.id})
        self.retrieve_url = reverse("driver_waytracker_gps_device_retrieve",
                                    kwargs={"pk": self.waytracker_gps_device.id})
def test_create_waytracker_gps_device_with_minimum_valid_data(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding latitude field to minimum valid data required
def test_create_waytracker_gps_device_with_latitude(self):
self.minimum_valid_data["latitude"] = "21.9200000763"
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding vehicle_status field to minimum valid data required
def test_create_waytracker_gps_device_with_vehicle_status(self):
self.minimum_valid_data["vehicle_status"] = "unloaded"
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding is_active field to minimum valid data required
def test_create_waytracker_gps_device_with_is_active(self):
self.minimum_valid_data["is_active"] = True
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.post(self.create_url, self.minimum_valid_data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# Adding driver field to minimum valid data required
def test_create_waytracker_gps_device_with_driver(self):
self.minimum_valid_data["driver"] | |
-> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": 1, "2": 0}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 200)
self.assert_model_exists("vote/1")
self.assert_model_not_exists("vote/2")
poll = self.get_model("poll/1")
self.assertEqual(poll.get("votesvalid"), "1.000000")
self.assertEqual(poll.get("votesinvalid"), "0.000000")
self.assertEqual(poll.get("votescast"), "1.000000")
self.assertIn(1, poll.get("voted_ids", []))
option1 = self.get_model("option/1")
option2 = self.get_model("option/2")
self.assertEqual(option1.get("yes"), "1.000000")
self.assertEqual(option1.get("no"), "0.000000")
self.assertEqual(option1.get("abstain"), "0.000000")
self.assertEqual(option2.get("yes"), "0.000000")
self.assertEqual(option2.get("no"), "0.000000")
self.assertEqual(option2.get("abstain"), "0.000000")
def test_change_vote(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": 1, "2": 0}, "id": 1, "user_id": 1},
stop_poll_after_vote=False,
)
response = self.request(
"poll.vote",
{"value": {"1": 0, "2": 1}, "id": 1, "user_id": 1},
start_poll_before_vote=False,
)
self.assert_status_code(response, 400)
option1 = self.get_model("option/1")
option2 = self.get_model("option/2")
self.assertEqual(option1.get("yes"), "1.000000")
self.assertEqual(option1.get("no"), "0.000000")
self.assertEqual(option1.get("abstain"), "0.000000")
self.assertEqual(option2.get("yes"), "0.000000")
self.assertEqual(option2.get("no"), "0.000000")
self.assertEqual(option2.get("abstain"), "0.000000")
def test_global_yes(self) -> None:
self.start_poll()
response = self.request("poll.vote", {"value": "Y", "id": 1, "user_id": 1})
self.assert_status_code(response, 200)
option = self.get_model("option/11")
self.assertEqual(option.get("yes"), "1.000000")
self.assertEqual(option.get("no"), "0.000000")
self.assertEqual(option.get("abstain"), "0.000000")
def test_global_yes_forbidden(self) -> None:
self.update_model("poll/1", {"global_yes": False})
self.start_poll()
response = self.request("poll.vote", {"value": "Y", "id": 1, "user_id": 1})
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_global_no(self) -> None:
self.start_poll()
response = self.request("poll.vote", {"value": "N", "id": 1, "user_id": 1})
self.assert_status_code(response, 200)
option = self.get_model("option/11")
self.assertEqual(option.get("yes"), "0.000000")
self.assertEqual(option.get("no"), "1.000000")
self.assertEqual(option.get("abstain"), "0.000000")
def test_global_no_forbidden(self) -> None:
self.update_model("poll/1", {"global_no": False})
self.start_poll()
response = self.request("poll.vote", {"value": "N", "id": 1, "user_id": 1})
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_global_abstain(self) -> None:
self.start_poll()
response = self.request("poll.vote", {"value": "A", "id": 1, "user_id": 1})
self.assert_status_code(response, 200)
option = self.get_model("option/11")
self.assertEqual(option.get("yes"), "0.000000")
self.assertEqual(option.get("no"), "0.000000")
self.assertEqual(option.get("abstain"), "1.000000")
def test_global_abstain_forbidden(self) -> None:
self.update_model("poll/1", {"global_abstain": False})
self.start_poll()
response = self.request("poll.vote", {"value": "A", "id": 1, "user_id": 1})
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_negative_vote(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": -1}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_too_many_options(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": 1, "2": 1, "3": 1}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_wrong_options(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"id": 1, "value": {"3": 1}},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_anonymous(self) -> None:
self.start_poll()
response = self.anonymous_vote({"value": {"1": 1}})
self.assert_status_code(response, 401)
self.assert_model_not_exists("vote/1")
def test_anonymous_as_vote_user(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": 1}, "id": 1, "user_id": 0},
)
self.assert_status_code(response, 400)
assert "Votes for anonymous user are not allowed" in response.json["message"]
self.assert_model_not_exists("vote/1")
def test_vote_not_present(self) -> None:
self.start_poll()
self.update_model("user/1", {"is_present_in_meeting_ids": []})
response = self.request(
"poll.vote",
{"value": {"1": 1}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_wrong_state(self) -> None:
response = self.request(
"poll.vote",
{"value": {"1": 1}, "id": 1, "user_id": 1},
start_poll_before_vote=False,
stop_poll_after_vote=False,
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_missing_data(self) -> None:
self.start_poll()
response = self.request("poll.vote", {"value": {}, "id": 1, "user_id": 1})
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
poll = self.get_model("poll/1")
self.assertNotIn(1, poll.get("voted_ids", []))
def test_wrong_data_format(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": [1, 2, 5], "id": 1, "user_id": 1},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_wrong_option_format(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": "string"}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_wrong_option_id_type(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"id": 1}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
def test_wrong_vote_data(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": [None]}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 400)
self.assert_model_not_exists("vote/1")
class VotePollNamedN(VotePollBaseTestClass):
    """Tests for poll.vote on a named poll with pollmethod 'N'.

    The tallies asserted below show that with pollmethod 'N' a cast amount
    of 1 on an option is counted in that option's "no" bucket.
    """
    def create_poll(self) -> None:
        """Create the named 'N' poll fixture with two options and global options enabled."""
        self.create_model(
            "poll/1",
            {
                "content_object_id": "assignment/1",
                "title": "test_title_4oi49ckKFk39SDIfj30s",
                "pollmethod": "N",
                "type": Poll.TYPE_NAMED,
                "state": Poll.STATE_CREATED,
                "meeting_id": 113,
                "option_ids": [1, 2],
                "entitled_group_ids": [1],
                "votesinvalid": "0.000000",
                "global_yes": True,
                "global_no": True,
                "global_abstain": True,
                "min_votes_amount": 1,
                "max_votes_amount": 10,
            },
        )
    def test_vote(self) -> None:
        """A valid ballot creates one vote and is counted as 'no' on option 1."""
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": {"1": 1, "2": 0}, "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 200)
        self.assert_model_exists("vote/1")
        self.assert_model_not_exists("vote/2")
        poll = self.get_model("poll/1")
        self.assertEqual(poll.get("votesvalid"), "1.000000")
        self.assertEqual(poll.get("votesinvalid"), "0.000000")
        self.assertEqual(poll.get("votescast"), "1.000000")
        self.assertTrue(1 in poll.get("voted_ids", []))
        option1 = self.get_model("option/1")
        option2 = self.get_model("option/2")
        self.assertEqual(option1.get("yes"), "0.000000")
        self.assertEqual(option1.get("no"), "1.000000")
        self.assertEqual(option1.get("abstain"), "0.000000")
        self.assertEqual(option2.get("yes"), "0.000000")
        self.assertEqual(option2.get("no"), "0.000000")
        self.assertEqual(option2.get("abstain"), "0.000000")
    def test_change_vote(self) -> None:
        """A second vote by the same user is rejected; tallies stay as first cast."""
        self.add_option()
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": {"1": 1, "2": 0}, "id": 1, "user_id": 1},
            stop_poll_after_vote=False,
        )
        response = self.request(
            "poll.vote",
            {"value": {"1": 0, "2": 1}, "id": 1, "user_id": 1},
            start_poll_before_vote=False,
        )
        self.assert_status_code(response, 400)
        option1 = self.get_model("option/1")
        option2 = self.get_model("option/2")
        self.assertEqual(option1.get("yes"), "0.000000")
        self.assertEqual(option1.get("no"), "1.000000")
        self.assertEqual(option1.get("abstain"), "0.000000")
        self.assertEqual(option2.get("yes"), "0.000000")
        self.assertEqual(option2.get("no"), "0.000000")
        self.assertEqual(option2.get("abstain"), "0.000000")
    def test_global_yes(self) -> None:
        """A global 'Y' vote is tallied on the global option (option/11)."""
        self.start_poll()
        response = self.request("poll.vote", {"value": "Y", "id": 1, "user_id": 1})
        self.assert_status_code(response, 200)
        option = self.get_model("option/11")
        self.assertEqual(option.get("yes"), "1.000000")
        self.assertEqual(option.get("no"), "0.000000")
        self.assertEqual(option.get("abstain"), "0.000000")
    def test_global_yes_forbidden(self) -> None:
        """A global 'Y' vote is rejected when global_yes is disabled."""
        self.update_model("poll/1", {"global_yes": False})
        self.start_poll()
        response = self.request("poll.vote", {"value": "Y", "id": 1, "user_id": 1})
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_global_no(self) -> None:
        """A global 'N' vote is tallied on the global option (option/11)."""
        self.start_poll()
        response = self.request("poll.vote", {"value": "N", "id": 1, "user_id": 1})
        self.assert_status_code(response, 200)
        option = self.get_model("option/11")
        self.assertEqual(option.get("yes"), "0.000000")
        self.assertEqual(option.get("no"), "1.000000")
        self.assertEqual(option.get("abstain"), "0.000000")
    def test_global_no_forbidden(self) -> None:
        """A global 'N' vote is rejected when global_no is disabled."""
        self.update_model("poll/1", {"global_no": False})
        self.start_poll()
        response = self.request("poll.vote", {"value": "N", "id": 1, "user_id": 1})
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_global_abstain(self) -> None:
        """A global 'A' vote is tallied on the global option (option/11)."""
        self.start_poll()
        response = self.request("poll.vote", {"value": "A", "id": 1, "user_id": 1})
        self.assert_status_code(response, 200)
        option = self.get_model("option/11")
        self.assertEqual(option.get("yes"), "0.000000")
        self.assertEqual(option.get("no"), "0.000000")
        self.assertEqual(option.get("abstain"), "1.000000")
    def test_global_abstain_forbidden(self) -> None:
        """A global 'A' vote is rejected when global_abstain is disabled."""
        self.update_model("poll/1", {"global_abstain": False})
        self.start_poll()
        response = self.request("poll.vote", {"value": "A", "id": 1, "user_id": 1})
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_negative_vote(self) -> None:
        """Negative vote amounts are invalid."""
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": {"1": -1}, "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_wrong_options(self) -> None:
        """An option id that does not belong to the poll is rejected."""
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": {"3": 1}, "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_anonymous(self) -> None:
        """Unauthenticated voting is rejected with 401."""
        self.start_poll()
        response = self.anonymous_vote({"value": {"1": 1}})
        self.assert_status_code(response, 401)
        self.assert_model_not_exists("vote/1")
    def test_vote_not_present(self) -> None:
        """A user not present in the meeting cannot vote."""
        self.start_poll()
        self.update_model("user/1", {"is_present_in_meeting_ids": []})
        response = self.request(
            "poll.vote",
            {"value": {"1": 1}, "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_wrong_state(self) -> None:
        """Voting on a poll that was never started fails."""
        response = self.request(
            "poll.vote",
            {"value": {"1": 1}, "id": 1, "user_id": 1},
            start_poll_before_vote=False,
            stop_poll_after_vote=False,
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_missing_data(self) -> None:
        """An empty value mapping is rejected and the user is not marked as voted."""
        self.start_poll()
        response = self.request("poll.vote", {"value": {}, "id": 1, "user_id": 1})
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
        poll = self.get_model("poll/1")
        self.assertNotIn(1, poll.get("voted_ids", []))
    def test_wrong_data_format(self) -> None:
        """A list instead of an option mapping is rejected."""
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": [1, 2, 5], "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_wrong_option_format(self) -> None:
        """A string vote amount is rejected."""
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": {"1": "string"}, "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_wrong_option_id_type(self) -> None:
        """A non-numeric option key is rejected."""
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": {"id": 1}, "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
    def test_wrong_vote_data(self) -> None:
        """A list as the vote amount is rejected."""
        self.start_poll()
        response = self.request(
            "poll.vote",
            {"value": {"1": [None]}, "id": 1, "user_id": 1},
        )
        self.assert_status_code(response, 400)
        self.assert_model_not_exists("vote/1")
class VotePollPseudoanonymousYNA(VotePollBaseTestClass):
    def create_poll(self) -> None:
        """Create the pseudoanonymous YNA poll fixture with two options.

        No global_* flags are set here (unlike the named-poll fixtures);
        min/max vote amounts allow partial ballots.
        """
        self.create_model(
            "poll/1",
            {
                "content_object_id": "assignment/1",
                "title": "test_title_OkHAIvOSIcpFnCxbaL6v",
                "pollmethod": "YNA",
                "type": Poll.TYPE_PSEUDOANONYMOUS,
                "state": Poll.STATE_CREATED,
                "meeting_id": 113,
                "option_ids": [1, 2],
                "entitled_group_ids": [1],
                "votesinvalid": "0.000000",
                "min_votes_amount": 1,
                "max_votes_amount": 10,
            },
        )
def test_vote(self) -> None:
self.add_option()
self.start_poll()
response = self.request(
"poll.vote",
{"value": {"1": "Y", "2": "N", "3": "A"}, "id": 1, "user_id": 1},
)
self.assert_status_code(response, 200)
self.assert_model_count("vote", 113, 3)
poll = self.get_model("poll/1")
self.assertEqual(poll.get("votesvalid"), "1.000000")
self.assertEqual(poll.get("votesinvalid"), "0.000000")
self.assertEqual(poll.get("votescast"), "1.000000")
option1 = self.get_model("option/1")
option2 = self.get_model("option/2")
option3 = self.get_model("option/3")
self.assertEqual(option1.get("yes"), "1.000000")
self.assertEqual(option1.get("no"), "0.000000")
self.assertEqual(option1.get("abstain"), "0.000000")
self.assertEqual(option2.get("yes"), "0.000000")
self.assertEqual(option2.get("no"), "1.000000")
self.assertEqual(option2.get("abstain"), "0.000000")
self.assertEqual(option3.get("yes"), "0.000000")
self.assertEqual(option3.get("no"), "0.000000")
self.assertEqual(option3.get("abstain"), "1.000000")
def test_change_vote(self) -> None:
    """A second vote by the same user is rejected and the first tally stands."""
    self.start_poll()
    # First vote succeeds (poll is left open for the second attempt).
    self.request(
        "poll.vote",
        {"id": 1, "user_id": 1, "value": {"1": "Y"}},
        stop_poll_after_vote=False,
    )
    resp = self.request(
        "poll.vote",
        {"id": 1, "user_id": 1, "value": {"1": "N"}},
        start_poll_before_vote=False,
    )
    self.assert_status_code(resp, 400)
    option = self.get_model("option/1")
    for field, expected in (
        ("yes", "1.000000"),
        ("no", "0.000000"),
        ("abstain", "0.000000"),
    ):
        self.assertEqual(option.get(field), expected)
def test_too_many_options(self) -> None:
    """Voting on more option ids than the poll contains is rejected."""
    self.start_poll()
    resp = self.request(
        "poll.vote",
        {"id": 1, "user_id": 1, "value": {"1": "Y", "3": "N"}},
    )
    self.assert_status_code(resp, 400)
    self.assert_model_not_exists("vote/1")
def test_partial_vote(self) -> None:
    """Voting on only a subset of the available options is accepted."""
    self.add_option()
    self.start_poll()
    resp = self.request(
        "poll.vote",
        {"id": 1, "user_id": 1, "value": {"1": "Y"}},
    )
    self.assert_status_code(resp, 200)
    self.assert_model_exists("vote/1")
def test_wrong_options(self) -> None:
    """Option ids that do not belong to this poll are rejected."""
    self.start_poll()
    resp = self.request(
        "poll.vote",
        {"id": 1, "user_id": 1, "value": {"1": "Y", "3": "N"}},
    )
    self.assert_status_code(resp, 400)
    self.assert_model_not_exists("vote/1")
def test_anonymous(self) -> None:
    """Anonymous requests may not vote: 401 and no vote model is created."""
    self.start_poll()
    resp = self.anonymous_vote({"value": {"1": "Y"}})
    self.assert_status_code(resp, 401)
    self.assert_model_not_exists("vote/1")
def test_vote_not_present(self) -> None:
    """A user who is not present in the meeting may not vote."""
    self.start_poll()
    self.update_model("user/1", {"is_present_in_meeting_ids": []})
    resp = self.request(
        "poll.vote",
        {"id": 1, "user_id": 1, "value": {"1": "Y"}},
    )
    self.assert_status_code(resp, 400)
    self.assert_model_not_exists("vote/1")
def test_wrong_state(self) -> None:
    """Voting on a poll that was never started is rejected."""
    resp = self.request(
        "poll.vote",
        {"id": 1, "user_id": 1, "value": {}},
        start_poll_before_vote=False,
        stop_poll_after_vote=False,
    )
    self.assert_status_code(resp, 400)
    self.assert_model_not_exists("vote/1")
def test_missing_value(self) -> None:
    """An empty value mapping is rejected and the user is not marked as voted."""
    self.start_poll()
    resp = self.request("poll.vote", {"id": 1, "user_id": 1, "value": {}})
    self.assert_status_code(resp, 400)
    self.assert_model_not_exists("vote/1")
    poll = self.get_model("poll/1")
    self.assertNotIn(1, poll.get("voted_ids", []))
def test_wrong_value_format(self) -> None:
self.start_poll()
response = self.request(
"poll.vote",
{"value": [1, 2, 5], "id": 1, "user_id": 1},
)
self.assert_status_code(response, | |
<reponame>Smithsonian/cheby_checker
# -*- coding: utf-8 -*-
# cheby_checker/cheby_checker/orbit_cheby.py
"""
--------------------------------------------------------------
cheby_checker's orbit_cheby module.
2020 - 2022
<NAME> & <NAME> & <NAME>
This module provides functionalities to evaluate
dictionaries of chebyshev-coefficients
We are developing a standardized approach regarding
orbit integration and subsequent interpolation using
chebyshev-coefficients applied to 32-day sectors
To contain all core functionality required to predict the
position (e.g. BARYCENTRIC Cartesian) and apparent position
(e.g. RA, Dec) of minor planets, comets, satellites, etc
- See https://drive.google.com/open?id=1F86lnaHL01xPAACX2SYVdOgvvLLnxfYD
This does *NOT* do the underlying nbody integrations.
This DOES fast interpolation using supplied chebyshev dictionaries
N.B. There is a strong assumption that coordinates are BARYCENTRIC EQUATORIAL
- I.e. we are assuming that the nbody integration has supplied cartesian
coordinates in a BARYCENTRIC EQUATORIAL frame.
Currently envisaged as a core component that will be drawn-upon by
- Position_Check
- MP_Checker
- ID_Check
- ...
--------------------------------------------------------------
"""
# Import third-party packages
# --------------------------------------------------------------
import sys
import numpy as np
from collections import defaultdict
import astropy
from astropy_healpix import healpy
# Import neighboring packages
# --------------------------------------------------------------
from .cheby_checker import Base
class MSC_Loader(Base):
    """
    Multi-Sector Cheby -Loader Function

    Will load/create/return instances of MSC : Multi-Sector Cheby objects

    Will handle multi-particle instantiation: will return **LIST** of *MSC* objects
    """

    # allow different init depending on source ...
    def __init__(self, **kwargs):
        """Dispatch to one of several ingest routes based on supplied kwargs.

        Recognized kwargs (each overrides the same-named default attribute):
            filepath, unpacked_primary_provisional_designation_list, times_TDB,
            statearray, NbodySim, FROM_DB, dict_of_dicts
        The created MSC objects are collected in self.MSCs.
        """
        print('INIT MSC_Loader...')

        # Give access to "Base" methods & attributes
        Base.__init__(self)

        # Default standard PARAMETERS / OPTIONS we use for chebyshevs, etc
        # - These may be overwritten by kwargs
        self.filepath = None                                      # : Ingest method (From textfile)
        self.unpacked_primary_provisional_designation_list = []   # : Ingest method (From np.array)
        self.times_TDB = None                                     # : Ingest method (From np.array)
        self.statearray = None                                    # : Ingest method (From np.array)
        self.NbodySim = None                                      # : Ingest method (From nbody.NbodySim)
        self.FROM_DB = None                                       # : Ingest method (From sqlite db)
        self.dict_of_dicts = None                                 # : Ingest method (From sqlite db)

        # The list of MSCs that will be instantiated & returned
        self.MSCs = []

        # Copy any recognized kwargs onto self
        # (e.g. 'filepath', 'NbodySim', 'FROM_DB', etc etc)
        for attribute, value in kwargs.items():
            if hasattr(self, attribute):
                setattr(self, attribute, value)

        # Allow initialization from different sources ...
        #
        # (i) From database (of pre-calculated chebyshevs)
        if self.FROM_DB and self.dict_of_dicts:
            self._populate_from_database(self.dict_of_dicts)

        # (ii) From nbody.NbodySim
        elif self.NbodySim is not None:
            if "unpacked_primary_provisional_designation_list" not in self.NbodySim.__dict__:
                self.NbodySim.unpacked_primary_provisional_designation_list = \
                    [str(_) for _ in range(self.NbodySim.input_n_particles)]
                # BUGFIX: this print used to reference the non-existent attribute
                # "...unpacked_primary_provisional_designations" and so raised
                # AttributeError whenever this fallback path was taken.
                print(f'Populating from NbodySim : ***NO*** designation information : Using {self.NbodySim.unpacked_primary_provisional_designation_list}')
            self._populate_from_nbody_array(self.NbodySim.unpacked_primary_provisional_designation_list,
                                            self.NbodySim.output_times,
                                            self.NbodySim.output_states)

        # (iii) From numpy array (from nbody: Mainly for development?)
        elif self.unpacked_primary_provisional_designation_list is not None and \
                isinstance(self.statearray, (list, tuple, np.ndarray)) and \
                isinstance(self.times_TDB, (list, tuple, np.ndarray)) and \
                self.statearray.shape[0] == self.times_TDB.shape[0]:
            self._populate_from_nbody_array(self.unpacked_primary_provisional_designation_list,
                                            self.times_TDB,
                                            self.statearray)

        # (iv) From text file ( Mainly for development!)
        # NOTE(review): _populate_from_nbody_text is commented-out below, so this
        # branch currently raises AttributeError if taken - confirm before use.
        elif self.filepath is not None:
            self._populate_from_nbody_text(self.filepath)

        # (v) An empty instance ...
        else:
            self._generate_empty()

    def _generate_empty(self, ):
        """Fallback: populate self.MSCs with a single, empty MSC object."""
        # TODO: Use logging here and elsewhere
        print('\n', '*'*6, 'Defaulting to the production of a list of empty MSCs', '*'*6, '\n')
        print('\t', 'This occurs (a) on erroneous input, and (b) when no input supplied\n')
        self.MSCs.append(MSC())

    # Functions to handle different instantiaion methods
    #  - Take input and generate the chebyshev coefficients
    # --------------------------------------------------------------
    """
    def _populate_from_nbody_text( self, text_filepath ):
        '''
        Read data (assumed from an nbody integration) from a text file
        Use it to generate an array of positions, etc
        Then continue on to use func *_populate_from_nbody_array()*
        Mainly of use during development

        inputs:
        -------
        text_filepath: string
        - filepath to data from nbody simulation

        returns:
        --------
        boolean
        - whatever output is from *_populate_from_nbody_array()*
        '''
        # Read the nbody-json file
        name, times, states = nbody_reader.parse_nbody_txt( text_filepath )

        # Use the create-from-array function to do the rest
        return self._populate_from_nbody_array(name, times, states)
    """

    def _populate_from_nbody_array(self,
                                   unpacked_primary_provisional_designation_list,
                                   times_TDB,
                                   states,
                                   covariances=None):
        """
        Will initialize MSC(s) from supplied arrays
        This handles the partitioning of data into multiple MSCs as required

        inputs:
        -------
        unpacked_primary_provisional_designation_list: string or list of strings
            - unpacked provisional identification(s) of the object(s)
        times_TDB: np.array
            - TDB/TT
        states: np.array
            - ONLY the main fitted variables (e.g. 3-posn, 3-vel, + N-non-grav-coeffs)
            - Expect shape == (Nt, Np, Nc) where Nc in [6,7,8,9]
            - In early stages of development, assume grav-only, hence Nc == 6
        covariances: np.array or None
            - covariances corresponding to the states; shape (Nt, Np, Nc, Nc)

        returns:
        --------
        list of MSC objects (also appended to self.MSCs)
        """
        # Ensure the passed desig variable is array-like
        self.unpacked_primary_provisional_designation_list = np.atleast_1d(unpacked_primary_provisional_designation_list)

        # Expected shapes:
        #   states.shape      == (Nt, Np, Nc),     e.g. (41, 1, 6)
        #   covariances.shape == (Nt, Np, Nc, Nc), e.g. (41, 1, 6, 6)
        # Nt = number of times, Np = number of particles,
        # Nc = number of fitted coords/components (should be in [6,7,8,9])

        # Check for consistent times & array dimensions
        assert len(times_TDB) == states.shape[0]
        # BUGFIX: axis 0 of covariances is the *time* axis (Nt), so it must be
        # compared against len(times_TDB), not the number of designations.
        assert covariances is None or len(times_TDB) == covariances.shape[0]

        # Check for consistent designations & array dimensions
        assert len(self.unpacked_primary_provisional_designation_list) == states.shape[1]
        assert covariances is None or len(self.unpacked_primary_provisional_designation_list) == covariances.shape[1]

        # Check for consistent number of state components (and covariance if supplied)
        assert states.shape[2] in [6]  # [6,7,8,9] # ASSUME GRAV-ONLY AT THIS STAGE OF DEVELOPMENT ...
        assert covariances is None or covariances.shape[2] == covariances.shape[3] == states.shape[2]

        # Loop over each of the objects and create an MSC-object for each ...
        for i, unpacked in enumerate(self.unpacked_primary_provisional_designation_list):
            # Create the MSC (using the appropriate *from_coord_arrays()* function)
            # NB: extract the slice of states/covariances for this particular object
            M = MSC()
            M.from_coord_arrays(unpacked,
                                times_TDB,
                                states[:, i, :],
                                covariances=None if covariances is None else covariances[:, i, :, :])
            self.MSCs.append(M)
        return self.MSCs

    def _populate_from_database(self, dict_of_dicts):
        """
        Method to construct MSCs from 1-or-many sectors stored in the sqlite db as coeffs

        inputs:
        -------
        dict_of_dicts : dictionary-of-dictionaries
            - As returned by query_coefficients_by_jd_hp
            - Outer dict is key-ed on unpacked_primary_provisional_designation
            - Inner dicts are key-ed on sector

        returns:
        --------
        list of MSC objects
        """
        # BUGFIX: this used to (a) iterate an undefined name
        # (unpacked_primary_provisional_designation_list), and (b) return a
        # local list that was never appended to (always empty). It now
        # iterates the outer-dict keys (the designations) and returns the
        # instance list that is actually populated.
        for unpacked_primary_provisional_designation in dict_of_dicts:
            # Create the MSC (using the appropriate *from_database()* function)
            M = MSC()
            M.from_database(unpacked_primary_provisional_designation)
            self.MSCs.append(M)
        return self.MSCs
class MSC(Base):
"""
Multi-Sector Cheby Class
Will hold chebyshev coefficients for a **SINGLE** object
"""
def __init__(self, **kwargs):
    """Set up an empty MSC with default cheby-fitting parameters.

    Population happens later via from_database() / from_coord_arrays().
    NOTE(review): **kwargs is accepted but never used in this initializer -
    confirm whether supplied kwargs should be applied as attributes.
    """
    # Give access to "Base" methods & attributes
    Base.__init__(self)
    # Initialization of default standard PARAMETERS / OPTIONS we use for chebyshevs, etc
    self.minorder = 5    # : Fitting Chebys
    self.maxorder = 125  # : Fitting Chebys
    self.maxerr = 1e-9   # : Fitting Chebys
    # It's probably going to be useful to track the number of components
    # (the various covariance calculations & reshapings can get complicated)
    self.n_coordinates = None
    self.covar_bool = False
    # Fundamental identifiying data for MSC
    self.unpacked_primary_provisional_designation = None  # Designation of object
    self.sector_coeffs = {}  # the all-important cheby-coeffs
# Function(s) to populate MSC from various sources ...
# --------------------------------------------------------------
def from_database(self, unpacked_primary_provisional_designation , sector_numbers = None):
"""
Used to initialize (create) an MSC object from data in the sqlite database
(as extracted using the *get_nightly_precalcs()* function in precalc.py )
inputs:
-------
unpacked_primary_provisional_designation : string
returns:
--------
True
- Doesn't directly return, just populates the MSC object
populates:
----------
self.unpacked_primary_provisional_designation : string
self.sector_coeffs : dict
self.sector_init : int
self.sector_final : int
self.TDB_init : float
self.TDB_final : float
"""
# unpacked primary provID of the object (e.g. 2020 AA)
self.unpacked_primary_provisional_designation = unpacked_primary_provisional_designation
# query the database for the required sector coefficients
coeff_dict | |
# -*- coding: utf-8 -*-
"""
Created on Thu May 11 14:40:19 2017
@author: ishort
"""
import math
import Useful
def convec(numDeps, tauRos, depths, temp, press, rho, kappa, kappaSun,
zScale, teff, logg, mmw):
logE = math.log10(math.E) #// for debug output
ln10 = math.log(10.0) #//needed to convert logg from base 10 to base e
convTemp = [ [ 0.0 for i in range(numDeps) ] for j in range(2) ]
#//Schwarzschild criterion for convective instability:
gamma = 5.0 / 3.0 #//adiabatic gamma for ideal monatomic gas - the photon gas is negligible in stars w convection
gammaFac = gamma / (gamma - 1.0) #// yeah, yeah - I know it's 2.5, but let's show where it comes from for the record...
invGamFac = 1.0 / gammaFac
#//CHEAT: Set gammaThing to value that makes convection just disappear at bottom of mid-F star (7000 K)
#//double gammaThing = 1.60;
#//double invGamThing = 1.0 / gammaThing;
#double invGamThing;
#//System.out.println("gammaThing " + gammaThing);
#double deltaP, deltaT; //, dlnPdlnT;
#double dlnTdlnP, dlnMudlnP, deltaMu;
#double Hp, logHp;
#//double HpSun = 1.2535465715411615E7; //cm, as computed by GrayStar at depth index=36
HpSun = 2.0e7 #//cm, approximately as computed by GrayStar at depth index=36
logHpSun = math.log(HpSun)
#//Compute the presure scale height as a reality check:
HpRefDep=36 #//index of reference depth for computing pressure scale height
logHp = press[1][HpRefDep] - rho[1][HpRefDep] - ln10 * logg
Hp = math.exp(logHp)
#//Try scaling gamma to "fix" the convective boundary
#//invGamThing = invGamFac * HpSun/Hp;
#//System.out.println("Hp/HpSun " + Hp/HpSun);
#//double[] mmw = State.mmwFn(numDeps, temp, zScale);
#//Search outward for top of convection zone
isStable = False
iBound = numDeps - 1 #//initialize index of depth where convection begins to bottom of atmosphere
for i in range(numDeps - 2, 0, -1):
#//System.out.println("Hp " + Hp);
#//1st order finite difference - erratic?
#//double deltaP = press[1][i] - press[1][i-1];
#//double deltaT = temp[1][i] - temp[1][i-1];
#//Try "2nd order" finite difference - non-uniform spacing in deltaT
deltaP = press[1][i + 1] - press[1][i - 1]
deltaT = temp[1][i + 1] - temp[1][i - 1]
deltaMu = (mmw[i + 1] - mmw[i]) * Useful.amu
#//dlnPdlnT = deltaP / deltaT;
dlnTdlnP = deltaT / deltaP
dlnMudlnP = deltaMu / deltaP
#//System.out.format("%12.8f %12.8f%n", logE * tauRos[1][i], dlnPlndT);
#// This can be finicky - let's say we have not found the radiative zone unless two consecutive layers meet the criterion
#//if (dlnPdlnT > gammaThing) {
if (dlnTdlnP < invGamFac + dlnMudlnP):
#//Convectively stable
if (isStable == False):
#//The previous convectively unstable layer was an isolated anomoly - we're have NOT found the zone! Reset:
isStable = true
iBound = i
#//System.out.println("First stable layer was found, tauRos " + logE * tauRos[1][i] + " NOW: isStable " + isStable);
#}
#}
#}
#//System.out.println("Convec: iBound " + iBound);
#//Radiative zone - leave temperatures alone:
for i in range(iBound):
convTemp[0][i] = temp[0][i]
convTemp[1][i] = temp[1][i]
baseTemp = temp[0][iBound]
baseLogTemp = temp[1][iBound]
baseTau = tauRos[0][iBound]
baseLogTau = tauRos[1][iBound]
#//double baseDepth = depths[iBound]
logSigma = Useful.logSigma()
logK = Useful.logK()
logAmu = Useful.logAmu()
mixLSun = 1.0 #// convective mixing length in pressure scale heights (H_P)
betaSun = 0.5 #// factor for square of convective bubble velocity (range: 0.0 - 1.0)
#double Cp, logCp; //Specific heat capacity at constant pressure
mixL = mixLSun #//initialization
beta = betaSun #//initialization
teffSun = 5778.0
loggSun = 4.44
#//Shameless fix:
#//It seems mixL and beta need to be temp and press dependent:
if (teff < teffSun):
mixL = mixLSun * math.pow(teff / teffSun, 4.0) #//lower teff -> smaller mixL -> steeper SAdGrad
beta = betaSun * math.pow(teff / teffSun, 4.0) #//lower teff -> smaller beta -> steeper SAdGrad
mixL = mixL * math.pow(loggSun / logg, 2.0) #// lower logg -> larger mixL -> smaller sAdGrad
beta = beta * math.pow(loggSun / logg, 2.0) #// lower logg -> larger beta -> smaller sAdGrad
"""/*
//Shameless fix:
beta = betaSun; // no fix?
mixL = mixLSun * Math.pow(Hp / HpSun, 4.0); //lower teff -> smaller Hp -> smaller mixL -> steeper SAdGrad
//mixL = mixL * Math.pow(logg / loggSun, 4.0); // lower logg -> smaller mixL -> larger sAdGrad
*/"""
logMixL = math.log(mixL)
logBeta = math.log(beta)
logFluxSurfBol = logSigma + 4.0 * math.log(teff)
#// This will get hairy when we take it super-adiabatic so let's take it *really* easy and make every factor and term clear:
logInvGamFac = math.log(invGamFac)
#//Get the mean molecular weight in amu from State - Row 0 is "mu" in amu:
#double mu, logMu, logFctr1, logFctr2, logFctr3;
#double nextTemp, lastTemp, nextTemp2;
#//Adiabatic dT/dx gradients in various coordinates
#//tau, logTau space
#double logAdGradTauMag, logAdGradLogTauMag, adGradLogTau;
#//SuperAdiabatic dT/dx gradients in various coordinates
#double deltaTau, logDeltaTau, deltaLogTau, logDeltaLogTau;
#double sAdGradLogTau, logSadGradR, logSadGradTau, logSadGradLogTau;
#double lastLogTau;
#//r space:
#double logAdGradRMag, adGradR;
#//SuperAdiabatic dT/dx gradients in various coordinates
#double deltaR, logDeltaR;
#/*
# double sAdGradR;
# double lastDepth;
# */
lastTemp = baseTemp
lastLogTau = baseLogTau
#//lastDepth = baseDepth;
#//System.out.println(
#// "tauRos[1][i] (tauRos[1][i]-lastLogTau) adGradLogTau rho[1][i] kappa[1][i] lastTemp nextTemp");
for i in range(iBound, numDeps):
mu = mmw[i]
logMu = math.log(mu)
logFctr1 = logMu + logAmu - logK
#//System.out.println("logFactr1 " + logE*logFctr1 + " logInvGamFac " + logE*logInvGamFac + " logg " + logg);
logCp = math.log(5.0 / 2.0) - logFctr1 #//ideal monatomic gas - underestimate that neglects partial ionization
#// ** Caution: These are log_e of the *magnitude* of the temperature gradients!
#//The adiabatic dT/dTau in r space
logAdGradRMag = logInvGamFac + logFctr1 + ln10 * logg #//logg is in base 10
#//This is baaad stuff - remember our tuaRos scale has *nothing* to do with our kappa values!
#//The adiabatic dT/dTau in tau space - divide dT/dr by rho and kappa and make it +ve becasue we're in tau-space:
#//Bad fake to fix artificially small dT/dr at low Teff - use kappaSun instead of kappa
logAdGradTauMag = logAdGradRMag - rho[1][i] - kappa[1][i]
#//The adiabatic dT/dLnTau in log_e(tau) space
logAdGradLogTauMag = tauRos[1][i] + logAdGradTauMag
#//Build the T(tau) in the convection zone:
#// Work in logTau space - numerically safer??
adGradLogTau = math.exp(logAdGradLogTauMag) #//No minus sign - logTau increases inward...
nextTemp = lastTemp + adGradLogTau * (tauRos[1][i] - lastLogTau)
#//System.out.format("%12.8f %12.8f %12.8f %12.8f %12.8f %7.1f %7.1f%n", logE * tauRos[1][i], logE * (tauRos[1][i] - lastLogTau), adGradLogTau, logE * rho[1][i], logE * kappa[1][i], lastTemp, nextTemp);
"""/*
// Do in geometric depth space
adGradR = Math.exp(logAdGradRMag); // no minus sign - our depths *increase* inwards (they're NOT heights!)
nextTemp = lastTemp + adGradR * (depths[i] - lastDepth);
//System.out.format("%12.8f %12.8f %12.8f %7.1f %7.1f%n", logE*tauRos[1][i], (depths[i] - lastDepth), adGradR, lastTemp, nextTemp);
*/"""
#//Okay - now the difference between the superadiabatic and adiabatic dT/dr:
logFctr2 = rho[1][i] + logCp + 2.0 * logMixL
#// ** NOTE ** Should temp in the following line be the *convective* temp of the last depth???
#// logg is in base 10 - convert to base e
logFctr3 = 3.0 * (ln10 * logg - math.log(lastTemp)) / 2.0
#//Difference between SuperAdibatic dT/dr and Adiabtic dT/dr in r-space - Carroll & Ostlie 2nd Ed. p. 328
#//System.out.println("logFluxSurfBol " + logE * logFluxSurfBol + " logFctr2 " + logE * logFctr2 + " logFctr1 " + logE * logFctr1 + " logFctr3 " + logE * logFctr3 + " logBeta " + logE * logBeta);
logDeltaR = logFluxSurfBol - logFctr2 + 2.0 * logFctr1 + logFctr3 - 0.5 * logBeta
logDeltaR = 2.0 * logDeltaR / 3.0 #//DeltaR is above formula to the 2/3 power
#//This is baaad stuff - remember our tuaRos scale has *nothing* to do with our kappa values!
#//Bad fake to fix artificially small dT/dr at low Teff - use kappaSun instead of kappa
logDeltaTau = logDeltaR - rho[1][i] - kappa[1][i]
logDeltaLogTau = tauRos[1][i] + logDeltaTau
sAdGradLogTau = adGradLogTau + math.exp(logDeltaLogTau)
#//System.out.format("%12.8f %12.8f %12.8f %12.8f%n", logE*tauRos[1][i], logE*logDeltaR, logE*logDeltaTau, logE*logDeltaLogTau);
nextTemp2 = lastTemp + sAdGradLogTau * (tauRos[1][i] - | |
<reponame>forked-group/invest-python
# pylint:disable=too-many-lines
from dataclasses import dataclass
from datetime import datetime
from typing import List
from . import _grpc_helpers
class SecurityTradingStatus(_grpc_helpers.Enum):
    """Trading status of an instrument; values mirror the gRPC enum wire values."""
    SECURITY_TRADING_STATUS_UNSPECIFIED = 0
    SECURITY_TRADING_STATUS_NOT_AVAILABLE_FOR_TRADING = 1
    SECURITY_TRADING_STATUS_OPENING_PERIOD = 2
    SECURITY_TRADING_STATUS_CLOSING_PERIOD = 3
    SECURITY_TRADING_STATUS_BREAK_IN_TRADING = 4
    SECURITY_TRADING_STATUS_NORMAL_TRADING = 5
    SECURITY_TRADING_STATUS_CLOSING_AUCTION = 6
    SECURITY_TRADING_STATUS_DARK_POOL_AUCTION = 7
    SECURITY_TRADING_STATUS_DISCRETE_AUCTION = 8
    SECURITY_TRADING_STATUS_OPENING_AUCTION_PERIOD = 9
    SECURITY_TRADING_STATUS_TRADING_AT_CLOSING_AUCTION_PRICE = 10
    SECURITY_TRADING_STATUS_SESSION_ASSIGNED = 11
    SECURITY_TRADING_STATUS_SESSION_CLOSE = 12
    SECURITY_TRADING_STATUS_SESSION_OPEN = 13
    SECURITY_TRADING_STATUS_DEALER_NORMAL_TRADING = 14
    SECURITY_TRADING_STATUS_DEALER_BREAK_IN_TRADING = 15
    SECURITY_TRADING_STATUS_DEALER_NOT_AVAILABLE_FOR_TRADING = 16
class InstrumentIdType(_grpc_helpers.Enum):
    """Kind of identifier used to look an instrument up (FIGI or ticker)."""
    INSTRUMENT_ID_UNSPECIFIED = 0
    INSTRUMENT_ID_TYPE_FIGI = 1
    INSTRUMENT_ID_TYPE_TICKER = 2
class InstrumentStatus(_grpc_helpers.Enum):
    """Instrument list filter: base set or all instruments."""
    INSTRUMENT_STATUS_UNSPECIFIED = 0
    INSTRUMENT_STATUS_BASE = 1
    INSTRUMENT_STATUS_ALL = 2
class ShareType(_grpc_helpers.Enum):
    """Kind of equity share (common, preferred, depositary receipt, ...)."""
    SHARE_TYPE_UNSPECIFIED = 0
    SHARE_TYPE_COMMON = 1
    SHARE_TYPE_PREFERRED = 2
    SHARE_TYPE_ADR = 3
    SHARE_TYPE_GDR = 4
    SHARE_TYPE_MLP = 5
    SHARE_TYPE_NY_REG_SHRS = 6
    SHARE_TYPE_CLOSED_END_FUND = 7
    SHARE_TYPE_REIT = 8
class SubscriptionAction(_grpc_helpers.Enum):
    """Market-data stream action: subscribe or unsubscribe."""
    SUBSCRIPTION_ACTION_UNSPECIFIED = 0
    SUBSCRIPTION_ACTION_SUBSCRIBE = 1
    SUBSCRIPTION_ACTION_UNSUBSCRIBE = 2
class SubscriptionInterval(_grpc_helpers.Enum):
    """Candle interval for streaming subscriptions (1 or 5 minutes)."""
    SUBSCRIPTION_INTERVAL_UNSPECIFIED = 0
    SUBSCRIPTION_INTERVAL_ONE_MINUTE = 1
    SUBSCRIPTION_INTERVAL_FIVE_MINUTES = 2
class SubscriptionStatus(_grpc_helpers.Enum):
    """Result of a market-data subscription request."""
    SUBSCRIPTION_STATUS_UNSPECIFIED = 0
    SUBSCRIPTION_STATUS_SUCCESS = 1
    SUBSCRIPTION_STATUS_INSTRUMENT_NOT_FOUND = 2
    SUBSCRIPTION_STATUS_SUBSCRIPTION_ACTION_IS_INVALID = 3
    SUBSCRIPTION_STATUS_DEPTH_IS_INVALID = 4
    SUBSCRIPTION_STATUS_INTERVAL_IS_INVALID = 5
    SUBSCRIPTION_STATUS_LIMIT_IS_EXCEEDED = 6
    SUBSCRIPTION_STATUS_INTERNAL_ERROR = 7
class TradeDirection(_grpc_helpers.Enum):
    """Direction of an executed trade: buy or sell."""
    TRADE_DIRECTION_UNSPECIFIED = 0
    TRADE_DIRECTION_BUY = 1
    TRADE_DIRECTION_SELL = 2
class CandleInterval(_grpc_helpers.Enum):
    """Historical-candle interval, from one minute up to one day."""
    CANDLE_INTERVAL_UNSPECIFIED = 0
    CANDLE_INTERVAL_1_MIN = 1
    CANDLE_INTERVAL_5_MIN = 2
    CANDLE_INTERVAL_15_MIN = 3
    CANDLE_INTERVAL_HOUR = 4
    CANDLE_INTERVAL_DAY = 5
class OperationState(_grpc_helpers.Enum):
    """State of an account operation: executed or canceled."""
    OPERATION_STATE_UNSPECIFIED = 0
    OPERATION_STATE_EXECUTED = 1
    OPERATION_STATE_CANCELED = 2
class OrderDirection(_grpc_helpers.Enum):
    """Direction of an order: buy or sell."""
    ORDER_DIRECTION_UNSPECIFIED = 0
    ORDER_DIRECTION_BUY = 1
    ORDER_DIRECTION_SELL = 2
class OrderType(_grpc_helpers.Enum):
    """Order type: limit or market."""
    ORDER_TYPE_UNSPECIFIED = 0
    ORDER_TYPE_LIMIT = 1
    ORDER_TYPE_MARKET = 2
class OrderExecutionReportStatus(_grpc_helpers.Enum):
    """Execution status of an order (fill / rejected / cancelled / new / partial)."""
    EXECUTION_REPORT_STATUS_UNSPECIFIED = 0
    EXECUTION_REPORT_STATUS_FILL = 1
    EXECUTION_REPORT_STATUS_REJECTED = 2
    EXECUTION_REPORT_STATUS_CANCELLED = 3
    EXECUTION_REPORT_STATUS_NEW = 4
    EXECUTION_REPORT_STATUS_PARTIALLYFILL = 5
class AccountType(_grpc_helpers.Enum):
    """Type of brokerage account."""
    ACCOUNT_TYPE_UNSPECIFIED = 0
    ACCOUNT_TYPE_TINKOFF = 1
    ACCOUNT_TYPE_TINKOFF_IIS = 2
    ACCOUNT_TYPE_INVEST_BOX = 3
class AccountStatus(_grpc_helpers.Enum):
    """Lifecycle status of an account (new / open / closed)."""
    ACCOUNT_STATUS_UNSPECIFIED = 0
    ACCOUNT_STATUS_NEW = 1
    ACCOUNT_STATUS_OPEN = 2
    ACCOUNT_STATUS_CLOSED = 3
class StopOrderDirection(_grpc_helpers.Enum):
    """Direction of a stop order: buy or sell."""
    STOP_ORDER_DIRECTION_UNSPECIFIED = 0
    STOP_ORDER_DIRECTION_BUY = 1
    STOP_ORDER_DIRECTION_SELL = 2
class StopOrderExpirationType(_grpc_helpers.Enum):
    """Stop-order expiration: good-till-cancel or good-till-date."""
    STOP_ORDER_EXPIRATION_TYPE_UNSPECIFIED = 0
    STOP_ORDER_EXPIRATION_TYPE_GOOD_TILL_CANCEL = 1
    STOP_ORDER_EXPIRATION_TYPE_GOOD_TILL_DATE = 2
class StopOrderType(_grpc_helpers.Enum):
    """Kind of stop order (take-profit / stop-loss / stop-limit)."""
    STOP_ORDER_TYPE_UNSPECIFIED = 0
    STOP_ORDER_TYPE_TAKE_PROFIT = 1
    STOP_ORDER_TYPE_STOP_LOSS = 2
    STOP_ORDER_TYPE_STOP_LIMIT = 3
class OperationType(_grpc_helpers.Enum):
    """Kind of account operation (trades, fees, taxes, dividends, transfers, ...)."""
    OPERATION_TYPE_UNSPECIFIED = 0
    OPERATION_TYPE_INPUT = 1
    OPERATION_TYPE_BOND_TAX = 2
    OPERATION_TYPE_OUTPUT_SECURITIES = 3
    OPERATION_TYPE_OVERNIGHT = 4
    OPERATION_TYPE_TAX = 5
    OPERATION_TYPE_BOND_REPAYMENT_FULL = 6
    OPERATION_TYPE_SELL_CARD = 7
    OPERATION_TYPE_DIVIDEND_TAX = 8
    OPERATION_TYPE_OUTPUT = 9
    OPERATION_TYPE_BOND_REPAYMENT = 10
    OPERATION_TYPE_TAX_CORRECTION = 11
    OPERATION_TYPE_SERVICE_FEE = 12
    OPERATION_TYPE_BENEFIT_TAX = 13
    OPERATION_TYPE_MARGIN_FEE = 14
    OPERATION_TYPE_BUY = 15
    OPERATION_TYPE_BUY_CARD = 16
    OPERATION_TYPE_INPUT_SECURITIES = 17
    OPERATION_TYPE_SELL_MARGIN = 18
    OPERATION_TYPE_BROKER_FEE = 19
    OPERATION_TYPE_BUY_MARGIN = 20
    OPERATION_TYPE_DIVIDEND = 21
    OPERATION_TYPE_SELL = 22
    OPERATION_TYPE_COUPON = 23
    OPERATION_TYPE_SUCCESS_FEE = 24
    OPERATION_TYPE_DIVIDEND_TRANSFER = 25
    OPERATION_TYPE_ACCRUING_VARMARGIN = 26
    OPERATION_TYPE_WRITING_OFF_VARMARGIN = 27
    OPERATION_TYPE_DELIVERY_BUY = 28
    OPERATION_TYPE_DELIVERY_SELL = 29
    OPERATION_TYPE_TRACK_MFEE = 30
    OPERATION_TYPE_TRACK_PFEE = 31
    OPERATION_TYPE_TAX_PROGRESSIVE = 32
    OPERATION_TYPE_BOND_TAX_PROGRESSIVE = 33
    OPERATION_TYPE_DIVIDEND_TAX_PROGRESSIVE = 34
    OPERATION_TYPE_BENEFIT_TAX_PROGRESSIVE = 35
    OPERATION_TYPE_TAX_CORRECTION_PROGRESSIVE = 36
    OPERATION_TYPE_TAX_REPO_PROGRESSIVE = 37
    OPERATION_TYPE_TAX_REPO = 38
    OPERATION_TYPE_TAX_REPO_HOLD = 39
    OPERATION_TYPE_TAX_REPO_REFUND = 40
    OPERATION_TYPE_TAX_REPO_HOLD_PROGRESSIVE = 41
    OPERATION_TYPE_TAX_REPO_REFUND_PROGRESSIVE = 42
    OPERATION_TYPE_DIV_EXT = 43
    OPERATION_TYPE_TAX_CORRECTION_COUPON = 44
class AccessLevel(_grpc_helpers.Enum):
    """Access level a token has to an account."""
    ACCOUNT_ACCESS_LEVEL_UNSPECIFIED = 0
    ACCOUNT_ACCESS_LEVEL_FULL_ACCESS = 1
    ACCOUNT_ACCESS_LEVEL_READ_ONLY = 2
    ACCOUNT_ACCESS_LEVEL_NO_ACCESS = 3
class CouponType(_grpc_helpers.Enum):
    """Kind of bond coupon (constant, floating, discount, ...)."""
    COUPON_TYPE_UNSPECIFIED = 0
    COUPON_TYPE_CONSTANT = 1
    COUPON_TYPE_FLOATING = 2
    COUPON_TYPE_DISCOUNT = 3
    COUPON_TYPE_MORTGAGE = 4
    COUPON_TYPE_FIX = 5
    COUPON_TYPE_VARIABLE = 6
    COUPON_TYPE_OTHER = 7
@dataclass(eq=False, repr=True)
class MoneyValue(_grpc_helpers.Message):
    """Monetary amount: integer units plus nano fractional part, with a currency code."""
    currency: str = _grpc_helpers.string_field(1)
    units: int = _grpc_helpers.int64_field(2)
    nano: int = _grpc_helpers.int32_field(3)
@dataclass(eq=False, repr=True)
class Quotation(_grpc_helpers.Message):
    """Currency-less decimal value: integer units plus nano fractional part."""
    units: int = _grpc_helpers.int64_field(1)
    nano: int = _grpc_helpers.int32_field(2)
@dataclass(eq=False, repr=True)
class Ping(_grpc_helpers.Message):
    """Keep-alive message carrying a timestamp."""
    # NOTE(review): annotated as datetime but declared with int64_field, while
    # other datetime fields in this module use message_field - confirm against
    # the proto definition.
    time: datetime = _grpc_helpers.int64_field(1)
@dataclass(eq=False, repr=True)
class TradingSchedulesRequest(_grpc_helpers.Message):
    """Request for trading schedules of an exchange within a [from_, to] range."""
    exchange: str = _grpc_helpers.string_field(1)
    from_: datetime = _grpc_helpers.message_field(2)  # trailing underscore: 'from' is a keyword
    to: datetime = _grpc_helpers.message_field(3)
@dataclass(eq=False, repr=True)
class TradingSchedulesResponse(_grpc_helpers.Message):
    """Response wrapper: one TradingSchedule per exchange."""
    exchanges: List["TradingSchedule"] = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class TradingSchedule(_grpc_helpers.Message):
    """Per-exchange schedule: a list of trading days."""
    exchange: str = _grpc_helpers.string_field(1)
    days: List["TradingDay"] = _grpc_helpers.message_field(2)
@dataclass(eq=False, repr=True)
class TradingDay(_grpc_helpers.Message):  # pylint:disable=too-many-instance-attributes
    """One calendar day of an exchange schedule, with session/auction/clearing times."""
    date: datetime = _grpc_helpers.message_field(1)
    is_trading_day: bool = _grpc_helpers.bool_field(2)
    start_time: datetime = _grpc_helpers.message_field(3)
    end_time: datetime = _grpc_helpers.message_field(4)
    # reserved 5,6
    opening_auction_start_time: datetime = _grpc_helpers.message_field(7)
    closing_auction_end_time: datetime = _grpc_helpers.message_field(8)
    evening_opening_auction_start_time: datetime = _grpc_helpers.message_field(9)
    evening_start_time: datetime = _grpc_helpers.message_field(10)
    evening_end_time: datetime = _grpc_helpers.message_field(11)
    clearing_start_time: datetime = _grpc_helpers.message_field(12)
    clearing_end_time: datetime = _grpc_helpers.message_field(13)
    premarket_start_time: datetime = _grpc_helpers.message_field(14)
    premarket_end_time: datetime = _grpc_helpers.message_field(15)
@dataclass(eq=False, repr=True)
class InstrumentRequest(_grpc_helpers.Message):
    """Request for a single instrument, identified by id of the given id_type."""
    id_type: "InstrumentIdType" = _grpc_helpers.enum_field(1)
    class_code: str = _grpc_helpers.string_field(2)
    id: str = _grpc_helpers.string_field(3)
@dataclass(eq=False, repr=True)
class InstrumentsRequest(_grpc_helpers.Message):
    """Request for an instrument list, filtered by instrument_status."""
    instrument_status: "InstrumentStatus" = _grpc_helpers.enum_field(1)
@dataclass(eq=False, repr=True)
class BondResponse(_grpc_helpers.Message):
    """Response wrapper around a single Bond."""
    instrument: "Bond" = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class BondsResponse(_grpc_helpers.Message):
    """Response wrapper around a list of Bonds."""
    instruments: List["Bond"] = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class GetBondCouponsRequest(_grpc_helpers.Message):
    """Request for a bond's coupons (by FIGI) within a [from_, to] range."""
    figi: str = _grpc_helpers.string_field(1)
    from_: datetime = _grpc_helpers.message_field(2)  # trailing underscore: 'from' is a keyword
    to: datetime = _grpc_helpers.message_field(3)
@dataclass(eq=False, repr=True)
class GetBondCouponsResponse(_grpc_helpers.Message):
    """Response wrapper around a list of Coupon events."""
    events: List["Coupon"] = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class Coupon(_grpc_helpers.Message):
    """One bond coupon event, as returned inside GetBondCouponsResponse."""
    figi: str = _grpc_helpers.string_field(1)
    coupon_date: datetime = _grpc_helpers.message_field(2)
    coupon_number: int = _grpc_helpers.int64_field(3)
    fix_date: datetime = _grpc_helpers.message_field(4)
    pay_one_bond: "MoneyValue" = _grpc_helpers.message_field(5)
    # CONSISTENCY FIX: CouponType is an enum, and every other enum-typed field
    # in this module (id_type, instrument_status, trading_status, ...) is
    # declared with enum_field; message_field(6) here was inconsistent.
    coupon_type: "CouponType" = _grpc_helpers.enum_field(6)
    coupon_start_date: datetime = _grpc_helpers.message_field(7)
    coupon_end_date: datetime = _grpc_helpers.message_field(8)
    coupon_period: int = _grpc_helpers.int32_field(9)
@dataclass(eq=False, repr=True)
class CurrencyResponse(_grpc_helpers.Message):
    """Response wrapper around a single Currency."""
    instrument: "Currency" = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class CurrenciesResponse(_grpc_helpers.Message):
    """Response wrapper around a list of Currencies."""
    instruments: List["Currency"] = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class EtfResponse(_grpc_helpers.Message):
    """Response wrapper around a single Etf."""
    instrument: "Etf" = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class EtfsResponse(_grpc_helpers.Message):
    """Response wrapper around a list of Etfs."""
    instruments: List["Etf"] = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class FutureResponse(_grpc_helpers.Message):
    """Response wrapper around a single Future."""
    instrument: "Future" = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class FuturesResponse(_grpc_helpers.Message):
    """Response wrapper around a list of Futures."""
    instruments: List["Future"] = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class ShareResponse(_grpc_helpers.Message):
    """Response wrapper around a single Share."""
    instrument: "Share" = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class SharesResponse(_grpc_helpers.Message):
    """Response wrapper around a list of Shares."""
    instruments: List["Share"] = _grpc_helpers.message_field(1)
@dataclass(eq=False, repr=True)
class Bond(_grpc_helpers.Message):  # pylint:disable=too-many-instance-attributes
    """Bond instrument description.

    Field numbers mirror the upstream protobuf; gaps (14, 20) are
    presumably reserved/removed proto fields — confirm against the .proto.
    """
    figi: str = _grpc_helpers.string_field(1)
    ticker: str = _grpc_helpers.string_field(2)
    class_code: str = _grpc_helpers.string_field(3)
    isin: str = _grpc_helpers.string_field(4)
    lot: int = _grpc_helpers.int32_field(5)  # lot size (units per lot)
    currency: str = _grpc_helpers.string_field(6)
    # k*/d* fields: margin ratio/discount coefficients for long/short positions
    klong: "Quotation" = _grpc_helpers.message_field(7)
    kshort: "Quotation" = _grpc_helpers.message_field(8)
    dlong: "Quotation" = _grpc_helpers.message_field(9)
    dshort: "Quotation" = _grpc_helpers.message_field(10)
    dlong_min: "Quotation" = _grpc_helpers.message_field(11)
    dshort_min: "Quotation" = _grpc_helpers.message_field(12)
    short_enabled_flag: bool = _grpc_helpers.bool_field(13)
    name: str = _grpc_helpers.string_field(15)
    exchange: str = _grpc_helpers.string_field(16)
    coupon_quantity_per_year: int = _grpc_helpers.int32_field(17)
    maturity_date: datetime = _grpc_helpers.message_field(18)
    nominal: "MoneyValue" = _grpc_helpers.message_field(19)
    state_reg_date: datetime = _grpc_helpers.message_field(21)
    placement_date: datetime = _grpc_helpers.message_field(22)
    placement_price: "MoneyValue" = _grpc_helpers.message_field(23)
    aci_value: "MoneyValue" = _grpc_helpers.message_field(24)  # accrued coupon interest
    country_of_risk: str = _grpc_helpers.string_field(25)
    country_of_risk_name: str = _grpc_helpers.string_field(26)
    sector: str = _grpc_helpers.string_field(27)
    issue_kind: str = _grpc_helpers.string_field(28)
    issue_size: int = _grpc_helpers.int64_field(29)
    issue_size_plan: int = _grpc_helpers.int64_field(30)
    trading_status: "SecurityTradingStatus" = _grpc_helpers.enum_field(31)
    otc_flag: bool = _grpc_helpers.bool_field(32)
    buy_available_flag: bool = _grpc_helpers.bool_field(33)
    sell_available_flag: bool = _grpc_helpers.bool_field(34)
    floating_coupon_flag: bool = _grpc_helpers.bool_field(35)
    perpetual_flag: bool = _grpc_helpers.bool_field(36)
    amortization_flag: bool = _grpc_helpers.bool_field(37)
    min_price_increment: "Quotation" = _grpc_helpers.message_field(38)
    api_trade_available_flag: bool = _grpc_helpers.bool_field(39)
@dataclass(eq=False, repr=True)
class Currency(_grpc_helpers.Message):  # pylint:disable=too-many-instance-attributes
    """Currency instrument description (field numbers mirror the protobuf)."""
    figi: str = _grpc_helpers.string_field(1)
    ticker: str = _grpc_helpers.string_field(2)
    class_code: str = _grpc_helpers.string_field(3)
    isin: str = _grpc_helpers.string_field(4)
    lot: int = _grpc_helpers.int32_field(5)
    currency: str = _grpc_helpers.string_field(6)
    # k*/d* fields: margin ratio/discount coefficients for long/short positions
    klong: "Quotation" = _grpc_helpers.message_field(7)
    kshort: "Quotation" = _grpc_helpers.message_field(8)
    dlong: "Quotation" = _grpc_helpers.message_field(9)
    dshort: "Quotation" = _grpc_helpers.message_field(10)
    dlong_min: "Quotation" = _grpc_helpers.message_field(11)
    dshort_min: "Quotation" = _grpc_helpers.message_field(12)
    short_enabled_flag: bool = _grpc_helpers.bool_field(13)
    name: str = _grpc_helpers.string_field(15)
    exchange: str = _grpc_helpers.string_field(16)
    nominal: "MoneyValue" = _grpc_helpers.message_field(17)
    country_of_risk: str = _grpc_helpers.string_field(18)
    country_of_risk_name: str = _grpc_helpers.string_field(19)
    trading_status: "SecurityTradingStatus" = _grpc_helpers.enum_field(20)
    otc_flag: bool = _grpc_helpers.bool_field(21)
    buy_available_flag: bool = _grpc_helpers.bool_field(22)
    sell_available_flag: bool = _grpc_helpers.bool_field(23)
    iso_currency_name: str = _grpc_helpers.string_field(24)
    min_price_increment: "Quotation" = _grpc_helpers.message_field(25)
    api_trade_available_flag: bool = _grpc_helpers.bool_field(26)
@dataclass(eq=False, repr=True)
class Etf(_grpc_helpers.Message):  # pylint:disable=too-many-instance-attributes
    """ETF instrument description (field numbers mirror the protobuf)."""
    figi: str = _grpc_helpers.string_field(1)
    ticker: str = _grpc_helpers.string_field(2)
    class_code: str = _grpc_helpers.string_field(3)
    isin: str = _grpc_helpers.string_field(4)
    lot: int = _grpc_helpers.int32_field(5)
    currency: str = _grpc_helpers.string_field(6)
    # k*/d* fields: margin ratio/discount coefficients for long/short positions
    klong: "Quotation" = _grpc_helpers.message_field(7)
    kshort: "Quotation" = _grpc_helpers.message_field(8)
    dlong: "Quotation" = _grpc_helpers.message_field(9)
    dshort: "Quotation" = _grpc_helpers.message_field(10)
    dlong_min: "Quotation" = _grpc_helpers.message_field(11)
    dshort_min: "Quotation" = _grpc_helpers.message_field(12)
    short_enabled_flag: bool = _grpc_helpers.bool_field(13)
    name: str = _grpc_helpers.string_field(15)
    exchange: str = _grpc_helpers.string_field(16)
    fixed_commission: "Quotation" = _grpc_helpers.message_field(17)
    focus_type: str = _grpc_helpers.string_field(18)  # fund focus (equity, fixed income, ...)
    released_date: datetime = _grpc_helpers.message_field(19)
    num_shares: "Quotation" = _grpc_helpers.message_field(20)
    country_of_risk: str = _grpc_helpers.string_field(21)
    country_of_risk_name: str = _grpc_helpers.string_field(22)
    sector: str = _grpc_helpers.string_field(23)
    rebalancing_freq: str = _grpc_helpers.string_field(24)
    trading_status: "SecurityTradingStatus" = _grpc_helpers.enum_field(25)
    otc_flag: bool = _grpc_helpers.bool_field(26)
    buy_available_flag: bool = _grpc_helpers.bool_field(27)
    sell_available_flag: bool = _grpc_helpers.bool_field(28)
    min_price_increment: "Quotation" = _grpc_helpers.message_field(29)
    api_trade_available_flag: bool = _grpc_helpers.bool_field(30)
@dataclass(eq=False, repr=True)
class Future(_grpc_helpers.Message):  # pylint:disable=too-many-instance-attributes
    """Futures-contract instrument description (no ISIN; numbering differs
    from Bond/Currency/Etf accordingly)."""
    figi: str = _grpc_helpers.string_field(1)
    ticker: str = _grpc_helpers.string_field(2)
    class_code: str = _grpc_helpers.string_field(3)
    lot: int = _grpc_helpers.int32_field(4)
    currency: str = _grpc_helpers.string_field(5)
    # k*/d* fields: margin ratio/discount coefficients for long/short positions
    klong: "Quotation" = _grpc_helpers.message_field(6)
    kshort: "Quotation" = _grpc_helpers.message_field(7)
    dlong: "Quotation" = _grpc_helpers.message_field(8)
    dshort: "Quotation" = _grpc_helpers.message_field(9)
    dlong_min: "Quotation" = _grpc_helpers.message_field(10)
    dshort_min: "Quotation" = _grpc_helpers.message_field(11)
    short_enabled_flag: bool = _grpc_helpers.bool_field(12)
    name: str = _grpc_helpers.string_field(13)
    exchange: str = _grpc_helpers.string_field(14)
    first_trade_date: datetime = _grpc_helpers.message_field(15)
    last_trade_date: datetime = _grpc_helpers.message_field(16)
    futures_type: str = _grpc_helpers.string_field(17)
    asset_type: str = _grpc_helpers.string_field(18)
    basic_asset: str = _grpc_helpers.string_field(19)  # underlying asset identifier
    basic_asset_size: "Quotation" = _grpc_helpers.message_field(20)
    country_of_risk: str = _grpc_helpers.string_field(21)
    country_of_risk_name: str = _grpc_helpers.string_field(22)
    sector: str = _grpc_helpers.string_field(23)
    expiration_date: datetime = _grpc_helpers.message_field(24)
    trading_status: "SecurityTradingStatus" = _grpc_helpers.enum_field(25)
    otc_flag: bool = _grpc_helpers.bool_field(26)
    buy_available_flag: bool = _grpc_helpers.bool_field(27)
    sell_available_flag: bool = _grpc_helpers.bool_field(28)
    min_price_increment: "Quotation" = _grpc_helpers.message_field(29)
    api_trade_available_flag: bool = _grpc_helpers.bool_field(30)
@dataclass(eq=False, repr=True)
class Share(_grpc_helpers.Message): # pylint:disable=too-many-instance-attributes
figi: str = _grpc_helpers.string_field(1)
ticker: str = _grpc_helpers.string_field(2)
class_code: str = _grpc_helpers.string_field(3)
isin: str = _grpc_helpers.string_field(4)
lot: int = _grpc_helpers.int32_field(5)
currency: str = _grpc_helpers.string_field(6)
klong: "Quotation" = _grpc_helpers.message_field(7)
kshort: "Quotation" = _grpc_helpers.message_field(8)
dlong: "Quotation" = _grpc_helpers.message_field(9)
dshort: "Quotation" = _grpc_helpers.message_field(10)
dlong_min: "Quotation" = _grpc_helpers.message_field(11)
dshort_min: "Quotation" = _grpc_helpers.message_field(12)
short_enabled_flag: | |
import numpy
#import pyPyrUtils as ppu
import pyPyrUtils
#import pyPyrCcode
import math
import matplotlib.cm
import os
import scipy.misc
import cmath
import JBhelpers
import pylab
import copy
class pyramid:  # pyramid
    """Base class for image pyramids (Gaussian, Laplacian, steerable, ...).

    Subclass constructors are expected to populate ``pyr`` (list of band
    arrays), ``pyrSize`` (list of band shapes), ``pyrType`` and ``image``.
    """
    # properties
    # NOTE(review): mutable class-level attributes are shared by all
    # instances until a subclass constructor rebinds them on self.
    pyr = []
    pyrSize = []
    pyrType = ''
    image = ''
    # constructor
    def __init__(self):
        # The base class is not directly usable; instantiate a subclass.
        print "please specify type of pyramid to create (Gpry, Lpyr, etc.)"
        return
    # methods
    def nbands(self):
        # Number of stored bands (one self.pyr entry per band).
        return len(self.pyr)
    def band(self, bandNum):
        # Return band `bandNum` as a numpy array (copy of stored data).
        return numpy.array(self.pyr[bandNum])
class Spyr(pyramid):
filt = ''
edges = ''
#constructor
    def __init__(self, *args): # (image height, filter file, edges)
        """Build a steerable pyramid: Spyr(image, height, filters, edges).

        args[0] -- input image (required); converted to a numpy array.
        args[1] -- number of levels, or 'auto' for the maximum possible.
        args[2] -- filter set name: 'sp0Filters'/'sp1Filters'/'sp3Filters'/'sp5Filters'.
        args[3] -- edge-handling mode passed to corrDn (default 'reflect1').
        """
        self.pyrType = 'steerable'
        if len(args) > 0:
            self.image = numpy.array(args[0])
        else:
            print "First argument (image) is required."
            return
        #------------------------------------------------
        # defaults:
        if len(args) > 2:
            if args[2] == 'sp0Filters':
                filters = pyPyrUtils.sp0Filters()
            elif args[2] == 'sp1Filters':
                filters = pyPyrUtils.sp1Filters()
            elif args[2] == 'sp3Filters':
                filters = pyPyrUtils.sp3Filters()
            elif args[2] == 'sp5Filters':
                filters = pyPyrUtils.sp5Filters()
            elif os.path.isfile(args[2]):
                print "Filter files not supported yet"
                return
            else:
                print "filter parameters value %s not supported" % (args[2])
                return
        else:
            filters = pyPyrUtils.sp1Filters()
        harmonics = filters['harmonics']  # NOTE(review): unused in this method
        lo0filt = filters['lo0filt']
        hi0filt = filters['hi0filt']
        lofilt = filters['lofilt']
        bfilts = filters['bfilts']
        steermtx = filters['mtx']  # NOTE(review): unused in this method
        # Deepest pyramid the image/filter sizes allow.
        max_ht = pyPyrUtils.maxPyrHt(self.image.shape, lofilt.shape)
        if len(args) > 1:
            if args[1] == 'auto':
                ht = max_ht
            elif args[1] > max_ht:
                print "Error: cannot build pyramid higher than %d levels." % (
                    max_ht)
                return
            else:
                ht = args[1]
        else:
            ht = max_ht
        if len(args) > 3:
            edges = args[3]
        else:
            edges = 'reflect1'
        #------------------------------------------------------
        # one highpass band + ht*nbands oriented bands + one lowpass residual
        nbands = bfilts.shape[1]
        self.pyr = []
        self.pyrSize = []
        for n in range((ht*nbands)+2):
            self.pyr.append([])
            self.pyrSize.append([])
        im = self.image
        im_sz = im.shape
        pyrCtr = 0
        # Highpass residual is stored first.
        hi0 = pyPyrUtils.corrDn(image = im, filt = hi0filt, edges = edges);
        self.pyr[pyrCtr] = hi0
        self.pyrSize[pyrCtr] = hi0.shape
        pyrCtr += 1
        lo = pyPyrUtils.corrDn(image = im, filt = lo0filt, edges = edges)
        for i in range(ht):
            lo_sz = lo.shape
            # assume square filters -- start of buildSpyrLevs
            bfiltsz = int(math.floor(math.sqrt(bfilts.shape[0])))
            # Oriented bands for this level.
            for b in range(bfilts.shape[1]):
                filt = bfilts[:,b].reshape(bfiltsz,bfiltsz).T
                band = pyPyrUtils.corrDn(image = lo, filt = filt, edges = edges)
                self.pyr[pyrCtr] = numpy.array(band)
                self.pyrSize[pyrCtr] = (band.shape[0], band.shape[1])
                pyrCtr += 1
            # Lowpass and downsample by 2 for the next level.
            lo = pyPyrUtils.corrDn(image = lo, filt = lofilt, edges = edges,
                                   step = (2,2))
        # Lowpass residual is stored last.
        self.pyr[pyrCtr] = numpy.array(lo)
        self.pyrSize[pyrCtr] = lo.shape
# methods
def set(self, *args):
if len(args) != 3:
print 'Error: three input parameters required:'
print ' set(band, location, value)'
print ' where band and value are integer and location is a tuple'
if isinstance(args[1], (int, long)):
self.pyr[args[0]][0][args[1]] = args[2]
elif isinstance(args[1], tuple):
self.pyr[args[0]][args[1][0]][args[1][1]] = args[2]
else:
print 'Error: location parameter must be int or tuple!'
return
    def spyrLev(self, lev):
        """Return all oriented bands of level `lev` stacked into one array."""
        if lev < 0 or lev > self.spyrHt()-1:
            print 'Error: level parameter must be between 0 and %d!' % (self.spyrHt()-1)
            return
        levArray = []
        for n in range(self.numBands()):
            levArray.append(self.spyrBand(lev, n))
        levArray = numpy.array(levArray)
        return levArray
    def spyrBand(self, lev, band):
        """Return oriented band `band` of level `lev` (+1 skips the highpass)."""
        if lev < 0 or lev > self.spyrHt()-1:
            print 'Error: level parameter must be between 0 and %d!' % (self.spyrHt()-1)
            return
        if band < 0 or band > self.numBands()-1:
            print 'Error: band parameter must be between 0 and %d!' % (self.numBands()-1)
        # NOTE(review): unlike the level check above, the band-range error
        # prints but does NOT return, so an out-of-range band still indexes
        # below -- looks like a missing `return`; confirm intended behavior.
        return self.band( ((lev*self.numBands())+band)+1 )
    def spyrHt(self):
        """Number of levels: (total bands - highpass - lowpass) / bands per level.

        NOTE(review): uses `/`, i.e. integer division under Python 2 --
        relied upon for an int result.
        """
        if len(self.pyrSize) > 2:
            spHt = (len(self.pyrSize)-2)/self.numBands()
        else:
            spHt = 0
        return spHt
    def numBands(self):
        """Count oriented bands per level by counting how many entries after
        the highpass share the first oriented band's shape."""
        if len(self.pyrSize) == 2:
            # only highpass + lowpass stored: no oriented bands
            return 0
        else:
            b = 2
            # NOTE(review): `b <= len(self.pyrSize)` permits b == len, so
            # self.pyrSize[b] could raise IndexError on a degenerate pyramid
            # where every level has the same size -- confirm.
            while ( b <= len(self.pyrSize) and
                    self.pyrSize[b] == self.pyrSize[1] ):
                b += 1
            return b-1
    def pyrLow(self):
        """Return the lowpass residual (last stored band)."""
        return numpy.array(self.band(len(self.pyrSize)-1))
    def pyrHigh(self):
        """Return the highpass residual (first stored band)."""
        return numpy.array(self.band(0))
    def reconPyr(self, *args):
        """Reconstruct the image: reconPyr(filters, edges, levs, bands).

        args[0] -- filter set name (default 'sp1Filters').
        args[1] -- edge-handling mode for upConv (default 'reflect1').
        args[2] -- levels to use ('all' or list; 0 = highpass, ht+1 = lowpass).
        args[3] -- oriented bands to use ('all' or list).
        """
        # defaults
        if len(args) > 0:
            if args[0] == 'sp0Filters':
                filters = pyPyrUtils.sp0Filters()
            elif args[0] == 'sp1Filters':
                filters = pyPyrUtils.sp1Filters()
            elif args[0] == 'sp3Filters':
                filters = pyPyrUtils.sp3Filters()
            elif args[0] == 'sp5Filters':
                filters = pyPyrUtils.sp5Filters()
            elif os.path.isfile(args[0]):
                print "Filter files not supported yet"
                return
            else:
                print "filter %s not supported" % (args[0])
                return
        else:
            filters = pyPyrUtils.sp1Filters()
        lo0filt = filters['lo0filt']
        hi0filt = filters['hi0filt']
        lofilt = filters['lofilt']
        bfilts = filters['bfilts']
        steermtx = filters['mtx']  # NOTE(review): unused in this method
        # assume square filters -- start of buildSpyrLevs
        bfiltsz = int(math.floor(math.sqrt(bfilts.shape[0])))
        if len(args) > 1:
            edges = args[1]
        else:
            edges = 'reflect1'
        if len(args) > 2:
            levs = args[2]
        else:
            levs = 'all'
        if len(args) > 3:
            bands = args[3]
        else:
            bands = 'all'
        #---------------------------------------------------------
        # validate / normalize the requested levels and bands
        maxLev = 2 + self.spyrHt()
        if levs == 'all':
            levs = numpy.array(range(maxLev))
        else:
            levs = numpy.array(levs)
            if (levs < 0).any() or (levs >= maxLev).any():
                print "Error: level numbers must be in the range [0, %d]." % (maxLev-1)
                return
            else:
                levs = numpy.array(levs)
            if len(levs) > 1 and levs[0] < levs[1]:
                levs = levs[::-1]  # we want smallest first
        if bands == 'all':
            bands = numpy.array(range(self.numBands()))
        else:
            bands = numpy.array(bands)
            if (bands < 0).any() or (bands > bfilts.shape[1]).any():
                print "Error: band numbers must be in the range [0, %d]." % (self.numBands()-1)
                return
            else:
                bands = numpy.array(bands)
        # make a list of all pyramid layers to be used in reconstruction
        Nlevs = self.spyrHt()
        Nbands = self.numBands()
        reconList = []  # pyr indices used in reconstruction
        for lev in levs:
            if lev == 0:
                reconList.append(0)
            elif lev == Nlevs+1:
                # number of levels times number of bands + top and bottom
                # minus 1 for 0 starting index
                reconList.append( (Nlevs*Nbands) + 2 - 1)
            else:
                for band in bands:
                    reconList.append( ((lev-1) * Nbands) + band + 1)
        reconList = numpy.sort(reconList)[::-1]  # deepest level first
        # initialize reconstruction from the lowpass residual (or zeros)
        if len(self.pyr)-1 in reconList:
            recon = numpy.array(self.pyr[len(self.pyrSize)-1])
        else:
            recon = numpy.zeros(self.pyr[len(self.pyrSize)-1].shape)
        # recursive subsystem
        # we need to loop over recursive subsystem pairs
        for level in range(Nlevs):
            maxLevIdx = ((maxLev-2) * Nbands) + 1
            resSzIdx = maxLevIdx - (level * Nbands) - 1
            # upsample the running reconstruction to this level's size
            recon = pyPyrUtils.upConv(image = recon, filt = lofilt,
                                      edges = edges, step = (2,2),
                                      start = (0,0),
                                      stop = self.pyrSize[resSzIdx])
            bandImageIdx = 1 + (((Nlevs-1)-level) * Nbands)
            # add back each selected oriented band of this level
            for band in range(Nbands-1,-1,-1):
                if bandImageIdx in reconList:
                    filt = bfilts[:,(Nbands-1)-band].reshape(bfiltsz,
                                                             bfiltsz,
                                                             order='F')
                    recon = pyPyrUtils.upConv(image = self.pyr[bandImageIdx],
                                              filt = filt, edges = edges,
                                              stop = (self.pyrSize[bandImageIdx][0],
                                                      self.pyrSize[bandImageIdx][1]),
                                              result = recon)
                bandImageIdx += 1
        # apply lo0filt
        sz = recon.shape
        recon = pyPyrUtils.upConv(image = recon, filt = lo0filt,
                                  edges = edges, stop = sz)
        # apply hi0filt if needed
        if 0 in reconList:
            recon = pyPyrUtils.upConv(image = self.pyr[0], filt = hi0filt,
                                      edges = edges, start = (0,0),
                                      step = (1,1), stop = recon.shape,
                                      result = recon)
        return recon
#def showPyr(self, *args):
def showPyr(self, prange = 'auto2', gap = 1, scale = 2, disp = 'qt'):
ht = self.spyrHt()
nind = len(self.pyr)
nbands = self.numBands()
## Auto range calculations:
if prange == 'auto1':
prange = numpy.ones((nind,1))
band = self.pyrHigh()
mn = numpy.amin(band)
mx = numpy.amax(band)
for lnum in range(1,ht+1):
for bnum in range(nbands):
idx = pyPyrUtils.LB2idx(lnum, bnum, ht+2, nbands)
band = self.band(idx)/(numpy.power(scale,lnum))
prange[(lnum-1)*nbands+bnum+1] = numpy.power(scale,lnum-1)
bmn = numpy.amin(band)
bmx = numpy.amax(band)
mn = min([mn, bmn])
mx = max([mx, bmx])
prange = numpy.outer(prange, numpy.array([mn, mx]))
band = self.pyrLow()
mn = numpy.amin(band)
mx = numpy.amax(band)
prange[nind-1,:] = numpy.array([mn, mx])
elif prange == 'indep1':
prange = numpy.zeros((nind,2))
for bnum in range(nind):
band = self.band(bnum)
mn = band.min()
mx = band.max()
prange[bnum,:] = numpy.array([mn, mx])
elif prange == 'auto2':
prange = numpy.ones(nind)
band = self.pyrHigh()
sqsum = numpy.sum( numpy.power(band, 2) )
numpixels = band.shape[0] * band.shape[1]
for lnum in range(1,ht+1):
for bnum in range(nbands):
band = self.band(pyPyrUtils.LB2idx(lnum, bnum, ht+2, nbands))
band = band / numpy.power(scale,lnum-1)
sqsum += numpy.sum( numpy.power(band, 2) )
numpixels += band.shape[0] * band.shape[1]
prange[(lnum-1)*nbands+bnum+1] = numpy.power(scale, lnum-1)
stdev = numpy.sqrt( sqsum / (numpixels-1) )
prange = numpy.outer(prange, numpy.array([-3*stdev, 3*stdev]))
band = self.pyrLow()
av = numpy.mean(band)
stdev = numpy.sqrt( numpy.var(band) )
prange[nind-1,:] = numpy.array([av-2*stdev, av+2*stdev])
elif prange == 'indep2':
prange = numpy.zeros((nind,2))
for bnum in range(nind-1):
band = self.band(bnum)
stdev = numpy.sqrt( numpy.var(band) )
prange[bnum,:] = numpy.array([-3*stdev, 3*stdev])
band = self.pyrLow()
av = numpy.mean(band)
stdev = numpy.sqrt( numpy.var(band) )
prange[nind-1,:] = numpy.array([av-2*stdev, av+2*stdev])
elif isinstance(prange, basestring):
print "Error:Bad RANGE argument: %s'" % (prange)
elif prange.shape[0] == 1 and prange.shape[1] == 2:
scales = numpy.power(scale, range(ht))
scales | |
#! /usr/bin/env python
'''
.
ABOUT:
USAGE:
DEPENDS:
Python 2.5 or 2.6 (not version 3.x compatible)
EXITSTA:
0: No errors
AUTHOR :
<NAME>, for STScI
HISTORY:
May 15 2009: Initial Version
@author: <NAME>
'''
__author__ = '<NAME>'
__version__ = '0.9'
#Processes command line arguments
def process_args(just_print_help = False):
    """Define and parse the script's command-line options.

    Returns the (options, args) pair from OptionParser.parse_args(), or
    None when just_print_help is True (only the help text is printed).
    """
    from optparse import OptionParser
    usage = 'usage: %prog [options]'
    desc = 'This script can be used to ...'
    parser = OptionParser(usage = usage, version='%prog ' + __version__, description = desc)
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                      help="Verbose mode on.")
    parser.add_option("-i", "--input", dest="smsfile",
                      help='''User define string that is used to find text files to be processed.
User must specify any wild cards in this string e.g. "*SMS.txt".''',
                      metavar="string")
    parser.add_option('-d', '--debug', action='store_true', dest='debug',
                      help='debugging mode on.')
    parser.add_option('-t', '--text', action='store_true', dest='savetext',
                      help='Will output all text entries to ParsedText.ascii')
    if just_print_help:
        parser.print_help()
    else:
        return parser.parse_args()
def checkZeroArguments(opts):
    """Report whether at least one command-line option was actually given.

    Scans the attributes of the parsed options object and returns True as
    soon as any of them holds a non-None value, False otherwise.
    """
    return any(value is not None for value in opts.__dict__.values())
def testSTD(filename):
for line in open(filename, 'r'):
if len(line) != 71: return False
return True
def getKey(data):
    """Extract the 4-character record key found in columns 12-15 of *data*."""
    key_start, key_end = 12, 16
    return data[key_start:key_end]
def parseCalendar(string):
    '''
    Parses a calendar timestamp of the form "yyYdddDhhHmmM ss.sssS" and
    returns a (year, day, hour, minute, second) tuple of strings.

    Two-digit years 51-99 map to 19xx, everything else to 20xx.
    Raises AttributeError if the string does not match the pattern.
    '''
    import re
    cal_regex = '([0-9]+)Y([0-9]+)D([0-9]+)H([0-9]+)M([.0-9]+)S'
    # BUG FIX: the match object was bound to `subs` but referenced as
    # `sub`, so every call raised NameError; use one consistent name.
    subs = re.search(cal_regex, string)
    if int(subs.group(1)) > 50 and int(subs.group(1)) < 100:
        year = '19' + subs.group(1)
    else:
        year = '20' + subs.group(1)
    day = subs.group(2)
    hour = subs.group(3)
    minute = subs.group(4)
    second = subs.group(5)
    return year, day, hour, minute, second
#keys
def SMS(pos, line, file):
    '''
    SMSHDR parser.

    Extracts SMS_ID, CALENDAR, CREATED and PDB_ID from the SMSHDR record
    whose first line is `line` at position `pos`; the record may continue
    over several ";;"-continued lines.  Returns (SMS_ID, CALENDAR, CREATED,
    PDB_ID), where the two timestamps are parseCalendar() tuples when
    parseable and the raw strings otherwise.
    '''
    #SMS0001 :SMSHDR,SMS_ID(JRHW00A_),CALENDAR(01Y302D13H54M25.000S) ;;
    # ,CREATED(01Y302D13H54M25.000S),PDB_ID(JFLT200) ;;
    # ,START=1995Y129D00H00M00.000S,END=1995Y130D00H25M56.000S ;;
    # ;SMSTIME=1995.129:00:00:00.000
    #tests the line with the key
    # First harvest whatever fields appear on the header line itself.
    for x in line.strip().split()[1].split(','):
        if x.find('SMS_ID') > -1: SMS_ID = insideParenthesis(x)
        if x.find('CALENDAR') > -1: CALENDARRaw = insideParenthesis(x)
        if x.find('CREATED') > -1: CREATEDRaw = insideParenthesis(x)
        if x.find('PDB_ID') > -1: PDB_ID = insideParenthesis(x)
    # Then scan continuation lines until the next record ('SMS'/'9V2') starts.
    iterator = fileIterator(file)
    p = 0
    for l in iterator:
        if p == pos:
            while 1:
                newline = iterator.next()
                if newline.strip().startswith('SMS') or newline.strip().startswith('9V2'): break
                else:
                    for x in newline.strip().split()[0].split(','):
                        if x.find('CALENDAR') > -1: CALENDARRaw = insideParenthesis(x)
                        if x.find('SMS_ID') > -1: SMS_ID = insideParenthesis(x)
                        if x.find('CREATED') > -1: CREATEDRaw = insideParenthesis(x)
                        if x.find('PDB_ID') > -1: PDB_ID = insideParenthesis(x)
        # NOTE(review): check-then-increment order differs from the other
        # parsers below (they increment first) -- confirm intended.
        if p > pos : break
        p += 1
    # NOTE(review): if a field never appears, its local stays unbound and
    # the return raises UnboundLocalError -- confirm the format guarantees
    # all four fields.
    try:
        CALENDAR = parseCalendar(CALENDARRaw)
    except:
        CALENDAR = CALENDARRaw
    try:
        CREATED = parseCalendar(CREATEDRaw)
    except:
        CREATED = CREATEDRaw
    return SMS_ID, CALENDAR, CREATED, PDB_ID
def GROUP(pos, line, file, debug):
    '''
    Group parser.

    Collects GROUP/FUNC/TIME/SMSTIME/CMD/SAFETY fields (and any
    BEGINDATA..ENDDATA payload) for the GROUP record starting at position
    `pos`, following ";;" continuation lines.  Returns a dict of the
    fields found.
    '''
    result = {}
    cmd = []
    commandFound = False
    if debug: print 'GROUP function called:'
    iterator = fileIterator(file)
    first = True
    p = 0
    for l in iterator:
        if p == pos:
            while 1:
                newline = iterator.next()
                if first:
                    # Fields on the record's first line.
                    result['GROUP'] = line.strip().split()[1].split(',')[1]
                    for x in line.strip().split()[1].split(','):
                        if x.find('FUNC') > -1: result['FUNC'] = insideParenthesis(x)
                        if x.find('TIME') > -1 and x.find('SMSTIME') == -1:
                            # TIME may be split across the continuation line;
                            # retry on line+newline joined if needed.
                            tmp = insideParenthesis2('TIME=', line)
                            if tmp == False:
                                tline = line.replace(';;', '').strip() + newline.replace(';;', '').strip()
                                tmp2 = insideParenthesis2('TIME=', tline)
                                if tmp2 == False:
                                    result['TIME'] = afterEqual(x)
                                else:
                                    result['TIME'] = tmp2
                            else:
                                result['TIME'] = tmp
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
                        if x.find('CMD') > -1:
                            cmd.append(line.replace(';;', '').strip())
                            commandFound = True
                        if x.find('SAFETY') > -1: result['SAFETY'] = insideParenthesis(x)
                    first = False
                if newline.strip().startswith('SMS') or newline.strip().startswith('9V2'): break
                else:
                    # Fields on continuation lines.
                    for x in newline.strip().split()[0].split(','):
                        if x.find('FUNC') > -1: result['FUNC'] = insideParenthesis(x)
                        if x.find('TIME') > -1 and x.find('SMSTIME') == -1:
                            tmp = insideParenthesis2('TIME=', line)
                            if tmp == False:
                                tline = line.replace(';;', '').strip() + newline.replace(';;', '').strip()
                                tmp2 = insideParenthesis2('TIME=', tline)
                                if tmp2 == False:
                                    result['TIME'] = afterEqual(x)
                                else:
                                    result['TIME'] = tmp2
                            else:
                                result['TIME'] = tmp
                        if x.find('CMD') > -1:
                            cmd.append(newline.replace(';;', '').strip())
                            commandFound = True
                        if x.find('SAFETY') > -1: result['SAFETY'] = insideParenthesis(x)
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
                        if x.find('BEGINDATA') > -1:
                            # Collect raw payload lines until ENDDATA.
                            tmp = []
                            while 1:
                                nl = iterator.next()
                                if nl.find('ENDDATA') > -1:
                                    result['DATA'] = tmp
                                    break
                                tmp.append(nl.strip())
        p += 1
        if p > pos : break
    if commandFound: result['CMD'] = cmd
    return result
def CPMARK(pos, line, file, debug):
    '''
    CPMARK parser.

    Collects CPID/VECTOR/ADR/TIME/SMSTIME for the CPMARK record starting
    at position `pos` (including ";;" continuation lines) and returns a
    dict of the fields found.

    NOTE(review): the CPID value is stored under the key 'CIPD' -- looks
    like a transposition typo kept for downstream compatibility; confirm
    before fixing, since consumers may rely on the misspelled key.
    '''
    result = {}
    if debug: print 'CPMARK function called:'
    # Fields on the record's first line.
    for x in line.strip().split()[1].split(','):
        if x.find('CPID') > -1: result['CIPD'] = insideParenthesis(x)
        if x.find('VECTOR') > -1: result['VECTOR'] = insideParenthesis(x)
        if x.find('ADR') > -1: result['ADR'] = insideParenthesis(x)
        if x.find('TIME') > -1 and x.find('SMSTIME') == -1: result['TIME'] = afterEqual(x)
        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
    # Fields on continuation lines, up to the next record.
    iterator = fileIterator(file)
    p = 0
    for l in iterator:
        if p == pos:
            while 1:
                newline = iterator.next()
                if newline.strip().startswith('SMS') or newline.strip().startswith('9V2'): break
                else:
                    for x in newline.strip().split()[0].split(','):
                        if x.find('CPID') > -1: result['CIPD'] = insideParenthesis(x)
                        if x.find('VECTOR') > -1: result['VECTOR'] = insideParenthesis(x)
                        if x.find('ADR') > -1: result['ADR'] = insideParenthesis(x)
                        if x.find('TIME') > -1 and x.find('SMSTIME') == -1: result['TIME'] = afterEqual(x)
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
        p += 1
        if p > pos : break
    return result
def SLEW(pos, line, file, debug):
    '''
    SLEW parser.

    Collects APER_EID/CPNAME/END_DEC/END_PA/END_RA/TYPE/START/SMSTIME for
    the SLEW record starting at position `pos` (including ";;"
    continuation lines) and returns a dict of the fields found.
    '''
    result = {}
    if debug: print 'SLEW function called:'
    # Fields on the record's first line.
    for x in line.strip().split()[1].split(','):
        if x.find('APER_EID') > -1: result['APER_EID'] = insideParenthesis(x)
        if x.find('CPNAME') > -1: result['CPNAME'] = insideParenthesis(x)
        if x.find('END_DEC') > -1: result['END_DEC'] = insideParenthesis(x)
        if x.find('END_PA') > -1: result['END_PA'] = insideParenthesis(x)
        if x.find('END_RA') > -1: result['END_RA'] = insideParenthesis(x)
        if x.find('TYPE') > -1: result['TYPE'] = insideParenthesis(x)
        if x.find('START') > -1: result['START'] = insideParenthesis2('START=', line)
        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
    # Fields on continuation lines, up to the next record.
    iterator = fileIterator(file)
    p = 0
    for l in iterator:
        if p == pos:
            while 1:
                newline = iterator.next()
                if newline.strip().startswith('SMS') or newline.strip().startswith('9V2'): break
                else:
                    for x in newline.strip().split()[0].split(','):
                        if x.find('APER_EID') > -1: result['APER_EID'] = insideParenthesis(x)
                        if x.find('CPNAME') > -1: result['CPNAME'] = insideParenthesis(x)
                        if x.find('END_DEC') > -1: result['END_DEC'] = insideParenthesis(x)
                        if x.find('END_PA') > -1: result['END_PA'] = insideParenthesis(x)
                        if x.find('END_RA') > -1: result['END_RA'] = insideParenthesis(x)
                        if x.find('TYPE') > -1: result['TYPE'] = insideParenthesis(x)
                        if x.find('START') > -1: result['START'] = insideParenthesis2('START=', newline)
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
        p += 1
        if p > pos : break
    return result
def TABLE(pos, line, file, debug):
    '''
    TABLE parser.

    Collects the table name plus DEFAULT/TIME/SMSTIME fields for the TABLE
    record starting at position `pos` and returns a dict.  Example input:

    :TABLE,LOSM,GRATING(G185M),LAMBDA(1850),OFFSET(0) ;;
    ,TIME=(ORB,99950,EASCNCR,04H25M12.000S) ;;
    ;SMSTIME=2008.221:11:07:13.000
    9V20601B :TABLE,LAPER,APERTURE(PSA),DET(FUV),TIME=(ORB,99950 ;;
    ,EASCNCR,04H27M40.000S)
    '''
    result = {}
    if debug: print 'TABLE function called:'
    iterator = fileIterator(file)
    first = True
    p = 0
    for l in iterator:
        if p == pos:
            while 1:
                newline = iterator.next()
                # table name is the token right after 'TABLE'
                result['TABLE'] = line.strip().split()[1].split(',')[1]
                if first:
                    # Fields on the record's first line.
                    for x in line.strip().split()[1].split(','):
                        if x.find('DEFAULT') > -1: result['DEFAULT'] = x
                        if x.find('TIME') > -1 and x.find('SMSTIME') == -1:
                            # TIME may span the continuation line.
                            tmp = insideParenthesis2('TIME=', line)
                            if tmp == False:
                                tline = line.replace(';;', '').strip() + newline.replace(';;', '').strip()
                                result['TIME'] = insideParenthesis2('TIME=', tline)
                            else:
                                result['TIME'] = tmp
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
                    first = False
                if newline.strip().startswith('SMS') or newline.strip().startswith('9V2'): break
                else:
                    # Fields on continuation lines.
                    for x in newline.strip().split()[0].split(','):
                        if x.find('DEFAULT') > -1: result['DEFAULT'] = x
                        if x.find('TIME') > -1 and x.find('SMSTIME') == -1:
                            tmp = insideParenthesis2('TIME=', newline)
                            if tmp == False:
                                tline = newline.replace(';;', '').strip() + iterator.next().replace(';;', '').strip()
                                result['TIME'] = insideParenthesis2('TIME=', tline)
                            else:
                                result['TIME'] = tmp
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
        p += 1
        if p > pos : break
    return result
def AUTO(pos, line, file, debug):
    '''
    AUTO parser.

    Collects EVENT/STATE/TIME/SMSTIME fields for the AUTO record starting
    at position `pos` (including ";;" continuation lines) and returns a
    dict of the fields found.
    '''
    result = {}
    first = True
    if debug: print 'AUTO function called:'
    iterator = fileIterator(file)
    p = 0
    for l in iterator:
        if p == pos:
            while 1:
                newline = iterator.next()
                if first:
                    # Fields on the record's first line.
                    for x in line.strip().split()[1].split(','):
                        if x.find('EVENT') > -1: result['EVENT'] = insideParenthesis(x)
                        if x.find('STATE') > -1: result['STATE']= insideParenthesis(x)
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
                        if x.find('TIME') > -1 and x.find('SMSTIME') == -1: result['TIME'] = afterEqual(x.replace(';;', '').strip())
                    first = False
                if newline.strip().startswith('SMS') or newline.strip().startswith('9V2'): break
                else:
                    # Fields on continuation lines.
                    for x in newline.strip().split()[0].split(','):
                        if x.find('EVENT') > -1: result['EVENT'] = insideParenthesis(x)
                        if x.find('STATE') > -1: result['STATE']= insideParenthesis(x)
                        if x.find('SMSTIME') > -1: result['SMSTIME'] = afterEqual(x)
                        if x.find('TIME') > -1 and x.find('SMSTIME') == -1: result['TIME'] = afterEqual(x.replace(';;', '').strip())
        p += 1
        if p > pos : break
    return result
def GSACQ(pos, | |
None] - crat4[:, :, None] * ssinwt)
ca = np.sqrt((wm[:, :, None] - wp[:, :, None] * ceiwt) / 2 / (wp[:, :, None] - wm[:, :, None] * ceiwt))
cpsi = np.sqrt(1 / psipre[:, :, None] / (1 - psirat[:, :, None] * ceiwt2))
cf = -(wg[-3:, None] * (1 - ceiwt)) / (wp[:, :, None] - wm[:, :, None] * ceiwt)
cesum = rat[:, :, None] * cf * delta[:, -3:, None] / ca
cherm = np.ones((2, 3, 9, ntime))
cherm[:, :, 1] = 2 * np.real(cesum)
for i in range(2):
for j in range(3):
for k in range(2, 9):
cherm[i, j, k] = 2 * (np.real(cesum[i, j]) * cherm[i, j, k - 1] - (k - 1) * cherm[i, j, k - 2])
c0 = cpsi * np.exp(delta2[:, -3:, None] * cf)
covlp = np.zeros((2, 3, 6, 4, ntime), dtype = complex)
pre = 1 / np.sqrt(fact(time_points[:4])[:, None] * fact(time_points[:6]) * 2 ** (time_points[:4, None] + time_points[:6]))
kstar = (time_points[:4, None] + time_points[:6]) // 2
kstar = np.rint(kstar).astype(int)
d1 = du[-3:] / np.sqrt(2)
d2 = d1 ** 2
si = np.sqrt(time_points[:3])
sip = np.sqrt(time_points[1:4])
jj = np.arange(1, 4)
jj = jj[:, None] + jj - 1
sf = np.sqrt(jj - 1)
sfp = np.sqrt(jj)
covlp2 = np.zeros((2, 3, 3, 3, ntime), dtype = complex)
for i in range(2):
for j in range(3):
for k in range(4):
for l in range(6):
for m in range(kstar[k, l] + 1):
kl = k + l
prek = fact(2 * m) / fact(m)
prek *= pre[k, l] * eta(l, k, m)
cadd = prek * c0[i, j] * ca[i, j] ** kl * cg[i, j] ** m * cherm[i, j, kl - 2 * m]
covlp[i, j, l, k] += cadd
for k in range(3):
for l in range(3):
cadd = np.zeros(ntime, dtype = complex)
cad2 = np.copy(cadd)
if jj[k, l] > 1:
cadd = sf[k, l] * covlp[i, j, jj[k, l] - 2, k]
if k > 0:
cadd += si[k] * covlp[i, j, jj[k, l] - 1, k - 1]
cadd += sfp[k, l] * covlp[i, j, jj[k, l], i] + sip[k] * covlp[i, j, jj[k, l] - 1, k + 1]
if k > 0 and jj[k, l] > 1:
cad2 = sf[k, l] * si[k] * covlp[i, j, jj[k, l] - 2, k - 1]
if jj[k, l] > 1:
cad2 += sf[k, l] * sip[k] * covlp[i, j, jj[k, l] - 2, k + 1]
if k > 0:
cad2 += si[k] * sfp[k, l] * covlp[i, j, jj[k, l], k - 1]
cad2 += sfp[k, l] * sip[k] * covlp[i, j, jj[k, l], k + 1]
covlp2[i, j, k, l] = covlp[i, j, jj[k, l] - 1, k] + d1[i, j] * cadd + d2[i, j] * cad2
# simpov subroutine in fortran source code
# calculates time-dependent overlap for mode
# with equal ground and excited state frequency
S = delta2[:, :-3] / 2
sqrtS = -delta[:, :-3] / np.sqrt(2)
cinc = np.exp(-1j * delth) ** wg[:-3]
ceiwt = cinc[:, None] ** time_points
# calculate <0|0(t)> through <4|0(t)>
covlp = np.zeros((2, nmode - 3, 5, ntime), dtype = complex)
covlpq = np.zeros((2, nmode - 3, 3, ntime), dtype = complex)
ce = 1 - ceiwt
ct = np.exp(-S[:, :, None] * ce)
covlp[:, :, 0] = ct
covlp[:, :, 1] = -sqrtS[:, :, None] * ce * ct
covlp[:, :, 2] = S[:, :, None] * (-ce) ** 2 * ct / np.sqrt(2)
covlp[:, :, 3] = -(S * sqrtS)[:, :, None] * ce ** 3 * ct / np.sqrt(6)
covlp[:, :, 4] = S[:, :, None] ** 2 * ce ** 4 / 2 / np.sqrt(6)
covlpq[:, :, 0] = covlp[:, :, 1]
covlpq[:, :, 1] = (S[:, :, None] * ce ** 2 + ceiwt) * ct
covlpq[:, :, 2] = -(S[:, :, None] + ce ** 2 + 2 * ceiwt) * ce * sqrtS[:, :, None] * ct / np.sqrt(2)
for i in range(2):
for j in range(nmode - 3):
if wg[j] != we[i, j]:
# kompov subroutine in frotran source code
# calculates time-dependent overlap for non-duschinsky rotated mode
# with different ground and excited state frequencies. negative ex.
# state frequency is interpreted as imaginary freq.
covlp[i, j] = 0
covlpq[i, j] = 0
S = -delta[i, j]
ceiwt = np.zeros((2, nmode - 3, ntime), dtype = complex)
if we[i, j] < 0:
# calculate cos(wt) and sin(wt) for imaginary w
einc = np.exp(-we[i, j] * delth)
einc2 = np.exp(we[i, j] * delth)
cqt = einc ** time_points
ceiwt[i, j] = einc2 ** time_points
coswt = (cqt + ceiwt[i, j]) / 2
csinwt = 1j * (cqt - ceiwt[i, j]) / 2
cwwe = -1j * we[i, j]
elif we[i, j] > 0:
# calculate cos(wt) and sin(wt) for real w
einc = np.cos(we[i, j] * delth)
einc2 = np.sin(we[i, j] * delth)
coswt = np.ones(ntime, dtype = complex)
csinwt = np.zeros(ntime, dtype = complex)
for k in range(1, ntime):
coswt[k] = einc * coswt[k - 1] - einc2 * csinwt[k - 1]
csinwt[k] = einc2 * coswt[k - 1] + einc * csinwt[k - 1]
cwwe = we[i, j] + 0j
# calculate q(t) [cqt], p(t) [cpt], a(t) [cat], part of g(t) [cgt]
cqt = S * (1 - coswt)
cpt = S * cwwe * csinwt / wg[j]
cat = (coswt + 1j * cwwe * csinwt / wg[j]) / 2
cat /= coswt + 1j * csinwt * wg[j] / cwwe
cgt = 1j * cat * cqt ** 2 - cpt * (cqt + S) / 2
if we[i, j] > 0:
cgt += th * cwwe / 2
# put det(z) into coswt array
coswt += 1j * csinwt * wg[j] / cwwe
# evaluate ln(det z) and add to g(t)
nphase = 0
rxold = 1
realx = np.real(coswt)
realy = np.imag(coswt)
theta = np.arctan(realy / realx)
cinc = 1j * np.log(realx ** 2 + realy ** 2) / 4
for k in range(ntime):
if realx[k] / rxold < 0:
nphase += 1
rxold = realx[k]
cinc[k] -= (theta[k] + nphase * pi) / 2
cgt[k] += cinc[k]
# calculate p prime and a
cpt -= 2j * cat * cqt
cat += 0.5
# calculate overlaps <0|0(t)> through <4|0(t)>
cinc = np.exp(1j * cgt - cpt ** 2 / cat / 4)
covlp[i, j, 0] = cinc / np.sqrt(cat)
covlp[i, j, 1] = -1j * cpt * covlp[i, j, 0] / (cat * np.sqrt(2))
cinc = (cpt / cat) ** 2 + 2 * (1 - 1 / cat)
covlp[i, j, 2] = -cinc * covlp[i, j, 0]/ 2 / np.sqrt(2)
cinc = cpt / cat
cinc *= cinc ** 2 / 6 + 1 - 1 / cat
covlp[i, j, 3] = 1j * np.sqrt(3) * cinc * covlp[i, j, 0] / 2
cinc = (cpt / cat) ** 2
cinc *= cinc + 12 * (1 - 1 / cat)
cinc += 12 * (1 - 1 / cat) ** 2
covlp[i, j, 4] = covlp[i, j, 0] * cinc / 8 / np.sqrt(6)
ct1 = du[:, :-3, None] * (covlp[:, :, 1] + covlpq[:, :, 0]) / np.sqrt(2)
ct2 = du[:, :-3, None] * (covlp[:, :, 2] + (covlpq[:, :, 0] + covlpq[:, :, 1]) / np.sqrt(2))
ct3 = du[:, :-3, None] * (np.sqrt(3 / 2) * covlp[:, :, 3] + covlp[:, :, 1] + covlpq[:, :, 2] / np.sqrt(2))
covlp[:, :, 0] += ct1
covlp[:, :, 1] += ct2
covlp[:, :, 2] += ct3
##### set up the time integrals #####
cat = u[:, None] ** 2 * np.exp(-1j * (e0 + reorg)[:, None] * th) * cdamp
################################
##### raman cross sections #####
################################
spectrum = np.zeros((1000, nline))
xfreq = np.linspace(alow, ahigh, num = 1000, endpoint = True)
part = 0
for i1 in range(3):
for i2 in range(3):
for i3 in range(3):
ei = i1 * wg[-3] + i2 * wg[-2] + | |
collect a small handful of RGBDWithPose msgs
- call the FindBestMatch service (a service of pdc-ros)
- return what was found from FindBestMatch
"""
# self.moveHome()
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
listOfRgbdWithPoseMsg = [rgbdWithPoseMsg]
self.list_rgbd_with_pose_msg = listOfRgbdWithPoseMsg
# request via a ROS Action
rospy.loginfo("waiting for poser server")
self.poser_client.wait_for_server()
rospy.loginfo("connected to poser server")
goal = pdc_ros_msgs.msg.DeformableRegistrationGoal()
goal.rgbd_with_pose_list = listOfRgbdWithPoseMsg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
rospy.loginfo("requesting registration from poser")
self.poser_client.send_goal(goal)
self.moveHome()
rospy.loginfo("waiting for poser result")
self.poser_client.wait_for_result()
result = self.poser_client.get_result()
state = self.poser_client.get_state()
rospy.loginfo("received poser result")
print("result:\n", result)
succeeded = (state == GoalStatus.SUCCEEDED)
if not succeeded:
rospy.loginfo("Poser failed")
self.poser_result = result
self._cache['poser_result'] = result
result_dict = dict()
result_dict['result'] = result
result_dict['output_dir'] = result.output_dir
result_dict['state'] = state
result_dict['succeeded'] = succeeded
result_dict['type'] = "mankey"
self._cache["keypoint_detection_result"] = result_dict
self.taskRunner.callOnMain(self.visualize_poser_result)
def run_keypoint_detection(self, wait_for_result=True, move_to_stored_pose=True, clear_state=True):
    """
    Runs keypoint detection using ManKey in pdc-ros. Note that this clears the cache
    (and the state machine) when `clear_state` is True.

    :param wait_for_result: if True, block until the action result arrives
    :param move_to_stored_pose: if True, move to a stored capture pose before capturing
    :param clear_state: if True, clear both the legacy cache and `self.state`
    :return:
    :rtype:
    """
    if clear_state:
        self._clear_cache()
        self.state.clear()
    if move_to_stored_pose:
        CMT = CategoryManipulationType
        q = self._stored_poses_director["General"]["home"]  # for mugs
        # NOTE(review): the assignment above is always overwritten by the
        # if/else below, and the else branch picks the same "home" pose
        if MANIP_TYPE in [CMT.SHOE_ON_RACK, CMT.SHOE_ON_TABLE]:
            q = self._stored_poses_director['General']['center_back']
        else:  # basically all mugs
            q = self._stored_poses_director["General"]["home"]
        self.robotService.moveToJointPosition(q,
                                              maxJointDegreesPerSecond=self.graspingParams['speed']['fast'])
    # capture one RGBD frame + camera transform and (re)seed the capture list
    rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
    self.state.cache['rgbd_with_pose_list'] = []
    self.state.cache['rgbd_with_pose_list'].append(rgbdWithPoseMsg)
    # request via a ROS Action
    rospy.loginfo("waiting for KeypointDetection server")
    self.keypoint_detection_client.wait_for_server()
    rospy.loginfo("connected to KeypointDetection server")
    goal = pdc_ros_msgs.msg.KeypointDetectionGoal()
    goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
    goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
    if EXPERIMENT_MODE:
        # timestamped output dir so experiment runs don't overwrite each other
        goal.output_dir = "mankey_experiments/%s" %(spartanUtils.get_current_YYYY_MM_DD_hh_mm_ss())
    rospy.loginfo("requesting action from KeypointDetection server")
    self.keypoint_detection_client.send_goal(goal)
    self.state.set_status("ABOVE_TABLE")
    if wait_for_result:
        self.wait_for_keypoint_detection_result()
def wait_for_keypoint_detection_result(self):
    """
    Wait for the keypoint detection result and save it to the caches.

    Blocks on the action client, then stores a summary dict under
    "keypoint_detection_result" in both `self._cache` and `self.state._cache`.

    :return: dict with keys 'result', 'output_dir', 'state', 'succeeded', 'type'
    """
    rospy.loginfo("waiting for KeypointDetection result")
    self.keypoint_detection_client.wait_for_result()
    result = self.keypoint_detection_client.get_result()
    state = self.keypoint_detection_client.get_state()
    rospy.loginfo("received KeypointDetection result")
    print "result:\n", result
    self.keypoint_detection_result = result
    succeeded = (state == GoalStatus.SUCCEEDED)
    if not succeeded:
        # failure is logged but not raised; callers check 'succeeded'/'state'
        rospy.loginfo("KeypointDetection failed")
    result_dict = dict()
    result_dict['result'] = result
    result_dict['output_dir'] = result.output_dir
    result_dict['state'] = state
    result_dict['succeeded'] = succeeded
    result_dict['type'] = "mankey"
    # stored in both the legacy cache and the state cache
    self._cache["keypoint_detection_result"] = result_dict
    self.state._cache["keypoint_detection_result"] = result_dict
    return result_dict
def check_keypoint_detection_succeeded(self):
    """
    Checks whether keypoint detection succeeded or not.

    Reads the cached result saved by `wait_for_keypoint_detection_result`,
    so keypoint detection must have been run beforehand.

    :return: True if the cached action state is SUCCEEDED, False otherwise
    :rtype: bool
    """
    cached = self.state.cache['keypoint_detection_result']
    succeeded = (cached["state"] == GoalStatus.SUCCEEDED)
    if not succeeded:
        print("keypoint detection failed, ABORTING")
    return succeeded
def check_category_goal_estimation_succeeded(self):
    """
    Returns a bool as to whether category goal estimation succeeded or not.

    Reads the state cached by `wait_for_category_manipulation_goal_result`.

    :return: True if the cached goal state is SUCCEEDED, False otherwise
    :rtype: bool
    """
    goal_state = self.state.cache['category_manipulation_goal']['state']
    ok = (goal_state == GoalStatus.SUCCEEDED)
    if not ok:
        print("category goal estimation failed, ABORTING")
    return ok
def estimate_mug_rack_pose(self):
    """
    Estimate the pose of the mug rack.

    Captures point clouds from two stored viewpoints over the left table,
    fuses them in world frame and crops to a hard-coded bounding box, then
    visualizes the result. NOTE(review): the function returns right after
    scheduling the visualization — everything after the first `return` below
    (saving the cloud and calling the pose-estimation action server) is
    currently dead code.

    :return:
    :rtype:
    """
    # fusion_params_file = os.path.join(spartanUtils.getSpartanSourceDir(), "src/catkin_projects/station_config/RLG_iiwa_1/fusion/fusion_params.yaml")
    #
    #
    # fusion_params = spartanUtils.getDictFromYamlFilename(fusion_params_file)
    # bbox_min = np.array(fusion_params['left']['bbox_min'])
    # bbox_min[2] += 0.05 # be conservative on where bottom of table is
    # bbox_max = np.array(fusion_params['left']['bbox_max'])
    # hard-coded world-frame crop box (meters); replaces the commented-out
    # fusion-params lookup above
    bbox_min = np.array([0.07001, 0.49, 0.01026])
    bbox_max = np.array([0.47195, 0.85201, 0.75])
    rgbd_with_pose_list = []
    # move to pose 1, capture RGBD
    q = self._stored_poses_director["left_table"]["look_at_rack"]
    speed = self.graspingParams["speed"]["fast"]
    self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
    rgbd_with_pose = self.captureRgbdAndCameraTransform()
    rgbd_with_pose_list.append(rgbd_with_pose)
    # move to pose 2, capture RGBD
    q = self._stored_poses_director["left_table"]["look_at_rack_2"]
    speed = self.graspingParams["speed"]["fast"]
    self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
    rgbd_with_pose = self.captureRgbdAndCameraTransform()
    rgbd_with_pose_list.append(rgbd_with_pose)
    # convert to VTK poly data, transform each cloud into world frame, and fuse
    d = DebugData()
    for msg in rgbd_with_pose_list:
        pointcloud_numpy = DirectorROSVisualizer.numpy_from_pointcloud2_msg(msg.point_cloud)
        pointcloud_vtk = vnp.getVtkPolyDataFromNumpyPoints(pointcloud_numpy)
        T_world_pointcloud = ros_numpy.numpify(msg.point_cloud_pose.transform)
        T_world_pointcloud_vtk = transformUtils.getTransformFromNumpy(T_world_pointcloud)
        pointcloud_vtk = filterUtils.transformPolyData(pointcloud_vtk, T_world_pointcloud_vtk)
        d.addPolyData(pointcloud_vtk)
    pointcloud = d.getPolyData()
    print "pointcloud.GetNumberOfPoints()", pointcloud.GetNumberOfPoints()
    # crop to the rack bounding box
    transform = vtk.vtkTransform()
    bounds = np.zeros([2,3])
    bounds[0,:] = bbox_min
    bounds[1,:] = bbox_max
    print "bounds", bounds
    cropped_pointcloud = segmentation.cropToBounds(pointcloud, transform, bounds)
    print "cropped_pointcloud.GetNumberOfPoints()", cropped_pointcloud.GetNumberOfPoints()
    # visualize it
    def vis_function():
        print "visualizing pointcloud"
        vis.showPolyData(pointcloud, "pointcloud")
        vis.showPolyData(cropped_pointcloud, "Mug rack pointcloud")
        self.mug_rack_pointcloud = cropped_pointcloud
    # not working for some reason
    print "visualizing"
    self.taskRunner.callOnMain(vis_function)
    return
    # --- dead code below: early return above was presumably left in during debugging ---
    rgbd_with_pose = pdc_ros_msgs.msg.RGBDWithPose()
    # N x 3
    cropped_pointcloud_numpy = vnp.getNumpyFromVtk(cropped_pointcloud)
    print "cropped_pointcloud_numpy.shape", cropped_pointcloud_numpy.shape
    # save numpy to file
    save_file = "/home/manuelli/sandbox/spartan/pointcloud.npy"
    np.save(save_file, cropped_pointcloud_numpy)
    return
    # --- also dead: the actual pose-estimation action request ---
    # it's already in world frame
    rgbd_with_pose.point_cloud = DirectorROSVisualizer.pointcloud2_msg_from_numpy(cropped_pointcloud_numpy)
    # convert it back to ROS msg
    goal = pdc_ros_msgs.msg.EstimatePoseGoal()
    goal.rgbd_with_pose_list.append(rgbd_with_pose)
    T_world_rack_vtk = self._category_manip.mug_rack_vis_obj.getChildFrame().transform
    T_world_rack = transformUtils.getNumpyFromTransform(T_world_rack_vtk)
    goal.T_init = ros_numpy.msgify(geometry_msgs.Pose, T_world_rack)
    # send out service call
    self.pose_estimation_client.wait_for_server()
    self.pose_estimation_client.send_goal(goal)
    # wait for result
    self.pose_estimation_client.wait_for_result()
    result = self.pose_estimation_client.get_result()
    T_world_rack_estimated = ros_numpy.numpify(result.T_world_model)
    T_world_rack_estimated_vtk = transformUtils.getTransformFromNumpy(T_world_rack_estimated)
    self._category_manip.mug_rack_vis_obj.getChildFrame().copyFrame(T_world_rack_estimated_vtk)
def run_category_manipulation_goal_estimation(self, wait_for_result=True, capture_rgbd=True):
    """
    Calls the CategoryManipulation action of pdc-ros
    which is provided by category_manip_server.py.
    Uses the keypoint detection result from either
    `run_poser` or `run_keypoint_detection`.

    :param wait_for_result: if True, block until the action result arrives
    :param capture_rgbd: if True, move home and capture one extra RGBD frame first
    :return: False if keypoint detection hadn't succeeded, True otherwise
    :rtype: bool
    """
    if not self.check_keypoint_detection_succeeded():
        return False

    keypoint_detection_result = self.state.cache['keypoint_detection_result']

    # don't specify poser output dir for now
    goal = pdc_ros_msgs.msg.CategoryManipulationGoal()
    goal.output_dir = keypoint_detection_result['output_dir']
    goal.keypoint_detection_type = keypoint_detection_result['type']

    if capture_rgbd:
        self.moveHome()
        rgbd_with_pose = self.captureRgbdAndCameraTransform()
        self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)

    # BUGFIX: previously the list was read unconditionally here and then read
    # again behind this membership guard; the unguarded read raised KeyError
    # whenever no capture had populated the cache. Keep only the guarded read.
    if 'rgbd_with_pose_list' in self.state.cache:
        goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']

    if MANIP_TYPE == CategoryManipulationType.SHOE_ON_RACK:
        print("applying T_adjust")
        print("self._shoe_manipulation_counter", self._shoe_manipulation_counter)
        goal.apply_T_adjust = True
        # shift each successive shoe along x so placements don't collide
        pos = np.array([self.graspingParams["shoe_offset"], 0, 0]) * self._shoe_manipulation_counter
        quat = [1, 0, 0, 0]  # identity rotation
        T_adjust_vtk = transformUtils.transformFromPose(pos, quat)
        T_adjust = transformUtils.getNumpyFromTransform(T_adjust_vtk)
        goal.T_adjust = ros_numpy.msgify(geometry_msgs.msg.Pose, T_adjust)
    else:
        goal.apply_T_adjust = False

    rospy.loginfo("waiting for CategoryManip server")
    self.category_manip_client.wait_for_server()
    rospy.loginfo("connected to CategoryManip server")
    self.category_manip_client.send_goal(goal)

    if wait_for_result:
        self.wait_for_category_manipulation_goal_result()

    return True
def wait_for_category_manipulation_goal_result(self):
    """
    Waits for the category manipulation goal result and caches it.

    Stores a dict under state.cache['category_manipulation_goal'] with keys
    'result', 'T_goal_obs' (as a VTK transform), 'state' and 'type'.
    """
    print("waiting for category manipulation result")
    self.category_manip_client.wait_for_result()
    result = self.category_manip_client.get_result()
    state = self.category_manip_client.get_state()
    # transform taking the observed object pose to the goal pose
    T_goal_obs = ros_numpy.numpify(result.T_goal_obs)
    print "T_goal_obs:\n", T_goal_obs
    T_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_goal_obs)
    print transformUtils.poseFromTransform(T_goal_obs_vtk)
    self.state.cache['category_manipulation_goal'] = dict()
    self.state.cache['category_manipulation_goal']['result'] = result
    self.state.cache['category_manipulation_goal']["T_goal_obs"] = T_goal_obs_vtk
    self.state.cache['category_manipulation_goal']['state'] = state
    self.state.cache['category_manipulation_goal']["type"] = CategoryManipulationType.from_string(result.category_manipulation_type)
def run_mug_shelf_3D_pipeline(self):
    """
    Runs entire pipeline for mug shelf 3D:
    keypoint detection (with a second capture from home), category
    manipulation goal estimation, manipulation, then retract.

    :return: False on any stage failure (status is set accordingly), else None
    :rtype:
    """
    self.state.clear()
    self._clear_cache()
    # move home
    speed = self.graspingParams['speed']['fast']
    # NOTE(review): named "super_fast" but reads the 'fast' setting — confirm intended
    super_fast_speed = self.graspingParams['speed']['fast']
    # q = self._stored_poses_director["General"]["home"]
    # q = self._stored_poses_director["mug"]["image_capture_for_mug_shelf"]
    q = self._stored_poses_director["General"]["center_back"]
    self.robotService.moveToJointPosition(q,
                                          maxJointDegreesPerSecond=super_fast_speed)
    # kick off keypoint detection without blocking so we can capture a second view
    self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
    # run keypoint detection
    # move to center back to capture another RGBD image
    q = self._stored_poses_director["General"]["home"]
    self.robotService.moveToJointPosition(q,
                                          maxJointDegreesPerSecond=super_fast_speed)
    rgbd_with_pose = self.captureRgbdAndCameraTransform()
    self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
    self.wait_for_keypoint_detection_result()
    if not self.check_keypoint_detection_succeeded():
        self.state.set_status("FAILED")
        return False
    # run category manip
    code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
    if not code:
        self.state.set_status("FAILED")
        return False
    self.wait_for_category_manipulation_goal_result()
    if not self.check_category_goal_estimation_succeeded():
        self.state.set_status("PLANNING_FAILED")
        return False
    # run the manipulation
    # need safety checks in there before running autonomously
    code = self.run_mug_shelf_manipulation()
    if not (code == True):
        self.state.set_status("FAILED")
        return False
    # if the place was successful then retract
    self.retract_from_mug_shelf()
    if EXPERIMENT_MODE:
        output_dir = self.state.cache['keypoint_detection_result']['output_dir']
        print "\n\n", os.path.split(output_dir)[1]
def run_mug_on_rack_pipeline(self, side_view=False):
    """
    Runs entire pipeline for mug on rack:
    keypoint detection (two viewpoints), category manipulation goal
    estimation, then the mug-on-rack manipulation.

    :param side_view: if True, swap the two capture viewpoints
        (start at center_back, second capture from home)
    :return: False on any stage failure (status is set accordingly), else None
    :rtype:
    """
    self.state.clear()
    self._clear_cache()
    # move home
    speed = self.graspingParams['speed']['fast']
    q = self._stored_poses_director["General"]["home"]
    if side_view:
        print "\nusing side view\n"
        q = self._stored_poses_director["General"]["center_back"]
    self.robotService.moveToJointPosition(q,
                                          maxJointDegreesPerSecond=speed)
    # run keypoint detection
    self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
    # NOTE(review): the result is awaited *before* the second capture below,
    # so the extra frame is only used by the later category-manip request
    self.wait_for_keypoint_detection_result()
    # move to center back to capture another RGBD image
    q = self._stored_poses_director["General"]["center_back"]
    if side_view:
        q = self._stored_poses_director["General"]["home"]
    self.robotService.moveToJointPosition(q,
                                          maxJointDegreesPerSecond=speed)
    rgbd_with_pose = self.captureRgbdAndCameraTransform()
    self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
    # return home before planning/manipulation
    q = self._stored_poses_director["General"]["home"]
    self.robotService.moveToJointPosition(q,
                                          maxJointDegreesPerSecond=speed)
    if not self.check_keypoint_detection_succeeded():
        self.state.set_status("FAILED")
        return False
    # run category manip
    code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
    if not code:
        self.state.set_status("FAILED")
        return False
    self.wait_for_category_manipulation_goal_result()
    if not self.check_category_goal_estimation_succeeded():
        self.state.set_status("PLANNING_FAILED")
        return False
    # run the manipulation
    # need safety checks in there before running autonomously
    code = self.run_mug_on_rack_manipulation()
    if not (code == True):
        self.state.set_status("FAILED")
        return False
    if EXPERIMENT_MODE:
        output_dir = self.state.cache['keypoint_detection_result']['output_dir']
        print "\n\n", os.path.split(output_dir)[1]
def run_shoe_on_rack_pipeline(self):
    """
    Runs entire pipeline for shoe on rack:
    keypoint detection, category manipulation goal estimation, the
    shoe-rack manipulation, then retract.

    :return: False on any stage failure (status is set accordingly), else None
    :rtype:
    """
    if EXPERIMENT_MODE:
        self._shoe_manipulation_counter = 0 # for testing
    self.state.clear()
    self._clear_cache()
    # move home
    speed = self.graspingParams['speed']['fast']
    # q = self._stored_poses_director["General"]["center_back"]
    q = self._stored_poses_director["General"]["home"]
    self.robotService.moveToJointPosition(q,
                                          maxJointDegreesPerSecond=speed)
    # run keypoint detection
    self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
    self.wait_for_keypoint_detection_result()
    if not self.check_keypoint_detection_succeeded():
        self.state.set_status("FAILED")
        return False
    # run category manip
    code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
    if not code:
        self.state.set_status("FAILED")
        return False
    self.wait_for_category_manipulation_goal_result()
    if not self.check_category_goal_estimation_succeeded():
        self.state.set_status("PLANNING_FAILED")
        return False
    # run the manipulation
    # need safety checks in there before running autonomously
    code = self.run_shoe_rack_manipulation()
    if not code:
        self.state.set_status("FAILED")
        return False
    # if the place was successful then retract
    self.retract_from_shoe_rack()
    if EXPERIMENT_MODE:
        print "\n\n", self.state.cache['keypoint_detection_result']['output_dir']
def run_manipulate_object(self, debug=False):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
# self.taskRunner.callOnMain(self._poser_visualizer.visualize_result)
if not self.check_category_goal_estimation_succeeded():
return False
if debug:
self._object_manipulation = ObjectManipulation()
self._object_manipulation.assign_defaults()
self._object_manipulation.compute_transforms()
return
self.moveHome()
grasp_found, grasp_data = self.request_spartan_grasp(clear_state=False)
if not grasp_found:
print "no grasp found, returning\n"
return False
# execute the grasp
object_in_gripper = self.execute_grasp(self.state.grasp_data, close_gripper=True, use_cartesian_plan=True)
print "object_in_gripper:", object_in_gripper
T_goal_obs = self.state.cache['category_manipulation_T_goal_obs']
T_W_G = self.state.cache['gripper_frame_at_grasp']
self._object_manipulation = ObjectManipulation(T_goal_object=T_goal_obs, T_W_G=T_W_G)
self._object_manipulation.grasp_data = self.state.grasp_data
self._object_manipulation.compute_transforms()
self.taskRunner.callOnMain(self._object_manipulation.visualize)
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
if not object_in_gripper:
# open the gripper and back away
self.gripperDriver.send_open_gripper_set_distance_from_current()
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
return False
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# place the object
grasp_data_place = self._object_manipulation.get_place_grasp_data()
self.execute_place(grasp_data_place)
# open the | |
@classmethod
def from_dict(cls, _dict: Dict) -> 'GenericComponentResponseMspCa':
    """Initialize a GenericComponentResponseMspCa object from a json dictionary."""
    # pick out only the keys this model knows about
    args = {key: _dict.get(key) for key in ('name', 'root_certs') if key in _dict}
    return cls(**args)
@classmethod
def _from_dict(cls, _dict):
    """Initialize a GenericComponentResponseMspCa object from a json dictionary."""
    # private alias kept for backward compatibility; delegates to from_dict
    return cls.from_dict(_dict)
def to_dict(self) -> Dict:
    """Return a json dictionary representing this model (None attributes omitted)."""
    serialized = {}
    for attr in ('name', 'root_certs'):
        value = getattr(self, attr, None)
        if value is not None:
            serialized[attr] = value
    return serialized
def _to_dict(self):
    """Return a json dictionary representing this model."""
    # private alias kept for backward compatibility; delegates to to_dict
    return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this GenericComponentResponseMspCa object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'GenericComponentResponseMspCa') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'GenericComponentResponseMspCa') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class GenericComponentResponseMspComponent():
    """
    GenericComponentResponseMspComponent.

    :attr str tls_cert: (optional) The TLS certificate as base 64 encoded PEM.
          Certificate is used to secure/validate a TLS connection with this component.
    :attr str ecert: (optional) An identity certificate (base 64 encoded PEM) for
          this component that was signed by the CA (aka enrollment certificate).
          [Available on peer/orderer components w/query parameter 'deployment_attrs'].
    :attr List[str] admin_certs: (optional) An array that contains base 64 encoded
          PEM identity certificates for administrators. Also known as signing
          certificates of an organization administrator. [Available on peer/orderer
          components w/query parameter 'deployment_attrs'].
    """

    def __init__(self,
                 *,
                 tls_cert: str = None,
                 ecert: str = None,
                 admin_certs: List[str] = None) -> None:
        """
        Initialize a GenericComponentResponseMspComponent object.

        :param str tls_cert: (optional) The TLS certificate as base 64 encoded PEM.
        :param str ecert: (optional) An identity certificate (base 64 encoded PEM)
               signed by the CA (aka enrollment certificate).
        :param List[str] admin_certs: (optional) Base 64 encoded PEM identity
               certificates for administrators.
        """
        self.tls_cert = tls_cert
        self.ecert = ecert
        self.admin_certs = admin_certs

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'GenericComponentResponseMspComponent':
        """Initialize a GenericComponentResponseMspComponent object from a json dictionary."""
        known = ('tls_cert', 'ecert', 'admin_certs')
        return cls(**{key: _dict.get(key) for key in known if key in _dict})

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a GenericComponentResponseMspComponent object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model (None attributes omitted)."""
        serialized = {}
        for attr in ('tls_cert', 'ecert', 'admin_certs'):
            value = getattr(self, attr, None)
            if value is not None:
                serialized[attr] = value
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this GenericComponentResponseMspComponent object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'GenericComponentResponseMspComponent') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        if isinstance(other, self.__class__):
            return self.__dict__ == other.__dict__
        return False

    def __ne__(self, other: 'GenericComponentResponseMspComponent') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not (self == other)
class GenericComponentResponseMspTlsca():
    """
    GenericComponentResponseMspTlsca.

    :attr str name: (optional) The "name" to distinguish this CA from the other CA.
          [Available on ca components].
    :attr List[str] root_certs: (optional) An array that contains one or more base
          64 encoded PEM root certificates for the TLS CA. [Available on
          ca/peer/orderer components].
    """

    def __init__(self,
                 *,
                 name: str = None,
                 root_certs: List[str] = None) -> None:
        """
        Initialize a GenericComponentResponseMspTlsca object.

        :param str name: (optional) The "name" to distinguish this CA from the
               other CA. [Available on ca components].
        :param List[str] root_certs: (optional) One or more base 64 encoded PEM
               root certificates for the TLS CA.
        """
        self.name = name
        self.root_certs = root_certs

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'GenericComponentResponseMspTlsca':
        """Initialize a GenericComponentResponseMspTlsca object from a json dictionary."""
        known = ('name', 'root_certs')
        return cls(**{key: _dict.get(key) for key in known if key in _dict})

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a GenericComponentResponseMspTlsca object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model (None attributes omitted)."""
        serialized = {}
        for attr in ('name', 'root_certs'):
            value = getattr(self, attr, None)
            if value is not None:
                serialized[attr] = value
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this GenericComponentResponseMspTlsca object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'GenericComponentResponseMspTlsca') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        if isinstance(other, self.__class__):
            return self.__dict__ == other.__dict__
        return False

    def __ne__(self, other: 'GenericComponentResponseMspTlsca') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not (self == other)
class GenericComponentResponseResources():
    """
    The **cached** Kubernetes resource attributes for this component. [Available on
    ca/peer/orderer components w/query parameter 'deployment_attrs'].

    :attr GenericResources ca: (optional)
    :attr GenericResources peer: (optional)
    :attr GenericResources orderer: (optional)
    :attr GenericResources proxy: (optional)
    :attr GenericResources statedb: (optional)
    """

    def __init__(self,
                 *,
                 ca: 'GenericResources' = None,
                 peer: 'GenericResources' = None,
                 orderer: 'GenericResources' = None,
                 proxy: 'GenericResources' = None,
                 statedb: 'GenericResources' = None) -> None:
        """
        Initialize a GenericComponentResponseResources object.

        :param GenericResources ca: (optional)
        :param GenericResources peer: (optional)
        :param GenericResources orderer: (optional)
        :param GenericResources proxy: (optional)
        :param GenericResources statedb: (optional)
        """
        self.ca = ca
        self.peer = peer
        self.orderer = orderer
        self.proxy = proxy
        self.statedb = statedb

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'GenericComponentResponseResources':
        """Initialize a GenericComponentResponseResources object from a json dictionary."""
        args = {}
        # each present key is deserialized into a GenericResources sub-model
        for key in ('ca', 'peer', 'orderer', 'proxy', 'statedb'):
            if key in _dict:
                args[key] = GenericResources.from_dict(_dict.get(key))
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a GenericComponentResponseResources object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model (None attributes omitted)."""
        serialized = {}
        for key in ('ca', 'peer', 'orderer', 'proxy', 'statedb'):
            value = getattr(self, key, None)
            if value is not None:
                serialized[key] = value.to_dict()
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this GenericComponentResponseResources object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'GenericComponentResponseResources') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        if isinstance(other, self.__class__):
            return self.__dict__ == other.__dict__
        return False

    def __ne__(self, other: 'GenericComponentResponseResources') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not (self == other)
class GenericComponentResponseStorage():
"""
The **cached** Kubernetes storage attributes for this component. [Available on
ca/peer/orderer components w/query parameter 'deployment_attrs'].
:attr StorageObject ca: (optional)
:attr StorageObject peer: (optional)
:attr StorageObject orderer: (optional)
:attr StorageObject statedb: (optional)
"""
def __init__(self,
*,
ca: 'StorageObject' = None,
peer: 'StorageObject' = None,
orderer: 'StorageObject' = None,
statedb: 'StorageObject' = None) -> None:
"""
Initialize a GenericComponentResponseStorage object.
:param StorageObject ca: (optional)
:param StorageObject peer: (optional)
:param StorageObject orderer: (optional)
:param StorageObject statedb: (optional)
"""
self.ca = ca
self.peer = peer
self.orderer = orderer
self.statedb = statedb
@classmethod
def from_dict(cls, _dict: Dict) -> 'GenericComponentResponseStorage':
    """Initialize a GenericComponentResponseStorage object from a json dictionary."""
    args = {}
    # each present key is deserialized into a StorageObject sub-model
    for key in ('ca', 'peer', 'orderer', 'statedb'):
        if key in _dict:
            args[key] = StorageObject.from_dict(_dict.get(key))
    return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a | |
# repository: 157239n/k1lib (stars: 1-10) — metadata tag converted to a comment so the file parses
# AUTOGENERATED FILE! PLEASE DON'T EDIT
"""This module is for all things related to atoms, molecules and their simulations"""
import k1lib
from typing import Dict, List
# module-level settings object, also registered globally under k1lib.settings.mo
settings = k1lib.Settings().add("overOctet", False, "whether to allow making bonds that exceeds the octet rule")
k1lib.settings.add("mo", settings, "from k1lib.mo module")
# NOTE(review): "substances" is exported but not defined in this chunk — confirm it exists later in the module
__all__ = ["Atom", "substances", "NoFreeElectrons", "OctetFull"]
class NoFreeElectrons(RuntimeError): pass  # raised when an atom has no electrons left for a new bond
class OctetFull(RuntimeError): pass  # raised when a bond would exceed the octet rule
# if Atom's gDepth is smaller than this, then it means that it has not been visited
_depthAuto = k1lib.AutoIncrement()  # fresh generation id per graph traversal
_idxAuto = k1lib.AutoIncrement()  # unique per-Atom index suffix
class Atom:
    """Just an atom really. Has properties, can bond to other atoms, and can
    generate a :class:`System` for simulation."""
    def __init__(self, name:str, atomicN:int, massN:float, valenceE:int, radius:List[float]=[], octetE:int=8):
        """Creates a new atom. Not intended to be used by the end user. If you
        wish to get a new atom, just do stuff like this::

            c1 = mo.C
            c2 = mo.C
            c1 == c2 # returns False, demonstrating that these are different atoms

        If you wish to register new substances with the module, you can do this::

            genF = lambda: Atom(...)
            mo.registerSubstance("elementName", genF)
            mo.elementName # should executes `genF` and returns

        :param name: element name (eg. "C")
        :param atomicN: atomic number (eg. 6)
        :param massN: atomic mass in g/mol (eg. 12)
        :param valenceE: how many valence electrons initially?
        :param radius: covalent radiuses (in pm) for single, double and triple bonds
        :param octetE: how many electrons in a full octet? Default 8, but can be 2 for H and He"""
        # NOTE(review): mutable default `radius=[]` is shared across calls — harmless
        # as long as it is never mutated, but worth confirming
        self.name = name; self.atomicN = atomicN; self.massN = massN
        self.ogValenceE = valenceE # original
        self.valenceE = valenceE; self.octetE = octetE; self.radius = radius
        self._bonds = [] # list of Atoms this Atom is bonded to
        self.gDepth = -1 # graph depth, for graph traversal stuff. Values will be updated from _depthAuto
        self.idx = f"A{_idxAuto()}" # unique value for Atoms everywhere
        # contracts:
        # - valenceE = eClouds * 2 + freeE + len(bonds) * 2
        # - valenceE <= octetE. "<" happens when octet not full
        # can only form a new bond if freeE >= 1. Can dec eClouds to inc freeE
        if name != "_e":
            # regular atom: odd electron becomes a radical, pairs become clouds
            self.eClouds = []; self.freeE = valenceE % 2
            for i in range(valenceE//2): self.eClouds.append(mo._e)
        else: self.eClouds = []; self.freeE = 0  # "_e" is the electron-cloud sentinel itself
    @property
    def bonds(self):
        """List of Atoms bonded to this Atom"""
        return self._bonds
    @bonds.setter
    def bonds(self, v): self._bonds = v
    @property
    def nonHBonds(self) -> List["Atom"]:
        """All atoms this atom is bonded to, minus the Hydrogens."""
        return [a for a in self.bonds if a.name != "H"]
    @property
    def HBonds(self) -> List["Atom"]:
        """All hydrogens this atom is bonded to."""
        return [a for a in self.bonds if a.name == "H"]
    @property
    def uniqueBonds(self) -> List["Atom"]:
        """All unique bonds. Meaning, if there's a double bond, only return 1
        atom, not 2."""
        return list(set(self.bonds))
    @property
    def uniqueNonHBonds(self) -> List["Atom"]:
        """All unique non Hydrogen bonds."""
        return list(set(self.nonHBonds))
    def nBonds(self, atom:"Atom"):
        """Get number of bonds between this and another atom."""
        # a double bond appears twice in self.bonds, hence the count
        return len([bond for bond in self.bonds if bond == atom])
    @property
    def availableBonds(self) -> int:
        """Available bonds. This includes electron clouds, radical electrons, and
        Hydrogen bonds."""
        return len(self.eClouds) * 2 + self.freeE + len([a for a in self.bonds if a.name == "H"])
    def __repr__(self):
        return f"""<Atom {self.name} ({self.atomicN}), {len(self.bonds)} bonds, {self.valenceE}/{self.octetE} valence electrons, {len(self.eClouds)} electron clouds, {self.freeE} free (radical) electrons>"""
@k1lib.patch(Atom)
def _show(self, g=None, gDepth=-1, H:bool=True, GVKwargs={}):
    """Recursive graph-drawing helper: adds this atom (and, recursively, its
    bonded atoms) as nodes/edges of ``g``. ``gDepth`` is the current traversal
    generation; atoms whose ``gDepth`` already equals it are treated as visited,
    which terminates cycles.

    NOTE(review): mutable default ``GVKwargs={}`` is shared across calls —
    safe only while callers never mutate it."""
    self.gDepth = gDepth
    if not H:
        # collapse attached hydrogens into the node label, e.g. "C", "CH", "CH3"
        nH = len(self.HBonds); nH = "" if nH==0 else ("H" if nH == 1 else f"H{nH}")
        g.node(self.idx, f"{self.name}{nH}", **GVKwargs)
    else: g.node(self.idx, self.name, **GVKwargs)
    for atom in self.bonds:
        if atom.gDepth >= gDepth or (not H and atom.name == "H"): continue
        # all this complexity just to determine arrow direction
        d1 = (self.nonHBonds[0] == atom) if len(self.nonHBonds) > 0 else False
        d2 = (atom.nonHBonds[0] == self) if len(atom.nonHBonds) > 0 else False
        if d1 and d2: g(self.idx, atom.idx, dir="both")
        elif d1: g(self.idx, atom.idx)
        elif d2: g(atom.idx, self.idx)
        else: g(self.idx, atom.idx, arrowhead="none")
    # recurse into unvisited neighbors (skipping hydrogens when H is False)
    if H: [atom._show(g, gDepth, H) for atom in self.bonds if atom.gDepth < gDepth]
    else: [atom._show(g, gDepth, H) for atom in self.nonHBonds if atom.gDepth < gDepth]
@k1lib.patch(Atom)
def show(self, H:bool=True):
    """Renders the molecule graph this atom belongs to. Meant for debugging
    simple substances only, as graphs of big molecules look unwieldy. The
    current :class:`Atom` is highlighted (filled), and every bond is drawn as
    an arrow pointing where :meth:`next` will go next.

    :param H: whether to display hydrogens as separate atoms, or bunched into
        the main atom"""
    graph = k1lib.digraph()
    self._show(graph, _depthAuto(), H, {"style": "filled"})
    return graph
@k1lib.patch(Atom)
def _addFreeE(self, amt:int=1):
    """Adds ``amt`` free (radical) electrons to this atom. Every 2
    accumulated free electrons are folded into a new electron cloud."""
    if amt < 1: return  # nothing to add (the original still added 1 for amt=0)
    if amt > 1:
        # Add one electron at a time, then STOP. The original fell through
        # after the recursion and executed `self.freeE += 1` as well, so
        # amt > 1 used to add amt+1 electrons in total.
        for _ in range(amt): self._addFreeE()
        return
    self.freeE += 1
    # Two radicals pair up into an electron cloud.
    if self.freeE >= 2: self.eClouds.append(mo._e); self.freeE -= 2
@k1lib.patch(Atom)
def _subFreeE(self, amt:int=1) -> None:
    """Consumes ``amt`` free electrons from this atom.

    Prefers loose radical electrons; when none remain, breaks up an electron
    cloud (2 electrons): one is consumed, one becomes a radical. Raises
    :class:`RuntimeError` when neither is available. (Despite the original
    ``-> bool`` annotation, nothing is ever returned — failure raises.)"""
    if amt > 1: [self._subFreeE() for i in range(amt)]
    elif self.freeE > 0: self.freeE -= 1
    elif len(self.eClouds) > 0:
        # Break a cloud: +2 electrons available, consume 1, keep 1 free.
        self.freeE += 1; self.eClouds.pop()
    else: raise RuntimeError(f"Can't give away any more free electrons on atom {self.name}!")
@k1lib.patch(Atom)
def _makeRoom(self, nBonds:int):
    """Frees electrons so that ``nBonds`` extra bonds can be formed.

    Removes bonded Hydrogens when the octet would otherwise overflow; if that
    is not possible, prompts the user (unless ``settings.overOctet``
    auto-accepts) and raises :class:`OctetFull` on refusal. Finally raises
    :class:`NoFreeElectrons` when not enough electrons remain."""
    nBondsToRemove = self.valenceE + nBonds - self.octetE
    if nBondsToRemove > 0:
        Hs = [bond for bond in self.bonds if bond.name == "H"]
        if len(Hs) >= nBondsToRemove:
            for i in range(nBondsToRemove): self.removeBond(Hs[i])
        elif not settings.overOctet:
            # NOTE(review): the printed hint mentions `settings.mo.overOctet`
            # while the code checks `settings.overOctet` — confirm which one
            # is the real flag.
            ans = input("Can't remove Hydrogen bonds to make room for new bond! Do you want to do this anyway (y/n): ")
            print("Btw, you can auto accept this by doing `settings.mo.overOctet = True`")
            # startswith() also tolerates empty input, which previously
            # crashed with IndexError on `ans.lower()[0]`.
            if not ans.lower().startswith("y"): raise OctetFull("Stopping...")
    availableE = len(self.eClouds) * 2 + self.freeE
    if availableE < nBonds: raise NoFreeElectrons(f"Can't make room for {nBonds} new bonds on {self.name}. Only {availableE} electrons left for bonds!")
@k1lib.patch(Atom)
def __call__(self, atom:Atom, nBonds:int=1, main=False) -> Atom:
    """Forms a bond with another atom. If valence electrons are full, will
    attempt to disconnect Hydrogens from self to make room.

    :param atom: the atom to bond with
    :param nBonds: number of bonds. 2 for double, 3 for triple
    :param main: whether to put this bond in front of existing bonds, to
        signify the "main" chain, so that it works well with :meth:`next`
    :return: self"""
    self._makeRoom(nBonds); atom._makeRoom(nBonds)
    # Main-chain bonds are prepended so .next() walks the backbone first.
    if main: self.bonds = [atom] * nBonds + self.bonds
    else: self.bonds += [atom] * nBonds
    atom.bonds += [self] * nBonds
    # Both sides gain valence electrons and spend free electrons per bond.
    self.valenceE += nBonds; self._subFreeE(nBonds)
    atom.valenceE += nBonds; atom._subFreeE(nBonds)
    return self
@k1lib.patch(Atom)
def bond(self, atom:Atom, nBonds:int=1, main=False) -> Atom:
    """Bonds with ``atom`` exactly like :meth:`__call__`, but hands back the
    other atom instead, so the main chain can be written fluently."""
    self(atom, nBonds, main)
    return atom
@k1lib.patch(Atom)
def main(self, atom:Atom, nBonds:int=1) -> Atom:
    """Shortcut for :meth:`bond` with ``main`` set to True."""
    return self.bond(atom, nBonds, True)
@k1lib.patch(Atom)
def removeBond(self, atom:"Atom"):
    """Severs every bond between this atom and ``atom``; the electrons that
    formed those bonds are returned to each side as free electrons."""
    count = self.nBonds(atom)
    for mine, other in ((self, atom), (atom, self)):
        mine.bonds = [b for b in mine.bonds if b != other]
        mine.valenceE -= count
        mine._addFreeE(count)
@k1lib.patch(Atom, "next")
def _next(self, offset=0, times:int=1) -> "Atom":
    """Walks to the next atom bonded to this one, preferring non-Hydrogens.
    This is the main way to navigate around the molecule.

    You kinda have to make sure that your molecule's bonding order is
    appropriate by choosing between :meth:`bond` and :meth:`main`. Check the
    bonding order with :meth:`show`.

    :param offset: if there are multiple non-Hydrogen atoms, which ones should I pick?
    :param times: how many times do you want to chain ``.next()``?"""
    if times < 0: raise RuntimeError("Can't do .next() with negative `times`")
    if times == 0: return self
    # Non-Hydrogens come first, so low offsets stay on the backbone.
    candidates = self.nonHBonds + self.HBonds
    if not candidates: return None
    chosen = candidates[offset]
    return chosen if times == 1 else chosen.next(offset, times - 1)
@k1lib.patch(Atom)
def nexts(self, atoms:int=2) -> List[Atom]:
    """Kinda like :meth:`next`, but fetches multiple atoms along the
    backbone. Example::

        c1, c2 = mo.CH4(mo.CH4).nexts()"""
    if atoms < 1: raise RuntimeError(f"Zero or negative ({atoms}) number of atoms does not make sense!")
    if atoms == 1: return [self]
    return [self] + self.next().nexts(atoms - 1)
# Element listing order used for empirical formulas (Hill-style: C, H first).
empiricalOrder = ["C", "H", "O", "N"]

def em1(e:str, n:int):
    """Formats one element of an empirical formula: "C" for a count of 1,
    "C2" otherwise."""
    return e if n == 1 else f"{e}{n}"
@k1lib.patch(Atom)
def _empirical(self, d:Dict[str, int], gDepth:int):
    """Recursively tallies element counts of the connected molecule into
    ``d``, using ``gDepth`` as a visited marker."""
    if self.gDepth >= gDepth: return
    self.gDepth = gDepth
    d[self.name] += 1
    for neighbor in self.bonds:
        neighbor._empirical(d, gDepth)
@k1lib.patch(Atom)
def empirical(self) -> str:
"""Returns an empirical formula for the molecule this :class:`Atom` is attached to."""
| |
<filename>backend/ozon/core/ModelData.py
# Copyright INRIM (https://www.inrim.eu)
# See LICENSE file for full licensing details.
import sys
import os
import logging
import pymongo
import ujson
from .database.mongo_core import *
from .BaseClass import PluginBase
from .QueryEngine import QueryEngine
from fastapi.exceptions import HTTPException
logger = logging.getLogger(__name__)
class ModelData(PluginBase):
    """Registry base for model-data plugins.

    Every subclass is instantiated and appended to ``plugins`` at
    class-creation time, so the service layer can look up the registered
    plugin implementations.
    """
    plugins = []

    def __init_subclass__(cls, **kwargs):
        # Cooperatively call the next __init_subclass__ in the MRO (the
        # original skipped this), then auto-register an instance.
        super().__init_subclass__(**kwargs)
        cls.plugins.append(cls())
class ModelDataBase(ModelData):
    """Default :class:`ModelData` plugin.

    Wraps the Mongo persistence helpers (``search_*``, ``save_record``, ...)
    with session-aware CRUD/search methods used by the REST layer.
    """

    @classmethod
    def create(cls, session, pwd_context):
        # Factory: build and initialise an instance in one step.
        self = ModelDataBase()
        self.init(session, pwd_context)
        return self

    def init(self, session, pwd_context):
        # pwd_context: password hashing context (see get_password_hash).
        self.session = session
        self.pwd_context = pwd_context
        self.qe = QueryEngine.new(session=session)
        # Per-model metadata, refreshed by gen_model() for dynamic models.
        self.no_clone_field_keys = {}
        self.computed_fields = {}
        self.create_task_action = {}
        self.unique_fields = []
        # Mongo sort directions.
        self.asc = 1
        self.desc = -1
        # Models backed by static schemas (not built from stored components).
        self.system_model = {
            "component": Component,
            "session": Session,
            "attachment_trash": AttachmentTrash
        }

    async def gen_model(self, model_name):
        """Returns the model class for ``model_name``.

        System models come from ``self.system_model``; any other name is
        built dynamically from its stored Component definition. Returns
        ``False`` when no matching component exists.
        """
        model = False
        if model_name in self.system_model:
            model = self.system_model.get(model_name)
        else:
            component = await search_by_name(Component, model_name)
            if component:
                mm = ModelMaker(
                    model_name, component.components)
                for field in mm.unique_fields:
                    await set_unique(mm.model, field)
                # Cache the maker's metadata for clone/compute handling.
                self.no_clone_field_keys = mm.no_clone_field_keys
                self.computed_fields = mm.computed_fields
                self.create_task_action = mm.create_task_action
                model = mm.model
        return model

    def clean_data_to_clone(self, data: dict):
        """Resets non-clonable fields of ``data`` to their default values.

        NOTE(review): mutates ``data`` in place, then returns a shallow copy.
        """
        for k, v in self.no_clone_field_keys.items():
            if k in data and not k == "rec_name":
                data[k] = v
                if data.get("data_value") and data.get("data_value").get(k):
                    data.get("data_value")[k] = v
        return data.copy()

    async def all(self, schema: Type[ModelType], sort=[], distinct=""):
        """Returns all records of ``schema``.

        NOTE(review): ``distinct`` is accepted but unused here.
        """
        ASCENDING = 1
        """Ascending sort order."""
        DESCENDING = -1
        if not sort:
            # Default ordering used throughout this class.
            sort = [("list_order", ASCENDING), ("rec_name", DESCENDING)]
        return await search_all(schema, sort=sort)

    async def all_distinct(
            self, schema: Type[ModelType], distinct, query={}, additional_key=[], compute_label=""):
        """Returns distinct values of field ``distinct`` among records
        matching ``query`` (normalised through the query engine)."""
        ASCENDING = 1
        """Ascending sort order."""
        DESCENDING = -1
        querye = self.qe.default_query(schema, query)
        list_data = await search_all_distinct(schema, distinct=distinct, query=querye, compute_label=compute_label)
        return get_data_list(list_data, additional_key=additional_key)

    async def freq_for_all_by_field_value(
            self, schema: Type[ModelType], field, field_query, min_occurence=2, add_fields="", sort=-1,
            additional_key=[]
    ):
        """Returns value-frequency counts for ``field`` (only values occurring
        at least ``min_occurence`` times)."""
        list_data = await search_count_field_value_freq(
            schema, field=field, field_query=field_query, min_occurence=min_occurence, add_fields=add_fields, sort=sort)
        return get_data_list(list_data, additional_key=additional_key)

    async def by_name(self, model, record_name):
        # Fetch one record by its rec_name.
        return await search_by_name(model, record_name)

    async def user_by_token(self, token):
        # Fetch the User owning the given session token.
        return await search_user_by_token(User, token)

    async def by_uid(self, model, uid):
        # Fetch one record by uid.
        return await search_by_uid(model, uid)

    async def component_by_name(self, model_name):
        # Fetch the Component (schema definition) named model_name.
        return await search_by_name(Component, model_name)

    async def component_by_type(self, model_type):
        # Fetch Components of a given type.
        return await search_by_type(Component, model_type=model_type)

    async def component_distinct_model(self):
        # Distinct model names among stored Components.
        return await search_distinct(Component)

    async def search_base(
            self, data_model: Type[ModelType], query={}, parent="", sort=[],
            limit=0, skip=0, use_aggregate=False):
        """Low-level search: plain filter or aggregation pipeline over
        ``data_model``, with default list_order/rec_name ordering."""
        ASCENDING = 1
        """Ascending sort order."""
        DESCENDING = -1
        """Descending sort order."""
        if not sort:
            sort = [("list_order", ASCENDING), ("rec_name", DESCENDING)]
        if use_aggregate:
            list_data = await aggregate(
                data_model, query, sort=sort, limit=limit, skip=skip
            )
        else:
            list_data = await search_by_filter(
                data_model, query, sort=sort, limit=limit, skip=skip
            )
        return list_data

    async def get_list_base(
            self, data_model, fields=[], query={}, sort=[], limit=0, skip=0, model_type="",
            parent="", merge_field="", row_action="", additional_key=[],
            use_aggregate=False
    ):
        """Returns list data for UI grids.

        ``additional_key`` works around formio id naming: formio expects an
        '_id' key while records use 'rec_name'/'id'. Passing e.g.
        ['rec_name', '_id'] makes get_data_list add an '_id' key (valued with
        'rec_name') to each record, so formio builders can link resources.
        """
        logger.debug(
            f"get_list_base -> data_model:{data_model}, fields: {fields}, query:{query}, sort:{sort},"
            f" model_type:{model_type}, parent:{parent}, merge_field: {merge_field}, row_action:{row_action}"
        )
        list_data = []
        if fields:
            # NOTE(review): self.search() extends ``fields`` with
            # default_list_metadata again, so the metadata fields end up
            # appended twice — confirm whether this is intentional.
            fields = fields + default_list_metadata
        return await self.search(
            data_model, fields=fields, query=query, sort=sort, limit=limit, skip=skip,
            merge_field=merge_field, row_action=row_action, parent=parent, additional_key=additional_key,
            use_aggregate=use_aggregate
        )

    async def count_by_filter(self, data_model, query={}) -> int:
        # Count records of data_model matching query.
        return await count_by_filter(data_model, domain=query)

    async def search(
            self, data_model: Type[ModelType], fields=[], query={}, sort=[], limit=0, skip=0,
            merge_field="", row_action="", parent="", additional_key=[], remove_keys=[], use_aggregate=False):
        """Searches ``data_model`` and normalises the raw result list with
        get_data_list (adding default list metadata to ``fields``)."""
        if fields:
            fields = fields + default_list_metadata
        list_data = await self.search_base(
            data_model, query=query, parent=parent, sort=sort, limit=limit, skip=skip,
            use_aggregate=use_aggregate
        )
        return get_data_list(
            list_data, fields=fields, merge_field=merge_field,
            row_action=row_action, additional_key=additional_key, remove_keys=remove_keys)

    async def search_export(
            self, data_model: Type[ModelType], fields=[], query={}, sort=[], limit=0, skip=0,
            merge_field="", data_mode="raw", parent="", additional_key=[], remove_keys=[],
            use_aggregate=False):
        """Like :meth:`search`, but adds export metadata fields instead of
        list metadata.

        NOTE(review): ``data_mode`` is accepted but unused here.
        """
        if fields:
            fields = fields + export_list_metadata
        list_data = await self.search_base(
            data_model, query=query, parent=parent, sort=sort, limit=limit, skip=skip,
            use_aggregate=use_aggregate
        )
        return get_data_list(
            list_data, fields=fields, merge_field=merge_field,
            remove_keys=remove_keys, additional_key=additional_key)

    async def make_action_task_for_model(
            self, session, model_name, component_schema, act_config={}):
        """Clones the default 'save' action of ``model_name`` into a new task
        action configured by ``act_config``."""
        logger.info(f" make_default_action_model {model_name}")
        ASCENDING = 1
        """Ascending sort order."""
        DESCENDING = -1
        """Descending sort order."""
        sort = [("list_order", ASCENDING), ("rec_name", DESCENDING)]
        q = {"$and": [
            {"model": model_name},
            {"deleted": 0},
            {"action_type": "save"},
            {"list_query": "{}"}]}
        action_model = await self.gen_model("action")
        model = await self.gen_model(model_name)
        list_data = await search_by_filter(
            action_model, q, sort=sort, limit=0, skip=0
        )
        if list_data:
            # Use the first matching action as a template for the new task.
            src_action = list_data[0]
            action = action_model(**src_action)
            action.sys = component_schema.sys
            action.model = model_name
            action.list_order = await self.count_by_filter(model, query={"deleted": 0})
            action.data_value['model'] = component_schema.title
            action.admin = act_config.get("admin", False)
            if not action.admin:
                action.user_function = "user"
            if action.component_type:
                action.component_type = component_schema.type
            action.action_type = act_config.get("action_type", "task")
            action.data_value['action_type'] = act_config.get("action_type")
            action.type = act_config.get("type", "data")
            action.title = f"Task {component_schema.title}"
            action.data_value['title'] = f"Task {component_schema.title}"
            action.rec_name = f"{model_name}_{act_config.get('rec_name')}"
            action.data_value['rec_name'] = action.rec_name
            await self.save_object(session, action, model_name="action", model=action_model)

    async def make_default_action_model(
            self, session, model_name, component_schema):
        """Copies the system default actions onto a newly created model and
        creates its menu group when appropriate."""
        logger.info(f" make_default_action_model {model_name}")
        ASCENDING = 1
        """Ascending sort order."""
        DESCENDING = -1
        """Descending sort order."""
        sort = [("list_order", ASCENDING), ("rec_name", DESCENDING)]
        q = {"$and": [
            {"model": "action"},
            {"sys": True},
            {"deleted": 0},
            {"list_query": "{}"}]}
        action_model = await self.gen_model("action")
        menu_group_model = await self.gen_model("menu_group")
        model = await self.gen_model(model_name)
        list_data = await search_by_filter(
            action_model, q, sort=sort, limit=0, skip=0
        )
        group_created = False
        menu_groups = await self.count_by_filter(
            menu_group_model, query={"rec_name": model_name, "deleted": 0})
        if (
                menu_groups == 0 and
                (
                    component_schema.sys or not component_schema.type == 'resource'
                )
        ):
            # No menu group for this model yet: create one.
            menu = menu_group_model(
                **{
                    "rec_name": model_name,
                    "label": component_schema.title,
                    "admin": component_schema.sys
                })
            group_created = True
            await self.save_object(session, menu, model_name="menu_group", model=menu_group_model)
        for action_tmp in list_data:
            data = action_tmp
            action = action_model(**data)
            action.sys = component_schema.sys
            action.model = model_name
            action.list_order = await self.count_by_filter(model, query={"deleted": 0})
            action.data_value['model'] = component_schema.title
            action.admin = component_schema.sys
            if not action.admin:
                action.user_function = "user"
            if action.component_type:
                action.component_type = component_schema.type
            if action.action_type == "menu":
                action.title = f"Lista {component_schema.title}"
                action.data_value['title'] = f"Lista {component_schema.title}"
                if not group_created and component_schema.type == 'resource':
                    # Resources without a dedicated group go under the
                    # shared "Risorse Apps" group.
                    action.menu_group = 'risorse_app'
                    action.data_value['menu_group'] = "Risorse Apps"
                else:
                    action.menu_group = model_name
                    action.data_value['menu_group'] = component_schema.title
            # Rename the cloned action (and its chained action) per model.
            action.rec_name = action.rec_name.replace("_action", f"_{model_name}")
            action.data_value['rec_name'] = action.rec_name
            action.next_action_name = action.next_action_name.replace("_action", f"_{model_name}")
            await self.save_object(session, action, model_name="action", model=action_model)

    async def save_record(self, schema, remove_meta=True):
        # NOTE(review): unlike save_all, the result is not returned — confirm
        # whether callers rely on the None return.
        await save_record(schema, remove_meta=remove_meta)

    async def save_all(self, schema, remove_meta=True):
        # Persist a list of records.
        return await save_all(schema, remove_meta=remove_meta)

    async def set_user_data(self, record):
        """Stamps ownership fields on ``record`` from the current session."""
        record.owner_uid = self.session.user.get('uid')
        record.owner_name = self.session.user.get('full_name', "")
        record.owner_mail = self.session.user.get('mail', "")
        record.owner_sector = self.session.sector
        record.owner_sector_id = self.session.sector_id
        record.owner_personal_type = self.session.user.get("tipo_personale", "")
        record.owner_job_title = self.session.user.get("qualifica", "")
        record.owner_function = self.session.function
        return record

    def get_password_hash(self, password):
        # Hash password with the configured context.
        return self.pwd_context.hash(password)

    def diff(self, li1, li2):
        """Symmetric difference of two lists: items present in only one."""
        li_dif = [i for i in li1 + li2 if i not in li1 or i not in li2]
        return li_dif

    async def get_record_diff(self, session, object_o, rec_name: str = "", model_name="", copy=False):
        """Returns the dict of fields that differ between ``object_o`` and
        the stored record ``rec_name`` (the full dict when there is no
        stored source)."""
        logger.info(f"model:{model_name}, rec_name: {rec_name}, copy: {copy}")
        to_pop = default_list_metadata_fields[:]
        if rec_name:
            source = await self.by_name(type(object_o), rec_name)
            if not copy:
                if object_o.rec_name == rec_name:
                    to_pop.append("rec_name")
                object_o = update_model(source, object_o, pop_form_newobject=to_pop)
        new_dict = ujson.loads(object_o.json())
        [new_dict.pop(key) for key in to_pop]
        if rec_name and source:
            src_base = source.dict().copy()
            [src_base.pop(key) for key in to_pop]
            src_dict = src_base.copy()
            # Compare as (key, value) pairs; differing pairs form the diff.
            set_src_l = list(src_dict.items())
            set_new_l = list(new_dict.items())
            dict_diff = dict(self.diff(set_src_l, set_new_l))
        else:
            dict_diff = new_dict.copy()
        return dict_diff.copy()

    async def save_object(
            self, session, object_o, rec_name: str = "", model_name="", copy=False, model=False) -> Any:
        """Creates, updates or copies ``object_o``.

        Returns the saved record, or an error dict on duplicate-key failure.
        """
        logger.info(f" model:{model_name}, rec_name: {rec_name}, copy: {copy}")
        if not model:
            model = await self.gen_model(model_name)
        source = await self.by_name(type(object_o), object_o.rec_name)
        if source:
            rec_name = object_o.rec_name
        if rec_name:
            if not source:
                source = await self.by_name(type(object_o), rec_name)
            if not copy:
                # Update path: merge the new object over the stored one,
                # preserving the default metadata fields.
                to_pop = default_fields[:]
                object_o = update_model(source, object_o, pop_form_newobject=to_pop)
                if session.user:
                    object_o.update_uid = session.user.get('uid')
                    object_o.update_datetime = datetime.now()
        if not rec_name or copy:
            # Create/copy path: stamp ordering, timestamps and ownership.
            object_o.list_order = await self.count_by_filter(model, query={"deleted": 0})
            object_o.data_value['list_order'] = object_o.list_order
            object_o.create_datetime = datetime.now()
            object_o = await self.set_user_data(object_o)
            if model_name == "user":
                pw_hash = self.get_password_hash(object_o.password)
                object_o.password = pw_hash
            if copy:
                if hasattr(object_o, "title"):
                    object_o.title = f"{object_o.title} Copy()"
                if (
                        hasattr(object_o, "rec_name") and
                        object_o.rec_name and model_name not in object_o.rec_name
                ):
                    object_o.rec_name = f"{object_o.rec_name}_copy"
                if hasattr(object_o, "data_value"):
                    object_o.data_value['rec_name'] = object_o.rec_name
            else:
                object_o.rec_name = f"{model_name}.{object_o.id}"
        try:
            rec = await save_record(object_o)
        except pymongo.errors.DuplicateKeyError as e:
            logger.error(f" Duplicate {e.details['errmsg']}")
            field = e.details['keyValue']
            key = list(field.keys())[0]
            val = field[key]
            return {
                "status": "error",
                "message": f"Errore Duplicato {key}: {val}",
                "model": model_name
            }
        return rec
| |
from typing import Dict, Generator, List, Set, Union
from collections import defaultdict
import json
import copy
import re
from fuzzy_search.fuzzy_phrase import Phrase
def as_phrase_object(phrase: Union[str, dict, Phrase], ngram_size: int = 2, skip_size: int = 2) -> Phrase:
    """Coerces ``phrase`` (a string, a phrase dictionary or a Phrase) into a
    Phrase object.

    :param phrase: the phrase to coerce
    :param ngram_size: character n-gram size passed to Phrase
    :param skip_size: skip-gram size passed to Phrase
    :raises KeyError: if ``phrase`` is a dict but not a valid phrase dictionary
    :raises TypeError: if ``phrase`` is not a str, dict or Phrase
    """
    if isinstance(phrase, Phrase):
        return phrase
    if isinstance(phrase, dict):
        if not is_phrase_dict(phrase):
            # Carry the offending dict in the error itself, instead of the
            # original debug print() followed by a bare KeyError.
            raise KeyError(f"invalid phrase dictionary: {phrase}")
        return Phrase(phrase, ngram_size=ngram_size, skip_size=skip_size)
    if isinstance(phrase, str):
        return Phrase(phrase, ngram_size=ngram_size, skip_size=skip_size)
    # The original message claimed only strings were accepted.
    raise TypeError("phrase must be of type str, dict or Phrase")
def is_phrase_dict(phrase_dict: Dict[str, Union[str, List[str]]]) -> bool:
    """Checks whether ``phrase_dict`` is a valid phrase dictionary: a dict
    with a string 'phrase', optional all-string 'variants'/'distractors'
    lists, and optional 'labels' (a string, or a list of strings)."""
    if not isinstance(phrase_dict, dict):
        return False
    if not isinstance(phrase_dict.get("phrase"), str):
        # Missing key or non-string value.
        return False
    for key in ("variants", "distractors"):
        if key in phrase_dict:
            if not all(isinstance(item, str) for item in phrase_dict[key]):
                return False
    if "labels" in phrase_dict:
        labels = phrase_dict["labels"]
        if isinstance(labels, str):
            return True
        if not isinstance(labels, list):
            return False
        if not all(isinstance(label, str) for label in labels):
            return False
    return True
class PhraseModel:
def __init__(self, phrases: Union[None, List[Union[str, Dict[str, Union[str, list]], Phrase]]] = None,
variants: Union[None, List[Union[Dict[str, List[str]], Phrase]]] = None,
phrase_labels: Union[None, List[Dict[str, str]]] = None,
distractors: Union[None, List[Union[Dict[str, List[str]], Phrase]]] = None,
model: Union[None, List[Dict[str, Union[str, list]]]] = None,
custom: Union[None, List[Dict[str, Union[str, int, float, list]]]] = None,
config: dict = None):
if config is None:
config = {}
self.ngram_size = config["ngram_size"] if "ngram_size" in config else 2
self.skip_size = config["skip_size"] if "skip_size" in config else 2
self.phrase_index: Dict[str, Phrase] = {}
# only register variants of known phrases
self.variant_index: Dict[str, Phrase] = {}
self.has_variants: Dict[str, Set[str]] = defaultdict(set)
self.is_variant_of: Dict[str, str] = {}
self.distractor_index: Dict[str, Phrase] = {}
self.has_distractors: Dict[str, Set[str]] = defaultdict(set)
self.is_distractor_of: Dict[str, Set[str]] = defaultdict(set)
self.phrase_length_index: Dict[int, set] = defaultdict(set)
self.variant_length_index: Dict[int, set] = defaultdict(set)
self.has_labels: Dict[str, Set[str]] = defaultdict(set)
self.is_label_of: Dict[str, Set[str]] = defaultdict(set)
self.custom = {}
self.word_in_phrase: Dict[str, Set[str]] = defaultdict(set)
self.first_word_in_phrase: Dict[str, Dict[str, int]] = defaultdict(dict)
self.phrase_type: Dict[str, Set[str]] = defaultdict(set)
self.phrase_string_map: Dict[str, Phrase] = {}
if phrases:
self.add_phrases(phrases)
if variants:
self.add_variants(variants)
if distractors:
self.add_distractors(distractors)
if phrase_labels:
self.add_labels(phrase_labels)
if model:
self.add_model(model)
if custom:
self.add_custom(custom)
def __repr__(self):
"""A phrase model to support fuzzy searching in OCR/HTR output."""
return f"PhraseModel({json.dumps(self.json, indent=2)})"
def __str__(self):
return self.__repr__()
def add_model(self, model: List[Union[str, Dict[str, Union[str, list]]]]) -> None:
"""Add an entire model with list of phrase dictionaries.
:param model: a list of phrase dictionaries
:type model: List[Union[str, Dict[str, Union[str list]]]]
:return: None
:rtype: None
"""
self.add_phrases(model)
self.add_variants(model)
self.add_distractors(model)
self.add_labels(model)
self.add_custom(model)
@property
def json(self) -> List[Dict[str, Union[str, List[str]]]]:
"""Return a JSON representation of the phrase model.
:return: a JSON respresentation of the phrase model
:rtype: List[Dict[str, Union[str, List[str]]]]
"""
model_json: List[Dict[str, Union[str, List[str]]]] = []
for phrase in self.phrase_index:
entry = {'phrase': phrase}
if phrase in self.has_variants:
entry['variants'] = list(self.has_variants[phrase])
if phrase in self.has_labels:
entry['label'] = list(self.has_labels[phrase])
if phrase in self.custom:
entry['custom'] = self.custom[phrase]
model_json += [entry]
return model_json
def add_phrase(self, phrase: Phrase) -> None:
"""Add a phrase to the model as main phrase.
:param phrase: a phrase to be added
:type phrase: Phrase
"""
self.phrase_string_map[phrase.phrase_string] = phrase
self.phrase_type[phrase.phrase_string].add("phrase")
self.phrase_index[phrase.phrase_string] = phrase
self.phrase_length_index[len(phrase.phrase_string)].add(phrase.phrase_string)
self.index_phrase_words(phrase)
def add_variant(self, variant_phrase: Phrase, main_phrase: Phrase):
"""Add a phrase to the model as variant of a given main phrase.
:param variant_phrase: a variant phrase to be added as variant of main_phrase
:type variant_phrase: Phrase
:param main_phrase: a main phrase that the variant phrase is a variant of
:type main_phrase: Phrase
"""
if variant_phrase.phrase_string not in self.phrase_string_map:
self.phrase_string_map[variant_phrase.phrase_string] = variant_phrase
self.variant_index[variant_phrase.phrase_string] = variant_phrase
self.is_variant_of[variant_phrase.phrase_string] = main_phrase.phrase_string
self.has_variants[main_phrase.phrase_string].add(variant_phrase.phrase_string)
self.phrase_type[variant_phrase.phrase_string].add("variant")
self.variant_length_index[len(variant_phrase.phrase_string)].add(variant_phrase.phrase_string)
self.index_phrase_words(variant_phrase)
def add_distractor(self, distractor_phrase: Phrase, main_phrase: Phrase):
"""Add a phrase to the model as distractor of a given main phrase.
:param distractor_phrase: a distractor phrase to be added as distractor of main_phrase
:type distractor_phrase: Phrase
:param main_phrase: a main phrase that the distractor phrase is a distractor of
:type main_phrase: Phrase
"""
if distractor_phrase.phrase_string not in self.phrase_string_map:
self.phrase_string_map[distractor_phrase.phrase_string] = distractor_phrase
self.distractor_index[distractor_phrase.phrase_string] = distractor_phrase
self.is_distractor_of[distractor_phrase.phrase_string].add(main_phrase.phrase_string)
self.has_distractors[main_phrase.phrase_string].add(distractor_phrase.phrase_string)
self.phrase_type[distractor_phrase.phrase_string].add("distractor")
self.index_phrase_words(distractor_phrase)
def remove_phrase(self, phrase: Phrase):
"""Remove a main phrase from the model, including its connections to any variant and distractor phrases.
:param phrase: a phrase that is registered as a main phrase
:type phrase: Phrase
"""
# first check if phrase is registered in this phrase model
if phrase.phrase_string not in self.phrase_index:
raise ValueError(f"{phrase.phrase_string} is not registered as a main phrase")
# remove phrase from the type index
self.phrase_type[phrase.phrase_string].remove("phrase")
# remove the phrase string from the main phrase index
del self.phrase_index[phrase.phrase_string]
# remove the phrase from the phrase length index
self.phrase_length_index[len(phrase.phrase_string)].remove(phrase.phrase_string)
if len(self.phrase_type[phrase.phrase_string]) == 0:
# if the phrase string is not registered as another type (variant or distractor)
# remove the phrase words from the word_to_phrase index
self.remove_phrase_words(phrase)
# if the phrase has variants, remove those as well
if phrase.phrase_string in self.has_variants:
for variant_string in self.has_variants:
variant_phrase = self.variant_index[variant_string]
self.remove_variant(variant_phrase)
# if the phrase has distractors, remove its connections with them as well
if phrase.phrase_string in self.has_distractors:
for distractor_string in self.has_distractors:
distractor_phrase = self.distractor_index[distractor_string]
if len(self.is_distractor_of[distractor_string]) > 1:
self.is_distractor_of[distractor_string].remove(phrase.phrase_string)
else:
# if the distractor is only connected to this phrase, remove the distractor as well
self.remove_distractor(distractor_phrase)
del self.has_distractors[phrase.phrase_string]
def remove_variant(self, variant_phrase: Phrase) -> None:
"""Remove a variant phrase from the model, including its connection to the phrase it is a
variant of.
:param variant_phrase: a phrase that is registered as a variant of one or more main phrases
:type variant_phrase: Phrase
"""
# first check if variant phrase is registered as a variant
if variant_phrase.phrase_string not in self.is_variant_of:
raise ValueError(f"{variant_phrase.phrase_string} is not registered as a variant")
# remove variant from the type index
self.phrase_type[variant_phrase.phrase_string].remove("variant")
# if that is the only type of the phrase, remove it from the word_to_phrase index
if len(self.phrase_type[variant_phrase.phrase_string]) == 0:
self.remove_phrase_words(variant_phrase)
# remove the variant from the variant index
del self.variant_index[variant_phrase.phrase_string]
# remove the variant from the phrase length index
self.variant_length_index[len(variant_phrase.phrase_string)].remove(variant_phrase.phrase_string)
# remove its connection with its main phrase
main_phrase_string = self.is_variant_of[variant_phrase.phrase_string]
del self.is_variant_of[variant_phrase.phrase_string]
self.has_variants[main_phrase_string].remove(variant_phrase.phrase_string)
# if this was the only variant of the main phrase, remove the main phrase from the has_variants index
if len(self.has_variants[main_phrase_string]) == 0:
del self.has_variants[main_phrase_string]
def remove_distractor(self, distractor_phrase: Phrase) -> None:
"""Remove a distractor phrase from the model, including its connection to the phrase it is a
distractor of.
:param distractor_phrase: a phrase that is registered as a distractor of one or more main phrases
:type distractor_phrase: Phrase
"""
if distractor_phrase.phrase_string not in self.is_distractor_of:
raise ValueError(f"{distractor_phrase.phrase_string} is not registered as a distractor")
self.phrase_type[distractor_phrase.phrase_string].remove("distractor")
if len(self.phrase_type[distractor_phrase.phrase_string]) == 0:
self.remove_phrase_words(distractor_phrase)
del self.distractor_index[distractor_phrase.phrase_string]
for main_phrase_string in self.is_distractor_of[distractor_phrase.phrase_string]:
self.has_distractors[main_phrase_string].remove(distractor_phrase.phrase_string)
if len(self.has_distractors[main_phrase_string]) == 0:
del self.has_distractors[main_phrase_string]
del self.is_distractor_of[distractor_phrase.phrase_string]
def add_phrases(self, phrases: List[Union[str, Dict[str, Union[str, List[str]]], Phrase]]) -> None:
"""Add a list of phrases to the phrase model. Phrases must be either:
- a list of strings
- a list of dictionaries with property 'phrase' and the phrase as a string value
- a list of Phrase objects
:param phrases: a list of phrases
:type phrases: List[Union[str, Dict[str, Union[str, List[str]]]]]
"""
for phrase in phrases:
phrase = as_phrase_object(phrase, ngram_size=self.ngram_size, skip_size=self.skip_size)
self.add_phrase(phrase)
# if phrases is a dictionary with possible variants, distractors, labels and custom metadata
# per phrase, add those variants and distractors
phrase_dicts = [phrase for phrase in phrases if isinstance(phrase, dict)]
self.add_variants(phrase_dicts)
self.add_distractors(phrase_dicts)
self.add_custom(phrase_dicts)
self.add_labels(phrase_dicts)
def remove_phrases(self, phrases: List[Union[str, Dict[str, Union[str, List[str]]], Phrase]]):
"""Remove a list of phrases from the phrase model. If it has any registered spelling variants,
remove those as well.
:param phrases: a list of phrases/keyphrases
:type phrases: List[Union[str, Dict[str, Union[str, List[str]]]]]
"""
for phrase in phrases:
phrase = as_phrase_object(phrase, ngram_size=self.ngram_size, skip_size=self.skip_size)
if phrase.phrase_string not in self.phrase_index:
raise KeyError(f"Unknown phrase: {phrase.phrase_string}")
self.remove_phrase(phrase)
def get_phrases_by_max_length(self, max_length: int,
include_variants: bool = False) -> Generator[Phrase, None, None]:
"""Return all phrase in the phrase model that are no longer than a given length.
:param max_length: the maximum length of phrases to be returned
:type max_length: int
:param include_variants: whether to include variants
:return: a generator that yield phrases
:rtype: Generator[Phrase, None, None]
"""
for phrase_length in self.phrase_length_index:
if phrase_length > max_length:
| |
<filename>extra/extra_analysis/logreg.py
"""
logreg.py
This module contains functions to run and analyse logistic regressions
to predict stimulus information from ROI activity for data generated by the
Allen Institute OpenScope experiments for the Credit Assignment Project.
Authors: <NAME>
Date: October, 2018
Note: this code uses python 3.7.
"""
import copy
import logging
import warnings
from pathlib import Path
import numpy as np
import pandas as pd
import torch
from extra_analysis import quant_analys
from util import data_util, file_util, gen_util, logger_util, logreg_util, \
math_util, plot_util, rand_util
from sess_util import sess_gen_util, sess_ntuple_util, sess_str_util
from extra_plot_fcts import logreg_plots
from util import gen_util
logger = logging.getLogger(__name__)
TAB = " "
#### ALWAYS SET TO FALSE - CHANGE ONLY FOR TESTING PURPOSES
TEST_VISFLOW_VARIATIONS = False
#############################################
def get_comps(stimtype="gabors", q1v4=False, exp_v_unexp=False):
    """
    get_comps()

    Returns the comparison names that are valid for the requested stimulus
    type and training/testing split.

    Optional args:
        - stimtype (str)    : stimulus type ("gabors" or "visflow")
                              default: "gabors"
        - q1v4 (bool)       : if True, analysis is trained on first and tested
                              on last quartiles
                              default: False
        - exp_v_unexp (bool): if True, analysis is trained on expected and
                              tested on unexpected sequences
                              default: False

    Returns:
        - comps (list): list of comparisons that fit the criteria
    """
    if stimtype == "gabors":
        # Expected vs unexpected training splits do not exist for Gabors.
        if exp_v_unexp:
            raise ValueError("exp_v_unexp can only be used with visual flow.")
        comps = ["unexp", "AvB", "AvC", "BvC", "DvU", "Aori", "Bori", "Cori",
                 "Dori", "Uori", "DoriU", "DoriA", "BCDoriA", "BCDoriU",
                 "ABCoriD", "ABCoriU"]
    elif stimtype == "visflow":
        comps = ["unexp", "dir_all", "dir_unexp", "dir_exp", "half_right",
                 "half_left", "half_diff"]
        # Drop comparisons that are incompatible with the requested split.
        if exp_v_unexp:
            comps = gen_util.remove_if(
                comps, ["unexp", "dir_unexp", "dir_all", "half_right",
                        "half_left", "half_diff"])
        if q1v4:
            comps = gen_util.remove_if(
                comps, ["half_left", "half_right", "half_diff"])
    else:
        gen_util.accepted_values_error(
            "stimtype", stimtype, ["gabors", "visflow"])
    return comps
#############################################
def get_class_pars(comp="unexp", stimtype="gabors"):
    """
    get_class_pars()

    Returns the name of the class-determining variable, and the unexpected
    values to use for the classes.

    Optional args:
        - comp (str)    : type of comparison
                          default: "unexp"
        - stimtype (str): stimulus type
                          default: "gabors"

    Returns:
        - class_var (str)     : variable separating classes (e.g., "unexps",
                                "gab_ori", "visflow_dir")
        - unexps (str or list): unexpected values (for each class, if list)
    """
    if stimtype == "gabors":
        if comp in ("unexp", "DvU"):
            # Both comparisons split trials into expected (0) vs unexpected (1).
            class_var = "unexps"
            unexps = [0, 1]
        elif "ori" in comp:
            class_var = "gab_ori"
            segments = [seg.upper() for seg in comp.split("ori")
                        if len(seg) > 0]
            unexps = []
            for seg in segments:
                # D xor U pins the unexpectedness; otherwise any value is used.
                if ("D" in seg) ^ ("U" in seg):
                    unexps.append(1 if "U" in seg else 0)
                else:
                    unexps.append("any")
            if len(segments) == 1:
                unexps = unexps[0]
        elif "dir" in comp:
            raise ValueError("dir comparison not valid for gabors.")
        else:
            class_var = "gabfr"
            unexps = "any"
    elif stimtype == "visflow":
        class_var = "visflow_dir"
        if comp == "dir_all":
            unexps = "any"
        elif comp == "dir_exp":
            unexps = 0
        elif comp == "dir_unexp":
            unexps = 1
        elif comp == "unexp":
            class_var = "unexps"
            unexps = [0, 1]
        elif comp in ("half_right", "half_left", "half_diff"):
            class_var = comp
            unexps = "any"
        else:
            raise ValueError("Only unexp, dir_all, dir_exp, dir_unexp, "
                "samehalf, diffhalf comparisons supported for Visflow.")
    return class_var, unexps
#############################################
def get_stimpar(comp="unexp", stimtype="gabors", visflow_dir="both",
                visflow_size=128, gabfr=0, gabk=16, gab_ori="all",
                visflow_pre=0.0):
    """
    get_stimpar()

    Returns a stimulus parameter named tuple based on the stimulus parameters
    passed and comparison type.

    Optional args:
        - comp (str)                : type of comparison
                                      default: "unexp"
        - stimtype (str)            : stimulus type
                                      default: "gabors"
        - visflow_dir (str or list) : visual flow direction
                                      default: "both"
        - visflow_size (int or list): visual flow size
                                      default: 128
        - gabfr (int or list)       : gabor frame of reference (may be a list
                                      depending on "comp")
                                      default: 0
        - gabk (int or list)        : gabor kappa
                                      default: 16
        - gab_ori (str or list)     : gabor orientations (e.g., "all"),
                                      for comp values like DoriU, DoriA, etc.
                                      default: "all"
        - visflow_pre (num)         : pre value for visual flow analyses
                                      default: 0.0

    Returns:
        - stimpar (StimPar): named tuple containing stimulus parameters

    Raises:
        ValueError: if a "dir"/"half" comparison is requested for gabors.
    """
    # BUGFIX: the original condition was
    #     stimtype == "visflow" and "half" in visflow_dir or "dir" in visflow_dir
    # which, by operator precedence, applied the `"dir" in visflow_dir` check
    # to ALL stimtypes. Parenthesized so both checks only apply to visual flow.
    # NOTE(review): the substrings are checked on visflow_dir, but direction is
    # actually overridden from `comp` below ("half_right"/"half_left") —
    # possibly `comp` was intended here; confirm with callers.
    if stimtype == "visflow" and ("half" in visflow_dir or "dir" in visflow_dir):
        logger.info("Ignoring visual flow direction setting.")

    # Normalize parameters for the stimulus type (drops irrelevant ones).
    [visflow_dir, visflow_size, gabfr,
        gabk, gab_ori] = sess_gen_util.get_params(
        stimtype, visflow_dir, visflow_size, gabfr, gabk, gab_ori)

    if stimtype == "gabors":
        # DO NOT ALLOW OVERLAPPING
        if comp == "unexp":
            stimpar = sess_ntuple_util.init_stimpar(
                stimtype, visflow_dir, visflow_size, gabfr, gabk, gab_ori,
                0, 1.5)
        elif comp == "DvU":
            gabfr = sess_str_util.gabfr_nbrs(comp[0])
            stimpar = sess_ntuple_util.init_stimpar(
                stimtype, visflow_dir, visflow_size, gabfr, gabk, gab_ori,
                0, 0.45)
        elif "ori" in comp:
            # e.g. "DoriU" -> letter groups ["D", "U"]
            gab_letts = [lett.upper() for lett in comp.split("ori")
                if len(lett) > 0]
            act_gabfr = [[sess_str_util.gabfr_nbrs(lett) for lett in letts]
                for letts in gab_letts]
            if len(act_gabfr) == 1:
                pre, post = 0, 0.45
                if comp in ["Dori", "Uori"]:
                    pre, post = 0, 0.6
                act_gabfr = act_gabfr[0][0] # only one value
                gab_ori = sess_gen_util.filter_gab_oris(gab_letts[0], gab_ori)
                if act_gabfr != gabfr:
                    logger.info(
                        f"Setting gabfr to {act_gabfr} instead of {gabfr}.")
            else:
                # Two letter groups: compare across frames, restricted to
                # orientations shared with the U frame.
                pre, post = -0.15, 0.45
                gab_ori = sess_gen_util.gab_oris_common_U(gab_ori)
            stimpar = sess_ntuple_util.init_stimpar(
                stimtype, visflow_dir, visflow_size, act_gabfr, gabk, gab_ori,
                pre, post)
        elif "dir" in comp or "half" in comp:
            raise ValueError("dir/half comparison not valid for gabors.")
        else:
            # e.g. "AvB" -> frames for letters A and B
            gabfrs = sess_str_util.gabfr_nbrs([comp[0], comp[2]])
            stimpar = sess_ntuple_util.init_stimpar(
                stimtype, visflow_dir, visflow_size, gabfrs, gabk, gab_ori,
                0, 0.45)
    elif stimtype == "visflow":
        # DO NOT ALLOW OVERLAPPING
        # The comparison, not the caller, decides the direction here.
        if "right" in comp:
            visflow_dir = "right"
        elif "left" in comp:
            visflow_dir = "left"
        stimpar = sess_ntuple_util.init_stimpar(
            stimtype, visflow_dir, visflow_size, gabfr, gabk, gab_ori,
            visflow_pre, 1.0)

    # for visual flow logreg test analyses
    if TEST_VISFLOW_VARIATIONS:
        logger.warning("Setting visual flow pre/post to 2 for testing purposes.")
        stimpar = sess_ntuple_util.init_stimpar(
            stimtype, visflow_dir, visflow_size, gabfr, gabk, gab_ori, 2, 2)

    return stimpar
#############################################
def get_rundir(run_val, uniqueid=None, alg="sklearn"):
    """
    get_rundir(run_val)

    Returns the name of the specific subdirectory in which an analysis is
    saved, based on a run number and unique ID.

    Required args:
        - run_val (int): run number ("pytorch" alg) or
                         number of runs ("sklearn" alg)

    Optional args:
        - uniqueid (str or int): unique ID for analysis
                                 default: None
        - alg (str)            : algorithm used to run logistic regression
                                 ("sklearn" or "pytorch")
                                 default: "sklearn"

    Returns:
        - rundir (Path): name of subdirectory to save analysis in
    """
    # With a unique ID, the directory name is ID-based regardless of algorithm.
    if uniqueid is not None:
        subdir = f"{uniqueid}_{run_val}"
    elif alg == "sklearn":
        subdir = f"{run_val}_runs"
    elif alg == "pytorch":
        subdir = f"run_{run_val}"
    else:
        gen_util.accepted_values_error("alg", alg, ["sklearn", "pytorch"])
    return Path(subdir)
#############################################
def get_compdir_dict(rundir, no_lists=False):
    """
    get_compdir_dict(rundir)

    Returns a dictionary with analysis parameters based on the full analysis
    path.

    Required args:
        - rundir (Path): path of subdirectory in which analysis is saved,
                         structured as
                         ".../m_s_plane_stim_fluor_scaled_comp_shuffled/
                         uniqueid_run"

    Optional args:
        - no_lists (bool): if True, list parameters are replaced with a string,
                           e.g. "both"
                           default: False

    Returns:
        - compdir_dict (dict): parameter dictionary
            - visflow_dir (str or list) : visual flow direction parameter
                                          ("right", "left", ["right", "left"]
                                          or "none")
            - visflow_size (int or list): visual flow size parameter (128, 256,
                                          [128, 256] or "none")
            - comp (str)                : comparison parameter ("unexp", "AvB",
                                          "AvC", "BvC" or "DvU", None)
            - fluor (str)               : fluorescence parameter ("raw" or "dff")
            - gabk (int or list)        : Gabor kappa parameter
                                          (4, 16, [4, 16] or "none")
            - plane (str)               : plane ("soma" or "dend")
            - mouse_n (int)             : mouse number
            - sess_n (int)              : session number
            - scale (bool)              : scaling parameter
            - run_n (int)               : run number
            - shuffle (bool)            : shuffle parameter
            - stimtype (str)            : stimulus type ("gabors" or "visflow")
            - uniqueid (str)            : unique ID (datetime, 6 digit number or
                                          None)
    """
    # Second-to-last path component encodes the analysis parameters; the last
    # encodes uniqueid/run number (see get_rundir()).
    param_str, run_str = rundir.parts[-2], rundir.parts[-1]
    compdir_dict = sess_gen_util.get_params_from_str(param_str, no_lists)

    parts = run_str.split("_")
    if "run" in run_str:
        # "run_{n}" form: no unique ID was used.
        compdir_dict["uniqueid"] = None
        compdir_dict["run_n"] = int(parts[1])
    else:
        # "{uniqueid}_{n}" form: the ID itself may contain underscores.
        compdir_dict["uniqueid"] = "_".join(str(sub) for sub in parts[:-1])
        compdir_dict["run_n"] = int(parts[-1])
    return compdir_dict
#############################################
def get_df_name(task="analyse", stimtype="gabors", comp="unexp", ctrl=False,
alg="sklearn"):
"""
get_df_name()
Returns a dictionary with analysis parameters based on the full analysis
path.
Optional args:
- task (str) : type of task for which to get the dataframe
default: "analyse"
- stimtype (str): type of stimulus
default: "gabors"
- | |
units = obj_tables.sci.units.UnitAttribute(unit_registry,
choices=(unit_registry.parse_units('molecule'),),
default=unit_registry.parse_units('molecule'))
references = obj_tables.ManyToManyAttribute(Reference, related_name='observables')
identifiers = IdentifierAttribute(related_name='observables')
class Meta(obj_tables.Model.Meta, ExpressionExpressionTermMeta):
attribute_order = ('id', 'name', 'expression', 'units', 'identifiers', 'references', 'comments')
expression_term_model = ObservableExpression
expression_term_units = 'units'
def deserialize(self, value, objects, decoded=None):
    """ Deserialize value

    Args:
        value (:obj:`str`): String representation
        objects (:obj:`dict`): dictionary of objects, grouped by model
        decoded (:obj:`dict`, optional): dictionary of objects that have already been decoded

    Returns:
        :obj:`tuple` of :obj:`ObservableExpression`, `InvalidAttribute` or `None`:
            tuple of cleaned value and cleaning error
    """
    # NOTE(review): none of the arguments are forwarded, and `expression` is
    # not defined in this method's scope — presumably this should delegate,
    # e.g. `expression.deserialize(value, objects, decoded=decoded)`. Confirm
    # against the expression module's API before relying on this method.
    return expression.deserialize()
class Parameter(KnowledgeBaseObject):
    """ Knowledge of parameters

    Attributes:
        cell (:obj:`Cell`): cell
        value (:obj:`float`): value (non-negative)
        error (:obj:`float`): measurement error (non-negative)
        units (:obj:`unit_registry.Unit`): units of value
        evidence (:obj:`list` of :obj:`Evidence`): evidence
        references (:obj:`list` of :obj:`Reference`): references
        identifiers (:obj:`list` of :obj:`DatabaseReference`): reference in external namespaces

    Related attributes:
        rate_law_expressions (:obj:`list` of :obj:`RateLawExpression`): rate law expressions that use a Parameter
    """
    cell = obj_tables.ManyToOneAttribute(Cell, related_name='parameters')
    value = FloatAttribute(min=0)
    error = FloatAttribute(min=0)
    # none=True permits parameters with unset/dimensionless units
    units = obj_tables.sci.units.UnitAttribute(unit_registry, none=True)
    references = obj_tables.ManyToManyAttribute(Reference, related_name='parameters')
    evidence = obj_tables.OneToManyAttribute('Evidence', related_name='parameters')
    identifiers = IdentifierAttribute(related_name='parameters')

    class Meta(obj_tables.Model.Meta):
        # NOTE(review): 'error' is not listed in attribute_order, so it will
        # not appear as a column — confirm this is intentional.
        attribute_order = ('id', 'name', 'synonyms', 'value', 'units', 'evidence', 'identifiers', 'references', 'comments')
        # parameter ids appear as plain NAME tokens in expressions
        expression_term_token_pattern = (token.NAME, )
class Validator(obj_tables.Validator):
    """ Validator for a knowledge base and its contents. """

    def run(self, knowledge_base, get_related=True):
        """ Validate a knowledge_base and return its errors

        Args:
            knowledge_base (:obj:`KnowledgeBase`): knowledge base
            get_related (:obj:`bool`, optional): if true, get all related objects

        Returns:
            :obj:`InvalidObjectSet` or `None`: list of invalid objects/models and their errors
        """
        # Delegates entirely to the generic obj_tables validator.
        return super(Validator, self).run(knowledge_base, get_related=get_related)
#####################
#####################
# Species types
class MetaboliteSpeciesType(SpeciesType):
    """ Knowledge of a metabolite

    Structure-derived quantities (formula, charge, molecular weight) are read
    from the `properties` collection when curated values are present, and
    otherwise computed from the InChI/SMILES structure via Open Babel.

    Attributes:
        synonyms (:obj:`str`): synonyms
        type (:obj:`pronto`): type (a subclass of the WC:metabolite ontology term)
    """
    synonyms = obj_tables.LongStringAttribute()
    type = obj_tables.sci.onto.OntoTermAttribute(kbOnt,
        terms = kbOnt['WC:metabolite'].subclasses(),
        none = True)
    class Meta(obj_tables.Model.Meta):
        verbose_name = 'Metabolite'
        attribute_order = ('id', 'name', 'synonyms', 'type', 'identifiers', 'references', 'comments')
    def get_structure(self):
        """ Get the structure

        Returns:
            :obj:`str`: InChI or SMILES structure

        Raises:
            :obj:`ValueError`: if structure has not been provided
        """
        structure = self.properties.get_one(property='structure')
        if structure:
            return structure.get_value()
        else:
            raise ValueError('The structure of {} has not been provided'.format(self.id))
    def calc_structure(self, ph=7.4, major_tautomer=False, keep_hydrogens=False, dearomatize=False):
        """ Get the major microspecies at the given pH

        Args:
            ph (:obj:`float`, optional): pH, default is 7.4
            major_tautomer (:obj:`bool`, optional): if :obj:`True`, use the major tautomeric in the calculation
            keep_hydrogens (:obj:`bool`, optional): if :obj:`True`, keep explicitly defined hydrogens
            dearomatize (:obj:`bool`, optional): if :obj:`True`, dearomatize molecule

        Returns:
            :obj:`str`: structure in the same encoding as the input (InChI or SMILES)
        """
        structure_str = self.get_structure()
        # Preserve the input encoding: InChI in -> InChI out, SMILES in -> SMILES out.
        if 'InChI=' in structure_str:
            return get_major_micro_species(structure_str, 'inchi', 'inchi',
                ph=ph, major_tautomer=major_tautomer, keep_hydrogens=keep_hydrogens, dearomatize=dearomatize)
        else:
            return get_major_micro_species(structure_str, 'smiles', 'smiles',
                ph=ph, major_tautomer=major_tautomer, keep_hydrogens=keep_hydrogens, dearomatize=dearomatize)
    def to_openbabel_mol(self):
        """ Convert species type to an Open Babel molecule

        Returns:
            :obj:`openbabel.OBMol`: Open Babel molecule
        """
        structure_str = self.get_structure()
        structure_type = 'inchi' if 'InChI=' in structure_str else 'smi'
        mol = openbabel.OBMol()
        obConversion = openbabel.OBConversion()
        obConversion.SetInFormat(structure_type)
        obConversion.ReadString(mol, structure_str)
        return mol
    def get_empirical_formula(self):
        """ Get the empirical formula (curated property if present, otherwise calculated)

        Returns:
            :obj:`chem.EmpiricalFormula`: empirical formula
        """
        prop = self.properties.get_one(property='empirical_formula')
        if prop:
            return chem.EmpiricalFormula(prop.get_value())
        return self.calc_empirical_formula()
    def calc_empirical_formula(self):
        """ Calculate the empirical formula from the structure

        Returns:
            :obj:`chem.EmpiricalFormula`: empirical formula
        """
        mol = self.to_openbabel_mol()
        return OpenBabelUtils.get_formula(mol)
    def get_charge(self):
        """ Get the charge (curated property if present, otherwise calculated)

        Returns:
            :obj:`int`: charge
        """
        prop = self.properties.get_one(property='charge')
        if prop:
            return prop.get_value()
        return self.calc_charge()
    def calc_charge(self):
        """ Calculate the charge from the structure

        Returns:
            :obj:`int`: charge
        """
        mol = self.to_openbabel_mol()
        return mol.GetTotalCharge()
    def get_mol_wt(self):
        """ Get the molecular weight

        Returns:
            :obj:`float`: molecular weight

        Raises:
            :obj:`ValueError`: if there is not enough information to calculate molecular weight
        """
        # Prefer the curated empirical formula; fall back to the structure.
        prop = self.properties.get_one(property='empirical_formula')
        if prop:
            return chem.EmpiricalFormula(prop.get_value()).get_molecular_weight()
        elif self.properties.get_one(property='structure'):
            mol = self.to_openbabel_mol()
            return mol.GetMolWt()
        else:
            raise ValueError('Molecular weight cannot be calculated because no structure or '
                'empirical formula has been provided for {}'.format(self.id))
class DnaSpeciesType(PolymerSpeciesType):
    """ Knowledge of a DNA species (chromosome)

    Attributes:
        sequence_path (:obj:`str`): path to sequence fasta file
        ploidy (:obj:`int`): ploidy
    """
    sequence_path = obj_tables.StringAttribute()
    ploidy = obj_tables.IntegerAttribute(min=0)
    class Meta(obj_tables.Model.Meta):
        verbose_name = 'Chromosome'
        attribute_order = ('id', 'name', 'sequence_path', 'circular', 'double_stranded',
            'ploidy', 'identifiers', 'references', 'comments')
    def get_seq(self, start=None, end=None):
        """ Get the sequence

        Args:
            start (:obj:`int`, optional): start coordinate (1-based) of the queried subsequence,
                default is the start of the full sequence
            end (:obj:`int`, optional): end coordinate (inclusive) of the queried subsequence,
                default is the end of the full sequence

        Returns:
            :obj:`Bio.Seq.Seq`: sequence
        """
        # The sequence is read lazily from the fasta file on every call.
        seq_idx = Fasta(self.sequence_path, as_raw=True)
        # NOTE: `start or 1` also maps start=0 to 1; coordinates are 1-based.
        start = start or 1
        end = end or len(seq_idx[self.id][:])
        seq = seq_idx[self.id][start-1:end]
        # NOTE(review): Bio.Alphabet was removed in Biopython 1.78; this call
        # requires an older Biopython — confirm the pinned version.
        return Bio.Seq.Seq(seq, alphabet=Bio.Alphabet.DNAAlphabet())
    def get_empirical_formula(self):
        """ Get the empirical formula for a DNA molecule with

        * 5' monophosphate (for linear molecules)
        * Deprotonated phosphate oxygens

        * Linear DNA

            :math:`N_A * dAMP + N_C * dCMP + N_G * dGMP + N_T * dTMP - (L - 1) * OH`

        * Circular DNA

            :math:`N_A * dAMP + N_C * dCMP + N_G * dGMP + N_T * dTMP - L * OH`

        N's in the sequence will be distributed into the four bases by preserving the original ratio

        Returns:
            :obj:`chem.EmpiricalFormula`: empirical formula
        """
        seq = self.get_seq()
        n_a = seq.upper().count('A')
        n_c = seq.upper().count('C')
        n_g = seq.upper().count('G')
        n_t = seq.upper().count('T')
        n_n = seq.upper().count('N')
        l = len(seq)
        # Distribute ambiguous N's proportionally to the observed base ratios;
        # T absorbs the rounding remainder so the counts still sum to l.
        # NOTE(review): a sequence consisting only of N's would raise
        # ZeroDivisionError here.
        known_bases = n_a + n_c + n_g + n_t
        n_a += round(n_a / known_bases * n_n)
        n_c += round(n_c / known_bases * n_n)
        n_g += round(n_g / known_bases * n_n)
        n_t = l - (n_a + n_c + n_g)
        if self.double_stranded:
            # Complementary strand: A pairs with T and C with G, so totals
            # across both strands are symmetric per base pair.
            n_a = n_a + n_t
            n_t = n_a
            n_c = n_c + n_g
            n_g = n_c
        formula = chem.EmpiricalFormula()
        # Per-nucleotide atom counts, minus one OH per phosphodiester bond
        # (circularity adds one bond; double strands double the correction,
        # with bools circular/double_stranded participating as 0/1).
        formula.C = 10 * n_a + 9 * n_c + 10 * n_g + 10 * n_t
        formula.H = 12 * n_a + 12 * n_c + 12 * n_g + 13 * n_t - \
            (l - 1 + self.circular) * (1 + self.double_stranded)
        formula.N = 5 * n_a + 3 * n_c + 5 * n_g + 2 * n_t
        formula.O = 6 * n_a + 7 * n_c + 7 * n_g + 8 * n_t - \
            (l - 1 + self.circular) * (1 + self.double_stranded)
        formula.P = n_a + n_c + n_g + n_t
        return formula
    def get_charge(self):
        """ Get the charge for a DNA molecule with

        * 5' monophosphate (for linear molecules)
        * Deprotonated phosphate oxygens

        * Linear DNA

            :math:`-L - 1`

        * Circular DNA

            :math:`-L`

        Returns:
            :obj:`int`: charge
        """
        # bools circular/double_stranded participate in the arithmetic as 0/1.
        return (-self.get_len() - 1 + self.circular) * (1 + self.double_stranded)
    def get_mol_wt(self):
        """ Get the molecular weight for a DNA molecule with

        * 5' monophosphate (for linear molecules)
        * Deprotonated phosphate oxygens

        * Linear DNA

            :math:`N_A * MW_{dAMP} + N_C * MW_{dCMP} + N_G * MW_{dGMP} + N_T * MW_{dTMP} - (L - 1) * MW_{OH}`

        * Circular DNA

            :math:`N_A * MW_{dAMP} + N_C * MW_{dCMP} + N_G * MW_{dGMP} + N_T * MW_{dTMP} - L * MW_{OH}`

        Returns:
            :obj:`float`: molecular weight
        """
        return self.get_empirical_formula().get_molecular_weight()
class ComplexSpeciesType(SpeciesType):
    """ Knowledge of a protein complex

    Attributes:
        formation_process (:obj:`pronto`): type of formation process
        subunits (:obj:`list` of :obj:`SpeciesTypeCoefficient`): subunits
        type (:obj:`pronto`): type of complex formation
    """
    subunits = SubunitAttribute(related_name='complexes')
    type = obj_tables.sci.onto.OntoTermAttribute(kbOnt,
        terms = kbOnt['WC:complex'].subclasses(),
        none=True)
    formation_process = obj_tables.sci.onto.OntoTermAttribute(kbOnt,
        terms = kbOnt['WC:complexFormation'].subclasses(),
        none=True)

    class Meta(obj_tables.Model.Meta):
        verbose_name = 'Complex'
        attribute_order = ('id', 'name', 'synonyms', 'type', 'formation_process', 'subunits',
            'identifiers', 'references', 'comments')

    def get_empirical_formula(self):
        """ Get the empirical formula: the sum of each subunit's formula,
        repeated once per (absolute, integer-truncated) stoichiometric copy.

        Returns:
            :obj:`chem.EmpiricalFormula`: empirical formula
        """
        total = chem.EmpiricalFormula()
        for subunit in self.subunits:
            copies = abs(int(subunit.coefficient))
            subunit_formula = subunit.species_type.get_empirical_formula()
            for _ in range(copies):
                total = total + subunit_formula
        return total

    def get_charge(self):
        """ Get the charge at physiological pH: subunit charges weighted by
        absolute stoichiometry.

        Returns:
            :obj:`int`: charge
        """
        return sum(abs(subunit.coefficient) * subunit.species_type.get_charge()
                   for subunit in self.subunits)

    def get_mol_wt(self):
        """ Get the molecular weight: subunit weights weighted by absolute
        stoichiometry.

        Returns:
            :obj:`float`: molecular weight
        """
        return sum(abs(subunit.coefficient) * subunit.species_type.get_mol_wt()
                   for subunit in self.subunits)
#####################
#####################
# Reactions and related classes
class RateLawDirection(int, CaseInsensitiveEnum):
    """ Rate law directions """
    backward = -1  # rate law describes the backward (reverse) reaction
    forward = 1  # rate law describes the forward reaction
class RateLawExpression(obj_tables.Model, Expression):
""" Rate law expression
Attributes:
expression (:obj:`str`): mathematical expression of the rate law
parameters (:obj:`list` of :obj:`Parameter`): parameters whose values are used in the rate law
species (:obj:`list` of :obj:`Species`): species whose dynamic concentrations are used in the rate law
observables (:obj:`list` of :obj:`Observable`): observables whose values are used in the rate law
Related attributes:
rate_law (:obj:`RateLaw`): the `RateLaw` which uses this `RateLawExpression`
"""
expression = LongStringAttribute(primary=True, unique=True, default='')
parameters | |
filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all matric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: - All metric datums in the given metric stream
:param metric_name: Name of the metric.
:param namespace: Namespace of the metric.
"""
self._values: typing.Dict[str, typing.Any] = {
"metric_name": metric_name,
"namespace": namespace,
}
if account is not None:
self._values["account"] = account
if color is not None:
self._values["color"] = color
if dimensions is not None:
self._values["dimensions"] = dimensions
if label is not None:
self._values["label"] = label
if period is not None:
self._values["period"] = period
if region is not None:
self._values["region"] = region
if statistic is not None:
self._values["statistic"] = statistic
if unit is not None:
self._values["unit"] = unit
@builtins.property
def account(self) -> typing.Optional[builtins.str]:
    """Account which this metric comes from.

    :default: - Deployment account.
    """
    return self._values.get("account")
@builtins.property
def color(self) -> typing.Optional[builtins.str]:
    """The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here.

    :default: - Automatic color
    """
    return self._values.get("color")
@builtins.property
def dimensions(self) -> typing.Optional[typing.Mapping[builtins.str, typing.Any]]:
    """Dimensions of the metric.

    :default: - No dimensions.
    """
    return self._values.get("dimensions")
@builtins.property
def label(self) -> typing.Optional[builtins.str]:
    """Label for this metric when added to a Graph in a Dashboard.

    :default: - No label
    """
    return self._values.get("label")
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
    """The period over which the specified statistic is applied.

    :default: Duration.minutes(5)
    """
    return self._values.get("period")
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
    """Region which this metric comes from.

    :default: - Deployment region.
    """
    return self._values.get("region")
@builtins.property
def statistic(self) -> typing.Optional[builtins.str]:
    """What function to use for aggregating.

    Can be one of the following:

    - "Minimum" | "min"
    - "Maximum" | "max"
    - "Average" | "avg"
    - "Sum" | "sum"
    - "SampleCount | "n"
    - "pNN.NN"

    :default: Average
    """
    return self._values.get("statistic")
@builtins.property
def unit(self) -> typing.Optional["Unit"]:
    """Unit used to filter the metric stream.

    Only refer to datums emitted to the metric stream with the given unit and
    ignore all others. Only useful when datums are being emitted to the same
    metric stream under different units.

    The default is to use all matric datums in the stream, regardless of unit,
    which is recommended in nearly all cases.

    CloudWatch does not honor this property for graphs.

    :default: - All metric datums in the given metric stream
    """
    return self._values.get("unit")
@builtins.property
def metric_name(self) -> builtins.str:
    """Name of the metric."""
    value = self._values.get("metric_name")
    assert value is not None, "Required property 'metric_name' is missing"
    return value
@builtins.property
def namespace(self) -> builtins.str:
    """Namespace of the metric."""
    value = self._values.get("namespace")
    assert value is not None, "Required property 'namespace' is missing"
    return value
def __eq__(self, rhs: typing.Any) -> builtins.bool:
    """Structural equality: same class and identical stored property values."""
    if not isinstance(rhs, self.__class__):
        return False
    return rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
    # Delegates through the reflected equality check (rhs.__eq__ runs first).
    return not (rhs == self)
def __repr__(self) -> str:
    """Debug representation listing every stored property."""
    fields = ", ".join(
        "{}={}".format(key, repr(val)) for key, val in self._values.items()
    )
    return "MetricProps(%s)" % fields
@jsii.data_type(
    jsii_type="@aws-cdk/aws-cloudwatch.MetricRenderingProperties",
    jsii_struct_bases=[],
    name_mapping={
        "period": "period",
        "color": "color",
        "label": "label",
        "stat": "stat",
    },
)
class MetricRenderingProperties:
    def __init__(
        self,
        *,
        period: jsii.Number,
        color: typing.Optional[builtins.str] = None,
        label: typing.Optional[builtins.str] = None,
        stat: typing.Optional[builtins.str] = None,
    ) -> None:
        """(deprecated) Custom rendering properties that override the default rendering properties specified in the yAxis parameter of the widget object.

        :param period: (deprecated) How many seconds to aggregate over.
        :param color: (deprecated) The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here.
        :param label: (deprecated) Label for the metric.
        :param stat: (deprecated) Aggregation function to use (can be either simple or a percentile).

        :deprecated: Replaced by MetricConfig.

        :stability: deprecated
        """
        # Only optional values that were actually provided are stored.
        self._values: typing.Dict[str, typing.Any] = {"period": period}
        for key, val in (("color", color), ("label", label), ("stat", stat)):
            if val is not None:
                self._values[key] = val

    @builtins.property
    def period(self) -> jsii.Number:
        """(deprecated) How many seconds to aggregate over.

        :stability: deprecated
        """
        value = self._values.get("period")
        assert value is not None, "Required property 'period' is missing"
        return value

    @builtins.property
    def color(self) -> typing.Optional[builtins.str]:
        """(deprecated) The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here.

        :stability: deprecated
        """
        return self._values.get("color")

    @builtins.property
    def label(self) -> typing.Optional[builtins.str]:
        """(deprecated) Label for the metric.

        :stability: deprecated
        """
        return self._values.get("label")

    @builtins.property
    def stat(self) -> typing.Optional[builtins.str]:
        """(deprecated) Aggregation function to use (can be either simple or a percentile).

        :stability: deprecated
        """
        return self._values.get("stat")

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        fields = ", ".join(
            "{}={}".format(key, repr(val)) for key, val in self._values.items()
        )
        return "MetricRenderingProperties(%s)" % fields
@jsii.data_type(
jsii_type="@aws-cdk/aws-cloudwatch.MetricStatConfig",
jsii_struct_bases=[],
name_mapping={
"metric_name": "metricName",
"namespace": "namespace",
"period": "period",
"statistic": "statistic",
"account": "account",
"dimensions": "dimensions",
"region": "region",
"unit_filter": "unitFilter",
},
)
class MetricStatConfig:
def __init__(
    self,
    *,
    metric_name: builtins.str,
    namespace: builtins.str,
    period: aws_cdk.core.Duration,
    statistic: builtins.str,
    account: typing.Optional[builtins.str] = None,
    dimensions: typing.Optional[typing.List[Dimension]] = None,
    region: typing.Optional[builtins.str] = None,
    unit_filter: typing.Optional["Unit"] = None,
) -> None:
    """Properties for a concrete metric.

    NOTE: ``unit`` is no longer on this object since it is only used for ``Alarms``, and doesn't mean what one
    would expect it to mean there anyway. It is most likely to be misused.

    :param metric_name: Name of the metric.
    :param namespace: Namespace of the metric.
    :param period: How many seconds to aggregate over.
    :param statistic: Aggregation function to use (can be either simple or a percentile).
    :param account: Account which this metric comes from. Default: Deployment account.
    :param dimensions: The dimensions to apply to the alarm. Default: []
    :param region: Region which this metric comes from. Default: Deployment region.
    :param unit_filter: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. This field has been renamed from plain ``unit`` to clearly communicate its purpose. Default: - Refer to all metric datums
    """
    # Required properties are always stored; optional ones only when provided.
    self._values: typing.Dict[str, typing.Any] = {
        "metric_name": metric_name,
        "namespace": namespace,
        "period": period,
        "statistic": statistic,
    }
    optional = (
        ("account", account),
        ("dimensions", dimensions),
        ("region", region),
        ("unit_filter", unit_filter),
    )
    for key, val in optional:
        if val is not None:
            self._values[key] = val
@builtins.property
def metric_name(self) -> builtins.str:
    """Name of the metric."""
    value = self._values.get("metric_name")
    assert value is not None, "Required property 'metric_name' is missing"
    return value
@builtins.property
def namespace(self) -> builtins.str:
    """Namespace of the metric."""
    value = self._values.get("namespace")
    assert value is not None, "Required property 'namespace' is missing"
    return value
@builtins.property
def period(self) -> aws_cdk.core.Duration:
    """How many seconds to aggregate over."""
    value = self._values.get("period")
    assert value is not None, "Required property 'period' is missing"
    return value
@builtins.property
def statistic(self) -> builtins.str:
    """Aggregation function to use (can be either simple or a percentile)."""
    value = self._values.get("statistic")
    assert value is not None, "Required property 'statistic' is missing"
    return value
@builtins.property
def account(self) -> typing.Optional[builtins.str]:
    """Account which this metric comes from.

    :default: Deployment account.
    """
    return self._values.get("account")
@builtins.property
def dimensions(self) -> typing.Optional[typing.List[Dimension]]:
"""The dimensions to apply to the alarm.
:default: []
"""
result = self._values.get("dimensions")
return | |
<filename>yamtbx/dataproc/auto/command_line/multi_check_cell_consistency.py
"""
(c) RIKEN 2015. All rights reserved.
Author: <NAME>
This software is released under the new BSD License; see LICENSE.
"""
import iotbx.phil
from cctbx import uctbx
from cctbx import sgtbx
from cctbx import crystal
from cctbx.crystal import reindex
from cctbx.uctbx.determine_unit_cell import NCDist
from cctbx.sgtbx import pointgroup_tools
from yamtbx.dataproc.xds.xparm import XPARM
from yamtbx.dataproc.xds.xds_ascii import XDS_ASCII
from yamtbx.dataproc import pointless
from yamtbx.dataproc.xds import correctlp
from yamtbx.dataproc.dials.command_line import run_dials_auto
from yamtbx import util
from yamtbx.util import xtal
import os
import sys
import networkx as nx
import numpy
master_params_str = """
topdir = None
.type = path
xdsdir = None
.type = path
.multiple = true
.help = Either topdir= or (multiple) xdsdir= should be specified.
tol_length = 0.1
.type = float
.help = relative_length_tolerance
tol_angle = 5
.type = float
.help = absolute_angle_tolerance in degree
do_pointless = False
.type = bool
.help = Run pointless for largest group data to determine symmetry
"""
class CellGraph:
def __init__(self, tol_length=None, tol_angle=None):
    """Set up an empty graph of mutually compatible P1 cells.

    tol_length: relative length tolerance (defaults to 0.1)
    tol_angle: absolute angle tolerance in degrees (defaults to 5)
    """
    self.tol_length = tol_length or 0.1
    self.tol_angle = tol_angle or 5
    self.G = nx.Graph()
    self.p1cells = {}  # key -> P1 cell
    self.dirs = {}     # key -> xds directory
    self.symms = {}    # key -> crystal symmetry
    self.cbops = {}    # (key1, key2) -> change-of-basis operator
# __init__()
def get_p1cell_and_symm(self, xdsdir):
dials_hkl = os.path.join(xdsdir, "DIALS.HKL")
xac_file = util.return_first_found_file(("XDS_ASCII.HKL", "XDS_ASCII.HKL.org",
"XDS_ASCII_fullres.HKL.org", "XDS_ASCII_fullres.HKL",
"XDS_ASCII.HKL_noscale.org", "XDS_ASCII.HKL_noscale"),
wd=xdsdir)
p1cell, xs = None, None
if xac_file:
correct_lp = util.return_first_found_file(("CORRECT.LP_noscale", "CORRECT.LP"), wd=xdsdir)
if not correct_lp:
print "CORRECT.LP not found in %s" % xdsdir
return None, None
p1cell = correctlp.get_P1_cell(correct_lp, force_obtuse_angle=True)
try:
xac = XDS_ASCII(xac_file, read_data=False)
except:
print "Invalid XDS_ASCII format:", xac_file
return None, None
xs = xac.symm
elif os.path.isfile(dials_hkl): # DIALS
xs = run_dials_auto.get_most_possible_symmetry(xdsdir)
if xs is None:
print "Cannot get crystal symmetry:", xdsdir
return None, None
p1cell = list(xs.niggli_cell().unit_cell().parameters())
# force obtuse angle
tmp = map(lambda x: (x[0]+3,abs(90.-x[1])), enumerate(p1cell[3:])) # Index and difference from 90 deg
tmp.sort(key=lambda x: x[1], reverse=True)
if p1cell[tmp[0][0]] < 90:
tmp = map(lambda x: (x[0]+3,90.-x[1]), enumerate(p1cell[3:])) # Index and 90-val.
tmp.sort(key=lambda x: x[1], reverse=True)
for i,v in tmp[:2]: p1cell[i] = 180.-p1cell[i]
p1cell = uctbx.unit_cell(p1cell)
return p1cell, xs
# get_p1cell_and_symm()
def add_proc_result(self, key, xdsdir):
if key in self.G: return #G.remove_node(key)
p1cell, symm = self.get_p1cell_and_symm(xdsdir)
if None in (p1cell, symm): return
self.p1cells[key] = p1cell
self.dirs[key] = xdsdir
self.symms[key] = symm
connected_nodes = []
for node in list(self.G.nodes()):
other_cell = self.p1cells[node]
if other_cell.is_similar_to(p1cell, self.tol_length, self.tol_angle):
connected_nodes.append(node)
else:
cosets = reindex.reindexing_operators(crystal.symmetry(other_cell, 1),
crystal.symmetry(p1cell, 1),
self.tol_length, self.tol_angle)
if cosets.double_cosets is not None:
self.cbops[(node,key)] = cosets.combined_cb_ops()[0]
print p1cell, other_cell, self.cbops[(node,key)], other_cell.change_basis(self.cbops[(node,key)])
connected_nodes.append(node)
# Add nodes and edges
self.G.add_node(key)
for node in connected_nodes:
self.G.add_edge(node, key)
# add_proc_result()
def _transformed_cells(self, keys):
cells = [self.p1cells[keys[0]].parameters()]
for key in keys[1:]:
cell = self.p1cells[key]
if (keys[0], key) in self.cbops:
cell = cell.change_basis(self.cbops[(keys[0], key)])
elif (key, keys[0]) in self.cbops:
cell = cell.change_basis(self.cbops[(key, keys[0])].inverse()) # correct??
cells.append(cell.parameters())
return cells
# _transformed_cells()
def _average_p1_cell(self, keys):
cells = numpy.array(self._transformed_cells(keys))
return map(lambda i: cells[:,i].mean(), xrange(6))
# _average_p1_cell()
def group_xds_results(self, out, show_details=True):
print >>out, "Making groups from %d results\n" % len(self.p1cells) # Show total and failed!!
self.groups = map(lambda g: list(g), nx.connected_components(self.G))
self.groups.sort(key=lambda x:-len(x))
self.grouped_dirs = []
self.reference_symmetries = []
#details_str = "group file a b c al be ga\n"
#ofs_debug = open("cell_debug.dat", "w")
#ofs_debug.write("group xdsdir a b c al be ga\n")
for i, keys in enumerate(self.groups):
self.reference_symmetries.append([])
avg_cell = uctbx.unit_cell(self._average_p1_cell(keys))
print >>out, "[%2d]"%(i+1), len(keys), "members:"
print >>out, " Averaged P1 Cell=", " ".join(map(lambda x:"%.2f"%x, avg_cell.parameters()))
#from yamtbx.util.xtal import format_unit_cell
#for xd, uc in zip(map(lambda k:self.dirs[k], keys), self._transformed_cells(keys)):
# ofs_debug.write("%3d %s %s\n" % (i, xd, format_unit_cell(uc)))
#print >>out, " Members=", keys
if show_details:
# by explore_metric_symmetry
sg_explorer = pointgroup_tools.space_group_graph_from_cell_and_sg(avg_cell, sgtbx.space_group_info("P1").group(), max_delta=10)
tmp = []
for obj in sg_explorer.pg_graph.graph.node_objects.values():
pg = obj.allowed_xtal_syms[0][0].space_group().build_derived_reflection_intensity_group(True).info()
cbop = obj.allowed_xtal_syms[0][1]
trans_cell = avg_cell.change_basis(cbop)
if pg.group() == sgtbx.space_group_info("I2").group():
print >>out, "Warning!! I2 cell was given." # this should not happen..
# Transform to best cell
fbc = crystal.find_best_cell(crystal.symmetry(trans_cell, space_group_info=pg,
assert_is_compatible_unit_cell=False),
best_monoclinic_beta=False) # If True, C2 may result in I2..
cbop = fbc.cb_op() * cbop
trans_cell = trans_cell.change_basis(fbc.cb_op())
#print "debug:: op-to-best-cell=", fbc.cb_op()
# If beta<90 in monoclinic system, force it to have beta>90
if pg.group().crystal_system() == "Monoclinic" and trans_cell.parameters()[4] < 90:
op = sgtbx.change_of_basis_op("-h,-k,l")
cbop = op * cbop
trans_cell = trans_cell.change_basis(op)
tmp.append([0, pg, trans_cell, cbop, pg.type().number()])
# Calculate frequency
for pgnum in set(map(lambda x: x[-1], tmp)):
sel = filter(lambda x: tmp[x][-1]==pgnum, xrange(len(tmp)))
pgg = tmp[sel[0]][1].group()
if len(sel) == 1:
freq = len(filter(lambda x: self.symms[x].space_group().build_derived_reflection_intensity_group(True) == pgg, keys))
tmp[sel[0]][0] = freq
else:
trans_cells = map(lambda x: numpy.array(tmp[x][2].parameters()), sel)
for key in keys:
if self.symms[key].space_group().build_derived_reflection_intensity_group(True) != pgg: continue
cell = numpy.array(self.symms[key].unit_cell().parameters())
celldiffs = map(lambda tc: sum(abs(tc-cell)), trans_cells)
min_key = celldiffs.index(min(celldiffs))
tmp[sel[min_key]][0] += 1
print >>out, " Possible symmetries:"
print >>out, " freq symmetry a b c alpha beta gamma reindex"
for freq, pg, trans_cell, cbop, pgnum in sorted(tmp, key=lambda x:x[-1]):
print >> out, " %4d %-10s %s %s" % (freq, pg, " ".join(map(lambda x:"%6.2f"%x, trans_cell.parameters())), cbop)
self.reference_symmetries[i].append((pg, trans_cell, freq))
print >>out, ""
dirs = map(lambda x: self.dirs[x], keys)
self.grouped_dirs.append(dirs)
# group_xds_results()
def get_reference_symm(self, group_idx, rs_idx):
# XXX should be able to specify space group with screws
if group_idx >= len(self.reference_symmetries):
return None
if rs_idx >= len(self.reference_symmetries[group_idx]):
return None
pg, cell, freq = self.reference_symmetries[group_idx][rs_idx]
return crystal.symmetry(cell,
space_group_info=pg,
assert_is_compatible_unit_cell=False)
# get_reference_symm()
def get_selectable_symms(self, group_idx):
if group_idx >= len(self.reference_symmetries):
return []
return self.reference_symmetries[group_idx]
# get_selectable_symms()
def get_most_frequent_symmetry(self, group_idx):
# Should call after self.group_xds_results()
symms = filter(lambda x: x[2]>0, self.reference_symmetries[group_idx])
symms.sort(key=lambda x: x[2], reverse=True)
if len(symms) == 0: return None
if len(symms) > 1 and symms[0][0].group() == sgtbx.space_group_info("P1").group():
return crystal.symmetry(symms[1][1], space_group_info=symms[1][0],
assert_is_compatible_unit_cell=False)
else:
return crystal.symmetry(symms[0][1], space_group_info=symms[0][0],
assert_is_compatible_unit_cell=False)
# get_most_frequent_symmetry()
def get_symmetry_reference_matched(self, group_idx, ref_cs):
ref_pg = ref_cs.space_group().build_derived_reflection_intensity_group(True)
ref_cell = ref_cs.unit_cell()
symms = filter(lambda x: x[0].group()==ref_pg, self.reference_symmetries[group_idx])
if len(symms) == 0: return None
if len(symms) > 1:
# TODO if different too much?
celldiffs = map(lambda s: s[1].bases_mean_square_difference(ref_cell), symms)
min_idx = celldiffs.index(min(celldiffs))
return crystal.symmetry(symms[min_idx][1], space_group_info=symms[min_idx][0],
assert_is_compatible_unit_cell=False)
else:
return crystal.symmetry(symms[0][1], space_group_info=symms[0][0],
assert_is_compatible_unit_cell=False)
# get_symmetry_reference_matched()
def get_group_symmetry_reference_matched(self, ref_cs):
ref_v6 = xtal.v6cell(ref_cs.niggli_cell().unit_cell())
ncdists = []
for i, keys in enumerate(self.groups):
v6 = xtal.v6cell(uctbx.unit_cell(self._average_p1_cell(keys)).niggli_cell())
ncdists.append(NCDist(v6, ref_v6))
print "Group %d: NCDist to reference: %f" % (i+1, ncdists[-1])
return ncdists.index(min(ncdists))+1
# get_group_symmetry_reference_matched()
def is_all_included(self, keys):
all_nodes = set(self.G.nodes_iter())
return all_nodes.issuperset(keys)
# is_all_included()
def get_subgraph(self, keys):
copied_obj = CellGraph(self.tol_length, self.tol_angle)
copied_obj.G = self.G.subgraph(keys)
copied_obj.p1cells = dict((k, self.p1cells[k]) for k in keys)
copied_obj.dirs = dict((k, self.dirs[k]) for k in keys)
copied_obj.symms = dict((k, self.symms[k]) for k in keys)
copied_obj.cbops = dict((k, self.cbops[k]) for k in self.cbops if k[0] in keys or k[1] in keys) # XXX may be slow
return copied_obj
# get_subgraph()
# class CellGraph
def run(params, out=sys.stdout):
    """Group all XDS/DIALS results found via params and report on the largest group.

    Returns the populated CellGraph. Python 2 code (print statements).
    """
    cm = CellGraph(tol_length=params.tol_length, tol_angle=params.tol_angle)
    if not params.xdsdir and params.topdir:
        # Collect every directory under topdir that contains XDS or DIALS output.
        params.xdsdir = map(lambda x: x[0], filter(lambda x: any(map(lambda y: y.startswith("XDS_ASCII.HKL"), x[2])) or "DIALS.HKL" in x[2],
                                                   os.walk(params.topdir)))
    for i, xdsdir in enumerate(params.xdsdir):
        cm.add_proc_result(i, xdsdir)
    cm.group_xds_results(out)
    ret = cm.grouped_dirs
    if len(ret) == 0:
        return cm
    # Summarize each member of the largest group: space group, ISa, completeness.
    print >>out
    print >>out, "About the largest group:"
    for idx, wd in enumerate(ret[0]):
        xac_hkl = os.path.join(wd, "XDS_ASCII.HKL")
        correct_lp = os.path.join(wd, "CORRECT.LP")
        print >>out, "%.3d %s" % (idx, os.path.relpath(wd, params.topdir) if params.topdir is not None else wd),
        if not os.path.isfile(xac_hkl):
            print >>out, "Unsuccessful"
            continue
        sg = XDS_ASCII(xac_hkl, read_data=False).symm.space_group_info()
        clp = correctlp.CorrectLp(correct_lp)
        if "all" in clp.table:
            cmpl = clp.table["all"]["cmpl"][-1]
        else:
            cmpl = float("nan")
        ISa = clp.a_b_ISa[-1]
        print >>out, "%10s ISa=%5.2f Cmpl=%5.1f " % (sg, ISa, cmpl)
    if params.do_pointless:
        # Optionally run pointless over the largest group to suggest symmetry.
        worker = pointless.Pointless()
        files = map(lambda x: os.path.join(x, "INTEGRATE.HKL"), ret[0])
        #print files
        files = filter(lambda x: os.path.isfile(x), files)
        print >>out, "\nRunning pointless for the largest member."
        result = worker.run_for_symm(xdsin=files,
                                     logout="pointless.log",
                                     tolerance=10, d_min=5)
        if "symm" in result:
            print >>out, " pointless suggested", result["symm"].space_group_info()
    if 0: # dead debug code: visualize the similarity graph
        import pylab
        # NOTE(review): `G` is undefined in this scope (presumably cm.G was
        # intended); harmless only because the block is guarded by `if 0:`.
        pos = nx.spring_layout(G)
        #pos = nx.spectral_layout(G)
        #pos = nx.circular_layout(G)
        #nx.draw_networkx_nodes(G, pos, node_size = 100, nodelist=others, node_color = 'w')
        nx.draw_networkx_nodes(G, pos, node_size = 100, node_color = 'w')
        nx.draw_networkx_edges(G, pos, width = 1)
        nx.draw_networkx_labels(G, pos, font_size = 12, font_family = 'sans-serif', font_color = 'r')
        pylab.xticks([])
        pylab.yticks([])
        pylab.savefig("network.png")
        pylab.show()
    return cm
# run()
def run_from_args(argv):
cmdline = iotbx.phil.process_command_line(args=argv,
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
for arg in args:
if os.path.isdir(arg) and params.topdir is None:
params.topdir = arg
if not params.xdsdir and | |
# calculate last-modified time and etag
if zip:
info = zip.getinfo(paths[-1])
lm = util.zip_timestamp(info)
last_modified = http_date(lm)
etag = "%s-%s-%s" % (
lm,
info.file_size,
adler32(info.filename.encode("utf-8")) & 0xFFFFFFFF,
)
else:
stats = os.stat(paths[0])
last_modified = http_date(stats.st_mtime)
etag = "%s-%s-%s" % (
stats.st_mtime,
stats.st_size,
adler32(paths[0].encode("utf-8")) & 0xFFFFFFFF,
)
if not is_resource_modified(request.environ, etag=etag, last_modified=last_modified):
return http_response(status=304)
headers = {
'Cache-Control': 'no-cache',
'Last-Modified': last_modified,
'ETag': etag,
}
if host.config['app']['content_security_policy'] == 'strict':
headers['Content-Security-Policy'] = "connect-src 'none'; form-action 'none';"
# prepare content
if zip:
with zip.open(info) as f:
body = f.read().decode('UTF-8')
else:
with open(paths[0], 'r', encoding='UTF-8') as f:
body = f.read()
body = render_template('markdown.html',
sitename=host.name,
is_local=is_local_access(),
base=request.script_root,
path=request.path,
pathparts=request.paths,
content=commonmark.commonmark(body),
)
return http_response(body, headers=headers)
class Request(flask.Request):
    """Request subclass adding ZIP-aware path helpers and query shortcuts."""

    @cached_property
    def paths(self):
        """Like request.path, but with ZIP subpaths resolved."""
        return get_archive_path(self.path)

    @cached_property
    def localpath(self):
        """Corresponding filesystem path of the requested path."""
        # Don't use os.path.join as if may result in an arbitrary path if
        # self.path is an absolute path on Windows.
        trimmed = self.path.strip('/')
        return os.path.normpath(host.chroot + os.sep + trimmed)

    @cached_property
    def localpaths(self):
        """Like localpath, but with ZIP subpaths resolved."""
        resolved = self.paths.copy()
        resolved[0] = os.path.normpath(host.chroot + os.sep + resolved[0].lstrip('/'))
        return resolved

    @cached_property
    def localrealpath(self):
        """Like localpath, but with symlinks resolved."""
        return os.path.realpath(self.localpath)

    @cached_property
    def localmimetype(self):
        """Mimetype of the requested path."""
        guessed, _ = mimetypes.guess_type(self.localrealpath)
        return guessed

    @cached_property
    def action(self):
        """Shortcut of the requested action ('action' overrides 'a')."""
        fallback = request.values.get('a', default='view')
        return request.values.get('action', default=fallback)

    @cached_property
    def format(self):
        """Shortcut of the requested format ('format' overrides 'f')."""
        fallback = request.values.get('f')
        return request.values.get('format', default=fallback)
def handle_action_token(func):
    """A decorator that validates — and then consumes — the request's access token."""
    @functools.wraps(func)
    def checked(*args, **kwargs):
        supplied = request.values.get('token') or ''
        if not host.token_validate(supplied):
            abort(400, 'Invalid access token.')
        # Tokens are single-use: discard once validated.
        host.token_delete(supplied)
        return func(*args, **kwargs)
    return checked
def handle_action_advanced(func):
    """A decorator helping advanced commands.

    - Verify POST method.
    - Provide a default success response when the command returns None.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        fmt = request.format
        if request.method != 'POST':
            abort(405, valid_methods=['POST'])
        outcome = func(*args, **kwargs)
        if outcome is not None:
            return outcome
        if fmt:
            return http_response('Command run successfully.', format=fmt)
        return http_response(status=204)
    return wrapper
def handle_action_writing(func):
    """A decorator that refuses write actions targeting the root directory."""
    @functools.wraps(func)
    def guarded(*args, **kwargs):
        if os.path.abspath(request.localpath) == host.chroot:
            abort(403, "Unable to operate the root directory.")
        return func(*args, **kwargs)
    return guarded
def handle_action_renaming(func):
    """A decorator function that helps handling a move/copy action.

    Verifies that the source exists (on disk or inside a ZIP), that a
    'target' parameter is supplied and stays inside the chroot, and that
    nothing exists at the target yet; then calls
    func(sourcepaths=..., targetpaths=..., ...).
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        localpaths = request.localpaths
        if len(localpaths) > 1:
            # Source addressed inside a ZIP: accept either a file entry or
            # an (explicit or implicit) directory.
            with open_archive_path(localpaths) as zip:
                try:
                    zip.getinfo(localpaths[-1])
                except KeyError:
                    if not util.zip_hasdir(zip, localpaths[-1] + '/'):
                        abort(404, "Source does not exist.")
        else:
            if not os.path.lexists(localpaths[0]):
                abort(404, "Source does not exist.")
        target = request.values.get('target')
        if target is None:
            abort(400, 'Target is not specified.')
        targetpaths = get_archive_path(target)
        targetpaths[0] = os.path.normpath(os.path.join(host.chroot, targetpaths[0].lstrip('/')))
        # Reject targets escaping the chroot (e.g. via "..").
        if not targetpaths[0].startswith(os.path.join(host.chroot, '')):
            abort(403, "Unable to operate beyond the root directory.")
        if len(targetpaths) > 1:
            # Target inside a ZIP: must exist neither as a file entry nor as
            # a directory.
            with open_archive_path(targetpaths) as zip:
                try:
                    zip.getinfo(targetpaths[-1])
                except KeyError:
                    if util.zip_hasdir(zip, targetpaths[-1] + '/'):
                        abort(400, 'Found something at target.')
                else:
                    abort(400, 'Found something at target.')
        else:
            if os.path.lexists(targetpaths[0]):
                abort(400, 'Found something at target.')
        return func(sourcepaths=localpaths, targetpaths=targetpaths, *args, **kwargs)
    return wrapper
def action_unknown():
    """Fallback handler invoked for any unrecognized action name."""
    abort(400, "Action not supported.")
def action_view():
    """Show the content of a file or list a directory.

    If formatted, show information of the file or directory.
    """
    # info for other output formats
    if request.format:
        return action_info()
    localpaths = request.localpaths
    mimetype = request.localmimetype
    if len(localpaths) > 1:
        # Path addresses an entry inside a ZIP archive.
        with open_archive_path(localpaths) as zip:
            try:
                info = zip.getinfo(localpaths[-1])
            except KeyError:
                # File does not exist. List directory only when URL
                # suffixed with "/", as it's not a common operation,
                # and it's costy to check for directory existence in
                # a ZIP.
                if request.path.endswith('/'):
                    try:
                        return handle_directory_listing(localpaths, zip, redirect_slash=False)
                    except util.ZipDirNotFoundError:
                        abort(404)
                abort(404)
            else:
                # view archive file
                if mimetype in ("application/html+zip", "application/x-maff"):
                    return handle_archive_viewing(localpaths, mimetype)
                # view markdown
                if mimetype == "text/markdown":
                    return handle_markdown_output(localpaths, zip)
                # convert meta refresh to 302 redirect
                if localpaths[-1].lower().endswith('.htm'):
                    with zip.open(info) as fh:
                        fh = zip_stream(fh)
                        target = util.get_meta_refresh(fh).target
                    if target is not None:
                        # Keep several chars as javascript encodeURI do,
                        # plus "%" as target may have already been escaped.
                        parts = urlsplit(urljoin(request.url, quote(target, ";,/?:@&=+$-_.!~*'()#%")))
                        new_url = urlunsplit((
                            parts.scheme,
                            parts.netloc,
                            quote_path(unquote(parts.path)),
                            parts.query,
                            parts.fragment,
                        ))
                        return redirect(new_url)
                # show static file for other cases
                response = zip_static_file(zip, localpaths[-1], mimetype=mimetype)
    else:
        localpath = localpaths[0]
        # handle directory
        if os.path.isdir(localpath):
            return handle_directory_listing(localpaths)
        # handle file
        elif os.path.isfile(localpath):
            # view archive file
            if mimetype in ("application/html+zip", "application/x-maff"):
                return handle_archive_viewing(localpaths, mimetype)
            # view markdown
            if mimetype == "text/markdown":
                return handle_markdown_output(localpaths)
            # convert meta refresh to 302 redirect
            if request.localrealpath.lower().endswith('.htm'):
                target = util.get_meta_refresh(localpath).target
                if target is not None:
                    # Keep several chars as javascript encodeURI do,
                    # plus "%" as target may have already been escaped.
                    parts = urlsplit(urljoin(request.url, quote(target, ";,/?:@&=+$-_.!~*'()#%")))
                    new_url = urlunsplit((
                        parts.scheme,
                        parts.netloc,
                        quote_path(unquote(parts.path)),
                        parts.query,
                        parts.fragment,
                    ))
                    return redirect(new_url)
            # show static file for other cases
            response = static_file(localpath, mimetype=mimetype)
        else:
            abort(404)
    # don't include charset
    m, p = parse_options_header(response.headers.get('Content-Type'))
    try:
        del p['charset']
    except KeyError:
        pass
    response.headers.set('Content-Type', dump_options_header(m, p))
    return response
def action_source():
    """Serve the requested file's content as inline plain text."""
    if request.format:
        abort(400, "Action not supported.")
    paths = request.localpaths
    if len(paths) > 1:
        # Entry inside a ZIP archive: stream it out of the archive.
        with open_archive_path(paths) as zip:
            response = zip_static_file(zip, paths[-1])
    else:
        response = static_file(paths[0])
    # show as inline plain text
    # @TODO: Chromium (80) seems to ignore header mimetype for certain types
    # like image and zip
    charset = request.values.get('e', 'utf-8')
    charset = request.values.get('encoding', default=charset)
    response.headers.set('Content-Type', 'text/plain; charset=' + quote(charset))
    response.headers.set('Content-Disposition', 'inline')
    return response
def action_download():
    """Download a file or directory.

    A directory (on disk or inside a ZIP) is packed into an in-memory ZIP;
    entries may be restricted with the repeatable 'i' query parameter.

    @TODO: support streaming ZIP output to prevent memory exhaustion for a large directory
    """
    if request.format:
        abort(400, "Action not supported.")
    localpaths = request.localpaths
    filter = request.values.getlist('i')
    if len(localpaths) > 1:
        with open_archive_path(localpaths) as zip:
            try:
                zip.getinfo(localpaths[-1])
            except KeyError:
                base = localpaths[-1] + '/' if localpaths[-1] else ''
                infos = [i for i in zip.infolist() if i.filename.startswith(base)]
                # not exist
                if base and not len(infos):
                    abort(404)
                filter = set(filter)
                filter_d = {f + '/' for f in filter}
                # directory (explicit or implicit): repack members under base
                # into a fresh in-memory ZIP
                filename = (localpaths[-1] or os.path.basename(localpaths[-2])) + '.zip'
                mimetype, _ = mimetypes.guess_type(filename)
                fh = io.BytesIO()
                with zipfile.ZipFile(fh, 'w') as zh:
                    cut = len(base)
                    for info in infos:
                        info.filename = info.filename[cut:]
                        # exclude the directory itself
                        if not info.filename:
                            continue
                        # apply the filter
                        if filter:
                            if info.filename not in filter:
                                if not any(info.filename.startswith(f) for f in filter_d):
                                    continue
                        zh.writestr(info, zip.read(info))
                fh.seek(0)
                response = flask.send_file(fh, mimetype=mimetype)
                response.headers.set('Cache-Control', 'no-store')
            else:
                # plain file inside the archive
                filename = os.path.basename(request.localrealpath)
                response = zip_static_file(zip, localpaths[-1], mimetype=request.localmimetype)
    else:
        if os.path.isdir(localpaths[0]):
            filename = os.path.basename(request.localrealpath) + '.zip'
            mimetype, _ = mimetypes.guess_type(filename)
            fh = io.BytesIO()
            util.zip_compress(fh, localpaths[0], '', filter=filter)
            fh.seek(0)
            response = flask.send_file(fh, mimetype=mimetype)
            response.headers.set('Cache-Control', 'no-store')
        else:
            filename = os.path.basename(request.localrealpath)
            response = static_file(localpaths[0])
    # BUG FIX: the header previously contained the literal text "(unknown)"
    # and ignored the quoted filename computed on the line above; emit the
    # actual filename in both RFC 8187 (filename*) and plain (filename) form.
    filename = quote_path(filename)
    response.headers.set('Content-Disposition',
                         f'''attachment; filename*=UTF-8''{filename}; filename="{filename}"''')
    return response
def action_info():
    """Return metadata (name/type/size/mtime/mime) of a path in the requested format."""
    fmt = request.format
    if not fmt:
        abort(400, "Action not supported.")
    paths = request.localpaths
    mime = request.localmimetype
    if len(paths) > 1:
        with open_archive_path(paths) as zip:
            entry = util.zip_file_info(zip, paths[-1])
    else:
        entry = util.file_info(paths[0])
    payload = {
        'name': entry.name,
        'type': entry.type,
        'size': entry.size,
        'last_modified': entry.last_modified,
        'mime': mime,
    }
    return http_response(payload, format=fmt)
def action_list():
    """List entries of a directory (on disk or inside an archive)."""
    fmt = request.format
    if not fmt:
        abort(400, "Action not supported.")
    paths = request.localpaths
    if len(paths) > 1:
        # Directory addressed inside a ZIP archive.
        try:
            return handle_directory_listing(paths, redirect_slash=False, format=fmt)
        except util.ZipDirNotFoundError:
            abort(404, "Directory does not exist.")
    if os.path.isdir(paths[0]):
        return handle_directory_listing(paths, redirect_slash=False, format=fmt)
    abort(404, "Directory does not exist.")
def action_static():
    """Serve a static asset belonging to the current theme."""
    if request.format:
        abort(400, "Action not supported.")
    asset = host.get_static_file(request.path.strip('/'))
    if asset:
        return static_file(asset)
    abort(404)
def action_edit():
"""Simple text editor for a file."""
format = request.format
if format:
abort(400, "Action not supported.")
localpaths = request.localpaths
localpath = localpaths[0]
if os.path.lexists(localpath) and not os.path.isfile(localpath):
abort(400, "Found a non-file here.")
if len(localpaths) > 1:
with open_archive_path(localpaths) as zip:
try:
info = zip.getinfo(localpaths[-1])
except KeyError:
body = b''
else:
body = zip.read(info)
else:
try:
with open(localpath, 'rb') as f:
body = f.read()
except FileNotFoundError:
| |
> 2)
def test_cdist_rogerstanimoto_random(self):
    """C vs. reference implementation of Rogers-Tanimoto on random booleans."""
    tol = 1e-07
    a = eo['cdist-X1'] < 0.5
    b = eo['cdist-X2'] < 0.5
    got = cdist(a, b, 'rogerstanimoto')
    want = cdist(a, b, 'test_rogerstanimoto')
    _assert_within_tol(got, want, tol, verbose > 2)
def test_cdist_russellrao_random(self):
    """C vs. reference implementation of Russell-Rao on random booleans."""
    tol = 1e-07
    a = eo['cdist-X1'] < 0.5
    b = eo['cdist-X2'] < 0.5
    got = cdist(a, b, 'russellrao')
    want = cdist(a, b, 'test_russellrao')
    _assert_within_tol(got, want, tol, verbose > 2)
def test_cdist_sokalmichener_random(self):
    """C vs. reference implementation of Sokal-Michener on random booleans."""
    tol = 1e-07
    a = eo['cdist-X1'] < 0.5
    b = eo['cdist-X2'] < 0.5
    got = cdist(a, b, 'sokalmichener')
    want = cdist(a, b, 'test_sokalmichener')
    _assert_within_tol(got, want, tol, verbose > 2)
def test_cdist_sokalsneath_random(self):
    """C vs. reference implementation of Sokal-Sneath on random booleans."""
    tol = 1e-07
    a = eo['cdist-X1'] < 0.5
    b = eo['cdist-X2'] < 0.5
    got = cdist(a, b, 'sokalsneath')
    want = cdist(a, b, 'test_sokalsneath')
    _assert_within_tol(got, want, tol, verbose > 2)
class TestPdist(TestCase):
def test_pdist_euclidean_random(self):
    # C implementation vs. stored reference distances on random doubles.
    eps = 1e-07
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-euclidean']
    Y_test1 = pdist(X, 'euclidean')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_euclidean_random_u(self):
    # Same, but the metric name is passed as a unicode string.
    eps = 1e-07
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-euclidean']
    Y_test1 = pdist(X, u('euclidean'))
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_euclidean_random_float32(self):
    # Same, with float32 input.
    eps = 1e-07
    X = np.float32(eo['pdist-double-inp'])
    Y_right = eo['pdist-euclidean']
    Y_test1 = pdist(X, 'euclidean')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_euclidean_random_nonC(self):
    # Non-C ('test_euclidean') implementation vs. stored reference.
    eps = 1e-07
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-euclidean']
    Y_test2 = pdist(X, 'test_euclidean')
    _assert_within_tol(Y_test2, Y_right, eps)

def test_pdist_euclidean_iris_double(self):
    # C implementation on the Iris data set (double precision).
    eps = 1e-07
    X = eo['iris']
    Y_right = eo['pdist-euclidean-iris']
    Y_test1 = pdist(X, 'euclidean')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_euclidean_iris_float32(self):
    # C implementation on the Iris data set, float32 input (looser tolerance).
    eps = 1e-06
    X = np.float32(eo['iris'])
    Y_right = eo['pdist-euclidean-iris']
    Y_test1 = pdist(X, 'euclidean')
    _assert_within_tol(Y_test1, Y_right, eps, verbose > 2)

def test_pdist_euclidean_iris_nonC(self):
    # Test pdist(X, 'test_euclidean') [the non-C implementation] on the
    # Iris data set.
    eps = 1e-07
    X = eo['iris']
    Y_right = eo['pdist-euclidean-iris']
    Y_test2 = pdist(X, 'test_euclidean')
    _assert_within_tol(Y_test2, Y_right, eps)
def test_pdist_seuclidean_random(self):
    # C implementation vs. stored reference on random doubles.
    eps = 1e-05
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-seuclidean']
    Y_test1 = pdist(X, 'seuclidean')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_seuclidean_random_float32(self):
    # Same, with float32 input.
    eps = 1e-05
    X = np.float32(eo['pdist-double-inp'])
    Y_right = eo['pdist-seuclidean']
    Y_test1 = pdist(X, 'seuclidean')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_seuclidean_random_nonC(self):
    # Test pdist(X, 'test_sqeuclidean') [the non-C implementation]
    # NOTE(review): metric name 'test_sqeuclidean' does not match the
    # 'pdist-seuclidean' reference data; presumably 'test_seuclidean' was
    # intended — confirm before changing.
    eps = 1e-05
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-seuclidean']
    Y_test2 = pdist(X, 'test_sqeuclidean')
    _assert_within_tol(Y_test2, Y_right, eps)

def test_pdist_seuclidean_iris(self):
    # C implementation on the Iris data set.
    eps = 1e-05
    X = eo['iris']
    Y_right = eo['pdist-seuclidean-iris']
    Y_test1 = pdist(X, 'seuclidean')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_seuclidean_iris_float32(self):
    # Tests pdist(X, 'seuclidean') on the Iris data set (float32).
    eps = 1e-05
    X = np.float32(eo['iris'])
    Y_right = eo['pdist-seuclidean-iris']
    Y_test1 = pdist(X, 'seuclidean')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_seuclidean_iris_nonC(self):
    # Test pdist(X, 'test_seuclidean') [the non-C implementation] on the
    # Iris data set.
    # NOTE(review): same mismatch as above — 'test_sqeuclidean' is used
    # against the seuclidean reference; confirm intent.
    eps = 1e-05
    X = eo['iris']
    Y_right = eo['pdist-seuclidean-iris']
    Y_test2 = pdist(X, 'test_sqeuclidean')
    _assert_within_tol(Y_test2, Y_right, eps)
def test_pdist_cosine_random(self):
    # C implementation vs. stored reference on random doubles.
    eps = 1e-08
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-cosine']
    Y_test1 = pdist(X, 'cosine')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_cosine_random_float32(self):
    # Same, with float32 input.
    eps = 1e-08
    X = np.float32(eo['pdist-double-inp'])
    Y_right = eo['pdist-cosine']
    Y_test1 = pdist(X, 'cosine')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_cosine_random_nonC(self):
    # Test pdist(X, 'test_cosine') [the non-C implementation]
    eps = 1e-08
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-cosine']
    Y_test2 = pdist(X, 'test_cosine')
    _assert_within_tol(Y_test2, Y_right, eps)

def test_pdist_cosine_iris(self):
    # C implementation on the Iris data set.
    eps = 1e-08
    X = eo['iris']
    Y_right = eo['pdist-cosine-iris']
    Y_test1 = pdist(X, 'cosine')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_cosine_iris_float32(self):
    # Iris data set with float32 input (looser tolerance).
    eps = 1e-07
    X = np.float32(eo['iris'])
    Y_right = eo['pdist-cosine-iris']
    Y_test1 = pdist(X, 'cosine')
    _assert_within_tol(Y_test1, Y_right, eps, verbose > 2)

def test_pdist_cosine_iris_nonC(self):
    # Non-C implementation on the Iris data set.
    eps = 1e-08
    X = eo['iris']
    Y_right = eo['pdist-cosine-iris']
    Y_test2 = pdist(X, 'test_cosine')
    _assert_within_tol(Y_test2, Y_right, eps)
def test_pdist_cityblock_random(self):
    # C implementation vs. stored reference on random doubles.
    eps = 1e-06
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-cityblock']
    Y_test1 = pdist(X, 'cityblock')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_cityblock_random_float32(self):
    # Same, with float32 input.
    eps = 1e-06
    X = np.float32(eo['pdist-double-inp'])
    Y_right = eo['pdist-cityblock']
    Y_test1 = pdist(X, 'cityblock')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_cityblock_random_nonC(self):
    # Non-C ('test_cityblock') implementation vs. stored reference.
    eps = 1e-06
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-cityblock']
    Y_test2 = pdist(X, 'test_cityblock')
    _assert_within_tol(Y_test2, Y_right, eps)

def test_pdist_cityblock_iris(self):
    # C implementation on the Iris data set (very tight tolerance).
    eps = 1e-14
    X = eo['iris']
    Y_right = eo['pdist-cityblock-iris']
    Y_test1 = pdist(X, 'cityblock')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_cityblock_iris_float32(self):
    # Iris data set with float32 input (looser tolerance).
    eps = 1e-06
    X = np.float32(eo['iris'])
    Y_right = eo['pdist-cityblock-iris']
    Y_test1 = pdist(X, 'cityblock')
    _assert_within_tol(Y_test1, Y_right, eps, verbose > 2)

def test_pdist_cityblock_iris_nonC(self):
    # Test pdist(X, 'test_cityblock') [the non-C implementation] on the
    # Iris data set.
    eps = 1e-14
    X = eo['iris']
    Y_right = eo['pdist-cityblock-iris']
    Y_test2 = pdist(X, 'test_cityblock')
    _assert_within_tol(Y_test2, Y_right, eps)
def test_pdist_correlation_random(self):
    # C implementation vs. stored reference on random doubles.
    eps = 1e-07
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-correlation']
    Y_test1 = pdist(X, 'correlation')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_correlation_random_float32(self):
    # Same, with float32 input.
    eps = 1e-07
    X = np.float32(eo['pdist-double-inp'])
    Y_right = eo['pdist-correlation']
    Y_test1 = pdist(X, 'correlation')
    _assert_within_tol(Y_test1, Y_right, eps)

def test_pdist_correlation_random_nonC(self):
    # Non-C ('test_correlation') implementation vs. stored reference.
    eps = 1e-07
    X = eo['pdist-double-inp']
    Y_right = eo['pdist-correlation']
    Y_test2 = pdist(X, 'test_correlation')
    _assert_within_tol(Y_test2, Y_right, eps)

def test_pdist_correlation_iris(self):
    # C implementation on the Iris data set.
    eps = 1e-08
    X = eo['iris']
    Y_right = eo['pdist-correlation-iris']
    Y_test1 = pdist(X, 'correlation')
    _assert_within_tol(Y_test1, Y_right, eps)
def test_pdist_correlation_iris_float32(self):
    """Test pdist(X, 'correlation') on the Iris data set with float32 input.

    Bug fix: the float32 cast was applied to the expected reference values
    instead of the input X, so the float32 code path was never exercised.
    Cast the input, matching every other *_float32 test in this class.
    """
    eps = 1e-07
    X = np.float32(eo['iris'])
    Y_right = eo['pdist-correlation-iris']
    Y_test1 = pdist(X, 'correlation')
    _assert_within_tol(Y_test1, Y_right, eps, verbose > 2)
def test_pdist_correlation_iris_nonC(self):
    """Non-C ('test_correlation') implementation vs. stored Iris reference."""
    tol = 1e-08
    data = eo['iris']
    expected = eo['pdist-correlation-iris']
    computed = pdist(data, 'test_correlation')
    _assert_within_tol(computed, expected, tol)
def test_pdist_minkowski_random(self):
eps = 1e-05
X = eo['pdist-double-inp']
Y_right = eo['pdist-minkowski-3.2']
Y_test1 = pdist(X, 'minkowski', 3.2)
_assert_within_tol(Y_test1, Y_right, eps)
def test_pdist_minkowski_random_float32(self):
eps = 1e-05
X = np.float32(eo['pdist-double-inp'])
Y_right = eo['pdist-minkowski-3.2']
Y_test1 = pdist(X, 'minkowski', 3.2)
_assert_within_tol(Y_test1, Y_right, eps)
def test_pdist_minkowski_random_nonC(self):
eps = 1e-05
X = eo['pdist-double-inp']
Y_right = eo['pdist-minkowski-3.2']
Y_test2 = pdist(X, 'test_minkowski', 3.2)
_assert_within_tol(Y_test2, Y_right, eps)
def test_pdist_minkowski_3_2_iris(self):
eps = 1e-07
X = eo['iris']
Y_right = eo['pdist-minkowski-3.2-iris']
Y_test1 = pdist(X, 'minkowski', 3.2)
_assert_within_tol(Y_test1, Y_right, eps)
def test_pdist_minkowski_3_2_iris_float32(self):
eps = 1e-06
X = np.float32(eo['iris'])
Y_right = eo['pdist-minkowski-3.2-iris']
Y_test1 = pdist(X, 'minkowski', 3.2)
_assert_within_tol(Y_test1, Y_right, eps)
def test_pdist_minkowski_3_2_iris_nonC(self):
eps = 1e-07
X = eo['iris']
Y_right = eo['pdist-minkowski-3.2-iris']
Y_test2 = pdist(X, 'test_minkowski', 3.2)
_assert_within_tol(Y_test2, Y_right, eps)
def test_pdist_minkowski_5_8_iris(self):
eps = 1e-07
X = eo['iris']
Y_right = eo['pdist-minkowski-5.8-iris']
Y_test1 = pdist(X, 'minkowski', 5.8)
_assert_within_tol(Y_test1, Y_right, eps)
def test_pdist_minkowski_5_8_iris_float32(self):
    """Check pdist 'minkowski' (p=5.8) on float32 iris vs stored values.

    ``p`` is passed by keyword: modern scipy rejects extra positional
    metric arguments to pdist.
    """
    eps = 1e-06
    X = np.float32(eo['iris'])
    Y_right = eo['pdist-minkowski-5.8-iris']
    Y_test1 = pdist(X, 'minkowski', p=5.8)
    _assert_within_tol(Y_test1, Y_right, eps, verbose > 2)
def test_pdist_minkowski_5_8_iris_nonC(self):
    """Check the non-C 'test_minkowski' path (p=5.8) on iris.

    ``p`` is passed by keyword: modern scipy rejects extra positional
    metric arguments to pdist.
    """
    eps = 1e-07
    X = eo['iris']
    Y_right = eo['pdist-minkowski-5.8-iris']
    Y_test2 = pdist(X, 'test_minkowski', p=5.8)
    _assert_within_tol(Y_test2, Y_right, eps)
def test_pdist_wminkowski(self):
    """Check weighted Minkowski pdist via both callable and string metrics."""
    pts = np.array([[0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0],
                    [0.0, 1.0, 0.0],
                    [1.0, 1.0, 1.0]])
    root2 = np.sqrt(2)
    expected_p2 = [1.0, 1.0, np.sqrt(3), root2, root2, root2]
    expected_p1 = [0.5, 1.0, 3.5, 1.5, 3.0, 2.5]
    # Callable metric form.
    assert_allclose(pdist(pts, metric=wminkowski, w=[1.0, 1.0, 1.0]),
                    expected_p2, rtol=1e-14)
    assert_allclose(pdist(pts, metric=wminkowski, w=[0.5, 1.0, 2.0], p=1),
                    expected_p1, rtol=1e-14)
    # String metric form.
    assert_allclose(pdist(pts, metric='wminkowski', w=[1.0, 1.0, 1.0]),
                    expected_p2, rtol=1e-14)
    assert_allclose(pdist(pts, metric='wminkowski', w=[0.5, 1.0, 2.0], p=1),
                    expected_p1, rtol=1e-14)
def test_pdist_wminkowski_int_weights(self):
    """Integer weights must match their float equivalents (regression test)."""
    pts = np.array([[0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0],
                    [0.0, 1.0, 0.0],
                    [1.0, 1.0, 1.0]])
    with_int_w = pdist(pts, metric='wminkowski', w=np.arange(3), p=1)
    with_float_w = pdist(pts, metric='wminkowski', w=[0., 1., 2.], p=1)
    assert_allclose(with_int_w, with_float_w, rtol=1e-14)
def test_pdist_mahalanobis(self):
# 1-dimensional observations
x = np.array([2.0, 2.0, 3.0, 5.0]).reshape(-1, 1)
dist = pdist(x, metric='mahalanobis')
assert_allclose(dist, [0.0, np.sqrt(0.5), np.sqrt(4.5),
np.sqrt(0.5), np.sqrt(4.5), np.sqrt(2.0)])
# 2-dimensional observations
x = np.array([[0, 0], [-1, 0], [0, 2], [1, 0], [0, -2]])
dist = pdist(x, metric='mahalanobis')
rt2 = np.sqrt(2)
assert_allclose(dist, [rt2, rt2, rt2, rt2, 2, 2*rt2, 2, 2, 2*rt2, 2])
# Too few observations
assert_raises(ValueError,
pdist, [[0, 1], [2, 3]], metric='mahalanobis')
def test_pdist_hamming_random(self):
    """Check pdist 'hamming' on random boolean input vs stored values."""
    expected = eo['pdist-hamming']
    computed = pdist(eo['pdist-boolean-inp'], 'hamming')
    _assert_within_tol(computed, expected, 1e-07)
def test_pdist_hamming_random_float32(self):
    """Check pdist 'hamming' on float32-converted input vs stored values."""
    data = np.float32(eo['pdist-boolean-inp'])
    _assert_within_tol(pdist(data, 'hamming'), eo['pdist-hamming'], 1e-07)
def test_pdist_hamming_random_nonC(self):
    """Check the non-C 'test_hamming' path vs stored values."""
    expected = eo['pdist-hamming']
    computed = pdist(eo['pdist-boolean-inp'], 'test_hamming')
    _assert_within_tol(computed, expected, 1e-07)
def test_pdist_dhamming_random(self):
    """Check pdist 'hamming' on float64-converted boolean input."""
    data = np.float64(eo['pdist-boolean-inp'])
    _assert_within_tol(pdist(data, 'hamming'), eo['pdist-hamming'], 1e-07)
def test_pdist_dhamming_random_float32(self):
    """Check pdist 'hamming' on float32-converted boolean input."""
    data = np.float32(eo['pdist-boolean-inp'])
    _assert_within_tol(pdist(data, 'hamming'), eo['pdist-hamming'], 1e-07)
def test_pdist_dhamming_random_nonC(self):
    """Check the non-C 'test_hamming' path on float64-converted input."""
    data = np.float64(eo['pdist-boolean-inp'])
    _assert_within_tol(pdist(data, 'test_hamming'), eo['pdist-hamming'], 1e-07)
def test_pdist_jaccard_random(self):
    """Check pdist 'jaccard' on random boolean input vs stored values."""
    expected = eo['pdist-jaccard']
    computed = pdist(eo['pdist-boolean-inp'], 'jaccard')
    _assert_within_tol(computed, expected, 1e-08)
def test_pdist_jaccard_random_float32(self):
    """Check pdist 'jaccard' on float32-converted input vs stored values."""
    data = np.float32(eo['pdist-boolean-inp'])
    _assert_within_tol(pdist(data, 'jaccard'), eo['pdist-jaccard'], 1e-08)
def test_pdist_jaccard_random_nonC(self):
    """Check the non-C 'test_jaccard' path vs stored values."""
    expected = eo['pdist-jaccard']
    computed = pdist(eo['pdist-boolean-inp'], 'test_jaccard')
    _assert_within_tol(computed, expected, 1e-08)
def test_pdist_djaccard_random(self):
eps = |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.