bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 6, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 7, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 8, ''),
('nn::ns::detail::IApplicationManagerInterface', 9, '0x10 bytes in - 0 bytes out - InRaw<8,8,8>, InRaw<1,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 11, ''),
('nn::ns::detail::IApplicationManagerInterface', 16, ''),
('nn::ns::detail::IApplicationManagerInterface', 17, ''),
('nn::ns::detail::IApplicationManagerInterface', 19, '8 bytes in - 8 bytes out - OutRaw<8,8,0>, InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 21, ''),
('nn::ns::detail::IApplicationManagerInterface', 22, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 23, '0x10 bytes in - 0 bytes out - InRaw<8,8,8>, InRaw<1,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 26, ''),
('nn::ns::detail::IApplicationManagerInterface', 27, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 30, ''),
('nn::ns::detail::IApplicationManagerInterface', 31, ''),
('nn::ns::detail::IApplicationManagerInterface', 32, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 33, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 35, '0 bytes in - 0 bytes out - Buffer<0,5,0>'),
('nn::ns::detail::IApplicationManagerInterface', 36, '0x10 bytes in - 0 bytes out - InRaw<8,8,8>, InRaw<4,4,0>'),
('nn::ns::detail::IApplicationManagerInterface', 37, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IApplicationManagerInterface', 38, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 39, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 40, ''),
('nn::ns::detail::IApplicationManagerInterface', 41, ''),
('nn::ns::detail::IApplicationManagerInterface', 42, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 43, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 44, '0 bytes in - 0 bytes out - OutHandle<0,1>'),
('nn::ns::detail::IApplicationManagerInterface', 45, '0 bytes in - 0 bytes out - OutHandle<0,1>'),
('nn::ns::detail::IApplicationManagerInterface', 46, '0 bytes in - 0x10 bytes out - OutRaw<0x10,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 47, ''),
('nn::ns::detail::IApplicationManagerInterface', 48, ''),
('nn::ns::detail::IApplicationManagerInterface', 49, '0 bytes in - 0 bytes out - OutHandle<0,1>'),
('nn::ns::detail::IApplicationManagerInterface', 52, '0 bytes in - 0 bytes out - OutHandle<0,1>'),
('nn::ns::detail::IApplicationManagerInterface', 53, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 54, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 55, '4 bytes in - 1 bytes out - OutRaw<1,1,0>, InRaw<4,4,0>'),
('nn::ns::detail::IApplicationManagerInterface', 56, '0x10 bytes in - 0 bytes out - InRaw<8,8,8>, InRaw<4,4,0>'),
('nn::ns::detail::IApplicationManagerInterface', 57, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 58, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 59, ''),
('nn::ns::detail::IApplicationManagerInterface', 60, ''),
('nn::ns::detail::IApplicationManagerInterface', 61, '0 bytes in - 0x10 bytes out - OutRaw<0x10,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 62, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::IApplicationManagerInterface', 63, '8 bytes in - 1 bytes out - OutRaw<1,1,0>, InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 64, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 65, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::IApplicationManagerInterface', 66, '0 bytes in - 0x10 bytes out - OutRaw<0x10,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 67, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 68, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 69, ''),
('nn::ns::detail::IApplicationManagerInterface', 70, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 71, ''),
('nn::ns::detail::IApplicationManagerInterface', 80, ''),
('nn::ns::detail::IApplicationManagerInterface', 81, ''),
('nn::ns::detail::IApplicationManagerInterface', 82, ''),
('nn::ns::detail::IApplicationManagerInterface', 100, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 101, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 102, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 200, ''),
('nn::ns::detail::IApplicationManagerInterface', 201, ''),
('nn::ns::detail::IApplicationManagerInterface', 210, ''),
('nn::ns::detail::IApplicationManagerInterface', 220, ''),
('nn::ns::detail::IApplicationManagerInterface', 300, '0 bytes in - 0 bytes out - OutHandle<0,1>'),
('nn::ns::detail::IApplicationManagerInterface', 301, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IApplicationManagerInterface', 302, '8 bytes in - 8 bytes out - OutRaw<8,8,0>, InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 303, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 304, '0 bytes in - 8 bytes out - OutRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 305, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 306, '0 bytes in - 8 bytes out - OutRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 307, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 400, ''),
('nn::ns::detail::IApplicationManagerInterface', 401, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 402, ''),
('nn::ns::detail::IApplicationManagerInterface', 403, '0 bytes in - 4 bytes out - OutRaw<4,4,0>'),
('nn::ns::detail::IApplicationManagerInterface', 404, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 405, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IApplicationManagerInterface', 502, ''),
('nn::ns::detail::IApplicationManagerInterface', 503, ''),
('nn::ns::detail::IApplicationManagerInterface', 504, ''),
('nn::ns::detail::IApplicationManagerInterface', 505, '0 bytes in - 0 bytes out - OutHandle<0,1>'),
('nn::ns::detail::IApplicationManagerInterface', 506, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 507, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 508, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 600, '8 bytes in - 4 bytes out - OutRaw<4,4,0>, InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 601, ''),
('nn::ns::detail::IApplicationManagerInterface', 602, ''),
('nn::ns::detail::IApplicationManagerInterface', 603, ''),
('nn::ns::detail::IApplicationManagerInterface', 604, '0x10 bytes in - 0 bytes out - InRaw<8,8,0>, InRaw<8,8,8>'),
('nn::ns::detail::IApplicationManagerInterface', 605, ''),
('nn::ns::detail::IApplicationManagerInterface', 606, ''),
('nn::ns::detail::IApplicationManagerInterface', 700, '0 bytes in - 0 bytes out - Buffer<0,5,0>'),
('nn::ns::detail::IApplicationManagerInterface', 701, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 702, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 703, ''),
('nn::ns::detail::IApplicationManagerInterface', 704, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IApplicationManagerInterface', 705, ''),
('nn::ns::detail::IApplicationManagerInterface', 800, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 801, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IApplicationManagerInterface', 802, ''),
('nn::ns::detail::IApplicationManagerInterface', 900, ''),
('nn::ns::detail::IApplicationManagerInterface', 901, ''),
('nn::ns::detail::IApplicationManagerInterface', 902, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 903, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 904, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 905, '0x10 bytes in - 0 bytes out - InRaw<8,8,8>, InRaw<4,4,0>'),
('nn::ns::detail::IApplicationManagerInterface', 906, ''),
('nn::ns::detail::IApplicationManagerInterface', 907, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 908, ''),
('nn::ns::detail::IApplicationManagerInterface', 909, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1000, ''),
('nn::ns::detail::IApplicationManagerInterface', 1001, ''),
('nn::ns::detail::IApplicationManagerInterface', 1002, ''),
('nn::ns::detail::IApplicationManagerInterface', 1200, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1300, '8 bytes in - 1 bytes out - OutRaw<1,1,0>, InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1301, ''),
('nn::ns::detail::IApplicationManagerInterface', 1302, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1303, '8 bytes in - 0 bytes out - InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1304, ''),
('nn::ns::detail::IApplicationManagerInterface', 1400, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 1500, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 1501, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1502, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 1504, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 1505, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 1600, '0 bytes in - 0x20 bytes out - OutRaw<0x20,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1601, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IApplicationManagerInterface', 1700, ''),
('nn::ns::detail::IApplicationManagerInterface', 1701, ''),
('nn::ns::detail::IApplicationManagerInterface', 1702, '8 bytes in - 1 bytes out - OutRaw<1,1,0>, InRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1800, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1801, '0 bytes in - 8 bytes out - OutRaw<8,8,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1802, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1803, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IApplicationManagerInterface', 1900, '4 bytes in - 1 bytes out - OutRaw<1,1,0>, InRaw<4,4,0>'),
('nn::ns::detail::IAsyncResult', 0, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IAsyncResult', 1, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IAsyncValue', 0, '0 bytes in - 8 bytes out - OutRaw<8,8,0>'),
('nn::ns::detail::IAsyncValue', 1, '0 bytes in - 0 bytes out - Buffer<0,6,0>'),
('nn::ns::detail::IAsyncValue', 2, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IContentManagementInterface', 11, ''),
('nn::ns::detail::IContentManagementInterface', 43, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IContentManagementInterface', 47, ''),
('nn::ns::detail::IContentManagementInterface', 48, ''),
('nn::ns::detail::IContentManagementInterface', 600, '8 bytes in - 4 bytes out - OutRaw<4,4,0>, InRaw<8,8,0>'),
('nn::ns::detail::IContentManagementInterface', 601, ''),
('nn::ns::detail::IContentManagementInterface', 605, ''),
('nn::ns::detail::IContentManagementInterface', 607, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::IDevelopInterface', 0, ''),
('nn::ns::detail::IDevelopInterface', 1, ''),
('nn::ns::detail::IDevelopInterface', 2, ''),
('nn::ns::detail::IDevelopInterface', 4, ''),
('nn::ns::detail::IDevelopInterface', 5, ''),
('nn::ns::detail::IDevelopInterface', 6, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IDevelopInterface', 7, ''),
('nn::ns::detail::IDevelopInterface', 8, ''),
('nn::ns::detail::IDevelopInterface', 9, ''),
('nn::ns::detail::IDocumentInterface', 21, ''),
('nn::ns::detail::IDocumentInterface', 23, '0x10 bytes in - 0 bytes out - InRaw<8,8,8>, InRaw<1,1,0>'),
('nn::ns::detail::IDownloadTaskInterface', 701, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IDownloadTaskInterface', 702, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IDownloadTaskInterface', 703, ''),
('nn::ns::detail::IDownloadTaskInterface', 704, '0 bytes in - 4 bytes out - OutRaw<4,4,0>, Buffer<0,6,0>'),
('nn::ns::detail::IDownloadTaskInterface', 705, ''),
('nn::ns::detail::IFactoryResetInterface', 100, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IFactoryResetInterface', 101, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IFactoryResetInterface', 102, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IProgressAsyncResult', 0, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IProgressAsyncResult', 1, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IProgressAsyncResult', 2, '0 bytes in - 0 bytes out - Buffer<0,6,0>'),
('nn::ns::detail::IProgressAsyncResult', 3, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IProgressMonitorForDeleteUserSaveDataAll', 0, '0 bytes in - 0 bytes out - OutHandle<0,1>'),
('nn::ns::detail::IProgressMonitorForDeleteUserSaveDataAll', 1, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::IProgressMonitorForDeleteUserSaveDataAll', 2, '0 bytes in - 0 bytes out'),
('nn::ns::detail::IProgressMonitorForDeleteUserSaveDataAll', 10, '0 bytes in - 0x28 bytes out - OutRaw<0x28,8,0>'),
('nn::ns::detail::IServiceGetterInterface', 7994, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::IServiceGetterInterface', 7995, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::IServiceGetterInterface', 7996, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::IServiceGetterInterface', 7997, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::IServiceGetterInterface', 7998, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::IServiceGetterInterface', 7999, '0 bytes in - 0 bytes out - OutObject<0,0>'),
('nn::ns::detail::ISystemUpdateControl', 0, '0 bytes in - 1 bytes out - OutRaw<1,1,0>'),
('nn::ns::detail::ISystemUpdateControl', 1, ''),
('nn::ns::detail::ISystemUpdateControl', 2, ''),
('nn::ns::detail::ISystemUpdateControl', 3, '0 bytes in - 0x10
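The rows above map each (interface, command id) pair to a raw signature string: total bytes in/out plus IPC descriptors such as InRaw<size,align,offset>, or an empty string where the signature is unknown. A minimal parsing sketch, assuming the exact format shown above; the ROWS sample and all helper names are hypothetical:

import re

# Two sample rows copied from the listing; the real dump is much larger.
ROWS = [
    ('nn::ns::detail::IAsyncValue', 0, '0 bytes in - 8 bytes out - OutRaw<8,8,0>'),
    ('nn::ns::detail::IAsyncValue', 2, '0 bytes in - 0 bytes out'),
]

SIZE_RE = re.compile(r'(0x[0-9a-fA-F]+|\d+) bytes in - (0x[0-9a-fA-F]+|\d+) bytes out')

def parse_signature(sig):
    """Split a signature string into (bytes_in, bytes_out, descriptor list)."""
    if not sig:
        return None  # unknown/undocumented command
    match = SIZE_RE.match(sig)
    bytes_in, bytes_out = (int(group, 0) for group in match.groups())  # int(x, 0) handles '0x10'
    rest = sig[match.end():].lstrip(' -')
    descriptors = rest.split(', ') if rest else []
    return bytes_in, bytes_out, descriptors

commands = {(iface, cmd): parse_signature(sig) for iface, cmd, sig in ROWS}
print(commands[('nn::ns::detail::IAsyncValue', 0)])  # (0, 8, ['OutRaw<8,8,0>'])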
All the "values" to take into account to get the storage key.
Returns
-------
str
The redis key to use
"""
args = list(args)
value = args.pop()
parts = [
self.model._name,
self.field.name,
] + args
if self.prefix:
parts.append(self.prefix)
if self.key:
parts.append(self.key)
normalized_value = self.normalize_value(value, transform=kwargs.get('transform_value', True))
parts.append(normalized_value)
return self.field.make_key(*parts)
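# Illustrative example (hypothetical model/field names, not taken from the source):
# for a model 'person' and field 'lastname', with no prefix and no index key,
# get_storage_key('Smith') yields a key shaped like
#     person:lastname:smith
# (assuming normalize_value lowercases here), while prefix 'eq' and key 'idx' give
#     person:lastname:eq:idx:smith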
def get_all_storage_keys(self):
"""Returns the keys to be removed by `clear` in aggressive mode
For the parameters, see BaseIndex.get_all_storage_keys
"""
parts1 = [
self.model._name,
self.field.name,
]
parts2 = parts1 + ['*'] # for indexes taking args, like for hashfields
if self.prefix:
parts1.append(self.prefix)
parts2.append(self.prefix)
if self.key:
parts1.append(self.key)
parts2.append(self.key)
parts1.append('*')
parts2.append('*')
return set(
self.model.database.scan_keys(self.field.make_key(*parts1))
).union(
set(
self.model.database.scan_keys(self.field.make_key(*parts2))
)
)
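# Example of the two patterns scanned (hypothetical names): for model 'person',
# field 'lastname' and prefix 'eq', aggressive `clear` scans both
#     person:lastname:eq:*      (parts1: indexes called without extra args)
#     person:lastname:*:eq:*    (parts2: indexes taking args, e.g. for hash fields)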
def get_uniqueness_members(self, key):
"""Get from redis all the members of the given index `key` used to check for uniqueness.
Parameters
----------
key : str
The index key we want the members from.
Returns
-------
List[str]
The members of the index `key`.
"""
return list(self.connection.smembers(key))
def check_uniqueness(self, pk, *args, **kwargs):
"""Check if the given "value" (via `args`) is unique or not.
Parameters
----------
kwargs: dict
key: str
When given, it will be used instead of calling ``get_storage_key``
for the given args
MUST be passed as a keyword argument
For the other parameters, see ``BaseIndex.check_uniqueness``
"""
if not self.field.unique:
return
key = kwargs.get('key', None)
if key is None:
key = self.get_storage_key(*args)
# Let's check if the index key already exists for another instance
pks = self.get_uniqueness_members(key)
self.assert_pks_uniqueness(pks, pk, lambda: list(args)[-1])
def store(self, key, pk, **kwargs):
"""Store data in the index in redis
Parameters
----------
key : str
The key in which to store data
pk : Any
The pk of the instance to save in the index.
kwargs : Any
This is the ``kwargs`` passed to ``.add``. May be used by subclasses.
Returns
-------
bool
If we asked redis to do something. Always ``True`` for this index but may vary in
subclasses.
"""
self.connection.sadd(key, pk)
return True
def unstore(self, key, pk, **kwargs):
"""Remove data from the index in redis
Parameters
----------
key : str
The key from which to remove data
pk : Any
The pk of the instance to remove from the index.
kwargs : Any
This is the ``kwargs`` passed to ``.add``. May be used by subclasses.
Returns
-------
bool
If we asked redis to do something. Always ``True`` for this index but may vary in
subclasses.
"""
self.connection.srem(key, pk)
return True
def add(self, pk, *args, **kwargs):
"""Add the instance tied to the field for the given "value" (via `args`) to the index
For the parameters, see ``BaseIndex.add``
"""
check_uniqueness = kwargs.get('check_uniqueness', True)
key = self.get_storage_key(*args)
if self.field.unique and check_uniqueness:
self.check_uniqueness(pk, *args, key=key)
# Do index => create a key to be able to retrieve parent pk with
# current field value
logger.debug("adding %s to index %s" % (pk, key))
if self.store(key, pk, **kwargs):
self._get_rollback_cache(pk)['indexed_values'].add(tuple(args))
def remove(self, pk, *args, **kwargs):
"""Remove the instance tied to the field for the given "value" (via `args`) from the index
For the parameters, see ``BaseIndex.remove``
"""
key = self.get_storage_key(*args)
logger.debug("removing %s from index %s" % (pk, key))
if self.unstore(key, pk, **kwargs):
self._get_rollback_cache(pk)['deindexed_values'].add(tuple(args))
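# Minimal standalone sketch of the redis operations behind store/unstore and
# get_uniqueness_members above, using the redis-py client directly. It assumes a
# redis server on localhost; the key and pk values are hypothetical.
import redis

_conn = redis.Redis()
_key = 'person:lastname:smith'            # the kind of key get_storage_key() builds
_conn.sadd(_key, 1)                       # store(): add pk 1 to the value's set
assert _conn.smembers(_key) == {b'1'}     # get_uniqueness_members() reads this set
_conn.srem(_key, 1)                       # unstore(): de-index pk 1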
class BaseRangeIndex(BaseIndex):
"""Base of indexes using sorted-set to do range filtering (lt, gte...)"""
handle_uniqueness = True
lua_filter_script = NotImplemented
supported_key_types = {'set', 'zset'}
def get_storage_key(self, *args):
"""Return the redis key where to store the index for the given "value" (`args`)
For this index, we store all PKs for a field in the same sorted-set.
Key has this form:
model-name:field-name:sub-field-name:index-key-name
The ':sub-field-name' part is repeated for each entry in *args that is not the final value.
Parameters
-----------
args: tuple
All the "values" to take into account to get the storage key. The last entry,
the final value, is not used.
Returns
-------
str
The redis key to use
"""
args = list(args)
args.pop() # final value, not needed for the storage key
parts = [
self.model._name,
self.field.name,
] + args
if self.prefix:
parts.append(self.prefix)
if self.key:
parts.append(self.key)
return self.field.make_key(*parts)
def get_all_storage_keys(self):
"""Returns the keys to be removed by `clear` in aggressive mode
For the parameters, see BaseIndex.get_all_storage_keys
"""
parts1 = [
self.model._name,
self.field.name,
]
parts2 = parts1 + ['*'] # for indexes taking args, like for hashfields
if self.prefix:
parts1.append(self.prefix)
parts2.append(self.prefix)
if self.key:
parts1.append(self.key)
parts2.append(self.key)
return set(
self.model.database.scan_keys(self.field.make_key(*parts1))
).union(
set(
self.model.database.scan_keys(self.field.make_key(*parts2))
)
)
def prepare_data_to_store(self, pk, value, **kwargs):
"""Prepare the value and score to be stored in the zset
Parameters
----------
pk: Any
The pk, that will be stringified
value: Any
The value, to normalize, to use for indexing
kwargs: Any
Passed by the add/remove methods as they receive them.
Returns
-------
str
The string ready to use as member of the sorted set.
float
The score for this member of the sorted set
"""
raise NotImplementedError
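# Two plausible concrete strategies for subclasses (assumptions, not taken from
# this file): a numeric range index can use the pk as member and the value as
# score, e.g. `return str(pk), float(value)`; a text range index can encode the
# value inside the member with a constant score, e.g.
# `return '%s:%s' % (normalized_value, pk), 0`, so that lexicographic sorted-set
# commands (ZRANGEBYLEX and friends) can filter on the value part.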
def check_uniqueness(self, pk, *args, **kwargs):
"""Check if the given "value" (via `args`) is unique or not.
For the parameters, see ``BaseIndex.check_uniqueness``
"""
if not self.field.unique:
return
key = self.get_storage_key(*args)
value = list(args)[-1]
pks = self.get_pks_for_filter(key, 'eq', self.normalize_value(value))
self.assert_pks_uniqueness(pks, pk, lambda: value)
def store(self, key, member, score):
"""Store data in the index in redis
Parameters
----------
key : str
The key in which to store data
member : Any
The member to store in the sorted set.
score : Union[int, float, None]
The score to use to store the `member`.
Returns
-------
bool
If we asked redis to do something. Will be ``True`` except if `score` is ``None``, in
which case we don't ask redis to save the member.
"""
if score is None:
return False
self.connection.zadd(key, {member: score})
return True
def unstore(self, key, member, score):
"""Remove data from the index in redis
Parameters
----------
key : str
The key from which to remove data
member : Any
The member to remove from the sorted set.
score : Union[int, float, None]
The score for the member, but not used here (may be in subclasses)
Returns
-------
bool
If we asked redis to do something. Always ``True`` for this index but may vary in
subclasses.
"""
self.connection.zrem(key, member)
return True
def add(self, pk, *args, **kwargs):
"""Add the instance tied to the field for the given "value" (via `args`) to the index
For the parameters, see ``BaseIndex.add``
Notes
-----
If the score returned by ``prepare_data_to_store`` is None, nothing will be added to the
index. If it is not a valid float, an error will be raised.
"""
check_uniqueness = kwargs.get('check_uniqueness', True)
if self.field.unique and check_uniqueness:
self.check_uniqueness(pk, *args)
key = self.get_storage_key(*args)
args = list(args)
value = args[-1]
logger.debug("adding %s to index %s" % (pk, key))
member, score = self.prepare_data_to_store(pk, value, **kwargs)
if self.store(key, member, score):
self._get_rollback_cache(pk)['indexed_values'].add(tuple(args))
def remove(self, pk, *args, **kwargs):
"""Remove the instance tied to the field for the given "value" (via `args`) from the index
For the parameters, see ``BaseIndex.remove``
"""
key = self.get_storage_key(*args)
args = list(args)
value = args[-1]
logger.debug("removing %s from index %s" % (pk, key))
member, score = self.prepare_data_to_store(pk, value, **kwargs)
if self.unstore(key, member, score):
self._get_rollback_cache(pk)['deindexed_values'].add(tuple(args))
def get_boundaries(self, filter_type, value):
"""Compute the boundaries to pass to the sorted-set command depending of the filter type
Parameters
----------
filter_type: str
One of the filter suffixes in ``self.handled_suffixes``
value: str
The normalized value for which we want the boundaries
Returns
-------
tuple
A tuple with three entries, the begin and the end of the boundaries to pass
to sorted-set command, and in third a value to exclude from the result when
querying the sorted-set
"""
raise ImplementationError
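# Illustrative (hypothetical) return values for a lexicographic filter on a
# normalized value 'foo', using ZRANGEBYLEX-style bounds where '[' is inclusive,
# '(' exclusive and '-'/'+' are the infinites:
#     'gte' -> ('[foo', '+', None)
#     'gt'  -> ('(foo', '+', 'foo')   # third entry: exclude exact matches of 'foo'
#     'lt'  -> ('-', '(foo', None)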
def call_script(self, key, tmp_key, key_type, start, end, exclude, *args):
"""Call the lua scripts with given keys and args
Parameters
-----------
key: str
The key of the index sorted-set
tmp_key: str
The final temporary key where to store the filtered primary keys
key_type: str
The type of temporary key to use, either 'set' or 'zset'
start: str
The "start" argument to pass to the filtering sorted-set command
end: str
The "end" argument to pass to the filtering sorted-set command
| |
self.float260 = FloatText(value='2.0', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float260, units_btn, ]
box276 = Box(children=row, layout=box_layout)
# -------------------------
div_row20 = Button(description='phenotype:mechanics', disabled=True, layout=divider_button_layout)
div_row20.style.button_color = 'orange'
name_btn = Button(description='cell_cell_adhesion_strength', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float261 = FloatText(value='0.4', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float261, units_btn, ]
box277 = Box(children=row, layout=box_layout)
name_btn = Button(description='cell_cell_repulsion_strength', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float262 = FloatText(value='10.0', step='1', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float262, units_btn, ]
box278 = Box(children=row, layout=box_layout)
name_btn = Button(description='relative_maximum_adhesion_distance', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float263 = FloatText(value='1.25', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float263, units_btn, ]
box279 = Box(children=row, layout=box_layout)
self.bool10 = Checkbox(description='enabled', value=False, layout=name_button_layout)
name_btn = Button(description='set_relative_equilibrium_distance', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float264 = FloatText(value='1.8', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [self.bool10, name_btn, self.float264, units_btn, ]
box280 = Box(children=row, layout=box_layout)
self.bool11 = Checkbox(description='enabled', value=False, layout=name_button_layout)
name_btn = Button(description='set_absolute_equilibrium_distance', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float265 = FloatText(value='15.12', step='1', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [self.bool11, name_btn, self.float265, units_btn, ]
box281 = Box(children=row, layout=box_layout)
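# Every name/value/units row above follows one pattern; the sketch below is not
# used by the generated code and is shown only to document that pattern, assuming
# the same layouts and style objects are in scope:
def _make_param_row(name, value, step, units, color):
    name_btn = Button(description=name, disabled=True, layout=name_button_layout)
    name_btn.style.button_color = color
    float_widget = FloatText(value=value, step=step, style=style, layout=widget_layout)
    units_btn = Button(description=units, disabled=True, layout=name_button_layout)
    units_btn.style.button_color = color
    return float_widget, Box(children=[name_btn, float_widget, units_btn], layout=box_layout)
# e.g. self.float261, box277 = _make_param_row('cell_cell_adhesion_strength', '0.4', '0.1', '1/min', 'tan')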
# -------------------------
div_row21 = Button(description='phenotype:motility', disabled=True, layout=divider_button_layout)
div_row21.style.button_color = 'orange'
name_btn = Button(description='speed', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float266 = FloatText(value='4', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='micron/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float266, units_btn]
box282 = Box(children=row, layout=box_layout)
name_btn = Button(description='persistence_time', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float267 = FloatText(value='5', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float267, units_btn]
box283 = Box(children=row, layout=box_layout)
name_btn = Button(description='migration_bias', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float268 = FloatText(value='0.70', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='dimensionless', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float268, units_btn]
box284 = Box(children=row, layout=box_layout)
self.bool12 = Checkbox(description='enabled', value=True, layout=name_button_layout)
self.bool13 = Checkbox(description='use_2D', value=True, layout=name_button_layout)
chemotaxis_btn = Button(description='chemotaxis', disabled=True, layout={'width':'30%'})
chemotaxis_btn.style.button_color = '#ffde6b'
self.bool14 = Checkbox(description='enabled', value=False, layout=name_button_layout)
name_btn = Button(description='substrate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.chemotaxis_substrate3 = Text(value='chemokine', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.chemotaxis_substrate3]
box285 = Box(children=row, layout=box_layout)
name_btn = Button(description='direction', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.chemotaxis_direction3 = Text(value='1', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.chemotaxis_direction3]
box286 = Box(children=row, layout=box_layout)
# -------------------------
div_row22 = Button(description='phenotype:secretion', disabled=True, layout=divider_button_layout)
div_row22.style.button_color = 'orange'
name_btn = Button(description='substrate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.text12 = Text(value='interferon 1', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.text12]
box287 = Box(children=row, layout=box_layout)
name_btn = Button(description='secretion_target', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float269 = FloatText(value='1', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='dimensionless substrate concentration', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float269, units_btn]
box288 = Box(children=row, layout=box_layout)
name_btn = Button(description='uptake_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float270 = FloatText(value='0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float270, units_btn]
box289 = Box(children=row, layout=box_layout)
name_btn = Button(description='substrate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.text13 = Text(value='pro-inflammatory cytokine', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.text13]
box290 = Box(children=row, layout=box_layout)
name_btn = Button(description='secretion_target', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float271 = FloatText(value='1', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='dimensionless substrate concentration', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float271, units_btn]
box291 = Box(children=row, layout=box_layout)
name_btn = Button(description='uptake_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float272 = FloatText(value='0.01', step='0.001', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float272, units_btn]
box292 = Box(children=row, layout=box_layout)
name_btn = Button(description='substrate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.text14 = Text(value='chemokine', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.text14]
box293 = Box(children=row, layout=box_layout)
name_btn = Button(description='secretion_target', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float273 = FloatText(value='1', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='dimensionless substrate concentration', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float273, units_btn]
box294 = Box(children=row, layout=box_layout)
name_btn = Button(description='uptake_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float274 = FloatText(value='0.01', step='0.001', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float274, units_btn]
box295 = Box(children=row, layout=box_layout)
name_btn = Button(description='substrate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.text15 = Text(value='debris', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.text15]
box296 = Box(children=row, layout=box_layout)
name_btn = Button(description='secretion_target', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float275 = FloatText(value='1', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='dimensionless substrate concentration', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float275, units_btn]
box297 = Box(children=row, layout=box_layout)
name_btn = Button(description='uptake_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float276 = FloatText(value='0.1', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float276, units_btn]
box298 = Box(children=row, layout=box_layout)
name_btn = Button(description='substrate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.text16 = Text(value='anti-inflammatory cytokine', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.text16]
box299 = Box(children=row, layout=box_layout)
name_btn = Button(description='secretion_target', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float277 = FloatText(value='1', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='dimensionless substrate concentration', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float277, units_btn]
box300 = Box(children=row, layout=box_layout)
name_btn = Button(description='uptake_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float278 = FloatText(value='0.0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float278, units_btn]
box301 = Box(children=row, layout=box_layout)
name_btn = Button(description='substrate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.text17 = Text(value='collagen', disabled=False, style=style, layout=widget_layout_long)
row = [name_btn, self.text17]
box302 = Box(children=row, layout=box_layout)
name_btn = Button(description='secretion_target', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float279 = FloatText(value='1', step='0.1', style=style, layout=widget_layout)
units_btn = Button(description='dimensionless substrate concentration', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
row = [name_btn, self.float279, units_btn]
box303 = Box(children=row, layout=box_layout)
name_btn = Button(description='uptake_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float280 = FloatText(value='0.0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float280, units_btn]
box304 = Box(children=row, layout=box_layout)
# -------------------------
div_row23 = Button(description='phenotype:molecular', disabled=True, layout=divider_button_layout)
div_row23.style.button_color = 'orange'
# ================== <custom_data>, if present ==================
div_row24 = Button(description='Custom Data',disabled=True, layout=divider_button_layout)
div_row24.style.button_color = 'cyan'
name_btn = Button(description='virion', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float281 = FloatText(value='0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='virions', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
description_btn = Button(description='endocytosed virions', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'tan'
row = [name_btn, self.float281, units_btn, description_btn]
box305 = Box(children=row, layout=box_layout)
name_btn = Button(description='uncoated_virion', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float282 = FloatText(value='0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='virions', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
description_btn = Button(description='uncoated endocytosed virions', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float282, units_btn, description_btn]
box306 = Box(children=row, layout=box_layout)
name_btn = Button(description='viral_RNA', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float283 = FloatText(value='0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='RNA', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
description_btn = Button(description='total (functional) viral RNA copies', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'tan'
row = [name_btn, self.float283, units_btn, description_btn]
box307 = Box(children=row, layout=box_layout)
name_btn = Button(description='viral_protein', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float284 = FloatText(value='0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='protein', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
description_btn = Button(description='total assembled sets of viral protein', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float284, units_btn, description_btn]
box308 = Box(children=row, layout=box_layout)
name_btn = Button(description='export_virion', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float285 = FloatText(value='0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='virions', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
description_btn = Button(description='ready to export virion', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'tan'
row = [name_btn, self.float285, units_btn, description_btn]
box309 = Box(children=row, layout=box_layout)
name_btn = Button(description='assembled_virion', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float286 = FloatText(value='0', step='0.01', style=style, layout=widget_layout)
units_btn = Button(description='virions', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
description_btn = Button(description='total assembled virions', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float286, units_btn, description_btn]
box310 = Box(children=row, layout=box_layout)
name_btn = Button(description='virion_uncoating_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'tan'
self.float287 = FloatText(value='0.01', step='0.001', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'tan'
description_btn = Button(description='rate at which an internalized virion is uncoated', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'tan'
row = [name_btn, self.float287, units_btn, description_btn]
box311 = Box(children=row, layout=box_layout)
name_btn = Button(description='uncoated_to_RNA_rate', disabled=True, layout=name_button_layout)
name_btn.style.button_color = 'lightgreen'
self.float288 = FloatText(value='0.01', step='0.001', style=style, layout=widget_layout)
units_btn = Button(description='1/min', disabled=True, layout=name_button_layout)
units_btn.style.button_color = 'lightgreen'
description_btn = Button(description='rate at which uncoated virion makes its mRNA available', disabled=True, layout=desc_button_layout)
description_btn.style.button_color = 'lightgreen'
row = [name_btn, self.float288, units_btn, | |
tokens_list = list()
identifiers_list = list()
keywords_list = list()
operators_list = list()
return_and_print_count = 0
return_and_print_kw_list = ['return', 'print', 'printf', 'println', 'write', 'writeln']
condition_count = 0
condition_kw_list = ['if', 'for', 'while', 'switch', '?', 'assert', ]
uncondition_count = 0
uncondition_kw_list = ['break', 'continue', ]
exception_count = 0
exception_kw_list = ['try', 'catch', 'throw', 'throws', 'finally', ]
new_count = 0
new_count_kw_list = ['new']
super_count = 0
super_count_kw_list = ['super']
dots_count = 0
try:
# print('ec', entity.parent().id())
# source_file_entity = db.ent_from_id(entity.parent().id())
# print('file', type(source_file_entity), source_file_entity.longname())
for lexeme in entity.lexer(show_inactive=False):
# print(lexeme.text(), ': ', lexeme.token())
tokens_list.append(lexeme.text())
if lexeme.token() == 'Identifier':
identifiers_list.append(lexeme.text())
if lexeme.token() == 'Keyword':
keywords_list.append(lexeme.text())
if lexeme.token() == 'Operator':
operators_list.append(lexeme.text())
if lexeme.text() in return_and_print_kw_list:
return_and_print_count += 1
if lexeme.text() in condition_kw_list:
condition_count += 1
if lexeme.text() in uncondition_kw_list:
uncondition_count += 1
if lexeme.text() in exception_kw_list:
exception_count += 1
if lexeme.text() in new_count_kw_list:
new_count += 1
if lexeme.text() in super_count_kw_list:
super_count += 1
if lexeme.text() == '.':
dots_count += 1
except Exception as e:
raise RuntimeError('Error in computing class lexical metrics for class "{0}"'.format(entity.longname())) from e
number_of_assignments = operators_list.count('=')
number_of_operators_without_assignments = len(operators_list) - number_of_assignments
number_of_unique_operators = len(set(list(filter('='.__ne__, operators_list))))
class_lexicon_metrics_dict.update({'NumberOfTokens': len(tokens_list)})
class_lexicon_metrics_dict.update({'NumberOfUniqueTokens': len(set(tokens_list))})
class_lexicon_metrics_dict.update({'NumberOfIdentifiers': len(identifiers_list)})
class_lexicon_metrics_dict.update({'NumberOfUniqueIdentifiers': len(set(identifiers_list))})
class_lexicon_metrics_dict.update({'NumberOfKeywords': len(keywords_list)})
class_lexicon_metrics_dict.update({'NumberOfUniqueKeywords': len(set(keywords_list))})
class_lexicon_metrics_dict.update(
{'NumberOfOperatorsWithoutAssignments': number_of_operators_without_assignments})
class_lexicon_metrics_dict.update({'NumberOfAssignments': number_of_assignments})
class_lexicon_metrics_dict.update({'NumberOfUniqueOperators': number_of_unique_operators})
class_lexicon_metrics_dict.update({'NumberOfDots': dots_count})
class_lexicon_metrics_dict.update({'NumberOfSemicolons': entity.metric(['CountSemicolon'])['CountSemicolon']})
class_lexicon_metrics_dict.update({'NumberOfReturnAndPrintStatements': return_and_print_count})
class_lexicon_metrics_dict.update({'NumberOfConditionalJumpStatements': condition_count})
class_lexicon_metrics_dict.update({'NumberOfUnConditionalJumpStatements': uncondition_count})
class_lexicon_metrics_dict.update({'NumberOfExceptionStatements': exception_count})
class_lexicon_metrics_dict.update({'NumberOfNewStatements': new_count})
class_lexicon_metrics_dict.update({'NumberOfSuperStatements': super_count})
# print('Class lexicon metrics:', class_lexicon_metrics_dict)
return class_lexicon_metrics_dict
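# For example, with operators_list = ['=', '+', '=', '<'] the method reports
# NumberOfAssignments = 2, NumberOfOperatorsWithoutAssignments = 2, and
# NumberOfUniqueOperators = 2 (the set {'+', '<'} left after dropping '=').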
@classmethod
def compute_java_package_metrics(cls, db=None, class_name: str = None):
# print('ib', entity.ib())
# package_name = ''
# Find package: strategy 1
# for ib in entity.ib():
# if ib.find('Package:') != -1:
# sp = ib.split(':')
# print('entity ib', sp[1][1:-1])
# package_name = sp[1][1:-1]
# Find package: strategy 2: Dominated strategy
class_name_list = class_name.split('.')[:-1]
package_name = '.'.join(class_name_list)
# print('package_name string', package_name)
package_list = db.lookup(package_name + '$', 'Package')
if package_list is None:
return None
if len(package_list) == 0: # if len != 1 return None!
return None
package = package_list[0]
# print('kind:', package.kind())
print('Computing package metrics for class: "{0}" in package: "{1}"'.format(class_name, package.longname()))
# Print info
# print('package metrics')
package_metrics = package.metric(package.metrics())
# print('number of metrics:', len(metrics), metrics)
# for i, metric in enumerate(metrics.keys()):
# print(i + 1, ': ', metric, metrics[metric])
# print('class metrics')
# metrics2 = entity.metric(entity.metrics())
# print('number of metrics:', len(metrics), metrics2)
# for i, metric2 in enumerate(metrics.keys()):
# print(i + 1, ': ', metric2, metrics[metric2])
#
# print(package.refs('Definein'))
# for defin in package.refs('Definein'):
# print('kind', defin.ent().kind())
# print(defin, '-->', defin.ent().ents('Java Define', 'Class'))
# metrics = entity.metric(defin.ent().metrics())
# print('number of metrics in file:', len(metrics), metrics)
# for i, metric in enumerate(metrics.keys()):
# print(i + 1, ': ', metric, metrics[metric])
classes_and_interfaces_list = UnderstandUtility.get_package_clasess_java(package_entity=package)
# print(classes_and_interfaces_list)
# quit()
# 2. Custom package metrics
# 2.1. PKLOC (15)
pk_loc_list = list()
pk_loc_decl_list = list()
pk_loc_exe_list = list()
for type_entity in classes_and_interfaces_list:
pk_loc_list.append(type_entity.metric(['CountLineCode'])['CountLineCode'])
pk_loc_decl_list.append(type_entity.metric(['CountLineCodeDecl'])['CountLineCodeDecl'])
pk_loc_exe_list.append(type_entity.metric(['CountLineCodeExe'])['CountLineCodeExe'])
cls.remove_none_from_lists([pk_loc_list, pk_loc_decl_list, pk_loc_exe_list])
try:
package_metrics.update({'AvgLineCodeDecl': sum(pk_loc_decl_list) / len(pk_loc_decl_list)})
package_metrics.update({'AvgLineCodeExe': sum(pk_loc_exe_list) / len(pk_loc_exe_list)})
package_metrics.update({'MaxLineCode': max(pk_loc_list)})
package_metrics.update({'MaxLineCodeDecl': max(pk_loc_decl_list)})
package_metrics.update({'MaxLineCodeExe': max(pk_loc_exe_list)})
package_metrics.update({'MinLineCode': min(pk_loc_list)})
package_metrics.update({'MinLineCodeDecl': min(pk_loc_decl_list)})
package_metrics.update({'MinLineCodeExe': min(pk_loc_exe_list)})
package_metrics.update({'SDLineCode': np.std(pk_loc_list)})
package_metrics.update({'SDLineCodeDecl': np.std(pk_loc_decl_list)})
package_metrics.update({'SDLineCodeExe': np.std(pk_loc_exe_list)})
except Exception as e:
raise TypeError('Error happened when computing package metrics for class "{0}" and list "{1}"'.format(class_name,
pk_loc_decl_list)) from e
# 2.2 PKNOS (15)
pk_stmt_list = list()
pk_stmt_decl_list = list()
pk_stmt_exe_list = list()
for type_entity in classes_and_interfaces_list:
pk_stmt_list.append(type_entity.metric(['CountStmt'])['CountStmt'])
pk_stmt_decl_list.append(type_entity.metric(['CountStmtDecl'])['CountStmtDecl'])
pk_stmt_exe_list.append(type_entity.metric(['CountStmtExe'])['CountStmtExe'])
cls.remove_none_from_lists([pk_stmt_list, pk_stmt_decl_list, pk_stmt_exe_list])
package_metrics.update({'AvgStmt': sum(pk_stmt_list) / len(pk_stmt_list)})
package_metrics.update({'AvgStmtDecl': sum(pk_stmt_decl_list) / len(pk_stmt_decl_list)})
package_metrics.update({'AvgStmtExe': sum(pk_stmt_exe_list) / len(pk_stmt_exe_list)})
package_metrics.update({'MaxStmt': max(pk_stmt_list)})
package_metrics.update({'MaxStmtDecl': max(pk_stmt_decl_list)})
package_metrics.update({'MaxStmtExe': max(pk_stmt_exe_list)})
package_metrics.update({'MinStmt': min(pk_stmt_list)})
package_metrics.update({'MinStmtDecl': min(pk_stmt_decl_list)})
package_metrics.update({'MinStmtExe': min(pk_stmt_exe_list)})
package_metrics.update({'SDStmt': np.std(pk_stmt_list)})
package_metrics.update({'SDStmtDecl': np.std(pk_stmt_decl_list)})
package_metrics.update({'SDStmtExe': np.std(pk_stmt_exe_list)})
# 2.3 PKCC (20)
pk_cyclomatic_list = list()
pk_cyclomatic_namm_list = list()
pk_cyclomatic_strict_list = list()
pk_cyclomatic_strict_namm_list = list()
pk_cyclomatic_modified_list = list()
pk_cyclomatic_modified_namm_list = list()
pk_essential_list = list()
pk_essential_namm_list = list()
for type_entity in classes_and_interfaces_list:
pk_cyclomatic_list.append(type_entity.metric(['SumCyclomatic'])['SumCyclomatic'])
pk_cyclomatic_modified_list.append(type_entity.metric(['SumCyclomaticModified'])['SumCyclomaticModified'])
pk_cyclomatic_strict_list.append(type_entity.metric(['SumCyclomaticStrict'])['SumCyclomaticStrict'])
pk_essential_list.append(type_entity.metric(['SumEssential'])['SumEssential'])
cls.remove_none_from_lists(
[pk_cyclomatic_list, pk_cyclomatic_strict_list, pk_cyclomatic_modified_list, pk_essential_list])
package_metrics.update({'MinCyclomatic': min(pk_cyclomatic_list)})
package_metrics.update({'MinCyclomaticModified': min(pk_cyclomatic_modified_list)})
package_metrics.update({'MinCyclomaticStrict': min(pk_cyclomatic_strict_list)})
package_metrics.update({'MinEssential': min(pk_essential_list)})
package_metrics.update({'SDCyclomatic': np.std(pk_cyclomatic_list)})
package_metrics.update({'SDCyclomaticModified': np.std(pk_cyclomatic_modified_list)})
package_metrics.update({'SDCyclomaticStrict': np.std(pk_cyclomatic_strict_list)})
package_metrics.update({'SDEssential': np.std(pk_essential_list)})
# 2.4 PKNESTING (4)
pk_nesting_list = list()
for type_entity in classes_and_interfaces_list:
pk_nesting_list.append(type_entity.metric(['MaxNesting'])['MaxNesting'])
cls.remove_none_from_lists([pk_nesting_list])
package_metrics.update({'MinNesting': min(pk_nesting_list)})
package_metrics.update({'AvgNesting': sum(pk_nesting_list) / len(pk_nesting_list)})
package_metrics.update({'SDNesting': np.std(pk_nesting_list)})
# 2.5
# Other Size/Count metrics (understand built-in metrics)
# PKNOMNAMM: Package number of not accessor or mutator methods
j_code_odor = JCodeOdorMetric()
pk_not_accessor_and_mutator_methods_list = list()
pk_accessor_and_mutator_methods_list = list()
for type_entity in classes_and_interfaces_list:
pk_not_accessor_and_mutator_methods_list.append(j_code_odor.NOMNAMM(type_entity))
pk_accessor_and_mutator_methods_list.append(j_code_odor.NOMAMM(type_entity))
cls.remove_none_from_lists([pk_not_accessor_and_mutator_methods_list, pk_accessor_and_mutator_methods_list])
package_metrics.update({'PKNOMNAMM': sum(pk_not_accessor_and_mutator_methods_list)})
# 2.6 Visibility metrics
# Other Visibility metrics metrics (understand built-in metrics)
package_metrics.update({'PKNOAMM': sum(pk_accessor_and_mutator_methods_list)})
# To add other visibility metrics
# 2.7 Inheritance metrics
package_metrics.update({'PKNOI': len(UnderstandUtility.get_package_interfaces_java(package_entity=package))})
package_metrics.update(
{'PKNOAC': len(UnderstandUtility.get_package_abstract_class_java(package_entity=package))})
# print(len(package_metrics))
# print(package_metrics)
return package_metrics
@classmethod
def compute_java_project_metrics(cls, db):
project_metrics = db.metric(db.metrics())
# print('number of metrics:', len(project_metrics), project_metrics)
# for i, metric in enumerate( project_metrics.keys()):
# print(i + 1, ': ', metric, project_metrics[metric])
# print(project_metrics) # Print Understand built-in metrics
# 2 Custom project metrics
files = UnderstandUtility.get_project_files_java(db=db)
# 2.1 PJLOC (30)
pj_loc_list = list()
pj_loc_decl_list = list()
pj_loc_exe_list = list()
pj_stmt_list = list()
pj_stmt_decl_list = list()
pj_stmt_exe_list = list()
for file_entity in files:
pj_loc_list.append(file_entity.metric(['CountLineCode'])['CountLineCode'])
pj_loc_decl_list.append(file_entity.metric(['CountLineCodeDecl'])['CountLineCodeDecl'])
pj_loc_exe_list.append(file_entity.metric(['CountLineCodeExe'])['CountLineCodeExe'])
pj_stmt_list.append(file_entity.metric(['CountStmt'])['CountStmt'])
pj_stmt_decl_list.append(file_entity.metric(['CountStmtDecl'])['CountStmtDecl'])
pj_stmt_exe_list.append(file_entity.metric(['CountStmtExe'])['CountStmtExe'])
cls.remove_none_from_lists([pj_loc_list, pj_loc_decl_list, pj_loc_exe_list,
pj_stmt_list, pj_stmt_decl_list, pj_stmt_exe_list])
project_metrics.update({'AvgLineCodeDecl': sum(pj_loc_decl_list) / len(pj_loc_decl_list)})
project_metrics.update({'AvgLineCodeExe': sum(pj_loc_exe_list) / len(pj_loc_exe_list)})
project_metrics.update({'MaxLineCode': max(pj_loc_list)})
project_metrics.update({'MaxLineCodeDecl': max(pj_loc_decl_list)})
project_metrics.update({'MaxLineCodeExe': max(pj_loc_exe_list)})
project_metrics.update({'MinLineCode': min(pj_loc_list)})
project_metrics.update({'MinLineCodeDecl': min(pj_loc_decl_list)})
project_metrics.update({'MinLineCodeExe': min(pj_loc_exe_list)})
project_metrics.update({'SDLineCode': np.std(pj_loc_list)})
project_metrics.update({'SDLineCodeDecl': np.std(pj_loc_decl_list)})
project_metrics.update({'SDLineCodeExe': np.std(pj_loc_exe_list)})
# 2.2. PJNOST (15)
project_metrics.update({'AvgStmt': sum(pj_stmt_list) / len(pj_stmt_list)})
project_metrics.update({'AvgStmtDecl': sum(pj_stmt_decl_list) / len(pj_stmt_decl_list)})
project_metrics.update({'AvgStmtExe': sum(pj_stmt_exe_list) / len(pj_stmt_exe_list)})
project_metrics.update({'MaxStmt': max(pj_stmt_list)})
project_metrics.update({'MaxStmtDecl': max(pj_stmt_decl_list)})
project_metrics.update({'MaxStmtExe': max(pj_stmt_exe_list)})
project_metrics.update({'MinStmt': min(pj_stmt_list)})
project_metrics.update({'MinStmtDecl': min(pj_stmt_decl_list)})
project_metrics.update({'MinStmtExe': min(pj_stmt_exe_list)})
project_metrics.update({'SDStmt': np.std(pj_stmt_list)})
project_metrics.update({'SDStmtDecl': np.std(pj_stmt_decl_list)})
project_metrics.update({'SDStmtExe': np.std(pj_stmt_exe_list)})
# 2.3 Other Count/Size metrics
packages = db.ents('Java Package')
# print('number of packages', len(packages))
project_metrics.update({'NumberOfPackages': len(packages)})
j_code_odor = JCodeOdorMetric()
pj_number_of_method_namm = 0
for class_ in UnderstandUtility.get_project_classes_java(db=db):
pj_number_of_method_namm += j_code_odor.NOMNAMM(class_)
project_metrics.update({'PJNOMNAMM': pj_number_of_method_namm})
# 2.4 PJCC (20): Project cyclomatic complexity
pj_cyclomatic_list = list()
pj_cyclomatic_namm_list = list()
pj_cyclomatic_strict_list = list()
pj_cyclomatic_strict_namm_list = list()
pj_cyclomatic_modified_list = list()
pj_cyclomatic_modified_namm_list = list()
pj_essential_list = list()
pj_essential_namm_list = list()
for type_entity in files:
pj_cyclomatic_list.append(type_entity.metric(['SumCyclomatic'])['SumCyclomatic'])
pj_cyclomatic_modified_list.append(type_entity.metric(['SumCyclomaticModified'])['SumCyclomaticModified'])
pj_cyclomatic_strict_list.append(type_entity.metric(['SumCyclomaticStrict'])['SumCyclomaticStrict'])
pj_essential_list.append(type_entity.metric(['SumEssential'])['SumEssential'])
cls.remove_none_from_lists([pj_cyclomatic_list, pj_cyclomatic_strict_list,
pj_cyclomatic_modified_list, pj_essential_list])
project_metrics.update({'SumCyclomatic': sum(pj_cyclomatic_list)})
project_metrics.update({'SumCyclomaticModified': sum(pj_cyclomatic_modified_list)})
project_metrics.update({'SumCyclomaticStrict': sum(pj_cyclomatic_strict_list)})
project_metrics.update({'SumEssential': sum(pj_essential_list)})
project_metrics.update({'MaxCyclomatic': max(pj_cyclomatic_list)})
project_metrics.update({'MaxCyclomaticModified': max(pj_cyclomatic_modified_list)})
project_metrics.update({'MaxCyclomaticStrict': max(pj_cyclomatic_strict_list)})
project_metrics.update({'MaxEssential': max(pj_essential_list)})
project_metrics.update({'AvgCyclomatic': sum(pj_cyclomatic_list) / len(pj_cyclomatic_list)})
project_metrics.update(
{'AvgCyclomaticModified': sum(pj_cyclomatic_modified_list) / len(pj_cyclomatic_modified_list)})
project_metrics.update({'AvgCyclomaticStrict': sum(pj_cyclomatic_strict_list) / len(pj_cyclomatic_strict_list)})
project_metrics.update({'AvgEssential': sum(pj_essential_list) / len(pj_essential_list)})
project_metrics.update({'MinCyclomatic': min(pj_cyclomatic_list)})
project_metrics.update({'MinCyclomaticModified': min(pj_cyclomatic_modified_list)})
project_metrics.update({'MinCyclomaticStrict': min(pj_cyclomatic_strict_list)})
project_metrics.update({'MinEssential': min(pj_essential_list)})
project_metrics.update({'SDCyclomatic': np.std(pj_cyclomatic_list)})
project_metrics.update({'SDCyclomaticModified': np.std(pj_cyclomatic_modified_list)})
project_metrics.update({'SDCyclomaticStrict': np.std(pj_cyclomatic_strict_list)})
project_metrics.update({'SDEssential': np.std(pj_essential_list)})
# 2.5 PJNESTING (4)
pj_nesting_list = list()
for type_entity in files:
pj_nesting_list.append(type_entity.metric(['MaxNesting'])['MaxNesting'])
cls.remove_none_from_lists([pj_nesting_list])
project_metrics.update({'MinNesting': min(pj_nesting_list)})
project_metrics.update({'AvgNesting': sum(pj_nesting_list) / len(pj_nesting_list)})
project_metrics.update({'SDNesting': np.std(pj_nesting_list)})
# 3 Inheritance metrics
project_metrics.update({'PJNOI': len(UnderstandUtility.get_project_interfaces_java(db=db))})
project_metrics.update({'PJNAC': len(UnderstandUtility.get_project_abstract_classes_java(db=db))})
return project_metrics
@classmethod
def remove_none_from_lists(cls, lists: list = None):
for list_ in lists:
# Drop None entries (returned by Understand for entities it cannot
# measure) so the sum/min/max/std computations above do not fail
list_[:] = [x for x in list_ if x is not None]
if len(list_) == 0:
list_.append(0)
warnings.warn('Empty list passed!')
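# A minimal sketch of the helper above (hypothetical values), showing the
# in-place clean-up:
# >>> data = [10, None, 3]
# >>> remove_none_from_lists([data])
# >>> data
# [10, 3]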
# ------------------------------------------------------------------------
class PreProcess:
"""
Writes all metrics in a csv file and performs preprocessing
"""
# Dataset creation API
@classmethod
def create_understand_database_from_project(cls, root_path='sf110_without_test/'):
# {0}: understand_db_directory, {1}: understand_db_name, {2}: project_root_directory
cmd = 'und create -db {0}{1}.udb -languages java add {2} analyze -all'
# projects = [x[0] for x in os.walk(root_path)]
projects = [name for name in os.listdir(root_path) if os.path.isdir(os.path.join(root_path, name))]
for project_ in projects:
command_ = cmd.format(root_path, project_, root_path + project_)
print('executing command {0}'.format(command_))
# returned_value_in_byte = subprocess.check_output(command_, shell=True)
os.system('cmd /c "{0}"'.format(command_))
# os.system('cmd / k "{0}"'.format(command_))
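# For reference, the command built above for a hypothetical project named
# "foo" under the default root would be (illustrative only):
# und create -db sf110_without_test/foo.udb -languages java add sf110_without_test/foo analyze -all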
@classmethod
def extract_project_classes_all(cls, udbs_path, class_list_csv_path_root=r'class_list_csvs/'):
files = [f for f in os.listdir(udbs_path) if os.path.isfile(os.path.join(udbs_path, f))]
for f in files:
print('processing understand db file {0}:'.format(f))
db = understand.open(os.path.join(udbs_path, f))
cls.write_project_classes(project_name=f[:-4], db=db, csv_path=class_list_csv_path_root + f[:-4] + '.csv')
print('processing understand db file {0} was finished'.format(f))
@classmethod
def extract_project_classes(cls, db):
classes_list = UnderstandUtility.get_project_classes_longnames_java(db=db)
print('@understand', len(set(classes_list)), set(classes_list))
return classes_list
@classmethod
def write_project_classes(cls, project_name: str = None, db=None, csv_path: str = None):
classes = cls.extract_project_classes(db=db)
df = pd.DataFrame(columns=['Project', 'Class', 'Line', 'Branch', 'Mutation', 'Output', 'Exceptions', 'Tests'])
df['Project'] = [project_name for i in range(0, len(classes))]
df['Class'] = classes
df.to_csv(csv_path, index=False)
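# Illustrative shape of the CSV written above (project/class values are
# hypothetical; the remaining columns are left empty at this stage):
# Project,Class,Line,Branch,Mutation,Output,Exceptions,Tests
# foo,com.example.Bar,,,,,,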
@classmethod
def read_project_classes(cls, db=None, classes_names_list: list = None):
class_entities = list()
for class_name_ in classes_names_list:
# Find relevant class entity
class_entity_ = UnderstandUtility.get_class_entity_by_name(db=db, class_name=class_name_)
if class_entity_ is not None:
method_list = UnderstandUtility.get_method_of_class_java2(db=db, class_entity=class_entity_)
if method_list is not None:
class_entities.append(class_entity_)
else:
# We do not need a class without any method!
warnings.warn('Requested class entity with name "{0}" was not found'.format(class_name_))
'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def skip_servicegroup_ex_async(
self,
request: deps_models.SkipServicegroupRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.SkipServicegroupResponse:
"""
Description: Ignore a service group
Summary: Ignore a service group
"""
UtilClient.validate_model(request)
return deps_models.SkipServicegroupResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.servicegroup.skip', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def create_servicegroup(
self,
request: deps_models.CreateServicegroupRequest,
) -> deps_models.CreateServicegroupResponse:
"""
Description: Create a release group
Summary: Create a release group
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.create_servicegroup_ex(request, headers, runtime)
async def create_servicegroup_async(
self,
request: deps_models.CreateServicegroupRequest,
) -> deps_models.CreateServicegroupResponse:
"""
Description: Create a release group
Summary: Create a release group
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.create_servicegroup_ex_async(request, headers, runtime)
def create_servicegroup_ex(
self,
request: deps_models.CreateServicegroupRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.CreateServicegroupResponse:
"""
Description: Create a release group
Summary: Create a release group
"""
UtilClient.validate_model(request)
return deps_models.CreateServicegroupResponse().from_map(
self.do_request('1.0', 'antcloud.deps.servicegroup.create', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def create_servicegroup_ex_async(
self,
request: deps_models.CreateServicegroupRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.CreateServicegroupResponse:
"""
Description: Create a release group
Summary: Create a release group
"""
UtilClient.validate_model(request)
return deps_models.CreateServicegroupResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.servicegroup.create', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
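# A hedged usage sketch for the create_servicegroup wrappers above (client
# construction and request fields are assumptions, not confirmed by this
# file):
# client = Client(config)  # hypothetical Tea-style client setup
# request = deps_models.CreateServicegroupRequest()
# response = client.create_servicegroup(request)  # synchronous variant
# response = await client.create_servicegroup_async(request)  # async variant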
def delete_servicegroup(
self,
request: deps_models.DeleteServicegroupRequest,
) -> deps_models.DeleteServicegroupResponse:
"""
Description: Delete a release group
Summary: Delete a release group
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.delete_servicegroup_ex(request, headers, runtime)
async def delete_servicegroup_async(
self,
request: deps_models.DeleteServicegroupRequest,
) -> deps_models.DeleteServicegroupResponse:
"""
Description: Delete a release group
Summary: Delete a release group
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.delete_servicegroup_ex_async(request, headers, runtime)
def delete_servicegroup_ex(
self,
request: deps_models.DeleteServicegroupRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.DeleteServicegroupResponse:
"""
Description: Delete a release group
Summary: Delete a release group
"""
UtilClient.validate_model(request)
return deps_models.DeleteServicegroupResponse().from_map(
self.do_request('1.0', 'antcloud.deps.servicegroup.delete', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def delete_servicegroup_ex_async(
self,
request: deps_models.DeleteServicegroupRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.DeleteServicegroupResponse:
"""
Description: Delete a release group
Summary: Delete a release group
"""
UtilClient.validate_model(request)
return deps_models.DeleteServicegroupResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.servicegroup.delete', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def switch_servicegroup(
self,
request: deps_models.SwitchServicegroupRequest,
) -> deps_models.SwitchServicegroupResponse:
"""
Description: Switch a service group
Summary: Switch a service group
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.switch_servicegroup_ex(request, headers, runtime)
async def switch_servicegroup_async(
self,
request: deps_models.SwitchServicegroupRequest,
) -> deps_models.SwitchServicegroupResponse:
"""
Description: Switch a service group
Summary: Switch a service group
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.switch_servicegroup_ex_async(request, headers, runtime)
def switch_servicegroup_ex(
self,
request: deps_models.SwitchServicegroupRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.SwitchServicegroupResponse:
"""
Description: Switch a service group
Summary: Switch a service group
"""
UtilClient.validate_model(request)
return deps_models.SwitchServicegroupResponse().from_map(
self.do_request('1.0', 'antcloud.deps.servicegroup.switch', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def switch_servicegroup_ex_async(
self,
request: deps_models.SwitchServicegroupRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.SwitchServicegroupResponse:
"""
Description: Switch a service group
Summary: Switch a service group
"""
UtilClient.validate_model(request)
return deps_models.SwitchServicegroupResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.servicegroup.switch', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def get_service(
self,
request: deps_models.GetServiceRequest,
) -> deps_models.GetServiceResponse:
"""
Description: Get release service details
Summary: Get release service details
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_service_ex(request, headers, runtime)
async def get_service_async(
self,
request: deps_models.GetServiceRequest,
) -> deps_models.GetServiceResponse:
"""
Description: Get release service details
Summary: Get release service details
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_service_ex_async(request, headers, runtime)
def get_service_ex(
self,
request: deps_models.GetServiceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.GetServiceResponse:
"""
Description: Get release service details
Summary: Get release service details
"""
UtilClient.validate_model(request)
return deps_models.GetServiceResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def get_service_ex_async(
self,
request: deps_models.GetServiceRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.GetServiceResponse:
"""
Description: Get release service details
Summary: Get release service details
"""
UtilClient.validate_model(request)
return deps_models.GetServiceResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def get_service_progress(
self,
request: deps_models.GetServiceProgressRequest,
) -> deps_models.GetServiceProgressResponse:
"""
Description: Get application release information
Summary: Get application release information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.get_service_progress_ex(request, headers, runtime)
async def get_service_progress_async(
self,
request: deps_models.GetServiceProgressRequest,
) -> deps_models.GetServiceProgressResponse:
"""
Description: Get application release information
Summary: Get application release information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.get_service_progress_ex_async(request, headers, runtime)
def get_service_progress_ex(
self,
request: deps_models.GetServiceProgressRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.GetServiceProgressResponse:
"""
Description: Get application release information
Summary: Get application release information
"""
UtilClient.validate_model(request)
return deps_models.GetServiceProgressResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.progress.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def get_service_progress_ex_async(
self,
request: deps_models.GetServiceProgressRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.GetServiceProgressResponse:
"""
Description: Get application release information
Summary: Get application release information
"""
UtilClient.validate_model(request)
return deps_models.GetServiceProgressResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.progress.get', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_service_progress(
self,
request: deps_models.QueryServiceProgressRequest,
) -> deps_models.QueryServiceProgressResponse:
"""
Description: Query release progress
Summary: Query release progress
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_service_progress_ex(request, headers, runtime)
async def query_service_progress_async(
self,
request: deps_models.QueryServiceProgressRequest,
) -> deps_models.QueryServiceProgressResponse:
"""
Description: Query release progress
Summary: Query release progress
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_service_progress_ex_async(request, headers, runtime)
def query_service_progress_ex(
self,
request: deps_models.QueryServiceProgressRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceProgressResponse:
"""
Description: Query release progress
Summary: Query release progress
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceProgressResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.progress.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_service_progress_ex_async(
self,
request: deps_models.QueryServiceProgressRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceProgressResponse:
"""
Description: Query release progress
Summary: Query release progress
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceProgressResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.progress.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_service_slbmount(
self,
request: deps_models.QueryServiceSlbmountRequest,
) -> deps_models.QueryServiceSlbmountResponse:
"""
Description: Query SLB mount information
Summary: Query SLB mount information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_service_slbmount_ex(request, headers, runtime)
async def query_service_slbmount_async(
self,
request: deps_models.QueryServiceSlbmountRequest,
) -> deps_models.QueryServiceSlbmountResponse:
"""
Description: Query SLB mount information
Summary: Query SLB mount information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_service_slbmount_ex_async(request, headers, runtime)
def query_service_slbmount_ex(
self,
request: deps_models.QueryServiceSlbmountRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceSlbmountResponse:
"""
Description: Query SLB mount information
Summary: Query SLB mount information
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceSlbmountResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.slbmount.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_service_slbmount_ex_async(
self,
request: deps_models.QueryServiceSlbmountRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceSlbmountResponse:
"""
Description: Query SLB mount information
Summary: Query SLB mount information
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceSlbmountResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.slbmount.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def batchquery_service_slbmount(
self,
request: deps_models.BatchqueryServiceSlbmountRequest,
) -> deps_models.BatchqueryServiceSlbmountResponse:
"""
Description: Batch query SLB mount information
Summary: Batch query SLB mount information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.batchquery_service_slbmount_ex(request, headers, runtime)
async def batchquery_service_slbmount_async(
self,
request: deps_models.BatchqueryServiceSlbmountRequest,
) -> deps_models.BatchqueryServiceSlbmountResponse:
"""
Description: Batch query SLB mount information
Summary: Batch query SLB mount information
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.batchquery_service_slbmount_ex_async(request, headers, runtime)
def batchquery_service_slbmount_ex(
self,
request: deps_models.BatchqueryServiceSlbmountRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.BatchqueryServiceSlbmountResponse:
"""
Description: Batch query SLB mount information
Summary: Batch query SLB mount information
"""
UtilClient.validate_model(request)
return deps_models.BatchqueryServiceSlbmountResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.slbmount.batchquery', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def batchquery_service_slbmount_ex_async(
self,
request: deps_models.BatchqueryServiceSlbmountRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.BatchqueryServiceSlbmountResponse:
"""
Description: Batch query SLB mount information
Summary: Batch query SLB mount information
"""
UtilClient.validate_model(request)
return deps_models.BatchqueryServiceSlbmountResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.slbmount.batchquery', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def update_service_slbmount(
self,
request: deps_models.UpdateServiceSlbmountRequest,
) -> deps_models.UpdateServiceSlbmountResponse:
"""
Description: Update SLB mount weight
Summary: Update SLB mount weight
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.update_service_slbmount_ex(request, headers, runtime)
async def update_service_slbmount_async(
self,
request: deps_models.UpdateServiceSlbmountRequest,
) -> deps_models.UpdateServiceSlbmountResponse:
"""
Description: Update SLB mount weight
Summary: Update SLB mount weight
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.update_service_slbmount_ex_async(request, headers, runtime)
def update_service_slbmount_ex(
self,
request: deps_models.UpdateServiceSlbmountRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.UpdateServiceSlbmountResponse:
"""
Description: Update SLB mount weight
Summary: Update SLB mount weight
"""
UtilClient.validate_model(request)
return deps_models.UpdateServiceSlbmountResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.slbmount.update', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def update_service_slbmount_ex_async(
self,
request: deps_models.UpdateServiceSlbmountRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.UpdateServiceSlbmountResponse:
"""
Description: Update SLB mount weight
Summary: Update SLB mount weight
"""
UtilClient.validate_model(request)
return deps_models.UpdateServiceSlbmountResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.slbmount.update', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_service_slbmountprogress(
self,
request: deps_models.QueryServiceSlbmountprogressRequest,
) -> deps_models.QueryServiceSlbmountprogressResponse:
"""
Description: Query mount progress
Summary: Query mount progress
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_service_slbmountprogress_ex(request, headers, runtime)
async def query_service_slbmountprogress_async(
self,
request: deps_models.QueryServiceSlbmountprogressRequest,
) -> deps_models.QueryServiceSlbmountprogressResponse:
"""
Description: Query mount progress
Summary: Query mount progress
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_service_slbmountprogress_ex_async(request, headers, runtime)
def query_service_slbmountprogress_ex(
self,
request: deps_models.QueryServiceSlbmountprogressRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceSlbmountprogressResponse:
"""
Description: Query mount progress
Summary: Query mount progress
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceSlbmountprogressResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.slbmountprogress.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_service_slbmountprogress_ex_async(
self,
request: deps_models.QueryServiceSlbmountprogressRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceSlbmountprogressResponse:
"""
Description: Query mount progress
Summary: Query mount progress
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceSlbmountprogressResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.slbmountprogress.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
def query_service_latestsuccess(
self,
request: deps_models.QueryServiceLatestsuccessRequest,
) -> deps_models.QueryServiceLatestsuccessResponse:
"""
Description: Query the most recent successfully released application record
Summary: Query the most recent successfully released application record
"""
runtime = util_models.RuntimeOptions()
headers = {}
return self.query_service_latestsuccess_ex(request, headers, runtime)
async def query_service_latestsuccess_async(
self,
request: deps_models.QueryServiceLatestsuccessRequest,
) -> deps_models.QueryServiceLatestsuccessResponse:
"""
Description: Query the most recent successfully released application record
Summary: Query the most recent successfully released application record
"""
runtime = util_models.RuntimeOptions()
headers = {}
return await self.query_service_latestsuccess_ex_async(request, headers, runtime)
def query_service_latestsuccess_ex(
self,
request: deps_models.QueryServiceLatestsuccessRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceLatestsuccessResponse:
"""
Description: Query the most recent successfully released application record
Summary: Query the most recent successfully released application record
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceLatestsuccessResponse().from_map(
self.do_request('1.0', 'antcloud.deps.service.latestsuccess.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
async def query_service_latestsuccess_ex_async(
self,
request: deps_models.QueryServiceLatestsuccessRequest,
headers: Dict[str, str],
runtime: util_models.RuntimeOptions,
) -> deps_models.QueryServiceLatestsuccessResponse:
"""
Description: Query the most recent successfully released application record
Summary: Query the most recent successfully released application record
"""
UtilClient.validate_model(request)
return deps_models.QueryServiceLatestsuccessResponse().from_map(
await self.do_request_async('1.0', 'antcloud.deps.service.latestsuccess.query', 'HTTPS', 'POST', f'/gateway.do', TeaCore.to_map(request), headers, runtime)
)
# Handle cases where performance values are and are not
# broken out further by year
perf_val = [sum([
x[z] * perf_convert[y][cz_int] for
y, z in enumerate(aia_list)]) if type(
x[aia_list[0]]) is not dict else {
str(yr): sum([x[z][str(yr)] * perf_convert[y][
cz_int] for y, z in enumerate(aia_list)])
for yr in years} for x in perf_val]
else:
perf_val = [x[cz_int] for x in perf_val]
except TypeError:
pass
except KeyError:
# Try the case where the value is broken out by climate
try:
# Check whether an additional conversion of performance
# values is needed (from AIA regions to EMM regions)
if perf_convert is not None:
try:
# Handle cases where performance values are and are
# not broken out further by year
perf_val = sum([specific_cpl_data['performance'][
'typical'][y] * perf_convert[x][cz_int]
for x, y in enumerate(aia_list)])
except TypeError:
perf_val = {str(yr): sum([specific_cpl_data[
'performance']['typical'][y][str(yr)] *
perf_convert[x][cz_int] for
x, y in enumerate(aia_list)]) for yr in years}
else:
perf_val = specific_cpl_data[
'performance']['typical'][cz_int]
except TypeError:
perf_val = specific_cpl_data['performance']['typical']
the_perf['units'] = specific_cpl_data['performance']['units']
the_perf['source'] = specific_cpl_data['performance']['source']
# Record the performance value identified in the above rigmarole
# Case where the performance value is not broken out by vintage
if type(perf_val) is not list:
# Note: the dict comprehension handles cases where the
# performance value is further broken out by year; if the value
# is not broken out by year, the comprehension assumes the same
# performance value for all years in the analysis time horizon
the_perf['typical'] = {
str(yr): perf_val[str(yr)] if type(perf_val) is dict
else perf_val for yr in years}
# Case where the performance value is broken out by vintage
else:
# Note: the dict comprehension handles cases where the
# performance value is further broken out by year; if the value
# is not broken out by year, the comprehension assumes the same
# performance value for all years in the analysis time horizon
the_perf['typical'] = {
'new': {
str(yr): perf_val[0][str(yr)] if type(perf_val[0]) is dict
else perf_val[0] for yr in years},
'existing': {
str(yr): perf_val[1][str(yr)] if type(perf_val[1]) is dict
else perf_val[1] for yr in years}}
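# For instance (illustrative values): perf_val = [2.5, 3.0] (new, existing)
# with years = [2020, 2021] yields
# {'new': {'2020': 2.5, '2021': 2.5}, 'existing': {'2020': 3.0, '2021': 3.0}}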
# Transfer the lifetime data as-is (the lifetime data has a
# uniform format across all of the envelope components) except
# for the average, which is updated to be reported by year
the_life['average'] = {str(yr):
specific_cpl_data['lifetime']['average']
for yr in years}
the_life['range'] = specific_cpl_data['lifetime']['range']
the_life['units'] = specific_cpl_data['lifetime']['units']
the_life['source'] = specific_cpl_data['lifetime']['source']
# Add the cost, performance, and lifetime dicts into a master dict
# for the microsegment and envelope component specified by key_list
tech_data_dict = {'installed cost': the_cost,
'performance': the_perf,
'lifetime': the_life}
# If the building type is residential, add envelope component
# consumer choice parameters for each year in the modeling time
# horizon (these parameters are based on AEO consumer choice
# data for the residential heating and cooling end uses in
# 'rsmeqp.txt')
if bldg_class == 'residential':
tech_data_dict['consumer choice'] = {
'competed market share': {
'parameters': {'b1': {str(yr): -0.003 for yr in years},
'b2': {str(yr): -0.012 for yr in years}},
'source': ('EIA AEO choice model parameters for heating' +
' and cooling equipment')
}
}
# If no data were found, which is expected for envelope components
# that are not representative of building products (e.g., people
# gain, equipment gain), simply return 0
else:
tech_data_dict = 0
return tech_data_dict
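# Illustrative shape of the return value assembled above (values are
# hypothetical): {'installed cost': {...}, 'performance': {...},
# 'lifetime': {...}}, plus a 'consumer choice' key for residential
# buildings, or the integer 0 when no data were found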
def mels_cpl_data_handler(cpl_data, conversions, years, key_list):
"""Restructure MELs component cost, performance, and lifetime data.
This function extracts the cost, performance, and lifetime data for
miscellaneous electric loads (MELs) technologies of residential and
commercial buildings from the original data and restructures it into a
form that is generally consistent with similar data originally obtained
from the Annual Energy Outlook (AEO). These data are added to the input
microsegments database after it is converted to a climate zone basis.
Args:
cpl_data (dict): Cost, performance, and lifetime data for
MELs technologies including units and source information.
conversions (dict): Energy, stock, and square footage data needed
to convert cost units for MELs technologies.
years (list): A list of integers representing the range of years
in the data.
key_list (list): Keys that specify the current location in the
microsegments database structure and thus indicate what
data should be returned by this function.
Returns:
A dict with installed cost, performance, and lifetime data
applicable to the microsegment and MELs technology specified
by key_list, as well as units and source information for those
data. All residential costs should be in $/unit, while all commercial
costs should be in $/ft^2 floor.
"""
# Preallocate variable storing cost, performance, and lifetime data
specific_cpl_data = ''
# Preallocate variables for the building class (i.e., residential
# or commercial) and the building type
bldg_class = ''
bldg_type = ''
# Check second item in list (building type) to identify building type
# name and associated class (residential, commercial) of the current
# microsegment
if key_list[1] in mseg.bldgtypedict.keys():
bldg_type = key_list[1]
bldg_class = 'residential'
elif key_list[1] in cm.CommercialTranslationDicts().bldgtypedict.keys():
bldg_type = key_list[1]
bldg_class = 'commercial'
# Use fourth item in list to identify end use of the current microsegment
eu = key_list[3]
# Pull cost, performance, and lifetime data if available, handling cases
# where the data are found on the end use level (4 item microsegment key
# chain) and cases where the data are found on the technology level (5 item
# list)
if len(key_list) == 4:
try:
specific_cpl_data = cpl_data['MELs'][bldg_class][eu]
except KeyError:
pass
elif len(key_list) == 5:
tech = key_list[-1]
try:
specific_cpl_data = cpl_data['MELs'][bldg_class][eu][tech]
except KeyError:
pass
# Preallocate empty dicts for the cost, performance, and lifetime
# data, to include the data, units, and source information
the_cost = {}
the_perf = {}
the_life = {}
# If any data were found for the particular technology, end use, and
# building class, extract the cost, performance, and lifetime data,
# including units and source information, and record it to the
# appropriate dict created for those data
if specific_cpl_data:
# Convert year list to strings for further use in data dicts
years_str = [str(x) for x in years]
# Set the possible operational mode breakouts for MELs performance data
modes = ["active", "ready", "sleep", "off"]
# Extract cost data units
orig_cost_units = specific_cpl_data['cost']['units']
# Case where the commercial MELs cost data require conversion from
# $/unit to '$/ft^2 floor'; currently, data for this conversion are
# only available for PCs so other technologies will be ignored
if orig_cost_units and (
bldg_class == "commercial" and '$/ft^2 floor'
not in orig_cost_units and '$/unit' in orig_cost_units):
if eu == "PCs":
# Set the unconverted cost value
orig_cost = specific_cpl_data['cost']['typical']
# Strip the year from the cost units (to be added back later)
the_year = orig_cost_units[:4]
# PC cost conversion data are split into three categories by
# building type; find the appropriate category key to use
# in pulling these data for the current building type
if bldg_type in ["large office", "small office", "education"]:
convert_key = "office and education"
elif bldg_type == "health care":
convert_key = "health care"
else:
convert_key = "all other"
# Apply the cost conversion ($/unit->$/ft^2 floor) across years
adj_cost = {
key: orig_cost[key] * conversions["cost unit conversions"][
eu]["conversion factor"]["value"][convert_key]
for key in years_str}
# Finalize adjusted cost units by adding back the year
adj_units = the_year + "$/ft^2 floor"
# Add the converted cost information to the appropriate dict
the_cost['typical'] = adj_cost
the_cost['units'] = adj_units
the_cost['source'] = specific_cpl_data['cost']['source']
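# Worked example (hypothetical numbers): with orig_cost = {"2020": 500} in
# "2020$/unit" and a conversion factor of 0.004 for "office and education",
# adj_cost = {"2020": 500 * 0.004} = {"2020": 2.0} and the adjusted units
# become "2020$/ft^2 floor"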
# Case where MELs cost data are not in expected units (throw error)
elif orig_cost_units and ('$/unit' not in orig_cost_units):
raise ValueError("Baseline MELs technology cost units "
"for " + str(key_list) + " are not in $/unit")
# Case where there is no need for cost conversion
elif orig_cost_units:
the_cost['typical'] = specific_cpl_data['cost']['typical']
the_cost['units'] = orig_cost_units
the_cost['source'] = specific_cpl_data['cost']['source']
# Extract MELs performance data and units
orig_perf = specific_cpl_data['performance']['typical']
orig_perf_units = specific_cpl_data['performance']['units']
# Ensure that all MELs performance data is in units of kWh/yr
if k not in self.name:
self.name[k] = []
# convert graph Gs to numpy arrays to speed up post-processing
self.g2npy()
#
# add boundary
#
self.boundary()
# save ini file
self.save()
#
def exportosm(self):
""" export layout in osm file format
Notes
-----
The osm file basename is taken from self._filename
See Also
--------
layout.loadosm
layout.loadini
layout.check
"""
# export Layout in osm format
# The osm file basename is the same as that of the _filename ini file
_filename, ext = os.path.splitext(self._filename)
filename = pyu.getlong(_filename + '.osm', 'struc/osm')
if os.path.exists(filename):
filename = pyu.getlong(_filename + '_.osm', 'struc/osm')
fd = open(filename, "w")
fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
fd.write("<osm version='0.6' upload='false' generator='PyLayers'>\n")
# creating points
for n in self.Gs.pos:
if n < 0:
if n not in self.lboundary:
if self.coordinates == 'latlon':
lon, lat = self.Gs.pos[n]
if self.coordinates == 'cart':
x, y = self.Gs.pos[n]
lon, lat = self.m(x, y, inverse=True)
fd.write("<node id='" + str(n) + "' action='modify' visible='true' lat='" +
str(lat) + "' lon='" + str(lon) + "' />\n")
for n in self.Gs.pos:
if n > 0:
#
# Conditions for adding segments
#
# _AIR are not added
#
# outdoor AIR walls above buildings are not added
# cond1 is wrong
cond1 = (self.Gs.node[n]['name'] != '_AIR')
cond2 = (self.Gs.node[n]['name'] == 'AIR')
cond3 = (self.Gs.node[n]['z'][1] == self.zceil)
cond4 = (self.Gs.node[n]['z'][0] == self.zfloor)
cond5 = (cond2 and cond3)
cond6 = (cond2 and cond4)
cond7 = (cond2 and cond3 and cond4)
if (cond1 and (not cond5) and (not cond6)) or cond7:
#v1.1 neigh = nx.neighbors(self.Gs, n)
neigh = list(self.Gs[n].keys())
d = self.Gs.node[n]
#
noden = -10000000 - n
fd.write("<way id='" + str(noden) +
"' action='modify' visible='true'>\n")
fd.write("<nd ref='" + str(neigh[0]) + "' />\n")
fd.write("<nd ref='" + str(neigh[1]) + "' />\n")
fd.write("<tag k='name' v='" + str(d['name']) + "' />\n")
fd.write("<tag k='z' v=\"" + str(d['z']) + "\" />\n")
fd.write("<tag k='transition' v='" +
str(d['transition']) + "' />\n")
fd.write("</way>\n")
fd.write("</osm>\n")
fd.close()
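# Illustrative fragment of the OSM output produced above (values are
# hypothetical):
# <node id='-3' action='modify' visible='true' lat='48.11' lon='-1.63' />
# <way id='-10000003' action='modify' visible='true'>
# <nd ref='-3' />
# <nd ref='-4' />
# <tag k='name' v='WALL' />
# <tag k='z' v="(0, 3)" />
# <tag k='transition' v='False' />
# </way>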
def save(self):
""" save Layout structure in a .lay file
"""
current_version = 1.3
if os.path.splitext(self._filename)[1]=='.ini':
self._filename = self._filename.replace('.ini','.lay')
#
# version 1.3 : suppression of index in slab and materials
#
config = ConfigParser.RawConfigParser()
config.optionxform = str
config.add_section("info")
config.add_section("points")
config.add_section("segments")
config.add_section("files")
config.add_section("slabs")
config.add_section("materials")
if self.coordinates == 'latlon':
config.set("info", "format", "latlon")
else:
config.set("info", "format", "cart")
config.set("info", "version", current_version)
config.set("info", "type", self.typ)
if self.typ == 'indoor':
config.add_section("indoor")
config.set("indoor", "zceil", self.zceil)
config.set("indoor", "zfloor", self.zfloor)
if self.typ == 'outdoor':
config.add_section("outdoor")
#
# save bounding box in latlon for reconstruction of self.m
#
if hasattr(self,"m"):
config.add_section("latlon")
config.set("latlon","llcrnrlon",self.m.llcrnrlon)
config.set("latlon","llcrnrlat",self.m.llcrnrlat)
config.set("latlon","urcrnrlon",self.m.urcrnrlon)
config.set("latlon","urcrnrlat",self.m.urcrnrlat)
config.set("latlon","projection",self.m.projection)
# config.set("info",'Nsegments',self.Ns)
# config.set("info",'Nsubsegments',self.Nss)
#for k in self.display:
# config.set("display", k, self.display[k])
# iterate on points
# boundary nodes and air walls are not saved
for n in self.Gs.pos:
if n < 0:
if n not in self.lboundary:
config.set("points", str(
n), (self.Gs.pos[n][0], self.Gs.pos[n][1]))
# iterate on segments
for n in self.Gs.pos:
if n > 0:
cond1 = (self.Gs.node[n]['name'] != '_AIR')
cond2 = (self.Gs.node[n]['name'] == 'AIR')
cond3 = (self.Gs.node[n]['z'][1] == self.zceil)
cond4 = (self.Gs.node[n]['z'][0] == self.zfloor)
cond5 = (cond2 and cond3)
cond6 = (cond2 and cond4)
cond7 = (cond2 and cond3 and cond4)
#
# _AIR are not stored (cond1)
# AIR segments reaching zceil are not stored (cond5)
# AIR segments reaching zfloor are not stored (cond6)
#
if (cond1 and (not cond5) and (not cond6)) or cond7:
d = copy.deepcopy(self.Gs.node[n])
# v1.1 d['connect'] = nx.neighbors(self.Gs, n)
d['connect'] = list(self.Gs[n].keys())
try:
if d['transition']:
pass
except:
d['transition'] = False
try:
if 'DOOR' in d['ss_name']:
d['transition'] = True
except:
pass
# remove normal information from the structure
try:
d.pop('norm')
except:
pass
# remove iso information from the structure
try:
d.pop('iso')
except:
pass
# remove ncycles information from the structure
try:
d.pop('ncycles')
except:
pass
# transition are saved only if True
if not d['transition']:
d.pop('transition')
# offset are saved only if not zero
if 'offset' in d:
if d['offset']==0:
d.pop('offset')
config.set("segments", str(n), d)
#
# [ slabs ]
#
# get the list of used slabs
lslab = [x for x in self.name if len(self.name[x]) > 0]
lmat = []
#
# In case an osm file has been read, there is no .sl
# By default all the available slabs and materials are provided
#
if not hasattr(self,'sl'):
self.sl = sb.SlabDB(filemat='matDB.ini', fileslab='slabDB.ini')
for s in lslab:
ds = {}
if s not in self.sl:
if s not in self.sl.mat:
self.sl.mat.add(name=s,cval=6,sigma=0,typ='epsr')
self.sl.add(s,[s],[0.1])
#ds['index'] = self.sl[s]['index']
ds['color'] = self.sl[s]['color']
ds['lmatname'] = self.sl[s]['lmatname']
for m in ds['lmatname']:
if m not in lmat:
lmat.append(m)
ds['lthick'] = self.sl[s]['lthick']
ds['linewidth'] = self.sl[s]['linewidth']
config.set("slabs", s, ds)
if "_AIR" not in lslab:
air = {'color': 'white', 'linewidth': 1,
'lthick': [0.1], 'lmatname': ['AIR']}
config.set("slabs", "_AIR", air)
if "AIR" not in lslab:
air = {'color': 'white', 'linewidth': 1,
'lthick': [0.1], 'lmatname': ['AIR']}
config.set("slabs", "AIR", air)
if "CEIL" not in lslab:
ceil = {'color': 'grey20', 'linewidth': 1,
'lthick': [0.1], 'lmatname': ['REINFORCED_CONCRETE']}
config.set("slabs", "CEIL", ceil)
if "FLOOR" not in lslab:
floor = {'color': 'grey40', 'linewidth': 1,
'lthick': [0.1], 'lmatname': ['REINFORCED_CONCRETE']}
config.set("slabs", "FLOOR", floor)
#
# [ materials ]
#
for m in lmat:
dm = self.sl.mat[m]
try:
dm.pop('name')
except:
pass
# store the ITU format parameters only if they are used
if 'a' in dm:
if dm['a'] is None:
dm.pop('a')
dm.pop('b')
dm.pop('c')
dm.pop('d')
config.set("materials", m, dm)
if "REINFORCED_CONCRETE" not in lmat:
reic = {'mur': (
1 + 0j), 'epr': (8.69999980927 + 0j), 'roughness': 0.0, 'sigma': 3.0}
config.set("materials", "REINFORCED_CONCRETE", reic)
# config.set("files",'materials',self.filematini)
# config.set("files",'slab',self.fileslabini)
#
# [ furniture ]
#
config.set("files", 'furniture', self._filefur)
#
# handling old format (to be removed later)
#
if os.path.splitext(self._filename)[1]=='.ini':
fileout = self._filename.replace('.ini','.lay')
else:
fileout = self._filename
filelay = pyu.getlong(fileout, pro.pstruc['DIRLAY'])
print(filelay)
fd = open(filelay, "w")
config.write(fd)
fd.close()
# convert graph Gs to numpy arrays to speed up post-processing
# ideally an edited Layout should be locked while not saved.
# self.g2npy()
self._hash = hashlib.md5(open(filelay, 'rb').read()).hexdigest()
def load(self):
""" load a layout from a .lay file
The filename is in self._filename
Format version 1.3
------------------
[info]
format = {cart | latlon}
version =
type = {indoor | outdoor}
[points]
-1 = (x,y)
[segments]
1 = {'slab':'', 'transition':boolean, 'connect':[-1,-2], 'z':(0,3)}
[slabs]
WALL = {'lthick':[,], 'lmatname':[,], 'color':'', 'linewidth':float}
[materials]
BRICK = {'mur':complex, 'epsr':complex, 'sigma':float, 'roughness':float}
[indoor]
zceil =
zfloor =
[latlon]
"""
# di : dictionary which reflects the content of the ini file
di = {}
config = ConfigParser.RawConfigParser()
config.optionxform = str
filelay = pyu.getlong(self._filename, pro.pstruc['DIRLAY'])
config.read(filelay)
sections = config.sections()
for section in sections:
di[section] = {}
options = config.options(section)
for option in options:
try:
di[section][option] = config.get(section, option)
except:
print(section, option)
self.Np = len(di['points'])
self.Ns = len(di['segments'])
self.Gs = nx.Graph(name='Gs')
self.Gs.pos = {}
self.labels = {}
#
# [info]
# format {cart,latlon}
# version int
# type {'indoor','outdoor'}
if 'version' in di['info']:
self.version = di['info']['version']
if 'type' in di['info']:
self.typ = di['info']['type']
self.name = {}
if ((self.typ!='indoor') &
(self.typ!='outdoor') &
(self.typ!='floorplan')):
print("invalid file type in ",self._filename)
return(None)
#
# [indoor]
# zceil
# zfloor
#
if self.typ == 'indoor':
self.zceil = eval(di['indoor']['zceil'])
self.zfloor = eval(di['indoor']['zfloor'])
# old format
if self.typ == 'floorplan':
self.zceil = eval(di['floorplan']['zceil'])
self.zfloor = eval(di['floorplan']['zfloor'])
# from format 1.3, floorplan is called indoor
if self.typ=='floorplan':
self.typ = 'indoor'
#
# [outdoor]
# TODO add a DEM file
#
if self.typ == 'outdoor':
if 'outdoor' in di:
if 'zceil' in di['outdoor']:
self.zceil = eval(di['outdoor']['zceil'])
else:
self.zceil = 3000 # upper limit for AIR walls
else:
self.zceil = 3000 # upper limit for AIR walls
if 'outdoor' in di:
if 'zfloor' in di['outdoor']:
self.zfloor = eval(di['outdoor']['zfloor'])
else:
self.zfloor = 0
else:
self.zfloor = 0
#
#
# manage ini file with latlon coordinates
#
# if the format is latlon, coordinates are converted into
# cartesian coordinates with the coords.cartesian method
#
if 'format' in di['info']:
if di['info']['format'] == 'latlon':
or_coord_format = 'latlon'
coords = osm.Coords()
coords.clean()
coords.latlon = {i: np.array(
eval(di['points'][i])) for i in di['points']}
coords.boundary = np.hstack((np.min(np.array(list(coords.latlon.values())), axis=0),
np.max(np.array(list(coords.latlon.values())), axis=0)))
coords.cartesian(cart=True)
else:
#!/usr/bin/env python3
# coding=utf-8
"""
@author: guoyanfeng
@software: PyCharm
@time: 18-12-25 下午5:15
"""
import atexit
import secrets
import uuid
from collections.abc import MutableMapping
from typing import Any, Dict, List, NoReturn, Union
import aelog
import redis
import ujson
from redis import RedisError
from .exceptions import RedisClientError
from .utils import ignore_error
__all__ = ("Session", "RedisClient", "LONG_EXPIRED", "EXPIRED", "SESSION_EXPIRED", "DAY3_EXPIRED", "DAY7_EXPIRED",
"DAY15_EXPIRED", "DAY30_EXPIRED")
SESSION_EXPIRED: int = 60 * 60 # session expiration time (seconds)
LONG_EXPIRED: int = 24 * 60 * 60 # longest expiration time
DAY3_EXPIRED: int = 3 * LONG_EXPIRED
DAY7_EXPIRED: int = 7 * LONG_EXPIRED
DAY15_EXPIRED: int = 15 * LONG_EXPIRED
DAY30_EXPIRED: int = 30 * LONG_EXPIRED
EXPIRED: int = 15 * LONG_EXPIRED # general-purpose expiration time
class Session(object):
"""
Session instance that stores the actual session data
Args:
"""
def __init__(self, account_id: str, *,
session_id: str = None,
org_id: str = None,
role_id: str = None,
menu_id: str = None,
static_route_id: str = None,
dynamic_route_id: str = None,
**kwargs):
self.account_id = account_id # account ID
self.session_id = secrets.token_urlsafe() if not session_id else session_id # session ID
self.org_id = org_id or uuid.uuid4().hex # redis ID of the account's organization structure
self.role_id = role_id or uuid.uuid4().hex # redis ID of the account's role
self.menu_id = menu_id or uuid.uuid4().hex # redis ID of the account's page-menu permissions
self.static_route_id = static_route_id or uuid.uuid4().hex # redis ID of the account's static permissions
self.dynamic_route_id = dynamic_route_id or uuid.uuid4().hex # redis ID of the account's dynamic permissions
for k, v in kwargs.items():
setattr(self, k, v)
def to_dict(self, ) -> Dict:
"""
Args:
Returns:
"""
return dict(vars(self))
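# A minimal usage sketch of the Session container above (the account ID is
# hypothetical; session_id/org_id/... are generated when omitted):
# session = Session("acct-001")
# session.to_dict()
# # {'account_id': 'acct-001', 'session_id': '...', 'org_id': '...', ...}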
class RedisClient(object):
"""
Redis utility class
"""
def __init__(self, app=None, *, host: str = "127.0.0.1", port: int = 6379, dbname: int = 0, passwd: str = "",
pool_size: int = 50):
"""
Redis utility class
Args:
app: the application instance
host: redis host
port: redis port
dbname: database name
passwd: redis password
pool_size: redis pool size
"""
self.pool = None
self.redis_db: redis.StrictRedis = None
self.host = host
self.port = port
self.dbname = dbname
self.passwd = passwd
self.pool_size = pool_size
self._account_key = "account_to_session"
if app is not None:
self.init_app(app, host=self.host, port=self.port, dbname=self.dbname, passwd=self.passwd,
pool_size=self.pool_size)
def init_app(self, app, *, host: str = None, port: int = None, dbname: int = None, passwd: str = "",
pool_size: int = None):
"""
Redis utility class
Args:
app: the application instance
host: redis host
port: redis port
dbname: database name
passwd: redis password
pool_size: redis pool size
Returns:
"""
host = host or app.config.get("ECLIENTS_REDIS_HOST", None) or self.host
port = port or app.config.get("ECLIENTS_REDIS_PORT", None) or self.port
dbname = dbname or app.config.get("ECLIENTS_REDIS_DBNAME", None) or self.dbname
passwd = passwd or app.config.get("ECLIENTS_REDIS_PASSWD", None) or self.passwd
pool_size = pool_size or app.config.get("ECLIENTS_REDIS_POOL_SIZE", None) or self.pool_size
passwd = passwd if passwd is None else str(passwd)
# initialize the connection
self.open_connection(host, port, passwd, dbname, pool_size)
@atexit.register
def close_connection():
"""
Release all connections in the redis connection pool
Args:
Returns:
"""
if self.pool:
self.pool.disconnect()
def init_engine(self, *, host: str = None, port: int = None, dbname: int = None, passwd: str = "",
pool_size: int = None):
"""
Redis utility class
Args:
host: redis host
port: redis port
dbname: database name
passwd: redis password
pool_size: redis pool size
Returns:
"""
host = host or self.host
port = port or self.port
dbname = dbname or self.dbname
passwd = passwd or self.passwd
pool_size = pool_size or self.pool_size
passwd = passwd if passwd is None else str(passwd)
# initialize the connection
self.open_connection(host, port, passwd, dbname, pool_size)
@atexit.register
def close_connection():
"""
Release all connections in the redis connection pool
Args:
Returns:
"""
if self.pool:
self.pool.disconnect()
def open_connection(self, host: str, port: int, passwd: str, dbname: str, pool_size: int):
"""
Initialize the connection
Args:
host: host
port: port
passwd: redis password
dbname: database name
pool_size: pool size
Returns:
"""
# Return values are decoded, so the application layer does not need to decode again
self.pool = redis.ConnectionPool(host=host, port=port, db=dbname, password=passwd, decode_responses=True,
max_connections=pool_size)
self.redis_db = redis.StrictRedis(connection_pool=self.pool, decode_responses=True)
def save_session(self, session: Session, dump_responses: bool = False, ex: int = SESSION_EXPIRED) -> str:
"""
Save a session using a hash map
Args:
session: a Session instance
dump_responses: whether to dump each value
ex: expiration time in seconds
Returns:
"""
session_data = self.response_dumps(dump_responses, session)
try:
if not self.redis_db.hmset(session_data["session_id"], session_data):
raise RedisClientError("save session failed, session_id={}".format(session_data["session_id"]))
if not self.redis_db.expire(session_data["session_id"], ex):
aelog.error("set session expire failed, session_id={}".format(session_data["session_id"]))
except RedisError as e:
aelog.exception("save session error: {}, {}".format(session.session_id, e))
raise RedisClientError(str(e))
else:
# clear the old token
try:
old_session_id = self.get_hash_data(self._account_key, field_name=session.account_id)
except RedisClientError as e:
aelog.info(f"{session.account_id} no old token token, {str(e)}")
else:
with ignore_error():
self.delete_session(old_session_id, False)
# store the new token
self.save_update_hash_data(self._account_key, field_name=session.account_id,
hash_data=session.session_id, ex=LONG_EXPIRED)
return session.session_id
@staticmethod
def response_dumps(dump_responses: bool, session: Session) -> Dict:
session_data = dict(vars(session))
# whether to dump each value
if dump_responses:
hash_data = {}
for hash_key, hash_val in session_data.items():
if not isinstance(hash_val, str):
with ignore_error():
hash_val = ujson.dumps(hash_val)
hash_data[hash_key] = hash_val
session_data = hash_data
return session_data
def delete_session(self, session_id: str, delete_key: bool = True) -> NoReturn:
"""
Delete a session from the hash map
Args:
session_id: session id
delete_key: whether to also delete the account-to-session key
Returns:
"""
try:
session_id_ = self.redis_db.hget(session_id, "session_id")
if session_id_ != session_id:
raise RedisClientError("invalid session_id, session_id={}".format(session_id))
exist_keys = []
session_data = self.get_session(session_id, cls_flag=False)
exist_keys.append(session_data["org_id"])
exist_keys.append(session_data["role_id"])
exist_keys.append(session_data["menu_id"])
exist_keys.append(session_data["static_permission_id"])
exist_keys.append(session_data["dynamic_permission_id"])
with ignore_error(): # delete existing account-related cache keys
self.delete_keys(exist_keys)
if delete_key is True:
self.redis_db.hdel(self._account_key, session_data["account_id"])
if not self.redis_db.delete(session_id):
aelog.error("delete session failed, session_id={}".format(session_id))
except RedisError as e:
aelog.exception("delete session error: {}, {}".format(session_id, e))
raise RedisClientError(str(e))
def update_session(self, session: Session, dump_responses: bool = False, ex: int = SESSION_EXPIRED) -> NoReturn:
"""
Update a session in the hash map
Args:
session: a Session instance
ex: expiration time in seconds
dump_responses: whether to dump each value
Returns:
"""
session_data = self.response_dumps(dump_responses, session)
try:
if not self.redis_db.hmset(session_data["session_id"], session_data):
raise RedisClientError("update session failed, session_id={}".format(session_data["session_id"]))
if not self.redis_db.expire(session_data["session_id"], ex):
aelog.error("set session expire failed, session_id={}".format(session_data["session_id"]))
except RedisError as e:
aelog.exception("update session error: {}, {}".format(session_data["session_id"], e))
raise RedisClientError(str(e))
else:
# update the token
self.save_update_hash_data(self._account_key, field_name=session.account_id,
hash_data=session.session_id, ex=LONG_EXPIRED)
def get_session(self, session_id: str, ex: int = SESSION_EXPIRED, cls_flag: bool = True,
load_responses: bool = False) -> Union[Session, Dict[str, str]]:
"""
Get a session
Args:
session_id: session id
ex: expiration time in seconds
cls_flag: whether to return a Session class instance
load_responses: whether to load each value of the result
Returns:
"""
try:
session_data = self.redis_db.hgetall(session_id)
if not session_data:
raise RedisClientError("not found session, session_id={}".format(session_id))
if not self.redis_db.expire(session_id, ex):
aelog.error("set session expire failed, session_id={}".format(session_id))
except RedisError as e:
aelog.exception("get session error: {}, {}".format(session_id, e))
raise RedisClientError(e)
else:
# whether to load the returned key-value pairs
if load_responses:
hash_data = {}
for hash_key, hash_val in session_data.items():
with ignore_error():
hash_val = ujson.loads(hash_val)
hash_data[hash_key] = hash_val
session_data = hash_data
if cls_flag:
return Session(session_data.pop('account_id'), session_id=session_data.pop('session_id'),
org_id=session_data.pop("org_id"), role_id=session_data.pop("role_id"),
**session_data)
else:
return session_data
def verify(self, session_id: str) -> Session:
"""
Verify a session, mainly used for login verification
Args:
session_id
Returns:
"""
try:
session = self.get_session(session_id)
except RedisClientError as e:
raise RedisClientError(str(e))
else:
if not session:
raise RedisClientError("invalid session_id, session_id={}".format(session_id))
return session
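# A hedged end-to-end sketch of the session lifecycle above (host and
# credentials are assumptions, not taken from this module):
# client = RedisClient(host="127.0.0.1", port=6379, dbname=0)
# sid = client.save_session(Session("acct-001"))
# session = client.verify(sid)  # raises RedisClientError if invalid
# client.delete_session(sid)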
def save_update_hash_data(self, name: str, hash_data: Dict, field_name: str = None, dump_responses: bool = False,
ex: int = EXPIRED) -> str:
"""
Save or update hash data
Args:
name: name of the redis hash key
field_name: a field of the saved hash mapping
hash_data: the hash data to save or update
ex: expiration time in seconds
dump_responses: whether to dump each value
Returns:
the name of the hash key
"""
if field_name is None and not isinstance(hash_data, MutableMapping):
raise ValueError("hash data error, must be MutableMapping.")
try:
if not field_name:
# whether to dump each value
if dump_responses:
rs_data = {}
for hash_key, hash_val in hash_data.items():
if not isinstance(hash_val, str):
with ignore_error():
hash_val = ujson.dumps(hash_val)
rs_data[hash_key] = hash_val
hash_data = rs_data
if not self.redis_db.hmset(name, hash_data):
raise RedisClientError("save hash data mapping failed, session_id={}".format(name))
else:
hash_data = hash_data if isinstance(hash_data, str) else ujson.dumps(hash_data)
self.redis_db.hset(name, field_name, hash_data)
if not self.redis_db.expire(name, ex):
aelog.error("set hash data expire failed, session_id={}".format(name))
except RedisError as e:
raise RedisClientError(str(e))
else:
return name
def get_hash_data(self, name: str, field_name: str = None, load_responses: bool = False,
ex: int = EXPIRED) -> Dict:
"""
Get the value of field_name from a hash object
Args:
name: name of the redis hash key
field_name: name of the field to fetch from the hash object
ex: expiration time in seconds
load_responses: whether to load each value of the result
Returns:
the deserialized object
"""
try:
if field_name:
hash_data = self.redis_db.hget(name, field_name)
# whether to load the returned key-value pairs
if load_responses:
with ignore_error():
hash_data = ujson.loads(hash_data)
else:
hash_data = self.redis_db.hgetall(name)
# whether to load the returned key-value pairs
if load_responses:
rs_data = {}
for hash_key, hash_val in hash_data.items():
with ignore_error():
hash_val = ujson.loads(hash_val)
rs_data[hash_key] = hash_val
hash_data = rs_data
if not hash_data:
raise RedisClientError("not found hash data, name={}, field_name={}".format(name, field_name))
if not self.redis_db.expire(name, ex):
aelog.error("set expire failed, name={}".format(name))
except RedisError as e:
raise RedisClientError(str(e))
else:
return hash_data
def get_list_data(self, name: str, start: int = 0, end: int = -1, ex: int = EXPIRED) -> List:
"""
Get data from a redis list
Args:
name: name of the redis key
start: start position, defaults to the first element of the list
end: end position, defaults to the last element of the list
ex: expiration time in seconds
Returns:
"""
try:
data = self.redis_db.lrange(name, start=start, end=end)
if not self.redis_db.expire(name, ex):
aelog.error("set expire failed, name={}".format(name))
except RedisError as e:
raise RedisClientError(str(e))
else:
return data
def save_list_data(self, name: str, list_data: Union[List, str], save_to_left: bool = True,
ex: int = EXPIRED) -> str:
"""
Save data to a redis list
Args:
name: name of the redis key
list_data: the value(s) to save, either a single value or a sequence
save_to_left: whether to push to the left of the list, defaults to pushing left
ex: expiration time in seconds
Returns:
"""
list_data = (list_data,) if isinstance(list_data, str) else list_data
try:
if save_to_left:
if not self.redis_db.lpush(name, *list_data):
raise RedisClientError("lpush value to head failed.")
else:
if not self.redis_db.rpush(name, *list_data):
raise RedisClientError("rpush value to tail failed.")
>>>
>>> thread = api.patch_conversations_message_participant(conversation_id, participant_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str conversation_id: conversationId (required)
:param str participant_id: participantId (required)
:param MediaParticipantRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['conversation_id', 'participant_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_conversations_message_participant" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'conversation_id' is set
if ('conversation_id' not in params) or (params['conversation_id'] is None):
raise ValueError("Missing the required parameter `conversation_id` when calling `patch_conversations_message_participant`")
# verify the required parameter 'participant_id' is set
if ('participant_id' not in params) or (params['participant_id'] is None):
raise ValueError("Missing the required parameter `participant_id` when calling `patch_conversations_message_participant`")
resource_path = '/api/v2/conversations/messages/{conversationId}/participants/{participantId}'.replace('{format}', 'json')
path_params = {}
if 'conversation_id' in params:
path_params['conversationId'] = params['conversation_id']
if 'participant_id' in params:
path_params['participantId'] = params['participant_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
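# A hedged usage sketch for the wrapper above (IDs and body are
# placeholders; `api` is an instance of this generated API class):
# api.patch_conversations_message_participant(
#     "conversation-id", "participant-id",
#     body=MediaParticipantRequest())  # synchronous PATCH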
def patch_conversations_message_participant_attributes(self, conversation_id, participant_id, **kwargs):
"""
Update the attributes on a conversation participant.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_conversations_message_participant_attributes(conversation_id, participant_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str conversation_id: conversationId (required)
:param str participant_id: participantId (required)
:param ParticipantAttributes body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['conversation_id', 'participant_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_conversations_message_participant_attributes" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'conversation_id' is set
if ('conversation_id' not in params) or (params['conversation_id'] is None):
raise ValueError("Missing the required parameter `conversation_id` when calling `patch_conversations_message_participant_attributes`")
# verify the required parameter 'participant_id' is set
if ('participant_id' not in params) or (params['participant_id'] is None):
raise ValueError("Missing the required parameter `participant_id` when calling `patch_conversations_message_participant_attributes`")
resource_path = '/api/v2/conversations/messages/{conversationId}/participants/{participantId}/attributes'.replace('{format}', 'json')
path_params = {}
if 'conversation_id' in params:
path_params['conversationId'] = params['conversation_id']
if 'participant_id' in params:
path_params['participantId'] = params['participant_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def patch_conversations_message_participant_communication(self, conversation_id, participant_id, communication_id, body, **kwargs):
"""
Update conversation participant's communication by disconnecting it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_conversations_message_participant_communication(conversation_id, participant_id, communication_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str conversation_id: conversationId (required)
:param str participant_id: participantId (required)
:param str communication_id: communicationId (required)
:param MediaParticipantRequest body: Participant (required)
:return: Empty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['conversation_id', 'participant_id', 'communication_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_conversations_message_participant_communication" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'conversation_id' is set
if ('conversation_id' not in params) or (params['conversation_id'] is None):
raise ValueError("Missing the required parameter `conversation_id` when calling `patch_conversations_message_participant_communication`")
# verify the required parameter 'participant_id' is set
if ('participant_id' not in params) or (params['participant_id'] is None):
raise ValueError("Missing the required parameter `participant_id` when calling `patch_conversations_message_participant_communication`")
# verify the required parameter 'communication_id' is set
if ('communication_id' not in params) or (params['communication_id'] is None):
raise ValueError("Missing the required parameter `communication_id` when calling `patch_conversations_message_participant_communication`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_conversations_message_participant_communication`")
resource_path = '/api/v2/conversations/messages/{conversationId}/participants/{participantId}/communications/{communicationId}'.replace('{format}', 'json')
path_params = {}
if 'conversation_id' in params:
path_params['conversationId'] = params['conversation_id']
if 'participant_id' in params:
path_params['participantId'] = params['participant_id']
if 'communication_id' in params:
path_params['communicationId'] = params['communication_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Empty',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def patch_conversations_messaging_integrations_facebook_integration_id(self, integration_id, body, **kwargs):
"""
Update Facebook messaging integration
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_conversations_messaging_integrations_facebook_integration_id(integration_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str integration_id: Integration ID (required)
:param FacebookIntegrationUpdateRequest body: FacebookIntegrationUpdateRequest (required)
:return: FacebookIntegration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['integration_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_conversations_messaging_integrations_facebook_integration_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'integration_id' is set
if ('integration_id' not in params) or (params['integration_id'] is None):
raise ValueError("Missing the required parameter `integration_id` when calling `patch_conversations_messaging_integrations_facebook_integration_id`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_conversations_messaging_integrations_facebook_integration_id`")
resource_path = '/api/v2/conversations/messaging/integrations/facebook/{integrationId}'.replace('{format}', 'json')
path_params = {}
if 'integration_id' in params:
path_params['integrationId'] = params['integration_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['PureCloud OAuth']
response = self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FacebookIntegration',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def patch_conversations_messaging_integrations_open_integration_id(self, integration_id, body, **kwargs):
"""
Update an Open messaging integration
See https://developer.genesys.cloud/api/digital/openmessaging/ for more information.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_conversations_messaging_integrations_open_integration_id(integration_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str integration_id: Integration ID (required)
:param OpenIntegrationUpdateRequest body: OpenIntegrationUpdateRequest (required)
:return: OpenIntegration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['integration_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_conversations_messaging_integrations_open_integration_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'integration_id' is set
if ('integration_id' not in params) or (params['integration_id'] is None):
raise ValueError("Missing the required parameter `integration_id` when calling `patch_conversations_messaging_integrations_open_integration_id`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_conversations_messaging_integrations_open_integration_id`")
resource_path = '/api/v2/conversations/messaging/integrations/open/{integrationId}'.replace('{format}', 'json')
path_params = {}
if 'integration_id' in params:
path_params['integrationId'] = params['integration_id']
query_params = {}
header_params = {}
form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['PureCloud OAuth']
        response = self.api_client.call_api(resource_path, 'PATCH',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type='OpenIntegration',
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
HOH B .
D 2 HOH 88 388 102 HOH HOH B .
D 2 HOH 89 389 76 HOH HOH B .
D 2 HOH 90 390 257 HOH HOH B .
D 2 HOH 91 391 64 HOH HOH B .
D 2 HOH 92 392 180 HOH HOH B .
D 2 HOH 93 393 205 HOH HOH B .
D 2 HOH 94 394 178 HOH HOH B .
D 2 HOH 95 395 71 HOH HOH B .
D 2 HOH 96 396 313 HOH HOH B .
D 2 HOH 97 397 108 HOH HOH B .
D 2 HOH 98 398 132 HOH HOH B .
D 2 HOH 99 399 220 HOH HOH B .
D 2 HOH 100 400 78 HOH HOH B .
D 2 HOH 101 401 124 HOH HOH B .
D 2 HOH 102 402 173 HOH HOH B .
D 2 HOH 103 403 188 HOH HOH B .
D 2 HOH 104 404 141 HOH HOH B .
D 2 HOH 105 405 210 HOH HOH B .
D 2 HOH 106 406 193 HOH HOH B .
D 2 HOH 107 407 238 HOH HOH B .
D 2 HOH 108 408 229 HOH HOH B .
D 2 HOH 109 409 31 HOH HOH B .
D 2 HOH 110 410 18 HOH HOH B .
D 2 HOH 111 411 162 HOH HOH B .
D 2 HOH 112 412 133 HOH HOH B .
D 2 HOH 113 413 280 HOH HOH B .
D 2 HOH 114 414 79 HOH HOH B .
D 2 HOH 115 415 66 HOH HOH B .
D 2 HOH 116 416 259 HOH HOH B .
D 2 HOH 117 417 145 HOH HOH B .
D 2 HOH 118 418 184 HOH HOH B .
D 2 HOH 119 419 169 HOH HOH B .
D 2 HOH 120 420 127 HOH HOH B .
D 2 HOH 121 421 122 HOH HOH B .
D 2 HOH 122 422 170 HOH HOH B .
D 2 HOH 123 423 172 HOH HOH B .
D 2 HOH 124 424 77 HOH HOH B .
D 2 HOH 125 425 151 HOH HOH B .
D 2 HOH 126 426 33 HOH HOH B .
D 2 HOH 127 427 159 HOH HOH B .
D 2 HOH 128 428 269 HOH HOH B .
D 2 HOH 129 429 128 HOH HOH B .
D 2 HOH 130 430 153 HOH HOH B .
D 2 HOH 131 431 342 HOH HOH B .
D 2 HOH 132 432 251 HOH HOH B .
D 2 HOH 133 433 165 HOH HOH B .
D 2 HOH 134 434 277 HOH HOH B .
D 2 HOH 135 435 138 HOH HOH B .
D 2 HOH 136 436 197 HOH HOH B .
D 2 HOH 137 437 290 HOH HOH B .
D 2 HOH 138 438 192 HOH HOH B .
D 2 HOH 139 439 230 HOH HOH B .
D 2 HOH 140 440 326 HOH HOH B .
D 2 HOH 141 441 304 HOH HOH B .
D 2 HOH 142 442 296 HOH HOH B .
D 2 HOH 143 443 224 HOH HOH B .
D 2 HOH 144 444 248 HOH HOH B .
D 2 HOH 145 445 331 HOH HOH B .
D 2 HOH 146 446 191 HOH HOH B .
D 2 HOH 147 447 315 HOH HOH B .
D 2 HOH 148 448 254 HOH HOH B .
D 2 HOH 149 449 142 HOH HOH B .
D 2 HOH 150 450 271 HOH HOH B .
D 2 HOH 151 451 37 HOH HOH B .
D 2 HOH 152 452 243 HOH HOH B .
D 2 HOH 153 453 301 HOH HOH B .
D 2 HOH 154 454 177 HOH HOH B .
D 2 HOH 155 455 287 HOH HOH B .
D 2 HOH 156 456 196 HOH HOH B .
D 2 HOH 157 457 314 HOH HOH B .
D 2 HOH 158 458 282 HOH HOH B .
D 2 HOH 159 459 174 HOH HOH B .
D 2 HOH 160 460 250 HOH HOH B .
D 2 HOH 161 461 99 HOH HOH B .
D 2 HOH 162 462 272 HOH HOH B .
D 2 HOH 163 463 232 HOH HOH B .
D 2 HOH 164 464 266 HOH HOH B .
D 2 HOH 165 465 136 HOH HOH B .
D 2 HOH 166 466 195 HOH HOH B .
D 2 HOH 167 467 300 HOH HOH B .
D 2 HOH 168 468 214 HOH HOH B .
#
_pdbx_struct_assembly.id 1
_pdbx_struct_assembly.details author_defined_assembly
_pdbx_struct_assembly.method_details ?
_pdbx_struct_assembly.oligomeric_details dimeric
_pdbx_struct_assembly.oligomeric_count 2
#
_pdbx_struct_assembly_gen.assembly_id 1
_pdbx_struct_assembly_gen.oper_expression 1
_pdbx_struct_assembly_gen.asym_id_list A,B,C,D
#
loop_
_pdbx_struct_assembly_prop.biol_id
_pdbx_struct_assembly_prop.type
_pdbx_struct_assembly_prop.value
_pdbx_struct_assembly_prop.details
1 'ABSA (A^2)' 1150 ?
1 MORE -2 ?
1 'SSA (A^2)' 20270 ?
#
_pdbx_struct_oper_list.id 1
_pdbx_struct_oper_list.type 'identity operation'
_pdbx_struct_oper_list.name 1_555
_pdbx_struct_oper_list.symmetry_operation x,y,z
_pdbx_struct_oper_list.matrix[1][1] 1.0000000000
_pdbx_struct_oper_list.matrix[1][2] 0.0000000000
_pdbx_struct_oper_list.matrix[1][3] 0.0000000000
_pdbx_struct_oper_list.vector[1] 0.0000000000
_pdbx_struct_oper_list.matrix[2][1] 0.0000000000
_pdbx_struct_oper_list.matrix[2][2] 1.0000000000
_pdbx_struct_oper_list.matrix[2][3] 0.0000000000
_pdbx_struct_oper_list.vector[2] 0.0000000000
_pdbx_struct_oper_list.matrix[3][1] 0.0000000000
_pdbx_struct_oper_list.matrix[3][2] 0.0000000000
_pdbx_struct_oper_list.matrix[3][3] 1.0000000000
_pdbx_struct_oper_list.vector[3] 0.0000000000
#
_pdbx_audit_revision_history.ordinal 1
_pdbx_audit_revision_history.data_content_type 'Structure model'
_pdbx_audit_revision_history.major_revision 1
_pdbx_audit_revision_history.minor_revision 0
_pdbx_audit_revision_history.revision_date 2019-02-13
#
_pdbx_audit_revision_details.ordinal 1
_pdbx_audit_revision_details.revision_ordinal 1
_pdbx_audit_revision_details.data_content_type 'Structure model'
_pdbx_audit_revision_details.provider repository
_pdbx_audit_revision_details.type 'Initial release'
_pdbx_audit_revision_details.description ?
#
loop_
_software.citation_id
_software.classification
_software.compiler_name
_software.compiler_version
_software.contact_author
_software.contact_author_email
_software.date
_software.description
_software.dependencies
_software.hardware
_software.language
_software.location
_software.mods
_software.name
_software.os
_software.os_version
_software.type
_software.version
_software.pdbx_ordinal
? refinement ? ? ? ? ? ? ? ? ? ? ? REFMAC ? ? ? 5.8.0189 1
? 'data reduction' ? ? ? ? ? ? ? ? ? ? ? HKL-3000 ? ? ? . 2
? 'data scaling' ? ? ? ? ? ? ? ? ? ? ? HKL-3000 ? ? ? . 3
? phasing ? ? ? ? ? ? ? ? ? ? ? PHASER ? ? ? . 4
#
loop_
_pdbx_validate_rmsd_bond.id
_pdbx_validate_rmsd_bond.PDB_model_num
_pdbx_validate_rmsd_bond.auth_atom_id_1
_pdbx_validate_rmsd_bond.auth_asym_id_1
_pdbx_validate_rmsd_bond.auth_comp_id_1
_pdbx_validate_rmsd_bond.auth_seq_id_1
_pdbx_validate_rmsd_bond.PDB_ins_code_1
_pdbx_validate_rmsd_bond.label_alt_id_1
_pdbx_validate_rmsd_bond.auth_atom_id_2
_pdbx_validate_rmsd_bond.auth_asym_id_2
_pdbx_validate_rmsd_bond.auth_comp_id_2
_pdbx_validate_rmsd_bond.auth_seq_id_2
_pdbx_validate_rmsd_bond.PDB_ins_code_2
_pdbx_validate_rmsd_bond.label_alt_id_2
_pdbx_validate_rmsd_bond.bond_value
_pdbx_validate_rmsd_bond.bond_target_value
_pdbx_validate_rmsd_bond.bond_deviation
_pdbx_validate_rmsd_bond.bond_standard_deviation
_pdbx_validate_rmsd_bond.linker_flag
1 1 C A ILE 57 ? ? N A LEU 59 ? ? 1.555 1.336 0.219 0.023 Y
2 1 C B ILE 57 ? ? N B LEU 59 ? ? 1.493 1.336 0.157 0.023 Y
3 1 C B GLY 238 ? ? N B GLY 240 ? ? 1.571 1.336 0.235 0.023 Y
#
loop_
_pdbx_validate_rmsd_angle.id
_pdbx_validate_rmsd_angle.PDB_model_num
_pdbx_validate_rmsd_angle.auth_atom_id_1
_pdbx_validate_rmsd_angle.auth_asym_id_1
_pdbx_validate_rmsd_angle.auth_comp_id_1
_pdbx_validate_rmsd_angle.auth_seq_id_1
_pdbx_validate_rmsd_angle.PDB_ins_code_1
_pdbx_validate_rmsd_angle.label_alt_id_1
_pdbx_validate_rmsd_angle.auth_atom_id_2
_pdbx_validate_rmsd_angle.auth_asym_id_2
_pdbx_validate_rmsd_angle.auth_comp_id_2
_pdbx_validate_rmsd_angle.auth_seq_id_2
_pdbx_validate_rmsd_angle.PDB_ins_code_2
_pdbx_validate_rmsd_angle.label_alt_id_2
_pdbx_validate_rmsd_angle.auth_atom_id_3
_pdbx_validate_rmsd_angle.auth_asym_id_3
_pdbx_validate_rmsd_angle.auth_comp_id_3
_pdbx_validate_rmsd_angle.auth_seq_id_3
_pdbx_validate_rmsd_angle.PDB_ins_code_3
_pdbx_validate_rmsd_angle.label_alt_id_3
_pdbx_validate_rmsd_angle.angle_value
_pdbx_validate_rmsd_angle.angle_target_value
_pdbx_validate_rmsd_angle.angle_deviation
_pdbx_validate_rmsd_angle.angle_standard_deviation
_pdbx_validate_rmsd_angle.linker_flag
1 1 NE A ARG 204 ? ? CZ A ARG 204 ? ? NH1 A ARG 204 ? ? 123.93 120.30 3.63 0.50 N
2 1 NE A ARG 204 ? ? CZ A ARG 204 ? ? NH2 A ARG 204 ? ? 116.96 120.30 -3.34 0.50 N
3 1 NE A ARG 275 ? ? CZ A ARG 275 ? ? NH2 A ARG 275 ? ? 116.90 120.30 -3.40 0.50 N
4 1 NE B ARG 184 ? ? CZ B ARG 184 ? ? NH1 B ARG 184 ? ? 123.31 120.30 3.01 0.50 N
5 1 C B GLY 238 ? ? N B GLY 240 ? ? CA B GLY 240 ? ? 107.58 122.30 -14.72 2.10 Y
#
loop_
_pdbx_validate_torsion.id
_pdbx_validate_torsion.PDB_model_num
_pdbx_validate_torsion.auth_comp_id
_pdbx_validate_torsion.auth_asym_id
_pdbx_validate_torsion.auth_seq_id
_pdbx_validate_torsion.PDB_ins_code
_pdbx_validate_torsion.label_alt_id
_pdbx_validate_torsion.phi
_pdbx_validate_torsion.psi
1 1 CYS A 69 ? ? 46.53 -129.97
2 1 VAL A 103 ? ? -110.81 -136.18
3 1 SER A 220 ? ? -104.37 -120.80
4 1 ASP A 254 ?
# -*- coding: utf-8 -*-
#
# This file is part of the FspPssSubarray project
#
#
#
# Distributed under the terms of the GPL license.
# See LICENSE.txt for more info.
"""
Author: <NAME> <EMAIL>,
Herzberg Astronomy and Astrophysics, National Research Council of Canada
Copyright (c) 2019 National Research Council of Canada
"""
""" FspPssSubarray Tango device prototype
FspPssSubarray TANGO device class for the FspPssSubarray prototype
"""
# tango imports
import tango
from tango import DebugIt
from tango.server import run
from tango.server import Device
from tango.server import attribute, command
from tango.server import device_property
from tango import AttrQuality, DispLevel, DevState
from tango import AttrWriteType, PipeWriteType
# Additional import
# PROTECTED REGION ID(FspPssSubarray.additionnal_import) ENABLED START #
import os
import sys
import json
from random import randint
from ska_tango_base.control_model import HealthState, AdminMode, ObsState
from ska_tango_base import CspSubElementObsDevice
from ska_tango_base.commands import ResultCode
# PROTECTED REGION END # // FspPssSubarray.additionnal_import
__all__ = ["FspPssSubarray", "main"]
class FspPssSubarray(CspSubElementObsDevice):
"""
FspPssSubarray TANGO device class for the FspPssSubarray prototype
"""
# PROTECTED REGION ID(FspPssSubarray.class_variable) ENABLED START #
# PROTECTED REGION END # // FspPssSubarray.class_variable
# -----------------
# Device Properties
# -----------------
SubID = device_property(
dtype='uint16'
)
FspID = device_property(
dtype='uint16'
)
CbfControllerAddress = device_property(
dtype='str',
doc="FQDN of CBF Controller",
default_value="mid_csp_cbf/controller/main"
)
# TODO: CbfSubarrayAddress prop not being used
CbfSubarrayAddress = device_property(
dtype='str',
doc="FQDN of CBF Subarray"
)
VCC = device_property(
dtype=('str',)
)
# ----------
# Attributes
# ----------
receptors = attribute(
dtype=('uint16',),
access=AttrWriteType.READ,
max_dim_x=197,
label="Receptors",
doc="List of receptors assigned to subarray",
)
searchBeams = attribute(
dtype=('str',),
access=AttrWriteType.READ,
max_dim_x=192,
label="SearchBeams",
doc="List of searchBeams assigned to fspsubarray",
)
searchWindowID = attribute(
dtype='uint16',
access=AttrWriteType.READ,
max_dim_x=2,
label="ID for 300MHz Search Window",
doc="Identifier of the Search Window to be used as input for beamforming on this FSP.",
)
    searchBeamID = attribute(
        dtype=('uint16',),
        access=AttrWriteType.READ,
        max_dim_x=192,
        label="SearchBeam IDs",
        doc="Identifiers of the search beams assigned to this FSP subarray.",
    )
outputEnable = attribute(
dtype='bool',
access=AttrWriteType.READ,
label="Enable Output",
doc="Enable/disable transmission of output products.",
)
# ---------------
# General methods
# ---------------
def init_command_objects(self):
"""
Sets up the command objects
"""
super().init_command_objects()
device_args = (self, self.state_model, self.logger)
self.register_command_object(
"ConfigureScan", self.ConfigureScanCommand(*device_args)
)
self.register_command_object(
"GoToIdle", self.GoToIdleCommand(*device_args)
)
class InitCommand(CspSubElementObsDevice.InitCommand):
"""
A class for the Vcc's init_device() "command".
"""
def do(self):
"""
Stateless hook for device initialisation.
:return: A tuple containing a return code and a string
message indicating status. The message is for
information purpose only.
:rtype: (ResultCode, str)
"""
self.logger.debug("Entering InitCommand()")
device = self.target
# Make a private copy of the device properties:
device._subarray_id = device.SubID
device._fsp_id = device.FspID
# initialize attribute values
device._receptors = []
device._search_beams = []
device._search_window_id = 0
device._search_beam_id = []
device._output_enable = 0
device._scan_id = 0
device._config_id = ""
# device proxy for easy reference to CBF Controller
device._proxy_cbf_controller = tango.DeviceProxy(device.CbfControllerAddress)
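            # "MaxCapabilities" entries are "<capability>:<count>" strings
            # (e.g. "VCC:4"); they are parsed here into a {name: count} dict.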
device._controller_max_capabilities = dict(
pair.split(":") for pair in
device._proxy_cbf_controller.get_property("MaxCapabilities")["MaxCapabilities"]
)
device._count_vcc = int(device._controller_max_capabilities["VCC"])
device._fqdn_vcc = list(device.VCC)[:device._count_vcc]
device._proxies_vcc = [*map(tango.DeviceProxy, device._fqdn_vcc)]
message = "FspPssSubarry Init command completed OK"
self.logger.info(message)
return (ResultCode.OK, message)
# PROTECTED REGION END # // FspPssSubarray.init_device
def always_executed_hook(self):
# PROTECTED REGION ID(FspPssSubarray.always_executed_hook) ENABLED START #
"""hook before any commands"""
pass
# PROTECTED REGION END # // FspPssSubarray.always_executed_hook
def delete_device(self):
# PROTECTED REGION ID(FspPssSubarray.delete_device) ENABLED START #
"""Set Idle, remove all receptors, turn device OFF"""
pass
# PROTECTED REGION END # // FspPssSubarray.delete_device
# ------------------
# Attributes methods
# ------------------
def read_receptors(self):
# PROTECTED REGION ID(FspPssSubarray.receptors_read) ENABLED START #
"""return receptros attribute.(array of int)"""
return self._receptors
# PROTECTED REGION END # // FspPssSubarray.receptors_read
def read_searchBeams(self):
# PROTECTED REGION ID(FspPssSubarray.searchBeams_read) ENABLED START #
"""Return searchBeams attribute (JSON)"""
return self._search_beams
# PROTECTED REGION END # // FspPssSubarray.searchBeams_read
def read_searchBeamID(self):
# PROTECTED REGION ID(FspPssSubarray.read_searchBeamID ENABLED START #
"""REturn list of SearchBeam IDs(array of int). (From searchBeams JSON)"""
return self._search_beam_id
# PROTECTED REGION END # // FspPssSubarray.read_searchBeamID
def read_searchWindowID(self):
# PROTECTED REGION ID(CbfSubarrayPssConfig.read_searchWindowID) ENABLED START #
"""Return searchWindowID attribtue(array of int)"""
return self._search_window_id
# PROTECTED REGION END # // CbfSubarrayPssConfig.read_searchWindowID
def read_outputEnable(self):
# PROTECTED REGION ID(CbfSubarrayPssConfig.read_outputEnable) ENABLED START #
"""Enable/Disable transmission of the output products"""
return self._output_enable
# PROTECTED REGION END # // CbfSubarrayPssConfig.read_outputEnable
# --------
# Commands
# --------
def _add_receptors(self, receptorIDs):
"""add specified receptors to the FSP subarray. Input is array of int."""
self.logger.debug("_AddReceptors")
errs = [] # list of error messages
receptor_to_vcc = dict([*map(int, pair.split(":"))] for pair in
self._proxy_cbf_controller.receptorToVcc)
for receptorID in receptorIDs:
try:
vccID = receptor_to_vcc[receptorID]
subarrayID = self._proxies_vcc[vccID - 1].subarrayMembership
# only add receptor if it belongs to the CBF subarray
if subarrayID != self._subarray_id:
errs.append("Receptor {} does not belong to subarray {}.".format(
str(receptorID), str(self._subarray_id)))
else:
if receptorID not in self._receptors:
self._receptors.append(receptorID)
else:
# TODO: this is not true if more receptors can be
# specified for the same search beam
log_msg = "Receptor {} already assigned to current FSP subarray.".format(
str(receptorID))
                        self.logger.warning(log_msg)
except KeyError: # invalid receptor ID
errs.append("Invalid receptor ID: {}".format(receptorID))
if errs:
msg = "\n".join(errs)
self.logger.error(msg)
tango.Except.throw_exception("Command failed", msg, "AddReceptors execution",
tango.ErrSeverity.ERR)
# PROTECTED REGION END # // FspPssSubarray.AddReceptors
def _remove_receptors(self, argin):
"""Remove Receptors. Input is array of int"""
self.logger.debug("_remove_receptors")
for receptorID in argin:
if receptorID in self._receptors:
self._receptors.remove(receptorID)
else:
log_msg = "Receptor {} not assigned to FSP subarray. "\
"Skipping.".format(str(receptorID))
                self.logger.warning(log_msg)
def _remove_all_receptors(self):
self._remove_receptors(self._receptors[:])
# --------
# Commands
# --------
class ConfigureScanCommand(CspSubElementObsDevice.ConfigureScanCommand):
"""
A class for the FspPssSubarray's ConfigureScan() command.
"""
"""Input a serilized JSON object. """
def do(self, argin):
"""
Stateless hook for ConfigureScan() command functionality.
:param argin: The configuration as JSON formatted string
:type argin: str
:return: A tuple containing a return code and a string
message indicating status. The message is for
information purpose only.
:rtype: (ResultCode, str)
:raises: ``CommandError`` if the configuration data validation fails.
"""
device = self.target
argin = json.loads(argin)
# Configure receptors.
self.logger.debug("_receptors = {}".format(device._receptors))
device._fsp_id = argin["fsp_id"]
device._search_window_id = int(argin["search_window_id"])
self.logger.debug("_search_window_id = {}".format(device._search_window_id))
for searchBeam in argin["search_beam"]:
if len(searchBeam["receptor_ids"]) != 1:
# TODO - to add support for multiple receptors
msg = "Currently only 1 receptor per searchBeam is supported"
self.logger.error(msg)
return (ResultCode.FAILED, msg)
device._add_receptors(map(int, searchBeam["receptor_ids"]))
self.logger.debug("device._receptors = {}".format(device._receptors))
device._search_beams.append(json.dumps(searchBeam))
device._search_beam_id.append(int(searchBeam["search_beam_id"]))
# TODO: _output_enable is not currently set
# TODO - possibly move validation of params to
# validate_input()
# (result_code, msg) = self.validate_input(argin) # TODO
result_code = ResultCode.OK # TODO - temp - remove
msg = "Configure command completed OK" # TODO temp, remove
if result_code == ResultCode.OK:
# store the configuration on command success
device._last_scan_configuration = argin
msg = "Configure command completed OK"
return(result_code, msg)
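            # A configuration sketch matching the fields read above (shape
            # inferred from this do() method; values are purely illustrative):
            #
            #   {
            #       "fsp_id": 1,
            #       "search_window_id": "1",
            #       "search_beam": [
            #           {"search_beam_id": "1", "receptor_ids": ["10"]}
            #       ]
            #   }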
def validate_input(self, argin):
"""
Validate the configuration parameters against allowed values, as needed.
:param argin: The JSON formatted string with configuration for the device.
:type argin: 'DevString'
:return: A tuple containing a return code and a string message.
:rtype: (ResultCode, str)
"""
device = self.target
            return (ResultCode.OK, "ConfigureScan arguments validation successful")
@command(
dtype_in='DevString',
doc_in="JSON formatted string with the scan configuration.",
dtype_out='DevVarLongStringArray',
doc_out="A tuple containing a return code and a string message indicating status. "
"The message is for information purpose only.",
)
@DebugIt()
def ConfigureScan(self, argin):
# PROTECTED REGION ID(Vcc.ConfigureScan) ENABLED START #
"""
Configure the observing device parameters for the current scan.
:param argin: JSON formatted string with the scan configuration.
:type argin: 'DevString'
:return: A tuple containing a return code and a string message indicating status.
The message is for information purpose only.
:rtype: (ResultCode, str)
"""
command = self.get_command_object("ConfigureScan")
(return_code, message) = command(argin)
return [[return_code], [message]]
class GoToIdleCommand(CspSubElementObsDevice.GoToIdleCommand):
"""
A class for the FspPssSubarray's GoToIdle command.
"""
def do(self):
"""
Stateless hook for GoToIdle() command functionality.
:return: A tuple containing a return code and a string
message indicating status. The message is for
information purpose only.
:rtype: (ResultCode, str)
"""
self.logger.debug("Entering GoToIdleCommand()")
device = self.target
# initialize attribute values
device._search_beams = []
device._search_window_id = 0
device._search_beam_id = []
device._output_enable = 0
device._scan_id = 0
device._config_id = ""
device._remove_all_receptors()
if device.state_model.obs_state == ObsState.IDLE:
return (ResultCode.OK,
"GoToIdle command completed OK. Device already IDLE")
return (ResultCode.OK, "GoToIdle command completed OK")
# ----------
# Run server
# ----------
def main(args=None, **kwargs):
# PROTECTED REGION ID(FspPssSubarray.main) ENABLED START #
return run((FspPssSubarray,), args=args, **kwargs)
# PROTECTED REGION END # // FspPssSubarray.main
if __name__ == '__main__':
    main()
#! /usr/bin/env python3
import hmac
import time
import json
import logging
import requests
from hashlib import sha256
from urllib.parse import urlencode
from . import formatter
from . import custom_data_formatter
## API Object imports
from . import blvt_api
from . import bswap_api
from . import futures_api
from . import margin_api
from . import marketData_api
from . import mining_api
from . import savings_api
from . import spot_api
from . import subAccount_api
from . import userDataStream_api
from . import wallet_api
## Base URL for Binance's REST API.
REST_BASE = 'https://api.binance.com'
NO_REQUIREMENTS = ['NONE']
REQUIRE_KEY = ['MARKET_DATA', 'USER_STREAM']
REQUIRE_SIGNATURE = ['USER_DATA', 'TRADE', 'MARGIN']
class Binance_REST:
def __init__(self, public_key=None, private_key=None, default_api_type=None):
self.session = requests.Session()
self.requests_made = 0
self.errors = 0
self.default_api_type = default_api_type
self.public_key = public_key
self.private_key = private_key
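    ## Usage sketch (keys are placeholders, not real credentials):
    ##   api = Binance_REST(public_key="...", private_key="...", default_api_type='SPOT')
    ##   api.test_ping()
    ##   print(api.get_serverTime())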
## ------------------ [BLVT_EXCLUSIVE] ------------------ ##
def get_blvt_info(self, **kwargs):
return(self.param_check(blvt_api.get_blvt_info, kwargs))
def subscribe_blvt(self, **kwargs):
return(self.param_check(blvt_api.subscribe_blvt, kwargs))
def query_subscription_record(self, **kwargs):
return(self.param_check(blvt_api.query_subscription_record, kwargs))
def redeem_blvt(self, **kwargs):
return(self.param_check(blvt_api.redeem_blvt, kwargs))
def query_redemption_record(self, **kwargs):
return(self.param_check(blvt_api.query_redemption_record, kwargs))
def get_blvt_userLimit(self, **kwargs):
return(self.param_check(blvt_api.get_blvt_userLimit, kwargs))
## ------------------ [BSWAP_EXCLUSIVE] ------------------ ##
def get_swap_pools(self):
return(self.param_check(bswap_api.get_swap_pools))
def get_liquidity_poolInfo(self, **kwargs):
return(self.param_check(bswap_api.get_liquidity_poolInfo, kwargs))
def add_liquidity(self, **kwargs):
return(self.param_check(bswap_api.add_liquidity, kwargs))
def remove_liquidity(self, **kwargs):
return(self.param_check(bswap_api.remove_liquidity, kwargs))
def get_liquidity_record(self, **kwargs):
return(self.param_check(bswap_api.get_liquidity_record, kwargs))
def get_quote(self, **kwargs):
return(self.param_check(bswap_api.get_quote, kwargs))
def make_swap(self, **kwargs):
return(self.param_check(bswap_api.make_swap, kwargs))
def get_swap_history(self, **kwargs):
return(self.param_check(bswap_api.get_swap_history, kwargs))
## ------------------ [FUTURES_EXCLUSIVE] ------------------ ##
def futures_transfer(self, **kwargs):
return(self.param_check(futures_api.futures_transfer, kwargs))
def get_futures_transactions(self, **kwargs):
return(self.param_check(futures_api.get_futures_transactions, kwargs))
def borrow_crossCollat(self, **kwargs):
return(self.param_check(futures_api.borrow_crossCollat, kwargs))
def get_crossCollat_borrowHist(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_borrowHist, kwargs))
def repay_crossCollat(self, **kwargs):
return(self.param_check(futures_api.repay_crossCollat, kwargs))
def get_crossCollat_repayHist(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_repayHist, kwargs))
def get_crossCollat_wallet(self):
return(self.param_check(futures_api.get_crossCollat_wallet))
def get_crossCollat_wallet_v2(self):
return(self.param_check(futures_api.get_crossCollat_wallet_v2))
def get_crossCollat_info(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_info, kwargs))
def get_crossCollat_info_v2(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_info_v2, kwargs))
def get_crossCollat_rate_LTV(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_rate_LTV, kwargs))
def get_crossCollat_rate_LTV_v2(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_rate_LTV_v2, kwargs))
def get_crossCollat_max_LTV(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_max_LTV, kwargs))
def get_crossCollat_max_LTV_v2(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_max_LTV_v2, kwargs))
def adjust_crossCollat_LTV(self, **kwargs):
return(self.param_check(futures_api.adjust_crossCollat_LTV, kwargs))
def adjust_crossCollat_LTV_v2(self, **kwargs):
return(self.param_check(futures_api.adjust_crossCollat_LTV_v2, kwargs))
def adjust_crossCollat_LTV_history(self, **kwargs):
return(self.param_check(futures_api.adjust_crossCollat_LTV_history, kwargs))
def adjust_crossCollat_liquidation_history(self, **kwargs):
return(self.param_check(futures_api.adjust_crossCollat_liquidation_history, kwargs))
def get_collatRepay_limit(self, **kwargs):
return(self.param_check(futures_api.get_collatRepay_limit, kwargs))
def get_collatRepay_quote(self, **kwargs):
return(self.param_check(futures_api.get_collatRepay_quote, kwargs))
def collateral_repay(self, **kwargs):
return(self.param_check(futures_api.collateral_repay, kwargs))
def get_collatRepay_result(self, **kwargs):
return(self.param_check(futures_api.get_collatRepay_result, kwargs))
def get_crossCollat_interestHist(self, **kwargs):
return(self.param_check(futures_api.get_crossCollat_interestHist, kwargs))
## ------------------ [MARGIN_EXCLUSIVE] ------------------ ##
def margin_transfer(self, **kwargs):
return(self.param_check(margin_api.margin_transfer, kwargs))
def margin_accountBorrow(self, **kwargs):
return(self.param_check(margin_api.margin_accountBorrow, kwargs))
def margin_accountRepay(self, **kwargs):
return(self.param_check(margin_api.margin_accountRepay, kwargs))
def query_margin_asset(self, **kwargs):
return(self.param_check(margin_api.query_margin_asset, kwargs))
def query_crossPair(self, **kwargs):
return(self.param_check(margin_api.query_crossPair, kwargs))
def get_margin_allAssets(self):
return(self.param_check(margin_api.get_margin_allAssets))
def get_allCrossPairs(self):
return(self.param_check(margin_api.get_allCrossPairs))
def query_margin_priceIndex(self, **kwargs):
return(self.param_check(margin_api.query_margin_priceIndex, kwargs))
def get_margin_crossTransferHistory(self, **kwargs):
return(self.param_check(margin_api.get_margin_crossTransferHistory, kwargs))
def get_loanRecord(self, **kwargs):
return(self.param_check(margin_api.get_loanRecord, kwargs))
def get_repayRecord(self, **kwargs):
return(self.param_check(margin_api.get_repayRecord, kwargs))
def get_interestHistory(self, **kwargs):
return(self.param_check(margin_api.get_interestHistory, kwargs))
def get_fLiquidationRecord(self, **kwargs):
return(self.param_check(margin_api.get_fLiquidationRecord, kwargs))
def get_cross_accountDetails(self):
return(self.param_check(margin_api.get_cross_accountDetails))
def get_maxBorrow(self, **kwargs):
return(self.param_check(margin_api.get_maxBorrow, kwargs))
def get_maxOutAmmount(self, **kwargs):
return(self.param_check(margin_api.get_maxOutAmmount, kwargs))
def create_isolatedMaringAccount(self, **kwargs):
return(self.param_check(margin_api.create_isolatedMaringAccount, kwargs))
def isolated_transfer(self, **kwargs):
return(self.param_check(margin_api.isolated_transfer, kwargs))
def get_isolated_transferHistory(self, **kwargs):
return(self.param_check(margin_api.get_isolated_transferHistory, kwargs))
def get_isolated_accountInfo(self, **kwargs):
return(self.param_check(margin_api.get_isolated_accountInfo, kwargs))
def get_isolated_symbol(self, **kwargs):
return(self.param_check(margin_api.get_isolated_symbol, kwargs))
def get_isolated_symbol_all(self):
return(self.param_check(margin_api.get_isolated_symbol_all))
def toggle_BNB_burn_ST_MI(self, **kwargs):
return(self.param_check(margin_api.toggle_BNB_burn_ST_MI, kwargs))
def get_BNB_burn_status(self):
return(self.param_check(margin_api.get_BNB_burn_status))
## ------------------ [MARKET_DATA_EXCLUSIVE] ------------------ ##
def test_ping(self):
return(self.param_check(marketData_api.test_ping))
def get_serverTime(self):
return(self.param_check(marketData_api.get_serverTime))
def get_exchangeInfo(self):
return(self.param_check(marketData_api.get_exchangeInfo))
def get_orderBook(self, **kwargs):
return(self.param_check(marketData_api.get_orderBook, kwargs))
    def get_custom_trades(self, **kwargs):
        # Requests beyond the public 1000-trade limit need the API keys attached.
        if kwargs.get('limit', 0) > 1000:
            kwargs.update({'pubKey':self.public_key, 'prvKey':self.private_key})
        return(custom_data_formatter.get_custom_trades(kwargs))
def get_recentTrades(self, **kwargs):
return(custom_data_formatter.get_custom_trades(self.param_check(marketData_api.get_recentTrades, kwargs)))
def get_oldTrades(self, **kwargs):
return(custom_data_formatter.get_custom_trades(self.param_check(marketData_api.get_oldTrades, kwargs)))
def get_aggTradeList(self, **kwargs):
return(self.param_check(marketData_api.get_aggTradeList, kwargs))
def get_custom_candles(self, **kwargs):
return(custom_data_formatter.get_custom_candles(kwargs))
def get_candles(self, **kwargs):
return(formatter.format_candles(self.param_check(marketData_api.get_candles, kwargs), 'REST'))
def get_averagePrice(self, **kwargs):
return(self.param_check(marketData_api.get_averagePrice, kwargs))
def get_24hTicker(self, **kwargs):
return(self.param_check(marketData_api.get_24hTicker, kwargs))
def get_priceTicker(self, **kwargs):
return(self.param_check(marketData_api.get_priceTicker, kwargs))
def get_orderbookTicker(self, **kwargs):
return(self.param_check(marketData_api.get_orderbookTicker, kwargs))
## ------------------ [MINING_EXCLUSIVE] ------------------ ##
def get_algorithm(self):
return(self.param_check(mining_api.get_algorithm))
def get_coinNames(self):
return(self.param_check(mining_api.get_coinNames))
def get_minerList_detail(self, **kwargs):
return(self.param_check(mining_api.get_minerList_detail, kwargs))
def get_minerList(self, **kwargs):
return(self.param_check(mining_api.get_minerList, kwargs))
def get_earningsList(self, **kwargs):
return(self.param_check(mining_api.get_earningsList, kwargs))
def get_extraBonusList(self, **kwargs):
return(self.param_check(mining_api.get_extraBonusList, kwargs))
def get_hashrateResaleList(self, **kwargs):
return(self.param_check(mining_api.get_hashrateResaleList, kwargs))
def get_hashrateResaleDetail(self, **kwargs):
return(self.param_check(mining_api.get_hashrateResaleDetail, kwargs))
def post_hashrateResale(self, **kwargs):
return(self.param_check(mining_api.post_hashrateResale, kwargs))
def cancel_hashrateResale(self, **kwargs):
return(self.param_check(mining_api.cancel_hashrateResale, kwargs))
def get_statisticList(self, **kwargs):
return(self.param_check(mining_api.get_statisticList, kwargs))
def get_accountList(self, **kwargs):
return(self.param_check(mining_api.get_accountList, kwargs))
## ------------------ [SAVINGS_EXCLUSIVE] ------------------ ##
def get_productList(self, **kwargs):
return(self.param_check(savings_api.get_productList, kwargs))
def get_dailyPurchaseQuota(self, **kwargs):
return(self.param_check(savings_api.get_dailyPurchaseQuota, kwargs))
def purchase_product(self, **kwargs):
return(self.param_check(savings_api.purchase_product, kwargs))
def get_dailyRedemptionQuota(self, **kwargs):
return(self.param_check(savings_api.get_dailyRedemptionQuota, kwargs))
def redeem_product(self, **kwargs):
return(self.param_check(savings_api.redeem_product, kwargs))
def get_product_position(self, **kwargs):
return(self.param_check(savings_api.get_product_position, kwargs))
def get_FnAProject_list(self, **kwargs):
return(self.param_check(savings_api.get_FnAProject_list, kwargs))
def purchase_FnAProject(self, **kwargs):
return(self.param_check(savings_api.purchase_FnAProject, kwargs))
def get_FnAProject_position(self, **kwargs):
return(self.param_check(savings_api.get_FnAProject_position, kwargs))
def get_lending(self):
return(self.param_check(savings_api.get_lending))
def get_purchase_record(self, **kwargs):
return(self.param_check(savings_api.get_purchase_record, kwargs))
def get_redemption_record(self, **kwargs):
return(self.param_check(savings_api.get_redemption_record, kwargs))
def get_interest_history(self, **kwargs):
return(self.param_check(savings_api.get_interest_history, kwargs))
def change_position(self, **kwargs):
return(self.param_check(savings_api.change_position, kwargs))
## ------------------ [SPOT_EXCLUSIVE] ------------------ ##
def place_order_test(self, **kwargs):
return(self.param_check(spot_api.place_order_test, kwargs))
def place_order_oco(self, **kwargs):
return(self.param_check(spot_api.place_order_oco, kwargs))
def cancel_order_oco(self, **kwargs):
return(self.param_check(spot_api.cancel_order_oco, kwargs))
def query_order_oco(self, **kwargs):
return(self.param_check(spot_api.query_order_oco, kwargs))
def get_all_orders_oco(self, **kwargs):
return(self.param_check(spot_api.get_all_orders_oco, kwargs))
def get_open_orders_oco(self):
return(self.param_check(spot_api.get_open_orders_oco))
def get_accountInfo(self):
return(self.param_check(spot_api.get_accountInfo))
## ------------------ [SUB-ACCOUNT_EXCLUSIVE] ------------------ ##
def get_subAccount_list(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_list, kwargs))
def get_subAccount_spotTransferHistory_wapi(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_spotTransferHistory_wapi, kwargs))
def get_subAccount_spotTransferHistory_sapi(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_spotTransferHistory_sapi, kwargs))
def subAccount_spotAsset_transfer(self, **kwargs):
return(self.param_check(subAccount_api.subAccount_spotAsset_transfer, kwargs))
def get_subAccount_futuresTransferHistory(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_futuresTransferHistory, kwargs))
def subAccount_futuresAsset_transfer(self, **kwargs):
return(self.param_check(subAccount_api.subAccount_futuresAsset_transfer, kwargs))
def get_subAccount_assets(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_assets, kwargs))
def get_subAccount_spotAssetsSummary(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_spotAssetsSummary, kwargs))
def get_subAccount_depositAddress(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_depositAddress, kwargs))
def get_subAccount_depositHistory(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_depositHistory, kwargs))
def get_subAccount_statusFnM(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_statusFnM, kwargs))
def enable_subAccount_margin(self, **kwargs):
return(self.param_check(subAccount_api.enable_subAccount_margin, kwargs))
def get_subAccount_marginAccount(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_marginAccount, kwargs))
def get_subAccount_marginAccountSummary(self):
return(self.param_check(subAccount_api.get_subAccount_marginAccountSummary))
def enable_subAccount_futures(self, **kwargs):
return(self.param_check(subAccount_api.enable_subAccount_futures, kwargs))
def get_subAccount_futuresAccount(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_futuresAccount, kwargs))
def get_subAccount_futuresAccountSummary(self):
return(self.param_check(subAccount_api.get_subAccount_futuresAccountSummary))
def get_subAccount_positionRisk(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_positionRisk, kwargs))
def subAccount_futures_transfer(self, **kwargs):
return(self.param_check(subAccount_api.subAccount_futures_transfer, kwargs))
def subAccount_margin_transfer(self, **kwargs):
return(self.param_check(subAccount_api.subAccount_margin_transfer, kwargs))
def master_sub_transfer(self, **kwargs):
return(self.param_check(subAccount_api.master_sub_transfer, kwargs))
def sub_master_transfer(self, **kwargs):
return(self.param_check(subAccount_api.sub_master_transfer, kwargs))
def get_subAccount_transferHistory(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_transferHistory, kwargs))
def make_universalTransfer(self, **kwargs):
return(self.param_check(subAccount_api.make_universalTransfer, kwargs))
def get_universalTransferHisotry(self, **kwargs):
return(self.param_check(subAccount_api.get_universalTransferHisotry, kwargs))
def get_subAccount_futuresAccount_v2(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_futuresAccount_v2, kwargs))
def get_subAccount_futuresAccountSummary_v2(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_futuresAccountSummary_v2, kwargs))
def get_subAccount_positionRisk_v2(self, **kwargs):
return(self.param_check(subAccount_api.get_subAccount_positionRisk_v2, kwargs))
## ------------------ [WALLET_EXCLUSIVE] ------------------ ##
def get_systemStatus(self):
return(self.param_check(wallet_api.get_systemStatus))
def get_allCoinsInfo(self):
return(self.param_check(wallet_api.get_allCoinsInfo))
def get_dailySnapshot(self, **kwargs):
return(self.param_check(wallet_api.get_dailySnapshot, kwargs))
def disable_withdrawSwitch(self):
return(self.param_check(wallet_api.disable_withdrawSwitch))
def enable_withdrawSwitch(self):
return(self.param_check(wallet_api.enable_withdrawSwitch))
def make_withdraw_SAPI(self, **kwargs):
return(self.param_check(wallet_api.make_withdraw_SAPI, kwargs))
def make_withdraw(self, **kwargs):
return(self.param_check(wallet_api.make_withdraw, kwargs))
def get_depositHistory_SN(self, **kwargs):
return(self.param_check(wallet_api.get_depositHistory_SN, kwargs))
def get_depositHistory(self, **kwargs):
return(self.param_check(wallet_api.get_depositHistory, kwargs))
def get_withdrawHistory_SN(self, **kwargs):
return(self.param_check(wallet_api.get_withdrawHistory_SN, kwargs))
def get_withdrawHistory(self, **kwargs):
return(self.param_check(wallet_api.get_withdrawHistory, kwargs))
def depositAddress_SN(self, **kwargs):
return(self.param_check(wallet_api.depositAddress_SN, kwargs))
def depositAddress(self, **kwargs):
return(self.param_check(wallet_api.depositAddress, kwargs))
def get_accountStatus(self):
return(self.param_check(wallet_api.get_accountStatus))
def get_apiStatus(self):
return(self.param_check(wallet_api.get_apiStatus))
def get_dustLog(self):
return(self.param_check(wallet_api.get_dustLog))
def make_dustTransfer(self, **kwargs):
return(self.param_check(wallet_api.make_dustTransfer, kwargs))
def get_dividendRecord(self, **kwargs):
return(self.param_check(wallet_api.get_dividendRecord, kwargs))
def get_assetDetail(self):
return(self.param_check(wallet_api.get_assetDetail))
def get_tradeFee(self, **kwargs):
return(self.param_check(wallet_api.get_tradeFee, kwargs))
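    ## NOTE: this wallet-level make_universalTransfer shadows the sub-account
    ## method of the same name defined earlier in this class; only this later
    ## definition is bound at class creation time.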
def make_universalTransfer(self, **kwargs):
return(self.param_check(wallet_api.make_universalTransfer, kwargs))
def get_universalTransferHistory(self, **kwargs):
return(self.param_check(wallet_api.get_universalTransferHistory, kwargs))
## ------------------ [USER_DATA_STREAM_EXCLUSIVE] ------------------ ##
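    ## NOTE: every 'FUTURES' branch below calls the margin keep-alive helper;
    ## these look like copy-paste placeholders rather than real futures endpoints.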
    def get_listenKey(self, api_type=None):
        if api_type == 'SPOT': return(self.param_check(userDataStream_api.get_listenKey_spot))
        elif api_type == 'MARGIN': return(self.param_check(userDataStream_api.get_listenKey_margin))
        elif api_type == 'FUTURES': return(self.param_check(userDataStream_api.send_listenKey_keepAlive_margin))
        elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT/FUTURES)')
def send_listenKey_keepAlive(self, api_type='SPOT', **kwargs):
if api_type == 'SPOT': return(self.param_check(userDataStream_api.send_listenKey_keepAlive_spot, kwargs))
elif api_type == 'MARGIN': return(self.param_check(userDataStream_api.send_listenKey_keepAlive_margin, kwargs))
elif api_type == 'FUTURES': return(self.param_check(userDataStream_api.send_listenKey_keepAlive_margin, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT/FUTURES)')
def close_listenKey(self, api_type='SPOT', **kwargs):
if api_type == 'SPOT': return(self.param_check(userDataStream_api.close_listenKey_spot, kwargs))
elif api_type == 'MARGIN': return(self.param_check(userDataStream_api.close_listenKey_margin, kwargs))
elif api_type == 'FUTURES': return(self.param_check(userDataStream_api.send_listenKey_keepAlive_margin, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT/FUTURES)')
## ------------------ [MULTI_API_ENDPOINT] ------------------ ##
def get_account(self, api_type=None):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.get_accountInfo))
elif api_type == 'MARGIN': return(self.param_check(margin_api.get_cross_accountDetails))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
def place_order(self, api_type=None, **kwargs):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.place_order, kwargs))
elif api_type == 'MARGIN': return(self.param_check(margin_api.place_order, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
def get_order(self, api_type=None, **kwargs):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.get_order, kwargs))
elif api_type == 'MARGIN': return(self.param_check(margin_api.get_order, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
def cancel_order(self, api_type=None, **kwargs):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.cancel_order, kwargs))
elif api_type == 'MARGIN': return(self.param_check(margin_api.cancel_order, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
def cancel_all_orders(self, api_type=None, **kwargs):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.cancel_all_orders, kwargs))
elif api_type == 'MARGIN': return(self.param_check(margin_api.cancel_all_orders, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
def get_all_orders(self, api_type=None, **kwargs):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.get_all_orders, kwargs))
elif api_type == 'MARGIN': return(self.param_check(margin_api.get_all_orders, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
def get_all_trades(self, api_type=None, **kwargs):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.get_all_trades, kwargs))
elif api_type == 'MARGIN': return(self.param_check(margin_api.get_all_trades, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
def get_open_orders(self, api_type=None, **kwargs):
if api_type == None: api_type = self.default_api_type
if api_type == 'SPOT': return(self.param_check(spot_api.get_open_orders, kwargs))
elif api_type == 'MARGIN': return(self.param_check(margin_api.get_open_orders, kwargs))
elif api_type == None: return('PLEASE_SPECIFY_API_TYPE, api_type=(MARGIN/SPOT)')
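    ## Example (sketch): with default_api_type='SPOT' set on the client, the
    ## two calls below are equivalent (parameter names follow Binance's spot
    ## order endpoint):
    ##   api.place_order(symbol='BTCUSDT', side='BUY', type='MARKET', quantity=0.001)
    ##   api.place_order(api_type='SPOT', symbol='BTCUSDT', side='BUY', type='MARKET', quantity=0.001)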
    def param_check(self, api_info, users_passed_parameters=None):
        if users_passed_parameters is None:
            users_passed_parameters = {}
        if 'IS_TEST' not in users_passed_parameters:
            if api_info.params != None:
                missingParameters = []
                allParams = []
                if 'symbol' in users_passed_parameters:
                    # Accept 'X-Y' formatted symbols and normalize them to
                    # Binance's concatenated, upper-case form.
                    if '-' in users_passed_parameters['symbol']:
                        base, quote = users_passed_parameters['symbol'].split('-')
                        users_passed_parameters.update({'symbol':(quote+base).upper()})
if 'R' in api_info.params:
allParams += api_info.params['R']
for param in api_info.params['R']:
if not(param in users_passed_parameters):
missingParameters.append(param)
if len(missingParameters) >= 1:
return('MISSING_REQUIRED_PARAMETERS', missingParameters)
if 'O' in api_info.params:
allParams += api_info.params['O']
unknownParams = []
for param | |
which consists in allowing the user
to ignore some deterministic variables in the trace by adding a
`# sample: ignore` comment on the line it is defined.
"""
comment = node.trailing_whitespace.comment
if comment is not None and "sample: ignore" in comment.value:
self.sample_this_op = False
def leave_SimpleStatementLine(self, _) -> None:
"""Re-enable the sampling of deterministic variables after parsing the current line."""
self.sample_this_op = True
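    # Example of the escape hatch described above (illustrative):
    #
    #   x = np.ones(10)  # sample: ignore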
# ----------------------------------------------------------------
# PARSE DETERMINISTIC ASSIGNMENTS (named Ops)
# ----------------------------------------------------------------
def visit_Assign(self, node: cst.Assign) -> None:
"""Visit named Ops and Constants.
This method parses assignment expressions such as:
>>> x = f(a)
We parse the right-hand side recursively to create the Op and its
parent which can be Constants, Placeholders or Ops.
TODO: For the sake of simplicity we currently explicitly disallow
expressions that return a tuple. It should be possible to define
multioutput ops to handle this situation.
There are three broad situations to take into consideration. First,
numerical constants:
>>> a = 0
        which are parsed into a Constant node. Then come transformations, which
        include function calls, binary operations, unary operations and the
        result of a comparison:
>>> a = np.dot(x, y)
... a = w + 2
... a = x < 0
... a = ~x
These are parsed into `Op` nodes.
        Finally, there are constants that are not encoded by an `ast.Constant`
        node. Numpy arrays, for instance:
        >>> a = np.ones(10)
        These can be distinguished from regular Ops by the fact that they are
        not linked to a named variable. It is however hard to guess that these
        expressions are constant when parsing the tree. We thus parse them into
        a named `Op` node, and rely on a further simplification step to turn
        them into a constant.
"""
op = self.recursive_visit(node.value)
# We restrict ourselves to single-output ops
if len(node.targets) > 1:
raise SyntaxError(MULTIPLE_RETURNED_VALUES_ERROR)
single_target = node.targets[0].target
if isinstance(single_target, cst.Name):
op.name = single_target.value
self.named_variables[op.name] = op
# ----------------------------------------------------------------
# PARSE RANDOM VARIABLE ASSIGNMENTS (Sample Ops)
# ----------------------------------------------------------------
def visit_Comparison(self, node: cst.Comparison) -> None:
"""Parse sample statements.
Sample statements are represented in MCX by the symbol `<~` which is
the combination of the `Lt` and `Invert` operators in Python. We thus
translate the succession of `<` and `~` in the Python AST into a single
`SampleOp` in MCX's graph.
The parser can encounter two situations. First, the object on the
right-hand side of the 'sample' operator is a MCX model:
>>> coef <~ Horseshoe(1.)
In this case the parser instantiates the model, parses it and merges
its graph with the current graph.
Otherwise the parser assumes that the object on the right-hand side of the operator has
the same API as the `mcx.distributions.Distribution` class
>>> a <~ Normal(0, 1)
... a <~ dist.Normal(0, 1)
And builds a SampleOp from the expression.
"""
if len(node.comparisons) != 1:
return
operator = node.comparisons[0].operator
comparator = node.comparisons[0].comparator
if isinstance(operator, cst.LessThan) and isinstance(
comparator, cst.UnaryOperation
):
if isinstance(node.left, cst.Tuple):
raise SyntaxError(MULTIPLE_RETURNED_VALUES_ERROR)
if isinstance(node.left, cst.Name):
variable_name = node.left.value
else:
return
expression = comparator.expression
if variable_name in self.named_variables:
raise SyntaxError(DUPLICATE_VARIABLE_NAME_ERROR)
# Eval the object on the right-hand side of the <~ operator.
# This eval is necessary to check whether the object sampled
# from is a model or a distribution, and in the former case
# to merge its graph into the current one.
if isinstance(expression, cst.Call):
fn_call_path = unroll_call_path(expression.func)
else:
raise SyntaxError(
"Expressions on the right-hand-side of <~ must be models or distributions. "
f"Found the node {expression} instead."
)
fn_obj = eval(fn_call_path, self.namespace)
if isinstance(fn_obj, mcx.model):
op = self.recursive_visit(comparator.expression)
modelsample_op = SampleModelOp(
op.cst_generator,
self.scope,
variable_name,
fn_call_path,
fn_obj.graph,
)
self.graph = nx.relabel_nodes(self.graph, {op: modelsample_op})
self.named_variables[variable_name] = modelsample_op
elif issubclass(fn_obj, mcx.distributions.Distribution):
op = self.recursive_visit(comparator.expression)
sample_op = SampleOp(
op.cst_generator, self.scope, variable_name, fn_obj
)
self.graph = nx.relabel_nodes(self.graph, {op: sample_op})
self.named_variables[variable_name] = sample_op
else:
raise SyntaxError(
"Expressions on the right-hand-side of <~ must be models or distributions. "
f"Found {fn_call_path} instead."
)
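# For reference: Python has no `<~` operator, so `a <~ Normal(0, 1)` reaches
# libcst as one Comparison node, roughly
#     Comparison(left=Name("a"),
#                comparisons=[ComparisonTarget(operator=LessThan(),
#                    comparator=UnaryOperation(operator=BitInvert(),
#                        expression=Call(func=Name("Normal"), ...)))])
# which is why the method above checks for exactly one LessThan comparison
# whose comparator is a UnaryOperation.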
# ----------------------------------------------------------------
# RECURSIVELY PARSE THE RHS OF STATEMENTS
# ----------------------------------------------------------------
def recursive_visit(self, node) -> Union[Constant, Op]:
"""Recursively visit the node and populate the graph with the traversed nodes.
The recursion ends when the CST node being visited is a `Name` or a
`BaseNumber` node. While we strictly follow libcst's CST
decomposition, it may be desirable to simplify the graph for our
purposes. For instance:
- Slices and subscripts. There is no reason to detail the succession of
nodes in the graph. It can either be a constant (only depends on numerical constants),
or a function of other variables.
- Functions like `np.dot`: `np` and `dot` are currently stored in different Ops. We should
merge these.
TODO: Implement a function that takes a GraphicalModel and applies
these simplifications. This will be necessary when sampling
deterministic functions.
Note
----
This function could be rewritten using functools'
`singledispatchmethod`, but it is not available for Python 3.7. While
there is a library that provides a backport, I preferred to avoid
adding a dependency.
"""
if isinstance(node, cst.Name):
"""If the node corresponds to a placeholder or a named op its name
should be registered in `named_variables`. Otherwise it corresponds
to the name of an attribute.
"""
try:
name = self.named_variables[node.value]
except KeyError:
name = Name(lambda: node, node.value)
return name
if isinstance(node, cst.BaseNumber):
new_node = Constant(lambda: node)
return new_node
# Parse function calls
if isinstance(node, cst.Call):
func = self.recursive_visit(node.func)
args = [self.recursive_visit(arg) for arg in node.args]
def to_call_cst(*args, **kwargs):
# I don't exactly remember why we pass the `func` as a keyword
# argument, but I think it has something to do with the fact
# that at compilation the arguments are passed in the order they
# were introduced in the graph, and nodes are deleted/re-inserted
# when transforming to get logpdf and samplers.
func = kwargs["__name__"]
return cst.Call(func, args, lpar=node.lpar, rpar=node.rpar)
op = Op(to_call_cst, self.scope)
self.graph.add(op, *args, __name__=func)
return op
if isinstance(node, cst.Arg):
value = self.recursive_visit(node.value)
def to_arg_cst(value):
return cst.Arg(value, node.keyword)
op = Op(to_arg_cst, self.scope)
self.graph.add(op, value)
return op
if isinstance(node, cst.Attribute):
value = self.recursive_visit(node.value)
attr = self.recursive_visit(node.attr)
def to_attribute_cst(value, attr):
return cst.Attribute(value, attr)
op = Op(to_attribute_cst, self.scope)
self.graph.add(op, value, attr)
return op
# Parse lists and tuples
if isinstance(node, cst.List):
elements = [self.recursive_visit(e) for e in node.elements]
def to_list_cst(*list_elements):
return cst.List(list_elements)
op = Op(to_list_cst, self.scope)
self.graph.add(op, *elements)
return op
if isinstance(node, cst.Element):
value = self.recursive_visit(node.value)
def to_element_cst(value):
return cst.Element(value)
op = Op(to_element_cst, self.scope)
self.graph.add(op, value)
return op
# Parse slices and subscripts
if isinstance(node, cst.Subscript):
value = self.recursive_visit(node.value)
slice_elements = [self.recursive_visit(s) for s in node.slice]
def to_subscript_cst(value, *slice_elements):
return cst.Subscript(value, slice_elements)
op = Op(to_subscript_cst, self.scope)
self.graph.add(op, value, *slice_elements)
return op
if isinstance(node, cst.SubscriptElement):
sl = self.recursive_visit(node.slice)
def to_subscript_element_cst(sl):
return cst.SubscriptElement(sl)
op = Op(to_subscript_element_cst, self.scope)
self.graph.add(op, sl)
return op
if isinstance(node, cst.Index):
value = self.recursive_visit(node.value)
def to_index_cst(value):
return cst.Index(value)
op = Op(to_index_cst, self.scope)
self.graph.add(op, value)
return op
# Parse Binary and Unary operations
if isinstance(node, cst.BinaryOperation):
left = self.recursive_visit(node.left)
right = self.recursive_visit(node.right)
def to_binary_operation_cst(left, right):
return cst.BinaryOperation(
left, node.operator, right=right, lpar=node.lpar, rpar=node.rpar
)
op = Op(to_binary_operation_cst, self.scope)
self.graph.add(op, left, right)
return op
if isinstance(node, cst.UnaryOperation):
expression = self.recursive_visit(node.expression)
def to_unary_operation_cst(expression):
return cst.UnaryOperation(
node.operator, expression, lpar=node.lpar, rpar=node.rpar
)
op = Op(to_unary_operation_cst, self.scope)
self.graph.add(op, expression)
return op
# In case we missed an important statement or expression leave a friendly error
# message and redirect the user to the issue tracker to let us know.
raise TypeError(
f"The CST node {node.__class__.__name__} is currently not supported by MCX's parser. "
"Please open an issue on https://github.com/rlouf/mcx so we can integrate it."
)
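# For illustration, visiting the right-hand side of `a = np.dot(x, y)` yields
# a small sub-graph: Name nodes for `np` and `dot`, an Attribute Op combining
# them, one Op per Arg wrapper, and a final Call Op whose parents are the
# argument Ops plus the Attribute Op (passed through the `__name__` keyword).
# The simplification step mentioned above would merge `np` and `dot` into one node.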
# ----------------------------------------------------------------
# TAG RETURNED VARIABLES
# ----------------------------------------------------------------
def visit_Return(self, node: cst.Return) -> None:
"""Visit the return statement.
We mark the referenced named Op as returned.
"""
value = node.value
if isinstance(value, cst.Name):
returned_name = value.value
else:
raise SyntaxError(MULTIPLE_RETURNED_VALUES_ERROR)
try:
returned_node = self.named_variables[returned_name]
if not isinstance(returned_node, SampleOp):
raise SyntaxError(TRANSFORMED_RETURNED_VALUE_ERROR)
returned_node.is_returned = True
except KeyError:
raise NameError(f"name '{returned_name}' is not defined")
###########################################
# Author : <NAME>, <NAME>, <NAME>, <NAME>
# Date : 26.04.2018
# Course : Applications in Object-oriented Programming and Databases
# Teachers : <NAME>, <NAME>
# Project : Bibliotek
# Goal : Book management system
# Libraries : scrapy, bibtexparser, datetime, requests
# API : Google book API
# #########################################
import scrapy
from scrapy.crawler import CrawlerProcess
from bibliotek import settings
from scrapy.http import Request
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibliotek.items import Book
from bibliotek.book_info import BookInfo
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
import pandas as pd
from pathlib import Path
from slugify import slugify
import urllib
import json
import re
import requests
import os
###########################################
# Class : BooksSpider
# Goal : Built on the scrapy library; crawls book data from websites.
# Each scraped book's data is then stored into an item (items.py) => (Book).
# Each item is then processed through an item pipeline (pipelines.py)
# Example-cli : scrapy crawl books -a title=
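#               (illustrative invocation with placeholder values)
#               scrapy crawl books -a title="<book title>" -a authors="<author>" -a isbn="<isbn>" -a extension=pdf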
# #########################################
class BooksSpider(scrapy.Spider):
# Global variable
# Name is required by scrapy
name = "books"
def __init__(self, extension="pdf",isbn="", title="", authors = "", *args, **kwargs):
"""Constructor of the BooksSpider scrapy class. Scrap the desired book and download it as pdf.
Keyword Arguments:
extension {str} -- type of the book we want (default: {"pdf"})
isbn {str} -- isbn of the book (default: {""})
title {str} -- title of the book (default: {"Introductory Econometrics: A Modern Approach"})
authors {str} -- authors of the book (default: {"<NAME>"})
"""
super(BooksSpider, self).__init__(*args, **kwargs)
# Because making a spelling mistake is super easy to do,
# we first look the book up on the Google Books API; then, after having fetched correct information about it,
# we make the request on libgen.io
self.bookPreview = BookInfo(isbn, title, authors)
self.logger.info("GOOGLE API : {}".format(self.bookPreview.url_request))
# Build request
request = "{0} {1}".format(self.bookPreview.get_title().split(",")[0],self.bookPreview.get_authors()[0])
request = request.replace(" ", "+")
# Build starting urls
self.start_urls = [
'http://libgen.io/search.php?req={}&open=0&res=25&view=simple&phrase=1&column=def'.format(request)
]
# 'http://libgen.pw/search?q={}'.format(request)
# Choose what extension the book will be downloaded in
self.extension = extension
# Number of query ( Number of book searched with the same name )
self.query_number = 2
# Authors name
self.authors = self.bookPreview.get_authors()[0]
# Book title
self.title = self.bookPreview.get_title()
# Subtitle
self.subtitle = self.bookPreview.get_subtitle()
# Logging all information about book best match
self.logger.info("Cumulative ratio : {0}".format(self.bookPreview.get_cumulative_ratio()))
self.logger.info("Number of best match found :{0}".format(self.bookPreview.top_books_number))
self.logger.info("Book selected :{0}".format(self.bookPreview.selected))
# Log to be sure that arguments were passed correctly
self.logger.info("authors : {0}\ttitle : {1}\tsubtitle : {2}".format(self.authors, self.title, self.subtitle))
self.logger.info("start_url : {0}".format(self.start_urls[0]))
def parse(self, response):
"""
First method; goes through the given start_urls and parses each one
Arguments:
response {scrapy.response} -- scrapy response
"""
# Initiate a scrapper for each URL contained in start_urls
for url in self.start_urls:
yield response.follow(url, self.parse_item)
def parse_item(self, response):
"""
Go through the given start_urls and parse each one
Arguments:
response {scrapy.response} -- scrapy response
"""
self.logger.info('You just got on parse_item : {0}'.format(response.url))
# Iterators
query = 1
# Collect every result row so the best match can be selected afterwards
book_list = []
try:
# Select the table that contains all the data
table = response.css(".c")
# Extract the relevant elements from the page
# Go through each tr
for row in table.css('tr'):
book_id = row.xpath("./td[1]/text()").extract_first()
authors = row.xpath("./td[2]").css('a:nth-child(1)::text').extract_first()
title = row.xpath("./td[3]/a[@id='{0}']/text()".format(book_id)).extract_first()
publisher = row.xpath("./td[4]/text()").extract_first()
published_date = row.xpath("./td[5]/text()").extract_first()
page_count = row.xpath("./td[6]/text()").extract_first()
language = row.xpath("./td[7]/text()").extract_first()
size = row.xpath("./td[8]/text()").extract_first()
extension = row.xpath("./td[9]/text()").extract_first()
download_link_1 = row.xpath("./td[10]").css('a:nth-child(1)::attr(href)').extract_first()
download_link_2 = row.xpath("./td[11]").css('a:nth-child(1)::attr(href)').extract_first()
download_link_3 = row.xpath("./td[12]").css('a:nth-child(1)::attr(href)').extract_first()
download_link_4 = row.xpath("./td[13]").css('a:nth-child(1)::attr(href)').extract_first()
try:
cumulative_ratio = round((fuzz.ratio(title,self.title)+fuzz.ratio(authors,self.authors))/2,2)
except:
cumulative_ratio = 0
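# fuzz.ratio is a 0-100 Levenshtein similarity, so e.g. an exact title
# match combined with a slightly misspelled author might score
# (100 + 90) / 2 = 95.0 once rounded to two decimals.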
book_info_temp = {}
book_info_temp.update({'index':query,
'title':title,
'authors':authors,
'publisher': publisher,
'published_date': published_date,
'page_count': page_count,
'language': language,
'size': size,
'extension': extension,
'download_link_1':download_link_1,
'download_link_2':download_link_2,
'download_link_3':download_link_3,
'download_link_4':download_link_4,
'cumulative_ratio':cumulative_ratio})
book_list.append(book_info_temp)
# Create the Dataframe of all the list of books
df = pd.DataFrame(book_list)
# Remove the first one since it's not a book but the header of the website
df = df[1:]
# Select only the one that respect the extension criteria
df = df.loc[df['extension'] == self.extension]
# sort by cumulative ratio and select the book best match
best_match = df.sort_values(by='cumulative_ratio',ascending=False).iloc[0]
# Show the book best match debug purpose
print(best_match)
# We select the url that goes to libgen.io
dl_link = best_match['download_link_2']
# We are scraping only libgen.io
if dl_link is not None:
request = Request(
url=response.urljoin(dl_link),
callback=self.parse_books_libgen
)
request.meta['authors'] = best_match['authors']
request.meta['title'] = best_match['title']
request.meta['dl_link1'] = best_match['download_link_1']
request.meta['dl_link2'] = best_match['download_link_2']
request.meta['chosen_url'] = "dl_link2"
yield request
except:
print("="*150)
print("Error while parsing data, the website might give back no entries. Check out Title, authors and ISBN")
print("="*150)
def parse_books_libgen(self, response):
"""
Parse the items on the second page of libgen.io
Once the download link and the bibtex have been parsed,
The save_pdf method is called.
Arguments:
response {scrapy.response} -- scrapy response
"""
# Select the download column
download_column = response.css('td')[2]
# Get the final download URL
download_url = download_column.css('a:nth-child(1)::attr(href)').extract_first()
# Parse the information in bibtex
bibtex_raw = response.xpath('//*[@id="bibtext"]/text()').extract_first()
bibtex_processed = self.bibtex_reader(bibtex_raw)
self.logger.info("authors : {0} \t Title : {1}".format(self.authors, self.title))
self.logger.info("Bibtex : {}".format(bibtex_processed))
self.logger.info('You just got on page 2 on libgen.io : {0}'.format(response.urljoin(download_url)))
request = Request(
url = response.urljoin(download_url),
callback = self.save
)
request.meta['url'] = download_url
if 'md5' in bibtex_processed:
request.meta['md5'] = bibtex_processed['md5']
else:
request.meta['md5'] = ""
request.meta['dl_link1'] = response.meta['dl_link1']
request.meta['dl_link2'] = response.meta['dl_link2']
request.meta['chosen_url'] = response.meta['chosen_url']
yield request
def save(self, response):
"""
Download the file as a pdf and call the model to save the data into the database.
Arguments:
response {scrapy.response} -- scrapy response
"""
# Create a book Item and store all variables
book = Book()
# Getting back the book info that we previously got from the Google Books API
bookInfo = self.bookPreview
###############################################################################
# Assign all the google book api data to the Book Item
# This solution swap the data provided by libgen with the google book api data
# We assume that Google api data is cleaner
###############################################################################
# logging the process
self.logger.info("Saving the book info as an Item.")
# Data
book['title'] = self.formatString(bookInfo.get_title())
# We need to check if the author exists and return his id if he does; if not, he is created, more details on him are fetched from wikipedia and the id is returned
author_name = self.formatString(''.join(bookInfo.get_authors()))
book['authorId'] = int(checkAuthor(author_name))
book['publisher'] = self.formatString(bookInfo.get_publisher())
book['published_date'] = self.formatString(bookInfo.get_published_date())
book['description'] = self.formatString(bookInfo.get_description())
book['isbn10'] = self.formatString(bookInfo.get_isbn10())
book['isbn13'] = self.formatString(bookInfo.get_isbn13())
# We need to check if the category exists and return its id if it does; if not, it is created and the id is returned
category_name = self.formatString(''.join(bookInfo.get_categories()))
book['categoryId'] = int(checkCategory(category_name))
book['language'] = self.formatString(bookInfo.get_language())
book['thumbnail'] = self.formatString(bookInfo.get_thumbnail())
book['page_count'] = int(bookInfo.get_page_count())
book['url_info'] = self.formatString(bookInfo.get_url_info())
# Creating the filepath
filename = slugify(book['title'])
filename = "{0}.{1}".format(filename,self.extension)
authorsfilepath = slugify(author_name)
if not os.path.exists("{0}/{1}".format(settings.FILES_STORE,authorsfilepath)):
os.makedirs("{0}/{1}".format(settings.FILES_STORE,authorsfilepath))
path = "{0}/{1}/{2}".format(settings.FILES_STORE,authorsfilepath,filename)
with open(path, 'wb') as f:
f.write(response.body)
self.logger.info("Saved PDF as : {0}".format(path))
###############################################################################
# The only data that cannot be retrieved on google book api is here
###############################################################################
book['booktype'] = self.extension
# If there exist an md5 (Hashed ID from libgen.io)
if 'md5' in response.meta:
if response.meta['md5'] is not None:
book['md5'] = response.meta['md5']
else:
book['md5'] = "NaN"
# We assign the link of the first two download link
if response.meta['dl_link1'] is not None:
book['dl_link1']= response.meta['dl_link1']
else:
book['dl_link1'] = "NaN"
if response.meta['dl_link2'] is not None:
book['dl_link2']= response.meta['dl_link2']
else:
book['dl_link2'] = "NaN"
# Assign the Download URL that we used
if response.meta['chosen_url'] is not None:
book['chosen_url']= response.meta['chosen_url']
else:
book['chosen_url'] = ""
# If the path is set, store it
if path is not None:
book['filepath'] = path
else:
book['filepath'] = ""
yield book
def bibtex_reader(self, bibtextdata):
"""
Parse the bibtex data
Arguments:
bibtextdata {str} -- bibtexdata
Returns:
list -- list of all entries of the bibtex
"""
parser = BibTexParser()
parser.ignore_nonstandard_types = False
parser.homogenise_fields = False
parser.common_strings = False
bib_database = bibtexparser.loads(bibtextdata, parser)
return bib_database.entries[0]
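# For illustration (field names are typical, values made up): a block like
#     @book{book:12345, title = {Some Title}, md5 = {0123abcd...}}
# is parsed by bibtexparser into a plain dict such as
#     {'ENTRYTYPE': 'book', 'ID': 'book:12345', 'title': 'Some Title', 'md5': '0123abcd...'}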
def formatString(self, string):
"""
format the string into something insertable into the db, useful especially for the description which might contain weird characters
Arguments:
string {str} -- string to format
Returns:
str -- formatted string
"""
string = string.replace("'", "\\'")
return string
# Repository: Srivatsan-Srinivasan/am221-project
import os
import time
import numpy as np
import numpy.random as npr
import tensorflow as tf
import tflearn
import bundle_entropy
from replay_memory import ReplayMemory
from experience_replay import PrioritizedReplayBuffer
from schedules import LinearSchedule
from helper import variable_summaries
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
plt.style.use('bmh')
from matplotlib.mlab import griddata
from sklearn.decomposition import PCA
flags = tf.app.flags
FLAGS = flags.FLAGS
path = os.path.dirname(os.path.realpath(__file__))
outdir_path = os.path.abspath(os.path.join(path, FLAGS.outdir))
model_path = os.path.join(outdir_path, FLAGS.model)
# Input Convex Neural Network
class Agent:
def __init__(self, dimO, dimA):
dimA, dimO = dimA[0], dimO[0]
self.dimA = dimA
self.dimO = dimO
tau = FLAGS.tau
discount = FLAGS.discount
l2norm = FLAGS.l2norm
learning_rate = FLAGS.rate
outheta = FLAGS.outheta
ousigma = FLAGS.ousigma
if FLAGS.icnn_opt == 'adam':
self.opt = self.adam
elif FLAGS.icnn_opt == 'bundle_entropy':
self.opt = self.bundle_entropy
else:
raise RuntimeError("Unrecognized ICNN optimizer: "+FLAGS.icnn_opt)
if FLAGS.use_per:
self.rm = PrioritizedReplayBuffer(FLAGS.rmsize, alpha=FLAGS.alpha)
self.beta_schedule = LinearSchedule(FLAGS.beta_iters,
initial_p=FLAGS.beta0,
final_p=1.0)
else:
self.rm = ReplayMemory(FLAGS.rmsize, dimO, dimA)
self.sess = tf.Session(config=tf.ConfigProto(
inter_op_parallelism_threads=FLAGS.thread,
log_device_placement=False,
allow_soft_placement=True,
gpu_options=tf.GPUOptions(allow_growth=True)))
self.noise = np.zeros(self.dimA)
obs = tf.placeholder(tf.float32, [None, dimO], "obs")
act = tf.placeholder(tf.float32, [None, dimA], "act")
rew = tf.placeholder(tf.float32, [None], "rew")
per_weight = tf.placeholder(tf.float32, [None], "per_weight")
with tf.variable_scope('q'):
negQ = self.negQ(obs, act)
negQ_entr = negQ - entropy(act)
q = -negQ
q_entr = -negQ_entr
act_grad, = tf.gradients(negQ, act)
act_grad_entr, = tf.gradients(negQ_entr, act)
obs_target = tf.placeholder(tf.float32, [None, dimO], "obs_target")
act_target = tf.placeholder(tf.float32, [None, dimA], "act_target")
term_target = tf.placeholder(tf.bool, [None], "term_target")
with tf.variable_scope('q_target'):
# double Q
negQ_target = self.negQ(obs_target, act_target)
negQ_entr_target = negQ_target - entropy(act_target)
act_target_grad, = tf.gradients(negQ_target, act_target)
act_entr_target_grad, = tf.gradients(negQ_entr_target, act_target)
q_target = -negQ_target
q_target_entr = -negQ_entr_target
if FLAGS.icnn_opt == 'adam':
y = tf.where(term_target, rew, rew + discount * q_target_entr)
y = tf.maximum(q_entr - 1., y)
y = tf.minimum(q_entr + 1., y)
y = tf.stop_gradient(y)
td_error = q_entr - y
elif FLAGS.icnn_opt == 'bundle_entropy':
raise RuntimeError("Needs checking.")
q_target = tf.where(term2, rew, rew + discount * q2_entropy)
q_target = tf.maximum(q_entropy - 1., q_target)
q_target = tf.minimum(q_entropy + 1., q_target)
q_target = tf.stop_gradient(q_target)
td_error = q_entropy - q_target
if FLAGS.use_per:
ms_td_error = tf.reduce_sum(tf.multiply(tf.square(td_error), per_weight), 0)
else:
ms_td_error = tf.reduce_mean(tf.square(td_error), 0)
regLosses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES, scope='q/')
loss_q = ms_td_error + l2norm*tf.reduce_sum(regLosses)
self.theta_ = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='q/')
self.theta_cvx_ = [v for v in self.theta_
if 'proj' in v.name and 'W:' in v.name]
self.makeCvx = [v.assign(tf.abs(v)) for v in self.theta_cvx_]
self.proj = [v.assign(tf.maximum(v, 0)) for v in self.theta_cvx_]
# self.proj = [v.assign(tf.abs(v)) for v in self.theta_cvx_]
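# Keeping the z-path ('proj') weights non-negative is what makes the network
# convex in the action input: makeCvx initializes them to |w| once at startup,
# and proj clips them back to [0, inf) after every gradient step (see train()).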
self.theta_target_ = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
scope='q_target/')
update_target = [theta_target_i.assign_sub(tau*(theta_target_i-theta_i))
for theta_i, theta_target_i in zip(self.theta_, self.theta_target_)]
optim_q = tf.train.AdamOptimizer(learning_rate=learning_rate)
grads_and_vars_q = optim_q.compute_gradients(loss_q)
optimize_q = optim_q.apply_gradients(grads_and_vars_q)
summary_path = os.path.join(model_path, 'board', FLAGS.exp_id)
summary_writer = tf.summary.FileWriter(summary_path, self.sess.graph)
if FLAGS.summary:
if FLAGS.icnn_opt == 'adam':
tf.summary.scalar('Q', tf.reduce_mean(q))
elif FLAGS.icnn_opt == 'bundle_entropy':
tf.summary.scalar('Q', tf.reduce_mean(q_entr))
tf.summary.scalar('Q_target', tf.reduce_mean(q_target))
tf.summary.scalar('loss', ms_td_error)
tf.summary.scalar('reward', tf.reduce_mean(rew))
merged = tf.summary.merge_all()
# tf functions
with self.sess.as_default():
self._train = Fun([obs, act, rew, obs_target, act_target, term_target, per_weight],
[optimize_q, update_target, loss_q, td_error, q, q_target],
merged, summary_writer)
self._fg = Fun([obs, act], [negQ, act_grad])
self._fg_target = Fun([obs_target, act_target], [negQ_target, act_target_grad])
self._fg_entr = Fun([obs, act], [negQ_entr, act_grad_entr])
self._fg_entr_target = Fun([obs_target, act_target],
[negQ_entr_target, act_entr_target_grad])
# initialize tf variables
self.saver = tf.train.Saver(max_to_keep=1)
ckpt = tf.train.latest_checkpoint(model_path + "/tf")
if not FLAGS.force and ckpt:
self.saver.restore(self.sess, ckpt)
else:
self.sess.run(tf.global_variables_initializer())
self.sess.run(self.makeCvx)
self.sess.run([theta_target_i.assign(theta_i)
for theta_i, theta_target_i in zip(self.theta_, self.theta_target_)])
self.sess.graph.finalize()
self.t = 0 # global training time (number of observations)
def bundle_entropy(self, func, obs):
act = np.ones((obs.shape[0], self.dimA)) * 0.5
def fg(x):
value, grad = func(obs, 2 * x - 1)
grad *= 2
return value, grad
act = bundle_entropy.solveBatch(fg, act)[0]
act = 2 * act - 1
return act
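# Note on the change of variables above: bundle_entropy.solveBatch optimizes
# over x in [0, 1], while the Q-network expects actions in [-1, 1]. With
# act = 2*x - 1 the chain rule gives d(act)/dx = 2, hence the gradient is
# doubled inside fg() and the solution is mapped back with act = 2*act - 1.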
def adam(self, func, obs, plot=False):
# if npr.random() < 1./20:
# plot = True
b1 = 0.9
b2 = 0.999
lam = 0.5
eps = 1e-8
alpha = 0.01
nBatch = obs.shape[0]
act = np.zeros((nBatch, self.dimA))
m = np.zeros_like(act)
v = np.zeros_like(act)
b1t, b2t = 1., 1.
act_best, a_diff, f_best = [None]*3
hist = {'act': [], 'f': [], 'g': []}
for i in range(1000):
f, g = func(obs, act)
if plot:
hist['act'].append(act.copy())
hist['f'].append(f)
hist['g'].append(g)
if i == 0:
act_best = act.copy()
f_best = f.copy()
else:
prev_act_best = act_best.copy()
I = (f < f_best)
act_best[I] = act[I]
f_best[I] = f[I]
a_diff_i = np.mean(np.linalg.norm(act_best - prev_act_best, axis=1))
a_diff = a_diff_i if a_diff is None \
else lam*a_diff + (1.-lam)*a_diff_i
# print(a_diff_i, a_diff, np.sum(f))
if a_diff < 1e-3 and i > 5:
#print(' + Adam took {} iterations'.format(i))
if plot:
self.adam_plot(func, obs, hist)
return act_best
m = b1 * m + (1. - b1) * g
v = b2 * v + (1. - b2) * (g * g)
b1t *= b1
b2t *= b2
mhat = m/(1.-b1t)
vhat = v/(1.-b2t)
act -= alpha * mhat / (np.sqrt(vhat) + eps)
# act = np.clip(act, -1, 1)
act = np.clip(act, -1.+1e-8, 1.-1e-8)
#print(' + Warning: Adam did not converge.')
if plot:
self.adam_plot(func, obs, hist)
return act_best
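# Stopping rule used above: a_diff is an exponential moving average
# (lam = 0.5) of how far the per-sample best actions moved between
# iterations; once it drops below 1e-3 (after at least 5 iterations) the
# search is treated as converged and the best actions found are returned.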
def adam_plot(self, func, obs, hist):
hist['act'] = np.array(hist['act']).T
hist['f'] = np.array(hist['f']).T
hist['g'] = np.array(hist['g']).T
if self.dimA == 1:
xs = np.linspace(-1.+1e-8, 1.-1e-8, 100)
ys = [func(obs[[0],:], [[xi]])[0] for xi in xs]
fig = plt.figure()
plt.plot(xs, ys, alpha=0.5, linestyle="--")
plt.plot(hist['act'][0,0,:], hist['f'][0,:], label="Adam's trace")
plt.legend()
os.makedirs(os.path.join(model_path, "adam"), exist_ok=True)
t = time.time()
fname = os.path.join(model_path, "adam", 'adam_plot_{}.png'.format(t))
plt.savefig(fname)
plt.close(fig)
elif self.dimA == 2:
assert(False)
else:
xs = npr.uniform(-1., 1., (5000, self.dimA))
ys = np.array([func(obs[[0],:], [xi])[0] for xi in xs])
epi = np.hstack((xs, ys))
pca = PCA(n_components=2).fit(epi)
W = pca.components_[:,:-1]
xs_proj = xs.dot(W.T)
fig = plt.figure()
X = Y = np.linspace(xs_proj.min(), xs_proj.max(), 100)
Z = griddata(xs_proj[:,0], xs_proj[:,1], ys.ravel(),
X, Y, interp='linear')
plt.contourf(X, Y, Z, 15)
plt.colorbar()
adam_x = hist['act'][:,0,:].T
adam_x = adam_x.dot(W.T)
plt.plot(adam_x[:,0], adam_x[:,1], label='Adam', color='k')
plt.legend()
os.makedirs(os.path.join(model_path, "adam"), exist_ok=True)
t = time.time()
fname = os.path.join(model_path, "adam", 'adam_plot_{}.png'.format(t))
plt.savefig(fname)
plt.close(fig)
def reset(self, obs):
self.noise = np.zeros(self.dimA)
self.observation = obs # initial observation
def act(self, test=False):
with self.sess.as_default():
#print('--- Selecting action, test={}'.format(test))
obs = np.expand_dims(self.observation, axis=0)
if FLAGS.icnn_opt == 'adam':
f = self._fg_entr
# f = self._fg
elif FLAGS.icnn_opt == 'bundle_entropy':
f = self._fg
else:
raise RuntimeError("Unrecognized ICNN optimizer: "+FLAGS.icnn_opt)
tflearn.is_training(False)
action = self.opt(f, obs)
tflearn.is_training(not test)
if not test:
self.noise -= FLAGS.outheta*self.noise - \
FLAGS.ousigma*npr.randn(self.dimA)
action += self.noise
action = np.clip(action, -1, 1)
self.action = np.atleast_1d(np.squeeze(action, axis=0))
return self.action
def observe(self, rew, term, obs2, test=False):
obs1 = self.observation
self.observation = obs2
# train
if not test:
self.t = self.t + 1
if FLAGS.use_per:
self.rm.add(obs1, self.action, rew, obs2, float(term))
else:
self.rm.enqueue(obs1, term, self.action, rew)
if self.t > FLAGS.warmup:
for i in range(FLAGS.iter):
loss = self.train()
def train(self):
with self.sess.as_default():
if FLAGS.use_per:
experience = self.rm.sample(FLAGS.bsize, beta=self.beta_schedule.value(self.t))
(obs, act, rew, ob2, term2, weights, batch_idxes) = experience
else:
obs, act, rew, ob2, term2, info = self.rm.minibatch(size=FLAGS.bsize)
weights = np.ones_like(rew)  # uniform importance weights when PER is disabled
#if np.random.uniform() > 0.7 and np.sum(rew > 0.0) >0 :
# print("good reward samples", 100*np.sum(rew > 0.0) / FLAGS.bsize)
if FLAGS.icnn_opt == 'adam':
# f = self._opt_train_entr
f = self._fg_entr_target
# f = self._fg_target
elif FLAGS.icnn_opt == 'bundle_entropy':
f = self._fg_target
else:
raise RuntimeError("Unrecognized ICNN optimizer: "+FLAGS.icnn_opt)
#print('--- Optimizing for training')
tflearn.is_training(False)
act2 = self.opt(f, ob2, plot=FLAGS.adam_plot)
tflearn.is_training(True)
_, _, loss, td_error, _, _ = self._train(obs, act, rew, ob2, act2,
term2, weights,
log=FLAGS.summary,
global_step=self.t)
if FLAGS.use_per:
new_priorities = np.abs(td_error) + FLAGS.eps
self.rm.update_priorities(batch_idxes, new_priorities)
self.sess.run(self.proj)
return loss
def negQ(self, x, y, reuse=False):
szs = [FLAGS.l1size, FLAGS.l2size]
assert(len(szs) >= 1)
fc = tflearn.fully_connected
bn = tflearn.batch_normalization
lrelu = tflearn.activations.leaky_relu
if reuse:
tf.get_variable_scope().reuse_variables()
nLayers = len(szs)
us = []
zs = []
z_zs = []
z_ys = []
z_us = []
reg = 'L2'
prevU = x
for i in range(nLayers):
with tf.variable_scope('u'+str(i)) as s:
u = fc(prevU, szs[i], reuse=reuse, scope=s, regularizer=reg)
if i < nLayers-1:
u = tf.nn.relu(u)
if FLAGS.icnn_bn:
u = bn(u, reuse=reuse, scope=s, name='bn')
variable_summaries(u, suffix='u{}'.format(i))
us.append(u)
prevU = u
prevU, prevZ = x, y
for i in range(nLayers+1):
sz = szs[i] if i < nLayers else 1
z_add = []
if i > 0:
with tf.variable_scope('z{}_zu_u'.format(i)) as s:
zu_u = fc(prevU, szs[i-1], reuse=reuse, scope=s,
activation='relu', bias=True,
regularizer=reg, bias_init=tf.constant_initializer(1.))
variable_summaries(zu_u, suffix='zu_u{}'.format(i))
with tf.variable_scope('z{}_zu_proj'.format(i)) as s:
z_zu = fc(tf.multiply(prevZ, zu_u), sz, reuse=reuse, scope=s,
bias=False, regularizer=reg)
variable_summaries(z_zu, suffix='z_zu{}'.format(i))
z_zs.append(z_zu)
z_add.append(z_zu)
with tf.variable_scope('z{}_yu_u'.format(i)) as s:
yu_u = fc(prevU, self.dimA, reuse=reuse, scope=s, bias=True,
regularizer=reg, bias_init=tf.constant_initializer(1.))
| |
from __future__ import with_statement
import time
import pygame
import numpy
import zlib
from . import googlemapindex
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
from .Resources.header import imgheader
from . import render_lib
from .loadbar import Bar
from .shared import cores
import collections
import traceback
import os
from .tinterface import *
minimap_limits = 0.4, 0.2
defaultres = [1024, 768]
def load(tiles=None, walls=None, colors=None, wallcolors=None):
from .shared import appdata
shared = os.path.join(appdata, "tImages.zip")
ziploc = None
if os.path.exists("tImages.zip"):
ziploc = "tImages.zip"
elif os.path.exists(shared):
ziploc = shared
if ziploc:
from .Resources import ResourceManager
manager = ResourceManager(ziploc)
def load_content(imagename):
base, _ = os.path.splitext(imagename)
return manager.get_pygame_image(base + ".img")
else:
def load_content(imagename):
return pygame.image.load(os.path.join("tImages", imagename))
npc_tex = {}
pink = pygame.surface.Surface((500, 500))
pink.fill((255, 0, 255))
bind = {"Merchant": "NPC_17.png",
"Nurse": "NPC_18.png",
"Arms Dealer": "NPC_19.png",
"Dryad": "NPC_20.png",
"Guide": "NPC_22.png",
"Old Man": "NPC_37.png",
"Demolitionist": "NPC_38.png",
"Clothier": "NPC_54.png",
"Wizard": "NPC_108.png",
"Mechanic": "NPC_124.png",
"Goblin Tinkerer": "NPC_107.png",
}
for name in bind:
npc_tex[name] = load_content(bind[name])
tex = [pink] * 65537
x = 0
while 1:
try:
tex[x] = load_content("Tiles_" + str(x) + ".png") # .convert_alpha()
except pygame.error:
break
except KeyError:
break
x += 1
if tiles is not None:
for t in range(x - 1, len(tiles) + 1):
if t in colors:
s = pygame.surface.Surface((500, 500))
s.fill(colors[t])
tex[t] = s
walltex = [pink] * 65537
x = 1
while 1:
try:
walltex[x] = load_content("Wall_" + str(x) + ".png")
except pygame.error:
break
except KeyError:
break
x += 1
if walls is not None:
for w in wallcolors:
if not walltex[w]:
print(w)
s = pygame.surface.Surface((500, 500))
s.fill(wallcolors[w])
walltex[w] = s
air = load_content("Background_0.png")
gborder = load_content("Background_1.png")
rborder = load_content("Background_4.png")
gfill = load_content("Background_2.png")
rfill = load_content("Background_3.png")
return tex, walltex, npc_tex, air, gborder, rborder, gfill, rfill
def adjust_minimap(target_rel_size, resolution, base_image):
mi_size = base_image.get_size()
mi_scale = 1
scaled_limits = [x * y for x, y in zip(minimap_limits, resolution)]
if mi_size[0] > scaled_limits[0]:
mi_scale = 1 / (mi_size[0] / scaled_limits[0])
if mi_size[1] > scaled_limits[1]:
mi_scale = min(1 / (mi_size[1] / scaled_limits[1]), mi_scale)
if mi_scale != 1:
print("Scaling minimap with factor " + str(mi_scale))
mapimage = pygame.transform.rotozoom(base_image, 0, mi_scale)
return mapimage, mi_scale
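# Worked example with illustrative numbers: minimap_limits = (0.4, 0.2) and a
# 1024x768 window allow at most 409.6x153.6 pixels; an 840x240 source image
# then gets mi_scale = min(409.6/840, 153.6/240) ~= 0.49 and is shrunk to fit.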
def run(path, mapping, data = None, mappingfolder=None):
if data: header, pos = data
else:
with path.open("rb") as f:
header, _, sectiondata = get_header(f)
if sectiondata:
pos = sectiondata["sections"][1]
else:
pos = f.tell()
pygame.init()
pygame.display.init()
threadpool = ThreadPoolExecutor(cores)
texture_loader = threadpool.submit(load)
try:
imageloc = get_myterraria() / "WorldImages" / path.with_suffix('.png').name
base_image = pygame.image.load(str(imageloc))
mapimage, mi_scale = adjust_minimap(minimap_limits, defaultres, base_image)
mi_size = mapimage.get_size()
except:
print("Cannot load minimap:")
traceback.print_exc()
mapimage = None
start = time.clock()
f = path.open("rb")
f.seek(pos)
get = get_tile_buffered_12_masked if header["version"] > 100 else get_tile_buffered
pygame.display.set_caption("Loading World..")
loadbar_width = 200
if mapimage:
if mi_size[0] > 200: loadbar_width = mi_size[0]
surface = pygame.display.set_mode((loadbar_width, 20 + mi_size[1]))
pygame.display.update(surface.blit(mapimage, (0, 20)))
else:
surface = pygame.display.set_mode((loadbar_width, 20))
skip = False
if not skip:
print("loading and converting world data")
rect = pygame.Rect(0, 0, 0, 20)
tup = (rect,)
tiles = numpy.empty((header["width"], header["height"]), dtype=tuple)
w, h = header["width"], header["height"]
for xi in range(w): # for each slice
yi = 0
while yi < h: # get the tiles
data, b = get(f)
tiles[xi, yi:yi + b] = (data,) * b
yi += b
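# (b is a run length: the world file stores vertical runs of identical
# tiles, so a single read can fill several cells of the column at once)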
if xi % 16 == 0:
rect.w = int(xi * loadbar_width / w)
pygame.draw.rect(surface, (200, 200, 200), rect)
pygame.display.update(tup)
for event in pygame.event.get():
if event.type == 12:
pygame.quit()
import sys
sys.exit()
npcs = []
f.close()
rmap = numpy.random.randint(3, size=(header["width"], header["height"]))
blendmap = numpy.ones((header["width"], header["height"], 2), dtype=numpy.uint16)
wblendmap = numpy.ones((header["width"], header["height"], 2), dtype=numpy.uint16)
mid = time.clock()
print("World loaded: %5f seconds" % (mid - start))
tex, walltex, npc_tex, air, gborder, rborder, gfill, rfill = texture_loader.result()
spawn = header["spawn"]
clock = pygame.time.Clock()
pos = [spawn[0] * 16 - 256, spawn[1] * 16 - 256]
pygame.display.set_caption("Terraria World Render: {}".format(header["name"].decode()))
if mapping:
res = [1600, 1600]
area = [1600, 1600]
os.chdir("..")
else:
res = list(defaultres)
dis = pygame.display.set_mode(res, pygame.RESIZABLE)
s = pygame.surface.Surface(res)
def relmove(rel):
nonlocal s
nonlocal pos
nonlocal dirty
pos[0] -= rel[0]
pos[1] -= rel[1]
if abs(rel[0]) > res[0] or abs(rel[1]) > res[1]:
dirty = [pygame.rect.Rect(0, 0, res[0], res[1])]
else:
s.blit(s, rel)
if rel[0] > 0:
dirty.append(pygame.rect.Rect(0, 0, rel[0], res[1]))
elif rel[0] < 0:
dirty.append(pygame.rect.Rect(res[0] + rel[0], 0, -rel[0], res[1]))
if rel[1] > 0:
dirty.append(pygame.rect.Rect(0, 0, res[0], rel[1]))
elif rel[1] < 0:
dirty.append(pygame.rect.Rect(0, res[1] + rel[1], res[0], -rel[1]))
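# Scrolling strategy: rather than re-rendering the whole frame, the surface
# is blitted onto itself shifted by `rel`, and only the strips exposed at the
# edges (one vertical and/or one horizontal, depending on the drag direction)
# are marked dirty so the render loop redraws just those.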
print("initializing render loop...")
if mapping:
tempfiles = {}
if mappingfolder is None:
mappingfolder = Path("superimage")
print("Mapping into folder", mappingfolder)
tilefolder = mappingfolder / "tiles"
for folder in (mappingfolder, tilefolder):
if not folder.is_dir():
folder.mkdir()
mx = 0
my = 0
index = (mappingfolder / "index.html").open("wt")
index.write(googlemapindex.index)
index.write(header["name"].decode())
index.write(googlemapindex.index2)
index.close()
pos = [mx * res[0], my * res[1]]
caption = "Rendering {}".format(header["name"].decode())
loadingbar = Bar(caption=caption)
plates_x, plates_y = header["width"] * 16 // area[0], header["height"] * 16 // area[1]
plates_done = 0
plates = plates_x * plates_y
dirty = [pygame.rect.Rect(0, 0, res[0], res[1])]
render_lib.walltex = walltex
render_lib.tex = tex
render_lib.gborder = gborder
render_lib.gfill = gfill
render_lib.rborder = rborder
render_lib.rfill = rfill
wi, he = header["width"] * 16 - 64, header["height"] * 16 - 64
movemode = None
# movemodes:
MAP = 2
CURSOR = 1
while 1:
events = pygame.event.get()
for event in events:
if event.type == 12:
pygame.quit()
import sys
sys.exit()
elif event.type == 16:
res = event.size
dis = pygame.display.set_mode(res, pygame.RESIZABLE)
s = pygame.surface.Surface(res)
mapimage, mi_scale = adjust_minimap(minimap_limits, res, base_image)
mi_size = mapimage.get_size()
dirty.append(pygame.rect.Rect(0, 0, res[0], res[1]))
elif event.type == pygame.MOUSEBUTTONDOWN:
if mapimage and event.button == 1:
if event.pos[0] > (res[0] - mi_size[0]) and event.pos[1] < mi_size[1]:
movemode = MAP
else:
movemode = CURSOR
elif event.type == pygame.MOUSEBUTTONUP:
if event.button == 1:
movemode = None
if not mapping:
rel = pygame.mouse.get_rel()
if pygame.mouse.get_pressed()[0] and movemode:
if movemode == CURSOR:
if pygame.key.get_pressed()[pygame.K_SPACE]:
rel = rel[0] * 10, rel[1] * 10
if -rel[0] + pos[0] < 0:
rel = pos[0], rel[1]
elif -rel[0] + pos[0] + res[0] > header["width"] * 16 - 64:
rel = header["width"] * 16 - 64 - pos[0] - res[0], rel[1]
if -rel[1] + pos[1] < 0:
rel = rel[0], pos[1]
elif -rel[1] + pos[1] + res[1] > header["height"] * 16 - 64:
rel = rel[0], header["height"] * 16 - 64 - pos[1] - res[1]
relmove(rel)
else:
mpos = pygame.mouse.get_pos()
mpos = (mpos[0] - res[0] + mi_size[0]) * 16 / mi_scale, mpos[1] * 16 / mi_scale
rel = (pos[0] - mpos[0], pos[1] - mpos[1])
if any(rel): relmove(rel)
if len(dirty):
for rect in dirty:
try:
b = render_lib.render(pygame.surface.Surface(rect.size),
(pos[0] + rect.x, pos[1] + rect.y),
header, tiles, blendmap, wblendmap, rmap)
except Exception as e:
print("Rendering Problem. Likely out of bounds.")
traceback.print_exc()
else:
s.blit(b, rect.topleft)
try:
rect = pygame.rect.Rect(pos, res)
except TypeError:
print("Out of bounds NPC rendering attempt.")
else:
for npc in npcs:
if rect.collidepoint(npc[1]):
try:
target = (-pos[0] + npc[1][0], -pos[1] + npc[1][1] - 12)
s.blit(npc_tex[npc[0]], target, area=(0, 0, 40, 56))
except:
print("Warning: NPC of ID %d could not be rendered" % npc[0])
if mapping:
progtext = "%2dX|%2dY of %dX|%dY" % (mx, my, plates_x - 1, plates_y - 1)
loadingbar.set_progress(100 * plates_done / plates, caption + " " + progtext)
lasttask = threadpool.submit(store_surface, s, tempfiles, mx, my)
mx += 1
if mx * area[0] >= wi:
# index.write("</tr><tr>")
my += 1
mx = 0
if my * area[1] >= he:
lasttask.result()
splice_gmaps(threadpool, tilefolder, tempfiles, header["name"].decode())
threadpool.shutdown()
pygame.quit()
return
if (mx + 1) * area[0] > wi and not (mx * area[0] > wi):
res[0] = -mx * area[0] + wi
if (my + 1) * area[1] > he and not (my * area[1] > he):
res[1] = -my * area[1] + he
dirty = [pygame.rect.Rect(0, 0, res[0], res[1])]
if s.get_size() != res: s = pygame.surface.Surface(res)
res = [area[0], area[1]]
pos = [mx * res[0], my * res[1]]
plates_done += 1
lasttask.result()
else:
dirty = []
dis.blit(s, (0, 0))
if mapimage:
dis.blit(mapimage, (res[0] - mi_size[0], 0))
# draw minimap viewport borders:
bpos = pos[0] // 16, pos[1] // 16
topleft = bpos[0] * mi_scale + res[0] - mi_size[0], bpos[1] * mi_scale
viewsize = res[0] // 16 * mi_scale, res[1] // 16 * mi_scale
# netapp/santricity/models/symbol/fibre_interface.py
# coding: utf-8
"""
FibreInterface.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class FibreInterface(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
FibreInterface - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'channel': 'int', # (required parameter)
'loop_id': 'int', # (required parameter)
'speed': 'int', # (required parameter)
'hard_address': 'int', # (required parameter)
'node_name': 'str', # (required parameter)
'port_name': 'str', # (required parameter)
'port_id': 'str', # (required parameter)
'topology': 'str', # (required parameter)
'part': 'str', # (required parameter)
'revision': 'int', # (required parameter)
'chan_miswire': 'bool', # (required parameter)
'esm_miswire': 'bool', # (required parameter)
'link_status': 'str', # (required parameter)
'is_degraded': 'bool', # (required parameter)
'speed_control': 'str', # (required parameter)
'max_speed': 'int', # (required parameter)
'speed_neg_error': 'bool', # (required parameter)
'reserved1': 'str',
'reserved2': 'str',
'dds_channel_state': 'int', # (required parameter)
'dds_state_reason': 'int', # (required parameter)
'dds_state_who': 'int', # (required parameter)
'is_local': 'bool', # (required parameter)
'channel_ports': 'list[ChannelPorts]', # (required parameter)
'current_interface_speed': 'str', # (required parameter)
'maximum_interface_speed': 'str', # (required parameter)
'interface_ref': 'str', # (required parameter)
'physical_location': 'Location', # (required parameter)
'is_trunk_capable': 'bool', # (required parameter)
'trunk_miswire': 'bool', # (required parameter)
'protection_information_capable': 'bool', # (required parameter)
'one_way_max_rate': 'int', # (required parameter)
'bidirectional_max_rate': 'int', # (required parameter)
'id': 'str'
}
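# swagger_types maps each Python attribute to its declared type for
# (de)serialization; attribute_map below maps the same attributes to the
# camelCase JSON keys used on the wire.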
self.attribute_map = {
'channel': 'channel', # (required parameter)
'loop_id': 'loopID', # (required parameter)
'speed': 'speed', # (required parameter)
'hard_address': 'hardAddress', # (required parameter)
'node_name': 'nodeName', # (required parameter)
'port_name': 'portName', # (required parameter)
'port_id': 'portId', # (required parameter)
'topology': 'topology', # (required parameter)
'part': 'part', # (required parameter)
'revision': 'revision', # (required parameter)
'chan_miswire': 'chanMiswire', # (required parameter)
'esm_miswire': 'esmMiswire', # (required parameter)
'link_status': 'linkStatus', # (required parameter)
'is_degraded': 'isDegraded', # (required parameter)
'speed_control': 'speedControl', # (required parameter)
'max_speed': 'maxSpeed', # (required parameter)
'speed_neg_error': 'speedNegError', # (required parameter)
'reserved1': 'reserved1',
'reserved2': 'reserved2',
'dds_channel_state': 'ddsChannelState', # (required parameter)
'dds_state_reason': 'ddsStateReason', # (required parameter)
'dds_state_who': 'ddsStateWho', # (required parameter)
'is_local': 'isLocal', # (required parameter)
'channel_ports': 'channelPorts', # (required parameter)
'current_interface_speed': 'currentInterfaceSpeed', # (required parameter)
'maximum_interface_speed': 'maximumInterfaceSpeed', # (required parameter)
'interface_ref': 'interfaceRef', # (required parameter)
'physical_location': 'physicalLocation', # (required parameter)
'is_trunk_capable': 'isTrunkCapable', # (required parameter)
'trunk_miswire': 'trunkMiswire', # (required parameter)
'protection_information_capable': 'protectionInformationCapable', # (required parameter)
'one_way_max_rate': 'oneWayMaxRate', # (required parameter)
'bidirectional_max_rate': 'bidirectionalMaxRate', # (required parameter)
'id': 'id'
}
self._channel = None
self._loop_id = None
self._speed = None
self._hard_address = None
self._node_name = None
self._port_name = None
self._port_id = None
self._topology = None
self._part = None
self._revision = None
self._chan_miswire = None
self._esm_miswire = None
self._link_status = None
self._is_degraded = None
self._speed_control = None
self._max_speed = None
self._speed_neg_error = None
self._reserved1 = None
self._reserved2 = None
self._dds_channel_state = None
self._dds_state_reason = None
self._dds_state_who = None
self._is_local = None
self._channel_ports = None
self._current_interface_speed = None
self._maximum_interface_speed = None
self._interface_ref = None
self._physical_location = None
self._is_trunk_capable = None
self._trunk_miswire = None
self._protection_information_capable = None
self._one_way_max_rate = None
self._bidirectional_max_rate = None
self._id = None
@property
def channel(self):
"""
Gets the channel of this FibreInterface.
The channel number of this interface.
:return: The channel of this FibreInterface.
:rtype: int
:required/optional: required
"""
return self._channel
@channel.setter
def channel(self, channel):
"""
Sets the channel of this FibreInterface.
The channel number of this interface.
:param channel: The channel of this FibreInterface.
:type: int
"""
self._channel = channel
@property
def loop_id(self):
"""
Gets the loop_id of this FibreInterface.
The FC loop ID value used by the controller on this channel.
:return: The loop_id of this FibreInterface.
:rtype: int
:required/optional: required
"""
return self._loop_id
@loop_id.setter
def loop_id(self, loop_id):
"""
Sets the loop_id of this FibreInterface.
The FC loop ID value used by the controller on this channel.
:param loop_id: The loop_id of this FibreInterface.
:type: int
"""
self._loop_id = loop_id
@property
def speed(self):
"""
Gets the speed of this FibreInterface.
The speed of the interface in MB/sec. This field is deprecated. The field currentInterfaceSpeed should be used instead.
:return: The speed of this FibreInterface.
:rtype: int
:required/optional: required
"""
return self._speed
@speed.setter
def speed(self, speed):
"""
Sets the speed of this FibreInterface.
The speed of the interface in MB/sec. This field is deprecated. The field currentInterfaceSpeed should be used instead.
:param speed: The speed of this FibreInterface.
:type: int
"""
self._speed = speed
@property
def hard_address(self):
"""
Gets the hard_address of this FibreInterface.
The hard (preferred) address of the interface, in loop ID format.
:return: The hard_address of this FibreInterface.
:rtype: int
:required/optional: required
"""
return self._hard_address
@hard_address.setter
def hard_address(self, hard_address):
"""
Sets the hard_address of this FibreInterface.
The hard (preferred) address of the interface, in loop ID format.
:param hard_address: The hard_address of this FibreInterface.
:type: int
"""
self._hard_address = hard_address
@property
def node_name(self):
"""
Gets the node_name of this FibreInterface.
The unique identifier of the storage array node associated with this interface.
:return: The node_name of this FibreInterface.
:rtype: str
:required/optional: required
"""
return self._node_name
@node_name.setter
def node_name(self, node_name):
"""
Sets the node_name of this FibreInterface.
The unique identifier of the storage array node associated with this interface.
:param node_name: The node_name of this FibreInterface.
:type: str
"""
self._node_name = node_name
@property
def port_name(self):
"""
Gets the port_name of this FibreInterface.
The unique identifier of the Fibre Channel port associated with this interface.
:return: The port_name of this FibreInterface.
:rtype: str
:required/optional: required
"""
return self._port_name
@port_name.setter
def port_name(self, port_name):
"""
Sets the port_name of this FibreInterface.
The unique identifier of the Fibre Channel port associated with this interface.
:param port_name: The port_name of this FibreInterface.
:type: str
"""
self._port_name = port_name
@property
def port_id(self):
"""
Gets the port_id of this FibreInterface.
The NL_Port identifier assigned to this interface.
:return: The port_id of this FibreInterface.
:rtype: str
:required/optional: required
"""
return self._port_id
@port_id.setter
def port_id(self, port_id):
"""
Sets the port_id of this FibreInterface.
The NL_Port identifier assigned to this interface.
:param port_id: The port_id of this FibreInterface.
:type: str
"""
self._port_id = port_id
@property
def topology(self):
"""
Gets the topology of this FibreInterface.
The Fibre Channel topology type being used.
:return: The topology of this FibreInterface.
:rtype: str
:required/optional: required
"""
return self._topology
@topology.setter
def topology(self, topology):
"""
Sets the topology of this FibreInterface.
The Fibre Channel topology type being used.
:param topology: The topology of this FibreInterface.
:type: str
"""
self._topology = topology
@legacy_variation_id.setter
def legacy_variation_id(self, legacy_variation_id):
"""Sets the legacy_variation_id of this LineItem.
The unique identifier of a single variation within a multiple-variation listing. This field is only returned if the line item purchased was from a multiple-variation listing. # noqa: E501
:param legacy_variation_id: The legacy_variation_id of this LineItem. # noqa: E501
:type: str
"""
self._legacy_variation_id = legacy_variation_id
@property
def line_item_cost(self):
"""Gets the line_item_cost of this LineItem. # noqa: E501
:return: The line_item_cost of this LineItem. # noqa: E501
:rtype: Amount
"""
return self._line_item_cost
@line_item_cost.setter
def line_item_cost(self, line_item_cost):
"""Sets the line_item_cost of this LineItem.
:param line_item_cost: The line_item_cost of this LineItem. # noqa: E501
:type: Amount
"""
self._line_item_cost = line_item_cost
@property
def line_item_fulfillment_instructions(self):
"""Gets the line_item_fulfillment_instructions of this LineItem. # noqa: E501
:return: The line_item_fulfillment_instructions of this LineItem. # noqa: E501
:rtype: LineItemFulfillmentInstructions
"""
return self._line_item_fulfillment_instructions
@line_item_fulfillment_instructions.setter
def line_item_fulfillment_instructions(self, line_item_fulfillment_instructions):
"""Sets the line_item_fulfillment_instructions of this LineItem.
:param line_item_fulfillment_instructions: The line_item_fulfillment_instructions of this LineItem. # noqa: E501
:type: LineItemFulfillmentInstructions
"""
self._line_item_fulfillment_instructions = line_item_fulfillment_instructions
@property
def line_item_fulfillment_status(self):
"""Gets the line_item_fulfillment_status of this LineItem. # noqa: E501
This enumeration value indicates the current fulfillment status of the line item. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/sel:LineItemFulfillmentStatusEnum'>eBay API documentation</a> # noqa: E501
:return: The line_item_fulfillment_status of this LineItem. # noqa: E501
:rtype: str
"""
return self._line_item_fulfillment_status
@line_item_fulfillment_status.setter
def line_item_fulfillment_status(self, line_item_fulfillment_status):
"""Sets the line_item_fulfillment_status of this LineItem.
This enumeration value indicates the current fulfillment status of the line item. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/sel:LineItemFulfillmentStatusEnum'>eBay API documentation</a> # noqa: E501
:param line_item_fulfillment_status: The line_item_fulfillment_status of this LineItem. # noqa: E501
:type: str
"""
self._line_item_fulfillment_status = line_item_fulfillment_status
@property
def line_item_id(self):
"""Gets the line_item_id of this LineItem. # noqa: E501
This is the unique identifier of an eBay order line item. This field is created as soon as there is a commitment to buy from the seller. # noqa: E501
:return: The line_item_id of this LineItem. # noqa: E501
:rtype: str
"""
return self._line_item_id
@line_item_id.setter
def line_item_id(self, line_item_id):
"""Sets the line_item_id of this LineItem.
This is the unique identifier of an eBay order line item. This field is created as soon as there is a commitment to buy from the seller. # noqa: E501
:param line_item_id: The line_item_id of this LineItem. # noqa: E501
:type: str
"""
self._line_item_id = line_item_id
@property
def listing_marketplace_id(self):
"""Gets the listing_marketplace_id of this LineItem. # noqa: E501
The unique identifier of the eBay marketplace where the line item was listed. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/ba:MarketplaceIdEnum'>eBay API documentation</a> # noqa: E501
:return: The listing_marketplace_id of this LineItem. # noqa: E501
:rtype: str
"""
return self._listing_marketplace_id
@listing_marketplace_id.setter
def listing_marketplace_id(self, listing_marketplace_id):
"""Sets the listing_marketplace_id of this LineItem.
The unique identifier of the eBay marketplace where the line item was listed. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/ba:MarketplaceIdEnum'>eBay API documentation</a> # noqa: E501
:param listing_marketplace_id: The listing_marketplace_id of this LineItem. # noqa: E501
:type: str
"""
self._listing_marketplace_id = listing_marketplace_id
@property
def properties(self):
"""Gets the properties of this LineItem. # noqa: E501
:return: The properties of this LineItem. # noqa: E501
:rtype: LineItemProperties
"""
return self._properties
@properties.setter
def properties(self, properties):
"""Sets the properties of this LineItem.
:param properties: The properties of this LineItem. # noqa: E501
:type: LineItemProperties
"""
self._properties = properties
@property
def purchase_marketplace_id(self):
"""Gets the purchase_marketplace_id of this LineItem. # noqa: E501
The unique identifier of the eBay marketplace where the purchase took place. Often, the listingMarketplaceId and the purchaseMarketplaceId identifiers are the same, but there are occasions when an item will surface on multiple eBay marketplaces. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/ba:MarketplaceIdEnum'>eBay API documentation</a> # noqa: E501
:return: The purchase_marketplace_id of this LineItem. # noqa: E501
:rtype: str
"""
return self._purchase_marketplace_id
@purchase_marketplace_id.setter
def purchase_marketplace_id(self, purchase_marketplace_id):
"""Sets the purchase_marketplace_id of this LineItem.
The unique identifier of the eBay marketplace where the purchase took place. Often, the listingMarketplaceId and the purchaseMarketplaceId identifiers are the same, but there are occasions when an item will surface on multiple eBay marketplaces. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/ba:MarketplaceIdEnum'>eBay API documentation</a> # noqa: E501
:param purchase_marketplace_id: The purchase_marketplace_id of this LineItem. # noqa: E501
:type: str
"""
self._purchase_marketplace_id = purchase_marketplace_id
@property
def quantity(self):
"""Gets the quantity of this LineItem. # noqa: E501
The number of units of the line item in the order. These are represented as a group by a single lineItemId. # noqa: E501
:return: The quantity of this LineItem. # noqa: E501
:rtype: int
"""
return self._quantity
@quantity.setter
def quantity(self, quantity):
"""Sets the quantity of this LineItem.
The number of units of the line item in the order. These are represented as a group by a single lineItemId. # noqa: E501
:param quantity: The quantity of this LineItem. # noqa: E501
:type: int
"""
self._quantity = quantity
@property
def refunds(self):
"""Gets the refunds of this LineItem. # noqa: E501
This array is always returned, but is returned as an empty array unless the seller has submitted a partial or full refund to the buyer for the order. If a refund has occurred, the refund amount and refund date will be shown for each refund. # noqa: E501
:return: The refunds of this LineItem. # noqa: E501
:rtype: list[LineItemRefund]
"""
return self._refunds
@refunds.setter
def refunds(self, refunds):
"""Sets the refunds of this LineItem.
This array is always returned, but is returned as an empty array unless the seller has submitted a partial or full refund to the buyer for the order. If a refund has occurred, the refund amount and refund date will be shown for each refund. # noqa: E501
:param refunds: The refunds of this LineItem. # noqa: E501
:type: list[LineItemRefund]
"""
self._refunds = refunds
@property
def sku(self):
"""Gets the sku of this LineItem. # noqa: E501
Seller-defined Stock-Keeping Unit (SKU). This inventory identifier must be unique within the seller's eBay inventory. SKUs are optional when listing in the legacy/Trading API system, but SKUs are required when listing items through the Inventory API model. # noqa: E501
:return: The sku of this LineItem. # noqa: E501
:rtype: str
"""
return self._sku
@sku.setter
def sku(self, sku):
"""Sets the sku of this LineItem.
Seller-defined Stock-Keeping Unit (SKU). This inventory identifier must be unique within the seller's eBay inventory. SKUs are optional when listing in the legacy/Trading API system, but SKUs are required when listing items through the Inventory API model. # noqa: E501
:param sku: The sku of this LineItem. # noqa: E501
:type: str
"""
self._sku = sku
@property
def sold_format(self):
"""Gets the sold_format of this LineItem. # noqa: E501
The eBay listing type of the line item. The most common listing types are AUCTION and FIXED_PRICE. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/sel:SoldFormatEnum'>eBay API documentation</a> # noqa: E501
:return: The sold_format of this LineItem. # noqa: E501
:rtype: str
"""
return self._sold_format
@sold_format.setter
def sold_format(self, sold_format):
"""Sets the sold_format of this LineItem.
The eBay listing type of the line item. The most common listing types are AUCTION and FIXED_PRICE. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/fulfillment/types/sel:SoldFormatEnum'>eBay API documentation</a> # noqa: E501
:param sold_format: The sold_format of this LineItem. # noqa: E501
:type: str
"""
self._sold_format = sold_format
@property
def taxes(self):
"""Gets the taxes of this LineItem. # noqa: E501
Contains a list of taxes applied to the line item, if any. This array is always returned, but will be returned as empty if no taxes are applicable to the line item. # noqa: E501
:return: The taxes of this LineItem. # noqa: E501
:rtype: list[Tax]
"""
return self._taxes
@taxes.setter
def taxes(self, taxes):
"""Sets the taxes of this LineItem.
Contains a list of taxes applied to the line item, if any. This array is always returned, but will be returned as empty if no taxes are applicable to the line item. # noqa: E501
:param taxes: The taxes of this LineItem. # noqa: E501
:type: list[Tax]
"""
self._taxes = taxes
'''
Functions and objects shared between other modules of Mcblend.
'''
from __future__ import annotations
from ctypes import c_int
import math
from enum import Enum
from typing import (
Deque, Dict, Iterator, NamedTuple, List, Optional, Tuple, Any, Iterable, Sequence)
from collections import deque
import numpy as np
import bpy_types
import bpy
import mathutils
from .texture_generator import Mask, ColorMask, get_masks_from_side
from .exception import ExporterException
MINECRAFT_SCALE_FACTOR = 16
'''The scale conversion from Blender to Minecraft (16 units == 1 meter).'''
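# For example, a cube that is 1.0 m wide in Blender exports as 16 units wide
# in the Minecraft model; a 0.5 m cube exports as 8 units.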
class AnimationLoopType(Enum):
'''The types of the loop property from Minecraft animations'''
TRUE = 'true'
FALSE = 'false'
HOLD_ON_LAST_FRAME = 'hold_on_last_frame'
class MCObjType(Enum):
'''The types of Minecraft objects created from blender objects.'''
CUBE = 'CUBE'
BONE = 'BONE'
LOCATOR = 'LOCATOR'
class MeshType(Enum):
'''
Type of the exported mesh. Changes how this object is represented in the
exported model file.
'''
CUBE = 'Cube'
POLY_MESH = 'Poly Mesh'
class ObjectId(NamedTuple):
'''
Object that represents the unique ID of a blender object (bone, empty or mesh).
For meshes and empties:
- :code:`name` is the name of the object.
- :code:`bone_name` is just an empty string.
For bones:
- :code:`name` is the name of the armature that owns the bone.
- :code:`bone_name` is the name of the bone.
'''
name: str
bone_name: str
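# Illustrative examples (object names assumed): a mesh or empty is identified
# by its own name, while a bone is identified by its armature plus bone name:
#   ObjectId(name='my_cube', bone_name='')
#   ObjectId(name='my_armature', bone_name='my_bone')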
class McblendObject:
'''
A class that wraps Blender objects (meshes, empties and bones) and
provides access to various properties used by Mcblend.
:param thisobj_id: The :class:`ObjectId` that identifies this object.
:param thisobj: Blender object wrapped inside this object.
:param parentobj_id: The :class:`ObjectId` of the parent of this object.
:param children_ids: The list of :class:`ObjectId`s of the children of this
object.
:param mctype: The :class:`MCObjType` of this object.
:param group: The :class:`McblendObjectGroup` that stores all of the
:class:`McblendObject`s being processed with this object.
'''
def __init__(
self, thisobj_id: ObjectId, thisobj: bpy.types.Object,
parentobj_id: Optional[ObjectId], children_ids: List[ObjectId],
mctype: MCObjType, group: McblendObjectGroup):
self.thisobj_id = thisobj_id
self.thisobj: bpy.types.Object = thisobj
self.parentobj_id: Optional[ObjectId] = parentobj_id
self.children_ids: List[ObjectId] = children_ids
self.mctype: MCObjType = mctype
self.group = group
@property
def parent(self) -> Optional[McblendObject]:
'''Parent of this object.'''
try:
if self.parentobj_id is None:
return None
return self.group[self.parentobj_id]
except KeyError:
return None
@property
def children(self) -> Tuple[McblendObject, ...]:
'''
Children of this object from the :class:`McblendObjectGroup` of this
object.
'''
children: List[McblendObject] = []
for child_id in self.children_ids:
if child_id in self.group:
children.append(self.group[child_id])
return tuple(children)
@property
def inflate(self) -> float:
'''Inflate value of this object'''
return self.thisobj.mcblend.inflate
@inflate.setter
def inflate(self, inflate: float):
self.thisobj.mcblend.inflate = inflate
@property
def min_uv_size(self) -> np.ndarray:
'''The lower UV-size limit of this object.'''
return np.array(
self.thisobj.mcblend.min_uv_size)
@min_uv_size.setter
def min_uv_size(self, min_uv_size: np.ndarray):
self.thisobj.mcblend.min_uv_size = min_uv_size
@property
def mesh_type(self) -> MeshType:
'''Mesh type of this object'''
return MeshType(self.thisobj.mcblend.mesh_type)
@mesh_type.setter
def mesh_type(self, mesh_type: MeshType):
self.thisobj.mcblend.mesh_type = (
mesh_type.value)
@property
def mirror(self) -> bool:
'''Whether the object's UV is mirrored.'''
return self.thisobj.mcblend.mirror
@mirror.setter
def mirror(self, mirror: bool):
self.thisobj.mcblend.mirror = mirror
@property
def uv_group(self) -> str:
'''The name of the UV-group of this object.'''
return self.thisobj.mcblend.uv_group
@uv_group.setter
def uv_group(self, uv_group: str):
self.thisobj.mcblend.uv_group = uv_group
@property
def obj_data(self) -> Any:
'''
The "data" property of the blender object wrapped inside this object.
'''
return self.thisobj.data
@property
def this_pose_bone(self) -> bpy.types.PoseBone:
'''The pose bone of this object (doesn't work for non-bone objects)'''
return self.thisobj.pose.bones[self.thisobj_id.bone_name]
@property
def obj_name(self) -> str:
'''The name of this object used for exporting to Minecraft model.'''
if self.thisobj.type == 'ARMATURE':
return self.thisobj.pose.bones[
self.thisobj_id.bone_name
].name
return self.thisobj.name
@property
def obj_type(self) -> str:
'''
The type of the blender object wrapped inside this
object (ARMATURE, MESH or EMPTY).
'''
return self.thisobj.type
@property
def obj_bound_box(self) -> Any:
'''The bound_box of the blender object wrapped inside this object.'''
return self.thisobj.bound_box
@property
def obj_matrix_world(self) -> mathutils.Matrix:
'''
The copy of the translation matrix (matrix_world) of the blender object
wrapped inside this object.
'''
if self.thisobj.type == 'ARMATURE':
return self.thisobj.matrix_world.copy() @ self.thisobj.pose.bones[
self.thisobj_id.bone_name
].matrix.copy()
return self.thisobj.matrix_world.copy()
@property
def mcube_size(self) -> np.ndarray:
'''
The cube size in Minecraft format based on the bounding box of the
blender object wrapped inside this object.
'''
# 0. ---; 1. --+; 2. -++; 3. -+-; 4. +--; 5. +-+; 6. +++; 7. ++-
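# The [[0, 2, 1]] index swap below converts Blender's Z-up XYZ coordinates
# to Minecraft's Y-up XZY order.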
bound_box = self.obj_bound_box
return (np.array(bound_box[6]) - np.array(bound_box[0]))[[0, 2, 1]]
@property
def mccube_position(self) -> np.ndarray:
'''
The cube position in Minecraft format based on the bounding box of
the blender object wrapped inside this object.
'''
return np.array(self.obj_bound_box[0])[[0, 2, 1]]
@property
def mcpivot(self) -> np.ndarray:
'''
The pivot point of the Minecraft object exported using this object.
'''
def local_crds(
parent: McblendObject, child: McblendObject
) -> mathutils.Vector:
'''Local coordinates of child matrix inside parent matrix'''
# Applying the normalize() function to the world matrices of the parent
# and child is supposed to fix some errors with scaling, although tests
# don't show any difference.
# It does fix issue #62, so PLEASE don't change it again!
return child.get_local_matrix(
parent, normalize=True).to_translation()
def _get_mcpivot(objprop: McblendObject) -> mathutils.Vector:
if objprop.parent is not None:
result = local_crds(objprop.parent, objprop)
result += _get_mcpivot(objprop.parent)
else:
result = objprop.obj_matrix_world.to_translation()
return result
return np.array(_get_mcpivot(self).xzy)
def get_local_matrix(
self, other: Optional[McblendObject] = None, normalize: bool = False
) -> mathutils.Matrix:
'''
Returns the translation matrix of this object, optionally in the
translation space of the other :class:`McblendObject`.
:param other: Optional - the other :class:`McblendObject`
:param normalize: Whether to normalize the parent and child matrices
before calculating the relative matrix. This solves problems
related to different scales of parent and child transformations
(see GitHub issues #62 and #71).
:returns: translation matrix of this object.
'''
if other is not None:
p_matrix = other.obj_matrix_world
elif self.group.world_origin is not None:
p_matrix = self.group.get_world_origin_matrix()
else:
p_matrix = (
# pylint: disable=no-value-for-parameter
mathutils.Matrix()
)
c_matrix = self.obj_matrix_world
if normalize:
p_matrix.normalize()
c_matrix.normalize()
return p_matrix.inverted() @ c_matrix
def get_mcrotation(
self, other: Optional[McblendObject] = None
) -> np.ndarray:
'''
Returns the Minecraft rotation of this object optionally in relation
to the other :class:`McblendObject`.
:param other: Optional - the other :class:`McblendObject`.
:returns: numpy array with the rotation of this object in Minecraft
format.
'''
def local_rotation(
child_matrix: mathutils.Matrix, parent_matrix: mathutils.Matrix
) -> mathutils.Euler:
'''
Returns Euler rotation of a child matrix in relation to parent matrix
'''
child_q = child_matrix.normalized().to_quaternion()
parent_q = parent_matrix.inverted().normalized().to_quaternion()
return (parent_q @ child_q).to_euler('XZY')
if other is not None:
result_euler = local_rotation(
self.obj_matrix_world, other.obj_matrix_world
)
elif self.group.world_origin is not None:
result_euler = local_rotation(
self.obj_matrix_world,
self.group.get_world_origin_matrix()
)
else:
result_euler = self.obj_matrix_world.to_euler('XZY')
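# Reorder the Euler angles to Minecraft's XZY axis order, then flip the
# sign of the (new) second component to match Minecraft's convention.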
result: np.ndarray = np.array(result_euler)[[0, 2, 1]]
result = result * np.array([1, -1, 1])
result = result * 180/math.pi # math.degrees() for array
return result
def cube_polygons(self) -> CubePolygons:
'''
Returns the :class:`CubePolygons` of this object (always new copy of
the object).
'''
return CubePolygons.build(self.thisobj, self.mirror)
@property
def side1_uv_masks(self) -> Sequence[Mask]:
'''
Sequence of masks affecting the texture of side 1 of the cube of this
object.
'''
if self.uv_group == '':
return [ColorMask((0, 1, 0))]
uv_group = bpy.context.scene.mcblend_uv_groups[self.uv_group]
return get_masks_from_side(uv_group.side1)
@property
def side2_uv_masks(self) -> Sequence[Mask]:
'''
Sequence of masks affecting the texture of side 2 of the cube of this
object.
'''
if self.uv_group == '':
return [ColorMask((1, 0, 1))]
uv_group = bpy.context.scene.mcblend_uv_groups[self.uv_group]
return get_masks_from_side(uv_group.side2)
@property
def side3_uv_masks(self) -> Sequence[Mask]:
'''
Sequence of masks affecting the texture of side 3 of the cube of this
object.
'''
if self.uv_group == '':
return [ColorMask((1, 0, 0))]
uv_group = bpy.context.scene.mcblend_uv_groups[self.uv_group]
return get_masks_from_side(uv_group.side3)
@property
def side4_uv_masks(self) -> Sequence[Mask]:
'''
Sequence of masks affecting the texture of side 4 of the cube of this
object.
'''
if self.uv_group == '':
return [ColorMask((0, 1, 1))]
uv_group = bpy.context.scene.mcblend_uv_groups[self.uv_group]
return get_masks_from_side(uv_group.side4)
@property
def side5_uv_masks(self) -> Sequence[Mask]:
'''
Sequence of masks affecting the texture of side 5 of the cube of this
object.
'''
if self.uv_group == '':
return [ColorMask((0, 0, 1))]
uv_group = bpy.context.scene.mcblend_uv_groups[self.uv_group]
return get_masks_from_side(uv_group.side5)
@property
def side6_uv_masks(self) -> Sequence[Mask]:
'''
Sequence of masks affecting the texture of side 6 of the cube of this
object.
'''
if self.uv_group == '':
return [ColorMask((1, 1, 0))]
uv_group = bpy.context.scene.mcblend_uv_groups[self.uv_group]
masks = get_masks_from_side(uv_group.side6)
return masks
def find_lose_parts(self) -> Tuple[int, ...]:
'''
Finds loose parts of thisobj (must be a mesh).
'''
import copy
import inspect
import json
import logging
import pytest
import re
import os
import shutil
import subprocess
import time
from datetime import datetime, timedelta
from configparser import ConfigParser, ExtendedInterpolation
from typing import Dict, List, Optional
from pyhttpd.certs import CertificateSpec
from .md_cert_util import MDCertUtil
from pyhttpd.env import HttpdTestSetup, HttpdTestEnv
from pyhttpd.result import ExecResult
log = logging.getLogger(__name__)
class MDTestSetup(HttpdTestSetup):
def __init__(self, env: 'HttpdTestEnv'):
super().__init__(env=env)
def make(self):
super().make(add_modules=["proxy_connect", "md"])
if "pebble" == self.env.acme_server:
self._make_pebble_conf()
def _make_pebble_conf(self):
our_dir = os.path.dirname(inspect.getfile(MDTestSetup))
conf_src_dir = os.path.join(our_dir, 'pebble')
conf_dest_dir = os.path.join(self.env.gen_dir, 'pebble')
if not os.path.exists(conf_dest_dir):
os.makedirs(conf_dest_dir)
for name in os.listdir(conf_src_dir):
src_path = os.path.join(conf_src_dir, name)
m = re.match(r'(.+).template', name)
if m:
self._make_template(src_path, os.path.join(conf_dest_dir, m.group(1)))
elif os.path.isfile(src_path):
shutil.copy(src_path, os.path.join(conf_dest_dir, name))
class MDTestEnv(HttpdTestEnv):
MD_S_UNKNOWN = 0
MD_S_INCOMPLETE = 1
MD_S_COMPLETE = 2
MD_S_EXPIRED = 3
MD_S_ERROR = 4
EMPTY_JOUT = {'status': 0, 'output': []}
DOMAIN_SUFFIX = "%d.org" % time.time()
LOG_FMT_TIGHT = '%(levelname)s: %(message)s'
@classmethod
def get_acme_server(cls):
return os.environ['ACME'] if 'ACME' in os.environ else "pebble"
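# The backend is selected via the ACME environment variable, defaulting to
# "pebble" when unset, e.g.: ACME=boulder pytest ...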
@classmethod
def has_acme_server(cls):
return cls.get_acme_server() != 'none'
@classmethod
def has_acme_eab(cls):
return cls.get_acme_server() == 'pebble'
@classmethod
def is_pebble(cls) -> bool:
return cls.get_acme_server() == 'pebble'
@classmethod
def lacks_ocsp(cls):
return cls.is_pebble()
def __init__(self, pytestconfig=None, setup_dirs=True):
super().__init__(pytestconfig=pytestconfig,
local_dir=os.path.dirname(inspect.getfile(MDTestEnv)),
interesting_modules=["md"])
self._acme_server = self.get_acme_server()
self._acme_tos = "accepted"
self._acme_ca_pemfile = os.path.join(self.gen_dir, "apache/acme-ca.pem")
if "pebble" == self._acme_server:
self._acme_url = "https://localhost:14000/dir"
self._acme_eab_url = "https://localhost:14001/dir"
elif "boulder" == self._acme_server:
self._acme_url = "http://localhost:4001/directory"
self._acme_eab_url = None
else:
raise Exception(f"unknown ACME server type: {self._acme_server}")
self._acme_server_down = False
self._acme_server_ok = False
self._a2md_bin = os.path.join(self.bin_dir, 'a2md')
self._default_domain = f"test1.{self.http_tld}"
self._store_dir = "./md"
self.set_store_dir_default()
self.add_cert_specs([
CertificateSpec(domains=[f"expired.{self._http_tld}"],
valid_from=timedelta(days=-100),
valid_to=timedelta(days=-10)),
CertificateSpec(domains=["localhost"], key_type='rsa2048'),
])
self.httpd_error_log.set_ignored_lognos([
#"AH10045", # mod_md complains that there is no vhost for an MDomain
"AH10105", # mod_md does not find a vhost with SSL enabled for an MDomain
"AH10085" # mod_ssl complains about fallback certificates
])
if self.lacks_ocsp():
self.httpd_error_log.set_ignored_patterns([
re.compile(r'.*certificate with serial \S+ has no OCSP responder URL.*'),
])
if setup_dirs:
self._setup = MDTestSetup(env=self)
self._setup.make()
self.issue_certs()
self.clear_store()
def set_store_dir_default(self):
dirpath = "md"
if self.httpd_is_at_least("2.5.0"):
dirpath = os.path.join("state", dirpath)
self.set_store_dir(dirpath)
def set_store_dir(self, dirpath):
self._store_dir = os.path.join(self.server_dir, dirpath)
if self.acme_url:
self.a2md_stdargs([self.a2md_bin, "-a", self.acme_url, "-d", self._store_dir, "-C", self.acme_ca_pemfile, "-j"])
self.a2md_rawargs([self.a2md_bin, "-a", self.acme_url, "-d", self._store_dir, "-C", self.acme_ca_pemfile])
def get_apxs_var(self, name: str) -> str:
p = subprocess.run([self._apxs, "-q", name], capture_output=True, text=True)
if p.returncode != 0:
return ""
return p.stdout.strip()
@property
def acme_server(self):
return self._acme_server
@property
def acme_url(self):
return self._acme_url
@property
def acme_tos(self):
return self._acme_tos
@property
def a2md_bin(self):
return self._a2md_bin
@property
def acme_ca_pemfile(self):
return self._acme_ca_pemfile
@property
def store_dir(self):
return self._store_dir
def get_request_domain(self, request):
return "%s-%s" % (re.sub(r'[_]', '-', request.node.originalname), MDTestEnv.DOMAIN_SUFFIX)
def get_method_domain(self, method):
return "%s-%s" % (re.sub(r'[_]', '-', method.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
def get_module_domain(self, module):
return "%s-%s" % (re.sub(r'[_]', '-', module.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
def get_class_domain(self, c):
return "%s-%s" % (re.sub(r'[_]', '-', c.__name__.lower()), MDTestEnv.DOMAIN_SUFFIX)
# --------- cmd execution ---------
_a2md_args = []
_a2md_args_raw = []
def a2md_stdargs(self, args):
self._a2md_args = [] + args
def a2md_rawargs(self, args):
self._a2md_args_raw = [] + args
def a2md(self, args, raw=False) -> ExecResult:
preargs = self._a2md_args
if raw:
preargs = self._a2md_args_raw
log.debug("running: {0} {1}".format(preargs, args))
return self.run(preargs + args)
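# With raw=False the standard arguments (which include "-j" for JSON output)
# are used; raw=True runs a2md without the JSON flag.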
def check_acme(self):
if self._acme_server_ok:
return True
if self._acme_server_down:
pytest.skip(msg="ACME server not running")
return False
if self.is_live(self.acme_url, timeout=timedelta(seconds=0.5)):
self._acme_server_ok = True
return True
else:
self._acme_server_down = True
pytest.fail(msg="ACME server not running", pytrace=False)
return False
def get_ca_pem_file(self, hostname: str) -> Optional[str]:
pem_file = super().get_ca_pem_file(hostname)
if pem_file is None:
pem_file = self.acme_ca_pemfile
return pem_file
# --------- access local store ---------
def purge_store(self):
log.debug("purge store dir: %s" % self._store_dir)
assert len(self._store_dir) > 1
if os.path.exists(self._store_dir):
shutil.rmtree(self._store_dir, ignore_errors=False)
os.makedirs(self._store_dir)
def clear_store(self):
log.debug("clear store dir: %s" % self._store_dir)
assert len(self._store_dir) > 1
if not os.path.exists(self._store_dir):
os.makedirs(self._store_dir)
for dirpath in ["challenges", "tmp", "archive", "domains", "accounts", "staging", "ocsp"]:
shutil.rmtree(os.path.join(self._store_dir, dirpath), ignore_errors=True)
def clear_ocsp_store(self):
assert len(self._store_dir) > 1
dirpath = os.path.join(self._store_dir, "ocsp")
log.debug("clear ocsp store dir: %s" % dir)
if os.path.exists(dirpath):
shutil.rmtree(dirpath, ignore_errors=True)
def authz_save(self, name, content):
dirpath = os.path.join(self._store_dir, 'staging', name)
os.makedirs(dirpath)
open(os.path.join(dirpath, 'authz.json'), "w").write(content)
def path_store_json(self):
return os.path.join(self._store_dir, 'md_store.json')
def path_account(self, acct):
return os.path.join(self._store_dir, 'accounts', acct, 'account.json')
def path_account_key(self, acct):
return os.path.join(self._store_dir, 'accounts', acct, 'account.pem')
def store_domains(self):
return os.path.join(self._store_dir, 'domains')
def store_archives(self):
return os.path.join(self._store_dir, 'archive')
def store_stagings(self):
return os.path.join(self._store_dir, 'staging')
def store_challenges(self):
return os.path.join(self._store_dir, 'challenges')
def store_domain_file(self, domain, filename):
return os.path.join(self.store_domains(), domain, filename)
def store_archived_file(self, domain, version, filename):
return os.path.join(self.store_archives(), "%s.%d" % (domain, version), filename)
def store_staged_file(self, domain, filename):
return os.path.join(self.store_stagings(), domain, filename)
def path_fallback_cert(self, domain):
return os.path.join(self._store_dir, 'domains', domain, 'fallback-pubcert.pem')
def path_job(self, domain):
return os.path.join(self._store_dir, 'staging', domain, 'job.json')
def replace_store(self, src):
shutil.rmtree(self._store_dir, ignore_errors=False)
shutil.copytree(src, self._store_dir)
def list_accounts(self):
return os.listdir(os.path.join(self._store_dir, 'accounts'))
def check_md(self, domain, md=None, state=-1, ca=None, protocol=None, agreement=None, contacts=None):
domains = None
if isinstance(domain, list):
domains = domain
domain = domains[0]
if md:
domain = md
path = self.store_domain_file(domain, 'md.json')
with open(path) as f:
md = json.load(f)
assert md
if domains:
assert md['domains'] == domains
if state >= 0:
assert md['state'] == state
if ca:
assert md['ca']['url'] == ca
if protocol:
assert md['ca']['proto'] == protocol
if agreement:
assert md['ca']['agreement'] == agreement
if contacts:
assert md['contacts'] == contacts
def pkey_fname(self, pkeyspec=None):
if pkeyspec and not re.match(r'^rsa( ?\d+)?$', pkeyspec.lower()):
return "privkey.{0}.pem".format(pkeyspec)
return 'privkey.pem'
def cert_fname(self, pkeyspec=None):
if pkeyspec and not re.match(r'^rsa( ?\d+)?$', pkeyspec.lower()):
return "pubcert.{0}.pem".format(pkeyspec)
return 'pubcert.pem'
def check_md_complete(self, domain, pkey=None):
md = self.get_md_status(domain)
assert md
assert 'state' in md, "md is unexpected: {0}".format(md)
assert md['state'] == MDTestEnv.MD_S_COMPLETE, "unexpected state: {0}".format(md['state'])
assert os.path.isfile(self.store_domain_file(domain, self.pkey_fname(pkey)))
assert os.path.isfile(self.store_domain_file(domain, self.cert_fname(pkey)))
def check_md_credentials(self, domain):
if isinstance(domain, list):
domains = domain
domain = domains[0]
else:
domains = [domain]
# check private key, validate certificate, etc
MDCertUtil.validate_privkey(self.store_domain_file(domain, 'privkey.pem'))
cert = MDCertUtil(self.store_domain_file(domain, 'pubcert.pem'))
cert.validate_cert_matches_priv_key(self.store_domain_file(domain, 'privkey.pem'))
# check SANs and CN
assert cert.get_cn() == domain
# compare lists twice in opposite directions: SAN may not respect ordering
san_list = list(cert.get_san_list())
assert len(san_list) == len(domains)
assert set(san_list).issubset(domains)
assert set(domains).issubset(san_list)
# check valid dates interval
not_before = cert.get_not_before()
not_after = cert.get_not_after()
assert not_before < datetime.now(not_before.tzinfo)
assert not_after > datetime.now(not_after.tzinfo)
# --------- check utilities ---------
def check_json_contains(self, actual, expected):
# write all expected key:value bindings to a copy of the actual data ...
# ... assert it stays unchanged
test_json = copy.deepcopy(actual)
test_json.update(expected)
assert actual == test_json
def check_file_access(self, path, exp_mask):
actual_mask = os.lstat(path).st_mode & 0o777
assert oct(actual_mask) == oct(exp_mask)
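# Example: check_file_access(path, 0o600) asserts owner read/write only.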
def check_dir_empty(self, path):
assert os.listdir(path) == []
def get_http_status(self, domain, path, use_https=True):
r = self.get_meta(domain, path, use_https, insecure=True)
return r.response['status']
def get_cert(self, domain, tls=None, ciphers=None):
return MDCertUtil.load_server_cert(self._httpd_addr, self.https_port,
domain, tls=tls, ciphers=ciphers)
def get_server_cert(self, domain, proto=None, ciphers=None):
args = [
"openssl", "s_client", "-status",
"-connect", "%s:%s" % (self._httpd_addr, self.https_port),
"-CAfile", self.acme_ca_pemfile,
"-servername", domain,
"-showcerts"
]
if proto is not None:
args.extend(["-{0}".format(proto)])
if ciphers is not None:
args.extend(["-cipher", ciphers])
r = self.run(args)
# noinspection PyBroadException
try:
return MDCertUtil.parse_pem_cert(r.stdout)
except:
return None
def verify_cert_key_lengths(self, domain, pkeys):
for p in pkeys:
cert = self.get_server_cert(domain, proto="tls1_2", ciphers=p['ciphers'])
if 0 == p['keylen']:
assert cert is None
else:
assert cert, "no cert returned for cipher: {0}".format(p['ciphers'])
assert cert.get_key_length() == p['keylen'], "key length, expected {0}, got {1}".format(
p['keylen'], cert.get_key_length()
)
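# Illustrative pkeys shape (values assumed):
#   [{'ciphers': 'ECDHE-RSA-AES256-GCM-SHA384', 'keylen': 2048},
#    {'ciphers': 'ECDHE-ECDSA-AES256-GCM-SHA384', 'keylen': 0}]
# A keylen of 0 asserts that no certificate is served for those ciphers.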
def get_meta(self, domain, path, use_https=True, insecure=False):
schema = "https" if use_https else "http"
port = self.https_port if use_https else self.http_port
r = self.curl_get(f"{schema}://{domain}:{port}{path}", insecure=insecure)
assert r.exit_code == 0
assert r.response
assert r.response['header']
return r
def get_content(self, domain, path, use_https=True):
schema = "https" if use_https else "http"
port = self.https_port if use_https else self.http_port
r = self.curl_get(f"{schema}://{domain}:{port}{path}")
assert r.exit_code == 0
return r.stdout
def get_json_content(self, domain, path, use_https=True, insecure=False,
debug_log=True):
schema = "https" if use_https else "http"
port = self.https_port if use_https else self.http_port
url = f"{schema}://{domain}:{port}{path}"
r = self.curl_get(url, insecure=insecure, debug_log=debug_log)
if r.exit_code != 0:
log.error(f"curl get on {url} returned {r.exit_code}"
f"\nstdout: {r.stdout}"
f"\nstderr: {r.stderr}")
assert r.exit_code == 0, r.stderr
return r.json
def get_certificate_status(self, domain) -> Dict:
return self.get_json_content(domain, "/.httpd/certificate-status", insecure=True)
def get_md_status(self, domain, via_domain=None, use_https=True, debug_log=False) -> Dict:
if via_domain is None:
via_domain = self._default_domain
return self.get_json_content(via_domain, f"/md-status/{domain}",
use_https=use_https, debug_log=debug_log)
def get_server_status(self, query="/", via_domain=None, use_https=True):
if via_domain is None:
via_domain = self._default_domain
return self.get_content(via_domain, "/server-status%s" % query, use_https=use_https)
def await_completion(self, names, must_renew=False, restart=True, timeout=60,
via_domain=None, use_https=True):
try_until = time.time() + timeout
renewals = {}
names = names.copy()
while len(names) > 0:
if time.time() >= try_until:
return False
for name in names:
mds = self.get_md_status(name, via_domain=via_domain, use_https=use_https)
#!/usr/bin/env python
# polarizer.py - add Drude oscillators to LAMMPS data file.
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# version 2017/02/08
import sys
import argparse
import random
from copy import deepcopy
usage = """Add Drude oscillators to LAMMPS data file.
Format of file containing specification of Drude oscillators:
# type dm/u dq/e k/(kJ/molA2) alpha/A3 thole
C3H 1.0 0.0 4184.0 2.051 2.6
...
* dm is the mass to place on the Drude particle (taken from its core),
* dq is the charge to place on the Drude particle (taken from its core),
* k is the harmonic force constant of the bond between core and Drude,
* alpha is the polarizability,
* thole is a parameter of the Thole damping function.
A Drude particle is created for each atom in the LAMMPS data file
that corresponds to an atom type given in the Drude file.
Since LAMMPS uses numbers for atom types in the data file, a comment
after each line in the Masses section has to be introduced to allow
identification of the atom types within the force field database:
Masses
1 12.011 # C3H
2 12.011 # CTO
...
This script will add new atom types, new bond types, new atoms and
new bonds to the data file.
It will also generate some commands to be included in the LAMMPS input script,
which are related to the topology and force field, namely fix drude,
pair_style and pair_coeff commands. For information on thermostating please
read the documentation of the USER-DRUDE package.
This tool can also be used to revert a Drude-polarized data file to a
non-polarizable one.
"""
# keywords of header and main sections (from data.py in Pizza.py)
hkeywords = ["atoms", "ellipsoids", "lines", "triangles", "bodies",
"bonds", "angles", "dihedrals", "impropers",
"atom types", "bond types", "angle types", "dihedral types",
"improper types", "xlo xhi", "ylo yhi", "zlo zhi", "xy xz yz"]
skeywords = [["Masses", "atom types"],
["Pair Coeffs", "atom types"],
["Bond Coeffs", "bond types"], ["Angle Coeffs", "angle types"],
["Dihedral Coeffs", "dihedral types"],
["Improper Coeffs", "improper types"],
["BondBond Coeffs", "angle types"],
["BondAngle Coeffs", "angle types"],
["MiddleBondTorsion Coeffs", "dihedral types"],
["EndBondTorsion Coeffs", "dihedral types"],
["AngleTorsion Coeffs", "dihedral types"],
["AngleAngleTorsion Coeffs", "dihedral types"],
["BondBond13 Coeffs", "dihedral types"],
["AngleAngle Coeffs", "improper types"],
["Atoms", "atoms"], ["Velocities", "atoms"],
["Ellipsoids", "ellipsoids"],
["Lines", "lines"], ["Triangles", "triangles"],
["Bodies", "bodies"],
["Bonds", "bonds"],
["Angles", "angles"], ["Dihedrals", "dihedrals"],
["Impropers", "impropers"], ["Molecules", "atoms"]]
def massline(att):
return "{0:4d} {1:8.3f} # {2}\n".format(att['id'], att['m'], att['type'])
def bdtline(bdt):
return "{0:4d} {1:12.6f} {2:12.6f} {3}\n".format(bdt['id'], bdt['k'],
bdt['r0'], bdt['note'])
def atomline(at):
return "{0:7d} {1:7d} {2:4d} {3:8.4f} {4:13.6e} {5:13.6e} {6:13.6e} "\
" {7}\n".format(at['n'], at['mol'], at['id'], at['q'],
at['x'], at['y'], at['z'], at['note'])
def bondline(bd):
return "{0:7d} {1:4d} {2:7d} {3:7d} {4}\n".format(bd['n'], bd['id'],
bd['i'], bd['j'], bd['note'])
def velline(at):
return "{0:7d} {1:13.6e} {2:13.6e} {3:13.6e} \n".format(at['n'],
at['vx'], at['vy'], at['vz'])
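# Illustrative: massline({'id': 1, 'm': 12.011, 'type': 'C3H'}) returns
# "   1   12.011 # C3H\n", matching the Masses section format shown above.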
# --------------------------------------
class Data(object):
def __init__(self, datafile):
'''read LAMMPS data file (from data.py in Pizza.py)'''
# for extract method
self.atomtypes = []
self.bondtypes = []
self.atoms = []
self.bonds = []
self.idmap = {}
self.nselect = 1
f = open(datafile, "r")
self.title = f.readline()
self.names = {}
headers = {}
while 1:
line = f.readline().strip()
if '#' in line:
line = line[:line.index('#')].strip()
if len(line) == 0:
continue
found = 0
for keyword in hkeywords:
if keyword in line:
found = 1
words = line.split()
if keyword == "xlo xhi" or keyword == "ylo yhi" or \
keyword == "zlo zhi":
headers[keyword] = (float(words[0]), float(words[1]))
elif keyword == "xy xz yz":
headers[keyword] = \
(float(words[0]), float(words[1]), float(words[2]))
else:
headers[keyword] = int(words[0])
if not found:
break
sections = {}
while 1:
if len(line) > 0:
found = 0
for pair in skeywords:
keyword, length = pair[0], pair[1]
if keyword == line:
found = 1
if length not in headers:
raise RuntimeError("data section {} "\
"has no matching header value".format(line))
f.readline()
list_ = []
for _ in range(headers[length]):
list_.append(f.readline())
sections[keyword] = list_
if not found:
raise RuntimeError("invalid section {} in data"\
" file".format(line))
#f.readline()
line = f.readline()
if not line:
break
if '#' in line:
line = line[:line.index('#')]
line = line.strip()
f.close()
self.headers = headers
self.sections = sections
def write(self, filename):
'''write out a LAMMPS data file (from data.py in Pizza.py)'''
with open(filename, "w") as f:
f.write(self.title + '\n')
for keyword in hkeywords:
if keyword in self.headers:
if keyword == "xlo xhi" or keyword == "ylo yhi" or \
keyword == "zlo zhi":
f.write("{0:f} {1:f} {2}\n".format(
self.headers[keyword][0],
self.headers[keyword][1], keyword))
elif keyword == "xy xz yz":
f.write("{0:f} {1:f} {2:f} {3}\n".format(
self.headers[keyword][0],
self.headers[keyword][1],
self.headers[keyword][2], keyword))
else:
f.write("{0:d} {1}\n".format(self.headers[keyword],
keyword))
for pair in skeywords:
keyword = pair[0]
if keyword in self.sections:
f.write("\n{}\n\n".format(keyword))
for line in self.sections[keyword]:
f.write(line)
def extract_nonpol(self):
"""extract atom and bond info from nonpolarizable data"""
# extract atom IDs
missinglabels = False
for line in self.sections['Masses']:
tok = line.split()
if len(tok) < 4:
print("error: missing type for atom ID " + tok[0] +
" in Masses section")
missinglabels = True
continue
atomtype = {}
atomtype['id'] = int(tok[0])
atomtype['m'] = float(tok[1])
atomtype['type'] = tok[3]
self.atomtypes.append(atomtype)
if missinglabels:
sys.exit(1)
# extract atom registers
for line in self.sections['Atoms']:
tok = line.split()
atom = {}
atom['n'] = int(tok[0])
atom['mol'] = int(tok[1])
atom['id'] = int(tok[2])
atom['q'] = float(tok[3])
atom['x'] = float(tok[4])
atom['y'] = float(tok[5])
atom['z'] = float(tok[6])
#atom['note'] = ''.join([s + ' ' for s in tok[7:]]).strip()
atom['note'] = ' '.join(tok[7:])
self.atoms.append(atom)
self.idmap[atom['n']] = atom
if 'Velocities' in self.sections:
for line in self.sections['Velocities']:
tok = line.split()
atom = self.idmap[int(tok[0])]
atom['vx'] = float(tok[1])
atom['vy'] = float(tok[2])
atom['vz'] = float(tok[3])
def polarize(self, drude):
"""add Drude particles"""
if 'Pair Coeffs' in self.sections:
raise RuntimeError("cannot polarize a data with Pair Coeffs")
self.extract_nonpol()
natom = self.headers['atoms']
nbond = self.headers['bonds']
nattype = self.headers['atom types']
nbdtype = self.headers['bond types']
# create new atom types (IDs) for Drude particles and modify cores
newattypes = []
for att in self.atomtypes:
att['dflag'] = 'n'
for ddt in drude.types:
if ddt['type'] == att['type']:
nattype += 1
newid = {}
newid['id'] = ddt['id'] = nattype
newid['m'] = ddt['dm']
att['m'] -= ddt['dm']
# label drude particles and cores
att['dflag'] = 'c'
newid['dflag'] = 'd'
newid['type'] = att['type'] + ' DP'
att['type'] += ' DC'
ddt['type'] += ' DC'
newattypes.append(newid)
break
self.headers['atom types'] += len(newattypes)
self.sections['Masses'] = []
for att in self.atomtypes + newattypes:
self.sections['Masses'].append(massline(att))
# create new bond types for core-drude bonds
newbdtypes = []
for att in self.atomtypes:
for ddt in drude.types:
if ddt['type'] == att['type']:
nbdtype += 1
newbdtype = {}
newbdtype['id'] = ddt['bdid'] = nbdtype
newbdtype['k'] = ddt['k']
newbdtype['r0'] = 0.0
newbdtype['note'] = '# ' + ddt['type'] + '-DP'
newbdtypes.append(newbdtype)
break
self.headers['bond types'] += len(newbdtypes)
for bdt in newbdtypes:
self.sections['Bond Coeffs'].append(bdtline(bdt))
# create new atoms for Drude particles and new bonds with their cores
random.seed(123)
newatoms = []
newbonds = []
for atom in self.atoms:
atom['dflag'] = '' # [c]ore, [d]rude, [n]on-polarizable
atom['dd'] = 0 # partner drude or core
for att in self.atomtypes:
if att['id'] == atom['id']:
break
for ddt in drude.types:
if ddt['type'] == att['type']:
natom += 1
newatom = deepcopy(atom)
newatom['n'] = natom
self.idmap[natom] = newatom
newatom['id'] = ddt['id']
newatom['q'] = ddt['dq']
newatom['note'] = atom['note']
if '#' not in newatom['note']:
newatom['note'] += ' #'
newatom['note'] += ' DP'
newatom['dflag'] = 'd'
newatom['dd'] = atom['n']
# avoid superposition of cores and Drudes
newatom['x'] += 0.1 * (random.random() - 0.5)
newatom['y'] += 0.1 * (random.random() - 0.5)
newatom['z'] += 0.1 * (random.random() - 0.5)
if 'Velocities' in self.sections:
newatom['vx'] = atom['vx']
newatom['vy'] = atom['vy']
newatom['vz'] = atom['vz']
newatoms.append(newatom)
atom['q'] -= ddt['dq']
atom['dflag'] = 'c'
atom['dd'] = natom
if '#' not in atom['note']:
atom['note'] += ' #'
atom['note'] += ' DC'
nbond += 1
newbond = {}
newbond['n'] = nbond
newbond['id'] = ddt['bdid']
newbond['i'] = atom['n']
newbond['j'] = newatom['n']
newbond['note'] = '# ' + ddt['type'] + '-DP'
newbonds.append(newbond)
break
self.headers['atoms'] += len(newatoms)
self.headers['bonds'] += len(newbonds)
self.sections['Atoms'] = []
for atom in self.atoms + newatoms:
self.sections['Atoms'].append(atomline(atom))
for bond in newbonds:
self.sections['Bonds'].append(bondline(bond))
if 'Velocities' in self.sections:
self.sections['Velocities'] = []
for atom in self.atoms + newatoms:
self.sections['Velocities'].append(velline(atom))
# update list of atom IDs
for att in newattypes:
self.atomtypes.append(att)
def extract_pol(self, drude):
"""extract atom, drude, bonds info from polarizable data"""
msg="" )\nend' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 50.0, 'Scope', {} ), ( 50.0, 'While', { 'doing': 'expression' } ), ( 50.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 50.0, 'Scope', {} ), ( 50.0, 'While', { 'doing': 'expression' } ), ( 50.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner = Runner( parse( 'while True do begin()\n5\npause( msg="" )\n6\npause( msg="" )\nend' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 25.0, 'Scope', {} ), ( 25.0, 'While', { 'doing': 'expression' } ), ( 25.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 75.0, 'Scope', {} ), ( 75.0, 'While', { 'doing': 'expression' } ), ( 75.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
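# The reported percentage is the fraction of completed statements in each
# scope: with two body statements the first pause reports 50.0; with the
# four statements above it is 25.0 and then 75.0.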
runner = Runner( parse( 'while pause( msg="" ) do 5' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner.run()
assert runner.status == [ ( 100.0, 'Scope', None ) ]
assert runner.done
runner = Runner( parse( 'while not pause( msg="" ) do 5' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner = Runner( parse( '( not pause( msg="cond" ) | True )' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner.run()
assert runner.status == [ ( 100.0, 'Scope', None ) ]
assert runner.done
runner = Runner( parse( '( True | not pause( msg="cond" ) )' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner.run()
assert runner.status == [ ( 100.0, 'Scope', None ) ]
assert runner.done
runner = Runner( parse( 'while not pause( msg="cond" ) do begin()\npause( msg="exp" )\nend' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'expression' } ), ( 0.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'expression' } ), ( 0.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner = Runner( parse( 'while not pause( msg="cond" ) do begin()\n12\npause( msg="exp" )\n34\nend' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 33.333333333333336, 'Scope', {} ), ( 33.333333333333336, 'While', { 'doing': 'expression' } ), ( 33.333333333333336, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'While', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 33.333333333333336, 'Scope', {} ), ( 33.333333333333336, 'While', { 'doing': 'expression' } ), ( 33.333333333333336, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner = Runner( parse( 'if True then pause( msg="exp" )' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'IfElse', { 'doing': 'expression' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner.run()
assert runner.status == [ ( 100.0, 'Scope', None ) ]
assert runner.done
runner = Runner( parse( 'if pause( msg="cond" ) then 5' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'IfElse', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner.run()
assert runner.status == [ ( 100.0, 'Scope', None ) ]
assert runner.done
runner = Runner( parse( 'if not pause( msg="cond" ) then 5' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'IfElse', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner.run()
assert runner.status == [ ( 100.0, 'Scope', None ) ]
assert runner.done
runner = Runner( parse( 'if False then 23 else pause( msg="cond" )' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 50.0, 'Scope', {} ), ( 50.0, 'IfElse', { 'doing': 'expression' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
runner.run()
assert runner.status == [ ( 100.0, 'Scope', None ) ]
assert runner.done
runner = Runner( parse( 'if not pause( msg="cond" ) then begin()\npause( msg="exp" )\nend' ) )
assert runner.status == [ ( 0.0, 'Scope', None ) ]
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'IfElse', { 'doing': 'condition' } ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
with pytest.raises( Pause ):
runner.run()
assert runner.status == [ ( 0.0, 'Scope', {} ), ( 0.0, 'IfElse', { 'doing': 'expression' } ), ( 0.0, 'Scope', {} ), ( 0.0, 'Function', { 'module': None, 'name': 'pause' } ) ]
assert not runner.done
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
from shutil import rmtree
from tempfile import mkdtemp
from glob import iglob
import logging
import pytest
import astropy.io.fits as fits
import numpy as np
from astropy.tests.helper import catch_warnings
from astropy.utils import minversion
from astropy.utils.exceptions import AstropyUserWarning
from astropy.nddata import CCDData
from ..image_collection import ImageFileCollection
_filters = []
_original_dir = ''
_ASTROPY_LT_1_3 = not minversion("astropy", "1.3")
def test_fits_summary(triage_setup):
keywords = ['imagetyp', 'filter']
ic = ImageFileCollection(triage_setup.test_dir,
keywords=keywords)
summary = ic._fits_summary(header_keywords=keywords)
assert len(summary['file']) == triage_setup.n_test['files']
for keyword in keywords:
assert len(summary[keyword]) == triage_setup.n_test['files']
# explicit conversion to array is needed to avoid astropy Table bug in
# 0.2.4
no_filter_no_object_row = np.array(summary['file'] ==
'no_filter_no_object_bias.fit')
# there should be no filter keyword in the bias file
assert summary['filter'][no_filter_no_object_row].mask
class TestImageFileCollectionRepresentation(object):
def test_repr_location(self, triage_setup):
ic = ImageFileCollection(location=triage_setup.test_dir)
assert repr(ic) == "ImageFileCollection(location={0!r})".format(
triage_setup.test_dir)
def test_repr_keywords(self, triage_setup):
ic = ImageFileCollection(
location=triage_setup.test_dir, keywords=['imagetyp'])
ref = ("ImageFileCollection(location={0!r}, keywords=['imagetyp'])"
.format(triage_setup.test_dir))
assert repr(ic) == ref
def test_repr_globs(self, triage_setup):
ic = ImageFileCollection(
location=triage_setup.test_dir, glob_exclude="*no_filter*",
glob_include="*object_light*")
ref = ("ImageFileCollection(location={0!r}, "
"glob_include='*object_light*', "
"glob_exclude='*no_filter*')"
.format(triage_setup.test_dir))
assert repr(ic) == ref
def test_repr_files(self, triage_setup):
ic = ImageFileCollection(
location=triage_setup.test_dir,
filenames=['no_filter_no_object_light.fit',
'no_filter_no_object_bias.fit'])
ref = ("ImageFileCollection(location={0!r}, "
"filenames=['no_filter_no_object_light.fit', "
"'no_filter_no_object_bias.fit'])"
.format(triage_setup.test_dir))
assert repr(ic) == ref
def test_repr_ext(self, triage_setup):
hdul = fits.HDUList([fits.PrimaryHDU(np.ones((10, 10))),
fits.ImageHDU(np.ones((10, 10)))])
hdul.writeto(os.path.join(triage_setup.test_dir, 'mef.fits'))
ic = ImageFileCollection(
location=triage_setup.test_dir,
filenames=['mef.fits'],
ext=1)
ref = ("ImageFileCollection(location={0!r}, "
"filenames=['mef.fits'], "
"ext=1)"
.format(triage_setup.test_dir))
assert repr(ic) == ref
def test_repr_info(self, triage_setup):
summary_file_path = os.path.join(triage_setup.test_dir, 'info.csv')
ic = ImageFileCollection(
location=triage_setup.test_dir, keywords=['naxis'])
ic.summary.write(summary_file_path)
with catch_warnings() as w:
ic2 = ImageFileCollection(info_file=summary_file_path)
# ImageFileCollections from info_files contain no files. That issues
# a Warning that we'll ignore here.
assert len(w) == 2
assert "'info_file' argument is deprecated" in str(w[0].message)
assert 'no FITS files in the collection' in str(w[1].message)
ref = ("ImageFileCollection(keywords=['naxis'], info_file={0!r})"
.format(summary_file_path))
assert repr(ic2) == ref
# This should work mark all test methods as using the triage_setup
# fixture, but it doesn't, so the fixture is given explicitly as an
# argument to each method.
# @pytest.mark.usefixtures("triage_setup")
class TestImageFileCollection(object):
def _setup_logger(self, path, level=logging.WARN):
"""
Set up file logger at the path.
"""
logger = logging.getLogger()
logger.setLevel(level)
logger.addHandler(logging.FileHandler(path))
return logger
def test_filter_files(self, triage_setup):
img_collection = ImageFileCollection(
location=triage_setup.test_dir, keywords=['imagetyp', 'filter'])
assert len(img_collection.files_filtered(
imagetyp='bias')) == triage_setup.n_test['bias']
assert len(img_collection.files) == triage_setup.n_test['files']
assert ('filter' in img_collection.keywords)
assert ('flying monkeys' not in img_collection.keywords)
assert len(img_collection.values('imagetyp', unique=True)) == 2
def test_filter_files_whitespace_keys(self, triage_setup):
hdr = fits.Header([('HIERARCH a b', 2)])
hdul = fits.HDUList([fits.PrimaryHDU(np.ones((10, 10)), header=hdr)])
hdul.writeto(os.path.join(triage_setup.test_dir,
'hdr_with_whitespace.fits'))
ic = ImageFileCollection(location=triage_setup.test_dir)
# Using a dictionary and unpacking it should work
filtered = ic.files_filtered(**{'a b': 2})
assert len(filtered) == 1
assert 'hdr_with_whitespace.fits' in filtered
# Also check it's working with generators:
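# (replace_='_' lets the underscore in the a_b keyword stand in for the
# space in the "a b" header key)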
for _, filename in ic.data(a_b=2, replace_='_',
return_fname=True):
assert filename == 'hdr_with_whitespace.fits'
def test_filter_files_with_str_on_nonstr_column(self, triage_setup):
ic = ImageFileCollection(location=triage_setup.test_dir)
# Filtering an integer column with a string
filtered = ic.files_filtered(naxis='2')
assert len(filtered) == 0
def test_filter_fz_files(self, triage_setup):
fn = 'test.fits.fz'
ic = ImageFileCollection(location=triage_setup.test_dir, filenames=fn)
# Get a subset of files with a specific header value
filtered = ic.files_filtered(EXPTIME=15.0)
assert len(filtered) == 1
def test_filtered_files_have_proper_path(self, triage_setup):
ic = ImageFileCollection(location=triage_setup.test_dir, keywords='*')
# Get a subset of the files.
plain_biases = ic.files_filtered(imagetyp='bias')
# Force a copy...
plain_biases = list(plain_biases)
# Same subset, but with full path.
path_biases = ic.files_filtered(imagetyp='bias', include_path=True)
for path_b, plain_b in zip(path_biases, plain_biases):
# If the path munging has been done properly, this will succeed.
assert os.path.basename(path_b) == plain_b
def test_filenames_are_set_properly(self, triage_setup):
fn = ['filter_no_object_bias.fit', 'filter_object_light_foo.fit']
img_collection = ImageFileCollection(
location=triage_setup.test_dir, filenames=fn, keywords=['filter'])
assert img_collection.files == fn
img_collection.refresh()
assert img_collection.files == fn
fn = 'filter_no_object_bias.fit'
img_collection = ImageFileCollection(
location=triage_setup.test_dir, filenames=fn, keywords=['filter'])
assert img_collection.files == [fn]
def test_keywords_deleter(self, triage_setup):
ic = ImageFileCollection(triage_setup.test_dir, keywords='*')
assert ic.keywords != []
del ic.keywords
assert ic.keywords == []
def test_files_with_compressed(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir)
assert len(collection._fits_files_in_directory(
compressed=True)) == triage_setup.n_test['files']
def test_files_with_no_compressed(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir)
n_files_found = len(
collection._fits_files_in_directory(compressed=False))
n_uncompressed = (triage_setup.n_test['files'] -
triage_setup.n_test['compressed'])
assert n_files_found == n_uncompressed
def test_generator_full_path(self, triage_setup):
collection = ImageFileCollection(
location=triage_setup.test_dir, keywords=['imagetyp'])
for path, file_name in zip(collection._paths(), collection.files):
assert path == os.path.join(triage_setup.test_dir, file_name)
def test_hdus(self, triage_setup):
collection = ImageFileCollection(
location=triage_setup.test_dir, keywords=['imagetyp'])
n_hdus = 0
for hdu in collection.hdus():
assert isinstance(hdu, fits.PrimaryHDU)
data = hdu.data # must access the data to force scaling
# pre-astropy 1.1 unsigned data was changed to float32 and BZERO
# removed. In 1.1 and later, BZERO stays but the data type is
# unsigned int.
assert (('BZERO' not in hdu.header) or
(data.dtype is np.dtype(np.uint16)))
n_hdus += 1
assert n_hdus == triage_setup.n_test['files']
def test_hdus_masking(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp', 'exposure'])
old_data = np.array(collection.summary)
for hdu in collection.hdus(imagetyp='bias'):
pass
new_data = np.array(collection.summary)
assert (new_data == old_data).all()
@pytest.mark.parametrize('extension', ['TESTEXT', 1, ('TESTEXT', 1)])
def test_multiple_extensions(self, triage_setup, extension):
ext1 = fits.PrimaryHDU()
ext1.data = np.arange(1, 5)
# It is important than the name used for this test extension
# NOT be MASK or UNCERT because both are treated in a special
# way by the FITS reader.
test_ext_name = 'TESTEXT'
ext2 = fits.ImageHDU(name=test_ext_name)
ext2.data = np.arange(6, 10)
hdulist = fits.hdu.hdulist.HDUList([ext1, ext2])
hdulist.writeto(os.path.join(triage_setup.test_dir,
'multi-extension.fits'))
ic2 = ImageFileCollection(
triage_setup.test_dir, keywords='*',
filenames=['multi-extension.fits'], ext=extension)
ic1 = ImageFileCollection(
triage_setup.test_dir,
keywords='*', filenames=['multi-extension.fits'], ext=0)
assert ic1.ext == 0
assert ic2.ext == extension
column2 = ic2.summary.colnames
column1 = ic1.summary.colnames
assert column1 != column2
list1 = [key.lower() for key in ext2.header]
list2 = ic2.summary.colnames[1:]
assert list1 == list2
ccd_kwargs = {'unit': 'adu'}
for data, hdr, hdu, ccd in zip(ic2.data(),
ic2.headers(),
ic2.hdus(),
ic2.ccds(ccd_kwargs)):
np.testing.assert_array_equal(data, ext2.data)
assert hdr == ext2.header
# Now compare that the generators each give the same stuff
np.testing.assert_array_equal(data, ccd.data)
np.testing.assert_array_equal(data, hdu.data)
assert hdr == hdu.header
assert hdr == ccd.meta
def test_headers(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
n_headers = 0
for header in collection.headers():
assert isinstance(header, fits.Header)
assert ('bzero' in header)
n_headers += 1
assert n_headers == triage_setup.n_test['files']
def test_headers_save_location(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
destination = mkdtemp()
for header in collection.headers(save_location=destination):
pass
new_collection = ImageFileCollection(location=destination,
keywords=['imagetyp'])
        def basenames(paths):
            return set(os.path.basename(file) for file in paths)
assert (len(basenames(collection._paths()) -
basenames(new_collection._paths())) == 0)
rmtree(destination)
def test_headers_with_filter(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
cnt = 0
for header in collection.headers(imagetyp='light'):
assert header['imagetyp'].lower() == 'light'
cnt += 1
assert cnt == triage_setup.n_test['light']
def test_headers_with_multiple_filters(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
cnt = 0
for header in collection.headers(imagetyp='light',
filter='R'):
assert header['imagetyp'].lower() == 'light'
assert header['filter'].lower() == 'r'
cnt += 1
assert cnt == (triage_setup.n_test['light'] -
triage_setup.n_test['need_filter'])
def test_headers_with_filter_wildcard(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
cnt = 0
for header in collection.headers(imagetyp='*'):
cnt += 1
assert cnt == triage_setup.n_test['files']
def test_headers_with_filter_missing_keyword(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
for header in collection.headers(imagetyp='light',
object=''):
assert header['imagetyp'].lower() == 'light'
with pytest.raises(KeyError):
header['object']
def test_generator_headers_save_with_name(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
for header in collection.headers(save_with_name='_new'):
assert isinstance(header, fits.Header)
new_collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
assert (len(new_collection._paths()) ==
2 * (triage_setup.n_test['files']) -
triage_setup.n_test['compressed'])
        for fil in iglob(triage_setup.test_dir + '/*_new*'):
            os.remove(fil)
def test_generator_data(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
for img in collection.data():
assert isinstance(img, np.ndarray)
def test_generator_ccds_without_unit(self, triage_setup):
collection = ImageFileCollection(
location=triage_setup.test_dir, keywords=['imagetyp'])
with pytest.raises(ValueError):
ccd = next(collection.ccds())
def test_generator_ccds(self, triage_setup):
collection = ImageFileCollection(
location=triage_setup.test_dir, keywords=['imagetyp'])
ccd_kwargs = {'unit': 'adu'}
for ccd in collection.ccds(ccd_kwargs=ccd_kwargs):
assert isinstance(ccd, CCDData)
    def test_consecutive_filters(self, triage_setup):
        collection = ImageFileCollection(location=triage_setup.test_dir,
                                         keywords=['imagetyp', 'filter',
                                                   'object'])
        no_files_match = collection.files_filtered(object='fdsafs')
        assert len(no_files_match) == 0
        some_files_should_match = collection.files_filtered(object=None,
                                                            imagetyp='light')
        assert (len(some_files_should_match) ==
                triage_setup.n_test['need_object'])
    def test_filter_does_not_permanently_change_file_mask(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
# ensure all files are originally unmasked
assert not collection.summary['file'].mask.any()
# generate list that will match NO files
collection.files_filtered(imagetyp='foisajfoisaj')
# if the code works, this should have no permanent effect
assert not collection.summary['file'].mask.any()
@pytest.mark.parametrize("new_keywords,collection_keys", [
(['imagetyp', 'object'], ['imagetyp', 'filter']),
(['imagetyp'], ['imagetyp', 'filter'])])
def test_keyword_setting(self, new_keywords, collection_keys,
triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=collection_keys)
tbl_orig = collection.summary
collection.keywords = new_keywords
tbl_new = collection.summary
        if set(new_keywords).issubset(collection_keys):
            # should just delete columns without rebuilding table
            assert tbl_orig is tbl_new
        else:
            # we need new keywords so must rebuild
            assert tbl_orig is not tbl_new
            for key in new_keywords:
                assert key in tbl_new.keys()
assert (tbl_orig['file'] == tbl_new['file']).all()
assert (tbl_orig['imagetyp'] == tbl_new['imagetyp']).all()
assert 'filter' not in tbl_new.keys()
assert 'object' not in tbl_orig.keys()
def test_keyword_setting_to_empty_list(self, triage_setup):
ic = ImageFileCollection(triage_setup.test_dir)
ic.keywords = []
assert ['file'] == ic.keywords
def test_header_and_filename(self, triage_setup):
collection = ImageFileCollection(location=triage_setup.test_dir,
keywords=['imagetyp'])
for header, fname in collection.headers(return_fname=True):
assert (fname in collection.summary['file'])
        assert isinstance(header, fits.Header)
:param name: Log unique name
:type name: str
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'blob_duration': {'key': 'blobDuration', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(self, *, display_name: str=None, blob_duration: str=None, name: str=None, **kwargs) -> None:
super(OperationMetaLogSpecification, self).__init__(**kwargs)
self.display_name = display_name
self.blob_duration = blob_duration
self.name = name
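# Illustrative usage (not part of the generated SDK; sample values are
# hypothetical): msrest models translate attribute names to wire keys via
# _attribute_map, so a round trip looks like
#
#   spec = OperationMetaLogSpecification(
#       display_name='Audit Logs', blob_duration='PT1H', name='AuditLogs')
#   spec.serialize()
#   # -> {'displayName': 'Audit Logs', 'blobDuration': 'PT1H', 'name': 'AuditLogs'}
#   OperationMetaLogSpecification.deserialize(spec.serialize()).name  # 'AuditLogs'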
class OperationMetaMetricDimensionSpecification(Model):
"""What is this?.
:param display_name: Dimension display name
:type display_name: str
:param name: Dimension unique name
:type name: str
:param to_be_exported_for_shoebox: Whether this metric should be exported
for Shoebox
:type to_be_exported_for_shoebox: bool
"""
_attribute_map = {
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'},
}
def __init__(self, *, display_name: str=None, name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None:
super(OperationMetaMetricDimensionSpecification, self).__init__(**kwargs)
self.display_name = display_name
self.name = name
self.to_be_exported_for_shoebox = to_be_exported_for_shoebox
class OperationMetaMetricSpecification(Model):
"""What is this?.
:param source_mdm_namespace: The source MDM namespace
:type source_mdm_namespace: str
:param display_name: Metric display name
:type display_name: str
:param name: Metric unique name
:type name: str
:param aggregation_type: Metric aggregation type
:type aggregation_type: str
:param display_description: Metric description
:type display_description: str
:param source_mdm_account: The source MDM account
:type source_mdm_account: str
:param enable_regional_mdm_account: Whether the regional MDM account is
enabled
:type enable_regional_mdm_account: bool
:param unit: Metric units
:type unit: str
:param dimensions: Metric dimensions
:type dimensions:
list[~azure.mgmt.synapse.models.OperationMetaMetricDimensionSpecification]
:param supports_instance_level_aggregation: Whether the metric supports
instance-level aggregation
:type supports_instance_level_aggregation: bool
:param metric_filter_pattern: Metric filter
:type metric_filter_pattern: str
"""
_attribute_map = {
'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
'display_description': {'key': 'displayDescription', 'type': 'str'},
'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'},
'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'bool'},
'unit': {'key': 'unit', 'type': 'str'},
'dimensions': {'key': 'dimensions', 'type': '[OperationMetaMetricDimensionSpecification]'},
'supports_instance_level_aggregation': {'key': 'supportsInstanceLevelAggregation', 'type': 'bool'},
'metric_filter_pattern': {'key': 'metricFilterPattern', 'type': 'str'},
}
def __init__(self, *, source_mdm_namespace: str=None, display_name: str=None, name: str=None, aggregation_type: str=None, display_description: str=None, source_mdm_account: str=None, enable_regional_mdm_account: bool=None, unit: str=None, dimensions=None, supports_instance_level_aggregation: bool=None, metric_filter_pattern: str=None, **kwargs) -> None:
super(OperationMetaMetricSpecification, self).__init__(**kwargs)
self.source_mdm_namespace = source_mdm_namespace
self.display_name = display_name
self.name = name
self.aggregation_type = aggregation_type
self.display_description = display_description
self.source_mdm_account = source_mdm_account
self.enable_regional_mdm_account = enable_regional_mdm_account
self.unit = unit
self.dimensions = dimensions
self.supports_instance_level_aggregation = supports_instance_level_aggregation
self.metric_filter_pattern = metric_filter_pattern
class OperationMetaServiceSpecification(Model):
"""What is this?.
:param metric_specifications: Service metric specifications
:type metric_specifications:
list[~azure.mgmt.synapse.models.OperationMetaMetricSpecification]
:param log_specifications: Service log specifications
:type log_specifications:
list[~azure.mgmt.synapse.models.OperationMetaLogSpecification]
"""
_attribute_map = {
'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetaMetricSpecification]'},
'log_specifications': {'key': 'logSpecifications', 'type': '[OperationMetaLogSpecification]'},
}
def __init__(self, *, metric_specifications=None, log_specifications=None, **kwargs) -> None:
super(OperationMetaServiceSpecification, self).__init__(**kwargs)
self.metric_specifications = metric_specifications
self.log_specifications = log_specifications
class OperationResource(Model):
"""An operation.
:param id: Operation ID
:type id: str
:param name: Operation name
:type name: str
:param status: Operation status. Possible values include: 'InProgress',
'Succeeded', 'Failed', 'Canceled'
:type status: str or ~azure.mgmt.synapse.models.OperationStatus
:param properties: Operation properties
:type properties: object
:param error: Errors from the operation
:type error: ~azure.mgmt.synapse.models.ErrorDetail
:param start_time: Operation start time
:type start_time: datetime
    :param end_time: Operation end time
:type end_time: datetime
:param percent_complete: Completion percentage of the operation
:type percent_complete: float
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'object'},
'error': {'key': 'error', 'type': 'ErrorDetail'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'percent_complete': {'key': 'percentComplete', 'type': 'float'},
}
def __init__(self, *, id: str=None, name: str=None, status=None, properties=None, error=None, start_time=None, end_time=None, percent_complete: float=None, **kwargs) -> None:
super(OperationResource, self).__init__(**kwargs)
self.id = id
self.name = name
self.status = status
self.properties = properties
self.error = error
self.start_time = start_time
self.end_time = end_time
self.percent_complete = percent_complete
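# Illustrative usage (hypothetical payload): the 'iso-8601' entries in
# _attribute_map make deserialize() parse timestamps into datetime objects,
# e.g.
#
#   op = OperationResource.deserialize(
#       {'id': '1', 'status': 'InProgress', 'startTime': '2020-01-01T00:00:00Z'})
#   op.start_time  # -> datetime.datetime(2020, 1, 1, 0, 0, tzinfo=UTC)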
class PrivateEndpoint(Model):
"""Private endpoint details.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource id of the private endpoint.
:vartype id: str
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(self, **kwargs) -> None:
super(PrivateEndpoint, self).__init__(**kwargs)
self.id = None
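# Note (explanatory, not from the generated code): attributes marked readonly
# in _validation, such as PrivateEndpoint.id, are filled in by deserialize()
# from a server response but omitted by serialize(), which is why __init__
# simply sets them to None.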
class PrivateEndpointConnection(ProxyResource):
"""A private endpoint connection.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Fully qualified resource Id for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. Ex-
Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
:vartype type: str
:param private_endpoint: The private endpoint which the connection belongs
to.
:type private_endpoint: ~azure.mgmt.synapse.models.PrivateEndpoint
:param private_link_service_connection_state: Connection state of the
private endpoint connection.
:type private_link_service_connection_state:
~azure.mgmt.synapse.models.PrivateLinkServiceConnectionState
:ivar provisioning_state: Provisioning state of the private endpoint
connection.
:vartype provisioning_state: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(self, *, private_endpoint=None, private_link_service_connection_state=None, **kwargs) -> None:
super(PrivateEndpointConnection, self).__init__(**kwargs)
self.private_endpoint = private_endpoint
self.private_link_service_connection_state = private_link_service_connection_state
self.provisioning_state = None
class PrivateLinkResource(ProxyResource):
"""A private link resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Fully qualified resource Id for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
:vartype id: str
:ivar name: The name of the resource
:vartype name: str
:ivar type: The type of the resource. Ex-
Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
:vartype type: str
:ivar properties: The private link resource properties.
:vartype properties:
~azure.mgmt.synapse.models.PrivateLinkResourceProperties
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'},
}
def __init__(self, **kwargs) -> None:
super(PrivateLinkResource, self).__init__(**kwargs)
self.properties = None
class PrivateLinkResourceProperties(Model):
"""Properties of a private link resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar group_id: The private link resource group id.
:vartype group_id: str
:ivar required_members: The private link resource required member names.
:vartype required_members: list[str]
    :ivar required_zone_names: Required DNS zone names of the private link
resource.
:vartype required_zone_names: list[str]
"""
_validation = {
'group_id': {'readonly': True},
'required_members': {'readonly': True},
'required_zone_names': {'readonly': True},
}
_attribute_map = {
'group_id': {'key': 'groupId', 'type': 'str'},
'required_members': {'key': 'requiredMembers', 'type': '[str]'},
'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'},
}
def __init__(self, **kwargs) -> None:
super(PrivateLinkResourceProperties, self).__init__(**kwargs)
self.group_id = None
self.required_members = None
self.required_zone_names = None
class PrivateLinkServiceConnectionState(Model):
"""Connection state details of the private endpoint.
Variables are only populated by the server, and will be ignored when
sending a request.
:param status: The private link service connection status. Possible values
include: 'Approved', 'Pending', 'Rejected', 'Disconnected'
:type status: str or ~azure.mgmt.synapse.models.enum
:param description: The private link service connection description.
:type description: str
:ivar actions_required: The actions required for private link service
connection.
:vartype actions_required: str
"""
_validation = {
'actions_required': {'readonly': True},
}
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'actions_required': {'key': 'actionsRequired', 'type': 'str'},
}
def __init__(self, *, status=None, description: str=None, **kwargs) -> None:
super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
self.status = status
self.description = description
self.actions_required = None
class QueryInterval(Model):
"""A database query.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar interval_start_time: The start time of the measurement interval
(ISO8601 format).
:vartype interval_start_time: datetime
:ivar execution_count: The number of times the query was executed during
this interval.
:vartype execution_count: float
:ivar metrics: The list of query metrics during this interval.
:vartype metrics: list[~azure.mgmt.synapse.models.QueryMetric]
"""
_validation = {
'interval_start_time': {'readonly': True},
'execution_count': {'readonly': True},
'metrics': {'readonly': True},
}
_attribute_map = {
'interval_start_time': {'key': 'intervalStartTime', 'type': 'iso-8601'},
'execution_count': {'key': 'executionCount', 'type': 'float'},
'metrics': {'key': 'metrics', 'type': '[QueryMetric]'},
}
def __init__(self, **kwargs) -> None:
super(QueryInterval, self).__init__(**kwargs)
self.interval_start_time = None
self.execution_count = None
self.metrics = None
class QueryMetric(Model):
"""A database query.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar name: The name of the metric
:vartype name: str
:ivar display_name: The name of the metric for display in user interface
    :vartype display_name: str
    """
video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1_slow.mov')).getMaskSetForEntireVideo(
) # Constant FR
self._add_mask_files_to_kill(result)
self.assertEqual(0.0, video_tools.get_start_time_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(596, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(596, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(59500.0, video_tools.get_end_time_from_segment(result[0]))
self.assertEqual('video', video_tools.get_type_of_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1_slow.mov')).getMaskSetForEntireVideo(
start_time='00:00:02.01') # Constant FR
self._add_mask_files_to_kill(result)
self.assertEqual(2000.0, video_tools.get_start_time_from_segment(result[0]))
self.assertEqual(21, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(576, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(596, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(59500.0, video_tools.get_end_time_from_segment(result[0]))
self.assertEqual('video', video_tools.get_type_of_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1_swap.mov')).getMaskSetForEntireVideo(
start_time='00:00:02.01', end_time='00:00:59.29') # Variable FR, swapped streams, fails to grab all frames
self._add_mask_files_to_kill(result)
self.assertEqual(59221.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual(779, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(801, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(23, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(1982.0, round(video_tools.get_start_time_from_segment(result[0])))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1_swap.mov')).getMaskSetForEntireVideo(
start_time='00:00:02.01') # Variable FR, swapped streams.
self._add_mask_files_to_kill(result)
self.assertEqual(1982.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(23, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(803 - 23 + 1, video_tools.get_frames_from_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='00:00:02.01')
self._add_mask_files_to_kill(result)
self.assertEqual(1982.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(23, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(803 - 23 + 1, video_tools.get_frames_from_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='00:00:02.01:02')
self._add_mask_files_to_kill(result)
self.assertEqual(2123.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(24, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(780, video_tools.get_frames_from_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='00:00:02.01',
end_time='00:00:04.01')
self._add_mask_files_to_kill(result)
self.assertEqual(1982.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(23, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(3965.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual(47, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(47 - 23 + 1, video_tools.get_frames_from_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='23',
end_time='29')
self._add_mask_files_to_kill(result)
self.assertEqual(1982.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(23, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(2548.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual(29, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(29 - 23 + 1, video_tools.get_frames_from_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='29',
end_time='55')
self._add_mask_files_to_kill(result)
self.assertEqual(2548.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(29, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(4532.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual(55, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(55 - 29 + 1, video_tools.get_frames_from_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='29')
self._add_mask_files_to_kill(result)
self.assertEqual(2548.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(29, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(59348.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual(803, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(803 - 29 + 1, video_tools.get_frames_from_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
media_types=['audio'])
self._add_mask_files_to_kill(result)
self.assertEqual(0.0, video_tools.get_start_time_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(2617262, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(2617262, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(59348.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual('audio', video_tools.get_type_of_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='1',
media_types=['video'])
self._add_mask_files_to_kill(result)
self.assertEqual(0.0, video_tools.get_start_time_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(803, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(803, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(59348.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual('video', video_tools.get_type_of_segment(result[0]))
result = video_tools.FileMetaDataLocator(self.locateFile('tests/videos/sample1.mov')).getMaskSetForEntireVideo(
start_time='00:00:02.01',
end_time='00:00:04',
media_types=['audio'])
self._add_mask_files_to_kill(result)
self.assertEqual(2010.0, round(video_tools.get_start_time_from_segment(result[0])))
self.assertEqual(88641, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(4000.0, round(video_tools.get_end_time_from_segment(result[0])))
self.assertEqual(176400, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(176400 - 88641 + 1, video_tools.get_frames_from_segment(result[0]))
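        # Arithmetic behind the audio assertions above (explanatory note):
        # audio 'frames' are samples, so these figures are consistent with a
        # 44100 Hz stream: 2.01 s * 44100 = 88641 and 4.00 s * 44100 = 176400.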
def test_extract_mask(self):
amount = 30
fileOne = self._init_write_file('test_ts_em1', 2500, 75, 30, 30, maskonly=True)
fileTwo = self._init_write_file('test_ts_em2', 4100, 123, 27, 30, maskonly=True)
sets = []
change = video_tools.create_segment(
starttime=2500,
startframe=75,
endtime=3500,
endframe=75 + amount - 1,
frames=amount,
rate=30,
videosegment=fileOne,
type='video')
sets.append(change)
change = video_tools.create_segment(
starttime=4100,
startframe=123,
endtime=5000,
endframe=149,
            frames=27,
rate=30,
videosegment=fileTwo,
type='video')
sets.append(change)
reader = tool_set.GrayBlockReader(fileTwo)
c = 0
while c < 3:
expect_mask = reader.read()
c += 1
reader.close()
mask = video_tools.extractMask(sets, 125)
self.assertTrue(np.all(mask == expect_mask))
def test_formMaskDiffForImage(self):
from maskgen.image_wrap import ImageWrapper
fileOne = self._init_write_zip_file('test_ts_fmdfi.png.zip', 20, 30)
test_image = np.random.randint(255, size=(1090, 1920)).astype('uint8')
masks = video_tools.formMaskDiffForImage(fileOne, ImageWrapper(test_image), 'test_ts_fmdfi', 'op')
self.assertEqual(1, len(masks))
mask = masks[0]
        self.assertEqual(20, video_tools.get_frames_from_segment(mask))
        self.assertEqual(1, video_tools.get_start_frame_from_segment(mask))
        self.assertEqual(20, video_tools.get_end_frame_from_segment(mask))
        self.assertEqual(0, video_tools.get_start_time_from_segment(mask))
        self.assertEqual(666, int(video_tools.get_end_time_from_segment(mask)))
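        # (explanatory note) the end time follows from 20 frames at the zip's
        # 30 fps: int(20 / 30.0 * 1000) = 666 ms.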
reader = tool_set.GrayBlockReader(video_tools.get_file_from_segment(mask))
count = 0
while True:
diff_mask = reader.read()
if diff_mask is None:
break
self.assertTrue(np.sum(255-diff_mask) > 0)
count += 1
self.assertEqual(20, count)
def test_inverse_intersection_for_mask(self):
amount = 30
fileOne = self._init_write_file('test_ts_em1', 2500, 75, 30, 30, maskonly=True)
sets = []
change = video_tools.create_segment(
starttime=2500,
startframe=75,
endtime=3500,
endframe=75 + amount - 1,
frames=amount,
rate=30,
videosegment=fileOne,
type='video')
sets.append(change)
test_mask = np.random.randint(2, size=(1090, 1920)).astype('uint8') * 255
new_sets = video_tools.inverse_intersection_for_mask(test_mask, sets)
reader = tool_set.GrayBlockReader(video_tools.get_file_from_segment(new_sets[0]))
while True:
expect_mask = reader.read()
if expect_mask is None:
break
            self.assertTrue(np.all(test_mask.astype('int') - (255 - expect_mask.astype('int')) <= 0))
def test_remove_intersection(self):
setOne = []
maskitem = video_tools.create_segment(
starttime=900,
startframe=10,
endtime=2900,
endframe=30,
frames=21,
rate=10,
type='video')
setOne.append(maskitem)
maskitem = video_tools.create_segment(
starttime=4900,
startframe=50,
endtime=6900,
endframe=70,
frames=21,
rate=10,
type='video')
setOne.append(maskitem)
setTwo = []
maskitem = video_tools.create_segment(
starttime=0,
startframe=1,
endtime=500,
endframe=6,
frames=6,
rate=10,
type='video')
setTwo.append(maskitem)
maskitem = video_tools.create_segment(
starttime=800,
startframe=9,
endtime=1400,
endframe=15,
frames=7,
rate=10,
type='video')
setTwo.append(maskitem)
maskitem = video_tools.create_segment(
starttime=2400,
startframe=25,
endtime=3400,
endframe=35,
frames=11,
rate=10,
type='video')
setTwo.append(maskitem)
maskitem = video_tools.create_segment(
starttime=3200,
startframe=44,
endtime=4600,
endframe=47,
frames=4,
rate=10,
type='video')
setTwo.append(maskitem)
maskitem = video_tools.create_segment(
starttime=8900,
startframe=90,
endtime=9400,
endframe=95,
frames=6,
rate=10,
type='video')
setTwo.append(maskitem)
finalsets = video_tools.removeIntersectionOfMaskSets(setOne, setTwo)
        self.assertEqual(6, len(finalsets))
self.assertEqual([
{'endframe': 6, 'rate': 10, 'starttime': 0, 'frames': 6, 'startframe': 1, 'endtime': 500,
'type': 'video', 'error':0},
{'endframe': 9, 'rate': 10, 'starttime': 800, 'frames': 1, 'startframe': 9, 'endtime': 800.0,
'type': 'video', 'error':0},
{'endframe': 30, 'rate': 10, 'starttime': 900, 'frames': 21, 'startframe': 10, 'endtime': 2900,
'type': 'video', 'error':0},
{'endframe': 47, 'rate': 10, 'starttime': 3200, 'frames': 4, 'startframe': 44, 'endtime': 4600,
'type': 'video', 'error':0},
{'endframe': 70, 'rate': 10, 'starttime': 4900, 'frames': 21, 'startframe': 50, 'endtime': 6900,
'type': 'video', 'error':0},
{'endframe': 95, 'rate': 10, 'starttime': 8900, 'frames': 6, 'startframe': 90, 'endtime': 9400,
'type': 'video', 'error':0}], finalsets)
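        # Reading the expected output (explanatory note):
        # removeIntersectionOfMaskSets keeps setOne's segments whole (10-30,
        # 50-70) and removes any overlap from setTwo: frames 9-15 are cut back
        # to the single frame 9 (endtime snapped to 800.0), 25-35 is dropped
        # outright, and segments that never touch setOne (1-6, 44-47, 90-95)
        # pass through unchanged.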
def test_before_dropping(self):
amount = 30
fileOne = self._init_write_file('test_ts_bd1', 2500, 75, 30, 30)
fileTwo = self._init_write_file('test_ts_bd2', 4100, 123, 27, 30)
sets = []
change = video_tools.create_segment(
starttime=0,
startframe=1,
endtime=1000,
endframe=30,
frames=30,
rate=30,
error=1.1,
type='video')
sets.append(change)
change = video_tools.create_segment(
starttime=2500,
startframe=75,
endtime=3500,
endframe=75 + amount - 1,
frames=amount,
rate=30,
error=1.2,
videosegment=fileOne,
type='video')
sets.append(change)
change = video_tools.create_segment(
starttime=4100,
startframe=123,
endtime=5000,
endframe=149,
            frames=27,
rate=30,
error=1.3,
videosegment=fileTwo,
type='video')
sets.append(change)
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 90,
'starttime': 3000,
'endframe': 117,
'endtime': 4000
})], sets)
self._add_mask_files_to_kill(result)
self.assertEqual(3, len(result))
self.assertEqual(15, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(89, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(96, video_tools.get_start_frame_from_segment(result[2]))
self.assertEqual(122, video_tools.get_end_frame_from_segment(result[2]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[2]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 63,
'starttime': 2100,
'endframe': 90,
'endtime': 3000
})], sets)
self._add_mask_files_to_kill(result)
self.assertEqual(3, len(result))
self.assertEqual(15, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(63, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(77, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(96, video_tools.get_start_frame_from_segment(result[2]))
self.assertEqual(122, video_tools.get_end_frame_from_segment(result[2]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[2]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 87,
'starttime': 2900,
'endframe': 93,
'endtime': 3100
})], sets)
self._add_mask_files_to_kill(result)
self.assertEqual(4, len(result))
self.assertEqual(12, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(86, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(12, video_tools.get_frames_from_segment(result[2]))
self.assertEqual(87, video_tools.get_start_frame_from_segment(result[2]))
self.assertEqual(98, video_tools.get_end_frame_from_segment(result[2]))
self.assertEqual(117, video_tools.get_start_frame_from_segment(result[3]))
self.assertEqual(143, video_tools.get_end_frame_from_segment(result[3]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[2]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[3]))
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 87,
'starttime': 2900,
'endframe': 93,
'endtime': 3100
})], sets, keepTime=True)
self._add_mask_files_to_kill(result)
self.assertEqual(4, len(result))
self.assertEqual(12, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(12, video_tools.get_frames_from_segment(result[2]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(86, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(93, video_tools.get_start_frame_from_segment(result[2]))
self.assertEqual(104, video_tools.get_end_frame_from_segment(result[2]))
self.assertEqual(123, video_tools.get_start_frame_from_segment(result[3]))
self.assertEqual(149, video_tools.get_end_frame_from_segment(result[3]))
self.assertEqual(4, len(result))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[2]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[3]))
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 1,
'starttime': 0,
'endframe': 93,
'endtime': 3100
})], sets)
self._add_mask_files_to_kill(result)
self.assertEqual(2, len(result))
self.assertEqual(12, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(12, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(31, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(57, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[1]))
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 93,
'starttime': 3100
})], sets)
self._add_mask_files_to_kill(result)
self.assertEqual(2, len(result))
self.assertEqual(30, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(30, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(92, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 1,
'starttime': 0,
'endframe': 93,
'endtime': 3100
})], sets, keepTime=True)
self._add_mask_files_to_kill(result)
self.assertEqual(2, len(result))
self.assertEqual(12, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(93, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(104, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(123, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(149, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[1]))
result = video_tools.dropFramesFromMask([video_tools.create_segment(**{
'startframe': 93,
'starttime': 3100,
})], sets, keepTime=True)
self._add_mask_files_to_kill(result)
self.assertEqual(2, len(result))
self.assertEqual(30, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(30, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(18, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(92, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
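        # Summary of the semantics exercised above (explanatory note): with
        # keepTime=False, dropFramesFromMask renumbers everything after the
        # cut (123-149 becomes 96-122) and clips overlapping segments at the
        # cut; with keepTime=True the original frame numbers and times are
        # preserved (123-149 stays put) and an overlapped segment resumes at
        # the end of the dropped range.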
def test_before_dropping_nomask(self):
amount = 30
sets = []
change = video_tools.create_segment(
starttime=0,
startframe=1,
endtime=1000,
endframe=30,
frames=30,
rate=30,
type='video',
error=1.1)
sets.append(change)
change = video_tools.create_segment(
starttime=2500,
startframe=75,
endtime=3500,
endframe=75 + amount - 1,
frames=amount,
rate=30,
type='video',
error=1.2)
sets.append(change)
change = video_tools.create_segment(
starttime=4100,
startframe=123,
endtime=5000,
endframe=149,
            frames=27,
rate=30,
type='video',
error=1.3)
sets.append(change)
result = video_tools.dropFramesWithoutMask([video_tools.create_segment(**{
'startframe': 87,
'starttime': 2900,
'endframe': 92,
'endtime': 3100
})], sets)
self.assertEqual(4, len(result))
self.assertEqual(12, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(86, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(12, video_tools.get_frames_from_segment(result[2]))
self.assertEqual(87, video_tools.get_start_frame_from_segment(result[2]))
self.assertEqual(98, video_tools.get_end_frame_from_segment(result[2]))
self.assertEqual(117, video_tools.get_start_frame_from_segment(result[3]))
self.assertEqual(143, video_tools.get_end_frame_from_segment(result[3]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[2]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[3]))
result = video_tools.dropFramesWithoutMask([video_tools.create_segment(**{
'startframe': 63,
'starttime': 2100,
'endframe': 90,
'endtime': 3000
})], sets)
self.assertEqual(3, len(result))
self.assertEqual(14, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(63, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(76, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(95, video_tools.get_start_frame_from_segment(result[2]))
self.assertEqual(121, video_tools.get_end_frame_from_segment(result[2]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[2]))
result = video_tools.dropFramesWithoutMask([video_tools.create_segment(**{
'startframe': 87,
'starttime': 2900,
'endframe': 93,
'endtime': 3100
})], sets, keepTime=True)
self.assertEqual(4, len(result))
self.assertEqual(12, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(11, video_tools.get_frames_from_segment(result[2]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(86, video_tools.get_end_frame_from_segment(result[1]))
self.assertEqual(94, video_tools.get_start_frame_from_segment(result[2]))
self.assertEqual(104, video_tools.get_end_frame_from_segment(result[2]))
self.assertEqual(123, video_tools.get_start_frame_from_segment(result[3]))
self.assertEqual(149, video_tools.get_end_frame_from_segment(result[3]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[2]))
self.assertEqual(1.3, video_tools.get_error_from_segment(result[3]))
result = video_tools.dropFramesWithoutMask([video_tools.create_segment(**{
'startframe': 1,
'starttime': 0,
'endframe': 93,
'endtime': 3100
})], sets)
self.assertEqual(2, len(result))
self.assertEqual(11, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(11, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(30, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(56, video_tools.get_end_frame_from_segment(result[1]))
result = video_tools.dropFramesWithoutMask([video_tools.create_segment(**{
'startframe': 93,
'starttime': 3100
})], sets)
self.assertEqual(2, len(result))
self.assertEqual(30, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(30, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(18, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(92, video_tools.get_end_frame_from_segment(result[1]))
result = video_tools.dropFramesWithoutMask([video_tools.create_segment(**{
'startframe': 1,
'starttime': 0,
'endframe': 93,
'endtime': 3100
})], sets, keepTime=True)
self.assertEqual(2, len(result))
self.assertEqual(11, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(94, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(104, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(123, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(149, video_tools.get_end_frame_from_segment(result[1]))
result = video_tools.dropFramesWithoutMask([video_tools.create_segment(**{
'startframe': 93,
'starttime': 3100
})], sets, keepTime=True)
self.assertEqual(2, len(result))
self.assertEqual(30, video_tools.get_frames_from_segment(result[0]))
self.assertEqual(1, video_tools.get_start_frame_from_segment(result[0]))
self.assertEqual(30, video_tools.get_end_frame_from_segment(result[0]))
self.assertEqual(18, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(92, video_tools.get_end_frame_from_segment(result[1]))
def after_general_all(self, sets, func):
result = func(
[video_tools.create_segment(**{
'startframe': 180,
'starttime': 6000,
'endframe': 210,
'endtime': 7000
})], sets)
self.assertEqual(2, len(result))
self.assertEqual(30, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
self._add_mask_files_to_kill(result)
result = func([video_tools.create_segment(**{
'startframe': 63,
'starttime': 2100,
'endframe': 90,
'endtime': 3000
})], sets)
self.assertEqual(2, len(result))
self.assertEqual(30, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(103, video_tools.get_start_frame_from_segment(result[1]))
self.assertEqual(1.1, video_tools.get_error_from_segment(result[0]))
self.assertEqual(1.2, video_tools.get_error_from_segment(result[1]))
self._add_mask_files_to_kill(result)
result = func([video_tools.create_segment(**{
'startframe': 81,
'starttime': 2700,
'endframe': 111,
'endtime': 3700
})], sets)
self.assertEqual(3, len(result))
self.assertEqual(6, video_tools.get_frames_from_segment(result[1]))
self.assertEqual(75, video_tools.get_start_frame_from_segment(result[1]))
        self.assertEqual(24,
[('ADD', 3, M6805_AddressMode.EXT), ['EXT'], lambda self, il, ext: il.set_reg(1, 'A', il.add(1, il.reg(1, 'A'), il.load(1, ext), 'NZC'))],
[('JMP', 3, M6805_AddressMode.EXT), ['EXT'], lambda self, il, ext: M6805.branch(il, ext)],
[('JSR', 3, M6805_AddressMode.EXT), ['EXT'], lambda self, il, ext: il.call(ext)],
[('LDX', 3, M6805_AddressMode.EXT), ['EXT'], lambda self, il, ext: il.set_reg(1, 'X', il.load(1, ext))],
[('STX', 3, M6805_AddressMode.EXT), ['EXT'], lambda self, il, ext: il.store(1, ext, il.reg(1, 'X'))],
# 0xd0-0xdf: Register/Memory IX2
[('SUB', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.sub(1, il.reg(1, 'A'), il.load(1, ix2), 'NZC'))],
[('CMP', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.sub(1, il.reg(1, 'A'), il.load(1, ix2), 'NZC')],
[('SBC', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.sub_borrow(1, il.reg(1, 'A'), il.load(1, ix2), il.flag('C'), 'NZC'))],
[('CPX', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.sub(1, il.reg(1, 'X'), il.load(1, ix2), 'NZC')],
[('AND', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.and_expr(1, il.reg(1, 'A'), il.load(1, ix2), 'NZ'))],
[('BIT', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.and_expr(1, il.reg(1, 'A'), il.load(1, ix2), 'NZ')],
[('LDA', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.load(1, ix2))],
[('STA', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.store(1, ix2, il.reg(1, 'A'))],
[('EOR', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.xor_expr(1, il.reg(1, 'A'), il.load(1, ix2), 'NZ'))],
[('ADC', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.add_carry(1, il.reg(1, 'A'), il.load(1, ix2), il.flag('C'), 'NZC'))],
[('ORA', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.or_expr(1, il.reg(1, 'A'), il.load(1, ix2), 'NZ'))],
[('ADD', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'A', il.add(1, il.reg(1, 'A'), il.load(1, ix2), 'NZC'))],
[('JMP', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: M6805.branch(il, ix2)],
        [('JSR', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.call(ix2)],
        [('LDX', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.set_reg(1, 'X', il.load(1, ix2))],
        [('STX', 3, M6805_AddressMode.IX2), ['IX2', 'X'], lambda self, il, ix2: il.store(1, ix2, il.reg(1, 'X'))],
# 0xe0-0xef: Register/Memory IX1
[('SUB', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.sub(1, il.reg(1, 'A'), il.load(1, ix1), 'NZC'))],
[('CMP', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.sub(1, il.reg(1, 'A'), il.load(1, ix1), 'NZC')],
[('SBC', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.sub_borrow(1, il.reg(1, 'A'), il.load(1, ix1), il.flag('C'), 'NZC'))],
[('CPX', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.sub(1, il.reg(1, 'X'), il.load(1, ix1), 'NZC')],
[('AND', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.and_expr(1, il.reg(1, 'A'), il.load(1, ix1), 'NZ'))],
[('BIT', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.and_expr(1, il.reg(1, 'A'), il.load(1, ix1), 'NZ')],
[('LDA', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.load(1, ix1))],
[('STA', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.store(1, ix1, il.reg(1, 'A'))],
[('EOR', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.xor_expr(1, il.reg(1, 'A'), il.load(1, ix1), 'NZ'))],
[('ADC', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.add_carry(1, il.reg(1, 'A'), il.load(1, ix1), il.flag('C'), 'NZC'))],
[('ORA', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.or_expr(1, il.reg(1, 'A'), il.load(1, ix1), 'NZ'))],
[('ADD', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'A', il.add(1, il.reg(1, 'A'), il.load(1, ix1), 'NZC'))],
[('JMP', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: M6805.branch(il, ix1)],
[('JSR', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.call(ix1)],
[('LDX', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.set_reg(1, 'X', il.load(1, ix1))],
[('STX', 2, M6805_AddressMode.IX1), ['IX1', 'X'], lambda self, il, ix1: il.store(1, ix1, il.reg(1, 'X'))],
# 0xf0-0xff: Register/Memory IX
[('SUB', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.sub(1, il.reg(1, 'A'), il.load(1, x), 'NZC'))],
        [('CMP', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.sub(1, il.reg(1, 'A'), il.load(1, x), 'NZC')],
        [('SBC', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.sub_borrow(1, il.reg(1, 'A'), il.load(1, x), il.flag('C'), 'NZC'))],
        [('CPX', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.sub(1, il.reg(1, 'X'), il.load(1, x), 'NZC')],
[('AND', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.and_expr(1, il.reg(1, 'A'), il.load(1, x), 'NZ'))],
[('BIT', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.and_expr(1, il.reg(1, 'A'), il.load(1, x), 'NZ')],
[('LDA', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.load(1, x))],
[('STA', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.store(1, x, il.reg(1, 'A'))],
[('EOR', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.xor_expr(1, il.reg(1, 'A'), il.load(1, x), 'NZ'))],
[('ADC', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.add_carry(1, il.reg(1, 'A'), il.load(1, x), il.flag('C'), 'NZC'))],
[('ORA', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.or_expr(1, il.reg(1, 'A'), il.load(1, x), 'NZ'))],
[('ADD', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'A', il.add(1, il.reg(1, 'A'), il.load(1, x), 'NZC'))],
[('JMP', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: M6805.branch(il, x)],
[('JSR', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.call(x)],
[('LDX', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.set_reg(1, 'X', il.load(1, x))],
        [('STX', 1, M6805_AddressMode.IX), ['', 'X'], lambda self, il, x: il.store(1, x, il.reg(1, 'X'))],
]
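    # Table layout (explanatory note): each entry pairs
    # (mnemonic, total length in bytes, addressing mode) with the operand
    # render list used by get_instruction_text() and a lambda that lifts the
    # instruction to LLIL. The opcode byte indexes the table directly; for
    # example, 0xF7 lands on the STA/IX row, a one-byte store of A at the
    # address held in X.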
@staticmethod
def branch(il, target):
# try to find a label for the branch target
if isinstance(target, LowLevelILInstruction) and target.operation in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
target_label = il.get_label_for_address(il.arch, target.operand[0].value)
if target_label is not None:
il.append(il.goto(target_label))
return
il.append(il.jump(target))
@staticmethod
def bit_test_branch(il, length, target, addr, bit, invert = False):
# try to find a label for the branch target
if isinstance(target, LowLevelILInstruction) and target.operation in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
taken_label = il.get_label_for_address(il.arch, target.operand[0].value)
else:
taken_label = None
# create taken target
taken_found = True
if taken_label is None:
taken_label = LowLevelILLabel()
taken_found = False
# create untaken target
untaken_found = True
untaken_label = il.get_label_for_address(il.arch, il.current_address + length)
if untaken_label is None:
untaken_label = LowLevelILLabel()
untaken_found = False
# generate the conditional branch LLIL
il.append(il.rotate_right(1, il.load(1, addr), il.const(1, bit + 1), 'C'))
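        # (explanatory note) rotating the loaded byte right by bit+1 moves the
        # tested bit into the carry flag, so the if_expr below can branch on
        # flag('C') without a dedicated bit-test IL operation.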
if not invert:
il.append(il.if_expr(il.flag('C'), taken_label, untaken_label))
else:
il.append(il.if_expr(il.flag('C'), untaken_label, taken_label))
# generate a jump to the branch target if a label couldn't be found
if not taken_found:
il.mark_label(taken_label)
il.append(il.jump(target))
# generate a label for the untaken branch
if not untaken_found:
il.mark_label(untaken_label)
@staticmethod
def cond_branch(il, length, target, cond, invert = False):
# try to find a label for the branch target
if isinstance(target, LowLevelILInstruction) and target.operation in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
taken_label = il.get_label_for_address(il.arch, target.operand[0].value)
else:
taken_label = None
# create taken target
taken_found = True
if taken_label is None:
taken_label = LowLevelILLabel()
taken_found = False
# create untaken target
untaken_found = True
untaken_label = il.get_label_for_address(il.arch, il.current_address + length)
if untaken_label is None:
untaken_label = LowLevelILLabel()
untaken_found = False
# generate the conditional branch LLIL
if not invert:
il.append(il.if_expr(cond, taken_label, untaken_label))
else:
il.append(il.if_expr(cond, untaken_label, taken_label))
# generate a jump to the branch target if a label couldn't be found
if not taken_found:
il.mark_label(taken_label)
il.append(il.jump(target))
# generate a label for the untaken branch
if not untaken_found:
il.mark_label(untaken_label)
def get_instruction_info(self, data, addr):
# instruction lookup
instruction = self.instructions[struct.unpack('>B', data[0])[0]]
if instruction is None:
return None
(opcode, length, mode) = instruction[0]
result = InstructionInfo()
result.length = length
# add branches
if opcode in ['RTS', 'RTI']:
result.add_branch(BranchType.FunctionReturn)
elif opcode in ['JMP']:
if mode == M6805_AddressMode.DIR:
result.add_branch(BranchType.UnconditionalBranch, struct.unpack('>B', data[1])[0])
elif mode == M6805_AddressMode.EXT:
result.add_branch(BranchType.UnconditionalBranch, struct.unpack('>H', data[1:3])[0])
else:
result.add_branch(BranchType.UnresolvedBranch)
elif opcode in ['BRA']:
result.add_branch(BranchType.UnconditionalBranch, addr + length + struct.unpack('>b', data[1])[0])
elif opcode.startswith('BRSET') or opcode.startswith('BRCLR'):
result.add_branch(BranchType.TrueBranch, addr + length + struct.unpack('>b', data[2])[0])
result.add_branch(BranchType.FalseBranch, addr + length)
elif opcode in ['BRN', 'BHI', 'BLS', 'BCC', 'BCS', 'BNE', 'BEQ', 'BHCC', 'BHCS', 'BPL', 'BMI', 'BMC', 'BMS', 'BIL', 'BIH']:
result.add_branch(BranchType.TrueBranch, addr + length + struct.unpack('>b', data[1])[0])
result.add_branch(BranchType.FalseBranch, addr + length)
elif opcode in ['BSR', 'JSR']:
if mode == M6805_AddressMode.DIR:
result.add_branch(BranchType.CallDestination, struct.unpack('>B', data[1])[0])
elif mode == M6805_AddressMode.EXT:
result.add_branch(BranchType.CallDestination, struct.unpack('>H', data[1:3])[0])
else:
result.add_branch(BranchType.UnresolvedBranch)
return result
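    # Example of the decoding above (explanatory note, values illustrative):
    # for data = b'\x20\x05' (BRA, relative) at addr 0x100, get_instruction_info
    # reports length 2 and an UnconditionalBranch to 0x100 + 2 + 5 = 0x107.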
def get_instruction_text(self, data, addr):
# instruction lookup
instruction = self.instructions[struct.unpack('>B', data[0])[0]]
if instruction is None:
return None
(opcode, length, mode) = instruction[0]
# opcode
tokens = [InstructionTextToken(InstructionTextTokenType.InstructionToken, '{:6}'.format(opcode))]
# operands
offset = 1
for operand in instruction[1]:
# add a separator if needed
if len(tokens) > 1:
tokens += [InstructionTextToken(InstructionTextTokenType.OperandSeparatorToken, ',')]
if operand == 'IMM':
immediate = struct.unpack('>B', data[offset])[0]
tokens += [InstructionTextToken(InstructionTextTokenType.IntegerToken, '#${:X}'.format(immediate), immediate)]
offset += 1
elif operand == 'DIR':
        except IloResponseError:
pass
self.auxcommands['logout'].run("")
@log_decor
def smbiostesting(self):
""" smbios automatic testing """
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("***************************SMBIOS "
"AUTOMATIC TESTING******************************\n")
self.rdmc.ui.printer("***************************************************"
"******************************\n\n")
if self.rdmc.app.typepath.flagiften:
self.auxcommands['login'].run("")
self.auxcommands['smbios'].run("smbios")
self.auxcommands['logout'].run("")
else:
self.rdmc.ui.printer("Skipping smbios testing, server not gen10.\n")
@log_decor
def directoryautomatictesting(self):
""" Directory command automatic testing """
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("*************************DIRECTORY "
"AUTOMATIC TESTING*****************************\n")
self.rdmc.ui.printer("***************************************************"
"******************************\n\n")
self.rdmc.ui.printer("Testing settings output.\n")
self.auxcommands['directory'].run("kerberos")
self.auxcommands['directory'].run("ldap")
self.auxcommands['directory'].run("kerberos -j")
self.auxcommands['directory'].run("ldap -j")
self.rdmc.ui.printer("Testing setting properties.\n")
self.auxcommands['directory'].run("kerberos --serviceaddress test.account --port 1337 --realm "
"testrealm")
self.auxcommands['directory'].run("ldap testusername testpassword --enable")
self.auxcommands['directory'].run("ldap --serviceaddress test2.account --addsearch autotestsearch,"
"autotestsearch2")
self.rdmc.ui.printer("Testing adding roles.\n")
self.auxcommands['directory'].run('kerberos --addrole "Administrator:a test,'
'ReadOnly:another test" --disable')
self.rdmc.ui.printer("Validating changes...\n")
results = self.rdmc.app.select(selector='AccountService.', path_refresh=True)[0].dict
if results['LDAP']['Authentication']['Username'] == 'testusername':
self.rdmc.ui.printer("Validated Username.\n")
else:
sys.stderr.write("Username not changed.\n")
if results['ActiveDirectory']['ServiceAddresses'][0] == 'test.account:1337' and \
results['LDAP']['ServiceAddresses'][0] == 'test2.account':
self.rdmc.ui.printer("Validated Service addresses.\n")
else:
sys.stderr.write("Service addresses not changed.\n")
raise IloResponseError("")
if results['Oem']['Hpe']['DirectorySettings']['LdapServerPort'] == 55 and \
results['Oem']['Hpe']['KerberosSettings']['KDCServerPort'] == 1337:
self.rdmc.ui.printer("Validated Ports.\n")
else:
sys.stderr.write("Ports not changed.\n")
raise IloResponseError("")
if results['Oem']['Hpe']['KerberosSettings']['KerberosRealm'] == 'testrealm':
self.rdmc.ui.printer("Validated Realm.\n")
else:
sys.stderr.write("Realm not changed.\n")
raise IloResponseError("")
if 'autotestsearch' in results['LDAP']['LDAPService']['SearchSettings']\
['BaseDistinguishedNames'] and 'autotestsearch2' in results['LDAP']\
['LDAPService']['SearchSettings']['BaseDistinguishedNames']:
self.rdmc.ui.printer("Validated SearchSettings.\n")
else:
sys.stderr.write("SearchSettings not changed.\n")
if results['LDAP']['ServiceEnabled'] and not results['ActiveDirectory']['ServiceEnabled']:
self.rdmc.ui.printer("Validated ServiceEnabled.\n")
else:
sys.stderr.write("Service not enabled/disabled.\n")
raise IloResponseError("")
rolecount = 0
for role in results['LDAP']['RemoteRoleMapping']:
if role['RemoteGroup'] == 'a test' or \
role['RemoteGroup'] == 'another test':
rolecount += 1
if rolecount == 2:
self.rdmc.ui.printer("Validated Role mappings.\n")
else:
sys.stderr.write("Remote roles not changed.\n")
raise IloResponseError("")
self.rdmc.ui.printer("Removing changes...\n")
self.auxcommands['directory'].run('kerberos --serviceaddress "" --realm ""')
self.auxcommands['directory'].run('ldap --serviceaddress "" --disable --removesearch '
'autotestsearch,autotestsearch2')
self.auxcommands['directory'].run('ldap --removerole "dirgroupa test,dirgroupanother test"')
self.rdmc.ui.printer("Validating removal.\n")
results = self.rdmc.app.select(selector='AccountService.', path_refresh=True)[0].dict
if not results['LDAP']['ServiceEnabled']:
self.rdmc.ui.printer("Validated Service disabled.\n")
else:
sys.stderr.write("Service not disabled.\n")
raise IloResponseError("")
if not results['ActiveDirectory']['ServiceAddresses'][0] and not\
results['LDAP']['ServiceAddresses'][0]:
self.rdmc.ui.printer("Validated Service addresses.\n")
else:
sys.stderr.write("Service addresses not removed.\n")
raise IloResponseError("")
if not results['Oem']['Hpe']['KerberosSettings']['KerberosRealm']:
self.rdmc.ui.printer("Validated Realm.\n")
else:
sys.stderr.write("Realm not removed.\n")
raise IloResponseError("")
        if 'autotestsearch' not in results['LDAP']['LDAPService']['SearchSettings']\
                ['BaseDistinguishedNames'] and 'autotestsearch2' not in results['LDAP']\
                ['LDAPService']['SearchSettings']['BaseDistinguishedNames']:
self.rdmc.ui.printer("Validated SearchSettings.\n")
else:
sys.stderr.write("SearchSettings not removed.\n")
raise IloResponseError("")
rolecount = 0
for role in results['LDAP']['RemoteRoleMapping']:
if role['RemoteGroup'] == 'a test' or \
role['RemoteGroup'] == 'another test':
rolecount += 1
if rolecount == 0:
self.rdmc.ui.printer("Validated Role mappings.\n")
else:
sys.stderr.write("Remote roles not removed.\n")
raise IloResponseError("")
@log_decor
def serverclonecommandautomatictesting(self, local):
""" copy command automatic testing """
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("**************************SERVER CLONE "
"AUTOMATIC TESTING*************************\n")
self.rdmc.ui.printer("***************************************************"
"******************************\n\n")
url_path = "http://infinitymaster.us.rdlabs.hpecorp.net:1051/automatic_testing/"\
"serverclone_test_archives/"
ue_clone_file_name = "ilorest_clone_ue.json"
enc_clone_file_name = "ilorest_clone_enc.json"
delete_list = [ue_clone_file_name, enc_clone_file_name]
        error = {}
        tmp_file = ""  # predefined so the error handlers below can always reference it
self.auxcommands['login'].loginfunction("")
try:
self.rdmc.ui.printer("Testing \'save\' operation (unencrypted)...\n")
line_str = "save --nobios --silent -f " + ue_clone_file_name
self.auxcommands['serverclone'].run(line_str)
self.rdmc.ui.printer("Testing \'load\' operation (unencrypted)...\n")
line_str = "load --silent -f "
tmp_file = self.serverclone_helper(ue_clone_file_name, line_str)
delete_list.append(tmp_file)
self.auxcommands['serverclone'].run(line_str + tmp_file)
self.rdmc.ui.printer("Unencrypted Clone Test Complete\n")
except Exception as excp:
self.rdmc.ui.printer("Unencrypted Clone Test Failed\n")
with open('clone_error_logfile.log', 'r') as err_logfile:
error_log = err_logfile.read()
with open('changelog.log', 'r') as chng_logfile:
try:
chng_log = json.loads(chng_logfile.read())
except ValueError:
chng_log = chng_logfile.read()
error['unencrypted_clone'] = {
'base_error': "An error occurred working with serverclone unencrypted save/load: {}"
.format(excp),
'command': line_str + tmp_file,
'traceback': traceback.format_exc(),
'clone_error_logfile': error_log,
'change_log': chng_log }
try:
self.rdmc.ui.printer("Testing \'save\' operation (encrypted)...\n")
line_str = "save --encryption HPESecretAESKey1 --silent --nobios -f " + enc_clone_file_name
self.auxcommands['serverclone'].run(line_str)
self.rdmc.ui.printer("Testing \'load\' operation (encrypted)...\n")
line_str = "load --silent --encryption HPESecretAESKey1 -f "
tmp_file = self.serverclone_helper(enc_clone_file_name, line_str)
delete_list.append(tmp_file)
            self.auxcommands['serverclone'].run(line_str + tmp_file)
self.rdmc.ui.printer("Encrypted Clone Test Complete\n")
except Exception as excp:
self.rdmc.ui.printer("Encrypted Clone Test Failed\n")
with open('clone_error_logfile.log', 'r') as err_logfile:
error_log = err_logfile.read()
with open('changelog.log', 'r') as chng_logfile:
try:
chng_log = json.loads(chng_logfile.read())
except ValueError:
chng_log = chng_logfile.read()
error['encrypted_clone'] = {
                'base_error': "An error occurred working with serverclone encrypted save/load: {}"
.format(excp),
'command': line_str + tmp_file,
'traceback': traceback.format_exc(),
'clone_error_logfile': error_log,
'change_log': chng_log }
''' #DO NOT OPEN UNTIL CHRISTMAS (Ok maybe earlier)
self.rdmc.ui.printer("Attempting load of clone files from NFS server...\n")
for item in self.automatictesting_helper_fb(url_path):
f_name = ""
clone_file = urlopen(url_path + item)
data = clone_file.read()
with open(item, 'w+b') as target:
target.write(data)
if isinstance(item, six.string_types):
f_name += item
delete_list.append(f_name)
self.auxcommands['serverclone'].run("load --silent -f " + f_name)
self.rdmc.ui.printer("Test Complete...cleaning up.\n")
'''
try:
err_list, err_str = self.automatictesting_helper_fd(delete_list)
if err_list:
raise Exception(err_str)
except Exception as excp:
error['delete_cfs'] = "An error occurred deleting clone files: \'%s\'" % str(excp)
if error:
raise Exception("The following exceptions occured in ServerClone:\n {}".format(error))
@log_decor
def cloningautomatictesting(self):
""" cloning automatic testing """
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("**************************CLONING "
"AUTOMATIC TESTING******************************\n")
self.rdmc.ui.printer("***************************************************"
"******************************\n\n")
if not self.rdmc.app.typepath.flagiften:
self.rdmc.ui.printer("Skipping iloclone command, server is not gen10.\n")
return ReturnCodes.SUCCESS
self.auxcommands['login'].run("")
self.cloneobj.run("save -f CLONETEST.json", testing=True)
self.auxcommands['logout'].run("")
self.auxcommands['login'].run("")
self.cloneobj.run("load -f CLONETEST.json", testing=True)
self.auxcommands['logout'].run("")
@log_decor
def pendingautomatictesting(self):
""" pending automatic testing """
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("**************************PENDING "
"AUTOMATIC TESTING******************************\n")
self.rdmc.ui.printer("***************************************************"
"******************************\n\n")
self.auxcommands['login'].run("")
self.auxcommands['pending'].run("")
self.auxcommands['logout'].run("")
@log_decor
def deletecomtesting(self):
"""Delete component command testing"""
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("*************************DELETE "
"COMPONENT TESTING******************************\n")
self.rdmc.ui.printer("***************************************************"
"********************************\n\n")
self.auxcommands['login'].run("")
if not self.rdmc.app.typepath.flagiften:
self.rdmc.ui.printer("Skipping delete component command, server is not gen10.\n")
return ReturnCodes.SUCCESS
self.auxcommands['deletecomp'].run("-a")
self.auxcommands['logout'].run("")
@log_decor
def uploadcomptesting(self):
"""Upload component command testing"""
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("***************************UPLOAD "
"COMPONENT TESTING******************************\n")
self.rdmc.ui.printer("*************************************************"
"********************************\n\n")
error_dict = dict()
self.auxcommands['login'].run("")
url_path = "http://infinitymaster.us.rdlabs.hpecorp.net:1051/automatic_testing/components"\
"/COMPONENTS/"
if not self.rdmc.app.typepath.flagiften:
self.rdmc.ui.printer("Skipping upload component command, server is not gen10.\n")
return ReturnCodes.SUCCESS
#skip = False
#compname = "cp029917.exe"
#compsigname_1 = "cp029917_part1.compsig"
#compsigname_2 = "cp029917_part2.compsig"
#compnamepath = os.path.join(os.getcwd(), compname)
#compsigname_1path = os.path.join(os.getcwd(), compsigname_1)
#compsigname_2path = os.path.join(os.getcwd(), compsigname_2)
components_dict = self.automatictesting_helper_fb(url_path, True)
delete_list = []
for _, val in list(components_dict.items()):
compy = None
try:
uploadstr = ""
for file in val:
if len(file.split('.')) <= 2:
tmparr = file.split('.')
fld = urlopen(url_path + file)
data = fld.read()
with open(file, 'w+b') as target:
target.write(data)
delete_list.append(file)
if 'compsig' in tmparr[-1]:
uploadstr += ' --compsig='
else:
uploadstr += ' --component='
compy = file
uploadstr += file
self.auxcommands['uploadcomp'].run(uploadstr + " --forceupload")
except Exception:
error_dict[compy] = uploadstr
self.rdmc.ui.printer("iLO flagged an error while uploading the"\
" previous files: %s\n" %uploadstr)
self.rdmc.ui.printer("Check for correct file type and compsig.\n")
continue
#cleanup routine
self.automatictesting_helper_fd(delete_list)
if error_dict:
try:
raise ValueError
except ValueError as err:
if not err.args:
err.args = ("iLO flagged an error with components: ",)
for item in error_dict:
err.args = err.args + ("Component: %s, Full String: %s" \
% (item, error_dict[item]),)
raise
# try:
# if not os.path.isfile(compnamepath):
# (compname, _) = urlretrieve("http://infinitymaster:81"\
# "/automatic_testing/" \
# + compname, compname)
#
# if not os.path.isfile(compsigname_1path):
# (compsigname_1, _) = urlretrieve(\
# "http://16.83.62.70/jack/" + compsigname_1, compsigname_1)
#
# if not os.path.isfile(compsigname_2path):
# (compsigname_2, _) = urlretrieve(\
# "http://16.83.62.70/jack/" + compsigname_2, compsigname_2)
# except:
# skip = True
#
# if not skip:
# self.auxcommands['uploadcomp'].run("--component={0} --compsig={1} " \
# "--forceupload".format(compname, compsigname_1))
# #self.auxcommands['uploadcomp'].run("--component=firmware-nic-qlogic-nx2-2." \
# # "19.6-1.1.x86_64.rpm --compsig=firmware-" \
# # "nic-qlogic-nx2-2.19.6-1.1.x86_64.compsig")
#
# self.auxcommands['logout'].run("")
# else:
# self.rdmc.ui.printer("Could not complete test due to missing test " \
# "files.\n")
@log_decor
def listanddownloadcomptesting(self):
"""List and Download component command testing"""
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("*************************DOWNLOAD "
"COMPONENT TESTING******************************\n")
self.rdmc.ui.printer("***************************************************"
"******************************\n\n")
delete_list = list()
if not self.rdmc.app.typepath.flagiften:
self.rdmc.ui.printer("Skipping download component command, server is not gen10.\n")
return ReturnCodes.SUCCESS
self.auxcommands['login'].run("")
self.rdmc.ui.printer("Components found in iLO Repository:\n\n")
self.auxcommands['listcompt'].run("")
comps = self.rdmc.app.getcollectionmembers(
'/redfish/v1/UpdateService/ComponentRepository/')
for item in comps:
uri_str = ""
uri_str += "/fwrepo/" + item['Filename']
self.rdmc.ui.printer("Downloading component: \'%s\'.\n" % item['Filename'])
self.auxcommands['downloadcomp'].run(uri_str)
self.rdmc.ui.printer("Successfully downloaded \'%s\' from iLO " \
"Repository.\n" % item['Filename'])
delete_list.append(item['Filename'])
self.rdmc.ui.printer("Test Complete...cleaning up.\n")
self.automatictesting_helper_fd(delete_list)
self.auxcommands['logout'].run("")
return ReturnCodes.SUCCESS
@log_decor
def deletecomptesting(self):
"""Delete from component repository testing"""
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("***************************DELETE "
"COMPONENT TESTING******************************\n")
self.rdmc.ui.printer("***************************************************"
"******************************\n\n")
        return  # NOTE: the remainder of this test is currently disabled
if not self.rdmc.app.typepath.flagiften:
self.rdmc.ui.printer("Skipping download component command, server is not gen10.\n")
return ReturnCodes.SUCCESS
self.auxcommands['login'].run("")
self.rdmc.ui.printer("Components found in iLO Repository:\n\n")
self.auxcommands['listcompt'].run("")
comps = self.rdmc.app.getcollectionmembers(
'/redfish/v1/UpdateService/ComponentRepository/')
for item in comps:
#str = ""
#str += "/fwrepo/" + item['Filename']
self.rdmc.ui.printer("Deleting component: \'%s\'.\n" % item['Filename'])
self.auxcommands['deletecomp'].run(item['Filename'])
self.rdmc.ui.printer("Successfully downloaded \'%s\' from iLO " \
"Repository.\n" % item['Filename'])
self.rdmc.ui.printer("Test Complete...\n")
self.auxcommands['logout'].run("")
return ReturnCodes.SUCCESS
@log_decor
def installsettesting(self):
"""Install set command testing"""
self.rdmc.ui.printer("\n*************************************************"
"********************************\n")
self.rdmc.ui.printer("*************************INSTALL "
"SET COMMAND TESTING*****************************\n")
self.rdmc.ui.printer("**************************************************"
"*******************************\n\n")
self.auxcommands['login'].run("")
if not self.rdmc.app.typepath.flagiften:
self.rdmc.ui.printer("Skipping install set command, server is not gen10.\n")
return ReturnCodes.SUCCESS
installsetlist = []
url = "http://infinitymaster.us.rdlabs.hpecorp.net:1051/automatic_testing/install_sets/"
#skip MakeInstallSet
installsetlist = self.automatictesting_helper_fb(url)
i = 0
self.auxcommands['login'].loginfunction("")
for installset in installsetlist:
try:
(installsetfile, _) = urlretrieve(url + installset,
installset)
i = i+1
self.rdmc.ui.printer("Uploading Installset: %s\n" % installsetfile)
self.auxcommands['installset'].run("add " + installset)
self.rdmc.ui.printer("Invoking Installset: %s\n" % installsetfile)
self.auxcommands['installset'].run("invoke --name=TestSet" + str(i) + \
" --cleartaskqueue")
self.rdmc.ui.printer("Removing Installset: %s\n" % installsetfile)
self.auxcommands['installset'].run("delete --name=TestSet" + str(i))
except Exception as excp:
self.rdmc.ui.printer("A general error occured while attempting to "\
"use the file: %s. The following error was "\
"logged: %s\n" % (installsetfile, excp))
self.rdmc.ui.printer("Check for missing test files\n")
continue
self.rdmc.ui.printer("Removing any | |
add_to_header(self.ccd_e_proc.header, 'h', s, t_ref=_t)
# ================================================================================================== #
# * 6. CR-rejection
# ================================================================================================== #
if do_crrej:
crkw = dict(
crrej_kw=crrej_kw,
filt=self.filt,
verbose=verbose_crrej,
full=True,
update_header=True
)
self.ccd_o_bdfc, self.mask_o_cr, self.crrej_kw = cr_reject_nic(
self.ccd_o_proc,
mask=self.mask_o_proc,
**crkw
)
self.ccd_e_bdfc, self.mask_e_cr, _ = cr_reject_nic(
self.ccd_e_proc,
mask=self.mask_e_proc,
**crkw
)
self.ccd_o_proc = self.ccd_o_bdfc.copy()
self.ccd_e_proc = self.ccd_e_bdfc.copy()
self.mask_o_proc = self.mask_o_proc | self.mask_o_cr
self.mask_e_proc = self.mask_e_proc | self.mask_e_cr
# ================================================================================================== #
# * 8. Trim by bezel widths
# ================================================================================================== #
bzkw = dict(bezel_x=bezel_x, bezel_y=bezel_y, replace=replace)
self.ccd_o_proc = bezel_ccd(self.ccd_o_proc, **bzkw, verbose=verbose)
self.ccd_e_proc = bezel_ccd(self.ccd_e_proc, **bzkw, verbose=verbose)
self.mask_o_proc = bezel_ccd(self.mask_o_proc, **bzkw, verbose=verbose)
self.mask_e_proc = bezel_ccd(self.mask_e_proc, **bzkw, verbose=verbose)
# # ==============================================================
# # * 8. Add "list" version of CCDs
# # ==============================================================
# # By not copying, they're linked together and updated simult.ly.
# self.ccd_bdfx = [self.ccd_o_bdfx , self.ccd_e_bdfx ]
# self.ccd_bdfc = [self.ccd_o_bdfc , self.ccd_e_bdfc ]
# self.ccd_proc = [self.ccd_o_proc , self.ccd_e_proc]
# self.mask_proc = [self.mask_o_proc , self.mask_e_proc ]
# self.err = [self.err_o , self.err_e ]
# self.flat = [self.ccd_flat_o , self.ccd_flat_e ]
# self.dark = [self.ccd_dark_o , self.ccd_dark_e ]
def edgereplace(self, bezel_x=(5, 5), bezel_y=(5, 5), replace=np.nan):
        ''' Replace edge values with null for a better zscale display.
Parameters
----------
bezel_x, bezel_y : array-like of int
The x and y bezels, in ``[lower, upper]`` convention. If
``0``, no replacement happens.
replace : int, float, nan, optional.
The value to replace the pixel value where it should be
masked. If `None`, the ccds will be trimmed.
'''
pass
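        # A minimal sketch of what this stub could do (left inactive on purpose),
        # assuming bezel_ccd accepts the same keywords already used in step 8 of
        # the pipeline above:
        #   bzkw = dict(bezel_x=bezel_x, bezel_y=bezel_y, replace=replace)
        #   self.ccd_o_proc = bezel_ccd(self.ccd_o_proc, **bzkw)
        #   self.ccd_e_proc = bezel_ccd(self.ccd_e_proc, **bzkw)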
# def edgemask(self, bezel_x=(10, 10), bezel_y=(10, 10),
# sigma_lower=1, sigma_upper=1,
# maxiters=10, edge_ksigma=3, replace=np.nan):
# ''' Replace edge values to null for better zscale display.
# Parameters
# ----------
# bezel_x, bezel_y : int, float, list of such, optional.
# The x and y bezels, in ``[lower, upper]`` convention.
# replace : int, float, nan, None, optional.
# The value to replace the pixel value where it should be
# masked. If `None`, nothing is replaced, but only the
# ``self.mask_proc`` will have been updated.
# '''
# def _idxmask(maskarr):
# try:
# idx_mask = np.max(np.where(maskarr))
# except ValueError: # sometimes no edge is detected
# idx_mask = 0
# return idx_mask
# sc_kw = dict(sigma_lower=sigma_lower,
# sigma_upper=sigma_upper,
# maxiters=maxiters)
# self._edge_sigclip_mask = [None, None]
# # Iterate thru o-/e-ray
# for i, (ccd, mask) in enumerate(zip(self.ccd_proc, self.mask_proc)):
# ny, nx = ccd.data.shape
# _, med, std = sigma_clipped_stats(ccd.data, mask, **sc_kw)
# scmask = (ccd.data < (med - edge_ksigma*std))
# self._edge_sigclip_mask[i] = scmask
# # Mask for more than half of the total N is masked
# xmask = np.sum(scmask, axis=0) > ny/2
# ymask = np.sum(scmask, axis=1) > nx/2
# # Sometimes "low level row/col" may occur other than edge.
# # Find whether the edge is at left/right of x and
# # upper/lower of y.
# isleft = (np.sum(xmask[:nx//2]) > np.sum(xmask[nx//2:]))
# islowr = (np.sum(ymask[:ny//2]) > np.sum(ymask[ny//2:]))
# # * Set mask for x-axis edge
# if isleft:
# ix = np.min([_idxmask(xmask[:nx//2]), bezel_x[0]])
# sx = (slice(None, None, None), slice(None, ix, None))
# else:
# ix = np.min([_idxmask(xmask[nx//2:]), nx - bezel_x[0]])
# sx = (slice(None, None, None), slice(ix, None, None))
# # * Set mask for y-axis edge
# if islowr:
# iy = np.min([_idxmask(ymask[:ny//2]), bezel_y[0]])
# sy = (slice(None, iy, None), slice(None, None, None))
# else:
# iy = np.max([_idxmask(ymask[ny//2:]), ny - bezel_y[1]])
# sy = (slice(iy, None, None), slice(None, None, None))
# mask[sx] = True
# mask[sy] = True
# if replace is not None:
# ccd.data[sx] = replace
# ccd.data[sy] = replace
def find_obj(self, thresh=3, bezel_x=(40, 40), bezel_y=(200, 120),
box_size=(64, 64), filter_size=(12, 12), deblend_cont=1,
minarea=100, verbose=True,
**extract_kw):
bkg_kw = dict(maskthresh=0.0, filter_threshold=0.0,
box_size=box_size, filter_size=filter_size)
ext_kw = dict(thresh=thresh, minarea=minarea,
deblend_cont=deblend_cont, bezel_x=bezel_x,
bezel_y=bezel_y, **extract_kw)
sepv = sep.__version__
s_bkg = f"Background estimated from sep (v {sepv}) with {bkg_kw}."
s_obj = "Objects found from sep (v {}) with {}."
_t = Time.now()
self.bkg_o = sep_back(
self.ccd_o_proc.data,
mask=self.mask_o_proc,
**bkg_kw
)
add_to_header(self.ccd_o_proc.header, 'h', s_bkg,
verbose=verbose, t_ref=_t)
_t = Time.now()
self.obj_o, self.seg_o = sep_extract(
self.ccd_o_proc.data,
bkg=self.bkg_o,
err=self.err_o,
mask=self.mask_o_proc,
**ext_kw)
_s = s_obj.format(sepv, ext_kw)
add_to_header(self.ccd_o_proc.header, 'h', _s,
verbose=verbose, t_ref=_t)
_t = Time.now()
self.bkg_e = sep_back(
self.ccd_e_proc.data,
mask=self.mask_e_proc,
**bkg_kw
)
add_to_header(self.ccd_e_proc.header, 'h', s_bkg,
verbose=verbose, t_ref=_t)
_t = Time.now()
self.obj_e, self.seg_e = sep_extract(
self.ccd_e_proc.data,
bkg=self.bkg_e,
err=self.err_e,
mask=self.mask_e_proc,
**ext_kw)
_s = s_obj.format(sepv, ext_kw)
add_to_header(self.ccd_e_proc.header, 'h', _s,
verbose=verbose, t_ref=_t)
self.nobj_o = len(self.obj_o)
self.nobj_e = len(self.obj_e)
for ccd, obj, n, oe in zip([self.ccd_o_proc, self.ccd_e_proc],
[self.obj_o, self.obj_e],
[self.nobj_o, self.nobj_e],
['o', 'e']
):
ccd.header["NOBJ-SEP"] = (n, "Number of objects found from SEP.")
if n < 1:
warn(f"No object found for {oe}-ray of {self.file}!", Warning)
elif n > 1:
_sort_obj(ccd, obj, nobj=n, verbose=verbose)
warn(f"No object found for {oe}-ray of {self.file}!", Warning)
# def ellipphot_sep(self, f_ap=(2., 2.), f_in=(4., 4.), f_out=(6., 6.),
# g_init_o=None, g_init_e=None, keys=USEFUL_KEYS,
# verbose=True):
# '''
# Parameters
# ----------
# f_ap, f_in, f_out: int or float, array-like of such, optional.
# The factors multiplied to ``fwhm`` to set the aperture ``a``
# and ``b``, inner sky ``a`` and ``b``, and outer sky ``a``
# and ``b``, respectively. If scalar, it is assumed to be
# identical for both ``a`` and ``b`` parameters. Defaults are
# ``(1.5, 1.5)``, ``(4.0, 4.0)``, and ``(6.0, 6.0)``,
# respectively, which are de facto standard values used by
# classical IRAF users.
# g_init_o, g_init_e : astropy FunctionalModel2D, None, optional.
# The Gaussian initial guess of the PSF of objects in o- and
# e-ray, respectively.
# keys : list of str, None, optional.
# The list of header keywords to be appended to the
# ``self.phot_o`` and ``self.phot_e``.
# Note
# ----
# The sep A/B paramters are for the ellipse to describe the
# "boundary". We need flux-describing Gaussian, so I need to do 2D
# gaussian fitting.
# '''
# s_phot = ('Photometry done for elliptical aperture/annulus with '
# + f"f_ap = {f_ap}, f_in = {f_in}, f_out = {f_out}"
# + "for FWHM = ({:.3f}, {:.3f})")
# fs = dict(f_ap=f_ap, f_in=f_in, f_out=f_out)
# if self.nobj_o < 1:
# self.fwhm_o = (np.nan, np.nan)
# self.ap_o = None
# self.an_o = None
# self.phot_o = pd.DataFrame([[np.nan]*len(_PHOT_COLNAMES)],
# columns=_PHOT_COLNAMES)
# _append_to_phot(self.phot_o, self.ccd_o_proc.header,
# fpath=self.file.name, nboj=self.nobj_o, keys=keys)
# else:
# _t = Time.now()
# self.ap_o, self.an_o, self.phot_o = _eap_phot(
# self.ccd_o_proc,
# self.err_o,
# self.gfit_o,
# **fs
# )
# s = s_phot.format(*self.fwhm_o)
# _append_to_phot(self.phot_o, self.ccd_o_proc.header,
# fpath=self.file.name, nobj=self.nobj_o, keys=keys)
# add_to_header(self.ccd_o_proc.header, 'h', s,
# verbose=verbose, t_ref=_t)
# if self.nobj_e < 1:
# self.gfit_e = None
# self.fitter_e = None
# self.fwhm_e = (np.nan, np.nan)
# self.ap_e = None
# self.an_e = None
# self.phot_e = pd.DataFrame([[np.nan]*len(_PHOT_COLNAMES)],
# columns=_PHOT_COLNAMES)
# _append_to_phot(self.phot_o, self.ccd_o_proc.header,
# fpath=self.file.name, nboj=self.nobj_e, keys=keys)
# else:
# _t = Time.now()
# self.gfit_e, self.fitter_e = _nic_Gaussian2D_fit(
# self.ccd_e_proc,
# self.obj_e,
# err=self.err_e,
# g_init=g_init_e
# )
# # self.gfit_e.x_fwhm = self.gfit_e.x_stddev*gaussian_sigma_to_fwhm
# # self.gfit_e.y_fwhm = self.gfit_e.y_stddev*gaussian_sigma_to_fwhm
# self.fwhm_e = (self.gfit_e.x_fwhm, self.gfit_e.y_fwhm)
# add_to_header(self.ccd_e_proc.header, 'h', s_fit,
# verbose=verbose, t_ref=_t)
# _t = Time.now()
# self.ap_e, self.an_e, self.phot_e = _eap_phot(
# self.ccd_e_proc,
# self.err_e,
# self.gfit_e,
# **fs
# )
# s = s_phot.format(*self.fwhm_e)
# _append_to_phot(self.phot_e, self.ccd_e_proc.header,
# fpath=self.file.name, nobj=self.nobj_e, keys=keys)
# add_to_header(self.ccd_e_proc.header, 'h', s,
# verbose=verbose, t_ref=_t)
def ellipphot_fit(self, f_ap=(2., 2.), f_in=(4., 4.), f_out=(6., 6.),
g_init_o=None, g_init_e=None, keys=USEFUL_KEYS,
verbose=True):
'''
Parameters
----------
f_ap, f_in, f_out: int or float, array-like of such, optional.
The factors multiplied to ``fwhm`` to set the aperture ``a``
and ``b``, inner sky ``a`` and ``b``, and outer sky ``a``
and ``b``, respectively. If scalar, it is assumed to be
identical for both ``a`` and ``b`` parameters. Defaults are
            ``(2.0, 2.0)``, ``(4.0, 4.0)``, and ``(6.0, 6.0)``,
respectively, which are de facto standard values used by
classical IRAF users.
g_init_o, g_init_e : astropy FunctionalModel2D, None, optional.
The Gaussian initial guess of the PSF of objects in o- and
e-ray, respectively.
keys : list of str, None, optional.
The list of header keywords to be appended to the
``self.phot_o`` and ``self.phot_e``.
Note
----
        The sep A/B parameters describe the ellipse of the object's
        "boundary". We need a flux-describing Gaussian, so I need to do a
        2D Gaussian fit.
'''
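        # Worked example (illustrative numbers): for a fitted FWHM of (3.0, 3.0)
        # pixels with the default f_ap=(2., 2.), f_in=(4., 4.), f_out=(6., 6.),
        # the aperture semi-axes are a = b = 6.0 px and the sky annulus spans
        # 12.0 px to 18.0 px.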
s_fit = 'Gaussian2D function fitted.'
s_phot = ('Photometry done for elliptical aperture/annulus with '
+ f"f_ap = {f_ap}, f_in = {f_in}, f_out = {f_out}"
+ "for FWHM = ({:.3f}, {:.3f})")
fs = dict(f_ap=f_ap, f_in=f_in, f_out=f_out)
if self.nobj_o < 1:
self.gfit_o = None
self.fitter_o = None
self.fwhm_o = (np.nan, np.nan)
self.ap_o = None
            self.an_o = None
import unittest
import itertools
import time
import sys
import pickle
import numpy as np
from mpinoseutils import *
import pygsti
from pygsti.modelpacks.legacy import std1Q_XYI as std
from pygsti.objects import profiler
g_maxLengths = [1,2,4,8]
g_numSubTrees = 3
def assertGatesetsInSync(mdl, comm):
if comm is not None:
bc = mdl if comm.Get_rank() == 0 else None
mdl_cmp = comm.bcast(bc, root=0)
assert(mdl.frobeniusdist(mdl_cmp) < 1e-6)
def runAnalysis(obj, ds, prepStrs, effectStrs, gsTarget, lsgstStringsToUse,
useFreqWeightedChiSq=False,
minProbClipForWeighting=1e-4, fidPairList=None,
comm=None, distributeMethod="circuits"):
#Run LGST to get starting model
assertGatesetsInSync(gsTarget, comm)
mdl_lgst = pygsti.do_lgst(ds, prepStrs, effectStrs, gsTarget,
svdTruncateTo=gsTarget.dim, verbosity=3)
assertGatesetsInSync(mdl_lgst, comm)
mdl_lgst_go = pygsti.gaugeopt_to_target(mdl_lgst,gsTarget)
assertGatesetsInSync(mdl_lgst_go, comm)
#Run full iterative LSGST
tStart = time.time()
if obj == "chi2":
all_gs_lsgst = pygsti.do_iterative_mc2gst(
ds, mdl_lgst_go, lsgstStringsToUse,
minProbClipForWeighting=minProbClipForWeighting,
probClipInterval=(-1e5,1e5),
verbosity=1, memLimit=3*(1024)**3, returnAll=True,
useFreqWeightedChiSq=useFreqWeightedChiSq, comm=comm,
distributeMethod=distributeMethod)
elif obj == "logl":
all_gs_lsgst = pygsti.do_iterative_mlgst(
ds, mdl_lgst_go, lsgstStringsToUse,
minProbClip=minProbClipForWeighting,
probClipInterval=(-1e5,1e5),
verbosity=1, memLimit=3*(1024)**3, returnAll=True,
useFreqWeightedChiSq=useFreqWeightedChiSq, comm=comm,
distributeMethod=distributeMethod)
tEnd = time.time()
print("Time = ",(tEnd-tStart)/3600.0,"hours")
return all_gs_lsgst
def runOneQubit(obj, ds, lsgstStrings, comm=None, distributeMethod="circuits"):
#specs = pygsti.construction.build_spam_specs(
# std.fiducials, prep_labels=std.target_model().get_prep_labels(),
# effect_labels=std.target_model().get_effect_labels())
return runAnalysis(obj, ds, std.fiducials, std.fiducials, std.target_model(),
lsgstStrings, comm=comm,
distributeMethod=distributeMethod)
def create_fake_dataset(comm):
fidPairList = None
maxLengths = [1,2,4,8,16]
nSamples = 1000
#specs = pygsti.construction.build_spam_specs(
# std.fiducials, prep_labels=std.target_model().get_prep_labels(),
# effect_labels=std.target_model().get_effect_labels())
#rhoStrs, EStrs = pygsti.construction.get_spam_strs(specs)
rhoStrs = EStrs = std.fiducials
lgstStrings = pygsti.construction.list_lgst_circuits(
rhoStrs, EStrs, list(std.target_model().operations.keys()))
lsgstStrings = pygsti.construction.make_lsgst_lists(
list(std.target_model().operations.keys()), rhoStrs, EStrs,
std.germs, maxLengths, fidPairList )
lsgstStringsToUse = lsgstStrings
allRequiredStrs = pygsti.remove_duplicates(lgstStrings + lsgstStrings[-1])
if comm is None or comm.Get_rank() == 0:
mdl_dataGen = std.target_model().depolarize(op_noise=0.1)
dsFake = pygsti.construction.generate_fake_data(
mdl_dataGen, allRequiredStrs, nSamples, sampleError="multinomial",
seed=1234)
        if comm is not None:
            dsFake = comm.bcast(dsFake, root=0)
else:
dsFake = comm.bcast(None, root=0)
#for mdl in dsFake:
# if abs(dsFake[mdl]['0']-dsFake_cmp[mdl]['0']) > 0.5:
# print("DS DIFF: ",mdl, dsFake[mdl]['0'], "vs", dsFake_cmp[mdl]['0'] )
return dsFake, lsgstStrings
@mpitest(4)
def test_MPI_products(comm):
assert(comm.Get_size() == 4)
#Create some model
mdl = std.target_model()
#Remove spam elements so product calculations have element indices <=> product indices
del mdl.preps['rho0']
del mdl.povms['Mdefault']
mdl.kick(0.1,seed=1234)
#Get some operation sequences
maxLengths = [1,2,4,8]
gstrs = pygsti.construction.make_lsgst_experiment_list(
list(std.target_model().operations.keys()), std.fiducials, std.fiducials, std.germs, maxLengths)
tree,lookup,outcome_lookup = mdl.bulk_evaltree(gstrs)
split_tree = tree.copy()
split_lookup = split_tree.split(lookup,numSubTrees=g_numSubTrees)
# Check wrtFilter functionality in dproduct
some_wrtFilter = [0,2,3,5,10]
for s in gstrs[0:20]:
result = mdl._fwdsim().dproduct(s, wrtFilter=some_wrtFilter)
chk_result = mdl.dproduct(s) #no filtering
for ii,i in enumerate(some_wrtFilter):
assert(np.linalg.norm(chk_result[i]-result[ii]) < 1e-6)
taken_chk_result = chk_result.take( some_wrtFilter, axis=0 )
assert(np.linalg.norm(taken_chk_result-result) < 1e-6)
#Check bulk products
#bulk_product - no parallelization unless tree is split
serial = mdl.bulk_product(tree, bScale=False)
parallel = mdl.bulk_product(tree, bScale=False, comm=comm)
assert(np.linalg.norm(serial-parallel) < 1e-6)
serial_scl, sscale = mdl.bulk_product(tree, bScale=True)
parallel, pscale = mdl.bulk_product(tree, bScale=True, comm=comm)
assert(np.linalg.norm(serial_scl*sscale[:,None,None] -
parallel*pscale[:,None,None]) < 1e-6)
# will use a split tree to parallelize
parallel = mdl.bulk_product(split_tree, bScale=False, comm=comm)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(serial[lookup[i]]-parallel[split_lookup[i]]) < 1e-6)
parallel, pscale = mdl.bulk_product(split_tree, bScale=True, comm=comm)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(serial_scl[lookup[i]]*sscale[lookup[i],None,None] -
parallel[split_lookup[i]]*pscale[split_lookup[i],None,None]) < 1e-6)
#bulk_dproduct - no split tree => parallel by col
serial = mdl.bulk_dproduct(tree, bScale=False)
parallel = mdl.bulk_dproduct(tree, bScale=False, comm=comm)
assert(np.linalg.norm(serial-parallel) < 1e-6)
serial_scl, sscale = mdl.bulk_dproduct(tree, bScale=True)
parallel, pscale = mdl.bulk_dproduct(tree, bScale=True, comm=comm)
assert(np.linalg.norm(serial_scl*sscale[:,None,None,None] -
parallel*pscale[:,None,None,None]) < 1e-6)
# will just ignore a split tree for now (just parallel by col)
parallel = mdl.bulk_dproduct(split_tree, bScale=False, comm=comm)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(serial[lookup[i]] - parallel[split_lookup[i]]) < 1e-6)
parallel, pscale = mdl.bulk_dproduct(split_tree, bScale=True, comm=comm)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(serial_scl[lookup[i]]*sscale[lookup[i],None,None,None] -
parallel[split_lookup[i]]*pscale[split_lookup[i],None,None,None]) < 1e-6)
#bulk_hproduct - no split tree => parallel by col
serial = mdl.bulk_hproduct(tree, bScale=False)
parallel = mdl.bulk_hproduct(tree, bScale=False, comm=comm)
assert(np.linalg.norm(serial-parallel) < 1e-6)
serial_scl, sscale = mdl.bulk_hproduct(tree, bScale=True)
parallel, pscale = mdl.bulk_hproduct(tree, bScale=True, comm=comm)
assert(np.linalg.norm(serial_scl*sscale[:,None,None,None,None] -
parallel*pscale[:,None,None,None,None]) < 1e-6)
# will just ignore a split tree for now (just parallel by col)
parallel = mdl.bulk_hproduct(split_tree, bScale=False, comm=comm)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(serial[lookup[i]] - parallel[split_lookup[i]]) < 1e-6)
parallel, pscale = mdl.bulk_hproduct(split_tree, bScale=True, comm=comm)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(serial_scl[lookup[i]]*sscale[lookup[i],None,None,None,None] -
parallel[split_lookup[i]]*pscale[split_lookup[i],None,None,None,None]) < 1e-6)
#OLD: pr functions deprecated
#@mpitest(4)
#def test_MPI_pr(comm):
#
# #Create some model
# mdl = std.target_model()
# mdl.kick(0.1,seed=1234)
#
# #Get some operation sequences
# maxLengths = g_maxLengths
# gstrs = pygsti.construction.make_lsgst_experiment_list(
# list(std.target_model().operations.keys()), std.fiducials, std.fiducials, std.germs, maxLengths)
# tree,lookup,outcome_lookup = mdl.bulk_evaltree(gstrs)
# split_tree = tree.copy()
# lookup = split_tree.split(lookup,numSubTrees=g_numSubTrees)
#
# #Check single-spam-label bulk probabilities
#
# # non-split tree => automatically adjusts wrtBlockSize to accomodate
# # the number of processors
# serial = mdl.bulk_pr('0', tree, clipTo=(-1e6,1e6))
# parallel = mdl.bulk_pr('0', tree, clipTo=(-1e6,1e6), comm=comm)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
#
# serial = mdl.bulk_dpr('0', tree, clipTo=(-1e6,1e6))
# parallel = mdl.bulk_dpr('0', tree, clipTo=(-1e6,1e6), comm=comm)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
#
# serial, sp = mdl.bulk_dpr('0', tree, returnPr=True, clipTo=(-1e6,1e6))
# parallel, pp = mdl.bulk_dpr('0', tree, returnPr=True, clipTo=(-1e6,1e6), comm=comm)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
# assert(np.linalg.norm(sp-pp) < 1e-6)
#
# serial, sdp, sp = mdl.bulk_hpr('0', tree, returnPr=True, returnDeriv=True,
# clipTo=(-1e6,1e6))
# parallel, pdp, pp = mdl.bulk_hpr('0', tree, returnPr=True,
# returnDeriv=True, clipTo=(-1e6,1e6), comm=comm)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
# assert(np.linalg.norm(sdp-pdp) < 1e-6)
# assert(np.linalg.norm(sp-pp) < 1e-6)
#
#
# # split tree => distribures on sub-trees prior to adjusting
# # wrtBlockSize to accomodate remaining processors
# serial = mdl.bulk_pr('0', tree, clipTo=(-1e6,1e6))
# parallel = mdl.bulk_pr('0', split_tree, clipTo=(-1e6,1e6), comm=comm)
# parallel = split_tree.permute_computation_to_original(parallel)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
#
# serial = mdl.bulk_dpr('0', tree, clipTo=(-1e6,1e6))
# parallel = mdl.bulk_dpr('0', split_tree, clipTo=(-1e6,1e6), comm=comm)
# parallel = split_tree.permute_computation_to_original(parallel)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
#
# serial, sp = mdl.bulk_dpr('0', tree, returnPr=True, clipTo=(-1e6,1e6))
# parallel, pp = mdl.bulk_dpr('0', split_tree, returnPr=True, clipTo=(-1e6,1e6), comm=comm)
# parallel = split_tree.permute_computation_to_original(parallel)
# pp = split_tree.permute_computation_to_original(pp)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
# assert(np.linalg.norm(sp-pp) < 1e-6)
#
# serial, sdp, sp = mdl.bulk_hpr('0', tree, returnPr=True, returnDeriv=True,
# clipTo=(-1e6,1e6))
# parallel, pdp, pp = mdl.bulk_hpr('0', split_tree, returnPr=True,
# returnDeriv=True, clipTo=(-1e6,1e6), comm=comm)
# parallel = split_tree.permute_computation_to_original(parallel)
# pdp = split_tree.permute_computation_to_original(pdp)
# pp = split_tree.permute_computation_to_original(pp)
# assert(np.linalg.norm(serial-parallel) < 1e-6)
# assert(np.linalg.norm(sdp-pdp) < 1e-6)
# assert(np.linalg.norm(sp-pp) < 1e-6)
@mpitest(4)
def test_MPI_probs(comm):
#Create some model
mdl = std.target_model()
mdl.kick(0.1,seed=1234)
#Get some operation sequences
maxLengths = g_maxLengths
gstrs = pygsti.construction.make_lsgst_experiment_list(
list(std.target_model().operations.keys()), std.fiducials, std.fiducials, std.germs, maxLengths)
#tree,lookup,outcome_lookup = mdl.bulk_evaltree(gstrs)
#split_tree = tree.copy()
#lookup = split_tree.split(lookup, numSubTrees=g_numSubTrees)
#Check all-spam-label bulk probabilities
def compare_prob_dicts(a,b,indices=None):
for opstr in gstrs:
for outcome in a[opstr].keys():
if indices is None:
assert(np.linalg.norm(a[opstr][outcome] -b[opstr][outcome]) < 1e-6)
else:
for i in indices:
assert(np.linalg.norm(a[opstr][outcome][i] -b[opstr][outcome][i]) < 1e-6)
    # non-split tree => automatically adjusts wrtBlockSize to accommodate
# the number of processors
serial = mdl.bulk_probs(gstrs, clipTo=(-1e6,1e6))
parallel = mdl.bulk_probs(gstrs, clipTo=(-1e6,1e6), comm=comm)
compare_prob_dicts(serial,parallel)
serial = mdl.bulk_dprobs(gstrs, clipTo=(-1e6,1e6))
parallel = mdl.bulk_dprobs(gstrs, clipTo=(-1e6,1e6), comm=comm)
compare_prob_dicts(serial,parallel)
serial = mdl.bulk_dprobs(gstrs, returnPr=True, clipTo=(-1e6,1e6))
parallel = mdl.bulk_dprobs(gstrs, returnPr=True, clipTo=(-1e6,1e6), comm=comm)
compare_prob_dicts(serial,parallel,(0,1))
serial = mdl.bulk_hprobs(gstrs, returnPr=True, returnDeriv=True,
clipTo=(-1e6,1e6))
parallel = mdl.bulk_hprobs(gstrs, returnPr=True,
returnDeriv=True, clipTo=(-1e6,1e6), comm=comm)
compare_prob_dicts(serial,parallel,(0,1,2))
##OLD: cannot tell bulk_probs to use a split tree anymore (just give list)
## split tree => distribures on sub-trees prior to adjusting
## wrtBlockSize to accomodate remaining processors
#serial = mdl.bulk_probs(tree, clipTo=(-1e6,1e6))
#parallel = mdl.bulk_probs(split_tree, clipTo=(-1e6,1e6), comm=comm)
#for sl in serial:
# p = split_tree.permute_computation_to_original(parallel[sl])
# assert(np.linalg.norm(serial[sl]-p) < 1e-6)
#
#serial = mdl.bulk_dprobs(tree, clipTo=(-1e6,1e6))
#parallel = mdl.bulk_dprobs(split_tree, clipTo=(-1e6,1e6), comm=comm)
#for sl in serial:
# p = split_tree.permute_computation_to_original(parallel[sl])
# assert(np.linalg.norm(serial[sl]-p) < 1e-6)
#
#serial = mdl.bulk_dprobs(tree, returnPr=True, clipTo=(-1e6,1e6))
#parallel = mdl.bulk_dprobs(split_tree, returnPr=True, clipTo=(-1e6,1e6), comm=comm)
#for sl in serial:
# p0 = split_tree.permute_computation_to_original(parallel[sl][0])
# p1 = split_tree.permute_computation_to_original(parallel[sl][1])
# assert(np.linalg.norm(serial[sl][0]-p0) < 1e-6)
# assert(np.linalg.norm(serial[sl][1]-p1) < 1e-6)
#
#serial = mdl.bulk_hprobs(tree, returnPr=True, returnDeriv=True,
# clipTo=(-1e6,1e6))
#parallel = mdl.bulk_hprobs(split_tree, returnPr=True,
# returnDeriv=True, clipTo=(-1e6,1e6), comm=comm)
#for sl in serial:
# p0 = split_tree.permute_computation_to_original(parallel[sl][0])
# p1 = split_tree.permute_computation_to_original(parallel[sl][1])
# p2 = split_tree.permute_computation_to_original(parallel[sl][2])
# assert(np.linalg.norm(serial[sl][0]-p0) < 1e-6)
# assert(np.linalg.norm(serial[sl][1]-p1) < 1e-6)
# assert(np.linalg.norm(serial[sl][2]-p2) < 1e-6)
@mpitest(4)
def test_MPI_fills(comm):
#Create some model
mdl = std.target_model()
mdl.kick(0.1,seed=1234)
#Get some operation sequences
maxLengths = g_maxLengths
gstrs = pygsti.construction.make_lsgst_experiment_list(
list(std.target_model().operations.keys()), std.fiducials, std.fiducials, std.germs, maxLengths)
tree,lookup,outcome_lookup = mdl.bulk_evaltree(gstrs)
split_tree = tree.copy()
split_lookup = split_tree.split(lookup,numSubTrees=g_numSubTrees)
#Check fill probabilities
nEls = tree.num_final_elements()
nCircuits = len(gstrs)
nDerivCols = mdl.num_params()
#Get serial results
vhp_serial = np.empty( (nEls,nDerivCols,nDerivCols),'d')
vdp_serial = np.empty( (nEls,nDerivCols), 'd' )
vp_serial = np.empty( nEls, 'd' )
vhp_serial2 = np.empty( (nEls,nDerivCols,nDerivCols),'d')
vdp_serial2 = np.empty( (nEls,nDerivCols), 'd' )
vp_serial2 = np.empty( nEls, 'd' )
mdl.bulk_fill_probs(vp_serial, tree,
(-1e6,1e6), comm=None)
mdl.bulk_fill_dprobs(vdp_serial, tree,
vp_serial2, (-1e6,1e6), comm=None,
wrtBlockSize=None)
assert(np.linalg.norm(vp_serial2-vp_serial) < 1e-6)
mdl.bulk_fill_hprobs(vhp_serial, tree,
vp_serial2, vdp_serial2, (-1e6,1e6), comm=None,
wrtBlockSize1=None, wrtBlockSize2=None)
assert(np.linalg.norm(vp_serial2-vp_serial) < 1e-6)
assert(np.linalg.norm(vdp_serial2-vdp_serial) < 1e-6)
#Check serial results with a split tree, just to be sure
mdl.bulk_fill_probs(vp_serial2, split_tree,
(-1e6,1e6), comm=None)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(vp_serial[ lookup[i] ] -
vp_serial2[ split_lookup[i] ]) < 1e-6)
mdl.bulk_fill_dprobs(vdp_serial2, split_tree,
vp_serial2, (-1e6,1e6), comm=None,
wrtBlockSize=None)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(vp_serial[ lookup[i] ] -
vp_serial2[ split_lookup[i] ]) < 1e-6)
assert(np.linalg.norm(vdp_serial[ lookup[i] ] -
vdp_serial2[ split_lookup[i] ]) < 1e-6)
mdl.bulk_fill_hprobs(vhp_serial2, split_tree,
vp_serial2, vdp_serial2, (-1e6,1e6), comm=None,
wrtBlockSize1=None, wrtBlockSize2=None)
for i,opstr in enumerate(gstrs):
assert(np.linalg.norm(vp_serial[ lookup[i] ] -
vp_serial2[ split_lookup[i] ]) < 1e-6)
assert(np.linalg.norm(vdp_serial[ lookup[i] ] -
                               vdp_serial2[ split_lookup[i] ]) < 1e-6)
""" Tiles demo
This demo is good for illustrating or testing the tile routines.
The demo displays a graph where instead of lines, there is a dot for
each x,y pair in the graph. The x,y pairs are points whose tiles match the
point we are doing tiles for. There is a separate graph entry for each
of the following tile types: regular(square), stripes, diagonals, diamond,
logarithmic sized, and exponential sized.
The graph window first comes up with tiles for the point 2.0, 2.0 displayed. You
can click anywhere in the graph window to show the tiles for that point. You can
use the graph highlighting functions by pressing the space bar, and then the
arrow keys, to show the different tiles (some may be hidden by others, so this
is the only way to see some of them). There is a menu of other options for the
tile codes (e.g. collision tables, number of tilings). If you choose one of these,
a dnw window will be created, and you can move it side by side with the old one
to compare things.
When imported, a tile window is automatically created for you. There is a function to
create windows which you may wish to call:
showtiles(numtilings, memct, floats, ints, title, start, end, intervals)
where:
numtilings is the number of tilings to be done
memct is the memory size or collision table to use
floats are the (two) floating point numbers to do tiles for
ints is an optional list of integers to use in the tiling (defaults to none)
title is the title for the tile display window
start is the starting point of the tile window (defaults to 0.0)
end is the end point of the tile window (defaults to 5.0)
intervals is the number of intervals between whole number points (default 10)
Note: don't make the start and end too far apart, or use too many intervals
between them. The program will call the tiles function for each interval between
the start and end points (in each direction) for each type of tiling and
compare it against the tiles returned by those functions for the original
point, so if you ask for too many, it will be VERY slow.
"""
import tiles
from . import fancytiles
import random
import RLtoolkit.graph as graph
from RLtoolkit.g import *
class Tileview(graph.Dataview):
"""Special graph view for tile display"""
    def gDrawView(self):
        self.parentgraph.gDrawView()
def gClickEventHandler(self, x, y):
print(("clicked at ", x, y))
self.newExample(x, y)
def newExample(self, x, y):
global inputarray, functionapproximator, examples
self.parentgraph.drawExample(x, y)
class TileDisplay(graph.Graph):
def __init__(self, x, y, numtilings=1, memct=8192, title="Tile Display c", \
dataviewtype=Tileview, start=0.0, end=5.0, intervals=10,
**kwargs):
title = title + " for " + str(numtilings) + " tilings"
graph.Graph.__init__(self, title, dataviewtype, **kwargs)
self.dataview.parentgraph = self
self.tilex = x
self.tiley = y
self.numtilings = numtilings
self.memct = memct
self.start = start
self.end = end
self.intervals = intervals
self.initDemo()
gAddMenu(self, 'Tile Window', \
[['1 tiling, memory 1024',
lambda: showtiles(1, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['2 tilings, memory 1024',
lambda: showtiles(2, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['4 tilings, memory 1024',
lambda: showtiles(4, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['8 tilings, memory 1024',
lambda: showtiles(8, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['16 tilings, memory 1024',
lambda: showtiles(16, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
'---', \
['1 tiling, memory 2048',
lambda: showtiles(1, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
['2 tilings, memory 2048',
lambda: showtiles(2, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
['4 tilings, memory 2048',
lambda: showtiles(4, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
['8 tilings, memory 2048',
lambda: showtiles(8, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
['16 tilings, memory 2048',
lambda: showtiles(16, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
'---', \
['1 tiling, memory 4096',
lambda: showtiles(1, 4096, [1.0, 2.0],
title="Tile Display c, memory 4096")], \
['2 tilings, memory 4096',
lambda: showtiles(2, 4096, [1.0, 2.0],
title="Tile Display c, memory 4096")], \
['4 tilings, memory 4096',
lambda: showtiles(4, 4096, [1.0, 2.0],
title="Tile Display c, memory 4096")], \
['8 tilings, memory 4096',
lambda: showtiles(8, 4096, [1.0, 2.0],
title="Tile Display c, memory 4096")], \
['16 tilings, memory 4096',
lambda: showtiles(16, 4096, [1.0, 2.0],
title="Tile Display c, memory 4096")], \
'---', \
['1 tiling, safe collision table',
lambda: showtiles(1, cts, [1.0, 2.0],
title="Tile Display c, safe collision table")], \
['2 tilings, safe collision table',
lambda: showtiles(2, cts, [1.0, 2.0],
title="Tile Display c, safe collision table")], \
'---', \
['1 tiling, super safe collision table',
lambda: showtiles(1, ctss, [1.0, 2.0],
title="Tile Display c, super safe collision table")], \
['2 tilings, super safe collision table',
lambda: showtiles(2, ctss, [1.0, 2.0],
title="Tile Display c, super safe collision table")], \
'---', \
['1 tiling, range -2 to 7, memory 4096',
lambda: showtiles(1, 4096, [1.0, 2.0], start=-2.0, end=7.0,
title="Tile Display c, memory 4096")], \
['2 tilings, range -2 to 7, memory 4096',
lambda: showtiles(2, 4096, [1.0, 2.0], start=-2.0, end=7.0,
title="Tile Display c, memory 4096")], \
'---', \
['Quit', gQuit]])
def gDrawView(self):
graph.Graph.gDrawView(self)
gDrawLineR(self.dataview, self.tilex, self.tiley, 0, .2, 'black')
gDrawLineR(self.dataview, self.tilex, self.tiley, 0, -.2, 'black')
gDrawLineR(self.dataview, self.tilex, self.tiley, .2, 0, 'black')
gDrawLineR(self.dataview, self.tilex, self.tiley, -.2, 0, 'black')
def drawExample(self, x, y):
self.tilex = x
self.tiley = y
graph.graph(self.calcTiledata(self.numtilings, self.memct, [x, y]),
None, self)
gDrawLineR(self.dataview, x, y, 0, .2, 'black')
gDrawLineR(self.dataview, x, y, 0, -.2, 'black')
gDrawLineR(self.dataview, x, y, .2, 0, 'black')
gDrawLineR(self.dataview, x, y, -.2, 0, 'black')
def initDemo(self):
gClear(self)
gClear(self.dataview)
self.data = []
self.drawExample(self.tilex, self.tiley)
# graph.xGraphLimits(0.0, 5.0, self)
# graph.yGraphLimits(0.0, 5.0, self)
graph.xGraphLimits(self.start, self.end, self)
graph.yGraphLimits(self.start, self.end, self)
graph.graphPointsOnly(self)
graph.xTickmarks(1, self)
graph.yTickmarks(1, self)
graph.gridGraph(5, self)
def calcTiledata(self, numtilings, memct, floats, ints=[]):
samet = []
sametd = []
sametbd = []
sametdm = []
sametl = []
samete = []
samets = []
t = tiles.tiles(numtilings, memct, floats, ints)
tsx = fancytiles.stripetiles(numtilings, memct, [floats[0]], None, ints)
tsy = fancytiles.stripetiles(numtilings, memct, [floats[1]], None, ints)
td = fancytiles.diagonaltiles(numtilings, memct, floats, None, ints)
tbd = fancytiles.backdiagonaltiles(numtilings, memct, floats, None,
ints)
tdm = fancytiles.diamondtiles(numtilings, memct, floats, None, ints)
tl = fancytiles.logtiles(numtilings, memct, floats, ints)
te = fancytiles.exptiles(numtilings, memct, floats, ints)
total = int((self.end - self.start) * self.intervals)
for i in range(total):
for j in range(total):
x = float(i) / self.intervals + self.start
y = float(j) / self.intervals + self.start
newfloats = [x, y]
if tiles.tiles(numtilings, memct, newfloats, ints) == t:
samet.append(newfloats)
if fancytiles.stripetiles(numtilings, memct, [x], None,
ints) == tsx or \
fancytiles.stripetiles(numtilings, memct, [y],
None, ints) == tsy:
samets.append(newfloats)
if fancytiles.diagonaltiles(numtilings, memct, newfloats, None,
ints) == td:
sametd.append(newfloats)
if fancytiles.backdiagonaltiles(numtilings, memct, newfloats,
None, ints) == tbd:
sametbd.append(newfloats)
if fancytiles.diamondtiles(numtilings, memct, newfloats, None,
ints) == tdm:
sametdm.append(newfloats)
if fancytiles.logtiles(numtilings, memct, newfloats,
ints) == tl:
sametl.append(newfloats)
if fancytiles.exptiles(numtilings, memct, newfloats,
ints) == te:
samete.append(newfloats)
data = [samet, samets, sametd, sametbd, sametdm, sametl, samete]
return data
def showtiles(numtilings, memct, floats, ints=[], title="Tile Display c", \
start=0.0, end=5.0, intervals=10):
    w = TileDisplay(floats[0], floats[1], numtilings, memct, title=title, start=start, \
                    end=end, intervals=intervals, gdViewport=(0, 20, 600, 620))
# should really have one for each type of tiling for test?
ctu = tiles.CollisionTable(4096, 'unsafe')
cts = tiles.CollisionTable(4096, 'safe')
ctss = tiles.CollisionTable(4096, 'super safe')
gAddMenu(GMENU, 'Tile Window', \
[['1 tiling, memory 1024', lambda: showtiles(1, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['2 tilings, memory 1024', lambda: showtiles(2, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['4 tilings, memory 1024', lambda: showtiles(4, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['8 tilings, memory 1024', lambda: showtiles(8, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
['16 tilings, memory 1024', lambda: showtiles(16, 1024, [1.0, 2.0],
title="Tile Display c, memory 1024")], \
'---', \
['1 tiling, memory 2048', lambda: showtiles(1, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
['2 tilings, memory 2048', lambda: showtiles(2, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
['4 tilings, memory 2048', lambda: showtiles(4, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
['8 tilings, memory 2048', lambda: showtiles(8, 2048, [1.0, 2.0],
title="Tile Display c, memory 2048")], \
          ['16 tilings, memory 2048', lambda: showtiles(16, 2048, [1.0, 2.0],
                                                        title="Tile Display c, memory 2048")], \
# Copyright 2018 Contributors to Hyperledger Sawtooth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -----------------------------------------------------------------------------
"""LDAP Inbound Delta Sync Test"""
# pylint: disable=redefined-outer-name
# NOTE: disabling for pytest as per:
# https://stackoverflow.com/questions/46089480/pytest-fixtures-redefining-name-from-outer-scope-pylint
import time
from datetime import datetime, timedelta, timezone
import requests
from ldap3 import (
Server,
Connection,
MOCK_SYNC,
OFFLINE_AD_2012_R2,
ALL_ATTRIBUTES,
MODIFY_REPLACE,
)
from ldap3.extend.microsoft import addMembersToGroups, removeMembersFromGroups
import pytest
import rethinkdb as r
from environs import Env
from rbac.providers.common.db_queries import connect_to_db
from rbac.common.crypto.secrets import generate_api_key
from rbac.providers.ldap.delta_inbound_sync import (
insert_updated_entries,
insert_deleted_entries,
)
from tests import utilities
SERVER = Server("my_fake_server", get_info=OFFLINE_AD_2012_R2)
# ------------------------------------------------------------------------------
# <==== BEGIN TEST PARAMETERS =================================================>
# ------------------------------------------------------------------------------
TEST_USERS = [
{"common_name": "User0", "name": "Zeroth User", "given_name": "Zeroth"},
{"common_name": "User1", "name": "First User", "given_name": "First"},
{"common_name": "User2", "name": "Second User", "given_name": "Second"},
{"common_name": "User3", "name": "Third User", "given_name": "Third"},
{"common_name": "User4", "name": "Fourth User", "given_name": "Fourth"},
]
TEST_GROUPS = [{"common_name": "test_group", "name": "test_group"}]
# ------------------------------------------------------------------------------
# <==== END TEST PARAMETERS ===================================================>
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# <==== BEGIN TEST FIXTURES ===================================================>
# ------------------------------------------------------------------------------
@pytest.fixture(autouse=True, scope="module")
def ldap_connection():
"""Binds and yields a mock ldap connection for integration testing.
"""
connection = Connection(
SERVER,
user="cn=my_user,ou=test,o=lab",
password="<PASSWORD>",
client_strategy=MOCK_SYNC,
)
connection.bind()
yield connection
connection.unbind()
# ------------------------------------------------------------------------------
# <==== END TEST FIXTURES =====================================================>
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# <==== BEGIN TEST HELPER FUNCTIONS ===========================================>
# ------------------------------------------------------------------------------
def _get_user_attributes(common_name, name, given_name):
"""Generates valid AD user attributes for creating a fake user in a mock AD.
Args:
common_name:
str: A common name for a fake user.
name:
str: A username for a fake user.
given_name:
str: A given name for a fake user.
Returns:
attributes:
obj: a dict of fake user attributes.
"""
attributes = {
"cn": common_name,
"displayName": name,
"distinguishedName": "CN=%s,OU=Users,OU=Accounts,DC=AD2012,DC=LAB"
% common_name,
"givenName": given_name,
"name": name,
"objectCategory": "CN=Person,CN=Schema,CN=Configuration,DC=AD2012,DC=LAB",
"objectClass": ["top", "person", "organizationalPerson", "user"],
"sn": "%s_sn" % common_name,
"userPassword": "<PASSWORD>",
"mail": <EMAIL>" % common_name,
"whenChanged": datetime.utcnow().replace(tzinfo=timezone.utc),
"whenCreated": datetime.utcnow().replace(tzinfo=timezone.utc),
}
return attributes
def _get_group_attributes(common_name, name, owner=""):
"""Generates valid AD group attributes for creating a fake group in a mock AD.
Args:
common_name:
str: A common name for a fake group.
        name:
            str: A name for a fake group.
        owner:
            str: Optional distinguished name of the group's owner (managedBy).
Returns:
attributes:
obj: a dict of fake group attributes.
"""
group = {
"cn": common_name,
"distinguishedName": "CN=%s,OU=Roles,OU=Security,OU=Groups,DC=AD2012,DC=LAB"
% common_name,
"name": name,
"objectCategory": "CN=Group,CN=Schema,CN=Configuration,DC=AD2012,DC=LAB",
"objectClass": ["top", "group"],
"whenChanged": datetime.utcnow().replace(tzinfo=timezone.utc),
"whenCreated": datetime.utcnow().replace(tzinfo=timezone.utc),
"managedBy": owner,
}
return group
def create_fake_user(ldap_connection, common_name, name, given_name):
"""Puts a given user object in the mock AD server.
Args:
ldap_connection:
obj: A bound ldap connection object.
common_name:
str: A string containing the common name of a fake AD user.
name:
str: A string containing the username of a fake AD user.
given_name:
str: A string containing the given name of a fake AD user.
"""
attributes = _get_user_attributes(common_name, name, given_name)
ldap_connection.strategy.add_entry(
"CN=%s,OU=Users,OU=Accounts,DC=AD2012,DC=LAB" % common_name,
attributes=attributes,
)
def create_fake_group(ldap_connection, common_name, name, owner=""):
"""Puts a given user object in the mock AD server.
Args:
ldap_connection:
obj: A bound ldap connection object.
common_name:
str: A string containing the common name of a fake AD role.
name:
str: A string containing the name of a fake group.
        owner:
            str: Optional distinguished name of the fake group's owner.
"""
attributes = _get_group_attributes(common_name, name, owner)
ldap_connection.strategy.add_entry(
"CN=%s,OU=Roles,OU=Security,OU=Groups,DC=AD2012,DC=LAB" % common_name,
attributes=attributes,
)
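def _mock_ad_roundtrip_demo():
    """Hedged sketch (not part of the test suite): create one fake user in
    the in-memory mock AD and read it back. The bind DN and password mirror
    the module fixture; all names are illustrative."""
    conn = Connection(
        SERVER,
        user="cn=my_user,ou=test,o=lab",
        password="<PASSWORD>",
        client_strategy=MOCK_SYNC,
    )
    conn.bind()
    create_fake_user(conn, "DemoUser", "Demo User", "Demo")
    entries = get_fake_user(conn, "DemoUser")
    conn.unbind()
    return entries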
def create_next_role_ldap(user, role_name):
"""" Create a NEXT role as an imported LDAP user
Args:
user:
dict: User table entry for imported LDAP user
role_name:
str: Name of role
Returns:
role_id:
str: UUID of newly created NEXT role
Raises:
        ValueError: Raised when the user could not be authenticated.
"""
token = ldap_auth_login(user)
if token:
user_next_id = user["next_id"]
role_data = {
"name": role_name,
"owners": [user_next_id],
"administrators": [user_next_id],
}
with requests.Session() as session:
session.headers.update({"Authorization": token})
response = utilities.create_test_role(session, role_data)
return response.json()["data"]["id"]
raise ValueError("Unsuccessful authentication.")
def ldap_auth_login(user):
"""" Authenticate as a test LDAP user and create a new entry in
auth RethinkDB table.
Args:
user:
dict: User table entry for imported LDAP user
Returns:
token:
str: Bearer token upon user's successful authentication
"""
env = Env()
ldap_conn = Connection(
SERVER,
user=user["remote_id"],
password="<PASSWORD>",
client_strategy=MOCK_SYNC,
)
# On successful bind, create auth table entry
if ldap_conn.bind():
conn = connect_to_db()
user_map = (
r.table("user_mapping")
.filter({"next_id": user["next_id"]})
.coerce_to("array")
.run(conn)
)
auth_entry = {
"next_id": user["next_id"],
"username": user["username"],
"email": user["email"],
"encrypted_private_key": user_map[0]["encrypted_key"],
"public_key": user_map[0]["public_key"],
}
r.table("auth").insert(auth_entry).run(conn)
conn.close()
return generate_api_key(env("SECRET_KEY"), user["next_id"])
return None
def get_fake_user(ldap_connection, user_common_name):
"""Gets a fake user from the mock AD server.
Args:
ldap_connection:
obj: a mock ldap connection object.
user_common_name:
str: the common name of the fake user.
Returns:
fake_user:
arr<obj>: an array containing any users with a matching common name.
"""
search_parameters = {
"search_base": "OU=Users,OU=Accounts,DC=AD2012,DC=LAB",
"search_filter": "(&(objectClass=person)(cn=%s))" % user_common_name,
"attributes": ALL_ATTRIBUTES,
"paged_size": len(TEST_USERS),
}
ldap_connection.search(**search_parameters)
fake_user = ldap_connection.entries
return fake_user
def get_fake_group(ldap_connection, group_common_name):
"""Gets a fake user from the mock AD server.
Args:
ldap_connection:
obj: a mock ldap connection object.
group_common_name:
str: the common name of the fake group.
Returns:
        fake_group:
            arr<obj>: an array containing any groups with a matching common name.
"""
search_parameters = {
"search_base": "OU=Roles,OU=Security,OU=Groups,DC=AD2012,DC=LAB",
"search_filter": "(&(objectClass=group)(cn=%s))" % group_common_name,
"attributes": ALL_ATTRIBUTES,
"paged_size": len(TEST_GROUPS),
}
ldap_connection.search(**search_parameters)
    fake_group = ldap_connection.entries
    return fake_group
def put_in_inbound_queue(fake_data, data_type):
"""Puts a fake ( user | group ) object in the inbound queue to be ingested by
rbac_ledger_sync.
Args:
fake_data:
obj: a fake ( user | group ) object to insert.
data_type:
str: type of object "user"/"group"
"""
when_changed = (datetime.utcnow() - timedelta(days=1)).replace(tzinfo=timezone.utc)
insert_updated_entries(fake_data, when_changed, data_type)
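# Note: when_changed is backdated by one day, presumably so that the queued
# entries look like past changes to the delta sync; insert_updated_entries
# then writes them to the inbound queue for rbac_ledger_sync to ingest.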
def is_user_in_db(email):
"""Returns the number of users in rethinkdb with the given email.
Args:
email:
str: an email address.
"""
with connect_to_db() as db_connection:
result = r.table("users").filter({"email": email}).count().run(db_connection)
return result > 0
def get_user_in_db_by_email(email):
"""Returns the user in rethinkdb with the given email.
Args:
email:
str: an email address.
"""
with connect_to_db() as db_connection:
result = (
r.table("users")
.filter({"email": email})
.coerce_to("array")
.run(db_connection)
)
return result
def get_user_next_id(distinguished_name):
"""Returns the next_id for a given user's distinguished name.
Args:
distinguished_name:
str: A string containing the user's AD distinguished name.
Returns:
next_id:
str: A string containing the user's unique next_id.
"""
with connect_to_db() as db_connection:
results = list(
r.table("users")
.filter({"remote_id": distinguished_name})
.pluck("next_id")
.run(db_connection)
)[0]
next_id = results["next_id"]
return next_id
def is_group_in_db(name):
"""Returns the number of groups from the roles table in rethinkdb with
the given name.
Args:
name:
str: The name of a fake group.
"""
with connect_to_db() as db_connection:
result = r.table("roles").filter({"name": name}).count().run(db_connection)
return result > 0
def get_role_id_from_cn(role_common_name):
"""Returns the NEXT role_id for a given role/group's common name.
Args:
role_common_name:
str: A string containing the common name of an AD group.
Returns:
role_id:
str: A string containing the NEXT role id of the corresponding role.
"""
with connect_to_db() as db_connection:
results = list(
r.table("roles")
.order_by(index=r.desc("start_block_num"))
.filter({"name": role_common_name})
.pluck("role_id")
.run(db_connection)
)[0]
role_id = results["role_id"]
return role_id
def get_role(name):
"""Returns a role in rethinkDB via name.
Args:
name:
str: a name of a role in rethinkDB.
"""
with connect_to_db() as db_connection:
role = (
r.table("roles")
.filter({"name": name})
.coerce_to("array")
.run(db_connection)
)
return role
def get_role_members(role_id):
"""Returns a list of member user_ids from a role in rethnkDB.
Args:
role_id:
str: a NEXT role_id from rethinkDB.
"""
with connect_to_db() as db_connection:
role_members = (
r.table("role_members")
.filter({"role_id": role_id})
.pluck("related_id")
.coerce_to("array")
.run(db_connection)
)
return role_members
def is_user_a_role_member(role_common_name, user_common_name):
"""Checks to see if a given user is a member of the given role/group in
rethinkDB.
    Args:
        role_common_name:
            str: string containing the common name of an AD role/group object.
        user_common_name:
            str: string containing the common name of an AD user object.
Returns:
bool:
True: if the user is a member of the given group.
False: if the user is not a member of the given group.
"""
role_id = get_role_id_from_cn(role_common_name)
user_distinct_name = (
"CN=%s,OU=Users,OU=Accounts,DC=AD2012,DC=LAB" % user_common_name
)
next_id = get_user_next_id(user_distinct_name)
user_is_role_member = False
for member in get_role_members(role_id):
if member["related_id"] == next_id:
user_is_role_member = True
return user_is_role_member
def update_when_changed(ldap_connection, object_distinct_name):
"""Replace the whenChanged AD attribute to a newer datetime. This is required
for delta inbound sync as old timestamps | |
<filename>aristopy/plotter.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==============================================================================
# P L O T T E R
# ==============================================================================
"""
* File name: plotter.py
* Last edited: 2020-06-14
* Created by: <NAME> (TU Berlin)
The Plotter class provides three basic plotting methods:
* :meth:`plot_operation <aristopy.plotter.Plotter.plot_operation>`:
A mixed bar and line plot that visualizes the operation of a component on the
basis of a selected commodity.
* :meth:`plot_objective <aristopy.plotter.Plotter.plot_objective>`:
Bar chart that summarizes the cost contributions of each component to the
overall objective function value.
* :meth:`quick_plot <aristopy.plotter.Plotter.quick_plot>`:
Quick visualization for the values of one component variable as a line,
scatter, or bar plot.
.. note::
The results of the optimization are exported to dictionaries and stored
as strings in a json-file to easily handle multidimensional indices (e.g.
tuples). To evaluate the Python strings we use the function "literal_eval"
from the python built in library "ast". (the strings can only consist of:
strings, bytes, numbers, tuples, lists, dicts, sets, booleans, and None)
[`Ref <https://stackoverflow.com/questions/4547274/
convert-a-python-dict-to-a-string-and-back>`_]
"""
import os
import copy
import json
import ast
import matplotlib.pyplot as plt
import numpy as np
from warnings import warn
from aristopy import utils
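# The module docstring notes that results are stored as strings in a json-file
# and turned back into Python objects with ast.literal_eval. A minimal
# round-trip sketch (illustrative only, not part of the Plotter API):
def _literal_eval_roundtrip_demo():
    """Show that a dict with tuple keys survives the str()/json round trip."""
    data = {(0, 1): 42.0}                       # tuple keys are not valid JSON
    dumped = json.dumps(str(data))              # store the repr as a string
    restored = ast.literal_eval(json.loads(dumped))
    assert restored == data
    return restored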
# Option: Add a method for carpet plotting on a requested component variable.
class Plotter:
def __init__(self, json_file):
"""
The Plotter class provides three basic plotting methods:
* plot_operation
* plot_objective
* quick_plot
        :param json_file: Path to the optimization results file in JSON format
"""
# Leave if no results file available
if not os.path.isfile(json_file):
self.data = None
return
self.json_file = json_file # name / path to the json-file
# Read the data from the json-file
with open(self.json_file, encoding='utf-8') as f:
self.data = json.loads(f.read())
# Read general values from the data dict
self.nbr_of_ts = self.data['number_of_time_steps']
self.dt = self.data['hours_per_time_step']
self.is_clustered = self.data['is_data_clustered']
self.nbr_of_typ_periods = self.data['number_of_typical_periods']
self.nbr_of_ts_per_period = self.data['number_of_time_steps_per_period']
self.nbr_of_periods = self.data['total_number_of_periods']
self.periods_order = ast.literal_eval(self.data['periods_order'])
# Init values:
self.single_period = None # if clustered: plot only period with idx 'X'
self.level_of_detail = 2 # 1 (simple) or 2 (more detailed)
self.comp = '' # name of the component of interest
self.model_class = None # string, class name of comp, e.g. 'Storage'
# Values used for plotting -> can be changed by 'plot_operation'
self.dt_plot = self.dt # init
self.scale_plot = 1 / self.dt # init
# https://matplotlib.org/3.1.0/tutorials/colors/colormaps.html
        self.line_colors = ['black', 'red', 'blue', 'green', 'orange', 'brown']
        # 'tab20' contains 20 discrete bar_colors (first from 0 to 0.049, ...)
        # Todo: Simply use a colormap and divide it according to needed colors
        # Extended the available colors with the 'Dark2' and 'Accent' maps:
self.bar_colors = np.r_[plt.get_cmap('tab20')(np.linspace(0, 1, 20)),
plt.get_cmap('Dark2')(np.linspace(0, 1, 8)),
plt.get_cmap('Accent')(np.linspace(0, 1, 8))]
# Global properties dictionary:
self.props = {'fig_width': 10, 'fig_height': 6,
'bar_width': 1, 'bar_lw': 0, 'line_lw': 2, 'period_lw': 0,
'xlabel': 'Time steps [-]', 'ylabel': '',
'xticks_rotation': 0,
'grid': False, 'lgd_ncol': 1, 'lgd_pos': 'best',
'save_pgf': False, 'save_pdf': False,
'save_png': True, 'dpi': 200, 'pad_inches': None}
# ==========================================================================
# P L O T O B J E C T I V E
# ==========================================================================
def plot_objective(self, show_plot=False, save_plot=True,
file_name='objective_plot', **kwargs):
"""
Method to create a bar chart that summarizes the cost contributions of
each component of the EnergySystem instance to the overall objective
function value.
:param show_plot: State whether the plot should be shown once finalized
|br| *Default: False*
:type show_plot: bool
:param save_plot: State whether the plot should be saved once finalized
|br| *Default: True*
:type save_plot: bool
:param file_name: Name of the file (if saved); no file-ending required
|br| *Default: 'objective_plot'*
:type file_name: str
:param kwargs: Additional keyword arguments to manipulate the plot
(e.g., labels, figure size, legend position, ...).
See dict 'props' of the Plotter class.
"""
if self.data is None:
return
# Get the global plotting properties of the Plotter class (defaults)
props = copy.copy(self.props)
props['xlabel'] = None # default
props['ylabel'] = 'Objective function value (contribution)' # default
props['bar_width'] = 0.8 # default
# Overwrite props with local kwargs if specified and found.
for key, val in kwargs.items():
if key in props.keys():
props[key] = val
else:
warn('Keyword argument "{}" is unknown and ignored'.format(key))
# Get the plotting data:
obj_data = {}
for comp_name, comp_data in self.data['components'].items():
# ORDER: capex, opex, start_up, commodity_cost, commodity_revenues
data = comp_data['comp_obj_dict']
# Skip the component in the plot if all obj. entries are zero:
if sum(abs(i) for i in data.values()) <= 0.01: # rounding errors
continue
obj_data[comp_name] = \
[data['capex_capacity'] + data['capex_exist'],
data['opex_capacity'] + data['opex_exist'] + data[
'opex_operation'],
data['start_up_cost'],
data['commodity_cost'],
data['commodity_revenues']]
names = list(obj_data.keys())
# to vertically stacked and transposed array (1. row: capex, 2. opex,..)
values = np.vstack(list(obj_data.values())).transpose()
labels = ['CAPEX', 'OPEX', 'Start up cost',
'Commodity cost', 'Commodity revenues']
# If objective function contributions have been added via method
# 'add_objective_function_contribution' in EnergySystem:
added_obj = self.data['added_objective_function_contributions']
# if dict is not empty and if the sum of all (abs) entries is not zero
if added_obj and sum(abs(i) for i in added_obj.values()) != 0:
names.append('Added')
labels.extend(added_obj.keys())
add_rows = np.zeros(shape=(len(added_obj.keys()), values.shape[1]))
add_col = np.append(np.zeros(values.shape[0]),
list(added_obj.values()))
values = np.append(values, values=add_rows, axis=0)
values = np.insert(values, values.shape[1], values=add_col, axis=1)
# values = np.c_[values, tot] # --> faster but bad readability
# Plot the Total as an overall sum:
total = values.sum()
names.append('Total')
labels.append('Total')
add_row = np.zeros(shape=(1, values.shape[1]))
add_col = np.append(np.zeros(values.shape[0]), total)
values = np.append(values, values=add_row, axis=0)
values = np.insert(values, values.shape[1], values=add_col, axis=1)
# ----------------------
# https://stackoverflow.com/questions/35979852/stacked-bar-charts-using-python-matplotlib-for-positive-and-negative-values
# Take negative and positive data apart and cumulate
def get_cumulated_array(data, **kwargs):
cum = data.clip(**kwargs)
cum = np.cumsum(cum, axis=0)
d = np.zeros(np.shape(data))
d[1:] = cum[:-1]
return d
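        # Worked example with values = [[1, -2], [3, 4]] (rows = categories):
        #   get_cumulated_array(values, min=0) -> [[0, 0], [1, 0]]  (positive bottoms)
        #   get_cumulated_array(values, max=0) -> [[0, 0], [0, -2]] (negative bottoms)
        # After merging via row_mask below, each negative bar hangs from the
        # cumulative negative total while positive bars stack above zero.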
cumulated_data = get_cumulated_array(values, min=0)
cumulated_data_neg = get_cumulated_array(values, max=0)
# Re-merge negative and positive data.
row_mask = (values < 0)
cumulated_data[row_mask] = cumulated_data_neg[row_mask]
data_stack = cumulated_data
# ----------------------
# Plot stacked bars for all components and the total
fig, ax = plt.subplots(figsize=(props['fig_width'],
props['fig_height']))
for i, category in enumerate(labels):
ax.bar(names, values[i], props['bar_width'],
label=category, bottom=data_stack[i],
color=self.bar_colors[i], zorder=10,
edgecolor='black', linewidth=props['bar_lw'])
# Add horizontal line at y=0
ax.axhline(0, color='black', lw=0.8)
ax.tick_params(axis='x', labelrotation=props['xticks_rotation'])
ax.set_xlabel(props['xlabel'])
ax.set_ylabel(props['ylabel'])
ax.legend(ncol=props['lgd_ncol'], loc=props['lgd_pos'],
framealpha=0.8, edgecolor='black').set_zorder(100)
if props['grid']:
ax.grid(which='major', linestyle='--', zorder=0)
fig.tight_layout(pad=0.0, w_pad=0.2)
if show_plot:
plt.show()
if save_plot:
if props['save_png']:
fig.savefig(file_name+'.png', bbox_inches="tight",
pad_inches=props['pad_inches'], dpi=props['dpi'])
if props['save_pdf']:
fig.savefig(file_name+'.pdf', bbox_inches="tight",
pad_inches=props['pad_inches'])
if props['save_pgf']:
fig.savefig(file_name+'.pgf', bbox_inches="tight",
pad_inches=props['pad_inches'])
plt.close()
# ==========================================================================
# Q U I C K P L O T
# ==========================================================================
def quick_plot(self, component_name, variable_name, kind='bar',
save_plot=False, file_name=None):
"""
Method to create a quick visualization for the values of one component
variable as a line, scatter, or bar plot.
:param component_name: Name of the component that holds the variable
of interest.
:type component_name: str
:param variable_name: Name of the variable (or parameter) that should
be plotted.
:type variable_name: str
:param kind: States the kind of plot. Possible options are:
'plot' (line plot), 'scatter', 'bar'.
|br| *Default: 'bar'*
:type kind: str
:param save_plot: State whether the plot should be saved once finalized
|br| *Default: False*
:type save_plot: bool
:param file_name: Name of the file (if saved); no file-ending required.
Name is auto-generated if None is provided and plot should be saved.
|br| *Default: None*
:type file_name: str
"""
if self.data is None:
return
# Set the component and try to find values for the requested var / param
self.comp = component_name
data = self._get_values(variable_name)
if data is None: # return with warning if not successful
return warn('Could not find variable {} in component {}'
.format(variable_name, component_name))
fig, ax = plt.subplots(figsize=(self.props['fig_width'],
self.props['fig_height']))
if kind == 'plot':
ax.plot(range(len(data)), list(data.values()),
label=variable_name, zorder=10)
elif kind == 'scatter':
ax.scatter(range(len(data)), list(data.values()),
label=variable_name, zorder=10)
elif kind == 'bar':
ax.bar(range(len(data)), list(data.values()),
label=variable_name, zorder=10)
ax.set_xticks(range(len(data)))
ax.set_xticklabels(list(data.keys()))
ax.tick_params(axis='x', labelrotation=self.props['xticks_rotation'])
ax.set_title('Quickplot for component "{}"'.format(component_name),
size=16, color='black', ha='center')
ax.set_xlabel('Time index [period, time step]')
ax.set_ylabel('Quantity of variable "{}"'.format(variable_name))
# ax.grid(which='major', linestyle='--', zorder=0)
ax.legend(framealpha=0.8, edgecolor='black').set_zorder(100)
fig.tight_layout()
if save_plot:
f_name = file_name + '.png' if file_name is not None \
else '{}_{}.png'.format(component_name, variable_name)
fig.savefig(f_name, dpi=200)
else:
plt.show()
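    # Usage sketch (assumes a results file written by an aristopy
    # EnergySystem run; the component and variable names are illustrative):
    #     plotter = Plotter('results.json')
    #     plotter.quick_plot('Storage', 'SOC', kind='plot')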
# ==========================================================================
# P L O T O P E R A T I O N
# ==========================================================================
def plot_operation(self, component_name, commodity, level_of_detail=2,
scale_to_hourly_resolution=False,
plot_single_period_with_index=None, show_plot=False,
save_plot=True, file_name='operation_plot', **kwargs):
# Todo: Add start and end arguments to enable plotting of index slices?!
"""
        Method to create a mixed bar and line plot that visualizes the
        operation of a component on the basis of a selected commodity.
        """
from datetime import datetime, timedelta
import json
import unittest
import logging
from alerta.utils.format import DateTime
from alerta.app import create_app, db, plugins
from alerta.models.key import ApiKey
LOG = logging.getLogger("test.test_notification_rule")
def get_id(obj: dict):
    return obj["id"]
class NotificationRuleTestCase(unittest.TestCase):
def setUp(self) -> None:
test_config = {
"TESTING": True,
"AUTH_REQUIRED": True,
"CUSTOMER_VIEWS": True,
"PLUGINS": [],
}
self.app = create_app(test_config)
self.client = self.app.test_client()
self.sms_channel = {
"id": "SMS_Channel",
"sender": "sender",
"type": "twilio_sms",
"apiToken": "api_token",
"apiSid": "api_sid",
}
self.call_channel = {
"id": "CALL_Channel",
"sender": "sender",
"type": "twilio_call",
"apiToken": "api_token",
"apiSid": "api_sid",
}
self.mail_channel = {
"id": "MAIL_Channel",
"sender": "sender",
"type": "sendgrid",
"apiToken": "api_token",
}
self.prod_alert = {
"resource": "node404",
"event": "node_down",
"environment": "Production",
"severity": "minor",
"correlate": ["node_down", "node_marginal", "node_up"],
"service": ["Core", "Web", "Network"],
"group": "Network",
"tags": ["level=20", "switch:off"],
}
self.dev_alert = {
"resource": "node404",
"event": "node_marginal",
"environment": "Development",
"severity": "warning",
"correlate": ["node_down", "node_marginal", "node_up"],
"service": ["Core", "Web", "Network"],
"group": "Network",
"tags": ["level=20", "switch:off"],
}
self.fatal_alert = {
"event": "node_down",
"resource": "net01",
"environment": "Production",
"service": ["Network"],
"severity": "critical",
"correlate": ["node_down", "node_marginal", "node_up"],
"tags": ["foo"],
"attributes": {"foo": "abc def", "bar": 1234, "baz": False},
}
self.critical_alert = {
"event": "node_marginal",
"resource": "net02",
"environment": "Production",
"service": ["Network"],
"severity": "critical",
"correlate": ["node_down", "node_marginal", "node_up"],
"timeout": 30,
}
self.major_alert = {
"event": "node_marginal",
"resource": "net03",
"environment": "Production",
"service": ["Network"],
"severity": "major",
"correlate": ["node_down", "node_marginal", "node_up"],
"timeout": 40,
}
self.normal_alert = {
"event": "node_up",
"resource": "net03",
"environment": "Production",
"service": ["Network"],
"severity": "normal",
"correlate": ["node_down", "node_marginal", "node_up"],
"timeout": 100,
}
self.minor_alert = {
"event": "node_marginal",
"resource": "net04",
"environment": "Production",
"service": ["Network"],
"severity": "minor",
"correlate": ["node_down", "node_marginal", "node_up"],
"timeout": 40,
}
self.ok_alert = {
"event": "node_up",
"resource": "net04",
"environment": "Production",
"service": ["Network"],
"severity": "ok",
"correlate": ["node_down", "node_marginal", "node_up"],
"timeout": 100,
}
self.warn_alert = {
"event": "node_marginal",
"resource": "net05",
"environment": "Production",
"service": ["Network"],
"severity": "warning",
"correlate": ["node_down", "node_marginal", "node_up"],
"timeout": 50,
}
with self.app.test_request_context("/"):
self.app.preprocess_request()
self.admin_api_key = ApiKey(
user="<EMAIL>",
scopes=["admin", "read", "write"],
text="demo-key",
)
self.customer_api_key = ApiKey(
user="<EMAIL>",
scopes=["admin", "read", "write"],
text="demo-key",
customer="Foo",
)
self.admin_api_key.create()
self.customer_api_key.create()
self.headers = {
"Authorization": f"Key {self.admin_api_key.key}",
"Content-type": "application/json",
}
def tearDown(self) -> None:
plugins.plugins.clear()
db.destroy()
def create_api_obj(self, apiurl: str, apidata: dict, apiheaders: dict, status_code: int = 201) -> dict:
response = self.client.post(apiurl, data=json.dumps(apidata), headers=apiheaders)
self.assertEqual(response.status_code, status_code)
return json.loads(response.data.decode("utf-8"))
def update_api_obj(self, apiurl: str, apidata: dict, apiheaders: dict, status_code: int = 200) -> dict:
response = self.client.put(apiurl, data=json.dumps(apidata), headers=apiheaders)
self.assertEqual(response.status_code, status_code)
return json.loads(response.data.decode("utf-8"))
def get_api_obj(self, apiurl: str, apiheaders: dict, status_code: int = 200) -> dict:
response = self.client.get(apiurl, headers=apiheaders)
self.assertEqual(response.status_code, status_code)
return json.loads(response.data.decode("utf-8"))
def delete_api_obj(self, apiurl: str, apiheaders: dict, status_code: int = 200) -> dict:
response = self.client.delete(apiurl, headers=apiheaders)
self.assertEqual(response.status_code, status_code)
return json.loads(response.data.decode("utf-8"))
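    # The four wrappers above call the Flask test client, assert the expected
    # HTTP status code, and decode the JSON body. Typical use in a test
    # (sketch):
    #     rule = self.create_api_obj("/notificationrules", payload, self.headers)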
def get_notification_rule_id(self, notification_rule: dict) -> str:
return notification_rule["id"]
def test_notification_sms(self):
notification_rule = {
"environment": "Production",
"channelId": "SMS_Channel",
"service": ["Core"],
"receivers": [],
}
self.channel_id = self.create_api_obj("/notificationchannels", self.sms_channel, self.headers)["id"]
data = self.create_api_obj("/notificationrules", notification_rule, self.headers)
notification_rule_id = data["id"]
# new alert should activate notification_rule
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
# duplicate alert should not activate notification_rule
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
# duplicate alert should not activate notification_rule (again)
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
# increase severity alert should activate notification_rule
self.prod_alert["severity"] = "major"
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
# increase severity alert should activate notification_rule (again)
self.prod_alert["severity"] = "critical"
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
# decrease severity alert should activate notification_rule
self.prod_alert["severity"] = "minor"
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
# decrease severity alert should activate notification_rule (again)
self.prod_alert["severity"] = "warning"
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
self.delete_api_obj("/notificationrules/" + notification_rule_id, self.headers)
def test_edit_notification_rule(self):
self.create_api_obj("/alert", self.prod_alert, self.headers)
notification_rule = {
"environment": "Production",
"channelId": "SMS_Channel",
"resource": "node404",
"service": ["Network", "Web"],
"receivers": [],
"startTime": "00:00",
"endTime": "23:59",
}
self.channel_id = self.create_api_obj("/notificationchannels", self.sms_channel, self.headers)["id"]
notification_rule_data = self.create_api_obj("/notificationrules", notification_rule, self.headers)
notification_rule_id = notification_rule_data["id"]
self.prod_alert["severity"] = "minor" if self.prod_alert["severity"] != "minor" else "major"
data = self.get_api_obj("/notificationrules", self.headers)
data = self.create_api_obj("/alert", self.prod_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
update = {
"environment": "Development",
"event": None,
"tags": [],
"endTime": "22:00",
}
data = self.update_api_obj("/notificationrules/" + notification_rule_id, update, self.headers)
self.assertEqual(data["status"], "ok")
data = self.get_api_obj("/notificationrules/" + notification_rule_id, self.headers)
self.assertEqual(data["notificationRule"]["environment"], "Development")
self.assertEqual(data["notificationRule"]["resource"], "node404")
self.assertEqual(data["notificationRule"]["service"], ["Network", "Web"])
self.assertEqual(data["notificationRule"]["group"], None)
self.assertEqual(data["notificationRule"]["startTime"], "00:00")
self.assertEqual(data["notificationRule"]["endTime"], "22:00")
data = self.create_api_obj("/alert", self.dev_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_id,
map(get_id, active_notification_rules),
)
self.delete_api_obj("/notificationrules/" + notification_rule_id, self.headers)
def test_full_notification_rule(self):
base_alert = {
"environment": "Production",
"resource": "notification_net",
"event": "notification_down",
"severity": "minor",
"service": ["Core", "Web", "Network"],
"group": "Network",
"tags": ["notification_test", "network"],
}
more_service_alert = {**base_alert, "service": ["Core", "Web", "Network", "More"]}
less_service_alert = {**base_alert, "service": ["Core", "Web"]}
none_service_alert = {**base_alert, "service": []}
pop_service_alert = {**base_alert}
pop_service_alert.pop("service")
more_tags_alert = {**base_alert, "tags": ["notification_test", "network", "more"]}
less_tags_alert = {**base_alert, "tags": ["notification_test"]}
none_tags_alert = {**base_alert, "tags": []}
pop_tags_alert = {**base_alert}
pop_tags_alert.pop("tags")
wrong_resource_alert = {**base_alert, "resource": "wrong"}
none_resource_alert = {**base_alert, "resource": None}
pop_resource_alert = {**base_alert}
pop_resource_alert.pop("resource")
wrong_event_alert = {**base_alert, "event": "wrong"}
none_event_alert = {**base_alert, "event": None}
pop_event_alert = {**base_alert}
pop_event_alert.pop("event")
wrong_environment_alert = {**base_alert, "environment": "wrong"}
none_environment_alert = {**base_alert, "environment": None}
pop_environment_alert = {**base_alert}
pop_environment_alert.pop("environment")
wrong_severity_alert = {**base_alert, "severity": "critical"}
none_severity_alert = {**base_alert, "severity": None}
pop_severity_alert = {**base_alert}
pop_severity_alert.pop("severity")
wrong_group_alert = {**base_alert, "group": "wrong"}
none_group_alert = {**base_alert, "group": None}
pop_group_alert = {**base_alert}
pop_group_alert.pop("group")
self.assertNotEqual(wrong_group_alert["group"], base_alert["group"])
notification_rule = {
"channelId": "SMS_Channel",
"receivers": [],
"environment": "Production",
"resource": "notification_net",
"event": "notification_down",
"severity": ["major", "minor"],
"service": ["Core", "Web", "Network"],
"days": ["Mon", "Tue", "Wed", "Thu", "Fri"],
"group": "Network",
"tags": ["notification_test", "network"],
"startTime": "00:00",
"endTime": "23:59",
"text": "Hey, this is a test of notification rules",
}
self.channel_id = self.create_api_obj("/notificationchannels", self.sms_channel, self.headers)["id"]
notification_rule_data = self.create_api_obj("/notificationrules", notification_rule, self.headers)
data = self.create_api_obj("/alert", base_alert, self.headers)
active_notification_rules = self.create_api_obj("/notificationrules/active", data["alert"], self.headers, 200)["notificationRules"]
self.assertIn(
notification_rule_data["notificationRule"],
active_notification_rules,
)
more_data = self.create_api_obj("/alert", more_service_alert, self.headers)
less_data = self.create_api_obj("/alert", less_service_alert, self.headers)
none_data = self.create_api_obj("/alert", none_service_alert, self.headers)
pop_data = self.create_api_obj("/alert", pop_service_alert, self.headers)
more_active_notification_rules = self.create_api_obj("/notificationrules/active", more_data["alert"], self.headers, 200)["notificationRules"]
less_active_notification_rules = self.create_api_obj("/notificationrules/active", less_data["alert"], self.headers, 200)["notificationRules"]
none_active_notification_rules = self.create_api_obj("/notificationrules/active", none_data["alert"], self.headers, 200)["notificationRules"]
pop_active_notification_rules = self.create_api_obj("/notificationrules/active", pop_data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(notification_rule_data["notificationRule"], more_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], less_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], none_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], pop_active_notification_rules)
more_data = self.create_api_obj("/alert", more_tags_alert, self.headers)
less_data = self.create_api_obj("/alert", less_tags_alert, self.headers)
none_data = self.create_api_obj("/alert", none_tags_alert, self.headers)
pop_data = self.create_api_obj("/alert", pop_tags_alert, self.headers)
more_active_notification_rules = self.create_api_obj("/notificationrules/active", more_data["alert"], self.headers, 200)["notificationRules"]
less_active_notification_rules = self.create_api_obj("/notificationrules/active", less_data["alert"], self.headers, 200)["notificationRules"]
none_active_notification_rules = self.create_api_obj("/notificationrules/active", none_data["alert"], self.headers, 200)["notificationRules"]
pop_active_notification_rules = self.create_api_obj("/notificationrules/active", pop_data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(notification_rule_data["notificationRule"], more_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], less_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], none_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], pop_active_notification_rules)
wrong_data = self.create_api_obj("/alert", wrong_resource_alert, self.headers)
none_data = self.create_api_obj("/alert", none_resource_alert, self.headers, 400)
pop_data = self.create_api_obj("/alert", pop_resource_alert, self.headers, 400)
wrong_active_notification_rules = self.create_api_obj("/notificationrules/active", wrong_data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(notification_rule_data["notificationRule"], wrong_active_notification_rules)
wrong_data = self.create_api_obj("/alert", wrong_event_alert, self.headers)
none_data = self.create_api_obj("/alert", none_event_alert, self.headers, 400)
pop_data = self.create_api_obj("/alert", pop_event_alert, self.headers, 400)
wrong_active_notification_rules = self.create_api_obj("/notificationrules/active", wrong_data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(notification_rule_data["notificationRule"], wrong_active_notification_rules)
wrong_data = self.create_api_obj("/alert", wrong_environment_alert, self.headers)
none_data = self.create_api_obj("/alert", none_environment_alert, self.headers)
pop_data = self.create_api_obj("/alert", pop_environment_alert, self.headers)
wrong_active_notification_rules = self.create_api_obj("/notificationrules/active", wrong_data["alert"], self.headers, 200)["notificationRules"]
none_active_notification_rules = self.create_api_obj("/notificationrules/active", none_data["alert"], self.headers, 200)["notificationRules"]
pop_active_notification_rules = self.create_api_obj("/notificationrules/active", pop_data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(notification_rule_data["notificationRule"], wrong_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], none_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], pop_active_notification_rules)
wrong_data = self.create_api_obj("/alert", wrong_severity_alert, self.headers)
none_data = self.create_api_obj("/alert", none_severity_alert, self.headers)
pop_data = self.create_api_obj("/alert", pop_severity_alert, self.headers)
wrong_active_notification_rules = self.create_api_obj("/notificationrules/active", wrong_data["alert"], self.headers, 200)["notificationRules"]
none_active_notification_rules = self.create_api_obj("/notificationrules/active", none_data["alert"], self.headers, 200)["notificationRules"]
pop_active_notification_rules = self.create_api_obj("/notificationrules/active", pop_data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(notification_rule_data["notificationRule"], wrong_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], none_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], pop_active_notification_rules)
data = self.delete_api_obj(f"/alert/{data['id']}", self.headers)
wrong_data = self.create_api_obj("/alert", wrong_group_alert, self.headers)
none_data = self.create_api_obj("/alert", none_group_alert, self.headers)
pop_data = self.create_api_obj("/alert", pop_group_alert, self.headers)
wrong_active_notification_rules = self.create_api_obj("/notificationrules/active", wrong_data["alert"], self.headers, 200)["notificationRules"]
none_active_notification_rules = self.create_api_obj("/notificationrules/active", none_data["alert"], self.headers, 200)["notificationRules"]
pop_active_notification_rules = self.create_api_obj("/notificationrules/active", pop_data["alert"], self.headers, 200)["notificationRules"]
self.assertNotIn(notification_rule_data["notificationRule"], wrong_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], none_active_notification_rules)
self.assertNotIn(notification_rule_data["notificationRule"], pop_active_notification_rules)
def test_full_alert(self):
now_time = datetime.now()
diff_time = timedelta(hours=2)
diff_start_time = now_time + diff_time
diff_end_time = now_time - diff_time
alert = {
"environment": "Development",
"resource": "notification_resource",
"event": "notification_event",
"severity": "major",
"service": ["Core", "Web", "Network", "Notification_service"],
"group": "Network",
"tags": ["notification_test", "network"],
"text": "No Descrition",
"value": "notification_value",
"origin": | |
<reponame>PythonBiellaGroup/LearningPythonWithGames
# Player movement prototype program:
# - Cursor left-right: move player
# - Cursor up: player jump
# - Space: shoot a rainbow
import pgzrun
from pygame import image, Color, Surface
from shapely.geometry import Point, Polygon, LineString
from shapely.affinity import translate
from random import randint
import math
from Listing11_PlatformNames import platformNames
from Listing13_PlatformLines import platformLines
from Listing15_Platforms import platforms
from Listing18_EnemyNames import enemyNames
from Listing20_Enemies import enemies
drawLines = False
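# Axis-aligned bounding-box (AABB) test: two rectangles (given as centre and
# half-size pairs) intersect iff their projections overlap on both axes.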
def RectanglesIntersect(centreA, halfSizeA, centreB, halfSizeB):
if centreA[0] - halfSizeA[0] < centreB[0] + halfSizeB[0] and centreA[0] + halfSizeA[0] > centreB[0] - halfSizeB[0]:
if centreA[1] - halfSizeA[1] < centreB[1] + halfSizeB[1] and centreA[1] + halfSizeA[1] > centreB[1] - halfSizeB[1]:
return True
return False
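def _rectangles_intersect_demo():
    """Hedged sketch (not used by the game): two unit-half-size squares
    centred one apart overlap; three apart do not."""
    assert RectanglesIntersect((0, 0), (1, 1), (1, 1), (1, 1))
    assert not RectanglesIntersect((0, 0), (1, 1), (3, 0), (1, 1))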
maxHeightPlatform = 124
class AllPlatforms():
def __init__(self):
self.platformActors = [Actor(platformNames[platforms[i][0]], (platforms[i][1], platforms[i][2])) for i in range(len(platforms))]
self.platformLineStrings = []
for i in range(len(platforms)):
points = [(platformLines[platforms[i][0]][j][0] + platforms[i][1],
platformLines[platforms[i][0]][j][1] + platforms[i][2])
for j in range(len(platformLines[platforms[i][0]]))]
self.platformLineStrings.append(LineString(points))
def draw(self):
for platform in self.platformActors:
if platform.y > -maxHeightPlatform/2 and platform.y < 600+maxHeightPlatform/2:
platform.draw()
if drawLines:
for line in self.platformLineStrings:
for i in range(1, len(line.coords)):
screen.draw.line((line.coords[i-1][0], line.coords[i-1][1]-screenPosition),
(line.coords[i][0], line.coords[i][1]-screenPosition), (255,255,255))
def update(self, newScreenPosition):
for i in range(len(platforms)):
self.platformActors[i].x = platforms[i][1]
self.platformActors[i].y = platforms[i][2] - screenPosition
allPlatforms = AllPlatforms()
rainbowHalfSize = 39
rainbowTimeLife = 200
class Rainbow:
def __init__(self, centreX, centreY, creationTime):
self.centre = [centreX, centreY]
self.timeFromCreation = creationTime
self.rainbowActor = Actor('rainbow', (centreX, centreY - rainbowHalfSize/2))
points = []
for i in range(0, 180+20, 20):
points.append((self.centre[0] + rainbowHalfSize*math.cos(math.pi*i/180)*0.75, self.centre[1] - rainbowHalfSize*math.sin(math.pi*i/180)))
self.lineString = LineString(points)
def draw(self):
if self.timeFromCreation >= 0:
self.rainbowActor.y = self.centre[1] - rainbowHalfSize/2 - screenPosition
self.rainbowActor.draw()
if drawLines:
for i in range(1, len(self.lineString.coords)):
screen.draw.line((self.lineString.coords[i-1][0], self.lineString.coords[i-1][1]-screenPosition),
(self.lineString.coords[i][0], self.lineString.coords[i][1]-screenPosition), (255,255,255))
def update(self):
if self.timeFromCreation == 0:
for i in reversed(range(len(allEnemies.enemies))):
enemyLine = LineString([(allEnemies.enemies[i].centre[0] - enemyHalfSize, allEnemies.enemies[i].centre[1] - enemyHalfSize),
(allEnemies.enemies[i].centre[0] - enemyHalfSize, allEnemies.enemies[i].centre[1] + enemyHalfSize),
(allEnemies.enemies[i].centre[0] + enemyHalfSize, allEnemies.enemies[i].centre[1] + enemyHalfSize),
(allEnemies.enemies[i].centre[0] + enemyHalfSize, allEnemies.enemies[i].centre[1] - enemyHalfSize)])
if self.lineString.intersects(enemyLine):
allEnemies.killEnemy(i)
self.timeFromCreation += 1
rainbowAccelerationDown = 1
class FallingRainbow:
def __init__(self, centreX, centreY):
self.centre = [centreX, centreY]
self.rainbowActor = Actor('falling_rainbow', (centreX, centreY - rainbowHalfSize/2))
self.speedY = 0
def draw(self):
self.rainbowActor.y = self.centre[1] - rainbowHalfSize/2 - screenPosition
self.rainbowActor.draw()
if drawLines:
screen.draw.line((self.centre[0] - rainbowHalfSize, self.centre[1] - screenPosition - rainbowHalfSize//2),
(self.centre[0] + rainbowHalfSize, self.centre[1] - screenPosition - rainbowHalfSize//2), (255,255,255))
screen.draw.line((self.centre[0] + rainbowHalfSize, self.centre[1] - screenPosition - rainbowHalfSize//2),
(self.centre[0] + rainbowHalfSize, self.centre[1] - screenPosition + rainbowHalfSize + self.speedY), (255,255,255))
screen.draw.line((self.centre[0] + rainbowHalfSize, self.centre[1] - screenPosition + rainbowHalfSize + self.speedY),
(self.centre[0] - rainbowHalfSize, self.centre[1] - screenPosition + rainbowHalfSize + self.speedY), (255,255,255))
screen.draw.line((self.centre[0] - rainbowHalfSize, self.centre[1] - screenPosition + rainbowHalfSize + self.speedY),
(self.centre[0] - rainbowHalfSize, self.centre[1] - screenPosition - rainbowHalfSize//2), (255,255,255))
def update(self):
self.speedY += rainbowAccelerationDown
self.centre[1] += self.speedY
centerCollision = [self.centre[0], self.centre[1] + self.speedY * 2]
halfSizeCollision = [rainbowHalfSize, (rainbowHalfSize + self.speedY) / 2]
for i in reversed(range(len(allEnemies.enemies))):
if RectanglesIntersect(centerCollision, halfSizeCollision, allEnemies.enemies[i].centre, [enemyHalfSize, enemyHalfSize]):
allEnemies.killEnemy(i)
for i in reversed(range(len(allRainbows.rainbows))):
if RectanglesIntersect(centerCollision, halfSizeCollision, allRainbows.rainbows[i].centre, [rainbowHalfSize, rainbowHalfSize/2]):
allRainbows.rainbowFall(i)
class AllRainbows:
def __init__(self):
self.rainbows = []
self.fallingRainbows = []
def restart(self):
self.rainbows = []
self.fallingRainbows = []
def draw(self):
for rainbow in self.rainbows:
rainbow.draw()
for fallingRainbow in self.fallingRainbows:
fallingRainbow.draw()
def update(self):
for i in reversed(range(len(self.rainbows))):
self.rainbows[i].update()
if self.rainbows[i].timeFromCreation > rainbowTimeLife:
self.rainbowFall(i)
for i in reversed(range(len(self.fallingRainbows))):
self.fallingRainbows[i].update()
if self.fallingRainbows[i].centre[1]-screenPosition-rainbowHalfSize > 600:
del self.fallingRainbows[i]
def rainbowFall(self, index):
self.fallingRainbows.append(FallingRainbow(self.rainbows[index].centre[0], self.rainbows[index].centre[1]))
del self.rainbows[index]
def append(self, posX, posY, time):
self.rainbows.append(Rainbow(posX, posY, time))
allRainbows = AllRainbows()
playerHalfSizeX = 10
playerHalfSizeY = 18
playerAccelerationDown = 0.5
playerJumpSpeed = 14
playerTerminalSpeed = 12
playerLateralSpeed = 4
coyoteTimeVerticalSpeed = 4
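# "Coyote time": the player is not flagged as jumping until the downward speed
# exceeds coyoteTimeVerticalSpeed (see Player.update), leaving a short grace
# window to jump after walking off a platform edge.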
playerVerticalSpeedToDestroyRainbow = 5
playerMinTimeBetweenRainbows = 16
screenPosition = 0
playerImageNames = ['player_stand_right', 'player_walk_1_right', 'player_stand_right', 'player_walk_2_right', 'player_jump_up_right', 'player_jump_down_right',
'player_stand_left', 'player_walk_1_left', 'player_stand_left', 'player_walk_2_left', 'player_jump_up_left', 'player_jump_down_left',
'player_collided_1', 'player_collided_2']
class Player:
def __init__(self):
self.centre = [400, 500]
self.centredLineString = LineString([(-playerHalfSizeX,-playerHalfSizeY*0),
(-playerHalfSizeX,playerHalfSizeY),
(playerHalfSizeX,playerHalfSizeY),
(playerHalfSizeX,-playerHalfSizeY*0)])
self.lineString = translate(self.centredLineString, self.centre[0], self.centre[1])
self.polygon = Polygon(self.lineString)
self.speedY = 0
self.walking = False
self.directionX = -1
self.jumping = False
self.actors = [Actor(name) for name in playerImageNames]
self.numberOfRainbows = 1
self.lastRainbowShot = playerMinTimeBetweenRainbows
self.active = True
self.lives = 3
def draw(self):
indexImage = 0
if self.active:
if self.jumping:
if self.speedY < 0:
indexImage = 4
else:
indexImage = 5
elif self.walking:
indexImage = (self.centre[0]//13) % 4
if self.directionX < 0:
indexImage += 6
else:
indexImage = 12 + (self.centre[0]//13) % 2
self.actors[indexImage].x = self.centre[0]
self.actors[indexImage].y = self.centre[1] - screenPosition
self.actors[indexImage].draw()
if drawLines:
for i in range(len(self.lineString.coords)):
screen.draw.line((self.lineString.coords[i-1][0],self.lineString.coords[i-1][1]-screenPosition),
(self.lineString.coords[i][0],self.lineString.coords[i][1]-screenPosition), (255,0,0))
def intersectPlatform(self, platform, collidingObject):
intersection = collidingObject.intersection(platform)
if not intersection.is_empty:
newPositionY = self.centre[1]
if intersection.geom_type == 'MultiPoint':
newPositionY = min(intersection.geoms, key=lambda x: x.coords[0][1]).coords[0][1] - playerHalfSizeY
elif intersection.geom_type == 'Point':
newPositionY = intersection.coords[0][1] - playerHalfSizeY
elif intersection.geom_type == 'LineString':
newPositionY = min(intersection.coords, key=lambda x: x[1])[1] - playerHalfSizeY
if self.centre[1] > newPositionY:
self.centre[1] = newPositionY
self.speedY = 0
self.jumping = False
return True
return False
def intersectEnemy(self, enemies):
for enemy in enemies:
if enemy.active:
if RectanglesIntersect(self.centre, [playerHalfSizeX, playerHalfSizeY], enemy.centre, [enemyHalfSize, enemyHalfSize]):
return True
return False
def addRainbow(self):
if self.numberOfRainbows < 3:
self.numberOfRainbows += 1
def update(self):
if self.lives == 0:
return
if not self.active:
self.speedY += playerAccelerationDown
self.centre[0] += self.speedX // 2
self.centre[1] += self.speedY // 2
if self.centre[1] > 600 + playerHalfSizeY*6:
self.lives -= 1
if self.lives > 0:
self.centre = [400, 500]
self.speedX = 0
self.speedY = 0
self.numberOfRainbows = 1
self.lineString = translate(self.centredLineString, self.centre[0], self.centre[1])
self.polygon = Polygon(self.lineString)
allRainbows.restart()
allCollectables.restart()
allEnemies.restart()
self.active = True
return
if self.speedY >= 0:
if not self.jumping:
collidingObject = self.polygon
else:
collidingObject = LineString([(self.centre[0]-playerHalfSizeX, self.centre[1]+playerHalfSizeY-2),
(self.centre[0]-playerHalfSizeX, self.centre[1]+playerHalfSizeY+self.speedY+2),
(self.centre[0]+playerHalfSizeX, self.centre[1]+playerHalfSizeY+self.speedY+2),
(self.centre[0]+playerHalfSizeX, self.centre[1]+playerHalfSizeY-2)])
for platform in allPlatforms.platformLineStrings:
self.intersectPlatform(platform, collidingObject)
previousSpeed = self.speedY
for i in reversed(range(len(allRainbows.rainbows))):
if allRainbows.rainbows[i].timeFromCreation >= 0:
if self.intersectPlatform(allRainbows.rainbows[i].lineString, collidingObject):
if previousSpeed >= playerVerticalSpeedToDestroyRainbow:
allRainbows.rainbowFall(i)
if not self.jumping and self.speedY >= coyoteTimeVerticalSpeed:
self.jumping = True
if self.centre[1] > 600 - playerHalfSizeY:
self.centre[1] = 600 - playerHalfSizeY
self.speedY = 0
self.jumping = False
else:
self.speedY += playerAccelerationDown
if self.speedY > playerTerminalSpeed:
self.speedY = playerTerminalSpeed
self.centre[1] += self.speedY
if self.intersectEnemy(allEnemies.enemies):
self.active = False
self.speedY = -playerJumpSpeed
if self.centre[0] > 400:
self.speedX = -playerLateralSpeed
else:
self.speedX = playerLateralSpeed
self.lineString = translate(self.centredLineString, self.centre[0], self.centre[1])
self.polygon = Polygon(self.lineString)
self.lastRainbowShot += 1
def jump(self):
if not self.jumping:
self.speedY = -playerJumpSpeed
self.jumping = True
def stopJump(self):
if self.jumping and self.speedY < -playerJumpSpeed/2:
self.speedY = -playerJumpSpeed/2
def left(self):
self.centre[0] -= playerLateralSpeed
self.directionX = -1
if self.centre[0] < playerHalfSizeX:
self.centre[0] = playerHalfSizeX
self.walking = True
def right(self):
self.centre[0] += playerLateralSpeed
self.directionX = 1
if self.centre[0] > 800 - playerHalfSizeX:
self.centre[0] = 800 - playerHalfSizeX
self.walking = True
def still(self):
self.walking = False
def shootRainbow(self):
if self.lastRainbowShot > playerMinTimeBetweenRainbows:
self.lastRainbowShot = 0
for i in range(self.numberOfRainbows):
allRainbows.append(self.centre[0] + self.directionX * ((rainbowHalfSize + playerHalfSizeX + 2) + i * (rainbowHalfSize-2)*2), self.centre[1] + playerHalfSizeY, -i*10)
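# Note: rainbows after the first are created with a negative creation time
# (-i*10), so Rainbow.draw/update make them appear one after another rather
# than all at once.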
player = Player()
enemyHalfSize = 12
enemyAccelerationDown = 0.5
enemyTerminalSpeed = 8
enemyLateralSpeed = 2
enemyFlyingVerticalSpeed = 1
class Enemy:
def __init__(self, centreX, centreY, indexEnemy, directionX):
self.centre = [centreX, centreY]
self.centredLineString = LineString([(-enemyHalfSize,-enemyHalfSize),
(-enemyHalfSize,enemyHalfSize),
(enemyHalfSize,enemyHalfSize),
(enemyHalfSize,-enemyHalfSize)])
self.lineString = translate(self.centredLineString, self.centre[0], self.centre[1])
self.speedX = directionX * enemyLateralSpeed
self.speedY = 0
if indexEnemy == 2:
self.speedY = enemyFlyingVerticalSpeed
self.index = indexEnemy
self.actors = [Actor(name) for name in enemyNames[indexEnemy]]
self.active = False
def draw(self):
if self.active and self.centre[1] - screenPosition > -enemyHalfSize and self.centre[1] - screenPosition < 600+enemyHalfSize:
indexActor = ((self.centre[0]//6) % (len(enemyNames[self.index])//2))*2
if self.speedX < 0:
indexActor += 1
self.actors[indexActor].x = self.centre[0]
self.actors[indexActor].y = self.centre[1] - screenPosition
self.actors[indexActor].draw()
if drawLines:
for i in range(len(self.lineString.coords)):
screen.draw.line((self.lineString.coords[i-1][0], self.lineString.coords[i-1][1] - screenPosition),
(self.lineString.coords[i][0], self.lineString.coords[i][1] - screenPosition), (255,0,0))
def update(self):
if self.centre[1] - screenPosition > -enemyHalfSize/2:
self.active = True
if not self.active or self.centre[1] - screenPosition > 600+enemyHalfSize:
return
if self.index == 0:
if self.speedX > 0:
lineIntersection = LineString([(self.centre[0]+enemyHalfSize, self.centre[1]-6),
(self.centre[0]+enemyHalfSize, self.centre[1]+enemyHalfSize+16)])
else:
lineIntersection = LineString([(self.centre[0]-enemyHalfSize, self.centre[1]-6),
(self.centre[0]-enemyHalfSize, self.centre[1]+enemyHalfSize+16)])
else:
lineIntersection = LineString([(self.centre[0]-enemyHalfSize, self.centre[1]-enemyHalfSize),
(self.centre[0]-enemyHalfSize, self.centre[1]+enemyHalfSize+self.speedY+2),
(self.centre[0]+enemyHalfSize, self.centre[1]+enemyHalfSize+self.speedY+2),
(self.centre[0]+enemyHalfSize, self.centre[1]-enemyHalfSize)])
intersectionFound = False
for platform in allPlatforms.platformLineStrings:
intersection = lineIntersection.intersection(platform)
if not intersection.is_empty:
if self.index != 2:
if intersection.geom_type == 'MultiPoint':
self.centre[1] = min(intersection.geoms, key=lambda x: x.coords[0][1]).coords[0][1] - enemyHalfSize
elif intersection.geom_type == 'Point':
self.centre[1] = intersection.coords[0][1] - enemyHalfSize
intersectionFound = True
if self.index != 2:
self.speedY = 0
for rainbow in allRainbows.rainbows:
intersection = lineIntersection.intersection(rainbow.lineString)
if not intersection.is_empty:
if self.index == 0 or self.index == 2:
self.speedX = -self.speedX
elif self.index == 1 and self.speedY < 1:
self.speedX = -self.speedX
else:
if
28736.6017845307 * self.t)
Y0 += 0.00000000045 * math.cos(2.54247709679 + 55504.18575691209 * self.t)
Y0 += 0.00000000062 * math.cos(6.05167319913 + 176954.23375935068 * self.t)
Y0 += 0.00000000043 * math.cos(4.09072234194 + 78903.8505285201 * self.t)
Y0 += 0.00000000044 * math.cos(2.10728026202 + 16703.3059509825 * self.t)
Y0 += 0.00000000052 * math.cos(6.07065313687 + 110012.70079796549 * self.t)
Y0 += 0.00000000050 * math.cos(3.89612671834 + 25138.9713500879 * self.t)
Y0 += 0.00000000043 * math.cos(2.78443984846 + 1272.9248431107 * self.t)
Y0 += 0.00000000044 * math.cos(3.76849382783 + 105460.74730090669 * self.t)
Y0 += 0.00000000058 * math.cos(2.82347260176 + 26094.77287109149 * self.t)
Y0 += 0.00000000042 * math.cos(0.46194308183 + 1486.2239385487 * self.t)
Y0 += 0.00000000042 * math.cos(0.51106392440 + 78189.17164363878 * self.t)
Y0 += 0.00000000048 * math.cos(4.03335182320 + 65831.91059180829 * self.t)
Y0 += 0.00000000054 * math.cos(4.84278312652 + 52382.23564906909 * self.t)
Y0 += 0.00000000041 * math.cos(1.77651394552 + 1795.5022612045 * self.t)
Y0 += 0.00000000043 * math.cos(2.60404615127 + 60055.65161900389 * self.t)
Y0 += 0.00000000046 * math.cos(4.86931523516 + 54087.2495838491 * self.t)
Y0 += 0.00000000053 * math.cos(0.13354671820 + 260878.78759825832 * self.t)
Y0 += 0.00000000048 * math.cos(5.22216564191 + 25466.4031582185 * self.t)
Y0 += 0.00000000048 * math.cos(3.58305204289 + 26709.8907598969 * self.t)
Y0 += 0.00000000048 * math.cos(0.57485699057 + 76667.76679992149 * self.t)
Y0 += 0.00000000047 * math.cos(4.05954911572 + 104275.59031250469 * self.t)
Y0 += 0.00000000039 * math.cos(5.49383143740 + 132658.51662954128 * self.t)
Y0 += 0.00000000040 * math.cos(0.07089879206 + 26824.02347258949 * self.t)
Y0 += 0.00000000040 * math.cos(2.44203081578 + 25352.2704455259 * self.t)
Y0 += 0.00000000043 * math.cos(4.79238528651 + 365230.88779952223 * self.t)
Y0 += 0.00000000044 * math.cos(0.28163075339 + 52712.85461272729 * self.t)
Y0 += 0.00000000045 * math.cos(4.58626302576 + 28256.9074413403 * self.t)
Y0 += 0.00000000039 * math.cos(0.56315956692 + 103241.99019671988 * self.t)
Y0 += 0.00000000051 * math.cos(0.42021554272 + 182085.87484340828 * self.t)
Y0 += 0.00000000050 * math.cos(0.52050720367 + 147423.75567281108 * self.t)
Y0 += 0.00000000043 * math.cos(4.74188084964 + 214364.80099922928 * self.t)
Y0 += 0.00000000039 * math.cos(4.16197164473 + 99799.41525175449 * self.t)
Y0 += 0.00000000044 * math.cos(3.72596196534 + 25867.73431661889 * self.t)
Y0 += 0.00000000043 * math.cos(0.20439431464 + 1265.81129610991 * self.t)
Y0 += 0.00000000038 * math.cos(4.24623905840 + 104505.63519426509 * self.t)
Y0 += 0.00000000037 * math.cos(4.01161399115 + 104198.07757329549 * self.t)
Y0 += 0.00000000037 * math.cos(2.96320182560 + 128320.99566497609 * self.t)
Y0 += 0.00000000042 * math.cos(3.62647853730 + 25170.2166730759 * self.t)
Y0 += 0.00000000040 * math.cos(3.65913470233 + 206.42936592071 * self.t)
Y0 += 0.00000000038 * math.cos(1.95151248327 + 130459.42928625426 * self.t)
Y0 += 0.00000000037 * math.cos(2.36549717602 + 26102.3740530593 * self.t)
Y0 += 0.00000000050 * math.cos(1.81815722451 + 153.5349930013 * self.t)
Y0 += 0.00000000037 * math.cos(0.10000949167 + 24203.24579564031 * self.t)
Y0 += 0.00000000041 * math.cos(5.40449209098 + 34282.4222922663 * self.t)
Y0 += 0.00000000036 * math.cos(1.44241697006 + 26118.4738200621 * self.t)
Y0 += 0.00000000036 * math.cos(1.07961540756 + 26057.82009805329 * self.t)
Y0 += 0.00000000036 * math.cos(6.15630697749 + 34281.9346572993 * self.t)
Y0 += 0.00000000042 * math.cos(1.45143122814 + 38653.81102407349 * self.t)
Y0 += 0.00000000035 * math.cos(0.16883470965 + 26073.91986505609 * self.t)
Y0 += 0.00000000039 * math.cos(5.17224451182 + 27006.0772450395 * self.t)
Y0 += 0.00000000039 * math.cos(3.83113919893 + 151.2914873264 * self.t)
Y0 += 0.00000000037 * math.cos(3.86078277718 + 78367.04601642469 * self.t)
Y0 += 0.00000000034 * math.cos(1.99485322788 + 27567.01353312209 * self.t)
Y0 += 0.00000000035 * math.cos(5.28138513945 + 78896.73698151928 * self.t)
Y0 += 0.00000000037 * math.cos(2.43731961667 + 25773.96077918809 * self.t)
Y0 += 0.00000000037 * math.cos(0.08471276095 + 26402.33313892731 * self.t)
Y0 += 0.00000000045 * math.cos(5.51053584744 + 2126.1212248627 * self.t)
Y0 += 0.00000000037 * math.cos(0.41442508719 + 9384.59719059169 * self.t)
Y0 += 0.00000000033 * math.cos(1.23373917586 + 116783.89903417809 * self.t)
Y0 += 0.00000000035 * math.cos(1.05079898841 + 27463.9207589035 * self.t)
Y0 += 0.00000000033 * math.cos(5.24254633919 + 51852.54468397449 * self.t)
Y0 += 0.00000000037 * math.cos(6.12798409572 + 130420.08976445469 * self.t)
Y0 += 0.00000000035 * math.cos(2.14843472729 + 62388.84801187589 * self.t)
Y0 += 0.00000000033 * math.cos(0.42458452244 + 26190.1094773233 * self.t)
Y0 += 0.00000000033 * math.cos(2.09744785518 + 25986.18444079209 * self.t)
Y0 += 0.00000000035 * math.cos(1.60180137355 + 51653.47268253809 * self.t)
Y0 += 0.00000000032 * math.cos(4.47872593179 + 39629.56816154889 * self.t)
Y0 += 0.00000000040 * math.cos(2.83686756644 + 148.32254190981 * self.t)
Y0 += 0.00000000043 * math.cos(0.95235131647 + 157587.04459711789 * self.t)
Y0 += 0.00000000034 * math.cos(2.42204384323 + 27973.04812247509 * self.t)
Y0 += 0.00000000043 * math.cos(5.25286806261 + 26308.5596014965 * self.t)
Y0 += 0.00000000040 * math.cos(2.56508188611 + 205.9417309537 * self.t)
Y0 += 0.00000000034 * math.cos(5.11562530130 + 13541.17738500769 * self.t)
Y0 += 0.00000000031 * math.cos(5.39821911963 + 207594.09039798369 * self.t)
Y0 += 0.00000000032 * math.cos(2.88931723147 + 204151.51545301828 * self.t)
Y0 += 0.00000000034 * math.cos(2.40270209936 + 51868.4924796623 * self.t)
Y0 += 0.00000000043 * math.cos(1.44438681078 + 51529.03926731709 * self.t)
Y0 += 0.00000000031 * math.cos(4.01104942140 + 78160.86046798748 * self.t)
Y0 += 0.00000000036 * math.cos(5.64714725231 + 130226.46042991648 * self.t)
Y0 += 0.00000000030 * math.cos(5.53920573257 + 130432.64597835368 * self.t)
Y0 += 0.00000000039 * math.cos(4.02419894411 + 64901.5035354069 * self.t)
Y0 += 0.00000000031 * math.cos(0.59714240312 + 51433.0600400993 * self.t)
Y0 += 0.00000000041 * math.cos(1.72851918476 + 40852.8983673605 * self.t)
Y0 += 0.00000000039 * math.cos(3.48396647568 + 76674.39271195528 * self.t)
Y0 += 0.00000000031 * math.cos(1.66062366332 + 94330.01910368088 * self.t)
Y0 += 0.00000000030 * math.cos(3.30916696393 + 181026.49291321907 * self.t)
Y0 += 0.00000000030 * math.cos(1.35962398425 + 96356.84030678908 * self.t)
Y0 += 0.00000000029 * math.cos(3.24046570458 + 182188.96761762688 * self.t)
Y0 += 0.00000000034 * math.cos(5.68539193875 + 111122.07935005888 * self.t)
Y0 += 0.00000000031 * math.cos(5.77478962928 + 26514.25751496669 * self.t)
Y0 += 0.00000000036 * math.cos(2.47015155723 + 75615.49841673308 * self.t)
Y0 += 0.00000000028 * math.cos(2.60390025059 + 86143.55476057807 * self.t)
Y0 += 0.00000000028 * math.cos(5.41996849568 + 80382.71710258449 * self.t)
Y0 += 0.00000000037 * math.cos(2.73064236635 + 37698.6989174319 * self.t)
Y0 += 0.00000000030 * math.cos(5.69224082014 + 77101.47853779829 * self.t)
Y0 += 0.00000000029 * math.cos(2.84649684108 + 114.6429243969 * self.t)
Y0 += 0.00000000030 * math.cos(2.60710276130 + 26084.21480579459 * self.t)
Y0 += 0.00000000027 * math.cos(2.18911511513 + 26241.4381345755 * self.t)
Y0 += 0.00000000027 * math.cos(6.24126624865 + 60369.83779887349 * self.t)
Y0 += 0.00000000027 * math.cos(4.34927842452 + 101011.24395708049 * self.t)
Y0 += 0.00000000027 * math.cos(0.05549126696 + 52509.9060414013 * self.t)
Y0 += 0.00000000029 * math.cos(6.20759958805 + 26092.07911232079 * self.t)
Y0 += 0.00000000033 * math.cos(1.33379538421 + 42153.72518556549 * self.t)
Y0 += 0.00000000033 * math.cos(1.11257666070 + 49954.19346603489 * self.t)
Y0 += 0.00000000027 * math.cos(1.97164111012 + 25933.8805136059 * self.t)
Y0 += 0.00000000027 * math.cos(5.75010433988 + 91785.21704883048 * self.t)
Y0 += 0.00000000026 * math.cos(0.45274965388 + 35472.9879671329 * self.t)
Y0 += 0.00000000034 * math.cos(5.96598332393 + 22004.1584523533 * self.t)
Y0 += 0.00000000026 * math.cos(6.17560870969 + 71980.38975724769 * self.t)
Y0 += 0.00000000026 * math.cos(4.75787796399 + 130653.05862079248 * self.t)
Y0 += 0.00000000029 * math.cos(3.02581288540 + 136722.83537534589 * self.t)
Y0 += 0.00000000035 * math.cos(6.21411204570 + 52483.6077216015 * self.t)
Y0 += 0.00000000027 * math.cos(3.05142430504 + 54375.1374439189 * self.t)
Y0 += 0.00000000026 * math.cos(4.35081548550 + 26050.0139234199 * self.t)
Y0 += 0.00000000033 * math.cos(4.63692423082 + 77727.14873011068 * self.t)
Y0 += 0.00000000031 * math.cos(4.44084625304 + 87367.86023632369 * self.t)
Y0 += 0.00000000028 * math.cos(3.43792139137 + 183571.16555011149 * self.t)
Y0 += 0.00000000034 * math.cos(4.71651658587 + 1905.70858242391 * self.t)
Y0 += 0.00000000028 * math.cos(0.48686633466 + 51226.87449166209 * self.t)
Y0 += 0.00000000032 * math.cos(6.03865727615 + 77307.66408623548 * self.t)
Y0 += 0.00000000033 * math.cos(4.55124740226 + 78039.60844650418 * self.t)
Y0 += 0.00000000031 * math.cos(2.31561412140 + 60370.3254338405 * self.t)
Y0 += 0.00000000033 * math.cos(0.91420804533 + 123201.08393375449 * self.t)
Y0 += 0.00000000027 * math.cos(5.63137827099 + 1224.06165826211 * self.t)
Y0 += 0.00000000025 * math.cos(1.86918416559 + 78338.73484077338 * self.t)
Y0 += 0.00000000029 * math.cos(3.85756830106 + 49842.36607279289 * self.t)
Y0 += 0.00000000025 * math.cos(0.32014728581 + 76785.08666814168 * self.t)
Y0 += 0.00000000024 * math.cos(1.09872540797 + 24824.9895964795 * self.t)
Y0 += 0.00000000031 * math.cos(1.78245808102 + 161079.61616398748 * self.t)
Y0 += 0.00000000031 * math.cos(0.13179619883 + 75930.75684933408 * self.t)
Y0 += 0.00000000024 * math.cos(1.67322759859 + 1045.3986536711 * self.t)
Y0 += 0.00000000029 * math.cos(4.01431074745 + 25600.5122078035 * self.t)
Y0 += 0.00000000029 * math.cos(4.79090693735 + 26575.7817103119 * self.t)
Y0 += 0.00000000026 * math.cos(4.40340514614 + 39450.5966658569 * self.t)
Y0 += 0.00000000027 * math.cos(5.45015263034 + 27171.2271913513 * self.t)
Y0 += 0.00000000027 * math.cos(3.35506505446 + 25005.0667267641 * self.t)
Y0 += 0.00000000032 * math.cos(2.36425003443 + 9103.6631766341 * self.t)
Y0 += 0.00000000024 * math.cos(0.92163258785 + 129373.26404816448 * self.t)
Y0 += 0.00000000023 * math.cos(1.97682509801 + 85502.14119883909 * self.t)
Y0 += 0.00000000024 * math.cos(1.70355204155 + 52609.7618385087 * self.t)
Y0 += 0.00000000025 * math.cos(3.42869920852 + 299.37021175271 * self.t)
Y0 += 0.00000000027 * math.cos(0.19474964949 + 143005.91122533729 * self.t)
def makeMappedReadsUniqueList(self, includeForward=True, includeReverse=True):
readNameSet = set()
srList = []
numReads = 0
for paftolTarget in self.paftolTargetDict.values():
for mappedRead in paftolTarget.mappedReadList:
readName = mappedRead.getReadName()
if readName not in readNameSet:
numReads = numReads + 1
readNameSet.add(readName)
if includeForward:
if mappedRead.forwardRead is None:
raise StandardError('mapped read %s: no forward read SeqRecord' % mappedRead.getReadName())
srList.append(mappedRead.forwardRead)
if includeReverse:
if mappedRead.reverseRead is None:
raise StandardError('mapped read %s: no reverse read SeqRecord' % mappedRead.getReadName())
srList.append(mappedRead.reverseRead)
return srList
def writeMappedReadsFasta(self, fastaHandle, writeForward=True, writeReverse=True, maxNumReads=None):
readsList = self.makeMappedReadsUniqueList(writeForward, writeReverse)
if maxNumReads is not None and len(readsList) > maxNumReads:
selectedReadsList = tools.selectLongestReads(readsList, maxNumReads)
else:
selectedReadsList = readsList
logger.debug('maxNumReads: %s, numReads: %d, selected: %d', str(maxNumReads), len(readsList), len(selectedReadsList))
Bio.SeqIO.write(selectedReadsList, fastaHandle, 'fasta')
def extractOrganismAndGeneNames(s):
# FIXME: should tighten this up to fail on dangling garbage (?)
paftolTargetRe = re.compile('([^-]+)-([^-]+)')
m = paftolTargetRe.match(s)
if m is not None:
organismName = m.group(1)
geneName = m.group(2)
else:
organismName = 'unknown'
geneName = s
return organismName, geneName
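# A minimal sketch of the naming convention parsed above: target FASTA IDs
# are expected to look like 'organism-gene', and anything that does not
# match falls back to organism 'unknown'. Hypothetical IDs:
#   extractOrganismAndGeneNames('Arabidopsis-g4577')  ->  ('Arabidopsis', 'g4577')
#   extractOrganismAndGeneNames('g4577')              ->  ('unknown', 'g4577')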
class PaftolTargetSet(object):
"""Represent a set of PAFTOL targets.
This class supports mapping using C{bwa} and C{tblastn} by
implementing the processSamAlignment and processBlastAlignment
methods, respectively.
"""
def __init__(self):
self.paftolGeneDict = {}
self.organismDict = {}
self.numOfftargetReads = None
self.fastaHandleStr = None
# FIXME: static?
def makeFastaId(self, organismName, geneName):
return '%s-%s' % (organismName, geneName)
# deprecated, use getSeqRecordSelection instead
def getGeneSeqRecordList(self, geneNameList):
srList = []
for geneName in geneNameList:
if geneName not in self.paftolGeneDict:
raise StandardError('gene %s not found in this target set' % geneName)
for paftolTarget in self.paftolGeneDict[geneName].paftolTargetDict.values():
srList.append(paftolTarget.seqRecord)
return srList
def getSeqRecordSelection(self, organismNameList=None, geneNameList=None):
if organismNameList is None:
organismList = self.organismDict.values()
else:
organismList = []
for organismName in organismNameList:
if organismName not in self.organismDict:
raise StandardError('organism %s not found in this target set' % organismName)
organismList.append(self.organismDict[organismName])
if geneNameList is not None:
for geneName in geneNameList:
if geneName not in self.paftolGeneDict:
raise StandardError('gene %s not found in this target set' % geneName)
srList = []
for organism in organismList:
for geneName in organism.paftolTargetDict:
if geneNameList is None or geneName in geneNameList:
srList.append(organism.paftolTargetDict[geneName].seqRecord)
return srList
def readFasta(self, fastaHandle):
# FIXME: add provision to control tolerance for invalid bases -- checkTargets type functionality?
self.paftolGeneDict = {}
self.organismDict = {}
# reset the off-target counter so processSamAlignment can increment it
self.numOfftargetReads = 0
self.fastaHandleStr = str(fastaHandle)
for sr in Bio.SeqIO.parse(fastaHandle, 'fasta', alphabet=Bio.Alphabet.IUPAC.ambiguous_dna):
organismName, geneName = extractOrganismAndGeneNames(sr.id)
if not isSane(organismName):
raise StandardError('bad organism name: %s' % organismName)
if not isSane(geneName):
raise StandardError('bad gene name: %s' % geneName)
if organismName not in self.organismDict:
self.organismDict[organismName] = Organism(organismName)
if geneName not in self.paftolGeneDict:
self.paftolGeneDict[geneName] = PaftolGene(geneName)
paftolTarget = PaftolTarget(self.organismDict[organismName], self.paftolGeneDict[geneName], sr)
def meanTargetLength(self, geneName):
if geneName not in self.paftolGeneDict:
raise StandardError('gene %s not contained in this target set' % geneName)
return self.paftolGeneDict[geneName].meanSequenceLength()
def getSeqRecordList(self):
srList = []
for organism in self.organismDict.values():
for paftolTarget in organism.paftolTargetDict.values():
srList.append(paftolTarget.seqRecord)
return srList
def writeFasta(self, fastaHandle):
srList = self.getSeqRecordList()
sys.stderr.write('writeFasta: writing %d sequences\n' % len(srList))
Bio.SeqIO.write(srList, fastaHandle, 'fasta')
def checkOrganismAndGene(self, organismName, geneName):
if organismName not in self.organismDict:
raise StandardError('unknown organism: %s' % organismName)
if geneName not in self.paftolGeneDict:
raise StandardError('unknown gene: %s' % geneName)
if geneName not in self.organismDict[organismName].paftolTargetDict:
raise StandardError('no entry for gene %s in organism %s' % (geneName, organismName))
def processSamAlignment(self, samAlignment):
if samAlignment.isMapped():
organismName, geneName = extractOrganismAndGeneNames(samAlignment.rname)
self.checkOrganismAndGene(organismName, geneName)
paftolTarget = self.organismDict[organismName].paftolTargetDict[geneName]
mappedRead = SamMappedRead(paftolTarget, samAlignment)
paftolTarget.addMappedRead(mappedRead)
else:
self.numOfftargetReads = self.numOfftargetReads + 1
def processBlastAlignment(self, query, blastAlignment):
organismName, geneName = extractOrganismAndGeneNames(query)
self.checkOrganismAndGene(organismName, geneName)
paftolTarget = self.organismDict[organismName].paftolTargetDict[geneName]
mappedRead = BlastMappedRead(paftolTarget, blastAlignment)
paftolTarget.addMappedRead(mappedRead)
def makeReadNameGeneDict(self):
readNameGeneDict = {}
for paftolGene in self.paftolGeneDict.values():
for readName in paftolGene.getReadNameSet():
if readName not in readNameGeneDict:
readNameGeneDict[readName] = []
readNameGeneDict[readName].append(paftolGene)
return readNameGeneDict
def makeReadNameMappedReadDict(self):
# FIXME: not exactly exemplary for following law of Demeter -- inner parts of loop probably want to be PaftolTarget or MappedRead methods
readNameMappedReadDict = {}
for paftolGene in self.paftolGeneDict.values():
for paftolTarget in paftolGene.paftolTargetDict.values():
for mappedRead in paftolTarget.mappedReadList:
readName = mappedRead.getReadName()
if readName not in readNameMappedReadDict:
readNameMappedReadDict[readName] = []
readNameMappedReadDict[readName].append(mappedRead)
return readNameMappedReadDict
def targetStats(self):
dataFrame = paftol.tools.DataFrame(PaftolTarget.csvFieldNames)
for organism in self.organismDict.values():
for paftolTarget in organism.paftolTargetDict.values():
dataFrame.addRow(paftolTarget.csvRowDict())
return dataFrame
def geneStats(self):
dataFrame = paftol.tools.DataFrame(PaftolGene.csvFieldNames)
for paftolGene in self.paftolGeneDict.values():
dataFrame.addRow(paftolGene.csvRowDict())
return dataFrame
def organismStats(self):
dataFrame = paftol.tools.DataFrame(Organism.csvFieldNames)
for organism in self.organismDict.values():
dataFrame.addRow(organism.csvRowDict())
return dataFrame
def getMappedReadNameSet(self):
mappedReadNameSet = set()
for paftolGene in self.paftolGeneDict.values():
mappedReadNameSet = mappedReadNameSet | paftolGene.getReadNameSet()
return mappedReadNameSet
def numMappedReads(self):
return len(self.getMappedReadNameSet())
# FIXME: no check for multiple counting -- should change to len(self.getMappedReadNameSet())
# n = 0
# for organism in self.organismDict.values():
# for paftolTarget in organism.paftolTargetDict.values():
# n = n + paftolTarget.numMappedReads()
# return n
# FIXME: apparently obsolete -- delete?
def writeMappedReadsFasta(self, fastaFname):
with open(fastaFname, 'w') as fastaFile:
for organism in self.organismDict.values():
for paftolTarget in organism.paftolTargetDict.values():
paftolTarget.writeMappedReadsFasta(fastaFile)
# FIXME: untested after transplant from HybpiperAnalyser
def sanityCheck(self, allowInvalidBases=False):
for organism in self.organismDict.values():
for paftolTarget in organism.paftolTargetDict.values():
if not allowInvalidBases:
setDiff = set(str(paftolTarget.seqRecord.seq).lower()) - set('acgt')
if len(setDiff) != 0:
raise StandardError('target %s: illegal base(s) %s' % (paftolTarget.seqRecord.id, ', '.join(setDiff)))
class ReferenceGene(object):
def __init__(self, geneId, referenceGenome, seqRecord, geneFeature, mrnaFeatureList=None, cdsFeatureList=None):
self.geneId = geneId
self.referenceGenome = referenceGenome
self.seqRecord = seqRecord
self.geneFeature = geneFeature
self.mrnaFeatureList = [] if mrnaFeatureList is None else mrnaFeatureList[:]
self.cdsFeatureList = [] if cdsFeatureList is None else cdsFeatureList[:]
def getSequenceId(self):
return self.seqRecord.id.split('.')[0]
def containsHsp(self, hspAccession, hsp):
if self.getSequenceId() != hspAccession:
return False
return self.geneFeature.location.start <= hsp.sbjct_start and self.geneFeature.location.end >= hsp.sbjct_end
def getLength(self):
return abs(self.geneFeature.location.end - self.geneFeature.location.start)
def containsSamAlignment(self, samAlignment):
if self.getSequenceId() != samAlignment.rname:
return False
return self.geneFeature.location.start <= samAlignment.pos and self.geneFeature.location.end >= samAlignment.getEndpos()
def getGeneName(self):
if 'name' in self.geneFeature.qualifiers:
return self.geneFeature.qualifiers['name'][0]
else:
return None
def getGeneNote(self):
if 'note' in self.geneFeature.qualifiers:
return self.geneFeature.qualifiers['note'][0]
else:
return None
def getMrnaProduct(self):
# CHECKME: returning 'product' qualifier value from feature with that qualifier -- may be more thorough to check that all are the same?
for mrnaFeature in self.mrnaFeatureList:
if 'product' in mrnaFeature.qualifiers:
return mrnaFeature.qualifiers['product'][0]
return None
def getCdsProduct(self):
# CHECKME: returning 'product' qualifier value from feature with that qualifier -- may be more thorough to check that all are the same?
for cdsFeature in self.cdsFeatureList:
if 'product' in cdsFeature.qualifiers:
return cdsFeature.qualifiers['product'][0]
return None
def makeGenomicSeqRecord(self, seqId=None):
s = self.geneFeature.extract(self.seqRecord.seq)
if seqId is None:
seqId = self.geneId
return Bio.SeqRecord.SeqRecord(s, id=seqId, description='sequence extracted from gene feature')
def makeMrnaSeqRecord(self, seqId=None):
if len(self.mrnaFeatureList) == 0:
return None
if len(self.mrnaFeatureList) > 1:
logger.warning('reference gene %s: %d mRNA features, using #0 to extract sequence', self.geneId, len(self.mrnaFeatureList))
mrnaFeature = self.mrnaFeatureList[0]
s = mrnaFeature.extract(self.seqRecord.seq)
if seqId is None:
seqId = self.geneId
return Bio.SeqRecord.SeqRecord(s, id=seqId, description='sequence extracted from mRNA feature')
def makeCdsSeqRecord(self, seqId=None):
if len(self.cdsFeatureList) == 0:
return None
if len(self.cdsFeatureList) > 1:
logger.warning('reference gene %s: %d CDS features, using #0 to extract sequence', self.geneId, len(self.cdsFeatureList))
cdsFeature = self.cdsFeatureList[0]
s = cdsFeature.extract(self.seqRecord.seq)
if seqId is None:
seqId = self.geneId
return Bio.SeqRecord.SeqRecord(s, id=seqId, description='sequence extracted from CDS feature')
class ReferenceGenomeMappingProcessor(object):
def __init__(self, referenceGenome):
if referenceGenome.genomeLength is None:
raise StandardError('reference genome length is None')
self.referenceGenome = referenceGenome
self.intergenicId = 'intergenic'
self.unmappedId = 'unmapped'
self.geneHitDict = {}
self.intergenicLength = referenceGenome.genomeLength
for gene in referenceGenome.geneList:
geneLength = gene.getLength()
self.geneHitDict[gene.geneId] = {'geneId': gene.geneId, 'geneLength': geneLength, 'numHits': 0}
self.intergenicLength = self.intergenicLength - geneLength
self.geneHitDict[self.intergenicId] = {'geneId': self.intergenicId, 'geneLength': self.intergenicLength, 'numHits': 0}
self.geneHitDict[self.unmappedId] = {'geneId': self.unmappedId, 'geneLength': None, 'numHits': 0}
self.rawmapTable = paftol.tools.DataFrame(['qname', 'rname', 'pos'])
def getStatsTable(self):
statsTable = paftol.tools.DataFrame(['geneId', 'geneLength', 'numHits'])
for gene in self.referenceGenome.geneList:
statsTable.addRow(self.geneHitDict[gene.geneId])
statsTable.addRow(self.geneHitDict[self.intergenicId])
statsTable.addRow(self.geneHitDict[self.unmappedId])
return statsTable
def processSamAlignment(self, samAlignment):
if samAlignment.isMapped():
self.rawmapTable.addRow({'qname': samAlignment.qname, 'rname': samAlignment.rname, 'pos': samAlignment.pos})
geneId = self.referenceGenome.findGeneIdForSamAlignment(samAlignment)
if geneId is None:
geneId = self.intergenicId
else:
geneId = self.unmappedId
self.geneHitDict[geneId]['numHits'] = self.geneHitDict[geneId]['numHits'] + 1
class ReferenceGenome(object):
"""Represent a reference genome, provided via FASTA and GenBank files (possibly both).
@ivar name: the name of this reference genome
@type name: C{str}
@ivar fastaFname: name of FASTA file containing the sequences of this genome
@type fastaFname: C{str}
@ivar genbankFname: name of GenBank file containing the sequences of this genome
@type genbankFname: C{str}
"""
def __init__(self, name, fastaFname, genbankFname):
self.name = name
self.fastaFname = fastaFname
self.genbankFname = genbankFname
self.geneList = None
self.genomeLength = None
def makeCdsListGeneric(self):
def extractCdsId(qualifiers, qualifierId):
qualifier = qualifiers[qualifierId]
if len(qualifier) == 0:
raise StandardError('qualifier %s has length 0' % qualifierId)
elif len(qualifier) > 1:
logger.warning('qualifier %s has %d values,
thread = api.find_document_with_http_info(applicant_id, document_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str applicant_id: (required)
:param str document_id: (required)
:return: Document
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['applicant_id', 'document_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_document" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'applicant_id' is set
if ('applicant_id' not in params) or (params['applicant_id'] is None):
raise ValueError("Missing the required parameter `applicant_id` when calling `find_document`")
# verify the required parameter 'document_id' is set
if ('document_id' not in params) or (params['document_id'] is None):
raise ValueError("Missing the required parameter `document_id` when calling `find_document`")
collection_formats = {}
resource_path = '/applicants/{applicant_id}/documents/{document_id}'.replace('{format}', 'json')
path_params = {}
if 'applicant_id' in params:
path_params['applicant_id'] = params['applicant_id']
if 'document_id' in params:
path_params['document_id'] = params['document_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client. \
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client. \
select_header_content_type([])
# Authentication setting
auth_settings = ['Token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Document',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
collection_formats=collection_formats)
def find_live_photo(self, live_photo_id, **kwargs):
"""
Retrieve live photo
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.find_live_photo(live_photo_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str live_photo_id: The live photo’s unique identifier. (required)
:return: LivePhoto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.find_live_photo_with_http_info(live_photo_id, **kwargs)
else:
(data) = self.find_live_photo_with_http_info(live_photo_id, **kwargs)
return data
def find_live_photo_with_http_info(self, live_photo_id, **kwargs):
"""
Retrieve live photo
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.find_live_photo_with_http_info(live_photo_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str live_photo_id: The live photo’s unique identifier. (required)
:return: LivePhoto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['live_photo_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_live_photo" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'live_photo_id' is set
if ('live_photo_id' not in params) or (params['live_photo_id'] is None):
raise ValueError("Missing the required parameter `live_photo_id` when calling `find_live_photo`")
collection_formats = {}
resource_path = '/live_photos/{live_photo_id}'.replace('{format}', 'json')
path_params = {}
if 'live_photo_id' in params:
path_params['live_photo_id'] = params['live_photo_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client. \
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client. \
select_header_content_type([])
# Authentication setting
auth_settings = ['Token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LivePhoto',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
collection_formats=collection_formats)
def find_report(self, check_id, report_id, **kwargs):
"""
A single report can be retrieved using this endpoint with the corresponding unique identifier.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.find_report(check_id, report_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str check_id: (required)
:param str report_id: (required)
:return: Report
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.find_report_with_http_info(check_id, report_id, **kwargs)
else:
(data) = self.find_report_with_http_info(check_id, report_id, **kwargs)
return data
def find_report_with_http_info(self, check_id, report_id, **kwargs):
"""
A single report can be retrieved using this endpoint with the corresponding unique identifier.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.find_report_with_http_info(check_id, report_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str check_id: (required)
:param str report_id: (required)
:return: Report
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['check_id', 'report_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_report" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'check_id' is set
if ('check_id' not in params) or (params['check_id'] is None):
raise ValueError("Missing the required parameter `check_id` when calling `find_report`")
# verify the required parameter 'report_id' is set
if ('report_id' not in params) or (params['report_id'] is None):
raise ValueError("Missing the required parameter `report_id` when calling `find_report`")
collection_formats = {}
resource_path = '/checks/{check_id}/reports/{report_id}'.replace('{format}', 'json')
path_params = {}
if 'check_id' in params:
path_params['check_id'] = params['check_id']
if 'report_id' in params:
path_params['report_id'] = params['report_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client. \
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client. \
select_header_content_type([])
# Authentication setting
auth_settings = ['Token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Report',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
collection_formats=collection_formats)
def find_report_type_group(self, report_type_group_id, **kwargs):
"""
Retrieve single report type group object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.find_report_type_group(report_type_group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str report_type_group_id: (required)
:return: ReportTypeGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.find_report_type_group_with_http_info(report_type_group_id, **kwargs)
else:
(data) = self.find_report_type_group_with_http_info(report_type_group_id, **kwargs)
return data
def find_report_type_group_with_http_info(self, report_type_group_id, **kwargs):
"""
Retrieve single report type group object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.find_report_type_group_with_http_info(report_type_group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str report_type_group_id: (required)
:return: ReportTypeGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['report_type_group_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_report_type_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'report_type_group_id' is set
if ('report_type_group_id' not in params) or (params['report_type_group_id'] is None):
raise ValueError(
"Missing the required parameter `report_type_group_id` when calling `find_report_type_group`")
collection_formats = {}
resource_path = '/report_type_groups/{report_type_group_id}'.replace('{format}', 'json')
path_params = {}
if 'report_type_group_id' in params:
path_params['report_type_group_id'] = params['report_type_group_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client. \
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client. \
select_header_content_type([])
# Authentication setting
auth_settings = ['Token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportTypeGroup',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
collection_formats=collection_formats)
def find_webhook(self, webhook_id, **kwargs):
"""
Retrieve a Webhook
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.find_webhook(webhook_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str webhook_id: (required)
:return: Webhook
If the method is called asynchronously,
returns the request thread.
import argparse
import asyncio
import grp
import logging
import os
import pwd
import random
import re
import string
import sys
import urllib
from fnmatch import fnmatch
from typing import Dict
from typing import List
from typing import Tuple
import aiohttp
import yaml
from aiohttp import ClientSession
from aiohttp import web
from heisenbridge import __version__
from heisenbridge.appservice import AppService
from heisenbridge.channel_room import ChannelRoom
from heisenbridge.control_room import ControlRoom
from heisenbridge.identd import Identd
from heisenbridge.matrix import Matrix
from heisenbridge.matrix import MatrixError
from heisenbridge.matrix import MatrixForbidden
from heisenbridge.matrix import MatrixUserInUse
from heisenbridge.network_room import NetworkRoom
from heisenbridge.plumbed_room import PlumbedRoom
from heisenbridge.private_room import PrivateRoom
from heisenbridge.room import Room
from heisenbridge.room import RoomInvalidError
class BridgeAppService(AppService):
_rooms: Dict[str, Room]
_users: Dict[str, str]
def register_room(self, room: Room):
self._rooms[room.id] = room
def unregister_room(self, room_id):
if room_id in self._rooms:
del self._rooms[room_id]
# this is mostly used by network rooms at init, it's a bit slow
def find_rooms(self, rtype=None, user_id=None) -> List[Room]:
ret = []
if rtype is not None and type(rtype) != str:
rtype = rtype.__name__
for room in self._rooms.values():
if (rtype is None or room.__class__.__name__ == rtype) and (user_id is None or room.user_id == user_id):
ret.append(room)
return ret
def is_admin(self, user_id: str):
if user_id == self.config["owner"]:
return True
for mask, value in self.config["allow"].items():
if fnmatch(user_id, mask) and value == "admin":
return True
return False
def is_user(self, user_id: str):
if self.is_admin(user_id):
return True
for mask in self.config["allow"].keys():
if fnmatch(user_id, mask):
return True
return False
def is_local(self, mxid: str):
return mxid.endswith(":" + self.server_name)
def strip_nick(self, nick: str) -> Tuple[str, str]:
m = re.match(r"^([~&@%\+]?)(.+)$", nick)
if m:
return (m.group(2), (m.group(1) if len(m.group(1)) > 0 else None))
else:
raise TypeError(f"Input nick is not valid: '{nick}'")
def irc_user_id(self, network, nick, at=True, server=True):
nick, mode = self.strip_nick(nick)
ret = re.sub(
r"[^0-9a-z\-\.=\_/]",
lambda m: "=" + m.group(0).encode("utf-8").hex(),
f"{self.puppet_prefix}{network}_{nick}".lower(),
)
if at:
ret = "@" + ret
if server:
ret += ":" + self.server_name
return ret
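# a worked example of the mangling above (prefix, network and homeserver
# are hypothetical): with puppet_prefix 'irc_', network 'libera' and nick
# '@Alice[m]', strip_nick drops the '@' op mode, the name is lowercased,
# and '[' / ']' are hex-escaped, yielding
# '@irc_libera_alice=5bm=5d:example.com'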
async def cache_user(self, user_id, displayname):
# start by caching that the user_id exists without a displayname
if user_id not in self._users:
self._users[user_id] = None
# if the cached displayname is incorrect
if displayname and self._users[user_id] != displayname:
try:
await self.api.put_user_displayname(user_id, displayname)
self._users[user_id] = displayname
except MatrixError as e:
logging.warning(f"Failed to set displayname '{displayname}' for user_id '{user_id}', got '{e}'")
def is_user_cached(self, user_id, displayname=None):
return user_id in self._users and (displayname is None or self._users[user_id] == displayname)
async def ensure_irc_user_id(self, network, nick, update_cache=True):
user_id = self.irc_user_id(network, nick)
# if we've seen this user before, we can skip registering
if not self.is_user_cached(user_id):
try:
await self.api.post_user_register(
{
"type": "m.login.application_service",
"username": self.irc_user_id(network, nick, False, False),
}
)
except MatrixUserInUse:
pass
# always ensure the displayname is up-to-date
if update_cache:
await self.cache_user(user_id, nick)
return user_id
async def _on_mx_event(self, event):
if "room_id" in event and event["room_id"] in self._rooms:
try:
room = self._rooms[event["room_id"]]
await room.on_mx_event(event)
except RoomInvalidError:
logging.info(f"Event handler for {event['type']} threw RoomInvalidError, leaving and cleaning up.")
self.unregister_room(room.id)
room.cleanup()
await self.leave_room(room.id, room.members)
except Exception:
logging.exception("Ignoring exception from room handler. This should be fixed.")
elif (
event["type"] == "m.room.member"
and event["sender"] != self.user_id
and event["content"]["membership"] == "invite"
):
if "is_direct" not in event["content"] or event["content"]["is_direct"] is not True:
logging.debug("Got an invite to non-direct room, ignoring")
return
logging.info(f"Got an invite from {event['sender']}")
# only respond to an invite
if event["room_id"] in self._rooms:
logging.debug("Control room already open, uhh")
return
# handle invites against puppets
if event["state_key"] != self.user_id:
logging.info(f"Whitelisted user {event['sender']} invited {event['state_key']}, going to reject.")
try:
await self.api.post_room_kick(
event["room_id"],
event["state_key"],
reason="Inviting puppets is not supported",
user_id=event["state_key"],
)
except Exception:
logging.exception("Failed to reject invitation.")
return
# set owner if we have none and the user is from the same HS
if self.config.get("owner", None) is None and event["sender"].endswith(":" + self.server_name):
logging.info(f"We have an owner now, let us rejoice, {event['sender']}!")
self.config["owner"] = event["sender"]
await self.save()
if not self.is_user(event["sender"]):
logging.info(f"Non-whitelisted user {event['sender']} tried to invite us, ignoring.")
return
logging.info(f"Whitelisted user {event['sender']} invited us, going to accept.")
# accept invite sequence
try:
room = ControlRoom(id=event["room_id"], user_id=event["sender"], serv=self, members=[event["sender"]])
await room.save()
self.register_room(room)
# sometimes federated rooms take a while to join
for i in range(6):
try:
await self.api.post_room_join(room.id)
break
except MatrixForbidden:
logging.debug("Responding to invite failed, retrying")
await asyncio.sleep((i + 1) * 5)
# show help on open
await room.show_help()
except Exception:
if event["room_id"] in self._rooms:
del self._rooms[event["room_id"]]
logging.exception("Failed to create control room.")
else:
pass
# print(json.dumps(event, indent=4, sort_keys=True))
async def _transaction(self, req):
body = await req.json()
for event in body["events"]:
asyncio.ensure_future(self._on_mx_event(event))
return web.json_response({})
async def detect_public_endpoint(self):
async with ClientSession() as session:
# first try https well-known
try:
resp = await session.request(
"GET",
"https://{}/.well-known/matrix/client".format(self.server_name),
)
data = await resp.json(content_type=None)
return data["m.homeserver"]["base_url"]
except Exception:
logging.debug("Did not find .well-known for HS")
# try https directly
try:
resp = await session.request("GET", "https://{}/_matrix/client/versions".format(self.server_name))
await resp.json(content_type=None)
return "https://{}".format(self.server_name)
except Exception:
logging.debug("Could not use direct connection to HS")
# give up
logging.warning("Using internal URL for homeserver, media links are likely broken!")
return self.api.url
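# the well-known lookup above expects the standard Matrix client document,
# for example: {"m.homeserver": {"base_url": "https://matrix.example.com"}}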
def mxc_to_url(self, mxc, filename=None):
mxc = urllib.parse.urlparse(mxc)
if filename is None:
filename = ""
else:
filename = "/" + urllib.parse.quote(filename)
return "{}/_matrix/media/r0/download/{}{}{}".format(self.endpoint, mxc.netloc, mxc.path, filename)
async def reset(self, config_file, homeserver_url):
with open(config_file) as f:
registration = yaml.safe_load(f)
self.api = Matrix(homeserver_url, registration["as_token"])
whoami = await self.api.get_user_whoami()
self.user_id = whoami["user_id"]
print("We are " + whoami["user_id"])
resp = await self.api.get_user_joined_rooms()
print(f"Leaving from {len(resp['joined_rooms'])} rooms...")
for room_id in resp["joined_rooms"]:
print(f"Leaving from {room_id}...")
await self.leave_room(room_id, None)
print("Resetting configuration...")
self.config = {}
await self.save()
print("All done!")
def load_reg(self, config_file):
with open(config_file) as f:
self.registration = yaml.safe_load(f)
async def leave_room(self, room_id, members):
members = members if members else []
for member in members:
(name, server) = member.split(":")
if name.startswith("@" + self.puppet_prefix) and server == self.server_name:
try:
await self.api.post_room_leave(room_id, member)
except Exception:
logging.exception("Removing puppet on leave failed")
try:
await self.api.post_room_leave(room_id)
except MatrixError:
pass
try:
await self.api.post_room_forget(room_id)
except MatrixError:
pass
def _keepalive(self):
async def put_presence():
try:
await self.api.put_user_presence(self.user_id)
except Exception:
pass
asyncio.ensure_future(put_presence())
asyncio.get_event_loop().call_later(60, self._keepalive)
async def run(self, listen_address, listen_port, homeserver_url, owner):
app = aiohttp.web.Application()
app.router.add_put("/transactions/{id}", self._transaction)
app.router.add_put("/_matrix/app/v1/transactions/{id}", self._transaction)
if "sender_localpart" not in self.registration:
print("Missing sender_localpart from registration file.")
sys.exit(1)
if "namespaces" not in self.registration or "users" not in self.registration["namespaces"]:
print("User namespaces missing from registration file.")
sys.exit(1)
# remove self namespace if exists
self_ns = f"@{self.registration['sender_localpart']}:.*"
ns_users = [x for x in self.registration["namespaces"]["users"] if x["regex"] != self_ns]
if len(ns_users) != 1:
print("A single user namespace is required for puppets in the registration file.")
sys.exit(1)
if "exclusive" not in ns_users[0] or not ns_users[0]["exclusive"]:
print("User namespace must be exclusive.")
sys.exit(1)
m = re.match(r"^@([^.]+)\.\*$", ns_users[0]["regex"])
if not m:
print("User namespace regex must be a prefix like '@irc_.*' and not contain anything else.")
sys.exit(1)
self.puppet_prefix = m.group(1)
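# the checks above expect an appservice registration file shaped roughly
# like this (values are hypothetical):
#   sender_localpart: heisenbridge
#   namespaces:
#     users:
#       - regex: "@irc_.*"
#         exclusive: true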
print(f"Heisenbridge v{__version__}", flush=True)
self.api = Matrix(homeserver_url, self.registration["as_token"])
try:
await self.api.post_user_register(
{
"type": "m.login.application_service",
"username": self.registration["sender_localpart"],
}
)
logging.debug("Appservice user registration succeeded.")
except MatrixUserInUse:
logging.debug("Appservice user is already registered.")
except Exception:
logging.exception("Unexpected failure when registering appservice user.")
whoami = await self.api.get_user_whoami()
logging.info("We are " + whoami["user_id"])
self._rooms = {}
self._users = {}
self.user_id = whoami["user_id"]
self.server_name = self.user_id.split(":")[1]
self.config = {
"networks": {},
"owner": None,
"allow": {},
"idents": {},
"member_sync": "half",
"media_url": None,
}
logging.debug(f"Default config: {self.config}")
self.synapse_admin = False
try:
is_admin = await self.api.get_synapse_admin_users_admin(self.user_id)
self.synapse_admin = is_admin["admin"]
except MatrixForbidden:
logging.info(f"We ({self.user_id}) are not a server admin, inviting puppets is required.")
except Exception:
logging.info("Seems we are not connected to Synapse, inviting puppets is required.")
# load config from HS
await self.load()
# use configured media_url for endpoint if we have it
if self.config["media_url"]:
self.endpoint = self.config["media_url"]
else:
self.endpoint = await self.detect_public_endpoint()
print("Homeserver is publicly available at " + self.endpoint, flush=True)
logging.info("Starting presence loop")
self._keepalive()
# do a little migration for servers, remove this later
for network in self.config["networks"].values():
new_servers = []
for server in network["servers"]:
if isinstance(server, str):
new_servers.append({"address": server, "port": 6667, "tls": False})
if len(new_servers) > 0:
logging.debug("Migrating servers from old to new config format")
network["servers"] = new_servers
logging.debug(f"Merged configuration from HS: {self.config}")
# honor command line owner
if owner is not None and self.config["owner"] != owner:
logging.info(f"Overriding loaded owner with '{owner}'")
self.config["owner"] = owner
await self.save()
resp = await self.api.get_user_joined_rooms()
logging.debug(f"Appservice rooms: {resp['joined_rooms']}")
# room types and their init order, network must be before chat and group
room_types = [ControlRoom, NetworkRoom, PrivateRoom, ChannelRoom, PlumbedRoom]
'''This module extends PTPDevice for Canon devices.
Use it in a master module that determines the vendor and automatically uses its
extension. This is why inheritance is not explicit.
'''
from ...util import _main_thread_alive
from .properties import EOSPropertiesMixin
from contextlib import contextmanager
from construct import (
Array, Byte, Container, Embedded, Enum, Pass, PrefixedArray, Range, Struct,
Switch, Computed
)
from six.moves.queue import Queue
from threading import Thread, Event
from time import sleep
import atexit
import logging
logger = logging.getLogger(__name__)
__all__ = ('Canon',)
class Canon(EOSPropertiesMixin, object):
'''This class implements Canon's PTP operations.'''
def __init__(self, *args, **kwargs):
logger.debug('Init Canon')
super(Canon, self).__init__(*args, **kwargs)
# TODO: expose the choice to poll or not Canon events
self.__no_polling = False
self.__eos_event_shutdown = Event()
self.__eos_event_proc = None
@contextmanager
def session(self):
'''
Manage Canon session with context manager.
'''
# When raw device, do not perform
if self.__no_polling:
with super(Canon, self).session():
yield
return
# Within a normal PTP session
with super(Canon, self).session():
# Set up remote mode and extended event info
self.eos_set_remote_mode(1)
self.eos_event_mode(1)
# And launch a polling thread
self.__event_queue = Queue()
self.__eos_event_proc = Thread(
name='EOSEvtPolling',
target=self.__eos_poll_events
)
self.__eos_event_proc.daemon = False
atexit.register(self._eos_shutdown)
self.__eos_event_proc.start()
try:
yield
finally:
self._eos_shutdown()
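# a minimal usage sketch, assuming this mixin is composed into a complete
# PTP device class elsewhere (the construction below is hypothetical):
#   camera = Canon(...)  # normally instantiated by a vendor-dispatch factory
#   with camera.session():
#       ...  # issue EOS operations while events are polled in the background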
def _shutdown(self):
self._eos_shutdown()
super(Canon, self)._shutdown()
def _eos_shutdown(self):
logger.debug('Shutdown EOS events request')
self.__eos_event_shutdown.set()
# Only join a running thread.
if self.__eos_event_proc and self.__eos_event_proc.is_alive():
self.__eos_event_proc.join(2)
def _PropertyCode(self, **product_properties):
return super(Canon, self)._PropertyCode(
BeepMode=0xD001,
ViewfinderMode=0xD003,
ImageQuality=0xD006,
CanonImageSize=0xD008,
CanonFlashMode=0xD00A,
TvAvSetting=0xD00C,
MeteringMode=0xD010,
MacroMode=0xD011,
FocusingPoint=0xD012,
CanonWhiteBalance=0xD013,
ISOSpeed=0xD01C,
Aperture=0xD01D,
ShutterSpeed=0xD01E,
ExpCompensation=0xD01F,
Zoom=0xD02A,
SizeQualityMode=0xD02C,
FlashMemory=0xD031,
CameraModel=0xD032,
CameraOwner=0xD033,
UnixTime=0xD034,
ViewfinderOutput=0xD036,
RealImageWidth=0xD039,
PhotoEffect=0xD040,
AssistLight=0xD041,
**product_properties
)
def _OperationCode(self, **product_operations):
return super(Canon, self)._OperationCode(
GetObjectSize=0x9001,
SetObjectArchive=0x9002,
KeepDeviceOn=0x9003,
LockDeviceUI=0x9004,
UnlockDeviceUI=0x9005,
GetObjectHandleByName=0x9006,
InitiateReleaseControl=0x9008,
TerminateReleaseControl=0x9009,
TerminatePlaybackMode=0x900A,
ViewfinderOn=0x900B,
ViewfinderOff=0x900C,
DoAeAfAwb=0x900D,
GetCustomizeSpec=0x900E,
GetCustomizeItemInfo=0x900F,
GetCustomizeData=0x9010,
SetCustomizeData=0x9011,
GetCaptureStatus=0x9012,
CheckEvent=0x9013,
FocusLock=0x9014,
FocusUnlock=0x9015,
GetLocalReleaseParam=0x9016,
SetLocalReleaseParam=0x9017,
AskAboutPcEvf=0x9018,
SendPartialObject=0x9019,
InitiateCaptureInMemory=0x901A,
GetPartialObjectEx=0x901B,
SetObjectTime=0x901C,
GetViewfinderImage=0x901D,
GetObjectAttributes=0x901E,
ChangeUSBProtocol=0x901F,
GetChanges=0x9020,
GetObjectInfoEx=0x9021,
InitiateDirectTransfer=0x9022,
TerminateDirectTransfer=0x9023,
SendObjectInfoByPath=0x9024,
SendObjectByPath=0x9025,
InitiateDirectTansferEx=0x9026,
GetAncillaryObjectHandles=0x9027,
GetTreeInfo=0x9028,
GetTreeSize=0x9029,
NotifyProgress=0x902A,
NotifyCancelAccepted=0x902B,
GetDirectory=0x902D,
SetPairingInfo=0x9030,
GetPairingInfo=0x9031,
DeletePairingInfo=0x9032,
GetMACAddress=0x9033,
SetDisplayMonitor=0x9034,
PairingComplete=0x9035,
GetWirelessMAXChannel=0x9036,
EOSGetStorageIDs=0x9101,
EOSGetStorageInfo=0x9102,
EOSGetObjectInfo=0x9103,
EOSGetObject=0x9104,
EOSDeleteObject=0x9105,
EOSFormatStore=0x9106,
EOSGetPartialObject=0x9107,
EOSGetDeviceInfoEx=0x9108,
EOSGetObjectInfoEx=0x9109,
EOSGetThumbEx=0x910A,
EOSSendPartialObject=0x910B,
EOSSetObjectAttributes=0x910C,
EOSGetObjectTime=0x910D,
EOSSetObjectTime=0x910E,
EOSRemoteRelease=0x910F,
EOSSetDevicePropValueEx=0x9110,
EOSGetRemoteMode=0x9113,
EOSSetRemoteMode=0x9114,
EOSSetEventMode=0x9115,
EOSGetEvent=0x9116,
EOSTransferComplete=0x9117,
EOSCancelTransfer=0x9118,
EOSResetTransfer=0x9119,
EOSPCHDDCapacity=0x911A,
EOSSetUILock=0x911B,
EOSResetUILock=0x911C,
EOSKeepDeviceOn=0x911D,
EOSSetNullPacketMode=0x911E,
EOSUpdateFirmware=0x911F,
EOSTransferCompleteDT=0x9120,
EOSCancelTransferDT=0x9121,
EOSSetWftProfile=0x9122,
EOSGetWftProfile=0x9123,
EOSSetProfileToWft=0x9124,
EOSBulbStart=0x9125,
EOSBulbEnd=0x9126,
EOSRequestDevicePropValue=0x9127,
EOSRemoteReleaseOn=0x9128,
EOSRemoteReleaseOff=0x9129,
EOSInitiateViewfinder=0x9151,
EOSTerminateViewfinder=0x9152,
EOSGetViewFinderImage=0x9153,
EOSDoAf=0x9154,
EOSDriveLens=0x9155,
EOSDepthOfFieldPreview=0x9156,
EOSClickWB=0x9157,
EOSZoom=0x9158,
EOSZoomPosition=0x9159,
EOSSetLiveAfFrame=0x915a,
EOSAfCancel=0x9160,
EOSFAPIMessageTX=0x91FE,
EOSFAPIMessageRX=0x91FF,
EOSSendCanonMessage=0x9052,
EOSProcReturnData=0x9053,
**product_operations
)
def _ObjectFormatCode(self, **product_object_formats):
return super(Canon, self)._ObjectFormatCode(
CRW=0xB101,
CRW3=0xB103,
MOV=0xB104,
**product_object_formats
)
def _ResponseCode(self, **product_responses):
return super(Canon, self)._ResponseCode(
**product_responses
)
def _EventCode(self, **product_events):
return super(Canon, self)._EventCode(
CanonDeviceInfoChanged=0xC008,
CanonRequestObjectTransfer=0xC009,
CameraModeChanged=0xC00C,
**product_events
)
def _FilesystemType(self, **product_filesystem_types):
return super(Canon, self)._FilesystemType(
**product_filesystem_types
)
def _EOSEventCode(self):
'''Return desired endianness for Canon EOS event codes'''
return Enum(
self._UInt32,
default=Pass,
EmptyEvent=0x0000,
RequestGetEvent=0xC101,
ObjectAdded=0xC181,
ObjectRemoved=0xC182,
RequestGetObjectInfoEx=0xC183,
StorageStatusChanged=0xC184,
StorageInfoChanged=0xC185,
RequestObjectTransfer=0xC186,
ObjectInfoChangedEx=0xC187,
ObjectContentChanged=0xC188,
DevicePropChanged=0xC189,
AvailListChanged=0xC18A,
CameraStatusChanged=0xC18B,
WillSoonShutdown=0xC18D,
ShutdownTimerUpdated=0xC18E,
RequestCancelTransfer=0xC18F,
RequestObjectTransferDT=0xC190,
RequestCancelTransferDT=0xC191,
StoreAdded=0xC192,
StoreRemoved=0xC193,
BulbExposureTime=0xC194,
RecordingTime=0xC195,
InnerDevelopParam=0xC196,
RequestObjectTransferDevelop=0xC197,
GPSLogOutputProgress=0xC198,
GPSLogOutputComplete=0xC199,
TouchTrans=0xC19A,
RequestObjectTransferExInfo=0xC19B,
PowerZoomInfoChanged=0xC19D,
RequestPushMode=0xC19F,
RequestObjectTransferTS=0xC1A2,
AfResult=0xC1A3,
CTGInfoCheckComplete=0xC1A4,
OLCInfoChanged=0xC1A5,
ObjectAddedEx64=0xC1A7,
ObjectInfoChangedEx64=0xC1A8,
RequestObjectTransfer64=0xC1A9,
RequestObjectTransferFTP64=0xC1AB,
ImportFailed=0xC1AF,
BlePairing=0xC1B0,
RequestObjectTransferFTP=0xC1F1,
UnknownError=0xFFFFFFFF,
)
def _EOSPropertyCode(self):
'''Return desired endianness for Canon EOS property codes'''
return Enum(
self._UInt32,
default=Pass,
Aperture=0xD101,
ShutterSpeed=0xD102,
ISO=0xD103,
ExposureCompensation=0xD104,
ShootingMode=0xD105,
DriveMode=0xD106,
ExposureMeteringMode=0xD107,
AutoFocusMode=0xD108,
WhiteBalance=0xD109,
ColorTemperature=0xD10A,
WhiteBalanceAdjustBA=0xD10B,
WhiteBalanceAdjustMG=0xD10C,
WhiteBalanceBracketBA=0xD10D,
WhiteBalanceBracketMG=0xD10E,
ColorSpace=0xD10F,
PictureStyle=0xD110,
BatteryPower=0xD111,
BatterySelect=0xD112,
CameraTime=0xD113,
AutoPowerOff=0xD114,
Owner=0xD115,
ModelID=0xD116,
PTPExtensionVersion=0xD119,
DPOFVersion=0xD11A,
AvailableShots=0xD11B,
CaptureDestination=0xD11C,
BracketMode=0xD11D,
CurrentStorage=0xD11E,
CurrentFolder=0xD11F,
ImageFormat=0xD120,
ImageFormatCF=0xD121,
ImageFormatSD=0xD122,
ImageFormatHDD=0xD123,
CompressionS=0xD130,
CompressionM1=0xD131,
CompressionM2=0xD132,
CompressionL=0xD133,
AEModeDial=0xD138,
AEModeCustom=0xD139,
MirrorUpSetting=0xD13A,
HighlightTonePriority=0xD13B,
AFSelectFocusArea=0xD13C,
HDRSetting=0xD13D,
PCWhiteBalance1=0xD140,
PCWhiteBalance2=0xD141,
PCWhiteBalance3=0xD142,
PCWhiteBalance4=0xD143,
PCWhiteBalance5=0xD144,
MWhiteBalance=0xD145,
MWhiteBalanceEx=0xD146,
PictureStyleStandard=0xD150,
PictureStylePortrait=0xD151,
PictureStyleLandscape=0xD152,
PictureStyleNeutral=0xD153,
PictureStyleFaithful=0xD154,
PictureStyleBlackWhite=0xD155,
PictureStyleAuto=0xD156,
PictureStyleUserSet1=0xD160,
PictureStyleUserSet2=0xD161,
PictureStyleUserSet3=0xD162,
PictureStyleParam1=0xD170,
PictureStyleParam2=0xD171,
PictureStyleParam3=0xD172,
HighISOSettingNoiseReduction=0xD178,
MovieServoAF=0xD179,
ContinuousAFValid=0xD17A,
Attenuator=0xD17B,
UTCTime=0xD17C,
Timezone=0xD17D,
Summertime=0xD17E,
FlavorLUTParams=0xD17F,
CustomFunc1=0xD180,
CustomFunc2=0xD181,
CustomFunc3=0xD182,
CustomFunc4=0xD183,
CustomFunc5=0xD184,
CustomFunc6=0xD185,
CustomFunc7=0xD186,
CustomFunc8=0xD187,
CustomFunc9=0xD188,
CustomFunc10=0xD189,
CustomFunc11=0xD18A,
CustomFunc12=0xD18B,
CustomFunc13=0xD18C,
CustomFunc14=0xD18D,
CustomFunc15=0xD18E,
CustomFunc16=0xD18F,
CustomFunc17=0xD190,
CustomFunc18=0xD191,
CustomFunc19=0xD192,
InnerDevelop=0xD193,
MultiAspect=0xD194,
MovieSoundRecord=0xD195,
MovieRecordVolume=0xD196,
WindCut=0xD197,
ExtenderType=0xD198,
OLCInfoVersion=0xD199,
CustomFuncEx=0xD1A0,
MyMenu=0xD1A1,
MyMenuList=0xD1A2,
WftStatus=0xD1A3,
WftInputTransmission=0xD1A4,
HDDDirectoryStructure=0xD1A5,
BatteryInfo=0xD1A6,
AdapterInfo=0xD1A7,
LensStatus=0xD1A8,
QuickReviewTime=0xD1A9,
CardExtension=0xD1AA,
TempStatus=0xD1AB,
ShutterCounter=0xD1AC,
SpecialOption=0xD1AD,
PhotoStudioMode=0xD1AE,
SerialNumber=0xD1AF,
EVFOutputDevice=0xD1B0,
EVFMode=0xD1B1,
DepthOfFieldPreview=0xD1B2,
EVFSharpness=0xD1B3,
EVFWBMode=0xD1B4,
EVFClickWBCoeffs=0xD1B5,
EVFColorTemp=0xD1B6,
ExposureSimMode=0xD1B7,
EVFRecordStatus=0xD1B8,
LvAfSystem=0xD1BA,
MovSize=0xD1BB,
LvViewTypeSelect=0xD1BC,
MirrorDownStatus=0xD1BD,
MovieParam=0xD1BE,
MirrorLockupState=0xD1BF,
FlashChargingState=0xD1C0,
AloMode=0xD1C1,
FixedMovie=0xD1C2,
OneShotRawOn=0xD1C3,
ErrorForDisplay=0xD1C4,
AEModeMovie=0xD1C5,
BuiltinStroboMode=0xD1C6,
StroboDispState=0xD1C7,
StroboETTL2Metering=0xD1C8,
ContinousAFMode=0xD1C9,
MovieParam2=0xD1CA,
StroboSettingExpComposition=0xD1CB,
MovieParam3=0xD1CC,
LVMedicalRotate=0xD1CF,
Artist=0xD1D0,
Copyright=0xD1D1,
BracketValue=0xD1D2,
FocusInfoEx=0xD1D3,
DepthOfField=0xD1D4,
Brightness=0xD1D5,
LensAdjustParams=0xD1D6,
EFComp=0xD1D7,
LensName=0xD1D8,
AEB=0xD1D9,
StroboSetting=0xD1DA,
StroboWirelessSetting=0xD1DB,
StroboFiring=0xD1DC,
LensID=0xD1DD,
LCDBrightness=0xD1DE,
CADarkBright=0xD1DF,
)
def _EOSEventRecords(self):
'''Return desired endianness for EOS Event Records constructor'''
return Range(
            # The data phase can be about as long as a 32-bit unsigned int.
0, 0xFFFFFFFF,
self._EOSEventRecord
)
def _EOSEventRecord(self):
'''Return desired endianness for a single EOS Event Record'''
return Struct(
'Bytes' / self._UInt32,
Embedded(Struct(
'EventCode' / self._EOSEventCode,
'Record' / Switch(
lambda ctx: ctx.EventCode,
{
'AvailListChanged':
Embedded(Struct(
'PropertyCode' / self._EOSPropertyCode,
'Enumeration' / Array(
# TODO: Verify if this is actually an
# enumeration.
lambda ctx: ctx._._.Bytes - 12,
self._UInt8
)
)),
'DevicePropChanged':
Embedded(Struct(
'PropertyCode' / self._EOSPropertyCode,
'DataTypeCode' / Computed(
lambda ctx: self._EOSDataTypeCode[ctx.PropertyCode]
),
'Value' / Switch(
lambda ctx: ctx.DataTypeCode,
{
None: Array(
lambda ctx: ctx._._.Bytes - 12,
self._UInt8
)
},
default=self._DataType
),
)),
# TODO: 'EmptyEvent',
# TODO: 'RequestGetEvent',
# TODO: 'ObjectAdded',
# TODO: 'ObjectRemoved',
# TODO: 'RequestGetObjectInfoEx',
# TODO: 'StorageStatusChanged',
# TODO: 'StorageInfoChanged',
# TODO: 'RequestObjectTransfer',
# TODO: 'ObjectInfoChangedEx',
# TODO: 'ObjectContentChanged',
# TODO: 'DevicePropChanged',
# TODO: 'AvailListChanged',
# TODO: 'CameraStatusChanged',
# TODO: 'WillSoonShutdown',
# TODO: 'ShutdownTimerUpdated',
# TODO: 'RequestCancelTransfer',
# TODO: 'RequestObjectTransferDT',
# TODO: 'RequestCancelTransferDT',
# TODO: 'StoreAdded',
# TODO: 'StoreRemoved',
# TODO: 'BulbExposureTime',
# TODO: 'RecordingTime',
# TODO: 'InnerDevelopParam',
# TODO: 'RequestObjectTransferDevelop',
# TODO: 'GPSLogOutputProgress',
# TODO: 'GPSLogOutputComplete',
# TODO: 'TouchTrans',
# TODO: 'RequestObjectTransferExInfo',
# TODO: 'PowerZoomInfoChanged',
# TODO: 'RequestPushMode',
# TODO: 'RequestObjectTransferTS',
# TODO: 'AfResult',
# TODO: 'CTGInfoCheckComplete',
# TODO: 'OLCInfoChanged',
# TODO: 'ObjectAddedEx64',
# TODO: 'ObjectInfoChangedEx64',
# TODO: 'RequestObjectTransfer64',
# TODO: 'RequestObjectTransferFTP64',
# TODO: 'ImportFailed',
# TODO: 'BlePairing',
# TODO: 'RequestObjectTransferFTP',
# TODO: 'Unknown',
},
default=Array(
lambda ctx: ctx._.Bytes - 8,
self._UInt8
)
)
))
)
def _EOSDeviceInfo(self):
return Struct(
'EventsSupported' / PrefixedArray(
self._UInt32,
self._EOSEventCode
),
'DevicePropertiesSupported' / PrefixedArray(
self._UInt32,
self._EOSPropertyCode
),
'TODO' / PrefixedArray(
self._UInt32,
self._UInt32
),
)
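        # Note: construct's PrefixedArray parses a UInt32 element count followed by
        # that many elements, so this data phase is three counted lists back to back.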
# TODO: Decode Canon specific events and properties.
def _set_endian(self, endian):
logger.debug('Set Canon endianness')
# HACK: The DataType mechanism used for automatic parsing introduces
# HACK: some nasty dependencies, so the PTP types need to be declared
# HACK: before the extension types, and then finally the DataType at
# HACK: the end...
# TODO: This could probably use a decorator to automatically work out the
# TODO: right order...
super(Canon, self)._set_endian(endian, explicit=True)
# Prepare these for DataType
self._EOSPropertyCode = self._EOSPropertyCode()
self._EOSEventCode = self._EOSEventCode()
self._EOSImageSize = self._EOSImageSize()
self._EOSImageType = self._EOSImageType()
self._EOSImageCompression = self._EOSImageCompression()
self._EOSImageFormat = self._EOSImageFormat()
self._EOSWhiteBalance = self._EOSWhiteBalance()
self._EOSFocusMode = self._EOSFocusMode()
# Make sure DataType is available
super(Canon, self)._set_endian(endian, explicit=False)
# Use DataType
self._EOSEventRecord = self._EOSEventRecord()
self._EOSEventRecords = self._EOSEventRecords()
# TODO: implement GetObjectSize
# TODO: implement SetObjectArchive
def keep_device_on(self):
        '''Ping non-EOS camera so it stays ON.'''
ptp = Container(
OperationCode='KeepDeviceOn',
SessionID=self._session,
TransactionID=self._transaction,
Parameter=[]
)
response = self.mesg(ptp)
return response
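    # Usage sketch (hypothetical, not from the source): assumes this extension is
    # combined with a transport layer that provides session handling, as in ptpy.
    #
    #     camera = Canon()
    #     with camera.session():
    #         camera.keep_device_on()  # ping so the camera does not power down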
# TODO: implement LockDeviceUI
# TODO: implement UnlockDeviceUI
# TODO: implement GetObjectHandleByName
# TODO: implement InitiateReleaseControl
# TODO: implement TerminateReleaseControl
# TODO: implement TerminatePlaybackMode
# TODO: implement ViewfinderOn
# TODO: implement ViewfinderOff
# TODO: implement DoAeAfAwb
# TODO: implement GetCustomizeSpec
# TODO: implement GetCustomizeItemInfo
# TODO: implement GetCustomizeData
# TODO: implement SetCustomizeData
# TODO: implement GetCaptureStatus
# TODO: implement CheckEvent
# TODO: implement FocusLock
# TODO: implement FocusUnlock
# TODO: implement GetLocalReleaseParam
# TODO: implement SetLocalReleaseParam
# TODO: implement AskAboutPcEvf
# TODO: implement SendPartialObject
# TODO: implement InitiateCaptureInMemory
# TODO: implement GetPartialObjectEx
# TODO: implement SetObjectTime
# TODO: implement GetViewfinderImage
# TODO: implement GetObjectAttributes
# TODO: implement ChangeUSBProtocol
# TODO: implement GetChanges
# TODO: implement GetObjectInfoEx
# TODO: implement InitiateDirectTransfer
# TODO: implement TerminateDirectTransfer
# TODO: implement SendObjectInfoByPath
# TODO: implement SendObjectByPath
# TODO: implement InitiateDirectTansferEx
# TODO: implement GetAncillaryObjectHandles
# TODO: implement GetTreeInfo
# TODO: implement GetTreeSize
# TODO: implement NotifyProgress
# TODO: implement NotifyCancelAccepted
# TODO: implement GetDirectory
# TODO: implement SetPairingInfo
# TODO: implement GetPairingInfo
# TODO: implement DeletePairingInfo
# TODO: implement GetMACAddress
# TODO: implement SetDisplayMonitor
# TODO: implement PairingComplete
# TODO: implement GetWirelessMAXChannel
# TODO: implement EOSGetStorageIDs
# TODO: implement EOSGetStorageInfo
# TODO: implement EOSGetObjectInfo
# TODO: implement EOSGetObject
# TODO: implement EOSDeleteObject
# TODO: implement EOSFormatStore
# TODO: implement EOSGetPartialObject
# TODO: implement EOSGetDeviceInfoEx
def eos_get_device_info(self):
'''Get EOS camera device information'''
ptp = Container(
OperationCode='EOSGetDeviceInfoEx',
SessionID=self._session,
TransactionID=self._transaction,
Parameter=[0x00100000]
)
response = self.recv(ptp)
return self._parse_if_data(response, self._EOSDeviceInfo)
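    # Usage sketch (hypothetical): the data phase is parsed with the _EOSDeviceInfo
    # Struct above, so the parsed result should expose the three counted lists.
    #
    #     info = camera.eos_get_device_info()
    #     print(info.EventsSupported)
    #     print(info.DevicePropertiesSupported)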
# TODO: implement EOSGetObjectInfoEx
# Source: repo gialmisi/desdeo-emo, file desdeo_emo/EAs/IOPIS.py
from typing import Dict, List, Union
import numpy as np
import pandas as pd
from desdeo_emo.EAs.BaseEA import BaseDecompositionEA, BaseEA, eaError
from desdeo_emo.EAs.RVEA import RVEA
from desdeo_emo.population.Population import Population
from desdeo_emo.selection.IOPIS_APD import IOPIS_APD_Select
from desdeo_emo.selection.IOPIS_NSGAIII import IOPIS_NSGAIII_select
from desdeo_emo.utilities.ReferenceVectors import ReferenceVectors
from desdeo_problem import MOProblem
from desdeo_tools.interaction import (
ReferencePointPreference,
validate_ref_point_with_ideal_and_nadir,
)
from desdeo_tools.scalarization import AugmentedGuessASF, StomASF
class BaseIOPISDecompositionEA(BaseDecompositionEA, BaseEA):
def __init__(
self,
problem: MOProblem,
population_size: int = None,
population_params: Dict = None,
initial_population: Population = None,
lattice_resolution: int = None,
n_iterations: int = 10,
n_gen_per_iter: int = 100,
total_function_evaluations: int = 0,
use_surrogates: bool = False,
):
a_priori: bool = True
interact: bool = True
if problem.ideal is None or problem.nadir is None:
msg = (
"The problem instance should contain the information about ideal and "
"nadir point."
)
raise eaError(msg)
BaseEA.__init__(
self=self,
a_priori=a_priori,
interact=interact,
n_iterations=n_iterations,
n_gen_per_iter=n_gen_per_iter,
total_function_evaluations=total_function_evaluations,
use_surrogates=use_surrogates,
)
scalarization_methods = [
StomASF(ideal=problem.ideal * problem._max_multiplier),
# PointMethodASF(
# nadir=problem.nadir * problem._max_multiplier,
# ideal=problem.ideal * problem._max_multiplier,
# ),
AugmentedGuessASF(
nadir=problem.nadir * problem._max_multiplier,
ideal=problem.ideal * problem._max_multiplier,
index_to_exclude=[],
),
]
if lattice_resolution is None:
lattice_res_options = [49, 13, 7, 5, 4, 3, 3, 3, 3]
if len(scalarization_methods) < 11:
lattice_resolution = lattice_res_options[len(scalarization_methods) - 2]
else:
lattice_resolution = 3
reference_vectors = ReferenceVectors(
lattice_resolution=lattice_resolution,
number_of_objectives=len(scalarization_methods),
)
# population_size = reference_vectors.number_of_vectors
# population = Population(problem, population_size, population_params)
self.reference_vectors = reference_vectors
self.scalarization_methods = scalarization_methods
if initial_population is not None:
# Population should be compatible.
self.population = initial_population # TODO put checks here.
elif initial_population is None:
if population_size is None:
population_size = self.reference_vectors.number_of_vectors
self.population = Population(
problem, population_size, population_params, use_surrogates
)
self._function_evaluation_count += population_size
self._ref_vectors_are_focused: bool = False
def manage_preferences(self, preference=None):
"""Run the interruption phase of EA.
Use this phase to make changes to RVEA.params or other objects.
Updates Reference Vectors (adaptation), conducts interaction with the user.
"""
if preference is None:
msg = "Giving preferences is mandatory"
raise eaError(msg)
if not isinstance(preference, ReferencePointPreference):
msg = (
f"Wrong object sent as preference. Expected type = "
f"{type(ReferencePointPreference)} or None\n"
f"Recieved type = {type(preference)}"
)
raise eaError(msg)
if preference.request_id != self._interaction_request_id:
msg = (
f"Wrong preference object sent. Expected id = "
f"{self._interaction_request_id}.\n"
f"Recieved id = {preference.request_id}"
)
raise eaError(msg)
refpoint = preference.response.values * self.population.problem._max_multiplier
self._preference = refpoint
scalarized_space_fitness = np.asarray(
[
scalar(self.population.fitness, self._preference)
for scalar in self.scalarization_methods
]
).T
self.reference_vectors.adapt(scalarized_space_fitness)
self.reference_vectors.neighbouring_angles()
def request_preferences(self) -> Union[None, ReferencePointPreference]:
dimensions_data = pd.DataFrame(
index=["minimize", "ideal", "nadir"],
columns=self.population.problem.get_objective_names(),
)
dimensions_data.loc["minimize"] = self.population.problem._max_multiplier
dimensions_data.loc["ideal"] = self.population.ideal_objective_vector
dimensions_data.loc["nadir"] = self.population.nadir_objective_vector
message = (
f"Provide a reference point worse than to the ideal point and better than"
f" the nadir point.\n"
f"Ideal point: \n{dimensions_data.loc['ideal']}\n"
f"Nadir point: \n{dimensions_data.loc['nadir']}\n"
f"The reference point will be used to create scalarization functions in "
f"the preferred region.\n"
)
interaction_priority = "required"
self._interaction_request_id = np.random.randint(0, 1e7)
return ReferencePointPreference(
dimensions_data=dimensions_data,
message=message,
interaction_priority=interaction_priority,
preference_validator=validate_ref_point_with_ideal_and_nadir,
request_id=self._interaction_request_id,
)
def _select(self) -> List:
return self.selection_operator.do(
self.population, self.reference_vectors, self._preference
)
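# Interaction sketch (hypothetical, not from the source): the cycle implied by the
# methods above is request -> respond -> manage. 'evolver' and 'problem' are assumed
# to exist; the response column names must match the problem's objective names.
#
#     pref_request = evolver.request_preferences()
#     pref_request.response = pd.DataFrame(
#         [[0.5, 0.3]], columns=problem.get_objective_names())
#     evolver.manage_preferences(pref_request)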
class IOPIS_RVEA(BaseIOPISDecompositionEA, RVEA):
"""The python version reference vector guided evolutionary algorithm.
Most of the relevant code is contained in the super class. This class just assigns
the APD selection operator to BaseDecompositionEA.
    NOTE: The APD function had to be slightly modified to accommodate the fact that
    this version of the algorithm is interactive, and does not have a set termination
    criterion. There is a time component in the APD penalty function formula of the type
    (t/t_max)^alpha. As there is no set t_max, the formula has been changed. See below,
the documentation for the argument: penalty_time_component
See the details of RVEA in the following paper
<NAME>, <NAME>, <NAME> and <NAME>, A Reference Vector Guided
Evolutionary Algorithm for Many-objective Optimization, IEEE Transactions on
Evolutionary Computation, 2016
Parameters
----------
problem : MOProblem
The problem class object specifying the details of the problem.
population_size : int, optional
        The desired population size, by default None, which sets up a default value
        of population size depending upon the dimensionality of the problem.
population_params : Dict, optional
The parameters for the population class, by default None. See
desdeo_emo.population.Population for more details.
initial_population : Population, optional
An initial population class, by default None. Use this if you want to set up
a specific starting population, such as when the output of one EA is to be
used as the input of another.
alpha : float, optional
The alpha parameter in the APD selection mechanism. Read paper for details.
lattice_resolution : int, optional
The number of divisions along individual axes in the objective space to be
used while creating the reference vector lattice by the simplex lattice
design. By default None
a_priori : bool, optional
A bool variable defining whether a priori preference is to be used or not.
By default False
interact : bool, optional
A bool variable defining whether interactive preference is to be used or
not. By default False
n_iterations : int, optional
The total number of iterations to be run, by default 10. This is not a hard
limit and is only used for an internal counter.
n_gen_per_iter : int, optional
The total number of generations in an iteration to be run, by default 100.
This is not a hard limit and is only used for an internal counter.
    total_function_evaluations : int, optional
Set an upper limit to the total number of function evaluations. When set to
zero, this argument is ignored and other termination criteria are used.
    time_penalty_component : Union[str, float], optional
        The APD formula had to be slightly changed.
        If time_penalty_component is a float between [0, 1], (t/t_max) is replaced by
        that constant for the entire algorithm.
        If time_penalty_component is "original", the original intent of the paper is
        followed and (t/t_max) is calculated as
        (current generation count/total number of generations).
        If time_penalty_component is "function_count", (t/t_max) is calculated as
        (current function evaluation count/total number of function evaluations).
        If time_penalty_component is "interactive", (t/t_max) is calculated as
        (current gen count within an iteration/total gen count within an iteration).
        Hence, time penalty is always zero at the beginning of each iteration, and one
        at the end of each iteration.
        Note: If the time_penalty_component ever exceeds one, the value one is used as
        the time_penalty_component.
        If no value is provided, an appropriate default is selected.
        If `interact` is true, time_penalty_component is "interactive" by default.
        If `interact` is false, but `total_function_evaluations` is provided,
        time_penalty_component is "function_count" by default.
        If `interact` is false, but `total_function_evaluations` is not provided,
        time_penalty_component is "original" by default.
"""
def __init__(
self,
problem: MOProblem,
population_size: int = None,
population_params: Dict = None,
initial_population: Population = None,
alpha: float = None,
lattice_resolution: int = None,
n_iterations: int = 10,
n_gen_per_iter: int = 100,
total_function_evaluations: int = 0,
time_penalty_component: Union[str, float] = None,
use_surrogates: bool = False,
):
super().__init__(
problem=problem,
population_size=population_size,
population_params=population_params,
initial_population=initial_population,
lattice_resolution=lattice_resolution,
n_iterations=n_iterations,
n_gen_per_iter=n_gen_per_iter,
total_function_evaluations=total_function_evaluations,
use_surrogates=use_surrogates,
)
self.time_penalty_component = time_penalty_component
time_penalty_component_options = ["original", "function_count", "interactive"]
if time_penalty_component is None:
if self.interact is True:
time_penalty_component = "interactive"
elif total_function_evaluations > 0:
time_penalty_component = "function_count"
else:
time_penalty_component = "original"
        if not isinstance(time_penalty_component, (float, str)):
            msg = (
                f"type(time_penalty_component) should be float or str. "
                f"Provided type: {type(time_penalty_component)}"
            )
            raise eaError(msg)
        if type(time_penalty_component) is float:
            if (time_penalty_component <= 0) or (time_penalty_component >= 1):
                msg = (
                    f"time_penalty_component should either be a float in the range "
                    f"[0, 1], or one of {time_penalty_component_options}.\n"
                    f"Provided value = {time_penalty_component}"
                )
                raise eaError(msg)
            time_penalty_function = self._time_penalty_constant
if type(time_penalty_component) is str:
if time_penalty_component == "original":
time_penalty_function = self._time_penalty_original
elif time_penalty_component == "function_count":
time_penalty_function = self._time_penalty_function_count
elif time_penalty_component == "interactive":
time_penalty_function = self._time_penalty_interactive
            else:
                msg = (
                    f"time_penalty_component should either be a float in the range "
                    f"[0, 1], or one of {time_penalty_component_options}.\n"
                    f"Provided value = {time_penalty_component}"
                )
                raise eaError(msg)
self.time_penalty_function = time_penalty_function
self.alpha = alpha
selection_operator = IOPIS_APD_Select(
self.time_penalty_function, self.scalarization_methods, self.alpha
)
self.selection_operator = selection_operator
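    # Instantiation sketch (hypothetical 'problem' MOProblem instance):
    #
    #     evolver = IOPIS_RVEA(problem, n_iterations=5, n_gen_per_iter=50)
    #
    # Since interact is forced to True in the base class, time_penalty_component
    # defaults to "interactive"; a float in (0, 1) fixes it to a constant instead:
    #
    #     evolver = IOPIS_RVEA(problem, time_penalty_component=0.5)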
def _time_penalty_constant(self):
"""Returns the constant time penalty value.
"""
return self.time_penalty_component
def _time_penalty_original(self):
"""Calculates the appropriate time penalty value, by the original formula.
"""
return self._current_gen_count / self.total_gen_count
def _time_penalty_interactive(self):
"""Calculates the appropriate time penalty value.
"""
return self._gen_count_in_curr_iteration / self.n_gen_per_iter
    def _time_penalty_function_count(self):
        """Calculates the appropriate time penalty value, from function evaluation count.
        """
        # Body reconstructed from context (truncated in the source); this method is
        # assigned above when time_penalty_component == "function_count".
        return self._function_evaluation_count / self.total_function_evaluations
# Source: Indigo CSV Engine plugin (beginning of file truncated)
        # NOTE: the start of this statement was lost to truncation; 'last_updated'
        # is a hypothetical stand-in for the device's last-refresh timestamp.
        refresh_needed = dt.datetime.now() - last_updated > dt.timedelta(seconds=refresh_interval)
if refresh_needed and refresh_interval != 0:
self.__log_dicts(dev)
dev.updateStatesOnServer([{'key': 'onOffState', 'value': True, 'uiValue': 'Processing'}])
# {key: (Item Name, Source ID, Source State)}
csv_dict_str = dev.pluginProps['columnDict']
# Convert column_dict from a string to a literal dict.
csv_dict = ast.literal_eval(csv_dict_str)
self.logger.threaddebug(u"[{name}] Refreshing CSV "
u"Device: {csv}".format(name=dev.name, csv=dict(csv_dict)))
self.csv_refresh_process(dev=dev, csv_dict=csv_dict)
# =============================================================================
def csv_refresh_process(self, dev, csv_dict):
"""
        The csv_refresh_process() method processes CSV update requests.
We import shutil here so that users who don't use CSV Engines don't need
to import it.
-----
:param class 'indigo.Device' dev: indigo device instance
:param dict csv_dict:
:return:
"""
try:
target_lines = int(dev.pluginProps.get('numLinesToKeep', '300'))
delta = dev.pluginProps.get('numLinesToKeepTime', '72')
cycle_time = dt.datetime.now()
column_names = []
data = []
# If delta isn't a valid float, set it to zero.
try:
delta = float(delta)
except ValueError:
delta = 0.0
# Read through the dict and construct headers and data
for k, v in sorted(csv_dict.items()):
# Create a path variable that is based on the target folder and the CSV item name.
full_path = u"{path}{var}.csv".format(path=self.pluginPrefs['dataPath'], var=v[0])
backup = full_path.replace(u'.csv', u' copy.csv')
# ============================= Create (if needed) ============================
# If the appropriate CSV file doesn't exist, create it and write the header
# line.
if not os.path.isdir(self.pluginPrefs['dataPath']):
try:
os.makedirs(self.pluginPrefs['dataPath'])
self.logger.warning(u"Target data folder doesn't exist. Creating it.")
except OSError:
self.logger.critical(u"[{name}] Target data folder either doesn't exist or the plugin is "
u"unable to access/create it.".format(name=dev.name))
if not os.path.isfile(full_path):
try:
self.logger.debug(u"CSV doesn't exist. Creating: {path}".format(path=full_path))
csv_file = open(full_path, 'w')
csv_file.write('{t},{n}\n'.format(t='Timestamp', n=v[0].encode("utf-8")))
csv_file.close()
self.sleep(1)
except IOError:
self.logger.critical(u"[{name}] The plugin is unable to access the data storage location. "
u"See plugin log for more information.".format(name=dev.name))
# =============================== Create Backup ===============================
# Make a backup of the CSV file in case something goes wrong.
try:
shutil.copyfile(full_path, backup)
except IOError as sub_error:
self.logger.error(u"[{name}] Unable to backup CSV file: {s}.".format(name=dev.name, s=sub_error))
except Exception as sub_error:
self.plugin_error_handler(sub_error=traceback.format_exc())
self.logger.error(u"[{name}] Unable to backup CSV file: {s}. See plugin log for more "
u"information.".format(name=dev.name, s=sub_error))
# ================================= Load Data =================================
# Read CSV data into data frame
try:
with open(full_path) as in_file:
raw_data = [row for row in csv.reader(in_file, delimiter=',')]
# Split the headers and the data
column_names = raw_data[:1]
data = raw_data[1:]
# Coerce header 0 to be 'Timestamp'
if column_names[0][0] != u'Timestamp':
column_names[0][0] = u'Timestamp'
except IOError as sub_error:
self.logger.error(u"[{name}] Unable to load CSV data: {s}.".format(name=dev.name, s=sub_error))
# ============================== Limit for Time ===============================
# Limit data by time
if delta > 0:
cut_off = dt.datetime.now() - dt.timedelta(hours=delta)
time_data = [row for row in data if date_parse(row[0]) >= cut_off]
# If all records are older than the delta, return the original data (so
# there's something to chart) and send a warning to the log.
if len(time_data) == 0:
self.logger.debug(u"[{name} - {cn}] all CSV data are older than the time limit. "
u"Returning original data.".format(name=dev.name,
cn=column_names[0][1].decode('utf-8')
)
)
else:
data = time_data
# ============================ Add New Observation ============================
# Determine if the thing to be written is a device or variable.
try:
state_to_write = u""
if not v[1]:
self.logger.warning(u"Found CSV Data element with missing source ID. Please check to "
u"ensure all CSV sources are properly configured.")
elif int(v[1]) in indigo.devices:
state_to_write = u"{states}".format(states=indigo.devices[int(v[1])].states[v[2]])
elif int(v[1]) in indigo.variables:
state_to_write = u"{vars}".format(vars=indigo.variables[int(v[1])].value)
else:
self.logger.critical(u"The settings for CSV Engine data element '{elm}' are not valid: "
u"[dev: {dev}, state/value: {val}]".format(elm=v[0], dev=v[1], val=v[2]))
# Give matplotlib something it can chew on if the value to be saved is 'None'
if state_to_write in ('None', None, u""):
state_to_write = 'NaN'
# Add the newest observation to the end of the data list.
now = dt.datetime.strftime(cycle_time, '%Y-%m-%d %H:%M:%S.%f')
data.append([now, state_to_write])
except ValueError as sub_error:
self.plugin_error_handler(sub_error=traceback.format_exc())
self.logger.error(u"[{name}] Invalid Indigo ID: {s}. See plugin log for more "
u"information.".format(name=dev.name, s=sub_error))
except Exception as sub_error:
self.plugin_error_handler(sub_error=traceback.format_exc())
self.logger.error(u"[{name}] Invalid CSV definition: {s}".format(name=dev.name, s=sub_error))
# ============================= Limit for Length ==============================
# The data frame (with the newest observation included) may now be too long.
# If it is, we trim it for length.
if 0 <= target_lines < len(data):
data = data[len(data) - target_lines:]
# ================================ Write Data =================================
# Write CSV data to file
with open(full_path, 'w') as out_file:
writer = csv.writer(out_file, delimiter=',')
writer.writerows(column_names)
writer.writerows(data)
# =============================== Delete Backup ===============================
# If all has gone well, delete the backup.
try:
os.remove(backup)
except Exception as sub_error:
self.plugin_error_handler(sub_error=traceback.format_exc())
self.logger.error(u"[{name}] Unable to delete backup file. {s}".format(name=dev.name, s=sub_error))
dev.updateStatesOnServer([{'key': 'csvLastUpdated', 'value': u"{now}".format(now=dt.datetime.now())},
{'key': 'onOffState', 'value': True, 'uiValue': 'Updated'}])
self.logger.info(u"[{name}] CSV data updated successfully.".format(name=dev.name))
dev.updateStateImageOnServer(indigo.kStateImageSel.SensorOn)
except UnboundLocalError:
self.logger.critical(u"[{name}] Unable to reach storage location. Check connections and "
u"permissions.".format(name=dev.name))
except ValueError as sub_error:
self.plugin_error_handler(sub_error=traceback.format_exc())
self.logger.critical(u"[{name}] Error: {s}".format(name=dev.name, s=sub_error))
except Exception as sub_error:
self.plugin_error_handler(sub_error=traceback.format_exc())
self.logger.critical(u"[{name}] Error: {s}".format(name=dev.name, s=sub_error))
# =============================================================================
def csv_refresh_device_action(self, plugin_action, dev, caller_waiting_for_result=False):
"""
Perform a manual refresh of a single CSV Device
The csv_refresh_device_action() method will allow for the update of a single
CSV Engine device. This method will update all CSV sources associated with the
selected CSV Engine device each time the Action item is called. Only CSV Engine
devices set to a manual refresh interval will be presented.
-----
:param class 'indigo.PluginAction' plugin_action:
:param class 'indigo.Device' dev:
:param bool caller_waiting_for_result:
:return:
"""
dev = indigo.devices[int(plugin_action.props['targetDevice'])]
if dev.enabled:
# {key: (Item Name, Source ID, Source State)}
csv_dict_str = dev.pluginProps['columnDict']
# Convert column_dict from a string to a literal dict.
csv_dict = ast.literal_eval(csv_dict_str)
self.csv_refresh_process(dev=dev, csv_dict=csv_dict)
else:
self.logger.warning(u'CSV data not updated. Reason: target device disabled.')
# =============================================================================
def csv_refresh_source_action(self, plugin_action, dev, caller_waiting_for_result=False):
"""
Perform a manual refresh of a single CSV Source
The csv_refresh_source_action() method will allow for the update of a single
CSV source from a CSV Engine device. When creating a new Action item, the user
selects a target CSV Engine device and then the available CSV sources will be
displayed. The user selects a single CSV source that will be updated each time
the Action is called. Only CSV Engine devices set to a manual refresh interval
will be presented.
-----
:param class 'indigo.PluginAction' plugin_action:
:param class 'indigo.Device' dev:
:param bool caller_waiting_for_result:
:return:
"""
dev_id = int(plugin_action.props['targetDevice'])
dev = indigo.devices[dev_id]
if dev.enabled:
target_source = plugin_action.props['targetSource']
temp_dict = ast.literal_eval(dev.pluginProps['columnDict'])
payload = {target_source: temp_dict[target_source]}
self.csv_refresh_process(dev=dev, csv_dict=payload)
else:
self.logger.warning(u'CSV data not updated. Reason: target device disabled.')
# =============================================================================
def csv_source(self, type_id="", values_dict=None, dev_id=0, target_id=0):
"""
Construct a list of devices and variables for the CSV engine
Constructs a list of devices and variables for the user to select within the
CSV engine configuration dialog box. Devices and variables are listed in
alphabetical order with devices first and then variables. Devices are prepended
with '(D)' and variables with '(V)'. Category labels are also included for
visual clarity.
-----
:param unicode type_id:
:param class 'indigo.Dict' values_dict:
:param int dev_id:
:param int target_id:
"""
list_ = list()
# Devices
if values_dict.get('addSourceFilter', 'A') == "D":
[list_.append(t) for t in [(u"-1", u"%%disabled:Devices%%"), (u"-2", u"%%separator%%")]]
[list_.append((dev.id, u"{name}".format(name=dev.name))) for dev in indigo.devices.iter()]
# Variables
elif values_dict.get('addSourceFilter', 'A') == "V":
[list_.append(t) for t in [(u"-3", u"%%separator%%"),
(u"-4", u"%%disabled:Variables%%"),
(u"-5", u"%%separator%%")
]
]
[list_.append((var.id, u"{name}".format(name=var.name))) for var in indigo.variables.iter()]
# Devices and variables
else:
[list_.append(t) for t in [(u"-1", u"%%disabled:Devices%%"), (u"-2", u"%%separator%%")]]
[list_.append((dev.id, u"{name}".format(name=dev.name))) for dev in indigo.devices.iter()]
[list_.append(t) for t in [(u"-3", u"%%separator%%"),
(u"-4", u"%%disabled:Variables%%"),
(u"-5", u"%%separator%%")
]
]
[list_.append((var.id, u"{name}".format(name=var.name))) for var in indigo.variables.iter()]
return list_
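    # The returned list mixes category labels/separators with (id, name) tuples,
    # e.g. (hypothetical ids):
    #     [(u"-1", u"%%disabled:Devices%%"), (u"-2", u"%%separator%%"),
    #      (12345678, u"Living Room Lamp"), ...]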
# =============================================================================
def csv_source_edit(self, type_id="", values_dict=None, dev_id=0, target_id=0):
"""
Construct a list of devices and variables for the CSV engine
Constructs a list of devices and variables for the user to select within the
CSV engine configuration dialog box. Devices and variables are listed in
alphabetical order with devices first and then variables. Devices are prepended
with '(D)' and variables with '(V)'. Category labels are also included for
visual clarity.
-----
:param unicode type_id:
:param class 'indigo.Dict' values_dict:
:param int dev_id:
:param int target_id:
"""
list_ = list()
# Devices
if values_dict.get('editSourceFilter', 'A') == "D":
[list_.append(t) for t in [(u"-1", u"%%disabled:Devices%%"), (u"-2", u"%%separator%%")]]
            [list_.append((dev.id, u"{name}".format(name=dev.name))) for dev in indigo.devices.iter()]
# Source: Pyomo model file (beginning truncated)
m.c1282 = Constraint(expr=m.x237**2 - (m.x986**2 + m.x987**2) >= 0)
m.c1283 = Constraint(expr=m.x238**2 - (m.x988**2 + m.x989**2) >= 0)
m.c1284 = Constraint(expr=m.x239**2 - (m.x990**2 + m.x991**2) >= 0)
m.c1285 = Constraint(expr=m.x240**2 - (m.x992**2 + m.x993**2) >= 0)
m.c1286 = Constraint(expr=m.x241**2 - (m.x994**2 + m.x995**2) >= 0)
m.c1287 = Constraint(expr=m.x242**2 - (m.x996**2 + m.x997**2) >= 0)
m.c1288 = Constraint(expr=m.x243**2 - (m.x998**2 + m.x999**2) >= 0)
m.c1289 = Constraint(expr=m.x244**2 - (m.x1000**2 + m.x1001**2) >= 0)
m.c1290 = Constraint(expr=m.x245**2 - (m.x1002**2 + m.x1003**2) >= 0)
m.c1291 = Constraint(expr=m.x246**2 - (m.x1004**2 + m.x1005**2) >= 0)
m.c1292 = Constraint(expr=m.x247**2 - (m.x1006**2 + m.x1007**2) >= 0)
m.c1293 = Constraint(expr=m.x248**2 - (m.x1008**2 + m.x1009**2) >= 0)
m.c1294 = Constraint(expr=m.x249**2 - (m.x1010**2 + m.x1011**2) >= 0)
m.c1295 = Constraint(expr=m.x250**2 - (m.x1012**2 + m.x1013**2) >= 0)
m.c1296 = Constraint(expr=m.x251**2 - (m.x1014**2 + m.x1015**2) >= 0)
m.c1297 = Constraint(expr=m.x252**2 - (m.x1016**2 + m.x1017**2) >= 0)
m.c1298 = Constraint(expr=m.x253**2 - (m.x1018**2 + m.x1019**2) >= 0)
m.c1299 = Constraint(expr=m.x254**2 - (m.x1020**2 + m.x1021**2) >= 0)
m.c1300 = Constraint(expr=m.x255**2 - (m.x1022**2 + m.x1023**2) >= 0)
m.c1301 = Constraint(expr=m.x256**2 - (m.x1024**2 + m.x1025**2) >= 0)
m.c1302 = Constraint(expr=m.x257**2 - (m.x1026**2 + m.x1027**2) >= 0)
m.c1303 = Constraint(expr=m.x258**2 - (m.x1028**2 + m.x1029**2) >= 0)
m.c1304 = Constraint(expr=m.x259**2 - (m.x1030**2 + m.x1031**2) >= 0)
m.c1305 = Constraint(expr=m.x260**2 - (m.x1032**2 + m.x1033**2) >= 0)
m.c1306 = Constraint(expr=m.x261**2 - (m.x1034**2 + m.x1035**2) >= 0)
m.c1307 = Constraint(expr=m.x262**2 - (m.x1036**2 + m.x1037**2) >= 0)
m.c1308 = Constraint(expr=m.x263**2 - (m.x1038**2 + m.x1039**2) >= 0)
m.c1309 = Constraint(expr=m.x264**2 - (m.x1040**2 + m.x1041**2) >= 0)
m.c1310 = Constraint(expr=m.x265**2 - (m.x1042**2 + m.x1043**2) >= 0)
m.c1311 = Constraint(expr=m.x266**2 - (m.x1044**2 + m.x1045**2) >= 0)
m.c1312 = Constraint(expr=m.x267**2 - (m.x1046**2 + m.x1047**2) >= 0)
m.c1313 = Constraint(expr=m.x268**2 - (m.x1048**2 + m.x1049**2) >= 0)
m.c1314 = Constraint(expr=m.x269**2 - (m.x1050**2 + m.x1051**2) >= 0)
m.c1315 = Constraint(expr=m.x270**2 - (m.x1052**2 + m.x1053**2) >= 0)
m.c1316 = Constraint(expr=m.x271**2 - (m.x1054**2 + m.x1055**2) >= 0)
m.c1317 = Constraint(expr=m.x272**2 - (m.x1056**2 + m.x1057**2) >= 0)
m.c1318 = Constraint(expr=m.x273**2 - (m.x1058**2 + m.x1059**2) >= 0)
m.c1319 = Constraint(expr=m.x274**2 - (m.x1060**2 + m.x1061**2) >= 0)
m.c1320 = Constraint(expr=m.x275**2 - (m.x1062**2 + m.x1063**2) >= 0)
m.c1321 = Constraint(expr=m.x276**2 - (m.x1064**2 + m.x1065**2) >= 0)
m.c1322 = Constraint(expr=m.x277**2 - (m.x1066**2 + m.x1067**2) >= 0)
m.c1323 = Constraint(expr=m.x278**2 - (m.x1068**2 + m.x1069**2) >= 0)
m.c1324 = Constraint(expr=m.x279**2 - (m.x1070**2 + m.x1071**2) >= 0)
m.c1325 = Constraint(expr=m.x280**2 - (m.x1072**2 + m.x1073**2) >= 0)
m.c1326 = Constraint(expr=m.x281**2 - (m.x1074**2 + m.x1075**2) >= 0)
m.c1327 = Constraint(expr=m.x282**2 - (m.x1076**2 + m.x1077**2) >= 0)
m.c1328 = Constraint(expr=m.x283**2 - (m.x1078**2 + m.x1079**2) >= 0)
m.c1329 = Constraint(expr=m.x284**2 - (m.x1080**2 + m.x1081**2) >= 0)
m.c1330 = Constraint(expr=m.x285**2 - (m.x1082**2 + m.x1083**2) >= 0)
m.c1331 = Constraint(expr=m.x286**2 - (m.x1084**2 + m.x1085**2) >= 0)
m.c1332 = Constraint(expr=m.x287**2 - (m.x1086**2 + m.x1087**2) >= 0)
m.c1333 = Constraint(expr=m.x288**2 - (m.x1088**2 + m.x1089**2) >= 0)
m.c1334 = Constraint(expr=m.x289**2 - (m.x1090**2 + m.x1091**2) >= 0)
m.c1335 = Constraint(expr=m.x290**2 - (m.x1092**2 + m.x1093**2) >= 0)
m.c1336 = Constraint(expr=m.x291**2 - (m.x1094**2 + m.x1095**2) >= 0)
m.c1337 = Constraint(expr=m.x292**2 - (m.x1096**2 + m.x1097**2) >= 0)
m.c1338 = Constraint(expr=m.x293**2 - (m.x1098**2 + m.x1099**2) >= 0)
m.c1339 = Constraint(expr=m.x294**2 - (m.x1100**2 + m.x1101**2) >= 0)
m.c1340 = Constraint(expr=m.x295**2 - (m.x1102**2 + m.x1103**2) >= 0)
m.c1341 = Constraint(expr=m.x296**2 - (m.x1104**2 + m.x1105**2) >= 0)
m.c1342 = Constraint(expr=m.x297**2 - (m.x1106**2 + m.x1107**2) >= 0)
m.c1343 = Constraint(expr=m.x298**2 - (m.x1108**2 + m.x1109**2) >= 0)
m.c1344 = Constraint(expr=m.x299**2 - (m.x1110**2 + m.x1111**2) >= 0)
m.c1345 = Constraint(expr=m.x300**2 - (m.x1112**2 + m.x1113**2) >= 0)
m.c1346 = Constraint(expr=m.x301**2 - (m.x1114**2 + m.x1115**2) >= 0)
m.c1347 = Constraint(expr=m.x302**2 - (m.x1116**2 + m.x1117**2) >= 0)
m.c1348 = Constraint(expr=m.x303**2 - (m.x1118**2 + m.x1119**2) >= 0)
m.c1349 = Constraint(expr=m.x304**2 - (m.x1120**2 + m.x1121**2) >= 0)
m.c1350 = Constraint(expr=m.x305**2 - (m.x1122**2 + m.x1123**2) >= 0)
m.c1351 = Constraint(expr=m.x306**2 - (m.x1124**2 + m.x1125**2) >= 0)
m.c1352 = Constraint(expr=m.x307**2 - (m.x1126**2 + m.x1127**2) >= 0)
m.c1353 = Constraint(expr=m.x308**2 - (m.x1128**2 + m.x1129**2) >= 0)
m.c1354 = Constraint(expr=m.x309**2 - (m.x1130**2 + m.x1131**2) >= 0)
m.c1355 = Constraint(expr=m.x310**2 - (m.x1132**2 + m.x1133**2) >= 0)
m.c1356 = Constraint(expr=m.x311**2 - (m.x1134**2 + m.x1135**2) >= 0)
m.c1357 = Constraint(expr=m.x312**2 - (m.x1136**2 + m.x1137**2) >= 0)
m.c1358 = Constraint(expr=m.x313**2 - (m.x1138**2 + m.x1139**2) >= 0)
m.c1359 = Constraint(expr=m.x314**2 - (m.x1140**2 + m.x1141**2) >= 0)
m.c1360 = Constraint(expr=m.x315**2 - (m.x1142**2 + m.x1143**2) >= 0)
m.c1361 = Constraint(expr=m.x316**2 - (m.x1144**2 + m.x1145**2) >= 0)
m.c1362 = Constraint(expr=m.x317**2 - (m.x1146**2 + m.x1147**2) >= 0)
m.c1363 = Constraint(expr=m.x318**2 - (m.x1148**2 + m.x1149**2) >= 0)
m.c1364 = Constraint(expr=m.x319**2 - (m.x1150**2 + m.x1151**2) >= 0)
m.c1365 = Constraint(expr=m.x320**2 - (m.x1152**2 + m.x1153**2) >= 0)
m.c1366 = Constraint(expr=m.x321**2 - (m.x1154**2 + m.x1155**2) >= 0)
m.c1367 = Constraint(expr=m.x322**2 - (m.x1156**2 + m.x1157**2) >= 0)
m.c1368 = Constraint(expr=m.x323**2 - (m.x1158**2 + m.x1159**2) >= 0)
m.c1369 = Constraint(expr=m.x324**2 - (m.x1160**2 + m.x1161**2) >= 0)
m.c1370 = Constraint(expr=m.x325**2 - (m.x1162**2 + m.x1163**2) >= 0)
m.c1371 = Constraint(expr=m.x326**2 - (m.x1164**2 + m.x1165**2) >= 0)
m.c1372 = Constraint(expr=m.x327**2 - (m.x1166**2 + m.x1167**2) >= 0)
m.c1373 = Constraint(expr=m.x328**2 - (m.x1168**2 + m.x1169**2) >= 0)
m.c1374 = Constraint(expr=m.x329**2 - (m.x1170**2 + m.x1171**2) >= 0)
m.c1375 = Constraint(expr=m.x330**2 - (m.x1172**2 + m.x1173**2) >= 0)
m.c1376 = Constraint(expr=m.x331**2 - (m.x1174**2 + m.x1175**2) >= 0)
m.c1377 = Constraint(expr=m.x332**2 - (m.x1176**2 + m.x1177**2) >= 0)
m.c1378 = Constraint(expr=m.x333**2 - (m.x1178**2 + m.x1179**2) >= 0)
m.c1379 = Constraint(expr=m.x334**2 - (m.x1180**2 + m.x1181**2) >= 0)
m.c1380 = Constraint(expr=m.x335**2 - (m.x1182**2 + m.x1183**2) >= 0)
m.c1381 = Constraint(expr=m.x336**2 - (m.x1184**2 + m.x1185**2) >= 0)
m.c1382 = Constraint(expr=m.x337**2 - (m.x1186**2 + m.x1187**2) >= 0)
m.c1383 = Constraint(expr=m.x338**2 - (m.x1188**2 + m.x1189**2) >= 0)
m.c1384 = Constraint(expr=m.x339**2 - (m.x1190**2 + m.x1191**2) >= 0)
m.c1385 = Constraint(expr=m.x340**2 - (m.x1192**2 + m.x1193**2) >= 0)
m.c1386 = Constraint(expr=m.x341**2 - (m.x1194**2 + m.x1195**2) >= 0)
m.c1387 = Constraint(expr=m.x342**2 - (m.x1196**2 + m.x1197**2) >= 0)
m.c1388 = Constraint(expr=m.x343**2 - (m.x1198**2 + m.x1199**2) >= 0)
m.c1389 = Constraint(expr=m.x344**2 - (m.x1200**2 + m.x1201**2) >= 0)
m.c1390 = Constraint(expr=m.x345**2 - (m.x1202**2 + m.x1203**2) >= 0)
m.c1391 = Constraint(expr=m.x346**2 - (m.x1204**2 + m.x1205**2) >= 0)
m.c1392 = Constraint(expr=m.x347**2 - (m.x1206**2 + m.x1207**2) >= 0)
m.c1393 = Constraint(expr=m.x348**2 - (m.x1208**2 + m.x1209**2) >= 0)
m.c1394 = Constraint(expr=m.x349**2 - (m.x1210**2 + m.x1211**2) >= 0)
m.c1395 = Constraint(expr=m.x350**2 - (m.x1212**2 + m.x1213**2) >= 0)
m.c1396 = Constraint(expr=m.x351**2 - (m.x1214**2 + m.x1215**2) >= 0)
m.c1397 = Constraint(expr=m.x352**2 - (m.x1216**2 + m.x1217**2) >= 0)
m.c1398 = Constraint(expr=m.x353**2 - (m.x1218**2 + m.x1219**2) >= 0)
m.c1399 = Constraint(expr=m.x354**2 - (m.x1220**2 + m.x1221**2) >= 0)
m.c1400 = Constraint(expr=m.x355**2 - (m.x1222**2 + m.x1223**2) >= 0)
m.c1401 = Constraint(expr=m.x356**2 - (m.x1224**2 + m.x1225**2) >= 0)
m.c1402 = Constraint(expr=m.x357**2 - (m.x1226**2 + m.x1227**2) >= 0)
m.c1403 = Constraint(expr=m.x358**2 - (m.x1228**2 + m.x1229**2) >= 0)
m.c1404 = Constraint(expr=m.x359**2 - (m.x1230**2 + m.x1231**2) >= 0)
m.c1405 = Constraint(expr=m.x360**2 - (m.x1232**2 + m.x1233**2) >= 0)
m.c1406 = Constraint(expr=m.x361**2 - (m.x1234**2 + m.x1235**2) >= 0)
m.c1407 = Constraint(expr=m.x362**2 - (m.x1236**2 + m.x1237**2) >= 0)
m.c1408 = Constraint(expr=m.x363**2 - (m.x1238**2 + m.x1239**2) >= 0)
m.c1409 = Constraint(expr=m.x364**2 - (m.x1240**2 + m.x1241**2) >= 0)
m.c1410 = Constraint(expr=m.x365**2 - (m.x1242**2 + m.x1243**2) >= 0)
m.c1411 = Constraint(expr=m.x366**2 - (m.x1244**2 + m.x1245**2) >= 0)
m.c1412 = Constraint(expr=m.x367**2 - (m.x1246**2 + m.x1247**2) >= 0)
m.c1413 = Constraint(expr=m.x368**2 - (m.x1248**2 + m.x1249**2) >= 0)
m.c1414 = Constraint(expr=m.x369**2 - (m.x1250**2 + m.x1251**2) >= 0)
m.c1415 = Constraint(expr=m.x370**2 - (m.x1252**2 + m.x1253**2) >= 0)
m.c1416 = Constraint(expr=m.x371**2 - (m.x1254**2 + m.x1255**2) >= 0)
m.c1417 = Constraint(expr=m.x372**2 - (m.x1256**2 + m.x1257**2) >= 0)
m.c1418 = Constraint(expr=m.x373**2 - (m.x1258**2 + m.x1259**2) >= 0)
m.c1419 = Constraint(expr=m.x374**2 - (m.x1260**2 + m.x1261**2) >= 0)
m.c1420 = Constraint(expr=m.x375**2 - (m.x1262**2 + m.x1263**2) >= 0)
m.c1421 = Constraint(expr=m.x376**2 - (m.x1264**2 + m.x1265**2) >= 0)
m.c1422 = Constraint(expr=m.x377**2 - (m.x1266**2 + m.x1267**2) >= 0)
m.c1423 = Constraint(expr=m.x378**2 - (m.x1268**2 + m.x1269**2) >= 0)
m.c1424 = Constraint(expr=m.x379**2 - (m.x1270**2 + m.x1271**2) >= 0)
m.c1425 = Constraint(expr=m.x380**2 - (m.x1272**2 + m.x1273**2) >= 0)
m.c1426 = Constraint(expr=m.x381**2 - (m.x1274**2 + m.x1275**2) >= 0)
m.c1427 = Constraint(expr=m.x382**2 - (m.x1276**2 + m.x1277**2) >= 0)
m.c1428 = Constraint(expr=m.x383**2 - (m.x1278**2 + m.x1279**2) >= 0)
m.c1429 = Constraint(expr=m.x384**2 - (m.x1280**2 + m.x1281**2) >= 0)
m.c1430 = Constraint(expr=m.x385**2 - (m.x1282**2 + m.x1283**2) >= 0)
m.c1431 = Constraint(expr=m.x386**2 - (m.x1284**2 + m.x1285**2) >= 0)
m.c1432 = Constraint(expr=m.x387**2 - (m.x1286**2 + m.x1287**2) >= 0)
m.c1433 = Constraint(expr=m.x388**2 - (m.x1288**2 + m.x1289**2) >= 0)
m.c1434 = Constraint(expr=m.x389**2 - (m.x1290**2 + m.x1291**2) >= 0)
m.c1435 = Constraint(expr=m.x390**2 - (m.x1292**2 + m.x1293**2) >= 0)
m.c1436 = Constraint(expr=m.x391**2 - (m.x1294**2 + m.x1295**2) >= 0)
m.c1437 = Constraint(expr=m.x392**2 - (m.x1296**2 + m.x1297**2) >= 0)
m.c1438 = Constraint(expr=m.x393**2 - (m.x1298**2 + m.x1299**2) >= 0)
m.c1439 = Constraint(expr=m.x394**2 - (m.x1300**2 + m.x1301**2) >= 0)
m.c1440 = Constraint(expr=m.x395**2 - (m.x1302**2 + m.x1303**2) >= 0)
m.c1441 = Constraint(expr=m.x396**2 - (m.x1304**2 + m.x1305**2) >= 0)
m.c1442 = Constraint(expr=m.x397**2 - (m.x1306**2 + m.x1307**2) >= 0)
# Source: microtubule simulation figure module (beginning truncated)
# (truncated statement fragment) ... simPa.frame_rate_actual
Pausing = np.where(MT_length_diff < 0.1*simPa.growth_speed*1000/60)[0][0]
Pausing = (MT_length.shape[1] - Pausing)*simPa.frame_rate_actual
# Tip position output
MT_length_output = np.vstack((MT_CI_upper, MT_CI_lower))
MT_length_output = np.vstack((MT_length_mean, MT_length_output))
MT_length_output = np.vstack((simPa.frame_rate_actual * np.arange(-len(MT_length_mean),0), MT_length_output)).T
# Cap position output
C_length_output = np.vstack((C_CI_upper, C_CI_lower))
C_length_output = np.vstack((C_length_mean, C_length_output))
C_length_output = np.vstack((simPa.frame_rate_actual * np.arange(-len(C_length_mean),0), C_length_output)).T
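    # Both output arrays now have columns: [time before catastrophe, mean, upper CI, lower CI].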
#Start plotting
if num_fig != 0:
plt.figure(num_fig)
plt.clf()
# Plot mean tip position
plt.plot(simPa.frame_rate_actual * np.arange(-len(MT_length_mean),0),
MT_length_mean,'k', linewidth=2.0)
# Plot upper 95% confidence interval of tip position
plt.plot(simPa.frame_rate_actual * np.arange(-len(MT_length_mean),0),
MT_CI_upper,'k--', linewidth=1.0)
# Plot lower 95% confidence interval of tip position
plt.plot(simPa.frame_rate_actual * np.arange(-len(MT_length_mean),0),
MT_CI_lower,'k--', linewidth=1.0)
# Plot mean cap position
plt.plot(simPa.frame_rate_actual * np.arange(-len(C_length_mean),0),
C_length_mean,'r', linewidth=2.0)
# Plot upper 95% confidence interval of cap position
plt.plot(simPa.frame_rate_actual * np.arange(-len(C_length_mean),0),
C_CI_upper,'r--', linewidth=1.0)
# Plot lower 95% confidence interval of cap position
plt.plot(simPa.frame_rate_actual * np.arange(-len(C_length_mean),0),
C_CI_lower,'r--', linewidth=1.0)
plt.title("MT length prior to catastrophe")
        plt.xlabel('time before catastrophe [s]')
        plt.ylabel('Mean MT position [nm]')
#Add parameters to figure
figtext = ['Shrinkage = %.2f nm' %abs(MT_length_mean[-1])]
figtext.append('Pausing = %.2f s' %Pausing)
## Figure styles
from matplotlib.font_manager import FontProperties
font = FontProperties()
font.set_family('sans-serif')
font.set_style('normal')
font.set_weight('light')
        figDX = 0.045
        # plt.ax is not set anywhere in this figure; bind it to the current axes
        # before use (assumption: mirrors the plt.ax pattern used later in this module).
        plt.ax = plt.gca()
        for m in range(len(figtext)):
plt.ax.text(0.1,0.82-m*figDX, figtext[m], fontproperties=font,
verticalalignment='bottom', horizontalalignment='left',
transform=plt.ax.transAxes, color='black', fontsize=11)
plt.xlim(-30, 2)
if simPa.record_data:
filename = file_figure + '_fig' + str(int(num_fig))
plt.savefig(filename+'.eps', format='eps', dpi=1000)
plt.savefig(filename+'.png', format='png', dpi=200)
plt.show()
print('Mean shrinkage before catastrophe = %.2f nm' %abs(MT_length_mean[-1]))
print('Mean pause duration before catastrophe = %.2f s' %Pausing)
return MT_length_output, C_length_output
def fig_EB_cat_hist(simPa,
file_figure,
num_fig,
EB_comet_sum,
barrier_contact_times,
EB_average_frames = 2):
""" Have a look at EB intensity at catastrophe...
Args:
-------
simPa: parameter set
Simulation parameters in "ParameterSet" format.
file_figure: str
Folder for storing figures and data.
num_fig: int
Figure number.
EB_comet_sum: list, array #TODO:check
Number of "B"s during a time window before catastophe.
barrier_contact_times: list, array #TODO:check
List/array containing barrier contact times.
EB_average_frames: int
Number of frames to average over. Default = 2.
"""
EB_intensity_before_cat = []
EB_intensity_at_barrier = []
EB_mean = []
# Select valid runs
valid_runs = valid_EB_runs(simPa, EB_comet_sum, barrier_contact_times)
EB_signal, EB_signal_average, max_barrier_contact_frames, min_length_run_frames, frame_window = analyse_EB_signal(simPa,
EB_comet_sum, barrier_contact_times)
for a in range(0,len(valid_runs)):
EB_intensity_before_cat.append(np.mean(np.array(EB_comet_sum[a])[0:(EB_average_frames+1)])) # :-1]))
barrier_contact_frame = int(round(barrier_contact_times[valid_runs[a]]/simPa.frame_rate_actual,0))
EB_intensity_at_barrier.append(np.mean(np.array(EB_comet_sum[a])[barrier_contact_frame:(barrier_contact_frame+EB_average_frames+1)]))
EB_mean.append(np.mean(EB_signal[a][max_barrier_contact_frames:frame_window]))
fig, ax = plt.subplots(figsize=(8, 8)) #figure(9, figsize=(8, 8))
plt.clf()
    scatter_map = plt.scatter(EB_intensity_before_cat/np.mean(EB_mean), EB_intensity_at_barrier/np.mean(EB_mean),
                              c=barrier_contact_times[valid_runs], alpha=0.5, cmap='CMRmap')
    plt.xlim(xmax=1)
    fig.colorbar(scatter_map, ax=ax, label='barrier contact time [s]')
plt.title('EB intensity before catastrophe (%.2f nM EB)' %(simPa.EB*1000))
plt.xlabel('EB intensity right before catastrophe (last %.0f frames), relative to mean' %EB_average_frames)
plt.ylabel('EB intensity right before barrier contact, relative to mean')
plt.legend(fontsize=14)
if simPa.record_data:
filename = file_figure + '_fig' + str(num_fig) + '_relative'
plt.savefig(filename+'.eps', format='eps', dpi=1000)
plt.savefig(filename+'.png', format='png', dpi=200)
plt.show()
fig, ax = plt.subplots(figsize=(8, 8)) #figure(9, figsize=(8, 8))
plt.clf()
hist_data, hist_bins = np.histogram(EB_intensity_before_cat/np.mean(EB_mean), np.arange(0,1.1,0.1))
bin_width = hist_bins[1]
plt.bar((hist_bins[:-1] + bin_width/2) , np.float_(hist_data)/(np.sum(hist_data)), 0.9*bin_width, alpha=0.8)
plt.title('Relative B-state intensity at catastrophe')
    plt.xlabel('Relative B-state intensity (# of elements in state "B" div. by mean)')
plt.ylabel('Probability')
if simPa.record_data:
filename = file_figure + '_fig' + str(num_fig) + '_histogram'
plt.savefig(filename+'.eps', format='eps', dpi=1000)
plt.savefig(filename+'.png', format='png', dpi=200)
#PROBLEM with plt.hist --> normed=1 and density=1 don't work properly
#plt.hist(EB_intensity_before_cat/np.mean(EB_mean), np.arange(0,1.1,0.1), density=True, histtype='bar', rwidth=0.8)
plt.show()
return EB_intensity_before_cat/np.mean(EB_mean)
def fig_display_examples(simPa,
file_figure,
num_fig,
MT_length_sum,
catastrophe_times,
EB_comet_sum,
barrier_contact_times=[]):
""" Show selection of examples (tip position + EB intensity)
Args:
-------
simPa: parameter set
Simulation parameters in "ParameterSet" format.
file_figure: str
Folder for storing figures and data.
num_fig: int
Figure number.
MT_length_sum:
TODO
catastrophe_times:
TODO
EB_comet_sum: list, array #TODO:check
Number of "B"s during a time window before catastophe.
barrier_contact_times: list, array #TODO:check
List/array containing barrier contact times.
"""
min_length_run_frames = int(simPa.min_length_run/simPa.frame_rate_actual)
# Select valid runs
valid_runs = valid_EB_runs(simPa, EB_comet_sum, barrier_contact_times)
EB_signal_before_cat = np.zeros((len(valid_runs), min_length_run_frames+1)) #put individual runs into one np.array
for a in range(0,len(valid_runs)):
EB_signal_before_cat[a][0:min_length_run_frames] = \
np.array(EB_comet_sum[valid_runs[a]])[0:min_length_run_frames]
EB_signal_average = np.sum(EB_signal_before_cat, axis=0)
EB_signal_average = EB_signal_average/len(valid_runs)
show_fraction_frames = int(simPa.show_fraction/simPa.frame_rate_actual)
valid_runs = np.where(catastrophe_times > simPa.show_fraction)[0]
plt.figure(num_fig, figsize=(15, 10))
plt.clf()
f, axarr = plt.subplots(nrows=5, ncols=5, sharey=True, sharex=True, figsize=(15, 10))
for m in range(0,5):
for n in range(0,5):
skip = 0
axarr[m, n].plot(simPa.frame_rate_actual * np.arange(-min_length_run_frames,0) ,MT_length_sum[valid_runs[skip+m+5*n]][0::], 'black')
axarr[m, n].set_title('catastrophe %.0f' %(skip+m+5*n))
            # Plot the EB trace on the same subplot as the tip position (the original
            # called plt.plot on a dangling axarr[m, n] expression here).
            axarr[m, n].plot(simPa.frame_rate_actual * np.arange(-min_length_run_frames,0),
                             EB_signal_before_cat[skip+m+5*n][0:show_fraction_frames][::-1], 'red')
if simPa.record_data:
filename = file_figure + '_fig' + str(int(num_fig))
plt.savefig(filename+'.eps', format='eps', dpi=1000)
plt.savefig(filename+'.png', format='png', dpi=200)
plt.show()
def fig_EB_profile(simPa, file_figure, num_fig, EB_profiles, MT_length_full, w_size):
""" Figure to display EB profile.
Args:
-------
simPa: parameter set
Simulation parameters in "ParameterSet" format.
file_figure: str
Folder for storing figures and data.
num_fig: int
Figure number.
EB_profiles:
TODO
MT_length_full:
TODO
w_size:
TODO
"""
# Analyse the simulated EB profiles
EB_mean, v_mean = analyse_EB_profile(simPa, MT_length_full, EB_profiles, w_size)
# Calculate the mean EB profile
# x = np.arange(0, len(EB_mean[0]), 1) * simPa.dL_dimer *1000
# y = np.mean(EB_mean, axis=0)
x = np.arange(0, len(EB_mean), 1) * simPa.dL_dimer *1000
y = EB_mean
#plt.plot(x, y)
# Define exponential function
    def exponential_func(x, a, b):
        return a*np.exp(b*x)
    # Calculate the maturation rate (Duellberg, 2016), i.e. the hydrolysis rate
    ind = np.argmax(y)
    popt, pcov = curve_fit(exponential_func, x[0:ind], y[0:ind], p0=(1e-2, 1e-3))
    xx = np.linspace(0, x[ind], 1000)
    yy = exponential_func(xx, *popt)
# Align profile from left to right and set tip position to zero
x = -x
x += np.round(np.argmax(y)*(simPa.dL_dimer*1000))
xx = -xx
xx += np.round(np.argmax(y)*(simPa.dL_dimer*1000))
fig = plt.figure(1, figsize=(12, 7))
plt.clf()
plt.plot(x,y,'k.', xx, yy, '--r')
plt.title('Mean GTP/GDP-Pi profile', fontsize=14)
plt.xlabel('Position [nm]', fontsize=12)
plt.ylabel('Intensity [a.u.]', fontsize = 12)
plt.ax = fig.add_subplot(111)
fig.subplots_adjust(top=0.9)
## Figure styles
from matplotlib.font_manager import FontProperties
font = FontProperties()
font.set_family('sans-serif')
font.set_style('normal')
font.set_weight('light')
#Add parameters to figure
figtext = ['Simulation parameters:']
figtext.append('$N_{sim} = %.0f$' %len(MT_length_full))
figtext.append('$v_{g} = %.2f$ $nm/s$' %float(simPa.growth_rate_one*(simPa.dL_dimer*1000)))
figtext.append('$k_{hyd} = %.2f$ $s^{-1}$' %(simPa.kBC))
figtext.append('$D_{tip} = %.0f$ $nm^{2}/s$' %simPa.D_tip)
figtext.append('')
figtext.append('Measured values:')
figtext.append('$N_{profiles} = %.0f$' %len(EB_mean))
figtext.append('$v_{g} = %.2f$ $nm/s$' %np.mean(v_mean))
figtext.append('$L_{comet} = %.0f$ $nm$' %float(1/popt[1]))
figtext.append('$k_{m} = %.2f$ $s^{-1}$' %float(np.mean(v_mean)*popt[1]))
figtext.append('$I_{max} = %.2f$ $a.u.$' %float(np.max(y)))
figDX = 0.045
for m in range(len(figtext)):
plt.ax.text(0.75, 0.9-m*figDX, figtext[m], fontproperties=font,
verticalalignment='bottom', horizontalalignment='left',
transform=plt.ax.transAxes, color='black', fontsize=12)
if simPa.record_data:
filename = file_figure + '_fig' + str(int(num_fig))
plt.savefig(filename+'.eps', format='eps', dpi=1000)
plt.savefig(filename+'.png', format='png', dpi=200)
plt.show()
def fig_MT_ageing(simPa, file_figure, num_fig, c_times):
""" Calculate the age-dependent microtubule catastrophe frequency
Args:
-------
simPa: parameter set
Simulation parameters in "ParameterSet" format.
file_figure: str
Folder for storing figures and data.
num_fig: int
Figure number.
c_times: numpy.array
Array of catastrophe times.
"""
X_dist = np.sort(c_times)
Y_dist = np.cumsum(np.ones(len(c_times)))/len(c_times)
f = (Y_dist/X_dist)/(1-Y_dist)
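    # f estimates the age-dependent catastrophe frequency as a hazard rate:
    # (cumulative fraction / age) divided by the surviving fraction (1 - CDF).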
C_freq = np.vstack((X_dist,Y_dist)).T
plt.figure(1, figsize=(12, 7))
plt.clf()
plt.plot(X_dist, f)
plt.ylim(0, 0.02)
plt.xlim(0, 400)
plt.title('Microtubule ageing', fontsize=14)
plt.xlabel('Microtubule age [s]', fontsize=12)
plt.ylabel('Catastrophe frequency [$s^{-1}$]', fontsize = 12)
if simPa.record_data:
filename = file_figure + '_fig' + str(int(num_fig))
plt.savefig(filename+'.eps', format='eps', dpi=1000)
plt.savefig(filename+'.png', format='png', dpi=200)
plt.show()
return C_freq
def fig_dist_fit(simPa,
file_figure,
num_fig,
Cum_dist):
"""
Args:
-------
simPa: parameter set
Simulation parameters in "ParameterSet" format.
file_figure: str
Folder for storing figures and data.
num_fig: int
Figure number.
Cum_dist:
TODO
"""
fig, ax1 = plt.subplots(figsize=(12, 7))
plt.clf()
if isinstance(Cum_dist, list) and list_dim(Cum_dist) > 1 and list_dim(Cum_dist[0]) > 0:
if isinstance(Cum_dist[0], np.ndarray):
print(list_dim(Cum_dist), ' different cumulative distributions found. ')
else:
print('Error: Input cumulative distributions must be numpy arrays or lists of numpy arrays.' )
elif isinstance(Cum_dist, list) and list_dim(Cum_dist) == 1 and isinstance(Cum_dist[0], np.ndarray):
        pass
elif isinstance(Cum_dist, np.ndarray):
Cum_dist = [Cum_dist] #put numpy array into list
else:
print('Error: Input cumulative distributions must be numpy arrays or lists of numpy arrays.' )
x = Cum_dist[0]
x_fit = np.linspace(0,x[-1],1000)
y = np.linspace(0,1,len(x))
# Fit cumulative distribution to the Gamma function
popt1, pcov1 = curve_fit(gamma_cdf,x, y, p0=(1, 1e-2))
y1 = gamma_cdf(x_fit, *popt1)
print(popt1)
print(pcov1)
# Fit cumulative distribution to an exponential
popt2, pcov2 = curve_fit(exp_cdf, x, y, p0=(1e-2))
y2 = exp_cdf(x_fit, *popt2)
if list_dim(Cum_dist) > 1:
c_range = 1/(list_dim(Cum_dist)-1)
else:
c_range = 1
    for i, dist in enumerate(Cum_dist):
        plt.step(dist, 1/(len(dist)-1) * np.arange(0, len(dist), 1),
                 where='post', color=(0.95-0.7*(i)*c_range, 0.1, 0.1 + 0.8*(i)*c_range), linewidth=1.5, label='Simulation')
plt.plot(x_fit, y1, 'k--', linewidth=1.5, label='Gamma fit')
plt.plot(x_fit, y2, 'k:', linewidth=1.5, label='Exponential fit')
plt.title('Microtubule lifetime distribution', fontsize=14)
plt.xlabel('time [s]')
plt.ylabel('Cumulative fraction')
plt.ax = fig.add_subplot(111)
figtext = ['Simulation parameters:']
figtext.append('$v_{g} = %.1f$ $nm/s$' %float(simPa.growth_rate_one*(simPa.dL_dimer*1000)))
figtext.append('$k_{hyd} = %.2f$ $s^{-1}$' %(simPa.kBC))
if simPa.D_tip_time:
figtext.append('$k_{ageing} = %.3f$ $s^{-1}$' %(simPa.D_tip_rate_T))
elif simPa.D_tip_length:
figtext.append('$k_{ageing} = %.3f$ $s^{-1}$' %(simPa.D_tip_rate_L))
figtext.append('Gamma fit parameters:')
figtext.append('$steps = %.2f$' %popt1[0])
figtext.append('$rate = %.3f$ $s^{-1}$' %popt1[1])
figDX = 0.045
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: <NAME>
Atmospheric and scattering correction
"""
import gc
import os.path
from time import monotonic
import numpy as np
import sklearn.linear_model
import sklearn.cluster
#import statsmodels.multivariate.pca
from scipy.interpolate import PchipInterpolator
from scipy.signal import hilbert, savgol_filter, tukey
from scipy.io import loadmat, savemat
import matplotlib.pyplot as plt
from . import baseline
def load_reference(wn, what=None, matfilename=None):
"""
Loads and normalizes a spectrum from a Matlab file, interpolating at the given points.
The reference is assumed to cover the entire range of wavenumbers.
Parameters:
wn: array of wavenumbers at which to get the spectrum
what: A string defining what type of reference to get, corresponding to a file in the
'reference' directory
matfilename: the name of an arbitrary Matlab file to load data from; the data must be
in a matrix called AB, with wavenumbers in the first column.
Returns: spectrum at the points given by wn
"""
if (what is None) == (matfilename is None):
raise ValueError("Either 'what' or 'matfilename' must be specified")
if what is not None:
matfilename = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__),
'reference', what + '.mat'))
ref = loadmat(matfilename)['AB']
# Handle the case of high-to-low since the interpolator requires low-to-high
d = 1 if ref[0,0] < ref[-1,0] else -1
ref = PchipInterpolator(ref[::d,0], ref[::d,1])(wn)
return ref #/ ref.max()
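# Usage sketch ('water' names the reference bundled with the package, as
# used by atmospheric() further down):
#
# wn = np.linspace(800., 4000., 1600)      # wavenumbers in cm-1
# ref = load_reference(wn, what='water')   # reference interpolated onto wn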
def nonnegative(y, fracspectra=.02, fracvalues=.02):
"""
Make a matrix of spectral data nonnegative by shifting all the spectra up by the same computed
amount, followed by setting negative values to 0. The shift is chosen such that at most
fracspectra of the spectra get more than fracvalues of their intensities set to zero.
Parameters:
y: array of intensities for (pixel, wavenumber)
fracspectra: fraction of the spectra that may exceed the clipping limit
fracvalues: maximal fraction of points to clip at 0
Returns: shifted spectra in the same format as y
"""
s = int(fracspectra * y.shape[0])
v = int(fracvalues * y.shape[1])
if s == 0 or v == 0:
return y - np.minimum(y.min(), 0)
if s >= y.shape[0] or v >= y.shape[1]:
return np.maximum(y, 0)
yp = np.partition(y, v, axis=1)[:,v]
a = np.partition(yp, s)[s]
return np.maximum(y - a if a < 0 else y, 0)
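# Worked example (sketch): for a 2x3 matrix with fracspectra=0.5 and
# fracvalues=0.5 we get s = 1 and v = 1, so yp holds each spectrum's
# second-smallest value ([-1, -1] here) and a = -1; every spectrum is then
# shifted up by 1 and clipped at zero.
#
# y = np.array([[-2., -1., 1.],
#               [-3., -1., 4.]])
# nonnegative(y, fracspectra=0.5, fracvalues=0.5)
# # -> array([[0., 0., 2.],
# #           [0., 0., 5.]])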
def find_wn_ranges(wn, ranges):
"""
Find indexes corresponding to the beginning and end of a list of ranges of wavenumbers. The
wavenumbers have to be sorted in either direction.
Parameters:
wn: array of wavenumbers
ranges: numpy array of shape (n, 2) with desired wavenumber ranges in order [low,high]
Returns: numpy array of shape (n, 2) with indexes of the wavenumbers delimiting those ranges
"""
if isinstance(ranges, list):
ranges = np.array(ranges)
if(wn[0] < wn[-1]):
return np.stack((np.searchsorted(wn, ranges[:,0]),
np.searchsorted(wn, ranges[:,1], 'right')), 1)
return len(wn) - np.stack((np.searchsorted(wn[::-1], ranges[:,1], 'right'),
np.searchsorted(wn[::-1], ranges[:,0])), 1)
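# Usage sketch: indexes delimiting the 1300-2100 cm-1 region on an ascending
# axis with 2 cm-1 spacing.
#
# wn = np.arange(900., 4000., 2.)
# find_wn_ranges(wn, [[1300, 2100]])
# # -> array([[200, 601]]): wn[200] = 1300 is the first point >= 1300 and
# #    601 is one past the last point <= 2100.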
def cut_wn(wn, y, ranges):
"""
Cut a set of spectra, leaving only the given wavenumber range(s).
Parameters:
wn: array of wavenumbers, sorted in either direction
y: array of spectra, shape (..., wavenumber)
ranges: list or numpy array of shape (..., 2) with desired wavenumber ranges in pairs (low, high)
Returns: (wavenumbers, spectra) with data in the given wavenumber ranges
"""
if isinstance(ranges, list):
ranges = np.array(ranges)
inrange = lambda w: ((w >= ranges[...,0]) & (w <= ranges[...,1])).any()
ix = np.array([inrange(w) for w in wn])
return wn[ix], y[...,ix]
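# Usage sketch: keep only the fingerprint and CH-stretch regions of spectra
# y with shape (..., len(wn)).
#
# wn_cut, y_cut = cut_wn(wn, y, [[1000, 1800], [2800, 3000]])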
def atmospheric(wn, y, atm=None, cut_co2 = True, extra_iters=5, extra_factor=0.25,
smooth_win=9, progressCallback = None):
"""
Apply atmospheric correction to multiple spectra, subtracting as much of the atmospheric
spectrum as needed to minimize the sum of squares of differences between consecutive points
in the corrected spectra. Each supplied range of wavenumbers is corrected separately.
Parameters:
wn: array of wavenumbers, sorted in either direction
y: array of spectra in the order (pixel, wavenumber), or just one spectrum
atm: atmospheric spectrum; if None, load the default
cut_co2: replace the CO2 region with a neatly fitted spline
extra_iters: number of iterations of subtraction of a locally reshaped atmospheric spectrum
(needed if the relative peak intensities are not always as in the atmospheric reference)
extra_factor: how much of the reshaped atmospheric spectrum to remove per iteration
smooth_win: window size (in cm-1) for smoothing of the spectrum in the atm regions
progressCallback(int a, int b): callback function called to indicate that the processing
is complete to a fraction a/b.
Returns:
tuple of (spectra after correction, mean relative correction per gas;
a length-2 array ordered [H2O, CO2])
"""
squeeze = False
yorig = y
if y.ndim == 1:
y = y[None,:]
squeeze = True
else:
y = y.copy()
if atm is None or (isinstance(atm, str) and atm == ''):
atm = load_reference(wn, what='water')
elif isinstance(atm, str):
atm = load_reference(wn, matfilename=atm)
else:
atm = atm.copy()
# ranges: numpy array (n, 2) of n non-overlapping wavenumber ranges (typically for H2O only), or None
# extra_winwidth: width of the window (in cm-1) used to locally reshape the atm spectrum
ranges = [[1300, 2100], [3410, 3850], [2190, 2480]]
extra_winwidth = [30, 150, 40]
corr_ranges = 2 if cut_co2 else 3
# ranges = ranges[:2]
# extra_winwidth = extra_winwidth[:2]
if ranges is None:
ranges = np.array([[0, len(wn)]])
else:
ranges = find_wn_ranges(wn, ranges)
for i in range(corr_ranges):
p, q = ranges[i]
if q - p < 2: continue
atm[p:q] -= baseline.straight(wn[p:q], atm[p:q])
savgolwin = 1 + 2 * int(smooth_win * (len(wn) - 1) / np.abs(wn[0] - wn[-1]))
if progressCallback:
progressA = 0
progressB = 1 + corr_ranges * (extra_iters + (1 if savgolwin > 1 else 0))
progressCallback(progressA, progressB)
dh = atm[:-1] - atm[1:]
dy = y[:,:-1] - y[:,1:]
dh2 = np.cumsum(dh * dh)
dhdy = np.cumsum(dy * dh, 1)
az = np.zeros((len(y), corr_ranges))
for i in range(corr_ranges):
p, q = ranges[i]
if q - p < 2: continue
r = q-2 if q <= len(wn) else q-1
az[:, i] = ((dhdy[:,r] - dhdy[:,p-1]) / (dh2[r] - dh2[p-1])) if p > 0 else (dhdy[:,r] / dh2[r])
y[:, p:q] -= az[:, i, None] @ atm[None, p:q]
if progressCallback:
progressA += 1
progressCallback(progressA, progressB)
for pss in range(extra_iters):
for i in range(corr_ranges):
p, q = ranges[i]
if q - p < 2: continue
window = 2 * int(extra_winwidth[i] * (len(wn) - 1) / np.abs(wn[0] - wn[-1]))
winh = (window+1)//2
dy = y[:,:-1] - y[:,1:]
dhdy = np.cumsum(dy * dh, 1)
aa = np.zeros_like(y)
aa[:,1:winh+1] = dhdy[:,1:window:2] / np.maximum(dh2[1:window:2], 1e-8)
aa[:,1+winh:-winh-1] = (dhdy[:,window:-1] - dhdy[:,:-1-window]) / np.maximum(dh2[window:-1] - dh2[:-1-window], 1e-8)
aa[:,-winh-1:-1] = (dhdy[:,-1:] - dhdy[:,-1-window:-1:2]) / np.maximum(dh2[-1] - dh2[-1-window:-1:2], 1e-8)
aa[:, 0] = aa[:, 1]
aa[:, -1] = aa[:, -2]
aa = savgol_filter(aa, window + 1, 3, axis=1)
y[:, p:q] -= extra_factor * aa[:, p:q] * atm[p:q]
if progressCallback:
progressA += 1
progressCallback(progressA, progressB)
if savgolwin > 1:
for i in range(corr_ranges):
p, q = ranges[i]
if q - p < savgolwin: continue
y[:, p:q] = savgol_filter(y[:, p:q], savgolwin, 3, axis=1)
if progressCallback:
progressA += 1
progressCallback(progressA, progressB)
if cut_co2:
rng = np.array([[2190, 2260], [2410, 2480]])
rngm = rng.mean(1)
rngd = rngm[1] - rngm[0]
cr = find_wn_ranges(wn, rng).flatten()
if cr[1] - cr[0] > 2 and cr[3] - cr[2] > 2:
a = np.empty((4, len(y)))
a[0:2,:] = np.polyfit((wn[cr[0]:cr[1]]-rngm[0])/rngd, y[:,cr[0]:cr[1]].T, deg=1)
a[2:4,:] = np.polyfit((wn[cr[2]:cr[3]]-rngm[1])/rngd, y[:,cr[2]:cr[3]].T, deg=1)
P,Q = find_wn_ranges(wn, rngm[None,:])[0]
t = np.interp(wn[P:Q], wn[[Q,P] if wn[0] > wn[-1] else [P,Q]], [1, 0])
tt = np.array([-t**3+t**2, -2*t**3+3*t**2, -t**3+2*t**2-t, 2*t**3-3*t**2+1])
pt = a.T @ tt
y[:, P:Q] += (pt - y[:, P:Q]) * tukey(len(t), .3)
corrs = np.zeros(2)
ncorrs = np.zeros_like(corrs)
for i in range(len(ranges)):
p, q = ranges[i]
if q - p < 2: continue
corr = np.abs(yorig[:, p:q] - y[:, p:q]).sum(1) / np.maximum(np.abs(yorig[:, p:q]), np.abs(y[:, p:q])).sum(1)
gas = int(i > 1)
corrs[gas] += corr.mean()
ncorrs[gas] += 1
if ncorrs[0] > 1:
corrs[0] = corrs[0] / ncorrs[0]
return (y.squeeze() if squeeze else y), corrs
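# Usage sketch (spectra as a (pixel, wavenumber) array, default bundled
# water reference):
#
# corrected, gas_corr = atmospheric(wn, spectra)
# # gas_corr[0]: mean relative change applied in the H2O regions,
# # gas_corr[1]: mean relative change applied in the CO2 region.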
def kkre(wn, ref):
wn2 = wn ** 2.
wa = wn * ref
kk = np.empty_like(wn)
for i in range(len(wn)):
with np.errstate(divide='ignore', invalid='ignore'):
fg = wa / (wn2 - wn[i] ** 2.)
if i == 0 or i == len(wn) - 1:
fg[i] = 0
else:
fg[i] = (fg[i-1] + fg[i+1]) / 2
kk[i] = 2/np.pi * np.trapz(x=wn, y=fg)
def setSelectedPanel(self, panel: Panel):
self.model.set_selected_panel(panel)
selectedPanel = property(getSelectedPanel, setSelectedPanel)
@Slot(name='pageLeft')
def pageLeft(self):
span = np.diff(self.viewRange())[0]
self.translateBy(-span)
@Slot(name='shiftRight')
def shiftRight(self):
span = np.diff(self.viewRange())[0]
shift = span / 10
self.translateBy(shift)
@Slot(name='shiftLeft')
def shiftLeft(self):
vb = self.selectedPanel.selected_view.renderer.vb
x_min, x_max = vb.viewRange()[0]
padding = vb.suggestPadding(pg.ViewBox.XAxis)
span = x_max - x_min
shift = span / 10
if x_min < 0:
return
elif x_min - shift < -padding:
shift = max(x_min, padding)
self.translateBy(-shift)
@Slot(name='goToBeginning')
def goToBeginning(self):
x_min, x_max = self.viewRange()[0]
padding = self.selectedPanel.selected_view.renderer.vb.suggestPadding(1)
self.translateBy(-x_min - padding)
@Slot(name='goToEnd')
def goToEnd(self):
x_min, x_max = self.viewRange()[0]
view = self.selectedView
if view is None:
return
track = view.track
end_time = track.duration / track.fs
self.translateBy(end_time - x_max)
@Slot(name='zoomFit')
def zoomFit(self):
view = self.selectedView
if view is None:
return
track = view.track
max_t = track.duration / track.fs
span = np.diff(view.renderer.vb.viewRange()[0])[0]
self.scaleBy(max_t / span)
self.goToBeginning()
@Slot(name='zoomToMatch')
def zoomToMatch(self):
"""
where each pixel represents exactly one sample at the
highest-available sampling-frequency
:return:
"""
view = self.selectedPanel.selected_view
if view is None:
return
vb = view.renderer.vb
pixels = vb.screenGeometry().width()
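# One sample per pixel means the visible span should cover pixels / fs
# seconds, so scaling the current span by mag = (pixels / fs) / span
# reaches that target (e.g. 800 px at fs = 16000 Hz -> 0.05 s span).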
mag_span = pixels / self.selectedTrack.fs
span = np.diff(self.viewRange())[0]
mag = mag_span / span
self.scaleBy(mag)
@Slot(name='zoomIn')
def zoomIn(self):
view = self.selectedPanel.selected_view
if view is None:
return
vb = view.renderer.vb
x_range = np.diff(vb.viewRange()[0])
minXRange = vb.getState()['limits']['xRange'][0]
zoom = 0.9
if minXRange / x_range < zoom:
pass
elif zoom < minXRange / x_range <= 1.0:
zoom = minXRange / x_range
else:
return
self.scaleBy(zoom)
@Slot(name='zoomOut')
def zoomOut(self):
self.scaleBy(1.1)
@Slot(name='increaseSize')
def increaseSize(self):
self.selected_frame.increaseSize()
@Slot(name='decreaseSize')
def decreaseSize(self):
self.selected_frame.decreaseSize()
@Slot(name='showInfoDialog')
def showInfoDialog(self):
info_dialog = InfoDialog(str(self.selectedTrack))
info_dialog.exec_()
@Slot(name='showProcessorDialog')
def showProcessorDialog(self):
processor = self.processor_action[self.sender()]
processing_dialog = ProcessingDialog(self, processor)
processing_dialog.show()
@Slot(tuple, name='finishedProcessing')
def insert_processed_tracks(self, new_tracks: List[processing.Tracks]):
for new_track in new_tracks:
self.getSelectedDisplayPanel().createViewWithTrack(new_track)
def status(self, msg: str, timeout: int=3000):
self.statusBar().showMessage(msg, timeout)
def joinGroup(self, view):
group = self.groups[id(view.track)]
group.join(view)
def changeSync(self):
self.synchronized = not self.synchronized
self.reference_plot = self.selectedDisplayPanel.pw.main_vb
self.applySync()
def applySync(self):
if self.synchronized:
self.synchronize()
else:
self.desynchronize()
def synchronize(self):
self.reference_plot = self.selectedDisplayPanel.pw.main_vb
assert isinstance(self.reference_plot, pg.ViewBox)
x_min, x_max = self.reference_plot.viewRange()[0]
for frame in self.frames:
if frame.displayPanel.pw.main_vb is self.reference_plot:
continue
frame.displayPanel.pw.main_vb.setXLink(self.reference_plot)
if frame.displayPanel.panel.selected_view:
frame.displayPanel.panel.selected_view.renderer.vb.setXRange(x_min, x_max, padding=0)
def desynchronize(self):
self.reference_plot = None
for frame in self.frames:
frame.displayPanel.pw.main_vb.setXLink(None)
def toggleXAxis(self):
self.application.config['show_x-axis_label'] = not self.application.config['show_x-axis_label']
for frame in self.frames:
frame.displayPanel.pw.axis_bottom.showLabel(self.application.config['show_x-axis_label'])
def createNewPanel(self, pos=None):
frame = Frame(main_window=self)
w = DisplayPanel(frame=frame)
w.pw.setAxesWidths(self.axis_width)
self.queryAxesWidths.connect(w.pw.updateWidestAxis)
self.setAxesWidth.connect(w.pw.setAxesWidths)
self.moveSplitterPosition.connect(w.setSplitterPosition)
self.setSplitter.connect(w.table_splitter.setSizes_)
self.setColWidths.connect(w.view_table.setColumnWidths)
self.queryColWidths.connect(w.view_table.calcColumnWidths)
w.table_splitter.setSizes([1, w.view_table.viewportSizeHint().width()])
frame.layout.addWidget(w)
frame.displayPanel = w
if pos is not None:
insert_index = pos
elif self.selected_frame:
insert_index = self.frames.index(self.selected_frame) + 1
else:
insert_index = None
panel = self.model.new_panel(pos=insert_index)
w.loadPanel(panel)
self.addFrame(frame, insert_index)
self.applySync()
def delItem(self):
if self.selected_frame is None:
logging.debug('no frame is selected for deletion')
return
remove_index = self.frames.index(self.selected_frame)
self.model.remove_panel(remove_index)
self.removeFrame(self.selected_frame)
if not self.frames:
self.selected_frame = None
self.reference_plot = None
self.guiAddPanel()
self.selectFrame(self.frames[-1])
elif remove_index == len(self.frames):
self.selectFrame(self.frames[-1])
else:
self.selectFrame(self.frames[remove_index])
self.applySync()
@Slot(int, name='viewMoved')
def viewMoved(self, panel_index):
view_to_add = self.model.panels[panel_index].views[-1]
self.frames[panel_index].displayPanel.view_table.addView(view_to_add,
setColor=False)
def addFrame(self, frame: Frame, index=None):
if index is None:
index = len(self.frames)
self.frames.insert(index, frame)
self.scrollAreaWidgetContents.layout.insertWidget(index, frame)
self.updateFrames()
def removeFrame(self, frame_to_remove: Frame):
if frame_to_remove.displayPanel.pw.main_vb is self.reference_plot:
self.reference_plot = None
self.frames.remove(frame_to_remove)
self.scrollAreaWidgetContents.layout.removeWidget(frame_to_remove)
frame_to_remove.deleteLater()
self.updateFrames()
def updateFrames(self):
self.scrollArea.updateGeometry()
for panel, frame in zip(self.model.panels, self.frames):
frame.displayPanel.handle.updateLabel()
assert frame.displayPanel.panel == panel
def swapFrames(self, positions: Tuple[int, int]):
self.scrollAreaWidgetContents.swapWidgets(positions)
self.frames[positions[0]], self.frames[positions[1]] = \
self.frames[positions[1]], self.frames[positions[0]]
self.model.panels[positions[0]], self.model.panels[positions[1]] =\
self.model.panels[positions[1]], self.model.panels[positions[0]]
self.updateFrames()
@Slot(list, name='determineColumnWidths')
def determineColumnWidths(self, widths: List[int]):
if not self.all_column_widths:
self.all_column_widths = [{self.sender(): width} for width in widths]
else:
for index, width in enumerate(widths):
self.all_column_widths[index][self.sender()] = width
self.column_width_hint = [max(column.values())
for column in self.all_column_widths]
self.setColWidths.emit(self.column_width_hint)
self.moveSplitterPosition.emit()
@Slot(name='moveUp')
def moveUp(self):
index = self.frames.index(self.selected_frame)
if index == 0:
return
self.swapFrames((index, index - 1))
@Slot(name='moveDown')
def moveDown(self):
index = self.frames.index(self.selected_frame)
if index == len(self.frames) - 1:
return
self.swapFrames((index, index + 1))
@Slot(name='selectNext')
def selectNext(self):
index = self.frames.index(self.selected_frame)
if index == len(self.frames) - 1:
return
else:
self.selectFrame(self.frames[index + 1])
@Slot(name='selectPrevious')
def selectPrevious(self):
index = self.frames.index(self.selected_frame)
if index == 0:
return
else:
self.selectFrame(self.frames[index - 1])
@Slot(QtWidgets.QFrame, name='selectFrame')
def selectFrame(self, frame_to_select: Frame):
assert isinstance(frame_to_select, Frame)
assert frame_to_select in self.frames
if self.selected_frame is not None:
self.selected_frame.resetStyle()
self.selected_frame = frame_to_select
self.selected_frame.setFocus(QtCore.Qt.ShortcutFocusReason)
self.selected_frame.setStyleSheet("""
Frame {
border: 3px solid red;
}
""")
index = self.frames.index(self.selected_frame)
self.model.set_selected_panel(self.model.panels[index])
if self.synchronized and self.reference_plot is None:
self.reference_plot = self.selectedDisplayPanel.pw.main_vb
self.evalTrackMenu()
selected_frame_index = self.frames.index(frame_to_select)
selected_panel_index = self.model.panels.index(self.selectedPanel)
assert selected_frame_index == selected_panel_index
@Slot(QtWidgets.QFrame, name='frameToMove')
def frameToMove(self, frame_to_move: Frame):
self.from_index = self.frames.index(frame_to_move)
@Slot(QtWidgets.QFrame, name='whereToInsert')
def whereToInsert(self, insert_here: Frame):
self.to_index = self.frames.index(insert_here)
if self.to_index == self.from_index:
self.from_index = self.to_index = None
return
self.moveFrame()
def moveFrame(self):
if self.to_index is None or self.from_index is None:
logging.debug('To and/or From index not set properly')
return
frame = self.frames[self.from_index]
self.scrollAreaWidgetContents.layout.removeWidget(frame)
self.frames.insert(self.to_index, self.frames.pop(self.from_index))
self.model.move_panel(self.to_index, self.from_index)
self.scrollAreaWidgetContents.layout.insertWidget(self.to_index,
frame)
self.selectFrame(self.frames[self.to_index])
self.updateFrames()
# Resetting moving parameters
self.from_index = self.to_index = None
@Slot(name='moveToEnd')
def moveToEnd(self):
self.frameToMove(self.selected_frame)
self.to_index = len(self.frames) - 1
self.moveFrame()
@Slot(name='checkAxesWidths')
def checkAxesWidths(self):
widths = [axis.preferredWidth()
for frame in self.frames
for axis in frame.displayPanel.pw.axes.values()]
if not widths:
return
axis_width = max(widths)
if axis_width != self.axis_width:
self.axis_width = axis_width
self.setAxesWidth.emit(self.axis_width)
@staticmethod
def showAbout():
about_box = About()
about_box.exec_()
def createHelpWindow(self):
# http://www.walletfox.com/course/qhelpengineexample.php
help_path = (Path(__file__).parent / 'TimeView.qhc').resolve()
assert help_path.exists()
help_engine = QtHelp.QHelpEngine(str(help_path))
help_engine.setupData()
tab_widget = QtWidgets.QTabWidget()
tab_widget.setMaximumWidth(400)
tab_widget.addTab(help_engine.contentWidget(), "Contents")
tab_widget.addTab(help_engine.indexWidget(), "Index")
text_viewer = HelpBrowser(help_engine)
url = "qthelp://org.sphinx.timeview.1.0/doc/index.html"
text_viewer.setSource(QtCore.QUrl(url))
help_engine.contentWidget()\
.linkActivated['QUrl'].connect(text_viewer.setSource)
help_engine.indexWidget()\
.linkActivated['QUrl', str].connect(text_viewer.setSource)
horiz_splitter = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
horiz_splitter.insertWidget(0, tab_widget)
horiz_splitter.insertWidget(1, text_viewer)
help_window = QtWidgets.QDockWidget('Help', self)
help_window.setWidget(horiz_splitter)
help_window.hide()
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, help_window)
return help_window
@Slot(name='guiAddView')
def guiAddView(self,
file_name: Union[str, List, Path, None]=None,
renderer: Optional[str]=None,
**kwargs):
if file_name is None:
file_name, _ =\
QtWidgets.QFileDialog.getOpenFileNames(self,
"Add Track to Panel",
self.application.config['working_directory'],
"Track and X/EDF Files (*.wav *.lab *.tmv *.xdf *.edf);;\
All Files (*)",
options=QtWidgets.QFileDialog.Options())
if isinstance(file_name, str):
# panel_index = self.model.panels.index(self.selectedPanel)
self.application.add_view_from_file(Path(file_name))
self.application.config['working_directory'] = str(Path(file_name).parent)
elif isinstance(file_name, List):
if len(file_name):
for f in file_name:
#self.guiAddPanel() # it's difficult to guess what the user really wants
self.application.add_view_from_file(Path(f))
self.application.config['working_directory'] = str(Path(f).parent)
else:
raise TypeError('file_name must be a str, a list of str, or None')
@Slot(name='guiSaveView')
def guiSaveView(self):
"""identifies the selected view and removes it"""
view = self.selectedView
if view is None:
return
track = view.track
file_name, _ = \
QtWidgets.QFileDialog.getSaveFileName(self,
"Save Track",
str(Path(self.application.config['working_directory']) / track.path.name),
f"Files (*{track.default_suffix})")
if file_name:
track.write(file_name)
track.path = Path(file_name)
# TODO: update view table to show new name
self.application.config['working_directory'] = str(Path(file_name).parent)
@Slot(name='guiRevertView')
def guiRevertView(self):
"""identifies the selected view and reverts to contents on disk"""
view = self.selectedView
if view is None:
return
reply = QtWidgets.QMessageBox.question(self,
'Message',
'Are you sure you want to revert to the contents on disk? All changes since loading will be lost',
QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.Cancel)
if reply == QtWidgets.QMessageBox.Yes:
new_track = view.track.read(view.track.path)
view.track = new_track
view.renderer.track = new_track # TODO: change to render property that pulls the track from the view
view.renderer.reload()
@Slot(name='guiDelView')
def guiDelView(self):
"""identifies the selected view and removes it"""
if self.selectedView is None:
return
view_to_remove = self.selectedView
self.selectedDisplayPanel.removeViewFromChildren(view_to_remove)
self.selectedDisplayPanel.delViewFromModel(view_to_remove)
self.evalTrackMenu()
def showRenderDialog(self):
renderer = self.selectedView.renderer
if not renderer.parameters:
logger.info('No parameters to modify for given renderer')
return
render_dialog = RenderDialog(self, renderer)
render_dialog.exec_()
if not render_dialog.result(): # undo changes if cancel is pressed
return
def setTrackMenuStatus(self, enabled):
ignore_actions = ["New Partition", "Open"] # TODO: hate this...
for action in self.track_menu.actions():
if any([ignore_str in action.text() for ignore_str in ignore_actions]):
continue
else:
action.setEnabled(enabled)
def evalTrackMenu(self):
self.setTrackMenuStatus(bool(self.selectedPanel.views))
class TimeView(object): # Application - here's still the best place for it methinks
def __init__(self):
start = timer()
sys.argv[0] = 'TimeView' # to override Application menu on OSX
QtCore.qInstallMessageHandler(self._log_handler)
QtWidgets.QApplication.setDesktopSettingsAware(False)
self.qtapp = qtapp = QtWidgets.QApplication(sys.argv)
qtapp.setStyle("fusion")
qtapp.setApplicationName("TimeView")
qtapp.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
if hasattr(QtCore.Qt, 'AA_UseHighDpiPixmaps'):
qtapp.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps)
self.config = {'working_directory': str(Path.home()),
'panel_height': 300,
'show_x-axis_label': True}
try:
with open(CONFIG_PATH) as file:
self.config.update(json.load(file))
except IOError:
logging.debug('cannot find saved configuration, using default configuration')
self.viewer = Viewer(self)
# audio player here?
if not __debug__:
sys.excepthook = self._excepthook
finish = timer()
logging.debug(f'complete startup time is {finish - start:.3f} seconds')
@staticmethod
def _log_handler(msg_type, msg_log_context, msg_string):
if msg_type == 1:
if re.match("QGridLayoutEngine::addItem: Cell \\(\\d+, \\d+\\) already taken", msg_string):
return
logger.warning(msg_string)
elif msg_type == 2:
logger.critical(msg_string)
elif msg_type == 3:
logger.error(msg_string)
elif msg_type == 4:
logger.info(msg_string)
elif msg_type == 0:
logger.debug(msg_string)
else:
logger.warning(f'received unknown message type from qt system with contents {msg_string}')
def _excepthook(self, exc_type, exc_value, exc_traceback):
logging.exception('Uncaught Exception', exc_info=(exc_type, exc_value, exc_traceback))
from .dialogs import Bug
bug_box = Bug(self.qtapp, exc_type, exc_value, exc_traceback)
bug_box.exec_()
def start(self):
self.viewer.show()
self._exit(self.qtapp.exec_())
def _exit(self, status):
with open(CONFIG_PATH, 'w') as file:
json.dump(self.config, file)
from __future__ import division # the result of the division will be always a float
from optparse import OptionParser
import os, gc
from copy import deepcopy
from config.latex_labels import variables_latex, measurements_latex, \
met_systematics_latex, b_tag_bins_latex, fit_variables_latex
from config.variable_binning import bin_edges, variable_bins_ROOT, fit_variable_bin_edges
from config import XSectionConfig
from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists
from tools.hist_utilities import value_error_tuplelist_to_hist, \
value_tuplelist_to_hist, value_errors_tuplelist_to_graph, graph_to_value_errors_tuplelist
from math import sqrt
# rootpy & matplotlib
from ROOT import kRed, kGreen, kMagenta, kBlue, kBlack
from tools.ROOT_utils import set_root_defaults
import matplotlib as mpl
from tools.plotting import get_best_max_y
mpl.use( 'agg' )
import rootpy.plotting.root2matplotlib as rplt
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib.ticker import MultipleLocator
from config import CMS
from matplotlib import rc
rc( 'font', **CMS.font )
rc( 'text', usetex = True )
def read_xsection_measurement_results( category, channel ):
global path_to_JSON, variable, k_values, met_type
filename = ''
if category in met_uncertainties and variable == 'HT':
filename = path_to_JSON + '/xsection_measurement_results/' + channel + '/kv' + str( k_values[channel] ) + '/central/normalised_xsection_' + met_type + '.txt'
else:
filename = path_to_JSON + '/xsection_measurement_results/' + channel + '/kv' + str( k_values[channel] ) + '/' + category + '/normalised_xsection_' + met_type + '.txt'
if channel == 'combined':
filename = filename.replace( 'kv' + str( k_values[channel] ), '' )
normalised_xsection_unfolded = read_data_from_JSON( filename )
h_normalised_xsection = value_error_tuplelist_to_hist( normalised_xsection_unfolded['TTJet_measured'], bin_edges[variable] )
h_normalised_xsection_unfolded = value_error_tuplelist_to_hist( normalised_xsection_unfolded['TTJet_unfolded'], bin_edges[variable] )
histograms_normalised_xsection_different_generators = {'measured':h_normalised_xsection,
'unfolded':h_normalised_xsection_unfolded}
histograms_normalised_xsection_systematics_shifts = {'measured':h_normalised_xsection,
'unfolded':h_normalised_xsection_unfolded}
if category == 'central':
# true distributions
h_normalised_xsection_MADGRAPH = value_error_tuplelist_to_hist( normalised_xsection_unfolded['MADGRAPH'], bin_edges[variable] )
h_normalised_xsection_MADGRAPH_ptreweight = value_error_tuplelist_to_hist( normalised_xsection_unfolded['MADGRAPH_ptreweight'], bin_edges[variable] )
h_normalised_xsection_POWHEG_PYTHIA = value_error_tuplelist_to_hist( normalised_xsection_unfolded['POWHEG_PYTHIA'], bin_edges[variable] )
h_normalised_xsection_POWHEG_HERWIG = value_error_tuplelist_to_hist( normalised_xsection_unfolded['POWHEG_HERWIG'], bin_edges[variable] )
h_normalised_xsection_MCATNLO = value_error_tuplelist_to_hist( normalised_xsection_unfolded['MCATNLO'], bin_edges[variable] )
h_normalised_xsection_matchingup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['matchingup'], bin_edges[variable] )
h_normalised_xsection_matchingdown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['matchingdown'], bin_edges[variable] )
h_normalised_xsection_scaleup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaleup'], bin_edges[variable] )
h_normalised_xsection_scaledown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaledown'], bin_edges[variable] )
histograms_normalised_xsection_different_generators.update( {'MADGRAPH':h_normalised_xsection_MADGRAPH,
'MADGRAPH_ptreweight':h_normalised_xsection_MADGRAPH_ptreweight,
'POWHEG_PYTHIA':h_normalised_xsection_POWHEG_PYTHIA,
'POWHEG_HERWIG':h_normalised_xsection_POWHEG_HERWIG,
'MCATNLO':h_normalised_xsection_MCATNLO} )
histograms_normalised_xsection_systematics_shifts.update( {'MADGRAPH':h_normalised_xsection_MADGRAPH,
'MADGRAPH_ptreweight':h_normalised_xsection_MADGRAPH_ptreweight,
'matchingdown': h_normalised_xsection_matchingdown,
'matchingup': h_normalised_xsection_matchingup,
'scaledown': h_normalised_xsection_scaledown,
'scaleup': h_normalised_xsection_scaleup} )
file_template = path_to_JSON + '/xsection_measurement_results/' + channel + '/kv' + str( k_values[channel] ) + '/' + category + '/normalised_xsection_' + met_type
if channel == 'combined':
file_template = file_template.replace( 'kv' + str( k_values[channel] ), '' )
# normalised_xsection_unfolded_with_errors = read_data_from_JSON( file_template + '_with_errors.txt' )
normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory = read_data_from_JSON( file_template + '_with_systematics_but_without_ttbar_theory_errors.txt' )
normalised_xsection_unfolded_with_errors_with_systematics_but_without_generator = read_data_from_JSON( file_template + '_with_systematics_but_without_generator_errors.txt' )
# a rootpy.Graph with asymmetric errors!
h_normalised_xsection_with_systematics_but_without_ttbar_theory = value_errors_tuplelist_to_graph(
normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory['TTJet_measured'],
bin_edges[variable] )
h_normalised_xsection_with_systematics_but_without_ttbar_theory_unfolded = value_errors_tuplelist_to_graph(
normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory['TTJet_unfolded'],
bin_edges[variable] )
h_normalised_xsection_with_systematics_but_without_generator = value_errors_tuplelist_to_graph(
normalised_xsection_unfolded_with_errors_with_systematics_but_without_generator['TTJet_measured'],
bin_edges[variable] )
h_normalised_xsection_with_systematics_but_without_generator_unfolded = value_errors_tuplelist_to_graph(
normalised_xsection_unfolded_with_errors_with_systematics_but_without_generator['TTJet_unfolded'],
bin_edges[variable] )
histograms_normalised_xsection_different_generators['measured_with_systematics'] = h_normalised_xsection_with_systematics_but_without_ttbar_theory
histograms_normalised_xsection_different_generators['unfolded_with_systematics'] = h_normalised_xsection_with_systematics_but_without_ttbar_theory_unfolded
histograms_normalised_xsection_systematics_shifts['measured_with_systematics'] = h_normalised_xsection_with_systematics_but_without_generator
histograms_normalised_xsection_systematics_shifts['unfolded_with_systematics'] = h_normalised_xsection_with_systematics_but_without_generator_unfolded
return histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts
def read_fit_templates_and_results_as_histograms( category, channel ):
global path_to_JSON, variable, met_type
templates = read_data_from_JSON( path_to_JSON + '/fit_results/' + category + '/templates_' + channel + '_' + met_type + '.txt' )
data_values = read_data_from_JSON( path_to_JSON + '/fit_results/' + category + '/initial_values_' + channel + '_' + met_type + '.txt' )['data']
fit_results = read_data_from_JSON( path_to_JSON + '/fit_results/' + category + '/fit_results_' + channel + '_' + met_type + '.txt' )
fit_variables = templates.keys()
template_histograms = {fit_variable: {} for fit_variable in fit_variables}
fit_results_histograms = {fit_variable: {} for fit_variable in fit_variables}
for bin_i, variable_bin in enumerate( variable_bins_ROOT[variable] ):
for fit_variable in fit_variables:
h_template_data = value_tuplelist_to_hist( templates[fit_variable]['data'][bin_i], fit_variable_bin_edges[fit_variable] )
h_template_ttjet = value_tuplelist_to_hist( templates[fit_variable]['TTJet'][bin_i], fit_variable_bin_edges[fit_variable] )
h_template_singletop = value_tuplelist_to_hist( templates[fit_variable]['SingleTop'][bin_i], fit_variable_bin_edges[fit_variable] )
h_template_VJets = value_tuplelist_to_hist( templates[fit_variable]['V+Jets'][bin_i], fit_variable_bin_edges[fit_variable] )
h_template_QCD = value_tuplelist_to_hist( templates[fit_variable]['QCD'][bin_i], fit_variable_bin_edges[fit_variable] )
template_histograms[fit_variable][variable_bin] = {
'TTJet' : h_template_ttjet,
'SingleTop' : h_template_singletop,
'V+Jets':h_template_VJets,
'QCD':h_template_QCD
}
h_data = h_template_data.Clone()
h_ttjet = h_template_ttjet.Clone()
h_singletop = h_template_singletop.Clone()
h_VJets = h_template_VJets.Clone()
h_QCD = h_template_QCD.Clone()
data_normalisation = data_values[bin_i][0]
n_ttjet = fit_results['TTJet'][bin_i][0]
n_singletop = fit_results['SingleTop'][bin_i][0]
VJets_normalisation = fit_results['V+Jets'][bin_i][0]
QCD_normalisation = fit_results['QCD'][bin_i][0]
h_data.Scale( data_normalisation )
h_ttjet.Scale( n_ttjet )
h_singletop.Scale( n_singletop )
h_VJets.Scale( VJets_normalisation )
h_QCD.Scale( QCD_normalisation )
h_background = h_VJets + h_QCD + h_singletop
for bin_i_data in range( len( h_data ) ):
h_data.SetBinError( bin_i_data + 1, sqrt( h_data.GetBinContent( bin_i_data + 1 ) ) )
fit_results_histograms[fit_variable][variable_bin] = {
'data' : h_data,
'signal' : h_ttjet,
'background' : h_background
}
return template_histograms, fit_results_histograms
def make_template_plots( histograms, category, channel ):
global variable, output_folder
fit_variables = histograms.keys()
for variable_bin in variable_bins_ROOT[variable]:
path = output_folder + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' + category + '/fit_templates/'
make_folder_if_not_exists( path )
for fit_variable in fit_variables:
plotname = path + channel + '_' + fit_variable + '_template_bin_' + variable_bin
# skip if the template plots already exist in all output formats
if all( os.path.isfile( plotname + '.' + output_format ) for output_format in output_formats ):
continue
# plot with matplotlib
h_ttjet = histograms[fit_variable][variable_bin]['TTJet']
h_single_top = histograms[fit_variable][variable_bin]['SingleTop']
h_VJets = histograms[fit_variable][variable_bin]['V+Jets']
h_QCD = histograms[fit_variable][variable_bin]['QCD']
h_ttjet.linecolor = 'red'
h_single_top.linecolor = 'magenta'
h_VJets.linecolor = 'green'
h_QCD.linecolor = 'gray'
h_VJets.linestyle = 'dashed'
h_QCD.linestyle = 'dotted' # currently not working
# bug report: http://trac.sagemath.org/sage_trac/ticket/13834
h_ttjet.linewidth = 5
h_single_top.linewidth = 5
h_VJets.linewidth = 5
h_QCD.linewidth = 5
plt.figure( figsize = ( 16, 16 ), dpi = 200, facecolor = 'white' )
axes = plt.axes()
axes.minorticks_on()
plt.xlabel( fit_variables_latex[fit_variable], CMS.x_axis_title )
plt.ylabel( 'normalised to unit area/(%s)' % get_unit_string(fit_variable), CMS.y_axis_title )
plt.tick_params( **CMS.axis_label_major )
plt.tick_params( **CMS.axis_label_minor )
rplt.hist( h_ttjet, axes = axes, label = 'signal' )
rplt.hist( h_single_top, axes = axes, label = 'Single Top' )
if ( h_VJets.Integral() != 0 ):
rplt.hist( h_VJets, axes = axes, label = 'V+Jets' )
else:
print "WARNING: in %s bin %s, %s category, %s channel, V+Jets template is empty: not plotting." % ( variable, variable_bin, category, channel )
if ( h_QCD.Integral() != 0 ):
rplt.hist( h_QCD, axes = axes, label = 'QCD' )
else:
print "WARNING: in %s bin %s, %s category, %s channel, QCD template is empty: not plotting." % ( variable, variable_bin, category, channel )
y_max = get_best_max_y([h_ttjet, h_single_top, h_VJets, h_QCD])
axes.set_ylim( [0, y_max * 1.1] )
axes.set_xlim( measurement_config.fit_boundaries[fit_variable] )
plt.legend( numpoints = 1, loc = 'upper right', prop = CMS.legend_properties )
plt.title( get_cms_labels( channel ), CMS.title )
plt.tight_layout()
for output_format in output_formats:
plt.savefig( plotname + '.' + output_format )
plt.close()
gc.collect()
def plot_fit_results( histograms, category, channel ):
global variable, b_tag_bin, output_folder
from tools.plotting import Histogram_properties, make_data_mc_comparison_plot
fit_variables = histograms.keys()
for variable_bin in variable_bins_ROOT[variable]:
path = output_folder + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' + category + '/fit_results/'
make_folder_if_not_exists( path )
for fit_variable in fit_variables:
plotname = channel + '_' + fit_variable + '_bin_' + variable_bin
# skip if the fit result plots already exist in all output formats
if all( os.path.isfile( path + plotname + '.' + output_format ) for output_format in output_formats ):
continue
# plot with matplotlib
h_data = histograms[fit_variable][variable_bin]['data']
h_signal = histograms[fit_variable][variable_bin]['signal']
h_background = histograms[fit_variable][variable_bin]['background']
histogram_properties = Histogram_properties()
histogram_properties.name = plotname
histogram_properties.x_axis_title = fit_variables_latex[fit_variable]
histogram_properties.y_axis_title = 'Events/(%s)' % get_unit_string(fit_variable)
histogram_properties.title = get_cms_labels( channel )
histogram_properties.x_limits = measurement_config.fit_boundaries[fit_variable]
make_data_mc_comparison_plot( [h_data, h_background, h_signal],
['data', 'background', 'signal'],
['black', 'green', 'red'], histogram_properties,
save_folder = path, save_as = output_formats )
def get_cms_labels( channel ):
global b_tag_bin
lepton = 'e'
if channel == 'electron':
lepton = 'e + jets'
elif channel == 'muon':
lepton = '$\mu$ + jets'
else:
lepton = 'e, $\mu$ + jets combined'
# channel_label = '%s, $\geq$ 4 jets, %s' % ( lepton, b_tag_bins_latex[b_tag_bin] )
channel_label = lepton
template = 'CMS Preliminary, %.1f fb$^{-1}$ (%d TeV), %s'
label = template % ( measurement_config.new_luminosity / 1000., measurement_config.centre_of_mass_energy, channel_label )
return label
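# Example (sketch, hypothetical config values): new_luminosity = 19700 pb^-1
# at a centre of mass of 8 TeV in the muon channel renders as
# 'CMS Preliminary, 19.7 fb$^{-1}$ (8 TeV), $\mu$ + jets'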
def make_plots( histograms, category, output_folder, histname, show_ratio = True, show_before_unfolding = False ):
global variable, k_values
channel = 'electron'
if 'electron' in histname:
channel = 'electron'
elif 'muon' in histname:
channel = 'muon'
else:
channel = 'combined'
# plot with matplotlib
hist_data = histograms['unfolded']
if category == 'central':
hist_data_with_systematics = histograms['unfolded_with_systematics']
hist_measured = histograms['measured']
hist_data.markersize = 2
hist_data.marker = 'o'
if category == 'central':
hist_data_with_systematics.markersize = 2
hist_data_with_systematics.marker = 'o'
hist_measured.markersize = 2
hist_measured.marker = 'o'
hist_measured.color = 'red'
plt.figure( figsize = CMS.figsize, dpi = CMS.dpi, facecolor = CMS.facecolor )
if show_ratio:
gs = gridspec.GridSpec( 2, 1, height_ratios = [5, 1] )
axes = plt.subplot( gs[0] )
else:
axes = plt.axes()
plt.xlabel( '$%s$ [GeV]' % variables_latex[variable], CMS.x_axis_title )
axes.minorticks_on()
# esmond/cassandra.py
#!/usr/bin/env python
# encoding: utf-8
"""
Cassandra DB interface calls and data encapsulation objects.
esmond schema in json-like notation:
// regular col family
"raw_data" : {
"snmp:router_a:FastPollHC:ifHCInOctets:xe-0_2_0:30000:2012" : {
"1343955624" : // long column name
"16150333739148" // UTF-8 containing JSON for values.
}
}
// supercolumn
"base_rates" : {
"snmp:router_a:FastPollHC:ifHCInOctets:xe-0_2_0:30000:2012" : {
"1343955600" : { // long column name.
"val": "123", // string key, counter type value.
"is_valid" : "2" // zero or positive non-zero.
}
}
}
// supercolumn
"rate_aggregations" : {
"snmp:router_a:FastPollHC:ifHCInOctets:xe-0_2_0:3600000:2012" : {
"1343955600" : { // long column name.
"val": "1234", // string key, counter type.
"30": "38" // key of the 'non-val' column is freq of the base rate.
} // the value of said is the count used in the average.
}
}
// supercolumn
"stat_aggregations" : {
"snmp:router_a:FastPollHC:ifHCInOctets:xe-0_2_0:86400000:2012" : {
"1343955600" : { // long column name.
"min": "0", // string keys, long types.
"max": "484140"
}
}
}
"""
# Standard
import calendar
import datetime
import json
import logging
import os
import pprint
import sys
import time
from collections import OrderedDict
from esmond.util import get_logger
# Third party
from pycassa import PycassaLogger
from pycassa.pool import ConnectionPool, AllServersUnavailable
from pycassa.columnfamily import ColumnFamily, NotFoundException
from pycassa.system_manager import *
from thrift.transport.TTransport import TTransportException
SEEK_BACK_THRESHOLD = 2592000000 # 30 days in ms
KEY_DELIMITER = ":"
AGG_TYPES = ['average', 'min', 'max', 'raw']
class CassandraException(Exception):
"""Common base"""
pass
class ConnectionException(CassandraException):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class CASSANDRA_DB(object):
keyspace = 'esmond'
raw_cf = 'raw_data'
rate_cf = 'base_rates'
agg_cf = 'rate_aggregations'
stat_cf = 'stat_aggregations'
_queue_size = 200
def __init__(self, config, qname=None):
"""
Class contains all the relevant cassandra logic. This includes:
* schema creation,
* connection information/pooling,
* generating the metadata cache of last val/ts information,
* store data/update the rate/aggregation bins,
* and execute queries to return data to the REST interface.
"""
# Configure logging - if a qname has been passed in, hook
# into the persister logger, if not, toss together some fast
# console output for devel/testing.
if qname:
self.log = get_logger("espersistd.%s.cass_db" % qname)
else:
self.log = logging.getLogger('cassandra_db')
self.log.setLevel(logging.DEBUG)
format = logging.Formatter('%(name)s [%(levelname)s] %(message)s')
handle = logging.StreamHandler()
handle.setFormatter(format)
self.log.addHandler(handle)
# Add pycassa driver logging to existing logger.
plog = PycassaLogger()
plog.set_logger_name('%s.pycassa' % self.log.name)
# Debug level is far too noisy, so just hardcode the pycassa
# logger to info level.
plog.set_logger_level('info')
# Connect to cassandra with SystemManager, do a schema check
# and set up schema components if need be.
try:
sysman = SystemManager(config.cassandra_servers[0])
except TTransportException, e:
raise ConnectionException("System Manager can't connect to Cassandra "
"at %s - %s" % (config.cassandra_servers[0], e))
# Blow everything away if we're testing - be aware of this and use
# with care. Currently just being explicitly set in test harness
# code but no longer set as a config file option since there could
# be unfortunate side effects.
if config.db_clear_on_testing:
self.log.info('Dropping keyspace %s' % self.keyspace)
if self.keyspace in sysman.list_keyspaces():
sysman.drop_keyspace(self.keyspace)
time.sleep(3)
# Create keyspace
_schema_modified = False # Track if schema components are created.
if not self.keyspace in sysman.list_keyspaces():
_schema_modified = True
self.log.info('Creating keyspace %s' % self.keyspace)
sysman.create_keyspace(self.keyspace, SIMPLE_STRATEGY,
{'replication_factor': '1'})
time.sleep(3)
# Create column families if they don't already exist.
# If a new column family is added, make sure to set
# _schema_modified = True so it will be propagated.
self.log.info('Checking/creating column families')
# Raw Data CF
if not sysman.get_keyspace_column_families(self.keyspace).has_key(self.raw_cf):
_schema_modified = True
sysman.create_column_family(self.keyspace, self.raw_cf, super=False,
comparator_type=LONG_TYPE,
default_validation_class=UTF8_TYPE,
key_validation_class=UTF8_TYPE)
self.log.info('Created CF: %s' % self.raw_cf)
# Base Rate CF
if not sysman.get_keyspace_column_families(self.keyspace).has_key(self.rate_cf):
_schema_modified = True
sysman.create_column_family(self.keyspace, self.rate_cf, super=True,
comparator_type=LONG_TYPE,
default_validation_class=COUNTER_COLUMN_TYPE,
key_validation_class=UTF8_TYPE)
self.log.info('Created CF: %s' % self.rate_cf)
# Rate aggregation CF
if not sysman.get_keyspace_column_families(self.keyspace).has_key(self.agg_cf):
_schema_modified = True
sysman.create_column_family(self.keyspace, self.agg_cf, super=True,
comparator_type=LONG_TYPE,
default_validation_class=COUNTER_COLUMN_TYPE,
key_validation_class=UTF8_TYPE)
self.log.info('Created CF: %s' % self.agg_cf)
# Stat aggregation CF
if not sysman.get_keyspace_column_families(self.keyspace).has_key(self.stat_cf):
_schema_modified = True
sysman.create_column_family(self.keyspace, self.stat_cf, super=True,
comparator_type=LONG_TYPE,
default_validation_class=LONG_TYPE,
key_validation_class=UTF8_TYPE)
self.log.info('Created CF: %s' % self.stat_cf)
sysman.close()
self.log.info('Schema check done')
# If we just cleared the keyspace/data and there is more than
# one server, pause to let schema propagate to the cluster machines.
if _schema_modified == True:
self.log.info("Waiting for schema to propagate...")
time.sleep(10)
self.log.info("Done")
# Now, set up the ConnectionPool
# Read auth information from config file and set up if need be.
_creds = {}
if config.cassandra_user and config.cassandra_pass:
_creds['username'] = config.cassandra_user
_creds['password'] = config.cassandra_pass
self.log.debug('Connecting with username: %s' % (config.cassandra_user,))
try:
self.log.debug('Opening ConnectionPool')
self.pool = ConnectionPool(self.keyspace,
server_list=config.cassandra_servers,
pool_size=10,
max_overflow=5,
max_retries=10,
timeout=30,
credentials=_creds)
except AllServersUnavailable, e:
raise ConnectionException("Couldn't connect to any Cassandra "
"at %s - %s" % (config.cassandra_servers, e))
self.log.info('Connected to %s' % config.cassandra_servers)
# Define column family connections for the code to use.
self.raw_data = ColumnFamily(self.pool, self.raw_cf).batch(self._queue_size)
self.rates = ColumnFamily(self.pool, self.rate_cf).batch(self._queue_size)
self.aggs = ColumnFamily(self.pool, self.agg_cf).batch(self._queue_size)
self.stat_agg = ColumnFamily(self.pool, self.stat_cf).batch(self._queue_size)
# Used when a cf needs to be selected on the fly.
self.cf_map = {
'raw': self.raw_data,
'rate': self.rates,
'aggs': self.aggs,
'stat': self.stat_agg
}
# Timing - this turns the database call profiling code on and off.
# This is not really meant to be used in production and generally
# just spits out statistics at the end of a run of test data. Mostly
# useful for timing specific database calls to aid in development.
self.profiling = False
if config.db_profile_on_testing and os.environ.get("ESMOND_TESTING", False):
self.profiling = True
self.stats = DatabaseMetrics(profiling=self.profiling)
# Class members
# Just the dict for the metadata cache.
self.metadata_cache = {}
def flush(self):
"""
Calling this will explicity flush all the batches to the
server. Generally only used in testing/dev scripts and not
in production when the batches will be self-flushing.
"""
self.log.debug('Flush called')
self.raw_data.send()
self.rates.send()
self.aggs.send()
self.stat_agg.send()
def close(self):
"""
Explicitly close the connection pool.
"""
self.log.debug('Close/dispose called')
self.pool.dispose()
def set_raw_data(self, raw_data, ttl=None):
"""
Called by the persister. Writes the raw incoming data to the appropriate
column family. The optional ttl argument, when given, is passed through
to the column family insert.
The raw_data arg passed in is an instance of the RawData class defined
in this module.
"""
_kw = {}
if ttl:
_kw['ttl'] = ttl
t = time.time()
# Standard column family update.
self.raw_data.insert(raw_data.get_key(),
{raw_data.ts_to_jstime(): json.dumps(raw_data.val)}, **_kw)
if self.profiling: self.stats.raw_insert(time.time() - t)
def set_metadata(self, k, meta_d):
"""
Just does a simple write to the dict being used as metadata.
"""
self.metadata_cache[k] = meta_d.get_document()
def get_metadata(self, raw_data):
"""
Called by the persister to get the metadata - last value and timestamp -
for a given measurement. If a given value is not found (as in when the
program is initially started for example) it will look in the raw data
as far back as SEEK_BACK_THRESHOLD to find the previous value. If found,
this is seeded to the cache and returned. If not, this is presumed to be
new, and the cache is seeded with the value that is passed in.
The raw_data arg passed in is an instance of the RawData class defined
in this module.
The return value is a Metadata object, also defined in this module.
"""
t = time.time()
meta_d = None
if not self.metadata_cache.has_key(raw_data.get_meta_key()):
# Didn't find a value in the metadata cache. First look
# back through the raw data for SEEK_BACK_THRESHOLD seconds
# to see if we can find the last processed value.
ts_max = raw_data.ts_to_jstime() - 1 # -1ms to look at older vals
ts_min = ts_max - SEEK_BACK_THRESHOLD
ret = self.raw_data._column_family.multiget(
self._get_row_keys(raw_data.path, raw_data.freq,
ts_min, ts_max),
# Note: ts_max and ts_min appear to be reversed here -
# that's because this is a reversed range query.
column_start=ts_max, column_finish=ts_min,
column_count=1, column_reversed=True)
if self.profiling: self.stats.meta_fetch((time.time() - t))
if ret:
# A previous value was found in the raw data, so we can
# seed/return that.
key = ret.keys()[-1]
ts = ret[key].keys()[0]
val = json.loads(ret[key][ts])
meta_d = Metadata(last_update=ts, last_val=val, min_ts=ts, | |
username = TextField(db_column='USERNAME', null=True)
class Meta:
db_table = 'COMMENT'
class Communications(BaseModel):
communication = FloatField(db_column='COMMUNICATION_ID', primary_key=True)
contact = FloatField(db_column='CONTACT_ID', null=True)
created_by = FloatField(db_column='CREATED_BY')
created_date = TextField(db_column='CREATED_DATE')
data_source = TextField(db_column='DATA_SOURCE', null=True)
modified_by = FloatField(db_column='MODIFIED_BY', null=True)
modified_date = TextField(db_column='MODIFIED_DATE', null=True)
notes = TextField(db_column='NOTES', null=True)
note_category = TextField(db_column='NOTE_CATEGORY', null=True)
note_date = TextField(db_column='NOTE_DATE', null=True)
private_comment = TextField(db_column='PRIVATE_COMMENT', null=True)
row_processed = FloatField(db_column='ROW_PROCESSED', null=True)
row_status = TextField(db_column='ROW_STATUS', null=True)
user = FloatField(db_column='USER_ID', null=True)
vessel = FloatField(db_column='VESSEL_ID', null=True)
class Meta:
db_table = 'COMMUNICATIONS'
class DbSync(BaseModel):
db_sync = PrimaryKeyField(db_column='DB_SYNC_ID')
metadata = BlobField(db_column='METADATA', null=True)
status = IntegerField(db_column='STATUS', null=True)
sync_date = TextField(db_column='SYNC_DATE', null=True)
trip = ForeignKeyField(db_column='TRIP_ID', null=True, rel_model=Trips, to_field='trip')
class Meta:
db_table = 'DB_SYNC'
class Debriefings(BaseModel):
debriefing_end = TextField(db_column='DEBRIEFING_END', null=True)
debriefing = PrimaryKeyField(db_column='DEBRIEFING_ID')
debriefing_start = TextField(db_column='DEBRIEFING_START', null=True)
evaluation_note = TextField(db_column='EVALUATION_NOTE', null=True)
notes = TextField(db_column='NOTES', null=True)
observer = IntegerField(db_column='OBSERVER_ID')
program = IntegerField(db_column='PROGRAM_ID')
class Meta:
db_table = 'DEBRIEFINGS'
class Dissections(BaseModel):
age = IntegerField(db_column='AGE', null=True)
age_date = TextField(db_column='AGE_DATE', null=True)
age_location = TextField(db_column='AGE_LOCATION', null=True)
age_method = TextField(db_column='AGE_METHOD', null=True)
age_reader = TextField(db_column='AGE_READER', null=True)
band = TextField(db_column='BAND_ID', null=True)
bio_specimen_item = ForeignKeyField(db_column='BIO_SPECIMEN_ITEM_ID', rel_model=BioSpecimenItems,
to_field='bio_specimen_item')
bs_result = TextField(db_column='BS_RESULT', null=True)
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
cwt_code = TextField(db_column='CWT_CODE', null=True)
cwt_status = TextField(db_column='CWT_STATUS', null=True)
cwt_type = TextField(db_column='CWT_TYPE', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
dissection_barcode = IntegerField(db_column='DISSECTION_BARCODE', null=True, unique=True)
dissection = PrimaryKeyField(db_column='DISSECTION_ID')
dissection_type = TextField(db_column='DISSECTION_TYPE')
rack = IntegerField(db_column='RACK_ID', null=True)
rack_position = TextField(db_column='RACK_POSITION', null=True)
class Meta:
db_table = 'DISSECTIONS'
class StratumGroups(BaseModel):
group = PrimaryKeyField(db_column='GROUP_ID')
group_type = TextField(db_column='GROUP_TYPE', null=True)
name = TextField(db_column='NAME', null=True)
class Meta:
db_table = 'STRATUM_GROUPS'
class FisheryStratumGroupsMtx(BaseModel):
fishery_lu = IntegerField(db_column='FISHERY_LU_ID', null=True)
group = ForeignKeyField(db_column='GROUP_ID', null=True, rel_model=StratumGroups, to_field='group')
id = PrimaryKeyField(db_column='ID')
class Meta:
db_table = 'FISHERY_STRATUM_GROUPS_MTX'
class FishingLocations(BaseModel):
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
depth = FloatField(db_column='DEPTH')
depth_um = TextField(db_column='DEPTH_UM')
fishing_activity = ForeignKeyField(db_column='FISHING_ACTIVITY_ID', rel_model=FishingActivities,
to_field='fishing_activity')
fishing_location = PrimaryKeyField(db_column='FISHING_LOCATION_ID')
latitude = FloatField(db_column='LATITUDE')
location_date = TextField(db_column='LOCATION_DATE')
longitude = FloatField(db_column='LONGITUDE')
notes = TextField(db_column='NOTES', null=True)
position = IntegerField(db_column='POSITION', null=True)
class Meta:
db_table = 'FISHING_LOCATIONS'
indexes = (
(('fishing_activity', 'position'), True),
)
class FishTickets(BaseModel):
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
fish_ticket_date = TextField(db_column='FISH_TICKET_DATE', null=True)
fish_ticket = PrimaryKeyField(db_column='FISH_TICKET_ID')
fish_ticket_number = TextField(db_column='FISH_TICKET_NUMBER')
state_agency = TextField(db_column='STATE_AGENCY', null=True)
trip = ForeignKeyField(db_column='TRIP_ID', rel_model=Trips, to_field='trip')
class Meta:
db_table = 'FISH_TICKETS'
indexes = (
(('trip', 'fish_ticket_number', 'fish_ticket_date', 'state_agency'), False),
)
class GeartypeStratumGroupMtx(BaseModel):
geartype_lu = IntegerField(db_column='GEARTYPE_LU_ID', null=True)
group = ForeignKeyField(db_column='GROUP_ID', null=True, rel_model=StratumGroups, to_field='group')
id = PrimaryKeyField(db_column='ID')
class Meta:
db_table = 'GEARTYPE_STRATUM_GROUP_MTX'
class Hlfc(BaseModel):
avg_aerial_extent = TextField(db_column='AVG_AERIAL_EXTENT', null=True)
avoidance_gear_used = TextField(db_column='AVOIDANCE_GEAR_USED', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
fishing_activity = ForeignKeyField(db_column='FISHING_ACTIVITY_ID', null=True, rel_model=FishingActivities,
to_field='fishing_activity')
floats_per_skate = FloatField(db_column='FLOATS_PER_SKATE', null=True)
floats_used = TextField(db_column='FLOATS_USED', null=True)
hlfc = PrimaryKeyField(db_column='HLFC_ID')
hooks_per_skate = FloatField(db_column='HOOKS_PER_SKATE', null=True)
horizontal_distance = TextField(db_column='HORIZONTAL_DISTANCE', null=True)
mass_per_weight = TextField(db_column='MASS_PER_WEIGHT', null=True)
mitigation_type = TextField(db_column='MITIGATION_TYPE', null=True)
notes = TextField(db_column='NOTES', null=True)
product_delivery = TextField(db_column='PRODUCT_DELIVERY', null=True)
row_processed = FloatField(db_column='ROW_PROCESSED', null=True)
row_status = TextField(db_column='ROW_STATUS', null=True)
speed = TextField(db_column='SPEED', null=True)
trip = ForeignKeyField(db_column='TRIP_ID', rel_model=Trips, to_field='trip')
weights_per_skate = TextField(db_column='WEIGHTS_PER_SKATE', null=True)
weights_used = TextField(db_column='WEIGHTS_USED', null=True)
class Meta:
db_table = 'HLFC'
class HlfcHaulsXref(BaseModel):
data_source = TextField(db_column='DATA_SOURCE', null=True)
fishing_activity = IntegerField(db_column='FISHING_ACTIVITY_ID')
hlfc_haul = PrimaryKeyField(db_column='HLFC_HAUL_ID')
hlfc = ForeignKeyField(db_column='HLFC_ID', rel_model=Hlfc, to_field='hlfc')
row_processed = FloatField(db_column='ROW_PROCESSED', null=True)
row_status = TextField(db_column='ROW_STATUS', null=True)
class Meta:
db_table = 'HLFC_HAULS_XREF'
class LengthFrequencies(BaseModel):
bio_specimen = ForeignKeyField(db_column='BIO_SPECIMEN_ID', rel_model=BioSpecimens, to_field='bio_specimen')
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
frequency = FloatField(db_column='FREQUENCY')
length_frequency = PrimaryKeyField(db_column='LENGTH_FREQUENCY_ID')
lf_length = FloatField(db_column='LF_LENGTH')
lf_length_um = TextField(db_column='LF_LENGTH_UM')
lf_sex = TextField(db_column='LF_SEX', null=True)
notes = TextField(db_column='NOTES', null=True)
class Meta:
db_table = 'LENGTH_FREQUENCIES'
class Lookups(BaseModel):
active = IntegerField(db_column='ACTIVE', null=True)
description = TextField(db_column='DESCRIPTION', null=True)
lookup = PrimaryKeyField(db_column='LOOKUP_ID')
lookup_type = TextField(db_column='LOOKUP_TYPE')
lookup_value = TextField(db_column='LOOKUP_VALUE')
program = IntegerField(db_column='PROGRAM_ID')
class Meta:
db_table = 'LOOKUPS'
class PasswordHistory(BaseModel):
created_by = IntegerField(db_column='CREATED_BY')
created_date = TextField(db_column='CREATED_DATE')
modified_by = IntegerField(db_column='MODIFIED_BY', null=True)
modified_date = TextField(db_column='MODIFIED_DATE', null=True)
password = TextField(db_column='PASSWORD')
password_history = PrimaryKeyField(db_column='PASSWORD_HISTORY_ID')
user = IntegerField(db_column='USER_ID')
class Meta:
db_table = 'PASSWORD_HISTORY'
class Photos(BaseModel):
photo = PrimaryKeyField(db_column='PHOTO_ID')
class Meta:
db_table = 'PHOTOS'
class PrincipalInvestigatorLu(BaseModel):
email_address = TextField(db_column='EMAIL_ADDRESS', null=True)
full_name = TextField(db_column='FULL_NAME', null=True)
last_name = TextField(db_column='LAST_NAME', null=True)
organization = TextField(db_column='ORGANIZATION', null=True)
phone_number = TextField(db_column='PHONE_NUMBER', null=True)
principal_investigator = PrimaryKeyField(db_column='PRINCIPAL_INVESTIGATOR_ID')
program = TextField(db_column='PROGRAM', null=True)
class Meta:
db_table = 'PRINCIPAL_INVESTIGATOR_LU'
class Roles(BaseModel):
description = TextField(db_column='DESCRIPTION', null=True)
role = PrimaryKeyField(db_column='ROLE_ID')
role_name = TextField(db_column='ROLE_NAME', null=True)
class Meta:
db_table = 'ROLES'
class ProgramRoles(BaseModel):
program = ForeignKeyField(db_column='PROGRAM_ID', null=True, rel_model=Programs, to_field='program')
program_role = PrimaryKeyField(db_column='PROGRAM_ROLE_ID')
role = ForeignKeyField(db_column='ROLE_ID', null=True, rel_model=Roles, to_field='role')
class Meta:
db_table = 'PROGRAM_ROLES'
class ProgramStratumGroupMtx(BaseModel):
group = ForeignKeyField(db_column='GROUP_ID', null=True, rel_model=StratumGroups, to_field='group')
id = PrimaryKeyField(db_column='ID')
program = IntegerField(db_column='PROGRAM_ID', null=True)
class Meta:
db_table = 'PROGRAM_STRATUM_GROUP_MTX'
class ProtocolGroups(BaseModel):
group = PrimaryKeyField(db_column='GROUP_ID')
name = TextField(db_column='NAME', null=True, unique=True)
class Meta:
db_table = 'PROTOCOL_GROUPS'
class ProtocolGroupMtx(BaseModel):
group = ForeignKeyField(db_column='GROUP_ID', null=True, rel_model=ProtocolGroups, to_field='group')
id = PrimaryKeyField(db_column='ID')
protocol_lu = IntegerField(db_column='PROTOCOL_LU_ID', null=True)
class Meta:
db_table = 'PROTOCOL_GROUP_MTX'
class Settings(BaseModel):
is_active = TextField(db_column='IS_ACTIVE', null=True)
parameter = TextField(db_column='PARAMETER', null=True)
settings = PrimaryKeyField(db_column='SETTINGS_ID')
value = TextField(db_column='VALUE', null=True)
class Meta:
db_table = 'SETTINGS'
class SpeciesCatchCategories(BaseModel):
catch_category = ForeignKeyField(db_column='CATCH_CATEGORY_ID', rel_model=CatchCategories,
to_field='catch_category')
created_by = IntegerField(db_column='CREATED_BY')
created_date = TextField(db_column='CREATED_DATE')
modified_by = IntegerField(db_column='MODIFIED_BY', null=True)
modified_date = TextField(db_column='MODIFIED_DATE', null=True)
species_catch_category = PrimaryKeyField(db_column='SPECIES_CATCH_CATEGORY_ID')
species = ForeignKeyField(db_column='SPECIES_ID', rel_model=Species, to_field='species')
class Meta:
db_table = 'SPECIES_CATCH_CATEGORIES'
indexes = (
(('species', 'catch_category'), True),
)
class SpeciesCompositions(BaseModel):
basket_number = IntegerField(db_column='BASKET_NUMBER', null=True)
catch = ForeignKeyField(db_column='CATCH_ID', rel_model=Catches, to_field='catch', unique=True)
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
data_quality = TextField(db_column='DATA_QUALITY')
data_source = TextField(db_column='DATA_SOURCE', null=True)
notes = TextField(db_column='NOTES', null=True)
sample_method = TextField(db_column='SAMPLE_METHOD')
species_composition = PrimaryKeyField(db_column='SPECIES_COMPOSITION_ID')
species_number_kp = FloatField(db_column='SPECIES_NUMBER_KP', null=True)
species_weight_kp = FloatField(db_column='SPECIES_WEIGHT_KP', null=True)
class Meta:
db_table = 'SPECIES_COMPOSITIONS'
class SpeciesCompositionItems(BaseModel):
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
discard_reason = TextField(db_column='DISCARD_REASON', null=True)
handling = TextField(db_column='HANDLING', null=True)
notes = TextField(db_column='NOTES', null=True)
species_composition = ForeignKeyField(db_column='SPECIES_COMPOSITION_ID', rel_model=SpeciesCompositions,
to_field='species_composition')
species_comp_item = PrimaryKeyField(db_column='SPECIES_COMP_ITEM_ID')
species = ForeignKeyField(db_column='SPECIES_ID', rel_model=Species, to_field='species')
species_number = IntegerField(db_column='SPECIES_NUMBER', null=True)
species_weight = FloatField(db_column='SPECIES_WEIGHT', null=True)
species_weight_um = TextField(db_column='SPECIES_WEIGHT_UM', null=True)
total_tally = IntegerField(db_column='TOTAL_TALLY', null=True)
extrapolated_species_weight = FloatField(db_column='EXTRAPOLATED_SPECIES_WEIGHT', null=True)
class Meta:
db_table = 'SPECIES_COMPOSITION_ITEMS'
class SpeciesCompositionBaskets(BaseModel):
basket_weight_itq = FloatField(db_column='BASKET_WEIGHT_ITQ', null=True)
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
fish_number_itq = IntegerField(db_column='FISH_NUMBER_ITQ', null=True)
species_comp_basket = PrimaryKeyField(db_column='SPECIES_COMP_BASKET_ID')
species_comp_item = ForeignKeyField(db_column='SPECIES_COMP_ITEM_ID', rel_model=SpeciesCompositionItems,
to_field='species_comp_item')
is_fg_tally_local = IntegerField(db_column='IS_FG_TALLY_LOCAL', null=True)
is_subsample = IntegerField(db_column='IS_SUBSAMPLE', null=True)
class Meta:
db_table = 'SPECIES_COMPOSITION_BASKETS'
class SpeciesCorrelation(BaseModel):
length = FloatField(db_column='LENGTH', null=True)
species_correlation = PrimaryKeyField(db_column='SPECIES_CORRELATION_ID')
species = IntegerField(db_column='SPECIES_ID', null=True)
weight = FloatField(db_column='WEIGHT', null=True)
class Meta:
db_table = 'SPECIES_CORRELATION'
class SpeciesIdentifications(BaseModel):
data_source = TextField(db_column='DATA_SOURCE', null=True)
identification_date = TextField(db_column='IDENTIFICATION_DATE')
observer = IntegerField(db_column='OBSERVER_ID')
species = ForeignKeyField(db_column='SPECIES_ID', rel_model=Species, to_field='species')
species_ident = PrimaryKeyField(db_column='SPECIES_IDENT_ID')
class Meta:
db_table = 'SPECIES_IDENTIFICATIONS'
class SpeciesSightings(BaseModel):
approach_distance = FloatField(db_column='APPROACH_DISTANCE', null=True)
approach_distance_um = TextField(db_column='APPROACH_DISTANCE_UM', null=True)
beaufort_value = TextField(db_column='BEAUFORT_VALUE', null=True)
body_length = TextField(db_column='BODY_LENGTH', null=True)
confidence = TextField(db_column='CONFIDENCE', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
interaction_behaviors = TextField(db_column='INTERACTION_BEHAVIORS', null=True)
interaction_outcome = FloatField(db_column='INTERACTION_OUTCOME', null=True)
notes = TextField(db_column='NOTES', null=True)
sighting_condition = TextField(db_column='SIGHTING_CONDITION', null=True)
sighting_date = TextField(db_column='SIGHTING_DATE')
sighting_latitude = FloatField(db_column='SIGHTING_LATITUDE')
sighting_longitude = FloatField(db_column='SIGHTING_LONGITUDE')
species_best_number = FloatField(db_column='SPECIES_BEST_NUMBER', null=True)
species = IntegerField(db_column='SPECIES_ID')
species_max_number = FloatField(db_column='SPECIES_MAX_NUMBER', null=True)
species_min_number = FloatField(db_column='SPECIES_MIN_NUMBER', null=True)
species_sighting = PrimaryKeyField(db_column='SPECIES_SIGHTING_ID')
trip = ForeignKeyField(db_column='TRIP_ID', rel_model=Trips, to_field='trip')
water_temperature = FloatField(db_column='WATER_TEMPERATURE', null=True)
water_temperature_um = TextField(db_column='WATER_TEMPERATURE_UM', null=True)
class Meta:
db_table = 'SPECIES_SIGHTINGS'
class SpeciesInteractions(BaseModel):
data_source = TextField(db_column='DATA_SOURCE', null=True)
species_interaction = PrimaryKeyField(db_column='SPECIES_INTERACTION_ID')
species_interaction_type = TextField(db_column='SPECIES_INTERACTION_TYPE')
species_sighting = ForeignKeyField(db_column='SPECIES_SIGHTING_ID', rel_model=SpeciesSightings,
to_field='species_sighting')
class Meta:
db_table = 'SPECIES_INTERACTIONS'
class SpeciesInteractionHaulsXref(BaseModel):
data_source = TextField(db_column='DATA_SOURCE', null=True)
fishing_activity = ForeignKeyField(db_column='FISHING_ACTIVITY_ID', rel_model=FishingActivities,
to_field='fishing_activity')
row_processed = FloatField(db_column='ROW_PROCESSED', null=True)
row_status = TextField(db_column='ROW_STATUS', null=True)
si_haul = PrimaryKeyField(db_column='SI_HAUL_ID')
species_sighting = ForeignKeyField(db_column='SPECIES_SIGHTING_ID', rel_model=SpeciesSightings,
to_field='species_sighting')
class Meta:
db_table = 'SPECIES_INTERACTION_HAULS_XREF'
class StratumLu(BaseModel):
disposition = TextField(db_column='DISPOSITION', null=True)
fishery_group = ForeignKeyField(db_column='FISHERY_GROUP_ID', null=True, rel_model=StratumGroups, to_field='group')
gear_type_group = ForeignKeyField(db_column='GEAR_TYPE_GROUP_ID', null=True, rel_model=StratumGroups,
related_name='STRATUM_GROUPS_gear_type_group_set', to_field='group')
name = TextField(db_column='NAME', null=True)
program_group = ForeignKeyField(db_column='PROGRAM_GROUP_ID', null=True, rel_model=StratumGroups,
related_name='STRATUM_GROUPS_program_group_set', to_field='group')
range_max = FloatField(db_column='RANGE_MAX', null=True)
range_min = FloatField(db_column='RANGE_MIN', null=True)
range_units = TextField(db_column='RANGE_UNITS', null=True)
stratum = PrimaryKeyField(db_column='STRATUM_ID')
stratum_subtype = TextField(db_column='STRATUM_SUBTYPE', null=True)
stratum_type = IntegerField(db_column='STRATUM_TYPE_ID', null=True)
value = TextField(db_column='VALUE', null=True)
class Meta:
db_table = 'STRATUM_LU'
class SpeciesSamplingPlanLu(BaseModel):
biosample_assignment_lu = IntegerField(db_column='BIOSAMPLE_ASSIGNMENT_LU_ID', null=True)
biosample_list_lu = ForeignKeyField(db_column='BIOSAMPLE_LIST_LU_ID', null=True,
rel_model=StratumGroups, to_field='group')
count = IntegerField(db_column='COUNT', null=True)
display_name = TextField(db_column='DISPLAY_NAME', null=True)
disposition = TextField(db_column='DISPOSITION', null=True)
parent_species_sampling_plan = ForeignKeyField(db_column='PARENT_SPECIES_SAMPLING_PLAN_ID', null=True,
rel_model='self', to_field='species_sampling_plan')
plan_name = TextField(db_column='PLAN_NAME', null=True)
principal_investigator = ForeignKeyField(db_column='PRINCIPAL_INVESTIGATOR_ID', null=True,
rel_model=PrincipalInvestigatorLu, to_field='principal_investigator')
protocol_group = ForeignKeyField(db_column='PROTOCOL_GROUP_ID', null=True, rel_model=ProtocolGroups,
to_field='group')
species = ForeignKeyField(db_column='SPECIES_ID', null=True, rel_model=Species, to_field='species')
species_sampling_plan = PrimaryKeyField(db_column='SPECIES_SAMPLING_PLAN_ID')
stratum = ForeignKeyField(db_column='STRATUM_ID', null=True, rel_model=StratumLu, to_field='stratum')
weight_method_lu = IntegerField(db_column='WEIGHT_METHOD_LU_ID', null=True)
class Meta:
db_table = 'SPECIES_SAMPLING_PLAN_LU'
class TripCertificates(BaseModel):
certificate_number = TextField(db_column='CERTIFICATE_NUMBER')
certification = IntegerField(db_column='CERTIFICATION_ID', null=True)
created_by = IntegerField(db_column='CREATED_BY', null=True)
created_date = TextField(db_column='CREATED_DATE', null=True)
data_source = TextField(db_column='DATA_SOURCE', null=True)
trip_certificate = PrimaryKeyField(db_column='TRIP_CERTIFICATE_ID')
trip = ForeignKeyField(db_column='TRIP_ID', rel_model=Trips, to_field='trip')
class Meta:
db_table = 'TRIP_CERTIFICATES'
indexes = (
(('trip', 'certificate_number'), False),
)
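# A minimal usage sketch for the generated models above (a sketch, not part of
# the generated module: assumes peewee 2.x to match the rel_model/related_name
# keywords used here, an initialized database behind BaseModel, and a
# hypothetical trip_id value; fish_tickets_for_trip is a name introduced here):
def fish_tickets_for_trip(trip_id):
    # The ForeignKeyField on FishTickets.trip lets peewee resolve the
    # FISH_TICKETS -> TRIPS join automatically.
    query = (FishTickets
             .select(FishTickets, Trips)
             .join(Trips)
             .where(Trips.trip == trip_id))
    return list(query)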
class TripCheckGroups(BaseModel):
column_list = TextField(db_column='COLUMN_LIST', null=True)
#=============
# Function 224
def cleaning_func_6(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
df['complete_date'] = pd.to_datetime(df['issue_d'])
return df
#=============
# Function 225
def cleaning_func_7(df):
# additional context code from user definitions
def loan_condition(status):
if (status in bad_loan):
return 'Bad Loan'
else:
return 'Good Loan'
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
bad_loan = ['Charged Off', 'Default', 'Does not meet the credit policy. Status:Charged Off', 'In Grace Period', 'Late (16-30 days)', 'Late (31-120 days)']
df['loan_condition'] = df['loan_status'].apply(loan_condition)
return df
#=============
# Function 226
def cleaning_func_8(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
dt_series = pd.to_datetime(df['issue_d'])
df['year'] = dt_series.dt.year
return df
#=============
# Function 227
def cleaning_func_9(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
df['complete_date'] = pd.to_datetime(df['issue_d'])
group_dates = df.groupby(['complete_date', 'region'], as_index=False).sum()
group_dates['issue_d'] = [month.to_period('M') for month in group_dates['complete_date']]
return group_dates
#=============
# Function 228
def cleaning_func_10(df):
# additional context code from user definitions
def finding_regions(state):
if (state in west):
return 'West'
elif (state in south_west):
return 'SouthWest'
elif (state in south_east):
return 'SouthEast'
elif (state in mid_west):
return 'MidWest'
elif (state in north_east):
return 'NorthEast'
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
west = ['CA', 'OR', 'UT', 'WA', 'CO', 'NV', 'AK', 'MT', 'HI', 'WY', 'ID']
south_west = ['AZ', 'TX', 'NM', 'OK']
south_east = ['GA', 'NC', 'VA', 'FL', 'KY', 'SC', 'LA', 'AL', 'WV', 'DC', 'AR', 'DE', 'MS', 'TN']
mid_west = ['IL', 'MO', 'MN', 'OH', 'WI', 'KS', 'MI', 'SD', 'IA', 'NE', 'IN', 'ND']
north_east = ['CT', 'NY', 'PA', 'NJ', 'RI', 'MA', 'MD', 'VT', 'NH', 'ME']
df['region'] = df['addr_state'].apply(finding_regions)
return df
#=============
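# The apply-based lookup in cleaning_func_10 above can be expressed as one
# Series.map over a single dict. A sketch, not from the original; the function
# name is introduced here and the five state lists are passed in explicitly:
def finding_regions_mapped(df, west, south_west, south_east, mid_west, north_east):
    region_map = {}
    for region, states in (('West', west), ('SouthWest', south_west),
                           ('SouthEast', south_east), ('MidWest', mid_west),
                           ('NorthEast', north_east)):
        region_map.update({state: region for state in states})
    # States outside the five lists become NaN, mirroring the original's
    # implicit None return.
    df['region'] = df['addr_state'].map(region_map)
    return df
#=============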
# Function 229
def cleaning_func_11(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
complete_df = df.copy()
# Fill missing values with the most common value within each region.
complete_df['last_credit_pull_d'] = complete_df.groupby('region')['last_credit_pull_d'].transform(lambda x: x.fillna(x.mode()[0]) if not x.mode().empty else x)
return complete_df
#=============
# Function 230
def cleaning_func_12(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
return df
#=============
# Function 231
def cleaning_func_13(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
complete_df = df.copy()
# Fill missing values with the per-region median.
complete_df['total_acc'] = complete_df.groupby('region')['total_acc'].transform(lambda x: x.fillna(x.median()))
return complete_df
#=============
# Function 232
def cleaning_func_14(col,df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
col.loc[((col['emp_length'] == '10+ years'), 'emp_length_int')] = 10
col.loc[((col['emp_length'] == '9 years'), 'emp_length_int')] = 9
col.loc[((col['emp_length'] == '8 years'), 'emp_length_int')] = 8
col.loc[((col['emp_length'] == '7 years'), 'emp_length_int')] = 7
col.loc[((col['emp_length'] == '6 years'), 'emp_length_int')] = 6
col.loc[((col['emp_length'] == '5 years'), 'emp_length_int')] = 5
col.loc[((col['emp_length'] == '4 years'), 'emp_length_int')] = 4
col.loc[((col['emp_length'] == '3 years'), 'emp_length_int')] = 3
col.loc[((col['emp_length'] == '2 years'), 'emp_length_int')] = 2
col.loc[((col['emp_length'] == '1 year'), 'emp_length_int')] = 1
col.loc[((col['emp_length'] == '< 1 year'), 'emp_length_int')] = 0.5
col.loc[((col['emp_length'] == 'n/a'), 'emp_length_int')] = 0
return col
#=============
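# The twelve .loc assignments in cleaning_func_14 above amount to one lookup
# table. A compact equivalent sketch (emp_length_to_int and emp_length_map are
# names introduced here, not from the original):
def emp_length_to_int(col):
    emp_length_map = {'10+ years': 10, '9 years': 9, '8 years': 8,
                      '7 years': 7, '6 years': 6, '5 years': 5,
                      '4 years': 4, '3 years': 3, '2 years': 2,
                      '1 year': 1, '< 1 year': 0.5, 'n/a': 0}
    col['emp_length_int'] = col['emp_length'].map(emp_length_map)
    return col
#=============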
# Function 233
def cleaning_func_15(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
complete_df = df.copy()
# Fill missing values with the per-region mean.
complete_df['delinq_2yrs'] = complete_df.groupby('region')['delinq_2yrs'].transform(lambda x: x.fillna(x.mean()))
return complete_df
#=============
# Function 234
def cleaning_func_16(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
complete_df = df.copy()
# Fill missing values with the most common value within each region.
complete_df['last_pymnt_d'] = complete_df.groupby('region')['last_pymnt_d'].transform(lambda x: x.fillna(x.mode()[0]) if not x.mode().empty else x)
return complete_df
#=============
# Function 235
def cleaning_func_17(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
complete_df = df.copy()
# Fill missing values with the most common value within each region.
complete_df['earliest_cr_line'] = complete_df.groupby('region')['earliest_cr_line'].transform(lambda x: x.fillna(x.mode()[0]) if not x.mode().empty else x)
return complete_df
#=============
# Function 236
def cleaning_func_18(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
complete_df = df.copy()
# Fill missing values with the per-region median.
complete_df['pub_rec'] = complete_df.groupby('region')['pub_rec'].transform(lambda x: x.fillna(x.median()))
return complete_df
#=============
# Function 237
def cleaning_func_19(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
complete_df = df.copy()
# Fill missing values with the most common value within each region.
complete_df['next_pymnt_d'] = complete_df.groupby('region')['next_pymnt_d'].transform(lambda x: x.fillna(x.mode()[0]) if not x.mode().empty else x)
return complete_df
#=============
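# Several of the cleaning functions above repeat one pattern: fill a column's
# missing values within each region using a group statistic. A generic helper
# sketch (impute_by_region is a name introduced here, not from the original):
def impute_by_region(df, column, how='mean'):
    stats = {'mean': lambda x: x.mean(),
             'median': lambda x: x.median(),
             'mode': lambda x: x.mode()[0] if not x.mode().empty else x.iloc[0]}
    stat = stats[how]
    df[column] = df.groupby('region')[column].transform(lambda x: x.fillna(stat(x)))
    return df
#=============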
# Function 238
def cleaning_func_20(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
group_dates = df.groupby(['complete_date', 'region'], as_index=False).sum()
group_dates = group_dates.groupby(['issue_d', 'region'], as_index=False).sum()
return group_dates
#=============
# Function 239
def cleaning_func_22(df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
group_dates = df.groupby(['complete_date', 'region'], as_index=False).sum()
return group_dates
#=============
# Function 240
def cleaning_func_23(col,df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
col.loc[((col['emp_length'] == '10+ years'), 'emp_length_int')] = 10
col.loc[((col['emp_length'] == '9 years'), 'emp_length_int')] = 9
col.loc[((col['emp_length'] == '8 years'), 'emp_length_int')] = 8
col.loc[((col['emp_length'] == '7 years'), 'emp_length_int')] = 7
col.loc[((col['emp_length'] == '6 years'), 'emp_length_int')] = 6
col.loc[((col['emp_length'] == '5 years'), 'emp_length_int')] = 5
col.loc[((col['emp_length'] == '4 years'), 'emp_length_int')] = 4
col.loc[((col['emp_length'] == '3 years'), 'emp_length_int')] = 3
col.loc[((col['emp_length'] == '2 years'), 'emp_length_int')] = 2
col.loc[((col['emp_length'] == '1 year'), 'emp_length_int')] = 1
col.loc[((col['emp_length'] == '< 1 year'), 'emp_length_int')] = 0.5
col.loc[((col['emp_length'] == 'n/a'), 'emp_length_int')] = 0
col.loc[((col['annual_income'] <= 100000), 'income_category')] = 'Low'
col.loc[(((col['annual_income'] > 100000) & (col['annual_income'] <= 200000)), 'income_category')] = 'Medium'
col.loc[((col['annual_income'] > 200000), 'income_category')] = 'High'
return col
#=============
# Function 241
def cleaning_func_24(col,df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
col.loc[((col['emp_length'] == '10+ years'), 'emp_length_int')] = 10
col.loc[((col['emp_length'] == '9 years'), 'emp_length_int')] = 9
col.loc[((col['emp_length'] == '8 years'), 'emp_length_int')] = 8
col.loc[((col['emp_length'] == '7 years'), 'emp_length_int')] = 7
col.loc[((col['emp_length'] == '6 years'), 'emp_length_int')] = 6
col.loc[((col['emp_length'] == '5 years'), 'emp_length_int')] = 5
col.loc[((col['emp_length'] == '4 years'), 'emp_length_int')] = 4
col.loc[((col['emp_length'] == '3 years'), 'emp_length_int')] = 3
col.loc[((col['emp_length'] == '2 years'), 'emp_length_int')] = 2
col.loc[((col['emp_length'] == '1 year'), 'emp_length_int')] = 1
col.loc[((col['emp_length'] == '< 1 year'), 'emp_length_int')] = 0.5
col.loc[((col['emp_length'] == 'n/a'), 'emp_length_int')] = 0
col.loc[((col['annual_income'] <= 100000), 'income_category')] = 'Low'
col.loc[(((col['annual_income'] > 100000) & (col['annual_income'] <= 200000)), 'income_category')] = 'Medium'
col.loc[((col['annual_income'] > 200000), 'income_category')] = 'High'
col.loc[((df['loan_condition'] == 'Bad Loan'), 'loan_condition_int')] = 0
col.loc[((df['loan_condition'] == 'Good Loan'), 'loan_condition_int')] = 1
return col
#=============
# Function 242
def cleaning_func_25(col,df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
col.loc[((col['emp_length'] == '10+ years'), 'emp_length_int')] = 10
col.loc[((col['emp_length'] == '9 years'), 'emp_length_int')] = 9
col.loc[((col['emp_length'] == '8 years'), 'emp_length_int')] = 8
col.loc[((col['emp_length'] == '7 years'), 'emp_length_int')] = 7
col.loc[((col['emp_length'] == '6 years'), 'emp_length_int')] = 6
col.loc[((col['emp_length'] == '5 years'), 'emp_length_int')] = 5
col.loc[((col['emp_length'] == '4 years'), 'emp_length_int')] = 4
col.loc[((col['emp_length'] == '3 years'), 'emp_length_int')] = 3
col.loc[((col['emp_length'] == '2 years'), 'emp_length_int')] = 2
col.loc[((col['emp_length'] == '1 year'), 'emp_length_int')] = 1
col.loc[((col['emp_length'] == '< 1 year'), 'emp_length_int')] = 0.5
col.loc[((col['emp_length'] == 'n/a'), 'emp_length_int')] = 0
col.loc[((col['annual_income'] <= 100000), 'income_category')] = 'Low'
col.loc[(((col['annual_income'] > 100000) & (col['annual_income'] <= 200000)), 'income_category')] = 'Medium'
col.loc[((col['annual_income'] > 200000), 'income_category')] = 'High'
col.loc[((df['loan_condition'] == 'Bad Loan'), 'loan_condition_int')] = 0
col.loc[((df['loan_condition'] == 'Good Loan'), 'loan_condition_int')] = 1
col.loc[((col['interest_rate'] <= 13.23), 'interest_payments')] = 'Low'
col.loc[((col['interest_rate'] > 13.23), 'interest_payments')] = 'High'
return col
#=============
# Function 243
def cleaning_func_26(col,df):
# core cleaning code
import pandas as pd
# df = pd.read_csv('../input/loan.csv', low_memory=False)
df = df.rename(columns={'loan_amnt': 'loan_amount', 'funded_amnt': 'funded_amount', 'funded_amnt_inv': 'investor_funds', 'int_rate': 'interest_rate', 'annual_inc': 'annual_income'})
col.loc[((col['emp_length'] == '10+ years'), 'emp_length_int')] = 10
col.loc[((col['emp_length'] == '9 years'), 'emp_length_int')] = 9
col.loc[((col['emp_length'] == '8 years'), 'emp_length_int')] = 8
col.loc[((col['emp_length'] == '7 years'), 'emp_length_int')] = 7
col.loc[((col['emp_length'] == '6 years'), 'emp_length_int')] = 6
col.loc[((col['emp_length'] == '5 years'), 'emp_length_int')] = 5
col.loc[((col['emp_length'] == '4 years'), 'emp_length_int')] = 4
col.loc[((col['emp_length'] == '3 years'), 'emp_length_int')] = 3
col.loc[((col['emp_length'] == '2 years'), 'emp_length_int')] = 2
col.loc[((col['emp_length'] == '1 year'), 'emp_length_int')] = 1
col.loc[((col['emp_length'] == '< 1 year'), 'emp_length_int')] = 0.5
col.loc[((col['emp_length'] == 'n/a'), 'emp_length_int')] = 0
col.loc[((col['annual_income'] <= 100000), 'income_category')] = 'Low'
col.loc[(((col['annual_income'] > 100000) & (col['annual_income'] <= 200000)), 'income_category')] = 'Medium'
col.loc[((col['annual_income'] > 200000), 'income_category')] = 'High'
complete_df = df.copy()
# Fill missing incomes with the per-region mean.
complete_df['annual_income'] = complete_df.groupby('region')['annual_income'].transform(lambda x: x.fillna(x.mean()))
return complete_df
#=============
# Function 244
def cleaning_func_27(col,df):
# additional context code from user definitions
def finding_regions(state):
if (state in west):
return 'West'
elif (state in south_west):
return 'SouthWest'
elif (state in south_east):
return 'SouthEast'
elif (state in mid_west):
return 'MidWest'
elif (state in north_east):
return 'NorthEast'
def loan_condition(status):
if (status in bad_loan):
return 'Bad Loan'
else:
return 'Good Loan'
#=============
def get_aggregation_account_balance_using_get(self, aggregation_account_balance_id, **kwargs):  # noqa: E501
"""Retrieve an aggregation account balance # noqa: E501
Retrieve the information for a specific balance record for an aggregation account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_balance_using_get(aggregation_account_balance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str aggregation_account_balance_id: UUID aggregation_account_balance_id (required)
:param str currency_conversion: USD
:return: AggregationAccountBalance
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_aggregation_account_balance_using_get_with_http_info(aggregation_account_balance_id, **kwargs) # noqa: E501
else:
(data) = self.get_aggregation_account_balance_using_get_with_http_info(aggregation_account_balance_id, **kwargs) # noqa: E501
return data
def get_aggregation_account_balance_using_get_with_http_info(self, aggregation_account_balance_id, **kwargs): # noqa: E501
"""Retrieve an aggregation account balance # noqa: E501
Retrieve the information for a specific balance record for an aggregation account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_balance_using_get_with_http_info(aggregation_account_balance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str aggregation_account_balance_id: UUID aggregation_account_balance_id (required)
:param str currency_conversion: USD
:return: AggregationAccountBalance
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['aggregation_account_balance_id', 'currency_conversion'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aggregation_account_balance_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'aggregation_account_balance_id' is set
if self.api_client.client_side_validation and ('aggregation_account_balance_id' not in params or
params['aggregation_account_balance_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `aggregation_account_balance_id` when calling `get_aggregation_account_balance_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'aggregation_account_balance_id' in params:
path_params['aggregation_account_balance_id'] = params['aggregation_account_balance_id'] # noqa: E501
query_params = []
if 'currency_conversion' in params:
query_params.append(('currency_conversion', params['currency_conversion'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/nucleus/v1/aggregation_account_balance/{aggregation_account_balance_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AggregationAccountBalance', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_aggregation_account_holding_all_using_get(self, **kwargs): # noqa: E501
"""List all aggregation account holdings # noqa: E501
Get all of the holding records for all aggregation accounts defined for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_holding_all_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str currency_conversion: currency_conversion
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageAggregationAccountHolding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_aggregation_account_holding_all_using_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_aggregation_account_holding_all_using_get_with_http_info(**kwargs) # noqa: E501
return data
def get_aggregation_account_holding_all_using_get_with_http_info(self, **kwargs): # noqa: E501
"""List all aggregation account holdings # noqa: E501
Get all of the holding records for all aggregation accounts defined for your firm. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_holding_all_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool ascending: ascending
:param str currency_conversion: currency_conversion
:param str filter: filter
:param str order_by: order_by
:param int page: page
:param int size: size
:return: PageAggregationAccountHolding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ascending', 'currency_conversion', 'filter', 'order_by', 'page', 'size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aggregation_account_holding_all_using_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'ascending' in params:
query_params.append(('ascending', params['ascending'])) # noqa: E501
if 'currency_conversion' in params:
query_params.append(('currency_conversion', params['currency_conversion'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if 'order_by' in params:
query_params.append(('order_by', params['order_by'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/nucleus/v1/aggregation_account_holding', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageAggregationAccountHolding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_aggregation_account_holding_using_get(self, aggregation_account_holding_id, **kwargs): # noqa: E501
"""Retrieve an aggregation account holding # noqa: E501
Retrieve the information for a specific holding record for an aggregation account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_holding_using_get(aggregation_account_holding_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str aggregation_account_holding_id: UUID aggregation_account_holding_id (required)
:param str currency_conversion: USD
:return: AggregationAccountHolding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_aggregation_account_holding_using_get_with_http_info(aggregation_account_holding_id, **kwargs) # noqa: E501
else:
(data) = self.get_aggregation_account_holding_using_get_with_http_info(aggregation_account_holding_id, **kwargs) # noqa: E501
return data
def get_aggregation_account_holding_using_get_with_http_info(self, aggregation_account_holding_id, **kwargs): # noqa: E501
"""Retrieve an aggregation account holding # noqa: E501
Retrieve the information for a specific holding record for an aggregation account. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_holding_using_get_with_http_info(aggregation_account_holding_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str aggregation_account_holding_id: UUID aggregation_account_holding_id (required)
:param str currency_conversion: USD
:return: AggregationAccountHolding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['aggregation_account_holding_id', 'currency_conversion'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aggregation_account_holding_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'aggregation_account_holding_id' is set
if self.api_client.client_side_validation and ('aggregation_account_holding_id' not in params or
params['aggregation_account_holding_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `aggregation_account_holding_id` when calling `get_aggregation_account_holding_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'aggregation_account_holding_id' in params:
path_params['aggregation_account_holding_id'] = params['aggregation_account_holding_id'] # noqa: E501
query_params = []
if 'currency_conversion' in params:
query_params.append(('currency_conversion', params['currency_conversion'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/nucleus/v1/aggregation_account_holding/{aggregation_account_holding_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AggregationAccountHolding', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
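# A minimal usage sketch for the generated client methods above (a sketch, not
# from the original source: the AggregationAccountApi wrapper class name and an
# OAuth2-configured ApiClient are assumed from context, and fetch_holding is a
# name introduced here):
def fetch_holding(api_client, holding_id):
    api = AggregationAccountApi(api_client)  # assumed wrapper class, not shown in this excerpt
    # Synchronous call; returns a deserialized AggregationAccountHolding.
    return api.get_aggregation_account_holding_using_get(holding_id, currency_conversion='USD')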
def get_aggregation_account_overview_by_business_id_using_get(self, business_id, **kwargs): # noqa: E501
"""Retrieve an aggregation account aggregate data # noqa: E501
Retrieve the information for a specific aggregation account with aggregate data for a business. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_overview_by_business_id_using_get(business_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str business_id: UUID business_id (required)
:param str currency_conversion: USD
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_aggregation_account_overview_by_business_id_using_get_with_http_info(business_id, **kwargs) # noqa: E501
else:
(data) = self.get_aggregation_account_overview_by_business_id_using_get_with_http_info(business_id, **kwargs) # noqa: E501
return data
def get_aggregation_account_overview_by_business_id_using_get_with_http_info(self, business_id, **kwargs): # noqa: E501
"""Retrieve an aggregation account aggregate data # noqa: E501
Retrieve the information for a specific aggregation account with aggregate data for a business. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aggregation_account_overview_by_business_id_using_get_with_http_info(business_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str business_id: UUID business_id (required)
:param str currency_conversion: USD
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['business_id', 'currency_conversion'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aggregation_account_overview_by_business_id_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'business_id' is set
if self.api_client.client_side_validation and ('business_id' not in params or
params['business_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `business_id` when calling `get_aggregation_account_overview_by_business_id_using_get`") # noqa: E501
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------
# Vulkan CTS
# ----------
#
# Copyright (c) 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------
import os
import re
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "scripts"))
from build.common import DEQP_DIR
from khr_util.format import indentLines, writeInlFile
VULKAN_DIR = os.path.join(os.path.dirname(__file__), "framework", "vulkan")
INL_HEADER = """\
/* WARNING: This is an auto-generated file. Do not modify, since changes will
* be lost! Modify the generating script instead.
*/\
"""
PLATFORM_FUNCTIONS = [
"vkCreateInstance",
"vkGetInstanceProcAddr",
"vkEnumerateInstanceExtensionProperties",
"vkEnumerateInstanceLayerProperties",
]
INSTANCE_FUNCTIONS = [
"vkDestroyInstance",
"vkEnumeratePhysicalDevices",
"vkGetPhysicalDeviceFeatures",
"vkGetPhysicalDeviceFormatProperties",
"vkGetPhysicalDeviceImageFormatProperties",
"vkGetPhysicalDeviceSparseImageFormatProperties",
"vkGetPhysicalDeviceLimits",
"vkGetPhysicalDeviceProperties",
"vkGetPhysicalDeviceQueueFamilyProperties",
"vkGetPhysicalDeviceMemoryProperties",
"vkEnumerateDeviceExtensionProperties",
"vkEnumerateDeviceLayerProperties",
"vkCreateDevice",
"vkGetDeviceProcAddr",
# VK_KHR_surface
"vkDestroySurfaceKHR",
"vkGetPhysicalDeviceSurfaceSupportKHR",
"vkGetPhysicalDeviceSurfaceCapabilitiesKHR",
"vkGetPhysicalDeviceSurfaceFormatsKHR",
"vkGetPhysicalDeviceSurfacePresentModesKHR",
# VK_KHR_display
"vkGetPhysicalDeviceDisplayPropertiesKHR",
"vkGetPhysicalDeviceDisplayPlanePropertiesKHR",
"vkGetDisplayPlaneSupportedDisplaysKHR",
"vkGetDisplayModePropertiesKHR",
"vkCreateDisplayModeKHR",
"vkGetDisplayPlaneCapabilitiesKHR",
"vkCreateDisplayPlaneSurfaceKHR",
# VK_KHR_xlib_surface
"vkCreateXlibSurfaceKHR",
"vkGetPhysicalDeviceXlibPresentationSupportKHR",
# VK_KHR_xcb_surface
"vkCreateXcbSurfaceKHR",
"vkGetPhysicalDeviceXcbPresentationSupportKHR",
# VK_KHR_wayland_surface
"vkCreateWaylandSurfaceKHR",
"vkGetPhysicalDeviceWaylandPresentationSupportKHR",
# VK_KHR_mir_surface
"vkCreateMirSurfaceKHR",
"vkGetPhysicalDeviceMirPresentationSupportKHR",
# VK_KHR_android_surface
"vkCreateAndroidSurfaceKHR",
# VK_KHR_win32_surface
"vkCreateWin32SurfaceKHR",
"vkGetPhysicalDeviceWin32PresentationSupportKHR",
# VK_EXT_debug_report
"vkCreateDebugReportCallbackEXT",
"vkDestroyDebugReportCallbackEXT",
"vkDebugReportMessageEXT",
]
DEFINITIONS = [
"VK_API_VERSION",
"VK_MAX_PHYSICAL_DEVICE_NAME_SIZE",
"VK_MAX_EXTENSION_NAME_SIZE",
"VK_UUID_SIZE",
"VK_MAX_MEMORY_TYPES",
"VK_MAX_MEMORY_HEAPS",
"VK_MAX_DESCRIPTION_SIZE",
"VK_ATTACHMENT_UNUSED",
"VK_SUBPASS_EXTERNAL"
]
PLATFORM_TYPES = [
# VK_KHR_xlib_surface
("Display*", "XlibDisplayPtr", "void*"),
("Window", "XlibWindow", "deUintptr",),
("VisualID", "XlibVisualID", "deUint32"),
# VK_KHR_xcb_surface
("xcb_connection_t*", "XcbConnectionPtr", "void*"),
("xcb_window_t", "XcbWindow", "deUintptr"),
("xcb_visualid_t", "XcbVisualid", "deUint32"),
# VK_KHR_wayland_surface
("struct wl_display*", "WaylandDisplayPtr", "void*"),
("struct wl_surface*", "WaylandSurfacePtr", "void*"),
# VK_KHR_mir_surface
("MirConnection*", "MirConnectionPtr", "void*"),
("MirSurface*", "MirSurfacePtr", "void*"),
# VK_KHR_android_surface
("ANativeWindow*", "AndroidNativeWindowPtr", "void*"),
# VK_KHR_win32_surface
("HINSTANCE", "Win32InstanceHandle", "void*"),
("HWND", "Win32WindowHandle", "void*")
]
PLATFORM_TYPE_NAMESPACE = "pt"
class Handle:
TYPE_DISP = 0
TYPE_NONDISP = 1
def __init__ (self, type, name):
self.type = type
self.name = name
def getHandleType (self):
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', self.name)
return "HANDLE_TYPE_" + name[3:].upper()
class Enum:
def __init__ (self, name, values):
self.name = name
self.values = values
class Bitfield:
def __init__ (self, name, values):
self.name = name
self.values = values
class Variable:
def __init__ (self, type, name, arraySize = None):
self.type = type
self.name = name
self.arraySize = arraySize
class CompositeType:
CLASS_STRUCT = 0
CLASS_UNION = 1
def __init__ (self, typeClass, name, members):
self.typeClass = typeClass
self.name = name
self.members = members
def getClassName (self):
names = {CompositeType.CLASS_STRUCT: 'struct', CompositeType.CLASS_UNION: 'union'}
return names[self.typeClass]
class Function:
TYPE_PLATFORM = 0 # Not bound to anything
TYPE_INSTANCE = 1 # Bound to VkInstance
TYPE_DEVICE = 2 # Bound to VkDevice
def __init__ (self, name, returnType, arguments):
self.name = name
self.returnType = returnType
self.arguments = arguments
def getType (self):
if self.name in PLATFORM_FUNCTIONS:
return Function.TYPE_PLATFORM
elif self.name in INSTANCE_FUNCTIONS:
return Function.TYPE_INSTANCE
else:
return Function.TYPE_DEVICE
class API:
def __init__ (self, definitions, handles, enums, bitfields, compositeTypes, functions):
self.definitions = definitions
self.handles = handles
self.enums = enums
self.bitfields = bitfields
self.compositeTypes = compositeTypes
self.functions = functions
def readFile (filename):
with open(filename, 'rb') as f:
return f.read()
IDENT_PTRN = r'[a-zA-Z_][a-zA-Z0-9_]*'
TYPE_PTRN = r'[a-zA-Z_][a-zA-Z0-9_ \t*]*'
def endswith (s, postfix):
return len(s) >= len(postfix) and s[len(s)-len(postfix):] == postfix
def fixupEnumValues (values):
fixed = []
for name, value in values:
if endswith(name, "_BEGIN_RANGE") or endswith(name, "_END_RANGE"):
continue
fixed.append((name, value))
return fixed
def fixupType (type):
for platformType, substitute, compat in PLATFORM_TYPES:
if type == platformType:
return PLATFORM_TYPE_NAMESPACE + "::" + substitute
replacements = [
("uint8_t", "deUint8"),
("uint16_t", "deUint16"),
("uint32_t", "deUint32"),
("uint64_t", "deUint64"),
("int8_t", "deInt8"),
("int16_t", "deInt16"),
("int32_t", "deInt32"),
("int64_t", "deInt64"),
("bool32_t", "deUint32"),
("size_t", "deUintptr"),
]
for src, dst in replacements:
type = type.replace(src, dst)
return type
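# Examples: fixupType("Display*") -> "pt::XlibDisplayPtr" (exact platform-type
# match), while fixupType("const uint32_t*") -> "const deUint32*" via the
# textual replacements above.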
def fixupFunction (function):
fixedArgs = [Variable(fixupType(a.type), a.name, a.arraySize) for a in function.arguments]
fixedReturnType = fixupType(function.returnType)
return Function(function.name, fixedReturnType, fixedArgs)
def getInterfaceName (function):
assert function.name[:2] == "vk"
return function.name[2].lower() + function.name[3:]
def getFunctionTypeName (function):
assert function.name[:2] == "vk"
return function.name[2:] + "Func"
def endsWith (str, postfix):
return str[-len(postfix):] == postfix
def splitNameExtPostfix (name):
knownExtPostfixes = ["KHR", "EXT"]
for postfix in knownExtPostfixes:
if endsWith(name, postfix):
return (name[:-len(postfix)], postfix)
return (name, "")
def getBitEnumNameForBitfield (bitfieldName):
bitfieldName, postfix = splitNameExtPostfix(bitfieldName)
assert bitfieldName[-1] == "s"
return bitfieldName[:-1] + "Bits" + postfix
def getBitfieldNameForBitEnum (bitEnumName):
bitEnumName, postfix = splitNameExtPostfix(bitEnumName)
assert bitEnumName[-4:] == "Bits"
return bitEnumName[:-4] + "s" + postfix
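# Round-trip example: getBitEnumNameForBitfield("VkQueueFlags") ->
# "VkQueueFlagBits" and back; extension postfixes survive, e.g.
# "VkSurfaceTransformFlagsKHR" -> "VkSurfaceTransformFlagBitsKHR".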
def parsePreprocDefinedValue (src, name):
definition = re.search(r'#\s*define\s+' + name + r'\s+([^\n]+)\n', src)
if definition is None:
raise Exception("No such definition: %s" % name)
value = definition.group(1).strip()
if value == "UINT32_MAX":
value = "(~0u)"
return value
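# Example: parsePreprocDefinedValue('#define VK_UUID_SIZE 16\n', "VK_UUID_SIZE")
# returns "16"; a literal "UINT32_MAX" is rewritten to "(~0u)".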
def parseEnum (name, src):
keyValuePtrn = '(' + IDENT_PTRN + r')\s*=\s*([^\s,}]+)\s*[,}]'
matches = re.findall(keyValuePtrn, src)
return Enum(name, fixupEnumValues(matches))
# \note Parses raw enums, some are mapped to bitfields later
def parseEnums (src):
matches = re.findall(r'typedef enum(\s*' + IDENT_PTRN + r')?\s*{([^}]*)}\s*(' + IDENT_PTRN + r')\s*;', src)
enums = []
for enumname, contents, typename in matches:
enums.append(parseEnum(typename, contents))
return enums
def parseCompositeType (type, name, src):
# \todo [pyry] Array support is currently a hack (size coupled with name)
	typeNamePtrn	= r'(' + TYPE_PTRN + r')(\s' + IDENT_PTRN + r'(\[[^\]]+\])*)\s*;'
matches = re.findall(typeNamePtrn, src)
members = [Variable(fixupType(t.strip()), n.strip()) for t, n, a in matches]
return CompositeType(type, name, members)
def parseCompositeTypes (src):
typeMap = { 'struct': CompositeType.CLASS_STRUCT, 'union': CompositeType.CLASS_UNION }
matches = re.findall(r'typedef (struct|union)(\s*' + IDENT_PTRN + r')?\s*{([^}]*)}\s*(' + IDENT_PTRN + r')\s*;', src)
types = []
for type, structname, contents, typename in matches:
types.append(parseCompositeType(typeMap[type], typename, contents))
return types
def parseHandles (src):
matches = re.findall(r'VK_DEFINE(_NON_DISPATCHABLE|)_HANDLE\((' + IDENT_PTRN + r')\)[ \t]*[\n\r]', src)
handles = []
typeMap = {'': Handle.TYPE_DISP, '_NON_DISPATCHABLE': Handle.TYPE_NONDISP}
for type, name in matches:
handle = Handle(typeMap[type], name)
handles.append(handle)
return handles
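# Example: given the declarations
#   VK_DEFINE_HANDLE(VkInstance)
#   VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore)
# parseHandles() yields Handle(TYPE_DISP, "VkInstance") and
# Handle(TYPE_NONDISP, "VkSemaphore").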
def parseArgList (src):
	typeNamePtrn	= r'(' + TYPE_PTRN + r')(\s' + IDENT_PTRN + r')(\[[^\]]+\])?'
args = []
for rawArg in src.split(','):
m = re.search(typeNamePtrn, rawArg)
args.append(Variable(m.group(1).strip(), m.group(2).strip(), m.group(3)))
return args
def parseFunctions (src):
	ptrn = r'VKAPI_ATTR\s+(' + TYPE_PTRN + r')VKAPI_CALL\s+(' + IDENT_PTRN + r')\s*\(([^)]*)\)\s*;'
matches = re.findall(ptrn, src)
functions = []
for returnType, name, argList in matches:
functions.append(Function(name.strip(), returnType.strip(), parseArgList(argList)))
return [fixupFunction(f) for f in functions]
def parseBitfieldNames (src):
ptrn = r'typedef\s+VkFlags\s(' + IDENT_PTRN + r')\s*;'
matches = re.findall(ptrn, src)
return matches
def parseAPI (src):
definitions = [(name, parsePreprocDefinedValue(src, name)) for name in DEFINITIONS]
rawEnums = parseEnums(src)
bitfieldNames = parseBitfieldNames(src)
enums = []
bitfields = []
bitfieldEnums = set([getBitEnumNameForBitfield(n) for n in bitfieldNames])
for enum in rawEnums:
if enum.name in bitfieldEnums:
bitfields.append(Bitfield(getBitfieldNameForBitEnum(enum.name), enum.values))
else:
enums.append(enum)
for bitfieldName in bitfieldNames:
if not bitfieldName in [bitfield.name for bitfield in bitfields]:
# Add empty bitfield
bitfields.append(Bitfield(bitfieldName, []))
return API(
definitions = definitions,
handles = parseHandles(src),
enums = enums,
bitfields = bitfields,
compositeTypes = parseCompositeTypes(src),
functions = parseFunctions(src))
def writeHandleType (api, filename):
def gen ():
yield "enum HandleType"
yield "{"
yield "\t%s = 0," % api.handles[0].getHandleType()
for handle in api.handles[1:]:
yield "\t%s," % handle.getHandleType()
yield "\tHANDLE_TYPE_LAST"
yield "};"
yield ""
writeInlFile(filename, INL_HEADER, gen())
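# For an API whose first two handles are VkInstance and VkDevice, the
# generator above emits:
#   enum HandleType
#   {
#       HANDLE_TYPE_INSTANCE = 0,
#       HANDLE_TYPE_DEVICE,
#       ...
#       HANDLE_TYPE_LAST
#   };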
def getEnumValuePrefix (enum):
prefix = enum.name[0]
for i in range(1, len(enum.name)):
if enum.name[i].isupper() and not enum.name[i-1].isupper():
prefix += "_"
prefix += enum.name[i].upper()
return prefix
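# Example: getEnumValuePrefix(Enum("VkImageType", [])) -> "VK_IMAGE_TYPE",
# which genEnumSrc uses to append a VK_IMAGE_TYPE_LAST sentinel when the
# enum's values are linear.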
def parseInt (value):
if value[:2] == "0x":
return int(value, 16)
else:
return int(value, 10)
def areEnumValuesLinear (enum):
curIndex = 0
for name, value in enum.values:
if parseInt(value) != curIndex:
return False
curIndex += 1
return True
def genEnumSrc (enum):
yield "enum %s" % enum.name
yield "{"
for line in indentLines(["\t%s\t= %s," % v for v in enum.values]):
yield line
if areEnumValuesLinear(enum):
yield ""
yield "\t%s_LAST" % getEnumValuePrefix(enum)
yield "};"
def genBitfieldSrc (bitfield):
if len(bitfield.values) > 0:
yield "enum %s" % getBitEnumNameForBitfield(bitfield.name)
yield "{"
for line in indentLines(["\t%s\t= %s," % v for v in bitfield.values]):
yield line
yield "};"
yield "typedef deUint32 %s;" % bitfield.name
def genCompositeTypeSrc (type):
yield "%s %s" % (type.getClassName(), type.name)
yield "{"
for line in indentLines(["\t%s\t%s;" % (m.type, m.name) for m in type.members]):
yield line
yield "};"
def genHandlesSrc (handles):
def genLines (handles):
for handle in handles:
if handle.type == Handle.TYPE_DISP:
yield "VK_DEFINE_HANDLE\t(%s,\t%s);" % (handle.name, handle.getHandleType())
elif handle.type == Handle.TYPE_NONDISP:
yield "VK_DEFINE_NON_DISPATCHABLE_HANDLE\t(%s,\t%s);" % (handle.name, handle.getHandleType())
for line in indentLines(genLines(handles)):
yield line
def writeBasicTypes (api, filename):
def gen ():
for line in indentLines(["enum { %s\t= %s\t};" % define for define in api.definitions]):
yield line
yield ""
for line in genHandlesSrc(api.handles):
yield line
yield ""
for enum in api.enums:
for line in genEnumSrc(enum):
yield line
yield ""
for bitfield in api.bitfields:
for line in genBitfieldSrc(bitfield):
yield line
yield ""
for line in indentLines(["VK_DEFINE_PLATFORM_TYPE(%s,\t%s);" % (s, c) for n, s, c in PLATFORM_TYPES]):
yield line
writeInlFile(filename, INL_HEADER, gen())
def writeCompositeTypes (api, filename):
def gen ():
for type in api.compositeTypes:
for line in genCompositeTypeSrc(type):
yield line
yield ""
writeInlFile(filename, INL_HEADER, gen())
def argListToStr (args):
return ", ".join("%s %s%s" % (v.type, v.name, v.arraySize if v.arraySize != None else "") for v in args)
def writeInterfaceDecl (api, filename, functionTypes, concrete):
def genProtos ():
postfix = "" if concrete else " = 0"
for function in api.functions:
if function.getType() in functionTypes:
yield "virtual %s\t%s\t(%s) const%s;" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments), postfix)
writeInlFile(filename, INL_HEADER, indentLines(genProtos()))
def writeFunctionPtrTypes (api, filename):
def genTypes ():
for function in api.functions:
yield "typedef VKAPI_ATTR %s\t(VKAPI_CALL* %s)\t(%s);" % (function.returnType, getFunctionTypeName(function), argListToStr(function.arguments))
writeInlFile(filename, INL_HEADER, indentLines(genTypes()))
def writeFunctionPointers (api, filename, functionTypes):
writeInlFile(filename, INL_HEADER, indentLines(["%s\t%s;" % (getFunctionTypeName(function), getInterfaceName(function)) for function in api.functions if function.getType() in functionTypes]))
def writeInitFunctionPointers (api, filename, functionTypes):
def makeInitFunctionPointers ():
for function in api.functions:
if function.getType() in functionTypes:
yield "m_vk.%s\t= (%s)\tGET_PROC_ADDR(\"%s\");" % (getInterfaceName(function), getFunctionTypeName(function), function.name)
writeInlFile(filename, INL_HEADER, indentLines(makeInitFunctionPointers()))
def writeFuncPtrInterfaceImpl (api, filename, functionTypes, className):
def makeFuncPtrInterfaceImpl ():
for function in api.functions:
if function.getType() in functionTypes:
yield ""
yield "%s %s::%s (%s) const" % (function.returnType, className, getInterfaceName(function), argListToStr(function.arguments))
yield "{"
yield " %sm_vk.%s(%s);" % ("return " if function.returnType != "void" else "", getInterfaceName(function), ", ".join(a.name for a in function.arguments))
yield "}"
writeInlFile(filename, INL_HEADER, makeFuncPtrInterfaceImpl())
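# Hedged illustration (added): assuming getInterfaceName() strips the "vk"
# prefix and lowercases the first letter, a call with className="DeviceDriver"
# would emit roughly
#   VkResult DeviceDriver::queueWaitIdle (VkQueue queue) const
#   {
#       return m_vk.queueWaitIdle(queue);
#   }
# for vkQueueWaitIdle, i.e. a thin trampoline into the loaded function pointer.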
def writeStrUtilProto (api, filename):
def makeStrUtilProto ():
for line in indentLines(["const char*\tget%sName\t(%s value);" % (enum.name[2:], enum.name) for enum in api.enums]):
yield line
yield ""
for line in indentLines(["inline tcu::Format::Enum<%s>\tget%sStr\t(%s value)\t{ return tcu::Format::Enum<%s>(get%sName, value);\t}" % (e.name, e.name[2:], e.name, e.name, e.name[2:]) for e in api.enums]):
yield line
yield ""
for line in indentLines(["inline std::ostream&\toperator<<\t(std::ostream& s, %s value)\t{ return s << get%sStr(value);\t}" % (e.name, e.name[2:]) for e in api.enums]):
yield line
yield ""
for line in indentLines(["tcu::Format::Bitfield<32>\tget%sStr\t(%s value);" % (bitfield.name[2:], bitfield.name) for bitfield in api.bitfields]):
yield line
yield ""
for line in indentLines(["std::ostream&\toperator<<\t(std::ostream& s, const %s& value);" % (s.name) for s in api.compositeTypes]):
yield line
writeInlFile(filename, INL_HEADER, makeStrUtilProto())
def writeStrUtilImpl (api, filename):
def makeStrUtilImpl ():
for line in indentLines(["template<> const char*\tgetTypeName<%s>\t(void) { return \"%s\";\t}" % (handle.name, handle.name) for handle in api.handles]):
yield line
yield ""
yield "namespace %s" % PLATFORM_TYPE_NAMESPACE
yield "{"
for line in indentLines("std::ostream& operator<< (std::ostream& s, %s\tv) { return s << tcu::toHex(v.internal); }" % s for n, s, c in PLATFORM_TYPES):
yield line
yield "}"
for enum in api.enums:
yield ""
yield "const char* get%sName (%s value)" % (enum.name[2:], enum.name)
yield "{"
yield "\tswitch (value)"
yield "\t{"
for line in indentLines(["\t\tcase %s:\treturn \"%s\";" % (n, n) for n, v in enum.values] + ["\t\tdefault:\treturn DE_NULL;"]):
yield line
yield "\t}"
yield "}"
for bitfield in api.bitfields:
yield ""
yield "tcu::Format::Bitfield<32> get%sStr (%s value)" % (bitfield.name[2:], bitfield.name)
yield "{"
if len(bitfield.values) > 0:
yield "\tstatic const tcu::Format::BitDesc s_desc[] ="
yield "\t{"
for line in indentLines(["\t\ttcu::Format::BitDesc(%s,\t\"%s\")," % (n, n) for n, v in bitfield.values]):
yield line
yield "\t};"
yield "\treturn tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));"
else:
yield "\treturn tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);"
yield "}"
bitfieldTypeNames = set([bitfield.name for bitfield in api.bitfields])
for type in api.compositeTypes:
yield ""
yield "std::ostream& operator<< (std::ostream& s, const %s& value)" % type.name
yield "{"
yield "\ts | |
not None:
pulumi.set(__self__, "path", path)
if read_only is not None:
pulumi.set(__self__, "read_only", read_only)
if secret_file is not None:
pulumi.set(__self__, "secret_file", secret_file)
if secret_ref is not None:
pulumi.set(__self__, "secret_ref", secret_ref)
if user is not None:
pulumi.set(__self__, "user", user)
@property
@pulumi.getter
def monitors(self) -> Sequence[str]:
"""
Required: Monitors is a collection of Ceph monitors More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it
"""
return pulumi.get(self, "monitors")
@property
@pulumi.getter
def path(self) -> Optional[str]:
"""
Optional: Used as the mounted root, rather than the full Ceph tree, default is /
"""
return pulumi.get(self, "path")
@property
@pulumi.getter(name="readOnly")
def read_only(self) -> Optional[bool]:
"""
Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it
"""
return pulumi.get(self, "read_only")
@property
@pulumi.getter(name="secretFile")
def secret_file(self) -> Optional[str]:
"""
Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it
"""
return pulumi.get(self, "secret_file")
@property
@pulumi.getter(name="secretRef")
def secret_ref(self) -> Optional['outputs.DatadogAgentSpecClusterChecksRunnerConfigVolumesCephfsSecretRef']:
"""
Optional: SecretRef is reference to the authentication secret for User, default is empty. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it
"""
return pulumi.get(self, "secret_ref")
@property
@pulumi.getter
def user(self) -> Optional[str]:
"""
Optional: User is the rados user name, default is admin More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it
"""
return pulumi.get(self, "user")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
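# Hedged illustration (not generated code): these output types mirror the
# Kubernetes CephFS volume source, so the underlying object roughly matches
# the following shape (values hypothetical, taken from the k8s cephfs example
# docs referenced in the docstrings above):
#   cephfs:
#     monitors: ["10.16.154.78:6789"]       # required
#     path: "/"                             # optional, defaults to /
#     user: "admin"                         # optional
#     secretFile: "/etc/ceph/user.secret"   # optional
#     secretRef: {name: ceph-secret}        # optional
#     readOnly: true                        # optional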
@pulumi.output_type
class DatadogAgentSpecClusterChecksRunnerConfigVolumesCephfsSecretRef(dict):
"""
Optional: SecretRef is reference to the authentication secret for User, default is empty. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it
"""
def __init__(__self__, *,
name: Optional[str] = None):
"""
Optional: SecretRef is reference to the authentication secret for User, default is empty. More info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it
:param str name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
"""
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
"""
return pulumi.get(self, "name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DatadogAgentSpecClusterChecksRunnerConfigVolumesCinder(dict):
"""
Cinder represents a cinder volume attached and mounted on kubelets host machine. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
"""
def __init__(__self__, *,
volume_id: str,
fs_type: Optional[str] = None,
read_only: Optional[bool] = None,
secret_ref: Optional['outputs.DatadogAgentSpecClusterChecksRunnerConfigVolumesCinderSecretRef'] = None):
"""
Cinder represents a cinder volume attached and mounted on kubelets host machine. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
:param str volume_id: volume id used to identify the volume in cinder. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
:param str fs_type: Filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
:param bool read_only: Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
:param 'DatadogAgentSpecClusterChecksRunnerConfigVolumesCinderSecretRefArgs' secret_ref: Optional: points to a secret object containing parameters used to connect to OpenStack.
"""
pulumi.set(__self__, "volume_id", volume_id)
if fs_type is not None:
pulumi.set(__self__, "fs_type", fs_type)
if read_only is not None:
pulumi.set(__self__, "read_only", read_only)
if secret_ref is not None:
pulumi.set(__self__, "secret_ref", secret_ref)
@property
@pulumi.getter(name="volumeID")
def volume_id(self) -> str:
"""
volume id used to identify the volume in cinder. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
"""
return pulumi.get(self, "volume_id")
@property
@pulumi.getter(name="fsType")
def fs_type(self) -> Optional[str]:
"""
Filesystem type to mount. Must be a filesystem type supported by the host operating system. Examples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
"""
return pulumi.get(self, "fs_type")
@property
@pulumi.getter(name="readOnly")
def read_only(self) -> Optional[bool]:
"""
Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
"""
return pulumi.get(self, "read_only")
@property
@pulumi.getter(name="secretRef")
def secret_ref(self) -> Optional['outputs.DatadogAgentSpecClusterChecksRunnerConfigVolumesCinderSecretRef']:
"""
Optional: points to a secret object containing parameters used to connect to OpenStack.
"""
return pulumi.get(self, "secret_ref")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DatadogAgentSpecClusterChecksRunnerConfigVolumesCinderSecretRef(dict):
"""
Optional: points to a secret object containing parameters used to connect to OpenStack.
"""
def __init__(__self__, *,
name: Optional[str] = None):
"""
Optional: points to a secret object containing parameters used to connect to OpenStack.
:param str name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
"""
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
"""
return pulumi.get(self, "name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DatadogAgentSpecClusterChecksRunnerConfigVolumesConfigMap(dict):
"""
ConfigMap represents a configMap that should populate this volume
"""
def __init__(__self__, *,
default_mode: Optional[int] = None,
items: Optional[Sequence['outputs.DatadogAgentSpecClusterChecksRunnerConfigVolumesConfigMapItems']] = None,
name: Optional[str] = None,
optional: Optional[bool] = None):
"""
ConfigMap represents a configMap that should populate this volume
:param int default_mode: Optional: mode bits to use on created files by default. Must be a value between 0 and 0777. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.
:param Sequence['DatadogAgentSpecClusterChecksRunnerConfigVolumesConfigMapItemsArgs'] items: If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.
:param str name: Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
:param bool optional: Specify whether the ConfigMap or its keys must be defined
"""
if default_mode is not None:
pulumi.set(__self__, "default_mode", default_mode)
if items is not None:
pulumi.set(__self__, "items", items)
if name is not None:
pulumi.set(__self__, "name", name)
if optional is not None:
pulumi.set(__self__, "optional", optional)
@property
@pulumi.getter(name="defaultMode")
def default_mode(self) -> Optional[int]:
"""
Optional: mode bits to use on created files by default. Must be a value between 0 and 0777. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.
"""
return pulumi.get(self, "default_mode")
@property
@pulumi.getter
def items(self) -> Optional[Sequence['outputs.DatadogAgentSpecClusterChecksRunnerConfigVolumesConfigMapItems']]:
"""
If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the ConfigMap, the volume setup will error unless it is marked optional. Paths must be relative and may not contain the '..' path or start with '..'.
"""
return pulumi.get(self, "items")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names TODO: Add other useful fields. apiVersion, kind, uid?
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def optional(self) -> Optional[bool]:
"""
Specify whether the ConfigMap or its keys must be defined
"""
return pulumi.get(self, "optional")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
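# Hedged illustration (not generated code): a configMap volume using the
# fields above roughly corresponds to (names hypothetical)
#   configMap:
#     name: my-config
#     defaultMode: 0644
#     optional: false
#     items:
#       - key: settings.yaml
#         path: conf/settings.yaml
#         mode: 0444
# where each item projects one ConfigMap key to a relative path in the volume.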
@pulumi.output_type
class DatadogAgentSpecClusterChecksRunnerConfigVolumesConfigMapItems(dict):
"""
Maps a string key to a path within a volume.
"""
def __init__(__self__, *,
key: str,
path: str,
mode: Optional[int] = None):
"""
Maps a string key to a path within a volume.
:param str key: The key to project.
:param str path: The relative path of the file to map the key to. May not be an absolute path. May not contain the path element '..'. May not start with the string '..'.
:param int mode: Optional: mode bits to use on this file, must be a value between 0 and 0777. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other | |
Click on the Browse link
self.assertIn("/browse?namespace=14&my=14",
CanonizerBrowsePage(self.driver).select_by_value_personal_reputations_only_my_topics().get_url())
# 100
def test_select_by_value_professional_services_only_my_topics(self):
print("\n" + str(test_cases(99)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Browse link
self.assertIn("/browse?namespace=15&my=15",
CanonizerBrowsePage(self.driver).select_by_value_professional_services_only_my_topics().get_url())
# 101
def test_select_by_value_sandbox_only_my_topics(self):
print("\n" + str(test_cases(100)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Browse link
self.assertIn("/browse?namespace=16&my=16",
CanonizerBrowsePage(self.driver).select_by_value_sandbox_only_my_topics().get_url())
# 102
def test_select_by_value_terminology_only_my_topics(self):
print("\n" + str(test_cases(101)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Browse link
self.assertIn("/browse?namespace=17&my=17",
CanonizerBrowsePage(self.driver).select_by_value_terminology_only_my_topics().get_url())
# 103
def test_select_by_value_www_only_my_topics(self):
print("\n" + str(test_cases(102)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Browse link
self.assertIn("/browse?namespace=18&my=18",
CanonizerBrowsePage(self.driver).select_by_value_www_only_my_topics().get_url())
# 104
def test_select_by_value_all_only_my_topics(self):
print("\n" + str(test_cases(103)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Browse link
self.assertIn("/browse?namespace=&my=",
CanonizerBrowsePage(self.driver).select_by_value_all_only_my_topics().get_url())
# ----- Browse Page Test Cases End -----
# ----- White Paper Test Cases Start -----
# 105
def test_check_white_paper_should_open_with_login(self):
print("\n" + str(test_cases(104)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the White Paper link
CanonizerWhitePaper(self.driver).check_white_paper_should_open().open("files/2012_amplifying_final.pdf")
# 106
def test_check_white_paper_should_open_without_login(self):
print("\n" + str(test_cases(105)))
# Click on the White Paper link
CanonizerWhitePaper(self.driver).check_white_paper_should_open().open("files/2012_amplifying_final.pdf")
# ----- White Paper Test Cases End -----
# ----- Blog Test Cases Start -----
# 107
def test_check_blog_page_should_open_with_login(self):
print("\n" + str(test_cases(106)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Blog link
self.assertIn("/blog/", CanonizerBlog(self.driver).check_blog_page_should_open().get_url())
# 108
def test_check_blog_page_should_open_without_login(self):
print("\n" + str(test_cases(107)))
# Click on the Blog link
self.assertIn("/blog/", CanonizerBlog(self.driver).check_blog_page_should_open().get_url())
# ----- Blog Test Cases End -----
# ----- Algorithm Information Test Cases Start -----
# 109
def test_check_algorithm_information_page_should_open(self):
print("\n" + str(test_cases(108)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Algorithm Information link
self.assertIn("topic/53-Canonizer-Algorithms/1", CanonizerAlgorithmInformation(
self.driver).check_algorithm_information_page_should_open().get_url())
# ----- Algorithm Information Test Cases End -----
# ----- As Of Filters Test Cases Start -----
# 110
def test_check_include_review_filter_applied(self):
print("\n" + str(test_cases(109)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on include review filter
self.assertIn(DEFAULT_BASE_URL,
CanonizerAsOfFilters(self.driver).check_include_review_filter_applied().get_url())
# 111
def test_check_default_filter_applied(self):
print("\n" + str(test_cases(110)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on default filter
self.assertIn(DEFAULT_BASE_URL, CanonizerAsOfFilters(self.driver).check_default_filter_applied().get_url())
# 112
def test_check_as_of_date_filter_applied(self):
print("\n" + str(test_cases(111)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on as of (yy/mm/dd) and select date
self.assertIn(DEFAULT_BASE_URL, CanonizerAsOfFilters(self.driver).check_as_of_date_filter_applied().get_url())
# ----- As Of Filters Test Cases End -----
# ----- Update Topic Test Cases Start -----
# 113
def test_load_topic_update_page(self):
print("\n" + str(test_cases(112)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Check Topic Update page load
self.assertIn("manage/topic", CanonizerTopicUpdatePage(self.driver).load_topic_update_page().get_url())
# 114
def test_load_view_this_version_page(self):
print("\n" + str(test_cases(113)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
self.assertIn(DEFAULT_BASE_URL, CanonizerTopicUpdatePage(self.driver).load_view_this_version_page().get_url())
# 115
def test_load_topic_object_page(self):
print("\n" + str(test_cases(114)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
result = CanonizerTopicUpdatePage(self.driver).load_topic_object_page()
if result == 1:
self.assertIn(DEFAULT_BASE_URL, CanonizerTopicUpdatePage(self.driver).load_topic_object_page().get_url())
# 116
def test_topic_update_page_mandatory_fields_are_marked_with_asterisk(self):
print("\n" + str(test_cases(115)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
self.assertTrue(CanonizerTopicUpdatePage(
self.driver).load_topic_update_page().topic_update_page_mandatory_fields_are_marked_with_asterisk())
# 117
def test_topic_objection_page_mandatory_fields_are_marked_with_asterisk(self):
print("\n" + str(test_cases(116)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
result = CanonizerTopicUpdatePage(self.driver).load_topic_object_page()
if result == 1:
self.assertTrue(CanonizerTopicUpdatePage(
self.driver).load_topic_object_page().topic_objection_page_mandatory_fields_are_marked_with_asterisk())
# 118
def test_topic_update_page_should_have_add_new_nick_name_link_for_new_users(self):
print("\n" + str(test_cases(117)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
result = CanonizerTopicUpdatePage(
self.driver).load_topic_update_page().topic_update_page_should_have_add_new_nick_name_link_for_new_users()
if result == 1:
self.assertIn("Add New Nick Name", result)
# 119
def test_submit_update_with_blank_nick_name(self):
print("\n" + str(test_cases(118)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Topic update and check if nick name is blank
result = CanonizerTopicUpdatePage(self.driver).load_topic_update_page().submit_update_with_blank_nick_name(
"Test",
"",
"")
if result == 1:
self.assertIn("The nick name field is required.", result)
# ----- Update Topic Test Cases End -----
# ----- Create New Camp and Edit Camp Test Cases Start -----
# 120
def test_load_create_camp_page(self):
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
print("\n" + str(test_cases(119)))
self.assertIn("camp/create/173-Software-Testing/1-Agreement",
CanonizerCampPage(self.driver).load_create_camp_page().get_url())
# 121
def test_create_new_camp_page_mandatory_fields_are_marked_with_asterisk(self):
print("\n" + str(test_cases(120)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Create New camp link
self.assertTrue(CanonizerCampPage(
self.driver).load_create_camp_page().create_new_camp_page_mandatory_fields_are_marked_with_asterisk())
# 122
def test_create_camp_with_blank_nick_name(self):
print("\n" + str(test_cases(121)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Create New camp link and check if nick name is blank
result = CanonizerCampPage(self.driver).load_create_camp_page().create_camp_with_blank_nick_name("Test",
"",
"",
"",
"",
"")
if result == 1:
self.assertIn("The nick name field is required.", result)
# 123
def test_create_camp_with_blank_camp_name(self):
print("\n" + str(test_cases(122)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Create New camp link and check if topic name is blank
result = CanonizerCampPage(self.driver).load_create_camp_page().create_camp_with_blank_camp_name("Test",
"",
"",
"",
"",
"")
self.assertIn("Camp name is required.", result)
# 124
def test_load_camp_update_page(self):
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
print("\n" + str(test_cases(123)))
self.assertIn("/manage/camp", CanonizerEditCampPage(self.driver).load_camp_update_page().get_url())
# TC_EDIT_CAMP_STATEMENT_01
def test_load_camp_manage_edit_page(self):
print("\n" + str(test_cases("TC_EDIT_CAMP_STATEMENT_01")))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(self.driver).load_camp_manage_edit_page()
self.assertTrue(result)
# TC_EDIT_CAMP_STATEMENT_02
def test_verify_agreement_page(self):
print("\n" + str(test_cases("TC_EDIT_CAMP_STATEMENT_02")))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(self.driver).verify_agreement_page()
if result:
self.assertIn("topic/173-Software-Testing/2-Types-Of-Testing", result.get_url())
# TC_EDIT_CAMP_STATEMENT_03
def test_verify_camp_update_page(self):
print("\n" + str(test_cases("TC_EDIT_CAMP_STATEMENT_02")))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(self.driver).load_camp_update_page()
if result:
self.assertIn("manage/camp", result.get_url())
# 125
def test_camp_edit_page_mandatory_fields_are_marked_with_asterisk(self):
print("\n" + str(test_cases(124)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Click on the Manage/Edit Camp link
self.assertTrue(CanonizerEditCampPage(
self.driver).load_camp_update_page().camp_edit_page_mandatory_fields_are_marked_with_asterisk())
# 126
def test_submit_camp_update_with_blank_nick_name(self):
print("\n" + str(test_cases(125)))
# Click on the Login Page and Create a Login Session and for further actions.
self.login_to_canonizer_app()
# Go to Manage/Edit Camp and check if nick name is blank
result = CanonizerEditCampPage(self.driver).load_camp_update_page().submit_camp_update_with_blank_nick_name(
"Test", "", "", "", "", "")
if result == 1:
self.assertIn("The nick name field is required.", result)
# TC_EDIT_CAMP_STATEMENT_03
def test_submit_camp_update_with_valid_data(self):
print("\n" + str(test_cases('TC_EDIT_CAMP_STATEMENT_03')))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(self.driver).load_camp_update_page().submit_camp_update_with_valid_data(
"Agreement",
"Types Of Testing 1",
"Test",
"Testing Keywords, Keywords",
"Just Note for testing",
"www.google.com",
"Pooja",
)
self.assertTrue("Success! Camp change submitted successfully.", result)
# TC_EDIT_CAMP_STATEMENT_05
def test_submit_camp_update_with_invalid_data(self):
print("\n" + str(test_cases('TC_EDIT_CAMP_STATEMENT_05')))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(self.driver).load_camp_update_page().submit_camp_update_with_invalid_data(
"Agreement",
"Types Of Testing 1",
"Test",
"Testing Keywords, Keywords",
"Just Note for testing",
"invalid url",
"Pooja",
)
self.assertTrue("The camp about url format is invalid. (Example: https://www.example.com?post=1234)", result)
# TC_EDIT_CAMP_STATEMENT_06
def test_submit_camp_update_with_valid_data_with_enter_key(self):
print("\n" + str(test_cases('TC_EDIT_CAMP_STATEMENT_06')))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(
self.driver).load_camp_update_page().submit_camp_update_with_valid_data_with_enter_key(
"Agreement",
"Types Of Testing 2",
"Test",
"Testing Keywords, Keywords",
"Just Note for testing",
"www.google.com",
"Pooja",
)
self.assertTrue("Success! Camp change submitted successfully.", result)
# TC_EDIT_CAMP_STATEMENT_05
def test_submit_camp_update_with_invalid_data_with_enter_key(self):
print("\n" + str(test_cases('TC_EDIT_CAMP_STATEMENT_05')))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(
self.driver).load_camp_update_page().submit_camp_update_with_invalid_data_with_enter_key(
"Agreement",
"Types Of Testing 1",
"Test",
"Testing Keywords, Keywords",
"Just Note for testing",
"invalid url",
"Pooja",
)
self.assertIn("The camp about url format is invalid. (Example: https://www.example.com?post=1234)", result)
# TC_EDIT_CAMP_STATEMENT_07
def test_submit_camp_update_with_mandatory_fields_only(self):
print("\n" + str(test_cases('TC_EDIT_CAMP_STATEMENT_07')))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(
self.driver).load_camp_update_page().submit_camp_update_with_mandatory_fields_only(
"Agreement",
"Types Of Testing 1",
"",
"",
"",
"",
"",
)
self.assertTrue("Success! Camp change submitted successfully.", result)
# TC_EDIT_CAMP_STATEMENT_08
def test_submit_camp_update_with_tailing_spaces(self):
print("\n" + str(test_cases('TC_EDIT_CAMP_STATEMENT_08')))
self.login_to_canonizer_app()
result = CanonizerEditCampPage(
self.driver).load_camp_update_page().submit_camp_update_with_tailing_spaces(
"Agreement",
"Types Of Testing 1",
"",
" keywords with trailing spaces.",
" notes with trailing spaces.",
" www.google.com",
"",
| |
"""Games or Adversarial Search (Chapter 5)"""
import copy
import itertools
import random
from collections import namedtuple
import numpy as np
from utils import vector_add
GameState = namedtuple('GameState', 'to_move, utility, board, moves')
StochasticGameState = namedtuple('StochasticGameState', 'to_move, utility, board, moves, chance')
# ______________________________________________________________________________
# MinMax Search
def minimax_decision(state, game):
"""Given a state in a game, calculate the best move by searching
forward all the way to the terminal states. [Figure 5.3]"""
player = game.to_move(state)
def max_value(state):
if game.terminal_test(state):
return game.utility(state, player)
v = -np.inf
for a in game.actions(state):
v = max(v, min_value(game.result(state, a)))
return v
def min_value(state):
if game.terminal_test(state):
return game.utility(state, player)
v = np.inf
for a in game.actions(state):
v = min(v, max_value(game.result(state, a)))
return v
# Body of minimax_decision:
return max(game.actions(state), key=lambda a: min_value(game.result(state, a)))
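# Hedged usage sketch (not in the original module): with the TicTacToe game
# defined further below, the optimal opening move for 'X' could be computed as
#   ttt = TicTacToe()
#   best = minimax_decision(ttt.initial, ttt)
# Note that the full game tree is searched, so this is only practical for
# tiny games.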
# ______________________________________________________________________________
def expect_minmax(state, game):
"""
[Figure 5.11]
Return the best move for a player after dice are thrown. The game tree
includes chance nodes along with min and max nodes.
"""
player = game.to_move(state)
def max_value(state):
v = -np.inf
for a in game.actions(state):
v = max(v, chance_node(state, a))
return v
def min_value(state):
v = np.inf
for a in game.actions(state):
v = min(v, chance_node(state, a))
return v
def chance_node(state, action):
res_state = game.result(state, action)
if game.terminal_test(res_state):
return game.utility(res_state, player)
sum_chances = 0
num_chances = len(game.chances(res_state))
for chance in game.chances(res_state):
# Evaluate each chance outcome from the same post-action state; using a
# separate name avoids clobbering res_state across loop iterations.
outcome_state = game.outcome(res_state, chance)
if outcome_state.to_move == player:
util = max_value(outcome_state)
else:
util = min_value(outcome_state)
sum_chances += util * game.probability(chance)
return sum_chances / num_chances
# Body of expect_minmax:
return max(game.actions(state), key=lambda a: chance_node(state, a), default=None)
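# Hedged note (added): expect_minmax requires a StochasticGame (defined
# below) supplying chances(), outcome() and probability(). As written, each
# action is scored by summing utility * game.probability(chance) over the
# chance outcomes and averaging over their count, so it is only meaningful
# when the game's probability model matches that convention.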
def alpha_beta_search(state, game):
"""Search game to determine best action; use alpha-beta pruning.
As in [Figure 5.7], this version searches all the way to the leaves."""
player = game.to_move(state)
# Functions used by alpha_beta
def max_value(state, alpha, beta):
if game.terminal_test(state):
return game.utility(state, player)
v = -np.inf
for a in game.actions(state):
v = max(v, min_value(game.result(state, a), alpha, beta))
if v >= beta:
return v
alpha = max(alpha, v)
return v
def min_value(state, alpha, beta):
if game.terminal_test(state):
return game.utility(state, player)
v = np.inf
for a in game.actions(state):
v = min(v, max_value(game.result(state, a), alpha, beta))
if v <= alpha:
return v
beta = min(beta, v)
return v
# Body of alpha_beta_search:
best_score = -np.inf
beta = np.inf
best_action = None
for a in game.actions(state):
v = min_value(game.result(state, a), best_score, beta)
if v > best_score:
best_score = v
best_action = a
return best_action
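# Hedged note (added): alpha_beta_search returns the same move minimax would,
# but prunes branches that cannot change the decision: max nodes cut off once
# v >= beta, min nodes once v <= alpha. With good move ordering this roughly
# doubles the searchable depth for the same node budget.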
def alpha_beta_cutoff_search(state, game, d=4, cutoff_test=None, eval_fn=None):
"""Search game to determine best action; use alpha-beta pruning.
This version cuts off search and uses an evaluation function."""
player = game.to_move(state)
# Functions used by alpha_beta
def max_value(state, alpha, beta, depth):
if cutoff_test(state, depth):
return eval_fn(state)
v = -np.inf
for a in game.actions(state):
v = max(v, min_value(game.result(state, a), alpha, beta, depth + 1))
if v >= beta:
return v
alpha = max(alpha, v)
return v
def min_value(state, alpha, beta, depth):
if cutoff_test(state, depth):
return eval_fn(state)
v = np.inf
for a in game.actions(state):
v = min(v, max_value(game.result(state, a), alpha, beta, depth + 1))
if v <= alpha:
return v
beta = min(beta, v)
return v
# Body of alpha_beta_cutoff_search starts here:
# The default test cuts off at depth d or at a terminal state
cutoff_test = (cutoff_test or (lambda state, depth: depth > d or game.terminal_test(state)))
eval_fn = eval_fn or (lambda state: game.utility(state, player))
best_score = -np.inf
beta = np.inf
best_action = None
for a in game.actions(state):
v = min_value(game.result(state, a), best_score, beta, 1)
if v > best_score:
best_score = v
best_action = a
return best_action
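# Hedged usage sketch (added): a depth-limited search with a custom heuristic
# might look like
#   move = alpha_beta_cutoff_search(state, game, d=6,
#                                   eval_fn=lambda s: my_heuristic(s))
# where my_heuristic is a hypothetical evaluation returning higher values for
# states that are better for the player to move at the root.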
# ______________________________________________________________________________
# Players for Games
def query_player(game, state):
"""Make a move by querying standard input."""
print("current state:")
game.display(state)
print("available moves: {}".format(game.actions(state)))
print("")
move = None
if game.actions(state):
move_string = input('Your move? ')
try:
move = eval(move_string)
except NameError:
move = move_string
else:
print('no legal moves: passing turn to next player')
return move
def random_player(game, state):
"""A player that chooses a legal move at random."""
return random.choice(game.actions(state)) if game.actions(state) else None
def alpha_beta_player(game, state):
return alpha_beta_search(state, game)
def minmax_player(game, state):
return minimax_decision(state, game)
def expect_minmax_player(game, state):
return expect_minmax(state, game)
# ______________________________________________________________________________
# Some Sample Games
class Game:
"""A game is similar to a problem, but it has a utility for each
state and a terminal test instead of a path cost and a goal
test. To create a game, subclass this class and implement actions,
result, utility, and terminal_test. You may override display and
successors or you can inherit their default methods. You will also
need to set the .initial attribute to the initial state; this can
be done in the constructor."""
def actions(self, state):
"""Return a list of the allowable moves at this point."""
raise NotImplementedError
def result(self, state, move):
"""Return the state that results from making a move from a state."""
raise NotImplementedError
def utility(self, state, player):
"""Return the value of this final state to player."""
raise NotImplementedError
def terminal_test(self, state):
"""Return True if this is a final state for the game."""
return not self.actions(state)
def to_move(self, state):
"""Return the player whose move it is in this state."""
return state.to_move
def display(self, state):
"""Print or otherwise display the state."""
print(state)
def __repr__(self):
return '<{}>'.format(self.__class__.__name__)
def play_game(self, *players):
"""Play an n-person, move-alternating game."""
state = self.initial
while True:
for player in players:
move = player(self, state)
state = self.result(state, move)
if self.terminal_test(state):
self.display(state)
return self.utility(state, self.to_move(self.initial))
class StochasticGame(Game):
"""A stochastic game includes uncertain events which influence
the moves of players at each state. To create a stochastic game, subclass
this class and implement chances and outcome along with the other
unimplemented game class methods."""
def chances(self, state):
"""Return a list of all possible uncertain events at a state."""
raise NotImplementedError
def outcome(self, state, chance):
"""Return the state which is the outcome of a chance trial."""
raise NotImplementedError
def probability(self, chance):
"""Return the probability of occurrence of a chance."""
raise NotImplementedError
def play_game(self, *players):
"""Play an n-person, move-alternating stochastic game."""
state = self.initial
while True:
for player in players:
chance = random.choice(self.chances(state))
state = self.outcome(state, chance)
move = player(self, state)
state = self.result(state, move)
if self.terminal_test(state):
self.display(state)
return self.utility(state, self.to_move(self.initial))
class Fig52Game(Game):
"""The game represented in [Figure 5.2]. Serves as a simple test case."""
succs = dict(A=dict(a1='B', a2='C', a3='D'),
B=dict(b1='B1', b2='B2', b3='B3'),
C=dict(c1='C1', c2='C2', c3='C3'),
D=dict(d1='D1', d2='D2', d3='D3'))
utils = dict(B1=3, B2=12, B3=8, C1=2, C2=4, C3=6, D1=14, D2=5, D3=2)
initial = 'A'
def actions(self, state):
return list(self.succs.get(state, {}).keys())
def result(self, state, move):
return self.succs[state][move]
def utility(self, state, player):
if player == 'MAX':
return self.utils[state]
else:
return -self.utils[state]
def terminal_test(self, state):
return state not in ('A', 'B', 'C', 'D')
def to_move(self, state):
return 'MIN' if state in 'BCD' else 'MAX'
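# Hedged usage sketch (added): the toy tree above can exercise the searchers:
#   game = Fig52Game()
#   minimax_decision('A', game)   # expected to pick 'a1' (value 3)
#   alpha_beta_search('A', game)  # same move, found with pruning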
class Fig52Extended(Game):
"""Similar to Fig52Game but bigger. Useful for visualisation"""
succs = {i: dict(l=i * 3 + 1, m=i * 3 + 2, r=i * 3 + 3) for i in range(13)}
utils = dict()
def actions(self, state):
return sorted(list(self.succs.get(state, {}).keys()))
def result(self, state, move):
return self.succs[state][move]
def utility(self, state, player):
if player == 'MAX':
return self.utils[state]
else:
return -self.utils[state]
def terminal_test(self, state):
return state not in range(13)
def to_move(self, state):
return 'MIN' if state in {1, 2, 3} else 'MAX'
class TicTacToe(Game):
"""Play TicTacToe on an h x v board, with Max (first player) playing 'X'.
A state has the player to move, a cached utility, a list of moves in
the form of a list of (x, y) positions, and a board, in the form of
a dict of {(x, y): Player} entries, where Player is 'X' or 'O'."""
def __init__(self, h=3, v=3, k=3):
self.h = h
self.v = v
self.k = k
moves = [(x, y) for x in range(1, h + 1)
for y in range(1, v + 1)]
self.initial = GameState(to_move='X', utility=0, board={}, moves=moves)
def actions(self, state):
"""Legal moves are any square not yet taken."""
return state.moves
def result(self, state, move):
if move not in state.moves:
return state # Illegal move has no effect
board = state.board.copy()
board[move] = state.to_move
moves = list(state.moves)
moves.remove(move)
return GameState(to_move=('O' if state.to_move == 'X' else 'X'),
utility=self.compute_utility(board, move, state.to_move),
board=board, moves=moves)
| |
"""[Chemicals] Chemicals corresponing to each entry in the stoichiometry array."""
return self._chemicals
@property
def stoichiometry(self):
"""[2d array] Stoichiometry coefficients."""
return self._stoichiometry
@property
def reactants(self):
"""tuple[str] Reactants associated to conversion."""
IDs = self._chemicals.IDs
phases = self._phases
X_index = self._X_index
if phases:
return tuple([(phases[i], IDs[j]) for i,j in X_index])
else:
return tuple([IDs[i] for i in X_index])
@property
def MWs(self):
"""[2d array] Molecular weights of all chemicals."""
return self._chemicals.MW[np.newaxis, :]
def to_df(self, index=None):
columns = [f'Stoichiometry (by {self.basis})', 'Reactant', 'Conversion [%]']
chemicals = self._chemicals
phases = self._phases
rxns = [get_stoichiometric_string(i, phases, chemicals) for i in self._stoichiometry]
cmps = [ID + ',' + phase for phase, ID in self.reactants] if phases else self.reactants
Xs = 100. * self.X
data = list([i for i in zip(rxns, cmps, Xs) if i[2]])
df = pd.DataFrame(data, columns=columns, index=index if index else None)
if isinstance(self, ParallelReaction):
df.index.name = 'Parallel reaction'
elif isinstance(self, SeriesReaction):
df.index.name = 'Reaction in series'
return df
def __repr__(self):
return f"{type(self).__name__}([{', '.join([repr(i).replace('ReactionItem', 'Reaction') for i in self])}])"
def _info(self, index_name='index'):
info = f"{type(self).__name__} (by {self.basis}):"
chemicals = self._chemicals
phases = self._phases
length = len
string = str
rxns = [get_stoichiometric_string(i, phases, chemicals) for i in self._stoichiometry]
maxrxnlen = max([13, *[length(i) for i in rxns]]) + 2
cmps = [ID + ',' + phase for phase, ID in self.reactants] if phases else self.reactants
maxcmplen = max([8, *[length(i) for i in cmps]]) + 2
Xs = self.X
N = len(Xs)
maxnumspace = max(length(string(N)) + 1, len(index_name))
info += f"\n{index_name}" + " "*(max(2, length(string(N)))) + "stoichiometry" + " "*(maxrxnlen - 13) + "reactant" + " "*(maxcmplen - 8) + ' X[%]'
for N, rxn, cmp, X in zip(range(N), rxns, cmps, Xs):
rxn_spaces = " "*(maxrxnlen - length(rxn))
cmp_spaces = " "*(maxcmplen - length(cmp))
num = string(N)
numspace = (maxnumspace - length(num)) * " "
info += f"\n[{N}]{numspace}{rxn}{rxn_spaces}{cmp}{cmp_spaces}{X*100: >6.2f}"
return info
_ipython_display_ = show = Reaction.show
class ParallelReaction(ReactionSet):
"""
Create a ParallelReaction object from Reaction objects. When called,
it returns the change in material due to all parallel reactions.
Parameters
----------
reactions : Iterable[Reaction]
Examples
--------
Run two reactions in parallel:
>>> import thermosteam as tmo
>>> chemicals = tmo.Chemicals(['H2', 'Ethanol', 'CH4', 'O2', 'CO2', 'H2O'], cache=True)
>>> tmo.settings.set_thermo(chemicals)
>>> kwargs = dict(phases='lg', correct_atomic_balance=True)
>>> reaction = tmo.ParallelReaction([
... # Reaction definition Reactant Conversion
... tmo.Reaction('H2,g + O2,g -> 2H2O,g', reactant='H2', X=0.7, **kwargs),
... tmo.Reaction('Ethanol,l + O2,g -> CO2,g + 2H2O,g', reactant='Ethanol', X=0.1, **kwargs)
... ])
>>> reaction.reactants # Note that reactants are tuples of phase and ID pairs.
(('g', 'H2'), ('l', 'Ethanol'))
>>> reaction.show()
ParallelReaction (by mol):
index stoichiometry reactant X[%]
[0] H2,g + 0.5 O2,g -> H2O,g H2,g 70.00
[1] 3 O2,g + Ethanol,l -> 2 CO2,g + 3 H2O,g Ethanol,l 10.00
>>> s1 = tmo.MultiStream('s1', T=373.15,
... l=[('Ethanol', 10)],
... g=[('H2', 10), ('CH4', 5), ('O2', 100), ('H2O', 10)])
>>> s1.show() # Before reaction
MultiStream: s1
phases: ('g', 'l'), T: 373.15 K, P: 101325 Pa
flow (kmol/hr): (g) H2 10
CH4 5
O2 100
H2O 10
(l) Ethanol 10
>>> reaction(s1)
>>> s1.show() # After isothermal reaction
MultiStream: s1
phases: ('g', 'l'), T: 373.15 K, P: 101325 Pa
flow (kmol/hr): (g) H2 3
CH4 5
O2 93.5
CO2 2
H2O 20
(l) Ethanol 9
Reaction items are accessible:
>>> reaction[0].show()
ReactionItem (by mol):
stoichiometry reactant X[%]
H2,g + 0.5 O2,g -> H2O,g H2,g 70.00
Note that changing the conversion of a reaction item changes the
conversion of its parent reaction set:
>>> reaction[0].X = 0.5
>>> reaction.show()
ParallelReaction (by mol):
index stoichiometry reactant X[%]
[0] H2,g + 0.5 O2,g -> H2O,g H2,g 50.00
[1] 3 O2,g + Ethanol,l -> 2 CO2,g + 3 H2O,g Ethanol,l 10.00
Reactions subsets can be made as well:
>>> reaction[:1].show()
ParallelReaction (by mol):
index stoichiometry reactant X[%]
[0] H2,g + 0.5 O2,g -> H2O,g H2,g 50.00
Get net reaction conversion of reactants as a material indexer:
>>> mi = reaction.X_net(indexer=True)
>>> mi.show()
MaterialIndexer:
(g) H2 0.5
(l) Ethanol 0.1
>>> mi['g', 'H2']
0.5
If no phases are specified for a reaction set, the `X_net` property returns
a ChemicalIndexer:
>>> kwargs = dict(correct_atomic_balance=True)
>>> reaction = tmo.ParallelReaction([
... # Reaction definition Reactant Conversion
... tmo.Reaction('H2 + O2 -> 2H2O', reactant='H2', X=0.7, **kwargs),
... tmo.Reaction('Ethanol + O2 -> CO2 + 2H2O', reactant='Ethanol', X=0.1, **kwargs)
... ])
>>> ci = reaction.X_net(indexer=True)
>>> ci.show()
ChemicalIndexer:
H2 0.7
Ethanol 0.1
>>> ci['H2']
0.7
"""
__slots__ = ()
def _reaction(self, material_array):
reacted = self._X * np.array([material_array[i] for i in self._X_index], float)
if self._phases:
material_array += (reacted[:, np.newaxis, np.newaxis] * self._stoichiometry).sum(0)
else:
material_array += reacted @ self._stoichiometry
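# Hedged note (added): for the phase-less case this is plain linear algebra:
# `reacted` is a vector of moles converted per reaction (conversion X times
# the current amount of that reaction's reactant), and
# `reacted @ stoichiometry` sums the per-reaction stoichiometric changes
# into a single material change vector.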
def reduce(self):
"""
Return a new Parallel reaction object that combines reaction
with the same reactant together, reducing the number of reactions.
"""
rxn_dict = {i: [] for i in set(self._X_index)}
for i in self: rxn_dict[i._X_index].append(i)
for key, rxns in rxn_dict.items():
rxn, *rxns = rxns
rxn = rxn.copy()
for i in rxns: rxn += i
rxn_dict[key] = rxn
return self.__class__(rxn_dict.values())
def X_net(self, indexer=False):
"""Return net reaction conversion of reactants as a dictionary or
a ChemicalIndexer if indexer is True."""
X_net = {}
for i, j in zip(self.reactants, self.X):
if i in X_net:
X_net[i] += j
else:
X_net[i] = j
if indexer:
chemicals = self.chemicals
phases = self.phases
if phases:
phases = [i[0] for i in X_net]
mi = MaterialIndexer(phases=phases, chemicals=chemicals)
for i,j in X_net.items(): mi[i] = j
return mi
else:
data = chemicals.kwarray(X_net)
return ChemicalIndexer.from_data(data, NoPhase, chemicals, False)
else:
return X_net
class SeriesReaction(ReactionSet):
"""
Create a SeriesReaction object from Reaction objects. When called,
it returns the change in material due to all reactions in series.
Parameters
----------
reactions : Iterable[Reaction]
"""
__slots__ = ()
def reduce(self):
raise TypeError('cannot reduce a SeriesReaction object, only '
'ParallelReaction objects are reducible')
def _reaction(self, material_array):
for i, j, k in zip(self._X_index, self.X, self._stoichiometry):
material_array += material_array[i] * j * k
def X_net(self, indexer=False):
"""Return net reaction conversion of reactants as a dictionary or
a ChemicalIndexer if indexer is True."""
X_net = {}
for i, j in zip(self.reactants, self.X):
if i in X_net:
X_net[i] += (1 - X_net[i]) * j
else:
X_net[i] = j
if indexer:
chemicals = self.chemicals
data = chemicals.kwarray(X_net)
return ChemicalIndexer.from_data(data, NoPhase, chemicals, False)
else:
return X_net
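# Hedged note (added): the running update X_net += (1 - X_net) * X composes
# conversions in series, i.e. X_net = 1 - (1 - X_1)(1 - X_2)...; two 50%
# conversions of the same reactant therefore give 0.5 + 0.5*0.5 = 0.75 net.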
class ReactionSystem:
"""
Create a ReactionSystem object that can react a stream across a series of
reactions.
Parameters
----------
*reactions : Reaction, ParallelReaction, or SeriesReaction
All reactions within the reaction system.
Examples
--------
Create a reaction system for cellulosic fermentation of biomass:
>>> from thermosteam import Rxn, RxnSys, PRxn, SRxn, settings, Chemical, Stream
>>> cal2joule = 4.184
>>> Glucan = Chemical('Glucan', search_db=False, formula='C6H10O5', Hf=-233200*cal2joule, phase='s', default=True)
>>> Glucose = Chemical('Glucose', phase='s')
>>> CO2 = Chemical('CO2', phase='g')
>>> HMF = Chemical('HMF', search_ID='Hydroxymethylfurfural', phase='l', default=True)
>>> Biomass = Glucose.copy(ID='Biomass')
>>> settings.set_thermo(['Water', 'Ethanol', 'LacticAcid', HMF, Glucose, Glucan, CO2, Biomass])
>>> saccharification = PRxn([
... Rxn('Glucan + H2O -> Glucose', reactant='Glucan', X=0.9),
... Rxn('Glucan -> HMF + 2H2O', reactant='Glucan', X=0.025)
... ])
>>> fermentation = SRxn([
... Rxn('Glucose -> 2LacticAcid', reactant='Glucose', X=0.03),
... Rxn('Glucose -> 2Ethanol + 2CO2', reactant='Glucose', X=0.95),
... ])
>>> cell_growth = Rxn('Glucose -> Biomass', reactant='Glucose', X=1.0)
>>> cellulosic_rxnsys = RxnSys(saccharification, fermentation, cell_growth)
>>> cellulosic_rxnsys.show()
ReactionSystem:
index reaction
[0] ParallelReaction (by mol):
subindex stoichiometry reactant X[%]
[0] Water + Glucan -> Glucose Glucan 90.00
[1] Glucan -> 2 Water + HMF Glucan 2.50
[1] SeriesReaction (by mol):
subindex stoichiometry reactant X[%]
[0] Glucose -> 2 LacticAcid Glucose 3.00
[1] Glucose -> 2 Ethanol + 2 CO2 Glucose 95.00
[2] Reaction (by mol):
stoichiometry reactant X[%]
Glucose -> Biomass Glucose 100.00
Compute the flux of glucan through saccharification reactions:
>>> feed = Stream('feed', | |
<reponame>kdschlosser/nvapi
# -*- coding: utf-8 -*-
#
# ***********************************************************************************
# MIT License
#
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is furnished
# to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ***********************************************************************************
# **********************************************************************************************************************
# Copyright 2012 NVIDIA Corporation. All rights reserved.
# NOTICE TO USER:
# This software is subject to NVIDIA ownership rights under U.S. and international Copyright laws.
# This software and the information contained herein are PROPRIETARY and CONFIDENTIAL to NVIDIA
# and are being provided solely under the terms and conditions of an NVIDIA software license agreement.
# Otherwise, you have no rights to use or access this software in any manner.
#
# If not covered by the applicable NVIDIA software license agreement:
# NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOFTWARE FOR ANY PURPOSE.
# IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
# NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
# IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
# OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOURCE CODE.
#
# U.S. Government End Users.
# This software is a "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT 1995),
# consisting of "commercial computer software" and "commercial computer software documentation"
# as such terms are used in 48 C.F.R. 12.212 (SEPT 1995) and is provided to the U.S. Government only as a commercial
# end item.
# Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995),
# all U.S. Government End Users acquire the software with only those rights set forth herein.
#
# Any use of this software in individual and commercial software must include,
# in the user documentation and internal comments to the code,
# the above Disclaimer (as applicable) and U.S. Government End Users Notice.
#
# **********************************************************************************************************************
import ctypes
from ctypes.wintypes import INT, WCHAR
from .nvapi_lite_common_h import *
class ENUM(INT):
pass
class _SettingDWORDNameString(ctypes.Structure):
pass
SettingDWORDNameString = _SettingDWORDNameString
class _SettingWSTRINGNameString(ctypes.Structure):
pass
SettingWSTRINGNameString = _SettingWSTRINGNameString
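# Hedged note (added): the empty Structure subclasses above are forward
# declarations in the usual ctypes pattern; their _fields_ are expected to be
# assigned later, once the referenced types exist, e.g. (field names and
# types hypothetical):
#   _SettingDWORDNameString._fields_ = [
#       ('settingId', ctypes.c_uint32),
#       ('settingNameString', WCHAR * 100),
#   ]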
# *************************************************************************
# Copyright NVIDIA Corporation. All rights reserved.
# NOTICE TO USER:
#
# This source code is subject to NVIDIA ownership rights under U.S.
# and international Copyright laws. Users and possessors of this
# source code are hereby granted a nonexclusive, royalty-free
# license to use this code in individual and commercial software.
#
# NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE
# CODE FOR ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR
# IMPLIED WARRANTY OF ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOURCE CODE, INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR
# PURPOSE. IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL,
# INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
# AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING
# OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOURCE
# CODE.
# U.S. Government End Users. This source code is a "commercial item"
# as that term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting
# of "commercial computer software" and "commercial computer software
# documentation" as such terms are used in 48 C.F.R. 12.212 (SEPT 1995)
# and is provided to the U.S. Government only as a commercial end item.
# Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through
# 227.7202-4 (JUNE 1995), all U.S. Government End Users acquire the
# source code with only those rights set forth herein.
#
# Any use of this source code in individual and commercial software must
# include, in the user documentation and internal comments to the code,
# the above Disclaimer and U.S. Government End Users Notice.
# *************************************************************************
OGL_AA_LINE_GAMMA_STRING = "Antialiasing - Line gamma"
OGL_DEEP_COLOR_SCANOUT_STRING = "Deep color for 3D applications"
OGL_DEFAULT_SWAP_INTERVAL_STRING = "OpenGL default swap interval"
OGL_DEFAULT_SWAP_INTERVAL_FRACTIONAL_STRING = (
"OpenGL default swap interval fraction"
)
OGL_DEFAULT_SWAP_INTERVAL_SIGN_STRING = "OpenGL default swap interval sign"
OGL_EVENT_LOG_SEVERITY_THRESHOLD_STRING = "Event Log Severity Threshold"
OGL_EXTENSION_STRING_VERSION_STRING = "Extension String version"
OGL_FORCE_BLIT_STRING = "Buffer-flipping mode"
OGL_FORCE_STEREO_STRING = "Force Stereo shuttering"
OGL_IMPLICIT_GPU_AFFINITY_STRING = "Preferred OpenGL GPU"
OGL_MAX_FRAMES_ALLOWED_STRING = "Maximum frames allowed"
OGL_OVERLAY_PIXEL_TYPE_STRING = "Exported Overlay pixel types"
OGL_OVERLAY_SUPPORT_STRING = "Enable overlay"
OGL_QUALITY_ENHANCEMENTS_STRING = (
"High level control of the rendering quality on OpenGL"
)
OGL_SINGLE_BACKDEPTH_BUFFER_STRING = "Unified back/depth buffer"
OGL_SLI_CFR_MODE_STRING = "Set CFR mode"
OGL_SLI_MULTICAST_STRING = "Enable NV_gpu_multicast extension"
OGL_THREAD_CONTROL_STRING = "Threaded optimization"
OGL_TMON_LEVEL_STRING = "Event Log Tmon Severity Threshold"
OGL_TRIPLE_BUFFER_STRING = "Triple buffering"
AA_BEHAVIOR_FLAGS_STRING = "Antialiasing - Behavior Flags"
AA_MODE_ALPHATOCOVERAGE_STRING = (
"Antialiasing - Transparency Multisampling"
)
AA_MODE_GAMMACORRECTION_STRING = "Antialiasing - Gamma correction"
AA_MODE_METHOD_STRING = "Antialiasing - Setting"
AA_MODE_REPLAY_STRING = "Antialiasing - Transparency Supersampling"
AA_MODE_SELECTOR_STRING = "Antialiasing - Mode"
AA_MODE_SELECTOR_SLIAA_STRING = "Antialiasing - SLI AA"
ANISO_MODE_LEVEL_STRING = "Anisotropic filtering setting"
ANISO_MODE_SELECTOR_STRING = "Anisotropic filtering mode"
ANSEL_ALLOW_STRING = "NVIDIA Pred Ansel Usage"
ANSEL_ENABLE_STRING = "Enable Ansel"
ANSEL_WHITELISTED_STRING = "Ansel flags for enabled applications"
APPLICATION_PROFILE_NOTIFICATION_TIMEOUT_STRING = (
"Application Profile Notification Popup Timeout"
)
APPLICATION_STEAM_ID_STRING = "Steam Application ID"
BATTERY_BOOST_APP_FPS_STRING = "Battery Boost Application FPS"
CPL_HIDDEN_PROFILE_STRING = (
"Do not display this profile in the Control Panel"
)
CUDA_EXCLUDED_GPUS_STRING = "List of Universal GPU ids"
D3DOGL_GPU_MAX_POWER_STRING = "Maximum GPU Power"
EXPORT_PERF_COUNTERS_STRING = "Export Performance Counters"
EXTERNAL_QUIET_MODE_STRING = "External Quiet Mode (XQM)"
FXAA_ALLOW_STRING = "NVIDIA Pred FXAA Usage"
FXAA_ENABLE_STRING = "Enable FXAA"
FXAA_INDICATOR_ENABLE_STRING = "Enable FXAA Indicator"
MCSFRSHOWSPLIT_STRING = "SLI indicator"
NV_QUALITY_UPSCALING_STRING = "NVIDIA Quality upscaling"
OPTIMUS_MAXAA_STRING = "Maximum AA samples allowed for a given application"
PHYSXINDICATOR_STRING = "Display the PhysX indicator"
PREFERRED_PSTATE_STRING = "Power management mode"
PREVENT_UI_AF_OVERRIDE_STRING = "No override of Anisotropic filtering"
PS_FRAMERATE_LIMITER_STRING = "Frame Rate Limiter"
PS_FRAMERATE_LIMITER_2_CONTROL_STRING = "Frame Rate Limiter 2 Control"
PS_FRAMERATE_LIMITER_GPS_CTRL_STRING = "Frame Rate Monitor"
PS_FRAMERATE_MONITOR_CTRL_STRING = "Frame Rate Monitor Control"
SHIM_MAXRES_STRING = "Maximum resolution allowed for a given application"
SHIM_MCCOMPAT_STRING = "Optimus flags for enabled applications"
SHIM_RENDERING_MODE_STRING = "Enable application for Optimus"
SHIM_RENDERING_OPTIONS_STRING = (
"Shim Rendering Mode Options per application for Optimus"
)
SLI_GPU_COUNT_STRING = "Number of GPUs to use on SLI rendering mode"
SLI_PREDEFINED_GPU_COUNT_STRING = (
"NVIDIA pred number of GPUs to use on SLI rendering mode"
)
SLI_PREDEFINED_GPU_COUNT_DX10_STRING = (
"NVIDIA pred number of GPUs to use on SLI rendering mode on DirectX 10"
)
SLI_PREDEFINED_MODE_STRING = "NVIDIA pred SLI mode"
SLI_PREDEFINED_MODE_DX10_STRING = "NVIDIA pred SLI mode on DirectX 10"
SLI_RENDERING_MODE_STRING = "SLI rendering mode"
VRPRERENDERLIMIT_STRING = "Virtual Reality pre-rendered frames"
VRRFEATUREINDICATOR_STRING = "Toggle the VRR global feature"
VRROVERLAYINDICATOR_STRING = "Display the VRR Overlay Indicator"
VRRREQUESTSTATE_STRING = "VRR requested state"
VRR_APP_OVERRIDE_STRING = "G-SYNC"
VRR_APP_OVERRIDE_REQUEST_STATE_STRING = "G-SYNC"
VRR_MODE_STRING = "Enable G-SYNC globally"
VSYNCSMOOTHAFR_STRING = "Flag to control smooth AFR behavior"
VSYNCVRRCONTROL_STRING = "Variable refresh Rate"
VSYNC_BEHAVIOR_FLAGS_STRING = "Vsync - Behavior Flags"
WKS_API_STEREO_EYES_EXCHANGE_STRING = "Stereo - Swap eyes"
WKS_API_STEREO_MODE_STRING = "Stereo - Display mode"
WKS_MEMORY_ALLOCATION_POLICY_STRING = "Memory Allocation Policy"
WKS_STEREO_DONGLE_SUPPORT_STRING = "Stereo - Dongle Support"
WKS_STEREO_SUPPORT_STRING = "Stereo - Enable"
WKS_STEREO_SWAP_MODE_STRING = "Stereo - swap mode"
AO_MODE_STRING = "Ambient Occlusion"
AO_MODE_ACTIVE_STRING = "NVIDIA Pred Ambient Occlusion Usage"
AUTO_LODBIASADJUST_STRING = (
"Texture filtering - Driver Controlled LOD Bias"
)
EXPORT_PERF_COUNTERS_DX9_ONLY_STRING = (
"Export Performance Counters for DX9 only"
)
ICAFE_LOGO_CONFIG_STRING = "ICafe Settings"
LODBIASADJUST_STRING = "Texture filtering - LOD Bias"
MAXWELL_B_SAMPLE_INTERLEAVE_STRING = "Enable sample interleaving (MFAA)"
PRERENDERLIMIT_STRING = "Maximum pre-rendered frames"
PS_SHADERDISKCACHE_STRING = "Shader Cache"
PS_TEXFILTER_ANISO_OPTS2_STRING = (
"Texture filtering - Anisotropic sample optimization"
)
PS_TEXFILTER_BILINEAR_IN_ANISO_STRING = (
"Texture filtering - Anisotropic filter optimization"
)
PS_TEXFILTER_DISABLE_TRILIN_SLOPE_STRING = (
"Texture filtering - Trilinear optimization"
)
PS_TEXFILTER_NO_NEG_LODBIAS_STRING = (
    "Texture filtering - Negative LOD bias"
)
# Source repository: PolarMesosphericClouds/SkyWinder
import struct
from multiprocessing import Value
import numpy as np
from PyCRC.CRCCCITT import CRCCCITT
import logging
logger = logging.getLogger(__name__)
def get_checksum(data):
return int(np.sum(np.frombuffer(data, dtype='uint8'), dtype='uint8'))
def get_crc(data):
return CRCCCITT().calculate(data)
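# Illustrative sketch (not part of the original source): get_checksum above is a
# simple mod-256 sum of the buffer's bytes, while get_crc computes a CRC-CCITT.
# A quick sanity check of the wrap-around and range behavior:
def _demo_integrity_helpers():
    # 0xFF + 0x02 = 0x101, which wraps to 0x01 in a uint8 accumulator
    assert get_checksum(b'\xff\x02') == 0x01
    assert get_checksum(b'') == 0
    # CRC-CCITT (used for FilePacket payloads) yields a 16-bit integer
    crc = get_crc(b'hello world')
    assert 0 <= crc <= 0xFFFF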
class PacketError(RuntimeError):
"""
General packet exception
"""
pass
class PacketInsufficientLengthError(PacketError):
"""
    Exception for buffers that are too short to contain the complete packet their length field specifies
"""
pass
class PacketValidityError(PacketError):
"""
    Exception for packets whose header fields fail validation (e.g. bad start byte, sync2 byte, origin, or length)
"""
pass
class PacketChecksumError(PacketError):
"""
Exception for packets that don't pass checksum or CRC tests
"""
pass
def load_gse_packet_from_file(filename):
    with open(filename, 'rb') as fh:  # read as bytes; packet parsing expects a bytes buffer
return GSEPacket(buffer=fh.read())
class GSEPacket(object):
_metadata_table = [('1B', 'start_byte'),
('1B', 'sync2_byte'),
('1B', 'origin_byte'),
('1B', 'unused_zero'),
('1H', 'payload_length')]
_header_format_string = '>' + ''.join([format for format, name in _metadata_table])
# GSE packet is defined in LDB manual section 2.4
START_BYTE = 0xFA
_valid_sync2_bytes = bytes([0xFA, 0xFB, 0xFC, 0xFD, 0xFF])
header_length = struct.calcsize(_header_format_string)
# These values refer to bits 0-2 of origin byte
HIRATE_ORIGIN = 2
LOWRATE_ORIGIN = 1
HOUSEKEEPING_ORIGIN = 0
ORIGIN_BITMASK = 0x07
def __init__(self, buffer=None, sync2_byte=None, origin=None, payload=None):
"""
GSE style packet, received from the GSE and passed to the ground computer.
This could contain either low rate or high rate data.
To decode a packet, use as GSEPacket(data), and access the payload attribute. This will raise a PacketError or
subclass if something serious is wrong, and set the is_valid attribute False if the sync2_byte is not
recognized.
To construct a packet, use as
        packet = GSEPacket(sync2_byte=0xFA, origin=1, payload=b"hello world")
data = packet.to_buffer()
Parameters
----------
        buffer : bytes
            A data buffer to decode as a packet
        sync2_byte : uint8
            sync2 byte indicating the communications link; see SIP manual
        origin : uint8
            origin byte, see SIP manual
        payload : bytes
            Bytes to package into the packet
"""
self._minimum_buffer_length = self.header_length + 1
self.is_valid = False
self._max_payload_size = 5000
if buffer is not None:
self.from_buffer(buffer)
else:
if (sync2_byte is None) or (origin is None) or (payload is None):
raise ValueError('All parameters must be specified'
'\n Sync2_byte was %r, Origin was %r, Payload was %r' % (sync2_byte, origin, payload))
if sync2_byte not in self._valid_sync2_bytes:
raise ValueError('sync2_byte not in valid_sync2_bytes \n Sync2 byte is %r' % sync2_byte)
if origin >= 16:
raise ValueError('origin not valid \n Origin is %r' % origin)
self.sync2_byte = sync2_byte
self.origin = origin
self.payload = payload
self.payload_length = len(payload)
origin_payload_length_string = struct.pack('>1B1H', self.origin, self.payload_length)
        # Checksum includes origin and payload length; need to put these into the byte string for calculation.
self.checksum = get_checksum(payload + origin_payload_length_string)
self.start_byte = self.START_BYTE
def __repr__(self):
try:
return 'Sync2: 0x%02x \n Origin: %d \n Payload Length %d \n First 10 bytes: %r' % (self.sync2_byte,
self.origin,
self.payload_length,
self.payload[:10])
except Exception:
return '[Invalid Packet]'
@property
def total_packet_length(self):
return self.header_length + self.payload_length + 1 # 1 is length of checksum
def from_buffer(self, buffer):
"""
Decode and validate the given buffer and update the class attributes accordingly
Parameters
----------
buffer : str
buffer to decode as a packet
"""
if len(buffer) < self._minimum_buffer_length:
raise PacketInsufficientLengthError(
"Buffer of length %d is too short to contain a packet (minimum length is %d)" %
(len(buffer), self._minimum_buffer_length))
self.start_byte, self.sync2_byte, self.origin, zero_byte, self.payload_length = struct.unpack(
self._header_format_string, buffer[:self.header_length])
if self.sync2_byte not in self._valid_sync2_bytes:
raise PacketValidityError('Sync2_byte not in valid_sync2_bytes. Sync2 byte is %r' % self.sync2_byte)
if zero_byte != 0:
raise PacketValidityError("Expected byte at offset 3 to be zero, got %02X. Start of buffer: %r"
% (zero_byte, buffer[:self.header_length]))
if self.payload_length > self._max_payload_size:
raise PacketValidityError(
"Payload length %d is greater than maximum payload size %d. First 15 bytes of buffer are %r" % (
self.payload_length, self._max_payload_size, buffer[:15]))
if self.start_byte != self.START_BYTE:
raise PacketValidityError("First byte is not valid start byte. First byte is %r" % buffer[0])
if (self.origin & 0xF0):
raise PacketValidityError("Origin byte %02X is invalid. Start of buffer: %r" % (self.origin,buffer[:self.header_length]))
checksum_index = self.header_length + self.payload_length
if checksum_index >= len(buffer):
            raise PacketInsufficientLengthError('Buffer of length %d is too short to contain complete packet '
                                                '(header, payload, checksum). Minimum length is %d'
                                                % (len(buffer), self.header_length + self.payload_length + 1))
payload = buffer[self.header_length:checksum_index]
if len(payload) != self.payload_length:
raise PacketValidityError("Payload length %d does not match length field value %d" % (len(payload),
self.payload_length))
checksum = get_checksum(buffer[2:checksum_index]) # SIP Checksum calculated from byte 3 on.
        if checksum != buffer[checksum_index]:
raise PacketChecksumError("Payload checksum %d does not match checksum field value %d" %
(checksum, buffer[checksum_index]))
self.payload = payload
self.checksum = checksum
def to_buffer(self):
"""
Construct the packet string
Returns
-------
buffer : string containing the packet
"""
assert (self.sync2_byte is not None) and (self.origin is not None) and (self.payload is not None)
header = struct.pack(self._header_format_string, self.start_byte, self.sync2_byte, self.origin, 0,
self.payload_length)
        logger.debug('Serializing GSE packet with checksum %d', self.checksum)
return header + self.payload + bytes([self.checksum])
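# Minimal round-trip sketch (illustrative, not part of the original source):
# construct a GSEPacket, serialize it, and decode it back. The sync2 byte and
# origin values below are arbitrary choices from the documented valid ranges.
def _demo_gse_roundtrip():
    packet = GSEPacket(sync2_byte=0xFA, origin=GSEPacket.LOWRATE_ORIGIN,
                       payload=b'hello world')
    buffer = packet.to_buffer()
    decoded = GSEPacket(buffer=buffer)
    assert decoded.payload == b'hello world'
    # Bits 0-2 of the origin byte carry the origin; mask them out to compare.
    assert decoded.origin & GSEPacket.ORIGIN_BITMASK == GSEPacket.LOWRATE_ORIGIN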
class FilePacket(object):
_metadata_table = [('1B', 'start_byte'),
('1I', 'file_id'),
('1H', 'packet_number'),
('1H', 'total_packet_number'),
('1H', 'payload_length')]
_header_format_string = '>' + ''.join([format for format, name in _metadata_table])
_valid_start_byte = 0xFA
header_length = struct.calcsize(_header_format_string)
_max_payload_size = 1500
def __init__(self, buffer=None, file_id=None,
packet_number=None, total_packet_number=None, payload=None):
"""
        File packet. We break data into chunks and send them to the SIP in this packet format.
        To decode a packet, use as FilePacket(data), and access the payload attribute.
        To construct a packet, use as
        packet = FilePacket(file_id=101, packet_number=2, total_packet_number=4, payload=b"hello world")
        data = packet.to_buffer()
        Parameters
        ----------
        buffer : bytes
            A data buffer to decode as a packet
        file_id : uint32
            file_id assigned to the file when breaking it up to send.
        packet_number : uint16
            Nth packet of the file with this file_id
        total_packet_number : uint16
            Total number of packets in the file
        payload : bytes
            Bytes to package into the packet
"""
self._minimum_buffer_length = self.header_length + 2
if buffer is not None:
self.from_buffer(buffer)
else:
self.file_id = file_id
self.packet_number = packet_number
self.total_packet_number = total_packet_number
if self.packet_number >= self.total_packet_number:
                raise ValueError('Packet number is greater than or equal to total packet number.\n'
                                 'Packet number is %r. Total packet number is %r'
% (self.packet_number, self.total_packet_number))
self.payload = payload
self.payload_length = len(payload)
if self.payload_length > self._max_payload_size:
raise ValueError('Payload length is greater than max_payload_size. \n Length is %r'
% self.payload_length)
self.payload_crc = get_crc(payload)
self.start_byte = self._valid_start_byte
@property
def total_packet_length(self):
        return self.header_length + self.payload_length + 2  # 2 is length of CRC
def __repr__(self):
payload = None
try:
payload = self.payload[:10]
except Exception:
pass
return '<FilePacket File_id: %r Packet Number %r of %r...> \n First 10 bytes: %r' % (
self.file_id, self.packet_number, self.total_packet_number, payload)
def from_buffer(self, buffer):
"""
Decode and validate the given buffer and update the class attributes accordingly
Parameters
----------
buffer : str
buffer to decode as a packet
"""
if len(buffer) < self._minimum_buffer_length:
raise PacketInsufficientLengthError(
"Buffer of length %d is too short to contain a packet (minimum length is %d)" %
(len(buffer), self._minimum_buffer_length))
self.start_byte, self.file_id, self.packet_number, self.total_packet_number, self.payload_length = struct.unpack(
self._header_format_string, buffer[:self.header_length])
if self.payload_length > self._max_payload_size:
raise PacketValidityError(
"Payload length %d is greater than maximum payload size %d. First 15 bytes of buffer are %r" % (
self.payload_length, self._max_payload_size, buffer[:15]))
crc_index = self.header_length + self.payload_length
if crc_index > len(buffer):
            raise PacketInsufficientLengthError('Buffer of length %d is too short to contain complete packet '
                                                '(header, payload, CRC). Minimum length is %d'
                                                % (len(buffer), (self.header_length + self.payload_length + 2)))
payload = buffer[self.header_length:crc_index]
if len(payload) != self.payload_length:
raise PacketValidityError("Payload length %d does not match length field value %d"
% (len(payload), self.payload_length))
payload_crc = get_crc(payload)
crc_bytes = buffer[crc_index:crc_index + 2]
if len(crc_bytes) < 2:
raise PacketInsufficientLengthError("Buffer length insufficient to contain complete CRC.")
buffer_crc, = struct.unpack('>1H', crc_bytes)
if payload_crc != buffer_crc:
raise PacketChecksumError("Payload CRC %d does not match CRC field value %d \n Packet: %r" %
(payload_crc, buffer_crc, self))
self.payload = payload
self.payload_crc = payload_crc
def to_buffer(self):
"""
Construct the packet string
Returns
-------
buffer : string containing the packet
"""
assert (self.file_id is not None) and (self.packet_number is not None) and (
self.total_packet_number is not None) and (self.payload is not None)
header = struct.pack(self._header_format_string, self.start_byte, self.file_id,
self.packet_number, self.total_packet_number, self.payload_length)
return header + self.payload + struct.pack('>1H', self.payload_crc)
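# Illustrative sketch (not in the original source): split a blob of bytes into
# FilePackets of at most _max_payload_size bytes each. The file_id here is an
# arbitrary example value; packet numbering is 0-based, matching the
# packet_number < total_packet_number constraint enforced by the constructor.
def _demo_split_into_file_packets(data, file_id=101, chunk_size=1500):
    chunks = [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]
    return [FilePacket(file_id=file_id, packet_number=n,
                       total_packet_number=len(chunks), payload=chunk)
            for n, chunk in enumerate(chunks)]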
class CommandPacket(object):
_header_format_table = [('1B', 'start_byte'),
('1B', 'identifier'),
('1B', 'length'),
]
_header_format_string = '>' + ''.join([format_ for format_, _ in _header_format_table])
header_length = struct.calcsize(_header_format_string)
    _subheader_format_table = [('1H', 'sequence_number'),
    def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
:param initial_state_path: path to the initial state (if none, use default)
:param idsgame_config: configuration of the environment (if not specified a default config is used)
"""
from gym_idsgame.agents.bot_agents.attack_maximal_value_bot_agent import AttackMaximalValueBotAgent
if idsgame_config is None:
game_config = GameConfig(num_layers=1, num_servers_per_layer=2, num_attack_types=4, max_value=9,
min_random_a_val=0, min_random_d_val=7, min_random_det_val=1,
reconnaissance_actions=True)
game_config.set_initial_state(defense_val=9, attack_val=0, num_vulnerabilities_per_node=1, det_val=1,
vulnerability_val=1, num_vulnerabilities_per_layer=1,
randomize_visibility=True, visibility_p=0.0)
game_config.dense_rewards_v3 = True
game_config.network_config.fully_observed = False
game_config.reconnaissance_actions = True
game_config.set_attack_actions(local_view=False)
if initial_state_path is not None:
game_config.set_load_initial_state(initial_state_path)
attacker_agent = AttackMaximalValueBotAgent(game_config, self)
idsgame_config = IdsGameConfig(game_config=game_config, attacker_agent=attacker_agent)
idsgame_config.render_config.caption = "idsgame-maximal_attack-v19"
idsgame_config.randomize_env = True
idsgame_config.randomize_starting_position = True
idsgame_config.reconnaissance_bool_features = True
idsgame_config.local_view_observations = False
idsgame_config.reconnaissance_actions = True
idsgame_config.reconnaissance_reward = False
idsgame_config.randomize_visibility = False
idsgame_config.visibility_p = 0.0
super().__init__(idsgame_config=idsgame_config, save_dir=save_dir)
class IdsGameV19Env(AttackDefenseEnv):
"""
[AttackDefenseEnv] 1 layer, 1 server per layer, 7 attack-defense-values
[Initial State] Defense: 7, Attack:0, Num vulnerabilities: 1, Det: 1, Vulnerability value: 1
[Rewards] Dense
[Version] 19
[Observations] partially observed
[Environment] Random
[Local View] Yes
[Reconnaissance bool features] Yes
[Attacker Starting Position] Random
[Reconnaissance activities] enabled
"""
def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
:param initial_state_path: path to the initial state (if none, use default)
:param idsgame_config: configuration of the environment (if not specified a default config is used)
"""
if idsgame_config is None:
game_config = GameConfig(num_layers=1, num_servers_per_layer=2, num_attack_types=4, max_value=9,
min_random_a_val=0, min_random_d_val=7, min_random_det_val=1,
reconnaissance_actions=True)
game_config.set_initial_state(defense_val=9, attack_val=0, num_vulnerabilities_per_node=1, det_val=1,
vulnerability_val=1, num_vulnerabilities_per_layer=1,
randomize_visibility=True, visibility_p=0.0)
game_config.dense_rewards_v3 = True
game_config.network_config.fully_observed = False
game_config.reconnaissance_actions = True
game_config.set_attack_actions(local_view=False)
if initial_state_path is not None:
game_config.set_load_initial_state(initial_state_path)
idsgame_config = IdsGameConfig(game_config=game_config)
idsgame_config.render_config.caption = "idsgame-v19"
idsgame_config.randomize_env = True
idsgame_config.randomize_starting_position = True
idsgame_config.reconnaissance_bool_features = True
idsgame_config.local_view_observations = False
idsgame_config.reconnaissance_actions = True
idsgame_config.reconnaissance_reward = False
idsgame_config.randomize_visibility = False
idsgame_config.visibility_p = 0.0
super().__init__(idsgame_config=idsgame_config, save_dir=save_dir)
# -------- Version 20 ------------
class IdsGameRandomDefenseV20Env(AttackerEnv):
"""
[AttackerEnv] 1 layer, 1 server per layer, 7 attack-defense-values, random defender
[Initial State] Defense: 7, Attack:0, Num vulnerabilities: 1, Det: 1, Vulnerability value: 1
[Rewards] Dense
[Version] 20
[Observations] partially observed
[Environment] Random
[Local View] Yes
[Attacker Starting Position] Random
[Reconnaissance activities] enabled
[Reconnaissance bool features] Yes
"""
def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
:param initial_state_path: path to the initial state (if none, use default)
:param idsgame_config: configuration of the environment (if not specified a default config is used)
"""
from gym_idsgame.agents.bot_agents.random_defense_bot_agent import RandomDefenseBotAgent
if idsgame_config is None:
game_config = GameConfig(num_layers=1, num_servers_per_layer=2, num_attack_types=4, max_value=9,
min_random_a_val=0, min_random_d_val=3, min_random_det_val=1,
reconnaissance_actions=True)
game_config.set_initial_state(defense_val=9, attack_val=0, num_vulnerabilities_per_node=1, det_val=1,
vulnerability_val=1, num_vulnerabilities_per_layer=2,
randomize_visibility=False, visibility_p=0.0)
game_config.dense_rewards_v3 = True
game_config.network_config.fully_observed = False
game_config.reconnaissance_actions = True
game_config.set_attack_actions(local_view=False)
if initial_state_path is not None:
game_config.set_load_initial_state(initial_state_path)
defender_agent = RandomDefenseBotAgent(game_config)
idsgame_config = IdsGameConfig(game_config=game_config, defender_agent=defender_agent)
idsgame_config.render_config.caption = "idsgame-random_defense-v20"
idsgame_config.randomize_env = True
idsgame_config.randomize_starting_position = True
idsgame_config.local_view_observations = False
idsgame_config.reconnaissance_bool_features = True
idsgame_config.reconnaissance_actions = True
idsgame_config.reconnaissance_reward = False
idsgame_config.randomize_visibility = False
idsgame_config.visibility_p = 0.0
super().__init__(idsgame_config=idsgame_config, save_dir=save_dir)
class IdsGameMinimalDefenseV20Env(AttackerEnv):
"""
[AttackerEnv] 1 layer, 1 server per layer, 7 attack-defense-values, defender following the "defend minimal strategy"
[Initial State] Defense: 7, Attack:0, Num vulnerabilities: 1, Det: 1, Vulnerability value: 1
[Rewards] Dense
[Version] 20
[Observations] Partially observed
[Environment] Random
[Local View] Yes
[Attacker Starting Position] Random
[Reconnaissance activities] enabled
[Reconnaissance bool features] Yes
"""
def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
:param initial_state_path: path to the initial state (if none, use default)
:param idsgame_config: configuration of the environment (if not specified a default config is used)
"""
from gym_idsgame.agents.bot_agents.defend_minimal_value_bot_agent import DefendMinimalValueBotAgent
if idsgame_config is None:
game_config = GameConfig(num_layers=1, num_servers_per_layer=2, num_attack_types=4, max_value=9,
min_random_a_val=0, min_random_d_val=3, min_random_det_val=1,
reconnaissance_actions=True)
game_config.set_initial_state(defense_val=9, attack_val=0, num_vulnerabilities_per_node=1, det_val=1,
vulnerability_val=1, num_vulnerabilities_per_layer=2,
randomize_visibility=False, visibility_p=0.0)
game_config.dense_rewards_v3 = True
game_config.network_config.fully_observed = False
game_config.reconnaissance_actions = True
if initial_state_path is not None:
game_config.set_load_initial_state(initial_state_path)
defender_agent = DefendMinimalValueBotAgent(game_config)
idsgame_config = IdsGameConfig(game_config=game_config, defender_agent=defender_agent)
idsgame_config.render_config.caption = "idsgame-minimal_defense-v20"
idsgame_config.randomize_env = True
idsgame_config.randomize_starting_position = True
idsgame_config.local_view_observations = False
idsgame_config.reconnaissance_bool_features = True
idsgame_config.reconnaissance_actions = True
idsgame_config.randomize_visibility = False
idsgame_config.visibility_p = 0.0
idsgame_config.reconnaissance_detection_factor = 1
super().__init__(idsgame_config=idsgame_config, save_dir=save_dir)
class IdsGameRandomAttackV20Env(DefenderEnv):
"""
[DefenderEnv] 1 layer, 1 server per layer, 7 attack-defense-values
[Initial State] Defense: 7, Attack:0, Num vulnerabilities: 1, Det: 1, Vulnerability value: 1
[Rewards] Dense
[Version] 20
[Observations] partially observed
[Environment] Random
[Local View] Yes
[Attacker Starting Position] Random
[Reconnaissance activities] enabled
[Reconnaissance bool features] Yes
"""
def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
:param initial_state_path: path to the initial state (if none, use default)
:param idsgame_config: configuration of the environment (if not specified a default config is used)
"""
from gym_idsgame.agents.bot_agents.random_attack_bot_agent import RandomAttackBotAgent
if idsgame_config is None:
game_config = GameConfig(num_layers=1, num_servers_per_layer=2, num_attack_types=4, max_value=9,
min_random_a_val=0, min_random_d_val=3, min_random_det_val=1,
reconnaissance_actions=True)
game_config.set_initial_state(defense_val=9, attack_val=0, num_vulnerabilities_per_node=1, det_val=1,
vulnerability_val=1, num_vulnerabilities_per_layer=2,
randomize_visibility=False, visibility_p=0.0)
game_config.dense_rewards_v3 = True
game_config.network_config.fully_observed = False
game_config.reconnaissance_actions = True
game_config.set_attack_actions(local_view=False)
if initial_state_path is not None:
game_config.set_load_initial_state(initial_state_path)
attacker_agent = RandomAttackBotAgent(game_config, self)
idsgame_config = IdsGameConfig(game_config=game_config, attacker_agent=attacker_agent)
idsgame_config.render_config.caption = "idsgame-random_attack-v20"
idsgame_config.randomize_env = True
idsgame_config.randomize_starting_position = True
idsgame_config.local_view_observations = False
idsgame_config.reconnaissance_bool_features = True
idsgame_config.reconnaissance_actions = True
idsgame_config.reconnaissance_reward = False
idsgame_config.randomize_visibility = False
idsgame_config.visibility_p = 0.0
super().__init__(idsgame_config=idsgame_config, save_dir=save_dir)
class IdsGameMaximalAttackV20Env(DefenderEnv):
"""
[DefenderEnv] 1 layer, 1 server per layer, 7 attack-defense-values
[Initial State] Defense: 7, Attack:0, Num vulnerabilities: 1, Det: 1, Vulnerability value: 1
[Rewards] Dense
[Version] 20
[Observations] partially observed
[Environment] Random
[Local View] Yes
[Attacker Starting Position] Random
[Reconnaissance activities] enabled
[Reconnaissance bool features] Yes
"""
def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
:param initial_state_path: path to the initial state (if none, use default)
:param idsgame_config: configuration of the environment (if not specified a default config is used)
"""
from gym_idsgame.agents.bot_agents.attack_maximal_value_bot_agent import AttackMaximalValueBotAgent
if idsgame_config is None:
game_config = GameConfig(num_layers=1, num_servers_per_layer=2, num_attack_types=4, max_value=9,
min_random_a_val=0, min_random_d_val=3, min_random_det_val=1,
reconnaissance_actions=True)
game_config.set_initial_state(defense_val=9, attack_val=0, num_vulnerabilities_per_node=1, det_val=1,
vulnerability_val=1, num_vulnerabilities_per_layer=2,
randomize_visibility=False, visibility_p=0.0)
game_config.dense_rewards_v3 = True
game_config.network_config.fully_observed = False
game_config.reconnaissance_actions = True
game_config.set_attack_actions(local_view=False)
if initial_state_path is not None:
game_config.set_load_initial_state(initial_state_path)
attacker_agent = AttackMaximalValueBotAgent(game_config, self)
idsgame_config = IdsGameConfig(game_config=game_config, attacker_agent=attacker_agent)
idsgame_config.render_config.caption = "idsgame-maximal_attack-v20"
idsgame_config.randomize_env = True
idsgame_config.randomize_starting_position = True
idsgame_config.reconnaissance_bool_features = True
idsgame_config.local_view_observations = False
idsgame_config.reconnaissance_actions = True
idsgame_config.reconnaissance_reward = False
idsgame_config.randomize_visibility = False
idsgame_config.visibility_p = 0.0
super().__init__(idsgame_config=idsgame_config, save_dir=save_dir)
class IdsGameV20Env(AttackDefenseEnv):
"""
[AttackDefenseEnv] 1 layer, 1 server per layer, 7 attack-defense-values
[Initial State] Defense: 7, Attack:0, Num vulnerabilities: 1, Det: 1, Vulnerability value: 1
[Rewards] Dense
[Version] 20
[Observations] partially observed
[Environment] Random
[Local View] Yes
[Reconnaissance bool features] Yes
[Attacker Starting Position] Random
[Reconnaissance activities] enabled
"""
def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
:param initial_state_path: path to the initial state (if none, use default)
:param idsgame_config: configuration of the environment (if not specified a default config is used)
"""
if idsgame_config is None:
game_config = GameConfig(num_layers=1, num_servers_per_layer=2, num_attack_types=4, max_value=9,
min_random_a_val=0, min_random_d_val=3, min_random_det_val=1,
reconnaissance_actions=True)
game_config.set_initial_state(defense_val=9, attack_val=0, num_vulnerabilities_per_node=1, det_val=1,
vulnerability_val=1, num_vulnerabilities_per_layer=2,
randomize_visibility=False, visibility_p=0.0)
game_config.dense_rewards_v3 = True
game_config.network_config.fully_observed = False
game_config.reconnaissance_actions = True
game_config.set_attack_actions(local_view=False)
if initial_state_path is not None:
game_config.set_load_initial_state(initial_state_path)
idsgame_config = IdsGameConfig(game_config=game_config)
            idsgame_config.render_config.caption = "idsgame-v20"
idsgame_config.randomize_env = True
idsgame_config.randomize_starting_position = True
idsgame_config.reconnaissance_bool_features = True
idsgame_config.local_view_observations = False
idsgame_config.reconnaissance_actions = True
idsgame_config.reconnaissance_reward = False
idsgame_config.randomize_visibility = False
idsgame_config.visibility_p = 0.0
super().__init__(idsgame_config=idsgame_config, save_dir=save_dir)
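# Illustrative usage sketch (not part of the original source): these
# environments follow the classic OpenAI Gym API, so an interaction loop looks
# like the following. For AttackDefenseEnv variants the sampled action is a
# joint attacker/defender action; the save_dir path is an arbitrary example.
def _demo_random_episode():
    env = IdsGameV20Env(save_dir="/tmp/idsgame")
    _ = env.reset()
    done = False
    while not done:
        action = env.action_space.sample()  # random action from the env's space
        _, reward, done, info = env.step(action)
    env.close()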
# -------- Version 21 ------------
class IdsGameRandomDefenseV21Env(AttackerEnv):
"""
[AttackerEnv] 1 layer, 1 server per layer, 7 attack-defense-values, random defender
[Initial State] Defense: 7, Attack:0, Num vulnerabilities: 1, Det: 1, Vulnerability value: 1
[Rewards] Dense
[Version] 21
[Observations] partially observed
[Environment] Random
[Local View] Yes
[Attacker Starting Position] Random
[Reconnaissance activities] enabled
[Reconnaissance bool features] Yes
"""
def __init__(self, idsgame_config: IdsGameConfig = None, save_dir: str = None, initial_state_path: str = None):
"""
Initialization of the environment
:param save_dir: directory to save outputs of the env
| |
# Source repository: stevencox/kgx
import itertools
import click, rdflib, os, uuid
import networkx as nx
from typing import Tuple, Union, Set, List, Dict, Any, Iterator, Optional
from rdflib import Namespace, URIRef, Literal
from rdflib.namespace import RDF, RDFS, OWL
from kgx.config import get_logger
from kgx.prefix_manager import PrefixManager
from kgx.transformers.transformer import Transformer
from kgx.transformers.rdf_graph_mixin import RdfGraphMixin
from kgx.utils.rdf_utils import property_mapping, reverse_property_mapping
from kgx.utils.kgx_utils import get_toolkit, get_biolink_node_properties, get_biolink_edge_properties, \
current_time_in_millis, get_biolink_association_types, get_biolink_property_types, apply_filters, \
generate_edge_identifiers, generate_uuid
log = get_logger()
class RdfTransformer(RdfGraphMixin, Transformer):
"""
Transformer that parses RDF and loads triples, as nodes and edges, into a networkx.MultiDiGraph
This is the base class which is used to implement other RDF-based transformers.
Parameters
----------
source_graph: Optional[networkx.MultiDiGraph]
The source graph
curie_map: Optional[Dict]
A curie map that maps non-canonical CURIEs to IRIs
"""
def __init__(self, source_graph: nx.MultiDiGraph = None, curie_map: Optional[Dict] = None):
super().__init__(source_graph, curie_map)
self.toolkit = get_toolkit()
self.node_properties = set([URIRef(self.prefix_manager.expand(x)) for x in get_biolink_node_properties()])
self.node_properties.update(get_biolink_node_properties())
self.node_properties.update(get_biolink_edge_properties())
self.node_properties.add(URIRef(self.prefix_manager.expand('biolink:provided_by')))
self.reification_types = {RDF.Statement, self.BIOLINK.Association, self.OBAN.association}
self.reification_predicates = {
self.BIOLINK.subject, self.BIOLINK.predicate, self.BIOLINK.object,
RDF.subject, RDF.object, RDF.predicate,
self.OBAN.association_has_subject, self.OBAN.association_has_predicate, self.OBAN.association_has_object
}
self.reified_nodes: Set = set()
self.start: int = 0
self.count: int = 0
self.property_types: Dict = get_biolink_property_types()
self.node_filters: Dict[str, Union[str, Set]] = {}
self.edge_filters: Dict[str, Union[str, Set]] = {}
def set_predicate_mapping(self, m: Dict) -> None:
"""
Set predicate mappings.
Use this method to update predicate mappings for predicates that are
not in Biolink Model.
Parameters
----------
m: Dict
A dictionary where the keys are IRIs and values are their corresponding property names
"""
for k, v in m.items():
self.predicate_mapping[URIRef(k)] = v
self.reverse_predicate_mapping[v] = URIRef(k)
def set_property_types(self, m: Dict) -> None:
"""
Set property types.
Use this method to populate type information for properties that are
not in Biolink Model.
Parameters
----------
m: Dict
A dictionary where the keys are property URI and values are the type
"""
for k, v in m.items():
(element_uri, canonical_uri, predicate, property_name) = self.process_predicate(k)
if element_uri:
key = element_uri
elif predicate:
key = predicate
else:
key = property_name
self.property_types[key] = v
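    # Illustrative usage sketch (not part of the original source; shown as a
    # comment because it belongs at module level). The IRI and property name
    # are made-up examples of a non-Biolink predicate:
    #
    #   transformer = RdfTransformer()
    #   transformer.set_predicate_mapping(
    #       {'http://example.org/vocab/confidence': 'confidence_score'})
    #   transformer.set_property_types(
    #       {'http://example.org/vocab/confidence': 'xsd:float'})
    #
    # After this, triples using the example IRI parse into a 'confidence_score'
    # property, and on export the property serializes back to the same IRI.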
def parse(self, filename: str, input_format: Optional[str] = None, compression: Optional[str] = None, provided_by: Optional[str] = None, node_property_predicates: Optional[Set[str]] = None) -> None:
"""
Parse a file, containing triples, into a rdflib.Graph
The file can be either a 'turtle' file or any other format supported by rdflib.
Parameters
----------
        filename : str
File to read from.
input_format : Optional[str]
The input file format.
If ``None`` is provided then the format is guessed using ``rdflib.util.guess_format()``
compression: Optional[str]
The compression type. For example, ``gz``
provided_by : Optional[str]
Define the source providing the input file.
node_property_predicates: Optional[Set[str]]
A set of rdflib.URIRef representing predicates that are to be treated as node properties
"""
rdfgraph = rdflib.Graph()
if node_property_predicates:
self.node_properties.update([URIRef(self.prefix_manager.expand(x)) for x in node_property_predicates])
if compression:
log.warning(f"compression mode '{compression}' not supported by RdfTransformer")
if input_format is None:
input_format = rdflib.util.guess_format(filename)
log.info("Parsing {} with '{}' format".format(filename, input_format))
rdfgraph.parse(filename, format=input_format)
log.info("{} parsed with {} triples".format(filename, len(rdfgraph)))
if provided_by:
self.graph_metadata['provided_by'] = [provided_by]
self.start = current_time_in_millis()
self.load_networkx_graph(rdfgraph)
log.info(f"Done parsing {filename}")
apply_filters(self.graph, self.node_filters, self.edge_filters)
generate_edge_identifiers(self.graph)
def load_networkx_graph(self, rdfgraph: rdflib.Graph, predicates: Optional[Set[URIRef]] = None, **kwargs: Dict) -> None:
"""
Walk through the rdflib.Graph and load all required triples into networkx.MultiDiGraph
Parameters
----------
rdfgraph: rdflib.Graph
Graph containing nodes and edges
predicates: Optional[Set[URIRef]]
A set containing predicates in rdflib.URIRef form
kwargs: Dict
Any additional arguments
"""
self.reified_nodes.clear()
for s, p, o in rdfgraph.triples((None, None, None)):
self.triple(s, p, o)
self.dereify(self.reified_nodes)
def triple(self, s: URIRef, p: URIRef, o: URIRef) -> None:
"""
Parse a triple.
Parameters
----------
s: URIRef
Subject
p: URIRef
Predicate
o: URIRef
Object
"""
self.count += 1
(element_uri, canonical_uri, predicate, property_name) = self.process_predicate(p)
if element_uri:
prop_uri = element_uri
elif predicate:
prop_uri = predicate
else:
prop_uri = property_name
if s in self.reified_nodes:
# subject is a reified node
self.add_node_attribute(s, key=prop_uri, value=o)
elif p in self.reification_predicates:
# subject is a reified node
self.reified_nodes.add(s)
self.add_node_attribute(s, key=prop_uri, value=o)
elif property_name in {'subject', 'edge_label', 'object', 'predicate', 'relation'}:
# subject is a reified node
self.reified_nodes.add(s)
self.add_node_attribute(s, key=prop_uri, value=o)
elif o in self.reification_types:
# subject is a reified node
self.reified_nodes.add(s)
self.add_node_attribute(s, key=prop_uri, value=o)
elif element_uri and element_uri in self.node_properties:
# treating predicate as a node property
self.add_node_attribute(s, key=prop_uri, value=o)
elif p in self.node_properties \
or predicate in self.node_properties \
or property_name in self.node_properties:
# treating predicate as a node property
self.add_node_attribute(s, key=prop_uri, value=o)
elif isinstance(o, rdflib.term.Literal):
self.add_node_attribute(s, key=prop_uri, value=o)
else:
# treating predicate as an edge
self.add_edge(s, o, p)
if self.count % 1000 == 0:
log.debug(f"Parsed {self.count} triples; time taken: {current_time_in_millis() - self.start} ms")
self.start = current_time_in_millis()
def dereify(self, nodes: Set[str]) -> None:
"""
Dereify a set of nodes where each node has all the properties
necessary to create an edge.
Parameters
----------
nodes: Set[str]
A set of nodes
"""
log.info(f"Dereifying {len(nodes)} nodes")
while nodes:
n = nodes.pop()
n_curie = self.prefix_manager.contract(str(n))
node = self.graph.nodes[n_curie]
if 'edge_label' not in node:
node['edge_label'] = "biolink:related_to"
if 'relation' not in node:
node['relation'] = node['edge_label']
if 'category' in node:
del node['category']
if 'subject' in node and 'object' in node:
self.add_edge(node['subject'], node['object'], node['edge_label'], node)
self.graph.remove_node(n_curie)
else:
log.warning(f"Cannot dereify node {n} {node}")
def reify(self, u: str, v: str, k: str, data: Dict) -> Dict:
"""
Create a node representation of an edge.
Parameters
----------
u: str
Subject
v: str
Object
k: str
Edge key
data: Dict
Edge data
Returns
-------
Dict
The reified node
"""
s = self.uriref(u)
p = self.uriref(data['edge_label'])
o = self.uriref(v)
if 'id' in data:
node_id = self.uriref(data['id'])
else:
# generate a UUID for the reified node
node_id = self.uriref(generate_uuid())
reified_node = data.copy()
if 'category' in reified_node:
del reified_node['category']
reified_node['id'] = node_id
reified_node['type'] = 'biolink:Association'
reified_node['subject'] = s
reified_node['edge_label'] = p
reified_node['object'] = o
return reified_node
def save(self, filename: str, output_format: str = "turtle", compression: Optional[str] = None, reify_all_edges: bool = False, **kwargs) -> None:
"""
Transform networkx.MultiDiGraph into rdflib.Graph and export
this graph as a file (``turtle``, by default).
Parameters
----------
filename: str
Filename to write to
output_format: str
The output format; default: ``turtle``
compression: Optional[str]
The compression type. Not yet supported.
reify_all_edges: bool
Whether to reify all edges in the graph
kwargs: Dict
Any additional arguments
"""
# Make a new rdflib.Graph() instance to generate RDF triples
rdfgraph = rdflib.Graph()
rdfgraph.bind('', str(self.DEFAULT))
rdfgraph.bind('OBO', str(self.OBO))
rdfgraph.bind('biolink', str(self.BIOLINK))
nodes_generator = self.export_nodes()
edges_generator = self.export_edges(reify_all_edges)
generator = itertools.chain(nodes_generator, edges_generator)
for t in generator:
rdfgraph.add(t)
# Serialize the graph into the file.
rdfgraph.serialize(destination=filename, format=output_format)
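    # Illustrative end-to-end sketch (not part of the original source; shown as
    # a comment because it belongs at module level). Filenames and formats are
    # example values; reify_all_edges=True keeps edge properties across the
    # round trip:
    #
    #   transformer = RdfTransformer()
    #   transformer.parse('input.ttl', input_format='turtle')
    #   transformer.save('output.nt', output_format='nt', reify_all_edges=True)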
def export_nodes(self) -> Iterator:
"""
        Export nodes and their attributes as triples.
        This method yields a 3-tuple of (subject, predicate, object).
Returns
-------
Iterator
An iterator
"""
for n, data in self.graph.nodes(data=True):
s = self.uriref(n)
for k, v in data.items():
if k in {'id', 'iri'}:
continue
(element_uri, canonical_uri, predicate, property_name) = self.process_predicate(k)
if element_uri is None:
# not a biolink predicate
if k in self.reverse_predicate_mapping:
prop_uri = self.reverse_predicate_mapping[k]
#prop_uri = self.prefix_manager.contract(prop_uri)
else:
prop_uri = k
else:
prop_uri = canonical_uri if canonical_uri else element_uri
prop_type = self._get_property_type(prop_uri)
log.debug(f"prop {k} has prop_uri {prop_uri} and prop_type {prop_type}")
prop_uri = self.uriref(prop_uri)
if isinstance(v, (list, set, tuple)):
for x in v:
value_uri = self._prepare_object(k, prop_type, x)
yield (s, prop_uri, value_uri)
else:
value_uri = self._prepare_object(k, prop_type, v)
yield (s, prop_uri, value_uri)
def export_edges(self, reify_all_edges: bool = False) -> Iterator:
"""
        Export edges and their attributes as triples.
        This method yields a 3-tuple of (subject, predicate, object).
Parameters
----------
reify_all_edges: bool
Whether to reify all edges in the graph
Returns
-------
Iterator
An iterator
"""
ecache = []
associations = set([self.prefix_manager.contract(x) for x in self.reification_types])
associations.update([str(x) for x in get_biolink_association_types()])
for u, v, k, data in self.graph.edges(data=True, keys=True):
if reify_all_edges:
reified_node = self.reify(u, v, k, data)
s = reified_node['subject']
p = reified_node['edge_label']
o = reified_node['object']
ecache.append((s, p, o))
n = reified_node['id']
for prop, value in reified_node.items():
if prop in {'id', 'association_id', 'edge_key'}:
continue
(element_uri, canonical_uri, predicate, property_name) = self.process_predicate(prop)
if element_uri:
prop_uri = canonical_uri if canonical_uri else element_uri
else:
if prop in self.reverse_predicate_mapping:
prop_uri = self.reverse_predicate_mapping[prop]
#prop_uri = self.prefix_manager.contract(prop_uri)
else:
prop_uri = predicate
prop_type = self._get_property_type(prop)
log.debug(f"prop {prop} has prop_uri {prop_uri} and prop_type | |
def GuiResource_clear_iconrc():
    return _gui.GuiResource_clear_iconrc()
GuiResource_clear_iconrc = _gui.GuiResource_clear_iconrc
def GuiResource_trash_iconrc():
return _gui.GuiResource_trash_iconrc()
GuiResource_trash_iconrc = _gui.GuiResource_trash_iconrc
def GuiResource_magnifier_iconrc():
return _gui.GuiResource_magnifier_iconrc()
GuiResource_magnifier_iconrc = _gui.GuiResource_magnifier_iconrc
def GuiResource_open_iconrc():
return _gui.GuiResource_open_iconrc()
GuiResource_open_iconrc = _gui.GuiResource_open_iconrc
def GuiResource_save_iconrc():
return _gui.GuiResource_save_iconrc()
GuiResource_save_iconrc = _gui.GuiResource_save_iconrc
def GuiResource_mb_info_iconrc():
return _gui.GuiResource_mb_info_iconrc()
GuiResource_mb_info_iconrc = _gui.GuiResource_mb_info_iconrc
def GuiResource_eye_iconrc():
return _gui.GuiResource_eye_iconrc()
GuiResource_eye_iconrc = _gui.GuiResource_eye_iconrc
def GuiResource_eye_selected_iconrc():
return _gui.GuiResource_eye_selected_iconrc()
GuiResource_eye_selected_iconrc = _gui.GuiResource_eye_selected_iconrc
def GuiResource_class_icon_iconrc():
return _gui.GuiResource_class_icon_iconrc()
GuiResource_class_icon_iconrc = _gui.GuiResource_class_icon_iconrc
def GuiResource_unknown_iconrc():
return _gui.GuiResource_unknown_iconrc()
GuiResource_unknown_iconrc = _gui.GuiResource_unknown_iconrc
def GuiResource_flag_source_iconrc():
return _gui.GuiResource_flag_source_iconrc()
GuiResource_flag_source_iconrc = _gui.GuiResource_flag_source_iconrc
def GuiResource_attribute_iconrc():
return _gui.GuiResource_attribute_iconrc()
GuiResource_attribute_iconrc = _gui.GuiResource_attribute_iconrc
def GuiResource_context_iconrc():
return _gui.GuiResource_context_iconrc()
GuiResource_context_iconrc = _gui.GuiResource_context_iconrc
def GuiResource_flag_localized_iconrc():
return _gui.GuiResource_flag_localized_iconrc()
GuiResource_flag_localized_iconrc = _gui.GuiResource_flag_localized_iconrc
def GuiResource_flag_instance_iconrc():
return _gui.GuiResource_flag_instance_iconrc()
GuiResource_flag_instance_iconrc = _gui.GuiResource_flag_instance_iconrc
def GuiResource_flag_reference_iconrc():
return _gui.GuiResource_flag_reference_iconrc()
GuiResource_flag_reference_iconrc = _gui.GuiResource_flag_reference_iconrc
def GuiResource_class_info():
return _gui.GuiResource_class_info()
GuiResource_class_info = _gui.GuiResource_class_info
def GuiResource____class_destructor__(instance, is_array):
return _gui.GuiResource____class_destructor__(instance, is_array)
GuiResource____class_destructor__ = _gui.GuiResource____class_destructor__
class GuiImage(base.EventObject):
__swig_setmethods__ = {}
for _s in [base.EventObject]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, GuiImage, name, value)
__swig_getmethods__ = {}
for _s in [base.EventObject]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, GuiImage, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _gui.new_GuiImage(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gui.delete_GuiImage
__del__ = lambda self: None
def get_width(self):
return _gui.GuiImage_get_width(self)
def get_height(self):
return _gui.GuiImage_get_height(self)
def get_depth(self):
return _gui.GuiImage_get_depth(self)
def get_data(self, *args):
return _gui.GuiImage_get_data(self, *args)
def update(self, *args):
return _gui.GuiImage_update(self, *args)
def set_data(self, *args):
return _gui.GuiImage_set_data(self, *args)
def set_image(self, image, color_space):
return _gui.GuiImage_set_image(self, image, color_space)
def draw_hidden(self, x, y):
return _gui.GuiImage_draw_hidden(self, x, y)
def draw_highlight(self, x, y):
return _gui.GuiImage_draw_highlight(self, x, y)
def draw_disabled(self, *args):
return _gui.GuiImage_draw_disabled(self, *args)
def fast_draw(self, widget, x, y, cx, cy, width, height):
return _gui.GuiImage_fast_draw(self, widget, x, y, cx, cy, width, height)
def draw(self, *args):
return _gui.GuiImage_draw(self, *args)
def fill(self, r, g, b):
return _gui.GuiImage_fill(self, r, g, b)
def modulate(self, r, g, b):
return _gui.GuiImage_modulate(self, r, g, b)
if _newclass:
class_info = staticmethod(_gui.GuiImage_class_info)
else:
class_info = _gui.GuiImage_class_info
if _newclass:
___class_destructor__ = staticmethod(_gui.GuiImage____class_destructor__)
else:
___class_destructor__ = _gui.GuiImage____class_destructor__
def get_class_info(self):
return _gui.GuiImage_get_class_info(self)
GuiImage_swigregister = _gui.GuiImage_swigregister
GuiImage_swigregister(GuiImage)
def GuiImage_class_info():
return _gui.GuiImage_class_info()
GuiImage_class_info = _gui.GuiImage_class_info
def GuiImage____class_destructor__(instance, is_array):
return _gui.GuiImage____class_destructor__(instance, is_array)
GuiImage____class_destructor__ = _gui.GuiImage____class_destructor__
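# Illustrative sketch (not in the original source): the SWIG wrapper above
# exposes GuiImage as a regular Python class, so pixel-level helpers can be
# called directly. The constructor arguments are guesses inferred from the
# width/height accessors and may differ in the actual API.
def _demo_solid_image():
    image = GuiImage(64, 64)   # assumed (width, height) constructor signature
    image.fill(255, 128, 0)    # flood-fill the image with an RGB color
    return image.get_width(), image.get_height()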
class GuiItemActions(base.CoreBaseType):
__swig_setmethods__ = {}
for _s in [base.CoreBaseType]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, GuiItemActions, name, value)
__swig_getmethods__ = {}
for _s in [base.CoreBaseType]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, GuiItemActions, name)
__repr__ = _swig_repr
if _newclass:
group_items = staticmethod(_gui.GuiItemActions_group_items)
else:
group_items = _gui.GuiItemActions_group_items
if _newclass:
combine_items = staticmethod(_gui.GuiItemActions_combine_items)
else:
combine_items = _gui.GuiItemActions_combine_items
if _newclass:
contextualize_items = staticmethod(_gui.GuiItemActions_contextualize_items)
else:
contextualize_items = _gui.GuiItemActions_contextualize_items
if _newclass:
create_shading_layer_for_items = staticmethod(_gui.GuiItemActions_create_shading_layer_for_items)
else:
create_shading_layer_for_items = _gui.GuiItemActions_create_shading_layer_for_items
if _newclass:
delete_items = staticmethod(_gui.GuiItemActions_delete_items)
else:
delete_items = _gui.GuiItemActions_delete_items
if _newclass:
trash_items = staticmethod(_gui.GuiItemActions_trash_items)
else:
trash_items = _gui.GuiItemActions_trash_items
if _newclass:
instantiate_items = staticmethod(_gui.GuiItemActions_instantiate_items)
else:
instantiate_items = _gui.GuiItemActions_instantiate_items
if _newclass:
context_to_reference = staticmethod(_gui.GuiItemActions_context_to_reference)
else:
context_to_reference = _gui.GuiItemActions_context_to_reference
if _newclass:
reference_to_context = staticmethod(_gui.GuiItemActions_reference_to_context)
else:
reference_to_context = _gui.GuiItemActions_reference_to_context
if _newclass:
revert_items_overrides = staticmethod(_gui.GuiItemActions_revert_items_overrides)
else:
revert_items_overrides = _gui.GuiItemActions_revert_items_overrides
if _newclass:
revert_sub_items_overrides = staticmethod(_gui.GuiItemActions_revert_sub_items_overrides)
else:
revert_sub_items_overrides = _gui.GuiItemActions_revert_sub_items_overrides
if _newclass:
copy_items = staticmethod(_gui.GuiItemActions_copy_items)
else:
copy_items = _gui.GuiItemActions_copy_items
if _newclass:
paste_items = staticmethod(_gui.GuiItemActions_paste_items)
else:
paste_items = _gui.GuiItemActions_paste_items
if _newclass:
cut_items = staticmethod(_gui.GuiItemActions_cut_items)
else:
cut_items = _gui.GuiItemActions_cut_items
if _newclass:
make_local_items = staticmethod(_gui.GuiItemActions_make_local_items)
else:
make_local_items = _gui.GuiItemActions_make_local_items
if _newclass:
make_instance_items = staticmethod(_gui.GuiItemActions_make_instance_items)
else:
make_instance_items = _gui.GuiItemActions_make_instance_items
if _newclass:
toggle_items_display = staticmethod(_gui.GuiItemActions_toggle_items_display)
else:
toggle_items_display = _gui.GuiItemActions_toggle_items_display
if _newclass:
enable_disable_items = staticmethod(_gui.GuiItemActions_enable_disable_items)
else:
enable_disable_items = _gui.GuiItemActions_enable_disable_items
if _newclass:
isolate_more_items = staticmethod(_gui.GuiItemActions_isolate_more_items)
else:
isolate_more_items = _gui.GuiItemActions_isolate_more_items
if _newclass:
isolate_less_items = staticmethod(_gui.GuiItemActions_isolate_less_items)
else:
isolate_less_items = _gui.GuiItemActions_isolate_less_items
if _newclass:
isolate_swap_items = staticmethod(_gui.GuiItemActions_isolate_swap_items)
else:
isolate_swap_items = _gui.GuiItemActions_isolate_swap_items
if _newclass:
colortag_items = staticmethod(_gui.GuiItemActions_colortag_items)
else:
colortag_items = _gui.GuiItemActions_colortag_items
if _newclass:
isolate_items = staticmethod(_gui.GuiItemActions_isolate_items)
else:
isolate_items = _gui.GuiItemActions_isolate_items
if _newclass:
is_context_writable = staticmethod(_gui.GuiItemActions_is_context_writable)
else:
is_context_writable = _gui.GuiItemActions_is_context_writable
if _newclass:
is_item_action_localize_allowed = staticmethod(_gui.GuiItemActions_is_item_action_localize_allowed)
else:
is_item_action_localize_allowed = _gui.GuiItemActions_is_item_action_localize_allowed
if _newclass:
is_item_action_make_instance_allowed = staticmethod(_gui.GuiItemActions_is_item_action_make_instance_allowed)
else:
is_item_action_make_instance_allowed = _gui.GuiItemActions_is_item_action_make_instance_allowed
if _newclass:
is_item_action_group_allowed = staticmethod(_gui.GuiItemActions_is_item_action_group_allowed)
else:
is_item_action_group_allowed = _gui.GuiItemActions_is_item_action_group_allowed
if _newclass:
is_item_action_combine_allowed = staticmethod(_gui.GuiItemActions_is_item_action_combine_allowed)
else:
is_item_action_combine_allowed = _gui.GuiItemActions_is_item_action_combine_allowed
if _newclass:
is_item_action_create_shading_layer_allowed = staticmethod(_gui.GuiItemActions_is_item_action_create_shading_layer_allowed)
else:
is_item_action_create_shading_layer_allowed = _gui.GuiItemActions_is_item_action_create_shading_layer_allowed
if _newclass:
is_item_action_copy_allowed = staticmethod(_gui.GuiItemActions_is_item_action_copy_allowed)
else:
is_item_action_copy_allowed = _gui.GuiItemActions_is_item_action_copy_allowed
if _newclass:
is_item_action_cut_allowed = staticmethod(_gui.GuiItemActions_is_item_action_cut_allowed)
else:
is_item_action_cut_allowed = _gui.GuiItemActions_is_item_action_cut_allowed
if _newclass:
is_item_action_paste_allowed = staticmethod(_gui.GuiItemActions_is_item_action_paste_allowed)
else:
is_item_action_paste_allowed = _gui.GuiItemActions_is_item_action_paste_allowed
if _newclass:
is_item_action_revert_overrides_allowed = staticmethod(_gui.GuiItemActions_is_item_action_revert_overrides_allowed)
else:
is_item_action_revert_overrides_allowed = _gui.GuiItemActions_is_item_action_revert_overrides_allowed
if _newclass:
is_item_action_delete_allowed = staticmethod(_gui.GuiItemActions_is_item_action_delete_allowed)
else:
is_item_action_delete_allowed = _gui.GuiItemActions_is_item_action_delete_allowed
if _newclass:
is_item_action_rename_allowed = staticmethod(_gui.GuiItemActions_is_item_action_rename_allowed)
else:
is_item_action_rename_allowed = _gui.GuiItemActions_is_item_action_rename_allowed
if _newclass:
is_item_action_disable_allowed = staticmethod(_gui.GuiItemActions_is_item_action_disable_allowed)
else:
is_item_action_disable_allowed = _gui.GuiItemActions_is_item_action_disable_allowed
if _newclass:
is_item_action_enable_allowed = staticmethod(_gui.GuiItemActions_is_item_action_enable_allowed)
else:
is_item_action_enable_allowed = _gui.GuiItemActions_is_item_action_enable_allowed
if _newclass:
is_item_action_move_allowed = staticmethod(_gui.GuiItemActions_is_item_action_move_allowed)
else:
is_item_action_move_allowed = _gui.GuiItemActions_is_item_action_move_allowed
if _newclass:
is_item_action_copy_to_allowed = staticmethod(_gui.GuiItemActions_is_item_action_copy_to_allowed)
else:
is_item_action_copy_to_allowed = _gui.GuiItemActions_is_item_action_copy_to_allowed
if _newclass:
is_item_action_instantiate_allowed = staticmethod(_gui.GuiItemActions_is_item_action_instantiate_allowed)
else:
is_item_action_instantiate_allowed = _gui.GuiItemActions_is_item_action_instantiate_allowed
if _newclass:
build_item_creation_menu = staticmethod(_gui.GuiItemActions_build_item_creation_menu)
else:
build_item_creation_menu = _gui.GuiItemActions_build_item_creation_menu
if _newclass:
populate_classes_categories = staticmethod(_gui.GuiItemActions_populate_classes_categories)
else:
populate_classes_categories = _gui.GuiItemActions_populate_classes_categories
if _newclass:
add_create_class = staticmethod(_gui.GuiItemActions_add_create_class)
else:
add_create_class = _gui.GuiItemActions_add_create_class
if _newclass:
get_class_creation_shortcut = staticmethod(_gui.GuiItemActions_get_class_creation_shortcut)
else:
get_class_creation_shortcut = _gui.GuiItemActions_get_class_creation_shortcut
if _newclass:
process_shortcut_creation_class = staticmethod(_gui.GuiItemActions_process_shortcut_creation_class)
else:
process_shortcut_creation_class = _gui.GuiItemActions_process_shortcut_creation_class
if _newclass:
on_create_item = staticmethod(_gui.GuiItemActions_on_create_item)
else:
on_create_item = _gui.GuiItemActions_on_create_item
if _newclass:
process_create_item = staticmethod(_gui.GuiItemActions_process_create_item)
else:
process_create_item = _gui.GuiItemActions_process_create_item
if _newclass:
on_create_context = staticmethod(_gui.GuiItemActions_on_create_context)
else:
on_create_context = _gui.GuiItemActions_on_create_context
if _newclass:
process_create_context = staticmethod(_gui.GuiItemActions_process_create_context)
else:
process_create_context = _gui.GuiItemActions_process_create_context
__swig_setmethods__["s_custom_data_preselected_item"] = _gui.GuiItemActions_s_custom_data_preselected_item_set
__swig_getmethods__["s_custom_data_preselected_item"] = _gui.GuiItemActions_s_custom_data_preselected_item_get
if _newclass:
s_custom_data_preselected_item = _swig_property(_gui.GuiItemActions_s_custom_data_preselected_item_get, _gui.GuiItemActions_s_custom_data_preselected_item_set)
if _newclass:
get_selection = staticmethod(_gui.GuiItemActions_get_selection)
else:
get_selection = _gui.GuiItemActions_get_selection
if _newclass:
has_nodal_preferred_position = staticmethod(_gui.GuiItemActions_has_nodal_preferred_position)
else:
has_nodal_preferred_position = _gui.GuiItemActions_has_nodal_preferred_position
if _newclass:
class_info = staticmethod(_gui.GuiItemActions_class_info)
else:
class_info = _gui.GuiItemActions_class_info
if _newclass:
___class_destructor__ = staticmethod(_gui.GuiItemActions____class_destructor__)
else:
___class_destructor__ = _gui.GuiItemActions____class_destructor__
def get_class_info(self):
return _gui.GuiItemActions_get_class_info(self)
def __init__(self):
this = _gui.new_GuiItemActions()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gui.delete_GuiItemActions
__del__ = lambda self: None
GuiItemActions_swigregister = _gui.GuiItemActions_swigregister
GuiItemActions_swigregister(GuiItemActions)
def GuiItemActions_group_items(active_widget):
return _gui.GuiItemActions_group_items(active_widget)
GuiItemActions_group_items = _gui.GuiItemActions_group_items
def GuiItemActions_combine_items(active_widget, mode=0):
return _gui.GuiItemActions_combine_items(active_widget, mode)
GuiItemActions_combine_items = _gui.GuiItemActions_combine_items
def GuiItemActions_contextualize_items(active_widget):
return _gui.GuiItemActions_contextualize_items(active_widget)
GuiItemActions_contextualize_items = _gui.GuiItemActions_contextualize_items
def GuiItemActions_create_shading_layer_for_items(active_widget, creation_mode):
return _gui.GuiItemActions_create_shading_layer_for_items(active_widget, creation_mode)
GuiItemActions_create_shading_layer_for_items = _gui.GuiItemActions_create_shading_layer_for_items
def GuiItemActions_delete_items(active_widget):
return _gui.GuiItemActions_delete_items(active_widget)
GuiItemActions_delete_items = _gui.GuiItemActions_delete_items
def GuiItemActions_trash_items(active_widget):
return _gui.GuiItemActions_trash_items(active_widget)
GuiItemActions_trash_items = _gui.GuiItemActions_trash_items
def GuiItemActions_instantiate_items(active_widget):
return _gui.GuiItemActions_instantiate_items(active_widget)
GuiItemActions_instantiate_items = _gui.GuiItemActions_instantiate_items
def GuiItemActions_context_to_reference(active_widget):
return _gui.GuiItemActions_context_to_reference(active_widget)
GuiItemActions_context_to_reference = _gui.GuiItemActions_context_to_reference
def GuiItemActions_reference_to_context(active_widget):
return _gui.GuiItemActions_reference_to_context(active_widget)
GuiItemActions_reference_to_context = _gui.GuiItemActions_reference_to_context
def GuiItemActions_revert_items_overrides(active_widget):
return _gui.GuiItemActions_revert_items_overrides(active_widget)
GuiItemActions_revert_items_overrides = _gui.GuiItemActions_revert_items_overrides
def GuiItemActions_revert_sub_items_overrides(active_widget):
return _gui.GuiItemActions_revert_sub_items_overrides(active_widget)
GuiItemActions_revert_sub_items_overrides = _gui.GuiItemActions_revert_sub_items_overrides
def GuiItemActions_copy_items(active_widget, include_dependencies=False):
return _gui.GuiItemActions_copy_items(active_widget, include_dependencies)
GuiItemActions_copy_items = _gui.GuiItemActions_copy_items
def GuiItemActions_paste_items(active_widget):
return _gui.GuiItemActions_paste_items(active_widget)
GuiItemActions_paste_items = _gui.GuiItemActions_paste_items
def GuiItemActions_cut_items(active_widget):
return _gui.GuiItemActions_cut_items(active_widget)
GuiItemActions_cut_items = _gui.GuiItemActions_cut_items
def GuiItemActions_make_local_items(active_widget):
return _gui.GuiItemActions_make_local_items(active_widget)
GuiItemActions_make_local_items = _gui.GuiItemActions_make_local_items
def GuiItemActions_make_instance_items(active_widget):
return _gui.GuiItemActions_make_instance_items(active_widget)
GuiItemActions_make_instance_items = _gui.GuiItemActions_make_instance_items
def GuiItemActions_toggle_items_display(active_widget, display):
return _gui.GuiItemActions_toggle_items_display(active_widget, display)
GuiItemActions_toggle_items_display = _gui.GuiItemActions_toggle_items_display
def GuiItemActions_enable_disable_items(active_widget, selection, disable):
return _gui.GuiItemActions_enable_disable_items(active_widget, selection, disable)
GuiItemActions_enable_disable_items = _gui.GuiItemActions_enable_disable_items
def GuiItemActions_isolate_more_items(active_widget):
return _gui.GuiItemActions_isolate_more_items(active_widget)
GuiItemActions_isolate_more_items = _gui.GuiItemActions_isolate_more_items
def GuiItemActions_isolate_less_items(active_widget):
return _gui.GuiItemActions_isolate_less_items(active_widget)
GuiItemActions_isolate_less_items = _gui.GuiItemActions_isolate_less_items
def GuiItemActions_isolate_swap_items(active_widget):
return _gui.GuiItemActions_isolate_swap_items(active_widget)
GuiItemActions_isolate_swap_items = _gui.GuiItemActions_isolate_swap_items
def GuiItemActions_colortag_items(active_widget, tag):
return _gui.GuiItemActions_colortag_items(active_widget, tag)
GuiItemActions_colortag_items = _gui.GuiItemActions_colortag_items
def GuiItemActions_isolate_items(*args):
return _gui.GuiItemActions_isolate_items(*args)
GuiItemActions_isolate_items = _gui.GuiItemActions_isolate_items
def GuiItemActions_is_context_writable(context):
return _gui.GuiItemActions_is_context_writable(context)
GuiItemActions_is_context_writable = _gui.GuiItemActions_is_context_writable
def GuiItemActions_is_item_action_localize_allowed(item):
return _gui.GuiItemActions_is_item_action_localize_allowed(item)
GuiItemActions_is_item_action_localize_allowed = _gui.GuiItemActions_is_item_action_localize_allowed
def GuiItemActions_is_item_action_make_instance_allowed(item):
return _gui.GuiItemActions_is_item_action_make_instance_allowed(item)
GuiItemActions_is_item_action_make_instance_allowed = _gui.GuiItemActions_is_item_action_make_instance_allowed
def GuiItemActions_is_item_action_group_allowed(item):
return _gui.GuiItemActions_is_item_action_group_allowed(item)
GuiItemActions_is_item_action_group_allowed = _gui.GuiItemActions_is_item_action_group_allowed
def GuiItemActions_is_item_action_combine_allowed(item):
return _gui.GuiItemActions_is_item_action_combine_allowed(item)
GuiItemActions_is_item_action_combine_allowed = _gui.GuiItemActions_is_item_action_combine_allowed
def GuiItemActions_is_item_action_create_shading_layer_allowed(item):
return _gui.GuiItemActions_is_item_action_create_shading_layer_allowed(item)
GuiItemActions_is_item_action_create_shading_layer_allowed = _gui.GuiItemActions_is_item_action_create_shading_layer_allowed
def GuiItemActions_is_item_action_copy_allowed(item):
return _gui.GuiItemActions_is_item_action_copy_allowed(item)
GuiItemActions_is_item_action_copy_allowed = _gui.GuiItemActions_is_item_action_copy_allowed
def GuiItemActions_is_item_action_cut_allowed(item):
return _gui.GuiItemActions_is_item_action_cut_allowed(item)
GuiItemActions_is_item_action_cut_allowed = _gui.GuiItemActions_is_item_action_cut_allowed
def GuiItemActions_is_item_action_paste_allowed(item):
return _gui.GuiItemActions_is_item_action_paste_allowed(item)
GuiItemActions_is_item_action_paste_allowed = _gui.GuiItemActions_is_item_action_paste_allowed
def GuiItemActions_is_item_action_revert_overrides_allowed(item):
return _gui.GuiItemActions_is_item_action_revert_overrides_allowed(item)
GuiItemActions_is_item_action_revert_overrides_allowed = _gui.GuiItemActions_is_item_action_revert_overrides_allowed
def GuiItemActions_is_item_action_delete_allowed(item):
return _gui.GuiItemActions_is_item_action_delete_allowed(item)
GuiItemActions_is_item_action_delete_allowed = _gui.GuiItemActions_is_item_action_delete_allowed
def GuiItemActions_is_item_action_rename_allowed(item):
return _gui.GuiItemActions_is_item_action_rename_allowed(item)
GuiItemActions_is_item_action_rename_allowed = _gui.GuiItemActions_is_item_action_rename_allowed
def GuiItemActions_is_item_action_disable_allowed(item):
return _gui.GuiItemActions_is_item_action_disable_allowed(item)
GuiItemActions_is_item_action_disable_allowed = _gui.GuiItemActions_is_item_action_disable_allowed
def GuiItemActions_is_item_action_enable_allowed(item):
return _gui.GuiItemActions_is_item_action_enable_allowed(item)
GuiItemActions_is_item_action_enable_allowed = _gui.GuiItemActions_is_item_action_enable_allowed
def GuiItemActions_is_item_action_move_allowed(item, destination_ctx):
return _gui.GuiItemActions_is_item_action_move_allowed(item, destination_ctx)
GuiItemActions_is_item_action_move_allowed = _gui.GuiItemActions_is_item_action_move_allowed
def GuiItemActions_is_item_action_copy_to_allowed(item, destination_ctx, allow_in_parent_context):
return _gui.GuiItemActions_is_item_action_copy_to_allowed(item, destination_ctx, allow_in_parent_context)
GuiItemActions_is_item_action_copy_to_allowed = _gui.GuiItemActions_is_item_action_copy_to_allowed
def GuiItemActions_is_item_action_instantiate_allowed(*args):
return _gui.GuiItemActions_is_item_action_instantiate_allowed(*args)
GuiItemActions_is_item_action_instantiate_allowed = _gui.GuiItemActions_is_item_action_instantiate_allowed
def GuiItemActions_build_item_creation_menu(*args):
return _gui.GuiItemActions_build_item_creation_menu(*args)
GuiItemActions_build_item_creation_menu = _gui.GuiItemActions_build_item_creation_menu
def GuiItemActions_populate_classes_categories(*args):
return _gui.GuiItemActions_populate_classes_categories(*args)
GuiItemActions_populate_classes_categories = _gui.GuiItemActions_populate_classes_categories
def GuiItemActions_add_create_class(*args):
return _gui.GuiItemActions_add_create_class(*args)
GuiItemActions_add_create_class = _gui.GuiItemActions_add_create_class
def GuiItemActions_get_class_creation_shortcut(of_class):
return _gui.GuiItemActions_get_class_creation_shortcut(of_class)
GuiItemActions_get_class_creation_shortcut = _gui.GuiItemActions_get_class_creation_shortcut
def GuiItemActions_process_shortcut_creation_class(widget_from, shortcut, creation_context=None):
return _gui.GuiItemActions_process_shortcut_creation_class(widget_from, shortcut, creation_context)
GuiItemActions_process_shortcut_creation_class = _gui.GuiItemActions_process_shortcut_creation_class
def GuiItemActions_on_create_item(widget, action, data):
return _gui.GuiItemActions_on_create_item(widget, action, data)
GuiItemActions_on_create_item = _gui.GuiItemActions_on_create_item
def GuiItemActions_process_create_item(widget_from, creation_class, input_context):
return _gui.GuiItemActions_process_create_item(widget_from, creation_class, input_context)
GuiItemActions_process_create_item = _gui.GuiItemActions_process_create_item
def GuiItemActions_on_create_context(widget, action, data):
return _gui.GuiItemActions_on_create_context(widget, action, data)
GuiItemActions_on_create_context = _gui.GuiItemActions_on_create_context
def GuiItemActions_process_create_context(widget_from, input_context, as_reference=False):
return _gui.GuiItemActions_process_create_context(widget_from, input_context, as_reference)
GuiItemActions_process_create_context = _gui.GuiItemActions_process_create_context
def GuiItemActions_get_selection(active_widget):
return _gui.GuiItemActions_get_selection(active_widget)
GuiItemActions_get_selection = _gui.GuiItemActions_get_selection
def GuiItemActions_has_nodal_preferred_position(widget):
return _gui.GuiItemActions_has_nodal_preferred_position(widget)
GuiItemActions_has_nodal_preferred_position = _gui.GuiItemActions_has_nodal_preferred_position
def GuiItemActions_class_info():
return _gui.GuiItemActions_class_info()
GuiItemActions_class_info = _gui.GuiItemActions_class_info
def GuiItemActions____class_destructor__(instance, is_array):
return _gui.GuiItemActions____class_destructor__(instance, is_array)
GuiItemActions____class_destructor__ = _gui.GuiItemActions____class_destructor__
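# Illustrative usage sketch (not part of the generated bindings): SWIG
# exposes each static member twice -- as a staticmethod on the proxy class
# when _newclass is true, and as a flat GuiItemActions_* module function
# otherwise -- and both routes dispatch to the same underlying C++ call.
# `item` and `active_widget` below are assumed to be wrapped objects handed
# out by the host application; this only demonstrates the calling convention.
def _example_copy_if_allowed(item, active_widget):
    # class-attribute style (available when _newclass is true)
    if GuiItemActions.is_item_action_copy_allowed(item):
        return GuiItemActions.copy_items(active_widget)
    # equivalent flat-function style, always available
    if GuiItemActions_is_item_action_copy_allowed(item):
        return GuiItemActions_copy_items(active_widget)
    return None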
class GuiMenu(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GuiMenu, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GuiMenu, name)
__repr__ = _swig_repr
def __init__(self, parent):
this = _gui.new_GuiMenu(parent)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
ANCHOR_LEFT = _gui.GuiMenu_ANCHOR_LEFT
ANCHOR_RIGHT = _gui.GuiMenu_ANCHOR_RIGHT
ANCHOR_TOP = _gui.GuiMenu_ANCHOR_TOP
ANCHOR_BOTTOM = _gui.GuiMenu_ANCHOR_BOTTOM
def popup(self, *args):
return _gui.GuiMenu_popup(self, *args)
def repeat_last_action(self):
return _gui.GuiMenu_repeat_last_action(self)
def is_shown(self):
return _gui.GuiMenu_is_shown(self)
def hide(self):
return _gui.GuiMenu_hide(self)
def draw(self, dc):
return _gui.GuiMenu_draw(self, dc)
def resize(self, x, y, w, h):
return _gui.GuiMenu_resize(self, x, y, w, h)
def process_event(self, event_id):
return _gui.GuiMenu_process_event(self, event_id)
def get_size(self):
return _gui.GuiMenu_get_size(self)
def set_max_line_count(self, max_line_count):
return _gui.GuiMenu_set_max_line_count(self, max_line_count)
def get_max_line_count(self):
return _gui.GuiMenu_get_max_line_count(self)
def get_last_selected_item(self):
return _gui.GuiMenu_get_last_selected_item(self)
def get_highlighted_item(self):
return _gui.GuiMenu_get_highlighted_item(self)
def get_parent(self):
return _gui.GuiMenu_get_parent(self)
if delta > 0:
if core.valid(entry):
year = int(entry[0:4])
month = int(entry[4:6])
day = int(entry[6:8])
datecheck = datetime.date(year, month, day)
displayCutoff = datetime.date.today() - datetime.timedelta(days=delta)
if datecheck > displayCutoff:
feedList.append(os.path.join(entryDir, entry))
else:
feedList.append(os.path.join(entryDir, entry))
metas = core.meta(feedList)
metas.sort(key = lambda entry:entry[3])
metas.reverse()
entries = []
for entry in metas[0:50]:
pad = ""
if len(entry[5]) < 8:
pad = "\t"
entries.append("~{user}{pad}\ton {date} ({wordcount})".format(
user=entry[5], pad=pad, date=entry[3],
wordcount=p.no("word", entry[2])))
return entries, metas
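# standalone sketch of the date-window filter used above (illustrative, not
# called by ttbp itself); it assumes entry filenames begin with an 8-digit
# YYYYMMDD stamp, which is what the slicing in the loop above relies on.
def _entry_within_window(entry, delta):
    posted = datetime.date(int(entry[0:4]), int(entry[4:6]), int(entry[6:8]))
    cutoff = datetime.date.today() - datetime.timedelta(days=delta)
    return posted > cutoff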
def subscription_manager(subs, intro=""):
'''
menu for adding and removing subscriptions to other townies' feels;
loops until the user backs out with 'q'.
'''
menuOptions = [
"add pals",
"remove pals"
]
util.print_menu(menuOptions, SETTINGS.get("rainbows", False))
choice = util.list_select(menuOptions, "what do you want to do? (enter 'q' to go back) ")
top = ""
if choice is not False:
if choice == 0:
prompt = "list of townies recording feels:"
redraw(prompt)
subs = subscribe_handler(subs, prompt)
elif choice == 1:
prompt = "list of townies you're subscribed to:"
redraw(prompt)
subs = unsubscribe_handler(subs, prompt)
else:
redraw()
return
redraw(top+intro)
return subscription_manager(subs, intro)
def unsubscribe_handler(subs, prompt, page=0):
'''
displays a list of currently subscribed users and toggles deletion.
'''
subs.sort()
ans = menu_handler(subs, "pick a pal to unsubscribe (or 'q' to cancel): ", 15, page, SETTINGS.get("rainbows", False), "list of townies recording feels:")
if ans is not False:
(page,choice) = ans
townie = subs[choice]
subs.remove(townie)
save_subs(subs)
redraw("{townie} removed! \n\n> {prompt}".format(townie=townie, prompt=prompt))
return unsubscribe_handler(subs, prompt, page)
else:
redraw()
return subs
def subscribe_handler(subs, prompt, page=0):
'''
displays a list of all users not subscribed to and toggles adding,
returning the subs list when finished.
'''
candidates = []
for townie in core.find_ttbps():
if townie not in subs:
candidates.append(townie)
candidates.sort()
ans = menu_handler(candidates, "pick a townie to add to your subscriptions (or 'q' to cancel): ", 15, page, SETTINGS.get("rainbows", False), "list of townies recording feels:")
if ans is not False:
(page, choice) = ans
townie = candidates[choice]
subs.append(townie)
save_subs(subs)
redraw("{townie} added! \n\n> {prompt}".format(townie=townie, prompt=prompt))
return subscribe_handler(subs, prompt, page)
else:
redraw()
return subs
def save_subs(subs):
'''
takes the given subscription list and saves it into the user config,
overwriting whatever is already there.
'''
subs_file = open(config.SUBS, 'w')
for townie in subs:
subs_file.write(townie + "\n")
subs_file.close()
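# round-trip sketch (illustrative; ttbp's real loader lives elsewhere):
# since save_subs writes one townie per line, reading the list back is just
# splitting the file on newlines and dropping blanks.
def _load_subs_sketch():
    if not os.path.isfile(config.SUBS):
        return []
    with open(config.SUBS) as subs_file:
        return [line.strip() for line in subs_file if line.strip()]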
def graffiti_handler():
'''
Main graffiti handler; checks for a lockfile from another editing session
(overwriting by default if the lock is more than three days old).
'''
if os.path.isfile(config.WALL_LOCK) and \
time.time() - os.path.getmtime(config.WALL_LOCK) < 60*60*24*3:
redraw("sorry, {friend}, but someone's there right now. try again in a few!".format(friend=chatter.say("friend")))
else:
subprocess.call(["touch", config.WALL_LOCK])
redraw()
print("""\
the graffiti wall is a world-writeable text file. anyone can
scribble on it; anyone can move or delete things. please be
considerate of your neighbors when writing on it.
no one will be able to visit the wall while you are here, so don't
worry about overwriting someone else's work. anything you do to the
wall will be recorded if you save the file, and you can cancel
your changes by exiting without saving.
""")
input("press <enter> to visit the wall\n\n")
subprocess.call([SETTINGS.get("editor"), config.WALL])
subprocess.call(["rm", config.WALL_LOCK])
redraw("thanks for visiting the graffiti wall!")
## misc helpers
def toggle_pub_default():
"""setup helper for setting default publish privacy (does not apply
retroactively). """
if SETTINGS.get("post as nopub", False) is True:
(nopub, will) = ("(nopub)", "won't")
else:
(nopub, will) = ("public", "will")
if SETTINGS.get("publishing", False) is True:
publishing = ""
else:
publishing = """\
since you're currently not publishing your posts to html/gopher, this setting
won't affect the visibility of your posts. however, the option is still here if
you'd like to change it.
"""
print("""
DEFAULT POST PRIVACY
your entries are set to automatically post as {nopub}. this means they {will} be
posted to your world-visible pages at first (which you can always change after
the fact).
this setting only affects subsequent posts; it does not apply retroactively.
{publishing}""".format(nopub=nopub, will=will, publishing=publishing))
ans = util.input_yn("""\
would you like to change this behavior?
please enter""")
if ans:
return not SETTINGS.get("post as nopub")
else:
return SETTINGS.get("post as nopub")
def toggle_rainbows():
"""setup helper for rainbow toggling
"""
if SETTINGS.get("rainbows", False) is True:
status = "enabled"
else:
status = "disabled"
print("\nRAINBOW MENU TOGGLING")
print("rainbow menus are currently {status}".format(status=status))
publish = util.input_yn("""\
would you like to have rainbow menus?
please enter\
""")
return publish
def select_editor():
'''
setup helper for editor selection
'''
print("\nTEXT EDITOR SELECTION")
print("your current editor is: "+SETTINGS.get("editor"))
util.print_menu(EDITORS, SETTINGS.get("rainbows", False))
choice = util.list_select(EDITORS, "pick your favorite text editor, or type 'q' to go back: ")
if choice is False:
# if selection is canceled, return either previously set editor or default
return SETTINGS.get("editor", EDITORS[0])
return EDITORS[int(choice)]
def select_publish_dir():
'''
setup helper for publish directory selection
'''
if not core.publishing():
return None
current = SETTINGS.get("publish dir")
republish = False
print("\nUPDATING HTML PATH")
if current:
print("\ncurrent publish dir:\t"+os.path.join(config.PUBLIC, SETTINGS["publish dir"]))
republish = True
choice = input("\nwhere do you want your blog published? (leave blank to use default \"blog\") ")
if not choice:
choice = "blog"
publishDir = os.path.join(config.PUBLIC, choice)
while os.path.exists(publishDir):
second = input("""
{pDir} already exists!
setting this as your publishing directory means this program may
delete or overwrite files there!
if you're sure you want to use it, hit <enter> to confirm.
otherwise, pick another location: """.format(pDir=publishDir))
if second == "":
break
choice = second
publishDir = os.path.join(config.PUBLIC, choice)
return choice
def select_publishing():
'''
setup helper for toggling publishing
'''
publish = util.input_yn("""\
SETTING UP PUBLISHING
do you want to publish your feels online?
if yes, your feels will be published to a directory of your choice in
your public_html. i'll confirm the location of that directory in a
moment.
if not, your feels will only be readable from within the tilde.town
network. if you already have a publishing directory, i'll remove it for
you (don't worry, your written entries will still be saved!)
you can change this option any time.
please enter\
""")
return publish
def unpublish():
'''remove all published entries in html and gopher.
'''
global SETTINGS
directory = SETTINGS.get("publish dir")
if directory:
publishDir = os.path.join(config.PUBLIC, directory)
if os.path.exists(publishDir):
subprocess.call(["rm", "-rf", publishDir])
subprocess.call(["rm", "-rf", config.WWW])
make_publish_dir(SETTINGS.get("publish dir"))
#SETTINGS.update({"publish dir": None})
if SETTINGS.get("gopher"):
gopher.unpublish()
def update_publishing():
'''
updates publishing directory if user is publishing. otherwise, wipe it.
'''
global SETTINGS
if core.publishing():
oldDir = SETTINGS.get("publish dir")
newDir = select_publish_dir()
SETTINGS.update({"publish dir": newDir})
if oldDir:
subprocess.call(["rm", os.path.join(config.PUBLIC, oldDir)])
make_publish_dir(newDir)
core.load_files()
#core.write_html("index.html")
else:
unpublish()
core.load(SETTINGS)
def make_publish_dir(publish_dir):
'''
setup helper to create publishing directory
'''
if not os.path.exists(config.WWW):
subprocess.call(["mkdir", config.WWW])
subprocess.call(["ln", "-s", os.path.join(config.USER_CONFIG, "style.css"), os.path.join(config.WWW, "style.css")])
subprocess.call(["touch", os.path.join(config.WWW, "index.html")])
index = open(os.path.join(config.WWW, "index.html"), "w")
index.write("<h1>ttbp blog placeholder</h1>")
index.close()
if core.publishing():
live = os.path.join(config.PUBLIC, publish_dir)
if os.path.exists(live):
subprocess.call(["rm", live])
subprocess.call(["ln", "-s", config.WWW, live])
return "\n\tpublishing to "+config.LIVE+config.USER+"/"+SETTINGS.get("publish dir")+"/\n\n"
else:
return ""
#print("\n\tpublishing to "+config.LIVE+config.USER+"/"+SETTINGS.get("publish dir")+"/\n\n")
def update_gopher():
'''
helper for toggling gopher settings
'''
# TODO for now i'm hardcoding where people's gopher stuff is generated. if
# there is demand for this to be configurable we can expose that.
if SETTINGS.get("gopher"):
gopher.setup_gopher('feels')
gopher.publish_gopher("feels", core.get_files())
else:
subprocess.call(["rm", config.GOPHER_PATH])
redraw("gopher publishing set to {gopher}".format(gopher=SETTINGS.get("gopher")))
##### PATCHING UTILITIES
def user_up_to_date():
'''
checks whether the current user's version file matches the running system version
'''
versionFile = os.path.join(config.PATH, "version")
if not os.path.exists(versionFile):
return False
ver = open(versionFile, "r").read()
if ver == __version__:
return True
return False
def update_user_version():
'''
updates user to current version, printing relevant release notes and
stepping through new features.
'''
global SETTINGS
versionFile = os.path.join(config.PATH, "version")
print("ttbp had some updates!")
print("\ngive me a second to update you to version "+__version__+"...\n")
time.sleep(1)
print("...")
time.sleep(0.5)
userVersion = ""
(x, y, z) = [0, 0, 0]
if not os.path.isfile(versionFile):
# updates from 0.8.5 to 0.8.6, before versionfile existed
# change style.css location
if core.publishing():
if os.path.isfile(os.path.join(config.WWW, "style.css")):
subprocess.call(["mv", os.path.join(config.WWW, "style.css"), config.USER_CONFIG])
# change www symlink
if os.path.exists(config.WWW):
subprocess.call(["rm", config.WWW])
subprocess.call(["mkdir", config.WWW])
subprocess.call(["ln", "-s", os.path.join(config.USER_CONFIG, "style.css"), os.path.join(config.WWW, "style.css")])
publishDir = os.path.join(config.PUBLIC, SETTINGS.get("publish dir"))
if os.path.exists(publishDir):
subprocess.call(["rm", "-rf", publishDir])
subprocess.call(["ln", "-s", config.WWW, os.path.join(config.PUBLIC, SETTINGS.get("publish dir"))])
# repopulate html files
core.load_files()
#core.write_html("index.html")
# add publishing setting
print("\nnew feature!\n")
SETTINGS.update({"publishing":select_publishing()})
update_publishing()
ttbprc = open(config.TTBPRC, "w")
ttbprc.write(json.dumps(SETTINGS, sort_keys=True, indent=2, separators=(',',':')))
ttbprc.close()
else: # version at least 0.8.6
userVersion = open(versionFile, "r").read().rstrip()
x, y, z = [int(num) for num in userVersion.split(".")]
# from 0.8.6
if userVersion == "0.8.6":
print("\nresetting your publishing settings...\n")
SETTINGS.update({"publishing":select_publishing()})
update_publishing()
ttbprc = open(config.TTBPRC, "w")
ttbprc.write(json.dumps(SETTINGS, sort_keys=True, indent=2, separators=(',',':')))
ttbprc.close()
members_changed = set(
state_key
for ev_type, state_key in itertools.chain(to_delete, to_insert)
if ev_type == EventTypes.Member
)
for member in members_changed:
txn.call_after(
self.get_rooms_for_user_with_stream_ordering.invalidate, (member,)
)
self._invalidate_state_caches_and_stream(txn, room_id, members_changed)
def _update_forward_extremities_txn(
self, txn, new_forward_extremities, max_stream_order
):
for room_id, new_extrem in iteritems(new_forward_extremities):
self._simple_delete_txn(
txn, table="event_forward_extremities", keyvalues={"room_id": room_id}
)
txn.call_after(self.get_latest_event_ids_in_room.invalidate, (room_id,))
self._simple_insert_many_txn(
txn,
table="event_forward_extremities",
values=[
{"event_id": ev_id, "room_id": room_id}
for room_id, new_extrem in iteritems(new_forward_extremities)
for ev_id in new_extrem
],
)
# We now insert into stream_ordering_to_exterm a mapping from room_id,
# new stream_ordering to new forward extremities in the room.
# This allows us to later efficiently look up the forward extremities
# for a room before a given stream_ordering
self._simple_insert_many_txn(
txn,
table="stream_ordering_to_exterm",
values=[
{
"room_id": room_id,
"event_id": event_id,
"stream_ordering": max_stream_order,
}
for room_id, new_extrem in iteritems(new_forward_extremities)
for event_id in new_extrem
],
)
@classmethod
def _filter_events_and_contexts_for_duplicates(cls, events_and_contexts):
"""Ensure that we don't have the same event twice.
Pick the earliest non-outlier if there is one, else the earliest one.
Args:
events_and_contexts (list[(EventBase, EventContext)]):
Returns:
list[(EventBase, EventContext)]: filtered list
"""
new_events_and_contexts = OrderedDict()
for event, context in events_and_contexts:
prev_event_context = new_events_and_contexts.get(event.event_id)
if prev_event_context:
if not event.internal_metadata.is_outlier():
if prev_event_context[0].internal_metadata.is_outlier():
# To ensure correct ordering we pop, as OrderedDict is
# ordered by first insertion.
new_events_and_contexts.pop(event.event_id, None)
new_events_and_contexts[event.event_id] = (event, context)
else:
new_events_and_contexts[event.event_id] = (event, context)
return list(new_events_and_contexts.values())
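# Worked illustration of the rule above, using stand-in objects rather than
# real synapse events: for a duplicated event_id the earliest non-outlier
# copy wins; if every copy is an outlier, the earliest copy is kept.
#
#     class _Meta:
#         def __init__(self, outlier): self._outlier = outlier
#         def is_outlier(self): return self._outlier
#     class _Ev:
#         def __init__(self, event_id, outlier):
#             self.event_id = event_id
#             self.internal_metadata = _Meta(outlier)
#     pairs = [(_Ev("$dup", True), "outlier_ctx"), (_Ev("$dup", False), "full_ctx")]
#     _filter_events_and_contexts_for_duplicates(pairs)
#     # -> one entry: the non-outlier copy, paired with "full_ctx"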
def _update_room_depths_txn(self, txn, events_and_contexts, backfilled):
"""Update min_depth for each room
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
backfilled (bool): True if the events were backfilled
"""
depth_updates = {}
for event, context in events_and_contexts:
# Remove any existing cache entries for the event_ids
txn.call_after(self._invalidate_get_event_cache, event.event_id)
if not backfilled:
txn.call_after(
self._events_stream_cache.entity_has_changed,
event.room_id,
event.internal_metadata.stream_ordering,
)
if not event.internal_metadata.is_outlier() and not context.rejected:
depth_updates[event.room_id] = max(
event.depth, depth_updates.get(event.room_id, event.depth)
)
for room_id, depth in iteritems(depth_updates):
self._update_min_depth_for_room_txn(txn, room_id, depth)
def _update_outliers_txn(self, txn, events_and_contexts):
"""Update any outliers with new event info.
This turns outliers into ex-outliers (unless the new event was
rejected).
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
Returns:
list[(EventBase, EventContext)] new list, without events which
are already in the events table.
"""
txn.execute(
"SELECT event_id, outlier FROM events WHERE event_id in (%s)"
% (",".join(["?"] * len(events_and_contexts)),),
[event.event_id for event, _ in events_and_contexts],
)
have_persisted = {event_id: outlier for event_id, outlier in txn}
to_remove = set()
for event, context in events_and_contexts:
if event.event_id not in have_persisted:
continue
to_remove.add(event)
if context.rejected:
# If the event is rejected then we don't care if the event
# was an outlier or not.
continue
outlier_persisted = have_persisted[event.event_id]
if not event.internal_metadata.is_outlier() and outlier_persisted:
# We received a copy of an event that we had already stored as
# an outlier in the database. We now have some state at that event
# so we need to update the state_groups table with that state.
# insert into event_to_state_groups.
try:
self._store_event_state_mappings_txn(txn, ((event, context),))
except Exception:
logger.exception("")
raise
metadata_json = encode_json(event.internal_metadata.get_dict())
sql = (
"UPDATE event_json SET internal_metadata = ?" " WHERE event_id = ?"
)
txn.execute(sql, (metadata_json, event.event_id))
# Add an entry to the ex_outlier_stream table to replicate the
# change in outlier status to our workers.
stream_order = event.internal_metadata.stream_ordering
state_group_id = context.state_group
self._simple_insert_txn(
txn,
table="ex_outlier_stream",
values={
"event_stream_ordering": stream_order,
"event_id": event.event_id,
"state_group": state_group_id,
},
)
sql = "UPDATE events SET outlier = ?" " WHERE event_id = ?"
txn.execute(sql, (False, event.event_id))
# Update the event_backward_extremities table now that this
# event isn't an outlier any more.
self._update_backward_extremeties(txn, [event])
return [ec for ec in events_and_contexts if ec[0] not in to_remove]
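# Decision summary for the method above (a reading aid, not new behaviour):
# every event that is already persisted is dropped from the returned list;
# in addition, when the stored copy was an outlier and the new copy is a
# full, non-rejected event, the stored copy is promoted in place (state
# mapping stored, metadata json refreshed, outlier flag cleared, backward
# extremities updated). Events not yet persisted pass through untouched.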
@classmethod
def _delete_existing_rows_txn(cls, txn, events_and_contexts):
if not events_and_contexts:
# nothing to do here
return
logger.info("Deleting existing")
for table in (
"events",
"event_auth",
"event_json",
"event_edges",
"event_forward_extremities",
"event_reference_hashes",
"event_search",
"event_to_state_groups",
"guest_access",
"history_visibility",
"local_invites",
"room_names",
"state_events",
"rejections",
"redactions",
"room_memberships",
"topics",
):
txn.executemany(
"DELETE FROM %s WHERE event_id = ?" % (table,),
[(ev.event_id,) for ev, _ in events_and_contexts],
)
for table in ("event_push_actions",):
txn.executemany(
"DELETE FROM %s WHERE room_id = ? AND event_id = ?" % (table,),
[(ev.room_id, ev.event_id) for ev, _ in events_and_contexts],
)
def _store_event_txn(self, txn, events_and_contexts):
"""Insert new events into the event and event_json tables
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
"""
if not events_and_contexts:
# nothing to do here
return
def event_dict(event):
d = event.get_dict()
d.pop("redacted", None)
d.pop("redacted_because", None)
return d
self._simple_insert_many_txn(
txn,
table="event_json",
values=[
{
"event_id": event.event_id,
"room_id": event.room_id,
"internal_metadata": encode_json(
event.internal_metadata.get_dict()
),
"json": encode_json(event_dict(event)),
"format_version": event.format_version,
}
for event, _ in events_and_contexts
],
)
self._simple_insert_many_txn(
txn,
table="events",
values=[
{
"stream_ordering": event.internal_metadata.stream_ordering,
"topological_ordering": event.depth,
"depth": event.depth,
"event_id": event.event_id,
"room_id": event.room_id,
"type": event.type,
"processed": True,
"outlier": event.internal_metadata.is_outlier(),
"origin_server_ts": int(event.origin_server_ts),
"received_ts": self._clock.time_msec(),
"sender": event.sender,
"contains_url": (
"url" in event.content
and isinstance(event.content["url"], text_type)
),
}
for event, _ in events_and_contexts
],
)
def _store_rejected_events_txn(self, txn, events_and_contexts):
"""Add rows to the 'rejections' table for received events which were
rejected
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
Returns:
list[(EventBase, EventContext)] new list, without the rejected
events.
"""
# Remove the rejected events from the list now that we've added them
# to the events table and the event_json table.
to_remove = set()
for event, context in events_and_contexts:
if context.rejected:
# Insert the event_id into the rejections table
self._store_rejections_txn(txn, event.event_id, context.rejected)
to_remove.add(event)
return [ec for ec in events_and_contexts if ec[0] not in to_remove]
def _update_metadata_tables_txn(
self, txn, events_and_contexts, all_events_and_contexts, backfilled
):
"""Update all the miscellaneous tables for new events
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
all_events_and_contexts (list[(EventBase, EventContext)]): all
events that we were going to persist. This includes events
we've already persisted, etc, that wouldn't appear in
events_and_contexts.
backfilled (bool): True if the events were backfilled
"""
# Insert all the push actions into the event_push_actions table.
self._set_push_actions_for_event_and_users_txn(
txn,
events_and_contexts=events_and_contexts,
all_events_and_contexts=all_events_and_contexts,
)
if not events_and_contexts:
# nothing to do here
return
for event, context in events_and_contexts:
if event.type == EventTypes.Redaction and event.redacts is not None:
# Remove the entries in the event_push_actions table for the
# redacted event.
self._remove_push_actions_for_event_id_txn(
txn, event.room_id, event.redacts
)
# Remove from relations table.
self._handle_redaction(txn, event.redacts)
# Update the event_forward_extremities, event_backward_extremities and
# event_edges tables.
self._handle_mult_prev_events(
txn, events=[event for event, _ in events_and_contexts]
)
for event, _ in events_and_contexts:
if event.type == EventTypes.Name:
# Insert into the room_names and event_search tables.
self._store_room_name_txn(txn, event)
elif event.type == EventTypes.Topic:
# Insert into the topics table and event_search table.
self._store_room_topic_txn(txn, event)
elif event.type == EventTypes.Message:
# Insert into the event_search table.
self._store_room_message_txn(txn, event)
elif event.type == EventTypes.Redaction:
# Insert into the redactions table.
self._store_redaction(txn, event)
elif event.type == EventTypes.RoomHistoryVisibility:
# Insert into the event_search table.
self._store_history_visibility_txn(txn, event)
elif event.type == EventTypes.GuestAccess:
# Insert into the event_search table.
self._store_guest_access_txn(txn, event)
self._handle_event_relations(txn, event)
# Insert into the room_memberships table.
self._store_room_members_txn(
txn,
[
event
for event, _ in events_and_contexts
if event.type == EventTypes.Member
],
backfilled=backfilled,
)
# Insert event_reference_hashes table.
self._store_event_reference_hashes_txn(
txn, [event for event, _ in events_and_contexts]
)
state_events_and_contexts = [
ec for ec in events_and_contexts if ec[0].is_state()
]
state_values = []
for event, context in state_events_and_contexts:
vals = {
"event_id": event.event_id,
"room_id": event.room_id,
"type": event.type,
"state_key": event.state_key,
}
# TODO: How does this work with backfilling?
if hasattr(event, "replaces_state"):
vals["prev_state"] = event.replaces_state
state_values.append(vals)
self._simple_insert_many_txn(txn, table="state_events", values=state_values)
# Prefill the event cache
self._add_to_cache(txn, events_and_contexts)
def _add_to_cache(self, txn, events_and_contexts):
to_prefill = []
rows = []
N = 200
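# chunk size for the IN clause below: rebuilding the query per batch of
# 200 ids keeps the bound-parameter list within database engine limits.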
for i in range(0, len(events_and_contexts), N):
ev_map = {e[0].event_id: e[0] for e in events_and_contexts[i : i + N]}
if not ev_map:
break
sql = (
"SELECT "
" e.event_id as event_id, "
" r.redacts as redacts,"
" rej.event_id as rejects "
" FROM events as e"
" LEFT JOIN rejections as rej USING (event_id)"
" LEFT JOIN redactions as r ON e.event_id = r.redacts"
" WHERE e.event_id IN (%s)"
) % (",".join(["?"] * len(ev_map)),)
txn.execute(sql, list(ev_map))
rows = self.cursor_to_dict(txn)
for row in rows:
event = ev_map[row["event_id"]]
if not row["rejects"] and not row["redacts"]:
to_prefill.append(
_EventCacheEntry(event=event, redacted_event=None)
)
# -*- coding: utf-8 -*-
import os
from io import BytesIO
import oss2
import unittest
import unittests
import random
from oss2 import utils
from functools import partial
from mock import patch
import struct
import copy
from .common import *
import json
def make_get_object(content):
request_text = '''GET /sjbhlsgsbecvlpbf HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:PAedG7U86ZxQ2WTB+GdpSltoiTI='''
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Type: text/plain
Content-Length: {0}
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
Accept-Ranges: bytes
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"
Last-Modified: Sat, 12 Dec 2015 00:35:53 GMT
x-oss-object-type: Normal
{1}'''.format(len(content), oss2.to_string(content))
return request_text, response_text
def make_put_encrypted_object(key, content, content_crypto_material):
cipher = content_crypto_material.cipher
encrypted_key = utils.b64encode_as_string(content_crypto_material.encrypted_key)
encrypted_iv = utils.b64encode_as_string(content_crypto_material.encrypted_iv)
encrypted_content = cipher.encrypt(content)
wrap_alg = content_crypto_material.wrap_alg
cek_alg = content_crypto_material.cek_alg
request_text = '''PUT /{0} HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
Content-Length: {1}
x-oss-meta-client-side-encryption-wrap-alg: {2}
x-oss-meta-client-side-encryption-cek-alg: {3}
x-oss-meta-client-side-encryption-key: {4}
x-oss-meta-client-side-encryption-start: {5}
x-oss-meta-unencrypted-content-length: {6}
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
authorization: OSS ZCDmm7TPZKHtx77j:W6whAowN4aImQ0dfbMHyFfD0t1g=
Accept: */*
'''.format(key, len(content), wrap_alg, cek_alg, encrypted_key, encrypted_iv, len(encrypted_content))
io = BytesIO()
io.write(oss2.to_bytes(request_text))
io.write(encrypted_content)
request_text = io.getvalue()
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Length: {0}
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
x-oss-hash-crc64ecma: {1}
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"'''.format(len(content), unittests.common.calc_crc(encrypted_content))
return request_text, response_text
def make_get_encrypted_object(key, content, content_crypto_material, invalid_cek_alg='', ranges=None):
request_text = '''GET /{0} HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:PAedG7U86ZxQ2WTB+GdpSltoiTI='''.format(key)
encrypted_key = utils.b64encode_as_string(content_crypto_material.encrypted_key)
encrypted_iv = utils.b64encode_as_string(content_crypto_material.encrypted_iv)
wrap_alg = content_crypto_material.wrap_alg
cek_alg = content_crypto_material.cek_alg
if invalid_cek_alg:
cek_alg = invalid_cek_alg
cipher = content_crypto_material.cipher
encrypted_content = cipher.encrypt(content)
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Type: text/plain
Content-Length: {0}
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
Accept-Ranges: bytes
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"
x-oss-meta-client-side-encryption-wrap-alg: {1}
x-oss-meta-client-side-encryption-cek-alg: {2}
x-oss-meta-client-side-encryption-key: {3}
x-oss-meta-client-side-encryption-start: {4}
x-oss-meta-unencrypted-content-length: {5}
Last-Modified: Sat, 12 Dec 2015 00:35:53 GMT
x-oss-object-type: Normal{6}
'''.format(len(encrypted_content), wrap_alg, cek_alg, encrypted_key, encrypted_iv, len(content),
'\nContent-Range: {0}'.format(ranges) if ranges else '')
io = BytesIO()
io.write(oss2.to_bytes(response_text))
io.write(encrypted_content)
response_text = io.getvalue()
return request_text, response_text
def make_get_encrypted_object_compact(key, encrypted_content, encrypted_meta):
request_text = '''GET /{0} HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:PAedG7U86ZxQ2WTB+GdpSltoiTI='''.format(key)
encrypted_key = encrypted_meta['x-oss-meta-client-side-encryption-key']
encrypted_iv = encrypted_meta['x-oss-meta-client-side-encryption-start']
wrap_alg = encrypted_meta['x-oss-meta-client-side-encryption-wrap-alg']
cek_alg = encrypted_meta['x-oss-meta-client-side-encryption-cek-alg']
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Type: text/plain
Content-Length: {0}
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
Accept-Ranges: bytes
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"
x-oss-meta-client-side-encryption-wrap-alg: {1}
x-oss-meta-client-side-encryption-cek-alg: {2}
x-oss-meta-client-side-encryption-key: {3}
x-oss-meta-client-side-encryption-start: {4}
x-oss-meta-unencrypted-content-length: {5}
Last-Modified: Sat, 12 Dec 2015 00:35:53 GMT
x-oss-object-type: Normal
'''.format(len(encrypted_content), wrap_alg, cek_alg, encrypted_key, encrypted_iv, len(encrypted_content))
io = BytesIO()
io.write(oss2.to_bytes(response_text))
io.write(encrypted_content)
response_text = io.getvalue()
return request_text, response_text
def make_get_encrypted_object_compact_deprecated(key, encrypted_content, encrypted_meta):
request_text = '''GET /{0} HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:PAedG7U86ZxQ2WTB+GdpSltoiTI='''.format(key)
encrypted_key = encrypted_meta['x-oss-meta-oss-crypto-key']
encrypted_iv = encrypted_meta['x-oss-meta-oss-crypto-start']
wrap_alg = encrypted_meta['x-oss-meta-oss-wrap-alg']
cek_alg = encrypted_meta['x-oss-meta-oss-cek-alg']
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Type: text/plain
Content-Length: {0}
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
Accept-Ranges: bytes
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"
x-oss-meta-oss-wrap-alg: {1}
x-oss-meta-oss-cek-alg: {2}
x-oss-meta-oss-crypto-key: {3}
x-oss-meta-oss-crypto-start: {4}
x-oss-meta-unencrypted-content-length: {5}
Last-Modified: Sat, 12 Dec 2015 00:35:53 GMT
x-oss-object-type: Normal
'''.format(len(encrypted_content), wrap_alg, cek_alg, encrypted_key, encrypted_iv, len(encrypted_content))
io = BytesIO()
io.write(oss2.to_bytes(response_text))
io.write(encrypted_content)
response_text = io.getvalue()
return request_text, response_text
def make_put_object(content):
request_text = '''PUT /sjbhlsgsbecvlpbf.txt HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
Content-Type: text/plain
Content-Length: {0}
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
authorization: OSS ZCDmm7TPZKHtx77j:W6whAowN4aImQ0dfbMHyFfD0t1g=
Accept: */*
{1}'''.format(len(content), oss2.to_string(content))
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Length: 0
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
x-oss-hash-crc64ecma: {0}
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"'''.format(unittests.common.calc_crc(content))
return request_text, response_text
def make_append_object(position, content):
request_text = '''POST /sjbhlsgsbecvlpbf?position={0}&append= HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
Content-Length: {1}
date: Sat, 12 Dec 2015 00:36:29 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:1njpxsTivMNvTdfYolCUefRInVY=
{2}'''.format(position, len(content), oss2.to_string(content))
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:36:29 GMT
Content-Length: 0
Connection: keep-alive
x-oss-request-id: 566B6C0D1790CF586F72240B
ETag: "24F7FA10676D816E0D6C6B5600000000"
x-oss-next-append-position: {0}
x-oss-hash-crc64ecma: {1}'''.format(position + len(content), unittests.common.calc_crc(content))
return request_text, response_text
def make_get_object_tagging():
request_text = '''GET /sjbhlsgsbecvlpbf?tagging HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:PAedG7U86ZxQ2WTB+GdpSltoiTI='''
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Type: text/plain
Content-Length: 278
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
Accept-Ranges: bytes
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"
Last-Modified: Sat, 12 Dec 2015 00:35:53 GMT
x-oss-object-type: Normal
<?xml version="1.0" encoding="UTF-8"?>
<Tagging>
<TagSet>
<Tag>
<Key>k1</Key>
<Value>v1</Value>
</Tag>
<Tag>
<Key>k2</Key>
<Value>v2</Value>
</Tag>
<Tag>
<Key>k3</Key>
<Value>v3</Value>
</Tag>
</TagSet>
</Tagging>'''
return request_text, response_text
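# How the canned request/response texts above are consumed (a summary of the
# pattern in the tests below; mock_response, bucket and assertRequest are
# helpers from the local `unittests.common` test support module):
#
#     @patch('oss2.Session.do_request')
#     def test_get(self, do_request):
#         request_text, response_text = make_get_object(content)
#         req_info = unittests.common.mock_response(do_request, response_text)
#         result = unittests.common.bucket().get_object('sjbhlsgsbecvlpbf')
#         self.assertRequest(req_info, request_text)   # outgoing bytes match
#         self.assertEqual(result.read(), content)     # canned body comes back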
class TestObject(unittests.common.OssTestCase):
@patch('oss2.Session.do_request')
def test_head(self, do_request):
request_text = '''HEAD /apbmntxqtvxjzini HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:35:55 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:Q05CWxpclrtNnUWHY5wS10fhFk0='''
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:55 GMT
Content-Type: application/octet-stream
Content-Length: 10
Connection: keep-alive
x-oss-request-id: 566B6BEBD4C05B21E97261B0
Accept-Ranges: bytes
ETag: "0CF031A5EB9351746195B20B86FD3F68"
Last-Modified: Sat, 12 Dec 2015 00:35:54 GMT
x-oss-object-type: Normal'''
req_info = unittests.common.mock_response(do_request, response_text)
result = unittests.common.bucket().head_object('apbmntxqtvxjzini')
self.assertRequest(req_info, request_text)
self.assertEqual(result.content_length, 10)
self.assertEqual(result.status, 200)
self.assertEqual(result.request_id, '566B6BEBD4C05B21E97261B0')
self.assertEqual(result.object_type, 'Normal')
self.assertEqual(result.content_type, 'application/octet-stream')
self.assertEqual(result.etag, '0CF031A5EB9351746195B20B86FD3F68')
self.assertEqual(result.last_modified, 1449880554)
@patch('oss2.Session.do_request')
def test_object_exists_true(self, do_request):
request_text = '''GET /sbowspxjhmccpmesjqcwagfw?objectMeta HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:37:17 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:wopWcmMd/70eNKYOc9M6ZA21yY8='''
response_text = '''HTTP/1.1 200 OK
x-oss-request-id: 566B6C3D010B7A4314D2253D
Date: Sat, 12 Dec 2015 00:37:17 GMT
ETag: "5B3C1A2E053D763E1B002CC607C5A0FE"
Last-Modified: Sat, 12 Dec 2015 00:37:17 GMT
Content-Length: 344606
Connection: keep-alive
Server: AliyunOSS'''
req_info = unittests.common.mock_response(do_request, response_text)
self.assertTrue(unittests.common.bucket().object_exists('sbowspxjhmccpmesjqcwagfw'))
self.assertRequest(req_info, request_text)
@patch('oss2.Session.do_request')
def test_object_exists_false(self, do_request):
request_text = '''GET /sbowspxjhmccpmesjqcwagfw?objectMeta HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:37:17 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:wopWcmMd/70eNKYOc9M6ZA21yY8='''
response_text = '''HTTP/1.1 404 Not Found
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:37:17 GMT
Content-Type: application/xml
Content-Length: 287
Connection: keep-alive
x-oss-request-id: 566B6C3D6086505A0CFF0F68
<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>NoSuchKey</Code>
<Message>The specified key does not exist.</Message>
<RequestId>566B6C3D6086505A0CFF0F68</RequestId>
<HostId>ming-oss-share.oss-cn-hangzhou.aliyuncs.com</HostId>
<Key>sbowspxjhmccpmesjqcwagfw</Key>
</Error>'''
req_info = unittests.common.mock_response(do_request, response_text)
self.assertTrue(not unittests.common.bucket().object_exists('sbowspxjhmccpmesjqcwagfw'))
self.assertRequest(req_info, request_text)
@patch('oss2.Session.do_request')
def test_object_exists_exception(self, do_request):
request_text = '''GET /sbowspxjhmccpmesjqcwagfw?objectMeta HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:37:17 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:wopWcmMd/70eNKYOc9M6ZA21yY8='''
response_text = '''HTTP/1.1 404 Not Found
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:37:17 GMT
Content-Type: application/xml
Content-Length: 287
Connection: keep-alive
x-oss-request-id: 566B6C3D6086505A0CFF0F68
<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>NoSuchBucket</Code>
<Message>The specified bucket does not exist.</Message>
<RequestId>566B6C3D6086505A0CFF0F68</RequestId>
<HostId>ming-oss-share.oss-cn-hangzhou.aliyuncs.com</HostId>
<Bucket>ming-oss-share</Bucket>
</Error>'''
unittests.common.mock_response(do_request, response_text)
self.assertRaises(oss2.exceptions.NoSuchBucket, unittests.common.bucket().object_exists, 'sbowspxjhmccpmesjqcwagfw')
@patch('oss2.Session.do_request')
def test_get_object_meta(self, do_request):
request_text = '''GET /sbowspxjhmccpmesjqcwagfw?objectMeta HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
date: Sat, 12 Dec 2015 00:37:17 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
Accept: */*
authorization: OSS ZCDmm7TPZKHtx77j:wopWcmMd/70eNKYOc9M6ZA21yY8='''
response_text = '''HTTP/1.1 200 OK
x-oss-request-id: 566B6C3D010B7A4314D2253D
Date: Sat, 12 Dec 2015 00:37:17 GMT
ETag: "5B3C1A2E053D763E1B002CC607C5A0FE"
Last-Modified: Sat, 12 Dec 2015 00:37:17 GMT
Content-Length: 344606
Connection: keep-alive
Server: AliyunOSS'''
req_info = unittests.common.mock_response(do_request, response_text)
result = unittests.common.bucket().get_object_meta('sbowspxjhmccpmesjqcwagfw')
self.assertRequest(req_info, request_text)
self.assertEqual(result.last_modified, 1449880637)
self.assertEqual(result.content_length, 344606)
self.assertEqual(result.etag, '5B3C1A2E053D763E1B002CC607C5A0FE')
@patch('oss2.Session.do_request')
def test_get(self, do_request):
content = unittests.common.random_bytes(1023)
request_text, response_text = make_get_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
result = unittests.common.bucket().get_object('sjbhlsgsbecvlpbf')
self.assertRequest(req_info, request_text)
self.assertEqual(result.read(), content)
self.assertEqual(result.content_length, len(content))
self.assertEqual(result.status, 200)
self.assertEqual(result.request_id, '566B6BE93A7B8CFD53D4BAA3')
self.assertEqual(result.object_type, 'Normal')
self.assertEqual(result.content_type, 'text/plain')
self.assertEqual(result.etag, 'D80CF0E5BE2436514894D64B2BCFB2AE')
self.assertEqual(result.last_modified, 1449880553)
@patch('oss2.Session.do_request')
def test_get_with_progress(self, do_request):
content = unittests.common.random_bytes(1024 * 1024 + 1)
request_text, response_text = make_get_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
self.previous = -1
result = unittests.common.bucket().get_object('sjbhlsgsbecvlpbf', progress_callback=self.progress_callback)
self.assertRequest(req_info, request_text)
content_read = unittests.common.read_file(result)
self.assertEqual(self.previous, len(content))
self.assertEqual(len(content_read), len(content))
self.assertEqual(oss2.to_bytes(content_read), content)
@patch('oss2.Session.do_request')
def test_get_to_file(self, do_request):
content = unittests.common.random_bytes(1023)
request_text, response_text = make_get_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
filename = self.tempname()
result = unittests.common.bucket().get_object_to_file('sjbhlsgsbecvlpbf', filename)
self.assertRequest(req_info, request_text)
self.assertEqual(result.request_id, '566B6BE93A7B8CFD53D4BAA3')
self.assertEqual(result.content_length, len(content))
self.assertEqual(os.path.getsize(filename), len(content))
with open(filename, 'rb') as f:
self.assertEqual(content, oss2.to_bytes(f.read()))
@patch('oss2.Session.do_request')
def test_get_to_file_with_progress(self, do_request):
size = 1024 * 1024 + 1
content = unittests.common.random_bytes(size)
request_text, response_text = make_get_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
filename = self.tempname()
self.previous = -1
unittests.common.bucket().get_object_to_file('sjbhlsgsbecvlpbf', filename, progress_callback=self.progress_callback)
self.assertRequest(req_info, request_text)
self.assertEqual(self.previous, size)
self.assertEqual(os.path.getsize(filename), size)
with open(filename, 'rb') as f:
self.assertEqual(oss2.to_bytes(content), f.read())
@patch('oss2.Session.do_request')
def test_put_result(self, do_request):
content = b'dummy content'
request_text, response_text = make_put_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
result = unittests.common.bucket().put_object('sjbhlsgsbecvlpbf.txt', content)
self.assertRequest(req_info, request_text)
self.assertEqual(result.status, 200)
self.assertEqual(result.request_id, '566B6BE93A7B8CFD53D4BAA3')
self.assertEqual(result.etag, 'D80CF0E5BE2436514894D64B2BCFB2AE')
@patch('oss2.Session.do_request')
def test_put_bytes(self, do_request):
content = unittests.common.random_bytes(1024 * 1024 - 1)
request_text, response_text = make_put_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
unittests.common.bucket().put_object('sjbhlsgsbecvlpbf.txt', content)
self.assertRequest(req_info, request_text)
@patch('oss2.Session.do_request')
def test_put_bytes_with_progress(self, do_request):
self.previous = -1
content = unittests.common.random_bytes(1024 * 1024 - 1)
request_text, response_text = make_put_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
unittests.common.bucket().put_object('sjbhlsgsbecvlpbf.txt', content, progress_callback=self.progress_callback)
self.assertRequest(req_info, request_text)
self.assertEqual(self.previous, len(content))
@patch('oss2.Session.do_request')
def test_put_from_file(self, do_request):
size = 512 * 2 - 1
content = unittests.common.random_bytes(size)
filename = self.make_tempfile(content)
request_text, response_text = make_put_object(content)
req_info = unittests.common.mock_response(do_request, response_text)
result = unittests.common.bucket().put_object_from_file('sjbhlsgsbecvlpbf.txt', filename)
self.assertRequest(req_info, request_text)
self.assertEqual(result.request_id, '566B6BE93A7B8CFD53D4BAA3')
self.assertEqual(result.etag, 'D80CF0E5BE2436514894D64B2BCFB2AE')
@patch('oss2.Session.do_request')
def test_put_without_crc_in_response(self, do_request):
content = b'dummy content'
request_text = '''PUT /sjbhlsgsbecvlpbf.txt HTTP/1.1
Host: ming-oss-share.oss-cn-hangzhou.aliyuncs.com
Accept-Encoding: identity
Connection: keep-alive
Content-Type: text/plain
Content-Length: {0}
date: Sat, 12 Dec 2015 00:35:53 GMT
User-Agent: aliyun-sdk-python/2.0.2(Windows/7/;3.3.3)
authorization: OSS ZCDmm7TPZKHtx77j:W6whAowN4aImQ0dfbMHyFfD0t1g=
Accept: */*
{1}'''.format(len(content), oss2.to_string(content))
response_text = '''HTTP/1.1 200 OK
Server: AliyunOSS
Date: Sat, 12 Dec 2015 00:35:53 GMT
Content-Length: 0
Connection: keep-alive
x-oss-request-id: 566B6BE93A7B8CFD53D4BAA3
ETag: "D80CF0E5BE2436514894D64B2BCFB2AE"'''
req_info = unittests.common.mock_response(do_request, response_text)
result = unittests.common.bucket().put_object('sjbhlsgsbecvlpbf.txt', content)
self.assertRequest(req_info, request_text)
self.assertEqual(result.status, 200)
self.assertEqual(result.request_id, '566B6BE93A7B8CFD53D4BAA3')
self.assertEqual(result.etag, 'D80CF0E5BE2436514894D64B2BCFB2AE')
@patch('oss2.Session.do_request')
def test_append(self, do_request):
size = 8192 * 2 - 1
content = unittests.common.random_bytes(size)
request_text, response_text = make_append_object(0, content)
req_info = unittests.common.mock_response(do_request, response_text)
result = unittests.common.bucket().append_object('sjbhlsgsbecvlpbf', 0, content)
self.assertRequest(req_info, request_text)
self.assertEqual(result.status, 200)
self.assertEqual(result.next_position, size)
self.assertEqual(result.etag, '24F7FA10676D816E0D6C6B5600000000')
self.assertEqual(result.crc, unittests.common.calc_crc(content))
@patch('oss2.Session.do_request')
def test_append_with_progress(self, do_request):
size = 1024 * 1024
content = unittests.common.random_bytes(size)
request_text, response_text = make_append_object(0, content)
req_info = unittests.common.mock_response(do_request, response_text)
self.previous = -1
result = unittests.common.bucket().append_object('sjbhlsgsbecvlpbf', 0, content, progress_callback=self.progress_callback)
self.assertRequest(req_info, request_text)
self.assertEqual(self.previous, size)
self.assertEqual(result.next_position, size)
@patch('oss2.Session.do_request')
def test_append_without_crc_in_response(self, do_request):
size = 8192
position
"com_github_google_btree",
importpath = "github.com/google/btree",
sum = "h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_google_go_cmp",
importpath = "github.com/google/go-cmp",
sum = "h1:/QaMHBdZ26BB3SSst0Iwl10Epc+xhTquomWX0oZEB6w=",
version = "v0.5.0",
)
go_repository(
name = "com_github_google_go_github",
importpath = "github.com/google/go-github",
sum = "h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=",
version = "v17.0.0+incompatible",
)
go_repository(
name = "com_github_google_go_querystring",
importpath = "github.com/google/go-querystring",
sum = "h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_google_gofuzz",
importpath = "github.com/google/gofuzz",
sum = "h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g=",
version = "v1.1.0",
)
go_repository(
name = "com_github_google_martian",
importpath = "github.com/google/martian",
sum = "h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_google_pprof",
importpath = "github.com/google/pprof",
sum = "h1:DLpL8pWq0v4JYoRpEhDfsJhhJyGKCcQM2WPW2TJs31c=",
version = "v0.0.0-20191218002539-d4f498aebedc",
)
go_repository(
name = "com_github_google_renameio",
importpath = "github.com/google/renameio",
sum = "h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_shlex",
importpath = "github.com/google/shlex",
sum = "h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=",
version = "v0.0.0-20191202100458-e7afc7fbc510",
)
go_repository(
name = "com_github_google_uuid",
importpath = "github.com/google/uuid",
sum = "h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=",
version = "v1.1.1",
)
go_repository(
name = "com_github_googleapis_gax_go",
importpath = "github.com/googleapis/gax-go",
sum = "h1:silFMLAnr330+NRuag/VjIGF7TLp/LBrV2CJKFLWEww=",
version = "v2.0.2+incompatible",
)
go_repository(
name = "com_github_googleapis_gax_go_v2",
importpath = "github.com/googleapis/gax-go/v2",
sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=",
version = "v2.0.5",
)
go_repository(
name = "com_github_googleapis_gnostic",
importpath = "github.com/googleapis/gnostic",
sum = "h1:A8Yhf6EtqTv9RMsU6MQTyrtV1TjWlR6xU9BsZIwuTCM=",
version = "v0.5.1",
)
go_repository(
name = "com_github_gophercloud_gophercloud",
importpath = "github.com/gophercloud/gophercloud",
sum = "h1:Xb2lcqZtml1XjgYZxbeayEemq7ASbeTp09m36gQFpEU=",
version = "v0.6.0",
)
go_repository(
name = "com_github_gopherjs_gopherjs",
importpath = "github.com/gopherjs/gopherjs",
sum = "h1:F7WD09S8QB4LrkEpka0dFPLSotH11HRpCsLIbIcJ7sU=",
version = "v0.0.0-20191106031601-ce3c9ade29de",
)
go_repository(
name = "com_github_gorilla_context",
importpath = "github.com/gorilla/context",
sum = "h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_handlers",
importpath = "github.com/gorilla/handlers",
sum = "h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gorilla_mux",
importpath = "github.com/gorilla/mux",
sum = "h1:zoNxOV7WjqXptQOVngLmcSQgXmgk4NMz1HibBchjl/I=",
version = "v1.7.2",
)
go_repository(
name = "com_github_gorilla_websocket",
importpath = "github.com/gorilla/websocket",
sum = "h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=",
version = "v1.4.0",
)
go_repository(
name = "com_github_gosuri_uitable",
importpath = "github.com/gosuri/uitable",
sum = "h1:IG2xLKRvErL3uhY6e1BylFzG+aJiwQviDDTfOKeKTpY=",
version = "v0.0.4",
)
go_repository(
name = "com_github_gregjones_httpcache",
importpath = "github.com/gregjones/httpcache",
sum = "h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM=",
version = "v0.0.0-20180305231024-9cad4c3443a7",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_middleware",
importpath = "github.com/grpc-ecosystem/go-grpc-middleware",
sum = "h1:THDBEeQ9xZ8JEaCLyLQqXMMdRqNr0QAUJTIkQAUtFjg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_prometheus",
importpath = "github.com/grpc-ecosystem/go-grpc-prometheus",
sum = "h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=",
version = "v1.2.0",
)
go_repository(
name = "com_github_grpc_ecosystem_grpc_gateway",
importpath = "github.com/grpc-ecosystem/grpc-gateway",
sum = "h1:zCy2xE9ablevUOrUZc3Dl72Dt+ya2FNAvC2yLYMHzi4=",
version = "v1.12.1",
)
go_repository(
name = "com_github_grpc_ecosystem_grpc_health_probe",
importpath = "github.com/grpc-ecosystem/grpc-health-probe",
sum = "h1:daShAySXI1DnGc8U9B1E4Qm6o7qzmFR4aRIJ4vY/TUo=",
version = "v0.3.2",
)
go_repository(
name = "com_github_hailocab_go_hostpool",
importpath = "github.com/hailocab/go-hostpool",
sum = "h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=",
version = "v0.0.0-20160125115350-e80d13ce29ed",
)
go_repository(
name = "com_github_hashicorp_consul_api",
importpath = "github.com/hashicorp/consul/api",
sum = "h1:HXNYlRkkM/t+Y/Yhxtwcy02dlYwIaoxzvxPnS+cqy78=",
version = "v1.3.0",
)
go_repository(
name = "com_github_hashicorp_consul_sdk",
importpath = "github.com/hashicorp/consul/sdk",
sum = "h1:UOxjlb4xVNF93jak1mzzoBatyFju9nrkxpVwIp/QqxQ=",
version = "v0.3.0",
)
go_repository(
name = "com_github_hashicorp_errwrap",
importpath = "github.com/hashicorp/errwrap",
sum = "h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_cleanhttp",
importpath = "github.com/hashicorp/go-cleanhttp",
sum = "h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=",
version = "v0.5.1",
)
go_repository(
name = "com_github_hashicorp_go_immutable_radix",
importpath = "github.com/hashicorp/go-immutable-radix",
sum = "h1:vN9wG1D6KG6YHRTWr8512cxGOVgTMEfgEdSj/hr8MPc=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_go_msgpack",
importpath = "github.com/hashicorp/go-msgpack",
sum = "h1:i9R9JSrqIz0QVLz3sz+i3YJdT7TTSLcfLLzJi9aZTuI=",
version = "v0.5.5",
)
go_repository(
name = "com_github_hashicorp_go_multierror",
importpath = "github.com/hashicorp/go-multierror",
sum = "h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_net",
importpath = "github.com/hashicorp/go.net",
sum = "h1:sNCoNyDEvN1xa+X0baata4RdcpKwcMS6DH+xwfqPgjw=",
version = "v0.0.1",
)
go_repository(
name = "com_github_hashicorp_go_retryablehttp",
importpath = "github.com/hashicorp/go-retryablehttp",
sum = "h1:QlWt0KvWT0lq8MFppF9tsJGF+ynG7ztc2KIPhzRGk7s=",
version = "v0.5.3",
)
go_repository(
name = "com_github_hashicorp_go_rootcerts",
importpath = "github.com/hashicorp/go-rootcerts",
sum = "h1:DMo4fmknnz0E0evoNYnV48RjWndOsmd6OW+09R3cEP8=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_sockaddr",
importpath = "github.com/hashicorp/go-sockaddr",
sum = "h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_hashicorp_go_syslog",
importpath = "github.com/hashicorp/go-syslog",
sum = "h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_uuid",
importpath = "github.com/hashicorp/go-uuid",
sum = "h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_version",
importpath = "github.com/hashicorp/go-version",
sum = "h1:bPIoEKD27tNdebFGGxxYwcL4nepeY4j1QP23PFRGzg0=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_golang_lru",
importpath = "github.com/hashicorp/golang-lru",
sum = "h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=",
version = "v0.5.4",
)
go_repository(
name = "com_github_hashicorp_hcl",
importpath = "github.com/hashicorp/hcl",
sum = "h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_logutils",
importpath = "github.com/hashicorp/logutils",
sum = "h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_mdns",
importpath = "github.com/hashicorp/mdns",
sum = "h1:WhIgCr5a7AaVH6jPUwjtRuuE7/RDufnUvzIr48smyxs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_memberlist",
importpath = "github.com/hashicorp/memberlist",
sum = "h1:AYBsgJOW9gab/toO5tEB8lWetVgDKZycqkebJ8xxpqM=",
version = "v0.1.5",
)
go_repository(
name = "com_github_hashicorp_serf",
importpath = "github.com/hashicorp/serf",
sum = "h1:ZynDUIQiA8usmRgPdGPHFdPnb1wgGI9tK3mO9hcAJjc=",
version = "v0.8.5",
)
go_repository(
name = "com_github_hpcloud_tail",
importpath = "github.com/hpcloud/tail",
sum = "h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_huandu_xstrings",
importpath = "github.com/huandu/xstrings",
sum = "h1:4jgBlKK6tLKFvO8u5pmYjG91cqytmDCDvGh7ECVFfFs=",
version = "v1.3.1",
)
go_repository(
name = "com_github_iancoleman_strcase",
importpath = "github.com/iancoleman/strcase",
sum = "h1:ECW73yc9MY7935nNYXUkK7Dz17YuSUI9yqRqYS8aBww=",
version = "v0.0.0-20190422225806-e506e3ef7365",
)
go_repository(
name = "com_github_ianlancetaylor_demangle",
importpath = "github.com/ianlancetaylor/demangle",
sum = "h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=",
version = "v0.0.0-20181102032728-5e5cf60278f6",
)
go_repository(
name = "com_github_imdario_mergo",
importpath = "github.com/imdario/mergo",
sum = "h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg=",
version = "v0.3.9",
)
go_repository(
name = "com_github_inconshreveable_mousetrap",
importpath = "github.com/inconshreveable/mousetrap",
sum = "h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_influxdata_influxdb",
importpath = "github.com/influxdata/influxdb",
sum = "h1:UvNzAPfBrKMENVbQ4mr4ccA9sW+W1Ihl0Yh1s0BiVAg=",
version = "v1.7.7",
)
go_repository(
name = "com_github_jackc_fake",
importpath = "github.com/jackc/fake",
sum = "h1:vr3AYkKovP8uR8AvSGGUK1IDqRa5lAAvEkZG1LKaCRc=",
version = "v0.0.0-20150926172116-812a484cc733",
)
go_repository(
name = "com_github_jackc_pgx",
importpath = "github.com/jackc/pgx",
sum = "h1:0Vihzu20St42/UDsvZGdNE6jak7oi/UOeMzwMPHkgFY=",
version = "v3.2.0+incompatible",
)
go_repository(
name = "com_github_jessevdk_go_flags",
importpath = "github.com/jessevdk/go-flags",
sum = "h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=",
version = "v1.4.0",
)
go_repository(
name = "com_github_jmespath_go_jmespath",
importpath = "github.com/jmespath/go-jmespath",
sum = "h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM=",
version = "v0.0.0-20180206201540-c2b33e8439af",
)
go_repository(
name = "com_github_jmoiron_sqlx",
importpath = "github.com/jmoiron/sqlx",
sum = "h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=",
version = "v1.2.0",
)
go_repository(
name = "com_github_joefitzgerald_rainbow_reporter",
importpath = "github.com/joefitzgerald/rainbow-reporter",
sum = "h1:AuMG652zjdzI0YCCnXAqATtRBpGXMcAnrajcaTrSeuo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joeshaw_multierror",
importpath = "github.com/joeshaw/multierror",
sum = "h1:rp+c0RAYOWj8l6qbCUTSiRLG/iKnW3K3/QfPPuSsBt4=",
version = "v0.0.0-20140124173710-69b34d4ec901",
)
go_repository(
name = "com_github_joho_godotenv",
importpath = "github.com/joho/godotenv",
sum = "h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_jonboulle_clockwork",
importpath = "github.com/jonboulle/clockwork",
sum = "h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_jpillora_backoff",
importpath = "github.com/jpillora/backoff",
sum = "h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_json_iterator_go",
importpath = "github.com/json-iterator/go",
sum = "h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=",
version = "v1.1.10",
)
go_repository(
name = "com_github_jsonnet_bundler_jsonnet_bundler",
importpath = "github.com/jsonnet-bundler/jsonnet-bundler",
sum = "h1:KmNzitX12fFoyqjhU8cRifEB5D8x1NT1UAcK7FQ0zpY=",
version = "v0.3.1",
)
go_repository(
name = "com_github_jstemmer_go_junit_report",
importpath = "github.com/jstemmer/go-junit-report",
sum = "h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=",
version = "v0.9.1",
)
go_repository(
name = "com_github_jtolds_gls",
importpath = "github.com/jtolds/gls",
sum = "h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=",
version = "v4.20.0+incompatible",
)
go_repository(
name = "com_github_julienschmidt_httprouter",
importpath = "github.com/julienschmidt/httprouter",
sum = "h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=",
version = "v1.3.0",
)
go_repository(
name = "com_github_kardianos_osext",
importpath = "github.com/kardianos/osext",
sum = "h1:iQTw/8FWTuc7uiaSepXwyf3o52HaUYcV+Tu66S3F5GA=",
version = "v0.0.0-20190222173326-2bc1f35cddc0",
)
go_repository(
name = "com_github_kisielk_errcheck",
importpath = "github.com/kisielk/errcheck",
sum = "h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY=",
version = "v1.5.0",
)
go_repository(
name = "com_github_kisielk_gotool",
importpath = "github.com/kisielk/gotool",
sum = "h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_konsorten_go_windows_terminal_sequences",
importpath = "github.com/konsorten/go-windows-terminal-sequences",
sum = "h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=",
version = "v1.0.3",
)
go_repository(
name = "com_github_kr_logfmt",
importpath = "github.com/kr/logfmt",
sum = "h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=",
version = "v0.0.0-20140226030751-b84e30acd515",
)
go_repository(
name = "com_github_kr_pretty",
importpath = "github.com/kr/pretty",
sum = "h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=",
version = "v0.2.0",
)
go_repository(
name = "com_github_kr_pty",
importpath = "github.com/kr/pty",
sum = "h1:hyz3dwM5QLc1Rfoz4FuWJQG5BN7tc6K1MndAUnGpQr4=",
version = "v1.1.5",
)
go_repository(
name = "com_github_kr_text",
importpath = "github.com/kr/text",
sum = "h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=",
version = "v0.1.0",
)
go_repository(
name = "com_github_kshvakov_clickhouse",
importpath = "github.com/kshvakov/clickhouse",
sum = "h1:PDTYk9VYgbjPAWry3AoDREeMgOVUFij6bh6IjlloHL0=",
version = "v1.3.5",
)
go_repository(
name = "com_github_kylelemons_godebug",
importpath = "github.com/kylelemons/godebug",
sum = "h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4=",
version = "v0.0.0-20170820004349-d65d576e9348",
)
go_repository(
name = "com_github_lann_builder",
importpath = "github.com/lann/builder",
sum = "h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw=",
version = "v0.0.0-20180802200727-47ae307949d0",
)
go_repository(
name = "com_github_lann_ps",
importpath = "github.com/lann/ps",
sum = "h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=",
version = "v0.0.0-20150810152359-62de8c46ede0",
)
go_repository(
name = "com_github_leanovate_gopter",
importpath = "github.com/leanovate/gopter",
sum = "h1:U4YLBggDFhJdqQsG4Na2zX7joVTky9vHaj/AGEwSuXU=",
version = "v0.2.4",
)
go_repository(
name = "com_github_lib_pq",
importpath = "github.com/lib/pq",
sum = "h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU=",
version = "v1.3.0",
)
go_repository(
name = "com_github_liggitt_tabwriter",
importpath = "github.com/liggitt/tabwriter",
sum = "h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=",
version = "v0.0.0-20181228230101-89fcab3d43de",
)
go_repository(
name = "com_github_lightstep_lightstep_tracer_common_golang_gogo",
importpath = "github.com/lightstep/lightstep-tracer-common/golang/gogo",
sum = "h1:143Bb8f8DuGWck/xpNUOckBVYfFbBTnLevfRZ1aVVqo=",
version = "v0.0.0-20190605223551-bc2310a04743",
)
go_repository(
name = "com_github_lightstep_lightstep_tracer_go",
importpath = "github.com/lightstep/lightstep-tracer-go",
sum = "h1:fAazJekOWnfBeQYwk9jEgIWWKmBxq4ev3WfsAnezgc4=",
version = "v0.18.0",
)
go_repository(
name = "com_github_lithammer_dedent",
importpath = "github.com/lithammer/dedent",
sum = "h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY=",
version = "v1.1.0",
)
go_repository(
name = "com_github_lovoo_gcloud_opentracing",
importpath = "github.com/lovoo/gcloud-opentracing",
sum = "h1:nAeKG70rIsog0TelcEtt6KU0Y1s5qXtsDLnHp0urPLU=",
version = "v0.3.0",
)
go_repository(
name = "com_github_magiconair_properties",
importpath = "github.com/magiconair/properties",
sum = "h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=",
version = "v1.8.0",
)
go_repository(
name = "com_github_mailru_easyjson",
importpath = "github.com/mailru/easyjson",
sum = "h1:aizVhC/NAAcKWb+5QsU1iNOZb4Yws5UO2I+aIprQITM=",
version = "v0.7.0",
)
go_repository(
name = "com_github_makenowjust_heredoc",
importpath = "github.com/MakeNowJust/heredoc",
sum = "h1:sjQovDkwrZp8u+gxLtPgKGjk5hCxuy2hrRejBTA9xFU=",
version = "v0.0.0-20170808103936-bb23615498cd",
)
go_repository(
name = "com_github_markbates_inflect",
importpath = "github.com/markbates/inflect",
sum = "h1:5fh1gzTFhfae06u3hzHYO9xe3l3v3nW5Pwt3naLTP5g=",
version = "v1.0.4",
)
go_repository(
name = "com_github_marstr_guid",
importpath = "github.com/marstr/guid",
sum =
# Source repository: github.com/Jeremiad/Flexget (rtorrent plugin)
import os
import re
import socket
from io import BytesIO
from time import sleep
from datetime import datetime
from urllib.parse import urljoin, urlparse, urlsplit
from xmlrpc import client as xmlrpc_client
from loguru import logger
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.bittorrent import Torrent, is_torrent_file
from flexget.utils.pathscrub import pathscrub
from flexget.utils.template import RenderError
logger = logger.bind(name='rtorrent')
class _Method:
# some magic to bind an XML-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self.__send = send
self.__name = name
def __getattr__(self, name):
return _Method(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
return self.__send(self.__name, args)
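# Illustrative chaining (the send callable is hypothetical):
#   m = _Method(send, 'd')
#   m.name.set           -> _Method(send, 'd.name.set')   via __getattr__
#   m.name.set(h, 'x')   -> send('d.name.set', (h, 'x'))  via __call__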
class HTTPDigestTransport(xmlrpc_client.Transport):
"""
Transport that uses requests to support Digest authentication.
"""
def __init__(self, scheme, digest_auth, username, password, session, *args, **kwargs):
self.__scheme = scheme
self.__session = session
self.__digest_auth = digest_auth
self.__username = username
self.__password = password
self.verbose = 0
xmlrpc_client.Transport.__init__(self, *args, **kwargs) # old style class
def request(self, host, handler, request_body, verbose=False):
return self.single_request(host, handler, request_body, verbose)
def single_request(self, host, handler, request_body, verbose=0):
url = urljoin('{0}://{1}'.format(self.__scheme, host), handler)
auth = self.get_auth()
response = self.send_request(url, auth, request_body)
# if status code is 401, it means we used the wrong auth method
if response.status_code == 401:
logger.warning(
'{} auth failed. Retrying with {}. Please change your config.',
'Digest' if self.__digest_auth else 'Basic',
'Basic' if self.__digest_auth else 'Digest',
)
self.__digest_auth = not self.__digest_auth
auth = self.get_auth()
response = self.send_request(url, auth, request_body)
response.raise_for_status()
return self.parse_response(response)
def get_auth(self):
if self.__digest_auth:
return HTTPDigestAuth(self.__username, self.__password)
return HTTPBasicAuth(self.__username, self.__password)
def send_request(self, url, auth, data):
return self.__session.post(url, auth=auth, data=data, raise_status=False)
def parse_response(self, response):
p, u = self.getparser()
if self.verbose:
logger.info('body: {!r}', response)
p.feed(response.content)
p.close()
return u.close()
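# Usage sketch (host, credentials and session are assumed values); this is the
# same wiring RTorrent.__init__ performs below:
#   transport = HTTPDigestTransport('https', False, 'user', 'secret', requests_session)
#   proxy = xmlrpc_client.ServerProxy('https://host/RPC2', transport=transport)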
def encode_netstring(input):
return str(len(input)).encode() + b':' + input + b','
def encode_header(key, value):
return key + b'\x00' + value + b'\x00'
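# Concrete values for the SCGI framing helpers above:
#   encode_header(b'SCGI', b'1') == b'SCGI\x001\x00'
#   encode_netstring(b'abc')     == b'3:abc,'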
class SCGITransport(xmlrpc_client.Transport):
"""
Public domain SCGITransport implementation from:
https://github.com/JohnDoee/autotorrent/blob/develop/autotorrent/scgitransport.py
"""
def __init__(self, *args, **kwargs):
self.socket_path = kwargs.pop('socket_path', '')
xmlrpc_client.Transport.__init__(self, *args, **kwargs)
def single_request(self, host, handler, request_body, verbose=False):
self.verbose = verbose
if self.socket_path:
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(self.socket_path)
else:
host, port = host.split(':')
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, int(port)))
request = encode_header(b'CONTENT_LENGTH', str(len(request_body)).encode())
request += encode_header(b'SCGI', b'1')
request += encode_header(b'REQUEST_METHOD', b'POST')
request += encode_header(b'REQUEST_URI', handler.encode())
request = encode_netstring(request)
request += request_body
s.send(request)
response = b''
while True:
r = s.recv(1024)
if not r:
break
response += r
response_body = BytesIO(b'\r\n\r\n'.join(response.split(b'\r\n\r\n')[1:]))
return self.parse_response(response_body)
if not hasattr(xmlrpc_client.Transport, 'single_request'):
SCGITransport.request = SCGITransport.single_request
def create_proxy(url):
parsed = urlsplit(url)
if not parsed.scheme:
path = parsed.path
return xmlrpc_client.ServerProxy('http://1', transport=SCGITransport(socket_path=path))
if parsed.scheme == 'scgi':
url = 'http://%s' % parsed.netloc
return xmlrpc_client.ServerProxy(url, transport=SCGITransport())
logger.debug('Creating Normal XMLRPC Proxy with url {!r}', url)
return xmlrpc_client.ServerProxy(url)
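# How the three supported URL shapes are routed (example values):
#   create_proxy('/home/user/.rtorrent.sock')  -> SCGI over a unix socket
#   create_proxy('scgi://localhost:5000')      -> SCGI over TCP
#   create_proxy('http://localhost:8080/RPC2') -> plain XML-RPC over HTTP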
class RTorrent:
""" rTorrent API client """
default_fields = (
'hash',
'name',
'up_total',
'down_total',
'down_rate',
'is_open',
'is_active',
'custom1',
'custom2',
'custom3',
'custom4',
'custom5',
'state',
'complete',
'bytes_done',
'down.rate',
'left_bytes',
'ratio',
'base_path',
'load_date',
'timestamp_finished',
)
required_fields = ('hash', 'name', 'base_path')
def __init__(self, uri, username=None, password=None, digest_auth=None, session=None):
"""
New connection to rTorrent
:param uri: RTorrent URL. Supports both http(s) and scgi
:param username: Username for basic auth over http(s)
:param password: Password for basic auth over http(s)
:param digest_auth: Use HTTP digest auth instead of basic auth (http(s) only)
:param session: Requests session used by the http(s) transport
"""
self.uri = uri
self.username = username
self.password = password
self.digest_auth = digest_auth
self._version = None
parsed_uri = urlparse(uri)
if self.username and self.password and parsed_uri.scheme not in ['http', 'https']:
raise OSError('Username and password only supported on http(s)')
# Determine the proxy server
if parsed_uri.scheme in ['http', 'https']:
sp = xmlrpc_client.ServerProxy
elif parsed_uri.scheme == 'scgi':
sp = create_proxy
elif parsed_uri.scheme == '' and parsed_uri.path:
self.uri = parsed_uri.path
sp = create_proxy
else:
raise OSError('Unsupported scheme %s for uri %s' % (parsed_uri.scheme, self.uri))
# Use a special transport if http(s)
if parsed_uri.scheme in ['http', 'https']:
self._server = sp(
self.uri,
transport=HTTPDigestTransport(
parsed_uri.scheme, self.digest_auth, self.username, self.password, session
),
)
else:
self._server = sp(self.uri)
def _clean_fields(self, fields, reverse=False):
if not fields:
fields = list(self.default_fields)
if reverse:
for field in ['up.total', 'down.total', 'down.rate', 'timestamp.finished']:
if field in fields:
fields[fields.index(field)] = field.replace('.', '_')
return fields
for required_field in self.required_fields:
if required_field not in fields:
fields.insert(0, required_field)
for field in ['up_total', 'down_total', 'down_rate', 'timestamp_finished']:
if field in fields:
fields[fields.index(field)] = field.replace('_', '.')
return fields
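# _clean_fields maps between the snake_case names used by this plugin and the
# dotted names rTorrent expects, e.g. (illustrative):
#   _clean_fields(['down_rate'])               -> required fields + ['down.rate']
#   _clean_fields(['down.rate'], reverse=True) -> ['down_rate']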
def load(self, raw_torrent, fields=None, start=False, mkdir=True):
if fields is None:
fields = {}
# First param is empty 'target'
params = ['', xmlrpc_client.Binary(raw_torrent)]
# Additional fields to set
for key, val in fields.items():
# Values must be escaped if within params
# TODO: What are the escaping requirements? re.escape works differently on python 3.7+
params.append('d.%s.set=%s' % (key, re.escape(str(val))))
if mkdir and 'directory' in fields:
result = self._server.execute.throw('', 'mkdir', '-p', fields['directory'])
if result != 0:
raise xmlrpc_client.Error('Failed creating directory %s' % fields['directory'])
# by default rtorrent won't allow calls over 512kb in size.
xmlrpc_size = (
len(xmlrpc_client.dumps(tuple(params), 'raw_start')) + 71680
) # Add 70kb for buffer
if xmlrpc_size > 524288:
prev_size = self._server.network.xmlrpc.size_limit()
self._server.network.xmlrpc.size_limit.set('', xmlrpc_size)
# Call load method and return the response
if start:
result = self._server.load.raw_start(*params)
else:
result = self._server.load.raw(*params)
if xmlrpc_size > 524288:
self._server.network.xmlrpc.size_limit.set('', prev_size)
return result
def get_directory(self):
return self._server.get_directory()
def torrent(self, info_hash, fields=None):
""" Get the details of a torrent """
if not fields:
fields = list(self.default_fields)
fields = self._clean_fields(fields)
multi_call = xmlrpc_client.MultiCall(self._server)
for field in fields:
method_name = 'd.%s' % field
getattr(multi_call, method_name)(info_hash)
resp = multi_call()
# TODO: Maybe we should return a named tuple or a Torrent class?
return dict(list(zip(self._clean_fields(fields, reverse=True), [val for val in resp])))
def torrents(self, view='main', fields=None):
if not fields:
fields = list(self.default_fields)
fields = self._clean_fields(fields)
params = ['d.%s=' % field for field in fields]
params.insert(0, view)
resp = self._server.d.multicall2('', params)
# Response is formatted as a list of lists, with just the values
return [dict(list(zip(self._clean_fields(fields, reverse=True), val))) for val in resp]
def update(self, info_hash, fields):
multi_call = xmlrpc_client.MultiCall(self._server)
for key, val in fields.items():
method_name = 'd.%s.set' % key
getattr(multi_call, method_name)(info_hash, val)
return multi_call()[0]
def delete(self, info_hash):
return self._server.d.erase(info_hash)
def stop(self, info_hash):
self._server.d.stop(info_hash)
return self._server.d.close(info_hash)
def start(self, info_hash):
return self._server.d.start(info_hash)
def move(self, info_hash, dst_path):
self.stop(info_hash)
torrent = self.torrent(info_hash, fields=['base_path'])
try:
logger.verbose('Creating destination directory `{}`', dst_path)
self._server.execute.throw('', 'mkdir', '-p', dst_path)
except xmlrpc_client.Error:
raise xmlrpc_client.Error("unable to create folder %s" % dst_path)
self._server.execute.throw('', 'mv', '-u', torrent['base_path'], dst_path)
self._server.d.set_directory(info_hash, dst_path)
self.start(info_hash)
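# Minimal usage sketch for the client above (endpoint and file are hypothetical):
#   client = RTorrent('scgi://localhost:5000')
#   with open('example.torrent', 'rb') as fh:
#       client.load(fh.read(), fields={'directory': '/downloads'}, start=True)
#   for t in client.torrents():
#       print(t['name'], t['bytes_done'])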
class RTorrentPluginBase:
priority_map = {'high': 3, 'medium': 2, 'low': 1, 'off': 0}
def _build_options(self, config, entry, entry_first=True):
options = {}
for opt_key in (
'path',
'message',
'priority',
'custom1',
'custom2',
'custom3',
'custom4',
'custom5',
):
# Values do not merge config with task
# Task takes priority then config is used
entry_value = entry.get(opt_key)
config_value = config.get(opt_key)
if entry_first:
if entry_value:
options[opt_key] = entry.render(entry_value)
elif config_value:
options[opt_key] = entry.render(config_value)
else:
if config_value:
options[opt_key] = entry.render(config_value)
elif entry_value:
options[opt_key] = entry.render(entry_value)
# Convert priority from string to int
priority = options.get('priority')
if priority and priority in self.priority_map:
options['priority'] = self.priority_map[priority]
# Map Flexget path to directory in rTorrent
if options.get('path'):
options['directory'] = options['path']
del options['path']
if 'directory' in options:
options['directory'] = pathscrub(options['directory'])
return options
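# Illustrative outcome (config values assumed): with
#   config = {'priority': 'high', 'path': '/downloads/show'}
# and an entry without overrides, _build_options returns roughly
#   {'priority': 3, 'directory': '/downloads/show'}
# after template rendering, priority mapping and the path -> directory rename.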
class RTorrentOutputPlugin(RTorrentPluginBase):
schema = {
'type': 'object',
'properties': {
# connection info
'uri': {'type': 'string'},
'username': {'type': 'string'},
'password': {'type': 'string'},
'digest_auth': {'type': 'boolean', 'default': False},
'start': {'type': 'boolean', 'default': True},
'mkdir': {'type': 'boolean', 'default': True},
'action': {'type': 'string', 'enum': ['update', 'delete', 'add'], 'default': 'add'},
# properties to set on rtorrent download object
'message': {'type': 'string'},
'priority': {'type': 'string'},
'path': {'type': 'string'},
'custom1': {'type': 'string'},
'custom2': {'type': 'string'},
'custom3': {'type': 'string'},
'custom4': {'type': 'string'},
'custom5': {'type': 'string'},
'fast_resume': {'type': 'boolean', 'default': False},
},
'required': ['uri'],
'additionalProperties': False,
}
def _verify_load(self, client, info_hash):
ex = xmlrpc_client.Error()
for _ in range(0, 5):
try:
return client.torrent(info_hash, fields=['hash'])
except xmlrpc_client.Error as e:
ex = e
sleep(0.5)
raise ex
@plugin.priority(120)
def on_task_download(self, task, config):
# If the download plugin is not enabled, we need to call it to get
# our temp .torrent files
if config['action'] == 'add' and 'download' not in task.config:
download = plugin.get('download', self)
download.get_temp_files(task, handle_magnets=True, fail_html=True)
@plugin.priority(135)
def on_task_output(self, task, config):
client = RTorrent(
os.path.expanduser(config['uri']),
username=config.get('username'),
password=config.get('password'),
digest_auth=config['digest_auth'],
session=task.requests,
)
try:
for entry in task.accepted:
if config['action'] == 'add':
if task.options.test:
logger.info('Would add {}
def getRuleIndex(self):
return LaTeXParser.RULE_mp
def mp(self, _p=0):
_parentctx = self._ctx
_parentState = self.state
localctx = LaTeXParser.MpContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 10
self.enterRecursionRule(localctx, 10, self.RULE_mp, _p)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 103
self.unary()
self._ctx.stop = self._input.LT(-1)
self.state = 110
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,2,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = LaTeXParser.MpContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_mp)
self.state = 105
if not self.precpred(self._ctx, 2):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
self.state = 106
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << LaTeXParser.MUL) | (1 << LaTeXParser.DIV) | (1 << LaTeXParser.CMD_TIMES) | (1 << LaTeXParser.CMD_CDOT) | (1 << LaTeXParser.CMD_DIV) | (1 << LaTeXParser.COLON))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 107
self.mp(3)
self.state = 112
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,2,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
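# Grammar sketch reconstructed from the generated code above (not quoted from
# the original .g4 file):
#   mp : unary | mp (MUL | DIV | CMD_TIMES | CMD_CDOT | CMD_DIV | COLON) mp ;
# ANTLR rewrites the left recursion into the while-loop in mp(), with
# precpred() keeping the multiplicative operators left-associative.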
class Mp_nofuncContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.Mp_nofuncContext, self).__init__(parent, invokingState)
self.parser = parser
def unary_nofunc(self):
return self.getTypedRuleContext(LaTeXParser.Unary_nofuncContext,0)
def mp_nofunc(self, i=None):
if i is None:
return self.getTypedRuleContexts(LaTeXParser.Mp_nofuncContext)
else:
return self.getTypedRuleContext(LaTeXParser.Mp_nofuncContext,i)
def MUL(self):
return self.getToken(LaTeXParser.MUL, 0)
def CMD_TIMES(self):
return self.getToken(LaTeXParser.CMD_TIMES, 0)
def CMD_CDOT(self):
return self.getToken(LaTeXParser.CMD_CDOT, 0)
def DIV(self):
return self.getToken(LaTeXParser.DIV, 0)
def CMD_DIV(self):
return self.getToken(LaTeXParser.CMD_DIV, 0)
def COLON(self):
return self.getToken(LaTeXParser.COLON, 0)
def getRuleIndex(self):
return LaTeXParser.RULE_mp_nofunc
def mp_nofunc(self, _p=0):
_parentctx = self._ctx
_parentState = self.state
localctx = LaTeXParser.Mp_nofuncContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 12
self.enterRecursionRule(localctx, 12, self.RULE_mp_nofunc, _p)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 114
self.unary_nofunc()
self._ctx.stop = self._input.LT(-1)
self.state = 121
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,3,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = LaTeXParser.Mp_nofuncContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_mp_nofunc)
self.state = 116
if not self.precpred(self._ctx, 2):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
self.state = 117
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << LaTeXParser.MUL) | (1 << LaTeXParser.DIV) | (1 << LaTeXParser.CMD_TIMES) | (1 << LaTeXParser.CMD_CDOT) | (1 << LaTeXParser.CMD_DIV) | (1 << LaTeXParser.COLON))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 118
self.mp_nofunc(3)
self.state = 123
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,3,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class UnaryContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.UnaryContext, self).__init__(parent, invokingState)
self.parser = parser
def unary(self):
return self.getTypedRuleContext(LaTeXParser.UnaryContext,0)
def ADD(self):
return self.getToken(LaTeXParser.ADD, 0)
def SUB(self):
return self.getToken(LaTeXParser.SUB, 0)
def postfix(self, i=None):
if i is None:
return self.getTypedRuleContexts(LaTeXParser.PostfixContext)
else:
return self.getTypedRuleContext(LaTeXParser.PostfixContext,i)
def getRuleIndex(self):
return LaTeXParser.RULE_unary
def unary(self):
localctx = LaTeXParser.UnaryContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_unary)
self._la = 0 # Token type
try:
self.state = 131
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [LaTeXParser.ADD, LaTeXParser.SUB]:
self.enterOuterAlt(localctx, 1)
self.state = 124
_la = self._input.LA(1)
if not(_la==LaTeXParser.ADD or _la==LaTeXParser.SUB):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 125
self.unary()
pass
elif token in [LaTeXParser.L_PAREN, LaTeXParser.L_BRACE, LaTeXParser.L_BRACKET, LaTeXParser.BAR, LaTeXParser.FUNC_LIM, LaTeXParser.FUNC_INT, LaTeXParser.FUNC_SUM, LaTeXParser.FUNC_PROD, LaTeXParser.FUNC_EXP, LaTeXParser.FUNC_LOG, LaTeXParser.FUNC_LN, LaTeXParser.FUNC_SIN, LaTeXParser.FUNC_COS, LaTeXParser.FUNC_TAN, LaTeXParser.FUNC_CSC, LaTeXParser.FUNC_SEC, LaTeXParser.FUNC_COT, LaTeXParser.FUNC_ARCSIN, LaTeXParser.FUNC_ARCCOS, LaTeXParser.FUNC_ARCTAN, LaTeXParser.FUNC_ARCCSC, LaTeXParser.FUNC_ARCSEC, LaTeXParser.FUNC_ARCCOT, LaTeXParser.FUNC_SINH, LaTeXParser.FUNC_COSH, LaTeXParser.FUNC_TANH, LaTeXParser.FUNC_ARSINH, LaTeXParser.FUNC_ARCOSH, LaTeXParser.FUNC_ARTANH, LaTeXParser.FUNC_SQRT, LaTeXParser.CMD_FRAC, LaTeXParser.CMD_BINOM, LaTeXParser.CMD_DBINOM, LaTeXParser.CMD_TBINOM, LaTeXParser.CMD_MATHIT, LaTeXParser.DIFFERENTIAL, LaTeXParser.LETTER, LaTeXParser.NUMBER, LaTeXParser.SYMBOL]:
self.enterOuterAlt(localctx, 2)
self.state = 127
self._errHandler.sync(self)
_alt = 1
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt == 1:
self.state = 126
self.postfix()
else:
raise NoViableAltException(self)
self.state = 129
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,4,self._ctx)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Unary_nofuncContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.Unary_nofuncContext, self).__init__(parent, invokingState)
self.parser = parser
def unary_nofunc(self):
return self.getTypedRuleContext(LaTeXParser.Unary_nofuncContext,0)
def ADD(self):
return self.getToken(LaTeXParser.ADD, 0)
def SUB(self):
return self.getToken(LaTeXParser.SUB, 0)
def postfix(self):
return self.getTypedRuleContext(LaTeXParser.PostfixContext,0)
def postfix_nofunc(self, i=None):
if i is None:
return self.getTypedRuleContexts(LaTeXParser.Postfix_nofuncContext)
else:
return self.getTypedRuleContext(LaTeXParser.Postfix_nofuncContext,i)
def getRuleIndex(self):
return LaTeXParser.RULE_unary_nofunc
def unary_nofunc(self):
localctx = LaTeXParser.Unary_nofuncContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_unary_nofunc)
self._la = 0 # Token type
try:
self.state = 142
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [LaTeXParser.ADD, LaTeXParser.SUB]:
self.enterOuterAlt(localctx, 1)
self.state = 133
_la = self._input.LA(1)
if not(_la==LaTeXParser.ADD or _la==LaTeXParser.SUB):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 134
self.unary_nofunc()
pass
elif token in [LaTeXParser.L_PAREN, LaTeXParser.L_BRACE, LaTeXParser.L_BRACKET, LaTeXParser.BAR, LaTeXParser.FUNC_LIM, LaTeXParser.FUNC_INT, LaTeXParser.FUNC_SUM, LaTeXParser.FUNC_PROD, LaTeXParser.FUNC_EXP, LaTeXParser.FUNC_LOG, LaTeXParser.FUNC_LN, LaTeXParser.FUNC_SIN, LaTeXParser.FUNC_COS, LaTeXParser.FUNC_TAN, LaTeXParser.FUNC_CSC, LaTeXParser.FUNC_SEC, LaTeXParser.FUNC_COT, LaTeXParser.FUNC_ARCSIN, LaTeXParser.FUNC_ARCCOS, LaTeXParser.FUNC_ARCTAN, LaTeXParser.FUNC_ARCCSC, LaTeXParser.FUNC_ARCSEC, LaTeXParser.FUNC_ARCCOT, LaTeXParser.FUNC_SINH, LaTeXParser.FUNC_COSH, LaTeXParser.FUNC_TANH, LaTeXParser.FUNC_ARSINH, LaTeXParser.FUNC_ARCOSH, LaTeXParser.FUNC_ARTANH, LaTeXParser.FUNC_SQRT, LaTeXParser.CMD_FRAC, LaTeXParser.CMD_BINOM, LaTeXParser.CMD_DBINOM, LaTeXParser.CMD_TBINOM, LaTeXParser.CMD_MATHIT, LaTeXParser.DIFFERENTIAL, LaTeXParser.LETTER, LaTeXParser.NUMBER, LaTeXParser.SYMBOL]:
self.enterOuterAlt(localctx, 2)
self.state = 135
self.postfix()
self.state = 139
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,6,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 136
self.postfix_nofunc()
self.state = 141
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,6,self._ctx)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class PostfixContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.PostfixContext, self).__init__(parent, invokingState)
self.parser = parser
def exp(self):
return self.getTypedRuleContext(LaTeXParser.ExpContext,0)
def postfix_op(self, i=None):
if i is None:
return self.getTypedRuleContexts(LaTeXParser.Postfix_opContext)
else:
return self.getTypedRuleContext(LaTeXParser.Postfix_opContext,i)
def getRuleIndex(self):
return LaTeXParser.RULE_postfix
def postfix(self):
localctx = LaTeXParser.PostfixContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_postfix)
try:
self.enterOuterAlt(localctx, 1)
self.state = 144
self.exp(0)
self.state = 148
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,8,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 145
self.postfix_op()
self.state = 150
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,8,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Postfix_nofuncContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.Postfix_nofuncContext, self).__init__(parent, invokingState)
self.parser = parser
def exp_nofunc(self):
return self.getTypedRuleContext(LaTeXParser.Exp_nofuncContext,0)
def postfix_op(self, i=None):
if i is None:
return self.getTypedRuleContexts(LaTeXParser.Postfix_opContext)
else:
return self.getTypedRuleContext(LaTeXParser.Postfix_opContext,i)
def getRuleIndex(self):
return LaTeXParser.RULE_postfix_nofunc
def postfix_nofunc(self):
localctx = LaTeXParser.Postfix_nofuncContext(self, self._ctx, self.state)
self.enterRule(localctx, 20, self.RULE_postfix_nofunc)
try:
self.enterOuterAlt(localctx, 1)
self.state = 151
self.exp_nofunc(0)
self.state = 155
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,9,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
self.state = 152
self.postfix_op()
self.state = 157
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,9,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Postfix_opContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.Postfix_opContext, self).__init__(parent, invokingState)
self.parser = parser
def BANG(self):
return self.getToken(LaTeXParser.BANG, 0)
def eval_at(self):
return self.getTypedRuleContext(LaTeXParser.Eval_atContext,0)
def getRuleIndex(self):
return LaTeXParser.RULE_postfix_op
def postfix_op(self):
localctx = LaTeXParser.Postfix_opContext(self, self._ctx, self.state)
self.enterRule(localctx, 22, self.RULE_postfix_op)
try:
self.state = 160
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [LaTeXParser.BANG]:
self.enterOuterAlt(localctx, 1)
self.state = 158
self.match(LaTeXParser.BANG)
pass
elif token in [LaTeXParser.BAR]:
self.enterOuterAlt(localctx, 2)
self.state = 159
self.eval_at()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Eval_atContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.Eval_atContext, self).__init__(parent, invokingState)
self.parser = parser
def BAR(self):
return self.getToken(LaTeXParser.BAR, 0)
def eval_at_sup(self):
return self.getTypedRuleContext(LaTeXParser.Eval_at_supContext,0)
def eval_at_sub(self):
return self.getTypedRuleContext(LaTeXParser.Eval_at_subContext,0)
def getRuleIndex(self):
return LaTeXParser.RULE_eval_at
def eval_at(self):
localctx = LaTeXParser.Eval_atContext(self, self._ctx, self.state)
self.enterRule(localctx, 24, self.RULE_eval_at)
try:
self.enterOuterAlt(localctx, 1)
self.state = 162
self.match(LaTeXParser.BAR)
self.state = 168
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
if la_ == 1:
self.state = 163
self.eval_at_sup()
pass
elif la_ == 2:
self.state = 164
self.eval_at_sub()
pass
elif la_ == 3:
self.state = 165
self.eval_at_sup()
self.state = 166
self.eval_at_sub()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
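# Reconstructed rule shape (illustrative):
#   eval_at : BAR (eval_at_sup | eval_at_sub | eval_at_sup eval_at_sub) ;
# i.e. the LaTeX "evaluated at" bar, as in \left. f(x) \right|_{a}^{b}.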
class Eval_at_subContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.Eval_at_subContext, self).__init__(parent, invokingState)
self.parser = parser
def UNDERSCORE(self):
return self.getToken(LaTeXParser.UNDERSCORE, 0)
def L_BRACE(self):
return self.getToken(LaTeXParser.L_BRACE, 0)
def R_BRACE(self):
return self.getToken(LaTeXParser.R_BRACE, 0)
def expr(self):
return self.getTypedRuleContext(LaTeXParser.ExprContext,0)
def equality(self):
return self.getTypedRuleContext(LaTeXParser.EqualityContext,0)
def getRuleIndex(self):
return LaTeXParser.RULE_eval_at_sub
def eval_at_sub(self):
localctx = LaTeXParser.Eval_at_subContext(self, self._ctx, self.state)
self.enterRule(localctx, 26, self.RULE_eval_at_sub)
try:
self.enterOuterAlt(localctx, 1)
self.state = 170
self.match(LaTeXParser.UNDERSCORE)
self.state = 171
self.match(LaTeXParser.L_BRACE)
self.state = 174
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,12,self._ctx)
if la_ == 1:
self.state = 172
self.expr()
pass
elif la_ == 2:
self.state = 173
self.equality()
pass
self.state = 176
self.match(LaTeXParser.R_BRACE)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Eval_at_supContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(LaTeXParser.Eval_at_supContext, self).__init__(parent, invokingState)
self.parser = parser
def CARET(self):
return self.getToken(LaTeXParser.CARET, 0)
def L_BRACE(self):
return self.getToken(LaTeXParser.L_BRACE, 0)
def R_BRACE(self):
return self.getToken(LaTeXParser.R_BRACE, 0)
def expr(self):
return self.getTypedRuleContext(LaTeXParser.ExprContext,0)
def equality(self):
return self.getTypedRuleContext(LaTeXParser.EqualityContext,0)
def getRuleIndex(self):
return LaTeXParser.RULE_eval_at_sup
def eval_at_sup(self):
localctx = LaTeXParser.Eval_at_supContext(self, self._ctx, self.state)
self.enterRule(localctx, 28, self.RULE_eval_at_sup)
try:
self.enterOuterAlt(localctx, 1)
self.state = 178
self.match(LaTeXParser.CARET)
self.state = 179
| |
# -*- coding: utf-8 -*-
"""
Utilities for reading and writing USID datasets that are highly model-dependent (with or without N-dimensional form)
Created on Tue Nov 3 21:14:25 2015
@author: <NAME>, <NAME>
"""
from __future__ import division, print_function, absolute_import, unicode_literals
from warnings import warn
import sys
import h5py
import numpy as np
from dask import array as da
from sidpy.hdf.hdf_utils import get_attr, write_simple_attrs, is_editable_h5, \
copy_dataset, lazy_load_array
from sidpy.base.num_utils import contains_integers
from sidpy.base.dict_utils import flatten_dict
from sidpy.base.string_utils import validate_single_string_arg, \
validate_list_of_strings, validate_string_args
from sidpy.hdf.dtype_utils import validate_dtype
from sidpy import sid
from .base import write_book_keeping_attrs
from .simple import link_as_main, check_if_main, write_ind_val_dsets, validate_dims_against_main, validate_anc_h5_dsets
from ..dimension import Dimension, validate_dimensions
from ..anc_build_utils import INDICES_DTYPE, make_indices_matrix
if sys.version_info.major == 3:
unicode = str
def reshape_to_n_dims(h5_main, h5_pos=None, h5_spec=None, get_labels=False, verbose=False, sort_dims=False,
lazy=False):
"""
Reshape the input 2D matrix to be N-dimensions based on the
position and spectroscopic datasets.
Parameters
----------
h5_main : HDF5 Dataset
2D data to be reshaped
h5_pos : HDF5 Dataset, optional
Position indices corresponding to rows in `h5_main`
h5_spec : HDF5 Dataset, optional
Spectroscopic indices corresponding to columns in `h5_main`
get_labels : bool, optional
Whether or not to return the dimension labels. Default False
verbose : bool, optional
Whether or not to print debugging statements
sort_dims : bool
If True, the data is sorted so that the dimensions are in order from slowest to fastest
If False, the data is kept in the original order
If `get_labels` is also True, the labels are sorted as well.
lazy : bool, optional. Default = False
If False, ds_Nd will be a numpy.ndarray object - this is suitable if the HDF5 dataset fits into memory
If True, ds_Nd will be a dask.array object - This is suitable if the HDF5 dataset is too large to fit into
memory. Note that this will be a lazy computation, meaning that the returned object just contains the
instructions. In order to get the actual value or content in numpy arrays, call ds_Nd.compute()
Returns
-------
ds_Nd : N-D numpy array or dask.array object
N dimensional array arranged as [positions slowest to fastest, spectroscopic slowest to fastest]
success : boolean or string
True if full reshape was successful
"Positions" if it was only possible to reshape by
the position dimensions
False if no reshape was possible
ds_labels : list of str
List of the labels of each dimension of `ds_Nd`
Notes
-----
If either `h5_pos` or `h5_spec` are not provided, the function will first
attempt to find them as attributes of `h5_main`. If that fails, it will
generate dummy values for them.
"""
# TODO: automatically switch on lazy if the data is larger than memory
# TODO: sort_dims does not appear to do much. Functions as though it was always True
if h5_pos is None and h5_spec is None:
if not check_if_main(h5_main):
raise ValueError('if h5_main is a h5py.Dataset it should be a Main dataset')
else:
if not isinstance(h5_main, (h5py.Dataset, np.ndarray, da.core.Array)):
raise TypeError('h5_main should either be a h5py.Dataset or numpy array')
if h5_pos is not None:
if not isinstance(h5_pos, (h5py.Dataset, np.ndarray, da.core.Array)):
raise TypeError('h5_pos should either be a h5py.Dataset or numpy array')
if h5_pos.shape[0] != h5_main.shape[0]:
raise ValueError('The size of h5_pos: {} does not match with h5_main: {}'.format(h5_pos.shape,
h5_main.shape))
if h5_spec is not None:
if not isinstance(h5_spec, (h5py.Dataset, np.ndarray, da.core.Array)):
raise TypeError('h5_spec should either be a h5py.Dataset or numpy array')
if h5_spec.shape[1] != h5_main.shape[1]:
raise ValueError('The size of h5_spec: {} does not match with h5_main: {}'.format(h5_spec.shape,
h5_main.shape))
pos_labs = np.array(['Positions'])
spec_labs = np.array(['Spectral_Step'])
if h5_pos is None:
"""
Get the Position datasets from the references if possible
"""
if isinstance(h5_main, h5py.Dataset):
try:
h5_pos = h5_main.file[h5_main.attrs['Position_Indices']]
ds_pos = h5_pos[()]
pos_labs = get_attr(h5_pos, 'labels')
except KeyError:
print('No position datasets found as attributes of {}'.format(h5_main.name))
if len(h5_main.shape) > 1:
ds_pos = np.arange(h5_main.shape[0], dtype=INDICES_DTYPE).reshape(-1, 1)
pos_labs = np.array(['Position Dimension {}'.format(ipos) for ipos in range(ds_pos.shape[1])])
else:
ds_pos = np.array(0, dtype=INDICES_DTYPE).reshape(-1, 1)
else:
ds_pos = np.arange(h5_main.shape[0], dtype=INDICES_DTYPE).reshape(-1, 1)
pos_labs = np.array(['Position Dimension {}'.format(ipos) for ipos in range(ds_pos.shape[1])])
elif isinstance(h5_pos, h5py.Dataset):
"""
Position Indices dataset was provided
"""
ds_pos = h5_pos[()]
pos_labs = get_attr(h5_pos, 'labels')
elif isinstance(h5_pos, (np.ndarray, da.core.Array)):
ds_pos = np.atleast_2d(h5_pos)
pos_labs = np.array(['Position Dimension {}'.format(ipos) for ipos in range(ds_pos.shape[1])])
else:
raise TypeError('Position Indices must be either h5py.Dataset or None')
if h5_spec is None:
"""
Get the Spectroscopic datasets from the references if possible
"""
if isinstance(h5_main, h5py.Dataset):
try:
h5_spec = h5_main.file[h5_main.attrs['Spectroscopic_Indices']]
ds_spec = h5_spec[()]
spec_labs = get_attr(h5_spec, 'labels')
except KeyError:
print('No spectroscopic datasets found as attributes of {}'.format(h5_main.name))
if len(h5_main.shape) > 1:
ds_spec = np.arange(h5_main.shape[1], dtype=INDICES_DTYPE).reshape([1, -1])
spec_labs = np.array(['Spectral Dimension {}'.format(ispec) for ispec in range(ds_spec.shape[0])])
else:
ds_spec = np.array(0, dtype=INDICES_DTYPE).reshape([1, 1])
else:
ds_spec = np.arange(h5_main.shape[1], dtype=INDICES_DTYPE).reshape([1, -1])
spec_labs = np.array(['Spectral Dimension {}'.format(ispec) for ispec in range(ds_spec.shape[0])])
elif isinstance(h5_spec, h5py.Dataset):
"""
Spectroscopic Indices dataset was provided
"""
ds_spec = h5_spec[()]
spec_labs = get_attr(h5_spec, 'labels')
elif isinstance(h5_spec, (np.ndarray, da.core.Array)):
ds_spec = h5_spec
spec_labs = np.array(['Spectral Dimension {}'.format(ispec) for ispec in range(ds_spec.shape[0])])
else:
raise TypeError('Spectroscopic Indices must be either h5py.Dataset or None')
'''
Sort the indices from fastest to slowest
'''
pos_sort = get_sort_order(np.transpose(ds_pos))
spec_sort = get_sort_order(ds_spec)
if verbose:
print('Position dimensions:', pos_labs)
print('Position sort order:', pos_sort)
print('Spectroscopic Dimensions:', spec_labs)
print('Spectroscopic sort order:', spec_sort)
'''
Get the size of each dimension in the sorted order
'''
pos_dims = get_dimensionality(np.transpose(ds_pos), pos_sort)
spec_dims = get_dimensionality(ds_spec, spec_sort)
if np.prod(pos_dims) != h5_main.shape[0]:
mesg = 'Product of position dimension sizes: {} = {} not matching ' \
'with size of first axis of main dataset: {}. One or more ' \
'dimensions are dependent dimensions and not marked as such' \
'.'.format(pos_dims, np.prod(pos_dims), h5_main.shape[0])
raise ValueError(mesg)
if np.prod(spec_dims) != h5_main.shape[1]:
mesg = 'Product of spectroscopic dimension sizes: {} = {} not matching ' \
'with size of second axis of main dataset: {}. One or more ' \
'dimensions are dependent dimensions and not marked as such' \
'.'.format(spec_dims, np.prod(spec_dims), h5_main.shape[1])
raise ValueError(mesg)
if verbose:
print('\nPosition dimensions (sort applied):', pos_labs[pos_sort])
print('Position dimensionality (sort applied):', pos_dims)
print('Spectroscopic dimensions (sort applied):', spec_labs[spec_sort])
print('Spectroscopic dimensionality (sort applied):', spec_dims)
if lazy:
ds_main = lazy_load_array(h5_main)
else:
ds_main = h5_main[()]
"""
Now we reshape the dataset based on those dimensions
numpy reshapes correctly when the dimensions are arranged from slowest to fastest.
Since the sort orders we have are from fastest to slowest, we need to reverse the orders
for both the position and spectroscopic dimensions
"""
if verbose:
print('Will attempt to reshape main dataset from:\n{} to {}'.format(ds_main.shape, pos_dims[::-1] + spec_dims[::-1]))
try:
ds_Nd = ds_main.reshape(pos_dims[::-1] + spec_dims[::-1])
except ValueError:
warn('Could not reshape dataset to full N-dimensional form. Attempting reshape based on position only.')
try:
ds_Nd = ds_main.reshape(pos_dims[::-1] + [-1])
except ValueError:
warn('Reshape by position only also failed. Will keep dataset in 2d form.')
if get_labels:
return ds_main, False, ['Position', 'Spectral Step']
else:
return ds_main, False
# No exception
else:
if get_labels:
            return ds_Nd, 'Positions', ['Position'] + list(spec_labs)
else:
return ds_Nd, 'Positions'
all_labels = np.hstack((pos_labs[pos_sort][::-1],
spec_labs[spec_sort][::-1]))
if verbose:
print('\nAfter reshaping, labels are', all_labels)
print('Data shape is', ds_Nd.shape)
"""
At this point, the data is arranged from slowest to fastest dimension in both pos and spec
"""
if sort_dims:
results = [ds_Nd, True]
if get_labels:
results.append(all_labels)
return results
if verbose:
print('\nGoing to put dimensions back in the same order as in the file:')
swap_axes = list()
# Compare the original order of the pos / spec labels with where these dimensions occur in the sorted labels
for lab in pos_labs:
swap_axes.append(np.argwhere(all_labels == lab).squeeze())
for lab in spec_labs:
swap_axes.append(np.argwhere(all_labels == lab).squeeze())
swap_axes = np.array(swap_axes)
if verbose:
        print('Axes will be permuted in this order:', swap_axes)
print('New labels ordering:', all_labels[swap_axes])
ds_Nd = ds_Nd.transpose(tuple(swap_axes))
results = [ds_Nd, True]
if verbose:
print('Dataset now of shape:', ds_Nd.shape)
if get_labels:
'''
Get the labels in the proper order
'''
results.append(all_labels[swap_axes])
return results
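# A minimal usage sketch for reshape_to_n_dims (hypothetical file and dataset
# path; assumes a USID-style HDF5 layout where h5_main carries the
# Position_Indices and Spectroscopic_Indices attributes the code above expects):
#
#     import h5py
#     with h5py.File('data.h5', 'r') as h5_f:          # hypothetical file
#         h5_main = h5_f['Measurement_000/Raw_Data']   # hypothetical path
#         ds_nd, success, labels = reshape_to_n_dims(h5_main, get_labels=True,
#                                                    sort_dims=True)
#         # success is True after a full N-dimensional reshape; with
#         # sort_dims=True the labels are ordered slowest to fastest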
def reshape_from_n_dims(data_n_dim, h5_pos=None, h5_spec=None, verbose=False):
"""
    Reshape the input N-dimensional array back into the 2D matrix form
    [positions, spectroscopic values] based on the position and spectroscopic datasets.
Parameters
----------
data_n_dim : numpy.array or dask.array.core.Array
N dimensional array arranged as [positions dimensions..., spectroscopic dimensions]
        If h5_pos and h5_spec are not provided, the dimensions are assumed to be
        arranged from slowest to fastest.
    """
    @staticmethod
    def _topology_operations_unreserve(self, unreservation):
"""
:param unreservation (list):
list of object with fields
slot (java.lang.Integer):
port (java.lang.Integer):
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/topology/operations/unreserve', headers={'content-type': 'application/json'}, data=json.dumps({'unreservation': unreservation}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
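    # Usage sketch (hypothetical): every operation helper in this class returns
    # the decoded JSON body on HTTP 200 and a {'status_code', 'content'} dict on
    # any other status. The helpers are @staticmethods that take the owning
    # object explicitly, so a direct call passes it by hand (OperationsClass and
    # ops are hypothetical names):
    #
    #     result = OperationsClass._topology_operations_unreserve(
    #         ops, [{'slot': 1, 'port': 0}])   # ops exposes ._wrapper
    #     if isinstance(result, dict) and 'status_code' in result:
    #         raise RuntimeError(result)       # non-200 response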
### Deletes a Test Report from the database.
@staticmethod
def _reports_operations_delete(self, runid):
"""
Deletes a Test Report from the database.
:param runid (string): The run id of the test that generated the report you want to delete.
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/reports/operations/delete', headers={'content-type': 'application/json'}, data=json.dumps({'runid': runid}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### Adds a list of SuperFlow to the current working Application Profile. ([{'superflow':'adadad', 'weight':'20'},{..}])
@staticmethod
def _appProfile_operations_add(self, add):
"""
Adds a list of SuperFlow to the current working Application Profile. ([{'superflow':'adadad', 'weight':'20'},{..}])
:param add (list):
list of object with fields
superflow (java.lang.String): The name of the super flow
weight (java.lang.String): The weight of the super flow
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/appProfile/operations/add', headers={'content-type': 'application/json'}, data=json.dumps({'add': add}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
    ### Imports a list of strikes residing in a file.
@staticmethod
def _strikeList_operations_importStrikeList(self, name, filename, force):
"""
        Imports a list of strikes residing in a file.
:param name (java.lang.String): The name of the object being imported
:param filename (java.lang.String): The file containing the object to be imported.
        :param force (java.lang.Boolean): Force the import; an existing object with the same name will be replaced.
"""
appWrapper = self._wrapper
        # keep the file handle open for the duration of the upload, then close it
        with open(filename, 'rb') as fh:
            files = {'file': (name, fh, 'application/xml')}
            r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/strikeList/operations/importStrikeList', files=files, data={'fileInfo': str({'name': name, 'filename': filename, 'force': force})}, verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### null
@staticmethod
def _testmodel_operations_stopRun(self, runid):
"""
:param runid (java.lang.Integer): Test RUN ID
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/testmodel/operations/stopRun', headers={'content-type': 'application/json'}, data=json.dumps({'runid': runid}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### null
@staticmethod
def _topology_operations_stopRun(self, runid):
"""
        :param runid (java.lang.Integer): Test RUN ID
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/topology/operations/stopRun', headers={'content-type': 'application/json'}, data=json.dumps({'runid': runid}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### Load an existing Strike List and sets it as the current one.
@staticmethod
def _strikeList_operations_load(self, template):
"""
Load an existing Strike List and sets it as the current one.
:param template (string): The name of the Strike List template
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/strikeList/operations/load', headers={'content-type': 'application/json'}, data=json.dumps({'template': template}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### Creates a new Strike List.
@staticmethod
def _strikeList_operations_new(self, template=None):
"""
Creates a new Strike List.
        :param template (string): The name of the template. In this case it will be empty.
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/strikeList/operations/new', headers={'content-type': 'application/json'}, data=json.dumps({'template': template}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### null
@staticmethod
def _loadProfile_operations_load(self, template):
"""
:param template (java.lang.String):
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/loadProfile/operations/load', headers={'content-type': 'application/json'}, data=json.dumps({'template': template}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### null
@staticmethod
def _superflow_operations_search(self, searchString, limit, sort, sortorder):
"""
:param searchString (java.lang.String): Search Super Flow name matching the string given.
:param limit (java.lang.String): The limit of rows to return
:param sort (java.lang.String): Parameter to sort by.
:param sortorder (java.lang.String): The sort order (ascending/descending)
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/superflow/operations/search', headers={'content-type': 'application/json'}, data=json.dumps({'searchString': searchString, 'limit': limit, 'sort': sort, 'sortorder': sortorder}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### Deletes a given Super Flow from the database.
@staticmethod
def _superflow_operations_delete(self, name):
"""
Deletes a given Super Flow from the database.
:param name (string): The name of the Super Flow.
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/superflow/operations/delete', headers={'content-type': 'application/json'}, data=json.dumps({'name': name}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### null
@staticmethod
def _testmodel_operations_exportModel(self, name, attachments, filepath, runid=None):
"""
:param name (java.lang.String): The name of the test model to be exported.
:param attachments (java.lang.Boolean): True if object attachments are needed.
:param filepath (java.lang.String): The local path where to save the exported object.
:param runid (int): Test RUN ID
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/testmodel/operations/exportModel', headers={'content-type': 'application/json'}, data=json.dumps({'name': name, 'attachments': attachments, 'filepath': filepath, 'runid': runid}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
            # the with-block closes the file; no explicit close() needed
            with open(filepath, 'wb') as fd:
                for chunk in r.iter_content(chunk_size=1024):
                    fd.write(chunk)
r.close()
return {'status_code': r.status_code, 'content': 'success'}
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
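    # Usage sketch (hypothetical names): unlike the JSON helpers, exportModel
    # streams the response body straight into the local file given by filepath:
    #
    #     OperationsClass._testmodel_operations_exportModel(
    #         ops, 'MyTest', True, '/tmp/MyTest.bpt')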
### Saves the current working Application Profiles and gives it a new name.
@staticmethod
def _appProfile_operations_saveAs(self, name, force):
"""
Saves the current working Application Profiles and gives it a new name.
:param name (string): The new name given for the current working Application Profile
:param force (boolean): Force to save the working Application Profile using the given name.
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/appProfile/operations/saveAs', headers={'content-type': 'application/json'}, data=json.dumps({'name': name, 'force': force}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### Saves the current working application profile using the current name
@staticmethod
def _appProfile_operations_save(self, name=None, force=True):
"""
Saves the current working application profile using the current name
:param name (string): The name of the template. Default is empty.
:param force (boolean): Force to save the working Application Profile with the same name.
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/appProfile/operations/save', headers={'content-type': 'application/json'}, data=json.dumps({'name': name, 'force': force}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### Removes an action from the current working SuperFlow.
@staticmethod
def _superflow_operations_removeAction(self, id):
"""
Removes an action from the current working SuperFlow.
:param id (int): The action ID.
"""
appWrapper = self._wrapper
r = appWrapper.session.post(url='https://' + appWrapper.host + '/bps/api/v2/core/superflow/operations/removeAction', headers={'content-type': 'application/json'}, data=json.dumps({'id': id}), verify=False)
jsonContent = r.content is not None and (r.content.startswith(b'{') or r.content.startswith(b'['))
if(r.status_code == 200):
return json.loads(r.content) if jsonContent else r.content
else:
return {'status_code': r.status_code, 'content': json.loads(r.content) if jsonContent else r.content}
### Reboots the card. Only available for PerfectStorm and CloudStorm cards.
@staticmethod
def _topology_operations_reboot(self, board):
"""
        Reboots the card. Only available for PerfectStorm and CloudStorm cards.
        """
range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "2021-10-29"
Version to retrieve
The available versions are:
- 2021-11-10
- 2021-06-02
- 2021-10-29
"""
return AutomaticallyRetrievedGraph(
"FOVT", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def XPO(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "2021-03-05", **kwargs
) -> Graph:
"""Return XPO graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "2021-03-05"
Version to retrieve
The available versions are:
- 2021-03-05
"""
return AutomaticallyRetrievedGraph(
"XPO", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
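# Usage sketch (hypothetical environment): every retrieval helper in this
# module follows the same pattern, so any of them can be called directly once
# the surrounding ensmallen/grape-style package is importable:
#
#     graph = XPO(version="2021-03-05", cache_path="/tmp/graphs")
#     print(graph)  # the Graph repr summarises the loaded ontology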
def ZFS(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-03-10", **kwargs
) -> Graph:
"""Return ZFS graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "2020-03-10"
Version to retrieve
The available versions are:
- 2020-03-10
"""
return AutomaticallyRetrievedGraph(
"ZFS", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def RS(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "6.107", **kwargs
) -> Graph:
"""Return RS graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "6.107"
Version to retrieve
The available versions are:
- 6.107
"""
return AutomaticallyRetrievedGraph(
"RS", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def CTO(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
"""Return CTO graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "no_version"
Version to retrieve
The available versions are:
- no_version
"""
return AutomaticallyRetrievedGraph(
"CTO", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def OMO(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-06-08", **kwargs
) -> Graph:
"""Return OMO graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "2020-06-08"
Version to retrieve
The available versions are:
- 2022-04-27
- 2020-06-08
"""
return AutomaticallyRetrievedGraph(
"OMO", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def FIX(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-04-13", **kwargs
) -> Graph:
"""Return FIX graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "2020-04-13"
Version to retrieve
The available versions are:
- 2020-04-13
"""
return AutomaticallyRetrievedGraph(
"FIX", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def MAMO(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "no_version", **kwargs
) -> Graph:
"""Return MAMO graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "no_version"
Version to retrieve
The available versions are:
- no_version
"""
return AutomaticallyRetrievedGraph(
"MAMO", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def VTO(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "2020-11-13", **kwargs
) -> Graph:
"""Return VTO graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "2020-11-13"
Version to retrieve
The available versions are:
- 2020-11-13
"""
return AutomaticallyRetrievedGraph(
"VTO", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def UBERON(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
load_edge_weights = True, auto_enable_tradeoffs = True,
sort_tmp_dir = None, verbose = 2, cache = True, cache_path = None,
cache_sys_var = "GRAPH_CACHE_DIR", version = "2022-05-17", **kwargs
) -> Graph:
"""Return UBERON graph
Parameters
----------
directed = False
preprocess = "auto"
Preprocess for optimal load time & memory peak.
Will preprocess in Linux/macOS but not Windows.
load_nodes = True
Load node names or use numeric range
auto_enable_tradeoffs = True
Enable when graph has < 50M edges
cache_path = None
Path to store graphs
Defaults either to `GRAPH_CACHE_DIR` sys var or `graphs`
cache_sys_var = "GRAPH_CACHE_DIR"
version = "2022-05-17"
Version to retrieve
The available versions are:
- 2022-05-27
- 2021-10-01
- 2021-11-28
- 2022-02-21
- 2022-04-05
- 2022-04-18
- 2022-05-17
"""
return AutomaticallyRetrievedGraph(
"UBERON", version, "kgobo", directed, preprocess, load_nodes,
load_node_types, load_edge_weights, auto_enable_tradeoffs, sort_tmp_dir, verbose, cache,
cache_path, cache_sys_var, kwargs
)()
def MFOMD(
directed = False, preprocess = "auto", load_nodes = True, load_node_types = True,
    load_edge_weights = True,
'''
Created on Oct 21, 2021
@author: willg
This module helps track and store data from Wiimmfi.
'''
import asyncio
import json
import os
import time
import traceback
from collections import defaultdict
from copy import deepcopy
from itertools import chain
from typing import List, Dict, Tuple, Set
import aiosqlite
import Placement
import Player
import Race
import UserDataProcessing
import UtilityFunctions
import common
from data_tracking import Data_Tracker_SQL_Query_Builder as QB
DEBUGGING_DATA_TRACKER = False
DEBUGGING_SQL = False
db_connection:aiosqlite.Connection = None
class SQLDataBad(Exception):
pass
class SQLTypeWrong(SQLDataBad):
pass
class SQLFormatWrong(SQLDataBad):
pass
#dict of channel IDs to tier numbers
RT_NAME = "rt"
CT_NAME = "ct"
RXX_LOCKER_NAME = "rxx_locker"
RT_TABLE_BOT_CHANNEL_TIER_MAPPINGS = {
843981870751678484:8,
836652527432499260:7,
747290199242965062:6,
747290182096650332:5,
873721400056238160:5,
747290167391551509:4,
801620685826818078:4,
747290151016857622:3,
801620818965954580:3,
805860420224942080:3,
747290132675166330:2,
754104414335139940:2,
801630085823725568:2,
747289647003992078:1,
747544598968270868:1,
781249043623182406:1
}
CT_TABLE_BOT_CHANNEL_TIER_MAPPINGS = {
875532532383363072:7,
850520560424714240:6,
801625226064166922:5,
747290436275535913:4,
879429019546812466:4,
747290415404810250:3,
747290383297282156:2,
823014979279519774:2,
747290363433320539:1,
871442059599429632:1
}
RT_REVERSE_TIER_MAPPINGS = defaultdict(set)
CT_REVERSE_TIER_MAPPINGS = defaultdict(set)
for k,v in RT_TABLE_BOT_CHANNEL_TIER_MAPPINGS.items():
RT_REVERSE_TIER_MAPPINGS[v].add(k)
for k,v in CT_TABLE_BOT_CHANNEL_TIER_MAPPINGS.items():
CT_REVERSE_TIER_MAPPINGS[v].add(k)
TABLE_BOT_CHANNEL_TIER_MAPPINGS = {RT_NAME:RT_TABLE_BOT_CHANNEL_TIER_MAPPINGS, CT_NAME:CT_TABLE_BOT_CHANNEL_TIER_MAPPINGS}
# aiosqlite.Cursor is an async generator (not a regular generator), so list(cursor) does not work
class ConnectionWrapper():
def __init__(self, connection):
self.con: aiosqlite.Connection = connection
async def execute(self, *args):
cursor = await self.con.execute(*args)
return await cursor.fetchall()
async def executemany(self, *args):
cursor = await self.con.executemany(*args)
return await cursor.fetchall()
async def executescript(self, *args):
cursor = await self.con.executescript(*args)
return await cursor.fetchall()
def __getattr__(self, attr):
return self.con.__getattribute__(attr)
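# Usage sketch (hypothetical database path): the wrapper eagerly fetches all
# rows, so callers get plain lists instead of async cursors:
#
#     async def example():
#         raw = await aiosqlite.connect('room_data.db')  # hypothetical path
#         con = ConnectionWrapper(raw)
#         rows = await con.execute("SELECT track_name FROM Track LIMIT 5")
#         await con.close()  # delegated to the wrapped connection via __getattr__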
class DataRetriever(object):
#TODO: Finish method
@staticmethod
async def get_tracks_played_count(is_ct=False, tier=None, in_last_days=None):
tracks_query = QB.SQL_Search_Query_Builder.get_tracks_played_query(is_ct, tier, in_last_days)
return await db_connection.execute(tracks_query)
@staticmethod
async def get_best_tracks(fcs, is_ct=False, tier=None, in_last_days=None, sort_asc=False, min_count = 1):
tracks_query = QB.SQL_Search_Query_Builder.get_best_tracks(fcs, is_ct, tier, in_last_days, min_count)
result = await db_connection.execute(tracks_query)
if sort_asc:
return list(reversed(result))
return result
@staticmethod
async def get_top_players(track, tier=None, in_last_days=None, min_count=1):
#await db_connection.execute("WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt lIMIT 20000000) SELECT avg(x) FROM cnt;")
tracks_query = QB.SQL_Search_Query_Builder.get_top_players_query(tier, in_last_days, min_count)
return await db_connection.execute(tracks_query, [track])
@staticmethod
async def get_record(player_did, opponent_did, days):
record_query = QB.SQL_Search_Query_Builder.get_record_query(player_did, opponent_did, days)
return await db_connection.execute(record_query)
@staticmethod
async def get_track_list():
return await db_connection.execute("SELECT track_name, url, fixed_track_name, is_ct, track_name_lookup "
"FROM Track")
class ChannelBotSQLDataValidator(object):
def wrong_type_message(self, data, expected_type, multi=False):
if multi:
return f"{data} of type {type(data)} is not any of the expected types: ({', '.join([(t.__name__ if t is not None else 'None') for t in expected_type])})"
else:
return f"{data} of type {type(data)} is not expected type: {expected_type.__name__}"
def validate_type(self, data, expected_type, can_be_none):
if can_be_none:
if not isinstance(data, (expected_type, type(None))):
raise SQLTypeWrong(self.wrong_type_message(data, (expected_type, None), multi=True))
else:
if not isinstance(data, expected_type):
raise SQLTypeWrong(self.wrong_type_message(data, expected_type))
def validate_int(self, data, can_be_none=False):
self.validate_type(data, int, can_be_none)
def validate_str(self, data, can_be_none=False):
self.validate_type(data, str, can_be_none)
def validate_float(self, data, can_be_none=False):
self.validate_type(data, float, can_be_none)
def validate_bool(self, data, can_be_none=False):
self.validate_type(data, bool, can_be_none)
def is_from_wiimmfi_validation(self, is_from_wiimmfi):
self.validate_bool(is_from_wiimmfi)
def event_id_validation(self, event_id):
self.validate_int(event_id)
if event_id < 1:
raise SQLFormatWrong(f"{event_id} is not a formatted like an event id, which should be a number")
def channel_id_validation(self, channel_id):
self.validate_int(channel_id)
if channel_id < 1:
raise SQLFormatWrong(f"{channel_id} is not a formatted like an channel id, which should be a number")
def discord_id_validation(self, discord_id):
self.validate_int(discord_id)
if discord_id < 1:
raise SQLFormatWrong(f"{discord_id} is not a formatted like a discord id, which should be a number")
def placement_time_validation(self, time_str):
self.validate_str(time_str)
if not Placement.is_valid_time_str(time_str):
raise SQLFormatWrong(f"{time_str} is not formatted like a valid finishing time")
def placement_delta_validation(self, delta):
self.validate_float(delta, can_be_none=True)
def player_ol_status_validation(self, ol_status):
self.validate_str(ol_status, can_be_none=True)
def player_position_validation(self, player_pos):
self.validate_int(player_pos)
if player_pos < 1 and player_pos != -1:
raise SQLFormatWrong(f"{player_pos} is not a valid player position")
def player_finish_place_validation(self, place):
self.validate_int(place)
if place < 1:
raise SQLFormatWrong(f"{place} is not a valid finishing place")
def fc_validation(self, fc):
self.validate_str(fc)
if not UtilityFunctions.is_fc(fc):
raise SQLFormatWrong(f"{fc} is not a formatted like an FC")
def race_id_validation(self, race_id):
self.validate_str(race_id)
if not UtilityFunctions.is_race_ID(race_id):
raise SQLFormatWrong(f"{race_id} is not a formatted like a race ID")
def mii_hex_validation(self, mii_hex):
self.validate_str(mii_hex, can_be_none=True)
if isinstance(mii_hex, str):
if not UtilityFunctions.is_hex(mii_hex):
raise SQLFormatWrong(f"{mii_hex} is not a valid mii hex")
def player_id_validation(self, player_id):
self.validate_int(player_id)
def player_mkwx_url_validation(self, mkwx_url):
self.validate_str(mkwx_url)
def validate_player_data(self, players:List[Player.Player]):
'''Validates that all the data in players is the correct type and format before going into the database'''
for player in players:
self.fc_validation(player.get_FC())
self.player_id_validation(player.get_player_id())
self.player_mkwx_url_validation(player.get_mkwx_url())
def track_name_validation(self, track_name, rxx=None):
self.validate_str(track_name)
if track_name == "None":
raise SQLDataBad(f"track_name cannot be an 'None', room rxx: {rxx}")
def track_url_validation(self, track_url):
self.validate_str(track_url, can_be_none=True)
def track_name_no_author_validation(self, track_name_author_stripped, rxx=None):
self.validate_str(track_name_author_stripped)
if track_name_author_stripped == "None":
raise SQLDataBad(f"track_name without author cannot be an 'None', room rxx: {rxx}")
def track_lookup_name_validation(self, track_lookup_name, rxx=None):
self.validate_str(track_lookup_name)
if track_lookup_name == "None" or ' ' in track_lookup_name:
raise SQLDataBad(f"{track_lookup_name} is not a valid track lookup name, room rxx: {rxx}")
def track_is_ct_validation(self, is_ct):
self.validate_bool(is_ct)
def validate_tracks_data(self, races:List[Race.Race]):
'''Validates that all the relevant data (regarding track information) in races is the correct type and format before going into the database'''
for race in races:
self.track_name_validation(race.get_track_name(), rxx=race.rxx)
self.track_url_validation(race.get_track_url())
no_author_name = race.getTrackNameWithoutAuthor()
self.track_name_no_author_validation(no_author_name, rxx=race.rxx)
self.track_is_ct_validation(race.is_custom_track())
self.track_lookup_name_validation(Race.get_track_name_lookup(no_author_name), rxx=race.rxx)
def rxx_validation(self, rxx):
self.validate_str(rxx)
if not UtilityFunctions.is_rLID(rxx):
raise SQLFormatWrong(f"{rxx} is not a formatted like an rxx")
def wiimmfi_utc_time_validation(self, wiimmfi_time):
self.validate_str(wiimmfi_time)
if not UtilityFunctions.is_wiimmfi_utc_time(wiimmfi_time):
raise SQLFormatWrong(f"{wiimmfi_time} is not a formatted like the expected Wiimmfi time")
def race_number_validation(self, race_number):
self.validate_int(race_number)
if race_number < 1:
raise SQLDataBad(f"{race_number} race number must be greater than 0")
def race_room_name_validation(self, room_name):
self.validate_str(room_name)
def race_room_type_validation(self, room_type):
self.validate_str(room_type)
def race_cc_validation(self, cc):
self.validate_str(cc)
def region_validation(self, region):
self.validate_str(region)
if not Race.is_valid_region(region):
raise SQLFormatWrong(f"{region} region is not a valid region (see Race.is_valid_region)")
def connection_fails_validation(self, conn_fails):
self.validate_float(conn_fails, can_be_none=True)
def player_role_validation(self, player_role):
self.validate_str(player_role)
def player_vr_validation(self, vr):
self.validate_int(vr, can_be_none=True)
if isinstance(vr, int):
if vr < 0:
raise SQLFormatWrong(f"{vr} VR cannot be less than 0")
def player_character_validation(self, character):
self.validate_str(character, can_be_none=True)
if isinstance(character, str):
if character.strip() == "":
raise SQLFormatWrong(f"{character} character for player cannot be an empty string")
def player_vehicle_validation(self, vehicle):
self.validate_str(vehicle, can_be_none=True)
if isinstance(vehicle, str):
if vehicle.strip() == "":
raise SQLFormatWrong(f"{vehicle} vehicle for player cannot be an empty string")
def name_validation(self, name):
self.validate_str(name, can_be_none=True)
if isinstance(name, str):
if name.strip() == "":
raise SQLFormatWrong(f"{name} name player cannot be an empty string")
def validate_races_data(self, races:List[Race.Race]):
'''Validates that all the data in races is the correct type and format before going into the database'''
for race in races:
race:Race.Race
self.race_id_validation(race.get_race_id())
self.rxx_validation(race.get_rxx())
self.wiimmfi_utc_time_validation(race.get_match_start_time())
self.race_number_validation(race.get_race_number())
self.race_room_name_validation(race.get_room_name())
self.race_room_type_validation(race.get_room_type())
self.race_cc_validation(race.get_cc())
self.region_validation(race.get_region())
self.is_from_wiimmfi_validation(race.is_from_wiimmfi())
self.validate_tracks_data(races)
def validate_placement_data(self, placements:Dict[Tuple,Placement.Placement]):
for (race_id, fc), placement in placements.items():
self.race_id_validation(race_id)
self.fc_validation(fc)
player = placement.getPlayer()
self.fc_validation(player.get_FC())
self.player_finish_place_validation(placement.get_place())
self.placement_time_validation(placement.get_time_string())
self.placement_delta_validation(placement.get_delta())
self.player_ol_status_validation(player.get_ol_status())
self.player_position_validation(player.get_position())
self.region_validation(player.get_region())
self.connection_fails_validation(player.get_connection_fails())
self.player_role_validation(player.get_role())
self.player_vr_validation(player.get_VR())
self.player_character_validation(player.get_character())
self.player_vehicle_validation(player.get_vehicle())
self.name_validation(player.get_discord_name())
self.name_validation(player.get_lounge_name())
self.mii_hex_validation(player.get_mii_hex())
self.is_from_wiimmfi_validation(placement.is_from_wiimmfi())
def validate_event_id_race_ids(self, event_id_race_ids:Set[Tuple]):
for event_id, race_id in event_id_race_ids:
self.event_id_validation(event_id)
self.race_id_validation(race_id)
def validate_placement_mii_hex_update(self, race_id_fc_placements:Dict[Tuple, Placement.Placement]):
for (race_id, fc), placement in race_id_fc_placements.items():
self.race_id_validation(race_id)
self.fc_validation(fc)
self.mii_hex_validation(placement.getPlayer().get_mii_hex())
def validate_event_mii_hex_update(self, event_id_fc_miis:Set[Tuple]):
for (event_id, fc, mii_hex) in event_id_fc_miis:
self.event_id_validation(event_id)
self.fc_validation(fc)
self.mii_hex_validation(mii_hex)
def validate_event_data(self, channel_bot):
self.event_id_validation(channel_bot.get_event_id())
self.channel_id_validation(channel_bot.get_channel_id())
self.discord_id_validation(channel_bot.getRoom().get_set_up_user_discord_id())
if not isinstance(channel_bot.getRoom().get_known_region(), str):
raise SQLTypeWrong(self.wrong_type_message(channel_bot.getRoom().get_known_region(), str))
if not isinstance(channel_bot.getRoom().get_set_up_display_name(), str):
raise SQLTypeWrong(self.wrong_type_message(channel_bot.getRoom().get_set_up_display_name(), str))
self.validate_int(channel_bot.getWar().get_num_players())
def validate_event_fc_data(self, event_id_fcs):
for event_id, fc, _ in event_id_fcs:
self.event_id_validation(event_id)
self.fc_validation(fc)
def validate_event_structure_data(self, event_structure_tuple):
#Warning: this was apparently never completed
#TODO: complete validation of event_structure data
pass
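# Usage sketch (hypothetical value): the validators raise SQLDataBad
# subclasses, so call sites can catch the whole family at once:
#
#     validator = ChannelBotSQLDataValidator()
#     try:
#         validator.fc_validation('1234-5678-9012')
#     except SQLDataBad:
#         traceback.print_exc()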
class RoomTrackerSQL(object):
def __init__(self, channel_bot):
self.channel_bot = channel_bot
self.data_validator = ChannelBotSQLDataValidator()
def get_race_as_sql_tuple(self, race:Race.Race):
'''Converts a given table bot race into a tuple that is ready to be inserted into the Race SQL table'''
times = [x.get_time_seconds() for x in race.getPlacements() if not (x.is_bogus_time() or x.is_disconnected())]
if len(times) == 0:
times = [-1]
return (race.get_race_id(),
race.get_rxx(),
UtilityFunctions.get_wiimmfi_utc_time(race.get_match_start_time()),
race.get_race_number(),
race.get_room_name(),
race.get_track_name(),
race.get_room_type(),
race.get_cc(),
race.get_region(),
race.is_from_wiimmfi(),
race.numRacers(),
min(times),
max(times),
sum(times)/len(times)
)
def get_race_as_sql_track_tuple(self, race):
'''Converts a given table bot race into a tuple that is ready to be inserted into the Track SQL table'''
no_author_name = race.getTrackNameWithoutAuthor()
return (race.get_track_name(),
race.get_track_url(),
no_author_name,
race.is_custom_track(),
Race.get_track_name_lookup(no_author_name)
)
def get_player_as_sql_player_tuple(self, player):
'''Converts a given table bot player into a tuple that is ready to be inserted into the Player SQL table'''
return (player.get_FC(),
int(player.get_player_id()),
player.get_mkwx_url())
def get_placement_as_sql_place_tuple(self, race_id, placement:Placement.Placement):
'''Converts a given table bot Placement into a tuple that is ready to be inserted into the Place SQL table'''
player:Placement.Player.Player = placement.getPlayer()
return (race_id,
player.get_FC(),
player.get_name(),
placement.get_place(),
placement.get_time_seconds(),
placement.get_delta(),
player.get_ol_status(),
player.get_position(),
player.get_region(),
player.get_connection_fails(),
player.get_role(),
player.get_VR(),
player.get_character(),
player.get_vehicle(),
player.get_discord_name(),
player.get_lounge_name(),
player.get_mii_hex(),
placement.is_from_wiimmfi())
| |
# Accelerator for pip, the Python package manager.
#
# Author: <NAME> <<EMAIL>>
# Last Change: January 10, 2016
# URL: https://github.com/paylogic/pip-accel
"""
Configuration handling for `pip-accel`.
This module defines the :class:`Config` class which is used throughout the
pip accelerator. At runtime an instance of :class:`Config` is created and
passed down like this:
.. digraph:: config_dependency_injection
node [fontsize=10, shape=rect]
PipAccelerator -> BinaryDistributionManager
BinaryDistributionManager -> CacheManager
CacheManager -> LocalCacheBackend
CacheManager -> S3CacheBackend
BinaryDistributionManager -> SystemPackageManager
The :class:`.PipAccelerator` class receives its configuration object from
its caller. Usually this will be :func:`.main()` but when pip-accel is used
as a Python API the person embedding or extending pip-accel is responsible for
providing the configuration object. This is intended as a form of `dependency
injection`_ that enables non-default configurations to be injected into
pip-accel.
Support for runtime configuration
---------------------------------
The properties of the :class:`Config` class can be set at runtime using
regular attribute assignment syntax. This overrides the default values of the
properties (whether based on environment variables, configuration files or hard
coded defaults).
Support for configuration files
-------------------------------
You can use a configuration file to permanently configure certain options of
pip-accel. If ``/etc/pip-accel.conf`` and/or ``~/.pip-accel/pip-accel.conf``
exist they are automatically loaded. You can also set the environment variable
``$PIP_ACCEL_CONFIG`` to load a configuration file in a non-default location.
If all three files exist the system wide file is loaded first, then the user
specific file is loaded and then the file set by the environment variable is
loaded (duplicate settings are overridden by the configuration file that's
loaded last).
Here is an example of the available options:
.. code-block:: ini
[pip-accel]
auto-install = yes
max-retries = 3
data-directory = ~/.pip-accel
s3-bucket = my-shared-pip-accel-binary-cache
s3-prefix = ubuntu-trusty-amd64
s3-readonly = yes
Note that the configuration options shown above are just examples, they are not
meant to represent the configuration defaults.
----
.. _dependency injection: http://en.wikipedia.org/wiki/Dependency_injection
"""
# Standard library modules.
import logging
import os
import os.path
import sys
# Modules included in our package.
from pip_accel.compat import configparser
from pip_accel.utils import is_root, expand_path
# External dependencies.
from coloredlogs import DEFAULT_LOG_FORMAT
from cached_property import cached_property
from humanfriendly import coerce_boolean, parse_path
# Initialize a logger for this module.
logger = logging.getLogger(__name__)
# The locations of the user specific and system wide configuration files.
LOCAL_CONFIG = '~/.pip-accel/pip-accel.conf'
GLOBAL_CONFIG = '/etc/pip-accel.conf'
class Config(object):
"""Configuration of the pip accelerator."""
def __init__(self, load_configuration_files=True, load_environment_variables=True):
"""
Initialize the configuration of the pip accelerator.
:param load_configuration_files: If this is :data:`True` (the default) then
configuration files in known locations
are automatically loaded.
:param load_environment_variables: If this is :data:`True` (the default) then
environment variables are used to
initialize the configuration.
"""
self.overrides = {}
self.configuration = {}
self.environment = os.environ if load_environment_variables else {}
if load_configuration_files:
for filename in self.available_configuration_files:
self.load_configuration_file(filename)
@cached_property
def available_configuration_files(self):
"""A list of strings with the absolute pathnames of the available configuration files."""
known_files = [GLOBAL_CONFIG, LOCAL_CONFIG, self.environment.get('PIP_ACCEL_CONFIG')]
absolute_paths = [parse_path(pathname) for pathname in known_files if pathname]
return [pathname for pathname in absolute_paths if os.path.isfile(pathname)]
def load_configuration_file(self, configuration_file):
"""
Load configuration defaults from a configuration file.
:param configuration_file: The pathname of a configuration file (a
string).
:raises: :exc:`Exception` when the configuration file cannot be
loaded.
"""
configuration_file = parse_path(configuration_file)
logger.debug("Loading configuration file: %s", configuration_file)
parser = configparser.RawConfigParser()
files_loaded = parser.read(configuration_file)
if len(files_loaded) != 1:
msg = "Failed to load configuration file! (%s)"
raise Exception(msg % configuration_file)
elif not parser.has_section('pip-accel'):
msg = "Missing 'pip-accel' section in configuration file! (%s)"
raise Exception(msg % configuration_file)
else:
self.configuration.update(parser.items('pip-accel'))
def __setattr__(self, name, value):
"""
Override the value of a property at runtime.
:param name: The name of the property to override (a string).
:param value: The overridden value of the property.
"""
attribute = getattr(self, name, None)
if isinstance(attribute, (property, cached_property)):
self.overrides[name] = value
else:
self.__dict__[name] = value
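    # Usage sketch (hypothetical path) mirroring the "runtime configuration"
    # notes in the module docstring: assigning to a property records an
    # override that takes precedence over every other source:
    #
    #     config = Config()
    #     config.data_directory = '/tmp/pip-accel-test'
    #     assert config.data_directory == '/tmp/pip-accel-test'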
def get(self, property_name=None, environment_variable=None, configuration_option=None, default=None):
"""
Internal shortcut to get a configuration option's value.
:param property_name: The name of the property that users can set on
the :class:`Config` class (a string).
:param environment_variable: The name of the environment variable (a
string).
:param configuration_option: The name of the option in the
configuration file (a string).
:param default: The default value.
:returns: The value of the environment variable or configuration file
option or the default value.
"""
if self.overrides.get(property_name) is not None:
return self.overrides[property_name]
elif environment_variable and self.environment.get(environment_variable):
return self.environment[environment_variable]
elif self.configuration.get(configuration_option) is not None:
return self.configuration[configuration_option]
else:
return default
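    # Precedence sketch: an attribute override beats the environment, which
    # beats the configuration file, which beats the hard coded default
    # (hypothetical values, with all loading disabled):
    #
    #     config = Config(load_configuration_files=False,
    #                     load_environment_variables=False)
    #     config.get(property_name='max_retries', default=3)  # -> 3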
@cached_property
def cache_format_revision(self):
"""
The revision of the binary distribution cache format in use (an integer).
This number is encoded in the directory name of the binary cache so
that multiple revisions can peacefully coexist. When pip-accel breaks
backwards compatibility this number is bumped so that pip-accel starts
using a new directory.
"""
return 7
@cached_property
def source_index(self):
"""
The absolute pathname of pip-accel's source index directory (a string).
This is the ``sources`` subdirectory of :data:`data_directory`.
"""
return self.get(property_name='source_index',
default=os.path.join(self.data_directory, 'sources'))
@cached_property
def binary_cache(self):
"""
The absolute pathname of pip-accel's binary cache directory (a string).
This is the ``binaries`` subdirectory of :data:`data_directory`.
"""
return self.get(property_name='binary_cache',
default=os.path.join(self.data_directory, 'binaries'))
@cached_property
def eggs_cache(self):
"""
The absolute pathname of pip-accel's eggs cache directory (a string).
This is the ``eggs`` subdirectory of :data:`data_directory`. It is used
to cache setup requirements which avoids continuous rebuilding of setup
requirements.
"""
return self.get(property_name='eggs_cache',
default=os.path.join(self.data_directory, 'eggs'))
@cached_property
def data_directory(self):
"""
The absolute pathname of the directory where pip-accel's data files are stored (a string).
- Environment variable: ``$PIP_ACCEL_CACHE``
- Configuration option: ``data-directory``
- Default: ``/var/cache/pip-accel`` if running as ``root``, ``~/.pip-accel`` otherwise
"""
return expand_path(self.get(property_name='data_directory',
environment_variable='PIP_ACCEL_CACHE',
configuration_option='data-directory',
default='/var/cache/pip-accel' if is_root() else '~/.pip-accel'))
@cached_property
def on_debian(self):
""":data:`True` if running on a Debian derived system, :data:`False` otherwise."""
return self.get(property_name='on_debian',
default=os.path.exists('/etc/debian_version'))
@cached_property
def install_prefix(self):
"""
The absolute pathname of the installation prefix to use (a string).
This property is based on :data:`sys.prefix` except that when
:data:`sys.prefix` is ``/usr`` and we're running on a Debian derived
system ``/usr/local`` is used instead.
The reason for this is that on Debian derived systems only apt (dpkg)
should be allowed to touch files in ``/usr/lib/pythonX.Y/dist-packages``
and ``python setup.py install`` knows this (see the ``posix_local``
installation scheme in ``/usr/lib/pythonX.Y/sysconfig.py`` on Debian
derived systems). Because pip-accel replaces ``python setup.py
install`` it has to replicate this logic. Inferring all of this from
the :mod:`sysconfig` module would be nice but that module wasn't
available in Python 2.6.
"""
return self.get(property_name='install_prefix',
default='/usr/local' if sys.prefix == '/usr' and self.on_debian else sys.prefix)
@cached_property
def python_executable(self):
"""The absolute pathname of the Python executable (a string)."""
return self.get(property_name='python_executable',
default=sys.executable or os.path.join(self.install_prefix, 'bin', 'python'))
@cached_property
def auto_install(self):
"""
Whether automatic installation of missing system packages is enabled.
:data:`True` if automatic installation of missing system packages is
enabled, :data:`False` if it is disabled, :data:`None` otherwise (in this case
the user will be prompted at the appropriate time).
- Environment variable: ``$PIP_ACCEL_AUTO_INSTALL`` (refer to
:func:`~humanfriendly.coerce_boolean()` for details on how the
value of the environment variable is interpreted)
- Configuration option: ``auto-install`` (also parsed using
:func:`~humanfriendly.coerce_boolean()`)
- Default: :data:`None`
"""
value = self.get(property_name='auto_install',
environment_variable='PIP_ACCEL_AUTO_INSTALL',
configuration_option='auto-install')
if value is not None:
return coerce_boolean(value)
@cached_property
def log_format(self):
"""
The format of log messages written to the terminal.
- Environment variable: ``$PIP_ACCEL_LOG_FORMAT``
- Configuration option: ``log-format``
- Default: :data:`~coloredlogs.DEFAULT_LOG_FORMAT`
"""
return self.get(property_name='log_format',
environment_variable='PIP_ACCEL_LOG_FORMAT',
configuration_option='log-format',
default=DEFAULT_LOG_FORMAT)
@cached_property
def log_verbosity(self):
"""
The verbosity of log messages written to the terminal.
- Environment variable: ``$PIP_ACCEL_LOG_VERBOSITY``
- Configuration option: ``log-verbosity``
- Default: 'INFO' (a string).
"""
return self.get(property_name='log_verbosity',
environment_variable='PIP_ACCEL_LOG_VERBOSITY',
configuration_option='log-verbosity',
default='INFO')
@cached_property
def max_retries(self):
"""
The number of times to retry ``pip install --download`` if it fails.
- Environment variable: ``$PIP_ACCEL_MAX_RETRIES``
- Configuration option: ``max-retries``
- Default: ``3``
"""
value = self.get(property_name='max_retries',
environment_variable='PIP_ACCEL_MAX_RETRIES',
configuration_option='max-retries')
        try:
            n = int(value)
            if n >= 0:
                return n
        except (TypeError, ValueError):
            pass
        # fall back to the default for missing, malformed or negative values
        return 3
@cached_property
def trust_mod_times(self):
"""
Whether to trust file modification times for cache invalidation.
- Environment variable: ``$PIP_ACCEL_TRUST_MOD_TIMES``
- Configuration option: ``trust-mod-times``
- Default: :data:`True` unless the AppVeyor_ continuous integration
environment is detected (see `issue 62`_).
.. _AppVeyor: http://www.appveyor.com
.. _issue 62: https://github.com/paylogic/pip-accel/issues/62
"""
on_appveyor = coerce_boolean(os.environ.get('APPVEYOR', 'False'))
return coerce_boolean(self.get(property_name='trust_mod_times',
environment_variable='PIP_ACCEL_TRUST_MOD_TIMES',
configuration_option='trust-mod-times',
default=(not on_appveyor)))
@cached_property
def s3_cache_url(self):
"""
The URL of the Amazon S3 API endpoint to use.
        By default this points to the official Amazon S3 API endpoint.
        """
        # build a filtered copy instead of deleting from pname while iterating
        # over it, which would skip elements
        pname = [p for p in pname if '.' not in p]
idf.append({'name':'packList',
'widgetType':kbScrolledListBox,
'wcfg':{'items':pname,
#'defaultValue':pname[0],
'listbox_exportselection':0,
'labelpos':'nw',
'label_text':'Select a package:',
#'dblclickcommand':self.loadMod_cb,
'selectioncommand':self.displayMod_cb
},
'gridcfg':{'sticky':'wesn'}})
idf.append({'name':'modList',
'widgetType':kbScrolledListBox,
'wcfg':{'items':[],
'listbox_exportselection':0,
'labelpos':'nw',
'label_text':'Select a module:',
#'dblclickcommand':self.loadMod_cb,
'selectioncommand':self.displayCmds_cb,
},
'gridcfg':{'sticky':'wesn', 'row':-1}})
idf.append({'name':'cmdList',
'widgetType':kbScrolledListBox,
'wcfg':{'items':[],
'listbox_exportselection':0,
'listbox_selectmode':'extended',
'labelpos':'nw',
'label_text':'Available commands:',
#'dblclickcommand':self.loadCmd_cb,
'selectioncommand':self.displayCmd_cb,
},
'gridcfg':{'sticky':'wesn', 'row':-1}})
# idf.append({'name':'docbutton',
# 'widgetType':Tkinter.Checkbutton,
# #'parent':'DOCGROUP',
# 'defaultValue':0,
# 'wcfg':{'text':'Show documentation',
# 'onvalue':1,
# 'offvalue':0,
# 'command':self.showdoc_cb,
# 'variable':Tkinter.IntVar()},
# 'gridcfg':{'sticky':'nw','columnspan':3}})
idf.append({'name':'DOCGROUP',
'widgetType':Pmw.Group,
'container':{'DOCGROUP':"w.interior()"},
'collapsedsize':0,
'wcfg':{'tag_text':'Description'},
'gridcfg':{'sticky':'wnse', 'columnspan':3}})
idf.append({'name':'doclist',
'widgetType':kbScrolledListBox,
'parent':'DOCGROUP',
'wcfg':{'items':[],
'listbox_exportselection':0,
'listbox_selectmode':'extended',
},
'gridcfg':{'sticky':'wesn', 'columnspan':3}})
idf.append({'name':'allPacks',
'widgetType':Tkinter.Button,
'wcfg':{'text':'Show all packages',
'command':self.allPacks_cb},
'gridcfg':{'sticky':'ew'}})
idf.append({'name':'loadMod',
'widgetType':Tkinter.Button,
'wcfg':{'text':'Load selected module',
'command':self.loadMod_cb},
'gridcfg':{'sticky':'ew', 'row':-1}})
# idf.append({'name':'loadCmd',
# 'widgetType':Tkinter.Button,
# 'wcfg':{'text':'Load Command',
# 'command':self.loadCmd_cb},
# 'gridcfg':{'sticky':'ew', 'row':-1}})
idf.append({'name':'dismiss',
'widgetType':Tkinter.Button,
'wcfg':{'text':'Dismiss',
'command':self.dismiss_cb},
'gridcfg':{'sticky':'ew', 'row':-1}})
# idf.append({'name':'dismiss',
# 'widgetType':Tkinter.Button,
# 'wcfg':{'text':'DISMISS',
# 'command':self.dismiss_cb,
# },
# 'gridcfg':{'sticky':Tkinter.E+Tkinter.W,'columnspan':3}})
return idf
def guiCallback(self):
self.vf.GUI.ROOT.config(cursor='watch')
self.vf.GUI.ROOT.update()
if self.allPack == {}:
self.allPack = findAllVFPackages()
val = self.showForm('loadCmds', force=1,modal=0,blocking=0)
ebn = self.cmdForms['loadCmds'].descr.entryByName
# docb=ebn['docbutton']['widget']
# var=ebn['docbutton']['wcfg']['variable'].get()
# if var==0:
# dg=ebn['DOCGROUP']['widget']
# dg.collapse()
self.vf.GUI.ROOT.config(cursor='')
def dismiss_cb(self, event=None):
self.cmdForms['loadCmds'].withdraw()
def allPacks_cb(self, event=None):
ebn = self.cmdForms['loadCmds'].descr.entryByName
packW = ebn['packList']['widget']
if not self.allPackFlag:
packName = self.allPack.keys()
packW.setlist(packName)
ebn['allPacks']['widget'].configure(text='Show default packages')
self.allPackFlag = True
else:
packName = self.vf.libraries
packW.setlist(packName)
ebn['allPacks']['widget'].configure(text='Show all packages')
self.allPackFlag = False
ebn['modList']['widget'].clear()
ebn['cmdList']['widget'].clear()
# def showdoc_cb(self,event=None):
# #when a show documentation is on and a module is selected then
# #expands dg else dg is collapsed
# ebn = self.cmdForms['loadCmds'].descr.entryByName
# docb=ebn['docbutton']['widget']
# var=ebn['docbutton']['wcfg']['variable'].get()
# dg=ebn['DOCGROUP']['widget']
# docw=ebn['doclist']['widget']
# packW = ebn['packList']['widget']
# psel=packW.getcurselection()
# if var==0:
# dg.collapse()
# if var==1 and psel:
# if docw.size()>0:
# dg.expand()
def displayMod_cb(self, event=None):
#print "displayMod_cb"
# c = self.cmdForms['loadCmds'].mf.cget('cursor')
# self.cmdForms['loadCmds'].mf.configure(cursor='watch')
# self.cmdForms['loadCmds'].mf.update_idletasks()
ebn = self.cmdForms['loadCmds'].descr.entryByName
# docb=ebn['docbutton']['widget']
# var=ebn['docbutton']['wcfg']['variable'].get()
        dg = ebn['DOCGROUP']['widget']
        dg.collapse()
packW = ebn['packList']['widget']
packs = packW.getcurselection()
if len(packs) == 0:
return
packName = packs[0]
if not self.packMod.has_key(packName):
package = self.allPack[packName]
self.packMod[packName] = findModulesInPackage(package,"^def initModule",fileNameFilters=['Command'])
self.currentPack = packName
modNames = []
for key, value in self.packMod[packName].items():
pathPack = key.split(os.path.sep)
if pathPack[-1] == packName:
newModName = map(lambda x: x[:-3], value)
#for mname in newModName:
#if "Command" not in mname :
#ind = newModName.index(mname)
#del newModName[ind]
modNames = modNames+newModName
else:
pIndex = pathPack.index(packName)
prefix = join(pathPack[pIndex+1:], '.')
newModName = map(lambda x: "%s.%s"%(prefix, x[:-3]), value)
#for mname in newModName:
#if "Command" not in mname :
#ind = newModName.index(mname)
#del newModName[ind]
modNames = modNames+newModName
modNames.sort()
modW = ebn['modList']['widget']
modW.setlist(modNames)
# and clear contents in self.libraryGUI
cmdW = ebn['cmdList']['widget']
cmdW.clear()
m = __import__(packName, globals(), locals(),[])
d = []
docstring=m.__doc__
#d.append(m.__doc__)
docw = ebn['doclist']['widget']
docw.clear()
#formatting documentation.
if docstring!=None :
if '\n' in docstring:
x = string.split(docstring,"\n")
for i in x:
if i !='':
d.append(i)
if len(d)>8:
docw.configure(listbox_height=8)
else:
docw.configure(listbox_height=len(d))
            else:
                d.append(docstring)
                x = string.split(docstring, " ")
                # formatting documentation
if len(x)>10:
docw.configure(listbox_height=len(x)/10)
else:
docw.configure(listbox_height=1)
docw.setlist(d)
# self.cmdForms['loadCmds'].mf.configure(cursor=c)
        # when show documentation is on after selecting a package,
        # dg is expanded to show the documentation
#if var==1 and docw.size()>0:
if docw.size()>0:
dg.expand()
def displayCmds_cb(self, event=None):
#print "displayCmds_cb"
global cmd_docslist
self.cmdForms['loadCmds'].mf.update_idletasks()
ebn = self.cmdForms['loadCmds'].descr.entryByName
dg = ebn['DOCGROUP']['widget']
dg.collapse()
cmdW = ebn['cmdList']['widget']
cmdW.clear()
# docb=ebn['docbutton']['widget']
# var=ebn['docbutton']['wcfg']['variable'].get()
modName = ebn['modList']['widget'].getcurselection()
        if not modName: return
else:
modName = modName[0]
importName = self.currentPack + '.' + modName
try:
m = __import__(importName, globals(), locals(),['commandList'])
except:
return
if not hasattr(m, 'commandList'):
return
cmdNames = map(lambda x: x['name'], m.commandList)
cmdNames.sort()
if modName:
self.var=1
d =[]
docstring =m.__doc__
import string
docw = ebn['doclist']['widget']
docw.clear()
if docstring!=None :
if '\n' in docstring:
x = string.split(docstring,"\n")
for i in x:
if i !='':
d.append(i)
                    # formatting documentation
if len(d)>8:
docw.configure(listbox_height=8)
else:
docw.configure(listbox_height=len(d))
else:
d.append(docstring)
x = string.split(docstring," ")
                    # formatting documentation
if len(x)>10:
docw.configure(listbox_height=len(x)/10)
else:
docw.configure(listbox_height=1)
docw.setlist(d)
CmdName=ebn['cmdList']['widget'].getcurselection()
cmdW.setlist(cmdNames)
        # when show documentation is on after selecting a module or a command,
        # dg is expanded to show the documentation
#if var==1 and docw.size()>0:
if docw.size()>0:
dg.expand()
def displayCmd_cb(self, event=None):
#print "displayCmd_cb"
global cmd_docslist
self.cmdForms['loadCmds'].mf.update_idletasks()
ebn = self.cmdForms['loadCmds'].descr.entryByName
dg = ebn['DOCGROUP']['widget']
dg.collapse()
# docb=ebn['docbutton']['widget']
# var=ebn['docbutton']['wcfg']['variable'].get()
modName = ebn['modList']['widget'].getcurselection()
        if not modName: return
else:
modName = modName[0]
importName = self.currentPack + '.' + modName
try:
m = __import__(importName, globals(), locals(),['commandList'])
except:
self.warningMsg("ERROR: Cannot find commands for %s"%modName)
return
if not hasattr(m, 'commandList'):
return
cmdNames = map(lambda x: x['name'], m.commandList)
cmdNames.sort()
if modName:
self.var=1
d =[]
docstring =m.__doc__
import string
docw = ebn['doclist']['widget']
docw.clear()
if docstring!=None :
if '\n' in docstring:
x = string.split(docstring,"\n")
for i in x:
if i !='':
d.append(i)
                    # formatting documentation
if len(d)>8:
docw.configure(listbox_height=8)
else:
docw.configure(listbox_height=len(d))
else:
d.append(docstring)
x = string.split(docstring," ")
                    # formatting documentation
if len(x)>10:
docw.configure(listbox_height=len(x)/10)
else:
docw.configure(listbox_height=1)
docw.setlist(d)
cmdW = ebn['cmdList']['widget']
CmdName=ebn['cmdList']['widget'].getcurselection()
cmdW.setlist(cmdNames)
if len(CmdName)!=0:
for i in m.commandList:
if i['name']==CmdName[0]:
c = i['cmd']
if CmdName[0] in cmdNames:
ind= cmdNames.index(CmdName[0])
cmdW.selection_clear()
cmdW.selection_set(ind)
d =[]
docstring=c.__doc__
docw = ebn['doclist']['widget']
docw.clear()
if CmdName[0] not in cmd_docslist.keys():
cmd_docslist[CmdName[0]]=d
import string
if docstring!=None :
if '\n' in docstring:
x = string.split(docstring,"\n")
for i in x:
if i !='':
d.append(i)
if len(d)>8:
docw.configure(listbox_height=8)
else:
docw.configure(listbox_height=len(d))
else:
d.append(docstring)
x = string.split(docstring," ")
if len(x)>10:
docw.configure(listbox_height=len(x)/10)
else:
docw.configure(listbox_height=1)
docw.setlist(d)
        # when show documentation is on after selecting a module or a command,
        # dg is expanded to show the documentation
#if var==1 and docw.size()>0:
if docw.size()>0:
dg.expand()
def loadMod_cb(self, event=None):
ebn = self.cmdForms['loadCmds'].descr.entryByName
selMod = ebn['modList']['widget'].getcurselection()
if len(selMod)==0: return
else:
self.txtGUI = ""
apply(self.doitWrapper, ( selMod[0],),
{'commands':None, 'package':self.currentPack, 'removable':True})
self.dismiss_cb(None)
if self.txtGUI:
self.txtGUI = "\n Access this command via:\n"+self.txtGUI
tkMessageBox.showinfo("Load Module", selMod[0]+" loaded successfully!\n"+self.txtGUI)
# def loadCmd_cb(self, event=None):
# ebn = self.cmdForms['loadCmds'].descr.entryByName
# selCmds = ebn['cmdList']['widget'].getcurselection()
# selMod = ebn['modList']['widget'].getcurselection()
# if len(selCmds)==0: return
# else:
# apply(self.doitWrapper, (selMod[0],), {'commands':selCmds,
# 'package':self.currentPack})
class loadModuleCommand(Command):
"""Command to load dynamically modules to the Viewer import the file called name.py and execute the function initModule defined in that file Raises a ValueError exception if initModule is not defined
\nPackage : ViewerFramework
\nModule : basicCommand.py
\nClass : loadModuleCommand
\nCommand : loadModule
\nSynopsis:\n
None<--loadModule(filename, package=None, **kw)
\nRequired Arguements:\n
filename --- name of the module
\nOptional Arguements:\n
package --- name of the package to which filename belongs
"""
active = 0
def doit(self, filename, package):
        # This is NOT called because we call browseCommands()
if package is None:
_package = filename
else:
_package = "%s.%s"%(package, filename)
try:
mod = __import__( _package, globals(), locals(), ['initModule'])
            if hasattr(mod, 'initModule') and callable(mod.initModule):
                mod.initModule(self.vf)
            else:
                self.vf.warningMsg('module %s has no initModule function' % _package)
except ImportError:
self.vf.warningMsg('module %s could not be imported'%_package)
## if package is None:
## _package = filename
## else:
## _package = "%s.%s"%(package, filename)
## module = self.vf.tryto( __import__ , _package, globals(), locals(),
## [filename])
## if module=='ERROR':
## print '\nWARNING: Could not load module %s' % filename
## return
def __call__(self, filename, package=None, **kw):
"""None<---loadModule(filename, package=None, **kw)
        \nRequired Arguments:\n
        filename --- name of the module
        \nOptional Arguments:\n
package --- name of the package to which filename belongs
"""
if package==None:
package=self.vf.libraries[0]
if not kw.has_key('redraw'):
kw['redraw'] = 0
kw['package'] = package
apply(self.vf.browseCommands, (filename,), kw)
#apply( self.doitWrapper, (filename, package), kw )
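        # --- hedged usage sketch (not part of the original file) ---
        # Illustrative only; assumes a ViewerFramework instance `vf` with this
        # command registered as vf.loadModule (module/package names made up):
        #     vf.loadModule('fileCommands', package='ViewerFramework')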
def loadModule_cb(self, event=None):
# c = self.cmdForms['loadModule'].mf.cget('cursor')
# self.cmdForms['loadModule'].mf.configure(cursor='watch')
# self.cmdForms['loadModule'].mf.update_idletasks()
ebn = self.cmdForms['loadModule'].descr.entryByName
moduleName = ebn['Module List']['widget'].get()
package = ebn['package']['widget'].get()
if moduleName:
self.vf.browseCommands(moduleName[0], package=package, redraw=0)
# self.cmdForms['loadModule'].mf.configure(cursor=c)
def loadModules(self, package, library=None):
modNames = []
doc = []
self.filenames={}
self.allPack={}
self.allPack=findAllVFPackages()
if package is None: return [], []
if not self.filenames.has_key(package):
pack=self.allPack[package]
#finding modules in a package
self.filenames[pack] =findModulesInPackage(pack,"^def initModule",fileNameFilters=['Command'])
# dictionary of files keys=widget, values = filename
for key, value in self.filenames[pack].items():
pathPack = key.split(os.path.sep)
if pathPack[-1] == package:
newModName = map(lambda x: x[:-3], value)
#for mname in newModName:
#if not modulename has Command in it delete from the
#modules list
#if "Command" not in mname :
#ind = newModName.index(mname)
#del newModName[ind]
#if "Command" in mname :
if hasattr(newModName,"__doc__"):
doc.append(newModName.__doc__)
else:
doc.append(None)
modNames = modNames + newModName
else:
pIndex = pathPack.index(package)
prefix = join(pathPack[pIndex+1:], '.')
newModName = map(lambda x: "%s.%s"%(prefix, x[:-3]), value)
#for mname in newModName:
#if not modulename has Command in it delete from the
#modules list
#if "Command" not in mname :
#ind = newModName.index(mname)
#del newModName[ind]
if hasattr(newModName,"__doc__"):
doc.append(newModName.__doc__)
else:
doc.append(None)
modNames = modNames + newModName
modNames.sort()
return modNames, doc
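# --- hedged sketch (not part of the original file) ---
# The docstring-formatting rule used by displayMod_cb/displayCmd_cb above,
# in isolation: keep the non-empty lines of a docstring and cap the listbox
# height at 8 rows. The function name is illustrative only.
def formatDocForListbox(docstring, maxHeight=8):
    if docstring is None:
        return [], 1
    if '\n' in docstring:
        lines = [l for l in docstring.split('\n') if l != '']
    else:
        lines = [docstring]
    return lines, min(max(len(lines), 1), maxHeight)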
| |
<gh_stars>0
# -*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
import atexit, inspect, time
import os.path as path
from bes.common.check import check
from bes.common.object_util import object_util
from bes.fs.file_util import file_util
from bes.fs.file_find import file_find
from bes.fs.temp_file import temp_file
from bes.fs.testing.temp_content import temp_content
from bes.version.software_version import software_version
from bes.common.inspect_util import inspect_util
from .git import git
from .git_address_util import git_address_util
from .git_commit_hash import git_commit_hash
from .git_error import git_error
from .git_exe import git_exe
from .git_modules_file import git_modules_file
#import warnings
#with warnings.catch_warnings():
# warnings.filterwarnings("ignore", category = DeprecationWarning)
class git_repo(object):
'A git repo abstraction.'
def __init__(self, root, address = None):
self.root = path.abspath(root)
self.address = address or git.remote_origin_url(self.root)
def __str__(self):
return '%s@%s' % (self.root, self.address)
def has_changes(self, untracked_files = False, submodules = False):
if git.has_changes(self.root, untracked_files = untracked_files):
return True
if submodules:
for st in self.submodule_status_all():
sub_repo = self.submodule_repo(st.name)
if sub_repo.has_changes(untracked_files = untracked_files):
return True
return False
def clone_or_pull(self, options = None):
return git.clone_or_pull(self.address, self.root, options = options)
def clone(self, options = None):
return git.clone(self.address, self.root, options = options)
def sync(self, options = None):
return git.sync(self.address, self.root, options = options)
def init(self, *args):
return git.init(self.root, *args)
def add(self, filenames):
return git.add(self.root, filenames)
def remove(self, filenames):
return git.remove(self.root, filenames)
def pull(self, remote_name = None, branch_name = None, options = None):
return git.pull(self.root, remote_name = remote_name, branch_name = branch_name, options = options)
def push(self, *args):
return git.push(self.root, *args)
def push_with_rebase(self, remote_name = None, num_tries = None, retry_wait_seconds = None):
return git.push_with_rebase(self.root,
remote_name = remote_name,
num_tries = num_tries,
retry_wait_seconds = retry_wait_seconds)
def safe_push(self, *args):
return git.safe_push(self.root, *args)
def commit(self, message, filenames):
return git.commit(self.root, message, filenames)
def checkout(self, revision):
return git.checkout(self.root, revision)
def status(self, filenames):
return git.status(self.root, filenames)
def diff(self):
return git.diff(self.root)
def exists(self):
return path.isdir(self._dot_git_path())
def branch_status(self):
return git.branch_status(self.root)
def write_temp_content(self, items, commit = False, commit_message = None):
commit_message = commit_message or 'add temp content'
temp_content.write_items(items, self.root)
if commit:
if self.has_changes():
raise git_error('You need a clean tree with no changes to add temp content.')
self.add('.')
self.commit(commit_message, '.')
def _dot_git_path(self):
return path.join(self.root, '.git')
def find_all_files(self):
files = file_find.find(self.root, relative = True, file_type = file_find.FILE|file_find.LINK)
is_git = lambda f: f.startswith('.git') or f.endswith('.git')
files = [ f for f in files if not is_git(f) ]
return files
def last_commit_hash(self, short_hash = False):
return git.last_commit_hash(self.root, short_hash = short_hash)
def remote_origin_url(self):
return git.remote_origin_url(self.root)
def remote_set_url(self, url, name = 'origin'):
return git.remote_set_url(self.root, url, name = name)
def remote_get_url(self, name = 'origin'):
return git.remote_get_url(self.root, name = name)
def add_file(self, filename, content, codec = 'utf-8', mode = None, commit = True, push = False,
commit_message = None):
p = self.file_path(filename)
assert not path.isfile(p)
file_util.save(p, content = content, codec = codec, mode = mode)
self.add( [ filename ])
result = None
if commit:
commit_message = commit_message or 'add {}'.format(filename)
self.commit(commit_message, [ filename ])
result = self.last_commit_hash(short_hash = True)
if push:
self.push()
return result
def save_file(self, filename, content, codec = 'utf-8', mode = None, add = True, commit = True):
if add and not commit:
raise ValueError('If add is True then commit should be True as well.')
p = self.file_path(filename)
file_util.save(p, content = content, mode = mode)
if add:
self.add([ filename ])
if commit:
msg = 'add or change {}'.format(filename)
self.commit(msg, [ filename ])
def read_file(self, filename, codec = 'utf-8'):
return file_util.read(self.file_path(filename), codec = codec)
def has_file(self, filename):
return path.exists(self.file_path(filename))
def file_path(self, filename):
return path.join(self.root, filename)
def greatest_local_tag(self):
return git.greatest_local_tag(self.root)
def greatest_remote_tag(self):
return git.greatest_remote_tag(self.root)
def list_local_tags(self, lexical = False, reverse = False):
return git.list_local_tags(self.root, lexical = lexical, reverse = reverse)
def list_local_tags_gt(self, tag, lexical = False, reverse = False):
'List tags greater than tag'
tags = self.list_local_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) > 0 ]
def list_local_tags_ge(self, tag, lexical = False, reverse = False):
'List tags greater or equal to tag'
tags = self.list_local_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) >= 0 ]
def list_local_tags_le(self, tag, lexical = False, reverse = False):
'List tags lesser or equal to tag'
tags = self.list_local_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) <= 0 ]
def list_local_tags_lt(self, tag, lexical = False, reverse = False):
'List tags lesser than tag'
tags = self.list_local_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) < 0 ]
def list_remote_tags(self, lexical = False, reverse = False):
return git.list_remote_tags(self.root, lexical = lexical, reverse = reverse)
def list_remote_tags_gt(self, tag, lexical = False, reverse = False):
'List tags greater than tag'
tags = self.list_remote_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) > 0 ]
def list_remote_tags_ge(self, tag, lexical = False, reverse = False):
'List tags greater or equal to tag'
tags = self.list_remote_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) >= 0 ]
def list_remote_tags_le(self, tag, lexical = False, reverse = False):
'List tags lesser or equal to tag'
tags = self.list_remote_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) <= 0 ]
def list_remote_tags_lt(self, tag, lexical = False, reverse = False):
'List tags lesser than tag'
tags = self.list_remote_tags(lexical = lexical, reverse = reverse)
return [ t for t in tags if software_version.compare(t, tag) < 0 ]
def tag(self, tag, allow_downgrade = True, push = False, commit = None):
git.tag(self.root, tag, allow_downgrade = allow_downgrade, push = push)
def has_remote_tag(self, tag):
return git.has_remote_tag(self.root, tag)
def has_local_tag(self, tag):
return git.has_local_tag(self.root, tag)
def delete_local_tag(self, tag):
git.delete_local_tag(self.root, tag)
def delete_remote_tag(self, tag):
git.delete_remote_tag(self.root, tag)
  def delete_tag(self, tag, where, dry_run):
    return git.delete_tag(self.root, tag, where, dry_run)
def push_tag(self, tag):
git.push_tag(self.root, tag)
def bump_tag(self, component, push = True, dry_run = False, default_tag = None, reset_lower = False):
return git.bump_tag(self.root, component, push = push, dry_run = dry_run,
default_tag = default_tag, reset_lower = reset_lower)
def reset(self, revision = None, submodules = False):
git.reset(self.root, revision = revision)
if submodules:
for st in self.submodule_status_all():
sub_repo = self.submodule_repo(st.name)
sub_repo.reset(revision = revision)
self.submodule_init()
def reset_to_revision(self, revision):
git.reset_to_revision(self.root, revision)
def revision_equals(self, revision1, revision2):
'Return True if revision1 is the same as revision2. Short and long hashes can be mixed.'
return git.revision_equals(self.root, revision1, revision2)
def list_branches(self, where):
return git.list_branches(self.root, where)
def list_remote_branches(self):
return git.list_remote_branches(self.root)
def list_local_branches(self):
return git.list_local_branches(self.root)
def has_remote_branch(self, branch):
return git.has_remote_branch(self.root, branch)
def has_local_branch(self, branch):
return git.has_local_branch(self.root, branch)
def branch_create(self, branch_name, checkout = False, push = False):
git.branch_create(self.root, branch_name, checkout = checkout, push = push)
def branch_push(self, branch_name):
git.branch_push(self.root, branch_name)
def fetch(self):
git.fetch(self.root)
  def author(self, commit):
    return git.author(self.root, commit)
def files_for_commit(self, commit):
return git.files_for_commit(self.root, commit)
def active_branch(self):
return git.active_branch(self.root)
def archive_to_file(self, prefix, revision, output_filename,
archive_format = None, short_hash = True):
git.archive_to_file(self.root, prefix, revision, output_filename,
archive_format = archive_format,
short_hash = short_hash)
def archive_to_dir(self, revision, output_dir):
return git.archive_to_dir(self.root, revision, output_dir)
def lfs_track(self, pattern):
return git.lfs_track(self.root, pattern)
def lfs_pull(self):
return git.lfs_pull(self.root)
def lfs_files(self):
return git.lfs_files(self.root)
def lfs_files_needing_smudge(self):
return git.lfs_files_needing_smudge(self.root)
def call_git(self, args, raise_error = True, extra_env = None,
num_tries = None, retry_wait_seconds = None):
return git_exe.call_git(self.root,
args,
raise_error = raise_error,
extra_env = extra_env,
num_tries = num_tries,
retry_wait_seconds = retry_wait_seconds)
def unpushed_commits(self):
return git.unpushed_commits(self.root)
def has_unpushed_commits(self):
return git.has_unpushed_commits(self.root)
def has_commit(self, commit):
return git.has_commit(self.root, commit)
def has_revision(self, revision):
return git.has_revision(self.root, revision)
@classmethod
def is_long_hash(clazz, h):
return git_commit_hash.is_long(h)
@classmethod
def is_short_hash(clazz, h):
return git_commit_hash.is_short(h)
@classmethod
def is_hash(clazz, h):
return git_commit_hash.is_valid(h)
def short_hash(self, long_hash):
return git.short_hash(self.root, long_hash)
def long_hash(self, short_hash):
return git.long_hash(self.root, short_hash)
def submodule_init(self, submodule = None, recursive = False):
return git.submodule_init(self.root, submodule = submodule, recursive = recursive)
def submodule_add(self, address, local_path):
return git.submodule_add(self.root, address, local_path)
def submodule_status_all(self, submodule = None):
return git.submodule_status_all(self.root, submodule = submodule)
def submodule_status_one(self, submodule):
return git.submodule_status_one(self.root, submodule)
def submodule_file(self):
filename = path.join(self.root, '.gitmodules')
if not path.isfile(filename):
raise IOError('no modules file found: {}'.format(filename))
return git_modules_file(filename)
def submodule_repo(self, submodule):
'Return a git_repo object for the given submodule.'
return git_repo(path.join(self.root, submodule))
def has_submodule(self, submodule):
'Return True if this repo has submodule.'
return submodule in set([ info.name for info in self.submodule_status_all() ])
  def submodule_set_branch(self, module_name, branch_name):
    # body truncated in the source; assumed (not verified) to delegate to the
    # underlying git module like the other thin wrappers in this class
    return git.submodule_set_branch(self.root, module_name, branch_name)
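  # --- hedged usage sketch (not part of the original module) ---
  # Illustrative only; the path and address below are made up:
  #   repo = git_repo('/tmp/example', address = 'https://example.com/repo.git')
  #   repo.clone_or_pull()
  #   if repo.has_changes(untracked_files = True):
  #     print('local changes present')
  #   print(repo.list_local_tags_gt('1.0.0'))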
| |
<reponame>aria-systems-group/PDDLtoSim
import os
import re
import time
import random
import datetime
import yaml
import copy
import sys
import math
import warnings
import pybullet as pb
import numpy as np
from typing import Tuple, Optional, List, Dict
from src.graph_construction.causal_graph import CausalGraph
from src.graph_construction.transition_system import FiniteTransitionSystem
from src.graph_construction.two_player_game import TwoPlayerGame
# call the regret synthesis code
from regret_synthesis_toolbox.src.graph import TwoPlayerGraph
from regret_synthesis_toolbox.src.strategy_synthesis import RegMinStrSyn
from regret_synthesis_toolbox.src.strategy_synthesis import ValueIteration
from src.pddl_env_simualtor.envs.panda_sim import PandaSim
# define a constant to dump the yaml file
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
def compute_adv_strs(product_graph: TwoPlayerGraph,
purely_avd: bool = True,
no_intervention: bool = False,
cooperative: bool = False,
print_sim_str: bool = True) -> List:
"""
A method to play the adversarial game.
"""
comp_mcr_solver = ValueIteration(product_graph, competitive=True)
comp_mcr_solver.solve(debug=True, plot=False)
# coop_val_dict = coop_mcr_solver.state_value_dict
comp_str_dict = comp_mcr_solver.str_dict
_init_state = product_graph.get_initial_states()[0][0]
_next_state = comp_str_dict[_init_state]
_action_seq = []
_action_seq.append(product_graph._graph[_init_state][_next_state][0].get("actions"))
# print(_action_seq[-1])
if purely_avd:
while _next_state is not None:
_curr_state = _next_state
_next_state = comp_str_dict.get(_curr_state)
if _next_state is not None:
_edge_act = product_graph._graph[_curr_state][_next_state][0].get("actions")
if _action_seq[-1] != _edge_act:
_action_seq.append(product_graph._graph[_curr_state][_next_state][0].get("actions"))
# print(_action_seq[-1])
elif no_intervention:
while _next_state is not None:
_curr_state = _next_state
if product_graph.get_state_w_attribute(_curr_state, "player") == "adam":
# get the state that sys wanted to evolve to
for _succ in product_graph._graph.successors(_curr_state):
_edge_action = product_graph._graph[_curr_state][_succ][0]["actions"]
_edge_type = get_action_from_causal_graph_edge(_edge_action)
if _edge_type != "human-move":
_next_state = _succ
break
else:
_next_state = comp_str_dict.get(_curr_state)
if _next_state is not None:
_edge_action = product_graph._graph[_curr_state][_next_state][0].get('actions')
if _action_seq[-1] != _edge_action:
_action_seq.append(_edge_action)
elif cooperative:
_coop_str_dict = compute_cooperative_actions_for_env(product_graph)
_max_coop_actions: int = 0
while _next_state is not None:
_curr_state = _next_state
if product_graph.get_state_w_attribute(_curr_state, attribute="player") == "eve":
_next_state = comp_str_dict.get(_curr_state)
else:
if _max_coop_actions <= 2:
_next_state = _coop_str_dict[_curr_state]
# only increase the counter when the human moves
_max_coop_actions += 1
else:
for _succ in product_graph._graph.successors(_curr_state):
_edge_action = product_graph._graph[_curr_state][_succ][0]["actions"]
_edge_type = get_action_from_causal_graph_edge(_edge_action)
if _edge_type != "human-move":
_next_state = _succ
break
if _next_state is not None:
_edge_act = product_graph._graph[_curr_state][_next_state][0].get("actions")
if _action_seq[-1] != _edge_act:
_action_seq.append(product_graph._graph[_curr_state][_next_state][0].get("actions"))
else:
warnings.warn("Please at-least one of the flags i.e Cooperative, no_intervention or purely_adversarial is True")
if print_sim_str:
for _action in _action_seq:
print(_action)
return _action_seq
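# --- hedged sketch (not part of the original module) ---
# The core pattern of the strategy-following loops above, on a toy strategy
# dict: walk from the initial state, recording each edge action once.
def _walk_strategy_example():
    str_dict = {'s0': 's1', 's1': 's2', 's2': None}
    edge_actions = {('s0', 's1'): 'transit b0', ('s1', 's2'): 'grasp b0'}
    state, action_seq = 's0', []
    while str_dict.get(state) is not None:
        nxt = str_dict[state]
        act = edge_actions[(state, nxt)]
        if not action_seq or action_seq[-1] != act:
            action_seq.append(act)
        state = nxt
    return action_seq  # ['transit b0', 'grasp b0']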
def compute_reg_strs(product_graph: TwoPlayerGraph,
coop_str: bool = False,
epsilon: float = -1) -> Tuple[list, dict, TwoPlayerGraph]:
"""
    A method to compute strategies. We control the env's behavior by making it purely cooperative, purely adversarial, or
epsilon greedy.
@param coop_str: Set this to be true for purely cooperative behavior from the env
@param epsilon: Set this value to be 0 for purely adversarial behavior or with epsilon probability human picks
random actions.
"""
payoff = payoff_factory.get("cumulative", graph=product_graph)
# build an instance of regret strategy minimization class
reg_syn_handle = RegMinStrSyn(product_graph, payoff)
reg_str, reg_val = reg_syn_handle.edge_weighted_arena_finite_reg_solver(minigrid_instance=None,
purge_states=True,
plot=False)
twa_game = reg_syn_handle.graph_of_alternatives
_init_state = twa_game.get_initial_states()[0][0]
for _n in twa_game._graph.successors(_init_state):
print(f"Reg Val: {_n}: {reg_val[_n]}")
    # the reg str is a dict mapping one state to the next. Let's convert this to print a sequence of edge actions
_next_state = reg_str[_init_state]
_action_seq = []
_action_seq.append(twa_game._graph[_init_state][_next_state][0].get("actions"))
if coop_str:
# compute cooperative strs for the player
_coop_str_dict = compute_cooperative_actions_for_env(twa_game)
_max_coop_actions: int = 1
# print(f"{_init_state}: {reg_val[_init_state]}")
# print(f"{_next_state}: {reg_val[_init_state]}")
while _next_state is not None:
_curr_state = _next_state
if twa_game.get_state_w_attribute(_curr_state, attribute="player") == "eve":
_next_state = reg_str.get(_curr_state)
else:
if _max_coop_actions <= 10:
_next_state = _coop_str_dict[_curr_state]
# only increase the counter when the human moves
_max_coop_actions += 1
else:
_next_state = reg_str.get(_curr_state)
if _next_state is not None:
_edge_act = twa_game._graph[_curr_state][_next_state][0].get("actions")
if _action_seq[-1] != _edge_act:
_action_seq.append(twa_game._graph[_curr_state][_next_state][0].get("actions"))
# print(f"{_next_state}: {reg_val[_init_state]}")
elif 0 <= epsilon <= 1:
# we randomise human strategies
_new_str_dict = compute_epsilon_str_dict(epsilon=epsilon,
reg_str_dict=reg_str,
max_human_int=3, twa_game=twa_game)
while _next_state is not None:
_curr_state = _next_state
# if twa_game.get_state_w_attribute(_curr_state, attribute="player") == "eve":
_next_state = _new_str_dict.get(_curr_state)
# else:
# _new
if _next_state is not None:
_edge_act = twa_game._graph[_curr_state][_next_state][0].get("actions")
if _action_seq[-1] != _edge_act:
_action_seq.append(twa_game._graph[_curr_state][_next_state][0].get("actions"))
for _action in _action_seq:
print(_action)
return _action_seq, reg_val, twa_game
def compute_cooperative_actions_for_env(product_graph: TwoPlayerGraph) -> Dict:
"""
A helper method to compute the cooperative strategies for the players.
"""
coop_mcr_solver = ValueIteration(product_graph, competitive=False)
coop_mcr_solver.cooperative_solver(debug=False, plot=False)
coop_val_dict = coop_mcr_solver.state_value_dict
coop_str_dict = coop_mcr_solver.str_dict
return coop_str_dict
def compute_epsilon_str_dict(epsilon: float, reg_str_dict: dict, max_human_int: int, twa_game: TwoPlayerGraph) -> dict:
"""
    A helper method that returns the human action as per the epsilon-greedy algorithm.
    Under this policy we select a random human action with probability epsilon, while the human selects the
    optimal action (as given in the str dict, if any) with probability 1 - epsilon.
Epsilon = 0: Env is completely adversarial - Maximizing Sys player's regret
Epsilon = 1: Env is completely random
"""
    # copy so the caller's strategy dict is not mutated in place
    _new_str_dict = dict(reg_str_dict)
if epsilon == 0:
return _new_str_dict
_human_has_intervened: int = 0
for _from_state, _to_state in reg_str_dict.items():
if twa_game.get_state_w_attribute(_from_state, 'player') == "adam":
_succ_states: List[tuple] = [_state for _state in twa_game._graph.successors(_from_state)]
# if human can still intervene
# if max_human_int >= _human_has_intervened:
# act random
if np.random.rand() < epsilon:
_next_state = random.choice(_succ_states)
_new_str_dict[_from_state] = _next_state
# else:
# _next_state = _new_str_dict[_from_state]
# _human_int_counter = _from_state[0][0][0][1]
# if _next_state[0][0][0][1] != _human_int_counter:
# _human_has_intervened += 1
# if human exhausted the limit set by the user
# else:
# _human_int_counter = _from_state[0][0][0][1]
# for _succ in _succ_states:
# if _succ[0][0][0][1] == _human_int_counter:
# _new_str_dict[_from_state] = _succ
return _new_str_dict
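# --- hedged sketch (not part of the original module) ---
# The epsilon-greedy choice used above, in isolation: with probability
# epsilon pick a random successor, otherwise keep the optimal successor from
# the strategy dict. Relies on the numpy/random imports at the top of this file.
def _epsilon_greedy_pick(optimal_succ, succ_states, epsilon):
    if np.random.rand() < epsilon:
        return random.choice(succ_states)
    return optimal_succ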
def re_arrange_blocks(box_id: int, curr_loc, sim_handle):
"""
A function to place all the blocks at their respective locations.
"""
# for _box_id, _box_loc in current_world_confg:
# if _box_loc != "gripper" and _box_id <= 2:
_obj_name = f"b{box_id}"
_obj_id = sim_handle.world.get_obj_id(_obj_name)
_urdf_name, _, _, _ = sim_handle.world.get_obj_attr(_obj_id)
pb.removeBody(_obj_id)
# you have to subtract the table height
curr_loc[2] = curr_loc[2] - sim_handle.world.table_height
    # add a new object at the location that the human moved the obj to
sim_handle.world.load_object(urdf_name=_urdf_name,
obj_name=_obj_name,
obj_init_position=curr_loc,
obj_init_orientation=pb.getQuaternionFromEuler([0, 0, 0]))
def execute_str(actions: list,
causal_graph: CausalGraph,
transition_system: FiniteTransitionSystem,
exp_name: str,
record_sim: bool = False,
debug: bool = False):
# determine the action type first
_action_type = ""
_loc_dict: dict = load_pre_built_loc_info(exp_name=exp_name)
# some constants useful during simulation
_wait_pos_left = [-0.2, 0.0, 1.2, math.pi, 0, math.pi]
_wait_pos_right = [0.2, 0.0, 1.2, math.pi, 0, math.pi]
# load the simulator env
panda_handle = initialize_simulation(causal_graph=causal_graph,
transition_system=transition_system,
loc_dict=_loc_dict,
record_sim=record_sim,
debug=debug)
# loop and add the const table height to all valid loc
for _loc in _loc_dict.values():
_loc[2] = _loc[2] + panda_handle.world.table_height
_release_from_top_loc_to_right = False
_release_from_top_loc_to_left = False
for _action in actions:
_action_type = transition_system._get_action_from_causal_graph_edge(_action)
_box_id, _loc = transition_system._get_multiple_box_location(_action)
# _current
if len(_loc) == 2:
_from_loc = _loc[0]
_to_loc = _loc[1]
else:
_from_loc = ""
_to_loc = _loc[0]
_loc = _loc_dict.get(_to_loc)
_org_loc_copy = copy.copy(_loc)
        # if you are building an arch then l0 and l1 are locations on top of boxes. You need a different type of
        # grab action to execute this successfully.
_transfer_to_top_loc: bool = False
if _to_loc in ["l0", "l1"]:
_transfer_to_top_loc = True
_transfer_from_top_loc = False
if _from_loc in ["l0", "l1"]:
_transfer_from_top_loc = True
if _action_type == "transit":
# pre-image based on the object loc
if _loc[0] < 0:
panda_handle.apply_high_level_action("transit", _wait_pos_left, vel=0.5)
else:
panda_handle.apply_high_level_action("transit", _wait_pos_right, vel=0.5)
# every transfer and transit action will have a from and to location. Lets extract it.
_pos = [_loc[0], _loc[1], _loc[2] + 0.3, math.pi, 0, math.pi]
panda_handle.apply_high_level_action("transit", _pos, vel=0.5)
elif _action_type == "transfer":
if _transfer_to_top_loc:
# place it at an intermediate loc and grab it from side and then continue
panda_handle.apply_high_level_action("transfer", [0.0, 0.0, 0.65 + 0.17, math.pi, 0, math.pi], vel=0.5)
panda_handle.apply_high_level_action("openEE", [], vel=0.5)
# grab it from side based on where you are going
if _loc[0] < 0:
# going left
_pos = [+0.1, 0.0, 0.625 + 0.2 + 0.4, 0, math.pi / 2, -math.pi]
panda_handle.apply_high_level_action("transit", _pos, vel=0.5)
# _pos = [+0.1, 0.0, 0.625 + 0.17 / 2, 0, math.pi / 2, -math.pi]
# panda_handle.apply_high_level_action("transit", _pos, vel=0.5)
# 2. go down towards the object and grab it
_pos = [0.03, 0.0, 0.625 + 0.17 / 2, 0, math.pi / 2, -math.pi]
panda_handle.apply_high_level_action("transit", _pos, vel=0.5)
_pos = [-0.01, 0.0, 0.625 + 0.17 / 2, 0, math.pi / 2, -math.pi]
panda_handle.apply_high_level_action("transit", _pos, vel=0.5)
panda_handle.apply_high_level_action("closeEE", [], vel=0.5)
# 3. grab the object and take the appr stance
_pos = [0.0, 0.0, 0.625 + 0.2 + 0.3, 0, math.pi / 2, -math.pi]
panda_handle.apply_high_level_action("transfer", _pos, vel=0.5)
# _pos = [0.0, 0.0, 0.625 + 0.2 + 0.3, 0, -math.pi / 2, -math.pi]
# panda_handle.apply_high_level_action("transfer", _pos, vel=0.5)
_org_loc = copy.copy(_loc)
| |
# coding: utf-8
import json
class StixCyberObservable:
def __init__(self, opencti, file):
self.opencti = opencti
self.file = file
self.properties = """
id
standard_id
entity_type
parent_types
spec_version
created_at
updated_at
createdBy {
... on Identity {
id
standard_id
entity_type
parent_types
spec_version
name
description
roles
contact_information
x_opencti_aliases
created
modified
objectLabel {
edges {
node {
id
value
color
}
}
}
}
... on Organization {
x_opencti_organization_type
x_opencti_reliability
}
... on Individual {
x_opencti_firstname
x_opencti_lastname
}
}
objectMarking {
edges {
node {
id
standard_id
entity_type
definition_type
definition
created
modified
x_opencti_order
x_opencti_color
}
}
}
objectLabel {
edges {
node {
id
value
color
}
}
}
externalReferences {
edges {
node {
id
standard_id
entity_type
source_name
description
url
hash
external_id
created
modified
}
}
}
observable_value
x_opencti_description
x_opencti_score
indicators {
edges {
node {
id
pattern
pattern_type
}
}
}
... on AutonomousSystem {
number
name
rir
}
... on Directory {
path
path_enc
ctime
mtime
atime
}
... on DomainName {
value
}
... on EmailAddr {
value
display_name
}
... on EmailMessage {
is_multipart
attribute_date
content_type
message_id
subject
received_lines
body
}
... on Artifact {
mime_type
payload_bin
url
encryption_algorithm
decryption_key
hashes {
algorithm
hash
}
}
... on StixFile {
extensions
size
name
name_enc
magic_number_hex
mime_type
ctime
mtime
atime
hashes {
algorithm
hash
}
}
... on X509Certificate {
is_self_signed
version
serial_number
signature_algorithm
issuer
validity_not_before
validity_not_after
hashes {
algorithm
hash
}
}
... on IPv4Addr {
value
}
... on IPv6Addr {
value
}
... on MacAddr {
value
}
... on Mutex {
name
}
... on NetworkTraffic {
extensions
start
end
is_active
src_port
dst_port
protocols
src_byte_count
dst_byte_count
src_packets
dst_packets
}
... on Process {
extensions
is_hidden
pid
created_time
cwd
command_line
environment_variables
}
... on Software {
name
cpe
swid
languages
vendor
version
}
... on Url {
value
}
... on UserAccount {
extensions
user_id
credential
account_login
account_type
display_name
is_service_account
is_privileged
can_escalate_privs
is_disabled
account_created
account_expires
credential_last_changed
account_first_login
account_last_login
}
... on WindowsRegistryKey {
attribute_key
modified_time
number_of_subkeys
}
... on WindowsRegistryValueType {
name
data
data_type
}
... on X509V3ExtensionsType {
basic_constraints
name_constraints
policy_constraints
key_usage
extended_key_usage
subject_key_identifier
authority_key_identifier
subject_alternative_name
issuer_alternative_name
subject_directory_attributes
crl_distribution_points
inhibit_any_policy
private_key_usage_period_not_before
private_key_usage_period_not_after
certificate_policies
policy_mappings
}
... on XOpenCTICryptographicKey {
value
}
... on XOpenCTICryptocurrencyWallet {
value
}
... on XOpenCTIText {
value
}
... on XOpenCTIUserAgent {
value
}
"""
"""
List StixCyberObservable objects
:param types: the array of types
:param filters: the filters to apply
:param search: the search keyword
:param first: return the first n rows from the after ID (or the beginning if not set)
:param after: ID of the first row
:return List of StixCyberObservable objects
"""
def list(self, **kwargs):
types = kwargs.get("types", None)
filters = kwargs.get("filters", None)
search = kwargs.get("search", None)
first = kwargs.get("first", 500)
after = kwargs.get("after", None)
order_by = kwargs.get("orderBy", None)
order_mode = kwargs.get("orderMode", None)
custom_attributes = kwargs.get("customAttributes", None)
get_all = kwargs.get("getAll", False)
with_pagination = kwargs.get("withPagination", False)
if get_all:
first = 500
self.opencti.log(
"info",
"Listing StixCyberObservables with filters " + json.dumps(filters) + ".",
)
query = (
"""
query StixCyberObservables($types: [String], $filters: [StixCyberObservablesFiltering], $search: String, $first: Int, $after: ID, $orderBy: StixCyberObservablesOrdering, $orderMode: OrderingMode) {
stixCyberObservables(types: $types, filters: $filters, search: $search, first: $first, after: $after, orderBy: $orderBy, orderMode: $orderMode) {
edges {
node {
"""
+ (custom_attributes if custom_attributes is not None else self.properties)
+ """
}
}
pageInfo {
startCursor
endCursor
hasNextPage
hasPreviousPage
globalCount
}
}
}
"""
)
result = self.opencti.query(
query,
{
"types": types,
"filters": filters,
"search": search,
"first": first,
"after": after,
"orderBy": order_by,
"orderMode": order_mode,
},
)
if get_all:
final_data = []
data = self.opencti.process_multiple(result["data"]["stixCyberObservables"])
final_data = final_data + data
while result["data"]["stixCyberObservables"]["pageInfo"]["hasNextPage"]:
after = result["data"]["stixCyberObservables"]["pageInfo"]["endCursor"]
self.opencti.log("info", "Listing StixCyberObservables after " + after)
result = self.opencti.query(
query,
{
"types": types,
"filters": filters,
"search": search,
"first": first,
"after": after,
"orderBy": order_by,
"orderMode": order_mode,
},
)
data = self.opencti.process_multiple(
result["data"]["stixCyberObservables"]
)
final_data = final_data + data
return final_data
else:
return self.opencti.process_multiple(
result["data"]["stixCyberObservables"], with_pagination
)
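    # --- hedged usage sketch (not part of pycti) ---
    # Illustrative only; assumes an already-configured OpenCTI API client
    # exposing this class as `api.stix_cyber_observable`:
    #     observables = api.stix_cyber_observable.list(
    #         types=["IPv4-Addr"], search="8.8.8.8", first=100, getAll=True)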
"""
Read a StixCyberObservable object
:param id: the id of the StixCyberObservable
:param filters: the filters to apply if no id provided
:return StixCyberObservable object
"""
def read(self, **kwargs):
id = kwargs.get("id", None)
filters = kwargs.get("filters", None)
custom_attributes = kwargs.get("customAttributes", None)
if id is not None:
self.opencti.log("info", "Reading StixCyberObservable {" + id + "}.")
query = (
"""
query StixCyberObservable($id: String!) {
stixCyberObservable(id: $id) {
"""
+ (
custom_attributes
if custom_attributes is not None
else self.properties
)
+ """
}
}
"""
)
result = self.opencti.query(query, {"id": id})
return self.opencti.process_multiple_fields(
result["data"]["stixCyberObservable"]
)
elif filters is not None:
result = self.list(filters=filters, customAttributes=custom_attributes)
if len(result) > 0:
return result[0]
else:
return None
else:
self.opencti.log(
"error",
"[opencti_stix_cyber_observable] Missing parameters: id or filters",
)
return None
"""
Create a Stix-Observable object
:param observableData: the data of the observable (STIX2 structure)
:return Stix-Observable object
"""
def create(self, **kwargs):
observable_data = kwargs.get("observableData", {})
simple_observable_id = kwargs.get("simple_observable_id", None)
simple_observable_key = kwargs.get("simple_observable_key", None)
simple_observable_value = kwargs.get("simple_observable_value", None)
simple_observable_description = kwargs.get(
"simple_observable_description", None
)
x_opencti_score = kwargs.get("x_opencti_score", None)
created_by = kwargs.get("createdBy", None)
object_marking = kwargs.get("objectMarking", None)
object_label = kwargs.get("objectLabel", None)
external_references = kwargs.get("externalReferences", None)
update = kwargs.get("update", False)
create_indicator = (
observable_data["x_opencti_create_indicator"]
if "x_opencti_create_indicator" in observable_data
else kwargs.get("createIndicator", False)
)
attribute = None
if simple_observable_key is not None:
key_split = simple_observable_key.split(".")
type = key_split[0].title()
attribute = key_split[1]
if attribute not in ["hashes", "extensions"]:
observable_data[attribute] = simple_observable_value
else:
type = (
observable_data["type"].title() if "type" in observable_data else None
)
if type is None:
return
if type.lower() == "file":
type = "StixFile"
elif type.lower() == "ipv4-addr":
type = "IPv4-Addr"
elif type.lower() == "ipv6-addr":
type = "IPv6-Addr"
elif type.lower() == "x-opencti-hostname":
type = "X-OpenCTI-Hostname"
elif type.lower() == "x-opencti-cryptocurrency-wallet":
type = "X-OpenCTI-Cryptocurrency-Wallet"
elif type.lower() == "x-opencti-user-agent":
type = "X-OpenCTI-User-Agent"
elif type.lower() == "x-opencti-cryptographic-key":
type = "X-OpenCTI-Cryptographic-Key"
elif type.lower() == "x-opencti-text":
type = "X-OpenCTI-text"
x_opencti_description = (
observable_data["x_opencti_description"]
if "x_opencti_description" in observable_data
else None
)
if simple_observable_description is not None:
x_opencti_description = simple_observable_description
x_opencti_score = (
observable_data["x_opencti_score"]
if "x_opencti_score" in observable_data
else x_opencti_score
)
stix_id = observable_data["id"] if "id" in observable_data else None
if simple_observable_id is not None:
stix_id = simple_observable_id
hashes = []
if (
simple_observable_key is not None
and simple_observable_key.lower() == "file.hashes.md5"
):
hashes.append({"algorithm": "MD5", "hash": simple_observable_value})
if (
simple_observable_key is not None
and simple_observable_key.lower() == "file.hashes.sha-1"
):
hashes.append({"algorithm": "SHA-1", "hash": simple_observable_value})
if (
simple_observable_key is not None
and simple_observable_key.lower() == "file.hashes.sha-256"
):
hashes.append({"algorithm": "SHA-256", "hash": simple_observable_value})
if "hashes" in observable_data:
for key, value in observable_data["hashes"].items():
hashes.append({"algorithm": key, "hash": value})
if type is not None:
self.opencti.log(
"info",
"Creating Stix-Cyber-Observable {"
+ type
+ "} with indicator at "
+ str(create_indicator)
+ ".",
)
input_variables = {
"type": type,
"stix_id": stix_id,
"x_opencti_score": x_opencti_score,
"x_opencti_description": x_opencti_description,
"createIndicator": create_indicator,
"createdBy": created_by,
"objectMarking": object_marking,
"objectLabel": object_label,
"externalReferences": external_references,
"update": update,
}
query = """
mutation StixCyberObservableAdd(
$type: String!,
$stix_id: String,
$x_opencti_score: Int,
$x_opencti_description: String,
$createIndicator: Boolean,
$createdBy: String,
$objectMarking: [String],
$objectLabel: [String],
$externalReferences: [String],
$AutonomousSystem: AutonomousSystemAddInput,
$Directory: DirectoryAddInput,
$DomainName: DomainNameAddInput,
$EmailAddr: EmailAddrAddInput,
$EmailMessage: EmailMessageAddInput,
$EmailMimePartType: EmailMimePartTypeAddInput,
$Artifact: ArtifactAddInput,
$StixFile: StixFileAddInput,
$X509Certificate: X509CertificateAddInput,
$IPv4Addr: IPv4AddrAddInput,
$IPv6Addr: IPv6AddrAddInput,
$MacAddr: MacAddrAddInput,
$Mutex: MutexAddInput,
$NetworkTraffic: NetworkTrafficAddInput,
$Process: ProcessAddInput,
$Software: SoftwareAddInput,
$Url: UrlAddInput,
$UserAccount: UserAccountAddInput,
$WindowsRegistryKey: WindowsRegistryKeyAddInput,
$WindowsRegistryValueType: WindowsRegistryValueTypeAddInput,
$X509V3ExtensionsType: X509V3ExtensionsTypeAddInput,
$XOpenCTICryptographicKey: XOpenCTICryptographicKeyAddInput,
$XOpenCTICryptocurrencyWallet: XOpenCTICryptocurrencyWalletAddInput,
$XOpenCTIHostname: XOpenCTIHostnameAddInput
$XOpenCTIText: XOpenCTITextAddInput,
$XOpenCTIUserAgent: XOpenCTIUserAgentAddInput
) {
stixCyberObservableAdd(
type: $type,
stix_id: $stix_id,
x_opencti_score: $x_opencti_score,
x_opencti_description: $x_opencti_description,
createIndicator: $createIndicator,
createdBy: $createdBy,
objectMarking: $objectMarking,
objectLabel: $objectLabel,
externalReferences: $externalReferences,
AutonomousSystem: $AutonomousSystem,
Directory: $Directory,
DomainName: $DomainName,
EmailAddr: $EmailAddr,
EmailMessage: $EmailMessage,
EmailMimePartType: $EmailMimePartType,
Artifact: $Artifact,
StixFile: $StixFile,
X509Certificate: $X509Certificate,
IPv4Addr: $IPv4Addr,
IPv6Addr: $IPv6Addr,
MacAddr: $MacAddr,
Mutex: $Mutex,
NetworkTraffic: $NetworkTraffic,
Process: $Process,
Software: $Software,
Url: $Url,
UserAccount: $UserAccount,
WindowsRegistryKey: $WindowsRegistryKey,
WindowsRegistryValueType: $WindowsRegistryValueType,
X509V3ExtensionsType: $X509V3ExtensionsType,
XOpenCTICryptographicKey: $XOpenCTICryptographicKey,
XOpenCTICryptocurrencyWallet: $XOpenCTICryptocurrencyWallet,
XOpenCTIHostname: $XOpenCTIHostname,
XOpenCTIText: $XOpenCTIText,
XOpenCTIUserAgent: $XOpenCTIUserAgent
) {
id
standard_id
entity_type
parent_types
indicators {
edges {
node {
id
pattern
pattern_type
}
}
}
}
}
| |
import numpy as np
import matplotlib.pyplot as plt
from import_explore import import_csv
from import_explore import normalise
# for performing regression
from regression_models import construct_rbf_feature_mapping
# for plotting results
from regression_plot import plot_train_test_errors
# two new functions for cross validation
from regression_train_test import create_cv_folds
from regression_train_test import cv_evaluation_linear_model
from regression_train_test import train_and_test_split
from regression_train_test import train_and_test_partition
from regression_train_test import train_and_test
def parameter_search_rbf(inputs, targets, test_fraction, folds):
"""
"""
n = inputs.shape[0]
    # for the centres of the basis functions sample 5% of the data
sample_fraction = 0.05
p = (1-sample_fraction, sample_fraction)
centres = inputs[np.random.choice([False, True], size=n, p=p), :]
print("\ncentres.shape = %r" % (centres.shape,))
scales = np.logspace(0, 4, 20) # of the basis functions
reg_params = np.logspace(-16, -1, 20) # choices of regularisation strength
# create empty 2d arrays to store the train and test errors
train_mean_errors = np.empty((scales.size, reg_params.size))
test_mean_errors = np.empty((scales.size, reg_params.size))
# iterate over the scales
for i, scale in enumerate(scales):
# i is the index, scale is the corresponding scale
# we must recreate the feature mapping each time for different scales
feature_mapping = construct_rbf_feature_mapping(centres, scale)
designmtx = feature_mapping(inputs)
        # evaluate this design matrix by cross-validation over the folds,
# iterating over the regularisation parameters
for j, reg_param in enumerate(reg_params):
# j is the index, reg_param is the corresponding regularisation
# parameter
# train and test the data
train_error, test_error = cv_evaluation_linear_model(
designmtx, targets, folds, reg_param=reg_param)
# store the train and test errors in our 2d arrays
train_mean_errors[i, j] = np.mean(train_error)
test_mean_errors[i, j] = np.mean(test_error)
    # we have a 2d array of train and test errors; we want the (i,j) index
    # of the best value. np.argmin on a 2d array returns a flattened index,
    # so unravel it back into (row, column) coordinates.
    min_place = np.argmin(test_mean_errors)
    best_i_correct, best_j_correct = np.unravel_index(
        min_place, test_mean_errors.shape)
print("\nBest joint choice of parameters:")
print(
"\tscale %.2g and lambda = %.2g" % (scales[best_i_correct], reg_params[best_j_correct]))
# now we can plot the error for different scales using the best
# regularisation choice
fig, ax = plot_train_test_errors(
"scale", scales, train_mean_errors[:, best_j_correct], test_mean_errors[:, best_j_correct])
ax.set_xscale('log')
ax.set_title('Train vs Test Error Across Scales')
fig.savefig("../plots/rbf_searching_scales.pdf", fmt="pdf")
# ...and the error for different regularisation choices given the best
# scale choice
    fig, ax = plot_train_test_errors(
        r"$\lambda$", reg_params, train_mean_errors[best_i_correct, :], test_mean_errors[best_i_correct, :])
ax.set_xscale('log')
ax.set_title('Train vs Test Error Across Reg Params')
fig.savefig("../plots/rbf_searching_reg_params.pdf", fmt="pdf")
'''
# using the best parameters found above,
# we now vary the number of centres and evaluate the performance
reg_param = reg_params[best_j]
scale = scales[best_i]
n_centres_seq = np.arange(1, 20)
train_errors = []
test_errors = []
for n_centres in n_centres_seq:
# constructing the feature mapping anew for each number of centres
centres = np.linspace(0, 1, n_centres)
feature_mapping = construct_rbf_feature_mapping(centres, scale)
design_matrix = feature_mapping(inputs)
# evaluating the test and train error for the given regularisation parameter and scale
train_error, test_error = cv_evaluation_linear_model(
design_matrix, targets, folds, reg_param=reg_param)
# collecting the errors
train_errors.append(train_error)
test_errors.append(test_error)
# plotting the results
fig, ax = plot_train_test_errors(
"no. centres", n_centres_seq, train_errors, test_errors)
ax.set_title('Train vs Test Error Across Centre Number')
fig.savefig("../plots/rbf_searching_number_centres.pdf", fmt="pdf")
'''
return scales[best_i_correct], reg_params[best_j_correct]
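# --- hedged sketch (not part of the original script) ---
# How np.unravel_index recovers the best (scale, lambda) cell above, shown
# on a tiny made-up error grid.
def _unravel_index_demo():
    errors = np.array([[0.9, 0.4], [0.7, 0.2]])
    i, j = np.unravel_index(np.argmin(errors), errors.shape)
    return i, j  # (1, 1): the smallest error sits at row 1, column 1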
def evaluate_reg_param(inputs, targets, folds, centres, scale, reg_params=None):
"""
Evaluate then plot the performance of different regularisation parameters
"""
# creating the feature mapping and then the design matrix
feature_mapping = construct_rbf_feature_mapping(centres, scale)
designmtx = feature_mapping(inputs)
# choose a range of regularisation parameters
if reg_params is None:
reg_params = np.logspace(-15, 2, 20) # choices of regularisation strength
num_values = reg_params.size
num_folds = len(folds)
# create some arrays to store results
train_mean_errors = np.zeros(num_values)
test_mean_errors = np.zeros(num_values)
train_stdev_errors = np.zeros(num_values)
test_stdev_errors = np.zeros(num_values)
#
for r, reg_param in enumerate(reg_params):
# r is the index of reg_param, reg_param is the regularisation parameter
# cross validate with this regularisation parameter
train_errors, test_errors = cv_evaluation_linear_model(
designmtx, targets, folds, reg_param=reg_param)
# we're interested in the average (mean) training and testing errors
train_mean_error = np.mean(train_errors)
test_mean_error = np.mean(test_errors)
train_stdev_error = np.std(train_errors)
test_stdev_error = np.std(test_errors)
# store the results
train_mean_errors[r] = train_mean_error
test_mean_errors[r] = test_mean_error
train_stdev_errors[r] = train_stdev_error
test_stdev_errors[r] = test_stdev_error
# Now plot the results
    fig, ax = plot_train_test_errors(
        r"$\lambda$", reg_params, train_mean_errors, test_mean_errors)
# Here we plot the error ranges too: mean plus/minus 1 standard error.
# 1 standard error is the standard deviation divided by sqrt(n) where
# n is the number of samples.
# (There are other choices for error bars.)
# train error bars
lower = train_mean_errors - train_stdev_errors/np.sqrt(num_folds)
upper = train_mean_errors + train_stdev_errors/np.sqrt(num_folds)
ax.fill_between(reg_params, lower, upper, alpha=0.2, color='b')
# test error bars
lower = test_mean_errors - test_stdev_errors/np.sqrt(num_folds)
upper = test_mean_errors + test_stdev_errors/np.sqrt(num_folds)
ax.fill_between(reg_params, lower, upper, alpha=0.2, color='r')
ax.set_xscale('log')
# ax.set_xlim([0, 0.02])
ax.set_title('Train vs Test Error Across Reg Params With Cross-Validation')
fig.savefig("../plots/rbf_searching_reg_params_cross_validation.pdf", fmt="pdf")
def evaluate_scale(inputs, targets, folds, centres, reg_param, scales=None):
"""
evaluate then plot the performance of different basis function scales
"""
# choose a range of scales
if scales is None:
scales = np.logspace(0, 6, 20) # of the basis functions
#
num_values = scales.size
num_folds = len(folds)
# create some arrays to store results
train_mean_errors = np.zeros(num_values)
test_mean_errors = np.zeros(num_values)
train_stdev_errors = np.zeros(num_values)
test_stdev_errors = np.zeros(num_values)
#
for s, scale in enumerate(scales):
feature_mapping = construct_rbf_feature_mapping(centres,scale)
designmtx = feature_mapping(inputs)
# r is the index of reg_param, reg_param is the regularisation parameter
# cross validate with this regularisation parameter
train_errors, test_errors = cv_evaluation_linear_model(
designmtx, targets, folds, reg_param=reg_param)
# we're interested in the average (mean) training and testing errors
train_mean_error = np.mean(train_errors)
test_mean_error = np.mean(test_errors)
train_stdev_error = np.std(train_errors)
test_stdev_error = np.std(test_errors)
# store the results
train_mean_errors[s] = train_mean_error
test_mean_errors[s] = test_mean_error
train_stdev_errors[s] = train_stdev_error
test_stdev_errors[s] = test_stdev_error
# Now plot the results
fig, ax = plot_train_test_errors(
"scale", scales, train_mean_errors, test_mean_errors)
# Here we plot the error ranges too: mean plus/minus 1 standard error.
# 1 standard error is the standard deviation divided by sqrt(n) where
# n is the number of samples.
# (There are other choices for error bars.)
# train error bars
lower = train_mean_errors - train_stdev_errors/np.sqrt(num_folds)
upper = train_mean_errors + train_stdev_errors/np.sqrt(num_folds)
ax.fill_between(scales, lower, upper, alpha=0.2, color='b')
# test error bars
lower = test_mean_errors - test_stdev_errors/np.sqrt(num_folds)
upper = test_mean_errors + test_stdev_errors/np.sqrt(num_folds)
ax.fill_between(scales, lower, upper, alpha=0.2, color='r')
ax.set_xscale('log')
# ax.set_xlim([0, 100])
ax.set_title('Train vs Test Error Across Scales With Cross-Validation')
fig.savefig("../plots/rbf_searching_scales_cross_validation.pdf", fmt="pdf")
def evaluate_num_centres(
inputs, targets, folds, scale, reg_param, num_centres_sequence=None):
"""
Evaluate then plot the performance of different numbers of basis
function centres.
"""
# choose a range of numbers of centres
if num_centres_sequence is None:
num_centres_sequence = np.arange(1, 20)
num_values = num_centres_sequence.size
num_folds = len(folds)
#
# create some arrays to store results
train_mean_errors = np.zeros(num_values)
test_mean_errors = np.zeros(num_values)
train_stdev_errors = np.zeros(num_values)
test_stdev_errors = np.zeros(num_values)
#
# run the experiments
for c, num_centres in enumerate(num_centres_sequence):
centres = np.linspace(0, 1, num_centres)
feature_mapping = construct_rbf_feature_mapping(centres, scale)
designmtx = feature_mapping(inputs)
# c is the index of the current number of centres
# cross validate with this number of centres and the fixed regularisation parameter
train_errors, test_errors = cv_evaluation_linear_model(
designmtx, targets, folds, reg_param=reg_param)
# we're interested in the average (mean) training and testing errors
train_mean_error = np.mean(train_errors)
test_mean_error = np.mean(test_errors)
train_stdev_error = np.std(train_errors)
test_stdev_error = np.std(test_errors)
# store the results
train_mean_errors[c] = train_mean_error
test_mean_errors[c] = test_mean_error
train_stdev_errors[c] = train_stdev_error
test_stdev_errors[c] = test_stdev_error
#
# Now plot the results
fig, ax = plot_train_test_errors(
"no. centres", num_centres_sequence, train_mean_errors, test_mean_errors)
# Here we plot the error ranges too: mean plus/minus 1 standard error.
# 1 standard error is the standard deviation divided by sqrt(n) where
# n is the number of samples.
# (There are other choices for error bars.)
# train error bars
lower = train_mean_errors - train_stdev_errors/np.sqrt(num_folds)
upper = train_mean_errors + train_stdev_errors/np.sqrt(num_folds)
ax.fill_between(num_centres_sequence, lower, upper, alpha=0.2, color='b')
# test error bars
lower = test_mean_errors - test_stdev_errors/np.sqrt(num_folds)
upper = test_mean_errors + test_stdev_errors/np.sqrt(num_folds)
ax.fill_between(num_centres_sequence, lower, upper, alpha=0.2, color='r')
ax.set_title('Train vs Test Error Across Centre Number With Cross-Validation')
fig.savefig("../plots/rbf_searching_number_centres_cross_validation.pdf", fmt="pdf")
def main(name, delimiter, columns, has_header=True, test_fraction=0.25):
"""
This function contains example code that demonstrates how to use the
functions defined in poly_fit_base for fitting polynomial curves to data.
"""
# importing using csv reader and storing as numpy | |
from direct.interval.IntervalGlobal import *
from direct.gui.DirectGui import DirectLabel
from direct.gui.DirectGui import DirectButton
from direct.gui.OnscreenText import OnscreenText
import random, string
from direct.gui import DirectGuiGlobals
from direct.directnotify import DirectNotifyGlobal
import Localizer, re
from pinballbase.DirectMaxScaleLabel import DirectMaxScaleLabel
from pinballbase.DirectMaxScaleButton import DirectMaxScaleButton
from pinballbase.PinballElements import PlayTimer
class PinballDisplay:
__module__ = __name__
notify = DirectNotifyGlobal.directNotify.newCategory('PinballDisplay.PinballDisplay')
def __init__(self, world, fontName, idleMessages, attractMessages):
self.hudElements = {}
self.tutorialElements = {}
self.world = world
self.extraBallLimit = 5
self.numberOfLines = 3
self.controlsOver = False
self.grayScreen = None
self.controlLabels = None
self.alertMessageSequence = None
if self.world.fromPalace == True:
self.playTimer = PlayTimer()
self.scoreLineLabel = None
self.scoreLine = None
self.lines = []
self.hudElements['mainBackground'] = None
self.instructions = None
self.tutorialElements['tutorialLabel'] = None
self.hudElements['ballHolder'] = None
self.exitButton = None
self.resumeButton = None
self.tutorialElements['continueOn'] = None
self.tutorialElements['skipIt'] = None
self.tutorialElements['instructionsBackground'] = None
self.ballIcons = []
self.extraBallsReleased = 0
self.extraBallScoreBase = 250000
self.extraBallMultiplier = 4
self.extraBallSound = base.loadSfx('pinballbase/extraBall')
self.levelupSound = base.loadSfx('pinballbase/levelup')
self.myScore = 0
self.idleState = 0
self.shouldIdle = False
self.alertMode = True
self.displayLocked = False
self.numBallsLeft = 3
taskMgr.doMethodLater(30, self.idle, 'idletimer')
if fontName != None:
font = loader.loadFont(fontName)
DirectGuiGlobals.setDefaultFont(font)
self.numberFont = loader.loadFont('DomCasualBT.ttf')
self.hudState = 3
self.exitButton = DirectMaxScaleButton(relief=None, command=self.world.exit, image=('pinballbase/exitButton.png',
'pinballbase/exitButtonOver.png',
'pinballbase/exitButtonOver.png'), scale=(0.2,
1.0,
0.2), image_scale=(1.7,
1.0,
0.85), pos=(0.4, 0, -0.65), text_pos=(-0.03,
-0.08), text=Localizer.pDisplayExitButton, text_fg=(1,
1,
1,
1), text_shadow=(1,
1,
1,
1), text_shadowOffset=(0.02,
0.02), maxHeight=0.37, maxWidth=2.2)
self.exitButton.reparentTo(hidden)
self.exitButton.setTransparency(1)
self.resumeButton = DirectMaxScaleButton(relief=None, command=self.world.pauseGame, image=('pinballbase/resumeButton.png',
'pinballbase/resumeButtonOver.png',
'pinballbase/resumeButtonOver.png'), scale=(0.2,
1.0,
0.2), image_scale=(1.7,
1.0,
0.85), pos=(-0.4, 0, -0.65), text_pos=(-0.02,
-0.06), text=Localizer.pDisplayResumeButton, text_fg=(1,
1,
1,
1), text_shadow=(1,
1,
1,
1), text_shadowOffset=(0.02,
0.02), maxHeight=0.37, maxWidth=2.2)
self.resumeButton.reparentTo(hidden)
self.resumeButton.setTransparency(1)
self.alertMessageLabel = DirectMaxScaleLabel(relief=None, pos=(0, 0, 0), text=' ', text_fg=(1,
1,
1,
1), text_shadow=(0,
0,
0,
1), text_shadowOffset=(0.02,
0.02), maxWidth=1.8, maxHeight=0.3)
self.alertMessageLabel.reparentTo(hidden)
self.alertMessageLabel.setTransparency(1)
self.idleMessages = idleMessages
self.attractMessages = attractMessages
return
def increaseScore(self):
self.world.updateScore(49900, 'increase')
def finishInit(self):
self.instructionFont = loader.loadFont(Localizer.instructionFont)
self.hudElements['leftInstructions1'].component('text0').setFont(self.instructionFont)
self.hudElements['leftInstructions2'].component('text0').setFont(self.instructionFont)
self.hudElements['rightInstructions1'].component('text0').setFont(self.instructionFont)
self.hudElements['rightInstructions2'].component('text0').setFont(self.instructionFont)
self.tutorialElements['continueOn'].component('text0').setFont(self.instructionFont)
self.tutorialElements['skipIt'].component('text0').setFont(self.instructionFont)
self.scoreFont = loader.loadFont(Localizer.scoreFont)
self.scoreLineLabel.component('text0').setFont(self.scoreFont)
self.scoreLine.component('text0').setFont(self.scoreFont)
if Localizer.myLanguage != 'japanese':
for l in self.lines:
l['squish'] = True
self.threeLinesZValues = []
self.twoLinesZValues = []
self.statusLineInitialZ = self.lines[0].getZ()
self.statusLineZIncrement = self.lines[0].getZ() - self.lines[1].getZ()
for line in self.lines:
self.threeLinesZValues.append(line.getZ())
self.twoLinesZValues.append(self.threeLinesZValues[0] + (self.threeLinesZValues[1] - self.threeLinesZValues[0]) / 2.0)
self.twoLinesZValues.append(self.threeLinesZValues[1] + (self.threeLinesZValues[2] - self.threeLinesZValues[1]) / 2.0)
def gameRunning(self):
cm = CardMaker('animatedBillboardNode')
cm.setFrame(-1, 1, -1, 1)
self.grayScreen = NodePath(cm.generate())
self.grayScreen.reparentTo(aspect2d, 90)
self.grayScreen.setTransparency(1)
self.grayScreen.setColorScale(0, 0, 0, 0.6)
self.grayScreen.setPos(0, 0, 0)
self.grayScreen.setScale(10)
self.controlText = [
(
Localizer.pEsc, Localizer.pPauseResume), (Localizer.pDownArrow, Localizer.pLaunchBall), (Localizer.pLeftControl, Localizer.pLeftFlipper), (Localizer.pRightControl, Localizer.pRightFlipper), (Localizer.pTiltControls, Localizer.pTiltInstructions), (Localizer.pEnter, Localizer.pStartGame)]
if Localizer.myLanguage == 'japanese':
self.controlLabels = []
self.controlLabels.append(DirectLabel(relief=None, scale=(1, 1, 0.8), image='pinballbase/help_screen_background_top.png'))
self.controlLabels[0].setTransparency(1)
self.controlLabels[0].reparentTo(aspect2d, 100)
self.controlLabels.append(DirectLabel(relief=None, pos=(0, 0, -0.6), scale=(1,
1,
0.2), image='pinballbase/help_screen_background_bottom.png'))
self.controlLabels[1].setTransparency(1)
self.controlLabels[1].reparentTo(aspect2d, 100)
self.grayScreen.setColorScale(0, 0, 0, 0.75)
if Localizer.myLanguage != 'japanese':
self.controlLabels = []
cl = DirectMaxScaleLabel(relief=None, text=Localizer.pControls, pos=(0.05,
0.0,
0.67), text_fg=(1,
1,
1,
1), text_shadow=(0,
0,
0,
1), text_shadowOffset=(0.02,
0.02), text_align=TextNode.ACenter, text_mayChange=1, maxWidth=1, maxHeight=0.2)
self.controlLabels.append(cl)
i = 0
for ct in self.controlText:
i += 1
if i == len(self.controlText):
i += 1
textfg = (0.4, 1, 0.4, 1)
else:
textfg = (1, 1, 1, 1)
cl = DirectMaxScaleLabel(relief=None, text=ct[0], pos=(-0.02, 0.0, 0.57 - i * 0.18), text_fg=textfg, text_shadow=(0,
0,
0,
1), text_shadowOffset=(0.02,
0.02), text_align=TextNode.ARight, text_mayChange=1, maxWidth=1.0, maxHeight=0.17)
self.controlLabels.append(cl)
cl = DirectMaxScaleLabel(relief=None, text='-', pos=(0.06, 0.0, 0.57 - i * 0.18), text_fg=textfg, text_shadow=(0,
0,
0,
1), text_shadowOffset=(0.02,
0.02), text_align=TextNode.ACenter, text_mayChange=1, maxWidth=1.1, maxHeight=0.17)
self.controlLabels.append(cl)
cl = DirectMaxScaleLabel(relief=None, text=ct[1], pos=(0.14, 0.0, 0.57 - i * 0.18), text_fg=textfg, text_shadow=(0,
0,
0,
1), text_shadowOffset=(0.02,
0.02), text_align=TextNode.ALeft, text_mayChange=1, maxWidth=1.0, maxHeight=0.17)
self.controlLabels.append(cl)
for cl in self.controlLabels:
cl.reparentTo(aspect2d, 100)
cl.component('text0').setFont(self.instructionFont)
self.setSmallestScale(self.controlLabels)
self.controlsOver = False
messenger.send('boardRunning')
if not self.world.gameOver:
self.start()
return
def setSmallestScale(self, labels):
smallestScaleX = 100
smallestScaleY = 100
for lineIndex in range(len(labels)):
if labels[lineIndex]['text'] == '':
continue
lscale = labels[lineIndex]['text_scale']
if isinstance(lscale, tuple) or isinstance(lscale, list):
if lscale[0] < smallestScaleX:
smallestScaleX = lscale[0]
smallestScaleY = lscale[1]
elif lscale < smallestScaleX:
smallestScaleX = lscale
smallestScaleY = lscale
for lineIndex in range(len(labels)):
labels[lineIndex]['text_scale'] = (
smallestScaleX, smallestScaleY)
return smallestScaleY
def resetExtraBallNumber(self):
self.extraBallsReleased = 0
def start(self):
self.idleState = 0
if self.world.fromPalace:
self.playTimer.reset()
self.playTimer.playing()
self.world.pbTaskMgr.doMethodLater(1, self.resetExtraBallNumber, 'resetextraballnumber')
self.setHudState(1)
self.controlsOver = True
if self.grayScreen:
self.grayScreen.reparentTo(hidden)
if self.controlLabels != None:
for cl in self.controlLabels:
cl.reparentTo(hidden)
return
def wake(self):
taskMgr.doMethodLater(30, self.idle, 'idletimer')
self.extraBallSound = base.loadSfx('pinballbase/extraBall')
self.levelupSound = base.loadSfx('pinballbase/levelup')
def sleep(self):
taskMgr.remove('idletimer')
if self.grayScreen:
self.grayScreen.reparentTo(hidden)
del self.extraBallSound
del self.levelupSound
if self.controlLabels != None:
for cl in self.controlLabels:
cl.reparentTo(hidden)
cl.removeNode()
self.controlLabels = None
return
def destroy(self):
self.sleep()
if self.grayScreen:
self.grayScreen.removeNode()
taskMgr.remove('idletimer')
for (key, p) in list(self.hudElements.items()):
self.notify.debug('deleting hud element: %s' % key)
p.destroy()
p.removeNode()
del self.hudElements[key]
for (key, p) in list(self.tutorialElements.items()):
self.notify.debug('deleting tutorial element: %s' % key)
p.destroy()
p.removeNode()
del self.tutorialElements[key]
del self.hudElements
del self.tutorialElements
del self.world
self.exitButton.destroy()
self.exitButton.removeNode()
del self.exitButton
if self.alertMessageSequence is not None:
self.alertMessageSequence.finish()
self.alertMessageLabel.destroy()
self.resumeButton.destroy()
self.resumeButton.removeNode()
del self.resumeButton
self.scoreLineLabel.removeNode()
self.scoreLine.removeNode()
self.instructions.removeNode()
while self.lines != []:
l = self.lines.pop()
l.removeNode()
while self.ballIcons != []:
b = self.ballIcons.pop()
b.destroy()
del b
return
def displayCommonText(self, text):
for i in range(self.numberOfLines):
self.lines[i]['text'] = ''
barSubtract = text.count('|') * 2
self.notify.debug('displayCommonText: number of characters lost to |s is %d ' % barSubtract)
characterLengthOfText = len(text) - barSubtract
averageCharacterLineLength = characterLengthOfText / self.numberOfLines
if Localizer.myLanguage == 'japanese':
newLines = text.split(Localizer.japanSep)
else:
splitText = text.split()
self.notify.debug('displayCommonText: characterLengthOfText = %d and average is %d ' % (characterLengthOfText, averageCharacterLineLength))
newLines = ['']
lineCount = 0
wordIndex = -1
for i in range(len(splitText)):
wordIndex += 1
if wordIndex >= len(splitText):
break
enforcedBreakAtEnd = False
while splitText[wordIndex] == '|':
if newLines[lineCount] != '':
lineCount += 1
wordIndex += 1
if wordIndex >= len(splitText):
enforcedBreakAtEnd = True
self.notify.warning(' displayCommonText: Text with a | as last character entered')
if lineCount >= self.numberOfLines:
lineCount -= 1
break
else:
newLines.append('')
if enforcedBreakAtEnd:
break
if newLines[lineCount] == '':
newLines[lineCount] += splitText[wordIndex]
elif Localizer.myLanguage == 'japanese':
newLines[lineCount] += splitText[wordIndex]
else:
newLines[lineCount] += ' ' + splitText[wordIndex]
if len(newLines[lineCount]) >= averageCharacterLineLength:
if lineCount + 1 < self.numberOfLines:
lineCount += 1
newLines.append('')
while newLines[(len(newLines) - 1)] == '':
newLines.pop()
self.notify.debug('displayCommonText: newlines')
self.notify.debug(newLines)
success = True
while len(newLines) > self.numberOfLines and success:
success = False
for i in range(len(newLines)):
if newLines[i] == '':
newLines.pop(i)
success = True
break
while len(newLines) > self.numberOfLines:
self.notify.warning('----------------------------------------------------------------------------')
self.notify.warning('displayCommonText: Number of lines to be displayed greater than lines available')
print(newLines)
self.notify.warning('----------------------------------------------------------------------------')
newLines.pop()
if len(newLines) == 2:
for i in range(2):
self.lines[i].setZ(self.twoLinesZValues[i])
else:
for i in range(3):
self.lines[i].setZ(self.threeLinesZValues[i])
if len(newLines) == 1:
newLines.insert(0, '')
for lineIndex in range(len(newLines)):
self.lines[lineIndex]['text'] = newLines[lineIndex]
if Localizer.myLanguage == 'japanese':
maxHeight = self.setSmallestScale(self.lines)
new0Z = self.lines[0].getZ() - self.statusLineZIncrement * (1 - maxHeight / 0.08)
self.lines[0].setZ(new0Z)
new1Z = self.lines[2].getZ() + self.statusLineZIncrement * (1 - maxHeight / 0.08)
self.lines[2].setZ(new1Z)
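# Note (inferred from the splitting logic above, not stated in the original):
# a '|' token in the text forces a line break, e.g.
# self.displayCommonText('BALL SAVED | SHOOT AGAIN') renders as two lines,
# and barSubtract discounts those markers when balancing line lengths.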
def getCurrentScreenWidth(self, dLabel):
if dLabel.hascomponent('text0'):
return dLabel.component('text0').textNode.getWidth() * dLabel.component('text0')['scale'][0]
else:
return 0
def setBallNumber(self, num):
if num > self.extraBallLimit:
return False
self.numBallsLeft = num
if self.hudState == 0 or self.hudState == 2:
return False
self.hudElements['ballHolder'].reparentTo(aspect2d, 50)
for i in range(len(self.ballIcons)):
if num > i:
self.ballIcons[i].reparentTo(aspect2d, 60)
else:
self.ballIcons[i].reparentTo(hidden)
if self.world.gameOver:
self.show(Localizer.pDisplayStartMessage)
else:
self.show('%d %s' % (num, Localizer.pBallsLeft))
return True
def idle(self, taskInstance):
if not self.shouldIdle:
self.shouldIdle = True
taskMgr.doMethodLater(6, self.idle, 'idletimer')
return
if self.world.gameOver and self.idleState < 3:
self.idleState = 3
if self.idleState == 0:
self.show(self.idleMessages[self.idleState])
self.idleState = 1
elif self.idleState == 1:
self.show(self.idleMessages[self.idleState])
self.idleState = 2
elif self.idleState == 2:
self.show(self.idleMessages[self.idleState])
self.idleState = 0
elif self.idleState >= 3:
self.show(self.attractMessages[(self.idleState - 3)])
self.idleState = self.idleState + 1
if self.idleState - 3 == len(self.attractMessages):
self.idleState = 3
taskMgr.doMethodLater(4, self.idle, 'idletimer')
def pause(self):
if self.world.fromPalace:
self.playTimer.stop()
if self.grayScreen and not base.direct:
self.grayScreen.reparentTo(aspect2d, 90)
self.show(Localizer.pDisplayPauseMessage)
self.hudElements['leftInstructions1']['text'] = Localizer.pDisplayResumeInstructions
if self.controlLabels != None:
if len(self.controlLabels) == 2:
self.controlLabels[0].reparentTo(aspect2d, 100)
else:
for i in range(len(self.controlLabels) - 3):
self.controlLabels[i].reparentTo(aspect2d, 90)
if self.controlsOver:
self.exitButton.reparentTo(aspect2d, 100)
self.resumeButton.reparentTo(aspect2d, 100)
return
def unPause(self):
if self.world.fromPalace:
self.playTimer.playing()
if self.controlsOver:
if self.grayScreen:
self.grayScreen.reparentTo(hidden)
if self.controlLabels != None:
for cl in self.controlLabels:
cl.reparentTo(hidden)
self.hudElements['leftInstructions1']['text'] = Localizer.pDisplayExitInstructions
self.exitButton.reparentTo(hidden)
self.resumeButton.reparentTo(hidden)
return
def showContinue(self, showIt):
if showIt:
self.tutorialElements['continueOn'].reparentTo(aspect2d, 50)
else:
self.tutorialElements['continueOn'].reparentTo(hidden)
def unlockDisplay(self):
self.displayLocked = False
def show(self, text, alert=False, priority=False):
if self.hudState == 0 or self.hudState == 2:
return
if self.displayLocked:
return
if priority:
self.displayLocked = True
if isinstance(text, list):
text = text[0]
if not isinstance(text, str):
text = str(text)
if Localizer.myLanguage != 'japanese':
text = self.myUpper(text)
if | |
<reponame>TangleSpace/pytorch-widedeep<filename>pytorch_widedeep/datasets/_base.py
# dataframes are saved as parquet, pyarrow, brotli
# pd.to_parquet(path=None, engine="auto", compression="brotli", index=False)
# see related post: https://python.plainenglish.io/storing-pandas-98-faster-disk-reads-and-72-less-space-208e2e2be8bb
from importlib import resources
import pandas as pd
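# Illustrative sketch (not part of the original file): a minimal parquet
# round-trip matching the storage scheme described above, assuming the
# optional pyarrow dependency is installed.
def _parquet_roundtrip_example(path="example.parquet.brotli"):
    # write with brotli compression, then read the frame back
    df = pd.DataFrame({"a": [1, 2, 3], "b": ["x", "y", "z"]})
    df.to_parquet(path, engine="pyarrow", compression="brotli", index=False)
    return pd.read_parquet(path)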
def load_bio_kdd04(as_frame: bool = False):
"""Load and return the higly imbalanced binary classification Protein Homology
Dataset from [KDD cup 2004](https://www.kdd.org/kdd-cup/view/kdd-cup-2004/Data).
This datasets include only bio_train.dat part of the dataset
* The first element of each line is a BLOCK ID that denotes to which native sequence
this example belongs. There is a unique BLOCK ID for each native sequence.
BLOCK IDs are integers running from 1 to 303 (one for each native sequence,
i.e. for each query). BLOCK IDs were assigned before the blocks were split
into the train and test sets, so they do not run consecutively in either file.
* The second element of each line is an EXAMPLE ID that uniquely describes
the example. You will need this EXAMPLE ID and the BLOCK ID when you submit results.
* The third element is the class of the example. Proteins that are homologous to
the native sequence are denoted by 1, non-homologous proteins (i.e. decoys) by 0.
Test examples have a "?" in this position.
* All following elements are feature values. There are 74 feature values in each line.
The features describe the match (e.g. the score of a sequence alignment) between
the native protein sequence and the sequence that is tested for homology.
"""
# header_list = ["EXAMPLE_ID", "BLOCK_ID", "target"] + [str(i) for i in range(4, 78)]
with resources.path(
"pytorch_widedeep.datasets.data", "bio_train.parquet.brotli"
) as fpath:
df = pd.read_parquet(fpath)
if as_frame:
return df
else:
return df.to_numpy()
def load_adult(as_frame: bool = False):
"""Load and return the higly imbalanced binary classification [adult income datatest](http://www.cs.toronto.edu/~delve/data/adult/desc.html).
you may find detailed description [here](http://www.cs.toronto.edu/~delve/data/adult/adultDetail.html)
"""
with resources.path(
"pytorch_widedeep.datasets.data", "adult.parquet.brotli"
) as fpath:
df = pd.read_parquet(fpath)
if as_frame:
return df
else:
return df.to_numpy()
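# Usage sketch for the loaders in this module: as_frame=True returns the raw
# DataFrame, otherwise a NumPy array, e.g.
# df = load_adult(as_frame=True)   # -> pd.DataFrame
# X = load_adult()                 # -> np.ndarray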
def load_ecoli(as_frame: bool = False):
"""Load and return the higly imbalanced multiclass classification e.coli dataset
Dataset from [UCI Machine learning Repository](https://archive.ics.uci.edu/ml/datasets/ecoli).
1. Title: Protein Localization Sites
2. Creator and Maintainer:
<NAME>
Institute of Molecular and Cellular Biology
Osaka University
1-3 Yamada-oka, Suita 565 Japan
<EMAIL>
http://www.imcb.osaka-u.ac.jp/nakai/psort.html
Donor: <NAME> (<EMAIL>)
Date: September, 1996
See also: yeast database
3. Past Usage.
Reference: "A Probablistic Classification System for Predicting the Cellular
Localization Sites of Proteins", Paul Horton & Kenta Nakai,
Intelligent Systems in Molecular Biology, 109-115.
St. Louis, USA 1996.
Results: 81% for E.coli with an ad hoc structured
probability model. Also similar accuracy for Binary Decision Tree and
Bayesian Classifier methods applied by the same authors in
unpublished results.
Predicted Attribute: Localization site of protein. ( non-numeric ).
4. The references below describe a predecessor to this dataset and its
development. They also give results (not cross-validated) for classification
by a rule-based expert system with that version of the dataset.
Reference: "Expert Sytem for Predicting Protein Localization Sites in
Gram-Negative Bacteria", <NAME> & <NAME>,
PROTEINS: Structure, Function, and Genetics 11:95-110, 1991.
Reference: "A Knowledge Base for Predicting Protein Localization Sites in
Eukaryotic Cells", <NAME> & <NAME>,
Genomics 14:897-911, 1992.
5. Number of Instances: 336 for the E.coli dataset.
6. Number of Attributes.
for E.coli dataset: 8 ( 7 predictive, 1 name )
7. Attribute Information.
1. Sequence Name: Accession number for the SWISS-PROT database
2. mcg: McGeoch's method for signal sequence recognition.
3. gvh: von Heijne's method for signal sequence recognition.
4. lip: von Heijne's Signal Peptidase II consensus sequence score.
Binary attribute.
5. chg: Presence of charge on N-terminus of predicted lipoproteins.
Binary attribute.
6. aac: score of discriminant analysis of the amino acid content of
outer membrane and periplasmic proteins.
7. alm1: score of the ALOM membrane spanning region prediction program.
8. alm2: score of ALOM program after excluding putative cleavable signal
regions from the sequence.
8. Missing Attribute Values: None.
9. Class Distribution. The class is the localization site. Please see Nakai & Kanehisa referenced above for more details.
cp (cytoplasm) 143
im (inner membrane without signal sequence) 77
pp (perisplasm) 52
imU (inner membrane, uncleavable signal sequence) 35
om (outer membrane) 20
omL (outer membrane lipoprotein) 5
imL (inner membrane lipoprotein) 2
imS (inner membrane, cleavable signal sequence) 2
"""
with resources.path(
"pytorch_widedeep.datasets.data", "ecoli.parquet.brotli"
) as fpath:
df = pd.read_parquet(fpath)
if as_frame:
return df
else:
return df.to_numpy()
def load_california_housing(as_frame: bool = False):
"""Load and return the higly imbalanced regression California housing dataset.
Characteristics:
Number of Instances: 20640
Number of Attributes: 8 numeric, predictive attributes and the target
Attribute Information:
- MedInc median income in block group
- HouseAge median house age in block group
- AveRooms average number of rooms per household
- AveBedrms average number of bedrooms per household
- Population block group population
- AveOccup average number of household members
- Latitude block group latitude
- Longitude block group longitude
This dataset was obtained from the StatLib repository.
https://www.dcc.fc.up.pt/~ltorgo/Regression/cal_housing.html
The target variable is the median house value for California districts,
expressed in hundreds of thousands of dollars ($100,000).
This dataset was derived from the 1990 U.S. census, using one row per census
block group. A block group is the smallest geographical unit for which the U.S.
Census Bureau publishes sample data (a block group typically has a population
of 600 to 3,000 people).
A household is a group of people residing within a home. Since the average
number of rooms and bedrooms in this dataset are provided per household, these
columns may take surprisingly large values for block groups with few households
and many empty houses, such as vacation resorts.
References
----------
<NAME> and <NAME>, Sparse Spatial Autoregressions,
Statistics and Probability Letters, 33 (1997) 291-297.
"""
with resources.path(
"pytorch_widedeep.datasets.data", "california_housing.parquet.brotli"
) as fpath:
df = pd.read_parquet(fpath)
if as_frame:
return df
else:
return df.to_numpy()
def load_birds(as_frame: bool = False):
"""Load and return the multi-label classification bird dataset.
References
----------
http://mulan.sourceforge.net/datasets-mlc.html
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME> <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
"The 9th annual MLSP competition: New methods for acoustic classification of multiple
simultaneous bird species in a noisy environment", in proc. 2013 IEEE International Workshop
on Machine Learning for Signal Processing (MLSP)
"""
with resources.path(
"pytorch_widedeep.datasets.data", "birds_train.parquet.brotli"
) as fpath:
df = pd.read_parquet(fpath)
if as_frame:
return df
else:
return df.to_numpy()
def load_rf1(as_frame: bool = False):
"""Load and return the multi-target regression River Flow(RF1) dataset.
Characterisctics:
The river flow data set (RF1) concerns a prediction task in which flows in a river network are
predicted for 48 hours in the future at 8 different locations in the Mississippi River network
in the United States [18]. RF1 is one of the multi-target regression problems listed in the
literature survey on multi-target regression problems by Borchani et al. [2], and therefore
serves as a good test case for the active learning algorithm. Each row includes the most recent
observation for each of the 8 sites as well as time-lagged observations from 6, 12, 18, 24, 36,
48 and 60 hours in the past. Therefore, the data set consists of 64 attribute variables
and 8 target variables. The data set contains over 1 year of hourly observations (over 9000
data points) collected from September 2011 to September 2012 by the US National Weather
Service. From these 9000 data points, 1000 points have been randomly sampled for training
and 2000 for evaluation.
"""
with resources.path(
"pytorch_widedeep.datasets.data", "rf1_train.parquet.brotli"
) as fpath:
df = pd.read_parquet(fpath)
if as_frame:
return df
else:
return df.to_numpy()
def load_womens_ecommerce(as_frame: bool = False):
"""
Context
This is a Women’s Clothing E-Commerce dataset revolving around the reviews written by customers.
Its nine supportive features offer a great environment to parse out the text through its multiple
dimensions. Because this is real commercial data, it has been anonymized, and references to the company
in the review text | |
"""
Time module.
This is a work-in-progress port that I started from the time module in **Transcrypt**.
No:
- Platform specific functions
- sleep. In js currently not possible in browsers
except via busy loops, we don't do that.
- struct_time CLASS. we work only via the tuple interface of it.
- handling of weird stuff.
e.g.: In Europe/Simferopol (Ukraine) the UTC offset before 1924 was +2.67
Spec for everything below (keep it open while reading this module):
> https://docs.python.org/3.5/library/time.html
Jul 2016, <NAME>, Axiros GmbH
"""
try:
__language = window.navigator.language
except:
__language = 'en-US'
def __debugGetLanguage ():
return __language
def __adapt__ (request): # Needed when running on top of Node.js rather than in browser
global __language
__language = request.headers ['accept-language'] .split (',')[0]
# js date object. might be modified during calculations:
__date = new Date(0)
__now = new Date()
# build the locale's weekday names
__weekdays = []
__weekdays_long = []
__d = new Date(1467662339080) # a monday
for i in range(7):
for l, s in [(__weekdays, 'short'), (__weekdays_long, 'long')]:
l.append(__d.toLocaleString(__language,
{'weekday': s}).toLowerCase())
__d.setDate(__d.getDate() + 1)
# build the locale's months names
__months = []
__months_long = []
__d = new Date(946681200000.0) # 1.1.2000
for i in range(12):
for l, s in [(__months, 'short'), (__months_long, 'long')]:
l.append(__d.toLocaleString(__language,
{'month': s}).toLowerCase())
__d.setMonth(__d.getMonth() + 1)
# lookup for positions directives in struct_time tuples:
# its a 9-sequence
# time.struct_time(tm_year=2016, tm_mon=7, tm_mday=19, tm_hour=2,
# tm_min=24, tm_sec=2, tm_wday=1, tm_yday=201,
# tm_isdst=1)
__lu = {'Y': 0, 'm': 1, 'd': 2, 'H': 3, 'M': 4, 'S': 5}
def _lsplit(s, sep, maxsplit):
""" not yet in TS """
if maxsplit == 0:
return [s]
split = s.split(sep)
if not maxsplit:
return split
ret = split.slice(0, maxsplit, 1)
if len(ret) == len(split):
return ret
ret.append(sep.join(split[maxsplit:]))
return ret
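# e.g. _lsplit('a,b,c,d', ',', 2) -> ['a', 'b', 'c,d'], matching CPython's
# str.split(sep, maxsplit) semantics.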
def _local_time_tuple(jd):
""" jd: javascript Date object, from unixtimestamp """
res = ( jd.getFullYear()
,jd.getMonth() + 1 # zero based
,jd.getDate()
,jd.getHours()
,jd.getMinutes()
,jd.getSeconds()
,jd.getDay() - 1 if jd.getDay() > 0 else 6
,_day_of_year(jd, True)
,_daylight_in_effect(jd)
,jd.getMilliseconds() # not in use by the pub API
)
return res
def _utc_time_tuple(jd):
""" jd: javascript Date object, from unixtimestamp """
res = ( jd.getUTCFullYear()
,jd.getUTCMonth() + 1 # zero based
,jd.getUTCDate()
,jd.getUTCHours()
,jd.getUTCMinutes()
,jd.getUTCSeconds()
,jd.getUTCDay() - 1 if jd.getUTCDay() > 0 else 6
,_day_of_year(jd, False)
,0 # is dst for utc: 0
,jd.getUTCMilliseconds()
)
return res
def _day_of_year(jd, local):
# check if jd hours are ahead of UTC less than the offset to it:
day_offs = 0
if jd.getHours() + jd.getTimezoneOffset() * 60 / 3600 < 0:
day_offs = -1
was = jd.getTime()
cur = jd.setHours(23)
jd.setUTCDate(1)
jd.setUTCMonth(0)
jd.setUTCHours(0)
jd.setUTCMinutes(0)
jd.setUTCSeconds(0)
res = Math.round((cur - jd) / 86400000 )
#res = round(((jd.setHours(23) - new Date(jd.getYear(), 0, 1, 0, 0, 0)
# ) / 1000 / 60 / 60 / 24))
if not local:
res += day_offs
if res == 0:
res = 365
jd.setTime(jd.getTime() - 86400)
last_year = jd.getUTCFullYear()
if _is_leap(last_year):
res = 366
jd.setTime(was)
return res
def _is_leap(year):
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
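# e.g. _is_leap(2000) -> True, _is_leap(1900) -> False (divisible by 100 but
# not by 400), _is_leap(2024) -> True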
def __jan_jun_tz(t, func):
""" information about local jan and jun month of a t's year
default is to deliver timezone offset, but a function can be handed to us,
which we'll run on those two months
"""
# required to detect dst (daylight saving time) in effect:
was = t.getTime() # avoid new date objs
t.setDate(1)
res = []
for m in 0, 6:
t.setMonth(m)
if not func:
res.append(t.getTimezoneOffset())
else:
res.append(func(t))
t.setTime(was)
return res
def _daylight(t):
"""
http://stackoverflow.com/questions/11887934/
check-if-daylight-saving-time-is-in-effect-and-if-it-is-for-how-many-hours
return 0 or 1 like python
CAUTION: https://docs.python.org/2/library/time.html#time.daylight:
"Nonzero if a DST timezone is DEFINED." (but not necessarily in effect!!)
-> we just check if there is a delta of tz offsets between June and January
of the year of t:
"""
jj = __jan_jun_tz(t)
if jj[0] != jj[1]:
# daylight saving is DEFINED, since there's a difference in tz offsets
# in jan and jun, in the year of t:
return 1
return 0
def _daylight_in_effect(t):
jj = __jan_jun_tz(t)
if min(jj[0], jj[1]) == t.getTimezoneOffset():
return 1
return 0
def _timezone(t):
jj = __jan_jun_tz(t)
# in southern hemisphere the daylight saving is in winter months!
return max(jj[0], jj[1])
def __tzn(t):
# depending on browser ? new Date() -> Wed Jul... (CEST)
try:
return str(t).split('(')[1].split(')')[0]
except:
# better no crash:
return 'n.a.'
def _tzname(t):
'''the first is the name of the local non-DST timezone,
the second is the name of the local DST timezone.'''
cn = __tzn(t)
ret = [cn, cn]
jj = __jan_jun_tz(t, __tzn)
ind = 0
if not _daylight_in_effect(t):
ind = 1
for i in jj:
if i != cn:
ret[ind] = i
return ret
# ------------------------------------------------------------------ Public API
# we calc those only once. I mean - we run in the browser in the end.
altzone = __now.getTimezoneOffset()
if not _daylight_in_effect(__now):
# then we must use the other offset we have in the current year:
_jj = __jan_jun_tz(__now)
altzone = _jj[0] if altzone == _jj[1] else _jj[1]
altzone = altzone * 60
timezone = _timezone(__now) * 60
daylight = _daylight(__now)
tzname = _tzname(__now)
def time():
"""
time() -> floating point number\n\nReturn the current time in seconds
since the Epoch.
Fractions of a second may be present if the system clock provides them.
"""
return Date.now() / 1000
def asctime(t):
return strftime('%a %b %d %H:%M:%S %Y', t)
def mktime(t):
''' inverse of localtime '''
d = new Date(t[0], t[1] - 1, t[2], t[3], t[4], t[5], 0)
return (d - 0) / 1000
def ctime(seconds):
"""
ctime(seconds) -> string
Convert a time in seconds since the Epoch to a string in local time.
This is equivalent to asctime(localtime(seconds)). When the time tuple is
not present, current time as returned by localtime() is used.'
"""
if not seconds:
seconds = time()
return asctime(localtime(seconds))
def localtime(seconds):
"""
localtime([seconds]) -> (tm_year,tm_mon,tm_mday,tm_hour,tm_min,
tm_sec,tm_wday,tm_yday,tm_isdst)
Convert seconds since the Epoch to a time tuple expressing local time.
When 'seconds' is not passed in, convert the current time instead.
"""
if not seconds:
seconds = time()
return gmtime(seconds, True)
def gmtime(seconds, localtime):
"""
gmtime(seconds[, localtime]) -> (tm_year,tm_mon,tm_mday,tm_hour,tm_min,
tm_sec,tm_wday,tm_yday,tm_isdst)
Convert seconds since the Epoch to a time tuple expressing UTC, or local
time when the `localtime` flag is truthy. When 'seconds' is not passed in,
convert the current time instead.
"""
if not seconds:
seconds = time()
millis = seconds * 1000
__date.setTime(millis)
if localtime:
t = _local_time_tuple(__date)
else:
t = _utc_time_tuple(__date)
return t[:9]
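# Usage sketch: both functions return the 9-tuple documented above, e.g.
# localtime(time())       # local wall-clock fields for "now"
# gmtime(time(), False)   # the same instant expressed in UTC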
# ----------------------------------------------------------------------------
# now the workhorses:
def strptime(string, format):
"""
strptime(string, format) -> struct_time
Parse a string to a time tuple according to a format specification.
See the library reference manual for formatting codes (same as
strftime()).
Commonly used format codes:
%Y Year with century as a decimal number.
%m Month as a decimal number [01,12].
%d Day of the month as a decimal number [01,31].
%H Hour (24-hour clock) as a decimal number [00,23].
%M Minute as a decimal number [00,59].
%S Second as a decimal number [00,61].
%z Time zone offset from UTC.
%a Locale's abbreviated weekday name.
%A Locale's full weekday name.
%b Locale's abbreviated month name.
%B Locale's full month name.
%c Locale's appropriate date and time representation.
%I Hour (12-hour clock) as a decimal number [01,12].
%p Locale's equivalent of either AM or PM.
Tradeoffs of this Transcrypt implementation:
1. platform-specific codes not supported
2. %% and %c not supported
"""
if not format:
format = "%a %b %d %H:%M:%S %Y"
ts, fmt = string, format
def get_next(fmt):
''' returns next directive, next separator, rest of format str'''
def get_sep(fmt):
res = []
if not fmt:
return '', ''
for i in range(len(fmt)-1):
c = fmt[i]
if c == '%':
break
res.append(c)
return ''.join(res), fmt[i:]
# return next separator:
d, sep, f = None, None, None
if fmt:
if fmt[0] == '%':
d = fmt[1]
sep, f = get_sep(fmt[2:])
else:
sep, f = get_sep(fmt)
return d, sep, f
# directive / value tuples go in here:
dir_val = {}
while ts:
d, sep, fmt = get_next(fmt)
if sep == '':
lv = None
if d:
# we have a directive, separator is empty. Is the directive
# fixed length, with the next one w/o sep? e.g. %Y%Z ?
# then get the next one | |
self.output_dropout(
decoder_outputs, deterministic=not enable_dropout)
if logit_mask is not None:
decoder_outputs = logit_mask * decoder_outputs
if self.sow_intermediates:
self.sow('intermediates', 'pre_logits_layer', decoder_outputs)
# Decoded Logits
if self.logits_dense is not None:
logits = self.logits_dense(decoder_outputs)
else:
# Use the transpose of embedding matrix for logit transform.
#
# TODO: Module subclass API if we want to keep using this.
logits = self.embedder.embedders['token_ids'].attend(decoder_outputs) # pytype: disable=attribute-error
# Correctly normalize pre-softmax logits for this shared case.
logits = logits / jnp.sqrt(decoder_outputs.shape[-1])
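# (Rationale, as in T5: with tied input/output embeddings the logits
# inherit the embedding scale, so dividing by sqrt(d_model) keeps their
# variance comparable to an independently initialized output projection.)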
return logits
def __call__(self,
encoder_outputs,
decoder_input_tokens,
decoder_positions=None,
decoder_mask=None,
encoder_decoder_mask=None,
*,
segment_ids: Optional[Array] = None,
enable_dropout: bool = True,
decode: bool = False,
max_decode_length: Optional[int] = None,
prefill: bool = False,
prefill_lengths: Optional[Array] = None):
"""Applies Transformer model on the inputs.
TODO: For consistency it would be better to flip the order of the
first two positional arguments here.
Args:
encoder_outputs: The outputs from the encoder. If None, do not attend to
encoder outputs, resulting in a decoder only model (i.e. language
model).
decoder_input_tokens: The decoder input token IDs.
decoder_positions: Decoder subsequence positions for packed examples.
decoder_mask: Decoder self-attention mask.
encoder_decoder_mask: The attention mask for the encoder outputs.
segment_ids: Input segmentation info for packed examples.
enable_dropout: Enables dropout if set to True.
decode: Whether to prepare and use an autoregressive cache.
max_decode_length: An optional integer specifying the maximum decoding
length. Note that this is only used for defining the relative position
embedding parameters.
prefill: Whether to run a partial sequence to prefill the cache.
prefill_lengths: The length of each partial sequence we are filling in the
cache, lengths are inferred from the mask if not provided.
Returns:
The decoder output logits for next token prediction.
"""
embedded_inputs = self.embed_and_combine_inputs(
decoder_input_tokens,
decoder_positions=decoder_positions,
segment_ids=segment_ids,
enable_dropout=enable_dropout,
decode=decode,
)
logit_mask = dense_attention.get_decoder_logit_mask(decoder_input_tokens,
embedded_inputs.dtype)
logits = self.decode_from_continuous_inputs(
embedded_inputs,
encoder_outputs,
decoder_positions=decoder_positions,
decoder_mask=decoder_mask,
encoder_decoder_mask=encoder_decoder_mask,
logit_mask=logit_mask,
enable_dropout=enable_dropout,
decode=decode,
max_decode_length=max_decode_length,
prefill=prefill,
prefill_lengths=prefill_lengths)
return logits
def _call_optional(
fn: Optional[Callable[[], nn.Module]]) -> Optional[nn.Module]:
return fn() if fn else None
class EncoderDecoder(nn.Module, param_remapping.ParameterRemappable):
"""Transformer Model for sequence to sequence translation.
Attributes:
encoder_factory: A callable that returns the lower-level Encoder object. If
shared_token_embedder_factory is non-None, then the result of it will be
passed as the `shared_token_embedder` argument to `encoder_factory`.
decoder_factory: A callable that returns the lower-level Decoder object. If
shared_token_embedder_factory is non-None, then the result of it will be
passed as the `shared_token_embedder` argument to `decoder_factory`.
dtype: DType for encoder/decoder to cast embedded inputs, and for attention
mask generation.
scan_layers: whether to scan over layers.
shared_token_embedder_factory: A callable that returns an embedder that can
be shared between the encoder and decoder.
"""
# Core components: encoder and decoder embedders and layers.
encoder_factory: MakeEncoderFn
decoder_factory: MakeDecoderFn
# Configures behavior when the model is called. Many of these might eventually
# be better as call parameters.
dtype: DType = jnp.float32
scan_layers: bool = False # only used to pass this option to predict_fn.
spmd_annotations: Any = None # only used for scanned spmd layers
shared_token_embedder_factory: Optional[Callable[[], embedding.Embed]] = None
def setup(self):
self.token_embedder = (
self.shared_token_embedder_factory()
if self.shared_token_embedder_factory else None)
# TODO: Clean up SPMD annotation code.
if self.spmd_annotations is None:
encoder_annotations = None
decoder_annotations = None
else:
encoder_annotations = self.spmd_annotations['encoder']
decoder_annotations = self.spmd_annotations['decoder']
encoder_factory_params = tuple(
inspect.signature(self.encoder_factory).parameters.keys())
if 'spmd_annotations' in encoder_factory_params:
self.encoder = self.encoder_factory(
shared_token_embedder=self.token_embedder,
spmd_annotations=encoder_annotations)
else:
self.encoder = self.encoder_factory(
shared_token_embedder=self.token_embedder)
decoder_factory_params = tuple(
inspect.signature(self.decoder_factory).parameters.keys())
if 'spmd_annotations' in decoder_factory_params:
self.decoder = self.decoder_factory(
shared_token_embedder=self.token_embedder,
spmd_annotations=decoder_annotations)
else:
self.decoder = self.decoder_factory(
shared_token_embedder=self.token_embedder)
def encode(self,
encoder_input_tokens,
encoder_segment_ids=None,
encoder_positions=None,
*,
enable_dropout: bool = True):
"""Applies Transformer encoder-branch on the inputs.
Args:
encoder_input_tokens: input data to the encoder.
encoder_segment_ids: encoder input segmentation info for packed examples.
encoder_positions: encoder input subsequence positions for packed
examples.
enable_dropout: Enables dropout if set to True.
Returns:
encoded feature array from the transformer encoder.
"""
# Make padding attention mask.
encoder_mask = dense_attention.make_attention_mask(
encoder_input_tokens > 0, encoder_input_tokens > 0, dtype=self.dtype)
# Add segmentation block-diagonal attention mask if using segmented data.
if encoder_segment_ids is not None:
encoder_mask = dense_attention.combine_masks(
encoder_mask,
dense_attention.make_attention_mask(
encoder_segment_ids,
encoder_segment_ids,
jnp.equal,
dtype=self.dtype))
return self.encoder( # pytype: disable=attribute-error
encoder_input_tokens,
inputs_positions=encoder_positions,
encoder_mask=encoder_mask,
segment_ids=encoder_segment_ids,
enable_dropout=enable_dropout)
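# Mask sketch (assumption: 0 is the padding id): for a batch [[5, 7, 0]],
# make_attention_mask(x > 0, x > 0) yields a [batch, 1, 3, 3] mask with the
# row and column of the padded position zeroed out.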
def decode(
self,
encoded,
encoder_input_tokens, # only needed for masks
decoder_input_tokens,
decoder_target_tokens,
encoder_segment_ids=None,
decoder_segment_ids=None,
decoder_positions=None,
*,
enable_dropout: bool = True,
decode: bool = False,
max_decode_length: Optional[int] = None):
"""Applies Transformer decoder-branch on encoded-input and target.
Args:
encoded: encoded input data from encoder.
encoder_input_tokens: input to the encoder (only needed for masking).
decoder_input_tokens: input token to the decoder.
decoder_target_tokens: target token to the decoder.
encoder_segment_ids: encoder segmentation info for packed examples.
decoder_segment_ids: decoder segmentation info for packed examples.
decoder_positions: decoder subsequence positions for packed examples.
enable_dropout: Enables dropout if set to True.
decode: Whether to prepare and use an autoregressive cache.
max_decode_length: An optional integer specifying the maximum decoding
length. Note that this is only used for defining the relative position
embedding parameters.
Returns:
logits array from transformer decoder.
"""
# Make padding attention masks.
if decode:
# Do not mask decoder attention based on targets padding at
# decoding/inference time.
decoder_mask = None
encoder_decoder_mask = dense_attention.make_attention_mask(
jnp.ones_like(decoder_target_tokens),
encoder_input_tokens > 0,
dtype=self.dtype)
else:
decoder_mask = dense_attention.make_decoder_mask(
decoder_target_tokens=decoder_target_tokens,
dtype=self.dtype,
decoder_segment_ids=decoder_segment_ids)
encoder_decoder_mask = dense_attention.make_attention_mask(
decoder_target_tokens > 0, encoder_input_tokens > 0, dtype=self.dtype)
# Add segmentation block-diagonal attention masks if using segmented data.
if encoder_segment_ids is not None:
if decode:
raise ValueError(
'During decoding, packing should not be used but '
'`encoder_segment_ids` was passed to `Transformer.decode`.')
encoder_decoder_mask = dense_attention.combine_masks(
encoder_decoder_mask,
dense_attention.make_attention_mask(
decoder_segment_ids,
encoder_segment_ids,
jnp.equal,
dtype=self.dtype))
# When computing the logits, we don't need decoder_target_tokens, which is
# needed for computing the loss.
return self.decoder(
encoded,
decoder_input_tokens=decoder_input_tokens,
decoder_positions=decoder_positions,
decoder_mask=decoder_mask,
encoder_decoder_mask=encoder_decoder_mask,
segment_ids=decoder_segment_ids,
enable_dropout=enable_dropout,
decode=decode,
max_decode_length=max_decode_length)
@property
def encoder_embedder(self) -> embedding.MultiEmbed:
return self.encoder.embedder
@property
def decoder_embedder(self) -> embedding.MultiEmbed:
return self.decoder.embedder
def __call__(self,
encoder_input_tokens,
decoder_input_tokens,
decoder_target_tokens,
encoder_segment_ids=None,
decoder_segment_ids=None,
encoder_positions=None,
decoder_positions=None,
*,
enable_dropout: bool = True,
decode: bool = False,
max_decode_length: Optional[int] = None):
"""Applies Transformer model on the inputs.
This method requires both decoder_target_tokens and decoder_input_tokens,
which is a shifted version of the former. For a packed dataset, it usually
has additional processing applied. For example, the first element of each
sequence has id 0 instead of the shifted EOS id from the previous sequence.
Args:
encoder_input_tokens: input data to the encoder.
decoder_input_tokens: input token to the decoder.
decoder_target_tokens: target token to the decoder.
encoder_segment_ids: encoder segmentation info for packed examples.
decoder_segment_ids: decoder segmentation info for packed examples.
encoder_positions: encoder subsequence positions for packed examples.
decoder_positions: decoder subsequence positions for packed examples.
enable_dropout: Enables dropout if set to True.
decode: Whether to prepare and use an autoregressive cache.
max_decode_length: An optional integer specifying the maximum decoding
length. Note that this is only used for defining the relative position
embedding parameters.
Returns:
logits array from full transformer.
"""
encoded = self.encode(
encoder_input_tokens,
encoder_segment_ids=encoder_segment_ids,
encoder_positions=encoder_positions,
enable_dropout=enable_dropout)
return self.decode(
encoded,
encoder_input_tokens, # Only used for masks.
decoder_input_tokens,
decoder_target_tokens,
encoder_segment_ids=encoder_segment_ids,
decoder_segment_ids=decoder_segment_ids,
decoder_positions=decoder_positions,
enable_dropout=enable_dropout,
decode=decode,
max_decode_length=max_decode_length)
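# Illustrative sketch (hypothetical helper, not from this file): the shifted
# relationship between decoder targets and inputs described in the docstring
# above, assuming 0 is the BOS/padding id.
def _shift_right_example(decoder_target_tokens):
    # [[17, 42, 99, 1]] (ends with EOS=1) -> [[0, 17, 42, 99]]
    return jnp.pad(decoder_target_tokens, [(0, 0), (1, 0)])[:, :-1]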
class DecoderOnly(nn.Module, param_remapping.ParameterRemappable):
"""Decoder-only model.
This model sets up the relevant masking and uses Decoder to do the heavy
lifting.
Attributes:
decoder_factory: Factory which will make the lower-level Decoder object. In
the DecoderOnly usage, it will always be called with
`shared_token_embedder` as None.
dtype: DType for encoder/decoder to cast embedded inputs, and for attention
mask generation.
"""
# Core sub-component.
decoder_factory: MakeDecoderFn
# Only used to pass this option to predict_fn.
scan_layers: bool = False
# Configures behavior when the model is called. Many of these might eventually
# be better as call parameters.
dtype: DType = jnp.float32
def setup(self):
self.decoder = self.decoder_factory(shared_token_embedder=None)
def __call__(self,
decoder_input_tokens,
decoder_target_tokens,
decoder_segment_ids=None,
decoder_positions=None,
decoder_causal_attention=None,
*,
enable_dropout: bool = True,
decode: bool = False,
max_decode_length: Optional[int] = None,
prefill: bool = False,
prefill_lengths: Optional[Array] = None):
"""Applies LanguageModel on the inputs.
This method requires both decoder_target_tokens and decoder_input_tokens,
which is typically a shifted version of the former. For a packed dataset, it
usually has additional processing applied. For example, the first element of
each sequence has id 0 instead of the shifted EOS id from | |
transforms=True) or []))
if child_joints:
next_joint = child_joints[0]
else:
end_joint = next_joint
next_joint = None
return end_joint
def get_joint_list(start_joint, end_joint):
"""
Get list of joints between and including given start and end joint
:param start_joint: str, start joint of joint list
:param end_joint: str, end joint of joint list
:return: list<str>
"""
check_joint(start_joint)
check_joint(end_joint)
if start_joint == end_joint:
return [start_joint]
# Check hierarchy
descendant_list = maya.cmds.ls(maya.cmds.listRelatives(start_joint, ad=True), type='joint')
if not descendant_list.count(end_joint):
raise Exception('End joint "{}" is not a descendant of start joint "{}"'.format(end_joint, start_joint))
joint_list = [end_joint]
while joint_list[-1] != start_joint:
parent_jnt = maya.cmds.listRelatives(joint_list[-1], p=True, pa=True)
if not parent_jnt:
raise Exception('Found root joint while searching for start joint "{}"'.format(start_joint))
joint_list.append(parent_jnt[0])
joint_list.reverse()
return joint_list
def get_length(joint):
"""
Returns the length of a given joint
:param joint: str, joint to query length from
:return: str
"""
check_joint(joint)
child_joints = maya.cmds.ls(maya.cmds.listRelatives(joint, c=True, pa=True) or [], type='joint')
if not child_joints:
return 0.0
max_length = 0.0
for child_jnt in child_joints:
pt1 = transform.get_position(joint)
pt2 = transform.get_position(child_jnt)
offset = mathlib.offset_vector(pt1, pt2)
length = mathlib.magnitude(offset)
if length > max_length:
max_length = length
return max_length
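# Hypothetical demo (not part of the original module): combining the two
# helpers above to measure a whole chain, assuming joints 'joint1'..'joint3'
# exist in the scene.
def example_chain_length(start_joint='joint1', end_joint='joint3'):
    chain = get_joint_list(start_joint, end_joint)
    return sum(get_length(jnt) for jnt in chain)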
def duplicate_joint(joint, name=None):
"""
Duplicates a given joint
:param joint: str, joint to duplicate
:param name: variant, str || None, new name for duplicated joint. If None, leave as default
:return: str
"""
check_joint(joint)
if not name:
name = joint + '_dup'
if maya.cmds.objExists(str(name)):
raise Exception('Joint "{}" already exists!'.format(name))
dup_joint = maya.cmds.duplicate(joint, po=True)[0]
if name:
dup_joint = maya.cmds.rename(dup_joint, name)
# Unlock transforms
for attr in ['tx', 'ty', 'tz', 'rx', 'ry', 'rz', 'sx', 'sy', 'sz', 'v', 'radius']:
maya.cmds.setAttr(dup_joint + '.' + attr, lock=False, cb=True)
return dup_joint
def duplicate_chain(start_jnt, end_jnt=None, parent=None, skip_jnt=None, prefix=None):
"""
Duplicates a joint chain based on start and end joint
:param start_jnt: str, start joint of chain
:param end_jnt: str, end joint of chain. If None, use end of current chain
:param parent: str, parent transform for new chain
:param skip_jnt: variant, str ||None, skip joints in chain that match name pattern
:param prefix: variant, str ||None, new name prefix
:return: list<str>, list of duplicate joints
"""
if not maya.cmds.objExists(start_jnt):
raise Exception('Start joint "{}" does not exists!'.format(start_jnt))
if end_jnt and not maya.cmds.objExists(str(end_jnt)):
raise Exception('End joint "{}" does not exists!'.format(end_jnt))
if parent:
if not maya.cmds.objExists(parent):
raise Exception('Given parent transform "{}" does not eixsts!'.format(parent))
if not transform.is_transform(parent):
raise Exception('Parent object "{}" is not a valid transform!'.format(parent))
if not end_jnt:
end_jnt = get_end_joint(start_jnt=start_jnt)
joints = get_joint_list(start_joint=start_jnt, end_joint=end_jnt)
skip_joints = maya.cmds.ls(skip_jnt) if skip_jnt else list()
dup_chain = list()
for i in range(len(joints)):
if joints[i] in skip_joints:
continue
name = None
if prefix:
jnt_index = strings.get_alpha(i, capital=True)
if i == (len(joints) - 1):
jnt_index = 'End'
name = prefix + jnt_index + '_jnt'
jnt = duplicate_joint(joint=joints[i], name=name)
if not i:
if not parent:
if maya.cmds.listRelatives(jnt, p=True):
try:
maya.cmds.parent(jnt, w=True)
except Exception:
pass
else:
try:
maya.cmds.parent(jnt, parent)
except Exception:
pass
else:
try:
maya.cmds.parent(jnt, dup_chain[-1])
if not maya.cmds.isConnected(dup_chain[-1] + '.scale', jnt + '.inverseScale'):
maya.cmds.connectAttr(dup_chain[-1] + '.scale', jnt + '.inverseScale', f=True)
except Exception as e:
raise Exception('Error while duplicating joint chain! - {}'.format(str(e)))
dup_chain.append(jnt)
return dup_chain
def joint_buffer(joint, index_str=0):
"""
Creates a joint buffer group in the given joint
:param joint: str, name of the joint we want to create buffer for
:param index_str: int, string index
:return: str, name of the joint buffer group created
"""
if not maya.cmds.objExists(joint):
raise Exception('Joint "{}" does not exists!'.format(joint))
if not index_str:
result = maya.cmds.promptDialog(
title='Index String',
message='Joint Group Index',
text='0',
button=['Create', 'Cancel'],
defaultButton='Create',
cancelButton='Cancel',
dismissString='Cancel'
)
if result == 'Create':
index_str = maya.cmds.promptDialog(q=True, text=True)
else:
LOGGER.warning('User canceled joint group creation ...')
return
# Get joint prefix and create joint buffer group
prefix = strings.strip_suffix(joint)
grp = maya.cmds.duplicate(joint, po=True, n=prefix + 'Buffer' + index_str + '_jnt')[0]
maya.cmds.parent(joint, grp)
if maya.cmds.getAttr(grp + '.radius', se=True):
try:
maya.cmds.setAttr(grp + '.radius', 0)
except Exception:
pass
# Connect inverse scale
inverse_scale_cnt = maya.cmds.listConnections(joint + '.inverseScale', s=True, d=False)
if not inverse_scale_cnt:
inverse_scale_cnt = list()
if not inverse_scale_cnt.count(grp):
try:
maya.cmds.connectAttr(grp + '.scale', joint + '.inverseScale', f=True)
except Exception:
pass
# Delete user attributes
user_attrs = maya.cmds.listAttr(grp, ud=True)
if user_attrs:
for attr in user_attrs:
if maya.cmds.objExists(grp + '.' + attr):
maya.cmds.setAttr(grp + '.' + attr, lock=False)
maya.cmds.deleteAttr(grp + '.' + attr)
node.display_override(obj=joint, override_enabled=True, override_lod=0)
node.display_override(obj=grp, override_enabled=True, override_display=2, override_lod=1)
def set_draw_style(joints, draw_style='bone'):
"""
Set joint draw style for the given joints
:param joints: list<str>, list of joints to set draw style for
:param draw_style: str, draw style to apply to the given joints ("bone", "box", "none")
:return: list<str>, list of joints which draw styles have been changed
"""
if not joints:
raise Exception('No joints given!')
draw_style = draw_style.lower()
if draw_style not in ['bone', 'box', 'none']:
raise Exception('Invalid draw style ("{}")! Accepted values are "bone", "box", "none"'.format(draw_style))
if type(joints) not in [list, tuple]:
joints = [joints]
for jnt in joints:
if not is_joint(jnt):
continue
if draw_style == 'bone':
maya.cmds.setAttr('{}.drawStyle'.format(jnt), 0)
elif draw_style == 'box':
maya.cmds.setAttr('{}.drawStyle'.format(jnt), 1)
elif draw_style == 'none':
maya.cmds.setAttr('{}.drawStyle'.format(jnt), 2)
return joints
def create_from_point_list(point_list, orient=False, side='c', part='chain', suffix='jnt'):
"""
Create joint chain from a list of point positions
:param point_list: list<tuple>, list of points to create joint chain from
:param orient: bool, Whether to orient or not the joints
:param side: str, joint side name prefix
:param part: str, joint part name
:param suffix: str, joint suffix name
:return: list<str>, list of new created joints
"""
maya.cmds.select(clear=True)
joint_list = list()
for i in range(len(point_list)):
jnt = maya.cmds.joint(p=point_list[i], n='{}_{}{}_{}'.format(side, part, str(i + 1), suffix))
if i and orient:
maya.cmds.joint(joint_list[-1], e=True, zso=True, oj='xyz', sao='yup')
joint_list.append(jnt)
return joint_list
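# Usage sketch: a two-joint chain along Y, oriented down the chain.
# create_from_point_list([(0, 0, 0), (0, 5, 0)], orient=True, part='spine')
# -> ['c_spine1_jnt', 'c_spine2_jnt']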
def orient(joint, aim_axis='x', up_axis='y', up_vector=(0, 1, 0)):
"""
Orient joints based on user defined vectors
:param joint: str, joints to orient
:param aim_axis: str, axis to be aligned down the length of the joint
:param up_axis: str, axis to be aligned with the world vector given by up vector
:param up_vector: tuple<int>, world vector to align up axis to
"""
check_joint(joint)
child_list = maya.cmds.listRelatives(joint, c=True)
child_joint_list = maya.cmds.listRelatives(joint, c=True, type='joint', pa=True)
if child_list:
child_list = maya.cmds.parent(child_list, world=True)
if not child_joint_list:
maya.cmds.setAttr('{}.jo'.format(joint), 0, 0, 0)
else:
parent_matrix = maya.api.OpenMaya.MMatrix()
parent_joint = maya.cmds.listRelatives(joint, p=True, pa=True)
if parent_joint:
parent_matrix = transform.get_matrix(parent_joint[0])
# Aim Vector
aim_point_1 = transform.get_position(joint)
aim_point_2 = transform.get_position(child_joint_list[0])
aim_vector = mathlib.offset_vector(aim_point_1, aim_point_2)
target_matrix = matrix_utils.build_rotation(aim_vector, up_vector, aim_axis, up_axis)
orient_matrix = target_matrix * parent_matrix.inverse()
# Extract joint orient values
rotation_order = maya.cmds.getAttr('{}.ro'.format(joint))
orient_rotation = matrix_utils.get_rotation(orient_matrix, rotation_order=rotation_order)
# Reset joint rotation and orientation
maya.cmds.setAttr('{}.r'.format(joint), 0, 0, 0)
maya.cmds.setAttr('{}.jo'.format(joint), orient_rotation[0], orient_rotation[1], orient_rotation[2])
# Reparent children
if child_list:
maya.cmds.parent(child_list, joint)
def orient_to(joint, target):
"""
Matches given joint orientation to given transform
:param joint: str, joint to set orientation for
:param target: str, transform to match joint orientation to
"""
if not maya.cmds.objExists(joint):
raise Exception('Joint "{}" does not exist!'.format(joint))
if not maya.cmds.objExists(target):
raise Exception('Target "{}" does not exist!'.format(target))
if not transform.is_transform(target):
raise Exception('Target "{}" is not a valid transform!'.format(target))
# Unparent children
child_list = maya.cmds.listRelatives(joint, c=True, type=['joint', 'transform'])
if child_list:
child_list = maya.cmds.parent(child_list, world=True)
# Get parent joint matrix
parent_matrix = maya.api.OpenMaya.MMatrix()
parent_joint = maya.cmds.listRelatives(joint, p=True, pa=True)
if parent_joint:
parent_matrix = transform.get_matrix(parent_joint[0])
target_matrix = transform.get_matrix(target)
orient_matrix = target_matrix * parent_matrix.inverse()
# Extract joint orient values
rotation_order = maya.cmds.getAttr('{}.ro'.format(joint))
orient_rotation = matrix_utils.get_rotation(orient_matrix, rotation_order=rotation_order)
# Reset joint rotation and orientation
maya.cmds.setAttr('{}.r'.format(joint), 0, 0, 0)
maya.cmds.setAttr('{}.jo'.format(joint), orient_rotation[0], orient_rotation[1], orient_rotation[2])
# Reparent children
if child_list:
maya.cmds.parent(child_list, joint)
def orient_x_to_child(joint, invert=False):
"""
Function that orients given joint to its child (points X axis of joint to its child)
:param joint: str
:param invert: bool
"""
aim_axis = [1, 0, 0] if not invert else [-1, 0, 0]
up_axis = [0, 1, 0] if not invert else [0, -1, 0]
children = maya.cmds.listRelatives(joint, type='transform')
if children:
orient = OrientJoint(joint, children)
orient.set_aim_at(3)
orient.set_aim_up_at(0)
orient.set_aim_vector(aim_axis)
orient.set_up_vector(up_axis)
orient.run()
if not children:
maya.cmds.makeIdentity(joint, jo=True, apply=True)
def orient_y_to_child(joint, invert=False):
"""
Function that orients given joint to its child (points Y axis of joint to its child)
:param joint: str
:param invert: bool
"""
aim_axis = [0, 1, 0] if not invert else [0, -1, 0]
# Repo: UBC-MDS/rainfall_group22, file: src/sklearn_helper_funcs.py
import json
import pickle
import re
import time
from pathlib import Path
import numpy as np
import pandas as pd
from IPython.display import display
from matplotlib.colors import LinearSegmentedColormap, ListedColormap # type: ignore
from seaborn import diverging_palette
from sklearn.compose import ColumnTransformer
from sklearn.feature_extraction.text import CountVectorizer, _VectorizerMixin
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection._base import SelectorMixin
from sklearn.metrics import (accuracy_score, classification_report, f1_score,
make_scorer, recall_score)
from sklearn.model_selection import (GridSearchCV, RandomizedSearchCV,
cross_val_score, cross_validate,
train_test_split)
from sklearn.pipeline import Pipeline
from sklearn.decomposition import PCA
# from icecream import ic
# ic.configureOutput(prefix='')
try:
import shap
except ImportError as e:
# print('failed to import module', e)
pass
# p_tmp = Path.home() / 'Desktop/sklearn_temp'
# print(p_tmp.exists())
_cmap = diverging_palette(240, 10, sep=10, n=21, as_cmap=True)
search_params = dict(
lgbm=dict(
boosting_type=['gbdt', 'dart', 'goss', 'rf'],
n_estimators=[25, 50, 100],
max_depth=[-1, 5, 10, 20, 30],
num_leaves=[5, 10, 20, 40, 100]))
class ModelManager(object):
"""Manager class to perform cross val etc on multiple models with same underlying column transformer + data
"""
def __init__(self, ct=None, scoring=None, cv_args=None, random_state=0, target: str = 'target', **kw):
random_state = random_state
cv_args = cv_args if cv_args is not None else {}
df_results = pd.DataFrame()
pipes = {}
scores = {}
models = {}
grids = {}
df_preds = {}
v = {**vars(), **kw}
set_self(v)
if any(item in kw for item in ('features', 'encoders')):
self.make_column_transformer(**kw)
def make_column_transformer(self, features: dict, encoders: dict, **kw) -> ColumnTransformer:
"""Create ColumnTransformer from dicts of features and encoders
- NOTE column for CountVectorizer must be 'column_name' not ['column_name']
Parameters
----------
features : dict
feature group names matched to columns
encoders : dict
feature group names matched to encoders, eg MinMaxScaler
Returns
-------
ColumnTransformer
"""
ct = ColumnTransformer(
transformers=[(name, encoder, features[name]) for name, encoder in encoders.items()])
set_self(vars())
return ct
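# A minimal sketch of the expected inputs, assuming 'mm' is a ModelManager
# and the column names / encoders are illustrative only:
# from sklearn.preprocessing import MinMaxScaler, OneHotEncoder
# features = dict(numeric=['rain_mm', 'temp_c'], categorical=['station'])
# encoders = dict(numeric=MinMaxScaler(), categorical=OneHotEncoder())
# ct = mm.make_column_transformer(features=features, encoders=encoders)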
def show_ct(self, x_train: pd.DataFrame = None):
"""Show dataframe summary of transformed columns
- NOTE doesn't work with CountVectorizer (could be thousands of columns), just skip calling this"""
if x_train is None:
x_train = getattr(self, 'x_train', None)
if x_train is None:
raise AttributeError('x_train not set!')
data = self.ct.fit_transform(x_train)
df_trans = df_transformed(data=data, ct=self.ct)
print(df_trans.shape)
display(df_trans.describe().T)
def get_model(self, name: str, best_est=False):
if best_est:
return self.best_est(name=name)
else:
return self.models[name]
def cross_val_feature_params(self, signal_manager, name, model, feature_params: dict, train_size: float = 0.8):
"""Run full cross val pipe with single replacement of each feature in feature_params"""
df_scores_features = pd.DataFrame()
for df, param_name in signal_manager \
.replace_single_feature(
df=self.df,
feature_params=feature_params):
# need to remake train/test splits every time
x_train, y_train, x_test, y_test = self.make_train_test(
df=df,
target=self.target,
train_size=train_size,
shuffle=False)
# just remake models dict with modified param name as key
models = {f'{name}_{param_name}': model}
self.cross_val(models=models, show=False, df_scores=df_scores_features)
self.show(df=df_scores_features)
def timeit(self, func, *args, **kw):
t = time.time()
res = func(*args, **kw)
final_time = time.time() - t
return final_time, res
def fit_score(self, models: dict, show: bool = True):
"""Simple timed fit/score into df to display results"""
for name, model in models.items():
# allow passing model definition, or instantiated model
if isinstance(model, type):
model = model()
fit_time, _ = self.timeit(model.fit, self.x_train, self.y_train)
score_time, train_score = self.timeit(
model.score, self.x_train, self.y_train)
scores = dict(
fit_time=fit_time,
score_time=score_time,
train_score=train_score,
test_score=model.score(self.x_test, self.y_test))
self.scores[name] = scores
self.models[name] = model
self.df_results.loc[name, scores.keys()] = scores.values()
if show:
self.show()
def make_pipe(self, name: str, model, steps: list = None):
pipe = Pipeline(
steps=[
('ct', self.ct),
(name, model)],
# memory=str(p_tmp)
# ('pca', PCA(n_components=20)),
)
# insert extra steps in pipe, eg RFECV
if steps is not None:
if not isinstance(steps, list):
steps = [steps]
for step in steps:
pipe.steps.insert(step[0], step[1])
self.pipes[name] = pipe
return pipe
def cross_val(self, models: dict, show: bool = True, steps: list = None, df_scores=None, **kw):
"""Perform cross validation on multiple classifiers
Parameters
----------
models : dict
models with {name: classifier} to cross val
show : bool, optional
show dataframe of results, default True
steps : list, optional
list of tuples of [(step_pos, (name, model)), ]
"""
if self.ct is None:
raise AttributeError('ColumnTransformer not init!')
if df_scores is None:
df_scores = self.df_results
for name, model in models.items():
# allow passing model definition, or instantiated model
if isinstance(model, type):
model = model()
model.random_state = self.random_state
# save model/pipeline by name
self.models[name] = model
pipe = self.make_pipe(name=name, model=model, steps=steps)
scores = cross_validate(
pipe, self.x_train, self.y_train.values.ravel(), error_score='raise', **self.cv_args)
self.scores[name] = scores
df_scores = df_scores \
.pipe(
append_mean_std_score,
scores=scores,
name=name,
scoring=self.cv_args.get('scoring', None))
if show:
self.show(**kw)
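# A usage sketch, assuming 'mm' is a ModelManager with data/ct set and a
# hypothetical lightgbm import; steps inserts PCA at pipeline position 1:
# from lightgbm import LGBMClassifier
# mm.cross_val(
#     models=dict(lgbm=LGBMClassifier),
#     steps=[(1, ('pca', PCA(n_components=20)))])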
def show(self, df=None, **kw):
if df is None:
df = self.df_results
show_scores(df, **kw)
def fit(self, name: str, best_est=False, model=None):
"""Fit model to training data"""
if best_est:
model = self.best_est(name)
if model is None:
model = self.pipes[name]
model.fit(self.x_train, self.y_train)
return model
def y_pred(self, X, model=None, **kw):
if model is None:
model = self.fit(**kw)
return model.predict(X)
def class_rep(self, name: str = None, **kw):
"""Show classification report
Parameters
----------
name : str
name of existing model
"""
y_pred = self.y_pred(name=name, X=self.x_test, **kw)
y_true = self.y_test.values.flatten()
# classification report
m = classification_report(y_true, y_pred, output_dict=True)
df = pd.DataFrame(m).T
display(df)
def df_proba(self, df=None, model=None, **kw):
"""Return df of predict_proba, with timestamp index"""
if df is None:
df = self.x_test
if model is None:
model = self.fit(**kw)
arr = model.predict_proba(df)
m = {-1: 'short', 0: 'neutral', 1: 'long'}
cols = [f'proba_{m.get(c)}' for c in model.classes_]
return pd.DataFrame(data=arr, columns=cols, index=df.index)
def add_proba(self, df, do=False, **kw):
"""Concat df of predict_proba"""
return pd.concat([df, self.df_proba(**kw)], axis=1) if do else df
def add_predict(self, df, proba=True, **kw):
"""Add predicted vals to df"""
df = df \
.assign(y_pred=self.y_pred(X=df.drop(columns=[self.target]), **kw)) \
.pipe(self.add_proba, do=proba, **kw)
# save predicted values for each model
self.df_preds[kw['name']] = df
return df
def best_est(self, name: str):
return self.grids[name].best_estimator_
def search(self, name: str, params: dict=None, estimator=None, search_type: str = 'random', **kw):
"""Perform Random or Grid search to optimize params on specific model
Parameters
----------
name : str
name of model, must have been previously added to ModelManager
params : dict
dict of parameter names to candidate values; defaults to the saved search_params entry for this model
estimator : sklearn model/Pipeline, optional
pass in model if not init already, default None
search_type : str, optional
RandomizedSearchCV or GridSearchCV, default 'random'
Returns
-------
RandomizedSearchCV | GridSearchCV
sklearn model_selection object
"""
# TODO need to enable NO renaming
# search with saved params for specific model
if params is None:
params = search_params[name]
# rename params to include 'name__parameter'
params = {f'{name}__{k}': v for k, v in params.items()}
if estimator is None:
estimator = self.pipes[name]
m = dict(
random=dict(
cls=RandomizedSearchCV,
param_name='param_distributions'),
grid=dict(
cls=GridSearchCV,
param_name='param_grid')) \
.get(search_type)
# grid/random have different kw for param grid/distribution
kw[m['param_name']] = params
grid = m['cls'](
estimator=estimator,
**kw,
**self.cv_args) \
.fit(self.x_train, self.y_train.values.ravel())
self.grids[name] = grid
results = {
'Best params': grid.best_params_,
'Best score': f'{grid.best_score_:.3f}'}
pretty_dict(results)
return grid
def save_model(self, name: str, **kw):
model = self.get_model(name=name, **kw)
filename = f'{name}.pkl'
with open(filename, 'wb') as file:
pickle.dump(model, file)
def load_model(self, name: str, **kw):
filename = f'{name}.pkl'
with open(filename, 'rb') as file:
return pickle.load(file)
def make_train_test(self, df, target, train_size=0.8, **kw):
"""Make x_train, y_train etc from df
Parameters
----------
target : list
target column to remove for y_
"""
if 'test_size' not in kw:
kw['train_size'] = train_size
df_train, df_test = train_test_split(
df, random_state=self.random_state, **kw)
x_train, y_train = split(df_train, target=target)
x_test, y_test = split(df_test, target=target)
set_self(vars())
return x_train, y_train, x_test, y_test
def shap_plot(self, name, **kw):
"""Convenience wrapper for shap_plot from ModelManager"""
shap_plot(
X=self.x_train,
y=self.y_train,
ct=self.ct,
model=self.models[name],
**kw)
def shap_explainer_values(X, y, ct, model, n_sample=2000):
"""Create shap values/explainer to be used with summary or force plot"""
data = ct.fit_transform(X)
X_enc = df_transformed(data=data, ct=ct)
model.fit(X_enc, y)
# use smaller sample to speed up plot
X_sample = X_enc
if n_sample is not None:
X_sample = X_enc.sample(n_sample, random_state=0)
explainer = shap.TreeExplainer(model)
shap_values = explainer.shap_values(X_sample)
return explainer, shap_values, X_sample, X_enc
def shap_plot(X, y, ct, model, n_sample=2000):
"""Show shap summary plot"""
explainer, shap_values, X_sample, X_enc = shap_explainer_values(X=X, y=y, ct=ct, model=model, n_sample=n_sample)
shap.summary_plot(
shap_values=shap_values,
features=X_sample,
plot_type='violin',
axis_color='white')
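# A usage sketch, assuming a tree-based model (a TreeExplainer requirement)
# and that X/y/ct come from the surrounding pipeline; the import is illustrative:
# from lightgbm import LGBMClassifier
# shap_plot(X=x_train, y=y_train, ct=ct, model=LGBMClassifier(), n_sample=500)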
def shap_top_features(shap_vals, X_sample):
vals = np.abs(shap_vals).mean(0)
return pd \
.DataFrame(
data=list(zip(X_sample.columns, vals)),
columns=['feature_name', 'importance']) \
.sort_values(by=['importance'], ascending=False)
def show_prop(df, target_col='target'):
"""Show proportion of classes in target column"""
return df \
.groupby(target_col) \
.agg(num=(target_col, 'size')) \
.assign(prop=lambda x: x.num / x.num.sum()) \
.style \
.format(dict(
num='{:,.0f}',
prop='{:.2%}'))
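# Example, assuming 'df' has a 'target' column of class labels:
# show_prop(df)  # styled table of per-class counts and proportions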
def split(df, target):
"""Split off target col to make X and y"""
if not isinstance(target, list):
target = [target]
return df.drop(columns=target), df[target]
# File: playability.py
"""
Tile Representation
F = FLOOR
B = BLOCK
M = MONSTER
P = ELEMENT (LAVA, WATER)
O = ELEMENT + FLOOR (LAVA/BLOCK, WATER/BLOCK)
I = ELEMENT + BLOCK
D = DOOR
S = STAIR
W = WALL
- = VOID
walkable: F, M, O,
obstacle: B, P, I, W, -
start/end: D, S
"""
from typing import List, Tuple, Dict
from itertools import combinations
import glob
import random
import copy
import numpy as np
# https://pypi.org/project/pathfinding/
from pathfinding.core.diagonal_movement import DiagonalMovement
from pathfinding.core.grid import Grid
from pathfinding.finder.a_star import AStarFinder
from PCGMM_Evaluation_Method.constant import tileTypes
# WALKABLE = ["F", "M", "O"]
# START_END = ["D", "S"]
# ALL_WALKABLE = WALKABLE + START_END
# OBSTACLE = ["B", "P", "I", "W", "-"]
# BRIDGEABLE = ["P"]
WALKABLE = ["F", "M", "-", "C"]
START_END = ["D", "S"]
# START_END = ["D", "S", "A"]
ALL_WALKABLE = WALKABLE + START_END
OBSTACLE = ["B", "P", "W"]
BRIDGEABLE = ["P"]
DOOR_REPLACEABLE = ["A", "N", "E", "U"]
# OBSTACLE = ["B", "I", "W", "-"]
# BRIDGEABLE = ["P"]
# def pathfinder_compatibility_conversion(file_name: str) -> Tuple[List, List[List[int]]]:
# room_matrix = []
# row_idx = 0
# start_end = []
# with open(file_name, 'r') as fp:
# while True:
# line = fp.readline()
# if not line:
# break
# room_line = []
# for column_idx, tile in enumerate(line):
# if tile in OBSTACLE:
# room_line.append(0)
# elif tile in WALKABLE:
# room_line.append(1)
# elif tile in START_END:
# room_line.append(1)
# start_end.append((row_idx, column_idx))
# row_idx = row_idx + 1
# room_matrix.append(room_line)
# return (start_end, room_matrix)
def select_real_start_end(start_end, room_matrix):
result = {"D": [], "S": []}
for k in start_end:
for coor1 in start_end[k]:
# print("-", coor1)
is_add = True
for coor2 in result[k]:
# print("*", coor2)
# up down left right
if (coor1[0] == coor2[0] and coor1[1]-1 == coor2[1]) or \
(coor1[0] == coor2[0] and coor1[1]+1 == coor2[1]) or \
(coor1[1] == coor2[1] and coor1[0]-1 == coor2[0]) or \
(coor1[1] == coor2[1] and coor1[0]+1 == coor2[0]):
is_add = False
break
if (coor1[0] == coor2[0] and coor1[1]-2 == coor2[1]) or \
(coor1[0] == coor2[0] and coor1[1]+2 == coor2[1]) or \
(coor1[1] == coor2[1] and coor1[0]-2 == coor2[0]) or \
(coor1[1] == coor2[1] and coor1[0]+2 == coor2[0]):
is_add = False
break
if is_add:
# print("add", coor1)
result[k].append(coor1)
# print("--",result)
all_start_end = []
for k in start_end:
all_start_end = all_start_end + result[k]
# print(all_start_end)
if len(all_start_end) == 1:
# if only one door, make sure we can at least walk into the room
farthest_loc = (-1, -1)
farthest_dist = 0
for i in range(len(room_matrix)):
for j in range(len(room_matrix[0])):
if room_matrix[i][j] != 1:
continue
dist = abs(i - all_start_end[0][0]) + abs(j - all_start_end[0][1])
if dist > farthest_dist:
farthest_dist = dist
farthest_loc = (i, j)
result["D"].append(farthest_loc)
return result
def pathfinder_compatibility_conversion(room: np.ndarray) -> Tuple[List, List[List[int]]]:
room_matrix = []
start_end = {}
# print(room)
for i in range(room.shape[0]):
room_line = []
for j in range(room.shape[1]):
tile = room[i, j]
if tile in OBSTACLE:
room_line.append(0)
elif tile in WALKABLE:
room_line.append(1)
elif tile in START_END:
room_line.append(1)
# start_end.append((i, j))
if tile not in start_end:
start_end[tile] = [(i, j)]
else:
start_end[tile].append((i,j))
# elif tile == "A":
# room_line.append(1)
# if tile not in start_end:
# start_end["D"] = [(i, j)]
# else:
# start_end["D"].append((i,j))
room_matrix.append(room_line)
return (start_end, room_matrix)
def read_file(file_name: str) -> Tuple[np.ndarray, bool]:
room = []
has_bridge_block = False
with open(file_name, 'r') as fp:
while True:
line = fp.readline()
if not line:
break
room_line = []
for tile in line:
if tile in tileTypes:
if tile in DOOR_REPLACEABLE:
room_line.append("D")
else:
room_line.append(tile)
elif tile not in tileTypes and tile != '\n':
raise ValueError('new tile type = {}'.format(tile))
if tile in BRIDGEABLE:
has_bridge_block = True
room.append(room_line)
return (np.asarray(room), has_bridge_block)
def is_playable(start_end: Dict, room_matrix: List[List[int]], print_path: bool=False) -> bool:
# TODO: investigate
# if len(start_end) < 2:
# playable = True
# return playable
all_start_end = []
for k in start_end:
all_start_end = all_start_end + start_end[k]
grid = Grid(matrix=room_matrix)
comb = list(combinations(all_start_end, 2))
playable = True
# random.shuffle(comb)
for _start, _end in comb:
# print(_start, _end)
# hardcode here
# if (_start in start_end["D"] and _end in start_end["D"]) or (_start in start_end["S"] and _end in start_end["S"]):
# abs_dis = abs(_start[0] - _end[0]) + abs(_start[1] - _end[1])
# if abs_dis < 4:
# continue
start = grid.node(_start[1], _start[0])
end = grid.node(_end[1], _end[0])
finder = AStarFinder(diagonal_movement=DiagonalMovement.never)
path, runs = finder.find_path(start, end, grid)
if print_path and len(path) > 0:
print('operations:', runs, 'path length:', len(path))
print(grid.grid_str(path=path, start=start, end=end))
grid.cleanup()
if len(path) == 0:
playable = False
break
return playable
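# A minimal sketch of the A* check on a hand-written matrix (1 = walkable,
# 0 = obstacle), mirroring how is_playable drives the pathfinding library:
# grid = Grid(matrix=[[1, 1, 0], [0, 1, 1]])
# finder = AStarFinder(diagonal_movement=DiagonalMovement.never)
# path, runs = finder.find_path(grid.node(0, 0), grid.node(2, 1), grid)
# len(path) > 0  # True -> the two cells are connected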
def final_playable(start_end: Dict, room: np.ndarray, room_matrix: List[List[int]], has_bridge_block: bool) -> bool:
playable = is_playable(start_end=start_end, room_matrix=room_matrix, print_path=False)
# return playable
# print(playable)
# print(has_bridge_block)
if not playable and has_bridge_block:
result = np.where(room=="P")
# print(bridge_block_locs[0])
listOfCoordinates = list(zip(result[0], result[1]))
# print("before select candidates", listOfCoordinates)
candidates_coor = select_candidates(listOfCoordinates, room)
# print("after select candidates", candidates_coor)
n = 1
break_flag = False
while True:
# print(n)
comb = list(combinations(candidates_coor, n))
for coors in comb:
room_matrix_2 = copy.deepcopy(room_matrix)
# print(coors)
if n == 1:
room_matrix_2[coors[0][0]][coors[0][1]] = 1
# playable = is_playable(start_end, room_matrix_2, print_path=True)
playable = is_playable(start_end, room_matrix_2, print_path=False)
elif n > 1:
# check if any of the points touch each other
if is_contact(coors):
continue
# if not is_connect_to_WALKABLE(coors, room):
# continue
for i in range(n):
room_matrix_2[coors[i][0]][coors[i][1]] = 1
# playable = is_playable(start_end, room_matrix_2, print_path=True)
playable = is_playable(start_end, room_matrix_2, print_path=False)
if playable:
# print(np.asarray(room_matrix_2))
return True
n = n + 1
if n > 5:
return False
# break
# return playable
else:
return playable
def manhattan_distance(x,y):
return sum(abs(a-b) for a,b in zip(x,y))
def is_contact(list_loc) -> bool:
# print(list_loc)
comb = list(combinations(list_loc, 2))
# print(comb)
for i in range(len(comb)):
loc1 = comb[i][0]
loc2 = comb[i][1]
# print(loc1)
# print(loc2)
if manhattan_distance(loc1, loc2) == 1:
return True
else:
continue
return False
def select_candidates(listOfCoordinates, room):
candidates_coor = []
for coors in listOfCoordinates:
if is_connect_to_WALKABLE([coors], room):
candidates_coor.append(coors)
return candidates_coor
def is_connect_to_WALKABLE(list_loc, room):
# print(list_loc)
for x, y in list_loc:
# print(x,y)
if room[x-1, y] in ALL_WALKABLE and room[x+1, y] in ALL_WALKABLE:
continue
elif room[x, y-1] in ALL_WALKABLE and room[x, y+1] in ALL_WALKABLE:
continue
else:
# print("haha")
return False
return True
def evaluate_playability(evaluate_data):
unplayable_room= []
playability = 0.0
n = 0
p = 0
for i in range(evaluate_data.shape[0]):
room = evaluate_data[i, :, :]
# print(np.where(room == "P")[0].shape[0])
# print(room[np.where(np.isin(room, BRIDGEABLE))].shape[0])
if room[np.where(np.isin(room, BRIDGEABLE))].shape[0] > 0:
has_bridge_block = True
else:
has_bridge_block = False
# replace DOOR_REPLACABLE tile to door
room[np.where(np.isin(room, DOOR_REPLACEABLE))] = "D"
# print(room)
try:
start_end, room_matrix = pathfinder_compatibility_conversion(room)
# print(room_matrix)
start_end = select_real_start_end(start_end, room_matrix)
except Exception as e:
print("!"*10)
print(e)
print("room is not valid, idx={}".format(i))
print(room)
print("!"*10)
continue
playable = final_playable(start_end, room, room_matrix, has_bridge_block)
# print(has_bridge_block)
# print(playable)
n = n + 1
if playable:
p = p + 1
else:
unplayable_room.append(room)
playability = p / n
return (unplayable_room, playability)
if __name__ == "__main__":
"""
Question:
Is void walkable? [maps/tloz6_1_room_16.txt]
maps/tloz5_1_room_13.txt felt odd ??
"""
####### TEST SECTION ########
# # file_name = 'tloz8_2_room_29.txt'
# # file_name = 'tloz5_1_room_13.txt'
# file_name = 'map_data/map_reduced_OI/tloz8_2_room_14.txt'
# # file_name = 'tloz8_2_room_7.txt'
# # file_name = 'unplayable_tloz9_2_room_6.txt'
# # file_name = 'maps/tloz6_1_room_16.txt'
# room, has_bridge_block = read_file(file_name)
# start_end, room_matrix = pathfinder_compatibility_conversion(room)
# print("start:", start_end)
# start_end = select_real_start_end(start_end, room_matrix)
# print("after select: ", start_end)
# # print(np.matrix(room_matrix))
# print(room)
# playable = final_playable(start_end, room, room_matrix, has_bridge_block)
# print("'{}' is playable: {}".format(file_name,playable))
# '''
# # try:
# # playable = is_playable(start_end, room_matrix, print_path=False)
# # except:
# # print("{} is not right".format(file_name))
# # print("'{}' is playable: {}".format(file_name,playable))
# '''
##############################
# files = glob.glob("map_data/map_reduced_OI/*.txt") # 0.9651
# files = glob.glob("generate_map/generate_map_BMC_2/*.txt") #0.8025
files = glob.glob("generate_map/generate_map_RM_2/*.txt")
n = 0
p = 0
for file_name in files:
# print(file_name)
# print(n)
try:
room, has_bridge_block = read_file(file_name)
# start_end, room_matrix = pathfinder_compatibility_conversion(file_name)
start_end, room_matrix = pathfinder_compatibility_conversion(room)
start_end = select_real_start_end(start_end, room_matrix)
except Exception as e:
print(file_name)
print(e)
break
# print(start_end)
# print(np.matrix(room_matrix))
# try:
# playable = is_playable(start_end, room_matrix, print_path=False)
playable = final_playable(start_end, room, room_matrix, has_bridge_block)
n = n + 1
if playable:
p = p + 1
# except Exception as e:
# print("error: {} in file {}".format(str(e), file_name))
#!/usr/bin/env python
# This file should be available from
# http://www.pobox.com/~asl2/software/Pinefs
# and is licensed under the X Consortium license:
# Copyright (c) 2003, <NAME>, <EMAIL>
# All rights reserved.
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, and/or sell copies of the Software, and to permit persons
# to whom the Software is furnished to do so, provided that the above
# copyright notice(s) and this permission notice appear in all copies of
# the Software and that both the above copyright notice(s) and this
# permission notice appear in supporting documentation.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
# OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL
# INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING
# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Except as contained in this notice, the name of a copyright holder
# shall not be used in advertising or otherwise to promote the sale, use
# or other dealings in this Software without prior written authorization
# of the copyright holder.
"""Parser for ONC RPC IDL. The grammar is taken from RFC1832, sections
5, and RFC1831, section 11.2.
The output Python code (which requires rpchelp and rpc from the Pinefs
distribution) contains a separate class (with rpchelp.Server as a base
class) for every version defined in every program statement. To
implement a service, for each version of each program, derive a class
from the class named <prog>_<version>, with method names corresponding
to the procedure names in the IDL you want to implement. (At
instantiation, any procedure names defined in the IDL but neither
implemented nor listed in the deliberately_unimplemented member will
cause a warning to be printed.) Also, define a member function
check_host_ok, which is passed (host name, credentials, verifier) on each
call, and should return a true value if the call should be accepted,
and false otherwise.
To use instances of the server class, create a transport server (with
the create_transport_server(port) function), and then, for every server
instance you want associated with that port, call its
register(transport_server) function, which will register with the
local portmapper. (This architecture allows multiple versions of
multiple programs all to listen on the same port, or for a single version
to listen on, e.g., both a TCP and UDP port.)
Member functions will be passed Python values, and should return
a Python value. The correspondence between IDL datatypes and
Python datatypes is:
- base types uint, int, float, double are the same
- void is None
- an array (either fixed or var-length) is a Python sequence
- an opaque or a string is a Python string
- a structure is a Python instance, with IDL member names corresponding
to Python attribute names
- a union is a two-attribute instance, with one attribute named the
name of the discriminant declaration, and the other named '_data'
(with a value appropriate to the value of the discriminant).
- an optional value (*) is either None, or the value
- a linked list is special-cased, and turned into a Python list
of structures without the link member.
- const and enum declarations are top-level constant variables.
IDL identifiers which are Python reserved words (or Python reserved
words with 1 or more underscores suffixed) are escaped by appending
an underscore.
Top-level struct and union declarations generate Python declarations
of the corresponding name, and calling the object bound to the name
will generate an instance suitable for populating. (The class defines
__slots__ to be the member names, and has, as attributes, any nested
struct or union definitions. The packing/unpacking functions don't
require the use of this class, and, for the unnamed struct/union
declarations created by declaring struct or union types as either
return values or argument types in a procedure definition, you'll need
to create your own classes, either by using
rpchelp.struct_union_class_factory, or some other way.)
Enum declarations nested inside struct or union declarations, or
procedure definitions, generate top-level definitions. (I think this
treatment of nested enum definitions is wrong, according to RFC1832
section 5.4, but I'm not sure.)
Rpcgen doesn't support:
- 'unsigned' as a synonym for 'unsigned int'
- case fall-through in unions
Neither seems to be defined in the grammar, but I should support them,
and look around for an updated IDL specification.
"""
from __future__ import nested_scopes
import string
import time
class LexError (Exception):
pass
class ParseError(Exception):
pass
tokens = ('LPAREN', 'RPAREN','LBRACK', 'RBRACK', 'LANGLE', 'RANGLE',
'STAR', 'COMMA', 'COLON', 'VOID', 'UNSIGNED', 'TYPE',
'ENUM', 'LCBRACK', 'RCBRACK', 'EQ', 'STRUCT', 'UNION',
'SWITCH', 'CASE', 'DEFAULT', 'CONST', 'SEMICOLON', 'IDENT',
'CONSTVAL', 'OPAQUE', 'STRING', 'TYPEDEF', 'PROGRAM', 'VERSION')
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACK = r'\['
t_RBRACK = r'\]'
t_LANGLE = r'<'
t_RANGLE = r'>'
t_STAR = r'\*'
t_COMMA = r','
t_COLON = r':'
t_LCBRACK = r'{'
t_RCBRACK = r'}'
t_EQ = r'='
t_SEMICOLON = r';'
reserved_tuple = (
# RFC 1832, XDR
'void',
'unsigned',
'opaque',
'string',
'enum',
'struct',
'union',
'switch',
'case',
'default',
'const',
'typedef',
# RFC 1831, RPC
'program',
'version')
types = ('int', 'hyper', 'float', 'double', 'quadruple', 'bool')
py_reserved_words = ['def', 'print', 'del', 'pass', 'break', 'continue',
'return', 'yield', 'raise', 'import', 'from',
'global', 'exec', 'assert', 'if', 'while', 'else',
'for', 'in', 'try', 'finally', 'except', 'and',
'not', 'or', 'is', 'lambda', 'class']
reserved_dict = {}
for r in reserved_tuple:
reserved_dict[r] = r.upper ()
for t in types:
reserved_dict[t] = 'TYPE'
def t_CONSTVAL(t):
r"(0x[0-9A-Fa-f]+)|(-?\d+)"
return t
def t_COMMENT (t):
r"/\*(.|\n)*?\*/"
t.lineno += t.value.count('\n')
return None
def t_NEWLINE(t):
r'\n+'
t.lineno += t.value.count("\n")
t_ignore = " \t"
def needs_escaping (val):
"""Check whether this identifier is a valid IDL but not Python identifier,
or whether it would otherwise be an escaped IDL identifier. (Escaping
is done by appending a '_', if the identifier, once stripped of all
trailing underscores, is a Python keyword."""
while val [-1] == '_':
val = val [:-1]
return val in py_reserved_words
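# e.g. needs_escaping('class') -> True and needs_escaping('class_') -> True
# (both collide once trailing underscores are stripped), while
# needs_escaping('klass') -> False.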
def t_IDENT (t):
r"[a-zA-Z]([a-zA-Z]|_|[0-9])*"
t.type = reserved_dict.get (t.value, 'IDENT')
if needs_escaping (t.value):
t.value += '_'
return t
def t_error (t):
raise LexError (t)
import lex
lexer = lex.lex ()
class Ctx:
def __init__ (self):
self.indent = 0
self.deferred_list = [] # used for deferring nested Enum definitions
def defer (self, val):
self.deferred_list.append (val)
def finish (self):
return "\n".join (self.deferred_list)
class Node:
def __init__ (self, val=None, children=None, **kw):
self.val = val
self.children = children
self.__dict__.update (kw)
def set_ident (self, ident):
"""Sets name (currently only used to get struct/union tags into
the right place)"""
pass
def to_str (self, ctx):
return '# Unimplemented ' + self.__class__.__name__ + " " + str (self.__dict__)
class NodeList(Node):
sep = '\n'
def __init__ (self, node, node_list = None):
if node_list is None:
Node.__init__ (self, children = [node])
else:
Node.__init__ (self, children = [node] +
node_list.children)
def to_str (self, ctx):
l = [c.to_str (ctx) for c in self.children]
return self.sep.join (l)
class NodeListComma (NodeList):
sep = ', '
class Specification(NodeList): pass
class SimpleType (Node):
def __init__ (self, typ, ident): # ident can be None
typ.set_ident (ident)
Node.__init__ (self, typ=typ, ident = ident)
def to_str (self, ctx):
return self.typ.to_str (ctx)
class ArrType (Node):
fixed = 0
var = 1
def __init__ (self, typ, ident, var_fixed, maxind = None):
Node.__init__ (self, typ=typ, ident=ident,
var_fixed = var_fixed, maxind = maxind)
def to_str (self, ctx):
var_fixed = ['rpchelp.fixed', 'rpchelp.var'] [self.var_fixed]
if self.typ in ['string', 'opaque']:
return 'rpchelp.%s (%s, %s)' % (self.typ, var_fixed, self.maxind)
return 'rpchelp.arr (%s, %s, %s)' % (
self.typ.to_str (ctx), var_fixed, self.maxind)
class OptData (Node):
def __init__ (self, type_spec, ident):
Node.__init__ (self, type_spec=type_spec,
ident = ident)
def to_str (self, ctx):
return 'rpchelp.opt_data (lambda : %s)' % (self.type_spec.to_str (ctx))
class TypeSpec (Node):
unsignable = {'int' : 'uint', 'hyper' : 'uhyper'}
def __init__ (self, val, unsigned, base, compound = 0):
if unsigned:
v = self.unsignable.get (val, None)
if v is None:
raise ParseError (val + ' cannot be combined w/ unsigned ')
val = v
Node.__init__ (self, val=val, base = base, compound = compound)
def set_ident (self,
+ plg.write2xml()
s += '</plug>' + CHANGELINE
return s
def write2xml(self):
vsUnitStr = ('<iGin>' + str(self.iGin) + '</iGin>' + CHANGELINE +
self.__write_plug__() +
'<m>' + str(self.m) + '</m>' + CHANGELINE +
'<s>' + str(self.s) + '</s>' + CHANGELINE +
'<vol>' + str(self.vol) + '</vol>' + CHANGELINE)
return vsUnitStr
class masterUnit():
def __init__(self, params):
[oDev, plugs, plugSR, rLvl, vol] = params
## plugs is a 2-D list!!
# there is only one plugSR, so it is a 1-D list
if len(plugs) == 0:
self.plugs = []
else:
self.plugs = []
for plug_param in plugs:
self.plugs.append(plug(plug_param))
if len(plugSR) == 0:
self.plugSR = ''
else:
self.plugSR = plug(plugSR)
self.oDev = oDev
self.rLvl = rLvl
self.vol = vol
def return_param(self):
return [self.oDev, self.plugs, self.plugSR, self.rLvl, self.vol]
def __write_plug__(self):
if len(self.plugs) == 0:
return ''
s = ''
for plg in self.plugs:
s += '<plug>' + CHANGELINE + ' '
s = s + plg.write2xml()
s += '</plug>' + CHANGELINE
return s
def __write_plugSR__(self):
if self.plugSR == '':
return ''
s = ''
s += '<plugSR>' + CHANGELINE + ' '
s = s + self.plugSR.write2xml()
s += '</plugSR>' + CHANGELINE
return s
def write2xml(self):
vsUnitStr = ('<oDev>' + str(self.oDev) + '</oDev>' + CHANGELINE +
self.__write_plug__() +
self.__write_plugSR__() +
'<rLvl>' + str(self.rLvl) + '</rLvl>' + CHANGELINE +
'<vol>' + str(self.vol) + '</vol>' + CHANGELINE)
return vsUnitStr
class mixer():
# one masterUnit, multiple vsUnits, one monoUnit, one stUnit
def __init__(self, params):
[masterUnit_param, vsUnits_param, monoUnit_param, stUnit_param] = params
self.masterUnit = masterUnit(masterUnit_param)
self.monoUnit = monoUnit(monoUnit_param)
self.stUnit = stUnit(stUnit_param)
self.vsUnits = []
if len(vsUnits_param) != 0:
for vsUnit_param in vsUnits_param:
vs = vsUnit(vsUnit_param)
self.vsUnits.append(vs)
def __write_vsUnit__(self):
if len(self.vsUnits) == 0:
return ''
s = ''
for vsunit in self.vsUnits:
s += '<vsUnit>' + CHANGELINE + ' '
s = s + vsunit.write2xml()
s += '</vsUnit>' + CHANGELINE
return s
def write2xml(self):
mixerStr = ('<masterUnit>' + CHANGELINE + self.masterUnit.write2xml() + '</masterUnit>' + CHANGELINE +
self.__write_vsUnit__() +
'<monoUnit>' + CHANGELINE + self.monoUnit.write2xml() + '</monoUnit>' + CHANGELINE +
'<stUnit>' + CHANGELINE + self.stUnit.write2xml() + '</stUnit>' + CHANGELINE)
return mixerStr
# e.g.:
'''
mixparam=[[0,[['<![CDATA[vy26]]>','<![CDATA[V3Comp]]>',2,2,['10563103','5592517'],0,1,0]],['<![CDATA[H82m]]>','<![CDATA[H82 Harmonic Maximizer]]>',2,7,[0,0,0,0,'6869600',0,'16777216'],0,1,0],0,0],\
[[0,0,[['<![CDATA[vy26]]>','<![CDATA[V3Comp]]>',2,2,['10563103','5592517'],0,1,0],['<![CDATA[vx21]]>','<![CDATA[V3Reverb]]>',2,3,['8388608','3355443','6710886'],0,1,0]],-898,1,0,0,64,0],\
[1,0,[['<![CDATA[ ]]>','<![CDATA[]]>',0,0,[],0,0,0],['<![CDATA[sMax]]>', '<![CDATA[D82 Sonic Maximizer]]>',2,5,[0,0,0,0,'8388608'],0,1,0]],-227,1,0,0,64,0]],\
[0,[['<![CDATA[L82m]]>','<![CDATA[L82 Loudness Maximizer]]>',2,5,[0,0,0,'1671068','16777216'],0,1,0]],-280,1,0,0,64,0],\
[0,[['<![CDATA[ ]]>','<![CDATA[]]>',0,0,[],0,0,0],['<![CDATA[L82m]]>','<![CDATA[L82 Loudness Maximizer]]>',2,5,[0,0,0,'1671068','16777216'],0,1,0]],0,0,-129]]
'''
class seqcc():
def __init__(self, param):
[p, v] = param
self.p = p
self.v = v
def return_param(self):
return [self.p, self.v]
def write2xml(self):
seqccSTR = '<p>' + str(self.p) + '</p><v>' + str(self.v) + '</v>'
return seqccSTR
class Seq_vibDep():
def __init__(self, seqccs_param):
# seqcc_param=param
if len(seqccs_param) == 0:
self.seqcc_param = []
else:
self.seqcc_param = []
for seqcc_param in seqccs_param:
self.seqcc_param.append(seqcc(seqcc_param))
def return_param(self):
return self.seqcc_param
def __write_seqcc__(self):
if len(self.seqcc_param) == 0:
return ''
s = ''
for seq in self.seqcc_param:
s += '<cc>' + seq.write2xml() + '</cc>' + CHANGELINE
return s
def write2xml(self):
if len(self.seqcc_param) == 0:
return ''
else:
STR = '<seq id="vibDep">' + CHANGELINE + self.__write_seqcc__() + '</seq>' + CHANGELINE
return STR
class Seq_vibRate():
def __init__(self, seqccs_param):
# seqcc_param=param
if len(seqccs_param) == 0:
self.seqcc_param = []
else:
self.seqcc_param = []
for seqcc_param in seqccs_param:
self.seqcc_param.append(seqcc(seqcc_param))
def return_param(self):
return self.seqcc_param
def __write_seqcc__(self):
if len(self.seqcc_param) == 0:
return ''
s = ''
for seq in self.seqcc_param:
s += '<cc>' + seq.write2xml() + '</cc>' + CHANGELINE
return s
def write2xml(self):
if len(self.seqcc_param) == 0:
return ''
else:
STR = '<seq id="vibRate">' + CHANGELINE + self.__write_seqcc__() + '</seq>' + CHANGELINE
return STR
class nStyle():
def __init__(self, params):
[accent, bendDep, bendLen, decay, fallPort, opening, risePort, vibLen, vibType, vibDep, vibRate] = params
self.accent = accent
self.bendDep = bendDep
self.bendLen = bendLen
self.decay = decay
self.fallPort = fallPort
self.opening = opening
self.risePort = risePort
self.vibLen = vibLen
self.vibType = vibType
self.vibDep = Seq_vibDep(vibDep)
self.vibRate = Seq_vibRate(vibRate)
def return_param(self):
return [self.accent, self.bendDep, self.bendLen, self.decay, self.fallPort, self.opening, self.risePort,
self.vibLen, self.vibType, self.vibDep, self.vibRate]
def write2xml(self):
nstyleStr = ('<v id="accent">' + str(self.accent) + '</v>' + CHANGELINE +
'<v id="bendDep">' + str(self.bendDep) + '</v>' + CHANGELINE +
'<v id="bendLen">' + str(self.bendLen) + '</v>' + CHANGELINE +
'<v id="decay">' + str(self.decay) + '</v>' + CHANGELINE +
'<v id="fallPort">' + str(self.fallPort) + '</v>' + CHANGELINE +
'<v id="opening">' + str(self.opening) + '</v>' + CHANGELINE +
'<v id="risePort">' + str(self.risePort) + '</v>' + CHANGELINE +
'<v id="vibLen">' + str(self.vibLen) + '</v>' + CHANGELINE +
'<v id="vibType">' + str(self.vibType) + '</v>' + CHANGELINE +
self.vibDep.write2xml() + self.vibRate.write2xml())
return nstyleStr
class VNOTE():
def __init__(self, params):
[t, dur, n, v, y, p, nstyle, lock] = params
self.t = t
self.dur = dur
self.n = n
self.v = v
self.y = y
self.p = p
self.nStyle = nStyle(nstyle)
self.lock = lock
def __write_lock__(self):
if self.lock == '':
return ''
else:
return ' lock="1"'
def return_param(self):
return [self.t, self.dur, self.n, self.v, self.y, self.p, self.nStyle]
def write2xml(self):
writeStr = ('<t>' + str(self.t) + '</t>' + CHANGELINE +
'<dur>' + str(self.dur) + '</dur>' + CHANGELINE +
'<n>' + str(self.n) + '</n>' + CHANGELINE +
'<v>' + str(self.v) + '</v>' + CHANGELINE +
'<y><![CDATA[' + str(self.y) + ']]></y>' + CHANGELINE +
'<p' + self.__write_lock__() + '><![CDATA[' + str(self.p) + ']]></p>' + CHANGELINE +
'<nStyle>' + CHANGELINE + self.nStyle.write2xml() + '</nStyle>' + CHANGELINE)
return writeStr
class VCC():
def __init__(self, params):
[t, ID, v] = params
self.t = t
self.ID = ID
self.v = v
def return_param(self):
return [self.t, self.ID, self.v]
def write2xml(self):
ccSTR = ('<t>' + str(self.t) + '</t><v id="' + str(self.ID) + '">' + str(self.v) + '</v>')
return ccSTR
## for demand in root.getElementsByTagName('DEMAND'):
## for tp in demand.getElementsByTagName('type'):
## print(tp.getAttribute("id")
class sPlug():
def __init__(self, params):
[ID, name, version] = params
self.ID = ID
self.name = name
self.version = version
def return_param(self):
return [self.ID, self.name, self.version]
def write2xml(self):
sPlugStr = ('<id><![CDATA[' + str(self.ID) + ']]></id>' + CHANGELINE +
'<name><![CDATA[' + str(self.name) + ']]></name>' + CHANGELINE +
'<version><![CDATA[' + str(self.version) + ']]></version>' + CHANGELINE)
return sPlugStr
class pStyle():
def __init__(self, params):
[accent, bendDep, bendLen, decay, fallPort, opening, risePort] = params
self.accent = accent
self.bendDep = bendDep
self.bendLen = bendLen
self.decay = decay
self.fallPort = fallPort
self.opening = opening
self.risePort = risePort
def return_param(self):
return [self.accent, self.bendDep, self.bendLen, self.decay, self.fallPort, self.opening, self.risePort]
def write2xml(self):
sPlugStr = ('<v id="accent">' + str(self.accent) + '</v>' + CHANGELINE +
'<v id="bendDep">' + str(self.bendDep) + '</v>' + CHANGELINE +
'<v id="bendLen">' + str(self.bendLen) + '</v>' + CHANGELINE +
'<v id="decay">' + str(self.decay) + '</v>' + CHANGELINE +
'<v id="fallPort">' + str(self.fallPort) + '</v>' + CHANGELINE +
'<v id="opening">' + str(self.opening) + '</v>' + CHANGELINE +
'<v id="risePort">' + str(self.risePort) + '</v>' + CHANGELINE)
return sPlugStr
class singer():
def __init__(self, params):
[t, bs, pc] = params
self.t = t
self.bs = bs
self.pc = pc
def return_param(self):
return [self.t, self.bs, self.pc]
def write2xml(self):
singerSTR = ('<t>' + str(self.t) + '</t>' + CHANGELINE +
'<bs>' + str(self.bs) + '</bs>' + CHANGELINE +
'<pc>' + str(self.pc) + '</pc>' + CHANGELINE)
return singerSTR
class vsPart():
def __init__(self, params):
[t, playTime, name, comment, sPlugs, pStyles, singers, ccs, notes, plane] = params
self.t = t
self.playTime = playTime
self.name = name
self.comment = comment
self.sPlug = sPlug(sPlugs)
self.pStyle = pStyle(pStyles)
self.singer = singer(singers)
self.plane = plane
self.VCC = []
for cc in ccs:
self.VCC.append(VCC(cc))
self.VNote = []
for note in notes:
self.VNote.append(VNOTE(note))
def return_param(self):
return [self.t, self.playTime, self.name, self.comment, self.sPlug, self.pStyle, self.singer, self.ccs,
self.notes, self.plane]
def ChangeVCCID(self, ID):
allID = ['D', 'B', 'R', 'C', 'G', 'T', 'X', 'W', 'P', 'S',
'd', 'b', 'r', 'c', 'g', 't', 'x', 'w', 'p', 's',
'DYN', 'BRN', 'BRI', 'CLE', 'GEN', 'POR', 'XSY', 'GWL', 'PIT', 'PBS',
'dyn', 'brn', 'bri', 'cle', 'gen', 'por', 'xsy', 'gwl', 'pit', 'pbs']
if ID not in allID:
print(str(ID) + 'is not acceptable')
return False
if ID in ['D', 'd', 'DYN', 'dyn']:
return 'D'
if ID in ['B', 'b', 'BRN', 'brn']:
return 'B'
if ID in ['R', 'r', 'BRI', 'bri']:
return 'R'
if ID in ['C', 'c', 'CLE', 'cle']:
return 'C'
if ID in ['G', 'g', 'GEN', 'gen']:
return 'G'
if ID in ['T', 't', 'POR', 'por']:
return 'T'
if ID in ['X', 'x', 'XSY', 'xsy']:
return 'X'
if ID in ['W', 'w', 'GWL', 'gwl']:
return 'W'
if ID in ['P', 'p', 'PIT', 'pit']:
return 'P'
if ID in ['S', 's', 'PBS', 'pbs']:
return 'S'
#!/usr/bin/env python
"""Performance tests for the eipaddress module."""
import sys
import time
import ipaddress as ip
import eipaddress as eip
from sizes import sizeof
# =============================================================================
class TextFx:
'''Terminal strings for text effects.'''
PLAIN = '\033[m'
RESET = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
BLACK = '\033[30m'
RED = '\033[31m'
GREEN = '\033[32m'
YELLOW = '\033[33m'
BLUE = '\033[34m'
MAGENTA = '\033[35m'
CYAN = '\033[36m'
WHITE = '\033[37m'
BG_BLACK = '\033[40m'
BG_RED = '\033[41m'
BG_GREEN = '\033[42m'
BG_YELLOW = '\033[43m'
BG_BLUE = '\033[44m'
BG_MAGENTA = '\033[45m'
BG_CYAN = '\033[46m'
BG_WHITE = '\033[47m'
BOLD_BLACK = '\033[90m'
BOLD_RED = '\033[91m'
BOLD_GREEN = '\033[92m'
BOLD_YELLOW = '\033[93m'
BOLD_BLUE = '\033[94m'
BOLD_MAGENTA = '\033[95m'
BOLD_CYAN = '\033[96m'
BOLD_WHITE = '\033[97m'
UNDERLINE_BLACK = '\033[4;30m'
UNDERLINE_RED = '\033[4;31m'
UNDERLINE_GREEN = '\033[4;32m'
UNDERLINE_YELLOW = '\033[4;33m'
UNDERLINE_BLUE = '\033[4;34m'
UNDERLINE_MAGENTA = '\033[4;35m'
UNDERLINE_CYAN = '\033[4;36m'
UNDERLINE_WHITE = '\033[4;37m'
# =============================================================================
def fn_name(depth = 0):
"""Get the function name from the call stack.
Args:
depth: call stack depth to return, 0=parent, 1=grandparent, etc.
Returns:
The function name from the call stack, at the depth given.
"""
return sys._getframe(depth + 1).f_code.co_name # pylint: disable=W0212
def has_colours(stream):
"""Determine if an output stream supports colours.
Args:
stream: the output stream to check
Returns:
True if more than 2 colours are supported; else False
"""
if hasattr(stream, 'isatty') and stream.isatty():
try:
import curses
curses.setupterm()
return curses.tigetnum('colors') > 2
except Exception:
pass
return False
def timefn(n, fn, *args, **kwargs):
"""Time the execution of a function call.
Args:
n: number of times to call the function
fn: the function to call
args: positional arguments to pass to fn
kwargs: keyword arguments to pass to fn
Returns:
A tuple: (elapsed time, return value from the last call to fn).
"""
start = time.perf_counter_ns()
for i in range(n):
result = fn(*args, **kwargs)
return time.perf_counter_ns() - start, result
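# A usage sketch (function and argument are illustrative):
# elapsed_ns, last_result = timefn(10000, eip.ip_address, '192.0.2.1')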
def timelist(n, fn, *args, **kwargs):
"""Time the execution of generating a list from an iterator function.
Args:
n: number of times to call the function
fn: the function to call
args: positional arguments to pass to fn
kwargs: keyword arguments to pass to fn
Returns:
A tuple: (elapsed time, the last repeated list).
"""
start = time.perf_counter_ns()
for i in range(n):
result = list(fn(*args, **kwargs))
return time.perf_counter_ns() - start, result
def time_multi(n, fns, *args, **kwargs):
"""Time the execution of multiple functions.
Args:
n: number of times to call each function
fns: a list of functions to time
args: positional arguments to pass to each function
kwargs: keyword arguments to pass to each function
Returns:
A list of tuples: (time, result)
with the elapsed time and last return value from each function
"""
results = []
for fn in fns:
start = time.perf_counter_ns()
for i in range(n):
result = fn(*args, **kwargs)
elapsed = time.perf_counter_ns() - start
results.append((elapsed, result))
return results
def time_multi_list(n, fns, *args, **kwargs):
"""Time the execution of generating a list from iterator functions.
Args:
n: number of times to generate each list
fns: a list of iterator functions to time
args: positional arguments to pass to each iterator function
kwargs: keyword arguments to pass to each iterator function
Returns:
A list of tuples: (time, result)
with the elapsed time and result list from the last iteration
"""
results = []
for fn in fns:
start = time.perf_counter_ns()
for i in range(n):
result = list(fn(*args, **kwargs))
elapsed = time.perf_counter_ns() - start
results.append((elapsed, result))
return results
def generic_test(reporter, test_id, n, fns, *args, **kwargs):
"""Run a timed test for each function in fns and report the results.
Args:
reporter: the Reporter object to use
test_id: identifier string for the test
n: number of times to call each function
fns: a list of functions to time
args: positional arguments to pass to each function
kwargs: keyword arguments to pass to each function
"""
results = time_multi(n, fns, *args, **kwargs)
reporter.report(test_id, n, results, str(args))
def generic_list_test(reporter, test_id, n, fns, *args, **kwargs):
"""Generic timed test for generating a list from iterator functions.
Args:
reporter: the Reporter object to use
test_id: identifier string for the test
n: number of times to call each function
fns: a list of functions to time
args: positional arguments to pass to each function
kwargs: keyword arguments to pass to each function
"""
results = time_multi_list(n, fns, *args, **kwargs)
reporter.report(test_id, n, results, str(args))
# =============================================================================
class Reporter(object):
"""Reporter for performance test results."""
def __init__(self, gt_txt='SLOWER', lt_txt='faster'):
"""Initialise the report.
Args:
gt_txt: the reported message if time1 > time2
lt_txt: the reported message if time1 < time2
"""
self.gt_txt = gt_txt
self.lt_txt = lt_txt
self.time1 = 0.0
self.time2 = 0.0
def report(self, test_id, n, results, msg):
"""Report the results.
Args:
test_id: identifier string for the test
n: the number of iterations
results: a tuple ((time1, result1), (time2, result2)) where
time1: the ipaddress library elapsed time
result1: the ipaddress library result
time2: the eipaddress library elapsed time
result2: the eipaddress library result
For memory tests, the time values are actually memory usage
msg: test information
"""
(time1, result1), (time2, result2) = results
self.time1 += time1
self.time2 += time2
fx0 = TextFx.RESET
fx1 = fx2 = ''
if time1 == 0.0 or time2 == 0.0:
ratio = 0.0
summary = 'NO DATA'
elif time1 == time2:
ratio = 1.0
summary = 'EQUAL'
elif time2 < time1:
ratio = time1 / time2
summary = f'{ratio:.2f} times {self.lt_txt}'
else:
ratio = time2 / time1
summary = f'{ratio:.2f} times {self.gt_txt} >>>'
fx1 = TextFx.BOLD_RED
if ratio < 1.02:
fx1 = TextFx.YELLOW
if str(result1) != str(result2):
suffix = f'\n {result1}\n {result2}'
fx2 = TextFx.BOLD_MAGENTA
else:
suffix = ''
if not has_colours(sys.stdout):
fx0 = fx1 = fx2 = ''
print(f'{fx1}{test_id}: {msg}')
pc = (time2 * 100) / time1 if time1 else 0.0
print(f'({n:7d}) {time1:>11,.0f} -> {time2:>11,.0f} {pc:6.1f}% '
f'{summary}{fx2}{suffix}{fx0}')
@staticmethod
def group_report(name, group, gt_txt='SLOWER', lt_txt='faster', quiet=True):
"""Report a summary of a group of results.
Args:
name: a name for the group
group: the Reporter objects in this group
gt_txt: the reported message if time1 > time2
lt_txt: the reported message if time1 < time2
quiet: if True, suppress reports with no data
"""
time1 = sum(x.time1 for x in group)
time2 = sum(x.time2 for x in group)
if quiet and time1 == 0.0 and time2 == 0.0:
return
fx0 = TextFx.RESET
fx1 = ''
if time1 == 0.0 or time2 == 0.0:
ratio = 0.0
summary = 'NO DATA'
elif time1 == time2:
ratio = 1.0
summary = 'EQUAL'
elif time2 < time1:
ratio = time1 / time2
summary = f'{ratio:.2f} times {lt_txt}'
else:
ratio = time2 / time1
summary = f'{ratio:.2f} times {gt_txt} >>>'
fx1 = TextFx.BOLD_RED
if ratio < 1.02:
fx1 = TextFx.YELLOW
if not has_colours(sys.stdout):
fx0 = fx1 = ''
pc = (time2 * 100) / time1 if time1 else 0.0
print(f'{fx1}{name:14} {time1:>14,.0f} -> {time2:>14,.0f} {pc:6.1f}% '
f'{summary}{fx0}')
# =============================================================================
class PerfTest(object):
"""Performance tests for the eipaddress module."""
def __init__(self):
"""Instantiate: build a list of test methods."""
self._tests = [(name, fn)
for name, fn in sorted(self.__class__.__dict__.items())
if name.startswith('test_')]
self.report_u = Reporter() # for utility functions
self.report_4a = Reporter() # for IPv4 Address
self.report_4n = Reporter() # for IPv4 Network
self.report_4i = Reporter() # for IPv4 Interface
self.report_6a = Reporter() # for IPv6 Address
self.report_6n = Reporter() # for IPv6 Network
self.report_6i = Reporter() # for IPv6 Interface
self.report_m = Reporter(gt_txt='BIGGER', lt_txt='smaller') # for memory
def run(self, matches=None):
"""Run the tests.
Args:
matches: sequence of strings to match test names to be run
"""
for name, fn in self._tests:
if matches:
for match in matches:
if match in name:
fn(self)
break
else:
fn(self)
# summarise by type
utils = self.report_u,
addresses = self.report_4a, self.report_6a
networks = self.report_4n, self.report_6n
interfaces = self.report_4i, self.report_6i
v4 = self.report_4a, self.report_4n, self.report_4i
v6 = self.report_6a, self.report_6n, self.report_6i
total = utils + v4 + v6
Reporter.group_report('IPv4Address', [self.report_4a])
Reporter.group_report('IPv4Network', [self.report_4n])
Reporter.group_report('IPv4Interface', [self.report_4i])
Reporter.group_report('IPv6Address', [self.report_6a])
Reporter.group_report('IPv6Network', [self.report_6n])
Reporter.group_report('IPv6Interface', [self.report_6i])
Reporter.group_report('Memory', [self.report_m], 'BIGGER', 'smaller')
Reporter.group_report('Utils', utils)
Reporter.group_report('Addresses', addresses)
Reporter.group_report('Networks', networks)
Reporter.group_report('Interfaces', interfaces)
Reporter.group_report('IPv4', v4)
Reporter.group_report('IPv6', v6)
Reporter.group_report('TOTAL', total)
# File: im2mesh/common.py
from numpy.lib.twodim_base import tri
import torch
from im2mesh.utils.libkdtree import KDTree
import numpy as np
import logging
from copy import deepcopy
logger_py = logging.getLogger(__name__)
def rgb2gray(rgb):
''' rgb of size B x h x w x 3
'''
r, g, b = rgb[:, :, :, 0], rgb[:, :, :, 1], rgb[:, :, :, 2]
gray = 0.2989 * r + 0.5870 * g + 0.1140 * b
return gray
def sample_patch_points(batch_size, n_points, patch_size=1,
image_resolution=(128, 128), continuous=True):
''' Returns sampled points in the range [-1, 1].
Args:
batch_size (int): required batch size
n_points (int): number of points to sample
patch_size (int): size of patch; if > 1, patches of size patch_size
are sampled instead of individual points
image_resolution (tuple): image resolution (required for calculating
the pixel distances)
continuous (bool): whether to sample continuously or only on pixel
locations
'''
assert(patch_size > 0)
# Calculate step size for [-1, 1] that is equivalent to a pixel in
# original resolution
h_step = 1. / image_resolution[0]
w_step = 1. / image_resolution[1]
# Get number of patches
patch_size_squared = patch_size ** 2
n_patches = int(n_points / patch_size_squared)
if continuous:
p = torch.rand(batch_size, n_patches, 2) # [0, 1]
else:
px = torch.randint(0, image_resolution[1], size=(
batch_size, n_patches, 1)).float() / (image_resolution[1] - 1)
py = torch.randint(0, image_resolution[0], size=(
batch_size, n_patches, 1)).float() / (image_resolution[0] - 1)
p = torch.cat([px, py], dim=-1)
# Scale p to [0, (1 - (patch_size - 1) * step) ]
p[:, :, 0] *= 1 - (patch_size - 1) * w_step
p[:, :, 1] *= 1 - (patch_size - 1) * h_step
# Add points
patch_arange = torch.arange(patch_size)
x_offset, y_offset = torch.meshgrid(patch_arange, patch_arange)
patch_offsets = torch.stack(
[x_offset.reshape(-1), y_offset.reshape(-1)],
dim=1).view(1, 1, -1, 2).repeat(batch_size, n_patches, 1, 1).float()
patch_offsets[:, :, :, 0] *= w_step
patch_offsets[:, :, :, 1] *= h_step
# Add patch_offsets to points
p = p.view(batch_size, n_patches, 1, 2) + patch_offsets
# Scale to [-1, 1]
p = p * 2 - 1
p = p.view(batch_size, -1, 2)
amax, amin = p.max(), p.min()
assert(amax <= 1. and amin >= -1.)
return p
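# A shape sketch: with batch_size=2, n_points=16, patch_size=2 the result
# is a (2, 16, 2) tensor of xy points in [-1, 1], grouped as four 2x2 patches.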
def get_proposal_points_in_unit_cube(ray0, ray_direction, padding=0.1,
eps=1e-6, n_steps=40):
''' Returns n_steps equally spaced points inside the unit cube on the rays
cast from ray0 with direction ray_direction.
This function is used to get the ray marching points {p^ray_j} for a given
camera position ray0 and
a given ray direction ray_direction which goes from the camera_position to
the pixel location.
NOTE: The returned values d_proposal are the lengths of the ray:
p^ray_j = ray0 + d_proposal_j * ray_direction
Args:
ray0 (tensor): Start positions of the rays
ray_direction (tensor): Directions of rays
padding (float): Padding which is applied to the unit cube
eps (float): The epsilon value for numerical stability
n_steps (int): number of steps
'''
batch_size, n_pts, _ = ray0.shape
device = ray0.device
p_intervals, d_intervals, mask_inside_cube = \
check_ray_intersection_with_unit_cube(ray0, ray_direction, padding,
eps)
d_proposal = d_intervals[:, :, 0].unsqueeze(-1) + \
torch.linspace(0, 1, steps=n_steps).to(device).view(1, 1, -1) * \
(d_intervals[:, :, 1] - d_intervals[:, :, 0]).unsqueeze(-1)
d_proposal = d_proposal.unsqueeze(-1)
return d_proposal, mask_inside_cube
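# Usage sketch (illustrative): a single ray starting outside the cube and
# pointing along +z; d_proposal holds 8 ray lengths with
# p^ray_j = ray0 + d_proposal_j * ray_direction.
def _example_proposal_points():
    ray0 = torch.tensor([[[0., 0., -2.]]])
    ray_direction = torch.tensor([[[0., 0., 1.]]])
    d_proposal, mask = get_proposal_points_in_unit_cube(
        ray0, ray_direction, n_steps=8)
    assert d_proposal.shape == (1, 1, 8, 1)
    return d_proposal, mask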
def check_ray_intersection_with_unit_cube(ray0, ray_direction, padding=0.1,
eps=1e-6):
''' Checks if rays ray0 + d * ray_direction intersect with unit cube with
padding padding.
It returns the two intersection points as well as the sorted ray lengths d.
Args:
ray0 (tensor): Start positions of the rays
ray_direction (tensor): Directions of rays
padding (float): Padding which is applied to the unit cube
eps (float): The epsilon value for numerical stability
'''
batch_size, n_pts, _ = ray0.shape
device = ray0.device
# calculate intersections with unit cube (< . , . > is the dot product)
# <n, x - p> = <n, ray0 + d * ray_direction - p_e> = 0
# d = - <n, ray0 - p_e> / <n, ray_direction>
# Get points on plane p_e
p_distance = 0.5 + padding/2
p_e = torch.ones(batch_size, n_pts, 6).to(device) * p_distance
p_e[:, :, 3:] *= -1.
# Calculate the intersection points with given formula
numerator = p_e - ray0.repeat(1, 1, 2)
denominator = ray_direction.repeat(1, 1, 2)
d_intersect = numerator / denominator
p_intersect = ray0.unsqueeze(-2) + d_intersect.unsqueeze(-1) * \
ray_direction.unsqueeze(-2)
# Calculate mask where points intersect unit cube
p_mask_inside_cube = (
(p_intersect[:, :, :, 0] <= p_distance + eps) &
(p_intersect[:, :, :, 1] <= p_distance + eps) &
(p_intersect[:, :, :, 2] <= p_distance + eps) &
(p_intersect[:, :, :, 0] >= -(p_distance + eps)) &
(p_intersect[:, :, :, 1] >= -(p_distance + eps)) &
(p_intersect[:, :, :, 2] >= -(p_distance + eps))
).cpu()
# Correct rays are these which intersect exactly 2 times
mask_inside_cube = p_mask_inside_cube.sum(-1) == 2
# Get interval values for p's which are valid
p_intervals = p_intersect[mask_inside_cube][p_mask_inside_cube[
mask_inside_cube]].view(-1, 2, 3)
p_intervals_batch = torch.zeros(batch_size, n_pts, 2, 3).to(device)
p_intervals_batch[mask_inside_cube] = p_intervals
# Calculate ray lengths for the interval points
d_intervals_batch = torch.zeros(batch_size, n_pts, 2).to(device)
norm_ray = torch.norm(ray_direction[mask_inside_cube], dim=-1)
d_intervals_batch[mask_inside_cube] = torch.stack([
torch.norm(p_intervals[:, 0] -
ray0[mask_inside_cube], dim=-1) / norm_ray,
torch.norm(p_intervals[:, 1] -
ray0[mask_inside_cube], dim=-1) / norm_ray,
], dim=-1)
# Sort the ray lengths
d_intervals_batch, indices_sort = d_intervals_batch.sort()
p_intervals_batch = p_intervals_batch[
torch.arange(batch_size).view(-1, 1, 1),
torch.arange(n_pts).view(1, -1, 1),
indices_sort
]
return p_intervals_batch, d_intervals_batch, mask_inside_cube
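# Sanity-check sketch (illustrative): for a ray that hits the cube, the
# returned ray lengths reproduce the intersection points via
# p = ray0 + d * ray_direction.
def _example_cube_intersection():
    ray0 = torch.tensor([[[0., 0., -2.]]])
    ray_direction = torch.tensor([[[0., 0., 1.]]])
    p, d, mask = check_ray_intersection_with_unit_cube(ray0, ray_direction)
    assert bool(mask[0, 0])
    recon = ray0.unsqueeze(-2) + d.unsqueeze(-1) * ray_direction.unsqueeze(-2)
    assert torch.allclose(recon[mask], p[mask], atol=1e-5)
    return p, d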
def intersect_camera_rays_with_unit_cube(
pixels, camera_mat, world_mat, scale_mat, padding=0.1, eps=1e-6,
use_ray_length_as_depth=True):
''' Returns the intersection points of ray cast from camera origin to
pixel points p on the image plane.
The function returns the intersection points as well the depth values and
a mask specifying which ray intersects the unit cube.
Args:
pixels (tensor): Pixel points on image plane (range [-1, 1])
camera_mat (tensor): camera matrix
world_mat (tensor): world matrix
scale_mat (tensor): scale matrix
padding (float): Padding which is applied to the unit cube
eps (float): The epsilon value for numerical stability
'''
batch_size, n_points, _ = pixels.shape
pixel_world = image_points_to_world(
pixels, camera_mat, world_mat, scale_mat)
camera_world = origin_to_world(
n_points, camera_mat, world_mat, scale_mat)
ray_vector = (pixel_world - camera_world)
p_cube, d_cube, mask_cube = check_ray_intersection_with_unit_cube(
camera_world, ray_vector, padding=padding, eps=eps)
if not use_ray_length_as_depth:
p_cam = transform_to_camera_space(p_cube.view(
batch_size, -1, 3), camera_mat, world_mat, scale_mat).view(
batch_size, n_points, -1, 3)
d_cube = p_cam[:, :, :, -1]
return p_cube, d_cube, mask_cube
def arange_pixels(resolution=(128, 128), batch_size=1, image_range=(-1., 1.),
subsample_to=None):
''' Arranges pixels for given resolution in range image_range.
The function returns the unscaled pixel locations as integers and the
scaled float values.
Args:
resolution (tuple): image resolution
batch_size (int): batch size
image_range (tuple): range of output points (default [-1, 1])
subsample_to (int): if integer and > 0, the points are randomly
subsampled to this value
'''
h, w = resolution
n_points = resolution[0] * resolution[1]
# Arrange pixel location in scale resolution
pixel_locations = torch.meshgrid(torch.arange(0, w), torch.arange(0, h))
pixel_locations = torch.stack(
[pixel_locations[0], pixel_locations[1]],
dim=-1).long().view(1, -1, 2).repeat(batch_size, 1, 1)
pixel_scaled = pixel_locations.clone().float()
# Shift and scale points to match image_range
scale = (image_range[1] - image_range[0])
loc = scale / 2
pixel_scaled[:, :, 0] = scale * pixel_scaled[:, :, 0] / (w - 1) - loc
pixel_scaled[:, :, 1] = scale * pixel_scaled[:, :, 1] / (h - 1) - loc
# Subsample points if subsample_to is not None and > 0
if (subsample_to is not None and subsample_to > 0 and
subsample_to < n_points):
idx = np.random.choice(pixel_scaled.shape[1], size=(subsample_to,),
replace=False)
pixel_scaled = pixel_scaled[:, idx]
pixel_locations = pixel_locations[:, idx]
return pixel_locations, pixel_scaled
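# Usage sketch (illustrative): a 4x4 pixel grid subsampled to 8 points;
# returns integer pixel locations and the matching [-1, 1] coordinates.
def _example_arange_pixels():
    loc, scaled = arange_pixels(resolution=(4, 4), batch_size=1,
                                subsample_to=8)
    assert loc.shape == (1, 8, 2) and scaled.shape == (1, 8, 2)
    return loc, scaled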
def to_pytorch(tensor, return_type=False):
''' Converts input tensor to pytorch.
Args:
tensor (tensor): Numpy or Pytorch tensor
return_type (bool): whether to return input type
'''
is_numpy = False
if type(tensor) == np.ndarray:
tensor = torch.from_numpy(tensor)
is_numpy = True
tensor = tensor.clone()
if return_type:
return tensor, is_numpy
return tensor
def get_mask(tensor):
''' Returns mask of non-illegal values for tensor.
Args:
tensor (tensor): Numpy or Pytorch tensor
'''
tensor, is_numpy = to_pytorch(tensor, True)
mask = (abs(tensor) != np.inf) & ~torch.isnan(tensor)
mask = mask.bool()
if is_numpy:
mask = mask.numpy()
return mask
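# Quick illustration (not part of the original module): inf and NaN entries
# are masked out; numpy input yields a numpy boolean mask.
def _example_get_mask():
    m = get_mask(np.array([1.0, np.inf, np.nan]))
    assert m.tolist() == [True, False, False]
    return m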
def transform_mesh(mesh, transform):
''' Transforms a mesh with given transformation.
Args:
mesh (trimesh mesh): mesh
transform (tensor): transformation matrix of size 4 x 4
'''
mesh = deepcopy(mesh)
v = np.asarray(mesh.vertices).astype(np.float32)
v_transformed = transform_pointcloud(v, transform)
mesh.vertices = v_transformed
return mesh
from graphcnn.helper import *
from graphcnn.network import *
from graphcnn.layers import *
from sklearn.model_selection import KFold
import numpy as np
import tensorflow as tf
import glob
import time
from tensorflow.python.training import queue_runner
# This function creates a tf.cond-compatible alternative to tf.train.batch
def _make_batch_queue(input, capacity, num_threads=1):
queue = tf.PaddingFIFOQueue(capacity=capacity, dtypes=[s.dtype for s in input], shapes=[s.get_shape() for s in input])
tf.summary.scalar("fraction_of_%d_full" % capacity,
tf.cast(queue.size(), tf.float32) *
(1. / capacity))
enqueue_ops = [queue.enqueue(input)]*num_threads
queue_runner.add_queue_runner(queue_runner.QueueRunner(queue, enqueue_ops))
return queue
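# Illustrative sketch (hypothetical shapes, not part of the original module):
# wrap one fully-shaped sample in a batch queue and dequeue a mini-batch.
# Actually running it requires a tf.Session with
# tf.train.start_queue_runners(), as done in GraphCNNExperiment.run() below.
def _example_batch_queue():
    sample = [tf.constant(np.zeros((5, 3), np.float32)),
              tf.constant(0, dtype=tf.int64)]
    queue = _make_batch_queue(sample, capacity=8, num_threads=2)
    return queue.dequeue_many(4)  # tensors with a leading batch dimension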
# This class is responsible for setting up and running experiments
# Also provides helper functions related to experiments (e.g. get accuracy)
class GraphCNNExperiment(object):
def __init__(self, dataset_name, model_name, net_constructor):
# Initialize all defaults
self.dataset_name = dataset_name
self.model_name = model_name
self.num_iterations = 200
self.iterations_per_test = 5
self.display_iter = 5
self.snapshot_iter = 1000000
self.train_batch_size = 0
self.test_batch_size = 0
self.crop_if_possible = True
self.debug = False
self.starter_learning_rate = 0.1
self.learning_rate_exp = 0.1
self.learning_rate_step = 1000
self.reports = {}
self.silent = False
self.optimizer = 'momentum'
self.net_constructor = net_constructor
self.net = GraphCNNNetwork()
self.net_desc = GraphCNNNetworkDescription()
tf.reset_default_graph()
# print_ext can be disabled through the silent flag
def print_ext(self, *args):
if self.silent == False:
print_ext(*args)
# Will retrieve the value stored as the maximum test accuracy on a trained network
# SHOULD ONLY BE USED IF test_batch_size == ALL TEST SAMPLES
def get_max_accuracy(self):
tf.reset_default_graph()
with tf.variable_scope('loss') as scope:
max_acc_test = tf.Variable(tf.zeros([]), name="max_acc_test")
saver = tf.train.Saver()
with tf.Session() as sess:
max_it = self.load_model(sess, saver)
return sess.run(max_acc_test), max_it
# Run all folds in a CV and calculate mean/std
def run_kfold_experiments(self, no_folds=10):
acc = []
self.net_constructor.create_network(self.net_desc, [])
desc = self.net_desc.get_description()
self.print_ext('Running CV for:', desc)
start_time = time.time()
for i in range(no_folds):
tf.reset_default_graph()
self.set_kfold(no_folds=no_folds, fold_id=i)
cur_max, max_it = self.run()
self.print_ext('Fold %d max accuracy: %g at %d' % (i, cur_max, max_it))
acc.append(cur_max)
acc = np.array(acc)
mean_acc= np.mean(acc)*100
std_acc = np.std(acc)*100
self.print_ext('Result is: %.2f (+- %.2f)' % (mean_acc, std_acc))
verify_dir_exists('./results/')
with open('./results/%s.txt' % self.dataset_name, 'a+') as file:
file.write('%s\t%s\t%d-fold\t%d seconds\t%.2f (+- %.2f)\n' % (str(datetime.now()), desc, no_folds, time.time()-start_time, mean_acc, std_acc))
return mean_acc, std_acc
# Prepares samples for experiment, accepts a list (vertices, adjacency, labels) where:
# vertices = list of NxC matrices where C is the same over all samples, N can be different between samples
# adjacency = list of NxLxN tensors containing L NxN adjacency matrices of the given samples
# labels = list of sample labels
# len(vertices) == len(adjacency) == len(labels)
def preprocess_data(self, dataset):
self.graph_size = np.array([s.shape[0] for s in dataset[0]]).astype(np.int64)
self.largest_graph = max(self.graph_size)
self.print_ext('Padding samples')
self.graph_vertices = []
self.graph_adjacency = []
for i in range(len(dataset[0])):
# pad all vertices to match size
self.graph_vertices.append(np.pad(dataset[0][i].astype(np.float32), ((0, self.largest_graph-dataset[0][i].shape[0]), (0, 0)), 'constant', constant_values=(0)))
# pad all adjacency matrices to match size
self.graph_adjacency.append(np.pad(dataset[1][i].astype(np.float32), ((0, self.largest_graph-dataset[1][i].shape[0]), (0, 0), (0, self.largest_graph-dataset[1][i].shape[0])), 'constant', constant_values=(0)))
self.print_ext('Stacking samples')
self.graph_vertices = np.stack(self.graph_vertices, axis=0)
self.graph_adjacency = np.stack(self.graph_adjacency, axis=0)
self.graph_labels = dataset[2].astype(np.int64)
self.no_samples = self.graph_labels.shape[0]
single_sample = [self.graph_vertices, self.graph_adjacency, self.graph_labels, self.graph_size]
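# Illustrative sketch of the dataset layout described above (toy values,
# not part of the original class): two graphs with C=3 vertex features and
# L=1 adjacency slice each; N may differ between samples.
def _example_preprocess(experiment):
    vertices = [np.random.rand(4, 3), np.random.rand(6, 3)]
    adjacency = [np.random.rand(4, 1, 4), np.random.rand(6, 1, 6)]
    labels = np.array([0, 1])
    experiment.preprocess_data((vertices, adjacency, labels))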
# Create CV information
def set_kfold(self, no_folds = 10, fold_id = 0):
inst = KFold(n_splits = no_folds, shuffle=True, random_state=125)
self.fold_id = fold_id
self.KFolds = list(inst.split(np.arange(self.no_samples)))
self.train_idx, self.test_idx = self.KFolds[fold_id]
self.no_samples_train = self.train_idx.shape[0]
self.no_samples_test = self.test_idx.shape[0]
self.print_ext('Data ready. no_samples_train:', self.no_samples_train, 'no_samples_test:', self.no_samples_test)
if self.train_batch_size == 0:
self.train_batch_size = self.no_samples_train
if self.test_batch_size == 0:
self.test_batch_size = self.no_samples_test
self.train_batch_size = min(self.train_batch_size, self.no_samples_train)
self.test_batch_size = min(self.test_batch_size, self.no_samples_test)
# Crop a single sample before batching:
# slicing each sample to its true graph size improves performance
def crop_single_sample(self, single_sample):
vertices = tf.slice(single_sample[0], np.array([0, 0], dtype=np.int64), tf.cast(tf.stack([single_sample[3], -1]), tf.int64))
vertices.set_shape([None, self.graph_vertices.shape[2]])
adjacency = tf.slice(single_sample[1], np.array([0, 0, 0], dtype=np.int64), tf.cast(tf.stack([single_sample[3], -1, single_sample[3]]), tf.int64))
adjacency.set_shape([None, self.graph_adjacency.shape[2], None])
# V, A, labels, mask
return [vertices, adjacency, single_sample[2], tf.expand_dims(tf.ones(tf.slice(tf.shape(vertices), [0], [1])), axis=-1)]
def create_input_variable(self, input):
for i in range(len(input)):
placeholder = tf.placeholder(tf.as_dtype(input[i].dtype), shape=input[i].shape)
var = tf.Variable(placeholder, trainable=False, collections=[tf.GraphKeys.LOCAL_VARIABLES])
self.variable_initialization[placeholder] = input[i]
input[i] = var
return input
# Create input_producers and batch queues
def create_data(self):
with tf.device("/cpu:0"):
with tf.variable_scope('input') as scope:
# Create the training queue
with tf.variable_scope('train_data') as scope:
self.print_ext('Creating training Tensorflow Tensors')
# Create tensor with all training samples
training_samples = [self.graph_vertices, self.graph_adjacency, self.graph_labels, self.graph_size]
training_samples = [s[self.train_idx, ...] for s in training_samples]
if self.crop_if_possible == False:
training_samples[3] = get_node_mask(training_samples[3], max_size=self.graph_vertices.shape[1])
# Wrap samples in non-trainable variables (fed once via placeholders)
training_samples = self.create_input_variable(training_samples)
# Slice first dimension to obtain samples
single_sample = tf.train.slice_input_producer(training_samples, shuffle=True, capacity=self.train_batch_size)
# Cropping samples improves performance but is not required
if self.crop_if_possible:
self.print_ext('Cropping smaller graphs')
single_sample = self.crop_single_sample(single_sample)
# creates training batch queue
train_queue = _make_batch_queue(single_sample, capacity=self.train_batch_size*2, num_threads=6)
# Create the test queue
with tf.variable_scope('test_data') as scope:
self.print_ext('Creating test Tensorflow Tensors')
# Create tensor with all test samples
test_samples = [self.graph_vertices, self.graph_adjacency, self.graph_labels, self.graph_size]
test_samples = [s[self.test_idx, ...] for s in test_samples]
# If using mini-batch we will need a queue
if self.test_batch_size != self.no_samples_test:
if self.crop_if_possible == False:
test_samples[3] = get_node_mask(test_samples[3], max_size=self.graph_vertices.shape[1])
test_samples = self.create_input_variable(test_samples)
single_sample = tf.train.slice_input_producer(test_samples, shuffle=True, capacity=self.test_batch_size)
if self.crop_if_possible:
single_sample = self.crop_single_sample(single_sample)
test_queue = _make_batch_queue(single_sample, capacity=self.test_batch_size*2, num_threads=1)
# If using full-batch no need for queues
else:
test_samples[3] = get_node_mask(test_samples[3], max_size=self.graph_vertices.shape[1])
test_samples = self.create_input_variable(test_samples)
# obtain batch depending on is_training and if test is a queue
if self.test_batch_size == self.no_samples_test:
return tf.cond(self.net.is_training, lambda: train_queue.dequeue_many(self.train_batch_size), lambda: test_samples)
return tf.cond(self.net.is_training, lambda: train_queue.dequeue_many(self.train_batch_size), lambda: test_queue.dequeue_many(self.test_batch_size))
# Function called with the output of the Graph-CNN model
# Should add the loss to the 'losses' collection and add any summaries needed (e.g. accuracy)
def create_loss_function(self):
with tf.variable_scope('loss') as scope:
self.print_ext('Creating loss function and summaries')
cross_entropy = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=self.net.current_V, labels=self.net.labels))
correct_prediction = tf.cast(tf.equal(tf.argmax(self.net.current_V, 1), self.net.labels), tf.float32)
accuracy = tf.reduce_mean(correct_prediction)
# we have 2 variables that will keep track of the best accuracy obtained in training/testing batch
# SHOULD ONLY BE USED IF test_batch_size == ALL TEST SAMPLES
self.max_acc_train = tf.Variable(tf.zeros([]), name="max_acc_train")
self.max_acc_test = tf.Variable(tf.zeros([]), name="max_acc_test")
max_acc = tf.cond(self.net.is_training, lambda: tf.assign(self.max_acc_train, tf.maximum(self.max_acc_train, accuracy)), lambda: tf.assign(self.max_acc_test, tf.maximum(self.max_acc_test, accuracy)))
tf.add_to_collection('losses', cross_entropy)
tf.summary.scalar('accuracy', accuracy)
tf.summary.scalar('max_accuracy', max_acc)
tf.summary.scalar('cross_entropy', cross_entropy)
# if silent == false display these statistics:
self.reports['accuracy'] = accuracy
self.reports['max acc.'] = max_acc
self.reports['cross_entropy'] = cross_entropy
# check if the model has a saved iteration and return the latest iteration step
def check_model_iteration(self):
latest = tf.train.latest_checkpoint(self.snapshot_path)
if latest is None:
return 0
return int(latest[len(self.snapshot_path + 'model-'):])
# load_model if any checkpoint exists
def load_model(self, sess, saver):
latest = tf.train.latest_checkpoint(self.snapshot_path)
if latest is None:
return 0
saver.restore(sess, latest)
i = int(latest[len(self.snapshot_path + 'model-'):])
self.print_ext("Model restored at %d." % i)
return i
def save_model(self, sess, saver, i):
latest = tf.train.latest_checkpoint(self.snapshot_path)
if latest is None or i != int(latest[len(self.snapshot_path + 'model-'):]):
self.print_ext('Saving model at %d' % i)
verify_dir_exists(self.snapshot_path)
result = saver.save(sess, self.snapshot_path + 'model', global_step=i)
self.print_ext('Model saved to %s' % result)
# Create graph (input, network, loss)
# Handle checkpoints
# Report summaries if silent == false
# start/end threads
def run(self):
self.variable_initialization = {}
self.print_ext('Training model "%s"!' % self.model_name)
if hasattr(self, 'fold_id') and self.fold_id is not None:
self.snapshot_path = './snapshots/%s/%s/' % (self.dataset_name, self.model_name + '_fold%d' % self.fold_id)
self.test_summary_path = './summary/%s/test/%s_fold%d' %(self.dataset_name, self.model_name, self.fold_id)
self.train_summary_path = './summary/%s/train/%s_fold%d' %(self.dataset_name, self.model_name, self.fold_id)
else:
self.snapshot_path = './snapshots/%s/%s/' % (self.dataset_name, self.model_name)
self.test_summary_path = './summary/%s/test/%s' %(self.dataset_name, self.model_name)
self.train_summary_path = './summary/%s/train/%s' %(self.dataset_name, self.model_name)
if self.debug:
i = 0
else:
i = self.check_model_iteration()
if i < self.num_iterations:
self.print_ext('Creating training network')
self.net.is_training = tf.placeholder(tf.bool, shape=())
self.net.global_step = tf.Variable(0,name='global_step',trainable=False)
input = self.create_data()
self.net_constructor.create_network(self.net, input)
self.create_loss_function()
self.print_ext('Preparing training')
loss = tf.add_n(tf.get_collection('losses'))
if len(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)) > 0:
loss += tf.add_n(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
if self.optimizer == 'adam':
train_step = tf.train.AdamOptimizer().minimize(loss, global_step=self.net.global_step)
else:
self.learning_rate = tf.train.exponential_decay(self.starter_learning_rate, self.net.global_step, self.learning_rate_step, self.learning_rate_exp, staircase=True)
train_step = tf.train.MomentumOptimizer(self.learning_rate, 0.9).minimize(loss, global_step=self.net.global_step)
self.reports['lr'] = self.learning_rate
tf.summary.scalar('learning_rate', self.learning_rate)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer(), self.variable_initialization)
if self.debug == False:
saver = tf.train.Saver()
self.load_model(sess, saver)
self.print_ext('Starting summaries')
test_writer = tf.summary.FileWriter(self.test_summary_path, sess.graph)
train_writer = tf.summary.FileWriter(self.train_summary_path, sess.graph)
summary_merged = tf.summary.merge_all()
self.print_ext('Starting threads')
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
self.print_ext('Starting training. train_batch_size:', self.train_batch_size, 'test_batch_size:', self.test_batch_size)
wasKeyboardInterrupt
moves = '8h8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w8h)and Wboard.w7i=='':
moves = '8h7i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w8h)and Wboard.w9i=='':
moves = '8h9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w7h=='':
moves = '8h7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w9h=='':
moves = '8h9h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w8g=='':
moves = '8h8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w8h)and Wboard.w7g=='':
moves = '8h7g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w8h)and Wboard.w9g=='':
moves = '8h9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w8f==''\
and board.s8e=='':
moves = '8h8f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w8f==''\
and board.s8e=='':
moves = '8h8f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w8e==''\
and board.s8g+board.s8f=='':
moves = '8h8e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w8e==''\
and board.s8g+board.s8f=='':
moves = '8h8e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w8d==''\
and board.s8g+board.s8f+board.s8e=='':
moves = '8h8d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w8d==''\
and board.s8g+board.s8f+board.s8e=='':
moves = '8h8d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w8c==''\
and board.s8g+board.s8f+board.s8e+board.s8d=='':
moves = '8h8c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w8c==''\
and board.s8g+board.s8f+board.s8e+board.s8d=='':
moves = '8h8c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w8b==''\
and board.s8g+board.s8f+board.s8e+board.s8d+board.s8c=='':
moves = '8h8b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w8b==''\
and board.s8g+board.s8f+board.s8e+board.s8d+board.s8c=='':
moves = '8h8b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w8a==''\
and board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
moves = '8h8a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w8a==''\
and board.s8g+board.s8f+board.s8e+board.s8d+board.s8c+board.s8b=='':
moves = '8h8a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w6h==''\
and board.s7h=='':
moves = '8h6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w6h==''\
and board.s7h=='':
moves = '8h6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w5h==''\
and board.s7h+board.s6h=='':
moves = '8h5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w5h==''\
and board.s7h+board.s6h=='':
moves = '8h5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w4h==''\
and board.s7h+board.s6h+board.s5h=='':
moves = '8h4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w4h==''\
and board.s7h+board.s6h+board.s5h=='':
moves = '8h4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w3h==''\
and board.s7h+board.s6h+board.s5h+board.s4h=='':
moves = '8h3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w3h==''\
and board.s7h+board.s6h+board.s5h+board.s4h=='':
moves = '8h3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w2h==''\
and board.s7h+board.s6h+board.s5h+board.s4h+board.s3h=='':
moves = '8h2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w2h==''\
and board.s7h+board.s6h+board.s5h+board.s4h+board.s3h=='':
moves = '8h2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w8h)and Wboard.w1h==''\
and board.s7h+board.s6h+board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '8h1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w8h)and Wboard.w1h==''\
and board.s7h+board.s6h+board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '8h1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8h)and Wboard.w6f==''\
and board.s7g=='':
moves = '8h6f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8h)and Wboard.w5e==''\
and board.s7g+board.s6f=='':
moves = '8h5e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8h)and Wboard.w4d==''\
and board.s7g+board.s6f+board.s5e=='':
moves = '8h4d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8h)and Wboard.w3c==''\
and board.s7g+board.s6f+board.s5e+board.s4d=='':
moves = '8h3c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8h)and Wboard.w2b==''\
and board.s7g+board.s6f+board.s5e+board.s4d+board.s3c=='':
moves = '8h2b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w8h)and Wboard.w1a==''\
and board.s7g+board.s6f+board.s5e+board.s4d+board.s3c+board.s2b=='':
moves = '8h1a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w8h)and Wboard.w6f==''\
and board.s7g=='':
moves = '8h6f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w8h)and Wboard.w5e==''\
and board.s7g+board.s6f=='':
moves = '8h5e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w8h)and Wboard.w4d==''\
and board.s7g+board.s6f+board.s5e=='':
moves = '8h4d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w8h)and Wboard.w3c==''\
and board.s7g+board.s6f+board.s5e+board.s4d=='':
moves = '8h3c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w8h)and Wboard.w2b==''\
and board.s7g+board.s6f+board.s5e+board.s4d+board.s3c=='':
moves = '8h2b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w8h)and Wboard.w1a==''\
and board.s7g+board.s6f+board.s5e+board.s4d+board.s3c+board.s2b=='':
moves = '8h1a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if Wboard.w9h !='':
if re.match(r'[sgk+]', Wboard.w9h)and Wboard.w9i=='':
moves = '9h9i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[sgk+]', Wboard.w9h)and Wboard.w8i=='':
moves = '9h8i'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w9h)and Wboard.w8h=='':
moves = '9h8h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[gk+]', Wboard.w9h)and Wboard.w9g=='':
moves = '9h9g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r|\+b|s|k',Wboard.w9h)and Wboard.w8g=='':
moves = '9h8g'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[plsr]', Wboard.w9h)and Wboard.w9i=='':
moves = '9h9i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w9h)and Wboard.w8i=='':
moves = '9h8i+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w8h=='':
moves = '9h8h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w9g=='':
moves = '9h9g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'[bs]', Wboard.w9h)and Wboard.w8g=='':
moves = '9h8g+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w9f==''\
and board.s9e=='':
moves = '9h9f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w9f==''\
and board.s9e=='':
moves = '9h9f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w9e==''\
and board.s9g+board.s9f=='':
moves = '9h9e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w9e==''\
and board.s9g+board.s9f=='':
moves = '9h9e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w9d==''\
and board.s9g+board.s9f+board.s9e=='':
moves = '9h9d'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w9d==''\
and board.s9g+board.s9f+board.s9e=='':
moves = '9h9d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w9c==''\
and board.s9g+board.s9f+board.s9e+board.s9d=='':
moves = '9h9c'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w9c==''\
and board.s9g+board.s9f+board.s9e+board.s9d=='':
moves = '9h9c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w9b==''\
and board.s9g+board.s9f+board.s9e+board.s9d+board.s9c=='':
moves = '9h9b'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w9b==''\
and board.s9g+board.s9f+board.s9e+board.s9d+board.s9c=='':
moves = '9h9b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w9a==''\
and board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
moves = '9h9a'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w9a==''\
and board.s9g+board.s9f+board.s9e+board.s9d+board.s9c+board.s9b=='':
moves = '9h9a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w7h==''\
and board.s8h=='':
moves = '9h7h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w7h==''\
and board.s8h=='':
moves = '9h7h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w6h==''\
and board.s8h+board.s7h=='':
moves = '9h6h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w6h==''\
and board.s8h+board.s7h=='':
moves = '9h6h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w5h==''\
and board.s8h+board.s7h+board.s6h=='':
moves = '9h5h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w5h==''\
and board.s8h+board.s7h+board.s6h=='':
moves = '9h5h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w4h==''\
and board.s8h+board.s7h+board.s6h+board.s5h=='':
moves = '9h4h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w4h==''\
and board.s8h+board.s7h+board.s6h+board.s5h=='':
moves = '9h4h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w3h==''\
and board.s8h+board.s7h+board.s6h+board.s5h+board.s4h=='':
moves = '9h3h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w3h==''\
and board.s8h+board.s7h+board.s6h+board.s5h+board.s4h=='':
moves = '9h3h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w2h==''\
and board.s8h+board.s7h+board.s6h+board.s5h+board.s4h+board.s3h=='':
moves = '9h2h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w2h==''\
and board.s8h+board.s7h+board.s6h+board.s5h+board.s4h+board.s3h=='':
moves = '9h2h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+r', Wboard.w9h)and Wboard.w1h==''\
and board.s8h+board.s7h+board.s6h+board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '9h1h'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('r', Wboard.w9h)and Wboard.w1h==''\
and board.s8h+board.s7h+board.s6h+board.s5h+board.s4h+board.s3h+board.s2h=='':
moves = '9h1h+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9h)and Wboard.w7f==''\
and board.s8g=='':
moves = '9h7f+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9h)and Wboard.w6e==''\
and board.s8g+board.s7f=='':
moves = '9h6e+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9h)and Wboard.w5d==''\
and board.s8g+board.s7f+board.s6e=='':
moves = '9h5d+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9h)and Wboard.w4c==''\
and board.s8g+board.s7f+board.s6e+board.s5d=='':
moves = '9h4c+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9h)and Wboard.w3b==''\
and board.s8g+board.s7f+board.s6e+board.s5d+board.s4c=='':
moves = '9h3b+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match('b',Wboard.w9h)and Wboard.w2a==''\
and board.s8g+board.s7f+board.s6e+board.s5d+board.s4c+board.s3b=='':
moves = '9h2a+'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w9h)and Wboard.w7f==''\
and board.s8g=='':
moves = '9h7f'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w9h)and Wboard.w6e==''\
and board.s8g+board.s7f=='':
moves = '9h6e'
kaihimore(moves)
if oute.oute == 0:
depth1.append(moves)
if re.match(r'\+b', Wboard.w9h)and Wboard.w5d==''\
and board.s8g+board.s7f+board.s6e=='':
moves
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The Index class can use several implementations as its
engine. Any implementation should implement the following:
__init__(data, row_index) : initialize index based on key/row list pairs
add(key, row) -> None : add (key, row) to existing data
remove(key, data=None) -> boolean : remove data from self[key], or all of
self[key] if data is None
shift_left(row) -> None : decrement row numbers after row
shift_right(row) -> None : increase row numbers >= row
find(key) -> list : list of rows corresponding to key
range(lower, upper, bounds) -> list : rows in self[k] where k is between
lower and upper (<= or < based on bounds)
sort() -> None : make row order align with key order
sorted_data() -> list of rows in sorted order (by key)
replace_rows(row_map) -> None : replace row numbers based on slice
items() -> list of tuples of the form (key, data)
Notes
-----
When a Table is initialized from another Table, indices are
(deep) copied and their columns are set to the columns of the new Table.
Column creation:
Column(c) -> deep copy of indices
c[[1, 2]] -> deep copy and reordering of indices
c[1:2] -> reference
array.view(Column) -> no indices
"""
from copy import deepcopy
import numpy as np
from .bst import MinValue, MaxValue
from .sorted_array import SortedArray
class QueryError(ValueError):
'''
Indicates that a given index cannot handle the supplied query.
'''
pass
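# Illustrative sketch (not one of the shipped engines): a minimal dict-backed
# engine implementing a subset of the interface listed in the module
# docstring, assuming `data` iterates as key tuples. For exposition only.
class _ToyEngine:
    def __init__(self, data, row_index, unique=False):
        self._rows = {}
        for key, row in zip(map(tuple, data), row_index):
            self._rows.setdefault(key, []).append(row)

    def add(self, key, row):
        self._rows.setdefault(key, []).append(row)

    def find(self, key):
        return sorted(self._rows.get(key, []))

    def remove(self, key, data=None):
        if key not in self._rows:
            return False
        if data is None:
            del self._rows[key]
        else:
            self._rows[key].remove(data)
        return True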
class Index:
'''
The Index class makes it possible to maintain indices
on columns of a Table, so that column values can be queried
quickly and efficiently. Column values are stored in lexicographic
sorted order, which allows for binary searching in O(log n).
Parameters
----------
columns : list or None
List of columns on which to create an index. If None,
create an empty index for purposes of deep copying.
engine : type, instance, or None
Indexing engine class to use (from among SortedArray, BST,
and SCEngine) or actual engine instance.
If the supplied argument is None (by default), use SortedArray.
unique : bool (defaults to False)
Whether the values of the index must be unique
'''
def __new__(cls, *args, **kwargs):
self = super().__new__(cls)
# If (and only if) unpickling for protocol >= 2, then args and kwargs
# are both empty. The class __init__ requires at least the `columns`
# arg. In this case return a bare `Index` object which is then morphed
# by the unpickling magic into the correct SlicedIndex object.
if not args and not kwargs:
return self
self.__init__(*args, **kwargs)
return SlicedIndex(self, slice(0, 0, None), original=True)
def __init__(self, columns, engine=None, unique=False):
# Local imports to avoid import problems.
from .table import Table, Column
from astropy.time import Time
if columns is not None:
columns = list(columns)
if engine is not None and not isinstance(engine, type):
# create from data
self.engine = engine.__class__
self.data = engine
self.columns = columns
return
# by default, use SortedArray
self.engine = engine or SortedArray
if columns is None: # this creates a special exception for deep copying
columns = []
data = []
row_index = []
elif len(columns) == 0:
raise ValueError("Cannot create index without at least one column")
elif len(columns) == 1:
col = columns[0]
row_index = Column(col.argsort())
data = Table([col[row_index]])
else:
num_rows = len(columns[0])
# replace Time columns with approximate form and remainder
new_columns = []
for col in columns:
if isinstance(col, Time):
new_columns.append(col.jd)
remainder = col - col.__class__(col.jd, format='jd', scale=col.scale)
new_columns.append(remainder.jd)
else:
new_columns.append(col)
# sort the table lexicographically and keep row numbers
table = Table(columns + [np.arange(num_rows)], copy_indices=False)
sort_columns = new_columns[::-1]
try:
lines = table[np.lexsort(sort_columns)]
except TypeError: # arbitrary mixins might not work with lexsort
lines = table[table.argsort()]
data = lines[lines.colnames[:-1]]
row_index = lines[lines.colnames[-1]]
self.data = self.engine(data, row_index, unique=unique)
self.columns = columns
def __len__(self):
'''
Number of rows in index.
'''
return len(self.columns[0])
def replace_col(self, prev_col, new_col):
'''
Replace an indexed column with an updated reference.
Parameters
----------
prev_col : Column
Column reference to replace
new_col : Column
New column reference
'''
self.columns[self.col_position(prev_col.info.name)] = new_col
def reload(self):
'''
Recreate the index based on data in self.columns.
'''
self.__init__(self.columns, engine=self.engine)
def col_position(self, col_name):
'''
Return the position of col_name in self.columns.
Parameters
----------
col_name : str
Name of column to look up
'''
for i, c in enumerate(self.columns):
if c.info.name == col_name:
return i
raise ValueError(f"Column does not belong to index: {col_name}")
def insert_row(self, pos, vals, columns):
'''
Insert a new row from the given values.
Parameters
----------
pos : int
Position at which to insert row
vals : list or tuple
List of values to insert into a new row
columns : list
Table column references
'''
key = [None] * len(self.columns)
for i, col in enumerate(columns):
try:
key[self.col_position(col.info.name)] = vals[i]
except ValueError: # not a member of index
continue
num_rows = len(self.columns[0])
if pos < num_rows:
# shift all rows >= pos to the right
self.data.shift_right(pos)
self.data.add(tuple(key), pos)
def get_row_specifier(self, row_specifier):
'''
Return an iterable corresponding to the
input row specifier.
Parameters
----------
row_specifier : int, list, ndarray, or slice
'''
if isinstance(row_specifier, (int, np.integer)):
# single row
return (row_specifier,)
elif isinstance(row_specifier, (list, np.ndarray)):
return row_specifier
elif isinstance(row_specifier, slice):
col_len = len(self.columns[0])
return range(*row_specifier.indices(col_len))
raise ValueError("Expected int, array of ints, or slice but "
"got {} in remove_rows".format(row_specifier))
def remove_rows(self, row_specifier):
'''
Remove the given rows from the index.
Parameters
----------
row_specifier : int, list, ndarray, or slice
Indicates which row(s) to remove
'''
rows = []
# To maintain the correct row order, we loop twice,
# deleting rows first and then reordering the remaining rows
for row in self.get_row_specifier(row_specifier):
self.remove_row(row, reorder=False)
rows.append(row)
# second pass - row order is reversed to maintain
# correct row numbers
for row in reversed(sorted(rows)):
self.data.shift_left(row)
def remove_row(self, row, reorder=True):
'''
Remove the given row from the index.
Parameters
----------
row : int
Position of row to remove
reorder : bool
Whether to reorder indices after removal
'''
# for removal, form a key consisting of column values in this row
if not self.data.remove(tuple([col[row] for col in self.columns]), row):
raise ValueError(f"Could not remove row {row} from index")
# decrement the row number of all later rows
if reorder:
self.data.shift_left(row)
def find(self, key):
'''
Return the row values corresponding to key, in sorted order.
Parameters
----------
key : tuple
Values to search for in each column
'''
return self.data.find(key)
def same_prefix(self, key):
'''
Return rows whose keys contain the supplied key as a prefix.
Parameters
----------
key : tuple
Prefix for which to search
'''
return self.same_prefix_range(key, key, (True, True))
def same_prefix_range(self, lower, upper, bounds=(True, True)):
'''
Return rows whose keys have a prefix in the given range.
Parameters
----------
lower : tuple
Lower prefix bound
upper : tuple
Upper prefix bound
bounds : tuple (x, y) of bools
Indicates whether the search should be inclusive or
exclusive with respect to the endpoints. The first
argument x corresponds to an inclusive lower bound,
and the second argument y to an inclusive upper bound.
'''
n = len(lower)
ncols = len(self.columns)
a = MinValue() if bounds[0] else MaxValue()
b = MaxValue() if bounds[1] else MinValue()
# [x, y] search corresponds to [(x, min), (y, max)]
# (x, y) search corresponds to ((x, max), (y, min))
lower = lower + tuple((ncols - n) * [a])
upper = upper + tuple((ncols - n) * [b])
return self.data.range(lower, upper, bounds)
def range(self, lower, upper, bounds=(True, True)):
'''
Return rows within the given range.
Parameters
----------
lower : tuple
Lower prefix bound
upper : tuple
Upper prefix bound
bounds : tuple (x, y) of bools
Indicates whether the search should be inclusive or
exclusive with respect to the endpoints. The first
argument x corresponds to an inclusive lower bound,
and the second argument y to an inclusive upper bound.
'''
return self.data.range(lower, upper, bounds)
def replace(self, row, col_name, val):
'''
Replace the value of a column at a given position.
# coding: utf-8
"""
This module provides a Renderer class to render templates.
"""
import cgi
import os
import sys
from .context import Context
from .locator import DEFAULT_EXTENSION
from .locator import Locator
from .reader import Reader
from .renderengine import RenderEngine
# The quote=True argument causes double quotes to be escaped,
# but not single quotes:
# http://docs.python.org/library/cgi.html#cgi.escape
DEFAULT_ESCAPE = lambda s: cgi.escape(s, quote=True)
class Renderer(object):
"""
A class for rendering mustache templates.
This class supports several rendering options which are described in
the constructor's docstring. Among these, the constructor supports
passing a custom partial loader.
Here is an example of rendering a template using a custom partial loader
that loads partials from a string-string dictionary.
>>> partials = {'partial': 'Hello, {{thing}}!'}
>>> renderer = Renderer(partials=partials)
>>> renderer.render('{{>partial}}', {'thing': 'world'})
u'Hello, world!'
"""
def __init__(self, file_encoding=None, default_encoding=None,
decode_errors='strict', search_dirs=None, file_extension=None,
escape=None, partials=None):
"""
Construct an instance.
Arguments:
partials: an object (e.g. a dictionary) for custom partial loading
during the rendering process.
The object should have a get() method that accepts a string
and returns the corresponding template as a string, preferably
as a unicode string. If there is no template with that name,
the get() method should either return None (as dict.get() does)
or raise an exception.
If this argument is None, the rendering process will use
the normal procedure of locating and reading templates from
the file system -- using relevant instance attributes like
search_dirs, file_encoding, etc.
escape: the function used to escape variable tag values when
rendering a template. The function should accept a unicode
string (or subclass of unicode) and return an escaped string
that is again unicode (or a subclass of unicode).
This function need not handle strings of type `str` because
this class will only pass it unicode strings. The constructor
assigns this function to the constructed instance's escape()
method.
The argument defaults to `cgi.escape(s, quote=True)`. To
disable escaping entirely, one can pass `lambda u: u` as the
escape function, for example. One may also wish to consider
using markupsafe's escape function: markupsafe.escape().
file_encoding: the name of the encoding of all template files.
This encoding is used when reading and converting any template
files to unicode. All templates are converted to unicode prior
to parsing. Defaults to the default_encoding argument.
default_encoding: the name of the encoding to use when converting
to unicode any strings of type str encountered during the
rendering process. The name will be passed as the encoding
argument to the built-in function unicode(). Defaults to the
encoding name returned by sys.getdefaultencoding().
decode_errors: the string to pass as the errors argument to the
built-in function unicode() when converting to unicode any
strings of type str encountered during the rendering process.
Defaults to "strict".
search_dirs: the list of directories in which to search for
templates when loading a template by name. Defaults to the
current working directory. If given a string, the string is
interpreted as a single directory.
file_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. for extensionless files).
"""
if default_encoding is None:
default_encoding = sys.getdefaultencoding()
if escape is None:
escape = DEFAULT_ESCAPE
# This needs to be after we set the default default_encoding.
if file_encoding is None:
file_encoding = default_encoding
if file_extension is None:
file_extension = DEFAULT_EXTENSION
if search_dirs is None:
search_dirs = os.curdir # i.e. "."
if isinstance(search_dirs, basestring):
search_dirs = [search_dirs]
self.decode_errors = decode_errors
self.default_encoding = default_encoding
self.escape = escape
self.file_encoding = file_encoding
self.file_extension = file_extension
self.partials = partials
self.search_dirs = search_dirs
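# Example (mirroring the constructor docstring above): escaping can be
# disabled by passing an identity function, e.g. --
#
#     renderer = Renderer(escape=lambda u: u)
#     renderer.render(u'{{html}}', {'html': u'<b>hi</b>'})  # -> u'<b>hi</b>'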
def _to_unicode_soft(self, s):
"""
Convert a basestring to unicode, preserving any unicode subclass.
"""
# Avoid the "double-decoding" TypeError.
return s if isinstance(s, unicode) else self.unicode(s)
def _to_unicode_hard(self, s):
"""
Convert a basestring to a string with type unicode (not subclass).
"""
return unicode(self._to_unicode_soft(s))
def _escape_to_unicode(self, s):
"""
Convert a basestring to unicode (preserving any unicode subclass), and escape it.
Returns a unicode string (not subclass).
"""
return unicode(self.escape(self._to_unicode_soft(s)))
def unicode(self, s):
"""
Convert a string to unicode, using default_encoding and decode_errors.
Raises:
TypeError: Because this method calls Python's built-in unicode()
function, this method raises the following exception if the
given string is already unicode:
TypeError: decoding Unicode is not supported
"""
# TODO: Wrap UnicodeDecodeErrors with a message about setting
# the default_encoding and decode_errors attributes.
return unicode(s, self.default_encoding, self.decode_errors)
def _make_reader(self):
"""
Create a Reader instance using current attributes.
"""
return Reader(encoding=self.file_encoding, decode_errors=self.decode_errors)
def make_locator(self):
"""
Create a Locator instance using current attributes.
"""
return Locator(extension=self.file_extension)
def _make_load_template(self):
"""
Return a function that loads a template by name.
"""
reader = self._make_reader()
locator = self.make_locator()
def load_template(template_name):
path = locator.locate_path(template_name=template_name, search_dirs=self.search_dirs)
return reader.read(path)
return load_template
def _make_load_partial(self):
"""
Return the load_partial function to pass to RenderEngine.__init__().
"""
if self.partials is None:
load_template = self._make_load_template()
return load_template
# Otherwise, create a load_partial function from the custom partial
# loader that satisfies RenderEngine requirements (and that provides
# a nicer exception, etc).
partials = self.partials
def load_partial(name):
template = partials.get(name)
if template is None:
# TODO: make a TemplateNotFoundException type that provides
# the original partials as an attribute.
raise Exception("Partial not found with name: %s" % repr(name))
# RenderEngine requires that the return value be unicode.
return self._to_unicode_hard(template)
return load_partial
def _make_render_engine(self):
"""
Return a RenderEngine instance for rendering.
"""
load_partial = self._make_load_partial()
engine = RenderEngine(load_partial=load_partial,
literal=self._to_unicode_hard,
escape=self._escape_to_unicode)
return engine
def read(self, path):
"""
Read and return as a unicode string the file contents at path.
This class uses this method whenever it needs to read a template
file. This method uses the file_encoding and decode_errors
attributes.
"""
reader = self._make_reader()
return reader.read(path)
# TODO: add unit tests for this method.
def load_template(self, template_name):
"""
Load a template by name from the file system.
"""
load_template = self._make_load_template()
return load_template(template_name)
def get_associated_template(self, obj):
"""
Find and return the template associated with an object.
The function first searches the directory containing the object's
class definition.
"""
search_dirs = self.search_dirs
locator = self.make_locator()
template_name = locator.make_template_name(obj)
directory = locator.get_object_directory(obj)
# TODO: add a unit test for the case of a None return value.
if directory is not None:
search_dirs = [directory] + self.search_dirs
path = locator.locate_path(template_name=template_name, search_dirs=search_dirs)
return self.read(path)
def _render_string(self, template, *context, **kwargs):
"""
Render the given template string using the given context.
"""
# RenderEngine.render() requires that the template string be unicode.
template = self._to_unicode_hard(template)
context = Context.create(*context, **kwargs)
engine = self._make_render_engine()
rendered = engine.render(template, context)
return unicode(rendered)
def _render_object(self, obj, *context, **kwargs):
"""
Render the template associated with the given object.
"""
context = [obj] + list(context)
template = self.get_associated_template(obj)
return self._render_string(template, *context, **kwargs)
def render_path(self, template_path, *context, **kwargs):
"""
Render the template at the given path using the given context.
Read the render() docstring for more information.
"""
template = self.read(template_path)
return self._render_string(template, *context, **kwargs)
def render(self, template, *context, **kwargs):
"""
Render the given template (or template object) using the given context.
Returns the rendering as a unicode string.
Prior to rendering, templates of type str are converted to unicode
using the default_encoding and decode_errors attributes. See the
constructor docstring for more information.
Arguments:
template: a template string of type unicode or str, or an object
instance. If the argument is an object, the function first looks
for the template associated to the object by calling this class's
get_associated_template() method. The rendering process also
uses the passed object as the first element of the context stack
when rendering.
*context: zero or more dictionaries, Context instances, or objects
with which to populate the initial context stack. None
arguments are skipped. Items in the *context list are added to
the context stack in order so that later items in the argument
list take precedence over earlier items.
**kwargs: additional key-value data to add to the context stack.
As these arguments appear after all items in the *context list,
in the case of key conflicts these values take
import tkinter as tk
from tkinter import ttk
from tkinter import filedialog
from . import widgets as w
from . import constants as c
import os
import webbrowser
from tkinter.scrolledtext import ScrolledText
class ProjectPurposeScreen(tk.Frame):
def __init__(self):
tk.Frame.__init__(self)
self.project_goal_selected = False
self.goal_model_selected = False
self.create_doc()
self.create_project_goals_section()
self.create_goal_model_section()
self.create_method_fragment_section()
def create_doc(self):
# load in documentation
frame_project_docs = ttk.LabelFrame(self, text="View help documentation",
width=c.Size.label_frame_width,
height=80,
style="Doc.TLabelframe")
frame_project_docs.grid_propagate(0)
frame_project_docs.grid(row=0, column=0,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
frame_steps_1 = ttk.LabelFrame(self, text="Phase 1 Checklist",
width=400,
height=200,
style="Doc.TLabelframe")
frame_steps_1.grid(row=0, column=1,
rowspan=4,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
for step in c.MethodSteps.phase_1:
if step.startswith(('1', '2', '3', '4', '5', '6', '7')):
tk.Label(frame_steps_1,
text=step).grid(sticky='w', padx=5, pady=(10,0))
else:
tk.Label(frame_steps_1,
text=step).grid(sticky='w', padx=(20,10), pady=0)
tk.Label(frame_steps_1,
text=" " * 150).grid(sticky='w', padx=(20, 10), pady=0)
tk.Button(frame_project_docs,
text='1.1 Project Goals',
width=20, height=c.Size.button_height,
command=lambda: [webbrowser.open(c.PdfFiles.project_goals)]).grid(row=0, column=0,
padx=(10, 0), pady=5,
sticky='w')
tk.Button(frame_project_docs,
text='1.2 Goal Model',
width=20, height=c.Size.button_height,
command=lambda: [webbrowser.open(c.PdfFiles.goal_model)]).grid(row=0, column=1,
padx=(10, 0), pady=5,
sticky='w')
tk.Button(frame_project_docs,
text='1.3.1 Method Fragments',
width=20, height=c.Size.button_height,
command=lambda: [webbrowser.open(c.PdfFiles.method_fragments)]).grid(row=0, column=2,
padx=(10, 0), pady=5,
sticky='w')
tk.Button(frame_project_docs,
text='1.3.2 Metrics',
width=20, height=c.Size.button_height,
command=lambda: [webbrowser.open(c.PdfFiles.metrics)]).grid(row=0, column=3,
padx=(10, 0), pady=5,
sticky='w')
def create_project_goals_section(self):
frame_project_goals = ttk.LabelFrame(self, text="1.1 Project Goals",
width=c.Size.label_frame_width, height=c.Size.label_frame_height)
frame_project_goals.grid_propagate(0)
frame_project_goals.grid(row=1, column=0,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
label_project_goals = tk.Label(frame_project_goals,
text='Define project goals & link the file here')
label_project_goals.grid(row=0, column=0,
padx=(10, 0), columnspan=2,
sticky='n')
# make object
self.project_pdf = w.FileOpener(self)
# convert to string var and set init text
self.text_project_pdf = tk.StringVar()
self.text_project_pdf.set("")
# create label and place in gui
self.project_label = tk.Label(frame_project_goals,
textvariable=self.text_project_pdf)
self.project_label.grid(row=3, column=0, sticky='w',
padx=(20, 0), columnspan=150)
# create button with actions
button_upload_1 = tk.Button(frame_project_goals,
text='Select',
width=c.Size.button_width, height=c.Size.button_height,
command=lambda: [select_goal_select_functions()])
# place upload button
button_upload_1.grid(row=2, column=0,
padx=(10, 0), pady=5,
sticky='w')
def select_goal_select_functions():
file_path = self.project_pdf.get_file_path()
filename = self.project_pdf.return_file_name()
if file_path:
self.text_project_pdf.set(filename)
self.project_goal_selected = True
self.status_message_project_txt.set("")
self.dict_paths.update_user_doc_path_dict('project_goals', file_path)
else:
self.project_goal_selected = False
self.text_project_pdf.set('')
self.dict_paths.update_user_doc_path_dict('project_goals', '')
self.status_message_project_txt = tk.StringVar()
self.status_message_project_txt.set("")
tk.Label(frame_project_goals,
font='Helvetica 11', foreground='red',
textvariable=self.status_message_project_txt).grid(row=4, column=0,
sticky='w',
padx=(20, 0),
columnspan=150)
def select_goal_show_functions():
if self.project_goal_selected:
self.project_pdf.show_project_goals()
else:
self.status_message_project_txt.set("Select project goals first!")
# place show button
button_show_1 = tk.Button(frame_project_goals,
text='Show',
width=c.Size.button_width, height=c.Size.button_height,
command=select_goal_show_functions)
button_show_1.grid(row=2, column=1,
padx=(10, 0), pady=5,
sticky='w')
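# The select/show pattern above repeats for every document frame in this file:
# w.FileOpener (a widget assumed from module w) opens a file dialog on select,
# and the show button only works once a file is linked, otherwise a red status
# message is set. Rough shape of the pair (sketch, names as used above):
# opener = w.FileOpener(self)
# path = opener.get_file_path()        # file dialog; falsy when cancelled
# name = opener.return_file_name()     # display name shown in the label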
def create_goal_model_section(self):
frame_goal_model = ttk.LabelFrame(self, text="1.2 Goal Model",
width=c.Size.label_frame_width, height=c.Size.label_frame_height)
frame_goal_model.grid_propagate(0)
frame_goal_model.grid(row=2, column=0,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
label_project_goals = tk.Label(frame_goal_model,
text='Create goal model & link the file here')
label_project_goals.grid(row=0, column=0,
padx=(10, 0),
columnspan=2,
sticky='w')
self.goal_pdf = w.FileOpener(self)
# convert to string var and set init text
self.text_goal_pdf = tk.StringVar()
self.text_goal_pdf.set("")
# create label and place in gui
self.project_goals_label = tk.Label(frame_goal_model,
textvariable=self.text_goal_pdf)
self.project_goals_label.grid(row=4, column=0, sticky='w',
padx=(20, 0), columnspan=150)
def goal_model_select_functions():
file_path = self.goal_pdf.get_file_path()
filename = self.goal_pdf.return_file_name()
if file_path:
self.text_goal_pdf.set(filename)
status_message_project_model_txt.set("")
self.goal_model_selected = True
self.dict_paths.update_user_doc_path_dict('goal_model', file_path)
else:
self.goal_model_selected = False
self.text_goal_pdf.set('')
self.dict_paths.update_user_doc_path_dict('goal_model', '')
def goal_model_show_functions():
if self.goal_model_selected:
self.goal_pdf.show_project_goals()
else:
status_message_project_model_txt.set("Select goal model first!")
button_upload_2 = tk.Button(frame_goal_model,
text='Select',
width=c.Size.button_width, height=c.Size.button_height,
command=lambda: [goal_model_select_functions()])
button_upload_2.grid(row=2, column=0,
padx=(10, 0),
pady=5,
sticky='w')
status_message_project_model_txt = tk.StringVar()
status_message_project_model_txt.set("")
tk.Label(frame_goal_model,
font='Helvetica 11', foreground='red',
textvariable=status_message_project_model_txt).grid(row=5, column=0, sticky='w', padx=(20, 0),
columnspan=150)
button_show_2 = tk.Button(frame_goal_model,
text='Show',
width=c.Size.button_width, height=c.Size.button_height,
command=lambda: [goal_model_show_functions()])
button_show_2.grid(row=2, column=1,
padx=(10, 0),
pady=2,
sticky='w')
def create_method_fragment_section(self):
frame_select_method_fragments = ttk.LabelFrame(self, text="1.3 Method Fragments",
width=c.Size.label_frame_width, height=250)
frame_select_method_fragments.grid_propagate(0)
frame_select_method_fragments.grid(row=3, column=0,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
label_selected_method_fragments = tk.Label(frame_select_method_fragments,
text='Select method fragments')
label_selected_method_fragments.grid(row=1, column=0, columnspan=2,
padx=(20, 0),
sticky='w')
self.method_fragment = w.MethodFragmentSelection(self)
# checkboxes and method fragments
button_upload_3 = tk.Button(frame_select_method_fragments,
text='Select',
width=c.Size.button_width, height=c.Size.button_height,
command=lambda: [self.method_fragment.show_selection_screen(),
self.method_fragment.send_status_message(show_status_message,
show_status_message_metric_def)])
button_upload_3.grid(row=3, column=0,
padx=(10, 0),
pady=2,
sticky='w')
status_message_show_method_frags = ''
status_message_add_metric_def = ''
def if_clicked(section):
self.method_fragment.send_status_message(show_status_message, show_status_message_metric_def)
if not self.method_fragment.methode_frags_selected:
show_status_message['text'] = 'Select method fragments first!'
show_status_message_metric_def['text'] = 'Select method fragments first!'
else:
show_status_message['text'] = ''
show_status_message_metric_def['text'] = ''
if section == 'method_frag':
self.method_fragment.show_info_screen()
else:
self.method_fragment.show_add_metric_definition_window()
button_upload_4 = tk.Button(frame_select_method_fragments,
text='Show',
width=c.Size.button_width, height=c.Size.button_height,
command=lambda: [if_clicked('method_frag')])
button_upload_4.grid(row=3, column=1,
padx=(10, 0),
pady=2,
sticky='w')
# ------------
show_status_message = ttk.Label(frame_select_method_fragments,
font='Helvetica 11', foreground='red',
text=status_message_show_method_frags)
show_status_message.grid(row=4, column=0,
columnspan=20,
padx=10, pady=(10),
sticky='w')
label_add_definition = tk.Label(frame_select_method_fragments,
text='Add metric definition & set targets')
label_add_definition.grid(row=5, column=0, columnspan=100,
padx=(20, 0),
sticky='w')
button_upload_5 = tk.Button(frame_select_method_fragments,
text='Add / Show',
height=c.Size.button_height,
command=lambda: [if_clicked('add_metrics')])
button_upload_5.grid(row=6, column=0,
padx=(10, 0),
pady=2,
sticky='w')
show_status_message_metric_def = ttk.Label(frame_select_method_fragments,
font='Helvetica 11', foreground='red',
text=status_message_add_metric_def)
show_status_message_metric_def.grid(row=7, column=0,
columnspan=20,
padx=10, pady=(10),
sticky='w')
self.sendFrame(frame_select_method_fragments)
def save_data(self):
self.save_file_object.get_project_purpose(self.dict_paths.user_doc_file_paths,
self.method_fragment.checkbox_list,
self.method_fragment.methode_frags_selected)
def sendFrame(self, frame):
self.method_fragment.retrieve_frame(frame)
def getProjectPdfPath(self):
self.project_pdf_file_path = filedialog.askopenfilename()
def send_data_object(self, data):
self.data_object = data
self.method_fragment.get_data_object(self.data_object)
def send_dict_paths(self, dict):
self.dict_paths = dict
def send_save_file_object(self, data):
self.save_file_object = data
def restore_from_save_file(self):
# if path to project model was saved
if self.save_file_object.data['project_goals_path']:
self.project_goal_selected = True
self.text_project_pdf.set(self.project_pdf.clean_file_name(self.save_file_object.data['project_goals_path']))
# if path to goal model was saved
if self.save_file_object.data['goal_model_path']:
self.goal_model_selected = True
self.text_goal_pdf.set(self.project_pdf.clean_file_name(self.save_file_object.data['goal_model_path']))
# if method fragments were saved
# checkboxes
if self.save_file_object.data['selected_method_fragments']:
self.method_fragment.show_selection_screen()
self.method_fragment.selection_window.withdraw()
sql = 'select method_fragment_name from method_fragment'
retrieve_sql = self.data_object.query_no_par(sql)
for item in self.save_file_object.data['selected_method_fragments']:
self.method_fragment.checkbox[item].select()
self.method_fragment.methode_frags_selected = True
self.method_fragment.checkbox_list = self.save_file_object.data['selected_method_fragments']
self.method_fragment.show_info_screen()
self.method_fragment.delete_frame(self.method_fragment.scrollable_metric_frame)
self.method_fragment.delete_frame(self.method_fragment.add_metrics_frame)
self.method_fragment.delete_frame(self.method_fragment.remove_frame)
self.method_fragment.add_metric()
self.method_fragment.show_summary_metrics()
self.method_fragment.info_window.withdraw()
class DataCollectionScreen(tk.Frame):
def __init__(self):
tk.Frame.__init__(self)
global start_project_window
self.start_project_window = None
self.data_collection_window = self
self.sampling_selected = False
frame_project_docs = ttk.LabelFrame(self, text="View help documentation",
width=c.Size.label_frame_width,
height=80,
style="Doc.TLabelframe")
frame_project_docs.grid_propagate(0)
frame_project_docs.grid(row=0, column=0,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
frame_steps_2 = ttk.LabelFrame(self, text="Phase 2 Checklist",
width=400,
height=200,
style="Doc.TLabelframe")
frame_steps_2.grid(row=0, column=1,
rowspan=3,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
for step in c.MethodSteps.phase_2:
if step.startswith(('1', '2', '3', '4', '5', '6')):
tk.Label(frame_steps_2,
text=step).grid(sticky='w', padx=5, pady=(10, 0))
else:
tk.Label(frame_steps_2,
text=step).grid(sticky='w', padx=(20, 10), pady=0)
tk.Label(frame_steps_2,
text=" " * 150).grid(sticky='w', padx=(20, 10), pady=0)
tk.Button(frame_project_docs,
text='2.1 Sampling Strategy',
width=20, height=c.Size.button_height,
command=lambda: [webbrowser.open(c.PdfFiles.sampling_strategy)]).grid(row=0, column=0,
padx=(10, 0), pady=5,
sticky='w')
tk.Button(frame_project_docs,
text='2.2 Data Collection',
width=20, height=c.Size.button_height,
command=lambda: [webbrowser.open(c.PdfFiles.data_collection)]).grid(row=0, column=1,
padx=(10, 0), pady=5,
sticky='w')
# --------- 2.1 Sampling strategy frame
frame_sampling = ttk.LabelFrame(self, text="2.1 Sampling strategy",
width=c.Size.label_frame_width, height=150)
frame_sampling.grid_propagate(0)
frame_sampling.grid(row=1, column=0,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
label_sampling = tk.Label(frame_sampling,
text='Determine sampling strategy')
label_sampling.grid(row=1, column=0, columnspan=100,
padx=(20, 0),
sticky='w')
# make file opener object
self.data_collection_pdf = w.FileOpener(self)
# make data collection object
self.data_collection = w.DataCollection(self)
# convert to string var and set init text
self.text_sampling_pdf = tk.StringVar()
self.text_sampling_pdf.set("")
# create label and place in gui
self.project_label = tk.Label(frame_sampling,
textvariable=self.text_sampling_pdf,
foreground = "black")
self.project_label.grid(row=3, column=0,
sticky='w',
padx=(20, 0),
columnspan=150)
# functions if valid
def sampling_show_functions():
if self.sampling_selected:
self.project_label["foreground"] = "black"
self.data_collection_pdf.show_project_goals()
else:
self.status_message_label.config(foreground="red")
self.status_message_txt.set("Select sampling strategy first!")
self.status_message_txt = tk.StringVar()
self.status_message_txt.set("")
self.status_message_label = tk.Label(frame_sampling,
font='Helvetica 11', foreground='red',
textvariable=self.status_message_txt)
self.status_message_label.grid(row=4, column=0,
sticky='w',
padx=(20, 0),
columnspan=150)
# check if valid link
def sampling_strategy_select_functions():
file_path = self.data_collection_pdf.get_file_path()
filename = self.data_collection_pdf.return_file_name()
if file_path:
self.text_sampling_pdf.set(filename)
self.status_message_txt.set("")
self.sampling_selected = True
self.dict_paths.update_user_doc_path_dict('sampling_strategy', file_path)
else:
self.sampling_selected = False
self.text_sampling_pdf.set('')
self.dict_paths.update_user_doc_path_dict('sampling_strategy', '')
# create button with actions
button_upload_1 = tk.Button(frame_sampling,
text='Select',
width=c.Size.button_width, height=c.Size.button_height,
command=lambda: [sampling_strategy_select_functions()])
# place upload button
button_upload_1.grid(row=2, column=0,
padx=(10, 0), pady=5,
sticky='w')
# place show button
button_show_1 = tk.Button(frame_sampling,
text='Show',
width=c.Size.button_width, height=c.Size.button_height,
command=sampling_show_functions)
button_show_1.grid(row=2, column=1,
padx=(10, 0), pady=5,
sticky='w')
# --------- 2.2 Data collection frame
frame_data_collection = ttk.LabelFrame(self, text="2.2 Data collection",
width=c.Size.label_frame_width, height=400)
frame_data_collection.grid_propagate(0)
frame_data_collection.grid(row=2, column=0,
padx=(10, 0),
pady=(10, 0),
sticky='nsew')
# header
label_date = tk.Label(frame_data_collection,
text='Date')
label_date.grid(row=3, column=0, columnspan=4,
padx=(20, 0), pady=(10),
sticky='w')
label_time_period_header = tk.Label(frame_data_collection,
text='Time period')
label_time_period_header.grid(row=3, column=5, columnspan=4,
padx=(20, 0),
sticky='w')
# row 1
self.user_date_1 = tk.StringVar()
self.user_date_1_input = ttk.Entry(frame_data_collection, width=15, textvariable=self.user_date_1)
self.user_date_1_input.grid(row=4, column=0, padx=(20, 0), pady=15, sticky='nswe')
label_time_period_1 = tk.Label(frame_data_collection,
text='Start of project')
label_time_period_1.grid(row=4, column=5, columnspan=4,
padx=(20, 0),
sticky='w')
button_upload_1 = tk.Button(frame_data_collection,
text='Upload',
width=10, height=1,
command=lambda : [self.show_project_start(), self.notebook_data_collection.select(0)])
button_upload_1.grid(row=4, column=11,
padx=(100, 0),
sticky='w')
# row 2
self.user_date_2 = tk.StringVar()
self.user_date_2_input = ttk.Entry(frame_data_collection, width=15, textvariable=self.user_date_2)
self.user_date_2_input.grid(row=5, column=0, padx=(20, 0), pady=15, sticky='nswe')
label_time_period_2 = tk.Label(frame_data_collection,
text='Halfway point of project')
label_time_period_2.grid(row=5, column=5, columnspan=4,
padx=(20, 0),
sticky='w')
button_upload_2 = tk.Button(frame_data_collection,
text='Upload',
width=10, height=1,
command=lambda : [self.show_project_start(), self.notebook_data_collection.select(1)])
button_upload_2.grid(row=5, column=11,
padx=(100, 0),
sticky='w')
# row 3
self.user_date_3 = tk.StringVar()
self.user_date_3_input = ttk.Entry(frame_data_collection, width=15, textvariable=self.user_date_3)
self.user_date_3_input.grid(row=6, column=0, padx=(20, 0), pady=15, sticky='nswe')
label_time_period_3 = tk.Label(frame_data_collection,
text='End of project')
label_time_period_3.grid(row=6, column=5, columnspan=4,
padx=(20, 0),
sticky='w')
button_upload_3 = tk.Button(frame_data_collection,
text='Upload',
width=10, height=1,
command=lambda : [self.show_project_start(), self.notebook_data_collection.select(2)])
button_upload_3.grid(row=6, column=11,
padx=(100, 0),
sticky='w')
# row 4
self.user_date_4 = tk.StringVar()
self.user_date_4_input = ttk.Entry(frame_data_collection, width=15, textvariable=self.user_date_4)
self.user_date_4_input.grid(row=7, column=0, padx=(20, 0), pady=15, sticky='nswe')
label_time_period_4 = tk.Label(frame_data_collection,
text='Year after end of project')
label_time_period_4.grid(row=7, column=5, columnspan=4,
padx=(20, 0),
sticky='w')
button_upload_4 = tk.Button(frame_data_collection,
text='Upload',
width=10, height=1,
command=lambda : [self.show_project_start(), self.notebook_data_collection.select(3)])
button_upload_4.grid(row=7, column=11,
padx=(100, 0),
sticky='w')
self.user_dates_objects = [self.user_date_1,
self.user_date_2,
self.user_date_3,
self.user_date_4]
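# The four StringVars above map 1:1 to the save-file keys restored below:
# start of project (date_sop), halfway point (date_hop), end of project
# (date_eop) and year after end (date_yap).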
def restore_from_save_file(self):
self.user_date_1.set(self.save_file_object.data['date_sop'])
self.user_date_2.set(self.save_file_object.data['date_hop'])
self.user_date_3.set(self.save_file_object.data['date_eop'])
self.user_date_4.set(self.save_file_object.data['date_yap'])
# if path to sampling strategy was saved
if self.save_file_object.data['sampling_strategy_path']:
self.sampling_selected = True
self.text_sampling_pdf.set(self.data_collection_pdf.clean_file_name(self.save_file_object.data['sampling_strategy_path']))
# if paths to loading in
is_training=is_training)
net = tf_util.conv2d_transpose(net, 4, kernel_size=[1,1], stride=[1,1], padding='VALID', scope='upconv5', activation_fn=None)
num_point_conv = 1024
elif num_point<=1536 and num_point>896:
conv_feat = tf.expand_dims(tf.expand_dims(feat, 1),1)
net = tf_util.conv2d_transpose(conv_feat, 512, kernel_size=[2,2], stride=[1,1], padding='VALID', scope='upconv1', bn=True, bn_decay=bn_decay, is_training=is_training)
net = tf_util.conv2d_transpose(net, 256, kernel_size=[2,2], stride=[1,1], padding='VALID', scope='upconv2', bn=True, bn_decay=bn_decay, is_training=is_training)
net = tf_util.conv2d_transpose(net, 256, kernel_size=[3,3], stride=[2,2], padding='VALID', scope='upconv3', bn=True, bn_decay=bn_decay, is_training=is_training)
net = tf_util.conv2d_transpose(net, 128, kernel_size=[4,4], stride=[3,3], padding='VALID', scope='upconv4', bn=True, bn_decay=bn_decay, is_training=is_training)
net = tf_util.conv2d_transpose(net, 4, kernel_size=[1,1], stride=[1,1], padding='VALID', scope='upconv5', activation_fn=None)
num_point_conv = 484
elif num_point<=896 and num_point>384:
conv_feat = tf.expand_dims(tf.expand_dims(feat, 1),1)
net = tf_util.conv2d_transpose(conv_feat, 512, kernel_size=[3,3], stride=[1,1], padding='VALID', scope='upconv1', bn=True, bn_decay=bn_decay, is_training=is_training)
net = tf_util.conv2d_transpose(net, 256, kernel_size=[3,3], stride=[2,2], padding='VALID', scope='upconv2', bn=True, bn_decay=bn_decay, is_training=is_training)
net = tf_util.conv2d_transpose(net, 128, kernel_size=[4,4], stride=[2,2], padding='VALID', scope='upconv3', bn=True, bn_decay=bn_decay, is_training=is_training)
net = tf_util.conv2d_transpose(net, 4, kernel_size=[1,1], stride=[1,1], padding='VALID', scope='upconv4', activation_fn=None)
num_point_conv = 256
else:
raise Exception('unsupported num_point: %d' % num_point)
pc_upconv = tf.reshape(net, [-1, num_point_conv, 4])
num_point_fc = num_point - num_point_conv
# FC Decoder
net = tf_util.fully_connected(feat, 512, bn=True, is_training=is_training, scope='de_fc2', bn_decay=bn_decay)
net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training, scope='de_fc3', bn_decay=bn_decay)
net = tf_util.fully_connected(net, num_point_fc*4, activation_fn=None, scope='de_fc4')
pc_fc = tf.reshape(net, [-1, num_point_fc, 4])
# Merge
pc = tf.concat([pc_upconv, pc_fc], axis=1)
pc = tf.reshape(pc, [-1, nsmp, num_point, 4])
pc_conf = pc[:,:,:,3]
pc_xyz = pc[:,:,:,:3]
return pc_xyz, pc_conf
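# The decoder emits 4 channels per generated point: xyz offsets plus a per-point
# confidence in channel 3, split into pc_xyz and pc_conf above. A hedged sketch
# of consuming the confidence (the 0.5 threshold is illustrative, not from the
# original pipeline):
# pc_xyz, pc_conf = decoding_conf_net(feat, nsmp_ins, 'decoder', is_training=is_training)
# keep_mask = tf.greater(pc_conf, 0.5)                       # [B, nsmp, num_point]
# pc_xyz_kept = tf.where(tf.tile(tf.expand_dims(keep_mask, -1), [1, 1, 1, 3]),
#                        pc_xyz, tf.zeros_like(pc_xyz))      # zero low-confidence points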
def shape_proposal_net(pc, pc_ins, group_label, group_indicator, bbox_ins_gt, num_category, scope, is_training, bn_decay=None, nsmp=128, return_fullfea=False):
''' Shape proposal generation
Inputs:
pc: [B, NUM_POINT, 3]
pc_ins: [B, NUM_GROUP, NUM_POINT_INS, 3], in world coord sys
group_label: [B, NUM_POINT]
group_indicator: [B, NUM_GROUP]
bbox_ins_gt: [B, NUM_GROUP, 6], only used during training
Returns:
fb_logits: [B, NUM_SAMPLE, 2] confidence logits (before softmax)
fb_prob: [B, NUM_SAMPLE, 2] confidence probabilities
bbox_ins: [B, NUM_SAMPLE, (x, y, z, l, w, h)]
entity_fea: [B, NUM_POINT, nfea] entity feature for each point
center_pos: [B, NUM_POINT, 3] center coordinate for each point, in world coord sys
'''
with tf.variable_scope(scope) as myscope:
# Parameter extraction
batch_size = pc.get_shape()[0].value
ngroup = pc_ins.get_shape()[1].value
nsmp_ins = pc_ins.get_shape()[2].value
end_points = {}
# Shift prediction, ind_seed [B, nsmp], shift_pred_seed [B, nsmp, 3]
end_points = shift_pred_net(pc, None, nsmp, end_points, 'shift_predictor', is_training, bn_decay=bn_decay, return_fullfea=return_fullfea)
pc_seed = end_points['pc_seed']
shift_pred_seed_4d = end_points['shift_pred_seed_4d']
ind_seed = end_points['ind_seed']
shift_pred_seed = tf.multiply(shift_pred_seed_4d[:,:,:3], shift_pred_seed_4d[:,:,3:])
# Semantic prediction, sem_fea_seed [B, nsmp, nfea]
end_points = sem_net(pc, None, 10000 if return_fullfea else 1024, num_category, ind_seed, end_points, 'sem_predictor', is_training, bn_decay=bn_decay, return_fullfea=return_fullfea)
# end_points = sem_net2(pc, None, 4096, num_category, ind_seed, end_points, 'sem_predictor', is_training, bn_decay=bn_decay, return_fullfea=return_fullfea)
sem_fea_seed = end_points['sem_fea_seed']
# Encode instance, pcfea_ins_centered [B, ngroup, nfea_ins], pc_ins_center [B, ngroup, 1, 3]
pc_ins_center = (tf.reduce_max(pc_ins, 2, keep_dims=True)+tf.reduce_min(pc_ins, 2, keep_dims=True))/2 # [B, ngroup, 1, 3] -> requires random padding for pc_ins generation
pc_ins_centered = pc_ins-pc_ins_center
idx = tf.where(tf.greater(group_indicator, 0))
pc_ins_centered_list = tf.gather_nd(pc_ins_centered, idx)
pcfea_ins_centered_list = single_encoding_net(pc_ins_centered_list, [64, 256, 512], [256], 'instance_encoder', is_training, bn_decay)
nfea_ins = pcfea_ins_centered_list.get_shape()[1].value
pcfea_ins_centered = tf.scatter_nd(tf.cast(idx,tf.int32), pcfea_ins_centered_list, tf.constant([batch_size, ngroup, nfea_ins])) # [B, ngroup, nfea_ins]
# Collect instance feature for seed points [B, nsmp, nfea_seed]
idx = tf.where(tf.greater_equal(ind_seed,0))
ind_seed_aug = tf.concat((tf.expand_dims(tf.cast(idx[:,0],tf.int32),-1),tf.reshape(ind_seed,[-1,1])),1)
group_label_seed = tf.reshape(tf.gather_nd(group_label, ind_seed_aug), [-1, nsmp]) # [B, nsmp]
idx = tf.where(tf.greater_equal(group_label_seed,0))
group_label_seed_aug = tf.concat((tf.expand_dims(tf.cast(idx[:,0],tf.int32),-1),tf.reshape(group_label_seed,[-1,1])),1)
pcfea_ins_seed = tf.reshape(tf.gather_nd(pcfea_ins_centered, group_label_seed_aug), [-1, nsmp, nfea_ins])
pc_ins_centered_seed = tf.reshape(tf.gather_nd(pc_ins_centered, group_label_seed_aug), [-1, nsmp, nsmp_ins, 3])
pc_ins_center_seed = tf.reshape(tf.gather_nd(pc_ins_center, group_label_seed_aug), [-1, nsmp, 1, 3])
# Encode context, pcfea_seed [B, nsmp, nfea_context]
# _, pcfea_seed, _, _ = multi_encoding_net(pc, None, nsmp, [0.25,0.5,1.0,1.5], [128,256,256,512], [[64,64,128], [64,128,256], [64,128,256], [64,128,256]], [], is_training, bn_decay, scope='context_encoder', use_xyz=True, output_shift=False, shift_pred=tf.stop_gradient(shift_pred_seed), fps_idx=ind_seed)
_, pcfea_seed, _, _ = multi_encoding_net(pc, None, nsmp, [0.25,0.5,1.0,1.5], [96,192,192,384], [[64,64,128], [64,128,128], [64,128,128], [64,128,128]], [], is_training, bn_decay, scope='context_encoder', use_xyz=True, output_shift=False, shift_pred=tf.stop_gradient(shift_pred_seed), fps_idx=ind_seed)
# _, pcfea_seed, _, _ = multi_encoding_net(pc, None, nsmp, [0.4,0.8,1.5], [192,256,384], [[64,128,192], [64,128,192], [64,128,192]], [], is_training, bn_decay, scope='context_encoder', use_xyz=True, output_shift=False, shift_pred=tf.stop_gradient(shift_pred_seed), fps_idx=ind_seed)
# Compute foreground/background score [B, nsmp, 2]
fb_logits = fea_trans_net(pcfea_seed, [256, 64, 2], 'fb_logits', is_training, bn_decay)
# fb_logits = fea_trans_net(tf.concat((sem_fea_seed, pcfea_seed), axis=-1), [256, 64, 2], 'fb_logits', is_training, bn_decay)
fb_prob = tf.nn.softmax(fb_logits, -1)
#### CVAE
# Compute mu and sigma [B, nsmp, 512]
# mu_sigma_c = fea_trans_net(tf.concat((sem_fea_seed, pcfea_seed), axis=-1), [256, 512, 512], 'mu_sigma_c', is_training, bn_decay)
# mu_sigma_x = fea_trans_net(tf.concat((sem_fea_seed, pcfea_seed, pcfea_ins_seed), axis=-1), [256, 512, 512], 'mu_sigma_x', is_training, bn_decay)
mu_sigma_c = fea_trans_net(pcfea_seed, [256, 512, 512], 'mu_sigma_c', is_training, bn_decay)
mu_sigma_x = fea_trans_net(tf.concat((pcfea_seed, pcfea_ins_seed), axis=-1), [256, 512, 512], 'mu_sigma_x', is_training, bn_decay)
# mu_sigma_x = fea_trans_net(pcfea_ins_seed, [256, 512, 512], 'mu_sigma_x', is_training, bn_decay)
# Sample z [B, nsmp, 256]
mean = mu_sigma_x[:,:,:256]
log_var = mu_sigma_x[:,:,256:]
log_var = tf.clip_by_value(log_var, -10.0, 1.0)
cmean = mu_sigma_c[:,:,:256]
clog_var = mu_sigma_c[:,:,256:]
clog_var = tf.clip_by_value(clog_var, -10.0, 1.0)
zi = sample(mean, log_var)
zc = cmean
z = tf.cond(is_training, lambda: zi, lambda: zc)
# Decode shapes pc [B, nsmp, nsmp_ins, 3]
gcfeat = tf_util.conv1d(pcfea_seed, 256, 1, padding='VALID', bn=True, is_training=is_training,
scope='dec_fc', bn_decay=bn_decay)
feat = tf.concat((z, gcfeat), axis=-1)
pc_ins_pred, pc_ins_conf_pred = decoding_conf_net(feat, nsmp_ins, 'decoder', is_training=is_training, bn_decay=bn_decay)
# pc_ins_pred = decoding_net(feat, nsmp_ins, 'decoder', is_training=is_training, bn_decay=bn_decay)
pc_ins_pred = pc_ins_pred + tf.stop_gradient(tf.expand_dims(shift_pred_seed, 2))
# pc_ins_pred = pc_ins_pred + tf.expand_dims(shift_pred_seed, 2)
# Collect bbox for reconstructions, pc_ins_conf_pred [B, nsmp, nsmp_ins]
pc_ins_pred_world_coord = pc_ins_pred + tf.expand_dims(pc_seed, 2)
bbox_ins_pred = tf.concat(((tf.reduce_max(pc_ins_pred_world_coord, 2)+tf.reduce_min(pc_ins_pred_world_coord, 2))/2,
tf.reduce_max(pc_ins_pred_world_coord, 2)-tf.reduce_min(pc_ins_pred_world_coord, 2)), 2) # [B, nsmp, 6] -> center + l,w,h
# pc_ins_pred_world_coord = pc_ins_pred + tf.expand_dims(pc_seed, 2) # [B, nsmp, nsmp_ins, 3]
# inconf_pt_mask = 1000.0*tf.cast(tf.greater(pc_ins_conf_pred,0.5), tf.float32)
# inconf_pt_mask = tf.multiply(inconf_pt_mask, 1-tf.cast(tf.less_equal(pc_ins_conf_pred, 1e-3+tf.reduce_min(pc_ins_conf_pred, 2, keep_dims=True)), tf.float32))
# inconf_pt_mask = tf.expand_dims(inconf_pt_mask, -1) # [B, nsmp, nsmp_ins, 1]
# max_edge = tf.reduce_max(pc_ins_pred_world_coord-inconf_pt_mask, 2)
# min_edge = tf.reduce_min(pc_ins_pred_world_coord+inconf_pt_mask, 2)
# bbox_ins_pred = tf.concat(((max_edge+min_edge)/2,
# max_edge-min_edge), 2) # [B, nsmp, 6] -> center + l,w,h
# # Random shift and scale box during training
# bbox_ins_pred = tf.concat( (bbox_ins_pred[:,:,:3]+bbox_ins_pred[:,:,3:]*tf.random_normal(tf.shape(bbox_ins_pred[:,:,3:]), mean=0.0, stddev=0.1),
# bbox_ins_pred[:,:,3:]*tf.random_normal(tf.shape(bbox_ins_pred[:,:,3:]), mean=1.0, stddev=0.1)), axis=-1)
# Propagate seed feature and center position
if return_fullfea:
entity_fea = pointnet_fp_module(pc, pc_seed, None, pcfea_seed, [], is_training=False, bn_decay=None, scope='entity_fea_prop', bn=False)
# shift_pred = pointnet_fp_module(pc, pc_seed, None, shift_pred_seed, [], is_training=False, bn_decay=None, scope='shift_prop', bn=False)
shift_pred = tf.multiply(end_points['shift_pred_full_4d'][:,:,:3], end_points['shift_pred_full_4d'][:,:,3:])
center_pos = pc+shift_pred
end_points['entity_fea'] = entity_fea # [B, N, 256] entity feature of each point
end_points['center_pos'] = center_pos # [B, N, 3] center location in the world coord sys
# Store end_points
end_points['shift_pred_seed'] = shift_pred_seed # [B, nsmp, 3], offset from seed point to ins center
end_points['shift_pred_seed_4d'] = shift_pred_seed_4d # [B, nsmp, 4], offset from seed point to ins center
end_points['pc_seed'] = pc_seed # [B, nsmp, 3], seed point coordinate in world coord sys
end_points['ind_seed'] = ind_seed # [B, nsmp], seed index
end_points['pc_ins_centered_seed'] = pc_ins_centered_seed # [B, nsmp, nsmp_ins, 3], centered gt instance point cloud for each seed
end_points['pc_ins_center_seed'] = pc_ins_center_seed # [B, nsmp, 1, 3], gt instance center for each seed in world coord sys
end_points['mean'] = mean # [B, nsmp, 256]
end_points['log_var'] = log_var
end_points['cmean'] = cmean
end_points['clog_var'] = clog_var
end_points['fb_logits'] = fb_logits # [B, nsmp, 2] foreground/background logits
end_points['fb_prob'] = fb_prob # [B, nsmp, 2] foreground/background probability
end_points['pc_ins_pred'] = pc_ins_pred # [B, nsmp, nsmp_ins, 3], in local sys, needs to add pc_seed
end_points['pc_ins_conf_pred'] = pc_ins_conf_pred # [B, nsmp, nsmp_ins] per-point confidence
end_points['bbox_ins_pred'] = bbox_ins_pred # [B, nsmp, 6]
return end_points #, alexnetmodel
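# Hedged usage sketch tying the proposal net to the NMS below (session handling
# and feeds assumed, not shown here): score each seed by its foreground
# probability and prune overlapping predicted boxes.
# ep = shape_proposal_net(pc, pc_ins, group_label, group_indicator, bbox_ins_gt,
#                         num_category, 'spn', is_training=tf.constant(False))
# scores_np, boxes_np = sess.run([ep['fb_prob'], ep['bbox_ins_pred']])
# keep = nms_3d(boxes_np, scores_np[:, :, 1], pre_nms_limit=64, max_output_size=16)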
def nms_3d(boxes, scores, pre_nms_limit, max_output_size, iou_threshold=0.5, score_threshold=float('-inf')):
''' Non maximum suppression in 3D
Inputs:
boxes: [B, N, 6] center + l,w,h
scores: [B, N] prob between 0 and 1
Outputs:
selected_indices: [B, M]
'''
batch_size = scores.shape[0]
num_box_input = scores.shape[1]
sidx = np.argsort(-scores, 1) # [B, N] from large to small
selected_indices = -np.ones((batch_size, max_output_size), dtype=np.int32) # [B, M]
for i in range(batch_size):
cursidx = sidx[i,:]
curscores = scores[i,:]
curvolume = boxes[i,:,3]*boxes[i,:,4]*boxes[i,:,5]
if pre_nms_limit>0:
cursidx = cursidx[:pre_nms_limit]
cursidx = cursidx[curscores[cursidx]>score_threshold]
count = 0
while len(cursidx)>0 and count<max_output_size:
selected_indices[i,count] = cursidx[0]
count += 1
vA = np.maximum(boxes[i,[cursidx[0]],:3]-boxes[i,[cursidx[0]],3:]/2, boxes[i,cursidx,:3]-boxes[i,cursidx,3:]/2)
vB = np.minimum(boxes[i,[cursidx[0]],:3]+boxes[i,[cursidx[0]],3:]/2, boxes[i,cursidx,:3]+boxes[i,cursidx,3:]/2)
intersection_cube = np.maximum(vB-vA,0)
intersection_volume = intersection_cube[:,0]*intersection_cube[:,1]*intersection_cube[:,2]
iou = np.divide(intersection_volume,curvolume[cursidx]+curvolume[cursidx[0]]-intersection_volume+1e-8)
cursidx = np.delete(cursidx, np.where(iou>iou_threshold)[0])
return selected_indices
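# Minimal numpy check of nms_3d (synthetic boxes, illustrative only): the two
# heavily-overlapping boxes collapse to one selection, the distant box is kept.
# import numpy as np
# boxes = np.array([[[0.00, 0, 0, 1, 1, 1],
#                    [0.05, 0, 0, 1, 1, 1],
#                    [5.00, 5, 5, 1, 1, 1]]], dtype=np.float32)  # [1, 3, 6]
# scores = np.array([[0.9, 0.8, 0.7]], dtype=np.float32)         # [1, 3]
# print(nms_3d(boxes, scores, pre_nms_limit=-1, max_output_size=3))
# # -> [[ 0  2 -1]]  (box 1 suppressed: IoU with box 0 ~ 0.90 > 0.5)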
def gather_selection(source, selected_idx, max_selected_size):
'''
Inputs:
source: [B, N, C]
selected_idx: [B, M], -1 means not selecting anything
Returns:
target: [B, M, C], 0 padded
'''
batch_size = source.get_shape()[0].value
fea_size = source.get_shape()[2].value
pos_idx = tf.cast(tf.where(tf.greater_equal(selected_idx,0)), tf.int32)
selected_idx_vec = tf.gather_nd(selected_idx, pos_idx)
target_vec = tf.gather_nd(source, tf.concat((tf.expand_dims(pos_idx[:,0],-1), tf.reshape(selected_idx_vec,[-1,1])),1))
target = tf.scatter_nd(pos_idx, target_vec, tf.constant([batch_size, max_selected_size, fea_size]))
return target
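# gather_selection pairs with nms_3d: selected_idx is its [B, M] output, and
# the -1 padding entries turn into zero rows. Shape-only example:
# source of shape [1, 3, 2] with selected_idx [[2, 0, -1]] yields
# target = [[source[0, 2], source[0, 0], [0, 0]]]   # [1, 3, 2], last row 0-padded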
def trim_zeros_graph(boxes, name=None):
"""Often boxes are represented | |
= U.dag()*U_target
part_idx = [0, 1, 3, 4] # only computational subspace
ptrace = 0
for i in part_idx:
ptrace += inner[i, i]
dim = 4 # 2 qubits comp subspace
return np.real(((np.abs(ptrace))**2+dim*(1-L1))/(dim*(dim+1)))
elif U.type=='super':
U=qtp.to_super(Ucorrection)*U
kraus_form = qtp.to_kraus(U)
dim=4 # 2 qubits in the computational subspace
part_idx = [0, 1, 3, 4] # only computational subspace
psum=0
for A_k in kraus_form:
ptrace = 0
inner = U_target_diffdims.dag()*A_k # otherwise dimension mismatch
for i in part_idx:
ptrace += inner[i, i]
psum += (np.abs(ptrace))**2
return np.real((dim*(1-L1) + psum) / (dim*(dim + 1)))
def leakage_from_superoperator(U):
if U.type=='oper':
"""
Calculates leakage by summing over all input and output states in the
computational subspace.
L1 = 1 - 1/2^{n_comp_qubits} * sum_i sum_j |<phi_i|U|phi_j>|^2
"""
sump = 0
for i in range(4):
for j in range(4):
bra_i = qtp.tensor(qtp.ket([i//2], dim=[3]),
qtp.ket([i % 2], dim=[3])).dag()
ket_j = qtp.tensor(qtp.ket([j//2], dim=[3]),
qtp.ket([j % 2], dim=[3]))
p = np.abs((bra_i*U*ket_j).data[0, 0])**2
sump += p
sump /= 4 # divide by dimension of comp subspace
L1 = 1-sump
return L1
elif U.type=='super':
"""
Calculates leakage by summing over all input and output states in the
computational subspace.
L1 = 1 - 1/2^{n_comp_qubits} * sum_i sum_j Tr(rho_i C_U(rho_j))
where C_U is U in the channel representation
"""
sump = 0
for i in range(4):
for j in range(4):
ket_i = qtp.tensor(qtp.ket([i//2], dim=[3]),
qtp.ket([i % 2], dim=[3])) #notice it's a ket
rho_i=qtp.operator_to_vector(qtp.ket2dm(ket_i))
ket_j = qtp.tensor(qtp.ket([j//2], dim=[3]),
qtp.ket([j % 2], dim=[3]))
rho_j=qtp.operator_to_vector(qtp.ket2dm(ket_j))
p = (rho_i.dag()*U*rho_j).data[0, 0]
sump += p
sump /= 4 # divide by dimension of comp subspace
sump=np.real(sump)
L1 = 1-sump
return L1
def seepage_from_superoperator(U):
"""
Calculates seepage by summing over all input and output states outside the
computational subspace.
L2 = 1 - 1/(number of non-computational states) * sum_i sum_j |<phi_i|U|phi_j>|^2
"""
if U.type=='oper':
sump = 0
for i_list in [[0,2],[1,2],[2,0],[2,1],[2,2]]:
for j_list in [[0,2],[1,2],[2,0],[2,1],[2,2]]:
bra_i = qtp.tensor(qtp.ket([i_list[0]], dim=[3]),
qtp.ket([i_list[1]], dim=[3])).dag()
ket_j = qtp.tensor(qtp.ket([j_list[0]], dim=[3]),
qtp.ket([j_list[1]], dim=[3]))
p = np.abs((bra_i*U*ket_j).data[0, 0])**2 # could be sped up
sump += p
sump /= 5 # divide by number of non-computational states
L1 = 1-sump
return L1
elif U.type=='super':
sump = 0
for i_list in [[0,2],[1,2],[2,0],[2,1],[2,2]]:
for j_list in [[0,2],[1,2],[2,0],[2,1],[2,2]]:
ket_i = qtp.tensor(qtp.ket([i_list[0]], dim=[3]),
qtp.ket([i_list[1]], dim=[3]))
rho_i=qtp.operator_to_vector(qtp.ket2dm(ket_i))
ket_j = qtp.tensor(qtp.ket([j_list[0]], dim=[3]),
qtp.ket([j_list[1]], dim=[3]))
rho_j=qtp.operator_to_vector(qtp.ket2dm(ket_j))
p = (rho_i.dag()*U*rho_j).data[0, 0]
sump += p
sump /= 5 # divide by number of non-computational states
sump=np.real(sump)
L1 = 1-sump
return L1
def pro_avfid_superoperator(U):
"""
Average process (gate) fidelity in the whole space for two qutrits
"""
if U.type=='oper':
ptrace = np.abs((U.dag()*U_target).tr())**2
dim = 9 # dimension of the whole space
return np.real((ptrace+dim)/(dim*(dim+1)))
elif U.type=='super':
return np.real(qtp.average_gate_fidelity(U,target=U_target_diffdims))
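# Sanity check of the formula F = (|Tr(U^dag U_target)|^2 + d) / (d*(d+1)):
# for U = U_target on two qutrits (d = 9) the trace equals d, so
# F = (81 + 9) / (9 * 10) = 1, as expected for a perfect gate.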
def pro_avfid_superoperator_phasecorrected(U,phases):
"""
Average process (gate) fidelity in the whole space for a qubit and qutrit
Qubit Z rotation and qutrit "Z" rotations are applied, taking into account the anharmonicity as well
"""
Ucorrection = qtp.Qobj([[np.exp(-1j*np.deg2rad(phases[0])), 0, 0, 0, 0, 0, 0, 0, 0],
[0, np.exp(-1j*np.deg2rad(phases[1])), 0, 0, 0, 0, 0, 0, 0],
[0, 0, np.exp(-1j*np.deg2rad(phases[4]-phases[-1])), 0, 0, 0, 0, 0, 0],
[0, 0, 0, np.exp(-1j*np.deg2rad(phases[2])), 0, 0, 0, 0, 0],
[0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[3]-phases[-1])), 0, 0, 0, 0],
[0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[4]-phases[-1]+phases[2]-phases[0])), 0, 0, 0],
[0, 0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[5])), 0, 0],
[0, 0, 0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[5]+phases[1]-phases[0])), 0],
[0, 0, 0, 0, 0, 0, 0, 0, np.exp(-1j*np.deg2rad(phases[4]-phases[-1]+phases[5]-phases[0]))]],
type='oper',
dims=[[3, 3], [3, 3]])
if U.type=='oper':
U=Ucorrection*U
ptrace = np.abs((U.dag()*U_target).tr())**2
dim = 9 # dimension of the whole space
return np.real((ptrace+dim)/(dim*(dim+1)))
elif U.type=='super':
U=qtp.to_super(Ucorrection)*U
return np.real(qtp.average_gate_fidelity(U,target=U_target_diffdims))
'''
# benchmark for phase correction
gamma=2j*np.pi/3
alpha=2j*np.pi
beta=2j*np.pi/2
phi=2j*np.pi/4
delta=2j*np.pi/7
Uphases= qtp.Qobj([[np.exp(gamma), 0, 0, 0, 0, 0],
[0, np.exp(gamma+beta), 0, 0, 0, 0],
[0, 0, np.exp(gamma+delta+phi), 0, 0, 0],
[0, 0, 0, np.exp(gamma+alpha), 0, 0],
[0, 0, 0, 0, np.exp(gamma+alpha+beta+phi), 0],
[0, 0, 0, 0, 0, np.exp(gamma+alpha+delta)]],
type='oper',
dims=[[2, 3], [2, 3]])
print(Uphases)
phases=phases_from_superoperator(Uphases)
print(phases)
print(pro_avfid_superoperator_phasecorrected(Uphases,phases))
print(pro_avfid_superoperator_compsubspace_phasecorrected(Uphases,
leakage_from_superoperator(Uphases),phases))
'''
#tlist = np.arange(0, 240e-9, 1/2.4e9)
def matrix_change_of_variables(H_0):
eigs,eigvectors=H_0.eigenstates()
eigvectors_ordered_according2basis = []
eigvectors_ordered_according2basis.append(eigvectors[0].full()) # 00 state
eigvectors_ordered_according2basis.append(eigvectors[2].full()) # 01 state
eigvectors_ordered_according2basis.append(eigvectors[5].full()) # 02 state
eigvectors_ordered_according2basis.append(eigvectors[1].full()) # 10 state
eigvectors_ordered_according2basis.append(eigvectors[4].full()) # 11 state
eigvectors_ordered_according2basis.append(eigvectors[7].full()) # 12 state
eigvectors_ordered_according2basis.append(eigvectors[3].full()) # 20 state
eigvectors_ordered_according2basis.append(eigvectors[6].full()) # 21 state
eigvectors_ordered_according2basis.append(eigvectors[8].full()) # 22 state
S=np.hstack(eigvectors_ordered_according2basis)
return S
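# S stacks the eigenvectors of H_0 as columns, reordered to follow the bare
# two-qutrit basis (00, 01, 02, 10, 11, 12, 20, 21, 22). Quick consistency
# check (sketch): for Hermitian H_0, S^dag H_0 S should be diagonal with the
# eigenvalues on the diagonal (up to the chosen ordering).
# S = matrix_change_of_variables(H_0)
# H_diag = np.conj(S).T @ H_0.full() @ S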
def time_evolution(sim_step,eps_vec,H_0,c_ops,initial_propagator):
'''scalefactor=1e6
tlist_sim=tlist_sim*scalefactor
eps_vec=eps_vec/scalefactor
H_0=H_0/scalefactor
if c_ops!=[]: # c_ops is a list of either operators or lists where the first element is
# an operator and the second one is a list of the (time-dependent) coefficients
for c in range(len(c_ops)):
if isinstance(c_ops[c],list):
c_ops[c][1]=c_ops[c][1]/np.sqrt(scalefactor)
else:
c_ops[c]=c_ops[c]/np.sqrt(scalefactor)'''
exp_L_total=initial_propagator
#S = qtp.Qobj(matrix_change_of_variables(H_0),dims=[[3, 3], [3, 3]])
'''
if isinstance(tlist_sim,list):
length_tlist=len(tlist_sim)
else:
length_tlist=np.size(tlist_sim)'''
for i, eps in enumerate(eps_vec): # index i is needed below for time-dependent c_ops coefficients
H=H_0+eps*H_c #(eps_vec[i+1]+eps_vec[i])/2
#H=S*H*S.dag()
c_ops_temp=[]
if c_ops != []:
for c in range(len(c_ops)):
if isinstance(c_ops[c],list):
c_ops_temp.append(c_ops[c][0]*c_ops[c][1][i])
else:
c_ops_temp.append(c_ops[c])
liouville_exp_t=(qtp.liouvillian(H,c_ops_temp)*sim_step).expm()
else:
liouville_exp_t=(-1j*H*sim_step).expm()
exp_L_total=liouville_exp_t*exp_L_total
return exp_L_total
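# Hedged usage sketch for time_evolution (H_0, H_c and eps_vec as defined
# elsewhere in this module; a sim_step of 1/2.4e9 s matches the sampling rate
# mentioned above). Without collapse operators the loop multiplies unitary
# steps expm(-1j*H*dt), so the initial propagator is a plain identity:
# U_final = time_evolution(sim_step=1/2.4e9, eps_vec=eps_vec, H_0=H_0,
#                          c_ops=[], initial_propagator=qtp.qeye([3, 3]))
# With collapse operators, each step instead exponentiates the Liouvillian and
# the initial propagator should be qtp.to_super(qtp.qeye([3, 3])).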
def simulate_quantities_of_interest_superoperator(H_0, tlist, c_ops, eps_vec,
sim_step,
verbose: bool=True):
"""
Calculates the quantities of interest from the propagator U
Args:
H_0 (Qobj): static hamiltonian, see "coupled_transmons_hamiltonian"
for the expected form of the Hamiltonian.
tlist (array): times in s, describes the x component of the
trajectory to simulate
c_ops (list of Qobj): list of jump operators, time-independent at the moment
eps_vec(array): detuning describes the y-component of the trajectory
to simulate.
Returns
phi_cond (float): conditional phase (deg)
L1 (float): leakage
L2 (float): seepage
avgatefid (float): average gate fidelity in full space
avgatefid_compsubspace (float): average gate fidelity only in the computational subspace
"""
scalefactor=1 # otherwise qtp.propagator in parallel mode doesn't work
# time is multiplied by scalefactor and frequency is divided by it
tlist=tlist*scalefactor
eps_vec=eps_vec/scalefactor
sim_step=sim_step*scalefactor
H_0=H_0/scalefactor
if c_ops!=[]: # c_ops is a list of either operators or lists where the first element is
# an operator and the second one is a list of the (time-dependent) coefficients
for c in range(len(c_ops)):
if isinstance(c_ops[c],list):
c_ops[c][1]=c_ops[c][1]/np.sqrt(scalefactor)
else:
c_ops[c]=c_ops[c]/np.sqrt(scalefactor)
''' # step of 1/sampling_rate=1/2.4e9=0.4 ns seems good by itself
sim_step_new=sim_step*2
eps_interp = interp1d(tlist, eps_vec, fill_value='extrapolate')
tlist_new = (np.linspace(0, np.max(tlist), 576/2))
eps_vec_new=eps_interp(tlist_new)
c_ops_new=[]
for c in range(len(c_ops)):
if isinstance(c_ops[c],list):
c_ops_interp=interp1d(tlist,c_ops[c][1], fill_value='extrapolate')
c_ops_new.append([c_ops[c][0],c_ops_interp(tlist_new)])
else:
c_ops_new.append(c_ops[c])
# function only exists to wrap
#def eps_t(t, args=None):
# return eps_interp(t)
print(len(eps_vec),len(eps_vec_new))
t0 = time.time()
exp_L_total_new=1
for i in range(len(tlist_new)):
H=H_0+eps_vec_new[i]*H_c
c_ops_temp=[]
for c in range(len(c_ops_new)):
if isinstance(c_ops_new[c],list):
c_ops_temp.append(c_ops_new[c][0]*c_ops_new[c][1][i])
else:
c_ops_temp.append(c_ops_new[c])
liouville_exp_t=(qtp.liouvillian(H,c_ops_temp)*sim_step_new).expm()
exp_L_total_new=liouville_exp_t*exp_L_total_new
#exp_L_oneway=(qtp.liouvillian(H_0,c_ops)*240e-3).expm()
t1 = time.time()
print('\n alternative propagator_new',t1-t0)
'''
t0 = time.time()
exp_L_total=1
for i in range(len(tlist)):
H=H_0+eps_vec[i]*H_c
c_ops_temp=[]
for c in range(len(c_ops)):
if isinstance(c_ops[c],list):
c_ops_temp.append(c_ops[c][0]*c_ops[c][1][i])
else:
c_ops_temp.append(c_ops[c])
liouville_exp_t=(qtp.liouvillian(H,c_ops_temp)*sim_step).expm()
exp_L_total=liouville_exp_t*exp_L_total
#exp_L_oneway=(qtp.liouvillian(H_0,c_ops)*240e-3).expm()
t1 = time.time()
#print('\n alternative propagator',t1-t0)
''' # qutip propagator not used anymore because it takes too much time
t0 = time.time()
if c_ops==[]:
nstepsmax=1000
else:
nstepsmax=100000
H_t = [H_0, [H_c, eps_vec]]
U_t = qtp.propagator(H_t, tlist, c_ops, parallel=True, options=qtp.Options(nsteps=nstepsmax)) # returns unitary 'oper' if c_ops=[], otherwise 'super'
t1 = time.time()
print('/n propagator',t1-t0)
if verbose:
print('simulation took {:.2f}s'.format(t1-t0))
'''
U_final = exp_L_total
phases = phases_from_superoperator(U_final)
phi_cond = phases[-1]
L1 = leakage_from_superoperator(U_final)
L2 = seepage_from_superoperator(U_final)
avgatefid = pro_avfid_superoperator_phasecorrected(U_final,phases)
avgatefid_compsubspace = pro_avfid_superoperator_compsubspace_phasecorrected(U_final,L1,phases) # leakage has to be taken into account, see Wood & Gambetta
#print('avgatefid_compsubspace',avgatefid_compsubspace)
'''
U_final = exp_L_total_new
phases2 = phases_from_superoperator(U_final)
phi_cond2 = phases2[-1]
L12 = leakage_from_superoperator(U_final)
L22 = seepage_from_superoperator(U_final)
avgatefid2 = pro_avfid_superoperator_phasecorrected(U_final,phases2)
avgatefid_compsubspace2 = pro_avfid_superoperator_compsubspace_phasecorrected(U_final,L12,phases2)
print(phi_cond-phi_cond2,phi_cond)
print(L1-L12,L1)
print(L2-L22,L2)
print(avgatefid-avgatefid2,avgatefid)
print(avgatefid_compsubspace-avgatefid_compsubspace2,avgatefid_compsubspace)
'''
return {'phi_cond': phi_cond, 'L1': L1, 'L2': L2, 'avgatefid_pc': avgatefid, 'avgatefid_compsubspace_pc': avgatefid_compsubspace}
def simulate_quantities_of_interest_superoperator2(U_final):
"""
Calculates the quantities of interest from the propagator U_final
Args:
U_final = propagator (either unitary or superoperator)
Returns
phi_cond (float): conditional phase (deg)
L1 (float): leakage
L2 (float): seepage
avgatefid (float): average gate fidelity in full space
avgatefid_compsubspace (float): average gate fidelity only in the computational subspace
"""
phases = phases_from_superoperator(U_final)
phi_cond = phases[-1]
L1 = leakage_from_superoperator(U_final)
L2 = seepage_from_superoperator(U_final)
avgatefid = pro_avfid_superoperator_phasecorrected(U_final,phases)
avgatefid_compsubspace = pro_avfid_superoperator_compsubspace_phasecorrected(U_final,L1,phases) # leakage has to be taken into account, see Wood & Gambetta
return {'phi_cond': phi_cond, 'L1': L1, 'L2': L2, 'avgatefid_pc': avgatefid, 'avgatefid_compsubspace_pc': avgatefid_compsubspace}
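# Example flow (sketch): build the propagator step-by-step with time_evolution,
# then extract all gate metrics in one call.
# U_final = time_evolution(sim_step, eps_vec, H_0, c_ops=[],
#                          initial_propagator=qtp.qeye([3, 3]))
# qoi = simulate_quantities_of_interest_superoperator2(U_final)
# print(qoi['phi_cond'], qoi['L1'], qoi['avgatefid_compsubspace_pc'])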
### functions for FAQUAD
def gap_and_eigenvectors(H_0,Omega): # returns gap between 11 and 02 and the corresponding eigenvectors
H=H_0+Omega*H_c
eigs,eigvectors=H.eigenstates() # they're ordered from smallest to highest
E_11=eigs[4]
E_02=eigs[5]
gap=np.abs(E_02-E_11)
return gap, eigvectors[4], eigvectors[5]
def integrand_for_ctilde(H_0,Omega):
dH_over_dOmega=H_c
gap,psi11,psi02=gap_and_eigenvectors(H_0,Omega)
scalprod=np.abs(psi11.dag().overlap(dH_over_dOmega*psi02))
return scalprod/gap**2
def fix_ctilde(H_0,Omega_initial,Omega_final):
result=scipy.integrate.quad(lambda Omega: integrand_for_ctilde(H_0,Omega), Omega_initial, Omega_final,epsabs=1e-14)
return result
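# FAQUAD note (sketch of the idea, consistent with the integrand above): the
# normalization constant is
# c_tilde = Integral_{Omega_i}^{Omega_f} |<11| dH/dOmega |02>| / gap(Omega)^2 dOmega,
# which fix_ctilde evaluates with scipy.integrate.quad; the pulse Omega(t) is
# then chosen so this adiabaticity measure is traversed at a constant rate.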
def create_epsvecFAQUAD(rampup_forward,rampup_backwards,nsimsteps_rampup,nsimsteps_interaction):
t_list_rampup=np.arange(0,nsimsteps_rampup+1,1)
t_list_rampup=t_list_rampup/nsimsteps_rampup
eps_rampupforward=rampup_forward(t_list_rampup)
eps_interaction=np.full(nsimsteps_interaction-1,rampup_forward([1])[0])
eps_rampupbackwards=rampup_backwards(t_list_rampup)
return np.concatenate((eps_rampupforward,eps_interaction,eps_rampupbackwards),axis=0)
def create_epsvecFAQUADandtlist(rampup_forward,rampup_backwards,nsimsteps_rampup,nsimsteps_interaction):
tlist_sim=rampup_forward[0]*nsimsteps_rampup
tlist_sim=np.concatenate((tlist_sim,np.array([nsimsteps_interaction+tlist_sim[-1]])),axis=0)
tlist_sim=np.concatenate((tlist_sim,np.delete(rampup_backwards[0],0)*nsimsteps_rampup+tlist_sim[-1]),axis=0)
epsvecFAQUAD=rampup_forward[1]
epsvecFAQUAD=np.concatenate((epsvecFAQUAD,np.array([rampup_forward[1][-1]])),axis=0)
epsvecFAQUAD=np.concatenate((epsvecFAQUAD,np.delete(rampup_backwards[1],0)),axis=0)
return epsvecFAQUAD,tlist_sim
'''
Omega_initial=0
Omega_final=w_q1-alpha_q0-w_q0
ctilde=fix_ctilde(H_0,Omega_initial,Omega_final)
s_span=(0,1)
Omega0=np.array([0,])
def rhs_diffequ(t,Omega):
dH_over_dOmega=H_c
gap,psi11,psi02=gap_and_eigenvectors(H_0,Omega[0])
scalprod=np.abs((psi11.dag()*dH_over_dOmega*psi02).data[0,0])
# -*- encoding: utf-8 -*-
"""
License: MIT
Copyright (c) 2019 - present AppSeed.us
"""
import os
import dateutil.parser
import math, json, re, itertools
import collections
from datetime import datetime, date, timedelta
from multiprocessing.pool import ThreadPool as threadpool
import random
import time
import numpy as np
import nltk
import requests
import pandas as pd
from flask import jsonify
from flask import render_template, redirect, url_for, request
from flask_babelex import _
from flask_login import login_required, current_user
from flask_uploads import UploadSet
from jinja2 import TemplateNotFound
from sqlalchemy import func, exc
from app import login_manager, db, celery
from app import constants as c
from app.main import blueprint
from app.main.models import Region, Country, VisitedCountry, Infected_Country_Category, JobCategory
from app.main.models import TravelType, BorderControl, VariousTravel, BlockpostTravel, Address, AddressLocationType, HGBDToken, OldDataTravel
from app.main.patients.forms import PatientForm, UpdateProfileForm, AddFlightFromExcel, ContactedPatientsSearchForm, PatientsSearchForm, SelectContactedForm
from app.main.patients.models import Patient, PatientStatus, ContactedPersons, State, PatientState
from app.main.patients.modules import ContactedPatientsTableModule, AllPatientsTableModule
from app.main.hospitals.models import Hospital, Hospital_Type
from app.main.flights_trains.models import FlightCode, FlightTravel, Train, TrainTravel
from app.main.forms import TableSearchForm
from app.main.routes import route_template
from app.main.util import get_regions, get_regions_choices, get_flight_code, populate_form, parse_date
from app.login.util import hash_pass
def process_travel_type(typ, type_value, user_right, patient_form):
if typ.value == type_value and user_right:
patient_form.travel_type.choices.append((typ.value, typ.name))
return True
return False
def prepare_patient_form(patient_form, with_old_data = False, with_all_travel_type=False, search_form=False):
regions_choices = get_regions_choices(current_user, False)
# Regions for select field
if not patient_form.region_id.choices:
patient_form.region_id.choices = [("", "")] if not search_form else [(-1, c.all_regions)]
patient_form.region_id.choices += regions_choices
if current_user.region_id != None and not search_form:
patient_form.region_id.default = current_user.region_id
if not search_form:
if not patient_form.hospital_region_id.choices:
patient_form.hospital_region_id.choices = regions_choices
if current_user.region_id != None:
patient_form.hospital_region_id.default = current_user.region_id
if not current_user.user_role.can_found_by_default:
patient_form.is_found_date.default = datetime.today()
patient_form.is_found.default = 1
# TravelTypes for the select field: Local ("Местный"), Flight ("Самолет"), etc.
if not patient_form.travel_type.choices:
patient_form.travel_type.choices = [] if not with_all_travel_type else [c.all_travel_types]
for typ in TravelType.query.all():
for travel_typ in [(c.flight_type[0], current_user.user_role.can_add_air),
(c.train_type[0], current_user.user_role.can_add_train),
(c.local_type[0], current_user.user_role.can_add_local),
(c.by_auto_type[0], current_user.user_role.can_add_auto),
(c.by_foot_type[0], current_user.user_role.can_add_foot),
(c.by_sea_type[0], current_user.user_role.can_add_sea),
(c.blockpost_type[0], current_user.user_role.can_add_blockpost)]:
if process_travel_type(typ, travel_typ[0], travel_typ[1], patient_form):
break
if typ.value == c.old_data_type[0]:
if with_old_data:
patient_form.travel_type.choices.append((typ.value, typ.name))
if not search_form:
patient_form.travel_type.default = c.local_type[0]
# Flight Travel
if not patient_form.flight_arrival_date.choices:
patient_form.flight_arrival_date.choices = [c.all_dates] if search_form else []
dates = np.unique([f.date for f in FlightCode.query.all()])
patient_form.flight_arrival_date.choices += [(date, date) for date in dates]
# Flight Code id if exists
if not patient_form.flight_code_id.choices:
if patient_form.flight_arrival_date.choices and not search_form:
first_date = patient_form.flight_arrival_date.choices[0][0]
patient_form.flight_code_id.choices = [(f.id,"{}, {} - {}".format(
f.code, f.from_city, f.to_city)) for f in FlightCode.query.filter_by(date=first_date).all()]
else:
patient_form.flight_code_id.choices = []
t_ids = {}
for typ in TravelType.query.all():
t_ids[typ.value] = typ.id
travel_id_form = [(t_ids[c.by_auto_type[0]], patient_form.auto_border_id),
(t_ids[c.by_foot_type[0]], patient_form.foot_border_id),
(t_ids[c.by_sea_type[0]], patient_form.sea_border_id)]
# Various Travel
for typ_id, typ_select in travel_id_form:
if not typ_select.choices:
typ_select.choices = [c.all_blockposts] if search_form else []
borders = BorderControl.query.filter_by(travel_type_id = typ_id).all()
typ_select.choices += [(b.id, b.name) for b in borders]
# Blockpost Travel
if not patient_form.blockpost_region_id.choices:
patient_form.blockpost_region_id.choices = get_regions_choices(current_user, with_all_regions=search_form)
# Hospital
if not search_form:
hospital_types = Hospital_Type.query.all()
hospital_types = [(h.id, h.name) for h in hospital_types]
patient_form.hospital_type_id.choices = hospital_types
# States
# patient_form.patient_status.choices = [(s[0], s[1]) for s in c.form_states]
# Countries
countries = Country.query.all()
kz = Country.query.filter_by(code="KZ").first()
# Job Category
job_categories = JobCategory.query.all()
if not patient_form.job_category_id.choices:
patient_form.job_category_id.choices = [] if not search_form else [c.all_job_categories]
if search_form:
patient_form.job_category_id.default = c.all_job_categories[0]
patient_form.job_category_id.choices += [c.unknown]
else:
patient_form.job_category_id.choices += [c.unknown_num]
patient_form.job_category_id.choices += [(cat.id, cat.name) for cat in job_categories]
def populate_countries_select(select_input, default, with_unknown = True):
if not select_input.choices:
select_input.choices = [(-1, c.unknown[1])] if with_unknown else []
select_input.choices += [(c.id, c.name) for c in countries]
select_input.default = default
if not search_form:
populate_countries_select(patient_form.citizenship_id, kz.id, False)
populate_countries_select(patient_form.country_of_residence_id, kz.id)
populate_countries_select(patient_form.home_address_country_id, kz.id)
populate_countries_select(patient_form.job_address_country_id, kz.id)
populate_countries_select(patient_form.visited_country_id, -1)
# Address loc. type
patient_form.home_address_location_type_id.choices = [c.unknown_loc_type] + [(loc_type.id, loc_type.name) for loc_type in AddressLocationType.query.all()]
return patient_form
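# Hedged usage sketch (the route handler is assumed, not shown here): populate
# a fresh search form, where the region / travel-type selects get "all" entries.
# form = prepare_patient_form(PatientsSearchForm(), with_all_travel_type=True,
#                             search_form=True)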
# @celery.task(bind=True)
# def long_task(self):
# """Background task that runs a long function with progress reports."""
# verb = ['Starting up', 'Booting', 'Repairing', 'Loading', 'Checking']
# adjective = ['master', 'radiant', 'silent', 'harmonic', 'fast']
# noun = ['solar array', 'particle reshaper', 'cosmic ray', 'orbiter', 'bit']
# message = ''
# total = random.randint(10, 50)
# for i in range(total):
# if not message or random.random() < 0.25:
# message = '{0} {1} {2}...'.format(random.choice(verb),
# random.choice(adjective),
# random.choice(noun))
# self.update_state(state='PROGRESS',
# meta={'current': i, 'total': total,
# 'status': message})
# time.sleep(1)
# return {'current': 100, 'total': 100, 'status': 'Task completed!',
# 'result': 42}
def is_same_address(request_dict, address, form_prefix='home'):
is_same = True
country_id = request_dict[form_prefix + '_address_country_id']
if address.country_id != (int(country_id) if country_id != None else country_id):
is_same = False
elif address.state != request_dict.get(form_prefix + '_address_state', None):
is_same = False
elif address.county != request_dict.get(form_prefix + '_address_county', None):
is_same = False
elif address.city != request_dict[form_prefix + '_address_city']:
is_same = False
elif address.street != request_dict[form_prefix + '_address_street']:
is_same = False
elif address.house != request_dict[form_prefix + '_address_house']:
is_same = False
elif address.flat != request_dict.get(form_prefix + '_address_flat', None):
is_same = False
elif address.building != request_dict.get(form_prefix + '_address_building', None):
is_same = False
elif address.location_type_id != request_dict.get(form_prefix + '_address_location_type_id', None):
is_same = False
return is_same
def process_address(request_dict, form_prefix='home', lat_lng = True, address = None):
if request_dict[form_prefix + '_address_country_id'] == '-1':
request_dict[form_prefix + '_address_country_id'] = None
if request_dict.get(form_prefix + '_address_location_type_id', '-1') == '-1':
request_dict[form_prefix + '_address_location_type_id'] = None
if address is None:
address = Address()
else:
if is_same_address(request_dict, address):
return address
address.country_id = request_dict[form_prefix + '_address_country_id']
address.state = request_dict.get(form_prefix + '_address_state', None)
address.county = request_dict.get(form_prefix + '_address_county', None)
address.city = request_dict[form_prefix + '_address_city']
address.street = request_dict[form_prefix + '_address_street']
address.house = request_dict[form_prefix + '_address_house']
address.flat = request_dict.get(form_prefix + '_address_flat', None)
address.building = request_dict.get(form_prefix + '_address_building', None)
address.location_type_id = request_dict.get(form_prefix + '_address_location_type_id', None)
db.session.add(address)
db.session.commit()
if lat_lng:
lat_lng = get_lat_lng([address])[0]
address.lat = lat_lng[0]
address.lng = lat_lng[1]
db.session.add(address)
return address
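# Sketch of the intended call pattern (form keys follow the prefixes used
# above): on update, an unchanged address is returned as-is; otherwise the
# fields are overwritten and, when lat_lng is True, geocoded via get_lat_lng.
# home = process_address(request.form.to_dict(), form_prefix='home',
#                        lat_lng=True, address=patient.home_address)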
def can_we_add_patient(request_dict):
iin = request_dict.get('iin', '')
if iin:
if Patient.query.filter_by(iin = iin.strip()).count():
return False, _("Пациент с данным ИИН уже есть в системе")
pass_num = request_dict.get('pass_num', '')
if pass_num:
if Patient.query.filter_by(pass_num = request_dict['pass_num'].strip()).count():
return False, _("Пациент с данным номером паспорта уже есть в системе")
return True, None
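# Typical guard before creating a Patient (the error handling shown is
# illustrative only, not the original route code):
# ok, err_msg = can_we_add_patient(request.form.to_dict())
# if not ok:
#     # abort the add and surface err_msg to the user
#     pass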
def handle_add_update_patient(request_dict, final_dict, update_dict = {}):
form_val_key = ['region_id', 'first_name', 'second_name', 'patronymic_name', 'dob', 'iin',
'citizenship_id', 'pass_num', 'country_of_residence_id', 'telephone', 'email',
'job', 'job_position', 'hospital_id']
# 1 Set values from request_dict
for key in form_val_key:
if key in request_dict:
if key == "country_of_residence_id" and request_dict[key] == '-1':
request_dict[key] = None
final_dict[key] = request_dict[key]
# 2 Parse dob, gender and the found/transit flags
final_dict['dob'] = parse_date(request.form['dob'])
final_dict['gender'] = None if int(request_dict['gender']) == -1 else int(request_dict['gender']) == 1
if update_dict == {}:
if 'is_transit' in request_dict:
final_dict['is_transit'] = int(request_dict['is_transit']) == 1
if 'is_found' in request_dict:
final_dict['is_found'] = int(request_dict['is_found']) == 1
final_dict['is_found_date'] = request.form.get("is_found_date", None)
if 'job_category_id' in request_dict:
job_category_id = None if request_dict['job_category_id'] == "-1" else request_dict['job_category_id']
final_dict['job_category_id'] = job_category_id
# 3 Resolve the travel type
travel_type = TravelType.query.filter_by(value=request_dict['travel_type']).first()
if travel_type.value != c.old_data_type[0]:
final_dict['travel_type_id'] = travel_type.id if travel_type else None
# 5 Addresses
# Home Address
home_address = process_address(request_dict, address=update_dict.get("home_address", None))
final_dict['home_address_id'] = home_address.id
# Job Address
job_address = process_address(request_dict, "job", False, address=update_dict.get("job_address", None))
final_dict['job_address_id'] = job_address.id
def handle_after_patient(request_dict, final_dict, patient, update_dict = {}, update_patient=True):
if not update_patient:
if not current_user.user_role.can_found_by_default:
if final_dict['is_found']:
is_found_date = final_dict["is_found_date"]
patient.addState(State.query.filter_by(value=c.state_found[0]).first(), detection_date=is_found_date)
patient.is_found = True
else:
patient.addState(State.query.filter_by(value=c.state_found[0]).first())
patient.is_found = True
if current_user.user_role.can_set_transit:
if final_dict['is_transit'] == True:
patient.addState(State.query.filter_by(value=c.state_is_transit[0]).first())
travel_type = request_dict['travel_type']
if travel_type:
if travel_type == c.flight_type[0]:
f_travel = update_dict.get('flight_travel', FlightTravel(patient_id = patient.id))
f_code_id = request_dict['flight_code_id']
seat = request_dict.get('flight_seat', None)
if f_travel.flight_code_id != f_code_id or f_travel.seat != seat:
f_travel.flight_code_id = f_code_id
f_travel.seat = seat
db.session.add(f_travel)
db.session.commit()
elif travel_type == c.train_type[0]:
t_travel = update_dict.get('train_travel', TrainTravel(patient_id = patient.id))
t_id = request_dict['train_id']
wagon = request_dict.get('train_wagon', None)
seat = request_dict.get('train_seat', None)
if t_travel.train_id != t_id or t_travel.seat != seat or t_travel.wagon != wagon:
t_travel.train_id = t_id
t_travel.seat = seat
t_travel.wagon = wagon
db.session.add(t_travel)
db.session.commit()
elif travel_type == c.blockpost_type[0]:
blockpost_t = update_dict.get('blockpost_travel', BlockpostTravel(patient_id = patient.id))
date = parse_date(request_dict['arrival_date'])
blockpost_r_id = request_dict['blockpost_region_id']
if blockpost_t.region_id != blockpost_r_id or blockpost_t.date != date:
blockpost_t.date = date
blockpost_t.region_id = blockpost_r_id
db.session.add(blockpost_t)
db.session.commit()
elif travel_type == c.old_data_type[0]:
pass
else:
border_form_key = None
if travel_type == c.by_auto_type[0]:
border_form_key = 'auto_border_id'
elif travel_type == c.by_foot_type[0]:
border_form_key = 'foot_border_id'
elif travel_type == c.by_sea_type[0]:
border_form_key = 'sea_border_id'
if border_form_key:
v_travel = update_dict.get('various_travel', VariousTravel(patient_id = patient.id))
date = parse_date(request_dict['arrival_date'])
border_control_id = request_dict[border_form_key]
if v_travel.border_control_id != border_control_id or v_travel.date != date:
v_travel.date = date
v_travel.border_control_id = border_control_id
db.session.add(v_travel)
db.session.commit()
# 4 Visited Country
v_country_id = request_dict.get('visited_country_id', None)
v_country_id = None if v_country_id == '-1' else v_country_id
v_country = update_dict.get('visited_country', VisitedCountry(patient_id = patient.id))
from_date = request_dict.get('visited_from_date', None)
        self.metricDataFormat, buf = ValUtil.sshortpop(buf)
self.numberOfHMetrics, buf = ValUtil.ushortpop(buf)
return buf
def show(self):
print("[Table(%s)]" % (self.tag))
print(" version = 0x%08x" % (self.version))
print(" Ascender = %d" % (self.Ascender))
print(" Descender = %d" % (self.Descender))
print(" LineGap = %d" % (self.LineGap))
print(" advanceWidthMax = %d" % (self.advanceWidthMax))
print(" minLeftSideBearing = %d" % (self.minLeftSideBearing))
print(" minRightSideBearing = %d" % (self.minRightSideBearing))
print(" xMaxExtent = %d" % (self.xMaxExtent))
print(" caretSlopeRise = %d" % (self.caretSlopeRise))
print(" caretSlopeRun = %d" % (self.caretSlopeRun))
print(" caretOffset = %d" % (self.caretOffset))
print(" reserved1 = %d" % (self.reserved1))
print(" reserved2 = %d" % (self.reserved2))
print(" reserved3 = %d" % (self.reserved3))
print(" reserved4 = %d" % (self.reserved4))
print(" metricDataFormat = %d" % (self.metricDataFormat))
print(" numberOfHMetrics = %d" % (self.numberOfHMetrics))
# hhea table
##################################################
# vhea table
# https://www.microsoft.com/typography/otspec/vhea.htm
class VheaTable(Table):
def __init__(self, buf, tag):
super(VheaTable, self).__init__(buf, tag)
def parse(self, buf):
super(VheaTable, self).parse(buf)
self.version, buf = OTData.Fixed(buf)
self.ascent, buf = ValUtil.sshortpop(buf)
self.descent, buf = ValUtil.sshortpop(buf)
self.lineGap, buf = ValUtil.sshortpop(buf)
self.advanceHeightMax, buf = ValUtil.sshortpop(buf)
self.minTopSideBearing, buf = ValUtil.sshortpop(buf)
self.minBottomSideBearing, buf = ValUtil.sshortpop(buf)
self.yMaxExtent, buf = ValUtil.sshortpop(buf)
self.caretSlopeRise, buf = ValUtil.sshortpop(buf)
self.caretSlopeRun, buf = ValUtil.sshortpop(buf)
self.caretOffset, buf = ValUtil.sshortpop(buf)
self.reserved1, buf = ValUtil.sshortpop(buf)
self.reserved2, buf = ValUtil.sshortpop(buf)
self.reserved3, buf = ValUtil.sshortpop(buf)
self.reserved4, buf = ValUtil.sshortpop(buf)
self.metricDataFormat, buf = ValUtil.sshortpop(buf)
self.numOfLongVerMetrics, buf = ValUtil.ushortpop(buf)
return buf
def show(self):
print("[Table(%s)]" % (self.tag))
print(" version = 0x%08x" % (self.version))
print(" ascent = %d" % (self.ascent))
print(" descent = %d" % (self.descent))
print(" lineGap = %d" % (self.lineGap))
print(" advanceHeightMax = %d" % (self.advanceHeightMax))
print(" minTopSideBearing = %d" % (self.minTopSideBearing))
print(" minBottomSideBearing = %d" % (self.minBottomSideBearing))
print(" yMaxExtent = %d" % (self.yMaxExtent))
print(" caretSlopeRise = %d" % (self.caretSlopeRise))
print(" caretSlopeRun = %d" % (self.caretSlopeRun))
print(" caretOffset = %d" % (self.caretOffset))
print(" reserved1 = %d" % (self.reserved1))
print(" reserved2 = %d" % (self.reserved2))
print(" reserved3 = %d" % (self.reserved3))
print(" reserved4 = %d" % (self.reserved4))
print(" metricDataFormat = %d" % (self.metricDataFormat))
print(" numOfLongVerMetrics = %d" % (self.numOfLongVerMetrics))
# vhea table
##################################################
# hmtx table
## https://www.microsoft.com/typography/otspec/hmtx.htm
class HmtxTable(Table):
def __init__(self, buf, tag, numberOfHMetrics, numGlyphs):
self.numberOfHMetrics = numberOfHMetrics
self.numGlyphs = numGlyphs
super(HmtxTable, self).__init__(buf, tag)
def parse(self, buf):
super(HmtxTable, self).parse(buf)
self.hMetrics = []
for i in range(self.numberOfHMetrics):
hMtx = longHorMetric(buf)
self.hMetrics.append(hMtx)
buf = hMtx.buf
self.leftSideBearing, buf = ValUtil.sshortspop(buf, self.numGlyphs-self.numberOfHMetrics)
return buf
def show(self):
print("[Table(%s)]" % (self.tag))
for hMtx in self.hMetrics:
hMtx.show()
print(" leftSideBearing = {0}".format(self.leftSideBearing))
class longHorMetric(object):
def __init__(self, buf):
self.buf = self.parse(buf)
def parse(self, buf):
self.advanceWidth, buf = ValUtil.ushortpop(buf)
self.lsb, buf = ValUtil.sshortpop(buf)
return buf
def show(self):
print(" [longHorMetric]")
print(" advanceWidth = %d" % (self.advanceWidth))
print(" lsb = %d" % (self.lsb))
# hmtx table
##################################################
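# Illustrative only (assumed buffers, and assuming the hhea class follows the
# VheaTable naming): hmtx cannot be parsed standalone -- numberOfHMetrics
# comes from the hhea table and numGlyphs from maxp, so read those first.
#
#   hhea = HheaTable(hhea_buf, "hhea")
#   maxp = MaxpTable(maxp_buf, "maxp")
#   hmtx = HmtxTable(hmtx_buf, "hmtx", hhea.numberOfHMetrics, maxp.numGlyphs)
#   hmtx.show()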
# vmtx table
## https://www.microsoft.com/typography/otspec/vmtx.htm
class VmtxTable(Table):
def __init__(self, buf, tag, numOfLongVerMetrics, numGlyphs):
self.numOfLongVerMetrics = numOfLongVerMetrics
self.numGlyphs = numGlyphs
super(VmtxTable, self).__init__(buf, tag)
def parse(self, buf):
super(VmtxTable, self).parse(buf)
self.vMetrics = []
for i in range(self.numOfLongVerMetrics):
vMtx = longVerMetric(buf)
self.vMetrics.append(vMtx)
buf = vMtx.buf
self.topSideBearing, buf = ValUtil.sshortspop(buf, self.numGlyphs-self.numOfLongVerMetrics)
return buf
def show(self):
print("[Table(%s)]" % (self.tag))
for vMtx in self.vMetrics:
vMtx.show()
print(" topSideBearing = {0}".format(self.topSideBearing))
class longVerMetric(object):
def __init__(self, buf):
self.buf = self.parse(buf)
def parse(self, buf):
self.advanceHeight, buf = ValUtil.ushortpop(buf)
self.topSideBearing, buf = ValUtil.sshortpop(buf)
return buf
def show(self):
print(" [longVerMetric]")
print(" advanceHeight = %d" % (self.advanceHeight))
print(" topSideBearing = %d" % (self.topSideBearing))
# vmtx table
##################################################
# maxp table
# https://www.microsoft.com/typography/otspec/maxp.htm
class MaxpTable(Table):
def __init__(self, buf, tag):
super(MaxpTable, self).__init__(buf, tag)
def parse(self, buf):
super(MaxpTable, self).parse(buf)
self.version, buf = OTData.Fixed(buf)
self.numGlyphs, buf = ValUtil.ushortpop(buf)
if self.version >= 0x00010000:
self.maxPoints, buf = ValUtil.ushortpop(buf)
self.maxContours, buf = ValUtil.ushortpop(buf)
self.maxCompositePoints, buf = ValUtil.ushortpop(buf)
self.maxCompositeContours, buf = ValUtil.ushortpop(buf)
self.maxZones, buf = ValUtil.ushortpop(buf)
self.maxTwilightPoints, buf = ValUtil.ushortpop(buf)
self.maxStorage, buf = ValUtil.ushortpop(buf)
self.maxFunctionDefs, buf = ValUtil.ushortpop(buf)
self.maxInstructionDefs, buf = ValUtil.ushortpop(buf)
self.maxStackElements, buf = ValUtil.ushortpop(buf)
self.maxSizeOfInstructions, buf = ValUtil.ushortpop(buf)
self.maxComponentElements, buf = ValUtil.ushortpop(buf)
self.maxComponentDepth, buf = ValUtil.ushortpop(buf)
return buf
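    # Note: CFF-flavoured fonts use maxp version 0.5 (0x00005000), which only
    # carries numGlyphs; the TrueType limits above exist from version 1.0
    # onwards, hence the version gate in parse().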
def show(self):
print("[Table(%s)]" % (self.tag))
print(" version = 0x%08x" % (self.version))
print(" numGlyphs = %d" % (self.numGlyphs))
if self.version >= 0x00010000:
print(" maxPoints = %d" % (self.maxPoints))
print(" maxContours = %d" % (self.maxContours))
print(" maxCompositePoints = %d" % (self.maxCompositePoints))
print(" maxCompositeContours = %d" % (self.maxCompositeContours))
print(" maxZones = %d" % (self.maxZones))
print(" maxTwilightPoints = %d" % (self.maxTwilightPoints))
print(" maxStorage = %d" % (self.maxStorage))
print(" maxFunctionDefs = %d" % (self.maxFunctionDefs))
print(" maxInstructionDefs = %d" % (self.maxInstructionDefs))
print(" maxStackElements = %d" % (self.maxStackElements))
print(" maxSizeOfInstructions = %d" % (self.maxSizeOfInstructions))
print(" maxComponentElements = %d" % (self.maxComponentElements))
print(" maxComponentDepth = %d" % (self.maxComponentDepth))
# maxp table
##################################################
# name table
class NameTable(Table):
def __init__(self, buf, tag):
super(NameTable, self).__init__(buf, tag)
def parse(self, buf):
super(NameTable, self).parse(buf)
self.format, buf = ValUtil.ushortpop(buf)
self.count, buf = ValUtil.ushortpop(buf)
self.stringOffset, buf = ValUtil.ushortpop(buf)
self.nameRecord = []
for i in range(self.count):
name_record = NameRecord(buf)
buf = name_record.buf
self.nameRecord.append(name_record)
        if self.format != 0:
            self.langTagCount, buf = ValUtil.ushortpop(buf)
            self.langTagRecord = []  # was never initialised before appending
            for i in range(self.langTagCount):
                lang_tag_record = LangTagRecord(buf)
                buf = lang_tag_record.buf
                self.langTagRecord.append(lang_tag_record)
self.storage = buf
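        # The unparsed remainder is kept as the string storage; each record's
        # (offset, length) indexes into it, since offsets are relative to
        # stringOffset, which (for a well-formed table) is where buf now points.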
def show(self):
print("[Table(%s)]" % (self.tag))
#print("%s" % (self.buf))
print(" format = %d" % (self.format))
print(" count = %d" % (self.count))
print(" stringOffset = %d" % (self.stringOffset))
for name_record in self.nameRecord:
name_record.show(self.storage)
if self.format != 0:
for lang_tag_record in self.langTagRecord:
lang_tag_record.show(self.storage)
class NameRecord(object):
def __init__(self, buf):
self.buf = self.parse(buf)
def parse(self, buf):
self.platformID, buf = ValUtil.ushortpop(buf)
self.encodingID, buf = ValUtil.ushortpop(buf)
self.languageID, buf = ValUtil.ushortpop(buf)
self.nameID, buf = ValUtil.ushortpop(buf)
self.length, buf = ValUtil.ushortpop(buf)
self.offset, buf = ValUtil.ushortpop(buf)
return buf
def show(self, storage = None):
print(" [NameRecord]")
print(" platformID = %d" % (self.platformID))
print(" encodingID = %d" % (self.encodingID))
print(" languageID = %d" % (self.languageID))
print(" nameID = %d" % (self.nameID))
print(" length = %d" % (self.length))
print(" offset = %d" % (self.offset))
if storage:
s = storage[self.offset:self.offset+self.length]
print(" string = %s" % (self.bytes2str(s)))
def bytes2str(self, data):
if self.platformID == 0:
return data.decode("utf-8")
elif self.platformID == 1:
if self.encodingID == 0:
return data.decode("mac_roman")
elif self.encodingID == 1:
try:
return data.decode("shift_jis")
except:
pass
elif self.platformID == 3:
if self.encodingID == 1:
return data.decode("utf_16_be")
else:
raise NotImplementedError()
return data
class LangTagRecord(object):
def __init__(self, buf):
self.buf = self.parse(buf)
def parse(self, buf):
self.length, buf = ValUtil.ushortpop(buf)
self.offset, buf = ValUtil.ushortpop(buf)
return buf
def show(self, storage = None):
print(" [LangTagRecord]")
print(" length = %d" % (self.length))
print(" offset = %d" % (self.offset))
if storage:
s = storage[self.offset:self.offset+self.length]
print(" Lang-tag = %s" % (s))
# name table
##################################################
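# Illustrative only (assumed buffer): NameTable consumes its whole slice, so
# pass exactly the bytes of the 'name' table from the font directory.
#
#   name = NameTable(name_buf, "name")
#   name.show()  # prints each NameRecord plus its decoded string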
# OS/2 table
# https://www.microsoft.com/typography/otspec/os2.htm
class OS_2Table(Table):
def __init__(self, buf, tag):
super(OS_2Table, self).__init__(buf, tag)
def parse(self, buf):
super(OS_2Table, self).parse(buf)
self.version, buf = ValUtil.ushortpop(buf)
self.xAvgCharWidth, buf = ValUtil.sshortpop(buf)
self.usWeightClass, buf = ValUtil.ushortpop(buf)
self.usWidthClass, buf = ValUtil.ushortpop(buf)
self.fsType, buf = ValUtil.ushortpop(buf)
self.ySubscriptXSize, buf = ValUtil.sshortpop(buf)
self.ySubscriptYSize, buf = ValUtil.sshortpop(buf)
self.ySubscriptXOffset, buf = ValUtil.sshortpop(buf)
self.ySubscriptYOffset, buf = ValUtil.sshortpop(buf)
self.ySuperscriptXSize, buf = ValUtil.sshortpop(buf)
self.ySuperscriptYSize, buf = ValUtil.sshortpop(buf)
self.ySuperscriptXOffset, buf = ValUtil.sshortpop(buf)
self.ySuperscriptYOffset, buf = ValUtil.sshortpop(buf)
self.yStrikeoutSize, buf = ValUtil.sshortpop(buf)
self.yStrikeoutPosition, buf = ValUtil.sshortpop(buf)
self.sFamilyClass, buf = ValUtil.sshortpop(buf)
self.panose, buf = ValUtil.bytespop(buf, 10)
self.ulUnicodeRange1, buf = ValUtil.ulongpop(buf)
self.ulUnicodeRange2, buf = ValUtil.ulongpop(buf)
self.ulUnicodeRange3, buf = ValUtil.ulongpop(buf)
self.ulUnicodeRange4, buf = ValUtil.ulongpop(buf)
self.achVendID, buf = ValUtil.charspop(buf, 4)
self.fsSelection, buf = ValUtil.ushortpop(buf)
self.usFirstCharIndex, buf = ValUtil.ushortpop(buf)
self.usLastCharIndex, buf = ValUtil.ushortpop(buf)
self.sTypoAscender, buf = ValUtil.sshortpop(buf)
self.sTypoDescender, buf = ValUtil.sshortpop(buf)
self.sTypoLineGap, buf = ValUtil.sshortpop(buf)
self.usWinAscent, buf = ValUtil.ushortpop(buf)
self.usWinDescent, buf = ValUtil.ushortpop(buf)
self.ulCodePageRange1, buf = ValUtil.ulongpop(buf)
self.ulCodePageRange2, buf = ValUtil.ulongpop(buf)
self.sxHeight, buf = ValUtil.sshortpop(buf)
self.sCapHeight, buf = ValUtil.sshortpop(buf)
self.usDefaultChar, buf = ValUtil.ushortpop(buf)
self.usBreakChar, buf = ValUtil.ushortpop(buf)
self.usMaxContext, buf = ValUtil.ushortpop(buf)
if self.version >= 5:
self.usLowerOpticalPointSize, buf = ValUtil.ushortpop(buf)
self.usUpperOpticalPointSize, buf = ValUtil.ushortpop(buf)
return buf
def show(self):
print("[Table(%s)]" % (self.tag))
print(" version = %d" % (self.version))
print(" xAvgCharWidth = %d" % (self.xAvgCharWidth))
print(" usWeightClass = %d" % (self.usWeightClass))
print(" usWidthClass = %d" % (self.usWidthClass))
print(" fsType = %d" % (self.fsType))
print(" ySubscriptXSize = %d" % (self.ySubscriptXSize))
print(" ySubscriptYSize = %d" % (self.ySubscriptYSize))
print(" ySubscriptXOffset = %d" % (self.ySubscriptXOffset))
print(" ySubscriptYOffset = %d" % (self.ySubscriptYOffset))
print(" ySuperscriptXSize = %d" % (self.ySuperscriptXSize))
print(" ySuperscriptYSize = %d" % (self.ySuperscriptYSize))
print(" ySuperscriptXOffset = %d" % (self.ySuperscriptXOffset))
print(" ySuperscriptYOffset = %d" % (self.ySuperscriptYOffset))
print(" yStrikeoutSize = %d" % (self.yStrikeoutSize))
print(" yStrikeoutPosition = %d" % (self.yStrikeoutPosition))
print(" panose = {0}".format(self.panose))
print(" ulUnicodeRange1 = 0x%08x" % (self.ulUnicodeRange1))
print(" ulUnicodeRange2 = 0x%08x" % (self.ulUnicodeRange2))
print(" ulUnicodeRange3 = 0x%08x" % (self.ulUnicodeRange3))
print(" ulUnicodeRange4 = 0x%08x" % (self.ulUnicodeRange4))
        print(" achVendID = %s" % (self.achVendID))
        print(" fsSelection = 0x%04x" % (self.fsSelection))
        print(" usFirstCharIndex = %d" % (self.usFirstCharIndex))
        print(" usLastCharIndex = %d" % (self.usLastCharIndex))
        print(" sTypoAscender = %d" % (self.sTypoAscender))
        print(" sTypoDescender = %d" % (self.sTypoDescender))
        print(" sTypoLineGap = %d" % (self.sTypoLineGap))
        print(" usWinAscent = %d" % (self.usWinAscent))
        print(" usWinDescent = %d" % (self.usWinDescent))
        print(" ulCodePageRange1 = 0x%08x" % (self.ulCodePageRange1))
        print(" ulCodePageRange2 = 0x%08x" % (self.ulCodePageRange2))
        print(" sxHeight = %d" % (self.sxHeight))
        print(" sCapHeight = %d" % (self.sCapHeight))
        print(" usDefaultChar = %d" % (self.usDefaultChar))
        print(" usBreakChar = %d" % (self.usBreakChar))
        print(" usMaxContext = %d" % (self.usMaxContext))
        if self.version >= 5:
            print(" usLowerOpticalPointSize = %d" % (self.usLowerOpticalPointSize))
            print(" usUpperOpticalPointSize = %d" % (self.usUpperOpticalPointSize))
# OS/2 table
##################################################
# kdlucas/pyrering
#!/usr/bin/python
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library for indirecting common filesystem operations and process calls.
This library's main purpose is to help isolate policy from procedure in I/O
heavy code to make it more easily testable. It also has some basic support
for cleaning up external processes cleanly. We also provide a companion mock
object (mock_filesystem_handler.py) to facilitate testing.
"""
__author__ = '<EMAIL> (<NAME>)'
import os
import select
import shutil
import signal
import StringIO
import subprocess
import sys
import time
import traceback
import types
def CommonStartingTokens(*token_lists):
"""Returns a list of common starting tokens.
Args:
token_lists: any number of lists of tokens
Returns:
the longest list of tokens that are the same in all lists
"""
if token_lists:
min_length = min([len(t) for t in token_lists])
last_matching_token = -1
mismatched = False
for pos in xrange(min_length):
reference_token = token_lists[0][pos]
for tokenlist in token_lists[1:]:
if reference_token != tokenlist[pos]:
mismatched = True
break
if mismatched:
break
else:
last_matching_token = pos
results = token_lists[0][0:last_matching_token + 1]
else:
results = []
return results
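# For example:
#   CommonStartingTokens(['usr', 'local', 'bin'], ['usr', 'local', 'lib'])
# returns ['usr', 'local'], and calling it with no arguments returns [].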
class FileSystemHandler(object):
"""An indirection class around underlying os services to make testing easier.
This also insulates the upper levels of the build from knowing too much about
the underlying operating system environment.
"""
SEP = os.sep # a stand-in for os.sep
SIGTERM_TIMEOUT = 30 # time to wait for a process to die gracefully.
class ScriptError(Exception):
"""Thrown when something goes wrong while running an external script."""
def __init__(self):
os.umask(022)
###### Some General Utilities (mostly so we can mock for testing) ######
def RunCommandFG(self, command, combine_stdout_stderr = True):
"""Run a shell command in the foreground (ie. wait for completion).
Args: command: a string containing the shell command.
combine_stdout_stderr: When true, combine stdout and stderr
Returns: a tuple of (exit_code, stdout, stderr), where stdout & stderr are
file handles.
"""
return self.RunCommandFGWithTimeout(command, -1, combine_stdout_stderr)
def RunCommandFGToPipe(self, command, pipe_out, pipe_err = subprocess.STDOUT):
"""Run a shell command in the foreground (ie. wait for completion).
Args: command: a string containing the shell command.
pipe_out/err: an open file handler to redirect stdout/stderr.
When pipe_err = subprocess.STDOUT, stderr is redirected
to stdout.
Returns: an exit code.
"""
timeout_code, _, _ = self._RunCmdInFGAndWait(command, pipe_out, pipe_err,
-1)
return timeout_code
def RunCommandFGWithTimeout(self, command, timeout = 60,
combine_stdout_stderr = True):
"""Run a shell command in the forground (ie. wait for completion), but
with a timeout specified in secs.
Args:
command: a string containing the shell command.
timeout: an integer timeout value in seconds.
combine_stdout_stderr: When true, combine stdout and stderr.
Returns: a tuple of (timeout_code, stdout, stderr) where stdout/stderr are
file handles.
timeout_code is an integer equal to the return code or None if
the process had to be killed by timeout.
"""
if combine_stdout_stderr:
stderr = subprocess.STDOUT
else:
stderr = subprocess.PIPE
return self._RunCmdInFGAndWait(command, subprocess.PIPE, stderr, timeout)
def RunCommandFGToPipeWithTimeout(self, command, pipe_out,
pipe_err = subprocess.STDOUT, timeout = 60,
env = None):
"""Run a shell command in the forground (ie. wait for completion), but
with a timeout specified in secs. Sends all the stdout and stderr to a
caller supplied file handle.
Args:
command: a string containing the shell command.
pipe_out/err: an open file handler to redirect stdout/stderr.
When pipe_err = subprocess.STDOUT, stderr is redirected
to stdout.
timeout: an integer timeout value in seconds.
env: a dictionary of exported values
Returns: an integer exit code, None if the process timed out.
"""
timeout_code, _, _ = self._RunCmdInFGAndWait(command, pipe_out,
pipe_err, timeout, env)
return timeout_code
def RunCommandBGToPipe(self, command, pipe_out, pipe_err = subprocess.STDOUT,
env = None):
"""Run a shell command in the background (ie. do not wait for completion),
instead returns the process object of the command.
Note that we do not support a timeout, we let the caller do that.
Args:
command: a string containing the shell command.
pipe_out/err: an open file handler to redirect stdout/stderr.
                    When either is set to subprocess.PIPE, a new pipe is
                    made available as proc.stdout/err on the returned object.
When pipe_err = subprocess.STDOUT, stderr is redirected
to stdout.
env: a dictionary of exported values
Returns: a subprocess.Popen object which can be poll()ed or wait()ed on,
             os.waitpid()ed, etc...
"""
# Use external values when no environment dictionary is provided.
if env is None:
env = os.environ
# Flush our various pipes since subprocess output will NOT cause previous
# stuff in the buffer to flush. This makes sure our output stays ordered in
# the file.
for pipe in [pipe_out, pipe_err]:
if isinstance(pipe, types.FileType):
pipe.flush()
proc = subprocess.Popen(command, shell = True, close_fds = True,
preexec_fn = os.setpgrp,
stdout = pipe_out, stderr = pipe_err,
env = env)
return proc
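    # Illustrative only: launch in the background, then apply caller-side
    # waiting/timeout policy (the file name here is arbitrary).
    #
    #   fsh = FileSystemHandler()
    #   log = open('/tmp/build.log', 'w')
    #   proc = fsh.RunCommandBGToPipe('make all', log)
    #   ...           # do other work while the build runs
    #   exit_code = proc.wait()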
def _RunCmdInFGAndWait(self, command, pipe_out, pipe_err, timeout,
env = None):
"""Helper function to handle command running and error handling.
Use one of the other entry points above and not this function.
"""
proc = self.RunCommandBGToPipe(command, pipe_out, pipe_err, env)
should_read_output = (pipe_out == subprocess.PIPE or
pipe_err == subprocess.PIPE)
try:
return self._WaitOnProcessTimeout(proc, timeout, should_read_output)
except OSError:
err_msg = ('Exception [%s] on command [%s].\n\tSTACK TRACE:\n%s'
% (sys.exc_type, command, traceback.format_exc()))
raise self.ScriptError(err_msg)
def _WaitOnProcessTimeout(self, proc, timeout, should_read_output):
"""Takes a popen subprocess object and does the right thing for its
timeout. Kills a process that has hit timeout. If the process responds,
and if successfully killed, we return None. If the process will not respond
to SIGTERM, we send a SIGKILL.
        The only requirement is that the process object's children are in the
same process group. This is true for all run command functions defined
above.
Args:
proc: a subprocess module Popen object
timeout: an integer with any negative or zero value indicating do not
timeout.
should_read_output: When True, while waiting, also read the output to a
new buffer.
Returns:
A tuple of the (exit_code, stdout, stderr) where exit_code is the exit
code of the process, with None indicating a timeout, and
stdout/err are the output pipes of the object. If should_read_output is
True, then stdout/err are copied to a pre-loaded StringIO object.
"""
if should_read_output:
# Only create StringIO objects for those outputs we want to save.
if proc.stdout:
stdout_buffer = StringIO.StringIO()
else:
stdout_buffer = None
if proc.stderr:
stderr_buffer = StringIO.StringIO()
else:
stderr_buffer = None
# Note that if we selected to combine stdout and stderr when we launched
# the process, proc.stderr would be None.
outputs_to_probe = [pipe for pipe in [proc.stdout, proc.stderr] if pipe]
else:
stdout_buffer = proc.stdout
stderr_buffer = proc.stderr
outputs_to_probe = []
start_time = time.time()
try:
while ((proc.poll() is None) and
(time.time() - start_time < timeout or timeout <= 0)):
if should_read_output:
# Probe for up to half a second to see if there is any new output
results, _, _ = select.select(outputs_to_probe, [], [], 0.5)
# Read out from any available pipe. results is [] if timed out.
for out_stream in results:
if out_stream == proc.stdout:
stdout_buffer.write(out_stream.readline())
elif out_stream == proc.stderr:
stderr_buffer.write(out_stream.readline())
else:
time.sleep(0.5)
# Normal return case, we terminated OK.
if proc.poll() is not None:
return proc.wait(), stdout_buffer, stderr_buffer
# If we're here, we timed out. Try asking the process to stop nicely.
os.killpg(proc.pid, signal.SIGTERM)
deadline = time.time() + self.SIGTERM_TIMEOUT
            # Wait while the process is still alive (poll() is None until exit)
            while time.time() < deadline and proc.poll() is None:
time.sleep(1)
if proc.poll() is None:
# SIGTERM failed! Pull out the heavy guns.
os.killpg(proc.pid, signal.SIGKILL)
return None, stdout_buffer, stderr_buffer
finally:
# In case we get an exception (typically a KeyboardException) while
# waiting, just clean up.
if proc.poll() is None:
os.killpg(proc.pid, signal.SIGKILL)
time.sleep(0.1)
elif should_read_output:
# We know the process terminated, so it is safe to read all output.
if proc.stdout:
stdout_buffer.write(proc.stdout.read())
if proc.stderr:
stderr_buffer.write(proc.stderr.read())
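# Illustrative only: a foreground run with a timeout. Captured output comes
# back as StringIO buffers, and a None exit code means the command was killed.
#
#   code, out, err = FileSystemHandler().RunCommandFGWithTimeout('echo hi', 5)
#   if code is None:
#       print 'timed out'
#   else:
#       print out.getvalue()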
# Currently our impl does not remove duplicate objectIds
assert object_ids.available() == 5
else:
with pytest.raises(errors.PermissionDenied):
self.session.get_composition_ids_by_repositories([self.fake_id])
def test_get_compositions_by_repositories(self):
"""Tests get_compositions_by_repositories"""
# From test_templates/resource.py::ResourceBinSession::get_resources_by_bins_template
if not is_never_authz(self.service_config):
catalog_ids = [self.catalog.ident, self.assigned_catalog.ident]
results = self.session.get_compositions_by_repositories(catalog_ids)
assert isinstance(results, ABCObjects.CompositionList)
# Currently our impl does not remove duplicate objects
assert results.available() == 5
else:
with pytest.raises(errors.PermissionDenied):
self.session.get_compositions_by_repositories([self.fake_id])
def test_get_repository_ids_by_composition(self):
"""Tests get_repository_ids_by_composition"""
# From test_templates/resource.py::ResourceBinSession::get_bin_ids_by_resource_template
if not is_never_authz(self.service_config):
cats = self.svc_mgr.get_repository_ids_by_composition(self.composition_ids[1])
assert cats.available() == 2
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repository_ids_by_composition(self.fake_id)
def test_get_repositories_by_composition(self):
"""Tests get_repositories_by_composition"""
# From test_templates/resource.py::ResourceBinSession::get_bins_by_resource_template
if not is_never_authz(self.service_config):
cats = self.svc_mgr.get_repositories_by_composition(self.composition_ids[1])
assert cats.available() == 2
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repositories_by_composition(self.fake_id)
@pytest.fixture(scope="class",
params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def composition_repository_assignment_session_class_fixture(request):
# From test_templates/resource.py::ResourceBinAssignmentSession::init_template
request.cls.service_config = request.param
request.cls.composition_list = list()
request.cls.composition_ids = list()
request.cls.svc_mgr = Runtime().get_service_manager(
'REPOSITORY',
proxy=PROXY,
implementation=request.cls.service_config)
request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
if not is_never_authz(request.cls.service_config):
create_form = request.cls.svc_mgr.get_repository_form_for_create([])
create_form.display_name = 'Test Repository'
create_form.description = 'Test Repository for CompositionRepositoryAssignmentSession tests'
request.cls.catalog = request.cls.svc_mgr.create_repository(create_form)
create_form = request.cls.svc_mgr.get_repository_form_for_create([])
create_form.display_name = 'Test Repository for Assignment'
create_form.description = 'Test Repository for CompositionRepositoryAssignmentSession tests assignment'
request.cls.assigned_catalog = request.cls.svc_mgr.create_repository(create_form)
for num in [0, 1, 2]:
create_form = request.cls.catalog.get_composition_form_for_create([])
create_form.display_name = 'Test Composition ' + str(num)
create_form.description = 'Test Composition for CompositionRepositoryAssignmentSession tests'
obj = request.cls.catalog.create_composition(create_form)
request.cls.composition_list.append(obj)
request.cls.composition_ids.append(obj.ident)
def class_tear_down():
if not is_never_authz(request.cls.service_config):
for obj in request.cls.catalog.get_compositions():
request.cls.catalog.delete_composition(obj.ident)
request.cls.svc_mgr.delete_repository(request.cls.assigned_catalog.ident)
request.cls.svc_mgr.delete_repository(request.cls.catalog.ident)
request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def composition_repository_assignment_session_test_fixture(request):
# From test_templates/resource.py::ResourceBinAssignmentSession::init_template
request.cls.session = request.cls.svc_mgr
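# The class-scoped fixture above provisions shared repositories once per test
# class; this function-scoped one re-points `session` before every test, and
# @pytest.mark.usefixtures below wires both into the test class.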
@pytest.mark.usefixtures("composition_repository_assignment_session_class_fixture", "composition_repository_assignment_session_test_fixture")
class TestCompositionRepositoryAssignmentSession(object):
"""Tests for CompositionRepositoryAssignmentSession"""
def test_can_assign_compositions(self):
"""Tests can_assign_compositions"""
# From test_templates/resource.py::ResourceBinAssignmentSession::can_assign_resources_template
result = self.session.can_assign_compositions()
assert isinstance(result, bool)
def test_can_assign_compositions_to_repository(self):
"""Tests can_assign_compositions_to_repository"""
# From test_templates/resource.py::ResourceBinAssignmentSession::can_assign_resources_to_bin_template
result = self.session.can_assign_compositions_to_repository(self.assigned_catalog.ident)
assert isinstance(result, bool)
def test_get_assignable_repository_ids(self):
"""Tests get_assignable_repository_ids"""
# From test_templates/resource.py::ResourceBinAssignmentSession::get_assignable_bin_ids_template
# Note that our implementation just returns all catalogIds, which does not follow
        # the OSID spec (should return only the catalogIds below the given one in the hierarchy).
if not is_never_authz(self.service_config):
results = self.session.get_assignable_repository_ids(self.catalog.ident)
assert isinstance(results, IdList)
            # Because we're not deleting all repositories from all tests, we
            # might have some crufty ones here...but there should be at least 2.
assert results.available() >= 2
else:
with pytest.raises(errors.PermissionDenied):
self.session.get_assignable_repository_ids(self.fake_id)
def test_get_assignable_repository_ids_for_composition(self):
"""Tests get_assignable_repository_ids_for_composition"""
# From test_templates/resource.py::ResourceBinAssignmentSession::get_assignable_bin_ids_for_resource_template
# Note that our implementation just returns all catalogIds, which does not follow
        # the OSID spec (should return only the catalogIds below the given one in the hierarchy).
if not is_never_authz(self.service_config):
results = self.session.get_assignable_repository_ids_for_composition(self.catalog.ident, self.composition_ids[0])
assert isinstance(results, IdList)
            # Because we're not deleting all repositories from all tests, we
            # might have some crufty ones here...but there should be at least 2.
assert results.available() >= 2
else:
with pytest.raises(errors.PermissionDenied):
self.session.get_assignable_repository_ids_for_composition(self.fake_id, self.fake_id)
def test_assign_composition_to_repository(self):
"""Tests assign_composition_to_repository"""
# From test_templates/resource.py::ResourceBinAssignmentSession::assign_resource_to_bin_template
if not is_never_authz(self.service_config):
results = self.assigned_catalog.get_compositions()
assert results.available() == 0
self.session.assign_composition_to_repository(self.composition_ids[1], self.assigned_catalog.ident)
results = self.assigned_catalog.get_compositions()
assert results.available() == 1
self.session.unassign_composition_from_repository(
self.composition_ids[1],
self.assigned_catalog.ident)
else:
with pytest.raises(errors.PermissionDenied):
self.session.assign_composition_to_repository(self.fake_id, self.fake_id)
def test_unassign_composition_from_repository(self):
"""Tests unassign_composition_from_repository"""
# From test_templates/resource.py::ResourceBinAssignmentSession::unassign_resource_from_bin_template
if not is_never_authz(self.service_config):
results = self.assigned_catalog.get_compositions()
assert results.available() == 0
self.session.assign_composition_to_repository(
self.composition_ids[1],
self.assigned_catalog.ident)
results = self.assigned_catalog.get_compositions()
assert results.available() == 1
self.session.unassign_composition_from_repository(
self.composition_ids[1],
self.assigned_catalog.ident)
results = self.assigned_catalog.get_compositions()
assert results.available() == 0
else:
with pytest.raises(errors.PermissionDenied):
self.session.unassign_composition_from_repository(self.fake_id, self.fake_id)
@pytest.fixture(scope="class",
params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def repository_lookup_session_class_fixture(request):
# From test_templates/resource.py::BinLookupSession::init_template
request.cls.service_config = request.param
request.cls.catalogs = list()
request.cls.catalog_ids = list()
request.cls.svc_mgr = Runtime().get_service_manager(
'REPOSITORY',
proxy=PROXY,
implementation=request.cls.service_config)
request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
if not is_never_authz(request.cls.service_config):
for num in [0, 1]:
create_form = request.cls.svc_mgr.get_repository_form_for_create([])
create_form.display_name = 'Test Repository ' + str(num)
create_form.description = 'Test Repository for repository proxy manager tests'
catalog = request.cls.svc_mgr.create_repository(create_form)
request.cls.catalogs.append(catalog)
request.cls.catalog_ids.append(catalog.ident)
def class_tear_down():
if not is_never_authz(request.cls.service_config):
for catalog in request.cls.svc_mgr.get_repositories():
request.cls.svc_mgr.delete_repository(catalog.ident)
request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def repository_lookup_session_test_fixture(request):
# From test_templates/resource.py::BinLookupSession::init_template
request.cls.session = request.cls.svc_mgr
@pytest.mark.usefixtures("repository_lookup_session_class_fixture", "repository_lookup_session_test_fixture")
class TestRepositoryLookupSession(object):
"""Tests for RepositoryLookupSession"""
def test_can_lookup_repositories(self):
"""Tests can_lookup_repositories"""
# From test_templates/resource.py::BinLookupSession::can_lookup_bins_template
assert isinstance(self.session.can_lookup_repositories(), bool)
def test_use_comparative_repository_view(self):
"""Tests use_comparative_repository_view"""
# From test_templates/resource.py::BinLookupSession::use_comparative_bin_view_template
self.svc_mgr.use_comparative_repository_view()
def test_use_plenary_repository_view(self):
"""Tests use_plenary_repository_view"""
# From test_templates/resource.py::BinLookupSession::use_plenary_bin_view_template
self.svc_mgr.use_plenary_repository_view()
def test_get_repository(self):
"""Tests get_repository"""
# From test_templates/resource.py::BinLookupSession::get_bin_template
if not is_never_authz(self.service_config):
catalog = self.svc_mgr.get_repository(self.catalogs[0].ident)
assert catalog.ident == self.catalogs[0].ident
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repository(self.fake_id)
def test_get_repositories_by_ids(self):
"""Tests get_repositories_by_ids"""
# From test_templates/resource.py::BinLookupSession::get_bins_by_ids_template
if not is_never_authz(self.service_config):
catalogs = self.svc_mgr.get_repositories_by_ids(self.catalog_ids)
assert catalogs.available() == 2
assert isinstance(catalogs, ABCObjects.RepositoryList)
catalog_id_strs = [str(cat_id) for cat_id in self.catalog_ids]
for index, catalog in enumerate(catalogs):
assert str(catalog.ident) in catalog_id_strs
catalog_id_strs.remove(str(catalog.ident))
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repositories_by_ids([self.fake_id])
def test_get_repositories_by_genus_type(self):
"""Tests get_repositories_by_genus_type"""
# From test_templates/resource.py::BinLookupSession::get_bins_by_genus_type_template
if not is_never_authz(self.service_config):
catalogs = self.svc_mgr.get_repositories_by_genus_type(DEFAULT_GENUS_TYPE)
assert catalogs.available() > 0
assert isinstance(catalogs, ABCObjects.RepositoryList)
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repositories_by_genus_type(DEFAULT_GENUS_TYPE)
def test_get_repositories_by_parent_genus_type(self):
"""Tests get_repositories_by_parent_genus_type"""
if is_never_authz(self.service_config):
pass # no object to call the method on?
elif uses_cataloging(self.service_config):
pass # cannot call the _get_record() methods on catalogs
else:
with pytest.raises(errors.Unimplemented):
self.session.get_repositories_by_parent_genus_type(True)
def test_get_repositories_by_record_type(self):
"""Tests get_repositories_by_record_type"""
if is_never_authz(self.service_config):
pass # no object to call the method on?
elif uses_cataloging(self.service_config):
pass # cannot call the _get_record() methods on catalogs
else:
with pytest.raises(errors.Unimplemented):
self.session.get_repositories_by_record_type(True)
def test_get_repositories_by_provider(self):
"""Tests get_repositories_by_provider"""
if is_never_authz(self.service_config):
pass # no object to call the method on?
elif uses_cataloging(self.service_config):
pass # cannot call the _get_record() methods on catalogs
else:
with pytest.raises(errors.Unimplemented):
self.session.get_repositories_by_provider(True)
def test_get_repositories(self):
"""Tests get_repositories"""
# From test_templates/resource.py::BinLookupSession::get_bins_template
if not is_never_authz(self.service_config):
catalogs = self.svc_mgr.get_repositories()
assert catalogs.available() > 0
assert isinstance(catalogs, ABCObjects.RepositoryList)
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repositories()
@pytest.fixture(scope="class",
params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def repository_query_session_class_fixture(request):
# From test_templates/resource.py::BinQuerySession::init_template
request.cls.service_config = request.param
request.cls.svc_mgr = Runtime().get_service_manager(
'REPOSITORY',
proxy=PROXY,
implementation=request.cls.service_config)
if not is_never_authz(request.cls.service_config):
create_form = request.cls.svc_mgr.get_repository_form_for_create([])
create_form.display_name = 'Test catalog'
create_form.description = 'Test catalog description'
request.cls.catalog = request.cls.svc_mgr.create_repository(create_form)
request.cls.fake_id = Id('resource.Resource%3A1%40ODL.MIT.EDU')
def class_tear_down():
if not is_never_authz(request.cls.service_config):
request.cls.svc_mgr.delete_repository(request.cls.catalog.ident)
request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def repository_query_session_test_fixture(request):
# From test_templates/resource.py::BinQuerySession::init_template
request.cls.session = request.cls.svc_mgr
@pytest.mark.usefixtures("repository_query_session_class_fixture", "repository_query_session_test_fixture")
class TestRepositoryQuerySession(object):
"""Tests for RepositoryQuerySession"""
def test_can_search_repositories(self):
"""Tests can_search_repositories"""
# From test_templates/resource.py::BinQuerySession::can_search_bins_template
assert isinstance(self.session.can_search_repositories(), bool)
def test_get_repository_query(self):
"""Tests get_repository_query"""
# From test_templates/resource.py::BinQuerySession::get_bin_query_template
if not is_never_authz(self.service_config):
query = self.session.get_repository_query()
assert isinstance(query, ABCQueries.RepositoryQuery)
else:
with pytest.raises(errors.PermissionDenied):
self.session.get_repository_query()
def test_get_repositories_by_query(self):
"""Tests get_repositories_by_query"""
# From test_templates/resource.py::BinQuerySession::get_bins_by_query_template
if not is_never_authz(self.service_config):
query = self.session.get_repository_query()
query.match_display_name('Test catalog')
assert self.session.get_repositories_by_query(query).available() == 1
query.clear_display_name_terms()
query.match_display_name('Test catalog', match=False)
assert self.session.get_repositories_by_query(query).available() == 0
else:
with pytest.raises(errors.PermissionDenied):
self.session.get_repositories_by_query('foo')
@pytest.fixture(scope="class",
params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def repository_admin_session_class_fixture(request):
# From test_templates/resource.py::BinAdminSession::init_template
request.cls.service_config = request.param
request.cls.svc_mgr = Runtime().get_service_manager(
'REPOSITORY',
proxy=PROXY,
implementation=request.cls.service_config)
request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
@pytest.fixture(scope="function")
def repository_admin_session_test_fixture(request):
# From test_templates/resource.py::BinAdminSession::init_template
if not is_never_authz(request.cls.service_config):
# Initialize test catalog:
create_form = request.cls.svc_mgr.get_repository_form_for_create([])
create_form.display_name = 'Test Repository'
create_form.description = 'Test Repository for RepositoryAdminSession tests'
request.cls.catalog = request.cls.svc_mgr.create_repository(create_form)
# Initialize catalog to be deleted:
create_form = request.cls.svc_mgr.get_repository_form_for_create([])
create_form.display_name = 'Test Repository For Deletion'
create_form.description = 'Test Repository for RepositoryAdminSession deletion test'
request.cls.catalog_to_delete = request.cls.svc_mgr.create_repository(create_form)
request.cls.session = request.cls.svc_mgr
def test_tear_down():
if not is_never_authz(request.cls.service_config):
for catalog in request.cls.svc_mgr.get_repositories():
request.cls.svc_mgr.delete_repository(catalog.ident)
request.addfinalizer(test_tear_down)
@pytest.mark.usefixtures("repository_admin_session_class_fixture", "repository_admin_session_test_fixture")
class TestRepositoryAdminSession(object):
"""Tests for RepositoryAdminSession"""
def test_can_create_repositories(self):
"""Tests can_create_repositories"""
# From test_templates/resource.py BinAdminSession.can_create_bins_template
assert isinstance(self.svc_mgr.can_create_repositories(), bool)
def test_can_create_repository_with_record_types(self):
"""Tests can_create_repository_with_record_types"""
# From test_templates/resource.py BinAdminSession.can_create_bin_with_record_types_template
assert isinstance(self.svc_mgr.can_create_repository_with_record_types(DEFAULT_TYPE), bool)
def test_get_repository_form_for_create(self):
"""Tests get_repository_form_for_create"""
# From test_templates/resource.py BinAdminSession.get_bin_form_for_create_template
from dlkit.abstract_osid.repository.objects import RepositoryForm
if not is_never_authz(self.service_config):
catalog_form = self.svc_mgr.get_repository_form_for_create([])
assert isinstance(catalog_form, OsidCatalogForm)
assert not catalog_form.is_for_update()
with pytest.raises(errors.InvalidArgument):
self.svc_mgr.get_repository_form_for_create([1])
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repository_form_for_create([])
def test_create_repository(self):
"""Tests create_repository"""
# From test_templates/resource.py BinAdminSession.create_bin_template
from dlkit.abstract_osid.repository.objects import Repository
if not is_never_authz(self.service_config):
catalog_form = self.svc_mgr.get_repository_form_for_create([])
catalog_form.display_name = 'Test Repository'
catalog_form.description = 'Test Repository for RepositoryAdminSession.create_repository tests'
new_catalog = self.svc_mgr.create_repository(catalog_form)
assert isinstance(new_catalog, OsidCatalog)
with pytest.raises(errors.IllegalState):
self.svc_mgr.create_repository(catalog_form)
with pytest.raises(errors.InvalidArgument):
self.svc_mgr.create_repository('I Will Break You!')
update_form = self.svc_mgr.get_repository_form_for_update(new_catalog.ident)
with pytest.raises(errors.InvalidArgument):
self.svc_mgr.create_repository(update_form)
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.create_repository('foo')
def test_can_update_repositories(self):
"""Tests can_update_repositories"""
# From test_templates/resource.py BinAdminSession.can_update_bins_template
assert isinstance(self.svc_mgr.can_update_repositories(), bool)
def test_get_repository_form_for_update(self):
"""Tests get_repository_form_for_update"""
# From test_templates/resource.py BinAdminSession.get_bin_form_for_update_template
from dlkit.abstract_osid.repository.objects import RepositoryForm
if not is_never_authz(self.service_config):
catalog_form = self.svc_mgr.get_repository_form_for_update(self.catalog.ident)
assert isinstance(catalog_form, OsidCatalogForm)
assert catalog_form.is_for_update()
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.get_repository_form_for_update(self.fake_id)
def test_update_repository(self):
"""Tests update_repository"""
# From test_templates/resource.py BinAdminSession.update_bin_template
if not is_never_authz(self.service_config):
catalog_form = self.svc_mgr.get_repository_form_for_update(self.catalog.ident)
# Update some elements here?
self.svc_mgr.update_repository(catalog_form)
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.update_repository('foo')
def test_can_delete_repositories(self):
"""Tests can_delete_repositories"""
# From test_templates/resource.py BinAdminSession.can_delete_bins_template
assert isinstance(self.svc_mgr.can_delete_repositories(), bool)
def test_delete_repository(self):
"""Tests delete_repository"""
# From test_templates/resource.py BinAdminSession.delete_bin_template
if not is_never_authz(self.service_config):
cat_id = self.catalog_to_delete.ident
self.svc_mgr.delete_repository(cat_id)
with pytest.raises(errors.NotFound):
self.svc_mgr.get_repository(cat_id)
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.delete_repository(self.fake_id)
def test_can_manage_repository_aliases(self):
"""Tests can_manage_repository_aliases"""
# From test_templates/resource.py::ResourceAdminSession::can_manage_resource_aliases_template
assert isinstance(self.svc_mgr.can_manage_repository_aliases(), bool)
def test_alias_repository(self):
"""Tests alias_repository"""
# From test_templates/resource.py BinAdminSession.alias_bin_template
alias_id = Id('repository.Repository%3Amy-alias%40ODL.MIT.EDU')
if not is_never_authz(self.service_config):
self.svc_mgr.alias_repository(self.catalog_to_delete.ident, alias_id)
aliased_catalog = self.svc_mgr.get_repository(alias_id)
assert self.catalog_to_delete.ident == aliased_catalog.ident
else:
with pytest.raises(errors.PermissionDenied):
self.svc_mgr.alias_repository(self.fake_id, alias_id)
@pytest.fixture(scope="class",
params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def repository_hierarchy_session_class_fixture(request):
# From test_templates/resource.py::BinHierarchySession::init_template
request.cls.service_config = request.param
request.cls.svc_mgr = Runtime().get_service_manager(
'REPOSITORY',
proxy=PROXY,
implementation=request.cls.service_config)
request.cls.catalogs = dict()
request.cls.fake_id = Id('resource.Resource%3Afake%40DLKIT.MIT.EDU')
if not is_never_authz(request.cls.service_config):
for name in ['Root', 'Child 1', 'Child 2', 'Grandchild 1']:
create_form = request.cls.svc_mgr.get_repository_form_for_create([])
# Discord
import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO
from .utils import checks
from __main__ import send_cmd_help
# Others
import os
from copy import deepcopy, copy
import datetime
import time
import math
import random
try:
import validators
validatorsAvail = True
except ImportError:
validatorsAvail = False
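# validatorsAvail lets the cog load even when the optional `validators`
# package is missing; the URL checks below then simply reject input.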
class DiscordRPG:
"""The Discord RPG. I mean, *Thee Discord RPG*"""
def __init__(self, bot):
self.bot = bot
self.playerPath = "data/discordrpg/players.json"
self.inventoryPath = "data/discordrpg/inventories.json"
self.monsterPath = "data/discordrpg/monsters.json"
self.townPath = "data/discordrpg/towns.json"
self.tilePath = "data/discordrpg/tiletypes.json"
self.mapPath = "data/discordrpg/map.json"
self.player = Player(bot, self.playerPath, self.inventoryPath)
self.monster = Monster(bot, self.monsterPath)
self.town = Town(bot, self.player, "data/discordrpg/towns.json")
self.map = Map(
self.player, bot, "data/discordrpg/tiletypes.json", "data/discordrpg/map.json")
self.settings_path = "data/discordrpg/settings.json"
self.settings = dataIO.load_json(self.settings_path)
self.logged_in_users = []
self.default_options = ["Survey the Landscape",
"Have a look around",
"Check Inventory",
"Meditate",
"Rest"]
@commands.group(name='rpgset', pass_context=True)
async def rpgset(self, ctx):
"""Settings for the RPG on this server"""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
@rpgset.command(pass_context=True)
async def townname(self, ctx, *, name):
"""Allows you to set a name for this server's Home Town"""
author = ctx.message.author
sid = ctx.message.server
await self.town.set_town_name(ctx, name)
@rpgset.command(pass_context=True)
async def townavatar(self, ctx, *, avatarurl):
"""Allows you to set a new Avatar picture for this server's Home Town"""
        # TODOLATER allow attachment grabbing. It's possible, but I'm lazy
author = ctx.message.author
sid = ctx.message.server.id
        if validatorsAvail and validators.url(avatarurl):
            await self.town.set_town_avatar(sid, avatarurl)
        else:
            await self.bot.say("Not a valid URL. Try again.")
@commands.group(name="rpg", pass_context=True)
async def rpg(self, ctx):
"""General RPG stuff."""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
@rpg.command(pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def signup(self, ctx):
"""Allows an admin or moderator to signup this server into the RPG"""
await self.town.create_town(ctx)
@rpg.command(pass_context=True)
async def character(self, ctx):
"""Character options menu"""
author = ctx.message.author
sid = ctx.message.server
current_player = {}
player_exists = await self.player.check_player(author.id)
if player_exists:
current_player = await self.player.get_player_records(author.id)
else:
await self.bot.say("You have not yet joined the RPG. Please register using `{}rpg register`".format(ctx.prefix))
return
embed = discord.Embed(title="Options for {}".format(current_player[
'CharName']), description="Use the numbers to make a choice.", colour=0xfd0000)
embed.add_field(
name='Options', value="`1.` Get Character Sheet\n`2.` Change Avatar\n`3.` Change Bio\n`4.` View Home Town", inline=False)
embed.set_author(name='{}'.format(author.name),
icon_url='{}'.format(author.avatar_url))
embed.set_thumbnail(
url='https://i.ytimg.com/vi/Pq824AM9ZHQ/maxresdefault.jpg')
await self.bot.say("", embed=embed)
response = await self.bot.wait_for_message(author=author)
if '1' in response.content:
# Return the character sheet
await self.player.getCharacterSheet(author)
elif '2' in response.content:
# Grab url, validate it and save to the players profile in
# players.json
await self.bot.say("Please provide me with a url only, to use as an image for your character sheet.")
            # TODOLATER allow attachment grabbing. It's possible, but I'm lazy
avatarurl = await self.bot.wait_for_message(author=author)
            if validatorsAvail and validators.url(avatarurl.content):
                await self.player.setProfileAvatar(author.id, avatarurl.content)
            else:
                await self.bot.say("Not a valid URL. Try again.")
elif '3' in response.content:
await self.player.setBio(ctx, author.id)
elif '4' in response.content:
self.town.savetowns()
await self.town.reload_town_records()
await self.town.get_town_sheet(current_player['HomeTownID'])
else:
await self.bot.say("Invalid response. Please try again.")
@rpg.command(pass_context=True, no_pm=False)
async def register(self, ctx):
"""Registers and Creates your RPG Character."""
author = ctx.message.author
sid = ctx.message.server.id
townExists = await self.town.check_town(sid)
if not townExists:
await self.bot.say("Oops. Your town is still in piles of rubble. Please ask an admin or moderator of this channel to get your town started with `{}rpg signup`".format(ctx.prefix))
return
player_exists = await self.player.check_player(author.id)
if player_exists:
await self.bot.say("You are already regsitered. You can use `{}rpg character` to do things.".format(ctx.prefix))
return
await self.bot.say("Thanks for joining {}! We are going to need some information...".format(author.mention))
await self.player._createplayer(ctx)
@rpg.command(pass_context=True, no_pm=False)
async def viewtown(self, ctx):
"""View the details of the guild's town you are currently in"""
sid = ctx.message.server.id
await self.town.get_town_sheet(sid)
@rpg.command(pass_context=True, no_pm=False)
async def viewmonster(self, ctx, *, monsterID):
"""Testing Stub. Please do not use."""
await self.monster.getMonsterSheet(monsterID)
@rpg.command(pass_context=True, no_pm=False)
async def viewtile(self, ctx, locX: int, locY: int):
"""Testing sub. Please do not use."""
user = ctx.message.author
location = {"X" : locX, "Y" : locY}
current_player = await self.player.get_player_records(user.id)
tile = await self.map.map_provider(user, location)
await self.bot.say(tile)
@rpg.command(pass_context=True, no_pm=False)
async def findtile(self, ctx, tile_type):
"""Stub. Do not Use"""
user = ctx.message.author
tile = await self.map.find_tile(tile_type)
await self.bot.say(tile)
    @rpg.command(pass_context=True, no_pm=False)
    async def viewsurrounds(self, ctx, loc_x: int, loc_y: int):
        """Testing stub. Please do not use."""
        location = {'X': loc_x, 'Y': loc_y}
        user = ctx.message.author
        await self.map.get_surrounds(user, location)
@rpg.command(pass_context=True, no_pm=False)
async def viewplayer(self, ctx, user: discord.Member):
"""Allows you to see the character sheet of another player in the game."""
hasProfile = await self.player.check_player(user.id)
if hasProfile:
await self.player.getCharacterSheet(user)
else:
await self.bot.say("That player does not yet exist. Perhaps consider asking them to join using `{}rpg register`?".format(ctx.prefix))
@rpg.command(pass_context=True, no_pm=False)
async def logout(self, ctx):
"""Logs you out of your current session, if there is one"""
await self._logout(ctx)
@rpg.command(pass_context=True, no_pm=False)
async def play(self, ctx):
"""Runs a session of DiscordRPG"""
# from this point onwards, CTX cannot be used for resources like server ID's.
# Needs to be pulled from the existing resources, in the dicts.
userDO = ctx.message.author
        await self.town.reload_town_records()
current_player = await self.player.get_player_records(userDO.id)
if current_player is None:
await self.bot.say("It seems you have never registered with the rpg."
" Please try `{}rpg register`".format(ctx.prefix))
return
player_town = await self.town.get_town_records(current_player['HomeTownID'])
if player_town is None:
await self.bot.say("Hmmm... It appears your town is still in Rubble unfortunately."
"Torn down by a war long since forgotten. "
"Get an admin of this server to try `{}rpg signup`".format(ctx.prefix))
return
if not self._login(userDO):
await self.bot.say("You already have an Ongoing play session. "
"If you can't find it, please try "
"`{}rpg logout`".format(ctx.prefix))
return
# TODO remove when complete.
await self.bot.say("This is still under construction. Any bugs are please to be reported using `;;contact` followed by the error given. Thanks for testing out DiscordRPG!")
if 'Never' in current_player['Last_Played']:
await self.bot.say("Thank you for signing up. Welcome to your next grand adventure")
await self.first_adventure_town(ctx, current_player, player_town)
async def first_adventure_town(self, ctx, current_player, player_town):
user = ctx.message.author
player_race = current_player['Race']
player_old_wep = ""
if 'W' in player_race:
player_old_wep = "Half Blade"
elif 'R' in player_race:
player_old_wep = "Dagger"
elif 'S' in player_race:
player_old_wep = "Wand"
header = ["Awakening", ("First, nothing. Then, just white pain."
" You wake up on the cold, hard stones."
" Dazed and Confused,"
" you wonder how it is you ended up here."
" Opening your eyes to the eye-shattering sunlight,"
" you sit yourself up and take a look around."
" Doesn't look like much,"
" a simple town you think to yourself."
" You stand up and decide to have a look around.")]
option = ["Take a look around."]
em1 = await self.embed_builder(ctx, current_player, header, option)
await self.bot.say("", embed=em1)
valid = False
while not valid:
response = await self.loop_checks(ctx)
if not response:
valid = True
return
elif '1' in response.content:
option = ["Try for the gate. It's time to get out of here.",
"Approach the Fountain.",
"Take a closer look at the buildings around you."]
                # only header[1] is replaced, keeping the "Awakening" chapter heading in header[0]
header[1] = (" You pull yourself to your feet. As you do, "
"a stranger calls out to you, 'Woah hold on there friend! Name's Timm.' "
"He hands you a tankard,which you gulp down.\n "
"Timm continues, 'Some big fella's dragged you in overnight. "
"I saw it from my shop, Timms Town Improvement! "
"How about you come round and have a look when you're ready? "
"I've also put some coin in your pocket. You could use it.'"
"\n You have a look around the town. It's all in a circle, "
"clearly having only just being rebuilt from Rubble. "
"You see a dirty Fountain in the centre of the courtyard. "
"You notice a Gate behind you, seemingly ungaurded.\n\n "
"What shall you do next?")
em1 = await self.embed_builder(ctx, current_player, header, option)
await self.bot.say("", embed=em1)
valid = True
break
else:
await self.bot.say("No correct response detected. Please try again.")
continue
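# Second prompt loop: handle the choice the player made in the town courtyard.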
valid = False
while not valid:
response = await self.loop_checks(ctx)
if not response:
valid = True
return
elif '1' in response.content:
# Approach the gate.
option = ["Turn Around. You don't have enough strength to face that yet.",
"Ignore the advice. Ask to leave the gate."]
header[0] = ("The Gate and the Gate Keeper")
header[1] = ("It's a sad, wooden afair."
" The wood looks rotted and pieced together."
" How this serves as protection for anything is anyone's | |
@property
def drybulb_temperature_range_3_lower_limit(self):
"""field `Dry-Bulb Temperature Range 3 Lower Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 3 Lower Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_3_lower_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 3 Lower Limit"]
@drybulb_temperature_range_3_lower_limit.setter
def drybulb_temperature_range_3_lower_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 3 Lower Limit`
"""
self["Dry-Bulb Temperature Range 3 Lower Limit"] = value
@property
def drybulb_temperature_range_3_upper_limit(self):
"""field `Dry-Bulb Temperature Range 3 Upper Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 3 Upper Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_3_upper_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 3 Upper Limit"]
@drybulb_temperature_range_3_upper_limit.setter
def drybulb_temperature_range_3_upper_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 3 Upper Limit`
"""
self["Dry-Bulb Temperature Range 3 Upper Limit"] = value
@property
def range_3_equipment_list_name(self):
"""field `Range 3 Equipment List Name`
Args:
value (str): value for IDD Field `Range 3 Equipment List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `range_3_equipment_list_name` or None if not set
"""
return self["Range 3 Equipment List Name"]
@range_3_equipment_list_name.setter
def range_3_equipment_list_name(self, value=None):
"""Corresponds to IDD field `Range 3 Equipment List Name`"""
self["Range 3 Equipment List Name"] = value
@property
def drybulb_temperature_range_4_lower_limit(self):
"""field `Dry-Bulb Temperature Range 4 Lower Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 4 Lower Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_4_lower_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 4 Lower Limit"]
@drybulb_temperature_range_4_lower_limit.setter
def drybulb_temperature_range_4_lower_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 4 Lower Limit`
"""
self["Dry-Bulb Temperature Range 4 Lower Limit"] = value
@property
def drybulb_temperature_range_4_upper_limit(self):
"""field `Dry-Bulb Temperature Range 4 Upper Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 4 Upper Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_4_upper_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 4 Upper Limit"]
@drybulb_temperature_range_4_upper_limit.setter
def drybulb_temperature_range_4_upper_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 4 Upper Limit`
"""
self["Dry-Bulb Temperature Range 4 Upper Limit"] = value
@property
def range_4_equipment_list_name(self):
"""field `Range 4 Equipment List Name`
Args:
value (str): value for IDD Field `Range 4 Equipment List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `range_4_equipment_list_name` or None if not set
"""
return self["Range 4 Equipment List Name"]
@range_4_equipment_list_name.setter
def range_4_equipment_list_name(self, value=None):
"""Corresponds to IDD field `Range 4 Equipment List Name`"""
self["Range 4 Equipment List Name"] = value
@property
def drybulb_temperature_range_5_lower_limit(self):
"""field `Dry-Bulb Temperature Range 5 Lower Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 5 Lower Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_5_lower_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 5 Lower Limit"]
@drybulb_temperature_range_5_lower_limit.setter
def drybulb_temperature_range_5_lower_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 5 Lower Limit`
"""
self["Dry-Bulb Temperature Range 5 Lower Limit"] = value
@property
def drybulb_temperature_range_5_upper_limit(self):
"""field `Dry-Bulb Temperature Range 5 Upper Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 5 Upper Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_5_upper_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 5 Upper Limit"]
@drybulb_temperature_range_5_upper_limit.setter
def drybulb_temperature_range_5_upper_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 5 Upper Limit`
"""
self["Dry-Bulb Temperature Range 5 Upper Limit"] = value
@property
def range_5_equipment_list_name(self):
"""field `Range 5 Equipment List Name`
Args:
value (str): value for IDD Field `Range 5 Equipment List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `range_5_equipment_list_name` or None if not set
"""
return self["Range 5 Equipment List Name"]
@range_5_equipment_list_name.setter
def range_5_equipment_list_name(self, value=None):
"""Corresponds to IDD field `Range 5 Equipment List Name`"""
self["Range 5 Equipment List Name"] = value
@property
def drybulb_temperature_range_6_lower_limit(self):
"""field `Dry-Bulb Temperature Range 6 Lower Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 6 Lower Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_6_lower_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 6 Lower Limit"]
@drybulb_temperature_range_6_lower_limit.setter
def drybulb_temperature_range_6_lower_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 6 Lower Limit`
"""
self["Dry-Bulb Temperature Range 6 Lower Limit"] = value
@property
def drybulb_temperature_range_6_upper_limit(self):
"""field `Dry-Bulb Temperature Range 6 Upper Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 6 Upper Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_6_upper_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 6 Upper Limit"]
@drybulb_temperature_range_6_upper_limit.setter
def drybulb_temperature_range_6_upper_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 6 Upper Limit`
"""
self["Dry-Bulb Temperature Range 6 Upper Limit"] = value
@property
def range_6_equipment_list_name(self):
"""field `Range 6 Equipment List Name`
Args:
value (str): value for IDD Field `Range 6 Equipment List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `range_6_equipment_list_name` or None if not set
"""
return self["Range 6 Equipment List Name"]
@range_6_equipment_list_name.setter
def range_6_equipment_list_name(self, value=None):
"""Corresponds to IDD field `Range 6 Equipment List Name`"""
self["Range 6 Equipment List Name"] = value
@property
def drybulb_temperature_range_7_lower_limit(self):
"""field `Dry-Bulb Temperature Range 7 Lower Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 7 Lower Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_7_lower_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 7 Lower Limit"]
@drybulb_temperature_range_7_lower_limit.setter
def drybulb_temperature_range_7_lower_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 7 Lower Limit`
"""
self["Dry-Bulb Temperature Range 7 Lower Limit"] = value
@property
def drybulb_temperature_range_7_upper_limit(self):
"""field `Dry-Bulb Temperature Range 7 Upper Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 7 Upper Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_7_upper_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 7 Upper Limit"]
@drybulb_temperature_range_7_upper_limit.setter
def drybulb_temperature_range_7_upper_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 7 Upper Limit`
"""
self["Dry-Bulb Temperature Range 7 Upper Limit"] = value
@property
def range_7_equipment_list_name(self):
"""field `Range 7 Equipment List Name`
Args:
value (str): value for IDD Field `Range 7 Equipment List Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `range_7_equipment_list_name` or None if not set
"""
return self["Range 7 Equipment List Name"]
@range_7_equipment_list_name.setter
def range_7_equipment_list_name(self, value=None):
"""Corresponds to IDD field `Range 7 Equipment List Name`"""
self["Range 7 Equipment List Name"] = value
@property
def drybulb_temperature_range_8_lower_limit(self):
"""field `Dry-Bulb Temperature Range 8 Lower Limit`
| Units: C
| value >= -70.0
| value <= 70.0
Args:
value (float): value for IDD Field `Dry-Bulb Temperature Range 8 Lower Limit`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `drybulb_temperature_range_8_lower_limit` or None if not set
"""
return self["Dry-Bulb Temperature Range 8 Lower Limit"]
@drybulb_temperature_range_8_lower_limit.setter
def drybulb_temperature_range_8_lower_limit(self, value=None):
""" Corresponds to IDD field `Dry-Bulb Temperature Range 8 Lower Limit`
"""
self["Dry-Bulb Temperature Range 8 Lower Limit"] = value
@property
def drybulb_temperature_range_8_upper_limit(self):
"""field `Dry-Bulb Temperature Range 8 Upper Limit`
| Units: C