input stringlengths 2.65k 237k | output stringclasses 1
value |
|---|---|
<filename>riptable/rt_itemcontainer.py
__all__ = [ 'ItemContainer', ]
import numpy as np
import warnings
import re
from riptable.rt_enum import ColumnAttribute
# Well-known ItemAttribute names attached to items elsewhere in riptable.
ATTRIBUTE_LABEL = "Label"
# NOTE(review): the summary attribute's stored value is the string "Right" --
# presumably a display-justification marker; confirm against the display code.
ATTRIBUTE_SUMMARY = "Right"
ATTRIBUTE_FOOTER = "Footer"
ATTRIBUTE_MARGIN_COLUMN = "MarginColumn"
ATTRIBUTE_NUMBER_OF_FOOTER_ROWS = "NumberOfFooterRows"
class ItemAttribute():
    '''
    An attribute about an item which, in turn, contains attributes in the
    form of Python attributes, set and retrieved using setattr() and getattr().
    '''

    # Names of statically-defined public members to exclude from _attribs().
    # BUGFIX: this used to be the plain string 'copy', and ``k not in 'copy'``
    # performs a *substring* test -- any dynamic attribute named 'c', 'o', 'p',
    # 'y', 'co', 'op', ... was silently dropped, while the class constant
    # itself leaked into _attribs().  A tuple gives exact-name matching.
    ATTRIB_EXCLUSION_LIST = ('copy', 'ATTRIB_EXCLUSION_LIST')

    def __repr__(self, indent=2):
        result = self.__class__.__name__ + '\n'
        for k, v in self._attribs():
            result += ' ' * indent + k + ': ' + str(v)
            result += '\n'
        return result

    def _attribs(self):
        '''
        Return all attributes dynamically set on this ItemAttribute.

        NOTE: add to ATTRIB_EXCLUSION_LIST any method or property name
        statically added to ItemAttribute that doesn't begin with '_'.

        Returns
        -------
        list of (name, value) tuples, one per public dynamic attribute.
        '''
        return [(k, getattr(self, k)) for k in dir(self) if
                (not k.startswith('_') and k not in ItemAttribute.ATTRIB_EXCLUSION_LIST)]

    def copy(self):
        '''
        Perform a deep-ish copy of the ItemAttribute: each attribute value's
        own copy() method is used when available, otherwise the value object
        is shared with the original.
        '''
        attrib = ItemAttribute()
        for k, v in self._attribs():
            setattr(attrib, k, v.copy() if hasattr(v, 'copy') else v)
        return attrib
class ItemContainer():
'Container for items in Struct -- all values are tuples with an attribute'
    def __init__(self, *args, **kwds):
        '''Initialize an ItemContainer.  Any positional/keyword arguments are
        forwarded to dict.update() to seed the internal item dict, whose
        values are [value, attribute] entries.'''
        self._items = {}
        self._items.update(*args, **kwds)

    def __getitem__(self, key):
        # Direct passthrough to the backing dict; raises KeyError when absent.
        return self._items[key]

    def __setitem__(self, key, value):
        'ic.__setitem__(i, y) <==> ic[i]=y'
        self._items[key] = value

    def __delitem__(self, key):
        'ic.__delitem__(y) <==> del ic[y]'
        # NOTE(review): the comment below predates this implementation (it
        # describes an OrderedDict-style linked list); deletion here is a
        # plain dict delete.
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        del self._items[key]
#def __iter__(self):
# 'od.__iter__() <==> iter(od)'
# # Traverse the linked list in order.
# root = self.__root
# curr = root[NEXT]
# while curr is not root:
# yield curr[KEY]
# curr = curr[NEXT]
#def __reversed__(self):
# 'od.__reversed__() <==> reversed(od)'
# # Traverse the linked list in reverse order.
# root = self.__root
# curr = root[PREV]
# while curr is not root:
# yield curr[KEY]
# curr = curr[PREV]
#def __reduce__(self):
# 'Return state information for pickling'
# items = [[k, self[k]] for k in self]
# tmp = self.__map, self.__root
# del self.__map, self.__root
# inst_dict = vars(self).copy()
# self.__map, self.__root = tmp
# if inst_dict:
# return (self.__class__, (items,), inst_dict)
# return self.__class__, (items,)
    def clear(self):
        # Remove every item from the container.
        self._items.clear()

    def __contains__(self, *args):
        return self._items.__contains__(*args)

    def __next__(self):
        # NOTE(review): dict objects have no __next__ method, so this always
        # raises AttributeError at runtime -- it looks dead or broken; confirm
        # no caller relies on it.
        return self._items.__next__()

    def __len__(self):
        return self._items.__len__()

    def __iter__(self):
        # Iterates over item names (dict keys), matching dict semantics.
        #return self._items.__iter__()
        return iter(self._items)

    def items(self):
        return self._items.items()

    def values(self):
        return self._items.values()

    def keys(self):
        # how to best do this?
        # Returns a materialized list rather than a view, so callers may
        # mutate the container while iterating the result.
        return list(self._items.keys())

    def setdefault(self, *args):
        return self._items.setdefault(*args)

    def update(self, *args):
        return self._items.update(*args)

    def pop(self, *args):
        return self._items.pop(*args)
#setdefault = MutableMapping.setdefault
#update = MutableMapping.update
#pop = MutableMapping.pop
#keys = MutableMapping.keys
#values = MutableMapping.values
#items = MutableMapping.items
#__ne__ = MutableMapping.__ne__
#def popitem(self, last=True):
# '''od.popitem() -> (k, v), return and remove a (key, value) pair.
# Pairs are returned in LIFO order if last is true or FIFO order if false.
# '''
# if not self:
# raise KeyError('dictionary is empty')
# key = next(reversed(self) if last else iter(self))
# value = self.pop(key)
# return key, value
#-----------------------------------------
def __repr__(self):
'ic.__repr__() <==> repr(ic)'
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self._items.items())
#-----------------------------------------
def copy_inplace(self, rowmask):
'''
inplace rowmask applied
'''
for v in self._items.values():
# first item in tuple is the array
arr = v[0]
# preserve name when copying inplace
name = arr.get_name()
arr=arr[rowmask]
arr.set_name(name)
v[0] = arr
#-----------------------------------------
def copy(self, cols=None, deep=False):
'''
Returns a shallow copy of the item container.
cols list can be provided for specific selection.
'''
newcontainer = ItemContainer()
if cols is None:
newcontainer._items = self._items.copy()
for k,v in newcontainer._items.items():
newcontainer._items[k] = v.copy()
else:
for k in cols:
newcontainer._items[k] = self._items[k].copy()
return newcontainer
#-----------------------------------------
def copy_apply(self, func, *args, cols=None):
'''
Returns a copy of the itemcontainer, applying a function to items before swapping them out in the new ItemContainer object.
Used in Dataset row masking.
'''
newcontainer = ItemContainer()
if cols is None:
for k, v in self._items.items():
# tuple copy
v = v.copy()
newcontainer._items[k] = v
v[0] = func(v[0], *args)
else:
for k in cols:
# tuple copy
v= self._items[k].copy()
newcontainer._items[k] = v
v[0] = func(v[0], *args)
return newcontainer
#-----------------------------------------
def apply(self, func, *args, cols=None):
'''
Performs a possibly inplace operation on items in the itemcontainer
'''
if cols is None:
for v in self._items.values():
func(v[0], *args)
else:
for k in cols:
v = self._items[k]
func(v[0], *args)
#-----------------------------------------
def __eq__(self, other):
if isinstance(other, ItemContainer):
return self._items == other._items
return self._items == other
def __ne__(self, other):
if isinstance(other, ItemContainer):
return self._items != other._items
return self._items != other
#def __del__(self):
# self._items.clear() # eliminate cyclical references
#-----------------------------------------
    def items_as_dict(self):
        '''
        Return a dict of item name -> item value (attributes dropped).
        '''
        return {k: v[0] for k, v in self._items.items()}

    # -----------------------------------------
    def items_tolist(self):
        # Return just the item values (first slot of each entry).
        return [v[0] for v in self._items.values()]

    # -----------------------------------------
    def item_delete(self, key):
        # Remove the item for key; raises KeyError when absent.
        del self._items[key]

    # -------------------------------------------------------
    def item_get_dict(self):
        '''
        return the underlying dict
        values are stored in the first slot of each entry, attributes in the second
        '''
        return self._items

    # -------------------------------------------------------
    def iter_values(self):
        '''
        This will yield the full values in the _items dict
        (lists of [value, attribute]).
        '''
        for v in self._items.values():
            yield v

    # -------------------------------------------------------
    def item_get_value(self, key):
        '''
        return the value for the given key
        NOTE: a good spot to put a counter for debugging
        '''
        return self._items[key][0]

    # -------------------------------------------------------
    def item_get_values(self, keylist):
        '''
        return list of values for the given keys
        used for fast dataset slicing/copy with column selection
        '''
        return [self.item_get_value(i) for i in keylist]

    # -------------------------------------------------------
    def item_set_value(self, key, value, attr=None):
        # If the key is new, insert [value, attr]; otherwise replace only the
        # value slot, preserving any existing attribute (attr is then ignored).
        temp = [value, attr]
        v = self._items.setdefault(key, temp)
        if v is not temp:
            v[0] = value

    def item_set_value_internal(self, key, value):
        # no checks, go straight to the dict -- callers are expected to pass
        # the full [value, attr] entry here, not a bare value
        self._items[key] = value
# -------------------------------------------------------
    def item_get_attribute(self, key, attrib_name, default=None):
        '''
        Retrieve the value of an attribute previously assigned with
        item_set_attribute.

        Parameters
        ----------
        key : name of the item
        attrib_name : name of the attribute
        default : value returned when the item has an ItemAttribute but no
            attribute of this name.  NOTE: when the item itself is missing or
            has no ItemAttribute at all, None is returned regardless of default.
        '''
        item = self._items.get(key, None)
        if item is None:
            return None
        attrib = item[1]
        if attrib is None:
            return None
        return getattr(attrib, attrib_name, default)

    # -------------------------------------------------------
    def _set_attribute(self, item, name, value):
        # Lazily create the ItemAttribute in the entry's second slot and set
        # (name, value) on it.
        attrib = item[1]
        if attrib is None:
            attrib = ItemAttribute()
        setattr(attrib, name, value)
        item[1] = attrib

    # -------------------------------------------------------
    def item_set_attribute(self, key, attrib_name, attrib_value):
        '''
        Attach an attribute (name, value) pair to an existing item.
        Any valid attribute name and any object can be assigned.

        Parameters
        ----------
        key : name of the item (must already exist)
        attrib_name : name of the attribute
        attrib_value : value of the attribute

        Raises
        ------
        KeyError : if the item does not already exist.

        Note: see item_get_attribute to retrieve.
        '''
        # check if already exists...
        if self.item_exists(key):
            self._set_attribute(self._items[key], attrib_name, attrib_value)
        else:
            raise KeyError(f"{key!r} does not already exist, thus cannot add attribute")
# -------------------------------------------------------
    def item_get_len(self):
        # Number of items in the container.
        return len(self._items)

    # -------------------------------------------------------
    def item_exists(self, item):
        # Membership test by item name.
        return item in self._items

    # -------------------------------------------------------
    def get_dict_values(self):
        '''
        Returns a tuple of the entries in the item dict.  Each entry is a
        [value, attribute] list.
        '''
        return tuple(self._items.values())

    # -------------------------------------------------------
    def item_replace_all(self, newdict, check_exists=True):
        '''
        Replace the data for each item in the item dict.  Original attributes
        will be retained.

        Parameters
        ----------
        newdict : dictionary of item names -> new item data (can also be a dataset)
        check_exists : if True, all newdict keys and old item keys will be compared
            to ensure an exact match

        Raises
        ------
        ValueError : when check_exists is True and the key sets differ.
        '''
        # for internal routines, an existence check can often be skipped
        if check_exists:
            for k in newdict:
                if self.item_exists(k) is False:
                    raise ValueError(f"Item {k} not found in original item dictionary.")
            for k in self._items:
                if k not in newdict:
                    raise ValueError(f"Item {k} in original item dictionary not found in new items.")
        # replace the data, keep any attributes if set
        for k, v in newdict.items():
            self._items[k][0] = v
# -------------------------------------------------------
def item_rename(self, old, new):
"""
Rename a single column.
:param old: Current column name.
:param new: New column name.
:return: value portion of item that was renamed
"""
if | |
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import boto3
import json
import re
from datetime import datetime, date
from botocore.config import Config
from botocore.parsers import ResponseParserError
import urllib3.util
# Disable insecure warnings
urllib3.disable_warnings()
"""PARAMETERS"""
AWS_DEFAULT_REGION = demisto.params().get('defaultRegion')
AWS_ROLE_ARN = demisto.params().get('roleArn')
AWS_ROLE_SESSION_NAME = demisto.params().get('roleSessionName')
AWS_ROLE_SESSION_DURATION = demisto.params().get('sessionDuration')
AWS_ROLE_POLICY = None
AWS_ACCESS_KEY_ID = demisto.params().get('access_key')
AWS_SECRET_ACCESS_KEY = demisto.params().get('secret_key')
VERIFY_CERTIFICATE = not demisto.params().get('insecure', True)
proxies = handle_proxy(proxy_param_name='proxy', checkbox_default_value=False)
config = Config(
connect_timeout=1,
retries=dict(
max_attempts=5
),
proxies=proxies
)
"""HELPER FUNCTIONS"""
def aws_session(service='ec2', region=None, roleArn=None, roleSessionName=None, roleSessionDuration=None,
                rolePolicy=None):
    """Build a boto3 client for ``service`` using the integration's credential strategy.

    Branch order:
      1. A role to assume and no configured access key -> STS assume_role with
         ambient credentials.
      2. Access key plus integration-level role ARN -> STS assume_role using
         the access key.
      3. Otherwise -> plain client with the configured access key (or ambient
         credentials when none is configured).
    Per-command roleArn / roleSessionName / roleSessionDuration / rolePolicy
    arguments override the integration-level settings.
    """
    kwargs = {}
    # NOTE(review): this parses as `roleArn and (roleSessionName is not None)`;
    # a per-command roleArn without a session name silently falls back to the
    # integration-level role below -- confirm that is intended.
    if roleArn and roleSessionName is not None:
        kwargs.update({
            'RoleArn': roleArn,
            'RoleSessionName': roleSessionName,
        })
    elif AWS_ROLE_ARN and AWS_ROLE_SESSION_NAME is not None:
        kwargs.update({
            'RoleArn': AWS_ROLE_ARN,
            'RoleSessionName': AWS_ROLE_SESSION_NAME,
        })
    if roleSessionDuration is not None:
        kwargs.update({'DurationSeconds': int(roleSessionDuration)})
    elif AWS_ROLE_SESSION_DURATION is not None:
        kwargs.update({'DurationSeconds': int(AWS_ROLE_SESSION_DURATION)})
    if rolePolicy is not None:
        kwargs.update({'Policy': rolePolicy})
    elif AWS_ROLE_POLICY is not None:
        kwargs.update({'Policy': AWS_ROLE_POLICY})
    if kwargs and AWS_ACCESS_KEY_ID is None:
        # NOTE(review): this inner check is redundant -- the branch condition
        # above already guarantees AWS_ACCESS_KEY_ID is None here.
        if AWS_ACCESS_KEY_ID is None:
            # Assume the role using ambient (engine/instance) credentials.
            sts_client = boto3.client('sts', config=config, verify=VERIFY_CERTIFICATE)
            sts_response = sts_client.assume_role(**kwargs)
            if region is not None:
                client = boto3.client(
                    service_name=service,
                    region_name=region,
                    aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
                    aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
                    aws_session_token=sts_response['Credentials']['SessionToken'],
                    verify=VERIFY_CERTIFICATE,
                    config=config
                )
            else:
                client = boto3.client(
                    service_name=service,
                    region_name=AWS_DEFAULT_REGION,
                    aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
                    aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
                    aws_session_token=sts_response['Credentials']['SessionToken'],
                    verify=VERIFY_CERTIFICATE,
                    config=config
                )
    elif AWS_ACCESS_KEY_ID and AWS_ROLE_ARN:
        # Assume the integration-level role using the configured access key.
        sts_client = boto3.client(
            service_name='sts',
            aws_access_key_id=AWS_ACCESS_KEY_ID,
            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
            verify=VERIFY_CERTIFICATE,
            config=config
        )
        # NOTE(review): this overwrites any per-command roleArn/roleSessionName
        # captured in kwargs above with the integration-level values.
        kwargs.update({
            'RoleArn': AWS_ROLE_ARN,
            'RoleSessionName': AWS_ROLE_SESSION_NAME,
        })
        sts_response = sts_client.assume_role(**kwargs)
        client = boto3.client(
            service_name=service,
            region_name=AWS_DEFAULT_REGION,
            aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
            aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
            aws_session_token=sts_response['Credentials']['SessionToken'],
            verify=VERIFY_CERTIFICATE,
            config=config
        )
    else:
        # No role assumption: direct client with configured (or ambient) keys.
        if region is not None:
            client = boto3.client(
                service_name=service,
                region_name=region,
                aws_access_key_id=AWS_ACCESS_KEY_ID,
                aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                verify=VERIFY_CERTIFICATE,
                config=config
            )
        else:
            client = boto3.client(
                service_name=service,
                region_name=AWS_DEFAULT_REGION,
                aws_access_key_id=AWS_ACCESS_KEY_ID,
                aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
                verify=VERIFY_CERTIFICATE,
                config=config
            )
    return client
def parse_filter_field(filter_str):
    """Parse 'name=...,values=v1,v2;name=...' into a boto3 Filters list.

    Segments that do not match the expected pattern are logged and skipped.
    """
    pattern = re.compile(r'name=([\w\d_:.-]+),values=([ /\w\d@_,.*-]+)', flags=re.I)
    parsed = []
    for segment in filter_str.split(';'):
        match = pattern.match(segment)
        if match is None:
            demisto.log('could not parse filter: %s' % (segment,))
            continue
        name, values = match.group(1), match.group(2)
        parsed.append({
            'Name': name,
            'Values': values.split(',')
        })
    return parsed
def parse_tag_field(tags_str):
    """Parse 'key=K,value=V;key=K2,value=V2' into a boto3 tag list.

    Segments that do not match the expected pattern are logged and skipped.
    """
    pattern = re.compile(r'key=([\w\d_:.-]+),value=([ /\w\d@_,.*-]+)', flags=re.I)
    parsed = []
    for segment in tags_str.split(';'):
        match = pattern.match(segment)
        if match is None:
            demisto.log('could not parse field: %s' % (segment,))
            continue
        parsed.append({
            'Key': match.group(1),
            'Value': match.group(2)
        })
    return parsed
class DatetimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime/date objects as formatted strings."""
    # pylint: disable=method-hidden
    def default(self, obj):
        # datetime must be tested before date: datetime is a date subclass,
        # and it carries the time component in its format.
        if isinstance(obj, datetime):
            return obj.strftime('%Y-%m-%dT%H:%M:%S')
        if isinstance(obj, date):
            return obj.strftime('%Y-%m-%d')
        # Defer to the base class, which raises TypeError for unknown types.
        return super(DatetimeEncoder, self).default(obj)
def parse_resource_ids(resource_id):
    """Split a comma-separated id string into a list, stripping all spaces."""
    return resource_id.replace(" ", "").split(",")
def multi_split(data):
    """Split a semicolon-separated string into a list, stripping all spaces."""
    return data.replace(" ", "").split(";")
def parse_date(dt):
    """Parse a 'YYYY-MM-DD' string and return it in ISO-8601 form.

    Calls return_error (which aborts the command) when the string cannot be
    parsed.  BUGFIX: IndexError is now caught too (inputs with fewer than
    three dash-separated parts previously crashed), and the result is
    returned from inside the try so the old unbound-variable reference on
    the error path is gone.
    """
    try:
        arr = dt.split("-")
        return (datetime(int(arr[0]), int(arr[1]), int(arr[2]))).isoformat()
    except (ValueError, IndexError) as e:
        return_error("Date could not be parsed. Please check the date again.\n{error}".format(error=e))
"""MAIN FUNCTIONS"""
def describe_regions_command(args):
    """aws-ec2-describe-regions: list EC2 regions (optionally by name)."""
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    kwargs = {}
    region_names = args.get('regionNames')
    if region_names is not None:
        kwargs['RegionNames'] = parse_resource_ids(region_names)
    response = client.describe_regions(**kwargs)
    data = [
        {
            'Endpoint': region['Endpoint'],
            'RegionName': region['RegionName'],
        }
        for region in response['Regions']
    ]
    ec = {'AWS.Regions(val.RegionName === obj.RegionName)': data}
    human_readable = tableToMarkdown('AWS Regions', data)
    return_outputs(human_readable, ec)
def describe_instances_command(args):
    """aws-ec2-describe-instances: list EC2 instances matching optional filters/ids.

    Builds one summary row per instance for the war-room table and stores the
    raw (datetime-sanitized) instances to context under AWS.EC2.Instances.
    """
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    obj = vars(client._client_config)
    data = []
    kwargs = {}
    output = []
    if args.get('filters') is not None:
        kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
    if args.get('instanceIds') is not None:
        kwargs.update({'InstanceIds': parse_resource_ids(args.get('instanceIds'))})
    response = client.describe_instances(**kwargs)
    for reservation in response['Reservations']:
        for instance in reservation['Instances']:
            try:
                launch_date = datetime.strftime(instance['LaunchTime'], '%Y-%m-%dT%H:%M:%SZ')
            except ValueError as e:
                return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=e))
            row = {
                'InstanceId': instance['InstanceId'],
                'ImageId': instance['ImageId'],
                'State': instance['State']['Name'],
                'PublicIPAddress': instance.get('PublicIpAddress'),
                'Region': obj['_user_provided_options']['region_name'],
                'Type': instance['InstanceType'],
                'LaunchDate': launch_date,
                'PublicDNSName': instance['PublicDnsName'],
                'Monitoring': instance['Monitoring']['State'],
            }
            # BUGFIX: tags/KeyName are attached to *this* instance's row.
            # Previously data[i] was indexed by the reservation counter, so a
            # reservation with several instances wrote them to the wrong row.
            if 'Tags' in instance:
                for tag in instance['Tags']:
                    row[tag['Key']] = tag['Value']
            if 'KeyName' in instance:
                row['KeyName'] = instance['KeyName']
            data.append(row)
            instance.update({'Region': obj['_user_provided_options']['region_name']})
            output.append(instance)
    try:
        # Round-trip through DatetimeEncoder to make datetimes JSON-safe.
        raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
    except ValueError as e:
        return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
    ec = {'AWS.EC2.Instances(val.InstanceId === obj.InstanceId)': raw}
    human_readable = tableToMarkdown('AWS Instances', data)
    return_outputs(human_readable, ec)
def describe_images_command(args):
    """aws-ec2-describe-images: list AMIs matching optional filters/ids/owners.

    Outputs a summary table and stores the raw (datetime-sanitized) images to
    context under AWS.EC2.Images.
    """
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    obj = vars(client._client_config)
    kwargs = {}
    data = []
    if args.get('filters') is not None:
        kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
    if args.get('imageIds') is not None:
        kwargs.update({'ImageIds': parse_resource_ids(args.get('imageIds'))})
    if args.get('owners') is not None:
        kwargs.update({'Owners': parse_resource_ids(args.get('owners'))})
    if args.get('executableUsers') is not None:
        kwargs.update({'ExecutableUsers': parse_resource_ids(args.get('executableUsers'))})
    response = client.describe_images(**kwargs)
    for i, image in enumerate(response['Images']):
        data.append({
            'CreationDate': image['CreationDate'],
            'ImageId': image['ImageId'],
            'Public': image['Public'],
            'State': image['State'],
            'Region': obj['_user_provided_options']['region_name'],
        })
        # Optional fields are added only when present in the API response.
        if 'Description' in image:
            data[i].update({'Description': image['Description']})
        if 'EnaSupport' in image:
            data[i].update({'EnaSupport': image['EnaSupport']})
        if 'Name' in image:
            data[i].update({'Name': image['Name']})
        if 'Tags' in image:
            for tag in image['Tags']:
                data[i].update({
                    tag['Key']: tag['Value']
                })
    try:
        output = json.dumps(response['Images'], cls=DatetimeEncoder)
        raw = json.loads(output)
        if raw:
            # BUGFIX: raw[0] raised an uncaught IndexError (not a ValueError)
            # when no images matched the query.
            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
    except ValueError as e:
        return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
    ec = {'AWS.EC2.Images(val.ImageId === obj.ImageId)': raw}
    human_readable = tableToMarkdown('AWS EC2 Images', data)
    return_outputs(human_readable, ec)
def describe_addresses_command(args):
    """aws-ec2-describe-addresses: list Elastic IPs matching optional filters/ids."""
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    obj = vars(client._client_config)
    kwargs = {}
    data = []
    if args.get('filters') is not None:
        kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
    if args.get('publicIps') is not None:
        kwargs.update({'PublicIps': parse_resource_ids(args.get('publicIps'))})
    if args.get('allocationIds') is not None:
        kwargs.update({'AllocationIds': parse_resource_ids(args.get('allocationIds'))})
    response = client.describe_addresses(**kwargs)
    for i, address in enumerate(response['Addresses']):
        # NOTE(review): 'AllocationId' is absent for EC2-Classic addresses and
        # would raise KeyError here -- presumably only VPC addresses are
        # expected; confirm before hardening.
        data.append({
            'PublicIp': address['PublicIp'],
            'AllocationId': address['AllocationId'],
            'Domain': address['Domain'],
            'Region': obj['_user_provided_options']['region_name'],
        })
        if 'InstanceId' in address:
            data[i].update({'InstanceId': address['InstanceId']})
        if 'AssociationId' in address:
            data[i].update({'AssociationId': address['AssociationId']})
        if 'NetworkInterfaceId' in address:
            data[i].update({'NetworkInterfaceId': address['NetworkInterfaceId']})
        if 'PrivateIpAddress' in address:
            data[i].update({'PrivateIpAddress': address['PrivateIpAddress']})
        if 'Tags' in address:
            for tag in address['Tags']:
                data[i].update({
                    tag['Key']: tag['Value']
                })
    raw = response['Addresses']
    if raw:
        # BUGFIX: raw[0] raised an IndexError when no addresses matched.
        raw[0].update({'Region': obj['_user_provided_options']['region_name']})
    ec = {'AWS.EC2.ElasticIPs(val.AllocationId === obj.AllocationId)': raw}
    human_readable = tableToMarkdown('AWS EC2 ElasticIPs', data)
    return_outputs(human_readable, ec)
def describe_snapshots_command(args):
    """aws-ec2-describe-snapshots: list EBS snapshots matching optional filters/ids/owners."""
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    obj = vars(client._client_config)
    kwargs = {}
    data = []
    if args.get('filters') is not None:
        kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
    if args.get('ownerIds') is not None:
        kwargs.update({'OwnerIds': parse_resource_ids(args.get('ownerIds'))})
    if args.get('snapshotIds') is not None:
        kwargs.update({'SnapshotIds': parse_resource_ids(args.get('snapshotIds'))})
    if args.get('restorableByUserIds') is not None:
        kwargs.update({'RestorableByUserIds': parse_resource_ids(args.get('restorableByUserIds'))})
    response = client.describe_snapshots(**kwargs)
    for i, snapshot in enumerate(response['Snapshots']):
        try:
            start_time = datetime.strftime(snapshot['StartTime'], '%Y-%m-%dT%H:%M:%SZ')
        except ValueError as e:
            return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=e))
        data.append({
            'Description': snapshot['Description'],
            'Encrypted': snapshot['Encrypted'],
            'OwnerId': snapshot['OwnerId'],
            'Progress': snapshot['Progress'],
            'SnapshotId': snapshot['SnapshotId'],
            'StartTime': start_time,
            'State': snapshot['State'],
            'VolumeId': snapshot['VolumeId'],
            'VolumeSize': snapshot['VolumeSize'],
            'Region': obj['_user_provided_options']['region_name'],
        })
        if 'Tags' in snapshot:
            for tag in snapshot['Tags']:
                data[i].update({
                    tag['Key']: tag['Value']
                })
    try:
        output = json.dumps(response['Snapshots'], cls=DatetimeEncoder)
        raw = json.loads(output)
        if raw:
            # BUGFIX: raw[0] raised an uncaught IndexError when no snapshots
            # matched the query.
            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
    except ValueError as e:
        return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
    ec = {'AWS.EC2.Snapshots(val.SnapshotId === obj.SnapshotId)': raw}
    human_readable = tableToMarkdown('AWS EC2 Snapshots', data)
    return_outputs(human_readable, ec)
def describe_volumes_command(args):
    """aws-ec2-describe-volumes: list EBS volumes matching optional filters/ids."""
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    obj = vars(client._client_config)
    kwargs = {}
    data = []
    if args.get('filters') is not None:
        kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
    if args.get('volumeIds') is not None:
        kwargs.update({'VolumeIds': parse_resource_ids(args.get('volumeIds'))})
    response = client.describe_volumes(**kwargs)
    for i, volume in enumerate(response['Volumes']):
        try:
            create_date = datetime.strftime(volume['CreateTime'], '%Y-%m-%dT%H:%M:%SZ')
        except ValueError as e:
            return_error('Date could not be parsed. Please check the date again.\n{}'.format(e))
        data.append({
            'AvailabilityZone': volume['AvailabilityZone'],
            'Encrypted': volume['Encrypted'],
            'State': volume['State'],
            'VolumeId': volume['VolumeId'],
            'VolumeType': volume['VolumeType'],
            'CreateTime': create_date,
            # Added for consistency with the other describe_* commands, which
            # all include the region in their summary rows.
            'Region': obj['_user_provided_options']['region_name'],
        })
        if 'Tags' in volume:
            for tag in volume['Tags']:
                data[i].update({
                    tag['Key']: tag['Value']
                })
    try:
        output = json.dumps(response['Volumes'], cls=DatetimeEncoder)
        raw = json.loads(output)
        if raw:
            # BUGFIX: raw[0] raised an uncaught IndexError when no volumes
            # matched the query.
            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
    except ValueError as e:
        return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
    ec = {'AWS.EC2.Volumes(val.VolumeId === obj.VolumeId)': raw}
    human_readable = tableToMarkdown('AWS EC2 Volumes', data)
    return_outputs(human_readable, ec)
def describe_launch_templates_command(args):
    """aws-ec2-describe-launch-templates: list launch templates matching optional filters/ids/names."""
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    obj = vars(client._client_config)
    kwargs = {}
    data = []
    if args.get('filters') is not None:
        kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
    if args.get('launchTemplateIds') is not None:
        kwargs.update({'LaunchTemplateIds': parse_resource_ids(args.get('launchTemplateIds'))})
    if args.get('launchTemplateNames') is not None:
        # BUGFIX: previously read the misspelled arg 'launchTemplateNamess',
        # which is always None and crashed parse_resource_ids.
        kwargs.update({'LaunchTemplateNames': parse_resource_ids(args.get('launchTemplateNames'))})
    response = client.describe_launch_templates(**kwargs)
    for i, template in enumerate(response['LaunchTemplates']):
        try:
            create_time = datetime.strftime(template['CreateTime'], '%Y-%m-%dT%H:%M:%SZ')
        except ValueError as e:
            return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=e))
        data.append({
            'LaunchTemplateId': template['LaunchTemplateId'],
            'LaunchTemplateName': template['LaunchTemplateName'],
            'CreatedBy': template['CreatedBy'],
            'DefaultVersionNumber': template['DefaultVersionNumber'],
            'LatestVersionNumber': template['LatestVersionNumber'],
            'CreateTime': create_time,
            'Region': obj['_user_provided_options']['region_name'],
        })
        if 'Tags' in template:
            for tag in template['Tags']:
                data[i].update({
                    tag['Key']: tag['Value']
                })
    try:
        output = json.dumps(response['LaunchTemplates'], cls=DatetimeEncoder)
        raw = json.loads(output)
        if raw:
            # BUGFIX: raw[0] raised an uncaught IndexError when no templates
            # matched the query.
            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
    except ValueError as e:
        return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
    ec = {'AWS.EC2.LaunchTemplates(val.LaunchTemplateId === obj.LaunchTemplateId)': raw}
    human_readable = tableToMarkdown('AWS EC2 LaunchTemplates', data)
    return_outputs(human_readable, ec)
def describe_key_pairs_command(args):
    """aws-ec2-describe-key-pairs: list key pairs, optionally filtered by name."""
    client = aws_session(
        region=args.get('region'),
        roleArn=args.get('roleArn'),
        roleSessionName=args.get('roleSessionName'),
        roleSessionDuration=args.get('roleSessionDuration'),
    )
    obj = vars(client._client_config)
    kwargs = {}
    if args.get('filters') is not None:
        kwargs['Filters'] = parse_filter_field(args.get('filters'))
    if args.get('keyNames') is not None:
        kwargs['KeyNames'] = parse_resource_ids(args.get('keyNames'))
    response = client.describe_key_pairs(**kwargs)
    region_name = obj['_user_provided_options']['region_name']
    data = [
        {
            'KeyFingerprint': key['KeyFingerprint'],
            'KeyName': key['KeyName'],
            'Region': region_name,
        }
        for key in response['KeyPairs']
    ]
    ec = {'AWS.EC2.KeyPairs(val.KeyName === obj.KeyName)': data}
    human_readable = tableToMarkdown('AWS EC2 Key Pairs', data)
    return_outputs(human_readable, ec)
def describe_vpcs_command(args):
client = aws_session(
region=args.get('region'),
roleArn=args.get('roleArn'),
roleSessionName=args.get('roleSessionName'),
roleSessionDuration=args.get('roleSessionDuration'),
)
obj = vars(client._client_config)
kwargs = {}
data = | |
'ref', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'title': {'key': 'title', 'type': 'str'},
'items': {'key': 'items', 'type': 'SwaggerSchema'},
'properties': {'key': 'properties', 'type': '{SwaggerSchema}'},
'additional_properties': {'key': 'additionalProperties', 'type': 'object'},
'required': {'key': 'required', 'type': '[str]'},
'max_properties': {'key': 'maxProperties', 'type': 'int'},
'min_properties': {'key': 'minProperties', 'type': 'int'},
'all_of': {'key': 'allOf', 'type': '[SwaggerSchema]'},
'discriminator': {'key': 'discriminator', 'type': 'str'},
'read_only': {'key': 'readOnly', 'type': 'bool'},
'xml': {'key': 'xml', 'type': 'SwaggerXml'},
'external_docs': {'key': 'externalDocs', 'type': 'SwaggerExternalDocumentation'},
'example': {'key': 'example', 'type': 'object'},
'notification_url_extension': {'key': 'notificationUrlExtension', 'type': 'bool'},
'dynamic_schema_old': {'key': 'dynamicSchemaOld', 'type': 'SwaggerCustomDynamicSchema'},
'dynamic_schema_new': {'key': 'dynamicSchemaNew', 'type': 'SwaggerCustomDynamicProperties'},
'dynamic_list_new': {'key': 'dynamicListNew', 'type': 'SwaggerCustomDynamicList'},
'dynamic_tree': {'key': 'dynamicTree', 'type': 'SwaggerCustomDynamicTree'},
}
    def __init__(
        self,
        *,
        ref: Optional[str] = None,
        type: Optional[Union[str, "SwaggerSchemaType"]] = None,
        title: Optional[str] = None,
        items: Optional["SwaggerSchema"] = None,
        properties: Optional[Dict[str, "SwaggerSchema"]] = None,
        additional_properties: Optional[object] = None,
        required: Optional[List[str]] = None,
        max_properties: Optional[int] = None,
        min_properties: Optional[int] = None,
        all_of: Optional[List["SwaggerSchema"]] = None,
        discriminator: Optional[str] = None,
        read_only: Optional[bool] = None,
        xml: Optional["SwaggerXml"] = None,
        external_docs: Optional["SwaggerExternalDocumentation"] = None,
        example: Optional[object] = None,
        notification_url_extension: Optional[bool] = None,
        dynamic_schema_old: Optional["SwaggerCustomDynamicSchema"] = None,
        dynamic_schema_new: Optional["SwaggerCustomDynamicProperties"] = None,
        dynamic_list_new: Optional["SwaggerCustomDynamicList"] = None,
        dynamic_tree: Optional["SwaggerCustomDynamicTree"] = None,
        **kwargs
    ):
        """Keyword-only initializer for the generated SwaggerSchema model.

        Every keyword maps 1:1 onto an instance attribute; serialization of
        each attribute is driven by the class-level _attribute_map.
        """
        super(SwaggerSchema, self).__init__(**kwargs)
        self.ref = ref
        self.type = type
        self.title = title
        self.items = items
        self.properties = properties
        self.additional_properties = additional_properties
        self.required = required
        self.max_properties = max_properties
        self.min_properties = min_properties
        self.all_of = all_of
        self.discriminator = discriminator
        self.read_only = read_only
        self.xml = xml
        self.external_docs = external_docs
        self.example = example
        self.notification_url_extension = notification_url_extension
        self.dynamic_schema_old = dynamic_schema_old
        self.dynamic_schema_new = dynamic_schema_new
        self.dynamic_list_new = dynamic_list_new
        self.dynamic_tree = dynamic_tree
class SwaggerXml(msrest.serialization.Model):
    """The Swagger XML.

    :param name: The xml element or attribute name.
    :type name: str
    :param namespace: The xml namespace.
    :type namespace: str
    :param prefix: The name prefix.
    :type prefix: str
    :param attribute: Indicates whether the property should be an attribute instead of an element.
    :type attribute: bool
    :param wrapped: Indicates whether the array elements are wrapped in a container element.
    :type wrapped: bool
    :param extensions: The vendor extensions.
    :type extensions: dict[str, object]
    """

    # Serialization metadata consumed by msrest: attribute -> wire key/type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'namespace': {'key': 'namespace', 'type': 'str'},
        'prefix': {'key': 'prefix', 'type': 'str'},
        'attribute': {'key': 'attribute', 'type': 'bool'},
        'wrapped': {'key': 'wrapped', 'type': 'bool'},
        'extensions': {'key': 'extensions', 'type': '{object}'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        namespace: Optional[str] = None,
        prefix: Optional[str] = None,
        attribute: Optional[bool] = None,
        wrapped: Optional[bool] = None,
        extensions: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        # Keyword-only initializer; each keyword maps 1:1 onto an attribute.
        super(SwaggerXml, self).__init__(**kwargs)
        self.name = name
        self.namespace = namespace
        self.prefix = prefix
        self.attribute = attribute
        self.wrapped = wrapped
        self.extensions = extensions
class TrackingEvent(msrest.serialization.Model):
    """A single B2B tracking event.

    All required parameters must be populated in order to send to Azure.

    :param event_level: Required. The event level. Possible values include: "LogAlways",
     "Critical", "Error", "Warning", "Informational", "Verbose".
    :type event_level: str or ~azure.mgmt.logic.models.EventLevel
    :param event_time: Required. The event time.
    :type event_time: ~datetime.datetime
    :param record_type: Required. The record type. Possible values include: "NotSpecified",
     "Custom", "AS2Message", "AS2MDN", "X12Interchange", "X12FunctionalGroup", "X12TransactionSet",
     "X12InterchangeAcknowledgment", "X12FunctionalGroupAcknowledgment",
     "X12TransactionSetAcknowledgment", "EdifactInterchange", "EdifactFunctionalGroup",
     "EdifactTransactionSet", "EdifactInterchangeAcknowledgment",
     "EdifactFunctionalGroupAcknowledgment", "EdifactTransactionSetAcknowledgment".
    :type record_type: str or ~azure.mgmt.logic.models.TrackingRecordType
    :param record: The record.
    :type record: object
    :param error: The error.
    :type error: ~azure.mgmt.logic.models.TrackingEventErrorInfo
    """

    # Fields the service requires on outgoing payloads.
    _validation = {
        "event_level": {"required": True},
        "event_time": {"required": True},
        "record_type": {"required": True},
    }

    # msrest wire mapping: python attribute -> JSON key / serialized type.
    _attribute_map = {
        "event_level": {"key": "eventLevel", "type": "str"},
        "event_time": {"key": "eventTime", "type": "iso-8601"},
        "record_type": {"key": "recordType", "type": "str"},
        "record": {"key": "record", "type": "object"},
        "error": {"key": "error", "type": "TrackingEventErrorInfo"},
    }

    def __init__(
        self,
        *,
        event_level: Union[str, "EventLevel"],
        event_time: datetime.datetime,
        record_type: Union[str, "TrackingRecordType"],
        record: Optional[object] = None,
        error: Optional["TrackingEventErrorInfo"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Store all values as given; serialization happens in the base class.
        self.error = error
        self.record = record
        self.record_type = record_type
        self.event_time = event_time
        self.event_level = event_level
class TrackingEventErrorInfo(msrest.serialization.Model):
    """Error details attached to a tracking event.

    :param message: The message.
    :type message: str
    :param code: The code.
    :type code: str
    """

    # msrest wire mapping: python attribute -> JSON key / serialized type.
    _attribute_map = {
        "message": {"key": "message", "type": "str"},
        "code": {"key": "code", "type": "str"},
    }

    def __init__(self, *, message: Optional[str] = None, code: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.code = code
        self.message = message
class TrackingEventsDefinition(msrest.serialization.Model):
    """A batch of tracking events plus their source information.

    All required parameters must be populated in order to send to Azure.

    :param source_type: Required. The source type.
    :type source_type: str
    :param track_events_options: The track events options. Possible values include: "None",
     "DisableSourceInfoEnrich".
    :type track_events_options: str or ~azure.mgmt.logic.models.TrackEventsOperationOptions
    :param events: Required. The events.
    :type events: list[~azure.mgmt.logic.models.TrackingEvent]
    """

    # Fields the service requires on outgoing payloads.
    _validation = {
        "source_type": {"required": True},
        "events": {"required": True},
    }

    # msrest wire mapping: python attribute -> JSON key / serialized type.
    _attribute_map = {
        "source_type": {"key": "sourceType", "type": "str"},
        "track_events_options": {"key": "trackEventsOptions", "type": "str"},
        "events": {"key": "events", "type": "[TrackingEvent]"},
    }

    def __init__(
        self,
        *,
        source_type: str,
        events: List["TrackingEvent"],
        track_events_options: Optional[Union[str, "TrackEventsOperationOptions"]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.events = events
        self.track_events_options = track_events_options
        self.source_type = source_type
class Workflow(Resource):
    """The workflow type.
    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar id: The resource id.
    :vartype id: str
    :ivar name: Gets the resource name.
    :vartype name: str
    :ivar type: Gets the resource type.
    :vartype type: str
    :param location: The resource location.
    :type location: str
    :param tags: A set of tags. The resource tags.
    :type tags: dict[str, str]
    :ivar provisioning_state: Gets the provisioning state. Possible values include: "NotSpecified",
     "Accepted", "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled",
     "Failed", "Succeeded", "Moving", "Updating", "Registering", "Registered", "Unregistering",
     "Unregistered", "Completed", "Renewing", "Pending", "Waiting", "InProgress".
    :vartype provisioning_state: str or ~azure.mgmt.logic.models.WorkflowProvisioningState
    :ivar created_time: Gets the created time.
    :vartype created_time: ~datetime.datetime
    :ivar changed_time: Gets the changed time.
    :vartype changed_time: ~datetime.datetime
    :param state: The state. Possible values include: "NotSpecified", "Completed", "Enabled",
     "Disabled", "Deleted", "Suspended".
    :type state: str or ~azure.mgmt.logic.models.WorkflowState
    :ivar version: Gets the version.
    :vartype version: str
    :ivar access_endpoint: Gets the access endpoint.
    :vartype access_endpoint: str
    :param endpoints_configuration: The endpoints configuration.
    :type endpoints_configuration: ~azure.mgmt.logic.models.FlowEndpointsConfiguration
    :param access_control: The access control configuration.
    :type access_control: ~azure.mgmt.logic.models.FlowAccessControlConfiguration
    :ivar sku: The sku.
    :vartype sku: ~azure.mgmt.logic.models.Sku
    :param integration_account: The integration account.
    :type integration_account: ~azure.mgmt.logic.models.ResourceReference
    :param integration_service_environment: The integration service environment.
    :type integration_service_environment: ~azure.mgmt.logic.models.ResourceReference
    :param definition: The definition.
    :type definition: object
    :param parameters: The parameters.
    :type parameters: dict[str, ~azure.mgmt.logic.models.WorkflowParameter]
    """
    # 'readonly' marks server-populated properties: they are never sent on
    # requests, only deserialized from responses.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'created_time': {'readonly': True},
        'changed_time': {'readonly': True},
        'version': {'readonly': True},
        'access_endpoint': {'readonly': True},
        'sku': {'readonly': True},
    }
    # msrest wire mapping; dotted keys ('properties.x') flatten into the
    # nested 'properties' JSON object on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'created_time': {'key': 'properties.createdTime', 'type': 'iso-8601'},
        'changed_time': {'key': 'properties.changedTime', 'type': 'iso-8601'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'access_endpoint': {'key': 'properties.accessEndpoint', 'type': 'str'},
        'endpoints_configuration': {'key': 'properties.endpointsConfiguration', 'type': 'FlowEndpointsConfiguration'},
        'access_control': {'key': 'properties.accessControl', 'type': 'FlowAccessControlConfiguration'},
        'sku': {'key': 'properties.sku', 'type': 'Sku'},
        'integration_account': {'key': 'properties.integrationAccount', 'type': 'ResourceReference'},
        'integration_service_environment': {'key': 'properties.integrationServiceEnvironment', 'type': 'ResourceReference'},
        'definition': {'key': 'properties.definition', 'type': 'object'},
        'parameters': {'key': 'properties.parameters', 'type': '{WorkflowParameter}'},
    }
    def __init__(
        self,
        *,
        location: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        state: Optional[Union[str, "WorkflowState"]] = None,
        endpoints_configuration: Optional["FlowEndpointsConfiguration"] = None,
        access_control: Optional["FlowAccessControlConfiguration"] = None,
        integration_account: Optional["ResourceReference"] = None,
        integration_service_environment: Optional["ResourceReference"] = None,
        definition: Optional[object] = None,
        parameters: Optional[Dict[str, "WorkflowParameter"]] = None,
        **kwargs
    ):
        super(Workflow, self).__init__(location=location, tags=tags, **kwargs)
        # Read-only (server-populated) attributes are initialized to None;
        # the deserializer fills them in on responses.
        self.provisioning_state = None
        self.created_time = None
        self.changed_time = None
        self.state = state
        self.version = None
        self.access_endpoint = None
        self.endpoints_configuration = endpoints_configuration
        self.access_control = access_control
        self.sku = None
        self.integration_account = integration_account
        self.integration_service_environment = integration_service_environment
        self.definition = definition
        self.parameters = parameters
class WorkflowFilter(msrest.serialization.Model):
    """Filter applied when listing workflows.

    :param state: The state of workflows. Possible values include: "NotSpecified", "Completed",
     "Enabled", "Disabled", "Deleted", "Suspended".
    :type state: str or ~azure.mgmt.logic.models.WorkflowState
    """

    # msrest wire mapping: python attribute -> JSON key / serialized type.
    _attribute_map = {
        "state": {"key": "state", "type": "str"},
    }

    def __init__(self, *, state: Optional[Union[str, "WorkflowState"]] = None, **kwargs):
        super().__init__(**kwargs)
        self.state = state
class WorkflowListResult(msrest.serialization.Model):
"""The list of workflows.
:param value: The list of workflows.
:type value: list[~azure.mgmt.logic.models.Workflow]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Workflow]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Workflow"]] = None,
next_link: Optional[str] = None,
**kwargs
):
super(WorkflowListResult, self).__init__(**kwargs)
self.value = value
self.next_link | |
return None
return self.cursor.fetchone()[0]
    def get_HEAD(self, group_name, id):
        """Build the NNTP header block for article `id` in `group_name`.

        Returns the headers joined with CRLF, or None when the post does not
        exist in the group's forum.
        """
        # NOTE: parameter `id` shadows the builtin, but renaming it would
        # change the method's interface.
        forum_id = self.get_forum(group_name)
        prefix = settings.phpbb_table_prefix
        # Selected columns: 0 post_id, 1 username, 2 user_email, 3 subject
        # (topic title prefixed with 'Re: ' when the post has none),
        # 4 post_time, 5 topic_id, 6 post_username (guest posts),
        # 7 MIN(post_id) = id of the first post of the topic.
        # NOTE(review): forum_id and id are interpolated directly into the SQL
        # text; `id` comes from the NNTP client, so this is injection-prone.
        stmt = """
                SELECT
                    A.post_id,
                    C.username,
                    C.user_email,
                    CASE WHEN A.post_subject = '' THEN CONCAT('Re: ', E.topic_title) ELSE A.post_subject END,
                    A.post_time,
                    A.topic_id,
                    A.post_username,
                    MIN(D.post_id)
                FROM
                    %sposts A
                INNER JOIN
                    %stopics E
                ON
                    A.topic_id = E.topic_id
                INNER JOIN
                    %sposts D
                ON
                    D.topic_id=A.topic_id
                LEFT JOIN
                    %susers C
                ON
                    A.poster_id=C.user_id
                WHERE
                    (A.forum_id=%s OR A.forum_id=0) AND
                    A.post_id=%s
                GROUP BY
                    D.topic_id""" % (prefix, prefix, prefix, prefix, forum_id, id)
        num_rows = self.query(stmt)
        if num_rows == 0:
            return None
        result = list(self.cursor.fetchone())
        # check if there is a registered user
        # Empty post_username means a registered user wrote the post: fall
        # back to the account's username (the email variant is disabled).
        # NOTE(review): assumes post_username is never NULL — confirm schema.
        if len(result[6]) == 0 or result[6] == '':
            if len(result[2]) == 0:
                author = result[1]
            else:
                #author = "%s <%s>" % (result[1], result[2])
                author = result[1]
        else:
            author = result[6]
        formatted_time = strutil.get_formatted_time(time.localtime(result[4]))
        headers = []
        headers.append("Path: %s" % (settings.nntp_hostname))
        headers.append("From: %s" % (author))
        headers.append("Newsgroups: %s" % (group_name))
        headers.append("Date: %s" % (formatted_time))
        headers.append("Subject: %s" % (result[3]))
        headers.append("Message-ID: <%s@%s>" % (result[0], group_name))
        headers.append("Xref: %s %s:%s" % (settings.nntp_hostname, group_name, result[0]))
        # because topics are all related in forums we can only reference the first topic
        # Only a reply (not the topic's first post) gets threading headers.
        if result[7] != result[0]:
            headers.append("References: <%s@%s>" % (result[7], group_name))
            headers.append("In-Reply-To: <%s@%s>" % (result[7], group_name))
        return "\r\n".join(headers)
def get_BODY(self, group_name, id):
forum_id = self.get_forum(group_name)
prefix = settings.phpbb_table_prefix
stmt = """
SELECT
A.post_text
FROM
%sposts A
WHERE
(A.forum_id=%s OR A.forum_id=0) AND
A.post_id=%s""" % (prefix, forum_id, id)
num_rows = self.query(stmt)
if num_rows == 0:
return None
else:
return strutil.format_body(self.cursor.fetchone()[0])
    def get_XOVER(self, group_name, start_id, end_id='ggg'):
        """Build the NNTP XOVER (overview) listing for posts in `group_name`
        between `start_id` and `end_id` ('ggg' means no upper bound).

        Returns one tab-separated overview line per post, joined with CRLF.
        """
        forum_id = self.get_forum(group_name)
        prefix = settings.phpbb_table_prefix
        #print "xover startid=%s endid=%s\r\n" % (start_id, end_id)
        # Selected columns: 0 post_id, 1 topic_id, 2 username, 3 user_email,
        # 4 subject, 5 post_time, 6 post_text, 7 post_username (guest posts),
        # 8 MinPostID = id of the first post of the topic.
        # NOTE(review): start_id/end_id are interpolated directly into the
        # SQL text; they come from the NNTP client, so this is injection-prone.
        stmt = """
                SELECT
                    A.post_id,
                    A.topic_id,
                    C.username,
                    C.user_email,
                    CASE WHEN A.post_subject = '' THEN CONCAT('Re: ', D.topic_title) ELSE A.post_subject END,
                    A.post_time,
                    A.post_text,
                    A.post_username,
                    E.MinPostID
                FROM
                    %sposts A
                LEFT JOIN
                    (select topic_id, MIN(post_id) as MinPostID from %sposts group by topic_id) E
                ON
                    E.topic_id=A.topic_id
                LEFT JOIN
                    %susers C
                ON
                    A.poster_id=C.user_id
                LEFT JOIN
                    %stopics D
                ON
                    A.topic_id = D.topic_id
                WHERE
                    (A.forum_id=%s OR A.forum_id=0) AND
                    A.post_id >= %s""" % (prefix, prefix, prefix, prefix, forum_id, start_id)
        if end_id != 'ggg':
            stmt = "%s AND A.post_id <= %s" % (stmt, end_id)
        self.query(stmt)
        result = list(self.cursor.fetchall())
        overviews = []
        for row in result:
            # Empty post_username means a registered user: use the account
            # name (the "name <email>" variant is deliberately disabled).
            if row[7] == '':
                if row[3] == '':
                    author = row[2]
                else:
                    #author = "%s <%s>" % (row[2], row[3])
                    author = row[2]
            else:
                author = row[7]
            formatted_time = strutil.get_formatted_time(time.localtime(row[5]))
            message_id = "<%s@%s>" % (row[0], group_name)
            line_count = len(row[6].split('\n'))
            xref = 'Xref: %s %s:%s' % (settings.nntp_hostname, group_name, row[0])
            # Replies reference the topic's first post; first posts get none.
            if row[8] != row[0]:
                reference = "<%s@%s>" % (row[8], group_name)
            else:
                reference = ""
            # message_number <tab> subject <tab> author <tab> date <tab> message_id <tab> reference <tab> bytes <tab> lines <tab> xref
            overviews.append("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (row[0], row[4], author, formatted_time, message_id, reference, len(strutil.format_body(row[6])), line_count, xref))
        return "\r\n".join(overviews)
def get_XPAT(self, group_name, header, pattern, start_id, end_id='ggg'):
# XXX: need to actually check for the header values being passed as
# XXX: not all header names map to column names on the tables
forum_id = self.get_forum(group_name)
prefix = settings.phpbb_table_prefix
stmt = """
SELECT
A.post_id,
A.topic_id,
C.username,
C.user_email,
CASE WHEN A.post_subject = '' THEN CONCAT('Re: ', D.topic_title) ELSE A.post_subject END,
A.post_time,
A.post_text,
A.post_username
FROM
%sposts A
LEFT JOIN
%susers C
ON
A.poster_id=C.user_id
LEFT JOIN
%stopics D
ON
A.topic_id = D.topic_id
WHERE
(A.forum_id=%s OR A.forum_id=0) AND
%s REGEXP '%s' AND
A.post_id >= %s""" % (prefix, prefix, prefix, forum_id, header, strutil.format_wildcards(pattern), start_id)
if end_id != 'ggg':
stmt = "%s AND A.post_id <= %s" % (stmt, end_id)
num_rows = self.query(stmt)
if num_rows == 0:
return None
result = list(self.cursor.fetchall())
hdrs = []
for row in result:
if header.upper() == 'SUBJECT':
hdrs.append('%s %s' % (row[0], row[4]))
elif header.upper() == 'FROM':
if row[7] == '':
if row[3] == '':
author = row[2]
else:
#author = "%s <%s>" % (row[2], row[3])
author = row[2]
else:
author = row[7]
# XXX: totally broken with empty values for the email address
hdrs.append('%s %s' % (row[0], author))
elif header.upper() == 'DATE':
hdrs.append('%s %s' % (row[0], strutil.get_formatted_time(time.localtime(result[5]))))
elif header.upper() == 'MESSAGE-ID':
hdrs.append('%s <%s@%s>' % (row[0], row[0], group_name))
elif (header.upper() == 'REFERENCES') and (row[1] != 0):
hdrs.append('%s <%s@%s>' % (row[0], row[1], group_name))
elif header.upper() == 'BYTES':
hdrs.append('%s %s' % (row[0], len(row[6])))
elif header.upper() == 'LINES':
hdrs.append('%s %s' % (row[0], len(row[6].split('\n'))))
elif header.upper() == 'XREF':
hdrs.append('%s %s %s:%s' % (row[0], settings.nntp_hostname, group_name, row[0]))
if len(hdrs) == 0:
return ""
else:
return "\r\n".join(hdrs)
def get_LISTGROUP(self, group_name):
forum_id = self.get_forum(group_name)
stmt = """
SELECT
post_id
FROM
%sposts
WHERE
(forum_id=%s OR forum_id=0)
ORDER BY
post_id ASC""" % (settings.phpbb_table_prefix, forum_id)
self.query(stmt)
result = list(self.cursor.fetchall())
return "\r\n".join(["%s" % k for k in result])
def get_XGTITLE(self, pattern=None):
stmt = """
SELECT
nntp_group_name,
forum_desc
FROM
%sforums
WHERE
LENGTH(nntp_group_name) > 0""" % (settings.phpbb_table_prefix)
if pattern != None:
stmt = stmt + """ AND
nntp_group_name REGEXP '%s'""" % (strutil.format_wildcards(pattern))
stmt = stmt + """
ORDER BY
nntp_group_name ASC"""
self.query(stmt)
result = list(self.cursor.fetchall())
return "\r\n".join(["%s %s" % (k, v) for k, v in result])
def get_XHDR(self, group_name, header, style, range):
forum_id = self.get_forum(group_name)
prefix = settings.phpbb_table_prefix
stmt = """
SELECT
A.post_id,
A.topic_id,
D.username,
D.user_email,
CASE WHEN A.post_subject = '' THEN CONCAT('Re: ', C.topic_title) ELSE A.post_subject END,
A.post_time,
A.post_text,
A.post_username
FROM
%sposts A
LEFT JOIN
%stopics C
ON
A.topic_id = C.topic_id
LEFT JOIN
%susers D
ON
A.poster_id=D.user_id
WHERE
(A.forum_id=%s OR A.forum_id=0) AND
""" % (prefix, prefix, prefix, forum_id)
if style == 'range':
stmt = '%s A.post_id >= %s' % (stmt, range[0])
if len(range) == 2:
stmt = '%s AND A.post_id <= %s' % (stmt, range[1])
else:
stmt = '%s A.post_id = %s' % (stmt, range[0])
if self.query(stmt) == 0:
return None
result = self.cursor.fetchall()
hdrs = []
for row in result:
if header.upper() == 'SUBJECT':
hdrs.append('%s %s' % (row[0], row[4]))
elif header.upper() == 'FROM':
if row[7] == '':
if row[3] == '':
author = row[2]
else:
#author = "%s <%s>" % (row[2], row[3])
author = row[2]
else:
author = row[7]
hdrs.append('%s %s' % (row[0], author))
elif header.upper() == 'DATE':
hdrs.append('%s %s' % (row[0], strutil.get_formatted_time(time.localtime(result[5]))))
elif header.upper() == 'MESSAGE-ID':
hdrs.append('%s <%s@%s>' % (row[0], row[0], group_name))
elif (header.upper() == 'REFERENCES') and (row[1] != 0):
hdrs.append('%s <%s@%s>' % (row[0], row[1], group_name))
elif header.upper() == 'BYTES':
hdrs.append('%s %s' % (row[0], len(row[6])))
elif header.upper() == 'LINES':
hdrs.append('%s %s' % (row[0], len(row[6].split('\n'))))
elif header.upper() == 'XREF':
hdrs.append('%s %s %s:%s' % (row[0], settings.nntp_hostname, group_name, row[0]))
if len(hdrs) == 0:
return ""
else:
return "\r\n".join(hdrs)
def do_POST(self, group_name, lines, ip_address, username=''):
forum_id = self.get_forum(group_name)
prefix = settings.phpbb_table_prefix
# patch by <NAME> <<EMAIL>> to fix the handling of unusual encodings of messages
lines = mime_decode_header(re.sub(q_quote_multiline, "=?\\1?Q?\\2\\3?=", lines))
body = self.get_message_body(lines)
author, email = from_regexp.search(lines, 0).groups()
subject = subject_regexp.search(lines, 0).groups()[0].strip()
# get the authentication information now
if username != '':
stmt = """
SELECT
user_id
FROM
%susers
WHERE
username_clean='%s'""" % (prefix, self.quote_string(username.lower().strip()))
num_rows = self.query(stmt)
if num_rows == 0:
poster_id = 0
post_username = username
else:
poster_id = self.cursor.fetchone()[0]
# use name and email provided by news client
if email!='':
post_username = "%s <%s>" % (author, email)
else:
post_username = author
# post_username = ''
else:
poster_id = 0
if email!='':
post_username = "%s <%s>" % (author, email)
else:
post_username = author
# check if user can post
if self.ip_allowed(ip_address)==0:
return 2
if self.check_permission(forum_id, poster_id, 'f_post')==0:
return 2
postercolor=self.get_poster_color(poster_id)
replying=lines.find('References') != -1
if replying:
# get the 'modifystamp' value from the parent (if any)
references = references_regexp.search(lines, 0).groups()
parent_id, void = references[-1].strip().split('@')
stmt = """
SELECT
topic_id
FROM
%sposts
WHERE
post_id=%s
GROUP BY
post_id""" % (prefix, self.quote_string(parent_id))
num_rows = self.query(stmt)
if num_rows == 0:
return None
thread_id = self.cursor.fetchone()[0]
# check if topic locked
stmt = """
SELECT topic_status
FROM %stopics
WHERE topic_id=%s AND topic_status=0
""" % (prefix, thread_id)
if self.query(stmt) == 0:
# create new topic instead
replying=0
if not replying:
# create a new topic
stmt = """
INSERT INTO
%stopics
(
forum_id,
topic_title,
topic_poster,
topic_time,
topic_status,
topic_type
) VALUES (
%s,
'%s',
%s,
UNIX_TIMESTAMP(),
0,
0
)""" % (prefix, forum_id, | |
<reponame>lucasb-eyer/DeepFried
#!/usr/bin/env python3
import DeepFried.util as _u
import numpy as _np
import theano as _th
import theano.tensor as _T
class StreaMiniOptimizer(object):
    """
    This is an optimizer that works through minibatches of the dataset, each
    minibatch being uploaded onto the GPU each time.
    This is slower than moving the whole dataset on the GPU once and addressing
    each slices of it, but it allows for larger datasets to fit on the GPU as
    well as "infinite" data augmentation.
    """
    def __init__(self, batchsize, model, cost, extra_outs=None, Xnames=[], tnames=[]):
        """
        Initializes the things that are common amongst all streaming minibatch
        optimizers.
        - `batchsize`: The number of samples in a minibatch.
        - `model`: The model. This should be an object with at least:
            - `make_inputs(basename='X')`: a function which returns a list of
                as many symbolic variables of the correct dimensions as the
                model takes as inputs. That's usually just one.
            - `train_exprs(*Xs)`: a function which returns the symbolic
                output(s) of the model, during training, given symbolic model
                input(s) `X`.
            - `params`: an iterable containing all trainable parameters.
        - `cost`: The cost. This should be an object with at least:
            - `make_target(name='')`: a function which returns a symbolic
                variable of the correct dimensions for serving as target.
            - `out_expr(Y, t)`: a function which returns the symbolic cost
                of the output `Y` wrt. the targets `t`.
            - `aggregate_batches(costs)`: a function which returns the
                aggregation of the `costs` of each minibatch.
        - `extra_outs`: A single or a list of extra outputs to compute along
            the way. Each such extra should be an object with both `out_expr`
            and `aggregate_batches` just like described for `cost` above.
        - `Xnames`: Optional list of names to use for input variables. Note
            that this must be exactly as many names as the model has inputs,
            then these names may be used as keyword arguments to `fit_epoch`.
        - `tnames`: The same as `Xnames`, but for target variables.
        """
        # NOTE(review): `Xnames=[]`/`tnames=[]` are mutable defaults; harmless
        # here since they are only unpacked, but `()` would be safer.
        self.model = model
        self.cost = cost
        self.batchsize = batchsize
        # Symbolic input/target variables; tuplize normalizes single values
        # into tuples so the rest of the class can treat them uniformly.
        self.Xs = _u.tuplize(self.model.make_inputs(*Xnames))
        self.targets = _u.tuplize(self.cost.make_target(*tnames))
        self.xtras = _u.tuplize(extra_outs, tuplize_none=True)
        # These two will collect any additional updates that layers may have,
        # for example batch-normalization's statistics collection.
        self.fwd_updates = []
        self.fin_updates = []
        train_expr = _u.tuplize(self.model.train_expr(*self.Xs, fwd_updates=self.fwd_updates, fin_updates=self.fin_updates))
        self.cost_expr = self.cost.out_expr(self.model, train_expr, self.targets)
        # First output is always the cost, followed by any extra outputs.
        self.outs = (self.cost_expr,) + tuple(
            x.out_expr(self.model, train_expr, self.targets) for x in self.xtras
        )
    def _mk_train_fn(self, name, updates, extra_in=None, extra_out=None):
        """ To be used by specializations only. """
        # Compiles the Theano training function: forward pass, backward pass
        # and the subclass-provided parameter `updates` in one call.
        self.fn_train = _th.function(
            inputs=self.Xs + self.targets + _u.tuplize(extra_in, tuplize_none=True),
            outputs=self.outs + _u.tuplize(extra_out, tuplize_none=True),
            updates=updates + self.fwd_updates,
            name=name
        )
        # `fn_finalize` only exists when some layer registered fin_updates;
        # `finalize` below checks the same condition before using it.
        if len(self.fin_updates):
            # Because targets might or might not be used by the layers in the
            # extra update rules, we'll just allow for unused inputs.
            self.fn_finalize = _th.function(
                inputs=self.Xs + self.targets,
                updates=self.fin_updates,
                name=name + " finalize",
                on_unused_input='ignore'
            )
    def reinit(self):
        """
        This will reinitialize any state (such as momentum) that may be kept by
        this optimizer.
        """
        # Base class keeps no such state; subclasses override as needed.
        pass
    def fit_epoch(self, X, t, aug=None, batchsize=None, shuf=False, **kwargs):
        """
        Trains the model for one full epoch by iterating through minibatches.
        - `X`: A numpy array or a list of numpy arrays containing the model input(s).
            The first dimension of an input should be the datapoints,
            i.e. X.shape[0] == ndata,
            and any remaining dimensions should fit the model's expected input shape(s).
        - `t`: The target values where the first dimension should be the
            datapoints, just like for `X`.
        - `aug`: An optional data augmentation pipeline that can transform each
            sample in the minibatch individually.
        - `batchsize`: Optionally override the batchsize given at construction.
        - `shuf`: If not False, go through `X` and `t` in lockstep-random order.
            Use `shuf` as rng or seed for the shuffling.
        Any remaining arguments will be passed on to the optimization function;
        this can be used to pass values such as learning-rate, momentum etc.
        """
        self.model.pre_epoch()
        costs = []
        xtras = []
        # Sanitize inputs for more flexibility.
        Xs = _u.tuplize(X)
        ts = _u.tuplize(t)
        bs = batchsize or self.batchsize
        N = Xs[0].shape[0]
        assert all(X.shape[0] == N for X in Xs), "All inputs to fit_epoch should contain the same amount of datapoints."
        assert all(t.shape[0] == N for t in ts), "All targets to fit_epoch should contain the same amount of datapoints."
        # Keyword arguments for `batched`, for conciseness.
        if shuf is False:
            bxkw = btkw = {}
        else:
            # Two RNGs seeded identically so inputs and targets are shuffled
            # in lockstep (same permutation on both streams).
            common_seed = _u.check_random_state(shuf).randint(2**31)
            bxkw = dict(shuf=_np.random.RandomState(common_seed))
            btkw = dict(shuf=_np.random.RandomState(common_seed))
        # Go through the training in minibatches. Note that the last batch
        # may be smaller than the batchsize.
        for bxs, bts in zip(_u.batched(bs, *Xs, **bxkw), _u.batched(bs, *ts, **btkw)):
            # Possibly need to re-tuplize them because `batched` tries to be
            # smart and not return a tuple if batching a single array.
            bxs = _u.tuplize(bxs)
            bts = _u.tuplize(bts)
            # Potentially generate a new augmentation on-the-fly.
            if aug is not None:
                assert len(bxs) == 1, "Augmentation with multiple inputs not implemented yet. Please open an issue describing the use-case!"
                bx, bts = aug.augbatch_train(bxs[0], *bts)
                bxs = (bx,)
            self.model.pre_minibatch()
            # Uploads to the GPU, does the forward pass,
            # the backward pass *and* the weight updates!
            cost, *xtra = self.fn_train(*bxs+bts, **kwargs)
            # Collect stats over the batches, so we can aggregate.
            costs.append(cost)
            xtras.append(xtra)
            self.model.post_minibatch()
        self.model.post_epoch()
        # Average the stats over the batches.
        return _u.maybetuple((self.cost.aggregate_batches(costs),)
            + tuple(x.aggregate_batches(b) for x, b in zip(self.xtras, zip(*xtras))))
        # The above zip transposes from minibatches of extras to extras of minibatches.
    def finalize(self, X, t, batchsize=None, aug=None, fast=False, **kwargs):
        """
        A forward-pass through the training data, but using only the
        `fin_updates` of layers such as batch-normalization.
        The call is just like that of `fit_epoch`, but a few parameters as well
        as most comments have been omitted.
        """
        # Early-exit if unnecessary.
        # (fn_finalize is only compiled when fin_updates is non-empty.)
        if len(self.fin_updates) == 0:
            return
        bs = batchsize or self.batchsize
        # Ignore that one.
        kwargs.pop('shuf', None)
        self.model.pre_finalize()
        for bxs, bts in zip(_u.batched(bs, *_u.tuplize(X)), _u.batched(bs, *_u.tuplize(t))):
            if aug is not None:
                # Run the finalization pass once per augmented variant.
                for bxs_aug in aug.augbatch_pred(*_u.tuplize(bxs), fast=fast):
                    self.model.finalize_pre_minibatch()
                    self.fn_finalize(*_u.tuplize(bxs_aug)+_u.tuplize(bts), **kwargs)
                    self.model.finalize_post_minibatch()
            else:
                self.model.finalize_pre_minibatch()
                self.fn_finalize(*_u.tuplize(bxs)+_u.tuplize(bts), **kwargs)
                self.model.finalize_post_minibatch()
        self.model.post_finalize()
class StreaMiniSGD(StreaMiniOptimizer):
    """
    Plain stochastic gradient descent on minibatches. One step performs
        p_{e+1} = p_e - lr * ∇p_e
    Additional parameters added to `fit_epoch`:
    - `lrate`: The learning-rate.
    """
    def __init__(self, batchsize, model, cost, *args, **kwargs):
        """
        See `StreaMiniOptimizer` for details on the arguments.
        """
        super(StreaMiniSGD, self).__init__(batchsize, model, cost, *args, **kwargs)
        self.sh_learningrate = _T.scalar('lrate')
        grads = _T.grad(cost=self.cost_expr, wrt=self.model.params)
        # One update rule per parameter: step against its gradient, scaled
        # by the (symbolic, per-call) learning rate.
        updates = []
        for param, grad in zip(self.model.params, grads):
            updates.append((param, param - self.sh_learningrate * grad))
        self._mk_train_fn("StreaMiniSGD train", updates,
                          extra_in=self.sh_learningrate)
class StreaMiniMomentum(StreaMiniOptimizer):
"""
TL;DR: Nesterov allows for larger momentum to be used, making it better.
Very finicky parameter-selection.
Implements both the "Classical Momentum (CM)" and "Nesterov's
Accelerated Gradient (NAG)" which are explained in further detail in
"On the importance of initialization and momentum in deep learning"
But the equation for NAG has been reshuffled by <NAME> in
https://github.com/lisa-lab/pylearn2/pull/136#issuecomment-10381617
for easier implementation in Theano. The updates are:
v_{e+1} = mom * v_e - lr * ∇p_e
p_{e+1} = p_e + v_{e+1}
for CM, and
p_{e+1} = p_e + mom * v_{e+1} - lr * ∇p_e
for Nicolas' reformulated NAG.
Additional parameters added to `fit_epoch`:
- `lrate`: The learning-rate.
- `momentum`: The momentum, defaulting to the one passed at construction.
"""
def __init__(self, batchsize, model, cost, momentum, nesterov=False, *args, **kwargs):
"""
See `StreaMiniOptimizer` for details on the arguments.
- `momentum`: The amount of momentum to use, typically something around
0.9, 0.95 or 0.99. This value sets the default, but it can also
be overridden in each individual call to `fit_epoch`.
- `nesterov`: If `True`, Nesterov's momentum (NAG) is used instead
of classical momentum (CM).
"""
super(StreaMiniMomentum, self).__init__(batchsize, model, cost, *args, **kwargs)
self.sh_learningrate = _T.scalar('lrate')
self.sh_momentum = _T.scalar('momentum')
# For momentum, we need a "mirror" of each parameter, which keeps track
# of the "velocity" of that parameter during training.
self.sh_v = [
_th.shared(_np.zeros_like(p.get_value()), broadcastable=p.broadcastable, name='v_'+p.name)
for p in model.params
]
g = _T.grad(cost=self.cost_expr, wrt=self.model.params)
updates = []
for sh_p, gp, sh_v in zip(self.model.params, g, self.sh_v):
v = | |
driver_add(self):
'''
'''
pass
    def driver_remove(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def get(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def is_extended(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def is_property_hidden(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def is_property_overridable_library(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def is_property_readonly(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def is_property_set(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def items(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def keyframe_delete(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def keyframe_insert(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def keys(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def path_from_id(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def path_resolve(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def pop(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def prepend(self, draw_func):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def property_overridable_library_set(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def property_unset(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def remove(self, draw_func):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def type_recast(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
    def values(self):
        '''No-op stub (native Blender implementation not visible here).'''
        pass
class VIEW3D_MT_edit_mesh_edges(bpy_types.Menu, bpy_types._GenericUI):
    '''Auto-generated API stub for a 3D-viewport edit-mesh "Edges" menu
    (name suggests so — confirm against Blender). All methods below are
    no-op placeholders; the real implementations are provided natively by
    Blender at runtime and are not visible here.
    '''
    bl_label = None
    ''' '''
    bl_rna = None
    ''' '''
    id_data = None
    ''' '''
    def append(self, draw_func):
        '''No-op stub.'''
        pass
    def as_pointer(self):
        '''No-op stub.'''
        pass
    def bl_rna_get_subclass(self):
        '''No-op stub.'''
        pass
    def bl_rna_get_subclass_py(self):
        '''No-op stub.'''
        pass
    def draw(self, _context):
        '''No-op stub.'''
        pass
    def draw_collapsible(self, context, layout):
        '''No-op stub.'''
        pass
    def draw_preset(self, _context):
        '''No-op stub.'''
        pass
    def driver_add(self):
        '''No-op stub.'''
        pass
    def driver_remove(self):
        '''No-op stub.'''
        pass
    def get(self):
        '''No-op stub.'''
        pass
    def is_extended(self):
        '''No-op stub.'''
        pass
    def is_property_hidden(self):
        '''No-op stub.'''
        pass
    def is_property_overridable_library(self):
        '''No-op stub.'''
        pass
    def is_property_readonly(self):
        '''No-op stub.'''
        pass
    def is_property_set(self):
        '''No-op stub.'''
        pass
    def items(self):
        '''No-op stub.'''
        pass
    def keyframe_delete(self):
        '''No-op stub.'''
        pass
    def keyframe_insert(self):
        '''No-op stub.'''
        pass
    def keys(self):
        '''No-op stub.'''
        pass
    def path_from_id(self):
        '''No-op stub.'''
        pass
    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''No-op stub.'''
        pass
    def path_resolve(self):
        '''No-op stub.'''
        pass
    def pop(self):
        '''No-op stub.'''
        pass
    def prepend(self, draw_func):
        '''No-op stub.'''
        pass
    def property_overridable_library_set(self):
        '''No-op stub.'''
        pass
    def property_unset(self):
        '''No-op stub.'''
        pass
    def remove(self, draw_func):
        '''No-op stub.'''
        pass
    def type_recast(self):
        '''No-op stub.'''
        pass
    def values(self):
        '''No-op stub.'''
        pass
class VIEW3D_MT_edit_mesh_edges_data(bpy_types.Menu, bpy_types._GenericUI):
    '''Auto-generated API stub for the edit-mesh "Edge Data" menu (name
    suggests so — confirm against Blender). All methods below are no-op
    placeholders; the real implementations are provided natively by
    Blender at runtime and are not visible here.
    '''
    bl_label = None
    ''' '''
    bl_rna = None
    ''' '''
    id_data = None
    ''' '''
    def append(self, draw_func):
        '''No-op stub.'''
        pass
    def as_pointer(self):
        '''No-op stub.'''
        pass
    def bl_rna_get_subclass(self):
        '''No-op stub.'''
        pass
    def bl_rna_get_subclass_py(self):
        '''No-op stub.'''
        pass
    def draw(self, context):
        '''No-op stub.'''
        pass
    def draw_collapsible(self, context, layout):
        '''No-op stub.'''
        pass
    def draw_preset(self, _context):
        '''No-op stub.'''
        pass
    def driver_add(self):
        '''No-op stub.'''
        pass
    def driver_remove(self):
        '''No-op stub.'''
        pass
    def get(self):
        '''No-op stub.'''
        pass
    def is_extended(self):
        '''No-op stub.'''
        pass
    def is_property_hidden(self):
        '''No-op stub.'''
        pass
    def is_property_overridable_library(self):
        '''No-op stub.'''
        pass
    def is_property_readonly(self):
        '''No-op stub.'''
        pass
    def is_property_set(self):
        '''No-op stub.'''
        pass
    def items(self):
        '''No-op stub.'''
        pass
    def keyframe_delete(self):
        '''No-op stub.'''
        pass
    def keyframe_insert(self):
        '''No-op stub.'''
        pass
    def keys(self):
        '''No-op stub.'''
        pass
    def path_from_id(self):
        '''No-op stub.'''
        pass
    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''No-op stub.'''
        pass
    def path_resolve(self):
        '''No-op stub.'''
        pass
    def pop(self):
        '''No-op stub.'''
        pass
    def prepend(self, draw_func):
        '''No-op stub.'''
        pass
    def property_overridable_library_set(self):
        '''No-op stub.'''
        pass
    def property_unset(self):
        '''No-op stub.'''
        pass
    def remove(self, draw_func):
        '''No-op stub.'''
        pass
    def type_recast(self):
        '''No-op stub.'''
        pass
    def values(self):
        '''No-op stub.'''
        pass
class VIEW3D_MT_edit_mesh_extrude(bpy_types.Menu, bpy_types._GenericUI):
    '''Auto-generated stub of Blender's edit-mesh "Extrude" menu class.

    All method bodies are ``pass`` placeholders; only the names and
    signatures carry information (generated API skeleton). Note the extra
    ``extrude_options`` hook compared to the sibling menu stubs.
    '''
    bl_label = None
    ''' '''
    bl_rna = None
    ''' '''
    id_data = None
    ''' '''
    def append(self, draw_func):
        '''
        '''
        pass
    def as_pointer(self):
        '''
        '''
        pass
    def bl_rna_get_subclass(self):
        '''
        '''
        pass
    def bl_rna_get_subclass_py(self):
        '''
        '''
        pass
    def draw(self, context):
        '''
        '''
        pass
    def draw_collapsible(self, context, layout):
        '''
        '''
        pass
    def draw_preset(self, _context):
        '''
        '''
        pass
    def driver_add(self):
        '''
        '''
        pass
    def driver_remove(self):
        '''
        '''
        pass
    def extrude_options(self, context):
        '''
        '''
        pass
    def get(self):
        '''
        '''
        pass
    def is_extended(self):
        '''
        '''
        pass
    def is_property_hidden(self):
        '''
        '''
        pass
    def is_property_overridable_library(self):
        '''
        '''
        pass
    def is_property_readonly(self):
        '''
        '''
        pass
    def is_property_set(self):
        '''
        '''
        pass
    def items(self):
        '''
        '''
        pass
    def keyframe_delete(self):
        '''
        '''
        pass
    def keyframe_insert(self):
        '''
        '''
        pass
    def keys(self):
        '''
        '''
        pass
    def path_from_id(self):
        '''
        '''
        pass
    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''
        '''
        pass
    def path_resolve(self):
        '''
        '''
        pass
    def pop(self):
        '''
        '''
        pass
    def prepend(self, draw_func):
        '''
        '''
        pass
    def property_overridable_library_set(self):
        '''
        '''
        pass
    def property_unset(self):
        '''
        '''
        pass
    def remove(self, draw_func):
        '''
        '''
        pass
    def type_recast(self):
        '''
        '''
        pass
    def values(self):
        '''
        '''
        pass
class VIEW3D_MT_edit_mesh_faces(bpy_types.Menu, bpy_types._GenericUI):
    '''Auto-generated stub of Blender's edit-mesh "Faces" menu class.

    All method bodies are ``pass`` placeholders; only the names and
    signatures carry information (generated API skeleton). This is the
    only sibling stub here that also exposes ``bl_idname``.
    '''
    bl_idname = None
    ''' '''
    bl_label = None
    ''' '''
    bl_rna = None
    ''' '''
    id_data = None
    ''' '''
    def append(self, draw_func):
        '''
        '''
        pass
    def as_pointer(self):
        '''
        '''
        pass
    def bl_rna_get_subclass(self):
        '''
        '''
        pass
    def bl_rna_get_subclass_py(self):
        '''
        '''
        pass
    def draw(self, _context):
        '''
        '''
        pass
    def draw_collapsible(self, context, layout):
        '''
        '''
        pass
    def draw_preset(self, _context):
        '''
        '''
        pass
    def driver_add(self):
        '''
        '''
        pass
    def driver_remove(self):
        '''
        '''
        pass
    def get(self):
        '''
        '''
        pass
    def is_extended(self):
        '''
        '''
        pass
    def is_property_hidden(self):
        '''
        '''
        pass
    def is_property_overridable_library(self):
        '''
        '''
        pass
    def is_property_readonly(self):
        '''
        '''
        pass
    def is_property_set(self):
        '''
        '''
        pass
    def items(self):
        '''
        '''
        pass
    def keyframe_delete(self):
        '''
        '''
        pass
    def keyframe_insert(self):
        '''
        '''
        pass
    def keys(self):
        '''
        '''
        pass
    def path_from_id(self):
        '''
        '''
        pass
    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''
        '''
        pass
    def path_resolve(self):
        '''
        '''
        pass
    def pop(self):
        '''
        '''
        pass
    def prepend(self, draw_func):
        '''
        '''
        pass
    def property_overridable_library_set(self):
        '''
        '''
        pass
    def property_unset(self):
        '''
        '''
        pass
    def remove(self, draw_func):
        '''
        '''
        pass
    def type_recast(self):
        '''
        '''
        pass
    def values(self):
        '''
        '''
        pass
class VIEW3D_MT_edit_mesh_faces_data(bpy_types.Menu, bpy_types._GenericUI):
    '''Auto-generated stub of Blender's edit-mesh "Face Data" menu class.

    All method bodies are ``pass`` placeholders; only the names and
    signatures carry information (generated API skeleton). Do not add
    logic here by hand.
    '''
    bl_label = None
    ''' '''
    bl_rna = None
    ''' '''
    id_data = None
    ''' '''
    def append(self, draw_func):
        '''
        '''
        pass
    def as_pointer(self):
        '''
        '''
        pass
    def bl_rna_get_subclass(self):
        '''
        '''
        pass
    def bl_rna_get_subclass_py(self):
        '''
        '''
        pass
    def draw(self, _context):
        '''
        '''
        pass
    def draw_collapsible(self, context, layout):
        '''
        '''
        pass
    def draw_preset(self, _context):
        '''
        '''
        pass
    def driver_add(self):
        '''
        '''
        pass
    def driver_remove(self):
        '''
        '''
        pass
    def get(self):
        '''
        '''
        pass
    def is_extended(self):
        '''
        '''
        pass
    def is_property_hidden(self):
        '''
        '''
        pass
    def is_property_overridable_library(self):
        '''
        '''
        pass
    def is_property_readonly(self):
        '''
        '''
        pass
    def is_property_set(self):
        '''
        '''
        pass
    def items(self):
        '''
        '''
        pass
    def keyframe_delete(self):
        '''
        '''
        pass
    def keyframe_insert(self):
        '''
        '''
        pass
    def keys(self):
        '''
        '''
        pass
    def path_from_id(self):
        '''
        '''
        pass
    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''
        '''
        pass
    def path_resolve(self):
        '''
        '''
        pass
    def pop(self):
        '''
        '''
        pass
    def prepend(self, draw_func):
        '''
        '''
        pass
    def property_overridable_library_set(self):
        '''
        '''
        pass
    def property_unset(self):
        '''
        '''
        pass
    def remove(self, draw_func):
        '''
        '''
        pass
    def type_recast(self):
        '''
        '''
        pass
    def values(self):
        '''
        '''
        pass
class VIEW3D_MT_edit_mesh_merge(bpy_types.Menu, bpy_types._GenericUI):
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, _context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
''' | |
<filename>deprecated/PPO_network.py
import random
import os
import time
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from collections import deque
from torch.distributions import Categorical
# import matplotlib.pyplot as plt
def convert_state_to_tensor(state):
    '''Wrap one raw observation into a batched float32 tensor of shape (1, 6, 18, 34).'''
    batched = torch.tensor(state, dtype=torch.float32)
    return batched.reshape(1, 6, 18, 34)
class Actor_network(nn.Module):
    '''Two-headed policy network.

    A conv stack encodes the (6, 18, 34) observation; score and time are
    concatenated to the flattened features and pushed through a small MLP
    trunk. Each sample is then routed to one of two softmax heads
    (agent 0 -> agent1, otherwise agent2).

    NOTE(review): forward() concatenates every sample's head output into ONE
    flat vector and wraps it in a single Categorical; for batches > 1 this is
    one distribution over all concatenated entries rather than one per row --
    behavior preserved as-is.
    '''
    def __init__(self, num_inputs, num_outputs):
        super(Actor_network, self).__init__()
        # Layer order must stay identical to saved checkpoints: the
        # Sequential state-dict keys are positional.
        self.feature_extraction2 = nn.Sequential(
            nn.Conv2d(6, 18, 3, stride=1),
            nn.Conv2d(18, 18, 3, stride=1),
            nn.MaxPool2d(2, 2),
            nn.Conv2d(18, 18, 3, stride=1),
            nn.Conv2d(18, 18, 3, stride=1),
            nn.AvgPool2d(2, 2),
            nn.Flatten()
        )
        self.shared_weights = nn.Sequential(
            nn.Linear(num_inputs, 64),
            nn.Tanh(),
            nn.Linear(64, 64),
            nn.Tanh()
        )
        self.agent1 = nn.Sequential(
            nn.Linear(64, num_outputs),
            nn.Softmax(dim=0)
        )
        self.agent2 = nn.Sequential(
            nn.Linear(64, num_outputs),
            nn.Softmax(dim=0)
        )
    def forward(self, states, scores, times, agents):
        '''Return a Categorical over the concatenated per-sample head outputs.'''
        conv_features = self.feature_extraction2(states)
        trunk_in = torch.cat((conv_features, scores, times), dim=1)
        shared = self.shared_weights(trunk_in)
        per_sample = []
        for row, agent_id in enumerate(agents):
            head = self.agent1 if agent_id == 0 else self.agent2
            per_sample.append(head(shared[row]))
        if per_sample:
            outputs = torch.cat(per_sample, 0)
        else:
            outputs = torch.tensor([])
        return Categorical(outputs)
class Critic_network(nn.Module):
    '''State-value network.

    The same conv stack as the actor encodes the (6, 18, 34) observation;
    the flattened features, score and time feed a tanh MLP producing a
    single value estimate per sample.
    '''
    def __init__(self, num_inputs):
        super(Critic_network, self).__init__()
        # Layer order must stay identical to saved checkpoints: the
        # Sequential state-dict keys are positional.
        self.feature_extraction2 = nn.Sequential(
            nn.Conv2d(6, 18, 3, stride=1),
            nn.Conv2d(18, 18, 3, stride=1),
            nn.MaxPool2d(2, 2),
            nn.Conv2d(18, 18, 3, stride=1),
            nn.Conv2d(18, 18, 3, stride=1),
            nn.AvgPool2d(2, 2),
            nn.Flatten()
        )
        self.critic = nn.Sequential(
            nn.Linear(num_inputs, 64),
            nn.Tanh(),
            nn.Linear(64, 64),
            nn.Tanh(),
            nn.Linear(64, 1)
        )
    def forward(self, states, scores, times):
        '''Return V(s) for a batch of (already normalized) inputs.'''
        conv_features = self.feature_extraction2(states)
        full_input = torch.cat((conv_features, scores, times), dim=1)
        return self.critic(full_input)
class ExperienceReplayBuffer(object):
    '''Bounded FIFO store of experience tuples.

    Each entry is a 7-tuple: (state, score, time, action, agent, done, reward);
    the oldest entry is evicted once ``maximum_length`` is reached.
    '''
    def __init__(self, maximum_length=1000):
        self.buffer = deque(maxlen=maximum_length)
    def append(self, experience):
        '''Add one experience tuple.'''
        self.buffer.append(experience)
    def __len__(self):
        return len(self.buffer)
    def change_last_reward(self, reward):
        '''Rewrite the newest entry's (done, reward) tail as (True, reward).'''
        head = self.buffer[-1][:-2]
        self.buffer[-1] = (*head, True, reward)
    def all_samples(self):
        '''Return the whole buffer as stacked column tensors (states stay a tuple).'''
        states, scores, times, actions, agents, dones, rewards = zip(*list(self.buffer))
        rewards = torch.from_numpy(np.vstack(rewards)).float()
        actions = torch.from_numpy(np.vstack(actions)).float()
        scores = torch.from_numpy(np.vstack(scores)).float()
        times = torch.from_numpy(np.vstack(times)).float()
        agents = torch.from_numpy(np.vstack(agents)).int()
        dones = torch.from_numpy(np.vstack(dones).astype(np.uint8)).float()
        return states, scores, times, actions, agents, dones, rewards
class PPO:
    def __init__(self, training_agent=False):
        """Set up PPO hyper-parameters, running-normalization state, and networks.

        :param training_agent: when True, load_weights() restores the newest
            opponent snapshot from ``past_agents_2/`` instead of the current
            ``neural-network_2.pth``.
        """
        self.discount_factor = 0.99     # gamma used for returns / GAE deltas
        self.GAE_gamma = 0.95           # GAE lambda (named "gamma" here)
        self.epsilon = 0.2              # presumably the PPO clip range; usage not in view
        # self.num_steps = 4 # 8
        self.exp_to_learn = 1000        # buffered experiences required before a training pass
        self.step_count = 0
        self.steps_per_game = []
        self.ppo_epochs = 10
        self.minibatch_size = 32
        self.action_dim = 5
        self.state_dim = 92             # trunk input size: conv features + score + time
        self.buffer_size = 4000
        self.lr_actor = 1e-5
        self.lr_critic = 3e-4
        self.c2 = 0.001 # Exploration
        self.input_clipping = 10        # normalized inputs clipped to [-10, 10]
        self.buffer = ExperienceReplayBuffer(maximum_length=self.buffer_size)
        self.training_agent = training_agent
        self.reward_count = 0
        self.rewards_games = []
        # Running EMA moments used to standardize critic targets.
        self.target_value_mean = 0.0
        self.target_value_squared_mean = 0.0
        self.target_value_std = 0.0
        self.training_samples = 0
        # Running EMA moments used to standardize observations, score and time.
        self.observation_mean = np.zeros(shape=(6, 18, 34))
        self.observation_squared_mean = np.zeros(shape=(6, 18, 34))
        self.time_mean = self.score_mean = self.time_squared_mean = self.score_squared_mean = 0
        self.next_state = None
        self.initialize_networks()
    def initialize_networks(self):
        """Create fresh actor/critic networks with their Adam optimizers,
        then attempt to restore previously saved weights from disk."""
        self.actor_network = Actor_network(self.state_dim, self.action_dim)
        self.actor_optimizer = optim.Adam(self.actor_network.parameters(), lr=self.lr_actor)
        self.critic_network = Critic_network(self.state_dim)
        self.critic_optimizer = optim.Adam(self.critic_network.parameters(), lr=self.lr_critic)
        print()
        self.load_weights()
        print()
def load_weights(self):
try:
file = 'neural-network_2.pth'
if self.training_agent:
agent_options = os.listdir('past_agents_2')
# file = random.choice(agent_options)
file = agent_options[-1]
file = 'past_agents_2/' + file
checkpoint = torch.load(file)
self.actor_network.load_state_dict(checkpoint['network_actor_state_dict'])
self.critic_network.load_state_dict(checkpoint['network_critic_state_dict'])
self.actor_optimizer.load_state_dict(checkpoint['optimizer_actor_state_dict'])
self.critic_optimizer.load_state_dict(checkpoint['optimizer_critic_state_dict'])
self.target_value_mean, self.target_value_squared_mean, self.target_value_std,\
self.observation_mean, self.observation_squared_mean, self.time_mean, self.score_mean,\
self.time_squared_mean, self.score_squared_mean, self.training_samples = checkpoint['previous_info']
print("Loaded previous model")
except:
print("Error loading model")
def save_weights(self):
try:
previous_info = [self.target_value_mean, self.target_value_squared_mean, self.target_value_std,
self.observation_mean, self.observation_squared_mean, self.time_mean,
self.score_mean, self.time_squared_mean, self.score_squared_mean, self.training_samples]
torch.save({
'network_actor_state_dict': self.actor_network.state_dict(),
'network_critic_state_dict': self.critic_network.state_dict(),
'optimizer_actor_state_dict': self.actor_optimizer.state_dict(),
'optimizer_critic_state_dict': self.critic_optimizer.state_dict(),
'previous_info': previous_info
}, 'neural-network_2.pth')
# torch.save({
# 'network_actor_state_dict': self.actor_network.state_dict(),
# 'network_critic_state_dict': self.critic_network.state_dict(),
# 'optimizer_actor_state_dict': self.actor_optimizer.state_dict(),
# 'optimizer_critic_state_dict': self.critic_optimizer.state_dict(),
# 'previous_info': previous_info
# }, 'past_agents_2/neural-network_' + str(time.time()) + '.pth')
# print("Model saved")
except:
print("Error saving the model")
def compute_action(self, state, l, agent):
state_rep, score, time_ = state
state_rep, score, time_ = self.normalize_state(state_rep, score, time_)
state_rep = torch.reshape(state_rep, (1, 6, 18, 34))
score = torch.reshape(score, (-1, 1))
time_ = torch.reshape(time_, (-1, 1))
dist = self.actor_network.forward(state_rep, score, time_, [agent])
action = int(dist.sample().numpy())
if action not in l:
return action, 100
return action, None
def last_experience_reward(self, reward):
self.buffer.change_last_reward(reward)
    def store_experience(self, exp):
        """Buffer one transition and trigger a PPO update when enough data is queued.

        ``exp`` layout (matching ExperienceReplayBuffer.all_samples plus the
        successor state): (state, score, time, action, agent, done, reward,
        next_state). Only ``exp[:-1]`` is stored; ``exp[-1]`` is kept as the
        bootstrap state for GAE.
        """
        self.training_samples += 1
        self.step_count += 1
        self.reward_count += exp[-2]     # accumulate reward of the running game
        self.buffer.append(exp[:-1])     # store everything except next_state
        self.next_state = exp[-1]
        if exp[-3]:                      # done flag: episode finished
            self.steps_per_game.append(self.step_count)
            # print("Current game ", len(self.steps_per_game))
            self.step_count = 0
            self.rewards_games.append(self.reward_count)
            self.reward_count = 0
            # Train only on episode boundaries, once the buffer is large enough.
            if len(self.buffer) >= self.exp_to_learn:
                self.mean_rewards = np.mean(self.rewards_games[-20:])
                print("Game - %d, Reward - %.2f " % (len(self.steps_per_game), self.mean_rewards), end='\r')
                self.train()
            # if len(self.steps_per_game)%2==0: self.save_weights()
            if len(self.steps_per_game)%100 == 0:
                self.save_weights()
            # if len(self.steps_per_game)==50:
            #     plt.plot(self.rewards_games)
            #     plt.show()
def compute_target_value(self, rewards):
y = []
start_idx = 0
for t in self.steps_per_game:
temp_y = [
np.sum([self.discount_factor ** (n - e) * rewards[n] for n in range(e + start_idx, t + start_idx)]) for
e in range(start_idx, t + start_idx)]
start_idx += t
y += temp_y
y = torch.tensor([y], requires_grad=False, dtype=torch.float32)
y = torch.reshape(y, (-1, 1))
y = self.normalize_target_value(y)
return y
def normalize_state(self, state, score, time_):
observation_std = (self.observation_squared_mean - self.observation_mean ** 2) ** 0.5
time_std = (self.time_squared_mean - self.time_mean ** 2) ** 0.5
score_std = (self.score_squared_mean - self.score_mean ** 2) ** 0.5
digital_state = (state - self.observation_mean) / np.clip(observation_std, a_min=1e-6, a_max=None)
score = (score - self.score_mean) / max(score_std, 1e-6)
time_ = (time_ - self.time_mean) / max(time_std, 1e-6)
digital_state = np.clip(digital_state, a_min=-self.input_clipping, a_max=self.input_clipping)
score = float(np.clip(score, a_min=-self.input_clipping, a_max=self.input_clipping))
time_ = float(np.clip(time_, a_min=-self.input_clipping, a_max=self.input_clipping))
digital_state = convert_state_to_tensor(digital_state)
time_ = torch.tensor(time_, dtype=torch.float32)
score = torch.tensor(score, dtype=torch.float32)
return digital_state, score, time_
    def compute_gae(self, values, rewards, dones):
        """Generalized Advantage Estimation targets.

        Bootstraps from the critic's value of the stored ``self.next_state``,
        then runs the standard backward recursion:
        delta_t = r_t + gamma * V(s_{t+1}) * mask_t - V(s_t);
        A_t = delta_t + gamma * lambda * mask_t * A_{t+1};
        target_t = A_t + V(s_t), with mask_t = 0 at episode ends.

        NOTE(review): mutates ``self.next_state`` in place (replaces the raw
        triple with its normalized tensors).
        """
        self.next_state = self.normalize_state(self.next_state[0], self.next_state[1], self.next_state[2])
        state_rep = torch.reshape(self.next_state[0], (1, 6, 18, 34))
        score = torch.reshape(self.next_state[1], (-1, 1))
        time_ = torch.reshape(self.next_state[2], (-1, 1))
        # Bootstrap value of the state after the last buffered transition,
        # mapped back to the un-normalized return scale.
        next_value = self.critic_network(state_rep, score, time_)
        next_value = self.de_normalize_target_value(next_value)
        masks = 1 - np.array(dones)  # 0 where an episode terminated
        values = torch.cat((values, next_value), 0).detach().numpy()
        rewards = rewards.numpy()
        gae = 0
        ys = np.zeros(len(rewards))
        for step in reversed(range(len(rewards))):
            delta = rewards[step] + self.discount_factor * values[step + 1] * masks[step] - values[step]
            gae = delta + self.discount_factor * self.GAE_gamma * masks[step] * gae
            ys[step] = gae + values[step]
        ys = torch.tensor(ys)
        ys = torch.reshape(ys, (-1, 1))
        return ys
def normalize_target_value(self, y):
percentage = (len(y)/self.training_samples)
self.target_value_mean = self.target_value_mean*(1-percentage) + y.mean() * percentage
self.target_value_squared_mean = self.target_value_squared_mean*(1-percentage) + torch.square(y).mean() * percentage
self.target_value_std = torch.clamp(torch.sqrt(self.target_value_squared_mean - torch.square(self.target_value_mean)), min=1e-6)
y = (y-self.target_value_mean)/self.target_value_std
return y
def normalize_value_functions(self, value_functions):
return (value_functions - self.target_value_mean) / self.target_value_std
def de_normalize_target_value(self, y):
y = y*self.target_value_std+self.target_value_mean
return y
def normalize_all_observation(self, digital_state, score, time_):
percentage = len(time_) / self.training_samples
self.observation_mean = self.observation_mean * (1 - percentage) + np.average(digital_state, axis=0) * percentage
self.observation_squared_mean = self.observation_squared_mean * (1 - percentage) + np.average(np.square(digital_state), axis=0) * percentage
self.time_mean = self.time_mean * (1 - percentage) + np.average(time_) * percentage
self.time_squared_mean = self.time_squared_mean * (1 - percentage) + np.average(np.square(time_)) * percentage
self.score_mean = self.score_mean * (1 - percentage) + np.average(score) * percentage
self.score_squared_mean = self.score_squared_mean * (1 - percentage) + np.average(np.square(score)) * percentage
observation_std = (self.observation_squared_mean - self.observation_mean ** 2) ** 0.5
time_std = (self.time_squared_mean - self.time_mean ** 2) ** 0.5
score_std = (self.score_squared_mean - self.score_mean ** 2) ** 0.5
digital_state = (np.array(digital_state) - self.observation_mean) / np.clip(observation_std, a_min=1e-6, a_max=None)
score = (np.array(score) - self.score_mean) / max(score_std, 1e-6)
time_ = (np.array(time_) - self.time_mean) / max(time_std, 1e-6)
digital_state = np.clip(digital_state, a_min=-self.input_clipping, a_max=self.input_clipping)
score = np.clip(score, a_min=-self.input_clipping, a_max=self.input_clipping)
time_ = np.clip(time_, a_min=-self.input_clipping, a_max=self.input_clipping)
time_ = time_.astype('float32')
score = score.astype('float32')
time_ = torch.tensor(time_, dtype=torch.float32)
score = torch.tensor(score, dtype=torch.float32)
states = torch.tensor([])
for i in range(len(digital_state)):
states = torch.cat((states, convert_state_to_tensor(digital_state[i])), 0)
return states, score, time_
    def train(self):
        """One PPO training pass over everything currently buffered.

        Pipeline: normalize the batch -> critic values and frozen ("old")
        action log-probs -> map values back to raw scale -> GAE targets ->
        re-standardize targets -> advantages = standardized targets -
        standardized values -> clipped-PPO minibatch updates -> reset the
        buffer (PPO is on-policy, old experiences are discarded).
        """
        states, scores, times, actions, agents, dones, rewards = self.buffer.all_samples()
        states, scores, times = self.normalize_all_observation(states, scores, times)
        value_functions = self.critic_network(states, scores, times)
        value_functions = torch.reshape(value_functions, (-1, 1))
        # Log-probs of the behavior policy, frozen for the PPO probability ratio.
        old_log_probs = self.actor_network(states, scores, times, agents).log_prob(actions)
        old_log_probs = torch.reshape(old_log_probs, (-1, 1))
        value_functions = self.de_normalize_target_value(value_functions)
        y = self.compute_gae(value_functions, rewards, dones)
        y = self.normalize_target_value(y)
        y = y.detach()
        old_log_probs = old_log_probs.detach()
        value_functions = value_functions.detach()
        value_functions = self.normalize_value_functions(value_functions)
        advantage_estimation = y - value_functions
        self.ppo_update_split(states, scores, times, actions, agents, old_log_probs, y, advantage_estimation)
        self.buffer = ExperienceReplayBuffer(maximum_length=self.buffer_size)
def ppo_iter(self, states, scores, times, actions, agents, log_probs, ys, advantage):
batch_size = len(states)
for _ in range(batch_size // self.minibatch_size):
rand_ids = np.random.randint(0, batch_size, self.minibatch_size)
yield states[rand_ids, :], scores[rand_ids, :], times[rand_ids, :], actions[rand_ids, :], agents[rand_ids, :], \
log_probs[rand_ids, :], ys[rand_ids, :], advantage[rand_ids, :]
def ppo_update_split(self, states, scores, times, actions, agents, log_probs, ys, advantages):
# actor_loss = 0
# critic_loss = 0
for _ in range(self.ppo_epochs):
for state_, score_, time_, action_, agent_, old_log_prob_, y_, advantage_ in self.ppo_iter(states, scores, times, actions, agents,
log_probs,
ys,
advantages):
value_ = self.critic_network(state_, score_, time_)
value_ = torch.reshape(value_, (-1, 1))
| |
<filename>tests/ethereum/EVM/test_EVMEXP.py
import struct
import unittest
import json
from manticore.platforms import evm
from manticore.core import state
from manticore.core.smtlib import Operators, ConstraintSet
import os
class EVMTest_EXP(unittest.TestCase):
_multiprocess_can_split_ = True
maxDiff = None
    def _execute(self, new_vm):
        """Step the VM once and translate a terminating exception into a status.

        Returns ``(status, returned_data)`` where status is None when execution
        simply continued, otherwise one of "STOP", "OOG", "INSUFFICIENT STACK",
        "INVALID", "SUICIDED", "RETURN" (with ``e.data`` captured) or "REVERT".
        """
        last_returned = None
        last_exception = None
        try:
            new_vm.execute()
        except evm.Stop as e:
            last_exception = "STOP"
        except evm.NotEnoughGas:
            last_exception = "OOG"
        except evm.StackUnderflow:
            last_exception = "INSUFFICIENT STACK"
        except evm.InvalidOpcode:
            last_exception = "INVALID"
        except evm.SelfDestruct:
            last_exception = "SUICIDED"
        except evm.Return as e:
            last_exception = "RETURN"
            last_returned = e.data
        except evm.Revert:
            last_exception = "REVERT"
        return last_exception, last_returned
def test_EXP_1(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[115792089237316195423570985008687907853269984665640564039457584007913129639935],
)
def test_EXP_2(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_3(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_4(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_5(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[104454113832828984621679659393253883542637298667129925477260695573804969029359],
)
def test_EXP_6(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_7(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_8(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_9(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_10(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_11(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_12(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_13(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_14(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_15(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_16(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, | |
# <gh_stars>1-10  (scraper artifact, commented out so the module stays importable)
# coding=utf-8
# Copyright 2022 The Fiddle-Config Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for fiddle.diff."""
import copy
import dataclasses
from typing import Any
from absl.testing import absltest
import fiddle as fdl
from fiddle import tagging
from fiddle import testing
from fiddle.experimental import daglish
from fiddle.experimental import diff
# Functions and classes that can be used to build Configs.
@dataclasses.dataclass
class SimpleClass:
    """Three-field dataclass used as a fdl.Config / fdl.Partial target in tests."""
    x: Any
    y: Any
    z: Any
@dataclasses.dataclass
class AnotherClass:
    """Four-field dataclass whose arg names deliberately differ from SimpleClass."""
    x: Any
    y: Any
    a: Any
    b: Any
def make_pair(first, second):
    """Return the two arguments bundled into a 2-tuple (test Config target)."""
    pair = first, second
    return pair
def make_triple(first, second, third):
    """Return the three arguments bundled into a 3-tuple (test Config target)."""
    triple = first, second, third
    return triple
def basic_fn(arg1, arg2, kwarg1=0, kwarg2=None):
    """Combine the arguments into a three-key dict (test Config target)."""
    return dict(a=arg1 + arg2, b=arg2 + kwarg1, c=kwarg2)
class GreenTag(tagging.Tag):
    """Fiddle tag for testing."""
    # NOTE(review): the docstring presumably doubles as the tag's description
    # in fiddle -- keep it when editing.
class BlueTag(tagging.Tag):
    """Fiddle tag for testing."""
    # NOTE(review): the docstring presumably doubles as the tag's description
    # in fiddle -- keep it when editing.
# Helper functions to make expected Paths easier to write (and read).
parse_path = testing.parse_path  # alias: parses a path string like '.foo[1]'
parse_reference = testing.parse_reference  # alias: builds a diff.Reference
@dataclasses.dataclass(frozen=True)
class UnsupportedPathElement(daglish.PathElement):
    """Minimal concrete PathElement used to exercise 'unsupported' handling."""
    # Lambdas satisfy the (presumably abstract) PathElement API with trivial
    # behavior: `code` renders as '<unsupported>' and `follow` is identity.
    code = property(lambda self: '<unsupported>')
    follow = lambda self, container: container
class DiffAlignmentTest(absltest.TestCase):
    """Tests for diff.DiffAlignment: construction, alignment, and errors."""

    def test_constructor(self):
        # A freshly constructed alignment contains no aligned values.
        old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, 3),
                         fdl.Config(basic_fn, 4, 5, 6))
        new = fdl.Config(make_pair, fdl.Config(basic_fn, 1, 2, 3, 4),
                         fdl.Partial(SimpleClass, z=12))
        empty_alignment = diff.DiffAlignment(old, new)
        # No values should be aligned (including the root objects `old` and `new`).
        self.assertEmpty(empty_alignment.aligned_values())
        self.assertEmpty(empty_alignment.aligned_value_ids())
        self.assertFalse(empty_alignment.is_old_value_aligned(old))
        self.assertFalse(empty_alignment.is_new_value_aligned(new))
        self.assertEqual(empty_alignment.old_name, 'old')
        self.assertEqual(empty_alignment.new_name, 'new')
        self.assertEqual(
            repr(empty_alignment),
            "<DiffAlignment from 'old' to 'new': 0 object(s) aligned>")
        self.assertEqual(
            str(empty_alignment), 'DiffAlignment:\n (no objects aligned)')

    def test_align(self):
        old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, [3, 4]),
                         fdl.Config(basic_fn, 5, 6, 7))
        new = fdl.Config(make_pair, fdl.Config(basic_fn, 1, 2, 3, 4),
                         fdl.Partial(SimpleClass, z=[12, 13]))
        alignment = diff.DiffAlignment(old, new)
        alignment.align(old, new)  # Same type, same __fn_or_cls__.
        alignment.align(old.first, new.first)  # Different __fn_or_cls__.
        alignment.align(old.first.z, new.second.z)  # Aligned lists.
        # Alignment must be queryable in both directions.
        self.assertIs(alignment.new_from_old(old), new)
        self.assertIs(alignment.old_from_new(new), old)
        self.assertIs(alignment.new_from_old(old.first), new.first)
        self.assertIs(alignment.old_from_new(new.first), old.first)
        self.assertIs(alignment.new_from_old(old.first.z), new.second.z)
        self.assertIs(alignment.old_from_new(new.second.z), old.first.z)
        with self.subTest('aligned_value_ids'):
            aligned_value_ids = alignment.aligned_value_ids()
            expected_aligned_value_ids = [
                diff.AlignedValueIds(id(old), id(new)),
                diff.AlignedValueIds(id(old.first), id(new.first)),
                diff.AlignedValueIds(id(old.first.z), id(new.second.z)),
            ]
            self.assertCountEqual(aligned_value_ids, expected_aligned_value_ids)
        with self.subTest('aligned_values'):
            aligned_values = alignment.aligned_values()
            expected_aligned_values = [
                diff.AlignedValues(old, new),
                diff.AlignedValues(old.first, new.first),
                diff.AlignedValues(old.first.z, new.second.z),
            ]
            # Sort both sides identically so list equality is order-insensitive.
            aligned_values.sort(key=lambda p: id(p.old_value))
            expected_aligned_values.sort(key=lambda p: id(p.old_value))
            self.assertEqual(aligned_values, expected_aligned_values)
        with self.subTest('__repr__'):
            self.assertEqual(
                repr(alignment),
                "<DiffAlignment from 'old' to 'new': 3 object(s) aligned>")
        with self.subTest('__str__'):
            self.assertEqual(
                str(alignment), '\n'.join([
                    'DiffAlignment:',
                    ' old -> new',
                    ' old.first -> new.first',
                    ' old.first.z -> new.second.z',
                ]))

    def test_alignment_errors(self):
        # Each subTest exercises one reason align() must raise AlignmentError.
        old = fdl.Config(make_pair, fdl.Config(SimpleClass, [1], [2], [3]),
                         fdl.Config(basic_fn, 4, 5, 6))
        new = fdl.Config(make_pair, fdl.Config(basic_fn, [1], [2], 3, 4),
                         fdl.Partial(SimpleClass, z=[12, 13]))
        alignment = diff.DiffAlignment(old, new)
        alignment.align(old.first.x, new.first.arg1)
        with self.subTest('type(old_value) != type(new_value)'):
            with self.assertRaisesRegex(diff.AlignmentError, '.* different types .*'):
                alignment.align(old.second, new.second)
        with self.subTest('old_value already aligned'):
            with self.assertRaisesRegex(
                    diff.AlignmentError,
                    'An alignment has already been added for old value .*'):
                alignment.align(old.first.x, new.first.arg2)
        with self.subTest('new_value already aligned'):
            with self.assertRaisesRegex(
                    diff.AlignmentError,
                    'An alignment has already been added for new value .*'):
                alignment.align(old.first.y, new.first.arg1)
        with self.subTest('len(old_value) != len(new_value)'):
            with self.assertRaisesRegex(diff.AlignmentError,
                                        '.* different lengths .*'):
                alignment.align(old.first.z, new.second.z)
        with self.subTest('non-memoizable old_value'):
            with self.assertRaisesRegex(
                    diff.AlignmentError,
                    'old_value=4 may not be aligned because it is not '
                    'memoizable'):
                alignment.align(old.second.arg1, new.second.z)
        with self.subTest('non-memoizable new_value'):
            with self.assertRaisesRegex(
                    diff.AlignmentError,
                    'new_value=3 may not be aligned because it is not '
                    'memoizable'):
                alignment.align(old.first.z, new.first.kwarg1)

    def test_align_by_id(self):
        # Objects shared (same id) between old and new get aligned.
        old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, [3, 4]),
                         fdl.Config(basic_fn, 5, 6, 7))
        new = fdl.Config(make_pair, old.first,
                         fdl.Partial(SimpleClass, z=old.first.z))
        alignment = diff.align_by_id(old, new)
        self.assertCountEqual(alignment.aligned_values(), [
            diff.AlignedValues(old.first.z, new.second.z),
            diff.AlignedValues(old.first, new.first),
        ])

    def test_align_heuristically(self):
        c = fdl.Config(SimpleClass)  # Shared object (same id) in `old` and `new`
        d = fdl.Config(SimpleClass, x='bop')
        old = fdl.Config(
            make_triple,
            first=fdl.Config(SimpleClass, x=1, y=2, z=[3, 4]),
            second=fdl.Config(basic_fn, arg1=[5], arg2=5, kwarg1=c),
            third=[[1], 2])
        new = fdl.Config(
            make_triple,
            first=fdl.Config(basic_fn, arg1=1, arg2=c, kwarg1=3, kwarg2=4),
            second=fdl.Partial(basic_fn, arg1=[8], arg2=[3, 4], kwarg1=d),
            third=[[1, 2], 2, [3, 4]])
        alignment = diff.align_heuristically(old, new)
        self.assertCountEqual(
            alignment.aligned_values(),
            [
                # Values aligned by id:
                diff.AlignedValues(old.second.kwarg1, new.first.arg2),
                # Values aligned by path:
                diff.AlignedValues(old, new),
                diff.AlignedValues(old.first, new.first),
                diff.AlignedValues(old.second.arg1, new.second.arg1),
                # Values aligned by equality:
                diff.AlignedValues(old.first.z, new.second.arg2),
            ])
class ReferenceTest(absltest.TestCase):
    """Tests for diff.Reference."""

    def test_repr(self):
        path = (daglish.Attr('foo'), daglish.Index(1), daglish.Key('bar'))
        reference = diff.Reference('old', path)
        self.assertEqual(repr(reference), "<Reference: old.foo[1]['bar']>")
class DiffTest(absltest.TestCase):
    """Tests for Diff's string rendering."""

    def test_str(self):
        # Builds a Diff covering every DiffOperation type plus a shared value.
        cfg_diff = diff.Diff(
            changes={
                parse_path('.foo[1]'):
                    diff.ModifyValue(2),
                parse_path('.foo[2]'):
                    diff.SetValue(parse_reference('old', '.bar')),
                parse_path('.bar.x'):
                    diff.DeleteValue(),
                parse_path('.bar.y'):
                    diff.ModifyValue(parse_reference('new_shared_values', '[0]')),
                parse_path('.bar.z'):
                    diff.SetValue(
                        {'a': parse_reference('new_shared_values', '[0]')}),
            },
            new_shared_values=([1, 2, parse_reference('old', '.bar')],))
        expected_str = (
            'Diff(changes=[\n'
            ' .foo[1]: ModifyValue(new_value=2)\n'
            ' .foo[2]: SetValue(new_value=<Reference: old.bar>)\n'
            ' .bar.x: DeleteValue()\n'
            ' .bar.y: ModifyValue(new_value='
            '<Reference: '
            'new_shared_values[0]>)\n'
            ' .bar.z: SetValue(new_value='
            "{'a': <Reference: new_shared_values[0]>})\n"
            ' ],\n'
            ' new_shared_values=[\n'
            ' [1, 2, <Reference: old.bar>]\n'
            ' ])')
        self.assertEqual(str(cfg_diff), expected_str)
class DiffFromAlignmentBuilderTest(absltest.TestCase):
    def check_diff(self,
                   old,
                   new,
                   expected_changes,
                   expected_new_shared_values=()):
        """Checks that building a Diff generates the expected values.

        Builds a diff using a heuristic alignment between `old` and `new`, and
        then checks that `diff.changes` and `diff.new_shared_values` have the
        indicated values.

        Args:
          old: The `old` value for the diff.
          new: The `new` value for the diff.
          expected_changes: Dictionary mapping string path representations to
            DiffOperations. The keys are parsed using `parse_path`.
          expected_new_shared_values: Tuple of values that the built diff's
            `new_shared_values` is expected to equal.
        """
        alignment = diff.align_heuristically(old, new)
        cfg_diff = diff.build_diff_from_alignment(alignment)
        self.assertEqual(
            cfg_diff.changes,
            dict([(parse_path(p), c) for (p, c) in expected_changes.items()]))
        self.assertEqual(cfg_diff.new_shared_values, expected_new_shared_values)
    def make_test_diff_builder(self):
        """Returns a DiffBuilder that can be used for testing."""
        c = fdl.Config(SimpleClass)  # Shared object (same id)
        old = fdl.Config(make_pair, fdl.Config(SimpleClass, 1, 2, [3, 4]),
                         fdl.Config(basic_fn, [5], [6, 7], c))
        new = fdl.Config(make_pair, fdl.Config(basic_fn, 1, c, 3, 4.0),
                         fdl.Partial(basic_fn, [8], 9, [3, 4]))
        # Hand-picked alignment (not heuristic) so the builder's behavior is
        # deterministic for the tests that use it.
        aligned_values = [
            diff.AlignedValues(old, new),
            diff.AlignedValues(old.first, new.first),
            diff.AlignedValues(old.second.arg1, new.second.arg1),
            diff.AlignedValues(old.second.kwarg1, new.first.arg2),
            diff.AlignedValues(old.first.z, new.second.kwarg1),
        ]
        alignment = diff.DiffAlignment(old, new)
        for aligned_value in aligned_values:
            alignment.align(aligned_value.old_value, aligned_value.new_value)
        return diff._DiffFromAlignmentBuilder(alignment)
def test_modify_buildable_callable(self):
old = fdl.Config(AnotherClass, fdl.Config(SimpleClass, 1, 2), 3)
new = copy.deepcopy(old)
fdl.update_callable(new, SimpleClass)
fdl.update_callable(new.x, AnotherClass)
expected_changes = {
'.__fn_or_cls__': diff.ModifyValue(SimpleClass),
'.x.__fn_or_cls__': diff.ModifyValue(AnotherClass)
}
self.check_diff(old, new, expected_changes)
def test_modify_buildable_argument(self):
old = fdl.Config(SimpleClass, 1, fdl.Config(AnotherClass, 2, 3))
new = copy.deepcopy(old)
new.x = 11
new.y.x = 22
expected_changes = {
'.x': diff.ModifyValue(11),
'.y.x': diff.ModifyValue(22)
}
self.check_diff(old, new, expected_changes)
def test_modify_sequence_element(self):
old = fdl.Config(SimpleClass, [1, 2, [3]])
new = copy.deepcopy(old)
new.x[0] = 11
new.x[2][0] = 33
expected_changes = {
'.x[0]': diff.ModifyValue(11),
'.x[2][0]': diff.ModifyValue(33)
}
self.check_diff(old, new, expected_changes)
def test_modify_dict_item(self):
old = fdl.Config(SimpleClass, {'a': 2, 'b': 4, 'c': {'d': 7}})
new = copy.deepcopy(old)
new.x['a'] = 11
new.x['c']['d'] = 33
expected_changes = {
".x['a']": diff.ModifyValue(11),
".x['c']['d']": diff.ModifyValue(33)
}
self.check_diff(old, new, expected_changes)
def test_set_buildable_argument(self):
old = fdl.Config(SimpleClass, 1, fdl.Config(AnotherClass, 2, 3))
new = copy.deepcopy(old)
new.z = 11
new.y.a = 22
expected_changes = {'.z': diff.SetValue(11), '.y.a': diff.SetValue(22)}
self.check_diff(old, new, expected_changes)
def test_set_dict_item(self):
old = fdl.Config(SimpleClass, {'a': 2, 'b': 4, 'c': {'d': 7}})
new = copy.deepcopy(old)
new.x['foo'] = 11
new.x['c']['bar'] = 33
expected_changes = {
".x['foo']": diff.SetValue(11),
".x['c']['bar']": diff.SetValue(33)
}
self.check_diff(old, new, expected_changes)
def test_delete_buildable_argument(self):
old = fdl.Config(SimpleClass, 1, fdl.Config(AnotherClass, 2, 3),
fdl.Config(SimpleClass, 4))
new = copy.deepcopy(old)
del new.x
del new.y.x
del new.z
expected_changes = {
'.x': diff.DeleteValue(),
'.y.x': diff.DeleteValue(),
'.z': diff.DeleteValue()
}
self.check_diff(old, new, expected_changes)
def test_delete_dict_item(self):
old = fdl.Config(SimpleClass, {'a': 2, 'b': {}, 'c': {'d': 7}})
new = copy.deepcopy(old)
del new.x['a']
del new.x['b']
del new.x['c']['d']
expected_changes = {
".x['a']": diff.DeleteValue(),
".x['b']": diff.DeleteValue(),
".x['c']['d']": diff.DeleteValue()
}
self.check_diff(old, new, expected_changes)
    def test_add_shared_new_objects(self):
        """New objects reachable from several places become new_shared_values."""
        old = fdl.Config(
            SimpleClass,
            x=1,
            y=fdl.Config(SimpleClass, x=2, y=3, z=[12]),
            z=fdl.Config(SimpleClass, x=4))
        new = copy.deepcopy(old)
        # new.x is shared (also reachable via new.y.x); [3, 4] and [99] are
        # shared sub-objects, so all three must be hoisted into
        # new_shared_values and referred to by Reference.
        new.x = [1, 2, [3, 4], new.y.z]
        new.y.x = new.x
        new.y.y = [99]
        new.z.y = fdl.Config(SimpleClass, new.x[2], new.y.y)
        expected_new_shared_values = (
            [3, 4],
            [
                1, 2,
                parse_reference('new_shared_values', '[0]'),
                parse_reference('old', '.y.z')
            ],
            [99],
        )
        expected_changes = {
            '.x':
                diff.ModifyValue(parse_reference('new_shared_values', '[1]')),
            '.y.x':
                diff.ModifyValue(parse_reference('new_shared_values', '[1]')),
            '.y.y':
                diff.ModifyValue(parse_reference('new_shared_values', '[2]')),
            '.z.y':
                diff.SetValue(
                    fdl.Config(SimpleClass,
                               parse_reference('new_shared_values', '[0]'),
                               parse_reference('new_shared_values', '[2]'))),
        }
        self.check_diff(old, new, expected_changes, expected_new_shared_values)
    def test_multiple_modifications(self):
        """All change kinds from the hand-built alignment appear in one diff."""
        cfg_diff = self.make_test_diff_builder().build_diff()
        expected_changes = {
            '.first.__fn_or_cls__': diff.ModifyValue(basic_fn),
            '.first.x': diff.DeleteValue(),
            '.first.y': diff.DeleteValue(),
            '.first.z': diff.DeleteValue(),
            '.first.arg1': diff.SetValue(1),
            '.first.arg2': diff.SetValue(parse_reference('old', '.second.kwarg1')),
            '.first.kwarg1': diff.SetValue(3),
            '.first.kwarg2': diff.SetValue(4.0),
            '.second': diff.ModifyValue(
                fdl.Partial(basic_fn, parse_reference('old', '.second.arg1'),
                            9, parse_reference('old', '.first.z'))),
            '.second.arg1[0]': diff.ModifyValue(8)
        }  # pyformat: disable
        self.assertEqual(
            cfg_diff.changes,
            dict([(parse_path(p), c) for (p, c) in expected_changes.items()]))
        self.assertEqual(cfg_diff.new_shared_values, ())
def test_replace_object_with_equal_value(self):
c = SimpleClass(1, 2, 3)
with self.subTest('with sharing'):
old = fdl.Config(SimpleClass, x=c, y=[4, c, 5])
new = copy.deepcopy(old)
new.y[1] = SimpleClass(1, 2, 3)
self.assertEqual(new.x, new.y[1])
self.assertIsNot(new.x, new.y[1])
# new.y[1] can't be aligned with | |
chats_all = {}
bot = self.info
if isinstance(bot, BotInfo):
bot = ChatMember(
description=bot.description, user_id=bot.user_id, name=bot.name, username=bot.username,
is_bot=bot.is_bot, last_activity_time=bot.last_activity_time,
avatar_url=bot.avatar_url, full_avatar_url=bot.full_avatar_url,
last_access_time=0, is_owner=False, is_admin=True, join_time=0,
permissions=[ChatAdminPermission.WRITE, ChatAdminPermission.READ_ALL_MESSAGES],
)
while True:
if marker:
chat_list = self.chats.get_chats(marker=marker)
else:
chat_list = self.chats.get_chats()
if isinstance(chat_list, ChatList):
marker = chat_list.marker
for chat in chat_list.chats:
self.lgz.debug('Found chat => chat_id=%(id)s; type: %(type)s; status: %(status)s; title: %(title)s; participants: %(participants)s; owner: %(owner)s' %
{'id': chat.chat_id, 'type': chat.type, 'status': chat.status, 'title': chat.title, 'participants': chat.participants_count, 'owner': chat.owner_id})
if chat.status not in [ChatStatus.ACTIVE]:
continue
admins = {}
if chat.type == ChatType.DIALOG:
dialog_user = chat.dialog_with_user
if isinstance(dialog_user, UserWithPhoto):
dialog_user = ChatMember(
description=dialog_user.description, user_id=dialog_user.user_id, name=dialog_user.name, username=dialog_user.username,
is_bot=dialog_user.is_bot, last_activity_time=dialog_user.last_activity_time,
avatar_url=dialog_user.avatar_url, full_avatar_url=dialog_user.full_avatar_url,
last_access_time=0, is_owner=False, is_admin=True, join_time=0,
permissions=[ChatAdminPermission.WRITE, ChatAdminPermission.READ_ALL_MESSAGES]
)
# dialog_user_id = self.user_id ^ chat.chat_id
admins[dialog_user.user_id] = dialog_user
admins[bot.user_id] = bot
else:
try:
admins = self.get_chat_admins(chat.chat_id)
except ApiException as err:
if err.status != 403:
raise
bot_user = admins.get(self.user_id)
if bot_user:
for admin in admins.values():
if admin.user_id != self.user_id:
# chat_ext = chats_available[admin.user_id].get(chat.chat_id)
chat_ext = chats_all.get(chat.chat_id)
if not isinstance(chat_ext, ChatExt):
chat_ext = ChatExt(chat, self.get_dialog_name(self.title, user=admin))
chats_all[chat.chat_id] = chat_ext
chat_ext.admin_permissions[self.user_id] = bot_user.permissions
self.adm_perm_correct(chat_ext.admin_permissions[self.user_id])
chat_ext.admin_permissions[admin.user_id] = admin.permissions
self.adm_perm_correct(chat_ext.admin_permissions[admin.user_id])
if chat_ext and (admin_only or self.chat_is_allowed(chat_ext, admin.user_id)):
if chats_available_cm.get(admin.user_id) is None:
chats_available_cm[admin.user_id] = {}
if chats_available_c.get(chat_ext.chat_id) is None:
chats_available_c[chat_ext.chat_id] = chat_ext
if chats_available_m.get(admin.user_id) is None:
chats_available_m[admin.user_id] = admins.get(admin.user_id)
chats_available_cm[admin.user_id][chat.chat_id] = chat_ext
else:
self.lgz.debug('Pass, because for chat_id=%s bot (id=%s) is not admin' % (chat.chat_id, self.user_id))
if not marker:
break
return chats_available
@staticmethod
def limited_buttons_index(**kwargs):
"""
:rtype: str
"""
if 'mid' in kwargs:
return kwargs['mid']
    @staticmethod
    def limited_buttons_get(index):
        # type: (str) -> [[]]
        """Returns the cached button matrix for `index`, or None if absent."""
        return TamTamBot.limited_buttons.get(index)
    @staticmethod
    def limited_buttons_set(index, buttons):
        # type: (str, [[]]) -> None
        """Caches the full button matrix for `index` (class-level storage)."""
        TamTamBot.limited_buttons[index] = buttons
@staticmethod
def limited_buttons_del(index):
# type: (str) -> None
if index in TamTamBot.limited_buttons:
TamTamBot.limited_buttons.pop(index)
    def cmd_handler_get_buttons_oth(self, update):
        """Callback handler that pages through a previously cached button list."""
        # Only button-callback updates carry the paging command.
        if not isinstance(update.update_current, MessageCallbackUpdate):
            return False
        if update.cmd_args:
            direction = update.cmd_args.get('direction')
            start_from = update.cmd_args.get('start_from')
            max_lines = update.cmd_args.get('max_lines')
            add_close_button = update.cmd_args.get('add_close_button')
            add_info = update.cmd_args.get('add_info')
            mid = update.message.body.mid
            if direction == 'close':
                # Drop the cached list; the displayed message is left as-is.
                self.limited_buttons_del(self.limited_buttons_index(mid=mid))
                return True
            buttons = self.limited_buttons_get(self.limited_buttons_index(mid=mid))
            if mid and buttons:
                # Re-render the same message (update=mid) at the new window.
                self.view_buttons(title=None, buttons=buttons, update=mid, add_info=add_info, add_close_button=add_close_button, start_from=start_from, max_lines=max_lines)
            else:
                self.send_notification(update, _('Something went wrong...'))
            return True
        return False
    def view_buttons(self, title, buttons, user_id=None, chat_id=None, link=None, update=None, add_info=False, add_close_button=False, start_from=None, max_lines=None):
        # type: (str or None, list, int or None, int or None, NewMessageLink, Update, bool, bool, int or None, int or None) -> SendMessageResult
        """Sends (or edits) a message showing a window of `buttons`, appending a
        service row of navigation/close buttons when the list does not fit.

        Exactly one of user_id / chat_id / an editable message id (via `update`)
        must identify the destination; otherwise TypeError is raised.
        """
        start_from = start_from or 0
        max_lines_orig = max_lines  # original request, re-sent in paging cmds
        # Clamp the window to [1, MAX_ROWS - 1]; one row is kept for service buttons.
        max_lines = min(max(max_lines or CallbackButtonCmd.MAX_ROWS - 1, 1), CallbackButtonCmd.MAX_ROWS - 1)
        base_buttons = buttons
        limited = False  # becomes True when the list was actually windowed
        if buttons:
            buttons = []
            buttons_service = [[]]  # single service row (nav + close)
            pos_start = min(len(base_buttons), max(0, start_from))
            pos_end = min(len(base_buttons), max(0, start_from + max_lines))
            # NOTE(review): '%' yields the division remainder, not a page
            # count -- verify that '//' wasn't intended here; it only gates
            # the fast-rewind buttons below.
            pages = len(base_buttons) % max_lines
            is_pages_start = pos_start == 0
            is_pages_end = pos_end == len(base_buttons)
            cmd = 'get_buttons_oth'
            fast_rev_need = pages >= 5
            if len(base_buttons) > max_lines:
                if fast_rev_need and not is_pages_start:
                    # Jump to the very first window.
                    button_title = '⏮'
                    buttons_service[0].append(CallbackButtonCmd(
                        button_title, cmd, {
                            'direction': 'backward', 'start_from': 0, 'max_lines': max_lines_orig,
                            'add_close_button': add_close_button, 'add_info': add_info
                        }, Intent.POSITIVE, bot_username=self.username
                    ))
                if pos_start > 0:
                    # One window backward.
                    button_title = '←'
                    if add_info:
                        # NOTE(review): this label breaks the line after '/',
                        # unlike the forward label below -- confirm intended.
                        button_title = '%s %d-%d/\n%d' % (button_title, max(0, pos_start - max_lines) + 1, pos_start, len(base_buttons))
                    buttons_service[0].append(CallbackButtonCmd(
                        button_title, cmd, {
                            'direction': 'backward', 'start_from': pos_start - max_lines, 'max_lines': max_lines_orig,
                            'add_close_button': add_close_button, 'add_info': add_info
                        }, Intent.POSITIVE, bot_username=self.username
                    ))
                    limited = True
            buttons.extend(base_buttons[pos_start:pos_end])
            if len(base_buttons) > max_lines:
                if pos_end < len(base_buttons):
                    # One window forward.
                    button_title = '→'
                    if add_info:
                        button_title = '%s %d-%d/%d' % (button_title, pos_start + 1 + max_lines, min(len(base_buttons), pos_start + max_lines * 2), len(base_buttons))
                    buttons_service[0].append(CallbackButtonCmd(
                        button_title, cmd, {
                            'direction': 'forward', 'start_from': pos_end, 'max_lines': max_lines_orig,
                            'add_close_button': add_close_button, 'add_info': add_info
                        }, Intent.POSITIVE, bot_username=self.username
                    ))
                    limited = True
                if fast_rev_need and not is_pages_end:
                    # Jump to the very last window.
                    button_title = '⏭'
                    buttons_service[0].append(CallbackButtonCmd(
                        button_title, cmd, {
                            'direction': 'forward', 'start_from': len(base_buttons) - max_lines, 'max_lines': max_lines_orig,
                            'add_close_button': add_close_button, 'add_info': add_info
                        }, Intent.POSITIVE, bot_username=self.username
                    ))
            if add_close_button:
                buttons_service[0].append(CallbackButtonCmd(
                    _('Close'), cmd, {
                        'direction': 'close', 'start_from': pos_end,
                        'add_close_button': add_close_button, 'add_info': add_info
                    }, Intent.NEGATIVE, bot_username=self.username
                ))
            if buttons_service[0]:
                buttons.extend(buttons_service)
            mb = self.add_buttons_to_message_body(NewMessageBody(title, link=link), buttons)
        else:
            mb = NewMessageBody(_('No available items found.'), link=link)
        # Resolve the destination: an editable message id wins over chat/user.
        mid = None
        if isinstance(update, MessageCallbackUpdate):
            mid = update.message.body.mid
        elif isinstance(update, str):
            mid = update
        if not (user_id or chat_id or mid):
            raise TypeError('user_id or chat_id or mid must be defined.')
        res = None
        if mid:
            self.msg.edit_message(mid, mb)
        else:
            if chat_id:
                res = self.msg.send_message(mb, chat_id=chat_id)
            else:
                res = self.msg.send_message(mb, user_id=user_id)
            if isinstance(res, SendMessageResult):
                mid = res.message.body.mid
        if limited and mid:
            # Cache the full list so the paging callback can re-window it.
            self.limited_buttons_set(self.limited_buttons_index(mid=mid), base_buttons)
        return res
def view_buttons_lim(self, title, buttons, user_id=None, chat_id=None, link=None, update=None, lim_items=None, lim_notify=None, lim_notify_g=None, lim_notify_admin=None,
add_info=False, add_close_button=False, start_from=None, max_lines=None):
# type: (str or None, list, int or None, int or None, NewMessageLink, Update, int, str, str, str, bool, bool, int or None, int or None) -> SendMessageResult
if lim_items:
first_call = update and UpdateCmn(update).cmd_args is None
num_subscribers_cur = len(buttons)
if num_subscribers_cur > lim_items:
b = buttons or []
buttons = []
i = 0
for e in b:
if i >= lim_items:
break
i += 1
buttons.append(e)
if lim_notify_admin and first_call:
self.send_admin_message(lim_notify_admin)
if lim_notify and ((lim_notify_g and num_subscribers_cur >= lim_items) or (not lim_notify_g and num_subscribers_cur > lim_items)):
m_t = lim_notify
if lim_notify_g and num_subscribers_cur > lim_items:
m_t += '\n' + lim_notify_g
title = '%s\n\n{m_t}' % title
return self.view_buttons(title, buttons, user_id, chat_id, link=link, update=update,
add_info=add_info, add_close_button=add_close_button, start_from=start_from, max_lines=max_lines)
def get_yes_no_buttons(self, cmd_dict):
# type: ([{}]) -> list
if not cmd_dict:
return []
return self.get_buttons([
CallbackButtonCmd(_('Yes'), cmd_dict['yes']['cmd'], cmd_dict['yes']['cmd_args'], Intent.POSITIVE, bot_username=self.username),
CallbackButtonCmd(_('No'), cmd_dict['no']['cmd'], cmd_dict['no']['cmd_args'], Intent.NEGATIVE, bot_username=self.username),
])
@staticmethod
def get_buttons(cbc, orientation='horizontal'):
# type: ([CallbackButtonCmd], str) -> list
if not cbc:
return []
orientation = orientation or 'horizontal'
res = []
for bt in cbc:
res.append(bt)
if orientation == 'horizontal':
res = [res]
else:
res = [[_] for _ in res]
return res
    def prev_step_write(self, index, update):
        # type: (str, Update) -> None
        """Stores the serialized `update` under `index` (first write wins)."""
        if not self.prev_step_exists(index):
            self.lgz.debug('Put index %s into previous step stack.' % index)
            b_obj = self.serialize_update(update)
            cursor = self.conn_srv.cursor()
            # Table name comes from a trusted internal attribute; the values
            # themselves are bound as named parameters.
            # noinspection SqlResolve
            cursor.execute(
                'INSERT INTO %(table)s ([index], [update]) VALUES (:index, :update)' %
                {'table': self.prev_step_table_name}, {'index': index, 'update': b_obj})
            cursor.connection.commit()
            cursor.close()
            self.lgz.debug('previous step stack:\n%s' % self.prev_step_all())
def prev_step_exists(self, index):
# type: (str) -> bool
update = self.prev_step_get(index)
if update:
return True
else:
return False
    def prev_step_delete(self, index):
        # type: (str) -> None
        """Removes the previous-step record stored under `index`, if any."""
        if self.prev_step_exists(index):
            self.lgz.debug('Deleting index %s from previous step stack.' % index)
            cursor = self.conn_srv.cursor()
            # noinspection SqlResolve
            cursor.execute(
                'DELETE FROM %(table)s WHERE [index]=:index' %
                {'table': self.prev_step_table_name}, {'index': index})
            cursor.connection.commit()
            cursor.close()
            self.lgz.debug('previous step stack:\n%s' % self.prev_step_all())
    def prev_step_all(self):
        # type: () -> {}
        """Returns all stored previous-step records as {index: Update}."""
        res = {}
        cursor = self.conn_srv.cursor()
        # noinspection SqlResolve
        cursor.execute(
            'SELECT [index], [update] FROM %(table)s' %
            {'table': self.prev_step_table_name})
        sql_res = cursor.fetchall()
        cursor.close()
        if sql_res is not None:
            for row in sql_res:
                # row[0] = index, row[1] = serialized update blob
                res[row[0]] = self.deserialize_update(row[1])
        return res
    def prev_step_get(self, index):
        # type: (str) -> Update
        """Returns the stored update for `index`, or None (implicit) if absent."""
        cursor = self.conn_srv.cursor()
        # noinspection SqlResolve
        cursor.execute(
            'SELECT [index], [update] FROM %(table)s WHERE [index]=:index' %
            {'table': self.prev_step_table_name}, {'index': index})
        row = cursor.fetchone()
        cursor.close()
        if row:
            return self.deserialize_update(row[1])
    def upload_content(self, content, upload_type, content_name=None):
        # type: ([], str, str) -> dict
        """Uploads `content` to the upload endpoint for `upload_type`.

        Returns the parsed JSON response on HTTP 200; returns None (implicit)
        when no endpoint was obtained or the upload failed.
        """
        upload_ep = self.upload.get_upload_url(type=upload_type)
        if isinstance(upload_ep, UploadEndpoint):
            rdf = requests.post(upload_ep.url, files={'files': ('file' if not content_name else content_name, content, 'multipart/form-data')})
            if rdf.status_code == 200:
                return rdf.json()
def attach_contents(self, items):
# type: ([(bytes, str)]) -> []
if not items:
return
attachments = []
for item in items:
klass = None
if item[1] == UploadType.VIDEO:
klass = VideoAttachmentRequest
elif item[1] == UploadType.IMAGE:
klass = PhotoAttachmentRequest
elif item[1] == UploadType.AUDIO:
klass = AudioAttachmentRequest
elif item[1] == UploadType.FILE:
klass = FileAttachmentRequest
if klass:
if not isinstance(item[0], dict):
upl = self.upload_content(item[0], item[1], None if len(item) < 3 else item[2])
if isinstance(upl, dict):
attachments.append(klass(upl))
else:
attachments.append(klass(item[0]))
return attachments
# noinspection PyIncorrectDocstring
def send_message(self, mb, max_retry=20, sl_time=1, **kwargs):
"""
:param NewMessageBody mb: (required)
:param int max_retry: maximum number of repetitions
:param int sl_time: delay time for repeating an error
:param int user_id: Fill this parameter if you want to send message to user
:param int chat_id: Fill this if you send message to chat
:return: SendMessageResult
If the method is called asynchronously,
returns the request thread.
"""
rpt = 0
while rpt < max_retry:
| |
# <reponame>hyan15/proxyscrape  (scraper artifact, commented out so the module stays importable)
# MIT License
#
# Copyright (c) 2018 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import time
import unittest
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
from proxyscrape.scrapers import Proxy, ProxyResource, RESOURCE_MAP
class TestProxyResource(unittest.TestCase):
    """Tests for ProxyResource's time-based refresh logic."""

    def test_refreshes_if_expired(self):
        expected = [Proxy('host', 'port', 'code', 'country', 'anonymous', 'type', 'source')]

        def func():
            return expected
        # A negative refresh interval means the resource is always expired.
        pr = ProxyResource(func, -1)
        refreshed, actual = pr.refresh()
        self.assertEqual(True, refreshed)
        self.assertEqual(expected[0], actual[0])
        refreshed, actual = pr.refresh()
        self.assertEqual(True, refreshed)
        self.assertEqual(expected[0], actual[0])

    def test_doesnt_refresh_if_not_expired(self):
        expected = [Proxy('host', 'port', 'code', 'country', 'anonymous', 'type', 'source')]

        def func():
            return expected
        pr = ProxyResource(func, 5)
        refreshed, actual = pr.refresh()
        self.assertEqual(True, refreshed)
        self.assertEqual(expected[0], actual[0])
        # Second call within the 5-second window must be a no-op.
        refreshed, actual = pr.refresh()
        self.assertEqual(False, refreshed)
        self.assertEqual(None, actual)

    def test_refreshes_if_forced(self):
        expected = [Proxy('host', 'port', 'code', 'country', 'anonymous', 'type', 'source')]

        def func():
            return expected
        pr = ProxyResource(func, 5)
        refreshed, actual = pr.refresh()
        self.assertEqual(True, refreshed)
        self.assertEqual(expected[0], actual[0])
        # force=True overrides the not-yet-expired check.
        refreshed, actual = pr.refresh(True)
        self.assertEqual(True, refreshed)
        self.assertEqual(expected[0], actual[0])

    def test_doesnt_refresh_if_lock_check(self):
        expected = [Proxy('host', 'port', 'code', 'country', 'anonymous', 'type', 'source')]

        def func():
            return expected
        pr = ProxyResource(func, 5)
        refreshed, actual = pr.refresh()
        self.assertEqual(True, refreshed)
        self.assertEqual(expected[0], actual[0])
        with patch('proxyscrape.scrapers.time') as time_mock:
            # Successive mocked time.time() values: presumably the first makes
            # the resource look expired, and a later re-check (after taking the
            # lock) sees it already refreshed -- TODO confirm against
            # ProxyResource.refresh's implementation.
            times = [time.time() + 10, -1, 0]
            time_iter = iter(times)
            time_mock.time = lambda: next(time_iter)
            refreshed, actual = pr.refresh()
            self.assertEqual(False, refreshed)
            self.assertIsNone(actual)
class TestScrapers(unittest.TestCase):
    def setUp(self):
        # Patch the module-level `requests` so tests never do real HTTP.
        self.requests_patcher = patch('proxyscrape.scrapers.requests')
        self.requests = self.requests_patcher.start()
    def tearDown(self):
        # Undo the requests patch installed in setUp.
        self.requests_patcher.stop()
def test_anonymous_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'anonymous-proxy.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
expected = {
Proxy('172.16.31.10', '53281', 'br', 'brazil', True, 'https', 'anonymous-proxy'),
Proxy('192.168.127.12', '8080', 'ua', 'ukraine', True, 'http', 'anonymous-proxy'),
Proxy('192.168.3.11', '8080', 'ru', 'russian federation', True, 'http', 'anonymous-proxy')
}
func = RESOURCE_MAP['anonymous-proxy']
pr = ProxyResource(func, 10)
_, proxies = pr.refresh()
for proxy in proxies:
self.assertIn(proxy, expected)
def test_anonymous_proxies_not_ok(self):
response = Mock()
response.ok = False
self.requests.get = lambda url: response
func = RESOURCE_MAP['anonymous-proxy']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_anonymous_proxies_invalid_html(self):
with open(os.path.join(cwd, 'mock_pages', 'empty.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
func = RESOURCE_MAP['anonymous-proxy']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_free_proxy_list_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'free-proxy-list-proxy.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
expected = {
Proxy('172.16.31.10', '53281', 'br', 'brazil', True, 'https', 'free-proxy-list'),
Proxy('192.168.127.12', '8080', 'ua', 'ukraine', False, 'http', 'free-proxy-list'),
Proxy('192.168.3.11', '8080', 'ru', 'russian federation', True, 'http', 'free-proxy-list')
}
func = RESOURCE_MAP['free-proxy-list']
pr = ProxyResource(func, 10)
_, proxies = pr.refresh()
for proxy in proxies:
self.assertIn(proxy, expected)
def test_free_proxy_list_proxies_not_ok(self):
response = Mock()
response.ok = False
self.requests.get = lambda url: response
func = RESOURCE_MAP['free-proxy-list']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_free_proxy_list_proxies_invalid_html(self):
with open(os.path.join(cwd, 'mock_pages', 'empty.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
func = RESOURCE_MAP['free-proxy-list']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_proxy_daily_http_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'proxy-daily-proxy.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
expected = {
Proxy('172.16.58.3', '80', None, None, None, 'http', 'proxy-daily-http'),
Proxy('172.16.58.3', '3128', None, None, None, 'http', 'proxy-daily-http'),
Proxy('172.16.17.32', '53281', None, None, None, 'http', 'proxy-daily-http')
}
func = RESOURCE_MAP['proxy-daily-http']
pr = ProxyResource(func, 10)
_, proxies = pr.refresh()
for proxy in proxies:
self.assertIn(proxy, expected)
def test_proxy_daily_http_proxies_not_ok(self):
response = Mock()
response.ok = False
self.requests.get = lambda url: response
func = RESOURCE_MAP['proxy-daily-http']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_proxy_daily_http_proxies_invalid_html(self):
with open(os.path.join(cwd, 'mock_pages', 'empty.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
func = RESOURCE_MAP['proxy-daily-http']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_proxy_daily_socks4_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'proxy-daily-proxy.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
expected = {
Proxy('192.168.3.11', '8888', None, None, None, 'socks4', 'proxy-daily-socks4'),
Proxy('192.168.3.11', '1080', None, None, None, 'socks4', 'proxy-daily-socks4'),
Proxy('172.16.17.32', '1080', None, None, None, 'socks4', 'proxy-daily-socks4')
}
func = RESOURCE_MAP['proxy-daily-socks4']
pr = ProxyResource(func, 10)
_, proxies = pr.refresh()
for proxy in proxies:
self.assertIn(proxy, expected)
def test_proxy_daily_socks4_proxies_not_ok(self):
response = Mock()
response.ok = False
self.requests.get = lambda url: response
func = RESOURCE_MAP['proxy-daily-socks4']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_proxy_daily_socks4_proxies_invalid_html(self):
with open(os.path.join(cwd, 'mock_pages', 'empty.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
func = RESOURCE_MAP['proxy-daily-socks4']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_proxy_daily_socks5_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'proxy-daily-proxy.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
expected = {
Proxy('192.168.127.12', '1080', None, None, None, 'socks5', 'proxy-daily-socks5'),
Proxy('172.16.58.3', '1080', None, None, None, 'socks5', 'proxy-daily-socks5'),
Proxy('192.168.127.12', '6667', None, None, None, 'socks5', 'proxy-daily-socks5')
}
func = RESOURCE_MAP['proxy-daily-socks5']
pr = ProxyResource(func, 10)
_, proxies = pr.refresh()
for proxy in proxies:
self.assertIn(proxy, expected)
def test_proxy_daily_socks5_proxies_not_ok(self):
response = Mock()
response.ok = False
self.requests.get = lambda url: response
func = RESOURCE_MAP['proxy-daily-socks5']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_proxy_daily_socks5_proxies_invalid_html(self):
with open(os.path.join(cwd, 'mock_pages', 'empty.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
func = RESOURCE_MAP['proxy-daily-socks5']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_socks_proxy_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'socks-proxy.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
expected = {
Proxy('172.16.31.10', '53281', 'br', 'brazil', True, 'socks4', 'socks-proxy'),
Proxy('192.168.127.12', '8080', 'ua', 'ukraine', True, 'socks5', 'socks-proxy'),
Proxy('192.168.3.11', '8080', 'ru', 'russian federation', True, 'socks4', 'socks-proxy')
}
func = RESOURCE_MAP['socks-proxy']
pr = ProxyResource(func, 10)
_, proxies = pr.refresh()
for proxy in proxies:
self.assertIn(proxy, expected)
def test_socks_proxies_not_ok(self):
response = Mock()
response.ok = False
self.requests.get = lambda url: response
func = RESOURCE_MAP['socks-proxy']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_socks_proxies_invalid_html(self):
with open(os.path.join(cwd, 'mock_pages', 'empty.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
func = RESOURCE_MAP['socks-proxy']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_ssl_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'ssl-proxy.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
expected = {
Proxy('172.16.31.10', '53281', 'br', 'brazil', True, 'https', 'ssl-proxy'),
Proxy('192.168.127.12', '8080', 'ua', 'ukraine', True, 'https', 'ssl-proxy'),
Proxy('192.168.3.11', '8080', 'ru', 'russian federation', False, 'https', 'ssl-proxy')
}
func = RESOURCE_MAP['ssl-proxy']
pr = ProxyResource(func, 10)
_, proxies = pr.refresh()
for proxy in proxies:
self.assertIn(proxy, expected)
def test_ssl_proxies_not_ok(self):
response = Mock()
response.ok = False
self.requests.get = lambda url: response
func = RESOURCE_MAP['ssl-proxy']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_ssl_proxies_invalid_html(self):
with open(os.path.join(cwd, 'mock_pages', 'empty.html'), 'r') as html:
response = Mock()
response.content = html
response.ok = True
self.requests.get = lambda url: response
func = RESOURCE_MAP['ssl-proxy']
pr = ProxyResource(func, 10)
refreshed, proxies = pr.refresh()
self.assertEqual(False, refreshed)
self.assertIsNone(proxies)
def test_uk_proxies_success(self):
with open(os.path.join(cwd, 'mock_pages', 'uk-proxy.html'), 'r') as html:
response = Mock()
response.content = html
| |
import os
import sys
import json
import functools
from traceback import format_exc
import mimetypes
import time
import itertools
from urllib.parse import urljoin
from .helpers import (
cached_property, WSGIFileWrapper, parse_range_header,
parse_date, html_escape, tob
)
from .radirouter import RadiRouter
from .request import Request, BaseRequest
from . import request_mixin
from .response import Response, HTTPResponse, HTTPError
from . import server_adapters
__version__ = "0.0.1"
# Verbs for which `with_method_shortcuts` injects shortcuts on Ombott
# (app.get, app.post, ...).
HTTP_METHODS = 'DELETE GET HEAD OPTIONS PATCH POST PUT'.split()
# Install the request mix-in methods onto the Request class at import time.
Request.mixin(request_mixin.mixin())
class Config:
    """Slot-only holder for process-wide settings.

    `domain_map`, when set, is a callable mapping a request's Host header
    to an application name (consulted in ``Ombott.wsgi``).
    """
    __slots__ = 'domain_map'
# Module-level singleton read by Ombott.wsgi.
config = Config()
class _closeiter:
''' This only exists to be able to attach a .close method to iterators that
do not support attribute assignment (most of itertools). '''
def __init__(self, iterator, close=None):
self.iterator = iterator
self.close_callbacks = close if isinstance(close, (list, tuple)) else [close]
def __iter__(self):
return iter(self.iterator)
def close(self):
[cb() for cb in self.close_callbacks]
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
        quiet=False, **kargs):
    """Start a server for *app* (or the default app) and block until Ctrl-C.

    :param app: WSGI callable; defaults to ``default_app()``.
    :param server: adapter name registered in ``server_adapters.server_names``,
        or an adapter class/factory itself.
    :param host, port: bind address.
    :param quiet: suppress the startup banner on stderr.
    :param kargs: forwarded to the server adapter.
    :raises ValueError: if *app* is not callable.
    """
    _stderr = sys.stderr.write
    try:
        app = app or default_app()
        if not callable(app):
            raise ValueError("Application is not callable: %r" % app)
        server_names = server_adapters.server_names
        # A known name is resolved to its adapter class; anything else is
        # assumed to already be an adapter factory.
        if server in server_names:
            server = server_names.get(server)
        server = server(host=host, port=port, **kargs)
        server.quiet = server.quiet or quiet
        if not server.quiet:
            _stderr("Ombott v%s server starting up (using %s)...\n" % (__version__, repr(server)))
            _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
            _stderr("Hit Ctrl-C to quit.\n\n")
        server.run(app)
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop a dev server; exit quietly.
        # (The original also had `except (SystemExit, MemoryError): raise`
        # and a bare `except: raise` — both no-ops, removed.)
        pass
def with_method_shortcuts(methods):
    """Class decorator adding one lower-cased shortcut per HTTP verb.

    Each shortcut (e.g. ``cls.get``) is a partialmethod of ``cls.route``
    with ``method`` pre-bound to the corresponding verb.
    """
    def injector(cls):
        for verb in methods:
            shortcut = functools.partialmethod(cls.route, method=verb)
            setattr(cls, verb.lower(), shortcut)
        return cls
    return injector
###############################################################################
# Application Object ###########################################################
###############################################################################
@with_method_shortcuts(HTTP_METHODS)
class Ombott:
    """WSGI application: routing, hooks, error handling, response casting.

    Instances are WSGI callables (see ``__call__``/``wsgi``). Verb
    shortcuts (``app.get``, ``app.post``, ...) are injected by the
    ``with_method_shortcuts`` decorator as partials of ``route``.
    """
    def __init__(self):
        # Router maps (method, path) -> handler; request/response are
        # re-initialized in place for every request (see _handle).
        self.router = RadiRouter()
        self.request = Request()
        self.response = Response()
        self._route_hooks = {}     # route pattern -> list of hook callables
        self.error_handler = {}    # HTTP status code -> handler callable
    def run(self, **kwargs):
        ''' Calls :func:`run` with the same parameters. '''
        run(self, **kwargs)
    def to_route(self, environ):
        """Resolve *environ* to ``(handler, kwargs, positional_values, hooks)``.

        Raises HTTPError when the router reports an error (e.g. no match).
        """
        verb = environ['REQUEST_METHOD'].upper()
        path = environ['PATH_INFO'] or '/'
        # HEAD falls back to GET handlers; 'ANY' routes match every method.
        if verb == 'HEAD':
            methods = [verb, 'GET', 'ANY']
        else:
            methods = [verb, 'ANY']
        tmp, error = self.router.get(path, methods)
        if error:
            raise HTTPError(*error)
        route, names, values, hooks = tmp
        param_values = []
        # NOTE: this comprehension keeps only named parameters in `params`
        # while *also* appending their values to `param_values` as a side
        # effect (`append(v) or True` keeps the filter truthy).
        params = {n: v for n, v in zip(names, values) if n and (param_values.append(v) or True)}
        return route, params, param_values, hooks
    def add_route(self, rule, method, handler, name = None):
        # Register `handler` for `rule`/`method`; `name` enables named lookup.
        self.router.add(rule, method, handler, name)
    def route(self, rule=None, method='GET', *, callback=None, name=None):
        """Register a handler; usable as a decorator or called directly
        with ``callback=``."""
        def decorator(callback):
            self.add_route(rule, method, callback, name)
            return callback
        return decorator(callback) if callback else decorator
    @property
    def routes(self):
        # All registered routes, as exposed by the router.
        return self.router.routes
    # --- application-level hooks ---------------------------------------
    __hook_names = 'before_request', 'after_request', 'app_reset', 'config'
    # Hooks whose callbacks run in reverse registration order.
    # NOTE(review): the membership test below uses `in` against this
    # *string*, so any hook name that is a substring of 'after_request'
    # (e.g. 'request') would also be treated as reversed — confirm intended.
    __hook_reversed = 'after_request'
    @cached_property
    def _hooks(self):
        # name -> list of callbacks; created lazily on first access.
        return dict((name, []) for name in self.__hook_names)
    def add_hook(self, name, func):
        ''' Attach a callback to a hook. Currently implemented hooks:
            before_request
                Executed once before each request. The request context is
                available, but no routing has happened yet.
            after_request
                Executed once after each request regardless of its outcome.
            'after_request' callbacks run in reverse registration order.
        '''
        if name in self.__hook_reversed:
            self._hooks[name].insert(0, func)
        else:
            self._hooks[name].append(func)
    def remove_hook(self, name, func):
        # Returns True when removed; None when func was not registered.
        if func in self._hooks[name]:
            self._hooks[name].remove(func)
            return True
    def emit(self, name, *args, **kwargs):
        # Fire all callbacks for `name`; iterating a copy ([:]) lets hooks
        # register/unregister hooks while the emission is in progress.
        [hook(*args, **kwargs) for hook in self._hooks[name][:]]
    def on(self, name, func = None):
        """Attach a hook; without `func` acts as a decorator factory."""
        if not func: # used as decorator
            def decorator(func):
                self.add_hook(name, func)
                return func
            return decorator
        else:
            self.add_hook(name, func)
    def add_route_hook(self, route, func = None):
        # Register with the router and mirror in our per-route bookkeeping.
        self.router.add_hook(route, func)
        if not (rhooks := self._route_hooks.get(route)):
            self._route_hooks[route] = [func]
        else:
            rhooks.append(func)
    def remove_route_hook(self, route, func = None):
        # Unregister from the router; removal from local bookkeeping is
        # best-effort (missing func is ignored).
        self.router.remove_hook(route, func)
        if not (rhooks := self._route_hooks.get(route)):
            return
        else:
            try:
                rhooks.remove(func)
            except ValueError:
                pass
    def on_route(self, route, func = None):
        """Attach a per-route hook; without `func` acts as a decorator."""
        if not func: # used as decorator
            def decorator(func):
                self.add_route_hook(route, func)
                return func
            return decorator
        else:
            self.add_route_hook(route, func)
    def error(self, code=500):
        """ Decorator: Register an output handler for a HTTP error code"""
        def wrapper(handler):
            self.error_handler[int(code)] = handler
            return handler
        return wrapper
    def default_error_handler(self, res):
        # Fallback error renderer: JSON body with exception and traceback.
        ret = json.dumps(dict(
            body = res.body,
            exception = repr(res.exception),
            traceback = res.traceback
        ))
        self.response.headers['Content-Type'] = 'application/json'
        return ret
    def _handle(self, environ):
        """Route and invoke the handler; returns its raw result or an
        HTTPResponse/HTTPError (never raises, except fatal signals)."""
        response = self.response
        request = self.request
        path = environ['ombott.raw_path'] = environ['PATH_INFO']
        try:
            # PEP 3333: the server hands PATH_INFO latin-1 decoded;
            # re-decode the raw bytes as UTF-8.
            environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
        except UnicodeError:
            return HTTPError(400, 'Invalid path string. Expected UTF-8')
        try: # init thread
            environ['ombott.app'] = self
            # Reset the shared request/response objects for this request.
            request.__init__(environ)
            response.__init__()
            try: # routing
                self.emit('before_request')
                route, args, values, route_hooks = self.to_route(environ)
                environ['ombott.route'] = route
                environ['route.url_args'] = args
                environ['route.hooks'] = route_hooks
                return route(**args)
            finally:
                # Runs even when the handler raised (incl. HTTPResponse).
                self.emit('after_request')
        except HTTPResponse as resp:
            # Handlers may raise responses (e.g. redirect()); not an error.
            return resp
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except Exception as err500:
            # Any other exception becomes a 500 carrying the traceback.
            stacktrace = format_exc()
            environ['wsgi.errors'].write(stacktrace)
            return HTTPError(500, "Internal Server Error", err500, stacktrace)
    def _cast(self, out):
        """ Try to convert the parameter into something WSGI compatible and set
        correct HTTP headers when possible.
        Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
        iterable of strings and iterable of unicodes
        """
        response = self.response
        resp_headers = response.headers
        request = self.request
        loops_cnt = 0
        # Each `continue` re-enters the loop with a simpler `out` until it
        # reduces to bytes, a file wrapper, or a byte iterator.
        while True: # <-------
            loops_cnt += 1
            if loops_cnt > 1000:
                # Safety valve against handlers that keep producing
                # convertible-but-never-final values.
                out = HTTPError(500, 'too many iterations')
                out.apply(response)
                out = self.default_error_handler(out)
            # Empty output is done here
            if not out:
                if 'Content-Length' not in resp_headers:
                    resp_headers['Content-Length'] = 0
                return []
            if isinstance(out, str):
                out = out.encode(response.charset)
            # Byte Strings are just returned
            if isinstance(out, bytes):
                if 'Content-Length' not in resp_headers:
                    resp_headers['Content-Length'] = len(out)
                return [out]
            if isinstance(out, HTTPError):
                out.apply(response)
                # Delegate to the registered handler for this status code;
                # its return value is re-cast on the next iteration.
                out = self.error_handler.get(
                    out.status_code,
                    self.default_error_handler
                )(out); continue # -----------------^
            if isinstance(out, HTTPResponse):
                out.apply(response)
                out = out.body; continue # -----------------^
            # File-like objects.
            if hasattr(out, 'read'):
                if 'wsgi.file_wrapper' in request.environ:
                    # Prefer the server's optimized file wrapper (sendfile).
                    return request.environ['wsgi.file_wrapper'](out)
                elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
                    return WSGIFileWrapper(out)
            # Handle Iterables. We peek into them to detect their inner type.
            try:
                iout = iter(out)
                # Skip leading falsy chunks ('' / b'') to find the first
                # meaningful element.
                while not (first := next(iout)):
                    pass
            except StopIteration:
                out = ''; continue # -----------------^
            except HTTPResponse as rs:
                # A generator may raise a response mid-iteration.
                first = rs
            except (KeyboardInterrupt, SystemExit, MemoryError):
                raise
            except Exception as err500:
                # NOTE(review): bottle's `catchall` opt-out was dropped here;
                # iterator errors are always converted to a 500.
                first = HTTPError(500, 'Unhandled exception', err500, format_exc())
            # These are the inner types allowed in iterator or generator objects.
            if isinstance(first, HTTPResponse):
                out = first; continue # -----------------^
            elif isinstance(first, bytes):
                # Re-attach the peeked element in front of the rest.
                new_iter = itertools.chain([first], iout)
            elif isinstance(first, str):
                encoder = lambda x: x.encode(response.charset)
                new_iter = map(encoder, itertools.chain([first], iout))
            else:
                out = HTTPError(500, f'Unsupported response type: {type(first)}')
                continue # -----------------^
            if hasattr(out, 'close'):
                # Preserve the original iterable's close() (PEP 3333 cleanup).
                new_iter = _closeiter(new_iter, out.close)
            return new_iter
    def wsgi(self, environ, start_response):
        """The WSGI entry point: handle, cast, and send the response."""
        # Optional multi-app dispatch: map the requested host to an app
        # name and prefix it onto PATH_INFO (py4web integration).
        if (domain_map := getattr(config, 'domain_map', None)):
            if (app_name := domain_map(environ.get('HTTP_X_FORWARDED_HOST') or environ.get('HTTP_HOST'))):
                environ["HTTP_X_PY4WEB_APPNAME"] = '/' + app_name
                environ["PATH_INFO"] = '/' + app_name + environ["PATH_INFO"]
        response = self.response
        try:
            out = self._cast(self._handle(environ))
            # rfc2616 section 4.3: these responses carry no message body.
            if response._status_code in (100, 101, 204, 304) \
            or environ['REQUEST_METHOD'] == 'HEAD':
                if hasattr(out, 'close'):
                    out.close()
                out = []
            start_response(response._status_line, response.headerlist)
            return out
        except (KeyboardInterrupt, SystemExit, MemoryError):
            raise
        except Exception as _e:
            # Last-resort handler for failures inside _cast/start_response
            # themselves; renders a minimal HTML 500 page.
            err = '<h1>Critical error while processing request: %s</h1>' \
                % html_escape(environ.get('PATH_INFO', '/'))
            if True: # DEBUG: FIX ME
                err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
                       '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
                       % (html_escape(repr(_e)), html_escape(format_exc()))
            environ['wsgi.errors'].write(err)
            headers = [('Content-Type', 'text/html; charset=UTF-8')]
            # Passing exc_info tells the server headers may be re-sent.
            start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
            return [tob(err)]
    def __call__(self, environ, start_response):
        # WSGI protocol: the application object itself is the callable.
        return self.wsgi(environ, start_response)
###############################################################################
# Application Helper ###########################################################
###############################################################################
def abort(code=500, text='Unknown Error.'):
    """Abort request processing by raising an :class:`HTTPError`.

    :param code: HTTP status code to report (default 500).
    :param text: human-readable error text.
    """
    raise HTTPError(code, text)
def redirect(location, code=None):
    """ Aborts execution and causes a 303 or 302 redirect, depending on
    the HTTP protocol version.

    (Fixed: the original placed this docstring *after* a statement, so it
    was a discarded expression and ``redirect.__doc__`` was None; the
    ``url = location`` alias was also pointless and has been removed.)

    :param location: target URL, absolute or relative to the current URL.
    :param code: explicit status code; default 303 under HTTP/1.1, else 302.
    :raises HTTPResponse: always — the redirect response itself.
    """
    if not code:
        code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
    res = response.copy(cls=HTTPResponse)
    res.status = code
    res.body = ""
    res.set_header('Location', urljoin(request.url, location))
    raise res
def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
""" Open a file in a safe way and return :exc:`HTTPResponse` with status
code 200, 305, 403 or 404. The ``Content-Type``, ``Content-Encoding``,
``Content-Length`` and ``Last-Modified`` headers are set if possible.
Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
requests.
:param filename: Name or path of the file to send.
:param root: Root path for file lookups. Should be an absolute directory
path.
:param mimetype: Defines the content-type header (default: guess from
file extension)
:param download: If True, ask the browser to open a `Save as...` dialog
instead of opening the file with the associated program. You can
specify a custom filename as a string. If not specified, the
original filename is used (default: False).
:param charset: The charset to use for files with a ``text/*``
mime-type. (default: UTF-8)
"""
def _file_iter_range(fp, offset, bytes, maxread = 1024 * 1024):
''' Yield chunks from a range in a file. No chunk is bigger than maxread.'''
fp.seek(offset)
while bytes > 0 and | |
# Repository: liamchalcroft/nitorch
# -*- coding: utf-8 -*-
"""Spatial deformations (i.e., grids)."""
import torch
from nitorch.core import utils, linalg
from nitorch.core.utils import expand, make_vector
from nitorch.core.py import make_list, prod
from nitorch._C.spatial import BoundType, InterpolationType
from nitorch._C.grid import GridPull, GridPush, GridCount, GridGrad
from ._affine import affine_resize, affine_lmdiv
from ._regularisers import solve_grid_sym
from ._finite_differences import diff
# Explicit public API for `from ... import *`.
__all__ = ['grid_pull', 'grid_push', 'grid_count', 'grid_grad', 'grid_inv',
           'identity_grid', 'affine_grid', 'resize', 'resize_grid', 'reslice',
           'grid_jacobian', 'grid_jacdet',
           'BoundType', 'InterpolationType']
_doc_interpolation = \
"""`interpolation` can be an int, a string or an InterpolationType.
Possible values are:
- 0 or 'nearest' or InterpolationType.nearest
- 1 or 'linear' or InterpolationType.linear
- 2 or 'quadratic' or InterpolationType.quadratic
- 3 or 'cubic' or InterpolationType.cubic
- 4 or 'fourth' or InterpolationType.fourth
- etc.
A list of values can be provided, in the order [W, H, D],
to specify dimension-specific interpolation orders."""
_doc_bound = \
"""`bound` can be an int, a string or a BoundType.
Possible values are:
- 'replicate' or BoundType.replicate
- 'dct1' or BoundType.dct1
- 'dct2' or BoundType.dct2
- 'dst1' or BoundType.dst1
- 'dst2' or BoundType.dst2
- 'dft' or BoundType.dft
- 'zero' or BoundType.zero
A list of values can be provided, in the order [W, H, D],
to specify dimension-specific boundary conditions.
Note that
- `dft` corresponds to circular padding
- `dct2` corresponds to Neumann boundary conditions (symmetric)
- `dst2` corresponds to Dirichlet boundary conditions (antisymmetric)
See https://en.wikipedia.org/wiki/Discrete_cosine_transform
https://en.wikipedia.org/wiki/Discrete_sine_transform
"""
def grid_pull(input, grid, interpolation='linear', bound='zero', extrapolate=False):
    """Sample an image with respect to a deformation field.
    Notes
    -----
    {interpolation}
    {bound}
    If the input dtype is not a floating point type, the input image is
    assumed to contain labels. Then, unique labels are extracted
    and resampled individually, making them soft labels. Finally,
    the label map is reconstructed from the individual soft labels by
    assigning the label with maximum soft value.
    Parameters
    ----------
    input : ([batch], [channel], *inshape) tensor
        Input image.
    grid : ([batch], *outshape, dim) tensor
        Transformation field.
    interpolation : int or sequence[int], default=1
        Interpolation order.
    bound : BoundType or sequence[BoundType], default='zero'
        Boundary conditions.
    extrapolate : bool or int, default=False
        Extrapolate out-of-bound data.
    Returns
    -------
    output : ([batch], [channel], *outshape) tensor
        Deformed image.
    """
    # Broadcast
    dim = grid.shape[-1]
    # `<` (rather than `==`) so that a channel-less input also counts as
    # batch-less; the added axes are stripped again before returning.
    input_no_batch = input.dim() < dim + 2
    input_no_channel = input.dim() == dim
    grid_no_batch = grid.dim() == dim + 1
    if input_no_channel:
        input = input[None, None]
    elif input_no_batch:
        input = input[None]
    if grid_no_batch:
        grid = grid[None]
    batch = max(input.shape[0], grid.shape[0])
    channel = input.shape[1]
    input = expand(input, [batch, *input.shape[1:]])
    grid = expand(grid, [batch, *grid.shape[1:]])
    is_label = not utils.dtypes.dtype(input.dtype).is_floating_point
    if is_label:
        # label map -> specific processing
        # Pull each label's indicator image separately and keep, per voxel,
        # the label whose soft (interpolated) value is largest.
        out = input.new_zeros([batch, channel, *grid.shape[1:-1]])
        pmax = grid.new_zeros([batch, channel, *grid.shape[1:-1]])
        for label in input.unique():
            soft = (input == label).to(grid.dtype)
            soft = expand(soft, [batch, *input.shape[1:]])
            soft = GridPull.apply(soft, grid, interpolation, bound, extrapolate)
            out[soft > pmax] = label
            pmax = torch.max(pmax, soft)
    else:
        # NOTE(review): `input` was already expanded above, so this repeat
        # appears to be a no-op — confirm before removing.
        input = expand(input, [batch, *input.shape[1:]])
        out = GridPull.apply(input, grid, interpolation, bound, extrapolate)
    # Strip the axes added during broadcasting.
    if input_no_channel:
        out = out[:, 0]
    if input_no_batch and grid_no_batch:
        out = out[0]
    return out
def grid_push(input, grid, shape=None, interpolation='linear', bound='zero',
              extrapolate=False):
    """Splat an image with respect to a deformation field (pull adjoint).
    Notes
    -----
    {interpolation}
    {bound}
    Parameters
    ----------
    input : ([batch], [channel], *inshape) tensor
        Input image.
    grid : ([batch], *inshape, dim) tensor
        Transformation field.
    shape : sequence[int], default=inshape
        Output shape
    interpolation : int or sequence[int], default=1
        Interpolation order.
    bound : BoundType, or sequence[BoundType], default='zero'
        Boundary conditions.
    extrapolate : bool or int, default=False
        Extrapolate out-of-bound data.
    Returns
    -------
    output : ([batch], [channel], *shape) tensor
        Splatted image.
    """
    # Broadcast
    dim = grid.shape[-1]
    input_no_batch = input.dim() == dim + 1
    input_no_channel = input.dim() == dim
    grid_no_batch = grid.dim() == dim + 1
    if input_no_channel:
        input = input[None, None]
    elif input_no_batch:
        input = input[None]
    if grid_no_batch:
        grid = grid[None]
    batch = max(input.shape[0], grid.shape[0])
    channel = input.shape[1]
    ndims = grid.shape[-1]
    input_shape = input.shape[2:]
    grid_shape = grid.shape[1:-1]
    # Broadcast the spatial dimensions of `input` and `grid` to a common size.
    spatial = [max(sinp, sgrd) for sinp, sgrd in zip(input_shape, grid_shape)]
    input = expand(input, [batch, channel, *spatial])
    grid = expand(grid, [batch, *spatial, ndims])
    if shape is None:
        # Default: splat into a volume with the input's spatial size.
        shape = tuple(input.shape[2:])
    out = GridPush.apply(input, grid, shape, interpolation, bound, extrapolate)
    # Strip the axes added during broadcasting.
    if input_no_channel:
        out = out[:, 0]
    if input_no_batch and grid_no_batch:
        out = out[0]
    return out
def grid_count(grid, shape=None, interpolation='linear', bound='zero',
               extrapolate=False):
    """Splatting weights with respect to a deformation field (pull adjoint).
    Notes
    -----
    {interpolation}
    {bound}
    Parameters
    ----------
    grid : ([batch], *inshape, dim) tensor
        Transformation field.
    shape : sequence[int], default=inshape
        Output shape
    interpolation : int or sequence[int], default=1
        Interpolation order.
    bound : BoundType, or sequence[BoundType], default='zero'
        Boundary conditions.
    extrapolate : bool or int, default=False
        Extrapolate out-of-bound data.
    Returns
    -------
    output : ([batch], 1, *shape) tensor
        Splatting weights.
    """
    dim = grid.shape[-1]
    grid_no_batch = grid.dim() == dim + 1
    if grid_no_batch:
        grid = grid[None]
    if shape is None:
        # Default: count into a volume with the grid's spatial size.
        shape = tuple(grid.shape[1:-1])
    out = GridCount.apply(grid, shape, interpolation, bound, extrapolate)
    # Strip the batch axis added above.
    if grid_no_batch:
        out = out[0]
    return out
def grid_grad(input, grid, interpolation='linear', bound='zero',
              extrapolate=False):
    """Sample spatial gradients of an image with respect to a deformation field.
    Notes
    -----
    {interpolation}
    {bound}
    Parameters
    ----------
    input : ([batch], [channel], *inshape) tensor
        Input image.
    grid : ([batch], *inshape, dim) tensor
        Transformation field.
    interpolation : int or sequence[int], default=1
        Interpolation order.
    bound : BoundType, or sequence[BoundType], default='zero'
        Boundary conditions.
    extrapolate : bool or int, default=False
        Extrapolate out-of-bound data.
    Returns
    -------
    output : ([batch], [channel], *inshape, dim) tensor
        Sampled gradients.
    """
    # Broadcast
    dim = grid.shape[-1]
    input_no_batch = input.dim() == dim + 1
    input_no_channel = input.dim() == dim
    grid_no_batch = grid.dim() == dim + 1
    if input_no_channel:
        input = input[None, None]
    elif input_no_batch:
        input = input[None]
    if grid_no_batch:
        grid = grid[None]
    batch = max(input.shape[0], grid.shape[0])
    input = expand(input, [batch, *input.shape[1:]])
    grid = expand(grid, [batch, *grid.shape[1:]])
    out = GridGrad.apply(input, grid, interpolation, bound, extrapolate)
    # Strip the axes added during broadcasting.
    if input_no_channel:
        out = out[:, 0]
    if input_no_batch and grid_no_batch:
        out = out[0]
    return out
# Inject the shared interpolation/bound documentation into each docstring
# (the docstrings contain {interpolation} and {bound} placeholders).
grid_pull.__doc__ = grid_pull.__doc__.format(
    interpolation=_doc_interpolation, bound=_doc_bound)
grid_push.__doc__ = grid_push.__doc__.format(
    interpolation=_doc_interpolation, bound=_doc_bound)
grid_count.__doc__ = grid_count.__doc__.format(
    interpolation=_doc_interpolation, bound=_doc_bound)
grid_grad.__doc__ = grid_grad.__doc__.format(
    interpolation=_doc_interpolation, bound=_doc_bound)
# aliases
# NOTE(review): no `grad = grid_grad` alias, unlike pull/push/count.
pull = grid_pull
push = grid_push
count = grid_count
def identity_grid(shape, dtype=None, device=None, jitter=False):
    """Returns an identity deformation field.
    Parameters
    ----------
    shape : (dim,) sequence of int
        Spatial dimension of the field.
    dtype : torch.dtype, default=`get_default_dtype()`
        Data type.
    device : torch.device, optional
        Device.
    jitter : bool or 'reproducible', default=False
        Jitter identity grid with uniform noise in [-0.5, 0.5).
    Returns
    -------
    grid : (*shape, dim) tensor
        Transformation field
    """
    # One coordinate axis per spatial dimension, then stack into (*shape, dim)
    # so that grid[i, j, ...] == (i, j, ...).
    axes = [torch.arange(float(size), dtype=dtype, device=device)
            for size in shape]
    grid = torch.stack(torch.meshgrid(*axes), dim=-1)
    if jitter:
        reproducible = jitter == 'reproducible'
        device_ids = [grid.device.index] if grid.device.type == 'cuda' else None
        # Fork the RNG state only in the reproducible case, so that seeding
        # with 0 below does not disturb the caller's random stream.
        with torch.random.fork_rng(device_ids, enabled=reproducible):
            if reproducible:
                torch.manual_seed(0)
            grid += torch.rand_like(grid)
        grid -= 0.5
    return grid
def affine_grid(mat, shape, jitter=False):
    """Create a dense transformation grid from an affine matrix.
    Parameters
    ----------
    mat : (..., D[+1], D[+1]) tensor
        Affine matrix (or matrices).
    shape : (D,) sequence[int]
        Shape of the grid, with length D.
    jitter : bool or 'reproducible', default=False
        Jitter identity grid.
    Returns
    -------
    grid : (..., *shape, D) tensor
        Dense transformation grid
    Raises
    ------
    ValueError
        If `mat` and `shape` disagree on D, or `mat` is not of shape
        (..., D, D+1) or (..., D+1, D+1).
    """
    mat = torch.as_tensor(mat)
    shape = list(shape)
    nb_dim = mat.shape[-1] - 1
    if nb_dim != len(shape):
        raise ValueError('Dimension of the affine matrix ({}) and shape ({}) '
                         'are not the same.'.format(nb_dim, len(shape)))
    if mat.shape[-2] not in (nb_dim, nb_dim+1):
        # BUG FIX: the original format string contained '{1]' (mismatched
        # bracket), so .format() itself raised instead of producing the
        # intended message; 'matrces' typo also fixed.
        raise ValueError('First argument should be matrices of shape '
                         '(..., {0}, {1}) or (..., {1}, {1}) but got {2}.'
                         .format(nb_dim, nb_dim+1, mat.shape))
    batch_shape = mat.shape[:-2]
    grid = identity_grid(shape, mat.dtype, mat.device, jitter=jitter)
    if batch_shape:
        # Broadcast: add batch axes to the grid, spatial axes to the matrix.
        grid = utils.unsqueeze(grid, dim=0, ndim=len(batch_shape))
        mat = utils.unsqueeze(mat, dim=-3, ndim=nb_dim)
    # Apply the linear part to each coordinate, then add the translation.
    lin = mat[..., :nb_dim, :nb_dim]
    off = mat[..., :nb_dim, -1]
    grid = linalg.matvec(lin, grid) + off
    return grid
def resize(image, factor=None, shape=None, affine=None, anchor='c',
*args, **kwargs):
"""Resize an image by a factor or to a specific shape.
Notes
-----
.. A least one of `factor` and `shape` must be specified
.. If `anchor in ('centers', 'edges')`, and both `factor` and `shape`
are specified, `factor` is discarded.
.. If `anchor in ('first', 'last')`, `factor` must be provided even
if `shape` is specified.
.. Because of rounding, it is in general not assured that
`resize(resize(x, f), 1/f)` returns a tensor with | |
central_longitude=0.0, central_latitude=0.0,
false_easting=0, false_northing=0, globe=None,
sweep_axis=None):
proj4_params = [('proj', projection), ('lon_0', central_longitude),
('lat_0', central_latitude), ('h', satellite_height),
('x_0', false_easting), ('y_0', false_northing),
('units', 'm')]
if sweep_axis:
proj4_params.append(('sweep', sweep_axis))
super(_Satellite, self).__init__(proj4_params, globe=globe)
def _set_boundary(self, coords):
self._boundary = sgeom.LinearRing(coords.T)
mins = np.min(coords, axis=1)
maxs = np.max(coords, axis=1)
self._x_limits = mins[0], maxs[0]
self._y_limits = mins[1], maxs[1]
self._threshold = np.diff(self._x_limits)[0] * 0.02
    @property
    def boundary(self):
        """The projection outline ring, as computed by ``_set_boundary``."""
        return self._boundary
    @property
    def threshold(self):
        """Resolution threshold (2% of the x extent, see ``_set_boundary``)."""
        return self._threshold
    @property
    def x_limits(self):
        """Tuple of (min, max) x coordinate of the boundary."""
        return self._x_limits
    @property
    def y_limits(self):
        """Tuple of (min, max) y coordinate of the boundary."""
        return self._y_limits
class Geostationary(_Satellite):
    """
    A view appropriate for satellites in Geostationary Earth orbit.

    Perspective view looking directly down from above a point on the equator.
    In this projection, the projected coordinates are scanning angles measured
    from the satellite looking directly downward, multiplied by the height of
    the satellite.
    """

    def __init__(self, central_longitude=0.0, satellite_height=35785831,
                 false_easting=0, false_northing=0, globe=None,
                 sweep_axis='y'):
        """
        Parameters
        ----------
        central_longitude: float, optional
            The central longitude. Defaults to 0.
        satellite_height: float, optional
            The height of the satellite. Defaults to 35785831 meters
            (true geostationary orbit).
        false_easting:
            X offset from planar origin in metres. Defaults to 0.
        false_northing:
            Y offset from planar origin in metres. Defaults to 0.
        globe: :class:`cartopy.crs.Globe`, optional
            If omitted, a default globe is created.
        sweep_axis: 'x' or 'y', optional. Defaults to 'y'.
            Controls which axis is scanned first, and thus which angle is
            applied first. The default is appropriate for Meteosat, while
            'x' should be used for GOES.
        """
        super(Geostationary, self).__init__(
            projection='geos',
            satellite_height=satellite_height,
            central_longitude=central_longitude,
            central_latitude=0.0,
            false_easting=false_easting,
            false_northing=false_northing,
            globe=globe,
            sweep_axis=sweep_axis)

        # TODO: Let the globe return the semimajor axis always.
        # Use the builtin float: np.float was a deprecated alias of it and
        # was removed in NumPy 1.24, where it raises AttributeError.
        a = float(self.globe.semimajor_axis or WGS84_SEMIMAJOR_AXIS)
        h = float(satellite_height)

        # These are only exact for a spherical Earth, owing to assuming a is
        # constant. Handling elliptical would be much harder for this.
        sin_max_th = a / (a + h)
        tan_max_th = a / np.sqrt((a + h) ** 2 - a ** 2)

        # Using Napier's rules for right spherical triangles
        # See R2 and R6 (x and y coords are h * b and h * a, respectively):
        # https://en.wikipedia.org/wiki/Spherical_trigonometry
        t = np.linspace(0, -2 * np.pi, 61)  # Clockwise boundary.
        coords = np.vstack([np.arctan(tan_max_th * np.cos(t)),
                            np.arcsin(sin_max_th * np.sin(t))])
        coords *= h
        coords += np.array([[false_easting], [false_northing]])
        self._set_boundary(coords)
class NearsidePerspective(_Satellite):
    """
    Perspective view looking directly down from above a point on the globe.

    In this projection, the projected coordinates are x and y measured from
    the origin of a plane tangent to the Earth directly below the perspective
    point (e.g. a satellite).
    """

    _handles_ellipses = False

    def __init__(self, central_longitude=0.0, central_latitude=0.0,
                 satellite_height=35785831,
                 false_easting=0, false_northing=0, globe=None):
        """
        Parameters
        ----------
        central_longitude: float, optional
            The central longitude. Defaults to 0.
        central_latitude: float, optional
            The central latitude. Defaults to 0.
        satellite_height: float, optional
            The height of the satellite. Defaults to 35785831 meters
            (true geostationary orbit).
        false_easting:
            X offset from planar origin in metres. Defaults to 0.
        false_northing:
            Y offset from planar origin in metres. Defaults to 0.
        globe: :class:`cartopy.crs.Globe`, optional
            If omitted, a default globe is created.

        .. note::
            This projection does not handle elliptical globes.

        """
        super(NearsidePerspective, self).__init__(
            projection='nsper',
            satellite_height=satellite_height,
            central_longitude=central_longitude,
            central_latitude=central_latitude,
            false_easting=false_easting,
            false_northing=false_northing,
            globe=globe)

        # TODO: Let the globe return the semimajor axis always.
        a = self.globe.semimajor_axis or WGS84_SEMIMAJOR_AXIS
        # Use the builtin float: np.float was a deprecated alias of it and
        # was removed in NumPy 1.24, where it raises AttributeError.
        h = float(satellite_height)
        # Radius of the circular boundary of the visible disc in projected
        # coordinates.
        max_x = a * np.sqrt(h / (2 * a + h))
        coords = _ellipse_boundary(max_x, max_x,
                                   false_easting, false_northing, 61)
        self._set_boundary(coords)
class AlbersEqualArea(Projection):
    """
    An Albers Equal Area projection

    This projection is conic and equal-area, and is commonly used for maps of
    the conterminous United States.
    """

    def __init__(self, central_longitude=0.0, central_latitude=0.0,
                 false_easting=0.0, false_northing=0.0,
                 standard_parallels=(20.0, 50.0), globe=None):
        """
        Parameters
        ----------
        central_longitude: optional
            The central longitude. Defaults to 0.
        central_latitude: optional
            The central latitude. Defaults to 0.
        false_easting: optional
            X offset from planar origin in metres. Defaults to 0.
        false_northing: optional
            Y offset from planar origin in metres. Defaults to 0.
        standard_parallels: optional
            The one or two latitudes of correct scale. Defaults to (20, 50).
        globe: optional
            A :class:`cartopy.crs.Globe`. If omitted, a default globe is
            created.

        """
        proj4_params = [('proj', 'aea'),
                        ('lon_0', central_longitude),
                        ('lat_0', central_latitude),
                        ('x_0', false_easting),
                        ('y_0', false_northing)]
        # standard_parallels may be a 2-sequence, a 1-sequence or a bare
        # number; EAFP indexing distinguishes the three cases.
        if standard_parallels is not None:
            try:
                proj4_params.append(('lat_1', standard_parallels[0]))
                try:
                    proj4_params.append(('lat_2', standard_parallels[1]))
                except IndexError:
                    # Only one standard parallel was given.
                    pass
            except TypeError:
                # Not indexable: a single scalar standard parallel.
                proj4_params.append(('lat_1', standard_parallels))

        super(AlbersEqualArea, self).__init__(proj4_params, globe=globe)

        # bounds
        # Build the boundary by tracing along lat=90 west-to-east, back
        # along lat=-90 east-to-west, then closing the ring.
        minlon, maxlon = self._determine_longitude_bounds(central_longitude)
        n = 103
        lons = np.empty(2 * n + 1)
        lats = np.empty(2 * n + 1)
        tmp = np.linspace(minlon, maxlon, n)
        lons[:n] = tmp
        lats[:n] = 90
        lons[n:-1] = tmp[::-1]
        lats[n:-1] = -90
        lons[-1] = lons[0]  # close the ring
        lats[-1] = lats[0]

        points = self.transform_points(self.as_geodetic(), lons, lats)
        self._boundary = sgeom.LinearRing(points)
        mins = np.min(points, axis=0)
        maxs = np.max(points, axis=0)
        self._x_limits = mins[0], maxs[0]
        self._y_limits = mins[1], maxs[1]

    @property
    def boundary(self):
        """The projection outline ring built in ``__init__``."""
        return self._boundary

    @property
    def threshold(self):
        """Fixed resolution threshold of 1e5 metres."""
        return 1e5

    @property
    def x_limits(self):
        """Tuple of (min, max) x coordinate of the boundary."""
        return self._x_limits

    @property
    def y_limits(self):
        """Tuple of (min, max) y coordinate of the boundary."""
        return self._y_limits
class AzimuthalEquidistant(Projection):
    """
    An Azimuthal Equidistant projection

    This projection provides accurate angles about and distances through the
    central position. Other angles, distances, or areas may be distorted.
    """

    def __init__(self, central_longitude=0.0, central_latitude=0.0,
                 false_easting=0.0, false_northing=0.0,
                 globe=None):
        """
        Parameters
        ----------
        central_longitude: optional
            The true longitude of the central meridian in degrees.
            Defaults to 0.
        central_latitude: optional
            The true latitude of the planar origin in degrees.
            Defaults to 0.
        false_easting: optional
            X offset from the planar origin in metres. Defaults to 0.
        false_northing: optional
            Y offset from the planar origin in metres. Defaults to 0.
        globe: optional
            An instance of :class:`cartopy.crs.Globe`. If omitted, a default
            globe is created.

        """
        # Warn when using Azimuthal Equidistant with proj < 4.9.2 due to
        # incorrect transformation past 90 deg distance (see
        # https://github.com/OSGeo/proj.4/issues/246).
        if PROJ4_VERSION != ():
            if PROJ4_VERSION < (4, 9, 2):
                warnings.warn('The Azimuthal Equidistant projection in Proj '
                              'older than 4.9.2 incorrectly transforms points '
                              'farther than 90 deg from the origin. Use this '
                              'projection with caution.',
                              stacklevel=2)
        else:
            warnings.warn('Cannot determine Proj version. The Azimuthal '
                          'Equidistant projection may be unreliable and '
                          'should be used with caution.',
                          stacklevel=2)

        proj4_params = [('proj', 'aeqd'), ('lon_0', central_longitude),
                        ('lat_0', central_latitude),
                        ('x_0', false_easting), ('y_0', false_northing)]
        super(AzimuthalEquidistant, self).__init__(proj4_params, globe=globe)

        # TODO: Let the globe return the semimajor axis always.
        # Use the builtin float: np.float was a deprecated alias of it and
        # was removed in NumPy 1.24, where it raises AttributeError.
        a = float(self.globe.semimajor_axis or WGS84_SEMIMAJOR_AXIS)
        b = float(self.globe.semiminor_axis or a)

        coords = _ellipse_boundary(a * np.pi, b * np.pi,
                                   false_easting, false_northing, 61)
        self._boundary = sgeom.LinearRing(coords.T)
        mins = np.min(coords, axis=1)
        maxs = np.max(coords, axis=1)
        self._x_limits = mins[0], maxs[0]
        self._y_limits = mins[1], maxs[1]

    @property
    def boundary(self):
        """The projection outline ring built in ``__init__``."""
        return self._boundary

    @property
    def threshold(self):
        """Fixed resolution threshold of 1e5 metres."""
        return 1e5

    @property
    def x_limits(self):
        """Tuple of (min, max) x coordinate of the boundary."""
        return self._x_limits

    @property
    def y_limits(self):
        """Tuple of (min, max) y coordinate of the boundary."""
        return self._y_limits
class Sinusoidal(Projection):
    """
    A Sinusoidal projection.

    This projection is equal-area.
    """

    def __init__(self, central_longitude=0.0, false_easting=0.0,
                 false_northing=0.0, globe=None):
        """
        Parameters
        ----------
        central_longitude: optional
            The central longitude. Defaults to 0.
        false_easting: optional
            X offset from planar origin in metres. Defaults to 0.
        false_northing: optional
            Y offset from planar origin in metres. Defaults to 0.
        globe: optional
            A :class:`cartopy.crs.Globe`. If omitted, a default globe is
            created.

        """
        proj4_params = [('proj', 'sinu'),
                        ('lon_0', central_longitude),
                        ('x_0', false_easting),
                        ('y_0', false_northing)]
        super(Sinusoidal, self).__init__(proj4_params, globe=globe)

        # Obtain boundary points: up the western edge, down the eastern
        # edge, then close the ring at the starting corner.
        # (Removed a dead ``points = []`` that was immediately overwritten
        # by the transform_points() call below.)
        minlon, maxlon = self._determine_longitude_bounds(central_longitude)
        n = 91
        lon = np.empty(2 * n + 1)
        lat = np.empty(2 * n + 1)
        lon[:n] = minlon
        lat[:n] = np.linspace(-90, 90, n)
        lon[n:2 * n] = maxlon
        lat[n:2 * n] = np.linspace(90, -90, n)
        lon[-1] = minlon
        lat[-1] = -90
        points = self.transform_points(self.as_geodetic(), lon, lat)

        self._boundary = sgeom.LinearRing(points)
        mins = np.min(points, axis=0)
        maxs = np.max(points, axis=0)
        self._x_limits = mins[0], maxs[0]
        self._y_limits = mins[1], maxs[1]
        # Tuple concatenation: the largest absolute coordinate over both
        # axes scales the resolution threshold.
        self._threshold = max(np.abs(self.x_limits + self.y_limits)) * 1e-5

    @property
    def boundary(self):
        """The projection outline ring built in ``__init__``."""
        return self._boundary

    @property
    def threshold(self):
        """Resolution threshold derived from the projection extents."""
        return self._threshold

    @property
    def x_limits(self):
        """Tuple of (min, max) x coordinate of the boundary."""
        return self._x_limits

    @property
    def y_limits(self):
        """Tuple of (min, max) y coordinate of the boundary."""
        return self._y_limits
# MODIS data products use a Sinusoidal projection of a spherical Earth
# https://modis-land.gsfc.nasa.gov/GCTP.html
# Attached as a class attribute so callers can use ``Sinusoidal.MODIS``
# directly; the sphere radius is the MODIS authalic radius (6371007.181 m).
Sinusoidal.MODIS = Sinusoidal(globe=Globe(ellipse=None,
                                          semimajor_axis=6371007.181,
                                          semiminor_axis=6371007.181))
class EquidistantConic(Projection):
"""
An Equidistant Conic projection.
This projection is conic and equidistant, and the scale is true along all
meridians and along one or two specified standard parallels.
"""
def __init__(self, central_longitude=0.0, central_latitude=0.0,
false_easting=0.0, false_northing=0.0,
standard_parallels=(20.0, 50.0), globe=None):
"""
Parameters
----------
central_longitude: optional
The central longitude. Defaults to 0.
central_latitude: optional
The true latitude of the | |
k in range(nlay):
# for i in range(self.nrow):
# for j in range(self.ncol):
# if ibound[k, i, j] == 0:
# continue
#
# ivert = []
# pts = self.get_vertices(i, j)
# pt0, pt1, pt2, pt3, pt0 = pts
#
# z = bot[k, i, j]
#
# verts[ipoint, 0:2] = np.array(pt1)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# verts[ipoint, 0:2] = np.array(pt2)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# verts[ipoint, 0:2] = np.array(pt0)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# verts[ipoint, 0:2] = np.array(pt3)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# z = top[k, i, j]
#
# verts[ipoint, 0:2] = np.array(pt1)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# verts[ipoint, 0:2] = np.array(pt2)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# verts[ipoint, 0:2] = np.array(pt0)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# verts[ipoint, 0:2] = np.array(pt3)
# verts[ipoint, 2] = z
# ivert.append(ipoint)
# ipoint += 1
#
# iverts.append(ivert)
#
# return verts, iverts
# class EpsgRef:
# """
# Sets up a local database of text representations of coordinate reference
# systems, keyed by EPSG code.
#
# The database is epsgref.json, located in the user's data directory. If
# optional 'appdirs' package is available, this is in the platform-dependent
# user directory, otherwise in the user's 'HOME/.flopy' directory.
# """
#
# def __init__(self):
# try:
# from appdirs import user_data_dir
# except ImportError:
# user_data_dir = None
# if user_data_dir:
# datadir = user_data_dir('flopy')
# else:
# # if appdirs is not installed, use user's home directory
# datadir = os.path.join(os.path.expanduser('~'), '.flopy')
# if not os.path.isdir(datadir):
# os.makedirs(datadir)
# dbname = 'epsgref.json'
# self.location = os.path.join(datadir, dbname)
#
# def to_dict(self):
# """
# Returns dict with EPSG code integer key, and WKT CRS text
# """
# data = OrderedDict()
# if os.path.exists(self.location):
# with open(self.location, 'r') as f:
# loaded_data = json.load(f, object_pairs_hook=OrderedDict)
# # convert JSON key from str to EPSG integer
# for key, value in loaded_data.items():
# try:
# data[int(key)] = value
# except ValueError:
# data[key] = value
# return data
#
# def _write(self, data):
# with open(self.location, 'w') as f:
# json.dump(data, f, indent=0)
# f.write('\n')
#
# def reset(self, verbose=True):
# if os.path.exists(self.location):
# os.remove(self.location)
# if verbose:
# print('Resetting {}'.format(self.location))
#
# def add(self, epsg, prj):
# """
# add an epsg code to epsgref.json
# """
# data = self.to_dict()
# data[epsg] = prj
# self._write(data)
#
# def get(self, epsg):
# """
# returns prj from a epsg code, otherwise None if not found
# """
# data = self.to_dict()
# return data.get(epsg)
#
# def remove(self, epsg):
# """
# removes an epsg entry from epsgref.json
# """
# data = self.to_dict()
# if epsg in data:
# del data[epsg]
# self._write(data)
#
# @staticmethod
# def show():
# ep = EpsgRef()
# prj = ep.to_dict()
# for k, v in prj.items():
# print('{}:\n{}\n'.format(k, v))
# class CRS(object):
# """
# Container to parse and store coordinate reference system parameters,
# and translate between different formats.
# """
#
# def __init__(self, prj=None, esri_wkt=None, epsg=None):
# warnings.warn(
# "crs has been deprecated. Use CRS in shapefile_utils instead.",
# category=DeprecationWarning)
# self.wktstr = None
# if prj is not None:
# with open(prj) as fprj:
# self.wktstr = fprj.read()
# elif esri_wkt is not None:
# self.wktstr = esri_wkt
# elif epsg is not None:
# wktstr = getprj(epsg)
# if wktstr is not None:
# self.wktstr = wktstr
# if self.wktstr is not None:
# self.parse_wkt()
#
# @property
# def crs(self):
# """
# Dict mapping crs attributes to proj4 parameters
# """
# proj = None
# if self.projcs is not None:
# # projection
# if 'mercator' in self.projcs.lower():
# if 'transverse' in self.projcs.lower() or \
# 'tm' in self.projcs.lower():
# proj = 'tmerc'
# else:
# proj = 'merc'
# elif 'utm' in self.projcs.lower() and \
# 'zone' in self.projcs.lower():
# proj = 'utm'
# elif 'stateplane' in self.projcs.lower():
# proj = 'lcc'
# elif 'lambert' and 'conformal' and 'conic' in self.projcs.lower():
# proj = 'lcc'
# elif 'albers' in self.projcs.lower():
# proj = 'aea'
# elif self.projcs is None and self.geogcs is not None:
# proj = 'longlat'
#
# # datum
# datum = None
# if 'NAD' in self.datum.lower() or \
# 'north' in self.datum.lower() and \
# 'america' in self.datum.lower():
# datum = 'nad'
# if '83' in self.datum.lower():
# datum += '83'
# elif '27' in self.datum.lower():
# datum += '27'
# elif '84' in self.datum.lower():
# datum = 'wgs84'
#
# # ellipse
# ellps = None
# if '1866' in self.spheroid_name:
# ellps = 'clrk66'
# elif 'grs' in self.spheroid_name.lower():
# ellps = 'grs80'
# elif 'wgs' in self.spheroid_name.lower():
# ellps = 'wgs84'
#
# # prime meridian
# pm = self.primem[0].lower()
#
# return {'proj': proj,
# 'datum': datum,
# 'ellps': ellps,
# 'a': self.semi_major_axis,
# 'rf': self.inverse_flattening,
# 'lat_0': self.latitude_of_origin,
# 'lat_1': self.standard_parallel_1,
# 'lat_2': self.standard_parallel_2,
# 'lon_0': self.central_meridian,
# 'k_0': self.scale_factor,
# 'x_0': self.false_easting,
# 'y_0': self.false_northing,
# 'units': self.projcs_unit,
# 'zone': self.utm_zone}
#
# @property
# def grid_mapping_attribs(self):
# """
# Map parameters for CF Grid Mappings
    # http://cfconventions.org/cf-conventions/cf-conventions.html,
# Appendix F: Grid Mappings
# """
# if self.wktstr is not None:
# sp = [p for p in [self.standard_parallel_1,
# self.standard_parallel_2]
# if p is not None]
# sp = sp if len(sp) > 0 else None
# proj = self.crs['proj']
# names = {'aea': 'albers_conical_equal_area',
# 'aeqd': 'azimuthal_equidistant',
# 'laea': 'lambert_azimuthal_equal_area',
# 'longlat': 'latitude_longitude',
# 'lcc': 'lambert_conformal_conic',
# 'merc': 'mercator',
# 'tmerc': 'transverse_mercator',
# 'utm': 'transverse_mercator'}
# attribs = {'grid_mapping_name': names[proj],
# 'semi_major_axis': self.crs['a'],
# 'inverse_flattening': self.crs['rf'],
# 'standard_parallel': sp,
# 'longitude_of_central_meridian': self.crs['lon_0'],
# 'latitude_of_projection_origin': self.crs['lat_0'],
# 'scale_factor_at_projection_origin': self.crs['k_0'],
# 'false_easting': self.crs['x_0'],
# 'false_northing': self.crs['y_0']}
# return {k: v for k, v in attribs.items() if v is not None}
#
# @property
# def proj4(self):
# """
# Not implemented yet
# """
# return None
#
# def parse_wkt(self):
#
# self.projcs = self._gettxt('PROJCS["', '"')
# self.utm_zone = None
# if self.projcs is not None and 'utm' in self.projcs.lower():
# self.utm_zone = self.projcs[-3:].lower().strip('n').strip('s')
# self.geogcs = self._gettxt('GEOGCS["', '"')
# self.datum = self._gettxt('DATUM["', '"')
# tmp = self._getgcsparam('SPHEROID')
# self.spheroid_name = tmp.pop(0)
# self.semi_major_axis = tmp.pop(0)
# self.inverse_flattening = tmp.pop(0)
# self.primem = self._getgcsparam('PRIMEM')
# self.gcs_unit = self._getgcsparam('UNIT')
# self.projection = self._gettxt('PROJECTION["', '"')
# self.latitude_of_origin = self._getvalue('latitude_of_origin')
# self.central_meridian = self._getvalue('central_meridian')
# self.standard_parallel_1 = self._getvalue('standard_parallel_1')
# self.standard_parallel_2 = self._getvalue('standard_parallel_2')
# self.scale_factor = self._getvalue('scale_factor')
# self.false_easting = self._getvalue('false_easting')
# self.false_northing = self._getvalue('false_northing')
# self.projcs_unit = self._getprojcs_unit()
#
# def _gettxt(self, s1, s2):
# s = self.wktstr.lower()
# strt = s.find(s1.lower())
# if strt >= 0: # -1 indicates not found
# strt += len(s1)
# end = s[strt:].find(s2.lower()) + strt
# return self.wktstr[strt:end]
#
# def _getvalue(self, k):
# s = self.wktstr.lower()
# strt = s.find(k.lower())
# if strt >= 0:
# strt += len(k)
# end = s[strt:].find(']') + strt
# try:
# return float(self.wktstr[strt:end].split(',')[1])
# except:
# print(' could not typecast wktstr to a float')
#
# def _getgcsparam(self, txt):
# nvalues = 3 if txt.lower() == 'spheroid' else 2
# tmp = self._gettxt('{}["'.format(txt), ']')
# if tmp is not None:
# tmp = tmp.replace('"', '').split(',')
# name = tmp[0:1]
# values = list(map(float, tmp[1:nvalues]))
# return name + values
# else:
# return [None] * nvalues
#
# def _getprojcs_unit(self):
# if self.projcs is not None:
# tmp = self.wktstr.lower().split('unit["')[-1]
# uname, ufactor = tmp.strip().strip(']').split('",')[0:2]
# ufactor = float(ufactor.split(']')[0].split()[0].split(',')[0])
# return uname, ufactor
# return None, None
# def getprj(epsg, addlocalreference=True, text='esriwkt'):
# """
# Gets projection file (.prj) text for given epsg code from
# spatialreference.org
#
# Parameters
# ----------
# epsg : int
# epsg code for coordinate system
# addlocalreference : boolean
# adds the projection file text associated with epsg to a local
# database, epsgref.json, located in the user's data directory.
#
# References
# ----------
# https://www.epsg-registry.org/
#
# Returns
# -------
# prj : str
# text for a projection (*.prj) file.
#
# """
# warnings.warn("SpatialReference has been deprecated. Use StructuredGrid "
# "instead.", category=DeprecationWarning)
# epsgfile = EpsgRef()
# wktstr = epsgfile.get(epsg)
# if wktstr is None:
# wktstr = get_spatialreference(epsg, text=text)
# if addlocalreference and wktstr is not None:
# epsgfile.add(epsg, wktstr)
# return wktstr
#
# def get_spatialreference(epsg, text='esriwkt'):
# """
# Gets text for given epsg code and text format from spatialreference.org
#
# Fetches the reference text using the url:
# https://spatialreference.org/ref/epsg/<epsg code>/<text>/
#
# See: https://www.epsg-registry.org/
#
# Parameters
# ----------
# epsg : int
# epsg code for coordinate system
# text : str
# string added to url
#
# Returns
# -------
# url : str
#
# """
# from flopy.utils.flopy_io import get_url_text
#
# warnings.warn("SpatialReference has been deprecated. Use StructuredGrid "
# "instead.", category=DeprecationWarning)
#
# epsg_categories = ['epsg', 'esri']
# for cat in epsg_categories:
# url = "{}/ref/{}/{}/{}/".format(srefhttp, cat, epsg, text)
# result = get_url_text(url)
# if result is not None:
# break
# if result is not None:
# return result.replace("\n", "")
# elif result is None and text != 'epsg':
# for cat in epsg_categories:
# error_msg = 'No internet connection or ' + \
# 'epsg code {} '.format(epsg) + \
# 'not found at {}/ref/'.format(srefhttp) + \
# '{}/{}/{}'.format(cat, cat, epsg)
# print(error_msg)
# # epsg code not listed on spatialreference.org
# # may still work with pyproj
# elif text == 'epsg':
# return '+init=epsg:{}'.format(epsg)
#
#
# def getproj4(epsg):
# """
# Get projection file (.prj) text for | |
bool]:
'''
Takes an 4x2 array with UV coordinates of 4 points (left bottom,
right bottom, right top, left top) and checks if they're mapped to
rectangular shape. The rectangle can have any width and height (
including negative values) but can't be rotated.
Returns 3 flags:
1. Whether the object is a cuboid.
- all vertices must be in the corners in the right order
- top/bottom vertices must be at the top/bottom
- left/right vertices must be at the left/right
2. Whether left and right vertices are flipped (object scaled with
negative value on U axis)
3. Whether top and bottom vertices are flipped (object scaled with
negative value on V axis)
Notes:
- When first flag is False the second and third flat is also False.
- Usually used in combination with CubePolygon.uv_layer_coordinates
'''
min_ = crds.min(axis=0)
max_ = crds.max(axis=0)
# All loops in the corners
if not (
np.isclose(crds, min_) | np.isclose(crds, max_)
).all():
return False, False, False
lb, rb, rt, lt = crds
# Left to left, right to right, bottom to bottom, top to top
if (
not np.isclose(lb[0], lt[0]) or
not np.isclose(rb[0], rt[0]) or
not np.isclose(lt[1], rt[1]) or
not np.isclose(lb[1], rb[1])
):
return False, False, False
# is_valid, is_u_flipped, is_v_flipped
return True, lb[0] != min_[0], lb[1] != min_[1]
class McblendObjectGroup:
    '''
    A group of :class:`McblendObject`s often used as a main datasource for
    operations executed by Mcblend.

    The objects can be accessed with ObjectId with __getitem__ method like
    from a dict.

    :param armature: the armature used as a root of the object group.
    :param world_origin: optional object that replaces the origin point of
        the world. The matrix_world of that object defines the
        transformation space of the animation. Animating that object is
        equivalent to animating everything else in the opposite way.
    '''
    def __init__(
            self, armature: bpy.types.Object,
            world_origin: Optional[bpy.types.Object]):
        self.data: Dict[ObjectId, McblendObject] = {}
        '''the content of the group.'''
        # May be None; get_world_origin_matrix() raises when it is.
        self.world_origin: Optional[bpy.types.Object] = world_origin
        self._load_objects(armature)

    def get_world_origin_matrix(self):
        '''
        Returns the matrix_world of the world_origin object.

        :raises RuntimeError: if no world origin object was set.
        '''
        if self.world_origin is None:
            raise RuntimeError("World origin not defined")
        return self.world_origin.matrix_world

    def __len__(self):
        '''Returns the number of objects in the group.'''
        return len(self.data)

    def __getitem__(self, key: ObjectId) -> McblendObject:
        '''Returns the :class:`McblendObject` stored under *key*.'''
        return self.data[key]

    def __contains__(self, item):
        '''Checks whether *item* (an :class:`ObjectId`) is in the group.'''
        return item in self.data

    def __iter__(self):
        '''Iterates over the keys (:class:`ObjectId`s) of the group.'''
        return self.data.__iter__()

    def values(self):
        '''Lists values of this group (the :class:`McblendObject`s).'''
        return self.data.values()

    def keys(self):
        '''Lists valid keys to use in this object.'''
        return self.data.keys()

    def items(self):
        '''Iterator going through pairs of keys and values of this group.'''
        return self.data.items()

    def _load_objects(self, armature: bpy.types.Object):
        '''
        Loops over the offspring of an armature and creates
        :class:`McblendObjects` for this group. Used by the constructor.

        :param armature: the armature used as a root of the object group.
        '''
        # Loop bones
        # Every bone becomes a BONE entry; bone parenting is preserved
        # through parentobj_id.
        for bone in armature.data.bones:
            obj_id: ObjectId = ObjectId(armature.name, bone.name)
            parent_bone_id: Optional[ObjectId] = None
            if bone.parent is not None:
                parent_bone_id = ObjectId(armature.name, bone.parent.name)
            self.data[obj_id] = McblendObject(
                thisobj_id=obj_id, thisobj=armature,
                parentobj_id=parent_bone_id, children_ids=[],
                mctype=MCObjType.BONE, group=self)
        # Objects parented directly to a bone become CUBEs (meshes) or
        # LOCATORs (empties) attached to that bone.
        for obj in armature.children:
            if obj.parent_type != 'BONE':
                continue  # TODO - maybe a warning here?
            parentobj_id = ObjectId(obj.parent.name, obj.parent_bone)
            obj_id = ObjectId(obj.name, "")
            if obj.type == 'MESH':
                self.data[obj_id] = McblendObject(
                    thisobj_id=obj_id, thisobj=obj, parentobj_id=parentobj_id,
                    children_ids=[], mctype=MCObjType.CUBE, group=self)
                self.data[parentobj_id].children_ids.append(obj_id)
                # Further offspring of the "child" (share same parent in mc
                # model)
                offspring: Deque[bpy.types.Object] = deque(obj.children)
                # Depth-first traversal: the deque is used as a stack
                # (pop/extend on the right end).
                while offspring:
                    child = offspring.pop()
                    child_id: ObjectId = ObjectId(child.name, "")
                    if child.parent_type != 'OBJECT':
                        continue
                    if child.type == 'MESH':
                        self.data[child_id] = McblendObject(
                            thisobj_id=child_id, thisobj=child,
                            parentobj_id=parentobj_id, children_ids=[],
                            mctype=MCObjType.CUBE, group=self)
                        self.data[parentobj_id].children_ids.append(child_id)
                        offspring.extend(child.children)
                    elif child.type == 'EMPTY':
                        self.data[child_id] = McblendObject(
                            thisobj_id=child_id, thisobj=child,
                            parentobj_id=parentobj_id, children_ids=[],
                            mctype=MCObjType.LOCATOR, group=self)
                        self.data[parentobj_id].children_ids.append(child_id)
            elif obj.type == 'EMPTY':
                self.data[obj_id] = McblendObject(
                    thisobj_id=obj_id, thisobj=obj, parentobj_id=parentobj_id,
                    children_ids=[], mctype=MCObjType.LOCATOR, group=self)
                self.data[parentobj_id].children_ids.append(obj_id)
def cyclic_equiv(u: List, v: List) -> bool:
    '''
    Check whether list *v* is a cyclic rotation of list *u*.

    Linear-time algorithm adapted from:
    https://stackoverflow.com/questions/31000591/
    '''
    length = len(u)
    # Different lengths can never be rotations of each other.
    if length != len(v):
        return False
    offset_u = offset_v = 0
    while offset_u < length and offset_v < length:
        # Length of the run that matches starting one past both offsets.
        run = 1
        while (run <= length and
               u[(offset_u + run) % length] == v[(offset_v + run) % length]):
            run += 1
        if run > length:
            # A full cycle matched: the lists are cyclically equivalent.
            return True
        # Skip past the mismatch in whichever list compared greater.
        if u[(offset_u + run) % length] > v[(offset_v + run) % length]:
            offset_u += run
        else:
            offset_v += run
    return False
def apply_obj_transform_keep_origin(obj: bpy.types.Object):
    '''
    Bake the object's rotation and scale into its mesh data while leaving
    the origin (location) untouched. After the call the object carries only
    its translation.
    '''
    translation, rotation, scale = obj.matrix_local.decompose()
    rotation_mat = rotation.to_matrix().to_4x4()
    scale_mat = (
        mathutils.Matrix.Scale(scale[0], 4, (1, 0, 0)) @
        mathutils.Matrix.Scale(scale[1], 4, (0, 1, 0)) @
        mathutils.Matrix.Scale(scale[2], 4, (0, 0, 1)))
    # The object keeps only its translation...
    obj.matrix_local = mathutils.Matrix.Translation(translation)
    # ...while rotation and scale are applied directly to the vertices.
    baked = rotation_mat @ scale_mat
    for vert in obj.data.vertices:
        vert.co = baked @ vert.co
def fix_cube_rotation(obj: bpy.types.Object):
    '''
    Rotate the bounding box of a cuboid so it's aligned with
    the cube rotation. The scale and rotation of the object must
    be in default position for this function to work.

    :param obj: blender object with cuboid mesh.
    '''
    # Get coordinates of 3 points (a, b and c) from any polygon.
    # Assuming the mesh is a cuboid (so it has at least one polygon),
    # vectors u and v below are not parallel:
    # u = vector(b, a) and v = vector(b, c)
    poly = obj.data.polygons[0]
    vertices = obj.data.vertices
    a = vertices[poly.vertices[0]].co
    b = vertices[poly.vertices[1]].co
    c = vertices[poly.vertices[2]].co
    # Two unit vectors spanning the plane of the polygon
    u: mathutils.Vector = (a-b).normalized()
    v: mathutils.Vector = (c-b).normalized()
    # The cross product creates the 3rd vector that defines
    # the rotated space
    w = u.cross(v).normalized()
    # Recalculate v to make sure that all of the vectors are at
    # the right angle (even though they should be)
    v = w.cross(u).normalized()
    # Create the rotation matrix from the orthonormal basis.
    # NOTE(review): mathutils.Matrix() takes its sequence as *rows*;
    # the original comment said "columns" — confirm intent.
    rotation_matrix = mathutils.Matrix((w, v, -u))
    # (w, v, -u) - this order of normals in rotation matrix is set up in
    # such way that applying the operator to the default cube (without
    # rotations) will not change its rotation and won't flip its scale to -1.
    # It will have no effect.
    # Rotate the mesh
    for vertex in obj.data.vertices:
        vertex.co = rotation_matrix @ vertex.co
    # Counter rotate object around its origin so that the world-space
    # appearance of the mesh stays the same.
    counter_rotation = rotation_matrix.to_4x4().inverted()
    loc, rot, scl = obj.matrix_local.decompose()
    loc_mat = mathutils.Matrix.Translation(loc)
    rot_mat = rot.to_matrix().to_4x4()
    scl_mat = (
        mathutils.Matrix.Scale(scl[0],4,(1,0,0)) @
        mathutils.Matrix.Scale(scl[1],4,(0,1,0)) @
        mathutils.Matrix.Scale(scl[2],4,(0,0,1)))
    obj.matrix_local = loc_mat @ counter_rotation @ rot_mat @ scl_mat
def get_vect_json(arr: Iterable) -> List[float]:
    '''
    Convert an iterable of numbers into a plain Python list with every
    value rounded to 3 decimal digits and negative zero (-0.0) replaced
    by plain 0.0.

    :param arr: an iterable of numbers.
    '''
    # `value == 0.0` is also true for -0.0, so every zero is replaced by
    # the canonical positive zero.
    return [
        0.0 if value == 0.0 else value
        for value in (round(item, 3) for item in arr)
    ]
def star_pattern_match(text: str, pattern: str) -> bool:
'''
Matches text with a pattern that uses "*" as a wildcard which
can represent any number of characters.
:param pattern: the pattern
:param text: the text being matched with pattern
'''
lenp, lent = len(pattern), len(text)
# Only empty text can match empty pattern
if lenp == 0:
return lent == 0
# The table that represents matching smaller patterns to
# parts of the text. Row 0 is for empty pattern, column 0
# represents empty text: matches[text+1][pattern+1]
matches = [[False for i in range(lenp + 1)] for j in range(lent + 1)]
# Empty pattern matches the empty string
matches[0][0] = True
# Only paterns made out of '*' can match empty stirng
for p in range(1, lenp+1):
# Propagate matching apttern as long as long as the
# pattern uses only '*'
if pattern[p - 1] == '*':
matches[0][p] = matches[0][p - 1]
else:
break
# Fill the pattern matching table (solutions to
# shorter patterns/texts are used to solve
# other patterns with increasing complexity).
for t in range(1, lent + 1):
for p in range(1, lenp + 1):
if pattern[p - 1] == '*':
# Two wys to propagate matching value
# A) Same pattern without '*' worked so this | |
<gh_stars>0
# engineer_number module
#
# Copyright (c) 2012-2017 梅濁酒(umedoblock)
#
# This software is released under the MIT License.
# https://github.com/umedoblock/engineer_number
import os, sys, math
import unittest
from test import support
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from engineer_number import *
from engineer_number.constants import *
from engineer_number.wire import *
class TestEngineerNumber(unittest.TestCase):
def test_as_number(self):
self.assertEqual(1000000000000000000000000.0, EngineerNumber("1Y"))
self.assertEqual(1000000000000000000000.0, EngineerNumber("1Z"))
self.assertEqual(1000000000000000000.0, EngineerNumber("1E"))
self.assertEqual(1000000000000000.0, EngineerNumber("1P"))
self.assertEqual(1000000000000.0, EngineerNumber("1T"))
self.assertEqual(1000000000.0, EngineerNumber("1G"))
self.assertEqual(1000000.0, EngineerNumber("1M"))
self.assertEqual(1000.0, EngineerNumber("1k"))
self.assertEqual(100.0, EngineerNumber("1h"))
self.assertEqual(10.0, EngineerNumber("1da"))
self.assertEqual(1.0, EngineerNumber("1"))
self.assertEqual(0.1, EngineerNumber("1d"))
self.assertEqual(0.01, EngineerNumber("1c"))
self.assertEqual(0.001, EngineerNumber("1m"))
self.assertEqual(0.000001, EngineerNumber("1u"))
self.assertEqual(0.000000001, EngineerNumber("1n"))
self.assertEqual(0.000000000001, EngineerNumber("1p"))
self.assertEqual(0.000000000000001, EngineerNumber("1f"))
self.assertEqual(0.000000000000000001, EngineerNumber("1a"))
self.assertEqual(0.000000000000000000001, EngineerNumber("1z"))
self.assertEqual(0.000000000000000000000001, EngineerNumber("1y"))
def test__num(self):
self.assertAlmostEqual(123.456, EngineerNumber("123.456Y")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456Z")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456E")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456P")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456T")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456G")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456M")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456k")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456m")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456u")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456n")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456p")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456f")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456a")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456z")._num)
self.assertAlmostEqual(123.456, EngineerNumber("123.456y")._num)
self.assertAlmostEqual(12.3456, EngineerNumber("123.456h")._num)
self.assertEqual("12.346k", str(EngineerNumber("123.456h")))
self.assertAlmostEqual(1.23456, EngineerNumber("123.456da")._num)
self.assertEqual("1.235k", str(EngineerNumber("123.456da")))
self.assertAlmostEqual(12.3456, EngineerNumber("123.456d")._num)
self.assertEqual("12.346", str(EngineerNumber("123.456d")))
self.assertAlmostEqual(1.23456, EngineerNumber("123.456c")._num)
self.assertEqual("1.235", str(EngineerNumber("123.456c")))
def test__exponent10(self):
self.assertEqual(24, EngineerNumber("1Y")._exponent10)
self.assertEqual(21, EngineerNumber("1Z")._exponent10)
self.assertEqual(18, EngineerNumber("1E")._exponent10)
self.assertEqual(15, EngineerNumber("1P")._exponent10)
self.assertEqual(12, EngineerNumber("1T")._exponent10)
self.assertEqual(9, EngineerNumber("1G")._exponent10)
self.assertEqual(6, EngineerNumber("1M")._exponent10)
self.assertEqual(3, EngineerNumber("1k")._exponent10)
self.assertEqual(0, EngineerNumber("1h")._exponent10)
self.assertEqual(0, EngineerNumber("1da")._exponent10)
self.assertEqual(0, EngineerNumber("1")._exponent10)
self.assertEqual(-3, EngineerNumber("1d")._exponent10)
self.assertEqual(-3, EngineerNumber("1c")._exponent10)
self.assertEqual(-3, EngineerNumber("1m")._exponent10)
self.assertEqual(-6, EngineerNumber("1u")._exponent10)
self.assertEqual(-9, EngineerNumber("1n")._exponent10)
self.assertEqual(-12, EngineerNumber("1p")._exponent10)
self.assertEqual(-15, EngineerNumber("1f")._exponent10)
self.assertEqual(-18, EngineerNumber("1a")._exponent10)
self.assertEqual(-21, EngineerNumber("1z")._exponent10)
self.assertEqual(-24, EngineerNumber("1y")._exponent10)
def test_as_str(self):
self.assertEqual("123.457Y", str(EngineerNumber("123.4567Y")))
self.assertEqual("123.456Y", str(EngineerNumber("123.456Y")))
self.assertEqual("123.456Z", str(EngineerNumber("123.456Z")))
self.assertEqual("123.456E", str(EngineerNumber("123.456E")))
self.assertEqual("123.456P", str(EngineerNumber("123.456P")))
self.assertEqual("123.456T", str(EngineerNumber("123.456T")))
self.assertEqual("123.456G", str(EngineerNumber("123.456G")))
self.assertEqual("123.456M", str(EngineerNumber("123.456M")))
self.assertEqual("123.456k", str(EngineerNumber("123.456k")))
self.assertEqual("12.346k", str(EngineerNumber("123.456h")))
self.assertEqual("1.235k", str(EngineerNumber("123.456da")))
self.assertEqual("123.456", str(EngineerNumber("123.456")))
self.assertEqual("12.346", str(EngineerNumber("123.456d")))
self.assertEqual("1.235", str(EngineerNumber("123.456c")))
self.assertEqual("123.456m", str(EngineerNumber("123.456m")))
self.assertEqual("123.456u", str(EngineerNumber("123.456u")))
self.assertEqual("123.456n", str(EngineerNumber("123.456n")))
self.assertEqual("123.456p", str(EngineerNumber("123.456p")))
self.assertEqual("123.456f", str(EngineerNumber("123.456f")))
self.assertEqual("123.456a", str(EngineerNumber("123.456a")))
self.assertEqual("123.456z", str(EngineerNumber("123.456z")))
self.assertEqual("123.456y", str(EngineerNumber("123.456y")))
def test_feed_empty_value(self):
self.assertAlmostEqual(0, EngineerNumber())
self.assertAlmostEqual(0, EngineerNumber(""))
def test_as_abnormal_number(self):
self.assertEqual(1.0, EngineerNumber("1."))
self.assertEqual(0.1, EngineerNumber(".1"))
self.assertEqual(1000.0, EngineerNumber("1.k"))
self.assertEqual(100.0, EngineerNumber(".1k"))
self.assertEqual(0.89, EngineerNumber(".89"))
self.assertEqual(89000.0, EngineerNumber("89.k"))
self.assertEqual(890.0, EngineerNumber(".89k"))
self.assertEqual(28.9, EngineerNumber("2.89da"))
self.assertEqual(2890.0, EngineerNumber("289da"))
def test_simple(self):
M3_3 = EngineerNumber(3.3, MEGA)
# kilo must be "k". K means kelbin.
k47 = EngineerNumber(47, KILO)
mili47 = EngineerNumber(47, MILLI)
mcr3_3 = EngineerNumber(3.3, MICRO)
# __str__()
self.assertEqual("3.300M", str(M3_3))
self.assertEqual(3, k47._exponent10)
self.assertEqual("47.000k", str(k47))
self.assertEqual("47.000m", str(mili47))
self.assertEqual("3.300u", str(mcr3_3))
# mul
self.assertEqual("155.100G", str(k47 * M3_3))
self.assertEqual("155.100n", str(mili47 * mcr3_3))
self.assertEqual("2.209k", str(k47 * mili47))
self.assertEqual("10.890", str(M3_3 * mcr3_3))
self.assertEqual("155.100m", str(k47 * mcr3_3))
self.assertEqual("155.100k", str(M3_3 * mili47))
self.assertEqual("100.000n", EngineerNumber("10p") * 10 ** 4)
self.assertEqual("100.000n", EngineerNumber("10p", 4))
# add, sub
self.assertEqual("3.347M", str(M3_3 + k47))
self.assertEqual("3.253M", str(M3_3 - k47))
self.assertEqual("47.003m", str(mili47 + mcr3_3))
self.assertEqual("46.997m", str(mili47 - mcr3_3))
# big and small, ignored small
self.assertEqual("3.300M", str(M3_3 + mili47))
def test_over_range_for_big(self):
# TOO BIG
# in_YOTTA = 4 * 10 ** 26 = 400 * 10 ** 24
in_YOTTA = EngineerNumber(4, 16) * EngineerNumber(1, 10)
over_YOTTA = in_YOTTA * 10
self.assertEqual("400.000Y", str(in_YOTTA))
self.assertEqual("4" + "0" * 27, str(over_YOTTA))
# over_YOTTA = 4 * 10 ** 29 = 400 * 10 ** 27
over_YOTTA_i = EngineerNumber(4, 5) * EngineerNumber(1, 24)
over_YOTTA_f = EngineerNumber(4, 5) * EngineerNumber(1.0, 24)
self.assertEqual("4" + "0" * 29, str(over_YOTTA_i))
self.assertEqual("4e+29", str(over_YOTTA_f))
# in ZETTA
T10 = EngineerNumber(10, TERA)
G40 = EngineerNumber(40, GIGA)
self.assertEqual("10.000T", str(T10))
self.assertEqual("40.000G", str(G40))
BIG400 = T10 * G40
self.assertEqual("400.000Z", str(BIG400))
def test_over_range_for_small(self):
# too small
# over_yocto = 0.04 * 10 ** -27 = 4 * 10 ** -29
over_yocto_i = EngineerNumber(4, -5) * EngineerNumber(1, -24)
over_yocto_f = EngineerNumber(4, -5) * EngineerNumber(1.0, -24)
self.assertEqual("4e-29", str(over_yocto_i))
self.assertEqual("4e-29", str(over_yocto_f))
over_yocto_f *= 10
self.assertEqual("4e-28", str(over_yocto_f))
over_yocto_f *= 10
self.assertEqual("4e-27", str(over_yocto_f))
over_yocto_f *= 10
self.assertEqual("4e-26", str(over_yocto_f))
over_yocto_f *= 10
self.assertEqual("4e-25", str(over_yocto_f))
in_yocto_f = over_yocto_f * 10
self.assertEqual("4.000y", str(in_yocto_f))
# in yocto
f1 = EngineerNumber(1, FEMTO)
n4 = EngineerNumber(4, NANO)
self.assertEqual("1.000f", str(f1))
self.assertEqual("4.000n", str(n4))
small4 = f1 * n4
self.assertEqual("4.000y", str(small4))
def test_honest_convert(self):
self.assertEqual("987.000m", str(EngineerNumber(0.987)))
self.assertEqual("1.000k", str(EngineerNumber(1000, ONE)))
self.assertEqual("1.040k", str(EngineerNumber(1040, ONE)))
self.assertEqual("999.000", str(EngineerNumber(999, ONE)))
self.assertEqual("999.200", str(EngineerNumber(999.2, ONE)))
self.assertEqual("2.000", str(EngineerNumber(2, ONE)))
self.assertEqual("1.001", str(EngineerNumber(1.001, ONE)))
self.assertEqual("1.000", str(EngineerNumber(1, ONE)))
def test_same_value_different_argument(self):
# same result
self.assertEqual("1.000m", str(EngineerNumber(0.001, ONE)))
self.assertEqual("1.000m", str(EngineerNumber(1, MILLI)))
self.assertEqual("1.000m", str(EngineerNumber(1000, MICRO)))
def test_as_number(self):
u1 = EngineerNumber(1, MICRO)
n4 = EngineerNumber(4, NANO)
self.assertRaises
self.assertEqual("1.004u", str(u1 + n4))
self.assertEqual("996.000n", str(u1 - n4))
self.assertEqual("250.000", str(u1 / n4))
self.assertEqual("249.000", str(u1 // n4))
self.assertEqual("4.000n", str(u1 % n4))
div, mod = divmod(u1, n4)
self.assertEqual("249.000", str(div))
self.assertEqual("4.000n", str(mod))
def test_round(self):
self.assertEqual( "999.999m", str(EngineerNumber("0.9999994")))
# 123
self.assertEqual("1000.000m", str(EngineerNumber("0.9999995")))
u1 = EngineerNumber(1, MICRO)
n4 = EngineerNumber(4, NANO)
self.assertEqual("1000.000m", str(pow(u1, n4))) # 0.9999999447379593
self.assertEqual("999.981m", str(pow(n4, u1))) # 0.9999806632154822
self.assertEqual(0.9999999447379593, pow(u1, n4.num))
def test_zero_neg_pos(self):
self.assertEqual(0, EngineerNumber("0"))
neg1 = EngineerNumber(-1, ONE)
self.assertEqual("-1.000", str(neg1))
u1 = EngineerNumber(1, MICRO)
self.assertEqual("0.000", str(EngineerNumber(0)))
self.assertEqual(-0.000001, -u1)
self.assertEqual(0.000001, math.fabs(-u1))
def test_basic_calc(self):
u1 = EngineerNumber(1, MICRO)
self.assertEqual("2.000", str(EngineerNumber(16) % EngineerNumber(7)))
self.assertEqual("2.000", str(16 % EngineerNumber(7)))
self.assertEqual("2.000", str(EngineerNumber(16) % 7))
self.assertEqual("9.000", str(EngineerNumber(16) - EngineerNumber(7)))
self.assertEqual("9.000", str(16 - EngineerNumber(7)))
self.assertEqual("9.000", str(EngineerNumber(16) - 7))
self.assertEqual("128.000", str(EngineerNumber(2) ** EngineerNumber(7)))
self.assertEqual("128.000", str(2 ** EngineerNumber(7)))
self.assertEqual("128.000", str(EngineerNumber(2) ** 7))
self.assertEqual("2.286", str(EngineerNumber(16) / EngineerNumber(7)))
self.assertEqual("2.286", str(16 / EngineerNumber(7)))
self.assertEqual("2.286", str(EngineerNumber(16) / 7))
self.assertEqual("2.000", str(EngineerNumber(16) // EngineerNumber(7)))
self.assertEqual("2.000", str(16 // EngineerNumber(7)))
self.assertEqual("2.000", str(EngineerNumber(16) // 7))
self.assertEqual("2.286M", str(EngineerNumber(16, MEGA) / EngineerNumber(7)))
self.assertEqual("2.286M", str(16 / EngineerNumber(7, MICRO)))
self.assertEqual("2.286M", str(EngineerNumber(16, MEGA) / 7))
self.assertEqual("1.000u", str(EngineerNumber(u1)))
def test_121_484(self):
self.assertEqual("121.484m", str(EngineerNumber("121.484m")))
self.assertEqual(121.484, EngineerNumber("121.484").num)
self.assertEqual(121.484, EngineerNumber("121.484"))
self.assertEqual(EngineerNumber("121.484"), 121.484)
self.assertEqual("121.484", str(EngineerNumber("121.484")))
self.assertEqual("121.484E", str(EngineerNumber("121.484E")))
self.assertEqual("121.488p", str(EngineerNumber(121.488, PICO)))
self.assertEqual("121.488p", str(EngineerNumber(0.121488, NANO)))
def test_num_dived_by_enm(self):
self.assertIsInstance(math.sqrt(EngineerNumber("150p")), float)
self.assertIsInstance(2 * math.pi * math.sqrt(EngineerNumber("150p")), float)
self.assertIsInstance(EngineerNumber(1) / (2 * math.pi * math.sqrt(EngineerNumber("150p") * EngineerNumber("600u"))), EngineerNumber)
def test_equal_different_value_and_factor(self):
self.assertEqual(EngineerNumber(121.484, KILO), EngineerNumber(0.121484, MEGA))
self.assertEqual(EngineerNumber(121.484, MILLI), EngineerNumber(0.121484, ONE))
self.assertEqual(EngineerNumber(121.484, PICO), EngineerNumber(0.121484, NANO))
self.assertEqual(str(EngineerNumber(121.488, PICO)), str(EngineerNumber(0.121488, NANO)))
def test_equal_with_number(self):
self.assertEqual(121484000000000000000, EngineerNumber("121.484E").num)
self.assertEqual(121484000000000000000, EngineerNumber("121.484E"))
def test_compare_with_same_instance(self):
self.assertGreater(EngineerNumber("1.000"), EngineerNumber("0.999"))
self.assertGreaterEqual(EngineerNumber("1.000"), EngineerNumber("0.999"))
self.assertGreaterEqual(EngineerNumber("1.000"), EngineerNumber("1.000"))
self.assertLess(EngineerNumber("0.999"), EngineerNumber("1.000"))
self.assertLessEqual(EngineerNumber("0.999"), EngineerNumber("0.999"))
self.assertLessEqual(EngineerNumber("1"), EngineerNumber("1.000"))
def test_around_yotta(self):
yotta999999 = EngineerNumber("999.999Y")
self.assertEqual("999.999Y", str(yotta999999))
# over yotta a little
yotta = EngineerNumber(1, YOTTA)
self.assertEqual("1.000Y", str(yotta))
yotta1 = yotta + 1
self.assertEqual("1.000Y", str(yotta1))
self.assertEqual("1000000000000000000000001", str(yotta1.num))
def test___si2exponent10(self):
self.assertEqual(0, EngineerNumber._si2exponent10(""))
self.assertEqual(3, EngineerNumber._si2exponent10("k"))
self.assertEqual(24, EngineerNumber._si2exponent10("Y"))
self.assertEqual(-24, EngineerNumber._si2exponent10("y"))
def test__si2exponent10_wrong(self):
expected_header_en = "SI prefix symbol must be in ("
expected_header_ja = \
_("SI 接頭辞の記号は、次のいずれかでなければなりません。{}")
symbols = (\
'("Y", "Z", "E", "P", "T", "G", "M", "k", "h", "da", '
'"", '
'"d", "c", "%", "m", "u", "n", "p", "f", "a", "z", "y")'
)
expected_message = \
expected_header_en + symbols + "."
expected_message = \
expected_header_ja.format(symbols)
with self.assertRaises(KeyError) as raiz:
EngineerNumber._si2exponent10("Q")
self.assertEqual(expected_message, raiz.exception.args[0])
with self.assertRaises(KeyError) as raiz:
EngineerNumber._si2exponent10("K")
self.assertEqual(expected_message, raiz.exception.args[0])
with self.assertRaises(KeyError) as raiz:
EngineerNumber._si2exponent10("GG")
self.assertEqual(expected_message, raiz.exception.args[0])
with self.assertRaises(KeyError) as raiz:
EngineerNumber._si2exponent10(" ")
self.assertEqual(expected_message, raiz.exception.args[0])
def test_e_expression(self):
self.assertAlmostEqual(float("4e-28"), EngineerNumber("4e-28"))
self.assertAlmostEqual(float("4e-28"), EngineerNumber("4E-28"))
self.assertEqual("4e-28", str(EngineerNumber("4e-28")))
self.assertEqual("4e-28", str(EngineerNumber("4E-28")))
    def test_unknown_symbol(self):
        # Every punctuation character that is not an SI prefix symbol must be
        # rejected with a KeyError.
        # NOTE(review): the inner try/except swallows ValueError (printing the
        # offending symbol) while letting KeyError escape to assertRaises.
        # If the constructor ever raised ValueError instead of KeyError,
        # assertRaises would then fail because no exception propagates --
        # confirm this mix of expected exception types is intentional.
        unknown_symbols = """!@#$^&*(){}[]+-=|_~`'"?<>,/\;:"""
        for unknown_symbol in unknown_symbols:
            with self.assertRaises(KeyError) as raiz:
                try:
                    EngineerNumber("10{}".format(unknown_symbol))
                except ValueError as e:
                    print("unknown_symbol = \"{}\"".format(unknown_symbol))
def test_percent(self):
self.assertEqual(EngineerNumber(0.1), EngineerNumber("10%"))
self.assertEqual(EngineerNumber("0.1"), EngineerNumber("10%"))
self.assertEqual("100.000m", str(EngineerNumber("10%")))
self.assertAlmostEqual(0.1, EngineerNumber("10%"))
def test_over_100_percent(self):
self.assertEqual(EngineerNumber(1.1), EngineerNumber("110%"))
self.assertEqual(EngineerNumber("1.1"), EngineerNumber("110%"))
self.assertAlmostEqual(1.1, EngineerNumber("110%"))
self.assertEqual("1.100", str(EngineerNumber("110%")))
def test_compare_with_number(self):
# swap
self.assertGreater(EngineerNumber("1.000"), 0.999)
self.assertGreaterEqual(EngineerNumber("1.000"), 0.999)
self.assertGreaterEqual(EngineerNumber("1.000"), 1.000)
self.assertGreater(1.0, EngineerNumber("0.999"))
self.assertGreaterEqual(1.0, EngineerNumber("0.999"))
self.assertGreaterEqual(1.0, EngineerNumber("1.000"))
self.assertGreater(EngineerNumber("1.000"), 0)
self.assertGreaterEqual(EngineerNumber("1.000"), 0)
self.assertGreaterEqual(EngineerNumber("1.000"), 1)
self.assertGreater(1, EngineerNumber("0.999"))
self.assertGreaterEqual(1, EngineerNumber("0.999"))
self.assertGreaterEqual(1, EngineerNumber("1.000"))
self.assertLess(0.999, EngineerNumber("1.000"))
self.assertLessEqual(0.999, EngineerNumber("0.999"))
self.assertLessEqual(1, EngineerNumber("1.000"))
def test_bool(self):
self.assertTrue(EngineerNumber("1.000"))
self.assertTrue(EngineerNumber("1.000p"))
self.assertTrue(EngineerNumber(1, PICO))
self.assertFalse(EngineerNumber("0.000"))
    def test_si_prefix_symbol_error(self):
        # "K" is deliberately rejected so that Kelvin is never mistaken for
        # kilo; the error message steers the user to lowercase "k".
        with self.assertRaises(KeyError) as raiz:
            EngineerNumber("100K")
        # English gist of the Japanese message below:
        # message = ("cannot accept "K" as SI prefix symbol. "
        # "please use "k" as prefix if you hope to describe kilo."
        # "Because "K" means Kelvin temperature.")
        message = _('"K" を SI 接頭辞の記号として使用することは出来ません。\n'
                    'kilo を表現したい場合、 "K" ではなく、小文字の "k" を'
                    'お使い下さい。\n'
                    'なぜならば、"K" は、Kelvin 温度を表現するための'
                    '単位記号だからです。')
        self.assertEqual(message, raiz.exception.args[0])
def test_force(self):
one = EngineerNumber("1")
self.assertEqual("1.000", str(one))
self.assertEqual("1000.000m", one._force("m"))
self.assertEqual("0.001k", one._force("k"))
self.assertEqual("1000.000m", one["m"])
self.assertEqual("0.001k", one["k"])
self.assertEqual("1.000", one[""])
m1 = EngineerNumber("1m")
self.assertEqual("1.000m", str(m1))
self.assertEqual("1000.000u", m1._force("u"))
self.assertEqual("1000.000u", m1["u"])
self.assertEqual("0.001", m1[""])
self.assertEqual("1000000.000n", m1["n"])
k1 = EngineerNumber("123.456k")
self.assertEqual("123.456k", k1["k"])
self.assertEqual("123456.000", k1[""])
self.assertEqual("123456000000000000.000p", k1["p"])
m1234567 = EngineerNumber("1.234567m")
self.assertEqual("1.235m", str(m1234567))
self.assertEqual("1234.567u", m1234567._force("u"))
self.assertEqual("1234.567u", m1234567["u"])
self.assertEqual("0.001", m1234567[""])
m1534567 = EngineerNumber("1.534567m")
self.assertEqual("0.002", m1534567[""])
self.assertEqual("1.535m", str(m1534567))
self.assertEqual("1534.567u", m1534567._force("u"))
self.assertEqual("1534.567u", m1534567["u"])
def test_si_units(self):
# base
one_meter = EngineerNumber("1")
one_deca = EngineerNumber("1da")
one_hecto_pascal = EngineerNumber("1h")
one_little = EngineerNumber("1")
one_deci_little = EngineerNumber("1d")
one_centi_meter = EngineerNumber("1c")
self.assertEqual(one_meter, 100 * one_centi_meter)
self.assertEqual(one_little, 10 * one_deci_little)
self.assertEqual(EngineerNumber("0.1k"), one_hecto_pascal)
self.assertEqual(1, one_deca / 10)
def test_error_and_warning(self):
n1 = EngineerNumber("0.1m")
# message = "abs\(number\(={}\)\) in range\(0, 1\) convert to int.".format(n1)
message = _("0 < abs(number(={})) < 1 を満たす数字を "
"int に変換しようとしました。").format(n1)
with self.assertRaises(UserWarning) as warn1:
int(n1)
self.assertEqual(message, warn1.exception.args[0])
n2 = EngineerNumber("-0.1m")
# message = "abs\(number\(={}\)\) in range\(0, 1\) convert to int.".format(n2)
message = _("0 < abs(number(={})) < 1 を満たす数字を "
"int に変換しようとしました。").format(n2)
with self.assertRaises(UserWarning) as warn2:
int(n2)
self.assertEqual(message, warn2.exception.args[0])
def test_math(self):
two = EngineerNumber("2")
root2 = math.sqrt(2)
sqrt2 = two.sqrt()
self.assertEqual(2, two)
self.assertEqual(root2, sqrt2)
self.assertIsInstance(sqrt2, EngineerNumber)
def test_error(self):
# base
k1000 = EngineerNumber("1.000k")
# in | |
<reponame>Pe8er/dotfiles
#!/usr/bin/env python3
#
# fusée gelée
#
# Launcher for the {re}switched coldboot/bootrom hacks--
# launches payloads above the Horizon
#
# discovery and implementation by @ktemkin
# likely independently discovered by lots of others <3
#
# this code is political -- it stands with those who fight for LGBT rights
# don't like it? suck it up, or find your own damned exploit ^-^
#
# special thanks to:
# ScirèsM, motezazer -- guidance and support
# hedgeberg, andeor -- dumping the Jetson bootROM
# TuxSH -- for IDB notes that were nice to peek at
#
# much love to:
# <NAME>, Qyriad, f916253, MassExplosion213, and Levi
#
# greetings to:
# shuffle2
# This file is part of Fusée Launcher
# Copyright (C) 2018 <NAME> <<EMAIL>>
# Copyright (C) 2018 <NAME> <<EMAIL>>
# Fusée Launcher is licensed under the terms of the GNU GPLv2
import os
import sys
import errno
import ctypes
import argparse
import platform
# The address where the RCM payload is placed.
# This is fixed for most device.
RCM_PAYLOAD_ADDR = 0x40010000
# The address where the user payload is expected to begin.
PAYLOAD_START_ADDR = 0x40010E40
# Specify the range of addresses where we should inject oct
# payload address.
STACK_SPRAY_START = 0x40014E40
STACK_SPRAY_END = 0x40017000
# notes:
# GET_CONFIGURATION to the DEVICE triggers memcpy from 0x40003982
# GET_INTERFACE to the INTERFACE triggers memcpy from 0x40003984
# GET_STATUS to the ENDPOINT triggers memcpy from <on the stack>
class HaxBackend:
    """
    Abstract base for the OS-specific TegraRCM vulnerability backends.

    Concrete subclasses declare which platforms they support via
    SUPPORTED_SYSTEMS and implement trigger_vulnerability(); device lookup
    and the bulk read/write helpers are shared here.
    """

    # USB constants used
    STANDARD_REQUEST_DEVICE_TO_HOST_TO_ENDPOINT = 0x82
    STANDARD_REQUEST_DEVICE_TO_HOST = 0x80
    GET_DESCRIPTOR = 0x6
    GET_CONFIGURATION = 0x8

    # Interface requests
    GET_STATUS = 0x0

    # List of OSs this class supports.
    SUPPORTED_SYSTEMS = []

    def __init__(self, skip_checks=False):
        """Remember whether environment sanity checks should be skipped."""
        self.skip_checks = skip_checks

    def print_warnings(self):
        """Hook for backends that need to warn the user; a no-op by default."""
        pass

    def trigger_vulnerability(self, length):
        """Issue the oversized control request; must be overridden."""
        raise NotImplementedError("Trying to use an abstract backend rather than an instance of the proper subclass!")

    @classmethod
    def supported(cls, system_override=None):
        """ Returns true iff the given backend is supported on this platform. """
        # Honor a SYSTEM_OVERRIDE when one is given; otherwise ask the host.
        system = system_override if system_override else platform.system()
        return system in cls.SUPPORTED_SYSTEMS

    @classmethod
    def create_appropriate_backend(cls, system_override=None, skip_checks=False):
        """Instantiate the first registered subclass that supports this OS."""
        for candidate in cls.__subclasses__():
            if candidate.supported(system_override):
                return candidate(skip_checks=skip_checks)
        # ... if we couldn't, bail out.
        raise IOError("No backend to trigger the vulnerability-- it's likely we don't support your OS!")

    def read(self, length):
        """ Reads data from the RCM protocol endpoint. """
        return bytes(self.dev.read(0x81, length, 1000))

    def write_single_buffer(self, data):
        """
        Write a single RCM buffer, which should be 0x1000 long.
        The last packet may be shorter; a length not divisible by 512
        triggers a ZLP on the wire.
        """
        return self.dev.write(0x01, data, 1000)

    def find_device(self, vid=None, pid=None):
        """ Set and return the device to be used """
        import usb
        self.dev = usb.core.find(idVendor=vid, idProduct=pid)
        return self.dev
class MacOSBackend(HaxBackend):
    """
    macOS (and friends) vulnerability trigger.

    libusb on these platforms is happy to issue the malformed control
    request for us, so we simply pass it straight through.  Also covers
    FreeBSD and patched-libusb ("libusbhax") setups.
    """

    BACKEND_NAME = "macOS"
    SUPPORTED_SYSTEMS = ['Darwin', 'libusbhax', 'macos', 'FreeBSD']

    def trigger_vulnerability(self, length):
        # No tricks needed: hand the oversized GET_STATUS request directly
        # to the device via libusb's ctrl_transfer.
        request_type = self.STANDARD_REQUEST_DEVICE_TO_HOST_TO_ENDPOINT
        return self.dev.ctrl_transfer(request_type, self.GET_STATUS, 0, 0, length)
class LinuxBackend(HaxBackend):
    """
    More complex vulnerability trigger for Linux: we can't go through libusb,
    as it limits control requests to a single page size, the limitation expressed
    by the usbfs. More realistically, the usbfs seems fine with it, and we just
    need to work around libusb by submitting the control URB ourselves via
    the USBDEVFS_SUBMITURB ioctl.
    """

    BACKEND_NAME = "Linux"
    SUPPORTED_SYSTEMS = ['Linux', 'linux']
    SUPPORTED_USB_CONTROLLERS = ['pci/drivers/xhci_hcd', 'platform/drivers/dwc_otg']

    # Size of a USB setup packet, which prefixes the transfer buffer.
    SETUP_PACKET_SIZE = 8

    # Pieces used to build the USBDEVFS_SUBMITURB ioctl number.
    IOCTL_IOR = 0x80000000
    IOCTL_TYPE = ord('U')
    IOCTL_NR_SUBMIT_URB = 10

    # usbdevfs_urb.type value for a control transfer.
    URB_CONTROL_REQUEST = 2

    class SubmitURBIoctl(ctypes.Structure):
        # Mirrors struct usbdevfs_urb from <linux/usbdevice_fs.h>.
        _fields_ = [
            ('type', ctypes.c_ubyte),
            ('endpoint', ctypes.c_ubyte),
            ('status', ctypes.c_int),
            ('flags', ctypes.c_uint),
            ('buffer', ctypes.c_void_p),
            ('buffer_length', ctypes.c_int),
            ('actual_length', ctypes.c_int),
            ('start_frame', ctypes.c_int),
            ('stream_id', ctypes.c_uint),
            ('error_count', ctypes.c_int),
            ('signr', ctypes.c_uint),
            ('usercontext', ctypes.c_void_p),
        ]

    def print_warnings(self):
        """ Print any warnings necessary for the given backend. """
        print("\nImportant note: on desktop Linux systems, we currently require an XHCI host controller.")
        print("A good way to ensure you're likely using an XHCI backend is to plug your")
        print("device into a blue 'USB 3' port.\n")

    def trigger_vulnerability(self, length):
        """
        Submit the control request directly using the USBFS submit_urb
        ioctl, which issues the control request directly. This allows us
        to send our giant control request despite size limitations.
        """
        # fcntl is Linux/Unix-only, so keep the import local to this backend.
        # (The redundant local `import os` was removed; os is imported at
        # module scope.)
        import fcntl

        # We only work for devices that are bound to a compatible HCD.
        self._validate_environment()

        # Figure out the USB device file we're going to use to issue the
        # control request.
        fd = os.open('/dev/bus/usb/{:0>3d}/{:0>3d}'.format(self.dev.bus, self.dev.address), os.O_RDWR)

        # Define the setup packet to be submitted:
        # bmRequestType, bRequest, wValue, wIndex, wLength -- all little-endian.
        setup_packet = \
            int.to_bytes(self.STANDARD_REQUEST_DEVICE_TO_HOST_TO_ENDPOINT, 1, byteorder='little') + \
            int.to_bytes(self.GET_STATUS, 1, byteorder='little') + \
            int.to_bytes(0, 2, byteorder='little') + \
            int.to_bytes(0, 2, byteorder='little') + \
            int.to_bytes(length, 2, byteorder='little')

        # Create a buffer to hold the result.
        buffer_size = self.SETUP_PACKET_SIZE + length
        buffer = ctypes.create_string_buffer(setup_packet, buffer_size)

        # Define the data structure used to issue the control request URB.
        request = self.SubmitURBIoctl()
        request.type = self.URB_CONTROL_REQUEST
        request.endpoint = 0
        request.buffer = ctypes.addressof(buffer)
        request.buffer_length = buffer_size

        # Manually submit an URB to the kernel, so it issues our 'evil' control request.
        # Consistency fix: use the named IOCTL_TYPE constant instead of a
        # duplicated inline ord('U').
        ioctl_number = (self.IOCTL_IOR | ctypes.sizeof(request) << 16 | self.IOCTL_TYPE << 8 | self.IOCTL_NR_SUBMIT_URB)
        fcntl.ioctl(fd, ioctl_number, request, True)

        # Close our newly created fd.
        os.close(fd)

        # The other modules raise an IOError when the control request fails to complete. We don't fail out (as we don't bother
        # reading back), so we'll simulate the same behavior as the others.
        raise IOError("Raising an error to match the others!")

    def _validate_environment(self):
        """
        We can only inject giant control requests on devices that are backed
        by certain usb controllers-- typically, the xhci_hcd on most PCs.
        """
        from glob import glob

        # If we're overriding checks, never fail out.
        if self.skip_checks:
            print("skipping checks")
            return

        # Search each device bound to a supported HCD driver for the active device...
        for hci_name in self.SUPPORTED_USB_CONTROLLERS:
            for path in glob("/sys/bus/{}/*/usb*".format(hci_name)):
                if self._node_matches_our_device(path):
                    return

        raise ValueError("This device needs to be on a supported backend. Usually that means plugged into a blue/USB 3.0 port!\nBailing out.")

    def _node_matches_our_device(self, path):
        """
        Checks to see if the given sysfs node matches our given device.
        Can be used to check if an xhci_hcd controller subnode reflects a given device.
        """

        # If this isn't a valid USB device node, it's not what we're looking for.
        if not os.path.isfile(path + "/busnum"):
            return False

        # We assume that a whole _bus_ is associated with a host controller driver, so we
        # only check for a matching bus ID.
        if self.dev.bus != self._read_num_file(path + "/busnum"):
            return False

        # If all of our checks passed, this is our device.
        return True

    def _read_num_file(self, path):
        """
        Reads a numeric value from a sysfs file that contains only a number.
        """
        with open(path, 'r') as f:
            raw = f.read()
            return int(raw)
class WindowsBackend(HaxBackend):
"""
Use libusbK for most of it, and use the handle libusbK gets for us to call kernel32's DeviceIoControl
"""
BACKEND_NAME = "Windows"
SUPPORTED_SYSTEMS = ["Windows"]
# Windows and libusbK specific constants
WINDOWS_FILE_DEVICE_UNKNOWN = 0x00000022
LIBUSBK_FUNCTION_CODE_GET_STATUS = 0x807
WINDOWS_METHOD_BUFFERED = 0
WINDOWS_FILE_ANY_ACCESS = 0
RAW_REQUEST_STRUCT_SIZE = 24 # 24 is how big the struct is, just trust me
TO_ENDPOINT = 2
# Yoinked (with love) from Windows' CTL_CODE macro
def win_ctrl_code(self, DeviceType, Function, Method, Access):
""" Return a control code for use with DeviceIoControl() """
return ((DeviceType) << 16 | ((Access) << 14) | ((Function)) << 2 | (Method))
def __init__(self, skip_checks):
import libusbK
self.libk = libusbK
# Grab libusbK
self.lib = ctypes.cdll.libusbK
def find_device(self, Vid, Pid):
"""
Windows version of this function
Its return isn't | |
any processes.', msgs)
# Give user explicit permissions to list
await core.addUserRule(bond.iden, (True, ('task', 'get')))
# Match all tasks
msgs = await alist(prox.storm(f"ps.kill ''"))
self.stormIsInErr('Provided iden matches more than one process.', msgs)
msgs = await alist(prox.storm('ps.list'))
self.stormIsInPrint(f'task iden: {iden}', msgs)
# Give user explicit license to kill
await core.addUserRule(bond.iden, (True, ('task', 'del')))
# Kill the task as the user
msgs = await alist(prox.storm(f'ps.kill {iden}'))
self.stormIsInPrint('kill status: True', msgs)
self.true(task.done())
# Kill a task that doesn't exist
self.false(await core.kill(bond, 'newp'))
    async def test_storm_lib_query(self):
        '''
        Exercise embedded storm queries (the ``${ ... }`` syntax): exec(),
        variable scoping, return values, runtime inheritance, yielding, and
        toprim() conversion.
        '''
        async with self.getTestCore() as core:
            # basic: an embedded query runs only when .exec() is called
            q = '''
            $foo = ${ [test:str=theevalthatmendo] }
            $foo.exec()
            '''
            await core.nodes(q)
            nodes = await core.nodes('test:str=theevalthatmendo')
            self.len(1, nodes)
            # exec vars do not populate upwards -- $bar set inside the
            # embedded query is not visible after exec(), so NoSuchVar
            q = '''
            $foo = "that is one neato burrito"
            $baz = ${ $bar=$lib.str.concat(wompwomp, $lib.guid()) $lib.print("in exec") }
            $baz.exec()
            $lib.print("post exec {bar}", bar=$bar)
            [ test:str=$foo ]
            '''
            with self.raises(s_exc.NoSuchVar):
                await core.nodes(q)
            # make sure returns work: exec() evaluates to the returned value
            q = '''
            $foo = $(10)
            $bar = ${ return ( $($foo+1) ) }
            [test:int=$bar.exec()]
            '''
            nodes = await core.nodes(q)
            self.len(1, nodes)
            self.eq(nodes[0].ndef, ('test:int', 11))
            # make sure it inherits the runt it's created in, not exec'd in:
            # the $bing printed is the one captured at creation time (99 is
            # set in the function's scope before exec)
            q = '''
            $foo = ${$lib.print("look ma, my runt") $bing = $(0) }
            function foofunc() {
                $bing = $(99)
                yield $foo.exec()
                $lib.print("bing is now {bing}", bing=$bing)
                return ($(0))
            }
            $foofunc()
            '''
            msgs = await core.stormlist(q)
            self.stormIsInPrint('look ma, my runt', msgs)
            self.stormIsInPrint('bing is now 99', msgs)
            # vars may be captured for each node flowing through them
            q = '''[(test:int=100 :loc=us.va) (test:int=200 :loc=us.ca)] $foo=:loc
            $q = ${ $lib.print($foo) } $q.exec()'''
            msgs = await core.stormlist(q)
            self.stormIsInPrint('us.va', msgs)
            self.stormIsInPrint('us.ca', msgs)
            # Yield/iterator behavior: an embedded query may be yielded into
            # the pipeline via a subquery
            nodes = await core.nodes('''
                function foo(x) {
                    return(${
                        [ inet:ipv4=$x ]
                    })
                }
                [it:dev:str=1.2.3.4]
                $genr = $foo($node.repr())
                -> { yield $genr }
            ''')
            self.len(1, nodes)
            self.eq(nodes[0].ndef, ('inet:ipv4', 0x01020304))
            nodes = await core.nodes('''
                function foo(x) {
                    return( ${ [ inet:ipv4=$x ] } )
                }
                [it:dev:str=5.5.5.5]
                $genr = $foo($node.repr())
                $genr.exec()
            ''')
            self.len(1, await core.nodes('inet:ipv4=5.5.5.5'))
            # an embedded query is directly iterable with a for loop
            msgs = await core.stormlist('''
                $embed = ${[inet:ipv4=1.2.3.4]}
                for $xnode in $embed {
                    $lib.print($xnode.repr())
                }
            ''')
            self.stormIsInPrint('1.2.3.4', msgs)
            # yielding the query object itself emits the nodes it creates,
            # alongside the original inbound nodes
            q = '''[test:int=1 test:int=2]
            $currentNode = $node
            $q=${ [test:str=$currentNode.value()] }
            yield $q
            '''
            nodes = await core.nodes(q)
            self.len(4, nodes)
            self.eq({n.ndef for n in nodes},
                    {('test:int', 1), ('test:int', 2), ('test:str', '1'), ('test:str', '2')})
            # You can toprim() as Query object -- it serializes to its text.
            q = '''$q=${ $lib.print('fire in the hole') } $lib.fire('test', q=$q)
            '''
            msgs = await core.stormlist(q)
            fires = [m for m in msgs if m[0] == 'storm:fire']
            self.len(1, fires)
            self.eq(fires[0][1].get('data').get('q'),
                    "$lib.print('fire in the hole')")
async def test_storm_lib_node(self):
async with self.getTestCore() as core:
nodes = await core.nodes('[ test:str=woot :tick=2001] [ test:int=$node.isform(test:str) ] +test:int')
self.eq(1, nodes[0].ndef[1])
q = 'test:str=woot $lib.fire(name=pode, pode=$node.pack(dorepr=True))'
msgs = await core.stormlist(q, opts={'repr': True})
pode = [m[1] for m in msgs if m[0] == 'node'][0]
apode = [m[1].get('data').get('pode') for m in msgs if m[0] == 'storm:fire'][0]
self.eq(pode[0], ('test:str', 'woot'))
pode[1].pop('path')
self.eq(pode, apode)
async def test_storm_lib_dict(self):
async with self.getTestCore() as core:
nodes = await core.nodes('$blah = $lib.dict(foo=vertex.link) [ inet:fqdn=$blah.foo ]')
self.len(1, nodes)
self.eq('vertex.link', nodes[0].ndef[1])
self.eq(2, await core.callStorm('$d=$lib.dict(k1=1, k2=2) return($lib.len($d))'))
async def test_storm_lib_str(self):
async with self.getTestCore() as core:
q = '$v=vertex $l=link $fqdn=$lib.str.concat($v, ".", $l)' \
' [ inet:email=$lib.str.format("visi@{domain}", domain=$fqdn) ]'
nodes = await core.nodes(q)
self.len(1, nodes)
self.eq('<EMAIL>', nodes[0].ndef[1])
nodes = await core.nodes('$s = woot [ test:int=$s.startswith(w) ]')
self.eq(1, nodes[0].ndef[1])
nodes = await core.nodes('$s = woot [ test:int=$s.endswith(visi) ]')
self.eq(0, nodes[0].ndef[1])
nodes = await core.nodes('$s = woot [ test:str=$s.rjust(10) ]')
self.eq(' woot', nodes[0].ndef[1])
nodes = await core.nodes('$s = woot [ test:str=$s.ljust(10) ]')
self.eq('woot ', nodes[0].ndef[1])
sobj = s_stormtypes.Str('beepbeep')
self.len(8, sobj)
nodes = await core.nodes('$s = (foo, bar, baz) [ test:str=$lib.str.join(".", $s) ]')
self.eq('foo.bar.baz', nodes[0].ndef[1])
nodes = await core.nodes('$s = foo-bar-baz [ test:str=$s.replace("-", ".") ]')
self.eq('foo.bar.baz', nodes[0].ndef[1])
nodes = await core.nodes('$s = foo-bar-baz [ test:str=$s.replace("-", ".", 1) ]')
self.eq('foo.bar-baz', nodes[0].ndef[1])
q = '$foo=" foo " return ( $foo.strip() )'
self.eq('foo', await core.callStorm(q))
q = '$foo=" foo " return ( $foo.lstrip() )'
self.eq('foo ', await core.callStorm(q))
q = '$foo=" foo " return ( $foo.rstrip() )'
self.eq(' foo', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.strip(quxk) )'
self.eq('ickbrownfo', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.lstrip(quxk) )'
self.eq('ickbrownfox', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.rstrip(quxk) )'
self.eq('quickbrownfo', await core.callStorm(q))
q = '$foo="QuickBrownFox" return ( $foo.lower() )'
self.eq('quickbrownfox', await core.callStorm(q))
q = '$foo="QuickBrownFox" return ( $foo.upper() )'
self.eq('QUICKBROWNFOX', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.slice(5) )'
self.eq('brownfox', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.slice(5, 10) )'
self.eq('brown', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.slice((-8)) )'
self.eq('brownfox', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.slice(0, (-3)) )'
self.eq('quickbrown', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.slice(55, 42) )'
self.eq('', await core.callStorm(q))
q = '$foo="quickbrownfox" return ( $foo.slice("newp") )'
await self.asyncraises(s_exc.BadCast, core.callStorm(q))
q = '$foo="foobar" return ( $foo.reverse() )'
self.eq('raboof', await core.callStorm(q))
# tuck the regx tests in with str
self.true(await core.callStorm(r'''return($lib.regex.matches('^foo', foobar))'''))
self.true(await core.callStorm(r'''return($lib.regex.matches('foo', FOOBAR, $lib.regex.flags.i))'''))
self.false(await core.callStorm(r'''return($lib.regex.matches('^foo$', foobar))'''))
self.false(await core.callStorm(f'return($lib.regex.matches(foo, " foobar"))'))
self.eq(('oo',), await core.callStorm(r'''return($lib.regex.search('([aeiou]+)', foobar))'''))
self.eq(('foo', 'baz'), await core.callStorm('return($lib.regex.search("(foo)bar(baz)", foobarbaz))'))
self.eq((), await core.callStorm('return($lib.regex.search(foo, foobar))'))
self.none(await core.callStorm('return($lib.regex.search(foo, bat))'))
self.eq(('foo', 'bar', 'baz'), await core.callStorm('$x = "foo,bar,baz" return($x.split(","))'))
self.eq(('foo', 'bar', 'baz'), await core.callStorm('$x = "foo,bar,baz" return($x.rsplit(","))'))
self.eq(('foo', 'bar,baz'), await core.callStorm('$x = "foo,bar,baz" return($x.split(",", maxsplit=1))'))
self.eq(('foo,bar', 'baz'), await core.callStorm('$x = "foo,bar,baz" return($x.rsplit(",", maxsplit=1))'))
async def test_storm_lib_bytes_gzip(self):
async with self.getTestCore() as core:
async with await core.snap() as snap:
hstr = 'ohhai'
ghstr = base64.urlsafe_b64encode((gzip.compress(hstr.encode()))).decode()
mstr = 'ohgood'
n2 = s_common.guid()
n3 = s_common.guid()
node1 = await snap.addNode('graph:node', '*', {'data': ghstr})
node2 = await snap.addNode('graph:node', '*', {'data': mstr})
text = f'''
graph:node={node1.ndef[1]}
$gzthing = :data
$foo = $lib.base64.decode($gzthing).gunzip()
$lib.print($foo)
[ graph:node={n2} :data=$foo.decode() ]
'''
await core.stormlist(text)
# make sure we gunzip correctly
opts = {'vars': {'iden': n2}}
nodes = await snap.nodes('graph:node=$iden', opts=opts)
self.len(1, nodes)
self.eq(hstr, nodes[0].get('data'))
# gzip
text = f'''
graph:node={node2.ndef[1]}
$bar = :data
[ graph:node={n3} :data=$lib.base64.encode($bar.encode().gzip()) ]
'''
await core.stormlist(text)
# make sure we gzip correctly
opts = {'vars': {'iden': n3}}
nodes = await snap.nodes('graph:node=$iden', opts=opts)
self.len(1, nodes)
self.eq(mstr.encode(), gzip.decompress(base64.urlsafe_b64decode(nodes[0].props['data'])))
async def test_storm_lib_bytes_bzip(self):
async with self.getTestCore() as core:
async with await core.snap() as snap:
hstr = 'ohhai'
ghstr = base64.urlsafe_b64encode((bz2.compress(hstr.encode()))).decode()
mstr = 'ohgood'
ggstr = base64.urlsafe_b64encode((bz2.compress(mstr.encode()))).decode()
n2 = s_common.guid()
n3 = s_common.guid()
node1 = await snap.addNode('graph:node', '*', {'data': ghstr})
node2 = await snap.addNode('graph:node', '*', {'data': mstr})
text = '''
graph:node={valu}
$bzthing = :data
$foo = $lib.base64.decode($bzthing).bunzip()
$lib.print($foo)
[ graph:node={n2} :data=$foo.decode() ]
'''
text = text.format(valu=node1.ndef[1], n2=n2)
await core.stormlist(text)
# make sure we bunzip correctly
opts = {'vars': {'iden': n2}}
nodes = await snap.nodes('graph:node=$iden', opts=opts)
self.len(1, nodes)
self.eq(hstr, nodes[0].props['data'])
# bzip
text = '''
graph:node={valu}
$bar = :data
[ graph:node={n3} :data=$lib.base64.encode($bar.encode().bzip()) ]
'''
text = text.format(valu=node2.ndef[1], n3=n3)
await core.stormlist(text)
# make sure we bzip correctly
opts = {'vars': {'iden': n3}}
nodes = await snap.nodes('graph:node=$iden', opts=opts)
self.len(1, nodes)
self.eq(ggstr, nodes[0].props['data'])
async def test_storm_lib_bytes_json(self):
async with self.getTestCore() as core:
async with await core.snap() as snap:
foo = {'a': 'ohhai'}
ghstr = json.dumps(foo)
n2 = s_common.guid()
node1 = await snap.addNode('graph:node', '*', {'data': ghstr})
text = '''
graph:node={valu}
$jzthing = :data
$foo = $jzthing.encode().json()
[ graph:node={n2} :data=$foo ]
'''
text = text.format(valu=node1.ndef[1], n2=n2)
await core.stormlist(text)
# make sure we json loaded correctly
opts = {'vars': {'iden': n2}}
nodes = await snap.nodes('graph:node=$iden', opts=opts)
self.len(1, nodes)
self.eq(foo, nodes[0].props['data'])
async def test_storm_lib_list(self):
async with self.getTestCore() as core:
# Base List object behavior
q = '''// $lib.list ctor
$list=$lib.list(1,2,3)
// __len__
$lib.print('List size is {len}', len=$lib.len($list))
// aiter/iter method
$sum = $(0)
for $valu in $list {
$sum = $( $sum + $valu)
}
$lib.print('Sum is {sum}', sum=$sum)
// Append method
$list.append(4)
// size method
$lib.print('List size is now {len}', len=$list.size())
// Access the values by index
$lib.print('List[0]={zero}, List[-1]={neg1}', zero=$list.index(0), neg1=$list.index(-1))
$sum = $(0)
for $valu in $list {
$sum = $( $sum + $valu)
}
$lib.print('Sum is now {sum}', sum=$sum)
// Empty lists may also be made
$elst=$lib.list()
$lib.print('elst size is {len}', len=$lib.len($elst))
'''
msgs = await core.stormlist(q)
self.stormIsInPrint('List size is 3', msgs)
self.stormIsInPrint('Sum is 6', msgs)
| |
50*m.x176 + m.x316 <= 0)
# NOTE(review): machine-generated Pyomo constraint listing (part of a larger
# model defined elsewhere in this file) -- do not hand-edit individual rows.

# Upper-bound rows of the form m.x3xx <= 50 * m.x1xx.
m.c2319 = Constraint(expr= - 50*m.x177 + m.x317 <= 0)
m.c2320 = Constraint(expr= - 50*m.x178 + m.x318 <= 0)
m.c2321 = Constraint(expr= - 50*m.x179 + m.x319 <= 0)
m.c2322 = Constraint(expr= - 50*m.x180 + m.x320 <= 0)
m.c2323 = Constraint(expr= - 50*m.x181 + m.x321 <= 0)
m.c2324 = Constraint(expr= - 50*m.x182 + m.x322 <= 0)
m.c2325 = Constraint(expr= - 50*m.x183 + m.x323 <= 0)
m.c2326 = Constraint(expr= - 50*m.x184 + m.x324 <= 0)
m.c2327 = Constraint(expr= - 50*m.x185 + m.x325 <= 0)
m.c2328 = Constraint(expr= - 50*m.x186 + m.x326 <= 0)
m.c2329 = Constraint(expr= - 50*m.x187 + m.x327 <= 0)
m.c2330 = Constraint(expr= - 50*m.x188 + m.x328 <= 0)
m.c2331 = Constraint(expr= - 50*m.x189 + m.x329 <= 0)
m.c2332 = Constraint(expr= - 50*m.x190 + m.x330 <= 0)
m.c2333 = Constraint(expr= - 50*m.x191 + m.x331 <= 0)
m.c2334 = Constraint(expr= - 50*m.x192 + m.x332 <= 0)
m.c2335 = Constraint(expr= - 50*m.x193 + m.x333 <= 0)
m.c2336 = Constraint(expr= - 50*m.x194 + m.x334 <= 0)
m.c2337 = Constraint(expr= - 50*m.x195 + m.x335 <= 0)
m.c2338 = Constraint(expr= - 50*m.x196 + m.x336 <= 0)
m.c2339 = Constraint(expr= - 50*m.x197 + m.x337 <= 0)
m.c2340 = Constraint(expr= - 50*m.x198 + m.x338 <= 0)
m.c2341 = Constraint(expr= - 50*m.x199 + m.x339 <= 0)
m.c2342 = Constraint(expr= - 50*m.x200 + m.x340 <= 0)
m.c2343 = Constraint(expr= - 50*m.x201 + m.x341 <= 0)
m.c2344 = Constraint(expr= - 50*m.x202 + m.x342 <= 0)
m.c2345 = Constraint(expr= - 50*m.x203 + m.x343 <= 0)
m.c2346 = Constraint(expr= - 50*m.x204 + m.x344 <= 0)
m.c2347 = Constraint(expr= - 50*m.x205 + m.x345 <= 0)
m.c2348 = Constraint(expr= - 50*m.x206 + m.x346 <= 0)
m.c2349 = Constraint(expr= - 50*m.x207 + m.x347 <= 0)
m.c2350 = Constraint(expr= - 50*m.x208 + m.x348 <= 0)
m.c2351 = Constraint(expr= - 50*m.x209 + m.x349 <= 0)
m.c2352 = Constraint(expr= - 50*m.x210 + m.x350 <= 0)
m.c2353 = Constraint(expr= - 50*m.x211 + m.x351 <= 0)
# Equality rows fixing group sums at 12.
m.c2354 = Constraint(expr= m.x152 + m.x153 + m.x166 + m.x167 + m.x180 + m.x181 + m.x194 + m.x195 + m.x208 + m.x209
== 12)
m.c2355 = Constraint(expr= m.x154 + m.x155 + m.x168 + m.x169 + m.x182 + m.x183 + m.x196 + m.x197 + m.x210 + m.x211
== 12)
# Paired >= 50 / <= 50 rows, pinning each of these sums to exactly 50.
m.c2356 = Constraint(expr= m.x292 + m.x306 + m.x320 + m.x334 + m.x348 >= 50)
m.c2357 = Constraint(expr= m.x295 + m.x309 + m.x323 + m.x337 + m.x351 >= 50)
m.c2358 = Constraint(expr= m.x293 + m.x294 + m.x307 + m.x308 + m.x321 + m.x322 + m.x335 + m.x336 + m.x349 + m.x350
>= 50)
m.c2359 = Constraint(expr= m.x292 + m.x306 + m.x320 + m.x334 + m.x348 <= 50)
m.c2360 = Constraint(expr= m.x295 + m.x309 + m.x323 + m.x337 + m.x351 <= 50)
m.c2361 = Constraint(expr= m.x293 + m.x294 + m.x307 + m.x308 + m.x321 + m.x322 + m.x335 + m.x336 + m.x349 + m.x350
<= 50)
# Lower-bound rows: a fixed-coefficient weighted sum must reach at least a
# 0.25 / 0.45 / 0.75 multiple of the paired x variable.
m.c2362 = Constraint(expr= - 0.25*m.x292 + 0.1*m.x422 + 0.85*m.x423 + 0.6*m.x424 + 0.2*m.x425 + 0.5*m.x426 + 0.8*m.x427
+ 0.3*m.x428 >= 0)
m.c2363 = Constraint(expr= - 0.45*m.x293 + 0.1*m.x429 + 0.85*m.x430 + 0.6*m.x431 + 0.2*m.x432 + 0.5*m.x433 + 0.8*m.x434
+ 0.3*m.x435 >= 0)
m.c2364 = Constraint(expr= - 0.45*m.x294 + 0.1*m.x436 + 0.85*m.x437 + 0.6*m.x438 + 0.2*m.x439 + 0.5*m.x440 + 0.8*m.x441
+ 0.3*m.x442 >= 0)
m.c2365 = Constraint(expr= - 0.75*m.x295 + 0.1*m.x443 + 0.85*m.x444 + 0.6*m.x445 + 0.2*m.x446 + 0.5*m.x447 + 0.8*m.x448
+ 0.3*m.x449 >= 0)
m.c2366 = Constraint(expr= - 0.25*m.x306 + 0.1*m.x520 + 0.85*m.x521 + 0.6*m.x522 + 0.2*m.x523 + 0.5*m.x524 + 0.8*m.x525
+ 0.3*m.x526 >= 0)
m.c2367 = Constraint(expr= - 0.45*m.x307 + 0.1*m.x527 + 0.85*m.x528 + 0.6*m.x529 + 0.2*m.x530 + 0.5*m.x531 + 0.8*m.x532
+ 0.3*m.x533 >= 0)
m.c2368 = Constraint(expr= - 0.45*m.x308 + 0.1*m.x534 + 0.85*m.x535 + 0.6*m.x536 + 0.2*m.x537 + 0.5*m.x538 + 0.8*m.x539
+ 0.3*m.x540 >= 0)
m.c2369 = Constraint(expr= - 0.75*m.x309 + 0.1*m.x541 + 0.85*m.x542 + 0.6*m.x543 + 0.2*m.x544 + 0.5*m.x545 + 0.8*m.x546
+ 0.3*m.x547 >= 0)
m.c2370 = Constraint(expr= - 0.25*m.x320 + 0.1*m.x618 + 0.85*m.x619 + 0.6*m.x620 + 0.2*m.x621 + 0.5*m.x622 + 0.8*m.x623
+ 0.3*m.x624 >= 0)
m.c2371 = Constraint(expr= - 0.45*m.x321 + 0.1*m.x625 + 0.85*m.x626 + 0.6*m.x627 + 0.2*m.x628 + 0.5*m.x629 + 0.8*m.x630
+ 0.3*m.x631 >= 0)
m.c2372 = Constraint(expr= - 0.45*m.x322 + 0.1*m.x632 + 0.85*m.x633 + 0.6*m.x634 + 0.2*m.x635 + 0.5*m.x636 + 0.8*m.x637
+ 0.3*m.x638 >= 0)
m.c2373 = Constraint(expr= - 0.75*m.x323 + 0.1*m.x639 + 0.85*m.x640 + 0.6*m.x641 + 0.2*m.x642 + 0.5*m.x643 + 0.8*m.x644
+ 0.3*m.x645 >= 0)
m.c2374 = Constraint(expr= - 0.25*m.x334 + 0.1*m.x716 + 0.85*m.x717 + 0.6*m.x718 + 0.2*m.x719 + 0.5*m.x720 + 0.8*m.x721
+ 0.3*m.x722 >= 0)
m.c2375 = Constraint(expr= - 0.45*m.x335 + 0.1*m.x723 + 0.85*m.x724 + 0.6*m.x725 + 0.2*m.x726 + 0.5*m.x727 + 0.8*m.x728
+ 0.3*m.x729 >= 0)
m.c2376 = Constraint(expr= - 0.45*m.x336 + 0.1*m.x730 + 0.85*m.x731 + 0.6*m.x732 + 0.2*m.x733 + 0.5*m.x734 + 0.8*m.x735
+ 0.3*m.x736 >= 0)
m.c2377 = Constraint(expr= - 0.75*m.x337 + 0.1*m.x737 + 0.85*m.x738 + 0.6*m.x739 + 0.2*m.x740 + 0.5*m.x741 + 0.8*m.x742
+ 0.3*m.x743 >= 0)
m.c2378 = Constraint(expr= - 0.25*m.x348 + 0.1*m.x814 + 0.85*m.x815 + 0.6*m.x816 + 0.2*m.x817 + 0.5*m.x818 + 0.8*m.x819
+ 0.3*m.x820 >= 0)
m.c2379 = Constraint(expr= - 0.45*m.x349 + 0.1*m.x821 + 0.85*m.x822 + 0.6*m.x823 + 0.2*m.x824 + 0.5*m.x825 + 0.8*m.x826
+ 0.3*m.x827 >= 0)
m.c2380 = Constraint(expr= - 0.45*m.x350 + 0.1*m.x828 + 0.85*m.x829 + 0.6*m.x830 + 0.2*m.x831 + 0.5*m.x832 + 0.8*m.x833
+ 0.3*m.x834 >= 0)
m.c2381 = Constraint(expr= - 0.75*m.x351 + 0.1*m.x835 + 0.85*m.x836 + 0.6*m.x837 + 0.2*m.x838 + 0.5*m.x839 + 0.8*m.x840
+ 0.3*m.x841 >= 0)
# Matching upper-bound rows with multipliers 0.35 / 0.65 / 0.85.
m.c2382 = Constraint(expr= - 0.35*m.x292 + 0.1*m.x422 + 0.85*m.x423 + 0.6*m.x424 + 0.2*m.x425 + 0.5*m.x426 + 0.8*m.x427
+ 0.3*m.x428 <= 0)
m.c2383 = Constraint(expr= - 0.65*m.x293 + 0.1*m.x429 + 0.85*m.x430 + 0.6*m.x431 + 0.2*m.x432 + 0.5*m.x433 + 0.8*m.x434
+ 0.3*m.x435 <= 0)
m.c2384 = Constraint(expr= - 0.65*m.x294 + 0.1*m.x436 + 0.85*m.x437 + 0.6*m.x438 + 0.2*m.x439 + 0.5*m.x440 + 0.8*m.x441
+ 0.3*m.x442 <= 0)
m.c2385 = Constraint(expr= - 0.85*m.x295 + 0.1*m.x443 + 0.85*m.x444 + 0.6*m.x445 + 0.2*m.x446 + 0.5*m.x447 + 0.8*m.x448
+ 0.3*m.x449 <= 0)
m.c2386 = Constraint(expr= - 0.35*m.x306 + 0.1*m.x520 + 0.85*m.x521 + 0.6*m.x522 + 0.2*m.x523 + 0.5*m.x524 + 0.8*m.x525
+ 0.3*m.x526 <= 0)
m.c2387 = Constraint(expr= - 0.65*m.x307 + 0.1*m.x527 + 0.85*m.x528 + 0.6*m.x529 + 0.2*m.x530 + 0.5*m.x531 + 0.8*m.x532
+ 0.3*m.x533 <= 0)
m.c2388 = Constraint(expr= - 0.65*m.x308 + 0.1*m.x534 + 0.85*m.x535 + 0.6*m.x536 + 0.2*m.x537 + 0.5*m.x538 + 0.8*m.x539
+ 0.3*m.x540 <= 0)
m.c2389 = Constraint(expr= - 0.85*m.x309 + 0.1*m.x541 + 0.85*m.x542 + 0.6*m.x543 + 0.2*m.x544 + 0.5*m.x545 + 0.8*m.x546
+ 0.3*m.x547 <= 0)
m.c2390 = Constraint(expr= - 0.35*m.x320 + 0.1*m.x618 + 0.85*m.x619 + 0.6*m.x620 + 0.2*m.x621 + 0.5*m.x622 + 0.8*m.x623
+ 0.3*m.x624 <= 0)
m.c2391 = Constraint(expr= - 0.65*m.x321 + 0.1*m.x625 + 0.85*m.x626 + 0.6*m.x627 + 0.2*m.x628 + 0.5*m.x629 + 0.8*m.x630
+ 0.3*m.x631 <= 0)
m.c2392 = Constraint(expr= - 0.65*m.x322 + 0.1*m.x632 + 0.85*m.x633 + 0.6*m.x634 + 0.2*m.x635 + 0.5*m.x636 + 0.8*m.x637
+ 0.3*m.x638 <= 0)
m.c2393 = Constraint(expr= - 0.85*m.x323 + 0.1*m.x639 + 0.85*m.x640 + 0.6*m.x641 + 0.2*m.x642 + 0.5*m.x643 + 0.8*m.x644
+ 0.3*m.x645 <= 0)
m.c2394 = Constraint(expr= - 0.35*m.x334 + 0.1*m.x716 + 0.85*m.x717 + 0.6*m.x718 + 0.2*m.x719 + 0.5*m.x720 + 0.8*m.x721
+ 0.3*m.x722 <= 0)
m.c2395 = Constraint(expr= - 0.65*m.x335 + 0.1*m.x723 + 0.85*m.x724 + 0.6*m.x725 + 0.2*m.x726 + 0.5*m.x727 + 0.8*m.x728
+ 0.3*m.x729 <= 0)
m.c2396 = Constraint(expr= - 0.65*m.x336 + 0.1*m.x730 + 0.85*m.x731 + 0.6*m.x732 + 0.2*m.x733 + 0.5*m.x734 + 0.8*m.x735
+ 0.3*m.x736 <= 0)
m.c2397 = Constraint(expr= - 0.85*m.x337 + 0.1*m.x737 + 0.85*m.x738 + 0.6*m.x739 + 0.2*m.x740 + 0.5*m.x741 + 0.8*m.x742
+ 0.3*m.x743 <= 0)
m.c2398 = Constraint(expr= - 0.35*m.x348 + 0.1*m.x814 + 0.85*m.x815 + 0.6*m.x816 + 0.2*m.x817 + 0.5*m.x818 + 0.8*m.x819
+ 0.3*m.x820 <= 0)
m.c2399 = Constraint(expr= - 0.65*m.x349 + 0.1*m.x821 + 0.85*m.x822 + 0.6*m.x823 + 0.2*m.x824 + 0.5*m.x825 + 0.8*m.x826
+ 0.3*m.x827 <= 0)
m.c2400 = Constraint(expr= - 0.65*m.x350 + 0.1*m.x828 + 0.85*m.x829 + 0.6*m.x830 + 0.2*m.x831 + 0.5*m.x832 + 0.8*m.x833
+ 0.3*m.x834 <= 0)
m.c2401 = Constraint(expr= - 0.85*m.x351 + 0.1*m.x835 + 0.85*m.x836 + 0.6*m.x837 + 0.2*m.x838 + 0.5*m.x839 + 0.8*m.x840
+ 0.3*m.x841 <= 0)
# Aggregate rows: each five-variable sum is bounded above by 50 (written in
# negated >= -50 form by the generator).
m.c2402 = Constraint(expr= - m.x282 - m.x296 - m.x310 - m.x324 - m.x338 >= -50)
m.c2403 = Constraint(expr= - m.x283 - m.x297 - m.x311 - m.x325 - m.x339 >= -50)
m.c2404 = Constraint(expr= - m.x284 - m.x298 - m.x312 - m.x326 - m.x340 >= -50)
m.c2405 = Constraint(expr= m.x282 - m.x285 - m.x286 + m.x296 | |
XXXXXXXXXX XX XXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXX XXXXXXXX
XXX XXXXX XX XXX XXXX XX XX XXXXXXXX XXXXX XXXXXX XXXXXXXX XX XXXXXXXXX XXXXX
XXXX XXXX XXXXXXXXXX XXXXXXXXXXXXXX XXX XXXX XX XXXXXXXXXXXXXXXX XX XXXXXX
XXXXXXXXXXXXX XX XXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXX XXXXX XXXX X XXXX XXXXXXXXXX XXXX XXXXXXXXX XXXXXXXXXX
XXXX XXXX XXXX XXXXXXXXXX XXX XX XXXX XXXX XXXXXXXXXXXXXXX
XXX XXXX XXXXXXXX XXXXXX XXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXX XXXXXX XXXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXX XX XXX XXX XXX XXX XXXX XXXXXXXXXXXXXXXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXX XX XXX XXX XXX XX XXXX XXXXXXXXXXXXXXXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXX XXX XXX XXX XX XXX XXXX XXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXX XX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXX
XXXXXXXX X
XXXXXXXX X
XXX XXXXXX XXXX
XXX XXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXX XXX XX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXX
XXXXXXXXXX XXXXXXXXXXXXXX X
XXXXXXXXXX XXXXXXXXXXXXXX X
XX XXXXXXXXXXXXXXXX
XXXX XXXXXXXXX
XXXXXXXXXXXXXX
XXXXX XX XXXXX XXX XXXXXXXXX XXXXX XX XXXX XXXXXXXX XXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXXXX
X X XXXXXXXXXXXXXXXXXXX
X X XXXXXXXXXXXXXXXXXXX
XXXXXXX
XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXXX XXXXX XX X XXXXXXX XXXXX XX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXX XXXXXXXXXXXX
XXXXX XXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXXX XXX XXXX XXXX
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXX
XXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXXXX XX X XXXXXXX XXXXX XX XXXXXXXXXXX XXX XXXXXXXXXX XXXXX
XXXX XX XXXXXX XXX XXXXX XX XX XX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXXXX XX XXXX XXXX X
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXX
XXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXX XX X XXXXXXX XXXXX XX XXXXXXXXXXX XXX XXXXXXXXXX XXXXX XXXX
XX XX XXX XXXXX XX XX XX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXX XX XXXX XXXX X
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXX
XXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXXXXX XX X XXXXXXX XXXXX XX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXXXXX XX XXXX XXXX X
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXXXXX XX XXXXXXXXXXXXX X XXXXXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXX
XXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXXX XXXXXXX XXXXXXX XXXX XX XXXXX XX X XXXXXXX XXXXX XX
XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXX XXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXX XX XXXX XXXX XX
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXX
XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXX XX X XXXXXXX XXXXX XX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXX XX XXXX XXXX XXX
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXX
XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXXXX XX X XXXXXXX XXXXX XX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXXXX XX XXXX XXXX X
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXXX X XXXXXXX XXXXX XX XXXXXXXXXX XXXX XXXXXXX XX XXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXX XXX XXXX XXXX XXX
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXX
XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXX XX XXXXX XXXX XXXXXXX XXXXXXXXX XXXXX XXXXXX XX XXX XXXXX XX
X XXXXXXX XXXXX XX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXX XX XXXXXXX XXXX XX
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXX XXXXXXX XXXXX XXX XXXXXXX XXXX X XXXXXXX XXXXX XX
XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXX XXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXX XX XXXXXXX XXXX XX
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX
XXXXXX
XX XXXXXXX XXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXX XXXXXXXXX X XXXXXXX XXXXX XX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXX XXXXXXX XXXX X
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXX
XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXX XXXXXX XX XXXXXXXXXXX XXX XXXXXXX XXX XXXXXXXXX XX
XXX XXXXX XX XXXX XX XXX XXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXX XXXXXXXXXX
XXX
XXXXXXX
XXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXX XXXXXX XX XXXXXXXXXXX XXX XXXXXXX XXX XXXXXXXXX XX
XXX XXXXX XXXXXXX XX XXX XXXXXX XXXXXXX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXX XXXXXXXXXX
XXX
XXXXXX
XXXXXX
XX XXXXXXX XXXXXXXXXXX
XXXXXXX XXX XXXXX XX XXX XXXXXXXXXXXX XXXXXXXX XXXXXX
XXXXXXXXX
XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXX XXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXXXXX XXXXXX XX XXXXXXXXXXX XXX XXXXXXX XXX XXXXX XX XXX XXXXX
XXXXXX XX XXX XXXXX XX XXX XXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXX
XXX XXXXXXXXXXXX
XXXX
XXXXXXXXXXX
XXXXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXXX X XXXXXXX XXXXX XX XXXXXXXXXX XXXX XXXXXXX XX XXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXX XXX XXXX XXXX X
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXXX XXXXXXXX
XXXXXX X XXXXXXX XXXXX XX XXXXXXXXXX XX XXX XXXXXXX XXXXXXXX XXXXXXX XXXX
XXXXXX XXX XXXXXXX XX XX XXXX XXXXXXX XX XXX XXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXX XXXXXXXXXX
XXXXX XXXXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX X XXX XXXXXXX XXXXX XXXXXXX XX XXXX XXXX XX
XXX XXXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXXX
XXXXXXXX
XX XXXXXXX XXXXXXXXXXXXXXXX XXXXXXXX
XXXXXXX XXX XXXX XXXX XX XX XX X XXXXXXX XXXXX XX XXXXXXXXXXX
XXXXX XXXXXXXXX
XXX XXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXX XXXXXX X XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXX XXXXXXXXXXXXX
XXX XXX
XX XXX XXXX XX XXXXXXXXXX XX X XXXXXXXXXX XXX XXXXXXXXX
| |
<reponame>gurlinthewurld/eden
# -*- coding: utf-8 -*-
""" Sahana Eden Deployments Model
@copyright: 2011-2013 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# Names exported via ``from <this module> import *``.
__all__ = ["S3DeploymentModel",
           "S3DeploymentAlertModel",
           "deploy_rheader",
           "deploy_deployment_rheader",
           ]
# Resolve a JSON implementation: prefer the stdlib json (Python >= 2.6), then
# the external simplejson package, then the pure-Python copy bundled with
# web2py/gluon. The resolved module is always bound to the name ``json``.
try:
    # try stdlib (Python 2.6)
    import json
except ImportError:
    try:
        # try external module
        import simplejson as json
    except ImportError:
        # fallback to pure-Python module
        # (was a bare ``except:``, which would also swallow SystemExit and
        # KeyboardInterrupt raised during import)
        import gluon.contrib.simplejson as json
from gluon import *
from ..s3 import *
# =============================================================================
class S3DeploymentModel(S3Model):
    """
    Data model for deployments of human resources: defines the
    deploy_deployment and deploy_human_resource_assignment tables,
    their CRUD/profile configuration, and the reusable deployment_id
    foreign-key field exported to global scope.
    """
    names = ["deploy_deployment",
             "deploy_deployment_id",
             "deploy_human_resource_assignment"]
    def model(self):
        """
        Define tables and configuration for this model.

        @return: dict passed back to global scope (s3.*), containing
                 the deploy_deployment_id reusable field
        """
        T = current.T
        db = current.db
        define_table = self.define_table
        configure = self.configure
        super_link = self.super_link
        add_component = self.add_component
        s3 = current.response.s3
        crud_strings = s3.crud_strings
        UNKNOWN_OPT = current.messages.UNKNOWN_OPT
        # ---------------------------------------------------------------------
        # Deployment
        #
        # Status options for a deployment (stored as integer)
        deployment_status_opts = {
            1 : T("Closed"),
            2 : T("Open")
        }
        tablename = "deploy_deployment"
        table = define_table(tablename,
                             # Document super-entity link (for attachments)
                             super_link("doc_id", "doc_entity"),
                             Field("name",
                                   label = T("Name"),
                                   requires=IS_NOT_EMPTY(),
                                   ),
                             # Country (L0-level location only)
                             self.gis_location_id(
                                label = T("Country"),
                                widget = S3LocationAutocompleteWidget(level="L0"),
                                requires = IS_EMPTY_OR(IS_LOCATION(level="L0")),
                                represent = self.gis_LocationRepresent(sep=", "),
                                comment = DIV(_class="tooltip",
                                              _title="%s|%s" % (T("Country"),
                                                                T("Enter some characters to bring up a list of possible matches"))),
                                ),
                             Field("event_type", # @todo: replace by link
                                   label = T("Event Type"),
                                   ),
                             Field("status", "integer",
                                   requires = IS_IN_SET(deployment_status_opts),
                                   represent = lambda opt: \
                                               deployment_status_opts.get(opt,
                                                                          UNKNOWN_OPT),
                                   default = 2,
                                   label = T("Status"),
                                   ),
                             s3_comments(),
                             *s3_meta_fields())
        # Virtual field
        # @todo: move to real field written onaccept?
        table.hrquantity = Field.Lazy(deploy_deployment_hrquantity)
        # CRUD Form
        crud_form = S3SQLCustomForm("name",
                                    "location_id",
                                    "status",
                                    "event_type",
                                    S3SQLInlineComponent("document",
                                                         name = "file",
                                                         label = T("Attachments"),
                                                         fields = ["file",
                                                                   "comments",
                                                                   ],
                                                         ),
                                    "comments",
                                    "created_on",
                                    )
        # Profile
        # Widget configurations for the deployment profile page:
        # alerts datalist (with add-button)
        alert_widget = dict(label="Alerts",
                            insert=lambda r, add_title, add_url: \
                                   A(add_title,
                                     _href=r.url(component="alert",
                                                 method="create"),
                                     _class="action-btn profile-add-btn"),
                            title_create="New Alert",
                            type="datalist",
                            list_fields = ["created_on",
                                           "subject",
                                           "body",
                                           ],
                            tablename = "deploy_alert",
                            context = "deployment",
                            colspan = 2,
                            list_layout = deploy_render_alert,
                            pagesize = 10,
                            )
        # responses datalist (read-only, no insert)
        response_widget = dict(label="Responses",
                               insert=False,
                               type="datalist",
                               list_fields = [
                                   "created_on",
                                   "human_resource_id$id",
                                   "human_resource_id$person_id",
                                   "human_resource_id$organisation_id",
                                   "message_id$body",
                                   ],
                               tablename = "deploy_response",
                               context = "deployment",
                               colspan = 2,
                               list_layout = deploy_render_response,
                               pagesize = 10,
                               )
        # assignments datalist (with add-button, unpaginated)
        assignment_widget = dict(label="Members Assigned",
                                 insert=lambda r, add_title, add_url: \
                                        A(add_title,
                                          _href=r.url(component="human_resource_assignment",
                                                      method="create"),
                                          _class="action-btn profile-add-btn"),
                                 title_create="Assign New Member",
                                 type="datalist",
                                 list_fields = [
                                     "human_resource_id$id",
                                     "human_resource_id$person_id",
                                     "human_resource_id$organisation_id",
                                     "start_date",
                                     "end_date",
                                     "rating",
                                     ],
                                 tablename = "deploy_human_resource_assignment",
                                 context = "deployment",
                                 colspan = 2,
                                 list_layout = deploy_render_human_resource_assignment,
                                 pagesize = None, # all records
                                 )
        # Table configuration
        # After create/update, redirect to the profile page of the record
        profile = URL(c="deploy", f="deployment", args=["[id]", "profile"])
        configure(tablename,
                  super_entity = "doc_entity",
                  crud_form = crud_form,
                  create_next = profile,
                  update_next = profile,
                  list_fields = ["name",
                                 (T("Date"), "created_on"),
                                 (T("Country"), "location_id"),
                                 (T("Members"), "hrquantity"),
                                 "status",
                                 ],
                  profile_header = lambda r: \
                                   deploy_deployment_rheader(r, profile=True),
                  profile_widgets = [alert_widget,
                                     response_widget,
                                     assignment_widget,
                                     ],
                  summary=[{"name": "rheader",
                            "common": True,
                            "widgets": [
                                {"method": self.add_button}
                            ]
                            },
                           {"name": "table",
                            "label": "Table",
                            "widgets": [{"method": "datatable"}]
                            },
                           {"name": "map",
                            "label": "Map",
                            "widgets": [{"method": "map",
                                         "ajax_init": True}],
                            },
                           ],
                  filter_widgets = [
                      S3TextFilter("name",
                                   label=T("Search"),
                                   ),
                      S3LocationFilter("location_id",
                                       label=T("Location"),
                                       widget="multiselect",
                                       levels=["L0"],
                                       hidden=True,
                                       ),
                      ],
                  orderby="deploy_deployment.created_on desc",
                  delete_next=URL(c="deploy", f="deployment", args="summary"),
                  )
        # Components
        add_component("deploy_human_resource_assignment",
                      deploy_deployment="deployment_id")
        add_component("deploy_alert",
                      deploy_deployment="deployment_id")
        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("New Deployment"),
            title_display = T("Deployment"),
            title_list = T("Deployments"),
            title_update = T("Edit Deployment Details"),
            title_search = T("Search Deployments"),
            title_upload = T("Import Deployments"),
            subtitle_create = T("Add New Deployment"),
            label_list_button = T("List Deployments"),
            label_create_button = T("New Deployment"),
            label_delete_button = T("Delete Deployment"),
            msg_record_created = T("Deployment added"),
            msg_record_modified = T("Deployment Details updated"),
            msg_record_deleted = T("Deployment deleted"),
            msg_list_empty = T("No Deployments currently registered"))
        # Reusable field
        represent = S3Represent(lookup=tablename)
        deployment_id = S3ReusableField("deployment_id", table,
                                        requires = IS_ONE_OF(db,
                                                             "deploy_deployment.id",
                                                             represent),
                                        represent = represent,
                                        label = T("Deployment"),
                                        ondelete = "CASCADE")
        # ---------------------------------------------------------------------
        # Deployment of human resources
        #
        tablename = "deploy_human_resource_assignment"
        table = define_table(tablename,
                             super_link("doc_id", "doc_entity"),
                             deployment_id(),
                             self.hrm_human_resource_id(empty=False,
                                                        label=T("Member")),
                             s3_date("start_date",
                                     label = T("Start Date")),
                             s3_date("end_date",
                                     label = T("End Date")),
                             Field("rating", "double",
                                   label=T("Rating"),
                                   default=0.0),
                             *s3_meta_fields())
        # Table configuration
        configure(tablename,
                  super_entity="doc_entity",
                  context = {"deployment": "deployment_id"},
                  )
        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("New Assignment"),
            title_display = T("Assignment Details"),
            title_list = T("Assignments"),
            title_update = T("Edit Assignment Details"),
            title_search = T("Search Assignments"),
            title_upload = T("Import Assignments"),
            subtitle_create = T("Add New Assignment"),
            label_list_button = T("List Assignments"),
            label_create_button = T("Add Assignment"),
            label_delete_button = T("Delete Assignment"),
            msg_record_created = T("Assignment added"),
            msg_record_modified = T("Assignment Details updated"),
            msg_record_deleted = T("Assignment deleted"),
            msg_list_empty = T("No Assignments currently registered"))
        # ---------------------------------------------------------------------
        # Deployment of assets
        #
        # @todo: deploy_asset_assignment
        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        #
        return dict(deploy_deployment_id = deployment_id,
                    )
    # -------------------------------------------------------------------------
    def defaults(self):
        """
        Safe defaults for model-global names in case module is disabled
        """
        # Dummy field so that foreign tables can still define the column
        deployment_id = S3ReusableField("deployment_id", "integer",
                                        readable=False, writable=False)
        return dict(deploy_deployment_id = deployment_id)
    # -------------------------------------------------------------------------
    @staticmethod
    def add_button(r, widget_id=None, visible=True, **attr):
        """
        Render an add-button for the summary page rheader widget,
        linking to the create form of the request's table
        """
        return A(S3Method.crud_string(r.tablename,
                                      "label_create_button"),
                 _href=r.url(method="create", id=0, vars={}),
                 _class="action-btn",
                 )
# =============================================================================
class S3DeploymentAlertModel(S3Model):
names = ["deploy_alert",
"deploy_alert_recipient",
"deploy_response",
]
def model(self):
T = current.T
add_component = self.add_component
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
set_method = self.set_method
super_link = self.super_link
message_id = self.msg_message_id
# ---------------------------------------------------------------------
# Alert (also the PE representing its Recipients)
#
tablename = "deploy_alert"
table = define_table(tablename,
super_link("pe_id", "pr_pentity"),
self.deploy_deployment_id(
requires = IS_ONE_OF(current.db,
"deploy_deployment.id",
S3Represent(lookup="deploy_deployment"),
filterby="status",
filter_opts=(2,),
)),
Field("subject", length=78, # RFC 2822
label = T("Subject"),
requires = IS_NOT_EMPTY(),
),
Field("body", "text",
label = T("Message"),
represent = lambda v: \
v or current.messages["NONE"],
),
# Link to the Message once sent
message_id(readable=False),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_create = T("New Alert"),
title_display = T("Alert Details"),
title_list = T("Alerts"),
title_update = T("Edit Alert Details"),
title_search = T("Search Alerts"),
title_upload = T("Import Alerts"),
subtitle_create = T("Add New Alert"),
label_list_button = T("List Alerts"),
label_create_button = T("Add Alert"),
label_delete_button = T("Delete Alert"),
msg_record_created = T("Alert added"),
msg_record_modified = T("Alert Details updated"),
msg_record_deleted = T("Alert deleted"),
msg_list_empty = T("No Alerts currently registered"))
# CRUD Form
crud_form = S3SQLCustomForm("deployment_id",
"subject",
"body",
"created_on",
)
# Table Configuration
configure(tablename,
super_entity = "pr_pentity",
context = {"deployment": "deployment_id"},
crud_form = crud_form,
list_fields = ["deployment_id",
"subject",
"body",
"alert_recipient.human_resource_id",
],
)
# Components
add_component("deploy_alert_recipient",
deploy_alert=dict(name="recipient",
joinby="alert_id"))
add_component("deploy_response", deploy_alert="alert_id")
# Custom Methods
set_method("deploy", "alert",
method="select",
action=self.deploy_alert_select_recipients)
set_method("deploy", "alert",
method="send",
action=self.deploy_alert_send)
# Reusable field
represent = S3Represent(lookup=tablename)
alert_id = S3ReusableField("alert_id", table,
requires = IS_ONE_OF(db,
"deploy_alert.id",
represent),
represent = represent,
label = T("Alert"),
ondelete = "CASCADE")
# ---------------------------------------------------------------------
# Recipients of the Alert
#
tablename = "deploy_alert_recipient"
table = define_table(tablename,
alert_id(),
self.hrm_human_resource_id(empty=False,
label=T("Member")),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
title_create = T("New Recipient"),
title_display = T("Recipient Details"),
title_list = T("Recipients"),
title_update = T("Edit Recipient Details"),
title_search = T("Search Recipients"),
title_upload = T("Import Recipients"),
subtitle_create = T("Add New Recipient"),
label_list_button = T("List Recipients"),
label_create_button = T("Add Recipient"),
label_delete_button = T("Delete Recipient"),
msg_record_created = T("Recipient added"),
msg_record_modified = T("Recipient Details updated"),
msg_record_deleted = T("Recipient deleted"),
msg_list_empty = T("No Recipients currently defined"))
# ---------------------------------------------------------------------
# Responses to Alerts
#
tablename = "deploy_response"
table = define_table(tablename,
self.deploy_deployment_id(),
self.hrm_human_resource_id(empty=False,
label=T("Member")),
message_id(),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict()
# -------------------------------------------------------------------------
def defaults(self):
"""
Safe defaults for model-global names in case module is disabled
"""
return dict()
# -------------------------------------------------------------------------
@staticmethod
def deploy_alert_select_recipients(r, **attr):
"""
Custom Method to select recipients for an Alert
"""
alert_id = r.id
if r.representation not in ("html", "aadata") or not alert_id or r.component:
raise HTTP(501, BADMETHOD)
T = | |
# gh_stars: 0
# MINLP written by GAMS Convert at 08/20/20 01:30:49
#
# Equation counts
# Total E G L N X C B
# 394 46 316 32 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 215 155 60 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 1004 914 90 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
# Machine-generated Pyomo translation of a GAMS MINLP instance (see the
# GAMS Convert header above).  Names follow the generator's scheme:
# m.x* continuous variables, m.b* binaries, m.c* constraints.
model = m = ConcreteModel()
# x1..x30: nonnegative continuous variables, declared in pairs
# (x(2k-1), x(2k)) that share a common upper bound
m.x1 = Var(within=Reals,bounds=(0,0.26351883),initialize=0)
m.x2 = Var(within=Reals,bounds=(0,0.26351883),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,0.22891574),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,0.22891574),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,0.21464835),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,0.21464835),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,0.17964414),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,0.17964414),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,0.17402843),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,0.17402843),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,0.15355962),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,0.15355962),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,0.1942283),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,0.1942283),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,0.25670555),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,0.25670555),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,0.27088619),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,0.27088619),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,0.28985675),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,0.28985675),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,0.25550303),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,0.25550303),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,0.19001726),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,0.19001726),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,0.23803143),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,0.23803143),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,0.23312962),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,0.23312962),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,0.27705307),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,0.27705307),initialize=0)
# x31..x45: continuous variables with strictly positive bounds,
# initialized at their lower bounds
m.x31 = Var(within=Reals,bounds=(1.92,2.02),initialize=1.92)
m.x32 = Var(within=Reals,bounds=(3.82,4.01333333333333),initialize=3.82)
m.x33 = Var(within=Reals,bounds=(4.53333333333333,4.76),initialize=4.53333333333333)
m.x34 = Var(within=Reals,bounds=(5.39333333333333,5.96),initialize=5.39333333333333)
m.x35 = Var(within=Reals,bounds=(36.3533333333333,42.0933333333333),initialize=36.3533333333333)
m.x36 = Var(within=Reals,bounds=(85.7466666666667,99.28),initialize=85.7466666666667)
m.x37 = Var(within=Reals,bounds=(6.28,6.59333333333333),initialize=6.28)
m.x38 = Var(within=Reals,bounds=(53.4333333333333,61.8666666666667),initialize=53.4333333333333)
m.x39 = Var(within=Reals,bounds=(48.6133333333333,56.2866666666667),initialize=48.6133333333333)
m.x40 = Var(within=Reals,bounds=(33.9533333333333,41.5),initialize=33.9533333333333)
m.x41 = Var(within=Reals,bounds=(53.9666666666667,62.4933333333333),initialize=53.9666666666667)
m.x42 = Var(within=Reals,bounds=(77.0533333333333,80.9066666666667),initialize=77.0533333333333)
m.x43 = Var(within=Reals,bounds=(24.9066666666667,26.1466666666667),initialize=24.9066666666667)
m.x44 = Var(within=Reals,bounds=(36.1866666666667,38),initialize=36.1866666666667)
m.x45 = Var(within=Reals,bounds=(56.3133333333333,62.24),initialize=56.3133333333333)
# b46..b75: binary selectors; c32..c46 force exactly one of each
# consecutive pair (b46,b47), (b48,b49), ... to be chosen
m.b46 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b47 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b48 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b49 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b50 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b51 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b52 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b53 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b54 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b58 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b59 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b60 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b61 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b62 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b63 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b64 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b65 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b66 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b67 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b68 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b69 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b70 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b71 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b72 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b73 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b74 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b75 = Var(within=Binary,bounds=(0,1),initialize=0)
# x76..x90: the terms summed in the objective (see m.obj below)
m.x76 = Var(within=Reals,bounds=(0,0.5323080366),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,0.918715169866666),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,1.021726146),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,1.0706790744),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,7.32543671346667),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,15.2453990736),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,1.28061192466667),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,15.8815166933333),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,15.2472806811333),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,12.029055125),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,15.9672360214667),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,15.3736631157333),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,6.2237284564),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,8.85892556),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,17.2437830768),initialize=0)
# x91..x94: bounded continuous variables (x93/x94 over a negative range)
m.x91 = Var(within=Reals,bounds=(0.25788969,0.35227087),initialize=0.25788969)
m.x92 = Var(within=Reals,bounds=(0.25788969,0.35227087),initialize=0.25788969)
m.x93 = Var(within=Reals,bounds=(-0.98493628,-0.7794471),initialize=-0.7794471)
m.x94 = Var(within=Reals,bounds=(-0.98493628,-0.7794471),initialize=-0.7794471)
# x95..x154: nonnegative auxiliary variables; x95..x122 appear as the
# slack terms of the distance constraints c49..c98 below
m.x95 = Var(within=Reals,bounds=(0,0.0580296499999999),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,0.0580296499999999),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,0.0546689399999999),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,0.0546689399999999),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,0.09360565),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,0.09360565),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,0.0476880399999999),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,0.0476880399999999),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,0.05276021),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,0.05276021),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,0.04905388),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,0.04905388),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,0.07731692),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,0.07731692),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,0.08211741),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,0.08211741),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,0.09438118),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,0.09438118),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,0.08436757),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,0.08436757),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,0.06987597),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,0.06987597),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,0.04788831),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,0.04788831),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,0.0668875099999999),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,0.0668875099999999),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,0.07276512),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,0.07276512),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,0.09438118),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,0.09438118),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,0.20548918),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,0.20548918),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,0.1742468),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,0.1742468),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,0.1210427),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,0.1210427),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,0.1319561),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,0.1319561),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,0.12126822),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,0.12126822),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,0.10450574),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,0.10450574),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,0.11691138),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,0.11691138),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,0.17458814),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,0.17458814),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,0.17650501),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,0.17650501),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,0.20548918),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,0.20548918),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,0.18562706),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,0.18562706),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,0.14212895),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,0.14212895),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,0.17114392),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,0.17114392),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,0.1603645),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,0.1603645),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,0.18267189),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,0.18267189),initialize=0)
# x155..x184: auxiliary variables with the same bounds as x76..x90
# (duplicated per pair); lower-bounded by the trilinear products in
# constraints c2..c31
m.x155 = Var(within=Reals,bounds=(0,0.5323080366),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,0.5323080366),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,0.918715169866666),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,0.918715169866666),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,1.021726146),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,1.021726146),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,1.0706790744),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,1.0706790744),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,7.32543671346667),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,7.32543671346667),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,15.2453990736),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,15.2453990736),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,1.28061192466667),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,1.28061192466667),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,15.8815166933333),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,15.8815166933333),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,15.2472806811333),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,15.2472806811333),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,12.029055125),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,12.029055125),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,15.9672360214667),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,15.9672360214667),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,15.3736631157333),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,15.3736631157333),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,6.2237284564),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,6.2237284564),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,8.85892556),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,8.85892556),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,17.2437830768),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,17.2437830768),initialize=0)
# b185..b214: binary variables (not referenced by any constraint
# visible in this excerpt; presumably used further below)
m.b185 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b186 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b187 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b188 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b189 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b190 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b191 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b192 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b193 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b194 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b195 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b196 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b197 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b198 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b199 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b200 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b201 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b202 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b203 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b204 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b205 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b206 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b207 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b208 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b209 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b210 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b211 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b212 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b213 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b214 = Var(within=Binary,bounds=(0,1),initialize=0)
# Objective: minimize the sum of the terms x76..x90
m.obj = Objective(expr= m.x76 + m.x77 + m.x78 + m.x79 + m.x80 + m.x81 + m.x82 + m.x83 + m.x84 + m.x85 + m.x86 + m.x87
                        + m.x88 + m.x89 + m.x90, sense=minimize)
# c2..c31: trilinear product bounds -- each auxiliary x(154+k) must
# dominate the product x_i * x_j * b_k of a continuous pair and its
# binary selector (i.e. x155 >= x31*x1*b46, etc.)
m.c2 = Constraint(expr=-m.x31*m.x1*m.b46 + m.x155 >= 0)
m.c3 = Constraint(expr=-m.x31*m.x2*m.b47 + m.x156 >= 0)
m.c4 = Constraint(expr=-m.x32*m.x3*m.b48 + m.x157 >= 0)
m.c5 = Constraint(expr=-m.x32*m.x4*m.b49 + m.x158 >= 0)
m.c6 = Constraint(expr=-m.x33*m.x5*m.b50 + m.x159 >= 0)
m.c7 = Constraint(expr=-m.x33*m.x6*m.b51 + m.x160 >= 0)
m.c8 = Constraint(expr=-m.x34*m.x7*m.b52 + m.x161 >= 0)
m.c9 = Constraint(expr=-m.x34*m.x8*m.b53 + m.x162 >= 0)
m.c10 = Constraint(expr=-m.x35*m.x9*m.b54 + m.x163 >= 0)
m.c11 = Constraint(expr=-m.x35*m.x10*m.b55 + m.x164 >= 0)
m.c12 = Constraint(expr=-m.x36*m.x11*m.b56 + m.x165 >= 0)
m.c13 = Constraint(expr=-m.x36*m.x12*m.b57 + m.x166 >= 0)
m.c14 = Constraint(expr=-m.x37*m.x13*m.b58 + m.x167 >= 0)
m.c15 = Constraint(expr=-m.x37*m.x14*m.b59 + m.x168 >= 0)
m.c16 = Constraint(expr=-m.x38*m.x15*m.b60 + m.x169 >= 0)
m.c17 = Constraint(expr=-m.x38*m.x16*m.b61 + m.x170 >= 0)
m.c18 = Constraint(expr=-m.x39*m.x17*m.b62 + m.x171 >= 0)
m.c19 = Constraint(expr=-m.x39*m.x18*m.b63 + m.x172 >= 0)
m.c20 = Constraint(expr=-m.x40*m.x19*m.b64 + m.x173 >= 0)
m.c21 = Constraint(expr=-m.x40*m.x20*m.b65 + m.x174 >= 0)
m.c22 = Constraint(expr=-m.x41*m.x21*m.b66 + m.x175 >= 0)
m.c23 = Constraint(expr=-m.x41*m.x22*m.b67 + m.x176 >= 0)
m.c24 = Constraint(expr=-m.x42*m.x23*m.b68 + m.x177 >= 0)
m.c25 = Constraint(expr=-m.x42*m.x24*m.b69 + m.x178 >= 0)
m.c26 = Constraint(expr=-m.x43*m.x25*m.b70 + m.x179 >= 0)
m.c27 = Constraint(expr=-m.x43*m.x26*m.b71 + m.x180 >= 0)
m.c28 = Constraint(expr=-m.x44*m.x27*m.b72 + m.x181 >= 0)
m.c29 = Constraint(expr=-m.x44*m.x28*m.b73 + m.x182 >= 0)
m.c30 = Constraint(expr=-m.x45*m.x29*m.b74 + m.x183 >= 0)
m.c31 = Constraint(expr=-m.x45*m.x30*m.b75 + m.x184 >= 0)
# c32..c46: exactly one alternative of each binary pair is selected
m.c32 = Constraint(expr= m.b46 + m.b47 == 1)
m.c33 = Constraint(expr= m.b48 + m.b49 == 1)
m.c34 = Constraint(expr= m.b50 + m.b51 == 1)
m.c35 = Constraint(expr= m.b52 + m.b53 == 1)
m.c36 = Constraint(expr= m.b54 + m.b55 == 1)
m.c37 = Constraint(expr= m.b56 + m.b57 == 1)
m.c38 = Constraint(expr= m.b58 + m.b59 == 1)
m.c39 = Constraint(expr= m.b60 + m.b61 == 1)
m.c40 = Constraint(expr= m.b62 + m.b63 == 1)
m.c41 = Constraint(expr= m.b64 + m.b65 == 1)
m.c42 = Constraint(expr= m.b66 + m.b67 == 1)
m.c43 = Constraint(expr= m.b68 + m.b69 == 1)
m.c44 = Constraint(expr= m.b70 + m.b71 == 1)
m.c45 = Constraint(expr= m.b72 + m.b73 == 1)
m.c46 = Constraint(expr= m.b74 + m.b75 == 1)
# c47..c48: weighted capacity limits (<= 302.08), one per side of the
# binary pairs (first members b46,b48,... and second members b47,b49,...)
m.c47 = Constraint(expr= 2.02*m.b46 + 4.01333333333333*m.b48 + 4.76*m.b50 + 5.96*m.b52 + 42.0933333333333*m.b54
                         + 99.28*m.b56 + 6.59333333333333*m.b58 + 61.8666666666667*m.b60 + 56.2866666666667*m.b62
                         + 41.5*m.b64 + 62.4933333333333*m.b66 + 80.9066666666667*m.b68 + 26.1466666666667*m.b70
                         + 38*m.b72 + 62.24*m.b74 <= 302.08)
m.c48 = Constraint(expr= 2.02*m.b47 + 4.01333333333333*m.b49 + 4.76*m.b51 + 5.96*m.b53 + 42.0933333333333*m.b55
                         + 99.28*m.b57 + 6.59333333333333*m.b59 + 61.8666666666667*m.b61 + 56.2866666666667*m.b63
                         + 41.5*m.b65 + 62.4933333333333*m.b67 + 80.9066666666667*m.b69 + 26.1466666666667*m.b71
                         + 38*m.b73 + 62.24*m.b75 <= 302.08)
# c49..c76: x9{1,2} + slack >= const; together with c77..c98 below
# (slack >= x9{1,2} - const) they linearize slack >= |x9{1,2} - const|,
# i.e. the slacks x95..x122 are L1 distances from fixed targets
m.c49 = Constraint(expr= m.x91 + m.x95 >= 0.29424122)
m.c50 = Constraint(expr= m.x92 + m.x96 >= 0.29424122)
m.c51 = Constraint(expr= m.x91 + m.x97 >= 0.29760193)
m.c52 = Constraint(expr= m.x92 + m.x98 >= 0.29760193)
m.c53 = Constraint(expr= m.x91 + m.x99 >= 0.35149534)
m.c54 = Constraint(expr= m.x92 + m.x100 >= 0.35149534)
m.c55 = Constraint(expr= m.x91 + m.x101 >= 0.30458283)
m.c56 = Constraint(expr= m.x92 + m.x102 >= 0.30458283)
m.c57 = Constraint(expr= m.x91 + m.x103 >= 0.29951066)
m.c58 = Constraint(expr= m.x92 + m.x104 >= 0.29951066)
m.c59 = Constraint(expr= m.x91 + m.x105 >= 0.30694357)
m.c60 = Constraint(expr= m.x92 + m.x106 >= 0.30694357)
m.c61 = Constraint(expr= m.x91 + m.x107 >= 0.33520661)
m.c62 = Constraint(expr= m.x92 + m.x108 >= 0.33520661)
m.c63 = Constraint(expr= m.x91 + m.x109 >= 0.3400071)
m.c64 = Constraint(expr= m.x92 + m.x110 >= 0.3400071)
m.c65 = Constraint(expr= m.x91 + m.x111 >= 0.35227087)
m.c66 = Constraint(expr= m.x92 + m.x112 >= 0.35227087)
m.c67 = Constraint(expr= m.x91 + m.x113 >= 0.34225726)
m.c68 = Constraint(expr= m.x92 + m.x114 >= 0.34225726)
m.c69 = Constraint(expr= m.x91 + m.x115 >= 0.32776566)
m.c70 = Constraint(expr= m.x92 + m.x116 >= 0.32776566)
m.c71 = Constraint(expr= m.x91 + m.x117 >= 0.30438256)
m.c72 = Constraint(expr= m.x92 + m.x118 >= 0.30438256)
m.c73 = Constraint(expr= m.x91 + m.x119 >= 0.28538336)
m.c74 = Constraint(expr= m.x92 + m.x120 >= 0.28538336)
m.c75 = Constraint(expr= m.x91 + m.x121 >= 0.27950575)
m.c76 = Constraint(expr= m.x92 + m.x122 >= 0.27950575)
# c77..c98: the negative-side counterparts (slack >= x9{1,2} - const).
# NOTE(review): the pair for x111/x112 (const 0.35227087 == upper bound
# of x91/x92) has no counterpart here -- presumably eliminated by the
# GAMS presolve ("Reformulation has removed ... equation" in the header)
m.c77 = Constraint(expr= - m.x91 + m.x95 >= -0.29424122)
m.c78 = Constraint(expr= - m.x92 + m.x96 >= -0.29424122)
m.c79 = Constraint(expr= - m.x91 + m.x97 >= -0.29760193)
m.c80 = Constraint(expr= - m.x92 + m.x98 >= -0.29760193)
m.c81 = Constraint(expr= - m.x91 + m.x99 >= -0.35149534)
m.c82 = Constraint(expr= - m.x92 + m.x100 >= -0.35149534)
m.c83 = Constraint(expr= - m.x91 + m.x101 >= -0.30458283)
m.c84 = Constraint(expr= - m.x92 + m.x102 >= -0.30458283)
m.c85 = Constraint(expr= - m.x91 + m.x103 >= -0.29951066)
m.c86 = Constraint(expr= - m.x92 + m.x104 >= -0.29951066)
m.c87 = Constraint(expr= - m.x91 + m.x105 >= -0.30694357)
m.c88 = Constraint(expr= - m.x92 + m.x106 >= -0.30694357)
m.c89 = Constraint(expr= - m.x91 + m.x107 >= -0.33520661)
m.c90 = Constraint(expr= - m.x92 + m.x108 >= -0.33520661)
m.c91 = Constraint(expr= - m.x91 + m.x109 >= -0.3400071)
m.c92 = Constraint(expr= - m.x92 + m.x110 >= -0.3400071)
m.c93 = Constraint(expr= - m.x91 + m.x113 >= -0.34225726)
m.c94 = Constraint(expr= - m.x92 + m.x114 >= -0.34225726)
m.c95 = Constraint(expr= - m.x91 + m.x115 >= -0.32776566)
m.c96 = Constraint(expr= - m.x92 + m.x116 >= -0.32776566)
m.c97 = Constraint(expr= - m.x91 + m.x117 >= -0.30438256)
m.c98 = Constraint(expr= - m.x92 + m.x118 >= -0.30438256)
m.c99 = Constraint(expr= - m.x91 + m.x119 >= | |
import PythonQt
from PythonQt import QtCore, QtGui
import director.objectmodel as om
import director.visualization as vis
from director import affordanceitems
from director import callbacks
from director import cameracontrol
from director import splinewidget
from director import transformUtils
from director.debugpolydata import DebugData
from director.pointpicker import PlacerWidget
from director import vtkNumpy as vnp
from director import applogic as app
from director import vtkAll as vtk
from director import filterUtils
from director.shallowCopy import shallowCopy
from director import segmentationpanel
from director import segmentation
from director import segmentationroutines
from director.robotlinkselector import RobotLinkSelector
from director.vieweventfilter import ViewEventFilter
from director import viewbehaviors
from director.utime import getUtime
from director import drcargs
import numpy as np
from . import ioutils
import os
import re
import random
import colorsys
# Hue accumulator (module-level mutable state) used by onCopyPointCloud to
# pick a fresh-but-similar random display color for each copied point cloud.
lastRandomColor = 0.0
class RobotViewBehaviors(object):
    """
    Installs robot-specific interaction behaviors on a render view:
    camera reset presets, interactive walking/driving goal placement,
    and context-menu actions for affordances.
    """

    def __init__(self, view, _robotSystem):
        self.view = view
        # Generic view behaviors, plus a robot-specific event filter on top.
        self.viewBehaviors = viewbehaviors.ViewBehaviors(view)
        # NOTE(review): RobotViewEventFilter is not defined in this chunk;
        # presumably declared elsewhere in this module -- confirm.
        self.robotViewBehaviors = RobotViewEventFilter(self, view)
        self.robotName = _robotSystem.robotName
        self.robotSystem = _robotSystem
        self.robotModel = self.robotSystem.robotStateModel
        # The link selector needs a main window; skip it in headless use.
        if app.getMainWindow() is not None:
            self.robotLinkSelector = RobotLinkSelector()
        viewbehaviors.registerContextMenuActions(self.getRobotActions)

    def resetCameraToRobot(self, view):
        """Fly the camera to a canonical three-quarter view of the robot."""
        link = drcargs.getRobotConfig(self.robotName)["pelvisLink"]
        t = self.robotModel.getLinkFrame(link)
        if t is None:
            # Link frame unavailable: fall back to the world origin.
            t = vtk.vtkTransform()
        focalPoint = [0.0, 0.0, 0.25]
        position = [-4.0, -2.0, 2.25]
        # Transform the canonical (robot-relative) points into world coordinates.
        t.TransformPoint(focalPoint, focalPoint)
        t.TransformPoint(position, position)
        flyer = cameracontrol.Flyer(view)
        flyer.zoomTo(focalPoint, position)

    def resetCameraToRobotAbove(self, view):
        """Fly the camera to a near-top-down view above the robot."""
        link = drcargs.getRobotConfig(self.robotName)["pelvisLink"]
        t = self.robotModel.getLinkFrame(link)
        if t is None:
            t = vtk.vtkTransform()
        focalPoint = [2, 0.0, 0.25]
        position = [1, 0.0, 15.25]  # to avoid singularities
        t.TransformPoint(focalPoint, focalPoint)
        t.TransformPoint(position, position)
        flyer = cameracontrol.Flyer(view)
        flyer.zoomTo(focalPoint, position)

    def resetCameraToHeadView(self, view):
        """Place the camera just in front of the head, keeping the current gaze direction."""
        head = self.robotModel.getLinkFrame(
            drcargs.getRobotConfig(self.robotName)["headLink"]
        )
        pelvis = self.robotModel.getLinkFrame(
            drcargs.getRobotConfig(self.robotName)["pelvisLink"]
        )
        # Forward direction taken from the pelvis frame's x axis.
        viewDirection = np.array([1.0, 0.0, 0.0])
        pelvis.TransformVector(viewDirection, viewDirection)
        cameraPosition = np.array(head.GetPosition()) + 0.10 * viewDirection
        camera = view.camera()
        # Preserve (normalized) current gaze direction of the camera.
        focalOffset = np.array(camera.GetFocalPoint()) - np.array(camera.GetPosition())
        focalOffset /= np.linalg.norm(focalOffset)
        camera.SetPosition(cameraPosition)
        camera.SetFocalPoint(cameraPosition + focalOffset * 0.03)
        camera.SetViewUp([0, 0, 1])
        camera.SetViewAngle(90)
        view.render()

    def newWalkingGoal(self, displayPoint, view):
        """Create an editable walking-goal frame where the user clicked the ground."""
        # put walking goal at robot's base
        mainLink = drcargs.getRobotConfig(self.robotName)["pelvisLink"]
        footFrame = self.robotModel.getLinkFrame(mainLink)

        if not footFrame:
            print(
                "ERROR: The link '{}' provided for the key 'pelvisLink' in the configuration file does not exist in "
                "the robot's URDF. Cannot place walking goal.".format(mainLink)
            )
            return

        # Intersect the pick ray with the horizontal plane through the base link.
        worldPt1, worldPt2 = vis.getRayFromDisplayPoint(view, displayPoint)
        groundOrigin = footFrame.GetPosition()
        groundNormal = [0.0, 0.0, 1.0]
        selectedGroundPoint = [0.0, 0.0, 0.0]
        t = vtk.mutable(0.0)
        vtk.vtkPlane.IntersectWithLine(
            worldPt1, worldPt2, groundNormal, groundOrigin, t, selectedGroundPoint
        )
        walkingTarget = transformUtils.frameFromPositionAndRPY(
            selectedGroundPoint, np.array(footFrame.GetOrientation())
        )
        frameObj = vis.updateFrame(
            walkingTarget,
            self.robotName + " walking goal",
            parent="planning",
            scale=0.25,
        )
        frameObj.setProperty("Edit", True)

        # Constrain the widget: in-plane translation and yaw-only rotation.
        rep = frameObj.widget.GetRepresentation()
        rep.SetTranslateAxisEnabled(2, False)
        rep.SetRotateAxisEnabled(0, False)
        rep.SetRotateAxisEnabled(1, False)
        frameObj.widget.HandleRotationEnabledOff()

        terrain = om.findObjectByName("HEIGHT_MAP_SCENE")
        if terrain:
            # Snap the goal's height to the terrain under the clicked point:
            # keep terrain points within 0.1 m of the vertical line through
            # the goal, then take their maximum z.
            pos = np.array(frameObj.transform.GetPosition())
            polyData = filterUtils.removeNonFinitePoints(terrain.polyData)
            if polyData.GetNumberOfPoints():
                polyData = segmentation.labelDistanceToLine(
                    polyData, pos, pos + [0, 0, 1]
                )
                polyData = segmentation.thresholdPoints(
                    polyData, "distance_to_line", [0.0, 0.1]
                )
                if polyData.GetNumberOfPoints():
                    pos[2] = np.nanmax(vnp.getNumpyFromVtk(polyData, "Points")[:, 2])
                    frameObj.transform.Translate(
                        pos - np.array(frameObj.transform.GetPosition())
                    )
            # Draggable sphere handle that stays glued to the terrain surface.
            d = DebugData()
            d.addSphere((0, 0, 0), radius=0.03)
            handle = vis.showPolyData(
                d.getPolyData(),
                "walking goal terrain handle " + self.robotName,
                parent=frameObj,
                visible=True,
                color=[1, 1, 0],
            )
            handle.actor.SetUserTransform(frameObj.transform)
            placer = PlacerWidget(app.getCurrentRenderView(), handle, terrain)

            def onFramePropertyModified(propertySet, propertyName):
                # Start/stop terrain dragging when the Edit property toggles.
                if propertyName == "Edit":
                    if propertySet.getProperty(propertyName):
                        placer.start()
                    else:
                        placer.stop()

            frameObj.properties.connectPropertyChanged(onFramePropertyModified)
            onFramePropertyModified(frameObj, "Edit")

        frameObj.connectFrameModified(self.onWalkingGoalModified)

    def onWalkingGoalModified(self, frame):
        # Moving the walking goal invalidates any footstep widget being edited.
        om.removeFromObjectModel(om.findObjectByName("footstep widget"))

    def newDrivingGoal(self, displayPoint, view):
        """Create an editable driving-goal frame at the clicked ground point."""
        # Places the driving goal on the plane of the root link current yaw
        # for husky: the bottom of the wheels.
        # for hyq/anymal the midpoint of the trunk
        # TODO: read the link from the director config
        mainLink = drcargs.getRobotConfig(self.robotName)["pelvisLink"]
        footFrame = self.robotModel.getLinkFrame(mainLink)

        # Intersect the pick ray with the horizontal plane through the base link.
        worldPt1, worldPt2 = vis.getRayFromDisplayPoint(view, displayPoint)
        groundOrigin = footFrame.GetPosition()
        groundNormal = [0.0, 0.0, 1.0]
        selectedGroundPoint = [0.0, 0.0, 0.0]
        t = vtk.mutable(0.0)
        vtk.vtkPlane.IntersectWithLine(
            worldPt1, worldPt2, groundNormal, groundOrigin, t, selectedGroundPoint
        )

        # Keep only the yaw of the base frame so the goal stays level
        # (rollPitchYaw is in radians; frameFromPositionAndRPY takes degrees).
        footFrameRPY = transformUtils.rollPitchYawFromTransform(footFrame)
        drivingTarget = transformUtils.frameFromPositionAndRPY(
            selectedGroundPoint, [0, 0, footFrameRPY[2] * 180.0 / np.pi]
        )

        # Create the widget and send a message:
        # walkingGoal = walkingGoal or self.newWalkingGoalFrame(self.robotModel)
        frameObj = vis.updateFrame(
            drivingTarget, "driving goal", parent="planning", scale=0.25
        )
        frameObj.setProperty("Edit", True)

        # Constrain the widget: in-plane translation and yaw-only rotation.
        rep = frameObj.widget.GetRepresentation()
        rep.SetTranslateAxisEnabled(2, False)
        rep.SetRotateAxisEnabled(0, False)
        rep.SetRotateAxisEnabled(1, False)
        frameObj.widget.HandleRotationEnabledOff()

        # Publish immediately and on every subsequent edit of the frame.
        frameObj.connectFrameModified(onNewDrivingGoal)
        onNewDrivingGoal(frameObj)

    def getRobotActions(self, view, pickedObj, pickedPoint):
        """Build (label, callback) context-menu actions for the picked object."""
        # TODO this is a somewhat crude transplant to maintain functionality. The context menu construction that uses
        # this should be improved
        affordanceObj = (
            pickedObj if isinstance(pickedObj, affordanceitems.AffordanceItem) else None
        )

        def addNewFrame():
            # Add a frame at the picked point, synced to the affordance's own frame.
            t = transformUtils.copyFrame(affordanceObj.getChildFrame().transform)
            t.PostMultiply()
            t.Translate(np.array(pickedPoint) - np.array(t.GetPosition()))
            newFrame = vis.showFrame(
                t,
                "%s frame %d"
                % (affordanceObj.getProperty("Name"), len(affordanceObj.children())),
                scale=0.2,
                parent=affordanceObj,
            )
            affordanceObj.getChildFrame().getFrameSync().addFrame(
                newFrame, ignoreIncoming=True
            )

        def copyAffordance():
            # Duplicate the affordance (manager assigns a fresh uuid) and
            # open the copy for editing.
            desc = dict(affordanceObj.getDescription())
            del desc["uuid"]
            desc["Name"] = desc["Name"] + " copy"
            aff = self.robotSystem.affordanceManager.newAffordanceFromDescription(desc)
            aff.getChildFrame().setProperty("Edit", True)

        def onPromoteToAffordance():
            affObj = affordanceitems.MeshAffordanceItem.promotePolyDataItem(pickedObj)
            self.robotSystem.affordanceManager.registerAffordance(affObj)

        actions = []

        if affordanceObj:
            actions.extend(
                [
                    ("Copy affordance", copyAffordance),
                    ("Add new frame", addNewFrame),
                ]
            )
        elif type(pickedObj) == vis.PolyDataItem:
            # NOTE(review): exact type comparison (not isinstance) appears
            # deliberate -- it excludes PolyDataItem subclasses -- confirm
            # before "fixing" to isinstance.
            actions.extend(
                [
                    ("Promote to Affordance", onPromoteToAffordance),
                ]
            )

        return actions
def getChildFrame(obj):
    """Return obj.getChildFrame() if the object provides one, else None."""
    if not hasattr(obj, "getChildFrame"):
        return None
    return obj.getChildFrame()
def placeHandModel(displayPoint, view, side="left"):
    """Place a hand model at the picked location.

    If the pick hits a frame item, the hand is attached to (and synced with)
    that frame.  Otherwise the hand is oriented from the picked surface
    normal and placed at the picked point on the object.

    NOTE(review): `handFactory` is not defined in this chunk; presumably a
    module-level object declared elsewhere in this file -- confirm.
    """
    obj, _ = vis.findPickedObject(displayPoint, view)
    if isinstance(obj, vis.FrameItem):
        _, handFrame = handFactory.placeHandModelWithTransform(
            obj.transform, view, side=side, parent=obj.parent()
        )
        # Two-way sync, but ignore incoming edits from the hand's own frame.
        handFrame.frameSync = vis.FrameSync()
        handFrame.frameSync.addFrame(obj)
        handFrame.frameSync.addFrame(handFrame, ignoreIncoming=True)
        return

    pickedPointFields = vis.pickPoint(
        displayPoint, view, pickType="cells", tolerance=0.0
    )
    pickedPoint = pickedPointFields.pickedPoint
    prop = pickedPointFields.pickedProp
    # BUGFIX: `normal` was referenced below without ever being assigned
    # (NameError at runtime).  Take the surface normal from the pick result.
    normal = np.array(pickedPointFields.pickedNormal)

    obj = vis.getObjectByProp(prop)
    if not obj:
        return

    # Build an orthonormal basis from the surface normal (y points into the
    # surface, z stays as close to world-up as the normal allows).
    yaxis = -normal
    zaxis = [0, 0, 1]
    xaxis = np.cross(yaxis, zaxis)
    xaxis /= np.linalg.norm(xaxis)
    zaxis = np.cross(xaxis, yaxis)
    zaxis /= np.linalg.norm(zaxis)

    t = transformUtils.getTransformFromAxes(-zaxis, yaxis, xaxis)
    t.PostMultiply()
    t.Translate(pickedPoint)

    if side == "right":
        t.PreMultiply()
        t.RotateY(180)

    handObj, handFrame = handFactory.placeHandModelWithTransform(
        t, view, side=side, parent=obj
    )

    # Keep the hand frame glued to the object's frame, if it has one.
    syncFrame = getChildFrame(obj)
    if syncFrame:
        handFrame.frameSync = vis.FrameSync()
        handFrame.frameSync.addFrame(handFrame, ignoreIncoming=True)
        handFrame.frameSync.addFrame(syncFrame)
def onNewDrivingGoal(frame):
    # Build a pose message from the driving-goal frame.
    # NOTE(review): `lcmbotcore` is not imported in this module's visible
    # import block -- confirm it is brought into scope elsewhere, otherwise
    # this raises NameError.  Also, `msg` is constructed but not visibly
    # published here; presumably publishing happens elsewhere -- verify.
    msg = lcmbotcore.pose_t()
    msg.utime = getUtime()
    msg.pos, msg.orientation = transformUtils.poseFromTransform(frame.transform)
def toggleFootstepWidget(displayPoint, view, useHorizontalWidget=False):
    """Toggle the interactive footstep-editing widget for a picked step.

    Returns True when the click was handled (a widget was created or
    dismissed), False when the picked object is not a footstep.
    """
    obj, _ = vis.findPickedObject(displayPoint, view)
    if not obj:
        return False

    name = obj.getProperty("Name")
    # Clicking the widget (or its frame) again dismisses it.
    if name in ("footstep widget", "footstep widget frame"):
        om.removeFromObjectModel(om.findObjectByName("footstep widget"))
        return True

    # BUGFIX: use a raw string for the regex -- "\d" inside a plain string
    # literal is an invalid escape sequence (SyntaxWarning on modern Python).
    match = re.match(r"^step (\d+)$", name)
    if not match:
        return False
    stepIndex = int(match.group(1))

    # Close any open widget; clicking the same step just toggles it off.
    existingWidget = om.findObjectByName("footstep widget")
    if existingWidget:
        previousStep = existingWidget.stepIndex
        om.removeFromObjectModel(existingWidget)
        if previousStep == stepIndex:
            return True

    footMesh = shallowCopy(obj.polyData)
    footFrame = transformUtils.copyFrame(obj.getChildFrame().transform)

    if useHorizontalWidget:
        # Flatten roll/pitch, keeping only the step's yaw.
        rpy = [0.0, 0.0, transformUtils.rollPitchYawFromTransform(footFrame)[2]]
        footFrame = transformUtils.frameFromPositionAndRPY(
            footFrame.GetPosition(), np.degrees(rpy)
        )

    footObj = vis.showPolyData(
        footMesh, "footstep widget", parent="planning", alpha=0.2
    )
    footObj.stepIndex = stepIndex
    frameObj = vis.showFrame(
        footFrame, "footstep widget frame", parent=footObj, scale=0.2
    )
    footObj.actor.SetUserTransform(frameObj.transform)
    footObj.setProperty("Color", obj.getProperty("Color"))
    frameObj.setProperty("Edit", True)

    # Constrain editing: in-plane translation and yaw-only rotation.
    rep = frameObj.widget.GetRepresentation()
    rep.SetTranslateAxisEnabled(2, False)
    rep.SetRotateAxisEnabled(0, False)
    rep.SetRotateAxisEnabled(1, False)
    frameObj.widget.HandleRotationEnabledOff()

    # Editing a footstep and editing the walking goal are mutually exclusive.
    walkGoal = om.findObjectByName("walking goal")
    if walkGoal:
        walkGoal.setProperty("Edit", False)
    return True
def getAsFrame(obj):
    """Coerce *obj* to a frame item.

    A FrameItem is returned as-is; anything exposing getChildFrame() yields
    its child frame; everything else yields None.
    """
    if isinstance(obj, vis.FrameItem):
        return obj
    if hasattr(obj, "getChildFrame"):
        return obj.getChildFrame()
    return None
def isGraspSeed(obj):
    """A grasp-seed object is identified solely by carrying a 'side' attribute."""
    try:
        obj.side
    except AttributeError:
        return False
    return True
def getCollisionParent(obj):
    """
    If obj is an affordance, return obj.
    If obj is a frame or a grasp seed, return its first parent.
    """
    # Frames and grasp seeds both hang off the object we actually want.
    if isinstance(obj, vis.FrameItem) or isGraspSeed(obj):
        return obj.parent()
    return obj
# The most recently cached PickedPoint - available as input to any other algorithm.
# NOTE(review): nothing in this chunk writes to it; presumably updated by a
# context-menu action elsewhere in the file -- verify before relying on it.
lastCachedPickedPoint = np.array([0, 0, 0])
def getObjectAsPointCloud(obj):
    """Best-effort extraction of a non-empty point-cloud item from *obj*.

    Model containers are unwrapped via obj.model.polyDataObj.  Returns the
    (possibly unwrapped) object when it carries polyData with at least one
    point, otherwise None.
    """
    # Unwrap model containers that expose their cloud as model.polyDataObj.
    if hasattr(obj, "model") and hasattr(obj.model, "polyDataObj"):
        obj = obj.model.polyDataObj
    # Only objects carrying polyData qualify.
    if not hasattr(obj, "polyData"):
        return None
    if obj and obj.polyData.GetNumberOfPoints():
        return obj
    return None
def getRobotActions(view, pickedObj, pickedPoint):
reachFrame = getAsFrame(pickedObj)
collisionParent = getCollisionParent(pickedObj)
pointCloudObj = getObjectAsPointCloud(pickedObj)
def onReachLeft():
reachToFrame(reachFrame, "left", collisionParent)
def onReachRight():
reachToFrame(reachFrame, "right", collisionParent)
def flipHandSide():
for obj in [pickedObj] + pickedObj.children():
if not isGraspSeed(obj):
continue
side = "right" if obj.side == "left" else "left"
obj.side = side
color = [1.0, 1.0, 0.0]
if side == "right":
color = [0.33, 1.0, 0.0]
obj.setProperty("Color", color)
polyData = handFactory.getNewHandPolyData(side)
obj.setPolyData(polyData)
handFrame = obj.children()[0]
t = transformUtils.copyFrame(handFrame.transform)
t.PreMultiply()
t.RotateY(180)
handFrame.copyFrame(t)
objName = obj.getProperty("Name")
frameName = handFrame.getProperty("Name")
if side == "left":
obj.setProperty("Name", objName.replace("right", "left"))
handFrame.setProperty("Name", frameName.replace("right", "left"))
else:
obj.setProperty("Name", objName.replace("left", "right"))
handFrame.setProperty("Name", frameName.replace("left", "right"))
obj._renderAllViews()
def flipHandThumb():
handFrame = pickedObj.children()[0]
t = transformUtils.copyFrame(handFrame.transform)
t.PreMultiply()
t.RotateY(180)
handFrame.copyFrame(t)
pickedObj._renderAllViews()
def onSplineLeft():
splinewidget.planner.newSpline(pickedObj, "left")
def onSplineRight():
splinewidget.planner.newSpline(pickedObj, "right")
def onSegmentGround():
groundPoints, scenePoints = segmentation.removeGround(pointCloudObj.polyData)
vis.showPolyData(
groundPoints, "ground points", color=[0, 1, 0], parent="segmentation"
)
vis.showPolyData(
scenePoints, "scene points", color=[1, 0, 1], parent="segmentation"
)
pickedObj.setProperty("Visible", False)
def onCopyPointCloud():
global lastRandomColor
polyData = vtk.vtkPolyData()
polyData.DeepCopy(pointCloudObj.polyData)
if pointCloudObj.getChildFrame():
polyData = segmentation.transformPolyData(
polyData, pointCloudObj.getChildFrame().transform
)
polyData = segmentation.addCoordArraysToPolyData(polyData)
# generate random color, and average with a common color to make them generally similar
lastRandomColor = lastRandomColor + 0.1 + 0.1 * random.random()
rgb = colorsys.hls_to_rgb(lastRandomColor, 0.7, 1.0)
obj = vis.showPolyData(
polyData,
pointCloudObj.getProperty("Name") + " copy",
color=rgb,
parent="point clouds",
)
# t = vtk.vtkTransform()
# t.PostMultiply()
# t.Translate(filterUtils.computeCentroid(polyData))
# segmentation.makeMovable(obj, t)
om.setActiveObject(obj)
pickedObj.setProperty("Visible", False)
def onMergeIntoPointCloud():
allPointClouds = om.findObjectByName("point clouds")
if allPointClouds:
allPointClouds = [i.getProperty("Name") for i in allPointClouds.children()]
sel = QtGui.QInputDialog.getItem(
None,
"Point Cloud Merging",
"Pick point cloud to merge into:",
allPointClouds,
current=0,
| |
command)
# and return the answer
rawanswer = session.recvmessage(serversocket)
serversocket.close()
return rawanswer
def parse_manifest(rawmanifestdata):
    """
    <Purpose>
      Given raw manifest data, returns a dictionary containing a manifest
      dictionary.
    <Arguments>
      rawmanifestdata: a bytes object containing the raw manifest data as
        produced by msgpack (the original docstring said "json module", but
        the code unpacks with msgpack).
    <Exceptions>
      TypeError or ValueError if the manifest data is corrupt
    <Side Effects>
      None
    <Returns>
      A dictionary containing the manifest.
    """
    # isinstance is the idiomatic type check (and accepts bytes subclasses).
    if not isinstance(rawmanifestdata, bytes):
        raise TypeError("Raw manifest data must be bytes")

    # raw=False decodes msgpack string fields to str rather than bytes.
    manifestdict = msgpack.unpackb(rawmanifestdata, raw=False)

    _validate_manifest(manifestdict)
    return manifestdict
def populate_xordatastore(manifestdict, xordatastore, datasource, dstype,
                          precompute):
    """
    <Purpose>
      Adds the files listed in the manifestdict to the datastore
    <Arguments>
      manifestdict: a manifest dictionary.
      xordatastore: the XOR datastore that we should populate.
      datasource: The location to look for the files mentioned in the manifest
      dstype: The type (RAM, memory-mapped) of the datastore
      precompute: Specifies whether preprocessing should be performed
    <Exceptions>
      TypeError if the manifest is corrupt or the datasource is the wrong type.
      FileNotFound if the datasource does not contain a manifest file.
      IncorrectFileContents if the file listed in the manifest file has the wrong size or hash
    <Side Effects>
      None
    <Returns>
      None
    """
    if not isinstance(manifestdict, dict):
        raise TypeError("Manifest dict must be a dictionary")

    # BUGFIX: the original tested `type(datasource) != str` twice -- a
    # leftover of a Python 2 str/unicode check.  One isinstance test suffices.
    if not isinstance(datasource, str):
        raise TypeError("Mirror root must be a string")

    if dstype == "mmap":
        _mmap_database(xordatastore, datasource)
    else:  # RAM
        _add_data_to_datastore(xordatastore, manifestdict['fileinfolist'], datasource, manifestdict['hashalgorithm'], manifestdict['datastore_layout'], manifestdict['blocksize'])

        # Verify every block hash against the manifest before serving data.
        hashlist = _compute_block_hashlist_fromdatastore(xordatastore, manifestdict['blockcount'], manifestdict['blocksize'], manifestdict['hashalgorithm'])

        for blocknum in range(manifestdict['blockcount']):
            if hashlist[blocknum] != manifestdict['blockhashlist'][blocknum]:
                raise TypeError("Despite matching file hashes, block '" + str(blocknum) + "' has an invalid hash.\nCorrupt manifest or dirty xordatastore")

    # We're done!
    if precompute:
        print("Preprocessing data...")
        start = _timer()
        xordatastore.finalize()
        elapsed = (_timer() - start)
        print("Preprocessing done. Took %f seconds." % elapsed)
def _mmap_database(xordatastore, dbname):
    # Private helper for the dstype == "mmap" path of populate_xordatastore:
    # hand the database path to the datastore, which initializes itself from
    # it (presumably by memory-mapping the file -- confirm in the datastore
    # implementation).
    xordatastore.initialize(dbname)
def _add_data_to_datastore(xordatastore, fileinfolist, rootdir, hashalgorithm, datastore_layout, blocksize):
    # Private helper to populate the datastore.  Validates each manifest
    # entry (existence, path containment, size, hash) and copies the file's
    # bytes into the datastore according to the chosen layout.
    if datastore_layout not in ['nogaps', 'eqdist']:
        raise ValueError("Unknown datastore layout: "+datastore_layout)

    # go through the files one at a time and populate the xordatastore
    for thisfiledict in fileinfolist:
        thisrelativefilename = thisfiledict['filename']
        thisfilehash = thisfiledict['hash']
        thisfilelength = thisfiledict['length']

        thisfilename = os.path.join(rootdir, thisrelativefilename)

        # read in the files and populate the xordatastore
        if not os.path.exists(thisfilename):
            raise FileNotFound("File '" + thisrelativefilename + "' listed in manifest cannot be found in manifest root: '" + rootdir + "'.")

        # can't go above the root!
        # NOTE(review): a plain prefix check is weak ("/root" also matches
        # "/rootdir"); consider os.path.commonpath if this guards untrusted
        # manifests.
        if not os.path.normpath(os.path.abspath(thisfilename)).startswith(os.path.abspath(rootdir)):
            raise TypeError("File in manifest cannot go back from the root dir!!!")

        # BUGFIX: close the file handle deterministically -- the original
        # open(...).read() left the descriptor open until garbage collection.
        with open(thisfilename, 'rb') as thisfileobj:
            thisfilecontents = thisfileobj.read()

        # let's see if this has the right size
        if len(thisfilecontents) != thisfilelength:
            raise IncorrectFileContents("File '" + thisrelativefilename + "' has the wrong size")

        # let's see if this has the right hash
        if thisfilehash != find_hash(thisfilecontents, hashalgorithm):
            raise IncorrectFileContents("File '" + thisrelativefilename + "' has the wrong hash")

        # and add it to the datastore
        if datastore_layout == 'nogaps':
            # Contiguous layout: a single offset per file.
            thisoffset = thisfiledict['offset']
            xordatastore.set_data(thisoffset, thisfilecontents)
        elif datastore_layout == 'eqdist':
            # Scattered layout: the file is split over the blocks listed in
            # 'offsets'; copy piecewise, never crossing a block boundary.
            offsets = thisfiledict['offsets']
            offsetsoffset = 0
            fileoffset = 0
            while fileoffset < len(thisfilecontents):
                block_remaining_bytes = blocksize - (offsets[offsetsoffset] % blocksize)
                bytes_to_add = min(len(thisfilecontents)-fileoffset, block_remaining_bytes)
                xordatastore.set_data(
                    offsets[offsetsoffset], thisfilecontents[fileoffset:fileoffset+bytes_to_add])
                fileoffset += bytes_to_add
                offsetsoffset += 1
def _create_offset_dict(offsetdict, fileinfolist, rootdir, hashalgorithm):
    # Private helper: validate every manifest entry (existence, containment,
    # size, hash) and record offset -> absolute filename into offsetdict.
    # go through the files one at a time and populate the offset dict
    for thisfiledict in fileinfolist:
        thisrelativefilename = thisfiledict['filename']
        thisfilehash = thisfiledict['hash']
        thisoffset = thisfiledict['offset']
        thisfilelength = thisfiledict['length']

        thisfilename = os.path.join(rootdir, thisrelativefilename)

        # read in the files and populate the xordatastore
        if not os.path.exists(thisfilename):
            raise FileNotFound("File " + thisrelativefilename + " -->" + thisfilename + " listed in manifest cannot be found in manifest root: " + rootdir + ".")

        # can't go above the root!
        # JAC: I would use relpath, but it's 2.6 and on
        if not os.path.normpath(os.path.abspath(thisfilename)).startswith(os.path.abspath(rootdir)):
            raise TypeError("File in manifest cannot go back from the root dir!!!")

        # BUGFIX: use a context manager so the descriptor is closed even when
        # the size/hash validation below raises (the original fd.close() was
        # skipped on those exception paths).
        with open(thisfilename, 'rb') as fd:
            thisfilecontents = fd.read()

        # let's see if this has the right size
        if len(thisfilecontents) != thisfilelength:
            raise IncorrectFileContents("File '" + thisrelativefilename + "' has the wrong size")

        # let's see if this has the right hash
        if thisfilehash != find_hash(thisfilecontents, hashalgorithm):
            raise IncorrectFileContents("File '" + thisrelativefilename + "' has the wrong hash")

        # release the (potentially large) buffer before the next iteration
        del thisfilecontents

        # and add it to the dict
        offsetdict[thisoffset] = thisfilename

    print("[INFO] Offset-Dict generated.")
def datastore_layout_function_nogaps(fileinfolist, rootdir, blocksize, hashalgorithm):
    """
    <Purpose>
      Specifies how to map a set of files into offsets in an xordatastore.
      This simple function lays them out back to back, with no gaps.
    <Arguments>
      fileinfolist: a list of dictionaries with file information
      rootdir: the root directory where the files live
      blocksize: the size of a block of data
      hashalgorithm: the algorithm used to hash each block
    <Exceptions>
      TypeError, IndexError, or KeyError if the arguements are incorrect
    <Side Effects>
      Adds an 'offset' element to each dict in fileinfolist
    <Returns>
      The list of block hashes
    """
    print("[INFO] Using `nogaps` algorithm.")
    # Note, this algorithm doesn't use the blocksize for placement, and the
    # rootdir is only needed later for hashing.

    # Assign offsets linearly: each file starts where the previous one ended.
    nextoffset = 0
    for fileinfo in fileinfolist:
        fileinfo['offset'] = nextoffset
        nextoffset = nextoffset + fileinfo['length']

    blockcount = int(math.ceil(nextoffset * 1.0 / blocksize))

    # Sanity-check the generated layout: collect (offset, length) pairs,
    # sort them by offset, and walk them to reject negative or overlapping
    # entries.
    spans = sorted((fileinfo['offset'], fileinfo['length']) for fileinfo in fileinfolist)

    firstunusedoffset = 0
    for offset, length in spans:
        if offset < 0:
            raise TypeError("Offset generation led to negative offset!")
        if length < 0:
            raise TypeError("File lengths must be positive!")
        if firstunusedoffset > offset:
            raise TypeError("Error! Offset generation led to overlapping files!")
        # since this list is sorted by offset, this should ensure the property we want is upheld.
        firstunusedoffset = offset + length

    # Index the files on disk, then hash every block of the virtual datastore.
    offsetdict = {}
    _create_offset_dict(offsetdict, fileinfolist, rootdir, hashalgorithm)
    print("[INFO] Indexing done ...")

    return _compute_block_hashlist_fromdisk(offsetdict, blockcount, blocksize, hashalgorithm)
def datastore_layout_function_eqdist(fileinfolist, rootdir, blocksize, hashalgorithm):
    """
    <Purpose>
      Specifies how to map a set of files into offsets in an xordatastore.
      This function distributes them equally over the database.
    <Arguments>
      fileinfolist: a list of dictionaries with file information
      rootdir: the root directory where the files live
      blocksize: The size of a block of data.
      hashalgorithm: the algorithm used to hash each block
    <Exceptions>
      TypeError, IndexError, or KeyError if the arguements are incorrect
    <Side Effects>
      Modifies the fileinfolist to add an 'offsets' list to each dict
    <Returns>
      The list of block hashes
    """
    print("[INFO] Using `eqdist` algorithm.")
    # Total datastore length is the sum of all file lengths.
    db_length = 0
    for thisfileinfo in fileinfolist:
        db_length = db_length + thisfileinfo['length']
    blockcount = int(math.ceil(db_length * 1.0 / blocksize))
    # Block 0 is consumed first; every other block starts out free.
    free_blocks = list(range(1, blockcount))
    currentoffset = 0
    currentblock = 0
    last_block = -1
    # progress counter (prints roughly 20 progress updates over the run)
    hashedblocks = 0
    pt = blockcount*1.0/20
    nextprint = pt
    # define the hashlist for the block hashes
    hashlist = ['']*blockcount
    current_block_content = b''
    for thisfileinfo in fileinfolist:
        thisfileinfo['offsets'] = []
        thisfilename = os.path.join(rootdir, thisfileinfo['filename'])
        print("[INFO] reading", thisfilename)
        # prevent access above rootdir
        if not os.path.normpath(os.path.abspath(thisfilename)).startswith(os.path.abspath(rootdir)):
            raise TypeError("File in manifest cannot go back from the root dir!!!")
        # open the file for reading (to compute the hash for the current block)
        fd = open(thisfilename, 'rb')
        remainingbytes = thisfileinfo['length']
        # Stride (in blocks) between this file's pieces, so they are spread
        # roughly evenly over the whole datastore.
        blocks_per_file = thisfileinfo['length']*1.0 / blocksize
        block_steps = max(2, int(blockcount/blocks_per_file))
        current_step = 0
        while remainingbytes > 0:
            # Fill the current block up to its boundary, recording where
            # this piece of the file starts.
            block_remaining_bytes = (blocksize - (currentoffset % blocksize))
            thisfileinfo['offsets'].append(currentoffset)
            bytes_to_add = min(remainingbytes, block_remaining_bytes)
            remainingbytes -= bytes_to_add
            currentoffset += bytes_to_add
            current_block_content += fd.read(bytes_to_add)
            if currentoffset % blocksize == 0 and len(free_blocks) != 0:
                # block is full
                last_block = int(currentoffset/blocksize) - 1
                # show progress
                hashedblocks += 1
                if blockcount > 99 and hashedblocks >= nextprint:
                    print(hashedblocks, "/", blockcount,\
                        "("+str(int(round(hashedblocks*1.0/blockcount*100)))+"%) done...")
                    nextprint = nextprint + pt
                # calculate hash for block
                hashlist[last_block] = find_hash(current_block_content, hashalgorithm)
                current_block_content = b''
                # find new free block: jump `block_steps` ahead, then scan
                # forward (with wrap-around) until a free block is found
                current_step += 1
                block_candidate = (last_block + block_steps) % blockcount
                while block_candidate not in free_blocks:
                    block_candidate += 1
                    if block_candidate == blockcount:
                        block_candidate = 0
                free_blocks.remove(block_candidate)
                currentoffset = block_candidate * blocksize
                block_remaining_bytes = blocksize
        # close the file descriptor
        fd.close()
        del fd
    # every block must have been claimed exactly once
    assert len(free_blocks) == 0
    # the last block has to be padded to full block size
    block_remaining_bytes = (blocksize - (currentoffset % blocksize))
    current_block_content += block_remaining_bytes * b'\0'
    # calculate the hash for the last block
    current_block = int(currentoffset/blocksize)
    hashlist[current_block] = find_hash(current_block_content, hashalgorithm)
    # every block must have received a hash
    for h in hashlist:
        assert h != ''
    #currentoffset = 0
    #for thisfileinfo in fileinfolist:
    #    thisfileinfo['offset'] = currentoffset
    #    currentoffset = currentoffset + thisfileinfo['length']
    return hashlist
def _find_blockloc_from_offset(offset, sizeofblocks):
# Private helper function that translates an offset into (block, offset)
assert offset >= 0
return (int(offset / sizeofblocks), offset % sizeofblocks)
def extract_file_from_blockdict(filename, manifestdict, blockdict):
"""
<Purpose>
Reconstitutes a file from a block dict
<Arguments>
filename: the file within the release we are asking about
manifestdict: the manifest for the release
blockdict: a dictionary of blocknum -> blockcontents
<Exceptions>
TypeError, IndexError, or KeyError if the args are incorrect
<Side Effects>
None
<Returns>
A string containing the file contents
"""
blocksize = manifestdict['blocksize']
database_layout = manifestdict['datastore_layout']
for fileinfo in manifestdict['fileinfolist']:
if filename == fileinfo['filename']:
if database_layout == 'nogaps':
offset = fileinfo['offset']
quantity = fileinfo['length']
# Let's get the block information
(startblock, startoffset) = _find_blockloc_from_offset(offset, blocksize)
(endblock, endoffset) = _find_blockloc_from_offset(offset + quantity, blocksize)
# Case 1: this does not cross blocks
if startblock == endblock:
return blockdict[startblock][startoffset:endoffset]
# Case 2: this crosses blocks
# we'll build up the string starting with the first block...
currentstring = blockdict[startblock][startoffset:]
# now add in the 'middle' blocks. This is all of the blocks
# after the start and before the end
for currentblock in range(startblock + 1, endblock):
currentstring += blockdict[currentblock]
# this check is needed | |
FTPMockFactory.create("doaj")
job = models.BackgroundJob()
url = "ftp://upload"
file_upload = models.FileUpload()
file_upload.set_id()
file_upload.upload("testuser", url, status="exists")
file_upload.set_schema("doaj")
upload_dir = app.config.get("UPLOAD_DIR")
path = os.path.join(upload_dir, file_upload.local_filename)
self.cleanup_paths.append(path)
self.cleanup_ids.append(file_upload.id)
task = ingestarticles.IngestArticlesBackgroundTask(job)
result = task._download(file_upload)
assert file_upload.status == "failed"
assert file_upload.error is not None and file_upload.error != ""
assert file_upload.error_details is not None and file_upload.error_details != ""
assert list(file_upload.failure_reasons.keys()) == []
    def test_22_download_ftp_error(self):
        """_download marks the upload failed when the FTP transfer itself errors."""
        # Monkey-patch ftplib so the task talks to the mock FTP server;
        # the "ftp://fail" URL makes the mock raise during download.
        ftplib.FTP = FTPMockFactory.create("doaj")
        job = models.BackgroundJob()
        url = "ftp://fail"
        file_upload = models.FileUpload()
        file_upload.set_id()
        file_upload.upload("testuser", url, status="exists")
        file_upload.set_schema("doaj")
        upload_dir = app.config.get("UPLOAD_DIR")
        path = os.path.join(upload_dir, file_upload.local_filename)
        self.cleanup_paths.append(path)
        # NOTE(review): neighbouring tests also append file_upload.id to
        # self.cleanup_ids; confirm the omission here is intentional.
        task = ingestarticles.IngestArticlesBackgroundTask(job)
        result = task._download(file_upload)
        assert result is False
        assert file_upload.status == "failed"
        # A top-level error is recorded, but no details and no per-article
        # failure reasons (the failure happened before any parsing).
        assert file_upload.error is not None and file_upload.error != ""
        assert file_upload.error_details is None
        assert list(file_upload.failure_reasons.keys()) == []
    def test_23_doaj_process_success(self):
        """_process on a valid single-ISSN DOAJ XML file imports one new article."""
        # A journal owned by the publisher account, whose ISSN matches the
        # fixture XML so the article can be attached to it.
        j = models.Journal()
        j.set_owner("testowner")
        bj = j.bibjson()
        bj.add_identifier(bj.P_ISSN, "1234-5678")
        j.save(blocking=True)
        asource = AccountFixtureFactory.make_publisher_source()
        account = models.Account(**asource)
        account.set_id("testowner")
        account.save(blocking=True)
        job = models.BackgroundJob()
        file_upload = models.FileUpload()
        file_upload.set_id()
        file_upload.set_schema("doaj")
        file_upload.upload("testowner", "filename.xml")
        # Write the fixture XML to the location _process expects.
        upload_dir = app.config.get("UPLOAD_DIR")
        path = os.path.join(upload_dir, file_upload.local_filename)
        self.cleanup_paths.append(path)
        stream = DoajXmlArticleFixtureFactory.upload_1_issn_correct()
        with open(path, "wb") as f:
            f.write(stream.read())
        task = ingestarticles.IngestArticlesBackgroundTask(job)
        task._process(file_upload)
        # The local file is deleted after successful processing.
        assert not os.path.exists(path)
        assert file_upload.status == "processed"
        assert file_upload.imported == 1
        assert file_upload.new == 1
    def test_24_process_invalid_file(self):
        """_process on XML that fails schema validation marks the upload failed."""
        j = models.Journal()
        j.set_owner("testowner")
        bj = j.bibjson()
        bj.add_identifier(bj.P_ISSN, "1234-5678")
        j.save(blocking=True)
        job = models.BackgroundJob()
        file_upload = models.FileUpload()
        file_upload.set_id()
        file_upload.set_schema("doaj")
        upload_dir = app.config.get("UPLOAD_DIR")
        path = os.path.join(upload_dir, file_upload.local_filename)
        self.cleanup_paths.append(path)
        self.cleanup_ids.append(file_upload.id)
        stream = DoajXmlArticleFixtureFactory.invalid_schema_xml()
        # NOTE(review): written in text mode ("w") while sibling tests use
        # "wb" -- presumably this fixture yields a str stream; confirm.
        with open(path, "w") as f:
            f.write(stream.read())
        task = ingestarticles.IngestArticlesBackgroundTask(job)
        task._process(file_upload)
        # The local file is removed and the failure is fully described,
        # but no per-article failure reasons (the file never parsed).
        assert not os.path.exists(path)
        assert file_upload.status == "failed"
        assert file_upload.error is not None and file_upload.error != ""
        assert file_upload.error_details is not None and file_upload.error_details != ""
        assert list(file_upload.failure_reasons.keys()) == []
    def test_25_process_filesystem_error(self):
        """A failure inside article ingestion (mocked) surfaces as a failed upload."""
        # Mock the BLL batch_create so ingestion raises, simulating a
        # storage/filesystem error during article creation.
        articleSvc.ArticleService.batch_create_articles = BLLArticleMockFactory.batch_create
        j = models.Journal()
        j.set_owner("testowner")
        bj = j.bibjson()
        bj.add_identifier(bj.P_ISSN, "1234-5678")
        j.save(blocking=True)
        job = models.BackgroundJob()
        file_upload = models.FileUpload()
        file_upload.set_id()
        file_upload.set_schema("doaj")
        upload_dir = app.config.get("UPLOAD_DIR")
        path = os.path.join(upload_dir, file_upload.local_filename)
        self.cleanup_paths.append(path)
        self.cleanup_ids.append(file_upload.id)
        # The input XML itself is valid; only the mocked create step fails.
        stream = DoajXmlArticleFixtureFactory.upload_1_issn_correct()
        with open(path, "wb") as f:
            f.write(stream.read())
        task = ingestarticles.IngestArticlesBackgroundTask(job)
        task._process(file_upload)
        assert not os.path.exists(path)
        assert file_upload.status == "failed"
        assert file_upload.error is not None and file_upload.error != ""
        # No error details: the failure is an ingestion error, not a schema one.
        assert file_upload.error_details is None
        assert list(file_upload.failure_reasons.keys()) == []
def test_26_run_validated(self):
j = models.Journal()
j.set_owner("testowner")
bj = j.bibjson()
bj.add_identifier(bj.P_ISSN, "1234-5678")
j.save(blocking=True)
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner")
account.save(blocking=True)
handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct()
f = FileMockFactory(stream=handle)
previous = []
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", upload_file=f, schema="doaj", previous=previous)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
task = ingestarticles.IngestArticlesBackgroundTask(job)
task.run()
# because file upload needs to be re-saved
time.sleep(2)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "processed"
def test_27_run_exists(self):
requests.head = ResponseMockFactory.head_fail
requests.get = ResponseMockFactory.doaj_get_success
j = models.Journal()
j.set_owner("testowner")
bj = j.bibjson()
bj.add_identifier(bj.P_ISSN, "1234-5678")
j.save(blocking=True)
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner")
account.save(blocking=True)
url = "http://valid"
previous = []
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", url=url, schema="doaj", previous=previous)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
task = ingestarticles.IngestArticlesBackgroundTask(job)
task.run()
# because file upload needs to be re-saved
time.sleep(2)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "processed"
def test_28_run_errors(self):
job = models.BackgroundJob()
task = ingestarticles.IngestArticlesBackgroundTask(job)
with self.assertRaises(BackgroundException):
task.run()
job.params = {}
with self.assertRaises(BackgroundException):
task.run()
job.params = {"ingest_articles__file_upload_id" : "whatever"}
with self.assertRaises(BackgroundException):
task.run()
def test_29_submit_success(self):
j = models.Journal()
j.set_owner("testowner")
bj = j.bibjson()
bj.add_identifier(bj.P_ISSN, "1234-5678")
j.save(blocking=True)
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner")
account.save(blocking=True)
handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct()
f = FileMockFactory(stream=handle)
previous = []
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", upload_file=f, schema="doaj", previous=previous)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
# this assumes that huey is in always eager mode, and thus this immediately calls the async task,
# which in turn calls execute, which ultimately calls run
ingestarticles.IngestArticlesBackgroundTask.submit(job)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "processed"
def test_31_doaj_run_fail_unmatched_issn(self):
# Create a journal with 2 issns, one of which is the same as an issn on the
# article, but the article also contains an issn which doesn't match the journal
# We expect a failed ingest
j = models.Journal()
j.set_owner("testowner")
bj = j.bibjson()
bj.add_identifier(bj.P_ISSN, "1234-5678")
bj.add_identifier(bj.E_ISSN, "9876-5432")
j.save(blocking=True)
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner")
account.save(blocking=True)
handle = DoajXmlArticleFixtureFactory.upload_2_issns_ambiguous()
f = FileMockFactory(stream=handle)
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
task = ingestarticles.IngestArticlesBackgroundTask(job)
task.run()
# because file upload needs to be re-saved
time.sleep(2)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "failed", "received status: {}".format(fu.status)
assert fu.error is not None and fu.error != ""
assert fu.error_details is None
fr = fu.failure_reasons
assert "unmatched" in fr
assert fr["unmatched"] == ["2345-6789"]
def test_32_run_doaj_fail_shared_issn(self):
# Create 2 journals with the same issns but different owners, which match the issns on the article
# We expect an ingest failure
j1 = models.Journal()
j1.set_owner("testowner1")
bj1 = j1.bibjson()
bj1.add_identifier(bj1.P_ISSN, "1234-5678")
bj1.add_identifier(bj1.E_ISSN, "9876-5432")
j1.save()
j2 = models.Journal()
j2.set_owner("testowner2")
j2.set_in_doaj(False)
bj2 = j2.bibjson()
bj2.add_identifier(bj2.P_ISSN, "1234-5678")
bj2.add_identifier(bj2.E_ISSN, "9876-5432")
j2.save(blocking=True)
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner1")
account.save(blocking=True)
handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct()
f = FileMockFactory(stream=handle)
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
task = ingestarticles.IngestArticlesBackgroundTask(job)
task.run()
# because file upload needs to be re-saved
time.sleep(2)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "failed"
assert fu.error is not None and fu.error != ""
assert fu.error_details is None
fr = fu.failure_reasons
assert "shared" in fr
assert "1234-5678" in fr["shared"]
assert "9876-5432" in fr["shared"]
def test_33_run_fail_unowned_issn(self):
# Create 2 journals with different owners and one different issn each. The two issns in the
# article match each of the journals respectively
# We expect an ingest failure
j1 = models.Journal()
j1.set_owner("testowner1")
bj1 = j1.bibjson()
bj1.add_identifier(bj1.P_ISSN, "1234-5678")
j1.save()
j2 = models.Journal()
j2.set_owner("testowner2")
j2.set_in_doaj(False)
bj2 = j2.bibjson()
bj2.add_identifier(bj2.E_ISSN, "9876-5432")
j2.save(blocking=True)
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner")
account.save(blocking=True)
handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct()
f = FileMockFactory(stream=handle)
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
task = ingestarticles.IngestArticlesBackgroundTask(job)
task.run()
# because file upload needs to be re-saved
time.sleep(2)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "failed"
assert fu.error is not None and fu.error != ""
assert fu.error_details is None
fr = fu.failure_reasons
assert "unowned" in fr
assert "9876-5432" in fr["unowned"]
def test_34_doaj_journal_2_article_2_success(self):
# Create a journal with two issns both of which match the 2 issns in the article
# we expect a successful article ingest
j = models.Journal()
j.set_owner("testowner")
bj = j.bibjson()
bj.add_identifier(bj.P_ISSN, "1234-5678")
bj.add_identifier(bj.E_ISSN, "9876-5432")
j.save(blocking=True)
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner")
account.save(blocking=True)
handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct()
f = FileMockFactory(stream=handle)
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
task = ingestarticles.IngestArticlesBackgroundTask(job)
task.run()
# because file upload needs to be re-saved
time.sleep(2)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "processed"
assert fu.imported == 1
assert fu.updates == 0
assert fu.new == 1
fr = fu.failure_reasons
assert len(fr.get("shared", [])) == 0
assert len(fr.get("unowned", [])) == 0
assert len(fr.get("unmatched", [])) == 0
found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])]
assert len(found) == 1
def test_35_doaj_journal_2_article_1_success(self):
# Create a journal with 2 issns, one of which is present in the article as the
# only issn
# We expect a successful article ingest
j = models.Journal()
j.set_owner("testowner")
bj = j.bibjson()
bj.add_identifier(bj.P_ISSN, "1234-5678")
bj.add_identifier(bj.E_ISSN, "9876-5432")
j.save()
asource = AccountFixtureFactory.make_publisher_source()
account = models.Account(**asource)
account.set_id("testowner")
account.save(blocking=True)
handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct()
f = FileMockFactory(stream=handle)
job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f)
id = job.params.get("ingest_articles__file_upload_id")
self.cleanup_ids.append(id)
# because file upload gets created and saved by prepare
time.sleep(2)
task = ingestarticles.IngestArticlesBackgroundTask(job)
task.run()
# because file upload needs to be re-saved
time.sleep(2)
fu = models.FileUpload.pull(id)
assert fu is not None
assert fu.status == "processed"
assert fu.imported == 1
assert fu.updates == 0
assert fu.new == 1
fr = fu.failure_reasons
assert len(fr.get("shared", [])) == 0
assert len(fr.get("unowned", [])) == 0
assert len(fr.get("unmatched", [])) == 0
found = [a for a in models.Article.find_by_issns(["1234-5678"])]
assert len(found) | |
contactgroup = next((cg for cg in cgroups if cg.get("name") == Config.guild_contact_channel_group), None)
if contactgroup is None:
log.debug("Can not find a group '%s' for guild contacts. Skipping.", contactgroup)
else:
for c in contacts:
with self.dbc.lock:
accs = [row[0] for row in self.dbc.cursor.execute("SELECT ts_db_id FROM users WHERE lower(account_name) = lower(?)", (c,)).fetchall()]
for a in accs:
errored = False
try:
u = User(ts3conn, unique_id = a, ex_hand = signal_exception_handler)
tsdbid = u.ts_db_id
_, ex = ts3conn.ts3exec(lambda tsc: tsc.exec_("setclientchannelgroup"
, cid = cinfo.get("cid")
, cldbid = tsdbid
, cgid = contactgroup.get("cgid"))
, signal_exception_handler)
# while we are at it, add the contacts to the guild group as well
_, ex2 = ts3conn.ts3exec(lambda tsc: tsc.exec_("servergroupaddclient"
, sgid = guildgroupid
, cldbid = tsdbid)
, signal_exception_handler)
errored = ex is not None
except Exception as ex:
errored = True
if errored:
log.error("Could not assign contact role '%s' to user '%s' with DB-unique-ID '%s' in guild channel for %s. Maybe the uid is not valid anymore."
, Config.guild_contact_channel_group, c, a, name)
return SUCCESS
def handle_guild_icon(self, name, ts3conn):
#########################################
# RETRIEVE AND UPLOAD GUILD EMBLEM ICON #
#########################################
log.debug("Retrieving and uploading guild emblem as icon from gw2mists...")
icon_url = "https://api.gw2mists.de/guilds/emblem/%s/50.svg" % (urllib.parse.quote(name),)
icon = requests.get(icon_url)
# funnily enough, giving an invalid guild (or one that has no emblem)
# results in HTTP 200, but a JSON explaining the error instead of an SVG image.
# Storing this JSON and uploading it to TS just fails silently without
# causing any problems!
# Therefore checking content length..
if len(icon.content) > 0:
icon_id = binascii.crc32(name.encode('utf8'))
icon_local_file_name = "%s_icon.svg" % (urllib.parse.quote(name),) # using name instead of tag, because tags are not unique
icon_server_path = "/icon_%s" % (icon_id,)
self.upload_icon(icon, icon_local_file_name, icon_server_path, ts3conn)
return icon_id
else:
log.debug("Empty Response. Guild probably has no icon. Skipping Icon upload.")
return None
def upload_icon(self, icon, icon_file_name, icon_server_path, ts3conn):
def _ts_file_upload_hook(c: ts3.response.TS3QueryResponse):
if (c is not None) and (c.parsed is not None) \
and (len(c.parsed) == 1) and (c.parsed[0] is not None) \
and "msg" in c.parsed[0].keys() and c.parsed[0]["msg"] == "invalid size":
from ts3.filetransfer import TS3UploadError
raise TS3UploadError(0, "The uploaded Icon is too large")
return None
with open(icon_file_name, "w+b") as fh:
try:
# svg
fh.write(icon.content)
fh.flush()
fh.seek(0)
# it is important to have acquired the lock for the ts3conn globally
# at this point, as we directly pass the wrapped connection around
upload = ts3.filetransfer.TS3FileTransfer(ts3conn.ts_connection)
res = upload.init_upload(input_file=fh,
name=icon_server_path,
cid=0,
query_resp_hook=lambda c: _ts_file_upload_hook(c))
log.info(f"Icon {icon_file_name} uploaded as {icon_server_path}.")
except ts3.common.TS3Error as ts3error:
log.error("Error Uploading icon {icon_file_name}.")
log.error(ts3error)
finally:
fh.close()
os.remove(icon_file_name)
def create_guild_channel_description(self, contacts, name, tag):
contacts = "\n".join([" • %s" % c for c in contacts])
text = (f"[center]\n"
f"[img]https://api.gw2mists.de/guilds/emblem/{urllib.parse.quote(name)}/128.svg[/img]\n"
f"[size=20]{name} - {tag}[/size]\n"
f"[/center]\n"
f"[hr]\n"
f"[size=12]Contacts:[/size]\n"
f"{contacts}\n"
f"[hr]\n")
return text
def clientMessageHandler(self, ipcserver, clientsocket, message):
mtype = self.try_get(message, "type", lower = True)
mcommand = self.try_get(message, "command", lower = True)
margs = self.try_get(message, "args", typer = lambda a: dict(a), default = {})
mid = self.try_get(message, "message_id", typer = lambda a: int(a), default = -1)
log.debug("[%s] %s", mtype, mcommand)
if mtype == "post":
# POST commands
if mcommand == "setresetroster":
mdate = self.try_get(margs, "date", default = "dd.mm.yyyy")
mred = self.try_get(margs, "rbl", default = [])
mgreen = self.try_get(margs, "gbl", default = [])
mblue = self.try_get(margs, "bbl", default = [])
mebg = self.try_get(margs, "ebg", default = [])
self.setResetroster(ipcserver.ts_connection, mdate, mred, mgreen, mblue, mebg)
if mcommand == "createguild":
mname = self.try_get(margs, "name", default = None)
mtag = self.try_get(margs, "tag", default = None)
mgroupname = self.try_get(margs, "tsgroup", default = mtag)
mcontacts = self.try_get(margs, "contacts", default = [])
res = -1 if mname is None or mtag is None else self.createGuild(mname, mtag, mgroupname, mcontacts)
clientsocket.respond(mid, mcommand, {"status": res})
if mtype == "delete":
# DELETE commands
if mcommand == "user":
mgw2account = self.try_get(margs,"gw2account", default = "")
log.info("Received request to delete user '%s' from the TS registration database.", mgw2account)
changes = self.removePermissionsByGW2Account(mgw2account)
clientsocket.respond(mid, mcommand, {"deleted": changes})
if mcommand == "guild":
mname = self.try_get(margs, "name", default = None)
log.info("Received request to delete guild %s", mname)
res = self.removeGuild(mname)
print(res)
clientsocket.respond(mid, mcommand, {"status": res})
    # Handler that is used every time an event (message) is received from teamspeak server
    def messageEventHandler(self, event):
        """
        Handle a text-message event from the TS server.

        *event* is a ts3.response.TS3Event instance, that contains the name
        of the event and the data. Channel text (targetmode "2") is parsed as
        bot commands; private text (targetmode "1") is treated as an API-key
        verification attempt. Always returns None; errors are logged.
        """
        log.debug("event.event: %s", event.event)
        raw_cmd = event.parsed[0].get('msg')
        rec_from_name = event.parsed[0].get('invokername').encode('utf-8') #fix any encoding issues introduced by Teamspeak
        rec_from_uid = event.parsed[0].get('invokeruid')
        rec_from_id = event.parsed[0].get('invokerid')
        rec_type = event.parsed[0].get('targetmode')
        if rec_from_id == self.client_id:
            return #ignore our own messages.
        try:
            # Type 2 means it was channel text
            if rec_type == "2":
                cmd, args = self.commandCheck(raw_cmd) #sanitize the commands but also restricts commands to a list of known allowed commands
                if cmd == "hideguild":
                    log.info("User '%s' wants to hide guild '%s'.", rec_from_name, args[0])
                    with self.dbc.lock:
                        try:
                            # IntegrityError fires when the guild subquery yields
                            # NULL (unknown group) or the row already exists.
                            self.dbc.cursor.execute("INSERT INTO guild_ignores(guild_id, ts_db_id, ts_name) VALUES((SELECT guild_id FROM guilds WHERE ts_group = ?), ?,?)", (args[0], rec_from_uid, rec_from_name))
                            self.dbc.conn.commit()
                            log.debug("Success!")
                            self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_hide_guild_success")))
                        except sqlite3.IntegrityError:
                            self.dbc.conn.rollback()
                            log.debug("Failed. The group probably doesn't exist or the user is already hiding that group.")
                            self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_hide_guild_unknown")))
                elif cmd == "unhideguild":
                    log.info("User '%s' wants to unhide guild '%s'.", rec_from_name, args[0])
                    with self.dbc.lock:
                        # NOTE(review): `ts_db_id` is filtered inside the guilds
                        # subquery instead of on guild_ignores -- looks like a
                        # misplaced parenthesis; confirm the intended WHERE clause.
                        self.dbc.cursor.execute("DELETE FROM guild_ignores WHERE guild_id = (SELECT guild_id FROM guilds WHERE ts_group = ? AND ts_db_id = ?)", (args[0], rec_from_uid))
                        changes = self.dbc.cursor.execute("SELECT changes()").fetchone()[0];
                        self.dbc.conn.commit()
                        if changes > 0:
                            log.debug("Success!")
                            self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_unhide_guild_success")))
                        else:
                            log.debug("Failed. Either the guild is unknown or the user had not hidden the guild anyway.")
                            self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_unhide_guild_unknown")))
                elif cmd == 'verifyme':
                    return # command disabled for now
                    # NOTE: everything below in this branch is unreachable while
                    # the early return above is in place.
                    if self.clientNeedsVerify(rec_from_uid):
                        log.info("Verify Request Recieved from user '%s'. Sending PM now...\n ...waiting for user response.", rec_from_name)
                        self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_msg_verify")))
                    else:
                        log.info("Verify Request Recieved from user '%s'. Already verified, notified user.", rec_from_name)
                        self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_msg_alrdy_verified")))
            # Type 1 means it was a private message
            elif rec_type == '1':
                #reg_api_auth='\s*(\S+\s*\S+\.\d+)\s+(.*?-.*?-.*?-.*?-.*)\s*$'
                # NOTE(review): non-raw string literal with \s escapes -- works
                # today but should be an r"..." string.
                reg_api_auth='\s*(.*?-.*?-.*?-.*?-.*)\s*$'
                #Command for verifying authentication
                if re.match(reg_api_auth, raw_cmd):
                    pair = re.search(reg_api_auth, raw_cmd)
                    uapi = pair.group(1)
                    if self.clientNeedsVerify(rec_from_uid):
                        log.info("Received verify response from %s", rec_from_name)
                        auth = TS3Auth.AuthRequest(uapi)
                        log.debug('Name: |%s| API: |%s|' % (auth.name, uapi))
                        if auth.success:
                            limit_hit = self.TsClientLimitReached(auth.name)
                            if Config.DEBUG:
                                log.debug("Limit hit check: %s", limit_hit)
                            if not limit_hit:
                                log.info("Setting permissions for %s as verified.", rec_from_name)
                                #set permissions
                                self.setPermissions(rec_from_uid)
                                #get todays date
                                today_date = datetime.date.today()
                                #Add user to database so we can query their API key over time to ensure they are still on our server
                                self.addUserToDB(rec_from_uid, auth.name, uapi, today_date, today_date)
                                self.updateGuildTags(User(self.ts_connection, unique_id = rec_from_uid, ex_hand = signal_exception_handler), auth)
                                # self.updateGuildTags(rec_from_uid, auth)
                                log.debug("Added user to DB with ID %s", rec_from_uid)
                                #notify user they are verified
                                self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_msg_success")))
                            else:
                                # client limit is set and hit
                                self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_msg_limit_Hit")))
                                log.info("Received API Auth from %s, but %s has reached the client limit.", rec_from_name, rec_from_name)
                        else:
                            #Auth Failed
                            self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_msg_fail")))
                    else:
                        log.debug("Received API Auth from %s, but %s is already verified. Notified user as such.", rec_from_name, rec_from_name)
                        self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_msg_alrdy_verified")))
                else:
                    # Anything that is not an API key gets the generic help reply.
                    self.ts_connection.ts3exec(lambda tsc: tsc.exec_("sendtextmessage", targetmode = 1, target = rec_from_id, msg = Config.locale.get("bot_msg_rcv_default")))
                    log.info("Received bad response from %s [msg= %s]", rec_from_name, raw_cmd.encode('utf-8'))
                    # sys.exit(0)
        except Exception as e:
            log.error("BOT Event: Something went wrong during message received from teamspeak server. Likely bad user command/message.")
            log.error(e)
            log.error(traceback.format_exc())
        return None
#######################################
class Ticker(object):
    '''
    Wraps a TS3Bot and registers execute() with the scheduler so it fires
    every `interval` seconds.
    '''
    def __init__(self, ts3bot, interval):
        self.interval = interval
        self.ts3bot = ts3bot
        # Register the periodic callback with the global scheduler.
        schedule.every(interval).seconds.do(self.execute)

    def execute(self):
        # Subclasses override this with the actual periodic work.
        pass
#######################################
class Channel(object):
def __init__(self, ts_conn, | |
from pycocotools.coco import COCO
import numpy as np
import skimage.io as io
import matplotlib.pyplot as plt
import pylab
import cv2
import math
# import Polygon as plg
# from tqdm import tqdm
from pycocotools.coco import COCO
from .custom import CustomDataset
from .custompolarmask import CustomDatasetpolarmask
from .registry import DATASETS
import os.path as osp
import warnings
import mmcv
import numpy as np
from imagecorruptions import corrupt
from mmcv.parallel import DataContainer as DC
from torch.utils.data import Dataset
import torch
from .extra_aug import ExtraAugmentation
from .registry import DATASETS
from .transforms import (BboxTransform, ImageTransform, MaskTransform,
Numpy2Tensor, SegMapTransform, SegmapTransform)
from .utils import random_scale, to_tensor
from IPython import embed
import time
INF = 1e8  # sentinel "infinity" used as the open upper bound of the last regress range
def get_angle(v1, v2=(0, 0, 100, 0)):
    """Return (angle(v2) - angle(v1)) in whole degrees, normalized to [0, 360).

    Each segment is given as (x1, y1, x2, y2); its angle is math.atan2 of the
    direction vector, truncated to an integer degree. The default v2 points
    along the positive x-axis, so get_angle(v1) is the negated polar angle of
    v1 modulo 360.
    """
    # Direction vectors of both segments.
    dx1 = v1[2] - v1[0]
    dy1 = v1[3] - v1[1]
    # BUG FIX: default was a mutable list ([0,0,100,0]); an immutable tuple is
    # indexed identically, so behavior is unchanged.
    dx2 = v2[2] - v2[0]
    dy2 = v2[3] - v2[1]
    # int() truncates toward zero, matching the original behavior.
    angle1 = int(math.atan2(dy1, dx1) * 180 / math.pi)
    angle2 = int(math.atan2(dy2, dx2) * 180 / math.pi)
    included_angle = angle2 - angle1
    # Normalize negative differences into [0, 360).
    if included_angle < 0:
        included_angle += 360
    return included_angle
@DATASETS.register_module
class Coco_Seg_Dataset(CustomDatasetpolarmask):
CLASSES = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus',
'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant',
'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog',
'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',
'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat',
'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket',
'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl',
'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot',
'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop',
'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave',
'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock',
'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush')
def load_annotations(self, ann_file):
self.coco = COCO(ann_file)
self.cat_ids = self.coco.getCatIds()
self.cat2label = {
cat_id: i + 1
for i, cat_id in enumerate(self.cat_ids)
}
self.img_ids = self.coco.getImgIds()
img_infos = []
for i in self.img_ids:
info = self.coco.loadImgs([i])[0]
info['filename'] = info['file_name']
img_infos.append(info)
return img_infos
def get_ann_info(self, idx):
img_id = self.img_infos[idx]['id']
ann_ids = self.coco.getAnnIds(imgIds=[img_id])
ann_info = self.coco.loadAnns(ann_ids)
return self._parse_ann_info(ann_info, self.with_mask)
def _filter_imgs(self, min_size=32):
"""Filter images too small or without ground truths."""
valid_inds = []
ids_with_ann = set(_['image_id'] for _ in self.coco.anns.values())
for i, img_info in enumerate(self.img_infos):
if self.img_ids[i] not in ids_with_ann:
continue
if min(img_info['width'], img_info['height']) >= min_size:
valid_inds.append(i)
return valid_inds
    def _parse_ann_info(self, ann_info, with_mask=True):
        """Parse bbox and mask annotation.
        Args:
            ann_info (list[dict]): Annotation info of an image.
            with_mask (bool): Whether to parse mask annotations.
        Returns:
            dict: A dict containing the following keys: bboxes, bboxes_ignore,
                labels, masks, mask_polys, poly_lens.
        """
        gt_bboxes = []
        gt_labels = []
        gt_bboxes_ignore = []
        # Two formats are provided.
        # 1. mask: a binary map of the same size of the image.
        # 2. polys: each mask consists of one or several polys, each poly is a
        # list of float.
        # Debug counters for how many annotations the size filter rejects.
        self.debug = False
        if with_mask:
            gt_masks = []
            gt_mask_polys = []
            gt_poly_lens = []
        if self.debug:
            count = 0
            total = 0
        for i, ann in enumerate(ann_info):
            if ann.get('ignore', False):
                continue
            x1, y1, w, h = ann['bbox']
            #filter bbox < 10
            if self.debug:
                total+=1
            # Drop tiny objects: small area, small box, or nearly-empty mask.
            if ann['area'] <= 15 or (w < 10 and h < 10) or self.coco.annToMask(ann).sum() < 15:
                # print('filter, area:{},w:{},h:{}'.format(ann['area'],w,h))
                if self.debug:
                    count+=1
                continue
            # COCO bbox is (x, y, w, h); convert to inclusive (x1, y1, x2, y2).
            bbox = [x1, y1, x1 + w - 1, y1 + h - 1]
            if ann['iscrowd']:
                # Crowd regions are excluded from regression targets.
                gt_bboxes_ignore.append(bbox)
            else:
                gt_bboxes.append(bbox)
                gt_labels.append(self.cat2label[ann['category_id']])
                if with_mask:
                    gt_masks.append(self.coco.annToMask(ann))
                    mask_polys = [
                        p for p in ann['segmentation'] if len(p) >= 6
                    ]  # valid polygons have >= 3 points (6 coordinates)
                    poly_lens = [len(p) for p in mask_polys]
                    gt_mask_polys.append(mask_polys)
                    gt_poly_lens.extend(poly_lens)
        if self.debug:
            print('filter:',count/total)
        # Convert to arrays; empty images get correctly-shaped empty arrays.
        if gt_bboxes:
            gt_bboxes = np.array(gt_bboxes, dtype=np.float32)
            gt_labels = np.array(gt_labels, dtype=np.int64)
        else:
            gt_bboxes = np.zeros((0, 4), dtype=np.float32)
            gt_labels = np.array([], dtype=np.int64)
        if gt_bboxes_ignore:
            gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32)
        else:
            gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32)
        ann = dict(
            bboxes=gt_bboxes, labels=gt_labels, bboxes_ignore=gt_bboxes_ignore)
        if with_mask:
            ann['masks'] = gt_masks
            # poly format is not used in the current implementation
            ann['mask_polys'] = gt_mask_polys
            ann['poly_lens'] = gt_poly_lens
        return ann
    def prepare_train_img(self, idx):
        """Load, augment, and package one training sample, then precompute the
        PolarMask ray (distance) labels offline.

        Returns a dict of DataContainers (img, img_meta, gt_* and the offline
        _gt_labels/_gt_bboxes/_gt_masks targets), or None when the image has
        no usable ground truth / proposals.
        """
        img_info = self.img_infos[idx]
        img = mmcv.imread(osp.join(self.img_prefix, img_info['filename']))
        # corruption
        if self.corruption is not None:
            img = corrupt(
                img,
                severity=self.corruption_severity,
                corruption_name=self.corruption)
        # load proposals if necessary
        if self.proposals is not None:
            proposals = self.proposals[idx][:self.num_max_proposals]
            # TODO: Handle empty proposals properly. Currently images with
            # no proposals are just ignored, but they can be used for
            # training in concept.
            if len(proposals) == 0:
                return None
            if not (proposals.shape[1] == 4 or proposals.shape[1] == 5):
                raise AssertionError(
                    'proposals should have shapes (n, 4) or (n, 5), '
                    'but found {}'.format(proposals.shape))
            # A 5th column is a score; split it off and re-append after transforms.
            if proposals.shape[1] == 5:
                scores = proposals[:, 4, None]
                proposals = proposals[:, :4]
            else:
                scores = None
        ann = self.get_ann_info(idx)
        gt_bboxes = ann['bboxes']
        gt_labels = ann['labels']
        if self.with_crowd:
            gt_bboxes_ignore = ann['bboxes_ignore']
        # skip the image if there is no valid gt bbox
        if len(gt_bboxes) == 0 and self.skip_img_without_anno:
            warnings.warn('Skip the image "%s" that has no valid gt bbox' %
                          osp.join(self.img_prefix, img_info['filename']))
            return None
        # apply transforms
        flip = True if np.random.rand() < self.flip_ratio else False
        # randomly sample a scale
        img_scale = random_scale(self.img_scales, self.multiscale_mode)
        img, img_shape, pad_shape, scale_factor = self.img_transform(img, img_scale, flip, keep_ratio=self.resize_keep_ratio)
        img = img.copy()
        if self.with_seg:
            gt_seg = mmcv.imread(
                osp.join(self.seg_prefix,
                         img_info['filename'].replace('jpg', 'png')),
                flag='unchanged')
            gt_seg = self.seg_transform(gt_seg.squeeze(), img_scale, flip)
            gt_seg = mmcv.imrescale(
                gt_seg, self.seg_scale_factor, interpolation='nearest')
            gt_seg = gt_seg[None, ...]
        # Apply the same geometric transform to proposals and GT boxes.
        if self.proposals is not None:
            proposals = self.bbox_transform(proposals, img_shape, scale_factor,
                                            flip)
            proposals = np.hstack([proposals, scores
                                   ]) if scores is not None else proposals
        gt_bboxes = self.bbox_transform(gt_bboxes, img_shape, scale_factor,
                                        flip)
        if self.with_crowd:
            gt_bboxes_ignore = self.bbox_transform(gt_bboxes_ignore, img_shape,
                                                   scale_factor, flip)
        if self.with_mask:
            gt_masks = self.mask_transform(ann['masks'], pad_shape,
                                           scale_factor, flip)
        ori_shape = (img_info['height'], img_info['width'], 3)
        img_meta = dict(
            ori_shape=ori_shape,
            img_shape=img_shape,
            pad_shape=pad_shape,
            scale_factor=scale_factor,
            flip=flip)
        data = dict(
            img=DC(to_tensor(img), stack=True),
            img_meta=DC(img_meta, cpu_only=True),
            gt_bboxes=DC(to_tensor(gt_bboxes)))
        if self.with_label:
            data['gt_labels'] = DC(to_tensor(gt_labels))
        if self.with_crowd:
            data['gt_bboxes_ignore'] = DC(to_tensor(gt_bboxes_ignore))
        if self.with_mask:
            data['gt_masks'] = DC(gt_masks, cpu_only=True)
        #--------------------offline ray label generation-----------------------------
        # FCOS-style per-level configuration used to assign targets offline.
        self.center_sample = True
        self.use_mask_center = True
        self.radius = 1.5
        self.strides = [8, 16, 32, 64, 128]
        self.regress_ranges=((-1, 64), (64, 128), (128, 256), (256, 512),(512, INF))
        featmap_sizes = self.get_featmap_size(pad_shape)
        self.featmap_sizes = featmap_sizes
        num_levels = len(self.strides)
        # Point grid for every FPN level, concatenated into one tensor.
        all_level_points = self.get_points(featmap_sizes)
        self.num_points_per_level = [i.size()[0] for i in all_level_points]
        expanded_regress_ranges = [
            all_level_points[i].new_tensor(self.regress_ranges[i])[None].expand_as(
                all_level_points[i]) for i in range(num_levels)
        ]
        concat_regress_ranges = torch.cat(expanded_regress_ranges, dim=0)
        concat_points = torch.cat(all_level_points, 0)
        # Keep masks aligned with the (possibly filtered) boxes.
        gt_masks = gt_masks[:len(gt_bboxes)]
        gt_bboxes = torch.Tensor(gt_bboxes)
        gt_labels = torch.Tensor(gt_labels)
        _labels, _bbox_targets, _mask_targets = self.polar_target_single(
            gt_bboxes,gt_masks,gt_labels,concat_points, concat_regress_ranges)
        data['_gt_labels'] = DC(_labels)
        data['_gt_bboxes'] = DC(_bbox_targets)
        data['_gt_masks'] = DC(_mask_targets)
        #--------------------offline ray label generation-----------------------------
        return data
def get_featmap_size(self, shape):
h,w = shape[:2]
featmap_sizes = []
for i in self.strides:
featmap_sizes.append([int(h / i), int(w / i)])
return featmap_sizes
def get_points(self, featmap_sizes):
mlvl_points = []
for i in range(len(featmap_sizes)):
mlvl_points.append(
self.get_points_single(featmap_sizes[i], self.strides[i]))
return mlvl_points
def get_points_single(self, featmap_size, stride):
h, w = featmap_size
x_range = torch.arange(
0, w * stride, stride)
y_range = torch.arange(
0, h * stride, stride)
y, x = torch.meshgrid(y_range, x_range)
points = torch.stack(
(x.reshape(-1), y.reshape(-1)), dim=-1) + stride // 2
return points.float()
def polar_target_single(self, gt_bboxes, gt_masks, gt_labels, points, regress_ranges):
num_points = points.size(0)
num_gts = gt_labels.size(0)
if num_gts == 0:
return gt_labels.new_zeros(num_points), \
gt_bboxes.new_zeros((num_points, 4))
areas = (gt_bboxes[:, 2] - gt_bboxes[:, 0] + 1) * (
gt_bboxes[:, 3] - gt_bboxes[:, 1] + 1)
# TODO: figure out why these two are different
# areas = areas[None].expand(num_points, num_gts)
areas = areas[None].repeat(num_points, 1)
regress_ranges = regress_ranges[:, None, :].expand(
num_points, num_gts, 2)
gt_bboxes = gt_bboxes[None].expand(num_points, num_gts, 4)
#xs ys 分别是points的x y坐标
xs, ys = points[:, 0], points[:, 1]
xs = xs[:, None].expand(num_points, num_gts)
ys = ys[:, None].expand(num_points, num_gts)
left = xs - gt_bboxes[..., 0]
right = gt_bboxes[..., 2] - xs
top = ys - gt_bboxes[..., 1]
bottom = gt_bboxes[..., 3] - ys
bbox_targets = torch.stack((left, top, right, bottom), -1) #feature map上所有点对于gtbox的上下左右距离 [num_pix, num_gt, 4]
#mask targets 也按照这种写 同时labels 得从bbox中心修改成mask 重心
mask_centers = []
mask_contours = []
#第一步 先算重心 return [num_gt, 2]
for mask in gt_masks:
cnt, contour = self.get_single_centerpoint(mask)
contour = contour[0]
contour = torch.Tensor(contour).float()
y, x = cnt
mask_centers.append([x,y])
mask_contours.append(contour)
mask_centers = torch.Tensor(mask_centers).float()
# 把mask_centers assign到不同的层上,根据regress_range和重心的位置
mask_centers = mask_centers[None].expand(num_points, num_gts, 2)
#-------------------------------------------------------------------------------------------------------------------------------------------------------------
# condition1: inside a gt bbox
#加入center sample
if self.center_sample:
strides = [8, 16, 32, 64, 128]
if self.use_mask_center:
inside_gt_bbox_mask = self.get_mask_sample_region(gt_bboxes,
mask_centers,
strides,
self.num_points_per_level,
xs,
ys,
radius=self.radius)
else:
inside_gt_bbox_mask = self.get_sample_region(gt_bboxes,
strides,
self.num_points_per_level,
xs,
ys,
radius=self.radius)
else:
inside_gt_bbox_mask = bbox_targets.min(-1)[0] > 0
# condition2: limit the regression range for each location
max_regress_distance | |
"metadataonly",
("prop", "1954:98"): "metadataonly",
("prop", "1954:99"): "metadataonly",
("prop", "1955:10"): "metadataonly",
("prop", "1955:101"): "metadataonly",
("prop", "1955:102"): "metadataonly",
("prop", "1955:104"): "metadataonly",
("prop", "1955:105"): "metadataonly",
("prop", "1955:106"): "metadataonly",
("prop", "1955:107"): "metadataonly",
("prop", "1955:108"): "metadataonly",
("prop", "1955:109"): "metadataonly",
("prop", "1955:11"): "metadataonly",
("prop", "1955:110"): "metadataonly",
("prop", "1955:111"): "metadataonly",
("prop", "1955:112"): "metadataonly",
("prop", "1955:113"): "metadataonly",
("prop", "1955:114"): "metadataonly",
("prop", "1955:115"): "metadataonly",
("prop", "1955:116"): "metadataonly",
("prop", "1955:119"): "metadataonly",
("prop", "1955:12"): "metadataonly",
("prop", "1955:122"): "metadataonly",
("prop", "1955:123"): "metadataonly",
("prop", "1955:124"): "metadataonly",
("prop", "1955:125"): "metadataonly",
("prop", "1955:126"): "metadataonly",
("prop", "1955:129"): "metadataonly",
("prop", "1955:130"): "metadataonly",
("prop", "1955:131"): "metadataonly",
("prop", "1955:132"): "metadataonly",
("prop", "1955:133"): "metadataonly",
("prop", "1955:134"): "metadataonly",
("prop", "1955:135"): "metadataonly",
("prop", "1955:136"): "metadataonly",
("prop", "1955:137"): "metadataonly",
("prop", "1955:138"): "metadataonly",
("prop", "1955:14"): "metadataonly",
("prop", "1955:140"): "metadataonly",
("prop", "1955:141"): "metadataonly",
("prop", "1955:142"): "metadataonly",
("prop", "1955:143"): "metadataonly",
("prop", "1955:144"): "metadataonly",
("prop", "1955:145"): "metadataonly",
("prop", "1955:146"): "metadataonly",
("prop", "1955:148"): "metadataonly",
("prop", "1955:149"): "metadataonly",
("prop", "1955:150"): "metadataonly",
("prop", "1955:152"): "metadataonly",
("prop", "1955:153"): "metadataonly",
("prop", "1955:154"): "metadataonly",
("prop", "1955:155"): "metadataonly",
("prop", "1955:156"): "metadataonly",
("prop", "1955:158"): "metadataonly",
("prop", "1955:159"): "metadataonly",
("prop", "1955:161"): "metadataonly",
("prop", "1955:162"): "metadataonly",
("prop", "1955:163"): "metadataonly",
("prop", "1955:164"): "metadataonly",
("prop", "1955:167"): "metadataonly",
("prop", "1955:168"): "metadataonly",
("prop", "1955:169"): "metadataonly",
("prop", "1955:17"): "metadataonly",
("prop", "1955:170"): "metadataonly",
("prop", "1955:172"): "metadataonly",
("prop", "1955:173"): "metadataonly",
("prop", "1955:174"): "metadataonly",
("prop", "1955:175"): "metadataonly",
("prop", "1955:178"): "metadataonly",
("prop", "1955:18"): "metadataonly",
("prop", "1955:181"): "metadataonly",
("prop", "1955:182"): "metadataonly",
("prop", "1955:183"): "metadataonly",
("prop", "1955:184"): "metadataonly",
("prop", "1955:185"): "metadataonly",
("prop", "1955:186"): "metadataonly",
("prop", "1955:187"): "metadataonly",
("prop", "1955:189"): "metadataonly",
("prop", "1955:19"): "metadataonly",
("prop", "1955:190"): "metadataonly",
("prop", "1955:191"): "metadataonly",
("prop", "1955:192"): "metadataonly",
("prop", "1955:193"): "metadataonly",
("prop", "1955:194"): "metadataonly",
("prop", "1955:195"): "metadataonly",
("prop", "1955:196"): "metadataonly",
("prop", "1955:2"): "metadataonly",
("prop", "1955:20"): "metadataonly",
("prop", "1955:21"): "metadataonly",
("prop", "1955:23"): "metadataonly",
("prop", "1955:24"): "metadataonly",
("prop", "1955:25"): "metadataonly",
("prop", "1955:28"): "metadataonly",
("prop", "1955:29"): "metadataonly",
("prop", "1955:30"): "metadataonly",
("prop", "1955:31"): "metadataonly",
("prop", "1955:32"): "metadataonly",
("prop", "1955:33"): "metadataonly",
("prop", "1955:34"): "metadataonly",
("prop", "1955:35"): "metadataonly",
("prop", "1955:36"): "metadataonly",
("prop", "1955:38"): "metadataonly",
("prop", "1955:39"): "metadataonly",
("prop", "1955:4"): "metadataonly",
("prop", "1955:40"): "metadataonly",
("prop", "1955:41"): "metadataonly",
("prop", "1955:42"): "metadataonly",
("prop", "1955:43"): "metadataonly",
("prop", "1955:44"): "metadataonly",
("prop", "1955:45"): "metadataonly",
("prop", "1955:46"): "metadataonly",
("prop", "1955:48"): "metadataonly",
("prop", "1955:49"): "metadataonly",
("prop", "1955:5"): "metadataonly",
("prop", "1955:50"): "metadataonly",
("prop", "1955:51"): "metadataonly",
("prop", "1955:52"): "metadataonly",
("prop", "1955:55"): "metadataonly",
("prop", "1955:57"): "metadataonly",
("prop", "1955:58"): "metadataonly",
("prop", "1955:6"): "metadataonly",
("prop", "1955:60"): "metadataonly",
("prop", "1955:62"): "metadataonly",
("prop", "1955:64"): "metadataonly",
("prop", "1955:65"): "metadataonly",
("prop", "1955:66"): "metadataonly",
("prop", "1955:67"): "metadataonly",
("prop", "1955:69"): "metadataonly",
("prop", "1955:7"): "metadataonly",
("prop", "1955:70"): "metadataonly",
("prop", "1955:71"): "metadataonly",
("prop", "1955:72"): "metadataonly",
("prop", "1955:73"): "metadataonly",
("prop", "1955:74"): "metadataonly",
("prop", "1955:75"): "metadataonly",
("prop", "1955:76"): "metadataonly",
("prop", "1955:77"): "metadataonly",
("prop", "1955:78"): "metadataonly",
("prop", "1955:79"): "metadataonly",
("prop", "1955:8"): "metadataonly",
("prop", "1955:80"): "metadataonly",
("prop", "1955:82"): "metadataonly",
("prop", "1955:83"): "metadataonly",
("prop", "1955:84"): "metadataonly",
("prop", "1955:85"): "metadataonly",
("prop", "1955:86"): "metadataonly",
("prop", "1955:88"): "metadataonly",
("prop", "1955:89"): "metadataonly",
("prop", "1955:9"): "metadataonly",
("prop", "1955:90"): "metadataonly",
("prop", "1955:91"): "metadataonly",
("prop", "1955:92"): "metadataonly",
("prop", "1955:93"): "metadataonly",
("prop", "1955:94"): "metadataonly",
("prop", "1955:97"): "metadataonly",
("prop", "1956:101"): "metadataonly",
("prop", "1956:102"): "metadataonly",
("prop", "1956:103"): "metadataonly",
("prop", "1956:104"): "metadataonly",
("prop", "1956:106"): "metadataonly",
("prop", "1956:107"): "metadataonly",
("prop", "1956:108"): "metadataonly",
("prop", "1956:11"): "metadataonly",
("prop", "1956:111"): "metadataonly",
("prop", "1956:112"): "metadataonly",
("prop", "1956:113"): "metadataonly",
("prop", "1956:114"): "metadataonly",
("prop", "1956:115"): "metadataonly",
("prop", "1956:117"): "metadataonly",
("prop", "1956:118"): "metadataonly",
("prop", "1956:119"): "metadataonly",
("prop", "1956:12"): "metadataonly",
("prop", "1956:121"): "metadataonly",
("prop", "1956:124"): "metadataonly",
("prop", "1956:125"): "metadataonly",
("prop", "1956:128"): "metadataonly",
("prop", "1956:129"): "metadataonly",
("prop", "1956:13"): "metadataonly",
("prop", "1956:130"): "metadataonly",
("prop", "1956:131"): "metadataonly",
("prop", "1956:132"): "metadataonly",
("prop", "1956:133"): "metadataonly",
("prop", "1956:134"): "metadataonly",
("prop", "1956:135"): "metadataonly",
("prop", "1956:138"): "metadataonly",
("prop", "1956:139"): "metadataonly",
("prop", "1956:14"): "metadataonly",
("prop", "1956:140"): "metadataonly",
("prop", "1956:141"): "metadataonly",
("prop", "1956:145"): "metadataonly",
("prop", "1956:151"): "metadataonly",
("prop", "1956:152"): "metadataonly",
("prop", "1956:153"): "metadataonly",
("prop", "1956:154"): "metadataonly",
("prop", "1956:155"): "metadataonly",
("prop", "1956:156"): "metadataonly",
("prop", "1956:157"): "metadataonly",
("prop", "1956:158"): "metadataonly",
("prop", "1956:159"): "metadataonly",
("prop", "1956:16"): "metadataonly",
("prop", "1956:163"): "metadataonly",
("prop", "1956:164"): "metadataonly",
("prop", "1956:165"): "metadataonly",
("prop", "1956:167"): "metadataonly",
("prop", "1956:169"): "metadataonly",
("prop", "1956:170"): "metadataonly",
("prop", "1956:171"): "metadataonly",
("prop", "1956:174"): "metadataonly",
("prop", "1956:177"): "metadataonly",
("prop", "1956:179"): "metadataonly",
("prop", "1956:180"): "metadataonly",
("prop", "1956:19"): "metadataonly",
("prop", "1956:2"): "metadataonly",
("prop", "1956:21"): "metadataonly",
("prop", "1956:22"): "metadataonly",
("prop", "1956:24"): "metadataonly",
("prop", "1956:25"): "metadataonly",
("prop", "1956:26"): "metadataonly",
("prop", "1956:27"): "metadataonly",
("prop", "1956:28"): "metadataonly",
("prop", "1956:29"): "metadataonly",
("prop", "1956:3"): "metadataonly",
("prop", "1956:30"): "metadataonly",
("prop", "1956:31"): "metadataonly",
("prop", "1956:32"): "metadataonly",
("prop", "1956:33"): "metadataonly",
("prop", "1956:34"): "metadataonly",
("prop", "1956:36"): "metadataonly",
("prop", "1956:37"): "metadataonly",
("prop", "1956:38"): "metadataonly",
("prop", "1956:39"): "metadataonly",
("prop", "1956:4"): "metadataonly",
("prop", "1956:40"): "metadataonly",
("prop", "1956:41"): "metadataonly",
("prop", "1956:43"): "metadataonly",
("prop", "1956:44"): "metadataonly",
("prop", "1956:46"): "metadataonly",
("prop", "1956:49"): "metadataonly",
("prop", "1956:5"): "metadataonly",
("prop", "1956:50"): "metadataonly",
("prop", "1956:51"): "metadataonly",
("prop", "1956:52"): "metadataonly",
("prop", "1956:53"): "metadataonly",
("prop", "1956:54"): "metadataonly",
("prop", "1956:55"): "metadataonly",
("prop", "1956:56"): "metadataonly",
("prop", "1956:57"): "metadataonly",
("prop", "1956:58"): "metadataonly",
("prop", "1956:59"): "metadataonly",
("prop", "1956:6"): "metadataonly",
("prop", "1956:60"): "metadataonly",
("prop", "1956:61"): "metadataonly",
("prop", "1956:62"): "metadataonly",
("prop", "1956:63"): "metadataonly",
("prop", "1956:64"): "metadataonly",
("prop", "1956:65"): "metadataonly",
("prop", "1956:66"): "metadataonly",
("prop", "1956:67"): "metadataonly",
("prop", "1956:68"): "metadataonly",
("prop", "1956:69"): "metadataonly",
("prop", "1956:70"): "metadataonly",
("prop", "1956:72"): "metadataonly",
("prop", "1956:73"): "metadataonly",
("prop", "1956:74"): "metadataonly",
("prop", "1956:75"): "metadataonly",
("prop", "1956:77"): "metadataonly",
("prop", "1956:78"): "metadataonly",
("prop", "1956:79"): "metadataonly",
("prop", "1956:8"): "metadataonly",
("prop", "1956:80"): "metadataonly",
("prop", "1956:84"): "metadataonly",
("prop", "1956:85"): "metadataonly",
("prop", "1956:86"): "metadataonly",
("prop", "1956:87"): "metadataonly",
("prop", "1956:88"): "metadataonly",
("prop", "1956:89"): "metadataonly",
("prop", "1956:91"): "metadataonly",
("prop", "1956:92"): "metadataonly",
("prop", "1956:94"): "metadataonly",
("prop", "1956:95"): "metadataonly",
("prop", "1956:96"): "metadataonly",
("prop", "1956:97"): "metadataonly",
("prop", "1957:10"): "metadataonly",
("prop", "1957:101"): "metadataonly",
("prop", "1957:102"): "metadataonly",
("prop", "1957:103"): "metadataonly",
("prop", "1957:107"): "metadataonly",
("prop", "1957:108"): "metadataonly",
("prop", "1957:11"): "metadataonly",
("prop", "1957:111"): "metadataonly",
("prop", "1957:113"): "metadataonly",
("prop", "1957:115"): "metadataonly",
("prop", "1957:116"): "metadataonly",
("prop", "1957:118"): "metadataonly",
("prop", "1957:119"): "metadataonly",
("prop", "1957:12"): "metadataonly",
("prop", "1957:120"): "metadataonly",
("prop", "1957:121"): "metadataonly",
("prop", "1957:124"): "metadataonly",
("prop", "1957:125"): "metadataonly",
("prop", "1957:126"): "metadataonly",
("prop", "1957:128"): "metadataonly",
("prop", "1957:129"): "metadataonly",
("prop", "1957:13"): "metadataonly",
("prop", "1957:130"): "metadataonly",
("prop", "1957:131"): "metadataonly",
("prop", "1957:134"): "metadataonly",
("prop", "1957:135"): "metadataonly",
("prop", "1957:136"): "metadataonly",
("prop", "1957:137"): "metadataonly",
("prop", "1957:138"): "metadataonly",
("prop", "1957:14"): "metadataonly",
("prop", "1957:140"): "metadataonly",
("prop", "1957:141"): "metadataonly",
("prop", "1957:145"): "metadataonly",
("prop", "1957:147"): "metadataonly",
("prop", "1957:148"): "metadataonly",
("prop", "1957:149"): "metadataonly",
("prop", "1957:15"): "metadataonly",
("prop", "1957:152"): "default",
("prop", "1957:154"): "metadataonly",
("prop", "1957:155"): "metadataonly",
("prop", "1957:158"): "metadataonly",
("prop", "1957:159"): "metadataonly",
("prop", "1957:160"): "metadataonly",
("prop", "1957:162"): "metadataonly",
("prop", "1957:165"): "metadataonly",
("prop", "1957:166"): "metadataonly",
("prop", "1957:167"): "metadataonly",
("prop", "1957:169"): "metadataonly",
("prop", "1957:172"): "metadataonly",
("prop", "1957:174"): "metadataonly",
("prop", "1957:19"): "metadataonly",
("prop", "1957:20"): "metadataonly",
("prop", "1957:21"): "metadataonly",
("prop", "1957:22"): "metadataonly",
("prop", "1957:23"): "metadataonly",
("prop", "1957:24"): "metadataonly",
("prop", "1957:25"): "metadataonly",
("prop", "1957:26"): "metadataonly",
("prop", "1957:28"): "metadataonly",
("prop", "1957:29"): "metadataonly",
("prop", "1957:30"): "metadataonly",
("prop", "1957:31"): "metadataonly",
("prop", "1957:32"): "metadataonly",
("prop", "1957:33"): "metadataonly",
("prop", "1957:36"): "metadataonly",
("prop", "1957:37"): "metadataonly",
("prop", "1957:39"): "metadataonly",
("prop", "1957:4"): "metadataonly",
("prop", "1957:41"): "metadataonly",
("prop", "1957:42"): "metadataonly",
("prop", "1957:43"): "metadataonly",
("prop", "1957:44"): "metadataonly",
("prop", "1957:45"): "metadataonly",
("prop", "1957:47"): "metadataonly",
("prop", "1957:48"): "metadataonly",
("prop", "1957:49"): "metadataonly",
("prop", "1957:5"): "metadataonly",
("prop", "1957:50"): "metadataonly",
("prop", "1957:51"): "metadataonly",
("prop", "1957:52"): "metadataonly",
("prop", "1957:53"): "metadataonly",
("prop", "1957:54"): "metadataonly",
("prop", "1957:55"): "metadataonly",
("prop", "1957:57"): "metadataonly",
("prop", "1957:58"): "metadataonly",
("prop", "1957:6"): "metadataonly",
("prop", "1957:60"): "metadataonly",
("prop", "1957:62"): "metadataonly",
("prop", "1957:63"): "metadataonly",
("prop", "1957:64"): "metadataonly",
("prop", "1957:67"): "metadataonly",
("prop", "1957:68"): "metadataonly",
("prop", "1957:70"): "metadataonly",
("prop", "1957:73"): "metadataonly",
("prop", "1957:74"): "metadataonly",
("prop", "1957:75"): "metadataonly",
("prop", "1957:77"): "metadataonly",
("prop", "1957:78"): "metadataonly",
("prop", "1957:79"): "metadataonly",
("prop", "1957:81"): "metadataonly",
("prop", "1957:82"): "metadataonly",
("prop", "1957:83"): "metadataonly",
("prop", "1957:84"): "metadataonly",
("prop", "1957:85"): "metadataonly",
("prop", "1957:86"): "metadataonly",
("prop", "1957:88"): "metadataonly",
("prop", "1957:89"): "metadataonly",
("prop", "1957:90"): "metadataonly",
("prop", "1957:92"): "metadataonly",
("prop", "1957:93"): "metadataonly",
("prop", "1957:95"): "metadataonly",
("prop", "1957:96"): "metadataonly",
("prop", "1957:98"): "metadataonly",
("prop", "1957:99"): "metadataonly",
("prop", "1959:101"): "metadataonly",
("prop", "1959:102"): "metadataonly",
("prop", "1959:103"): "metadataonly",
("prop", "1959:106"): "metadataonly",
("prop", "1959:107"): "metadataonly",
("prop", "1959:108"): "metadataonly",
("prop", "1959:109"): "metadataonly",
("prop", "1959:111"): "metadataonly",
("prop", "1959:113"): "metadataonly",
("prop", "1959:115"): "metadataonly",
("prop", "1959:116"): "metadataonly",
("prop", "1959:117"): "metadataonly",
("prop", "1959:118"): "metadataonly",
("prop", "1959:12"): "metadataonly",
("prop", "1959:121"): "metadataonly",
("prop", "1959:123"): "metadataonly",
("prop", "1959:124"): "metadataonly",
("prop", "1959:125"): "metadataonly",
("prop", "1959:127"): "metadataonly",
("prop", "1959:128"): "metadataonly",
("prop", "1959:129"): "metadataonly",
("prop", "1959:13"): "metadataonly",
("prop", "1959:131"): "metadataonly",
("prop", "1959:132"): "metadataonly",
("prop", "1959:133"): "metadataonly",
("prop", "1959:138"): "metadataonly",
("prop", "1959:140"): "metadataonly",
("prop", "1959:141"): "metadataonly",
("prop", "1959:142"): "metadataonly",
("prop", "1959:145"): "metadataonly",
("prop", "1959:146"): "metadataonly",
("prop", | |
# Repository: tomdoherty/salt
"""
Test case for the consul execution module
"""
import logging
import pytest
import salt.modules.consul as consul
import salt.utils.http
import salt.utils.json
import salt.utils.platform
from salt.exceptions import SaltInvocationError
from tests.support.mock import MagicMock, patch
log = logging.getLogger(__name__)
@pytest.fixture
def configure_loader_modules():
    """Provide the loader dunders the consul module expects during tests."""
    opts = {"consul": {"url": "http://127.0.0.1", "token": "<PASSWORD>"}}
    grains = {"id": "test-minion"}
    return {consul: {"__opts__": opts, "__grains__": grains}}
def test_list():
    """
    Test salt.modules.consul.list function
    """
    expected = {"data": ["foo"], "res": True}
    query_mock = MagicMock(return_value=expected)
    with patch.object(consul, "_query", query_mock):
        result = consul.list_(consul_url="http://127.0.0.1", token="<PASSWORD>")
    assert result == expected
def test_get():
    """
    Test salt.modules.consul.get function
    """
    #
    # No key argument results in SaltInvocationError exception.
    # (was calling consul.put here, which duplicated test_put's check and
    # left consul.get's own argument validation untested)
    #
    with pytest.raises(SaltInvocationError):
        consul.get(consul_url="http://127.0.0.1", token="<PASSWORD>")
    # The same KV entry is exercised with two representations of the value:
    # base64-encoded ("YmFy") and a stringified bytes literal ("b'bar'").
    # In both cases get() should return the _query payload unchanged.
    for value in ("YmFy", "b'bar'"):
        entry = {
            "LockIndex": 0,
            "Key": "foo",
            "Flags": 0,
            "Value": value,
            "CreateIndex": 128,
            "ModifyIndex": 128,
        }
        mock_query = MagicMock(return_value={"data": [entry], "res": True})
        with patch.object(consul, "_query", mock_query):
            consul_return = consul.get(
                consul_url="http://127.0.0.1", key="foo", token="<PASSWORD>"
            )
        assert consul_return == {"data": [entry], "res": True}
def test_put():
    """
    Test salt.modules.consul.put function
    """
    #
    # No key argument results in SaltInvocationError exception
    #
    with pytest.raises(SaltInvocationError):
        consul.put(consul_url="http://127.0.0.1", token="<PASSWORD>")
    #
    # Test when we're unable to connect to Consul: port 8501 is not served,
    # so the underlying HTTP request fails and put() reports the error.
    #
    mock_consul_get = {
        "data": [
            {
                "LockIndex": 0,
                "Key": "<KEY>",
                "Flags": 0,
                "Value": "ImhlbGxvIHRoZXJlIg==",
                "CreateIndex": 299,
                "ModifyIndex": 299,
            }
        ],
        "res": True,
    }
    with patch.object(consul, "session_list", MagicMock(return_value=[])):
        with patch.object(consul, "get", MagicMock(return_value=mock_consul_get)):
            ret = consul.put(
                consul_url="http://127.0.0.1:8501",
                token="test_token",
                key="web/key1",
                value="Hello world",
            )
    expected_data = "Unable to add key web/key1 with value Hello world."
    # The socket error text is platform dependent.
    if salt.utils.platform.is_windows():
        expected_error = "Unknown error"
    else:
        expected_error = "Connection refused"
    assert not ret["res"]
    assert expected_data == ret["data"]
    assert expected_error in ret["error"]
    #
    # Working as expected: _query is mocked, so the write "succeeds".
    #
    mock_query = MagicMock(
        return_value={
            "data": [
                {
                    "LockIndex": 0,
                    "Key": "foo",
                    "Flags": 0,
                    "Value": "YmFy",
                    "CreateIndex": 128,
                    "ModifyIndex": 128,
                },
            ],
            "res": True,
        }
    )
    with patch.object(consul, "session_list", MagicMock(return_value=[])):
        with patch.object(consul, "get", MagicMock(return_value=mock_consul_get)):
            with patch.object(consul, "_query", mock_query):
                ret = consul.put(
                    consul_url="http://127.0.0.1:8500",
                    token="test_token",
                    key="web/key1",
                    value="Hello world",
                )
    _expected = {"res": True, "data": "Added key web/key1 with value Hello world."}
    assert ret == _expected
def test_delete():
    """
    Test salt.modules.consul.delete function
    """
    #
    # No key argument results in SaltInvocationError exception.
    # (was calling consul.put here; this test covers consul.delete, which
    # performs the same missing-key validation)
    #
    with pytest.raises(SaltInvocationError):
        consul.delete(consul_url="http://127.0.0.1", token="test_token")
    #
    # Test when we're unable to connect to Consul: port 8501 is not served,
    # so the HTTP request fails and delete() reports the error.
    #
    ret = consul.delete(
        consul_url="http://127.0.0.1:8501",
        token="test_token",
        key="web/key1",
        value="Hello world",
    )
    expected_data = "Unable to delete key web/key1."
    # The socket error text is platform dependent.
    if salt.utils.platform.is_windows():
        expected_error = "Unknown error"
    else:
        expected_error = "Connection refused"
    assert not ret["res"]
    assert expected_data == ret["message"]
    assert expected_error in ret["error"]
    #
    # Working as expected. The key is "web/key1" to match the message
    # asserted below (the previous placeholder key could never produce it).
    #
    mock_query = MagicMock(return_value={"data": True, "res": True})
    with patch.object(consul, "_query", mock_query):
        ret = consul.delete(
            consul_url="http://127.0.0.1:8500",
            token="test_token",
            key="web/key1",
            value="Hello <PASSWORD>",
        )
    _expected = {"res": True, "message": "Deleted key web/key1."}
    assert ret == _expected
def test_agent_maintenance():
    """
    Test consul agent maintenance
    """
    consul_url = "http://localhost:1313"
    key = "cluster/key"  # NOTE(review): unused in this test
    mock_result = "test"
    # 200 = success from the agent API; 204 simulates the failure branch.
    mock_http_result = {"status": 200, "dict": mock_result}
    mock_http_result_false = {"status": 204, "dict": mock_result}
    mock_url = MagicMock(return_value=consul_url)
    mock_nourl = MagicMock(return_value=None)
    # no consul url error
    with patch.dict(consul.__salt__, {"config.get": mock_nourl}):
        with patch.object(
            salt.modules.consul, "session_list", return_value=mock_result
        ):
            result = consul.agent_maintenance(consul_url="")
            expected = {"message": "No Consul URL found.", "res": False}
            assert expected == result
    # no required argument
    with patch.object(salt.utils.http, "query", return_value=mock_http_result):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                msg = 'Required parameter "enable" is missing.'
                result = consul.agent_maintenance(consul_url=consul_url)
                expected = {"message": msg, "res": False}
                assert expected == result
    # successful toggle: HTTP 200 -> "Agent maintenance mode enabled."
    with patch.object(salt.utils.http, "query", return_value=mock_http_result):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                msg = "Agent maintenance mode {}ed."
                value = "enabl"
                result = consul.agent_maintenance(consul_url=consul_url, enable=value)
                expected = {"message": msg.format(value), "res": True}
                assert expected == result
    # non-200 response -> "Unable to change maintenance mode for agent."
    with patch.object(salt.utils.http, "query", return_value=mock_http_result_false):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                msg = "Unable to change maintenance mode for agent."
                value = "enabl"
                result = consul.agent_maintenance(consul_url=consul_url, enable=value)
                # NOTE(review): expecting "res": True on this failure branch
                # looks suspicious -- confirm against agent_maintenance's
                # non-200 handling before changing.
                expected = {"message": msg, "res": True}
                assert expected == result
def test_agent_join():
    """
    Test consul agent join
    """
    consul_url = "http://localhost:1313"
    mock_result = "test"
    # 200 = success from the agent API; 204 simulates the failure branch.
    mock_http_result = {"status": 200, "dict": mock_result}
    mock_http_result_false = {"status": 204, "dict": mock_result}
    mock_url = MagicMock(return_value=consul_url)
    mock_nourl = MagicMock(return_value=None)
    # no consul url error
    with patch.dict(consul.__salt__, {"config.get": mock_nourl}):
        with patch.object(
            salt.modules.consul, "session_list", return_value=mock_result
        ):
            result = consul.agent_join(consul_url="")
            expected = {"message": "No Consul URL found.", "res": False}
            assert expected == result
    # missing required "address" argument raises
    with patch.object(salt.utils.http, "query", return_value=mock_http_result):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                pytest.raises(
                    SaltInvocationError, consul.agent_join, consul_url=consul_url
                )
    # successful join
    with patch.object(salt.utils.http, "query", return_value=mock_http_result):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                msg = "Agent joined the cluster"
                result = consul.agent_join(consul_url=consul_url, address="test")
                expected = {"message": msg, "res": True}
                assert expected == result
    # non-200 response -> join failure
    with patch.object(salt.utils.http, "query", return_value=mock_http_result_false):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                msg = "Unable to join the cluster."
                result = consul.agent_join(consul_url=consul_url, address="test")
                expected = {"message": msg, "res": False}
                assert expected == result
def test_agent_leave():
    """
    Test consul agent leave
    """
    consul_url = "http://localhost:1313"
    mock_result = "test"
    # 200 = success from the agent API; 204 simulates the failure branch.
    mock_http_result = {"status": 200, "dict": mock_result}
    mock_http_result_false = {"status": 204, "dict": mock_result}
    mock_url = MagicMock(return_value=consul_url)
    mock_nourl = MagicMock(return_value=None)
    # no consul url error
    # (was calling consul.agent_join here; this test covers agent_leave,
    # which performs the same missing-URL check)
    with patch.dict(consul.__salt__, {"config.get": mock_nourl}):
        with patch.object(
            salt.modules.consul, "session_list", return_value=mock_result
        ):
            result = consul.agent_leave(consul_url="")
            expected = {"message": "No Consul URL found.", "res": False}
            assert expected == result
    node = "node1"
    # missing required "node" argument raises
    with patch.object(salt.utils.http, "query", return_value=mock_http_result):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                pytest.raises(
                    SaltInvocationError, consul.agent_leave, consul_url=consul_url
                )
    # successful leave
    with patch.object(salt.utils.http, "query", return_value=mock_http_result):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                msg = "Node {} put in leave state."
                result = consul.agent_leave(consul_url=consul_url, node=node)
                expected = {"message": msg.format(node), "res": True}
                assert expected == result
    # non-200 response -> leave failure
    with patch.object(salt.utils.http, "query", return_value=mock_http_result_false):
        with patch.dict(consul.__salt__, {"config.get": mock_url}):
            with patch.object(
                salt.modules.consul, "session_list", return_value=mock_result
            ):
                msg = "Unable to change state for {}."
                result = consul.agent_leave(consul_url=consul_url, node=node)
                expected = {"message": msg.format(node), "res": False}
                assert expected == result
def test_agent_check_register():
    """
    Test consul agent check register
    """
    consul_url = "http://localhost:1313"
    key = "cluster/key"
    mock_result = "test"
    http_200 = {"status": 200, "dict": mock_result}
    http_204 = {"status": 204, "dict": mock_result}
    url_mock = MagicMock(return_value=consul_url)
    no_url_mock = MagicMock(return_value=None)
    # no consul url error
    with patch.dict(consul.__salt__, {"config.get": no_url_mock}), patch.object(
        salt.modules.consul, "session_list", return_value=mock_result
    ):
        result = consul.agent_check_register(consul_url="")
        assert result == {"message": "No Consul URL found.", "res": False}
    name = "name1"
    # no required arguments
    with patch.object(salt.utils.http, "query", return_value=http_200), patch.dict(
        consul.__salt__, {"config.get": url_mock}
    ), patch.object(salt.modules.consul, "session_list", return_value=mock_result):
        pytest.raises(
            SaltInvocationError,
            consul.agent_check_register,
            consul_url=consul_url,
        )
        # missing script, or http
        msg = 'Required parameter "script" or "http" is missing.'
        result = consul.agent_check_register(consul_url=consul_url, name=name)
        assert result == {"message": msg, "res": False}
        # missing interval
        msg = 'Required parameter "interval" is missing.'
        result = consul.agent_check_register(
            consul_url=consul_url,
            name=name,
            script="test",
            http="test",
            ttl="test",
        )
        assert result == {"message": msg, "res": False}
    # successful registration
    with patch.object(salt.utils.http, "query", return_value=http_200), patch.dict(
        consul.__salt__, {"config.get": url_mock}
    ), patch.object(salt.modules.consul, "session_list", return_value=mock_result):
        msg = "Check {} added to agent."
        result = consul.agent_check_register(
            consul_url=consul_url,
            name=name,
            script="test",
            http="test",
            ttl="test",
            interval="test",
        )
        assert result == {"message": msg.format(name), "res": True}
    # non-200 response -> registration failure
    with patch.object(salt.utils.http, "query", return_value=http_204), patch.dict(
        consul.__salt__, {"config.get": url_mock}
    ), patch.object(salt.modules.consul, "session_list", return_value=mock_result):
        msg = "Unable to add check to agent."
        result = consul.agent_check_register(
            consul_url=consul_url,
            name=name,
            script="test",
            http="test",
            ttl="test",
            interval="test",
        )
        assert result == {"message": msg.format(name), "res": False}
def test_agent_check_deregister():
"""
Test consul agent check register
"""
consul_url = "http://localhost:1313"
key = "cluster/key"
mock_result = "test"
mock_http_result = {"status": 200, "dict": mock_result}
mock_http_result_false = {"status": 204, "dict": mock_result}
mock_url = MagicMock(return_value=consul_url)
mock_nourl = MagicMock(return_value=None)
# no consul url error
with | |
# Repository: HanseMerkur/nitro-python
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nitro.resource.base.base_resource import base_resource
from nitro.resource.base.base_resource import base_response
from nitro.service.options import options
from nitro.exception.nitro_exception import nitro_exception
from nitro.util.nitro_util import nitro_util
class nsacl(base_resource) :
"""Configuration for ACL entry resource."""
def __init__(self) :
    """Initialize all nsacl fields to their empty/zero defaults.

    Each underscore-prefixed attribute backs one property of the ACL entry
    resource; values are populated from the NITRO API response.
    """
    # rule identity and action
    self._aclname = ""
    self._aclaction = ""
    # traffic domain id (0 = default domain)
    self._td = 0
    # source IP match: enabled flag, operator, and value
    self._srcip = False
    self._srcipop = ""
    self._srcipval = ""
    # source port match: enabled flag, operator, and value
    self._srcport = False
    self._srcportop = ""
    self._srcportval = ""
    # destination IP match: enabled flag, operator, and value
    self._destip = False
    self._destipop = ""
    self._destipval = ""
    # destination port match: enabled flag, operator, and value
    self._destport = False
    self._destportop = ""
    self._destportval = ""
    self._ttl = 0
    # source MAC address and mask
    self._srcmac = ""
    self._srcmacmask = ""
    # protocol by name or by IP protocol number
    self._protocol = ""
    self._protocolnumber = 0
    self._vlan = 0
    self._vxlan = 0
    # capitalized attribute name; presumably mirrors the NITRO field
    # "Interface" -- confirm against the API schema
    self._Interface = ""
    self._established = False
    self._icmptype = 0
    self._icmpcode = 0
    self._priority = 0
    self._state = ""
    self._logstate = ""
    self._ratelimit = 0
    # rename target used by the rename operation
    self._newname = ""
    # read-only fields returned by the appliance
    self._hits = 0
    self._kernelstate = ""
    # NOTE: three leading underscores -- name-mangled to _nsacl___count
    self.___count = 0
@property
def aclname(self) :
"""Name for the extended ACL rule. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters. Cannot be changed after the extended ACL rule is created.<br/>Minimum length = 1."""
try :
return self._aclname
except Exception as e:
raise e
@aclname.setter
def aclname(self, aclname) :
"""Name for the extended ACL rule. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters. Cannot be changed after the extended ACL rule is created.<br/>Minimum length = 1
:param aclname:
"""
try :
self._aclname = aclname
except Exception as e:
raise e
@property
def aclaction(self) :
"""Action to perform on incoming IPv4 packets that match the extended ACL rule.
Available settings function as follows:
* ALLOW - The NetScaler appliance processes the packet.
* BRIDGE - The NetScaler appliance bridges the packet to the destination without processing it.
* DENY - The NetScaler appliance drops the packet.<br/>Possible values = BRIDGE, DENY, ALLOW.
"""
try :
return self._aclaction
except Exception as e:
raise e
@aclaction.setter
def aclaction(self, aclaction) :
"""Action to perform on incoming IPv4 packets that match the extended ACL rule.
Available settings function as follows:
* ALLOW - The NetScaler appliance processes the packet.
* BRIDGE - The NetScaler appliance bridges the packet to the destination without processing it.
* DENY - The NetScaler appliance drops the packet.<br/>Possible values = BRIDGE, DENY, ALLOW
:param aclaction:
"""
try :
self._aclaction = aclaction
except Exception as e:
raise e
@property
def td(self) :
"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Maximum length = 4094."""
try :
return self._td
except Exception as e:
raise e
@td.setter
def td(self, td) :
"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Maximum length = 4094
:param td:
"""
try :
self._td = td
except Exception as e:
raise e
@property
def srcip(self) :
"""IP address or range of IP addresses to match against the source IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [10.102.29.30-10.102.29.189]."""
try :
return self._srcip
except Exception as e:
raise e
@srcip.setter
def srcip(self, srcip) :
"""IP address or range of IP addresses to match against the source IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [10.102.29.30-10.102.29.189].
:param srcip:
"""
try :
self._srcip = srcip
except Exception as e:
raise e
@property
def srcipop(self) :
"""Either the equals (=) or does not equal (!=) logical operator.<br/>Possible values = =, !=, EQ, NEQ."""
try :
return self._srcipop
except Exception as e:
raise e
@srcipop.setter
def srcipop(self, srcipop) :
"""Either the equals (=) or does not equal (!=) logical operator.<br/>Possible values = =, !=, EQ, NEQ
:param srcipop:
"""
try :
self._srcipop = srcipop
except Exception as e:
raise e
@property
def srcipval(self) :
"""IP address or range of IP addresses to match against the source IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [10.102.29.30-10.102.29.189]."""
try :
return self._srcipval
except Exception as e:
raise e
@srcipval.setter
def srcipval(self, srcipval) :
"""IP address or range of IP addresses to match against the source IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [10.102.29.30-10.102.29.189].
:param srcipval:
"""
try :
self._srcipval = srcipval
except Exception as e:
raise e
@property
def srcport(self) :
"""Port number or range of port numbers to match against the source port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [40-90]."""
try :
return self._srcport
except Exception as e:
raise e
@srcport.setter
def srcport(self, srcport) :
"""Port number or range of port numbers to match against the source port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [40-90].
:param srcport:
"""
try :
self._srcport = srcport
except Exception as e:
raise e
@property
def srcportop(self) :
"""Either the equals (=) or does not equal (!=) logical operator.<br/>Possible values = =, !=, EQ, NEQ."""
try :
return self._srcportop
except Exception as e:
raise e
@srcportop.setter
def srcportop(self, srcportop) :
"""Either the equals (=) or does not equal (!=) logical operator.<br/>Possible values = =, !=, EQ, NEQ
:param srcportop:
"""
try :
self._srcportop = srcportop
except Exception as e:
raise e
@property
def srcportval(self) :
"""Port number or range of port numbers to match against the source port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [40-90].<br/>Maximum length = 65535."""
try :
return self._srcportval
except Exception as e:
raise e
@srcportval.setter
def srcportval(self, srcportval) :
"""Port number or range of port numbers to match against the source port number of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [40-90].<br/>Maximum length = 65535
:param srcportval:
"""
try :
self._srcportval = srcportval
except Exception as e:
raise e
@property
def destip(self) :
"""IP address or range of IP addresses to match against the destination IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [10.102.29.30-10.102.29.189]."""
try :
return self._destip
except Exception as e:
raise e
@destip.setter
def destip(self, destip) :
"""IP address or range of IP addresses to match against the destination IP address of an incoming IPv4 packet. In the command line interface, separate the range with a hyphen and enclose within brackets. For example: [10.102.29.30-10.102.29.189].
:param destip: | |
import ast
import re
from math import ceil, floor
import simpleeval
from simpleeval import DEFAULT_NAMES, EvalWithCompoundTypes, IterableTooLong, SimpleEval
from api.avrae.cogs5e.funcs.dice import roll
from api.avrae.cogs5e.models.errors import ConsumableException, EvaluationError, FunctionRequiresCharacter, InvalidArgument
from . import MAX_ITER_LENGTH, SCRIPTING_RE
from .functions import DEFAULT_FUNCTIONS, DEFAULT_OPERATORS
from .helpers import get_uvars, update_uvars
from .legacy import LegacyRawCharacter
# Harden the simpleeval sandbox: str.format_map can reach object attributes
# the same way str.format can, so make sure it is blacklisted too.
if 'format_map' not in simpleeval.DISALLOW_METHODS:
    simpleeval.DISALLOW_METHODS.append('format_map')
class MathEvaluator(SimpleEval):
    """Evaluator with basic math functions exposed."""
    MATH_FUNCTIONS = {'ceil': ceil, 'floor': floor, 'max': max, 'min': min, 'round': round}

    def __init__(self, operators=None, functions=None, names=None):
        """Build an evaluator; missing tables default to copies of the module defaults."""
        if operators is None:
            operators = DEFAULT_OPERATORS.copy()
        if functions is None:
            functions = DEFAULT_FUNCTIONS.copy()
        if names is None:
            names = DEFAULT_NAMES.copy()
        # zero-argument super(): the file is Python 3 (f-strings are used elsewhere)
        super().__init__(operators, functions, names)

    @classmethod
    def with_character(cls, character, spell_override=None):
        """Return an evaluator whose names are *character*'s scope locals.

        :param character: provides get_scope_locals() for the name table
        :param spell_override: if given, bound to the name ``spell``
        """
        names = character.get_scope_locals()
        if spell_override is not None:
            names['spell'] = spell_override
        return cls(names=names)

    def parse(self, string):
        """Parses a dicecloud-formatted string (evaluating text in {}).

        Unescaped ``{...}`` spans are replaced by their evaluated value;
        any failure is wrapped in :class:`EvaluationError` carrying *string*.
        """
        try:
            return re.sub(r'(?<!\\){(.+?)}', lambda m: str(self.eval(m.group(1))), string)
        except Exception as ex:
            raise EvaluationError(ex, string)
class ScriptingEvaluator(EvalWithCompoundTypes):
"""Evaluator with compound types, comprehensions, and assignments exposed."""
    def __init__(self, ctx, operators=None, functions=None, names=None):
        """Set up the sandboxed evaluator for the given command context.

        :param ctx: the invoking command context (bot, channel, guild access)
        :param operators/functions/names: optional tables; default to copies
            of the module defaults so mutation never leaks across instances
        """
        if operators is None:
            operators = DEFAULT_OPERATORS.copy()
        if functions is None:
            functions = DEFAULT_FUNCTIONS.copy()
        if names is None:
            names = DEFAULT_NAMES.copy()
        super(ScriptingEvaluator, self).__init__(operators, functions, names)
        # extend the supported AST node set beyond EvalWithCompoundTypes
        self.nodes.update({
            ast.JoinedStr: self._eval_joinedstr,  # f-string
            ast.FormattedValue: self._eval_formattedvalue,  # things in f-strings
            ast.ListComp: self._eval_listcomp,
            ast.SetComp: self._eval_setcomp,
            ast.DictComp: self._eval_dictcomp,
            ast.comprehension: self._eval_comprehension
        })
        # character-only functions start as needs_char stubs; with_character()
        # replaces them with real implementations bound to a character
        self.functions.update(  # character-only functions
            get_cc=self.needs_char, set_cc=self.needs_char, get_cc_max=self.needs_char,
            get_cc_min=self.needs_char, mod_cc=self.needs_char,
            cc_exists=self.needs_char, create_cc_nx=self.needs_char, create_cc=self.needs_char,
            get_slots=self.needs_char, get_slots_max=self.needs_char, set_slots=self.needs_char,
            use_slot=self.needs_char,
            get_hp=self.needs_char, set_hp=self.needs_char, mod_hp=self.needs_char, hp_str=self.needs_char,
            get_temphp=self.needs_char, set_temphp=self.needs_char,
            set_cvar=self.needs_char, delete_cvar=self.needs_char, set_cvar_nx=self.needs_char,
            get_raw=self.needs_char
        )
        # functions that are always available, character or not
        self.functions.update(
            set=self.set, exists=self.exists, combat=self.combat,
            get_gvar=self.get_gvar,
            set_uvar=self.set_uvar, delete_uvar=self.delete_uvar, set_uvar_nx=self.set_uvar_nx,
            uvar_exists=self.uvar_exists,
            chanid=self.chanid, servid=self.servid,
            get=self.get
        )
        # assignment targets supported by eval() (see _eval_assign)
        self.assign_nodes = {
            ast.Name: self._assign_name,
            ast.Tuple: self._assign_tuple,
            ast.Subscript: self._assign_subscript
        }
        self._loops = 0
        # per-run caches of fetched gvars/uvars (and later, character/combat)
        self._cache = {
            "gvars": {},
            "uvars": {}
        }
        self.ctx = ctx
        # dirty flags consulted by run_commits()
        self.character_changed = False
        self.combat_changed = False
        self.uvars_changed = set()
    @classmethod
    async def new(cls, ctx):
        """Async factory: build an evaluator with the user's uvars preloaded
        into both the name table and the uvar cache."""
        inst = cls(ctx)
        uvars = await get_uvars(ctx)
        inst.names.update(uvars)
        inst._cache['uvars'].update(uvars)
        return inst
    async def with_character(self, character):
        """Bind *character* to this evaluator.

        Loads the character's scope locals into the name table, caches the
        character for run_commits(), and replaces the needs_char stubs with
        closures over *character*. Returns self for chaining.
        """
        self.names.update(character.get_scope_locals())
        self._cache['character'] = character
        # define character-specific functions
        # helpers
        def _get_consumable(name):
            # linear scan of the character's counters; raises if absent
            consumable = next((con for con in character.consumables if con.name == name), None)
            if consumable is None:
                raise ConsumableException(f"There is no counter named {name}.")
            return consumable
        # funcs
        def combat():
            # same as the unbound combat(), but attaches the character if the
            # combat has no "me" yet
            cmbt = self.combat()
            if cmbt and not cmbt.me:
                cmbt.func_set_character(character)
            return cmbt
        def get_cc(name):
            return _get_consumable(name).value
        def get_cc_max(name):
            return _get_consumable(name).get_max()
        def get_cc_min(name):
            return _get_consumable(name).get_min()
        def set_cc(name, value: int, strict=False):
            _get_consumable(name).set(value, strict)
            self.character_changed = True
        def mod_cc(name, val: int, strict=False):
            return set_cc(name, get_cc(name) + val, strict)
        def delete_cc(name):
            to_delete = _get_consumable(name)
            character.consumables.remove(to_delete)
            self.character_changed = True
        def create_cc_nx(name: str, minVal: str = None, maxVal: str = None, reset: str = None,
                         dispType: str = None):
            # nx = "if not exists"; silently does nothing when the counter exists
            if not cc_exists(name):
                from api.avrae.cogs5e.models.sheet.player import CustomCounter
                new_consumable = CustomCounter.new(character, name, minVal, maxVal, reset, dispType)
                character.consumables.append(new_consumable)
                self.character_changed = True
        def create_cc(name: str, *args, **kwargs):
            # destructive create: replaces an existing counter of the same name
            if cc_exists(name):
                delete_cc(name)
            create_cc_nx(name, *args, **kwargs)
        def cc_exists(name):
            return name in set(con.name for con in character.consumables)
        def cc_str(name):
            return str(_get_consumable(name))
        def get_slots(level: int):
            return character.spellbook.get_slots(level)
        def get_slots_max(level: int):
            return character.spellbook.get_max_slots(level)
        def slots_str(level: int):
            return character.spellbook.slots_str(level)
        def set_slots(level: int, value: int):
            character.spellbook.set_slots(level, value)
            self.character_changed = True
        def use_slot(level: int):
            character.spellbook.use_slot(level)
            self.character_changed = True
        def get_hp():
            return character.hp
        def set_hp(val: int):
            character.hp = val
            self.character_changed = True
        def mod_hp(val: int, overflow: bool = True):
            character.modify_hp(val, overflow=overflow)
            self.character_changed = True
        def hp_str():
            return character.hp_str()
        def get_temphp():
            return character.temp_hp
        def set_temphp(val: int):
            character.temp_hp = val
            self.character_changed = True
        def set_cvar(name, val: str):
            character.set_cvar(name, val)
            # cvars are strings; mirror into the live name table
            self.names[name] = str(val)
            self.character_changed = True
        def set_cvar_nx(name, val: str):
            if name not in character.cvars:
                set_cvar(name, val)
        def delete_cvar(name):
            if name in character.cvars:
                del character.cvars[name]
                self.character_changed = True
        def get_raw():
            return LegacyRawCharacter(character).to_dict()
        # overwrite the needs_char stubs installed in __init__
        self.functions.update(
            combat=combat,
            get_cc=get_cc, set_cc=set_cc, get_cc_max=get_cc_max, get_cc_min=get_cc_min, mod_cc=mod_cc,
            delete_cc=delete_cc, cc_exists=cc_exists, create_cc_nx=create_cc_nx, create_cc=create_cc, cc_str=cc_str,
            get_slots=get_slots, get_slots_max=get_slots_max, set_slots=set_slots, use_slot=use_slot,
            slots_str=slots_str,
            get_hp=get_hp, set_hp=set_hp, mod_hp=mod_hp, hp_str=hp_str,
            get_temphp=get_temphp, set_temphp=set_temphp,
            set_cvar=set_cvar, delete_cvar=delete_cvar, set_cvar_nx=set_cvar_nx,
            get_raw=get_raw
        )
        return self
    async def run_commits(self):
        """Persist anything the script changed: character sheet, combat state,
        and user variables. Each commit only fires if its dirty flag is set
        and the corresponding cache entry exists."""
        if self.character_changed and 'character' in self._cache:
            await self._cache['character'].commit(self.ctx)
        if self.combat_changed and 'combat' in self._cache and self._cache['combat']:
            await self._cache['combat'].func_commit()
        if self.uvars_changed and 'uvars' in self._cache and self._cache['uvars']:
            await update_uvars(self.ctx, self._cache['uvars'], self.uvars_changed)
# helpers
def needs_char(self, *args, **kwargs):
raise FunctionRequiresCharacter() # no. bad.
def set(self, name, value):
"""
Sets the value of a name in the current scripting context.
.. deprecated:: 0.1.0
Use ``name = value`` instead.
:param name: The name to set.
:param value: The value to set it to.
"""
self.names[name] = value
def exists(self, name):
"""
Returns whether or not a name is set in the current evaluation context.
:rtype: bool
"""
return name in self.names
    def combat(self):
        """
        Returns the combat active in the channel if one is. Otherwise, returns ``None``.

        :rtype: :class:`~cogs5e.funcs.scripting.combat.SimpleCombat`
        """
        from .combat import SimpleCombat
        # lazily fetch once per run; NOTE: the combat_changed flag is set on
        # first access, so merely reading combat triggers a commit later
        if 'combat' not in self._cache:
            self._cache['combat'] = SimpleCombat.from_ctx(self.ctx)
            self.combat_changed = True
        return self._cache['combat']

    def uvar_exists(self, name):
        """
        Returns whether a uvar exists.

        Requires the name to be bound AND present in the uvar cache, so a
        cvar of the same name does not count.

        :rtype: bool
        """
        return self.exists(name) and name in self._cache['uvars']
def get_gvar(self, address):
"""
Retrieves and returns the value of a gvar (global variable).
:param str address: The gvar address.
:return: The value of the gvar.
:rtype: str
"""
if address not in self._cache['gvars']:
result = self.ctx.bot.mdb.gvars.delegate.find_one({"key": address})
if result is None:
return None
self._cache['gvars'][address] = result['value']
return self._cache['gvars'][address]
def set_uvar(self, name: str, value: str):
"""
Sets a user variable.
:param str name: The name of the variable to set.
:param str value: The value to set it to.
"""
if not name.isidentifier():
raise InvalidArgument("Cvar contains invalid character.")
self._cache['uvars'][name] = str(value)
self.names[name] = str(value)
self.uvars_changed.add(name)
def set_uvar_nx(self, name, value: str):
"""
Sets a user variable if there is not already an existing name.
:param str name: The name of the variable to set.
:param str value: The value to set it to.
"""
if not name in self.names:
self.set_uvar(name, value)
def delete_uvar(self, name):
"""
Deletes a user variable. Does nothing if the variable does not exist.
:param str name: The name of the variable to delete.
"""
if name in self._cache['uvars']:
del self._cache['uvars'][name]
self.uvars_changed.add(name)
def chanid(self):
"""
Returns the ID of the active Discord channel.
:rtype: str
"""
return str(self.ctx.channel.id)
def servid(self):
"""
Returns the ID of the active Discord guild, or None if in DMs.
:rtype: str
"""
if self.ctx.guild:
return str(self.ctx.guild.id)
return None
def get(self, name, default=None):
"""
Gets the value of a name, or returns *default* if the name is not set.
:param str name: The name to retrieve.
:param default: What to return if the name is not set.
"""
if name in self.names:
return self.names[name]
return default
# evaluation
    def parse(self, string, double_curly=None, curly=None, ltgt=None):
        """Parses a scripting string (evaluating text in {{}}).

        SCRIPTING_RE captures three alternatives: group 1 = {{expr}},
        group 2 = <name>, group 3 = {roll-expr}. The optional *double_curly*,
        *ltgt* and *curly* callables override the default handler for each.
        """
        ops = r"([-+*/().<>=])"
        def evalrepl(match):
            try:
                if match.group(1):  # {{}}
                    # full sandboxed evaluation of the expression
                    double_func = double_curly or self.eval
                    evalresult = double_func(match.group(1))
                elif match.group(2):  # <>
                    if re.match(r'<a?([@#]|:.+:)[&!]{0,2}\d+>', match.group(0)):  # ignore mentions
                        return match.group(0)
                    out = match.group(2)
                    # plain name lookup; unknown names pass through unchanged
                    ltgt_func = ltgt or (lambda s: str(self.names.get(s, s)))
                    evalresult = ltgt_func(out)
                elif match.group(3):  # {}
                    varstr = match.group(3)
                    def default_curly_func(s):
                        # substitute bound names into the dice expression
                        # token-by-token, then roll it
                        curlyout = ""
                        for substr in re.split(ops, s):
                            temp = substr.strip()
                            curlyout += str(self.names.get(temp, temp)) + " "
                        return str(roll(curlyout).total)
                    curly_func = curly or default_curly_func
                    evalresult = curly_func(varstr)
                else:
                    evalresult = None
            except Exception as ex:
                # wrap with the offending snippet for user-facing errors
                raise EvaluationError(ex, match.group(0))
            return str(evalresult) if evalresult is not None else ''
        output = re.sub(SCRIPTING_RE, evalrepl, string)  # evaluate
        return output
    def eval(self, expr):  # allow for ast.Assign to set names
        """ evaluate an expression, using the operators, functions and
        names previously set up.

        Unlike the base class, a top-level assignment statement
        (``x = expr``) is also accepted and dispatched to _eval_assign.
        """
        # set a copy of the expression aside, so we can give nice errors...
        self.expr = expr
        # and evaluate:
        expression = ast.parse(expr.strip()).body[0]
        if isinstance(expression, ast.Expr):
            return self._eval(expression.value)
        elif isinstance(expression, ast.Assign):
            return self._eval_assign(expression)
        else:
            raise TypeError("Unknown ast body type")
# private magic
    def _eval_assign(self, node):
        """Evaluate an ast.Assign node (single target only: targets[0])."""
        names = node.targets[0]
        values = node.value
        self._assign(names, values)

    def _assign(self, names, values, eval_values=True):
        """Dispatch an assignment to the handler for the target node type
        (Name, Tuple, or Subscript; anything else is rejected)."""
        try:
            handler = self.assign_nodes[type(names)]
        except KeyError:
            raise TypeError(f"Assignment to {type(names).__name__} is not allowed")
        return handler(names, values, eval_values)

    def _assign_name(self, name, value, eval_value=True):
        """Bind a simple name; *eval_value* is False when the value is
        already evaluated (e.g. during tuple unpacking)."""
        if not isinstance(self.names, dict):
            raise TypeError("cannot set name: incorrect name type")
        else:
            if eval_value:
                value = self._eval(value)
            self.names[name.id] = value
def _assign_tuple(self, names, values, eval_values=True):
if not all(isinstance(n, ast.Name) for n in names.elts):
raise TypeError("Assigning to multiple | |
####################
# ES-DOC CIM Questionnaire
# Copyright (c) 2017 ES-DOC. All rights reserved.
#
# University of Colorado, Boulder
# http://cires.colorado.edu/
#
# This project is distributed according to the terms of the MIT license [http://www.opensource.org/licenses/MIT].
####################
from django.db.models import QuerySet
from django.forms import widgets
from django.utils.html import format_html
from django.utils.encoding import force_text
from django.forms.fields import BooleanField
from django.forms.widgets import CheckboxInput
from django.forms.models import BaseModelForm, BaseModelFormSet
from djng.forms import NgModelFormMixin, NgFormValidationMixin, NgModelForm
from djng.forms.angular_base import TupleErrorList, SafeTuple
from djng.styling.bootstrap3.forms import Bootstrap3ModelForm
from Q.questionnaire.q_utils import update_field_widget_attributes, set_field_widget_attributes
from Q.questionnaire.q_utils import QValidator, legacy_code
from djng.styling.bootstrap3.field_mixins import BooleanFieldMixin
from djng.styling.bootstrap3.widgets import CheckboxInput as BootstrapCheckBoxInput
@legacy_code
def bootstrap_form(form):
    """Apply bootstrap styling to every field of *form*."""
    # dict.iteritems() is Python-2-only and raises AttributeError on Python 3;
    # .values() works on both (the field name was unused anyway).
    for field in form.fields.values():
        bootstrap_field(field)
@legacy_code
def bootstrap_field(field):
    """Apply bootstrap3 styling ("form-control" class) to a single field,
    plus a placeholder for non-checkbox widgets."""
    bootstrap_classes = {
        "class": "form-control",
    }
    update_field_widget_attributes(field, bootstrap_classes)
    # checkboxes render inline with their label; a placeholder is meaningless there
    if not isinstance(field, BooleanField):
        set_field_widget_attributes(field, {
            "placeholder": field.label,
        })
@legacy_code
def unbootstrap_field(field):
    """Legacy placeholder: previously restored the default label/widget on
    boolean fields; currently a no-op (see QForm.unbootstrap_field for the
    live implementation)."""
    if isinstance(field, BooleanField):
        # field.label = field.verbose_name
        # field.widget = CheckboxInput()
        pass
# using djangular forms is pretty cool; it automatically maps ng & bootstrap content to the fields.
# But I override some of the default functionality: in particular, error-handling.
# This allows me to render client-side errors in a djangular-consistent way.
# Using the "add_custom_errors" fn below, I add to the existing djangular error constructs w/ any Q-specific content;
# assuming that there are corresponding JS fns (see "q_validators.js"), this will just work for client errors
# However, working w/ server errors is much more complex...
# The "add_custom_errors" fn, adds placeholders for server-generated error content
# additionally "add_server_errors_to_field" adds the "servererror" directive to fields as needed
# if a server error occurs, the DRF API views will return a JSON array of errors...
# it is the responsibility of the ng submit fn (for example, "CustomizerController.submit_customizaation") to alter the validity of djangular fields
# it is also the responsibility of the ng submit fn to add the returned JSON content to the global "$scope.server_errors" array which is used to fill in the aforementioned placeholders
# finally, the "servererror" directive adds a watch to the underlying ng-model for each field - the 1st time it changes after a server error, its validity is reset
# ...and it's just that easy
class QForm(Bootstrap3ModelForm, NgModelFormMixin, NgFormValidationMixin):
# class QForm(Bootstrap3ModelForm, NgFormValidationMixin):
# class QForm(NgModelForm, NgFormValidationMixin):
# class QForm(NgFormValidationMixin, NgModelForm):
    class Meta:
        # QForm is never instantiated directly; concrete forms subclass it
        abstract = True
    # CSS class djangular applies to required fields
    required_css_class = 'djng-field-required'
    def __init__(self, *args, **kwargs):
        """Initialize the form, translating the load-on-demand "name" kwarg
        into djangular's required unique "form_name" kwarg."""
        # every QForm has a unique "form_name"... the load-on-demand paradigm passes this in using the "name" kwarg
        if "form_name" not in kwargs:
            kwargs["form_name"] = kwargs.pop("name", None)
        assert kwargs.get("form_name", None) is not None, "QForm must have a unique name."
        super(QForm, self).__init__(*args, **kwargs)
        # I thought that I could specify a formset-specific "ng-model" attribute here but it just gets overwritten:
        # "ng-model" gets set at the last-minute by djangular in "get_widget_attrs", so I overwrite _that_ fn below
# TODO: THIS PROBABLY DOESN'T NEED TO BE ITS OWN FN, I CAN JUST CALL "set_field_widget_attributes" DIRECTLY IN A FORM'S __init__ FN
def add_server_errors_to_field(self, field_name):
# adds the possibility of rendering a server error on a given field
field = self.fields[field_name]
set_field_widget_attributes(field, {
"servererror": "true",
})
def unbootstrap_fields(self, field_names):
for field_name in field_names:
self.unbootstrap_field(field_name)
    def unbootstrap_field(self, field_name):
        """Undo Bootstrap3ModelForm's automatic restyling of a single field."""
        # QForm form inherits from Bootstrap3ModelForm;
        # this means that funny things happen automatically when rendering fields
        # this fn can undo those things on a per-field basis
        form_field = self.fields[field_name]
        model_field = self.instance.get_field(field_name)
        if isinstance(form_field, BooleanField):
            # boolean fields include the label_tag as part of the widget and delete the label on the main field
            # that's not desired Q behavior (it messes up alignment of form labels & widgets)
            # so this code puts everything back
            form_field.widget = CheckboxInput(attrs=form_field.widget.attrs)
            form_field.label = model_field.verbose_name
        else:
            # TODO: ANY OTHER FIXES FOR OTHER FIELD TYPES?
            pass
@property
def is_new(self):
return self.instance.pk is None
@property
def is_existing(self):
return not self.is_new()
    def get_widget_attrs(self, bound_field):
        """
        just like the base class fn
        except it sets "ng-model" using "get_qualified_model_field_name"
        (see comment in __init__ re: why I can't just override the attrs there)
        :param bound_field:
        :return: dictionary of field widget attrs
        """
        # note also that djangular overwrites widget classes further downstream
        # to get around this, with custom attributes,
        # I make sure to reset the "widget_css_classes" when calling set_widget_attributes or update_widget_attributes
        # NB: skips NgModelFormMixin in the MRO so its ng-model logic is replaced, not duplicated
        attrs = super(NgModelFormMixin, self).get_widget_attrs(bound_field)
        identifier = self.add_prefix(bound_field.name)
        # values interpolated into each ng_directives format string below
        ng = {
            'name': bound_field.name,
            'identifier': identifier,
            # here is the different bit:
            # 'model': self.scope_prefix and ('%s[\'%s\']' % (self.scope_prefix, identifier)) or identifier
            'model': self.get_qualified_model_field_name(bound_field.name)
        }
        # only fields listed in Meta.ng_models get a two-way ng-model binding
        if hasattr(self, 'Meta') and bound_field.name in getattr(self.Meta, 'ng_models', []):
            attrs['ng-model'] = ng['model']
        for key, fmtstr in self.ng_directives.items():
            attrs[key] = fmtstr % ng
        return attrs
    def get_qualified_form_field_name(self, field_name):
        """
        gets a field name suitable for ng use when binding to form
        (must match the names in error handling)
        :param field_name:
        :return: e.g. "form_name['prefix-field_name']" (format_html-escaped)
        """
        # TODO: THIS IS CLEARLY A BIT SILLY,
        # TODO: B/C IT WINDS UP W/ NAMES LIKE "form_type_12345['form_type_12345.field_name']"
        # TODO: WHEN THEY OUGHT TO BE LIKE "form_type_12345['field_name']"
        # TODO: BUT THE REST OF THE CODE IS WORKING W/ THIS SILLINESS SO LET'S LEAVE WELL-ENOUGH ALONE FOR NOW
        identifier = self.add_prefix(field_name)
        return format_html("{0}['{1}']", self.form_name, identifier)

    def get_qualified_model_field_name(self, field_name):
        """
        gets a field name suitable for ng use when binding to model
        (must match the names in $scope)
        :param field_name:
        :return: e.g. "scope_prefix['field_name']" (format_html-escaped)
        """
        # if self.is_formset():
        #     # the prefix is already handled implicitly in formsets
        #     identifier = field_name
        # else:
        #     identifier = self.add_prefix(field_name)
        identifier = field_name  # THIS ALLOWS ME TO STILL HAVE A UNIQUE PREFIX
        return format_html("{0}['{1}']", self.scope_prefix, identifier)
    def get_current_field_value(self, *args):
        """
        Return the field value from either "data" or "initial". The key will be reformatted to account for the form
        prefix.
        :param str key:
        :param default: If provided as a second argument, this value will be returned in case of a KeyError.
        >>> self.get_current_field_value('a')
        >>> self.get_current_field_value('a', None)
        """
        # emulate an optional positional "default" argument via *args
        if len(args) == 1:
            key = args[0]
            has_default = False
        else:
            key, default = args
            has_default = True
        try:
            # bound data first, with the form prefix applied if there is one
            if self.prefix:
                key_prefix = '{0}-{1}'.format(self.prefix, key)
                ret = self.data[key_prefix]
            else:  # (the model_customizer_form does not have a prefix)
                ret = self.data[key]
        except KeyError:
            # fall back to the unprefixed initial data
            try:
                ret = self.initial[key]
            except KeyError:
                if has_default:
                    ret = default
                else:
                    msg = 'The key "{0}" was not found in "data" or "initial" for form of type {1} with prefix "{2}".'.format(key, type(self), self.prefix)
                    raise KeyError(msg)
        return ret
def get_fields_from_list(self, field_names_list):
"""
returns the fields corresponding to the names in field_names_list
note that getting them explicitly as keys is more efficient than looping through self
:param field_names_list:
:return: fields corresponding to the names in field_names_list
"""
fields = [self[field_name] for field_name in field_names_list]
return fields
    def get_field_errors(self, bound_field):
        """Build the djangular error list for *bound_field*: actual server-side
        errors, potential client-side errors, OK-tick entries, then any
        Q-specific custom/server-error placeholders (add_custom_errors)."""
        # identifier = format_html('{0}.{1}', self.form_name, bound_field.name)
        identifier = self.get_qualified_form_field_name(bound_field.name)
        # errors already raised by server-side validation of this field
        error_list = self.errors.get(bound_field.html_name, [])
        errors = self.error_class([SafeTuple(
            (identifier, self.field_error_css_classes, '$pristine', '$pristine', 'invalid', e)) for e in error_list])
        if bound_field.is_hidden:
            return errors
        # identifier = format_html('{0}.{1}', self.form_name, self.add_prefix(bound_field.name))
        identifier = self.get_qualified_form_field_name(bound_field.name)
        # client-side validation messages that MAY fire once the field is dirty
        potential_errors = bound_field.field.get_potential_errors()
        errors.extend([SafeTuple((identifier, self.field_error_css_classes, '$dirty', pe[0], 'invalid', force_text(pe[1])))
                       for pe in potential_errors])
        if not isinstance(bound_field.field.widget, widgets.PasswordInput):
            # all valid fields shall display OK tick after changed into dirty state
            errors.append(SafeTuple((identifier, self.field_error_css_classes, '$dirty', '$valid', 'valid', '')))
            if bound_field.value():
                # valid bound fields shall display OK tick, even in pristine state
                errors.append(SafeTuple((identifier, self.field_error_css_classes, '$pristine', '$valid', 'valid', '')))
        self.add_custom_errors(errors, bound_field)
        return errors
    def add_custom_errors(self, existing_errors, bound_field):
        """
        called by get_field_errors
        ensures custom client-side validation AND server-side validation is taken into account
        :param existing_errors: error list to append to (mutated in place)
        :param bound_field:
        :return: the same (extended) error list
        """
        identifier = self.get_qualified_form_field_name(bound_field.name)
        # add custom client-side validation as needed...
        custom_potential_errors = getattr(bound_field.field, "custom_potential_errors", [])
        if custom_potential_errors:
            # TODO: MAY WANT TO CHANGE THIS STRING TO BETTER WORK W/ ng-model
            existing_errors.extend([
                SafeTuple((identifier, self.field_error_css_classes, '$dirty', '$error.%s' % pe.name, 'invalid', pe.msg,))
                for pe in custom_potential_errors
            ])
        # add server-side validation as needed...
        # (only for fields tagged via add_server_errors_to_field)
        server_error = "servererror"
        if server_error in bound_field.field.widget.attrs:
            # TODO: I'M NOT SURE WHY I NEED "{% verbatim ng %}" WHEN ADDED DIRECTLY TO THE TEMPLATE, BUT NOT HERE
            # server_error_msg = "{% verbatim ng %} {{ server_errors.form_name['field_name'] }} {% endverbatim ng %}"
            # to escape curly brackets, I have to double them...
            server_error_msg = "{{{{ server_errors.{0} }}}}".format(self.get_qualified_form_field_name(bound_field.name))
            existing_errors.append(
                SafeTuple((identifier, self.field_error_css_classes, '$dirty', '$error.server', "invalid", server_error_msg))
            )
        return existing_errors
def add_custom_potential_errors_to_field(self, field_name):
form_field = self.fields[field_name]
model_field | |
#!/usr/bin/env python3
import datetime
import deconzapi
import docker
import logging
import os
import requests
import sh
import subprocess
import sys
import telegram
import voluptuous as vol
import yaml
from logging.handlers import RotatingFileHandler
from nsenter import Namespace
"""
requirements.txt:
docker==4.2.0
nsenter==0.2
python_telegram_bot==12.7
pyyaml==5.3.1
requests==2.23.0
sh==1.13.1
voluptuous==0.11.7
To install it under crontab, we need to do the following:
sudo /bin/bash
su -
pip3 install -r check-health-requirements.txt
data
----
docker-host:
<name>:
<container(s)>:
count: <int>
msg: <str>
alarm: <date>
clear: <date>
deconz:
...
"""
#################################################################
# Constants
#################################################################
# Log-level names accepted in the YAML configuration (see CONF_LOGLEVEL).
CRITICAL = "CRITICAL"
DEBUG = "DEBUG"
ERROR = "ERROR"
INFO = "INFO"
WARNING = "WARNING"
# Keys used in the YAML configuration file.
CONF_ALARMCOUNT = "alarmcount"
CONF_APIKEY = "apikey"
CONF_CHAT_ID = "chat_id"
CONF_CLIENTS = "clients"
CONF_CODE = "code"
CONF_CONFIG = "config"
CONF_CONTAINER = "container"
CONF_CONTAINERS = "containers"
CONF_DATAFILE = "datafile"
CONF_DISABLE_NOTIFICATION = "disable_notification"
CONF_DNS = "dns"
CONF_ENABLED = "enabled"
CONF_HOST = "host"
CONF_HOSTS = "hosts"
CONF_IGNORE = "ignore"
CONF_INTERVAL = "interval"
CONF_LOGLEVEL = "loglevel"
CONF_NAME = "name"
CONF_NOTIFY = "notify"
CONF_PORT = "port"
CONF_REQUEST = "request"
CONF_TELEGRAM = "telegram"
CONF_TIMEOUT = "timeout"
CONF_TOKEN = "token"
CONF_TYPE = "type"
# Keys/values used in the persisted data file and internal structures.
ATTR_ALARM = "alarm"
ATTR_CLEAR = "clear"
ATTR_CONTAINERS = "containers"
ATTR_COUNT = "count"
ATTR_DECONZ = "deconz"
ATTR_DOCKERHOST = "docker-host"
ATTR_GET = "GET"
ATTR_HEAD = "HEAD"
ATTR_HOSTS = "hosts"
ATTR_HTTP = "http"
ATTR_MSG = "msg"
ATTR_NAME = "name"
ATTR_TELEGRAM = "telegram"
ATTR_TYPE = "type"
#################################################################
# Root logger only shows ERROR and above on the console.
logging.basicConfig(
    level=logging.ERROR, format="%(asctime)s %(levelname)s: %(message)s"
)
LOGGER = logging.getLogger(__name__)
# This module's logger records everything; the handler added in
# HealthCheck.__init__ filters by the configured level.
LOGGER.setLevel(logging.DEBUG)
# Do not duplicate records through the root logger's console handler.
LOGGER.propagate = False
#################################################################
# Base schema all check-type schemas extend.
BASE_SCHEMA = vol.Schema({})
# CLIENTS_SCHEMA = BASE_SCHEMA.extend(
CLIENTS_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_CONTAINERS, default=[]): list,
        vol.Optional(CONF_HOSTS, default=[]): list,
    }
)
# Schema for a "docker-host" check: verifies that client containers/hosts
# hold established connections to a primary container.
DOCKERHOST_SCHEMA = BASE_SCHEMA.extend(
    {
        vol.Required(CONF_TYPE): ATTR_DOCKERHOST,
        vol.Optional(CONF_ENABLED, default=True): bool,
        vol.Optional(CONF_ALARMCOUNT): int,
        vol.Required(CONF_NAME): str,
        vol.Required(CONF_CONTAINER): str,
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_PORT): vol.Any(int, list),
        vol.Optional(CONF_DNS, default=False): bool,
        vol.Required(CONF_CLIENTS): vol.All(dict, CLIENTS_SCHEMA),
    },
    extra=vol.ALLOW_EXTRA,
)
# Schema for an "http" check.
HTTP_SCHEMA = BASE_SCHEMA.extend(
    {
        vol.Required(CONF_TYPE): ATTR_HTTP,
        vol.Optional(CONF_ENABLED, default=True): bool,
        vol.Optional(CONF_ALARMCOUNT): int,
        vol.Required(CONF_NAME): str,
        vol.Required(CONF_HOST): str,
        # NOTE(review): vol.Upper as the last alternative of vol.Any accepts
        # *any* string and uppercases it; vol.All(vol.Upper, vol.Any(GET, HEAD))
        # may have been the intent — confirm.
        vol.Optional(CONF_REQUEST, default=ATTR_GET): vol.Any(
            ATTR_GET, ATTR_HEAD, vol.Upper
        ),
        # NOTE(review): vol.All() with no validators accepts any value; an
        # int (or list of ints) validator was probably intended — confirm.
        vol.Optional(CONF_CODE, default=200): vol.All(),
        vol.Optional(CONF_TIMEOUT, default=5): int,
    },
    extra=vol.ALLOW_EXTRA,
)
# Schema for a "deconz" check (DeCONZ REST API, default port 3080).
DECONZ_SCHEMA = BASE_SCHEMA.extend(
    {
        vol.Required(CONF_TYPE): ATTR_DECONZ,
        vol.Optional(CONF_ENABLED, default=True): bool,
        vol.Optional(CONF_ALARMCOUNT): int,
        vol.Optional(CONF_NAME, default="DeCONZ"): str,
        vol.Required(CONF_HOST): str,
        vol.Optional(CONF_PORT, default=3080): int,
        vol.Required(CONF_APIKEY): str,
        vol.Optional(CONF_TIMEOUT, default=360): int,
        vol.Optional(CONF_IGNORE, default=[]): list,
    },
    extra=vol.ALLOW_EXTRA,
)
# Schema for the telegram notification settings.
TELEGRAM_SCHEMA = BASE_SCHEMA.extend(
    {
        vol.Optional(CONF_ENABLED, default=True): bool,
        vol.Required(CONF_TOKEN): str,
        vol.Required(CONF_CHAT_ID): int,
        vol.Optional(CONF_DISABLE_NOTIFICATION, default=False): bool,
    }
)
# Top-level schema for the whole configuration file.
CONFIG_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_INTERVAL, default=60): int,
        vol.Optional(CONF_ALARMCOUNT, default=2): int,
        vol.Required(CONF_NOTIFY): str,
        vol.Optional(CONF_LOGLEVEL, default=DEBUG): vol.Any(
            CRITICAL, DEBUG, ERROR, INFO, WARNING, vol.Upper
        ),
        # NOTE(review): the default {} is a dict but the validator requires a
        # list — if voluptuous validates defaults this fails whenever the key
        # is omitted; confirm against the voluptuous version in use.
        vol.Optional(CONF_CONFIG, default={}): vol.All(
            list, [vol.Any(DOCKERHOST_SCHEMA, HTTP_SCHEMA, DECONZ_SCHEMA)]
        ),
        # NOTE(review): an omitted telegram section defaults to {}, which
        # cannot satisfy Required(CONF_TOKEN)/Required(CONF_CHAT_ID) — confirm.
        vol.Optional(CONF_TELEGRAM, default={}): vol.Schema(TELEGRAM_SCHEMA),
    },
    extra=vol.ALLOW_EXTRA,
)
#################################################################
class HealthCheck:
"""Class of all our health checks."""
def __init__(self):
"""Create the object with required parameters."""
# Read the configuration
self._readConfig()
# Try to read data from the (temporary) file
self._readData()
# Define msg list, of information to send to me
self._msg = []
# Validate our configuration via voluptuous
self._config = CONFIG_SCHEMA(self._config)
# Set logging
# Setup logging, logfile and rotation
logname = __file__
logname = logname.replace(".py", "")
logname += ".log"
maxBytes = 10 * 1024 * 1024
backupCount = 3
handler = RotatingFileHandler(
logname, maxBytes=maxBytes, backupCount=backupCount
)
handler.setLevel(self._config[CONF_LOGLEVEL])
handler.setFormatter(
logging.Formatter("%(asctime)s %(levelname)s: %(message)s")
)
LOGGER.addHandler(handler)
#############################################################
def _readConfig(self):
# If we get an argument, take first one as filename of our configuration file
if len(sys.argv) > 1:
configname = sys.argv[1]
else:
configname = "{}.yaml".format(
os.path.splitext(os.path.abspath(__file__))[0]
)
try:
with open(configname, "r") as f:
self._config = yaml.safe_load(f)
except FileNotFoundError:
sys.exit("ERROR: No configuration file '{}' found".format(configname))
#############################################################
def _readData(self):
if CONF_DATAFILE in self._config:
self._datafile = self._config[CONF_DATAFILE]
else:
self._datafile = __file__
self._datafile = self._datafile.replace(".py", "")
self._datafile += ".data.yaml"
try:
with open(self._datafile, "r") as f:
self._data = yaml.safe_load(f)
except FileNotFoundError:
LOGGER.info("No datafile '%s' found, using defaults", self._datafile)
self._data = {}
#############################################################
def _writeData(self):
"""Write data file."""
LOGGER.debug("Writing data file (%s)", self._datafile)
# No error check yet ...
with open(self._datafile, "w") as f:
yaml.dump(self._data, f, default_flow_style=False)
#############################################################
    def _handleMsg(self, alarm, type, subtype, name, entry, msg):
        """Record an alarm/clear event and queue a notification when due.

        Alarms are debounced: a notification is queued only when the counter
        reaches CONF_ALARMCOUNT; a clear is queued only if the alarm had
        previously been notified.

        :param alarm: event kind, ATTR_ALARM or ATTR_CLEAR.
        :param type: check category (e.g. ATTR_DOCKERHOST). NOTE: shadows the
            ``type`` builtin inside this method.
        :param subtype: optional sub-bucket below the entry ("" for none).
        :param name: configured name of the check.
        :param entry: the item within the check (e.g. a container name).
        :param msg: human-readable message for the notification.
        """
        # Setup our structure: self._data[type][name][entry] nested dicts.
        self._data[type] = self._data.get(type, {})
        self._data[type][name] = self._data[type].get(name, {})
        self._data[type][name][entry] = self._data[type][name].get(entry, {})
        # shorthand and check if subtype exists or not
        if subtype != "":
            # NOTE(review): the subtype bucket is recreated empty on every
            # call, and count/msg below are read from the *entry* level, not
            # the subtype level — so subtype state never persists; confirm
            # this is intended.
            self._data[type][name][entry][subtype] = {}
            data = self._data[type][name][entry][subtype]
        else:
            data = self._data[type][name][entry]
        data[ATTR_COUNT] = self._data[type][name][entry].get(ATTR_COUNT, 0)
        data[ATTR_MSG] = self._data[type][name][entry].get(ATTR_MSG, "")
        # This is a clear alarm
        if alarm == ATTR_CLEAR:
            # Only notify the clear if the alarm had actually been reported.
            if data[ATTR_COUNT] >= self._config[CONF_ALARMCOUNT]:
                LOGGER.debug("Adding clear msg to the queue '%s'", msg)
                # NOTE(review): the clear branch queues the plain ``entry``
                # value while the alarm branch below queues a dict — the
                # notification consumer must handle both shapes; confirm.
                self._msg.append(entry)
            # Record the time when this happened
            data[ATTR_CLEAR] = datetime.datetime.now()
            data[ATTR_COUNT] = 0
            data[ATTR_MSG] = ""
        # A real alarm, check the counter
        if alarm == ATTR_ALARM:
            data[ATTR_COUNT] += 1
            data[ATTR_MSG] = msg
            if data[ATTR_COUNT] == 1:
                # First occurrence: remember when the alarm started.
                data[ATTR_ALARM] = datetime.datetime.now()
            if data[ATTR_COUNT] == self._config[CONF_ALARMCOUNT]:
                LOGGER.debug("Adding alarm msg to the queue '%s'", msg)
                # add all information we got, we can use it later in the notification
                # (note: this rebinds the ``entry`` parameter to a dict)
                entry = {}
                entry[ATTR_TYPE] = type
                entry[ATTR_NAME] = name
                entry[ATTR_ALARM] = alarm
                entry[ATTR_MSG] = msg
                self._msg.append(entry)
            else:
                LOGGER.debug(
                    "%s: Alarm ignored, counter is %d and not equal to %d",
                    type,
                    data[ATTR_COUNT],
                    self._config[CONF_ALARMCOUNT],
                )
#############################################################
def _dockerHost(self, config):
"""
Function to check if container/external IP have connections
open to our main container. This container is running in
"network=host" like "hass" and "mosquitto".
"""
# Check configuration
for conf in [CONF_NAME, CONF_HOST, CONF_PORT, CONF_CLIENTS]:
if conf not in config:
LOGGER.error(
"%s: Invalid config, missing '%s' in config=%s",
ATTR_DOCKERHOST,
conf,
str(config),
)
return
if not config[CONF_ENABLED]:
LOGGER.debug("%s: %s is not enabled", ATTR_DOCKERHOST, config[CONF_NAME])
return
# Just report it in debug mode
LOGGER.debug("%s: %s is enabled", ATTR_DOCKERHOST, config[CONF_NAME])
LOGGER.debug("%s: config=%s", ATTR_DOCKERHOST, str(config))
# Get our docker client
client = docker.from_env()
# Check if main docker container exist and is running
try:
container = client.containers.get(config[CONF_CONTAINER])
except docker.errors.NotFound:
# Container doesn't exit, so we shouldn't continue
LOGGER.error(
"%s: %s primary container %s does not exist",
ATTR_DOCKERHOST,
config[CONF_NAME],
config[CONF_CONTAINER],
)
# Add to error list
msg = "Container {} does not exist".format(config[CONF_CONTAINER])
self._handleMsg(
ATTR_ALARM,
ATTR_DOCKERHOST,
ATTR_CONTAINERS,
config[CONF_NAME],
config[CONF_CONTAINER],
msg,
)
return
# The container needs to be running, otherwise no connectivity can be there
if container.status != "running":
LOGGER.error(
"%s: %s primary container %s not running",
ATTR_DOCKERHOST,
config[CONF_NAME],
config[CONF_CONTAINER],
)
# Add to error list
msg = "Container {} not running".format(config[CONF_CONTAINER])
self._handleMsg(
ATTR_ALARM,
ATTR_DOCKERHOST,
ATTR_CONTAINERS,
config[CONF_NAME],
config[CONF_CONTAINER],
msg,
)
return
pid = container.attrs["State"]["Pid"]
LOGGER.debug(
"%s: %s is running with pid=%d",
ATTR_DOCKERHOST,
config[CONF_CONTAINER],
pid,
)
# Clear possible error with primary container
msg = "Container {} alarm cleared".format(config[CONF_CONTAINER])
self._handleMsg(
ATTR_CLEAR,
ATTR_DOCKERHOST,
ATTR_CONTAINERS,
config[CONF_NAME],
config[CONF_CONTAINER],
msg,
)
# Configure errorfound to False
errorfound = False
# Go through list of containers connected to primary
if CONF_CONTAINERS in config[CONF_CLIENTS]:
host = config[CONF_HOST]
if self.isIPValid(config[CONF_HOST]):
host = config[CONF_HOST].replace(".", "\.")
# We support multiple port(s)
checklist = []
if type(config[CONF_PORT]).__name__ == "list":
for port in config[CONF_PORT]:
checklist.append(
(".*:.*\s*" + host + ":" + str(port) + "\s*ESTABLISHED$")
)
else:
checklist.append(
(
".*:.*\s*"
+ host
+ ":"
+ str(config[CONF_PORT])
+ "\s*ESTABLISHED$"
)
)
checkfor = "|".join(checklist)
LOGGER.debug("%s: Connection string '%s'", ATTR_DOCKERHOST, checkfor)
for name in config[CONF_CLIENTS][CONF_CONTAINERS]:
# Check if client container exist and is running
try:
container = client.containers.get(name)
except docker.errors.NotFound:
# Container doesn't exit, so we shouldn't continue
LOGGER.error(
"%s: %s client container %s does not exist",
ATTR_DOCKERHOST,
config[CONF_NAME],
name,
)
# Add to error list
msg = "Container {} does not exist".format(name)
self._handleMsg(
ATTR_ALARM,
ATTR_DOCKERHOST,
ATTR_CONTAINERS,
config[CONF_NAME],
name,
msg,
)
errorfound = True
continue
# The container needs to be running, otherwise no connectivity can be there
if container.status != "running":
LOGGER.error(
"%s: %s client container %s not running",
ATTR_DOCKERHOST,
config[CONF_NAME],
name,
)
# Add to error list
msg = "Container {} not running".format(name)
self._handleMsg(
ATTR_ALARM,
ATTR_DOCKERHOST,
ATTR_CONTAINERS,
config[CONF_NAME],
name,
msg,
)
errorfound = True
continue
pid = container.attrs["State"]["Pid"]
LOGGER.debug(
"%s: %s is running with pid=%d", ATTR_DOCKERHOST, name, pid
)
# Check if we have connectivity, we go in their namespace
# With docker this is *only* possible through namespace and shell,
# there doesn't seem to be a simple python option
with Namespace(pid, "net"):
try:
netstatparam = "-a" if config[CONF_DNS] else "-na"
outp = sh.egrep(
sh.netstat(netstatparam, _tty_out=False), checkfor
)
except sh.ErrorReturnCode_1:
# Not found, so no connection
LOGGER.error(
"%s: container %s not connected %s",
ATTR_DOCKERHOST,
name,
config[CONF_NAME],
)
msg = "Container {} not connected to {}".format(
config[CONF_CONTAINER], config[CONF_NAME]
)
self._handleMsg(
ATTR_ALARM,
ATTR_DOCKERHOST,
ATTR_CONTAINERS,
config[CONF_NAME],
name,
msg,
)
errorfound = True
continue
except sh.ErrorReturnCode as e:
# Not good, shouldn't happen
LOGGER.error(
"%s: container %s returned an error with checkfor='%s'. msg='%s'",
ATTR_DOCKERHOST,
name,
checkfor,
str(e),
)
msg = "Container {} not connected to {} (RC>1)".format(
config[CONF_CONTAINER], config[CONF_NAME]
)
self._handleMsg(
ATTR_ALARM,
ATTR_DOCKERHOST,
ATTR_CONTAINERS,
config[CONF_NAME],
name,
msg,
)
errorfound = True
continue
# RC=0, should be good
#if outp.count("\n") > 1:
# LOGGER.error(
# "%s: | |
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# file: base_doc.py
# License: LICENSE.TXT
#
# Copyright (c) 2011, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
import re
from definition_items import DefinitionItem
from line_functions import is_empty, get_indent, fix_backspace, NEW_LINE
underline_regex = re.compile(r'\s*\S+\s*\Z')
#------------------------------------------------------------------------------
# Classes
#------------------------------------------------------------------------------
class BaseDoc(object):
"""Base abstract docstring refactoring class.
The class' main purpose is to parse the docstring and find the
sections that need to be refactored. Subclasses should provide
the methods responsible for refactoring the sections.
Attributes
----------
docstring : list
A list of strings (lines) that holds docstrings
index : int
The current zero-based line number of the docstring that is currently
processed.
headers : dict
The sections that the class will refactor. Each entry in the
dictionary should have as key the name of the section in the
form that it appears in the docstrings. The value should be
the postfix of the method, in the subclasses, that is
responsible for refactoring (e.g. {'Methods': 'method'}).
    BaseDoc also provides a number of methods that operate on the docstring to
    help with the refactoring. This is necessary because the docstring has to
    change in place, and thus it is better to leave the docstring manipulation
    to the class methods instead of accessing the lines directly.

    """
def __init__(self, lines, headers=None):
""" Initialize the class
The method setups the class attributes and starts parsing the
docstring to find and refactor the sections.
Arguments
---------
lines : list of strings
The docstring to refactor
headers : dict
The sections for which the class has custom refactor methods.
Each entry in the dictionary should have as key the name of
the section in the form that it appears in the docstrings.
The value should be the postfix of the method, in the
subclasses, that is responsible for refactoring (e.g.
{'Methods': 'method'}).
"""
try:
self._docstring = lines.splitlines()
except AttributeError:
self._docstring = lines
self.headers = {} if headers is None else headers
self.bookmarks = []
def parse(self):
""" Parse the docstring.
The docstring is parsed for sections. If a section is found then
the corresponding refactoring method is called.
"""
self.index = 0
self.seek_to_next_non_empty_line()
while not self.eod:
header = self.is_section()
if header:
self._refactor(header)
else:
self.index += 1
self.seek_to_next_non_empty_line()
    def _refactor(self, header):
        """Call the heading refactor method.

        The two header lines (title and underline) are removed from the
        docstring and the refactoring is dispatched to the appropriate
        method. The method name is built as ``_refactor_<postfix>`` where
        ``<postfix>`` is ``self.headers[header]``; when there is no custom
        entry for the section, ``self._refactor_header()`` is called with
        the found header name as input. The refactored lines are inserted
        back at the current position and the index moved past them.

        """
        self.remove_lines(self.index, 2)  # Remove header (title + underline)
        self.remove_if_empty(self.index)  # Remove space after header
        refactor_postfix = self.headers.get(header, 'header')
        method_name = ''.join(('_refactor_', refactor_postfix))
        method = getattr(self, method_name)
        lines = method(header)
        self.insert_and_move(lines, self.index)
def _refactor_header(self, header):
""" Refactor the header section using the rubric directive.
The method has been tested and supports refactoring single word
headers, two word headers and headers that include a backslash
''\''.
Arguments
---------
header : string
The header string to use with the rubric directive.
"""
header = fix_backspace(header)
directive = '.. rubric:: {0}'.format(header)
lines = []
lines += [directive, NEW_LINE]
return lines
    def extract_items(self, item_class=None):
        """ Extract the definition items from a docstring.

        Parse the items in the description of a section into items of the
        provided class type. Given a DefinitionItem or a subclass defined by
        the ``item_class`` parameter. Starting from the current index position,
        the method checks if in the next two lines a valid header exists.
        If successful, then the lines that belong to the item description
        block (i.e. header + definition) are popped out from the docstring
        and passed to the ``item_class`` parser and create an instance of
        ``item_class``.

        The process is repeated until there is no compatible ``item_class``
        found or we run out of docstring. Then the method returns a list of
        item_class instances.

        The exit conditions allow for two valid section item layouts:

        1. No lines between items::

            <header1>
                <description1>

                <more description>

            <header2>
                <description2>

        2. One line between items::

            <header1>
                <description1>

                <more description>

            <header2>
                <description2>

        Arguments
        ---------
        item_class : DefinitionItem
            A DefinitionItem or a subclass. This argument is used to check
            if a line in the docstring is a valid item and to parse the
            individual list items in the section. When ``None`` (default) the
            base DefinitionItem class is used.

        Returns
        -------
        parameters : list
            List of the parsed item instances of ``item_class`` type.

        """
        item_type = DefinitionItem if (item_class is None) else item_class
        is_item = item_type.is_definition
        item_blocks = []
        # Accept a header on the current line, or after one empty line
        # (layouts 1 and 2 in the docstring above).
        while (not self.eod) and \
                (is_item(self.peek()) or is_item(self.peek(1))):
            self.remove_if_empty(self.index)
            item_blocks.append(self.get_next_block())
        items = [item_type.parse(block) for block in item_blocks]
        return items
    def get_next_block(self):
        """ Get the next item block from the docstring.

        The method reads (and pops) the next item block in the docstring.
        The first line is assumed to be the DefinitionItem header and the
        following lines to belong to the definition::

            <header line>
                <definition>

        The end of the field is designated by a line with the same indent
        as the field header or two empty lines are found in sequence.

        """
        item_header = self.pop()
        # Definition lines must be indented deeper than the header.
        sub_indent = get_indent(item_header) + ' '
        block = [item_header]
        while not self.eod:
            peek_0 = self.peek()
            peek_1 = self.peek(1)
            # Stop on: an empty line not followed by an indented line (end of
            # item), or a non-empty line at header indent (the next header).
            if is_empty(peek_0) and not peek_1.startswith(sub_indent) \
                    or not is_empty(peek_0) \
                    and not peek_0.startswith(sub_indent):
                break
            else:
                line = self.pop()
                block += [line.rstrip()]
        return block
    def is_section(self):
        """ Check if the current line defines a section.

        A section is a header line followed by an rst underline made of
        ``-`` or ``=`` characters matching the header's width. Returns the
        stripped header text when a section is found, False otherwise.

        .. todo:: split and cleanup this method.

        """
        if self.eod:
            return False

        header = self.peek()
        line2 = self.peek(1)
        # check for underline type format (a single run of non-space chars)
        underline = underline_regex.match(line2)
        if underline is None:
            return False
        # is the next line an rst section underline?
        striped_header = header.rstrip()
        # Build the expected underline by replacing the header's letters and
        # backslashes with '-' (or '=').
        # NOTE(review): the '|\b\s' alternative also replaces a space at a
        # word boundary (two-word headers); confirm behaviour for headers
        # containing digits or punctuation.
        expected_underline1 = re.sub(r'[A-Za-z\\]|\b\s', '-', striped_header)
        expected_underline2 = re.sub(r'[A-Za-z\\]|\b\s', '=', striped_header)
        if ((underline.group().rstrip() == expected_underline1) or
                (underline.group().rstrip() == expected_underline2)):
            return header.strip()
        else:
            return False
def insert_lines(self, lines, index):
""" Insert refactored lines
Arguments
---------
new_lines : list
The list of lines to insert
index : int
Index to start the insertion
"""
docstring = self.docstring
for line in reversed(lines):
docstring.insert(index, line)
def insert_and_move(self, lines, index):
""" Insert refactored lines and move current index to the end.
"""
self.insert_lines(lines, index)
self.index += len(lines)
def seek_to_next_non_empty_line(self):
""" Goto the next non_empty line.
"""
docstring = self.docstring
for line in docstring[self.index:]:
if not is_empty(line):
break
self.index += 1
def get_next_paragraph(self):
""" Get the next paragraph designated by an empty line.
"""
lines = []
while (not self.eod) and (not is_empty(self.peek())):
line = self.pop()
lines.append(line)
return lines
def read(self):
""" Return the next line and advance the index.
"""
index = self.index
line = self._docstring[index]
self.index += 1
return line
def remove_lines(self, index, count=1):
""" Removes the lines from the docstring
"""
docstring = self.docstring
del docstring[index:(index + count)]
def remove_if_empty(self, index=None):
""" Remove the line from the docstring if it is empty.
"""
if is_empty(self.docstring[index]):
self.remove_lines(index)
def bookmark(self):
""" append the current index to the end of the list of bookmarks.
"""
self.bookmarks.append(self.index)
def goto_bookmark(self, bookmark_index=-1):
""" Move to bookmark.
Move the current index to the docstring line given my the
``self.bookmarks[bookmark_index]`` and remove it from the bookmark
list. Default value will pop the last entry.
Returns
-------
bookmark : int
"""
self.index = self.bookmarks[bookmark_index]
return self.bookmarks.pop(bookmark_index)
def peek(self, ahead=0):
""" Peek ahead a number of lines
The function retrieves the line that is ahead of the current
index. If the index is at the end of the list then it returns an
empty string.
Arguments
---------
ahead : int
The number of lines to look ahead.
"""
position = self.index + ahead
try:
line = self.docstring[position]
except IndexError:
line = ''
return line
def pop(self, index=None):
""" Pop a line from the dostrings.
"""
index = self.index if (index is None) else index
return self._docstring.pop(index)
@property
def eod(self):
""" End of docstring.
"""
return self.index >= len(self.docstring)
@property
def | |
rpm for each motor
rpm = super().before_step(action)
# Determine disturbance force.
disturb_force = None
passive_disturb = "dynamics" in self.disturbances
adv_disturb = self.adversary_disturbance == "dynamics"
if passive_disturb or adv_disturb:
disturb_force = np.zeros(2)
if passive_disturb:
disturb_force = self.disturbances["dynamics"].apply(
disturb_force, self)
if adv_disturb and self.adv_action is not None:
disturb_force = disturb_force + self.adv_action
# Clear the adversary action, wait for the next one.
self.adv_action = None
# Construct full (3D) disturbance force.
if disturb_force is not None:
if self.QUAD_TYPE == QuadType.ONE_D:
# Only disturb on z direction.
disturb_force = [0, 0, float(disturb_force)]
elif self.QUAD_TYPE == QuadType.TWO_D:
# Only disturb on x-z plane.
disturb_force = [
float(disturb_force[0]), 0,
float(disturb_force[1])
]
else:
raise NotImplementedError(
"[ERROR] in Quadrotor._advance_simulation(), disturb force for quad 3D is not available."
)
# Advance the simulation.
super()._advance_simulation(rpm, disturb_force)
# Standard Gym return.
obs = self._get_observation()
rew = self._get_reward()
done = self._get_done()
info = self._get_info()
obs, rew, done, info = super().after_step(obs, rew, done, info)
self._update_sis_info(obs, info)
info.update(self.sis_info)
return obs, rew, done, info
####################################################
# method for update energy infomartion
def _update_sis_info(self, obs, info):
assert len(info['constraint_values']) == 2
dist2ub = info['constraint_values'][1] # z - 1.5
dist2lb = info['constraint_values'][0] # 0.5 - z
assert dist2ub == obs[2] - 1.5 and dist2lb == 0.5 - obs[2]
dot_dist2ub = obs[3] # dot_z
dot_dist2lb = -obs[3] # -dot_z
sis_info_tp1 = [(dist2ub, dot_dist2ub), (dist2lb, dot_dist2lb)]
sis_info_t = self.sis_info.get('sis_data', [])
assert sis_info_t.shape == np.array(sis_info_tp1).shape
self.sis_info.update(dict(
sis_data=np.array(sis_info_tp1, dtype=np.float32),
sis_trans=np.array((sis_info_t, sis_info_tp1), dtype=np.float32)
))
####################################################
def render(self, mode='human'):
"""Retrieves a frame from PyBullet rendering.
Args:
mode (str): Unused.
Returns:
ndarray: A multidimensional array with the RGB frame captured by PyBullet's camera.
"""
[w, h, rgb, dep, seg] = p.getCameraImage(width=self.RENDER_WIDTH,
height=self.RENDER_HEIGHT,
shadow=1,
viewMatrix=self.CAM_VIEW,
projectionMatrix=self.CAM_PRO,
renderer=p.ER_TINY_RENDERER,
flags=p.ER_SEGMENTATION_MASK_OBJECT_AND_LINKINDEX,
physicsClientId=self.PYB_CLIENT)
# Image.fromarray(np.reshape(rgb, (h, w, 4)), 'RGBA').show()
return np.reshape(rgb, (h, w, 4))
    def _setup_symbolic(self):
        """Creates symbolic (CasADi) models for dynamics, observation, and cost.

        Builds a planar (TWO_D) or vertical-only (ONE_D) point-mass model
        with thrust inputs and a quadratic cost, and stores the result in
        ``self.symbolic``.

        Returns:
            SymbolicModel: CasADi symbolic model of the environment.
        """
        m, g, l = self.MASS, self.GRAVITY_ACC, self.L
        Iyy = self.J[1, 1]
        dt = self.CTRL_TIMESTEP
        # Define states.
        z = cs.MX.sym('z')
        z_dot = cs.MX.sym('z_dot')
        # NOTE(review): if QUAD_TYPE is neither ONE_D nor TWO_D, X/U/nx/nu are
        # never defined and the cost section below raises NameError — confirm
        # unsupported quad types are rejected earlier (as in _set_action_space).
        if self.QUAD_TYPE == QuadType.ONE_D:
            nx, nu = 2, 1
            # Define states.
            X = cs.vertcat(z, z_dot)
            # Define input thrust.
            T = cs.MX.sym('T')
            U = cs.vertcat(T)
            # Define dynamics equations.
            X_dot = cs.vertcat(z_dot, T / m - g)
            # Define observation equation.
            Y = cs.vertcat(z, z_dot)
        elif self.QUAD_TYPE == QuadType.TWO_D:
            nx, nu = 6, 2
            # Define states.
            x = cs.MX.sym('x')
            x_dot = cs.MX.sym('x_dot')
            theta = cs.MX.sym('theta')
            theta_dot = cs.MX.sym('theta_dot')
            X = cs.vertcat(x, x_dot, z, z_dot, theta, theta_dot)
            # Define input thrusts.
            T1 = cs.MX.sym('T1')
            T2 = cs.MX.sym('T2')
            U = cs.vertcat(T1, T2)
            # Define dynamics equations.
            # NOTE(review): the sqrt(2) factor presumably projects the arm
            # length l onto the pitch axis for an X configuration — confirm.
            X_dot = cs.vertcat(x_dot,
                               cs.sin(theta) * (T1 + T2) / m, z_dot,
                               cs.cos(theta) * (T1 + T2) / m - g, theta_dot,
                               l * (T2 - T1) / Iyy / np.sqrt(2))
            # Define observation.
            Y = cs.vertcat(x, x_dot, z, z_dot, theta, theta_dot)
        # Define cost (quadratic form).
        Q = cs.MX.sym('Q', nx, nx)
        R = cs.MX.sym('R', nu, nu)
        Xr = cs.MX.sym('Xr', nx, 1)
        Ur = cs.MX.sym('Ur', nu, 1)
        cost_func = 0.5 * (X - Xr).T @ Q @ (X - Xr) + 0.5 * (U - Ur).T @ R @ (U - Ur)
        # Define dynamics and cost dictionaries.
        dynamics = {"dyn_eqn": X_dot, "obs_eqn": Y, "vars": {"X": X, "U": U}}
        cost = {
            "cost_func": cost_func,
            "vars": {
                "X": X,
                "U": U,
                "Xr": Xr,
                "Ur": Ur,
                "Q": Q,
                "R": R
            }
        }
        # Setup symbolic model.
        self.symbolic = SymbolicModel(dynamics=dynamics, cost=cost, dt=dt)
def _set_action_space(self):
"""Returns the action space of the environment.
Returns:
gym.spaces: The quadrotor environment's action space, of size 1 or 2 depending on QUAD_TYPE.
"""
# Define action/input dimension, labels, and units.
if self.QUAD_TYPE == QuadType.ONE_D:
action_dim = 1
self.ACTION_LABELS = ['T']
self.ACTION_UNITS = ['N'] if not self.NORMALIZED_RL_ACTION_SPACE else ['-']
elif self.QUAD_TYPE == QuadType.TWO_D:
action_dim = 2
self.ACTION_LABELS = ['T1', 'T2']
self.ACTION_UNITS = ['N', 'N'] if not self.NORMALIZED_RL_ACTION_SPACE else ['-', '-']
else:
raise NotImplementedError(
"[ERROR] in Quadrotor._set_action_space(), quad_type not supported."
)
if self.NORMALIZED_RL_ACTION_SPACE:
# normalized thrust (around hover thrust)
self.hover_thrust = self.GRAVITY_ACC * self.MASS / action_dim
self.action_space = spaces.Box(low=-np.ones(action_dim),
high=np.ones(action_dim),
dtype=np.float32)
else:
# direct thrust control
self.action_space = spaces.Box(low=np.zeros(action_dim),
high=self.MAX_THRUST * np.ones(action_dim),
dtype=np.float32)
def _set_observation_space(self):
"""Returns the observation space of the environment.
Returns:
gym.spaces: The bounded observation (state) space, of size 2 or 6 depending on QUAD_TYPE.
"""
self.x_threshold = 2
self.z_threshold = 3
self.theta_threshold_radians = 85 * math.pi / 180
# Define obs/state bounds, labels and units.
if self.QUAD_TYPE == QuadType.ONE_D:
# obs/state = {z, z_dot}.
low = np.array([self.GROUND_PLANE_Z * 2, -np.finfo(np.float32).max])
high = np.array([self.z_threshold * 2, np.finfo(np.float32).max])
self.STATE_LABELS = ['z', 'z_dot']
self.STATE_UNITS = ['m', 'm/s']
elif self.QUAD_TYPE == QuadType.TWO_D:
# obs/state = {x, x_dot, z, z_dot, theta, theta_dot}.
low = np.array([
-self.x_threshold * 2, -np.finfo(np.float32).max,
self.GROUND_PLANE_Z * 2, -np.finfo(np.float32).max,
-self.theta_threshold_radians * 2, -np.finfo(np.float32).max
])
high = np.array([
self.x_threshold * 2, np.finfo(np.float32).max,
self.z_threshold * 2, np.finfo(np.float32).max,
self.theta_threshold_radians * 2, np.finfo(np.float32).max
])
self.STATE_LABELS = ['x', 'x_dot', 'z', 'z_dot', 'theta', 'theta_dot']
self.STATE_UNITS = ['m', 'm/s', 'm', 'm/s', 'rad', 'rad/s']
# Define underlying state space in dynamics transition
self.state_space = spaces.Box(low=low, high=high, dtype=np.float32)
# Concatenate goal info for RL
if self.COST == Cost.RL_REWARD and self.TASK == Task.TRAJ_TRACKING:
# include future goal state(s)
# e.g. horizon=1, obs = {state, state_target}
mul = 1 + self.obs_goal_horizon
low = np.concatenate([low] * mul)
high = np.concatenate([high] * mul)
elif self.COST == Cost.RL_REWARD and self.TASK == Task.STABILIZATION:
low = np.concatenate([low] * 2)
high = np.concatenate([high] * 2)
# Define obs space exposed to the controller
# Note obs space is often different to state space for RL (with additional task info)
self.observation_space = spaces.Box(low=low, high=high, dtype=np.float32)
    def _preprocess_control(self, action):
        """Converts the action passed to .step() into motors' RPMs (ndarray of shape (4,)).

        Clips the raw action to the action space, applies action disturbances
        and adversarial perturbations, then converts the thrusts into PWM and
        finally motor RPM commands.

        Args:
            action (ndarray): The raw action input, of size 1 or 2 depending on QUAD_TYPE.

        Returns:
            ndarray: The motors RPMs to apply to the quadrotor.
        """
        if self.NORMALIZED_RL_ACTION_SPACE:
            # rescale action to around hover thrust
            action = np.clip(action, self.action_space.low, self.action_space.high)
            thrust = (1 + self.norm_act_scale * action) * self.hover_thrust
        else:
            thrust = np.clip(action, self.action_space.low, self.action_space.high)
        # NOTE(review): in the normalized branch `thrust` differs from the raw
        # action by construction, so with VERBOSE this warning prints even when
        # no clipping occurred — confirm intended.
        if not np.array_equal(thrust, np.array(action)) and self.VERBOSE:
            print("[WARNING]: action was clipped in Quadrotor._preprocess_control().")
        self.current_preprocessed_action = thrust
        # Apply disturbances.
        if "action" in self.disturbances:
            thrust = self.disturbances["action"].apply(thrust, self)
        if self.adversary_disturbance == "action":
            thrust = thrust + self.adv_action
        # convert to quad motor rpm commands
        pwm = cmd2pwm(thrust, self.PWM2RPM_SCALE, self.PWM2RPM_CONST, self.KF, self.MIN_PWM, self.MAX_PWM)
        rpm = pwm2rpm(pwm, self.PWM2RPM_SCALE, self.PWM2RPM_CONST)
        return rpm
    def _get_observation(self):
        """Returns the current observation (state) of the environment.

        Reads the full drone state from the simulator, reduces it to the
        2- or 6-dimensional task state, applies observation disturbances,
        and (for RL reward tasks) appends goal state information.

        Returns:
            ndarray: The state of the quadrotor, of size 2 or 6 depending on QUAD_TYPE
                (plus appended goal states for RL tasks).
        """
        full_state = self._get_drone_state_vector(0)
        # Split the flat state vector into position, quaternion, rpy,
        # linear velocity, angular velocity, and the remainder.
        pos, _, rpy, vel, ang_v, _ = np.split(full_state, [3, 7, 10, 13, 16])
        if self.QUAD_TYPE == QuadType.ONE_D:
            # {z, z_dot}.
            self.state = np.hstack([pos[2], vel[2]]).reshape((2,))
        elif self.QUAD_TYPE == QuadType.TWO_D:
            # {x, x_dot, z, z_dot, theta, theta_dot}.
            self.state = np.hstack(
                [pos[0], vel[0], pos[2], vel[2], rpy[1], ang_v[1]]
            ).reshape((6,))
        # if not np.array_equal(self.state,
        #                       np.clip(self.state, self.observation_space.low, self.observation_space.high)):
        #     if self.GUI and self.VERBOSE:
        #         print(
        #             "[WARNING]: observation was clipped in Quadrotor._get_observation()."
        #         )
        # Apply observation disturbance.
        obs = deepcopy(self.state)
        if "observation" in self.disturbances:
            obs = self.disturbances["observation"].apply(obs, self)
        # Concatenate goal info (goal state(s)) for RL
        if self.COST == Cost.RL_REWARD and self.TASK == Task.TRAJ_TRACKING:
            # increment by 1 since counter is post-updated after _get_observation(),
            # obs should contain goal state desired for the next state
            next_step = self.ctrl_step_counter + 1 + self.start_index
            # Wrap the goal indices around the trajectory length.
            wp_idx = [
                (next_step + i) % self.X_GOAL.shape[0]
                for i in range(self.obs_goal_horizon)
            ]
            # wp_idx = [
            #     min(next_step + i, self.X_GOAL.shape[0]-1)
            #     for i in range(self.obs_goal_horizon)
            # ]
            goal_state = self.X_GOAL[wp_idx].flatten()
            obs = np.concatenate([obs, goal_state])
        elif self.COST == Cost.RL_REWARD and self.TASK == Task.STABILIZATION:
            goal_state = self.X_GOAL.flatten()
            obs = np.concatenate([obs, goal_state])
        return obs
def _get_reward(self):
"""Computes the current step's reward value.
Returns:
float: The evaluated reward/cost.
"""
| |
"""
depends = []
"""List of other action classes to be executed before this one.
The ``depends`` class attribute contains a list of other action
classes that need to be executed before this one is. Actions which
depend on another will be executed after those actions are
executed.
Omit if you don't care about the order.
"""
group_class = None
"""Action class to group with.
This class attribute can be supplied with the class of another
action that this action should be grouped with. Only actions in
the same group can be in conflict. Actions in the same group share
the ``config`` and ``before`` and ``after`` of the action class
indicated by ``group_class``.
By default an action only groups with others of its same class.
"""
filter_name = {}
"""Map of names used in query filter to attribute names.
If for instance you want to be able to filter the attribute
``_foo`` using ``foo`` in the query, you can map ``foo`` to
``_foo``::
filter_name = {
'foo': '_foo'
}
If a filter name is omitted the filter name is assumed to be the
same as the attribute name.
"""
def filter_get_value(self, name):
"""A function to get the filter value.
Takes two arguments, action and name. Should return the
value on the filter.
This function is called if the name cannot be determined by
looking for the attribute directly using
:attr:`Action.filter_name`.
The function should return :attr:`NOT_FOUND` if no value with that
name can be found.
For example if the filter values are stored on ``key_dict``::
def filter_get_value(self, name):
return self.key_dict.get(name, dectate.NOT_FOUND)
:param name: the name of the filter.
:return: the value to filter on.
"""
return NOT_FOUND
filter_compare = {}
"""Map of names used in query filter to comparison functions.
If for instance you want to be able check whether the value of
``model`` on the action is a subclass of the value provided in the
filter, you can provide it here::
filter_compare = {
'model': issubclass
}
The default filter compare is an equality comparison.
"""
filter_convert = {}
"""Map of names to convert functions.
The query tool that can be generated for a Dectate-based
application uses this information to parse filter input into
actual objects. If omitted it defaults to passing through the
string unchanged.
A conversion function takes a string as input and outputs a Python
object. The conversion function may raise ``ValueError`` if the
conversion failed.
A useful conversion function is provided that can be used to refer
to an object in a module using a dotted name:
:func:`convert_dotted_name`.
"""
# the directive that was used gets stored on the instance
directive = None
# this is here to make update_wrapper work even when an __init__
# is not provided by the subclass
def __init__(self):
pass
@property
def code_info(self):
"""Info about where in the source code the action was invoked.
Is an instance of :class:`CodeInfo`.
Can be ``None`` if action does not have an associated directive
but was created manually.
"""
if self.directive is None:
return None
return self.directive.code_info
def _log(self, configurable, obj):
"""Log this directive for configurable given configured obj."""
if self.directive is None:
return
self.directive.log(configurable, obj)
def get_value_for_filter(self, name):
"""Get value. Takes into account ``filter_name``, ``filter_get_value``
Used by the query system. You can override it if your action
has a different way storing values altogether.
:param name: the filter name to get the value for.
:return: the value to filter on.
"""
actual_name = self.filter_name.get(name, name)
value = getattr(self, actual_name, NOT_FOUND)
if value is not NOT_FOUND:
return value
if self.filter_get_value is None:
return value
return self.filter_get_value(name)
@classmethod
def _get_config_kw(cls, configurable):
"""Get the config objects set up for this configurable into a dict.
This dict can then be passed as keyword parameters (using ``**``)
into the relevant methods such as :meth:`Action.perform`.
:param configurable: the configurable object to get the config
dict for.
:return: a dict of config values.
"""
result = {}
config = configurable.config
group_class = cls.group_class
if group_class is None:
group_class = cls
# check if we want to have an app_class argument
if group_class.app_class_arg:
result["app_class"] = configurable.app_class
# add the config items themselves
for name, factory in group_class.config.items():
result[name] = getattr(config, name)
return result
@abc.abstractmethod
def identifier(self, **kw):
"""Returns an immutable that uniquely identifies this config.
Needs to be implemented by the :class:`Action` subclass.
Used for overrides and conflict detection.
If two actions in the same group have the same identifier in
the same configurable, those two actions are in conflict and a
:class:`ConflictError` is raised during :func:`commit`.
If an action in an extending configurable has the same
identifier as the configurable being extended, that action
overrides the original one in the extending configurable.
:param ``**kw``: a dictionary of configuration objects as specified
by the ``config`` class attribute.
:return: an immutable value uniquely identifying this action.
"""
def discriminators(self, **kw):
"""Returns an iterable of immutables to detect conflicts.
Can be implemented by the :class:`Action` subclass.
Used for additional configuration conflict detection.
:param ``**kw``: a dictionary of configuration objects as specified
by the ``config`` class attribute.
:return: an iterable of immutable values.
"""
return []
@abc.abstractmethod
def perform(self, obj, **kw):
"""Do whatever configuration is needed for ``obj``.
Needs to be implemented by the :class:`Action` subclass.
Raise a :exc:`DirectiveError` to indicate that the action
cannot be performed due to incorrect configuration.
:param obj: the object that the action should be performed
for. Typically a function or a class object.
:param ``**kw``: a dictionary of configuration objects as specified
by the ``config`` class attribute.
"""
@staticmethod
def before(**kw):
"""Do setup just before actions in a group are performed.
Can be implemented as a static method by the :class:`Action`
subclass.
:param ``**kw``: a dictionary of configuration objects as specified
by the ``config`` class attribute.
"""
pass
@staticmethod
def after(**kw):
"""Do setup just after actions in a group are performed.
Can be implemented as a static method by the :class:`Action`
subclass.
:param ``**kw``: a dictionary of configuration objects as specified
by the ``config`` class attribute.
"""
pass
class Composite(metaclass=abc.ABCMeta):
"""A composite configuration action.
Base class of composite actions.
Composite actions are very simple: implement the ``action``
method and return a iterable of actions in there.
"""
query_classes = []
"""A list of actual action classes that this composite can generate.
This is to allow the querying of composites. If the list if empty
(the default) the query system refuses to query the
composite. Note that if actions of the same action class can also
be generated in another way they are in the same query result.
"""
filter_convert = {}
"""Map of names to convert functions.
The query tool that can be generated for a Dectate-based
application uses this information to parse filter input into
actual objects. If omitted it defaults to passing through the
string unchanged.
A conversion function takes a string as input and outputs a Python
object. The conversion function may raise ``ValueError`` if the
conversion failed.
A useful conversion function is provided that can be used to refer
to an object in a module using a dotted name:
:func:`convert_dotted_name`.
"""
# this is here to make update_wrapper work even when an __init__
# is not provided by the subclass
def __init__(self):
pass
@property
def code_info(self):
"""Info about where in the source code the action was invoked.
Is an instance of :class:`CodeInfo`.
Can be ``None`` if action does not have an associated directive
but was created manually.
"""
if self.directive is None:
return None
return self.directive.code_info
@abc.abstractmethod
def actions(self, obj):
"""Specify a iterable of actions to perform for ``obj``.
The iteratable should yield ``action, obj`` tuples,
where ``action`` is an instance of
class :class:`Action` or :class:`Composite` and ``obj``
is the object to perform the action with.
Needs to be implemented by the :class:`Composite` subclass.
:param obj: | |
<gh_stars>1-10
#!/usr/bin/env python3
# coding=utf-8
"""
@author: guoyanfeng
@software: PyCharm
@time: 18-12-25 下午3:56
"""
import atexit
from collections.abc import MutableMapping, MutableSequence
from typing import Dict, List, Optional, Tuple, Union
import aelog
# noinspection PyProtectedMember
from bson import ObjectId
from bson.errors import BSONError
from pymongo import MongoClient as MongodbClient
# noinspection PyPackageRequirements
from pymongo.errors import ConnectionFailure, DuplicateKeyError, InvalidName, PyMongoError
from eclients.utils import verify_message
from .err_msg import mongo_msg
from .exceptions import FuncArgsError, HttpError, MongoDuplicateKeyError, MongoError, MongoInvalidNameError
__all__ = ("MongoClient",)
class MongoClient(object):
"""
mongo 工具类
"""
def __init__(self, app=None, *, username: str = "mongo", passwd: str = None, host: str = "127.0.0.1",
port: int = 27017, dbname: str = None, pool_size: int = 50, **kwargs):
"""
mongo 工具类
Args:
app: app应用
host:mongo host
port:mongo port
dbname: database name
username: mongo user
passwd: <PASSWORD>
pool_size: mongo pool size
"""
self.client = None
self.db = None
self.username = username
self.passwd = <PASSWORD>
self.host = host
self.port = port
self.dbname = dbname
self.pool_size = pool_size
self.message = kwargs.get("message", {})
self.use_zh = kwargs.get("use_zh", True)
self.msg_zh = None
if app is not None:
self.init_app(app, username=self.username, passwd=self.passwd, host=self.host, port=self.port,
dbname=self.dbname, pool_size=self.pool_size, **kwargs)
    # noinspection DuplicatedCode
    def init_app(self, app, *, username: str = None, passwd: str = None, host: str = None, port: int = None,
                 dbname: str = None, pool_size: int = None, **kwargs):
        """
        Initialize the mongo instance from an app object.

        For every setting the resolution order is: explicit argument, then
        ``app.config`` value, then the instance default from ``__init__``.

        Args:
            app: application object exposing a ``config`` mapping
            host: mongo host
            port: mongo port
            dbname: database name
            username: mongo user
            passwd: mongo password
            pool_size: mongo pool size
        """
        username = username or app.config.get("ECLIENTS_MONGO_USERNAME", None) or self.username
        passwd = passwd or app.config.get("ECLIENTS_MONGO_PASSWD", None) or self.passwd
        host = host or app.config.get("ECLIENTS_MONGO_HOST", None) or self.host
        port = port or app.config.get("ECLIENTS_MONGO_PORT", None) or self.port
        dbname = dbname or app.config.get("ECLIENTS_MONGO_DBNAME", None) or self.dbname
        pool_size = pool_size or app.config.get("ECLIENTS_MONGO_POOL_SIZE", None) or self.pool_size
        message = kwargs.get("message") or app.config.get("ECLIENTS_MONGO_MESSAGE", None) or self.message
        use_zh = kwargs.get("use_zh") or app.config.get("ECLIENTS_MONGO_MSGZH", None) or self.use_zh
        # Coerce non-None passwords to str (ints from config are common).
        passwd = passwd if passwd is None else str(passwd)
        self.message = verify_message(mongo_msg, message)
        self.msg_zh = "msg_zh" if use_zh else "msg_en"
        # Open the connection with the resolved settings.
        self.open_connection(host, port, username, passwd, dbname, pool_size)

        # NOTE(review): a fresh handler is registered on *every* call to
        # init_app; repeated initialization stacks handlers — confirm
        # this is intended.
        @atexit.register
        def close_connection():
            """
            Release all connections in the mongo pool at interpreter exit.
            """
            if self.client:
                self.client.close()
    # noinspection DuplicatedCode
    def init_engine(self, *, username: str = None, passwd: str = None, host: str = None, port: int = None,
                    dbname: str = None, pool_size: int = None, **kwargs):
        """
        Initialize the mongo instance without an app object.

        Explicit arguments win over the instance defaults from ``__init__``.

        Args:
            host: mongo host
            port: mongo port
            dbname: database name
            username: mongo user
            passwd: mongo password
            pool_size: mongo pool size
        """
        username = username or self.username
        passwd = passwd or self.passwd
        host = host or self.host
        port = port or self.port
        dbname = dbname or self.dbname
        pool_size = pool_size or self.pool_size
        message = kwargs.get("message") or self.message
        use_zh = kwargs.get("use_zh") or self.use_zh
        # Coerce non-None passwords to str (ints from config are common).
        passwd = passwd if passwd is None else str(passwd)
        self.message = verify_message(mongo_msg, message)
        self.msg_zh = "msg_zh" if use_zh else "msg_en"
        # Open the connection with the resolved settings.
        self.open_connection(host, port, username, passwd, dbname, pool_size)

        # NOTE(review): as in init_app, every call registers another atexit
        # handler — confirm this is intended.
        @atexit.register
        def close_connection():
            """
            Release all connections in the mongo pool at interpreter exit.
            """
            if self.client:
                self.client.close()
def open_connection(self, host: str, port: int, username: str, passwd: str, dbname: str, pool_size: int):
"""
初始化连接
Args:
host: host
port: port
username: username
passwd: <PASSWORD>
dbname: database name
pool_size: pool size
Returns:
"""
try:
self.client = MongodbClient(host, port, maxPoolSize=pool_size, username=username, password=<PASSWORD>)
self.db = self.client.get_database(name=dbname)
except ConnectionFailure as e:
aelog.exception(f"Mongo connection failed host={host} port={port} error:{str(e)}")
raise MongoError(f"Mongo connection failed host={host} port={port} error:{str(e)}")
except InvalidName as e:
aelog.exception(f"Invalid mongo db name {dbname} {str(e)}")
raise MongoInvalidNameError(f"Invalid mongo db name {dbname} {str(e)}")
except PyMongoError as err:
aelog.exception(f"Mongo DB init failed! error: {str(err)}")
raise MongoError("Mongo DB init failed!") from err
def _insert_document(self, name: str, document: Union[List[Dict], Dict], insert_one: bool = True
) -> Union[Tuple[str], str]:
"""
插入一个单独的文档
Args:
name:collection name
document: document obj
insert_one: insert_one insert_many的过滤条件,默认True
Returns:
返回插入的Objectid
"""
try:
if insert_one:
result = self.db.get_collection(name).insert_one(document)
else:
result = self.db.get_collection(name).insert_many(document)
except InvalidName as e:
raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
except DuplicateKeyError as e:
raise MongoDuplicateKeyError("Duplicate key error, {}".format(e))
except PyMongoError as err:
aelog.exception("Insert one document failed, {}".format(err))
raise HttpError(400, message=mongo_msg[100][self.msg_zh])
else:
return str(result.inserted_id) if insert_one else (str(val) for val in result.inserted_ids)
def _insert_documents(self, name: str, documents: List[Dict]) -> Tuple[str]:
"""
批量插入文档
Args:
name:collection name
documents: documents obj
Returns:
返回插入的Objectid列表
"""
return self._insert_document(name, documents, insert_one=False)
def _find_document(self, name: str, query_key: Dict, filter_key: Dict = None) -> Optional[Dict]:
"""
查询一个单独的document文档
Args:
name: collection name
query_key: 查询document的过滤条件
filter_key: 过滤返回值中字段的过滤条件
Returns:
返回匹配的document或者None
"""
try:
find_data = self.db.get_collection(name).find_one(query_key, projection=filter_key)
except InvalidName as e:
raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
except PyMongoError as err:
aelog.exception("Find one document failed, {}".format(err))
raise HttpError(400, message=mongo_msg[103][self.msg_zh])
else:
if find_data and find_data.get("_id", None) is not None:
find_data["id"] = str(find_data.pop("_id"))
return find_data
def _find_documents(self, name: str, query_key: Dict, filter_key: Dict = None, limit: int = None,
skip: int = None, sort: List[Tuple] = None) -> List[Dict]:
"""
批量查询documents文档
Args:
name: collection name
query_key: 查询document的过滤条件
filter_key: 过滤返回值中字段的过滤条件
limit: 限制返回的document条数
skip: 从查询结果中调过指定数量的document
sort: 排序方式,可以自定多种字段的排序,值为一个列表的键值对, eg:[('field1', pymongo.ASCENDING)]
Returns:
返回匹配的document列表
"""
try:
find_data = []
cursor = self.db.get_collection(name).find(query_key, projection=filter_key, limit=limit, skip=skip,
sort=sort)
for doc in cursor:
if doc.get("_id", None) is not None:
doc["id"] = str(doc.pop("_id"))
find_data.append(doc)
except InvalidName as e:
raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
except PyMongoError as err:
aelog.exception("Find many documents failed, {}".format(err))
raise HttpError(400, message=mongo_msg[104][self.msg_zh])
else:
return find_data
def _find_count(self, name: str, query_key: Dict) -> int:
"""
查询documents的数量
Args:
name: collection name
query_key: 查询document的过滤条件
Returns:
返回匹配的document数量
"""
try:
return self.db.get_collection(name).count(query_key)
except InvalidName as e:
raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
except PyMongoError as err:
aelog.exception("Find many documents failed, {}".format(err))
raise HttpError(400, message=mongo_msg[104][self.msg_zh])
def _update_document(self, name: str, query_key: Dict, update_data: Dict, upsert: bool = False,
update_one: bool = True) -> Dict:
"""
更新匹配到的一个的document
Args:
name: collection name
query_key: 查询document的过滤条件
update_data: 对匹配的document进行更新的document
upsert: 没有匹配到document的话执行插入操作,默认False
update_one: update_one or update_many的匹配条件
Returns:
返回匹配的数量和修改数量的dict, eg:{"matched_count": 1, "modified_count": 1, "upserted_id":"f"}
"""
try:
if update_one:
result = self.db.get_collection(name).update_one(query_key, update_data, upsert=upsert)
else:
result = self.db.get_collection(name).update_many(query_key, update_data, upsert=upsert)
except InvalidName as e:
raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
except DuplicateKeyError as e:
raise MongoDuplicateKeyError("Duplicate key error, {}".format(e))
except PyMongoError as err:
aelog.exception("Update documents failed, {}".format(err))
raise HttpError(400, message=mongo_msg[101][self.msg_zh])
else:
return {"matched_count": result.matched_count, "modified_count": result.modified_count,
"upserted_id": str(result.upserted_id) if result.upserted_id else None}
def _update_documents(self, name: str, query_key: Dict, update_data: Dict, upsert: bool = False) -> Dict:
"""
更新匹配到的所有的document
Args:
name: collection name
query_key: 查询document的过滤条件
update_data: 对匹配的document进行更新的document
upsert: 没有匹配到document的话执行插入操作,默认False
Returns:
返回匹配的数量和修改数量的dict, eg:{"matched_count": 2, "modified_count": 2, "upserted_id":"f"}
"""
return self._update_document(name, query_key, update_data, upsert, update_one=False)
def _delete_document(self, name: str, query_key: Dict, delete_one: bool = True) -> int:
"""
删除匹配到的一个的document
Args:
name: collection name
query_key: 查询document的过滤条件
delete_one: delete_one delete_many的匹配条件
Returns:
返回删除的数量
"""
try:
if delete_one:
result = self.db.get_collection(name).delete_one(query_key)
else:
result = self.db.get_collection(name).delete_many(query_key)
except InvalidName as e:
raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
except PyMongoError as err:
aelog.exception("Delete documents failed, {}".format(err))
raise HttpError(400, message=mongo_msg[102][self.msg_zh])
else:
return result.deleted_count
def _delete_documents(self, name: str, query_key: Dict) -> int:
"""
删除匹配到的所有的document
Args:
name: collection name
query_key: 查询document的过滤条件
Returns:
返回删除的数量
"""
return self._delete_document(name, query_key, delete_one=False)
def _aggregate(self, name: str, pipline: List[Dict]) -> List[Dict]:
"""
根据pipline进行聚合查询
Args:
name: collection name
pipline: 聚合查询的pipeline,包含一个后者多个聚合命令
Returns:
返回聚合后的documents
"""
result = []
try:
for doc in self.db.get_collection(name).aggregate(pipline):
if doc.get("_id", None) is not None:
doc["id"] = str(doc.pop("_id"))
result.append(doc)
except InvalidName as e:
raise MongoInvalidNameError("Invalid collention name {} {}".format(name, e))
except PyMongoError as err:
aelog.exception("Aggregate documents failed, {}".format(err))
raise HttpError(400, message=mongo_msg[105][self.msg_zh])
else:
return result
@staticmethod
def _update_update_data(update_data: Dict) -> Dict:
"""
处理update data, 包装最常使用的操作
Args:
update_data: 需要更新的文档值
Returns:
返回处理后的update data
"""
# $set用的比较多,这里默认做个封装
if len(update_data) > 1:
update_data = {"$set": update_data}
else:
operator, doc = update_data.popitem()
pre_flag = operator.startswith("$")
update_data = {"$set" if not pre_flag else operator: {operator: doc} if not pre_flag else doc}
return update_data
@staticmethod
def _update_query_key(query_key: Dict) -> Dict:
"""
更新查询的query
Args:
query_key: 查询document的过滤条件
Returns:
返回处理后的query key
"""
query_key = dict(query_key) if query_key else {}
try:
for key, val in query_key.items():
if isinstance(val, MutableMapping):
if key != "id":
query_key[key] = {key if key.startswith("$") else f"${key}": val for key, val in val.items()}
else:
query_key["_id"] = {
key if key.startswith("$") else f"${key}": [ObjectId(val) for val in val]
if "in" in key else val for key, val in query_key.pop(key).items()}
else:
if key == "id":
query_key["_id"] = ObjectId(query_key.pop("id"))
except BSONError as e:
| |
of the sequence to find.
http://www.nslc.wustl.edu/elgin/genomics/bio4342/1archives/2006/AccReference.pdf
*keyword args*
**savetofile**, true or false to save the gb file in the same directory as sms for future use
'''
Entrez.email = "<EMAIL>"
Entrez.tool = 'SingleMoleculeSimulator'
er = False
try:
handle = Entrez.efetch(db="nucleotide", rettype="gb", retmode="text", id=accession_number)
gb_record = SeqIO.read(handle, "genbank") #using "gb" as an alias for "genbank"
handle.close()
except:
er = True
time.sleep(2)
if er == True:
print('HTTP Error: Could not find specified ascession ID')
return
self.gb_rec = gb_record
self.gb_obj = gb_record
self.sequence_str = str(gb_record.seq)
self.sequence_name = gb_record.name
if savetofile:
filename = self.sequence_name
f = open(filename, 'w')
f.write(self.gb_rec.format('gb'))
f.close()
def tau_plot(self,ssa_obj,t,tau,plot_type='contour', plot_all = False):
stime = ssa_obj.time_rec-ssa_obj.start_time
idx_t = (np.abs(stime - t)).argmin()
idx_tau = (np.abs(stime - tau)).argmin()
diff = idx_tau - idx_t
difftime = t-tau
if plot_type == 'Average':
fig,ax= plt.subplots()
for i in range(len(stime)-idx_tau,0,-4):
idx_tau = (np.abs(stime- (stime[i]+difftime ))).argmin()
Itau = ssa_obj.intensity_vec[:,idx_tau]
x,y = np.mean(ssa_obj.intensity_vec[:,idx_tau]/np.sum(ssa_obj.probe)),np.mean(ssa_obj.intensity_vec[:,idx_tau+diff]/np.sum(ssa_obj.probe))
if plot_type == 'window':
minx = 10000000
maxx = 0
miny = 10000000
maxy = 0
fig,ax= plt.subplots()
for i in range(len(stime)-idx_tau,0,-10):
idx_tau = (np.abs(stime - (idx_t+i))).argmin()
Itau = ssa_obj.intensity_vec[:,idx_tau]
x,y = np.mean(ssa_obj.intensity_vec[:,idx_tau]/np.sum(ssa_obj.probe)),np.mean(ssa_obj.intensity_vec[:,idx_tau+diff]/np.sum(ssa_obj.probe))
minx = min(np.min(x),minx)
miny = min(np.min(y),miny)
maxx = max(np.max(x),maxx)
maxy = max(np.max(y),maxy)
ax.scatter(x, y,zorder=3,color= cm.viridis_r(1.*i/len(stime)))
c_map_ax = fig.add_axes([.95, 0.1, 0.1, 0.8])
c_map_ax.axes.get_xaxis().set_visible(False)
cbar = mpl.colorbar.ColorbarBase(c_map_ax, cmap=cm.viridis_r, orientation = 'vertical')
cbar.ax.set_yticklabels(np.linspace(idx_t,stime[-1],6).astype(int) )
cbar.ax.set_title('t')
ax.plot([min(minx,miny),max(maxx,maxy)],[min(minx,miny),max(maxx,maxy)], color='red',ls='--')
ax.set_ylabel(('<I(t=' + 't + tau'+')>'))
ax.set_xlabel(('<I(t=' +'t'+')>'))
ax.set_title(( 'Average I(t) vs Average I(t+tau) for tau = ' + str(diff) ) )
if plot_type == 'density':
fig,ax= plt.subplots()
nbins = int(np.max(ssa_obj.intensity_vec/np.sum(ssa_obj.probe)))+2
x, y = ssa_obj.intensity_vec[:,idx_t]/np.sum(ssa_obj.probe),ssa_obj.intensity_vec[:,idx_tau]/np.sum(ssa_obj.probe)
k = kde.gaussian_kde([x,y])
xi, yi = np.mgrid[x.min():x.max():nbins*1j, y.min():y.max():nbins*1j]
zi = k(np.vstack([xi.flatten(), yi.flatten()]))
R = pearsonr(x,y)[0]
ax.set_title(('Density Plot' + ' R = ' + str(np.round(R,3))))
ax.pcolormesh(xi, yi, zi.reshape(xi.shape), shading='gouraud', cmap=plt.cm.viridis)
ax.contour(xi, yi, zi.reshape(xi.shape) )
ax.set_ylabel(('I(t=' + str(tau)+')'))
ax.set_xlabel(('I(t=' + str(t)+')'))
fig.show()
if plot_type == 'set_tau':
fig,ax= plt.subplots()
for i in range(len(stime)-diff-idx_t):
idx_tau = (np.abs(stime - (idx_t+i))).argmin()
plt.scatter(ssa_obj.intensity_vec[:,i]/np.sum(ssa_obj.probe), ssa_obj.intensity_vec[:,i+diff]/np.sum(ssa_obj.probe),c= cm.viridis(1.*i/len(stime)),alpha=.5 )
plt.ylabel('I(t + s)')
plt.xlabel(('I(t)'))
plt.title(('Set tau, all times s = ' + str(diff) ))
c_map_ax = fig.add_axes([.95, 0.1, 0.1, 0.8])
c_map_ax.axes.get_xaxis().set_visible(False)
cbar = mpl.colorbar.ColorbarBase(c_map_ax, cmap=cm.viridis, orientation = 'vertical')
cbar.ax.set_yticklabels(np.linspace(idx_t,stime[-1],6).astype(int) )
if plot_type == 'scatter':
if not plot_all:
plt.scatter(ssa_obj.intensity_vec[:,idx_t]/np.sum(ssa_obj.probe), ssa_obj.intensity_vec[:,idx_tau]/np.sum(ssa_obj.probe) )
plt.ylabel(('I(t=' + str(tau)+')'))
else:
for i in range(idx_t,len(stime)):
idx_tau = (np.abs(stime - (idx_t+i))).argmin()
plt.scatter(ssa_obj.intensity_vec[:,idx_t]/np.sum(ssa_obj.probe), ssa_obj.intensity_vec[:,idx_tau]/np.sum(ssa_obj.probe),c= cm.viridis(1.*i/len(stime)),alpha=.1 )
plt.ylabel('I(tau)')
plt.xlabel(('I(t=' + str(t)+')'))
if plot_type == 'contour':
fig,ax= plt.subplots()
if not plot_all:
It = ssa_obj.intensity_vec[:,idx_t]
Itau = ssa_obj.intensity_vec[:,idx_tau]
cov = np.cov(It,Itau)
eigs, v = np.linalg.eig(cov)
eigs = np.sqrt(eigs)
plt.ylabel(('I(t=' + str(tau)+')'))
colors = [cm.viridis(1.0),cm.viridis(.5),cm.viridis(0.0),cm.viridis(0.0)]
for j in xrange(3, 0,-1):
ell_artist = Ellipse(xy=(np.mean(It), np.mean(Itau)),
width=eigs[0]*j*2, height=eigs[1]*j*2,
angle=np.rad2deg(np.arccos(v[0, 0])))
ell_artist.set_linewidth(2)
ell_artist.set_edgecolor(colors[j-1])
ell_artist.set_color(colors[j-1])
ax.add_patch(ell_artist)
ax.autoscale()
ax.set_xlim(0)
ax.set_ylim(0)
ax.scatter(It, Itau,zorder=3,alpha=0.3,color='red',marker='.')
fig.show()
else:
plt.ylabel('I(tau)')
It = ssa_obj.intensity_vec[:,idx_t]
for i in range(len(stime)-idx_t,0,-10):
idx_tau = (np.abs(stime - (idx_t+i))).argmin()
Itau = ssa_obj.intensity_vec[:,idx_tau]
cov = np.cov(It,Itau)
eigs, v = np.linalg.eig(cov)
eigs = np.sqrt(eigs)
j = 3
ell_artist = Ellipse(xy=(np.mean(It), np.mean(Itau)),
width=eigs[0]*j*2, height=eigs[1]*j*2,
angle=np.rad2deg(np.arccos(v[0, 0])))
ell_artist.set_linewidth(2)
ell_artist.set_edgecolor( cm.viridis_r(1.*i/len(stime)))
ell_artist.set_color( cm.viridis_r(1.*i/len(stime)))
ax.autoscale()
ax.add_patch(ell_artist)
ax.figure.canvas.draw()
plt.xlabel(('I(t=' + str(t)+')'))
ax.set_xlim(0)
ax.set_ylim(0)
c_map_ax = fig.add_axes([.95, 0.1, 0.1, 0.8])
c_map_ax.axes.get_xaxis().set_visible(False)
cbar = mpl.colorbar.ColorbarBase(c_map_ax, cmap=cm.viridis_r, orientation = 'vertical')
cbar.ax.set_yticklabels(np.linspace(idx_t,stime[-1],6).astype(int) )
fig.show()
    def kymograph(self,ssa_obj,n_traj,bg_intense=True,show_intense = True,tag = 0, show_col=True,col_size = 1.5, custom_fig = None, facecolor='black', *args,**kwargs):
        '''
        Constructs a kymograph (ribosome position vs time) for one trajectory,
        optionally with a side panel of the intensity trace.

        :param ssa_obj: SSA result object (fragments, fragtimes, intensity_vec, ...)
        :param n_traj: index of the trajectory to draw
        :param bg_intense: shade the background by instantaneous intensity
        :param show_intense: add the intensity-vs-time side panel
        :param tag: probe tag index when intensity_vec is 3-D (tag, traj, time)
        :param show_col: overlay collision points when available
        :param col_size: marker size for collision points
        :param custom_fig: figure to draw into (passed to GridSpec)
        :param facecolor: background color of the axes
        '''
        # Sum fragment counts of the earlier trajectories to locate this
        # trajectory's slice in the flattened fragment arrays.
        startfrags = 0
        for i in range(n_traj):
            startfrags += ssa_obj.frag_per_traj[i]
        endfrags = startfrags + ssa_obj.frag_per_traj[n_traj]
        fragments = ssa_obj.fragments[startfrags:endfrags]
        time = ssa_obj.time#[0:len(ssa_obj.time_rec)-1]
        # intensity_vec layout: (tag, traj, time) when 3-D, else (traj, time).
        if len(ssa_obj.intensity_vec.shape) ==3:
            ivec = ssa_obj.intensity_vec[tag][n_traj]
        else:
            ivec = ssa_obj.intensity_vec[n_traj]
        # NOTE(review): slice end is startfrags+endfrags (= 2*startfrags +
        # count) rather than endfrags — confirm intended range.
        ftimes = ssa_obj.fragtimes[startfrags:startfrags+endfrags]
        nfrag = fragments.shape[0]
        maxlen= fragments.shape[1]
        #plt.figure(figsize=(5,10))
        if show_intense == True:
            # NOTE(review): custom_fig is passed positionally as GridSpec's
            # figure argument — confirm against the matplotlib version in use.
            gs = gridspec.GridSpec(1, 2, custom_fig, width_ratios=[3, 1])
        else:
            gs = gridspec.GridSpec(1, 1)
        plt.subplot(gs[0])
        lenplot = np.max(fragments)
        maxin = np.max(ivec)
        ax = plt.gca()
        ax.set_facecolor(facecolor)
        # Optionally shade each time row by the instantaneous intensity.
        if bg_intense == True:
            for i in range(len(time)):
                plt.plot([0,lenplot],[time[i],time[i]],color = cm.summer(1.*ivec[i]/maxin),lw=1)
        # Draw each ribosome fragment trace (position vs time).
        for i in range(nfrag):
            if maxlen <= np.where(fragments[i] > 0 )[0][-1]:
                timeseg = time[ftimes[i]:ftimes[i]+maxlen]
                plt.plot(fragments[i][0:len(timeseg)] ,timeseg[::-1] )
            else:
                timeseg = time[ftimes[i]:]
                stop = np.where(fragments[i] > 0 )[0][-1]
                timelen = len(fragments[i][0:stop])
                plt.plot(fragments[i][0:stop] ,timeseg[0:timelen],**kwargs )
        plt.xlabel('Ribosome position')
        plt.ylabel('Time (sec)')
        segtime = ssa_obj.time[0:len(ssa_obj.time_rec)]
        plt.ylim(ssa_obj.time_rec[-1], ssa_obj.time_rec[0])
        # Best-effort overlay of collision points (missing attribute is ok).
        if show_col == True:
            try:
                col = ssa_obj.col_points[n_traj]
                plt.plot(col[:,0],col[:,1],color='#00ff00',markersize=col_size,linestyle='none',marker='o')
            except:
                pass
        # Side panel: intensity (per mature probe) vs time.
        if show_intense == True:
            plt.subplot(gs[1])
            ax = plt.gca()
            ax.set_facecolor(facecolor)
            plt.plot(ivec.T/ np.sum(ssa_obj.probe),segtime,**kwargs)
            plt.xlabel('Intensity (ump)')
            plt.xlim(0,30)
            plt.ylim(segtime[-1], segtime[0])
        plt.tight_layout()
def get_autocorr_norm(self, intensity_vec, time_vec, totalSimulationTime, geneLength,normalization= 'Individual'):
'''
returns the autocorrelations
'''
autocorr_vec = np.zeros((intensity_vec.shape))
if normalization in [ 'Individual','I','individual','ind']:
for i in range(intensity_vec.shape[0]):
autocorr_vec[i,:] = self.get_acc2(intensity_vec[i]-np.mean(intensity_vec[i]))
elif normalization in ['global','Global','g','G']:
global_mean = np.mean(intensity_vec)
for i in range(intensity_vec.shape[0]):
autocorr_vec[i,:] = self.get_acc2(intensity_vec[i]-global_mean)
else:
print('unrecognized normalization, using indivdual means')
for i in range(intensity_vec.shape[0]):
autocorr_vec[i,:] = self.get_acc2(intensity_vec[i]-np.mean(intensity_vec[i]))
normalized_autocorr = autocorr_vec.T/ autocorr_vec[:,0]
mean_autocorr = np.mean(normalized_autocorr, axis=1)
error_autocorr = np.std(normalized_autocorr, axis=1)/np.sqrt(intensity_vec.shape[0])
dwelltime = None
try:
dwelltime = time_vec[np.where(mean_autocorr < .01)[0][0]]
except:
try:
dwelltime = time_vec[np.where(mean_autocorr < .05)[0][0]]
except:
dwelltime = 1
try:
zeroind = np.where(mean_autocorr<0)[0][0]
length = int(.3*len(mean_autocorr))
zeromean = np.mean(mean_autocorr[-length:])
zeromean2 = np.mean(mean_autocorr[zeroind:])
normalized_autocorr = normalized_autocorr-zeromean2
mean_autocorr = np.mean(normalized_autocorr, axis=1)
error_autocorr = np.std(normalized_autocorr, axis=1)/np.sqrt(intensity_vec.shape[0])
except:
pass
ke_exp = np.round(geneLength/dwelltime ,1)
return normalized_autocorr, mean_autocorr, error_autocorr, dwelltime, ke_exp
def get_autocorr(self, intensity_vec, time_vec, totalSimulationTime, geneLength, normalization='Individual'):
'''
returns the autocorrelations
'''
autocorr_vec = np.zeros((intensity_vec.shape))
if normalization in [ 'Individual','I','individual','ind']:
for i in range(intensity_vec.shape[0]):
autocorr_vec[i,:] = self.get_acc2(intensity_vec[i]-np.mean(intensity_vec[i]))
elif normalization in ['global','Global','g','G']:
global_mean = np.mean(intensity_vec)
for i in range(intensity_vec.shape[0]):
autocorr_vec[i,:] = self.get_acc2(intensity_vec[i]-global_mean)
else:
print('unrecognized normalization, using indivdual means')
for i in range(intensity_vec.shape[0]):
autocorr_vec[i,:] = self.get_acc2(intensity_vec[i]-np.mean(intensity_vec[i]))
autocorr = autocorr_vec.T
mean_autocorr = np.mean(autocorr, axis=1)
error_autocorr = np.std(autocorr, axis=1)/np.sqrt(intensity_vec.shape[0])
dwelltime = None
try:
dwelltime = time_vec[np.where(mean_autocorr < .01)[0][0]]
except:
try:
dwelltime = time_vec[np.where(mean_autocorr < .05)[0][0]]
except:
dwelltime = 1
ke_exp = np.round(geneLength/dwelltime ,1)
return autocorr, mean_autocorr, error_autocorr, dwelltime, ke_exp
def get_crosscorr(self, iv1,iv2):
'''
returns the autocorrelations
'''
i = 0
slen = np.correlate(iv1[i]-np.mean(iv1[i]),iv2[i]-np.mean(iv2[i]),'full').shape[0]
crosscorr_vec = np.zeros((iv1.shape[0],slen))
for i in range(iv1.shape[0]):
crosscorr_vec[i,:] = np.correlate(iv1[i]-np.mean(iv1[i]),iv2[i]-np.mean(iv2[i]),'full')/len(iv1)
normalized_autocorr = crosscorr_vec.T/ crosscorr_vec[:,len(iv1[i])-1]
mean_autocorr = np.mean(normalized_autocorr, axis=1)
return crosscorr_vec, mean_autocorr
def normalize_cc(self, correlation,mode='max'):
'''
normalize cc via either center or maximum.
'''
if mode.lower() in ['max','maximum']:
norm_cor = correlation/np.max(correlation,1)
if mode.lower() in ['center','middle']:
centerpoint = int((correlation.shape[1]+1)/2)-1
norm_cor = correlation/(correlation[:,centerpoint])
return norm_cor
def get_g0(self,correlation, mode = 'interp'):
'''
'''
if mode.lower() in ['interp','inter','extrapolate','interpolate']:
X = [1,2,3,4]
V = correlation[:,X]
G0 = np.interp(0,X,V)
if mode.lower() in ['g1','1']:
G0 = correlation[:,1]
if mode.lower() in ['g0','0']:
G0 = correlation[:,0]
return G0
def generate_additional_ks(self,k_enters,k_pauses,k_jumps,k_stops,L):
max_enter = 0
max_pause = 0
max_stop = 0
max_jump = 0
if k_enters != []:
k_enters[:,0] = k_enters[:,0]+L*k_enters[:,1]
k_enters[:,1] = k_enters[:,2]
k_enters = k_enters[:,0:2]
max_enter = np.max( k_enters[:,0])
if k_pauses != []:
k_pauses[:,0] = k_pauses[:,0]+ L*k_pauses[:,1]
k_pauses[:,1] = k_pauses[:,2]
k_pauses = k_pauses[:,0:2]
max_pause = np.max( k_pauses[:,0])
if k_stops != []:
k_stops[:,0] = k_stops[:,0]+L*k_stops[:,1]
k_stops[:,1] = k_stops[:,2]
k_stops = k_stops[:,0:2]
max_stop = np.max( k_stops[:,0])
if k_jumps != []:
k_jumps[:,0] = k_jumps[:,0]+ L*k_jumps[:,1]
k_jumps[:,1] = k_jumps[:,2]+ L*k_jumps[:,3]
k_jumps[:,2] = k_jumps[:,4]
k_jumps = k_jumps[:,0:3]
max_jump = max([np.max( k_jumps[:,0]),np.max( k_jumps[:,1])])
max_loc = max(max_jump,max_stop,max_pause,max_enter)
if max_loc <=L:
frames_used = 0
if max_loc > L:
frames_used = 1
if max_loc > 2*L :
frames_used = 1
return k_enters, k_pauses, k_stops, k_jumps, frames_used
def get_all_autocovariances(self,intensity_vec,time_vec,geneLength,shotnoise=True):
'''
Get all autocovariances for all 4 routines of normalization / means
'''
not_equal = False
firstlen = len(intensity_vec[0])
for traj in intensity_vec:
if len(traj) != firstlen:
not_equal = True
if not_equal == True:
| |
#!/usr/bin/env python3
from __future__ import print_function
import sys
import time
import argparse
import threading
try:
import readline
except:
pass
import mufsim.stackitems as si
import mufsim.gamedb as db
import mufsim.utils as util
from mufsim.logger import log, warnlog, set_output_command
from mufsim.compiler import MufCompiler
from mufsim.interface import network_interface as netifc
from mufsim.processlist import process_list
import mufsim.configs as confs
def log_print(msgtype, msg):
    """Print a log message: warnings/errors go to stderr, everything else
    to stdout; the chosen stream is flushed immediately."""
    stream = sys.stderr if msgtype in ('warning', 'error') else sys.stdout
    print(msg, file=stream)
    stream.flush()
def process_daemon():
    """Background-thread loop: poll the network interface forever."""
    while True:
        netifc.poll()
# Run the network poller on a daemon thread so it never blocks
# interpreter exit.  daemon=True at construction replaces the
# setDaemon() call, which is deprecated since Python 3.10.
daemon = threading.Thread(name='ProcessDaemon', target=process_daemon, daemon=True)
daemon.start()
class ConsoleMufDebugger(object):
    def __init__(self, fr):
        """Attach the console debugger to process frame *fr* and register
        for process-change and READ callbacks."""
        self.fr = fr
        self.matches = []  # completion candidates cached between readline calls
        process_list.watch_process_change(self.handle_process_change)
        process_list.set_read_handler(self.handle_read)
def handle_process_change(self):
self.fr = process_list.current_process
log("Process process changed to PID %d." % self.fr.pid)
def handle_read(self):
while True:
return input("READ>")
    def resume_execution(self):
        """Run the current frame until a break condition or program exit;
        report when the program has exited."""
        if self.fr and self.fr.call_stack:
            self.fr.execute_code()
        if not self.fr.get_call_stack():
            warnlog("Program exited.")
    def complete(self, text, state):
        """readline tab-completion hook.

        On the first call for a prefix (state == 0) build self.matches
        from context: command names at line start, function names after
        list/break, sub-commands after show, variable names after print.
        Later calls walk the cached list; returns None when exhausted.
        """
        cmds = [
            'list ', 'quit', 'run', 'show ', 'next', 'step', 'break ',
            'continue', 'finish', 'stack', 'trace', 'notrace', 'delete ',
            'print ', 'pop', 'push ', 'rot', 'dup', 'swap', 'help'
        ]
        response = None
        origline = readline.get_line_buffer()
        begin = readline.get_begidx()
        end = readline.get_endidx()
        text = origline[begin:end]
        # MUV name mangling: 'a::b' compiles to '_a__b'.
        muvname = ("_%s" % text).replace("::", "__")
        words = origline.split(' ')
        if state == 0:
            addr = self.fr.curr_addr()
            # This is the first time for this text, so build a match list.
            if begin == 0:
                self.matches = [s for s in cmds if s and s.startswith(text)]
            elif words[0] in ['l', 'list', 'b', 'break']:
                self.matches = [
                    x for x in self.fr.program_functions(addr.prog)
                    if x.startswith(text) or x.startswith(muvname)
                ]
            elif words[0] == 'show':
                showcmds = ['breakpoints', 'functions', 'globals', 'vars']
                self.matches = [x for x in showcmds if x.startswith(text)]
            elif words[0] in ['p', 'print']:
                fun = self.fr.program_find_func(addr)
                fvars = self.fr.program_func_vars(addr.prog, fun)
                gvars = self.fr.program_global_vars(addr.prog)
                self.matches = [
                    x for x in (fvars + gvars)
                    if x.startswith(text) or x.startswith(muvname)
                ]
            else:
                self.matches = cmds[:]
        # Return the state'th item from the match list,
        # if we have that many.
        try:
            response = self.matches[state]
        except IndexError:
            response = None
        return response
    def show_compiled_tokens(self, prog):
        """List the compiled tokens of program *prog*, printing a blank
        line before each function after the first."""
        alltokens = self.fr.program_tokens(prog)
        for inum, tokeninfo in enumerate(alltokens):
            rep = tokeninfo['repr']
            if inum > 0 and rep.startswith("Function:"):
                log("")
            log("% 5d: %s" % (inum, rep))
def show_addr_line(self, addr):
if not addr:
return
inst = self.fr.get_inst(addr)
src = self.fr.program_source_line(addr.prog, inst.line)
curraddr = self.fr.curr_addr()
mark = ' '
if addr == curraddr:
mark = '>'
log("%s% 5d: %s" % (mark, inst.line, src))
    def debug_cmd_step(self, args):
        """step [COUNT]: advance COUNT (default 1) lines, entering calls."""
        if not args:
            args = "1"
        if not util.is_int(args):
            log("Usage: step [COUNT]")
            return
        self.fr.set_break_steps(int(args))
        self.resume_execution()
        self.show_addr_line(self.fr.curr_addr())
        self.fr.nextline = -1  # reset 'list' continuation point
    def debug_cmd_next(self, args):
        """next [COUNT]: advance COUNT (default 1) lines, skipping calls."""
        if not args:
            args = "1"
        if not util.is_int(args):
            log("Usage: next [COUNT]")
            return
        self.fr.set_break_lines(int(args))
        self.resume_execution()
        self.show_addr_line(self.fr.curr_addr())
        self.fr.nextline = -1  # reset 'list' continuation point
    def debug_cmd_continue(self, args):
        """continue: clear break conditions and run until breakpoint/exit."""
        self.fr.reset_breaks()
        self.resume_execution()
        self.show_addr_line(self.fr.curr_addr())
        self.fr.nextline = -1  # reset 'list' continuation point
    def debug_cmd_finish(self, args):
        """finish: run until the current function returns."""
        self.fr.set_break_on_finish()
        self.resume_execution()
        self.show_addr_line(self.fr.curr_addr())
        self.fr.nextline = -1  # reset 'list' continuation point
    def debug_cmd_break(self, args):
        """break [PROG] LINE|FUNCNAME: add a breakpoint.

        With two words, the first selects a program by dbref or by
        registered name; otherwise the current program is used.  The
        remaining argument names a function or gives a line number.
        """
        addr = self.fr.curr_addr()
        prog = addr.prog
        if ' ' in args:
            prg, args = args.split(' ', 1)
            prg = prg.strip()
            args = args.strip()
            obj = db.match_dbref(prg)
            if obj == -1:
                # Not a dbref; try a registered name relative to object #0.
                obj = db.match_registered(db.getobj(0), prg)
            obj = db.getobj(obj)
            if not db.validobj(obj):
                log("Invalid program!")
                return
            if db.getobj(obj).objtype != "program":
                log("Invalid program!")
                return
            prog = obj
        # Prefer a function-name breakpoint; fall back to a line number.
        addr = self.fr.program_function_addr(prog, args)
        if addr:
            line = self.fr.get_inst_line(addr)
            bpnum = self.fr.add_breakpoint(prog, line)
            log("Added breakpoint %d at #%d line %d." % (bpnum, prog, line))
        elif util.is_int(args):
            line = int(args)
            bpnum = self.fr.add_breakpoint(prog, line)
            log("Added breakpoint %d at #%d line %d." % (bpnum, prog, line))
        else:
            log("Usage: break [PROG] LINE")
            log(" or: break [PROG] FUNCNAME")
    def debug_cmd_delete(self, args):
        """delete BREAKPOINTNUM: remove a breakpoint (1-based index)."""
        bps = self.fr.get_breakpoints()
        if not util.is_int(args) or int(args) - 1 not in list(range(len(bps))):
            log("Usage: delete BREAKPOINTNUM")
        else:
            self.fr.del_breakpoint(int(args) - 1)
            log("Deleted breakpoint %d." % int(args))
    def debug_cmd_list(self, args):
        """list [LINE[,LINE]] | FUNCNAME: show source lines.

        With no argument, continues from the last listing, or shows the
        lines around the current instruction when there is none.
        """
        addr = self.fr.curr_addr()
        inst = self.fr.get_inst(addr)
        prog = addr.prog
        if self.fr.program_function_addr(prog, args):
            addr = self.fr.program_function_addr(prog, args)
            start = self.fr.get_inst_line(addr)
            end = start + 10
        elif ',' in args:
            start, end = args.split(',', 1)
            start = start.strip()
            end = end.strip()
        elif args:
            start = end = args
        elif self.fr.nextline < 0:
            # No continuation point: list around the current line.
            start = str(inst.line - 5)
            end = str(inst.line + 5)
        else:
            start = self.fr.nextline
            end = self.fr.nextline + 10
        if not util.is_int(start) or not util.is_int(end):
            log("Usage: list [LINE[,LINE]]")
            log(" or: list FUNCNAME")
        else:
            srcs = self.fr.program_source_lines(prog)
            # Clamp the requested range to the available source.
            start = max(1, min(int(start), len(srcs)))
            end = max(1, min(int(end), len(srcs)))
            self.fr.nextline = end + 1
            for i in range(start, end + 1):
                src = srcs[i - 1]
                if i == inst.line:
                    log(">% 5d: %s" % (i, src))
                else:
                    log(" % 5d: %s" % (i, src))
    def debug_cmd_print(self, args):
        """print VAR: display a function or global variable's value, also
        trying the MUV-mangled name ('a::b' compiles to '_a__b')."""
        addr = self.fr.curr_addr()
        vname = args
        muvname = ("_%s" % vname).replace("::", "__")
        fun = self.fr.program_find_func(addr)
        # Lookup order: func var, global var, then the mangled variants.
        if self.fr.program_func_var(addr.prog, fun, vname):
            v = self.fr.program_func_var(addr.prog, fun, vname)
            val = self.fr.funcvar_get(v)
        elif self.fr.program_global_var(addr.prog, vname):
            v = self.fr.program_global_var(addr.prog, vname)
            val = self.fr.globalvar_get(v)
        elif self.fr.program_func_var(addr.prog, fun, muvname):
            v = self.fr.program_func_var(addr.prog, fun, muvname)
            val = self.fr.funcvar_get(v)
            vname = muvname
        elif self.fr.program_global_var(addr.prog, muvname):
            v = self.fr.program_global_var(addr.prog, muvname)
            val = self.fr.globalvar_get(v)
            vname = muvname
        else:
            log("Variable not found: %s" % vname)
            val = None
        # NOTE(review): a variable legitimately holding None would print
        # nothing here -- confirm funcvar_get/globalvar_get never return None.
        if val is not None:
            val = si.item_repr(val)
            log("%s = %s" % (vname, val))
    def debug_cmd_show_breakpoints(self):
        """List all active breakpoints (entries with prog and line set)."""
        log("Breakpoints")
        cnt = 0
        bps = self.fr.get_breakpoints()
        for i, bp in enumerate(bps):
            prog, line = bp
            if prog and line:
                log(" %d: Program #%d Line %d" % (i + 1, prog, line))
                cnt += 1
        if not cnt:
            log(" - None -")
    def debug_cmd_show_functions(self):
        """List the functions declared by the current program."""
        log("Declared Functions")
        addr = self.fr.curr_addr()
        funcs = self.fr.program_functions(addr.prog)
        if funcs:
            for func in funcs:
                log(" %s" % func)
        else:
            log(" - None -")
    def debug_cmd_show_globals(self):
        """List the current program's global variables with their values."""
        log("Global Variables")
        addr = self.fr.curr_addr()
        gvars = self.fr.program_global_vars(addr.prog)
        if gvars:
            for vnum, vname in enumerate(gvars):
                val = self.fr.globalvar_get(vnum)
                val = si.item_repr(val)
                log(" LV%-3d %s = %s" % (vnum, vname, val))
        else:
            log(" - None -")
    def debug_cmd_show_vars(self):
        """List the current function's variables with their values."""
        log("Function Variables")
        addr = self.fr.curr_addr()
        fun = self.fr.program_find_func(addr)
        fvars = self.fr.program_func_vars(addr.prog, fun)
        if fvars:
            for vnum, vname in enumerate(fvars):
                val = self.fr.funcvar_get(vnum)
                val = si.item_repr(val)
                log(" SV%-3d %s = %s" % (vnum, vname, val))
        else:
            log(" - None -")
def debug_cmd_show(self, args):
if args == "breakpoints":
self.debug_cmd_show_breakpoints()
elif args == "functions":
self.debug_cmd_show_functions()
elif args == "globals":
self.debug_cmd_show_globals()
elif args == "vars":
self.debug_cmd_show_vars()
else:
log("Usage: show breakpoints")
log(" or: show functions")
log(" or: show globals")
log(" or: show vars")
    def debug_cmd_stack(self, args):
        """stack [DEPTH]: show the top DEPTH data-stack items (default:
        effectively all of them)."""
        if not args:
            args = "999999"
        if not util.is_int(args):
            log("Usage: stack [DEPTH]")
        else:
            depth = self.fr.data_depth()
            args = int(args)
            if args > depth:
                args = depth
            for i in range(args):
                # data_pick is 1-based from the top of the stack.
                val = self.fr.data_pick(i + 1)
                val = si.item_repr(val)
                log("Stack %d: %s" % (depth - i, val))
            if not depth:
                log("- Empty Stack -")
    def debug_cmd_trace(self, args):
        """trace: enable per-instruction trace output."""
        self.fr.set_trace(True)
        log("Turning on Trace mode.")
    def debug_cmd_notrace(self, args):
        """notrace: disable per-instruction trace output."""
        self.fr.set_trace(False)
        log("Turning off Trace mode.")
    def debug_cmd_pop(self, args):
        """pop: discard the top data-stack item."""
        self.fr.data_pop()
        log("Stack item POPed.")
    def debug_cmd_dup(self, args):
        """dup: duplicate the top data-stack item."""
        a = self.fr.data_pick(1)
        self.fr.data_push(a)
        log("Stack item DUPed.")
    def debug_cmd_swap(self, args):
        """swap: exchange the top two data-stack items."""
        a = self.fr.data_pop()
        b = self.fr.data_pop()
        self.fr.data_push(a)
        self.fr.data_push(b)
        log("Stack items SWAPed.")
    def debug_cmd_rot(self, args):
        """rot: rotate the third data-stack item to the top."""
        a = self.fr.data_pop()
        b = self.fr.data_pop()
        c = self.fr.data_pop()
        self.fr.data_push(b)
        self.fr.data_push(a)
        self.fr.data_push(c)
        log("Stack items ROTed.")
def debug_cmd_push(self, args):
if util.is_int(args):
self.fr.data_push(int(args))
elif util.is_float(args):
self.fr.data_push(float(args))
elif util.is_dbref(args):
self.fr.data_push(si.DBRef(int(args[1:])))
elif util.is_strlit(args):
self.fr.data_push(args[1:-1])
log("Stack item pushed.")
    def debug_cmd_where(self, args):
        """where: display the MUF call stack, one frame per line with its
        source line underneath."""
        fmt = "{level:-3d}: In prog {prog}, func '{func}', line {line}: {inst}"
        fmt += "\n {src}"
        for callinfo in self.fr.get_call_stack():
            log(fmt.format(**callinfo))
    def debug_cmd_run(self, args):
        """run: restart the test program ($cmd/test as John_Doe) in a
        fresh process and list its source."""
        userobj = db.get_player_obj("John_Doe")
        progobj = db.get_registered_obj(userobj, "$cmd/test")
        trigobj = db.get_registered_obj(userobj, "$testaction")
        self.fr = process_list.new_process()
        # NOTE(review): self.opts is never assigned in this class's
        # visible code -- presumably injected externally; confirm.
        self.fr.setup(progobj, userobj, trigobj, self.opts.command)
        log("Restarting program.")
        self.debug_cmd_list("")
def debug_cmd_help(self, args):
log("help Show this message.")
log("where Display the call stack.")
log("stack [DEPTH] Show top N data stack items.")
log("list List next few source code lines.")
log("list LINE List source code LINE.")
log("list START,END List source code from START to END.")
log("list FUNC List source code at start of FUNC.")
log("break LINE Set breakpoint at given line.")
log("break FUNC Set breakpoint at start of FUNC.")
log("delete BREAKNUM Delete a breakpoint.")
log("show breakpoints Show current breakpoints.")
log("show functions List all declared functions.")
log("show globals List all global vars.")
log("show vars List all vars in the current func.")
log("step [COUNT] Step 1 or COUNT lines, enters calls.")
log("next [COUNT] Step 1 or COUNT lines, skips calls.")
log("finish Finish the current function.")
log("cont Continue until | |
of size
n_triplets x ... x n_win x (nchan+npad) x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the total bandwidth
(nchan x df) simulated.
'closure_phase_skyvis'
[numpy array] subband delay spectra of closure phases
of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x ... n_win x
nlags x n_t. It is in units of Hz
'closure_phase_vis'
[numpy array] subband delay spectra of closure phases
of noisy sky visiblities from the specified antenna
triplets. It is of size n_triplets x ... x n_win x
nlags x n_t. It is in units of Hz
'closure_phase_noise'
[numpy array] subband delay spectra of closure phases
of noise visiblities from the specified antenna triplets.
It is of size n_triplets x ... x n_win x nlags x n_t. It
is in units of Hz
If action is set to 'return_resampled', the following
output is returned. The output is a dictionary that contains
information about closure phases. Under each of these keys is
information about delay spectra of different frequency sub-bands
(n_win in number) under the following keys:
'antenna_triplets'
[list of tuples] List of antenna ID triplets where each
triplet is given as a tuple. Closure phase delay spectra in
subbands is computed for each of these antenna triplets
'baseline_triplets'
[numpy array] List of 3x3 numpy arrays. Each 3x3
unit in the list represents triplets of baseline
vectors where the three rows denote the three
baselines in the triplet and the three columns
define the x-, y- and z-components of the
triplet. The number of 3x3 unit elements in the
list will equal the number of elements in the
list under key 'antenna_triplets'. Closure phase delay
spectra in subbands is computed for each of these baseline
triplets which correspond to the antenna triplets
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the resampled subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_triplets x ... x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the resampled subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth
'closure_phase_skyvis'
[numpy array] subband delay spectra of closure phases
of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x ... x n_win x
nlags x n_t. It is in units of Hz
'closure_phase_vis'
[numpy array] subband delay spectra of closure phases
of noisy sky visiblities from the specified antenna
triplets. It is of size n_triplets x ... x n_win x
nlags x n_t. It is in units of Hz
'closure_phase_noise'
[numpy array] subband delay spectra of closure phases
of noise visiblities from the specified antenna triplets.
It is of size n_triplets x ... x n_win x nlags x n_t. It is
in units of Hz
------------------------------------------------------------------------
"""
try:
bw_eff
except NameError:
raise NameError('Effective bandwidth must be specified')
else:
if not isinstance(bw_eff, (int, float, list, NP.ndarray)):
raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
bw_eff = NP.asarray(bw_eff).reshape(-1)
if NP.any(bw_eff <= 0.0):
raise ValueError('All values in effective bandwidth must be strictly positive')
if freq_center is None:
freq_center = NP.asarray(self.f[self.f.size/2]).reshape(-1)
elif isinstance(freq_center, (int, float, list, NP.ndarray)):
freq_center = NP.asarray(freq_center).reshape(-1)
if NP.any((freq_center <= self.f.min()) | (freq_center >= self.f.max())):
raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
else:
raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')
if (bw_eff.size == 1) and (freq_center.size > 1):
bw_eff = NP.repeat(bw_eff, freq_center.size)
elif (bw_eff.size > 1) and (freq_center.size == 1):
freq_center = NP.repeat(freq_center, bw_eff.size)
elif bw_eff.size != freq_center.size:
raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')
if shape is not None:
if not isinstance(shape, str):
raise TypeError('Window shape must be a string')
if shape not in ['rect', 'bhw', 'bnw', 'RECT', 'BHW', 'BNW']:
raise ValueError('Invalid value for window shape specified.')
else:
shape = 'rect'
if fftpow is None:
fftpow = 1.0
else:
if not isinstance(fftpow, (int, float)):
raise TypeError('Power to raise window FFT by must be a scalar value.')
if fftpow < 0.0:
raise ValueError('Power for raising FFT of window by must be positive.')
if pad is None:
pad = 1.0
else:
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
if cpinfo is not None:
if not isinstance(cpinfo, dict):
raise TypeError('Input cpinfo must be a dictionary')
else:
cpinfo = self.ia.getClosurePhase(antenna_triplets=antenna_triplets, specsmooth_info=specsmooth_info, delay_filter_info=delay_filter_info, spectral_window_info=spectral_window_info)
result = {'antenna_triplets': cpinfo['antenna_triplets'], 'baseline_triplets': cpinfo['baseline_triplets']}
freq_wts = NP.empty((bw_eff.size, self.f.size), dtype=NP.float_)
frac_width = DSP.window_N2width(n_window=None, shape=shape, fftpow=fftpow, area_normalize=False, power_normalize=True)
window_loss_factor = 1 / frac_width
n_window = NP.round(window_loss_factor * bw_eff / self.df).astype(NP.int)
ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sortind = NP.argsort(ind_channels)
ind_freq_center = ind_freq_center[sortind]
ind_channels = ind_channels[sortind]
dfrequency = dfrequency[sortind]
n_window = n_window[sortind]
for i,ind_chan in enumerate(ind_channels):
window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape, fftpow=fftpow, centering=True, peak=None, area_normalize=False, power_normalize=True)
window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - int(n_window[i]/2))
ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sind = NP.argsort(ind_window_chans)
ind_window_chans = ind_window_chans[sind]
ind_chans = ind_chans[sind]
dfreq = dfreq[sind]
window = window[ind_window_chans]
window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
freq_wts[i,:] = window
npad = int(self.f.size * pad)
lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)
# lag_kernel = DSP.FT1D(NP.pad(self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
# lag_kernel = DSP.FT1D(NP.pad(freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result = {'freq_center': freq_center, 'shape': shape, 'freq_wts': freq_wts, 'bw_eff': bw_eff, 'npad': npad, 'lags': lags, 'lag_corr_length': self.f.size / NP.sum(freq_wts, axis=-1)}
for key in cpinfo:
if key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
available_CP_key = key
ndim_padtuple = [(0,0) for i in range(1+len(cpinfo[key].shape[:-2]))] + [(0,npad), (0,0)]
result[key] = DSP.FT1D(NP.pad(NP.exp(-1j*cpinfo[key].reshape(cpinfo[key].shape[:-2]+(1,)+cpinfo[key].shape[-2:])) * freq_wts.reshape(tuple(NP.ones(len(cpinfo[key].shape[:-2])).astype(int))+freq_wts.shape+(1,)), ndim_padtuple, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
# result[key] = DSP.FT1D(NP.pad(NP.exp(-1j*cpinfo[key][:,NP.newaxis,:,:]) * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
lag_kernel = DSP.FT1D(NP.pad(freq_wts.reshape(tuple(NP.ones(len(cpinfo[available_CP_key].shape[:-2])).astype(int))+freq_wts.shape+(1,)), ndim_padtuple, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result['lag_kernel'] = lag_kernel
if verbose:
print('\tSub-band(s) delay transform computed')
result_resampled = {'antenna_triplets': cpinfo['antenna_triplets'], 'baseline_triplets': cpinfo['baseline_triplets']}
result_resampled['freq_center'] = result['freq_center']
result_resampled['bw_eff'] = result['bw_eff']
result_resampled['freq_wts'] = result['freq_wts']
downsample_factor = NP.min((self.f.size + npad) * self.df / result_resampled['bw_eff'])
result_resampled['lags'] = DSP.downsampler(result['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
result_resampled['lag_kernel'] = DSP.downsampler(result['lag_kernel'], downsample_factor, axis=-2, method='interp', kind='linear')
dlag = result_resampled['lags'][1] - result_resampled['lags'][0]
result_resampled['lag_corr_length'] = (1/result['bw_eff']) / dlag
for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
if key in result:
result_resampled[key] = DSP.downsampler(result[key], downsample_factor, axis=-2, method='FFT')
if verbose:
print('\tDownsampled | |
'''
Originally part of the pynbody package. Modified to support pymses units
'''
import numpy as np
import weakref
import os
from pynbody import units
_units = units
# from .backcompat import property
from pynbody.backcompat import fractions
import atexit
import functools
# from seren3.utils.constants import unit_string
from pymses.utils.constants.unit import Unit as pymses_Unit
def _unit_string(unit):
u = unit._decompose_base_units().replace("^", "**").replace(".", " ")
return "%s %s" % (unit.coeff, u)
class SimArray(np.ndarray):
"""
Defines a shallow wrapper around numpy.ndarray for extra
functionality like unit-tracking.
"""
_ufunc_registry = {}
    @property
    def ancestor(self):
        """Provides the basemost SimArray that an IndexedSimArray is based on."""
        return self
    @property
    def derived(self):
        """True if this array is registered as a derived array of its
        simulation (requires both a sim reference and a name)."""
        if self.sim and self.name:
            return self.sim.is_derived_array(self.name, getattr(self, 'family', None))
        else:
            return False
@derived.setter
def derived(self, value):
if value:
raise ValueError, "Can only unlink an array. Delete an array to force a rederivation if this is the intended effect."
if self.derived:
self.sim.unlink_array(self.name)
    def __reduce__(self):
        """Pickle support: extend ndarray's reduce state so the units are
        stored as the first element of the state tuple."""
        T = np.ndarray.__reduce__(self)
        T = (
            T[0], T[1], (self.units, T[2][0], T[2][1], T[2][2], T[2][3], T[2][4]))
        return T
    def __setstate__(self, args):
        """Unpickle support: restore the units stashed by __reduce__, then
        let ndarray restore the rest.  The sim reference is not preserved."""
        self._units = args[0]
        self.sim = None
        self._name = None
        np.ndarray.__setstate__(self, args[1:])
    def __new__(subtype, data, units=None, latex=None, snapshot=None, **kwargs):
        """Construct a SimArray from *data*, optionally attaching units, a
        latex label and a (weakly referenced) snapshot.

        A pymses Unit passed as *data* contributes its coefficient as the
        value and its decomposed base units as the unit string; a pymses
        Unit passed as *units* is converted to a unit string first.  When
        *data* is itself a SimArray-like object, its units/snapshot/family
        are inherited unless explicitly overridden.
        """
        if isinstance(data, pymses_Unit):
            units = data._decompose_base_units().replace("^", "**").replace(".", " ")
            data = data.coeff
        if isinstance(units, pymses_Unit):
            units = _unit_string(units)
        new = np.array(data, **kwargs).view(subtype)
        new._context = {}
        if hasattr(data, 'units') and hasattr(data, 'snapshot') and units is None and snapshot is None:
            units = data.units
            snapshot = data.snapshot
        if hasattr(data, 'family'):
            new.family = data.family
        if isinstance(units, str):
            units = _units.Unit(units)
        new._units = units
        # Always associate a SimArray with the top-level snapshot.
        # Otherwise we run into problems with how the reference should
        # behave: we don't want to lose the link to the simulation by
        # storing a weakref to a SubSnap that might be deconstructed,
        # but we also wouldn't want to store a strong ref to a SubSnap
        # since that would keep the entire simulation alive even if
        # deleted.
        #
        # So, set the sim attribute to the top-level snapshot and use
        # the normal weak-reference system.
        if snapshot is not None:
            new._sim = weakref.ref(snapshot)
            new.set_context(new.sim)
            # new._context["h"] = snapshot.cosmo["h"]
            # new._context["a"] = snapshot.cosmo["aexp"]
            # will generate a weakref automatically
        new._name = None
        new._latex = latex
        return new
    def __array_finalize__(self, obj):
        """Propagate units, sim and name when numpy creates views/copies
        of a SimArray; fresh arrays get empty metadata."""
        if obj is None:
            return
        elif obj is not self and hasattr(obj, 'units'):
            self._units = obj.units
            self._sim = obj._sim
            self._name = obj._name
            if hasattr(obj, 'family'):
                self.family = obj.family
        else:
            self._units = None
            self._sim = lambda: None
            self._name = None
    def __array_wrap__(self, array, context=None):
        """Wrap ufunc output as a SimArray, deriving its units from the
        per-ufunc rules in _ufunc_registry; fall back to a plain view when
        no rule is registered or the units are incompatible."""
        if context is None:
            n_array = array.view(SimArray)
            return n_array
        try:
            ufunc = context[0]
            output_units = SimArray._ufunc_registry[ufunc](*context[1])
            n_array = array.view(SimArray)
            n_array.units = output_units
            n_array.sim = self.sim
            n_array._name = self._name
            if hasattr(self, "_context"):
                n_array._context = self._context
            return n_array
        except (KeyError, units.UnitsException):
            return array
@staticmethod
def ufunc_rule(for_ufunc):
def x(fn):
SimArray._ufunc_registry[for_ufunc] = fn
return fn
return x
    @property
    def latex(self):
        """Latex label for plots: the field latex, with the units appended
        in brackets unless the array is unitless or the label already
        contains a bracketed part.  None when no label was set."""
        from pynbody.units import NoUnit
        if hasattr(self, "_latex") and (self._latex is not None):
            f_latex = self.get_field_latex()
            if self.units == NoUnit() or ( ("[" in f_latex) and "]" in f_latex ):
                return r"%s" % f_latex
            else:
                return r"%s [$%s$]" % (f_latex, self.units.latex())
        return None
    def set_field_latex(self, latex):
        """Set the latex label used by the `latex` property."""
        self._latex = latex
    def get_field_latex(self):
        """Return the raw latex label (or None when unset)."""
        return self._latex
    @property
    def units(self):
        """Unit of this array; views defer to their base array.  Returns
        _units.no_unit when no unit has been set."""
        if hasattr(self.base, 'units'):
            return self.base.units
        else:
            if self._units is None:
                return _units.no_unit
            else:
                return self._units
    @units.setter
    def units(self, u):
        """Set the unit, coercing non-Unit values through units.Unit(str(u));
        a 'no unit' value is stored as None.  Views set the unit on their
        base array instead."""
        # print 'SimArray::units::u\t', u
        if not isinstance(u, units.UnitBase) and u is not None:
            u = units.Unit(str(u))
        if hasattr(self.base, 'units'):
            self.base.units = u
        else:
            if hasattr(u, "_no_unit"):
                self._units = None
            else:
                self._units = u
    @property
    def name(self):
        """Name of this array in its simulation; views defer to base."""
        if hasattr(self.base, 'name'):
            return self.base.name
        return self._name
    @property
    def sim(self):
        """The snapshot this array belongs to (dereferenced weakref),
        restricted to this array's family when one is set."""
        if hasattr(self.base, 'sim'):
            if self.family and self.base.sim:
                return self.base.sim[self.family]
            else:
                return self.base.sim
        return self._sim()
    @sim.setter
    def sim(self, s):
        """Store a weak reference to snapshot *s* (or a None-returning
        callable when s is None); views defer to their base array."""
        if hasattr(self.base, 'sim'):
            self.base.sim = s
        else:
            if s is not None:
                self._sim = weakref.ref(s)
            else:
                self._sim = lambda: None
@property
def family(self):
try:
return self._family
except AttributeError:
return None
    @family.setter
    def family(self, fam):
        """Associate this array with family *fam*."""
        self._family = fam
    def __mul__(self, rhs):
        """Multiplying by a Unit scales this array's units and leaves the
        data alone; anything else multiplies the data via ndarray."""
        if isinstance(rhs, _units.UnitBase):
            x = self.copy()
            x.units = x.units * rhs
            return x
        else:
            res = np.ndarray.__mul__(self, rhs)
            # if isinstance(res, SimArray):
            # res._context = self._context
            return res
    def __div__(self, rhs):
        """Python 2 division counterpart of __truediv__: dividing by a
        Unit scales the units; anything else divides the data."""
        if isinstance(rhs, _units.UnitBase):
            x = self.copy()
            x.units = x.units / rhs
            return x
        else:
            res = np.ndarray.__div__(self, rhs)
            # if isinstance(res, SimArray):
            # res._context = self._context
            return res
    def __truediv__(self, rhs):
        """Dividing by a Unit scales the units and leaves the data alone;
        anything else divides the data via ndarray."""
        if isinstance(rhs, _units.UnitBase):
            x = self.copy()
            x.units = x.units / rhs
            return x
        else:
            res = np.ndarray.__truediv__(self, rhs)
            # if isinstance(res, SimArray):
            # res._context = self._context
            return res
    def __imul__(self, rhs):
        """In-place multiply: a Unit operand scales only the units; an
        array operand multiplies the data and, when it carries units,
        multiplies the units too."""
        if isinstance(rhs, _units.UnitBase):
            self.units *= rhs
        else:
            np.ndarray.__imul__(self, rhs)
            try:
                self.units *= rhs.units
            except AttributeError:
                pass
        return self
    def __idiv__(self, rhs):
        """Python 2 in-place division counterpart of __itruediv__."""
        if isinstance(rhs, _units.UnitBase):
            self.units /= rhs
        else:
            np.ndarray.__idiv__(self, rhs)
            try:
                self.units /= rhs.units
            except AttributeError:
                pass
        return self
    def __itruediv__(self, rhs):
        """In-place divide: a Unit operand scales only the units; an array
        operand divides the data and, when it carries units, the units."""
        if isinstance(rhs, _units.UnitBase):
            self.units /= rhs
        else:
            np.ndarray.__itruediv__(self, rhs)
            try:
                self.units /= rhs.units
            except AttributeError:
                pass
        return self
    def set_context(self, context):
        """Cache the cosmology values (h, aexp) from *context* needed for
        later unit conversions."""
        self._context["h"] = context.cosmo["h"]
        self._context["a"] = context.cosmo["aexp"]
def conversion_context(self):
if hasattr(self, "_context"):
return self._context
else:
return {}
    def _generic_add(self, x, add_op=np.add):
        """Shared implementation for +/- (and their in-place forms).

        When both operands carry real units, *x* is converted into our
        units (our conversion context overrides x's) before applying
        *add_op*; incompatible dimensions raise ValueError.  Unit objects
        are converted via in_units; unitless operands are applied as-is.
        """
        if hasattr(x, 'units') and not hasattr(self.units, "_no_unit") and not hasattr(x.units, "_no_unit"):
            # Check unit compatibility
            try:
                context = x.conversion_context()
            except AttributeError:
                context = {}
            # Our own contextual information overrides x's
            context.update(self.conversion_context())
            try:
                cr = x.units.ratio(self.units,
                                   **context)
            except units.UnitsException:
                raise ValueError("Incompatible physical dimensions %r and %r, context %r" % (
                    str(self.units), str(x.units), str(self.conversion_context())))
            if cr == 1.0:
                r = add_op(self, x)
            else:
                # Rescale x into our units before the operation.
                b = np.multiply(x, cr)
                if hasattr(b, 'units'):
                    b.units = None
                if not np.can_cast(b.dtype,self.dtype):
                    b = np.asarray(b, dtype=x.dtype)
                r = add_op(self, b)
            return r
        elif units.is_unit(x):
            x = x.in_units(self.units, **self.conversion_context())
            r = add_op(self, x)
            return r
        else:
            r = add_op(self, x)
            return r
    def __add__(self, x):
        """Add, converting *x* into our units when it carries units."""
        if isinstance(x, _units.UnitBase):
            return x + self
        else:
            return self._generic_add(x)
    def __sub__(self, x):
        """Subtract, converting *x* into our units when it carries units."""
        if isinstance(x, _units.UnitBase):
            return (-x + self).in_units(self.units)
        else:
            return self._generic_add(x, np.subtract)
    def __iadd__(self, x):
        """In-place add via _generic_add with ndarray's in-place op."""
        self._generic_add(x, np.ndarray.__iadd__)
        return self
    def __isub__(self, x):
        """In-place subtract via _generic_add with ndarray's in-place op."""
        self._generic_add(x, np.ndarray.__isub__)
        return self
    def __pow__(self, x):
        """Raise to a power.  Tuple exponents are interpreted as fractions
        so that units with fractional powers propagate exactly; units are
        only propagated for integral or Fraction exponents."""
        numerical_x = x
        if isinstance(x, tuple):
            x = fractions.Fraction(x[0], x[1])
            numerical_x = float(x)
        elif isinstance(x, fractions.Fraction):
            numerical_x = float(x)
        # The following magic circumvents our normal unit-assignment
        # code which couldn't cope with the numerical version of x
        # in the case of fractions. All this is necessary to make the
        # magic tuple->fraction conversions work seamlessly.
        r = np.asarray(np.power(self.view(np.ndarray), numerical_x)).view(SimArray)
        # Recent numpy versions can take 1-element arrays and return
        # scalars, in which case we now have a floating point number :(
        if type(r) is not SimArray:
            return r
        if self.units is not None and (
                isinstance(x, fractions.Fraction) or
                isinstance(x, int)):
            r.sim = self.sim
            r.units = self.units ** x
        else:
            r.units = None
            r.sim = self.sim
        return r
def __repr__(self):
x = np.ndarray.__repr__(self)
if not hasattr(self.units, "_no_unit"):
return x[:-1] + ", '" + str(self.units) + "')"
else:
return x
def __setitem__(self, item, to):
    """Item assignment that converts unit-bearing values into our units first."""
    convertible = (hasattr(to, "in_units")
                   and not hasattr(self.units, "_no_unit")
                   and not hasattr(to.units, "_no_unit"))
    if convertible:
        to = to.in_units(self.units)
    np.ndarray.__setitem__(self, item, to)
def __setslice__(self, a, b, to):
    # Python 2 slice-assignment hook; simply delegates to the unit-aware
    # __setitem__. Unused on Python 3, where slice assignment goes straight
    # to __setitem__.
    self.__setitem__(slice(a, b), to)
def abs(self, *args, **kwargs):
    """Element-wise absolute value, propagating units and sim reference."""
    result = np.abs(self, *args, **kwargs)
    # hasattr is checked first so plain ndarrays (no units/sim) pass through.
    if hasattr(result, 'units') and self.units is not None:
        result.units = self.units
    if hasattr(result, 'sim') and self.sim is not None:
        result.sim = self.sim
    return result
def cumsum(self, axis=None, dtype=None, out=None):
    """Cumulative sum that keeps the array's units and sim reference."""
    result = np.ndarray.cumsum(self, axis, dtype, out)
    if hasattr(result, 'units') and self.units is not None:
        result.units = self.units
    if hasattr(result, 'sim') and self.sim is not None:
        result.sim = self.sim
    return result
def prod(self, axis=None, dtype=None, out=None):
    """Product along an axis, raising units to the axis length.

    With ``axis=None`` the units are kept unchanged (matching the original
    behaviour; NOTE(review): one might expect units**size here — confirm).
    """
    result = np.ndarray.prod(self, axis, dtype, out)
    if hasattr(result, 'units') and self.units is not None:
        if axis is not None:
            # Multiplying shape[axis] unit-ful values raises the unit's power.
            result.units = self.units ** self.shape[axis]
        else:
            result.units = self.units
    if hasattr(result, 'sim') and self.sim is not None:
        result.sim = self.sim
    return result
def sum(self, *args, **kwargs):
    """Sum that keeps the array's units and sim reference."""
    result = np.ndarray.sum(self, *args, **kwargs)
    if hasattr(result, 'units') and self.units is not None:
        result.units = self.units
    if hasattr(result, 'sim') and self.sim is not None:
        result.sim = self.sim
    return result
| |
#/*
# * Copyright (c) 2019,2020 Xilinx Inc. All rights reserved.
# *
# * Author:
# * <NAME> <<EMAIL>>
# *
# * SPDX-License-Identifier: BSD-3-Clause
# */
import copy
import struct
import sys
import types
import unittest
import os
import getopt
import re
import subprocess
import shutil
from pathlib import Path
from pathlib import PurePath
from io import StringIO
import contextlib
import importlib
from lopper import Lopper
from lopper import LopperFmt
import lopper
from lopper_tree import *
from re import *
sys.path.append(os.path.dirname(__file__))
from openamp_xlnx_common import *
# Device-tree path of the RPU remoteproc node this module creates/populates.
# NOTE(review): not referenced within this chunk — confirm use before removing.
RPU_PATH = "/rpu@ff9a0000"
def trim_ipis(sdt):
    """Blank out IPI-specific properties on every zynqmp-ipi-mailbox node
    under /amba and sync the change back into the FDT."""
    unneeded_props = ["compatible", "xlnx,ipi-bitmask","interrupts", "xlnx,ipi-id", "xlnx,ipi-target-count", "xlnx,cpu-name", "xlnx,buffer-base", "xlnx,buffer-index", "xlnx,int-id", "xlnx,bit-position"]
    for node in sdt.tree['/amba'].subnodes():
        compat = node.propval("compatible")
        if compat == [""]:
            continue
        if 'xlnx,zynqmp-ipi-mailbox' not in compat:
            continue
        for prop_name in unneeded_props:
            node[prop_name].value = ""
        node.sync(sdt.FDT)
def is_compat( node, compat_string_to_test ):
    """Lopper assist hook: return the RPU handler when the compat string
    matches the xlnx openamp RPU assist, else the empty string."""
    matched = re.search( "openamp,xlnx-rpu", compat_string_to_test)
    return xlnx_openamp_rpu if matched else ""
def update_mbox_cntr_intr_parent(sdt):
    """Point the zynqmp_ipi1 mailbox controller's interrupt-parent at the
    A72 GIC's phandle, then sync and re-resolve the tree."""
    gic = sdt.tree["/amba_apu/interrupt-controller@f9000000"]
    mailbox = sdt.tree["/zynqmp_ipi1"]
    mailbox["interrupt-parent"].value = gic.phandle
    sdt.tree.sync()
    sdt.tree.resolve()
# 1 for master, 0 for slave
# for each openamp channel, return mapping of role to resource group
def determine_role(sdt, domain_node):
    """Collect the resource-group nodes for which this domain is OpenAMP master.

    The domain's "include" property is a flat list of (phandle, role) pairs;
    role 1 marks the master/host side.

    :param sdt: system device tree wrapper (provides sdt.tree.pnode()).
    :param domain_node: domain node whose "include" property is parsed.
    :return: list of resource-group nodes on success, -1 on any error.
    """
    include_prop = domain_node["include"]
    values = list(include_prop.value)
    # entries must come in (resource-group phandle, role) pairs
    if len(values) % 2 == 1:
        print("list of include not valid. expected even number of elements. got ", len(values), include_prop.value)
        return -1
    rsc_groups = []
    current_rsc_group = None
    for index, value in enumerate(values):
        if index % 2 == 0:
            current_rsc_group = sdt.tree.pnode(value)
        elif value == 1:  # only for openamp master
            if current_rsc_group is None:  # idiom fix: was `== None`
                print("invalid resource group phandle: ", value)
                return -1
            rsc_groups.append(current_rsc_group)
        else:
            print("only do processing in host openamp channel domain ", value)
            return -1
    return rsc_groups
# in this case remote is rpu
# find node that is other end of openamp channel
def find_remote(sdt, domain_node, rsc_group_node):
    """Find the domain on the other end of the OpenAMP channel: the first
    other /domains subnode whose "include" list references rsc_group_node.
    Returns the node, or -1 if none is found."""
    domains = sdt.tree["/domains"]
    for candidate in domains.subnodes():
        if candidate == domain_node:
            continue
        includes = candidate.propval("include")
        if includes == ['']:
            continue
        # a domain including the same resource group is the remote end
        for phandle in includes:
            included = sdt.tree.pnode(phandle)
            if included is not None and included == rsc_group_node:
                return candidate
    return -1
# tests whether bit k of n is set (bits numbered 31 -> 0, MSB to LSB)
def check_bit_set(n, k):
    """Return True when bit k (0-indexed from the LSB) of n is set."""
    return bool(n & (1 << k))
# return rpu cluster configuration
# rpu cpus property fields: Cluster | cpus-mask | execution-mode
#
#execution mode ARM-R CPUs:
#bit 30: lockstep (lockstep enabled == 1)
#bit 31: secure mode / normal mode (secure mode == 1)
# e.g. &cpus_r5 0x2 0x80000000>
# this maps to arg1 as rpu_cluster node
# arg2: cpus-mask: 0x2 is r5-1, 0x1 is r5-0, 0x3 is both nodes
# if 0x3/both cores and in split mode then two openamp channels must be provided,
# otherwise return error
# if lockstep valid cpus-mask is 0x3 needed to denote both being used
#
def construct_carveouts(sdt, rsc_group_node, core, openamp_app_inputs):
    """Create the four reserved-memory carveout nodes (elfload, two vrings,
    vdev buffer) for one RPU core and record their base/size in
    openamp_app_inputs. Returns the list of carveout phandles."""
    # Function-attribute "static" counter: it must persist across calls because
    # each openamp channel needs its own unique carveout phandles.
    if not hasattr(construct_carveouts,"carveout_phandle"):
        # it doesn't exist yet, so initialize it
        construct_carveouts.carveout_phandle = 0x5ed0
    # "memory" is a flat list of 2-cell address / 2-cell size quads; the low
    # cells are at index%4 == 1 (addr) and index%4 == 3 (size).
    mem_regions = [[0, 0] for _ in range(4)]
    mem_region_names = {
        0 : "elfload",
        1 : "vdev0vring0",
        2 : "vdev0vring1",
        3 : "vdev0buffer",
    }
    for index, value in enumerate(rsc_group_node["memory"].value):
        if index % 4 == 1:
            mem_regions[index//4][0] = value
        elif index % 4 == 3:
            mem_regions[index//4][1] = value
    carveout_phandle_list = []
    for i in range(4):
        name = "rpu" + str(core) + mem_region_names[i]
        addr, length = mem_regions[i]
        openamp_app_inputs[rsc_group_node.name + mem_region_names[i] + '_base'] = hex(addr)
        openamp_app_inputs[rsc_group_node.name + mem_region_names[i] + '_size'] = hex(length)
        new_node = LopperNode(-1, "/reserved-memory/" + name)
        new_node + LopperProp(name="no-map", value=[])
        new_node + LopperProp(name="reg", value=[0, addr, 0, length])
        new_node + LopperProp(name="phandle", value=construct_carveouts.carveout_phandle)
        # BUG FIX: previously `new_node.phandle = new_node` assigned the node
        # object itself; the attribute must carry the integer phandle that
        # matches the "phandle" property above.
        new_node.phandle = construct_carveouts.carveout_phandle
        sdt.tree.add(new_node)
        print("added node: ", new_node)
        carveout_phandle_list.append(construct_carveouts.carveout_phandle)
        construct_carveouts.carveout_phandle += 1
    return carveout_phandle_list
def construct_mem_region(sdt, domain_node, rsc_group_node, core, openamp_app_inputs):
    """Ensure /reserved-memory exists, then add this channel's carveouts.
    Returns construct_carveouts()'s phandle list."""
    try:
        res_mem_node = sdt.tree["/reserved-memory"]
        print("found pre-existing reserved mem node")
    except:
        # node missing: create it with the standard 2-cell addressing layout
        res_mem_node = LopperNode(-1, "/reserved-memory")
        for prop in (LopperProp(name="#address-cells", value=2),
                     LopperProp(name="#size-cells", value=2),
                     LopperProp(name="ranges", value=[])):
            res_mem_node + prop
        sdt.tree.add(res_mem_node)
        print("added reserved mem node ", res_mem_node)
    return construct_carveouts(sdt, rsc_group_node, core, openamp_app_inputs)
# set pnode id for current rpu node
def set_rpu_pnode(sdt, r5_node, rpu_config, core, platform, remote_domain):
    """Attach the power-management node id ("pnode-id") to the r5 node.

    :param rpu_config: "lockstep" or "split".
    :return: -1 on error; None on success.
    """
    if r5_node.propval("pnode-id") != ['']:
        print("pnode id already exists for node ", r5_node)
        return -1
    if platform == SOC_TYPE.VERSAL:
        rpu_pnodes = {0 : 0x18110005, 1 : 0x18110006}
    else:
        print("only versal supported for openamp domains")
        return -1
    # BUG FIX: the lockstep/split selection below was previously computed into
    # `rpu_pnode` but then discarded — the property always used
    # rpu_pnodes[core]. In lockstep mode core 0's pnode must be used.
    if rpu_config == "lockstep":
        rpu_pnode = rpu_pnodes[0]
    else:
        rpu_pnode = rpu_pnodes[core]
    r5_node + LopperProp(name="pnode-id", value = rpu_pnode)
    r5_node.sync(sdt.FDT)
    return
def setup_mbox_info(sdt, domain_node, r5_node, mbox_ctr):
    """Wire the mailbox controller onto the r5 node ("mboxes"/"mbox-names").
    Returns -1 when the controller lacks required properties, else None."""
    missing_reg_names = mbox_ctr.propval("reg-names") == ['']
    missing_ipi_id = mbox_ctr.propval("xlnx,ipi-id") == ['']
    if missing_reg_names or missing_ipi_id:
        print("invalid mbox ctr")
        return -1
    ctr_phandle = mbox_ctr.phandle
    r5_node + LopperProp(name="mboxes", value=[ctr_phandle, 0, ctr_phandle, 1])
    r5_node + LopperProp(name="mbox-names", value=["tx", "rx"])
    sdt.tree.sync()
    r5_node.sync(sdt.FDT)
    return
# based on rpu_cluster_config + cores determine which tcm nodes to use
# add tcm nodes to device tree
def setup_tcm_nodes(sdt, r5_node, platform, rsc_group_node):
    """Add tcm_remoteproc subnodes under the r5 node for every TCM bank
    referenced by the resource group's "access" list.

    :return: 0 on success, -1 for unsupported platforms.
    """
    if platform == SOC_TYPE.VERSAL:
        # TCM base address (hex string key) -> power-management node id
        tcm_pnodes = {
            "ffe00000" : 0x1831800b,
            "ffe20000" : 0x1831800c,
            "ffe90000" : 0x1831800d,
            "ffeb0000" : 0x1831800e,
        }
        # TCM base address string -> numeric base for the "reg" property
        tcm_to_hex = {
            "ffe00000" : 0xffe00000,
            "ffe20000" : 0xffe20000,
            "ffe90000" : 0xffe90000,
            "ffeb0000" : 0xffeb0000,
        }
    else:
        print("only versal supported for openamp domains")
        return -1
    # determine which tcm nodes to use based on access list in rsc group
    # (fixes: removed unused `tcm_nodes = {}`; `!= None` -> `is not None`)
    bank = 0
    for phandle_val in rsc_group_node["access"].value:
        tcm = sdt.tree.pnode(phandle_val)
        if tcm is not None:
            key = tcm.abs_path.split("@")[1]
            node_name = r5_node.abs_path + "/tcm_remoteproc" + str(bank) + "@" + key
            tcm_node = LopperNode(-1, node_name)
            tcm_node + LopperProp(name="pnode-id", value=tcm_pnodes[key])
            tcm_node + LopperProp(name="reg", value=[0, tcm_to_hex[key], 0, 0x10000])
            sdt.tree.add(tcm_node)
            bank += 1
            print('added ', tcm_node.abs_path)
    return 0
def setup_r5_core_node(rpu_config, sdt, domain_node, rsc_group_node, core, remoteproc_node, platform, remote_domain, mbox_ctr, openamp_app_inputs):
    """Create (or reuse) the /rpu@ff9a0000/r5_<core> node and populate its
    pnode-id, mailbox, memory-region and TCM children.

    :return: -1 on any failure; None otherwise.
    """
    # add r5 node if not present
    try:
        r5_node = sdt.tree["/rpu@ff9a0000/r5_" + str(core)]
        print("node already exists: ", r5_node)
    except:
        r5_node = LopperNode(-1, "/rpu@ff9a0000/r5_" + str(core))
        r5_node + LopperProp(name="#address-cells", value=2)
        r5_node + LopperProp(name="#size-cells", value=2)
        r5_node + LopperProp(name="ranges", value=[])
        sdt.tree.add(r5_node)
        print("added r5 node ", r5_node)
    print("add props for ", str(r5_node))
    # props
    ret = set_rpu_pnode(sdt, r5_node, rpu_config, core, platform, remote_domain)
    if ret == -1:
        print("set_rpu_pnode failed")
        return ret
    ret = setup_mbox_info(sdt, domain_node, r5_node, mbox_ctr)
    if ret == -1:
        print("setup_mbox_info failed")
        return ret
    carveout_phandle_list = construct_mem_region(sdt, domain_node, rsc_group_node, core, openamp_app_inputs)
    if carveout_phandle_list == -1:
        print("construct_mem_region failed")
        # BUG FIX: previously returned `ret`, which is None here on success of
        # the earlier calls, silently hiding the failure from the caller.
        return -1
    if carveout_phandle_list is not None:
        print("adding prop memory-region to ", r5_node)
        r5_node + LopperProp(name="memory-region", value=carveout_phandle_list)
    # tcm nodes
    for sub in r5_node.subnodes():
        if "tcm" in sub.abs_path:
            # BUG FIX: the original had a bare string expression here (a no-op
            # statement); emit the intended diagnostic instead.
            print("tcm nodes exist")
            return -1
    # tcm nodes do not exist. set them up
    setup_tcm_nodes(sdt, r5_node, platform, rsc_group_node)
# add props to remoteproc node
def set_remoteproc_node(remoteproc_node, sdt, rpu_config):
    """Populate the remoteproc node with its fixed set of properties
    (register window, cell sizes, core configuration and compatible)."""
    for prop in (
        LopperProp(name="reg", value = [0x0, 0xff9a0000, 0x0, 0x10000]),
        LopperProp(name="#address-cells", value=2),
        LopperProp(name="ranges", value=[]),
        LopperProp(name="#size-cells", value=2),
        LopperProp(name="core_conf", value=rpu_config),
        LopperProp(name="compatible", value="xlnx,zynqmp-r5-remoteproc-1.0"),
    ):
        remoteproc_node + prop
#
# NOTE(review): module-level fallback for `core`; construct_remoteproc_node()
# only assigns a local `core` on some branches — confirm whether this global
# default is intentional before removing it.
core = []
# this should only add nodes to tree
# openamp_app_inputs: dictionary to fill with openamp header info for openamp code base later on
def construct_remoteproc_node(remote_domain, rsc_group_node, sdt, domain_node, platform, mbox_ctr, openamp_app_inputs):
rpu_cluster_node = remote_domain.parent
rpu_config = None # split or lockstep
cpus_prop_val = rpu_cluster_node.propval("cpus")
if cpus_prop_val != ['']:
if len(cpus_prop_val) != 3:
print("rpu cluster cpu prop invalid len")
return -1
rpu_config = "lockstep" if check_bit_set(cpus_prop_val[2], 30)==True else "split"
if rpu_config == "lockstep":
core = 0
else:
if cpus_prop_val[1] == 3:
# if here this means that cluster is in split mode. look at which core from remote domain
core_prop_val = remote_domain.propval("cpus")
if core_prop_val == ['']:
print("no cpus val for core ", remote_domain)
else:
if core_prop_val[1] == 2:
core = 1
elif core_prop_val[1] == 1:
core = 0
else:
print("invalid cpu prop for core ", remote_domain, core_prop_val[1])
return -1
else:
print("invalid cpu prop for rpu: ",remote_domain, cpus_prop_val[1])
return -1
# only add remoteproc node if mbox is present in access list of domain node
# check domain's access list for mbox
has_corresponding_mbox = False
if domain_node.propval("access") != ['']:
for i in domain_node.propval("access"):
possible_mbox = sdt.tree.pnode(i)
if possible_mbox != None:
if possible_mbox.propval("reg-names") != ['']:
has_corresponding_mbox = True
# setup remoteproc node if not already present
remoteproc_node = None
try:
remoteproc_node = sdt.tree["/rpu@ff9a0000"]
except:
print("remoteproc node not present. now add it to tree")
remoteproc_node = LopperNode(-1, "/rpu@ff9a0000")
set_remoteproc_node(remoteproc_node, sdt, rpu_config)
| |
num_zero_pad)
lm_label_ids.extend([self.pad_id] * num_zero_pad)
return [input_ids, segment_ids, input_mask, is_random_next, lm_label_ids]
def get_random_token_id(self):
    """Return a random vocabulary token id (delegates to the tokenizer)."""
    return self.tokenizer.get_random_token_id()
def __str__(self):
    """The dataset's display name: the file stem of dataset_path."""
    base = os.path.basename(self.dataset_path)
    stem, _ext = os.path.splitext(base)
    return stem
class NextSentencePredictionDataset(Dataset):
    """Dataset for BERT pretraining: next-sentence prediction + masked LM.

    Samples are sentence pairs (50%真 subsequent, 50% random from another
    document), converted to token ids with masking applied. Documents may be
    supplied directly, loaded fully into memory, or streamed lazily from disk.
    """

    def __init__(self, tokenizer, max_pos, dataset_path=None, documents=None, encoding="utf-8", on_memory=True):
        """
        :param tokenizer: tokenizer providing tokenize/convert_tokens_to_ids.
        :param max_pos: int, maximum sequence length (including CLS/SEP).
        :param dataset_path: str or None, corpus file (one sentence per line,
            blank line between documents).
        :param documents: optional list of raw texts used instead of the file.
        :param encoding: file encoding for dataset_path.
        :param on_memory: load the whole corpus into memory when True.
        """
        # BUG FIX: `documents` previously used a mutable default argument ([]).
        if documents is None:
            documents = []
        self.tokenizer = tokenizer
        self.max_pos = max_pos
        if dataset_path is None and len(documents) == 0:
            raise ValueError('dataset_path or documents is required.')
        self.dataset_path = dataset_path
        self.on_memory = on_memory
        self.encoding = encoding
        self.current_doc = 0  # to avoid random sentence from same doc
        # for loading samples directly from file
        self.sample_counter = 0  # used to keep track of full epochs on file
        self.line_buffer = None  # keep second sentence of a pair in memory and use as first sentence in next pair
        # for loading samples in memory
        self.current_random_doc = 0
        self.sample_to_doc = []  # map sample index to doc and line
        # BERT reserved tokens
        self.pad_id = self.tokenizer.convert_tokens_to_ids(["[PAD]"])[0]
        self.cls_id = self.tokenizer.convert_tokens_to_ids(["[CLS]"])[0]
        self.sep_id = self.tokenizer.convert_tokens_to_ids(["[SEP]"])[0]
        self.mask_id = self.tokenizer.convert_tokens_to_ids(["[MASK]"])[0]
        self.corpus_lines = 0
        # load samples into memory
        if len(documents) > 0 or on_memory:
            self.all_documents = []
            doc = []
            if len(documents) > 0:
                for text in documents:
                    doc = self._load_text(doc, text)
            else:
                with open(dataset_path, "r", encoding=encoding) as reader:
                    for text in tqdm(reader, desc="Loading Dataset", total=self.corpus_lines):
                        doc = self._load_text(doc, text)
            # FIX to last rows ""... . last line is not "" and EOF
            if len(doc) > 0:
                self.all_documents.append(doc)
                self.sample_to_doc.pop()
            self.num_docs = len(self.all_documents)
            # BUG FIX: `is 0` compared int identity (SyntaxWarning on
            # modern Python); use equality. Error message also repaired.
            if len(self.all_documents) == 0:
                raise ValueError('{} does not contain any documents.'.format(dataset_path))
        # load samples later lazily from disk
        else:
            self.num_docs = 0
            with open(dataset_path, "r", encoding=encoding) as reader:
                for line in tqdm(reader, desc="Loading Dataset", total=self.corpus_lines):
                    if line.strip() == "":
                        self.num_docs += 1
                    else:
                        self.corpus_lines += 1
                # if doc does not end with empty line
                if line.strip() != "":
                    self.num_docs += 1
            # NOTE(review): these handles stay open for the dataset's lifetime
            # and are only re-opened (never finally closed) — confirm lifecycle.
            self.file = open(dataset_path, "r", encoding=encoding)
            self.random_file = open(dataset_path, "r", encoding=encoding)

    def _load_text(self, doc, text):
        """Append one corpus line to `doc`; a blank line closes the document."""
        text = text.strip()
        if text == "":
            if len(doc) > 0:  # FIX to last rows ""...
                self.all_documents.append(doc)
                doc = []
                # remove last added sample because there won't be a subsequent line anymore in the doc
                self.sample_to_doc.pop()
        else:
            # store as one sample
            sample = {"doc_id": len(self.all_documents), "line": len(doc)}
            self.sample_to_doc.append(sample)
            tokens = self.tokenizer.tokenize(text)
            doc.append(self.tokenizer.convert_tokens_to_ids(tokens))
            self.corpus_lines += 1
        return doc

    def __len__(self):
        # last line of doc won't be used, because there's no "nextSentence". Additionally, we start counting at 0.
        return self.corpus_lines - self.num_docs - 1  # self.num_docs = num_spaces

    def __getitem__(self, item):
        """Return one training sample as a list of long tensors."""
        if not self.on_memory:
            cur_id = self.sample_counter
            self.sample_counter += 1
            # after one epoch we start again from beginning of file
            if cur_id != 0 and (cur_id % len(self) == 0):
                self.file.close()
                self.file = open(self.dataset_path, "r", encoding=self.encoding)
        t1, t2, is_next_label = self.random_sent(item)
        # transform sample to features
        features = self.convert_example_to_features(t1, t2, is_next_label, self.max_pos)
        return [torch.tensor(x, dtype=torch.long) for x in features]

    def random_sent(self, index):
        """
        Get one sample from corpus consisting of two sentences. With prob. 50% these are two subsequent sentences
        from one doc. With 50% the second sentence will be a random one from another doc.
        :param index: int, index of sample.
        :return: (str, str, int), sentence 1, sentence 2, isNextSentence Label
        """
        t1, t2 = self.get_corpus_line(index)
        if random() > 0.5:
            label = 0
        else:
            t2 = self.get_random_line()
            label = 1
        return t1, t2, label

    def get_corpus_line(self, item):
        """
        Get one sample from corpus consisting of a pair of two subsequent lines from the same doc.
        :param item: int, index of sample.
        :return: (str, str), two subsequent sentences from corpus
        """
        t1 = ""
        t2 = ""
        assert isinstance(item, int), 'item only support int(index) access.'
        assert item < self.corpus_lines, 'item index out range corpus.'
        if self.on_memory:
            sample = self.sample_to_doc[item]
            t1 = self.all_documents[sample["doc_id"]][sample["line"]]
            t2 = self.all_documents[sample["doc_id"]][sample["line"] + 1]
            # used later to avoid random nextSentence from same doc
            self.current_doc = sample["doc_id"]
            return t1, t2
        else:
            if self.line_buffer is None:
                # read first non-empty line of file
                while t1 == "":
                    t1 = self.file.__next__().strip()
                t2 = self.file.__next__().strip()
            else:
                # use t2 from previous iteration as new t1
                t1 = self.line_buffer
                t2 = self.file.__next__().strip()
            # skip empty rows that are used for separating documents and keep track of current doc id
            while t2 == "" or t1 == "":
                t1 = self.file.__next__().strip()
                t2 = self.file.__next__().strip()
                self.current_doc = self.current_doc + 1
            self.line_buffer = t2
        t1 = self.tokenizer.convert_tokens_to_ids(self.tokenizer.tokenize(t1))
        t2 = self.tokenizer.convert_tokens_to_ids(self.tokenizer.tokenize(t2))
        return t1, t2

    def get_random_line(self):
        """
        Get random line from another document for nextSentence task.
        :return: str, content of one line
        """
        # Similar to original tf repo: This outer loop should rarely go for more than one iteration for large
        # corpora. However, just to be careful, we try to make sure that
        # the random document is not the same as the document we're processing.
        for _ in range(10):
            if self.on_memory:
                rand_doc_idx = randrange(len(self.all_documents))
                rand_doc = self.all_documents[rand_doc_idx]
                line = rand_doc[randrange(len(rand_doc))]
            else:
                rand_index = randint(1, self.corpus_lines if self.corpus_lines < 1000 else 1000)
                # pick random line
                for _ in range(rand_index):
                    line = self.get_next_line()
            # check if our picked random line is really from another doc like we want it to be
            if self.current_random_doc != self.current_doc:
                break
        return line

    def get_next_line(self):
        """ Gets next line of random_file and starts over when reaching end of file"""
        try:
            line = self.random_file.__next__().strip()
            # keep track of which document we are currently looking at to later avoid having the same doc as t1
            if line == "":
                self.current_random_doc = self.current_random_doc + 1
                line = self.random_file.__next__().strip()
        except StopIteration:
            self.random_file.close()
            self.random_file = open(self.dataset_path, "r", encoding=self.encoding)
            line = self.random_file.__next__().strip()
        line = self.tokenizer.convert_tokens_to_ids(self.tokenizer.tokenize(line))
        return line

    def get_random_token_id(self):
        """Return a random vocabulary token id (delegates to the tokenizer)."""
        return self.tokenizer.get_random_token_id()

    def convert_example_to_features(
            self, tokens_a, tokens_b, is_next_label, max_pos, short_seq_prob=0.1, masked_lm_prob=0.15):
        """
        Convert a raw sample (pair of sentences as tokenized strings) into a proper training sample with
        IDs, LM labels, input_mask, CLS and SEP tokens etc.
        :param tokens_a: str, example tokens.
        :param tokens_b: str, example next tokens.
        :param is_next_label: int, is next label.
        :param max_pos: int, maximum length of sequence.
        :param short_seq_prob: float, Probability of creating sequences which are shorter than the maximum length.
        :param masked_lm_prob: float, Masked LM probability.
        :return: features
        """
        target_max_pos = max_pos - 3 if tokens_b else max_pos - 2
        tokens_a_ids = copy.copy(tokens_a)
        tokens_b_ids = copy.copy(tokens_b)
        # However, sequences to minimize the mismatch between pre-training and fine-tuning.
        if random() < short_seq_prob:
            target_max_pos = randint(2, target_max_pos - 1)
        truncate_seq_pair(tokens_a_ids, tokens_b_ids, target_max_pos)
        # Add Special Tokens
        tokens_a_ids.insert(0, self.cls_id)
        tokens_a_ids.append(self.sep_id)
        if len(tokens_b_ids) != 0:
            tokens_b_ids.append(self.sep_id)
        else:
            tokens_b_ids = []
        tokens = copy.copy(tokens_a_ids)
        tokens.extend(copy.copy(tokens_b_ids))
        # Add next sentence segment
        segment_ids = [0] * len(tokens_a_ids) + [1] * len(tokens_b_ids)
        lm_label_ids = [self.pad_id] * len(tokens_a_ids) + [self.pad_id] * len(tokens_b_ids)
        # mask prediction calc
        mask_prediction = int(round(len(tokens) * masked_lm_prob))
        mask_candidate_pos = [i for i, token in enumerate(tokens) if token != self.cls_id and token != self.sep_id]
        # masked and random token
        shuffle(mask_candidate_pos)
        for pos in mask_candidate_pos[:mask_prediction]:
            if random() < 0.8:  # 80%
                # masked
                lm_label_ids[pos] = tokens[pos]
                tokens[pos] = self.mask_id
            elif random() < 0.5:  # 10%
                # random token
                lm_label_ids[pos] = tokens[pos]
                tokens[pos] = self.get_random_token_id()
            else:
                # 10% not mask and not modify
                lm_label_ids[pos] = tokens[pos]
        input_ids = tokens
        input_mask = [1] * len(input_ids)
        # zero padding
        num_zero_pad = max_pos - len(input_ids)
        input_ids.extend([self.pad_id] * num_zero_pad)
        segment_ids.extend([0] * num_zero_pad)
        input_mask.extend([0] * num_zero_pad)
        lm_label_ids.extend([self.pad_id] * num_zero_pad)
        return [input_ids, segment_ids, input_mask, is_next_label, lm_label_ids]

    def __str__(self):
        """The dataset's display name: the file stem of dataset_path."""
        name, _ = os.path.splitext(os.path.basename(self.dataset_path))
        return name
class PretrainDataGeneration(object):
def __init__(
    self,
    dataset_path='data/jawiki_norm.txt',
    output_path='data/jawiki_norm.pickle',
    vocab_path='config/wiki-ja_1003.vocab',
    sp_model_path='config/wiki-ja_1003.model',
    max_pos=512,
    epochs=20,
    tokenizer_name='sp_pos',
    task='mlm',
    sentence_stack=True,
    pickle_path=None
):
    """Build the tokenizer and select the dataset implementation for `task`
    ('sop' / 'mlm' use StackedSentenceDataset, anything else falls back to
    NextSentencePredictionDataset)."""
    tokenizer = get_tokenizer(
        vocab_path=vocab_path, sp_model_path=sp_model_path, name=tokenizer_name)
    common = dict(tokenizer=tokenizer, max_pos=max_pos, dataset_path=dataset_path)
    if task == 'sop':
        self.dataset = StackedSentenceDataset(
            pickle_path=pickle_path, is_sop=True, **common)
    elif task == 'mlm':
        self.dataset = StackedSentenceDataset(
            sentence_stack=sentence_stack, pickle_path=pickle_path, **common)
    else:
        self.dataset = NextSentencePredictionDataset(on_memory=True, **common)
    self.output_path = output_path
    self.epochs = epochs
def generate_text_tensor(self):
    """Dump the tokenized documents to output_path; only supported when the
    underlying dataset is a StackedSentenceDataset.

    :raises TypeError: for unsupported dataset classes.
    """
    if not isinstance(self.dataset, StackedSentenceDataset):
        # BUG FIX: the original did `raise('...')`, which raises a TypeError
        # ("exceptions must derive from BaseException") instead of a useful
        # error, and referenced `____class__` (four underscores), an
        # AttributeError. Raise a proper exception with the class name.
        raise TypeError('Not support dataset class {}'.format(self.dataset.__class__.__name__))
    self.dataset.dump_ids_documents(self.output_path)
def generate(self, is_gzip=True):
sampler = RandomSampler(self.dataset)
gen_dataloader = DataLoader(self.dataset, sampler=sampler, batch_size=1)
for e in range(self.epochs):
iter_bar = tqdm(
gen_dataloader, "generate pretrain input file")
if is_gzip:
with gzip.open(self.output_path + '_' | |
import numpy as np
import torch
import torch.nn.functional as F
import torch.nn as nn
from torch.distributions.utils import log_sum_exp
from torch import optim
from torch.autograd import Variable
import util
import coref_ops
import conll
import metrics
import scipy.misc as sp
class CorefModel(nn.Module):
def __init__(self, config):
    """Build all learned parameters and sub-modules from `config`.

    :param config: dict-like experiment configuration (embedding specs,
        char vocab path, feature sizes, genres, mention width, ...).
    """
    super(CorefModel, self).__init__()
    # BUG FIX: `self.config = config` was assigned twice; keep one.
    self.config = config
    self.embedding_info = [(emb["size"], emb["lowercase"]) for emb in
                           config["embeddings"]]  # [(300,false)(50,false)]
    self.embedding_size = sum(size for size, _ in self.embedding_info)  # 350 = 300+50
    self.char_embedding_size = config["char_embedding_size"]  # 8
    self.char_dict = util.load_char_dict(config["char_vocab_path"])  # all characters + <unk> size 115
    self.max_mention_width = config["max_mention_width"]  # 10
    self.genres = {g: i for i, g in enumerate(config["genres"])}
    self.char_embeddings = nn.Parameter(torch.randn([len(self.char_dict), self.config["char_embedding_size"]]))
    self.char_cnn = CNN()
    # TODO check if the input to the BILSTM should be a pack(_padded)_sequence so that minibatches can be used
    self.bilstm = nn.LSTM(input_size=500, hidden_size=200, num_layers=1, dropout=0.2, bidirectional=True)
    self.genre_tensor = nn.Parameter(torch.randn([len(self.genres), self.config["feature_size"]]))
    self.mention_width_tensor = nn.Parameter(torch.randn([self.config["max_mention_width"], self.config["feature_size"]]))
    self.head_scores = nn.Linear(400, 1)
    self.mention = FFNNMention()
    self.same_speaker_emb = nn.Parameter(torch.randn([2, self.config["feature_size"]]))
    self.mention_distance_emb = nn.Parameter(torch.zeros([10, self.config["feature_size"]]))
    self.antecedent = FFNNAntecedent()
    nn.init.xavier_uniform_(self.char_embeddings)
    # NOTE(review): weights_init checks isinstance(m, Conv1d/Linear) but these
    # three calls pass a parameters() *generator*, so they are currently
    # no-ops. If initialization is intended, use module.apply(self.weights_init)
    # instead — confirm before changing, as it alters initial weights.
    self.weights_init(self.char_cnn.parameters())
    self.hidden = self.bilstm_init(self.bilstm.hidden_size)
    nn.init.xavier_uniform_(self.genre_tensor)
    nn.init.xavier_uniform_(self.mention_width_tensor)
    self.weights_init(self.mention.parameters())
    nn.init.xavier_uniform_(self.same_speaker_emb)
    nn.init.xavier_uniform_(self.mention_distance_emb)
    self.weights_init(self.antecedent.parameters())
# coreference score = mention score span 1 + mention score span 2 + pairwise antecedent score of both spans
def bilstm_init(self, hidden_dim, num_layers=1):
    """Return orthogonally-initialized (h_0, c_0) states for the biLSTM,
    each of shape [2 directions, num_layers, hidden_dim]."""
    states = []
    for _ in range(2):  # one tensor for h_0, one for c_0
        state = torch.randn(2, num_layers, hidden_dim)
        nn.init.orthogonal_(state)
        states.append(state)
    return states[0], states[1]
def weights_init(self, m):
    """Xavier-initialize Conv1d/Linear weights and zero their biases.

    BUG FIX: the original applied xavier_uniform_ to the bias as well, but
    xavier initialization needs fan-in/fan-out and raises ValueError on 1-D
    tensors — the call crashed whenever this branch was actually reached.
    Biases are zeroed instead (the standard choice).
    """
    if isinstance(m, (nn.Conv1d, nn.Linear)):
        nn.init.xavier_uniform_(m.weight.data)
        if m.bias is not None:
            nn.init.zeros_(m.bias.data)
def forward(self, word_emb, char_index, text_len, speaker_ids, genre, is_training, gold_starts, gold_ends, cluster_ids):
    """Score candidate mention spans and their antecedents for one document.

    Pipeline: word+char embeddings -> biLSTM encoding -> enumerate candidate
    spans -> mention scoring -> keep top k spans -> pairwise antecedent
    scoring -> marginal log-likelihood loss over gold antecedents.

    Returns ([candidate_starts, candidate_ends, candidate_mention_scores,
    mention_starts, mention_ends, antecedents, antecedent_scores], loss).

    NOTE(review): self.dropout / self.lexical_dropout are *keep* rates
    (1 - rate), but F.dropout's second argument is the *drop* probability —
    this looks inverted; confirm intended behaviour.
    """
    training_num = 0.0
    if is_training == 1:
        training_num = 1.0
    self.dropout = 1 - (training_num * self.config["dropout_rate"])  # 0.2
    self.lexical_dropout = 1 - (training_num * self.config["lexical_dropout_rate"])  # 0.5
    num_sentences = word_emb.shape[0]  # number of sentences to predict from
    max_sentence_length = word_emb.shape[1]  # maybe caused by applying padding to the dataset to have all sentences in the same shape
    text_emb_list = [word_emb]  # 3D tensor added in an array
    if self.config["char_embedding_size"] > 0:  # true is 8
        # Look up a char embedding per character, per word, per sentence.
        char_emb = torch.index_select(self.char_embeddings,
                                      0,
                                      char_index.view(-1)).view(num_sentences,
                                                                max_sentence_length,
                                                                -1,
                                                                self.config["char_embedding_size"])
        # [num_sentences, max_sentence_length, max_word_length, emb]
        # [a vector of embedding 8 for each character for each word for each sentence for all sentences]
        # (according to longest word and longest sentence)
        flattened_char_emb = char_emb.view([num_sentences * max_sentence_length,
                                            util.shape(char_emb, 2),
                                            util.shape(char_emb, 3)])
        # [num_sentences * max_sentence_length, max_word_length, emb]
        flattened_aggregated_char_emb = self.char_cnn(flattened_char_emb)
        # [num_sentences * max_sentence_length, emb] character level CNN
        aggregated_char_emb = flattened_aggregated_char_emb.view([num_sentences, max_sentence_length,
                                                                  util.shape(flattened_aggregated_char_emb,
                                                                             1)])
        # [num_sentences, max_sentence_length, emb]
        text_emb_list.append(aggregated_char_emb)
    text_emb = torch.cat(text_emb_list, 2)
    text_emb = F.dropout(text_emb, self.lexical_dropout)
    # Mask out the padding positions, then flatten to word granularity.
    text_len_mask = self.sequence_mask(text_len, max_len=max_sentence_length)
    text_len_mask = text_len_mask.view(num_sentences * max_sentence_length)
    text_outputs = self.encode_sentences(text_emb, text_len, text_len_mask)
    text_outputs = F.dropout(text_outputs, self.dropout)
    genre_emb = self.genre_tensor[genre]  # [emb]
    sentence_indices = torch.unsqueeze(torch.arange(num_sentences), 1).repeat(1, max_sentence_length)
    # [num_sentences, max_sentence_length]
    # TODO make sure self.flatten_emb_by_sentence works as expected
    flattened_sentence_indices = self.flatten_emb_by_sentence(sentence_indices, text_len_mask)  # [num_words]
    flattened_text_emb = self.flatten_emb_by_sentence(text_emb, text_len_mask)  # [num_words]
    # Enumerate all spans up to max_mention_width that stay within a sentence.
    candidate_starts, candidate_ends = coref_ops.coref_kernels_spans(
        sentence_indices=flattened_sentence_indices,
        max_width=self.max_mention_width)
    candidate_mention_emb = self.get_mention_emb(flattened_text_emb, text_outputs, candidate_starts,
                                                 candidate_ends)  # [num_candidates, emb]
    # this is now a nn candidate_mention_scores = self.get_mention_scores(candidate_mention_emb) # [num_mentions, 1]
    candidate_mention_scores = self.mention(candidate_mention_emb)
    candidate_mention_scores = torch.squeeze(candidate_mention_scores, 1)  # [num_mentions]
    # Keep the top mention_ratio * num_words spans by mention score.
    k = int(np.floor(float(text_outputs.shape[0]) * self.config["mention_ratio"]))
    predicted_mention_indices = coref_ops.coref_kernels_extract_mentions(candidate_mention_scores, candidate_starts,
                                                                         candidate_ends, k)  # ([k], [k])
    # predicted_mention_indices.set_shape([None])
    mention_starts = torch.index_select(candidate_starts, 0, predicted_mention_indices.type(torch.LongTensor))  # [num_mentions]
    mention_ends = torch.index_select(candidate_ends, 0, predicted_mention_indices.type(torch.LongTensor))  # [num_mentions]
    mention_emb = torch.index_select(candidate_mention_emb, 0, predicted_mention_indices.type(torch.LongTensor))  # [num_mentions, emb]
    mention_scores = torch.index_select(candidate_mention_scores, 0, predicted_mention_indices.type(torch.LongTensor))  # [num_mentions]
    mention_start_emb = torch.index_select(text_outputs, 0, mention_starts.type(torch.LongTensor))  # [num_mentions, emb]
    mention_end_emb = torch.index_select(text_outputs, 0, mention_ends.type(torch.LongTensor))  # [num_mentions, emb]
    mention_speaker_ids = torch.index_select(speaker_ids, 0, mention_starts.type(torch.LongTensor))  # [num_mentions]
    max_antecedents = self.config["max_antecedents"]
    antecedents, antecedent_labels, antecedents_len = coref_ops.coref_kernels_antecedents(mention_starts,
                                                                                          mention_ends,
                                                                                          gold_starts,
                                                                                          gold_ends,
                                                                                          cluster_ids,
                                                                                          max_antecedents)
    # ([num_mentions, max_ant], [num_mentions, max_ant + 1], [num_mentions]
    antecedent_scores = self.get_antecedent_scores(mention_emb, mention_scores, antecedents, antecedents_len,
                                                   mention_starts, mention_ends, mention_speaker_ids,
                                                   genre_emb)  # [num_mentions, max_ant + 1]
    loss = self.softmax_loss(antecedent_scores, antecedent_labels)  # [num_mentions]
    # NOTE(review): loss2 is computed but never used — confirm before removing.
    loss2 = F.multilabel_margin_loss(antecedent_scores, antecedent_labels.type(torch.LongTensor))
    loss = torch.sum(loss)  # []
    return [candidate_starts, candidate_ends, candidate_mention_scores, mention_starts, mention_ends, antecedents,
            antecedent_scores], loss
def softmax_loss(self, antecedent_scores, antecedent_labels):
    """Negative marginal log-likelihood of the gold antecedents.
    Returns a [num_mentions] tensor."""
    # log(labels) is 0 for gold entries and -inf otherwise, so adding it
    # masks non-gold scores out of the marginalization.
    gold_mask_log = torch.log(antecedent_labels.type(torch.FloatTensor))
    gold_scores = antecedent_scores + gold_mask_log  # [num_mentions, max_ant + 1]
    marginalized_gold = self.logsumexp(gold_scores, 1, keepdims=True)
    log_norm = self.logsumexp(antecedent_scores, 1, keepdims=True)
    return log_norm - marginalized_gold
def logsumexp(self, x, dim=1, keepdims=True):
    """Numerically-stable log-sum-exp along `dim` (flattens when dim is None).

    NOTE(review): rows whose max is +/-inf yield 0, matching the original —
    confirm this is the intended saturation behaviour.
    """
    if dim is None:
        x = x.view(-1)
        dim = 0
    xm, _ = torch.max(x, dim, keepdim=True)
    saturated = (xm == float('inf')) | (xm == float('-inf'))
    stable = xm + torch.log(torch.sum(torch.exp(x - xm), dim, keepdim=True))
    out = torch.where(saturated, torch.zeros(xm.shape), stable)
    return out if keepdims else out.squeeze(dim)
def reverse_tensor(self, tensor, seq_lengths, seq_dim, batch_dim):
    """Reverse *tensor* along its first dimension.

    NOTE(review): seq_lengths, seq_dim and batch_dim are currently unused —
    the original TODO suggests a per-sequence reverse (tf.reverse_sequence)
    was intended; confirm before relying on padded positions.
    """
    reversed_index = torch.LongTensor(list(range(tensor.size(0) - 1, -1, -1)))
    return tensor.index_select(0, reversed_index)
def sequence_mask(self, lengths, max_len=None):
    """Build a [batch, max_len] boolean mask, True where index < length.

    Appears to mirror tf.sequence_mask.  *lengths* is a 1-D integer tensor;
    when *max_len* is None it defaults to lengths.max().

    BUG FIX: the original used ``max_len = max_len or lengths.max()``, which
    silently replaced a legitimate ``max_len=0`` (empty mask) with the data
    maximum.  An explicit ``is None`` test keeps 0 meaningful.
    """
    batch_size = lengths.numel()
    if max_len is None:
        max_len = lengths.max()
    return (torch.arange(0, max_len)
            .type_as(lengths)
            .repeat(batch_size, 1)
            .lt(lengths.unsqueeze(1)))
# text_emb = the 500d embedding of text
# text_len = length of text
# text_len_mask = a mask of 0 and 1
def encode_sentences(self, text_emb, text_len, text_len_mask):
    """Encode padded sentences with the BiLSTM and flatten out padding.

    text_emb: [num_sentences, max_sentence_length, emb] padded embeddings.
    text_len: per-sentence lengths (currently unused by the nn.LSTM path).
    text_len_mask: boolean mask selecting real (non-pad) positions.
    Returns the flattened per-word encoder outputs.
    """
    num_sentences = text_emb.shape[0]
    # nn.LSTM expects time-major input: [max_sentence_length, num_sentences, emb].
    lstm_inputs = torch.transpose(text_emb, 0, 1)
    # Fresh hidden/cell state for this batch of sentences.
    self.hidden = self.bilstm_init(self.bilstm.hidden_size, num_sentences)
    encoded, self.hidden = self.bilstm(lstm_inputs, self.hidden)
    # Back to batch-major before masking away the padding.
    encoded = torch.transpose(encoded, 0, 1)
    return self.flatten_emb_by_sentence(encoded, text_len_mask)
def flatten_emb_by_sentence(self, emb, text_len_mask):
    """Collapse [num_sentences, max_len(, emb)] to a flat tensor of real tokens.

    Padding positions (False in *text_len_mask*) are dropped.  Supports
    rank-2 (scalar per token) and rank-3 (vector per token) inputs.
    """
    num_sentences, max_sentence_length = emb.shape[0], emb.shape[1]
    rank = len(emb.shape)
    if rank == 2:
        flat = emb.contiguous().view([num_sentences * max_sentence_length])
        return torch.masked_select(flat, text_len_mask.view(-1))
    if rank == 3:
        depth = util.shape(emb, rank - 1)
        flat = emb.contiguous().view(num_sentences * max_sentence_length, depth)
        kept = torch.masked_select(flat, text_len_mask.view(-1, 1))
        return kept.view(-1, depth)
    raise ValueError("Unsupported rank: {}".format(rank))
def get_mention_emb(self, text_emb, text_outputs, mention_starts, mention_ends):
    """Build span (mention) representations from word-level tensors.

    Concatenates, per mention: start-word output, end-word output, an
    optional width embedding, and an optional attention-weighted head
    embedding.  text_outputs is indexed along dim 0 by word position
    (assumes [num_words, emb] — TODO confirm); mention_starts/ends are
    inclusive word indices.
    """
    mention_emb_list = []
    # Encoder outputs at span boundaries.
    mention_start_emb = torch.index_select(text_outputs, 0, mention_starts.type(torch.LongTensor))  # [num_mentions, emb]
    mention_emb_list.append(mention_start_emb)
    mention_end_emb = torch.index_select(text_outputs, 0, mention_ends.type(torch.LongTensor))  # [num_mentions, emb]
    mention_emb_list.append(mention_end_emb)
    mention_width = 1 + mention_ends - mention_starts  # [num_mentions]; ends are inclusive
    if self.config["use_features"]:
        # Width feature: widths are 1-based, the embedding table is 0-based.
        mention_width_index = mention_width - 1  # [num_mentions]
        mention_width_emb = torch.index_select(self.mention_width_tensor, 0, mention_width_index.type(torch.LongTensor))  # [num_mentions, emb]
        mention_width_emb = F.dropout(mention_width_emb, self.dropout)
        mention_emb_list.append(mention_width_emb)
    if self.config["model_heads"]:
        # Per-mention word indices: start + [0..max_mention_width).
        mention_indices = torch.unsqueeze(torch.arange(self.config["max_mention_width"]).type(torch.IntTensor), 0) \
            + torch.unsqueeze(mention_starts, 1)  # [num_mentions, max_mention_width]
        # Clamp out-of-document indices to the last valid word (in-place).
        min_dim_val = util.shape(text_outputs, 0) - 1
        mention_indices[mention_indices > min_dim_val] = min_dim_val  # [num_mentions, max_mention_width]
        mention_text_emb = torch.index_select(text_emb, 0, mention_indices.type(torch.LongTensor).view(-1)).view(
            mention_indices.shape[0], mention_indices.shape[1], text_emb.shape[1])
        # [num_mentions, max_mention_width, emb]
        head_scores = self.head_scores(text_outputs)  # [num_words, 1]
        mention_head_scores = torch.index_select(head_scores, 0, mention_indices.type(torch.LongTensor).view(-1)).view(
            mention_indices.shape[0], mention_indices.shape[1], 1)
        # [num_mentions, max_mention_width, 1]
        # log(mask) = -inf outside the true span, so softmax ignores padding.
        mention_mask = torch.unsqueeze(
            self.sequence_mask(mention_width, self.config["max_mention_width"]).type(torch.FloatTensor),
            2)  # [num_mentions, max_mention_width, 1]
        mention_attention = F.softmax(mention_head_scores + torch.log(mention_mask),
                                      dim=1)  # [num_mentions, max_mention_width, 1]
        mention_head_emb = torch.sum(mention_attention * mention_text_emb, 1)  # [num_mentions, emb]
        mention_emb_list.append(mention_head_emb)
    mention_emb = torch.cat(mention_emb_list, 1)  # [num_mentions, emb]
    return mention_emb
def get_antecedent_scores(self, mention_emb, mention_scores, antecedents, antecedents_len, mention_starts,
mention_ends, mention_speaker_ids, genre_emb):
num_mentions = util.shape(mention_emb, 0)
max_antecedents = util.shape(antecedents, 1)
feature_emb_list = []
if self.config["use_metadata"]:
antecedent_speaker_ids = torch.index_select(mention_speaker_ids, 0, antecedents.view(-1).type(
torch.LongTensor)).view(num_mentions, max_antecedents) # [num_mentions, max_ant]
same_speaker = torch.unsqueeze(mention_speaker_ids, 1) == antecedent_speaker_ids # [num_mentions, max_ant]
speaker_pair_emb = torch.index_select(self.same_speaker_emb, 0,
same_speaker.view(-1).long()).view(num_mentions, max_antecedents, -1) # [num_mentions, max_ant, emb]
feature_emb_list.append(speaker_pair_emb)
tiled_genre_emb = torch.unsqueeze(torch.unsqueeze(genre_emb, 0), 0).repeat([num_mentions, max_antecedents, 1]) # [num_mentions, max_ant, emb]
feature_emb_list.append(tiled_genre_emb)
if self.config["use_features"]:
target_indices = torch.arange(num_mentions) # [num_mentions]
mention_distance = torch.unsqueeze(target_indices, | |
most the number of
elements in the array.
Returns
-------
indices : array
The indices in which the array should be split.
Notes
-----
Solution from https://stackoverflow.com/a/54024280
"""
array = np.atleast_1d(array).ravel()
if parts > array.size:
raise ValueError(
"Cannot partition an array of size {} into {} parts of equal sum.".
format(array.size, parts))
cumulative_sum = array.cumsum()
# Ideally, we want each part to have the same number of points (total /
# parts).
ideal_sum = cumulative_sum[-1] // parts
# If the parts are ideal, the cumulative sum of each part will be this
ideal_cumsum = np.arange(1, parts) * ideal_sum
indices = np.searchsorted(cumulative_sum, ideal_cumsum, side="right")
# Check for repeated split points, which indicates that there is no way to
# split the array.
if np.unique(indices).size != indices.size:
raise ValueError(
"Could not find partition points to split the array into {} parts "
"of equal sum.".format(parts))
return indices
class _BaseSpatialCrossValidator(BaseCrossValidator, metaclass=ABCMeta):
    """
    Base class for spatial cross-validators.

    Parameters
    ----------
    n_groups : int
        Number of groups to create.  Passed as ``n_clusters=n_groups`` to
        KMeans and ``n_components=n_groups`` to the GMM.
    coordinates : np.array
        Array of coordinate pairs, e.g.
        ``np.array([[3337270., 262400.], [3441390., -273060.], ...])``.
    method : str
        Algorithm used to separate data points: 'KMeans' or 'GMM'.
    max_distance : int
        Maximum intra-cluster distance for hierarchical clustering.
    n_splits : int
        Number of splitting iterations.
    """

    def __init__(self,
                 n_groups=None,
                 coordinates=None,
                 method=None,
                 max_distance=None,
                 n_splits=None):
        self.n_groups = n_groups
        self.coordinates = coordinates
        self.method = method
        self.max_distance = max_distance
        self.n_splits = n_splits

    def split(self, X, y=None, groups=None):
        """
        Generate indices to split data into training and test set.

        Parameters
        ----------
        X : array-like, shape (n_samples, 2)
            Easting and northing coordinates of the data points.
        y : array-like, shape (n_samples,)
            Target variable; always ignored.
        groups : array-like, shape (n_samples,), optional
            Group labels; always ignored.

        Yields
        ------
        train : ndarray
            Training set indices for that split.
        test : ndarray
            Testing set indices for that split.
        """
        n_columns = X.shape[1]
        if n_columns != 2:
            raise ValueError(
                "X (the coordinate data) must have exactly 2 columns ({} given)."
                .format(n_columns))
        # Delegate the actual iteration to sklearn's BaseCrossValidator.
        yield from super().split(X, y, groups)

    def get_n_splits(self, X=None, y=None, groups=None):
        """
        Return the number of splitting iterations in the cross-validator.

        All parameters are ignored; they exist for sklearn compatibility.

        Returns
        -------
        n_splits : int
        """
        return self.n_splits

    @abstractmethod
    def _iter_test_indices(self, X=None, y=None, groups=None):
        """
        Generate integer indices corresponding to test sets.

        MUST BE IMPLEMENTED BY DERIVED CLASSES.

        Parameters
        ----------
        X : array-like, shape (n_samples, 2)
            Easting and northing coordinates of the data points.
        y : array-like, shape (n_samples,)
            Target variable; always ignored.
        groups : array-like, shape (n_samples,), optional
            Group labels; always ignored.

        Yields
        ------
        test : ndarray
            Testing set indices for that split.
        """
class _SpatialShuffleSplit(_BaseSpatialCrossValidator):
"""
Random permutation of spatial cross-validator.
Yields indices to split data into training and test sets. Data are first
grouped into clusters using either a KMeans or GMM algorithm
and are then split into testing and training sets randomly.
The proportion of clusters assigned to each set is controlled by *test_size*
and/or *train_size*. However, the total amount of actual data points in
each set could be different from these values since clusters can have
a different number of data points inside them. To guarantee that the
proportion of actual data is as close as possible to the proportion of
clusters, this cross-validator generates an extra number of splits and
selects the one with proportion of data points in each set closer to the
desired amount. The number of balance splits per
iteration is controlled by the *balance* argument.
This cross-validator is preferred over `sklearn.model_selection.ShuffleSplit`
for spatial data to avoid overestimating cross-validation scores.
This can happen because of the inherent spatial autocorrelation.
Parameters
----------
n_groups : int
The number of groups to create. This is passed as 'n_clusters=n_groups'
for the KMeans algo, and 'n_components=n_groups' for the GMM. If using
cluster_method='Hierarchical' then this parameter is ignored.
coordinates : np.array
A numpy array of coordinate values e.g.
np.array([[3337270., 262400.],
[3441390., -273060.], ...])
cluster_method : str
Which algorithm to use to separate data points. Either 'KMeans', 'GMM', or
'Hierarchical'
max_distance : int
If method is set to 'hierarchical' then maximum distance describes the
maximum euclidean distances between all observations in a cluster. 'n_groups'
is ignored in this case.
n_splits : int,
Number of re-shuffling & splitting iterations.
test_size : float, int, None
If float, should be between 0.0 and 1.0 and represent the proportion
of the dataset to include in the test split. If int, represents the
absolute number of test samples. If None, the value is set to the
complement of the train size. If ``train_size`` is also None, it will
be set to 0.1.
train_size : float, int, or None
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the train split. If
int, represents the absolute number of train samples. If None,
the value is automatically set to the complement of the test size.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
balance : int
The number of splits generated per iteration to try to balance the
amount of data in each set so that *test_size* and *train_size* are
respected. If 1, then no extra splits are generated (essentially
disabling the balancing). Must be >= 1.
**kwargs : optional,
Additional keyword arguments to pass to sklearn.cluster.KMeans or
sklearn.mixture.GaussianMixture depending on the cluster_method argument.
Returns
--------
generator
containing indices to split data into training and test sets
"""
def __init__(self,
             n_groups=None,
             coordinates=None,
             # NOTE(review): 'Heirachical' looks like a misspelling of
             # 'Hierarchical' (the spelling used everywhere in the class
             # docstring); as written, the default probably never matches
             # the hierarchical branch in spatial_clusters() — confirm
             # before changing.
             method='Heirachical',
             max_distance=None,
             n_splits=None,
             test_size=0.15,
             train_size=None,
             random_state=None,
             balance=10,
             **kwargs):
    # NOTE(review): **kwargs is forwarded to the base __init__, which
    # accepts no extra keyword arguments — any non-empty kwargs would raise
    # TypeError here. Verify against _BaseSpatialCrossValidator.
    super().__init__(n_groups=n_groups,
                     coordinates=coordinates,
                     method=method,
                     max_distance=max_distance,
                     n_splits=n_splits,
                     **kwargs)
    # *balance* extra candidate splits are drawn per iteration; 1 disables it.
    if balance < 1:
        raise ValueError(
            "The *balance* argument must be >= 1. To disable balance, use 1."
        )
    self.test_size = test_size
    self.train_size = train_size
    self.random_state = random_state
    self.balance = balance
    # Kept for the clustering call in _iter_test_indices.
    self.kwargs = kwargs
def _iter_test_indices(self, X=None, y=None, groups=None):
"""
Generates integer indices corresponding to test sets.
Runs several iterations until a split is found that yields clusters with
the right amount of data points in it.
Parameters
----------
X : array-like, shape (n_samples, 2)
Columns should be the easting and northing coordinates of data
points, respectively.
y : array-like, shape (n_samples,)
The target variable for supervised learning problems. Always
ignored.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set. Always ignored.
Yields
------
test : ndarray
The testing set indices for that split.
"""
labels = spatial_clusters(n_groups=self.n_groups,
coordinates=self.coordinates,
method=self.method,
max_distance=self.max_distance,
**self.kwargs)
cluster_ids = np.unique(labels)
# Generate many more splits so that we can pick and choose the ones
# that have the right balance of training and testing data.
shuffle = ShuffleSplit(
n_splits=self.n_splits * self.balance,
test_size=self.test_size,
train_size=self.train_size,
random_state=self.random_state,
).split(cluster_ids)
for _ in range(self.n_splits):
test_sets, balance = [], []
for _ in range(self.balance):
# This is a false positive in pylint which is why the warning
# is disabled at the top of this file:
# https://github.com/PyCQA/pylint/issues/1830
# pylint: disable=stop-iteration-return
train_clusters, test_clusters = next(shuffle)
# pylint: enable=stop-iteration-return
train_points = np.where(
np.isin(labels, cluster_ids[train_clusters]))[0]
test_points = np.where(
np.isin(labels, cluster_ids[test_clusters]))[0]
# The proportion of data points assigned to each group should
# be close the proportion of clusters assigned to each group.
balance.append(
abs(train_points.size / | |
##############################################################################
# Copyright (C) 2008 Novell Inc. All rights reserved.
# Copyright (C) 2008 SUSE Linux Products GmbH. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the name of Novell Inc. nor of SUSE Linux Products GmbH nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL Novell Inc. OR SUSE Linux Products GmbH OR
# THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##############################################################################
# Author: <NAME> <<EMAIL>>
from pywbem.cim_provider2 import ProviderProxy
import pywbem
import types
import syslog
import sys
import cmpi
##==============================================================================
##
## _exception_to_error()
##
## This function converts a cmpi.CMPIException to a pywbem.CIMError.
##
##==============================================================================
def _exception_to_error(ex):
    """Translate a cmpi.CMPIException into a pywbem.CIMError.

    CMPI status codes outside the valid CIM range [0, 17] are folded into
    CIM_ERR_FAILED, with the raw code preserved in the description text.
    """
    code = ex.get_error_code()
    desc = ex.get_description()
    if not 0 <= code <= 17:
        desc = str(code) if desc is None else str(code) + ':' + desc
        code = pywbem.CIM_ERR_FAILED
    return pywbem.CIMError(code, desc)
##==============================================================================
##
## ExceptionMethodWrapper
##
## This class puts an exception translation block around any method. This
## block catches a cmpi.CMPIException, converts it a pywbem.CIMError, and
## raises the new exception.
##
##==============================================================================
class ExceptionMethodWrapper:
    """Callable proxy that invokes *meth* and re-raises any
    cmpi.CMPIException as the corresponding pywbem.CIMError, preserving
    the original traceback (Python 2 three-argument raise syntax)."""
    def __init__(self, meth):
        # Bound method (or any callable) to wrap.
        self.meth = meth
    def __call__(self, *args, **kwds):
        try:
            return self.meth(*args, **kwds)
        except cmpi.CMPIException,e:
            # Keep the traceback of the original failure point.
            exc_class, exc, tb = sys.exc_info()
            new_exc = _exception_to_error(e)
            raise new_exc.__class__, new_exc, tb
##==============================================================================
##
## ExceptionClassWrapper
##
## This class puts an exception translation block around all methods of any
## class. It creates an ExceptionMethodWrapper to invoke each method. For
## example, the following snippet wraps an instance of the Gadget class.
##
## g = Gadget()
## w = ExceptionClassWrapper(g)
## w.foo() # call g.foo() with exception translation block around it.
##
##==============================================================================
class ExceptionClassWrapper:
    """Attribute proxy that wraps every method of *obj* in an
    ExceptionMethodWrapper, so CMPI exceptions raised by any call are
    translated to pywbem errors.  Non-method attributes pass through."""

    def __init__(self, obj):
        self.obj = obj

    def __getattr__(self, name):
        target = getattr(self.obj, name)
        if type(target) is types.MethodType:
            return ExceptionMethodWrapper(target)
        return target
##==============================================================================
##
## _mwrap()
##
## Wrap a method in a try block.
##
##==============================================================================
def _mwrap(obj, meth, *args, **kwds):
    """Invoke a method of *obj* inside a CMPI->pywbem exception-translation
    block.

    NOTE(review): ``obj.meth`` looks up an attribute literally named
    "meth", not the *meth* argument — looks like a bug; presumably
    ``getattr(obj, meth)(*args, **kwds)`` (or calling *meth* directly) was
    intended.  Confirm before using this helper.
    """
    try:
        return obj.meth(*args, **kwds)
    except cmpi.CMPIException,e:
        raise _exception_to_error(e)
##==============================================================================
##
## _fwrap()
##
## Wrap a function in a try block.
##
##==============================================================================
def _fwrap(meth, *args, **kwds):
    """Invoke callable *meth*, translating cmpi.CMPIException into
    pywbem.CIMError (Python 2 except syntax)."""
    try:
        return meth(*args, **kwds)
    except cmpi.CMPIException,e:
        raise _exception_to_error(e)
##==============================================================================
##
##
##
##==============================================================================
class ContextWrap(object):
    """Dictionary-style wrapper around a CMPI context object.

    Entries are converted between CMPI data and pywbem values through
    *proxy*.  Python 2 code: uses backticks (repr), has_key, iter* methods
    and xrange.
    """
    def __init__(self, proxy, cmpicontext):
        self.proxy = proxy
        self.cmpicontext = cmpicontext
    def __getitem__(self, key):
        """Fetch one context entry, converted to a pywbem value."""
        data = self.cmpicontext.get_entry(key)
        _type, is_array = _cmpi_type2string(data.type)
        return self.proxy.cmpi2pywbem_data(data, _type, is_array)
    def __setitem__(self, key, pval):
        """Store a pywbem value as a CMPI context entry."""
        data, _type = self.proxy.pywbem2cmpi_value(pval)
        ctype = _pywbem2cmpi_typemap[_type]
        if isinstance(pval, list):
            # Lists become CMPI arrays: OR in the array flag.
            ctype = ctype | cmpi.CMPI_ARRAY
        self.cmpicontext.add_entry(str(key), data, ctype)
    def __len__(self):
        return self.cmpicontext.get_entry_count()
    def __repr__(self):
        # Python 2 backticks == repr().
        return `self.todict()`
    # The dict-protocol methods below all operate on a fresh snapshot from
    # todict(); mutations of the snapshot do not write back to the context.
    def keys(self):
        return self.todict().keys()
    def items(self):
        return self.todict().items()
    def values(self):
        return self.todict().values()
    def __contains__(self, key):
        return key in self.todict()
    def has_key(self, key):
        return self.todict().has_key(key)
    def iterkeys(self):
        return self.todict().iterkeys()
    def itervalues(self):
        return self.todict().itervalues()
    def iteritems(self):
        return self.todict().iteritems()
    def update(self, *args, **kwargs):
        """dict.update() semantics; writes through __setitem__."""
        for mapping in args:
            if hasattr(mapping, 'items'):
                for k, v in mapping.items():
                    self[k] = v
            else:
                for (k, v) in mapping:
                    self[k] = v
        for k, v in kwargs.items():
            self[k] = v
    def get(self, key, default = None):
        try:
            return self.todict()[key]
        except KeyError:
            return default
    def todict(self):
        """Snapshot the whole context as a plain dict (one pass)."""
        d = {}
        for i in xrange(0, self.cmpicontext.get_entry_count()):
            name, data = self.cmpicontext.get_entry_at(i)
            _type, is_array = _cmpi_type2string(data.type)
            pval = self.proxy.cmpi2pywbem_data(data, _type, is_array)
            d[name] = pval
        return d
class BrokerCIMOMHandle(object):
    """pywbem-style CIMOM handle implemented as up-calls through the CMPI
    broker.

    All enumeration methods are generators that lazily convert CMPI results
    to pywbem objects via *proxy*; the broker itself is wrapped so CMPI
    exceptions surface as pywbem.CIMError.
    """
    def __init__(self, proxy, ctx):
        #self.broker = proxy.broker
        self.broker = ExceptionClassWrapper(proxy.broker)
        self.proxy = proxy
        self.ctx = ctx
    def _yield_instance_names(self, e):
        """Yield pywbem instance names from a CMPI enumeration.

        NOTE(review): EnumerateInstanceNames duplicates this loop inline
        instead of calling this helper — candidates for consolidation.
        """
        while e and e.hasNext():
            data=e.next()
            assert(data.type == cmpi.CMPI_ref)
            piname=self.proxy.cmpi2pywbem_instname(data.value.ref)
            yield piname
    def EnumerateInstanceNames(self, ns, cn):
        """Yield pywbem.CIMInstanceName for class *cn* in namespace *ns*."""
        cop = self.broker.new_object_path(ns, cn)
        e = self.broker.enumInstanceNames(self.ctx, cop)
        while e and e.hasNext():
            data=e.next()
            assert(data.type == cmpi.CMPI_ref)
            piname=self.proxy.cmpi2pywbem_instname(data.value.ref)
            yield piname
    def EnumerateInstances(self, ns, cn, props = None):
        """Yield pywbem.CIMInstance for class *cn*, optionally filtered to
        property list *props*."""
        cop = self.broker.new_object_path(ns, cn)
        e = self.broker.enumInstances(self.ctx, cop, props)
        while e and e.hasNext():
            data=e.next()
            assert(data.type == cmpi.CMPI_instance)
            pinst=self.proxy.cmpi2pywbem_inst(data.value.inst)
            yield pinst
    def GetInstance(self, path, props = None):
        """Fetch one instance by path; returns None if the broker does."""
        cop = self.proxy.pywbem2cmpi_instname(path)
        ci = self.broker.getInstance(self.ctx, cop, props)
        if ci is None:
            return None
        return self.proxy.cmpi2pywbem_inst(ci)
    def Associators(self, path, assocClass = None, resultClass = None,
            role = None, resultRole = None, props = None):
        """Yield instances associated with *path* (CIM Associators op)."""
        cop = self.proxy.pywbem2cmpi_instname(path)
        e = self.broker.associators(self.ctx, cop, assocClass, resultClass,
            role, resultRole, props)
        while e and e.hasNext():
            data = e.next()
            assert(data.type == cmpi.CMPI_instance)
            pinst=self.proxy.cmpi2pywbem_inst(data.value.inst)
            yield pinst
    def AssociatorNames(self, path, assocClass = None, resultClass = None,
            role = None, resultRole = None):
        """Yield paths of instances associated with *path*."""
        cop = self.proxy.pywbem2cmpi_instname(path)
        e = self.broker.associatorNames(self.ctx, cop, assocClass, resultClass,
            role, resultRole)
        while e and e.hasNext():
            data = e.next()
            assert(data.type == cmpi.CMPI_ref)
            piname=self.proxy.cmpi2pywbem_instname(data.value.ref)
            yield piname
    def References(self, path, resultClass=None, role=None, props=None):
        """Yield association instances that reference *path*."""
        cop = self.proxy.pywbem2cmpi_instname(path)
        e = self.broker.references(self.ctx, cop, resultClass,
            role, props)
        while e and e.hasNext():
            data = e.next()
            assert(data.type == cmpi.CMPI_instance)
            pinst=self.proxy.cmpi2pywbem_inst(data.value.inst)
            yield pinst
    def ReferenceNames(self, path, resultClass=None, role=None):
        """Yield paths of association instances that reference *path*."""
        cop = self.proxy.pywbem2cmpi_instname(path)
        e = self.broker.referenceNames(self.ctx, cop, resultClass, role)
        while e and e.hasNext():
            data = e.next()
            assert(data.type == cmpi.CMPI_ref)
            piname=self.proxy.cmpi2pywbem_instname(data.value.ref)
            yield piname
    def InvokeMethod(self, path, method, **params):
        """Invoke a CIM method on *path*; returns (return-value, outargs)."""
        if not isinstance(path, pywbem.CIMClassName) and \
           not isinstance(path, pywbem.CIMInstanceName):
            # invalid parameter
            raise pywbem.CIMError(pywbem.CIM_ERR_INVALID_PARAMETER)
        if path.namespace is None:
            # must have namespace
            raise pywbem.CIMError(pywbem.CIM_ERR_INVALID_NAMESPACE)
        cop = self.proxy.pywbem2cmpi_instname(path)
        # dirty hack to get upcall agrument to correct format,
        # i.e. dictionary of (type, value)
        wparams = {}
        for name, value in params.items():
            if isinstance(value, list):
                # Arrays: derive the CMPI type from the first element.
                data, _type = self.proxy.pywbem2cmpi_value(value[0])
            else:
                data, _type = self.proxy.pywbem2cmpi_value(value)
            wparams[name] = (_type, value)
        inargs=self.proxy.pywbem2cmpi_args(wparams)
        poutargs = self.broker.new_args()
        rc=self.broker.invokeMethod(self.ctx, cop, method, inargs, poutargs)
        outrc = self.proxy.cmpi2pywbem_data(rc)
        outargs = self.proxy.cmpi2pywbem_args(poutargs)
        rslt = (outrc,outargs)
        return rslt
    def CreateInstance(self, instance):
        """Create *instance* (must carry a path with namespace); returns its
        new pywbem path, or None."""
        if instance.path is None or not instance.path:
            # no INVALID_PATH error... INVALID_NAMESPACE is best option
            raise pywbem.CIMError(pywbem.CIM_ERR_INVALID_NAMESPACE)
        if instance.path.namespace is None or not instance.path.namespace:
            raise pywbem.CIMError(pywbem.CIM_ERR_INVALID_NAMESPACE)
        cop = self.proxy.pywbem2cmpi_instname(instance.path)
        inst = self.proxy.pywbem2cmpi_inst(instance)
        ciname = self.broker.createInstance(self.ctx, cop, inst)
        if ciname is None:
            return None
        return self.proxy.cmpi2pywbem_instname(ciname)
    def DeleteInstance(self, path):
        """Delete the instance at *path*."""
        cop = self.proxy.pywbem2cmpi_instname(path)
        return self.broker.deleteInstance(self.ctx, cop)
    def ModifyInstance(self, instance):
        """Replace the stored instance addressed by instance.path."""
        if instance.path is None or not instance.path:
            # no INVALID_PATH error... INVALID_NAMESPACE is best option
            raise pywbem.CIMError(pywbem.CIM_ERR_INVALID_NAMESPACE)
        if instance.path.namespace is None or not instance.path.namespace:
            raise pywbem.CIMError(pywbem.CIM_ERR_INVALID_NAMESPACE)
        cop = self.proxy.pywbem2cmpi_instname(instance.path)
        inst = self.proxy.pywbem2cmpi_inst(instance)
        return self.broker.modifyInstance(self.ctx, cop, inst)
    def DeliverIndication(self, ns, instance):
        """Deliver an indication instance, working around broker quirks."""
        # Pegasus requires the instance to carry a namespace; others don't.
        if self.broker.name() == 'Pegasus':
            allow_null_ns = False
        else:
            allow_null_ns = True
        if self.broker.name() == 'RequestHandler':
            # Check sblim bug #2185410.
            if instance.path is not None:
                instance.path.namespace = None
        inst = self.proxy.pywbem2cmpi_inst(instance, allow_null_ns)
        rv = self.broker.deliverIndication(self.ctx, ns, inst)
        return rv
    def PrepareAttachThread(self):
        # Return new *BrokerCIMOMHandle*, the context itself would be useless
        new_ctx = self.broker.prepareAttachThread(self.ctx)
        new_broker = BrokerCIMOMHandle(self.proxy, new_ctx)
        return new_broker
    def AttachThread(self):
        return self.broker.attachThread(self.ctx)
    def DetachThread(self):
        return self.broker.detachThread(self.ctx)
    def is_subclass(self, ns, super, sub):
        """True if class *sub* is (or derives from) class *super* in *ns*.
        Note: the parameter name 'super' shadows the builtin but cannot be
        renamed without breaking keyword callers."""
        subObjPath=self.broker.new_object_path(ns, sub)
        return bool(self.broker.classPathIsA(subObjPath,super))
    def bummer(self):
        self.broker.bummer()
# Maps CMPI log severities to syslog priorities, used by Logger as a
# fallback when the broker does not support LogMessage.  (CMPI_DEV_DEBUG is
# the CMPI spelling of the debug level, unlike the CMPI_SEV_* names.)
_log_pri_map = {
    cmpi.CMPI_SEV_ERROR   :syslog.LOG_ERR,
    cmpi.CMPI_SEV_INFO    :syslog.LOG_INFO,
    cmpi.CMPI_SEV_WARNING :syslog.LOG_WARNING,
    cmpi.CMPI_DEV_DEBUG   :syslog.LOG_DEBUG,
    }
# Short level tags prefixed to trace messages when broker tracing is
# unavailable and messages are demoted to log_debug.
_trace_prefix_map = {
    cmpi.CMPI_LEV_VERBOSE :"DEBG",
    cmpi.CMPI_LEV_INFO    :"INFO",
    cmpi.CMPI_LEV_WARNING :"WARN",
    }
class Logger(object):
def __init__(self, broker, miname):
    """Logger facade over the CMPI broker's logging/tracing facilities.

    broker: CMPI broker handle; miname: provider (MI) name used as the
    message origin tag.
    """
    #self.broker = ExceptionClassWrapper(broker)
    self.broker = broker
    self.miname = miname
def __log_message(self, severity, msg):
    """Send *msg* to the broker log; fall back to syslog when the broker
    reports logging as unsupported (Python 2 except syntax)."""
    try:
        self.broker.LogMessage(severity, self.miname, msg);
    except cmpi.CMPIException, e:
        # Only NOT_SUPPORTED is handled; other CMPI errors propagate.
        if e.get_error_code() == cmpi.CMPI_RC_ERR_NOT_SUPPORTED:
            syslog.syslog(syslog.LOG_DAEMON | _log_pri_map[severity],
                '%s: %s' % (self.miname, msg))
def __trace_message(self, severity, component, msg):
    """Send a trace message via the broker; if tracing is unsupported,
    demote it to a debug log line tagged with component and level."""
    try:
        self.broker.TraceMessage(severity, component, msg);
    except cmpi.CMPIException, e:
        if e.get_error_code() == cmpi.CMPI_RC_ERR_NOT_SUPPORTED:
            # fall back to log_debug if tracing is not supported
            self.log_debug("%s:%s: %s" % (
                component, _trace_prefix_map[severity], msg))
def log_error(self, msg):
    """Log *msg* at CMPI error severity."""
    self.__log_message(cmpi.CMPI_SEV_ERROR, msg)
def log_info(self, msg):
    """Log *msg* at CMPI informational severity."""
    self.__log_message(cmpi.CMPI_SEV_INFO, msg)
def log_warn(self, msg):
    """Log *msg* at CMPI warning severity."""
    self.__log_message(cmpi.CMPI_SEV_WARNING, msg)
def log_debug(self, msg):
    """Log *msg* at CMPI debug severity."""
    self.__log_message(cmpi.CMPI_DEV_DEBUG, msg)
def trace_verbose(self, component, msg):
| |
def p_assumed_shape_spec_1(p):
    'assumed_shape_spec : COLON'
    # R514: bare ":" assumed-shape bound; no AST node is built yet (TODO).
    pass
def p_assumed_shape_spec_2(p):
    'assumed_shape_spec : lower_bound COLON'
    # R514: assumed-shape bound with explicit lower bound; no AST yet (TODO).
    pass
def p_assumed_shape_spec_list(p):
    '''assumed_shape_spec_list : explicit_shape_list COMMA assumed_shape_spec
                               | explicit_shape
    '''
    # Left-recursive list build: p[1] is the accumulated list, p[3] the new
    # item.  BUG FIX: the original did p[3].append(p[1]) — appending the
    # list onto the new item — which inverts the standard PLY pattern.
    if len(p) < 4:
        p[0] = [p[1]]
    else:
        p[1].append(p[3])
        p[0] = p[1]
# R515
def p_deferred_shape_spec(p):
    'deferred_shape_spec : COLON'
    # R515: deferred-shape ":"; no AST node is built yet (TODO).
    pass
def p_deferred_shape_spec_list(p):
    '''deferred_shape_spec_list : deferred_shape_list COMMA deferred_shape_spec
                                | deferred_shape
    '''
    # BUG FIX: as in p_assumed_shape_spec_list, the original appended the
    # accumulated list (p[1]) onto the new item (p[3]); the conventional
    # list-building direction is the other way around.
    if len(p) < 4:
        p[0] = [p[1]]
    else:
        p[1].append(p[3])
        p[0] = p[1]
# R516
def p_assumed_size_spec(p):
    'assumed_size_spec : opt_explicit_shape_spec_list opt_lower_bound TIMES'
    # R516: assumed-size array spec ending in "*".
    # TODO
    pass
def p_opt_explicit_shape_spec_list(p):
    '''opt_explicit_shape_spec_list : explicit_shape_spec_list COMMA explicit_shape
                                    | empty
    '''
    # Empty production -> None; otherwise pass the list through.
    # NOTE(review): the trailing explicit_shape (p[3]) is silently dropped,
    # and 'explicit_shape_spec_list' vs the 'explicit_shape_list' used in
    # R514 looks inconsistent — confirm the intended grammar before fixing.
    if len(p) < 3: p[0] = None
    else: p[0] = p[1]
def p_opt_lower_bound(p):
    '''opt_lower_bound : lower_bound COLON
                       | empty
    '''
    # Empty production yields None; otherwise forward the lower bound.
    p[0] = None if len(p) < 3 else p[1]
# R517
def p_intent_spec(p):
    '''intent_spec : IN
                   | OUT
                   | INOUT
    '''
    # R517: propagate the intent keyword token as the rule's value.
    p[0] = p[1]
# R518
def p_access_stmt_1(p):
    'access_stmt : access_spec'
    # R518: bare PUBLIC/PRIVATE statement.
    # TODO
    pass
def p_access_stmt_2(p):
    'access_stmt : access_spec_list opt_colon_colon access_id'
    # R518: access statement with an access-id list.
    # TODO
    pass
# R519
def p_access_id(p):
    '''access_id : use_name
                 | generic_spec
    '''
    # R519: pass the single child through unchanged.
    p[0] = p[1]
# R520
def p_allocatable_stmt(p):
    'allocatable_stmt : ALLOCATABLE opt_colon_colon object_name_with_opt_deferred_shape_list'
    # R520: ALLOCATABLE statement.
    # TODO
    pass
def p_opt_deferred_shape_spec_list_in_paren(p):
    '''opt_deferred_shape_spec_list_in_paren : LPAREN deferred_shape_spec_list RPAREN
                                             | empty
    '''
    # Strip the parentheses; the empty production yields None.
    p[0] = None if len(p) < 4 else p[2]
def p_object_name_with_opt_deferred_shape_list_1(p):
    'object_name_with_opt_deferred_shape_list : object_name opt_deferred_shape_spec_list_in_paren'
    # Single object-name (optionally with a deferred-shape spec).
    # TODO
    pass
def p_object_name_with_opt_deferred_shape_list_2(p):
    'object_name_with_opt_deferred_shape_list : object_name opt_deferred_shape_spec_list_in_paren COMMA object_name_with_opt_deferred_shape_list'
    # Right-recursive continuation of the object-name list.
    # TODO
    pass
# R521
def p_asyncrhonous_stmt(p):
    'asynchronous_stmt : ASYNCHRONOUS opt_colon_colon object_name_list'
    # R521: ASYNCHRONOUS statement.  NOTE: the function name misspells
    # "asynchronous"; harmless to PLY (only the 'p_' prefix matters), but
    # worth renaming eventually.
    # TODO
    pass
def p_object_name_list_1(p):
    'object_name_list : object_name'
    # Start a fresh one-element list.
    p[0] = [p[1]]
def p_object_name_list_2(p):
    'object_name_list : object_name_list COMMA object_name'
    # p[1] is the accumulated list, p[3] the new name; the original had the
    # operands swapped (`p[3].append(p[1])`), which raised AttributeError
    # because object_name is not a list.
    p[1].append(p[3])
    p[0] = p[1]
# R522
def p_bind_stmt(p):
'bind_stmt : language_binding_spec opt_colon_colon bind_entity_list'
# TODO
pass
# R523
def p_bind_entity_1(p):
'bind_entity : entity_name'
pass
def p_bind_entity_2(p):
'bind_entity : DIVIDE common_block_name DIVIDE'
pass
###--- Expressions --------------------------------
# R701
def p_primary_1(p):
'''primary : constant
| designator
| array_constructor
| structure_constructor
| function_reference
| type_param_inquiry
| type_param_name
'''
p[0] = p[1]
def p_primary_2(p):
'primary : LPAREN expr RPAREN'
p[0] = p[2]
# R702
# R702 -- rule functions must carry the `p_` prefix or PLY's yacc never
# registers them as grammar productions; the originals were named
# `level_one_expr_1`/`level_one_expr_2` and were silently ignored.
def p_level_one_expr_1(p):
    'level_one_expr : primary'
    p[0] = p[1]
def p_level_one_expr_2(p):
    'level_one_expr : defined_unary_op primary'
    #p[2].setop(p[1])
    p[0] = p[2]
# R703
def p_defined_unary_operator(p):
'defined_unary_operator : DEFINED_UNARY_OP'
p[0] = p[1]
# R704
def p_mult_operand_1(p):
'mult_operand : level_one_expr'
p[0] = p[1]
def p_mult_operand_2(p):
'mult_operand : level_one_expr power_op mult_operand'
# TODO: create new binary op node and add operands
pass
# R705
def p_add_operand_1(p):
'add_operand : mult_operand'
p[0] = p[1]
def p_add_operand_2(p):
'add_operand : add_operand mult_op mult_operand'
# TODO: create new binary op node and add operands
pass
# R706
def p_level_two_expr_1(p):
'level_two_expr : add_operand'
p[0] = p[1]
def p_level_two_expr_2(p):
'level_two_expr : add_op add_operand'
p[0] = p[1]
def p_level_two_expr_3(p):
'level_two_expr : level_two_expr add_op add_operand'
# TODO: create new binary op node and add operands
pass
# R707
def p_power_op(p):
'power_op : TIMES TIMES'
pass
# R708
def p_mult_op(p):
'''mult_op : TIMES
| DIVIDE
'''
pass
# R709
def p_add_op(p):
'''add_op : PLUS
| MINUS
'''
pass
# R710
def p_level_three_expr_1(p):
'level_three_expr : level_two_expr'
p[0] = p[1]
def p_level_three_expr_2(p):
'level_three_expr : level_three_expr concat_op level_three_expr'
# TODO
pass
# R711
def p_concat_op(p):
'concat_op : DIVIDE DIVIDE'
pass
# R712
def p_level_four_expr_1(p):
'level_four_expr : level_three_expr'
p[0] = p[1]
def p_level_four_expr_2(p):
'level_four_expr : level_three_expr rel_op level_three_expr'
# TODO
pass
# R713
def p_rel_op_1(p):
'''rel_op : EQ
| NE
| LT
| LE
| GT
| GE
| LESSTHAN
| LESSTHAN_EQ
| GREATERTHAN
| GREATERTHAN_EQ
| EQ_GT
| EQ_EQ
| SLASH_EQ
'''
pass
# R714
def p_and_operand_1(p):
'and_operand : level_four_expr'
pass
def p_and_operand_2(p):
'and_operand : NOT level_four_expr'
pass
# R715
def p_or_operand_1(p):
'or_operand : and_operand'
pass
def p_or_operand_2(p):
'or_operand : or_operand AND and_operand'
# TODO
pass
# R716
def p_equiv_operand_1(p):
'equiv_operand : or_operand'
# TODO
pass
def p_equiv_operand_2(p):
'equiv_operand : equiv_operand OR or_operand'
# TODO
pass
# R717
def p_level_five_expr_1(p):
'level_five_expr : equiv_operand'
pass
def p_level_five_expr_2(p):
'level_five_expr : level_five_expr equiv_op equiv_operand'
# TODO
pass
# R718 - R720 are in lexer
# R721
def p_equiv_op(p):
'''equiv_op : EQV
| NEQV
'''
p[0] = p[1]
# R722
def p_expr_1(p):
'expr : level_five_expr'
p[0] = p[1]
def p_expr_2(p):
'expr : expr DEFINED_BINARY_OP level_five_expr'
# TODO
pass
# R723 is in lexer
# R724
def p_logical_expr(p):
'logical_expr : expr'
p[0] = p[1]
# R725
def p_char_expr(p):
'char_expr : expr'
p[0] = p[1]
# R726
def p_default_char_expr(p):
'default_char_expr : expr'
p[0] = p[1]
# R727
def p_int_expr(p):
'int_expr : expr'
p[0] = p[1]
# R728
def p_numeric_expr(p):
'numeric_expr : expr'
p[0] = p[1]
# R729
def p_specification_expr(p):
'specification_expr : scalar_int_expr'
p[0] = p[1]
# R730
def p_initialization_expr(p):
'initialization_expr : expr'
p[0] = p[1]
# R731
def p_char_initialization_expr(p):
'char_initialization_expr : char_expr'
p[0] = p[1]
# R732
def p_int_initialization_expr(p):
'int_initialization_expr : int_expr'
p[0] = p[1]
# R733
def p_logical_initialization_expr(p):
'logical_initialization_expr : logical_expr'
p[0] = p[1]
# R734
def p_assignment_stmt(p):
'assignment_stmt : variable EQUALS expr'
# TODO
pass
# R735
def p_pointer_assignment_stmt_1(p):
'''pointer_assignment_stmt : data_pointer_object EQ_GT data_target
| proc_pointer_object EQ_GT proc_target
'''
# TODO
pass
def p_pointer_assignment_stmt_2(p):
'''pointer_assignment_stmt : data_pointer_object LPAREN bounds_spec_part RPAREN EQ_GT data_target
| data_pointer_object LPAREN bounds_remapping_part RPAREN EQ_GT data_target
'''
# TODO
pass
# R736
def p_data_pointer_object_1(p):
'data_pointer_object : variable_name'
p[0] = p[1]
def p_data_pointer_object_2(p):
'data_pointer_object : variable MOD data_pointer_component_name'
p[0] = p[1]
# R737
def p_bounds_spec(p):
'bounds_spec : lower_bound_expr COLON'
# TODO
pass
# R738
def p_bounds_remapping(p):
'bounds_remapping : lower_bound_expr COLON upper_bound_expr'
# TODO
pass
# R739
def p_data_target(p):
'''data_target : variable
| expr
'''
p[0] = p[1]
# R740
def p_proc_pointer_object(p):
'''proc_pointer_object : proc_pointer_name
| proc_component_ref
'''
p[0] = p[1]
# R741
def p_proc_component_ref(p):
'proc_component_ref : variable MOD procedure_component_name'
# TODO
pass
# R742
def p_proc_target(p):
'''proc_target : expr
| procedure_name
| proc_component_ref
'''
p[0] = p[1]
def p_scalar_int_expression(p):
'scalar_int_expression : expr'
# Restricted expression, see p. 125 in standard (sec. 7.1.6)
p[0] = p[1]
def p_scalar_int_expression_list(p):
    '''scalar_int_expression_list : scalar_int_expression_list COMMA scalar_int_expression
    | scalar_int_expression
    '''
    # Left-recursive list: extend the accumulated list (p[1]) with the new
    # expression (p[3]).  The original appended p[1] onto p[3] -- swapped.
    if len(p) == 2:
        p[0] = [p[1]]
    else:
        p[1].append(p[3])
        p[0] = p[1]
###--- Statements ------------------------
# R743
def p_where_stmt(p):
'where_stmt : WHERE LPAREN mask_expr RPAREN where_assignment_stmt'
# TODO
pass
# R744
def p_where_construct_1(p):
'where_construct : where_construct_stmt end_where_stmt'
# TODO
pass
def p_where_stmt_2(p):
'where_construct : where_construct_stmt where_body_construct_part masked_elsewhere_stmt_part elsewhere_stmt_part end_where_stmt'
# TODO
pass
def p_where_body_construct_part_1(p):
'where_body_construct_part : where_body_construct'
p[0] = [p[1]]
def p_where_body_construct_part_2(p):
    'where_body_construct_part : where_body_construct where_body_construct_part'
    # Right-recursive rule: p[2] already holds the tail of the list, so the
    # new construct p[1] must be PREPENDED to keep source order.  The
    # original appended it to the end, reversing the construct list.
    p[0] = [p[1]] + p[2]
def p_where_body_construct_part_3(p):
'where_body_construct_part : empty'
p[0] = []
def p_masked_elsewhere_stmt_part(p):
'masked_elsewhere_stmt_part : masked_elsewhere_stmt where_body_construct_part'
# TODO
pass
def p_elsewhere_stmt_part(p):
'elsewhere_stmt_part : elsewhere_stmt where_body_construct_part'
# TODO
pass
# R745
def p_where_construct_stmt_1(p):
'where_construct_stmt : WHERE LPAREN mask_expr RPAREN'
# TODO
pass
def p_where_construct_stmt_2(p):
'where_construct_stmt : where_construct_name COLON WHERE LPAREN mask_expr RPAREN'
# TODO
pass
# R746
def p_where_body_construct(p):
'''where_body_construct : where_assignment_stmt
| where_stmt
| where_construct
'''
p[0] = p[1]
# R747
def p_where_assignment_stmt(p):
'where_assignment_stmt : assignment_stmt'
p[0] = p[1]
# R748
def p_mask_expr(p):
'mask_expr : logical_expr'
p[0] = p[1]
# R749
def p_masked_elsewhere_stmt_1(p):
'masked_elsewhere_stmt : ELSEWHERE LPAREN masked_expr RPAREN'
# TODO
pass
def p_masked_elsewhere_stmt_2(p):
'masked_elsewhere_stmt : ELSEWHERE LPAREN masked_expr RPAREN where_construct_name'
# TODO
pass
# R750
def p_elsewhere_stmt_1(p):
'elsewhere_stmt : ELSEWHERE'
pass
def p_elsewhere_stmt_2(p):
'elsewhere_stmt : ELSEWHERE where_construct_name'
pass
# R751
def p_end_where_stmt_1(p):
'end_where_stmt : END WHERE'
pass
def p_end_where_stmt_2(p):
'end_where_stmt : END WHERE where_construct_name'
pass
# R752
def p_forall_construct(p):
'forall_construct : forall_construct_stmt forall_body_construct_part end_forall_stmt'
# TODO
pass
def p_forall_body_construct_part_1(p):
    'forall_body_construct_part : forall_body_construct forall_body_construct_part'
    # Right-recursive rule: prepend the new construct (p[1]) to the tail
    # list (p[2]) so the constructs stay in source order; the original
    # appended, reversing the list.
    p[0] = [p[1]] + p[2]
def p_forall_body_construct_part_2(p):
'forall_body_construct_part : empty'
p[0] = []
# R753
def p_forall_construct_stmt_1(p):
'forall_construct_stmt : FORALL forall_header'
# TODO
pass
def p_forall_construct_stmt_2(p):
'forall_construct_stmt : forall_construct_name COLON FORALL forall_header'
# TODO
pass
# R754
def p_forall_header_1(p):
'forall_header : LPAREN forall_triplet_spec_list RPAREN'
# TODO
pass
def p_forall_header_2(p):
'forall_header : LPAREN forall_triplet_spec_list COMMA scalar_mask_expr RPAREN'
# TODO
pass
# R755
def p_forall_triplet_spec_1(p):
'forall_triplet_spec : index_name EQUALS subscript COLON subscript'
# TODO
pass
def p_forall_triplet_spec_2(p):
'forall_triplet_spec : index_name EQUALS subscript COLON subscript COLON stride'
# TODO
pass
def p_forall_triplet_spec_list_1(p):
    'forall_triplet_spec_list : forall_triplet_spec_list COMMA forall_triplet_spec'
    # p[1] is the accumulated triplet list, p[3] the new triplet; the
    # original swapped them (`p[3].append(p[1])`).
    p[1].append(p[3])
    p[0] = p[1]
def p_forall_triplet_spec_list_2(p):
'forall_triplet_spec_list : forall_triplet_spec'
p[0] = [p[1]]
# R756
def p_forall_body_construct(p):
'''forall_body_construct : forall_assignment_stmt
| where_stmt
| where_construct
| forall_construct
| forall_stmt
'''
p[0] = p[1]
# R757
def p_forall_assignment_stmt(p):
'''forall_assignment_stmt : assignment_stmt
| pointer_assignment_stmt
'''
p[0] = p[1]
# R758
def p_end_forall_stmt_1(p):
'end_forall_stmt : END FORALL'
pass
def p_end_forall_stmt_2(p):
'end_forall_stmt : END FORALL forall_construct_name'
pass
# R759
def p_forall_stmt(p):
'forall_stmt : FORALL forall_header forall_assignment_stmt'
# TODO
pass
###--- Procedures and interfaces ------------------------------------------------
# R1201
def p_interface_block(p):
'interface_block : interface_stmt interface_spec_list end_interface_stmt'
p[0] = p[2]
def p_interface_spec_list(p):
    '''interface_spec_list : interface_spec_list interface_spec
    | empty
    '''
    # len(p) is 3 for the recursive alternative and 2 for `empty`.
    # The original tested `len(p) > 1`, which is ALWAYS true, so the empty
    # alternative fell into the append branch and crashed on p[2].
    if len(p) == 3:
        p[1].append(p[2])
        p[0] = p[1]
    else:
        p[0] = []
# R1202
def p_interface_spec(p):
'interface_spec : interface_body'
p[0] = p[1]
# R1214
def p_proc_decl_1(p):
'proc_decl : procedure_entity_name'
p[0] = p[1]
def p_proc_decl_2(p):
'proc_decl : procedure_entity_name EQ_GT null_init'
p[0] = p[1]
def p_proc_decl_list(p):
'''proc_decl_list : proc_decl_list COMMA proc_decl
| empty
'''
if len(p) < 4: | |
"""Support for HomeMatic devices."""
from datetime import timedelta, datetime
from functools import partial
import logging
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_NAME,
CONF_HOST,
CONF_HOSTS,
CONF_PASSWORD,
CONF_PLATFORM,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
EVENT_HOMEASSISTANT_STOP,
STATE_UNKNOWN,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "homematic"
SCAN_INTERVAL_HUB = timedelta(seconds=300)
SCAN_INTERVAL_VARIABLES = timedelta(seconds=30)
DISCOVER_SWITCHES = "homematic.switch"
DISCOVER_LIGHTS = "homematic.light"
DISCOVER_SENSORS = "homematic.sensor"
DISCOVER_BINARY_SENSORS = "homematic.binary_sensor"
DISCOVER_COVER = "homematic.cover"
DISCOVER_CLIMATE = "homematic.climate"
DISCOVER_LOCKS = "homematic.locks"
DISCOVER_BATTERY = "homematic.battery"
ATTR_DISCOVER_DEVICES = "devices"
ATTR_PARAM = "param"
ATTR_CHANNEL = "channel"
ATTR_ADDRESS = "address"
ATTR_VALUE = "value"
ATTR_VALUE_TYPE = "value_type"
ATTR_INTERFACE = "interface"
ATTR_ERRORCODE = "error"
ATTR_MESSAGE = "message"
ATTR_MODE = "mode"
ATTR_TIME = "time"
ATTR_UNIQUE_ID = "unique_id"
ATTR_PARAMSET_KEY = "paramset_key"
ATTR_PARAMSET = "paramset"
ATTR_DISCOVERY_TYPE = "discovery_type"
ATTR_LOW_BAT = "LOW_BAT"
ATTR_LOWBAT = "LOWBAT"
EVENT_KEYPRESS = "homematic.keypress"
EVENT_IMPULSE = "homematic.impulse"
EVENT_ERROR = "homematic.error"
SERVICE_VIRTUALKEY = "virtualkey"
SERVICE_RECONNECT = "reconnect"
SERVICE_SET_VARIABLE_VALUE = "set_variable_value"
SERVICE_SET_DEVICE_VALUE = "set_device_value"
SERVICE_SET_INSTALL_MODE = "set_install_mode"
SERVICE_PUT_PARAMSET = "put_paramset"
HM_DEVICE_TYPES = {
DISCOVER_SWITCHES: [
"Switch",
"SwitchPowermeter",
"IOSwitch",
"IPSwitch",
"RFSiren",
"IPSwitchPowermeter",
"HMWIOSwitch",
"Rain",
"EcoLogic",
"IPKeySwitchPowermeter",
"IPGarage",
"IPKeySwitch",
"IPMultiIO",
],
DISCOVER_LIGHTS: [
"Dimmer",
"KeyDimmer",
"IPKeyDimmer",
"IPDimmer",
"ColorEffectLight",
],
DISCOVER_SENSORS: [
"SwitchPowermeter",
"Motion",
"MotionV2",
"RemoteMotion",
"MotionIP",
"ThermostatWall",
"AreaThermostat",
"RotaryHandleSensor",
"WaterSensor",
"PowermeterGas",
"LuxSensor",
"WeatherSensor",
"WeatherStation",
"ThermostatWall2",
"TemperatureDiffSensor",
"TemperatureSensor",
"CO2Sensor",
"IPSwitchPowermeter",
"HMWIOSwitch",
"FillingLevel",
"ValveDrive",
"EcoLogic",
"IPThermostatWall",
"IPSmoke",
"RFSiren",
"PresenceIP",
"IPAreaThermostat",
"IPWeatherSensor",
"RotaryHandleSensorIP",
"IPPassageSensor",
"IPKeySwitchPowermeter",
"IPThermostatWall230V",
"IPWeatherSensorPlus",
"IPWeatherSensorBasic",
"IPBrightnessSensor",
"IPGarage",
"UniversalSensor",
"MotionIPV2",
"IPMultiIO",
"IPThermostatWall2",
],
DISCOVER_CLIMATE: [
"Thermostat",
"ThermostatWall",
"MAXThermostat",
"ThermostatWall2",
"MAXWallThermostat",
"IPThermostat",
"IPThermostatWall",
"ThermostatGroup",
"IPThermostatWall230V",
"IPThermostatWall2",
],
DISCOVER_BINARY_SENSORS: [
"ShutterContact",
"Smoke",
"SmokeV2",
"Motion",
"MotionV2",
"MotionIP",
"RemoteMotion",
"WeatherSensor",
"TiltSensor",
"IPShutterContact",
"HMWIOSwitch",
"MaxShutterContact",
"Rain",
"WiredSensor",
"PresenceIP",
"IPWeatherSensor",
"IPPassageSensor",
"SmartwareMotion",
"IPWeatherSensorPlus",
"MotionIPV2",
"WaterIP",
"IPMultiIO",
"TiltIP",
"IPShutterContactSabotage",
],
DISCOVER_COVER: ["Blind", "KeyBlind", "IPKeyBlind", "IPKeyBlindTilt"],
DISCOVER_LOCKS: ["KeyMatic"],
}
HM_IGNORE_DISCOVERY_NODE = ["ACTUAL_TEMPERATURE", "ACTUAL_HUMIDITY"]
HM_IGNORE_DISCOVERY_NODE_EXCEPTIONS = {
"ACTUAL_TEMPERATURE": [
"IPAreaThermostat",
"IPWeatherSensor",
"IPWeatherSensorPlus",
"IPWeatherSensorBasic",
"IPThermostatWall",
"IPThermostatWall2",
]
}
HM_ATTRIBUTE_SUPPORT = {
"LOWBAT": ["battery", {0: "High", 1: "Low"}],
"LOW_BAT": ["battery", {0: "High", 1: "Low"}],
"ERROR": ["error", {0: "No"}],
"ERROR_SABOTAGE": ["sabotage", {0: "No", 1: "Yes"}],
"SABOTAGE": ["sabotage", {0: "No", 1: "Yes"}],
"RSSI_PEER": ["rssi_peer", {}],
"RSSI_DEVICE": ["rssi_device", {}],
"VALVE_STATE": ["valve", {}],
"LEVEL": ["level", {}],
"BATTERY_STATE": ["battery", {}],
"CONTROL_MODE": [
"mode",
{0: "Auto", 1: "Manual", 2: "Away", 3: "Boost", 4: "Comfort", 5: "Lowering"},
],
"POWER": ["power", {}],
"CURRENT": ["current", {}],
"VOLTAGE": ["voltage", {}],
"OPERATING_VOLTAGE": ["voltage", {}],
"WORKING": ["working", {0: "No", 1: "Yes"}],
"STATE_UNCERTAIN": ["state_uncertain", {}],
}
HM_PRESS_EVENTS = [
"PRESS_SHORT",
"PRESS_LONG",
"PRESS_CONT",
"PRESS_LONG_RELEASE",
"PRESS",
]
HM_IMPULSE_EVENTS = ["SEQUENCE_OK"]
CONF_RESOLVENAMES_OPTIONS = ["metadata", "json", "xml", False]
DATA_HOMEMATIC = "homematic"
DATA_STORE = "homematic_store"
DATA_CONF = "homematic_conf"
CONF_INTERFACES = "interfaces"
CONF_LOCAL_IP = "local_ip"
CONF_LOCAL_PORT = "local_port"
CONF_PORT = "port"
CONF_PATH = "path"
CONF_CALLBACK_IP = "callback_ip"
CONF_CALLBACK_PORT = "callback_port"
CONF_RESOLVENAMES = "resolvenames"
CONF_JSONPORT = "jsonport"
CONF_VARIABLES = "variables"
CONF_DEVICES = "devices"
CONF_PRIMARY = "primary"
DEFAULT_LOCAL_IP = "0.0.0.0"
DEFAULT_LOCAL_PORT = 0
DEFAULT_RESOLVENAMES = False
DEFAULT_JSONPORT = 80
DEFAULT_PORT = 2001
DEFAULT_PATH = ""
DEFAULT_USERNAME = "Admin"
DEFAULT_PASSWORD = ""
DEFAULT_SSL = False
DEFAULT_VERIFY_SSL = False
DEFAULT_CHANNEL = 1
DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "homematic",
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_ADDRESS): cv.string,
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_CHANNEL, default=DEFAULT_CHANNEL): vol.Coerce(int),
vol.Optional(ATTR_PARAM): cv.string,
vol.Optional(ATTR_UNIQUE_ID): cv.string,
}
)
# Schema for the `homematic:` YAML block.  `interfaces` describes the
# XML-RPC interfaces (one per CCU/Homegear port, connect=True); `hosts`
# describes hosts used only for variables/programs (connect=False).
# NOTE: the password default was corrupted to the literal `<PASSWORD>`
# (a syntax error); restored to DEFAULT_PASSWORD to match the
# `interfaces` section above.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_INTERFACES, default={}): {
                    cv.match_all: {
                        vol.Required(CONF_HOST): cv.string,
                        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
                        vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
                        vol.Optional(
                            CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES
                        ): vol.In(CONF_RESOLVENAMES_OPTIONS),
                        vol.Optional(CONF_JSONPORT, default=DEFAULT_JSONPORT): cv.port,
                        vol.Optional(
                            CONF_USERNAME, default=DEFAULT_USERNAME
                        ): cv.string,
                        vol.Optional(
                            CONF_PASSWORD, default=DEFAULT_PASSWORD
                        ): cv.string,
                        vol.Optional(CONF_CALLBACK_IP): cv.string,
                        vol.Optional(CONF_CALLBACK_PORT): cv.port,
                        vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
                        vol.Optional(
                            CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL
                        ): cv.boolean,
                    }
                },
                vol.Optional(CONF_HOSTS, default={}): {
                    cv.match_all: {
                        vol.Required(CONF_HOST): cv.string,
                        vol.Optional(
                            CONF_USERNAME, default=DEFAULT_USERNAME
                        ): cv.string,
                        vol.Optional(
                            CONF_PASSWORD, default=DEFAULT_PASSWORD
                        ): cv.string,
                    }
                },
                vol.Optional(CONF_LOCAL_IP, default=DEFAULT_LOCAL_IP): cv.string,
                vol.Optional(CONF_LOCAL_PORT): cv.port,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
SCHEMA_SERVICE_VIRTUALKEY = vol.Schema(
{
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): cv.string,
vol.Optional(ATTR_INTERFACE): cv.string,
}
)
SCHEMA_SERVICE_SET_VARIABLE_VALUE = vol.Schema(
{
vol.Required(ATTR_NAME): cv.string,
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
}
)
SCHEMA_SERVICE_SET_DEVICE_VALUE = vol.Schema(
{
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_CHANNEL): vol.Coerce(int),
vol.Required(ATTR_PARAM): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_VALUE): cv.match_all,
vol.Optional(ATTR_VALUE_TYPE): vol.In(
["boolean", "dateTime.iso8601", "double", "int", "string"]
),
vol.Optional(ATTR_INTERFACE): cv.string,
}
)
SCHEMA_SERVICE_RECONNECT = vol.Schema({})
SCHEMA_SERVICE_SET_INSTALL_MODE = vol.Schema(
{
vol.Required(ATTR_INTERFACE): cv.string,
vol.Optional(ATTR_TIME, default=60): cv.positive_int,
vol.Optional(ATTR_MODE, default=1): vol.All(vol.Coerce(int), vol.In([1, 2])),
vol.Optional(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
}
)
SCHEMA_SERVICE_PUT_PARAMSET = vol.Schema(
{
vol.Required(ATTR_INTERFACE): cv.string,
vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_PARAMSET_KEY): vol.All(cv.string, vol.Upper),
vol.Required(ATTR_PARAMSET): dict,
}
)
def setup(hass, config):
    """Set up the Homematic component.

    Builds the remotes dictionary for pyhomematic, starts the XML-RPC
    server thread, creates hub entities for each configured host and
    registers the component services.  Returns True on success.
    """
    from pyhomematic import HMConnection

    conf = config[DOMAIN]
    hass.data[DATA_CONF] = remotes = {}
    hass.data[DATA_STORE] = set()
    # Create hosts-dictionary for pyhomematic.
    # NOTE: the two password lookups below were corrupted by anonymization
    # (`r<PASSWORD>(...)` / `s<PASSWORD>(...)`) which parsed as comparison
    # chains and raised NameError at runtime; restored to `.get(...)`.
    for rname, rconfig in conf[CONF_INTERFACES].items():
        remotes[rname] = {
            "ip": rconfig.get(CONF_HOST),
            "port": rconfig.get(CONF_PORT),
            "path": rconfig.get(CONF_PATH),
            "resolvenames": rconfig.get(CONF_RESOLVENAMES),
            "jsonport": rconfig.get(CONF_JSONPORT),
            "username": rconfig.get(CONF_USERNAME),
            "password": rconfig.get(CONF_PASSWORD),
            "callbackip": rconfig.get(CONF_CALLBACK_IP),
            "callbackport": rconfig.get(CONF_CALLBACK_PORT),
            "ssl": rconfig.get(CONF_SSL),
            "verify_ssl": rconfig.get(CONF_VERIFY_SSL),
            "connect": True,
        }
    # Hosts (Homegear/CCU) used only for variables -- no device connection.
    for sname, sconfig in conf[CONF_HOSTS].items():
        remotes[sname] = {
            "ip": sconfig.get(CONF_HOST),
            "port": DEFAULT_PORT,
            "username": sconfig.get(CONF_USERNAME),
            "password": sconfig.get(CONF_PASSWORD),
            "connect": False,
        }
    # Create server thread
    bound_system_callback = partial(_system_callback_handler, hass, config)
    hass.data[DATA_HOMEMATIC] = homematic = HMConnection(
        local=config[DOMAIN].get(CONF_LOCAL_IP),
        localport=config[DOMAIN].get(CONF_LOCAL_PORT, DEFAULT_LOCAL_PORT),
        remotes=remotes,
        systemcallback=bound_system_callback,
        interface_id="homeassistant",
    )
    # Start server thread, connect to hosts, initialize to receive events
    homematic.start()
    # Stops server when HASS is shutting down
    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop)
    # Init homematic hubs -- one hub entity per configured host.
    entity_hubs = []
    for hub_name in conf[CONF_HOSTS].keys():
        entity_hubs.append(HMHub(hass, homematic, hub_name))

    def _hm_service_virtualkey(service):
        """Service to handle virtualkey servicecalls."""
        address = service.data.get(ATTR_ADDRESS)
        channel = service.data.get(ATTR_CHANNEL)
        param = service.data.get(ATTR_PARAM)
        # Device not found
        hmdevice = _device_from_servicecall(hass, service)
        if hmdevice is None:
            _LOGGER.error("%s not found for service virtualkey!", address)
            return
        # Parameter doesn't exist for device
        if param not in hmdevice.ACTIONNODE:
            _LOGGER.error("%s not datapoint in hm device %s", param, address)
            return
        # Channel doesn't exist for device
        if channel not in hmdevice.ACTIONNODE[param]:
            _LOGGER.error("%i is not a channel in hm device %s", channel, address)
            return
        # Call parameter
        hmdevice.actionNodeData(param, True, channel)

    hass.services.register(
        DOMAIN,
        SERVICE_VIRTUALKEY,
        _hm_service_virtualkey,
        schema=SCHEMA_SERVICE_VIRTUALKEY,
    )

    def _service_handle_value(service):
        """Service to call setValue method for HomeMatic system variable."""
        entity_ids = service.data.get(ATTR_ENTITY_ID)
        name = service.data[ATTR_NAME]
        value = service.data[ATTR_VALUE]
        if entity_ids:
            entities = [
                entity for entity in entity_hubs if entity.entity_id in entity_ids
            ]
        else:
            entities = entity_hubs
        if not entities:
            _LOGGER.error("No HomeMatic hubs available")
            return
        for hub in entities:
            hub.hm_set_variable(name, value)

    hass.services.register(
        DOMAIN,
        SERVICE_SET_VARIABLE_VALUE,
        _service_handle_value,
        schema=SCHEMA_SERVICE_SET_VARIABLE_VALUE,
    )

    def _service_handle_reconnect(service):
        """Service to reconnect all HomeMatic hubs."""
        homematic.reconnect()

    hass.services.register(
        DOMAIN,
        SERVICE_RECONNECT,
        _service_handle_reconnect,
        schema=SCHEMA_SERVICE_RECONNECT,
    )

    def _service_handle_device(service):
        """Service to call setValue method for HomeMatic devices."""
        address = service.data.get(ATTR_ADDRESS)
        channel = service.data.get(ATTR_CHANNEL)
        param = service.data.get(ATTR_PARAM)
        value = service.data.get(ATTR_VALUE)
        value_type = service.data.get(ATTR_VALUE_TYPE)
        # Convert value into correct XML-RPC Type.
        # https://docs.python.org/3/library/xmlrpc.client.html#xmlrpc.client.ServerProxy
        if value_type:
            if value_type == "int":
                value = int(value)
            elif value_type == "double":
                value = float(value)
            elif value_type == "boolean":
                value = bool(value)
            elif value_type == "dateTime.iso8601":
                value = datetime.strptime(value, "%Y%m%dT%H:%M:%S")
            else:
                # Default is 'string'
                value = str(value)
        # Device not found
        hmdevice = _device_from_servicecall(hass, service)
        if hmdevice is None:
            _LOGGER.error("%s not found!", address)
            return
        hmdevice.setValue(param, value, channel)

    hass.services.register(
        DOMAIN,
        SERVICE_SET_DEVICE_VALUE,
        _service_handle_device,
        schema=SCHEMA_SERVICE_SET_DEVICE_VALUE,
    )

    def _service_handle_install_mode(service):
        """Service to set interface into install mode."""
        interface = service.data.get(ATTR_INTERFACE)
        mode = service.data.get(ATTR_MODE)
        time = service.data.get(ATTR_TIME)
        address = service.data.get(ATTR_ADDRESS)
        homematic.setInstallMode(interface, t=time, mode=mode, address=address)

    hass.services.register(
        DOMAIN,
        SERVICE_SET_INSTALL_MODE,
        _service_handle_install_mode,
        schema=SCHEMA_SERVICE_SET_INSTALL_MODE,
    )

    def _service_put_paramset(service):
        """Service to call the putParamset method on a HomeMatic connection."""
        interface = service.data.get(ATTR_INTERFACE)
        address = service.data.get(ATTR_ADDRESS)
        paramset_key = service.data.get(ATTR_PARAMSET_KEY)
        # When passing in the paramset from a YAML file we get an OrderedDict
        # here instead of a dict, so add this explicit cast.
        # The service schema makes sure that this cast works.
        paramset = dict(service.data.get(ATTR_PARAMSET))
        _LOGGER.debug(
            "Calling putParamset: %s, %s, %s, %s",
            interface,
            address,
            paramset_key,
            paramset,
        )
        homematic.putParamset(interface, address, paramset_key, paramset)

    hass.services.register(
        DOMAIN,
        SERVICE_PUT_PARAMSET,
        _service_put_paramset,
        schema=SCHEMA_SERVICE_PUT_PARAMSET,
    )
    return True
def _system_callback_handler(hass, config, src, *args):
    """System callback handler.

    Dispatches pyhomematic system callbacks: ``newDevices`` registers event
    callbacks and triggers entity discovery; ``error`` fires an error event
    on the HASS bus.  The payload of *args* depends on *src*.
    """
    # New devices available at hub
    if src == "newDevices":
        (interface_id, dev_descriptions) = args
        # interface_id looks like "homeassistant-<interface>"; keep the tail.
        interface = interface_id.split("-")[-1]
        # Device support active?  Hosts configured with connect=False (the
        # CONF_HOSTS section) do not get device entities.
        if not hass.data[DATA_CONF][interface]["connect"]:
            return
        addresses = []
        for dev in dev_descriptions:
            # "ADDRESS" may include a channel suffix (":n"); store only the
            # base device address and skip devices already seen.
            address = dev["ADDRESS"].split(":")[0]
            if address not in hass.data[DATA_STORE]:
                hass.data[DATA_STORE].add(address)
                addresses.append(address)
        # Register EVENTS
        # Search all devices with an EVENTNODE that includes data
        bound_event_callback = partial(_hm_event_handler, hass, interface)
        for dev in addresses:
            hmdevice = hass.data[DATA_HOMEMATIC].devices[interface].get(dev)
            if hmdevice.EVENTNODE:
                hmdevice.setEventCallback(callback=bound_event_callback, bequeath=True)
        # Create HASS entities
        if addresses:
            for component_name, discovery_type in (
                ("switch", DISCOVER_SWITCHES),
                ("light", DISCOVER_LIGHTS),
                ("cover", DISCOVER_COVER),
                ("binary_sensor", DISCOVER_BINARY_SENSORS),
                ("sensor", DISCOVER_SENSORS),
                ("climate", DISCOVER_CLIMATE),
                ("lock", DISCOVER_LOCKS),
                ("binary_sensor", DISCOVER_BATTERY),
            ):
                # Get all devices of a specific type
                found_devices = _get_devices(hass, discovery_type, addresses, interface)
                # When devices of this type are found
                # they are setup in HASS and a discovery event is fired
                if found_devices:
                    discovery.load_platform(
                        hass,
                        component_name,
                        DOMAIN,
                        {
                            ATTR_DISCOVER_DEVICES: found_devices,
                            ATTR_DISCOVERY_TYPE: discovery_type,
                        },
                        config,
                    )
    # Homegear error message
    elif src == "error":
        _LOGGER.error("Error: %s", args)
        (interface_id, errorcode, message) = args
        hass.bus.fire(EVENT_ERROR, {ATTR_ERRORCODE: errorcode, ATTR_MESSAGE: message})
def _get_devices(hass, discovery_type, keys, interface):
"""Get the HomeMatic devices for given discovery_type."""
device_arr = []
for key in keys:
device = | |
"'''%s'''" % s1
else:
if s1.find('"') != -1:
s1 = s1.replace('"', '\\"')
if s1.find('\n') == -1:
return '"%s"' % s1
else:
return '"""%s"""' % s1
def get_all_text_(node):
    """Return the element's own text followed by every child's tail text."""
    pieces = [node.text if node.text is not None else '']
    pieces.extend(child.tail for child in node if child.tail is not None)
    return ''.join(pieces)
def find_attr_value_(attr_name, node):
    """Return the value of *attr_name* on *node*, or None if absent.

    A plain name is looked up directly; a 'prefix:name' form is resolved
    through ``node.nsmap`` into Clark notation ('{uri}name') first.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        uri = node.nsmap.get(prefix)
        if uri is None:
            return None
        return attrs.get('{%s}%s' % (uri, local))
    # More than one colon: not a resolvable attribute name.
    return None
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when an XML instance fails to parse."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for *node*, adding element context to *msg*.

    When the lxml parser is active the message also includes the source
    line number (``sourceline`` is only available on lxml elements).
    """
    if XMLParser_import_library == XMLParser_import_lxml:
        msg = '%s (element %s/line %d)' % (
            msg, node.tag, node.sourceline, )
    else:
        msg = '%s (element %s)' % (msg, node.tag, )
    raise GDSParseError(msg)
class MixedContainer:
    """One piece of mixed XML content (text, a simple value, or a nested
    complex object), tagged with a category and content type so it can be
    re-serialized to an output stream or an etree element."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE(review): the content_type parameter is unused here.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace, pretty_print=True):
        """Write this content to *outfile*, dispatching on self.category."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace, name, pretty_print)
    def exportSimple(self, outfile, level, name):
        # Serialize a simple value as <name>value</name>, choosing the
        # printf format by content type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name, base64.b64encode(self.value), self.name))
    def to_etree(self, element):
        """Append this content to an etree *element* (text becomes text/tail,
        simple values become subelements, complex values recurse)."""
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    # Text after a child element goes on that child's tail.
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else:    # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        # Format the simple value as text; mirrors exportSimple's formats.
        # NOTE(review): TypeNone/TypeText fall through with `text` unbound
        # and would raise UnboundLocalError -- confirm callers never pass
        # those types here.
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        """Write a Python-literal representation (model_.MixedContainer(...))."""
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type, self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type, self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata for one member of a generated class: its name, its XML data
    type (a single type or a chain of types as a list), and a container flag."""

    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        return self.data_type

    def get_data_type(self):
        # A list is a type chain: the effective type is the last entry,
        # defaulting to 'xs:string' for an empty chain.
        if not isinstance(self.data_type, list):
            return self.data_type
        return self.data_type[-1] if self.data_type else 'xs:string'

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class ComplexMetricType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, _instances=None, _derived=None, _real_archetype=None, _desynched_atts=None, MetricID=None, _subtype=None, SubType=None, DataFormat=None, _archetype=None, _id=None, Type=None, Metric=None):
self.original_tagname_ = None
self._instances = _cast(None, _instances)
self._derived = _cast(None, _derived)
self._real_archetype = _cast(bool, _real_archetype)
self._desynched_atts = _cast(None, _desynched_atts)
self.MetricID = _cast(None, MetricID)
self._subtype = _cast(bool, _subtype)
self.SubType = _cast(None, SubType)
self.DataFormat = _cast(None, DataFormat)
self._archetype = _cast(None, _archetype)
self._id = _cast(None, _id)
self.Type = _cast(None, Type)
if Metric is None:
self.Metric = []
else:
self.Metric = Metric
def factory(*args_, **kwargs_):
if ComplexMetricType.subclass:
return ComplexMetricType.subclass(*args_, **kwargs_)
else:
return ComplexMetricType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Metric(self): return self.Metric
def set_Metric(self, Metric): self.Metric = Metric
def add_Metric(self, value): self.Metric.append(value)
def insert_Metric(self, index, value): self.Metric[index] = value
def get__instances(self): return self._instances
def set__instances(self, _instances): self._instances = _instances
def get__derived(self): return self._derived
def set__derived(self, _derived): self._derived = _derived
def get__real_archetype(self): return self._real_archetype
def set__real_archetype(self, _real_archetype): self._real_archetype = _real_archetype
def get__desynched_atts(self): return self._desynched_atts
def set__desynched_atts(self, _desynched_atts): self._desynched_atts = _desynched_atts
def get_MetricID(self): return self.MetricID
def set_MetricID(self, MetricID): self.MetricID = MetricID
def get__subtype(self): return self._subtype
def set__subtype(self, _subtype): self._subtype = _subtype
def get_SubType(self): return self.SubType
def set_SubType(self, SubType): self.SubType = SubType
def get_DataFormat(self): return self.DataFormat
def set_DataFormat(self, DataFormat): self.DataFormat = DataFormat
def get__archetype(self): return self._archetype
def set__archetype(self, _archetype): self._archetype = _archetype
def get__id(self): return self._id
def set__id(self, _id): self._id = _id
def get_Type(self): return self.Type
def set_Type(self, Type): self.Type = Type
def hasContent_(self):
if (
self.Metric
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='ComplexMetricType', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ComplexMetricType')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_='', name_='ComplexMetricType', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='ComplexMetricType'):
        """Write this element's XML attributes to ``outfile``.

        Each attribute is emitted at most once: ``already_processed`` guards
        against duplicates when subclasses chain to this method. String-valued
        attributes are quoted via ``quote_attrib`` and encoded with
        ``ExternalEncoding``; boolean attributes (``_real_archetype``,
        ``_subtype``) are rendered with ``gds_format_boolean``. Attributes
        that are ``None`` are omitted entirely.
        """
        if self._instances is not None and '_instances' not in already_processed:
            already_processed.add('_instances')
            outfile.write(' _instances=%s' % (self.gds_format_string(quote_attrib(self._instances).encode(ExternalEncoding), input_name='_instances'), ))
        if self._derived is not None and '_derived' not in already_processed:
            already_processed.add('_derived')
            outfile.write(' _derived=%s' % (self.gds_format_string(quote_attrib(self._derived).encode(ExternalEncoding), input_name='_derived'), ))
        if self._real_archetype is not None and '_real_archetype' not in already_processed:
            already_processed.add('_real_archetype')
            outfile.write(' _real_archetype="%s"' % self.gds_format_boolean(self._real_archetype, input_name='_real_archetype'))
        if self._desynched_atts is not None and '_desynched_atts' not in already_processed:
            already_processed.add('_desynched_atts')
            outfile.write(' _desynched_atts=%s' % (self.gds_format_string(quote_attrib(self._desynched_atts).encode(ExternalEncoding), input_name='_desynched_atts'), ))
        if self.MetricID is not None and 'MetricID' not in already_processed:
            already_processed.add('MetricID')
            outfile.write(' MetricID=%s' % (self.gds_format_string(quote_attrib(self.MetricID).encode(ExternalEncoding), input_name='MetricID'), ))
        if self._subtype is not None and '_subtype' not in already_processed:
            already_processed.add('_subtype')
            outfile.write(' _subtype="%s"' % self.gds_format_boolean(self._subtype, input_name='_subtype'))
        if self.SubType is not None and 'SubType' not in already_processed:
            already_processed.add('SubType')
            outfile.write(' SubType=%s' % (self.gds_format_string(quote_attrib(self.SubType).encode(ExternalEncoding), input_name='SubType'), ))
        if self.DataFormat is not None and 'DataFormat' not in already_processed:
            already_processed.add('DataFormat')
            outfile.write(' DataFormat=%s' % (self.gds_format_string(quote_attrib(self.DataFormat).encode(ExternalEncoding), input_name='DataFormat'), ))
        if self._archetype is not None and '_archetype' not in already_processed:
            already_processed.add('_archetype')
            outfile.write(' _archetype=%s' % (self.gds_format_string(quote_attrib(self._archetype).encode(ExternalEncoding), input_name='_archetype'), ))
        if self._id is not None and '_id' not in already_processed:
            already_processed.add('_id')
            outfile.write(' _id=%s' % (self.gds_format_string(quote_attrib(self._id).encode(ExternalEncoding), input_name='_id'), ))
        if self.Type is not None and 'Type' not in already_processed:
            already_processed.add('Type')
            outfile.write(' Type=%s' % (self.gds_format_string(quote_attrib(self.Type).encode(ExternalEncoding), input_name='Type'), ))
def exportChildren(self, outfile, level, namespace_='', name_='ComplexMetricType', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for Metric_ in self.Metric:
Metric_.export(outfile, level, namespace_, name_='Metric', pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='ComplexMetricType'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self._instances is not None and '_instances' not in already_processed:
already_processed.add('_instances')
showIndent(outfile, level)
outfile.write('_instances="%s",\n' % (self._instances,))
if self._derived is not None and '_derived' not in already_processed:
already_processed.add('_derived')
showIndent(outfile, level)
outfile.write('_derived="%s",\n' % (self._derived,))
if self._real_archetype is not None and '_real_archetype' not in already_processed:
already_processed.add('_real_archetype')
showIndent(outfile, level)
outfile.write('_real_archetype=%s,\n' % (self._real_archetype,))
if self._desynched_atts is not None and '_desynched_atts' not in already_processed:
already_processed.add('_desynched_atts')
showIndent(outfile, level)
outfile.write('_desynched_atts="%s",\n' % (self._desynched_atts,))
if self.MetricID is not None and 'MetricID' not in already_processed:
already_processed.add('MetricID')
showIndent(outfile, level)
outfile.write('MetricID="%s",\n' % (self.MetricID,))
if self._subtype is not None and '_subtype' not in already_processed:
already_processed.add('_subtype')
showIndent(outfile, level)
outfile.write('_subtype=%s,\n' % (self._subtype,))
if self.SubType is not None and 'SubType' not in already_processed:
already_processed.add('SubType')
showIndent(outfile, level)
outfile.write('SubType="%s",\n' % (self.SubType,))
if self.DataFormat is not None and 'DataFormat' not in already_processed:
already_processed.add('DataFormat')
showIndent(outfile, level)
outfile.write('DataFormat="%s",\n' % (self.DataFormat,))
if self._archetype is not None and '_archetype' not in already_processed:
already_processed.add('_archetype')
showIndent(outfile, level)
outfile.write('_archetype="%s",\n' % (self._archetype,))
if self._id is not None and '_id' not | |
<reponame>gomez-addams/USD<gh_stars>1-10
#!/pxrpythonsubst
#
# Copyright 2017 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
from pxr import Gf, Tf, Sdf, Usd, UsdGeom, Vt
import unittest, math
class TestUsdGeomXformable(unittest.TestCase):
def _AssertCloseXf(self, a, b):
for av, bv in zip(a, b):
self.assertTrue(Gf.IsClose(av, bv, 1e-4))
def test_TranslateOp(self):
s = Usd.Stage.CreateInMemory()
x = UsdGeom.Xform.Define(s, '/World')
translation = Gf.Vec3d(10., 20., 30.)
x.AddTranslateOp().Set(translation)
xform = x.GetLocalTransformation(Usd.TimeCode.Default())
self._AssertCloseXf(xform, Gf.Matrix4d(1.0).SetTranslate(translation))
self.assertEqual(x.GetXformOpOrderAttr().Get(),
Vt.TokenArray(('xformOp:translate', )))
def test_ScaleOp(self):
s = Usd.Stage.CreateInMemory()
x = UsdGeom.Xform.Define(s, '/World')
scaleVec = Gf.Vec3f(1., 2., 3.)
x.AddScaleOp().Set(scaleVec)
xform = x.GetLocalTransformation(Usd.TimeCode.Default())
self._AssertCloseXf(xform, Gf.Matrix4d(1.0).SetScale(Gf.Vec3d(scaleVec)))
self.assertEqual(x.GetXformOpOrderAttr().Get(),
Vt.TokenArray(('xformOp:scale', )))
    def test_ScalarRotateOps(self):
        """Single-axis rotate ops (rotateX/Y/Z), alone and in pairs, produce
        the expected hard-coded 4x4 matrices and xformOpOrder tokens."""
        s = Usd.Stage.CreateInMemory()
        # 45-degree rotation about X alone.
        x = UsdGeom.Xform.Define(s, '/X')
        x.AddRotateXOp().Set(45.)
        xformX = x.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xformX,
            Gf.Matrix4d(1.0, 0.0, 0.0, 0.0,
                        0.0, 0.7071067811865475, 0.7071067811865476, 0.0,
                        0.0, -0.7071067811865476, 0.7071067811865475, 0.0,
                        0.0, 0.0, 0.0, 1.0))
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateX', )))
        # 90-degree rotation about Y alone.
        y = UsdGeom.Xform.Define(s, '/Y')
        y.AddRotateYOp().Set(90.)
        xformY = y.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xformY,
            Gf.Matrix4d(0, 0.0, -1.0, 0.0,
                        0.0, 1.0, 0.0, 0.0,
                        1.0, 0.0, 0, 0.0,
                        0.0, 0.0, 0.0, 1.0))
        self.assertEqual(y.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateY', )))
        # 30-degree rotation about Z alone.
        z = UsdGeom.Xform.Define(s, '/Z')
        z.AddRotateZOp().Set(30.)
        xformZ = z.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xformZ,
            Gf.Matrix4d(0.866025403784439, 0.5, 0, 0,
                        -0.5, 0.866025403784439, 0, 0,
                        0, 0, 1, 0,
                        0, 0, 0, 1))
        self.assertEqual(z.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateZ', )))
        # Pair: xformOpOrder is (rotateY, rotateX).
        xy = UsdGeom.Xform.Define(s, '/XY')
        xy.AddRotateYOp().Set(90.)
        xy.AddRotateXOp().Set(45.)
        xformXY = xy.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xformXY,
            Gf.Matrix4d(0.0, 0.0, -1.0, 0.0,
                        0.7071067811865476, 0.7071067811865475, 0.0, 0.0,
                        0.7071067811865475, -0.7071067811865476, 0.0, 0.0,
                        0.0, 0.0, 0.0, 1.0))
        self.assertEqual(xy.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateY', 'xformOp:rotateX')))
        # Pair: xformOpOrder is (rotateZ, rotateY).
        yz = UsdGeom.Xform.Define(s, '/YZ')
        yz.AddRotateZOp().Set(30.)
        yz.AddRotateYOp().Set(90.)
        xformYZ = yz.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xformYZ,
            Gf.Matrix4d(0.0, 0.0, -1.0, 0.0,
                        -0.5, 0.8660254037844387, 0.0, 0.0,
                        0.8660254037844387, 0.5, 0.0, 0.0,
                        0.0, 0.0, 0.0, 1.0))
        self.assertEqual(yz.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateZ', 'xformOp:rotateY')))
        # Pair: xformOpOrder is (rotateX, rotateZ).
        zx = UsdGeom.Xform.Define(s, '/ZX')
        zx.AddRotateXOp().Set(45.)
        zx.AddRotateZOp().Set(30.)
        xformZX = zx.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xformZX,
            Gf.Matrix4d(0.8660254037844387, 0.3535533905932737, 0.35355339059327373, 0.0,
                        -0.5, 0.6123724356957945, 0.6123724356957946, 0.0,
                        0.0, -0.7071067811865476, 0.7071067811865475, 0.0,
                        0.0, 0.0, 0.0, 1.0))
        self.assertEqual(zx.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateX', 'xformOp:rotateZ')))
    def test_VectorRotateOps(self):
        """For each three-axis rotate op (rotateXYZ, rotateXZY, ...), verify
        that the single vector op produces the same local transformation as
        the equivalent sequence of three scalar rotate ops, and that the
        authored xformOpOrder tokens are as expected."""
        s = Usd.Stage.CreateInMemory()
        rot = Gf.Vec3f(30., 45., 60.)
        # Rotation order XYZ
        xyz = UsdGeom.Xform.Define(s, '/XYZ')
        xyz.AddRotateXYZOp().Set(rot)
        xformXYZ = xyz.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(xyz.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateXYZ', )))
        # Equivalent scalar-op decomposition authored as (Z, Y, X).
        xyz2 = UsdGeom.Xform.Define(s, '/XYZ2')
        xyz2.AddRotateZOp().Set(rot[2])
        xyz2.AddRotateYOp().Set(rot[1])
        xyz2.AddRotateXOp().Set(rot[0])
        xformXYZ2 = xyz2.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(xyz2.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateZ', 'xformOp:rotateY',
                                        'xformOp:rotateX')))
        self._AssertCloseXf(xformXYZ, xformXYZ2)
        # Rotation order XZY
        xzy = UsdGeom.Xform.Define(s, '/XZY')
        xzy.AddRotateXZYOp().Set(rot)
        xformXZY = xzy.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(xzy.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateXZY', )))
        xzy2 = UsdGeom.Xform.Define(s, '/XZY2')
        xzy2.AddRotateYOp().Set(rot[1])
        xzy2.AddRotateZOp().Set(rot[2])
        xzy2.AddRotateXOp().Set(rot[0])
        xformXZY2 = xzy2.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(xzy2.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateY', 'xformOp:rotateZ',
                                        'xformOp:rotateX')))
        self._AssertCloseXf(xformXZY, xformXZY2)
        # Rotation order YXZ
        yxz = UsdGeom.Xform.Define(s, '/YXZ')
        yxz.AddRotateYXZOp().Set(rot)
        xformYXZ = yxz.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(yxz.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateYXZ', )))
        yxz2 = UsdGeom.Xform.Define(s, '/YXZ2')
        yxz2.AddRotateZOp().Set(rot[2])
        yxz2.AddRotateXOp().Set(rot[0])
        yxz2.AddRotateYOp().Set(rot[1])
        xformYXZ2 = yxz2.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(yxz2.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateZ', 'xformOp:rotateX',
                                        'xformOp:rotateY')))
        self._AssertCloseXf(xformYXZ, xformYXZ2)
        # Rotation order YZX
        yzx = UsdGeom.Xform.Define(s, '/YZX')
        yzx.AddRotateYZXOp().Set(rot)
        xformYZX = yzx.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(yzx.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateYZX', )))
        yzx2 = UsdGeom.Xform.Define(s, '/YZX2')
        yzx2.AddRotateXOp().Set(rot[0])
        yzx2.AddRotateZOp().Set(rot[2])
        yzx2.AddRotateYOp().Set(rot[1])
        xformYZX2 = yzx2.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(yzx2.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateX', 'xformOp:rotateZ',
                                        'xformOp:rotateY')))
        self._AssertCloseXf(xformYZX, xformYZX2)
        # Rotation order ZXY
        zxy = UsdGeom.Xform.Define(s, '/ZXY')
        zxy.AddRotateZXYOp().Set(rot)
        xformZXY = zxy.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(zxy.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateZXY', )))
        zxy2 = UsdGeom.Xform.Define(s, '/ZXY2')
        zxy2.AddRotateYOp().Set(rot[1])
        zxy2.AddRotateXOp().Set(rot[0])
        zxy2.AddRotateZOp().Set(rot[2])
        xformZXY2 = zxy2.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(zxy2.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateY', 'xformOp:rotateX',
                                        'xformOp:rotateZ')))
        self._AssertCloseXf(xformZXY, xformZXY2)
        # Rotation order ZYX
        zyx = UsdGeom.Xform.Define(s, '/ZYX')
        zyx.AddRotateZYXOp().Set(rot)
        xformZYX = zyx.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(zyx.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateZYX', )))
        zyx2 = UsdGeom.Xform.Define(s, '/ZYX2')
        zyx2.AddRotateXOp().Set(rot[0])
        zyx2.AddRotateYOp().Set(rot[1])
        zyx2.AddRotateZOp().Set(rot[2])
        xformZYX2 = zyx2.GetLocalTransformation(Usd.TimeCode.Default())
        self.assertEqual(zyx2.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:rotateX', 'xformOp:rotateY',
                                        'xformOp:rotateZ')))
        self._AssertCloseXf(xformZYX, xformZYX2)
    def test_PrestoRotatePivot(self):
        """ Test that simulates how the pivot position is taken into account in the
        presto transformable prim with transformType=Vectors.
        """
        s = Usd.Stage.CreateInMemory()
        x = UsdGeom.Xform.Define(s, '/World')
        x.AddTranslateOp().Set(Gf.Vec3d(10., 0., 0.))
        # Use token for 'pivot'
        x.AddTranslateOp(opSuffix='pivot', isInverseOp=False).Set(Gf.Vec3d(0, 10, 0))
        x.AddRotateXYZOp().Set(Gf.Vec3f(60, 0, 30))
        x.AddScaleOp().Set(Gf.Vec3f(2,2,2))
        # Insert the inverse pivot.
        # NOTE: the returned op is deliberately never Set().
        inverseTranslateOp = x.AddTranslateOp(opSuffix='pivot', isInverseOp=True)
        # Calling set on an inverseOp results in a coding error.
        # The inverse op appears in xformOpOrder with the '!invert!' prefix.
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:translate', 'xformOp:translate:pivot',
                                        'xformOp:rotateXYZ', 'xformOp:scale',
                                        '!invert!xformOp:translate:pivot')))
        xform = x.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xform,
            Gf.Matrix4d(1.7320508075688774, 1.0, 0.0, 0.0,
                        -0.5, 0.8660254037844389, 1.7320508075688772, 0.0,
                        0.8660254037844385, -1.5, 1.0, 0.0,
                        15.0, 1.339745962155611, -17.32050807568877, 1.0))
    def test_OrientOp(self):
        """An orient (quaternion) op produces the expected rotation matrices;
        per the final assertion, a zero quaternion yields the identity."""
        s = Usd.Stage.CreateInMemory()
        x = UsdGeom.Xform.Define(s, '/World')
        orientOp = x.AddOrientOp()
        # Arbitrary normalized quaternion.
        orientOp.Set(Gf.Quatf(1, Gf.Vec3f(2, 3, 4)).GetNormalized())
        xform = x.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xform,
            Gf.Matrix4d(-0.666666666666667, 0.66666666666667, 0.333333333333333, 0.0,
                        0.133333333333333, -0.33333333333333, 0.933333333333333, 0.0,
                        0.733333333333333, 0.66666666666666, 0.133333333333333, 0.0,
                        0.0, 0.0, 0.0, 1.0))
        # 90-degree on x-axis
        orientOp.Set(Gf.Quatf(0.7071067811865476, Gf.Vec3f(0.7071067811865475, 0, 0)))
        xform = x.GetLocalTransformation(Usd.TimeCode.Default())
        self._AssertCloseXf(xform,
            Gf.Matrix4d(1, 0, 0, 0,
                        0, 0, 1, 0,
                        0, -1, 0, 0,
                        0, 0, 0, 1))
        # Degenerate (zero) quaternion: local transformation is identity.
        orientOp.Set(Gf.Quatf(0, Gf.Vec3f(0, 0, 0)))
        self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), Gf.Matrix4d(1.))
    def test_TransformOp(self):
        """Matrix transform ops: round-trip a matrix through AddTransformOp,
        check MakeMatrixXform is idempotent on the op order, and verify
        ClearXformOpOrder / SetXformOpOrder(resetXformStack=True) behavior."""
        s = Usd.Stage.CreateInMemory()
        x = UsdGeom.Xform.Define(s, '/World')
        transformOp = x.AddTransformOp()
        xform = Gf.Matrix4d(2.0).SetTranslate(Gf.Vec3d(10, 20, 30))
        transformOp.Set(xform)
        self._AssertCloseXf(xform, x.GetLocalTransformation(Usd.TimeCode.Default()))
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:transform', )))
        # MakeMatrixXform on an already matrix-only xformable keeps the same op.
        xformOp = x.MakeMatrixXform()
        self.assertEqual(xformOp.GetOpName(), "xformOp:transform")
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:transform', )))
        # Clear xformOpOrder
        x.ClearXformOpOrder()
        # Clearing opOrder does not remove the attribute.
        self.assertTrue(x.GetPrim().HasAttribute("xformOp:transform"))
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray())
        # Re-author the order with the reset token prepended.
        self.assertTrue(x.SetXformOpOrder(orderedXformOps=[xformOp], resetXformStack=True))
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('!resetXformStack!', 'xformOp:transform')))
    def test_ResetXformStack(self):
        """SetResetXformStack toggles the '!resetXformStack!' token at the
        front of xformOpOrder (idempotently), and a child prim with the token
        ignores its parent's transformation when computing local-to-world."""
        s = Usd.Stage.CreateInMemory()
        x = UsdGeom.Xform.Define(s, '/World')
        x.AddTranslateOp().Set(Gf.Vec3d(20, 30, 40))
        x.SetResetXformStack(True)
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('!resetXformStack!', 'xformOp:translate')))
        # Calling it twice should have no effect the second time.
        x.SetResetXformStack(True)
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('!resetXformStack!', 'xformOp:translate')))
        x.SetResetXformStack(False)
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:translate', )))
        # Again, calling this twice shouldn't make a difference.
        x.SetResetXformStack(False)
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:translate', )))
        # The reset token stays at the front even with multiple ops authored.
        x.AddTransformOp().Set(Gf.Matrix4d(1.0))
        x.SetResetXformStack(True)
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('!resetXformStack!', 'xformOp:translate',
                                        'xformOp:transform')))
        x.SetResetXformStack(False)
        self.assertEqual(x.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('xformOp:translate',
                                        'xformOp:transform')))
        # Child prim: without the reset token its ctm composes with /World.
        cx = UsdGeom.Xform.Define(s, '/World/Model')
        cx.AddTranslateOp().Set(Gf.Vec3d(10, 10, 10))
        cache = UsdGeom.XformCache()
        cxCtm = cache.GetLocalToWorldTransform(cx.GetPrim())
        self._AssertCloseXf(cxCtm, Gf.Matrix4d(1.0).SetTranslate(Gf.Vec3d(30.0, 40.0, 50.0)))
        cx.SetResetXformStack(True)
        self.assertEqual(cx.GetXformOpOrderAttr().Get(),
                         Vt.TokenArray(('!resetXformStack!', 'xformOp:translate')))
        # Clear the xform cache and recompute local-to-world xform.
        cache.Clear()
        newCxCtm = cache.GetLocalToWorldTransform(cx.GetPrim())
        localCxXform = cx.GetLocalTransformation(Usd.TimeCode.Default())
        # With the reset token, local-to-world equals the local transformation.
        self._AssertCloseXf(newCxCtm, Gf.Matrix4d(1.0).SetTranslate(Gf.Vec3d(10.0, 10.0, 10.0)))
        self._AssertCloseXf(newCxCtm, localCxXform)
        # Test resetXformStack when it's not at the beginning of xformOpOrder.
        cx.SetResetXformStack(False)
        newXformOpOrder = list(cx.GetXformOpOrderAttr().Get())
        newXformOpOrder.append(UsdGeom.XformOpTypes.resetXformStack)
        cx.GetXformOpOrderAttr().Set(newXformOpOrder)
        cx.AddTransformOp().Set(Gf.Matrix4d(2.0))
        self.assertTrue(cx.GetResetXformStack())
def test_InverseOps(self):
IDENTITY = Gf.Matrix4d(1.)
s = Usd.Stage.CreateInMemory()
x = UsdGeom.Xform.Define(s, '/World')
x.AddTranslateOp().Set(Gf.Vec3d(20, 30, 40))
x.AddTranslateOp(isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
x.AddScaleOp().Set(Gf.Vec3f(2,3,4))
x.AddScaleOp(isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
x.AddRotateXOp().Set(30.)
x.AddRotateXOp(isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
x.AddRotateYOp().Set(45.)
x.AddRotateYOp(isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
x.AddRotateZOp().Set(60.)
x.AddRotateZOp(isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
x.AddRotateXYZOp(opSuffix="firstRotate").Set(Gf.Vec3f(10, 20, 30))
x.AddRotateXYZOp(opSuffix="firstRotate", isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
x.AddRotateZYXOp(opSuffix="lastRotate").Set(Gf.Vec3f(30, 60, 45))
x.AddRotateZYXOp(opSuffix="lastRotate", isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
quat = Gf.Quatf(1, Gf.Vec3f(2, 3, 4))
x.AddOrientOp().Set(quat)
x.AddOrientOp(isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
rotation = Gf.Rotation(Gf.Vec3d(quat.GetImaginary()), quat.GetReal())
x.AddTransformOp().Set(Gf.Matrix4d(rotation, Gf.Vec3d(10, 20, 30)))
x.AddTransformOp(isInverseOp=True)
self._AssertCloseXf(x.GetLocalTransformation(Usd.TimeCode.Default()), IDENTITY)
# We've got tons of xform ops in x now, let's test GetOrderedXformOps API.
orderedXformOps = x.GetOrderedXformOps()
xformOpOrder = Vt.TokenArray(len(orderedXformOps))
index = 0
for op in orderedXformOps:
xformOpOrder[index] = op.GetOpName()
index += 1
self.assertEqual(xformOpOrder, x.GetXformOpOrderAttr().Get())
    def test_AddExistingXformOp(self):
        """Adding a duplicate xform op raises RuntimeError; adding the
        inverse of an existing op is allowed, but Set() on an inverse op and
        adding an inverse with a mismatched precision both raise."""
        s = Usd.Stage.CreateInMemory()
        x = UsdGeom.Xform.Define(s, '/World')
        xlateOp = x.AddTranslateOp()
        # Duplicate op: error.
        with self.assertRaises(RuntimeError):
            x.AddTranslateOp()
        # Adding an inverse op is OK, since it is considered to be separate from the
        # original op.
        invTranslateOp = x.AddTranslateOp(isInverseOp=True)
        self.assertTrue(invTranslateOp)
        # Setting a value on an inverse op is not ok.
        with self.assertRaises(RuntimeError):
            invTranslateOp.Set(Gf.Vec3d(1,1,1))
        scaleOp = x.AddScaleOp(precision=UsdGeom.XformOp.PrecisionDouble)
        # Inverse scale defaults to float precision, which conflicts with the
        # double-precision scaleOp above.
        with self.assertRaises(RuntimeError):
            invScaleOp = x.AddScaleOp(
                #precision=UsdGeom.XformOp.PrecisionFloat, # this is the default
                isInverseOp=True)
    def test_SingularTransformOp(self):
        """Inverting a singular (non-invertible) matrix op raises at
        computation time, unless the op/inverse pair is consecutive in
        xformOpOrder, in which case the pair is skipped entirely."""
        s = Usd.Stage.CreateInMemory()
        x = UsdGeom.Xform.Define(s, '/World')
        transformOp = x.AddTransformOp()
        # Symmetric matrix chosen to be singular (determinant zero).
        singularMat = Gf.Matrix4d(32, 8, 11, 17,
                                  8, 20, 17, 23,
                                  11, 17, 14, 26,
                                  17, 23, 26, 2)
        transformOp.Set(singularMat, Usd.TimeCode(1.0))
        # Insert a translate op in the middle , as two consecutive inverse
        # ops are simply skipped when computing local transform value.
        x.AddTranslateOp().Set(Gf.Vec3d(1,1,1))
        x.AddTransformOp(isInverseOp=True)
        # Non-consecutive pair: inverting the singular matrix must fail.
        with self.assertRaises(RuntimeError):
            xform = x.GetLocalTransformation(Usd.TimeCode(1.))
        # If the translateOp in the middle is removed from xformOpOrder, then
        # calling GetLocalTransformation() should not result in an error as the pair
        # of consecutive inverse xformOps will get skipped.
        x.GetXformOpOrderAttr().Set(Vt.TokenArray(('xformOp:transform',
            '!invert!xformOp:transform')))
        self.assertEqual(x.GetLocalTransformation(1.0), Gf.Matrix4d(1))
def test_VaryingPrecisionOps(self):
s = Usd.Stage.CreateInMemory()
x1 = UsdGeom.Xform.Define(s, '/World')
halfRotOp = x1.AddRotateXYZOp(precision=UsdGeom.XformOp.PrecisionHalf,
opSuffix='Half')
self.assertEqual(halfRotOp.GetPrecision(), UsdGeom.XformOp.PrecisionHalf)
halfRotOp.Set(Gf.Vec3h(0.0, 0.0, 60.0))
doubleRotOp = x1.AddRotateXYZOp(precision=UsdGeom.XformOp.PrecisionDouble,
opSuffix='Double')
self.assertEqual(doubleRotOp.GetPrecision(), UsdGeom.XformOp.PrecisionDouble)
doubleRotOp.Set(Gf.Vec3d(0.0, 45.123456789, 0.0))
floatRotOp | |
"""
Base class for generic `p`-adic polynomials
This provides common functionality for all `p`-adic polynomials, such
as printing and factoring.
AUTHORS:
- <NAME> (2013-11-22): initial version, split off from other
files, made Polynomial_padic the common base class for all p-adic
polynomials.
"""
#*****************************************************************************
# Copyright (C) 2007 <NAME> <<EMAIL>>
# Copyright (C) 2013 <NAME> <<EMAIL>>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
import re
from sage.rings.padics.precision_error import PrecisionError
from sage.rings.polynomial.polynomial_element import Polynomial
from sage.structure.factorization import Factorization
class Polynomial_padic(Polynomial):
    def __init__(self, parent, x=None, check=True, is_gen=False, construct=False):
        """Initialize a `p`-adic polynomial over ``parent``.

        NOTE(review): ``x`` and ``check`` are accepted but not forwarded to
        ``Polynomial.__init__`` here — presumably concrete subclasses consume
        the coefficient data; confirm before relying on them at this level.
        """
        Polynomial.__init__(self, parent, is_gen, construct)
    def _repr(self, name=None):
        r"""
        EXAMPLES::
            sage: R.<w> = PolynomialRing(Zp(5, prec=5, type = 'capped-abs', print_mode = 'val-unit'))
            sage: f = 24 + R(4/3)*w + w^4
            sage: f._repr()
            '(1 + O(5^5))*w^4 + O(5^5)*w^3 + O(5^5)*w^2 + (1043 + O(5^5))*w + 24 + O(5^5)'
            sage: f._repr(name='z')
            '(1 + O(5^5))*z^4 + O(5^5)*z^3 + O(5^5)*z^2 + (1043 + O(5^5))*z + 24 + O(5^5)'
        TESTS::
            sage: k = Qp(5,10)
            sage: R.<x> = k[]
            sage: f = R([k(0,-3), 0, k(0,-1)]); f
            O(5^-1)*x^2 + O(5^-3)
            sage: f + f
            O(5^-1)*x^2 + O(5^-3)
        AUTHOR:
        - <NAME> (2007-03-03), based on Polynomial_generic_dense._repr()
        """
        # Build into a string with a leading sentinel space; it is stripped
        # at the end (or the whole thing collapses to "0").
        s = " "
        coeffs = self.list(copy=False)
        m = len(coeffs)
        if name is None:
            name = self.parent().variable_name()
        # Highest-degree term first; terms printing as "0" are skipped,
        # except that the leading term is always emitted.
        for n in reversed(range(m)):
            # y is a scratch copy of the coefficient string, used only to
            # decide whether x needs parentheses.
            x = y = str(coeffs[n])
            if n == m-1 or x != "0":
                if n != m-1:
                    s += " + "
                # Drop a leading minus sign before inspecting y.
                if y.find("-") == 0:
                    y = y[1:]
                # Parenthesize composite coefficients (containing '+' or an
                # interior '-'), but not a bare big-oh term like "O(5^5)".
                if n > 0 and ("+" in y or ("-" in y and y[0] != "O")):
                    x = "(%s)" % x
                if n > 1:
                    var = "*%s^%s" % (name, n)
                elif n == 1:
                    var = "*%s" % name
                else:
                    var = ""
                s += x + var
        # Cosmetic cleanups: "+ -c" becomes "- c", and unit coefficients
        # "1*"/"-1*" lose the explicit 1.
        s = s.replace(" + -", " - ")
        s = re.sub(r' 1\*',' ', s)
        s = re.sub(r' -1\*',' -', s)
        if s == " ":
            return "0"
        return s[1:]
def content(self):
r"""
Compute the content of this polynomial.
OUTPUT:
If this is the zero polynomial, return the constant coefficient.
Otherwise, since the content is only defined up to a unit, return the
content as `\pi^k` with maximal precision where `k` is the minimal
valuation of any of the coefficients.
EXAMPLES::
sage: K = Zp(13,7)
sage: R.<t> = K[]
sage: f = 13^7*t^3 + K(169,4)*t - 13^4
sage: f.content()
13^2 + O(13^9)
sage: R(0).content()
0
sage: f = R(K(0,3)); f
O(13^3)
sage: f.content()
O(13^3)
sage: P.<x> = ZZ[]
sage: f = x + 2
sage: f.content()
1
sage: fp = f.change_ring(pAdicRing(2, 10))
sage: fp
(1 + O(2^10))*x + 2 + O(2^11)
sage: fp.content()
1 + O(2^10)
sage: (2*fp).content()
2 + O(2^11)
Over a field it would be sufficient to return only zero or one, as the
content is only defined up to multiplication with a unit. However, we
return `\pi^k` where `k` is the minimal valuation of any coefficient::
sage: K = Qp(13,7)
sage: R.<t> = K[]
sage: f = 13^7*t^3 + K(169,4)*t - 13^-4
sage: f.content()
13^-4 + O(13^3)
sage: f = R.zero()
sage: f.content()
0
sage: f = R(K(0,3))
sage: f.content()
O(13^3)
sage: f = 13*t^3 + K(0,1)*t
sage: f.content()
13 + O(13^8)
"""
if self.is_zero():
return self[0]
else:
return self.base_ring()(self.base_ring().prime_pow(min([x.valuation() for x in self.coefficients(sparse=False)])))
def factor(self):
r"""
Return the factorization of this polynomial.
EXAMPLES::
sage: R.<t> = PolynomialRing(Qp(3,3,print_mode='terse',print_pos=False))
sage: pol = t^8 - 1
sage: for p,e in pol.factor():
....: print("{} {}".format(e, p))
1 (1 + O(3^3))*t + 1 + O(3^3)
1 (1 + O(3^3))*t - 1 + O(3^3)
1 (1 + O(3^3))*t^2 + (5 + O(3^3))*t - 1 + O(3^3)
1 (1 + O(3^3))*t^2 + (-5 + O(3^3))*t - 1 + O(3^3)
1 (1 + O(3^3))*t^2 + O(3^3)*t + 1 + O(3^3)
sage: R.<t> = PolynomialRing(Qp(5,6,print_mode='terse',print_pos=False))
sage: pol = 100 * (5*t - 1) * (t - 5)
sage: pol
(500 + O(5^9))*t^2 + (-2600 + O(5^8))*t + 500 + O(5^9)
sage: pol.factor()
(500 + O(5^9)) * ((1 + O(5^5))*t - 1/5 + O(5^5)) * ((1 + O(5^6))*t - 5 + O(5^6))
sage: pol.factor().value()
(500 + O(5^8))*t^2 + (-2600 + O(5^8))*t + 500 + O(5^8)
The same factorization over `\ZZ_p`. In this case, the "unit"
part is a `p`-adic unit and the power of `p` is considered to be
a factor::
sage: R.<t> = PolynomialRing(Zp(5,6,print_mode='terse',print_pos=False))
sage: pol = 100 * (5*t - 1) * (t - 5)
sage: pol
(500 + O(5^9))*t^2 + (-2600 + O(5^8))*t + 500 + O(5^9)
sage: pol.factor()
(4 + O(5^6)) * (5 + O(5^7))^2 * ((1 + O(5^6))*t - 5 + O(5^6)) * ((5 + O(5^6))*t - 1 + O(5^6))
sage: pol.factor().value()
(500 + O(5^8))*t^2 + (-2600 + O(5^8))*t + 500 + O(5^8)
In the following example, the discriminant is zero, so the `p`-adic
factorization is not well defined::
sage: factor(t^2)
Traceback (most recent call last):
...
PrecisionError: p-adic factorization not well-defined since the discriminant is zero up to the requestion p-adic precision
An example of factoring a constant polynomial (see :trac:`26669`)::
sage: R.<x> = Qp(5)[]
sage: R(2).factor()
2 + O(5^20)
More examples over `\ZZ_p`::
sage: R.<w> = PolynomialRing(Zp(5, prec=6, type = 'capped-abs', print_mode = 'val-unit'))
sage: f = w^5-1
sage: f.factor()
((1 + O(5^6))*w + 3124 + O(5^6)) * ((1 + O(5^6))*w^4 + (12501 + O(5^6))*w^3 + (9376 + O(5^6))*w^2 + (6251 + O(5^6))*w + 3126 + O(5^6))
See :trac:`4038`::
sage: E = EllipticCurve('37a1')
sage: K =Qp(7,10)
sage: EK = E.base_extend(K)
sage: E = EllipticCurve('37a1')
sage: K = Qp(7,10)
sage: EK = E.base_extend(K)
sage: g = EK.division_polynomial_0(3)
sage: g.factor()
(3 + O(7^10)) * ((1 + O(7^10))*x + 1 + 2*7 + 4*7^2 + 2*7^3 + 5*7^4 + 7^5 + 5*7^6 + 3*7^7 + 5*7^8 + 3*7^9 + O(7^10)) * ((1 + O(7^10))*x^3 + (6 + 4*7 + 2*7^2 + 4*7^3 + 7^4 + 5*7^5 + 7^6 + 3*7^7 + 7^8 + 3*7^9 + O(7^10))*x^2 + (6 + 3*7 + 5*7^2 + 2*7^4 + 7^5 + 7^6 + 2*7^8 + 3*7^9 + O(7^10))*x + 2 + 5*7 + 4*7^2 + 2*7^3 + 6*7^4 + 3*7^5 + 7^6 + 4*7^7 + O(7^10))
TESTS:
Check that :trac:`13293` is fixed::
sage: R.<T> = Qp(3)[]
sage: f = 1926*T^2 + 312*T + 387
sage: f.factor()
(3^2 + 2*3^3 + 2*3^4 + 3^5 + 2*3^6 + O(3^22)) * ((1 + O(3^19))*T + 2*3^-1 + 3 + 3^2 + 2*3^5 + 2*3^6 + 2*3^7 + 3^8 + 3^9 + 2*3^11 + 3^15 + 3^17 + O(3^19)) * ((1 + O(3^20))*T + 2*3 + 3^2 + 3^3 + 3^5 + 2*3^6 + 2*3^7 + 3^8 + 3^10 + 3^11 + 2*3^12 + 2*3^14 + 2*3^15 + 2*3^17 + 2*3^18 + O(3^20))
Check that :trac:`24065` is fixed::
sage: R = Zp(2, type='fixed-mod', prec=3)
sage: P.<x> = R[]
sage: ((1 + 2)*x + (1 + 2)*x^2).factor()
(1 + 2) * (x + 1) * x
"""
if self == 0:
raise ArithmeticError("factorization of {!r} is not defined".format(self))
elif self.is_constant():
return Factorization((), self.constant_coefficient())
# Scale self such that 0 is the lowest valuation
# amongst the coefficients
try:
val = self.valuation(val_of_var=0)
except TypeError:
val = min([c.valuation() for c in self])
self_normal = self / self.base_ring().uniformizer_pow(val)
absprec = min([x.precision_absolute() for x in self_normal])
if self_normal.discriminant().valuation() >= absprec:
raise PrecisionError(
"p-adic factorization not well-defined since the discriminant is zero up to the requestion p-adic precision")
G = self_normal.__pari__().factorpadic(self.base_ring().prime(), absprec)
return _pari_padic_factorization_to_sage(G, self.parent(), self.leading_coefficient())
def root_field(self, names, check_irreducible=True, **kwds):
"""
Return the p-adic extension field generated by the roots of the irreducible
polynomial self.
INPUT:
* ``names`` -- name of the generator of the extension
* ``check_irreducible`` -- check whether the polynomial is irreducible
* ``kwds`` -- see :meth:`sage.ring.padics.padic_generic.pAdicGeneric.extension`
EXAMPLES::
sage: R.<x> = Qp(3,5,print_mode='digits')[]
sage: f = x^2 - 3
sage: f.root_field('x')
3-adic Eisenstein Extension Field in x defined by x^2 - 3
::
| |
<gh_stars>0
'''
Author: <NAME>
Project 10 for CIS 322
Fall 2017
Flask redirect with arguments from: https://stackoverflow.com/questions/17057191/flask-redirect-while-passing-arguments
Info on obtaining a google user's email address from: https://stackoverflow.com/questions/24442668/google-oauth-api-to-get-users-email-address
Method to obtain a Google client secrets file remotely from: https://developers.google.com/api-client-library/python/guide/aaa_oauth
'''
import flask
from flask import render_template
from flask import request
from flask import url_for
import uuid
from apiclient.discovery import build # google api
from dateutil import tz
from freeAndBusyTimeCalculator import freeBusyTimes
import os
import random # to create a unique meetingID
import sys
import json
import logging
# Date handling
import arrow # Replacement for datetime, based on moment.js
# import datetime # But we still need time
from dateutil import tz # For interpreting local times
# OAuth2 - Google library implementation for convenience
from oauth2client import client
import httplib2 # used in oauth2 flow
# Google API for services
from apiclient import discovery
from oauth2client.client import OAuth2WebServerFlow
from pymongo import MongoClient
###
# Globals
###
import config
# isMain distinguishes a local run (__main__: config read from credentials.ini
# via the config module) from a Heroku run (config read from env vars).
isMain = True
app = flask.Flask(__name__)
if __name__ == "__main__":
    # if run from localhost, get config data from credentials.ini
    CONFIG = config.configuration()
    app.debug = CONFIG.DEBUG
    app.secret_key = CONFIG.SECRET_KEY
    CLIENT_SECRET_FILE = CONFIG.GOOGLE_KEY_FILE  # You'll need this
    MONGO_CLIENT_URL = "mongodb://{}:{}@{}:{}/{}".format(
        CONFIG.DB_USER,
        CONFIG.DB_USER_PW,
        CONFIG.DB_HOST,
        CONFIG.DB_PORT,
        CONFIG.DB)
    configDB = CONFIG.DB
    clientSecret = CONFIG.CLIENTSECRET
    clientID = CONFIG.CLIENTID
    # Fix: the OAuth flow (oauth2callback) reads `clientId` (lower-case d),
    # which was previously assigned only in the Heroku branch below, so a
    # local run hitting that code path raised NameError.  Define both
    # spellings here; `clientID` is kept for backward compatibility.
    clientId = CONFIG.CLIENTID
else:
    # else if run from Heroku, get config data from Heroku env vars
    isMain = False
    app.debug = os.environ.get('debug', None)
    app.secret_key = os.environ.get('Secret_Key', None)
    clientId = os.environ.get('clientID', None)
    clientSecret = os.environ.get('clientSecret', None)
    MONGO_CLIENT_URL = "mongodb://{}:{}@{}:{}/{}".format(
        os.environ.get('DB_USER', None),
        os.environ.get('DB_USER_PW', None),
        os.environ.get('DB_HOST', None),
        os.environ.get('DB_PORT', None),
        os.environ.get('DB', None))
    configDB = os.environ.get('DB', None)
# access MongoDB
try:
    dbclient = MongoClient(MONGO_CLIENT_URL)
    db = getattr(dbclient, configDB)
except Exception:  # narrowed from a bare except so KeyboardInterrupt/SystemExit propagate
    print("Failure opening database. Correct MongoDB user? Correct password?")
    sys.exit(1)
app.logger.setLevel(logging.DEBUG)
# NOTE(review): the userinfo.email scope string starts with a stray space —
# confirm Google accepts it before "fixing", since tokens were granted with it.
SCOPES = ['https://www.googleapis.com/auth/calendar.readonly', ' https://www.googleapis.com/auth/userinfo.email',
          "https://www.googleapis.com/auth/plus.login", 'https://www.googleapis.com/auth/plus.me', 'https://www.googleapis.com/auth/userinfo.profile']
#############################
#
# Pages (routed from URLs)
#
#############################
@app.route("/")
@app.route("/index")
def index():
    """Render the landing page, seeding session defaults on first visit."""
    app.logger.debug("Entering index")
    needs_init = 'begin_date' not in flask.session
    if needs_init:
        init_session_values()
    return render_template('index.html')
@app.route("/choose")
def choose():
    """Create a new meeting: ensure the user is authorized, pick a random
    meeting ID, store the meeting's metadata (date range, daily time window,
    timezone) as the "init" document of a new MongoDB collection, and
    redirect to the meeting page.
    """
    # authorize a list of calendars
    app.logger.debug("In /choose")
    credentials = valid_credentials()
    if not credentials:
        app.logger.debug("Redirecting to authorization")
        return flask.redirect(flask.url_for('oauth2callback'))
    service = get_gcal_service(credentials)
    gcal_service = service[0]  # [calendar service, plus service]
    flask.g.calendars = list_calendars(gcal_service)
    dbCollections = db.collection_names()
    uniqueMeetingID = 0
    # assign a random and unique meeting ID
    # NOTE(review): collection names are "a"-prefixed strings (see below),
    # so `uniqueMeetingID in dbCollections` can never be True for a bare
    # int — uniqueness rests on randomness alone; confirm intended.
    while(uniqueMeetingID == 0 or uniqueMeetingID in dbCollections):
        uniqueMeetingID = random.randint(10000,100000)
    userTimezone = flask.session["userTimezone"]
    flask.g.meetingID = uniqueMeetingID
    # prepend "a" to meetingID - mongoDB collections can't start with numbers
    mongoCollectionName = "a" + str(flask.g.meetingID)
    collection = db[mongoCollectionName]
    # create initial collection entry with relevant meta data
    collection.insert({"init":1, "dateRange":flask.session['daterange'], "startTime":flask.session['startInput'],
                       "endTime":flask.session['endInput'], "userTimezone": userTimezone})
    return flask.redirect(flask.url_for('meeting', meetingID=flask.g.meetingID))
@app.route("/meeting/<meetingID>")
def meeting(meetingID):
    """Join an existing meeting: after OAuth, load the meeting's metadata
    from its MongoDB collection into the session and render meeting.html;
    unknown meeting IDs get the noSuchMeeting page.
    """
    app.logger.debug("In Meeting")
    credentials = valid_credentials()
    # remember which meeting is being joined across the OAuth redirect
    flask.session['meetingID'] = meetingID
    if not credentials:
        app.logger.debug("Redirecting to authorization")
        return flask.redirect(flask.url_for('oauth2callbackmeeting'))
    service = get_gcal_service(credentials)
    gcal_service = service[0]
    p_service = service[1]
    # current user's e-mail address via the Google+ people API
    flask.g.userEmail = p_service.people().get(userId="me").execute()["emails"][0]['value']
    flask.g.calendars = list_calendars(gcal_service)
    dbCollections = db.collection_names()
    # collection names are the meeting ID prefixed with "a" (see /choose)
    mongoCollectionName = "a" + str(meetingID)
    collectionExists = False
    for collection in dbCollections:
        if mongoCollectionName == collection:
            collectionExists = True
    if not collectionExists:
        return render_template('noSuchMeeting.html')
    # the "init" document holds the meeting's metadata written by /choose
    startingInfo = db[mongoCollectionName].find({"init":1})
    flask.session['daterange'] = startingInfo[0]["dateRange"]
    flask.session['endInput'] = startingInfo[0]["endTime"]
    flask.session['startInput'] = startingInfo[0]["startTime"]
    flask.session['userTimezone'] = startingInfo[0]["userTimezone"]
    flask.g.meetingID = meetingID
    return render_template('meeting.html')
@app.route("/updateCalendar")
def updateCalendar():
    '''
    Returns a list of formatted google calendar entries.

    Reads the meeting ID, user e-mail, selected calendars and the meeting's
    date/time window from the request arguments, replaces this user's
    busy-time documents in the meeting's MongoDB collection with freshly
    pulled ones, and returns the merged free/busy schedule of all
    participants as JSON.
    '''
    meetingID = request.args.get("meetingID", type=str)
    userEmail = request.args.get("userEmail", type=str)
    calendarToAdd = json.loads(request.args.get("val"))
    startingBound = request.args.get("startTime", type=str)
    endingBound = request.args.get("endTime", type=str)
    userTimezone = request.args.get("userTimezone")
    dateRanges = request.args.get("dates", type=str)
    # "MM/DD/YYYY - MM/DD/YYYY" -> [[MM, DD, YYYY], [MM, DD, YYYY]]
    dateRanges = dateRanges.split(" ")
    dateRanges.remove("-")
    dateRanges[0] = dateRanges[0].split("/")
    dateRanges[1] = dateRanges[1].split("/")
    # reassemble as "YYYYMMDD" for arrow parsing
    startingBoundDate = dateRanges[0][2] + dateRanges[0][0] + dateRanges[0][1]
    endingBoundDate = dateRanges[1][2] + dateRanges[1][0] + dateRanges[1][1]
    arrowStartBound = arrow.get(startingBoundDate + startingBound, "YYYYMMDDHH:mm", tzinfo=userTimezone)
    arrowEndBound = arrow.get(startingBoundDate + endingBound, "YYYYMMDDHH:mm", tzinfo=userTimezone)
    arrowEndBoundDate = arrow.get(endingBoundDate + endingBound, "YYYYMMDDHH:mm", tzinfo=userTimezone)
    arrowDayRange = arrowEndBoundDate - arrowStartBound
    numberOfDays = arrowDayRange.days
    if(arrowDayRange.seconds > 0):
        numberOfDays += 1  # a partial final day still counts as a day
    # build the per-day start/end bounds across the whole date range
    startingBoundDateArray = []
    endingBoundDateArray = []
    for i in range(numberOfDays):
        # NOTE(review): replace(days=+i) only *shifts* under old arrow
        # releases; modern arrow requires shift(days=+i) — confirm the
        # pinned arrow version before upgrading.
        startingBoundDateArray.append(arrowStartBound.replace(days=+i))
        endingBoundDateArray.append(arrowEndBound.replace(days=+i))
    if(startingBound == ""):
        app.logger.debug("No start time specified.")
        # NOTE(review): exit(1) kills the whole server process from inside a
        # request handler; an HTTP error response would be safer.
        exit(1)
    if(endingBound == ""):
        app.logger.debug("No end time specified.")
        exit(1)
    credentials = valid_credentials()
    if not credentials:
        app.logger.debug("Redirecting to authorization")
        return flask.redirect(flask.url_for('oauth2callback'))
    service = get_gcal_service(credentials)
    gcal_service = service[0]
    page_token = None
    mongoCollectionName = "a" + meetingID
    collection = db[mongoCollectionName]
    allEntries = []
    # remove all DB entries from current user
    allInDBToRemove = collection.find({"email":userEmail})
    for e in allInDBToRemove:
        collection.remove(e)
    # add selected calendars of current user to DB
    for calendar in calendarToAdd:
        events = gcal_service.events().list(calendarId=calendar,
                                            pageToken=page_token).execute()
        arrowEntries = pullBusyTimes(events, startingBoundDateArray, endingBoundDateArray, userTimezone)
        for aEntry in arrowEntries:
            collectionEntry = {"start":str(aEntry[0]), "end":str(aEntry[1]), "email":userEmail, "init":0}
            collection.insert(collectionEntry)
    # add all DB entries for specified meetingID
    allInDBToAdd = collection.find({"init":0})
    for e in allInDBToAdd:
        tempStart = arrow.get(e['start'])
        tempEnd = arrow.get(e['end'])
        allEntries.append([tempStart, tempEnd])
    allEntries.sort()
    # merge overlapping busy blocks, then derive the free/busy display list
    unionEntries = disjointSetBusyTimes(allEntries)
    displayEntries = freeBusyTimes(unionEntries, startingBoundDateArray, endingBoundDateArray)
    formattedEntries = formatEntries(displayEntries)
    return flask.jsonify(result=formattedEntries)
def leadingZero(n):
    '''
    Return *n* as a string left-padded with "0" to at least two digits,
    e.g. 7 -> "07", 12 -> "12".  Used to build HH:MM display strings.
    '''
    # str.zfill replaces the hand-rolled n < 10 check (and pads after the
    # sign for negative inputs, instead of producing e.g. "0-5")
    return str(n).zfill(2)
def formatEntries(listOfEntries):
    '''
    Build a human-readable list of time entries grouped by date.

    listOfEntries: sorted list of [label, start, end] where start/end are
    datetime-like objects exposing .day, .date(), .hour and .minute
    (arrow datetimes in this app) and label is a string prefix.

    Returns a flat list: each day's date string followed by one
    "<label>HH:MM - HH:MM" line per entry of that day.  Returns [] for an
    empty input (the original raised IndexError on listOfEntries[0]).
    '''
    if not listOfEntries:
        return []
    entriesToDisplay = []
    currentDay = listOfEntries[0][1].day
    entriesToDisplay.append(str(listOfEntries[0][1].date()))
    for entry in listOfEntries:
        if(entry[1].day != currentDay):
            # crossed into a new day: emit its date header
            currentDay = entry[1].day
            entriesToDisplay.append(str(entry[1].date()))
        # zero-padded "HH:MM" via stdlib formatting (replaces the
        # hand-rolled leadingZero helper)
        entryStartTime = "{:02d}:{:02d}".format(entry[1].hour, entry[1].minute)
        entryEndTime = "{:02d}:{:02d}".format(entry[2].hour, entry[2].minute)
        formatted = entry[0] + entryStartTime + " - " + entryEndTime
        entriesToDisplay.append(formatted)
    return entriesToDisplay
def pullBusyTimes(googleEvents, startingBoundDates, endingBoundDates, userTimezone):
    '''
    Return busy [start, end] arrow pairs for events that fall between the
    selected dates/times.

    googleEvents is ONE page of events from the user's Google calendar
    (the dict returned by events().list().execute()).
    startingBoundDates / endingBoundDates are parallel lists of arrow
    datetimes, one (window start, window end) pair per day of the range.

    Bug fixed: the original wrapped this scan in ``while True`` that
    checked ``googleEvents.get('nextPageToken')`` but never fetched the
    next page, so any multi-page result looped forever re-appending the
    same events.  Pagination must be driven by the caller (which holds
    the service object); this function now processes its single page once.
    '''
    arrowEntries = []
    for startDate, endDate in zip(startingBoundDates, endingBoundDates):
        for calendar_entry in googleEvents['items']:
            try:
                # All-day events carry 'date' (no 'dateTime'); clamp them to
                # the day's selected time window.
                # NOTE(review): replace(hours=..., minutes=...) shifts only
                # under old arrow releases — confirm the pinned version.
                arrowStart = arrow.get(calendar_entry['start']['date'])
                arrowStart = arrowStart.replace(tzinfo=userTimezone)
                arrowEnd = arrowStart.replace(hours=endDate.hour, minutes=endDate.minute)
                arrowStart = arrowStart.replace(hours=startDate.hour, minutes=startDate.minute)
                if(arrowStart.format("YYYYMMDD") == startDate.format("YYYYMMDD")):
                    arrowEntries.append([arrowStart, arrowEnd])
            except KeyError:
                # Timed events: missing 'date' key lands here (was a bare
                # except; narrowed so real parse errors surface).
                arrowStart = arrow.get(calendar_entry['start']["dateTime"])
                arrowEnd = arrow.get(calendar_entry['end']["dateTime"])
                # keep events whose time span overlaps the day's window
                if(arrowEnd.format("YYYYMMDD") == startDate.format("YYYYMMDD")):
                    if(arrowEnd.format("HHmm") >= startDate.format("HHmm") and
                       arrowStart.format("HHmm") <= endDate.format("HHmm")):
                        arrowEntries.append([arrowStart, arrowEnd])
    return arrowEntries
def disjointSetBusyTimes(arrowEntries):
    '''
    Merge a sorted list of [start, end] timeslots into a disjoint set.

    arrowEntries must be sorted by start time.  Returns a flat list
    [start0, end0, start1, end1, ...] of non-overlapping intervals.

    Bug fixed: the original scanned *every* adjacent pair of the flat
    list, so odd offsets paired one interval's end with the next
    interval's start; an entry starting inside such a gap overwrote an
    interval boundary and corrupted the set.  The scan now steps two at
    a time so only genuine (start, end) pairs are examined.
    '''
    disjointSet = []
    for entry in arrowEntries:
        joined = False
        # even offsets only: real (start, end) pairs
        for i in range(0, len(disjointSet) - 1, 2):
            if(entry[0] >= disjointSet[i] and entry[0] <= disjointSet[i+1]):
                # entry begins inside this interval: extend its end if needed
                if(disjointSet[i+1] < entry[1]):
                    disjointSet[i+1] = entry[1]
                joined = True
        if not joined:
            disjointSet.append(entry[0])
            disjointSet.append(entry[1])
    return disjointSet
@app.route("/parse_times")
def parse_time():
    """Parse an "HH:mm" time string from the request into ISO format.

    Returns JSON {"result": {"time": <iso string>}} on success, or
    {"result": {"time": "failed"}} when the input cannot be parsed.
    """
    time = request.args.get("time", type=str)
    try:
        arrowTime = arrow.get(time, "HH:mm").isoformat()
        result = {"time": arrowTime}
    except Exception:
        # was a bare except; Exception still catches arrow's parse errors
        # but lets SystemExit/KeyboardInterrupt propagate
        result = {"time": "failed"}
    return flask.jsonify(result=result)
def valid_credentials():
    """
    Return the session's OAuth2 credentials when they are present and
    still usable (a 'truthy' value), otherwise None (a 'falsy' value) —
    i.e. when no credentials are stored, or they are invalid or expired.
    """
    stored = flask.session.get('credentials')
    if stored is None:
        return None
    credentials = client.OAuth2Credentials.from_json(stored)
    if credentials.invalid or credentials.access_token_expired:
        return None
    return credentials
def get_gcal_service(credentials):
    """
    Build the Google service objects this app needs from authorized
    credentials: index 0 is the Calendar v3 service (calendar lists, busy
    times, etc.), index 1 is the Plus v1 service (user profile/e-mail).
    If authorization is already in effect this returns immediately;
    otherwise the OAuth flow interrupts control and the caller is
    redirected back (e.g. to /choose) without a service object, and the
    second call succeeds without additional authorization.
    """
    app.logger.debug("Entering get_gcal_service")
    http_auth = credentials.authorize(httplib2.Http())
    services = [
        discovery.build('calendar', 'v3', http=http_auth),
        discovery.build('plus', 'v1', http=http_auth),
    ]
    app.logger.debug("Returning service")
    return services
@app.route('/oauth2callback')
def oauth2callback():
"""
The 'flow' has this one place to call back to. We'll enter here
more than once as steps in the flow are completed, and need to keep
track of how far we've gotten. The first time we'll do the first
step, the second time we'll skip the first step and do the second,
and so on.
"""
app.logger.debug("Entering oauth2callback")
if(isMain):
flow = client.flow_from_clientsecrets(
CLIENT_SECRET_FILE,
scope=SCOPES,
redirect_uri=flask.url_for('oauth2callback', _external=True))
else:
# from Heroku, a clientID and client secrets are needed for OAuth.
# Normally these are taken from client_secrets.json,
# but they can be manually entered, eliminating the need for the .json file
flow = OAuth2WebServerFlow(client_id=clientId,
client_secret=clientSecret,
scope=SCOPES,
redirect_uri=flask.url_for('oauth2callback', _external=True))
# Note we are *not* redirecting above. We are noting *where*
# we will redirect to, which is this function.
# The *second* time we enter here, it's a callback
# with 'code' set in the URL parameter. If we don't
# see that, it must be the first time through, so we
# need to do | |
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 7 23:15:48 2020
@author: shiri
"""
import matplotlib.pyplot as plt
import numpy as np
import sklearn
from pandas import DataFrame
from sklearn import metrics
from sklearn.model_selection import validation_curve
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestRegressor
from tkinter import filedialog
from tkinter import Tk
from tkinter import messagebox
from sklearn.preprocessing import LabelEncoder
import pandas as pd
import tkinter
from tkinter import*
from matplotlib.figure import Figure
import webbrowser
from PIL import ImageTk , Image
def panfiles():
    """Prompt for a CSV file, load it into the global DataFrame(s),
    lower-case the column names, and populate the feature listbox.
    Button callback for the file-selection screen.
    """
    global reslist1
    global inde_var,reslist,de_var
    global file,df,col1,df2,df3
    reslist=[]
    reslist1=[]
    file=filedialog.askopenfilename()
    suc_label=Label(frame,text="Succesful!")
    print(file)
    suc_label.grid(row=3)
    try:
        # df, df2, df3 all alias the same freshly loaded DataFrame
        df2=pd.read_csv(file)
        df=df2
        df3=df2
        print(df2)
    except FileNotFoundError:
        # askopenfilename returned "" (dialog cancelled) or a bad path
        messagebox.showerror("Error","Please select a file")
    else:
        # normalize all column names to lower case in place
        col=list(df.columns)
        for i in range(len(col)):
            df.rename(columns={col[i]:str(col[i]).lower()},inplace=True)
        col1=list(df.columns)
        inde_var.set(col1)
        b=Button(frame01,text="*Select",command=selection,borderwidth="3")
        b.grid(row=9,column=1)
def selection():
    """Record the listbox's selected columns as independent features
    (appended to the global reslist) and offer the remaining columns as
    dependent-variable choices in an OptionMenu.
    """
    inde_var.set(col1)
    # NOTE(review): reslist1 is assigned here WITHOUT a `global`
    # declaration, so the module-level reslist1 set in panfiles() is not
    # updated by this function — confirm that is intended.
    reslist1=[]
    flag=0
    seleccion = inde_enter.curselection()
    for i in seleccion:
        entrada = inde_enter.get(i)
        reslist.append(entrada)
    # everything not chosen as independent becomes a candidate target
    for i in col1:
        if i not in reslist:
            reslist1.append(i)
    de_enter=OptionMenu(frame01 ,de_var,*reslist1)
    de_enter.grid(row=10, column=1,padx=2)
    de_b=Button(frame01,text=" *Select",command=pred,borderwidth="3")
    de_b.grid(row=11,column=1)
def pred():
    """Fix the dependent variable (from the OptionMenu) and the
    independent features, then start the per-feature continuous/
    categorical classification wizard (select_fea).
    """
    global inp,oup,cate,cont
    oup=[de_var.get()]         # dependent variable (single-element list)
    cont=[]                    # features the user marks continuous
    cate=[]                    # features the user marks categorical
    inp=reslist                # all selected independent features
    global inp1,num,i
    i=0                        # wizard cursor into inp
    num=len(inp)
    sel=Label(frame01,text="select type of the feature")
    sel.grid(row=12,column=0)
    select_fea()
    print(cate)
    print(cont)
def select_fea():
    """Show the continuous/categorical radio buttons for the current
    feature (inp[i]); the "next" button advances via nextone().
    """
    global num,i,val,v,cate,cont
    v=IntVar()  # 1 = continuous, 2 = categorical (see nextone)
    lb=Label(frame01,text=inp[i])
    lb.grid(row=13,column=0)
    rb=Radiobutton(frame01,text="continuous",variable=v,value=1)
    rb.grid(row=13,column=1)
    rb=Radiobutton(frame01,text="categorical",variable=v,value=2)
    rb.grid(row=14,column=1)
    nex=Button(frame01,text="next",command=nextone)
    nex.grid(row=15,column=1)
def nextone():
    """Record the radio-button choice for the current feature into cont
    (value 1) or cate (any other value), then advance the wizard to the
    next feature while any remain.
    """
    global valu,i,num,v,cate,cont,inp
    valu=v.get()
    if(valu!=1):
        # NOTE(review): an untouched IntVar reads 0, which lands in cate —
        # confirm "categorical by default" is intended.
        cate.append(inp[i])
    else:
        cont.append(inp[i])
    print(valu)
    i+=1
    if(i<num):
        select_fea()
# frame03=LabelFrame(main_frame,borderwidth=6 ,text="Greenviz",font="25", padx=10 , pady=5,width=1000,height=1000)
# frame.grid(padx=10,pady=10)
def sub():
    """Validate the feature selection and train/test percentages, then
    build the "Model Building" screen (frame2) for the chosen algorithm.
    """
    global clicked,alg
    global tarin_p,frame2,xl,yl
    global test_p,x,y,train_percent,test_percent
    try:
        # NameError here means pred() was never run (inp/oup undefined)
        x=inp
        xl=x
        y=oup
        yl=y
    except NameError:
        messagebox.showerror("Error","please select the features and click the Select button")
    else:
        try:
            train_percent=float(train_p.get())
            test_percent=float(test_p.get())
        except ValueError:
            messagebox.showerror("Error","please enter valid train and test percent")
        else:
            alg=clicked.get()
            dest()
            # repaint the background, then lay out the model-building frame
            bg_image=ImageTk.PhotoImage(Image.open("bg1.jpg"))
            bg_label= Label(root,image=bg_image)
            bg_label.image=bg_image
            bg_label.place(x=0, y=0, relwidth=1, relheight=1)
            frame2=LabelFrame(root ,borderwidth=6 ,text="Model Building",font="25", padx=10, pady=10,width=1000,height=1000)
            frame2.grid(padx=15,pady=15)
            s_label=Label(frame2,text=" * Your response has been succesfully Recorded ",font="5")
            s_label.grid(row=1,padx=80,pady=8)
            algo=Label(frame2,text=" * "+alg+" Model",font="5")
            algo.grid(row=2,pady=10)
            tr_button=Button(frame2,text=" * Train",command=train_d,width="15",font="5",borderwidth="4")
            tr_button.grid(row=3)
            tr_label=Label(frame2,text="Click here to train the model")
            tr_label.grid(row=4)
            back_b=Button(frame2,text="Back",width="20",font="20",borderwidth="3",command=main)
            back_b.grid(row=0,column=4)
def train_d():
    """Split the data into train/test sets and dispatch to the trainer
    for the selected algorithm (linreg / logreg / dtc).
    """
    global cate_data
    # keep the raw categorical columns for later OptionMenu choices
    cate_data=df[cate]
    global xtrain,ytrain,ytest,xtest
    try:
        from sklearn.model_selection import train_test_split
        # NOTE(review): bare except — any failure (not just bad
        # percentages) shows the percentage error message; verify.
        xtrain,xtest,ytrain,ytest=train_test_split(df[x],df[y],train_size=train_percent/100,test_size=test_percent/100)
    except:
        messagebox.showerror("Error","Please enter valid training and test percentage")
        main()
    else:
        if(alg=="Linear Regression"):
            linreg()
        elif(alg=="Logistic Regression"):
            print(alg)
            logreg()
        else:
            # anything else is treated as the Decision Tree classifier
            print(alg)
            print("#################################################")
            dtc()
def lin():
    """Record "Linear Regression" as the chosen algorithm (menu callback)."""
    global alg
    clicked.set("Linear Regression")
    alg = "Linear Regression"
def log():
    """Record "Logistic Regression" as the chosen algorithm (menu callback)."""
    global alg
    alg = "Logistic Regression"
    clicked.set("Logistic Regression")
def dtc():
    """Label-encode the categorical columns, split the data, fit a
    depth-4 DecisionTreeClassifier into the global `linr`, then show the
    training statistics (stat2).  On ValueError, report and go back.
    """
    global linr
    k=1
    try:
        global xtrain,ytrain,xtest,ytest,labelenc,cont,cate
        from sklearn.tree import DecisionTreeClassifier
        linr=DecisionTreeClassifier(max_depth=4)
        labelenc=LabelEncoder()
        print(cate)
        # encode each categorical column in-place in the DataFrame
        # NOTE(review): only the LAST column's fitted encoder survives in
        # the global `labelenc` — verify downstream decoding, if any.
        for i in cate:
            print(i)
            labelenc.fit(df[i])
            df[i]=labelenc.transform(df[i])
            print(df[i])
        # x here is LOCAL (no `global x`): it aliases cont, the tuple()
        # round-trip snapshots cont so extend() doesn't change the global
        x=cont
        cont=tuple(cont)
        x.extend(cate)
        cont=list(cont)
        from sklearn.model_selection import train_test_split
        xtrain,xtest,ytrain,ytest=train_test_split(df[x],df[y],train_size=train_percent/100,test_size=test_percent/100)
        linr.fit(xtrain,ytrain)
    except ValueError:
        messagebox.showerror("Error","your selected dependent and independent are producing value errors for training the model \n go back and try another algorithm")
        frame2.destroy()
        main()
    else:
        stat2()
def stat2():
    """Populate frame2 with the decision-tree training statistics:
    sample counts, accuracy, precision/recall/F-score, per-feature means,
    plus buttons for the confusion matrix, tree view and the test screen.
    """
    global y1
    tl=Label(frame2,text=" * Training statistics:: ",font="5")
    tl.grid(row=1,pady=10,column=1)
    from sklearn import metrics
    st1=Label(frame2,text=" * No of training samples:: "+" * "+str(len(xtrain)))
    st1.grid(row=6,column=0)
    st3=Label(frame2,text=" * total no of smaples::"+ " * "+str(len(xtrain)+len(xtest)))
    st3.grid(row=5,column=1)
    st5=Label(frame2,text=" * training accuracy::"+" * "+str(metrics.accuracy_score(ytrain,linr.predict(xtrain))))
    st5.grid(row=6,column=1)
    st7=Label(frame2,text="Confusion Matrix",font="5")
    st7.grid(row=7,column=0)
    global cnf_matrix
    cnf_matrix=metrics.confusion_matrix(ytrain,linr.predict(xtrain))
    cnf_button=Button(frame2,text="Click here to view the confusion matrix",command=view_conf,borderwidth="3")
    cnf_button.grid(row=8)
    st8=Label(frame2,text=" * mean of independent and dependent variables",font="5")
    st8.grid(row=8,column=1,pady=5)
    # one "*<feature> *<mean>" label per independent variable
    y2=[]
    for i in range(len(x)):
        y2.append("*"+x[i]+" *"+str(df[x[i]].mean()))
    for i in range(len(y2)):
        mean=Label(frame2,text=y2[i])
        mean.grid(row=i+10,column=1)
        index=i+11
    # NOTE(review): `index` is only bound inside the loop above, so an
    # empty feature list would raise NameError at dtc_graph.grid below.
    ypr=linr.predict(xtrain)
    print(ytrain,ypr)
    f_sco=metrics.precision_recall_fscore_support(ytrain,linr.predict(xtrain),average="weighted")
    pres=Label(frame2,text="* precison :: "+" * "+str(f_sco[0]))
    pres.grid(row=2,column=1)
    recall=Label(frame2,text=" * recall ::"+" * "+str(f_sco[1]))
    recall.grid(row=3,column=1)
    fscore=Label(frame2,text="* Fscore ::"+" * "+str(f_sco[2]))
    fscore.grid(row=4,column=1)
    global unique
    global uv
    # number of distinct target classes (used to gate the PR-curve plot)
    uniq=df[y].nunique()
    uv=list(uniq)
    dtc_graph=Button(frame2,text="Click here to view Decision tree",command=view_tree,borderwidth="3")
    dtc_graph.grid(row=index+1,column=0)
    test_b=Button(frame2,text="Test and visualisation",width="20",font="5",padx=80,borderwidth="5",command=test_but)
    test_b.grid(row=index+2,pady=20,padx=100)
    test_l=Label(frame2,text="Click here to validate/test the data")
    test_l.grid(row=index+3)
    global clf
    clf=linr.fit(xtrain,ytrain)
def view_tree():
    """Export the fitted decision tree to tree2.png via graphviz and show
    it scaled down in a Toplevel window.
    """
    import pydotplus
    global clf
    import matplotlib.pyplot as plt
    from sklearn.tree import export_graphviz
    import graphviz
    dot_file=export_graphviz(linr,feature_names=x)
    graph=graphviz.Source(dot_file)
    graph.render(filename="tree2",format="png",cleanup=True)
    top=Toplevel()
    dl=Label(top,text="Decision tree")
    img = Image.open("tree2.png") # PIL solution
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (use
    # Image.LANCZOS) — confirm the pinned Pillow version.
    img = img.resize((1500, 500), Image.ANTIALIAS) #The (250, 250) is (height, width)
    img = ImageTk.PhotoImage(img)
    dtc_label=Label(top, image=img )
    dtc_label.image=img  # keep a reference so Tk doesn't garbage-collect it
    dtc_label.grid(row=1,column=0)
def linreg():
    """Fit a LinearRegression model on the global training split and show
    the training statistics; on a ValueError, report the incompatible
    feature/target selection and return to the main screen.
    """
    global linr
    global xtrain, ytrain, xtest, ytest
    try:
        linr = LinearRegression()
        linr.fit(xtrain, ytrain)
    except ValueError:
        messagebox.showerror(
            "Error",
            "your selected dependent and independent are producing value errors for training the model \n go back and try another algorithm")
        frame2.destroy()
        main()
    else:
        stat()
def logreg():
    """Re-split the data with the target as a numpy array, fit a lbfgs
    LogisticRegression into the global `linr`, then show the training
    statistics (stat1).  On ValueError, report and go back.
    """
    global linr
    try:
        global ytrain,ytest,xtrain,xtest
        y1=np.array(df[y])
        from sklearn.model_selection import train_test_split
        xtrain,xtest,ytrain,ytest=train_test_split(df[x],y1,train_size=train_percent/100,test_size=test_percent/100)
        linr=LogisticRegression(solver="lbfgs")
        # numpy arrays (not DataFrames) so predict() accepts raw lists later
        xtrain,xtest=np.array(xtrain),np.array(xtest)
        linr.fit(xtrain,ytrain)
    except ValueError:
        messagebox.showerror("Error","your selected dependent and independent are producing value errors for training the model \n go back and try another algorithm")
        frame2.destroy()
        main()
    else:
        stat1()
def test_but():
    """Open the test/visualisation screen matching the trained algorithm
    (anything other than the two regressions means the decision tree).
    """
    dispatch = {
        "Linear Regression": test_but_lin,
        "Logistic Regression": test_but_log,
    }
    dispatch.get(alg, test_but_dtc)()
def test_but_dtc():
    """Replace frame2 with the decision-tree test screen (frame3):
    testing accuracy, confusion matrix button, plot chooser, and the
    per-feature prediction entry wizard (prediction_button2).
    """
    frame2.destroy()
    global frame3
    # NOTE(review): precision_recall_curve / plot_precision_recall_curve
    # and r2_score are imported but unused in this function.
    from sklearn.metrics import precision_recall_curve
    from sklearn.metrics import plot_precision_recall_curve
    frame3=LabelFrame(root ,borderwidth=6 ,text="Test and visualisation",font="30", padx=30 , pady=30,width=1000,height=1000)
    frame3.grid(padx=200,pady=100)
    tl=Label(frame3,text=" * Testiing Statistics:: ",font="15")
    tl.grid(row=1,pady=10,column=0)
    from sklearn .metrics import r2_score
    st1=Label(frame3,text=" * No of Testing Samples:: "+" * "+str(len(xtest)))
    st1.grid(row=2,column=0)
    from sklearn import metrics
    st5=Label(frame3,text=" * Testing Accuracy::"+" * "+str(metrics.accuracy_score(ytest,linr.predict(xtest))))
    st5.grid(row=3,column=0)
    st7=Label(frame3,text="Confusion Matrix",font="5")
    st7.grid(row=4,column=0)
    global cnf_matrix
    cnf_matrix=metrics.confusion_matrix(ytest,linr.predict(xtest))
    print(cnf_matrix)
    cnf_button=Button(frame3,text="Click here to view confusion matrix",command=view_conf,borderwidth="3")
    cnf_button.grid(row=5)
    index=6
    from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
    import matplotlib.pyplot as plt
    import matplotlib
    matplotlib.use('TkAgg')
    plots=Label(frame3,text=" * Select your plot ",pady=20,font="15")
    plots.grid(row=index,column=0,pady=10)
    global charts,plot_var,df1
    charts=["line plot","Bar","histplot","precision_recall"]
    back_2=Button(frame3,text="Back",font="5",width="10",borderwidth="3",command=sub)
    back_2.grid(row=0,column=4)
    option_b=Label(frame3,text="Enter your predictors",font="5")
    option_b.grid(row=4,column=2)
    global cont_fea
    cont_fea=[]  # categorical answers collected by the prediction wizard
    prediction_button2()
def prediction_button2():
    """Reset the decision-tree prediction wizard state and show the first
    (continuous) predictor entry field.
    """
    global ind, x_val, flag, fea
    ind = 0
    x_val = StringVar()
    flag = 1
    fea = []
    # prediction10 returns None; the original bound it to an unused local
    prediction10()
def prediction10():
    """Show the entry field for the current continuous predictor
    (cont[ind]); the "click" button records it via dirs01.
    """
    global ind,xl,eg,lg
    print(cont)
    print(cate)
    lg=Label(frame3,text=cont[ind],font="5")
    lg.grid(row=5,column=1)
    global eg  # redundant: eg is already declared global above
    eg=Entry(frame3,textvariable=x_val)
    eg.grid(row=5,column=2)
    b=Button(frame3,text="click",command=dirs01)
    b.grid(row=5,column=3)
def dirs01():
    """Record the current continuous predictor value, then advance: next
    continuous field, or switch to the categorical phase (prediction01).
    """
    global ind
    global fea
    fea.append(float(x_val.get()))
    print(fea)
    if(ind<len(cont)-1):
        print(ind)
        flag=1
        eg.destroy()
        x_val.set("")
        ind+=1
        prediction10()
    else:
        global subm
        # NOTE(review): when there are no categorical features at all,
        # this branch does nothing — no "View Result" button is ever
        # created for continuous-only decision trees; verify.
        if(len(cate)>0):
            eg.destroy()
            global lg
            lg.destroy()
            ind=0
            prediction01()
def prediction01():
    """Show an OptionMenu with the observed values of the current
    categorical predictor (cate[ind]); "click" records it via dirs02.
    """
    global eg,lb
    global ind,sel_var,df3
    lb=Label(frame3,text=cate[ind],font="5")
    lb.grid(row=5,column=1)
    global cate_data
    print(cate_data)
    sel_var=StringVar()
    # choices come from the raw (pre-encoding) column saved in train_d
    l=cate_data[cate[ind]].unique()
    print(cate_data)
    eg=OptionMenu(frame3,sel_var,*l)
    eg.grid(row=5,column=2)
    b=Button(frame3,text="click",command=dirs02)
    b.grid(row=5,column=3)
def dirs02():
    """Record the current categorical predictor choice, then advance to
    the next one or show the "View Result" button.
    """
    global ind
    global cont_fea
    cont_fea.append(sel_var.get())
    print(cont_fea)
    if(ind<len(cate)-1):
        print(ind)
        flag=1
        global lb
        eg.destroy()
        lb.destroy()
        sel_var.set("")
        ind+=1
        # NOTE(review): this calls prediction10 (labels from cont[ind])
        # while still in the categorical phase — looks like it should be
        # prediction01; verify against the intended wizard flow.
        prediction10()
    else:
        global subm
        subm=Button(frame3,text="View Result",command=view_res01)
        subm.grid(row=6,column=2)
#eg=OptionMenu(frame3,sel_var,)
def view_res01():
    """Encode the collected predictor answers, run the decision tree on
    them, and display the predicted class with a Clear button.
    """
    r=[]
    global subm,fea,cont_fea,enc,subm,eg  # NOTE: subm listed twice
    r=fea
    enc=[]
    # NOTE(review): each categorical answer is encoded by fitting a fresh
    # LabelEncoder on that single value, which always yields 0 and ignores
    # the encoding used at training time (dtc) — verify correctness.
    for i in range(len(cont_fea)):
        labelenc=LabelEncoder()
        labelenc.fit([cont_fea[i]])
        enc.extend(labelenc.transform([cont_fea[i]]))
    r.extend(enc)
    r=[r]  # single-sample 2D input for predict()
    re=linr.predict(r)
    global result_lab,bac
    print(re)
    result_lab=Label(frame3,text="Result is "+str(re[0]),font="5")
    subm.destroy()
    result_lab.grid(row=6,column=2)
    bac=Button(frame3,text="Clear",command=clear01)
    bac.grid(row=6,column=3)
def clear01():
    """Tear down the result widgets and restart the decision-tree
    prediction wizard from the first predictor.
    """
    global result_lab,bac,fea,eg,ind,cont_fea
    cont_fea=[]
    fea=[]
    ind=0
    cont_fea=[]  # redundant: already cleared above
    bac.destroy()
    eg.destroy()
    result_lab.destroy()
    print("###################################################")
    prediction_button2()
def test_but_log():
    """Replace frame2 with the logistic-regression test screen (frame3):
    testing accuracy, confusion matrix, chart chooser (PR curve only for
    binary targets), and the predictor entry wizard (prediction_button1).
    """
    frame2.destroy()
    global frame3
    from sklearn.metrics import precision_recall_curve
    from sklearn.metrics import plot_precision_recall_curve
    frame3=LabelFrame(root ,borderwidth=6 ,text="Test and visualisation",font="30", padx=30 , pady=30,width=1000,height=1000)
    frame3.grid(padx=200,pady=100)
    tl=Label(frame3,text=" * Testiing Statistics:: ",font="15")
    tl.grid(row=1,pady=10,column=0)
    from sklearn .metrics import r2_score
    st1=Label(frame3,text=" * No of Testing Samples:: "+" * "+str(len(xtest)))
    st1.grid(row=2,column=0)
    from sklearn import metrics
    st5=Label(frame3,text=" * Testing Accuracy::"+" * "+str(metrics.accuracy_score(ytest,linr.predict(xtest))))
    st5.grid(row=3,column=0)
    st7=Label(frame3,text="Confusion Matrix",font="5")
    st7.grid(row=4,column=0)
    global cnf_matrix
    cnf_matrix=metrics.confusion_matrix(ytest,linr.predict(xtest))
    print(cnf_matrix)
    cnf_button=Button(frame3,text="Click here to view confusion matrix",command=view_conf,borderwidth="3")
    cnf_button.grid(row=5)
    c=[]
    index=6
    from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
    import matplotlib.pyplot as plt
    import matplotlib
    matplotlib.use('TkAgg')
    plots=Label(frame3,text=" * Select your plot ",pady=20,font="15")
    plots.grid(row=index,column=0,pady=10)
    global charts,plot_var,df1
    charts=["line plot","Bar","histplot","precision_recall"]
    # precision/recall curve only makes sense for binary targets
    # (uv was computed from df[y].nunique() in stat1/stat2)
    if(uv[0]>2):
        charts.remove("precision_recall")
    plot_var=StringVar()
    # plot_var().set(charts)
    plt_box=OptionMenu(frame3,plot_var,*charts)
    plt_box.grid(row=index+1)
    # NOTE(review): k aliases the global x, so k.append(y[0]) mutates x —
    # the dirs1 wizard compensates with its len(x)-2 bound; verify.
    k=x
    df1=df.head(100)
    k.append(y[0])
    global data1
    data1=DataFrame(df1[k])
    click_sub=Button(frame3,text="View",borderwidth="3",width="10",font="5",command=submit)
    click_sub.grid(row=index+2)
    back_2=Button(frame3,text="Back",font="5",width="10",borderwidth="3",command=sub)
    back_2.grid(row=0,column=4)
    option_b=Label(frame3,text="Enter your predictors",font="5")
    option_b.grid(row=4,column=2)
    prediction_button1()
def prediction_button1():
    """Reset the logistic-regression prediction wizard state and show the
    first predictor entry field.
    """
    global ind, x_val, flag, fea
    ind = 0
    x_val = StringVar()
    flag = 1
    fea = []
    # prediction1 returns None; the original bound it to an unused local
    prediction1()
def prediction1():
    """Show the entry field for the current predictor (x[ind]); the
    "click" button records it via dirs1.
    """
    global ind
    lg=Label(frame3,text=x[ind],font="5")
    lg.grid(row=5,column=1)
    eg=Entry(frame3,textvariable=x_val)
    eg.grid(row=5,column=2)
    b=Button(frame3,text="click",command=dirs1)
    b.grid(row=5,column=3)
def dirs1():
    """Record the current predictor value, then show the next field or
    the "view result" button.
    """
    global ind
    global fea
    fea.append(float(x_val.get()))
    print(fea)
    # len(x)-2 (not -1) because test_but_log appended the target column
    # y[0] onto x via the k alias — the last slot is not a predictor
    if(ind<len(x)-2):
        print(ind)
        flag=1
        x_val.set("")
        ind+=1
        prediction1()
    else:
        global subm
        subm=Button(frame3,text="view result",command=view_res1)
        subm.grid(row=6,column=2,pady=5)
def view_res1():
    """Run the logistic model on the collected predictor values and show
    the predicted class with a Clear button.
    """
    global subm
    r=[fea]  # single-sample 2D input for predict()
    re=linr.predict(r)
    global result_lab,bac
    print(re)
    result_lab=Label(frame3,text="Result is "+str(re[0]),font="5")
    subm.destroy()
    result_lab.grid(row=6,column=2)
    bac=Button(frame3,text="Clear",command=clear1)
    bac.grid(row=6,column=3)
def clear1():
    """Remove the result widgets and restart the logistic prediction wizard."""
    global result_lab,bac
    bac.destroy()
    result_lab.destroy()
    prediction_button1()
def test_but_lin():
    """Replace frame2 with the linear-regression test screen (frame3):
    R² / MSE on the test split, chart chooser, and the predictor entry
    wizard (prediction_button).
    """
    frame2.destroy()
    global frame3
    frame3=LabelFrame(root ,borderwidth=6 ,text="Test and visualisation",font="25", padx=30 , pady=30,width=1000,height=1000)
    frame3.grid(padx=30,pady=30)
    tl=Label(frame3,text=" * Testiing Statistics:: ",font="15")
    tl.grid(row=1,pady=10,column=0)
    from sklearn .metrics import r2_score
    st1=Label(frame3,text=" * No of Testing Samples:: "+" * "+str(len(xtest)))
    st1.grid(row=2,column=0)
    from sklearn import metrics
    if(alg=="Linear Regression"):
        # R² as "accuracy" plus mean squared error for the regression
        st5=Label(frame3,text=" * Testing Accuracy::"+" * "+str(metrics.r2_score(ytest,linr.predict(xtest))))
        st5.grid(row=3,column=0)
        error=Label(frame3,text=" * Mean Squarred Error "+" * "+str(metrics.mean_squared_error(ytest,linr.predict(xtest))))
        error.grid(row=4,column=0,pady=10)
    from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
    import matplotlib.pyplot as plt
    import matplotlib
    matplotlib.use('TkAgg')
    plots=Label(frame3,text=" * Select your plot ",pady=20,font="15")
    plots.grid(row=5,column=0,pady=10)
    global charts,plot_var,df1
    charts=["line plot","Bar","histplot"]
    plot_var=StringVar()
    # plot_var().set(charts)
    plt_box=OptionMenu(frame3,plot_var,*charts)
    plt_box.grid(row=6,column=0)
    # NOTE(review): k aliases the global x, so k.append(y[0]) mutates x —
    # the dirs wizard compensates with its len(x)-2 bound; verify.
    k=x
    df1=df.head(100)
    k.append(y[0])
    global data1
    data1=DataFrame(df1[k])
    click_sub=Button(frame3,text="View",borderwidth="3",width="10",font="5",command=submit)
    click_sub.grid(row=7,column=0)
    back_2=Button(frame3,text="Back",font="5",width="10",borderwidth="3",command=sub)
    back_2.grid(row=0,column=4)
    show_b=Label(frame3,text="Enter your predictors",font="5")
    show_b.grid(row=4,column=2)
    prediction_button()
def prediction_button():
    """Reset the linear-regression prediction wizard state and show the
    first predictor entry field.
    """
    global ind, x_val, flag, fea
    ind = 0
    x_val = StringVar()
    flag = 1
    fea = []
    # prediction returns None; the original bound it to an unused local
    prediction()
def prediction():
    """Show the entry field for the current predictor (x[ind]); the
    "click" button records it via dirs.
    """
    global ind
    lg=Label(frame3,text=x[ind],font="5")
    lg.grid(row=5,column=1)
    eg=Entry(frame3,textvariable=x_val)
    eg.grid(row=5,column=2)
    b=Button(frame3,text="click",command=dirs)
    b.grid(row=5,column=3)
def dirs():
    """Record the current predictor value, then show the next field or
    the "view result" button.
    """
    global ind
    global fea
    fea.append(float(x_val.get()))
    print(fea)
    # len(x)-2 (not -1) because test_but_lin appended the target column
    # y[0] onto x via the k alias — the last slot is not a predictor
    if(ind<len(x)-2):
        print(ind)
        flag=1
        x_val.set("")
        ind+=1
        prediction()
    else:
        global subm
        subm=Button(frame3,text="view result",command=view_res2)
        subm.grid(row=6,column=2,pady=5)
def view_res2():
    """Run the linear model on the collected predictor values and show
    the predicted value with a Clear button.
    """
    global subm
    r=[fea]  # single-sample 2D input for predict()
    re=linr.predict(r)
    global result_lab,bac
    # re[0][0]: prediction is 2D because the target was a one-column frame
    result_lab=Label(frame3,text="Result is "+str(re[0][0]),font="5")
    subm.destroy()
    result_lab.grid(row=6,column=2)
    bac=Button(frame3,text="Clear",command=clear)
    bac.grid(row=6,column=3)
def clear():
    """Remove the result widgets and restart the linear prediction wizard."""
    global result_lab,bac
    bac.destroy()
    result_lab.destroy()
    prediction_button()
def stat1():
global y1
tl=Label(frame2,text=" * Training statistics:: ",font="5")
tl.grid(row=1,pady=10,column=1)
from sklearn import metrics
st1=Label(frame2,text=" * No of training samples:: "+" * | |
<reponame>Cheaterman/garden.cefpython
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
The CEFBrowser Widget actually displays the browser. It displays ONLY the
browser. If you need controls or tabs, check out the `examples`
"""
import ctypes
from functools import partial
import json
import os
import random
import time
from kivy.core.clipboard import Clipboard
from kivy.core.window import Window
from kivy.graphics import Color, Rectangle
from kivy.graphics.texture import Texture
from kivy.factory import Factory
from kivy.lang import Builder
from kivy.logger import Logger
from kivy.properties import StringProperty
from kivy.properties import NumericProperty
from kivy.properties import BooleanProperty
from kivy.properties import ReferenceListProperty
from kivy import resources
from kivy.uix.behaviors import FocusBehavior
from kivy.uix.bubble import Bubble, BubbleButton
from kivy.uix.widget import Widget
from .cefpython import cefpython, cefpython_initialize
from .cefkeyboard import CEFKeyboardManager
class CEFAlreadyInitialized(Exception):
    """Raised when a process-wide CEF setting (switches, settings, paths)
    is changed after cefpython has already been initialized; such settings
    can only be applied once, before initialization."""
    pass
class CEFBrowser(Widget, FocusBehavior):
"""Displays a Browser"""
# Class Variables
certificate_error_handler = None
"""The value of the `certificate_error_handler` class variable is a
function that handles certificate errors.
It takes 2 arguments:
- `err`: The certificate error number that occurred
- `url`: The URL that was to be loaded
It should return a bool that indicates whether to ignore the error or not:
- True: Ignore warning - False: Abort loading
If `certificate_error_handler` is None or cannot be executed, the default
is False."""
_cefpython_initialized = False
_flags = {}
"""Flags for CEFBrowser"""
_command_line_switches = {
"ppapi-flash-path":
"/opt/google/chrome/PepperFlash/libpepflashplayer.so",
"disable-gpu-compositing": ""}
"""Command line switches for cefpython"""
_settings = {}
"""Settings for cefpython"""
_caches_path = None
_cookies_path = None
_logs_path = None
_cookie_manager = None
# Instance Variables
url = StringProperty("")
"""The URL of the (main frame of the) browser."""
is_loading = BooleanProperty(False)
"""Whether the browser is loading content"""
can_go_back = BooleanProperty(False)
"""Whether the browser can go back in history at this time"""
can_go_forward = BooleanProperty(False)
"""Whether the browser can go forward in history at this time"""
title = StringProperty("")
"""The title of the currently displayed content
(e.g. for tab/window title)"""
popup_policy = None
"""The value of the `popup_policy` variable is a function that handles
the policy whether to allow or block popups.
It takes 2 arguments:
- `browser`: The browser which wants to open the popup
- `url`: The URL of the (future) popup
It should return a bool that indicates whether to open the popup or not:
- True: Allow popup - False: Block popup
If `popup_policy` is None or cannot be executed, the default is False."""
popup_handler = None
"""The value of the `popup_handler` variable is a function that handles
newly created popups.
It takes 2 arguments:
- `browser`: The browser which opened the popup
- `popup_browser`: The (newly created) popup browser
It should place the `popup_browser` somewhere in the widget tree
If `popup_handler` is None, cannot be executed or doesn't insert
`popup_browser` to the widget tree, the default is to add it to the Window.
"""
close_handler = None
"""The value of the `close_handler` variable is a function that handles
closing browsers or popups.
It takes 1 argument:
- `browser`: The browser to be closed
It remove everything belonging to `browser` from the widget tree
If `close_handler` is None, cannot be executed or doesn't remove `browser`
from the widget tree, the default is to just remove the `browser` from its
parent."""
keyboard_position = None
"""The value of the `keyboard_position` variable is a function that handles
positioning of the keyboard on focusing a keyboard element in the browser.
It takes 4 arguments:
- `browser`: The browser in which the element was focused
- `keyboard_widget`: The keyboard widget
- `rect`: The rectangle the focused element takes *within* the browser
- `attributes`: The HTML attributes of the focused element
It should set `keyboard_widget.pos` to the desired value
If `keyboard_position` is None or cannot be executed, the default is to
just leave the keyboard widget where it is."""
_touches = []
_browser = None
_popup = None
_texture = None
def __init__(self, url="", *largs, **dargs):
    """Set up a browser widget displaying `url`.

    Optional keyword arguments (popped from `dargs` before the rest is
    forwarded to the Widget base class):
    - popup_policy / popup_handler / close_handler / keyboard_position:
      callbacks, see the class-attribute docs of the same names
    - browser: an existing cefpython browser to wrap; when omitted, a new
      off-screen browser is created (initializing cefpython on first use)
    """
    self.url = url
    self.popup_policy = dargs.pop(
        "popup_policy", CEFBrowser.always_block_popups)
    self.popup_handler = dargs.pop(
        "popup_handler", CEFBrowser.fullscreen_popup)
    self.close_handler = dargs.pop(
        "close_handler", CEFBrowser.do_nothing)
    self.keyboard_position = dargs.pop(
        "keyboard_position", CEFBrowser.keyboard_position_optimal)
    self._browser = dargs.pop("browser", None)
    self._popup = CEFBrowserPopup(self)
    self._selection_bubble = CEFBrowserCutCopyPasteBubble(self)
    self.__rect = None
    self.__keyboard_state = {}  # filled by _keyboard_update; starts empty
    self.js = CEFBrowserJSProxy(self)
    super(CEFBrowser, self).__init__(**dargs)
    self.register_event_type("on_load_start")
    self.register_event_type("on_load_end")
    self.register_event_type("on_load_error")
    self.register_event_type("on_js_dialog")
    self.register_event_type("on_before_unload_dialog")
    # Off-screen rendering: CEF paints into this texture, which is drawn
    # by a canvas Rectangle; flip_vertical matches CEF's row order.
    self._texture = Texture.create(
        size=self.size, colorfmt="rgba", bufferfmt="ubyte")
    self._texture.flip_vertical()
    with self.canvas:
        Color(1, 1, 1)
        self.__rect = Rectangle(
            pos=self.pos, size=self.size, texture=self._texture)
    # cefpython may only be initialized once per process.
    if not CEFBrowser._cefpython_initialized:
        cefpython_initialize(CEFBrowser)
        CEFBrowser._cefpython_initialized = True
    if not self._browser:
        # On x11 input provider we have the window-id (handle)
        window_id = 0
        try:
            from kivy.core.window import Window as KivyWindow
            window_id = KivyWindow.window_id
        except Exception as e:
            Logger.debug("Use window handle %s, because: %s", window_id, e)
        window_info = cefpython.WindowInfo()
        window_info.SetAsOffscreen(window_id)
        self._browser = cefpython.CreateBrowserSync(
            window_info,
            {"windowless_frame_rate": 60},
            navigateUrl=self.url,
        )
    self._browser.SetClientHandler(client_handler)
    client_handler.browser_widgets[self._browser] = self
    self._browser.WasResized()
    self.bind(size=self._realign)
    self.bind(pos=self._realign)
    self.bind(parent=self._on_parent)
    self.bind(focus=self._on_focus)
    self.html5_drag_representation = Factory.HTML5DragIcon()
    # Inject the JS <-> Python bridge into the page context.
    self.js._inject()
@classmethod
def update_flags(cls, d):
    """Merge the option dict `d` into CEFBrowser's flag store.

    See the docs for the recognized keys and values.
    """
    CEFBrowser._flags.update(d)
@classmethod
def update_command_line_switches(cls, d):
    """Merge the dict `d` into the command-line switches handed to
    cefpython (see the cefpython docs for recognized switches).

    Raises CEFAlreadyInitialized once cefpython has started, because
    switches can no longer take effect at that point.
    """
    if CEFBrowser._cefpython_initialized:
        raise CEFAlreadyInitialized()
    CEFBrowser._command_line_switches.update(d)
    Logger.debug(
        "CEFBrowser: update_command_line_switches => %s",
        CEFBrowser._command_line_switches)
@classmethod
def update_settings(cls, d):
    """Merge the dict `d` into the cefpython settings (see the
    cefpython docs for recognized keys/values).

    Raises CEFAlreadyInitialized once cefpython has started.
    """
    if CEFBrowser._cefpython_initialized:
        raise CEFAlreadyInitialized()
    CEFBrowser._settings.update(d)
    Logger.debug(
        "CEFBrowser: update_settings => %s", CEFBrowser._settings)
@classmethod
def set_caches_path(cls, cp):
    """Point CEF's run-time cache storage at the read/writeable
    directory `cp`.

    Raises CEFAlreadyInitialized once cefpython has started.
    """
    if CEFBrowser._cefpython_initialized:
        raise CEFAlreadyInitialized()
    CEFBrowser._caches_path = cp
    Logger.debug(
        "CEFBrowser: caches_path: %s\n cookies_path: %s\n logs_path: %s",
        CEFBrowser._caches_path, CEFBrowser._cookies_path,
        CEFBrowser._logs_path)
@classmethod
def set_cookies_path(cls, cp):
    """Point CEF's run-time cookie storage at the read/writeable
    directory `cp`.

    Raises CEFAlreadyInitialized once cefpython has started.
    """
    if CEFBrowser._cefpython_initialized:
        raise CEFAlreadyInitialized()
    CEFBrowser._cookies_path = cp
    Logger.debug(
        "CEFBrowser: caches_path: %s\n cookies_path: %s\n logs_path: %s",
        CEFBrowser._caches_path, CEFBrowser._cookies_path,
        CEFBrowser._logs_path)
@classmethod
def set_logs_path(cls, lp):
    """Point CEF's log output at the read/writeable location `lp`.

    Raises CEFAlreadyInitialized once cefpython has started.
    """
    if CEFBrowser._cefpython_initialized:
        raise CEFAlreadyInitialized()
    CEFBrowser._logs_path = lp
    Logger.debug(
        "CEFBrowser: caches_path: %s\n cookies_path: %s\n logs_path: %s",
        CEFBrowser._caches_path, CEFBrowser._cookies_path,
        CEFBrowser._logs_path)
@classmethod
def set_data_path(cls, dp):
    """Set one base directory `dp` for all of CEF's run-time data:
    - caches go to '`dp`/caches'
    - cookies go to '`dp`/cookies'
    - logs go to '`dp`/logs'

    `dp` is created (mode 0o700) if it does not yet exist.
    Raises CEFAlreadyInitialized once cefpython has started.
    """
    if CEFBrowser._cefpython_initialized:
        raise CEFAlreadyInitialized()
    # makedirs also creates missing parent directories; os.mkdir raised
    # FileNotFoundError for nested paths. exist_ok keeps the old
    # "skip if already a directory" behavior.
    os.makedirs(dp, 0o700, exist_ok=True)
    CEFBrowser._caches_path = os.path.join(dp, "caches")
    CEFBrowser._cookies_path = os.path.join(dp, "cookies")
    CEFBrowser._logs_path = os.path.join(dp, "logs")
    Logger.debug(
        "CEFBrowser: \ncaches_path: %s\n cookies_path: %s\n logs_path: %s",
        CEFBrowser._caches_path,
        CEFBrowser._cookies_path,
        CEFBrowser._logs_path,
    )
def _realign(self, *largs):
    """Keep the texture, the canvas rectangle and CEF in sync with the
    widget's pos/size (bound to the `size` and `pos` properties).

    The texture is only recreated when the size actually changed, since
    a Kivy texture cannot be resized in place.
    """
    ts = self._texture.size
    ss = self.size
    schg = (ts[0] != ss[0] or ts[1] != ss[1])
    if schg:
        self._texture = Texture.create(
            size=self.size, colorfmt="rgba", bufferfmt="ubyte")
        self._texture.flip_vertical()
    if self.__rect:
        with self.canvas:
            Color(1, 1, 1)
            self.__rect.pos = self.pos
            if schg:
                self.__rect.size = self.size
        if schg:
            self._update_rect()
    if self._browser:
        self._browser.WasResized()
        self._browser.NotifyScreenInfoChanged()
    try:
        self._keyboard_update(**self.__keyboard_state)
    except Exception:
        # was a bare `except:` — narrowed so Ctrl-C/SystemExit still
        # propagate; keyboard state may be empty before the first focus,
        # so repositioning is best-effort here.
        pass
def _on_parent(self, obj, parent):
    """Pause CEF painting while the widget is detached from the tree."""
    self._browser.WasHidden(not parent)  # avoid wasted paints when hidden
    try:
        self._keyboard_update(**self.__keyboard_state)
    except Exception:
        # was a bare `except:` — narrowed; keyboard state may be empty
        # before the first focus event, so this is best-effort.
        pass
def _on_focus(self, obj, focus):
    """Blur the focused HTML element when the widget loses focus."""
    super(CEFBrowser, self)._on_focus(obj, focus)
    # .get() instead of [] — __keyboard_state starts out as an empty dict
    # in __init__, so losing focus before any keyboard event must not
    # raise KeyError.
    if not focus and self.__keyboard_state.get("shown"):
        self._browser.GetMainFrame().ExecuteJavascript(
            "__kivy__activeKeyboardElement.blur();")
def _update_rect(self):
    # Re-point the canvas rectangle at the (possibly recreated) texture.
    if self.__rect:
        self.__rect.texture = self._texture
def go_back(self):
    """Navigate one step back in the browser history."""
    self._browser.GoBack()
def go_forward(self):
    """Navigate one step forward in the browser history."""
    self._browser.GoForward()
def stop_loading(self):
    """Cancel the current page load."""
    self._browser.StopLoad()
def reload(self, ignore_cache=True):
    """Reload the current page, bypassing the browser cache by default."""
    if ignore_cache:
        self._browser.ReloadIgnoreCache()
        return
    self._browser.Reload()
def delete_cookie(self, url=""):
    """Delete the cookie for `url`; an empty url deletes all cookies."""
    cookie_manager = cefpython.CookieManager.GetGlobalManager()
    if not cookie_manager:
        Logger.warning("No cookie manager found!, Can't delete cookie(s)")
        return
    cookie_manager.DeleteCookies(url, "")
def on_url(self, instance, value):
    """Navigate when the `url` property is set to a different address."""
    browser = self._browser
    if browser and value and value != browser.GetUrl():
        browser.Navigate(self.url)
def on_js_dialog(
    self,
    browser,
    origin_url,
    accept_lang,
    dialog_type,
    message_text,
    default_prompt_text,
    callback,
    suppress_message,
):
    """Event-handler stub (registered in __init__): fired when the page
    opens a JS dialog (alert/confirm/prompt). Override or bind to react;
    the default does nothing."""
    pass
def on_before_unload_dialog(
    self,
    browser,
    message_text,
    is_reload,
    callback,
):
    """Event-handler stub (registered in __init__): fired for the page's
    onbeforeunload confirmation dialog. Default does nothing."""
    pass
def on_load_start(self, frame):
    """Event-handler stub (registered in __init__): a frame started
    loading. Default does nothing."""
    pass
def on_load_end(self, frame, http_status_code):
    """Event-handler stub (registered in __init__): a frame finished
    loading with `http_status_code`. Default does nothing."""
    pass
def on_load_error(self, frame, error_code, error_text, failed_url):
    """Default load-error handler: log the failure details.

    (The redundant trailing `pass` after the Logger call was removed.)
    """
    Logger.error(
        "on_load_error=> Code: %s, error_text: %s, failedURL: %s",
        error_code,
        error_text,
        failed_url,
    )
def _keyboard_update(self, shown, rect, attributes):
"""
:param shown: | |
1, 0, 0, 0, 1, 0],
[1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0],
[1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0],
[1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1],
[1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0],
[1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0],
[0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1],
[1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1],
[0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0],
[0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1],
[0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0],
[1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1],
[1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1],
[0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, | |
not None:
pulumi.set(__self__, "http_get", http_get)
if tcp_socket is not None:
pulumi.set(__self__, "tcp_socket", tcp_socket)
@property
@pulumi.getter(name="exec")
def exec_(self) -> Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartExec']:
    """
    One and only one of the following should be specified. Exec specifies the action to take.
    """
    # Stored under "exec_": the trailing underscore avoids the Python keyword.
    return pulumi.get(self, "exec_")
@property
@pulumi.getter(name="httpGet")
def http_get(self) -> Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGet']:
    """
    HTTPGet specifies the http request to perform.
    """
    # Value (if any) was stored by __init__ via pulumi.set under this key.
    return pulumi.get(self, "http_get")
@property
@pulumi.getter(name="tcpSocket")
def tcp_socket(self) -> Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartTcpSocket']:
    """
    TCPSocket specifies an action involving a TCP port. TCP hooks not yet supported TODO: implement a realistic TCP lifecycle hook
    """
    # Value (if any) was stored by __init__ via pulumi.set under this key.
    return pulumi.get(self, "tcp_socket")
def _translate_property(self, prop):
    # Map camelCase wire names to snake_case Python attribute names;
    # unknown names pass through unchanged.
    return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartExec(dict):
    """
    One and only one of the following should be specified. Exec specifies the action to take.
    """

    def __init__(__self__, *,
                 command: Optional[Sequence[str]] = None):
        """
        One and only one of the following should be specified. Exec specifies the action to take.
        :param Sequence[str] command: Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy.
        """
        # Only materialize the key when a value was actually provided.
        if command is not None:
            pulumi.set(__self__, "command", command)

    @property
    @pulumi.getter
    def command(self) -> Optional[Sequence[str]]:
        """
        Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy.
        """
        return pulumi.get(self, "command")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGet(dict):
    """
    HTTPGet specifies the http request to perform.
    """

    def __init__(__self__, *,
                 port: 'outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetPort',
                 host: Optional[str] = None,
                 http_headers: Optional[Sequence['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetHttpHeaders']] = None,
                 path: Optional[str] = None,
                 scheme: Optional[str] = None):
        """
        HTTPGet specifies the http request to perform.
        :param 'SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetPortArgs' port: Name or number of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.
        :param str host: Host name to connect to, defaults to the pod IP. You probably want to set "Host" in httpHeaders instead.
        :param Sequence['SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetHttpHeadersArgs'] http_headers: Custom headers to set in the request. HTTP allows repeated headers.
        :param str path: Path to access on the HTTP server.
        :param str scheme: Scheme to use for connecting to the host. Defaults to HTTP.
        """
        # `port` is required; the remaining keys are only set when given.
        pulumi.set(__self__, "port", port)
        for key, value in (("host", host),
                           ("http_headers", http_headers),
                           ("path", path),
                           ("scheme", scheme)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def port(self) -> 'outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetPort':
        """
        Name or number of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        Host name to connect to, defaults to the pod IP. You probably want to set "Host" in httpHeaders instead.
        """
        return pulumi.get(self, "host")

    @property
    @pulumi.getter(name="httpHeaders")
    def http_headers(self) -> Optional[Sequence['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetHttpHeaders']]:
        """
        Custom headers to set in the request. HTTP allows repeated headers.
        """
        return pulumi.get(self, "http_headers")

    @property
    @pulumi.getter
    def path(self) -> Optional[str]:
        """
        Path to access on the HTTP server.
        """
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def scheme(self) -> Optional[str]:
        """
        Scheme to use for connecting to the host. Defaults to HTTP.
        """
        return pulumi.get(self, "scheme")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetHttpHeaders(dict):
    """
    HTTPHeader describes a custom header to be used in HTTP probes
    """

    def __init__(__self__, *,
                 name: str,
                 value: str):
        """
        HTTPHeader describes a custom header to be used in HTTP probes
        :param str name: The header field name
        :param str value: The header field value
        """
        # Both fields are required by the schema.
        for key, val in (("name", name), ("value", value)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The header field name
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The header field value
        """
        return pulumi.get(self, "value")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartHttpGetPort(dict):
    # Empty generated wrapper — presumably the int-or-string port union
    # type, for which the schema declares no properties; TODO confirm
    # against the generator's schema.
    def __init__(__self__):
        pass
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartTcpSocket(dict):
    """
    TCPSocket specifies an action involving a TCP port. TCP hooks not yet supported TODO: implement a realistic TCP lifecycle hook
    """

    def __init__(__self__, *,
                 port: 'outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartTcpSocketPort',
                 host: Optional[str] = None):
        """
        TCPSocket specifies an action involving a TCP port. TCP hooks not yet supported TODO: implement a realistic TCP lifecycle hook
        :param 'SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartTcpSocketPortArgs' port: Number or name of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.
        :param str host: Optional: Host name to connect to, defaults to the pod IP.
        """
        # `port` is required; `host` is only set when provided.
        pulumi.set(__self__, "port", port)
        if host is not None:
            pulumi.set(__self__, "host", host)

    @property
    @pulumi.getter
    def port(self) -> 'outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartTcpSocketPort':
        """
        Number or name of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        Optional: Host name to connect to, defaults to the pod IP.
        """
        return pulumi.get(self, "host")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePostStartTcpSocketPort(dict):
    # Empty generated wrapper — presumably the int-or-string port union
    # type, for which the schema declares no properties; TODO confirm
    # against the generator's schema.
    def __init__(__self__):
        pass
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStop(dict):
    """
    PreStop is called immediately before a container is terminated due to an API request or management event such as liveness/startup probe failure, preemption, resource contention, etc. The handler is not called if the container crashes or exits. The reason for termination is passed to the handler. The Pod's termination grace period countdown begins before the PreStop hooked is executed. Regardless of the outcome of the handler, the container will eventually terminate within the Pod's termination grace period. Other management of the container blocks until the hook completes or until the termination grace period is reached. More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks
    """

    def __init__(__self__, *,
                 exec_: Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopExec'] = None,
                 http_get: Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopHttpGet'] = None,
                 tcp_socket: Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopTcpSocket'] = None):
        """
        PreStop is called immediately before a container is terminated due to an API request or management event such as liveness/startup probe failure, preemption, resource contention, etc. The handler is not called if the container crashes or exits. The reason for termination is passed to the handler. The Pod's termination grace period countdown begins before the PreStop hooked is executed. Regardless of the outcome of the handler, the container will eventually terminate within the Pod's termination grace period. Other management of the container blocks until the hook completes or until the termination grace period is reached. More info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks
        :param 'SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopExecArgs' exec_: One and only one of the following should be specified. Exec specifies the action to take.
        :param 'SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopHttpGetArgs' http_get: HTTPGet specifies the http request to perform.
        :param 'SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopTcpSocketArgs' tcp_socket: TCPSocket specifies an action involving a TCP port. TCP hooks not yet supported TODO: implement a realistic TCP lifecycle hook
        """
        # All three handler variants are optional; only set keys that
        # were actually provided.
        for key, value in (("exec_", exec_),
                           ("http_get", http_get),
                           ("tcp_socket", tcp_socket)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="exec")
    def exec_(self) -> Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopExec']:
        """
        One and only one of the following should be specified. Exec specifies the action to take.
        """
        return pulumi.get(self, "exec_")

    @property
    @pulumi.getter(name="httpGet")
    def http_get(self) -> Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopHttpGet']:
        """
        HTTPGet specifies the http request to perform.
        """
        return pulumi.get(self, "http_get")

    @property
    @pulumi.getter(name="tcpSocket")
    def tcp_socket(self) -> Optional['outputs.SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopTcpSocket']:
        """
        TCPSocket specifies an action involving a TCP port. TCP hooks not yet supported TODO: implement a realistic TCP lifecycle hook
        """
        return pulumi.get(self, "tcp_socket")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SeldonDeploymentSpecPredictorsComponentSpecsSpecEphemeralContainersLifecyclePreStopExec(dict):
"""
One and only one of the following should be specified. Exec specifies the action to | |
experiments to measure the T2 star or T2 echo decay time of 1 or more qubits.
:param qc: The QuantumComputer to run the experiment on
:param t2_experiment: A pandas DataFrame containing: time, T2 program
:param detuning: The additional detuning frequency about the z axis.
:return: pandas DataFrame containing T2 results, and detuning used in creating experiments for
those results.
"""
results = []
for index, row in t2_experiment.iterrows():
t = row['Time']
program = row['Program']
detuning = row['Detuning']
executable = qc.compiler.native_quil_to_executable(program)
bitstrings = qc.run(executable)
qubits = list(program.get_qubits())
for i in range(len(qubits)):
avg = np.mean(bitstrings[:, i])
results.append({
'Qubit': qubits[i],
'Time': t,
'Num_bitstrings': len(bitstrings),
'Average': float(avg),
'Detuning': float(detuning),
})
return pd.DataFrame(results)
def estimate_t2(df: pd.DataFrame) -> pd.DataFrame:
    """
    Estimate T2 star or T2 echo from experimental data.

    Fits each qubit's (Time, Average) series to an exponentially decaying
    sinusoid. The detuning is not a parameter of this function: it is read
    back from the 'Detuning' column recorded at experiment-generation time.

    :param df: A pandas DataFrame with experimental T2 results; expected
        columns: 'Qubit', 'Time', 'Average', 'Detuning'.
    :return: pandas DataFrame with one row per qubit: 'T2' (in us),
        'Freq' (in MHz), the raw fit parameters/errors, and a 'Message'
        that is None on success or an explanation when the fit failed.
    """
    results = []
    for q in df['Qubit'].unique():
        df2 = df[df['Qubit'] == q].sort_values('Time')
        x_data = df2['Time']
        y_data = df2['Average']
        # all rows of one qubit's experiment share the same detuning
        detuning = df2['Detuning'].values[0]
        try:
            fit_params, fit_params_errs = fit_to_exponentially_decaying_sinusoidal_curve(x_data,
                                                                                        y_data,
                                                                                        detuning)
            results.append({
                'Qubit': q,
                'T2': fit_params[1] / MICROSECOND,
                'Freq': fit_params[2] / MHZ,
                'Fit_params': fit_params,
                'Fit_params_errs': fit_params_errs,
                'Message': None,
            })
        except RuntimeError:
            print(f"Could not fit to experimental data for qubit {q}")
            results.append({
                'Qubit': q,
                'T2': None,
                'Freq': None,
                'Fit_params': None,
                'Fit_params_errs': None,
                'Message': 'Could not fit to experimental data for qubit' + str(q),
            })
    return pd.DataFrame(results)
def plot_t2_estimate_over_data(df: pd.DataFrame,
                               qubits: list = None,
                               t2_type: str = 'unknown',
                               filename: str = None) -> None:
    """
    Plot T2 star or T2 echo experimental data together with the fitted
    exponentially decaying sinusoid for each qubit.

    :param df: A pandas DataFrame containing experimental results to plot
        (columns 'Qubit', 'Time', 'Average', 'Detuning').
    :param qubits: A list of qubits that you actually want plotted.
        The default is all qubits present in `df`.
    :param t2_type: String, either 'star' or 'echo'; only affects the
        plot title.
    :param filename: If given, the figure is also saved to this path.
    :return: None
    """
    if qubits is None:
        qubits = df['Qubit'].unique().tolist()
    # check the user specified valid qubits
    for qbx in qubits:
        if qbx not in df['Qubit'].unique():
            raise ValueError("The list of qubits does not match the ones you experimented on.")
    for q in qubits:
        df2 = df[df['Qubit'] == q].sort_values('Time')
        x_data = df2['Time']
        y_data = df2['Average']
        # detuning is read back from the experiment data, not a parameter
        detuning = df2['Detuning'].values[0]
        plt.plot(x_data / MICROSECOND, y_data, 'o-', label=f"Qubit {q} T2 data")
        try:
            fit_params, fit_params_errs = fit_to_exponentially_decaying_sinusoidal_curve(x_data,
                                                                                        y_data,
                                                                                        detuning)
        except RuntimeError:
            print(f"Could not fit to experimental data for qubit {q}")
        else:
            plt.plot(x_data / MICROSECOND,
                     exponentially_decaying_sinusoidal_curve(x_data, *fit_params),
                     label=f"QC{q} fit: freq={fit_params[2] / MHZ:.2f}MHz, "
                           f""f"T2={fit_params[1] / MICROSECOND:.2f}us")
    plt.xlabel("Time [µs]")
    plt.ylabel("Pr(measuring 1)")
    if t2_type.lower() == 'star':
        plt.title("$T_2^*$ (Ramsey) decay")
    elif t2_type.lower() == 'echo':
        plt.title("$T_2$ (Echo) decay")
    else:
        plt.title("$T_2$ (unknown) decay")
    plt.legend(loc='best')
    plt.tight_layout()
    if filename is not None:
        plt.savefig(filename)
    plt.show()
# ==================================================================================================
# TODO CPMG
# ==================================================================================================
# ==================================================================================================
# Rabi
# ==================================================================================================
def generate_single_rabi_experiment(qubits: Union[int, List[int]],
                                    theta: float,
                                    n_shots: int = 1000) -> Program:
    """
    Build a native-Quil Rabi program that rotates the given qubits by ``theta``.

    A Rabi curve is traced out by running this program for successively larger
    rotation angles on the same initial state.

    :param qubits: Which qubits to measure (a single index or a list of them).
    :param theta: The angle of the Rabi RX rotation.
    :param n_shots: The number of shots to average over for the data point.
    :return: A Program that rotates through the given angle about the X axis.
    """
    program = Program()
    # Accept a bare qubit index by wrapping it in a one-element list.
    try:
        len(qubits)
    except TypeError:
        qubits = [qubits]
    ro = program.declare('ro', 'BIT', len(qubits))
    # Rotate every qubit first, then read each one out into its own bit.
    for qubit in qubits:
        program += RX(theta, qubit)
    for slot, qubit in enumerate(qubits):
        program += MEASURE(qubit, ro[slot])
    program.wrap_in_numshots_loop(n_shots)
    return program
def generate_rabi_experiments(qubits: Union[int, List[int]],
                              n_shots: int = 1000,
                              num_points: int = 15) -> pd.DataFrame:
    """
    Build the sequence of programs that together constitute a Rabi experiment.

    Rabi oscillations are observed by applying successively larger rotations to
    the same initial state.

    :param qubits: Which qubits to measure.
    :param n_shots: The number of shots to average over for each data point.
    :param num_points: The number of points on each Rabi curve.
    :return: pandas DataFrame with columns: Angle, Program
    """
    thetas = np.linspace(0.0, 2 * np.pi, num_points)
    return pd.DataFrame([
        {'Angle': theta,
         'Program': generate_single_rabi_experiment(qubits, theta, n_shots)}
        for theta in thetas
    ])
def acquire_data_rabi(qc: QuantumComputer,
                      rabi_experiment: pd.DataFrame,
                      filename: str = None) -> pd.DataFrame:
    """
    Execute experiments to measure Rabi flop one or more qubits.

    :param qc: The QuantumComputer to run the experiment on
    :param rabi_experiment: pandas DataFrame with columns (Angle, Program)
    :param filename: If given, the results are additionally serialized to this
        path as JSON via ``DataFrame.to_json``.
    :return: DataFrame with one row per (qubit, angle) holding the average
        measured bit value over all shots.
    """
    results = []
    for index, row in rabi_experiment.iterrows():
        theta = row['Angle']
        program = row['Program']
        # Each row is an independent program: compile it for this QC and run it.
        executable = qc.compiler.native_quil_to_executable(program)
        bitstrings = qc.run(executable)
        # NOTE(review): assumes bitstring columns are ordered to match
        # program.get_qubits() — confirm against the pyquil run() convention.
        qubits = list(program.get_qubits())
        for i in range(len(qubits)):
            avg = np.mean(bitstrings[:, i])
            results.append({
                'Qubit': qubits[i],
                'Angle': theta,
                'Num_bitstrings': len(bitstrings),
                'Average': float(avg),
            })
    if filename:
        pd.DataFrame(results).to_json(filename)
    return pd.DataFrame(results)
def estimate_rabi(df: pd.DataFrame):
    """
    Estimate Rabi oscillation parameters from experimental data.

    :param df: Experimental Rabi results; one row per (Qubit, Angle) with an
        'Average' column holding the measured Pr(measuring 1).
    :return: pandas DataFrame with one row per qubit containing the sinusoidal
        fit parameters, or Nones plus an error 'Message' if the fit failed.
    """
    results = []
    for q in df['Qubit'].unique():
        df2 = df[df['Qubit'] == q].sort_values('Angle')
        angles = df2['Angle']
        prob_of_one = df2['Average']
        try:
            # fit to sinusoid
            fit_params, fit_params_errs = fit_to_sinusoidal_waveform(angles, prob_of_one)
        except RuntimeError:
            # Fit did not converge; record the failure for this qubit.
            print(f"Could not fit to experimental data for qubit {q}")
            results.append({
                'Qubit': q,
                'Angle': None,
                'Prob_of_one': None,
                'Fit_params': None,
                'Fit_params_errs': None,
                # BUG FIX: was 'Could not fit ... qubit' + str(q), which ran the
                # number into the word ("qubit5"); match the printed f-string.
                'Message': f'Could not fit to experimental data for qubit {q}',
            })
        else:
            results.append({
                'Qubit': q,
                'Angle': fit_params[1],
                'Prob_of_one': fit_params[2],
                'Fit_params': fit_params,
                'Fit_params_errs': fit_params_errs,
                'Message': None,
            })
    return pd.DataFrame(results)
def plot_rabi_estimate_over_data(df: pd.DataFrame,
                                 qubits: list = None,
                                 filename: str = None) -> None:
    """
    Plot Rabi oscillation experimental data and the estimated sinusoidal fit.

    :param df: Experimental results to plot and fit a curve to.
    :param qubits: A list of qubits to plot. The default is all qubits in df.
    :param filename: If given, the figure is also saved to this path.
    :return: None
    """
    if qubits is None:
        qubits = df['Qubit'].unique().tolist()
    # check the user specified valid qubits
    for qbx in qubits:
        if qbx not in df['Qubit'].unique():
            raise ValueError("The list of qubits does not match the ones you experimented on.")
    for q in qubits:
        df2 = df[df['Qubit'] == q].sort_values('Angle')
        angles = df2['Angle']
        prob_of_one = df2['Average']
        # plot raw data
        plt.plot(angles, prob_of_one, 'o-', label=f"qubit {q} Rabi data")
        try:
            # fit to sinusoid
            fit_params, fit_params_errs = fit_to_sinusoidal_waveform(angles, prob_of_one)
        except RuntimeError:
            print(f"Could not fit to experimental data for qubit {q}")
        else:
            # overlay fitted sinusoidal curve
            plt.plot(angles, sinusoidal_waveform(angles, *fit_params),
                     label=f"qubit {q} fitted line")
    plt.xlabel("RX angle [rad]")
    # BUG FIX: label previously used "\langle" in a non-raw string (invalid
    # escape sequence, wrong bracket for a ket) and never closed the math
    # environment; use a raw string with the correct \rangle.
    plt.ylabel(r"Pr($|1\rangle$)")
    plt.title("Rabi flop")
    plt.legend(loc='best')
    plt.tight_layout()
    if filename is not None:
        plt.savefig(filename)
    plt.show()
# ==================================================================================================
# CZ phase Ramsey
# ==================================================================================================
def generate_cz_phase_ramsey_program(qb: int, other_qb: int, n_shots: int = 1000) -> Program:
    """
    Generate a parametric CZ phase Ramsey experiment program.

    The phase kick is not baked in: the program declares a ``theta`` memory
    region so the phase can be supplied at run time for each data point.
    (The docstring previously documented nonexistent ``phase``/``num_shots``
    parameters; the only tunables are the arguments below plus ``theta``.)

    :param qb: The qubit to move around the Bloch sphere and measure the incurred RZ on.
    :param other_qb: The other qubit that constitutes a two-qubit pair along with `qb`.
    :param n_shots: The number of shots to average over for each data point.
    :return: A parametric Program for performing a CZ Ramsey experiment.
    """
    program = Program()
    # NOTE: only need readout register for `qb` not `other_qb` since `other_qb` is only
    # needed to identify which CZ gate we're using
    ro = program.declare('ro', 'BIT', 1)
    # run-time parameter: the phase kick applied after the CZ gate
    theta = program.declare('theta', 'REAL')
    # go to the equator
    program += Program(RX(np.pi / 2, qb))
    # apply the CZ gate - note that CZ is symmetric, so the order of qubits doesn't matter
    program += Program(CZ(qb, other_qb))
    # go to |1> after a phase kick
    program += Program(RZ(theta, qb), RX(np.pi / 2, qb))
    program += MEASURE(qb, ro[0])
    program.wrap_in_numshots_loop(n_shots)
    return program
def generate_cz_phase_ramsey_experiment(edges: List[Tuple[int, int]],
start_phase: float = 0.0,
stop_phase: float = 2 * np.pi,
num_points: int = 15,
num_shots: int = 1000):
'''
Returns a DataFrame of parameters and programs that constitute a CZ phase ramsey experiment.
:param edges: List of Tuples containing edges that one can perform a CZ on.
:param start_phase: The starting phase for the CZ phase Ramsey experiment.
:param stop_phase: The stopping phase for the CZ phase Ramsey experiment.
:param num_points: The | |
percentage_change: float = field(metadata=config(field_name="percentageChange"))
show_num_invites: bool = field(metadata=config(field_name="showNumInvites"))
show_fire: bool = field(metadata=config(field_name="showFire"))
tooltip_markdown: str = field(metadata=config(field_name="tooltipMarkdown"))
class FSStatusCode(Enum):
    """Lifecycle status of a filesystem operation (see FSStatusCodeStrings for wire names)."""
    START = 0
    FINISH = 1
    ERROR = 2
class FSStatusCodeStrings(Enum):
START = "start"
FINISH = "finish"
ERROR = "error"
class FSNotificationType(Enum):
ENCRYPTING = 0
DECRYPTING = 1
SIGNING = 2
VERIFYING = 3
REKEYING = 4
CONNECTION = 5
MD_READ_SUCCESS = 6
FILE_CREATED = 7
FILE_MODIFIED = 8
FILE_DELETED = 9
FILE_RENAMED = 10
INITIALIZED = 11
SYNC_CONFIG_CHANGED = 12
class FSNotificationTypeStrings(Enum):
ENCRYPTING = "encrypting"
DECRYPTING = "decrypting"
SIGNING = "signing"
VERIFYING = "verifying"
REKEYING = "rekeying"
CONNECTION = "connection"
MD_READ_SUCCESS = "md_read_success"
FILE_CREATED = "file_created"
FILE_MODIFIED = "file_modified"
FILE_DELETED = "file_deleted"
FILE_RENAMED = "file_renamed"
INITIALIZED = "initialized"
SYNC_CONFIG_CHANGED = "sync_config_changed"
class FSErrorType(Enum):
ACCESS_DENIED = 0
USER_NOT_FOUND = 1
REVOKED_DATA_DETECTED = 2
NOT_LOGGED_IN = 3
TIMEOUT = 4
REKEY_NEEDED = 5
BAD_FOLDER = 6
NOT_IMPLEMENTED = 7
OLD_VERSION = 8
OVER_QUOTA = 9
NO_SIG_CHAIN = 10
TOO_MANY_FOLDERS = 11
EXDEV_NOT_SUPPORTED = 12
DISK_LIMIT_REACHED = 13
DISK_CACHE_ERROR_LOG_SEND = 14
OFFLINE_ARCHIVED = 15
OFFLINE_UNSYNCED = 16
class FSErrorTypeStrings(Enum):
ACCESS_DENIED = "access_denied"
USER_NOT_FOUND = "user_not_found"
REVOKED_DATA_DETECTED = "revoked_data_detected"
NOT_LOGGED_IN = "not_logged_in"
TIMEOUT = "timeout"
REKEY_NEEDED = "rekey_needed"
BAD_FOLDER = "bad_folder"
NOT_IMPLEMENTED = "not_implemented"
OLD_VERSION = "old_version"
OVER_QUOTA = "over_quota"
NO_SIG_CHAIN = "no_sig_chain"
TOO_MANY_FOLDERS = "too_many_folders"
EXDEV_NOT_SUPPORTED = "exdev_not_supported"
DISK_LIMIT_REACHED = "disk_limit_reached"
DISK_CACHE_ERROR_LOG_SEND = "disk_cache_error_log_send"
OFFLINE_ARCHIVED = "offline_archived"
OFFLINE_UNSYNCED = "offline_unsynced"
@dataclass
class FSSyncStatusRequest(DataClassJsonMixin):
request_id: int = field(metadata=config(field_name="requestID"))
@dataclass
class PassphraseStream(DataClassJsonMixin):
passphrase_stream: str = field(metadata=config(field_name="passphraseStream"))
generation: int = field(metadata=config(field_name="generation"))
SessionToken = str
CsrfToken = str
HelloRes = str
@dataclass
class KVGetResult(DataClassJsonMixin):
team_name: str = field(metadata=config(field_name="teamName"))
namespace: str = field(metadata=config(field_name="namespace"))
entry_key: str = field(metadata=config(field_name="entryKey"))
revision: int = field(metadata=config(field_name="revision"))
entry_value: Optional[str] = field(
default=None, metadata=config(field_name="entryValue")
)
@dataclass
class KVPutResult(DataClassJsonMixin):
team_name: str = field(metadata=config(field_name="teamName"))
namespace: str = field(metadata=config(field_name="namespace"))
entry_key: str = field(metadata=config(field_name="entryKey"))
revision: int = field(metadata=config(field_name="revision"))
@dataclass
class EncryptedKVEntry(DataClassJsonMixin):
v: int = field(metadata=config(field_name="v"))
e: str = field(metadata=config(field_name="e"))
n: str = field(metadata=config(field_name="n"))
@dataclass
class KVListNamespaceResult(DataClassJsonMixin):
team_name: str = field(metadata=config(field_name="teamName"))
namespaces: Optional[List[str]] = field(
default=None, metadata=config(field_name="namespaces")
)
@dataclass
class KVListEntryKey(DataClassJsonMixin):
entry_key: str = field(metadata=config(field_name="entryKey"))
revision: int = field(metadata=config(field_name="revision"))
@dataclass
class KVDeleteEntryResult(DataClassJsonMixin):
team_name: str = field(metadata=config(field_name="teamName"))
namespace: str = field(metadata=config(field_name="namespace"))
entry_key: str = field(metadata=config(field_name="entryKey"))
revision: int = field(metadata=config(field_name="revision"))
class ResetPromptType(Enum):
COMPLETE = 0
ENTER_NO_DEVICES = 1
ENTER_FORGOT_PW = 2
ENTER_RESET_PW = 3
class ResetPromptTypeStrings(Enum):
COMPLETE = "complete"
ENTER_NO_DEVICES = "enter_no_devices"
ENTER_FORGOT_PW = "enter_forgot_pw"
ENTER_RESET_PW = "enter_reset_pw"
@dataclass
class ResetPromptInfo(DataClassJsonMixin):
has_wallet: bool = field(metadata=config(field_name="hasWallet"))
class ResetPromptResponse(Enum):
NOTHING = 0
CANCEL_RESET = 1
CONFIRM_RESET = 2
class ResetPromptResponseStrings(Enum):
NOTHING = "nothing"
CANCEL_RESET = "cancel_reset"
CONFIRM_RESET = "confirm_reset"
class PassphraseRecoveryPromptType(Enum):
ENCRYPTED_PGP_KEYS = 0
class PassphraseRecoveryPromptTypeStrings(Enum):
ENCRYPTED_PGP_KEYS = "encrypted_pgp_keys"
class ResetMessage(Enum):
ENTERED_VERIFIED = 0
ENTERED_PASSWORDLESS = 1
REQUEST_VERIFIED = 2
NOT_COMPLETED = 3
CANCELED = 4
COMPLETED = 5
RESET_LINK_SENT = 6
class ResetMessageStrings(Enum):
ENTERED_VERIFIED = "entered_verified"
ENTERED_PASSWORDLESS = "entered_passwordless"
REQUEST_VERIFIED = "request_verified"
NOT_COMPLETED = "not_completed"
CANCELED = "canceled"
COMPLETED = "completed"
RESET_LINK_SENT = "reset_link_sent"
KBFSRootHash = str
MerkleStoreSupportedVersion = int
MerkleStoreKitHash = str
MerkleStoreKit = str
MerkleStoreEntryString = str
@dataclass
class KeyBundle(DataClassJsonMixin):
version: int = field(metadata=config(field_name="version"))
bundle: str = field(metadata=config(field_name="bundle"))
@dataclass
class MerkleRoot(DataClassJsonMixin):
version: int = field(metadata=config(field_name="version"))
root: str = field(metadata=config(field_name="root"))
LockID = int
MDPriority = int
@dataclass
class RekeyRequest(DataClassJsonMixin):
folder_id: str = field(metadata=config(field_name="folderID"))
revision: int = field(metadata=config(field_name="revision"))
class NetworkSource(Enum):
LOCAL = 0
REMOTE = 1
class NetworkSourceStrings(Enum):
LOCAL = "local"
REMOTE = "remote"
ChatConversationID = str
@dataclass
class DeletedTeamInfo(DataClassJsonMixin):
team_name: str = field(metadata=config(field_name="teamName"))
deleted_by: str = field(metadata=config(field_name="deletedBy"))
id: gregor1.MsgID = field(metadata=config(field_name="id"))
@dataclass
class WalletAccountInfo(DataClassJsonMixin):
account_id: str = field(metadata=config(field_name="accountID"))
num_unread: int = field(metadata=config(field_name="numUnread"))
@dataclass
class NotificationChannels(DataClassJsonMixin):
badges: bool = field(metadata=config(field_name="badges"))
session: bool = field(metadata=config(field_name="session"))
kbfs: bool = field(metadata=config(field_name="kbfs"))
kbfsdesktop: bool = field(metadata=config(field_name="kbfsdesktop"))
kbfslegacy: bool = field(metadata=config(field_name="kbfslegacy"))
kbfssubscription: bool = field(metadata=config(field_name="kbfssubscription"))
tracking: bool = field(metadata=config(field_name="tracking"))
favorites: bool = field(metadata=config(field_name="favorites"))
paperkeys: bool = field(metadata=config(field_name="paperkeys"))
keyfamily: bool = field(metadata=config(field_name="keyfamily"))
service: bool = field(metadata=config(field_name="service"))
app: bool = field(metadata=config(field_name="app"))
chat: bool = field(metadata=config(field_name="chat"))
pgp: bool = field(metadata=config(field_name="pgp"))
kbfsrequest: bool = field(metadata=config(field_name="kbfsrequest"))
users: bool = field(metadata=config(field_name="users"))
reachability: bool = field(metadata=config(field_name="reachability"))
team: bool = field(metadata=config(field_name="team"))
ephemeral: bool = field(metadata=config(field_name="ephemeral"))
teambot: bool = field(metadata=config(field_name="teambot"))
chatkbfsedits: bool = field(metadata=config(field_name="chatkbfsedits"))
chatdev: bool = field(metadata=config(field_name="chatdev"))
chatemoji: bool = field(metadata=config(field_name="chatemoji"))
chatemojicross: bool = field(metadata=config(field_name="chatemojicross"))
deviceclone: bool = field(metadata=config(field_name="deviceclone"))
chatattachments: bool = field(metadata=config(field_name="chatattachments"))
wallet: bool = field(metadata=config(field_name="wallet"))
audit: bool = field(metadata=config(field_name="audit"))
runtimestats: bool = field(metadata=config(field_name="runtimestats"))
featured_bots: bool = field(metadata=config(field_name="featuredBots"))
saltpack: bool = field(metadata=config(field_name="saltpack"))
class StatsSeverityLevel(Enum):
NORMAL = 0
WARNING = 1
SEVERE = 2
class StatsSeverityLevelStrings(Enum):
NORMAL = "normal"
WARNING = "warning"
SEVERE = "severe"
class ProcessType(Enum):
MAIN = 0
KBFS = 1
class ProcessTypeStrings(Enum):
MAIN = "main"
KBFS = "kbfs"
class PerfEventType(Enum):
NETWORK = 0
TEAMBOXAUDIT = 1
TEAMAUDIT = 2
USERCHAIN = 3
TEAMCHAIN = 4
CLEARCONV = 5
CLEARINBOX = 6
TEAMTREELOAD = 7
class PerfEventTypeStrings(Enum):
NETWORK = "network"
TEAMBOXAUDIT = "teamboxaudit"
TEAMAUDIT = "teamaudit"
USERCHAIN = "userchain"
TEAMCHAIN = "teamchain"
CLEARCONV = "clearconv"
CLEARINBOX = "clearinbox"
TEAMTREELOAD = "teamtreeload"
class SaltpackOperationType(Enum):
ENCRYPT = 0
DECRYPT = 1
SIGN = 2
VERIFY = 3
class SaltpackOperationTypeStrings(Enum):
ENCRYPT = "encrypt"
DECRYPT = "decrypt"
SIGN = "sign"
VERIFY = "verify"
@dataclass
class HttpSrvInfo(DataClassJsonMixin):
address: str = field(metadata=config(field_name="address"))
token: str = field(metadata=config(field_name="token"))
@dataclass
class TeamChangeSet(DataClassJsonMixin):
membership_changed: bool = field(metadata=config(field_name="membershipChanged"))
key_rotated: bool = field(metadata=config(field_name="keyRotated"))
renamed: bool = field(metadata=config(field_name="renamed"))
misc: bool = field(metadata=config(field_name="misc"))
class AvatarUpdateType(Enum):
NONE = 0
USER = 1
TEAM = 2
class AvatarUpdateTypeStrings(Enum):
NONE = "none"
USER = "user"
TEAM = "team"
class RuntimeGroup(Enum):
UNKNOWN = 0
LINUXLIKE = 1
DARWINLIKE = 2
WINDOWSLIKE = 3
class RuntimeGroupStrings(Enum):
UNKNOWN = "unknown"
LINUXLIKE = "linuxlike"
DARWINLIKE = "darwinlike"
WINDOWSLIKE = "windowslike"
@dataclass
class Feature(DataClassJsonMixin):
allow: bool = field(metadata=config(field_name="allow"))
default_value: bool = field(metadata=config(field_name="defaultValue"))
readonly: bool = field(metadata=config(field_name="readonly"))
label: str = field(metadata=config(field_name="label"))
class PassphraseType(Enum):
NONE = 0
PAPER_KEY = 1
PASS_PHRASE = 2
VERIFY_PASS_PHRASE = 3
class PassphraseTypeStrings(Enum):
    """String (wire) form of PassphraseType; values are the snake_case member names."""
    NONE = "none"
    PAPER_KEY = "paper_key"
    # BUG FIX: the value had been replaced with the placeholder "<PASSWORD>"
    # (anonymization residue); every other *Strings enum in this module uses
    # the snake_case of the member name, so restore "pass_phrase".
    PASS_PHRASE = "pass_phrase"
    VERIFY_PASS_PHRASE = "verify_pass_phrase"
@dataclass
class GetPassphraseRes(DataClassJsonMixin):
passphrase: str = field(metadata=config(field_name="passphrase"))
store_secret: bool = field(metadata=config(field_name="storeSecret"))
class SignMode(Enum):
ATTACHED = 0
DETACHED = 1
CLEAR = 2
class SignModeStrings(Enum):
ATTACHED = "attached"
DETACHED = "detached"
CLEAR = "clear"
@dataclass
class PGPEncryptOptions(DataClassJsonMixin):
no_sign: bool = field(metadata=config(field_name="noSign"))
no_self: bool = field(metadata=config(field_name="noSelf"))
binary_out: bool = field(metadata=config(field_name="binaryOut"))
key_query: str = field(metadata=config(field_name="keyQuery"))
recipients: Optional[List[str]] = field(
default=None, metadata=config(field_name="recipients")
)
@dataclass
class PGPDecryptOptions(DataClassJsonMixin):
assert_signed: bool = field(metadata=config(field_name="assertSigned"))
signed_by: str = field(metadata=config(field_name="signedBy"))
@dataclass
class PGPVerifyOptions(DataClassJsonMixin):
signed_by: str = field(metadata=config(field_name="signedBy"))
signature: str = field(metadata=config(field_name="signature"))
@dataclass
class KeyInfo(DataClassJsonMixin):
fingerprint: str = field(metadata=config(field_name="fingerprint"))
key: str = field(metadata=config(field_name="key"))
desc: str = field(metadata=config(field_name="desc"))
@dataclass
class PGPQuery(DataClassJsonMixin):
secret: bool = field(metadata=config(field_name="secret"))
query: str = field(metadata=config(field_name="query"))
exact_match: bool = field(metadata=config(field_name="exactMatch"))
@dataclass
class PGPPurgeRes(DataClassJsonMixin):
filenames: Optional[List[str]] = field(
default=None, metadata=config(field_name="filenames")
)
class FileType(Enum):
UNKNOWN = 0
DIRECTORY = 1
FILE = 2
class FileTypeStrings(Enum):
UNKNOWN = "unknown"
DIRECTORY = "directory"
FILE = "file"
class ProofState(Enum):
NONE = 0
OK = 1
TEMP_FAILURE = 2
PERM_FAILURE = 3
LOOKING = 4
SUPERSEDED = 5
POSTED = 6
REVOKED = 7
DELETED = 8
UNKNOWN_TYPE = 9
SIG_HINT_MISSING = 10
UNCHECKED = 11
class ProofStateStrings(Enum):
NONE = "none"
OK = "ok"
TEMP_FAILURE = "temp_failure"
PERM_FAILURE = "perm_failure"
LOOKING = "looking"
SUPERSEDED = "superseded"
POSTED = "posted"
REVOKED = "revoked"
DELETED = "deleted"
UNKNOWN_TYPE = "unknown_type"
SIG_HINT_MISSING = "sig_hint_missing"
UNCHECKED = "unchecked"
class ProofStatus(Enum):
"""
3: It's been found in the hunt, but not proven yet
1xx: Retryable soft errors; note that this will be put in the proof_cache, but won't
be returned from the proof cache in most cases. Their freshness will always be
RANCID.
2xx: Will likely result in a hard error, if repeated enough
3xx: Hard final errors
"""
NONE = 0
OK = 1
LOCAL = 2
FOUND = 3
BASE_ERROR = 100
HOST_UNREACHABLE = 101
PERMISSION_DENIED = 103
FAILED_PARSE = 106
DNS_ERROR = 107
AUTH_FAILED = 108
HTTP_429 = 129
HTTP_500 = 150
TIMEOUT = 160
INTERNAL_ERROR = 170
UNCHECKED = 171
MISSING_PVL = 172
BASE_HARD_ERROR = 200
NOT_FOUND = 201
CONTENT_FAILURE = 202
BAD_USERNAME = 203
BAD_REMOTE_ID = 204
TEXT_NOT_FOUND = 205
BAD_ARGS = 206
CONTENT_MISSING = 207
TITLE_NOT_FOUND = 208
SERVICE_ERROR = 209
TOR_SKIPPED = 210
TOR_INCOMPATIBLE = 211
HTTP_300 = 230
HTTP_400 = 240
HTTP_OTHER = 260
EMPTY_JSON = 270
DELETED = 301
SERVICE_DEAD = 302
BAD_SIGNATURE = 303
BAD_API_URL = 304
UNKNOWN_TYPE = 305
NO_HINT = 306
BAD_HINT_TEXT = 307
INVALID_PVL = 308
class ProofStatusStrings(Enum):
NONE = "none"
OK = "ok"
LOCAL = "local"
FOUND = "found"
BASE_ERROR = "base_error"
HOST_UNREACHABLE = "host_unreachable"
PERMISSION_DENIED = "permission_denied"
FAILED_PARSE = "failed_parse"
DNS_ERROR = "dns_error"
AUTH_FAILED = "auth_failed"
HTTP_429 = "http_429"
HTTP_500 = "http_500"
TIMEOUT | |
calling `project_project_id_tasks_post`") # noqa: E501
# verify the required parameter 'project_id' is set
if ('project_id' not in params or
params['project_id'] is None):
raise ValueError("Missing the required parameter `project_id` when calling `project_project_id_tasks_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_id' in params:
path_params['project_id'] = params['project_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/plain; charset=utf-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearer', 'cookie'] # noqa: E501
return self.api_client.call_api(
'/project/{project_id}/tasks', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Task', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def project_project_id_tasks_task_id_delete(self, project_id, task_id, **kwargs):  # noqa: E501
    """Deletes task (including output)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous call,
    in which case a request thread is returned and ``thread.get()`` yields the
    result.

    >>> thread = api.project_project_id_tasks_task_id_delete(project_id, task_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int project_id: Project ID (required)
    :param int task_id: task ID (required)
    :return: None
    """
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with _return_http_data_only set it returns just the payload (or, when
    # async_req is set, the request thread), so no branching is needed here.
    kwargs['_return_http_data_only'] = True
    return self.project_project_id_tasks_task_id_delete_with_http_info(project_id, task_id, **kwargs)  # noqa: E501
def project_project_id_tasks_task_id_delete_with_http_info(self, project_id, task_id, **kwargs):  # noqa: E501
    """Deletes task (including output)  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.project_project_id_tasks_task_id_delete_with_http_info(project_id, task_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param int project_id: Project ID (required)
    :param int task_id: task ID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_id', 'task_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() captures (self, project_id, task_id, kwargs); kwargs is then
    # flattened into params, so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method project_project_id_tasks_task_id_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_id' is set
    if ('project_id' not in params or
            params['project_id'] is None):
        raise ValueError("Missing the required parameter `project_id` when calling `project_project_id_tasks_task_id_delete`")  # noqa: E501
    # verify the required parameter 'task_id' is set
    if ('task_id' not in params or
            params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `project_project_id_tasks_task_id_delete`")  # noqa: E501
    collection_formats = {}
    # Substituted into the URL template below.
    path_params = {}
    if 'project_id' in params:
        path_params['project_id'] = params['project_id']  # noqa: E501
    if 'task_id' in params:
        path_params['task_id'] = params['task_id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = ['bearer', 'cookie']  # noqa: E501
    return self.api_client.call_api(
        '/project/{project_id}/tasks/{task_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def project_project_id_tasks_task_id_get(self, project_id, task_id, **kwargs):  # noqa: E501
    """Get a single task  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous call,
    in which case a request thread is returned and ``thread.get()`` yields the
    result.

    >>> thread = api.project_project_id_tasks_task_id_get(project_id, task_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int project_id: Project ID (required)
    :param int task_id: task ID (required)
    :return: Task
    """
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with _return_http_data_only set it returns just the payload (or, when
    # async_req is set, the request thread), so no branching is needed here.
    kwargs['_return_http_data_only'] = True
    return self.project_project_id_tasks_task_id_get_with_http_info(project_id, task_id, **kwargs)  # noqa: E501
def project_project_id_tasks_task_id_get_with_http_info(self, project_id, task_id, **kwargs):  # noqa: E501
    """Get a single task  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.project_project_id_tasks_task_id_get_with_http_info(project_id, task_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param int project_id: Project ID (required)
    :param int task_id: task ID (required)
    :return: Task
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_id', 'task_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() captures (self, project_id, task_id, kwargs); kwargs is then
    # flattened into params, so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method project_project_id_tasks_task_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_id' is set
    if ('project_id' not in params or
            params['project_id'] is None):
        raise ValueError("Missing the required parameter `project_id` when calling `project_project_id_tasks_task_id_get`")  # noqa: E501
    # verify the required parameter 'task_id' is set
    if ('task_id' not in params or
            params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `project_project_id_tasks_task_id_get`")  # noqa: E501
    collection_formats = {}
    # Substituted into the URL template below.
    path_params = {}
    if 'project_id' in params:
        path_params['project_id'] = params['project_id']  # noqa: E501
    if 'task_id' in params:
        path_params['task_id'] = params['task_id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/plain; charset=utf-8'])  # noqa: E501
    # Authentication setting
    auth_settings = ['bearer', 'cookie']  # noqa: E501
    return self.api_client.call_api(
        '/project/{project_id}/tasks/{task_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Task',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def project_project_id_tasks_task_id_output_get(self, project_id, task_id, **kwargs):  # noqa: E501
    """Get task output  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous call,
    in which case a request thread is returned and ``thread.get()`` yields the
    result.

    >>> thread = api.project_project_id_tasks_task_id_output_get(project_id, task_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int project_id: Project ID (required)
    :param int task_id: task ID (required)
    :return: list[TaskOutput]
    """
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with _return_http_data_only set it returns just the payload (or, when
    # async_req is set, the request thread), so no branching is needed here.
    kwargs['_return_http_data_only'] = True
    return self.project_project_id_tasks_task_id_output_get_with_http_info(project_id, task_id, **kwargs)  # noqa: E501
def project_project_id_tasks_task_id_output_get_with_http_info(self, project_id, task_id, **kwargs):  # noqa: E501
    """Get task output  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.project_project_id_tasks_task_id_output_get_with_http_info(project_id, task_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param int project_id: Project ID (required)
    :param int task_id: task ID (required)
    :return: list[TaskOutput]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_id', 'task_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() captures (self, project_id, task_id, kwargs); kwargs is then
    # flattened into params, so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method project_project_id_tasks_task_id_output_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_id' is set
    if ('project_id' not in params or
            params['project_id'] is None):
        raise ValueError("Missing the required parameter `project_id` when calling `project_project_id_tasks_task_id_output_get`")  # noqa: E501
    # verify the required parameter 'task_id' is set
    if ('task_id' not in params or
            params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `project_project_id_tasks_task_id_output_get`")  # noqa: E501
    collection_formats = {}
    # Substituted into the URL template below.
    path_params = {}
    if 'project_id' in params:
        path_params['project_id'] = params['project_id']  # noqa: E501
    if 'task_id' in params:
        path_params['task_id'] = params['task_id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/plain; charset=utf-8'])  # noqa: E501
    # Authentication setting
    auth_settings = ['bearer', 'cookie']  # noqa: E501
    return self.api_client.call_api(
        '/project/{project_id}/tasks/{task_id}/output', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[TaskOutput]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def project_project_id_templates_get(self, project_id, sort, order, **kwargs): # noqa: E501
"""Get template # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.project_project_id_templates_get(project_id, sort, order, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int project_id: Project ID (required)
:param str sort: sorting name (required)
:param str order: ordering manner (required)
:return: list[Template]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.project_project_id_templates_get_with_http_info(project_id, sort, order, **kwargs) # noqa: E501
else:
(data) = self.project_project_id_templates_get_with_http_info(project_id, sort, order, **kwargs) # noqa: E501
return data
def project_project_id_templates_get_with_http_info(self, project_id, sort, order, **kwargs): # | |
<gh_stars>0
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from datetime import datetime
from typing import Any, AsyncIterable, Optional, Dict
from azure.core.exceptions import ResourceExistsError, ResourceNotFoundError
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.keyvault.secrets.models import Secret, DeletedSecret, SecretProperties
from .._shared import AsyncKeyVaultClientBase
class SecretClient(AsyncKeyVaultClientBase):
"""A high-level asynchronous interface for managing a vault's secrets.
:param str vault_url: URL of the vault the client will access
:param credential: An object which can provide an access token for the vault, such as a credential from
:mod:`azure.identity.aio`
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START create_secret_client]
:end-before: [END create_secret_client]
:language: python
:caption: Create a new ``SecretClient``
:dedent: 4
"""
# pylint:disable=protected-access
@distributed_trace_async
async def get_secret(self, name: str, version: Optional[str] = None, **kwargs: "**Any") -> Secret:
"""Get a secret. Requires the secrets/get permission.
:param str name: The name of the secret
:param str version: (optional) Version of the secret to get. If unspecified, gets the latest version.
:rtype: ~azure.keyvault.secrets.models.Secret
:raises:
:class:`~azure.core.exceptions.ResourceNotFoundError` if the secret doesn't exist,
:class:`~azure.core.exceptions.HttpResponseError` for other errors
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START get_secret]
:end-before: [END get_secret]
:language: python
:caption: Get a secret
:dedent: 8
"""
bundle = await self._client.get_secret(
self.vault_url, name, version or "", error_map={404: ResourceNotFoundError}, **kwargs
)
return Secret._from_secret_bundle(bundle)
@distributed_trace_async
async def set_secret(
self,
name: str,
value: str,
content_type: Optional[str] = None,
enabled: Optional[bool] = None,
not_before: Optional[datetime] = None,
expires: Optional[datetime] = None,
tags: Optional[Dict[str, str]] = None,
**kwargs: "**Any"
) -> Secret:
"""Set a secret value. Create a new secret if ``name`` is not in use. If it is, create a new version of the
secret.
:param str name: The name of the secret
:param str value: The value of the secret
:param str content_type: (optional) An arbitrary string indicating the type of the secret, e.g. 'password'
:param bool enabled: (optional) Whether the secret is enabled for use
:param datetime.datetime not_before: (optional) Not before date of the secret in UTC
:param datetime.datetime expires: (optional) Expiry date of the secret in UTC
:param dict tags: (optional) Application specific metadata in the form of key-value pairs
:rtype: ~azure.keyvault.secrets.models.Secret
:raises: :class:`~azure.core.exceptions.HttpResponseError`
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START set_secret]
:end-before: [END set_secret]
:language: python
:caption: Set a secret's value
:dedent: 8
"""
if enabled is not None or not_before is not None or expires is not None:
attributes = self._client.models.SecretAttributes(enabled=enabled, not_before=not_before, expires=expires)
else:
attributes = None
bundle = await self._client.set_secret(
self.vault_url, name, value, secret_attributes=attributes, content_type=content_type, tags=tags, **kwargs
)
return Secret._from_secret_bundle(bundle)
@distributed_trace_async
async def update_secret_properties(
self,
name: str,
version: Optional[str] = None,
content_type: Optional[str] = None,
enabled: Optional[bool] = None,
not_before: Optional[datetime] = None,
expires: Optional[datetime] = None,
tags: Optional[Dict[str, str]] = None,
**kwargs: "**Any"
) -> SecretProperties:
"""Update a secret's attributes, such as its tags or whether it's enabled. Requires the secrets/set permission.
**This method can't change a secret's value.** Use :func:`set_secret` to change values.
:param str name: Name of the secret
:param str version: (optional) Version of the secret to update. If unspecified, the latest version is updated.
:param str content_type: (optional) An arbitrary string indicating the type of the secret, e.g. 'password'
:param bool enabled: (optional) Whether the secret is enabled for use
:param datetime.datetime not_before: (optional) Not before date of the secret in UTC
:param datetime.datetime expires: (optional) Expiry date of the secret in UTC.
:param dict(str, str) tags: (optional) Application specific metadata in the form of key-value pairs.
:rtype: ~azure.keyvault.secrets.models.SecretProperties
:raises:
:class:`~azure.core.exceptions.ResourceNotFoundError` if the secret doesn't exist,
:class:`~azure.core.exceptions.HttpResponseError` for other errors
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START update_secret]
:end-before: [END update_secret]
:language: python
:caption: Updates a secret's attributes
:dedent: 8
"""
if enabled is not None or not_before is not None or expires is not None:
attributes = self._client.models.SecretAttributes(enabled=enabled, not_before=not_before, expires=expires)
else:
attributes = None
bundle = await self._client.update_secret(
self.vault_url,
name,
secret_version=version or "",
content_type=content_type,
tags=tags,
secret_attributes=attributes,
error_map={404: ResourceNotFoundError},
**kwargs
)
return SecretProperties._from_secret_bundle(bundle) # pylint: disable=protected-access
@distributed_trace
def list_secrets(self, **kwargs: "**Any") -> AsyncIterable[SecretProperties]:
"""List the latest identifier and attributes of all secrets in the vault, not including their values. Requires
the secrets/list permission.
:returns: An iterator of secrets
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.secrets.models.SecretProperties]
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START list_secrets]
:end-before: [END list_secrets]
:language: python
:caption: Lists all secrets
:dedent: 8
"""
max_results = kwargs.get("max_page_size")
return self._client.get_secrets(
self.vault_url,
maxresults=max_results,
cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs],
**kwargs
)
@distributed_trace
def list_secret_versions(self, name: str, **kwargs: "**Any") -> AsyncIterable[SecretProperties]:
"""List all versions of a secret, including their identifiers and attributes but not their values. Requires the
secrets/list permission.
:param str name: Name of the secret
:returns: An iterator of secrets
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.secrets.models.SecretProperties]
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START list_secret_versions]
:end-before: [END list_secret_versions]
:language: python
:caption: List all versions of a secret
:dedent: 8
"""
max_results = kwargs.get("max_page_size")
return self._client.get_secret_versions(
self.vault_url,
name,
maxresults=max_results,
cls=lambda objs: [SecretProperties._from_secret_item(x) for x in objs],
**kwargs
)
@distributed_trace_async
async def backup_secret(self, name: str, **kwargs: "**Any") -> bytes:
"""Get a backup of all versions of a secret. Requires the secrets/backup permission.
:param str name: Name of the secret
:returns: The raw bytes of the secret backup
:rtype: bytes
:raises:
:class:`~azure.core.exceptions.ResourceNotFoundError` if the secret doesn't exist,
:class:`~azure.core.exceptions.HttpResponseError` for other errors
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START backup_secret]
:end-before: [END backup_secret]
:language: python
:caption: Back up a secret
:dedent: 8
"""
backup_result = await self._client.backup_secret(
self.vault_url, name, error_map={404: ResourceNotFoundError}, **kwargs
)
return backup_result.value
@distributed_trace_async
async def restore_secret(self, backup: bytes, **kwargs: "**Any") -> SecretProperties:
"""Restore a backed up secret. Requires the secrets/restore permission.
:param bytes backup: The raw bytes of the secret backup
:returns: The restored secret
:rtype: ~azure.keyvault.secrets.models.SecretProperties
:raises:
:class:`~azure.core.exceptions.ResourceExistsError` if the secret's name is already in use,
:class:`~azure.core.exceptions.HttpResponseError` for other errors
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START restore_secret]
:end-before: [END restore_secret]
:language: python
:caption: Restore a backed up secret
:dedent: 8
"""
bundle = await self._client.restore_secret(
self.vault_url, backup, error_map={409: ResourceExistsError}, **kwargs
)
return SecretProperties._from_secret_bundle(bundle)
@distributed_trace_async
async def delete_secret(self, name: str, **kwargs: "**Any") -> DeletedSecret:
"""Delete all versions of a secret. Requires the secrets/delete permission.
:param str name: Name of the secret
:rtype: ~azure.keyvault.secrets.models.DeletedSecret
:raises:
:class:`~azure.core.exceptions.ResourceNotFoundError` if the secret doesn't exist,
:class:`~azure.core.exceptions.HttpResponseError` for other errors
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START delete_secret]
:end-before: [END delete_secret]
:language: python
:caption: Delete a secret
:dedent: 8
"""
bundle = await self._client.delete_secret(
self.vault_url, name, error_map={404: ResourceNotFoundError}, **kwargs
)
return DeletedSecret._from_deleted_secret_bundle(bundle)
@distributed_trace_async
async def get_deleted_secret(self, name: str, **kwargs: "**Any") -> DeletedSecret:
"""Get a deleted secret. This is only possible in vaults with soft-delete enabled. Requires the secrets/get
permission.
:param str name: Name of the secret
:rtype: ~azure.keyvault.secrets.models.DeletedSecret
:raises:
:class:`~azure.core.exceptions.ResourceNotFoundError` if the deleted secret doesn't exist,
:class:`~azure.core.exceptions.HttpResponseError` for other errors
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START get_deleted_secret]
:end-before: [END get_deleted_secret]
:language: python
:caption: Get a deleted secret
:dedent: 8
"""
bundle = await self._client.get_deleted_secret(
self.vault_url, name, error_map={404: ResourceNotFoundError}, **kwargs
)
return DeletedSecret._from_deleted_secret_bundle(bundle)
@distributed_trace
def list_deleted_secrets(self, **kwargs: "**Any") -> AsyncIterable[DeletedSecret]:
"""Lists all deleted secrets. This is only possible in vaults with soft-delete enabled. Requires the
secrets/list permission.
:returns: An iterator of deleted secrets
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.keyvault.secrets.models.DeletedSecret]
Example:
.. literalinclude:: ../tests/test_samples_secrets_async.py
:start-after: [START list_deleted_secrets]
:end-before: [END list_deleted_secrets]
:language: python
:caption: Lists deleted secrets
:dedent: 8
"""
max_results = kwargs.get("max_page_size")
return self._client.get_deleted_secrets(
self.vault_url,
maxresults=max_results,
cls=lambda objs: [DeletedSecret._from_deleted_secret_item(x) for x in objs],
**kwargs
)
@distributed_trace_async
async def purge_deleted_secret(self, name: str, **kwargs: "**Any") -> None:
"""Permanently delete a secret. This is only possible in vaults with soft-delete enabled. If a vault
doesn't have soft-delete enabled, :func:`delete_secret` is permanent, and this method will return an error.
Requires the secrets/purge permission.
:param str name: Name of the secret
:returns: None
:raises: :class:`~azure.core.exceptions.HttpResponseError`
Example:
.. code-block:: python
# if the vault has soft-delete enabled, purge permanently deletes the secret
# (with soft-delete disabled, delete_secret is permanent)
await secret_client.purge_deleted_secret("secret-name")
"""
await self._client.purge_deleted_secret(self.vault_url, name, **kwargs)
@distributed_trace_async
async def recover_deleted_secret(self, name: str, **kwargs: "**Any") -> SecretProperties:
"""Recover a deleted secret to its latest version. This is only possible in vaults with soft-delete enabled.
Requires the secrets/recover permission.
:param str name: Name of the secret
:returns: The | |
-1.7254034675761418], [-1.689109989351012, -5.6713946772780321, -1.7192311513713561,
-7.5505076188691662], [-0.9221994157609279, -3.5401612884317997, 0.80850427916708423, 1.0199139822616425]]],
[[[-7.7451841209074272, -3.3370280271940906, -5.4640503616346594, 0.4768178331451427], [-3.0941833085426742,
-8.3605936822477567, -6.1732076120173884, -6.2139892664029643], [-2.9507932621710706, 1.2908432046871843,
-6.1553888846475369, -3.4576864676245389]], [[-5.4605831983232953, -4.5632588207560607, -2.3506306959395573,
-7.2160830348517075], [-4.553733044696977, 0.49804109061298707, -7.0464564944227819, -1.840791513621308],
[-0.83084338896122745, -0.43439469601747493, -4.9305653378866143, -1.2823102544613527]]], [[[-0.74612019769311644,
-2.8609369043905408, -4.542597607847612, -3.7269182378522592], [-5.9158668751936148, -8.5811968066770881,
-1.8717775370272092, -5.1936977784481213], [-6.2973240104558732, -0.39261025846176612, -3.1849805513902769,
-0.30856426958445482]], [[-4.8998227094309055, -3.5498670530165466, -6.8546615379105047, -0.67212150315726138],
[-7.9469373051384116, -0.306764496474214, -7.0758991746099111, -5.1900948687459909], [-1.6865641149686867,
0.8622113075167519, 0.22930603944946082, -2.8416267020685204]]]]),self.functionspace)
self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
self.assertEqual(res.getShape(),(3, 2, 3, 4),"wrong shape of result.")
self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_constData_rank1_Symbol_rank0(self):
arg0=Data(numpy.array([2.6649927252905226, 0.29496968217893382]),self.functionspace)
arg1=Symbol(shape=())
res=arg0-arg1
s1=numpy.array(1.03366663195)
sub=res.substitute({arg1:s1})
ref=Data(numpy.array([1.6313260933372291, -0.73869694977435962]),self.functionspace)
self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
self.assertEqual(res.getShape(),(2,),"wrong shape of result.")
self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_constData_rank1_Symbol_rank1(self):
arg0=Data(numpy.array([3.9090880537794526, -3.9706193840215942]),self.functionspace)
arg1=Symbol(shape=(2,))
res=arg0-arg1
s1=numpy.array([-3.7233870114697742, 0.99043840493200186])
sub=res.substitute({arg1:s1})
ref=Data(numpy.array([7.6324750652492268, -4.9610577889535961]),self.functionspace)
self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
self.assertEqual(res.getShape(),(2,),"wrong shape of result.")
self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def test_sub_overloaded_constData_rank2_Symbol_rank0(self):
        """Overloaded '-' : rank-2 constant Data minus a scalar Symbol; checks result type, shape and values."""
        arg0=Data(numpy.array([[2.8033126273843685, 0.51509190965393792, 3.931306976936968, -3.3823534090429486,
        -2.3486719525293087], [-2.9837425664154784, -2.4457160287299686, 3.8981965382683743, -0.89609359902144714,
        4.1620406111464288], [3.6868893591462246, -2.9993029597001462, 1.8283120616948665, -2.0195573949932277,
        -2.1640627499057361], [-2.9723279323425489, -4.8559061533246624, -1.0130455282709172, -3.7833351321644395,
        3.514692525422209]]),self.functionspace)
        arg1=Symbol(shape=())
        res=arg0-arg1
        s1=numpy.array(4.86937457463)
        sub=res.substitute({arg1:s1})
        # Expected values were pre-computed by the test generator as arg0 - s1.
        ref=Data(numpy.array([[-2.0660619472497519, -4.3542826649801825, -0.93806759769715242, -8.2517279836770694,
        -7.2180465271634286], [-7.8531171410495988, -7.315090603364089, -0.97117803636574607, -5.7654681736555675,
        -0.70733396348769162], [-1.1824852154878958, -7.8686775343342665, -3.0410625129392539, -6.8889319696273486,
        -7.0334373245398565], [-7.8417025069766693, -9.7252807279587827, -5.8824201029050371, -8.6527097067985608,
        -1.3546820492119114]]),self.functionspace)
        self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
        self.assertEqual(res.getShape(),(4, 5),"wrong shape of result.")
        self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def test_sub_overloaded_constData_rank2_Symbol_rank2(self):
        """Overloaded '-' : rank-2 constant Data minus a rank-2 Symbol; checks result type, shape and values."""
        arg0=Data(numpy.array([[-1.1140360715186182, -1.5235600156934481, 4.3075103934286023, 4.6800377743432158,
        -3.2505150436972521], [0.39123458636258768, 0.41088806870879768, -2.9614108446790501, 1.1049238977643405,
        0.92166667279843395], [0.54565864417397059, -4.8476249672143004, 4.9444652981547943, 4.0252126389168215,
        -3.9123423425216322], [-3.6777596228844844, -3.4408972758983558, 2.7718180074050611, -0.3997152204895924,
        -0.16573647825956073]]),self.functionspace)
        arg1=Symbol(shape=(4, 5))
        res=arg0-arg1
        s1=numpy.array([[-2.4209487163246299, 1.3152643083131128, -0.71046464711788015, 0.21557543046364458,
        -2.202065459251934], [-3.9101544501984198, -2.8682151089642827, 2.7125251197023488, 1.4173123031722534,
        2.7246295240806209], [-1.5744991442525436, 3.0598215212654001, 0.63494427405471487, -4.906149376046594,
        -1.6839564426436748], [4.0729555430880922, -0.83371622418680769, 0.46337987461630981, 4.0014755703742395,
        -2.1103899940006032]])
        sub=res.substitute({arg1:s1})
        # Expected values were pre-computed by the test generator as arg0 - s1.
        ref=Data(numpy.array([[1.3069126448060118, -2.8388243240065609, 5.0179750405464825, 4.4644623438795712,
        -1.0484495844453181], [4.301389036561007, 3.2791031776730803, -5.6739359643813989, -0.31238840540791291,
        -1.8029628512821869], [2.1201577884265141, -7.9074464884797004, 4.3095210241000794, 8.9313620149634154,
        -2.2283858998779573], [-7.7507151659725766, -2.6071810517115481, 2.3084381327887513, -4.4011907908638319,
        1.9446535157410425]]),self.functionspace)
        self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
        self.assertEqual(res.getShape(),(4, 5),"wrong shape of result.")
        self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def test_sub_overloaded_constData_rank3_Symbol_rank0(self):
        """Overloaded '-' : rank-3 constant Data minus a scalar Symbol; checks result type, shape and values."""
        arg0=Data(numpy.array([[[-2.6064326776506652, 4.9989076052590633], [-3.0068821433777249, -3.1193113732509516]],
        [[-1.3190483681618739, 3.9479827067009108], [1.0954417889014865, 4.6359051697534426]], [[-2.9778493741722056,
        3.4845430816156977], [1.7569072943914552, 1.1616150547614428]], [[-0.91210869485198565, -1.3406976214361355],
        [3.2217649968914159, -2.662260898242006]], [[4.1697693146337542, -1.1741423631833072], [-4.9803850608859115,
        1.2700647554700222]], [[4.6074170359664368, 1.453706456526124], [0.20949339688511692,
        3.0091215511346796]]]),self.functionspace)
        arg1=Symbol(shape=())
        res=arg0-arg1
        s1=numpy.array(-1.04145599079)
        sub=res.substitute({arg1:s1})
        # Expected values were pre-computed by the test generator as arg0 - s1.
        ref=Data(numpy.array([[[-1.5649766868561219, 6.0403635960536066], [-1.9654261525831815, -2.0778553824564083]],
        [[-0.27759237736733056, 4.9894386974954541], [2.1368977796960298, 5.6773611605479859]], [[-1.9363933833776623,
        4.525999072410241], [2.7983632851859985, 2.2030710455559861]], [[0.12934729594255767, -0.29924163064159215],
        [4.2632209876859593, -1.6208049074474626]], [[5.2112253054282975, -0.13268637238876391], [-3.9389290700913682,
        2.3115207462645655]], [[5.6488730267609801, 2.4951624473206673], [1.2509493876796602,
        4.0505775419292229]]]),self.functionspace)
        self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
        self.assertEqual(res.getShape(),(6, 2, 2),"wrong shape of result.")
        self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def test_sub_overloaded_constData_rank3_Symbol_rank3(self):
        """Overloaded '-' : rank-3 constant Data minus a rank-3 Symbol; checks result type, shape and values."""
        arg0=Data(numpy.array([[[2.0075159970537113, 4.417162011434554], [0.71949384400506577, 1.0783048900035652]],
        [[4.7614254606302335, -2.0888542276996978], [-3.5997702799671547, 4.2825487871951644]], [[-0.39389734575197544,
        1.3283252585178928], [3.6919455158435834, -0.76277259642421402]], [[-4.4972180700076887, -3.7983795355307128],
        [-0.26779668046970784, -0.79380221724008582]], [[-2.0572521505738273, -1.5154686544559368], [4.0972713376059851,
        4.5986089620495108]], [[-1.3971821196462377, 0.16028646761807508], [-0.63755809097850857,
        -3.3787710682197272]]]),self.functionspace)
        arg1=Symbol(shape=(6, 2, 2))
        res=arg0-arg1
        s1=numpy.array([[[3.5103565349856751, 0.91526758558677379], [-3.7224124618951135, -0.27931399630195397]],
        [[1.5813622936549105, 3.6172915696233972], [-1.2364412564258132, 0.16417768270487709]], [[0.64050559170122234,
        4.6361361331624593], [-0.47839680540824325, -2.1615310941440589]], [[-0.85667930966756511, 1.669882578368358],
        [0.22343162562157293, 0.80905790542025358]], [[-3.5873387244847543, 3.1163266795230058], [3.5553732672252671,
        -4.6758779472194405]], [[3.6742958529176484, 0.58762359541383802], [1.5778519953325496, -0.39731537378910975]]])
        sub=res.substitute({arg1:s1})
        # Expected values were pre-computed by the test generator as arg0 - s1.
        ref=Data(numpy.array([[[-1.5028405379319638, 3.5018944258477802], [4.4419063059001793, 1.3576188863055192]],
        [[3.180063166975323, -5.7061457973230949], [-2.3633290235413416, 4.1183711044902873]], [[-1.0344029374531978,
        -3.3078108746445665], [4.1703423212518267, 1.3987584977198448]], [[-3.6405387603401236, -5.4682621138990708],
        [-0.49122830609128076, -1.6028601226603394]], [[1.5300865739109271, -4.6317953339789426], [0.54189807038071791,
        9.2744869092689513]], [[-5.0714779725638861, -0.42733712779576294], [-2.2154100863110582,
        -2.9814556944306174]]]),self.functionspace)
        self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
        self.assertEqual(res.getShape(),(6, 2, 2),"wrong shape of result.")
        self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def test_sub_overloaded_constData_rank4_Symbol_rank0(self):
        """Overloaded '-' : rank-4 constant Data minus a scalar Symbol; checks result type, shape and values."""
        arg0=Data(numpy.array([[[[0.66483074145605592, 2.9129070748039982, -1.8655842911981346, -1.098354904466996],
        [1.7426470733136448, -2.4896761957460898, 4.3864323453867851, -4.0781460331955177], [-0.62183708580819008,
        -2.6186592235582786, -1.8750164189422014, -3.9631241880095969]], [[4.0419620323350909, 0.15536839603964836,
        1.9771157591398101, -2.6101097405194453], [-4.7364297803535704, 1.8318126417179714, 3.2354822684907454,
        2.2507758179659376], [-4.8699934080808029, -0.35744120243411981, 4.0908957400805122, -3.8440017446794084]]],
        [[[4.5466344627836612, -2.8174576749848423, -0.32339288977492142, -3.3368918944053516], [3.3311423168153738,
        -1.2448667289851647, -0.66737673743075376, -3.9953617725851598], [-4.8878412407428931, 3.1347720870691358,
        -2.4390985397355847, -3.5615840737730475]], [[-3.7978882365989697, 4.345238312451805, 2.8310129832366435,
        2.8564779239624674], [-0.85025481289091864, -4.3757742754757345, 3.5451710843902031, -2.5068001174158816],
        [2.6943798866386315, 2.2746017608025317, -4.2655778273063607, 0.97165631163417387]]], [[[-2.9330039029788955,
        4.3910413333213238, 2.5513441899802833, -3.8678703253194402], [-2.6748516851594308, -3.8887038302549062,
        1.2485088138696518, -3.9629424578182251], [-0.38166273681210328, 3.82781593241344, -4.1817331752844087,
        4.682478964767725]], [[-0.85849290617372809, -0.49338756563096275, -1.0480256440941615, -0.51008618582467946],
        [-0.26820315453886501, 4.8354933917592806, 2.9555158912003154, -2.4766421456452479], [2.5098219987182944,
        3.6215601735655589, -4.4497307132070123, -3.9295385075107028]]]]),self.functionspace)
        arg1=Symbol(shape=())
        res=arg0-arg1
        s1=numpy.array(-2.59361652138)
        sub=res.substitute({arg1:s1})
        # Expected values were pre-computed by the test generator as arg0 - s1.
        ref=Data(numpy.array([[[[3.2584472628375467, 5.506523596185489, 0.72803223018335617, 1.4952616169144948],
        [4.3362635946951356, 0.10394032563540101, 6.9800488667682759, -1.4845295118140269], [1.9717794355733007,
        -0.025042702176787834, 0.7186001024392894, -1.3695076666281061]], [[6.6355785537165817, 2.7489849174211392,
        4.5707322805213009, -0.01649321913795454], [-2.1428132589720796, 4.4254291630994622, 5.8290987898722362,
        4.8443923393474284], [-2.2763768866993122, 2.236175318947371, 6.6845122614620029, -1.2503852232979176]]],
        [[[7.140250984165152, -0.22384115360335155, 2.2702236316065694, -0.74327537302386082], [5.9247588381968646,
        1.3487497923963261, 1.926239783950737, -1.401745251203669], [-2.2942247193614023, 5.7283886084506266,
        0.15451798164590613, -0.96796755239155674]], [[-1.2042717152174789, 6.9388548338332958, 5.4246295046181343,
        5.4500944453439581], [1.7433617084905721, -1.7821577540942437, 6.1387876057716939, 0.08681640396560919],
        [5.2879964080201223, 4.8682182821840225, -1.6719613059248699, 3.5652728330156647]]], [[[-0.33938738159740467,
        6.9846578547028146, 5.1449607113617741, -1.2742538039379494], [-0.081235163777940045, -1.2950873088734154,
        3.8421253352511426, -1.3693259364367343], [2.2119537845693875, 6.4214324537949308, -1.5881166539029179,
        7.2760954861492158]], [[1.7351236152077627, 2.100228955750528, 1.5455908772873292, 2.0835303355568113],
        [2.3254133668426258, 7.4291099131407714, 5.5491324125818062, 0.11697437573624292], [5.1034385200997852,
        6.2151766949470497, -1.8561141918255215, -1.335921986129212]]]]),self.functionspace)
        self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
        self.assertEqual(res.getShape(),(3, 2, 3, 4),"wrong shape of result.")
        self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    def test_sub_overloaded_constData_rank4_Symbol_rank4(self):
        """Overloaded '-' : rank-4 constant Data minus a rank-4 Symbol; checks result type, shape and values."""
        arg0=Data(numpy.array([[[[2.140332416756844, -4.5756565160935745, 1.0268217328307561, 1.594533973931731],
        [4.1426026647673879, 0.1548614651600202, 3.351820863446946, 0.54777524679756073], [-4.6470169243406527,
        -3.4101935702258368, 1.3604597013400213, -4.3236653508957374]], [[2.3543066928954612, 1.6355558219698443,
        3.8590758340122093, 0.055467084597328409], [1.3949738751098479, -2.9042097100731445, 2.1331143130237962,
        -0.45715627400394165], [3.9505052117900146, -4.8644226435153097, 0.13641466419900183, 0.92434447564323374]]],
        [[[-4.2036478385109302, -2.2096856472681958, -3.309442061812593, -0.17761420723311439], [-4.5417481392819026,
        3.354117107537796, 2.9925164896060084, 4.231145636082223], [-4.3165407391400308, -0.16204594013147311,
        -1.5308101185053733, 3.7017204822457384]], [[2.4648028362561725, 0.43817614121240833, -4.4908194091317366,
        -0.081928750874263656], [-3.4087689978816016, 4.259133980931324, -4.2850896710829334, 4.6395735766216326],
        [-1.3584480043808989, -4.7738821023855085, -1.2617431337636842, -1.2598313032270116]]], [[[2.2708892792624855,
        1.9132737394453327, -0.50215367058696003, 0.19108419265161469], [-2.0796597802531669, 1.1505151966811367,
        1.2957662425378791, -1.5883201097665802], [-1.7035021892623838, 4.8639671345493021, 3.1243484697100534,
        0.47610495992410051]], [[-4.0444287366693015, -1.3614006776767349, -0.18268931922481002, 4.8063591217845332],
        [3.1407426206783704, 2.8940879164962441, -4.9664997014592807, 1.6951588068340158], [-3.895479459710558,
        1.7220903215355694, -3.7165673657855267, 3.1903385713544257]]]]),self.functionspace)
        arg1=Symbol(shape=(3, 2, 3, 4))
        res=arg0-arg1
        s1=numpy.array([[[[-4.3482304868754991, -1.2480666735558845, 0.43538858115159051, -2.0858236027245205],
        [-2.442305699452354, 2.0213192586154003, -2.5262404161243679, -4.458062700052194], [0.26228138879138641,
        -2.6430658161459242, -4.7246503759525602, 4.2538788761081854]], [[-1.6124403577544308, -1.8284497197976037,
        -3.0160374139385002, 2.7523938918136759], [1.4437250527651582, -2.7814473787336489, 3.5116683735594361,
        -3.9808640616716562], [1.7054962689298705, 4.7974185413341068, 1.9447068850818283, -1.2797130952071156]]],
        [[[3.7642823106611107, 0.11145650212965919, -0.096799862214571597, 2.0215787533002523], [0.26390717935294816,
        0.12612295721321498, 4.0275730341758482, -1.2268861937462172], [-2.947926663434548, -1.4514539315574626,
        2.4550945474164232, -2.7897655841602651]], [[-1.5947829088079746, 0.80620330852535815, -4.5614285986030234,
        -1.9102368071164841], [2.0807019362652692, -4.099640999530064, -1.8395330667711352, -4.6367501410986929],
        [-2.5162327168837786, 4.6954385782651951, -2.1576821461704854, -1.62194811763983]]], [[[0.06729391952569852,
        -0.57919376543293488, -3.1838952254737416, 1.7056529660452817], [3.6116233555564143, 0.81964000588296315,
        -0.16440769780998377, 0.079355513141521783], [2.9805073823987431, 1.3188532056435962, 3.4153481616516537,
        -2.5138710663982189]], [[2.8884594089569315, 1.1351683507610142, -0.68804270946144719, -4.7325886514124882],
        [1.1204800401276476, 0.55566378590737031, 0.94240513232859335, 2.9610440134171334], [-2.6222587774463815,
        -4.4048348584786705, -0.29650368246657699, -1.0078523107846902]]]])
        sub=res.substitute({arg1:s1})
        # Expected values were pre-computed by the test generator as arg0 - s1.
        ref=Data(numpy.array([[[[6.4885629036323431, -3.32758984253769, 0.59143315167916555, 3.6803575766562515],
        [6.5849083642197419, -1.8664577934553801, 5.8780612795713143, 5.0058379468497547], [-4.9092983131320391,
        -0.76712775407991263, 6.0851100772925815, -8.5775442270039228]], [[3.9667470506498921, 3.464005541767448,
        6.8751132479507095, -2.6969268072163475], [-0.048751177655310229, -0.12276233133949566, -1.3785540605356399,
        3.5237077876677145], [2.2450089428601441, -9.6618411848494166, -1.8082922208828265, 2.2040575708503494]]],
        [[[-7.9679301491720409, -2.321142149397855, -3.2126421995980214, -2.1991929605333667], [-4.8056553186348507,
        3.227994150324581, -1.0350565445698399, 5.4580318298284407], [-1.3686140757054828, 1.2894079914259895,
        -3.9859046659217965, 6.4914860664060035]], [[4.0595857450641475, -0.36802716731294982, 0.070609189471286804,
        1.8283080562422205], [-5.4894709341468708, 8.3587749804613871, -2.4455566043117982, 9.2763237177203255],
        [1.1577847125028797, -9.4693206806507035, 0.89593901240680118, 0.3621168144128184]]], [[[2.203595359736787,
        2.4924675048782676, 2.6817415548867816, -1.514568773393667], [-5.6912831358095808, 0.33087519079817351,
        1.4601739403478629, -1.667675622908102], [-4.684009571661127, 3.5451139289057059, -0.29099969194160025,
        2.9899760263223194]], [[-6.932888145626233, -2.4965690284377491, 0.50535339023663717, 9.5389477731970214],
        [2.0202625805507228, 2.3384241305888738, -5.908904833787874, -1.2658852065831177], [-1.2732206822641765,
        6.1269251800142399, -3.4200636833189497, 4.1981908821391158]]]]),self.functionspace)
        self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
        self.assertEqual(res.getShape(),(3, 2, 3, 4),"wrong shape of result.")
        self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_taggedData_rank0_Symbol_rank0(self):
arg0=Data(-2.29417952191,self.functionspace)
arg0.setTaggedValue(1,-4.27612309963)
arg1=Symbol(shape=())
res=arg0-arg1
s1=numpy.array(-2.86386679086)
sub=res.substitute({arg1:s1})
ref=Data(0.569687268944,self.functionspace)
ref.setTaggedValue(1,-1.41225630877)
self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
self.assertEqual(res.getShape(),(),"wrong shape of result.")
self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_taggedData_rank0_Symbol_rank1(self):
    """Tagged rank-0 Data minus rank-1 Symbol: check type, shape and values."""
    tagged = Data(-4.72691427991, self.functionspace)
    tagged.setTaggedValue(1, 0.483106242273)
    sym = Symbol(shape=(2,))
    res = tagged - sym
    # Precomputed expected results for the default and tagged components.
    ref = Data(numpy.array([-4.1417542424175267, -7.6592261027374491]), self.functionspace)
    ref.setTaggedValue(1, numpy.array([1.0682662797700972, -2.4492055805498252]))
    # Substitute a concrete array for the Symbol and evaluate.
    sub = res.substitute({sym: numpy.array([-0.58516003749737244, 2.93231182282255])})
    self.assertTrue(isinstance(res, Symbol), "wrong type of result.")
    self.assertEqual(res.getShape(), (2,), "wrong shape of result.")
    self.assertTrue(Lsup(sub - ref) <= self.RES_TOL * Lsup(ref), "wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_taggedData_rank0_Symbol_rank2(self):
    """Tagged rank-0 Data minus rank-2 Symbol (generated regression test).

    Forms res = arg0 - arg1 symbolically, substitutes a concrete array for
    the Symbol, and compares against precomputed references for both the
    default and the tagged (tag 1) component, within self.RES_TOL.
    """
    arg0=Data(4.84060376911,self.functionspace)
    arg0.setTaggedValue(1,-3.32867505476)
    arg1=Symbol(shape=(4, 5))
    res=arg0-arg1
    # Concrete value substituted for the Symbol.
    s1=numpy.array([[3.5332516865172998, 4.2256878903288939, -4.6404295927681405, 4.9721874322243114,
    -1.5545932240349902], [0.40603544670242542, -2.879718425724147, -2.1385047584627337, 4.6127992237598132,
    0.57646645021785048], [-2.6334801212800754, -2.3655947826469701, 0.48086858542515643, 1.0360291664664301,
    -3.4378490059536082], [-0.23853194944872236, -2.0363663305583768, -2.3289186751171798, 3.5102407359843486,
    4.1303419895739388]])
    sub=res.substitute({arg1:s1})
    # Expected result for the default component.
    ref=Data(numpy.array([[1.3073520825884426, 0.6149158787768485, 9.4810333618738838, -0.13158366311856895,
    6.3951969931407326], [4.434568322403317, 7.7203221948298895, 6.9791085275684761, 0.2278045453459292,
    4.2641373188878919], [7.4740838903858178, 7.2061985517527125, 4.359735183680586, 3.8045746026393124,
    8.2784527750593497], [5.0791357185544648, 6.8769700996641188, 7.1695224442229222, 1.3303630331213938,
    0.71026177953180358]]),self.functionspace)
    # Expected result for tag 1.
    ref.setTaggedValue(1,numpy.array([[-6.8619267412736988, -7.5543629450852929, 1.3117545380117415,
    -8.3008624869807104, -1.7740818307214088], [-3.7347105014588244, -0.44895662903225197, -1.1901702962936653,
    -7.9414742785162122, -3.9051415049742495], [-0.69519493347632366, -0.96308027210942893, -3.8095436401815554,
    -4.3647042212228291, 0.10917395119720918], [-3.0901431053076767, -1.2923087241980222, -0.99975637963921926,
    -6.8389157907407476, -7.4590170443303379]]))
    self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
    self.assertEqual(res.getShape(),(4, 5),"wrong shape of result.")
    self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_taggedData_rank0_Symbol_rank3(self):
    """Tagged rank-0 Data minus rank-3 Symbol (generated regression test).

    Forms res = arg0 - arg1 symbolically, substitutes a concrete array for
    the Symbol, and compares against precomputed references for both the
    default and the tagged (tag 1) component, within self.RES_TOL.
    """
    arg0=Data(-3.20552188916,self.functionspace)
    arg0.setTaggedValue(1,-0.473083670166)
    arg1=Symbol(shape=(6, 2, 2))
    res=arg0-arg1
    # Concrete value substituted for the Symbol.
    s1=numpy.array([[[0.71230320805011704, -3.008236723891188], [0.81066003773158002, -3.6043239509733382]],
    [[3.691034498943317, -3.3919882986743777], [0.84551364067512935, 3.3207859438709946]], [[0.41963337446652105,
    -3.6038224020133991], [-2.3537235378574151, -3.7120927558232997]], [[-3.4588851001838727, -0.31880183563871789],
    [-1.3379489058063267, -3.9118810181560226]], [[4.4984539881701195, -3.2158956295350851], [1.5013508852420685,
    2.8717656529358955]], [[-0.13701019263353231, -3.1176264463626078], [-1.67955120335195, 4.317481449568719]]])
    sub=res.substitute({arg1:s1})
    # Expected result for the default component.
    ref=Data(numpy.array([[[-3.917825097207726, -0.19728516526642093], [-4.016181926889189, 0.3988020618157293]],
    [[-6.896556388100926, 0.18646640951676874], [-4.0510355298327383, -6.5263078330286035]], [[-3.62515526362413,
    0.39830051285579016], [-0.85179835130019388, 0.50657086666569073]], [[0.2533632110262638, -2.886720053518891],
    [-1.8675729833512822, 0.70635912899841369]], [[-7.7039758773277285, 0.010373740377476182], [-4.7068727743996774,
    -6.0772875420935044]], [[-3.0685116965240766, -0.087895442795001166], [-1.525970685805659,
    -7.523003338726328]]]),self.functionspace)
    # Expected result for tag 1.
    ref.setTaggedValue(1,numpy.array([[[-1.1853868782160886, 2.5351530537252165], [-1.2837437078975515,
    3.1312402808073667]], [[-4.1641181691092886, 2.9189046285084062], [-1.3185973108411009, -3.7938696140369661]],
    [[-0.89271704463249257, 3.1307387318474276], [1.8806398676914435, 3.2390090856573281]], [[2.9858014300179012,
    -0.15428183452725364], [0.86486523564035522, 3.4387973479900511]], [[-4.9715376583360911, 2.7428119593691136],
    [-1.97443455540804, -3.344849323101867]], [[-0.33607347753243921, 2.6445427761966362], [1.2064675331859784,
    -4.7905651197346906]]]))
    self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
    self.assertEqual(res.getShape(),(6, 2, 2),"wrong shape of result.")
    self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_taggedData_rank0_Symbol_rank4(self):
    """Tagged rank-0 Data minus rank-4 Symbol (generated regression test).

    Forms res = arg0 - arg1 symbolically, substitutes a concrete array for
    the Symbol, and compares against precomputed references for both the
    default and the tagged (tag 1) component, within self.RES_TOL.
    """
    arg0=Data(-0.215341183726,self.functionspace)
    arg0.setTaggedValue(1,-3.01917111711)
    arg1=Symbol(shape=(3, 2, 3, 4))
    res=arg0-arg1
    # Concrete value substituted for the Symbol.
    s1=numpy.array([[[[3.1718058337950783, -4.3218518167555349, 4.7360170033398816, 2.6415781893387447],
    [1.7953624357215787, 0.37239845986582054, 0.85595953231170441, -4.2093909477304852], [-4.0724848735753412,
    -2.3789549933876364, 3.8266481046469991, -4.4686983670793881]], [[-1.3807814097985793, -0.9345570079736385,
    3.2111606830229267, 2.5248569160832579], [-0.19847478717542089, 3.6200277417416071, -1.3367301493578787,
    -1.9914051287776093], [4.2384277387383236, -3.1625190831895669, -4.8267032630177118, -3.7590986361039294]]],
    [[[-0.96721285038350846, 0.23717549644533698, -2.0558971771798862, -2.1889488119398925], [2.1163450477817447,
    -4.308535473047935, 0.96468545582662735, 0.58036767508710252], [-0.26889479983427034, -4.6749066439752021,
    -2.6908936581627731, 3.3090528029139286]], [[1.0683391958055246, -4.3705975019062535, 4.6959723711804546,
    -0.58815635047014858], [-1.7921642772643898, 2.8079866307247423, 4.5837878995413348, -3.6656523242301429],
    [2.1083853748587442, -0.44280454111162726, -2.5427523262585563, 3.9551312168955626]]], [[[4.0479839543530591,
    1.694708528108122, -1.8081650371476021, 2.5627212563151982], [2.9443513555348222, -3.4330381296191126,
    -2.3471872352829837, 2.9291777099369405], [0.92208424820838264, -1.7857214370413055, 3.2638247404414695,
    3.3713981402987798]], [[-2.3853121535462418, 2.1417428055374232, 3.1558224539661612, -4.4802179321245248],
    [-3.0197245205703069, 2.7624146301708477, -4.6790033997765104, -4.0453165901737584], [4.8295161047601614,
    -3.5764718373510842, 4.356981591617421, -4.7034098127513264]]]])
    sub=res.substitute({arg1:s1})
    # Expected result for the default component.
    ref=Data(numpy.array([[[[-3.3871470175211567, 4.1065106330294565, -4.95135818706596, -2.856919373064823],
    [-2.0107036194476571, -0.5877396435918989, -1.0713007160377828, 3.9940497640044068], [3.8571436898492628,
    2.163613809661558, -4.0419892883730775, 4.2533571833533097]], [[1.165440226072501, 0.71921582424756014,
    -3.426501866749005, -2.7401980998093363], [-0.01686639655065747, -3.8353689254676855, 1.1213889656318003,
    1.776063945051531], [-4.4537689224644019, 2.9471778994634885, 4.6113620792916334, 3.543757452377851]]],
    [[[0.7518716666574301, -0.45251668017141533, 1.8405559934538078, 1.9736076282138142], [-2.3316862315078231,
    4.0931942893218567, -1.1800266395527057, -0.79570885881318087], [0.053553616108191981, 4.4595654602491237,
    2.4755524744366948, -3.5243939866400069]], [[-1.283680379531603, 4.1552563181801752, -4.911313554906533,
    0.37281516674407023], [1.5768230935383114, -3.0233278144508207, -4.7991290832674132, 3.4503111405040645],
    [-2.3237265585848226, 0.2274633573855489, 2.3274111425324779, -4.1704724006216409]]], [[[-4.2633251380791375,
    -1.9100497118342004, 1.5928238534215238, -2.7780624400412766], [-3.1596925392609005, 3.2176969458930342,
    2.1318460515569053, -3.1445188936630188], [-1.137425431934461, 1.5703802533152271, -3.4791659241675479,
    -3.5867393240248582]], [[2.1699709698201635, -2.3570839892635016, -3.3711636376922396, 4.2648767483984464],
    [2.8043833368442286, -2.977755813896926, 4.463662216050432, 3.8299754064476801], [-5.0448572884862397,
    3.3611306536250058, -4.5723227753434994, 4.4880686290252481]]]]),self.functionspace)
    # Expected result for tag 1.
    ref.setTaggedValue(1,numpy.array([[[[-6.1909769509085075, 1.3026806996421056, -7.7551881204533109,
    -5.6607493064521739], [-4.8145335528350079, -3.3915695769792498, -3.8751306494251336, 1.1902198306170559],
    [1.0533137564619119, -0.64021612372579284, -6.8458192217604283, 1.4495272499659588]], [[-1.6383897073148499,
    -2.0846141091397907, -6.2303318001363559, -5.5440280331966871], [-2.8206963299380083, -6.6391988588550364,
    -1.6824409677555505, -1.0277659883358199], [-7.2575988558517528, 0.14334796607613765, 1.8075321459042826,
    0.73992751899050013]]], [[[-2.0519582667299208, -3.2563466135587662, -0.96327393993354304, -0.83022230517353668],
    [-5.1355161648951739, 1.2893643559345058, -3.9838565729400566, -3.5995387922005317], [-2.7502763172791589,
    1.6557355268617728, -0.32827745895065608, -6.3282239200273578]], [[-4.0875103129189538, 1.3514263847928243,
    -7.7151434882938839, -2.4310147666432806], [-1.2270068398490395, -5.8271577478381715, -7.602959016654764,
    0.64648120711671364], [-5.1275564919721734, -2.576366576001802, -0.47641879085487293, -6.9743023340089918]]],
    [[[-7.0671550714664884, -4.7138796452215512, -1.2110060799658271, -5.5818923734286274], [-5.9635224726482514,
    0.41386701250568336, -0.67198388183044555, -5.9483488270503697], [-3.9412553653218119, -1.2334496800721237,
    -6.2829958575548988, -6.390569257412209]], [[-0.63385896356718741, -5.1609139226508525, -6.1749935710795905,
    1.4610468150110956], [0.0005534034568777102, -5.7815857472842769, 1.6598322826630811, 1.0261454730603292],
    [-7.8486872218735906, 0.55730072023765498, -7.3761527087308503, 1.6842386956378972]]]]))
    self.assertTrue(isinstance(res,Symbol),"wrong type of result.")
    self.assertEqual(res.getShape(),(3, 2, 3, 4),"wrong shape of result.")
    self.assertTrue(Lsup(sub-ref)<=self.RES_TOL*Lsup(ref),"wrong result")
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_sub_overloaded_taggedData_rank1_Symbol_rank0(self):
arg0=Data(numpy.array([3.3101673523710691, 0.048409361416743124]),self.functionspace)
arg0.setTaggedValue(1,numpy.array([0.70887806236646611, -0.73932065177372408]))
arg1=Symbol(shape=())
res=arg0-arg1
s1=numpy.array(1.15960287006)
sub=res.substitute({arg1:s1})
| |
<filename>testSpot.py
#_*_coding:utf-8_*_
import copy
import os
import random
from enum import Enum
from math import floor, log, sqrt
from sys import float_info
from scipy.optimize import curve_fit
import matplotlib.pyplot as plt
import numpy as np
import cv2
import pywt
from imgSim import AirySpot
from tifffile import imread, imsave, imshow
from scipy import stats
from statsmodels.robust import stand_mad
# enumeration of locate methods
class LocateMethod(Enum):
    """Enumeration of the spot-localisation algorithms compared in this script."""
    FIT = 0  # direct gauss2d fitting on the raw window
    CENT_DEFAULT = 1  # the method used in our projects: radius 1
    # same as CENT_DEFAULT except radius 2 (uses surrounding background)
    CENT_BKG = 2
    # considers the impact of a strong second-peak intensity (radius 2, surrounding background)
    CENT_SPE = 3
    # uses row/column sums (radius 2, surrounding background)
    GRA_DEFAULT = 4
    # uses row/column sums (radius 2, surrounding background) and considers second-peak intensity
    GRA_SPE = 5
    INTERP_FIT = 6  # interpolate from 3*3 to 5*5, then do gauss2d fitting
# enumeration of prepare ints methods
class IntsMethod(Enum):
    """Enumeration of how the intensity window is prepared before locating."""
    DEFAULT = 0  # use raw pixel values as-is
    BI_LI_INTERP = 1  # presumably bilinear interpolation of the window -- confirm
def loadPts(path):
    """Load spot coordinates from a whitespace-separated text file.

    Each data line holds at least two fields; the first two are parsed as
    the (x, y) coordinates of one point.  Blank lines are skipped.

    :param path: path of the text file to read
    :return: list of (x, y) float tuples, in file order
    """
    pts = []
    # Bug fix: the 'rU' (universal-newline) open mode was removed in
    # Python 3.11; plain text mode already translates newlines.
    with open(path, 'r') as fh:
        for line in fh:
            data = line.strip().split()
            # Robustness: tolerate blank/short lines instead of IndexError.
            if len(data) >= 2:
                pts.append((float(data[0]), float(data[1])))
    return pts
def simulateSpots(img, space, step, spot_int, airy_radius,
                  in_path=None):
    """Render simulated Airy spots onto *img* (modified in place).

    Without *in_path*, spots are laid on a regular grid with a slowly
    drifting sub-pixel bias; with *in_path*, spot positions are loaded
    from that file instead.

    :param img: 2-D image array to draw into
    :param space: grid spacing in pixels between generated spots
    :param step: sub-pixel bias increment per spot (wraps back to 0 above 1.0)
    :param spot_int: peak intensity passed to the spot renderer
    :param airy_radius: Airy-disk radius passed to the spot renderer
    :param in_path: optional text file of (x, y) spot coordinates
    :return: tuple (img, list of ground-truth (x, y) spot positions)

    NOTE(review): relies on a module-level AirySpot instance ``ap`` that is
    defined elsewhere in this file -- confirm it is initialised before use.
    """
    height, width = img.shape
    pts_orig = []
    bias_x = 0.0
    bias_y = 0.0
    if not in_path:
        # Keep a 10% margin so whole spots stay inside the image.
        start_x = int(round(width * 0.1))
        start_y = int(round(height * 0.1))
        end_x = int(round(width * 0.9))
        end_y = int(round(height * 0.9))
        for y in range(start_y, end_y, space):
            for x in range(start_x, end_x, space):
                the_x = x + bias_x
                the_y = y + bias_y
                ap.createSpot(img, the_y, the_x, airy_radius, spot_int)
                pts_orig.append((the_x, the_y))
                bias_x = bias_x + step if bias_x + step <= 1.0 else 0.0
                bias_y = bias_y + step if bias_y + step <= 1.0 else 0.0
        return (img, pts_orig)
    else:
        pts = loadPts(in_path)
        for pt in pts:
            # Bug fix: this branch hard-coded the radius to 5.0, silently
            # ignoring the caller-supplied airy_radius parameter.
            ap.createSpot(img, pt[1], pt[0], airy_radius, spot_int)
        return (img, pts)
# gauss2d fit with 5*5, radius = 2
# q.shape == (25, 25)
# r.shape == (5, 5)
def gauss2dFit(pt_round, ints):
    """Sub-pixel spot centre via an intensity-weighted log-Gaussian fit.

    Fits ln(I) = C0 + C1*x + C2*y + C3*x^2 + C4*y^2 over the 5x5 window
    *ints* (weighted by I) and returns the fitted centre in image
    coordinates; ``pt_round`` is the rounded peak, the window spans
    pt_round +/- 2 in each axis.

    :param pt_round: (x, y) integer peak position
    :param ints: 5x5 window of strictly positive intensities
    :return: (x, y) sub-pixel centre
    """
    rhs = []
    rows = []
    for r in range(5):
        for c in range(5):
            w = ints[r][c]
            rhs.append(w * log(w))
            rows.append((w, w * c, w * r, w * c * c, w * r * r))
    vect_A = np.array(rhs)
    matr_B = np.array(rows)  # (25, 5) design matrix
    # Weighted least squares via reduced QR: q is (25, 5), r is (5, 5).
    q, r = np.linalg.qr(matr_B)
    C = np.linalg.inv(r).dot(q.T.dot(vect_A))
    # Vertex of the fitted paraboloid, shifted back to image coordinates.
    cx = float(pt_round[0]) - 2.0 - 0.5 * C[1] / C[3]
    cy = float(pt_round[1]) - 2.0 - 0.5 * C[2] / C[4]
    return (cx, cy)
# compute spot background
def calcSpotBkg(img, pt, radius):
    """Estimate the local background as the minimum pixel value in the
    (2*radius+1)^2 window centred on *pt*.

    :param img: 2-D image indexable as img[row][col]
    :param pt: (x, y) integer window centre
    :param radius: half-width of the square window
    :return: minimum value found in the window
    """
    x, y = pt[0], pt[1]
    side = (radius << 1) + 1
    return min(img[y - radius + r][x - radius + c]
               for r in range(side)
               for c in range(side))
# calculate centroid
def calcSpotCentDefault(img, pt, radius=1):
    """Three-point centroid refinement of the peak at *pt*.

    The local background (window minimum) is subtracted before the
    centre-of-mass offsets are computed along each axis.

    :param img: 2-D image indexable as img[row][col]
    :param pt: (x, y) integer peak position
    :param radius: window radius used for the background estimate
    :return: (x, y) sub-pixel spot centre
    """
    x, y = pt[0], pt[1]
    bkg = calcSpotBkg(img, pt, radius)
    # Background-corrected 3-pixel sums along each axis.
    row_mass = img[y][x - 1] + img[y][x] + img[y][x + 1] - 3.0 * bkg
    col_mass = img[y - 1][x] + img[y][x] + img[y + 1][x] - 3.0 * bkg
    dx = (img[y][x + 1] - img[y][x - 1]) / row_mass
    dy = (img[y + 1][x] - img[y - 1][x]) / col_mass
    return (float(x) + dx, float(y) + dy)
# calculate centroid using more detailed and specific strategy
def calcSpotCentSpe(img, pt, ratio_th, radius=2):
    """Centroid refinement that compensates for a strong secondary peak.

    If the brightest 4-neighbour of *pt* is dimmer than ratio_th times the
    peak, falls back to calcSpotCentDefault(); otherwise the centroid
    window is extended one pixel towards the secondary peak and reweighted.

    :param img: 2-D image indexable as img[row][col]
    :param pt: (x, y) integer peak position
    :param ratio_th: second-peak/peak ratio above which special handling applies
    :param radius: window radius used for the background estimate
    :return: (x, y) sub-pixel spot centre
    """
    x = pt[0]
    y = pt[1]
    # Brightest of the four direct neighbours of the peak.
    sec_peek = max(max(img[y - 1][x], img[y + 1][x]),
                   max(img[y][x - 1], img[y][x + 1]))
    ratio = sec_peek / img[y][x]
    if ratio < ratio_th:
        return calcSpotCentDefault(img, pt, radius)
    else:  # we need to consider the second peak
        bkg = calcSpotBkg(img, pt, radius)  # calculate background
        # print('bkg: %.3f' %bkg)
        if sec_peek == img[y][x - 1]:  # left: widen the x window one pixel left
            the_x = (-2.0 * (img[y][x - 2] - bkg) - (img[y][x - 1] - bkg) + (img[y][x + 1] - bkg)) / (
                img[y][x - 2] + img[y][x - 1] + img[y][x] + img[y][x + 1] - 4.0 * bkg)
            the_y = (img[y + 1][x] - img[y - 1][x]) / (
                img[y - 1][x] + img[y][x] + img[y + 1][x] - 3.0 * bkg)
        elif sec_peek == img[y][x + 1]:  # right: widen the x window one pixel right
            the_x = ((img[y][x + 1] - bkg) - (img[y][x - 1] - bkg) + 2.0 * (img[y][x + 2] - bkg)
                ) / (img[y][x - 1] + img[y][x] + img[y][x + 1] + img[y][x + 2] - 4.0 * bkg)
            the_y = (img[y + 1][x] - img[y - 1][x]) / (
                img[y - 1][x] + img[y][x] + img[y + 1][x] - 3.0 * bkg)
        elif sec_peek == img[y - 1][x]:  # up: widen the y window one pixel up
            the_x = (img[y][x + 1] - img[y][x - 1]) / \
                (img[y][x - 1] + img[y][x] + img[y][x + 1] - 3.0 * bkg)
            the_y = (-2.0 * (img[y - 2][x] - bkg) - (img[y - 1][x] - bkg) + (img[y + 1][x] - bkg)) / (
                img[y - 2][x] + img[y - 1][x] + img[y][x] + img[y + 1][x] - 4.0 * bkg)
        else:  # down: widen the y window one pixel down
            the_x = (img[y][x + 1] - img[y][x - 1]) / \
                (img[y][x - 1] + img[y][x] + img[y][x + 1] - 3.0 * bkg)
            the_y = ((img[y + 1][x] - bkg) - (img[y - 1][x] - bkg) + 2.0 * (img[y + 2][x] - bkg)
                ) / (img[y - 1][x] + img[y][x] + img[y + 1][x] + img[y + 2][x] - 4.0 * bkg)
        return (float(x) + the_x, float(y) + the_y)
# using row/col's sum as intensity: length*length
def calcSpotGravDefault(img, pt, radius=2):
    """Centre-of-gravity refinement using whole row/column sums of the
    (2*radius+1)^2 window around *pt*, after background subtraction.

    :param img: 2-D image indexable as img[row][col]
    :param pt: (x, y) integer peak position
    :param radius: half-width of the analysis window
    :return: (x, y) sub-pixel spot centre
    """
    x, y = pt[0], pt[1]
    side = (radius << 1) + 1
    bkg = calcSpotBkg(img, pt, radius)
    # Background-corrected intensity of each row and column of the window.
    ints_r = np.zeros(side)
    ints_c = np.zeros(side)
    for i in range(side):
        ints_r[i] = sum(img[y - radius + i][x - radius + j] - bkg for j in range(side))
        ints_c[i] = sum(img[y - radius + j][x - radius + i] - bkg for j in range(side))
    # First moments (numerators) and total masses (denominators).
    nume_r = sum(float(i - radius) * ints_r[i] for i in range(side))
    nume_c = sum(float(i - radius) * ints_c[i] for i in range(side))
    deno_r = sum(ints_r[i] for i in range(side))
    deno_c = sum(ints_c[i] for i in range(side))
    return (float(x) + nume_c / deno_c, float(y) + nume_r / deno_r)
# using row/col's sum as intensity: length*length
# consider second peek
def calcSpotGravSpe(img, pt, ratio_th, radius=2):
x = pt[0]
y = pt[1]
sec_peek = max(max(img[y - 1][x], img[y + 1][x]),
max(img[y][x - 1], img[y][x + 1]))
ratio = sec_peek / img[y][x]
if ratio < ratio_th:
return calcSpotGravDefault(img, pt, radius)
else: # we need to consider second peek
bkg = calcSpotBkg(img, pt, radius) # calculate background
if sec_peek == img[y][x - 1]: # left
ints_r_n1 = img[y - 1][x - 2] + img[y - 1][x - 1] + \
img[y - 1][x] + img[y - 1][x + 1] - 4.0 * bkg
ints_r_0 = img[y][x - 2] + img[y][x - 1] + \
img[y][x] + img[y][x + 1] - 4.0 * bkg
ints_r_p1 = img[y + 1][x - 2] + img[y + 1][x - 1] + \
img[y + 1][x] + img[y + 1][x + 1] - 4.0 * bkg
the_y = (ints_r_p1 - ints_r_n1) / \
(ints_r_n1 + ints_r_0 + ints_r_p1)
ints_c_n2 = img[y - 1][x - 2] + \
img[y][x - 2] + img[y + 1][x - 2] - 3.0 * bkg
ints_c_n1 = img[y - 1][x - 1] + \
img[y][x - 1] + img[y + 1][x - 1] - 3.0 * bkg
ints_c_0 = img[y - 1][x] + img[y][x] + img[y + 1][x] - 3.0 * bkg
ints_c_p1 = img[y - 1][x + 1] + \
img[y][x + 1] + img[y + 1][x + 1] - 3.0 * bkg
the_x = (-2.0 * ints_c_n2 - ints_c_n1 + ints_c_p1) / \
(ints_c_n2 + ints_c_n1 + ints_c_0 + ints_c_p1)
elif sec_peek == img[y][x + 1]: # right
ints_r_n1 = img[y - 1][x - 1] + img[y - 1][x] + | |
################################################################
# -*- coding: utf-8 -*-
# Title: Buscaminas
# Developed by: <NAME>
# SavedVariables: board, match
# Notes: Minesweeper game that supports different size games.
# It can save game on files and load them later.
# TODO: Support several languages
################################################################
__author__ = 'Administrator'
from numpy import zeros, shape
from random import *
import time
import pickle
################################################################
# Function: menu()
# Main menu
################################################################
def menu():
    """Print the main menu (in Spanish) and hand control to get_option()."""
    banner = (
        '========================================',
        '========================================',
        '¡Bienvenidos a Buscaminas!',
        'Seleccione alguna de las opciones para continuar ',
        '1.- Nuevo Juego Inicial (8x8) ',
        '2.- Nuevo Juego Intermedio (16x16)',
        '3.- Nuevo Juego Avanzado (30x16)',
        '4.- Nuevo Juego Personalizado (Tamaño a escoger)',
        '5.- Cargar una partida Guardada',
        '6.- Acerca del juego',
        '0.- Salir',
        '=======================================',
    )
    for line in banner:
        print(line)
    get_option()
################################################################
# Function: get_option()
# Initialize each option of the menu. Calls several functions.
################################################################
def get_option():
    """Read a validated menu choice and launch the corresponding mode.

    Options 1-3 start preset games, 4 a custom-sized game, 5 loads a saved
    game, 6 shows the instructions, and anything else valid exits.
    """
    INSTRUCTIONS = ("""
    OBJETIVO
    Encontrar los cuadrados vacíos evitando las minas. ¡Mientras más rápido, mejor!
    EL TABLERO
    Buscaminas tiene tres tableros predefinidos en varias dificultades:
    [*]Principiante: 8x8 con 10 minas
    [*]Intermedio: 16x16 con 40 minas
    [*]Avanzado: 30x16 con 99 minas
    También puedes crear un tablero personalizado de tamaño máximo 30x30 con hasta 841 minas.
    CÓMO JUGAR
    Las reglas son sencillas:
    [*]Descubre una mina y el juego termina.
    [*]Descubre un cuadrado vacío y el juego continía
    [*]Descubre un número y te dará información sobre cuantas minas se encuentran escondidas en
    los cuadrados circundantes.""")
    GOODBYE = ("""
    _
    .-T | _
    | | | / |
    | | | / /`|
    _ | | |/ / /
    \`\| '.' / /
    \ \`-. '--|
    \ ' |
    \ .` /
    | |""")
    option = input('Ingrese alguna de las opciones anteriores para continuar: ')
    if option == "6":
        option = input(INSTRUCTIONS)
    # Re-prompt until a valid option is entered; "6" re-shows the help.
    while not option_is_valid(option):
        option = input('Entrada incorrecta, escriba 6 para ayuda.Ingrese una opcion del menú válida: ')
        if option == "6":
            option = input(INSTRUCTIONS)
    if option == "1":
        # Beginner preset: 8x8 with 10 mines.
        x = 8
        y = 8
        mines = 10
        board = create_board(x, y)
        match = bomb_maping(x, y, mines)
        play_game(board, match, x, y)
    elif option == "2":
        # Intermediate preset: 16x16 with 40 mines.
        x = 16
        y = 16
        mines = 40
        board = create_board(x, y)
        match = bomb_maping(x, y, mines)
        play_game(board, match, x, y)
    elif option == "3":
        # Advanced preset: 30x16 with 99 mines.
        x = 30
        y = 16
        mines = 99
        board = create_board(x, y)
        match = bomb_maping(x, y, mines)
        play_game(board, match, x, y)
    elif option == "4":
        # Custom game: prompt until the dimensions and mine count validate.
        x = input('Ingrese el ancho de la cuadrilla (Máximo 30')
        y = input('Ingrese el alto de la cuadrilla (Máximo 30')
        mines = input ('Ingrese la cantida de minas. Máximo xy/2')
        while not per_size_is_valid(x, y, mines):
            print('Alguna de las opciones ingresadas no es válida')
            x = input('Ingrese el ancho de la cuadrilla (Máximo 30')
            y = input('Ingrese el alto de la cuadrilla (Máximo 30')
            mines = input ('Ingrese la cantida de minas. Máximo xy/2')
        x = int(x)
        y = int(y)
        mines = int(mines)
        board = create_board(x, y)
        match = bomb_maping(x, y, mines)
        play_game(board, match, x, y)
    elif option == "5":
        # Resume a previously saved game, if any.
        [board,match] = load_game()
        if board == [0, 0] or match == [0, 0]:
            print('No hay una partida guardada con anterioridad. \n')
            get_option()
        else:
            [x, y] = shape(board)
            mines = 0
            # NOTE(review): bomb_maping stores ints, so comparing against the
            # string '-1' never matches and mines stays 0 -- likely a bug;
            # confirm the intended comparison is == -1.
            for i in range (len(match)-1):
                for j in range (len(match[i])-1):
                    if match[i, j] == '-1':
                        mines += 1
            play_game(board, match, x-1, y-1 )
    else:
        print (GOODBYE)
        print ('Gracias por iniciar el juego. Lo esperamos una próxima ocasión.')
################################################################
# Function: option_is_valid(option_input)
# Determines TRUE or FALSE statement for get_option
################################################################
def option_is_valid(option_input):
    """Return True when *option_input* is a valid main-menu choice (0-6).

    :param option_input: raw string read from input()
    :return: True for the integers 0 through 6, False otherwise
    """
    try:
        value = int(option_input)
    except (ValueError, TypeError):
        # Not an integer at all.
        return False
    # Bug fix: the menu offers options 0-6, but 7 was previously accepted
    # (and then silently behaved like "exit").
    return 0 <= value <= 6
################################################################
# Function: per_size_is_valid(x_size, y_size, mines)
# Determines TRUE or FALSE statement for the custom game.
# Verifies if the given dimensions are between boundaries
################################################################
def per_size_is_valid(x_size, y_size, mines):
    """Validate custom-game parameters.

    The board must be at most 30x30 (each side at least 1) and the mine
    count at most half the number of cells.  Inputs may be strings.

    :return: True when all three parameters are acceptable
    """
    try:
        width = int(x_size)
        height = int(y_size)
        mine_count = int(mines)
    except Exception:
        # Non-numeric input is simply rejected.
        return False
    return (0 < width <= 30
            and 0 < height <= 30
            and mine_count <= width * height / 2)
################################################################
# Function: create_board(x_size, y_size)
# Creates visual board for the player. Size is given by chosen
# option
################################################################
def create_board(x_size, y_size):
    """Create the visible (x_size+2) x (y_size+2) board for the player.

    Interior cells start as a blank space; the border cells carry row
    labels (left/right columns) and column labels (top/bottom rows).

    :param x_size: number of playable rows
    :param y_size: number of playable columns
    :return: numpy array of strings
    """
    # Bug fix: dtype=str produces single-character ('<U1') cells, so any
    # label >= 10 was truncated (row 10 displayed as '1').  '<U3' holds the
    # largest supported label (30) with room to spare.
    board = zeros([x_size+2, y_size+2], dtype = '<U3')
    for i in range(1, len(board)-1):
        for j in range(1, len(board[i])-1):
            board[i,j] = ' '
    # Row labels down the left and right borders.
    for i in range(0, x_size+1):
        board[i, 0] = i
        board[i, y_size+1] = i
    # Column labels across the top and bottom borders.
    for j in range(0, y_size+1):
        board [0, j] = j
        board [x_size+1, j] = j
    return board
################################################################
# Function: bomb_maping(x_size, y_size, mines)
# Creates hidden map of the mines and their surroundings. Size
# is given by chosen option.
################################################################
def bomb_maping(x_size, y_size, mines):
    """Build the hidden mine map for a game.

    Mines are placed uniformly at random (without duplicates) on the
    playable interior; every other interior cell counts its adjacent
    mines.  Mines are stored as -1, counts as non-negative ints.

    :param x_size: playable rows (the map adds a 1-cell border per side)
    :param y_size: playable columns
    :param mines: number of mines to place
    :return: (x_size+2) x (y_size+2) numpy int array
    """
    rows_total = x_size + 2
    cols_total = y_size + 2
    placed = []
    for _ in range(mines):
        cell = [randint(1, rows_total - 2), randint(1, cols_total - 2)]
        # Redraw until the cell is not already occupied.
        while cell in placed:
            cell = [randint(1, rows_total - 2), randint(1, cols_total - 2)]
        placed.append(cell)
    match_board = zeros((rows_total, cols_total), dtype = int)
    for row, col in placed:
        match_board[row, col] = -1
    # Increment the neighbour count of every non-mine cell next to a mine,
    # skipping the border ring.
    for row, col in placed:
        neighbours = ((row-1, col-1), (row-1, col), (row-1, col+1),
                      (row,   col-1),               (row,   col+1),
                      (row+1, col-1), (row+1, col), (row+1, col+1))
        for (nr, nc) in neighbours:
            if 0 < nr < rows_total - 1 and 0 < nc < cols_total - 1 \
                    and match_board[nr, nc] != -1:
                match_board[nr, nc] += 1
    return match_board
################################################################
# Function: get_move(x, y):
# Receives string for the coords of unveiling cell. Range
# between given size of the game
################################################################
def get_move(x, y):
    """Prompt for the next move and return it as (row, col, flag).

    flag values: 0 = uncover, 1 = place a flag, 2 = quit, '3' = saved.
    Also sets the module-level ``is_ended`` when the player quits.

    NOTE(review): reads module globals ``board_display`` and ``mine_camp``
    (defined elsewhere in this file) when saving -- confirm they exist.

    :param x: board width, used for coordinate validation
    :param y: board height, used for coordinate validation
    """
    INSTRUCTIONS = ("""
    Primero ingresa la fila, luego la columna separadas con un punto (.).
    Para añadir una bandera, escribe \"f\" al final de las coordenadas (Ej: 5.4f donde sería la quinta
    en la cuarta columna donde iría la bandera. Para salir escriba \"e\" y para guardar \"s\".
    \n Ingrese las coordenadas de la celda: """)
    global is_ended
    is_ended = False
    move = input('Ingrese las coordenadas de una celda. Escriba \"H"\ para ayuda: ')
    if move == 'H' or move == 'h':
        move = input(INSTRUCTIONS)
    if move == 'S' or move == 's':
        # Save and report back to the caller without ending the game.
        print('El juego ha sido guardado.')
        save_game(board_display, mine_camp)
        return (0, 0, '3')
    if move == 'E' or move == 'e':
        # Quit requested: confirm (Y), resume (N) or save-and-quit (S).
        question = input('Presione Y para salir, N para continuar o S para salir y guardar: ')
        while not end_is_valid(question):
            question = input('Presione Y para salir, N para continuar o S para salir y guardar: ')
        if question == 'Y' or question == 'y':
            is_ended = True
            return (0, 0, '2')
        elif question == 'N' or question == 'n':
            move = input('Ingrese las coordenadas de una celda. Escriba \"H"\ para ayuda: ')
        elif question == 'S' or question == 's':
            is_ended = True
            save_game(board_display, mine_camp)
            return (0, 0, '3')
    # Re-prompt until the coordinates validate; H/E/S are handled inline.
    while not move_is_valid(move, x, y):
        move = input('Ingrese las coordenadas de una celda. Escriba \"H"\ para ayuda: ')
        if move == 'H' or move == 'h':
            move = input(INSTRUCTIONS)
        if move == 'E' or move == 'e':
            question = input('Presione Y para salir, N para continuar o S para salir y continuar: ')
            while not end_is_valid(question):
                question = input('Presione Y para salir, N para continuar o S para salir y guardar: ')
            if question == 'Y' or question == 'y':
                is_ended = True
                move = ('1.1')
                row = 1
                col = 1
                flag = 2
                return (row, col, flag)
            elif question == 'N' or question == 'n':
                move = input('Ingrese las coordenadas de una celda. Escriba \"H"\ para ayuda: ')
            elif question == 'S' or question == 's':
                is_ended = True
                move = ('1.1')
                row = 1
                col = 1
                flag = 2
                save_game(board_display, mine_camp)
                return (row, col, flag)
        if move == 'S' or move == 's':
            save_game(board_display, mine_camp)
            move = input('Ingrese las coordenadas de una celda. Escriba \"H"\ para ayuda: ')
    if is_ended == False:
        # Parse "row.col[f]" into numeric row/col; a trailing F marks a flag.
        chain = len(move)
        vec = list(move)
        row = 0
        col = 0
        flag = 0
        k = vec.index('.')
        if vec[-1] == 'F' or vec[-1] == 'f':
            chain -= 1
            flag = 1
        # Digits before the dot build the row, digits after it the column.
        for i in range(k):
            a = int(vec[i])
            row += a*10**(k-i-1)
        for i in range (k+1, chain):
            a = int(vec[i])
            col += a*10**(chain-i-1)
    else:
        # NOTE(review): every path that sets is_ended = True returns earlier,
        # so this branch looks unreachable; if it were reached, row/col would
        # be unbound below -- confirm.
        flag = 2
    return (row, col, flag)
################################################################
# Function: move_is_valid(move_input, x, | |
in same directory as this program.
file = os.path.dirname(os.path.realpath(__file__)) + os.sep + "opcodetable.csv"
with open(file, newline='') as csvfile:
table = list(csv.DictReader(csvfile))
# Do validity check on table entries and calculate bitmask and value
# for each opcode so we can quicky test opcode for matches in the
# table.
for row in table:
# Validity check: Mnemonic is not empty.
if row["Mnemonic"] == "":
print("Error: Empty mnemonic entry in opcode table:", row)
sys.exit(1)
# Validity check: B W and L are empty or the corresponding letter
if not row["B"] in ("B", ""):
print("Error: Bad B entry in opcode table:", row)
sys.exit(1)
if not row["W"] in ("W", ""):
print("Error: Bad W entry in opcode table:", row)
sys.exit(1)
if not row["L"] in ("L", ""):
print("Error: Bad L entry in opcode table:", row)
sys.exit(1)
# Pattern has length 16 and each character is 0, 1, or X.
if not re.match(r"^[01X]...............$", row["Pattern"]):
print("Error: Bad pattern entry in opcode table:", row)
sys.exit(1)
# Validity check: DataSize is B, W, L, A, or empty.
if not row["DataSize"] in ("B", "W", "L", "A", ""):
print("Error: Bad DataSize entry in opcode table:", row)
sys.exit(1)
# Validity check: DataType is is I, N, D, M or empty.
if not row["DataType"] in ("I", "N", "D", "M", ""):
print("Error: Bad DataType entry in opcode table:", row)
sys.exit(1)
# Convert bit pattern to 16-bit value and bitmask, e.g.
# pattern: 1101XXX110001XXX
# value: 1101000110001000
# mask: 1111000111111000
# Opcode matches pattern if opcode AND mask equals value
pattern = row["Pattern"]
value = ""
mask = ""
for pos in range(16):
if pattern[pos] in ("0", "1"):
value += pattern[pos]
mask += "1"
else:
value += "0"
mask += "0"
# Convert value and mask to numbers and store in table.
row["Value"] = int(value, 2)
row["Mask"] = int(mask, 2)
# Open input file
filename = args.filename
try:
f = open(filename, "rb")
except FileNotFoundError:
print(("Error: input file '{}' not found.".format(filename)), file=sys.stderr)
sys.exit(1)
# Loop over file input
while True:
# Get 16-bit instruction
c1 = f.read(1) # Get binary bytes from file
if len(c1) == 0: # handle EOF
break
c2 = f.read(1)
if len(c2) == 0:
break
data[0] = ord(c1) # Convert to numbers
data[1] = ord(c2)
# Get op code
opcode = data[0]*256 + data[1]
# Find matching mnemonic in table
for row in table:
value = row["Value"]
mask = row["Mask"]
mnemonic = row["Mnemonic"]
if (opcode & mask) == value:
break
# Should now have the mnemonic
if mnemonic == "":
print("Error: Mnemonic not found in opcode table.")
sys.exit(1)
# Handle instruction types - one word implicit with no operands
if mnemonic in ("ILLEGAL", "RESET", "NOP", "RTE", "RTS", "TRAPV", "RTR"):
length = 2
printInstruction(address, length, mnemonic, data, "")
# Handle unimplemented and invalid instructions as a dc.w directive
elif mnemonic in ("UNIMPLEMENTED", "INVALID"):
length = 2
operand = "${0:02X}{1:02X}".format(data[0], data[1]) + " ; " + mnemonic
printInstruction(address, length, "dc.w", data, operand)
# Handle instruction types - one word implicit with operands
# TRAP
elif mnemonic == "TRAP":
length = 2
operand = "#{0:d}".format(data[1] & 0x0f)
printInstruction(address, length, mnemonic, data, operand)
# Handle instruction types: ORI to CCR
elif mnemonic in ("ORI to CCR", "EORI to CCR"):
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
if data[2] != 0:
print("Warning: MSB of operand should be zero, but is {0:02X}".format(data[2]))
operand = "#${0:02X},CCR".format(data[3])
if mnemonic == "ORI to CCR":
printInstruction(address, length, "ORI", data, operand)
else:
printInstruction(address, length, "EORI", data, operand)
# Handle instruction types: ORI to SR
elif mnemonic in ("ORI to SR", "EORI to SR"):
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
operand = "#${0:02X}{1:02X},SR".format(data[2], data[3])
if mnemonic == "ORI to SR":
printInstruction(address, length, "ORI", data, operand)
else:
printInstruction(address, length, "EORI", data, operand)
# Handle instruction types: ANDI to CCR
elif mnemonic == "ANDI to CCR":
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
if data[2] != 0:
print("Warning: MSB of operand should be zero, but is {0:02X}".format(data[2]))
operand = "#${0:02X},CCR".format(data[3])
printInstruction(address, length, "ANDI", data, operand)
# Handle instruction types: ANDI to SR
elif mnemonic == "ANDI to SR":
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
operand = "#${0:02X}{1:02X},SR".format(data[2], data[3])
printInstruction(address, length, "ANDI", data, operand)
# Handle instruction types: STOP
elif mnemonic == "STOP":
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
operand = "#${0:02X}{1:02X}".format(data[2], data[3])
printInstruction(address, length, mnemonic, data, operand)
# Handle instruction types - BRA, BSR, Bcc
elif mnemonic in ("BRA", "BSR", "BCC"):
if (data[1]) != 0: # Byte offset
length = 2
disp = data[1]
if disp < 128: # Positive offset
dest = address + disp + 2
else: # Negative offset
dest = address - (disp ^ 0xff) + 1
else: # Word offset
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
disp = data[2]*256 + data[3]
if disp < 32768: # Positive offset
dest = address + disp + 2
else: # Negative offset
dest = address - (disp ^ 0xffff) + 1
operand = "${0:08X}".format(dest)
if mnemonic == "BCC":
cond = data[0] & 0x0f
mnemonic = "B" + conditions[cond]
printInstruction(address, length, mnemonic, data, operand)
# Handle instruction types - UNLK
elif mnemonic == "UNLK":
length = 2
operand = "A{0:d}".format(data[1] & 0x07)
printInstruction(address, length, mnemonic, data, operand)
# Handle instruction types - LINK
elif mnemonic == "LINK":
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
operand = "A{0:d},#${1:02X}{2:02X}".format(data[1] & 0x07, data[2], data[3])
printInstruction(address, length, mnemonic, data, operand)
# Handle instruction types - SWAP
elif mnemonic == "SWAP":
length = 2
operand = "D{0:d}".format(data[1] & 0x07)
printInstruction(address, length, mnemonic, data, operand)
# Handle instruction types - EXT
elif mnemonic == "EXT":
length = 2
operand = "D{0:d}".format(data[1] & 0x07)
if data[1] & 0x40:
printInstruction(address, length, "EXT.l", data, operand)
else:
printInstruction(address, length, "EXT.w", data, operand)
# Handle instruction types - MOVE USP
elif mnemonic == "MOVE USP":
length = 2
if data[1] & 0x08:
operand = "USP,A{0:d}".format(data[1] & 0x07)
else:
operand = "A{0:d},USP".format(data[1] & 0x07)
printInstruction(address, length, "MOVE", data, operand)
elif mnemonic == "DBCC":
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
disp = data[2]*256 + data[3]
if disp < 32768: # Positive offset
dest = address + disp + 2
else: # Negative offset
dest = address - (disp ^ 0xffff) + 1
operand = "D{0:d},${1:08X}".format(data[1] & 0x07, dest)
cond = data[0] & 0x0f
mnemonic = "DB" + conditions[cond]
printInstruction(address, length, mnemonic, data, operand)
elif mnemonic == "MOVEP":
length = 4
data[2] = ord(f.read(1))
data[3] = ord(f.read(1))
disp = data[2]*256 + data[3]
op = (data[1] & 0xc0) >> 6
if op == 0:
mnemonic = "MOVEP.w"
operand = "(${0:04X},A{1:d}),D{2:d}".format(disp, data[1] & 0x07, (data[0] & 0x0e) >> 1)
elif op == 1:
mnemonic = "MOVEP.l"
operand = "(${0:04X},A{1:d}),D{2:d}".format(disp, data[1] & 0x07, (data[0] & 0x0e) >> 1)
elif op == 2:
mnemonic = "MOVEP.w"
operand = "D{0:d},(${1:04X},A{2:d})".format((data[0] & 0x0e) >> 1, disp, data[1] & 0x07)
elif op == 3:
mnemonic = "MOVEP.l"
operand = "D{0:d},(${1:04X},A{2:d})".format((data[0] & 0x0e) >> 1, disp, data[1] & 0x07)
printInstruction(address, length, mnemonic, data, operand)
elif mnemonic == "MOVEQ":
length = 2
# Use these lines if you want signed value (some assemblers complain otherwise)
if data[1] > 127:
operand = "#{0:d},D{1:d}".format(-(256 - data[1]), (data[0] & 0x0e) >> 1)
else:
operand = "#{0:d},D{1:d}".format(data[1], (data[0] & 0x0e) >> 1)
# Use this line if you want 8-bit hex value
# operand = "#${0:02X},D{1:d}".format(data[1], (data[0] & 0x0e) >> 1)
printInstruction(address, length, mnemonic, data, operand)
elif mnemonic in ("SBCD", "ABCD"):
length = 2
if data[1] & 0x08:
operand = "-(A{0:d}),-(A{1:d})".format(data[1] & 0x07, (data[0] & 0x0e) >> 1)
else:
operand = "D{0:d},D{1:d}".format(data[1] & 0x07, (data[0] & 0x0e) >> 1)
printInstruction(address, length, mnemonic, data, operand)
elif mnemonic == "EXG":
length = 2
m = (data[1] & 0xf8) >> 3
if m == 0x08:
operand = "D{0:d},D{1:d}".format((data[0] & 0x0e) >> 1, data[1] & 0x07)
elif m == 0x09:
operand = "A{0:d},A{1:d}".format((data[0] | |
Share(self, *args):
"""
:param ent:
:type ent: Handle_StepRepr_MaterialDesignation &
:param iter:
:type iter: Interface_EntityIterator &
:rtype: None
"""
return _RWStepRepr.RWStepRepr_RWMaterialDesignation_Share(self, *args)
__swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWMaterialDesignation
RWStepRepr_RWMaterialDesignation.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialDesignation_ReadStep,None,RWStepRepr_RWMaterialDesignation)
RWStepRepr_RWMaterialDesignation.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialDesignation_WriteStep,None,RWStepRepr_RWMaterialDesignation)
RWStepRepr_RWMaterialDesignation.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialDesignation_Share,None,RWStepRepr_RWMaterialDesignation)
RWStepRepr_RWMaterialDesignation_swigregister = _RWStepRepr.RWStepRepr_RWMaterialDesignation_swigregister
RWStepRepr_RWMaterialDesignation_swigregister(RWStepRepr_RWMaterialDesignation)
class RWStepRepr_RWMaterialProperty(object):
    """SWIG proxy for the C++ RWStepRepr_RWMaterialProperty STEP read/write tool.

    NOTE(review): this module is SWIG-generated; prefer regenerating the
    bindings over hand-editing this class.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWMaterialProperty_swiginit(self,_RWStepRepr.new_RWStepRepr_RWMaterialProperty(*args))
    def ReadStep(self, *args):
        """
        * Reads MaterialProperty
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_MaterialProperty &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMaterialProperty_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes MaterialProperty
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_MaterialProperty &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMaterialProperty_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_MaterialProperty &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMaterialProperty_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWMaterialProperty
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWMaterialProperty.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialProperty_ReadStep,None,RWStepRepr_RWMaterialProperty)
RWStepRepr_RWMaterialProperty.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialProperty_WriteStep,None,RWStepRepr_RWMaterialProperty)
RWStepRepr_RWMaterialProperty.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialProperty_Share,None,RWStepRepr_RWMaterialProperty)
RWStepRepr_RWMaterialProperty_swigregister = _RWStepRepr.RWStepRepr_RWMaterialProperty_swigregister
RWStepRepr_RWMaterialProperty_swigregister(RWStepRepr_RWMaterialProperty)
class RWStepRepr_RWMaterialPropertyRepresentation(object):
    """SWIG proxy for the C++ RWStepRepr_RWMaterialPropertyRepresentation STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_swiginit(self,_RWStepRepr.new_RWStepRepr_RWMaterialPropertyRepresentation(*args))
    def ReadStep(self, *args):
        """
        * Reads MaterialPropertyRepresentation
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_MaterialPropertyRepresentation &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes MaterialPropertyRepresentation
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_MaterialPropertyRepresentation &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_MaterialPropertyRepresentation &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWMaterialPropertyRepresentation
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWMaterialPropertyRepresentation.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_ReadStep,None,RWStepRepr_RWMaterialPropertyRepresentation)
RWStepRepr_RWMaterialPropertyRepresentation.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_WriteStep,None,RWStepRepr_RWMaterialPropertyRepresentation)
RWStepRepr_RWMaterialPropertyRepresentation.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_Share,None,RWStepRepr_RWMaterialPropertyRepresentation)
RWStepRepr_RWMaterialPropertyRepresentation_swigregister = _RWStepRepr.RWStepRepr_RWMaterialPropertyRepresentation_swigregister
RWStepRepr_RWMaterialPropertyRepresentation_swigregister(RWStepRepr_RWMaterialPropertyRepresentation)
class RWStepRepr_RWMeasureRepresentationItem(object):
    """SWIG proxy for the C++ RWStepRepr_RWMeasureRepresentationItem STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_swiginit(self,_RWStepRepr.new_RWStepRepr_RWMeasureRepresentationItem(*args))
    def ReadStep(self, *args):
        """
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_MeasureRepresentationItem &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_MeasureRepresentationItem &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_WriteStep(self, *args)
    def Share(self, *args):
        """
        :param ent:
        :type ent: Handle_StepRepr_MeasureRepresentationItem &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWMeasureRepresentationItem
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWMeasureRepresentationItem.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_ReadStep,None,RWStepRepr_RWMeasureRepresentationItem)
RWStepRepr_RWMeasureRepresentationItem.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_WriteStep,None,RWStepRepr_RWMeasureRepresentationItem)
RWStepRepr_RWMeasureRepresentationItem.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_Share,None,RWStepRepr_RWMeasureRepresentationItem)
RWStepRepr_RWMeasureRepresentationItem_swigregister = _RWStepRepr.RWStepRepr_RWMeasureRepresentationItem_swigregister
RWStepRepr_RWMeasureRepresentationItem_swigregister(RWStepRepr_RWMeasureRepresentationItem)
class RWStepRepr_RWParametricRepresentationContext(object):
    """SWIG proxy for the C++ RWStepRepr_RWParametricRepresentationContext STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    Unlike its siblings this tool exposes no Share() method.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWParametricRepresentationContext_swiginit(self,_RWStepRepr.new_RWStepRepr_RWParametricRepresentationContext(*args))
    def ReadStep(self, *args):
        """
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_ParametricRepresentationContext &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWParametricRepresentationContext_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_ParametricRepresentationContext &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWParametricRepresentationContext_WriteStep(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWParametricRepresentationContext
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWParametricRepresentationContext.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWParametricRepresentationContext_ReadStep,None,RWStepRepr_RWParametricRepresentationContext)
RWStepRepr_RWParametricRepresentationContext.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWParametricRepresentationContext_WriteStep,None,RWStepRepr_RWParametricRepresentationContext)
RWStepRepr_RWParametricRepresentationContext_swigregister = _RWStepRepr.RWStepRepr_RWParametricRepresentationContext_swigregister
RWStepRepr_RWParametricRepresentationContext_swigregister(RWStepRepr_RWParametricRepresentationContext)
class RWStepRepr_RWProductConcept(object):
    """SWIG proxy for the C++ RWStepRepr_RWProductConcept STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWProductConcept_swiginit(self,_RWStepRepr.new_RWStepRepr_RWProductConcept(*args))
    def ReadStep(self, *args):
        """
        * Reads ProductConcept
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_ProductConcept &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWProductConcept_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes ProductConcept
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_ProductConcept &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWProductConcept_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_ProductConcept &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWProductConcept_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWProductConcept
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWProductConcept.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWProductConcept_ReadStep,None,RWStepRepr_RWProductConcept)
RWStepRepr_RWProductConcept.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWProductConcept_WriteStep,None,RWStepRepr_RWProductConcept)
RWStepRepr_RWProductConcept.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWProductConcept_Share,None,RWStepRepr_RWProductConcept)
RWStepRepr_RWProductConcept_swigregister = _RWStepRepr.RWStepRepr_RWProductConcept_swigregister
RWStepRepr_RWProductConcept_swigregister(RWStepRepr_RWProductConcept)
class RWStepRepr_RWProductDefinitionShape(object):
    """SWIG proxy for the C++ RWStepRepr_RWProductDefinitionShape STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWProductDefinitionShape_swiginit(self,_RWStepRepr.new_RWStepRepr_RWProductDefinitionShape(*args))
    def ReadStep(self, *args):
        """
        * Reads ProductDefinitionShape
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_ProductDefinitionShape &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWProductDefinitionShape_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes ProductDefinitionShape
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_ProductDefinitionShape &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWProductDefinitionShape_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_ProductDefinitionShape &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWProductDefinitionShape_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWProductDefinitionShape
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWProductDefinitionShape.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWProductDefinitionShape_ReadStep,None,RWStepRepr_RWProductDefinitionShape)
RWStepRepr_RWProductDefinitionShape.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWProductDefinitionShape_WriteStep,None,RWStepRepr_RWProductDefinitionShape)
RWStepRepr_RWProductDefinitionShape.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWProductDefinitionShape_Share,None,RWStepRepr_RWProductDefinitionShape)
RWStepRepr_RWProductDefinitionShape_swigregister = _RWStepRepr.RWStepRepr_RWProductDefinitionShape_swigregister
RWStepRepr_RWProductDefinitionShape_swigregister(RWStepRepr_RWProductDefinitionShape)
class RWStepRepr_RWPropertyDefinition(object):
    """SWIG proxy for the C++ RWStepRepr_RWPropertyDefinition STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWPropertyDefinition_swiginit(self,_RWStepRepr.new_RWStepRepr_RWPropertyDefinition(*args))
    def ReadStep(self, *args):
        """
        * Reads PropertyDefinition
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinition &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinition_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes PropertyDefinition
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinition &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinition_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinition &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinition_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWPropertyDefinition
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWPropertyDefinition.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinition_ReadStep,None,RWStepRepr_RWPropertyDefinition)
RWStepRepr_RWPropertyDefinition.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinition_WriteStep,None,RWStepRepr_RWPropertyDefinition)
RWStepRepr_RWPropertyDefinition.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinition_Share,None,RWStepRepr_RWPropertyDefinition)
RWStepRepr_RWPropertyDefinition_swigregister = _RWStepRepr.RWStepRepr_RWPropertyDefinition_swigregister
RWStepRepr_RWPropertyDefinition_swigregister(RWStepRepr_RWPropertyDefinition)
class RWStepRepr_RWPropertyDefinitionRelationship(object):
    """SWIG proxy for the C++ RWStepRepr_RWPropertyDefinitionRelationship STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_swiginit(self,_RWStepRepr.new_RWStepRepr_RWPropertyDefinitionRelationship(*args))
    def ReadStep(self, *args):
        """
        * Reads PropertyDefinitionRelationship
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinitionRelationship &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes PropertyDefinitionRelationship
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinitionRelationship &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinitionRelationship &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWPropertyDefinitionRelationship
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWPropertyDefinitionRelationship.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_ReadStep,None,RWStepRepr_RWPropertyDefinitionRelationship)
RWStepRepr_RWPropertyDefinitionRelationship.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_WriteStep,None,RWStepRepr_RWPropertyDefinitionRelationship)
RWStepRepr_RWPropertyDefinitionRelationship.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_Share,None,RWStepRepr_RWPropertyDefinitionRelationship)
RWStepRepr_RWPropertyDefinitionRelationship_swigregister = _RWStepRepr.RWStepRepr_RWPropertyDefinitionRelationship_swigregister
RWStepRepr_RWPropertyDefinitionRelationship_swigregister(RWStepRepr_RWPropertyDefinitionRelationship)
class RWStepRepr_RWPropertyDefinitionRepresentation(object):
    """SWIG proxy for the C++ RWStepRepr_RWPropertyDefinitionRepresentation STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_swiginit(self,_RWStepRepr.new_RWStepRepr_RWPropertyDefinitionRepresentation(*args))
    def ReadStep(self, *args):
        """
        * Reads PropertyDefinitionRepresentation
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinitionRepresentation &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes PropertyDefinitionRepresentation
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinitionRepresentation &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_PropertyDefinitionRepresentation &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWPropertyDefinitionRepresentation
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWPropertyDefinitionRepresentation.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_ReadStep,None,RWStepRepr_RWPropertyDefinitionRepresentation)
RWStepRepr_RWPropertyDefinitionRepresentation.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_WriteStep,None,RWStepRepr_RWPropertyDefinitionRepresentation)
RWStepRepr_RWPropertyDefinitionRepresentation.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_Share,None,RWStepRepr_RWPropertyDefinitionRepresentation)
RWStepRepr_RWPropertyDefinitionRepresentation_swigregister = _RWStepRepr.RWStepRepr_RWPropertyDefinitionRepresentation_swigregister
RWStepRepr_RWPropertyDefinitionRepresentation_swigregister(RWStepRepr_RWPropertyDefinitionRepresentation)
class RWStepRepr_RWQuantifiedAssemblyComponentUsage(object):
    """SWIG proxy for the C++ RWStepRepr_RWQuantifiedAssemblyComponentUsage STEP read/write tool.

    NOTE(review): SWIG-generated code — regenerate rather than hand-edit.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_swiginit(self,_RWStepRepr.new_RWStepRepr_RWQuantifiedAssemblyComponentUsage(*args))
    def ReadStep(self, *args):
        """
        * Reads QuantifiedAssemblyComponentUsage
        :param data:
        :type data: Handle_StepData_StepReaderData &
        :param num:
        :type num: int
        :param ach:
        :type ach: Handle_Interface_Check &
        :param ent:
        :type ent: Handle_StepRepr_QuantifiedAssemblyComponentUsage &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_ReadStep(self, *args)
    def WriteStep(self, *args):
        """
        * Writes QuantifiedAssemblyComponentUsage
        :param SW:
        :type SW: StepData_StepWriter &
        :param ent:
        :type ent: Handle_StepRepr_QuantifiedAssemblyComponentUsage &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_WriteStep(self, *args)
    def Share(self, *args):
        """
        * Fills data for graph (shared items)
        :param ent:
        :type ent: Handle_StepRepr_QuantifiedAssemblyComponentUsage &
        :param iter:
        :type iter: Interface_EntityIterator &
        :rtype: None
        """
        return _RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_Share(self, *args)
    __swig_destroy__ = _RWStepRepr.delete_RWStepRepr_RWQuantifiedAssemblyComponentUsage
# SWIG boilerplate: rebind the methods to the C implementations and register the proxy.
RWStepRepr_RWQuantifiedAssemblyComponentUsage.ReadStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_ReadStep,None,RWStepRepr_RWQuantifiedAssemblyComponentUsage)
RWStepRepr_RWQuantifiedAssemblyComponentUsage.WriteStep = new_instancemethod(_RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_WriteStep,None,RWStepRepr_RWQuantifiedAssemblyComponentUsage)
RWStepRepr_RWQuantifiedAssemblyComponentUsage.Share = new_instancemethod(_RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_Share,None,RWStepRepr_RWQuantifiedAssemblyComponentUsage)
RWStepRepr_RWQuantifiedAssemblyComponentUsage_swigregister = _RWStepRepr.RWStepRepr_RWQuantifiedAssemblyComponentUsage_swigregister
RWStepRepr_RWQuantifiedAssemblyComponentUsage_swigregister(RWStepRepr_RWQuantifiedAssemblyComponentUsage)
class RWStepRepr_RWReprItemAndLengthMeasureWithUnit(object):
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ | |
# route_padded = route + [tail] * lookahead
# weight_padding = 0
# # elif killcollide ...
# # route_padded += [tail] * lookahead
# # weight_padding = 0
# else:
route_padded, weight_padding = bo.routePadding(route, snakes, foods, depth=lookahead, method=routepadding_method)
# (4) Check target, route, route padding are safe
weight_total = weight_route + weight_padding
route_length = len(route_padded)
# print("WEIGHT", weight_padding, weight_route)
# TODO: Clean up all the blank & routes to start ..
if not (route in [[], [start]]):
sn.addRoutes(route_padded, weight_total, route_length, strategy[0])
bo.logger.log("strategy-add", "STRATEGY ROUTE %s Strategy: %s Target:%s Route: %s Weight: %s, Length: %s %s keeplooking|ignorethreat|enemy: %s %s %s" %
(str(i), str(strategy), str(target),
route_padded, weight_total, route_length, lookahead,
keepLooking, ignoreThreat, enemy))
# Check if this is a "good" route for us
if ((weight_total <= CONST.pointThreshold and
(len(route_padded) >= lookahead)) and
not keepLooking):
found = True
# We are desparate. Ignore length & route threshold except hazard/solid death
elif (ignoreThreat and weight_total < 2 * CONST.routeThreshold):
found = True
# Enemy ignores point threshold
elif (enemy):
found = True
# Safe path to & away found
if (found):
route = route_padded + [] # copy.copy
# bo.logger.log('strategy-found', 'PATH FOUND')
# Normal exit from loop
break
# else:
# bo.logger.log('strategy-found', 'PATH NOT FOUND')
# 5) Next strategy -- keep looking. Check if time exceeds limit
st = bo.logger.getStartTime()
diff = 1000 * (time.time() - st)
if diff > CONST.timeEnd:
hurry = True
# Termination of loop
# No more strategy
# Time expired
# Max depth (TODO: Remove)
if (not len(strategylist) or
hurry or
i > depth_strategy):
bo.logger.log('timer-hurry strategy:%s hurry:%s depth:%s / %s' % (len(strategylist), hurry, i, depth_strategy))
# print("STRATEGY EXIT", len(strategylist), hurry, depth)
# Exit loop
target = []
route = []
break
# bo.logger.log('strategy-update','%s not found. Try next strategy. i: %s' % (str(strategy), i))
bo.logger.timer('Strategy search')
i = i + 1
# TODO: Delete -- no longer required for stateless
# Remove duplicates from strategy list
# stl_unique = []
# for stl in strategylist:
# if not stl in stl_unique:
# stl_unique.append(stl)
# strategylist = copy.copy(stl_unique)
sn.setRoute(route)
sn.setTarget(target)
# TODO: Consider combining state machine (target) and
# Check snake target, return next move
def makeMove(bo: board, sn: snake, snakes) -> str:
    """
    Translates route to a direction
    Use primary route, otherwise select "best route" from available
    ==
    bo: boardClass as board
    sn: snakeClass as snake
    snakes: list[] of snakes
    ==
    return: "up", "down", "left" or "right"
    """
    # 5) Route Found
    route = sn.getRoute()
    start = sn.getHead()
    route_method = ''
    found = False
    p = []
    # Walk the primary (state-machine) route until the first point that is
    # not our current head; that point becomes the move target.
    while len(route):
        # Preferred route
        route_method = 'route_stateMachine'
        p = route.pop(0)
        if p != start:
            found = True
            break
    # 6.1) Route not found - select least worst route
    if (not found):
        # Get all candidate routes collected during strategy search
        routes = sn.getRoutes()
        bo.logger.log('route-found %s' % str(routes))
        # Best = Longest length (eg. 25) / lowest weight (50%)
        rfactor = 0
        if len(routes):
            route = []
            for r in routes:
                # Score: cube of the (capped) path length, discounted by the
                # route weight when non-zero. R20211104 - changed from
                # 10*len to a cubic length term.
                path_length = min(CONST.lookAheadPathContinue, r['length'])
                if r['weight'] == 0:
                    rnew = pow(path_length, 3)
                else:
                    rnew = pow(path_length, 3) / r['weight']
                if rnew > rfactor:
                    rfactor = rnew
                    route = r['route']
            # Walk the winning route to its first non-head point
            while len(route):
                route_method = 'route_getRoutes'
                p = route.pop(0)
                if p != start:
                    found = True
                    break
        bo.logger.log('route-last-resort', 'rfactor:%.2f route:%s' % (rfactor, str(p)))
    # 7) Translate next point to direction
    move = fn.translateDirection(start, p)
    bo.logger.log('make move', str(start), str(p), str(move), str(route_method))
    sn.setTarget(p)
    sn.setRoute(route)
    sn.setMove(move)
    # BUG FIX: the function is annotated `-> str` and documented to return a
    # direction, but the final return was commented out so callers always got
    # None. Returning the move is backward-compatible with callers that read
    # it from sn.getMove() instead.
    return move
def defaultRoutes(bo, sn, snakes):
    """Stub: collect fallback routes for when strategy search finds nothing.

    NOTE(review): currently a no-op — every fallback candidate below
    (largest path, chase tail, lowest dijkstra gradient, enclosed-space
    moves, markov reset) is commented out, and the locals computed here are
    never returned or stored. Callers get None.
    """
    # TODO: Review & include default route
    start = sn.getHead()
    # finish = sn.getTarget()
    route_weight = CONST.routeThreshold
    route_method = ''
    routes = []
    # 6.2) Still no route - Use lookahead (route padding)
    # TODO: Combine lookahead & dijkstra to avoid
    #       collision with high threat
    # if (not len(p) or not bo.inBounds(p)):
    #   route_method = 'route_findLargestPath'
    #   route = bo.findLargestPath([start], snakes)
    #   if len(route) > 1:
    #     # TODO: Cleanup routes -- some include start, others don't
    #     # Remove head (if exists)
    #     if (route[0] == start):
    #         route.pop(0)
    #     try:
    #         p = route.pop(0)
    #     except Exception as e:
    #         bo.logger.error('exception', 'makeMove', str(e))
    # 6.3) Still no route - Chase any tail
    # if (not found and not sn.getEating()):
    #     route_method = 'route_chaseTail'
    #     for d in CONST.directions:
    #         # Check each direction
    #         t = list( map(add, start, CONST.directionMap[d]) )
    #         if (bo.inBounds(t)):
    #             # Find tail
    #             w = bo.trails[t[0],t[1]]
    #             if w == 1:
    #                 p = copy.copy(t)
    #                 wmin = copy.copy(w)
    # 6.4) Still no route - Use lowest gradient
    # route_method = 'route_dijkstra'
    # wmin = CONST.routeThreshold
    # for d in CONST.directions:
    #     # Check each direction
    #     t = list( map(add, start, CONST.directionMap[d]) )
    #     if (bo.inBounds(t)):
    #         try:
    #             # Find lowest weight
    #             w = bo.dijkstra[0][t[0],t[1]]
    #             if w < wmin:
    #                 p = copy.copy(t)
    #                 wmin = copy.copy(w)
    #         except Exception as e:
    #             pass
    # sn.addRoutes(route_padded, weight_total, route_length, strategy[0]))
    # 6.5) Still no route - Use self.enclosd available moves
    # if (not len(p) or not bo.inBounds(p)):
    #   route_method = 'route_dijkstra'
    #   for d in CONST.directions:
    #       moves_avail = bo.enclosed[move]
    #       ..
    # 6.6) Still no route - Wipe markovs & try again
    # if (not len(p) or not bo.inBounds(p)):
    #   route_method = 'route_findLargest_clear'
    #   bo.resetRouting()
    #   route = bo.findLargestPath([start])
    #   if len(route) > 1:
    #     if (route[0] == start):
    #       route.pop(0)
    #     try:
    #       p = route.pop(0)
    #     except Exception as e:
    #       log('exception', 'makeMove', str(e))
# == HELPERS ==
def largestSnake(bo, us, snakes, larger=CONST.controlLargerBy):
    """Return True when *us* out-sizes every other snake by at least `larger`.

    bo: board (unused here; kept for call-site compatibility)
    us: our snake object
    snakes: mapping of snake identity -> snake object
    larger: required length advantage over every enemy
    """
    you_len = us.getLength()
    for identity in snakes:
        sn = snakes[identity]
        # Idiom fix: the original used `if ok: pass / else: largest = False`
        # and kept scanning; invert the test and return early instead.
        if sn != us and you_len < sn.getLength() + larger:
            return False
    return True
# def pathThreat(board, start, path, maxthreat=CONST.aggroLow):
# # TODO: search whole route (currently first vector only)
# if(len(path)):
# tmap = board.getMarkov()
# # First vector
# p0 = path.pop(0)
# # Translate to points
# points = fn.getPointsInLine(start, p0)
# # Iterate points
# for pt in points:
# # Threat exceeds aggro ..
# if (tmap[0][pt[0], pt[1]] > maxthreat):
# return True
# return False
def killPath(bo, snakes, radius=CONST.killRadius):
    """Look for a cornered, smaller enemy near our head.

    Returns the single square that enemy can still move to (so we can try to
    take it), or [] when no such kill square exists. If several enemies
    qualify, the last one scanned wins.
    """
    my_id = bo.getIdentity()
    my_len = snakes[my_id].getLength()
    my_head = snakes[my_id].getHead()
    kill_square = []
    for other_id in snakes:
        other = snakes[other_id]
        if other.getType() == "us":
            continue
        gap = fn.distanceToPoint(my_head, other.getHead())
        escapes = bo.findEmptySpace(other.getHead(), dirn=False)
        # Trigger only when (a) we are strictly longer, (b) the enemy head is
        # within the kill radius, and (c) it has exactly one escape square.
        if my_len > other.getLength() and gap <= radius and len(escapes) == 1:
            kill_square = escapes[0]
    return kill_square
def numMovesAvailable(bo, start):
"""
Return maximum number of moves in any direction
(expensive)
===
start: location (eg. [7, 3])
===
int maximum | |
flow", None))
#if QT_CONFIG(tooltip)
self.actionPTDF_time_series.setToolTip(QCoreApplication.translate("mainWindow", u"PTDF based time series power flow", None))
#endif // QT_CONFIG(tooltip)
self.actionShow_color_controls.setText(QCoreApplication.translate("mainWindow", u"Show color controls", None))
self.actionAdd_circuit.setText(QCoreApplication.translate("mainWindow", u"Add circuit", None))
#if QT_CONFIG(tooltip)
self.actionAdd_circuit.setToolTip(QCoreApplication.translate("mainWindow", u"Add circuit to the current circuit", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(shortcut)
self.actionAdd_circuit.setShortcut(QCoreApplication.translate("mainWindow", u"Ctrl+N, Ctrl+O", None))
#endif // QT_CONFIG(shortcut)
self.actionSync.setText(QCoreApplication.translate("mainWindow", u"Sync", None))
#if QT_CONFIG(tooltip)
self.actionSync.setToolTip(QCoreApplication.translate("mainWindow", u"Sync with the file for colaborative editing of the grid", None))
#endif // QT_CONFIG(tooltip)
self.actionDrawSchematic.setText(QCoreApplication.translate("mainWindow", u"Draw schematic", None))
self.actionSigma_analysis.setText(QCoreApplication.translate("mainWindow", u"Sigma analysis", None))
#if QT_CONFIG(tooltip)
self.actionSigma_analysis.setToolTip(QCoreApplication.translate("mainWindow", u"Perform HELM-Sigma analysis", None))
#endif // QT_CONFIG(tooltip)
self.actionClear_stuff_running_right_now.setText(QCoreApplication.translate("mainWindow", u"Clear \"stuff running right now\"", None))
self.actionAdd_default_catalogue.setText(QCoreApplication.translate("mainWindow", u"Add default catalogue", None))
self.actionFind_node_groups.setText(QCoreApplication.translate("mainWindow", u"Find node groups", None))
#if QT_CONFIG(tooltip)
self.actionFind_node_groups.setToolTip(QCoreApplication.translate("mainWindow", u"<html><head/><body><p>Finds the electrically related nodes by using their electrical distance and the DBSCAN clustering method</p></body></html>", None))
#endif // QT_CONFIG(tooltip)
self.actiongrid_Generator.setText(QCoreApplication.translate("mainWindow", u"Grid Generator", None))
#if QT_CONFIG(shortcut)
self.actiongrid_Generator.setShortcut(QCoreApplication.translate("mainWindow", u"Ctrl+G", None))
#endif // QT_CONFIG(shortcut)
self.actionLicense.setText(QCoreApplication.translate("mainWindow", u"License", None))
self.actionImportPlexosNodeLoad.setText(QCoreApplication.translate("mainWindow", u"Node load", None))
self.actionImportPlexosGeneratorGeneration.setText(QCoreApplication.translate("mainWindow", u"Generator generation", None))
self.actionOTDF_time_series.setText(QCoreApplication.translate("mainWindow", u"Contingency analysis time series", None))
#if QT_CONFIG(tooltip)
self.actionOTDF_time_series.setToolTip(QCoreApplication.translate("mainWindow", u"Contingency analysis time series", None))
#endif // QT_CONFIG(tooltip)
self.actionImportPlexosBranchRates.setText(QCoreApplication.translate("mainWindow", u"Branch rates", None))
self.actionClustering_time_series.setText(QCoreApplication.translate("mainWindow", u"Clustering time series", None))
#if QT_CONFIG(tooltip)
self.actionClustering_time_series.setToolTip(QCoreApplication.translate("mainWindow", u"Perform the time series simulation using clusters for speed / accuracy trade-off", None))
#endif // QT_CONFIG(tooltip)
self.actionSetSelectedBusArea.setText(QCoreApplication.translate("mainWindow", u"Area", None))
self.actionSetSelectedBusZone.setText(QCoreApplication.translate("mainWindow", u"Zone", None))
self.actionSetSelectedBusCountry.setText(QCoreApplication.translate("mainWindow", u"Country", None))
self.actionImport_bus_coordinates.setText(QCoreApplication.translate("mainWindow", u"Import bus coordinates", None))
self.actionATC.setText(QCoreApplication.translate("mainWindow", u"Available Transfer Capacity", None))
#if QT_CONFIG(tooltip)
self.actionATC.setToolTip(QCoreApplication.translate("mainWindow", u"Net Transfer Capacity", None))
#endif // QT_CONFIG(tooltip)
self.actionATC_Time_Series.setText(QCoreApplication.translate("mainWindow", u"Available Transfer Capacity Time Series", None))
#if QT_CONFIG(tooltip)
self.actionATC_Time_Series.setToolTip(QCoreApplication.translate("mainWindow", u"Net Transfer Capacity Time Series", None))
#endif // QT_CONFIG(tooltip)
self.actionContingency_analysis.setText(QCoreApplication.translate("mainWindow", u"Contingency analysis", None))
self.actionApply_new_rates.setText(QCoreApplication.translate("mainWindow", u"Apply new rates", None))
#if QT_CONFIG(tooltip)
self.actionApply_new_rates.setToolTip(QCoreApplication.translate("mainWindow", u"Apply new rates to all the simulation results", None))
#endif // QT_CONFIG(tooltip)
self.actionOptimal_Net_Transfer_Capacity.setText(QCoreApplication.translate("mainWindow", u"Optimal net transfer capacity", None))
#if QT_CONFIG(tooltip)
self.actionOptimal_Net_Transfer_Capacity.setToolTip(QCoreApplication.translate("mainWindow", u"Optimal Net Transfer Capacity", None))
#endif // QT_CONFIG(tooltip)
self.actionSet_schematic_positions_from_GPS_coordinates.setText(QCoreApplication.translate("mainWindow", u"Set schematic (x,y) from (lat,lon)", None))
self.actionInputs_analysis.setText(QCoreApplication.translate("mainWindow", u"Inputs analysis", None))
#if QT_CONFIG(shortcut)
self.actionInputs_analysis.setShortcut(QCoreApplication.translate("mainWindow", u"Ctrl+I", None))
#endif // QT_CONFIG(shortcut)
#if QT_CONFIG(tooltip)
self.run_cascade_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Run complete cascading process", None))
#endif // QT_CONFIG(tooltip)
self.run_cascade_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.run_cascade_step_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Run next cascading state", None))
#endif // QT_CONFIG(tooltip)
self.run_cascade_step_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.copy_cascade_step_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Copy cascade state to normal grid state", None))
#endif // QT_CONFIG(tooltip)
self.copy_cascade_step_pushButton.setText("")
self.clear_cascade_pushButton.setText("")
self.label_27.setText(QCoreApplication.translate("mainWindow", u"Cascading steps", None))
#if QT_CONFIG(tooltip)
self.simulation_results_step_comboBox.setToolTip(QCoreApplication.translate("mainWindow", u"Steps in the simulation", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.view_next_simulation_step_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Next", None))
#endif // QT_CONFIG(tooltip)
self.view_next_simulation_step_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.available_results_to_color_comboBox.setToolTip(QCoreApplication.translate("mainWindow", u"Available results", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.view_previous_simulation_step_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Previous", None))
#endif // QT_CONFIG(tooltip)
self.view_previous_simulation_step_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.colour_results_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Color the grid with the selected study", None))
#endif // QT_CONFIG(tooltip)
self.colour_results_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.show_map_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Show coloured map", None))
#endif // QT_CONFIG(tooltip)
self.show_map_pushButton.setText("")
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.GridSectionTab), QCoreApplication.translate("mainWindow", u"Schematic", None))
self.label_3.setText(QCoreApplication.translate("mainWindow", u"Object types", None))
#if QT_CONFIG(tooltip)
self.analyze_objects_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Launch the grid analysis interface", None))
#endif // QT_CONFIG(tooltip)
self.analyze_objects_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.smart_search_lineEdit.setToolTip(QCoreApplication.translate("mainWindow", u"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:'MS Shell Dlg 2'; font-size:8pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Write search criteria. i.e.:</p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">>=20: Is greater or equal to 20</p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">==bus1: Is exacty equal to bus1</p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">>Bus2: Is greater than Bus2</p>\n"
"<p style=\" margin-top:12px; marg"
"in-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">== 30: Is equal to 30</p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">*bus : Contains the word bus</p></body></html>", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.filter_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Smart filter", None))
#endif // QT_CONFIG(tooltip)
self.filter_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.highlight_selection_buses_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Highlight the buses of the selected elements", None))
#endif // QT_CONFIG(tooltip)
self.highlight_selection_buses_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.busViewerButton.setToolTip(QCoreApplication.translate("mainWindow", u"open bus viewer", None))
#endif // QT_CONFIG(tooltip)
self.busViewerButton.setText("")
#if QT_CONFIG(tooltip)
self.processTemplatesPushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Process templates", None))
#endif // QT_CONFIG(tooltip)
self.processTemplatesPushButton.setText("")
#if QT_CONFIG(tooltip)
self.viewTemplatesButton.setToolTip(QCoreApplication.translate("mainWindow", u"View templates catalogue", None))
#endif // QT_CONFIG(tooltip)
self.viewTemplatesButton.setText("")
#if QT_CONFIG(tooltip)
self.add_object_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Add new object", None))
#endif // QT_CONFIG(tooltip)
self.add_object_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.delete_selected_objects_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Delete selection", None))
#endif // QT_CONFIG(tooltip)
self.delete_selected_objects_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.delete_and_reduce_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"<html><head/><body><p>Delete and reduce.</p><p>Applicable to buses, it removes a bus and places its objects (loads, generators, etc.) into the next feasible bus of higher voltage.</p></body></html>", None))
#endif // QT_CONFIG(tooltip)
self.delete_and_reduce_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.highlight_by_property_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Highlight the schematic buses based on the values of the selected property", None))
#endif // QT_CONFIG(tooltip)
self.highlight_by_property_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.clear_highlight_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Clear the bus highlight", None))
#endif // QT_CONFIG(tooltip)
self.clear_highlight_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.setValueToColumnButton.setToolTip(QCoreApplication.translate("mainWindow", u"Set value to column", None))
#endif // QT_CONFIG(tooltip)
self.setValueToColumnButton.setText("")
self.label_53.setText(QCoreApplication.translate("mainWindow", u"Templates", None))
#if QT_CONFIG(tooltip)
self.assignTemplateButton.setToolTip(QCoreApplication.translate("mainWindow", u"Assign template to selected element", None))
#endif // QT_CONFIG(tooltip)
self.assignTemplateButton.setText("")
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.DataTab), QCoreApplication.translate("mainWindow", u"Objects", None))
self.label_51.setText(QCoreApplication.translate("mainWindow", u"Data structures", None))
self.label_52.setText(QCoreApplication.translate("mainWindow", u"Elements data (double click to edit)", None))
#if QT_CONFIG(tooltip)
self.catalogue_add_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Add", None))
#endif // QT_CONFIG(tooltip)
self.catalogue_add_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.catalogue_edit_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Edit", None))
#endif // QT_CONFIG(tooltip)
self.catalogue_edit_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.catalogue_delete_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Delete", None))
#endif // QT_CONFIG(tooltip)
self.catalogue_delete_pushButton.setText("")
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.catalogueTab), QCoreApplication.translate("mainWindow", u"Types catalogue", None))
#if QT_CONFIG(tooltip)
self.new_profiles_structure_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Create profiles", None))
#endif // QT_CONFIG(tooltip)
self.new_profiles_structure_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.delete_profiles_structure_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Delete profiles", None))
#endif // QT_CONFIG(tooltip)
self.delete_profiles_structure_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.edit_profiles_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Import profiles", None))
#endif // QT_CONFIG(tooltip)
self.edit_profiles_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.undo_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Un-do action (only for the current profile)", None))
#endif // QT_CONFIG(tooltip)
self.undo_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.redo_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Re-do action (only for the current profile)", None))
#endif // QT_CONFIG(tooltip)
self.redo_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.copy_profile_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Copy displayed profile", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(statustip)
self.copy_profile_pushButton.setStatusTip("")
#endif // QT_CONFIG(statustip)
self.copy_profile_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.paste_profiles_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Paste clipboard into the displayed profile", None))
#endif // QT_CONFIG(tooltip)
self.paste_profiles_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.set_linear_combination_profile_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"<html><head/><body><p>Copy the selected profile into the profiles selected next to this button</p></body></html>", None))
#endif // QT_CONFIG(tooltip)
self.set_linear_combination_profile_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.device_type_magnitude_comboBox_2.setToolTip(QCoreApplication.translate("mainWindow", u"Profile where to copy the current profile", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.set_profile_state_button.setToolTip(QCoreApplication.translate("mainWindow", u"Assign the values of the selected time step into the grid", None))
#endif // QT_CONFIG(tooltip)
self.set_profile_state_button.setText("")
#if QT_CONFIG(tooltip)
self.profile_time_selection_comboBox.setToolTip(QCoreApplication.translate("mainWindow", u"Time step selector", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.profile_device_type_comboBox.setToolTip(QCoreApplication.translate("mainWindow", u"Device type", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.device_type_magnitude_comboBox.setToolTip(QCoreApplication.translate("mainWindow", u"Magnitude with profile", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.plot_time_series_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"PLot the selected object's profile", None))
#endif // QT_CONFIG(tooltip)
self.plot_time_series_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.profile_add_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Add value to the profile", None))
#endif // QT_CONFIG(tooltip)
self.profile_add_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.profile_subtract_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Subtract value from the profile", None))
#endif // QT_CONFIG(tooltip)
self.profile_subtract_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.profile_multiply_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Multiply the profile by a value", None))
#endif // QT_CONFIG(tooltip)
self.profile_multiply_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.profile_divide_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Divide the profile by a value", None))
#endif // QT_CONFIG(tooltip)
self.profile_divide_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.set_profile_value_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Set the value to all or to the selection", None))
#endif // QT_CONFIG(tooltip)
self.set_profile_value_pushButton.setText("")
self.profile_label.setText(QCoreApplication.translate("mainWindow", u"...", None))
self.label_36.setText(QCoreApplication.translate("mainWindow", u"Start", None))
self.label_35.setText(QCoreApplication.translate("mainWindow", u"End", None))
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.timeEventsTab), QCoreApplication.translate("mainWindow", u"Time events", None))
#if QT_CONFIG(tooltip)
self.compute_simulation_data_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Update the islands dispayed", None))
#endif // QT_CONFIG(tooltip)
self.compute_simulation_data_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.exportSimulationDataButton.setToolTip(QCoreApplication.translate("mainWindow", u"Export simulation data", None))
#endif // QT_CONFIG(tooltip)
self.exportSimulationDataButton.setText("")
self.label_23.setText(QCoreApplication.translate("mainWindow", u"Island", None))
self.label_30.setText(QCoreApplication.translate("mainWindow", u"Data structures", None))
self.label_31.setText(QCoreApplication.translate("mainWindow", u"Data table", None))
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.compiledArraysTab), QCoreApplication.translate("mainWindow", u"Compiled arrays", None))
#if QT_CONFIG(tooltip)
self.comments_textEdit.setToolTip(QCoreApplication.translate("mainWindow", u"Write here some comments about the grid", None))
#endif // QT_CONFIG(tooltip)
self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.commentsTab), QCoreApplication.translate("mainWindow", u"Comments", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.GridTab), QCoreApplication.translate("mainWindow", u"Model", None))
self.label_16.setText(QCoreApplication.translate("mainWindow", u"Session results", None))
#if QT_CONFIG(tooltip)
self.deleteDriverButton.setToolTip(QCoreApplication.translate("mainWindow", u"Delete selected driver", None))
#endif // QT_CONFIG(tooltip)
self.deleteDriverButton.setText("")
self.label_37.setText(QCoreApplication.translate("mainWindow", u"Disk persistance", None))
#if QT_CONFIG(tooltip)
self.loadResultFromDiskButton.setToolTip(QCoreApplication.translate("mainWindow", u"Load result from the gridcal file", None))
#endif // QT_CONFIG(tooltip)
self.loadResultFromDiskButton.setText("")
#if QT_CONFIG(tooltip)
self.copy_results_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Copy to data frame to clipboard", None))
#endif // QT_CONFIG(tooltip)
self.copy_results_pushButton.setText("")
#if QT_CONFIG(tooltip)
self.copy_numpy_button.setToolTip(QCoreApplication.translate("mainWindow", u"Copy data in numpy format to clipboard", None))
#endif // QT_CONFIG(tooltip)
self.copy_numpy_button.setText("")
#if QT_CONFIG(tooltip)
self.saveResultsButton.setToolTip(QCoreApplication.translate("mainWindow", u"Export data", None))
#endif // QT_CONFIG(tooltip)
self.saveResultsButton.setText("")
#if QT_CONFIG(tooltip)
self.results_as_cdf_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"Results as cummulative density functions", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(statustip)
self.results_as_cdf_checkBox.setStatusTip("")
#endif // QT_CONFIG(statustip)
self.results_as_cdf_checkBox.setText("")
#if QT_CONFIG(tooltip)
self.results_as_abs_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"Results as absolute values", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(statustip)
self.results_as_abs_checkBox.setStatusTip("")
#endif // QT_CONFIG(statustip)
self.results_as_abs_checkBox.setText("")
self.units_label.setText("")
self.search_results_Button.setText("")
#if QT_CONFIG(tooltip)
self.plot_data_pushButton.setToolTip(QCoreApplication.translate("mainWindow", u"Plot the data in a separated window", None))
#endif // QT_CONFIG(tooltip)
self.plot_data_pushButton.setText("")
self.tabWidget.setTabText(self.tabWidget.indexOf(self.ResultsTab), QCoreApplication.translate("mainWindow", u"Results", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.main_console_tab), QCoreApplication.translate("mainWindow", u"Console", None))
self.label_48.setText(QCoreApplication.translate("mainWindow", u"Reactive power control mode", None))
self.label_67.setText("")
self.label_33.setText(QCoreApplication.translate("mainWindow", u"Power flow controls", None))
self.apply_impedance_tolerances_checkBox.setText(QCoreApplication.translate("mainWindow", u"Apply impedance tolerances", None))
self.label_94.setText("")
self.label_93.setText(QCoreApplication.translate("mainWindow", u"Time series clustering", None))
self.label_64.setText("")
self.label_65.setText(QCoreApplication.translate("mainWindow", u"Time series", None))
#if QT_CONFIG(tooltip)
self.dispatch_storage_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"<html><head/><body><p>When enabled, the storage devices beheave as actual storage devices taking into account their energy limitations when delivering power.</p><p>When disabled, the storage devices beheave exactly as controlled generators</p></body></html>", None))
#endif // QT_CONFIG(tooltip)
self.dispatch_storage_checkBox.setText(QCoreApplication.translate("mainWindow", u"dispatch storage", None))
#if QT_CONFIG(tooltip)
self.cluster_number_spinBox.setToolTip(QCoreApplication.translate("mainWindow", u"Number of clusters in case of clustering", None))
#endif // QT_CONFIG(tooltip)
self.cluster_number_spinBox.setSuffix(QCoreApplication.translate("mainWindow", u" Clusters", None))
#if QT_CONFIG(tooltip)
self.temperature_correction_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"Correct the branches resistance using the temperature", None))
#endif // QT_CONFIG(tooltip)
self.temperature_correction_checkBox.setText(QCoreApplication.translate("mainWindow", u"Apply temperature correction", None))
self.label_22.setText("")
self.label_50.setText(QCoreApplication.translate("mainWindow", u"Transformer taps control mode", None))
self.label_63.setText("")
self.label_17.setText(QCoreApplication.translate("mainWindow", u"Power flow", None))
self.label_2.setText(QCoreApplication.translate("mainWindow", u"Solver", None))
#if QT_CONFIG(tooltip)
self.auto_precision_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"If active, GridCal finds a precission that suits the magnitude of the power injections so that the power flow is meaningful", None))
#endif // QT_CONFIG(tooltip)
self.auto_precision_checkBox.setText(QCoreApplication.translate("mainWindow", u"Automatic precision", None))
self.label_11.setText(QCoreApplication.translate("mainWindow", u"Acceleration", None))
#if QT_CONFIG(tooltip)
# BUGFIX: the inner double quotes around "Snom" were unescaped, which made
# this line a SyntaxError; they are now escaped inside the string literal.
self.distributed_slack_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"<html><head/><body><p>If active, the slack power is distributed among the generators according to their installed power \"Snom\"</p></body></html>", None))
#endif // QT_CONFIG(tooltip)
self.distributed_slack_checkBox.setText(QCoreApplication.translate("mainWindow", u"Distributed slack", None))
#if QT_CONFIG(tooltip)
self.max_iterations_spinBox.setToolTip(QCoreApplication.translate("mainWindow", u"<html><head/><body><p>Maximum numberof iterations to use.</p><p><br/></p><p>Tipical values: </p><p><NAME>: 5</p><p>Levenberg-Marquards: 20</p><p>Fast decoupled: 10</p><p>Others: 20</p></body></html>", None))
#endif // QT_CONFIG(tooltip)
#if QT_CONFIG(tooltip)
self.ignore_single_node_islands_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"If active, the islands of a single node are ignored.", None))
#endif // QT_CONFIG(tooltip)
self.ignore_single_node_islands_checkBox.setText(QCoreApplication.translate("mainWindow", u"Ignore single node islands", None))
#if QT_CONFIG(tooltip)
self.helm_retry_checkBox.setToolTip(QCoreApplication.translate("mainWindow", u"<html><head/><body><p>If | |
'''
'''
pass
def prepend(self, draw_func):
    '''Stub: insert draw_func at the front of this element's draw chain
    (auto-generated API skeleton; the real behavior is provided by Blender).
    '''
    pass
def property_overridable_library_set(self):
    '''Stub (auto-generated API skeleton; implemented natively by Blender).
    '''
    pass
def property_unset(self):
    '''Stub (auto-generated API skeleton; implemented natively by Blender).
    '''
    pass
def remove(self, draw_func):
    '''Stub: remove draw_func from this element's draw chain
    (auto-generated API skeleton; the real behavior is provided by Blender).
    '''
    pass
def type_recast(self):
    '''Stub (auto-generated API skeleton; implemented natively by Blender).
    '''
    pass
def values(self):
    '''Stub (auto-generated API skeleton; implemented natively by Blender).
    '''
    pass
class VIEW3D_MT_view_align(bpy_types.Menu, bpy_types._GenericUI):
    """Auto-generated stub of a Blender 3D-View menu; real logic lives in Blender."""

    bl_label = None  # menu label, assigned by Blender at registration
    bl_rna = None  # RNA type metadata, assigned by Blender
    id_data = None  # owning ID datablock, assigned by Blender

    def append(self, draw_func):
        '''Stub: add draw_func to the end of this menu's draw chain.'''
        pass

    def as_pointer(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass_py(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw(self, _context):
        '''Stub: draw callback for this menu.'''
        pass

    def draw_collapsible(self, context, layout):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw_preset(self, _context):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_add(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_remove(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def get(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_extended(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_hidden(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_overridable_library(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_readonly(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def items(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_delete(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_insert(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keys(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_from_id(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_resolve(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def pop(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def prepend(self, draw_func):
        '''Stub: insert draw_func at the front of this menu's draw chain.'''
        pass

    def property_overridable_library_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def property_unset(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def remove(self, draw_func):
        '''Stub: remove draw_func from this menu's draw chain.'''
        pass

    def type_recast(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def values(self):
        '''Stub; implemented natively by Blender.'''
        pass
class VIEW3D_MT_view_align_selected(bpy_types.Menu, bpy_types._GenericUI):
    """Auto-generated stub of a Blender 3D-View menu; real logic lives in Blender."""

    bl_label = None  # menu label, assigned by Blender at registration
    bl_rna = None  # RNA type metadata, assigned by Blender
    id_data = None  # owning ID datablock, assigned by Blender

    def append(self, draw_func):
        '''Stub: add draw_func to the end of this menu's draw chain.'''
        pass

    def as_pointer(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass_py(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw(self, _context):
        '''Stub: draw callback for this menu.'''
        pass

    def draw_collapsible(self, context, layout):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw_preset(self, _context):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_add(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_remove(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def get(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_extended(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_hidden(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_overridable_library(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_readonly(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def items(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_delete(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_insert(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keys(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_from_id(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_resolve(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def pop(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def prepend(self, draw_func):
        '''Stub: insert draw_func at the front of this menu's draw chain.'''
        pass

    def property_overridable_library_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def property_unset(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def remove(self, draw_func):
        '''Stub: remove draw_func from this menu's draw chain.'''
        pass

    def type_recast(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def values(self):
        '''Stub; implemented natively by Blender.'''
        pass
class VIEW3D_MT_view_cameras(bpy_types.Menu, bpy_types._GenericUI):
    """Auto-generated stub of a Blender 3D-View menu; real logic lives in Blender."""

    bl_label = None  # menu label, assigned by Blender at registration
    bl_rna = None  # RNA type metadata, assigned by Blender
    id_data = None  # owning ID datablock, assigned by Blender

    def append(self, draw_func):
        '''Stub: add draw_func to the end of this menu's draw chain.'''
        pass

    def as_pointer(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass_py(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw(self, _context):
        '''Stub: draw callback for this menu.'''
        pass

    def draw_collapsible(self, context, layout):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw_preset(self, _context):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_add(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_remove(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def get(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_extended(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_hidden(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_overridable_library(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_readonly(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def items(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_delete(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_insert(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keys(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_from_id(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_resolve(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def pop(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def prepend(self, draw_func):
        '''Stub: insert draw_func at the front of this menu's draw chain.'''
        pass

    def property_overridable_library_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def property_unset(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def remove(self, draw_func):
        '''Stub: remove draw_func from this menu's draw chain.'''
        pass

    def type_recast(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def values(self):
        '''Stub; implemented natively by Blender.'''
        pass
class VIEW3D_MT_view_local(bpy_types.Menu, bpy_types._GenericUI):
    """Auto-generated stub of a Blender 3D-View menu; real logic lives in Blender."""

    bl_label = None  # menu label, assigned by Blender at registration
    bl_rna = None  # RNA type metadata, assigned by Blender
    id_data = None  # owning ID datablock, assigned by Blender

    def append(self, draw_func):
        '''Stub: add draw_func to the end of this menu's draw chain.'''
        pass

    def as_pointer(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass_py(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw(self, _context):
        '''Stub: draw callback for this menu.'''
        pass

    def draw_collapsible(self, context, layout):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw_preset(self, _context):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_add(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_remove(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def get(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_extended(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_hidden(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_overridable_library(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_readonly(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def items(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_delete(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_insert(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keys(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_from_id(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_resolve(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def pop(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def prepend(self, draw_func):
        '''Stub: insert draw_func at the front of this menu's draw chain.'''
        pass

    def property_overridable_library_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def property_unset(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def remove(self, draw_func):
        '''Stub: remove draw_func from this menu's draw chain.'''
        pass

    def type_recast(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def values(self):
        '''Stub; implemented natively by Blender.'''
        pass
class VIEW3D_MT_view_navigation(bpy_types.Menu, bpy_types._GenericUI):
    """Auto-generated stub of a Blender 3D-View menu; real logic lives in Blender."""

    bl_label = None  # menu label, assigned by Blender at registration
    bl_rna = None  # RNA type metadata, assigned by Blender
    id_data = None  # owning ID datablock, assigned by Blender

    def append(self, draw_func):
        '''Stub: add draw_func to the end of this menu's draw chain.'''
        pass

    def as_pointer(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def bl_rna_get_subclass_py(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw(self, _context):
        '''Stub: draw callback for this menu.'''
        pass

    def draw_collapsible(self, context, layout):
        '''Stub; implemented natively by Blender.'''
        pass

    def draw_preset(self, _context):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_add(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def driver_remove(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def get(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_extended(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_hidden(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_overridable_library(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_readonly(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def is_property_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def items(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_delete(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keyframe_insert(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def keys(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_from_id(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_menu(self, searchpaths, operator, props_default, prop_filepath,
                  filter_ext, filter_path, display_name, add_operator):
        '''Stub; implemented natively by Blender.'''
        pass

    def path_resolve(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def pop(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def prepend(self, draw_func):
        '''Stub: insert draw_func at the front of this menu's draw chain.'''
        pass

    def property_overridable_library_set(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def property_unset(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def remove(self, draw_func):
        '''Stub: remove draw_func from this menu's draw chain.'''
        pass

    def type_recast(self):
        '''Stub; implemented natively by Blender.'''
        pass

    def values(self):
        '''Stub; implemented natively by Blender.'''
        pass
class VIEW3D_MT_view_pie(bpy_types.Menu, bpy_types._GenericUI):
bl_idname = None
''' '''
bl_label = None
''' '''
bl_rna = None
''' '''
id_data = None
''' '''
def append(self, draw_func):
'''
'''
pass
def as_pointer(self):
'''
'''
pass
def bl_rna_get_subclass(self):
'''
'''
pass
def bl_rna_get_subclass_py(self):
'''
'''
pass
def draw(self, _context):
'''
'''
pass
def draw_collapsible(self, context, layout):
'''
'''
pass
def draw_preset(self, _context):
'''
'''
pass
def driver_add(self):
'''
'''
pass
def driver_remove(self):
'''
'''
pass
def get(self):
'''
'''
pass
def is_extended(self):
'''
'''
pass
def is_property_hidden(self):
'''
'''
pass
def is_property_overridable_library(self):
'''
'''
pass
def is_property_readonly(self):
'''
'''
pass
def is_property_set(self):
'''
'''
pass
def items(self):
'''
'''
pass
def keyframe_delete(self):
'''
'''
pass
def keyframe_insert(self):
'''
'''
pass
def keys(self):
'''
'''
pass
def path_from_id(self):
'''
'''
pass
def path_menu(self, searchpaths, operator, props_default, prop_filepath,
filter_ext, filter_path, display_name, add_operator):
'''
'''
pass
def path_resolve(self):
'''
'''
pass
def pop(self):
'''
'''
pass
def prepend(self, draw_func):
'''
'''
pass
def property_overridable_library_set(self):
'''
'''
pass
def property_unset(self):
'''
'''
pass
def remove(self, draw_func):
'''
'''
pass
def type_recast(self):
'''
'''
pass
def values(self):
''' | |
not a special case
right_type = 0 # not a special case
if IsStr(t0):
left_type = 1
elif (isinstance(t0, UnionType) and len(t0.items) == 2 and
IsStr(t0.items[0]) and isinstance(t0.items[1], NoneTyp)):
left_type = 2
if IsStr(t1):
right_type = 1
elif (isinstance(t1, UnionType) and len(t1.items) == 2 and
IsStr(t1.items[0]) and isinstance(t1.items[1], NoneTyp)):
right_type = 2
#self.log('left_type %s right_type %s', left_type, right_type)
if left_type > 0 and right_type > 0 and operator in ('==', '!='):
if operator == '!=':
self.write('!(')
# NOTE: This could also be str_equals(left, right)? Does it make a
# difference?
if left_type > 1 or right_type > 1:
self.write('maybe_str_equals(')
else:
self.write('str_equals(')
self.accept(left)
self.write(', ')
self.accept(right)
self.write(')')
if operator == '!=':
self.write(')')
return
# Note: we could get rid of this altogether and rely on C++ function
# overloading. But somehow I like it more explicit, closer to C (even
# though we use templates).
contains_func = _GetContainsFunc(t1)
if operator == 'in':
if isinstance(right, TupleExpr):
left_type = self.types[left]
equals_func = None
if IsStr(left_type):
equals_func = 'str_equals'
elif (isinstance(left_type, UnionType) and len(left_type.items) == 2 and
IsStr(left_type.items[0]) and isinstance(left_type.items[1], NoneTyp)):
equals_func = 'maybe_str_equals'
# x in (1, 2, 3) => (x == 1 || x == 2 || x == 3)
self.write('(')
for i, item in enumerate(right.items):
if i != 0:
self.write(' || ')
if equals_func:
self.write('%s(' % equals_func)
self.accept(left)
self.write(', ')
self.accept(item)
self.write(')')
else:
self.accept(left)
self.write(' == ')
self.accept(item)
self.write(')')
return
assert contains_func, "RHS of 'in' has type %r" % t1
# x in mylist => list_contains(mylist, x)
self.write('%s(', contains_func)
self.accept(right)
self.write(', ')
self.accept(left)
self.write(')')
return
if operator == 'not in':
if isinstance(right, TupleExpr):
# x not in (1, 2, 3) => (x != 1 && x != 2 && x != 3)
self.write('(')
for i, item in enumerate(right.items):
if i != 0:
self.write(' && ')
self.accept(left)
self.write(' != ')
self.accept(item)
self.write(')')
return
assert contains_func, t1
# x not in mylist => !list_contains(mylist, x)
self.write('!%s(', contains_func)
self.accept(right)
self.write(', ')
self.accept(left)
self.write(')')
return
# Default case
self.accept(o.operands[0])
self.write(' %s ', o.operators[0])
self.accept(o.operands[1])
# The expression kinds below have no translation in this C++ code generator;
# visiting them emits nothing.
def visit_cast_expr(self, o: 'mypy.nodes.CastExpr') -> T:
    pass
def visit_reveal_expr(self, o: 'mypy.nodes.RevealExpr') -> T:
    pass
def visit_super_expr(self, o: 'mypy.nodes.SuperExpr') -> T:
    pass
def visit_assignment_expr(self, o: 'mypy.nodes.AssignmentExpr') -> T:
    pass
def visit_unary_expr(self, o: 'mypy.nodes.UnaryExpr') -> T:
    """Emit a C++ unary expression, e.g. a[-1] or 'not x'."""
    # Python's 'not' is spelled '!' in C++; other unary ops pass through.
    self.write('!' if o.op == 'not' else o.op)
    self.accept(o.expr)
def _WriteListElements(self, o):
    """Emit the list expression's items as a braced, comma-separated C++ initializer."""
    self.write('{')
    first = True
    for item in o.items:
        if not first:
            self.write(', ')
        first = False
        self.accept(item)
    self.write('}')
def visit_list_expr(self, o: 'mypy.nodes.ListExpr') -> T:
    """Emit a heap-allocated C++ list for a Python [...] literal."""
    list_type = self.types[o]
    container = get_c_type(list_type)
    element = get_c_type(list_type.args[0])  # e.g. int for List[int]
    assert container.endswith('*'), container
    container = container[:-1]  # strip the trailing '*' (HACK TO CLEAN UP)
    if not o.items:
        self.write('Alloc<%s>()' % container)
        return
    # Lists are MUTABLE, so they can't be hoisted to the top level.
    # C++ wart: an explicit initializer_list is required here.
    self.write('Alloc<%s>(std::initializer_list<%s>' % (container, element))
    self._WriteListElements(o)
    self.write(')')
def _WriteDictElements(self, o, key_type, val_type):
    """Emit the two initializer_list arguments for a Dict<K, V> constructor.

    NOTE(review): incomplete implementation — the key loop below is a
    placeholder (`pass`) and the values are a TODO, so both initializer
    lists are emitted empty regardless of ``o.items``.
    """
    # TODO: use initializer_list<K> and initializer_list<V> perhaps? Do
    # we want global data being initialized? Not sure if we'll have
    # initialization order problems. Can't really make them constexpr
    # because of the Str problem.
    # Hm there is some type inference problem with Alloc<Dict<K,V>({})
    self.write('std::initializer_list<%s>{' % get_c_type(key_type))
    for i, item in enumerate(o.items):
        pass  # placeholder: keys are not emitted yet
    self.write('}, ')
    self.write('std::initializer_list<%s>{' % get_c_type(val_type))
    # TODO: values
    self.write('}')
def visit_dict_expr(self, o: 'mypy.nodes.DictExpr') -> T:
    """Emit a heap-allocated C++ dict for a Python {...} literal."""
    dict_type = self.types[o]
    key_type = dict_type.args[0]
    val_type = dict_type.args[1]
    alloc_type = get_c_type(dict_type)
    assert alloc_type.endswith('*'), alloc_type
    # Strip the trailing '*' (HACK TO CLEAN UP) before using it with Alloc<>.
    self.write('Alloc<%s>(' % alloc_type[:-1])
    if o.items:
        self._WriteDictElements(o, key_type, val_type)
    self.write(')')
def visit_tuple_expr(self, o: 'mypy.nodes.TupleExpr') -> T:
    """Emit a C++ tuple construction for a Python (...) literal."""
    c_type = get_c_type(self.types[o])
    assert c_type.endswith('*'), c_type
    c_type = c_type[:-1]  # strip the trailing '*' (HACK TO CLEAN UP)
    # Tuples in return position are constructed by value; elsewhere they
    # are heap-allocated.
    ctor = c_type if self.in_return_expr else 'Alloc<%s>' % c_type
    if not o.items:
        self.write('(%s())' % ctor)
        return
    # Use an initializer list. Lists are MUTABLE so they can't be pulled
    # to the top level.
    self.write('(%s(' % ctor)
    for i, item in enumerate(o.items):
        if i:
            self.write(', ')
        self.accept(item)
    # TODO: const_lookup
    self.write('))')
def visit_set_expr(self, o: 'mypy.nodes.SetExpr') -> T:
    # Set literals are not supported by this translator; nothing is emitted.
    pass
def visit_index_expr(self, o: 'mypy.nodes.IndexExpr') -> T:
    """Emit base[index]: slices become ->slice(...), everything else ->index(...)."""
    self.accept(o.base)
    if isinstance(o.index, SliceExpr):
        # The SliceExpr visitor writes the ->slice(...) call itself.
        self.accept(o.index)
        return
    # It's hard syntactically to write (*a)[0], so use the index() method.
    self.write('->index(')
    self.accept(o.index)
    self.write(')')
# Lambdas, comprehensions, and generator expressions are not supported by
# this translator; visiting them emits nothing.
def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> T:
    pass
def visit_lambda_expr(self, o: 'mypy.nodes.LambdaExpr') -> T:
    pass
def visit_list_comprehension(self, o: 'mypy.nodes.ListComprehension') -> T:
    pass
def visit_set_comprehension(self, o: 'mypy.nodes.SetComprehension') -> T:
    pass
def visit_dictionary_comprehension(self, o: 'mypy.nodes.DictionaryComprehension') -> T:
    pass
def visit_generator_expr(self, o: 'mypy.nodes.GeneratorExpr') -> T:
    pass
def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> T:
    """Emit ->slice(begin[, end]) for a Python a[b:e] slice; stride is unsupported."""
    begin = o.begin_index
    end = o.end_index
    self.write('->slice(')
    if begin:
        self.accept(begin)
    else:
        self.write('0')  # an omitted start means index 0
    if end:
        self.write(', ')
        self.accept(end)
    self.write(')')
    if o.stride:
        raise AssertionError('Stride not supported')
def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> T:
    """Translate Python's `a if cond else b` into the C++ ternary `cond ? a : b`."""
    if not _CheckConditionType(self.types[o.cond]):
        raise AssertionError(
            "Can't use str, list, or dict in boolean context")
    self.accept(o.cond)
    self.write(' ? ')
    self.accept(o.if_expr)
    self.write(' : ')
    self.accept(o.else_expr)
# The remaining special expression kinds (backquotes, typing-related
# constructs, await, temp nodes) are not supported by this translator;
# visiting them emits nothing.
def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> T:
    pass
def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> T:
    pass
def visit_type_alias_expr(self, o: 'mypy.nodes.TypeAliasExpr') -> T:
    pass
def visit_namedtuple_expr(self, o: 'mypy.nodes.NamedTupleExpr') -> T:
    pass
def visit_enum_call_expr(self, o: 'mypy.nodes.EnumCallExpr') -> T:
    pass
def visit_typeddict_expr(self, o: 'mypy.nodes.TypedDictExpr') -> T:
    pass
def visit_newtype_expr(self, o: 'mypy.nodes.NewTypeExpr') -> T:
    pass
def visit__promote_expr(self, o: 'mypy.nodes.PromoteExpr') -> T:
    pass
def visit_await_expr(self, o: 'mypy.nodes.AwaitExpr') -> T:
    pass
def visit_temp_node(self, o: 'mypy.nodes.TempNode') -> T:
    pass
def _write_tuple_unpacking(self, temp_name, lval_items, item_types,
                           is_return=False):
    """Used by assignment and for loops.

    Emits one `lval = <temp>.atN();` / `lval = <temp>->atN();` statement per
    tuple element, skipping the `_` placeholder name.

    Args:
      temp_name: name of the C++ variable holding the tuple being unpacked.
      lval_items: lvalue expressions on the left-hand side, in order.
      item_types: mypy types of the tuple elements, parallel to lval_items.
      is_return: True when the tuple is a function return value, which is
        held by value (accessed with '.') rather than by pointer ('->').
    """
    for i, (lval_item, item_type) in enumerate(zip(lval_items, item_types)):
        if isinstance(lval_item, NameExpr):
            if lval_item.name == '_':
                continue  # Python's throwaway name: emit nothing
            item_c_type = get_c_type(item_type)
            # In the declaration pass, record the local so it can be
            # declared at the top of the function.
            if self.decl:
                self.local_var_list.append((lval_item.name, item_c_type))
            self.write_ind('%s', lval_item.name)
        else:
            # Could be MemberExpr like self.foo, self.bar = baz
            self.write_ind('')
            self.accept(lval_item)
        # Tuples that are return values aren't pointers
        op = '.' if is_return else '->'
        self.write(' = %s%sat%d();\n', temp_name, op, i)  # RHS accessor
def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> T:
# Declare constant strings. They have to be at the top level.
if self.decl and self.indent == 0 and len(o.lvalues) == 1:
lval = o.lvalues[0]
c_type = get_c_type(self.types[lval])
if not lval.name.startswith('_'):
self.decl_write('extern %s %s;\n', c_type, lval.name)
# I think there are more than one when you do a = b = 1, which I never
# use.
assert len(o.lvalues) == 1, o.lvalues
lval = o.lvalues[0]
# Special case for global constants. L = [1, 2] or D = {}
#
# We avoid Alloc<T>, since that can't be done until main().
#
# It would be nice to make these completely constexpr, e.g.
# initializing Slab<T> with the right layout from initializer_list, but
# it isn't easy. Would we need a constexpr hash?
#
# Limitation: This doesn't handle a = f([1, 2]), but we don't use that
# in Oil.
if self.indent == 0:
assert isinstance(lval, NameExpr), lval
if lval.name == '_': # Skip _ = log
return
self.log(' GLOBAL List/Dict: %s', lval.name)
# TODO: Change this to
#
# - GLOBAL_LIST(name, int, {42, 0})
# - GLOBAL_DICT(name, int, {42, 0}, Str, {str1, str2})
# - GLOBAL_INSTANCE(name, Token, ...)
#
# So that they can have Tag::Global
lval_type = self.types[lval]
if isinstance(o.rvalue, ListExpr):
item_type = lval_type.args[0]
item_c_type = get_c_type(item_type)
# Create a value first
temp_name = 'glist%d' % self.unique_id
self.unique_id += 1
self.write('List<%s> %s = ', item_c_type, temp_name)
self._WriteListElements(o.rvalue)
self.write(';\n')
# Then a pointer to it
self.write('List<%s>* %s = &%s;\n', item_c_type, lval.name,
temp_name)
return
if isinstance(o.rvalue, DictExpr):
key_type, val_type = lval_type.args
key_c_type = get_c_type(key_type)
val_c_type = get_c_type(val_type)
temp_name = 'gdict%d' % self.unique_id
self.unique_id += 1
# Value
self.write('Dict<%s, %s> %s(', key_c_type, val_c_type, temp_name)
self._WriteDictElements(o.rvalue, key_type, val_type)
self.write(');\n')
# Then a pointer to it
self.write('Dict<%s, %s>* %s = &%s;\n', key_c_type, val_c_type,
lval.name, temp_name)
return
# Global instances, e.g. EOL_TOK = Token(...)
# TODO: Needs Tag::Global
if | |
"0.2.6.0",
sha256 =
"f526d97cdab851f24e215e346f6d54d3a504a6ac5d9264f580c4f72d606178c5",
),
"rethinkdb-client-driver":
struct(
version = "0.0.25",
sha256 =
"0f9dc156cd61b866b847b1b1a60a2345b4b5556b8b75a9e8499b0514e7f98996",
),
"retry":
struct(
version = "0.7.7.0",
sha256 =
"3ccbc27a08ad0c7291342140f417cef11c2b11886586cc2bd870fa1e80cbd16c",
),
"rev-state":
struct(
version = "0.1.2",
sha256 =
"ee070e39d7f7d673593e2f356ab317bc2fdd0d8a283f8316c0e5b5adbdf0f919",
),
"rfc1751":
struct(
version = "0.1.2",
sha256 =
"a345e81625ffbdf3d3dc1723d322133108a5fd9ba17fbfae6e954046cd2b9aca",
),
"rfc5051":
struct(
version = "0.1.0.4",
sha256 =
"615daa230eabc781eff1d3ce94c42fc5ba6188dbeb115a233328454b02c1b3d3",
),
"rio":
struct(
version = "0.1.8.0",
sha256 =
"a013dd04221a1a69d5f253379443b88495be305692c06f1a060f428e98dbf5e1",
),
"rio-orphans":
struct(
version = "0.1.1.0",
sha256 =
"7e8d2c6df6e7afdbca5b344c6e57c754e2d6b9c0cfb4f00e1df88dad1bd48b4e",
),
"rng-utils":
struct(
version = "0.3.0",
sha256 =
"0886acb1e0ae6c6ad5f594a9d4d57ea5af69c566ccc5763d0b7c690963e946ba",
),
"roc-id":
struct(
version = "0.1.0.0",
sha256 =
"3144d7edc22fc5816b38fa356811417125572d062e07d23d8319466c68868429",
),
"rocksdb-haskell":
struct(
version = "1.0.1",
sha256 =
"b8407c933f503c2e21578a558b829288917f0f6f5ff7c32229cf5b11abed4dff",
),
"rocksdb-query":
struct(
version = "0.2.0",
sha256 =
"8e2d645542c98fd69fa73c136d2aa4bba574354c3121bc7b461d367a17fdc206",
),
"roles":
struct(
version = "0.2.0.0",
sha256 =
"e29d2f31b21b2d8ce3507e17211e70a61d2e434a8e19f80b2e4898bdabac34a0",
),
"rosezipper":
struct(
version = "0.2",
sha256 =
"6cab4f1c192cc3b8b0a9eb11a32780d235020a539a0e91125eed5ec590bad7bc",
),
"rot13":
struct(
version = "0.2.0.1",
sha256 =
"e026d418cc6a1ce83ba11e811387e62ad49ffb1cbd6ae7f58b72fd179fccd4dc",
),
"rounded":
struct(
version = "0.1.0.1",
sha256 =
"9abeea23692dd57e879eda210308ef5ef213169b4cb9e4e6c13de02d52a04b11",
),
"rpmbuild-order":
struct(
version = "0.2.1",
sha256 =
"b66d6078e82da6c2becf1e0082fb0f17e5a8a0052d95442dc3b0b63915a6a082",
),
"runmemo":
struct(
version = "1.0.0.1",
sha256 =
"ba5ef3177f8fe5f443808e44f62d03b23ac19bbef7f708e40532031a3505d689",
),
"rvar":
struct(
version = "0.2.0.3",
sha256 =
"d78aaf2ffdba182dda95d1692fec7abc5d77fa371120618a397b5675438c6bc0",
),
"s3-signer":
struct(
version = "0.5.0.0",
sha256 =
"d73671d5bda0f5f627bbd876916341985c281c3572e6f8406cdf2f14ed9188e4",
),
"safe":
struct(
version = "0.3.17",
sha256 =
"79c5c41e7151906969133ea21af9f7e8d25c18315886e23d0bdf6faa8b537e5c",
),
"safe-exceptions":
struct(
version = "0.1.7.0",
sha256 =
"18cddc587b52b6faa0287fb6ad6c964d1562571ea2c8ff57a194dd54b5fba069",
),
"safe-exceptions-checked":
struct(
version = "0.1.0",
sha256 =
"d807552b828de308d80805f65ee41f3e25571506b10e6b28b0b81de4aec0ca3f",
),
"safe-foldable":
struct(
version = "0.1.0.0",
sha256 =
"ca7f2ecc0e799c239df8ce56e8592fb8b8264c229ab4e1c66e0f821d299007d1",
),
"safecopy":
struct(
version = "0.9.4.3",
sha256 =
"787db1a56b7024ab5619b4f25af5379133f5f2a5e1a0657e66c8dfac1a131f08",
),
"safeio":
struct(
version = "0.0.5.0",
sha256 =
"d5799b6a6cd36e8f5442d991ed3a2076b10e0e3131269a2090b8c9c5c001e311",
),
"salak":
struct(
version = "0.1.7",
sha256 =
"669640d42444ccf4d839355406b3abdb02a2ff14e1f2f77a43c21240a33f23e5",
),
"saltine":
struct(
version = "0.1.0.2",
sha256 =
"fd989db905f3e1d742b9fcb9501d6483ffa82620e287cf51b62e0d6d2caaa308",
),
"salve":
struct(
version = "1.0.6",
sha256 =
"32c8bb50cc20360cb48751d810cac117a6b4fb83c39cf50287c61ef13c90f7ed",
),
"sample-frame":
struct(
version = "0.0.3",
sha256 =
"5baf301a4f7b2d52e6b9b9c06b10afd3938de0be6d09736d0188616cd9027247",
),
"sample-frame-np":
struct(
version = "0.0.4.1",
sha256 =
"b1db7621b07503f5fe49390bf1e1b4257c49f4760d617121a23d845278f93624",
),
"sampling":
struct(
version = "0.3.3",
sha256 =
"c8bedc93d61e6b1939f6802d7e21003e9e36abdd6f21a9651179d4d82aa00e0d",
),
"sandman":
struct(
version = "0.2.0.1",
sha256 =
"407d283e1fc4a2a369615bac569683bf399ac14ddbce1331850bfe1d7837ce64",
),
"say":
struct(
version = "0.1.0.1",
sha256 =
"f639656fc21925c45f3f55769b9fb7a90699e943376a725e215a5deea473b3e4",
),
"sbp":
struct(
version = "2.4.7",
sha256 =
"23a61062218ffbc724f61f3e08cfb2686aab6a44bada04aa5cf4abf7252962c6",
),
"sbv":
struct(
version = "7.13",
sha256 =
"33bafb18a6d7476aeb3fb215077154cd4ad36fa0359c5b184a9a2ccb3500642e",
),
"scalpel":
struct(
version = "0.5.1",
sha256 =
"20df66433570a2ca754f14058a47fb00519d9a75bb822fc3fd1769a83c608b0d",
),
"scalpel-core":
struct(
version = "0.5.1",
sha256 =
"8c05b86853b737fbed4144dc9c7bbb7743525c305f9529f59776df97bfe229a9",
),
"scanf":
struct(
version = "0.1.0.0",
sha256 =
"5675132f172ab4ed460f440df21e203c09457c2fff34fb6a389129a9da78c375",
),
"scanner":
struct(
version = "0.3",
sha256 =
"a7f85147b59e443dbd986c1f880a0c3ab0190ba7b27c2ce6238da07397fd507b",
),
"scientific":
struct(
version = "0.3.6.2",
sha256 =
"278d0afc87450254f8a76eab21b5583af63954efc9b74844a17a21a68013140f",
),
"scotty":
struct(
version = "0.11.3",
sha256 =
"0a9c8adb7d5f66ca3ba9e866aed52b87d940e4b8f1fc8f8aca9c663ac304a790",
),
"scrypt":
struct(
version = "0.5.0",
sha256 =
"3ec0a622393e2a4dbbce4c899602c848d924f8516688491b1162331b7093d9b2",
),
"sdl2":
struct(
version = "2.4.1.0",
sha256 =
"21a569c0c19f8ff2bbe1cf1d3eb32f65e8143806de353cedd240df5e9d088b5c",
),
"sdl2-gfx":
struct(
version = "0.2",
sha256 =
"8c1e10b7a675d782cd650820c75c4ef9225718ad6aaa3f8db02e869b7720c50d",
),
"sdl2-image":
struct(
version = "2.0.0",
sha256 =
"399742b2b7e64fe4e58c9d8a44ad29b2c355589233535238f8c9b371de6c26df",
),
"sdl2-mixer":
struct(
version = "1.1.0",
sha256 =
"0f4c15a1bda7b265923278641d686756292fc2a8f1c5ced7f98916cc98df0acd",
),
"sdl2-ttf":
struct(
version = "2.1.0",
sha256 =
"c7656fe923e618d3919d47ac753451b08e6d709372380e15bd3d75b39f2c80f7",
),
"secp256k1-haskell":
struct(
version = "0.1.4",
sha256 =
"741c9f3d51d4a9fc89c991734f71735f46dffd900f550d5d8564aebc1db0cbed",
),
"securemem":
struct(
version = "0.1.10",
sha256 =
"32895a4748508da58207b4867266601af6259b7109af80bbf5d2e9e598e016a6",
),
"selda":
struct(
version = "0.3.4.0",
sha256 =
"92238cfd3a557f68ccf0cb6edafa4981a5c67c91f85d471c83ba55eec1d884f3",
),
"selda-postgresql":
struct(
version = "0.1.7.3",
sha256 =
"ec33d2efedc5a9bf81a2acb726e866c4978c96a6ce92e313f0b83aa49b812d2b",
),
"selda-sqlite":
struct(
version = "0.1.6.1",
sha256 =
"8d60dec5376d99b30939e8d6a2d1fbc3363b7cdb12834a27a31f73c73e7e19e3",
),
"semigroupoid-extras":
struct(
version = "5",
sha256 =
"102e33b55cc3b15a1b714825a3703f3fc2bb09d8038404af442d35c0ac0c3832",
),
"semigroupoids":
struct(
version = "5.3.2",
sha256 =
"61a8213df437ee96a20b1c6dec8b5c573e4e0f338eb2061739a67f471d6b9d05",
),
"semigroups":
struct(
version = "0.18.5",
sha256 =
"ab2a96af6e81e31b909c37ba65f436f1493dbf387cfe0de10b6586270c4ce29d",
),
"semiring-simple":
struct(
version = "1.0.0.1",
sha256 =
"c08d1b533f4559fc55119f563a6cf3d74ad7c6f5916c2efe00b50d2a5169fd28",
),
"semirings":
struct(
version = "0.2.1.1",
sha256 =
"576a5b09e8b0045e13fab04f5a53eaead69c5b0bca99e3cdfff88be90cc64868",
),
"semver":
struct(
version = "0.3.3.1",
sha256 =
"36d3369706836d60f3bc517f30c6860734481866363723904b8768823b6bc8b1",
),
"sendfile":
struct(
version = "0.7.9",
sha256 =
"102fdf6db8c00f5a5981c6eed5acba1368a2d79b2970ce5b22ceb180aa0fdc42",
),
"seqalign":
struct(
version = "0.2.0.4",
sha256 =
"4ea194658d865890157d3df882ed21b0c089cdff7f80ea613ae25c5f3d744305",
),
"serf":
struct(
version = "0.1.1.0",
sha256 =
"d6c9c6ddf99a2119c6686732caf9f04ef8e9c4df5519a8bbd4ac7f5531d4c067",
),
"serialise":
struct(
version = "0.2.1.0",
sha256 =
"043efc1130b4202f080c5b7d2c319098df032b060655d8193f1fcdbfa3f159a5",
),
"servant":
struct(
version = "0.15",
sha256 =
"4f3f35c9c0f5e4ee8c2d10c9113ac4a6409a4d57759137e68f43588f5e6bfa39",
),
"servant-JuicyPixels":
struct(
version = "0.3.0.4",
sha256 =
"7b02f00ac8b78ffda49a96f2d1f39619ec19f244822d177928e75cd533cb9981",
),
"servant-auth":
struct(
version = "0.3.2.0",
sha256 =
"7bb4d5118c072cb3845aaba4287b2d5e34e5ccca96916895456a828bf7a9418b",
),
"servant-auth-client":
struct(
version = "0.3.3.0",
sha256 =
"490ac57150b59c567ef567120a6704cfc2184f7be8e6edaab26ad818dee5b3df",
),
"servant-auth-docs":
struct(
version = "0.2.10.0",
sha256 =
"adf3c33ce4134a78ae7a5c06092ea5812c99d4b942ff2dd685995eb3b2b53e48",
),
"servant-auth-server":
struct(
version = "0.4.3.0",
sha256 =
"74a565bc60e89795394e7680643f79e9bd1e4de45fd3be1cfc12a3108c24f0cf",
),
"servant-auth-swagger":
struct(
version = "0.2.10.0",
sha256 =
"50a783639eb882fd5047d69245f7770817658814d8c409b547ebdddae05acd12",
),
"servant-blaze":
struct(
version = "0.8",
sha256 =
"46ea88550123d765b2d09073370d0530a51878e7fdf2cf20b070be1f2f10ae94",
),
"servant-cassava":
struct(
version = "0.10",
sha256 =
"9b2c5d906f3a4bb2767b2ce91f12a74e24adceadd296220b5d7216c5e1f3560e",
),
"servant-checked-exceptions":
struct(
version = "2.0.0.0",
sha256 =
"a7f282857e56d5d1a59d055cf1936cab96a2cdc2f94a79ff736f7ef1cf56f688",
),
"servant-checked-exceptions-core":
struct(
version = "2.0.0.0",
sha256 =
"aad3513403241bb06aadc605e6af88a5f3aaa0f1f208aafed6d69e15a23ab248",
),
"servant-client":
struct(
version = "0.15",
sha256 =
"2a6c731a479f68ea8f7fe3e124b8b87d14ca9c385ed0751a70461a3c59540a25",
),
"servant-client-core":
struct(
version = "0.15",
sha256 =
"9b8e49e5e3cdda9216c393164e7c4b6d693bb159959dd52648f27f7adbca7960",
),
"servant-docs":
struct(
version = "0.11.3",
sha256 =
"07eb88550b5a5354aed4bfe74f0e4099e17fae99477e0db83a072b50070cda33",
),
"servant-elm":
struct(
version = "0.5.0.0",
sha256 =
"d9d96eeaf209f93791f3c81a5b2afad7be443f9af29f362ec17661436895b950",
),
"servant-exceptions":
struct(
version = "0.1.1",
sha256 =
"652b9fdc463200ebb8c2b2e0757f9d90662408bf45a657b3f719d0a36d34abe1",
),
"servant-foreign":
struct(
version = "0.15",
sha256 =
"f1197f1319a735b37c5fdd991556bf34b780a9b87d0e57d936a42ae6734bbd73",
),
"servant-js":
struct(
version = "0.9.4",
sha256 =
"f86ba73d38644a74ccec50c378df66ab4863664e83359b8866cf17fbf08b3c10",
),
"servant-kotlin":
struct(
version = "0.1.1.5",
sha256 =
"dbf2f037523d25ca2c81c82490ebad8c8e616c760d092e39ad047965981ffd71",
),
"servant-lucid":
struct(
version = "0.8.1",
sha256 =
"6671d5d5e29b05911bb8855f42168839c2dbb8ee113a10cef6dd372fc267113d",
),
"servant-mock":
struct(
version = "0.8.5",
sha256 =
"ae547026ddc5d15bec0af9ea9324954f88dd605cae0775c81c45b1723dc77b81",
),
"servant-pandoc":
struct(
version = "0.5.0.0",
sha256 =
"12d709fced47bb3e017b83dcc5dafb1186720e5318c1b5ebeb886d4439540463",
),
"servant-rawm":
struct(
version = "0.3.0.0",
sha256 =
"e9feee415891b8db2c1c032d6a4b934522354bc9cb2491b0ee59f989e94b6a27",
),
"servant-ruby":
struct(
version = "0.9.0.0",
sha256 =
"63787834369f2fce2216af3a38157af9370a0e4d02965ccd407ec493a62127f4",
),
"servant-server":
struct(
version = "0.15",
sha256 =
"98034e618ff844f18dbedeb663e1a88a87ce3bc3792e9a40d7e17ca1e96b93e2",
),
"servant-static-th":
struct(
version = "0.2.2.0",
sha256 =
"5bec0129407580bde3b5bc49fc75737c916b6eaf0ea421bf72f5bf029342741b",
),
"servant-streaming":
struct(
version = "0.3.0.0",
sha256 =
"980d486577658697891360479195ed493859e2279f76334101a45c880f7c5a4c",
),
"servant-swagger":
struct(
version = "1.1.7",
sha256 =
"e31a1020553c2879047e7d15cd1b57b4ec216606554fdecd62e0f4521e81de36",
),
"servant-swagger-ui":
struct(
version = "0.3.2.3.19.3",
sha256 =
"87ddb5982ce6b12698f9eff28b5d6fc2ebd00cb406bd48c8d0ff1951a1335e68",
),
"servant-swagger-ui-core":
struct(
version = "0.3.2",
sha256 =
"a2cd0e8e68c5de21aea54735f891c4c6e54007c85e93dffd42b89aba419a3ca8",
),
"servant-swagger-ui-redoc":
struct(
version = "0.3.2.1.22.2",
sha256 =
"e09919e7518f8f5b00868eac0c4f80212b5a4950d2c10112696f52446e369934",
),
"servant-tracing":
struct(
version = "0.1.0.2",
sha256 =
"3edf2e58c60b6624a81c57bbc606889d779ba0cc57fc785240cb353f9caaea62",
),
"servant-websockets":
struct(
version = "1.1.0",
sha256 =
"63384c89db83bd03e00f2f6796c391fc133ffb3c2bc72976778d476ed82f0a51",
),
"servant-yaml":
struct(
version = "0.1.0.1",
sha256 =
"01547419509cd0424885146734c08acede329a660022f534ac9b19cc685bf601",
),
"serverless-haskell":
struct(
version = "0.8.5",
sha256 =
"0fe307cadc5f0297e11e7dcac15b9371ad98f04683db7f6f5e1faf03cb17d84a",
),
"serversession":
struct(
version = "1.0.1",
sha256 =
"3ffbefd87017e8d46fbbe380f59e24672aa9c06b999da5f9ae0b052094d94822",
),
"serversession-frontend-wai":
struct(
version = "1.0",
sha256 =
"0b48130e3d3915dc46ec2392984e7862d066f6ddd454127a98b0c21c2574b167",
),
"servius":
struct(
version = "1.2.3.0",
sha256 =
"72c4b63e85df0cb51935bec85e31d44c6ee5cafd0015bd5e6ff44286e9e18b27",
),
"ses-html":
struct(
version = "0.4.0.0",
sha256 =
"cff76ee03b538e69a3d107cd63d577210cf0f9879d470bf55519e887e2a8a08f",
),
"setenv":
struct(
version = "0.1.1.3",
sha256 =
"e358df39afc03d5a39e2ec650652d845c85c80cc98fe331654deafb4767ecb32",
),
"setlocale":
struct(
version = "1.0.0.8",
sha256 =
"6dd148e47714707c311d20af606284ab392392a84ffb71da4004010e67d5b969",
),
"shake":
struct(
version = "0.17.4",
sha256 =
"beaddfbd55559ecd2b00eaaa660c2c79925bbe22619e2f5c4dc8b8ef678575aa",
),
"shake-language-c":
struct(
version = "0.12.0",
sha256 =
"661e350179e55c930c3c36f53853db2bc2697d88c5265049085cea09f5aa1ab0",
),
"shakespeare":
struct(
version = "2.0.20",
sha256 =
"f50ebff8e585851a1e3af36885d6a6d1218b19dcde1d7459f02272d6925d9e03",
),
"shared-memory":
struct(
version = "0.2.0.0",
sha256 =
"266739418194429f0e3a316decd28bf15ae8cc4ce2e1e19c523dc92b3f023abc",
),
"shell-conduit":
struct(
version = "4.7.0",
sha256 =
"6f31c5b6fb46219c4da575b4405f1a5af51eed1f22073d315df80c8a40ddbe30",
),
"shell-escape":
struct(
version = "0.2.0",
sha256 =
"e23c9ba94a27e45430cb39e6bb236557e789d24129257c3def377f441b2cba4a",
),
"shelltestrunner":
struct(
version = "1.9",
sha256 =
"cbc4358d447e32babe4572cda0d530c648cc4c67805f9f88002999c717feb3a8",
),
"shelly":
struct(
version = "1.8.0",
sha256 =
"737f51e5f4d98d72012d905d3f2b78b620c5c15292bb77e38264fa4478bb08f8",
),
"shikensu":
struct(
version = "0.3.11",
sha256 =
"69f59f067522715a86f60bf4e60f9597bc1243e50d204b99b31e89e8e8c63010",
),
"shortcut-links":
struct(
version = "0.4.2.1",
sha256 =
"0d36ecfabc8e2d3a4c0015b521b6cb8efa8469bbd518a509326f07a3aa24deff",
),
"should-not-typecheck":
struct(
version = "2.1.0",
sha256 =
"f538ac70ce07679bc2e6c1651db82a86866664ab995665fdc78e6cb12bd8d591",
),
"show-combinators":
struct(
version = "0.1.1.0",
sha256 =
"d53abf2b289a3075555f1ede76f5beba0fadce352cd94efbad610bc1eb76020a",
),
"show-prettyprint":
struct(
version = "0.2.2",
sha256 =
"f07d860b9bb4176a4e46038c5100ecf07d443daa1b15455ca4c2bd4d10e9af55",
),
"siggy-chardust":
struct(
version = "1.0.0",
sha256 =
"9f730c3cc04ea629e0b655bfff66f83e146eb3b9f0908d5dc00b4c558d5f5a43",
),
"signal":
struct(
version = "0.1.0.4",
sha256 =
"c4bfdd92b75347e02759c1a7d75963fbc7052e948ec96e25299ca5262e5d76e5",
),
"silently":
struct(
version = "1.2.5",
sha256 =
"cef625635053a46032ca53b43d311921875a437910b6568ded17027fdca83839",
),
"simple-cmd":
struct(
version = "0.1.2",
sha256 =
"e6a15592fbbcc5667b7e45563b55d08228ea483241a3b80aef9f7df802f54d82",
),
"simple-log":
struct(
version = "0.9.10",
sha256 =
"b398e8649e06a05e88b84532f0ced426a7f18bafe1eeab6b178574773db6ffa5",
),
"simple-reflect":
struct(
version = "0.3.3",
sha256 =
"07825ea04c135298008cf080133e3bfc8e04cbacd24719c46ac6a2ca4acfdb2b",
),
"simple-sendfile":
struct(
version = "0.2.27",
sha256 =
"f68572592099a2db3f7212ac7d133447ae5bbb2605285d3de1a29a52d9c79caf",
),
"simple-vec3":
struct(
version = "0.4.0.10",
sha256 =
"141788c133a8602825d0f2267e6a87e01e1cedb4f3fc0f1a1cc1203fde4ad937",
),
"since":
struct(
version = "0.0.0",
sha256 =
"7aa713c0fc0b2a748c9b5ddc413b918f77335e45b56d3968100428a42cdfc1ff",
),
"singleton-bool":
struct(
version = "0.1.4",
sha256 =
"0195c6e2be1e149e5b687ec3be84fd5089b377345fddd333a9d681eacdfafb2a",
),
"singleton-nats":
struct(
version = "0.4.2",
sha256 =
"8f8169b013a5e4725be9682bf413019cdaf6e020455839612c145ba6849e9cf1",
),
"singletons":
struct(
version = "2.5.1",
sha256 =
"20b00a3a732812ec1141014d2f8d379e392165ce7881fa7de9add8db0e22f147",
),
"siphash":
struct(
version = "1.0.3",
sha256 =
"cf81ce41c6ca40c4fec9add5dcebc161cb2d31f522f9ad727df23d30ac6a05f3",
),
"size-based":
struct(
version = "0.1.2.0",
sha256 =
"779ff6c45476d20ffd2ad7327b44cefaaf0436ed89f43b2967761c0b58a4151a",
),
"sized-grid":
struct(
version = "0.1.1.6",
sha256 =
"4907af7a4ac56a838d599f319b2096a63c4f30eaf84cac0a5a22d937605c0b1b",
),
"skein":
struct(
version = "1.0.9.4",
sha256 =
"f882ca0cc5ed336ef898fb3c89579e392900259296b2320edf968b9fc16cb8c9",
),
"skews":
struct(
version = "0.1.0.1",
sha256 =
"b544480c3e7d676f008faccd1d31639114f773aac8d2b8828be48122a120b60d",
),
"skip-var":
struct(
version = "0.1.1.0",
sha256 =
"bfbce57abd47c9c892f734b5c7d2bccad90fa5f8f8a6d4747cca15d2a493d41e",
),
"skylighting":
struct(
version = "0.7.5",
sha256 =
"7de100e42e7dac3687372f85225a6d905d534f75990c5a25c6e640acf0ad1320",
),
"skylighting-core":
struct(
version = "0.7.5",
sha256 =
"f706a2eb5d37d1323525d9c4944da2ad8d29c1ccf7d0ae7b433695d981413889",
),
"slack-web":
struct(
version = "0.2.0.9",
sha256 =
"421d2cd3a1626b637224e94c800312673b1a0f0c980d7346c0061e71bb8287d3",
),
"smallcheck":
struct(
version = "1.1.5",
sha256 =
"9020e67895a57bde02d7df2c0af06a4c769eff56d48b6a830f6d803df891aea4",
),
"smoothie":
struct(
version = "0.4.2.9",
sha256 =
"d3cafbc34a5d03363ddd41e59bd681168cd2d0aa8be4678db9ae1904ad202a4f",
),
"smtp-mail":
struct(
version = "0.1.4.6",
sha256 =
"86dacbef87a2519222a1165b49401a437887a249f5bfd63a99702198dad214bc",
),
"snap-blaze":
struct(
version = "0.2.1.5",
sha256 =
"b36e35bd4ba3087b3de92702e488ba6570675719243b5dbdf4eae0b819988841",
),
"snap-core":
struct(
version = "1.0.3.2",
sha256 | |
<gh_stars>0
# Copyright (c) 2003-2015 by <NAME>
#
# TreeCorr is free software: redistribution and use in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions, and the disclaimer given in the accompanying LICENSE
# file.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the disclaimer given in the documentation
# and/or other materials provided with the distribution.
"""
.. module:: corr3
"""
import treecorr
# Dict describing the valid parameters, what types they are, and a description:
# Each value is a tuple with the following elements:
#    type
#    may_be_list
#    default value
#    list of valid values
#    description
corr3_valid_params = {
    # Parameters about the input catalogs
    'file_name' : (str, True, None, None,
        'The file(s) with the galaxy data.'),
    'file_name2' : (str, True, None, None,
        'The file(s) to use for the second field for a cross-correlation.'),
    'file_name3' : (str, True, None, None,
        'The file(s) to use for the third field for a cross-correlation.'),
    'rand_file_name' : (str, True, None, None,
        'For NNN correlations, a list of random files.'),
    'rand_file_name2' : (str, True, None, None,
        'The randoms for the second field for a cross-correlation.'),
    'rand_file_name3' : (str, True, None, None,
        'The randoms for the third field for a cross-correlation.'),
    'file_list' : (str, False, None, None,
        'A text file with file names in lieu of file_name.'),
    'file_list2' : (str, False, None, None,
        'A text file with file names in lieu of file_name2.'),
    'file_list3' : (str, False, None, None,
        'A text file with file names in lieu of file_name3.'),
    'rand_file_list' : (str, False, None, None,
        'A text file with file names in lieu of rand_file_name.'),
    'rand_file_list2' : (str, False, None, None,
        'A text file with file names in lieu of rand_file_name2.'),
    'rand_file_list3' : (str, False, None, None,
        'A text file with file names in lieu of rand_file_name3.'),
    # Parameters about the output file(s)
    'nnn_file_name' : (str, False, None, None,
        'The output filename for point-point correlation function.'),
    # BUG FIX: description said "estimator fo the" -- corrected to "of".
    'nnn_statistic' : (str, False, 'compensated', ['compensated','simple'],
        'Which statistic to use for omega as the estimator of the NN correlation function. '),
    'kkk_file_name' : (str, False, None, None,
        'The output filename for kappa-kappa-kappa correlation function.'),
    'ggg_file_name' : (str, False, None, None,
        'The output filename for gamma-gamma-gamma correlation function.'),
    # The following entries are placeholders for correlations that corr3 does
    # not yet support; they are kept commented out deliberately.
    #'ng_file_name' : (str, False, None, None,
    #'The output filename for point-shear correlation function.'),
    #'ng_statistic' : (str, False, None, ['compensated', 'simple'],
    #'Which statistic to use for the mean shear estimator of the NG correlation function. ',
    #'The default is compensated if rand_files is given, otherwise simple'),
    #'gg_file_name' : (str, False, None, None,
    #'The output filename for shear-shear correlation function.'),
    #'nk_file_name' : (str, False, None, None,
    #'The output filename for point-kappa correlation function.'),
    #'nk_statistic' : (str, False, None, ['compensated', 'simple'],
    #'Which statistic to use for the mean kappa estimator of the NK correlation function. ',
    #'The default is compensated if rand_files is given, otherwise simple'),
    #'kg_file_name' : (str, False, None, None,
    #'The output filename for kappa-shear correlation function.'),
    # Derived output quantities
    #'m3_file_name' : (str, False, None, None,
    #'The output filename for the aperture mass statistics.'),
    #'m3_uform' : (str, False, 'Crittenden', ['Crittenden', 'Schneider'],
    #'The function form of the aperture.'),
    #'nm_file_name' : (str, False, None, None,
    #'The output filename for <N Map> and related values.'),
    #'norm_file_name' : (str, False, None, None,
    #'The output filename for <N Map>^2/<N^2><Map^2> and related values.'),
}
# Add in the valid parameters for the relevant classes
for c in [ treecorr.Catalog, treecorr.BinnedCorr3 ]:
    corr3_valid_params.update(c._valid_params)
# Short-form aliases accepted in config files for the canonical keys above.
corr3_aliases = {
    'n3_file_name' : 'nnn_file_name',
    'n3_statistic' : 'nnn_statistic',
    'k3_file_name' : 'kkk_file_name',
    'g3_file_name' : 'ggg_file_name',
}
def corr3(config, logger=None):
"""Run the full three-point correlation function code based on the parameters in the
given config dict.
The function print_corr3_params() will output information about the valid parameters
that are expected to be in the config dict.
Optionally a logger parameter maybe given, in which case it is used for logging.
If not given, the logging will be based on the verbose and log_file parameters.
:param config: The configuration dict which defines what to do.
:param logger: If desired, a logger object for logging. (default: None, in which case
one will be built according to the config dict's verbose level.)
"""
# Setup logger based on config verbose value
if logger is None:
logger = treecorr.config.setup_logger(
treecorr.config.get(config,'verbose',int,1),
config.get('log_file',None))
# Check that config doesn't have any extra parameters.
# (Such values are probably typos.)
# Also convert the given parameters to the correct type, etc.
config = treecorr.config.check_config(config, corr3_valid_params, corr3_aliases, logger)
import pprint
logger.debug('Using configuration dict:\n%s',pprint.pformat(config))
if ( 'output_dots' not in config
and config.get('log_file',None) is None
and config['verbose'] >= 2 ):
config['output_dots'] = True
# Set the number of threads
num_threads = config.get('num_threads',None)
logger.debug('From config dict, num_threads = %s',num_threads)
treecorr.set_omp_threads(num_threads, logger)
# Read in the input files. Each of these is a list.
cat1 = treecorr.read_catalogs(config, 'file_name', 'file_list', 0, logger)
if len(cat1) == 0:
raise AttributeError("Either file_name or file_list is required")
cat2 = treecorr.read_catalogs(config, 'file_name2', 'rand_file_list2', 1, logger)
cat3 = treecorr.read_catalogs(config, 'file_name3', 'rand_file_list3', 1, logger)
rand1 = treecorr.read_catalogs(config, 'rand_file_name', 'rand_file_list', 0, logger)
rand2 = treecorr.read_catalogs(config, 'rand_file_name2', 'rand_file_list2', 1, logger)
rand3 = treecorr.read_catalogs(config, 'rand_file_name3', 'rand_file_list3', 1, logger)
if len(cat2) == 0 and len(rand2) > 0:
raise AttributeError("rand_file_name2 is invalid without file_name2")
if len(cat3) == 0 and len(rand3) > 0:
raise AttributeError("rand_file_name3 is invalid without file_name3")
logger.info("Done reading input catalogs")
# Do GGG correlation function if necessary
if 'ggg_file_name' in config: #or 'm3_file_name' in config:
logger.warning("Performing GGG calculations...")
ggg = treecorr.GGGCorrelation(config,logger)
ggg.process(cat1,cat2,cat3)
logger.info("Done GGG calculations.")
if 'ggg_file_name' in config:
ggg.write(config['ggg_file_name'])
if 'm3_file_name' in config:
ggg.writeMapSq(config['m3_file_name'])
# Do NNN correlation function if necessary
if 'nnn_file_name' in config:
if len(rand1) == 0:
raise AttributeError("rand_file_name is required for NNN correlation")
if len(cat2) > 0 and len(rand2) == 0:
raise AttributeError("rand_file_name2 is required for NNN cross-correlation")
if len(cat3) > 0 and len(rand3) == 0:
raise AttributeError("rand_file_name3 is required for NNN cross-correlation")
if (len(cat2) > 0) != (len(cat3) > 0):
raise NotImplementedError(
"Cannot yet handle 3-point corrleations with only two catalogs. "+
"Need both cat2 and cat3.")
logger.warning("Performing DDD calculations...")
ddd = treecorr.NNNCorrelation(config,logger)
ddd.process(cat1,cat2,cat3)
logger.info("Done DDD calculations.")
if len(cat2) == 0:
logger.warning("Performing RRR calculations...")
rrr = treecorr.NNNCorrelation(config,logger)
rrr.process(rand1)
logger.info("Done RRR calculations.")
# For the next step, just make cat2 = cat3 = cat1 and rand2 = rand3 = rand1.
cat2 = cat3 = cat1
rand2 = rand3 = rand1
else:
logger.warning("Performing RRR calculations...")
rrr = treecorr.NNNCorrelation(config,logger)
rrr.process(rand1,rand2,rand3)
logger.info("Done RRR calculations.")
if config['nnn_statistic'] == 'compensated':
logger.warning("Performing DRR calculations...")
drr = treecorr.NNNCorrelation(config,logger)
drr.process(cat1,rand2,rand3)
logger.info("Done DRR calculations.")
logger.warning("Performing DDR calculations...")
ddr = treecorr.NNNCorrelation(config,logger)
ddr.process(cat1,cat2,rand3)
logger.info("Done DDR calculations.")
logger.warning("Performing RDR calculations...")
rdr = treecorr.NNNCorrelation(config,logger)
rdr.process(rand1,cat2,rand3)
logger.info("Done RDR calculations.")
logger.warning("Performing RRD calculations...")
rrd = treecorr.NNNCorrelation(config,logger)
rrd.process(rand1,rand2,cat3)
logger.info("Done RRD calculations.")
logger.warning("Performing DRD calculations...")
drd = treecorr.NNNCorrelation(config,logger)
drd.process(cat1,rand2,cat3)
logger.info("Done DRD calculations.")
logger.warning("Performing RDD calculations...")
rdd = treecorr.NNNCorrelation(config,logger)
rdd.process(rand1,cat2,cat3)
logger.info("Done RDD calculations.")
ddd.write(config['nnn_file_name'],rrr,drr,rdr,rrd,ddr,drd,rdd)
else:
ddd.write(config['nnn_file_name'],rrr)
# Do KKK correlation function if necessary
if 'kkk_file_name' in config:
logger.warning("Performing KKK calculations...")
kkk = treecorr.KKKCorrelation(config,logger)
kkk.process(cat1,cat2,cat3)
logger.info("Done KKK calculations.")
kkk.write(config['kkk_file_name'])
# Do NNG correlation function if necessary
if False:
#if 'nng_file_name' in config or 'nnm_file_name' in config:
if len(cat3) == 0:
raise AttributeError("file_name3 is required for nng correlation")
logger.warning("Performing NNG calculations...")
nng = treecorr.NNGCorrelation(config,logger)
nng.process(cat1,cat2,cat3)
logger.info("Done NNG calculation.")
# The default ng_statistic is compensated _iff_ rand files are given.
rrg = None
if len(rand1) == 0:
if config.get('nng_statistic',None) == 'compensated':
raise AttributeError("rand_files is required for nng_statistic = compensated")
elif config.get('nng_statistic','compensated') == 'compensated':
rrg = treecorr.NNGCorrelation(config,logger)
rrg.process(rand1,rand1,cat2)
logger.info("Done RRG calculation.")
if 'nng_file_name' in config:
nng.write(config['nng_file_name'], rrg)
if 'nnm_file_name' in config:
nng.writeNNMap(config['nnm_file_name'], rrg)
# Do NNK correlation function if necessary
if False:
#if 'nnk_file_name' in config:
if len(cat3) == 0:
raise AttributeError("file_name3 is required for nnk correlation")
logger.warning("Performing NNK calculations...")
nnk = treecorr.NNKCorrelation(config,logger)
nnk.process(cat1,cat2,cat3)
logger.info("Done NNK calculation.")
rrk = None
if len(rand1) == 0:
if config.get('nnk_statistic',None) == 'compensated':
raise AttributeError("rand_files is required for nnk_statistic = | |
# Copyright 2021 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Experimental functions that involve a full pass over the dataset.
This module contains functions that are used in the preprocessing function, to
define a full pass operation such as computing the sum, min, max or unique
values of a tensor over the entire dataset. This is implemented by a reduction
operation in the Beam implementation.
From the user's point of view, an analyzer appears as a regular TensorFlow
function, i.e. it accepts and returns tensors. However it is represented in
the graph as a `Analyzer` which is not a TensorFlow op, but a placeholder for
the computation that takes place outside of TensorFlow.
"""
from typing import Any, Collection, List, Optional, Tuple, Type, Iterable
import numpy as np
import pyarrow as pa
import tensorflow as tf
from tensorflow_transform import analyzer_nodes
from tensorflow_transform import analyzers
from tensorflow_transform import common
from tensorflow_transform import common_types
from tensorflow_transform import nodes
from tensorflow_transform import tf_utils
from tfx_bsl import sketches
_APPROXIMATE_VOCAB_FILENAME_PREFIX = 'approx_vocab_'
_APPROXIMATE_VOCAB_FREQUENCY_FILENAME_PREFIX = 'approx_vocab_frequency_'
def _apply_analyzer(analyzer_def_cls: Type[analyzer_nodes.AnalyzerDef],
                    *tensor_inputs: common_types.TensorType,
                    **analyzer_def_kwargs: Any) -> Tuple[tf.Tensor, ...]:
  """Applies the analyzer over the whole dataset.

  Args:
    analyzer_def_cls: A class inheriting from analyzer_nodes.AnalyzerDef that
      should be applied.
    *tensor_inputs: A list of input `Tensor`s or `CompositeTensor`s.
    **analyzer_def_kwargs: KW arguments to use when constructing
      analyzer_def_cls.

  Returns:
    A tuple of `Tensor`s representing the values of the analysis result.
  """
  # Convert the input tensors into value nodes of the analysis graph.
  value_nodes = analyzer_nodes.get_input_tensors_value_nodes(tensor_inputs)
  # Attach the analyzer operation and wrap each of its outputs back into a
  # tensor so the result can be used inside the TF graph.
  result_nodes = nodes.apply_multi_output_operation(
      analyzer_def_cls, value_nodes, **analyzer_def_kwargs)
  return tuple(analyzer_nodes.wrap_as_tensor(node) for node in result_nodes)
@common.log_api_use(common.ANALYZER_COLLECTION)
def ptransform_analyzer(
    inputs: Collection[tf.Tensor],
    ptransform: Any,
    output_dtypes: Collection[tf.dtypes.DType],
    output_shapes: Collection[List[int]],
    output_asset_default_values: Optional[Collection[Optional[bytes]]] = None,
    name: Optional[str] = None):
  """Applies a user-provided PTransform over the whole dataset.

  WARNING: This is experimental.

  The given `ptransform` is handed a Beam PCollection in which each element is
  a tuple of `ndarray` batches, one per input tensor, preserving their shapes
  and dtypes. It must return a `PCollection` (or a tuple of `PCollection`s),
  each containing a single element that is an `ndarray` or a list of primitive
  types, consistent with `output_dtypes` and `output_shapes`. It may inherit
  from `tft_beam.experimental.PTransformAnalyzer` if access to a temp base
  directory is needed.

  Note that in order to have asset files copied correctly, any outputs that
  represent asset filenames must be added to the
  `tf.GraphKeys.ASSET_FILEPATHS` collection by the caller if using Transform's
  APIs in compat v1 mode.

  Args:
    inputs: An ordered collection of input `Tensor`s.
    ptransform: A Beam PTransform as described above.
    output_dtypes: An ordered collection of TensorFlow dtypes of the output of
      the analyzer.
    output_shapes: An ordered collection of shapes of the output of the
      analyzer. Must have the same length as output_dtypes.
    output_asset_default_values: (Optional) An ordered collection of optional
      `bytes` aligned with output_dtypes/output_shapes. Every item which is
      not `None` marks the corresponding output as a TF asset path and is used
      as the default value of that asset file prior to analysis.
    name: (Optional) Similar to a TF op name. Used to define a unique scope
      for this analyzer, which can be used for debugging info.

  Returns:
    A list of output `Tensor`s. These will have `dtype` and `shape` as
    specified by `output_dtypes` and `output_shapes`.

  Raises:
    ValueError: If output_dtypes and output_shapes have different lengths.
  """
  if len(output_dtypes) != len(output_shapes):
    raise ValueError('output_dtypes ({}) and output_shapes ({}) had different'
                     ' lengths'.format(output_dtypes, output_shapes))
  # Normalize the asset defaults so the zip() below always has a value per
  # output, validating the caller-supplied collection when present.
  if output_asset_default_values is None:
    output_asset_default_values = [None] * len(output_dtypes)
  elif len(output_asset_default_values) != len(output_dtypes):
    raise ValueError(
        'output_dtypes ({}) and output_asset_default_values ({}) had '
        'different lengths'.format(output_dtypes,
                                   output_asset_default_values))
  with tf.compat.v1.name_scope(name, 'ptransform'):
    tensor_infos = [
        analyzer_nodes.TensorInfo(dtype, shape, asset_default)
        for dtype, shape, asset_default in zip(output_dtypes, output_shapes,
                                               output_asset_default_values)
    ]
    return _apply_analyzer(
        analyzer_nodes.PTransform,
        *inputs,
        ptransform=ptransform,
        output_tensor_info_list=tensor_infos)
def _get_approx_vocab_filename(vocab_filename: Optional[str],
                               store_frequency: bool) -> str:
  """Returns a sanitized vocabulary filename with appropriate prefix applied.

  Args:
    vocab_filename: The file name for the approximate vocabulary file. If None,
      the "approximate_vocabulary" scope name in the context of this graph will
      be used as the file name.
    store_frequency: A bool that is true when the vocabulary for which this
      generates a filename stores term frequency. False otherwise.

  Returns:
    A valid filename.
  """
  if vocab_filename is not None:
    # An explicit filename wins; no prefix is prepended to it.
    prefix = None
  elif store_frequency:
    # BUG FIX: the two branches were swapped -- a frequency-storing vocabulary
    # must use the frequency prefix, matching the constants' names.
    prefix = _APPROXIMATE_VOCAB_FREQUENCY_FILENAME_PREFIX
  else:
    prefix = _APPROXIMATE_VOCAB_FILENAME_PREFIX
  # Make the file name path safe.
  return analyzers.sanitized_vocab_filename(vocab_filename, prefix=prefix)
@common.log_api_use(common.ANALYZER_COLLECTION)
def approximate_vocabulary(
x: common_types.TensorType,
top_k: int,
vocab_filename: Optional[str] = None,
store_frequency: bool = False,
weights: Optional[tf.Tensor] = None,
file_format: common_types.VocabularyFileFormatType = analyzers
.DEFAULT_VOCABULARY_FILE_FORMAT,
name: Optional[str] = None) -> common_types.TemporaryAnalyzerOutputType:
r"""Computes the unique values of a `Tensor` over the whole dataset.
Approximately computes the unique values taken by `x`, which can be a `Tensor`
or `CompositeTensor` of any size. The unique values will be aggregated over
all dimensions of `x` and all instances.
This analyzer provides an approximate alternative to `tft.vocabulary` that can
be more efficient with smaller `top_k` and/or smaller number of unique
elements in `x`. As a rule of thumb, `approximate_vocabulary` becomes more
efficient than `tft.vocabulary` if `top_k` or the number of unique elements in
`x` is smaller than 2*10^5. Moreover, this analyzer is subject to combiner
packing optimization that does not apply to `tft.vocabulary`. Caching is also
more efficient with the approximate implementation since the filtration
happens before writing out cache. Output artifact of `approximate_vocabulary`
is consistent with `tft.vocabulary` and can be used in `tft.apply_vocabulary`
mapper.
Implementation of this analyzer is based on the Misra-Gries algorithm [1]. It
stores at most `top_k` elements with lower bound frequency estimates at a
time. The algorithm keeps track of the approximation error `delta` such that
for any item x with true frequency X:
frequency[x] <= X <= frequency[x] + delta,
delta <= (m - m') / (top_k + 1),
where m is the total frequency of the items in the dataset and m' is the sum
of the lower bound estimates in `frequency` [2]. For datasets that are Zipfian
distributed with parameter `a`, the algorithm provides an expected value of
delta = m / (top_k ^ a) [3].
[1]
https://www.cs.utexas.edu/users/misra/scannedPdf.dir/FindRepeatedElements.pdf
[2] http://www.cohenwang.com/edith/bigdataclass2013/lectures/lecture1.pdf
[3] http://dimacs.rutgers.edu/~graham/pubs/papers/countersj.pdf
In case `file_format` is 'text' and one of the tokens contains the '\n' or
'\r' characters or is empty it will be discarded.
If an integer `Tensor` is provided, its semantic type should be categorical
not a continuous/numeric, since computing a vocabulary over a continuous
feature is not appropriate.
The unique values are sorted by decreasing frequency and then reverse
lexicographical order (e.g. [('a', 5), ('c', 3), ('b', 3)]). This is true even
if `x` is numerical dtype (e.g. [('3', 5), ('2', 3), ('111', 3)]).
Args:
x: A categorical/discrete input `Tensor` or `CompositeTensor` with dtype
tf.string | |
(([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: local_port
Local port
**type**\: int
**range:** 0..65535
.. attribute:: remote_port
Remote port
**type**\: int
**range:** 0..65535
.. attribute:: protocol
Protocol
**type**\: int
**range:** 0..255
.. attribute:: is_pmtu_enabled
True if tunnel PMTU checking is enabled
**type**\: bool
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
.. attribute:: local_tunnel_name
Local tunnel name
**type**\: str
**length:** 0..256
.. attribute:: remote_tunnel_name
Remote tunnel name
**type**\: str
**length:** 0..256
.. attribute:: class_name
L2TP class name
**type**\: str
**length:** 0..256
.. attribute:: active_sessions
Number of active sessions
**type**\: int
**range:** 0..4294967295
.. attribute:: sequence_ns
Sequence NS
**type**\: int
**range:** 0..65535
.. attribute:: sequence_nr
Sequence NR
**type**\: int
**range:** 0..65535
.. attribute:: local_window_size
Local window size
**type**\: int
**range:** 0..65535
.. attribute:: remote_window_size
Remote window size
**type**\: int
**range:** 0..65535
.. attribute:: retransmission_time
Retransmission time in seconds
**type**\: int
**range:** 0..65535
**units**\: second
.. attribute:: maximum_retransmission_time
Maximum retransmission time in seconds
**type**\: int
**range:** 0..65535
**units**\: second
.. attribute:: unsent_queue_size
Unsent queue size
**type**\: int
**range:** 0..65535
.. attribute:: unsent_maximum_queue_size
Unsent maximum queue size
**type**\: int
**range:** 0..65535
.. attribute:: resend_queue_size
Resend queue size
**type**\: int
**range:** 0..65535
.. attribute:: resend_maximum_queue_size
Resend maximum queue size
**type**\: int
**range:** 0..65535
.. attribute:: order_queue_size
Order queue size
**type**\: int
**range:** 0..65535
.. attribute:: packet_queue_check
Current number session packet queue check
**type**\: int
**range:** 0..65535
.. attribute:: digest_secrets
Control message authentication with digest secrets
**type**\: int
**range:** 0..65535
.. attribute:: resends
Total resends
**type**\: int
**range:** 0..4294967295
.. attribute:: zero_length_body_acknowledgement_sent
Total zero length body acknowledgement
**type**\: int
**range:** 0..4294967295
.. attribute:: total_out_of_order_drop_packets
Total out of order dropped packets
**type**\: int
**range:** 0..4294967295
.. attribute:: total_out_of_order_reorder_packets
Total out of order reorder packets
**type**\: int
**range:** 0..4294967295
.. attribute:: total_peer_authentication_failures
Number of peer authentication failures
**type**\: int
**range:** 0..4294967295
.. attribute:: is_tunnel_up
True if tunnel is up
**type**\: bool
.. attribute:: is_congestion_control_enabled
True if congestion control is enabled else false
**type**\: bool
.. attribute:: retransmit_time
Retransmit time distribution in seconds
**type**\: list of int
**range:** 0..65535
**units**\: second
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the YANG schema metadata and data leaves for one L2TPv2 tunnel entry.

    NOTE(review): attribute assignments below go through the overridden
    __setattr__ (_perform_setattr), so the order -- metadata first, then
    leaf values -- appears intentional; do not reorder.
    """
    super(L2Tpv2.Tunnels.Tunnel, self).__init__()
    # ydk bookkeeping: this node's name and position in the YANG tree.
    self.yang_name = "tunnel"
    self.yang_parent_name = "tunnels"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # YANG list key identifying a tunnel entry.
    self.ylist_key_names = ['local_tunnel_id']
    # This node has no child containers or child lists.
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict([])
    # Map of python attribute name -> YANG leaf descriptor (type + leaf tag).
    self._leafs = OrderedDict([
        ('local_tunnel_id', YLeaf(YType.int32, 'local-tunnel-id')),
        ('local_address', YLeaf(YType.str, 'local-address')),
        ('remote_address', YLeaf(YType.str, 'remote-address')),
        ('local_port', YLeaf(YType.uint16, 'local-port')),
        ('remote_port', YLeaf(YType.uint16, 'remote-port')),
        ('protocol', YLeaf(YType.uint8, 'protocol')),
        ('is_pmtu_enabled', YLeaf(YType.boolean, 'is-pmtu-enabled')),
        ('remote_tunnel_id', YLeaf(YType.uint32, 'remote-tunnel-id')),
        ('local_tunnel_name', YLeaf(YType.str, 'local-tunnel-name')),
        ('remote_tunnel_name', YLeaf(YType.str, 'remote-tunnel-name')),
        ('class_name', YLeaf(YType.str, 'class-name')),
        ('active_sessions', YLeaf(YType.uint32, 'active-sessions')),
        ('sequence_ns', YLeaf(YType.uint16, 'sequence-ns')),
        ('sequence_nr', YLeaf(YType.uint16, 'sequence-nr')),
        ('local_window_size', YLeaf(YType.uint16, 'local-window-size')),
        ('remote_window_size', YLeaf(YType.uint16, 'remote-window-size')),
        ('retransmission_time', YLeaf(YType.uint16, 'retransmission-time')),
        ('maximum_retransmission_time', YLeaf(YType.uint16, 'maximum-retransmission-time')),
        ('unsent_queue_size', YLeaf(YType.uint16, 'unsent-queue-size')),
        ('unsent_maximum_queue_size', YLeaf(YType.uint16, 'unsent-maximum-queue-size')),
        ('resend_queue_size', YLeaf(YType.uint16, 'resend-queue-size')),
        ('resend_maximum_queue_size', YLeaf(YType.uint16, 'resend-maximum-queue-size')),
        ('order_queue_size', YLeaf(YType.uint16, 'order-queue-size')),
        ('packet_queue_check', YLeaf(YType.uint16, 'packet-queue-check')),
        ('digest_secrets', YLeaf(YType.uint16, 'digest-secrets')),
        ('resends', YLeaf(YType.uint32, 'resends')),
        ('zero_length_body_acknowledgement_sent', YLeaf(YType.uint32, 'zero-length-body-acknowledgement-sent')),
        ('total_out_of_order_drop_packets', YLeaf(YType.uint32, 'total-out-of-order-drop-packets')),
        ('total_out_of_order_reorder_packets', YLeaf(YType.uint32, 'total-out-of-order-reorder-packets')),
        ('total_peer_authentication_failures', YLeaf(YType.uint32, 'total-peer-authentication-failures')),
        ('is_tunnel_up', YLeaf(YType.boolean, 'is-tunnel-up')),
        ('is_congestion_control_enabled', YLeaf(YType.boolean, 'is-congestion-control-enabled')),
        ('retransmit_time', YLeafList(YType.uint16, 'retransmit-time')),
    ])
    # Leaf values start unset (None); they are populated from device data.
    self.local_tunnel_id = None
    self.local_address = None
    self.remote_address = None
    self.local_port = None
    self.remote_port = None
    self.protocol = None
    self.is_pmtu_enabled = None
    self.remote_tunnel_id = None
    self.local_tunnel_name = None
    self.remote_tunnel_name = None
    self.class_name = None
    self.active_sessions = None
    self.sequence_ns = None
    self.sequence_nr = None
    self.local_window_size = None
    self.remote_window_size = None
    self.retransmission_time = None
    self.maximum_retransmission_time = None
    self.unsent_queue_size = None
    self.unsent_maximum_queue_size = None
    self.resend_queue_size = None
    self.resend_maximum_queue_size = None
    self.order_queue_size = None
    self.packet_queue_check = None
    self.digest_secrets = None
    self.resends = None
    self.zero_length_body_acknowledgement_sent = None
    self.total_out_of_order_drop_packets = None
    self.total_out_of_order_reorder_packets = None
    self.total_peer_authentication_failures = None
    self.is_tunnel_up = None
    self.is_congestion_control_enabled = None
    # leaf-list leaf: empty list rather than None.
    self.retransmit_time = []
    # Path fragments used to address this node; the segment path embeds the
    # list key value at call time.
    self._segment_path = lambda: "tunnel" + "[local-tunnel-id='" + str(self.local_tunnel_id) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/tunnels/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Route every attribute write through ydk's validating setter."""
    leaf_names = ['local_tunnel_id', 'local_address', 'remote_address', 'local_port', 'remote_port', 'protocol', 'is_pmtu_enabled', 'remote_tunnel_id', 'local_tunnel_name', 'remote_tunnel_name', 'class_name', 'active_sessions', 'sequence_ns', 'sequence_nr', 'local_window_size', 'remote_window_size', 'retransmission_time', 'maximum_retransmission_time', 'unsent_queue_size', 'unsent_maximum_queue_size', 'resend_queue_size', 'resend_maximum_queue_size', 'order_queue_size', 'packet_queue_check', 'digest_secrets', 'resends', 'zero_length_body_acknowledgement_sent', 'total_out_of_order_drop_packets', 'total_out_of_order_reorder_packets', 'total_peer_authentication_failures', 'is_tunnel_up', 'is_congestion_control_enabled', 'retransmit_time']
    self._perform_setattr(L2Tpv2.Tunnels.Tunnel, leaf_names, name, value)
class Sessions(Entity):
"""
List of session IDs
.. attribute:: session
L2TP information for a particular session
**type**\: list of :py:class:`Session <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Sessions.Session>`
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the YANG schema metadata for the sessions container.

    NOTE(review): assignments go through the overridden __setattr__
    (_perform_setattr), so keep the metadata-then-data ordering.
    """
    super(L2Tpv2.Sessions, self).__init__()
    # ydk bookkeeping: this node's name and position in the YANG tree.
    self.yang_name = "sessions"
    self.yang_parent_name = "l2tpv2"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # This container is not itself a keyed list entry.
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # Single child list: the per-session entries.
    self._child_list_classes = OrderedDict([("session", ("session", L2Tpv2.Sessions.Session))])
    # No leaves directly on the container.
    self._leafs = OrderedDict()
    self.session = YList(self)
    # Path fragments used to address this node in generated RPCs.
    self._segment_path = lambda: "sessions"
    self._absolute_path = lambda: "Cisco-IOS-XR-tunnel-l2tun-oper:l2tpv2/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Route every attribute write through ydk's validating setter."""
    monitored_leafs = []
    self._perform_setattr(L2Tpv2.Sessions, monitored_leafs, name, value)
class Session(Entity):
"""
L2TP information for a particular session
.. attribute:: local_tunnel_id (key)
Local tunnel ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: local_session_id (key)
Local session ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: session_application_data
Session application data
**type**\: :py:class:`SessionApplicationData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_tunnel_l2tun_oper.L2Tpv2.Sessions.Session.SessionApplicationData>`
.. attribute:: local_ip_address
Local session IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_ip_address
Remote session IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: l2tp_sh_sess_udp_lport
l2tp sh sess udp lport
**type**\: int
**range:** 0..65535
.. attribute:: l2tp_sh_sess_udp_rport
l2tp sh sess udp rport
**type**\: int
**range:** 0..65535
.. attribute:: protocol
Protocol
**type**\: int
**range:** 0..255
.. attribute:: remote_tunnel_id
Remote tunnel ID
**type**\: int
**range:** 0..4294967295
.. attribute:: call_serial_number
Call serial number
**type**\: int
**range:** 0..4294967295
.. attribute:: local_tunnel_name
Local tunnel name
**type**\: str
**length:** 0..256
.. attribute:: remote_tunnel_name
Remote tunnel name
**type**\: str
**length:** 0..256
.. attribute:: remote_session_id
Remote session ID
**type**\: int
**range:** 0..4294967295
.. attribute:: l2tp_sh_sess_tie_breaker_enabled
l2tp sh sess tie breaker enabled
**type**\: int
**range:** 0..255
.. attribute:: l2tp_sh_sess_tie_breaker
l2tp sh sess tie breaker
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: is_session_manual
True if session is manual
**type**\: bool
.. attribute:: is_session_up
True if session is up
**type**\: bool
.. attribute:: is_udp_checksum_enabled
True if UDP checksum enabled
**type**\: bool
.. attribute:: is_sequencing_on
True if session sequence is on
**type**\: bool
.. attribute:: is_session_state_established
True if session state is established
**type**\: bool
.. attribute:: is_session_locally_initiated
True if session initiated locally
**type**\: bool
.. attribute:: is_conditional_debug_enabled
True if conditional debugging is enabled
**type**\: bool
.. attribute:: unique_id
Unique ID
**type**\: int
**range:** 0..4294967295
.. attribute:: interface_name
Interface name
**type**\: str
**length:** 0..256
"""
_prefix = 'tunnel-l2tun-oper'
_revision = '2015-11-09'
def __init__(self):
super(L2Tpv2.Sessions.Session, self).__init__()
self.yang_name = "session"
self.yang_parent_name = "sessions"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['local_tunnel_id','local_session_id']
self._child_container_classes = OrderedDict([("session-application-data", ("session_application_data", L2Tpv2.Sessions.Session.SessionApplicationData))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('local_tunnel_id', YLeaf(YType.int32, 'local-tunnel-id')),
('local_session_id', YLeaf(YType.int32, 'local-session-id')),
('local_ip_address', YLeaf(YType.str, 'local-ip-address')),
('remote_ip_address', YLeaf(YType.str, 'remote-ip-address')),
('l2tp_sh_sess_udp_lport', YLeaf(YType.uint16, 'l2tp-sh-sess-udp-lport')),
('l2tp_sh_sess_udp_rport', YLeaf(YType.uint16, 'l2tp-sh-sess-udp-rport')),
('protocol', YLeaf(YType.uint8, 'protocol')),
('remote_tunnel_id', YLeaf(YType.uint32, 'remote-tunnel-id')),
('call_serial_number', YLeaf(YType.uint32, 'call-serial-number')),
('local_tunnel_name', YLeaf(YType.str, 'local-tunnel-name')),
('remote_tunnel_name', YLeaf(YType.str, 'remote-tunnel-name')),
('remote_session_id', YLeaf(YType.uint32, 'remote-session-id')),
('l2tp_sh_sess_tie_breaker_enabled', YLeaf(YType.uint8, 'l2tp-sh-sess-tie-breaker-enabled')),
('l2tp_sh_sess_tie_breaker', YLeaf(YType.uint64, 'l2tp-sh-sess-tie-breaker')),
('is_session_manual', YLeaf(YType.boolean, 'is-session-manual')),
('is_session_up', YLeaf(YType.boolean, 'is-session-up')),
('is_udp_checksum_enabled', YLeaf(YType.boolean, 'is-udp-checksum-enabled')),
('is_sequencing_on', YLeaf(YType.boolean, 'is-sequencing-on')),
('is_session_state_established', YLeaf(YType.boolean, 'is-session-state-established')),
('is_session_locally_initiated', YLeaf(YType.boolean, 'is-session-locally-initiated')),
('is_conditional_debug_enabled', YLeaf(YType.boolean, 'is-conditional-debug-enabled')),
('unique_id', YLeaf(YType.uint32, 'unique-id')),
('interface_name', YLeaf(YType.str, 'interface-name')),
])
self.local_tunnel_id = None
self.local_session_id = None
self.local_ip_address = None
self.remote_ip_address = None
self.l2tp_sh_sess_udp_lport = None
self.l2tp_sh_sess_udp_rport = None
self.protocol = None
self.remote_tunnel_id = None
self.call_serial_number = None
self.local_tunnel_name = None
self.remote_tunnel_name = None
self.remote_session_id = None
self.l2tp_sh_sess_tie_breaker_enabled = None
self.l2tp_sh_sess_tie_breaker = None
self.is_session_manual = None
self.is_session_up = None
self.is_udp_checksum_enabled = | |
import logging
import os
from abc import ABC, abstractmethod
from collections import namedtuple
from datetime import datetime, timezone
import yaml
from github import Github
from tqdm import tqdm
from .generator import ExtractorGenerator
from .models.model_manager import ModelManager
from .scanners.file_scanner import FileScanner
from .scanners.git_scanner import GitScanner
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
# NOTE(review): calling basicConfig() at import time configures the root
# logger as a side effect; libraries normally leave this to the application
# entry point -- confirm this is intentional.
logging.basicConfig(level=logging.INFO)
# Lightweight records used to map database rows onto named fields before
# converting them to plain dictionaries for callers.
Rule = namedtuple('Rule', 'id regex category description')
Repo = namedtuple('Repo', 'url last_scan')
Discovery = namedtuple(
    'Discovery',
    'id file_name commit_id line_number snippet repo_url rule_id state timestamp')
class Interface(ABC):
    """ Thin helper around a PEP 249 (Python DB API 2.0) connection.

    Subclasses bind a concrete driver by supplying the open connection and
    the driver's base exception class, and implement the driver-specific
    `query_check` / `query_id` primitives.

    Parameters
    ----------
    db: database class (as defined in Python Database API Specification v2.0
        (PEP 249))
    Error: base exception class for the corresponding database type
    """

    def __init__(self, db, error):
        self.db = db
        self.Error = error

    def query(self, query, *args):
        """ Execute a write statement and commit it.

        Returns `True` on success. On a driver error, or when a required
        placeholder argument is missing, the transaction is rolled back and
        `False` is returned.
        """
        cursor = self.db.cursor()
        try:
            cursor.execute(query, args)
            self.db.commit()
            return True
        except (TypeError, IndexError, self.Error):
            # TypeError/IndexError signal a missing or malformed argument;
            # self.Error covers anything raised by the driver itself.
            self.db.rollback()
            return False

    @abstractmethod
    def query_check(self, query, *args):
        return

    @abstractmethod
    def query_id(self, query, *args):
        return

    def query_as(self, query, cast, *args):
        """ Execute a read query and map its first row through `cast`.

        `cast` is a namedtuple class; the matching row is returned as a
        plain dict. On error (including an empty result, which makes the
        cast raise TypeError) the transaction is rolled back and an empty
        tuple is returned.
        """
        cursor = self.db.cursor()
        try:
            cursor.execute(query, args)
            row = cursor.fetchone()
            return dict(cast(*row)._asdict())
        except (TypeError, IndexError, self.Error):
            self.db.rollback()
            return ()
class Client(Interface):
    def __init__(self, db, error):
        """ Store the DB-API connection and the driver's base error class. """
        super().__init__(db, error)
def add_discovery(self, query, file_name, commit_id, line_number, snippet,
repo_url, rule_id, state='new'):
""" Add a new discovery.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
file_name: str
The name of the file that produced the discovery
commit_id: str
The id of the commit introducing the discovery
line_number: int
The line number of the discovery in the file
snippet: str
The line matched during the scan
repo_url: str
The url of the repository
rule_id: str
The id of the rule used during the scan
state: str, default `new`
The state of the discovery
Returns
-------
int
The id of the new discovery (-1 in case of error)
"""
return self.query_id(
query, file_name,
commit_id, line_number, snippet, repo_url, rule_id, state)
    @abstractmethod
    def add_discoveries(self, query, discoveries, repo_url):
        """ Bulk-insert discoveries for a repository (driver-specific). """
        return
def add_repo(self, query, repo_url):
""" Add a new repository.
Do not set the latest commit (it will be set when the repository is
scanned).
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
repo_url: str
The url of the repository
Returns
-------
bool
`True` if the insert was successfull, `False` otherwise
"""
return self.query(query, repo_url,)
def add_rule(self, query, regex, category, description=''):
""" Add a new rule.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
regex: str
The regex to be matched
category: str
The category of the rule
description: str, optional
The description of the rule
Returns
-------
int
The id of the new rule (-1 in case of errors)
"""
return self.query_id(query, regex, category, description)
    def add_rules_from_file(self, filename):
        """ Add rules from a file.
        Parameters
        ----------
        filename: str
            The file containing the rules
        Raises
        ------
        FileNotFoundError
            If the file does not exist
        ParserError
            If the file is malformed
        KeyError
            If one of the required attributes in the file (i.e., rules, regex,
            and category) is missing
        """
        with open(filename, 'r') as f:
            data = yaml.safe_load(f)
        for rule in data['rules']:
            # NOTE(review): `add_rule` declares `query` as its first
            # positional parameter, but no query is passed here, so
            # `rule['regex']` would bind to `query`. Presumably concrete
            # subclasses override `add_rule` without the `query` parameter --
            # confirm against the implementations before relying on this.
            self.add_rule(rule['regex'],
                          rule['category'],
                          rule.get('description', ''))
def delete_rule(self, query, ruleid):
"""Delete a rule from database
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
ruleid: int
The id of the rule that will be deleted.
Returns
------
False
If the removal operation fails
True
Otherwise
"""
cursor = self.db.cursor()
try:
cursor.execute(query, (ruleid,))
self.db.commit()
return bool(cursor.fetchone()[0])
except (TypeError, IndexError):
""" A TypeError is raised if any of the required arguments is
missing. """
self.db.rollback()
return False
except self.Error:
self.db.rollback()
return False
return True
def delete_repo(self, query, repo_url):
""" Delete a repository.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
repo_url: str
The url of the repository to delete
Returns
-------
bool
`True` if the repo was successfully deleted, `False` otherwise
"""
return self.query(query, repo_url,)
def delete_discoveries(self, query, repo_url):
""" Delete all discoveries of a repository.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
repo_url: str
The repository url of the discoveries to delete
Returns
-------
bool
`True` if the discoveries were successfully deleted, `False`
otherwise
"""
return self.query(query, repo_url,)
def get_repos(self):
""" Get all the repositories.
Returns
-------
list
A list of repositories (dictionaries).
An empty list if there are no repos (or in case of errors)
Raises
------
TypeError
If any of the required arguments is missing
"""
query = 'SELECT * FROM repos'
cursor = self.db.cursor()
all_repos = []
cursor.execute(query)
result = cursor.fetchone()
while result:
all_repos.append(dict(Repo(*result)._asdict()))
result = cursor.fetchone()
return all_repos
def get_repo(self, query, repo_url):
""" Get a repository.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
repo_url: str
The url of the repository
Returns
-------
dict
A repository (an empty dictionary if the url does not exist)
Raises
------
TypeError
If any of the required arguments is missing
"""
cursor = self.db.cursor()
cursor.execute(query, (repo_url,))
result = cursor.fetchone()
if result:
return dict(Repo(*result)._asdict())
else:
return {}
def get_rules(self, category_query=None, category=None):
""" Get the rules.
Differently from other get methods, here we pass the category as
argument. This is due to the fact that categories may have a slash
(e.g., `auth/password`). Encoding such categories in the url would
cause an error on the server side.
NOTE: Here exceptions are suppressed in order to not stop the scanning.
Parameters
----------
category_query: str, optional
If specified, run this specific query (with `category` as an
argument), otherwise get all the rules
category: str, optional
If specified get all the rules of this category, otherwise get all
the rules
Returns
-------
list
A list of rules (dictionaries)
"""
query = 'SELECT * FROM rules'
if category_query is not None and category is not None:
query = category_query
cursor = self.db.cursor()
try:
all_rules = []
if category is not None:
cursor.execute(query, (category,))
else:
cursor.execute(query)
result = cursor.fetchone()
while result:
all_rules.append(dict(Rule(*result)._asdict()))
result = cursor.fetchone()
return all_rules
except (TypeError, IndexError):
""" A TypeError is raised if any of the required arguments is
missing. """
self.db.rollback()
return []
except self.Error:
self.db.rollback()
return []
def get_rule(self, query, rule_id):
""" Get a rule.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
rule_id: int
The id of the rule
Returns
-------
dict
A rule
"""
return self.query_as(query, Rule, rule_id,)
def get_discoveries(self, query, repo_url, file_name=None):
""" Get all the discoveries of a repository.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
repo_url: str
The url of the repository
file_name: str, optional
The name of the file to filter discoveries on
Returns
-------
list
A list of discoveries (dictionaries)
Raises
------
TypeError
If any of the required arguments is missing
"""
cursor = self.db.cursor()
all_discoveries = []
params = (repo_url,) if file_name is None else (
repo_url, file_name)
cursor.execute(query, params)
result = cursor.fetchone()
while result:
all_discoveries.append(dict(Discovery(*result)._asdict()))
result = cursor.fetchone()
return all_discoveries
def get_discovery(self, query, discovery_id):
""" Get a discovery.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
discovery_id: int
The id of the discovery
Returns
-------
dict
A discovery
"""
return self.query_as(query, Discovery, discovery_id,)
def get_discovery_group(self, query, state_query, repo_url, state=None):
""" Get all the discoveries of a repository, grouped by file_name,
snippet, and state.
Parameters
----------
query: str
The query to be run, with placeholders in place of parameters
repo_url: str
The url of the repository
state: str, optional
The state of the discoveries. If not set, get all the discoveries
independently from their state
Returns
-------
list
A list of tuples. Each tuple is composed by file_name, snippet,
number | |
<filename>sampler/gpflow-files/model.py
# Copyright 2016 <NAME>, <NAME>, <NAME>, alexggmatthews, fujiisoup
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, absolute_import
from .param import Parameterized, AutoFlow, DataHolder
from .mean_functions import Zero
from scipy.optimize import minimize, OptimizeResult
import numpy as np
import tensorflow as tf
from . import hmc
from . import session as session_mngr
from ._settings import settings
import sys
float_type = settings.dtypes.float_type
class ObjectiveWrapper(object):
    """
    Wrap an objective function to make optimization more robust.

    Each call remembers the most recent input that produced an all-finite
    gradient, so the model can be restored to a known-good state if the
    optimizer crashes; non-finite gradient entries are replaced with zeros.
    """
    def __init__(self, objective):
        self._objective = objective
        self._previous_x = None

    def __call__(self, x):
        f, g = self._objective(x)
        if not np.all(np.isfinite(g)):
            print("Warning: inf or nan in gradient: replacing with zeros")
            return f, np.where(np.isfinite(g), g, 0.)
        # Gradient is fully finite: remember this input as the last known
        # good value before handing the pair back to the optimizer.
        self._previous_x = x
        return f, g
class Model(Parameterized):
    """
    The Model base class.
    To use this class, inheriting classes must define the method
    >>> build_likelihood(self)
    which returns a tensorflow representation of the model likelihood.
    Param and Parameterized objects that are children of the model can be used
    in the tensorflow expression. Children on the model are defined by simply
    doing:
    >>> m = Model()
    >>> p = Param(1.0)
    >>> m.p = p
    At compile time (i.e. when build_likelihood is called), the `Param` object
    becomes a tensorflow variable.
    The result of build_likelihood() is added to the prior (see Parameterized
    class) and the resulting objective and gradients are compiled into
    self._objective.
    This object has a `_needs_recompile` switch. When any of the child nodes
    change, this object is notified and on optimization (or MCMC) the
    likelihood is recompiled. This allows fixing and constraining parameters,
    but only recompiling lazily.
    This object has a `_free_vars` tensorflow array. This array is used to
    build the tensorflow representations of the Param objects during
    `make_tf_array`.
    This object defines `optimize` and `sample` to allow for model fitting.
    """
    def __init__(self, name='model'):
        """
        name is a string describing this model.
        """
        Parameterized.__init__(self)
        self.scoped_keys.extend(['build_likelihood', 'build_prior'])
        self._name = name
        self._needs_recompile = True
        self.num_fevals = 0  # Keeps track of how often _objective is called
        self._session = None

    @property
    def name(self):
        # Read-only model name given at construction.
        return self._name

    @property
    def session(self):
        # The TensorFlow session created/chosen by compile(); None until then.
        return self._session

    def __getstate__(self):
        """
        This method is necessary for pickling objects
        """
        state = Parameterized.__getstate__(self)
        # Drop TensorFlow graph/session objects: they are not picklable and
        # are rebuilt by compile() after unpickling.
        keys = ['_session', '_free_vars', '_objective',
                '_minusF', '_minusG', '_feed_dict_keys']
        for key in keys:
            state.pop(key, None)
        return state

    def __setstate__(self, d):
        Parameterized.__setstate__(self, d)
        # Force a rebuild of the TF graph on first use after unpickling.
        self._needs_recompile = True

    def compile(self, session=None, graph=None, optimizer=None):
        """
        Compile the tensorflow function "self._objective".
        The `session` and `graph` parameters are mutually exclusive.
        :param session: TensorFlow Session. This parameter prevails `graph`
                        parameter. Custom created session will be used if
                        this argument is left default, i.e. None.
        :param graph: TensorFlow Graph. This argument ignored when `session`
                      differs from default value, otherwise it is passed to
                      new session constructor. Default TensorFlow graph value
                      is used, when `graph` equals None.
        :param optimizer: TensorFlow Optimizer.
        """
        out_filename = settings.profiling.output_file_name + "_objective"
        # Session selection: prefer the caller's session, then the current
        # default session (only if it matches the requested graph), and
        # finally a freshly created session from the session manager.
        default_session = tf.get_default_session()
        if session is None:
            if graph is None or (default_session is not None and
                                 default_session.graph is graph):
                session = default_session
        if session is None:
            session = session_mngr.get_session(
                graph=graph, output_file_name=out_filename)
        with session.graph.as_default():
            self._free_vars = tf.Variable(self.get_free_state())
            self.make_tf_array(self._free_vars)
            with self.tf_mode():
                f = self.build_likelihood() + self.build_prior()
            g = tf.gradients(f, self._free_vars)[0]
            # Objective is the *negative* log posterior, since the scipy/TF
            # optimizers minimize.
            self._minusF = tf.negative(f, name='objective')
            self._minusG = tf.negative(g, name='grad_objective')
            # The optimiser needs to be part of the computational graph,
            # and needs to be initialised before tf.initialise_all_variables()
            # is called.
            if optimizer is None:
                opt_step = None
            else:
                opt_step = optimizer.minimize(
                    self._minusF, var_list=[self._free_vars])
            init = tf.global_variables_initializer()
            session.run(init)
        self._session = session
        # build tensorflow functions for computing the likelihood
        if settings.verbosity.tf_compile_verb:
            print("compiling tensorflow function...")
            sys.stdout.flush()
        self._feed_dict_keys = self.get_feed_dict_keys()

        def obj(x):
            # Numpy-facing objective: returns (f, grad) as float64 arrays.
            self.num_fevals += 1
            feed_dict = {self._free_vars: x}
            self.update_feed_dict(self._feed_dict_keys, feed_dict)
            f, g = self.session.run([self._minusF, self._minusG],
                                    feed_dict=feed_dict)
            return f.astype(np.float64), g.astype(np.float64)
        self._objective = obj
        if settings.verbosity.tf_compile_verb:
            print("done")
            sys.stdout.flush()
        self._needs_recompile = False
        return opt_step

    @AutoFlow()
    def compute_log_prior(self):
        """ Compute the log prior of the model (uses AutoFlow)"""
        return self.build_prior()

    @AutoFlow()
    def compute_log_likelihood(self):
        """ Compute the log likelihood of the model (uses AutoFlow on ``self.build_likelihood()``)"""
        return self.build_likelihood()

    def sample(self, num_samples, Lmin=5, Lmax=20, epsilon=0.01, thin=1,
               burn=0, verbose=False, return_logprobs=False, return_acc_ratio=False,
               RNG=np.random.RandomState(0)):
        """
        Use Hamiltonian Monte Carlo to draw samples from the model posterior.
        """
        # NOTE(review): the default RNG is a mutable default argument shared
        # across calls, so repeated calls without an explicit RNG continue
        # the same random stream rather than restarting from seed 0 --
        # confirm this is the intended reproducibility behaviour.
        if self._needs_recompile:
            self.compile()
        return hmc.sample_HMC(self._objective, num_samples,
                              Lmin=Lmin, Lmax=Lmax, epsilon=epsilon, thin=thin, burn=burn,
                              x0=self.get_free_state(), verbose=verbose,
                              return_logprobs=return_logprobs, return_acc_ratio=return_acc_ratio, RNG=RNG)

    def optimize(self, method='L-BFGS-B', tol=None, callback=None,
                 maxiter=1000, **kw):
        """
        Optimize the model by maximizing the likelihood (possibly with the
        priors also) with respect to any free variables.
        method can be one of:
        a string, corresponding to a valid scipy.optimize.minimize string
        a tensorflow optimizer (e.g. tf.optimize.AdaGrad)
        The callback function is executed by passing the current value of
        self.get_free_state()
        tol is the tolerance passed to scipy.optimize.minimize (ignored
        for tensorflow optimizers)
        max_iters defines the maximum number of iterations
        In the case of the scipy optimization routines, any additional keyword
        arguments are passed through.
        KeyboardInterrupts are caught and the model is set to the most recent
        value tried by the optimization routine.
        This method returns the results of the call to optimize.minimize, or a
        similar object in the tensorflow case.
        """
        # Dispatch on the type of `method`: a string selects a scipy routine,
        # anything else is assumed to be a TensorFlow optimizer instance.
        if type(method) is str:
            return self._optimize_np(method, tol, callback, maxiter, **kw)
        return self._optimize_tf(method, callback, maxiter, **kw)

    def _optimize_tf(self, method, callback, maxiter):
        """
        Optimize the model using a tensorflow optimizer. See self.optimize()
        """
        opt_step = self.compile(optimizer=method)
        feed_dict = {}
        try:
            iteration = 0
            while iteration < maxiter:
                self.update_feed_dict(self._feed_dict_keys, feed_dict)
                self.session.run(opt_step, feed_dict=feed_dict)
                self.num_fevals += 1
                if callback is not None:
                    callback(self.session.run(self._free_vars))
                iteration += 1
        except KeyboardInterrupt:
            print("Caught KeyboardInterrupt, setting model\
 with most recent state.")
            self.set_state(self.session.run(self._free_vars))
            return None
        final_x = self.session.run(self._free_vars)
        self.set_state(final_x)
        fun, jac = self._objective(final_x)
        # NOTE(review): scipy's OptimizeResult documents `status` as an int;
        # a string is stored here -- confirm downstream consumers tolerate it.
        r = OptimizeResult(x=final_x,
                           success=True,
                           message="Finished iterations.",
                           fun=fun,
                           jac=jac,
                           status="Finished iterations.")
        return r

    def _optimize_np(self, method='L-BFGS-B', tol=None, callback=None,
                     maxiter=1000, **kw):
        """
        Optimize the model to find the maximum likelihood or MAP point. Here
        we wrap `scipy.optimize.minimize`, any keyword arguments are passed
        through as `options`.
        method is a string (default 'L-BFGS-B') specifying the scipy
        optimization routine, one of
        - 'Powell'
        - 'CG'
        - 'BFGS'
        - 'Newton-CG'
        - 'L-BFGS-B'
        - 'TNC'
        - 'COBYLA'
        - 'SLSQP'
        - 'dogleg'
        tol is the tolerance to be passed to the optimization routine
        callback is callback function to be passed to the optimization routine
        max_iters is the maximum number of iterations (used in the options dict
        for the optimization routine)
        """
        if self._needs_recompile:
            self.compile()
        options = dict(disp=settings.verbosity.optimisation_verb, maxiter=maxiter)
        # Accept the deprecated spellings of the option names, with a warning.
        if 'max_iters' in kw:  # pragma: no cover
            options['maxiter'] = kw.pop('max_iters')
            import warnings
            warnings.warn("Use `maxiter` instead of deprecated `max_iters`.", np.VisibleDeprecationWarning)
        if 'display' in kw:  # pragma: no cover
            options['disp'] = kw.pop('display')
            import warnings
            warnings.warn("Use `disp` instead of deprecated `display`.", np.VisibleDeprecationWarning)
        options.update(kw)
        # here's the actual call to minimize. Catch keyboard errors as harmless.
        obj = ObjectiveWrapper(self._objective)
        try:
            result = minimize(fun=obj,
                              x0=self.get_free_state(),
                              method=method,
                              jac=True,
                              tol=tol,
                              callback=callback,
                              options=options)
        except KeyboardInterrupt:
            # Restore the last state the wrapper saw with a finite gradient.
            print("Caught KeyboardInterrupt, setting \
model with most recent state.")
            self.set_state(obj._previous_x)
            return None
        if settings.verbosity.optimisation_verb:
            print("optimization terminated, setting model state")
        self.set_state(result.x)
        return result
class GPModel(Model):
"""
A base class for Gaussian process models, that is, those of the form
.. math::
:nowrap:
\\begin{align}
\\theta & \sim p(\\theta) \\\\
f & \sim \\mathcal{GP}(m(x), k(x, x'; \\theta)) \\\\
f_i & = f(x_i) \\\\
y_i\,|\,f_i & \sim p(y_i|f_i)
\\end{align}
This class mostly adds functionality to compile predictions. To use it,
inheriting classes must define a build_predict function, which computes
the means and variances of the latent function. This gets compiled
similarly to build_likelihood in the Model class.
These predictions are then pushed through the likelihood to obtain means
and variances of held out | |
= ''
outstr = ''
for line in str.split('\n')+['\n']:
if _PROMPT_RE.match(line):
if pyout:
outstr += ('<span class="py-output">%s</span>\n\n' %
pyout.strip())
pyout = ''
pysrc += line+'\n'
else:
if pysrc:
# Prompt over-rides other colors (incl string)
pysrc = _DOCTEST_RE.sub(_doctest_sub, pysrc)
pysrc = _PROMPT_RE.sub(r'<span class="py-prompt">'+
r'\1</span>', pysrc)
outstr += ('<span class="py-src">%s</span>\n'
% pysrc.strip())
pysrc = ''
pyout += line+'\n'
if pyout.strip():
outstr += ('<span class="py-output">%s</span>\n' %
pyout.strip())
return outstr.strip()
def _doctest_sub(match):
"""
This helper function is used by L{colorize_doctestblock} to
add colorization to matching expressions. It is called by
C{_DOCTEST_RE.sub} with an expression that matches
C{_DOCTEST_RE}.
@return: The HTML code for the colorized expression.
@rtype: C{string}
@see: L{_DOCTEST_RE}
"""
str = match.group()
if str[:1] == "'" or str[:6] == '"':
return '<span class="py-string">%s</span>' % str
elif str[:1] in '#':
return '<span class="py-comment">%s</span>' % str
else:
return '<span class="py-keyword">%s</span>' % str
######################################################################
## Python source colorizer
######################################################################
"""
Goals:
- colorize tokens appropriately (using css)
- optionally add line numbers
-
"""
#: Javascript code for the PythonSourceColorizer
PYSRC_JAVASCRIPTS = '''\
function expand(id) {
var elt = document.getElementById(id+"-expanded");
if (elt) elt.style.display = "block";
var elt = document.getElementById(id+"-expanded-linenums");
if (elt) elt.style.display = "block";
var elt = document.getElementById(id+"-collapsed");
if (elt) { elt.innerHTML = ""; elt.style.display = "none"; }
var elt = document.getElementById(id+"-collapsed-linenums");
if (elt) { elt.innerHTML = ""; elt.style.display = "none"; }
var elt = document.getElementById(id+"-toggle");
if (elt) { elt.innerHTML = "-"; }
}
function collapse(id) {
var elt = document.getElementById(id+"-expanded");
if (elt) elt.style.display = "none";
var elt = document.getElementById(id+"-expanded-linenums");
if (elt) elt.style.display = "none";
var elt = document.getElementById(id+"-collapsed-linenums");
if (elt) { elt.innerHTML = "<br/>"; elt.style.display="block"; }
var elt = document.getElementById(id+"-toggle");
if (elt) { elt.innerHTML = "+"; }
var elt = document.getElementById(id+"-collapsed");
if (elt) {
elt.style.display = "block";
var indent = elt.indent;
var pad = elt.pad;
var s = "<span class=\'lineno\'>";
for (var i=0; i<pad.length; i++) { s += " " }
s += "</span>";
s += " <span class=\'py-line\'>";
for (var i=0; i<indent.length; i++) { s += " " }
s += "<a href=\'#\' onclick=\'expand(\\"" + id;
s += "\\");return false\'>...</a></span><br />";
elt.innerHTML = s;
}
}
function toggle(id) {
elt = document.getElementById(id+"-toggle");
if (elt.innerHTML == "-")
collapse(id);
else
expand(id);
}
function highlight(id) {
var elt = document.getElementById(id+"-def");
if (elt) elt.className = "highlight-hdr";
var elt = document.getElementById(id+"-expanded");
if (elt) elt.className = "highlight";
var elt = document.getElementById(id+"-collapsed");
if (elt) elt.className = "highlight";
}
function num_lines(s) {
var n = 1;
var pos = s.indexOf("\\n");
while ( pos > 0) {
n += 1;
pos = s.indexOf("\\n", pos+1);
}
return n;
}
// Collapse all blocks that mave more than `min_lines` lines.
function collapse_all(min_lines) {
var elts = document.getElementsByTagName("div");
for (var i=0; i<elts.length; i++) {
var elt = elts[i];
var split = elt.id.indexOf("-");
if (split > 0)
if (elt.id.substring(split, elt.id.length) == "-expanded")
if (num_lines(elt.innerHTML) > min_lines)
collapse(elt.id.substring(0, split));
}
}
function expandto(href) {
var start = href.indexOf("#")+1;
if (start != 0) {
if (href.substring(start, href.length) != "-") {
collapse_all(4);
pos = href.indexOf(".", start);
while (pos != -1) {
var id = href.substring(start, pos);
expand(id);
pos = href.indexOf(".", pos+1);
}
var id = href.substring(start, href.length);
expand(id);
highlight(id);
}
}
}
function kill_doclink(id) {
if (id) {
var parent = document.getElementById(id);
parent.removeChild(parent.childNodes.item(0));
}
else if (!this.contains(event.toElement)) {
var parent = document.getElementById(this.parentID);
parent.removeChild(parent.childNodes.item(0));
}
}
function doclink(id, name, targets) {
var elt = document.getElementById(id);
// If we already opened the box, then destroy it.
// (This case should never occur, but leave it in just in case.)
if (elt.childNodes.length > 1) {
elt.removeChild(elt.childNodes.item(0));
}
else {
// The outer box: relative + inline positioning.
var box1 = document.createElement("div");
box1.style.position = "relative";
box1.style.display = "inline";
box1.style.top = 0;
box1.style.left = 0;
// A shadow for fun
var shadow = document.createElement("div");
shadow.style.position = "absolute";
shadow.style.left = "-1.3em";
shadow.style.top = "-1.3em";
shadow.style.background = "#404040";
// The inner box: absolute positioning.
var box2 = document.createElement("div");
box2.style.position = "relative";
box2.style.border = "1px solid #a0a0a0";
box2.style.left = "-.2em";
box2.style.top = "-.2em";
box2.style.background = "white";
box2.style.padding = ".3em .4em .3em .4em";
box2.style.fontStyle = "normal";
box2.onmouseout=kill_doclink;
box2.parentID = id;
var links = "";
target_list = targets.split(",");
for (var i=0; i<target_list.length; i++) {
var target = target_list[i].split("=");
links += "<li><a href=\'" + target[1] +
"\' style=\'text-decoration:none\'>" +
target[0] + "</a></li>";
}
// Put it all together.
elt.insertBefore(box1, elt.childNodes.item(0));
//box1.appendChild(box2);
box1.appendChild(shadow);
shadow.appendChild(box2);
box2.innerHTML =
"Which <b>"+name+"</b> do you want to see documentation for?" +
"<ul style=\'margin-bottom: 0;\'>" +
links +
"<li><a href=\'#\' style=\'text-decoration:none\' " +
"onclick=\'kill_doclink(\\""+id+"\\");return false;\'>"+
"<i>None of the above</i></a></li></ul>";
}
}
'''
PYSRC_EXPANDTO_JAVASCRIPT = '''\
<script type="text/javascript">
<!--
expandto(location.href);
// -->
</script>
'''
import tokenize, sys, token, cgi, keyword
try: from cStringIO import StringIO
except: from StringIO import StringIO
class PythonSourceColorizer:
"""
A class that renders a python module's source code into HTML
pages. These HTML pages are intended to be provided along with
the API documentation for a module, in case a user wants to learn
more about a particular object by examining its source code.
Links are therefore generated from the API documentation to the
source code pages, and from the source code pages back into the
API documentation.
The HTML generated by C{PythonSourceColorizer} has several notable
features:
- CSS styles are used to color tokens according to their type.
(See L{CSS_CLASSES} for a list of the different token types
that are identified).
- Line numbers are included to the left of each line.
- The first line of each class and function definition includes
a link to the API source documentation for that object.
- The first line of each class and function definition includes
an anchor that can be used to link directly to that class or
function.
- If javascript is enabled, and the page is loaded using the
anchor for a class or function (i.e., if the url ends in
C{'#I{<name>}'}), then that class or function will automatically
be highlighted; and all other classes and function definition
blocks will be 'collapsed'. These collapsed blocks can be
expanded by clicking on them.
- Unicode input is supported (including automatic detection
of C{'coding:'} declarations).
"""
#: A look-up table that is used to determine which CSS class
#: should be used to colorize a given token. The following keys
#: may be used:
#: - Any token name (e.g., C{'STRING'})
#: - Any operator token (e.g., C{'='} or C{'@'}).
#: - C{'KEYWORD'} -- Python keywords such as C{'for'} and C{'if'}
#: - C{'DEFNAME'} -- the name of a class or function at the top
#: of its definition statement.
#: - C{'BASECLASS'} -- names of base classes at the top of a class
#: definition statement.
#: - C{'PARAM'} -- function parameters
#: - C{'DOCSTRING'} -- docstrings
#: - C{'DECORATOR'} -- decorator names
#: If no CSS class can be found for a given token, then it won't
#: be marked with any CSS class.
CSS_CLASSES = {
'NUMBER': 'py-number',
'STRING': 'py-string',
'COMMENT': 'py-comment',
'NAME': 'py-name',
'KEYWORD': 'py-keyword',
'DEFNAME': 'py-def-name',
'BASECLASS': 'py-base-class',
'PARAM': 'py-param',
'DOCSTRING': 'py-docstring',
'DECORATOR': 'py-decorator',
'OP': 'py-op',
'@': 'py-decorator',
}
#: HTML code for the beginning of a collapsable function or class
#: definition block. The block contains two <div>...</div>
#: elements -- a collapsed version and an expanded version -- and
#: only one of these elements is visible at any given time. By
#: default, all definition blocks are expanded.
#:
#: This string should be interpolated with the following values::
#: (name, indentation, name)
#: Where C{name} is the anchor name for the function or class; and
#: indentation is a string of whitespace used to indent the
#: ellipsis marker in the collapsed version.
START_DEF_BLOCK = (
'<div id="%s-collapsed" style="display:none;" '
'pad="%s" indent="%s"></div>'
'<div id="%s-expanded">')
#: HTML code for the end of a collapsable function or class
#: definition block.
END_DEF_BLOCK = '</div>'
#: A regular expression used to pick out the unicode encoding for
#: the source file.
UNICODE_CODING_RE = re.compile(r'.*?\n?.*?coding[:=]\s*([-\w.]+)')
#: A configuration constant, used to determine whether or not to add
#: collapsable <div> elements for | |
# -*- coding: utf-8 -*-
import wxmpl
import numpy
import matplotlib
import layeritem
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas
from matplotlib.backends.backend_wxagg import\
NavigationToolbar2WxAgg as ToolBar
from matplotlib.figure import Figure
import wx.lib.agw.flatnotebook
import colors
from util import kindOfItem, axesDef
import copy
# Module-wide lookup tables: map a profile item name (e.g. "T", a gas,
# aerosol or cloud identifier) to the color / marker used for its curve.
itemColor = colors.profileItemColors
itemMarker = colors.profileItemMarkers
class Data(wxmpl.Channel):
    """ Class to keep data and style for the PlotItemPanel data

    Bundles one curve (x/y arrays) together with its display
    attributes (name, color, line style, marker) and a ``changed``
    flag that records whether the curve was edited since creation.
    """
    def __init__(self, x, y, theName="", theColor=None, theStyle=None,
                 theMarker=None):
        # Register name/style information with the wxmpl channel base
        # class, then keep local copies used by the panels' own
        # plotting code.
        wxmpl.Channel.__init__(
            self, name=theName, color=theColor, style=theStyle,
            marker=theMarker)
        self.x = x
        self.y = y
        self.name = theName
        self.color = theColor
        self.style = theStyle
        self.marker = theMarker
        # Set to True by myUpdate() once the curve data was modified.
        self.changed = False
    def getX(self):
        # Current x (value) array of the curve.
        return self.x
    def getY(self):
        # Current y (pressure/level) array of the curve.
        return self.y
    def myUpdate(self, x, y):
        # Replace the curve data, copying the inputs so later in-place
        # edits of the caller's arrays do not alias this curve
        # (numpy.zeros(n) + a is an idiom for a float copy of a).
        self.x = numpy.zeros(x.shape[0]) + x
        self.y = numpy.zeros(y.shape[0]) + y
        self.changed = True
class GenericPlotItemPanel(wx.Panel):
    """ plot on a PlotPanel one curve

    wx.Panel that renders one editable profile curve (value versus
    pressure or level) on a matplotlib canvas, with a navigation
    toolbar, mouse-position readout, and undo/redo history for
    click-editing of the curve.
    """
    def __init__(self, parent, value, pression, theName, liste_item=None,
                 kind="GASES", xlegend="ppmv", edit=False,
                 layerstyle=False, layer=None, yInPressions=True, tskin=None,
                 tickSize=10):
        # value: x data of the curve; pression: pressure levels (hPa,
        # per the y-axis label set in OnPlot); theName: item name used
        # to look up color/marker and x-axis limits.
        # layerstyle/layer: draw the item as a stepped layer line
        # instead of a point-per-level curve.
        # yInPressions: plot against pressure (log scale) rather than
        # level index.  tskin: optional skin temperature, shown as an
        # extra point when theName == "T".
        self.theName = theName
        self.theParent = parent
        self.xlegend = xlegend
        self.edit = edit
        self.kind = kind
        self.yInPressions = yInPressions
        self.layer = layer
        self.layerstyle = layerstyle
        self.tickSize = tickSize
        self.pression = pression
        self.value = value
        self.myLayeritem = None
        self.tskin = []
        self.ytskin = []
        if tskin:
            self.tskin.append(tskin)
        wx.Panel.__init__(self, parent, style=wx.BORDER_SIMPLE)
        # define object for matplotlib
        self.fig = Figure()
        self.canvas = FigureCanvas(self, -1, self.fig)
        self.canvas.mpl_connect('motion_notify_event', self.onMouseMotion)
        self.text = wx.StaticText(self, -1, label="")
        self.sizer = wx.BoxSizer(wx.VERTICAL)
        self.sizer.Add(self.canvas, 1, wx.LEFT | wx.GROW, 1)
        self.tlb = ToolBar(self.canvas)
        self.sizer.Add(self.tlb, 0, wx.GROW)
        self.tlb.Realize()
        self.SetSizer(self.sizer)
        # NOTE(review): this replaces the StaticText created a few
        # lines above; the first instance is never added to a sizer --
        # confirm whether the duplicate creation is intentional.
        self.text = wx.StaticText(self, -1, label="")
        self.sizer.Add(self.text)
        self.Fit()
        # onInsert: when False (non-GASES items), a click zeroes the
        # nearest point instead of moving it (see OnPoint).
        self.onInsert = True
        self.myCurves = []
        self.OnPlot()
        # Undo/redo stacks of copies of the x data (see OnUndo/OnRedo).
        self.valueHistory = []
        self.valueHistoryRedo = []
    def onResize(self, event):
        # Debug trace only (Python 2 print statement).
        print "event resize", str(event)
    def onMouseMotion(self, event):
        """ set text when moving mousse """
        # Show "ylabel=y xlabel=x" under the plot while the cursor is
        # inside the axes.
        if event.inaxes:
            xdata = event.xdata
            ydata = event.ydata
            xstr = "%0.4g" % xdata
            ystr = "%0.4g" % ydata
            value = str(self.axes.get_ylabel()) + "=" + ystr + \
                " " + str(self.axes.get_xlabel()) + "=" + xstr
            self.text.SetLabel(value)
    def OnPlot(self):
        """ effectively perform the graphics """
        # Rebuilds the figure from scratch: axes, y scale/ticks, the
        # Data channel(s) in self.myChannelList, labels and limits.
        self.SetTickSize(self.tickSize)
        self.fig.clear()
        self.axes = self.fig.add_subplot(1, 1, 1)
        # [::1] makes a copy of the value array.
        self.x = self.value[::1]
        if self.yInPressions:
            # Log-pressure axis with a fixed set of labelled ticks.
            self.axes.set_yscale("log")
            self.axes.set_yticks((0.00005, 0.0001, 0.0002, 0.0005, 0.001,
                                  0.002, 0.005, 0.01, 0.02, 0.05,
                                  0.1, 0.2, 0.5, 1, 2, 5, 10, 25, 50,
                                  100, 200, 300, 500, 1000))
            label = ('5e-5', '1e-4', '2e-4', '5e-4', '1e-3',
                     '2e-3', '5e-3', '0.01', '0.02', '0.05',
                     '0.1', '0.2', '0.5', '1', '2', '5', '10', '25', '50',
                     '100', '200', '300', '500', '1000')
            self.axes.set_yticklabels(label)
            self.axes.set_ylabel('pressure (hPa)')
            # +150 hPa of margin below the lowest level leaves room for
            # the skin-temperature point added further down.
            self.axes.set_ylim((self.pression[-1] + 150, self.pression[0]))
        else:
            self.axes.set_ylim(self.value.shape[0] + 2, 1)
            self.axes.set_ylabel('level')
        # y support: levels/pressures for gases, layer midpoints for
        # aerosols/clouds.
        if self.kind == "GASES":
            if self.yInPressions:
                self.y = self.pression[::1]
            else:
                self.y = numpy.arange(1, self.value.shape[0] + 1, 1)
        else:
            if self.yInPressions:
                self.y = self.layer[::1]
            else:
                self.y = numpy.arange(1.5, self.value.shape[0], 1)
        if not self.layerstyle:
            self.data = Data(self.x, self.y, theName=self.theName,
                             theColor=itemColor[self.theName],
                             theMarker=itemMarker[self.theName])
        else:
            # Layer style: expand the per-layer values into the x/y
            # polyline of a stepped "layer line" via layeritem.
            if self.yInPressions:
                self.myLayeritem = layeritem.Layeritem(
                    layeritem=self.x, pression=self.pression[::1])
            else:
                self.myLayeritem = layeritem.Layeritem(
                    layeritem=self.x, pression=numpy.arange(
                        1, self.value.shape[0] + 1, 1))
            (self.xlayeritem, self.ylayeritem) = (
                self.myLayeritem.computeLayerLine(layers=self.y))
            self.data = Data(self.xlayeritem, self.ylayeritem,
                             theName=self.theName,
                             theColor=itemColor[self.theName],
                             theMarker=itemMarker[self.theName])
        self.axes.set_xlabel(self.xlegend)
        self.SetXlimits(self.theName)
        self.axes.grid(True, axis='both')
        self.myChannelList = []
        self.myChannelList.append(self.data)
        # For temperature profiles, show the skin temperature as a
        # separate one-point red "*" channel just below the profile.
        if self.theName == "T":
            if len(self.tskin) > 0:
                if self.yInPressions:
                    self.ytskin.append(self.pression[-1] + 50)
                else:
                    self.ytskin.append(self.value.shape[0] + 1)
                datatskin = Data(self.tskin, self.ytskin,
                                 theName='TSKIN', theColor="red",
                                 theMarker="*")
                self.myChannelList.append(datatskin)
        # NOTE(review): drawing is only forced here on Mac -- other
        # platforms presumably redraw via later Update() calls; confirm.
        if wx.Platform == '__WXMAC__':
            self.Update()
    def SetTickSize(self, size):
        # Applies globally to matplotlib rc, not just to this figure.
        matplotlib.rc('xtick', labelsize=size)
        matplotlib.rc('ytick', labelsize=size)
    def ConnectCanvasEVT_POINT(self, methode):
        # Route matplotlib button presses to `methode` (e.g. OnPoint);
        # keep the connection id for later disconnection.
        self.cid = self.fig.canvas.mpl_connect("button_press_event", methode)
    def DisconnectCanvasEVT_POINT(self):
        self.fig.canvas.mpl_disconnect(self.cid)
    def SetXlimits(self, theName=None, xmin=None, xmax=None):
        """ set x limits """
        # Explicit xmin/xmax win; otherwise fall back to the per-item
        # limits and scale from util.axesDef.  Note self.theName is
        # used for the lookup, not the theName parameter.
        if xmin is not None and xmax is not None:
            self.axes.set_xlim((xmin, xmax))
        else:
            if axesDef[self.theName]["xlimits"] is not None:
                self.axes.set_xlim(axesDef[self.theName]["xlimits"])
            self.axes.set_xscale(axesDef[self.theName]["xscale"])
    def Update(self):
        """ erase the curve if necessary and redraw """
        # Remove the previously drawn line (if any) before re-plotting
        # every channel in myChannelList.
        if len(self.myCurves) == 1:
            if len(self.axes.lines) == 1:
                self.axes.lines.remove(self.axes.lines[0])
            self.myCurves.pop()
        for data in self.myChannelList:
            c = self.axes.plot(
                data.x, data.y, color=data.color, marker=data.marker)
            self.myCurves.append(c)
        self.fig.canvas.draw_idle()
    def UpdateData(self, dataX):
        # Replace the curve's x data wholesale (e.g. after an external
        # edit) and redraw; layer-style curves are re-expanded first.
        self.x = dataX
        self.data.setChanged(True)
        if not self.layerstyle:
            self.data.myUpdate(self.x, self.y)
        else:
            (self.xlayeritem, self.ylayeritem) = (
                self.myLayeritem.computeLayerLine(layeritem=dataX))
            self.data.myUpdate(self.xlayeritem, self.ylayeritem)
        self.Update()
    def OnRedo(self):
        # Pop the most recent state from the redo stack, pushing the
        # current state onto the undo stack first (copies via
        # numpy.zeros(n) + a).
        if self.valueHistoryRedo != []:
            if not self.layerstyle:
                X = numpy.zeros(self.x.shape[0]) + self.x
                self.valueHistory.append(X)
                X = self.valueHistoryRedo.pop()
                self.x = numpy.zeros(X.shape[0]) + X
                self.data.myUpdate(self.x, self.y)
            else:
                X = numpy.zeros(self.xlayeritem.shape[0]) + self.xlayeritem
                self.valueHistory.append(X)
                X = self.valueHistoryRedo.pop()
                self.xlayeritem = numpy.zeros(X.shape[0]) + X
                self.x = self.myLayeritem.getLayeritem(self.xlayeritem)
                self.myLayeritem.update(self.xlayeritem, self.ylayeritem)
                self.data.myUpdate(self.xlayeritem, self.ylayeritem)
            self.Update()
    def OnUndo(self):
        # Mirror image of OnRedo: pop from the undo stack, push the
        # current state onto the redo stack.
        if self.valueHistory != []:
            if not self.layerstyle:
                X = numpy.zeros(self.x.shape[0]) + self.x
                self.valueHistoryRedo.append(X)
                X = self.valueHistory.pop()
                self.x = numpy.zeros(X.shape[0]) + X
                self.data.myUpdate(self.x, self.y)
            else:
                X = numpy.zeros(self.xlayeritem.shape[0]) + self.xlayeritem
                self.valueHistoryRedo.append(X)
                X = self.valueHistory.pop()
                self.xlayeritem = numpy.zeros(X.shape[0]) + X
                self.x = self.myLayeritem.getLayeritem(self.xlayeritem)
                # NOTE(review): unlike OnRedo, no myLayeritem.update()
                # call here -- confirm whether that asymmetry is a bug.
                self.data.myUpdate(self.xlayeritem, self.ylayeritem)
            self.Update()
    def OnPoint(self, e):
        """ OnPoint Methods

        Mouse-click edit handler: moves (or zeroes) the curve point
        nearest to the click's y position.  Left click / double click
        and clicks outside the data range are rejected.  Returns True
        when an edit was applied, False otherwise.
        """
        if (e.button == 1) or (e.dblclick):
            if self.canvas.HasCapture():
                self.canvas.ReleaseMouse()
            return(False)
        if e.xdata is None or e.ydata is None:
            if self.canvas.HasCapture():
                self.canvas.ReleaseMouse()
            if self.HasCapture():
                self.ReleaseMouse()
            return False
        if (e.ydata < self.y.min() or e.ydata > self.y.max()):
            if self.canvas.HasCapture():
                self.canvas.ReleaseMouse()
            return(False)
        # self.tlb.release_zoom(e)
        if not self.layerstyle:
            # Save a copy of the current x data for undo.
            y = numpy.zeros(self.x.shape[0]) + self.x
            self.valueHistory.append(y)
            # Nearest level (smallest |y - click|); 1000 acts as an
            # "infinite" initial distance.
            mini = 1000
            for index in range(self.y.shape[0]):
                dist = abs(self.y[index] - e.ydata)
                if dist < mini:
                    imin = index
                    mini = dist
            if self.kind != "GASES" and not self.onInsert:
                self.x[imin] = 0
            else:
                self.x[imin] = e.xdata
            self.data.setChanged(True)
            self.data.myUpdate(self.x, self.y)
            self.Update()
        else:
            y = numpy.zeros(self.xlayeritem.shape[0]) + self.xlayeritem
            self.valueHistory.append(y)
            # Nearest layer-line vertex strictly above the click
            # (dist > 0).  NOTE(review): if no vertex satisfies the
            # condition, imin stays unbound and the indexing below
            # raises NameError -- confirm the click range guarantees
            # a match.
            mini = 1000
            for index in range(self.ylayeritem.shape[0]):
                dist = self.ylayeritem[index] - e.ydata
                if dist < mini and dist > 0:
                    imin = index
                    mini = dist
            if not self.onInsert:
                self.xlayeritem[imin] = 0
                # we have 2 points to move and its depends if imin is odd
                # NOTE(review): imin != shape[0] is always true (imin is
                # a valid index); the boundary guard was probably meant
                # to be shape[0] - 1 -- confirm.
                if imin % 2 != 0:
                    if imin != self.xlayeritem.shape[0]:
                        self.xlayeritem[imin + 1] = 0
                else:
                    if imin != 0:
                        self.xlayeritem[imin - 1] = 0
            else:
                self.xlayeritem[imin] = e.xdata
                # we have 2 points to move and its depends if imini is odd
                if imin % 2 != 0:
                    if imin != self.xlayeritem.shape[0]:
                        self.xlayeritem[imin + 1] = e.xdata
                else:
                    if imin != 0:
                        self.xlayeritem[imin - 1] = e.xdata
            self.data.setChanged(True)
            self.data.myUpdate(self.xlayeritem, self.ylayeritem)
            self.x = self.myLayeritem.getLayeritem(self.xlayeritem)
            self.Update()
        if self.canvas.HasCapture():
            self.canvas.ReleaseMouse()
        if self.HasCapture():
            self.ReleaseMouse()
        return True
    def GetItem(self):
        """ get value from curve (=data) and return a profile for the item """
        # Layer-style curves are collapsed back from the plotted layer
        # line into per-layer values before being returned.
        myX = self.data.getX()
        if self.layerstyle:
            layerX = self.myLayeritem.getLayeritem(myX)
            return layerX
        else:
            return myX
class PlotItemPanelAll(wxmpl.PlotPanel):
""" Plot all gas in the same graphic """
    def __init__(self, parent, theProfile, kind="GASES", layer=None,
                 xlegendT=None,
                 xlegend=None, yInPressions=True, addTskin=False,
                 XinLog=False, tickSize=8):
        # theProfile: profile object indexable by item name ('P', 'T',
        # gases, ...).  A deep copy is plotted so the caller's
        # reference profile (myProfileRef) stays untouched.
        self.myProfileRef = theProfile
        self.myProfile = copy.deepcopy(theProfile)
        self.pression = self.myProfile['P']
        self.layer = layer
        # Layer midpoints expressed as fractional level indices
        # (1.5, 2.5, ...), used when plotting layer items by level.
        self.layerlevel = numpy.arange(1.5, self.myProfile['T'].shape[0], 1)
        self.tickSize = tickSize
        wxmpl.PlotPanel.__init__(self, parent, -1, None)
        self.kind = kind
        self.theName = "all " + kind
        self.yInPressions = yInPressions
        # Item names to draw, grouped by category.
        self.itemsList = {}
        self.itemsList['GASES'] = self.myProfile.gas_list
        self.itemsList['AEROSOLS'] = self.myProfile.aerosol_list
        self.itemsList['CLOUDS'] = self.myProfile.cloud_list
        # Per-kind x-axis label override (None -> derive from profile
        # units in OnPlot); labelxT is the temperature axis label.
        self.labelx = {}
        self.labelx[kind] = xlegend
        self.labelxT = xlegendT
        self.tskin = []
        self.ytskin = []
        if addTskin:
            self.tskin.append(self.myProfile['SKIN']['T'])
        self.XinLog = XinLog
        # Draw once everything above is initialised.
        self.OnPlot()
def SetTickSize(self, size):
matplotlib.rc('xtick', labelsize=size)
matplotlib.rc('ytick', labelsize=size)
def OnPlot(self, theProfile=None):
if theProfile is not None:
self.myProfileRef = theProfile
self.myProfile = copy.deepcopy(theProfile)
self.SetTickSize(self.tickSize)
fig = self.get_figure()
fig.clear()
self.axes = fig.gca()
if self.XinLog:
self.axes.set_xscale("log")
self.data = {}
self.stripCharter = wxmpl.StripCharter(
self.axes, loc_legend="upper left")
if self.yInPressions:
if len(self.tskin) >= 1:
self.ytskin.append(self.pression[-1] + 50)
self.axes.set_yscale("log")
self.axes.set_yticks((0.00005, 0.0001, 0.0002, 0.0005, 0.001,
0.002, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2,
0.5, 1, 2, 5, 10, 25, 50, 100,
200, 300, 500, 1000))
label = ('5e-5', '1e-4', '2e-4', '5e-4', '1e-3',
'2e-3', '5e-3', '0.01', '0.02', '0.05', '0.1', '0.2',
'0.5', '1', '2', '5', '10', '25', '50', '100',
'200', '300', '500', '1000')
self.axes.set_yticklabels(label)
self.axes.set_ylabel('pressure (hPa)')
self.axes.set_ylim((self.pression[-1], self.pression[0]))
if self.kind == 'GASES':
y = self.myProfile['P'][::1]
else:
y = self.layer[::1]
ytempe = self.myProfile['P'][::1]
else:
self.axes.set_ylabel('level')
y = numpy.arange(1, self.myProfile['T'].shape[0] + 1, 1)
if len(self.tskin) >= 1:
self.ytskin.append(self.myProfile['T'].shape[0] + 1)
if self.kind != 'GASES':
y = self.layerlevel
ytempe = numpy.arange(1, self.myProfile['T'].shape[0] + 1, 1)
self.axes.set_ylim(self.myProfile['T'].shape[0] + 2, 1)
self.axes.set_xlabel(self.labelx[self.kind])
if self.kind == "GASES":
if self.labelx["GASES"] is None:
self.axes.set_xlabel(self.myProfile['Q_ATTRIBUTE']['UNITS'])
markerFlag = False
else:
markerFlag = True
if self.kind == "AEROSOLS":
if self.labelx["AEROSOLS"] is None:
for aero in self.myProfile.aerosol_list:
if self.myProfile[aero] is not None and self.myProfile[
aero + '_ATTRIBUTE']['UNITS'] != 'n/a':
self.axes.set_xlabel(
self.myProfile[aero + '_ATTRIBUTE']['UNITS'])
else:
self.axes.set_xlabel(self.labelx["AEROSOLS"])
if self.kind == "CLOUDS":
if self.labelx["CLOUDS"] is None:
for cloud in self.myProfile.cloud_list:
if self.myProfile[cloud] is not None and self.myProfile[
cloud + '_ATTRIBUTE']['UNITS'] != 'n/a':
self.axes.set_xlabel(
self.myProfile[cloud + '_ATTRIBUTE']['UNITS'])
else:
self.axes.set_xlabel(self.labelx["CLOUDS"])
self.axesT = self.axes.twiny()
if self.labelxT is None:
self.axesT.set_xlabel(self.myProfile['T_ATTRIBUTE']['UNITS'])
else:
self.axesT.set_xlabel(self.labelxT)
# must redefine ylim for the levels case ??? TODO | |
local_job_q,
local_result_q,
local_fail_q,
const_arg,
c,
m,
reset_pbc,
njobs,
emergency_dump_path,
# job_q_get_timeout,
host,
port,
authkey):
"""
the wrapper spawned nproc times calling and handling self.func
"""
global log
log = logging.getLogger(__name__+'.'+progress.get_identifier(name='worker{}'.format(i+1), bold=False))
log.setLevel(loglevel)
Signal_to_sys_exit(signals=[signal.SIGTERM])
Signal_to_SIG_IGN(signals=[signal.SIGINT])
n = os.nice(0)
try:
n = os.nice(nice - n)
except PermissionError:
log.warning("changing niceness not permitted! run with niceness %s", n)
log.debug("worker function now alive, niceness %s", n)
cnt = 0
time_queue = 0.
time_calc = 0.
# check for func definition without status members count, max_count
#args_of_func = inspect.getfullargspec(func).args
#if len(args_of_func) == 2:
count_args = progress.getCountKwargs(func)
if count_args is None:
log.warning("found function without status information (progress will not work)")
m.value = 0 # setting max_count to -1 will hide the progress bar
_func = lambda arg, const_arg, c, m : func(arg, const_arg)
elif count_args != ["c", "m"]:
log.debug("found counter keyword arguments: %s", count_args)
# Allow other arguments, such as ["jmc", "jmm"] as defined
# in `validCountKwargs`.
# Here we translate to "c" and "m".
def _func(arg, const_arg, c, m):
kwargs = {count_args[0]: c,
count_args[1]: m}
return func(arg, const_arg, **kwargs)
else:
log.debug("found standard keyword arguments: [c, m]")
_func = func
# supposed to catch SystemExit, which will shut the client down quietly
try:
# the main loop, exit loop when:
# a) job_q is empty
# b) SystemExit is caught
# c) any queue operation (get, put) fails for what ever reason
# d) njobs becomes zero
while njobs != 0:
njobs -= 1
# try to get an item from the job_q
tg_0 = time.time()
try:
arg = job_q_get()
# regular case, just stop working when empty job_q was found
except queue.Empty:
log.info("finds empty job queue, processed %s jobs", cnt)
break
except ContainerClosedError:
log.info("job queue was closed, processed %s jobs", cnt)
break
# handle SystemExit in outer try ... except
except SystemExit as e:
arg = None
log.warning('getting arg from job_q failed due to SystemExit')
raise e
# job_q.get failed -> server down?
except Exception as e:
arg = None
log.error("Error when calling 'job_q_get'")
handle_unexpected_queue_error(e)
break
tg_1 = time.time()
time_queue += (tg_1-tg_0)
# try to process the retrieved argument
try:
tf_0 = time.time()
log.debug("START crunching _func")
res = _func(arg, const_arg, c, m)
log.debug("DONE crunching _func")
tf_1 = time.time()
time_calc += (tf_1-tf_0)
# handle SystemExit in outer try ... except
except SystemExit as e:
raise e
# something went wrong while doing the actual calculation
# - write traceback to file
# - try to inform the server of the failure
except:
err, val, trb = sys.exc_info()
log.error("caught exception '%s' when crunching 'func'\n%s", err.__name__, traceback.print_exc())
# write traceback to file
hostname = socket.gethostname()
fname = 'traceback_err_{}_{}.trb'.format(err.__name__, getDateForFileName(includePID=True))
log.info("write exception to file %s", fname)
with open(fname, 'w') as f:
traceback.print_exception(etype=err, value=val, tb=trb, file=f)
log.debug("put arg to local fail_q")
try:
local_fail_q.put((arg, err.__name__, hostname))
# handle SystemExit in outer try ... except
except SystemExit as e:
log.warning('putting arg to local fail_q failed due to SystemExit')
raise e
# fail_q.put failed -> server down?
except Exception as e:
log.error('putting arg to local fail_q failed')
handle_unexpected_queue_error(e)
break
else:
log.debug('putting arg to local fail_q was successful')
# processing the retrieved arguments succeeded
# - try to send the result back to the server
else:
try:
tp_0 = time.time()
local_result_q.put((arg, res))
tp_1 = time.time()
time_queue += (tp_1-tp_0)
# handle SystemExit in outer try ... except
except SystemExit as e:
log.warning('putting result to local result_q failed due to SystemExit')
raise e
except Exception as e:
log.error('putting result to local result_q failed due to %s', type(e))
emergency_dump(arg, res, emergency_dump_path, host, port, authkey)
handle_unexpected_queue_error(e)
break
del res
cnt += 1
reset_pbc()
log.debug("continue with next arg")
# considered as normal exit caused by some user interaction, SIGINT, SIGTERM
# note SIGINT, SIGTERM -> SystemExit is achieved by overwriting the
# default signal handlers
except SystemExit:
if arg is None:
log.warning("SystemExit, quit processing, no argument to reinsert")
else:
log.warning("SystemExit, quit processing, reinsert current argument, please wait")
log.debug("put arg back to local job_q")
try:
local_job_q.put(arg)
# handle SystemExit in outer try ... except
except SystemExit as e:
log.error("puting arg back to local job_q failed due to SystemExit")
raise e
# fail_q.put failed -> server down?
except Exception as e:
log.error("puting arg back to local job_q failed due to %s", type(e))
handle_unexpected_queue_error(e)
else:
log.debug("putting arg back to local job_q was successful")
try:
sta = progress.humanize_time(time_calc / cnt)
except:
sta = 'invalid'
stat = "pure calculation time: {} single task average: {}".format(progress.humanize_time(time_calc), sta)
try:
stat += "\ncalculation:{:.2%} communication:{:.2%}".format(time_calc/(time_calc+time_queue), time_queue/(time_calc+time_queue))
except ZeroDivisionError:
pass
log.info(stat)
log.debug("JobManager_Client.__worker_func at end (PID %s)", os.getpid())
def start(self):
"""
starts a number of nproc subprocess to work on the job_q
SIGTERM and SIGINT are managed to terminate all subprocesses
retruns when all subprocesses have terminated
"""
self.connect() # get shared objects from server
if not self.connected:
raise JMConnectionError("Can not start Client with no connection to server (shared objetcs are not available)")
log.info("STARTING CLIENT\nserver:%s authkey:%s port:%s num proc:%s", self.server, self.authkey.decode(), self.port, self.nproc)
c = []
for i in range(self.nproc):
c.append(progress.UnsignedIntValue())
m_progress = []
for i in range(self.nproc):
m_progress.append(progress.UnsignedIntValue(0))
m_set_by_function = []
for i in range(self.nproc):
m_set_by_function.append(progress.UnsignedIntValue(0))
if not self.show_counter_only:
m_set_by_function = m_progress
else:
m_progress = None
prepend = []
infoline = progress.StringValue(num_of_bytes=12)
infoline = None
# try:
# worker_stdout_queue = mp.Queue(-1)
# listener = QueueListener(worker_stdout_queue, console_hand)
# listener.start()
# except NameError:
# log.error("QueueListener not available in this python version (need at least 3.2)\n"
# "this may resault in incoheerent logging")
# worker_stdout_queue = None
# worker_stdout_queue = None
l = len(str(self.nproc))
for i in range(self.nproc):
prepend.append("w{0:0{1}}:".format(i+1, l))
job_q, result_q, fail_q, const_arg, manager = self.manager_objects
local_job_q = mp.Queue()
local_result_q = mp.Queue()
local_fail_q = mp.Queue()
kwargs = {'reconnect_wait': self.reconnect_wait,
'reconnect_tries': self.reconnect_tries,
'ping_timeout': self.ping_timeout,
'ping_retry': self.ping_retry}
job_q_get = proxy_operation_decorator(proxy=job_q, operation='get', **kwargs)
job_q_put = proxy_operation_decorator(proxy=job_q, operation='put', **kwargs)
result_q_put = proxy_operation_decorator(proxy=result_q, operation='put', **kwargs)
fail_q_put = proxy_operation_decorator(proxy=fail_q, operation='put', **kwargs)
def pass_job_q_put(job_q_put, local_job_q):
# log.debug("this is thread thr_job_q_put with tid %s", ctypes.CDLL('libc.so.6').syscall(186))
while True:
data = local_job_q.get()
job_q_put(data)
# log.debug("stopped thread thr_job_q_put with tid %s", ctypes.CDLL('libc.so.6').syscall(186))
def pass_result_q_put(result_q_put, local_result_q):
log.debug("this is thread thr_result_q_put with tid %s", ctypes.CDLL('libc.so.6').syscall(186))
try:
while True:
data = local_result_q.get()
result_q_put(data)
except Exception as e:
log.error("thr_result_q_put caught error %s", type(e))
log.info(traceback.format_exc())
log.debug("stopped thread thr_result_q_put with tid %s", ctypes.CDLL('libc.so.6').syscall(186))
def pass_fail_q_put(fail_q_put, local_fail_q):
# log.debug("this is thread thr_fail_q_put with tid %s", ctypes.CDLL('libc.so.6').syscall(186))
while True:
data = local_fail_q.get()
log.info("put {} to failq".format(data))
fail_q_put(data)
# log.debug("stopped thread thr_fail_q_put with tid %s", ctypes.CDLL('libc.so.6').syscall(186))
thr_job_q_put = threading.Thread(target=pass_job_q_put , args=(job_q_put , local_job_q))
thr_job_q_put.daemon = True
thr_result_q_put = threading.Thread(target=pass_result_q_put, args=(result_q_put, local_result_q))
thr_result_q_put.daemon = True
thr_fail_q_put = threading.Thread(target=pass_fail_q_put , args=(fail_q_put , local_fail_q))
thr_fail_q_put.daemon = True
thr_job_q_put.start()
thr_result_q_put.start()
thr_fail_q_put.start()
with progress.ProgressBarCounterFancy(count = c,
max_count = m_progress,
interval = self.interval,
prepend = prepend,
sigint = 'ign',
sigterm = 'ign',
info_line = infoline) as self.pbc :
if (not self.hide_progress) and self.show_statusbar_for_jobs:
self.pbc.start()
for i in range(self.nproc):
reset_pbc = lambda: self.pbc.reset(i)
p = mp.Process(target=self.__worker_func, args=(self.func, # func
self.nice, # nice
log.level, # loglevel
i, # i
job_q_get, # job_q_get
local_job_q, # local_job_q
local_result_q, # local_result_q
local_fail_q, # local_fail_q
const_arg, # const_arg
c[i], # c
m_set_by_function[i], # m
reset_pbc, # reset_pbc
self.njobs, # njobs
self.emergency_dump_path, # emergency_dump_path
#self._job_q_get_timeout, # job_q_get_timeout
self.server, # host
self.port, # port
self.authkey)) # authkey
self.procs.append(p)
p.start()
log.debug("started new worker with pid %s", p.pid)
time.sleep(0.1)
log.debug("all worker processes startes")
#time.sleep(self.interval/2)
if self.use_special_SIG_INT_handler:
exit_handler_signals = [signal.SIGTERM]
jm_client_special_interrupt_signals = [signal.SIGINT]
else:
exit_handler_signals = [signal.SIGTERM, signal.SIGINT]
jm_client_special_interrupt_signals = []
log.debug("setup Signal_to_terminate_process_list handler for signals %s", exit_handler_signals)
exit_handler = Signal_to_terminate_process_list(process_list = self.procs,
identifier_list = [progress.get_identifier(name = "worker{}".format(i+1),
pid = p.pid,
bold = True) for i, p in enumerate(self.procs)],
signals = exit_handler_signals,
timeout = 2)
log.debug("setup Signal_handler_for_Jobmanager_client handler for signals %s", jm_client_special_interrupt_signals)
Signal_handler_for_Jobmanager_client(client_object = self,
exit_handler = exit_handler,
| |
<gh_stars>0
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Utilities functions
"""
import marvin
import os
import time
import logging
import string
import random
import imaplib
import email
import socket
import urlparse
import datetime
from marvin.cloudstackAPI import *
from marvin.sshClient import SshClient
from marvin.codes import *
def restart_mgmt_server(server):
    """Restarts the management server

    @param server: dict with keys "ipaddress", "port", "username" and
                   "password" identifying the management host
    @raise Exception: when the init script output does not contain both
                      "Server Stop - OK" and "Server Start - OK"
    """
    try:
        # Get the SSH client
        ssh = is_server_ssh_ready(
            server["ipaddress"],
            server["port"],
            server["username"],
            server["password"],
        )
        result = ssh.execute("/etc/init.d/cloud-management restart")
        res = str(result)
        # Server Stop - OK
        # Server Start - OK
        if res.count("OK") != 2:
            # Bug fix: the original did ``raise ("ErrorInReboot!")``,
            # which raises a plain string and therefore fails with a
            # TypeError instead of reporting the reboot problem.
            raise Exception("ErrorInReboot!")
    except Exception as e:
        raise e
    return
def fetch_latest_mail(services, from_mail):
    """Fetch mail

    Log into the IMAP server described by *services* ("server",
    "email", "password", "folder") and return the first text block of
    the newest message received from *from_mail* since yesterday.

    @return: the text payload, or False when no matching mail exists
    """
    # Login to mail server to verify email
    mail = imaplib.IMAP4_SSL(services["server"])
    mail.login(
        services["email"],
        services["password"]
    )
    mail.list()
    mail.select(services["folder"])
    date = (datetime.date.today() - datetime.timedelta(1)).strftime("%d-%b-%Y")
    result, data = mail.uid(
        'search',
        None,
        '(SENTSINCE {date} HEADER FROM "{mail}")'.format(
            date=date,
            mail=from_mail
        )
    )
    # Return False if email is not present.
    # Bug fix: a UID SEARCH always returns a one-element list whose
    # entry is the space-separated UID string (empty when there are no
    # hits), so the original ``data == []`` check never fired and the
    # empty case crashed below with an IndexError.
    if not data or not data[0]:
        return False
    latest_email_uid = data[0].split()[-1]
    result, data = mail.uid('fetch', latest_email_uid, '(RFC822)')
    raw_email = data[0][1]
    email_message = email.message_from_string(raw_email)
    result = get_first_text_block(email_message)
    return result
def get_first_text_block(email_message_instance):
    """fetches first text block from the mail

    A plain text message yields its own payload; a multipart message
    yields the payload of its first text/* sub-part.  Returns None
    when no text part can be found.
    """
    maintype = email_message_instance.get_content_maintype()
    if maintype == 'text':
        return email_message_instance.get_payload()
    if maintype == 'multipart':
        for part in email_message_instance.get_payload():
            if part.get_content_maintype() == 'text':
                return part.get_payload()
def random_gen(id=None, size=6, chars=string.ascii_uppercase + string.digits):
    """Generate Random Strings of variable length

    Draws *size* characters from *chars*; when *id* is truthy the
    result is returned as "<id>-<random>".
    """
    randomstr = ''.join(random.choice(chars) for _ in range(size))
    if id:
        return '%s-%s' % (id, randomstr)
    return randomstr
def cleanup_resources(api_client, resources):
    """Delete resources

    Calls ``delete(api_client)`` on every object in *resources*.
    """
    for resource in resources:
        resource.delete(api_client)
def is_server_ssh_ready(ipaddress, port, username, password, retries=20, retryinterv=30, timeout=10.0, keyPairFileLocation=None):
    '''
    @Name: is_server_ssh_ready
    @Input: timeout: tcp connection timeout flag,
            others information need to be added
    @Output:object for SshClient
    Name of the function is little misnomer and is not
    verifying anything as such mentioned
    '''
    try:
        ssh = SshClient(
            host=ipaddress,
            port=port,
            user=username,
            passwd=password,
            keyPairFiles=keyPairFileLocation,
            retries=retries,
            delay=retryinterv,
            timeout=timeout)
    # Bug fix: use ``except ... as ...``; the old comma form
    # (``except Exception, e``) is Python <= 2.5 syntax, rejected by
    # Python 3, while ``as`` works on Python 2.6+ too (and matches the
    # style used elsewhere in this module).
    except Exception as e:
        raise Exception("SSH connection has Failed. Waited %ss. Error is %s" % (retries * retryinterv, str(e)))
    else:
        return ssh
def format_volume_to_ext3(ssh_client, device="/dev/sda"):
    """Format attached storage to ext3 fs

    Creates a single primary partition on *device* via fdisk, then
    makes an ext3 filesystem on that first partition.
    """
    partition_cmd = "echo -e 'n\np\n1\n\n\nw' | fdisk %s" % device
    mkfs_cmd = "mkfs.ext3 %s1" % device
    for command in (partition_cmd, mkfs_cmd):
        ssh_client.execute(command)
def fetch_api_client(config_file='datacenterCfg'):
    """Fetch the Cloudstack API Client

    Reads the marvin setup config named by *config_file* and builds an
    API client connected to its first management server.
    """
    setup_config = marvin.configGenerator.get_setup_config(config_file)
    mgmt_server = setup_config.mgtSvr[0]
    connection = marvin.cloudstackConnection.cloudConnection(
        mgmt_server,
        3600,  # asynchronous job timeout (seconds)
        logging.getLogger("testClient")
    )
    return cloudstackAPIClient.CloudStackAPIClient(connection)
def get_host_credentials(config, hostip):
    """Get login information for a host `hostip` (ipv4) from marvin's `config`
    @return the tuple username, password for the host else raise keyerror"""
    for zone in config.zones:
        for pod in zone.pods:
            for cluster in pod.clusters:
                for host in cluster.hosts:
                    # Hosts may be configured by URL; use just the
                    # network location for the FQDN comparison.
                    if str(host.url).startswith('http'):
                        hostname = urlparse.urlsplit(str(host.url)).netloc
                    else:
                        hostname = str(host.url)
                    try:
                        if socket.getfqdn(hostip) == socket.getfqdn(hostname):
                            return host.username, host.password
                    # Bug fix: ``except socket.error, e`` is
                    # Python <= 2.5 comma syntax, rejected by Python 3;
                    # the ``as`` form works on Python 2.6+ as well.
                    except socket.error as e:
                        raise Exception("Unresolvable host %s error is %s" % (hostip, e))
    raise KeyError("Please provide the marvin configuration file with credentials to your hosts")
def get_process_status(hostip, port, username, password, linklocalip, process, hypervisor=None):
    """Double hop and returns a process status

    SSH to the host at *hostip*, then from there SSH into the system
    VM at *linklocalip* (port 3922) and run *process*, returning its
    output.  Retries while host key verification fails.
    """
    # First hop: the hypervisor host.
    ssh = SshClient(hostip, port, username, password)
    # VMware system VMs are reached with the management server's key;
    # other hypervisors use the cloud key stored on the host.
    if str(hypervisor).lower() == 'vmware':
        key_path = "/var/cloudstack/management/.ssh/id_rsa"
    else:
        key_path = "~/.ssh/id_rsa.cloud"
    ssh_command = (
        "ssh -i %s -ostricthostkeychecking=no "
        "-oUserKnownHostsFile=/dev/null -p 3922 %s %s" % (
            key_path, linklocalip, process))
    # Second hop: retry up to 5 times, 5 seconds apart, while the
    # inner SSH login keeps failing host key verification.
    attempts_left = 5
    while True:
        res = ssh.execute(ssh_command)
        succeeded = res[0] != "Host key verification failed."
        if succeeded or attempts_left == 0:
            break
        time.sleep(5)
        attempts_left = attempts_left - 1
    return res
def isAlmostEqual(first_digit, second_digit, range=0):
    """Return True when *second_digit* lies strictly between
    *first_digit* - *range* and *first_digit* + *range*.

    Note: the bounds are exclusive, so with range=0 the result is
    always False, even for equal inputs.
    """
    try:
        lower = first_digit - range
        upper = first_digit + range
        return lower < second_digit < upper
    except Exception as e:
        raise e
def xsplit(txt, seps):
    """
    Split a string in `txt` by list of delimiters in `seps`
    @param txt: string to split
    @param seps: list of separators
    @return: list of split units
    """
    # Normalise every alternative separator to the first one, then a
    # single split() handles them all; each piece is stripped.
    primary = seps[0]
    normalized = txt
    for alternative in seps[1:]:
        normalized = normalized.replace(alternative, primary)
    return [piece.strip() for piece in normalized.split(primary)]
def is_snapshot_on_nfs(apiclient, dbconn, config, zoneid, snapshotid):
    """
    Checks whether a snapshot with id (not UUID) `snapshotid` is present on the nfs storage
    @param apiclient: api client connection
    @param @dbconn: connection to the cloudstack db
    @param config: marvin configuration file
    @param zoneid: uuid of the zone on which the secondary nfs storage pool is mounted
    @param snapshotid: uuid of the snapshot
    @return: True if snapshot is found, False otherwise
    """
    # Local import to avoid a module-level dependency cycle with base.
    from base import ImageStore, Snapshot
    # Step 1: locate the zone's secondary storage; only NFS is supported.
    secondaryStores = ImageStore.list(apiclient, zoneid=zoneid)
    assert isinstance(secondaryStores, list), "Not a valid response for listImageStores"
    assert len(secondaryStores) != 0, "No image stores found in zone %s" % zoneid
    secondaryStore = secondaryStores[0]
    if str(secondaryStore.providername).lower() != "nfs":
        raise Exception(
            "is_snapshot_on_nfs works only against nfs secondary storage. found %s" % str(secondaryStore.providername))
    # Step 2: translate the snapshot UUID into its numeric DB id.
    qresultset = dbconn.execute(
        "select id from snapshots where uuid = '%s';" \
        % str(snapshotid)
    )
    if len(qresultset) == 0:
        raise Exception(
            "No snapshot found in cloudstack with id %s" % snapshotid)
    snapshotid = qresultset[0][0]
    # Step 3: look up the snapshot's install path on the image store.
    qresultset = dbconn.execute(
        "select install_path from snapshot_store_ref where snapshot_id='%s' and store_role='Image';" % snapshotid
    )
    assert isinstance(qresultset, list), "Invalid db query response for snapshot %s" % snapshotid
    if len(qresultset) == 0:
        #Snapshot does not exist
        return False
    snapshotPath = qresultset[0][0]
    nfsurl = secondaryStore.url
    # Python 2 stdlib; parse the nfs://host/path store URL.
    from urllib2 import urlparse
    parse_url = urlparse.urlsplit(nfsurl, scheme='nfs')
    host, path = parse_url.netloc, parse_url.path
    if not config.mgtSvr:
        raise Exception("Your marvin configuration does not contain mgmt server credentials")
    mgtSvr, user, passwd = config.mgtSvr[0].mgtSvrIp, config.mgtSvr[0].user, config.mgtSvr[0].passwd
    # Step 4: from the management server, mount the NFS share and test
    # whether the snapshot file exists, then unmount again.
    try:
        ssh_client = SshClient(
            mgtSvr,
            22,
            user,
            passwd
        )
        cmds = [
            # NOTE(review): the "%s" in this mkdir command has no
            # interpolation argument and is passed through literally --
            # confirm whether a path was meant to be substituted.
            "mkdir -p %s /mnt/tmp",
            "mount -t %s %s%s /mnt/tmp" % (
                'nfs',
                host,
                path,
            ),
            "test -f %s && echo 'snapshot exists'" % (
                os.path.join("/mnt/tmp", snapshotPath)
            ),
        ]
        # `result` after this loop holds the output of the last
        # command, i.e. the `test -f` check.
        for c in cmds:
            result = ssh_client.execute(c)
        # Unmount the Sec Storage
        cmds = [
            "cd",
            "umount /mnt/tmp",
        ]
        for c in cmds:
            ssh_client.execute(c)
    except Exception as e:
        raise Exception("SSH failed for management server: %s - %s" %
                        (config.mgtSvr[0].mgtSvrIp, e))
    return 'snapshot exists' in result
def validateList(inp):
    """
    Validate that ``inp`` is a non-empty list.

    @Input: any object to be validated
    @output: a three-element list ``[Result, FirstElement, Reason]``:
        Result       -- PASS if ``inp`` is a non-empty list, FAIL otherwise.
        FirstElement -- first item of ``inp`` on PASS, else None.
        Reason       -- None on PASS; INVALID_INPUT when ``inp`` is None or
                        not a list; EMPTY_LIST when it is an empty list.
    """
    # None and non-list inputs share the same failure reason.
    if inp is None or not isinstance(inp, list):
        return [FAIL, None, INVALID_INPUT]
    # An empty list is a distinct, more specific failure.
    if not inp:
        return [FAIL, None, EMPTY_LIST]
    return [PASS, inp[0], None]
def verifyElementInList(inp, toverify, responsevar=None, pos=0):
'''
@name: verifyElementInList
@Description:
1. A utility function to validate
whether the input passed is a list.
The list is empty or not.
If it is list and not empty, verify
whether a given element is there in that list or not
at a given pos
@Input:
I : Input to be verified whether its a list or not
II : Element to verify whether it exists in the list
III : variable name in response object to verify
default to None, if None, we will verify for the complete
first element EX: | |
7040521458 9534814523 3534291147 2996242113
5543378696 9835321543 4105543752 0239981628 8226117291
1558210513 6860962288 0523876804 6449418784 9960586710
1463691780 7107942661 7689234821 5918325023 4297297489
3722249521 0857346873 4180933569 3734054818 3762028499
4222993156 3544855869 4140433376 5912217555 8881009687
5056886750 2176565295 4395708001 3019408066 0437251122
9466423554 2348826700 5632176807 0356290508 3928640619
1520373892 9817736309 0571242693 9767108891 2049911333
6873205844 3902496618 2121075440 3960436620 2065408356
7733089436 2520794381 3769247417 1503616007 0654094371
3189989522 4849622199 8577358317 9380991278 9524921684
8131861897 2430153806 2997404134 7067696699 1016727320
3569754212 1619608538 4942648200 1892802874 7206531290
3691335849 1333910200 3004596661 5969921598 9976499412
3447348987 2245244592 3581628622 8293218824 6831871861
9170815827 8174416645 4670991441 6151433034 3753198899
7138759124 7751103235 1508761835 6105605892 8400894430
7925614927 2042265885 1183200514 1471256252 7746621201
1782130491 4160489498 7508636829 8802964239 9925529062
1360132623 6764752327 9326361862 8893425972 3105519640
2902820551 2013785950 0492608034 7029783891 5272157870
5711010634 6628329141 0099928877 8187261687 7489695679
2336810369 0546576806 4690404941 1370329314 3295427864
7551034589 5902522647 9645920478 9097159509 1730349175
7542663627 7505919618 8243484546 0070459301 6178355410
1365184976 4464430987 6096742909 5632944279 5364192664
1016611295 8409641718 4095099971 4640777752 7832039904
8183787789 1153537499 4617139119 1602131905 3006072532
3195013244 5016813485 7796531815 0212751271 3993642302
1899339489 1610945773 8726204073 5646500364 6474169195
8022309694 4175236424 9556265326 6082415243 3174625941
6224615163 1964249727 9614424815 7377562290 0331165027
2357806141 7217403979 5177961491 3287910314 8307263863
8396214115 0673267476 0765443055 9769870079 4960031781
0388417594 2261610246 7866443806 9324998343 3830189708
2803742120 7092075892 8675801178 1915442361 6089314879
2561237343 5722034313 4819353241 6059153315 4102582419
8158886092 6578589461 2319020861 8044428787 4369912126
8112378685 0261145365 8293156658 6681049270 0578583509
4174512968 5200699259 2233082764 6015462665 4596610003
2051344802 8370304083 7814023085 7715984162 4396898631
3998118364 0236394670 0025237545 6073624963 9917541404
1506481272 2518626877 1910850955 6392228587 0327122446
9933230452 2947079934 8103169997 2071059066 5849150477
7620272404 6952493381 1163287793 5726195254 7310094486
9728614243 6988746970 6012127752 8913254437 3323402476
2554312427 5949229972 2657488235 4291048894 1799428904
5771325884 8804603695 8241095699 7149043269 3928979477
8412480388 7036620176 5669604984 9817080615 5727750331
8226057479 1757419620 1545436543 7490580279 4567322968
4285743411 0635930861 5300906864 9201072722 9452363619
3995386697 2528154870 3998573726 4767017751 3367190194
5311496308 6388039115 3700874496 0020408274 9966374451
9354774309 9167994368 8981455828 3931691860 1678852740
4175127079 6433924110 6496064245 3499782280 5357057199
3468104840 6218359579 9811437643 8987826415 0686341596
0047473181 9707510404 7782328693 4371165813 7074237959
0525246898 4966784992 6848994293 1369318249 7837113259
1786598180 7047846666 7294870407 4154020216 8776006581
5196684340 3710845677 2323442083 5206023291 8375610787
3489494411 9970047165 7836673858 4467464799 8960874216
8393189463 2530965149 6198965019 9836660139 5313873431
9378728986 6977294884 2700168140 1321767194 4700790646
8035916511 4072033873 9819773422 6421959779 4141034561
5117103825 3986134619 8501408140 4911370155 7358138953
2252596164 5032662727 9904234196 9056185260 7518448534
8515119978 5785567409 4889043787 2640697883 1042089194
5237676006 0135523586 2762798324 1161254033 0204891773
8495298493 7838492242 5062055146 2781680940 0142871227
2463398474 3389696265 1084018290 6082074270 9065932412
7842109596 4878425034 5080264629 8054811717 8412643517
8988690978 1369279862 7514182069 2665428306 3244656110
4594595907 6037603257 1814714591 0832557735 2996548372
3284375797 7148214329 8138383702 3256305709 7358834727
7813307771 5284583166 8500134722 4907698321 3482746803
4714555991 2682086653 7671390156 2986275601 4724708360
1412383605 3465710238 3885352182 4420719749 3848234567
4986252326 7900024791 9848101690 8751397015 9140924453
2923800020 7000980366 0683070916 8566752875 6223977112
6375555850 7440233343 7079083671 9688766104 0752563888
1523188213 6414339406 0457030450 1146252186 0937968468
6630546426 2722433165 6161504252 6309546120 8847549507
0119045743 4880533277 8297634346 1728722227 7813884055
8176286238 8975988343 0808420682 4381694188 1605080478
6588589098 7757280461 0258542381 9502707200 1843942210
8241933662 2303265741 0846056940 1462413160 6525283423
2331915226 5447031333 2507334543 7997068399 7904737696
6199860551 4839832927 6828541559 7741774845 8554107782
4543449726 4405779685 0250574435 6520610019 8902164956
7145811383 9771109357 7215892561 0742454084 9330863842
9056352148 2964807655 6716594386 0203577669 1348335622
7979095008 9667862937 9261526793 3144534540 8678643349
0843649114 1250495944 3995733708 5597955984 6791581151
6378124505 0947164410 4728497015 6681422028 9960957453
3839803728 7764586461 0764637119 4657395618 4450705918
2034682814 6697695220 1112803648 4784385390 1241663676
2909396784 7329227279 7820242043 5419195340 9816610163
7094083893 2874262459 9384717790 7698602727 1315237221
2124457411 1988467432 6204255691 4892927979 4763771334
9252175896 4037294975 3454544441 2535293067 6199603933
7673619883 1815155370 5279730292 5066726249 0107280522
1139211026 3676263485 2236884037 8177706392 7139739255
9007347684 8626022663 5066106596 0403337763 4962685977
3631711556 2068646350 6129569966 4212594368 5036877106
8855240256 4691414339 7706103665 2359282357 1592177763
0065920458 1411228952 4418301894 7807189329 3832470793
6184027891 5635178217 7863984224 5201478562 9833896323
0547685472 5285961964 6812640048 8048991788 9122583340
9901647162 4974783599 2972822669 6064951555 0439417865
5445246989 7203009330 5362879754 1032306340 5339385940
9856632571 0084257962 9341239066 3940539014 4287564609
5375056057 4708252332 7834164918 5034002142 8361709601
2738465189 2403800455 6174819719 5438436103 1706429478
5306503118 0708204185 6933185059 9828707583 6506290905
8119683163 5274117855 8138197938 2232876237 5618870743
0426645228 8725062898 0254909545 3914787869 6309383262
3249857747 1494376481 4300799001 8893463908 6323877109
7041533147 7650279278 3271534463 0201111207 4526789734
4164156173 0746988488 0231599335 0147214223 1870266391
8394241897 9294351060 4906272954 1884189271 3939644533
4755041989 0336901287 2476484237 0706977597 8791346375
6132122157 7088001101 6553140611 1748970836 1155778343
8415762846 7022773258 5933435928 6937520154 0959990757
6677574069 7436043684 8746842245 4788051143 5381639206
1840364803 7685411409 2185245128 3567505305 2671664848
1322506623 0588469159 3113900501 3620036621 8008224907
5803066593 2313941044 7584206514 1387410218 1661578959
9980099987 9064430724 5496953207 2273553959 5713477316
9341873359 1771675775 9125189292 9782753304 3991765738
6760367202 3864737157 1171163911 1208431250 1767300406
6440490275 2431640923 6213410390 6613061212 1300210977
4334490755 8856626777 4326279119 3811664166 8337969521
3121423669 4272436246 4924545176 8398057228 4174569913
7592786509 2857068615 0602210571 4276592874 1998071389
7770570380 8059120946 9226227652 7757795103 5771208499
9749663276 5608093697 2378763791 8108407977 9745301395
2404039910 9682530001 0788856407 8602549632 7631706079
2879850940 4033455186 5714873332 3288221925 1326746108
1843903935 0296657469 6179149555 5055668844 2182146222
7036560920 7269439637 1311145914 2894953651 0571572669
8685716846 2374988800 0645938139 5516631807 7160787070
4271772015 9200684195 0947845934 6662129879 1203624475
6257091686 3521112346 7722253469 5614917013 5212609645
8819306082 2366133421 6378606221 7743456335 0391593506
9437707445 8619232286 4080127626 4493168222 2315516717
1622700371 3809563073 7876275436 9456295039 9670100956
2595437198 1694152439 4921177484 6513991657 7850700838
7591652123 2423684737 5370228366 3213663173 5989203304
1154810588 4610720356 0626714526 5341095806 7680251461
1824679203 7602756508 4385418953 4674383974 5747704961
1484249784 2366941753 2593344145 4465130113 0258666252
1190115554 5508494248 3296625665 9641447235 0114934774
0256419151 0886119367 9111137372 2376794271 0441689057
0528016250 4055331115 3549919392 5322520913 0491368172
2784765315 3716676939 6999557453 4131103837 0893739693
8956991469 2452125884 3924911417 0742545553 0461979109
0038607546 4333298365 1563093545 8401042533 4286926419
2757837046 4643003795 2252068747 3681343883 8607549376
1998593755 1648820957 5624653250 6363691920 9077070876
7822899355 0625728521 8504230753 4651773344 5208085046
9821740539 1619929519 1611589327 9468381280 2007272380
8084066227 1586730848 5260712291 4018022489 6017800416
0295611320 7420308244 6577829314 4478629015 2143341287
1136548754 8793157813 5945374876 9461526929 0335512420
6197208185 4341819025 6172618059 4770089054 3657854993
7842421334 0163297166 7910754718 3827390030 6324037817
7749684215 9741379822 9609338541 3018815240 9300554505
7301457810 6570971228 6632162538 6011387570 8357221939
3840507773 0069154366 3533737718 4736297695 0552996937
6438994794 5941547437 4376800828 6462552730 0962906224
5701980041 4848185117 1850585220 6042134736 3086940305
0469757270 3287952471 7208383882 5939538081 0529664694
0198223084 8520254687 6044178662 1933727357 9738995598
8002520328 4552185216 4698608236 1287658276 6947093594
5079852467 7583165996 9729185434 6811845124 1498210617
0630743658 2336327443 8790377401 9155763631 7370092901
5398687881 9748670288 6325266063 4495494973 7958364184
7896166628 7941681346 1681591787 1621793312 9551719811
1275390504 6530722884 9560461653 6236620754 7177417848
1226062672 9547420232 5642922932 3358808181 0372305207
2018504389 4437781836 2146717439 2828428895 7309799057
1646878941 0015966077 7077721662 0714386120 0554238911
6043823885 7998454447 0691741513 2493852355 3263577102
8870043657 4053371221 3300070621 0043263641 2035218435
9693698737 2632258745 5903746157 9452747074 3430051275
7467159543 6163697569 1773300867 1648754008 2954208298
6985487691 5174214319 4631188788 5282514054 1930250216
2310964958 7880719536 2376234564 9443165002 1899219325
0786790761 8443911838 3637710535 6124299846 3455263264
3667979387 6151233711 8235059969 0026413425 8784808289
1877247976 1876248090 4382969663 2924202947 7836502021
8193547738 1328083179 4722095287 3000136154 2113134369
7256299274 2237664384 8913278916 2898297233 1000991353
9653806178 2237137597 3858941027 0496321834 8648603076
7369414585 7979656920 4447460290 5311359797 2555359361
8293959675 6537528623 1976642180 8743122259 9533043079
2709288417 3839513474 0011477529 9568753725 7536358499
4938876854 9305126326 4601110260 8348454683 2929102556
2556915554 4915040773 0875085656 5134282577 1669088870
2651241089 2848202207 5910928381 1623372461 1928953917
4108144283 9704242134 6866482151 3535151636 9139813860
9209504554 0334011369 0879384680 8342217977 7986832701
1703157747 0686163104 1623544143 7607582080 9396613915
7370947875 3128672639 5957360734 3746159387 8484726428
9690070955 2631340510 7801308462 3584030692 9955858132
4830166577 1679058108 5275375337 9376100666 3445560037
1770139472 4648077894 0094379523 8694688616 8748875198
5006835761 5944471718 2684916262 8809969540 3464723389
4395314307 0639550999 4325571326 2380938252 1836518249
9780735308 3539598072 4751057305 2751467527 0539386887
0880094613 4397737803 9362948575 3558173395 9377045003
2738997134 6043504626 5058530751 7928303429 5911907756
8556999732 9126057576 6392419966 0989039748 7927381413
6552531497 1620878224 4652375881 1988310812 6540263979
7914075222 1064721259 7349607558 3075350231 6608641699
7441840561 8595052996 2513832685 0001045817 7452938505
6933220933 0784342964 1602610045 9610527567 2853919467
4141078455 7830702113 7767876226 4995141426 6205900886
0693431978 8975736942 3287665610 1385015532 5490002440
0705240741 8932046829 2979967805 0517766029 7356324393
5503891663 3543752551 4401010879 5888249198 0154230410
3092398610 1967739650 8569281451 1081854806 2651432652
8158586836 6684294180 8250850670 4649604386 4554150711
0935917432 7671750036 3186774348 4721231023 4202624705
1266971640 7302922837 4009841140 8744036523 4663894314
0173515707 8672157245 4791163715 9517451748 8205084154
2109507225 8809492020 0188190889 9193257629 7383180779
8143731961 1272342753 8445458152 8395510355 4402085529
9389922211 2441185004 8404354085 3924430991 7452907978
4553953206 2104996171 1518943262 1517402664 1772292653
2825182297 2677624405 6888138833 8976523059 1552506500
3974071974 1348760939 6438841127 6251166687 2843898464
5518744425 7811284188 6164705088 1682217290 9073880993
8542988006 6395657226 3637237762 2236816515 7318575148
5276890011 9888459137 1729209620 6603077782 2998973870
8720604436 2188693352 8000044725 1011472625 8925259219
2397521128 5199278922 7738466167 4392531909 4784692482
6919559149 7049295675 9615913863 9730163195 5350701107
8379823026 7050459747 2225558365 4970840173 7907931671
8843926663 0057391223 8402459777 9774012159 9855908223
7653804615 7826215207 2352695589 7886406420 5546718833
8173001438 6560803432 7681289975 7435075135 6237064265
6816433142 2150990565 8994987242 9216431166 4113744643
7114813032 2192549748 7501961844 6042321996 5266067830
9389892889 8200903056 1850179490 1497935206 4818153796
7305881315 8403966305 3082131627 3647583968 1812914249
3789694041 8715522386 2799604936 2681040078 9563899732
4052480178 0430033232 7627076763 0300754700 4593214925
5905820768 1373711381 4031503324 8385974488 9654388586
6200970964 8909161437 7952660064 6505164316 2560654935
9240056295 2486365724 4342108705 7966964165 4690336037
2263439186 6494221493 2445701742 1332235406 9307056791
5525199548 9344371431 6322528222 8654358869 5771981785
6169733898 2827819543 3473800555 5170053732 9308113190
0789735682 2215802322 0801714548 9125876850 9883351892
1097423447 | |
<filename>tests/authorization/test_ranger.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Client tests for SQL statement authorization
import grp
import json
import pytest
import requests
from getpass import getuser
from tests.common.custom_cluster_test_suite import CustomClusterTestSuite
from tests.util.hdfs_util import NAMENODE
from tests.util.calculation_util import get_random_id
# Administrative principal pre-configured in the Ranger test setup.
ADMIN = "admin"
# (user, password) credentials and base URL for Ranger's REST API.
RANGER_AUTH = ("admin", "admin")
RANGER_HOST = "http://localhost:6080"
# Daemon startup flags enabling Ranger as the authorization provider.
IMPALAD_ARGS = "--server-name=server1 --ranger_service_type=hive " \
               "--ranger_app_id=impala --authorization_provider=ranger"
CATALOGD_ARGS = "--server-name=server1 --ranger_service_type=hive " \
                "--ranger_app_id=impala --authorization_provider=ranger"
# Variants of the above for running with the local catalog (catalog v2).
LOCAL_CATALOG_IMPALAD_ARGS = "--server-name=server1 --ranger_service_type=hive " \
    "--ranger_app_id=impala --authorization_provider=ranger --use_local_catalog=true"
LOCAL_CATALOG_CATALOGD_ARGS = "--server-name=server1 --ranger_service_type=hive " \
    "--ranger_app_id=impala --authorization_provider=ranger --catalog_topic_mode=minimal"
class TestRanger(CustomClusterTestSuite):
"""
Tests for Apache Ranger integration with Apache Impala.
"""
@pytest.mark.execute_serially
@CustomClusterTestSuite.with_args(
impalad_args=IMPALAD_ARGS, catalogd_args=CATALOGD_ARGS)
def test_grant_revoke_with_catalog_v1(self, unique_name):
# This test fails due to bumping up the Ranger to a newer version.
# TODO(fangyu.rao): Fix in a follow up commit.
pytest.xfail("failed due to bumping up the Ranger to a newer version")
"""Tests grant/revoke with catalog v1."""
self._test_grant_revoke(unique_name, [None, "invalidate metadata",
"refresh authorization"])
@pytest.mark.execute_serially
@CustomClusterTestSuite.with_args(
impalad_args="{0} {1}".format(IMPALAD_ARGS, "--use_local_catalog=true"),
catalogd_args="{0} {1}".format(CATALOGD_ARGS, "--catalog_topic_mode=minimal"))
def test_grant_revoke_with_local_catalog(self, unique_name):
# This test fails due to bumping up the Ranger to a newer version.
# TODO(fangyu.rao): Fix in a follow up commit.
pytest.xfail("failed due to bumping up the Ranger to a newer version")
"""Tests grant/revoke with catalog v2 (local catalog)."""
self._test_grant_revoke(unique_name, [None, "invalidate metadata",
"refresh authorization"])
  def _test_grant_revoke(self, unique_name, refresh_statements):
    """Grants and revokes SELECT on a scratch database for both a USER and a
    GROUP principal, checking SHOW GRANT output after each step.

    The whole scenario is repeated once per entry of refresh_statements
    (None meaning "no explicit refresh") via _refresh_authorization.
    """
    user = getuser()
    admin_client = self.create_impala_client()
    unique_database = unique_name + "_db"
    unique_table = unique_name + "_tbl"
    group = grp.getgrnam(getuser()).gr_name
    # Same scenario is run once per principal kind: USER then GROUP.
    test_data = [(user, "USER"), (group, "GROUP")]
    for refresh_stmt in refresh_statements:
      for data in test_data:
        ident = data[0]
        kw = data[1]
        try:
          # Set-up temp database/table
          admin_client.execute("drop database if exists {0} cascade"
                               .format(unique_database), user=ADMIN)
          admin_client.execute("create database {0}".format(unique_database), user=ADMIN)
          admin_client.execute("create table {0}.{1} (x int)"
                               .format(unique_database, unique_table), user=ADMIN)
          self.execute_query_expect_success(admin_client,
                                            "grant select on database {0} to {1} {2}"
                                            .format(unique_database, kw, ident),
                                            user=ADMIN)
          self._refresh_authorization(admin_client, refresh_stmt)
          result = self.execute_query("show grant {0} {1} on database {2}"
                                      .format(kw, ident, unique_database))
          # A database-level grant is expected to surface as two rows in
          # SHOW GRANT output (see _check_privileges for column layout).
          TestRanger._check_privileges(result, [
              [kw, ident, unique_database, "", "", "", "*", "select", "false"],
              [kw, ident, unique_database, "*", "*", "", "", "select", "false"]])
          self.execute_query_expect_success(admin_client,
                                            "revoke select on database {0} from {1} "
                                            "{2}".format(unique_database, kw, ident),
                                            user=ADMIN)
          self._refresh_authorization(admin_client, refresh_stmt)
          result = self.execute_query("show grant {0} {1} on database {2}"
                                      .format(kw, ident, unique_database))
          TestRanger._check_privileges(result, [])
        finally:
          # Best-effort cleanup so a failure does not leak the privilege or
          # the scratch database into later iterations/tests.
          admin_client.execute("revoke select on database {0} from {1} {2}"
                               .format(unique_database, kw, ident), user=ADMIN)
          admin_client.execute("drop database if exists {0} cascade"
                               .format(unique_database), user=ADMIN)
@CustomClusterTestSuite.with_args(
impalad_args=IMPALAD_ARGS, catalogd_args=CATALOGD_ARGS)
  def test_grant_option(self, unique_name):
    """Verifies WITH GRANT OPTION behavior: it is reported by SHOW GRANT,
    survives revocation of a sibling privilege, and can itself be revoked."""
    # This test fails due to bumping up the Ranger to a newer version.
    # TODO(fangyu.rao): Fix in a follow up commit.
    pytest.xfail("failed due to bumping up the Ranger to a newer version")
    user1 = getuser()
    admin_client = self.create_impala_client()
    unique_database = unique_name + "_db"
    unique_table = unique_name + "_tbl"
    try:
      # Set-up temp database/table
      admin_client.execute("drop database if exists {0} cascade".format(unique_database),
                           user=ADMIN)
      admin_client.execute("create database {0}".format(unique_database), user=ADMIN)
      admin_client.execute("create table {0}.{1} (x int)"
                           .format(unique_database, unique_table), user=ADMIN)
      # Give user 1 the ability to grant select privileges on unique_database
      self.execute_query_expect_success(admin_client,
                                        "grant select on database {0} to user {1} with "
                                        "grant option".format(unique_database, user1),
                                        user=ADMIN)
      self.execute_query_expect_success(admin_client,
                                        "grant insert on database {0} to user {1} with "
                                        "grant option".format(unique_database, user1),
                                        user=ADMIN)
      # Verify user 1 has with_grant privilege on unique_database
      # (last column "true" = grant option present).
      result = self.execute_query("show grant user {0} on database {1}"
                                  .format(user1, unique_database))
      TestRanger._check_privileges(result, [
          ["USER", user1, unique_database, "", "", "", "*", "insert", "true"],
          ["USER", user1, unique_database, "", "", "", "*", "select", "true"],
          ["USER", user1, unique_database, "*", "*", "", "", "insert", "true"],
          ["USER", user1, unique_database, "*", "*", "", "", "select", "true"]])
      # Revoke select privilege and check grant option is still present
      self.execute_query_expect_success(admin_client,
                                        "revoke select on database {0} from user {1}"
                                        .format(unique_database, user1), user=ADMIN)
      result = self.execute_query("show grant user {0} on database {1}"
                                  .format(user1, unique_database))
      TestRanger._check_privileges(result, [
          ["USER", user1, unique_database, "", "", "", "*", "insert", "true"],
          ["USER", user1, unique_database, "*", "*", "", "", "insert", "true"]])
      # Revoke privilege granting from user 1
      self.execute_query_expect_success(admin_client, "revoke grant option for insert "
                                        "on database {0} from user {1}"
                                        .format(unique_database, user1), user=ADMIN)
      # User 1 can no longer grant privileges on unique_database
      # In ranger it is currently not possible to revoke grant for a single access type
      result = self.execute_query("show grant user {0} on database {1}"
                                  .format(user1, unique_database))
      TestRanger._check_privileges(result, [
          ["USER", user1, unique_database, "", "", "", "*", "insert", "false"],
          ["USER", user1, unique_database, "*", "*", "", "", "insert", "false"]])
    finally:
      # Clean up remaining privileges and the scratch database even on failure.
      admin_client.execute("revoke insert on database {0} from user {1}"
                           .format(unique_database, user1), user=ADMIN)
      admin_client.execute("drop database if exists {0} cascade".format(unique_database),
                           user=ADMIN)
@CustomClusterTestSuite.with_args(
impalad_args=IMPALAD_ARGS, catalogd_args=CATALOGD_ARGS)
  def test_show_grant(self, unique_name):
    """Driver for the SHOW GRANT sub-tests: basic output, missing ON clause,
    ALL-privilege masking, and USER inheriting its GROUP's privileges."""
    # This test fails due to bumping up the Ranger to a newer version.
    # TODO(fangyu.rao): Fix in a follow up commit.
    pytest.xfail("failed due to bumping up the Ranger to a newer version")
    user = getuser()
    group = grp.getgrnam(getuser()).gr_name
    test_data = [(user, "USER"), (group, "GROUP")]
    admin_client = self.create_impala_client()
    unique_db = unique_name + "_db"
    unique_table = unique_name + "_tbl"
    try:
      # Create test database/table
      admin_client.execute("drop database if exists {0} cascade".format(unique_db),
                           user=ADMIN)
      admin_client.execute("create database {0}".format(unique_db), user=ADMIN)
      admin_client.execute("create table {0}.{1} (x int)"
                           .format(unique_db, unique_table), user=ADMIN)
      for data in test_data:
        # Test basic show grant functionality for user/group
        self._test_show_grant_basic(admin_client, data[1], data[0], unique_db,
                                    unique_table)
        # Test that omitting ON <resource> results in failure
        self._test_show_grant_without_on(data[1], data[0])
      # Test ALL privilege hides other privileges
      self._test_show_grant_mask(admin_client, user)
      # Test USER inherits privileges for their GROUP
      self._test_show_grant_user_group(admin_client, user, group, unique_db)
    finally:
      admin_client.execute("drop database if exists {0} cascade".format(unique_db),
                           user=ADMIN)
def _test_show_grant_without_on(self, kw, ident):
self.execute_query_expect_failure(self.client, "show grant {0} {1}".format(kw, ident))
  def _test_show_grant_user_group(self, admin_client, user, group, unique_db):
    """Checks that SHOW GRANT USER reflects privileges granted to the user's
    GROUP: empty before the group grant, two GROUP-tagged rows after."""
    try:
      result = self.client.execute("show grant user {0} on database {1}"
                                   .format(user, unique_db))
      TestRanger._check_privileges(result, [])
      admin_client.execute("grant select on database {0} to group {1}"
                           .format(unique_db, group))
      result = self.client.execute("show grant user {0} on database {1}"
                                   .format(user, unique_db))
      # Rows carry principal kind GROUP even though the query asked for USER.
      TestRanger._check_privileges(result, [
          ["GROUP", user, unique_db, "", "", "", "*", "select", "false"],
          ["GROUP", user, unique_db, "*", "*", "", "", "select", "false"]])
    finally:
      # Remove the group grant so later tests start from a clean slate.
      admin_client.execute("revoke select on database {0} from group {1}"
                           .format(unique_db, group))
  def _test_show_grant_mask(self, admin_client, user):
    """Checks that granting ALL on the server collapses SHOW GRANT output to
    single 'all' rows, masking the individually granted privileges."""
    privileges = ["select", "insert", "create", "alter", "drop", "refresh"]
    try:
      # Grant each privilege individually; each shows up as its own row.
      for privilege in privileges:
        admin_client.execute("grant {0} on server to user {1}".format(privilege, user))
      result = self.client.execute("show grant user {0} on server".format(user))
      TestRanger._check_privileges(result, [
          ["USER", user, "", "", "", "*", "", "alter", "false"],
          ["USER", user, "", "", "", "*", "", "create", "false"],
          ["USER", user, "", "", "", "*", "", "drop", "false"],
          ["USER", user, "", "", "", "*", "", "insert", "false"],
          ["USER", user, "", "", "", "*", "", "refresh", "false"],
          ["USER", user, "", "", "", "*", "", "select", "false"],
          ["USER", user, "*", "", "", "", "*", "alter", "false"],
          ["USER", user, "*", "", "", "", "*", "create", "false"],
          ["USER", user, "*", "", "", "", "*", "drop", "false"],
          ["USER", user, "*", "", "", "", "*", "insert", "false"],
          ["USER", user, "*", "", "", "", "*", "refresh", "false"],
          ["USER", user, "*", "", "", "", "*", "select", "false"],
          ["USER", user, "*", "*", "*", "", "", "alter", "false"],
          ["USER", user, "*", "*", "*", "", "", "create", "false"],
          ["USER", user, "*", "*", "*", "", "", "drop", "false"],
          ["USER", user, "*", "*", "*", "", "", "insert", "false"],
          ["USER", user, "*", "*", "*", "", "", "refresh", "false"],
          ["USER", user, "*", "*", "*", "", "", "select", "false"]])
      # After granting ALL, only the 'all' rows are reported.
      admin_client.execute("grant all on server to user {0}".format(user))
      result = self.client.execute("show grant user {0} on server".format(user))
      TestRanger._check_privileges(result, [
          ["USER", user, "", "", "", "*", "", "all", "false"],
          ["USER", user, "*", "", "", "", "*", "all", "false"],
          ["USER", user, "*", "*", "*", "", "", "all", "false"]])
    finally:
      # Revoke everything granted above, including each individual privilege.
      admin_client.execute("revoke all on server from user {0}".format(user))
      for privilege in privileges:
        admin_client.execute("revoke {0} on server from user {1}".format(privilege, user))
def _test_show_grant_basic(self, admin_client, kw, | |
<filename>src/live_data/live_odds_retrieval.py<gh_stars>0
# Places to retrieve live lineups
# https://www.rotowire.com/basketball/nba-lineups.php
# https://www.nba.com/players/todays-lineups
# stats api here - https://stats.nba.com/js/data/leaders/00_active_starters_20210128.json
import json
import re
from datetime import datetime, timedelta
import requests
import pandas as pd
from bs4 import BeautifulSoup
import ENVIRONMENT
from src.database.database_access import getUniversalTeamShortCode, getPlayerCurrentTeam, getUniversalPlayerName
from src.odds.odds_calculator import checkEvPlayerCodesOddsLine, kellyBetFromAOddsAndScoreProb, decimalToAmerican
from src.utils import getTeamFullFromShort, getSoupFromUrl, sleepChecker, lowercaseNoSpace, removeNewLineChars
def addTeamToUnknownPlayerLine(rawPlayerLine):
    """Fill in the 'team' key on a player odds line that lacks one.

    Resolves the player's canonical name, looks up their current team, and
    stores the universal short code back onto the line dict (mutated in
    place and also returned).
    """
    canonicalName = getUniversalPlayerName(rawPlayerLine['player'])
    currentTeam = getPlayerCurrentTeam(canonicalName)
    rawPlayerLine['team'] = getUniversalTeamShortCode(currentTeam)
    return rawPlayerLine
def getExpectedTipper(team):
    """Return the expected tip-off player for a team.

    :param team: three-letter universal team short code.
    :raises ValueError: if ``team`` is not exactly three characters.
    """
    # Fix: removed a duplicated copy of this check-and-return that sat
    # unreachably after the first return statement.
    if len(team) != 3:
        raise ValueError('Need to pass universal team short code to getExpectedTipper')
    return tipperFromTeam(team)
def getLastTipper(team_code, season_csv=ENVIRONMENT.CURRENT_SEASON_CSV):
    """Return the name of the tipper from *team_code*'s most recent game.

    Scans the season CSV from the newest row backwards so the first match is
    the team's latest game, whether the team played at home or away.

    :param team_code: team short code as it appears in the season CSV.
    :param season_csv: path to the season game log CSV.
    :raises ValueError: if the team has no game this season.
    """
    df = pd.read_csv(season_csv)
    i = len(df['Game Code']) - 1
    while i >= 0:
        if df['Home Short'].iloc[i] == team_code:
            name = df['Home Tipper'].iloc[i]
            print('last tipper for', team_code, 'was', name)
            return name  # , get_player_suffix(name)
        elif df['Away Short'].iloc[i] == team_code:
            name = df['Away Tipper'].iloc[i]
            print('last tipper for', team_code, 'was', name)
            return name  # , get_player_suffix(name)
        # Fix: was `i += 1`, which walked forward past the end of the frame
        # (IndexError) instead of scanning backwards from the latest game.
        i -= 1
    raise ValueError('No match found for team code this season')
def teamCodeToSlugName(team_code, team_dict=None, json_path=None):
    """Map a team abbreviation to its slug name.

    Lookup-table resolution order: an explicit ``json_path`` file wins, then
    a supplied ``team_dict``, and finally the default ENVIRONMENT team file.

    :raises ValueError: if no entry matches ``team_code``.
    """
    if json_path is not None:
        with open(json_path) as j_file:
            team_dict = json.load(j_file)
    elif team_dict is None:
        with open(ENVIRONMENT.TEAM_NAMES_PATH) as j_file:
            team_dict = json.load(j_file)
    slugs = (entry['slug'] for entry in team_dict
             if entry['abbreviation'] == team_code)
    for slug in slugs:
        return slug
    raise ValueError('no matching team for abbreviation')
def bovadaTeamOdds(allTeamBets):
    """Extract 'team to score first' odds for both teams of each game from a
    list of Bovada team-bet dicts.

    Returns a tuple ``(scoreFirstBetsBothTeamsFormatted, gameIdSet)``: a list
    of formatted bet dicts with American odds, and the set of every game id
    seen in ``allTeamBets``.

    NOTE(review): because it is unknown which Bovada team id corresponds to
    which side, each game is emitted twice with home/away swapped (see the
    trailing comment); downstream code must disambiguate.
    """
    # Pass 1: collect one entry per single-team 'score first' bet.
    scoreFirstBetsSingleTeam = list()
    gameIdSet = set()
    customId = 0
    for bet in allTeamBets:
        gameIdSet.add(bet['game']['id'])
        if bet['queryTitle'].lower() == 'team to score first':
            shortTitle = bet['game']['shortTitle']
            team1Id = bet['game']['team1Id']
            team2Id = bet['game']['team2Id']
            # An explicit override takes precedence over the quoted odds.
            if bet['oddsOverride'] is not None:
                decimalOdds = bet['oddsOverride']
            else:
                decimalOdds = bet['odds']
            scoreFirstBetsSingleTeam.append({
                "shortTitle": shortTitle,
                "team1id": str(team1Id),
                "team2id": str(team2Id),
                "decimalOdds": decimalOdds,
                "customId": customId  # unique tag so a bet never pairs with itself
            })
            customId += 1
    # Pass 2: pair the two single-team bets of the same game via shortTitle.
    matchedBets = set()
    scoreFirstBetsBothTeams = list()
    for bet in scoreFirstBetsSingleTeam:
        if bet['shortTitle'] not in matchedBets:
            for potentialPair in scoreFirstBetsSingleTeam:
                if potentialPair['shortTitle'] == bet['shortTitle'] and potentialPair['customId'] != bet['customId']:
                    matchedBets.add(potentialPair['shortTitle'])
                    shortTitle = bet['shortTitle']
                    team1Id = bet['team1id']
                    team2Id = bet['team2id']
                    # This implicitly relies on team1 being the first one on the list
                    scoreFirstBetsBothTeams.append({
                        "shortTitle": shortTitle,
                        "team1id": team1Id,
                        "team2id": team2Id,
                        "team1Odds": potentialPair['decimalOdds'],
                        "team2Odds": bet['decimalOdds'],
                    })
                    break
    # Pass 3: convert to American odds and emit both home/away orderings.
    scoreFirstBetsBothTeamsFormatted = list()
    for item in scoreFirstBetsBothTeams:
        # backlogtodo look at response when bet DNE
        # Decimal odds of exactly 1 are treated as invalid and skipped.
        if item['team2Odds'] == 1 or item['team1Odds'] == 1:
            print('invalid odds for bet', item['shortTitle'], '(decimal odds of 1)')
            continue
        scoreFirstBetsBothTeamsFormatted.append({
            'exchange': 'bovada',
            "shortTitle": item['shortTitle'],
            "away": getUniversalTeamShortCode(item['team1id']),
            "home": getUniversalTeamShortCode(item['team2id']),
            "awayTeamFirstQuarterOdds": decimalToAmerican(item['team2Odds']),
            "homeTeamFirstQuarterOdds": decimalToAmerican(item['team1Odds']),
            "awayPlayerFirstQuarterOdds": [],
            "homePlayerFirstQuarterOdds": []
        })
        scoreFirstBetsBothTeamsFormatted.append({
            'exchange': 'bovada',
            "away": getUniversalTeamShortCode(item['team2id']),
            "home": getUniversalTeamShortCode(item['team1id']),
            "shortTitle": item['shortTitle'],
            "awayTeamFirstQuarterOdds": decimalToAmerican(item['team2Odds']),
            "homeTeamFirstQuarterOdds": decimalToAmerican(item['team1Odds']),
            "awayPlayerFirstQuarterOdds": [],
            "homePlayerFirstQuarterOdds": []
        })  # This is done as it is unknown for bovada which team belongs to which odds
    return scoreFirstBetsBothTeamsFormatted, gameIdSet
def bovadaPlayerOdds(playerBetGamesList):
    """Extract per-player "First Point" odds from Bovada custom-market games.

    Parameters
    ----------
    playerBetGamesList : list
        One entry per game, each a list of bet-category dicts as returned
        by the digitalsportstech custom-markets API.

    Returns
    -------
    dict
        Universal team short code -> list of {"player", "odds", "team"}
        dicts for that team's players.
    """
    playerTeamDict = {}
    match = False
    for game in playerBetGamesList:
        for betCategory in game:
            if betCategory['settings']['title'] == "First Point":
                match = True
                selections = betCategory['selections']
                break
        if match:
            match = False
            # After the break above, betCategory is the matched category.
            shortTitle = betCategory['settings']['games'][0]['shortTitle']
            homeShort = shortTitle.split()[-1]
            awayShort = shortTitle.split()[0]
            homePlayerOdds = list()
            awayPlayerOdds = list()
            for player in selections:
                try:
                    # oddsOverride, when present, supersedes the base odds.
                    actualOdds = player['odds'] if player['oddsOverride'] is None else player['oddsOverride']
                    if player['player']['team']['abbreviation'] == homeShort:
                        homePlayerOdds.append({
                            "player": player['player']['name'],
                            "odds": actualOdds,
                            "team": getUniversalTeamShortCode(homeShort)
                        })
                    elif player['player']['team']['abbreviation'] == awayShort:
                        awayPlayerOdds.append({
                            "player": player['player']['name'],
                            "odds": actualOdds,
                            "team": getUniversalTeamShortCode(awayShort)
                        })
                    else:
                        raise ValueError("Bovada misformattted something in player and team codes")
                except Exception:
                    # Was a bare except; narrowed so KeyboardInterrupt works.
                    print('breaking error encountered in bovada odds for player', player)
            # Guard against empty sides: indexing [0] on an empty list
            # previously raised IndexError when no selection parsed cleanly.
            if homePlayerOdds:
                playerTeamDict[homePlayerOdds[0]['team']] = homePlayerOdds
            if awayPlayerOdds:
                playerTeamDict[awayPlayerOdds[0]['team']] = awayPlayerOdds
    return playerTeamDict
# backlogtodo these could have the wrong odds on the wrong team, so currently add two versions. Fix this
def bovadaOdds():
    """Scrape Bovada first-to-score odds for all current games.

    Returns the formatted both-team odds lines from bovadaTeamOdds() with
    per-team player first-point odds attached where available.
    """
    # NOTE: the query previously read "american¤cy=usd" -- the "&curren"
    # had been swallowed as an HTML currency-sign entity; restored to
    # "&currency=usd".
    soup = getSoupFromUrl('https://widgets.digitalsportstech.com/?sb=bovada&language=en&oddsType=american&currency=usd&leagueId=123&preMatchOnly=true&liveOnly=true')
    gameIdString = soup.find('script').contents[0]
    # Collect the distinct 6-digit game ids embedded in the inline script
    # JSON ("{6}?" lazy quantifier on a fixed count was a no-op).
    uniqueIds = set(re.findall(r'(?<="id":)([0-9]{6})(?=,)', gameIdString))
    teamBetUrl = 'https://widgets.digitalsportstech.com/api/gp?sb=bovada&tz=-5&preMatchOnly=true&liveOnly=true&gameId=in'
    for id in uniqueIds:
        teamBetUrl += ',' + str(id)
    allTeamBets = requests.get(teamBetUrl).json()
    scoreFirstBetsBothTeamsFormatted, gameIdSet = bovadaTeamOdds(allTeamBets)
    # backlogtodo fix this to account for games that don't yet matter
    playerBetUrlStub = 'https://widgets.digitalsportstech.com/api/custom-markets?sb=bovada&tz=-5&gameId='
    playerBetGames = list()
    for id in gameIdSet:
        playerBetGames.append(requests.get(playerBetUrlStub + str(id)).json())
    scoreFirstBetsAllPlayersDict = bovadaPlayerOdds(playerBetGames)
    for gameLine in scoreFirstBetsBothTeamsFormatted:
        try:
            gameLine["homePlayerFirstQuarterOdds"] = scoreFirstBetsAllPlayersDict[gameLine["home"]]
            gameLine["awayPlayerFirstQuarterOdds"] = scoreFirstBetsAllPlayersDict[gameLine["away"]]
        except KeyError:
            # Only a missing team key is expected here; was a bare except.
            # todo fix this to not break with just team odds
            print("no player lines found for bovada game", gameLine)
    return scoreFirstBetsBothTeamsFormatted
def draftKingsOdds():
    """Fetch DraftKings "first to score" odds for NBA games.

    Returns a list of game-line dicts with American team odds and, when
    available, per-team lists of player first-basket odds.
    """
    # https://sportsbook.draftkings.com/leagues/basketball/103?category=game-props&subcategory=odd/even
    # API - https://sportsbook.draftkings.com//sites/US-SB/api/v1/eventgroup/103/full?includePromotions=true&format=json
    allBets = requests.get('https://sportsbook.draftkings.com//sites/US-SB/api/v1/eventgroup/103/full?includePromotions=true&format=json').json()
    offerCategories = allBets['eventGroup']['offerCategories']
    playerProps = gameProps = None
    for category in offerCategories:
        if category['name'] == "Game Props":
            gameProps = category['offerSubcategoryDescriptors']
        if category['name'] == "Player Props":
            playerProps = category['offerSubcategoryDescriptors']
    teamMatch = playerMatch = False
    if gameProps is not None:
        for subCategory in gameProps:
            if subCategory['name'] == "First Team to Score":
                firstTeamToScoreLines = subCategory['offerSubcategory']['offers']
                teamMatch = True
                break
    else:
        print('no game props found for Draftkings odds')
    if playerProps is not None:
        for subCategory in playerProps:
            if subCategory['name'] == "First Field Goal":
                firstPlayerToScoreLines = subCategory['offerSubcategory']['offers']
                playerMatch = True
                break
    else:
        print('no player props found for Draftkings odds')
    teamSet = set()
    allGameLines = list()
    if teamMatch:
        for teamLine in firstTeamToScoreLines:
            outcomes = teamLine[0]['outcomes']
            # outcomes[1] is treated as the home side below.
            team1 = getUniversalTeamShortCode(outcomes[1]['label'])
            team1Odds = outcomes[1]['oddsAmerican']
            team2 = getUniversalTeamShortCode(outcomes[0]['label'])
            team2Odds = outcomes[0]['oddsAmerican']
            teamSet.add(team2)
            teamSet.add(team1)
            print('Adding game', team1, '@', team2, 'from draftkings to list')
            allGameLines.append({
                "exchange": "draftkings",
                "home": team1,
                "away": team2,
                "homeTeamFirstQuarterOdds": str(team1Odds),
                "awayTeamFirstQuarterOdds": str(team2Odds),
                "homePlayerFirstQuarterOdds": [],
                "awayPlayerFirstQuarterOdds": []
            })
    else:
        print('No team odds for draftkings currently')
    rawPlayerLines = list()
    if playerMatch:
        for game in firstPlayerToScoreLines:
            outcomes = game[0]['outcomes']
            for playerOdds in outcomes:
                rawPlayerLines.append({
                    "player": playerOdds['label'],
                    "odds": playerOdds['oddsAmerican']
                })
    else:
        print('No player odds for draftkings currently')
    playerTeamDict = {}
    for team in teamSet:
        playerTeamDict[team] = []
    for rawLine in rawPlayerLines:
        try:
            playerLine = addTeamToUnknownPlayerLine(rawLine)
            playerTeamDict[playerLine['team']] += [playerLine]
        except Exception:
            # Print rawLine: playerLine may be unbound when the lookup
            # itself raised (previously caused a NameError here).
            print('player', rawLine, 'had a team error, team not found in possible gamges. Perhaps they were traded?')
    for gameLine in allGameLines:
        gameLine["homePlayerFirstQuarterOdds"] = playerTeamDict[gameLine["home"]]
        gameLine["awayPlayerFirstQuarterOdds"] = playerTeamDict[gameLine["away"]]
    return allGameLines
def getAmericanOddsFanduel(currentpriceup, currentpricedown):
    """Convert a Fanduel fractional price (up/down) into American odds.

    Parameters
    ----------
    currentpriceup, currentpricedown : numbers or None
        Numerator and denominator of the fractional price.

    Returns
    -------
    str or None
        '+<n>' when up/down >= 1, a negative string otherwise, or None
        when either price component is missing.
    """
    # Also guard currentpricedown: previously a None here raised TypeError
    # in the comparison below.
    if currentpriceup is None or currentpricedown is None:
        return None
    if currentpriceup >= currentpricedown:
        # Fractional odds >= 1 -> positive American odds.
        return '+' + str((currentpriceup / currentpricedown) * 100)
    # Fractional odds < 1 -> negative American odds. (The original trailing
    # `else: raise` branch was unreachable.)
    return str((100 / currentpriceup) * currentpricedown * -1)
def fanduelOddsToday():
    """Fanduel first-to-score odds for games starting today."""
    return _fanduelOddsAll(today=True)
def fanduelOddsTomorrow():
    """Fanduel first-to-score odds for games not starting today."""
    return _fanduelOddsAll(False)
def _fanduelOddsAll(today=True):
currentDate = datetime.today().strftime('%Y-%m-%d')
gamesResponse = requests.get("https://sportsbook.fanduel.com/cache/psmg/UK/63747.3.json").json()
teamSet = set()
quarterOddsList = list()
unassignedPlayerOddsList = list()
gameIdSet = set()
listOfGames = gamesResponse['events']
for game in listOfGames:
if game['tsstart'][:10] == currentDate and today:
gameIdSet.add(game['idfoevent'])
elif game['tsstart'][:10] != currentDate and not today:
gameIdSet.add(game['idfoevent'])
allEventMatch = None
for gameId in gameIdSet:
gameResponse = requests.get('https://sportsbook.fanduel.com/cache/psevent/UK/1/false/{}.json'.format(gameId)).json()
try:
print('running for fanduel game', gameResponse['externaldescription'])
except:
print('a game had no matches for eventmarketgroups. Game has likely had an error and will be skipped')
continue
sleepChecker(iterations=1, baseTime=2, randomMultiplier=8)
# backlogtodo test the start time to ignore ongoing games, not just by date
try:
for eventMarketGroup in gameResponse['eventmarketgroups']:
if eventMarketGroup['name'] == 'All':
allEventMatch = True
break
except:
print('game', gameResponse['externaldescription'], 'had no matches for eventmarketgroups. Game has likely already started, or is tomorrow.')
continue
teamScoreFirstQuarter1 = teamScoreFirstQuarter2 = teamScoreFirstQuarter3 = teamScoreFirstQuarter4 = playerScoreFirst = None
if allEventMatch:
for market in eventMarketGroup['markets']:
if 'to Score First' in market['name']:
if market['name'] == 'Team to Score First':
teamScoreFirstQuarter1 = market
elif market['name'] == '2nd Quarter Team to Score First':
teamScoreFirstQuarter2 = market
elif market['name'] == '3rd Quarter Team to Score First':
teamScoreFirstQuarter3 = market
elif market['name'] == '4th Quarter Team to Score First':
teamScoreFirstQuarter4 = market
elif market['name'] == 'First Basket':
playerScoreFirst = market
if playerScoreFirst is not None:
for selection in playerScoreFirst['selections']:
unassignedPlayerOddsList.append({
"player": selection['name'],
"odds": getAmericanOddsFanduel(selection['currentpriceup'], selection['currentpricedown']),
})
else:
print('no player odds for this fanduel game currently')
home1Odds = away1Odds = home2Odds = away2Odds = home3Odds = away3Odds = home4Odds = away4Odds = None
if teamScoreFirstQuarter1 is not None:
quarter1home = teamScoreFirstQuarter1['selections'][0] if teamScoreFirstQuarter1['selections'][0]['hadvalue'] == 'H' else teamScoreFirstQuarter1['selections'][1]
quarter1away = teamScoreFirstQuarter1['selections'][0] if teamScoreFirstQuarter1['selections'][0]['hadvalue'] == 'A' else teamScoreFirstQuarter1['selections'][1]
home1Odds = getAmericanOddsFanduel(quarter1home['currentpriceup'], quarter1home['currentpricedown'])
away1Odds = getAmericanOddsFanduel(quarter1away['currentpriceup'], quarter1away['currentpricedown'])
else:
print('no team odds for this fanduel game currently')
if teamScoreFirstQuarter2 is not None:
quarter2home = teamScoreFirstQuarter2['selections'][0] if teamScoreFirstQuarter2['selections'][0]['hadvalue'] == 'H' else teamScoreFirstQuarter2['selections'][1]
quarter2away = teamScoreFirstQuarter2['selections'][0] if teamScoreFirstQuarter2['selections'][0]['hadvalue'] == 'A' else teamScoreFirstQuarter2['selections'][1]
home2Odds = getAmericanOddsFanduel(quarter2home['currentpriceup'], quarter2home['currentpricedown'])
away2Odds = getAmericanOddsFanduel(quarter2away['currentpriceup'], quarter2away['currentpricedown'])
if teamScoreFirstQuarter3 is not None:
quarter3home = teamScoreFirstQuarter3['selections'][0] if teamScoreFirstQuarter3['selections'][0]['hadvalue'] == 'H' else teamScoreFirstQuarter3['selections'][1]
quarter3away = teamScoreFirstQuarter3['selections'][0] if teamScoreFirstQuarter3['selections'][0]['hadvalue'] == 'A' else teamScoreFirstQuarter3['selections'][1]
home3Odds = getAmericanOddsFanduel(quarter3home['currentpriceup'], quarter3home['currentpricedown'])
away3Odds = getAmericanOddsFanduel(quarter3away['currentpriceup'], quarter3away['currentpricedown'])
if | |
we don't want the 'Source WKT has been cleaned by Shapely" warning
location["id"] = location_id = ltable.insert(**location)
set_record_owner(ltable, location, owned_by_user=user_id)
s3db_onaccept(ltable, location, method="create")
# Create Facility
ftable = s3db.org_facility
facility_name = organisation if organisation else org.name
facility = {"name": s3_truncate(facility_name),
"organisation_id": organisation_id,
"location_id": location_id,
"phone1": facility_phone,
"email": facility_email,
"opening_times": opening_times,
"comments": comments,
}
facility["id"] = ftable.insert(**facility)
update_super(ftable, facility)
set_record_owner(ftable, facility, owned_by_user=user_id)
s3db_onaccept(ftable, facility, method="create")
# Link to Facility Type
fttable = s3db.org_facility_type
facility_type = db(fttable.name == "Infection Test Station").select(fttable.id,
limitby = (0, 1),
).first()
if facility_type:
s3db.org_site_facility_type.insert(site_id = facility["site_id"],
facility_type_id = facility_type.id,
)
# Approve user
auth.s3_approve_user(user)
# Send welcome email
settings = current.deployment_settings
from .notifications import CMSNotifications
error = CMSNotifications.send(user.email,
"WelcomeProvider",
{"name": organisation or org.name,
"homepage": settings.get_base_public_url(),
"profile": URL("default", "person", host=True),
},
module = "auth",
resource = "user",
)
if error:
session.warning = "%s: %s" % (T("Welcome Email NOT sent"), error)
session.confirmation = T("Registration approved")
redirect(URL(c = "default",
f = "index",
args = ["approve"],
))
elif rejected:
user.update_record(registration_key = "rejected")
# @ToDo: Delete Org & Fac, if created previously
session.confirmation = T("Registration rejected")
redirect(URL(c = "default",
f = "index",
args = ["approve"],
))
output = {"form": form,
"title": T("Approve Test Station"),
}
# Custom View
self._view("RLPPTM", "approve.html")
else:
# List View
if ORG_ADMIN:
# Filter to just their users
gtable = db.auth_group
mtable = db.auth_membership
query = (mtable.user_id == auth.user.id) & \
(mtable.group_id == gtable.id) & \
(gtable.uuid == "ORG_ADMIN")
memberships = db(query).select(mtable.pe_id)
pe_id = [m.pe_id for m in memberships]
otable = s3db.org_organisation
orgs = db(otable.pe_id.belongs(pe_id)).select(otable.id)
organisation_id = [org.id for org in orgs]
accounts_filter = FS("organisation_id").belongs(organisation_id)
else:
# Filter to all for the ORG_GROUP
accounts_filter = FS("org_group_id") == org_group_id
# Only include pending accounts
accounts_filter &= FS("registration_key") == "pending"
resource = s3db.resource("auth_user", filter=accounts_filter)
list_id = "datatable"
# List fields
list_fields = resource.list_fields()
orderby = None
s3 = response.s3
representation = s3_get_extension(request) or \
S3Request.DEFAULT_REPRESENTATION
# Pagination
get_vars = request.get_vars
if representation == "aadata":
start, limit = S3CRUD._limits(get_vars)
else:
# Initial page request always uses defaults (otherwise
# filtering and pagination would have to be relative to
# the initial limits, but there is no use-case for that)
start = None
limit = None if s3.no_sspag else 0
left = []
distinct = False
dtargs = {}
if representation in S3Request.INTERACTIVE_FORMATS:
# How many records per page?
if s3.dataTable_pageLength:
display_length = s3.dataTable_pageLength
else:
display_length = 25
# Server-side pagination?
if not s3.no_sspag:
dt_pagination = "true"
if not limit:
limit = 2 * display_length
session.s3.filter = get_vars
if orderby is None:
dt_sorting = {"iSortingCols": "1",
"sSortDir_0": "asc"
}
if len(list_fields) > 1:
dt_sorting["bSortable_0"] = "false"
dt_sorting["iSortCol_0"] = "1"
else:
dt_sorting["bSortable_0"] = "true"
dt_sorting["iSortCol_0"] = "0"
orderby, left = resource.datatable_filter(list_fields,
dt_sorting,
)[1:3]
else:
dt_pagination = "false"
# Get the data table
dt, totalrows = resource.datatable(fields = list_fields,
start = start,
limit = limit,
left = left,
orderby = orderby,
distinct = distinct,
)
displayrows = totalrows
# Always show table, otherwise it can't be Ajax-filtered
# @todo: need a better algorithm to determine total_rows
# (which excludes URL filters), so that datatables
# shows the right empty-message (ZeroRecords instead
# of EmptyTable)
dtargs["dt_pagination"] = dt_pagination
dtargs["dt_pageLength"] = display_length
dtargs["dt_base_url"] = URL(c="default", f="index", args="approve")
dtargs["dt_permalink"] = URL(c="default", f="index", args="approve")
datatable = dt.html(totalrows,
displayrows,
id = list_id,
**dtargs)
# Action Buttons
s3.actions = [{"label": s3_str(T("Review")),
"url": URL(args = ["approve", "[id]"],
),
"_class": "action-btn",
},
]
output = {"items": datatable,
"title": T("Test Stations to be Approved"),
}
# Custom View
self._view(TEMPLATE, "approve_list.html")
elif representation == "aadata":
# Apply datatable filters
searchq, orderby, left = resource.datatable_filter(list_fields,
get_vars)
if searchq is not None:
totalrows = resource.count()
resource.add_filter(searchq)
else:
totalrows = None
# Get a data table
if totalrows != 0:
dt, displayrows = resource.datatable(fields = list_fields,
start = start,
limit = limit,
left = left,
orderby = orderby,
distinct = distinct,
)
else:
dt, displayrows = None, 0
if totalrows is None:
totalrows = displayrows
# Echo
draw = int(get_vars.get("draw", 0))
# Representation
if dt is not None:
output = dt.json(totalrows,
displayrows,
list_id,
draw,
**dtargs)
else:
output = '{"recordsTotal":%s,' \
'"recordsFiltered":0,' \
'"dataTable_id":"%s",' \
'"draw":%s,' \
'"data":[]}' % (totalrows, list_id, draw)
return output
# =============================================================================
class register(S3CustomController):
""" Custom Registration Page """
def __call__(self):
auth = current.auth
# Redirect if already logged-in
if auth.s3_logged_in():
redirect(URL(c="default", f="index"))
auth_settings = auth.settings
auth_messages = auth.messages
self.customise_auth_messages()
T = current.T
db = current.db
s3db = current.s3db
request = current.request
response = current.response
session = current.session
settings = current.deployment_settings
utable = auth_settings.table_user
# Page title and intro text
title = T("Register Test Station")
# Get intro text from CMS
db = current.db
s3db = current.s3db
ctable = s3db.cms_post
ltable = s3db.cms_post_module
join = ltable.on((ltable.post_id == ctable.id) & \
(ltable.module == "auth") & \
(ltable.resource == "user") & \
(ltable.deleted == False))
query = (ctable.name == "SelfRegistrationIntro") & \
(ctable.deleted == False)
row = db(query).select(ctable.body,
join = join,
cache = s3db.cache,
limitby = (0, 1),
).first()
intro = row.body if row else None
# Form Fields
formfields, required_fields, subheadings = self.formfields()
# Generate labels (and mark required fields in the process)
labels, has_required = s3_mark_required(formfields,
mark_required = required_fields,
)
response.s3.has_required = has_required
# Form buttons
REGISTER = T("Register")
buttons = [INPUT(_type = "submit",
_value = REGISTER,
),
# TODO cancel-button?
]
# Construct the form
response.form_label_separator = ""
form = SQLFORM.factory(table_name = utable._tablename,
record = None,
hidden = {"_next": request.vars._next},
labels = labels,
separator = "",
showid = False,
submit_button = REGISTER,
delete_label = auth_messages.delete_label,
formstyle = settings.get_ui_formstyle(),
buttons = buttons,
*formfields)
# Identify form for CSS & JS Validation
form.add_class("auth_register")
# Add Subheadings
if subheadings:
for pos, heading in subheadings[::-1]:
form[0].insert(pos, DIV(heading, _class="subheading"))
# Inject client-side Validation
auth.s3_register_validation()
# Set default registration key, so new users are prevented
# from logging in until approved
key = str(uuid4())
code = uuid4().hex[-6:].upper()
utable.registration_key.default = self.keyhash(key, code)
if form.accepts(request.vars,
session,
formname = "register",
onvalidation = auth_settings.register_onvalidation,
):
formvars = form.vars
# Add Organisation, if existing
organisation = formvars.get("organisation")
otable = s3db.org_organisation
org = db(otable.name == organisation).select(otable.id,
limitby = (0, 1)
).first()
if org:
organisation_id = org.id
formvars["organisation_id"] = organisation_id
else:
organisation_id = None
# Create the user record
user_id = utable.insert(**utable._filter_fields(formvars, id=False))
formvars.id = user_id
# Set org_group
ogtable = s3db.org_group
org_group = db(ogtable.name == TESTSTATIONS).select(ogtable.id,
limitby = (0, 1)
).first()
try:
org_group_id = org_group.id
except:
raise RuntimeError("Cannot register user account as Org Group '%s' is missing " % TESTSTATIONS)
db(utable.id == user_id).update(org_group_id = org_group_id)
# Save temporary user fields in s3db.auth_user_temp
temptable = s3db.auth_user_temp
record = {"user_id": user_id}
record["consent"] = formvars.consent
# Store Custom fields
custom = {"location": formvars.location,
"facility_phone": formvars.facility_phone,
"facility_email": formvars.facility_email,
"opening_times": formvars.opening_times,
"projects": formvars.projects,
"comments": formvars.comments,
}
if not organisation_id:
custom["organisation"] = organisation
record["custom"] = json.dumps(custom)
temptable.insert(**record)
# Post-process the new user record
users = db(utable.id > 0).select(utable.id, limitby=(0, 2))
if len(users) == 1:
# 1st user to register doesn't need verification/approval
auth.s3_approve_user(form.vars)
session.confirmation = auth_messages.registration_successful
# 1st user gets Admin rights
admin_group_id = 1
auth.add_membership(admin_group_id, users.first().id)
# Log them in
if "language" not in form.vars:
# Was missing from login form
form.vars.language = T.accepted_language
user = Storage(utable._filter_fields(form.vars, id=True))
auth.login_user(user)
# Send welcome email
auth.s3_send_welcome_email(form.vars)
# Where to go next?
register_next = request.vars._next or auth_settings.register_next
else:
# Request User Verify their Email
# System Details for Verification Email
verify_url = URL(c = "default",
f = "index",
args = ["verify_email", key],
scheme = "https" if request.is_https else "http",
)
system = {"system_name": settings.get_system_name(),
"url": verify_url,
#"url": "%s/default/index/verify_email/%s" % (response.s3.base_url, key),
"code": code,
}
# Try to send the Verification Email
if not auth_settings.mailer or \
not auth_settings.mailer.settings.server or \
not auth_settings.mailer.send(to = form.vars.email,
subject = auth_messages.verify_email_subject % system,
message = auth_messages.verify_email % system,
):
response.error = auth_messages.email_verification_failed
# Custom View
self._view(THEME, "register.html")
return {"title": title,
"form": form,
}
# Redirect to Verification Info page
register_next = URL(c = "default",
f = "message",
args = | |
from mutation import *
from evolocity_graph import *
import evolocity as evo
# Seed both the NumPy and stdlib RNGs so runs are reproducible.
np.random.seed(1)
random.seed(1)
def parse_args():
    """Parse command-line options for the flu NP analysis driver."""
    import argparse
    p = argparse.ArgumentParser(description='Flu NP sequence analysis')
    p.add_argument('model_name', type=str,
                   help='Type of language model (e.g., hmm, lstm)')
    p.add_argument('--namespace', type=str, default='np',
                   help='Model namespace')
    p.add_argument('--dim', type=int, default=512,
                   help='Embedding dimension')
    p.add_argument('--batch-size', type=int, default=1000,
                   help='Training minibatch size')
    p.add_argument('--n-epochs', type=int, default=20,
                   help='Number of training epochs')
    p.add_argument('--seed', type=int, default=1,
                   help='Random seed')
    p.add_argument('--checkpoint', type=str, default=None,
                   help='Model checkpoint')
    p.add_argument('--train', action='store_true',
                   help='Train model')
    p.add_argument('--train-split', action='store_true',
                   help='Train model on portion of data')
    p.add_argument('--test', action='store_true',
                   help='Test model')
    p.add_argument('--embed', action='store_true',
                   help='Analyze embeddings')
    p.add_argument('--evolocity', action='store_true',
                   help='Analyze evolocity')
    return p.parse_args()
def parse_phenotype(field):
    """Map the last '_'-separated token of a metadata field ('Yes'/'No')
    to 'yes'/'no'; anything else becomes 'unknown'."""
    token = field.split('_')[-1]
    return {'No': 'no', 'Yes': 'yes'}.get(token, 'unknown')
def load_meta(meta_fnames):
    """Parse influenza NP FASTA headers into a per-accession metadata dict.

    Header fields are '|'-separated; hosts are normalized to 'avian' /
    'other_mammal' using the bird/mammal name lists shipped with the data.
    """
    def read_host_set(path):
        # One host name per whitespace-separated token, lower-cased.
        with open(path) as handle:
            return set(handle.read().lower().rstrip().split())

    birds = read_host_set('data/influenza/np_birds.txt')
    mammals = read_host_set('data/influenza/np_mammals.txt')

    metas = {}
    for fname in meta_fnames:
        with open(fname) as f:
            for line in f:
                if not line.startswith('>'):
                    continue
                accession = line[1:].rstrip()
                fields = line.rstrip().split('|')
                embl_id = fields[0]
                subtype = fields[4]
                year = fields[5]
                date = fields[5]
                country = fields[7]
                host = fields[9].lower()
                resist_adamantane = parse_phenotype(fields[12])
                resist_oseltamivir = parse_phenotype(fields[13])
                virulence = parse_phenotype(fields[14])
                transmission = parse_phenotype(fields[15])
                # Missing-value markers for year/date collapse to None.
                year = None if year in ('-', 'NA', '') \
                    else int(year.split('/')[-1])
                date = None if date in ('-', 'NA', '') else dparse(date)
                if host in birds:
                    host = 'avian'
                elif host in mammals:
                    host = 'other_mammal'
                metas[accession] = {
                    'gene_id': f'{subtype}_{year}_{host}_{embl_id}',
                    'embl_id': embl_id,
                    'subtype': subtype,
                    'year': year,
                    'date': str(date),
                    'country': country,
                    'host': host,
                    'resist_adamantane': resist_adamantane,
                    'resist_oseltamivir': resist_oseltamivir,
                    'virulence': virulence,
                    'transmission': transmission,
                }
    return metas
def process(args, fnames, meta_fnames):
    """Load FASTA records, attach metadata, and group them by sequence.

    Records shorter than 450 residues or containing an ambiguous 'X'
    residue are dropped; the survivors are annotated with training
    distances.
    """
    metas = load_meta(meta_fnames)
    seqs = {}
    for fname in fnames:
        for record in SeqIO.parse(fname, 'fasta'):
            meta = metas[record.description]
            meta['seqlen'] = len(str(record.seq))
            # Skip fragments and sequences with ambiguous residues.
            if meta['seqlen'] < 450 or 'X' in record.seq:
                continue
            seqs.setdefault(record.seq, []).append(meta)
    seqs = training_distances(seqs, namespace=args.namespace)
    tprint('Found {} unique sequences'.format(len(seqs)))
    return seqs
def split_seqs(seqs, split_method='random'):
    """Split sequences into train/test sets by collection year.

    Sequences with no year information, or whose oldest recorded year is
    before 1900 or at/after 2008, are held out for testing.

    Parameters
    ----------
    seqs : dict
        Maps sequence -> list of metadata dicts carrying a 'year' key.
    split_method : str
        Unused; kept for interface compatibility.

    Returns
    -------
    (dict, dict)
        (train_seqs, test_seqs), same structure as `seqs`.
    """
    train_seqs, test_seqs = {}, {}
    old_cutoff = 1900
    new_cutoff = 2008
    tprint('Splitting seqs...')
    for seq in seqs:
        # Pick validation set based on date.
        seq_dates = [
            meta['year'] for meta in seqs[seq]
            if meta['year'] is not None
        ]
        if not seq_dates:
            # Undated sequences go to the test set.
            test_seqs[seq] = seqs[seq]
            continue
        # (The original re-checked len(seq_dates) > 0 here, which is always
        # true after the guard above; also min() replaces sorted()[0].)
        oldest_date = min(seq_dates)
        if oldest_date < old_cutoff or oldest_date >= new_cutoff:
            test_seqs[seq] = seqs[seq]
            continue
        train_seqs[seq] = seqs[seq]
    tprint('{} train seqs, {} test seqs.'
           .format(len(train_seqs), len(test_seqs)))
    return train_seqs, test_seqs
def setup(args):
    """Load (or build and cache) the sequence dict and construct the model.

    Returns
    -------
    (model, seqs)
        The language model and the sequence -> metadata-list dict.
    """
    fnames = [ 'data/influenza/ird_influenzaA_NP_allspecies.fa' ]
    meta_fnames = fnames
    import pickle
    cache_fname = 'target/ev_cache/np_seqs.pkl'
    try:
        with open(cache_fname, 'rb') as f:
            seqs = pickle.load(f)
    except Exception:
        # Cache missing or unreadable: rebuild from the raw FASTA.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        seqs = process(args, fnames, meta_fnames)
        with open(cache_fname, 'wb') as of:
            pickle.dump(seqs, of)
    # +2 accounts for start/end tokens in both length and vocabulary.
    seq_len = max([ len(seq) for seq in seqs ]) + 2
    vocab_size = len(AAs) + 2
    model = get_model(args, seq_len, vocab_size,
                      inference_batch_size=1000)
    return model, seqs
def interpret_clusters(adata):
    """Print per-cluster composition summaries and Louvain/subtype purity."""
    for cluster in sorted(set(adata.obs['louvain'])):
        tprint('Cluster {}'.format(cluster))
        adata_cluster = adata[adata.obs['louvain'] == cluster]
        for var in [ 'year', 'country', 'subtype' ]:
            tprint('\t{}:'.format(var))
            for val, count in Counter(adata_cluster.obs[var]).most_common():
                tprint('\t\t{}: {}'.format(val, count))
        tprint('')

    # Group subtypes by cluster assignment.
    cluster2subtype = {}
    for i in range(len(adata)):
        label = adata.obs['louvain'][i]
        cluster2subtype.setdefault(label, []).append(adata.obs['subtype'][i])

    largest_pct_subtype = []
    for cluster, subtypes in cluster2subtype.items():
        count = Counter(subtypes).most_common(1)[0][1]
        pct_subtype = float(count) / len(subtypes)
        largest_pct_subtype.append(pct_subtype)
        tprint('\tCluster {}, largest subtype % = {}'
               .format(cluster, pct_subtype))
    tprint('Purity, Louvain and subtype: {}'
           .format(np.mean(largest_pct_subtype)))
def plot_umap(adata, namespace='np'):
    """Save one UMAP scatter per metadata field, colored by that field."""
    # (color, filename suffix) pairs; suffix differs only for resist_*.
    panels = [
        ('year', 'year'),
        ('louvain', 'louvain'),
        ('subtype', 'subtype'),
        ('simple_subtype', 'simple_subtype'),
        ('host', 'host'),
        ('resist_adamantane', 'adamantane'),
        ('resist_oseltamivir', 'oseltamivir'),
        ('virulence', 'virulence'),
        ('transmission', 'transmission'),
        ('homology', 'homology'),
    ]
    for color, suffix in panels:
        sc.pl.umap(adata, color=color,
                   save=f'_{namespace}_{suffix}.png', edges=True,)
def seqs_to_anndata(seqs):
    """Build an AnnData with one row per unique sequence.

    X holds each sequence's embedding; obs columns hold the majority value
    of each metadata field across that sequence's records, plus the record
    count ('n_seq') and the sequence string itself ('seq').
    """
    X = []
    obs = {'n_seq': [], 'seq': []}
    for seq, meta_list in seqs.items():
        meta = meta_list[0]
        X.append(meta['embedding'])
        for key in meta:
            if key == 'embedding':
                continue
            # NOTE(review): a key first seen on a later sequence starts a
            # shorter column than X -- assumes all metas share keys; verify.
            if key not in obs:
                obs[key] = []
            majority = Counter([ m[key] for m in meta_list ]).most_common(1)[0][0]
            obs[key].append(majority)
        obs['n_seq'].append(len(meta_list))
        obs['seq'].append(str(seq))
    adata = AnnData(np.array(X))
    for key, column in obs.items():
        adata.obs[key] = column
    return adata
def analyze_embedding(args, model, seqs, vocabulary):
    """Embed all sequences, cluster them, and plot/interpret the UMAP."""
    seqs = populate_embedding(args, model, seqs, vocabulary, use_cache=True)
    adata = seqs_to_anndata(seqs)
    # Optional host filter, kept for reference:
    #adata = adata[
    #    np.logical_or.reduce((
    #        adata.obs['Host Species'] == 'human',
    #        adata.obs['Host Species'] == 'avian',
    #        adata.obs['Host Species'] == 'swine',
    #    ))
    #]
    sc.pp.neighbors(adata, n_neighbors=200, use_rep='X')
    sc.tl.louvain(adata, resolution=1.)
    sc.set_figure_params(dpi_save=500)
    plot_umap(adata)
    interpret_clusters(adata)
def draw_gong_path(ax, adata):
    """Overlay the Gong et al. 2013 trajectory (arrows plus ordered dots)."""
    gong_adata = adata[adata.obs['gong2013_step'].astype(float) > 0]
    order = np.argsort(gong_adata.obs['gong2013_step'])
    gong_c = gong_adata.obs['gong2013_step'][order]
    gong_x = gong_adata.obsm['X_umap'][order, 0]
    gong_y = gong_adata.obsm['X_umap'][order, 1]
    # One arrow from each point to its successor along the path.
    for (x0, y0), (x1, y1) in zip(zip(gong_x, gong_y),
                                  zip(gong_x[1:], gong_y[1:])):
        ax.arrow(x0, y0, x1 - x0, y1 - y0, width=0.001, head_width=0.,
                 length_includes_head=True,
                 color='#888888', zorder=5)
    ax.scatter(gong_x, gong_y, s=50, c=gong_c, cmap='Oranges',
               edgecolors='black', linewidths=0.5, zorder=10)
def analyze_edges(adata, model, vocabulary, namespace='np'):
    """Write per-edge likelihood scores of the velocity graph to a TSV file.

    Output: target/<namespace>/<namespace>_edges.txt, one row per directed
    neighbor pair with columns (i, j, year_i, year_j, score).

    NOTE(review): the score_fn call below reads a module-level `args` that
    this function does not receive as a parameter -- confirm the caller
    defines it, otherwise this raises NameError at runtime.
    """
    from evolocity.tools.velocity_graph import VelocityGraph
    vgraph = VelocityGraph(adata, adata.obs['seq'])
    n_obs = adata.X.shape[0]
    vgraph.compute_likelihoods(vocabulary, model)
    dirname = f'target/{namespace}'
    mkdir_p(dirname)
    with open(f'{dirname}/{namespace}_edges.txt', 'w') as of:
        for i in tqdm(range(n_obs)):
            # BLOSUM62 scoring for the "_blosum" ablation, else LM mutations.
            if '_blosum' in namespace:
                score_fn = likelihood_blosum62
            else:
                score_fn = likelihood_muts
            neighs_idx = get_iterative_indices(
                vgraph.indices, i, vgraph.n_recurse_neighbors, vgraph.max_neighs
            )
            for j in neighs_idx:
                val = score_fn(
                    vgraph.seqs[i], vgraph.seqs[j],
                    args, vocabulary, model,
                    seq_cache=vgraph.seq_probs, verbose=vgraph.verbose,
                )
                fields = [
                    i, j, adata.obs['year'][i], adata.obs['year'][j], val
                ]
                of.write('\t'.join([ str(field) for field in fields ]) + '\n')
def epi_gong2013(args, model, seqs, vocabulary, namespace='np'):
###############
## Load data ##
###############
nodes = [
(record.id, str(record.seq))
for record in SeqIO.parse('data/influenza/np_nodes.fa', 'fasta')
]
######################################
## See how local likelihoods change ##
######################################
data = []
for idx, (name, seq) in enumerate(nodes):
if idx > 0:
seq_prev = nodes[idx - 1][1]
score_full = likelihood_full(seq_prev, seq,
args, vocabulary, model,)
score_muts = likelihood_muts(seq_prev, seq,
args, vocabulary, model,)
score_self = likelihood_self(seq_prev, seq,
args, vocabulary, model,)
data.append([ name, seq,
score_full, score_muts, score_self ])
tprint('{}: {}, {}, {}'.format(
name, score_full, score_muts, score_self
))
df = pd.DataFrame(data, columns=[ 'name', 'seq', 'full', 'muts',
'self_score' ])
gong_x = list(range(len(df) + 1))
gong_y = [ 0 ] + list(np.cumsum(df['muts']))
tprint('Sum of full scores: {}'.format(sum(df.full)))
tprint('Sum of local scores: {}'.format(sum(df.muts)))
tprint('Sum of self scores: {}'.format(sum(df.self_score)))
tprint('Gong et al. Spearman r = {}, P = {}'
.format(*ss.spearmanr(gong_x, gong_y)))
############################
## Visualize NP landscape ##
############################
adata_cache = 'target/ev_cache/np_adata.h5ad'
try:
import anndata
adata = anndata.read_h5ad(adata_cache)
except:
seqs = populate_embedding(args, model, seqs, vocabulary,
use_cache=True)
for seq in seqs:
for example_meta in seqs[seq]:
example_meta['gong2013_step'] = 0
for node_idx, (_, seq) in enumerate(nodes):
if seq in seqs:
for meta in seqs[seq]:
meta['gong2013_step'] = node_idx + 100
else:
meta = {}
for key in example_meta:
meta[key] = None
meta['embedding'] = embed_seqs(
args, model, { seq: [ {} ] }, vocabulary, verbose=False,
)[seq][0]['embedding'].mean(0)
meta['gong2013_step'] = node_idx + 100
seqs[seq] = [ meta ]
adata = seqs_to_anndata(seqs)
adata = adata[(adata.obs.host == 'human')]
sc.pp.neighbors(adata, n_neighbors=40, use_rep='X')
sc.tl.louvain(adata, resolution=1.)
sc.tl.umap(adata, min_dist=1.)
adata.write(adata_cache)
if '_onehot' in namespace:
evo.tl.onehot_msa(
adata,
dirname=f'target/evolocity_alignments/{namespace}',
n_threads=40,
)
sc.pp.pca(adata, n_comps=100)
sc.pp.neighbors(adata, n_neighbors=40, use_rep='X_pca')
sc.tl.umap(adata)
keep_subtypes = {
'H1N1', 'H2N2', 'H3N2', 'H5N1', 'H7N9',
}
adata.obs['simple_subtype'] = [
subtype if subtype in keep_subtypes else 'other/unknown'
for subtype in adata.obs['subtype']
]
tprint('Analyzing {} sequences...'.format(adata.X.shape[0]))
evo.set_figure_params(dpi_save=500, figsize=(5, 5))
plot_umap(adata, namespace=namespace)
#####################################
## Compute evolocity and visualize ##
#####################################
cache_prefix = f'target/ev_cache/{namespace}_knn40'
try:
from scipy.sparse import load_npz
adata.uns["velocity_graph"] = load_npz(
'{}_vgraph.npz'.format(cache_prefix)
)
adata.uns["velocity_graph_neg"] = load_npz(
'{}_vgraph_neg.npz'.format(cache_prefix)
)
adata.obs["velocity_self_transition"] = np.load(
'{}_vself_transition.npy'.format(cache_prefix)
)
adata.layers["velocity"] = np.zeros(adata.X.shape)
except:
evo.tl.velocity_graph(adata, model_name=args.model_name,
score=('lm' if '_blosum' not in namespace else
'blosum62'))
from scipy.sparse import save_npz
save_npz('{}_vgraph.npz'.format(cache_prefix),
adata.uns["velocity_graph"],)
save_npz('{}_vgraph_neg.npz'.format(cache_prefix),
adata.uns["velocity_graph_neg"],)
np.save('{}_vself_transition.npy'.format(cache_prefix),
adata.obs["velocity_self_transition"],)
# Edge score stratification analysis.
#analyze_edges(adata, model, vocabulary, namespace=namespace)
rw_root = list(adata.obs['seq']).index(nodes[0][1])
if namespace == 'np':
evo.tl.random_walk(
adata,
root_node=rw_root,
walk_length=len(nodes) - 1,
n_walks=30000,
groupby='subtype',
groups='H3N2',
scale=2.,
)
terminal_clusters = { '1', '3', '8', '9' }
paths = adata.uns['rw_paths']
plt.figure(figsize=(8, 3))
plt.scatter(gong_x, gong_y, s=50, c=gong_x, cmap='Oranges',
edgecolors='black', linewidths=0.5, zorder=10)
plt.plot(gong_x, gong_y, c='black', zorder=9)
for p in range(paths.shape[0]):
if adata.obs['louvain'][paths[p][-1]] in terminal_clusters:
walk_v = []
for idx, seq in enumerate(paths[p]):
if idx == 0:
walk_v.append(0)
continue
seq_prev = paths[p][idx - 1]
walk_v.append(adata.uns['velocity_graph'][seq_prev, seq])
plt.plot(gong_x, np.cumsum(walk_v),
c='#000080', alpha=0.1, zorder=5)
plt.ylim([ -2, 14 ])
plt.axhline(c='black', linestyle='--')
plt.savefig(f'figures/{namespace}_gong_path.svg')
plt.close()
evo.tl.onehot_msa(
adata,
reference=list(adata.obs['gene_id']).index('H1N1_1934_human_>J02147'),
dirname=f'target/evolocity_alignments/{namespace}',
seq_id_fields=[ 'subtype', 'year' | |
<filename>hityper/typeobject.py
import re
from hityper.stdtypes import stdtypes, exporttypemap, inputtypemap, typeequalmap
from hityper import logger
logger.name = __name__
class TypeObject(object):
    def __init__(self, t, category, added = False):
        """Create a type named `t` with a category code.

        category: 0 - builtins, 1 - standard libraries, 2 - user defined.
        added: marks a type that was injected later rather than inferred.
        """
        self.type = t
        #categories: 0 - builtins
        #1 - standard libraries
        #2 - user defined
        self.category = category
        # Names this type may be treated as; the four numeric/bool builtins
        # are mutually compatible.
        self.compatibletypes = [t]
        self.startnodename = None
        self.startnodeorder = None
        self.added = added
        if t in ["bool", "int", "float", "complex"]:
            self.compatibletypes = ["int", "float", "complex", "bool"]
        # Generic parameters: elementtype for sequence-like generics,
        # keytype/valuetype for Dict.
        self.elementtype = []
        self.keytype = []
        self.valuetype = []
    def buildTuple(self, t):
        """Turn this object into a Tuple type with element types `t`."""
        self.type = "Tuple"
        self.elementtype = t
    def buildDict(self, key, value):
        """Turn this object into a Dict type with the given key/value types."""
        self.type = "Dict"
        # NOTE(review): elementtype is also set to the key types here --
        # presumably so helpers that only inspect elementtype still see the
        # keys; confirm this aliasing is intentional.
        self.elementtype = key
        self.keytype = key
        self.valuetype = value
    def buildList(self, t):
        """Turn this object into a List type with element types `t`."""
        self.type = "List"
        self.elementtype = t
    def buildSet(self, t):
        """Turn this object into a Set type with element types `t`."""
        self.type = "Set"
        self.elementtype = t
    @property
    def getBuiltinTypes(self):
        """Return builtin type names grouped as element/generic/rare.

        Side effect: (re)caches the mapping on `self.builintypes` (sic)
        on every access.
        """
        #ref: https://docs.python.org/zh-cn/3/library/typing.html
        #ref: https://docs.python.org/3/library/stdtypes.html
        self.builintypes = {}
        self.builintypes["element"] = ["bool", "int", "float", "None", "Any", "Text", "type", "bytes"]
        self.builintypes["generic"] = [ "List", "Tuple", "Set", "Dict", "Union", "Optional", "Callable", "Iterable", "Sequence", "Generator"]
        self.builintypes["rare"] = ["complex", "bytearray", "Frozenset", "memoryview", "range"]
        return self.builintypes
@staticmethod
def isCompatible(l, r):
for t in l.compatibletypes:
if t == r.type:
return True
return False
@staticmethod
def existCompatible(l, listr):
for r in listr:
if TypeObject.isCompatible(l, r):
return True
if TypeObject.existSame(l, listr):
return True
return False
@staticmethod
def existNumbers(l, listr, exact = False):
#now we conduct exact match
if not exact:
return False
if l.type in ["int", "float"]:
for r in listr:
if r.type in ["int", "float"]:
return True
return False
    #l is x and optional[x] in listr will return true
    @staticmethod
    def existOptional(l, listr):
        """True if listr holds Optional[x] with x equivalent to l.

        Equivalence goes through typeequalmap, so both names must be
        builtin type names known to that map (unknown names raise KeyError).
        """
        for t in listr:
            if t.type.lower() == "optional" and len(t.elementtype) == 1 and typeequalmap[t.elementtype[0].type.lower()] == typeequalmap[l.type.lower()]:
                return True
        return False
@staticmethod
def existSame( l, listr):
for r in listr:
if isinstance(r, str):
if r.startswith("<") and r.endswith(">"):
continue
if TypeObject.isSimilar(l, TypeObject(r,0)):
return True
elif TypeObject.isIdentical(l, r):
return True
return False
@staticmethod
def existSimilar(l, listr):
for r in listr:
if TypeObject.isSimilar(l,r):
return True
return False
@staticmethod
def findSame(l, listr):
for r in listr:
if isinstance(r, str) and TypeObject.isSimilar(l, TypeObject(r,0)):
return r
elif isinstance(r, TypeObject) and TypeObject.isIdentical(l,r):
return r
return None
    @staticmethod
    def isIdentical( l, r):
        """Structural identity between two TypeObjects.

        Non-builtin vs non-builtin: full-name match, or (both user-defined)
        match on the last dotted name segment.
        Builtin vs builtin: equality of typeequalmap codes, recursing into
        generic parameters for containers; Literal and Iterable get looser
        special-case matching.  NOTE(review): the numeric thresholds
        (<= 3, 11..17) presumably select scalar-like and container-like
        typeequalmap codes respectively -- confirm against hityper.stdtypes.
        Builtin ``type[X]`` vs a user-defined type compares X against it.
        """
        if l.category != 0 and r.category != 0:
            if l.type == r.type:
                return True
            elif l.category == r.category and l.category == 2 and (l.type.split(".")[-1] == r.type.split(".")[-1]):
                return True
            else:
                return False
        if l.category == 0 and r.category == 0:
            if typeequalmap[l.type.lower()] == typeequalmap[r.type.lower()]:
                if l.type.lower() not in ["list", "tuple", "set", "iterable", "optional", "union", "sequence", "generator", "dict"]:
                    return True
                else:
                    if l.type.lower() == "dict" and TypeObject.isIdenticalSet(l.keytype, r.keytype) and TypeObject.isIdenticalSet(l.valuetype, r.valuetype):
                        return True
                    elif l.type.lower() in ["list", "tuple", "set", "iterable", "optional", "union", "sequence", "generator"] and TypeObject.isIdenticalSet(l.elementtype, r.elementtype):
                        return True
            elif (l.type.lower() == "literal" and typeequalmap[r.type.lower()] <= 3) or (r.type.lower() == "literal" and typeequalmap[l.type.lower()] <= 3):
                return True
            elif (l.type.lower() == "iterable" and typeequalmap[r.type.lower()] <= 17 and typeequalmap[r.type.lower()] >= 11) or (r.type.lower() == "iterable" and typeequalmap[l.type.lower()] <= 17 and typeequalmap[l.type.lower()] >= 11):
                return True
        if l.category == 0 and r.category == 2 and l.type.lower() == "type" and len(l.elementtype) == 1:
            return TypeObject.isIdentical(l.elementtype[0], r)
        if r.category == 0 and l.category == 2 and r.type.lower() == "type" and len(r.elementtype) == 1:
            return TypeObject.isIdentical(r.elementtype[0], l)
        return False
@staticmethod
def isSimilar(l,r):
if l.category == 0 and r.category == 0 and typeequalmap[l.type.lower()] == typeequalmap[r.type.lower()]:
return True
elif l.type.lower() == r.type.lower():
return True
else:
return False
    @staticmethod
    def isIdenticalSet( llist, rlist):
        """Set-level identity: every member of each list has an equivalent
        on the other side.

        WARNING: mutates both input lists in place -- non-TypeObject
        entries are removed before comparison.  An "Any" on either side
        short-circuits to True.
        """
        invalidtypes = []
        for l in llist:
            if not isinstance(l, TypeObject):
                invalidtypes.append(l)
        for r in rlist:
            if not isinstance(r, TypeObject):
                invalidtypes.append(r)
        for t in invalidtypes:
            if t in llist:
                llist.remove(t)
        for t in invalidtypes:
            if t in rlist:
                rlist.remove(t)
        for l in llist:
            if l.type.lower() == "any":
                return True
            if not TypeObject.existSame(l, rlist) and l.type.lower() != "any":
                return False
        for r in rlist:
            if r.type.lower() == "any":
                return True
            if not TypeObject.existSame(r, llist) and r.type.lower() != "any":
                return False
        return True
    @staticmethod
    def existType(t, listr):
        """True if listr contains a type matching `t`.

        `t` may be a builtin type name (str) or a TypeObject; builtin
        comparisons go through typeequalmap, user-defined ones through
        exact name equality.
        """
        for r in listr:
            if isinstance(t, str):
                if (r.category == 0 and typeequalmap[t.lower()] == typeequalmap[r.type.lower()]) or (r.category == 2 and r.type == t):
                    return True
            elif isinstance(t, TypeObject):
                if (r.category == 0 and t.category == 0 and typeequalmap[t.type.lower()] == typeequalmap[r.type.lower()]) or (t.type == r.type):
                    return True
        return False
    @staticmethod
    def equal2type(t, typestr):
        """True if TypeObject `t` equals the builtin name `typestr`
        under typeequalmap (unknown names raise KeyError)."""
        if typeequalmap[t.type.lower()] == typeequalmap[typestr.lower()]:
            return True
        return False
    @staticmethod
    def equal2onetype(t, typestrs):
        """True if `t` equals any of the builtin names in `typestrs`
        under typeequalmap."""
        for s in typestrs:
            if typeequalmap[t.type.lower()] == typeequalmap[s.lower()]:
                return True
        return False
@staticmethod
def combineTypes(listt):
if len(listt) > 1:
typeobject = TypeObject("Union", 0)
typeobject.elementtype = listt
return typeobject
elif len(listt) == 1:
return listt[0]
else:
return None
@staticmethod
def usertypeCompare(l, rlist):
for r in rlist:
if l.category == r.category and l.category == 2 and ((l.type.split(".")[-1] == r.type.split(".")[-1])):
return True
return False
@staticmethod
def existIncluded(l, rlist):
for r in rlist:
if TypeObject.isIncluded(l,r):
return True
return False
    #if l is included in r, for generic types, list[a] is included in list[a,b]
    @staticmethod
    def isIncluded(l, r):
        """True if l is subsumed by r.

        x is included in Optional[x]; a generic is included in the same
        generic whose parameter set covers l's (recursively).  For equal
        non-generic type names no branch matches and the function falls
        through, implicitly returning None (falsy) -- callers only use
        the result in boolean context.
        """
        if r.type == "Optional" and len(r.elementtype) == 1 and l.type == r.elementtype[0].type:
            return True
        elif l.type != r.type:
            return False
        elif l.type == r.type and l.type in ["List", "Tuple", "Dict", "Set", "Iterable", "Optional", "Union", "Sequence", "Generator"]:
            if l.type == "Dict":
                # Every key/value type of l must appear in r (directly, as
                # Optional, or by recursive inclusion).
                for t in l.keytype:
                    if not TypeObject.existSame(t, r.keytype) and not TypeObject.existOptional(t, r.keytype) and not TypeObject.existIncluded(t, r.keytype):
                        return False
                for t in l.valuetype:
                    if not TypeObject.existSame(t, r.valuetype) and not TypeObject.existOptional(t, r.valuetype) and not TypeObject.existIncluded(t, r.valuetype):
                        return False
                return True
            else:
                for t in l.elementtype:
                    if not TypeObject.existSame(t, r.elementtype) and not TypeObject.existOptional(t, r.elementtype) and not TypeObject.existIncluded(t, r.elementtype):
                        return False
                return True
    @staticmethod
    def isSetIncluded(llist, rlist):
        """True if every member of rlist is covered by llist (exact match,
        numeric match, user-type name match, or generic inclusion)."""
        for r in rlist:
            if TypeObject.existSame(r, llist) or TypeObject.existNumbers(r, llist) or TypeObject.usertypeCompare(r, llist):
                continue
            else:
                included = False
                for l in llist:
                    if TypeObject.isIncluded(r, l):
                        included = True
                        break
                if included:
                    continue
                return False
        return True
    @staticmethod
    def isSetIncluded2(llist, rlist):
        """Looser variant of isSetIncluded: name-similarity instead of
        structural identity, and exact numeric cross-matching enabled."""
        for r in rlist:
            if TypeObject.existSimilar(r, llist) or TypeObject.existNumbers(r, llist, exact = True) or TypeObject.usertypeCompare(r, llist):
                continue
            else:
                included = False
                for l in llist:
                    if TypeObject.isIncluded(r, l):
                        included = True
                        break
                if included:
                    continue
                return False
        return True
    @staticmethod
    def simplifyGenericType(t):
        """Deduplicate/simplify the generic parameters of t in place.

        Container parameters lose redundant and included members; for
        Optional the explicit None member is dropped (it is implied).
        Non-TypeObject input is returned untouched.
        """
        if not isinstance(t, TypeObject):
            return t
        if t.type in ["Set", "Tuple", "List", "Awaitable", "Iterable", "Union"]:
            t.elementtype = TypeObject.removeInclusiveTypes(t.elementtype)
        elif t.type == "Dict":
            t.keytype = TypeObject.removeInclusiveTypes(t.keytype)
            t.valuetype = TypeObject.removeInclusiveTypes(t.valuetype)
        elif t.type == "Optional":
            t.elementtype = TypeObject.removeRedundantTypes(t.elementtype)
            # Optional[X, None] == Optional[X]: drop the first None member.
            rm = None
            for et in t.elementtype:
                if et.type == "None":
                    rm = et
                    break
            if rm != None and rm in t.elementtype:
                t.elementtype.remove(rm)
        return t
@staticmethod
def removeRedundantTypes(listt):
outs = []
for t in listt:
typeobj = TypeObject.simplifyGenericType(t)
if not TypeObject.existSame(typeobj, outs):
outs.append(typeobj)
return outs
    #Example: if list[] and list[a] exists at the same time, then list[] is removed
    @staticmethod
    def removeInclusiveTypes(listt):
        """Deduplicate listt, then repeatedly drop any member that is
        included in (subsumed by) another member.

        Each pass of the while-loop removes at most one member (the last
        inclusion found); the loop runs until a full pass finds none.
        """
        outs = TypeObject.removeRedundantTypes(listt)
        removed = True
        while removed:
            removed = False
            for i in range(0, len(outs)):
                for j in range(0, len(outs)):
                    if i != j and TypeObject.isIncluded(outs[i], outs[j]):
                        removed = True
                        target = outs[i]
                        break
            if removed and target in outs:
                outs.remove(target)
        return outs
@staticmethod
def removeInvalidTypes(t):
if isinstance(t, TypeObject):
elementtype = []
for tt in t.elementtype:
if isinstance(tt, TypeObject):
elementtype.append(TypeObject.removeInvalidTypes(tt))
t.elementtype = elementtype
keytype = []
for tt in t.keytype:
if isinstance(tt, TypeObject):
keytype.append(TypeObject.removeInvalidTypes(tt))
t.keytype = keytype
valuetype = []
for tt in t.valuetype:
if isinstance(tt, TypeObject):
valuetype.append(TypeObject.removeInvalidTypes(tt))
return t
    def __str__(self):
        """Render this type as a typing-style string via resolveTypeName."""
        return TypeObject.resolveTypeName(self)
@staticmethod
def resolveTypeName(t):
if isinstance(t, TypeObject):
t = TypeObject.removeInvalidTypes(t)
if t.category != 0:
return t.type
elif t.type.lower() not in exporttypemap:
raise TypeError("Unknown type: " + t.type)
typestr = exporttypemap[t.type.lower()]
if t.type.lower() in ["dict", "callable"]:
typestr = typestr + "["
if len(t.keytype) == 0:
typestr += ", "
elif len(t.keytype) == 1:
typestr = typestr + TypeObject.resolveTypeName(t.keytype[0]) + ", "
else:
typestr += "typing.Union["
for n in t.keytype:
typestr = typestr + TypeObject.resolveTypeName(n) + ","
typestr = typestr[:-1]
typestr += "], "
if len(t.valuetype) == 0:
pass
elif len(t.valuetype) == 1:
typestr = typestr + TypeObject.resolveTypeName(t.valuetype[0])
else:
typestr += "typing.Union["
for n in t.valuetype:
typestr = typestr + TypeObject.resolveTypeName(n) + ","
typestr = | |
which were added only for
# GET server APIs not for rebuild. GET server and Rebuild share the
# same view builder method SHOW() to build the response, So make sure
# attributes which are not supposed to be included for Rebuild
# response are not present.
body = {
"rebuild": {
"imageRef": self.image_uuid,
},
}
body = self.controller._action_rebuild(self.req, FAKE_UUID,
body=body).obj
get_only_fields = ['OS-EXT-AZ:availability_zone', 'config_drive',
'OS-EXT-SRV-ATTR:host',
'OS-EXT-SRV-ATTR:hypervisor_hostname',
'OS-EXT-SRV-ATTR:instance_name',
'OS-EXT-SRV-ATTR:hostname'
'OS-EXT-SRV-ATTR:kernel_id',
'OS-EXT-SRV-ATTR:launch_index',
'OS-EXT-SRV-ATTR:ramdisk_id',
'OS-EXT-SRV-ATTR:reservation_id',
'OS-EXT-SRV-ATTR:root_device_name',
'OS-EXT-SRV-ATTR:user_data', 'host_status',
'OS-SRV-USG:launched_at',
'OS-SRV-USG:terminated_at']
if not self.expected_key_name:
get_only_fields.append('key_name')
for field in get_only_fields:
self.assertNotIn(field, body['server'])
    @mock.patch.object(compute_api.API, 'start')
    def test_start(self, mock_start):
        """Happy-path start delegates to the compute API exactly once."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.controller._start_server(req, FAKE_UUID, body)
        mock_start.assert_called_once_with(mock.ANY, mock.ANY)
    @mock.patch.object(compute_api.API, 'start', fake_start_stop_not_ready)
    def test_start_not_ready(self):
        """Start on an instance that is not ready maps to HTTP 409."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, req, FAKE_UUID, body)
    @mock.patch.object(
        compute_api.API, 'start', fakes.fake_actions_to_locked_server)
    def test_start_locked_server(self):
        """Start on a locked server maps to HTTP 409."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, req, FAKE_UUID, body)
    @mock.patch.object(compute_api.API, 'start', fake_start_stop_invalid_state)
    def test_start_invalid(self):
        """Start in an invalid instance state maps to HTTP 409."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, req, FAKE_UUID, body)
    @mock.patch.object(compute_api.API, 'stop')
    def test_stop(self, mock_stop):
        """Happy-path stop delegates to the compute API exactly once."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.controller._stop_server(req, FAKE_UUID, body)
        mock_stop.assert_called_once_with(mock.ANY, mock.ANY)
    @mock.patch.object(compute_api.API, 'stop', fake_start_stop_not_ready)
    def test_stop_not_ready(self):
        """Stop on an instance that is not ready maps to HTTP 409."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, req, FAKE_UUID, body)
    @mock.patch.object(
        compute_api.API, 'stop', fakes.fake_actions_to_locked_server)
    def test_stop_locked_server(self):
        """Stop on a locked server maps to HTTP 409."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, req, FAKE_UUID, body)
    @mock.patch.object(compute_api.API, 'stop', fake_start_stop_invalid_state)
    def test_stop_invalid_state(self):
        """Stop in an invalid instance state maps to HTTP 409."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/%s/action' % FAKE_UUID)
        # NOTE(review): body says start="" although this exercises the stop
        # action -- looks like a copy-paste slip; harmless here since the
        # stubbed API raises regardless of the body.
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, req, FAKE_UUID, body)
    @mock.patch(
        'nova.db.api.instance_get_by_uuid',
        fake_instance_get_by_uuid_not_found)
    def test_start_with_bogus_id(self):
        """Start on an unknown instance id maps to HTTP 404."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._start_server, req, 'test_inst', body)
    @mock.patch(
        'nova.db.api.instance_get_by_uuid',
        fake_instance_get_by_uuid_not_found)
    def test_stop_with_bogus_id(self):
        """Stop on an unknown instance id maps to HTTP 404."""
        req = fakes.HTTPRequestV21.blank('/fake/servers/test_inst/action')
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._stop_server, req, 'test_inst', body)
class ServersControllerRebuildTestV254(ServersControllerRebuildInstanceTest):
    """Rebuild tests at microversion 2.54, which added key_name support."""
    expected_key_name = True
    def setUp(self):
        super(ServersControllerRebuildTestV254, self).setUp()
        fakes.stub_out_key_pair_funcs(self)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.54')
    def _test_set_key_name_rebuild(self, set_key_name=True):
        """Rebuild and assert the response reports the expected keypair."""
        key_name = "key"
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=key_name,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        if set_key_name:
            self.body['rebuild']['key_name'] = key_name
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID,
            body=self.body).obj['server']
        self.assertEqual(server['id'], FAKE_UUID)
        self.assertEqual(server['key_name'], key_name)
    def test_rebuild_accepted_with_keypair_name(self):
        """Passing key_name on rebuild is accepted at 2.54."""
        self._test_set_key_name_rebuild()
    def test_rebuild_key_not_changed(self):
        """Omitting key_name keeps the instance's existing keypair."""
        self._test_set_key_name_rebuild(set_key_name=False)
    def test_rebuild_invalid_microversion_253(self):
        """key_name is rejected by schema validation before 2.54."""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.53')
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "key"
            },
        }
        excpt = self.assertRaises(exception.ValidationError,
                                  self.controller._action_rebuild,
                                  self.req, FAKE_UUID, body=body)
        self.assertIn('key_name', six.text_type(excpt))
    def test_rebuild_with_not_existed_keypair_name(self):
        """An unknown keypair name maps to HTTP 400."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "nonexistentkey"
            },
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_user_has_no_key_pair(self):
        """A keypair lookup miss for the user maps to HTTP 400."""
        def no_key_pair(context, user_id, name):
            raise exception.KeypairNotFound(user_id=user_id, name=name)
        self.stub_out('nova.db.api.key_pair_get', no_key_pair)
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=None,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        self.body['rebuild']['key_name'] = "a-key-name"
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_with_non_string_keypair_name(self):
        """A non-string key_name is rejected by schema validation."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": 12345
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_with_invalid_keypair_name(self):
        """A key_name with a NUL byte maps to HTTP 400."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "123\0d456"
            },
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_with_empty_keypair_name(self):
        """An empty key_name string is rejected by schema validation."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": ''
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_with_none_keypair_name(self):
        """key_name=None is accepted and skips the keypair lookup."""
        key_name = None
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=key_name,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        with mock.patch.object(objects.KeyPair, 'get_by_name') as key_get:
            self.body['rebuild']['key_name'] = key_name
            self.req.body = jsonutils.dump_as_bytes(self.body)
            self.controller._action_rebuild(
                self.req, FAKE_UUID,
                body=self.body)
            # NOTE: because the api will call _get_server twice. The server
            # response will always be the same one. So we just use
            # objects.KeyPair.get_by_name to verify test.
            key_get.assert_not_called()
    def test_rebuild_with_too_large_keypair_name(self):
        """A key_name longer than 255 chars is rejected by validation."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": 256 * "k"
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
class ServersControllerRebuildTestV257(ServersControllerRebuildTestV254):
    """Tests server rebuild at microversion 2.57 where user_data can be
    provided and personality files are no longer accepted.
    """
    def setUp(self):
        super(ServersControllerRebuildTestV257, self).setUp()
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.57')
    def test_rebuild_personality(self):
        """Tests that trying to rebuild with personality files fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": base64.encode_as_text("Test String"),
                }]
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('personality', six.text_type(ex))
    def test_rebuild_user_data_old_version(self):
        """Tests that trying to rebuild with user_data before 2.57 fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": "ZWNobyAiaGVsbG8gd29ybGQi"
            }
        }
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.55')
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', six.text_type(ex))
    def test_rebuild_user_data_malformed(self):
        """Tests that trying to rebuild with malformed user_data fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": b'invalid'
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', six.text_type(ex))
    def test_rebuild_user_data_too_large(self):
        """Tests that passing user_data to rebuild that is too large fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": ('MQ==' * 16384)
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', six.text_type(ex))
    @mock.patch.object(context.RequestContext, 'can')
    @mock.patch('nova.db.api.instance_update_and_get_original')
    def test_rebuild_reset_user_data(self, mock_update, mock_policy):
        """Tests that passing user_data=None resets the user_data on the
        instance.
        """
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": None
            }
        }
        self.mock_get.side_effect = None
        self.mock_get.return_value = fakes.stub_instance_obj(
            context.RequestContext(self.req_user_id, self.req_project_id),
            user_data='ZWNobyAiaGVsbG8gd29ybGQi')
        def fake_instance_update_and_get_original(
                ctxt, instance_uuid, values, **kwargs):
            # save() is called twice and the second one has system_metadata
            # in the updates, so we can ignore that one.
            if 'system_metadata' not in values:
                self.assertIn('user_data', values)
                self.assertIsNone(values['user_data'])
            return instance_update_and_get_original(
                ctxt, instance_uuid, values, **kwargs)
        mock_update.side_effect = fake_instance_update_and_get_original
        self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
        self.assertEqual(2, mock_update.call_count)
class ServersControllerRebuildTestV219(ServersControllerRebuildInstanceTest):
    """Rebuild tests at microversion 2.19, which added the description
    field to server create/rebuild."""
    def setUp(self):
        super(ServersControllerRebuildTestV219, self).setUp()
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.19')
    def _rebuild_server(self, set_desc, desc):
        """Rebuild and assert the response reports the expected description."""
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          display_description=desc,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        if set_desc:
            self.body['rebuild']['description'] = desc
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(self.req, FAKE_UUID,
                                                 body=self.body).obj['server']
        self.assertEqual(server['id'], FAKE_UUID)
        self.assertEqual(server['description'], desc)
    def test_rebuild_server_with_description(self):
        """A non-empty description is accepted and echoed back."""
        self._rebuild_server(True, 'server desc')
    def test_rebuild_server_empty_description(self):
        """An empty description is valid."""
        self._rebuild_server(True, '')
    def test_rebuild_server_without_description(self):
        """Omitting description leaves it unchanged."""
        self._rebuild_server(False, '')
    def test_rebuild_server_remove_description(self):
        """description=None clears the description."""
        self._rebuild_server(True, None)
    def test_rebuild_server_description_too_long(self):
        """A description over 255 chars is rejected by validation."""
        self.body['rebuild']['description'] = 'x' * 256
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_server_description_invalid(self):
        # Invalid non-printable control char in the desc.
        self.body['rebuild']['description'] = "123\0d456"
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
# NOTE(jaypipes): Not based from ServersControllerRebuildInstanceTest because
# that test case's setUp is completely b0rked
class ServersControllerRebuildTestV263(ControllerTest):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    def setUp(self):
        super(ServersControllerRebuildTestV263, self).setUp()
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        self.req_user_id = self.req.environ['nova.context'].user_id
        self.req_project_id = self.req.environ['nova.context'].project_id
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.63')
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_uuid,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
    @mock.patch('nova.compute.api.API.get')
    def _rebuild_server(self, mock_get, certs=None,
                        conf_enabled=True, conf_certs=None):
        """Rebuild and assert trusted_image_certificates in the response:
        the explicitly passed certs win, otherwise the configured defaults
        (when validation is enabled), otherwise None."""
        fakes.stub_out_trusted_certs(self, certs=certs)
        ctx = self.req.environ['nova.context']
        mock_get.return_value = fakes.stub_instance_obj(ctx,
            vm_state=vm_states.ACTIVE, trusted_certs=certs,
            project_id=self.req_project_id, user_id=self.req_user_id)
        self.flags(default_trusted_certificate_ids=conf_certs, group='glance')
        if conf_enabled:
            self.flags(verify_glance_signatures=True, group='glance')
            self.flags(enable_certificate_validation=True, group='glance')
        self.body['rebuild']['trusted_image_certificates'] = certs
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID, body=self.body).obj['server']
        if certs:
            self.assertEqual(certs, server['trusted_image_certificates'])
        else:
            if conf_enabled:
                # configuration file default is used
                self.assertEqual(
                    conf_certs, server['trusted_image_certificates'])
            else:
                # either not set or empty
                self.assertIsNone(server['trusted_image_certificates'])
    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_rebuild_server_with_trusted_certs(self, get_min_ver):
        """Test rebuild with valid trusted_image_certificates argument"""
        self._rebuild_server(
            certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8',
                   '674736e3-f25c-405c-8362-bbf991e0ce0a'])
    def test_rebuild_server_without_trusted_certs(self):
        """Test rebuild without trusted image certificates"""
        self._rebuild_server()
    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_rebuild_server_conf_options_turned_off_set(self, get_min_ver):
        """Test rebuild with feature disabled and certs specified"""
        self._rebuild_server(
            certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'], conf_enabled=False)
    def test_rebuild_server_conf_options_turned_off_empty(self):
        """Test rebuild with feature disabled"""
        self._rebuild_server(conf_enabled=False)
    def test_rebuild_server_default_trusted_certificates_empty(self):
        """Test rebuild with feature enabled and no certs specified"""
        self._rebuild_server(conf_enabled=True)
    @mock.patch('nova.objects.Service.get_minimum_version',
                return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
    def test_rebuild_server_default_trusted_certificates(self, get_min_ver):
        """Test rebuild with certificate specified in configurations"""
        self._rebuild_server(conf_enabled=True, conf_certs=['conf-id'])
    def test_rebuild_server_with_empty_trusted_cert_id(self):
        """Make sure that we can't rebuild with an empty certificate ID"""
        self.body['rebuild']['trusted_image_certificates'] = ['']
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too short', six.text_type(ex))
    def test_rebuild_server_with_empty_trusted_certs(self):
        """Make sure that we can't rebuild with an empty array of IDs"""
        self.body['rebuild']['trusted_image_certificates'] = []
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too short', six.text_type(ex))
    def test_rebuild_server_with_too_many_trusted_certs(self):
        """Make sure that we can't rebuild with an array of >50 unique IDs"""
        self.body['rebuild']['trusted_image_certificates'] = [
            'cert{}'.format(i) for i in range(51)]
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too long', six.text_type(ex))
    def test_rebuild_server_with_nonunique_trusted_certs(self):
        """Make sure that we can't rebuild with a non-unique array of IDs"""
        self.body['rebuild']['trusted_image_certificates'] = ['cert', 'cert']
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('has non-unique elements', six.text_type(ex))
    def test_rebuild_server_with_invalid_trusted_cert_id(self):
        """Make sure that we can't rebuild with non-string certificate IDs"""
        self.body['rebuild']['trusted_image_certificates'] = [1, 2]
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is not of type', six.text_type(ex))
    def test_rebuild_server_with_invalid_trusted_certs(self):
        """Make sure that we can't rebuild with certificates in a non-array"""
        self.body['rebuild']['trusted_image_certificates'] = "not-an-array"
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is not of type', six.text_type(ex))
@mock.patch('nova.objects.Service.get_minimum_version',
return_value=compute_api.MIN_COMPUTE_TRUSTED_CERTS)
def test_rebuild_server_with_trusted_certs_pre_2_63_fails(self,
get_min_ver):
"""Make sure we can't use trusted_certs before 2.63"""
self._rebuild_server(certs=['trusted-cert-id'])
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.62')
ex = self.assertRaises(exception.ValidationError,
self.controller._action_rebuild,
self.req, FAKE_UUID, body=self.body)
self.assertIn('Additional properties are | |
list(values)
})
if dbg_prnt:
print('premrg1')
print(premrg1)
mrg1 = pairs[names].merge(premrg1, left_on=names[0], right_on="Gene1")
if dbg_prnt:
print('mrg1')
print(mrg1)
premrg2 = pd.DataFrame.from_dict({
"Gene2": list(locusIds),
"value2": list(values)
})
if dbg_prnt:
print('premrg2')
print(premrg2)
mrg2 = mrg1.merge(premrg2, left_on=names[1], right_on="Gene2")
if dbg_prnt:
print('mrg2')
print(mrg2)
# method can be spearman or pearson
res = mrg2['value1'].corr(mrg2['value2'], method=method)
if dbg_prnt:
print('res')
print(res)
return res
def FEBA_Exp_Status(inp_df, min_gMed=50, max_mad12=0.5, min_cor12=0.1,
                    max_gccor=0.2, max_adjcor=0.25, dbg_prnt=False):
    """
    Classify each experiment (one row of inp_df) by quality.

    Args:
        inp_df (pandas DataFrame): must contain at least the columns
            "short", "gMed", "mad12", "cor12", "gccor", "adjcor"
            (originating from FitReadMetrics / FitQuality / exps_df).
        min_gMed, max_mad12, min_cor12, max_gccor, max_adjcor:
            quality thresholds; checked in that order, first failure wins.
        dbg_prnt (bool): print debug information.

    Returns:
        pandas Series of str, indexed like inp_df; each value is one of
        {"OK", "Time0", "low_count", "high_mad12", "low_cor12",
        "high_adj_gc_cor"}. "OK" means a non-Time0 experiment that passed
        every quality metric.

    Note:
        Arguably min_cor12 should be based on linear correlation, not
        Spearman. The 0.1 threshold was chosen based on Marinobacter set5
        (defined-media experiments with cor12 = 0.1-0.2 clearly worked) and
        Kang Polymyxin B set1 (cor12 ~= 0.13, barely worked).
    """
    def classify(row):
        # First failing check determines the status label.
        if row["short"] == "Time0":
            return "Time0"
        if row["gMed"] < min_gMed:
            return "low_count"
        if row["mad12"] > max_mad12:
            return "high_mad12"
        if row["cor12"] < min_cor12:
            return "low_cor12"
        if abs(row["gccor"]) > max_gccor or abs(row["adjcor"]) > max_adjcor:
            return "high_adj_gc_cor"
        return "OK"

    if dbg_prnt:
        print(inp_df.columns)
        print(inp_df.shape[0])
        print(inp_df.index)

    statuses = [classify(row) for _, row in inp_df.iterrows()]

    if dbg_prnt:
        print("FEBA_Exp_Status: status_list:")
        print(statuses)

    return pd.Series(data=statuses, index=inp_df.index)
def SpecificPhenotypes(locusIds, exps_df, fitnorm_df, t_score_df,
                       minT=5, minFit=1.0, percentile=0.95,
                       percentileFit=1.0, minDelta=0.5,
                       dbg_prnt=False):
    """
    Identify "specific phenotypes" -- cases where a gene is sick in some
    experiment(s), with |fit| > minFit, |fit| > rowHi + minDelta and
    |t| > minT, where rowHi is the per-gene `percentile` quantile of |fit|
    (95th by default). The gene must also be mostly healthy overall
    (rowHi < percentileFit).

    Args:
        locusIds (pandas Series <str>): one gene id per row of fitnorm_df.
        exps_df (pandas DataFrame): experiment metadata; must contain a
            "name" column matching the columns of fitnorm_df/t_score_df.
        fitnorm_df (pandas DataFrame (float)): normalized fitness,
            rows = genes, columns = experiment names.
        t_score_df (pandas DataFrame (float)): t scores, same shape and
            layout as fitnorm_df.
        dbg_prnt (bool): print debug information.

    Returns:
        pandas DataFrame with one row per (gene, experiment) hit:
        columns "locusId", "name", "lrn" (fitness), "t" (t score), plus the
        experiment-metadata columns present in exps_df.
    """
    # Keep only the experiment-metadata columns that actually exist, in a
    # deterministic order (the original used a set, which is unordered).
    candidate_fields = ["name", "short", "Group", "Condition_1",
                       "Concentration_1", "Units_1", "Condition_2",
                       "Concentration_2", "Units_2", "Condition_3",
                       "Concentration_3", "Units_3", "Condition_4",
                       "Concentration_4", "Units_4"]
    expsFields = [f for f in candidate_fields if f in exps_df.columns]

    fnabs = fitnorm_df.abs()
    tabs = t_score_df.abs()
    # Per-gene (per-row) percentile of |fit| -- mirrors the R reference
    # apply(abs(lrn), 1, quantile, percentile). The previous code used the
    # column-wise default axis, which did not match the reference, and
    # bool(rowHi < percentileFit) raised ValueError on a Series.
    rowHi = fnabs.quantile(q=percentile, axis=1)

    if dbg_prnt:
        print("Dimensions of fitnorm and then t_score_df:")
        print(f"{fitnorm_df.shape[0]}, {fitnorm_df.shape[1]}")
        print(f"{t_score_df.shape[0]}, {t_score_df.shape[1]}")
        print("rowHi (per-gene percentile of |fit|):")
        print(rowHi)

    # Find <row, col> locations that pass all thresholds.
    which_pass = []
    for row_ix in range(fitnorm_df.shape[0]):
        hi = rowHi.iloc[row_ix]
        # The gene must be mostly non-sick overall to qualify at all.
        if not (hi < percentileFit):
            continue
        cutoff = hi + minDelta
        for col_ix in range(fitnorm_df.shape[1]):
            v = fnabs.iloc[row_ix, col_ix]
            if v > minFit and v > cutoff and tabs.iloc[row_ix, col_ix] > minT:
                which_pass.append((row_ix, col_ix))

    # sp - specific; column names "lrn"/"t" follow the R reference.
    specsick = pd.DataFrame.from_dict({
        "locusId": [locusIds.iloc[r] for r, c in which_pass],
        "name": [fitnorm_df.columns[c] for r, c in which_pass],
        "lrn": [fitnorm_df.iloc[r, c] for r, c in which_pass],
        "t": [t_score_df.iloc[r, c] for r, c in which_pass],
    })
    # Attach experiment metadata; the join key is the shared "name" column.
    return specsick.merge(exps_df[expsFields])
"""
SpecificPhenotypes = function(locusIds, exps_df, lrn, t_score_df,
minT = 5, minFit = 1.0,
percentile = 0.95, percentileFit = 1.0, minDelta = 0.5,
expsFields = intersect(names(exps_df),
words("name short Group Condition_1 Concentration_1 Units_1 Condition_2 Concentration_2 Units_2 Condition_3 Concentration_3 Units_3 Condition_4 Concentration_4 Units_4")))
{
rowHi = apply(abs(lrn), 1, quantile, percentile);
bool = abs(lrn) > minFit & abs(lrn) > rowHi+minDelta & rowHi < percentileFit & abs(t_score_df) > minT;
# arr.in or arr.ind (?)
specsick = data.frame(which(bool, arr.in=T));
specsick$locusId = locusIds[specsick$row];
specsick$name = names(lrn)[specsick$col];
specsick$lrn = as.matrix(lrn)[cbind(specsick$row,specsick$col)];
specsick$t = as.matrix(t_score_df)[cbind(specsick$row,specsick$col)];
specsick$row = NULL;
specsick$col = NULL;
return(merge(specsick, exps_df[,expsFields]));
}
"""
def AdjacentPairs(genes_df, dbg_prnt=False):
    """
    Pair every gene with the next gene on the same scaffold.

    Args:
        genes_df (pandas DataFrame): genes.GC table; must contain at least
            "locusId", "scaffoldId" and "begin".
        dbg_prnt (bool): when True, dump intermediate frames to tmp/.

    Returns:
        pandas DataFrame with one row per adjacent same-scaffold pair:
        Gene1, Gene2 plus both genes' metadata columns, overlapping names
        suffixed "1"/"2" (e.g. begin1/begin2, strand1/strand2, ...).
    """
    # Order genes by scaffold, tie-broken by increasing start position, so
    # "adjacent" means consecutive rows within a scaffold.
    c_genes_df = genes_df.copy(deep=True).sort_values(by=['scaffoldId', 'begin'])
    # Offset by one with wrap-around: Gene2 is the gene following Gene1.
    adj = pd.DataFrame.from_dict({
        "Gene1": list(c_genes_df['locusId']),
        "Gene2": list(c_genes_df['locusId'].iloc[1:]) + [c_genes_df['locusId'].iloc[0]]
    })
    if dbg_prnt:
        # FIX: this dump was previously unconditional, writing a file on every
        # call; file output is now restricted to debug runs like the others.
        adj.to_csv("tmp/py_preAdj1.tsv", sep="\t")
    # First merge: attach Gene1's metadata.
    c_genes_df = c_genes_df.rename(columns={"locusId": "Gene1"})
    mg1 = adj.merge(c_genes_df, left_on="Gene1", right_on="Gene1")
    if dbg_prnt:
        mg1.to_csv("tmp/py_preAdj2.tsv", sep="\t")
    c_genes_df = c_genes_df.rename(columns={"Gene1": "locusId"})
    # Second merge: attach Gene2's metadata and drop cross-scaffold pairs,
    # since the join requires a matching scaffoldId.
    adj = mg1.merge(c_genes_df,
                    left_on=["Gene2", "scaffoldId"],
                    right_on=["locusId", "scaffoldId"],
                    suffixes=["1", "2"]
                    )
    if dbg_prnt:
        adj.to_csv("tmp/py_AdjacentPairsOutput.tsv", sep="\t")
    return adj
def TopCofit(locusIds, lrn, dbg=False, fraction=0.02):
    """
    For every gene, find its top cofitness partners (most-correlated genes).

    Args:
        locusIds (pandas Series <str>): gene ids, one per row of lrn.
        lrn (pandas DataFrame): fitness values, rows = genes,
            columns = set-index names.
        dbg (bool): print debug information.
        fraction (float): fraction of the genome to keep as partners per
            gene (clamped to [1, nGenes - 1]).

    Returns:
        out_df (pandas DataFrame): n rows per gene, columns:
            locusId (str), hitId (str), cofit (float),
            rank (int, 1-based: 1 = strongest partner; matches the R
            reference `rank = 1:n` -- confirm against downstream users).

    Raises:
        Exception: if len(locusIds) != lrn.shape[0].
    """
    # FIX: math.round does not exist; use the round() builtin.
    n = min(max(1, round(len(locusIds) * fraction)), len(locusIds) - 1)
    if dbg:
        print(f"n: {n}")
    # Number of locusIds must match the number of rows in lrn.
    if len(locusIds) != lrn.shape[0]:
        raise Exception("Number of genes and number of rows in matrix do not match.")
    # corr() works column-wise, so transpose to get gene-by-gene Pearson
    # correlations (genes become columns).
    cofits = lrn.transpose().corr(method="pearson")
    if dbg:
        print("type of cofits:")
        print(type(cofits))
        print("shapes of cofits 0, 1")
        print(f"{cofits.shape[0]}, {cofits.shape[1]}")
    nOut = len(locusIds) * n
    if dbg:
        print(f"Making output with {nOut} rows")
    out_hitId = []
    out_cofit = []
    for i in range(len(locusIds)):
        values = cofits.iloc[i, :]
        # Sort by descending correlation; skip position 0, which is the gene
        # itself (self-correlation 1.0), and keep the next n partners.
        # (FIX: the old code mixed 0- and 1-based indexing and tried to index
        # lists with lists, which raised TypeError.)
        j = np.argsort(-values.values, kind="stable")[1:n + 1]
        out_hitId.extend(locusIds.iloc[j])
        out_cofit.extend(values.iloc[j])
    out_df = pd.DataFrame.from_dict({
        "locusId": [locusIds.iloc[i] for i in range(len(locusIds)) for _ in range(n)],
        "hitId": out_hitId,
        "cofit": out_cofit,
        "rank": list(range(1, n + 1)) * len(locusIds),
    })
    return out_df
def HighFit(gene_fit_d, genes_df, exps_df, min_fit=4, min_t=5, max_se=2,
min_gMean=10,max_below=8,dbg_prnt=False):
"""
Args:
gene_fit_d (python dict):
lrn: pandas DataFrame (one col per setindexname) floats (fitness?)
t (t-score): pandas DataFrame (one col per setindexname) floats (t_score?)
u (used?): pandasDataFrame (one col per setindexname) floats
Description:
We find the [row, col] indexes where the 'lrn' and 't' dataframes (fitness and
t score dataframes) have values that pass the thresholds of minimum fitness and
minimum t score (parameters min_fit and min_t). We create a new dataframe called
'high_df' which contains the locusId, experiment name, fitness score and t scores
where these thresholds are passed. The number of rows in these dataframes is equal
to the number of locations where the thresholds are passed, and there are doubled
locusIds and expNames.
Returns:
new_high (pandas DataFrame):
locusId, expName, fit, t, se, sdNaive, name, Group, Condition_1, Concentration_1, Units_1, Media, short, u, maxFit, gMean, sysName, desc
"""
lrn = gene_fit_d['lrn']
t = gene_fit_d['t']
u = gene_fit_d['q']['u']
# This needs to be two columns: 1 with rows and 1 with columns
num_rows, num_cols = lrn.shape[0], lrn.shape[1]
# where is high is a list of [row (int), col(int)] (coming from dataframe, so it's a list whose length
# is the length of (m x j) for rows and columns in the dataframe.
where_is_high = []
for i in range(num_rows):
for j in range(num_cols):
if lrn.iloc[i,j] >= min_fit and t.iloc[i,j] >= min_t:
where_is_high.append([i,j])
high_df = pd.DataFrame.from_dict({
# x[0] -> rows from where_is_high
"locusId": gene_fit_d['g'].iloc[[x[0] for x in where_is_high]],
# x[1] -> columns from where_is_high
"expName": (lrn.iloc[:,[x[1] for x in where_is_high]]).columns,
"fit": [lrn.iloc[x[0], x[1]] for x in where_is_high],
"t": [t.iloc[x[0], x[1]] for x in where_is_high],
})
high_df['se'] = high_df['fit']/high_df['t']
high_df['sdNaive'] = [gene_fit_d['sdNaive'].iloc[x[0], x[1]] for x in where_is_high]
high_df = high_df[high_df['se'] <= max_se]
# Which experiments are ok
fields = "name Group Condition_1 Concentration_1 Units_1 Media short".split(" ")
fields = [x for x in fields if x in exps_df.columns]
| |
订购周期对象,当商品是周期订阅类型时,必填
self.duration = duration
# 优惠券ID
self.coupon_id = coupon_id
# 数量,不填默认1
self.quantity = quantity
# 商品订购属性,开通型商品部需要填写
self.commodity_attrs = commodity_attrs
# 履约选项
self.fulfillment_options = fulfillment_options
# 支付选项
self.pay_options = pay_options
def validate(self):
self.validate_required(self.biz_no, 'biz_no')
self.validate_required(self.commodity_code, 'commodity_code')
if self.duration:
self.duration.validate()
if self.commodity_attrs:
for k in self.commodity_attrs:
if k:
k.validate()
if self.fulfillment_options:
self.fulfillment_options.validate()
if self.pay_options:
self.pay_options.validate()
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.biz_no is not None:
result['biz_no'] = self.biz_no
if self.tenant_id is not None:
result['tenant_id'] = self.tenant_id
if self.tenant_name is not None:
result['tenant_name'] = self.tenant_name
if self.operator_id is not None:
result['operator_id'] = self.operator_id
if self.commodity_code is not None:
result['commodity_code'] = self.commodity_code
if self.order_type is not None:
result['order_type'] = self.order_type
if self.duration is not None:
result['duration'] = self.duration.to_map()
if self.coupon_id is not None:
result['coupon_id'] = self.coupon_id
if self.quantity is not None:
result['quantity'] = self.quantity
result['commodity_attrs'] = []
if self.commodity_attrs is not None:
for k in self.commodity_attrs:
result['commodity_attrs'].append(k.to_map() if k else None)
if self.fulfillment_options is not None:
result['fulfillment_options'] = self.fulfillment_options.to_map()
if self.pay_options is not None:
result['pay_options'] = self.pay_options.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('biz_no') is not None:
self.biz_no = m.get('biz_no')
if m.get('tenant_id') is not None:
self.tenant_id = m.get('tenant_id')
if m.get('tenant_name') is not None:
self.tenant_name = m.get('tenant_name')
if m.get('operator_id') is not None:
self.operator_id = m.get('operator_id')
if m.get('commodity_code') is not None:
self.commodity_code = m.get('commodity_code')
if m.get('order_type') is not None:
self.order_type = m.get('order_type')
if m.get('duration') is not None:
temp_model = OrderDuration()
self.duration = temp_model.from_map(m['duration'])
if m.get('coupon_id') is not None:
self.coupon_id = m.get('coupon_id')
if m.get('quantity') is not None:
self.quantity = m.get('quantity')
self.commodity_attrs = []
if m.get('commodity_attrs') is not None:
for k in m.get('commodity_attrs'):
temp_model = CommodityOrderAttribute()
self.commodity_attrs.append(temp_model.from_map(k))
if m.get('fulfillment_options') is not None:
temp_model = FulfillmentOptions()
self.fulfillment_options = temp_model.from_map(m['fulfillment_options'])
if m.get('pay_options') is not None:
temp_model = PayOptions()
self.pay_options = temp_model.from_map(m['pay_options'])
return self
class CreateOrderResponse(TeaModel):
    """Response model for order creation."""

    # Every field is handled uniformly by to_map()/from_map(); the tuple
    # order defines the emitted dict's key order.
    _FIELDS = ('req_msg_id', 'result_code', 'result_msg',
               'order_id', 'instance_ids', 'pay_status')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        order_id: str = None,
        instance_ids: List[str] = None,
        pay_status: str = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; "OK" normally indicates success.
        self.result_code = result_code
        # Human-readable description of any error.
        self.result_msg = result_msg
        # Order id.
        self.order_id = order_id
        # Ids of the created instances.
        self.instance_ids = instance_ids
        # Payment status.
        self.pay_status = pay_status

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict, omitting unset fields."""
        return {k: getattr(self, k) for k in self._FIELDS
                if getattr(self, k) is not None}

    def from_map(self, m: dict = None):
        """Populate from a plain dict and return self."""
        m = m or dict()
        for k in self._FIELDS:
            if m.get(k) is not None:
                setattr(self, k, m.get(k))
        return self
class ExistPricePersonalizedRequest(TeaModel):
    """Request model: check whether a personalized price exists for a product."""

    # Uniform field table; tuple order defines the emitted dict's key order.
    _FIELDS = ('auth_token', 'tenant_id', 'product_code', 'price_object_code')

    def __init__(
        self,
        auth_token: str = None,
        tenant_id: str = None,
        product_code: str = None,
        price_object_code: str = None,
    ):
        # OAuth authorization token.
        self.auth_token = auth_token
        # Tenant id.
        self.tenant_id = tenant_id
        # Product code.
        self.product_code = product_code
        # Charge-item code; only needed when the product has multiple
        # charge items.
        self.price_object_code = price_object_code

    def validate(self):
        self.validate_required(self.tenant_id, 'tenant_id')
        self.validate_required(self.product_code, 'product_code')

    def to_map(self):
        """Serialize to a plain dict, omitting unset fields."""
        return {k: getattr(self, k) for k in self._FIELDS
                if getattr(self, k) is not None}

    def from_map(self, m: dict = None):
        """Populate from a plain dict and return self."""
        m = m or dict()
        for k in self._FIELDS:
            if m.get(k) is not None:
                setattr(self, k, m.get(k))
        return self
class ExistPricePersonalizedResponse(TeaModel):
    """Response model for the personalized-price existence check."""

    # Uniform field table; tuple order defines the emitted dict's key order.
    _FIELDS = ('req_msg_id', 'result_code', 'result_msg', 'exist')

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        exist: bool = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; "OK" normally indicates success.
        self.result_code = result_code
        # Human-readable description of any error.
        self.result_msg = result_msg
        # Whether a personalized price exists.
        self.exist = exist

    def validate(self):
        pass

    def to_map(self):
        """Serialize to a plain dict, omitting unset fields."""
        return {k: getattr(self, k) for k in self._FIELDS
                if getattr(self, k) is not None}

    def from_map(self, m: dict = None):
        """Populate from a plain dict and return self."""
        m = m or dict()
        for k in self._FIELDS:
            if m.get(k) is not None:
                setattr(self, k, m.get(k))
        return self
class QueryPriceRequest(TeaModel):
    """Request model for querying a commodity's price."""

    def __init__(
        self,
        auth_token: str = None,
        commodity_code: str = None,
        tenant_id: str = None,
        tenant_name: str = None,
        quantity: int = None,
        biz_time: str = None,
        order_duration: OrderDuration = None,
        commodity_order_attrs: List[CommodityOrderAttribute] = None,
        currency: str = None,
        coupon_id: str = None,
    ):
        # OAuth authorization token.
        self.auth_token = auth_token
        # Commodity master-data code.
        self.commodity_code = commodity_code
        # Tenant id; either this or tenant_name is required.
        self.tenant_id = tenant_id
        # 8-character tenant name; either this or tenant_id is required.
        self.tenant_name = tenant_name
        # Commodity quantity; defaults to 1 when omitted.
        self.quantity = quantity
        # Business time; defaults to the current time when omitted.
        self.biz_time = biz_time
        # Subscription duration; required for period-based commodities
        # such as resource packages or monthly/yearly subscriptions.
        self.order_duration = order_duration
        # Commodity attribute list. Usage-priced commodities use
        # SYS_USAGE_AMOUNT; resource-package commodities use CAPACITY.
        self.commodity_order_attrs = commodity_order_attrs
        # Currency, e.g. CNY; defaults to CNY when omitted.
        self.currency = currency
        # Coupon id.
        self.coupon_id = coupon_id

    def validate(self):
        """Check required fields, the biz_time format and nested models."""
        self.validate_required(self.commodity_code, 'commodity_code')
        if self.biz_time is not None:
            # ISO-8601 date-time with offset, e.g. 2020-01-02T03:04:05+08:00.
            self.validate_pattern(self.biz_time, 'biz_time', '\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})')
        if self.order_duration:
            self.order_duration.validate()
        for attr in (self.commodity_order_attrs or []):
            if attr:
                attr.validate()

    def to_map(self):
        """Serialize to a plain dict, omitting unset fields (except
        commodity_order_attrs, which is always emitted as a list)."""
        out = dict()
        for key in ('auth_token', 'commodity_code', 'tenant_id',
                    'tenant_name', 'quantity', 'biz_time'):
            value = getattr(self, key)
            if value is not None:
                out[key] = value
        if self.order_duration is not None:
            out['order_duration'] = self.order_duration.to_map()
        # Always present, even when the list is unset.
        out['commodity_order_attrs'] = []
        if self.commodity_order_attrs is not None:
            for item in self.commodity_order_attrs:
                out['commodity_order_attrs'].append(item.to_map() if item else None)
        for key in ('currency', 'coupon_id'):
            value = getattr(self, key)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        """Populate from a plain dict and return self."""
        m = m or dict()
        for key in ('auth_token', 'commodity_code', 'tenant_id',
                    'tenant_name', 'quantity', 'biz_time'):
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        if m.get('order_duration') is not None:
            self.order_duration = OrderDuration().from_map(m['order_duration'])
        # The attribute list is always reset, then rebuilt element by element.
        self.commodity_order_attrs = []
        if m.get('commodity_order_attrs') is not None:
            for item in m.get('commodity_order_attrs'):
                self.commodity_order_attrs.append(
                    CommodityOrderAttribute().from_map(item))
        for key in ('currency', 'coupon_id'):
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        return self
class QueryPriceResponse(TeaModel):
    """Response model carrying the commodity price-enquiry result."""

    def __init__(
        self,
        req_msg_id: str = None,
        result_code: str = None,
        result_msg: str = None,
        commodity_enquiry_price: CommodityEnquiryPrice = None,
    ):
        # Unique request id, used for tracing and troubleshooting.
        self.req_msg_id = req_msg_id
        # Result code; "OK" normally indicates success.
        self.result_code = result_code
        # Human-readable description of any error.
        self.result_msg = result_msg
        # Price-enquiry result.
        self.commodity_enquiry_price = commodity_enquiry_price

    def validate(self):
        if self.commodity_enquiry_price:
            self.commodity_enquiry_price.validate()

    def to_map(self):
        """Serialize to a plain dict, omitting unset fields."""
        out = dict()
        for key in ('req_msg_id', 'result_code', 'result_msg'):
            value = getattr(self, key)
            if value is not None:
                out[key] = value
        if self.commodity_enquiry_price is not None:
            out['commodity_enquiry_price'] = self.commodity_enquiry_price.to_map()
        return out

    def from_map(self, m: dict = None):
        """Populate from a plain dict and return self."""
        m = m or dict()
        for key in ('req_msg_id', 'result_code', 'result_msg'):
            if m.get(key) is not None:
                setattr(self, key, m.get(key))
        if m.get('commodity_enquiry_price') is not None:
            self.commodity_enquiry_price = CommodityEnquiryPrice().from_map(
                m['commodity_enquiry_price'])
        return self
class QueryWareslifeInstanceRequest(TeaModel):
    def __init__(
        self,
        auth_token: str = None,
        tenant_id: str = None,
        product_codes: List[str] = None,
    ):
        # OAuth authorization token.
        self.auth_token = auth_token
        # Tenant id.
        self.tenant_id = tenant_id
        # Product codes.
        self.product_codes = product_codes
def validate(self):
self.validate_required(self.tenant_id, 'tenant_id')
self.validate_required(self.product_codes, 'product_codes')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.tenant_id is not None:
result['tenant_id'] = self.tenant_id
if self.product_codes is not None:
result['product_codes'] = self.product_codes
return result
def from_map(self, m: dict = None):
m = m or dict()
| |
deleteList:
self.box_modifier_hide()
#now the new ones
createdTimeAnnos = []
if not differential:
annosToIterate = newAnnotations
else:
#take on the the nodes from the incoming
annosToIterate = arg["data"]["_eventInfo"]["new"]
annosToIterate.update(arg["data"]["_eventInfo"]["modify"])
self.logger.debug(f"annosToIterate {annosToIterate}")
for annoId,anno in annosToIterate.items():
if anno["type"] in ["threshold","motif"]:
# for thresholds/motifs we do not support delete/create per backend, only modify
# so check for modifications here
# it might not be part of the renderers: maybe thresholds are currently off
if annoId in self.renderers and not self._compare_anno(anno,self.renderers[annoId]["info"]):
self.logger.debug(f"update_annotations() -- thresholds/motif has changed {annoId} {self.renderers[annoId]['info']} => {anno}")
with self.renderersLock:
self.renderersGarbage.append(self.renderers[annoId]["renderer"])
del self.renderers[annoId] # kick out the entry, the remaining invisible renderer will stay in bokeh as garbage
#if the currently selected is being changed, we hide the box modifier
if self.boxModifierVisible:
if self.boxModifierAnnotationName == annoId:
self.box_modifier_hide()
# now recreate
if anno["type"] =="threshold":
self.draw_threshold(anno)
else:
self.draw_motif(anno)
#now execute the changes
if 0:
for entry in deleteList:
# we only switch it invisible for now, we don't delete the
# renderer, as this takes too long
r = self.find_renderer(entry)
if r:
r.visible = False
#if self.showAnnotations and createdTimeAnnos != []:
# self.show_annotations(createdTimeAnnos,fetch=False) # this will put them to the plot renderes
#self.show_annotations()
self.remove_renderers() # execute at least the deletes
    def update_annotations_and_thresholds(self,arg=None):
        """
        Backend event hook: called when the backend has changed annotation
        leaves or values; refreshes this widget's annotations and thresholds.

        Args:
            arg: event dict from the backend. Expected shapes (when present):
                 arg["data"]["sourcePath"] / arg["data"]["value"] for a
                 single-leaf modify, or a prebuilt
                 arg["data"]["_eventInfo"] = {"new": {...}, "delete": {...},
                 "modify": {...}} mapping annotation id -> annotation dict.
        """
        self.logger.debug(f"update_annotations {arg}")
        # Skip the reload entirely if the change is to an envelope embedded
        # inside an annotation (sourcePath ends with "...envelope.<leaf>").
        if "data" in arg and "sourcePath" in arg["data"]:
            splitted = arg["data"]["sourcePath"].split('.')
            if len(splitted)>2 and splitted[-2]=="envelope":
                self.logger.info("skip anno update due to envelope")
                return
        # A modify of a single leaf delivers the new value directly; convert
        # it into a full _eventInfo so no server round-trip is needed.
        if "value" in arg["data"]:
            # Check whether the annotation is in our known (mirrored) list.
            annotationBrowsePath = '.'.join(arg["data"]["sourcePath"].split('.')[:-1])
            lookup = {v["browsePath"]:k for k,v in self.server.get_annotations().items()}
            if annotationBrowsePath in lookup:
                # Build the _eventInfo locally to avoid the fetch.
                id = lookup[annotationBrowsePath]
                updatedAnno = copy.deepcopy(self.server.get_annotations()[id])
                changeKey = arg["data"]["sourcePath"].split('.')[-1]
                updatedAnno[changeKey]=arg["data"]["value"]
                if changeKey != "variable" and "variable" in updatedAnno:
                    updatedAnno["variable"] = [updatedAnno["variable"]] # outside events deliver "variable" as a list (forward refs from the referencer); internally we keep a string, so re-wrap here for consistency
                eventInfo = {"new":{},"delete":{},"modify":{id:updatedAnno}}
                arg["data"]["_eventInfo"] = eventInfo
        # Normalize: make sure every "variable" entry is a plain string,
        # not a list, before processing the event.
        if "data" in arg and "_eventInfo" in arg["data"]:
            for entry in ["new","delete","modify"]:
                for id,info in arg["data"]["_eventInfo"][entry].items():
                    for k,v in info.items():
                        if k=="variable" and type(v) is list:
                            arg["data"]["_eventInfo"][entry][id][k]=v[0] #take the first of the variables from the list
        lastAnnotations = self.server.get_annotations()
        hasModifies = False
        modified = None
        # NOTE(review): 'deleted' is collected below but not used in this
        # method -- possibly vestigial or consumed via side effects; confirm.
        deleted = None
        if "data" in arg and "_eventInfo" in arg["data"]:
            if arg["data"]["_eventInfo"]["modify"]:
                hasModifies = True
                modified = arg["data"]["_eventInfo"]["modify"]
            newAnnotations = self.server.fetch_annotations_differential(arg["data"]["_eventInfo"]) #this will write the new anno to our internal mirror, also executing the modify or delete
            differential = True
            if arg["data"]["_eventInfo"]["delete"]:
                deleted = arg["data"]["_eventInfo"]["delete"]
        else:
            # No event details available: fall back to a full fetch.
            newAnnotations = self.server.fetch_annotations()
            differential = False
        # The update has been written to the server mirror; now redraw.
        # New and missing annotations are identified by the show function.
        if hasModifies:
            self.show_annotations(fetch=False,checkModifies=hasModifies, modified = modified )
        else:
            self.show_annotations(fetch=True, checkModifies=hasModifies, modified = modified)
        self.update_annotations_and_thresholds_old_part(arg,lastAnnotations,newAnnotations,differential)
def update_annotation_data(self,anno,annoId):
start = anno["startTime"]
end = anno["endTime"]
infinity = globalInfinity
# we must use varea, as this is the only one glyph that supports hatches and does not create a blue box when zooming out
# self.logger.debug(f"have pattern with hatch {pattern}, tag {tag}, color{color} ")
self.logger.debug(f'from {self.renderers[anno["id"]]["source"].data["w"]} => {end-start}')
#self.renderers[anno["id"]]["source"].data["w"][0]= self.renderers[anno["id"]]["source"].data["w"][0]*0.5
#source = ColumnDataSource({"l": [start], "w": [end - start], "y": [-infinity], "height": [3 * infinity]})
self.renderers[anno["id"]]["source"].data = {"l": [start+(end-start)/2],"w": [end-start],"y": [-infinity],"height": [3 * infinity]}
#self.renderers[anno["id"]]["source"].data = dict(self.renderers[anno["id"]]["source"].data)
    def __legend_check(self):
        """
        Detect legend clicks that hid a variable and remove those variables
        (plus their derived "_score"/"_expected" curves and "_marker"
        renderers) from the current selection on the server.

        Returns:
            bool: True if at least one variable was removed.
        """
        try:
            # A renderer that is in the current selection but invisible means
            # the user clicked its legend entry to hide it.
            deleteList = []
            for r in self.plot.renderers:
                if r.name and r.name in self.server.get_variables_selected() and r.visible == False:
                    # there was a click on the legend to hide the variables
                    self.logger.debug("=>>>>>>>>>>>>>>>>>DELETE FROM plot:" + r.name)
                    self.logger.debug("=>>>>>>>>>>>>>>>>>DELETE FROM plot:" + r.name)  # NOTE(review): duplicated debug line, likely accidental
                    deleteList.append(r.name)
            if deleteList != []:
                # Second pass: also collect the matching "<var>_score" and
                # "<var>_expected" renderers of the deleted variables.
                deleteScoreNames = [deletePath.split('.')[-1]+"_score" for deletePath in deleteList]
                deleteExpectedNames = [deletePath.split('.')[-1]+"_expected" for deletePath in deleteList]
                for r in self.plot.renderers:
                    if r.name and (r.name.split('.')[-1] in deleteScoreNames or r.name.split('.')[-1] in deleteExpectedNames):
                        deleteList.append(r.name) #take the according score as well
                # Build the reduced selection and push it back to the server.
                newVariablesSelected = [var for var in self.server.get_variables_selected() if var not in deleteList]
                self.logger.debug("new var list" + str(newVariablesSelected))
                self.server.set_variables_selected(newVariablesSelected)
                # self.__dispatch_function(self.refresh_plot)
                # Also delete potential marker renderers of the removed lines.
                self.remove_renderers([lin+"_marker" for lin in deleteList])
        except Exception as ex:
            self.logger.error("problem during __legend_check" + str(ex))
        return (deleteList != [])
    def __init_new_observer(self):
        """Subscribe observer_cb to the server's SSE event stream."""
        self.server.sse_register_cb(self.observer_cb)
def __init_figure(self):
"""
initialize the time series widget, plot the lines, create controls like buttons and menues
also hook the callbacks
"""
self.hoverCounter = 0
self.newHover = None
self.hoverTool = None # forget the old hovers
self.showBackgrounds = False
self.showThresholds = False
self.showMotifs = False
self.showScores = False
self.buttonWidth = 70
#layoutControls = []# this will later be applied to layout() function
settings = self.server.get_settings()
mirror = self.server.get_mirror()
if "width" in settings:
self.width = settings["width"]
if "height" in settings:
self.height = settings["height"]
"""
#set the theme
if settings["theme"] == "dark":
self.curdoc().theme = Theme(json=themes.darkTheme)
self.lineColors = themes.darkLineColors
self.plot.xaxis.major_label_text_color = themes.darkTickColor
else:
self.curdoc().theme = Theme(json=themes.whiteTheme)
self.lineColors = themes.whiteLineColors
self.plot.xaxis.major_label_text_color = themes.whiteTickColor
"""
#self.cssClasses = {"button":"button_21","groupButton":"group_button_21","multiSelect":"multi_select_21"}
#self.cssClasses = {"button": "button_21_sm", "groupButton": "group_button_21_sm", "multiSelect": "multi_select_21_sm"}
#self.layoutSettings = {"controlPosition":"bottom"} #support right and bottom, the location of the buttons and tools
#initial values
try:
self.rangeStart = date2secs(settings["startTime"])*1000
self.rangeEnd = date2secs(settings["endTime"])*1000
except:
self.rangeStart = None
self.rangeEnd = None
self.logger.error("range start, end error, use default full")
#create figure
"""
the creation of the figure was reworked as this is a work around for a well known bug (in 1.04), see here
https://github.com/bokeh/bokeh/issues/7497
it's a bokeh problem with internal sync problems of frontend and backend, so what we do now is:
1) use toolbar_location = None to avoid auto-creation of toolbar
2) create tools by hand
3) assign them to the figure with add_tools()
4) create a toolbar and add it to the layout by hand
"""
if self.server.get_mirror()["panOnlyX"][".properties"]["value"]==True:
self.wheelZoomTool = WheelZoomTool(dimensions="width")
self.panTool = PanTool(dimensions="width")
else:
self.wheelZoomTool = WheelZoomTool()#dimensions="width")
self.panTool = PanTool()#dimensions="width")
tools = [self.wheelZoomTool, self.panTool]
"""
self.wheelZoomTool = WheelZoomTool()
self.wheelZoomToolX = WheelZoomTool(dimensions = "width")
self.panTool = PanTool()
tools = [self.wheelZoomTool,self.wheelZoomToolX,self.panTool]
"""
if settings["hasAnnotation"] == True:
self.boxSelectTool = BoxSelectTool(dimensions="width")
tools.append(self.boxSelectTool)
elif settings["hasThreshold"] == True:
self.boxSelectTool = BoxSelectTool(dimensions="height")
tools.append(self.boxSelectTool)
tools.append(ResetTool())
self.freeZoomTool = BoxZoomTool()
tools.append(self.freeZoomTool)
fig = figure(toolbar_location=None, plot_height=self.height,
plot_width=self.width,
sizing_mode="scale_width",
x_axis_type='datetime', y_range=Range1d(),x_range=(0,1))
self.plot = fig
# set the theme
if settings["theme"] == "dark":
self.curdoc().theme = Theme(json=themes.darkTheme)
self.lineColors = themes.darkLineColors
self.plot.xaxis.major_label_text_color = themes.darkTickColor
self.plot.yaxis.major_label_text_color = themes.darkTickColor
else:
self.curdoc().theme = Theme(json=themes.whiteTheme)
self.lineColors = themes.whiteLineColors
self.plot.xaxis.major_label_text_color = themes.whiteTickColor
self.plot.yaxis.major_label_text_color = themes.whiteTickColor
#b1 = date2secs(datetime.datetime(2015,2,13,3,tzinfo=pytz.UTC))*1000
#b2 = date2secs(datetime.datetime(2015,2,13,4,tzinfo=pytz.UTC))*1000
#wid = 20*60*1000 # 20 min
#self.boxData = ColumnDataSource({'x': [b1,b2], 'y':[0,0],'width': [5, 5],'height':[300,300],"alpha":[1,1,0.2]})
#self.boxRect = self.plot.rect(x="x", y="y", width="width", height="height",source=self.boxData)
#self.boxRect = self.plot.rect('x', 'y', 'width', 'height', source=self.boxData,width_units="screen")#, height_units="screen")#, height_units="screen")
self.boxModifierTool=BoxEditTool( renderers=[],num_objects=0,empty_value=0.1)#,dimensions="width")
self.box_modifier_init()
#self.box_modifier_show()
# possible attribures to boxedittool:
# custom_icon, custom_tooltip, dimensions, empty_value, js_event_callbacks, js_property_callbacks, name, num_objects, renderers, subscribed_events
#self.plot.add_layout(self.boxRect)
#self.boxModifierRect.data_source.on_change("selected",self.box_cb)
#self.boxRect.data_source.on_change("active", self.box_cb_2)
tools.append(self.boxModifierTool)
for tool in tools:
fig.add_tools(tool) # must assign them to the layout to have the actual use hooked
toolBarBox = ToolbarBox() #we need the strange creation of the tools to avoid the toolbar to disappear after
# reload of widget, then drawing an annotations (bokeh bug?)
toolBarBox.toolbar = Toolbar(tools=tools,active_inspect=None,active_scroll=self.wheelZoomTool,active_drag = None)
#active_inspect = [crosshair],
# active_drag = # here you can assign the defaults
# active_scroll = # wheel_zoom sometimes is not working if it is set here
# active_tap
toolBarBox.toolbar_location = "right"
toolBarBox.toolbar.logo = None # no bokeh logo
self.tools = toolBarBox
self.toolBarBox = toolBarBox
self.plot.xaxis.formatter = FuncTickFormatter(code = """
let local = moment(tick).tz('%s');
let datestring = local.format();
return datestring.slice(0,-6);
"""%settings["timeZone"])
self.plot.xaxis.ticker = DatetimeTicker(desired_num_ticks=5)# give more room for the date time string (default was 6)
self.plot.xgrid.ticker = self.plot.xaxis.ticker
self.build_second_y_axis()
self.show_hide_scroll_label() #it must be created at startup and then | |
1)] for j in range(i + 1)]
for j in range(i + 1):
for k in range(i + 1):
if j + k == i:
coeff[j][k] = binomial(i, j)
sol = 0
for j in range(i + 1):
for k in range(i + 1):
sol += coeff[j][k]* y0_self[j] * y0_other[k]
y0.append(sol)
return HolonomicFunction(sol_ann, self.x, self.x0, y0)
else:
raise NotImplementedError
return HolonomicFunction(sol_ann, self.x)
__rmul__ = __mul__
def __sub__(self, other):
    """Subtract ``other`` from this holonomic function via ``self + (-other)``."""
    negated = other * -1
    return self + negated
def __rsub__(self, other):
    """Reflected subtraction ``other - self``, computed as ``(-self) + other``."""
    negated_self = self * -1
    return negated_self + other
def __neg__(self):
    """Unary negation of the holonomic function.

    ``self * -1`` and ``-1 * self`` are equivalent here because the class
    aliases ``__rmul__ = __mul__``.
    """
    return self * -1
def __div__(self, other):
    """Divide by ``other`` by multiplying with its reciprocal."""
    reciprocal = S.One / other
    return self * reciprocal
def __truediv__(self, other):
    """Python 3 true division; delegates to :meth:`__div__`."""
    return self.__div__(other)
def __pow__(self, n):
    """Integer power of a holonomic function by repeated squaring.

    ``self**0`` is the constant 1 and ``self**1`` is ``self``; larger
    exponents are reduced by squaring so only O(log n) annihilator
    multiplications are performed.

    :param n: non-negative integer exponent
    """
    if n == 0:
        return S(1)
    if n == 1:
        return self
    if n % 2 == 1:
        # odd exponent: peel one factor off and recurse on the even part
        powreduce = self**(n - 1)
        return powreduce * self
    # n even: square self**(n // 2).  Floor division keeps the exponent an
    # int; the previous true division (n / 2) produced a float exponent on
    # Python 3, weakening the n == 0 / n == 1 base-case comparisons.
    powreduce = self**(n // 2)
    return powreduce * powreduce
def composition(self, expr, *args):
    """
    Returns the annihilator after composition of a holonomic function with
    an algebraic function.  Initial conditions for the annihilator after
    composition can also be provided to the function.

    Examples
    ========

    >>> from sympy.holonomic.holonomic import HolonomicFunction, DifferentialOperators
    >>> from sympy.polys.domains import ZZ, QQ
    >>> from sympy import symbols
    >>> x = symbols('x')
    >>> R, Dx = DifferentialOperators(QQ.old_poly_ring(x),'Dx')
    >>> HolonomicFunction(Dx - 1, x).composition(x**2, 0, [1])  # e^(x**2)
    HolonomicFunction((-2*x) + (1)Dx, x), f(0) = 1
    >>> HolonomicFunction(Dx**2 + 1, x).composition(x**2 - 1, 1, [1, 0])
    HolonomicFunction((4*x**3) + (-1)Dx + (x)Dx**2, x), f(1) = 1, f'(1) = 0

    See Also
    ========

    from_hyper
    """
    R = self.annihilator.parent
    a = self.annihilator.order
    diff = expr.diff()
    listofpoly = self.annihilator.listofpoly
    # NOTE(review): this rewrites entries of self.annihilator.listofpoly in
    # place (ring dtype -> sympy expression), mutating the annihilator;
    # confirm callers don't rely on the original representation.
    for i, j in enumerate(listofpoly):
        if isinstance(j, self.annihilator.parent.base.dtype):
            listofpoly[i] = self.annihilator.parent.base.to_sympy(j)
    # substitute x -> expr in the coefficients; r is the (substituted)
    # leading coefficient used to solve for the highest derivative
    r = listofpoly[a].subs({self.x: expr})
    subs = [-listofpoly[i].subs({self.x: expr}) / r for i in range(a)]
    # coeffs[i] == coeff of (D^i f)(a) in D^k (f(a))
    coeffs = [S(0) for i in range(a)]
    coeffs[0] = S(1)
    system = [coeffs]
    homogeneous = Matrix([[S(0) for i in range(a)]]).transpose()
    sol = S(0)
    # keep differentiating until the rows of `system` become linearly
    # dependent; the dependency gives the annihilator of the composition
    while sol.is_zero:
        coeffs_next = [p.diff() for p in coeffs]
        for i in range(a - 1):
            coeffs_next[i + 1] += (coeffs[i] * diff)
        for i in range(a):
            coeffs_next[i] += (coeffs[-1] * subs[i] * diff)
        coeffs = coeffs_next
        # check for linear relations
        system.append(coeffs)
        sol_tuple = (Matrix(system).transpose()).gauss_jordan_solve(homogeneous)
        sol = sol_tuple[0]
    # fix the free parameter of the solution space to 1
    tau = sol.atoms(Dummy).pop()
    sol = sol.subs(tau, 1)
    sol = _normalize(sol[0:], R, negative=False)
    # if initial conditions are given for the resulting function
    if args:
        return HolonomicFunction(sol, self.x, args[0], args[1])
    return HolonomicFunction(sol, self.x)
def to_sequence(self):
    """
    Finds the recurrence relation satisfied by the coefficients of the
    power series expansion of the function.

    Examples
    ========

    >>> from sympy.holonomic.holonomic import HolonomicFunction, DifferentialOperators
    >>> from sympy.polys.domains import ZZ, QQ
    >>> from sympy import symbols
    >>> x = symbols('x')
    >>> R, Dx = DifferentialOperators(QQ.old_poly_ring(x),'Dx')
    >>> HolonomicFunction(Dx - 1, x, 0, [1]).to_sequence()
    HolonomicSequence((-1) + (n + 1)Sn, n), u(0) = 1

    See Also
    ========

    HolonomicFunction.series

    References
    ==========

    hal.inria.fr/inria-00070025/document
    """
    dict1 = {}
    n = symbols('n', integer=True)
    dom = self.annihilator.parent.base.dom
    R, _ = RecurrenceOperators(dom.old_poly_ring(n), 'Sn')
    # each monomial x^k * D^i of the annihilator contributes
    # coeff * rf(n - k + 1, i) at shift (i - k) in the recurrence
    # (standard coefficient-extraction identity for power series)
    for i, j in enumerate(self.annihilator.listofpoly):
        listofdmp = j.all_coeffs()
        degree = len(listofdmp) - 1
        for k in range(degree + 1):
            coeff = listofdmp[degree - k]
            if coeff == 0:
                continue
            if i - k in dict1:
                dict1[i - k] += (coeff * rf(n - k + 1, i))
            else:
                dict1[i - k] = (coeff * rf(n - k + 1, i))
    sol = []
    lower = min(dict1.keys())
    upper = max(dict1.keys())
    # assemble a contiguous coefficient list, re-indexed so the lowest
    # occurring shift becomes the constant term of the recurrence operator
    for j in range(lower, upper + 1):
        if j in dict1.keys():
            sol.append(dict1[j].subs(n, n - lower))
        else:
            sol.append(S(0))
    # recurrence relation
    sol = RecurrenceOperator(sol, R)
    if not self._have_init_cond:
        return HolonomicSequence(sol)
    # initial conditions are only translated when given at the origin
    if self.x0 != 0:
        return HolonomicSequence(sol)
    # computing the initial conditions for recurrence
    order = sol.order
    # non-negative integer roots of the leading recurrence coefficient are
    # singular indices; extend the initial data past the largest one
    all_roots = roots(sol.listofpoly[-1].rep, filter='Z')
    all_roots = all_roots.keys()
    if all_roots:
        max_root = max(all_roots)
        if max_root >= 0:
            order += max_root + 1
    y0 = _extend_y0(self, order)
    u0 = []
    # u(n) = y^n(0)/factorial(n)
    for i, j in enumerate(y0):
        u0.append(j / factorial(i))
    return HolonomicSequence(sol, u0)
def series(self, n=6, coefficient=False, order=True):
    """
    Finds the power series expansion of the given holonomic function.

    :param n: number of terms of the expansion to compute
    :param coefficient: if True, return the list of coefficients instead
        of the assembled series expression
    :param order: if True, append an ``O(x**n)`` order term to the series

    Examples
    ========

    >>> from sympy.holonomic.holonomic import HolonomicFunction, DifferentialOperators
    >>> from sympy.polys.domains import ZZ, QQ
    >>> from sympy import symbols
    >>> x = symbols('x')
    >>> R, Dx = DifferentialOperators(QQ.old_poly_ring(x),'Dx')
    >>> HolonomicFunction(Dx - 1, x, 0, [1]).series()  # e^x
    1 + x + x**2/2 + x**3/6 + x**4/24 + x**5/120 + O(x**6)
    >>> HolonomicFunction(Dx**2 + 1, x, 0, [0, 1]).series(n=8)  # sin(x)
    x - x**3/6 + x**5/120 - x**7/5040 + O(x**8)

    See Also
    ========

    HolonomicFunction.to_sequence
    """
    recurrence = self.to_sequence()
    l = len(recurrence.u0) - 1  # index of the last known coefficient
    k = recurrence.recurrence.order
    x = self.x
    seq_dmp = recurrence.recurrence.listofpoly
    R = recurrence.recurrence.parent.base
    K = R.get_field()
    seq = []
    # a zero of the leading recurrence coefficient at an integer index
    # would make the division in `sub` below blow up at that index
    if 0 in roots(seq_dmp[-1].rep, filter='Z').keys():
        singular = True
    else:
        singular = False
    # NOTE(review): `singular` is computed but never used in this method;
    # the singular case appears unhandled here -- confirm.
    for i, j in enumerate(seq_dmp):
        seq.append(K.new(j.rep))
    # u(i + k) = sum_j sub[j](i) * u(i + j)
    sub = [-seq[i] / seq[k] for i in range(k)]
    sol = [i for i in recurrence.u0]
    if l + 1 >= n:
        pass
    else:
        # use the initial conditions to find the next term
        for i in range(l + 1 - k, n - k):
            coeff = S(0)
            for j in range(k):
                if i + j >= 0:
                    coeff += DMFsubs(sub[j], i) * sol[i + j]
            sol.append(coeff)
    if coefficient:
        return sol
    # assemble the truncated series sum_i sol[i] * x**i
    ser = S(0)
    for i, j in enumerate(sol):
        ser += x**i * j
    if order:
        return ser + Order(x**n, x)
    else:
        return ser
def _indicial(self):
    """Computes the roots of the indicial equation at the origin.

    The indicial polynomial constrains the possible leading exponents of
    series solutions at x = 0; its real roots are returned.

    :return: keys view of the real roots of the indicial polynomial
    """
    list_coeff = self.annihilator.listofpoly
    R = self.annihilator.parent.base
    x = self.x
    s = R.zero
    y = R.one

    def _pole_degree(poly):
        # multiplicity of the root x = 0 of poly (0 when x = 0 is no root)
        root_all = roots(poly.rep, filter='Z')
        if 0 in root_all.keys():
            return root_all[0]
        else:
            return 0

    degree = [j.degree() for j in list_coeff]
    degree = max(degree)
    # "infinity" sentinel guaranteed larger than any achievable pole degree
    inf = 10 * (max(1, degree) + max(1, self.annihilator.order))
    deg = lambda q: inf if q.is_zero else _pole_degree(q)
    # b = min_j (deg(a_j) - j): valuation shift of the operator at the origin
    # (leftover debug prints of b were removed here)
    b = deg(list_coeff[0])
    for j in range(1, len(list_coeff)):
        b = min(b, deg(list_coeff[j]) - j)
    # accumulate the indicial polynomial: each D^i term contributes its
    # relevant coefficient times the falling product y = x(x-1)...(x-i+1)
    for i, j in enumerate(list_coeff):
        listofdmp = j.all_coeffs()
        degree = len(listofdmp) - 1
        if - i - b <= 0:
            s = s + listofdmp[degree - i - b] * y
        y *= x - i
    return roots(s.rep, filter='R').keys()
def evalf(self, points, method='RK4'):
    """
    Finds the numerical value of a holonomic function using numerical
    methods (RK4 by default).

    A set of points (real or complex) must be provided; it is the path for
    the numerical integration, evaluated in order x1 --> x2 --> ... --> xn.

    :param points: list of points [x1, x2, ..., xn] forming the path
    :param method: integration scheme, ``'RK4'`` (default) or ``'Euler'``
    :return: list with the value of the function at each point of the path

    Examples
    ========

    >>> from sympy.holonomic.holonomic import HolonomicFunction, DifferentialOperators
    >>> from sympy.polys.domains import ZZ, QQ
    >>> from sympy import symbols
    >>> x = symbols('x')
    >>> R, Dx = DifferentialOperators(QQ.old_poly_ring(x),'Dx')
    >>> r = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1]
    >>> HolonomicFunction(Dx - 1, x, 0, [1]).evalf(r)
    [1.10517083333333, 1.22140257085069, 1.34985849706254, 1.49182424008069,
    1.64872063859684, 1.82211796209193, 2.01375162659678, 2.22553956329232,
    2.45960141378007, 2.71827974413517]

    Runge-Kutta 4th order is considerably more accurate than Euler's
    method for the same path.
    """
    # imported lazily to avoid a circular import at module load time
    from sympy.holonomic.numerical import _evalf
    result = _evalf(self, points, method=method)
    return result
def from_hyper(func, x0=0, evalf=False):
"""
Converts Hypergeometric Function to Holonomic.
func is the Hypergeometric Function and x0 be the point at
which initial conditions are required.
Examples
=======
>>> from sympy.holonomic.holonomic import from_hyper, DifferentialOperators
>>> from sympy import symbols, hyper, S
>>> x = symbols('x')
>>> from_hyper(hyper([], [S(3)/2], x**2/4))
HolonomicFunction((-x) + (2)Dx + (x)Dx**2, x), f(1) = sinh(1), f'(1) = -sinh(1) + cosh(1)
"""
a = func.ap
| |
from typing import Dict, Union, Tuple, Pattern, List
from apscheduler.schedulers.blocking import BlockingScheduler
from socket import gethostname
from pathlib import Path
from time import sleep
from manager import Manager
from datetime import datetime
from datetime import timedelta
import exceptions
import argparse
import subprocess
import re
from db import Client, Processor
from matplotlib import pyplot as plt
# TODO solve issue with gathering cpu_usage stats as they are ps command is wonky
# TODO add functionality to draw heavy usage stats from db to a table within given timeframe
PROJECT_ROOT = Path(__file__).absolute().parent
DATABASE_ADRESS = f'sqlite:////{str(PROJECT_ROOT.joinpath("db.db"))}'
def read_file(path: Path) -> str:
    """Return the first line of *path* with surrounding whitespace removed.

    :param path: Path to a readable text file
    :return: str, the stripped first line (empty string for an empty file)
    """
    with open(str(path)) as handle:
        first_line = handle.readline()
    return first_line.strip()
def parse_ps(row: str, regex: Pattern) -> Tuple[int, float, str]:
    """Parse one row of ``ps`` output into (processor_id, usage, executable).

    The row is expected to carry three columns: processor id, process cpu
    usage and the command line.  When the command looks like
    "interpreter script" (both tokens containing a path separator) the two
    tokens are joined so the script shows up in the reported executable;
    otherwise only the first token is kept.

    :param row: str, one line of ``ps`` output
    :param regex: Pattern, compiled pattern with three capture groups
    :return: (processor_id: int, usage: float, executable: str), or None
        when *row* does not match *regex*
    """
    match = regex.search(row)
    if match is None:
        return None
    processor_id, usage, command = match.groups()
    tokens = command.split(" ")
    try:
        executable, script, *_ = tokens
        if "/" in executable and "/" in script:
            executable = f"{executable} {script}"
        else:
            raise exceptions.NoScript(f"{script} is not a script")
    except (ValueError, exceptions.NoScript):
        executable = tokens[0]
    return int(processor_id), float(usage), str(executable)
def get_processes() -> Dict[int, dict]:
    """
    finds the heaviest processes in the system for each processor

    searches the full list of processes in the system and maps, per
    processor, the heaviest process, the command of that process, as well
    as the total processor usage.
    keys: 'process_usage', 'command' and 'processor_usage'

    :return: dict(processor_id: int) -> dict
    """
    # ps columns: processor id, cpu usage, command (trailing comma in the
    # column list is part of the original invocation)
    process = subprocess.run("ps -Ao psr,pcpu,command, --no-headers".split(),
                             stdout=subprocess.PIPE, universal_newlines=True)
    # NOTE(review): r"(\d)" captures a single digit, so processor ids >= 10
    # would be truncated to their last digit -- confirm on larger machines.
    regex = re.compile(r"(\d)\s+(\d+\.\d+)\s+(.+)")
    processes_table = [parse_ps(row, regex) for row in process.stdout.split("\n") if row]
    # ascending sort by usage: in the loop below, later (heavier) rows
    # overwrite 'process_usage'/'command', leaving the heaviest per processor
    processes_table.sort(key=lambda p: p[1])
    processes = {}
    for processor_id, usage, command in processes_table:
        if processor_id not in processes:
            processes.update({processor_id: {"process_usage": usage, "command": command, "processor_usage": usage}})
        else:
            processes[processor_id]["process_usage"] = usage
            processes[processor_id]["command"] = command
            # accumulate the per-processor total across all its processes
            processes[processor_id]["processor_usage"] += usage
    return processes
def get_cpu_map() -> dict:
    """
    maps each system processor id to its physical core id

    On multithreaded (SMT) processors two logical processors belong to the
    same core; this parses /proc/cpuinfo to recover that mapping.

    :return: dict[processor_id: int] -> core_id: int
    :raises exceptions.CPULoggingNotSupported: when /proc/cpuinfo is missing
        or lacks the expected "processor" / "core id" fields
    """
    path = Path("/proc/cpuinfo")
    if not path.exists():
        raise exceptions.CPULoggingNotSupported("Can not find file /proc/cpuinfo")
    # read the file directly instead of shelling out to `cat`
    out = path.read_text().strip("\n")
    cpu_map = {}
    for processor in out.split("\n\n"):
        # NOTE: the previous patterns used "(\d)+", whose capture group kept
        # only the LAST digit, mis-mapping ids >= 10; "(\d+)" captures all.
        processor_id = re.search(r"processor\s*:\s*(\d+)", processor)
        if not processor_id:
            raise exceptions.CPULoggingNotSupported("Can not find info about processor id in /proc/cpuinfo")
        thread = int(processor_id.group(1))
        core = re.search(r"core id\s*:\s*(\d+)", processor)
        if not core:
            raise exceptions.CPULoggingNotSupported("Can not find info about core id in /proc/cpuinfo")
        cpu_map[thread] = int(core.group(1))
    return cpu_map
def get_session_time() -> Tuple[datetime, datetime]:
    """
    looks up the start of the current login session and the current time

    Parses the first row of ``last -1`` output; whitespace-separated
    columns 3..6 are expected to hold the session start, e.g.
    "Mon Jan 1 10:30".

    :return: (session_start: datetime, now: datetime)
    """
    process = subprocess.run("last -1".split(), stdout=subprocess.PIPE, universal_newlines=True)
    out = process.stdout.split("\n")
    # columns 3..6 of the first line: weekday, month, day, HH:MM
    session_string = ' '.join([b for b in out[0].split(" ") if b][3:7])
    now = datetime.now()
    # NOTE(review): assumes the session started in the current year; a
    # session spanning New Year would be mis-dated -- confirm acceptable.
    session_start = datetime.strptime(f"{now.year} {session_string}", "%Y %a %b %d %H:%M")
    return session_start, now
def get_temp(need_sleep: bool) -> dict:
    """
    reads the temperature on each core in the system

    Scans /sys/class/hwmon for the "coretemp" sensor, pairs each
    tempN_label file mentioning "core" with its tempN_input reading, and
    maps the physical core id (parsed from the label) to the reading in
    milli-degrees C.  For production pass need_sleep as True if the reading
    is taken at the same time as python boots up: booting python ups the
    temperature on the scheduled core by around 2 degrees C.

    :param need_sleep: bool, pass True if python booted within 3 seconds of
        the first reading
    :return: dict[core_id: int] -> temperature: int (milli-degrees C)
    """
    base = Path("/sys/class/hwmon/")
    for hwmon in base.iterdir():
        name = read_file(hwmon.joinpath("name").absolute())
        if name.lower() == "coretemp":
            # tempN_label files whose content mentions "core" identify the
            # per-core sensors (others are package/ambient sensors)
            labels = [file for file in hwmon.iterdir()
                      if file.stem.endswith("_label")
                      and "core" in read_file(file.absolute()).lower()]
            # matching tempN_input files hold the actual readings
            temps = [hwmon.joinpath(label.stem.replace("label", "input"))
                     for label in labels]
            if need_sleep:
                sleep(3)  # needed for the processor to cool down from the heat generated to launch python
            # pair core ids (digits parsed out of the label text) with readings
            temporary = zip([int(re.search(r"(\d)+", read_file(label)).groups()[0]) for label in labels],
                            [int(read_file(temp)) for temp in temps])
            return {core: temp for core, temp in temporary}
    # NOTE(review): implicitly returns None when no "coretemp" hwmon exists;
    # callers index the result, so that would raise -- confirm intended.
def store_temp(need_sleep: bool = False):
    """
    makes a database entry at the current time

    stats is a table where each row stores:
    core: int, processor: int, processor_usage: float, heaviest_process: str, ...
    ... heaviest_process_usage: float, temperature: int

    :param need_sleep: bool, pass True if reading is taken within 3 seconds of python boot
    :return: None
    """
    core_map = get_cpu_map()
    time = datetime.now()
    temperatures = get_temp(need_sleep)
    processes = get_processes()
    # one row per logical processor: (core, cpu, total usage,
    # heaviest command, heaviest usage, core temperature)
    stats = [(core_map[cpu], cpu, processes[cpu]["processor_usage"], processes[cpu]["command"],
              processes[cpu]["process_usage"], temperatures[core_map[cpu]])
             for cpu in core_map.keys()]
    # persist all rows for this host under a single timestamp
    with Manager(DATABASE_ADRESS) as manager:
        client = manager.get_client(gethostname())
        for core, cpu, cpu_usage, process, process_usage, temperature in stats:
            manager.add_cpu(client, core, cpu, cpu_usage, process, process_usage, temperature, time)
def try_timestamp(timestamp: str, formating: str) -> Union[datetime, None]:
    """Attempt to parse *timestamp* with *formating*.

    :param timestamp: str, the text to parse
    :param formating: str, a strptime format string
    :return: the parsed datetime, or None when the format does not match
    """
    try:
        parsed = datetime.strptime(timestamp, formating)
    except ValueError:
        return None
    return parsed
def get_time_from_user(timestamp: str) -> datetime:
    """Parse a user-supplied timestamp against the supported formats.

    Formats are tried from most to least specific; the first one that
    matches wins.

    :param timestamp: str, the user input
    :return: datetime, the parsed timestamp
    :raises exceptions.BadFormatting: when no supported format matches
    """
    formats = ["%y/%m/%d-%H:%M:%S",
               "%y/%m/%d-%H:%M",
               "%y/%m/%d-%H",
               "%y/%m/%d"]
    for fmt in formats:
        parsed = try_timestamp(timestamp, fmt)
        if parsed is not None:
            return parsed
    raise exceptions.BadFormatting(f"Time format must follow one of the following: {' '.join(formats)}\n"
                                   f"Formatting explained here: "
                                   f"https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior")
def schedule(parsed_args: Union[argparse.Namespace, Dict[str, int]]):
    """
    schedules the application to automatically collect data

    Builds an apscheduler trigger configuration from the parsed arguments
    and starts a blocking scheduler that periodically runs store_temp.
    Supported fields: year, month, week, day, hour, minute, second.

    NOTE(review): the body reads fields via attribute access
    (parsed_args.year, ...), so a plain dict argument would fail despite the
    annotation -- confirm the Dict variant is ever used.

    :param parsed_args: Union[argparse.Namespace, Dict[str, int]]
    :return: None
    """
    # map argparse fields onto apscheduler trigger keyword arguments
    config = {
        "year": parsed_args.year,
        "month": parsed_args.month,
        "week": parsed_args.week,
        "day": parsed_args.day,
        "hour": parsed_args.hour,
        "minute": parsed_args.minute,
        "second": parsed_args.second}
    if parsed_args.job_type == "interval":
        # interval triggers use plural keywords and support no year/month
        config = {(k + "s"): config[k] for k in config if k not in ["year", "month"] and config[k] is not None}
    else:
        # cron/date triggers keep singular keywords; drop unset fields
        config = {key: value for key, value in config.items() if value is not None}
    scheduler = BlockingScheduler()
    scheduler.add_executor("processpool")
    scheduler.add_job(store_temp, parsed_args.job_type, misfire_grace_time=parsed_args.misfire, **config)
    # blocks the current thread until the scheduler is shut down
    scheduler.start()
def view(args: Union[argparse.Namespace, Dict[str, int]]):
    """Resolve CLI arguments into a concrete plot request and render it.

    Falls back to the local hostname when no host is given.  The time
    window is taken from the current login session when --this-session is
    requested, otherwise the user-supplied start/end timestamps are parsed.
    """
    host = args.host or gethostname()
    core = args.core or False
    if args.this_session:
        start_time, end_time = get_session_time()
    else:
        start_time = get_time_from_user(args.start_time)
        end_time = get_time_from_user(args.end_time)
    plot(host, args.measurement, core, start_time, end_time)
def plot(host: str, measurment: str, core=False, start_time: datetime = None, end_time: datetime = None):
"""
plots the prefered 'measurement' over time
'measurement' can take the values 'temperature' or 'usage' and will plot the measurement over time.
If 'core' is False on a multithreaded system there will be one line per virtual processor.
If 'core' is True on a multithreaded system the data from each virtual processor on that core will be avaraged
to show one line per core.
The value of 'core' wont matter on non multithreaded systems.
If a value is given to 'start_time' only data availeble from that time will be used in the graph.
If no value is given there will be no under limit on the data used in the graph.
If a value is given to 'end_time' only data availeble up untill that time will be used in the graph
If not value not given there will be no upper limit on the data used in the graph.
:param host: str, the hostname to plot
:param measurment: str, takes value 'usage' or 'temperature'
:param core: bool, mutithreaded systems are avaraged if True
:param start_time: datetime, specific date to start showing data
:param end_time: datetime, specific date to stop showing date
:return:
"""
if not start_time:
start_time = 0
if not end_time:
end_time = datetime.now()
with Manager(DATABASE_ADRESS) as cursor:
client: Client = cursor.get_client(host)
processors: List[Processor] = cursor.session.query(Processor).filter(
start_time < Processor.time, Processor.time < end_time, client == Processor.client
).all()
data = {}
if not core:
for processor in processors:
if processor.processor not in data:
data[processor.processor] = [
[processor.time], [getattr(processor, measurment)], f"Processor {processor.processor}"]
else:
data[processor.processor][0].append(processor.time)
data[processor.processor][1].append(getattr(processor, measurment))
else:
for processor in processors:
if processor.core not in data:
data[processor.core] = [
| |
<gh_stars>1-10
#get a dataframe and rank features
#output: (1) inside ig_tuner and corr_tuner, save [#selected features,acc] in each step
# and print the whole list of x and list of y
# (2) save top features in a csv file with parent node in bgr in left col and the child in right col and importance score in third col
# (3) make dataframe with new features (modify the input dataframe and select the cols that are in the list of
# selected bgrs. save the datafram.
import os
import pandas as pd
import operator
import numpy as np
import ast_features
from pandas import DataFrame as df
from matplotlib import pyplot as plt
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
from sklearn.feature_selection import mutual_info_classif
#input: 1)numpy 2d array 2)list of calsses 3)feature vector(list of str) 4)hyperparams
#output: 1)list of scores of cv 2)avg of the list
def RFC(data, classes, IG_features, cv, ntree, maxdepth, crit):
    """Train a random forest and report its cross-validated accuracy.

    :param data: 2d numpy array (samples x features)
    :param classes: list of class labels, one per row of data
    :param IG_features: feature names (list of str)
        NOTE(review): unused inside this function -- kept for signature
        symmetry with the callers; confirm before removing.
    :param cv: number of cross-validation folds
    :param ntree: number of trees (n_estimators)
    :param maxdepth: maximum tree depth (None = unlimited)
    :param crit: split criterion ('gini' or 'entropy')
    :return: (list of per-fold cv scores, their average)
    """
    RF_classifier = RandomForestClassifier(n_estimators=ntree, criterion=crit,
                                           max_depth=maxdepth, oob_score=True)
    RF_classifier.fit(data, classes)
    # actual heights of the grown trees, printed for diagnostics
    tree_heights = [estimator.tree_.max_depth for estimator in RF_classifier.estimators_]
    treeHmin = min(tree_heights)
    treeHmax = max(tree_heights)
    treeHavg = sum(tree_heights) / len(tree_heights)
    print 'height of trees'
    print 'min = ', treeHmin, 'max = ', treeHmax, 'avg = ', treeHavg
    # cross-validate on the same data the forest was fit on
    scores_IGFeatures = cross_val_score(RF_classifier, data, classes, cv=cv)
    accavg = sum(scores_IGFeatures) / len(scores_IGFeatures)
    print 'average acc = ', accavg
    return (scores_IGFeatures , accavg)
#input: 1){bgr:IG} all bgrs' igs in dic for a dataset 2)threshold for ig
#output: 1)list of bgr (str) selected by ig thr 2)dic of those selected features and their ig
def IG_selector(IG_pairs, threshold):
    """Select the features whose information gain exceeds *threshold*.

    :param IG_pairs: dict {feature: information_gain} for a dataset
    :param threshold: minimum information gain (exclusive) to keep a feature
    :return: (selected feature names as a list of str,
              dict of those features mapped to their information gain)
    """
    selected = []
    for feature in IG_pairs.keys():
        if IG_pairs[feature] > threshold:
            selected.append(feature)
    selected_pairs = {feature: IG_pairs[feature] for feature in selected}
    return (selected, selected_pairs)
#get the least number of features that give acc above 85% by tuning ig threshold
#input: 1)dir of dataset for feature extraction 2)ig thrs to test 3)hyperparams
#output: 1)2d numpy of codes vs features selected by tuner 2)classes 3)selected features as list of str
# 4)optimum ig thr 5) corresponding acc with that ig thr and selected features
def IG_tuner(data , classes , feature_labels, ig_thr_range, acc_thr, cv, ntree, maxdepth, crit):
    """Find the smallest IG-selected feature set keeping accuracy above acc_thr.

    Tries every information-gain threshold in ig_thr_range; for each, the
    selected feature subset is evaluated with a cross-validated random
    forest, and the smallest subset whose average accuracy clears acc_thr
    is kept.

    :param data: 2d numpy array (samples x features)
    :param classes: class label per sample
    :param feature_labels: list of str, one name per column of data
    :param ig_thr_range: iterable of information-gain thresholds to test
    :param acc_thr: minimum acceptable average cv accuracy
    :param cv, ntree, maxdepth, crit: random-forest hyperparameters
    :return: (Xig, Yig, final_data, final_features, final_thr, final_acc)
        where Xig/Yig are per-threshold (#features, accuracy) curve points
    """
    print 'IG_tuner activated.'
    print 'number of all features:', len(feature_labels)
    print('calculating IGs...')
    # dic of all features with their ig
    IG_pairs = dict(zip(feature_labels, mutual_info_classif(data, classes, discrete_features=True)))
    print('IG tuning started...')
    final_thr = 0
    final_features = feature_labels
    final_data = data
    final_acc = 0
    Xig = []
    Yig = []
    for thr in ig_thr_range:
        print 'ig_thr = ' , thr , '...'
        #list of ig features(list of strings), dic of selected ig features with their ig
        IG_features, IG_pairsSelected= IG_selector(IG_pairs, thr)
        print '# selected IG features = ', len(IG_features)
        if len(IG_features) == 0:
            break
        # project the data matrix onto the selected feature columns
        # NOTE(review): column order follows dict iteration order, not the
        # order of IG_features; it is consistent across rows -- confirm the
        # downstream code never assumes IG_features ordering.
        data_IG = []
        for feature_vec in data:
            feature_freq = dict(zip(feature_labels, feature_vec))
            temp = [feature_freq[f] for f in feature_freq.keys() if f in IG_features]
            data_IG.append(temp)
        #numpy 2d array, list of user's name for each row of data,list of selected features as string
        scores_IG , accavg = RFC(data_IG, classes, IG_features , cv, ntree, maxdepth, crit)
        Xig.append(len(IG_features))
        Yig.append(accavg)
        # keep the smallest feature set that still clears the accuracy bar
        if accavg > acc_thr:
            if len(IG_features)< len(final_features):
                final_thr = thr
                final_features = IG_features
                final_data = data_IG
                final_acc = accavg
            continue
            # print 'selection terminated.'
            # break
    return(Xig , Yig , final_data, final_features, final_thr, final_acc)
#rank the selected features that give acc above 80% by one out approach
#input: numpy 2d, classes , list of features , hyperparams
#output: 1) ranks = [(bgr , imposcore)] 2) ranks_dic = {bgr: (rank , importance acc)}
def Ranker_oneOut(ig_data, classes, ig_features, cv, ntree, maxdepth, crit):
    """Rank features by leave-one-feature-out importance.

    A feature's importance is 1 - (average cv accuracy obtained without
    it): the larger the accuracy drop when the feature is removed, the
    more important it is.

    :param ig_data: 2d numpy array (samples x features)
    :param classes: class label per sample
    :param ig_features: list of str, one name per column of ig_data
    :param cv, ntree, maxdepth, crit: random-forest hyperparameters
    :return: (ranks, ranks_dic) where ranks = [(feature, importance_score)]
        sorted by descending score, and ranks_dic = {feature: (rank, score)}
    """
    #one-out selection
    ImpScores = {}
    count = 0
    for ftr in ig_features:
        count += 1
        print 'feature ' , count, ' processing...'
        temp_data = []
        # every feature except the one being evaluated
        temp_features = [i for i in ig_features if not i == ftr]
        for feature_vec in ig_data:
            feature_freq = dict(zip(ig_features, feature_vec))
            temp = [feature_freq[f] for f in feature_freq.keys() if f in temp_features]
            temp_data.append(temp)
        temp_acc , temp_accavg= RFC(temp_data, classes, temp_features, cv, ntree, maxdepth, crit)
        # the more accuracy drops without ftr, the more important it is
        imp_score = 1 - temp_accavg
        ImpScores[ftr] = imp_score
        # print(ImpScores)
    ranks = sorted(ImpScores.iteritems(), key=operator.itemgetter(1), reverse=True)
    ranks_dic = {}
    R = 0
    for item in ranks:
        R += 1
        ranks_dic[item[0]] = (R , item[1])
    # ranks = [(bgr , rank)] , ranks_dic = {bgr: (rank , importance acc)}
    return(ranks , ranks_dic)
#input: (1)numpy 2d array for correlation matrix (2)list of feature labels 3)correlation thr
#output: list of selected features (list of str)
def Corr_selector(corr_matrix, feature_laebles, thr):
    """Greedily drop features correlated above *thr* with an earlier kept one.

    Walks the upper triangle of the correlation matrix in order: whenever a
    still-kept feature i correlates with a later feature j above the
    threshold (in absolute value), feature j is dropped.

    :param corr_matrix: square 2d array-like of pairwise correlations
    :param feature_laebles: list of feature names matching the matrix order
    :param thr: absolute-correlation threshold above which a feature is dropped
    :return: list of str, the surviving feature names in original order
    """
    corr = np.asarray(corr_matrix)
    size = len(corr)
    dropped = []
    for i in range(size):
        if feature_laebles[i] in dropped:
            continue
        for j in range(i + 1, size):
            if feature_laebles[j] in dropped:
                continue
            if abs(corr[i][j]) > thr:
                dropped.append(feature_laebles[j])
    return [f for f in feature_laebles if f not in dropped]
#get the least number of features that give acc above 85% by tuning correlation threshold
#input: 1)dir of dataset for feature extraction 2)corr thrs to test 3)hyperparams
#output: 1)2d numpy of codes vs features selected by tuner 2)classes 3)selected features as list of str
# 4)optimum corr thr 5) corresponding acc with that corr thr and selected features
def Corr_tuner(data , classes , feature_labels, corr_thr_range, acc_thr, cv, ntree, maxdepth, crit):
    """Find the smallest decorrelated feature set keeping accuracy above acc_thr.

    Tries every correlation threshold in corr_thr_range; for each, the
    decorrelated feature subset (see Corr_selector) is evaluated with a
    cross-validated random forest, and the smallest subset whose average
    accuracy clears acc_thr is kept.

    :param data: 2d numpy array (samples x features)
    :param classes: class label per sample
    :param feature_labels: list of str, one name per column of data
    :param corr_thr_range: iterable of correlation thresholds to test
    :param acc_thr: minimum acceptable average cv accuracy
    :param cv, ntree, maxdepth, crit: random-forest hyperparameters
    :return: (Xcor, Ycor, final_data, final_features, final_thr, final_acc)
        where Xcor/Ycor are per-threshold (#features, accuracy) curve points
    """
    print 'corr tuner activated.'
    print 'number of all features:', len(feature_labels)
    print('calculating correlation matrix...')
    #matrix of correlation as a numpy 2d array
    corr_matrix = np.corrcoef(data, rowvar= False) #to consider columns as varibales
    print('correlation tuning started...')
    final_thr = 0
    final_features = feature_labels
    final_data = data
    final_acc = 0
    Xcor = []
    Ycor = []
    for thr in corr_thr_range:
        print 'corr_thr = ', thr, '...'
        # list of ig features(list of strings), dic of selected ig features with their ig
        corr_features = Corr_selector(corr_matrix, feature_labels, thr)
        print '# selected uncorrelated features = ', len(corr_features)
        if len(corr_features) == 0:
            break
        # project the data matrix onto the surviving feature columns
        # NOTE(review): column order follows dict iteration order, not the
        # order of corr_features -- consistent across rows; confirm callers.
        data_corr = []
        for feature_vec in data:
            feature_freq = dict(zip(feature_labels, feature_vec))
            temp = [feature_freq[f] for f in feature_freq.keys() if f in corr_features]
            data_corr.append(temp)
        # numpy 2d array, list of user's name for each row of data,list of selected features as string
        scores_corr, accavg = RFC(data_corr, classes, corr_features, cv, ntree, maxdepth, crit)
        Xcor.append(len(corr_features))
        Ycor.append(accavg)
        # keep the smallest feature set that still clears the accuracy bar
        if accavg > acc_thr:
            if len(corr_features)< len(final_features):
                final_thr = thr
                final_features = corr_features
                final_data = data_corr
                final_acc = accavg
            continue
            # print 'selection terminated.'
            # break
    return (Xcor, Ycor, final_data, final_features, final_thr, final_acc)
if __name__ == '__main__':
year = 'all'
codes = '9'
users = '81'
# mode = '_frequent1sameprob_'
mode = '_frequent1diffProb_'
mydir = os.path.dirname(__file__) + '/dataframe/COPYuserWith9codes_729codeall9freqbgrmorethan1user.csv'
# subdir = '/df_' + year + '_' + codes + mode + users + '.csv'
# mydir = os.path.dirname(__file__) + '/dataframe' + subdir
csv_saveto = os.path.dirname(__file__) + '/FeaturesRanking'
df_saveto = os.path.dirname(__file__) + '/dataframe/ranking'
if not os.path.exists(csv_saveto):
os.makedirs(csv_saveto)
if not os.path.exists(df_saveto):
os.makedirs(df_saveto)
########################################################################################
# #read dataframe
# datadf = pd.read_csv(mydir)
# data = datadf.drop(['classes', 'Unnamed: 0'], axis=1).values
# classes = datadf['classes'].values
# feature_labels = list(datadf.drop(['classes', 'Unnamed: 0'], axis=1))
#
# # hyperparameters
# cv = int(codes)
# ntree = 300
# maxdepth = None
# crit = 'entropy'
# ig_thr_range = [0.1, 0.5, 0.9, 1, 1.2, 1.5, 2, 2.5, 3]
# # ig_thr_range = [1, 1.2, 1.5, 2, 2.5, 3]
# corr_thr_range = [0.9, 0.7, 0.5, 0.3, 0.1]
# acc_thr = 0.90
# ###########################################################################################
################################## INFORMATION GAIN RANKING ###############################
# #give dataframe infor and get (1)csv points (2)new df
# Xig , Yig , ig_data , ig_features, igthr, igacc = IG_tuner(data , classes , feature_labels, ig_thr_range, acc_thr,
# cv, ntree, maxdepth, crit)
# print('')
# print 'final ig features: ', len(ig_features)
# print 'thr = ' , igthr, 'avg_acc = ' , igacc
# print ''
#
# #output (1)save dataframe with new features
# print 'saving dataframe'
# ig_newdatadf = pd.DataFrame(data= ig_data, columns= ig_features)
# ig_newdatadf['classes'] = classes
# print 'number of rows(codes)= ', len(ig_newdatadf)
# print 'number of columns(features)= ', len(list(ig_newdatadf))
# # save to csv
# ig_newdatadf.to_csv(os.path.join(df_saveto, 'COPYIGdf_' + year + '_' + codes + '_frequent1sameprob_' + users + '_acc'+str(acc_thr)+'.csv'))
# #ranking
# print 'ranking starts...'
# #1) ranks = sorted by score:[(bgr , imposcore)] 2) ranks_dic = {bgr: (rank , importance acc)}
# # Ranker(ig_data, classes, ig_features, cv, ntree, maxdepth, | |
hearing_impaired_date_collected = Column(DateTime(timezone=False))
marital_status = Column(Integer)
marital_status_date_collected = Column(DateTime(timezone=False))
non_ambulatory = Column(Integer)
non_ambulatory_date_collected = Column(DateTime(timezone=False))
residential_status = Column(Integer)
residential_status_date_collected = Column(DateTime(timezone=False))
visually_impaired = Column(Integer)
visually_impaired_date_collected = Column(DateTime(timezone=False))
reported = Column(Boolean)
fk_person_historical_to_income_and_sources = relationship('IncomeAndSources',
backref='fk_income_and_sources_to_person_historical')
fk_person_historical_to_veteran = relationship('Veteran', backref='fk_veteran_to_person_historical')
fk_person_historical_to_hud_homeless_episodes = relationship('HUDHomelessEpisodes',
backref='fk_hud_homeless_episodes_to_person_historical')
fk_person_historical_to_person_address = relationship('PersonAddress', backref='fk_person_address_to_person_historical')
useexisting = True
class IncomeAndSources(DB.Base, MapBase):
    """ORM mapping for the 'income_and_sources' table: one income record
    attached to a person_historical row within an export."""
    __tablename__ = 'income_and_sources'
    id = Column(Integer, primary_key=True)
    person_historical_index_id = Column(Integer, ForeignKey('person_historical.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    amount = Column(Integer)
    amount_date_collected = Column(DateTime(timezone=False))
    income_source_code = Column(Integer)
    income_source_code_date_collected = Column(DateTime(timezone=False))
    income_source_other = Column(String(32))
    income_source_other_date_collected = Column(DateTime(timezone=False))
    ## HUD 3.0
    income_and_source_id_id_num = Column(String(32))
    income_and_source_id_id_str = Column(String(32))
    income_and_source_id_id_delete_occurred_date = Column(DateTime(timezone=False))
    income_and_source_id_id_delete_effective_date = Column(DateTime(timezone=False))
    income_source_code_date_effective = Column(DateTime(timezone=False))
    income_source_other_date_effective = Column(DateTime(timezone=False))
    receiving_income_source_date_collected = Column(DateTime(timezone=False))
    receiving_income_source_date_effective = Column(DateTime(timezone=False))
    income_source_amount_date_effective = Column(DateTime(timezone=False))
    income_and_source_id_id_delete = Column(Integer)
    income_source_code_data_collection_stage = Column(String(32))
    income_source_other_data_collection_stage = Column(String(32))
    receiving_income_source = Column(Integer)
    receiving_income_source_data_collection_stage = Column(String(32))
    income_source_amount_data_collection_stage = Column(String(32))
    # NOTE(review): 'useexisting' as a bare class attribute mirrors the legacy
    # SQLAlchemy Table keyword; confirm the declarative layer in use actually
    # honors it (modern SQLAlchemy expects __table_args__ extend_existing).
    useexisting = True
class Members(DB.Base, MapBase):
    """ORM mapping for the 'members' table: links a person to a household."""
    __tablename__ = 'members'
    id = Column(Integer, primary_key=True)
    export_index_id = Column(Integer, ForeignKey('export.id'))
    household_index_id = Column(Integer, ForeignKey('household.id'))
    person_index_id = Column(Integer, ForeignKey('person.id'))
    relationship_to_head_of_household = Column(String(32))
    relationship_to_head_of_household_date_collected = Column(DateTime(timezone=False))
    reported = Column(Boolean)
    useexisting = True
class ReleaseOfInformation(DB.Base, MapBase):
    """ORM mapping for the 'release_of_information' table: a person's
    release-of-information consent record, including its effective period."""
    __tablename__ = 'release_of_information'
    id = Column(Integer, primary_key=True)
    person_index_id = Column(Integer, ForeignKey('person.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    release_of_information_idid_num = Column(String(32))
    release_of_information_idid_num_date_collected = Column(DateTime(timezone=False))
    release_of_information_idid_str = Column(String(32))
    release_of_information_idid_str_date_collected = Column(DateTime(timezone=False))
    site_service_idid_num = Column(String(32))
    site_service_idid_num_date_collected = Column(DateTime(timezone=False))
    site_service_idid_str = Column(String(32))
    site_service_idid_str_date_collected = Column(DateTime(timezone=False))
    documentation = Column(String(32))
    documentation_date_collected = Column(DateTime(timezone=False))
    #EffectivePeriod (subtable)
    start_date = Column(String(32))
    start_date_date_collected = Column(DateTime(timezone=False))
    end_date = Column(String(32))
    end_date_date_collected = Column(DateTime(timezone=False))
    release_granted = Column(String(32))
    release_granted_date_collected = Column(DateTime(timezone=False))
    reported = Column(Boolean)
    ## HUD 3.0
    release_of_information_id_data_collection_stage = Column(String(32))
    release_of_information_id_date_effective = Column(DateTime(timezone=False))
    documentation_data_collection_stage = Column(String(32))
    documentation_date_effective = Column(DateTime(timezone=False))
    release_granted_data_collection_stage = Column(String(32))
    release_granted_date_effective = Column(DateTime(timezone=False))
    useexisting = True
class SourceExportLink(DB.Base, MapBase):
    """ORM mapping for the 'source_export_link' association table: ties a
    source to an export and a report."""
    __tablename__ = 'source_export_link'
    id = Column(Integer, primary_key=True)
    source_index_id = Column(Integer, ForeignKey('source.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    report_index_id = Column(String(50), ForeignKey('report.report_id'))
    useexisting = True
class Region(DB.Base, MapBase):
    """ORM mapping for the 'region' table: a geographic region attached to a
    site service, with type/description and their collection metadata."""
    __tablename__ = 'region'
    id = Column(Integer, primary_key=True)
    export_index_id = Column(Integer, ForeignKey('export.id'))
    report_index_id = Column(String(50), ForeignKey('report.report_id'))
    region_id_id_num = Column(String(50))
    region_id_id_str = Column(String(32))
    site_service_id = Column(String(50))
    region_type = Column(String(50))
    region_type_date_collected = Column(DateTime(timezone=False))
    region_type_date_effective = Column(DateTime(timezone=False))
    region_type_data_collection_stage = Column(String(32))
    region_description = Column(String(30))
    region_description_date_collected = Column(DateTime(timezone=False))
    region_description_date_effective = Column(DateTime(timezone=False))
    region_description_data_collection_stage = Column(String(32))
    useexisting = True
class Agency(DB.Base, MapBase):
    """ORM mapping for the 'agency' table: an AIRS-keyed provider agency
    record within an export/report."""
    __tablename__ = 'agency'
    id = Column(Integer, primary_key=True)
    export_index_id = Column(Integer, ForeignKey('export.id'))
    report_index_id = Column(String(50), ForeignKey('report.report_id'))
    agency_delete = Column(Integer)
    agency_delete_occurred_date = Column(DateTime(timezone=False))
    agency_delete_effective_date = Column(DateTime(timezone=False))
    airs_key = Column(String(50))
    airs_name = Column(String(50))
    agency_description = Column(String(50))
    irs_status = Column(String(50))
    source_of_funds = Column(String(50))
    record_owner = Column(String(50))
    fein = Column(String(50))
    year_inc = Column(String(50))
    annual_budget_total = Column(String(50))
    legal_status = Column(String(50))
    exclude_from_website = Column(String(50))
    exclude_from_directory = Column(String(50))
    useexisting = True
class AgencyChild(DB.Base, MapBase):
    """ORM mapping for the 'agency_child' table: a child record hanging off
    an agency (only keys, no payload columns)."""
    __tablename__ = 'agency_child'
    id = Column(Integer, primary_key=True)
    export_index_id = Column(Integer, ForeignKey('export.id'))
    report_index_id = Column(String(50), ForeignKey('report.report_id'))
    agency_index_id = Column(Integer, ForeignKey('agency.id'))
    useexisting = True
class Service(DB.Base, MapBase):
    """ORM mapping for the 'service' table: a provider service definition
    (AIRS keyed), with its effective period and target populations."""
    __tablename__ = 'service'
    id = Column(Integer, primary_key=True)
    service_id = Column(String(50))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    report_index_id = Column(String(50), ForeignKey('report.report_id'))
    service_delete = Column(Integer)
    service_delete_occurred_date = Column(DateTime(timezone=False))
    service_delete_effective_date = Column(DateTime(timezone=False))
    airs_key = Column(String(50))
    airs_name = Column(String(50))
    coc_code = Column(String(5))
    configuration = Column(String(50))
    direct_service_code = Column(String(50))
    grantee_identifier = Column(String(10))
    individual_family_code = Column(String(50))
    residential_tracking_method = Column(String(50))
    service_type = Column(String(50))
    jfcs_service_type = Column(String(50))
    service_effective_period_start_date = Column(DateTime(timezone=False))
    service_effective_period_end_date = Column(DateTime(timezone=False))
    service_recorded_date = Column(DateTime(timezone=False))
    target_population_a = Column(String(50))
    target_population_b = Column(String(50))
    useexisting = True
class Site(DB.Base, MapBase):
    """ORM mapping for the 'site' table: a physical location of an agency,
    with physical and mailing addresses plus accessibility metadata.

    Fix: the original class defined 'physical_address_country' and
    'mailing_address_country' twice each; the second, identical assignment
    silently overwrote the first. The duplicates are removed — the mapped
    schema is unchanged.
    """
    __tablename__ = 'site'
    id = Column(Integer, primary_key=True)
    export_index_id = Column(Integer, ForeignKey('export.id'))
    report_index_id = Column(String(50), ForeignKey('report.report_id'))
    agency_index_id = Column(Integer, ForeignKey('agency.id'))
    #agency_location_index_id = Column(Integer, ForeignKey('agency_location.id'))
    site_delete = Column(Integer)
    site_delete_occurred_date = Column(DateTime(timezone=False))
    site_delete_effective_date = Column(DateTime(timezone=False))
    airs_key = Column(String(50))
    airs_name = Column(String(50))
    site_description = Column(String(50))
    # Physical address
    physical_address_pre_address_line = Column(String(100))
    physical_address_line_1 = Column(String(100))
    physical_address_line_2 = Column(String(100))
    physical_address_city = Column(String(50))
    physical_address_country = Column(String(50))
    physical_address_state = Column(String(50))
    physical_address_zip_code = Column(String(50))
    physical_address_reason_withheld = Column(String(50))
    physical_address_confidential = Column(String(50))
    physical_address_description = Column(String(50))
    # Mailing address
    mailing_address_pre_address_line = Column(String(100))
    mailing_address_line_1 = Column(String(100))
    mailing_address_line_2 = Column(String(100))
    mailing_address_city = Column(String(50))
    mailing_address_country = Column(String(50))
    mailing_address_state = Column(String(50))
    mailing_address_zip_code = Column(String(50))
    mailing_address_reason_withheld = Column(String(50))
    mailing_address_confidential = Column(String(50))
    mailing_address_description = Column(String(50))
    no_physical_address_description = Column(String(50))
    no_physical_address_explanation = Column(String(50))
    disabilities_access = Column(String(50))
    physical_location_description = Column(String(50))
    bus_service_access = Column(String(50))
    public_access_to_transportation = Column(String(50))
    year_inc = Column(String(50))
    annual_budget_total = Column(String(50))
    legal_status = Column(String(50))
    exclude_from_website = Column(String(50))
    exclude_from_directory = Column(String(50))
    agency_key = Column(String(50))
    useexisting = True
class SiteService(DB.Base, MapBase):
    """ORM mapping for the 'site_service' table: joins a site, a service and
    an agency location, with per-site service attributes."""
    __tablename__ = 'site_service'
    id = Column(Integer, primary_key=True)
    site_service_id = Column(String(50))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    report_index_id = Column(String(50), ForeignKey('report.report_id'))
    site_index_id = Column(Integer, ForeignKey('site.id'))
    service_index_id = Column(Integer, ForeignKey(Service.id))
    agency_location_index_id = Column(Integer, ForeignKey('agency_location.id'))
    site_service_delete = Column(Integer)
    site_service_delete_occurred_date = Column(DateTime(timezone=False))
    site_service_delete_effective_date = Column(DateTime(timezone=False))
    name = Column(String(50))
    key = Column(String(50))
    description = Column(String(50))
    fee_structure = Column(String(50))
    gender_requirements = Column(String(50))
    area_flexibility = Column(String(50))
    service_not_always_available = Column(String(50))
    service_group_key = Column(String(50))
    site_id = Column(String(50))
    geographic_code = Column(String(50))
    geographic_code_date_collected = Column(DateTime(timezone=False))
    geographic_code_date_effective = Column(DateTime(timezone=False))
    geographic_code_data_collection_stage = Column(String(50))
    housing_type = Column(String(50))
    housing_type_date_collected = Column(DateTime(timezone=False))
    housing_type_date_effective = Column(DateTime(timezone=False))
    housing_type_data_collection_stage = Column(String(50))
    principal = Column(String(50))
    site_service_effective_period_start_date = Column(DateTime(timezone=False))
    site_service_effective_period_end_date = Column(DateTime(timezone=False))
    site_service_recorded_date = Column(DateTime(timezone=False))
    site_service_type = Column(String(50))
    useexisting = True
class FundingSource(DB.Base, MapBase):
    """ORM mapping for the 'funding_source' table: a funding source attached
    to a service and/or a service event."""
    __tablename__ = 'funding_source'
    id = Column(Integer, primary_key=True)
    service_index_id = Column(Integer, ForeignKey('service.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    service_event_index_id = Column(Integer, ForeignKey('service_event.id'))
    funding_source_id_id_num = Column(String(50))
    funding_source_id_id_str = Column(String(32))
    funding_source_id_delete = Column(String(50))
    funding_source_id_delete_occurred_date = Column(DateTime(timezone=False))
    funding_source_id_delete_effective_date = Column(DateTime(timezone=False))
    federal_cfda_number = Column(String(50))
    receives_mckinney_funding = Column(String(50))
    advance_or_arrears = Column(String(50))
    financial_assistance_amount = Column(String(50))
    useexisting = True
class ResourceInfo(DB.Base, MapBase):
    """ORM mapping for the 'resource_info' table: directory/referral
    availability metadata for an agency or site service."""
    __tablename__ = 'resource_info'
    id = Column(Integer, primary_key=True)
    agency_index_id = Column(Integer, ForeignKey('agency.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    site_service_index_id = Column(Integer, ForeignKey('site_service.id'))
    resource_specialist = Column(String(50))
    available_for_directory = Column(String(50))
    available_for_referral = Column(String(50))
    available_for_research = Column(String(50))
    date_added = Column(DateTime(timezone=False))
    date_last_verified = Column(DateTime(timezone=False))
    date_of_last_action = Column(DateTime(timezone=False))
    last_action_type = Column(String(50))
    useexisting = True
class Inventory(DB.Base, MapBase):
    """ORM mapping for the 'inventory' table: bed/unit inventory counts for a
    service or site service, with HMIS participation periods."""
    __tablename__ = 'inventory'
    id = Column(Integer, primary_key=True)
    service_index_id = Column(Integer, ForeignKey(Service.id))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    site_service_index_id = Column(Integer, ForeignKey('site_service.id'))
    inventory_delete = Column(Integer)
    inventory_delete_occurred_date = Column(DateTime(timezone=False))
    # NOTE(review): the name below looks like a typo for
    # 'inventory_delete_effective_date'; renaming would change the mapped
    # column, so it is documented rather than changed.
    inventory_delete_effective_delete = Column(DateTime(timezone=False))
    hmis_participation_period_start_date = Column(DateTime(timezone=False))
    hmis_participation_period_end_date = Column(DateTime(timezone=False))
    inventory_id_id_num = Column(String(50))
    inventory_id_id_str = Column(String(32))
    bed_inventory = Column(String(50))
    bed_availability = Column(String(50))
    bed_type = Column(String(50))
    bed_individual_family_type = Column(String(50))
    chronic_homeless_bed = Column(String(50))
    domestic_violence_shelter_bed = Column(String(50))
    household_type = Column(String(50))
    hmis_participating_beds = Column(String(50))
    inventory_effective_period_start_date = Column(DateTime(timezone=False))
    inventory_effective_period_end_date = Column(DateTime(timezone=False))
    inventory_recorded_date = Column(DateTime(timezone=False))
    unit_inventory = Column(String(50))
    useexisting = True
class AgeRequirements(DB.Base, MapBase):
    """ORM mapping for the 'age_requirements' table: gender/age eligibility
    bounds for a site service."""
    __tablename__ = 'age_requirements'
    id = Column(Integer, primary_key=True)
    site_service_index_id = Column(Integer, ForeignKey('site_service.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    gender = Column(String(50))
    minimum_age = Column(String(50))
    maximum_age = Column(String(50))
    useexisting = True
class AidRequirements(DB.Base, MapBase):
    """ORM mapping for the 'aid_requirements' table: aid eligibility text for
    a site service."""
    __tablename__ = 'aid_requirements'
    id = Column(Integer, primary_key=True)
    site_service_index_id = Column(Integer, ForeignKey('site_service.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    aid_requirements = Column(String(50))
    useexisting = True
class Aka(DB.Base, MapBase):
    """ORM mapping for the 'aka' table: an alternate ("also known as") name
    for an agency, site, or agency location."""
    __tablename__ = 'aka'
    id = Column(Integer, primary_key=True)
    agency_index_id = Column(Integer, ForeignKey('agency.id'))
    site_index_id = Column(Integer, ForeignKey('site.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    # SBB20100914 Added Agency Location foreign key
    agency_location_index_id = Column(Integer, ForeignKey('agency_location.id'))
    name = Column(String(50))
    confidential = Column(String(50))
    description = Column(String(50))
    useexisting = True
class ApplicationProcess(DB.Base, MapBase):
    """ORM mapping for the 'application_process' table: one step of a site
    service's application procedure."""
    __tablename__ = 'application_process'
    id = Column(Integer, primary_key=True)
    site_service_index_id = Column(Integer, ForeignKey('site_service.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    step = Column(String(50))
    description = Column(String(50))
    useexisting = True
class Assignment(DB.Base, MapBase):
    """ORM mapping for the 'assignment' table: assignment of an HMIS asset to
    a person or household."""
    __tablename__ = 'assignment'
    id = Column(Integer, primary_key=True)
    hmis_asset_index_id = Column(Integer, ForeignKey('hmis_asset.id'))
    export_index_id = Column(Integer, ForeignKey('export.id'))
    assignment_id_id_num = Column(String(50))
    assignment_id_id_str = Column(String(32))
    assignment_id_delete = Column(Integer)
    assignment_id_delete_occurred_date = Column(DateTime(timezone=False))
    assignment_id_delete_effective_date = Column(DateTime(timezone=False))
    person_id_id_num = Column(String(50))
    person_id_id_str = Column(String(32))
    household_id_id_num = Column(String(50))
    household_id_id_str = Column(String(32))
    useexisting = True
class AssignmentPeriod(DB.Base, MapBase):
    """ORM mapping for the 'assignment_period' table: the start/end dates of
    an assignment."""
    __tablename__ = 'assignment_period'
    id = Column(Integer, primary_key=True)
    export_index_id = Column(Integer, ForeignKey('export.id'))
    assignment_index_id = Column(Integer, ForeignKey(Assignment.id))
    assignment_period_start_date = Column(DateTime(timezone=False))
    assignment_period_end_date = Column(DateTime(timezone=False))
    useexisting = True
class Call(DB.Base, MapBase):
    """ORM mapping for the 'call' table: a call-center call record with
    caller location/contact fields."""
    __tablename__ = 'call'
    id = Column(Integer, primary_key=True)
    site_service_id = Column(String(50))
    call_id_id_num = Column(String(50))
    call_id_id_str = Column(String(32))
    call_time = Column(DateTime(timezone=False))
    call_duration = Column(Interval())
    caseworker_id_id_num = Column(String(50))
    caseworker_id_id_str = Column(String(32))
    # FBY : TBC requested|required fields
    caller_zipcode = Column(String(10))
    caller_city = Column(String(128))
    caller_state = Column(String(2))
    caller_home_phone = Column(String(10))
    # NOTE(review): unlike the sibling models this class defines no
    # export_index_id foreign key and no 'useexisting = True' — confirm
    # whether that is intentional.
class ChildEnrollmentStatus(DB.Base, MapBase):
__tablename__ = 'child_enrollment_status'
id = Column(Integer, primary_key=True)
person_historical_index_id = Column(Integer, ForeignKey('person_historical.id'))
export_index_id = Column(Integer, ForeignKey('export.id'))
child_enrollment_status_id_id_num = Column(String(50))
child_enrollment_status_id_id_str = Column(String(32))
child_enrollment_status_id_delete = Column(Integer)
child_enrollment_status_id_delete_occurred_date = Column(DateTime(timezone=False))
child_enrollment_status_id_delete_effective_date = Column(DateTime(timezone=False))
child_currently_enrolled_in_school | |
self.x0 = x0
return
    def update_ev_cum(self, x2, x1):
        """Update the cumulative eigenvalue bookkeeping after a balance
        between the points x1 and x2.

        Mutates the two-element entries ``self.ev_cum[x1]`` and
        ``self.ev_cum[x2]`` in place: x1's entry is decremented (first
        slot while it is positive, otherwise the second slot), and x2's
        entry is incremented (second slot while it is negative,
        otherwise the first slot).
        """
        ev_cum_x1, ev_cum_x2 = self.ev_cum[x1], self.ev_cum[x2]
        if ev_cum_x1[0] > 0:
            ev_cum_x1[0] -= 1
        else:
            ev_cum_x1[1] -= 1
        if ev_cum_x2[1] < 0:
            ev_cum_x2[1] += 1
        else:
            ev_cum_x2[0] += 1
logger.enter("normalize")
state = State(m, x, eps, seed)
T = identity_matrix(m.base_ring(), m.nrows())
if state.is_normalized():
logger.info("already normalized")
i = 0
while not state.is_normalized():
i += 1
m = fuchsia_simplify(m, x)
logger.info("step %s" % i)
balances = find_balances(m, x, eps, state)
b = select_balance(balances, eps, state)
if b is None:
raise FuchsiaError("can not balance matrix")
logger.info(" balancing x = %s and x = %s" % (b[1],b[2]))
if logger.is_verbose():
logger.debug("\n use the balance:\n %s\n" % b)
cond, x1, x2, a0_eval, b0_eval, a0_evec, b0_evec, scale = b
if cond == 1:
P = cross_product(a0_evec, b0_evec) / scale
m = balance_transform(m, P, x1, x2, x)
T0 = balance(P, x1, x2, x)
state.update_ev_cum(x1, x2)
else:
P = cross_product(b0_evec, a0_evec) / scale
m = balance_transform(m, P, x2, x1, x)
T0 = balance(P, x2, x1, x)
state.update_ev_cum(x2, x1)
T = fuchsia_simplify(T*T0, x)
logger.exit("normalize")
return m, T
def find_balances(m, x, eps, state={}):
    """Yield candidate balances between pairs of singular points of m.

    For every pair (x1, x2) produced by ``state.pairs()``, compute the
    matrix residues at both points once (cached in ``residues``), then
    pair eigenvectors of the two residues whose eigenvalue limits at
    eps -> 0 satisfy the selection conditions. Each yielded balance is
    a list ``[cond, x1, x2, a0_eval, b0_eval, a0_evec, b0_evec, scale]``
    where cond is 1 (right/left eigenvector pairing, a0 eigenvalue
    limit < -0.5) or 2 (left/right pairing, limit >= 0.5).

    NOTE(review): ``state={}`` is a mutable default argument, and a
    plain dict has no ``pairs()`` method — callers are expected to
    always pass a real state object; verify.
    """
    residues = {}
    for x1, x2 in state.pairs():
        logger.debug("trying to balance x = %s and x = %s" % (x1,x2))
        # Compute each point's residue at most once per call.
        for xi in [x1,x2]:
            if xi not in residues:
                residues[xi] = matrix_residue(m, x, xi)
        a0, b0 = residues[x1], residues[x2]
        a0_evr, b0_evl = eigenvectors_right(a0), eigenvectors_left(b0)
        if logger.is_verbose():
            msg = "\n  Eigenvalues:\n"
            msg += "  x = %s:\n" % x1
            a0_evals = [];
            for ev, evec, emult in a0_evr:
                a0_evals += [ev]*emult
            msg += "  %s\n" % str(a0_evals)
            msg += "  x = %s:\n" % x2
            b0_evals = [];
            for ev, evec, emult in b0_evl:
                b0_evals += [ev]*emult
            msg += "  %s\n" % str(b0_evals)
            logger.debug(msg)
        balances_1 = find_balances_by_cond(a0_evr, b0_evl, lambda a0_eval, b0_eval: limit_fixed(a0_eval, eps, 0) < -0.5)
        for balance in balances_1:
            balance = [1, x1, x2] + balance
            yield balance
        a0_evl, b0_evr = eigenvectors_left(a0), eigenvectors_right(b0)
        balances_2 = find_balances_by_cond(a0_evl, b0_evr, lambda a0_eval, b0_eval: limit_fixed(a0_eval, eps, 0) >= 0.5)
        for balance in balances_2:
            balance = [2, x1, x2] + balance
            yield balance
def find_balances_by_cond(a0_ev, b0_ev, cond):
    """Pair eigen-data of two residues into candidate balances.

    For every combination of an (eigenvalue, eigenvectors, multiplicity)
    triple from a0_ev with one from b0_ev that satisfies ``cond``, emit
    one candidate per eigenvector pair, skipping pairs whose scalar
    product simplifies to zero. Returns a list of
    ``[a0_eval, b0_eval, a0_evec, b0_evec, scale]`` lists.
    """
    found = []
    for left_val, left_vecs, _left_mult in a0_ev:
        for right_val, right_vecs, _right_mult in b0_ev:
            if not cond(left_val, right_val):
                logger.debug("Balance rejected:\n  a0_eval = %s\n  b0_eval = %s" % (left_val, right_val))
                continue
            for left_vec in left_vecs:
                for right_vec in right_vecs:
                    scale = fuchsia_simplify(dot_product(left_vec, right_vec))
                    candidate = [left_val, right_val, left_vec, right_vec, scale]
                    # A zero scalar product cannot be normalized away.
                    if scale == 0:
                        logger.debug("Balance rejected:\n  a0_eval = %s\n  b0_eval = %s\n  a0_evec = %s\n  b0_evec = %s\n  scale = %s" % tuple(candidate))
                        continue
                    logger.debug("Balance found:\n  a0_eval = %s\n  b0_eval = %s\n  a0_evec = %s\n  b0_evec = %s\n  scale = %s" % tuple(candidate))
                    found.append(candidate)
    return found
def select_balance(balances, eps, state={}):
    """Choose which candidate balance to apply next.

    Preference order:
      1. a balance whose eigenvalue limits at eps -> 0 lie on the right
         sides of the [-0.5, 0.5) window and whose scale has eps-degree
         below 4 — returned immediately;
      2. otherwise, among those qualifying, the one with the smallest
         eps-degree;
      3. otherwise a random balance (via ``state.random``) among the
         candidates involving the distinguished point ``state.x0``,
         selecting and remembering x0 on first use.

    Returns None when no candidate involves x0.

    NOTE(review): ``state={}`` is a mutable default and a plain dict has
    no ``.x0``/``.random`` attributes — callers must pass a real state
    object; verify.
    """
    min_degree, min_balance = None, None
    # Materialize the (generator) candidates, since we may need a second pass.
    bs = []
    for b in balances:
        cond, x1, x2, a0_eval, b0_eval, a0_evec, b0_evec, scale = b
        if (cond == 1) and limit_fixed(a0_eval, eps, 0) < -0.5 and \
                limit_fixed(b0_eval, eps, 0) >= 0.5:
            degree = max(scale.numerator().degree(eps), scale.denominator().degree(eps))
            if degree < 4:
                return b
            if (min_degree is None) or (min_degree > degree):
                min_degree = degree
                min_balance = b
        elif (cond == 2) and limit_fixed(a0_eval, eps, 0) >= 0.5 and \
                limit_fixed(b0_eval, eps, 0) < -0.5:
            degree = max(scale.numerator().degree(eps), scale.denominator().degree(eps))
            if degree < 4:
                return b
            if (min_degree is None) or (min_degree > degree):
                min_degree = degree
                min_balance = b
        bs.append(b)
    if min_balance is not None:
        return min_balance
    # Fall back: pick (and persist) a distinguished point x0, then choose
    # randomly among the balances that involve it.
    x0 = state.x0
    if x0 is None:
        for b in bs:
            cond, x1, x2, ev1, ev2 = b[:5]
            if cond == 1:
                x0 = x2
                break
            if cond == 2:
                x0 = x1
                break
        logger.info("  select x0 = %s" % x0)
        state.x0 = x0
    balances_x0 = [b for b in bs if (b[0] == 1 and b[2] == x0) or (b[0] == 2 and b[1] == x0)]
    b = state.random.choice(balances_x0) if balances_x0 else None
    return b
def eigenvectors_left(m):
    """Return ``m.eigenvectors_left()``, memoized on ``m._cache``.

    NOTE(review): unlike eigenvectors_right(), the result here is passed
    through simplify() before being cached — confirm the asymmetry is
    intentional.
    """
    if m._cache is None:
        m._cache = {}
    if "eigenvectors_left" not in m._cache:
        res = simplify(m.eigenvectors_left())
        m._cache["eigenvectors_left"] = res
    return m._cache["eigenvectors_left"]
def eigenvectors_right(m):
    """Return ``m.eigenvectors_right()``, computing it at most once.

    The result is memoized in the dict ``m._cache`` (created here if the
    matrix has none yet) under the key "eigenvectors_right".
    """
    cache = m._cache
    if cache is None:
        cache = m._cache = {}
    try:
        return cache["eigenvectors_right"]
    except KeyError:
        cache["eigenvectors_right"] = m.eigenvectors_right()
        return cache["eigenvectors_right"]
#==================================================================================================
# Step III: Factorize
#==================================================================================================
def gensym():
    """Return a fresh anonymous symbolic variable.

    The symbol is also registered in ``SR.symbols`` under its string
    name — presumably so it can later be resolved by name during
    parsing/substitution; verify.
    """
    sym = SR.symbol()
    SR.symbols[str(sym)] = sym
    return sym
def factorize(M, x, epsilon, b=None, seed=0):
    """Given a normalized Fuchsian system of differential equations:
        dF/dx = M(x,epsilon)*F,
    try to find a transformation that will factor out an epsilon
    from M. Return a transformed M (proportional to epsilon)
    and T. Raise FuchsiaError if epsilon can not be factored.

    :param b: optional block structure as (offset, size) pairs; when
        given, only the corresponding lower-block entries of T are
        treated as unknowns.
    :param seed: seed for the random substitution of free variables.
    """
    logger.info("-> factorize")
    n = M.nrows()
    M = fuchsia_simplify(M, x)
    # Already proportional to epsilon? Then there is nothing to do.
    if epsilon not in expand(M/epsilon).variables():
        logger.info("  already in epsilon form")
        logger.info("<- factorize")
        return M, identity_matrix(SR, n)
    rng = Random(seed)
    # mu is a stand-in for epsilon on the right-hand side of the
    # commutation equations constructed below.
    mu = gensym()
    if b is None:
        # Full n*n matrix of unknowns.
        T_symbols = [gensym() for i in range(n*n)]
        T = matrix(SR, n, n, T_symbols)
    else:
        # Unknowns restricted to the blocks described by b.
        T, T_symbols = matrix(SR, n), []
        for ki,ni in b:
            for i in range(ki,ki+ni):
                for j in range(ki+ni):
                    sym = gensym()
                    T[i,j] = sym
                    T_symbols.append(sym)
    eqs = []
    for point, prank in singularities(M, x).items():
        assert prank == 0
        logger.debug("  processing point x = %s" % point)
        R = matrix_c0(M, x, point, 0)
        R = fuchsia_simplify(R)
        # Epsilon factors out iff one T satisfies
        # (R/epsilon)*T == T*(R|epsilon->mu)/mu at every singular point.
        eq = (R/epsilon)*T-T*(R.subs({epsilon: mu})/mu)
        eq = fuchsia_simplify(eq)
        eqs.extend(eq.list())
    logger.info("  found %d equations with %d unknowns" % (len(eqs), len(T_symbols)))
    solutions = fuchsia_solve(eqs, T_symbols)
    for solution in solutions:
        S = T.subs(solution)
        # Right now S likely has a number of free variables in
        # it; we can set them to arbitrary values, as long as
        # it'll make S invertible.
        rndrange = 0
        while True:
            try:
                sT = S.subs([
                    e==rng.randint(-rndrange, rndrange)
                    for e in S.variables() if e != epsilon
                ])
                sT = fuchsia_simplify(sT,x)
                M = fuchsia_simplify(transform(M, x, sT), x)
                # We're leaking a bunch of temporary variables here,
                # which accumulate in SR.variables, but who cares?
                logger.info("<- factorize")
                return M, sT
            except (ZeroDivisionError, ValueError):
                rndrange += 1 + rndrange//16
                # We've tried a bunch of substitutions, and they didn't
                # work. Is the matrix at all invertible? Let's check.
                if rndrange == 16 and not S.is_invertible():
                    break
    raise FuchsiaError("can not factor epsilon")
#==================================================================================================
# Helpers
#==================================================================================================
def matrix_complexity(M):
    """Rough complexity measure of a matrix: the length of the textual
    form of its entry list (shorter means simpler)."""
    entries = M.list()
    return len(str(entries))
def simplify_by_jordanification(M, x):
    """Try to simplify matrix M by constant transformations that
    transform M's residues into their Jordan forms. Return the
    simplified matrix and the transformation. If none of the
    attempted transformations reduce M's complexity (as measured
    by 'matrix_complexity()'), return the original matrix and
    the identity transformation.
    """
    minM = M
    minC = matrix_complexity(M)
    minT = identity_matrix(M.base_ring(), M.nrows())
    for point, prank in singularities(M, x).items():
        # Jordanize the leading expansion coefficient at this singular
        # point and apply the same similarity transformation globally.
        R = matrix_c0(M, x, point, prank)
        J, T = R.jordan_form(transformation=True)
        MM = fuchsia_simplify(transform(M, x, T), x)
        C = matrix_complexity(MM)
        # Keep the candidate only if it actually shrinks M.
        if C < minC:
            minM = MM
            minC = C
            minT = T
    return minM, minT
def common_factor(expressions, filter):
    """Factorize given expressions, select those factors for
    which 'filter(factor)' is True, and return the product of
    factors common to all the expressions.
    Examples:
    >>> x = var("x")
    >>> common_factor([x*x-1, x+1], lambda f: True)
    x + 1
    >>> common_factor([1/x**2, 2/x**3, 3/x**4], lambda f: True)
    x^(-2)
    Note that if there is a mix of positive and negative exponents
    of a given factor, this function will use (one of) the most
    frequently occurring exponent:
    >>> common_factor([x, 1/x, 2/x**2, 3/x], lambda f: True)
    1/x
    """
    # factor -> {exponent: number of expressions with that exponent};
    # the exponent 0 stands for "factor absent from the expression".
    factor2exp2count = defaultdict(lambda: defaultdict(lambda: 0))
    nexpr = 0
    for i, expr in enumerate(expressions):
        nexpr += 1
        for factor, n in dict(expr.factor_list()).items():
            if not filter(factor): continue
            exp2count = factor2exp2count[factor]
            # First sighting: record the earlier absences (exponent 0)
            # before recording n, preserving the key insertion order the
            # tie-break in max() below relies upon.
            if not exp2count and i > 0:
                exp2count[0] = i
            exp2count[n] += 1
    # BUGFIX: the previous version only registered an absence when the
    # factor was missing from the *last* expression (it tested against
    # the loop-leaked 'factors' dict), so absences in the middle of the
    # list went uncounted. Derive the total number of absences instead.
    for factor, exp2count in factor2exp2count.items():
        appearances = sum(c for e, c in exp2count.items() if e != 0)
        absent = nexpr - appearances
        if absent > 0:
            exp2count[0] = absent
    result = SR(1)
    for factor, exp2count in factor2exp2count.items():
        exps = exp2count.keys()
        minn = min(exps)
        maxn = max(exps)
        # All-positive or all-negative exponents: take the extreme one
        # closest to zero, i.e. the power shared by every expression.
        if minn > 0: result *= factor**minn
        if maxn < 0: result *= factor**maxn
        if minn <= 0 and maxn >= 0:
            # Mixed signs: use the most frequently occurring exponent.
            bestn = max(exps, key=lambda exp: exp2count[exp])
            result *= factor**bestn
    return result
def simplify_by_factorization(M, x):
"""Try to simplify matrix M by a constant transformation
that extracts | |
days of the week and hours that maintenance will be performed.
"""
pulumi.set(__self__, "preference", preference)
if days_of_weeks is not None:
pulumi.set(__self__, "days_of_weeks", days_of_weeks)
if hours_of_days is not None:
pulumi.set(__self__, "hours_of_days", hours_of_days)
if lead_time_in_weeks is not None:
pulumi.set(__self__, "lead_time_in_weeks", lead_time_in_weeks)
if months is not None:
pulumi.set(__self__, "months", months)
if weeks_of_months is not None:
pulumi.set(__self__, "weeks_of_months", weeks_of_months)
    @property
    @pulumi.getter
    def preference(self) -> pulumi.Input[str]:
        """
        (Updatable) The maintenance window scheduling preference.
        """
        return pulumi.get(self, "preference")
    @preference.setter
    def preference(self, value: pulumi.Input[str]):
        """Set the maintenance window scheduling preference."""
        pulumi.set(self, "preference", value)
    @property
    @pulumi.getter(name="daysOfWeeks")
    def days_of_weeks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsDaysOfWeekArgs']]]]:
        """
        (Updatable) Days during the week when maintenance should be performed.
        """
        return pulumi.get(self, "days_of_weeks")
    @days_of_weeks.setter
    def days_of_weeks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsDaysOfWeekArgs']]]]):
        """Set the days of the week during which maintenance may run."""
        pulumi.set(self, "days_of_weeks", value)
    @property
    @pulumi.getter(name="hoursOfDays")
    def hours_of_days(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:
        """
        (Updatable) The window of hours during the day when maintenance should be performed. The window is a 4 hour slot. Valid values are
        * 0 - represents time slot 0:00 - 3:59 UTC - 4 - represents time slot 4:00 - 7:59 UTC - 8 - represents time slot 8:00 - 11:59 UTC - 12 - represents time slot 12:00 - 15:59 UTC - 16 - represents time slot 16:00 - 19:59 UTC - 20 - represents time slot 20:00 - 23:59 UTC
        """
        return pulumi.get(self, "hours_of_days")
    @hours_of_days.setter
    def hours_of_days(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]):
        """Set the 4-hour maintenance time slots (see the getter for valid values)."""
        pulumi.set(self, "hours_of_days", value)
    @property
    @pulumi.getter(name="leadTimeInWeeks")
    def lead_time_in_weeks(self) -> Optional[pulumi.Input[int]]:
        """
        (Updatable) Lead time window allows user to set a lead time to prepare for a down time. The lead time is in weeks and valid value is between 1 to 4.
        """
        return pulumi.get(self, "lead_time_in_weeks")
    @lead_time_in_weeks.setter
    def lead_time_in_weeks(self, value: Optional[pulumi.Input[int]]):
        """Set the maintenance lead time, in weeks (valid range 1-4)."""
        pulumi.set(self, "lead_time_in_weeks", value)
    @property
    @pulumi.getter
    def months(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsMonthArgs']]]]:
        """
        (Updatable) Months during the year when maintenance should be performed.
        """
        return pulumi.get(self, "months")
    @months.setter
    def months(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutonomousExadataInfrastructureMaintenanceWindowDetailsMonthArgs']]]]):
        """Set the months of the year during which maintenance may run."""
        pulumi.set(self, "months", value)
    @property
    @pulumi.getter(name="weeksOfMonths")
    def weeks_of_months(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:
        """
        (Updatable) Weeks during the month when maintenance should be performed. Weeks start on the 1st, 8th, 15th, and 22nd days of the month, and have a duration of 7 days. Weeks start and end based on calendar dates, not days of the week. For example, to allow maintenance during the 2nd week of the month (from the 8th day to the 14th day of the month), use the value 2. Maintenance cannot be scheduled for the fifth week of months that contain more than 28 days. Note that this parameter works in conjunction with the daysOfWeek and hoursOfDay parameters to allow you to specify specific days of the week and hours that maintenance will be performed.
        """
        return pulumi.get(self, "weeks_of_months")
    @weeks_of_months.setter
    def weeks_of_months(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]):
        """Set the calendar weeks of the month during which maintenance may run."""
        pulumi.set(self, "weeks_of_months", value)
@pulumi.input_type
class AutonomousExadataInfrastructureMaintenanceWindowDetailsDaysOfWeekArgs:
    """A day-of-week entry of the maintenance window details."""
    def __init__(__self__, *,
                 name: pulumi.Input[str]):
        """
        :param pulumi.Input[str] name: (Updatable) Name of the day of the week.
        """
        pulumi.set(__self__, "name", name)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        (Updatable) Name of the day of the week.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class AutonomousExadataInfrastructureMaintenanceWindowDetailsMonthArgs:
    """A month entry of the maintenance window details (name is required)."""
    def __init__(__self__, *,
                 name: pulumi.Input[str]):
        """
        :param pulumi.Input[str] name: (Updatable) Name of the month of the year.
        """
        pulumi.set(__self__, "name", name)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        (Updatable) Name of the month of the year.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class AutonomousExadataInfrastructureMaintenanceWindowMonthArgs:
    """A month entry of the reported maintenance window (name is optional)."""
    def __init__(__self__, *,
                 name: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] name: (Updatable) Name of the month of the year.
        """
        if name is not None:
            pulumi.set(__self__, "name", name)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Name of the month of the year.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class BackupDestinationAssociatedDatabaseArgs:
    """Describes a database associated with a backup destination.

    Autogenerated provider input type.  NOTE: the `id` property mirrors the
    cloud API field name even though it shadows the Python builtin.
    """

    def __init__(__self__, *,
                 db_name: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] db_name: The display name of the database that is associated with the backup destination.
        :param pulumi.Input[str] id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup destination.
        """
        if db_name is not None:
            pulumi.set(__self__, "db_name", db_name)
        if id is not None:
            pulumi.set(__self__, "id", id)

    @property
    @pulumi.getter(name="dbName")
    def db_name(self) -> Optional[pulumi.Input[str]]:
        """
        The display name of the database that is associated with the backup destination.
        """
        return pulumi.get(self, "db_name")

    @db_name.setter
    def db_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "db_name", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup destination.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
@pulumi.input_type
class BackupDestinationMountTypeDetailsArgs:
    """Mount configuration for an NFS backup destination.

    Autogenerated provider input type.  `mount_type` is required; the NFS
    fields apply depending on the selected mount type.
    """

    def __init__(__self__, *,
                 mount_type: pulumi.Input[str],
                 local_mount_point_path: Optional[pulumi.Input[str]] = None,
                 nfs_server_export: Optional[pulumi.Input[str]] = None,
                 nfs_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] mount_type: Mount type for backup destination.
        :param pulumi.Input[str] local_mount_point_path: The local directory path on each VM cluster node where the NFS server location is mounted. The local directory path and the NFS server location must each be the same across all of the VM cluster nodes. Ensure that the NFS mount is maintained continuously on all of the VM cluster nodes.
        :param pulumi.Input[str] nfs_server_export: Specifies the directory on which to mount the file system
        :param pulumi.Input[Sequence[pulumi.Input[str]]] nfs_servers: IP addresses for NFS Auto mount.
        """
        pulumi.set(__self__, "mount_type", mount_type)
        # Optional attributes are only recorded when supplied.
        if local_mount_point_path is not None:
            pulumi.set(__self__, "local_mount_point_path", local_mount_point_path)
        if nfs_server_export is not None:
            pulumi.set(__self__, "nfs_server_export", nfs_server_export)
        if nfs_servers is not None:
            pulumi.set(__self__, "nfs_servers", nfs_servers)

    @property
    @pulumi.getter(name="mountType")
    def mount_type(self) -> pulumi.Input[str]:
        """
        Mount type for backup destination.
        """
        return pulumi.get(self, "mount_type")

    @mount_type.setter
    def mount_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "mount_type", value)

    @property
    @pulumi.getter(name="localMountPointPath")
    def local_mount_point_path(self) -> Optional[pulumi.Input[str]]:
        """
        The local directory path on each VM cluster node where the NFS server location is mounted. The local directory path and the NFS server location must each be the same across all of the VM cluster nodes. Ensure that the NFS mount is maintained continuously on all of the VM cluster nodes.
        """
        return pulumi.get(self, "local_mount_point_path")

    @local_mount_point_path.setter
    def local_mount_point_path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "local_mount_point_path", value)

    @property
    @pulumi.getter(name="nfsServerExport")
    def nfs_server_export(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the directory on which to mount the file system
        """
        return pulumi.get(self, "nfs_server_export")

    @nfs_server_export.setter
    def nfs_server_export(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "nfs_server_export", value)

    @property
    @pulumi.getter(name="nfsServers")
    def nfs_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        IP addresses for NFS Auto mount.
        """
        return pulumi.get(self, "nfs_servers")

    @nfs_servers.setter
    def nfs_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "nfs_servers", value)
@pulumi.input_type
class CloudExadataInfrastructureCustomerContactArgs:
    """Customer contact (email) for Oracle infrastructure notifications.

    Autogenerated provider input type.
    """

    def __init__(__self__, *,
                 email: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] email: (Updatable) The email address used by Oracle to send notifications regarding databases and infrastructure.
        """
        if email is not None:
            pulumi.set(__self__, "email", email)

    @property
    @pulumi.getter
    def email(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The email address used by Oracle to send notifications regarding databases and infrastructure.
        """
        return pulumi.get(self, "email")

    @email.setter
    def email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "email", value)
@pulumi.input_type
class CloudExadataInfrastructureMaintenanceWindowArgs:
def __init__(__self__, *,
preference: pulumi.Input[str],
days_of_weeks: Optional[pulumi.Input[Sequence[pulumi.Input['CloudExadataInfrastructureMaintenanceWindowDaysOfWeekArgs']]]] = None,
hours_of_days: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,
lead_time_in_weeks: Optional[pulumi.Input[int]] = None,
months: Optional[pulumi.Input[Sequence[pulumi.Input['CloudExadataInfrastructureMaintenanceWindowMonthArgs']]]] = None,
weeks_of_months: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None):
"""
:param pulumi.Input[str] preference: (Updatable) The maintenance window scheduling preference.
:param pulumi.Input[Sequence[pulumi.Input['CloudExadataInfrastructureMaintenanceWindowDaysOfWeekArgs']]] days_of_weeks: (Updatable) Days during the week when maintenance should be performed.
:param pulumi.Input[Sequence[pulumi.Input[int]]] hours_of_days: (Updatable) The window of hours during the day when maintenance should be performed. The window is a 4 hour slot. Valid values are
* 0 - represents time slot 0:00 - 3:59 UTC - 4 - represents time slot 4:00 - 7:59 UTC - 8 - represents time slot 8:00 - 11:59 UTC - 12 - represents time slot 12:00 - 15:59 UTC - 16 - represents time slot 16:00 - 19:59 UTC - 20 - represents time slot 20:00 - 23:59 UTC
:param pulumi.Input[int] lead_time_in_weeks: (Updatable) Lead time window allows user to set a lead time to prepare for a down time. The lead time is in weeks and valid value is between 1 to 4.
:param pulumi.Input[Sequence[pulumi.Input['CloudExadataInfrastructureMaintenanceWindowMonthArgs']]] months: (Updatable) Months during the year when maintenance should be performed.
:param pulumi.Input[Sequence[pulumi.Input[int]]] weeks_of_months: (Updatable) Weeks during the month when maintenance should be performed. Weeks start on the 1st, 8th, 15th, and 22nd days of the month, and have a duration of 7 days. Weeks start and end based on calendar dates, not days of the week. For example, to allow maintenance during the 2nd week of the month | |
<filename>networking_cisco/plugins/cisco/cfg_agent/service_helpers/routing_svc_helper.py
# Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import eventlet
import netaddr
import pprint as pp
from operator import itemgetter
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_utils import excutils
from oslo_utils import importutils
import six
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron_lib import exceptions as n_lib_exc
from networking_cisco._i18n import _, _LE, _LI, _LW
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.cfg_agent import cfg_exceptions
from networking_cisco.plugins.cisco.cfg_agent.device_drivers import driver_mgr
from networking_cisco.plugins.cisco.cfg_agent import device_status
from networking_cisco.plugins.cisco.common import (cisco_constants as
c_constants)
from networking_cisco.plugins.cisco.extensions import ha
from networking_cisco.plugins.cisco.extensions import routerrole
# ncclient is an optional dependency; `ncc_errors` is None when it is absent.
ncc_errors = importutils.try_import('ncclient.transport.errors')

LOG = logging.getLogger(__name__)

# Prefix prepended to the Neutron router UUID to form the device router name.
N_ROUTER_PREFIX = 'nrouter-'
ROUTER_ROLE_ATTR = routerrole.ROUTER_ROLE_ATTR

# Number of routers to fetch from server at a time on resync.
# Needed to reduce load on server side and to speed up resync on agent side.
SYNC_ROUTERS_MAX_CHUNK_SIZE = 64
SYNC_ROUTERS_MIN_CHUNK_SIZE = 8
class IPAddressMissingException(n_lib_exc.NeutronException):
    """Raised when a router port has no IP address on a given subnet."""
    message = _("Router port %(port_id)s has no IP address on subnet "
                "%(subnet_id)s.")
class MultipleIPv4SubnetsException(n_lib_exc.NeutronException):
    """Raised when a router port unexpectedly spans multiple IPv4 subnets."""
    message = _("There should not be multiple IPv4 subnets %(subnets)s on "
                "router port %(port_id)s")
class RouterInfo(object):
    """Wrapper class around the (neutron) router dictionary.

    Information about the neutron router is exchanged as a python dictionary
    between plugin and config agent.  RouterInfo wraps that dict and exposes
    the common parameters as attributes; those attributes keep the state of
    the current router configuration so that changes can be detected when an
    updated router dict arrives.

    This is a modified version of the RouterInfo class defined in the
    (reference) l3-agent implementation, for use with the cisco config agent.
    """

    def __init__(self, router_id, router):
        self.router_id = router_id
        self.ex_gw_port = None
        self._snat_enabled = None
        self._snat_action = None
        self.internal_ports = []
        self.floating_ips = []
        self._router = None
        # Assigning via the property below also derives _snat_enabled.
        self.router = router
        self.routes = []
        self.ha_info = router.get('ha_info')

    @property
    def id(self):
        return self.router_id

    @property
    def snat_enabled(self):
        return self._snat_enabled

    @property
    def router(self):
        return self._router

    @router.setter
    def router(self, value):
        self._router = value
        if not value:
            return
        # enable_snat defaults to True when the plugin did not specify it.
        self._snat_enabled = value.get('enable_snat', True)

    def router_name(self):
        return N_ROUTER_PREFIX + self.router_id

    @property
    def ha_enabled(self):
        return self.router.get(ha.ENABLED, False)
class CiscoRoutingPluginApi(object):
    """RoutingServiceHelper(Agent) side of the routing RPC API."""

    def __init__(self, topic, host):
        # `host` identifies this agent in every RPC call made below.
        self.host = host
        target = oslo_messaging.Target(topic=topic, version='1.0')
        self.client = n_rpc.get_client(target)

    def get_routers(self, context, router_ids=None, hd_ids=None):
        """Make a remote process call to retrieve the sync data for routers.

        :param context: session context
        :param router_ids: list of routers to fetch
        :param hd_ids : hosting device ids, only routers assigned to these
                        hosting devices will be returned.
        """
        cctxt = self.client.prepare(version='1.1')
        return cctxt.call(context, 'cfg_sync_routers', host=self.host,
                          router_ids=router_ids, hosting_device_ids=hd_ids)

    def get_router_ids(self, context, router_ids=None, hd_ids=None):
        """Make a remote process call to retrieve scheduled routers ids."""
        # Requires plugin-side RPC API version >= 1.3.
        cctxt = self.client.prepare(version='1.3')
        return cctxt.call(context, 'get_cfg_router_ids', host=self.host,
                          router_ids=router_ids, hosting_device_ids=hd_ids)

    def get_hardware_router_type_id(self, context):
        """Get the ID for the ASR1k hardware router type."""
        cctxt = self.client.prepare()
        return cctxt.call(context,
                          'get_hardware_router_type_id',
                          host=self.host)

    def update_floatingip_statuses(self, context, router_id, fip_statuses):
        """Make a remote process call to update operational status for one or
        several floating IPs.

        @param context: contains user information
        @param router_id: id of router associated with the floatingips
        @param fip_statuses: dict with floatingip_id as key and status as value
        """
        cctxt = self.client.prepare(version='1.1')
        return cctxt.call(context, 'update_floatingip_statuses_cfg',
                          router_id=router_id, fip_statuses=fip_statuses)

    def send_update_port_statuses(self, context, port_ids, status):
        """Call the plugin to update the port status which updates the DB.

        :param context: contains user information
        :param port_ids: list of ids of the ports associated with the status
        :param status: value of the status for the given port list (port_ids)
        """
        cctxt = self.client.prepare(version='1.1')
        return cctxt.call(context, 'update_port_statuses_cfg',
                          port_ids=port_ids, status=status)
class RoutingServiceHelper(object):
target = oslo_messaging.Target(version='1.1')
def __init__(self, host, conf, cfg_agent):
    """Set up plugin RPC, device status tracking and per-router state.

    :param host: host name identifying this config agent
    :param conf: agent configuration object
    :param cfg_agent: the owning config agent instance
    """
    self.conf = conf
    self.cfg_agent = cfg_agent
    # Admin context used for all RPC calls towards the plugin.
    self.context = bc.context.get_admin_context_without_session()
    self.plugin_rpc = CiscoRoutingPluginApi(topics.L3PLUGIN, host)
    self._dev_status = device_status.DeviceStatus()
    self._dev_status.enable_heartbeat = (
        self.conf.cfg_agent.enable_heartbeat)
    self._drivermgr = driver_mgr.DeviceDriverManager()
    # router_id -> RouterInfo for every router this agent manages.
    self.router_info = {}
    # Ids queued by plugin notifications; drained in process_service().
    self.updated_routers = set()
    self.removed_routers = set()
    # Hosting device ids that still need a (re)sync.
    self.sync_devices = set()
    self.sync_devices_attempts = 0
    # Start with a full sync of all routers scheduled to this agent.
    self.fullsync = True
    self.sync_routers_chunk_size = SYNC_ROUTERS_MAX_CHUNK_SIZE
    self.topic = '%s.%s' % (c_constants.CFG_AGENT_L3_ROUTING, host)
    self.hardware_router_type = None
    self.hardware_router_type_id = None
    self._setup_rpc()
def _setup_rpc(self):
    # Consume plugin notifications on this agent's private topic; the
    # notification handlers below are the RPC endpoints.
    self.conn = n_rpc.create_connection()
    self.endpoints = [self]
    self.conn.create_consumer(self.topic, self.endpoints, fanout=False)
    self.conn.consume_in_threads()
### Notifications from Plugin ####
def router_deleted(self, context, routers):
    """Deal with router deletion RPC message."""
    # Only queue the ids; teardown happens later in process_service().
    LOG.debug('Got router deleted notification for %s', routers)
    self.removed_routers.update(routers)
def routers_updated(self, context, routers):
    """Deal with routers modification and creation RPC message."""
    LOG.debug('Got routers updated notification :%s', routers)
    if not routers:
        return
    # Backward compatibility: older plugins send full router dicts
    # instead of bare ids.
    if isinstance(routers[0], dict):
        routers = [r['id'] for r in routers]
    self.updated_routers.update(routers)
def router_removed_from_hosting_device(self, context, routers):
    # Treated like deletion: the routers must be deconfigured here.
    LOG.debug('Got router removed from hosting device: %s', routers)
    self.router_deleted(context, routers)
def router_added_to_hosting_device(self, context, routers):
    # Treated like an update: the routers must be (re)configured here.
    LOG.debug('Got router added to hosting device :%s', routers)
    self.routers_updated(context, routers)
# version 1.1
def routers_removed_from_hosting_device(self, context, router_ids):
    # RPC API 1.1 variant: payload is a list of router ids.
    LOG.debug('Got routers removed from hosting device: %s', router_ids)
    self.router_deleted(context, router_ids)
# Routing service helper public methods
@property
def driver_manager(self):
    # Read-only accessor for the device driver manager.
    return self._drivermgr
def process_service(self, device_ids=None, removed_devices_info=None):
    """Main processing loop: reconcile router config on hosting devices.

    Gathers routers to (re)configure or remove — from a full sync, queued
    plugin notifications, newly discovered devices, or removed devices —
    groups them per hosting device and dispatches the work concurrently.
    Any failure flips `fullsync` so the next iteration resyncs everything.

    :param device_ids: ids of newly discovered hosting devices
    :param removed_devices_info: dict describing removed hosting devices
        (keys seen here: 'deconfigure', 'hosting_data')
    """
    try:
        LOG.debug("Routing service processing started")
        resources = {}
        routers = []
        removed_routers = []
        all_routers_flag = False
        if self.fullsync:
            LOG.debug("FullSync flag is on. Starting fullsync")
            # Setting all_routers_flag and clear the global full_sync flag
            all_routers_flag = True
            self.fullsync = False
            # Drop all cached state; it is rebuilt from the plugin.
            self.router_info = {}
            self.updated_routers.clear()
            self.removed_routers.clear()
            self.sync_devices.clear()
            routers = self._fetch_router_info(all_routers=True)
            LOG.debug("All routers: %s" % (pp.pformat(routers)))
            if routers is not None:
                self._cleanup_invalid_cfg(routers)
        else:
            if self.updated_routers:
                router_ids = list(self.updated_routers)
                LOG.debug("Updated routers:%s", router_ids)
                self.updated_routers.clear()
                routers = self._fetch_router_info(router_ids=router_ids)
                LOG.debug("Updated routers:%s" % (pp.pformat(routers)))
            if device_ids:
                LOG.debug("Adding new devices:%s", device_ids)
                self.sync_devices = set(device_ids) | self.sync_devices
            if self.sync_devices:
                self._handle_sync_devices(routers)
            if removed_devices_info:
                if removed_devices_info.get('deconfigure'):
                    ids = self._get_router_ids_from_removed_devices_info(
                        removed_devices_info)
                    self.removed_routers = self.removed_routers | set(ids)
            if self.removed_routers:
                removed_routers_ids = list(self.removed_routers)
                LOG.debug("Removed routers:%s",
                          pp.pformat(removed_routers_ids))
                # Only routers we actually track can be deconfigured.
                for r in removed_routers_ids:
                    if r in self.router_info:
                        removed_routers.append(self.router_info[r].router)
        # Sort on hosting device
        if routers:
            resources['routers'] = routers
        if removed_routers:
            resources['removed_routers'] = removed_routers
        hosting_devices = self._sort_resources_per_hosting_device(
            resources)
        # Dispatch process_services() for each hosting device
        pool = eventlet.GreenPool()
        for device_id, resources in hosting_devices.items():
            routers = resources.get('routers', [])
            removed_routers = resources.get('removed_routers', [])
            pool.spawn_n(self._process_routers, routers, removed_routers,
                         device_id, all_routers=all_routers_flag)
        pool.waitall()
        if removed_devices_info:
            # Drop drivers for hosting devices that are gone.
            for hd_id in removed_devices_info['hosting_data']:
                self.driver_manager.remove_driver_for_hosting_device(hd_id)
        LOG.debug("Routing service processing successfully completed")
    except Exception:
        LOG.exception(_LE("Failed processing routers"))
        self.fullsync = True
def collect_state(self, configurations):
    """Collect state from this helper.

    Summarizes the routers and configurations managed by this config
    agent into the given dict.

    :param configurations: dict of configuration values
    :return dict of updated configuration values
    """
    infos = self.router_info.values()
    gw_port_total = 0
    interface_total = 0
    floating_ip_total = 0
    per_device_count = collections.defaultdict(int)
    for info in infos:
        if info.router.get('gw_port'):
            gw_port_total += 1
        interface_total += len(info.router.get(
            bc.constants.INTERFACE_KEY, []))
        floating_ip_total += len(info.router.get(
            bc.constants.FLOATINGIP_KEY, []))
        device = info.router['hosting_device']
        if device:
            per_device_count[device['id']] += 1
    routers_per_hd = {dev_id: {'routers': count}
                      for dev_id, count in per_device_count.items()}
    non_responding = self._dev_status.get_backlogged_hosting_devices()
    configurations['total routers'] = len(infos)
    configurations['total ex_gw_ports'] = gw_port_total
    configurations['total interfaces'] = interface_total
    configurations['total floating_ips'] = floating_ip_total
    configurations['hosting_devices'] = routers_per_hd
    configurations['non_responding_hosting_devices'] = non_responding
    return configurations
# Routing service helper internal methods
def _cleanup_invalid_cfg(self, routers):
    """Remove stale/invalid configuration from each hosting device.

    Groups the given routers by hosting device and delegates the actual
    cleanup to the device-type specific driver.

    :param routers: list of router dicts; each must carry
        'hosting_device' and 'router_type' entries
    """
    # dict with hd id as key and associated routers list as val
    hd_routermapping = collections.defaultdict(list)
    for router in routers:
        hd_routermapping[router['hosting_device']['id']].append(router)
    # Call cfg cleanup specific to each device type via its driver.
    # Fixes: plain dict.items() replaces six.iteritems() (identical
    # iteration on py2 and py3), and the loop variable no longer shadows
    # the `routers` parameter.
    for hd_id, hd_routers in hd_routermapping.items():
        temp_res = {"id": hd_id,
                    "hosting_device": hd_routers[0]['hosting_device'],
                    "router_type": hd_routers[0]['router_type']}
        driver = self.driver_manager.set_driver(temp_res)
        driver.cleanup_invalid_cfg(
            hd_routers[0]['hosting_device'], hd_routers)
def _fetch_router_info(self, router_ids=None, device_ids=None,
all_routers=False):
"""Fetch router dict from the routing plugin.
:param router_ids: List of router_ids of routers to fetch
:param device_ids: List of device_ids whose routers to fetch
:param all_routers: If True fetch all the routers for this agent.
:return: List of router dicts of format:
[ {router_dict1}, {router_dict2},.....]
"""
try:
if all_routers:
router_ids = self.plugin_rpc.get_router_ids(self.context)
return self._fetch_router_chunk_data(router_ids)
if router_ids:
return self._fetch_router_chunk_data(router_ids)
if device_ids:
return self.plugin_rpc.get_routers(self.context,
hd_ids=device_ids)
except oslo_messaging.MessagingTimeout:
if self.sync_routers_chunk_size > SYNC_ROUTERS_MIN_CHUNK_SIZE:
self.sync_routers_chunk_size = max(
| |
# AUTOGENERATED! DO NOT EDIT! File to edit: 04_carion2020end.ipynb (unless otherwise specified).
__all__ = ['coco_vocab', 'bb_pad', 'ParentSplitter', 'box_cxcywh_to_xyxy', 'box_xyxy_to_cxcywh', 'TensorBBoxWH',
'TensorBBoxTL', 'ToWH', 'ToXYXY', 'ToTL', 'box_area', 'all_op', 'generalized_box_iou', 'DETRLoss', 'DETR',
'CocoEval', 'sorted_detr_trainable_params', 'GetAnnotatedImageFiles', 'GetBboxAnnotation',
'GetClassAnnotation', 'CocoDataLoaders', 'detr_learner']
# Cell
import os
import torch
import numpy as np
import seaborn as sns
import io
from contextlib import redirect_stdout
from IPython.core.debugger import set_trace
from torch import functional as F
from scipy.optimize import linear_sum_assignment
from fastprogress.fastprogress import master_bar, progress_bar
from fastai.data.all import *
from fastai.vision.all import *
from .core import *
from itertools import chain
from pycocotools.cocoeval import COCOeval
from pycocotools.coco import COCO
from .core import _parent_idxs
# Cell
# COCO category names indexed by DETR class id.  The 'N/A<i>' entries are
# placeholders for category ids that the COCO dataset leaves unused, so the
# list lines up with the model's 92-way class head (91 slots + no-object).
coco_vocab = [
    'N/A0', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus',
    'train', 'truck', 'boat', 'traffic light', 'fire hydrant', 'N/A1',
    'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse',
    'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'N/A2', 'backpack',
    'umbrella', 'N/A3', 'N/A4', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',
    'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',
    'skateboard', 'surfboard', 'tennis racket', 'bottle', 'N/A5', 'wine glass',
    'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',
    'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',
    'chair', 'couch', 'potted plant', 'bed', 'N/A6', 'dining table', 'N/A7',
    'N/A8', 'toilet', 'N/A9', 'tv', 'laptop', 'mouse', 'remote', 'keyboard',
    'cell phone', 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'N/A10',
    'book', 'clock', 'vase', 'scissors', 'teddy bear', 'hair drier',
    'toothbrush'
]
# Cell
def bb_pad(samples, pad_idx=0):
    "Function that collect `samples` of labelled bboxes and adds padding with `pad_idx`."
    # Samples without any targets beyond the image need no padding at all.
    if not len(samples[0][1:]):
        return samples
    samples = [(s[0], *clip_remove_empty(*s[1:])) for s in samples]
    max_len = max(len(s[2]) for s in samples)

    def _pad_one(img, bbox, lbl):
        # Pad boxes with all-zero rows and labels with `pad_idx` up to the
        # longest target count in the batch.
        bbox = torch.cat([bbox, bbox.new_zeros(max_len - bbox.shape[0], 4)])
        lbl = torch.cat([lbl, lbl.new_zeros(max_len - lbl.shape[0]) + pad_idx])
        return img, bbox, lbl

    return [_pad_one(*s) for s in samples]
# Cell
def ParentSplitter(train_name='train', valid_name='valid'):
    "Split `items` from the grand parent folder names (`train_name` and `valid_name`)."
    def _inner(o):
        # _parent_idxs (from .core) returns the indices of items whose
        # grandparent directory matches the given name.
        tindex = _parent_idxs(o, train_name)
        vindex = _parent_idxs(o, valid_name)
        return tindex, vindex
    return _inner
# Cell
def box_cxcywh_to_xyxy(x):
    "Convert boxes from center (cx, cy, w, h) to corner (x0, y0, x1, y1) format."
    cx, cy, w, h = x.unbind(-1)
    half_w = 0.5 * w
    half_h = 0.5 * h
    corners = [cx - half_w, cy - half_h, cx + half_w, cy + half_h]
    return torch.stack(corners, dim=-1)
# Cell
def box_xyxy_to_cxcywh(x):
    "Convert boxes from corner (x0, y0, x1, y1) to center (cx, cy, w, h) format."
    x0, y0, x1, y1 = x.unbind(-1)
    center_x = (x0 + x1) / 2
    center_y = (y0 + y1) / 2
    return torch.stack([center_x, center_y, x1 - x0, y1 - y0], dim=-1)
# Cell
# Tensor subtypes that tag which bbox convention a tensor carries through the
# fastai pipeline: TensorBBoxWH holds (cx, cy, w, h) boxes, TensorBBoxTL holds
# top-left corner (x0, y0, x1, y1) boxes.
class TensorBBoxWH(TensorPoint): pass
class TensorBBoxTL(TensorPoint): pass
# Cell
# Rescale fastai TensorBBox coords from [-1, 1] to [0, 1], then convert to
# (cx, cy, w, h) form, preserving the attached img_size metadata.
@Transform
def ToWH(x:TensorBBox): return TensorBBoxWH(box_xyxy_to_cxcywh(x*0.5+0.5), img_size=x.img_size)
# Cell
@Transform
def ToXYXY(x:TensorBBoxWH)->None:
    # Inverse of ToWH: (cx, cy, w, h) in [0, 1] back to corners in [-1, 1].
    # NOTE(review): the `->None` annotation looks wrong but appears
    # deliberate — fastcore Transforms use the return annotation for type
    # casting, and `None` suppresses it; the call really returns TensorBBox.
    return TensorBBox(box_cxcywh_to_xyxy(x)*2-1, img_size=x.img_size)
# Cell
class ToTL(Transform):
    "Convert boxes of either convention to top-left (x0, y0, x1, y1) in [0, 1]."
    # Both `encodes` definitions coexist: fastai's Transform metaclass
    # dispatches on the annotated input type (TensorBBoxWH vs TensorBBox),
    # so the second def does not simply overwrite the first.
    def encodes(self, x:TensorBBoxWH)->None: return TensorBBoxTL(box_cxcywh_to_xyxy(x), img_size=x.img_size)
    def encodes(self, x:TensorBBox)->None: return TensorBBoxTL((x+1)/2, img_size=x.img_size)
# Cell
def box_area(boxes):
    "Area of (x0, y0, x1, y1) boxes; supports arbitrary leading batch dims."
    width = boxes[..., 2] - boxes[..., 0]
    height = boxes[..., 3] - boxes[..., 1]
    return width * height
# Cell
def all_op(cmp):
    "Compares all the elements of `a` and `b` using cmp."
    def _inner(a, b):
        # A non-iterable `b` can never match element-wise.
        if not is_iter(b):
            return False
        # zip_longest pads the shorter side with None, so unequal lengths
        # fall through to cmp(x, None) / cmp(None, y).
        paired = itertools.zip_longest(a, b)
        return all(cmp(x, y) for x, y in paired)
    return _inner
# Cell
def generalized_box_iou(boxes1, boxes2, pairwise=False):
    """
    Generalized IoU from https://giou.stanford.edu/
    The boxes should be in [x0, y0, x1, y1] format
    Returns a [N, M] pairwise matrix, where N = len(boxes1)
    and M = len(boxes2).
    This implementation expects bs as first dim.
    """
    # Normalize both inputs to top-left (x0, y0, x1, y1) form via ToTL.
    boxes1, boxes2 = ToTL()((boxes1, boxes2))
    # degenerate boxes gives inf / nan results
    # so do an early check
    assert (boxes1[..., 2:] >= boxes1[..., :2]).all(), 'boxes1 are not in [left_x, top_y, right_x, bottom_y] coords'
    assert (boxes2[..., 2:] >= boxes2[..., :2]).all(), 'boxes2 are not in [left_x, top_y, right_x, bottom_y] coords'
    area1 = box_area(boxes1)
    area2 = box_area(boxes2)
    if pairwise:
        # Insert broadcast dims so every box in boxes1 is paired with
        # every box in boxes2.
        boxes1 = boxes1[:, :, None, :]
        boxes2 = boxes2[:, None, :, :]
        area1 = area1[:, :, None]
        area2 = area2[:, None, :]
    # Intersection rectangle of each pair.
    lt = torch.max(boxes1[..., :2], boxes2[..., :2])  # [N,M,2]
    rb = torch.min(boxes1[..., 2:], boxes2[..., 2:])  # [N,M,2]
    wh = (rb - lt).clamp(min=0)  # [N,M,2]
    inter = wh[..., 0] * wh[..., 1]  # [N,M]
    union = (area1 + area2) - inter
    iou = inter / union
    # Smallest enclosing box of each pair, for the GIoU penalty term.
    lt = torch.min(boxes1[..., :2], boxes2[..., :2])  # [N,M,2]
    rb = torch.max(boxes1[..., 2:], boxes2[..., 2:])  # [N,M,2]
    wh = (rb - lt).clamp(min=0)  # [N,M,2]
    area = wh[..., 0] * wh[..., 1]
    return iou - (area - union) / area
# Cell
class DETRLoss(nn.Module):
    """Bipartite-matching (Hungarian) loss for DETR (Carion et al. 2020).

    Combines a class cross-entropy term with L1 + generalized-IoU box terms,
    computed over a one-to-one matching between queries and targets found
    with ``scipy.optimize.linear_sum_assignment``.  The last class index
    (`n_classes - 1`) is the "no object" class, down-weighted by `eos_coef`.

    Fixes vs. the previous version: dead `Cs` list removed from `matcher`;
    `activation` no longer relies on the fragile module-level `F` binding;
    `forward`'s aux loop no longer shadows its `output` parameter.
    """

    def __init__(self, classw=1, boxw=1, giouw=1, n_queries=100, th=0.7, eos_coef=0.1, n_classes=92):
        super().__init__()
        store_attr()
        # Per-class CE weights: the trailing "no object" class gets eos_coef.
        self.emptyw = torch.ones(n_classes)
        self.emptyw[-1] = eos_coef
        self.entropy = nn.CrossEntropyLoss(weight=self.emptyw)

    def class_loss(self, output_classes, target_id, indices):
        "Cross-entropy over all queries; unmatched queries get the no-object class."
        bs, nq, nc = output_classes.shape
        target_id_full = torch.full((bs, nq), nc-1, dtype=torch.int64, device=target_id.device)
        for i, ind in enumerate(indices): target_id_full[i, ind[0]] = target_id[i, ind[1]]
        return self.entropy(output_classes.transpose(1,2), target_id_full)

    def box_loss(self, output_boxes, target_boxes, indices):
        "L1 + GIoU loss over the matched prediction/target box pairs only."
        output_boxes, target_boxes = ToWH((output_boxes, target_boxes))
        output_boxes_ind = []
        target_boxes_ind = []
        for i, (src, dst) in enumerate(indices):
            output_boxes_ind.append(output_boxes[i, src, :])
            target_boxes_ind.append(target_boxes[i, dst, :])
        output_boxes_ind = torch.cat(output_boxes_ind)
        target_boxes_ind = torch.cat(target_boxes_ind)
        l1_loss = nn.L1Loss()(output_boxes_ind, target_boxes_ind)
        giou = 1 - generalized_box_iou(output_boxes_ind, target_boxes_ind)
        return self.boxw * l1_loss + self.giouw * giou.mean()

    def box_cost(self, output_boxes, target_boxes):
        "Pairwise L1 matching cost between boxes in (cx, cy, w, h) form."
        output_boxes, target_boxes = ToWH((output_boxes, target_boxes))
        return torch.cdist(output_boxes, target_boxes, p=1)

    def class_cost(self, output_class, target_ids):
        "Negative predicted probability of each target's class, per query."
        bs, nq, _ = output_class.shape
        _, mc = target_ids.shape
        p = output_class.flatten(0,1).softmax(-1)  # [bs*nq, num_classes]
        ids = target_ids.flatten()  # [bs*mc]
        loss = -p[:, ids].reshape(bs, nq, -1)  # [bs, nq, bs*mc]
        # Keep only each sample's own mc columns from the bs*mc block.
        return torch.cat([loss[i, :, i*mc:(i+1)*mc][None, ...] for i in range(bs)], 0)  # [bs, nq, mc]

    @torch.no_grad()
    def matcher(self, output, target):
        "Hungarian matching of query predictions to (non-padding) targets."
        output_boxes, output_class = output  # [bs, nq, 4], [bs, nq, num_classes]
        target_boxes, target_ids = target  # [bs, max(n in batch), 4], [bs, max(n in batch)]
        l_iou = -generalized_box_iou(output_boxes, target_boxes, pairwise=True)
        l_box = self.box_cost(output_boxes, target_boxes)
        l_class = self.class_cost(output_class, target_ids)
        C = self.classw*l_class + self.boxw*l_box + self.giouw*l_iou
        C = C.cpu()
        # Number of real (non padding) targets per sample; padding labels
        # carry the no-object class id.
        sizes = [(v<self.n_classes-1).type(torch.int).sum() for v in target[1]]
        indices = [linear_sum_assignment(C[i, :, :s]) for i, s in enumerate(sizes)]
        return [(torch.as_tensor(i, dtype=torch.int64), torch.as_tensor(j, dtype=torch.int64)) for i, j in indices]

    def forward(self, output, target_boxes, target_ids):
        "Total loss over the final decoder layer plus any auxiliary layers."
        output_boxes, output_class, aux_outputs = output
        indices = self.matcher((output_boxes, output_class), (target_boxes, target_ids))
        l_class = self.class_loss(output_class, target_ids, indices)
        l_box = self.box_loss(output_boxes, target_boxes, indices)
        loss = l_class * self.classw + l_box
        if aux_outputs:
            # Same loss applied to each intermediate decoder layer.
            for aux in aux_outputs:
                aux_boxes, aux_class = aux['pred_boxes'], aux['pred_logits']
                indices = self.matcher((aux_boxes, aux_class), (target_boxes, target_ids))
                l_class = self.class_loss(aux_class, target_ids, indices)
                l_box = self.box_loss(aux_boxes, target_boxes, indices)
                loss += l_class * self.classw + l_box
        return loss

    def activation(self, x):
        # Tensor.softmax is equivalent to F.softmax but avoids depending on
        # the module-level `F` (bound by `from torch import functional as F`
        # and only usable because fastai's star import later rebinds it to
        # torch.nn.functional).
        return (ToXYXY(x[0]), x[1].softmax(dim=-1))

    def decodes(self, x, pad=True):
        "Keep predictions above `th` that are neither no-object nor empty boxes."
        pred_boxes, probs = x
        max_probs, pred_ids = probs.max(axis=-1)
        ind = (max_probs>self.th) & (pred_ids<probs.shape[-1]-1) & (box_area(pred_boxes)>0)
        max_probs = [max_probs[i, ind[i]] for i in range(ind.shape[0])]
        pred_ids = [pred_ids[i, ind[i]] for i in range(ind.shape[0])]
        pred_boxes = L(pred_boxes[i, ind[i], :] for i in range(ind.shape[0]))
        if pad:
            # Re-pad the ragged per-image predictions into rectangular
            # tensors; with pad=False the per-image lists are returned.
            imgs = [None for i in range_of(pred_ids)]
            z_inp = zip(imgs, pred_boxes, pred_ids)
            out = bb_pad(list(z_inp), pad_idx=self.n_classes-1)
            pred_boxes = torch.cat([o[1].unsqueeze(0) for o in out])
            pred_ids = torch.cat([o[2].unsqueeze(0) for o in out])
            pred_boxes, pred_ids = TensorBBox(pred_boxes), TensorMultiCategory(pred_ids)
        self.scores = max_probs
        return pred_boxes, pred_ids
# Cell
class DETR(nn.Module):
    """DETR ResNet-50 loaded from torch hub, with a resizable class head."""

    def __init__(self, pretrained=True, n_classes=92, aux_loss=False):
        super().__init__()
        # NOTE: downloads the model from the facebookresearch/detr hub repo
        # (network access required on first use).
        self.model = torch.hub.load('facebookresearch/detr', 'detr_resnet50', pretrained=pretrained, verbose=False)
        # Swap in a fresh classification head when the requested class count
        # differs from COCO's 92 (91 category slots + "no object").
        if self.model.class_embed.out_features!=n_classes:
            self.model.class_embed = nn.Linear(256, n_classes)
        self.model.aux_loss = aux_loss

    def forward(self, x):
        img_sz = x.shape[2:]
        x = self.model(x)
        pred_boxes, pred_logits = x['pred_boxes'], x['pred_logits']
        aux_outputs = x.get('aux_outputs', None)
        # Tag all box tensors with the input image size so fastai transforms
        # can decode them back to pixel coordinates.
        if aux_outputs:
            for o in aux_outputs: o['pred_boxes'] = TensorBBoxWH(o['pred_boxes'], img_size=img_sz)
        return TensorBBoxWH(pred_boxes, img_size=img_sz), pred_logits, aux_outputs
# Cell
class CocoEval(Callback):
run_before=Recorder
run_train = False
def __init__(self):
    # Standard COCO evaluation metric names; each is exposed to fastai's
    # Recorder as a ValueMetric reading the attribute of the same name.
    metrics = 'AP AP50 AP75 AP_small AP_medium AP_large AR1 AR10 AR100 AR_small AR_medium AR_large'.split()
    self.metrics = L(metrics).map(partial(getattr, self)).map(ValueMetric)
def before_validate(self):
    # Build empty COCO-format ground-truth (gt) and detection (dt) datasets
    # that after_batch fills in during validation.
    vocab = self.dls.vocab
    bs = self.learn.dls.bs
    self.gt_ds = {'annotations': [], 'images': [], 'categories': []}
    self.dt_ds = {'annotations': [], 'images': [], 'categories': []}
    # COCO category ids are 1-based.
    self.gt_ds['categories'] = [{'id': i+1,'name':o} for i,o in enumerate(vocab)]
    self.dt_ds['categories'] = [{'id': i+1,'name':o} for i,o in enumerate(vocab)]
    self.reset_counters()
    self.bs = bs
    # Composed decoders mapping model-space boxes/labels back to image space.
    self.dec_bbox = compose(ToXYXY, to_cpu, self.learn.dls.after_item.decode)
    self.dec_cls = compose(to_cpu, lambda x: x[x>0])
    self.batch_to_samples = compose(partial(batch_to_samples, max_n=self.bs), L)
def reset_counters(self):
    # Fresh id sequences for images and gt/dt annotations — presumably
    # fastcore's `Inf.count` yields a new infinite counter on each access,
    # so every validation run starts numbering from zero (TODO confirm).
    self.img_id = Inf.count
    self.gtann = Inf.count
    self.dtann = Inf.count
def after_batch(self):
pred_boxes, pred_ids = self.learn.loss_func.decodes(self.loss_func.activation(self.pred), pad=False)
max_probs = self.learn.loss_func.scores
_, _, w, h = self.xb[0].shape
gt_cls = self.batch_to_samples(self.yb[1]).map(to_cpu)
dt_cls = L(pred_ids).map(to_cpu)
gt_boxes = self.batch_to_samples(self.yb[0]).map(self.dec_bbox)
dt_boxes = L(pred_boxes).map(self.dec_bbox)
for gtb, gtc, dtb, dtc, i, socres in zip(gt_boxes, gt_cls, dt_boxes, dt_cls, self.img_id, max_probs):
self.gt_ds['images'].append({'id': i, 'height': h, 'width': w})
self.gt_ds['annotations'].extend([{'iscrowd': 0, 'bbox': o.tolist(), 'area': box_area(o), 'category_id': int(c), 'image_id': i, 'id': j} for o, c, j in zip(gtb, gtc, self.gtann)])
self.dt_ds['images'].append({'id': i, 'height': h, 'width': w})
self.dt_ds['annotations'].extend([{'iscrowd': 0, 'score': s, 'bbox': o.tolist(), 'area': box_area(o), | |
<filename>tests/encryption_test.py
# -*- coding: utf-8 -*-
import contextlib
import copy
import functools
import json
import os
import pytest
from olm import Account, OlmMessage, OlmPreKeyMessage, OutboundGroupSession
from nio.crypto import (DeviceStore, GroupSessionStore, InboundGroupSession,
Olm, OlmDevice, OutboundSession, OutgoingKeyRequest,
SessionStore)
from nio.events import (ForwardedRoomKeyEvent, MegolmEvent, OlmEvent,
RoomKeyEvent, RoomMessageText, UnknownBadEvent)
from nio.exceptions import EncryptionError, GroupEncryptionError, OlmTrustError
from nio.responses import (KeysClaimResponse, KeysQueryResponse,
KeysUploadResponse)
from nio.store import DefaultStore, Ed25519Key, Key, KeyStore
AliceId = "@alice:example.org"
Alice_device = "ALDEVICE"
BobId = "@bob:example.org"
Bob_device = "BOBDEVICE"
MaloryId = "@malory:example.org"
Malory_device = "MALORYDEVICE"
PICKLE_KEY = "DEFAULT_KEY"
TEST_ROOM = "!test_room"
ephemeral_dir = os.path.join(os.curdir, "tests/data/encryption")
def ephemeral(func):
    """Decorator that deletes the ephemeral account database after *func* runs.

    Cleanup happens in a ``finally`` block, so the store file is removed even
    when the wrapped test raises; the original exception then propagates.
    """
    import functools

    @functools.wraps(func)  # preserve the test's name/docstring for pytest reports
    def wrapper(*args, **kwargs):
        try:
            ret = func(*args, **kwargs)
        finally:
            os.remove(os.path.join(
                ephemeral_dir,
                "ephemeral_DEVICEID.db"
            ))
        return ret
    return wrapper
class TestClass(object):
@staticmethod
def _load_response(filename):
with open(filename) as f:
return json.loads(f.read(), encoding="utf-8")
def _get_store(self, user_id, device_id, pickle_key=""):
return DefaultStore(user_id, device_id, ephemeral_dir, pickle_key)
@property
def ephemeral_olm(self):
user_id = "ephemeral"
device_id = "DEVICEID"
return Olm(user_id, device_id, self._get_store(user_id, device_id))
@ephemeral
def test_new_account_creation(self):
olm = self.ephemeral_olm
assert isinstance(olm.account, Account)
def _load(self, user_id, device_id, pickle_key=""):
return Olm(
user_id,
device_id,
self._get_store(user_id, device_id, pickle_key)
)
def test_account_loading(self):
olm = self._load("example", "DEVICEID", PICKLE_KEY)
assert isinstance(olm.account, Account)
assert (olm.account.identity_keys["curve25519"]
== "<KEY>")
assert (olm.account.identity_keys["ed25519"]
== "<KEY>")
def test_fingerprint_store(self, monkeypatch):
def mocksave(self):
return
monkeypatch.setattr(KeyStore, '_save', mocksave)
store = KeyStore(os.path.join(
ephemeral_dir,
"ephemeral_devices"
))
account = Account()
device = OlmDevice(
"example",
"DEVICEID",
account.identity_keys
)
key = Key.from_olmdevice(device)
assert key not in store
assert store.add(key)
assert key in store
assert store.remove(key)
assert store.check(key) is False
def test_fingerprint_store_loading(self):
store = KeyStore(os.path.join(ephemeral_dir, "known_devices"))
key = Ed25519Key(
"example",
"DEVICEID",
"<KEY>"
)
assert key in store
def test_invalid_store_entry_equality(self):
entry = Ed25519Key(
"example",
"DEVICEID",
"<KEY>"
)
assert entry != 1
def test_differing_store_entries(self):
alice = Ed25519Key(
"alice",
"DEVICEID",
"<KEY>"
)
bob = Ed25519Key(
"bob",
"DEVICEDI",
"<KEY>"
)
assert alice != bob
def _create_session(self):
alice = Account()
bob = Account()
bob.generate_one_time_keys(1)
one_time = list(bob.one_time_keys["curve25519"].values())[0]
id_key = bob.identity_keys["curve25519"]
s = OutboundSession(alice, id_key, one_time)
return alice, bob, s
def test_session_store(self):
alice, bob, s = self._create_session()
store = SessionStore()
store.add(bob.identity_keys["curve25519"], s)
assert s in store
def test_session_store_sort(self):
alice, bob, s = self._create_session()
bob.generate_one_time_keys(1)
one_time = list(bob.one_time_keys["curve25519"].values())[0]
curve_key = bob.identity_keys["curve25519"]
s2 = OutboundSession(alice, curve_key, one_time)
store = SessionStore()
store.add(curve_key, s)
store.add(curve_key, s2)
if s.id < s2.id:
assert s == store.get(curve_key)
else:
assert s2 == store.get(curve_key)
    def test_device_store(self):
        """Adding the same device twice only succeeds the first time."""
        alice = OlmDevice(
            "example",
            "DEVICEID",
            # NOTE(review): "edd25519" looks like a typo for "ed25519" --
            # confirm whether OlmDevice tolerates the misspelled key name.
            {"edd25519": "2MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA",
             "curve25519": "3MX1WOCAmE9eyywGdiMsQ4RxL2SIKVeyJXiSjVFycpA"}
        )
        store = DeviceStore()
        assert store.add(alice)
        assert store.add(alice) is False  # duplicates are rejected
        assert alice in store
@ephemeral
def test_olm_outbound_session_create(self):
bob = Account()
bob.generate_one_time_keys(1)
one_time = list(bob.one_time_keys["curve25519"].values())[0]
bob_device = OlmDevice(
BobId,
Bob_device,
bob.identity_keys
)
olm = self.ephemeral_olm
olm.device_store[bob_device.user_id][bob_device.id] = bob_device
olm.create_session(one_time, bob_device.curve25519)
assert isinstance(
olm.session_store.get(bob.identity_keys["curve25519"]),
OutboundSession
)
def test_olm_session_load(self):
olm = self._load("example", "DEVICEID", PICKLE_KEY)
bob_session = olm.session_store.get(
"+Qs131S/odNdWG6VJ8hiy9YZW0us24wnsDjYQbaxLk4"
)
assert bob_session
assert (bob_session.id
== "EeEiqT9LjCtECaN7WTqcBQ7D5Dwm4+/L9Uxr1IyPAts")
@ephemeral
def test_olm_group_session_store(self):
olm = self.ephemeral_olm
bob_account = Account()
outbound_session = OutboundGroupSession()
olm.create_group_session(
bob_account.identity_keys["curve25519"],
bob_account.identity_keys["ed25519"],
"!test_room",
outbound_session.id,
outbound_session.session_key)
del olm
olm = self.ephemeral_olm
bob_session = olm.inbound_group_store.get(
"!test_room",
bob_account.identity_keys["curve25519"],
outbound_session.id
)
assert bob_session
assert (bob_session.id
== outbound_session.id)
@ephemeral
def test_keys_query(self):
olm = self.ephemeral_olm
parsed_dict = TestClass._load_response(
"tests/data/keys_query.json")
response = KeysQueryResponse.from_dict(parsed_dict)
assert isinstance(response, KeysQueryResponse)
olm.handle_response(response)
device = olm.device_store["@alice:example.org"]["JLAFKJWSCS"]
assert (
device.ed25519 == "nE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM"
)
del olm
olm = self.ephemeral_olm
device = olm.device_store["@alice:example.org"]["JLAFKJWSCS"]
assert (
device.ed25519 == "nE6W2fCblxDcOFmeEtCHNl8/l8bXcu7GKyAswA4r3mM"
)
    @ephemeral
    def test_same_query_response_twice(self):
        """Handling an identical keys query twice keeps it marked changed."""
        olm = self.ephemeral_olm
        parsed_dict = TestClass._load_response(
            "tests/data/keys_query.json")
        response = KeysQueryResponse.from_dict(parsed_dict)
        olm.handle_response(response)
        assert response.changed
        # TODO check out why this fails under python2 if we remove the copy()
        # call.
        response2 = copy.copy(response)
        # NOTE(review): the second handle_response() is passed `response`,
        # not `response2` -- confirm that re-handling the original object
        # while asserting on the copy is intentional.
        olm.handle_response(response)
        assert response2.changed
    def test_olm_inbound_session(self, monkeypatch):
        """End-to-end olm/megolm flow: share a group session, decrypt it.

        Covers trust errors for unverified/unblocked devices, inbound
        session creation on first decrypt, and a second round of sharing
        over the already-established olm session.
        """
        # Persisting the key store is a no-op for this test.
        def mocksave(self):
            return
        monkeypatch.setattr(KeyStore, '_save', mocksave)
        # create three new accounts
        alice = self._load(AliceId, Alice_device)
        bob = self._load(BobId, Bob_device)
        # NOTE(review): malory is loaded with Bob's user/device ids --
        # confirm this is intentional and not a copy/paste slip.
        malory = self._load(BobId, Bob_device)
        # create olm devices for each others known devices list
        alice_device = OlmDevice(
            AliceId,
            Alice_device,
            alice.account.identity_keys
        )
        bob_device = OlmDevice(
            BobId,
            Bob_device,
            bob.account.identity_keys
        )
        malory_device = OlmDevice(
            MaloryId,
            Malory_device,
            malory.account.identity_keys
        )
        # add the devices to the device list
        alice.device_store.add(bob_device)
        alice.device_store.add(malory_device)
        bob.device_store.add(alice_device)
        # bob creates one time keys
        bob.account.generate_one_time_keys(1)
        one_time = list(bob.account.one_time_keys["curve25519"].values())[0]
        # Mark the keys as published
        bob.account.mark_keys_as_published()
        # alice creates an outbound olm session with bob
        alice.create_session(one_time, bob_device.curve25519)
        # alice creates an group session
        alice.create_outbound_group_session("!test:example.org")
        group_session = alice.outbound_group_sessions["!test:example.org"]
        # alice shares the group session with bob, but bob isn't verified
        with pytest.raises(OlmTrustError):
            sharing_with, to_device = alice.share_group_session(
                "!test:example.org",
                [BobId]
            )
        alice.verify_device(bob_device)
        # alice shares the group session with bob and malory, but malory isn't
        # blocked
        with pytest.raises(OlmTrustError):
            sharing_with, to_device = alice.share_group_session(
                "!test:example.org",
                [BobId, MaloryId]
            )
        alice.blacklist_device(malory_device)
        sharing_with, to_device = alice.share_group_session(
            "!test:example.org",
            [BobId, MaloryId]
        )
        # check that we aren't sharing the group session with malory
        with pytest.raises(KeyError):
            to_device["messages"][MaloryId][malory_device.id]["ciphertext"]
        ciphertext = to_device["messages"][BobId][bob_device.id]["ciphertext"]
        olm_event_dict = {
            "sender": AliceId,
            "type": "m.room.encrypted",
            "content": {
                "algorithm": Olm._olm_algorithm,
                "sender_key": alice_device.curve25519,
                "ciphertext": ciphertext
            }
        }
        olm_event = OlmEvent.from_dict(olm_event_dict)
        assert isinstance(olm_event, OlmEvent)
        # bob decrypts the message and creates a new inbound session with alice
        try:
            # pdb.set_trace()
            bob.decrypt_event(olm_event)
            # we check that the session is there
            assert bob.session_store.get(alice_device.curve25519)
            # we check that the group session is there
            assert bob.inbound_group_store.get(
                "!test:example.org",
                alice_device.curve25519,
                group_session.id,
            )
            # Test another round of sharing, this time with an existing session
            alice.create_outbound_group_session(TEST_ROOM)
            group_session = alice.outbound_group_sessions[TEST_ROOM]
            sharing_with, to_device = alice.share_group_session(
                TEST_ROOM,
                [BobId, MaloryId]
            )
            ciphertext = to_device["messages"][BobId][bob_device.id]["ciphertext"]
            olm_event_dict = {
                "sender": AliceId,
                "type": "m.room.encrypted",
                "content": {
                    "algorithm": Olm._olm_algorithm,
                    "sender_key": alice_device.curve25519,
                    "ciphertext": ciphertext
                }
            }
            olm_event = OlmEvent.from_dict(olm_event_dict)
            assert isinstance(olm_event, OlmEvent)
            event = bob.decrypt_event(olm_event)
            assert event
            assert bob.inbound_group_store.get(
                TEST_ROOM,
                alice_device.curve25519,
                group_session.id,
            )
        finally:
            # remove the databases, the known devices store is handled by
            # monkeypatching
            os.remove(os.path.join(
                ephemeral_dir,
                "{}_{}.db".format(AliceId, Alice_device)
            ))
            os.remove(os.path.join(
                ephemeral_dir,
                "{}_{}.db".format(BobId, Bob_device)
            ))
    def test_group_session_sharing(self, monkeypatch):
        """share_group_session honours the max-messages-per-request cap."""
        # Persisting the key store is a no-op for this test.
        def mocksave(self):
            return
        monkeypatch.setattr(KeyStore, '_save', mocksave)
        # create three new accounts
        alice = self._load(AliceId, Alice_device)
        bob = self._load(BobId, Bob_device)
        # NOTE(review): malory is loaded with Bob's user/device ids --
        # confirm this is intentional and not a copy/paste slip.
        malory = self._load(BobId, Bob_device)
        # create olm devices for each others known devices list
        alice_device = OlmDevice(
            AliceId,
            Alice_device,
            alice.account.identity_keys
        )
        bob_device = OlmDevice(
            BobId,
            Bob_device,
            bob.account.identity_keys
        )
        malory_device = OlmDevice(
            MaloryId,
            Malory_device,
            malory.account.identity_keys
        )
        # add the devices to the device list
        alice.device_store.add(bob_device)
        alice.device_store.add(malory_device)
        bob.device_store.add(alice_device)
        # bob creates one time keys
        bob.account.generate_one_time_keys(1)
        one_time = list(bob.account.one_time_keys["curve25519"].values())[0]
        # Mark the keys as published
        bob.account.mark_keys_as_published()
        # alice creates an outbound olm session with bob
        alice.create_session(one_time, bob_device.curve25519)
        alice.verify_device(bob_device)
        alice.verify_device(malory_device)
        # Force one to-device message per request so the share is split
        # across two calls.
        alice._maxToDeviceMessagesPerRequest = 1
        sharing_with, to_device = alice.share_group_session(
            "!test:example.org",
            [BobId, MaloryId]
        )
        group_session = alice.outbound_group_sessions["!test:example.org"]
        assert group_session
        assert len(sharing_with) == 1
        assert not group_session.users_shared_with
        group_session.users_shared_with.update(sharing_with)
        # The second call picks up the remaining device.
        sharing_with, to_device = alice.share_group_session(
            "!test:example.org",
            [BobId, MaloryId]
        )
        assert len(sharing_with) == 1
        os.remove(os.path.join(
            ephemeral_dir,
            "{}_{}.db".format(AliceId, Alice_device)
        ))
        os.remove(os.path.join(
            ephemeral_dir,
            "{}_{}.db".format(BobId, Bob_device)
        ))
    @ephemeral
    def test_room_key_event(self):
        """m.room_key payloads: invalid ones rejected, valid ones parsed."""
        olm = self.ephemeral_olm
        session = OutboundGroupSession()
        payload = {
            "sender": BobId,
            "sender_device": Bob_device,
            "type": "m.room_key",
            "content": {
                "algorithm": "m.megolm.v1.aes-sha2",
                "room_id": TEST_ROOM,
                "session_id": session.id,
                "session_key": session.session_key,
            },
            "keys": {
            }
        }
        # An empty payload cannot be validated at all.
        bad_event = olm._handle_room_key_event(
            BobId,
            "<KEY>",
            {}
        )
        assert isinstance(bad_event, UnknownBadEvent)
        # With empty "keys" (no ed25519 signing key) the event is dropped.
        event = olm._handle_room_key_event(
            BobId,
            "<KEY>",
            payload
        )
        assert not event
        payload["keys"] = {
            "ed25519": "<KEY>"
        }
        # Once a signing key is present the event parses successfully.
        event = olm._handle_room_key_event(
            BobId,
            "<KEY>",
            payload
        )
        assert isinstance(event, RoomKeyEvent)
    @ephemeral
    def test_forwarded_room_key_event(self):
        """Forwarded room keys are only accepted for outstanding requests."""
        olm = self.ephemeral_olm
        session = OutboundGroupSession()
        # Re-import the outbound session as an inbound one so it can be
        # exported for the forwarded payload below.
        session = InboundGroupSession(
            session.session_key,
            "<KEY>",
            "<KEY>",
            TEST_ROOM
        )
        payload = {
            "sender": BobId,
            "sender_device": Bob_device,
            "type": "m.forwarded_room_key",
            "content": {
                "algorithm": "m.megolm.v1.aes-sha2",
                "room_id": session.room_id,
                "session_id": session.id,
                "session_key": session.export_session(
                    session.first_known_index
                ),
                "sender_key": session.sender_key,
                "sender_claimed_ed25519_key": session.ed25519,
                "forwarding_curve25519_key_chain": session.forwarding_chain,
            },
            "keys": {
                "ed25519": session.ed25519
            }
        }
        # An empty payload cannot be validated at all.
        bad_event = olm._handle_room_key_event(
            BobId,
            "<KEY>",
            {}
        )
        assert isinstance(bad_event, UnknownBadEvent)
        # Without a matching outgoing key request the event is ignored.
        event = olm._handle_forwarded_room_key_event(
            BobId,
            "<KEY>",
            payload
        )
        assert not event
        # NOTE(review): session.id is passed for both the first and second
        # positional arguments -- confirm against OutgoingKeyRequest's
        # signature that this is intentional.
        key_request = OutgoingKeyRequest(
            session.id,
            session.id,
            session.room_id,
            "megolm.v1"
        )
        olm.outgoing_key_requests[session.id] = key_request
        event = olm._handle_olm_event(
            BobId,
            "Xjuu9d2KjHLGIHpCOCHS7hONQahapiwI1MhVmlPlCFM",
            payload
        )
        assert isinstance(event, ForwardedRoomKeyEvent)
    def test_user_verification_status(self, monkeypatch):
        """A user is fully verified only when all their devices are."""
        # Persisting the key store is a no-op for this test.
        def mocksave(self):
            return
        monkeypatch.setattr(KeyStore, '_save', mocksave)
        # create two new accounts
        alice = self._load(AliceId, Alice_device)
        bob = self._load(BobId, Bob_device)
        # create olm devices for each others known devices list
        bob_device = OlmDevice(
            BobId,
            Bob_device,
            bob.account.identity_keys
        )
        # A second device for bob, reusing the same identity keys.
        bob2_device = OlmDevice(
            BobId,
            Malory_device,
            bob.account.identity_keys
        )
        alice.device_store.add(bob_device)
        assert not alice.user_fully_verified(BobId)
        alice.verify_device(bob_device)
        assert alice.user_fully_verified(BobId)
        # Adding an unverified second device drops full verification.
        alice.device_store.add(bob2_device)
        assert not alice.user_fully_verified(BobId)
        alice.verify_device(bob2_device)
        assert alice.user_fully_verified(BobId)
        os.remove(os.path.join(
            ephemeral_dir,
            "{}_{}.db".format(AliceId, Alice_device)
        ))
        os.remove(os.path.join(
            ephemeral_dir,
            "{}_{}.db".format(BobId, Bob_device)
        ))
@ephemeral
def test_group_decryption(self):
olm = self.ephemeral_olm
olm.create_outbound_group_session(TEST_ROOM)
message = {
"type": "m.room.message",
"content": {
"msgtype": "m.text",
"body": "hello wordl",
},
}
with pytest.raises(GroupEncryptionError):
encrypted_dict = olm.group_encrypt(TEST_ROOM, message)
session = olm.outbound_group_sessions[TEST_ROOM]
session.shared = True
encrypted_dict = olm.group_encrypt(TEST_ROOM, message)
megolm = {
"type": "m.room.encrypted",
"content": encrypted_dict
}
megolm_event = MegolmEvent.from_dict(megolm)
assert isinstance(megolm_event, UnknownBadEvent)
megolm["event_id"] = "1"
| |
<filename>DNN_base.py
# -*- coding: utf-8 -*-
import tensorflow as tf
import numpy as np
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.training import moving_averages
#
# 用于最中执行batch normalization的函数
# tf.nn.batch_normalization(
# x,
# mean,
# variance,
# offset,
# scale,
# variance_epsilon,
# name=None
# )
#
# 参数:
# x是input输入样本
# mean是样本均值
# variance是样本方差
# offset是样本偏移(相加一个转化值)
# scale是缩放(默认为1)
# variance_epsilon是为了避免分母为0,添加的一个极小值
# 输出的计算公式为:
# y = scale * (x - mean) / var + offset
#
# -------------------------------------------------------
# def moments(
# x,
# axes,
# shift=None, # pylint: disable=unused-argument
# name=None,
# keep_dims=False):
#
# 参数:
# x:一个tensor张量,即我们的输入数据
# axes:一个int型数组,它用来指定我们计算均值和方差的轴(这里不好理解,可以结合下面的例子)
# shift:当前实现中并没有用到
# name:用作计算moment操作的名称
# keep_dims:输出和输入是否保持相同的维度
#
# 返回:
# 两个tensor张量:均值和方差
def mean_var2tensor(input_variable):
    """Return (mean, variance) of *input_variable* along its last axis."""
    last_axis = len(input_variable.get_shape()) - 1
    return tf.nn.moments(input_variable, axes=[last_axis], keep_dims=True)
def mean_var2numpy(input_variable):
    """Return (mean, variance) along the last axis.

    NOTE(review): despite the name this returns tensors, not numpy
    arrays -- it is an exact duplicate of mean_var2tensor.
    """
    axis = [len(input_variable.get_shape()) - 1]
    mean, var = tf.nn.moments(input_variable, axes=axis, keep_dims=True)
    return mean, var
def my_batch_normalization(input_x, is_training=True, name='BatchNorm', moving_decay=0.9):
    """Batch normalization over the last axis with EMA-tracked moments.

    Training path uses (and updates) the batch moments; the other path
    reads the exponential moving averages instead.
    """
    # Batch Normalize
    x_shape = input_x.get_shape()
    axis = [len(x_shape) - 1]
    with tf.variable_scope(name):
        x_mean, x_var = tf.nn.moments(input_x, axes=axis, name='moments', keep_dims=True)
        scale = tf.constant(0.1)  # one scale factor shared by every batch
        shift = tf.constant(0.001)  # one shift term shared by every batch
        epsilon = 0.0001
        # Track mean/variance with an exponential moving average.
        ema = tf.train.ExponentialMovingAverage(moving_decay)
        def mean_var_with_update():
            ema_apply_op = ema.apply([x_mean, x_var])
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(x_mean), tf.identity(x_var)
        # Training: update and use batch moments; otherwise use the EMA.
        # NOTE(review): with the default Python-bool `is_training`, tf.equal
        # folds to a graph constant, and the else branch calls ema.average()
        # on tensors ema.apply() never registered -- confirm the inference
        # path is actually exercised anywhere.
        x_mean, x_var = tf.cond(tf.equal(is_training, True), mean_var_with_update,
                                lambda: (ema.average(x_mean), ema.average(x_var)))
        out_x = tf.nn.batch_normalization(input_x, x_mean, x_var, shift, scale, epsilon)
    return out_x
def my_bn(input_x, is_training=True, name='BatchNorm', moving_decay=0.9):
    """Plain batch normalization with fixed scale/shift constants.

    `is_training` and `moving_decay` are accepted for interface parity
    with my_batch_normalization but are unused here: moments always come
    from the current batch.
    """
    last_axis = [len(input_x.get_shape()) - 1]
    with tf.variable_scope(name):
        batch_mean, batch_var = tf.nn.moments(input_x, axes=last_axis, name='moments', keep_dims=True)
        scale = tf.constant(0.1)    # one scale factor shared by every batch
        shift = tf.constant(0.001)  # one shift term shared by every batch
        epsilon = 0.0001
        normalized = tf.nn.batch_normalization(input_x, batch_mean, batch_var, shift, scale, epsilon)
    return normalized
# ---------------------------------------------- my activations -----------------------------------------------
def mysin(x):
    """Periodic activation sin(2*pi*x)."""
    return tf.sin(2 * np.pi * x)
def srelu(x):
    """Hat-shaped activation relu(1-x)*relu(x), supported on (0, 1)."""
    left = tf.nn.relu(1 - x)
    right = tf.nn.relu(x)
    return left * right
def asrelu(x):
    """Absolute-value srelu: nonzero only for 0 < |x| < 1."""
    magnitude = tf.abs(x)
    return tf.nn.relu(1 - magnitude) * tf.nn.relu(magnitude)
def s2relu(x):
    """Sine-modulated hat: relu(1-x) * relu(x) * sin(2*pi*x)."""
    hat = tf.nn.relu(1 - x) * tf.nn.relu(x)
    return hat * tf.sin(2 * np.pi * x)
def s3relu(x):
    """Symmetric s2relu variant driven by |x|."""
    magnitude = tf.abs(x)
    hat = tf.nn.relu(1 - magnitude) * tf.nn.relu(magnitude)
    return hat * tf.sin(2 * np.pi * magnitude)
def csrelu(x):
    """Cosine-modulated hat, scaled by 1.5."""
    hat = tf.nn.relu(1 - x) * tf.nn.relu(x)
    return 1.5 * hat * tf.cos(np.pi * x)
def stanh(x):
    """Bounded periodic activation sin(2*pi*tanh(x))."""
    squashed = tf.tanh(x)
    return tf.sin(2 * np.pi * squashed)
def gauss(x):
    """Scaled Gaussian bump 0.75 * exp(-2 * x^2)."""
    sq = x * x
    return 0.75 * tf.exp(-2 * sq)
def mexican(x):
    """Mexican-hat wavelet (1 - x^2) * exp(-x^2 / 2)."""
    sq = x * x
    return (1 - sq) * tf.exp(-0.5 * sq)
def modify_mexican(x):
    """Odd activation x * exp(-0.075 * x^2) with slow Gaussian decay."""
    damping = tf.exp(-0.075 * x * x)
    return x * damping
def sm_mexican(x):
    """Sine-modulated mexican hat: 2 * sin(pi*x) * x * exp(-x^2 / 2)."""
    envelope = x * tf.exp(-0.5 * x * x)
    return 2.0 * tf.sin(np.pi * x) * envelope
def singauss(x):
    """Gaussian-windowed sine centred at x = 0.5."""
    centered = x - 0.5
    window = tf.exp(-2 * centered * centered)
    return 0.225 * window * tf.sin(2 * np.pi * x)
def powsin_srelu(x):
    """Hat times squared sine: relu(1-x) * relu(x) * sin^2(2*pi*x)."""
    s = tf.sin(2 * np.pi * x)
    return tf.nn.relu(1 - x) * tf.nn.relu(x) * s * s
def sin2_srelu(x):
    """Hat modulated by two sine harmonics (4*pi and 2*pi), scaled by 2."""
    hat = tf.nn.relu(1 - x) * tf.nn.relu(x)
    return 2.0 * hat * tf.sin(4 * np.pi * x) * tf.sin(2 * np.pi * x)
def slrelu(x):
    """Leaky-relu analogue of the srelu hat."""
    left = tf.nn.leaky_relu(1 - x)
    right = tf.nn.leaky_relu(x)
    return left * right
def pow2relu(x):
    """Asymmetric hat: relu(1-x) * relu(x)^2."""
    r = tf.nn.relu(x)
    return tf.nn.relu(1 - x) * r * r
def selu(x):
    """ELU-based hat (note: unrelated to the standard SELU activation)."""
    left = tf.nn.elu(1 - x)
    right = tf.nn.elu(x)
    return left * right
def wave(x):
    """Piecewise-linear wavelet with relu kinks at 0, 1/4, 3/4 and 1."""
    kinks = (tf.nn.relu(x)
             - 2 * tf.nn.relu(x - 1 / 4)
             + 2 * tf.nn.relu(x - 3 / 4)
             - tf.nn.relu(x - 1))
    return kinks
def phi(x):
    # Piecewise-polynomial bump built from shifted relu powers.
    # NOTE(review): the first three terms are squared while the final
    # (x-3) term is cubed -- confirm the asymmetry is intentional; a
    # B-spline-like bump would use the same power on all four terms.
    return tf.nn.relu(x) * tf.nn.relu(x)-3*tf.nn.relu(x-1)*tf.nn.relu(x-1) + 3*tf.nn.relu(x-2)*tf.nn.relu(x-2) \
           - tf.nn.relu(x-3)*tf.nn.relu(x-3)*tf.nn.relu(x-3)
# ------------------------------------------------ 初始化权重和偏置 --------------------------------------------
# 生成DNN的权重和偏置
# tf.random_normal(): 用于从服从指定正太分布的数值中取出随机数
# tf.random_normal(shape,mean=0.0,stddev=1.0,dtype=tf.float32,seed=None,name=None)
# hape: 输出张量的形状,必选.--- mean: 正态分布的均值,默认为0.----stddev: 正态分布的标准差,默认为1.0
# dtype: 输出的类型,默认为tf.float32 ----seed: 随机数种子,是一个整数,当设置之后,每次生成的随机数都一样---name: 操作的名称
def Initial_DNN2different_hidden(in_size, out_size, hidden_layers, Flag):
    """Create weight/bias Variables for a DNN with per-layer hidden sizes.

    Args:
        in_size: input dimension.
        out_size: output dimension.
        hidden_layers: sequence of hidden-layer widths.
        Flag: suffix appended to every variable name.

    Returns:
        (Weights, Biases): lists of tf.Variable covering the input
        transform, the hidden layers and the output transform.
    """
    n_hiddens = len(hidden_layers)
    Weights = []  # weights for each layer
    Biases = []  # biases for each layer
    # First layer: transforms the input.
    W = tf.Variable(0.1 * tf.random.normal([in_size, hidden_layers[0]]), dtype='float32',
                    name='W_transInput' + str(Flag))
    B = tf.Variable(0.1 * tf.random.uniform([1, hidden_layers[0]]), dtype='float32',
                    name='B_transInput' + str(Flag))
    Weights.append(W)
    Biases.append(B)
    # Hidden layers: second through second-to-last.
    for i_layer in range(n_hiddens - 1):
        W = tf.Variable(0.1 * tf.random.normal([hidden_layers[i_layer], hidden_layers[i_layer+1]]), dtype='float32',
                        name='W_hidden' + str(i_layer + 1) + str(Flag))
        B = tf.Variable(0.1 * tf.random.uniform([1, hidden_layers[i_layer+1]]), dtype='float32',
                        name='B_hidden' + str(i_layer + 1) + str(Flag))
        Weights.append(W)
        Biases.append(B)
    # Output layer: maps the last hidden width to the output dimension.
    W = tf.Variable(0.1 * tf.random.normal([hidden_layers[-1], out_size]), dtype='float32',
                    name='W_outTrans' + str(Flag))
    B = tf.Variable(0.1 * tf.random.uniform([1, out_size]), dtype='float32',
                    name='B_outTrans' + str(Flag))
    Weights.append(W)
    Biases.append(B)
    return Weights, Biases
# tf.truncated_normal(shape, mean, stddev) :shape表示生成张量的维度,mean是均值,stddev是标准差。这个函数产生正太分布,
# 均值和标准差自己设定。这是一个截断的产生正太分布的函数,就是说产生正太分布的值如果与均值的差值大于两倍的标准差,
# 那就重新生成。和一般的正太分布的产生随机数据比起来,这个函数产生的随机数与均值的差距不会超过两倍的标准差,但是一般的别的函数是可能的。
# truncated_normal(
# shape,
# mean=0.0,
# stddev=1.0,
# dtype=tf.float32,
# seed=None,
# name=None)
def truncated_normal_init(in_dim, out_dim, scale_coef=1.0, weight_name='weight'):
    """Xavier-style truncated-normal Variable of shape (in_dim, out_dim).

    scale_coef guards against the initial values being too small or large.
    """
    xavier_stddev = np.sqrt(2 / (in_dim + out_dim))
    initial = tf.truncated_normal([in_dim, out_dim], stddev=xavier_stddev)
    return tf.Variable(scale_coef * initial, dtype=tf.float32, name=weight_name)
# tf.random_uniform()
# 默认是在 0 到 1 之间产生随机数,也可以通过 minval 和 maxval 指定上下界
def uniform_init(in_dim, out_dim, weight_name='weight'):
    """Uniform [0, 1) Variable of shape (in_dim, out_dim)."""
    initial = tf.random_uniform([in_dim, out_dim], dtype=tf.float32)
    return tf.Variable(initial, dtype=tf.float32, name=weight_name)
# tf.random_normal(shape, mean=0.0, stddev=1.0, dtype=tf.float32, seed=None, name=None)
# 从正态分布中输出随机值。
# 参数:
# shape: 一维的张量,也是输出的张量。
# mean: 正态分布的均值。
# stddev: 正态分布的标准差。
# dtype: 输出的类型。
# seed: 一个整数,当设置之后,每次生成的随机数都一样。
# name: 操作的名字。
def normal_init(in_dim, out_dim, scale_coef=1.0, weight_name='weight'):
    """Normal-initialized Variable with Xavier stddev sqrt(2/(fan_in+fan_out)).

    scale_coef guards against the initial values being too small or large.
    """
    stddev2normal = np.sqrt(2.0 / (in_dim + out_dim))
    initial = tf.random_normal([in_dim, out_dim], mean=0, stddev=stddev2normal, dtype=tf.float32)
    return tf.Variable(scale_coef * initial, dtype=tf.float32, name=weight_name)
# tf.zeros(
# shape,
# dtype=tf.float32,
# name=None
# )
# shape代表形状,也就是1纬的还是2纬的还是n纬的数组
def zeros_init(in_dim, out_dim, weight_name='weight'):
    """All-zero Variable of shape (in_dim, out_dim)."""
    zeros = tf.zeros([in_dim, out_dim], dtype=tf.float32)
    return tf.Variable(zeros, dtype=tf.float32, name=weight_name)
def initialize_NN_xavier(in_size, out_size, hidden_layers, Flag):
    """Create DNN weights/biases: Xavier truncated-normal weights and
    uniform biases, reusable via tf.AUTO_REUSE.

    Returns (Weights, Biases) lists covering the input transform, the
    hidden layers and the output transform.
    """
    with tf.variable_scope('WB_scope', reuse=tf.AUTO_REUSE):
        scale = 5.0  # keeps initial values from being too small or large
        n_hiddens = len(hidden_layers)
        Weights = []  # weights for each layer
        Biases = []  # biases for each layer
        # First layer: transforms the input.
        W = truncated_normal_init(in_size, hidden_layers[0], scale_coef=scale, weight_name='W-transInput' + str(Flag))
        B = uniform_init(1, hidden_layers[0], weight_name='B-transInput' + str(Flag))
        Weights.append(W)
        Biases.append(B)
        for i_layer in range(0, n_hiddens - 1):
            W = truncated_normal_init(hidden_layers[i_layer], hidden_layers[i_layer + 1], scale_coef=scale,
                                      weight_name='W-hidden' + str(i_layer + 1) + str(Flag))
            B = uniform_init(1, hidden_layers[i_layer + 1], weight_name='B-hidden' + str(i_layer + 1) + str(Flag))
            Weights.append(W)
            Biases.append(B)
        # Output layer: maps the last hidden width to the output dimension.
        W = truncated_normal_init(hidden_layers[-1], out_size, scale_coef=scale, weight_name='W-outTrans' + str(Flag))
        B = uniform_init(1, out_size, weight_name='B-outTrans' + str(Flag))
        Weights.append(W)
        Biases.append(B)
    return Weights, Biases
def initialize_NN_random_normal(in_size, out_size, hidden_layers, Flag, varcoe=0.5):
    """Create DNN weights/biases via tf.get_variable with a normal
    initializer; stddev = (2 / (fan_in + fan_out)) ** varcoe.
    Biases have shape (1, width).
    """
    with tf.variable_scope('WB_scope', reuse=tf.AUTO_REUSE):
        n_hiddens = len(hidden_layers)
        Weights = []  # weights for each layer
        Biases = []  # biases for each layer
        # First layer: transforms the input.
        stddev_WB = (2.0 / (in_size + hidden_layers[0])) ** varcoe
        W = tf.get_variable(name='W-transInput' + str(Flag), shape=(in_size, hidden_layers[0]),
                            initializer=tf.random_normal_initializer(stddev=stddev_WB),
                            dtype=tf.float32)
        B = tf.get_variable(name='B-transInput' + str(Flag), shape=(1, hidden_layers[0]),
                            initializer=tf.random_normal_initializer(stddev=stddev_WB),
                            dtype=tf.float32)
        Weights.append(W)
        Biases.append(B)
        for i_layer in range(0, n_hiddens - 1):
            stddev_WB = (2.0 / (hidden_layers[i_layer] + hidden_layers[i_layer + 1])) ** varcoe
            W = tf.get_variable(
                name='W' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer], hidden_layers[i_layer + 1]),
                initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
            B = tf.get_variable(
                name='B' + str(i_layer + 1) + str(Flag), shape=(1, hidden_layers[i_layer + 1]),
                initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
            Weights.append(W)
            Biases.append(B)
        # Output layer: maps the last hidden width to the output dimension.
        stddev_WB = (2.0 / (hidden_layers[-1] + out_size)) ** varcoe
        W = tf.get_variable(
            name='W-outTrans' + str(Flag), shape=(hidden_layers[-1], out_size),
            initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
        B = tf.get_variable(
            name='B-outTrans' + str(Flag), shape=(1, out_size),
            initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
        Weights.append(W)
        Biases.append(B)
        return Weights, Biases
def initialize_NN_random_normal2(in_size, out_size, hidden_layers, Flag, varcoe=0.5):
    """Like initialize_NN_random_normal, but biases are rank-1 tensors of
    shape (width,) instead of (1, width).
    """
    with tf.variable_scope('WB_scope', reuse=tf.AUTO_REUSE):
        n_hiddens = len(hidden_layers)
        Weights = []  # weights for each layer
        Biases = []  # biases for each layer
        # First layer: transforms the input.
        stddev_WB = (2.0 / (in_size + hidden_layers[0])) ** varcoe
        W = tf.get_variable(name='W-transInput' + str(Flag), shape=(in_size, hidden_layers[0]),
                            initializer=tf.random_normal_initializer(stddev=stddev_WB),
                            dtype=tf.float32)
        B = tf.get_variable(name='B-transInput' + str(Flag), shape=(hidden_layers[0],),
                            initializer=tf.random_normal_initializer(stddev=stddev_WB),
                            dtype=tf.float32)
        Weights.append(W)
        Biases.append(B)
        for i_layer in range(0, n_hiddens - 1):
            stddev_WB = (2.0 / (hidden_layers[i_layer] + hidden_layers[i_layer + 1])) ** varcoe
            W = tf.get_variable(
                name='W' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer], hidden_layers[i_layer + 1]),
                initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
            B = tf.get_variable(name='B' + str(i_layer + 1) + str(Flag), shape=(hidden_layers[i_layer + 1],),
                                initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
            Weights.append(W)
            Biases.append(B)
        # Output layer: maps the last hidden width to the output dimension.
        stddev_WB = (2.0 / (hidden_layers[-1] + out_size)) ** varcoe
        W = tf.get_variable(name='W-outTrans' + str(Flag), shape=(hidden_layers[-1], out_size),
                            initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
        B = tf.get_variable(name='B-outTrans' + str(Flag), shape=(out_size,),
                            initializer=tf.random_normal_initializer(stddev=stddev_WB), dtype=tf.float32)
        Weights.append(W)
        Biases.append(B)
        return Weights, Biases
def initialize_NN_random_normal2_CS(in_size, out_size, hidden_layers, Flag, varcoe=0.5):
with tf.variable_scope('WB_scope', reuse=tf.AUTO_REUSE):
n_hiddens = len(hidden_layers)
Weights = [] # 权重列表,用于存储隐藏层的权重
Biases = [] # 偏置列表,用于存储隐藏层的偏置
# 隐藏层:第一层的权重和偏置,对输入数据做变换
stddev_WB = (2.0 / (in_size + hidden_layers[0])) ** varcoe
W = tf.get_variable(name='W-transInput' + str(Flag), shape=(in_size, hidden_layers[0]),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32)
B = tf.get_variable(name='B-transInput' + str(Flag), shape=(hidden_layers[0],),
initializer=tf.random_normal_initializer(stddev=stddev_WB),
dtype=tf.float32)
Weights.append(W)
Biases.append(B)
for |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.