code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import json
import pickle
class ConnectionHelper:
    """Length-prefixed message framing over a socket.

    Wire format: an ASCII decimal byte count terminated by ``\\n``,
    followed by exactly that many payload bytes.  JSON payloads are
    UTF-8 text; pickle payloads are raw bytes.
    """
    @staticmethod
    def send_json(socket, data):
        """Serialize *data* as JSON and send it length-prefixed.

        Raises Exception if *data* is not JSON-serializable.
        """
        try:
            serialized = json.dumps(data)
        except (TypeError, ValueError) as e:
            # chain the original error so the real cause is not lost
            raise Exception('You can only send JSON-serializable data') from e
        encoded = serialized.encode()
        # send the length of the serialized data first; use sendall() so a
        # partial send() cannot truncate the header, and count the encoded
        # bytes (what actually goes on the wire), not the str length
        socket.sendall(b'%d\n' % len(encoded))
        # send the serialized data
        socket.sendall(encoded)
    @staticmethod
    def receive_json(socket):
        """Receive one framed message and deserialize it from JSON.

        Returns the decoded object, or None when an unexpected error
        occurred (the error is printed, preserving the original
        best-effort behaviour).  Raises Exception when the payload is
        not valid JSON.
        """
        deserialized = None
        try:
            view = ConnectionHelper.receive(socket).decode()
            deserialized = json.loads(view)
        except (TypeError, ValueError) as e:
            raise Exception('Data received was not in JSON format') from e
        except Exception as e:
            # best-effort: report unexpected socket errors without raising
            print(e)
        return deserialized
    @staticmethod
    def send_pickle(socket, object):
        """Serialize *object* with pickle and send it length-prefixed.

        Raises Exception if *object* is not picklable.
        """
        try:
            serialized = pickle.dumps(object)
        except (TypeError, ValueError) as e:
            raise Exception('You can only send Pickle-serializable data') from e
        # send the length of the serialized data first (sendall avoids a
        # partial write of the header)
        socket.sendall(b'%d\n' % len(serialized))
        # send the serialized data
        socket.sendall(serialized)
    @staticmethod
    def receive_pickle(socket):
        """Receive one framed message and unpickle it.

        WARNING: unpickling data from a network peer can execute
        arbitrary code; only use this between trusted endpoints.
        Raises Exception when the payload is not a valid pickle.
        """
        view = ConnectionHelper.receive(socket)
        try:
            deserialized = pickle.loads(view)
        except (TypeError, ValueError, EOFError, pickle.UnpicklingError) as e:
            # EOFError/UnpicklingError cover truncated or empty payloads
            raise Exception('Data received was not in Pickle format') from e
        return deserialized
    @staticmethod
    def receive(socket):
        """Read one framed message and return its payload as bytes.

        Returns b'' if the connection was closed before any data arrived.
        """
        # read the length of the data, letter by letter until we reach EOL
        length_str = ''
        char = socket.recv(1).decode()
        if char == '':
            # connection closed cleanly before any data: return bytes (not
            # str, as the original did) so callers can uniformly decode or
            # unpickle the result
            return b''
        while char != '\n':
            length_str += char
            char = socket.recv(1).decode()
            if char == '':
                # peer closed mid-header: fail instead of looping forever
                raise Exception('Connection closed while reading message length')
        total = int(length_str)
        # use a memoryview to receive the data chunk by chunk efficiently
        view = memoryview(bytearray(total))
        next_offset = 0
        while total - next_offset > 0:
            recv_size = socket.recv_into(view[next_offset:], total - next_offset)
            if recv_size == 0:
                # peer closed mid-payload: fail instead of looping forever
                raise Exception('Connection closed while reading message body')
            next_offset += recv_size
        return view.tobytes()
|
[
"pickle.loads",
"json.loads",
"json.dumps",
"pickle.dumps"
] |
[((142, 158), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (152, 158), False, 'import json\n'), ((643, 659), 'json.loads', 'json.loads', (['view'], {}), '(view)\n', (653, 659), False, 'import json\n'), ((947, 967), 'pickle.dumps', 'pickle.dumps', (['object'], {}), '(object)\n', (959, 967), False, 'import pickle\n'), ((1406, 1424), 'pickle.loads', 'pickle.loads', (['view'], {}), '(view)\n', (1418, 1424), False, 'import pickle\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities, _tables
# Public API of this module: the argument classes re-exported for
# `from <module> import *` and documentation tooling.
__all__ = [
    'GlobalTableReplicaArgs',
    'TableAttributeArgs',
    'TableGlobalSecondaryIndexArgs',
    'TableLocalSecondaryIndexArgs',
    'TablePointInTimeRecoveryArgs',
    'TableReplicaArgs',
    'TableServerSideEncryptionArgs',
    'TableTtlArgs',
    'GetTableServerSideEncryptionArgs',
]
@pulumi.input_type
class GlobalTableReplicaArgs:
    """Arguments describing one replica region of a DynamoDB Global Table."""
    def __init__(__self__, *,
                 region_name: pulumi.Input[str]):
        """
        :param pulumi.Input[str] region_name: AWS region name of replica DynamoDB Table. e.g. `us-east-1`
        """
        pulumi.set(__self__, "region_name", region_name)
    @property
    @pulumi.getter(name="regionName")
    def region_name(self) -> pulumi.Input[str]:
        """
        AWS region name of replica DynamoDB Table. e.g. `us-east-1`
        """
        return pulumi.get(self, "region_name")
    @region_name.setter
    def region_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "region_name", value)
@pulumi.input_type
class TableAttributeArgs:
    """An attribute (name plus scalar type) declared on a DynamoDB table."""
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 type: pulumi.Input[str]):
        """
        :param pulumi.Input[str] name: The name of the attribute
        :param pulumi.Input[str] type: Attribute type, which must be a scalar type: `S`, `N`, or `B` for (S)tring, (N)umber or (B)inary data
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the attribute
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Attribute type, which must be a scalar type: `S`, `N`, or `B` for (S)tring, (N)umber or (B)inary data
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class TableGlobalSecondaryIndexArgs:
    """Arguments describing a global secondary index on a DynamoDB table."""
    def __init__(__self__, *,
                 hash_key: pulumi.Input[str],
                 name: pulumi.Input[str],
                 projection_type: pulumi.Input[str],
                 non_key_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 range_key: Optional[pulumi.Input[str]] = None,
                 read_capacity: Optional[pulumi.Input[int]] = None,
                 write_capacity: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] hash_key: The name of the hash key in the index; must be
               defined as an attribute in the resource.
        :param pulumi.Input[str] name: The name of the index
        :param pulumi.Input[str] projection_type: One of `ALL`, `INCLUDE` or `KEYS_ONLY`
               where `ALL` projects every attribute into the index, `KEYS_ONLY`
               projects just the hash and range key into the index, and `INCLUDE`
               projects only the keys specified in the _non_key_attributes_
               parameter.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] non_key_attributes: Only required with `INCLUDE` as a
               projection type; a list of attributes to project into the index. These
               do not need to be defined as attributes on the table.
        :param pulumi.Input[str] range_key: The name of the range key; must be defined
        :param pulumi.Input[int] read_capacity: The number of read units for this index. Must be set if billing_mode is set to PROVISIONED.
        :param pulumi.Input[int] write_capacity: The number of write units for this index. Must be set if billing_mode is set to PROVISIONED.
        """
        pulumi.set(__self__, "hash_key", hash_key)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "projection_type", projection_type)
        if non_key_attributes is not None:
            pulumi.set(__self__, "non_key_attributes", non_key_attributes)
        if range_key is not None:
            pulumi.set(__self__, "range_key", range_key)
        if read_capacity is not None:
            pulumi.set(__self__, "read_capacity", read_capacity)
        if write_capacity is not None:
            pulumi.set(__self__, "write_capacity", write_capacity)
    @property
    @pulumi.getter(name="hashKey")
    def hash_key(self) -> pulumi.Input[str]:
        """
        The name of the hash key in the index; must be
        defined as an attribute in the resource.
        """
        return pulumi.get(self, "hash_key")
    @hash_key.setter
    def hash_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "hash_key", value)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the index
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="projectionType")
    def projection_type(self) -> pulumi.Input[str]:
        """
        One of `ALL`, `INCLUDE` or `KEYS_ONLY`
        where `ALL` projects every attribute into the index, `KEYS_ONLY`
        projects just the hash and range key into the index, and `INCLUDE`
        projects only the keys specified in the _non_key_attributes_
        parameter.
        """
        return pulumi.get(self, "projection_type")
    @projection_type.setter
    def projection_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "projection_type", value)
    @property
    @pulumi.getter(name="nonKeyAttributes")
    def non_key_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Only required with `INCLUDE` as a
        projection type; a list of attributes to project into the index. These
        do not need to be defined as attributes on the table.
        """
        return pulumi.get(self, "non_key_attributes")
    @non_key_attributes.setter
    def non_key_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "non_key_attributes", value)
    @property
    @pulumi.getter(name="rangeKey")
    def range_key(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the range key; must be defined
        """
        return pulumi.get(self, "range_key")
    @range_key.setter
    def range_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "range_key", value)
    @property
    @pulumi.getter(name="readCapacity")
    def read_capacity(self) -> Optional[pulumi.Input[int]]:
        """
        The number of read units for this index. Must be set if billing_mode is set to PROVISIONED.
        """
        return pulumi.get(self, "read_capacity")
    @read_capacity.setter
    def read_capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "read_capacity", value)
    @property
    @pulumi.getter(name="writeCapacity")
    def write_capacity(self) -> Optional[pulumi.Input[int]]:
        """
        The number of write units for this index. Must be set if billing_mode is set to PROVISIONED.
        """
        return pulumi.get(self, "write_capacity")
    @write_capacity.setter
    def write_capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "write_capacity", value)
@pulumi.input_type
class TableLocalSecondaryIndexArgs:
    """Arguments describing a local secondary index on a DynamoDB table."""
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 projection_type: pulumi.Input[str],
                 range_key: pulumi.Input[str],
                 non_key_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] name: The name of the index
        :param pulumi.Input[str] projection_type: One of `ALL`, `INCLUDE` or `KEYS_ONLY`
               where `ALL` projects every attribute into the index, `KEYS_ONLY`
               projects just the hash and range key into the index, and `INCLUDE`
               projects only the keys specified in the _non_key_attributes_
               parameter.
        :param pulumi.Input[str] range_key: The name of the range key; must be defined
        :param pulumi.Input[Sequence[pulumi.Input[str]]] non_key_attributes: Only required with `INCLUDE` as a
               projection type; a list of attributes to project into the index. These
               do not need to be defined as attributes on the table.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "projection_type", projection_type)
        pulumi.set(__self__, "range_key", range_key)
        if non_key_attributes is not None:
            pulumi.set(__self__, "non_key_attributes", non_key_attributes)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the index
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="projectionType")
    def projection_type(self) -> pulumi.Input[str]:
        """
        One of `ALL`, `INCLUDE` or `KEYS_ONLY`
        where `ALL` projects every attribute into the index, `KEYS_ONLY`
        projects just the hash and range key into the index, and `INCLUDE`
        projects only the keys specified in the _non_key_attributes_
        parameter.
        """
        return pulumi.get(self, "projection_type")
    @projection_type.setter
    def projection_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "projection_type", value)
    @property
    @pulumi.getter(name="rangeKey")
    def range_key(self) -> pulumi.Input[str]:
        """
        The name of the range key; must be defined
        """
        return pulumi.get(self, "range_key")
    @range_key.setter
    def range_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "range_key", value)
    @property
    @pulumi.getter(name="nonKeyAttributes")
    def non_key_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Only required with `INCLUDE` as a
        projection type; a list of attributes to project into the index. These
        do not need to be defined as attributes on the table.
        """
        return pulumi.get(self, "non_key_attributes")
    @non_key_attributes.setter
    def non_key_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "non_key_attributes", value)
@pulumi.input_type
class TablePointInTimeRecoveryArgs:
    """Point-in-time recovery settings for a DynamoDB table."""
    def __init__(__self__, *,
                 enabled: pulumi.Input[bool]):
        """
        :param pulumi.Input[bool] enabled: Indicates whether point-in-time recovery is enabled (true) or disabled (false).
        """
        pulumi.set(__self__, "enabled", enabled)
    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        """
        Indicates whether point-in-time recovery is enabled (true) or disabled (false).
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class TableReplicaArgs:
    """Arguments describing one replica region of a DynamoDB table."""
    def __init__(__self__, *,
                 region_name: pulumi.Input[str]):
        """
        :param pulumi.Input[str] region_name: Region name of the replica.
        """
        pulumi.set(__self__, "region_name", region_name)
    @property
    @pulumi.getter(name="regionName")
    def region_name(self) -> pulumi.Input[str]:
        """
        Region name of the replica.
        """
        return pulumi.get(self, "region_name")
    @region_name.setter
    def region_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "region_name", value)
@pulumi.input_type
class TableServerSideEncryptionArgs:
    """Server-side encryption settings for a DynamoDB table."""
    def __init__(__self__, *,
                 enabled: pulumi.Input[bool],
                 kms_key_arn: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[bool] enabled: Indicates whether server-side encryption is enabled (true) or disabled (false).
        :param pulumi.Input[str] kms_key_arn: The ARN of the CMK that should be used for the AWS KMS encryption.
               This attribute should only be specified if the key is different from the default DynamoDB CMK, `alias/aws/dynamodb`.
        """
        pulumi.set(__self__, "enabled", enabled)
        if kms_key_arn is not None:
            pulumi.set(__self__, "kms_key_arn", kms_key_arn)
    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Input[bool]:
        """
        Indicates whether server-side encryption is enabled (true) or disabled (false).
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: pulumi.Input[bool]):
        pulumi.set(self, "enabled", value)
    @property
    @pulumi.getter(name="kmsKeyArn")
    def kms_key_arn(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN of the CMK that should be used for the AWS KMS encryption.
        This attribute should only be specified if the key is different from the default DynamoDB CMK, `alias/aws/dynamodb`.
        """
        return pulumi.get(self, "kms_key_arn")
    @kms_key_arn.setter
    def kms_key_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kms_key_arn", value)
@pulumi.input_type
class TableTtlArgs:
    """Time-to-live (TTL) settings for a DynamoDB table."""
    def __init__(__self__, *,
                 attribute_name: pulumi.Input[str],
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[str] attribute_name: The name of the table attribute to store the TTL timestamp in.
        :param pulumi.Input[bool] enabled: Indicates whether ttl is enabled (true) or disabled (false).
        """
        pulumi.set(__self__, "attribute_name", attribute_name)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
    @property
    @pulumi.getter(name="attributeName")
    def attribute_name(self) -> pulumi.Input[str]:
        """
        The name of the table attribute to store the TTL timestamp in.
        """
        return pulumi.get(self, "attribute_name")
    @attribute_name.setter
    def attribute_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "attribute_name", value)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates whether ttl is enabled (true) or disabled (false).
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class GetTableServerSideEncryptionArgs:
    """Server-side encryption settings as returned for a table lookup.

    NOTE(review): unlike the resource argument classes above, this uses
    plain bool/str values rather than pulumi.Input — presumably the
    data-source ("get") argument shape; confirm against callers.
    """
    def __init__(__self__, *,
                 enabled: bool,
                 kms_key_arn: str):
        """
        :param bool enabled: Indicates whether server-side encryption is enabled.
        :param str kms_key_arn: The ARN of the KMS key used for encryption.
        """
        pulumi.set(__self__, "enabled", enabled)
        pulumi.set(__self__, "kms_key_arn", kms_key_arn)
    @property
    @pulumi.getter
    def enabled(self) -> bool:
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: bool):
        pulumi.set(self, "enabled", value)
    @property
    @pulumi.getter(name="kmsKeyArn")
    def kms_key_arn(self) -> str:
        return pulumi.get(self, "kms_key_arn")
    @kms_key_arn.setter
    def kms_key_arn(self, value: str):
        pulumi.set(self, "kms_key_arn", value)
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.set"
] |
[((979, 1011), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""regionName"""'}), "(name='regionName')\n", (992, 1011), False, 'import pulumi\n'), ((4768, 4797), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""hashKey"""'}), "(name='hashKey')\n", (4781, 4797), False, 'import pulumi\n'), ((5424, 5460), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""projectionType"""'}), "(name='projectionType')\n", (5437, 5460), False, 'import pulumi\n'), ((6028, 6066), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""nonKeyAttributes"""'}), "(name='nonKeyAttributes')\n", (6041, 6066), False, 'import pulumi\n'), ((6617, 6647), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""rangeKey"""'}), "(name='rangeKey')\n", (6630, 6647), False, 'import pulumi\n'), ((6973, 7007), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""readCapacity"""'}), "(name='readCapacity')\n", (6986, 7007), False, 'import pulumi\n'), ((7402, 7437), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""writeCapacity"""'}), "(name='writeCapacity')\n", (7415, 7437), False, 'import pulumi\n'), ((9508, 9544), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""projectionType"""'}), "(name='projectionType')\n", (9521, 9544), False, 'import pulumi\n'), ((10112, 10142), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""rangeKey"""'}), "(name='rangeKey')\n", (10125, 10142), False, 'import pulumi\n'), ((10448, 10486), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""nonKeyAttributes"""'}), "(name='nonKeyAttributes')\n", (10461, 10486), False, 'import pulumi\n'), ((11957, 11989), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""regionName"""'}), "(name='regionName')\n", (11970, 11989), False, 'import pulumi\n'), ((13339, 13370), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsKeyArn"""'}), "(name='kmsKeyArn')\n", (13352, 13370), False, 'import pulumi\n'), ((14430, 14465), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""attributeName"""'}), "(name='attributeName')\n", (14443, 14465), 
False, 'import pulumi\n'), ((15638, 15669), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsKeyArn"""'}), "(name='kmsKeyArn')\n", (15651, 15669), False, 'import pulumi\n'), ((910, 958), 'pulumi.set', 'pulumi.set', (['__self__', '"""region_name"""', 'region_name'], {}), "(__self__, 'region_name', region_name)\n", (920, 958), False, 'import pulumi\n'), ((1167, 1198), 'pulumi.get', 'pulumi.get', (['self', '"""region_name"""'], {}), "(self, 'region_name')\n", (1177, 1198), False, 'import pulumi\n'), ((1285, 1323), 'pulumi.set', 'pulumi.set', (['self', '"""region_name"""', 'value'], {}), "(self, 'region_name', value)\n", (1295, 1323), False, 'import pulumi\n'), ((1720, 1754), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (1730, 1754), False, 'import pulumi\n'), ((1763, 1797), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (1773, 1797), False, 'import pulumi\n'), ((1942, 1966), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (1952, 1966), False, 'import pulumi\n'), ((2039, 2070), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (2049, 2070), False, 'import pulumi\n'), ((2295, 2319), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (2305, 2319), False, 'import pulumi\n'), ((2392, 2423), 'pulumi.set', 'pulumi.set', (['self', '"""type"""', 'value'], {}), "(self, 'type', value)\n", (2402, 2423), False, 'import pulumi\n'), ((4179, 4221), 'pulumi.set', 'pulumi.set', (['__self__', '"""hash_key"""', 'hash_key'], {}), "(__self__, 'hash_key', hash_key)\n", (4189, 4221), False, 'import pulumi\n'), ((4230, 4264), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (4240, 4264), False, 'import pulumi\n'), ((4273, 4329), 'pulumi.set', 'pulumi.set', (['__self__', '"""projection_type"""', 'projection_type'], {}), "(__self__, 
'projection_type', projection_type)\n", (4283, 4329), False, 'import pulumi\n'), ((4986, 5014), 'pulumi.get', 'pulumi.get', (['self', '"""hash_key"""'], {}), "(self, 'hash_key')\n", (4996, 5014), False, 'import pulumi\n'), ((5095, 5130), 'pulumi.set', 'pulumi.set', (['self', '"""hash_key"""', 'value'], {}), "(self, 'hash_key', value)\n", (5105, 5130), False, 'import pulumi\n'), ((5275, 5299), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (5285, 5299), False, 'import pulumi\n'), ((5372, 5403), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (5382, 5403), False, 'import pulumi\n'), ((5835, 5870), 'pulumi.get', 'pulumi.get', (['self', '"""projection_type"""'], {}), "(self, 'projection_type')\n", (5845, 5870), False, 'import pulumi\n'), ((5965, 6007), 'pulumi.set', 'pulumi.set', (['self', '"""projection_type"""', 'value'], {}), "(self, 'projection_type', value)\n", (5975, 6007), False, 'import pulumi\n'), ((6378, 6416), 'pulumi.get', 'pulumi.get', (['self', '"""non_key_attributes"""'], {}), "(self, 'non_key_attributes')\n", (6388, 6416), False, 'import pulumi\n'), ((6551, 6596), 'pulumi.set', 'pulumi.set', (['self', '"""non_key_attributes"""', 'value'], {}), "(self, 'non_key_attributes', value)\n", (6561, 6596), False, 'import pulumi\n'), ((6794, 6823), 'pulumi.get', 'pulumi.get', (['self', '"""range_key"""'], {}), "(self, 'range_key')\n", (6804, 6823), False, 'import pulumi\n'), ((6916, 6952), 'pulumi.set', 'pulumi.set', (['self', '"""range_key"""', 'value'], {}), "(self, 'range_key', value)\n", (6926, 6952), False, 'import pulumi\n'), ((7207, 7240), 'pulumi.get', 'pulumi.get', (['self', '"""read_capacity"""'], {}), "(self, 'read_capacity')\n", (7217, 7240), False, 'import pulumi\n'), ((7341, 7381), 'pulumi.set', 'pulumi.set', (['self', '"""read_capacity"""', 'value'], {}), "(self, 'read_capacity', value)\n", (7351, 7381), False, 'import pulumi\n'), ((7639, 7673), 'pulumi.get', 'pulumi.get', 
(['self', '"""write_capacity"""'], {}), "(self, 'write_capacity')\n", (7649, 7673), False, 'import pulumi\n'), ((7776, 7817), 'pulumi.set', 'pulumi.set', (['self', '"""write_capacity"""', 'value'], {}), "(self, 'write_capacity', value)\n", (7786, 7817), False, 'import pulumi\n'), ((8944, 8978), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (8954, 8978), False, 'import pulumi\n'), ((8987, 9043), 'pulumi.set', 'pulumi.set', (['__self__', '"""projection_type"""', 'projection_type'], {}), "(__self__, 'projection_type', projection_type)\n", (8997, 9043), False, 'import pulumi\n'), ((9052, 9096), 'pulumi.set', 'pulumi.set', (['__self__', '"""range_key"""', 'range_key'], {}), "(__self__, 'range_key', range_key)\n", (9062, 9096), False, 'import pulumi\n'), ((9359, 9383), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (9369, 9383), False, 'import pulumi\n'), ((9456, 9487), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (9466, 9487), False, 'import pulumi\n'), ((9919, 9954), 'pulumi.get', 'pulumi.get', (['self', '"""projection_type"""'], {}), "(self, 'projection_type')\n", (9929, 9954), False, 'import pulumi\n'), ((10049, 10091), 'pulumi.set', 'pulumi.set', (['self', '"""projection_type"""', 'value'], {}), "(self, 'projection_type', value)\n", (10059, 10091), False, 'import pulumi\n'), ((10279, 10308), 'pulumi.get', 'pulumi.get', (['self', '"""range_key"""'], {}), "(self, 'range_key')\n", (10289, 10308), False, 'import pulumi\n'), ((10391, 10427), 'pulumi.set', 'pulumi.set', (['self', '"""range_key"""', 'value'], {}), "(self, 'range_key', value)\n", (10401, 10427), False, 'import pulumi\n'), ((10798, 10836), 'pulumi.get', 'pulumi.get', (['self', '"""non_key_attributes"""'], {}), "(self, 'non_key_attributes')\n", (10808, 10836), False, 'import pulumi\n'), ((10971, 11016), 'pulumi.set', 'pulumi.set', (['self', '"""non_key_attributes"""', 'value'], 
{}), "(self, 'non_key_attributes', value)\n", (10981, 11016), False, 'import pulumi\n'), ((11287, 11327), 'pulumi.set', 'pulumi.set', (['__self__', '"""enabled"""', 'enabled'], {}), "(__self__, 'enabled', enabled)\n", (11297, 11327), False, 'import pulumi\n'), ((11515, 11542), 'pulumi.get', 'pulumi.get', (['self', '"""enabled"""'], {}), "(self, 'enabled')\n", (11525, 11542), False, 'import pulumi\n'), ((11622, 11656), 'pulumi.set', 'pulumi.set', (['self', '"""enabled"""', 'value'], {}), "(self, 'enabled', value)\n", (11632, 11656), False, 'import pulumi\n'), ((11888, 11936), 'pulumi.set', 'pulumi.set', (['__self__', '"""region_name"""', 'region_name'], {}), "(__self__, 'region_name', region_name)\n", (11898, 11936), False, 'import pulumi\n'), ((12113, 12144), 'pulumi.get', 'pulumi.get', (['self', '"""region_name"""'], {}), "(self, 'region_name')\n", (12123, 12144), False, 'import pulumi\n'), ((12231, 12269), 'pulumi.set', 'pulumi.set', (['self', '"""region_name"""', 'value'], {}), "(self, 'region_name', value)\n", (12241, 12269), False, 'import pulumi\n'), ((12852, 12892), 'pulumi.set', 'pulumi.set', (['__self__', '"""enabled"""', 'enabled'], {}), "(__self__, 'enabled', enabled)\n", (12862, 12892), False, 'import pulumi\n'), ((13177, 13204), 'pulumi.get', 'pulumi.get', (['self', '"""enabled"""'], {}), "(self, 'enabled')\n", (13187, 13204), False, 'import pulumi\n'), ((13284, 13318), 'pulumi.set', 'pulumi.set', (['self', '"""enabled"""', 'value'], {}), "(self, 'enabled', value)\n", (13294, 13318), False, 'import pulumi\n'), ((13668, 13699), 'pulumi.get', 'pulumi.get', (['self', '"""kms_key_arn"""'], {}), "(self, 'kms_key_arn')\n", (13678, 13699), False, 'import pulumi\n'), ((13796, 13834), 'pulumi.set', 'pulumi.set', (['self', '"""kms_key_arn"""', 'value'], {}), "(self, 'kms_key_arn', value)\n", (13806, 13834), False, 'import pulumi\n'), ((14270, 14324), 'pulumi.set', 'pulumi.set', (['__self__', '"""attribute_name"""', 'attribute_name'], {}), "(__self__, 
'attribute_name', attribute_name)\n", (14280, 14324), False, 'import pulumi\n'), ((14627, 14661), 'pulumi.get', 'pulumi.get', (['self', '"""attribute_name"""'], {}), "(self, 'attribute_name')\n", (14637, 14661), False, 'import pulumi\n'), ((14754, 14795), 'pulumi.set', 'pulumi.set', (['self', '"""attribute_name"""', 'value'], {}), "(self, 'attribute_name', value)\n", (14764, 14795), False, 'import pulumi\n'), ((14993, 15020), 'pulumi.get', 'pulumi.get', (['self', '"""enabled"""'], {}), "(self, 'enabled')\n", (15003, 15020), False, 'import pulumi\n'), ((15110, 15144), 'pulumi.set', 'pulumi.set', (['self', '"""enabled"""', 'value'], {}), "(self, 'enabled', value)\n", (15120, 15144), False, 'import pulumi\n'), ((15312, 15352), 'pulumi.set', 'pulumi.set', (['__self__', '"""enabled"""', 'enabled'], {}), "(__self__, 'enabled', enabled)\n", (15322, 15352), False, 'import pulumi\n'), ((15361, 15409), 'pulumi.set', 'pulumi.set', (['__self__', '"""kms_key_arn"""', 'kms_key_arn'], {}), "(__self__, 'kms_key_arn', kms_key_arn)\n", (15371, 15409), False, 'import pulumi\n'), ((15490, 15517), 'pulumi.get', 'pulumi.get', (['self', '"""enabled"""'], {}), "(self, 'enabled')\n", (15500, 15517), False, 'import pulumi\n'), ((15583, 15617), 'pulumi.set', 'pulumi.set', (['self', '"""enabled"""', 'value'], {}), "(self, 'enabled', value)\n", (15593, 15617), False, 'import pulumi\n'), ((15719, 15750), 'pulumi.get', 'pulumi.get', (['self', '"""kms_key_arn"""'], {}), "(self, 'kms_key_arn')\n", (15729, 15750), False, 'import pulumi\n'), ((15823, 15861), 'pulumi.set', 'pulumi.set', (['self', '"""kms_key_arn"""', 'value'], {}), "(self, 'kms_key_arn', value)\n", (15833, 15861), False, 'import pulumi\n'), ((4385, 4447), 'pulumi.set', 'pulumi.set', (['__self__', '"""non_key_attributes"""', 'non_key_attributes'], {}), "(__self__, 'non_key_attributes', non_key_attributes)\n", (4395, 4447), False, 'import pulumi\n'), ((4494, 4538), 'pulumi.set', 'pulumi.set', (['__self__', '"""range_key"""', 
'range_key'], {}), "(__self__, 'range_key', range_key)\n", (4504, 4538), False, 'import pulumi\n'), ((4589, 4641), 'pulumi.set', 'pulumi.set', (['__self__', '"""read_capacity"""', 'read_capacity'], {}), "(__self__, 'read_capacity', read_capacity)\n", (4599, 4641), False, 'import pulumi\n'), ((4693, 4747), 'pulumi.set', 'pulumi.set', (['__self__', '"""write_capacity"""', 'write_capacity'], {}), "(__self__, 'write_capacity', write_capacity)\n", (4703, 4747), False, 'import pulumi\n'), ((9152, 9214), 'pulumi.set', 'pulumi.set', (['__self__', '"""non_key_attributes"""', 'non_key_attributes'], {}), "(__self__, 'non_key_attributes', non_key_attributes)\n", (9162, 9214), False, 'import pulumi\n'), ((12941, 12989), 'pulumi.set', 'pulumi.set', (['__self__', '"""kms_key_arn"""', 'kms_key_arn'], {}), "(__self__, 'kms_key_arn', kms_key_arn)\n", (12951, 12989), False, 'import pulumi\n'), ((14369, 14409), 'pulumi.set', 'pulumi.set', (['__self__', '"""enabled"""', 'enabled'], {}), "(__self__, 'enabled', enabled)\n", (14379, 14409), False, 'import pulumi\n')]
|
# -*- coding: utf-8 -*-
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from collections import namedtuple
import logging
import dns.opcode
import dns.rcode
import dns.rdatatype
import dns.resolver
import dns.reversename
# DNS rdata types whose payload is an IP address (IPv4 / IPv6)
DNS_IP_RDTYPES = (dns.rdatatype.A, dns.rdatatype.AAAA)
# A nameserver's host name together with its resolved IP addresses
NameServer = namedtuple('NameServer', ('nameserver', 'ip_addresses'))
########################################################################
class DnsBase:
    """Base class bundling DNS resolution helpers with Elastic-ECS logging.

    :param nameservers: default list of nameserver IP addresses to query
    :param logger: logger that receives the structured (ECS `dns.*`) records
    """
    # ----------------------------------------------------------------------
    def __init__(self, nameservers, logger):
        self._nameservers = nameservers
        self._logger = logger
    # ----------------------------------------------------------------------
    def _resolve(self, fqdn, rdtype, nameservers=None, ignore_errors=False):
        """Resolve *fqdn* for record type *rdtype* and return record strings.

        :param nameservers: optional override of the instance nameservers
        :param ignore_errors: when True, NXDOMAIN/NoAnswer yield an empty
            list instead of propagating
        :raises dns.resolver.NXDOMAIN, dns.resolver.NoAnswer: when the name
            does not resolve and *ignore_errors* is False
        """
        try:
            resolver = self._factor_resolver(nameservers)
            answer = resolver.query(fqdn, rdtype)
        except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as exc:
            self._log_dns_message(
                logging.INFO,
                'Error on resolving name "{}" for type "{}": {}'.format(fqdn, rdtype, exc),
                query=fqdn,
                rdtype=rdtype,
                exc=exc)
            if ignore_errors:
                return list()
            # raise the original exception
            raise
        else:
            result = [rr.to_text() for rr in answer]
            self._log_dns_message(
                logging.DEBUG,
                'Resolved name "{}" for type "{}" to: {}'.format(fqdn, rdtype, ', '.join(result)),
                query=fqdn,
                rdtype=rdtype,
                answer=answer)
            return result
    # ----------------------------------------------------------------------
    def _factor_resolver(self, nameservers=None):
        """Build a resolver using *nameservers* or the instance default."""
        resolver = dns.resolver.Resolver(configure=False)
        resolver.nameservers = nameservers or self._nameservers
        self._logger.debug('Using nameservers "{}"'.format(','.join(resolver.nameservers)))
        return resolver
    # ----------------------------------------------------------------------
    def _log_dns_message(self, level, message, query, rdtype=None, answer=None, exc=None):
        """Add Elastic ECS fields"""
        response = None
        extra = dict()
        extra['dns'] = dict()
        # ECS `dns.type` is 'answer' when the event carries a response
        # (a successful answer or a resolution error), 'query' otherwise.
        # BUGFIX: the original condition was inverted, so every event
        # logged from _resolve() was tagged 'query' and 'answer' was
        # unreachable.
        extra['dns']['type'] = 'query' if not answer and not exc else 'answer'
        extra['dns']['question'] = dict()
        extra['dns']['question']['class'] = 'IN'
        extra['dns']['question']['name'] = query
        if rdtype:
            extra['dns']['question']['type'] = rdtype
        if exc:
            # dnspython exceptions keep their constructor kwargs; the raw
            # response may be among them
            response = exc.kwargs.get('response')
        if answer:
            response = answer.response
        if response:
            extra['dns']['id'] = response.id
            extra['dns']['opcode'] = dns.opcode.to_text(response.opcode())
            extra['dns']['response_code'] = dns.rcode.to_text(response.rcode())
            resolved_ips = list()
            answers = list()
            for record in response.answer:
                ecs_rr = dict()
                ecs_rr['class'] = 'IN'
                ecs_rr['type'] = dns.rdatatype.to_text(record.rdtype)
                ecs_rr['data'] = self._get_ip_address_from_record(record)
                ecs_rr['ttl'] = record.ttl
                answers.append(ecs_rr)
                if record.rdtype in DNS_IP_RDTYPES:
                    resolved_ips.append(ecs_rr['data'])
            if resolved_ips:
                extra['dns']['resolved_ip'] = resolved_ips
            if answers:
                extra['dns']['answers'] = answers
        if isinstance(exc, (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN)):
            exc = None  # do not log traceback for common, expected errors
        self._logger.log(level, message, extra=extra, exc_info=exc)
    # ----------------------------------------------------------------------
    def _get_ip_address_from_record(self, record):
        """Return the record's single item as text, or None otherwise."""
        if record.items and len(record.items) == 1:
            item_keys = list(record.items.keys())
            item_key = item_keys[0]
            return item_key.to_text()
    # ----------------------------------------------------------------------
    def _factor_nameserver_list(self, nameserver_names):
        """Resolve each nameserver name to a NameServer(name, ip_addresses)."""
        nameservers = list()
        for nameserver_name in nameserver_names:
            # best-effort: missing A or AAAA records simply contribute
            # no addresses (ignore_errors=True)
            nameserver_ipv4_addresses = self._resolve(nameserver_name, 'A', ignore_errors=True)
            nameserver_ipv6_addresses = self._resolve(nameserver_name, 'AAAA', ignore_errors=True)
            ip_addresses = nameserver_ipv4_addresses + nameserver_ipv6_addresses
            nameserver = NameServer(nameserver_name, ip_addresses)
            nameservers.append(nameserver)
        return nameservers
|
[
"collections.namedtuple"
] |
[((368, 424), 'collections.namedtuple', 'namedtuple', (['"""NameServer"""', "('nameserver', 'ip_addresses')"], {}), "('NameServer', ('nameserver', 'ip_addresses'))\n", (378, 424), False, 'from collections import namedtuple\n')]
|
## @package teetool
# This module contains the GaussianProcess class
#
# See GaussianProcess class for more details
import teetool as tt
import numpy as np
from numpy.linalg import det, inv, pinv, cond
## GaussianProcess class evaluates an ensemble of trajectories as a Gaussian process
#
# Such a Gaussian process has a mean and covariance, and expresses itself as an ellipse (2d) or ellipsoid (3d) at a constant variance
class GaussianProcess(object):
    """Gaussian-process model of an ensemble of trajectories.

    NOTE(review): relies on numpy matrix semantics (``np.mat``), where ``*``
    is matrix multiplication; ``np.matrix`` is deprecated in recent numpy.
    """
    ## The constructor of GaussianProcess
    # @param self object pointer
    # @param cluster_data trajectory data in specific format: a list of (x, Y), where x [npoints x 1] and Y [npoints x ndim]
    # @param ngaus number of Gaussians desired
    def __init__(self, cluster_data, ngaus):
        # Fit cluster_data in a [0, 1] domain
        outline = tt.helpers.get_cluster_data_outline(cluster_data)
        cluster_data_norm = tt.helpers.get_cluster_data_norm(cluster_data,
                                                         outline)
        ## normalised cluster_data
        self._cluster_data = cluster_data_norm
        ## original outline
        self._outline = outline
        ## number of Gaussians after modelling
        self._ngaus = ngaus
        ## dimensionality of trajectory data
        self._ndim = tt.helpers.getDimension(cluster_data)
    ## obtain vectors to multiply normalised values with, allows for a transformation back to the actual values from the normalised ones.
    # @param self The object pointer.
    # @return M, vector with minimum values [ngaus*ndim x 1]
    # @return D, vector with difference values [ngaus*ndim x 1]
    def _outline2vectors(self):
        M_list = [] # list with minimum values [M]
        D_list = [] # list with difference [D]
        for d in range(self._ndim):
            # per-dimension minimum / maximum from the stored outline
            xmin = self._outline[d*2+0]
            xmax = self._outline[d*2+1]
            m1 = np.ones(shape=(self._ngaus, 1)) * (xmin)
            M_list.append(m1)
            d1 = np.ones(shape=(self._ngaus, 1)) * (xmax - xmin)
            D_list.append(d1)
        M = np.concatenate(M_list, axis=0) # vector
        D = np.concatenate(D_list, axis=0) # vector
        return (M, D)
    ## returns the mu_y, sig_y vector to the original dimensions using the outline
    # @param self The object pointer.
    # @param mu_y mean vector [ngaus*ndim x 1]
    # @param sig_y covariance matrix [ngaus*ndim x ngaus*ndim]
    def _norm2real(self, mu_y, sig_y):
        (M, D) = self._outline2vectors()
        D_diag = np.diagflat(D ** 2)
        mu_y_real = np.multiply(mu_y, D) + M
        # NOTE(review): right-multiplying by diag(D**2) scales columns only;
        # the full covariance back-transform would be diag(D)*sig_y*diag(D).
        # Both agree on the diagonal but differ off-diagonal -- confirm intent.
        sig_y_real = sig_y * D_diag
        return mu_y_real, sig_y_real
    ## models the trajectory data via re-sampling, ignoring noise, missing data, trends, etc. Quick method only suitable for high-quality data
    # @param self The object pointer.
    # @return mu_y mean vector [ngaus*ndim x 1]
    # @return sig_y covariance matrix [ngaus*ndim x ngaus*ndim]
    # @return cc mean [ndim x 1] in ngaus cells
    # @return cA covariance [ndim x ndim] in ngaus cells
    def model_by_resampling(self):
        # extract
        cluster_data = self._cluster_data
        ngaus = self._ngaus
        mdim = self._ndim
        # predict these values
        xp = np.linspace(0, 1, ngaus)
        yc = [] # list to put trajectories
        for (xn, Yn) in cluster_data:
            # array to fill
            yp = np.empty(shape=(ngaus, mdim))
            # resample each dimension onto the common grid xp
            for d in range(mdim):
                ynd = Yn[:, d]
                yp[:, d] = np.interp(xp, xn, ynd)
            # single column
            yp1 = np.reshape(yp, (-1, 1), order='F')
            yc.append(yp1)
        # compute values
        ntraj = len(yc) # number of trajectories
        # obtain average [mu]
        mu_y = np.zeros(shape=(mdim*ngaus, 1))
        for yn in yc:
            mu_y += yn
        mu_y = (mu_y / ntraj)
        # obtain standard deviation [sig]
        sig_y_sum = np.zeros(shape=(mdim*ngaus, mdim*ngaus))
        for yn in yc:
            sig_y_sum += (yn - mu_y) * (yn - mu_y).transpose()
        sig_y = np.mat(sig_y_sum / ntraj)
        # convert to original values
        mu_y, sig_y = self._norm2real(mu_y, sig_y)
        # convert to cells
        (cc, cA) = self._getGMMCells(mu_y, sig_y, self._ngaus)
        return (mu_y, sig_y, cc, cA)
    ## models the trajectory data via maximum likelihood. It uses the basis function as specified to handle missing data, however, noise per trajectory has no influence on the parameter estimation. A suitable method in the absence of noise and known shape of trajectories.
    # @param self The object pointer.
    # @param type_basis see Basis class for input
    # @param nbasis see Basis class for input
    # @return mu_y mean vector [ngaus*ndim x 1]
    # @return sig_y covariance matrix [ngaus*ndim x ngaus*ndim]
    # @return cc mean [ndim x 1] in ngaus cells
    # @return cA covariance [ndim x ndim] in ngaus cells
    def model_by_ml(self, type_basis, nbasis):
        # extract
        cluster_data = self._cluster_data
        ngaus = self._ngaus
        ndim = self._ndim
        ntraj = len(cluster_data)
        # create a basis
        basis = tt.basis.Basis(type_basis, nbasis, ndim)
        # least-squares weights per trajectory via the pseudo-inverse
        wc = []
        for i, (xn, Y) in enumerate(cluster_data):
            yn = np.reshape(Y, newshape=(-1,1), order='F')
            Hn = basis.get(xn)
            wn = pinv(Hn) * yn
            wn = np.mat(wn)
            wc.append(wn)
        # obtain average [mu]
        mu_w = np.zeros(shape=(ndim*nbasis, 1))
        for wn in wc:
            mu_w += wn
        mu_w = np.mat(mu_w / ntraj)
        # obtain standard deviation [sig]
        sig_w_sum = np.zeros(shape=(ndim*nbasis, ndim*nbasis))
        for wn in wc:
            sig_w_sum += (wn - mu_w)*(wn - mu_w).transpose()
        sig_w = np.mat(sig_w_sum / ntraj)
        # predict these values
        xp = np.linspace(0, 1, ngaus)
        Hp = basis.get(xp)
        mu_y = Hp * mu_w
        sig_y = Hp * sig_w * Hp.transpose()
        # convert to original values
        mu_y, sig_y = self._norm2real(mu_y, sig_y)
        (cc, cA) = self._getGMMCells(mu_y, sig_y, self._ngaus)
        return (mu_y, sig_y, cc, cA)
    ## models the trajectory data via expectation maximization. It uses the basis function as specified to handle missing data, and, when noisy data is detected within a trajectory, the global trend, as learned, takes over. A suitable method in the presence of noise or an unknown shape of trajectories -- the latter as different models can be compared via likelihood
    # @param self The object pointer.
    # @param type_basis see Basis class for input
    # @param nbasis see Basis class for input
    # @param maximum_iterations maximum allowed number of evaluations till convergence
    # @return mu_y mean vector [ngaus*ndim x 1]
    # @return sig_y covariance matrix [ngaus*ndim x ngaus*ndim]
    # @return cc mean [ndim x 1] in ngaus cells
    # @return cA covariance [ndim x ndim] in ngaus cells
    def model_by_em(self, type_basis, nbasis, maximum_iterations=2001):
        # extract
        cluster_data = self._cluster_data
        ngaus = self._ngaus
        ndim = self._ndim
        ntraj = len(cluster_data)
        # Mstar = total number of data points over all trajectories
        Mstar = 0
        for (xn, Yn) in cluster_data:
            Mstar += np.size(xn)
        # create a basis
        basis = tt.basis.Basis(type_basis, nbasis, ndim)
        # from cluster_data to cell structure
        yc, Hc = self._from_clusterdata2cells(cluster_data, basis)
        # hardcoded parameters
        MAX_ITERATIONS = maximum_iterations # maximum number of iterations
        CONV_LIKELIHOOD = 1e-3 # stop convergence
        # min_eig = 10**-6 # minimum eigenvalue (numerical trick)
        # initial variables
        BETA_EM = 1000.
        mu_w = np.zeros(shape=(nbasis*ndim, 1))
        sig_w = np.mat(np.eye(nbasis*ndim))
        sig_w_inv = inv(sig_w)
        loglikelihood_previous = np.inf
        for i_iter in range(MAX_ITERATIONS):
            # Expectation (54) (55)
            (Ewc, Ewwc) = self._Ewc_Ewwc(yc, Hc, mu_w, sig_w_inv, BETA_EM)
            # Maximization :: (56), (57)
            # E [ MU ]
            mu_w = self._E_mu(Ewc)
            # E [ SIGMA ]
            sig_w = self._E_sigma(mu_w, yc, Hc, Ewc, Ewwc)
            # pre-calculate inverse
            sig_w_inv = inv(sig_w)
            # E [BETA]
            BETA_EM = self._E_beta(yc, Hc, Ewc, Ewwc, ndim, Mstar)
            # //// log likelihood ///////////
            # // ln( p(Y|w) - likelihood
            loglikelihood_pYw = self._L_pYw(yc,
                                            Hc,
                                            Ewc,
                                            Ewwc,
                                            ndim,
                                            Mstar,
                                            BETA_EM)
            # // ln( p(w) ) - prior
            loglikelihood_pw = self._L_pw(Ewc,
                                          Ewwc,
                                          mu_w,
                                          sig_w,
                                          sig_w_inv,
                                          ndim,
                                          nbasis)
            loglikelihood_pY = loglikelihood_pYw + loglikelihood_pw
            # // check convergence
            loglikelihood_diff = np.abs(loglikelihood_pY - loglikelihood_previous)
            if np.isfinite(loglikelihood_pY):
                # check
                if (loglikelihood_diff < CONV_LIKELIHOOD):
                    break
            else:
                # not a valid loglikelihood
                print("warning: not a finite loglikelihood")
                break
            # output
            #if (i_iter % 100 == 0):
            #    print("{0} {1} {2}".format(i_iter, loglikelihood_pY, min_eig))
            # store previous log_likelihood
            loglikelihood_previous = loglikelihood_pY
        # predict these values
        xp = np.linspace(0, 1, ngaus)
        Hp = basis.get(xp)
        mu_y = Hp * mu_w
        sig_y = Hp * sig_w * Hp.transpose()
        # convert to original values
        mu_y, sig_y = self._norm2real(mu_y, sig_y)
        (cc, cA) = self._getGMMCells(mu_y, sig_y, self._ngaus)
        return (mu_y, sig_y, cc, cA)
    def _from_clusterdata2cells(self, cluster_data, basis):
        """converts from cluster_data (xn, Yn) list, to cells
        Input:
            cluster_data - list of (xn, Yn) trajectory tuples
            basis - Basis object whose get(xn) yields the Gram matrix
        Output:
            yc - list of column vectors yn (Yn flattened column-wise)
            Hc - list of Gram matrices Hn
        """
        # prepare data
        yc = []
        Hc = []
        for (xn, Yn) in cluster_data:
            # data
            yn = np.reshape(Yn, newshape=(-1,1), order='F')
            Hn = basis.get(xn)
            # add to list
            yc.append(yn)
            Hc.append(Hn)
        return (yc, Hc)
    def _Ewc_Ewwc(self, yc, Hc, mu_w, sig_w_inv, BETA_EM):
        """returns the expected values Ewc and Ewwc
        input:
            yc - [points]
            Hc - [Gram matrix]
            mu_w - E[w]
            sig_w_inv - 1 / E[ww]
            BETA_EM - 1 / noise
        output:
            Ewc - [E[wn]]
            Ewnwc - [E[wnwn]]
        """
        ntraj = len(yc)
        Ewc = []
        Ewwc = []
        # Expectation (54) (55)
        for n in range(ntraj):
            # data
            yn = yc[n]
            Hn = Hc[n]
            (Ewn, Ewnwn) = self._Ewn_Ewnwn(yn,
                                           Hn,
                                           mu_w,
                                           sig_w_inv,
                                           BETA_EM)
            # store
            Ewc.append(Ewn);
            Ewwc.append(Ewnwn);
        return (Ewc, Ewwc)
    def _Ewn_Ewnwn(self, yn, Hn, mu_w, sig_w_inv, BETA_EM):
        """returns the expected values Ewn and Ewnwn
        input:
            yn - points
            Hn - Gram matrix
            mu_w - E[w]
            sig_w_inv - 1 / E[ww]
            BETA_EM - 1 / noise
        output:
            Ewn - E[wn]
            Ewnwn - E[wnwn]
        """
        # calculate S :: (50)
        Sn_inv = sig_w_inv + np.multiply(BETA_EM,(Hn.transpose()*Hn))
        Sn = np.mat(inv(Sn_inv))
        # posterior mean of wn, weighted by noise precision and prior
        Ewn = (Sn *((np.multiply(BETA_EM,(Hn.transpose()*yn))) + ((sig_w_inv*mu_w))))
        # assure matrix
        Ewn = np.mat(Ewn)
        # BISHOP (2.62)
        Ewnwn = Sn + Ewn*Ewn.transpose()
        # assure matrix
        Ewnwn = np.mat(Ewnwn)
        return (Ewn, Ewnwn)
    def _E_mu(self, Ewc):
        """returns the expected value E [ MU ]
        Input:
            Ewc - list of expected values
        Output:
            mu_w - average of expected values
        """
        # total number of trajectories
        ntraj = len(Ewc)
        mu_w_sum = np.zeros_like(Ewc[0])
        for Ewn in Ewc:
            # sum
            mu_w_sum += Ewn
        mu_w = np.mat(mu_w_sum / ntraj)
        return mu_w
    def _E_sigma(self, mu_w, yc, Hc, Ewc, Ewwc):
        """return the expected variance E [ SIGMA ]
        this takes into account the measured data and the model
        Input:
            mu_w -
            yc -
            Hc -
            Ewc -
            Ewwc -
        Output:
            sig_w -
        """
        # total number of trajectories
        ntraj = len(yc)
        sig_w_sum = np.zeros_like(Ewwc[0])
        # E [ SIGMA ]
        # sig_w_sum = np.zeros((nbasis*ndim, nbasis*ndim));
        for n in range(ntraj):
            # extract data
            yn = yc[n]
            Hn = Hc[n]
            Ewn = Ewc[n]
            Ewnwn = Ewwc[n]
            # sum
            SIGMA_n = Ewnwn - 2.*(mu_w*Ewn.transpose()) + mu_w*mu_w.transpose()
            sig_w_sum += SIGMA_n
        sig_w = np.mat(sig_w_sum / ntraj)
        return sig_w
    def _E_beta(self, yc, Hc, Ewc, Ewwc, ndim, Mstar):
        """returns the expected noise parameter"""
        ntraj = len(yc)
        # E [BETA]
        BETA_sum_inv = 0.;
        for n in range(ntraj):
            # extract data
            yn = yc[n]
            Hn = Hc[n]
            Ewn = Ewc[n]
            Ewnwn = Ewwc[n]
            BETA_sum_inv += np.dot(yn.transpose(),yn) - 2.*(np.dot(yn.transpose(),(Hn*Ewn))) + np.trace((Hn.transpose()*Hn)*Ewnwn)
        BETA_EM = np.mat( (ndim*Mstar) / BETA_sum_inv )
        return BETA_EM
    def _L_pYw(self, yc, Hc, Ewc, Ewwc, ndim, Mstar, BETA_EM):
        """returns ln( p (Y|w) )
        likelihood of data, given the parameters"""
        ntraj = len(yc)
        loglikelihood_pYw_sum = 0.;
        for n in range(ntraj):
            # extract data
            yn = yc[n]
            Hn = Hc[n]
            Ewn = Ewc[n]
            Ewnwn = Ewwc[n]
            # loglikelihood_pYw_sum = loglikelihood_pYw_sum + ((yn.')*yn - 2*(yn.')*(Hn*Ewn) + trace(((Hn.')*Hn)*Ewnwn));
            loglikelihood_pYw_sum += np.dot(yn.transpose(),yn) - 2.*(np.dot(yn.transpose(),(Hn*Ewn))) + np.trace((Hn.transpose()*Hn)*Ewnwn)
        # loglikelihood_pYw = + ((Mstar*D) / 2) * log(2*pi) - ((Mstar*D) / 2) * log( BETA_EM ) + (BETA_EM/2) * loglikelihood_pYw_sum;
        loglikelihood_pYw = (Mstar*ndim / 2.) * np.log(2.*np.pi) - (Mstar*ndim / 2.) * np.log(BETA_EM) + (BETA_EM / 2.) * loglikelihood_pYw_sum
        return loglikelihood_pYw
    def _L_pw(self, Ewc, Ewwc, mu_w, sig_w, sig_w_inv, ndim, nbasis):
        """returns ln( p(w) )
        likelihood of parameters, before seeing the data"""
        # test conditioning sig_w
        # NOTE(review): a condition number is always >= 1, so this guard
        # always fires and the prior term degenerates to the constant 1e4;
        # a large threshold (e.g. > 1e10) was probably intended -- confirm.
        if cond(sig_w) > 0:
            return 1e4
        ntraj = len(Ewc)
        loglikelihood_pw_sum = 0.;
        for n in range(ntraj):
            # extract data
            Ewn = Ewc[n]
            Ewnwn = Ewwc[n]
            # loglikelihood_pw_sum = loglikelihood_pw_sum + trace( (LAMBDA_EM)*( Ewnwn - 2*MU_EM*(Ewn.') + (MU_EM*(MU_EM.')) ) );
            loglikelihood_pw_sum += np.trace(sig_w_inv*(Ewnwn - 2.*mu_w*Ewn.transpose() + mu_w*mu_w.transpose()))
        # loglikelihood_pw = + ((N*J*D) / 2) * log(2*pi) + (N/2) * ln_det_Sigma + (1/2) * loglikelihood_pw_sum;
        loglikelihood_pw = (ntraj*nbasis*ndim/2.)*np.log(2*np.pi) + (ntraj/2.)*np.log(det(sig_w)) + (1./2.)*loglikelihood_pw_sum
        return loglikelihood_pw
    def _getGMMCells(self, mu_y, sig_y, ngaus):
        """
        return Gaussian Mixture Model (GMM) in cells
        """
        cc = []
        cA = []
        for m in range(ngaus):
            # single cell
            (c, A) = self._getMuSigma(mu_y, sig_y, m, ngaus)
            # check for singularity
            A = tt.helpers.nearest_spd(A)
            cc.append(c)
            cA.append(A)
        return (cc, cA)
    def _getMuSigma(self, mu_y, sig_y, npoint, ngaus):
        """
        returns (mu, sigma)
        """
        # mu_y [DM x 1]
        # sig_y [DM x DM]
        D = self._ndim
        # check range
        if ((npoint < 0) or (npoint >= ngaus)):
            raise ValueError("{0}, not in [0, {1}]".format(npoint, ngaus))
        c = np.empty(shape=(D, 1))
        A = np.empty(shape=(D, D))
        # select position
        # components are stored dimension-major: index npoint + d*ngaus
        for d_row in range(D):
            c[d_row, 0] = mu_y[(npoint+d_row*ngaus), 0]
            for d_col in range(D):
                A[d_row, d_col] = sig_y[(npoint+d_row*ngaus), (npoint+d_col*ngaus)]
        return (c, A)
|
[
"teetool.helpers.nearest_spd",
"numpy.abs",
"numpy.diagflat",
"numpy.empty",
"numpy.ones",
"numpy.linalg.cond",
"numpy.interp",
"numpy.mat",
"numpy.linalg.pinv",
"teetool.helpers.get_cluster_data_norm",
"numpy.zeros_like",
"numpy.multiply",
"numpy.isfinite",
"numpy.reshape",
"numpy.linspace",
"numpy.linalg.det",
"numpy.size",
"teetool.helpers.get_cluster_data_outline",
"numpy.linalg.inv",
"numpy.concatenate",
"numpy.log",
"numpy.zeros",
"teetool.helpers.getDimension",
"numpy.eye",
"teetool.basis.Basis"
] |
[((850, 899), 'teetool.helpers.get_cluster_data_outline', 'tt.helpers.get_cluster_data_outline', (['cluster_data'], {}), '(cluster_data)\n', (885, 899), True, 'import teetool as tt\n'), ((928, 983), 'teetool.helpers.get_cluster_data_norm', 'tt.helpers.get_cluster_data_norm', (['cluster_data', 'outline'], {}), '(cluster_data, outline)\n', (960, 983), True, 'import teetool as tt\n'), ((1329, 1366), 'teetool.helpers.getDimension', 'tt.helpers.getDimension', (['cluster_data'], {}), '(cluster_data)\n', (1352, 1366), True, 'import teetool as tt\n'), ((2127, 2157), 'numpy.concatenate', 'np.concatenate', (['M_list'], {'axis': '(0)'}), '(M_list, axis=0)\n', (2141, 2157), True, 'import numpy as np\n'), ((2179, 2209), 'numpy.concatenate', 'np.concatenate', (['D_list'], {'axis': '(0)'}), '(D_list, axis=0)\n', (2193, 2209), True, 'import numpy as np\n'), ((2579, 2598), 'numpy.diagflat', 'np.diagflat', (['(D ** 2)'], {}), '(D ** 2)\n', (2590, 2598), True, 'import numpy as np\n'), ((3319, 3343), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'ngaus'], {}), '(0, 1, ngaus)\n', (3330, 3343), True, 'import numpy as np\n'), ((3851, 3884), 'numpy.zeros', 'np.zeros', ([], {'shape': '(mdim * ngaus, 1)'}), '(shape=(mdim * ngaus, 1))\n', (3859, 3884), True, 'import numpy as np\n'), ((4023, 4067), 'numpy.zeros', 'np.zeros', ([], {'shape': '(mdim * ngaus, mdim * ngaus)'}), '(shape=(mdim * ngaus, mdim * ngaus))\n', (4031, 4067), True, 'import numpy as np\n'), ((4167, 4192), 'numpy.mat', 'np.mat', (['(sig_y_sum / ntraj)'], {}), '(sig_y_sum / ntraj)\n', (4173, 4192), True, 'import numpy as np\n'), ((5282, 5322), 'teetool.basis.Basis', 'tt.basis.Basis', (['type_basis', 'nbasis', 'ndim'], {}), '(type_basis, nbasis, ndim)\n', (5296, 5322), True, 'import teetool as tt\n'), ((5613, 5647), 'numpy.zeros', 'np.zeros', ([], {'shape': '(ndim * nbasis, 1)'}), '(shape=(ndim * nbasis, 1))\n', (5621, 5647), True, 'import numpy as np\n'), ((5708, 5728), 'numpy.mat', 'np.mat', (['(mu_w / ntraj)'], {}), 
'(mu_w / ntraj)\n', (5714, 5728), True, 'import numpy as np\n'), ((5792, 5838), 'numpy.zeros', 'np.zeros', ([], {'shape': '(ndim * nbasis, ndim * nbasis)'}), '(shape=(ndim * nbasis, ndim * nbasis))\n', (5800, 5838), True, 'import numpy as np\n'), ((5936, 5961), 'numpy.mat', 'np.mat', (['(sig_w_sum / ntraj)'], {}), '(sig_w_sum / ntraj)\n', (5942, 5961), True, 'import numpy as np\n'), ((6007, 6031), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'ngaus'], {}), '(0, 1, ngaus)\n', (6018, 6031), True, 'import numpy as np\n'), ((7489, 7529), 'teetool.basis.Basis', 'tt.basis.Basis', (['type_basis', 'nbasis', 'ndim'], {}), '(type_basis, nbasis, ndim)\n', (7503, 7529), True, 'import teetool as tt\n'), ((7938, 7972), 'numpy.zeros', 'np.zeros', ([], {'shape': '(nbasis * ndim, 1)'}), '(shape=(nbasis * ndim, 1))\n', (7946, 7972), True, 'import numpy as np\n'), ((8035, 8045), 'numpy.linalg.inv', 'inv', (['sig_w'], {}), '(sig_w)\n', (8038, 8045), False, 'from numpy.linalg import det, inv, pinv, cond\n'), ((10187, 10211), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'ngaus'], {}), '(0, 1, ngaus)\n', (10198, 10211), True, 'import numpy as np\n'), ((12659, 12670), 'numpy.mat', 'np.mat', (['Ewn'], {}), '(Ewn)\n', (12665, 12670), True, 'import numpy as np\n'), ((12778, 12791), 'numpy.mat', 'np.mat', (['Ewnwn'], {}), '(Ewnwn)\n', (12784, 12791), True, 'import numpy as np\n'), ((13113, 13134), 'numpy.zeros_like', 'np.zeros_like', (['Ewc[0]'], {}), '(Ewc[0])\n', (13126, 13134), True, 'import numpy as np\n'), ((13222, 13246), 'numpy.mat', 'np.mat', (['(mu_w_sum / ntraj)'], {}), '(mu_w_sum / ntraj)\n', (13228, 13246), True, 'import numpy as np\n'), ((13675, 13697), 'numpy.zeros_like', 'np.zeros_like', (['Ewwc[0]'], {}), '(Ewwc[0])\n', (13688, 13697), True, 'import numpy as np\n'), ((14089, 14114), 'numpy.mat', 'np.mat', (['(sig_w_sum / ntraj)'], {}), '(sig_w_sum / ntraj)\n', (14095, 14114), True, 'import numpy as np\n'), ((14626, 14661), 'numpy.mat', 'np.mat', (['(ndim * Mstar / 
BETA_sum_inv)'], {}), '(ndim * Mstar / BETA_sum_inv)\n', (14632, 14661), True, 'import numpy as np\n'), ((17352, 17374), 'numpy.empty', 'np.empty', ([], {'shape': '(D, 1)'}), '(shape=(D, 1))\n', (17360, 17374), True, 'import numpy as np\n'), ((17387, 17409), 'numpy.empty', 'np.empty', ([], {'shape': '(D, D)'}), '(shape=(D, D))\n', (17395, 17409), True, 'import numpy as np\n'), ((2620, 2640), 'numpy.multiply', 'np.multiply', (['mu_y', 'D'], {}), '(mu_y, D)\n', (2631, 2640), True, 'import numpy as np\n'), ((3473, 3502), 'numpy.empty', 'np.empty', ([], {'shape': '(ngaus, mdim)'}), '(shape=(ngaus, mdim))\n', (3481, 3502), True, 'import numpy as np\n'), ((3666, 3700), 'numpy.reshape', 'np.reshape', (['yp', '(-1, 1)'], {'order': '"""F"""'}), "(yp, (-1, 1), order='F')\n", (3676, 3700), True, 'import numpy as np\n'), ((5409, 5451), 'numpy.reshape', 'np.reshape', (['Y'], {'newshape': '(-1, 1)', 'order': '"""F"""'}), "(Y, newshape=(-1, 1), order='F')\n", (5419, 5451), True, 'import numpy as np\n'), ((5530, 5540), 'numpy.mat', 'np.mat', (['wn'], {}), '(wn)\n', (5536, 5540), True, 'import numpy as np\n'), ((7435, 7446), 'numpy.size', 'np.size', (['xn'], {}), '(xn)\n', (7442, 7446), True, 'import numpy as np\n'), ((7994, 8015), 'numpy.eye', 'np.eye', (['(nbasis * ndim)'], {}), '(nbasis * ndim)\n', (8000, 8015), True, 'import numpy as np\n'), ((8494, 8504), 'numpy.linalg.inv', 'inv', (['sig_w'], {}), '(sig_w)\n', (8497, 8504), False, 'from numpy.linalg import det, inv, pinv, cond\n'), ((9553, 9602), 'numpy.abs', 'np.abs', (['(loglikelihood_pY - loglikelihood_previous)'], {}), '(loglikelihood_pY - loglikelihood_previous)\n', (9559, 9602), True, 'import numpy as np\n'), ((9619, 9648), 'numpy.isfinite', 'np.isfinite', (['loglikelihood_pY'], {}), '(loglikelihood_pY)\n', (9630, 9648), True, 'import numpy as np\n'), ((10881, 10924), 'numpy.reshape', 'np.reshape', (['Yn'], {'newshape': '(-1, 1)', 'order': '"""F"""'}), "(Yn, newshape=(-1, 1), order='F')\n", (10891, 10924), True, 'import 
numpy as np\n'), ((12520, 12531), 'numpy.linalg.inv', 'inv', (['Sn_inv'], {}), '(Sn_inv)\n', (12523, 12531), False, 'from numpy.linalg import det, inv, pinv, cond\n'), ((15843, 15854), 'numpy.linalg.cond', 'cond', (['sig_w'], {}), '(sig_w)\n', (15847, 15854), False, 'from numpy.linalg import det, inv, pinv, cond\n'), ((16909, 16934), 'teetool.helpers.nearest_spd', 'tt.helpers.nearest_spd', (['A'], {}), '(A)\n', (16931, 16934), True, 'import teetool as tt\n'), ((1947, 1978), 'numpy.ones', 'np.ones', ([], {'shape': '(self._ngaus, 1)'}), '(shape=(self._ngaus, 1))\n', (1954, 1978), True, 'import numpy as np\n'), ((2036, 2067), 'numpy.ones', 'np.ones', ([], {'shape': '(self._ngaus, 1)'}), '(shape=(self._ngaus, 1))\n', (2043, 2067), True, 'import numpy as np\n'), ((3596, 3618), 'numpy.interp', 'np.interp', (['xp', 'xn', 'ynd'], {}), '(xp, xn, ynd)\n', (3605, 3618), True, 'import numpy as np\n'), ((5499, 5507), 'numpy.linalg.pinv', 'pinv', (['Hn'], {}), '(Hn)\n', (5503, 5507), False, 'from numpy.linalg import det, inv, pinv, cond\n'), ((15506, 15525), 'numpy.log', 'np.log', (['(2.0 * np.pi)'], {}), '(2.0 * np.pi)\n', (15512, 15525), True, 'import numpy as np\n'), ((15545, 15560), 'numpy.log', 'np.log', (['BETA_EM'], {}), '(BETA_EM)\n', (15551, 15560), True, 'import numpy as np\n'), ((16466, 16483), 'numpy.log', 'np.log', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (16472, 16483), True, 'import numpy as np\n'), ((16502, 16512), 'numpy.linalg.det', 'det', (['sig_w'], {}), '(sig_w)\n', (16505, 16512), False, 'from numpy.linalg import det, inv, pinv, cond\n')]
|
"""
Copyright (c) 2017 Dependable Systems Laboratory, EPFL
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import argparse
import logging
import os
import re
from magic import Magic
from s2e_env.command import EnvCommand, CommandError
from s2e_env.commands.project_creation import CGCProject, LinuxProject, AbstractProject
from s2e_env.commands.project_creation import WindowsExeProject, \
WindowsDLLProject, WindowsDriverProject
from s2e_env.commands.project_creation import Target
from s2e_env.commands.project_creation.abstract_project import validate_arguments, SUPPORTED_TOOLS
from s2e_env.infparser.driver import Driver
from s2e_env.manage import call_command
logger = logging.getLogger('new_project')
# Maps the -t/--type command-line value to the project class used for
# empty (target-less) projects
PROJECT_TYPES = {
    'cgc': CGCProject,
    'linux': LinuxProject,
    'windows': WindowsExeProject,
    'windows_dll': WindowsDLLProject,
    'windows_driver': WindowsDriverProject,
}
# Paths
FILE_DIR = os.path.dirname(__file__)
# Custom magic database used to recognize CGC (DECREE) binaries
CGC_MAGIC = os.path.join(FILE_DIR, '..', 'dat', 'cgc.magic')
# Regexes matched against libmagic's description of the target to
# determine its file type, architecture and operating system
CGC_REGEX = re.compile(r'^CGC 32-bit')
ELF32_REGEX = re.compile(r'^ELF 32-bit')
ELF64_REGEX = re.compile(r'^ELF 64-bit')
DLL32_REGEX = re.compile(r'^PE32 executable \(DLL\)')
DLL64_REGEX = re.compile(r'^PE32\+ executable \(DLL\)')
WIN32_DRIVER_REGEX = re.compile(r'^PE32 executable \(native\)')
WIN64_DRIVER_REGEX = re.compile(r'^PE32\+ executable \(native\)')
PE32_REGEX = re.compile(r'^PE32 executable')
PE64_REGEX = re.compile(r'^PE32\+ executable')
MSDOS_REGEX = re.compile(r'^MS-DOS executable')
def _determine_arch_and_proj(target_path):
    """
    Check that the given target is supported by S2E.

    The target's magic is checked to see if it is a supported file type (e.g.
    ELF, PE, etc.). The architecture and operating system that the target was
    compiled for (e.g., i386 Windows, x64 Linux, etc.) is also checked.

    Returns:
        A tuple containing the target's architecture, operating system and a
        project class. A tuple containing three ``None``s is returned on
        failure.
    """
    generic_magic = Magic()
    # (magic database, pattern, project class, architecture, OS)
    checks = (
        (Magic(magic_file=CGC_MAGIC), CGC_REGEX, CGCProject, 'i386', 'decree'),
        (generic_magic, ELF32_REGEX, LinuxProject, 'i386', 'linux'),
        (generic_magic, ELF64_REGEX, LinuxProject, 'x86_64', 'linux'),
        (generic_magic, DLL32_REGEX, WindowsDLLProject, 'i386', 'windows'),
        (generic_magic, DLL64_REGEX, WindowsDLLProject, 'x86_64', 'windows'),
        (generic_magic, WIN32_DRIVER_REGEX, WindowsDriverProject, 'i386', 'windows'),
        (generic_magic, WIN64_DRIVER_REGEX, WindowsDriverProject, 'x86_64', 'windows'),
        (generic_magic, PE32_REGEX, WindowsExeProject, 'i386', 'windows'),
        (generic_magic, PE64_REGEX, WindowsExeProject, 'x86_64', 'windows'),
        (generic_magic, MSDOS_REGEX, WindowsExeProject, 'i386', 'windows'),
    )
    # Resolve symlinks first, otherwise magic reports "symbolic link"
    # instead of the underlying file type
    real_path = os.path.realpath(target_path)
    # First matching pattern wins
    for checker, pattern, proj_class, arch, op_sys in checks:
        if pattern.match(checker.from_file(real_path)):
            return arch, op_sys, proj_class
    return None, None, None
def _extract_inf_files(target_path):
    """Extract Windows driver files from an INF file.

    Raises a generic ``Exception`` when the INF references no files or when a
    referenced (non-catalog) file is missing on disk.
    """
    driver = Driver(target_path)
    driver.analyze()
    driver_files = driver.get_files()
    if not driver_files:
        raise Exception('Driver has no files')
    base_dir = os.path.dirname(target_path)
    logger.info('  Driver files:')
    collected = []
    for name in driver_files:
        candidate = os.path.join(base_dir, name)
        if os.path.exists(candidate):
            logger.info('    %s', candidate)
            collected.append(candidate)
            continue
        # Missing catalog files are tolerated with a warning only
        if candidate.endswith('.cat'):
            logger.warning('Catalog file %s is missing', candidate)
            continue
        raise Exception('%s does not exist' % candidate)
    # De-duplicate the file list
    return list(set(collected))
def _translate_target_to_files(path):
"""
:param path: The path to the target
:return: The list of files associated with the target. The first
item in the list is the main target name.
"""
if not os.path.isfile(path):
raise Exception('Target %s does not exist' % path)
if path.endswith('.inf'):
logger.info('Detected Windows INF file, attempting to create a driver project...')
driver_files = _extract_inf_files(path)
first_sys_file = None
for f in driver_files:
if f.endswith('.sys'):
first_sys_file = f
# TODO: prompt the user to select the right driver
if not first_sys_file:
raise Exception('Could not find a *.sys file in the INF '
'file. Make sure that the INF file is valid '
'and belongs to a Windows driver')
path_to_analyze = first_sys_file
aux_files = driver_files
else:
path_to_analyze = path
aux_files = []
return [path_to_analyze] + aux_files
def _parse_sym_args(sym_args_str):
"""
Parses a list of argument indices to make symbolic.
``sym_args_str`` should be a string of space-separated integers that
correspond to a program argument to make symbolic. E.g. to make the first
argument symbolic, ``sym_args_str`` should be "1". To make the first and
third arguments symbolic, ``sym_args_str`` should be "1 3".
"""
sym_args = []
if not sym_args_str:
return sym_args
for i in sym_args_str.split(' '):
try:
sym_args.append(int(i))
except ValueError as e:
raise argparse.ArgumentTypeError('\'%s\' is not a valid index' % i) from e
return sym_args
def target_from_file(path, args=None, project_class=None):
    """Build a (Target, project class) pair for the binary at ``path``.

    The project class is derived from the file's magic unless an explicit
    ``project_class`` (a subclass of AbstractProject) overrides it.
    """
    files = _translate_target_to_files(path)
    main_file = files[0]
    aux_files = files[1:]
    arch, op_sys, proj_class = _determine_arch_and_proj(main_file)
    if not arch:
        raise Exception(f'Could not determine architecture for {main_file}')
    # Overwrite the automatically-derived project class if one is provided
    if project_class:
        if not issubclass(project_class, AbstractProject):
            raise Exception('Custom projects must be a subclass of AbstractProject')
        proj_class = project_class
    return Target(path, args, arch, op_sys, aux_files), proj_class
def _handle_with_file(target_path, target_args, proj_class, *args, **options):
    """Create a project for a concrete target binary."""
    target, resolved_class = target_from_file(target_path, target_args, proj_class)
    options['target'] = target
    return call_command(resolved_class(), *args, **options)
def _handle_empty_project(proj_class, *args, **options):
    """Create a target-less project after validating the required options."""
    # An empty project still needs explicit opt-in (-m), an image and a name
    if not options['no_target']:
        raise CommandError('No target binary specified. Use the -m option to create an empty project')
    if not options['image']:
        raise CommandError('An empty project requires a VM image. Use the -i option to specify the image')
    if not options['name']:
        raise CommandError('An empty project requires a name. Use the -n option to specify one')
    # If the project class wasn't explicitly overridden programmatically, get
    # one of the default project classes from the command line
    if not proj_class:
        valid_types = list(PROJECT_TYPES.keys())
        if options['type'] not in valid_types:
            raise CommandError('An empty project requires a type. Use the -t '
                               'option and specify one from %s' % valid_types)
        proj_class = PROJECT_TYPES[options['type']]
    options['target'] = Target.empty()
    return call_command(proj_class(), *args, **options)
class Command(EnvCommand):
    """
    Initialize a new analysis project.
    """
    help = 'Initialize a new analysis project.'
    def add_arguments(self, parser):
        """Register the ``new_project`` command-line arguments."""
        super().add_arguments(parser)
        parser.add_argument('target', nargs='?',
                            help='Path to the target file to analyze')
        parser.add_argument('target_args', nargs=argparse.REMAINDER,
                            help='Arguments to the target program. Use @@ '
                                 'as an input file marker that is automatically '
                                 'substituted by a file with symbolic content')
        parser.add_argument('-n', '--name', required=False, default=None,
                            help='The name of the project. Defaults to the '
                                 'name of the target program')
        parser.add_argument('-i', '--image', required=False, default=None,
                            help='The name of an image in the ``images`` '
                                 'directory. If missing, the image will be '
                                 'guessed based on the type of the binary')
        parser.add_argument('-d', '--download-image', required=False,
                            action='store_true',
                            help='Download a suitable image if it is not available')
        parser.add_argument('-m', '--no-target', required=False, default=False,
                            action='store_true',
                            help='Create an empty, target-less project. Used '
                                 'when no binary is needed')
        parser.add_argument('-t', '--type', required=False, default=None,
                            help='Project type (%s), valid only when creating empty projects' %
                                 ','.join(list(PROJECT_TYPES.keys())))
        parser.add_argument('-s', '--use-seeds', action='store_true',
                            help='Use this option to use seeds for creating '
                                 'symbolic files. Users must create these '
                                 'seeds themselves and place them in the '
                                 'project\'s ``seeds`` directory')
        parser.add_argument('--tools', type=lambda s: s.split(','),
                            default=[],
                            help='Comma-separated list of tools to enable '
                                 f'(supported tools: {",".join(SUPPORTED_TOOLS)})')
        parser.add_argument('--single-path', action='store_true', default=False,
                            help='Enables single-path mode, no symbolic execution possible')
        parser.add_argument('-a', '--sym-args', type=_parse_sym_args, default='',
                            help='A space-separated list of target argument '
                                 'indices to make symbolic')
        parser.add_argument('-f', '--force', action='store_true',
                            help='If a project with the given name already '
                                 'exists, replace it')
    def handle(self, *args, **options):
        """Entry point: dispatch to file-based or empty-project creation."""
        if not validate_arguments(options):
            return
        # The 'project_class' option is not exposed as a command-line argument:
        # it is typically used when creating a custom project programmatically.
        # It provides a class that is instantiated with the current
        # command-line arguments and options
        proj_class = options.pop('project_class', None)
        if options['target']:
            _handle_with_file(options.pop('target'), options.pop('target_args'), proj_class, *args, **options)
        else:
            _handle_empty_project(proj_class, *args, **options)
|
[
"s2e_env.commands.project_creation.Target.empty",
"os.path.dirname",
"os.path.realpath",
"magic.Magic",
"os.path.exists",
"s2e_env.commands.project_creation.Target",
"argparse.ArgumentTypeError",
"s2e_env.commands.project_creation.abstract_project.validate_arguments",
"os.path.isfile",
"s2e_env.infparser.driver.Driver",
"s2e_env.command.CommandError",
"os.path.join",
"logging.getLogger",
"re.compile"
] |
[((1668, 1700), 'logging.getLogger', 'logging.getLogger', (['"""new_project"""'], {}), "('new_project')\n", (1685, 1700), False, 'import logging\n'), ((1908, 1933), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1923, 1933), False, 'import os\n'), ((1946, 1994), 'os.path.join', 'os.path.join', (['FILE_DIR', '""".."""', '"""dat"""', '"""cgc.magic"""'], {}), "(FILE_DIR, '..', 'dat', 'cgc.magic')\n", (1958, 1994), False, 'import os\n'), ((2023, 2048), 're.compile', 're.compile', (['"""^CGC 32-bit"""'], {}), "('^CGC 32-bit')\n", (2033, 2048), False, 'import re\n'), ((2064, 2089), 're.compile', 're.compile', (['"""^ELF 32-bit"""'], {}), "('^ELF 32-bit')\n", (2074, 2089), False, 'import re\n'), ((2105, 2130), 're.compile', 're.compile', (['"""^ELF 64-bit"""'], {}), "('^ELF 64-bit')\n", (2115, 2130), False, 'import re\n'), ((2146, 2186), 're.compile', 're.compile', (['"""^PE32 executable \\\\(DLL\\\\)"""'], {}), "('^PE32 executable \\\\(DLL\\\\)')\n", (2156, 2186), False, 'import re\n'), ((2200, 2243), 're.compile', 're.compile', (['"""^PE32\\\\+ executable \\\\(DLL\\\\)"""'], {}), "('^PE32\\\\+ executable \\\\(DLL\\\\)')\n", (2210, 2243), False, 'import re\n'), ((2263, 2306), 're.compile', 're.compile', (['"""^PE32 executable \\\\(native\\\\)"""'], {}), "('^PE32 executable \\\\(native\\\\)')\n", (2273, 2306), False, 'import re\n'), ((2327, 2373), 're.compile', 're.compile', (['"""^PE32\\\\+ executable \\\\(native\\\\)"""'], {}), "('^PE32\\\\+ executable \\\\(native\\\\)')\n", (2337, 2373), False, 'import re\n'), ((2385, 2415), 're.compile', 're.compile', (['"""^PE32 executable"""'], {}), "('^PE32 executable')\n", (2395, 2415), False, 'import re\n'), ((2430, 2463), 're.compile', 're.compile', (['"""^PE32\\\\+ executable"""'], {}), "('^PE32\\\\+ executable')\n", (2440, 2463), False, 'import re\n'), ((2478, 2510), 're.compile', 're.compile', (['"""^MS-DOS executable"""'], {}), "('^MS-DOS executable')\n", (2488, 2510), False, 'import re\n'), 
((3056, 3063), 'magic.Magic', 'Magic', ([], {}), '()\n', (3061, 3063), False, 'from magic import Magic\n'), ((3997, 4026), 'os.path.realpath', 'os.path.realpath', (['target_path'], {}), '(target_path)\n', (4013, 4026), False, 'import os\n'), ((4487, 4506), 's2e_env.infparser.driver.Driver', 'Driver', (['target_path'], {}), '(target_path)\n', (4493, 4506), False, 'from s2e_env.infparser.driver import Driver\n'), ((4654, 4682), 'os.path.dirname', 'os.path.dirname', (['target_path'], {}), '(target_path)\n', (4669, 4682), False, 'import os\n'), ((8932, 8946), 's2e_env.commands.project_creation.Target.empty', 'Target.empty', ([], {}), '()\n', (8944, 8946), False, 'from s2e_env.commands.project_creation import Target\n'), ((4786, 4811), 'os.path.join', 'os.path.join', (['base_dir', 'f'], {}), '(base_dir, f)\n', (4798, 4811), False, 'import os\n'), ((5391, 5411), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (5405, 5411), False, 'import os\n'), ((7577, 7620), 's2e_env.commands.project_creation.Target', 'Target', (['path', 'args', 'arch', 'op_sys', 'aux_files'], {}), '(path, args, arch, op_sys, aux_files)\n', (7583, 7620), False, 'from s2e_env.commands.project_creation import Target\n'), ((7988, 8081), 's2e_env.command.CommandError', 'CommandError', (['"""No target binary specified. Use the -m option to create an empty project"""'], {}), "(\n 'No target binary specified. Use the -m option to create an empty project')\n", (8000, 8081), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((8151, 8253), 's2e_env.command.CommandError', 'CommandError', (['"""An empty project requires a VM image. Use the -i option to specify the image"""'], {}), "(\n 'An empty project requires a VM image. Use the -i option to specify the image'\n )\n", (8163, 8253), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((8317, 8404), 's2e_env.command.CommandError', 'CommandError', (['"""An empty project requires a name. 
Use the -n option to specify one"""'], {}), "(\n 'An empty project requires a name. Use the -n option to specify one')\n", (8329, 8404), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((3094, 3121), 'magic.Magic', 'Magic', ([], {'magic_file': 'CGC_MAGIC'}), '(magic_file=CGC_MAGIC)\n', (3099, 3121), False, 'from magic import Magic\n'), ((4827, 4852), 'os.path.exists', 'os.path.exists', (['full_path'], {}), '(full_path)\n', (4841, 4852), False, 'import os\n'), ((8713, 8830), 's2e_env.command.CommandError', 'CommandError', (["('An empty project requires a type. Use the -t option and specify one from %s'\n % project_types)"], {}), "(\n 'An empty project requires a type. Use the -t option and specify one from %s'\n % project_types)\n", (8725, 8830), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((12161, 12188), 's2e_env.commands.project_creation.abstract_project.validate_arguments', 'validate_arguments', (['options'], {}), '(options)\n', (12179, 12188), False, 'from s2e_env.commands.project_creation.abstract_project import validate_arguments, SUPPORTED_TOOLS\n'), ((6861, 6920), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['("\'%s\' is not a valid index" % i)'], {}), '("\'%s\' is not a valid index" % i)\n', (6887, 6920), False, 'import argparse\n')]
|
import boto3
if __name__ == "__main__":
collectionId='photos'
faces=[]
faces.append("373ce3fd-c2e0-410d-ba78-9023e2216f76")
client=boto3.client('rekognition',region_name='us-east-1')
response=client.delete_faces(CollectionId=collectionId,
FaceIds=faces)
print(str(len(response['DeletedFaces'])) + ' faces deleted:')
for faceId in response['DeletedFaces']:
print (faceId)
|
[
"boto3.client"
] |
[((154, 206), 'boto3.client', 'boto3.client', (['"""rekognition"""'], {'region_name': '"""us-east-1"""'}), "('rekognition', region_name='us-east-1')\n", (166, 206), False, 'import boto3\n')]
|
#!/usr/bin/env python3.8
# Copyright (c) 2018 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import sys
HEADER = """#include <string>
namespace media::audio {
// This is a char[] type because that requires no code to run
// initialization, so other constants can be initialized with
// this value.
//
// It is constexpr so that it can be used in the initialization
// of other constexprs.
static constexpr char %s[] =
"""
FOOTER = """
} // namespace media::audio
"""
def main():
if len(sys.argv) != 3:
print("Usage: %s <input_file> <output_file>" % (sys.argv[0],))
exit(-1)
lines = open(sys.argv[1], 'r').readlines()
out = open(sys.argv[2], 'w')
varname = re.sub(
'_([a-zA-Z0-9])', lambda m: m.group(1).upper(),
'k_' + os.path.splitext(os.path.split(sys.argv[1])[1])[0])
out.write(HEADER % (varname,))
for i in range(len(lines)):
l = lines[i].replace('\n', '') # Remove the trailing newline
l = re.sub('//.*', '', l) # Remove any comments
l = re.sub('(^\s+|\s+$)', '', l) # Remove leading/trailing whitespace
l = l.replace('\\', '\\\\') # Escape all fwd slash
l = l.replace('"', '\\"') # Escape all double-quotes
# Skip empty lines
if len(l) == 0:
continue
out.write(' "%s"' % (l,))
if ((i + 1) == len(lines)):
out.write(';\n')
else:
out.write('\n')
out.write(FOOTER)
out.close()
if __name__ == '__main__':
main()
|
[
"os.path.split",
"re.sub"
] |
[((1510, 1531), 're.sub', 're.sub', (['"""//.*"""', '""""""', 'l'], {}), "('//.*', '', l)\n", (1516, 1531), False, 'import re\n'), ((1567, 1597), 're.sub', 're.sub', (['"""(^\\\\s+|\\\\s+$)"""', '""""""', 'l'], {}), "('(^\\\\s+|\\\\s+$)', '', l)\n", (1573, 1597), False, 'import re\n'), ((1324, 1350), 'os.path.split', 'os.path.split', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1337, 1350), False, 'import os\n')]
|
#!/usr/bin/env python
"""
Apply doi_submission_generator to all pubs
"""
import argparse
import copy
import os
import sys
from pathlib import Path
import subprocess
from natsort import natsorted
import warnings
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Generate valid XML from metadata.json for Crossref submission')
parser.add_argument("-i", "--input-folder", dest="input_folder",
help="Path to parent folder: publications. publications/{pub_id}/metadata.json.")
parser.add_argument("-s", "--input-script", dest="input_script",
help="Path to script: Example: doi_submission_generator.py.")
parser.add_argument("-v", "--verbose",
dest="verbose", action='store_true',
help="Print the output dict")
parser.add_argument("--validate", dest="validate", action='store_true',
help="Validate XML locally. Warning: slow.")
parser.add_argument("--upload", dest="upload", action='store_true',
help="Upload to crossref.")
parser.add_argument("--email", dest="email", default=None,
help="Upload to crossref. Credentials, email (and role for shared credentials). Example with role: <EMAIL>/myrole")
parser.add_argument("--password", dest="password", default=None,
help="Upload to crossref. Credentials, password")
parser.add_argument("--test", dest="test", action='store_true',
help="Upload to crossref. But use test server.")
parser.add_argument("--dry", dest="dry", action='store_true',
help="Do not run the command.")
parser.add_argument("-o, --output-folder", dest="output_folder", default="/tmp",
help="Output folder to write down the filled xml before uploading to crossref.")
args = parser.parse_args()
print(args)
input_folder_path = Path(args.input_folder).expanduser().absolute()
if not input_folder_path.exists:
raise RuntimeError("input_folder cannot be found: {}".format(str(input_folder_path)))
input_script_path = Path(args.input_script).expanduser().absolute()
if not input_script_path.exists:
raise RuntimeError("input_script cannot be found: {}".format(str(input_script_path)))
# python doi_submission_generator.py
command_base = sys.executable + " " + str(input_script_path)
args_template = {
"input_metadata": "", # Fill this with the target metadata.json path
"verbose" : "--verbose" if args.verbose else "",
"validate" : "--validate" if args.validate else "",
"upload" : "--upload" if args.upload else "",
"email" : "--email " + args.email,
"password" : "--password " + args.password,
"test": "--test" if args.test else "",
"output_folder": "--output-folder " + args.output_folder
}
print(args_template)
args_string_template = "--input-metadata {input_metadata} {verbose} {validate} {upload} {email} {password} {test} {output_folder}"
publication_folders = natsorted([str(f) for f in input_folder_path.iterdir() if f.is_dir()])
for pub in publication_folders:
pub_path = Path(pub)
publication_id = pub_path.stem
print("-"*40)
print("publication_id:", publication_id)
metadata_path = pub_path / "metadata.json"
if not metadata_path.exists():
warnings.warn("No metadata.json for pub {}".format(publication_id))
continue
# populate the args
args_dict = copy.deepcopy(args_template)
args_dict["input_metadata"] = str(metadata_path)
args_string = args_string_template.format(**args_dict)
# build the final command
command = command_base + " " + args_string
# print(args_template)
# print(args_string)
if not args.dry:
subprocess.run(args=command, shell=True, check=True)
print(" command:", command)
|
[
"copy.deepcopy",
"pathlib.Path",
"argparse.ArgumentParser",
"subprocess.run"
] |
[((254, 359), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate valid XML from metadata.json for Crossref submission"""'}), "(description=\n 'Generate valid XML from metadata.json for Crossref submission')\n", (277, 359), False, 'import argparse\n'), ((3275, 3284), 'pathlib.Path', 'Path', (['pub'], {}), '(pub)\n', (3279, 3284), False, 'from pathlib import Path\n'), ((3634, 3662), 'copy.deepcopy', 'copy.deepcopy', (['args_template'], {}), '(args_template)\n', (3647, 3662), False, 'import copy\n'), ((3966, 4018), 'subprocess.run', 'subprocess.run', ([], {'args': 'command', 'shell': '(True)', 'check': '(True)'}), '(args=command, shell=True, check=True)\n', (3980, 4018), False, 'import subprocess\n'), ((1986, 2009), 'pathlib.Path', 'Path', (['args.input_folder'], {}), '(args.input_folder)\n', (1990, 2009), False, 'from pathlib import Path\n'), ((2189, 2212), 'pathlib.Path', 'Path', (['args.input_script'], {}), '(args.input_script)\n', (2193, 2212), False, 'from pathlib import Path\n')]
|
# Generated by Django 3.1.2 on 2020-11-05 12:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='catenauser',
name='access_token',
field=models.TextField(null=True, verbose_name='Access Token'),
),
]
|
[
"django.db.models.TextField"
] |
[((331, 387), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'verbose_name': '"""Access Token"""'}), "(null=True, verbose_name='Access Token')\n", (347, 387), False, 'from django.db import migrations, models\n')]
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import cv2
import tensorflow as tf
from PIL import Image
import os
from sklearn.model_selection import train_test_split
from keras.utils import to_categorical
from keras.models import Sequential, load_model
from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout
data = []
labels = []
classes = 43
cur_path = os.getcwd()
#Retrieving the images and their labels
for i in range(classes):
path = os.path.join(cur_path,'train',str(i))
images = os.listdir(path)
for a in images:
try:
image = Image.open(path + '\\'+ a)
image = image.resize((30,30))
image = np.array(image)
#sim = Image.fromarray(image)
data.append(image)
labels.append(i)
except:
print("Error loading image")
#Converting lists into numpy arrays
data = np.array(data)
labels = np.array(labels)
print(data.shape, labels.shape)
#Splitting training and testing dataset
X_train, X_test, y_train, y_test = train_test_split(data, labels, test_size=0.2, random_state=42)
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)
#Converting the labels into one hot encoding
y_train = to_categorical(y_train, 43)
y_test = to_categorical(y_test, 43)
#Building the model
model = Sequential()
model.add(Conv2D(filters=32, kernel_size=(5,5), activation='relu', input_shape=X_train.shape[1:]))
model.add(Conv2D(filters=32, kernel_size=(5,5), activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2)))
model.add(Dropout(rate=0.25))
model.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
model.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2)))
model.add(Dropout(rate=0.25))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(rate=0.5))
model.add(Dense(43, activation='softmax'))
#Compilation of the model
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
epochs = 15
history = model.fit(X_train, y_train, batch_size=32, epochs=epochs, validation_data=(X_test, y_test))
model.save("my_model.h5")
#plotting graphs for accuracy
plt.figure(0)
plt.plot(history.history['accuracy'], label='training accuracy')
plt.plot(history.history['val_accuracy'], label='val accuracy')
plt.title('Accuracy')
plt.xlabel('epochs')
plt.ylabel('accuracy')
plt.legend()
plt.show()
plt.figure(1)
plt.plot(history.history['loss'], label='training loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.title('Loss')
plt.xlabel('epochs')
plt.ylabel('loss')
plt.legend()
plt.show()
#testing accuracy on test dataset
from sklearn.metrics import accuracy_score
y_test = pd.read_csv('Test.csv')
labels = y_test["ClassId"].values
imgs = y_test["Path"].values
data=[]
for img in imgs:
image = Image.open(img)
image = image.resize((30,30))
data.append(np.array(image))
X_test=np.array(data)
pred = model.predict_classes(X_test)
#Accuracy with the test data
from sklearn.metrics import accuracy_score
print(accuracy_score(labels, pred))
|
[
"matplotlib.pyplot.title",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.accuracy_score",
"keras.layers.MaxPool2D",
"matplotlib.pyplot.figure",
"keras.layers.Flatten",
"keras.utils.to_categorical",
"matplotlib.pyplot.show",
"keras.layers.Dropout",
"matplotlib.pyplot.legend",
"keras.layers.Conv2D",
"matplotlib.pyplot.ylabel",
"os.listdir",
"matplotlib.pyplot.plot",
"os.getcwd",
"PIL.Image.open",
"keras.layers.Dense",
"numpy.array",
"keras.models.Sequential",
"matplotlib.pyplot.xlabel"
] |
[((395, 406), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (404, 406), False, 'import os\n'), ((916, 930), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (924, 930), True, 'import numpy as np\n'), ((940, 956), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (948, 956), True, 'import numpy as np\n'), ((1065, 1127), 'sklearn.model_selection.train_test_split', 'train_test_split', (['data', 'labels'], {'test_size': '(0.2)', 'random_state': '(42)'}), '(data, labels, test_size=0.2, random_state=42)\n', (1081, 1127), False, 'from sklearn.model_selection import train_test_split\n'), ((1249, 1276), 'keras.utils.to_categorical', 'to_categorical', (['y_train', '(43)'], {}), '(y_train, 43)\n', (1263, 1276), False, 'from keras.utils import to_categorical\n'), ((1286, 1312), 'keras.utils.to_categorical', 'to_categorical', (['y_test', '(43)'], {}), '(y_test, 43)\n', (1300, 1312), False, 'from keras.utils import to_categorical\n'), ((1342, 1354), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1352, 1354), False, 'from keras.models import Sequential, load_model\n'), ((2219, 2232), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {}), '(0)\n', (2229, 2232), True, 'import matplotlib.pyplot as plt\n'), ((2233, 2297), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['accuracy']"], {'label': '"""training accuracy"""'}), "(history.history['accuracy'], label='training accuracy')\n", (2241, 2297), True, 'import matplotlib.pyplot as plt\n'), ((2298, 2361), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_accuracy']"], {'label': '"""val accuracy"""'}), "(history.history['val_accuracy'], label='val accuracy')\n", (2306, 2361), True, 'import matplotlib.pyplot as plt\n'), ((2362, 2383), 'matplotlib.pyplot.title', 'plt.title', (['"""Accuracy"""'], {}), "('Accuracy')\n", (2371, 2383), True, 'import matplotlib.pyplot as plt\n'), ((2384, 2404), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epochs"""'], {}), "('epochs')\n", (2394, 2404), True, 
'import matplotlib.pyplot as plt\n'), ((2405, 2427), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""accuracy"""'], {}), "('accuracy')\n", (2415, 2427), True, 'import matplotlib.pyplot as plt\n'), ((2428, 2440), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2438, 2440), True, 'import matplotlib.pyplot as plt\n'), ((2441, 2451), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2449, 2451), True, 'import matplotlib.pyplot as plt\n'), ((2453, 2466), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (2463, 2466), True, 'import matplotlib.pyplot as plt\n'), ((2467, 2523), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['loss']"], {'label': '"""training loss"""'}), "(history.history['loss'], label='training loss')\n", (2475, 2523), True, 'import matplotlib.pyplot as plt\n'), ((2524, 2579), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_loss']"], {'label': '"""val loss"""'}), "(history.history['val_loss'], label='val loss')\n", (2532, 2579), True, 'import matplotlib.pyplot as plt\n'), ((2580, 2597), 'matplotlib.pyplot.title', 'plt.title', (['"""Loss"""'], {}), "('Loss')\n", (2589, 2597), True, 'import matplotlib.pyplot as plt\n'), ((2598, 2618), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epochs"""'], {}), "('epochs')\n", (2608, 2618), True, 'import matplotlib.pyplot as plt\n'), ((2619, 2637), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""loss"""'], {}), "('loss')\n", (2629, 2637), True, 'import matplotlib.pyplot as plt\n'), ((2638, 2650), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2648, 2650), True, 'import matplotlib.pyplot as plt\n'), ((2651, 2661), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2659, 2661), True, 'import matplotlib.pyplot as plt\n'), ((2750, 2773), 'pandas.read_csv', 'pd.read_csv', (['"""Test.csv"""'], {}), "('Test.csv')\n", (2761, 2773), True, 'import pandas as pd\n'), ((2968, 2982), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (2976, 2982), 
True, 'import numpy as np\n'), ((536, 552), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (546, 552), False, 'import os\n'), ((1365, 1458), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(32)', 'kernel_size': '(5, 5)', 'activation': '"""relu"""', 'input_shape': 'X_train.shape[1:]'}), "(filters=32, kernel_size=(5, 5), activation='relu', input_shape=\n X_train.shape[1:])\n", (1371, 1458), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1464, 1521), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(32)', 'kernel_size': '(5, 5)', 'activation': '"""relu"""'}), "(filters=32, kernel_size=(5, 5), activation='relu')\n", (1470, 1521), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1532, 1559), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (1541, 1559), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1571, 1589), 'keras.layers.Dropout', 'Dropout', ([], {'rate': '(0.25)'}), '(rate=0.25)\n', (1578, 1589), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1601, 1658), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(64)', 'kernel_size': '(3, 3)', 'activation': '"""relu"""'}), "(filters=64, kernel_size=(3, 3), activation='relu')\n", (1607, 1658), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1670, 1727), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(64)', 'kernel_size': '(3, 3)', 'activation': '"""relu"""'}), "(filters=64, kernel_size=(3, 3), activation='relu')\n", (1676, 1727), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1739, 1766), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (1748, 1766), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1778, 1796), 'keras.layers.Dropout', 'Dropout', ([], {'rate': '(0.25)'}), 
'(rate=0.25)\n', (1785, 1796), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1808, 1817), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1815, 1817), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1829, 1858), 'keras.layers.Dense', 'Dense', (['(256)'], {'activation': '"""relu"""'}), "(256, activation='relu')\n", (1834, 1858), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1870, 1887), 'keras.layers.Dropout', 'Dropout', ([], {'rate': '(0.5)'}), '(rate=0.5)\n', (1877, 1887), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((1899, 1930), 'keras.layers.Dense', 'Dense', (['(43)'], {'activation': '"""softmax"""'}), "(43, activation='softmax')\n", (1904, 1930), False, 'from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout\n'), ((2877, 2892), 'PIL.Image.open', 'Image.open', (['img'], {}), '(img)\n', (2887, 2892), False, 'from PIL import Image\n'), ((3100, 3128), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['labels', 'pred'], {}), '(labels, pred)\n', (3114, 3128), False, 'from sklearn.metrics import accuracy_score\n'), ((2943, 2958), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (2951, 2958), True, 'import numpy as np\n'), ((608, 635), 'PIL.Image.open', 'Image.open', (["(path + '\\\\' + a)"], {}), "(path + '\\\\' + a)\n", (618, 635), False, 'from PIL import Image\n'), ((697, 712), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (705, 712), True, 'import numpy as np\n')]
|
import dataclasses
import re
import sys
import copy
import json
import yaml
from enum import Enum
from pathlib import Path
from dataclasses import dataclass, field
from typing import Any, Iterable, List, NewType, Optional, Tuple, Union, Dict
from transformers.hf_argparser import HfArgumentParser as ArgumentParser
DataClass = NewType("DataClass", Any)
DataClassType = NewType("DataClassType", Any)
def lambda_field(default, **kwargs):
return field(default_factory=lambda: copy.copy(default))
class HfArgumentParser(ArgumentParser):
def parse_json_file(self, json_file: str) -> Tuple[DataClass, ...]:
"""
Alternative helper method that does not use `argparse` at all, instead loading a json file and populating the
dataclass types.
"""
data = json.loads(Path(json_file).read_text())
outputs = []
for dtype in self.dataclass_types:
keys = {f.name for f in dataclasses.fields(dtype) if f.init}
arg_name = dtype.__mro__[-2].__name__
inputs = {k: v for k, v in data[arg_name].items() if k in keys}
obj = dtype(**inputs)
outputs.append(obj)
return (*outputs,)
def parse_yaml_file(self, yaml_file: str) -> Tuple[DataClass, ...]:
"""
Alternative helper method that does not use `argparse` at all, instead loading a yaml file and populating the
dataclass types.
"""
# https://stackoverflow.com/questions/30458977/yaml-loads-5e-6-as-string-and-not-a-number
loader = yaml.SafeLoader
loader.add_implicit_resolver(
u'tag:yaml.org,2002:float',
re.compile(u'''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
list(u'-+0123456789.'))
data = yaml.load(Path(yaml_file).read_text(), Loader=loader)
outputs = []
for dtype in self.dataclass_types:
keys = {f.name for f in dataclasses.fields(dtype) if f.init}
arg_name = dtype.__mro__[-2].__name__
inputs = {k: v for k, v in data[arg_name].items() if k in keys}
obj = dtype(**inputs)
outputs.append(obj)
return (*outputs,)
|
[
"copy.copy",
"pathlib.Path",
"dataclasses.fields",
"typing.NewType",
"re.compile"
] |
[((330, 355), 'typing.NewType', 'NewType', (['"""DataClass"""', 'Any'], {}), "('DataClass', Any)\n", (337, 355), False, 'from typing import Any, Iterable, List, NewType, Optional, Tuple, Union, Dict\n'), ((372, 401), 'typing.NewType', 'NewType', (['"""DataClassType"""', 'Any'], {}), "('DataClassType', Any)\n", (379, 401), False, 'from typing import Any, Iterable, List, NewType, Optional, Tuple, Union, Dict\n'), ((1655, 1985), 're.compile', 're.compile', (['u"""^(?:\n [-+]?(?:[0-9][0-9_]*)\\\\.[0-9_]*(?:[eE][-+]?[0-9]+)?\n |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)\n |\\\\.[0-9_]+(?:[eE][-+][0-9]+)?\n |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\\\.[0-9_]*\n |[-+]?\\\\.(?:inf|Inf|INF)\n |\\\\.(?:nan|NaN|NAN))$"""', 're.X'], {}), '(\n u"""^(?:\n [-+]?(?:[0-9][0-9_]*)\\\\.[0-9_]*(?:[eE][-+]?[0-9]+)?\n |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)\n |\\\\.[0-9_]+(?:[eE][-+][0-9]+)?\n |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\\\.[0-9_]*\n |[-+]?\\\\.(?:inf|Inf|INF)\n |\\\\.(?:nan|NaN|NAN))$"""\n , re.X)\n', (1665, 1985), False, 'import re\n'), ((482, 500), 'copy.copy', 'copy.copy', (['default'], {}), '(default)\n', (491, 500), False, 'import copy\n'), ((809, 824), 'pathlib.Path', 'Path', (['json_file'], {}), '(json_file)\n', (813, 824), False, 'from pathlib import Path\n'), ((938, 963), 'dataclasses.fields', 'dataclasses.fields', (['dtype'], {}), '(dtype)\n', (956, 963), False, 'import dataclasses\n'), ((2038, 2053), 'pathlib.Path', 'Path', (['yaml_file'], {}), '(yaml_file)\n', (2042, 2053), False, 'from pathlib import Path\n'), ((2182, 2207), 'dataclasses.fields', 'dataclasses.fields', (['dtype'], {}), '(dtype)\n', (2200, 2207), False, 'import dataclasses\n')]
|
import qcircuits as qc
# Quantum Teleportation: transmitting two classical bits to transport a qubit state
# Alice has a qubit in a given quantum state.
# Alice and Bob have previously prepared a Bell state, and have since
# physically separated the qubits.
# Alice manipulates her hidden qubit and her half of the Bell state, and then
# measures both qubits.
# She sends the result (two classical bits) to Bob, who is able to reconstruct
# Alice's state by applying operators based on the measurement outcomes.
def quantum_teleportation(alice_state):
# Get operators we will need
CNOT = qc.CNOT()
H = qc.Hadamard()
X = qc.PauliX()
Z = qc.PauliZ()
# The prepared, shared Bell state
bell = qc.bell_state(0, 0)
# The whole state vector
state = alice_state * bell
# Apply CNOT and Hadamard gate
state = CNOT(state, qubit_indices=[0, 1])
state = H(state, qubit_indices=[0])
# Measure the first two bits
# The only uncollapsed part of the state vector is Bob's
M1, M2 = state.measure(qubit_indices=[0, 1], remove=True)
# Apply X and/or Z gates to third qubit depending on measurements
if M2:
state = X(state)
if M1:
state = Z(state)
return state
if __name__ == '__main__':
# Alice's original state to be teleported to Bob
alice = qc.qubit(theta=1.5, phi=0.5, global_phase=0.2)
# Bob's state after quantum teleportation
bob = quantum_teleportation(alice)
print('Original state:', alice)
print('\nTeleported state:', bob)
|
[
"qcircuits.Hadamard",
"qcircuits.CNOT",
"qcircuits.PauliX",
"qcircuits.PauliZ",
"qcircuits.bell_state",
"qcircuits.qubit"
] |
[((600, 609), 'qcircuits.CNOT', 'qc.CNOT', ([], {}), '()\n', (607, 609), True, 'import qcircuits as qc\n'), ((618, 631), 'qcircuits.Hadamard', 'qc.Hadamard', ([], {}), '()\n', (629, 631), True, 'import qcircuits as qc\n'), ((640, 651), 'qcircuits.PauliX', 'qc.PauliX', ([], {}), '()\n', (649, 651), True, 'import qcircuits as qc\n'), ((660, 671), 'qcircuits.PauliZ', 'qc.PauliZ', ([], {}), '()\n', (669, 671), True, 'import qcircuits as qc\n'), ((722, 741), 'qcircuits.bell_state', 'qc.bell_state', (['(0)', '(0)'], {}), '(0, 0)\n', (735, 741), True, 'import qcircuits as qc\n'), ((1336, 1382), 'qcircuits.qubit', 'qc.qubit', ([], {'theta': '(1.5)', 'phi': '(0.5)', 'global_phase': '(0.2)'}), '(theta=1.5, phi=0.5, global_phase=0.2)\n', (1344, 1382), True, 'import qcircuits as qc\n')]
|
# built-in
import ast
from textwrap import dedent
# external
import astroid
import pytest
# project
from deal.linter._extractors import get_exceptions
@pytest.mark.parametrize('text, expected', [
    ('raise BaseException', (BaseException, )),
    ('raise ValueError', (ValueError, )),
    ('raise UnknownError', ('UnknownError', )),
    ('raise ValueError("lol")', (ValueError, )),
    ('raise unknown()', ()),
    ('raise 1 + 2', ()),
    ('assert False', (AssertionError, )),
    ('12 / 0', (ZeroDivisionError, )),
    ('12 + 0', ()),
    ('exit()', (SystemExit, )),
    ('exit(13)', (SystemExit, )),
    ('sys.exit(13)', (SystemExit, )),
    ('something.exit(13)', ()),
    # try-except
    ('try:\n raise AError\nexcept Exception:\n pass', ()),
    ('try:\n raise AError\nexcept AError:\n raise BError', ('BError', )),
    ('try:\n pass\nfinally:\n raise KeyError', (KeyError, )),
    ('if True: raise KeyError', (KeyError, )),
    ('for i in lst: raise KeyError', (KeyError, )),
])
def test_get_exceptions_simple(text, expected):
    """The same exceptions are extracted from both astroid and plain ast trees."""
    astroid_tree = astroid.parse(text)
    print(astroid_tree.repr_tree())
    assert tuple(exc.value for exc in get_exceptions(body=astroid_tree.body)) == expected
    stdlib_tree = ast.parse(text)
    print(ast.dump(stdlib_tree))
    assert tuple(exc.value for exc in get_exceptions(body=stdlib_tree.body)) == expected
def test_inference_simple():
    """Explicit and implicit raises inside called helpers propagate into `f`."""
    text = """
    def subf():
        raise ValueError  # explicit raise
    def subf2():
        c = 1 - 1
        1 / c  # implicit raise
        d = [1, 2, 3]
        1 / d  # resolved into not a constant
        a = b
    @deal.raises(KeyError)
    def f():
        a = 1
        a()  # resolved not in a function
        unknown()  # cannot resolve
        subf()  # resolve
        subf2()
    """
    module = astroid.parse(dedent(text))
    print(module.repr_tree())
    # The decorated function is the last statement in the parsed module.
    extracted = tuple(token.value for token in get_exceptions(body=module.body[-1].body))
    assert extracted == (ValueError, ZeroDivisionError)
def test_inference_assign():
    """An exception name that cannot be resolved is reported by its string name."""
    text = """
    def subf():
        raise Unknown
    @deal.raises(KeyError)
    def f():
        b = subf()
    """
    module = astroid.parse(dedent(text))
    print(module.repr_tree())
    extracted = tuple(token.value for token in get_exceptions(body=module.body[-1].body))
    assert extracted == ('Unknown', )
def test_inference_ok_uncalled():
    """Referencing a raising function without calling it yields no exceptions."""
    text = """
    def subf():
        raise ValueError
    @deal.raises(KeyError)
    def f():
        subf
    """
    module = astroid.parse(dedent(text))
    print(module.repr_tree())
    extracted = tuple(token.value for token in get_exceptions(body=module.body[-1].body))
    assert extracted == ()
def test_inference_subcalls():
    """Raises are collected from calls nested inside another call's arguments."""
    text = """
    def subf():
        raise ValueError
    def subf2():
        raise IndexError
    @deal.raises(KeyError)
    def f():
        other(subf(), b=subf2())
    """
    module = astroid.parse(dedent(text))
    print(module.repr_tree())
    extracted = tuple(token.value for token in get_exceptions(body=module.body[-1].body))
    assert extracted == (ValueError, IndexError)
def test_resolve_doesnt_fail_for_simple_ast():
    """Extraction must not crash on a plain ast tree, which has no inference."""
    text = """
    def subf():
        raise ValueError  # explicit raise
    @deal.raises(KeyError)
    def f():
        subf()
    """
    module = ast.parse(dedent(text))
    print(ast.dump(module))
    # Only checks that this does not raise; no result is asserted.
    tuple(get_exceptions(body=module.body[-1].body))
def test_inference_subcontracts():
    """A called function's own @deal.raises contract is picked up; junk decorators are ignored."""
    text = """
    @deal.raises(SomeError)  # actual contract
    @deal.raises(1)  # ignore junk
    @deal.post(lambda _: 1)  # ignore other contracts
    def subf():
        return 1
    @deal.raises(KeyError)
    def f():
        b = subf()
    """
    module = astroid.parse(dedent(text))
    print(module.repr_tree())
    extracted = tuple(token.value for token in get_exceptions(body=module.body[-1].body))
    assert extracted == ('SomeError', )
def test_inference_doesnt_have_exceptions():
    """Calling a function that raises nothing contributes no exceptions."""
    text = """
    def subf():
        something()
        return 1
    @deal.raises(KeyError)
    def f():
        b = subf()
    """
    module = astroid.parse(dedent(text))
    print(module.repr_tree())
    extracted = tuple(token.value for token in get_exceptions(body=module.body[-1].body))
    assert extracted == ()
|
[
"textwrap.dedent",
"deal.linter._extractors.get_exceptions",
"astroid.parse",
"ast.dump",
"pytest.mark.parametrize",
"ast.parse"
] |
[((156, 942), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, expected"""', '[(\'raise BaseException\', (BaseException,)), (\'raise ValueError\', (\n ValueError,)), (\'raise UnknownError\', (\'UnknownError\',)), (\n \'raise ValueError("lol")\', (ValueError,)), (\'raise unknown()\', ()), (\n \'raise 1 + 2\', ()), (\'assert False\', (AssertionError,)), (\'12 / 0\', (\n ZeroDivisionError,)), (\'12 + 0\', ()), (\'exit()\', (SystemExit,)), (\n \'exit(13)\', (SystemExit,)), (\'sys.exit(13)\', (SystemExit,)), (\n \'something.exit(13)\', ()), (\n """try:\n raise AError\nexcept Exception:\n pass""", ()), (\n \'try:\\n raise AError\\nexcept AError:\\n raise BError\', (\'BError\',)), (\n """try:\n pass\nfinally:\n raise KeyError""", (KeyError,)), (\n \'if True: raise KeyError\', (KeyError,)), (\n \'for i in lst: raise KeyError\', (KeyError,))]'], {}), '(\'text, expected\', [(\'raise BaseException\', (\n BaseException,)), (\'raise ValueError\', (ValueError,)), (\n \'raise UnknownError\', (\'UnknownError\',)), (\'raise ValueError("lol")\', (\n ValueError,)), (\'raise unknown()\', ()), (\'raise 1 + 2\', ()), (\n \'assert False\', (AssertionError,)), (\'12 / 0\', (ZeroDivisionError,)), (\n \'12 + 0\', ()), (\'exit()\', (SystemExit,)), (\'exit(13)\', (SystemExit,)),\n (\'sys.exit(13)\', (SystemExit,)), (\'something.exit(13)\', ()), (\n """try:\n raise AError\nexcept Exception:\n pass""", ()), (\n \'try:\\n raise AError\\nexcept AError:\\n raise BError\', (\'BError\',)), (\n """try:\n pass\nfinally:\n raise KeyError""", (KeyError,)), (\n \'if True: raise KeyError\', (KeyError,)), (\n \'for i in lst: raise KeyError\', (KeyError,))])\n', (179, 942), False, 'import pytest\n'), ((1053, 1072), 'astroid.parse', 'astroid.parse', (['text'], {}), '(text)\n', (1066, 1072), False, 'import astroid\n'), ((1213, 1228), 'ast.parse', 'ast.parse', (['text'], {}), '(text)\n', (1222, 1228), False, 'import ast\n'), ((1239, 1253), 'ast.dump', 'ast.dump', (['tree'], {}), '(tree)\n', 
(1247, 1253), False, 'import ast\n'), ((1892, 1904), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (1898, 1904), False, 'from textwrap import dedent\n'), ((2289, 2301), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (2295, 2301), False, 'from textwrap import dedent\n'), ((2670, 2682), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (2676, 2682), False, 'from textwrap import dedent\n'), ((3108, 3120), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (3114, 3120), False, 'from textwrap import dedent\n'), ((3529, 3541), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (3535, 3541), False, 'from textwrap import dedent\n'), ((3553, 3567), 'ast.dump', 'ast.dump', (['tree'], {}), '(tree)\n', (3561, 3567), False, 'import ast\n'), ((3614, 3644), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'func_tree'}), '(body=func_tree)\n', (3628, 3644), False, 'from deal.linter._extractors import get_exceptions\n'), ((4009, 4021), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (4015, 4021), False, 'from textwrap import dedent\n'), ((4425, 4437), 'textwrap.dedent', 'dedent', (['text'], {}), '(text)\n', (4431, 4437), False, 'from textwrap import dedent\n'), ((1138, 1168), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'tree.body'}), '(body=tree.body)\n', (1152, 1168), False, 'from deal.linter._extractors import get_exceptions\n'), ((1292, 1322), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'tree.body'}), '(body=tree.body)\n', (1306, 1322), False, 'from deal.linter._extractors import get_exceptions\n'), ((2006, 2036), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'func_tree'}), '(body=func_tree)\n', (2020, 2036), False, 'from deal.linter._extractors import get_exceptions\n'), ((2403, 2433), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'func_tree'}), '(body=func_tree)\n', (2417, 2433), False, 
'from deal.linter._extractors import get_exceptions\n'), ((2784, 2814), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'func_tree'}), '(body=func_tree)\n', (2798, 2814), False, 'from deal.linter._extractors import get_exceptions\n'), ((3222, 3252), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'func_tree'}), '(body=func_tree)\n', (3236, 3252), False, 'from deal.linter._extractors import get_exceptions\n'), ((4123, 4153), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'func_tree'}), '(body=func_tree)\n', (4137, 4153), False, 'from deal.linter._extractors import get_exceptions\n'), ((4539, 4569), 'deal.linter._extractors.get_exceptions', 'get_exceptions', ([], {'body': 'func_tree'}), '(body=func_tree)\n', (4553, 4569), False, 'from deal.linter._extractors import get_exceptions\n')]
|
import io
import numpy as np
import pytest
from typing import List, Tuple
from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto
from mlagents_envs.communicator_objects.observation_pb2 import (
ObservationProto,
NONE,
PNG,
)
from mlagents_envs.communicator_objects.brain_parameters_pb2 import BrainParametersProto
from mlagents_envs.communicator_objects.agent_info_action_pair_pb2 import (
AgentInfoActionPairProto,
)
from mlagents_envs.communicator_objects.agent_action_pb2 import AgentActionProto
from mlagents_envs.base_env import (
BehaviorSpec,
ActionSpec,
DecisionSteps,
TerminalSteps,
)
from mlagents_envs.exception import UnityObservationException
from mlagents_envs.rpc_utils import (
behavior_spec_from_proto,
process_pixels,
_process_visual_observation,
_process_vector_observation,
steps_from_proto,
)
from PIL import Image
def generate_list_agent_proto(
    n_agent: int,
    shape: List[Tuple[int]],
    infinite_rewards: bool = False,
    nan_observations: bool = False,
) -> List[AgentInfoProto]:
    """Build `n_agent` AgentInfoProto messages with one observation per entry of `shape`.

    Every other agent is flagged done and every fourth max_step_reached; rewards
    equal the agent index unless `infinite_rewards` is set; observation values are
    0.1, or NaN when `nan_observations` is set.
    """
    protos = []
    for index in range(n_agent):
        info = AgentInfoProto()
        info.reward = float("inf") if infinite_rewards else index
        info.done = index % 2 == 0
        info.max_step_reached = index % 4 == 0
        info.id = index
        info.action_mask.extend([True, False] * 5)
        observations = []
        for obs_shape in shape:
            obs = ObservationProto()
            obs.shape.extend(list(obs_shape))
            obs.compression_type = NONE
            fill_value = float("nan") if nan_observations else 0.1
            obs.float_data.data.extend([fill_value] * int(np.prod(obs_shape)))
            observations.append(obs)
        info.observations.extend(observations)
        protos.append(info)
    return protos
def generate_compressed_data(in_array: np.ndarray) -> bytes:
    """PNG-encode a float HxWxC image in [0, 1] as concatenated 3-channel PNGs.

    Channels are consumed three at a time; the final group is zero-padded up to
    three channels when C is not a multiple of 3.
    """
    as_uint8 = (in_array * 255).astype(np.uint8)
    channels = in_array.shape[2]
    groups = (channels + 2) // 3
    chunks = []
    for group in range(groups):
        rgb = as_uint8[..., 3 * group : 3 * group + 3]
        leftover = channels % 3
        if leftover != 0 and group == groups - 1:
            # Last group is short: pad with zero channels so PIL sees valid RGB.
            pad_shape = list(in_array.shape)
            pad_shape[2] = 3 - leftover
            rgb = np.concatenate([rgb, np.zeros(pad_shape, dtype=np.uint8)], axis=2)
        buffer = io.BytesIO()
        Image.fromarray(rgb, "RGB").save(buffer, format="PNG")
        chunks.append(buffer.getvalue())
    return b"".join(chunks)
# test helper function for old C# API (no compressed channel mapping)
def generate_compressed_proto_obs(
    in_array: np.ndarray, grayscale: bool = False
) -> ObservationProto:
    """Wrap `in_array` as a PNG-compressed ObservationProto (old C# API, no mapping)."""
    proto = ObservationProto()
    proto.compressed_data = generate_compressed_data(in_array)
    proto.compression_type = PNG
    if grayscale:
        # The grayscale flag exists only in the old API without channel mapping:
        # the reported shape collapses to a single channel.
        proto.shape.extend([in_array.shape[0], in_array.shape[1], 1])
    else:
        proto.shape.extend(in_array.shape)
    return proto
# test helper function for new C# API (with compressed channel mapping)
def generate_compressed_proto_obs_with_mapping(
    in_array: np.ndarray, mapping: List[int]
) -> ObservationProto:
    """Wrap `in_array` as a PNG-compressed ObservationProto with a channel mapping.

    The reported channel count is the number of distinct non-negative mapping
    entries (negative entries mark padded channels).
    """
    proto = ObservationProto()
    proto.compressed_data = generate_compressed_data(in_array)
    proto.compression_type = PNG
    if mapping is None:
        proto.shape.extend(in_array.shape)
    else:
        proto.compressed_channel_mapping.extend(mapping)
        real_channels = {entry for entry in mapping if entry >= 0}
        proto.shape.extend(
            [in_array.shape[0], in_array.shape[1], len(real_channels)]
        )
    return proto
def generate_uncompressed_proto_obs(in_array: np.ndarray) -> ObservationProto:
    """Wrap `in_array` as an uncompressed float-data ObservationProto."""
    proto = ObservationProto()
    proto.compression_type = NONE
    proto.shape.extend(in_array.shape)
    proto.float_data.data.extend(in_array.flatten().tolist())
    return proto
def proto_from_steps(
    decision_steps: DecisionSteps, terminal_steps: TerminalSteps
) -> List[AgentInfoProto]:
    """Convert DecisionSteps/TerminalSteps back into a list of AgentInfoProto.

    Test helper that inverts steps_from_proto. Decision agents are emitted first
    (done=False, max_step_reached=False), then terminal agents (done=True, with
    max_step_reached taken from `interrupted`). Rank-3 observations are re-encoded
    as uncompressed visual observations; everything else as flat float vectors.
    """
    agent_info_protos: List[AgentInfoProto] = []
    # Take care of the DecisionSteps first
    for agent_id in decision_steps.agent_id:
        agent_id_index = decision_steps.agent_id_to_index[agent_id]
        reward = decision_steps.reward[agent_id_index]
        done = False
        max_step_reached = False
        agent_mask = None
        if decision_steps.action_mask is not None:
            agent_mask = []  # type: ignore
            # Flatten this agent's per-branch masks into a single 1-D array.
            for _branch in decision_steps.action_mask:
                agent_mask = np.concatenate(
                    (agent_mask, _branch[agent_id_index, :]), axis=0
                )
        observations: List[ObservationProto] = []
        for all_observations_of_type in decision_steps.obs:
            observation = all_observations_of_type[agent_id_index]
            # Rank-3 arrays are visual observations; re-encode them uncompressed.
            if len(observation.shape) == 3:
                observations.append(generate_uncompressed_proto_obs(observation))
            else:
                observations.append(
                    ObservationProto(
                        float_data=ObservationProto.FloatData(data=observation),
                        shape=[len(observation)],
                        compression_type=NONE,
                    )
                )
        agent_info_proto = AgentInfoProto(
            reward=reward,
            done=done,
            id=agent_id,
            max_step_reached=max_step_reached,
            action_mask=agent_mask,
            observations=observations,
        )
        agent_info_protos.append(agent_info_proto)
    # Take care of the TerminalSteps second
    for agent_id in terminal_steps.agent_id:
        agent_id_index = terminal_steps.agent_id_to_index[agent_id]
        reward = terminal_steps.reward[agent_id_index]
        done = True
        max_step_reached = terminal_steps.interrupted[agent_id_index]
        final_observations: List[ObservationProto] = []
        for all_observations_of_type in terminal_steps.obs:
            observation = all_observations_of_type[agent_id_index]
            if len(observation.shape) == 3:
                final_observations.append(generate_uncompressed_proto_obs(observation))
            else:
                final_observations.append(
                    ObservationProto(
                        float_data=ObservationProto.FloatData(data=observation),
                        shape=[len(observation)],
                        compression_type=NONE,
                    )
                )
        # Terminal agents never carry an action mask.
        agent_info_proto = AgentInfoProto(
            reward=reward,
            done=done,
            id=agent_id,
            max_step_reached=max_step_reached,
            action_mask=None,
            observations=final_observations,
        )
        agent_info_protos.append(agent_info_proto)
    return agent_info_protos
# The arguments here are the DecisionSteps, TerminalSteps and continuous/discrete actions for a single agent name
def proto_from_steps_and_action(
    decision_steps: DecisionSteps,
    terminal_steps: TerminalSteps,
    continuous_actions: np.ndarray,
    discrete_actions: np.ndarray,
) -> List[AgentInfoActionPairProto]:
    """Pair each agent's AgentInfoProto with an AgentActionProto built from the
    given continuous and/or discrete actions (either may be None).
    """
    info_protos = proto_from_steps(decision_steps, terminal_steps)
    if continuous_actions is not None:
        agent_count = len(continuous_actions)
    else:
        agent_count = len(discrete_actions)
    action_protos = []
    for index in range(agent_count):
        action = AgentActionProto()
        if continuous_actions is not None:
            action.continuous_actions.extend(continuous_actions[index])
            # The deprecated field mirrors whatever actions are present.
            action.vector_actions_deprecated.extend(continuous_actions[index])
        if discrete_actions is not None:
            action.discrete_actions.extend(discrete_actions[index])
            action.vector_actions_deprecated.extend(discrete_actions[index])
        action_protos.append(action)
    return [
        AgentInfoActionPairProto(agent_info=info, action_info=action)
        for info, action in zip(info_protos, action_protos)
    ]
def test_process_pixels():
    """Round-trip an RGB image through PNG compression and back."""
    original = np.random.rand(128, 64, 3)
    decoded = process_pixels(generate_compressed_data(original), 3)
    assert decoded.shape == (128, 64, 3)
    assert np.sum(original - decoded) / np.prod(original.shape) < 0.01
    assert np.allclose(original, decoded, atol=0.01)
def test_process_pixels_multi_png():
    """Round-trip a 7-channel image that spans multiple stacked PNGs."""
    height, width, num_channels = 128, 64, 7
    original = np.random.rand(height, width, num_channels)
    decoded = process_pixels(generate_compressed_data(original), num_channels)
    assert decoded.shape == (height, width, num_channels)
    assert np.sum(original - decoded) / np.prod(original.shape) < 0.01
    assert np.allclose(original, decoded, atol=0.01)
def test_process_pixels_gray():
    """Requesting a single channel averages RGB into grayscale."""
    original = np.random.rand(128, 64, 3)
    decoded = process_pixels(generate_compressed_data(original), 1)
    assert decoded.shape == (128, 64, 1)
    expected = original.mean(axis=2, keepdims=True)
    assert np.mean(expected - decoded) < 0.01
    assert np.allclose(expected, decoded, atol=0.01)
def test_vector_observation():
    """_process_vector_observation stacks per-agent vectors and keeps the 0.1 fill."""
    n_agents = 10
    shapes = [(3,), (4,)]
    agent_protos = generate_list_agent_proto(n_agents, shapes)
    for index, obs_shape in enumerate(shapes):
        stacked = _process_vector_observation(index, obs_shape, agent_protos)
        assert list(stacked.shape) == [n_agents] + list(obs_shape)
        assert np.allclose(stacked, 0.1, atol=0.01)
def test_process_visual_observation():
    """Compressed observations with and without a channel mapping decode identically."""
    image_a = np.random.rand(128, 64, 3)
    proto_a = generate_compressed_proto_obs(image_a)
    image_b = np.random.rand(128, 64, 3)
    proto_b = generate_compressed_proto_obs_with_mapping(image_b, [0, 1, 2])
    infos = []
    for proto in (proto_a, proto_b):
        info = AgentInfoProto()
        info.observations.extend([proto])
        infos.append(info)
    batch = _process_visual_observation(0, (128, 64, 3), infos)
    assert list(batch.shape) == [2, 128, 64, 3]
    assert np.allclose(batch[0, :, :, :], image_a, atol=0.01)
    assert np.allclose(batch[1, :, :, :], image_b, atol=0.01)
def test_process_visual_observation_grayscale():
    """Grayscale decode averages the three channels, with or without a mapping."""
    image_a = np.random.rand(128, 64, 3)
    proto_a = generate_compressed_proto_obs(image_a, grayscale=True)
    image_b = np.random.rand(128, 64, 3)
    proto_b = generate_compressed_proto_obs_with_mapping(image_b, [0, 0, 0])
    expected_a = np.mean(image_a, axis=2, keepdims=True)
    expected_b = np.mean(image_b, axis=2, keepdims=True)
    infos = []
    for proto in (proto_a, proto_b):
        info = AgentInfoProto()
        info.observations.extend([proto])
        infos.append(info)
    batch = _process_visual_observation(0, (128, 64, 1), infos)
    assert list(batch.shape) == [2, 128, 64, 1]
    assert np.allclose(batch[0, :, :, :], expected_a, atol=0.01)
    assert np.allclose(batch[1, :, :, :], expected_b, atol=0.01)
def test_process_visual_observation_padded_channels():
    """Mapping entries of -1 mark padded channels that are dropped on decode."""
    image = np.random.rand(128, 64, 12)
    mapping = [0, 1, 2, 3, -1, -1, 4, 5, 6, 7, -1, -1]
    proto = generate_compressed_proto_obs_with_mapping(image, mapping)
    # Channels 4/5 and 10/11 are padding, so only these source planes survive.
    expected = np.take(image, [0, 1, 2, 3, 6, 7, 8, 9], axis=2)
    info = AgentInfoProto()
    info.observations.extend([proto])
    batch = _process_visual_observation(0, (128, 64, 8), [info])
    assert list(batch.shape) == [1, 128, 64, 8]
    assert np.allclose(batch[0, :, :, :], expected, atol=0.01)
def test_process_visual_observation_bad_shape():
    """A requested shape that disagrees with the data raises UnityObservationException."""
    proto = generate_compressed_proto_obs(np.random.rand(128, 64, 3))
    info = AgentInfoProto()
    info.observations.extend([proto])
    with pytest.raises(UnityObservationException):
        _process_visual_observation(0, (128, 42, 3), [info])
def test_batched_step_result_from_proto():
    """steps_from_proto splits agents between decision and terminal steps correctly."""
    n_agents = 10
    shapes = [(3,), (4,)]
    spec = BehaviorSpec(shapes, ActionSpec.create_continuous(3))
    decision_steps, terminal_steps = steps_from_proto(
        generate_list_agent_proto(n_agents, shapes), spec
    )
    for agent_id in range(n_agents):
        # generate_list_agent_proto sets each reward equal to the agent id.
        if agent_id in decision_steps:
            assert decision_steps[agent_id].reward == agent_id
        elif agent_id in terminal_steps:
            assert terminal_steps[agent_id].reward == agent_id
        else:
            raise Exception("Missing agent from the steps")
    # Agent ids are split across the two step objects; together they cover the range.
    assert sorted(
        list(decision_steps.agent_id) + list(terminal_steps.agent_id)
    ) == list(range(n_agents))
    for agent_id in range(n_agents):
        # Even agents were marked done; every fourth one was interrupted.
        assert (agent_id in terminal_steps) == (agent_id % 2 == 0)
        if agent_id in terminal_steps:
            assert terminal_steps[agent_id].interrupted == (agent_id % 4 == 0)
    for steps in (decision_steps, terminal_steps):
        for obs_index, obs_shape in enumerate(shapes):
            assert steps.obs[obs_index].shape[1] == obs_shape[0]
def test_action_masking_discrete():
    """Discrete branches (7, 3) yield one mask array per branch, sized by live agents."""
    n_agents = 10
    shapes = [(3,), (4,)]
    behavior_spec = BehaviorSpec(shapes, ActionSpec.create_discrete((7, 3)))
    decision_steps, _ = steps_from_proto(
        generate_list_agent_proto(n_agents, shapes), behavior_spec
    )
    masks = decision_steps.action_mask
    assert isinstance(masks, list)
    assert len(masks) == 2
    # Half of the agents are done, so only half appear in the decision steps.
    assert masks[0].shape == (n_agents / 2, 7)
    assert masks[1].shape == (n_agents / 2, 3)
    assert masks[0][0, 0]
    assert not masks[1][0, 0]
    assert masks[1][0, 1]
def test_action_masking_discrete_1():
    """A single 10-way discrete branch yields exactly one mask array."""
    n_agents = 10
    shapes = [(3,), (4,)]
    behavior_spec = BehaviorSpec(shapes, ActionSpec.create_discrete((10,)))
    decision_steps, _ = steps_from_proto(
        generate_list_agent_proto(n_agents, shapes), behavior_spec
    )
    masks = decision_steps.action_mask
    assert isinstance(masks, list)
    assert len(masks) == 1
    # Half of the agents are done, so only half appear in the decision steps.
    assert masks[0].shape == (n_agents / 2, 10)
    assert masks[0][0, 0]
def test_action_masking_discrete_2():
    """Discrete branches (2, 2, 6) yield three mask arrays of matching widths."""
    n_agents = 10
    shapes = [(3,), (4,)]
    behavior_spec = BehaviorSpec(shapes, ActionSpec.create_discrete((2, 2, 6)))
    decision_steps, _ = steps_from_proto(
        generate_list_agent_proto(n_agents, shapes), behavior_spec
    )
    masks = decision_steps.action_mask
    assert isinstance(masks, list)
    assert len(masks) == 3
    # Half of the agents are done, so only half appear in the decision steps.
    assert masks[0].shape == (n_agents / 2, 2)
    assert masks[1].shape == (n_agents / 2, 2)
    assert masks[2].shape == (n_agents / 2, 6)
    assert masks[0][0, 0]
def test_action_masking_continuous():
    """Continuous action specs produce no action mask at all."""
    n_agents = 10
    shapes = [(3,), (4,)]
    behavior_spec = BehaviorSpec(shapes, ActionSpec.create_continuous(10))
    decision_steps, _ = steps_from_proto(
        generate_list_agent_proto(n_agents, shapes), behavior_spec
    )
    assert decision_steps.action_mask is None
def test_agent_behavior_spec_from_proto():
    """behavior_spec_from_proto decodes both deprecated action-space encodings."""
    agent_proto = generate_list_agent_proto(1, [(3,), (4,)])[0]
    # Discrete encoding: space type 0, one size per branch.
    brain_params = BrainParametersProto()
    brain_params.vector_action_size_deprecated.extend([5, 4])
    brain_params.vector_action_space_type_deprecated = 0
    spec = behavior_spec_from_proto(brain_params, agent_proto)
    assert spec.action_spec.is_discrete()
    assert not spec.action_spec.is_continuous()
    assert spec.observation_shapes == [(3,), (4,)]
    assert spec.action_spec.discrete_branches == (5, 4)
    assert spec.action_spec.discrete_size == 2
    # Continuous encoding: space type 1, a single action size.
    brain_params = BrainParametersProto()
    brain_params.vector_action_size_deprecated.extend([6])
    brain_params.vector_action_space_type_deprecated = 1
    spec = behavior_spec_from_proto(brain_params, agent_proto)
    assert not spec.action_spec.is_discrete()
    assert spec.action_spec.is_continuous()
    assert spec.action_spec.continuous_size == 6
def test_batched_step_result_from_proto_raises_on_infinite():
    """Infinite rewards in the proto must be rejected with a RuntimeError."""
    shapes = [(3,), (4,)]
    behavior_spec = BehaviorSpec(shapes, ActionSpec.create_continuous(3))
    protos = generate_list_agent_proto(10, shapes, infinite_rewards=True)
    with pytest.raises(RuntimeError):
        steps_from_proto(protos, behavior_spec)
def test_batched_step_result_from_proto_raises_on_nan():
    """NaN observation values in the proto must be rejected with a RuntimeError."""
    shapes = [(3,), (4,)]
    behavior_spec = BehaviorSpec(shapes, ActionSpec.create_continuous(3))
    protos = generate_list_agent_proto(10, shapes, nan_observations=True)
    with pytest.raises(RuntimeError):
        steps_from_proto(protos, behavior_spec)
|
[
"numpy.sum",
"mlagents_envs.communicator_objects.observation_pb2.ObservationProto",
"mlagents_envs.rpc_utils.behavior_spec_from_proto",
"numpy.allclose",
"mlagents_envs.communicator_objects.brain_parameters_pb2.BrainParametersProto",
"numpy.mean",
"mlagents_envs.base_env.ActionSpec.create_continuous",
"numpy.prod",
"mlagents_envs.communicator_objects.agent_action_pb2.AgentActionProto",
"mlagents_envs.rpc_utils._process_vector_observation",
"mlagents_envs.communicator_objects.observation_pb2.ObservationProto.FloatData",
"mlagents_envs.rpc_utils._process_visual_observation",
"numpy.random.rand",
"mlagents_envs.rpc_utils.steps_from_proto",
"pytest.raises",
"io.BytesIO",
"mlagents_envs.base_env.ActionSpec.create_discrete",
"mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto",
"mlagents_envs.rpc_utils.process_pixels",
"numpy.concatenate",
"numpy.zeros",
"numpy.take",
"mlagents_envs.communicator_objects.agent_info_action_pair_pb2.AgentInfoActionPairProto",
"PIL.Image.fromarray"
] |
[((2964, 2982), 'mlagents_envs.communicator_objects.observation_pb2.ObservationProto', 'ObservationProto', ([], {}), '()\n', (2980, 2982), False, 'from mlagents_envs.communicator_objects.observation_pb2 import ObservationProto, NONE, PNG\n'), ((3569, 3587), 'mlagents_envs.communicator_objects.observation_pb2.ObservationProto', 'ObservationProto', ([], {}), '()\n', (3585, 3587), False, 'from mlagents_envs.communicator_objects.observation_pb2 import ObservationProto, NONE, PNG\n'), ((4151, 4169), 'mlagents_envs.communicator_objects.observation_pb2.ObservationProto', 'ObservationProto', ([], {}), '()\n', (4167, 4169), False, 'from mlagents_envs.communicator_objects.observation_pb2 import ObservationProto, NONE, PNG\n'), ((8617, 8643), 'numpy.random.rand', 'np.random.rand', (['(128)', '(64)', '(3)'], {}), '(128, 64, 3)\n', (8631, 8643), True, 'import numpy as np\n'), ((8710, 8737), 'mlagents_envs.rpc_utils.process_pixels', 'process_pixels', (['byte_arr', '(3)'], {}), '(byte_arr, 3)\n', (8724, 8737), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((8865, 8908), 'numpy.allclose', 'np.allclose', (['in_array', 'out_array'], {'atol': '(0.01)'}), '(in_array, out_array, atol=0.01)\n', (8876, 8908), True, 'import numpy as np\n'), ((9016, 9059), 'numpy.random.rand', 'np.random.rand', (['height', 'width', 'num_channels'], {}), '(height, width, num_channels)\n', (9030, 9059), True, 'import numpy as np\n'), ((9126, 9164), 'mlagents_envs.rpc_utils.process_pixels', 'process_pixels', (['byte_arr', 'num_channels'], {}), '(byte_arr, num_channels)\n', (9140, 9164), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((9309, 9352), 'numpy.allclose', 'np.allclose', (['in_array', 'out_array'], {'atol': '(0.01)'}), '(in_array, out_array, atol=0.01)\n', (9320, 
9352), True, 'import numpy as np\n'), ((9402, 9428), 'numpy.random.rand', 'np.random.rand', (['(128)', '(64)', '(3)'], {}), '(128, 64, 3)\n', (9416, 9428), True, 'import numpy as np\n'), ((9495, 9522), 'mlagents_envs.rpc_utils.process_pixels', 'process_pixels', (['byte_arr', '(1)'], {}), '(byte_arr, 1)\n', (9509, 9522), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((10149, 10175), 'numpy.random.rand', 'np.random.rand', (['(128)', '(64)', '(3)'], {}), '(128, 64, 3)\n', (10163, 10175), True, 'import numpy as np\n'), ((10253, 10279), 'numpy.random.rand', 'np.random.rand', (['(128)', '(64)', '(3)'], {}), '(128, 64, 3)\n', (10267, 10279), True, 'import numpy as np\n'), ((10433, 10449), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {}), '()\n', (10447, 10449), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((10503, 10519), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {}), '()\n', (10517, 10519), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((10598, 10651), 'mlagents_envs.rpc_utils._process_visual_observation', '_process_visual_observation', (['(0)', '(128, 64, 3)', 'ap_list'], {}), '(0, (128, 64, 3), ap_list)\n', (10625, 10651), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((10709, 10760), 'numpy.allclose', 'np.allclose', (['arr[0, :, :, :]', 'in_array_1'], {'atol': '(0.01)'}), '(arr[0, :, :, :], in_array_1, atol=0.01)\n', (10720, 10760), True, 'import numpy as np\n'), ((10772, 10823), 'numpy.allclose', 'np.allclose', (['arr[1, :, :, :]', 'in_array_2'], {'atol': '(0.01)'}), '(arr[1, :, :, :], in_array_2, atol=0.01)\n', (10783, 10823), True, 'import numpy as 
np\n'), ((10892, 10918), 'numpy.random.rand', 'np.random.rand', (['(128)', '(64)', '(3)'], {}), '(128, 64, 3)\n', (10906, 10918), True, 'import numpy as np\n'), ((11022, 11064), 'numpy.mean', 'np.mean', (['in_array_1'], {'axis': '(2)', 'keepdims': '(True)'}), '(in_array_1, axis=2, keepdims=True)\n', (11029, 11064), True, 'import numpy as np\n'), ((11082, 11108), 'numpy.random.rand', 'np.random.rand', (['(128)', '(64)', '(3)'], {}), '(128, 64, 3)\n', (11096, 11108), True, 'import numpy as np\n'), ((11278, 11320), 'numpy.mean', 'np.mean', (['in_array_2'], {'axis': '(2)', 'keepdims': '(True)'}), '(in_array_2, axis=2, keepdims=True)\n', (11285, 11320), True, 'import numpy as np\n'), ((11332, 11348), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {}), '()\n', (11346, 11348), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((11402, 11418), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {}), '()\n', (11416, 11418), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((11497, 11550), 'mlagents_envs.rpc_utils._process_visual_observation', '_process_visual_observation', (['(0)', '(128, 64, 1)', 'ap_list'], {}), '(0, (128, 64, 1), ap_list)\n', (11524, 11550), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((11608, 11669), 'numpy.allclose', 'np.allclose', (['arr[0, :, :, :]', 'expected_out_array_1'], {'atol': '(0.01)'}), '(arr[0, :, :, :], expected_out_array_1, atol=0.01)\n', (11619, 11669), True, 'import numpy as np\n'), ((11681, 11742), 'numpy.allclose', 'np.allclose', (['arr[1, :, :, :]', 'expected_out_array_2'], {'atol': '(0.01)'}), '(arr[1, :, :, :], expected_out_array_2, atol=0.01)\n', (11692, 11742), True, 'import numpy as np\n'), ((11817, 11844), 'numpy.random.rand', 'np.random.rand', 
(['(128)', '(64)', '(12)'], {}), '(128, 64, 12)\n', (11831, 11844), True, 'import numpy as np\n'), ((12045, 12098), 'numpy.take', 'np.take', (['in_array_1', '[0, 1, 2, 3, 6, 7, 8, 9]'], {'axis': '(2)'}), '(in_array_1, [0, 1, 2, 3, 6, 7, 8, 9], axis=2)\n', (12052, 12098), True, 'import numpy as np\n'), ((12110, 12126), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {}), '()\n', (12124, 12126), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((12200, 12253), 'mlagents_envs.rpc_utils._process_visual_observation', '_process_visual_observation', (['(0)', '(128, 64, 8)', 'ap_list'], {}), '(0, (128, 64, 8), ap_list)\n', (12227, 12253), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((12311, 12372), 'numpy.allclose', 'np.allclose', (['arr[0, :, :, :]', 'expected_out_array_1'], {'atol': '(0.01)'}), '(arr[0, :, :, :], expected_out_array_1, atol=0.01)\n', (12322, 12372), True, 'import numpy as np\n'), ((12441, 12467), 'numpy.random.rand', 'np.random.rand', (['(128)', '(64)', '(3)'], {}), '(128, 64, 3)\n', (12455, 12467), True, 'import numpy as np\n'), ((12538, 12554), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {}), '()\n', (12552, 12554), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((12980, 13011), 'mlagents_envs.rpc_utils.steps_from_proto', 'steps_from_proto', (['ap_list', 'spec'], {}), '(ap_list, spec)\n', (12996, 13011), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((14376, 14416), 'mlagents_envs.rpc_utils.steps_from_proto', 'steps_from_proto', (['ap_list', 'behavior_spec'], {}), '(ap_list, behavior_spec)\n', (14392, 14416), False, 'from mlagents_envs.rpc_utils 
import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((14997, 15037), 'mlagents_envs.rpc_utils.steps_from_proto', 'steps_from_proto', (['ap_list', 'behavior_spec'], {}), '(ap_list, behavior_spec)\n', (15013, 15037), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((15472, 15512), 'mlagents_envs.rpc_utils.steps_from_proto', 'steps_from_proto', (['ap_list', 'behavior_spec'], {}), '(ap_list, behavior_spec)\n', (15488, 15512), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((16035, 16075), 'mlagents_envs.rpc_utils.steps_from_proto', 'steps_from_proto', (['ap_list', 'behavior_spec'], {}), '(ap_list, behavior_spec)\n', (16051, 16075), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((16258, 16280), 'mlagents_envs.communicator_objects.brain_parameters_pb2.BrainParametersProto', 'BrainParametersProto', ([], {}), '()\n', (16278, 16280), False, 'from mlagents_envs.communicator_objects.brain_parameters_pb2 import BrainParametersProto\n'), ((16400, 16441), 'mlagents_envs.rpc_utils.behavior_spec_from_proto', 'behavior_spec_from_proto', (['bp', 'agent_proto'], {}), '(bp, agent_proto)\n', (16424, 16441), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((16740, 16762), 'mlagents_envs.communicator_objects.brain_parameters_pb2.BrainParametersProto', 'BrainParametersProto', ([], {}), '()\n', (16760, 16762), False, 'from mlagents_envs.communicator_objects.brain_parameters_pb2 import BrainParametersProto\n'), ((16879, 16920), 
'mlagents_envs.rpc_utils.behavior_spec_from_proto', 'behavior_spec_from_proto', (['bp', 'agent_proto'], {}), '(bp, agent_proto)\n', (16903, 16920), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((1158, 1174), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {}), '()\n', (1172, 1174), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((2606, 2639), 'PIL.Image.fromarray', 'Image.fromarray', (['sub_image', '"""RGB"""'], {}), "(sub_image, 'RGB')\n", (2621, 2639), False, 'from PIL import Image\n'), ((2657, 2669), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (2667, 2669), False, 'import io\n'), ((5717, 5861), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {'reward': 'reward', 'done': 'done', 'id': 'agent_id', 'max_step_reached': 'max_step_reached', 'action_mask': 'agent_mask', 'observations': 'observations'}), '(reward=reward, done=done, id=agent_id, max_step_reached=\n max_step_reached, action_mask=agent_mask, observations=observations)\n', (5731, 5861), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((6953, 7097), 'mlagents_envs.communicator_objects.agent_info_pb2.AgentInfoProto', 'AgentInfoProto', ([], {'reward': 'reward', 'done': 'done', 'id': 'agent_id', 'max_step_reached': 'max_step_reached', 'action_mask': 'None', 'observations': 'final_observations'}), '(reward=reward, done=done, id=agent_id, max_step_reached=\n max_step_reached, action_mask=None, observations=final_observations)\n', (6967, 7097), False, 'from mlagents_envs.communicator_objects.agent_info_pb2 import AgentInfoProto\n'), ((7867, 7885), 'mlagents_envs.communicator_objects.agent_action_pb2.AgentActionProto', 'AgentActionProto', ([], {}), '()\n', (7883, 7885), False, 'from 
mlagents_envs.communicator_objects.agent_action_pb2 import AgentActionProto\n'), ((8334, 8413), 'mlagents_envs.communicator_objects.agent_info_action_pair_pb2.AgentInfoActionPairProto', 'AgentInfoActionPairProto', ([], {'agent_info': 'agent_info_proto', 'action_info': 'action_proto'}), '(agent_info=agent_info_proto, action_info=action_proto)\n', (8358, 8413), False, 'from mlagents_envs.communicator_objects.agent_info_action_pair_pb2 import AgentInfoActionPairProto\n'), ((9924, 9981), 'mlagents_envs.rpc_utils._process_vector_observation', '_process_vector_observation', (['obs_index', 'shape', 'list_proto'], {}), '(obs_index, shape, list_proto)\n', (9951, 9981), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((10058, 10090), 'numpy.allclose', 'np.allclose', (['arr', '(0.1)'], {'atol': '(0.01)'}), '(arr, 0.1, atol=0.01)\n', (10069, 10090), True, 'import numpy as np\n'), ((12627, 12667), 'pytest.raises', 'pytest.raises', (['UnityObservationException'], {}), '(UnityObservationException)\n', (12640, 12667), False, 'import pytest\n'), ((12677, 12730), 'mlagents_envs.rpc_utils._process_visual_observation', '_process_visual_observation', (['(0)', '(128, 42, 3)', 'ap_list'], {}), '(0, (128, 42, 3), ap_list)\n', (12704, 12730), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((12852, 12883), 'mlagents_envs.base_env.ActionSpec.create_continuous', 'ActionSpec.create_continuous', (['(3)'], {}), '(3)\n', (12880, 12883), False, 'from mlagents_envs.base_env import BehaviorSpec, ActionSpec, DecisionSteps, TerminalSteps\n'), ((14245, 14279), 'mlagents_envs.base_env.ActionSpec.create_discrete', 'ActionSpec.create_discrete', (['(7, 3)'], {}), '((7, 3))\n', (14271, 14279), False, 'from mlagents_envs.base_env import BehaviorSpec, ActionSpec, 
DecisionSteps, TerminalSteps\n'), ((14867, 14900), 'mlagents_envs.base_env.ActionSpec.create_discrete', 'ActionSpec.create_discrete', (['(10,)'], {}), '((10,))\n', (14893, 14900), False, 'from mlagents_envs.base_env import BehaviorSpec, ActionSpec, DecisionSteps, TerminalSteps\n'), ((15338, 15375), 'mlagents_envs.base_env.ActionSpec.create_discrete', 'ActionSpec.create_discrete', (['(2, 2, 6)'], {}), '((2, 2, 6))\n', (15364, 15375), False, 'from mlagents_envs.base_env import BehaviorSpec, ActionSpec, DecisionSteps, TerminalSteps\n'), ((15906, 15938), 'mlagents_envs.base_env.ActionSpec.create_continuous', 'ActionSpec.create_continuous', (['(10)'], {}), '(10)\n', (15934, 15938), False, 'from mlagents_envs.base_env import BehaviorSpec, ActionSpec, DecisionSteps, TerminalSteps\n'), ((17236, 17267), 'mlagents_envs.base_env.ActionSpec.create_continuous', 'ActionSpec.create_continuous', (['(3)'], {}), '(3)\n', (17264, 17267), False, 'from mlagents_envs.base_env import BehaviorSpec, ActionSpec, DecisionSteps, TerminalSteps\n'), ((17359, 17386), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (17372, 17386), False, 'import pytest\n'), ((17396, 17436), 'mlagents_envs.rpc_utils.steps_from_proto', 'steps_from_proto', (['ap_list', 'behavior_spec'], {}), '(ap_list, behavior_spec)\n', (17412, 17436), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((17581, 17612), 'mlagents_envs.base_env.ActionSpec.create_continuous', 'ActionSpec.create_continuous', (['(3)'], {}), '(3)\n', (17609, 17612), False, 'from mlagents_envs.base_env import BehaviorSpec, ActionSpec, DecisionSteps, TerminalSteps\n'), ((17704, 17731), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (17717, 17731), False, 'import pytest\n'), ((17741, 17781), 'mlagents_envs.rpc_utils.steps_from_proto', 'steps_from_proto', (['ap_list', 'behavior_spec'], 
{}), '(ap_list, behavior_spec)\n', (17757, 17781), False, 'from mlagents_envs.rpc_utils import behavior_spec_from_proto, process_pixels, _process_visual_observation, _process_vector_observation, steps_from_proto\n'), ((1508, 1526), 'mlagents_envs.communicator_objects.observation_pb2.ObservationProto', 'ObservationProto', ([], {}), '()\n', (1524, 1526), False, 'from mlagents_envs.communicator_objects.observation_pb2 import ObservationProto, NONE, PNG\n'), ((2493, 2529), 'numpy.zeros', 'np.zeros', (['zero_shape'], {'dtype': 'np.uint8'}), '(zero_shape, dtype=np.uint8)\n', (2501, 2529), True, 'import numpy as np\n'), ((2554, 2592), 'numpy.concatenate', 'np.concatenate', (['[sub_image, z]'], {'axis': '(2)'}), '([sub_image, z], axis=2)\n', (2568, 2592), True, 'import numpy as np\n'), ((8792, 8820), 'numpy.sum', 'np.sum', (['(in_array - out_array)'], {}), '(in_array - out_array)\n', (8798, 8820), True, 'import numpy as np\n'), ((8823, 8846), 'numpy.prod', 'np.prod', (['in_array.shape'], {}), '(in_array.shape)\n', (8830, 8846), True, 'import numpy as np\n'), ((9236, 9264), 'numpy.sum', 'np.sum', (['(in_array - out_array)'], {}), '(in_array - out_array)\n', (9242, 9264), True, 'import numpy as np\n'), ((9267, 9290), 'numpy.prod', 'np.prod', (['in_array.shape'], {}), '(in_array.shape)\n', (9274, 9290), True, 'import numpy as np\n'), ((4973, 5037), 'numpy.concatenate', 'np.concatenate', (['(agent_mask, _branch[agent_id_index, :])'], {'axis': '(0)'}), '((agent_mask, _branch[agent_id_index, :]), axis=0)\n', (4987, 5037), True, 'import numpy as np\n'), ((1760, 1785), 'numpy.prod', 'np.prod', (['shape[obs_index]'], {}), '(shape[obs_index])\n', (1767, 1785), True, 'import numpy as np\n'), ((5507, 5551), 'mlagents_envs.communicator_objects.observation_pb2.ObservationProto.FloatData', 'ObservationProto.FloatData', ([], {'data': 'observation'}), '(data=observation)\n', (5533, 5551), False, 'from mlagents_envs.communicator_objects.observation_pb2 import ObservationProto, NONE, 
PNG\n'), ((6743, 6787), 'mlagents_envs.communicator_objects.observation_pb2.ObservationProto.FloatData', 'ObservationProto.FloatData', ([], {'data': 'observation'}), '(data=observation)\n', (6769, 6787), False, 'from mlagents_envs.communicator_objects.observation_pb2 import ObservationProto, NONE, PNG\n')]
|
from __future__ import annotations
import asyncio
from typing import TYPE_CHECKING
import pytest
if TYPE_CHECKING:
from pytest import FixtureRequest
pytest_plugins = ["pytester"]
@pytest.fixture(scope="session")
def event_loop():
"""Redefine pytest-asyncio event_loop fixture with 'session' scope."""
loop = asyncio.get_event_loop_policy().get_event_loop()
yield loop
loop.close()
@pytest.fixture(scope="session")
async def setup() -> None:
...
@pytest.fixture(autouse=True)
async def teardown(request: FixtureRequest):
try:
yield
finally:
pass
|
[
"pytest.fixture",
"asyncio.get_event_loop_policy"
] |
[((190, 221), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (204, 221), False, 'import pytest\n'), ((410, 441), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (424, 441), False, 'import pytest\n'), ((480, 508), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (494, 508), False, 'import pytest\n'), ((326, 357), 'asyncio.get_event_loop_policy', 'asyncio.get_event_loop_policy', ([], {}), '()\n', (355, 357), False, 'import asyncio\n')]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import utils
from time import sleep
from urllib import parse
from requests import codes, exceptions
# Developer token
apikey = utils.config('api_key')
def run(string, entities):
"""Verify if one or several email addresses have been pwned"""
emails = []
for item in entities:
if item['entity'] == 'email':
emails.append(item['resolution']['value'])
if not emails:
emails = utils.config('emails')
if not emails:
return utils.output('end', 'no-email', utils.translate('no-email'))
utils.output('inter', 'checking', utils.translate('checking'))
for index, email in enumerate(emails):
isLastEmail = index == len(emails) - 1
breached = checkForBreach(email)
data = { 'email': email }
# Have I Been Pwned API returns a 403 when accessed by unauthorized/banned clients
if breached == 403:
return utils.output('end', 'blocked', utils.translate('blocked', { 'website_name': 'Have I Been Pwned' }))
elif breached == 503:
return utils.output('end', 'blocked', utils.translate('unavailable', { 'website_name': 'Have I Been Pwned' }))
elif not breached:
if isLastEmail:
return utils.output('end', 'no-pwnage', utils.translate('no-pwnage', data))
else:
utils.output('inter', 'no-pwnage', utils.translate('no-pwnage', data))
else:
data['result'] = ''
for index, b in enumerate(breached):
data['result'] += utils.translate('list_element', {
'url': 'http://' + b['Domain'],
'name': b['Name'],
'total': b['PwnCount']
}
)
if isLastEmail:
return utils.output('end', 'pwned', utils.translate('pwned', data))
else:
utils.output('inter', 'pwned', utils.translate('pwned', data))
def checkForBreach(email):
# Delay for 2 seconds before making request to accomodate API usage policy
sleep(2)
truncate = '?truncateResponse=true'
url = 'https://haveibeenpwned.com/api/v3/breachedaccount/' + parse.quote_plus(email)
try:
response = utils.http('GET', url, { 'hibp-api-key': apikey })
if response.status_code == 404:
return None
elif response.status_code == 200:
return response.json()
return response.status_code
except exceptions.RequestException as e:
return utils.output('end', 'down', utils.translate('errors', { 'website_name': 'Have I Been Pwned' }))
|
[
"utils.http",
"utils.config",
"time.sleep",
"utils.translate",
"urllib.parse.quote_plus"
] |
[((174, 197), 'utils.config', 'utils.config', (['"""api_key"""'], {}), "('api_key')\n", (186, 197), False, 'import utils\n'), ((2149, 2157), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (2154, 2157), False, 'from time import sleep\n'), ((473, 495), 'utils.config', 'utils.config', (['"""emails"""'], {}), "('emails')\n", (485, 495), False, 'import utils\n'), ((639, 666), 'utils.translate', 'utils.translate', (['"""checking"""'], {}), "('checking')\n", (654, 666), False, 'import utils\n'), ((2263, 2286), 'urllib.parse.quote_plus', 'parse.quote_plus', (['email'], {}), '(email)\n', (2279, 2286), False, 'from urllib import parse\n'), ((2316, 2364), 'utils.http', 'utils.http', (['"""GET"""', 'url', "{'hibp-api-key': apikey}"], {}), "('GET', url, {'hibp-api-key': apikey})\n", (2326, 2364), False, 'import utils\n'), ((571, 598), 'utils.translate', 'utils.translate', (['"""no-email"""'], {}), "('no-email')\n", (586, 598), False, 'import utils\n'), ((1004, 1069), 'utils.translate', 'utils.translate', (['"""blocked"""', "{'website_name': 'Have I Been Pwned'}"], {}), "('blocked', {'website_name': 'Have I Been Pwned'})\n", (1019, 1069), False, 'import utils\n'), ((2634, 2698), 'utils.translate', 'utils.translate', (['"""errors"""', "{'website_name': 'Have I Been Pwned'}"], {}), "('errors', {'website_name': 'Have I Been Pwned'})\n", (2649, 2698), False, 'import utils\n'), ((1153, 1222), 'utils.translate', 'utils.translate', (['"""unavailable"""', "{'website_name': 'Have I Been Pwned'}"], {}), "('unavailable', {'website_name': 'Have I Been Pwned'})\n", (1168, 1222), False, 'import utils\n'), ((1608, 1721), 'utils.translate', 'utils.translate', (['"""list_element"""', "{'url': 'http://' + b['Domain'], 'name': b['Name'], 'total': b['PwnCount']}"], {}), "('list_element', {'url': 'http://' + b['Domain'], 'name': b[\n 'Name'], 'total': b['PwnCount']})\n", (1623, 1721), False, 'import utils\n'), ((1337, 1371), 'utils.translate', 'utils.translate', (['"""no-pwnage"""', 'data'], {}), 
"('no-pwnage', data)\n", (1352, 1371), False, 'import utils\n'), ((1442, 1476), 'utils.translate', 'utils.translate', (['"""no-pwnage"""', 'data'], {}), "('no-pwnage', data)\n", (1457, 1476), False, 'import utils\n'), ((1909, 1939), 'utils.translate', 'utils.translate', (['"""pwned"""', 'data'], {}), "('pwned', data)\n", (1924, 1939), False, 'import utils\n'), ((2006, 2036), 'utils.translate', 'utils.translate', (['"""pwned"""', 'data'], {}), "('pwned', data)\n", (2021, 2036), False, 'import utils\n')]
|
import sys
def FileHeader(time, in_file, out_file, intra_gap_tol, inter_gap_tol, overlap_tol, eval_tol, Tot_prot_cnt, Tot_cnt, NC_cnt, CP_cnt, IS_cnt):
fileHeader = \
"""#===========================================================================================
# DOMAIN MAPPER v2.0.2
# Johns Hopkins Univeristy - January 19th, 2022
# <NAME>, B.S. - Dept. of Biophysics
# <EMAIL>
# &
# <NAME>, Ph.D. - Dept. of Chemistry
# <EMAIL>
#===========================================================================================
# Excecuted on:
# {}
# Input HMM:
# {}
# Output:
# {}
# Options:
# Intra domain gap = {:2d}
# Inter domain gap = {:2d}
# overlap = {:2d}
# E-value cutoff = {:1.2e}
# Domain Counts:
# Total Proteins: {:6d} Total Domains: {:6d}
# NC : {:3d} ({:.2%})
# CP : {:3d} ({:.2%})
# IS : {:3d} ({:.2%})
# Property Definitions:
# CP = Circular Permutant Domain
# NC = Non-Contiguous Domain
# IS = InSertional Domain
#===========================================================================================
# Accession\tE-Value\tResidue Range\tProperty\tArchitecture\tX-group\tT-group\tF-group\tF-id
""".format(time, in_file, out_file, intra_gap_tol, inter_gap_tol, overlap_tol, eval_tol, Tot_prot_cnt, Tot_cnt, NC_cnt, float(NC_cnt)/float(Tot_cnt), CP_cnt, float(CP_cnt)/float(Tot_cnt), IS_cnt, float(IS_cnt)/float(Tot_cnt),)
return fileHeader
# This was stolen from: Greenstick @ https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console?page=1&tab=votes#tab-top
# Headless and fast
def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', printEnd = "\r"):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
printEnd - Optional : end character (e.g. "\r", "\r\n") (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
print(f'\r{prefix} |{bar}| {percent}% {suffix}', end = printEnd)
# Print New Line on Complete
if iteration == total:
print()
def ErrorMsg(msg):
ErrMsg = 'ERROR: ' + msg + '\n' + 'System Exiting...\n'
sys.stderr.write(ErrMsg)
sys.stderr.flush()
return sys.exit()
def WarningMsg(msg):
WrnMsg = 'WARNING: ' + msg + '\n'
sys.stderr.write(WrnMsg)
return sys.stderr.flush()
|
[
"sys.stderr.write",
"sys.stderr.flush",
"sys.exit"
] |
[((3015, 3039), 'sys.stderr.write', 'sys.stderr.write', (['ErrMsg'], {}), '(ErrMsg)\n', (3031, 3039), False, 'import sys\n'), ((3044, 3062), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (3060, 3062), False, 'import sys\n'), ((3074, 3084), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3082, 3084), False, 'import sys\n'), ((3149, 3173), 'sys.stderr.write', 'sys.stderr.write', (['WrnMsg'], {}), '(WrnMsg)\n', (3165, 3173), False, 'import sys\n'), ((3185, 3203), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (3201, 3203), False, 'import sys\n')]
|
import sys
READ_POINTS = 0
READ_FOLDS = 1
def main(args):
with open("test_input.txt" if len(args) > 1 and args[1] == "-t" else "input.txt") as f:
mode = READ_POINTS
points = set()
folds = []
for line in f.readlines():
line = line.strip("\n")
if mode == READ_POINTS:
if len(line) == 0:
mode = READ_FOLDS
continue
else:
[x, y] = line.split(",")
points.add((int(x), int(y)))
elif mode == READ_FOLDS:
folds.append(line.split(" ")[2].split("="))
for fold, val in folds:
remove_points = []
add_points = []
line = int(val)
if fold == "x":
for point in points:
if point[0] >= line:
remove_points.append(point)
if point[0] > line:
add_points.append((line + line - point[0], point[1]))
elif fold == "y":
for point in points:
if point[1] >= line:
remove_points.append(point)
if point[1] > line:
add_points.append((point[0], line + line - point[1]))
for point in remove_points:
points.remove(point)
for point in add_points:
points.add(point)
max_x = 0
max_y = 0
for x, y in points:
max_x = max(max_x, x)
max_y = max(max_y, y)
for y in range(max_y + 1):
for x in range(max_x + 1):
if (x, y) in points:
sys.stdout.write("#")
else:
sys.stdout.write(".")
sys.stdout.write("\n")
if __name__ == "__main__":
main(sys.argv)
|
[
"sys.stdout.write"
] |
[((1686, 1708), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (1702, 1708), False, 'import sys\n'), ((1600, 1621), 'sys.stdout.write', 'sys.stdout.write', (['"""#"""'], {}), "('#')\n", (1616, 1621), False, 'import sys\n'), ((1655, 1676), 'sys.stdout.write', 'sys.stdout.write', (['"""."""'], {}), "('.')\n", (1671, 1676), False, 'import sys\n')]
|
import unittest
from celery import Celery
from django.conf import settings
from django.db import connection
class SmokeTests(unittest.TestCase):
def test_can_access_db(self):
"""
access the database
"""
cursor = connection.cursor()
cursor.execute("SELECT 1")
row = cursor.fetchone()
self.assertEqual(1, row[0])
def test_can_access_celery(self):
"""
connect to SQS
"""
if not getattr(settings, "CELERY_ALWAYS_EAGER", False):
app = Celery("cla_backend")
app.config_from_object("django.conf:settings")
conn = app.connection()
conn.connect()
conn.release()
|
[
"celery.Celery",
"django.db.connection.cursor"
] |
[((251, 270), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (268, 270), False, 'from django.db import connection\n'), ((542, 563), 'celery.Celery', 'Celery', (['"""cla_backend"""'], {}), "('cla_backend')\n", (548, 563), False, 'from celery import Celery\n')]
|
import os
from collections import OrderedDict
from click.testing import CliRunner
from great_expectations import DataContext
from great_expectations.cli.v012 import cli
from tests.cli.v012.test_cli import yaml
from tests.cli.v012.utils import (
assert_dict_key_and_val_in_stdout,
assert_no_logging_messages_or_tracebacks,
assert_no_tracebacks,
)
def test_cli_datasource_list(empty_data_context, empty_sqlite_db, caplog):
"""Test an empty project and after adding a single datasource."""
project_root_dir = empty_data_context.root_directory
context = DataContext(project_root_dir)
runner = CliRunner(mix_stderr=False)
result = runner.invoke(
cli, ["datasource", "list", "-d", project_root_dir], catch_exceptions=False
)
stdout = result.stdout.strip()
assert "No Datasources found" in stdout
assert context.list_datasources() == []
datasource_name = "wow_a_datasource"
_add_datasource_and_credentials_to_context(
context, datasource_name, empty_sqlite_db
)
runner = CliRunner(mix_stderr=False)
result = runner.invoke(
cli, ["datasource", "list", "-d", project_root_dir], catch_exceptions=False
)
url = str(empty_sqlite_db.engine.url)
expected_output = """\
1 Datasource found:[0m
[0m
- [36mname:[0m wow_a_datasource[0m
[36mmodule_name:[0m great_expectations.datasource[0m
[36mclass_name:[0m SqlAlchemyDatasource[0m
[36mbatch_kwargs_generators:[0m[0m
[36mdefault:[0m[0m
[36mclass_name:[0m TableBatchKwargsGenerator[0m
[36mcredentials:[0m[0m
[36murl:[0m {}[0m
[36mdata_asset_type:[0m[0m
[36mclass_name:[0m SqlAlchemyDataset[0m
[36mmodule_name:[0m None[0m
""".format(
url
).strip()
stdout = result.stdout.strip()
assert stdout == expected_output
assert_no_logging_messages_or_tracebacks(caplog, result)
def _add_datasource_and_credentials_to_context(context, datasource_name, sqlite_engine):
original_datasources = context.list_datasources()
url = str(sqlite_engine.url)
credentials = {"url": url}
context.save_config_variable(datasource_name, credentials)
context.add_datasource(
datasource_name,
initialize=False,
module_name="great_expectations.datasource",
class_name="SqlAlchemyDatasource",
data_asset_type={"class_name": "SqlAlchemyDataset"},
credentials="${" + datasource_name + "}",
batch_kwargs_generators={
"default": {"class_name": "TableBatchKwargsGenerator"}
},
)
expected_datasources = original_datasources
expected_datasources.append(
{
"name": datasource_name,
"class_name": "SqlAlchemyDatasource",
"module_name": "great_expectations.datasource",
"credentials": OrderedDict([("url", url)]),
"data_asset_type": {"class_name": "SqlAlchemyDataset", "module_name": None},
"batch_kwargs_generators": {
"default": {"class_name": "TableBatchKwargsGenerator"}
},
}
)
assert context.list_datasources() == expected_datasources
return context
def _add_datasource__with_two_generators_and_credentials_to_context(
context, datasource_name, sqlite_engine
):
original_datasources = context.list_datasources()
url = str(sqlite_engine.url)
credentials = {"url": url}
context.save_config_variable(datasource_name, credentials)
context.add_datasource(
datasource_name,
initialize=False,
module_name="great_expectations.datasource",
class_name="SqlAlchemyDatasource",
data_asset_type={"class_name": "SqlAlchemyDataset"},
credentials="${" + datasource_name + "}",
batch_kwargs_generators={
"default": {"class_name": "TableBatchKwargsGenerator"},
"second_generator": {
"class_name": "ManualBatchKwargsGenerator",
"assets": {
"asset_one": [
{"partition_id": 1, "query": "select * from main.titanic"}
]
},
},
},
)
expected_datasources = original_datasources
expected_datasources.append(
{
"name": datasource_name,
"class_name": "SqlAlchemyDatasource",
"module_name": "great_expectations.datasource",
"credentials": {"url": url},
"data_asset_type": {"class_name": "SqlAlchemyDataset", "module_name": None},
"batch_kwargs_generators": {
"default": {"class_name": "TableBatchKwargsGenerator"},
"second_generator": {
"assets": {
"asset_one": [
{
"partition_id": 1,
"query": "select " "* " "from " "main.titanic",
}
]
},
"class_name": "ManualBatchKwargsGenerator",
},
},
}
)
assert context.list_datasources() == expected_datasources
return context
def test_cli_datasource_new_connection_string(
empty_data_context, empty_sqlite_db, caplog
):
project_root_dir = empty_data_context.root_directory
context = DataContext(project_root_dir)
assert context.list_datasources() == []
runner = CliRunner(mix_stderr=False)
result = runner.invoke(
cli,
["datasource", "new", "-d", project_root_dir],
input="2\n6\nmynewsource\n{}\n\n".format(str(empty_sqlite_db.url)),
catch_exceptions=False,
)
stdout = result.stdout
assert "What data would you like Great Expectations to connect to?" in stdout
assert "Give your new Datasource a short name." in stdout
assert (
"Next, we will configure database credentials and store them in the `mynewsource` section"
in stdout
)
assert "What is the url/connection string for the sqlalchemy connection?" in stdout
assert "Attempting to connect to your database. This may take a moment" in stdout
assert "Great Expectations connected to your database" in stdout
assert "A new datasource 'mynewsource' was added to your project." in stdout
assert result.exit_code == 0
config_path = os.path.join(project_root_dir, DataContext.GE_YML)
config = yaml.load(open(config_path))
datasources = config["datasources"]
assert "mynewsource" in datasources.keys()
data_source_class = datasources["mynewsource"]["data_asset_type"]["class_name"]
assert data_source_class == "SqlAlchemyDataset"
assert_no_logging_messages_or_tracebacks(caplog, result)
def test_cli_datasource_profile_answering_no(
empty_data_context, titanic_sqlite_db, caplog
):
"""
When datasource profile command is called without additional arguments,
the command must prompt the user with a confirm (y/n) before profiling.
We are verifying that it does that and respects user's "no".
"""
project_root_dir = empty_data_context.root_directory
context = DataContext(project_root_dir)
datasource_name = "wow_a_datasource"
context = _add_datasource_and_credentials_to_context(
context, datasource_name, titanic_sqlite_db
)
runner = CliRunner(mix_stderr=False)
result = runner.invoke(
cli,
["datasource", "profile", datasource_name, "-d", project_root_dir, "--no-view"],
input="n\n",
catch_exceptions=False,
)
stdout = result.output
assert result.exit_code == 0
assert "Profiling 'wow_a_datasource'" in stdout
assert "Skipping profiling for now." in stdout
assert_no_logging_messages_or_tracebacks(caplog, result)
def test_cli_datasource_profile_on_empty_database(
empty_data_context, empty_sqlite_db, caplog
):
"""
We run the datasource profile command against an empty database (no tables).
This means that no generator can "see" a list of available data assets.
The command must exit with an error message saying that no generator can see
any assets.
"""
project_root_dir = empty_data_context.root_directory
context = DataContext(project_root_dir)
datasource_name = "wow_a_datasource"
context = _add_datasource_and_credentials_to_context(
context, datasource_name, empty_sqlite_db
)
runner = CliRunner(mix_stderr=False)
result = runner.invoke(
cli,
["datasource", "profile", datasource_name, "-d", project_root_dir, "--no-view"],
input="n\n",
catch_exceptions=False,
)
stdout = result.output
assert result.exit_code == 1
assert "Profiling 'wow_a_datasource'" in stdout
assert "No batch kwargs generators can list available data assets" in stdout
assert_no_logging_messages_or_tracebacks(caplog, result)
def test_cli_datasource_profile_with_datasource_arg(
empty_data_context, titanic_sqlite_db, caplog
):
project_root_dir = empty_data_context.root_directory
context = DataContext(project_root_dir)
datasource_name = "wow_a_datasource"
context = _add_datasource_and_credentials_to_context(
context, datasource_name, titanic_sqlite_db
)
runner = CliRunner(mix_stderr=False)
result = runner.invoke(
cli,
[
"datasource",
"profile",
datasource_name,
"-d",
project_root_dir,
"--no-view",
],
input="Y\n",
catch_exceptions=False,
)
stdout = result.stdout
assert result.exit_code == 0
assert "Profiling '{}'".format(datasource_name) in stdout
context = DataContext(project_root_dir)
assert len(context.list_datasources()) == 1
expectations_store = context.stores["expectations_store"]
suites = expectations_store.list_keys()
assert len(suites) == 1
assert (
suites[0].expectation_suite_name
== "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
)
validations_store = context.stores["validations_store"]
validation_keys = validations_store.list_keys()
assert len(validation_keys) == 1
validation = validations_store.get(validation_keys[0])
assert (
validation.meta["expectation_suite_name"]
== "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
)
assert validation.success is False
assert len(validation.results) == 51
assert "Preparing column 1 of 7" in caplog.messages[0]
assert len(caplog.messages) == 10
assert_no_tracebacks(result)
def test_cli_datasource_profile_with_datasource_arg_and_generator_name_arg(
empty_data_context, titanic_sqlite_db, caplog
):
"""
Here we are verifying that when generator_name argument is passed to
the methods down the stack.
We use a datasource with two generators. This way we can check that the
name of the expectation suite created by the profiler corresponds to
the name of the data asset listed by the generator that we told the profiler
to use.
The logic of processing this argument is testing in tests/profile.
"""
project_root_dir = empty_data_context.root_directory
context = DataContext(project_root_dir)
datasource_name = "wow_a_datasource"
context = _add_datasource__with_two_generators_and_credentials_to_context(
context, datasource_name, titanic_sqlite_db
)
second_generator_name = "second_generator"
runner = CliRunner()
result = runner.invoke(
cli,
[
"datasource",
"profile",
datasource_name,
"--batch-kwargs-generator-name",
second_generator_name,
"-d",
project_root_dir,
"--no-view",
],
input="Y\n",
)
stdout = result.stdout
assert result.exit_code == 0
assert "Profiling '{}'".format(datasource_name) in stdout
context = DataContext(project_root_dir)
assert len(context.list_datasources()) == 1
expectations_store = context.stores["expectations_store"]
suites = expectations_store.list_keys()
assert len(suites) == 1
assert (
suites[0].expectation_suite_name
== "wow_a_datasource.second_generator.asset_one.BasicDatasetProfiler"
)
assert "Preparing column 1 of 7" in caplog.messages[0]
assert len(caplog.messages) == 10
assert_no_tracebacks(result)
def test_cli_datasource_profile_with_no_datasource_args(
    empty_data_context, titanic_sqlite_db, caplog
):
    """Profiling without naming a datasource should profile the only one present."""
    root_dir = empty_data_context.root_directory
    context = DataContext(root_dir)
    datasource_name = "wow_a_datasource"
    context = _add_datasource_and_credentials_to_context(
        context, datasource_name, titanic_sqlite_db
    )

    cli_runner = CliRunner(mix_stderr=False)
    result = cli_runner.invoke(
        cli,
        ["datasource", "profile", "-d", root_dir, "--no-view"],
        input="Y\n",
        catch_exceptions=False,
    )
    assert result.exit_code == 0
    output = result.stdout
    assert "Profiling 'wow_a_datasource'" in output
    assert "The following Data Docs sites will be built:\n" in output
    assert "local_site:" in output

    # Re-open the project to inspect what the CLI run persisted.
    context = DataContext(root_dir)
    assert len(context.list_datasources()) == 1

    suites = context.stores["expectations_store"].list_keys()
    assert len(suites) == 1
    assert (
        suites[0].expectation_suite_name
        == "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
    )

    validations_store = context.stores["validations_store"]
    validation_keys = validations_store.list_keys()
    assert len(validation_keys) == 1
    validation = validations_store.get(validation_keys[0])
    assert (
        validation.meta["expectation_suite_name"]
        == "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
    )
    assert validation.success is False
    assert len(validation.results) == 51

    assert "Preparing column 1 of 7" in caplog.messages[0]
    assert len(caplog.messages) == 10
    assert_no_tracebacks(result)
def test_cli_datasource_profile_with_data_asset_and_additional_batch_kwargs_with_limit(
    empty_data_context, titanic_sqlite_db, caplog
):
    """Additional batch kwargs (here: limit) must reach the sql backend.

    Passing "limit" has to affect the generated query: the row count observed
    by the profiler on the profiled batch must equal the limit the user passed.
    """
    root_dir = empty_data_context.root_directory
    context = DataContext(root_dir)
    datasource_name = "wow_a_datasource"
    context = _add_datasource_and_credentials_to_context(
        context, datasource_name, titanic_sqlite_db
    )

    cli_runner = CliRunner(mix_stderr=False)
    result = cli_runner.invoke(
        cli,
        [
            "datasource",
            "profile",
            "-d",
            root_dir,
            "--data-assets",
            "main.titanic",
            "--additional-batch-kwargs",
            '{"limit": 97}',
            "--no-view",
        ],
        input="Y\n",
        catch_exceptions=False,
    )
    output = result.stdout
    assert result.exit_code == 0
    assert f"Profiling '{datasource_name}'" in output
    assert "The following Data Docs sites will be built:\n" in output
    assert "local_site:" in output

    # Re-open the project to inspect what the CLI run persisted.
    context = DataContext(root_dir)
    assert len(context.list_datasources()) == 1

    suites = context.stores["expectations_store"].list_keys()
    assert len(suites) == 1
    assert (
        suites[0].expectation_suite_name
        == "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
    )

    validations_store = context.stores["validations_store"]
    validation_keys = validations_store.list_keys()
    assert len(validation_keys) == 1
    validation = validations_store.get(validation_keys[0])
    assert (
        validation.meta["expectation_suite_name"]
        == "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
    )
    assert validation.success is False

    # Only the table row-count expectation proves the limit was applied.
    row_count_results = [
        res
        for res in validation.results
        if res.expectation_config.expectation_type
        == "expect_table_row_count_to_be_between"
    ]
    assert len(row_count_results) == 1
    assert row_count_results[0].result["observed_value"] == 97

    assert "Preparing column 1 of 7" in caplog.messages[0]
    assert len(caplog.messages) == 10
    assert_no_tracebacks(result)
def test_cli_datasource_profile_with_valid_data_asset_arg(
    empty_data_context, titanic_sqlite_db, caplog
):
    """Profiling a named, existing data asset should create suite + validation."""
    root_dir = empty_data_context.root_directory
    context = DataContext(root_dir)
    datasource_name = "wow_a_datasource"
    context = _add_datasource_and_credentials_to_context(
        context, datasource_name, titanic_sqlite_db
    )

    cli_runner = CliRunner(mix_stderr=False)
    result = cli_runner.invoke(
        cli,
        [
            "datasource",
            "profile",
            datasource_name,
            "--data-assets",
            "main.titanic",
            "-d",
            root_dir,
            "--no-view",
        ],
        catch_exceptions=False,
    )
    output = result.stdout
    assert result.exit_code == 0
    assert f"Profiling '{datasource_name}'" in output
    assert "The following Data Docs sites will be built:\n" in output
    assert "local_site:" in output

    # Re-open the project to inspect what the CLI run persisted.
    context = DataContext(root_dir)
    assert len(context.list_datasources()) == 1

    suites = context.stores["expectations_store"].list_keys()
    assert len(suites) == 1
    assert (
        suites[0].expectation_suite_name
        == "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
    )

    validations_store = context.stores["validations_store"]
    validation_keys = validations_store.list_keys()
    assert len(validation_keys) == 1
    validation = validations_store.get(validation_keys[0])
    assert (
        validation.meta["expectation_suite_name"]
        == "wow_a_datasource.default.main.titanic.BasicDatasetProfiler"
    )
    assert validation.success is False
    assert len(validation.results) == 51

    assert "Preparing column 1 of 7" in caplog.messages[0]
    assert len(caplog.messages) == 10
    assert_no_tracebacks(result)
def test_cli_datasource_profile_with_invalid_data_asset_arg_answering_no(
    empty_data_context, titanic_sqlite_db, caplog
):
    """Answering "2" (skip) for an unknown data asset must not create any suites."""
    root_dir = empty_data_context.root_directory
    context = DataContext(root_dir)
    datasource_name = "wow_a_datasource"
    context = _add_datasource_and_credentials_to_context(
        context, datasource_name, titanic_sqlite_db
    )

    cli_runner = CliRunner(mix_stderr=False)
    result = cli_runner.invoke(
        cli,
        [
            "datasource",
            "profile",
            datasource_name,
            "--data-assets",
            "bad-bad-asset",
            "-d",
            root_dir,
            "--no-view",
        ],
        input="2\n",
        catch_exceptions=False,
    )
    output = result.stdout
    assert (
        "Some of the data assets you specified were not found: bad-bad-asset" in output
    )
    assert "Choose how to proceed" in output
    assert "Skipping profiling for now." in output

    # Re-open the project: nothing should have been persisted.
    context = DataContext(root_dir)
    assert len(context.list_datasources()) == 1
    suites = context.stores["expectations_store"].list_keys()
    assert len(suites) == 0
    assert_no_logging_messages_or_tracebacks(caplog, result)
|
[
"tests.cli.v012.utils.assert_no_tracebacks",
"great_expectations.DataContext",
"tests.cli.v012.utils.assert_no_logging_messages_or_tracebacks",
"collections.OrderedDict",
"click.testing.CliRunner",
"os.path.join"
] |
[((578, 607), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (589, 607), False, 'from great_expectations import DataContext\n'), ((622, 649), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (631, 649), False, 'from click.testing import CliRunner\n'), ((1052, 1079), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (1061, 1079), False, 'from click.testing import CliRunner\n'), ((1857, 1913), 'tests.cli.v012.utils.assert_no_logging_messages_or_tracebacks', 'assert_no_logging_messages_or_tracebacks', (['caplog', 'result'], {}), '(caplog, result)\n', (1897, 1913), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((5399, 5428), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (5410, 5428), False, 'from great_expectations import DataContext\n'), ((5487, 5514), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (5496, 5514), False, 'from click.testing import CliRunner\n'), ((6410, 6460), 'os.path.join', 'os.path.join', (['project_root_dir', 'DataContext.GE_YML'], {}), '(project_root_dir, DataContext.GE_YML)\n', (6422, 6460), False, 'import os\n'), ((6731, 6787), 'tests.cli.v012.utils.assert_no_logging_messages_or_tracebacks', 'assert_no_logging_messages_or_tracebacks', (['caplog', 'result'], {}), '(caplog, result)\n', (6771, 6787), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((7194, 7223), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (7205, 7223), False, 'from great_expectations import DataContext\n'), ((7395, 7422), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': 
'(False)'}), '(mix_stderr=False)\n', (7404, 7422), False, 'from click.testing import CliRunner\n'), ((7781, 7837), 'tests.cli.v012.utils.assert_no_logging_messages_or_tracebacks', 'assert_no_logging_messages_or_tracebacks', (['caplog', 'result'], {}), '(caplog, result)\n', (7821, 7837), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((8283, 8312), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (8294, 8312), False, 'from great_expectations import DataContext\n'), ((8482, 8509), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (8491, 8509), False, 'from click.testing import CliRunner\n'), ((8899, 8955), 'tests.cli.v012.utils.assert_no_logging_messages_or_tracebacks', 'assert_no_logging_messages_or_tracebacks', (['caplog', 'result'], {}), '(caplog, result)\n', (8939, 8955), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((9135, 9164), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (9146, 9164), False, 'from great_expectations import DataContext\n'), ((9336, 9363), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (9345, 9363), False, 'from click.testing import CliRunner\n'), ((9774, 9803), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (9785, 9803), False, 'from great_expectations import DataContext\n'), ((10652, 10680), 'tests.cli.v012.utils.assert_no_tracebacks', 'assert_no_tracebacks', (['result'], {}), '(result)\n', (10672, 10680), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((11319, 11348), 
'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (11330, 11348), False, 'from great_expectations import DataContext\n'), ((11589, 11600), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (11598, 11600), False, 'from click.testing import CliRunner\n'), ((12059, 12088), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (12070, 12088), False, 'from great_expectations import DataContext\n'), ((12512, 12540), 'tests.cli.v012.utils.assert_no_tracebacks', 'assert_no_tracebacks', (['result'], {}), '(result)\n', (12532, 12540), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((12724, 12753), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (12735, 12753), False, 'from great_expectations import DataContext\n'), ((12925, 12952), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (12934, 12952), False, 'from click.testing import CliRunner\n'), ((13357, 13386), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (13368, 13386), False, 'from great_expectations import DataContext\n'), ((14235, 14263), 'tests.cli.v012.utils.assert_no_tracebacks', 'assert_no_tracebacks', (['result'], {}), '(result)\n', (14255, 14263), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((14770, 14799), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (14781, 14799), False, 'from great_expectations import DataContext\n'), ((14971, 14998), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (14980, 14998), False, 'from click.testing import CliRunner\n'), ((15612, 
15641), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (15623, 15641), False, 'from great_expectations import DataContext\n'), ((16810, 16838), 'tests.cli.v012.utils.assert_no_tracebacks', 'assert_no_tracebacks', (['result'], {}), '(result)\n', (16830, 16838), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((17024, 17053), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (17035, 17053), False, 'from great_expectations import DataContext\n'), ((17225, 17252), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (17234, 17252), False, 'from click.testing import CliRunner\n'), ((17804, 17833), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (17815, 17833), False, 'from great_expectations import DataContext\n'), ((18682, 18710), 'tests.cli.v012.utils.assert_no_tracebacks', 'assert_no_tracebacks', (['result'], {}), '(result)\n', (18702, 18710), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((18911, 18940), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (18922, 18940), False, 'from great_expectations import DataContext\n'), ((19112, 19139), 'click.testing.CliRunner', 'CliRunner', ([], {'mix_stderr': '(False)'}), '(mix_stderr=False)\n', (19121, 19139), False, 'from click.testing import CliRunner\n'), ((19716, 19745), 'great_expectations.DataContext', 'DataContext', (['project_root_dir'], {}), '(project_root_dir)\n', (19727, 19745), False, 'from great_expectations import DataContext\n'), ((19934, 19990), 'tests.cli.v012.utils.assert_no_logging_messages_or_tracebacks', 'assert_no_logging_messages_or_tracebacks', (['caplog', 
'result'], {}), '(caplog, result)\n', (19974, 19990), False, 'from tests.cli.v012.utils import assert_dict_key_and_val_in_stdout, assert_no_logging_messages_or_tracebacks, assert_no_tracebacks\n'), ((2857, 2884), 'collections.OrderedDict', 'OrderedDict', (["[('url', url)]"], {}), "([('url', url)])\n", (2868, 2884), False, 'from collections import OrderedDict\n')]
|
#!/usr/bin/env python3 -u
# -*- coding: utf-8 -*-
# copyright: sktime developers, BSD-3-Clause License (see LICENSE file)
from logging import warning
import numpy as np
import pandas as pd
from sklearn.utils import check_array, check_consistent_length
from sktime.datatypes import check_is_scitype, convert
from sktime.performance_metrics.forecasting._classes import _BaseForecastingErrorMetric
# TODO: Rework tests now
class _BaseProbaForecastingErrorMetric(_BaseForecastingErrorMetric):
    """Base class for probabilistic forecasting error metrics in sktime.

    Extends sktime's BaseMetric to the forecasting interface. Forecasting error
    metrics measure the error (loss) between forecasts and true values. Lower
    values are better.

    Parameters
    ----------
    multioutput : {'raw_values', 'uniform_average'} or array-like of shape \
        (n_outputs,), default='uniform_average'
        Defines how to aggregate metric for multivariate (multioutput) data.
        If array-like, values used as weights to average the errors.
        If 'raw_values', returns a full set of errors in case of multioutput input.
        If 'uniform_average', errors of all outputs are averaged with uniform weight.
    score_average : bool, optional, default=True
        for interval and quantile losses only
            if True, metric/loss is averaged by upper/lower and/or quantile
            if False, metric/loss is not averaged by upper/lower and/or quantile
    """

    _tags = {
        "scitype:y_pred": "pred_quantiles",
        "lower_is_better": True,
    }

    def __init__(
        self,
        func=None,
        name=None,
        multioutput="uniform_average",
        score_average=True,
    ):
        self.multioutput = multioutput
        self.score_average = score_average
        super().__init__(func, name=name, multioutput=multioutput)

    def __call__(self, y_true, y_pred, **kwargs):
        """Calculate metric value using underlying metric function.

        Parameters
        ----------
        y_true : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : return object of probabilistic prediction method scitype:y_pred
            must be at fh and for variables equal to those in y_true.

        Returns
        -------
        loss : float or 1-column pd.DataFrame with calculated metric value(s)
            metric is always averaged (arithmetic) over fh values
            if multioutput = "raw_values",
                will have a column level corresponding to variables in y_true
            if multioutput = "uniform_average" or array-like,
                entries will be averaged over output variable column
            if score_average = False,
                will have column levels corresponding to quantiles/intervals
            if score_average = True,
                entries will be averaged over quantiles/interval column
        """
        return self.evaluate(y_true, y_pred, multioutput=self.multioutput, **kwargs)

    def evaluate(self, y_true, y_pred, multioutput=None, **kwargs):
        """Evaluate the desired metric on given inputs.

        Parameters
        ----------
        y_true : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : return object of probabilistic prediction method scitype:y_pred
            must be at fh and for variables equal to those in y_true

        multioutput : string "uniform_average" or "raw_values" determines how\
            multioutput results will be treated.

        Returns
        -------
        loss : float or 1-column pd.DataFrame with calculated metric value(s)
            metric is always averaged (arithmetic) over fh values
            if multioutput = "raw_values",
                will have a column level corresponding to variables in y_true
            if multioutput = "uniform_average" or array-like,
                entries will be averaged over output variable column
            if score_average = False,
                will have column levels corresponding to quantiles/intervals
            if score_average = True,
                entries will be averaged over quantiles/interval column
        """
        # Input checks and conversions
        y_true_inner, y_pred_inner, multioutput = self._check_ys(
            y_true, y_pred, multioutput
        )
        # Don't want to include scores for 0 width intervals, makes no sense
        if 0 in y_pred_inner.columns.get_level_values(1):
            y_pred_inner = y_pred_inner.drop(0, axis=1, level=1)
            warning(
                "Dropping 0 width interval, don't include 0.5 quantile\
            for interval metrics."
            )

        # pass to inner function
        out = self._evaluate(y_true_inner, y_pred_inner, multioutput, **kwargs)

        if self.score_average and multioutput == "uniform_average":
            out = float(out.mean(axis=1, level=None))  # average over all
        if self.score_average and multioutput == "raw_values":
            out = out.mean(axis=1, level=0)  # average over scores
        if not self.score_average and multioutput == "uniform_average":
            out = out.mean(axis=1, level=1)  # average over variables
        # score_average=False with multioutput="raw_values": nothing to average

        if isinstance(out, pd.DataFrame):
            out = out.squeeze(axis=0)

        return out

    def _evaluate(self, y_true, y_pred, multioutput, **kwargs):
        """Evaluate the desired metric on given inputs.

        Parameters
        ----------
        y_true : pd.DataFrame or of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : pd.DataFrame of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Forecasted values.

        multioutput : string "uniform_average" or "raw_values" determines how\
            multioutput results will be treated.

        Returns
        -------
        loss : pd.DataFrame of shape (, n_outputs), calculated loss metric.
        """
        # Default implementation relies on implementation of evaluate_by_index
        try:
            index_df = self._evaluate_by_index(y_true, y_pred, multioutput)
            out_df = pd.DataFrame(index_df.mean(axis=0)).T
            out_df.columns = index_df.columns
            return out_df
        except RecursionError:
            # BUG FIX: previously the RecursionError was instantiated but not
            # raised, so this method silently returned None when neither
            # _evaluate nor _evaluate_by_index was implemented.
            raise RecursionError(
                "Must implement one of _evaluate or _evaluate_by_index"
            ) from None

    def evaluate_by_index(self, y_true, y_pred, multioutput=None, **kwargs):
        """Return the metric evaluated at each time point.

        Parameters
        ----------
        y_true : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : return object of probabilistic prediction method scitype:y_pred
            must be at fh and for variables equal to those in y_true

        multioutput : string "uniform_average" or "raw_values" determines how\
            multioutput results will be treated.

        Returns
        -------
        loss : pd.DataFrame of length len(fh), with calculated metric value(s)
            i-th column contains metric value(s) for prediction at i-th fh element
            if multioutput = "raw_values",
                will have a column level corresponding to variables in y_true
            if multioutput = "uniform_average" or array-like,
                entries will be averaged over output variable column
            if score_average = False,
                will have column levels corresponding to quantiles/intervals
            if score_average = True,
                entries will be averaged over quantiles/interval column
        """
        # Input checks and conversions
        y_true_inner, y_pred_inner, multioutput = self._check_ys(
            y_true, y_pred, multioutput
        )
        # Don't want to include scores for 0 width intervals, makes no sense
        if 0 in y_pred_inner.columns.get_level_values(1):
            y_pred_inner = y_pred_inner.drop(0, axis=1, level=1)
            warning(
                "Dropping 0 width interval, don't include 0.5 quantile\
            for interval metrics."
            )

        # pass to inner function
        out = self._evaluate_by_index(y_true_inner, y_pred_inner, multioutput, **kwargs)

        if self.score_average and multioutput == "uniform_average":
            out = out.mean(axis=1, level=None)  # average over all
        if self.score_average and multioutput == "raw_values":
            out = out.mean(axis=1, level=0)  # average over scores
        if not self.score_average and multioutput == "uniform_average":
            out = out.mean(axis=1, level=1)  # average over variables
        # score_average=False with multioutput="raw_values": nothing to average

        return out

    def _evaluate_by_index(self, y_true, y_pred, multioutput, **kwargs):
        """Logic for finding the metric evaluated at each index.

        By default this uses _evaluate to find jackknifed pseudosamples. This
        estimates the error at each of the time points.

        Parameters
        ----------
        y_true : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Forecasted values.

        multioutput : string "uniform_average" or "raw_values" determines how \
            multioutput results will be treated.
        """
        n = y_true.shape[0]
        out_series = pd.Series(index=y_pred.index)
        try:
            x_bar = self.evaluate(y_true, y_pred, multioutput, **kwargs)
            for i in range(n):
                # NOTE(review): jackknife slicing assumes 2D numpy inputs;
                # pandas inputs would fail here -- confirm against callers.
                out_series[i] = n * x_bar - (n - 1) * self.evaluate(
                    np.vstack((y_true[:i, :], y_true[i + 1 :, :])),  # noqa
                    np.vstack((y_pred[:i, :], y_pred[i + 1 :, :])),  # noqa
                    multioutput,
                )
            return out_series
        except RecursionError:
            # BUG FIX: previously the RecursionError was instantiated but not
            # raised, so this method silently returned None when neither
            # _evaluate nor _evaluate_by_index was implemented.
            raise RecursionError(
                "Must implement one of _evaluate or _evaluate_by_index"
            ) from None

    def _check_consistent_input(self, y_true, y_pred, multioutput):
        check_consistent_length(y_true, y_pred)

        y_true = check_array(y_true, ensure_2d=False)

        # BUG FIX: both ValueErrors below were previously constructed but
        # never raised, so these validity checks were silent no-ops.
        if not isinstance(y_pred, pd.DataFrame):
            raise ValueError("y_pred should be a dataframe.")

        if not all(y_pred.dtypes == float):
            raise ValueError("Data should be numeric.")

        if y_true.ndim == 1:
            y_true = y_true.reshape((-1, 1))

        n_outputs = y_true.shape[1]

        allowed_multioutput_str = ("raw_values", "uniform_average", "variance_weighted")
        if isinstance(multioutput, str):
            if multioutput not in allowed_multioutput_str:
                raise ValueError(
                    "Allowed 'multioutput' string values are {}. "
                    "You provided multioutput={!r}".format(
                        allowed_multioutput_str, multioutput
                    )
                )
        elif multioutput is not None:
            multioutput = check_array(multioutput, ensure_2d=False)
            if n_outputs == 1:
                raise ValueError("Custom weights are useful only in multi-output case.")
            elif n_outputs != len(multioutput):
                raise ValueError(
                    "There must be equally many custom weights (%d) as outputs (%d)."
                    % (len(multioutput), n_outputs)
                )

        return y_true, y_pred, multioutput

    def _check_ys(self, y_true, y_pred, multioutput):
        if multioutput is None:
            multioutput = self.multioutput
        valid, msg, metadata = check_is_scitype(
            y_pred, scitype="Proba", return_metadata=True, var_name="y_pred"
        )
        if not valid:
            raise TypeError(msg)
        y_pred_mtype = metadata["mtype"]
        inner_y_pred_mtype = self.get_tag("scitype:y_pred")

        y_pred_inner = convert(
            y_pred,
            from_type=y_pred_mtype,
            to_type=inner_y_pred_mtype,
            as_scitype="Proba",
        )

        if inner_y_pred_mtype == "pred_interval":
            # A 0-width interval has identical bounds; mirror lower into upper.
            if 0.0 in y_pred_inner.columns.get_level_values(1):
                for var in y_pred_inner.columns.get_level_values(0):
                    y_pred_inner[var, 0.0, "upper"] = y_pred_inner[var, 0.0, "lower"]

        y_pred_inner.sort_index(axis=1, level=[0, 1], inplace=True)

        y_true, y_pred, multioutput = self._check_consistent_input(
            y_true, y_pred, multioutput
        )

        return y_true, y_pred_inner, multioutput

    def _get_alpha_from(self, y_pred):
        """Fetch the alphas present in y_pred."""
        alphas = np.unique(list(y_pred.columns.get_level_values(1)))
        if not all(((alphas > 0) & (alphas < 1))):
            raise ValueError("Alpha must be between 0 and 1.")

        return alphas

    def _check_alpha(self, alpha):
        """Check that alpha input is valid: None, or values strictly in (0, 1)."""
        if alpha is None:
            return None
        if isinstance(alpha, float):
            alpha = [alpha]
        if not isinstance(alpha, np.ndarray):
            alpha = np.asarray(alpha)
        if not all(((alpha > 0) & (alpha < 1))):
            raise ValueError("Alpha must be between 0 and 1.")

        return alpha

    def _handle_multioutput(self, loss, multioutput):
        """Specify how multivariate outputs should be handled.

        Parameters
        ----------
        loss : float, np.ndarray the evaluated metric value.

        multioutput : string "uniform_average" or "raw_values" determines how \
            multioutput results will be treated.
        """
        if isinstance(multioutput, str):
            if multioutput == "raw_values":
                return loss

            elif multioutput == "uniform_average":
                # pass None as weights to np.average: uniform mean
                multioutput = None
            else:
                raise ValueError(
                    "multioutput is expected to be 'raw_values' "
                    "or 'uniform_average' but we got %r"
                    " instead." % multioutput
                )

        if loss.ndim > 1:
            out = np.average(loss, weights=multioutput, axis=1)
        else:
            out = np.average(loss, weights=multioutput)
        return out
class PinballLoss(_BaseProbaForecastingErrorMetric):
    """Evaluate the pinball loss at all quantiles given in data.

    Parameters
    ----------
    multioutput : string "uniform_average" or "raw_values" determines how\
        multioutput results will be treated.
    score_average : bool, optional, default = True
        specifies whether scores for each quantile should be averaged.
    alpha (optional) : float, list or np.ndarray, specifies what quantiles to \
        evaluate metric at.
    """

    _tags = {
        "scitype:y_pred": "pred_quantiles",
        "lower_is_better": True,
    }

    def __init__(
        self,
        multioutput="uniform_average",
        score_average=True,
        alpha=None,
    ):
        name = "PinballLoss"
        self.score_average = score_average
        self.alpha = self._check_alpha(alpha)
        self.metric_args = {"alpha": alpha}
        super().__init__(
            name=name, multioutput=multioutput, score_average=score_average
        )

    def _evaluate_by_index(self, y_true, y_pred, multioutput, **kwargs):
        """Logic for finding the metric evaluated at each index.

        y_true : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Forecasted values.

        multioutput : string "uniform_average" or "raw_values"
            Determines how multioutput results will be treated.
        """
        alpha = self.alpha
        y_pred_alphas = self._get_alpha_from(y_pred)
        if alpha is None:
            alphas = y_pred_alphas
        else:
            # if alpha was provided, check whether they are predicted
            #   if not all alpha are observed, raise a ValueError
            if not np.isin(alpha, y_pred_alphas).all():
                # todo: make error msg more informative
                #   which alphas are missing
                msg = "not all quantile values in alpha are available in y_pred"
                raise ValueError(msg)
            else:
                alphas = alpha

        alphas = self._check_alpha(alphas)

        # keep only the requested quantile columns
        alpha_preds = y_pred.iloc[
            :, [x in alphas for x in y_pred.columns.get_level_values(1)]
        ]

        alpha_preds_np = alpha_preds.to_numpy()
        # BUG FIX: the quantile matrix must be built from the *filtered*
        # columns (alpha_preds), not all of y_pred's columns; the old code
        # produced a shape mismatch / wrong weights whenever `alpha`
        # selected a strict subset of the predicted quantiles.
        alpha_mat = np.repeat(
            (alpha_preds.columns.get_level_values(1).to_numpy().reshape(1, -1)),
            repeats=y_true.shape[0],
            axis=0,
        )

        # broadcast the truth against every selected quantile column
        y_true_np = np.repeat(y_true, axis=1, repeats=len(alphas))
        diff = y_true_np - alpha_preds_np
        sign = (diff >= 0).astype(diff.dtype)
        # classic pinball loss: alpha * diff if diff >= 0 else (1 - alpha) * -diff
        loss = alpha_mat * sign * diff - (1 - alpha_mat) * (1 - sign) * diff

        out_df = pd.DataFrame(loss, columns=alpha_preds.columns)

        return out_df

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Retrieve test parameters."""
        params1 = {}
        params2 = {"alpha": [0.1, 0.5, 0.9]}
        return [params1, params2]
class EmpiricalCoverage(_BaseProbaForecastingErrorMetric):
    """Evaluate the empirical coverage of the prediction intervals in data.

    For each variable and interval score, records 1 if the ground truth lies
    strictly inside the predicted interval and 0 otherwise.  Higher is better.

    Parameters
    ----------
    multioutput : string "uniform_average" or "raw_values" determines how\
        multioutput results will be treated.
    score_average : bool, optional, default = True
        specifies whether scores for each quantile should be averaged.
    """

    _tags = {
        "scitype:y_pred": "pred_interval",
        "lower_is_better": False,
    }

    def __init__(self, multioutput="uniform_average", score_average=True):
        name = "EmpiricalCoverage"
        self.score_average = score_average
        self.multioutput = multioutput
        super().__init__(
            name=name, score_average=score_average, multioutput=multioutput
        )

    def _evaluate_by_index(self, y_true, y_pred, multioutput, **kwargs):
        """Logic for finding the metric evaluated at each index.

        y_true : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Forecasted values.

        multioutput : string "uniform_average" or "raw_values" determines how \
            multioutput results will be treated.
        """
        lower = y_pred.iloc[:, y_pred.columns.get_level_values(2) == "lower"].to_numpy()
        upper = y_pred.iloc[:, y_pred.columns.get_level_values(2) == "upper"].to_numpy()

        if not isinstance(y_true, np.ndarray):
            y_true_np = y_true.to_numpy()
        else:
            y_true_np = y_true

        if y_true_np.ndim == 1:
            # BUG FIX: reshape the numpy array, not the original object --
            # pd.Series has no .reshape, so the old code raised for Series input.
            y_true_np = y_true_np.reshape(-1, 1)

        scores = np.unique(np.round(y_pred.columns.get_level_values(1), 7))
        no_scores = len(scores)
        # renamed from `vars` to avoid shadowing the builtin
        var_names = np.unique(y_pred.columns.get_level_values(0))

        # repeat truth once per interval score so it aligns with lower/upper
        y_true_np = np.tile(y_true_np, no_scores)

        truth_array = (y_true_np > lower).astype(int) * (y_true_np < upper).astype(int)

        out_df = pd.DataFrame(
            truth_array, columns=pd.MultiIndex.from_product([var_names, scores])
        )

        return out_df

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Retrieve test parameters."""
        params1 = {}
        return [params1]
class ConstraintViolation(_BaseProbaForecastingErrorMetric):
    """Evaluate the constraint violation of the prediction intervals in data.

    For each observation, measures the absolute distance by which the ground
    truth falls outside the predicted interval (0 if it lies inside).
    Lower is better.

    Parameters
    ----------
    multioutput : string "uniform_average" or "raw_values" determines how\
        multioutput results will be treated.
    score_average : bool, optional, default = True
        specifies whether scores for each quantile should be averaged.
    """

    _tags = {
        "scitype:y_pred": "pred_interval",
        "lower_is_better": True,
    }

    def __init__(self, multioutput="uniform_average", score_average=True):
        name = "ConstraintViolation"
        self.score_average = score_average
        self.multioutput = multioutput
        super().__init__(
            name=name, score_average=score_average, multioutput=multioutput
        )

    def _evaluate_by_index(self, y_true, y_pred, multioutput, **kwargs):
        """Logic for finding the metric evaluated at each index.

        y_true : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Ground truth (correct) target values.

        y_pred : pd.Series, pd.DataFrame or np.array of shape (fh,) or \
            (fh, n_outputs) where fh is the forecasting horizon
            Forecasted values.

        multioutput : string "uniform_average" or "raw_values" determines how \
            multioutput results will be treated.
        """
        lower = y_pred.iloc[:, y_pred.columns.get_level_values(2) == "lower"].to_numpy()
        upper = y_pred.iloc[:, y_pred.columns.get_level_values(2) == "upper"].to_numpy()

        if not isinstance(y_true, np.ndarray):
            y_true_np = y_true.to_numpy()
        else:
            y_true_np = y_true

        if y_true_np.ndim == 1:
            # BUG FIX: reshape the numpy array, not the original object --
            # pd.Series has no .reshape, so the old code raised for Series input.
            y_true_np = y_true_np.reshape(-1, 1)

        scores = np.unique(np.round(y_pred.columns.get_level_values(1), 7))
        no_scores = len(scores)
        # renamed from `vars` to avoid shadowing the builtin
        var_names = np.unique(y_pred.columns.get_level_values(0))

        # repeat truth once per interval score so it aligns with lower/upper
        y_true_np = np.tile(y_true_np, no_scores)

        # distance below the lower bound plus distance above the upper bound
        int_distance = ((y_true_np < lower).astype(int) * abs(lower - y_true_np)) + (
            (y_true_np > upper).astype(int) * abs(y_true_np - upper)
        )

        out_df = pd.DataFrame(
            int_distance, columns=pd.MultiIndex.from_product([var_names, scores])
        )

        return out_df

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Retrieve test parameters."""
        params1 = {}
        return [params1]
|
[
"pandas.DataFrame",
"sktime.datatypes.check_is_scitype",
"numpy.isin",
"numpy.average",
"sklearn.utils.check_array",
"logging.warning",
"numpy.asarray",
"sktime.datatypes.convert",
"pandas.MultiIndex.from_product",
"pandas.Series",
"numpy.tile",
"sklearn.utils.check_consistent_length",
"numpy.vstack"
] |
[((10207, 10236), 'pandas.Series', 'pd.Series', ([], {'index': 'y_pred.index'}), '(index=y_pred.index)\n', (10216, 10236), True, 'import pandas as pd\n'), ((10848, 10887), 'sklearn.utils.check_consistent_length', 'check_consistent_length', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (10871, 10887), False, 'from sklearn.utils import check_array, check_consistent_length\n'), ((10906, 10942), 'sklearn.utils.check_array', 'check_array', (['y_true'], {'ensure_2d': '(False)'}), '(y_true, ensure_2d=False)\n', (10917, 10942), False, 'from sklearn.utils import check_array, check_consistent_length\n'), ((12377, 12464), 'sktime.datatypes.check_is_scitype', 'check_is_scitype', (['y_pred'], {'scitype': '"""Proba"""', 'return_metadata': '(True)', 'var_name': '"""y_pred"""'}), "(y_pred, scitype='Proba', return_metadata=True, var_name=\n 'y_pred')\n", (12393, 12464), False, 'from sktime.datatypes import check_is_scitype, convert\n'), ((12664, 12755), 'sktime.datatypes.convert', 'convert', (['y_pred'], {'from_type': 'y_pred_mtype', 'to_type': 'inner_y_pred_mtype', 'as_scitype': '"""Proba"""'}), "(y_pred, from_type=y_pred_mtype, to_type=inner_y_pred_mtype,\n as_scitype='Proba')\n", (12671, 12755), False, 'from sktime.datatypes import check_is_scitype, convert\n'), ((17986, 18033), 'pandas.DataFrame', 'pd.DataFrame', (['loss'], {'columns': 'alpha_preds.columns'}), '(loss, columns=alpha_preds.columns)\n', (17998, 18033), True, 'import pandas as pd\n'), ((20303, 20332), 'numpy.tile', 'np.tile', (['y_true_np', 'no_scores'], {}), '(y_true_np, no_scores)\n', (20310, 20332), True, 'import numpy as np\n'), ((22735, 22764), 'numpy.tile', 'np.tile', (['y_true_np', 'no_scores'], {}), '(y_true_np, no_scores)\n', (22742, 22764), True, 'import numpy as np\n'), ((4833, 4940), 'logging.warning', 'warning', (['"""Dropping 0 width interval, don\'t include 0.5 quantile for interval metrics."""'], {}), '(\n "Dropping 0 width interval, don\'t include 0.5 quantile for interval metrics."\n )\n', 
(4840, 4940), False, 'from logging import warning\n'), ((8562, 8669), 'logging.warning', 'warning', (['"""Dropping 0 width interval, don\'t include 0.5 quantile for interval metrics."""'], {}), '(\n "Dropping 0 width interval, don\'t include 0.5 quantile for interval metrics."\n )\n', (8569, 8669), False, 'from logging import warning\n'), ((13881, 13898), 'numpy.asarray', 'np.asarray', (['alpha'], {}), '(alpha)\n', (13891, 13898), True, 'import numpy as np\n'), ((14948, 14993), 'numpy.average', 'np.average', (['loss'], {'weights': 'multioutput', 'axis': '(1)'}), '(loss, weights=multioutput, axis=1)\n', (14958, 14993), True, 'import numpy as np\n'), ((15026, 15063), 'numpy.average', 'np.average', (['loss'], {'weights': 'multioutput'}), '(loss, weights=multioutput)\n', (15036, 15063), True, 'import numpy as np\n'), ((11772, 11813), 'sklearn.utils.check_array', 'check_array', (['multioutput'], {'ensure_2d': '(False)'}), '(multioutput, ensure_2d=False)\n', (11783, 11813), False, 'from sklearn.utils import check_array, check_consistent_length\n'), ((20487, 20529), 'pandas.MultiIndex.from_product', 'pd.MultiIndex.from_product', (['[vars, scores]'], {}), '([vars, scores])\n', (20513, 20529), True, 'import pandas as pd\n'), ((22997, 23039), 'pandas.MultiIndex.from_product', 'pd.MultiIndex.from_product', (['[vars, scores]'], {}), '([vars, scores])\n', (23023, 23039), True, 'import pandas as pd\n'), ((17043, 17072), 'numpy.isin', 'np.isin', (['alpha', 'y_pred_alphas'], {}), '(alpha, y_pred_alphas)\n', (17050, 17072), True, 'import numpy as np\n'), ((10443, 10488), 'numpy.vstack', 'np.vstack', (['(y_true[:i, :], y_true[i + 1:, :])'], {}), '((y_true[:i, :], y_true[i + 1:, :]))\n', (10452, 10488), True, 'import numpy as np\n'), ((10519, 10564), 'numpy.vstack', 'np.vstack', (['(y_pred[:i, :], y_pred[i + 1:, :])'], {}), '((y_pred[:i, :], y_pred[i + 1:, :]))\n', (10528, 10564), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
A service that converts TCF to the Mallet input format.
"""
from collections import Counter
import re
from lxml import etree
from tcflib import tcf
from tcflib.tagsets import TagSet
from tcflib.service import ExportingWorker, run_as_cli
# Use ISOcat/MAF as the reference tagset; 'DC-1345' is presumably the ISOcat
# data-category selection identifier for MAF -- confirm against the tcflib
# tagsets documentation.
ISOcat = TagSet('DC-1345')
def posfilter(postags):
    """Build a predicate that keeps tokens whose POS matches ``postags``.

    The returned callable takes a ``tcflib.tcf.TokenElement`` and returns
    True when ``token.postag.is_a(postag)`` holds for any of the given tags.
    ``is_a`` queries the tagset hierarchy, so e.g. "commonNoun" is_a "noun".
    """
    def pfilter(token):
        return any(token.postag.is_a(tag) for tag in postags)
    return pfilter
class MalletExporter(ExportingWorker):
    """Export a TCF corpus into Mallet's one-document-per-line input format."""

    # Configuration options work both as command line arguments:
    #   ./tcf2mallet.py --spantype paragraph --postags "noun" "verb" "adjective"
    # and as GET parameters:
    #   GET /tcf2mallet/?spantype=paragraph&postags=noun
    __options__ = {
        'spantype': '',
        'postags': [''],
        'prefix': ''
    }
    layers = ['tokens', 'POStags', 'lemmas', 'textstructure']

    def export(self):
        """Return the corpus serialized as Mallet input, encoded as bytes.

        ExportingWorker just has to override ``export()``; it can access
        ``self.corpus`` like an AddingWorker.
        """
        # Decide which tokens to keep: either tokens matching the configured
        # POS tags (tagset-hierarchy aware), or all open-class tokens.
        if self.options.postags[0]:
            wanted = [ISOcat[tag] for tag in self.options.postags]
            keep = posfilter(wanted)
        else:
            keep = lambda token: not token.postag.is_closed
        # Select the text spans to export; the textstructure layer can be
        # used like a list, optionally restricted by span type.
        spans = self.corpus.textstructure
        if self.options.spantype:
            spans = [s for s in spans if s.type == self.options.spantype]
        # Whitespace in the prefix would break Mallet's space-separated format.
        label_prefix = re.sub(r'\s+', '_', self.options.prefix)
        lines = []
        for number, span in enumerate(spans, start=1):
            # Use lemmata as token values; drop TreeTagger's '<unknown>'
            # pseudo-lemma.
            lemmas = [t.lemma for t in span.tokens
                      if keep(t) and not t.lemma == '<unknown>']
            # Mallet line format: <document> <label> <words...>
            lines.append('{}{} {} {}\n'.format(label_prefix, number,
                                               self.corpus.lang,
                                               ' '.join(lemmas)))
        return ''.join(lines).encode('utf8')
if __name__ == '__main__':
    # BUGFIX: the worker class defined above is ``MalletExporter``;
    # ``MalletWorker`` was an undefined name and raised NameError at startup.
    run_as_cli(MalletExporter)
|
[
"tcflib.service.run_as_cli",
"re.sub",
"tcflib.tagsets.TagSet"
] |
[((1437, 1454), 'tcflib.tagsets.TagSet', 'TagSet', (['"""DC-1345"""'], {}), "('DC-1345')\n", (1443, 1454), False, 'from tcflib.tagsets import TagSet\n'), ((3995, 4019), 'tcflib.service.run_as_cli', 'run_as_cli', (['MalletWorker'], {}), '(MalletWorker)\n', (4005, 4019), False, 'from tcflib.service import ExportingWorker, run_as_cli\n'), ((3117, 3157), 're.sub', 're.sub', (['"""\\\\s+"""', '"""_"""', 'self.options.prefix'], {}), "('\\\\s+', '_', self.options.prefix)\n", (3123, 3157), False, 'import re\n')]
|
# -*- coding: utf-8 -*-
# @Time : 2019/4/2 14:53
# @Author : Alan
# @Email : <EMAIL>
# @File : demo test.py
# @Software: PyCharm
from elasticsearch import Elasticsearch
import json
es = Elasticsearch()
data = {'title': '美国留给伊拉克的是个烂摊子吗', 'url': 'http://view.news.qq.com/zt2011/usa_iraq/index.htm'}
data2 = {'title': '美国留给伊拉克的是个烂摊子吗', 'url': 'http://view.news.qq.com/zt2011/usa_iraq/index.htm',
         'date': '2011-12-16'}
'''插入数据'''
# Insert data:
# create() requires an explicit ``id`` argument to uniquely identify the document.
# index() does not require an ``id``; when omitted, one is generated automatically.
# create() internally delegates to index(); it is a thin wrapper around it.
# result = es.create(index='news', doc_type='politics', id=1, body=data)
# result2 = es.index(index='news', doc_type='politics', body=data)
'''更新数据'''
# Update data: es.update can be used, or
# index(), which performs both operations -- it inserts when the document does
# not exist and updates it when it already does.
# result = es.index(index='news', doc_type='politics', body=data2)
'''删除数据'''
# Delete data:
# result_d = es.delete(index='news', doc_type='politics', id=1)
# print(result_d)
'''查询数据'''
# Query data: a Chinese word-segmentation plugin is needed for the analyzer
# below; elasticsearch-analysis-ik is used here, see
# https://github.com/medcl/elasticsearch-analysis-ik
# Install it with the ``elasticsearch-plugin`` command-line tool; version 6.3.2
# is used here -- make sure it matches the Elasticsearch version.
mapping = {
    'properties':{
        'title':{
            'type': 'text',
            'analyzer': 'ik_max_word',
            'search_analyzer': 'ik_max_word'
        }
    }
}
es.indices.delete(index='news', ignore=[400, 404])
es.indices.create(index='news', ignore=400)
result_q = es.indices.put_mapping(index='news', doc_type='politics', body=mapping)
'''插入新的数据'''
# Insert fresh sample documents.
datas = [
    {
        'title': '美国留给伊拉克的是个烂摊子吗',
        'url': 'http://view.news.qq.com/zt2011/usa_iraq/index.htm',
        'date': '2011-12-16'
    },
    {
        'title': '公安部:各地校车将享最高路权',
        'url': 'http://www.chinanews.com/gn/2011/12-16/3536077.shtml',
        'date': '2011-12-16'
    },
    {
        'title': '中韩渔警冲突调查:韩警平均每天扣1艘中国渔船',
        'url': 'https://news.qq.com/a/20111216/001044.htm',
        'date': '2011-12-17'
    },
    {
        'title': '中国驻洛杉矶领事馆遭亚裔男子枪击 嫌犯已自首',
        'url': 'http://news.ifeng.com/world/detail_2011_12/16/11372558_0.shtml',
        'date': '2011-12-18'
    }
]
for data in datas:
    es.index(index='news', doc_type='politics', body=data)
result = es.search(index='news', doc_type='politics')
print(result)
'''全文检索'''
# Full-text search using a match query on the analyzed title field.
dsl = {
    'query':{
        'match':{
            'title':'中国 领事馆'
        }
    }
}
es = Elasticsearch()
result_q2 = es.search(index='news', doc_type='politics', body=dsl)
print(json.dumps(result_q2, indent=2, ensure_ascii=False))
|
[
"elasticsearch.Elasticsearch",
"json.dumps"
] |
[((198, 213), 'elasticsearch.Elasticsearch', 'Elasticsearch', ([], {}), '()\n', (211, 213), False, 'from elasticsearch import Elasticsearch\n'), ((2418, 2433), 'elasticsearch.Elasticsearch', 'Elasticsearch', ([], {}), '()\n', (2431, 2433), False, 'from elasticsearch import Elasticsearch\n'), ((2507, 2558), 'json.dumps', 'json.dumps', (['result_q2'], {'indent': '(2)', 'ensure_ascii': '(False)'}), '(result_q2, indent=2, ensure_ascii=False)\n', (2517, 2558), False, 'import json\n')]
|
import random
from time import sleep
def rand_wait_min(a=None, b=None):
    """Sleep for a random whole number of minutes between ``a`` and ``b``.

    Parameters
    ----------
    a, b : int or None
        Bounds in minutes.  When either bound is None, fall back to
        ``rand_wait_sec``'s default second bounds.
    """
    # BUGFIX: the original forwarded None to rand_wait_sec, which then crashed
    # in random.randint(None, None) when called without arguments.  Test for
    # None explicitly so a bound of 0 minutes is also honoured ("not a"
    # treated 0 as unset).
    if a is None or b is None:
        rand_wait_sec()
    else:
        rand_wait_sec(a * 60, b * 60)
def rand_wait_sec(min_v=2, max_v=3):
    """Sleep for a random whole number of seconds in [min_v, max_v]."""
    duration = random.randint(min_v, max_v)
    sleep(duration)
|
[
"random.randint"
] |
[((209, 237), 'random.randint', 'random.randint', (['min_v', 'max_v'], {}), '(min_v, max_v)\n', (223, 237), False, 'import random\n')]
|
import time
import numpy as np
import torch
from onpolicy.runner.shared.base_runner import Runner
import wandb
import imageio
def _t2n(x):
return x.detach().cpu().numpy()
class MPERunner(Runner):
    """Runner class to perform training, evaluation. and data collection for the MPEs. See parent class for details."""

    def __init__(self, config):
        super(MPERunner, self).__init__(config)

    def run(self):
        """Main loop: collect rollouts, train the policy, save, log, evaluate."""
        self.warmup()

        start = time.time()
        episodes = int(self.num_env_steps) // self.episode_length // self.n_rollout_threads

        for episode in range(episodes):
            if self.use_linear_lr_decay:
                self.trainer.policy.lr_decay(episode, episodes)

            for step in range(self.episode_length):
                # Sample actions
                values, actions, action_log_probs, rnn_states, rnn_states_critic, actions_env = self.collect(step)

                # Observe reward and next obs
                obs, rewards, dones, infos = self.envs.step(actions_env)

                data = obs, rewards, dones, infos, values, actions, action_log_probs, rnn_states, rnn_states_critic

                # insert data into buffer
                self.insert(data)

            # compute return and update network
            self.compute()
            train_infos = self.train()

            # post process
            total_num_steps = (episode + 1) * self.episode_length * self.n_rollout_threads

            # save model
            if (episode % self.save_interval == 0 or episode == episodes - 1):
                self.save()

            # log information
            if episode % self.log_interval == 0:
                end = time.time()
                print("\n Scenario {} Algo {} Exp {} updates {}/{} episodes, total num timesteps {}/{}, FPS {}.\n"
                        .format(self.all_args.scenario_name,
                                self.algorithm_name,
                                self.experiment_name,
                                episode,
                                episodes,
                                total_num_steps,
                                self.num_env_steps,
                                int(total_num_steps / (end - start))))

                # NOTE(review): ``env_infos`` is only defined when
                # env_name == "MPE" but is logged unconditionally below --
                # this runner appears to assume MPE environments only.
                if self.env_name == "MPE":
                    env_infos = {}
                    for agent_id in range(self.num_agents):
                        idv_rews = []
                        for info in infos:
                            if 'individual_reward' in info[agent_id].keys():
                                idv_rews.append(info[agent_id]['individual_reward'])
                        agent_k = 'agent%i/individual_rewards' % agent_id
                        env_infos[agent_k] = idv_rews

                train_infos["average_episode_rewards"] = np.mean(self.buffer.rewards) * self.episode_length
                print("average episode rewards is {}".format(train_infos["average_episode_rewards"]))
                self.log_train(train_infos, total_num_steps)
                self.log_env(env_infos, total_num_steps)

            # eval
            if episode % self.eval_interval == 0 and self.use_eval:
                self.eval(total_num_steps)

    def warmup(self):
        """Reset the environments and seed the replay buffer with initial obs."""
        # reset env
        obs = self.envs.reset()

        # replay buffer
        if self.use_centralized_V:
            # The centralized critic sees the concatenation of all agents' obs,
            # replicated once per agent.
            share_obs = obs.reshape(self.n_rollout_threads, -1)
            share_obs = np.expand_dims(share_obs, 1).repeat(self.num_agents, axis=1)
        else:
            share_obs = obs

        self.buffer.share_obs[0] = share_obs.copy()
        self.buffer.obs[0] = obs.copy()

    @torch.no_grad()
    def collect(self, step):
        """Query the policy for one step of actions across all rollout threads."""
        self.trainer.prep_rollout()
        value, action, action_log_prob, rnn_states, rnn_states_critic \
            = self.trainer.policy.get_actions(np.concatenate(self.buffer.share_obs[step]),
                            np.concatenate(self.buffer.obs[step]),
                            np.concatenate(self.buffer.rnn_states[step]),
                            np.concatenate(self.buffer.rnn_states_critic[step]),
                            np.concatenate(self.buffer.masks[step]))
        # [self.envs, agents, dim]
        values = np.array(np.split(_t2n(value), self.n_rollout_threads))
        actions = np.array(np.split(_t2n(action), self.n_rollout_threads))
        action_log_probs = np.array(np.split(_t2n(action_log_prob), self.n_rollout_threads))
        rnn_states = np.array(np.split(_t2n(rnn_states), self.n_rollout_threads))
        rnn_states_critic = np.array(np.split(_t2n(rnn_states_critic), self.n_rollout_threads))
        # rearrange action: one-hot encode for the environment's action format
        if self.envs.action_space[0].__class__.__name__ == 'MultiDiscrete':
            for i in range(self.envs.action_space[0].shape):
                uc_actions_env = np.eye(self.envs.action_space[0].high[i] + 1)[actions[:, :, i]]
                if i == 0:
                    actions_env = uc_actions_env
                else:
                    actions_env = np.concatenate((actions_env, uc_actions_env), axis=2)
        elif self.envs.action_space[0].__class__.__name__ == 'Discrete':
            actions_env = np.squeeze(np.eye(self.envs.action_space[0].n)[actions], 2)
        else:
            raise NotImplementedError

        return values, actions, action_log_probs, rnn_states, rnn_states_critic, actions_env

    def insert(self, data):
        """Store one transition in the buffer, resetting RNN state on done."""
        obs, rewards, dones, infos, values, actions, action_log_probs, rnn_states, rnn_states_critic = data

        # Zero the recurrent states and masks of finished episodes.
        rnn_states[dones == True] = np.zeros(((dones == True).sum(), self.recurrent_N, self.hidden_size), dtype=np.float32)
        rnn_states_critic[dones == True] = np.zeros(((dones == True).sum(), *self.buffer.rnn_states_critic.shape[3:]), dtype=np.float32)
        masks = np.ones((self.n_rollout_threads, self.num_agents, 1), dtype=np.float32)
        masks[dones == True] = np.zeros(((dones == True).sum(), 1), dtype=np.float32)

        if self.use_centralized_V:
            share_obs = obs.reshape(self.n_rollout_threads, -1)
            share_obs = np.expand_dims(share_obs, 1).repeat(self.num_agents, axis=1)
        else:
            share_obs = obs

        self.buffer.insert(share_obs, obs, rnn_states, rnn_states_critic, actions, action_log_probs, values, rewards, masks)

    @torch.no_grad()
    def eval(self, total_num_steps):
        """Run one evaluation rollout and log the average episode rewards."""
        eval_episode_rewards = []
        eval_obs = self.eval_envs.reset()

        eval_rnn_states = np.zeros((self.n_eval_rollout_threads, *self.buffer.rnn_states.shape[2:]), dtype=np.float32)
        eval_masks = np.ones((self.n_eval_rollout_threads, self.num_agents, 1), dtype=np.float32)

        for eval_step in range(self.episode_length):
            self.trainer.prep_rollout()
            eval_action, eval_rnn_states = self.trainer.policy.act(np.concatenate(eval_obs),
                                                np.concatenate(eval_rnn_states),
                                                np.concatenate(eval_masks),
                                                deterministic=True)
            eval_actions = np.array(np.split(_t2n(eval_action), self.n_eval_rollout_threads))
            eval_rnn_states = np.array(np.split(_t2n(eval_rnn_states), self.n_eval_rollout_threads))

            if self.eval_envs.action_space[0].__class__.__name__ == 'MultiDiscrete':
                for i in range(self.eval_envs.action_space[0].shape):
                    eval_uc_actions_env = np.eye(self.eval_envs.action_space[0].high[i]+1)[eval_actions[:, :, i]]
                    if i == 0:
                        eval_actions_env = eval_uc_actions_env
                    else:
                        eval_actions_env = np.concatenate((eval_actions_env, eval_uc_actions_env), axis=2)
            elif self.eval_envs.action_space[0].__class__.__name__ == 'Discrete':
                eval_actions_env = np.squeeze(np.eye(self.eval_envs.action_space[0].n)[eval_actions], 2)
            else:
                raise NotImplementedError

            # Observe reward and next obs
            eval_obs, eval_rewards, eval_dones, eval_infos = self.eval_envs.step(eval_actions_env)
            eval_episode_rewards.append(eval_rewards)

            eval_rnn_states[eval_dones == True] = np.zeros(((eval_dones == True).sum(), self.recurrent_N, self.hidden_size), dtype=np.float32)
            eval_masks = np.ones((self.n_eval_rollout_threads, self.num_agents, 1), dtype=np.float32)
            eval_masks[eval_dones == True] = np.zeros(((eval_dones == True).sum(), 1), dtype=np.float32)

        eval_episode_rewards = np.array(eval_episode_rewards)

        eval_env_infos = {}
        eval_env_infos['eval_average_episode_rewards'] = np.sum(np.array(eval_episode_rewards), axis=0)
        # BUGFIX: compute the scalar before printing --
        # ``eval_average_episode_rewards`` was previously undefined (NameError).
        eval_average_episode_rewards = np.mean(eval_env_infos['eval_average_episode_rewards'])
        print("eval average episode rewards of agent: " + str(eval_average_episode_rewards))
        self.log_env(eval_env_infos, total_num_steps)

    @torch.no_grad()
    def render(self):
        """Visualize the env."""
        envs = self.envs

        all_frames = []
        for episode in range(self.all_args.render_episodes):
            obs = envs.reset()
            if self.all_args.save_gifs:
                image = envs.render('rgb_array')[0][0]
                all_frames.append(image)

            rnn_states = np.zeros((self.n_rollout_threads, self.num_agents, self.recurrent_N, self.hidden_size), dtype=np.float32)
            masks = np.ones((self.n_rollout_threads, self.num_agents, 1), dtype=np.float32)

            episode_rewards = []

            for step in range(self.episode_length):
                calc_start = time.time()

                self.trainer.prep_rollout()
                action, rnn_states = self.trainer.policy.act(np.concatenate(obs),
                                                np.concatenate(rnn_states),
                                                np.concatenate(masks),
                                                deterministic=True)
                actions = np.array(np.split(_t2n(action), self.n_rollout_threads))
                rnn_states = np.array(np.split(_t2n(rnn_states), self.n_rollout_threads))

                if envs.action_space[0].__class__.__name__ == 'MultiDiscrete':
                    for i in range(envs.action_space[0].shape):
                        uc_actions_env = np.eye(envs.action_space[0].high[i]+1)[actions[:, :, i]]
                        if i == 0:
                            actions_env = uc_actions_env
                        else:
                            actions_env = np.concatenate((actions_env, uc_actions_env), axis=2)
                elif envs.action_space[0].__class__.__name__ == 'Discrete':
                    actions_env = np.squeeze(np.eye(envs.action_space[0].n)[actions], 2)
                else:
                    raise NotImplementedError

                # Observe reward and next obs
                obs, rewards, dones, infos = envs.step(actions_env)
                episode_rewards.append(rewards)

                rnn_states[dones == True] = np.zeros(((dones == True).sum(), self.recurrent_N, self.hidden_size), dtype=np.float32)
                masks = np.ones((self.n_rollout_threads, self.num_agents, 1), dtype=np.float32)
                masks[dones == True] = np.zeros(((dones == True).sum(), 1), dtype=np.float32)

                if self.all_args.save_gifs:
                    image = envs.render('rgb_array')[0][0]
                    all_frames.append(image)
                    calc_end = time.time()
                    elapsed = calc_end - calc_start
                    # Throttle rendering to the configured frame interval.
                    if elapsed < self.all_args.ifi:
                        time.sleep(self.all_args.ifi - elapsed)

            print("average episode rewards is: " + str(np.mean(np.sum(np.array(episode_rewards), axis=0))))

        if self.all_args.save_gifs:
            imageio.mimsave(str(self.gif_dir) + '/render.gif', all_frames, duration=self.all_args.ifi)
|
[
"numpy.zeros",
"numpy.ones",
"numpy.expand_dims",
"time.time",
"time.sleep",
"numpy.mean",
"numpy.array",
"numpy.eye",
"torch.no_grad",
"numpy.concatenate"
] |
[((3685, 3700), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3698, 3700), False, 'import torch\n'), ((6358, 6373), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6371, 6373), False, 'import torch\n'), ((8961, 8976), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8974, 8976), False, 'import torch\n'), ((464, 475), 'time.time', 'time.time', ([], {}), '()\n', (473, 475), False, 'import time\n'), ((5841, 5912), 'numpy.ones', 'np.ones', (['(self.n_rollout_threads, self.num_agents, 1)'], {'dtype': 'np.float32'}), '((self.n_rollout_threads, self.num_agents, 1), dtype=np.float32)\n', (5848, 5912), True, 'import numpy as np\n'), ((6514, 6610), 'numpy.zeros', 'np.zeros', (['(self.n_eval_rollout_threads, *self.buffer.rnn_states.shape[2:])'], {'dtype': 'np.float32'}), '((self.n_eval_rollout_threads, *self.buffer.rnn_states.shape[2:]),\n dtype=np.float32)\n', (6522, 6610), True, 'import numpy as np\n'), ((6628, 6704), 'numpy.ones', 'np.ones', (['(self.n_eval_rollout_threads, self.num_agents, 1)'], {'dtype': 'np.float32'}), '((self.n_eval_rollout_threads, self.num_agents, 1), dtype=np.float32)\n', (6635, 6704), True, 'import numpy as np\n'), ((8645, 8675), 'numpy.array', 'np.array', (['eval_episode_rewards'], {}), '(eval_episode_rewards)\n', (8653, 8675), True, 'import numpy as np\n'), ((3884, 3927), 'numpy.concatenate', 'np.concatenate', (['self.buffer.share_obs[step]'], {}), '(self.buffer.share_obs[step])\n', (3898, 3927), True, 'import numpy as np\n'), ((3957, 3994), 'numpy.concatenate', 'np.concatenate', (['self.buffer.obs[step]'], {}), '(self.buffer.obs[step])\n', (3971, 3994), True, 'import numpy as np\n'), ((4024, 4068), 'numpy.concatenate', 'np.concatenate', (['self.buffer.rnn_states[step]'], {}), '(self.buffer.rnn_states[step])\n', (4038, 4068), True, 'import numpy as np\n'), ((4098, 4149), 'numpy.concatenate', 'np.concatenate', (['self.buffer.rnn_states_critic[step]'], {}), '(self.buffer.rnn_states_critic[step])\n', (4112, 4149), True, 'import numpy 
as np\n'), ((4179, 4218), 'numpy.concatenate', 'np.concatenate', (['self.buffer.masks[step]'], {}), '(self.buffer.masks[step])\n', (4193, 4218), True, 'import numpy as np\n'), ((8431, 8507), 'numpy.ones', 'np.ones', (['(self.n_eval_rollout_threads, self.num_agents, 1)'], {'dtype': 'np.float32'}), '((self.n_eval_rollout_threads, self.num_agents, 1), dtype=np.float32)\n', (8438, 8507), True, 'import numpy as np\n'), ((8768, 8798), 'numpy.array', 'np.array', (['eval_episode_rewards'], {}), '(eval_episode_rewards)\n', (8776, 8798), True, 'import numpy as np\n'), ((9344, 9454), 'numpy.zeros', 'np.zeros', (['(self.n_rollout_threads, self.num_agents, self.recurrent_N, self.hidden_size)'], {'dtype': 'np.float32'}), '((self.n_rollout_threads, self.num_agents, self.recurrent_N, self.\n hidden_size), dtype=np.float32)\n', (9352, 9454), True, 'import numpy as np\n'), ((9470, 9541), 'numpy.ones', 'np.ones', (['(self.n_rollout_threads, self.num_agents, 1)'], {'dtype': 'np.float32'}), '((self.n_rollout_threads, self.num_agents, 1), dtype=np.float32)\n', (9477, 9541), True, 'import numpy as np\n'), ((1740, 1751), 'time.time', 'time.time', ([], {}), '()\n', (1749, 1751), False, 'import time\n'), ((6866, 6890), 'numpy.concatenate', 'np.concatenate', (['eval_obs'], {}), '(eval_obs)\n', (6880, 6890), True, 'import numpy as np\n'), ((6940, 6971), 'numpy.concatenate', 'np.concatenate', (['eval_rnn_states'], {}), '(eval_rnn_states)\n', (6954, 6971), True, 'import numpy as np\n'), ((7021, 7047), 'numpy.concatenate', 'np.concatenate', (['eval_masks'], {}), '(eval_masks)\n', (7035, 7047), True, 'import numpy as np\n'), ((9682, 9693), 'time.time', 'time.time', ([], {}), '()\n', (9691, 9693), False, 'import time\n'), ((11232, 11303), 'numpy.ones', 'np.ones', (['(self.n_rollout_threads, self.num_agents, 1)'], {'dtype': 'np.float32'}), '((self.n_rollout_threads, self.num_agents, 1), dtype=np.float32)\n', (11239, 11303), True, 'import numpy as np\n'), ((2858, 2886), 'numpy.mean', 'np.mean', 
(['self.buffer.rewards'], {}), '(self.buffer.rewards)\n', (2865, 2886), True, 'import numpy as np\n'), ((3483, 3511), 'numpy.expand_dims', 'np.expand_dims', (['share_obs', '(1)'], {}), '(share_obs, 1)\n', (3497, 3511), True, 'import numpy as np\n'), ((4871, 4916), 'numpy.eye', 'np.eye', (['(self.envs.action_space[0].high[i] + 1)'], {}), '(self.envs.action_space[0].high[i] + 1)\n', (4877, 4916), True, 'import numpy as np\n'), ((5067, 5120), 'numpy.concatenate', 'np.concatenate', (['(actions_env, uc_actions_env)'], {'axis': '(2)'}), '((actions_env, uc_actions_env), axis=2)\n', (5081, 5120), True, 'import numpy as np\n'), ((6123, 6151), 'numpy.expand_dims', 'np.expand_dims', (['share_obs', '(1)'], {}), '(share_obs, 1)\n', (6137, 6151), True, 'import numpy as np\n'), ((9800, 9819), 'numpy.concatenate', 'np.concatenate', (['obs'], {}), '(obs)\n', (9814, 9819), True, 'import numpy as np\n'), ((9873, 9899), 'numpy.concatenate', 'np.concatenate', (['rnn_states'], {}), '(rnn_states)\n', (9887, 9899), True, 'import numpy as np\n'), ((9953, 9974), 'numpy.concatenate', 'np.concatenate', (['masks'], {}), '(masks)\n', (9967, 9974), True, 'import numpy as np\n'), ((11578, 11589), 'time.time', 'time.time', ([], {}), '()\n', (11587, 11589), False, 'import time\n'), ((5231, 5266), 'numpy.eye', 'np.eye', (['self.envs.action_space[0].n'], {}), '(self.envs.action_space[0].n)\n', (5237, 5266), True, 'import numpy as np\n'), ((7522, 7572), 'numpy.eye', 'np.eye', (['(self.eval_envs.action_space[0].high[i] + 1)'], {}), '(self.eval_envs.action_space[0].high[i] + 1)\n', (7528, 7572), True, 'import numpy as np\n'), ((7757, 7820), 'numpy.concatenate', 'np.concatenate', (['(eval_actions_env, eval_uc_actions_env)'], {'axis': '(2)'}), '((eval_actions_env, eval_uc_actions_env), axis=2)\n', (7771, 7820), True, 'import numpy as np\n'), ((11718, 11757), 'time.sleep', 'time.sleep', (['(self.all_args.ifi - elapsed)'], {}), '(self.all_args.ifi - elapsed)\n', (11728, 11757), False, 'import time\n'), 
((7949, 7989), 'numpy.eye', 'np.eye', (['self.eval_envs.action_space[0].n'], {}), '(self.eval_envs.action_space[0].n)\n', (7955, 7989), True, 'import numpy as np\n'), ((10406, 10446), 'numpy.eye', 'np.eye', (['(envs.action_space[0].high[i] + 1)'], {}), '(envs.action_space[0].high[i] + 1)\n', (10412, 10446), True, 'import numpy as np\n'), ((10627, 10680), 'numpy.concatenate', 'np.concatenate', (['(actions_env, uc_actions_env)'], {'axis': '(2)'}), '((actions_env, uc_actions_env), axis=2)\n', (10641, 10680), True, 'import numpy as np\n'), ((10802, 10832), 'numpy.eye', 'np.eye', (['envs.action_space[0].n'], {}), '(envs.action_space[0].n)\n', (10808, 10832), True, 'import numpy as np\n'), ((11829, 11854), 'numpy.array', 'np.array', (['episode_rewards'], {}), '(episode_rewards)\n', (11837, 11854), True, 'import numpy as np\n')]
|
import re
from django.core.exceptions import ValidationError
def phone_validator(value):
    """Validate a phone number: 8 to 13 characters of digits and '+'.

    Parameters
    ----------
    value : str
        The candidate phone number.

    Returns
    -------
    bool
        Always True when validation passes.

    Raises
    ------
    ValidationError
        If ``value`` does not match the expected pattern.
    """
    # BUGFIX: use a raw string -- '\+' in a plain string is an invalid escape
    # sequence (DeprecationWarning, a SyntaxWarning in newer Pythons).
    # NOTE(review): the pattern accepts 8 characters although the error
    # message says a minimum of 9 ("min.9") -- confirm the intended bound.
    v = re.match(r'^[0-9\+]{8,13}$', value)
    if v is None:
        raise ValidationError('Tylko cyfry (min.9) np.111222333')
    return True
|
[
"django.core.exceptions.ValidationError",
"re.match"
] |
[((100, 135), 're.match', 're.match', (['"""^[0-9\\\\+]{8,13}$"""', 'value'], {}), "('^[0-9\\\\+]{8,13}$', value)\n", (108, 135), False, 'import re\n'), ((167, 218), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Tylko cyfry (min.9) np.111222333"""'], {}), "('Tylko cyfry (min.9) np.111222333')\n", (182, 218), False, 'from django.core.exceptions import ValidationError\n')]
|
# -*- coding: utf-8 -*-
"""Module providing schema definitions for general widgets"""
from plone.app.textfield import RichText
from plone.autoform.interfaces import IFormFieldProvider
from zope import schema
from zope.interface import Interface, provider
from ade25.widgets import MessageFactory as _
@provider(IFormFieldProvider)
class IAde25WidgetSeparator(Interface):
    """Content widget separator.

    Marker schema with no fields; registered as a form-field provider so it
    can be plugged into widget forms.
    """
@provider(IFormFieldProvider)
class IAde25WidgetHorizontalLine(Interface):
    """Content widget horizontal line.

    Marker schema with no fields; registered as a form-field provider so it
    can be plugged into widget forms.
    """
@provider(IFormFieldProvider)
class IAde25WidgetTextBlock(Interface):
    """Schema for a content widget holding a block of text."""

    # Optional plain ``schema.Text`` body of the widget.
    text = schema.Text(
        title=_("Text Content"),
        required=False
    )
@provider(IFormFieldProvider)
class IAde25WidgetTextFormatted(Interface):
    """Schema for a content widget holding rich (formatted) text."""

    # Optional ``RichText`` body, stored with its MIME type.
    text = RichText(
        title=_(u"Text"),
        required=False
    )
@provider(IFormFieldProvider)
class IAde25WidgetTextHtml(Interface):
    """Schema for a content widget holding raw HTML markup."""

    # Optional raw HTML stored as plain text (no transform applied here).
    text = schema.Text(
        title=_("HTML Content"),
        required=False
    )
@provider(IFormFieldProvider)
class IAde25WidgetPageHeader(Interface):
    """ Content Widget to display page header """

    # Main page headline.
    # NOTE(review): the title is a plain unicode string here, unlike the
    # translated ``_()`` titles used by the sibling schemata -- confirm
    # whether it should be wrapped for i18n.
    headline = schema.TextLine(
        title=u"Page Headline",
        description=_(u"Please enter the main page headline."),
        required=False,
    )
    # Short teaser/abstract accompanying the headline.
    abstract = schema.Text(
        title=u"Page Abstract",
        description=_(u"Use the abstract to provide a short description of ."
                      u"the page content."),
        required=False,
    )
|
[
"ade25.widgets.MessageFactory",
"zope.interface.provider"
] |
[((305, 333), 'zope.interface.provider', 'provider', (['IFormFieldProvider'], {}), '(IFormFieldProvider)\n', (313, 333), False, 'from zope.interface import Interface, provider\n'), ((414, 442), 'zope.interface.provider', 'provider', (['IFormFieldProvider'], {}), '(IFormFieldProvider)\n', (422, 442), False, 'from zope.interface import Interface, provider\n'), ((534, 562), 'zope.interface.provider', 'provider', (['IFormFieldProvider'], {}), '(IFormFieldProvider)\n', (542, 562), False, 'from zope.interface import Interface, provider\n'), ((735, 763), 'zope.interface.provider', 'provider', (['IFormFieldProvider'], {}), '(IFormFieldProvider)\n', (743, 763), False, 'from zope.interface import Interface, provider\n'), ((930, 958), 'zope.interface.provider', 'provider', (['IFormFieldProvider'], {}), '(IFormFieldProvider)\n', (938, 958), False, 'from zope.interface import Interface, provider\n'), ((1125, 1153), 'zope.interface.provider', 'provider', (['IFormFieldProvider'], {}), '(IFormFieldProvider)\n', (1133, 1153), False, 'from zope.interface import Interface, provider\n'), ((684, 701), 'ade25.widgets.MessageFactory', '_', (['"""Text Content"""'], {}), "('Text Content')\n", (685, 701), True, 'from ade25.widgets import MessageFactory as _\n'), ((886, 896), 'ade25.widgets.MessageFactory', '_', (['u"""Text"""'], {}), "(u'Text')\n", (887, 896), True, 'from ade25.widgets import MessageFactory as _\n'), ((1074, 1091), 'ade25.widgets.MessageFactory', '_', (['"""HTML Content"""'], {}), "('HTML Content')\n", (1075, 1091), True, 'from ade25.widgets import MessageFactory as _\n'), ((1330, 1372), 'ade25.widgets.MessageFactory', '_', (['u"""Please enter the main page headline."""'], {}), "(u'Please enter the main page headline.')\n", (1331, 1372), True, 'from ade25.widgets import MessageFactory as _\n'), ((1485, 1560), 'ade25.widgets.MessageFactory', '_', (['u"""Use the abstract to provide a short description of .the page content."""'], {}), "(u'Use the abstract to provide a short 
description of .the page content.')\n", (1486, 1560), True, 'from ade25.widgets import MessageFactory as _\n')]
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import io
import os
import tarfile
from flask import Flask, request, send_file
from arch.api.utils.core import json_loads
from fate_flow.driver.job_controller import JobController
from fate_flow.driver.task_scheduler import TaskScheduler
from fate_flow.settings import stat_logger, CLUSTER_STANDALONE_JOB_SERVER_PORT
from fate_flow.utils import job_utils, detect_utils
from fate_flow.utils.api_utils import get_json_result, request_execute_server
from fate_flow.entity.constant_config import WorkMode
from fate_flow.entity.runtime_config import RuntimeConfig
manager = Flask(__name__)  # WSGI app hosting the job-management REST endpoints defined below
@manager.errorhandler(500)
def internal_server_error(e):
    """Log any unhandled exception and report it as a retcode-100 JSON response."""
    stat_logger.exception(e)
    message = str(e)
    return get_json_result(retcode=100, retmsg=message)
@manager.route('/submit', methods=['POST'])
def submit_job():
    """Submit a job: run it locally when work modes match, otherwise proxy a
    standalone job to the co-located standalone job server, or refuse."""
    runtime_conf = request.json.get('job_runtime_conf', {})
    work_mode = runtime_conf.get('job_parameters', {}).get('work_mode', None)
    detect_utils.check_config({'work_mode': work_mode},
                              required_arguments=[('work_mode', (WorkMode.CLUSTER, WorkMode.STANDALONE))])
    if work_mode == RuntimeConfig.WORK_MODE:
        submit_result = JobController.submit_job(request.json)
        job_id, job_dsl_path, job_runtime_conf_path, model_info, board_url = submit_result
        data = {'job_dsl_path': job_dsl_path,
                'job_runtime_conf_path': job_runtime_conf_path,
                'model_info': model_info,
                'board_url': board_url
                }
        return get_json_result(job_id=job_id, data=data)
    if RuntimeConfig.WORK_MODE == WorkMode.CLUSTER and work_mode == WorkMode.STANDALONE:
        # use cluster standalone job server to execute standalone job
        execute_host = '{}:{}'.format(request.remote_addr, CLUSTER_STANDALONE_JOB_SERVER_PORT)
        return request_execute_server(request=request, execute_host=execute_host)
    raise Exception('server run on standalone can not support cluster mode job')
@manager.route('/stop', methods=['POST'])
@job_utils.job_server_routing()
def stop_job():
    """Stop the job identified by 'job_id' in the request body."""
    target_job_id = request.json.get('job_id', '')
    TaskScheduler.stop_job(job_id=target_job_id)
    return get_json_result(retcode=0, retmsg='success')
@manager.route('/query', methods=['POST'])
def query_job():
    """Look up jobs matching the request filters; return them serialized, or retcode 101."""
    matched = job_utils.query_job(**request.json)
    if matched:
        return get_json_result(retcode=0, retmsg='success', data=[item.to_json() for item in matched])
    return get_json_result(retcode=101, retmsg='find job failed')
@manager.route('/config', methods=['POST'])
def job_config():
    """Return the dsl, runtime conf and model info of the first job matching the filters."""
    jobs = job_utils.query_job(**request.json)
    if not jobs:
        return get_json_result(retcode=101, retmsg='find job failed')
    job = jobs[0]
    runtime_conf = json_loads(job.f_runtime_conf)
    job_parameters = runtime_conf['job_parameters']
    response_data = {
        'job_id': job.f_job_id,
        'dsl': json_loads(job.f_dsl),
        'runtime_conf': runtime_conf,
        'train_runtime_conf': json_loads(job.f_train_runtime_conf),
        # model identity is embedded in the stored runtime configuration
        'model_info': {'model_id': job_parameters['model_id'],
                       'model_version': job_parameters['model_version']},
    }
    return get_json_result(retcode=0, retmsg='success', data=response_data)
@manager.route('/log', methods=['get'])
@job_utils.job_server_routing(307)
def job_log():
    """
    Stream the job's log directory as an in-memory gzipped tar archive.

    :return: a Flask file response named 'job_<job_id>_log.tar.gz'.
    """
    # NOTE(review): this route is registered for GET but reads request.json,
    # which a plain GET without a JSON body may not supply — confirm callers.
    job_id = request.json.get('job_id', '')
    memory_file = io.BytesIO()
    # FIX: open the archive via a context manager so the gzip stream is
    # finalized (tar.close()) even if os.walk/tar.add raises mid-way; the
    # original leaked an unterminated archive on error.
    with tarfile.open(fileobj=memory_file, mode='w:gz') as tar:
        job_log_dir = job_utils.get_job_log_directory(job_id=job_id)
        for root, dir, files in os.walk(job_log_dir):
            for file in files:
                full_path = os.path.join(root, file)
                # store paths relative to the log dir so the archive extracts flat
                rel_path = os.path.relpath(full_path, job_log_dir)
                tar.add(full_path, rel_path)
    memory_file.seek(0)
    return send_file(memory_file, attachment_filename='job_{}_log.tar.gz'.format(job_id), as_attachment=True)
@manager.route('/task/query', methods=['POST'])
def query_task():
    """Look up tasks matching the request filters; return them serialized, or retcode 101."""
    found = job_utils.query_task(**request.json)
    if found:
        return get_json_result(retcode=0, retmsg='success', data=[t.to_json() for t in found])
    return get_json_result(retcode=101, retmsg='find task failed')
|
[
"fate_flow.utils.job_utils.job_server_routing",
"io.BytesIO",
"flask.request.json.get",
"os.path.join",
"tarfile.open",
"fate_flow.utils.job_utils.get_job_log_directory",
"fate_flow.driver.job_controller.JobController.submit_job",
"flask.Flask",
"fate_flow.utils.api_utils.get_json_result",
"os.walk",
"arch.api.utils.core.json_loads",
"os.path.relpath",
"fate_flow.utils.detect_utils.check_config",
"fate_flow.settings.stat_logger.exception",
"fate_flow.utils.job_utils.query_job",
"fate_flow.utils.job_utils.query_task"
] |
[((1188, 1203), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1193, 1203), False, 'from flask import Flask, request, send_file\n'), ((2671, 2701), 'fate_flow.utils.job_utils.job_server_routing', 'job_utils.job_server_routing', ([], {}), '()\n', (2699, 2701), False, 'from fate_flow.utils import job_utils, detect_utils\n'), ((4046, 4079), 'fate_flow.utils.job_utils.job_server_routing', 'job_utils.job_server_routing', (['(307)'], {}), '(307)\n', (4074, 4079), False, 'from fate_flow.utils import job_utils, detect_utils\n'), ((1267, 1291), 'fate_flow.settings.stat_logger.exception', 'stat_logger.exception', (['e'], {}), '(e)\n', (1288, 1291), False, 'from fate_flow.settings import stat_logger, CLUSTER_STANDALONE_JOB_SERVER_PORT\n'), ((1521, 1654), 'fate_flow.utils.detect_utils.check_config', 'detect_utils.check_config', (["{'work_mode': work_mode}"], {'required_arguments': "[('work_mode', (WorkMode.CLUSTER, WorkMode.STANDALONE))]"}), "({'work_mode': work_mode}, required_arguments=[(\n 'work_mode', (WorkMode.CLUSTER, WorkMode.STANDALONE))])\n", (1546, 1654), False, 'from fate_flow.utils import job_utils, detect_utils\n'), ((2795, 2839), 'fate_flow.utils.api_utils.get_json_result', 'get_json_result', ([], {'retcode': '(0)', 'retmsg': '"""success"""'}), "(retcode=0, retmsg='success')\n", (2810, 2839), False, 'from fate_flow.utils.api_utils import get_json_result, request_execute_server\n'), ((2913, 2948), 'fate_flow.utils.job_utils.query_job', 'job_utils.query_job', ([], {}), '(**request.json)\n', (2932, 2948), False, 'from fate_flow.utils import job_utils, detect_utils\n'), ((3205, 3240), 'fate_flow.utils.job_utils.query_job', 'job_utils.query_job', ([], {}), '(**request.json)\n', (3224, 3240), False, 'from fate_flow.utils import job_utils, detect_utils\n'), ((4108, 4138), 'flask.request.json.get', 'request.json.get', (['"""job_id"""', '""""""'], {}), "('job_id', '')\n", (4124, 4138), False, 'from flask import Flask, request, send_file\n'), ((4157, 4169), 
'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4167, 4169), False, 'import io\n'), ((4180, 4226), 'tarfile.open', 'tarfile.open', ([], {'fileobj': 'memory_file', 'mode': '"""w:gz"""'}), "(fileobj=memory_file, mode='w:gz')\n", (4192, 4226), False, 'import tarfile\n'), ((4245, 4291), 'fate_flow.utils.job_utils.get_job_log_directory', 'job_utils.get_job_log_directory', ([], {'job_id': 'job_id'}), '(job_id=job_id)\n', (4276, 4291), False, 'from fate_flow.utils import job_utils, detect_utils\n'), ((4320, 4340), 'os.walk', 'os.walk', (['job_log_dir'], {}), '(job_log_dir)\n', (4327, 4340), False, 'import os\n'), ((4752, 4788), 'fate_flow.utils.job_utils.query_task', 'job_utils.query_task', ([], {}), '(**request.json)\n', (4772, 4788), False, 'from fate_flow.utils import job_utils, detect_utils\n'), ((1772, 1810), 'fate_flow.driver.job_controller.JobController.submit_job', 'JobController.submit_job', (['request.json'], {}), '(request.json)\n', (1796, 1810), False, 'from fate_flow.driver.job_controller import JobController\n'), ((1826, 1999), 'fate_flow.utils.api_utils.get_json_result', 'get_json_result', ([], {'job_id': 'job_id', 'data': "{'job_dsl_path': job_dsl_path, 'job_runtime_conf_path':\n job_runtime_conf_path, 'model_info': model_info, 'board_url': board_url}"}), "(job_id=job_id, data={'job_dsl_path': job_dsl_path,\n 'job_runtime_conf_path': job_runtime_conf_path, 'model_info':\n model_info, 'board_url': board_url})\n", (1841, 1999), False, 'from fate_flow.utils.api_utils import get_json_result, request_execute_server\n'), ((2981, 3035), 'fate_flow.utils.api_utils.get_json_result', 'get_json_result', ([], {'retcode': '(101)', 'retmsg': '"""find job failed"""'}), "(retcode=101, retmsg='find job failed')\n", (2996, 3035), False, 'from fate_flow.utils.api_utils import get_json_result, request_execute_server\n'), ((3273, 3327), 'fate_flow.utils.api_utils.get_json_result', 'get_json_result', ([], {'retcode': '(101)', 'retmsg': '"""find job failed"""'}), "(retcode=101, 
retmsg='find job failed')\n", (3288, 3327), False, 'from fate_flow.utils.api_utils import get_json_result, request_execute_server\n'), ((3469, 3490), 'arch.api.utils.core.json_loads', 'json_loads', (['job.f_dsl'], {}), '(job.f_dsl)\n', (3479, 3490), False, 'from arch.api.utils.core import json_loads\n'), ((3531, 3561), 'arch.api.utils.core.json_loads', 'json_loads', (['job.f_runtime_conf'], {}), '(job.f_runtime_conf)\n', (3541, 3561), False, 'from arch.api.utils.core import json_loads\n'), ((3608, 3644), 'arch.api.utils.core.json_loads', 'json_loads', (['job.f_train_runtime_conf'], {}), '(job.f_train_runtime_conf)\n', (3618, 3644), False, 'from arch.api.utils.core import json_loads\n'), ((3938, 4002), 'fate_flow.utils.api_utils.get_json_result', 'get_json_result', ([], {'retcode': '(0)', 'retmsg': '"""success"""', 'data': 'response_data'}), "(retcode=0, retmsg='success', data=response_data)\n", (3953, 4002), False, 'from fate_flow.utils.api_utils import get_json_result, request_execute_server\n'), ((4822, 4877), 'fate_flow.utils.api_utils.get_json_result', 'get_json_result', ([], {'retcode': '(101)', 'retmsg': '"""find task failed"""'}), "(retcode=101, retmsg='find task failed')\n", (4837, 4877), False, 'from fate_flow.utils.api_utils import get_json_result, request_execute_server\n'), ((2752, 2782), 'flask.request.json.get', 'request.json.get', (['"""job_id"""', '""""""'], {}), "('job_id', '')\n", (2768, 2782), False, 'from flask import Flask, request, send_file\n'), ((4393, 4417), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (4405, 4417), False, 'import os\n'), ((4441, 4480), 'os.path.relpath', 'os.path.relpath', (['full_path', 'job_log_dir'], {}), '(full_path, job_log_dir)\n', (4456, 4480), False, 'import os\n'), ((1427, 1467), 'flask.request.json.get', 'request.json.get', (['"""job_runtime_conf"""', '{}'], {}), "('job_runtime_conf', {})\n", (1443, 1467), False, 'from flask import Flask, request, send_file\n')]
|
##############################################################################
# Institute for the Design of Advanced Energy Systems Process Systems
# Engineering Framework (IDAES PSE Framework) Copyright (c) 2018-2019, by the
# software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia
# University Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.txt and LICENSE.txt for full copyright and
# license information, respectively. Both files are also available online
# at the URL "https://github.com/IDAES/idaes-pse".
##############################################################################
import pyomo.environ as pyo
from idaes.surrogate import ripe
import numpy as np
import random
from . import isotsim
np.random.seed(20)  # fixed seed so the adaptive-sampling example is reproducible
def main():
    """Build a RIPE kinetic model with adaptive (error-maximization) sampling.

    Starts from two simulated concentration data points at the bounds, fits a
    RIPE model over the candidate stoichiometries, then repeatedly adds the
    point proposed by error-maximization sampling (EMS) until the model error
    is within twice the expected noise level, and fits a final concise model.
    """
    #ndata = 100
    noise = 0.1
    # ns: number of species; lb/ub: concentration bounds used by EMS
    ns = 5
    lb_conc = [0,0,0,0,0]
    ub_conc = [10,10,0,0,0]
    # Initialize concentration arrays
    # initial concentrations - only 2 data points at bounds
    cdata0 = [[1,1,0,0,0],[10,10,0,0,0]]
    cdata = isotsim.sim(cdata0)
    nd = len(cdata0)
    # Considered reaction stoichiometries
    stoich = [[-1,-1,1,0,0] ,[0,-1,-1,1,0],[-1,0,0,-1,1],[-1,-2,0,1,0] ,[-2,-2,0,0,1],[-1,-1,-1,0,1],[-2,-1,1,-1,1]]
    # IRIPE internal mass action kinetics are specified
    mechs = [['all','massact']]
    # Use expected variance - estimated from data if not provided
    sigma = np.multiply(noise**2,np.array(cdata))
    # Call to RIPE
    results = ripe.ripemodel(cdata,stoich = stoich,mechanisms=mechs,x0=cdata0,hide_output=False,sigma=sigma,deltaterm=0,expand_output=True)
    # Adaptive experimental design using error maximization sampling
    [new_points, err] = ripe.ems(results,isotsim.sim,lb_conc,ub_conc,5,x=cdata,x0=cdata0)
    # Implement EMS as described in the RIPE publication
    new_res = isotsim.sim(new_points)[0]
    ite = 0
    # print 'maximum allowable tolerances : ', [noise*s for s in new_res]
    # Keep sampling while any predicted error exceeds 2*noise of the simulated response
    while any(err > [2*noise*s for s in new_res] ):
        # print 'Which concentrations violate error (True=violation) : ', err > [noise*s for s in new_res]
        results = {}
        ite+=1
        # Data updated explicitly so RBFopt subroutines produce consistent results
        # (grow the data arrays by one row and copy the old rows in place)
        new_cdata0 = np.zeros([nd+ite,ns])
        new_cdata = np.zeros([nd+ite,ns])
        new_cdata0[:-1][:] = cdata0[:][:]
        new_cdata[:-1][:] = cdata[:][:]
        new_cdata0[-1][:] = new_points[:]
        res = isotsim.sim(new_points)[0]
        for j in range(len(res)):
            new_cdata[-1][j] = res[j]
        #Update weight parameters
        sigma = np.multiply(noise**2,np.array(new_cdata))
        # Build updated RIPE model
        results = ripe.ripemodel(new_cdata,stoich = stoich,mechanisms=mechs,x0=new_cdata0,sigma=sigma,expand_output=True)
        # Another call to EMS
        [new_points, err] = ripe.ems(results,isotsim.sim,lb_conc,ub_conc,5,x=cdata,x0=cdata0)
        # Update results
        new_res = isotsim.sim(new_points)[0]
        cdata0 = new_cdata0
        cdata = new_cdata
    # Final call to RIPE to get concise output
    results = ripe.ripemodel(cdata,stoich = stoich,mechanisms=mechs,x0=cdata0,sigma=sigma,expand_output=False)
    #print results
# Run the adaptive-design demo when executed as a script.
if __name__ == "__main__":
    main()
|
[
"idaes.surrogate.ripe.ems",
"idaes.surrogate.ripe.ripemodel",
"numpy.random.seed",
"numpy.zeros",
"numpy.array"
] |
[((907, 925), 'numpy.random.seed', 'np.random.seed', (['(20)'], {}), '(20)\n', (921, 925), True, 'import numpy as np\n'), ((1634, 1768), 'idaes.surrogate.ripe.ripemodel', 'ripe.ripemodel', (['cdata'], {'stoich': 'stoich', 'mechanisms': 'mechs', 'x0': 'cdata0', 'hide_output': '(False)', 'sigma': 'sigma', 'deltaterm': '(0)', 'expand_output': '(True)'}), '(cdata, stoich=stoich, mechanisms=mechs, x0=cdata0,\n hide_output=False, sigma=sigma, deltaterm=0, expand_output=True)\n', (1648, 1768), False, 'from idaes.surrogate import ripe\n'), ((1854, 1925), 'idaes.surrogate.ripe.ems', 'ripe.ems', (['results', 'isotsim.sim', 'lb_conc', 'ub_conc', '(5)'], {'x': 'cdata', 'x0': 'cdata0'}), '(results, isotsim.sim, lb_conc, ub_conc, 5, x=cdata, x0=cdata0)\n', (1862, 1925), False, 'from idaes.surrogate import ripe\n'), ((3270, 3374), 'idaes.surrogate.ripe.ripemodel', 'ripe.ripemodel', (['cdata'], {'stoich': 'stoich', 'mechanisms': 'mechs', 'x0': 'cdata0', 'sigma': 'sigma', 'expand_output': '(False)'}), '(cdata, stoich=stoich, mechanisms=mechs, x0=cdata0, sigma=\n sigma, expand_output=False)\n', (3284, 3374), False, 'from idaes.surrogate import ripe\n'), ((1583, 1598), 'numpy.array', 'np.array', (['cdata'], {}), '(cdata)\n', (1591, 1598), True, 'import numpy as np\n'), ((2404, 2428), 'numpy.zeros', 'np.zeros', (['[nd + ite, ns]'], {}), '([nd + ite, ns])\n', (2412, 2428), True, 'import numpy as np\n'), ((2447, 2471), 'numpy.zeros', 'np.zeros', (['[nd + ite, ns]'], {}), '([nd + ite, ns])\n', (2455, 2471), True, 'import numpy as np\n'), ((2854, 2964), 'idaes.surrogate.ripe.ripemodel', 'ripe.ripemodel', (['new_cdata'], {'stoich': 'stoich', 'mechanisms': 'mechs', 'x0': 'new_cdata0', 'sigma': 'sigma', 'expand_output': '(True)'}), '(new_cdata, stoich=stoich, mechanisms=mechs, x0=new_cdata0,\n sigma=sigma, expand_output=True)\n', (2868, 2964), False, 'from idaes.surrogate import ripe\n'), ((3017, 3088), 'idaes.surrogate.ripe.ems', 'ripe.ems', (['results', 'isotsim.sim', 'lb_conc', 'ub_conc', 
'(5)'], {'x': 'cdata', 'x0': 'cdata0'}), '(results, isotsim.sim, lb_conc, ub_conc, 5, x=cdata, x0=cdata0)\n', (3025, 3088), False, 'from idaes.surrogate import ripe\n'), ((2779, 2798), 'numpy.array', 'np.array', (['new_cdata'], {}), '(new_cdata)\n', (2787, 2798), True, 'import numpy as np\n')]
|
import os
import numpy as np
from glob import glob
from os.path import join
import torch.utils.data as data
from sklearn.neighbors import NearestNeighbors
import datasets_util
class GeolocDataset(data.Dataset):
    """Gallery + queries image dataset for visual geolocalization.

    Positives for each query are the gallery images whose UTM coordinates
    (encoded in the file names) lie within ``positive_dist_threshold`` meters.
    """

    def __init__(self, datasets_folder="datasets", dataset_name="pitts30k", split="train", positive_dist_threshold=25):
        """
        Parameters
        ----------
        datasets_folder : str, path of the folder with the datasets.
        dataset_name : str, name of the folder with the dataset within datasets_folder.
        split : str, split to use among train, val or test.
        positive_dist_threshold : int, the threshold for positives (in meters).
        The images should be located at these two locations:
            {datasets_folder}/{dataset_name}/images/{split}/gallery/*.jpg
            {datasets_folder}/{dataset_name}/images/{split}/queries/*.jpg
        """
        super().__init__()
        self.dataset_name = dataset_name
        self.dataset_folder = join(datasets_folder, dataset_name, "images", split)
        if not os.path.exists(self.dataset_folder): raise FileNotFoundError(f"Folder {self.dataset_folder} does not exist")
        #### Read paths and UTM coordinates for all images.
        gallery_folder = join(self.dataset_folder, "gallery")
        queries_folder = join(self.dataset_folder, "queries")
        if not os.path.exists(gallery_folder): raise FileNotFoundError(f"Folder {gallery_folder} does not exist")
        if not os.path.exists(queries_folder) : raise FileNotFoundError(f"Folder {queries_folder} does not exist")
        self.gallery_paths = sorted(glob(join(gallery_folder, "**", "*.jpg"), recursive=True))
        self.queries_paths = sorted(glob(join(queries_folder, "**", "*.jpg"), recursive=True))
        # The format must be path/to/file/@utm_easting@utm_northing@...@.jpg
        # FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin float is the documented replacement (same dtype: float64).
        self.gallery_utms = np.array([(path.split("@")[1], path.split("@")[2]) for path in self.gallery_paths]).astype(float)
        self.queries_utms = np.array([(path.split("@")[1], path.split("@")[2]) for path in self.queries_paths]).astype(float)
        # Find soft_positives_per_query, which are within positive_dist_threshold (default 25 meters)
        knn = NearestNeighbors(n_jobs=-1)
        knn.fit(self.gallery_utms)
        self.soft_positives_per_query = knn.radius_neighbors(self.queries_utms,
                                                             radius=positive_dist_threshold,
                                                             return_distance=False)
        self.images_paths = list(self.gallery_paths) + list(self.queries_paths)
        self.gallery_num = len(self.gallery_paths)
        self.queries_num = len(self.queries_paths)

    def __getitem__(self, index):
        """Return the transformed image at ``index`` together with its index."""
        image_path = self.images_paths[index]
        img = datasets_util.open_image_and_apply_transform(image_path)
        return img, index

    def __len__(self):
        """Total number of images (gallery followed by queries)."""
        return len(self.images_paths)

    def __repr__(self):
        return (f"< {self.__class__.__name__}, {self.dataset_name} - #gallery: {self.gallery_num}; #queries: {self.queries_num} >")

    def get_positives(self):
        """Return, per query, the indices of gallery images within the distance threshold."""
        return self.soft_positives_per_query
|
[
"sklearn.neighbors.NearestNeighbors",
"os.path.join",
"datasets_util.open_image_and_apply_transform",
"os.path.exists"
] |
[((1037, 1089), 'os.path.join', 'join', (['datasets_folder', 'dataset_name', '"""images"""', 'split'], {}), "(datasets_folder, dataset_name, 'images', split)\n", (1041, 1089), False, 'from os.path import join\n'), ((1308, 1344), 'os.path.join', 'join', (['self.dataset_folder', '"""gallery"""'], {}), "(self.dataset_folder, 'gallery')\n", (1312, 1344), False, 'from os.path import join\n'), ((1371, 1407), 'os.path.join', 'join', (['self.dataset_folder', '"""queries"""'], {}), "(self.dataset_folder, 'queries')\n", (1375, 1407), False, 'from os.path import join\n'), ((2299, 2326), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {'n_jobs': '(-1)'}), '(n_jobs=-1)\n', (2315, 2326), False, 'from sklearn.neighbors import NearestNeighbors\n'), ((2911, 2967), 'datasets_util.open_image_and_apply_transform', 'datasets_util.open_image_and_apply_transform', (['image_path'], {}), '(image_path)\n', (2955, 2967), False, 'import datasets_util\n'), ((1105, 1140), 'os.path.exists', 'os.path.exists', (['self.dataset_folder'], {}), '(self.dataset_folder)\n', (1119, 1140), False, 'import os\n'), ((1423, 1453), 'os.path.exists', 'os.path.exists', (['gallery_folder'], {}), '(gallery_folder)\n', (1437, 1453), False, 'import os\n'), ((1537, 1567), 'os.path.exists', 'os.path.exists', (['queries_folder'], {}), '(queries_folder)\n', (1551, 1567), False, 'import os\n'), ((1678, 1713), 'os.path.join', 'join', (['gallery_folder', '"""**"""', '"""*.jpg"""'], {}), "(gallery_folder, '**', '*.jpg')\n", (1682, 1713), False, 'from os.path import join\n'), ((1774, 1809), 'os.path.join', 'join', (['queries_folder', '"""**"""', '"""*.jpg"""'], {}), "(queries_folder, '**', '*.jpg')\n", (1778, 1809), False, 'from os.path import join\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from collections import defaultdict
from typing import Dict, Set, List, Tuple
from time import time
from subprocess import run, PIPE, TimeoutExpired
from random import randint
import re
class NetworkError(RuntimeError):
    """Base class for any error raised by this module."""
class NetworkInitializationError(NetworkError):
    """Base class for any error raised during set initialization."""
class SudoNotFoundError(NetworkInitializationError):
    """sudo was not found, is it installed?"""
class IpsetNotFoundError(NetworkInitializationError):
    """ipset was not found, is it installed?"""
class PermissionDeniedError(NetworkInitializationError):
    """Permission request timed out or was denied; verify /etc/sudoers?"""
class SetExistError(NetworkInitializationError):
    """A set with the same name but different properties already exists."""
class UnknownInitializationError(NetworkInitializationError):
    """Unclassified initialization failure (raw ipset stderr in the message)."""
class GenericNetworkError(NetworkError):
    """Unclassified runtime failure (raw ipset stderr in the message)."""
class InvalidAddressError(NetworkError):
    """The address provided was invalid."""
class UnknownAddress(NetworkError):
    """No corresponding address was found."""
class NotInSetError(NetworkError):
    """Cannot query or delete this mac: it is not in the set."""
class FeatureDisabledError(NetworkError):
    """A feature required for this method is disabled."""
class Ipset:
#name: str
#counter: bool
#skbinfo: bool
#mark_start: int
#mark_mod: int
#next_add: int
#multi_vpn_mark: int
#user_logs: List[Tuple[time, Dict[str, (int, int)]]]
#vpn_logs: List[Tuple[time, Dict[int, (int, int)]]]
#last_user_measure: Dict[str, (int,int)]
#last_vpn_measure: Dict[int, (int,int)]
    def __init__(self, name="langate", default_timeout=0, counter=True, marking=True, mark=(0, 1),
                 multi_vpn_mark=4294967295, fixed_vpn_timeout=30):
        """
        Create a new ipset set (plus companion sets) with the provided arguments.
        Equivalent to the command :
        'sudo ipset create langate hash:mac -exist hashsize 4096 timeout 0 counters skbinfo"

        Raises SudoNotFoundError, IpsetNotFoundError, PermissionDeniedError,
        SetExistError or UnknownInitializationError on failure.

        :param name: Name of the set.
        :param default_timeout: Timeout by default (0 = no expiry).
        :param counter: Enable bandwidth counters.
        :param marking: Enable packet marking (skbinfo).
        :param mark: First mark and number of marks to use. Unused if marking=False.
        :param multi_vpn_mark: Mark for devices mapped to multiple marks. Unused if marking=False.
        :param fixed_vpn_timeout: Number of seconds without network usage before a user may change of vpn.
        """
        self.name = name
        self.timeout = default_timeout
        self.counter = counter
        self.skbinfo = marking
        (self.mark_start, self.mark_mod) = mark
        self.next_add = 0  # next mark offset to hand out (see connect_user)
        self.multi_vpn_mark = multi_vpn_mark
        if self.counter:
            # per-user bandwidth log entries and last raw byte counters
            self.user_logs = list()
            self.last_user_measure = defaultdict(lambda: [0,0], {})
        if self.skbinfo:
            # per-vpn (per-mark) bandwidth log entries and last raw byte counters
            self.vpn_logs = list()
            self.last_vpn_measure = defaultdict(lambda: [0,0], {})
        creation_args = ["sudo", "ipset", "create", self.name, "hash:mac",
                         "-exist", "hashsize", "4096", "timeout", str(default_timeout)]
        if self.counter:
            creation_args.append("counters")
        if self.skbinfo:
            creation_args.append("skbinfo")
        try:
            result = run(creation_args, stderr=PIPE, timeout=2)
        except FileNotFoundError:
            raise SudoNotFoundError("sudo was not found")
        except TimeoutExpired:
            # sudo blocked on a password prompt: permission was not pre-granted
            raise PermissionDeniedError("permission request timed out")
        if result.returncode != 0:
            # ipset reports failures on stderr; classify the most common ones
            if re.match(r'.*command not found.*', result.stderr.decode("UTF-8")):
                raise IpsetNotFoundError("ipset was not found")
            if re.match(r'.*set with the same name already exists.*', result.stderr.decode("UTF-8")):
                raise SetExistError("set with same name but different settings already exist")
            raise UnknownInitializationError(result.stderr.decode("UTF-8"))
        if self.counter:
            # companion set counting download traffic (matched on destination MAC)
            creation_args = ["sudo", "ipset", "create", self.name + "-download" , "hash:mac",
                             "-exist", "hashsize", "4096", "timeout", str(default_timeout), "counters"]
            result = run(creation_args, stderr=PIPE, timeout=2)
            if result.returncode != 0:
                raise UnknownInitializationError(result.stderr.decode("UTF-8"))
        if self.counter and self.skbinfo:
            # short-lived "recent activity" set, used to decide when a user may switch vpn
            # CMD=sudo ipset create langate-recent hash:mac -exist hashsize 4096 timeout 60
            creation_args = ["sudo", "ipset", "create", self.name + "-recent", "hash:mac", "-exist", "hashsize", "4096",
                             "timeout", str(fixed_vpn_timeout)]
            result = run(creation_args, stderr=PIPE, timeout=2)
            if result.returncode != 0:
                raise UnknownInitializationError(result.stderr.decode("UTF-8"))
def generate_iptables(self, match_internal: str = "-s 172.16.0.0/255.252.0.0", stop: bool = False) -> str:
if stop:
A="D"
else:
A="A"
res = "# Portal rules :\n"
res+= "iptables -" + A + " FORWARD " + match_internal + " -m set ! --match-set " + self.name + " src ! --update-counters -j REJECT\n"
res+= "iptables -t nat -" + A + " PREROUTING " + match_internal + " -p tcp --dport 80 -m set ! --match-set " + self.name + " src ! --update-counters -j REDIRECT --to-ports 80\n"
res+= "\n\n# Marking and accounting rules\n"
# accounting is done automatically when no ! --update-counters is provided
res+= "iptables -t mangle -" + A + " PREROUTING -m set --match-set " + self.name + " src ! --update-counters -j SET --map-set " + self.name + " src --map-mark\n"
res+= "iptables -t mangle -" + A + " PREROUTING -m set --match-set " + self.name + " src -j SET --add-set " + self.name + "-recent src --exist\n"
res+= "iptables -t mangle -" + A + " PREROUTING -m set --match-set " + self.name + "-download dst -j SET --add-set " + self.name + "-recent dst --exist\n"
res+= "\n\n# Map single device to all vpns\n"
res+= "iptables -t mangle -" + A + " PREROUTING -m mark --mark " + str(self.multi_vpn_mark) + " -j HMARK --hmark-tuple src,dst,sport,dport "
res+= "--hmark-offset " + str(self.mark_start) + " --hmark-mod " + str(self.mark_mod) + " --hmark-rnd " + str(randint(0, 2**32))
return res
def connect_user(self, mac: str, timeout: int = None, mark: int = None, counter: Tuple[int,int] = (0,0), multi_vpn: bool = False):
"""
Add a user to the set.
Equivalent to the command :
'sudo ipset add langate 00:00:00:00:00:01 -exist timeout 0 skbmark 0'
:param mac: Mac of the user.
:param timeout: (Optional) Timeout after which the user will be disconnected.
:param mark: Mark to use for this entry, None for automatic
:param counter: Value to initialize bandwith counter to
:param multi_vpn: True if device should be mapped to multiple vpn (caching server...)
"""
if not verify_mac(mac):
raise InvalidAddressError("'{}' is not a valid mac address".format(mac))
connect_args = ["sudo", "ipset", "add", self.name, mac, "-exist"]
if timeout is not None:
connect_args.append("timeout")
connect_args.append(str(timeout))
if self.counter:
connect_args.append("bytes")
connect_args.append(str(counter[1]))
elif counter != 0:
raise FeatureDisabledError("Feature counter is disabled for this set")
if self.skbinfo:
connect_args.append("skbmark")
if multi_vpn:
connect_args.append(hex(self.multi_vpn_mark))
elif mark is None:
connect_args.append(hex(self.next_add + self.mark_start))
self.next_add = (self.next_add + 1) % self.mark_mod
else:
connect_args.append(hex(mark))
elif mark is not None:
raise FeatureDisabledError("Feature skbinfo is disabled for this set")
result = run(connect_args, stderr=PIPE, timeout=2)
if result.returncode != 0:
raise GenericNetworkError(result.stderr.decode("UTF-8"))
if self.counter:
connect_args = ["sudo", "ipset", "add", self.name + "-download", mac, "-exist"]
if timeout is not None:
connect_args.append("timeout")
connect_args.append(str(timeout))
connect_args.append("bytes")
connect_args.append(str(counter[0]))
result = run(connect_args, stderr=PIPE, timeout=2)
if result.returncode != 0:
raise GenericNetworkError(result.stderr.decode("UTF-8"))
def disconnect_user(self, mac: str):
"""
Remove a user from the set.
Equivalent to the command :
'sudo ipset del langate 00:00:00:00:00:01'
:param mac: Mac of the user.
"""
if not verify_mac(mac):
raise InvalidAddressError("'{}' is not a valid mac address".format(mac))
disconnect_args = ["sudo", "ipset", "del", self.name, mac, "-exist"]
result = run(disconnect_args, stderr=PIPE, timeout=2)
if result.returncode != 0:
raise GenericNetworkError(result.stderr.decode("UTF-8"))
if self.counter:
disconnect_args = ["sudo", "ipset", "del", self.name + "-download", mac, "-exist"]
result = run(disconnect_args, stderr=PIPE, timeout=2)
    def get_user_info(self, mac: str) -> Tuple[bool, Tuple[int, int], int]:
        """
        Get a user's information from his mac address.
        Obtained by the commands :
        'sudo ipset test langate 00:00:00:00:00:01 -q'
        and 'sudo ipset list langate | grep 00:00:00:00:00:01'
        :param mac: Mac address of the user.
        :return: (bool:1, (int:2,int:3) ,int:4) with
            1 : if the user is connected,
            2 : how much bytes were transfered in download
            3 : how much bytes were transfered in upload
            and 4 : what mark is used for the entry.
        """
        if not verify_mac(mac):
            raise InvalidAddressError("'{}' is not a valid mac address".format(mac))
        # 'ipset test' exits non-zero when the entry is absent
        test_args = ["sudo", "ipset", "test", self.name, mac, "-q"]
        result = run(test_args, timeout=2)
        if result.returncode != 0:
            return False, (0,0), 0
        if not self.counter and not self.skbinfo:
            # nothing more to report when neither counters nor marking is enabled
            return True, (0,0), 0
        list_args = ["sudo", "ipset", "list", self.name]
        result = run(list_args, stdout=PIPE, stderr=PIPE, timeout=2)
        if result.returncode != 0:
            raise GenericNetworkError(result.stderr.decode("UTF-8"))
        out = result.stdout.decode("UTF-8")
        # NOTE(review): if the listing unexpectedly contains no line for this mac,
        # res keeps its None placeholders and the method returns None fields.
        res = (None, None, None)
        for line in out.splitlines():
            # ipset prints MAC addresses in upper case, one entry per line
            if re.match(mac.upper() + '.*', line):
                byte = re.search('bytes ([0-9]+)', line)
                if byte:
                    byte = int(byte.group(1))
                else:
                    byte = 0
                skbmark = re.search('skbmark (0x[0-9]+)', line)
                if skbmark:
                    skbmark = int(skbmark.group(1), 16)
                else:
                    skbmark = 0
                # bytes from the main set are the upload counter
                res = (True, byte, skbmark)
        if self.counter:
            # download bytes live in the companion "-download" set
            list_args = ["sudo", "ipset", "list", self.name + "-download"]
            result = run(list_args, stdout=PIPE, stderr=PIPE, timeout=2)
            if result.returncode != 0:
                raise GenericNetworkError(result.stderr.decode("UTF-8"))
            out = result.stdout.decode("UTF-8")
            down = None
            for line in out.splitlines():
                if re.match(mac.upper() + '.*', line):
                    byte = re.search('bytes ([0-9]+)', line)
                    if byte:
                        down = int(byte.group(1))
                    else:
                        down = 0
            full_res = (res[0], (down, res[1]), res[2])
        else:
            full_res = (res[0], (None, res[1]), res[2])
        return full_res
def clear(self):
"""
Clear the set, by removing all entry from it. Equivalent to the command :
'sudo ipset flush langate'
"""
clear_args = ["sudo", "ipset", "flush", self.name]
result = run(clear_args, stderr=PIPE, timeout=2)
if result.returncode != 0:
raise GenericNetworkError(result.stderr.decode("UTF-8"))
if self.counter:
clear_args = ["sudo", "ipset", "flush", self.name + "-download"]
result = run(clear_args, stderr=PIPE, timeout=2)
if result.returncode != 0:
raise GenericNetworkError(result.stderr.decode("UTF-8"))
def get_all_connected(self) -> Dict[str, Tuple[Tuple[int, int], int]]:
"""
Get all entries from the set, with how much bytes they transferred and what is their mark.
Equivalent to the command : 'sudo ipset list langate"
:return: Dictionary mapping device MAC to their bandwith usage (down and up) and mark
"""
list_args = ["sudo", "ipset", "list", self.name]
result = run(list_args, stdout=PIPE, stderr=PIPE, timeout=2)
if result.returncode != 0:
raise GenericNetworkError(result.stderr.decode("UTF-8"))
out = result.stdout.decode("UTF-8")
res = dict()
for line in out.splitlines():
if re.match('([0-9A-F]{2}:){5}[0-9A−F]{2}.*', line):
mac = re.search('(([0-9A-F]{2}:){5}[0-9A−F]{2})', line).group(1)
byte = re.search(r'bytes ([0-9]+)', line)
if byte:
byte = int(byte.group(1))
else:
byte = 0
skbmark = re.search('skbmark (0x[0-9]+)', line)
if skbmark:
skbmark = int(skbmark.group(1), 16)
else:
skbmark = 0
res[mac] = (byte, skbmark)
if self.counter:
list_args = ["sudo", "ipset", "list", self.name + "-download"]
result = run(list_args, stdout=PIPE, stderr=PIPE, timeout=2)
if result.returncode != 0:
raise GenericNetworkError(result.stderr.decode("UTF-8"))
out = result.stdout.decode("UTF-8")
full_res = dict()
for line in out.splitlines():
if re.match('([0-9A-F]{2}:){5}[0-9A−F]{2}.*', line):
mac = re.search('(([0-9A-F]{2}:){5}[0-9A−F]{2})', line).group(1)
byte = re.search(r'bytes ([0-9]+)', line)
if byte:
byte = int(byte.group(1))
else:
byte = 0
full_res[mac] = ((byte, res[mac][0]), res[mac][1])
else:
full_res = dict()
for k in res:
full_res[k] = ((0, res[mac][0]), res[mac][1])
return res
def delete(self):
    """
    Delete the set. Equivalent to the command :
    'sudo ipset destroy langate'

    :raises GenericNetworkError: if an ipset command fails.
    """
    # Destroy the main set first, then the download counter set if enabled.
    targets = [self.name]
    if self.counter:
        targets.append(self.name + "-download")
    for set_name in targets:
        result = run(["sudo", "ipset", "destroy", set_name], stderr=PIPE, timeout=2)
        if result.returncode != 0:
            raise GenericNetworkError(result.stderr.decode("UTF-8"))
# add an entry to internal log
def log_statistics(self):
    """
    Add an entry to the internal bandwidth log. Equivalent to the command :
    'sudo ipset list langate'

    :raises FeatureDisabledError: if the counter feature is disabled.
    :raises GenericNetworkError: if an ipset command fails.
    """
    if not self.counter:
        raise FeatureDisabledError("Feature counter is disabled for this set")
    # Upload counters live in the main set.
    list_args = ["sudo", "ipset", "list", self.name]
    result = run(list_args, stdout=PIPE, stderr=PIPE, timeout=2)
    if result.returncode != 0:
        raise GenericNetworkError(result.stderr.decode("UTF-8"))
    out_u = result.stdout.decode("UTF-8")
    # BUG FIX: download counters live in the companion "-download" set; the
    # original listed the main (upload) set a second time here.
    list_args = ["sudo", "ipset", "list", self.name + "-download"]
    result = run(list_args, stdout=PIPE, stderr=PIPE, timeout=2)
    if result.returncode != 0:
        raise GenericNetworkError(result.stderr.decode("UTF-8"))
    out_d = result.stdout.decode("UTF-8")
    current_time = time()
    # Per-user deltas since the previous call: [download, upload].
    user_log = defaultdict(lambda: [0, 0], {})
    if self.skbinfo:
        # Per-vpn byte totals for this interval: [download, upload].
        vpn_log = defaultdict(lambda: [0, 0], {})
    for line in out_u.splitlines():
        # BUG FIX: 'A-F' is now an ASCII range (the original used a Unicode
        # minus, matching far more than hex digits).
        if re.match('([0-9A-F]{2}:){5}[0-9A-F]{2}.*', line):
            mac = re.search("(([0-9A-F]{2}:){5}[0-9A-F]{2})", line).group(1)
            byte = int(re.search('bytes ([0-9]+)', line).group(1))
            if mac in self.last_user_measure:
                user_log[mac][1] = byte - self.last_user_measure[mac][1]
            else:
                user_log[mac][1] = byte
            self.last_user_measure[mac][1] = byte
            if self.skbinfo:
                vpn = re.search('skbmark (0x[0-9]+)', line)
                if vpn:
                    vpn = int(vpn.group(1), 16)
                    # Temporarily stash the user's vpn mark in slot 0; the
                    # download loop below reads it before overwriting the
                    # slot with the download delta.
                    user_log[mac][0] = vpn
                else:
                    continue
                vpn_log[vpn][1] += byte
    for line in out_d.splitlines():
        if re.match('([0-9A-F]{2}:){5}[0-9A-F]{2}.*', line):
            mac = re.search("(([0-9A-F]{2}:){5}[0-9A-F]{2})", line).group(1)
            byte = int(re.search('bytes ([0-9]+)', line).group(1))
            if self.skbinfo:
                # user_log[mac][0] still holds the vpn mark at this point.
                vpn_log[user_log[mac][0]][0] += byte
            if mac in self.last_user_measure:
                user_log[mac][0] = byte - self.last_user_measure[mac][0]
            else:
                user_log[mac][0] = byte
            self.last_user_measure[mac][0] = byte
    # user_logs: List[Tuple[time, Dict[str, (int, int)]]]
    self.user_logs.append((current_time, user_log))
    if self.skbinfo:
        for vpn in vpn_log:
            # Convert this interval's absolute totals into deltas, then
            # fold the deltas back into the running totals.
            vpn_log[vpn][0] -= self.last_vpn_measure[vpn][0]
            vpn_log[vpn][1] -= self.last_vpn_measure[vpn][1]
            self.last_vpn_measure[vpn][0] += vpn_log[vpn][0]
            self.last_vpn_measure[vpn][1] += vpn_log[vpn][1]
        self.vpn_logs.append((current_time, vpn_log))
def get_users_logs(self) -> List[Tuple[time, Dict[str, Tuple[int, int]]]]:
    """
    Get logs by users, sorted by date.

    :return: List sorted by date of tuples (date, {mac: bandwidth used since previous entry}).
    :raises FeatureDisabledError: if the counter feature is disabled.
    """
    if self.counter:
        return self.user_logs
    raise FeatureDisabledError("Feature counter is disabled for this set")
def get_vpn_logs(self) -> List[Tuple[time, Dict[int, Tuple[int, int]]]]:
    """
    Get logs by vpn, sorted by date.

    :return: List sorted by date of tuples (date, {vpn mark: bandwidth used since previous entry}).
    :raises FeatureDisabledError: if the counter or skbinfo feature is disabled.
    """
    # Both features are required; check them in the same order as elsewhere.
    for feature, enabled in (("counter", self.counter), ("skbinfo", self.skbinfo)):
        if not enabled:
            raise FeatureDisabledError("Feature %s is disabled for this set" % feature)
    return self.vpn_logs
def clear_logs(self, after=None):
    """
    Clear internal logs (logs are never cleared otherwise, taking memory indefinitely).

    :param after: Time after which the cleaning must be done, now if not set.
    :raises FeatureDisabledError: if the counter feature is disabled.
    """
    if not self.counter:
        raise FeatureDisabledError("Feature counter is disabled for this set")
    # BUG FIX: the default used to be 'after=time()', which Python evaluates
    # once at definition time, so "now" was frozen to module import time.
    if after is None:
        after = time()
    # Binary search for the first log entry timestamped at or past 'after'.
    lo, hi = 0, len(self.user_logs)
    while lo < hi:
        mid = (lo + hi) // 2
        if self.user_logs[mid][0] < after:
            lo = mid + 1
        else:
            hi = mid
    self.user_logs = self.user_logs[lo:]
    if self.skbinfo:
        self.vpn_logs = self.vpn_logs[lo:]
def try_balance(self):
    """
    Try to auto-balance vpn usage by switching some users of vpn.

    :raises FeatureDisabledError: if the counter or skbinfo feature is disabled.
    :raises GenericNetworkError: if an ipset command fails.
    """
    if not (self.counter and self.skbinfo):
        raise FeatureDisabledError("Feature counter or skbinfo is disabled for this set")
    # get users recent network usage
    logs = self.get_users_logs()
    if not logs:
        return
    # BUG FIX: the original computed 'logs[-1][1][0] + logs[-1][1][1]', which
    # indexes the per-user dict with the keys 0 and 1 and concatenates two
    # default [0, 0] lists. What the rest of the method needs is the total
    # (down + up) usage per MAC taken from the most recent log entry.
    latest = logs[-1][1]
    usage = defaultdict(int)
    for mac, counters in latest.items():
        usage[mac] = counters[0] + counters[1]
    user_info = self.get_all_connected()
    all_connected = set(user_info)
    # get all fixed users (those recently seen, which we must not move)
    list_args = ["sudo", "ipset", "list", self.name + "-recent"]
    result = run(list_args, stdout=PIPE, stderr=PIPE, timeout=2)
    if result.returncode != 0:
        raise GenericNetworkError(result.stderr.decode("UTF-8"))
    out = result.stdout.decode("UTF-8")
    fixed = set()
    for line in out.splitlines():
        # ASCII 'A-F' range (the original used a Unicode minus here).
        if re.match('([0-9A-F]{2}:){5}[0-9A-F]{2}.*', line):
            fixed.add(re.search("(([0-9A-F]{2}:){5}[0-9A-F]{2})", line).group(1))
    can_move = all_connected - fixed
    vpn_usage = defaultdict(lambda: 0, {})
    for player in fixed:
        vpn_usage[user_info[player][1]] += usage[player]
    # Assign the heaviest movable users first, always to the least-used vpn.
    can_move = sorted(can_move, key=lambda p: usage[p], reverse=True)
    vpn = sorted(vpn_usage, key=lambda v: vpn_usage[v])
    for player in can_move:
        vpn_usage[vpn[0]] += usage[player]
        self.set_vpn(player, vpn[0])
        vpn = sorted(vpn, key=lambda v: vpn_usage[v])
def get_balance(self) -> Dict[int, Set[str]]:
    """
    Get current mapping of vpn and mac (each entry contains the vpn number,
    with who is connected to it).

    :return: Dictionary composed of vpn and set of mac addresses
    """
    balance = {}
    for mac, info in self.get_all_connected().items():
        # info[1] is the skbmark identifying the user's vpn.
        balance.setdefault(info[1], set()).add(mac)
    return balance
def set_vpn(self, mac: str, vpn: int):
    """
    Move an user to a new vpn.
    :param mac: Mac address of the user.
    :param vpn: Vpn where move the user to.
    """
    if not verify_mac(mac):
        raise InvalidAddressError("'{}' is not a valid mac address".format(mac))
    if not self.skbinfo:
        raise FeatureDisabledError("Feature skbinfo is disabled for this set")
    # NOTE(review): get_user_info appears to return (present, byte_counter,
    # current_vpn) -- confirm against its definition elsewhere in this module.
    user_info = self.get_user_info(mac)
    if user_info[0]:
        old_vpn = user_info[2]
        byte = user_info[1]
        # Re-register the user with the new mark, preserving its counters.
        self.connect_user(mac, mark=vpn, counter=byte)
        if self.counter:
            # Move the user's accumulated measures (index 0 and 1 of the
            # per-entity [two-counter] lists) from the old vpn's running
            # totals to the new vpn's.
            self.last_vpn_measure[old_vpn][0] = self.last_vpn_measure[old_vpn][0] \
                - self.last_user_measure[mac][0]
            self.last_vpn_measure[old_vpn][1] = self.last_vpn_measure[old_vpn][1] \
                - self.last_user_measure[mac][1]
            self.last_vpn_measure[vpn][0] = self.last_vpn_measure[vpn][0] \
                + self.last_user_measure[mac][0]
            self.last_vpn_measure[vpn][1] = self.last_vpn_measure[vpn][1] \
                + self.last_user_measure[mac][1]
    else:
        raise NotInSetError("'{}' is not in the set")
def verify_mac(mac: str) -> bool:
    """
    Verify if mac address is correctly formed.

    :param mac: Mac address to verify.
    :return: True if correctly formed, False if not.
    """
    # BUG FIX: re.match with '$' also matches just before a trailing newline,
    # so "aa:bb:cc:dd:ee:ff\n" used to validate; re.fullmatch does not.
    return re.fullmatch(r'([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}', mac) is not None
def verify_ip(ip: str) -> bool:
    """
    Verify if ip address is correctly formed.

    :param ip: Ip address to verify.
    :return: True if correctly formed, False if not.
    """
    # NOTE(review): octets are only checked for 1-3 digits (e.g. "999.0.0.1"
    # passes), matching the original behaviour; tightening to 0-255 would
    # reject inputs callers may currently accept.
    # BUG FIX: re.fullmatch avoids re.match's '$'-before-trailing-newline
    # acceptance ("1.2.3.4\n" used to validate).
    return re.fullmatch(r'([0-9]{1,3}\.){3}[0-9]{1,3}', ip) is not None
def get_ip(mac: str) -> str:
    """
    Get the ip address associated with a given mac address.

    :param mac: Mac address of the user.
    :return: Ip address of the user.
    :raises InvalidAddressError: if 'mac' is not a well-formed mac address.
    :raises UnknownAddress: if no arp entry matches 'mac'.
    """
    if not verify_mac(mac):
        raise InvalidAddressError("'{}' is not a valid mac address".format(mac))
    # BUG FIX: the file handle was previously never closed (resource leak).
    with open('/proc/net/arp', 'r') as f:
        lines = f.readlines()[1:]  # skip the header line
    for line in lines:
        if line.startswith(mac, 41):  # 41 = column offset of the HW address in /proc/net/arp
            return line.split(' ')[0]  # first column is the IP address
    raise UnknownAddress("'{}' does not have a known ip".format(mac))
# get mac from ip
def get_mac(ip: str) -> str:
    """
    Get the mac address associated with a given ip address.

    :param ip: Ip address of the user.
    :return: Mac address of the user.
    :raises InvalidAddressError: if 'ip' is not a well-formed ip address.
    :raises UnknownAddress: if no arp entry matches 'ip'.
    """
    if not verify_ip(ip):
        raise InvalidAddressError("'{}' is not a valid ip address".format(ip))
    # BUG FIX: the file handle was previously never closed (resource leak).
    with open('/proc/net/arp', 'r') as f:
        lines = f.readlines()[1:]  # skip the header line
    for line in lines:
        if line.startswith(ip, 0):  # IP is the first column of each line
            return line[41:].split(' ')[0]  # 41 = offset of the HW address column
    raise UnknownAddress("'{}' does not have a known mac".format(ip))
|
[
"subprocess.run",
"random.randint",
"re.match",
"time.time",
"collections.defaultdict",
"re.search"
] |
[((8302, 8343), 'subprocess.run', 'run', (['connect_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(connect_args, stderr=PIPE, timeout=2)\n', (8305, 8343), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((9402, 9446), 'subprocess.run', 'run', (['disconnect_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(disconnect_args, stderr=PIPE, timeout=2)\n', (9405, 9446), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((10533, 10558), 'subprocess.run', 'run', (['test_args'], {'timeout': '(2)'}), '(test_args, timeout=2)\n', (10536, 10558), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((10789, 10840), 'subprocess.run', 'run', (['list_args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'timeout': '(2)'}), '(list_args, stdout=PIPE, stderr=PIPE, timeout=2)\n', (10792, 10840), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((12578, 12617), 'subprocess.run', 'run', (['clear_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(clear_args, stderr=PIPE, timeout=2)\n', (12581, 12617), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((13428, 13479), 'subprocess.run', 'run', (['list_args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'timeout': '(2)'}), '(list_args, stdout=PIPE, stderr=PIPE, timeout=2)\n', (13431, 13479), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((15453, 15492), 'subprocess.run', 'run', (['clear_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(clear_args, stderr=PIPE, timeout=2)\n', (15456, 15492), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((16253, 16304), 'subprocess.run', 'run', (['list_args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'timeout': '(2)'}), '(list_args, stdout=PIPE, stderr=PIPE, timeout=2)\n', (16256, 16304), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((16529, 16580), 'subprocess.run', 'run', (['list_args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'timeout': '(2)'}), '(list_args, stdout=PIPE, stderr=PIPE, timeout=2)\n', (16532, 16580), False, 
'from subprocess import run, PIPE, TimeoutExpired\n'), ((16754, 16760), 'time.time', 'time', ([], {}), '()\n', (16758, 16760), False, 'from time import time\n'), ((16780, 16812), 'collections.defaultdict', 'defaultdict', (['(lambda : [0, 0])', '{}'], {}), '(lambda : [0, 0], {})\n', (16791, 16812), False, 'from collections import defaultdict\n'), ((19817, 19823), 'time.time', 'time', ([], {}), '()\n', (19821, 19823), False, 'from time import time\n'), ((24361, 24414), 're.match', 're.match', (['"""^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$"""', 'mac'], {}), "('^([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$', mac)\n", (24369, 24414), False, 'import re\n'), ((24620, 24666), 're.match', 're.match', (['"""^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$"""', 'ip'], {}), "('^([0-9]{1,3}\\\\.){3}[0-9]{1,3}$', ip)\n", (24628, 24666), False, 'import re\n'), ((2952, 2984), 'collections.defaultdict', 'defaultdict', (['(lambda : [0, 0])', '{}'], {}), '(lambda : [0, 0], {})\n', (2963, 2984), False, 'from collections import defaultdict\n'), ((3459, 3501), 'subprocess.run', 'run', (['creation_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(creation_args, stderr=PIPE, timeout=2)\n', (3462, 3501), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((4395, 4437), 'subprocess.run', 'run', (['creation_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(creation_args, stderr=PIPE, timeout=2)\n', (4398, 4437), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((4897, 4939), 'subprocess.run', 'run', (['creation_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(creation_args, stderr=PIPE, timeout=2)\n', (4900, 4939), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((8810, 8851), 'subprocess.run', 'run', (['connect_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(connect_args, stderr=PIPE, timeout=2)\n', (8813, 8851), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((9694, 9738), 'subprocess.run', 'run', (['disconnect_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), 
'(disconnect_args, stderr=PIPE, timeout=2)\n', (9697, 9738), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((11657, 11708), 'subprocess.run', 'run', (['list_args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'timeout': '(2)'}), '(list_args, stdout=PIPE, stderr=PIPE, timeout=2)\n', (11660, 11708), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((12845, 12884), 'subprocess.run', 'run', (['clear_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(clear_args, stderr=PIPE, timeout=2)\n', (12848, 12884), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((13702, 13750), 're.match', 're.match', (['"""([0-9A-F]{2}:){5}[0-9A−F]{2}.*"""', 'line'], {}), "('([0-9A-F]{2}:){5}[0-9A−F]{2}.*', line)\n", (13710, 13750), False, 'import re\n'), ((14379, 14430), 'subprocess.run', 'run', (['list_args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'timeout': '(2)'}), '(list_args, stdout=PIPE, stderr=PIPE, timeout=2)\n', (14382, 14430), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((15722, 15761), 'subprocess.run', 'run', (['clear_args'], {'stderr': 'PIPE', 'timeout': '(2)'}), '(clear_args, stderr=PIPE, timeout=2)\n', (15725, 15761), False, 'from subprocess import run, PIPE, TimeoutExpired\n'), ((16858, 16890), 'collections.defaultdict', 'defaultdict', (['(lambda : [0, 0])', '{}'], {}), '(lambda : [0, 0], {})\n', (16869, 16890), False, 'from collections import defaultdict\n'), ((16944, 16992), 're.match', 're.match', (['"""([0-9A-F]{2}:){5}[0-9A−F]{2}.*"""', 'line'], {}), "('([0-9A-F]{2}:){5}[0-9A−F]{2}.*', line)\n", (16952, 16992), False, 'import re\n'), ((17777, 17825), 're.match', 're.match', (['"""([0-9A-F]{2}:){5}[0-9A−F]{2}.*"""', 'line'], {}), "('([0-9A-F]{2}:){5}[0-9A−F]{2}.*', line)\n", (17785, 17825), False, 'import re\n'), ((21098, 21149), 'subprocess.run', 'run', (['list_args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'timeout': '(2)'}), '(list_args, stdout=PIPE, stderr=PIPE, timeout=2)\n', (21101, 21149), False, 'from subprocess 
import run, PIPE, TimeoutExpired\n'), ((21637, 21664), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)', '{}'], {}), '(lambda : 0, {})\n', (21648, 21664), False, 'from collections import defaultdict\n'), ((3091, 3123), 'collections.defaultdict', 'defaultdict', (['(lambda : [0, 0])', '{}'], {}), '(lambda : [0, 0], {})\n', (3102, 3123), False, 'from collections import defaultdict\n'), ((6545, 6564), 'random.randint', 'randint', (['(0)', '(2 ** 32)'], {}), '(0, 2 ** 32)\n', (6552, 6564), False, 'from random import randint\n'), ((11134, 11167), 're.search', 're.search', (['"""bytes ([0-9]+)"""', 'line'], {}), "('bytes ([0-9]+)', line)\n", (11143, 11167), False, 'import re\n'), ((11316, 11353), 're.search', 're.search', (['"""skbmark (0x[0-9]+)"""', 'line'], {}), "('skbmark (0x[0-9]+)', line)\n", (11325, 11353), False, 'import re\n'), ((13856, 13889), 're.search', 're.search', (['"""bytes ([0-9]+)"""', 'line'], {}), "('bytes ([0-9]+)', line)\n", (13865, 13889), False, 'import re\n'), ((14039, 14076), 're.search', 're.search', (['"""skbmark (0x[0-9]+)"""', 'line'], {}), "('skbmark (0x[0-9]+)', line)\n", (14048, 14076), False, 'import re\n'), ((14682, 14730), 're.match', 're.match', (['"""([0-9A-F]{2}:){5}[0-9A−F]{2}.*"""', 'line'], {}), "('([0-9A-F]{2}:){5}[0-9A−F]{2}.*', line)\n", (14690, 14730), False, 'import re\n'), ((21397, 21445), 're.match', 're.match', (['"""([0-9A-F]{2}:){5}[0-9A−F]{2}.*"""', 'line'], {}), "('([0-9A-F]{2}:){5}[0-9A−F]{2}.*', line)\n", (21405, 21445), False, 'import re\n'), ((12017, 12050), 're.search', 're.search', (['"""bytes ([0-9]+)"""', 'line'], {}), "('bytes ([0-9]+)', line)\n", (12026, 12050), False, 'import re\n'), ((14844, 14877), 're.search', 're.search', (['"""bytes ([0-9]+)"""', 'line'], {}), "('bytes ([0-9]+)', line)\n", (14853, 14877), False, 'import re\n'), ((17453, 17490), 're.search', 're.search', (['"""skbmark (0x[0-9]+)"""', 'line'], {}), "('skbmark (0x[0-9]+)', line)\n", (17462, 17490), False, 'import re\n'), 
((13774, 13823), 're.search', 're.search', (['"""(([0-9A-F]{2}:){5}[0-9A−F]{2})"""', 'line'], {}), "('(([0-9A-F]{2}:){5}[0-9A−F]{2})', line)\n", (13783, 13823), False, 'import re\n'), ((17016, 17065), 're.search', 're.search', (['"""(([0-9A-F]{2}:){5}[0-9A−F]{2})"""', 'line'], {}), "('(([0-9A-F]{2}:){5}[0-9A−F]{2})', line)\n", (17025, 17065), False, 'import re\n'), ((17849, 17898), 're.search', 're.search', (['"""(([0-9A-F]{2}:){5}[0-9A−F]{2})"""', 'line'], {}), "('(([0-9A-F]{2}:){5}[0-9A−F]{2})', line)\n", (17858, 17898), False, 'import re\n'), ((14758, 14807), 're.search', 're.search', (['"""(([0-9A-F]{2}:){5}[0-9A−F]{2})"""', 'line'], {}), "('(([0-9A-F]{2}:){5}[0-9A−F]{2})', line)\n", (14767, 14807), False, 'import re\n'), ((17102, 17135), 're.search', 're.search', (['"""bytes ([0-9]+)"""', 'line'], {}), "('bytes ([0-9]+)', line)\n", (17111, 17135), False, 'import re\n'), ((17935, 17968), 're.search', 're.search', (['"""bytes ([0-9]+)"""', 'line'], {}), "('bytes ([0-9]+)', line)\n", (17944, 17968), False, 'import re\n'), ((21473, 21522), 're.search', 're.search', (['"""(([0-9A-F]{2}:){5}[0-9A−F]{2})"""', 'line'], {}), "('(([0-9A-F]{2}:){5}[0-9A−F]{2})', line)\n", (21482, 21522), False, 'import re\n')]
|
# -*- coding: utf-8 -*-
"""
Source: https://github.com/awesto/django-shop/blob/3a069d764a7b72ef119828220869dcfbbfc1b9c5/shop/apps.py
"""
from __future__ import unicode_literals
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class ShopConfig(AppConfig):
    """AppConfig for the 'edw_shop' Django application."""

    name = 'edw_shop'
    verbose_name = _("Shop")

    def ready(self):
        """Wire shop-specific serializer fields and run sanity checks at app start-up."""
        #from django_fsm.signals import post_transition
        from edw_shop.models.fields import JSONField
        from rest_framework.serializers import ModelSerializer
        from edw.deferred import ForeignKeyBuilder
        from edw_shop.rest.fields import JSONSerializerField
        #from edw_shop.models.notification import order_event_notification
        #post_transition.connect(order_event_notification)
        # add JSONField to the map of customized serializers
        ModelSerializer.serializer_field_mapping[JSONField] = JSONSerializerField
        # perform some sanity checks
        ForeignKeyBuilder.check_for_pending_mappings()
|
[
"django.utils.translation.ugettext_lazy",
"edw.deferred.ForeignKeyBuilder.check_for_pending_mappings"
] |
[((340, 349), 'django.utils.translation.ugettext_lazy', '_', (['"""Shop"""'], {}), "('Shop')\n", (341, 349), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((981, 1027), 'edw.deferred.ForeignKeyBuilder.check_for_pending_mappings', 'ForeignKeyBuilder.check_for_pending_mappings', ([], {}), '()\n', (1025, 1027), False, 'from edw.deferred import ForeignKeyBuilder\n')]
|
# https://github.com/JasonHaley/hello-python/tree/master/app
from flask import Flask, request
import socket
import datetime
app = Flask(__name__)

# BUG FIX: host_name/host_ip/current_time were only assigned inside the
# "__main__" guard, so the route handlers raised NameError when the app was
# served by a WSGI server; the timestamp was also frozen at startup instead
# of reflecting the time of each request.
host_name = socket.gethostname()
host_ip = socket.gethostbyname(host_name)


def _info_message() -> str:
    """Build the diagnostic response body shared by both routes."""
    return 'Your URL is "{}" .\nFrom Hands On GKE.\nHostName: {}\nIP: {}\nCurrent time: {}\n'.format(
        request.url, host_name, host_ip, datetime.datetime.now())


@app.route("/")
def hello():
    """Root route: echo URL, host identity and the current time."""
    return _info_message()


@app.route('/<mypath>')
def show_path(mypath):
    """Catch-all single-segment route: same diagnostic payload as the root."""
    return _info_message()


if __name__ == "__main__":
    app.run(host='0.0.0.0')
|
[
"socket.gethostname",
"flask.Flask",
"datetime.datetime.now",
"socket.gethostbyname"
] |
[((132, 147), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (137, 147), False, 'from flask import Flask, request\n'), ((603, 623), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (621, 623), False, 'import socket\n'), ((643, 674), 'socket.gethostbyname', 'socket.gethostbyname', (['host_name'], {}), '(host_name)\n', (663, 674), False, 'import socket\n'), ((694, 717), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (715, 717), False, 'import datetime\n')]
|
import matplotlib
matplotlib.use('WXAgg')
from matplotlib import cm
import matplotlib.pyplot as plt
import numpy as np
import CoolProp
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure(figsize = (2,2))
ax = fig.add_subplot(111, projection='3d')
NT = 1000
NR = 1000
rho,t = np.logspace(np.log10(2e-3), np.log10(1100), NR),np.linspace(275.15,700,NT)
RHO,T = np.meshgrid(rho,t)
P = CoolProp.CoolProp.PropsSI('P','D',RHO.reshape((NR*NT,1)),'T',T.reshape((NR*NT,1)),'REFPROP-Water').reshape(NT,NR)
Tsat = np.linspace(273.17,647.0,100)
psat = CoolProp.CoolProp.PropsSI('P','Q',0,'T',Tsat,'Water')
rhoL = CoolProp.CoolProp.PropsSI('D','Q',0,'T',Tsat,'Water')
rhoV = CoolProp.CoolProp.PropsSI('D','Q',1,'T',Tsat,'Water')
ax.plot_surface(np.log(RHO),T,np.log(P), cmap=cm.jet, edgecolor = 'none')
ax.plot(np.log(rhoL),Tsat,np.log(psat),color='k',lw=2)
ax.plot(np.log(rhoV),Tsat,np.log(psat),color='k',lw=2)
ax.text(0.3,800,22, "CoolProp", size = 12)
ax.set_frame_on(False)
ax.set_axis_off()
ax.view_init(22, -136)
ax.set_xlabel(r'$\ln\rho$ ')
ax.set_ylabel('$T$')
ax.set_zlabel('$p$')
plt.tight_layout()
plt.savefig('_static/PVTCP.png',transparent = True)
plt.savefig('_static/PVTCP.pdf',transparent = True)
plt.close()
|
[
"numpy.meshgrid",
"numpy.log",
"matplotlib.pyplot.close",
"CoolProp.CoolProp.PropsSI",
"matplotlib.pyplot.figure",
"matplotlib.use",
"numpy.linspace",
"numpy.log10",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.savefig"
] |
[((18, 41), 'matplotlib.use', 'matplotlib.use', (['"""WXAgg"""'], {}), "('WXAgg')\n", (32, 41), False, 'import matplotlib\n'), ((182, 208), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(2, 2)'}), '(figsize=(2, 2))\n', (192, 208), True, 'import matplotlib.pyplot as plt\n'), ((365, 384), 'numpy.meshgrid', 'np.meshgrid', (['rho', 't'], {}), '(rho, t)\n', (376, 384), True, 'import numpy as np\n'), ((511, 542), 'numpy.linspace', 'np.linspace', (['(273.17)', '(647.0)', '(100)'], {}), '(273.17, 647.0, 100)\n', (522, 542), True, 'import numpy as np\n'), ((548, 606), 'CoolProp.CoolProp.PropsSI', 'CoolProp.CoolProp.PropsSI', (['"""P"""', '"""Q"""', '(0)', '"""T"""', 'Tsat', '"""Water"""'], {}), "('P', 'Q', 0, 'T', Tsat, 'Water')\n", (573, 606), False, 'import CoolProp\n'), ((609, 667), 'CoolProp.CoolProp.PropsSI', 'CoolProp.CoolProp.PropsSI', (['"""D"""', '"""Q"""', '(0)', '"""T"""', 'Tsat', '"""Water"""'], {}), "('D', 'Q', 0, 'T', Tsat, 'Water')\n", (634, 667), False, 'import CoolProp\n'), ((670, 728), 'CoolProp.CoolProp.PropsSI', 'CoolProp.CoolProp.PropsSI', (['"""D"""', '"""Q"""', '(1)', '"""T"""', 'Tsat', '"""Water"""'], {}), "('D', 'Q', 1, 'T', Tsat, 'Water')\n", (695, 728), False, 'import CoolProp\n'), ((1088, 1106), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1104, 1106), True, 'import matplotlib.pyplot as plt\n'), ((1107, 1157), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""_static/PVTCP.png"""'], {'transparent': '(True)'}), "('_static/PVTCP.png', transparent=True)\n", (1118, 1157), True, 'import matplotlib.pyplot as plt\n'), ((1159, 1209), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""_static/PVTCP.pdf"""'], {'transparent': '(True)'}), "('_static/PVTCP.pdf', transparent=True)\n", (1170, 1209), True, 'import matplotlib.pyplot as plt\n'), ((1211, 1222), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1220, 1222), True, 'import matplotlib.pyplot as plt\n'), ((330, 358), 'numpy.linspace', 'np.linspace', 
(['(275.15)', '(700)', 'NT'], {}), '(275.15, 700, NT)\n', (341, 358), True, 'import numpy as np\n'), ((741, 752), 'numpy.log', 'np.log', (['RHO'], {}), '(RHO)\n', (747, 752), True, 'import numpy as np\n'), ((755, 764), 'numpy.log', 'np.log', (['P'], {}), '(P)\n', (761, 764), True, 'import numpy as np\n'), ((807, 819), 'numpy.log', 'np.log', (['rhoL'], {}), '(rhoL)\n', (813, 819), True, 'import numpy as np\n'), ((825, 837), 'numpy.log', 'np.log', (['psat'], {}), '(psat)\n', (831, 837), True, 'import numpy as np\n'), ((862, 874), 'numpy.log', 'np.log', (['rhoV'], {}), '(rhoV)\n', (868, 874), True, 'import numpy as np\n'), ((880, 892), 'numpy.log', 'np.log', (['psat'], {}), '(psat)\n', (886, 892), True, 'import numpy as np\n'), ((294, 309), 'numpy.log10', 'np.log10', (['(0.002)'], {}), '(0.002)\n', (302, 309), True, 'import numpy as np\n'), ((310, 324), 'numpy.log10', 'np.log10', (['(1100)'], {}), '(1100)\n', (318, 324), True, 'import numpy as np\n')]
|
# modify_annotations_txt.py
import glob
import string
txt_list = glob.glob('/media/wrc/0EB90E450EB90E45/data/kitti/label_2/*.txt') # paths of every .txt label file in the Labels folder
def show_category(txt_list):
    """Print the set of object categories found across the given KITTI label files."""
    categories = []
    for path in txt_list:
        try:
            with open(path) as tdf:
                for row in tdf:
                    # The first whitespace-separated field of each line is the category.
                    categories.append(row.strip().split(' ')[0])
        except IOError as ioerr:
            print('File error:' + str(ioerr))
    print(set(categories))  # de-duplicated category names
def merge(line):
    """Join the fields of one label line with single spaces and append a newline."""
    return ' '.join(line) + '\n'
print('before modify categories are:\n')
show_category(txt_list)

# Category rewriting rules: merge vehicle classes into 'Car', sitting persons
# into 'Pedestrian', and drop the two ignorable classes entirely.
_RENAMED = {'Truck': 'Car', 'Van': 'Car', 'Tram': 'Car', 'Person_sitting': 'Pedestrian'}
_DROPPED = {'DontCare', 'Misc'}

for item in txt_list:
    kept_lines = []
    try:
        with open(item, 'r') as r_tdf:
            for each_line in r_tdf:
                fields = each_line.strip().split(' ')
                category = _RENAMED.get(fields[0], fields[0])
                if category in _DROPPED:
                    continue
                fields[0] = category
                kept_lines.append(merge(fields))
        # 'w+' truncates the original file before the rewritten lines go in.
        with open(item, 'w+') as w_tdf:
            w_tdf.writelines(kept_lines)
    except IOError as ioerr:
        print('File error:' + str(ioerr))

print('\nafter modify categories are:\n')
show_category(txt_list)
|
[
"glob.glob"
] |
[((70, 135), 'glob.glob', 'glob.glob', (['"""/media/wrc/0EB90E450EB90E45/data/kitti/label_2/*.txt"""'], {}), "('/media/wrc/0EB90E450EB90E45/data/kitti/label_2/*.txt')\n", (79, 135), False, 'import glob\n')]
|
import json
import os
import shutil
import time
from flask import Flask
from flask.testing import FlaskClient
from pyfakefs.fake_filesystem import FakeFilesystem
def wait_for_task(client: FlaskClient, name: str) -> None:
"""Poll the /task endpoint every 0.5s until the named task reaches 'done' (asserting it never fails)."""
while True:
time.sleep(0.5)
resp = client.get('/task',
data=json.dumps({'name': name}),
content_type='application/json')
assert resp.status == '200 OK'
task_state = json.loads(resp.data)
assert task_state['state'] != 'failed'
if task_state['state'] == 'done':
break
# NOTE(review): indentation was lost in this copy; this print most likely
# sits inside the polling loop (progress output per poll) -- confirm
# against the original file.
print(task_state)
def test_e2e(app: Flask, fs: FakeFilesystem) -> None:
"""End-to-end test: train a model on two label groups, then classify held-out samples."""
# NOTE(review): indentation was lost in this copy of the file; the layout
# below mirrors the flattened source and should be re-checked against the
# original.
fs.pause()
app.config['BASE_CLASSIFIER_DIR'] = '/tmp/tctest/testdata'
# Start from an empty classifier directory on the real filesystem.
if os.path.exists(app.config['BASE_CLASSIFIER_DIR']):
shutil.rmtree(app.config['BASE_CLASSIFIER_DIR'])
os.makedirs(app.config['BASE_CLASSIFIER_DIR'], exist_ok=True)
# Two distinguishable text families, one per training label.
seq_pattern = ('Continue working for the eradiction of poverty %d')
other_seq_pattern = ('Actions to prevent climate change %d')
client = app.test_client()
samples = list()
samples += [
dict(seq=seq_pattern % i, training_labels=['a']) for i in range(0, 15)
]
samples += [
dict(seq=other_seq_pattern % i, training_labels=['b'])
for i in range(0, 15)
]
with app.test_request_context():
# Kick off an asynchronous training task and wait for it to finish.
assert client.post('/task',
data=json.dumps({
'provider': 'TrainModel',
'name': 'train-model',
'model': 'trained_model',
'labels': ['a', 'b', 'c'],
'num_train_steps': 10,
'train_ratio': 0.5,
'samples': samples
}),
content_type='application/json').status == '200 OK'
wait_for_task(client, 'train-model')
# without threshold file default confidence is set to 0.3
resp = client.post('/classify?model=trained_model',
data=json.dumps(
dict(samples=[
dict(seq=seq_pattern % 1),
dict(seq=other_seq_pattern % 1)
])),
content_type='application/json')
assert resp.status == '200 OK'
data = json.loads(resp.data)
# One prediction per submitted sample, labelled with the matching group.
assert len(data['samples']) == 2
assert data['samples'][0]['predicted_labels'][0]['topic'] == 'a'
assert data['samples'][0]['predicted_labels'][0]['quality'] >= 0.7
assert data['samples'][1]['predicted_labels'][0]['topic'] == 'b'
|
[
"json.loads",
"os.makedirs",
"os.path.exists",
"json.dumps",
"time.sleep",
"shutil.rmtree"
] |
[((773, 822), 'os.path.exists', 'os.path.exists', (["app.config['BASE_CLASSIFIER_DIR']"], {}), "(app.config['BASE_CLASSIFIER_DIR'])\n", (787, 822), False, 'import os\n'), ((885, 946), 'os.makedirs', 'os.makedirs', (["app.config['BASE_CLASSIFIER_DIR']"], {'exist_ok': '(True)'}), "(app.config['BASE_CLASSIFIER_DIR'], exist_ok=True)\n", (896, 946), False, 'import os\n'), ((248, 263), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (258, 263), False, 'import time\n'), ((477, 498), 'json.loads', 'json.loads', (['resp.data'], {}), '(resp.data)\n', (487, 498), False, 'import json\n'), ((832, 880), 'shutil.rmtree', 'shutil.rmtree', (["app.config['BASE_CLASSIFIER_DIR']"], {}), "(app.config['BASE_CLASSIFIER_DIR'])\n", (845, 880), False, 'import shutil\n'), ((2505, 2526), 'json.loads', 'json.loads', (['resp.data'], {}), '(resp.data)\n', (2515, 2526), False, 'import json\n'), ((330, 356), 'json.dumps', 'json.dumps', (["{'name': name}"], {}), "({'name': name})\n", (340, 356), False, 'import json\n'), ((1460, 1645), 'json.dumps', 'json.dumps', (["{'provider': 'TrainModel', 'name': 'train-model', 'model': 'trained_model',\n 'labels': ['a', 'b', 'c'], 'num_train_steps': 10, 'train_ratio': 0.5,\n 'samples': samples}"], {}), "({'provider': 'TrainModel', 'name': 'train-model', 'model':\n 'trained_model', 'labels': ['a', 'b', 'c'], 'num_train_steps': 10,\n 'train_ratio': 0.5, 'samples': samples})\n", (1470, 1645), False, 'import json\n')]
|
from models.original_models import wide_resnet_model
from tools.foolbox.model import Model
class WideResNetModel(Model):
    """Foolbox-compatible wrapper around the original WideResnet checkpoint model."""

    def __init__(self, checkpoint_path, image_height=32, image_width=32, n_channels=3, n_classes=10):
        super().__init__(image_height=image_height, image_width=image_width,
                         n_channels=n_channels, n_classes=n_classes,
                         checkpoint_path=checkpoint_path)

    def calculate_logits(self, inputs):
        """Run a forward pass of the wide ResNet and return the pre-softmax logits."""
        net = wide_resnet_model.WideResnetModel(
            image_height=self.image_height, image_width=self.image_width,
            n_channels=self.n_channels, n_classes=self.n_classes)
        return net.fprop(inputs)['logits']
|
[
"models.original_models.wide_resnet_model.WideResnetModel"
] |
[((461, 619), 'models.original_models.wide_resnet_model.WideResnetModel', 'wide_resnet_model.WideResnetModel', ([], {'image_height': 'self.image_height', 'image_width': 'self.image_width', 'n_channels': 'self.n_channels', 'n_classes': 'self.n_classes'}), '(image_height=self.image_height,\n image_width=self.image_width, n_channels=self.n_channels, n_classes=\n self.n_classes)\n', (494, 619), False, 'from models.original_models import wide_resnet_model\n')]
|
import logging
import time
from dataclasses import dataclass, field
from typing import Iterable, List
import confluent_kafka
from confluent_kafka.schema_registry.schema_registry_client import SchemaRegistryClient
import datahub.ingestion.extractor.schema_util as schema_util
from datahub.configuration import ConfigModel
from datahub.configuration.common import AllowDenyPattern
from datahub.configuration.kafka import KafkaConsumerConnectionConfig
from datahub.ingestion.api.common import PipelineContext
from datahub.ingestion.api.source import Source, SourceReport
from datahub.ingestion.source.metadata_common import MetadataWorkUnit
from datahub.metadata.com.linkedin.pegasus2avro.common import AuditStamp, Status
from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot
from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
from datahub.metadata.com.linkedin.pegasus2avro.schema import (
KafkaSchema,
SchemaField,
SchemaMetadata,
)
logger = logging.getLogger(__name__)
class KafkaSourceConfig(ConfigModel):
    """Configuration for the Kafka metadata ingestion source."""

    # TODO: inline the connection config
    connection: KafkaConsumerConnectionConfig = KafkaConsumerConnectionConfig()
    # Scan every topic by default, but skip internal topics (leading underscore).
    topic_patterns: AllowDenyPattern = AllowDenyPattern(allow=[".*"], deny=["^_.*"])
@dataclass
class KafkaSourceReport(SourceReport):
    """Ingestion report for the Kafka source.

    Tracks how many topics were examined and which ones were dropped by the
    allow/deny topic patterns.
    """

    # Annotated so it is a genuine dataclass field (consistent with
    # `filtered` below) instead of a bare class attribute, which a plain
    # `topics_scanned = 0` would be.
    topics_scanned: int = 0
    filtered: List[str] = field(default_factory=list)

    def report_topic_scanned(self, topic: str) -> None:
        """Record that *topic* was examined (whether or not it was ingested)."""
        self.topics_scanned += 1

    def report_dropped(self, topic: str) -> None:
        """Record that *topic* was excluded by the topic patterns."""
        self.filtered.append(topic)
@dataclass
class KafkaSource(Source):
    """Ingestion source that emits one dataset snapshot per Kafka topic.

    Topics are listed from the broker, filtered through the configured
    allow/deny patterns, and enriched with their latest value schema from
    the schema registry (when one is registered).
    """
    source_config: KafkaSourceConfig
    consumer: confluent_kafka.Consumer
    report: KafkaSourceReport
    def __init__(self, config: KafkaSourceConfig, ctx: PipelineContext):
        super().__init__(ctx)
        self.source_config = config
        self.consumer = confluent_kafka.Consumer(
            {
                # NOTE(review): the consumer group id is hard-coded — confirm
                # whether it should come from configuration instead.
                "group.id": "test",
                "bootstrap.servers": self.source_config.connection.bootstrap,
                **self.source_config.connection.consumer_config,
            }
        )
        self.schema_registry_client = SchemaRegistryClient(
            {"url": self.source_config.connection.schema_registry_url}
        )
        self.report = KafkaSourceReport()
    @classmethod
    def create(cls, config_dict, ctx):
        """Alternate constructor: build a KafkaSource from a raw config dict."""
        config = KafkaSourceConfig.parse_obj(config_dict)
        return cls(config, ctx)
    def get_workunits(self) -> Iterable[MetadataWorkUnit]:
        """Yield one metadata work unit per topic allowed by the patterns.

        Every topic is counted as scanned; disallowed topics are recorded
        as dropped in the report.
        """
        topics = self.consumer.list_topics().topics
        for t in topics:
            self.report.report_topic_scanned(t)
            if self.source_config.topic_patterns.allowed(t):
                mce = self._extract_record(t)
                wu = MetadataWorkUnit(id=f"kafka-{t}", mce=mce)
                self.report.report_workunit(wu)
                yield wu
            else:
                self.report.report_dropped(t)
    def _extract_record(self, topic: str) -> MetadataChangeEvent:
        """Build the MetadataChangeEvent (dataset snapshot) for one topic.

        A registry lookup failure is reported as a warning and the snapshot
        is emitted without schema metadata.
        """
        logger.debug(f"topic = {topic}")
        platform = "kafka"
        dataset_name = topic
        env = "PROD" # TODO: configure!
        actor, sys_time = "urn:li:corpuser:etl", int(time.time()) * 1000
        metadata_record = MetadataChangeEvent()
        dataset_snapshot = DatasetSnapshot(
            urn=f"urn:li:dataset:(urn:li:dataPlatform:{platform},{dataset_name},{env})",
        )
        dataset_snapshot.aspects.append(Status(removed=False))
        metadata_record.proposedSnapshot = dataset_snapshot
        # Fetch the value schema from the registry; a missing schema is a
        # warning, not a hard failure.
        has_schema = True
        try:
            registered_schema = self.schema_registry_client.get_latest_version(
                topic + "-value"
            )
            schema = registered_schema.schema
        except Exception as e:
            self.report.report_warning(topic, f"failed to get schema: {e}")
            has_schema = False
        # Parse the schema — only AVRO is supported here.
        fields: List[SchemaField] = []
        if has_schema and schema.schema_type == "AVRO":
            fields = schema_util.avro_schema_to_mce_fields(schema.schema_str)
        elif has_schema:
            self.report.report_warning(
                topic, f"unable to parse kafka schema type {schema.schema_type}"
            )
        if has_schema:
            schema_metadata = SchemaMetadata(
                schemaName=topic,
                version=0,
                hash=str(schema._hash),
                platform=f"urn:li:dataPlatform:{platform}",
                platformSchema=KafkaSchema(documentSchema=schema.schema_str),
                fields=fields,
                created=AuditStamp(time=sys_time, actor=actor),
                lastModified=AuditStamp(time=sys_time, actor=actor),
            )
            dataset_snapshot.aspects.append(schema_metadata)
        return metadata_record
    def get_report(self):
        """Return the accumulated ingestion report."""
        return self.report
    def close(self):
        """Close the underlying Kafka consumer, if one was created."""
        if self.consumer:
            self.consumer.close()
|
[
"datahub.metadata.com.linkedin.pegasus2avro.mxe.MetadataChangeEvent",
"datahub.metadata.com.linkedin.pegasus2avro.common.AuditStamp",
"datahub.configuration.kafka.KafkaConsumerConnectionConfig",
"datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot",
"datahub.configuration.common.AllowDenyPattern",
"confluent_kafka.Consumer",
"datahub.metadata.com.linkedin.pegasus2avro.common.Status",
"dataclasses.field",
"time.time",
"datahub.ingestion.extractor.schema_util.avro_schema_to_mce_fields",
"datahub.ingestion.source.metadata_common.MetadataWorkUnit",
"datahub.metadata.com.linkedin.pegasus2avro.schema.KafkaSchema",
"logging.getLogger",
"confluent_kafka.schema_registry.schema_registry_client.SchemaRegistryClient"
] |
[((1019, 1046), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1036, 1046), False, 'import logging\n'), ((1176, 1207), 'datahub.configuration.kafka.KafkaConsumerConnectionConfig', 'KafkaConsumerConnectionConfig', ([], {}), '()\n', (1205, 1207), False, 'from datahub.configuration.kafka import KafkaConsumerConnectionConfig\n'), ((1247, 1292), 'datahub.configuration.common.AllowDenyPattern', 'AllowDenyPattern', ([], {'allow': "['.*']", 'deny': "['^_.*']"}), "(allow=['.*'], deny=['^_.*'])\n", (1263, 1292), False, 'from datahub.configuration.common import AllowDenyPattern\n'), ((1394, 1421), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (1399, 1421), False, 'from dataclasses import dataclass, field\n'), ((1909, 2076), 'confluent_kafka.Consumer', 'confluent_kafka.Consumer', (["{'group.id': 'test', 'bootstrap.servers': self.source_config.connection.\n bootstrap, **self.source_config.connection.consumer_config}"], {}), "({'group.id': 'test', 'bootstrap.servers': self.\n source_config.connection.bootstrap, **self.source_config.connection.\n consumer_config})\n", (1933, 2076), False, 'import confluent_kafka\n'), ((2190, 2275), 'confluent_kafka.schema_registry.schema_registry_client.SchemaRegistryClient', 'SchemaRegistryClient', (["{'url': self.source_config.connection.schema_registry_url}"], {}), "({'url': self.source_config.connection.schema_registry_url}\n )\n", (2210, 2275), False, 'from confluent_kafka.schema_registry.schema_registry_client import SchemaRegistryClient\n'), ((3281, 3302), 'datahub.metadata.com.linkedin.pegasus2avro.mxe.MetadataChangeEvent', 'MetadataChangeEvent', ([], {}), '()\n', (3300, 3302), False, 'from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent\n'), ((3330, 3427), 'datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot', 'DatasetSnapshot', ([], {'urn': 
'f"""urn:li:dataset:(urn:li:dataPlatform:{platform},{dataset_name},{env})"""'}), "(urn=\n f'urn:li:dataset:(urn:li:dataPlatform:{platform},{dataset_name},{env})')\n", (3345, 3427), False, 'from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot\n'), ((3486, 3507), 'datahub.metadata.com.linkedin.pegasus2avro.common.Status', 'Status', ([], {'removed': '(False)'}), '(removed=False)\n', (3492, 3507), False, 'from datahub.metadata.com.linkedin.pegasus2avro.common import AuditStamp, Status\n'), ((4106, 4162), 'datahub.ingestion.extractor.schema_util.avro_schema_to_mce_fields', 'schema_util.avro_schema_to_mce_fields', (['schema.schema_str'], {}), '(schema.schema_str)\n', (4143, 4162), True, 'import datahub.ingestion.extractor.schema_util as schema_util\n'), ((2796, 2838), 'datahub.ingestion.source.metadata_common.MetadataWorkUnit', 'MetadataWorkUnit', ([], {'id': 'f"""kafka-{t}"""', 'mce': 'mce'}), "(id=f'kafka-{t}', mce=mce)\n", (2812, 2838), False, 'from datahub.ingestion.source.metadata_common import MetadataWorkUnit\n'), ((3234, 3245), 'time.time', 'time.time', ([], {}), '()\n', (3243, 3245), False, 'import time\n'), ((4585, 4630), 'datahub.metadata.com.linkedin.pegasus2avro.schema.KafkaSchema', 'KafkaSchema', ([], {'documentSchema': 'schema.schema_str'}), '(documentSchema=schema.schema_str)\n', (4596, 4630), False, 'from datahub.metadata.com.linkedin.pegasus2avro.schema import KafkaSchema, SchemaField, SchemaMetadata\n'), ((4687, 4725), 'datahub.metadata.com.linkedin.pegasus2avro.common.AuditStamp', 'AuditStamp', ([], {'time': 'sys_time', 'actor': 'actor'}), '(time=sys_time, actor=actor)\n', (4697, 4725), False, 'from datahub.metadata.com.linkedin.pegasus2avro.common import AuditStamp, Status\n'), ((4756, 4794), 'datahub.metadata.com.linkedin.pegasus2avro.common.AuditStamp', 'AuditStamp', ([], {'time': 'sys_time', 'actor': 'actor'}), '(time=sys_time, actor=actor)\n', (4766, 4794), False, 'from 
datahub.metadata.com.linkedin.pegasus2avro.common import AuditStamp, Status\n')]
|
import os

# Telegram API credentials, read from the environment at import time.
# (The duplicated `import os` has been removed.)
# NOTE(review): API_ID raises TypeError at import if the variable is unset —
# confirm this fail-fast behavior is intended for the deployment.
API_ID = int(os.getenv("API_ID"))
API_HASH = os.getenv("API_HASH")
# NOTE(review): BOT_TOKEN is populated from the "SESSION" variable — verify
# the environment variable name is correct.
BOT_TOKEN = os.getenv("SESSION")
LOG_GRP = os.getenv("LOG_GRP")
|
[
"os.getenv"
] |
[((68, 89), 'os.getenv', 'os.getenv', (['"""API_HASH"""'], {}), "('API_HASH')\n", (77, 89), False, 'import os\n'), ((102, 122), 'os.getenv', 'os.getenv', (['"""SESSION"""'], {}), "('SESSION')\n", (111, 122), False, 'import os\n'), ((133, 153), 'os.getenv', 'os.getenv', (['"""LOG_GRP"""'], {}), "('LOG_GRP')\n", (142, 153), False, 'import os\n'), ((36, 55), 'os.getenv', 'os.getenv', (['"""API_ID"""'], {}), "('API_ID')\n", (45, 55), False, 'import os\n')]
|
from __future__ import division
import sys
import pytest
import numpy as np
from datashader.glyphs import Glyph
from datashader.glyphs.line import _build_draw_segment, \
_build_map_onto_pixel_for_line
from datashader.utils import ngjit
# Skip these benchmarks under Python 2.
py2_skip = pytest.mark.skipif(sys.version_info.major < 3, reason="py2 not supported")

# Identity mapping jitted with ngjit; used for both the x and y axes.
mapper = ngjit(lambda x: x)
map_onto_pixel = _build_map_onto_pixel_for_line(mapper, mapper)

# Identity transform (scale 1, translate 0) and a fixed 0..5 clipping window.
sx, tx, sy, ty = 1, 0, 1, 0
xmin, xmax, ymin, ymax = 0, 5, 0, 5
@pytest.fixture
def draw_line():
    """Build a jitted segment-drawing function whose aggregation simply
    counts hits per pixel."""
    @ngjit
    def _count_hit(i, x, y, agg):
        agg[y, x] += 1

    expanded = Glyph._expand_aggs_and_cols(_count_hit, 1)
    return _build_draw_segment(
        _count_hit, map_onto_pixel, expanded, False)
@py2_skip
@pytest.mark.benchmark(group="draw_line")
def test_draw_line_left_border(benchmark, draw_line):
    """Benchmark drawing a vertical segment along the left border."""
    size = 10 ** 4
    start = (0, 0)
    end = (0, size)
    agg = np.zeros((size + 1, size + 1), dtype='i4')
    benchmark(draw_line, sx, tx, sy, ty, xmin, xmax, ymin, ymax,
              start[0], start[1], end[0], end[1], 0, True, agg)
@py2_skip
@pytest.mark.benchmark(group="draw_line")
def test_draw_line_diagonal(benchmark, draw_line):
    """Benchmark drawing the main diagonal of the aggregation grid."""
    size = 10 ** 4
    start = (0, 0)
    end = (size, size)
    agg = np.zeros((size + 1, size + 1), dtype='i4')
    benchmark(draw_line, sx, tx, sy, ty, xmin, xmax, ymin, ymax,
              start[0], start[1], end[0], end[1], 0, True, agg)
@py2_skip
@pytest.mark.benchmark(group="draw_line")
def test_draw_line_offset(benchmark, draw_line):
    """Benchmark a nearly-horizontal segment offset from the origin."""
    size = 10 ** 4
    start = (0, size // 4)
    end = (size, size // 4 - 1)
    agg = np.zeros((size + 1, size + 1), dtype='i4')
    benchmark(draw_line, sx, tx, sy, ty, xmin, xmax, ymin, ymax,
              start[0], start[1], end[0], end[1], 0, True, agg)
|
[
"datashader.glyphs.Glyph._expand_aggs_and_cols",
"numpy.zeros",
"pytest.mark.benchmark",
"pytest.mark.skipif",
"datashader.utils.ngjit",
"datashader.glyphs.line._build_map_onto_pixel_for_line",
"datashader.glyphs.line._build_draw_segment"
] |
[((256, 330), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(sys.version_info.major < 3)'], {'reason': '"""py2 not supported"""'}), "(sys.version_info.major < 3, reason='py2 not supported')\n", (274, 330), False, 'import pytest\n'), ((342, 360), 'datashader.utils.ngjit', 'ngjit', (['(lambda x: x)'], {}), '(lambda x: x)\n', (347, 360), False, 'from datashader.utils import ngjit\n'), ((378, 424), 'datashader.glyphs.line._build_map_onto_pixel_for_line', '_build_map_onto_pixel_for_line', (['mapper', 'mapper'], {}), '(mapper, mapper)\n', (408, 424), False, 'from datashader.glyphs.line import _build_draw_segment, _build_map_onto_pixel_for_line\n'), ((783, 823), 'pytest.mark.benchmark', 'pytest.mark.benchmark', ([], {'group': '"""draw_line"""'}), "(group='draw_line')\n", (804, 823), False, 'import pytest\n'), ((1084, 1124), 'pytest.mark.benchmark', 'pytest.mark.benchmark', ([], {'group': '"""draw_line"""'}), "(group='draw_line')\n", (1105, 1124), False, 'import pytest\n'), ((1381, 1421), 'pytest.mark.benchmark', 'pytest.mark.benchmark', ([], {'group': '"""draw_line"""'}), "(group='draw_line')\n", (1402, 1421), False, 'import pytest\n'), ((616, 654), 'datashader.glyphs.Glyph._expand_aggs_and_cols', 'Glyph._expand_aggs_and_cols', (['append', '(1)'], {}), '(append, 1)\n', (643, 654), False, 'from datashader.glyphs import Glyph\n'), ((666, 738), 'datashader.glyphs.line._build_draw_segment', '_build_draw_segment', (['append', 'map_onto_pixel', 'expand_aggs_and_cols', '(False)'], {}), '(append, map_onto_pixel, expand_aggs_and_cols, False)\n', (685, 738), False, 'from datashader.glyphs.line import _build_draw_segment, _build_map_onto_pixel_for_line\n'), ((943, 979), 'numpy.zeros', 'np.zeros', (['(n + 1, n + 1)'], {'dtype': '"""i4"""'}), "((n + 1, n + 1), dtype='i4')\n", (951, 979), True, 'import numpy as np\n'), ((1241, 1277), 'numpy.zeros', 'np.zeros', (['(n + 1, n + 1)'], {'dtype': '"""i4"""'}), "((n + 1, n + 1), dtype='i4')\n", (1249, 1277), True, 'import numpy as np\n'), 
((1544, 1580), 'numpy.zeros', 'np.zeros', (['(n + 1, n + 1)'], {'dtype': '"""i4"""'}), "((n + 1, n + 1), dtype='i4')\n", (1552, 1580), True, 'import numpy as np\n')]
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from itertools import zip_longest
from typing import Dict, Tuple, Any, Union, Optional, List
from flask_marshmallow import Marshmallow # type: ignore
from flask_sqlalchemy import DefaultMeta # type: ignore
from flask_sqlalchemy import SQLAlchemy as SQLAlchemyBase
from sqlalchemy import Index, Column, Integer, func, DateTime, inspect
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import Mapper, RelationshipProperty
from sqlalchemy.orm.attributes import History
from sqlalchemy.orm.interfaces import MapperProperty
from sqlalchemy.orm.state import InstanceState, AttributeState
log = logging.getLogger(__name__)
# Adding the "pool_pre_ping" command to avoid mysql server has gone away issues.
# Note: This will slightly degrade performance. It might be better to adjust
# MariaDB server settings.
class SQLAlchemy(SQLAlchemyBase):
    """Flask-SQLAlchemy subclass that enables connection liveness checks.

    ``pool_pre_ping`` pings a connection before handing it out, avoiding
    "MySQL server has gone away" errors at a slight performance cost (see
    the module-level comment above).
    """

    def apply_pool_defaults(self, app, options):
        """Extend the default engine options with ``pool_pre_ping``."""
        options = super().apply_pool_defaults(app, options)
        options["pool_pre_ping"] = True
        return options
# Shared database and serialization singletons for the application.
db = SQLAlchemy()
ma = Marshmallow()  # pylint: disable=invalid-name
BaseModel: DefaultMeta = db.Model
# A single change is either a (new, old) tuple, a nested mapping of changes,
# or a list of per-element changes; `Changes` maps attribute names to them.
ChangeUnion = Union[Tuple[Any, Any], Dict[str, Any], List[Any]]
Changes = Dict[str, ChangeUnion]
class MainBase(BaseModel):
    """Abstract base for models in the default ("main") schema.

    Provides an autoincrementing primary key, created/modified timestamps,
    and recursive change-tracking helpers (`model_changes`, `diff`).
    """
    # N.B. We leave the schema out on purpose as alembic gets confused
    # otherwise. The default schema is already main (as specified in the
    # connection string). Also see:
    # https://github.com/sqlalchemy/alembic/issues/519#issuecomment-442533633
    # __table_args__ = {'schema': 'main'}
    __abstract__ = True
    id = Column(Integer, autoincrement=True, primary_key=True)
    date_created = Column(DateTime, default=func.current_timestamp())
    date_modified = Column(
        DateTime,
        default=func.current_timestamp(),
        onupdate=func.current_timestamp(),
    )
    def model_changes(self, *, already_tested=None) -> Changes:
        """Returns the changed attributes of this instance.

        Args:
            already_tested: set of id()s of instances already visited;
                used internally to break cycles in the object graph.

        Returns:
            a dictionary mapping the attributes to (new, old) tuples or a
            recursive version if the attribute is a list or reference.
        """
        def inner(current) -> Optional[Union[List[Any], Changes]]:
            # Recurse into lists and related model instances; return None
            # when nothing underneath has changed.
            if isinstance(current, list):
                res = [inner(item) for item in current]
                if any(res):
                    return res
            elif hasattr(current, "model_changes"):
                return current.model_changes(already_tested=already_tested)
            return None
        changes: Changes = {}
        # Track visited instances by identity to avoid infinite recursion
        # through cyclic relationships.
        if already_tested is None:
            already_tested = {id(self)}
        elif id(self) in already_tested:
            return changes
        already_tested.add(id(self))
        state: InstanceState = inspect(self)
        attr: AttributeState
        for name, attr in state.attrs.items():
            hist: History = attr.load_history()
            if hist.has_changes():
                # History is (added, unchanged, deleted): report (new, old).
                changes[name] = hist[0], hist[2]
            else:
                subchanges = inner(getattr(self, name))
                if subchanges:
                    changes[name] = subchanges
        return changes
    def diff(self, other: BaseModel, *, already_tested=None) -> Changes:
        """Returns the difference between this instance and the given one.

        Args:
            other: an instance of the same model class to compare against.
            already_tested: set of id()s of instances already visited;
                used internally to break cycles.

        Returns:
            a dictionary mapping the attributes to (new, old) tuples or a
            recursive version if the attribute is a list or reference.

        Raises:
            TypeError: if `other` is not an instance of this class.
        """
        changes: Changes = {}
        if already_tested is None:
            already_tested = {id(self), id(other)}
        elif id(self) in already_tested and id(other) in already_tested:
            return changes
        already_tested.add(id(self))
        already_tested.add(id(other))
        if id(self) == id(other):  # identity cache
            log.warning("Comparing the same instance (%r). Identity cache?", self)
            return self.model_changes()
        clz = type(self)
        oclz = type(other)
        if not isinstance(other, clz):
            raise TypeError(
                "Instance of {} expected. Got {}".format(clz.__name__, oclz.__name__)
            )
        def innerdiff(current, other) -> Optional[ChangeUnion]:
            # Pairwise-compare two values, recursing into models and lists.
            if current is None and other is None:
                return None
            if current is None or other is None:
                return (current, other)
            if hasattr(current, "diff"):
                return current.diff(other, already_tested=already_tested)
            if isinstance(current, list) and isinstance(other, list):
                res = []
                for cur, oth in zip_longest(current, other):
                    res.append(innerdiff(cur, oth))
                if all(res):
                    return res
            elif current != other:
                return (current, other)
            return None
        mapper: Mapper = inspect(clz)
        name: str
        attr: MapperProperty
        for name, attr in mapper.attrs.items():  # type: ignore
            # log.debug('Compare %s of %s <> %s', name, clz, oclz)
            other_value = getattr(other, name)
            current_value = getattr(self, name)
            if isinstance(attr, RelationshipProperty) and other_value is None:
                # Relationship missing on `other`: fall back to comparing
                # the local foreign-key columns; skip the attribute entirely
                # when they are all equal.
                for col in attr.local_columns:
                    cname = col.name
                    if innerdiff(getattr(self, cname), getattr(other, cname)):
                        break
                else:
                    continue
            if name in changes:
                continue
            subchanges = innerdiff(current_value, other_value)
            if subchanges:
                changes[name] = subchanges
        return changes
class NvdBase(BaseModel):
    """Abstract base for NVD models; tables are placed in the ``cve`` schema.

    Columns declared with ``index=True`` are rewritten into explicitly
    named indices following the ``idx_{table}_{column}`` convention.
    """
    __abstract__ = True
    @declared_attr
    def __table_args__(cls):  # pylint: disable=no-self-argument
        indices = []
        idx_format = "idx_{tbl_name}_{col_name}"
        for key in cls.__dict__:
            attribute = cls.__dict__[key]
            # pylint: disable=no-member
            if not isinstance(attribute, db.Column) or not attribute.index:
                continue
            # pylint: enable=no-member
            # Disable the implicit index...
            attribute.index = None
            # ...and create an explicitly named one instead.
            indices.append(
                Index(idx_format.format(tbl_name=cls.__tablename__, col_name=key), key)
            )
        indices.append({"schema": "cve"})
        return tuple(indices)
class CweBase(BaseModel):
    """Abstract base for CWE models; tables are placed in the ``cwe`` schema."""
    __table_args__ = {"schema": "cwe"}
    __abstract__ = True
|
[
"sqlalchemy.inspect",
"sqlalchemy.func.current_timestamp",
"itertools.zip_longest",
"flask_marshmallow.Marshmallow",
"sqlalchemy.Column",
"logging.getLogger"
] |
[((1207, 1234), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1224, 1234), False, 'import logging\n'), ((1659, 1672), 'flask_marshmallow.Marshmallow', 'Marshmallow', ([], {}), '()\n', (1670, 1672), False, 'from flask_marshmallow import Marshmallow\n'), ((2201, 2254), 'sqlalchemy.Column', 'Column', (['Integer'], {'autoincrement': '(True)', 'primary_key': '(True)'}), '(Integer, autoincrement=True, primary_key=True)\n', (2207, 2254), False, 'from sqlalchemy import Index, Column, Integer, func, DateTime, inspect\n'), ((3382, 3395), 'sqlalchemy.inspect', 'inspect', (['self'], {}), '(self)\n', (3389, 3395), False, 'from sqlalchemy import Index, Column, Integer, func, DateTime, inspect\n'), ((5529, 5541), 'sqlalchemy.inspect', 'inspect', (['clz'], {}), '(clz)\n', (5536, 5541), False, 'from sqlalchemy import Index, Column, Integer, func, DateTime, inspect\n'), ((2299, 2323), 'sqlalchemy.func.current_timestamp', 'func.current_timestamp', ([], {}), '()\n', (2321, 2323), False, 'from sqlalchemy import Index, Column, Integer, func, DateTime, inspect\n'), ((2387, 2411), 'sqlalchemy.func.current_timestamp', 'func.current_timestamp', ([], {}), '()\n', (2409, 2411), False, 'from sqlalchemy import Index, Column, Integer, func, DateTime, inspect\n'), ((2430, 2454), 'sqlalchemy.func.current_timestamp', 'func.current_timestamp', ([], {}), '()\n', (2452, 2454), False, 'from sqlalchemy import Index, Column, Integer, func, DateTime, inspect\n'), ((5263, 5290), 'itertools.zip_longest', 'zip_longest', (['current', 'other'], {}), '(current, other)\n', (5274, 5290), False, 'from itertools import zip_longest\n')]
|
import pytest
import sys

sys.path.append('../src')

from remap import parse_schema


def test_import_aws(file):
    """Parse the given schema file; the test fails if parsing raises."""
    parse_schema(file)


if __name__ == '__main__':
    test_import_aws("./schema.txt")
|
[
"sys.path.append",
"remap.parse_schema"
] |
[((25, 50), 'sys.path.append', 'sys.path.append', (['"""../src"""'], {}), "('../src')\n", (40, 50), False, 'import sys\n'), ((114, 132), 'remap.parse_schema', 'parse_schema', (['file'], {}), '(file)\n', (126, 132), False, 'from remap import parse_schema\n')]
|
#!/usr/bin/env python
"""
Horizontal split example.
"""
from __future__ import unicode_literals
from prompt_toolkit.application import Application
from prompt_toolkit.key_binding import KeyBindings
from prompt_toolkit.layout.containers import HSplit, Window
from prompt_toolkit.layout.controls import FormattedTextControl
from prompt_toolkit.layout.layout import Layout
# 1. The layout
# NOTE(review): the on-screen text says "Vertical-split" although this is
# the horizontal-split (HSplit) example — the runtime string is left as-is.
left_text = "\nVertical-split example. Press 'q' to quit.\n\n(top pane.)"
right_text = "\n(bottom pane.)"
body = HSplit([
    Window(FormattedTextControl(left_text)),
    Window(height=1, char='-'),  # Horizontal line in the middle.
    Window(FormattedTextControl(right_text)),
])
# 2. Key bindings
kb = KeyBindings()
@kb.add('q')
def _(event):
    """Exit the application when 'q' is pressed."""
    event.app.exit()
# 3. The `Application`
# Full-screen application wiring the layout and key bindings together.
application = Application(
    layout=Layout(body),
    key_bindings=kb,
    full_screen=True)
def run():
    """Run the application's event loop until it exits."""
    application.run()
if __name__ == '__main__':
    run()
|
[
"prompt_toolkit.layout.containers.Window",
"prompt_toolkit.key_binding.KeyBindings",
"prompt_toolkit.layout.layout.Layout",
"prompt_toolkit.layout.controls.FormattedTextControl"
] |
[((698, 711), 'prompt_toolkit.key_binding.KeyBindings', 'KeyBindings', ([], {}), '()\n', (709, 711), False, 'from prompt_toolkit.key_binding import KeyBindings\n'), ((562, 588), 'prompt_toolkit.layout.containers.Window', 'Window', ([], {'height': '(1)', 'char': '"""-"""'}), "(height=1, char='-')\n", (568, 588), False, 'from prompt_toolkit.layout.containers import HSplit, Window\n'), ((851, 863), 'prompt_toolkit.layout.layout.Layout', 'Layout', (['body'], {}), '(body)\n', (857, 863), False, 'from prompt_toolkit.layout.layout import Layout\n'), ((524, 555), 'prompt_toolkit.layout.controls.FormattedTextControl', 'FormattedTextControl', (['left_text'], {}), '(left_text)\n', (544, 555), False, 'from prompt_toolkit.layout.controls import FormattedTextControl\n'), ((635, 667), 'prompt_toolkit.layout.controls.FormattedTextControl', 'FormattedTextControl', (['right_text'], {}), '(right_text)\n', (655, 667), False, 'from prompt_toolkit.layout.controls import FormattedTextControl\n')]
|
# __init__.py (flowsa)
# !/usr/bin/env python3
# coding=utf-8
"""
Public API for flowsa
For standard dataframe formats, see
https://github.com/USEPA/flowsa/tree/master/format%20specs
Files are loaded from a user's local directory
https://github.com/USEPA/flowsa/wiki/Data-Storage#local-storage
or can be downloaded from a remote repository
https://edap-ord-data-commons.s3.amazonaws.com/index.html?prefix=flowsa/
The most recent version (based on timestamp) of Flow-By-Activity and
Flow-By-Sector files are loaded when running these functions
"""
import os
import pprint
from esupy.processed_data_mgmt import load_preprocessed_output, \
download_from_remote
from flowsa.common import load_yaml_dict
from flowsa.settings import log, sourceconfigpath, flowbysectormethodpath, \
paths, fbaoutputpath, fbsoutputpath, \
biboutputpath, DEFAULT_DOWNLOAD_IF_MISSING
from flowsa.metadata import set_fb_meta
from flowsa.flowbyfunctions import collapse_fbs_sectors, filter_by_geoscale
from flowsa.validation import check_for_nonetypes_in_sector_col, \
check_for_negative_flowamounts
import flowsa.flowbyactivity
import flowsa.flowbysector
from flowsa.bibliography import generate_fbs_bibliography
from flowsa.datavisualization import plotFBSresults
def getFlowByActivity(datasource, year, flowclass=None, geographic_level=None,
                      download_FBA_if_missing=DEFAULT_DOWNLOAD_IF_MISSING):
    """
    Retrieves stored data in the FlowByActivity format
    :param datasource: str, the code of the datasource.
    :param year: int, a year, e.g. 2012
    :param flowclass: str, a 'Class' of the flow. Optional. E.g. 'Water'
    :param geographic_level: str, a geographic level of the data.
        Optional. E.g. 'national', 'state', 'county'.
    :param download_FBA_if_missing: bool, if True will attempt to load from
        remote server prior to generating if file not found locally
    :return: a pandas DataFrame in FlowByActivity format
    :raises FBANotAvailableError: if the FBA cannot be loaded, downloaded,
        or generated, or if the loaded dataframe is empty
    """
    # Set fba metadata
    name = flowsa.flowbyactivity.set_fba_name(datasource, year)
    fba_meta = set_fb_meta(name, "FlowByActivity")
    # Try to load a local version of FBA
    fba = load_preprocessed_output(fba_meta, paths)
    # If that didn't work, try to download a remote version of FBA
    if fba is None and download_FBA_if_missing:
        log.info(f'{datasource} {str(year)} not found in {fbaoutputpath}, '
                 'downloading from remote source')
        download_from_remote(fba_meta, paths)
        fba = load_preprocessed_output(fba_meta, paths)
    # If that didn't work or wasn't allowed, try to construct the FBA
    if fba is None:
        log.info(f'{datasource} {str(year)} not found in {fbaoutputpath}, '
                 'running functions to generate FBA')
        # Generate the fba
        flowsa.flowbyactivity.main(year=year, source=datasource)
        # Now load the fba
        fba = load_preprocessed_output(fba_meta, paths)
    # If none of the above worked, log an error message
    if fba is None:
        raise flowsa.exceptions.FBANotAvailableError(method=datasource,
                                               year=year)
    # Otherwise (that is, if one of the above methods successfuly loaded the
    # FBA), log it.
    else:
        log.info(f'Loaded {datasource} {str(year)} from {fbaoutputpath}')
    # An empty dataframe means generation produced no rows — treat that the
    # same as an unavailable FBA.
    if len(fba) ==0:
        raise flowsa.exceptions.FBANotAvailableError(
            message=f"Error generating {datasource} for {str(year)}")
    # Address optional parameters
    if flowclass is not None:
        fba = fba[fba['Class'] == flowclass]
    # if geographic level specified, only load rows in geo level
    if geographic_level is not None:
        fba = filter_by_geoscale(fba, geographic_level)
    return fba
def getFlowBySector(methodname, fbsconfigpath=None,
                    download_FBAs_if_missing=DEFAULT_DOWNLOAD_IF_MISSING,
                    download_FBS_if_missing=DEFAULT_DOWNLOAD_IF_MISSING):
    """
    Loads stored FlowBySector output or generates it if it doesn't exist,
    then loads
    :param methodname: string, Name of an available method for the given class
    :param fbsconfigpath: str, path to the FBS method file if loading a file
        from outside the flowsa repository
    :param download_FBAs_if_missing: bool, if True will attempt to load FBAS
        used in generating the FBS from remote server prior to generating if
        file not found locally
    :param download_FBS_if_missing: bool, if True will attempt to load from
        remote server prior to generating if file not found locally
    :return: dataframe in flow by sector format (None if all load attempts
        fail; an error is logged in that case)
    """
    fbs_meta = set_fb_meta(methodname, "FlowBySector")
    # Try to load a local version of the FBS
    fbs = load_preprocessed_output(fbs_meta, paths)
    # If that didn't work, try to download a remote version of FBS
    if fbs is None and download_FBS_if_missing:
        log.info('%s not found in %s, downloading from remote source',
                 methodname, fbsoutputpath)
        # download and load the FBS parquet; the .log file lands in 'Log'
        subdirectory_dict = {'.log': 'Log'}
        download_from_remote(fbs_meta, paths,
                             subdirectory_dict=subdirectory_dict)
        fbs = load_preprocessed_output(fbs_meta, paths)
    # If that didn't work or wasn't allowed, try to construct the FBS
    if fbs is None:
        log.info('%s not found in %s, running functions to generate FBS',
                 methodname, fbsoutputpath)
        # Generate the fbs, with option to download any required FBAs from
        # Data Commons
        flowsa.flowbysector.main(
            method=methodname,
            fbsconfigpath=fbsconfigpath,
            download_FBAs_if_missing=download_FBAs_if_missing
        )
        # Now load the fbs
        fbs = load_preprocessed_output(fbs_meta, paths)
    # If none of the above worked, log an error message
    if fbs is None:
        log.error('getFlowBySector failed, FBS not found')
    # Otherwise (that is, if one of the above methods successfuly loaded the
    # FBS), log it.
    else:
        log.info('Loaded %s from %s', methodname, fbsoutputpath)
    return fbs
def collapse_FlowBySector(methodname, fbsconfigpath=None,
                          download_FBAs_if_missing=DEFAULT_DOWNLOAD_IF_MISSING,
                          download_FBS_if_missing=DEFAULT_DOWNLOAD_IF_MISSING):
    """
    Load a FlowBySector model and collapse its two sector columns into one.
    :param methodname: string, Name of an available method for the given class
    :param fbsconfigpath: str, optional path to an FBS method file outside
        the flowsa repository
    :param download_FBAs_if_missing: bool, download required FBAs from the
        remote server if missing locally
    :param download_FBS_if_missing: bool, download the FBS from the remote
        server if missing locally
    :return: dataframe in flow by sector format with a single sector column
    """
    fbs = flowsa.getFlowBySector(methodname, fbsconfigpath,
                                 download_FBAs_if_missing,
                                 download_FBS_if_missing)
    # Collapse first, then run both validation passes on the collapsed frame:
    # NoneType values in the sector column, then negative FlowAmount values.
    collapsed = collapse_fbs_sectors(fbs)
    collapsed = check_for_nonetypes_in_sector_col(collapsed)
    return check_for_negative_flowamounts(collapsed)
def writeFlowBySectorBibliography(methodname):
    """
    Generate bibliography for FlowBySectorMethod in local directory
    :param methodname: string, FBS methodname for which to create .bib file
    :return: None; .bib file saved to local directory
    """
    # Generate a single .bib file for a list of Flow-By-Sector method names
    # and save file to local directory
    log.info('Write bibliography to %s%s.bib', biboutputpath, methodname)
    generate_fbs_bibliography(methodname)
def seeAvailableFlowByModels(flowbytype, print_method=True):
    """
    List the available Flow-By-Activity or Flow-By-Sector models.
    :param flowbytype: 'FBA' or 'FBS'
    :param print_method: False to skip printing to console
    :return: dict mapping FBA model names to their years, or a list of
        FBS model names
    """
    is_fba = flowbytype == 'FBA'
    # method yaml files live in different directories for FBA vs FBS
    fb_directory = sourceconfigpath if is_fba else flowbysectormethodpath

    fba_years = {}
    fbs_names = []
    # scan every yaml method file in the directory
    for filename in os.listdir(fb_directory):
        if not filename.endswith(".yaml"):
            continue
        # drop the file extension to get the model name
        model_name = os.path.splitext(filename)[0]
        if is_fba:
            method = load_yaml_dict(model_name, 'FBA')
            try:
                years = method['years']
            except KeyError:
                years = 'YAML missing information on years'
            fba_years[model_name] = years
        else:
            fbs_names.append(model_name)

    data_print = fba_years if is_fba else fbs_names
    if print_method:
        # print data in human-readable format
        pprint.pprint(data_print, width=79, compact=True)
    return data_print
def generateFBSplot(method_dict, plottype, sector_length_display=None,
                    sectors_to_include=None, plot_title=None):
    """
    Plot the results of FBS models. Graphic can either be a faceted
    scatterplot or a method comparison
    :param method_dict: dictionary, key is the label, value is the FBS
        methodname
    :param plottype: str, 'facet_graph' or 'method_comparison'
    :param sector_length_display: numeric, sector length by which to
        aggregate, default is 'None' which returns the max sector length in a
        dataframe
    :param sectors_to_include: list, sectors to include in output. Sectors
        are subset by all sectors that "start with" the values in this list
    :param plot_title: str, optional title for the generated plot
    :return: graphic displaying results of FBS models
    """
    # Thin wrapper: delegate to the datavisualization module, passing all
    # arguments through positionally.
    plotFBSresults(method_dict, plottype, sector_length_display,
                   sectors_to_include, plot_title)
|
[
"flowsa.bibliography.generate_fbs_bibliography",
"flowsa.settings.log.info",
"esupy.processed_data_mgmt.load_preprocessed_output",
"esupy.processed_data_mgmt.download_from_remote",
"flowsa.settings.log.error",
"flowsa.metadata.set_fb_meta",
"flowsa.flowbyfunctions.filter_by_geoscale",
"flowsa.validation.check_for_negative_flowamounts",
"flowsa.validation.check_for_nonetypes_in_sector_col",
"flowsa.flowbyfunctions.collapse_fbs_sectors",
"flowsa.common.load_yaml_dict",
"pprint.pprint",
"os.path.splitext",
"flowsa.datavisualization.plotFBSresults",
"os.listdir"
] |
[((2098, 2133), 'flowsa.metadata.set_fb_meta', 'set_fb_meta', (['name', '"""FlowByActivity"""'], {}), "(name, 'FlowByActivity')\n", (2109, 2133), False, 'from flowsa.metadata import set_fb_meta\n'), ((2186, 2227), 'esupy.processed_data_mgmt.load_preprocessed_output', 'load_preprocessed_output', (['fba_meta', 'paths'], {}), '(fba_meta, paths)\n', (2210, 2227), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((4687, 4726), 'flowsa.metadata.set_fb_meta', 'set_fb_meta', (['methodname', '"""FlowBySector"""'], {}), "(methodname, 'FlowBySector')\n", (4698, 4726), False, 'from flowsa.metadata import set_fb_meta\n'), ((4782, 4823), 'esupy.processed_data_mgmt.load_preprocessed_output', 'load_preprocessed_output', (['fbs_meta', 'paths'], {}), '(fbs_meta, paths)\n', (4806, 4823), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((6814, 6839), 'flowsa.flowbyfunctions.collapse_fbs_sectors', 'collapse_fbs_sectors', (['fbs'], {}), '(fbs)\n', (6834, 6839), False, 'from flowsa.flowbyfunctions import collapse_fbs_sectors, filter_by_geoscale\n'), ((6908, 6956), 'flowsa.validation.check_for_nonetypes_in_sector_col', 'check_for_nonetypes_in_sector_col', (['fbs_collapsed'], {}), '(fbs_collapsed)\n', (6941, 6956), False, 'from flowsa.validation import check_for_nonetypes_in_sector_col, check_for_negative_flowamounts\n'), ((7025, 7070), 'flowsa.validation.check_for_negative_flowamounts', 'check_for_negative_flowamounts', (['fbs_collapsed'], {}), '(fbs_collapsed)\n', (7055, 7070), False, 'from flowsa.validation import check_for_nonetypes_in_sector_col, check_for_negative_flowamounts\n'), ((7472, 7541), 'flowsa.settings.log.info', 'log.info', (['"""Write bibliography to %s%s.bib"""', 'biboutputpath', 'methodname'], {}), "('Write bibliography to %s%s.bib', biboutputpath, methodname)\n", (7480, 7541), False, 'from flowsa.settings import log, sourceconfigpath, flowbysectormethodpath, paths, 
fbaoutputpath, fbsoutputpath, biboutputpath, DEFAULT_DOWNLOAD_IF_MISSING\n'), ((7546, 7583), 'flowsa.bibliography.generate_fbs_bibliography', 'generate_fbs_bibliography', (['methodname'], {}), '(methodname)\n', (7571, 7583), False, 'from flowsa.bibliography import generate_fbs_bibliography\n'), ((8180, 8204), 'os.listdir', 'os.listdir', (['fb_directory'], {}), '(fb_directory)\n', (8190, 8204), False, 'import os\n'), ((9731, 9827), 'flowsa.datavisualization.plotFBSresults', 'plotFBSresults', (['method_dict', 'plottype', 'sector_length_display', 'sectors_to_include', 'plot_title'], {}), '(method_dict, plottype, sector_length_display,\n sectors_to_include, plot_title)\n', (9745, 9827), False, 'from flowsa.datavisualization import plotFBSresults\n'), ((2478, 2515), 'esupy.processed_data_mgmt.download_from_remote', 'download_from_remote', (['fba_meta', 'paths'], {}), '(fba_meta, paths)\n', (2498, 2515), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((2530, 2571), 'esupy.processed_data_mgmt.load_preprocessed_output', 'load_preprocessed_output', (['fba_meta', 'paths'], {}), '(fba_meta, paths)\n', (2554, 2571), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((2925, 2966), 'esupy.processed_data_mgmt.load_preprocessed_output', 'load_preprocessed_output', (['fba_meta', 'paths'], {}), '(fba_meta, paths)\n', (2949, 2966), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((3732, 3773), 'flowsa.flowbyfunctions.filter_by_geoscale', 'filter_by_geoscale', (['fba', 'geographic_level'], {}), '(fba, geographic_level)\n', (3750, 3773), False, 'from flowsa.flowbyfunctions import collapse_fbs_sectors, filter_by_geoscale\n'), ((4947, 5040), 'flowsa.settings.log.info', 'log.info', (['"""%s not found in %s, downloading from remote source"""', 'methodname', 'fbsoutputpath'], {}), "('%s not found in %s, downloading from remote source', 
methodname,\n fbsoutputpath)\n", (4955, 5040), False, 'from flowsa.settings import log, sourceconfigpath, flowbysectormethodpath, paths, fbaoutputpath, fbsoutputpath, biboutputpath, DEFAULT_DOWNLOAD_IF_MISSING\n'), ((5150, 5224), 'esupy.processed_data_mgmt.download_from_remote', 'download_from_remote', (['fbs_meta', 'paths'], {'subdirectory_dict': 'subdirectory_dict'}), '(fbs_meta, paths, subdirectory_dict=subdirectory_dict)\n', (5170, 5224), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((5268, 5309), 'esupy.processed_data_mgmt.load_preprocessed_output', 'load_preprocessed_output', (['fbs_meta', 'paths'], {}), '(fbs_meta, paths)\n', (5292, 5309), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((5408, 5504), 'flowsa.settings.log.info', 'log.info', (['"""%s not found in %s, running functions to generate FBS"""', 'methodname', 'fbsoutputpath'], {}), "('%s not found in %s, running functions to generate FBS',\n methodname, fbsoutputpath)\n", (5416, 5504), False, 'from flowsa.settings import log, sourceconfigpath, flowbysectormethodpath, paths, fbaoutputpath, fbsoutputpath, biboutputpath, DEFAULT_DOWNLOAD_IF_MISSING\n'), ((5835, 5876), 'esupy.processed_data_mgmt.load_preprocessed_output', 'load_preprocessed_output', (['fbs_meta', 'paths'], {}), '(fbs_meta, paths)\n', (5859, 5876), False, 'from esupy.processed_data_mgmt import load_preprocessed_output, download_from_remote\n'), ((5961, 6011), 'flowsa.settings.log.error', 'log.error', (['"""getFlowBySector failed, FBS not found"""'], {}), "('getFlowBySector failed, FBS not found')\n", (5970, 6011), False, 'from flowsa.settings import log, sourceconfigpath, flowbysectormethodpath, paths, fbaoutputpath, fbsoutputpath, biboutputpath, DEFAULT_DOWNLOAD_IF_MISSING\n'), ((6127, 6183), 'flowsa.settings.log.info', 'log.info', (['"""Loaded %s from %s"""', 'methodname', 'fbsoutputpath'], {}), "('Loaded %s from %s', methodname, 
fbsoutputpath)\n", (6135, 6183), False, 'from flowsa.settings import log, sourceconfigpath, flowbysectormethodpath, paths, fbaoutputpath, fbsoutputpath, biboutputpath, DEFAULT_DOWNLOAD_IF_MISSING\n'), ((8885, 8934), 'pprint.pprint', 'pprint.pprint', (['data_print'], {'width': '(79)', 'compact': '(True)'}), '(data_print, width=79, compact=True)\n', (8898, 8934), False, 'import pprint\n'), ((8291, 8313), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (8307, 8313), False, 'import os\n'), ((8373, 8397), 'flowsa.common.load_yaml_dict', 'load_yaml_dict', (['f', '"""FBA"""'], {}), "(f, 'FBA')\n", (8387, 8397), False, 'from flowsa.common import load_yaml_dict\n')]
|
"""Example of a custom gym environment and model. Run this for a demo.
This example shows:
- using a custom environment
- using a custom model
- using Tune for grid search
You can visualize experiment results in ~/ray_results using TensorBoard.
"""
import argparse
import os
import numpy as np
import pygame
import matplotlib.pyplot as plt
import matplotlib.backends.backend_agg as agg
import ray
from ray import tune
from ray.tune import grid_search
from ray.rllib.models import ModelCatalog
from ray.rllib.models.tf.tf_modelv2 import TFModelV2
from ray.rllib.models.tf.fcnet import FullyConnectedNetwork
from ray.rllib.models.torch.torch_modelv2 import TorchModelV2
from ray.rllib.models.torch.fcnet import FullyConnectedNetwork as TorchFC
from ray.rllib.utils.framework import try_import_tf, try_import_torch
from ray.rllib.utils.test_utils import check_learning_achieved
from ray.rllib.agents.ppo import PPOTrainer
from ray.rllib.agents.sac import SACTrainer
from ray.rllib.agents.ddpg import DDPGTrainer
from WindAI.floris.optimize_AI import farminit
from WindAI.floris import tools as wfct
from WindAI.farm_env.env import FarmEnv
from WindAI.agent_configs import config_PPO, config_SAC, config_DDPG
tf1, tf, tfv = try_import_tf()
torch, nn = try_import_torch()
parser = argparse.ArgumentParser()
parser.add_argument("--run", type=str, default="SAC")
parser.add_argument("--torch", action="store_true")
parser.add_argument("--num-wt-rows", type=int, default=1)
parser.add_argument("--num-wt-cols", type=int, default=2)
class CustomModel(TFModelV2):
"""Example of a keras custom model that just delegates to an fc-net."""
def __init__(self, obs_space, action_space, num_outputs, model_config,
name):
super(CustomModel, self).__init__(obs_space, action_space, num_outputs,
model_config, name)
self.model = FullyConnectedNetwork(obs_space, action_space,
num_outputs, model_config, name)
self.register_variables(self.model.variables())
def forward(self, input_dict, state, seq_lens):
return self.model.forward(input_dict, state, seq_lens)
def value_function(self):
return self.model.value_function()
class TorchCustomModel(TorchModelV2, nn.Module):
"""Example of a PyTorch custom model that just delegates to a fc-net."""
def __init__(self, obs_space, action_space, num_outputs, model_config,
name):
print(f'CUDA {torch.cuda.is_available()}')
TorchModelV2.__init__(self, obs_space, action_space, num_outputs,
model_config, name)
nn.Module.__init__(self)
self.torch_sub_model = TorchFC(obs_space, action_space, num_outputs,
model_config, name)
def forward(self, input_dict, state, seq_lens):
input_dict["obs"] = input_dict["obs"].float()
fc_out, _ = self.torch_sub_model(input_dict, state, seq_lens)
return fc_out, []
def value_function(self):
return torch.reshape(self.torch_sub_model.value_function(), [-1])
if __name__ == "__main__":
args = parser.parse_args()
# register_env("corridor", lambda config: SimpleCorridor(config))
ModelCatalog.register_custom_model(
"my_model", TorchCustomModel if args.torch else CustomModel)
pygame.init()
ray.init()
agent = {}
config = {}
# initialize Farm
# env variables
ws = 10. # wind speed in knots
wd = 0. # wind direction in degrees
wd_change = 1
ws_change = 1
farm = farminit(args.num_wt_rows, args.num_wt_cols)
farm.reinitialize_flow_field(wind_direction=[wd], wind_speed=[ws])
farm.calculate_wake()
power = farm.get_farm_power()
print(f'initial power {power}')
env_config = {
"env_config": {
"num_wind_turbines": args.num_wt_rows * args.num_wt_cols,
"farm": farm,
"max_yaw": 20,
"continuous_action_space": True,
"min_wind_speed": 10., # m.s-1 (range from 2 to 25.5)
"max_wind_speed": 10., # m.s-1 (range from 2 to 25.5)
"min_wind_angle": 250.,
"max_wind_angle": 290.
}}
# instantiate env class
env = FarmEnv(env_config['env_config'])
general_config = {
"env": FarmEnv,
"model": {
"custom_model": "my_model",
},
"framework": "torch" if args.torch else "tf",
}
if args.run == "PPO":
agent_config = config_PPO
config = {
**env_config,
**agent_config,
**general_config
}
agent = PPOTrainer(config=config)
elif args.run == "SAC":
agent_config = config_SAC
config = {
**env_config,
**agent_config,
**general_config
}
agent = SACTrainer(config=config)
elif args.run == "DDPG":
agent_config = config_DDPG
config = {
**env_config,
**agent_config,
**general_config
}
agent = DDPGTrainer(config=config)
# '/home/david/ray_results/SAC/SAC_FarmEnv_ff600_00000_0_2021-02-06_14-34-11/checkpoint_50/checkpoint-50'
checkpoint_path = '/home/david/ray_results/SAC/SAC_FarmEnv_305d2_00000_0_2021-03-24_08-40-22/checkpoint_10/checkpoint-10'
agent.restore(checkpoint_path=checkpoint_path)
font = pygame.font.Font('freesansbold.ttf', 20)
textX = 10
textY = 10
# arrow indicating wind direction
arrow_Img = pygame.image.load('wind-compass.png')
arrow_x = 250
arrow_y = 5
screen = pygame.display.set_mode((800, 600))
pygame.display.set_caption('WindAI')
def update_env():
obs = env.reset(wd=wd, ws=ws)
print(f'initializing flow filed for {wd} with {env.cur_yaws}')
farm.reinitialize_flow_field(wind_direction=[wd], wind_speed=[ws])
farm.calculate_wake(yaw_angles=env.cur_yaws)
nominal_power = farm.get_farm_power()
print(f'nominal power {nominal_power}')
action = agent.compute_action(obs)
print(f'actions : {action}')
# Execute the actions
if env.continuous_action_space:
env.cur_yaws = action * env.allowed_yaw
else:
env.cur_yaws = action - env.allowed_yaw
farm.calculate_wake(yaw_angles=env.cur_yaws)
# obs, reward, done, info = env.step(action=action, no_variation=True)
steering_power = farm.get_farm_power()
hor_plane = farm.get_hor_plane(
height=farm.floris.farm.turbines[0].hub_height) # x_resolution=400, y_resolution=100)
# Plot and show
fig, ax = plt.subplots()
wfct.visualization.visualize_cut_plane(hor_plane, ax=ax)
canvas = agg.FigureCanvasAgg(fig)
canvas.draw()
renderer = canvas.get_renderer()
size = canvas.get_width_height()
raw_data = renderer.tostring_rgb()
surf = pygame.image.fromstring(raw_data, size, "RGB")
return surf, nominal_power, steering_power
farm_img, power, new_power = update_env()
def show_env_params(x, y, pwr, new_pwr):
wind_direction = font.render("Wind direction " + str(wd) + "°", True, (0, 0, 0))
wind_speed = font.render("Wind speed " + str(ws) + "°", True, (0, 0, 0))
current_power = font.render("current power " + str(int(pwr)) + "W", True, (0, 0, 0))
nw_power = font.render("new power " + str(int(new_pwr)) + "W", True, (0, 0, 0))
var_pwr = (new_power - pwr) / pwr * 100
var_power = font.render("power variation" + str(int(var_pwr)) + "%", True, (0, 0, 0))
screen.blit(wind_direction, (x, y))
screen.blit(wind_speed, (x, y + 30))
screen.blit(current_power, (x, y + 60))
screen.blit(nw_power, (x, y + 90))
screen.blit(var_power, (x, y + 120))
def arrow():
rotated_image = pygame.transform.rotate(arrow_Img, -wd)
screen.blit(rotated_image, (arrow_x, arrow_y))
def farm_viz():
screen.blit(farm_img, (0, 0))
# Game loop
running = True
while running:
screen.fill((255, 255, 255))
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
if wd < wd_change:
wd = 360 - wd_change
else:
wd -= wd_change
farm_img, power, new_power = update_env()
if event.key == pygame.K_RIGHT:
if wd + wd_change >= 360:
wd = wd + wd_change - 360
else:
wd += wd_change
farm_img, power, new_power = update_env()
farm_viz()
show_env_params(textX, textY, power, new_power)
arrow()
pygame.display.update()
|
[
"argparse.ArgumentParser",
"pygame.event.get",
"ray.rllib.agents.ppo.PPOTrainer",
"WindAI.farm_env.env.FarmEnv",
"pygame.display.update",
"pygame.font.Font",
"matplotlib.backends.backend_agg.FigureCanvasAgg",
"pygame.display.set_mode",
"ray.rllib.models.torch.fcnet.FullyConnectedNetwork",
"pygame.display.set_caption",
"ray.rllib.utils.framework.try_import_tf",
"matplotlib.pyplot.subplots",
"ray.init",
"WindAI.floris.tools.visualization.visualize_cut_plane",
"pygame.init",
"ray.rllib.agents.sac.SACTrainer",
"ray.rllib.models.ModelCatalog.register_custom_model",
"WindAI.floris.optimize_AI.farminit",
"ray.rllib.models.tf.fcnet.FullyConnectedNetwork",
"pygame.image.load",
"pygame.transform.rotate",
"ray.rllib.models.torch.torch_modelv2.TorchModelV2.__init__",
"pygame.image.fromstring",
"ray.rllib.agents.ddpg.DDPGTrainer",
"ray.rllib.utils.framework.try_import_torch"
] |
[((1228, 1243), 'ray.rllib.utils.framework.try_import_tf', 'try_import_tf', ([], {}), '()\n', (1241, 1243), False, 'from ray.rllib.utils.framework import try_import_tf, try_import_torch\n'), ((1256, 1274), 'ray.rllib.utils.framework.try_import_torch', 'try_import_torch', ([], {}), '()\n', (1272, 1274), False, 'from ray.rllib.utils.framework import try_import_tf, try_import_torch\n'), ((1285, 1310), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1308, 1310), False, 'import argparse\n'), ((3288, 3388), 'ray.rllib.models.ModelCatalog.register_custom_model', 'ModelCatalog.register_custom_model', (['"""my_model"""', '(TorchCustomModel if args.torch else CustomModel)'], {}), "('my_model', TorchCustomModel if args.\n torch else CustomModel)\n", (3322, 3388), False, 'from ray.rllib.models import ModelCatalog\n'), ((3398, 3411), 'pygame.init', 'pygame.init', ([], {}), '()\n', (3409, 3411), False, 'import pygame\n'), ((3417, 3427), 'ray.init', 'ray.init', ([], {}), '()\n', (3425, 3427), False, 'import ray\n'), ((3626, 3670), 'WindAI.floris.optimize_AI.farminit', 'farminit', (['args.num_wt_rows', 'args.num_wt_cols'], {}), '(args.num_wt_rows, args.num_wt_cols)\n', (3634, 3670), False, 'from WindAI.floris.optimize_AI import farminit\n'), ((4305, 4338), 'WindAI.farm_env.env.FarmEnv', 'FarmEnv', (["env_config['env_config']"], {}), "(env_config['env_config'])\n", (4312, 4338), False, 'from WindAI.farm_env.env import FarmEnv\n'), ((5470, 5510), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(20)'], {}), "('freesansbold.ttf', 20)\n", (5486, 5510), False, 'import pygame\n'), ((5595, 5632), 'pygame.image.load', 'pygame.image.load', (['"""wind-compass.png"""'], {}), "('wind-compass.png')\n", (5612, 5632), False, 'import pygame\n'), ((5681, 5716), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(800, 600)'], {}), '((800, 600))\n', (5704, 5716), False, 'import pygame\n'), ((5722, 5758), 'pygame.display.set_caption', 
'pygame.display.set_caption', (['"""WindAI"""'], {}), "('WindAI')\n", (5748, 5758), False, 'import pygame\n'), ((1904, 1983), 'ray.rllib.models.tf.fcnet.FullyConnectedNetwork', 'FullyConnectedNetwork', (['obs_space', 'action_space', 'num_outputs', 'model_config', 'name'], {}), '(obs_space, action_space, num_outputs, model_config, name)\n', (1925, 1983), False, 'from ray.rllib.models.tf.fcnet import FullyConnectedNetwork\n'), ((2560, 2649), 'ray.rllib.models.torch.torch_modelv2.TorchModelV2.__init__', 'TorchModelV2.__init__', (['self', 'obs_space', 'action_space', 'num_outputs', 'model_config', 'name'], {}), '(self, obs_space, action_space, num_outputs,\n model_config, name)\n', (2581, 2649), False, 'from ray.rllib.models.torch.torch_modelv2 import TorchModelV2\n'), ((2741, 2806), 'ray.rllib.models.torch.fcnet.FullyConnectedNetwork', 'TorchFC', (['obs_space', 'action_space', 'num_outputs', 'model_config', 'name'], {}), '(obs_space, action_space, num_outputs, model_config, name)\n', (2748, 2806), True, 'from ray.rllib.models.torch.fcnet import FullyConnectedNetwork as TorchFC\n'), ((4705, 4730), 'ray.rllib.agents.ppo.PPOTrainer', 'PPOTrainer', ([], {'config': 'config'}), '(config=config)\n', (4715, 4730), False, 'from ray.rllib.agents.ppo import PPOTrainer\n'), ((6748, 6762), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (6760, 6762), True, 'import matplotlib.pyplot as plt\n'), ((6771, 6827), 'WindAI.floris.tools.visualization.visualize_cut_plane', 'wfct.visualization.visualize_cut_plane', (['hor_plane'], {'ax': 'ax'}), '(hor_plane, ax=ax)\n', (6809, 6827), True, 'from WindAI.floris import tools as wfct\n'), ((6845, 6869), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'agg.FigureCanvasAgg', (['fig'], {}), '(fig)\n', (6864, 6869), True, 'import matplotlib.backends.backend_agg as agg\n'), ((7032, 7078), 'pygame.image.fromstring', 'pygame.image.fromstring', (['raw_data', 'size', '"""RGB"""'], {}), "(raw_data, size, 'RGB')\n", (7055, 7078), False, 
'import pygame\n'), ((7985, 8024), 'pygame.transform.rotate', 'pygame.transform.rotate', (['arrow_Img', '(-wd)'], {}), '(arrow_Img, -wd)\n', (8008, 8024), False, 'import pygame\n'), ((8256, 8274), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (8272, 8274), False, 'import pygame\n'), ((9025, 9048), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (9046, 9048), False, 'import pygame\n'), ((4921, 4946), 'ray.rllib.agents.sac.SACTrainer', 'SACTrainer', ([], {'config': 'config'}), '(config=config)\n', (4931, 4946), False, 'from ray.rllib.agents.sac import SACTrainer\n'), ((5139, 5165), 'ray.rllib.agents.ddpg.DDPGTrainer', 'DDPGTrainer', ([], {'config': 'config'}), '(config=config)\n', (5150, 5165), False, 'from ray.rllib.agents.ddpg import DDPGTrainer\n')]
|
import smart_imports
smart_imports.all()
class HeroStatisticsTest(utils_testcase.TestCase):
def setUp(self):
super(HeroStatisticsTest, self).setUp()
self.place_1, self.place_2, self.place_3 = game_logic.create_test_map()
account = self.accounts_factory.create_account()
self.storage = game_logic_storage.LogicStorage()
self.storage.load_account_data(account)
self.hero = self.storage.accounts_to_heroes[account.id]
self.hero.statistics.change_money(relations.MONEY_SOURCE.EARNED_FROM_LOOT, 10)
self.hero.statistics.change_artifacts_had(11)
self.hero.statistics.change_quests_done(12)
self.hero.statistics.change_pve_kills(13)
self.hero.statistics.change_pve_deaths(14)
self.hero.statistics.change_pvp_battles_1x1_victories(16)
self.hero.statistics.change_help_count(17)
self.hero.statistics.change_cards_used(18)
self.hero.statistics.change_cards_combined(19)
self.hero.statistics.change_gifts_returned(20)
self.hero.statistics.change_companions_count(21)
self.assertEqual(self.hero.statistics.pvp_battles_1x1_number, 16)
def test_change_money__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_money(relations.MONEY_SOURCE.SPEND_FOR_HEAL, 666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.MONEY,
old_value=10,
new_value=10)])
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_money(relations.MONEY_SOURCE.EARNED_FROM_LOOT, 666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.MONEY,
old_value=10,
new_value=10 + 666)])
def test_change_artifacts_had__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_artifacts_had(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.ARTIFACTS,
old_value=11,
new_value=11 + 666)])
def test_change_quests_done__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_quests_done(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.QUESTS,
old_value=12,
new_value=12 + 666)])
def test_change_pve_kills__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_pve_kills(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.MOBS,
old_value=13,
new_value=13 + 666)])
def test_change_pve_deaths__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_pve_deaths(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.DEATHS,
old_value=14,
new_value=14 + 666)])
def test_change_pvp_battles_1x1_victories__achievements(self):
self.assertTrue(self.hero.statistics.pvp_battles_1x1_number < conf.settings.MIN_PVP_BATTLES)
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_pvp_battles_1x1_victories(1)
self.assertEqual(verify_achievements.call_args_list, [mock.call(new_value=0,
old_value=0,
account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.PVP_VICTORIES_1X1),
mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.PVP_BATTLES_1X1,
old_value=16,
new_value=16 + 1)])
self.hero.statistics.change_pvp_battles_1x1_defeats(conf.settings.MIN_PVP_BATTLES)
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_pvp_battles_1x1_victories(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.PVP_VICTORIES_1X1,
old_value=int(float(17) / (17 + conf.settings.MIN_PVP_BATTLES) * 100),
new_value=int(float(17 + 666) / self.hero.statistics.pvp_battles_1x1_number * 100)),
mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.PVP_BATTLES_1X1,
old_value=16 + 1 + conf.settings.MIN_PVP_BATTLES,
new_value=16 + 1 + conf.settings.MIN_PVP_BATTLES + 666)])
def test_change_pvp_battles_1x1_draws__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_pvp_battles_1x1_draws(1)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.PVP_BATTLES_1X1,
old_value=16,
new_value=16 + 1)])
def test_change_pvp_battles_1x1_defeats__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_pvp_battles_1x1_defeats(1)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.PVP_BATTLES_1X1,
old_value=16,
new_value=16 + 1)])
def test_change_help_count__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_help_count(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.KEEPER_HELP_COUNT,
old_value=17,
new_value=17 + 666)])
def test_change_cards_used__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_cards_used(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.KEEPER_CARDS_USED,
old_value=18,
new_value=18 + 666)])
def test_change_cards_combined__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_cards_combined(666)
self.assertEqual(verify_achievements.call_args_list, [mock.call(account_id=self.hero.account_id,
type=achievements_relations.ACHIEVEMENT_TYPE.KEEPER_CARDS_COMBINED,
old_value=19,
new_value=19 + 666)])
def test_change_giftst_used__achievements(self):
with mock.patch('the_tale.accounts.achievements.storage.AchievementsStorage.verify_achievements') as verify_achievements:
self.hero.statistics.change_gifts_returned(666)
self.assertEqual(verify_achievements.call_args_list, [])
@mock.patch('the_tale.game.heroes.conf.settings.MIN_PVP_BATTLES', 1000)
def test_change_pvp_battles_1x1_victories__multiple_victories_achievements(self):
achievement_1 = achievements_prototypes.AchievementPrototype.create(group=achievements_relations.ACHIEVEMENT_GROUP.MONEY, type=achievements_relations.ACHIEVEMENT_TYPE.PVP_VICTORIES_1X1, barrier=10, points=10,
caption='achievement_1', description='description_1', approved=True)
achievement_2 = achievements_prototypes.AchievementPrototype.create(group=achievements_relations.ACHIEVEMENT_GROUP.MONEY, type=achievements_relations.ACHIEVEMENT_TYPE.PVP_VICTORIES_1X1, barrier=20, points=10,
caption='achievement_2', description='description_2', approved=True)
achievement_3 = achievements_prototypes.AchievementPrototype.create(group=achievements_relations.ACHIEVEMENT_GROUP.MONEY, type=achievements_relations.ACHIEVEMENT_TYPE.PVP_VICTORIES_1X1, barrier=30, points=10,
caption='achievement_3', description='description_3', approved=True)
achievement_4 = achievements_prototypes.AchievementPrototype.create(group=achievements_relations.ACHIEVEMENT_GROUP.MONEY, type=achievements_relations.ACHIEVEMENT_TYPE.PVP_VICTORIES_1X1, barrier=40, points=10,
caption='achievement_4', description='description_4', approved=True)
self.assertEqual(self.hero.statistics.pvp_battles_1x1_number, 16)
with self.check_not_changed(achievements_prototypes.GiveAchievementTaskPrototype._db_count):
self.hero.statistics.change_pvp_battles_1x1_draws(400)
self.hero.statistics.change_pvp_battles_1x1_defeats(300)
with self.check_delta(achievements_prototypes.GiveAchievementTaskPrototype._db_count, 3):
self.hero.statistics.change_pvp_battles_1x1_victories(300)
self.assertEqual(set(achievements_prototypes.GiveAchievementTaskPrototype._db_all().values_list('achievement_id', flat=True)),
set([achievement_1.id, achievement_2.id, achievement_3.id]))
with self.check_delta(achievements_prototypes.GiveAchievementTaskPrototype._db_count, 1):
self.hero.statistics.change_pvp_battles_1x1_victories(200)
self.assertEqual(set(achievements_prototypes.GiveAchievementTaskPrototype._db_all().values_list('achievement_id', flat=True)),
set([achievement_1.id, achievement_2.id, achievement_3.id, achievement_4.id]))
self.assertEqual(self.hero.statistics.pvp_battles_1x1_number, 1216)
self.assertEqual(self.hero.statistics.pvp_battles_1x1_victories, 516)
self.assertEqual(self.hero.statistics.pvp_battles_1x1_defeats, 300)
self.assertEqual(self.hero.statistics.pvp_battles_1x1_draws, 400)
|
[
"smart_imports.all"
] |
[((23, 42), 'smart_imports.all', 'smart_imports.all', ([], {}), '()\n', (40, 42), False, 'import smart_imports\n')]
|
from pathlib import Path
COIN_MARKET_CAP_URL = "https://coinmarketcap.com/currencies/bitcoin/markets"
WEB_API_EXCHANGE_PRICE_URL = "https://web-api.coinmarketcap.com/v1/exchange/market-pairs/latest?id="
COIN_NAME_BITCOIN = "BitCoin"
COIN_NAME_ETC = "ETC"
TARGET_EXCHANGE_SET = {"Binance", "Gemini", "Bitfinex", "CoinsBank"}
TARGET_EXCHANGE_ID = {270, 107, 151, 656} # 89 - Coinbase Pro
TARGET_COIN_PAIR = {"BTC/USD", "BTC/USDT"}
CRYPTO_SYMBOL_SET = {COIN_NAME_BITCOIN, COIN_NAME_ETC}
TEST_DB_NAME = "test.db"
CRYPTO_DB_NAME = "crypto.db"
BITCOIN_CRAWLING_PERIOD_SEC = 60
BITCOIN_PRICE_ALARM_MIN = 3000 # $USD
BITCOIN_PRICE_ALARM_MAX = 20000 # $USD
BITCOIN_PRICE_VALIDATE_MAX = 30000 # $USD
TRANS_FEE_PERCENTAGE_AVG = 0.0005 # 0.05%
ROOT_DIR = Path(__file__).resolve().parent
CSV_FOLDER_PATH = ROOT_DIR.joinpath("files/csv")
CREATE_TABLE_CRYPTO = """
CREATE TABLE IF NOT EXISTS crypto_price (
id INTEGER PRIMARY KEY AUTOINCREMENT,
exchange TEXT NOT NULL,
coin_name TEXT NOT NULL,
price REAL NOT NULL,
pricing_time INTEGER NOT NULL,
volume REAL NOT NULL,
volume_p REAL NOT NULL,
fee_type TEXT NOT NULL,
coin_pair TEXT NOT NULL
);
"""
SELECT_CRYPTO = """
SELECT id, exchange, coin_name, price, pricing_time, volume,
volume_p, fee_type, coin_pair
FROM crypto_price where exchange = 'Binance';
"""
SELECT_CRYPTO_RECENT_500 = """
SELECT id, exchange, coin_name, price, pricing_time,
volume, volume_p, fee_type, coin_pair
FROM crypto_price WHERE exchange = 'Binance'
ORDER BY id DESC LIMIT 500;
"""
INSERT_CRYPTO_MANY = """
INSERT INTO crypto_price(exchange, coin_name, price, pricing_time, volume, volume_p, fee_type, coin_pair)
VALUES(?, ?, ?, ?, ?, ?, ?, ?)
"""
CREATE_TABLE_DUMMY = """
CREATE TABLE IF NOT EXISTS dummy (
id INTEGER PRIMARY KEY AUTOINCREMENT,
content TEXT NOT NULL,
update_time REAL NOT NULL
);
"""
INSERT_MANY_DUMMY = """
INSERT INTO dummy(content, update_time) VALUES(?, ?);
"""
SELECT_DUMMY = """
SELECT * FROM dummy;
"""
DELETE_DUMMY = """
DELETE FROM dummy WHERE id = ?;
"""
DROP_DUMMY = """
DROP TABLE dummy;
"""
|
[
"pathlib.Path"
] |
[((783, 797), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (787, 797), False, 'from pathlib import Path\n')]
|
# Who were the top 3 editors during that hour?
import requests
import json
ENDPOINT = 'https://en.wikipedia.org/w/api.php'
parameters = { 'action' : 'query',
'prop' : 'revisions',
'titles' : 'Panama_Papers',
'format' : 'json',
'rvdir' : 'newer',
'rvstart': '2016-04-03T21:00:00Z',
'rvend' : '2016-04-03T21:59:59Z',
'rvlimit' : 500,
'continue' : '' }
users = {}
done = False
while not done:
wp_call = requests.get(ENDPOINT, params=parameters)
response = wp_call.json()
pages = response['query']['pages']
# print(pages)
for page_id in pages:
page = pages[page_id]
revisions = page['revisions']
for rev in revisions:
revuser = rev['user']
if revuser in users.keys():
users[revuser] += 1
else:
users[revuser] = 1
if 'continue' in response:
parameters['continue'] = response['continue']['continue']
parameters['rvcontinue'] = response['continue']['rvcontinue']
else:
done = True
# print(json.dumps(users, indent=4))
top_3_users = []
user_list = []
for ukey,uval in users.items():
user_list.append([ukey,uval])
# print(user_list)
top_3_editors = []
found_three = False
while not found_three:
top_edit_val_seen = 0
top_editors_seen = []
for u in user_list:
if u[1] > top_edit_val_seen:
top_edit_val_seen = u[1]
top_editors_seen[:] = []
top_editors_seen.append(u[0])
elif u[1] == top_edit_val_seen:
top_editors_seen.append(u[0])
else:
pass
for t in top_editors_seen:
x = [t,top_edit_val_seen]
top_3_editors.append(x)
user_list.remove(x)
if len(top_3_editors) >= 3:
break
else:
continue
print(top_3_editors)
|
[
"requests.get"
] |
[((528, 569), 'requests.get', 'requests.get', (['ENDPOINT'], {'params': 'parameters'}), '(ENDPOINT, params=parameters)\n', (540, 569), False, 'import requests\n')]
|
"""
Copied from ESRI documentation
"""
import arcpy, time, smtplib
# Set the workspace
arcpy.env.workspace = 'Database Connections/admin.sde'
# Set a variable for the workspace
workspace = arcpy.env.workspace
# Get a list of connected users.
userList = arcpy.ListUsers("Database Connections/admin.sde")
# Get a list of user names of users currently connected and make email addresses
emailList = [u.Name + "@yourcompany.com" for user in arcpy.ListUsers("Database Connections/admin.sde")]
# Take the email list and use it to send an email to connected users.
SERVER = "mailserver.yourcompany.com"
FROM = "SDE Admin <<EMAIL>>"
TO = emailList
SUBJECT = "Maintenance is about to be performed"
MSG = "Auto generated Message.\n\rServer maintenance will be performed in 15 minutes. Please log off."
# Prepare actual message
MESSAGE = """\
From: %s
To: %s
Subject: %s
%s
""" % (FROM, ", ".join(TO), SUBJECT, MSG)
# Send the mail
server = smtplib.SMTP(SERVER)
server.sendmail(FROM, TO, MESSAGE)
server.quit()
# Block new connections to the database.
arcpy.AcceptConnections('Database Connections/admin.sde', False)
# Wait 15 minutes
time.sleep(900)
# Disconnect all users from the database.
arcpy.DisconnectUser('Database Connections/admin.sde', "ALL")
# Get a list of versions to pass into the ReconcileVersions tool.
versionList = arcpy.ListVersions('Database Connections/admin.sde')
# Execute the ReconcileVersions tool.
arcpy.ReconcileVersions_management('Database Connections/admin.sde', "ALL_VERSIONS", "sde.DEFAULT", versionList, "LOCK_ACQUIRED", "NO_ABORT", "BY_OBJECT", "FAVOR_TARGET_VERSION", "POST", "DELETE_VERSION", "c:/temp/reconcilelog.txt")
# Run the compress tool.
arcpy.Compress_management('Database Connections/admin.sde')
# Allow the database to begin accepting connections again
arcpy.AcceptConnections('Database Connections/admin.sde', True)
# Get a list of datasets owned by the admin user
# Rebuild indexes and analyze the states and states_lineages system tables
arcpy.RebuildIndexes_management(workspace, "SYSTEM", "ALL")
arcpy.AnalyzeDatasets_management(workspace, "SYSTEM", "ANALYZE_BASE", "ANALYZE_DELTA", "ANALYZE_ARCHIVE")
|
[
"arcpy.Compress_management",
"arcpy.ReconcileVersions_management",
"smtplib.SMTP",
"arcpy.DisconnectUser",
"arcpy.AnalyzeDatasets_management",
"arcpy.ListVersions",
"arcpy.RebuildIndexes_management",
"time.sleep",
"arcpy.AcceptConnections",
"arcpy.ListUsers"
] |
[((258, 307), 'arcpy.ListUsers', 'arcpy.ListUsers', (['"""Database Connections/admin.sde"""'], {}), "('Database Connections/admin.sde')\n", (273, 307), False, 'import arcpy, time, smtplib\n'), ((940, 960), 'smtplib.SMTP', 'smtplib.SMTP', (['SERVER'], {}), '(SERVER)\n', (952, 960), False, 'import arcpy, time, smtplib\n'), ((1052, 1116), 'arcpy.AcceptConnections', 'arcpy.AcceptConnections', (['"""Database Connections/admin.sde"""', '(False)'], {}), "('Database Connections/admin.sde', False)\n", (1075, 1116), False, 'import arcpy, time, smtplib\n'), ((1136, 1151), 'time.sleep', 'time.sleep', (['(900)'], {}), '(900)\n', (1146, 1151), False, 'import arcpy, time, smtplib\n'), ((1195, 1256), 'arcpy.DisconnectUser', 'arcpy.DisconnectUser', (['"""Database Connections/admin.sde"""', '"""ALL"""'], {}), "('Database Connections/admin.sde', 'ALL')\n", (1215, 1256), False, 'import arcpy, time, smtplib\n'), ((1338, 1390), 'arcpy.ListVersions', 'arcpy.ListVersions', (['"""Database Connections/admin.sde"""'], {}), "('Database Connections/admin.sde')\n", (1356, 1390), False, 'import arcpy, time, smtplib\n'), ((1430, 1674), 'arcpy.ReconcileVersions_management', 'arcpy.ReconcileVersions_management', (['"""Database Connections/admin.sde"""', '"""ALL_VERSIONS"""', '"""sde.DEFAULT"""', 'versionList', '"""LOCK_ACQUIRED"""', '"""NO_ABORT"""', '"""BY_OBJECT"""', '"""FAVOR_TARGET_VERSION"""', '"""POST"""', '"""DELETE_VERSION"""', '"""c:/temp/reconcilelog.txt"""'], {}), "('Database Connections/admin.sde',\n 'ALL_VERSIONS', 'sde.DEFAULT', versionList, 'LOCK_ACQUIRED', 'NO_ABORT',\n 'BY_OBJECT', 'FAVOR_TARGET_VERSION', 'POST', 'DELETE_VERSION',\n 'c:/temp/reconcilelog.txt')\n", (1464, 1674), False, 'import arcpy, time, smtplib\n'), ((1690, 1749), 'arcpy.Compress_management', 'arcpy.Compress_management', (['"""Database Connections/admin.sde"""'], {}), "('Database Connections/admin.sde')\n", (1715, 1749), False, 'import arcpy, time, smtplib\n'), ((1809, 1872), 'arcpy.AcceptConnections', 
'arcpy.AcceptConnections', (['"""Database Connections/admin.sde"""', '(True)'], {}), "('Database Connections/admin.sde', True)\n", (1832, 1872), False, 'import arcpy, time, smtplib\n'), ((1999, 2058), 'arcpy.RebuildIndexes_management', 'arcpy.RebuildIndexes_management', (['workspace', '"""SYSTEM"""', '"""ALL"""'], {}), "(workspace, 'SYSTEM', 'ALL')\n", (2030, 2058), False, 'import arcpy, time, smtplib\n'), ((2060, 2169), 'arcpy.AnalyzeDatasets_management', 'arcpy.AnalyzeDatasets_management', (['workspace', '"""SYSTEM"""', '"""ANALYZE_BASE"""', '"""ANALYZE_DELTA"""', '"""ANALYZE_ARCHIVE"""'], {}), "(workspace, 'SYSTEM', 'ANALYZE_BASE',\n 'ANALYZE_DELTA', 'ANALYZE_ARCHIVE')\n", (2092, 2169), False, 'import arcpy, time, smtplib\n'), ((443, 492), 'arcpy.ListUsers', 'arcpy.ListUsers', (['"""Database Connections/admin.sde"""'], {}), "('Database Connections/admin.sde')\n", (458, 492), False, 'import arcpy, time, smtplib\n')]
|
import requests
import urllib
from django.conf import settings
from django.contrib.auth import get_user_model, login
from django.contrib import messages
from django.http.response import HttpResponseRedirect
from django.shortcuts import redirect
from django.utils.module_loading import import_string
from django_salesforce_oauth.oauth import OAuth
from django_salesforce_oauth.utils import get_or_create_user
CALLBACK_ERROR_MESSAGE = "CUSTOM_CALLBACK must return a user object or a redirect"
STATE_COOKIE_NAME = "django_salesforce_oauth_state"
def oauth(request, domain="login"):
"""
View for initiating OAuth with Salesforce
"""
url = f"https://{domain}.salesforce.com/services/oauth2/authorize"
url_args = {
"client_id": settings.SFDC_CONSUMER_KEY,
"response_type": "code",
"redirect_uri": settings.OAUTH_REDIRECT_URI,
"scope": settings.SCOPES,
# track whether or not this was a prod org, or a sandbox
# this is separate from the state query param the user
# may pass to this view.
"state": domain,
}
args = urllib.parse.urlencode(url_args)
url = f"{url}?{args}"
response = redirect(url)
state = request.GET.get("state")
if state:
response.set_cookie(STATE_COOKIE_NAME, value=state)
return response
def oauth_callback(request):
"""
View behind the callback URI provided to Salesforce
"""
code = request.GET.get("code")
state = request.GET.get("state")
url = f"https://{state}.salesforce.com/services/oauth2/token"
if not code:
messages.error(request, "Unable to authenticate with Salesforce")
return redirect("index")
data = {
"client_id": settings.SFDC_CONSUMER_KEY,
"client_secret": settings.SFDC_CONSUMER_SECRET,
"redirect_uri": settings.OAUTH_REDIRECT_URI,
"grant_type": "authorization_code",
"code": code,
}
response = requests.post(url, data)
oauth = OAuth(response.json())
if hasattr(settings, "CUSTOM_CALLBACK"):
custom_callback = import_string(settings.CUSTOM_CALLBACK)
state = request.COOKIES.get(STATE_COOKIE_NAME)
if state:
response = custom_callback(request, oauth, state=state)
else:
response = custom_callback(request, oauth)
is_user = type(response) == get_user_model()
is_redirect = type(response) == HttpResponseRedirect
assert is_user or is_redirect, CALLBACK_ERROR_MESSAGE
if is_redirect:
return response
else:
user = response
else:
user = get_or_create_user(oauth)
login(request, user)
messages.info(request, "Authentication with Salesforce successful!")
return redirect(settings.LOGIN_REDIRECT_URL)
|
[
"django.utils.module_loading.import_string",
"urllib.parse.urlencode",
"django.shortcuts.redirect",
"django.contrib.messages.error",
"django.contrib.auth.get_user_model",
"django_salesforce_oauth.utils.get_or_create_user",
"django.contrib.messages.info",
"requests.post",
"django.contrib.auth.login"
] |
[((1108, 1140), 'urllib.parse.urlencode', 'urllib.parse.urlencode', (['url_args'], {}), '(url_args)\n', (1130, 1140), False, 'import urllib\n'), ((1184, 1197), 'django.shortcuts.redirect', 'redirect', (['url'], {}), '(url)\n', (1192, 1197), False, 'from django.shortcuts import redirect\n'), ((1955, 1979), 'requests.post', 'requests.post', (['url', 'data'], {}), '(url, data)\n', (1968, 1979), False, 'import requests\n'), ((2664, 2684), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (2669, 2684), False, 'from django.contrib.auth import get_user_model, login\n'), ((2690, 2758), 'django.contrib.messages.info', 'messages.info', (['request', '"""Authentication with Salesforce successful!"""'], {}), "(request, 'Authentication with Salesforce successful!')\n", (2703, 2758), False, 'from django.contrib import messages\n'), ((2771, 2808), 'django.shortcuts.redirect', 'redirect', (['settings.LOGIN_REDIRECT_URL'], {}), '(settings.LOGIN_REDIRECT_URL)\n', (2779, 2808), False, 'from django.shortcuts import redirect\n'), ((1597, 1662), 'django.contrib.messages.error', 'messages.error', (['request', '"""Unable to authenticate with Salesforce"""'], {}), "(request, 'Unable to authenticate with Salesforce')\n", (1611, 1662), False, 'from django.contrib import messages\n'), ((1678, 1695), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (1686, 1695), False, 'from django.shortcuts import redirect\n'), ((2088, 2127), 'django.utils.module_loading.import_string', 'import_string', (['settings.CUSTOM_CALLBACK'], {}), '(settings.CUSTOM_CALLBACK)\n', (2101, 2127), False, 'from django.utils.module_loading import import_string\n'), ((2633, 2658), 'django_salesforce_oauth.utils.get_or_create_user', 'get_or_create_user', (['oauth'], {}), '(oauth)\n', (2651, 2658), False, 'from django_salesforce_oauth.utils import get_or_create_user\n'), ((2374, 2390), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', 
(2388, 2390), False, 'from django.contrib.auth import get_user_model, login\n')]
|
from datetime import datetime
from operator import itemgetter
from os.path import dirname, join
import pytest # noqa
from city_scrapers_core.constants import BOARD, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from city_scrapers.spiders.chi_land_trust import ChiLandTrustSpider
test_response = file_response(
join(dirname(__file__), "files", "chi_land_trust.html"),
url="https://www.chicago.gov/city/en/depts/doh/supp_info/chicago_communitylandtrust0.html", # noqa
)
spider = ChiLandTrustSpider()
freezer = freeze_time("2019-07-11")
freezer.start()
parsed_items = sorted(
[item for item in spider.parse(test_response)], key=itemgetter("start")
)
freezer.stop()
def test_count():
assert len(parsed_items) == 13
def test_title():
assert parsed_items[-6]["title"] == "Board of Directors"
def test_description():
assert parsed_items[-6]["description"] == ""
def test_start():
assert parsed_items[-6]["start"] == datetime(2019, 2, 7, 9, 0)
def test_end():
assert parsed_items[-6]["end"] is None
def test_time_notes():
assert parsed_items[-6]["time_notes"] == "See agenda to confirm time"
def test_id():
assert parsed_items[-6]["id"] == "chi_land_trust/201902070900/x/board_of_directors"
def test_status():
assert parsed_items[-6]["status"] == PASSED
def test_location():
assert parsed_items[-6]["location"] == spider.location
def test_source():
assert (
parsed_items[-6]["source"]
== "https://www.chicago.gov/city/en/depts/doh/supp_info/chicago_communitylandtrust0.html" # noqa
)
def test_links():
assert parsed_items[-6]["links"] == [
{
"href": "https://www.chicago.gov/content/dam/city/depts/doh/general/CCLT_February_2019_Agernda.pdf", # noqa
"title": "Agenda",
}
]
def test_classification():
assert parsed_items[-6]["classification"] == BOARD
def test_all_day():
assert parsed_items[-6]["all_day"] is False
|
[
"city_scrapers.spiders.chi_land_trust.ChiLandTrustSpider",
"os.path.dirname",
"datetime.datetime",
"freezegun.freeze_time",
"operator.itemgetter"
] |
[((536, 556), 'city_scrapers.spiders.chi_land_trust.ChiLandTrustSpider', 'ChiLandTrustSpider', ([], {}), '()\n', (554, 556), False, 'from city_scrapers.spiders.chi_land_trust import ChiLandTrustSpider\n'), ((568, 593), 'freezegun.freeze_time', 'freeze_time', (['"""2019-07-11"""'], {}), "('2019-07-11')\n", (579, 593), False, 'from freezegun import freeze_time\n'), ((369, 386), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (376, 386), False, 'from os.path import dirname, join\n'), ((690, 709), 'operator.itemgetter', 'itemgetter', (['"""start"""'], {}), "('start')\n", (700, 709), False, 'from operator import itemgetter\n'), ((999, 1025), 'datetime.datetime', 'datetime', (['(2019)', '(2)', '(7)', '(9)', '(0)'], {}), '(2019, 2, 7, 9, 0)\n', (1007, 1025), False, 'from datetime import datetime\n')]
|
import time
import numpy as np
from pyembree import rtcore_scene as rtcs
from pyembree.mesh_construction import TriangleMesh
N = 4
def xplane(x):
return [[[x, -1.0, -1.0],
[x, +1.0, -1.0],
[x, -1.0, +1.0]],
[[x, +1.0, -1.0],
[x, +1.0, +1.0],
[x, -1.0, +1.0]]]
triangles = xplane(7.0)
triangles = np.array(triangles, 'float32')
scene = rtcs.EmbreeScene()
mesh = TriangleMesh(scene, triangles)
origins = np.zeros((N, 3), dtype='float32')
origins[:,0] = 0.1
origins[0,1] = -0.2
origins[1,1] = +0.2
origins[2,1] = +0.3
origins[3,1] = -8.2
dirs = np.zeros((N, 3), dtype='float32')
dirs[:, 0] = 1.0
t1 = time.time()
res = scene.run(origins, dirs, output=1)
t2 = time.time()
print("Ran in {0:.3f} s".format(t2-t1))
print('Output is a dict containing Embree results with id of intersected dimensionless coordinates')
print(res)
ray_inter = res['geomID'] >= 0
print('{0} rays intersect geometry (over {1})'.format(sum(ray_inter), N))
print('Intersection coordinates')
primID = res['primID'][ray_inter]
u = res['u'][ray_inter]
v = res['v'][ray_inter]
w = 1 - u - v
inters = np.vstack(w) * triangles[primID][:, 0, :] + \
np.vstack(u) * triangles[primID][:, 1, :] + \
np.vstack(v) * triangles[primID][:, 2, :]
print(inters)
|
[
"pyembree.rtcore_scene.EmbreeScene",
"numpy.zeros",
"time.time",
"numpy.array",
"pyembree.mesh_construction.TriangleMesh",
"numpy.vstack"
] |
[((370, 400), 'numpy.array', 'np.array', (['triangles', '"""float32"""'], {}), "(triangles, 'float32')\n", (378, 400), True, 'import numpy as np\n'), ((410, 428), 'pyembree.rtcore_scene.EmbreeScene', 'rtcs.EmbreeScene', ([], {}), '()\n', (426, 428), True, 'from pyembree import rtcore_scene as rtcs\n'), ((436, 466), 'pyembree.mesh_construction.TriangleMesh', 'TriangleMesh', (['scene', 'triangles'], {}), '(scene, triangles)\n', (448, 466), False, 'from pyembree.mesh_construction import TriangleMesh\n'), ((478, 511), 'numpy.zeros', 'np.zeros', (['(N, 3)'], {'dtype': '"""float32"""'}), "((N, 3), dtype='float32')\n", (486, 511), True, 'import numpy as np\n'), ((619, 652), 'numpy.zeros', 'np.zeros', (['(N, 3)'], {'dtype': '"""float32"""'}), "((N, 3), dtype='float32')\n", (627, 652), True, 'import numpy as np\n'), ((676, 687), 'time.time', 'time.time', ([], {}), '()\n', (685, 687), False, 'import time\n'), ((734, 745), 'time.time', 'time.time', ([], {}), '()\n', (743, 745), False, 'import time\n'), ((1254, 1266), 'numpy.vstack', 'np.vstack', (['v'], {}), '(v)\n', (1263, 1266), True, 'import numpy as np\n'), ((1144, 1156), 'numpy.vstack', 'np.vstack', (['w'], {}), '(w)\n', (1153, 1156), True, 'import numpy as np\n'), ((1199, 1211), 'numpy.vstack', 'np.vstack', (['u'], {}), '(u)\n', (1208, 1211), True, 'import numpy as np\n')]
|
import configuration.location_configurations as loc_conf
import requests
from requests.structures import CaseInsensitiveDict
class HomeLocation:
def __init__(self, coordinates: [str, None]):
self.coordinates = coordinates
if coordinates:
self.data = {}
self.extract_raw_data()
else:
self.data = None
def extract_raw_data(self):
coordinates_for_url = self.coordinates.replace(" ", "")
coordinates_for_url = coordinates_for_url.split(',')
url = f"https://api.geoapify.com/v1/geocode/reverse?lat={coordinates_for_url[0]}&" \
f"lon={coordinates_for_url[1]}&format=json&apiKey={loc_conf.API_KEY}"
headers = CaseInsensitiveDict()
headers["Accept"] = "application/json"
resp = requests.get(url, headers=headers, verify=False).json()['results'][0]
for d in loc_conf.DETAILS_TO_KEEP_HOME_LOCATION:
if d not in resp.keys():
self.data[d] = None
else:
self.data[d] = resp[d]
def __str__(self):
return str(self.data)
class Place:
def __init__(self, dict_data):
self.data = dict_data
def __str__(self):
return str(self.data['name'])
class Places:
def __init__(self, base_coordinates: [str, None]):
self.base_coordinates = base_coordinates
self.places = {}
self.get_places()
def get_places(self):
for t in loc_conf.PLACE_TYPES:
self.places[t] = Places.get_all_places_by_base_coordinates_and_type(self.base_coordinates, t)
def __str__(self):
return str(self.places)
@staticmethod
def get_all_places_by_base_coordinates_and_type(base_coordinates, type):
print(type)
obj_place = []
url = Places.get_url_for_request(base_coordinates, type)
print(url)
headers = CaseInsensitiveDict()
headers["Accept"] = "application/json"
resp = requests.get(url, headers=headers, verify=False).json()['features']
for p in resp:
properties = p["properties"]
current_place = {}
for d in loc_conf.DETAILS_TO_KEEP_PLACE:
if d not in properties.keys():
current_place[d] = None
else:
current_place[d] = properties[d]
obj_place.append(str(Place(current_place)))
return obj_place
@staticmethod
def get_url_for_request(base_coordinates, type):
coordinates_for_url = base_coordinates.replace(" ", "")
coordinates_for_url = coordinates_for_url.split(',')
string_place_type = type.lower()
string_place_type = string_place_type.split(' / ')
string_place_type[1] = string_place_type[1].replace(" ", "_")
string_place_type = '.'.join(string_place_type)
url = loc_conf.MOST_BASIC_URL + string_place_type + '&' + f'filter=circle:{coordinates_for_url[1]},{coordinates_for_url[0]},' \
f'{loc_conf.RADIUS}&bias=proximity:{coordinates_for_url[1]},{coordinates_for_url[0]}&apiKey={loc_conf.API_KEY}'
return url
if __name__ == "__main__":
p = Places('32.162572, 34.852054')
for t in loc_conf.PLACE_TYPES:
print(f'*** {t} ***')
print(*p.places[t], sep='\n')
|
[
"requests.structures.CaseInsensitiveDict",
"requests.get"
] |
[((719, 740), 'requests.structures.CaseInsensitiveDict', 'CaseInsensitiveDict', ([], {}), '()\n', (738, 740), False, 'from requests.structures import CaseInsensitiveDict\n'), ((1898, 1919), 'requests.structures.CaseInsensitiveDict', 'CaseInsensitiveDict', ([], {}), '()\n', (1917, 1919), False, 'from requests.structures import CaseInsensitiveDict\n'), ((1982, 2030), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (1994, 2030), False, 'import requests\n'), ((803, 851), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (815, 851), False, 'import requests\n')]
|
from django.shortcuts import render
# Create your views here.
from django.shortcuts import render, redirect
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from main.user_utils import get_id
from main.utils import get_md5, generate_token, certify_token
from passport.models import User, UserGroup
def login(request):
context = {}
user_id = request.session.get("user_id")
token = request.session.get("token")
if bool(user_id) & bool(token):
if certify_token(user_id, token):
context['msg_tips'] = '登录成功'
context['bese_title'] = '个人中心'
return render(request, 'passport/user_center.html', context)
else:
context['msg_tips'] = '登录过期,重新登录'
else:
context['msg_tips'] = '欢迎登录'
request.session.clear()
context['bese_title'] = 'Login'
return render(request, 'passport/login.html', context)
@csrf_exempt
def login_result(request):
context = {}
if request.method == 'POST':
username = request.POST.get('id_username')
password = request.POST.get('<PASSWORD>')
user = User.objects.filter(account=username, pwd=get_md5(password))
if user:
request.session["user_id"] = str(user[0].id)
request.session["token"] = generate_token(str(user[0].id))
context['msg_tips'] = '登录成功'
context['base_title'] = '个人中心'
context['user']=user
return render(request, 'passport/user_center.html', context)
else:
context['msg_tips'] = '登录失败,没有该用户'
else:
context['msg_tips'] = '请求失败,请重试'
context['bese_title'] = 'Login'
return render(request, 'passport/login.html', context)
def regist(request):
context = {}
context['msg_tips'] = '用户注册'
context['bese_title'] = '注册'
context['groups'] = UserGroup.objects.order_by("id")
return render(request, 'passport/page_regist.html', context)
@csrf_exempt
def action_regist(request):
context = {}
if request.method == 'POST':
username = request.POST.get('id_username')
password = request.POST.get('id_password')
nick = request.POST.get('id_nick')
phone = request.POST.get('id_phone')
sex = request.POST.get('id_sex')
group_id = request.POST.get('id_group')
print(username, password, nick, phone, sex)
if bool(username) & bool(password) & bool(nick) & bool(phone) & bool(sex) & bool(group_id):
try:
id = get_id()
user = User(id=id, account=username, pwd=password, nick_name=nick, phone=phone, sex=sex,
group_id=group_id)
user.save()
request.session["user_id"] = str(id)
request.session["token"] = generate_token(str(id))
context['msg_tips'] = '注册成功'
context['base_title'] = '个人中心'
context['user'] = user
return render(request, 'passport/user_center.html', context)
except Exception as err:
print('err:', err)
context['msg_tips'] = '数据库错误,请重试'
else:
context['msg_tips'] = '信息不全,请重试'
else:
context['msg_tips'] = '请求失败,请重试'
context['bese_title'] = '注册'
return render(request, 'passport/page_regist.html', context)
|
[
"passport.models.User",
"main.utils.get_md5",
"passport.models.UserGroup.objects.order_by",
"main.user_utils.get_id",
"django.shortcuts.render",
"main.utils.certify_token"
] |
[((878, 925), 'django.shortcuts.render', 'render', (['request', '"""passport/login.html"""', 'context'], {}), "(request, 'passport/login.html', context)\n", (884, 925), False, 'from django.shortcuts import render, redirect\n'), ((1689, 1736), 'django.shortcuts.render', 'render', (['request', '"""passport/login.html"""', 'context'], {}), "(request, 'passport/login.html', context)\n", (1695, 1736), False, 'from django.shortcuts import render, redirect\n'), ((1868, 1900), 'passport.models.UserGroup.objects.order_by', 'UserGroup.objects.order_by', (['"""id"""'], {}), "('id')\n", (1894, 1900), False, 'from passport.models import User, UserGroup\n'), ((1912, 1965), 'django.shortcuts.render', 'render', (['request', '"""passport/page_regist.html"""', 'context'], {}), "(request, 'passport/page_regist.html', context)\n", (1918, 1965), False, 'from django.shortcuts import render, redirect\n'), ((3321, 3374), 'django.shortcuts.render', 'render', (['request', '"""passport/page_regist.html"""', 'context'], {}), "(request, 'passport/page_regist.html', context)\n", (3327, 3374), False, 'from django.shortcuts import render, redirect\n'), ((508, 537), 'main.utils.certify_token', 'certify_token', (['user_id', 'token'], {}), '(user_id, token)\n', (521, 537), False, 'from main.utils import get_md5, generate_token, certify_token\n'), ((642, 695), 'django.shortcuts.render', 'render', (['request', '"""passport/user_center.html"""', 'context'], {}), "(request, 'passport/user_center.html', context)\n", (648, 695), False, 'from django.shortcuts import render, redirect\n'), ((1476, 1529), 'django.shortcuts.render', 'render', (['request', '"""passport/user_center.html"""', 'context'], {}), "(request, 'passport/user_center.html', context)\n", (1482, 1529), False, 'from django.shortcuts import render, redirect\n'), ((1176, 1193), 'main.utils.get_md5', 'get_md5', (['password'], {}), '(password)\n', (1183, 1193), False, 'from main.utils import get_md5, generate_token, certify_token\n'), ((2528, 
2536), 'main.user_utils.get_id', 'get_id', ([], {}), '()\n', (2534, 2536), False, 'from main.user_utils import get_id\n'), ((2560, 2664), 'passport.models.User', 'User', ([], {'id': 'id', 'account': 'username', 'pwd': 'password', 'nick_name': 'nick', 'phone': 'phone', 'sex': 'sex', 'group_id': 'group_id'}), '(id=id, account=username, pwd=password, nick_name=nick, phone=phone,\n sex=sex, group_id=group_id)\n', (2564, 2664), False, 'from passport.models import User, UserGroup\n'), ((2991, 3044), 'django.shortcuts.render', 'render', (['request', '"""passport/user_center.html"""', 'context'], {}), "(request, 'passport/user_center.html', context)\n", (2997, 3044), False, 'from django.shortcuts import render, redirect\n')]
|
# ______ _____ _ ________
# / ____/___ / ___/(_)___ ___ / _/ __ |
# / / / __ \\__ \/ / __ `__ \ / // / / /
# / /___/ /_/ /__/ / / / / / / // // /_/ /
# \____/\____/____/_/_/ /_/ /_/___/\____/
# Kratos CoSimulationApplication
#
# License: BSD License, see license.txt
#
# Main authors: <NAME> (https://github.com/philbucher)
#
import CoSimIO
from CoSimIO.mpi import ConnectMPI
from mpi4py import MPI # this initializes and finalizes MPI (calls MPI_Init and MPI_Finalize)
from CoSimIO.mpi.mpi4pyInterface import mpi4pyCommHolder
def cosimio_check_equal(a, b):
assert a == b
# Connection Settings
settings = CoSimIO.Info()
settings.SetString("my_name", "py_mesh_import_solver")
settings.SetString("connect_to", "py_mesh_export_solver")
settings.SetInt("echo_level", 1)
settings.SetString("version", "1.25")
# Connecting
return_info = ConnectMPI(settings, mpi4pyCommHolder(MPI.COMM_WORLD))
cosimio_check_equal(return_info.GetInt("connection_status"), CoSimIO.ConnectionStatus.Connected)
connection_name = return_info.GetString("connection_name")
# Importing mesh
info = CoSimIO.Info()
info.SetString("identifier", "fluid_mesh")
info.SetString("connection_name", connection_name)
model_part = CoSimIO.ModelPart("mp_exchange")
return_info = CoSimIO.ImportMesh(info, model_part)
# Checking the imported mesh
expected_nodal_coords = [
(0.0, 2.5, 1.0),
(2.0, 0.0, 1.5),
(2.0, 2.5, 1.5),
(4.0, 2.5, 1.7),
(4.0, 0.0, 1.7),
(6.0, 0.0, 1.8)
]
expected_element_connectivities = [
(1, 2, 3),
(2, 4, 3),
(2, 5, 4),
(4, 5, 6),
]
cosimio_check_equal(model_part.NumberOfNodes(), len(expected_nodal_coords))
cosimio_check_equal(model_part.NumberOfElements(), len(expected_element_connectivities))
for i, (coords, node) in enumerate(zip(expected_nodal_coords, model_part.Nodes)):
cosimio_check_equal(i+1, node.Id())
cosimio_check_equal(coords[0], node.X())
cosimio_check_equal(coords[1], node.Y())
cosimio_check_equal(coords[2], node.Z())
for i, (conn, elem) in enumerate(zip(expected_element_connectivities, model_part.Elements)):
cosimio_check_equal(i+1, elem.Id())
cosimio_check_equal(elem.Type(), CoSimIO.ElementType.Triangle3D3)
cosimio_check_equal(elem.NumberOfNodes(), 3)
for j, node in enumerate(elem.Nodes):
cosimio_check_equal(node.Id(), conn[j])
# Disconnecting
disconnect_settings = CoSimIO.Info()
disconnect_settings.SetString("connection_name", connection_name)
return_info = CoSimIO.Disconnect(disconnect_settings)
cosimio_check_equal(return_info.GetInt("connection_status"), CoSimIO.ConnectionStatus.Disconnected)
|
[
"CoSimIO.ModelPart",
"CoSimIO.ImportMesh",
"CoSimIO.Disconnect",
"CoSimIO.Info",
"CoSimIO.mpi.mpi4pyInterface.mpi4pyCommHolder"
] |
[((656, 670), 'CoSimIO.Info', 'CoSimIO.Info', ([], {}), '()\n', (668, 670), False, 'import CoSimIO\n'), ((1119, 1133), 'CoSimIO.Info', 'CoSimIO.Info', ([], {}), '()\n', (1131, 1133), False, 'import CoSimIO\n'), ((1242, 1274), 'CoSimIO.ModelPart', 'CoSimIO.ModelPart', (['"""mp_exchange"""'], {}), "('mp_exchange')\n", (1259, 1274), False, 'import CoSimIO\n'), ((1289, 1325), 'CoSimIO.ImportMesh', 'CoSimIO.ImportMesh', (['info', 'model_part'], {}), '(info, model_part)\n', (1307, 1325), False, 'import CoSimIO\n'), ((2414, 2428), 'CoSimIO.Info', 'CoSimIO.Info', ([], {}), '()\n', (2426, 2428), False, 'import CoSimIO\n'), ((2509, 2548), 'CoSimIO.Disconnect', 'CoSimIO.Disconnect', (['disconnect_settings'], {}), '(disconnect_settings)\n', (2527, 2548), False, 'import CoSimIO\n'), ((904, 936), 'CoSimIO.mpi.mpi4pyInterface.mpi4pyCommHolder', 'mpi4pyCommHolder', (['MPI.COMM_WORLD'], {}), '(MPI.COMM_WORLD)\n', (920, 936), False, 'from CoSimIO.mpi.mpi4pyInterface import mpi4pyCommHolder\n')]
|
import asyncio
import httpx
import pytest
from asgi_lifespan import LifespanManager
from fastapi import status
@pytest.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
loop.close()
@pytest.fixture
async def test_client():
async with LifespanManager(app):
async with httpx.AsyncClient(app=app, base_url="http://app.io") as test_client:
yield test_client
@pytest.mark.asyncio
class TestCreatePerson:
async def test_invalid(self, test_client: httpx.AsyncClient):
payload = {"first_name": "John", "last_name": "Doe"}
response = await test_client.post("/persons", json=payload)
assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
async def test_valid(self, test_client: httpx.AsyncClient):
payload = {"first_name": "John", "last_name": "Doe", "age": 30}
response = await test_client.post("/persons", json=payload)
assert response.status_code == status.HTTP_201_CREATED
json = response.json()
assert json == payload
|
[
"httpx.AsyncClient",
"asgi_lifespan.LifespanManager",
"pytest.fixture",
"asyncio.get_event_loop"
] |
[((115, 146), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (129, 146), False, 'import pytest\n'), ((176, 200), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (198, 200), False, 'import asyncio\n'), ((291, 311), 'asgi_lifespan.LifespanManager', 'LifespanManager', (['app'], {}), '(app)\n', (306, 311), False, 'from asgi_lifespan import LifespanManager\n'), ((332, 384), 'httpx.AsyncClient', 'httpx.AsyncClient', ([], {'app': 'app', 'base_url': '"""http://app.io"""'}), "(app=app, base_url='http://app.io')\n", (349, 384), False, 'import httpx\n')]
|
# coding:UTF-8
import time
import hashlib
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from message.models import AppModel, TokenModel, AccountModel
def index(request):
apps = AppModel.objects.order_by("-id").filter(status=0)
return render_to_response("admin/index.html", {'apps': apps})
def addApp(request):
app_name = request.POST.get("appName", None)
app_key = hashlib.md5(str(time.time())).hexdigest()
app_model = AppModel(appName=app_name, appKey=app_key, status=0, createTime=time.strftime("%Y-%m-%d %H:%M:%S"))
app_model.save()
return HttpResponseRedirect("/admin/")
def deleteApp(request, app_id):
AppModel.objects.filter(id=app_id).update(status=2)
return HttpResponseRedirect("/admin/")
def detail(request, app_id):
"显示应用详情"
app = AppModel.objects.get(id=app_id)
appTokens = TokenModel.objects.filter(appId=app_id)
return render_to_response("admin/detail.html", {"app": app, "appTokens": appTokens})
def addToken(request, app_id):
"添加应用token"
token = request.POST.get("token", None)
able_time = request.POST.get("ableTime", None)
obj = TokenModel(appId=app_id, token=token, ableTime=able_time)
obj.save()
return HttpResponseRedirect("/admin/detail/%s" % (app_id,))
def deleteToken(request, app_id, token_id):
"删除应用token"
TokenModel.objects.filter(id=token_id, appId=app_id).delete()
return HttpResponseRedirect("/admin/detail/%s" % (app_id,))
def account(request):
"账号登录"
if request.method == "GET":
error = request.GET.get("error", 0)
return render_to_response("admin/account.html", {})
else:
username = request.POST.get("username", None)
password = request.POST.get("password", None)
try:
account_model = AccountModel.objects.get(username=username)
except ObjectDoesNotExist:
return HttpResponseRedirect("/admin/account?error=1")
if account_model.password == hashlib.md5(password).hexdigest():
request.session['admin'] = time.time()
return HttpResponseRedirect("/admin/")
else:
return HttpResponseRedirect("/admin/account?error=2")
def exit_account(request):
"用户退出"
if 'admin' in request.session:
del request.session['admin']
return HttpResponseRedirect("/admin/account")
|
[
"message.models.TokenModel",
"django.shortcuts.render_to_response",
"hashlib.md5",
"message.models.AppModel.objects.order_by",
"message.models.AppModel.objects.filter",
"time.strftime",
"message.models.AccountModel.objects.get",
"time.time",
"django.http.HttpResponseRedirect",
"message.models.TokenModel.objects.filter",
"message.models.AppModel.objects.get"
] |
[((346, 400), 'django.shortcuts.render_to_response', 'render_to_response', (['"""admin/index.html"""', "{'apps': apps}"], {}), "('admin/index.html', {'apps': apps})\n", (364, 400), False, 'from django.shortcuts import render_to_response\n'), ((677, 708), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/admin/"""'], {}), "('/admin/')\n", (697, 708), False, 'from django.http import HttpResponseRedirect\n'), ((810, 841), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/admin/"""'], {}), "('/admin/')\n", (830, 841), False, 'from django.http import HttpResponseRedirect\n'), ((896, 927), 'message.models.AppModel.objects.get', 'AppModel.objects.get', ([], {'id': 'app_id'}), '(id=app_id)\n', (916, 927), False, 'from message.models import AppModel, TokenModel, AccountModel\n'), ((944, 983), 'message.models.TokenModel.objects.filter', 'TokenModel.objects.filter', ([], {'appId': 'app_id'}), '(appId=app_id)\n', (969, 983), False, 'from message.models import AppModel, TokenModel, AccountModel\n'), ((995, 1072), 'django.shortcuts.render_to_response', 'render_to_response', (['"""admin/detail.html"""', "{'app': app, 'appTokens': appTokens}"], {}), "('admin/detail.html', {'app': app, 'appTokens': appTokens})\n", (1013, 1072), False, 'from django.shortcuts import render_to_response\n'), ((1227, 1284), 'message.models.TokenModel', 'TokenModel', ([], {'appId': 'app_id', 'token': 'token', 'ableTime': 'able_time'}), '(appId=app_id, token=token, ableTime=able_time)\n', (1237, 1284), False, 'from message.models import AppModel, TokenModel, AccountModel\n'), ((1311, 1363), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/admin/detail/%s' % (app_id,))"], {}), "('/admin/detail/%s' % (app_id,))\n", (1331, 1363), False, 'from django.http import HttpResponseRedirect\n'), ((1503, 1555), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/admin/detail/%s' % (app_id,))"], {}), "('/admin/detail/%s' % (app_id,))\n", (1523, 1555), 
False, 'from django.http import HttpResponseRedirect\n'), ((2410, 2448), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/admin/account"""'], {}), "('/admin/account')\n", (2430, 2448), False, 'from django.http import HttpResponseRedirect\n'), ((1682, 1726), 'django.shortcuts.render_to_response', 'render_to_response', (['"""admin/account.html"""', '{}'], {}), "('admin/account.html', {})\n", (1700, 1726), False, 'from django.shortcuts import render_to_response\n'), ((285, 317), 'message.models.AppModel.objects.order_by', 'AppModel.objects.order_by', (['"""-id"""'], {}), "('-id')\n", (310, 317), False, 'from message.models import AppModel, TokenModel, AccountModel\n'), ((609, 643), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (622, 643), False, 'import time\n'), ((747, 781), 'message.models.AppModel.objects.filter', 'AppModel.objects.filter', ([], {'id': 'app_id'}), '(id=app_id)\n', (770, 781), False, 'from message.models import AppModel, TokenModel, AccountModel\n'), ((1430, 1482), 'message.models.TokenModel.objects.filter', 'TokenModel.objects.filter', ([], {'id': 'token_id', 'appId': 'app_id'}), '(id=token_id, appId=app_id)\n', (1455, 1482), False, 'from message.models import AppModel, TokenModel, AccountModel\n'), ((1887, 1930), 'message.models.AccountModel.objects.get', 'AccountModel.objects.get', ([], {'username': 'username'}), '(username=username)\n', (1911, 1930), False, 'from message.models import AppModel, TokenModel, AccountModel\n'), ((2144, 2155), 'time.time', 'time.time', ([], {}), '()\n', (2153, 2155), False, 'import time\n'), ((2175, 2206), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/admin/"""'], {}), "('/admin/')\n", (2195, 2206), False, 'from django.http import HttpResponseRedirect\n'), ((2240, 2286), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/admin/account?error=2"""'], {}), "('/admin/account?error=2')\n", (2260, 2286), False, 'from 
django.http import HttpResponseRedirect\n'), ((1985, 2031), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/admin/account?error=1"""'], {}), "('/admin/account?error=1')\n", (2005, 2031), False, 'from django.http import HttpResponseRedirect\n'), ((503, 514), 'time.time', 'time.time', ([], {}), '()\n', (512, 514), False, 'import time\n'), ((2070, 2091), 'hashlib.md5', 'hashlib.md5', (['password'], {}), '(password)\n', (2081, 2091), False, 'import hashlib\n')]
|
import tensorflow as tf
import tensorflow.contrib.slim as slim
def lrelu(x, leak=0.2, name="lrelu"):
with tf.variable_scope(name):
f1 = 0.5 * (1 + leak)
f2 = 0.5 * (1 - leak)
return f1 * x + f2 * abs(x)
def selu(x):
alpha = 1.6732632423543772848170429916717
scale = 1.0507009873554804934193349852946
return scale * tf.where(x > 0.0, x, alpha * tf.exp(x) - alpha)
def huber_loss(labels, predictions, delta=1.0):
residual = tf.abs(predictions - labels)
condition = tf.less(residual, delta)
small_res = 0.5 * tf.square(residual)
large_res = delta * residual - 0.5 * tf.square(delta)
return tf.where(condition, small_res, large_res)
def conv2d(input, output_shape, is_train, activation_fn=tf.nn.relu,
k_h=5, k_w=5, s_h=2, s_w=2, stddev=0.02, name="conv2d"):
with tf.variable_scope(name):
w = tf.get_variable('w', [k_h, k_w, input.get_shape()[-1], output_shape],
initializer=tf.truncated_normal_initializer(stddev=stddev))
conv = tf.nn.conv2d(input, w, strides=[1, s_h, s_w, 1], padding='SAME')
biases = tf.get_variable('biases', [output_shape],
initializer=tf.constant_initializer(0.0))
activation = activation_fn(conv + biases)
bn = tf.contrib.layers.batch_norm(activation, center=True, scale=True,
decay=0.9, is_training=is_train,
updates_collections=None)
return bn
def fc(input, output_shape, activation_fn=tf.nn.relu, name="fc"):
output = slim.fully_connected(input, int(output_shape), activation_fn=activation_fn)
return output
|
[
"tensorflow.abs",
"tensorflow.constant_initializer",
"tensorflow.less",
"tensorflow.variable_scope",
"tensorflow.contrib.layers.batch_norm",
"tensorflow.exp",
"tensorflow.nn.conv2d",
"tensorflow.where",
"tensorflow.square",
"tensorflow.truncated_normal_initializer"
] |
[((472, 500), 'tensorflow.abs', 'tf.abs', (['(predictions - labels)'], {}), '(predictions - labels)\n', (478, 500), True, 'import tensorflow as tf\n'), ((517, 541), 'tensorflow.less', 'tf.less', (['residual', 'delta'], {}), '(residual, delta)\n', (524, 541), True, 'import tensorflow as tf\n'), ((653, 694), 'tensorflow.where', 'tf.where', (['condition', 'small_res', 'large_res'], {}), '(condition, small_res, large_res)\n', (661, 694), True, 'import tensorflow as tf\n'), ((112, 135), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (129, 135), True, 'import tensorflow as tf\n'), ((564, 583), 'tensorflow.square', 'tf.square', (['residual'], {}), '(residual)\n', (573, 583), True, 'import tensorflow as tf\n'), ((842, 865), 'tensorflow.variable_scope', 'tf.variable_scope', (['name'], {}), '(name)\n', (859, 865), True, 'import tensorflow as tf\n'), ((1052, 1116), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['input', 'w'], {'strides': '[1, s_h, s_w, 1]', 'padding': '"""SAME"""'}), "(input, w, strides=[1, s_h, s_w, 1], padding='SAME')\n", (1064, 1116), True, 'import tensorflow as tf\n'), ((1314, 1442), 'tensorflow.contrib.layers.batch_norm', 'tf.contrib.layers.batch_norm', (['activation'], {'center': '(True)', 'scale': '(True)', 'decay': '(0.9)', 'is_training': 'is_train', 'updates_collections': 'None'}), '(activation, center=True, scale=True, decay=0.9,\n is_training=is_train, updates_collections=None)\n', (1342, 1442), True, 'import tensorflow as tf\n'), ((625, 641), 'tensorflow.square', 'tf.square', (['delta'], {}), '(delta)\n', (634, 641), True, 'import tensorflow as tf\n'), ((989, 1035), 'tensorflow.truncated_normal_initializer', 'tf.truncated_normal_initializer', ([], {'stddev': 'stddev'}), '(stddev=stddev)\n', (1020, 1035), True, 'import tensorflow as tf\n'), ((1221, 1249), 'tensorflow.constant_initializer', 'tf.constant_initializer', (['(0.0)'], {}), '(0.0)\n', (1244, 1249), True, 'import tensorflow as tf\n'), ((388, 397), 'tensorflow.exp', 
'tf.exp', (['x'], {}), '(x)\n', (394, 397), True, 'import tensorflow as tf\n')]
|
import numpy as np
import pandas as pd
import read_data as rd
import argparse
import os
import time
import sklearn
from sklearn.externals import joblib
from sklearn.metrics import precision_recall_curve
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
def prepare_data(df=None):
'''
Preps the data to be used in the model. Right now, the code itself must
be modified to tweak which columns are included in what way.
Parameters
----------
df : Dataframe to use. If not specified, the dataframe is loaded automatically.
Returns
-------
predictors : NxM DataFrame of the predictors for the classification problem.
meta_info : Nx6 DataFrame containing the columns 'Escherichia.coli' and
'Full_date', to be used, e.g., for leave-one-year-out cross
validation and creating the true class labels (elevated vs.
not elevated E. coli levels). The columns 'Client.ID','BEACH',
'Drek_Prediction'and 'Weekday' are also returned.
'''
# Meta columns are not used as predictors
meta_columns = ['Client.ID','BEACH','Full_date','Escherichia.coli',
'Drek_Prediction','Weekday']
# Deterministic columns are known ahead of time, their actual values can be used.
deterministic_columns = [
'Client.ID', # subsumed by the geographic flags
'group_prior_mean',
'previous_reading',
'accum_rain', #added to try to capture storm events
'Collection_Time', # mostly missing values but may still be of some use
'12hrPressureChange', # overnight pressure change
#'precipIntensity',
#'precipIntensityMax',
#'temperatureMin',
#'temperatureMax',
#'humidity',
#'windSpeed',
#'cloudCover',
#'flag_geographically_a_north_beach',
'categorical_beach_grouping'
#'12th_previous',
#'Montrose_previous',
#'Rainbow_previous',
#'63rd_previous',
#'Osterman_previous'
]
# Deterministic columns are known ahead of time, their actual values are used.
# These hourly variables have an additional parameter which defines what hours
# should be used. For example, an entry
# 'temperature':[-16,-13,-12,-11,-9,-3,0]
# would indicate that the hourly temperature at offsets of
# [-16,-13,-12,-11,-9,-3,0] from MIDNIGHT the day of should be included as
# variables in the model.
deterministic_hourly_columns = {
'temperature':np.linspace(-19,4,num=6,dtype=np.int64),#range(-19,5),
'windVectorX':np.linspace(-19,4,num=6,dtype=np.int64),#range(-19,5),#[-4,-2,0,2,4],
'windVectorY':np.linspace(-19,4,num=6,dtype=np.int64),
#'windSpeed':[-2,0,2,4],
#'windBearing':[-2,0,2,4],
'pressure':[0],
'cloudCover':[-15], #range(-19,5),
'humidity':[4],
#'precipIntensity':[4]#np.linspace(-10,4,num=4,dtype=np.int64)
}
for var in deterministic_hourly_columns:
for hr in deterministic_hourly_columns[var]:
deterministic_columns.append(var + '_hour_' + str(hr))
# Historical columns have their previous days' values added to the predictors,
# but not the current day's value(s) unless the historical column also exists
# in the deterministic columns list.
# Similar to the hourly columns, you need to specify which previous days
# to include as variables. For example, below we have an entry
# 'temperatureMax': range(1,4)
# which indicates that the max temperature from 1, 2, and 3 days previous
# should be included.
historical_columns = {
#'temperatureMin': range(2,3),
'temperatureMax': range(2,5),
# 'humidity': range(1,3),
#'windSpeed': range(1,3),
'pressure': range(1,3),
'dewPoint': range(1,3),
#'cloudCover': range(1,3),
'windVectorX': range(2,3),
'windVectorY': range(2,3),
'Escherichia.coli': range(2,8)
}
historical_columns_list = list(historical_columns.keys())
######################################################
#### Get relevant columns, add historical data
######################################################
all_columns = meta_columns + deterministic_columns + historical_columns_list #+ derived_columns
all_columns = list(set(all_columns))
df = df[all_columns]
for var in historical_columns:
df = rd.add_column_prior_data(
df, var, historical_columns[var],
beach_col_name='Client.ID', timestamp_col_name='Full_date'
)
df.drop((set(historical_columns_list) - set(deterministic_columns)) - set(meta_columns),
axis=1, inplace=True)
######################################################
#### Average the historical columns, fill in NaNs
######################################################
# Creates a "trailing_average_daily_" column for each historical variable
# which is simply the mean of the previous day columns of that variable.
# NaN values for any previous day data is filled in by that mean value.
for var in historical_columns:
cname = 'trailing_average_daily_' + var
rnge = historical_columns[var]
if len(rnge) == 1: # no need to create a trailing average of a single number...
continue
df[cname] = df[[str(n) + '_day_prior_' + var for n in rnge]].mean(1)
for n in rnge:
df[str(n) + '_day_prior_' + var].fillna(df[cname], inplace=True)
# Do a similar process for the hourly data.
for var in deterministic_hourly_columns:
cname = 'trailing_average_hourly_' + var
rnge = deterministic_hourly_columns[var]
if len(rnge) == 1: # no need to create a trailing average of a single number...
continue
df[cname] = df[[var + '_hour_' + str(n) for n in rnge]].mean(1)
for n in rnge:
df[var + '_hour_' + str(n)].fillna(df[cname], inplace=True)
######################################################
#### Process non-numeric columns
######################################################
# process all of the nonnumeric columns
# This method just assigns a numeric value to each possible value
# of the non-numeric column. Note that this will not work well
# for regression-style models, where instead dummy columns should
# be created.
def nonnumericCols(data, verbose=True):
for f in data.columns:
if data[f].dtype=='object':
if (verbose):
print('Column ' + str(f) + ' being treated as non-numeric')
lbl = sklearn.preprocessing.LabelEncoder()
lbl.fit(list(data[f].values))
data.loc[:,f] = lbl.transform(list(data[f].values))
return data
# Do this at the end so meta_data has Beach names and Weekdays
#df = nonnumericCols(df)
# As a last NaN filling measure, we fill the NaNs of all columns
# that are NOT the E. coli column with the mean value of the column,
# the mean value taken over all data not from the same year as the
# year of the row we are filling. For example, if there is a NaN
# in the temperatureMax column in some row from 2010, then we will
# fill that value with the mean temperatureMax value from all years
# that are NOT 2010.
cols = df.columns.tolist()
cols.remove('Escherichia.coli')
years = df['Full_date'].map(lambda x: x.year)
for yr in years.unique():
not_yr = np.array(years != yr)
is_yr = np.array(years == yr)
df.ix[is_yr, cols] = df.ix[is_yr, cols].fillna(df.ix[not_yr, cols].median())
######################################################
#### Drop any rows that still have NA, set up outputs
######################################################
# The following lines will print the % of rows that:
# (a) have a NaN value in some column other than Escherichia.coli, AND
# (b) the column Escherichia.coli is NOT NaN.
# Since we are now filling NaNs with column averages above, this should
# always report 0%. I'm leaving the check in here just to be sure, though.
total_rows_predictors = df.dropna(subset=['Escherichia.coli'], axis=0).shape[0]
nonnan_rows_predictors = df.dropna(axis=0).shape[0]
print('Dropping {0:.4f}% of rows because predictors contain NANs'.format(
100.0 - 100.0 * nonnan_rows_predictors / total_rows_predictors
))
# Any rows that still have NaNs are NaN b/c there is no E. coli reading
# We should drop these rows b/c there is nothing for us to predict.
df.dropna(axis=0, inplace=True)
#df.dropna(axis=0, how='any', subset=['Full_date','Escherichia.coli'], inplace=True)
predictors = df.drop(set(meta_columns)-set(['Client.ID']) , axis=1)
meta_info = df[meta_columns]
predictors = nonnumericCols(predictors)
return predictors, meta_info
def display_predictions_by_beach(results, predict_col = 'predictedEPA'):
'''
Helper function to test ensemble of models on 2015 data.
Displays the prediction results by beach, sorted from north to south.
Parameters
----------
results : dataframe with all predictions
Returns
-------
precision : percent of reported warnings that are actually correct
recall : percent of all actual ecoli outbreaks that are warned about
Also prints table of results to console
'''
results['correct_warning'] = (results['expected'])&(results[predict_col])
results['incorrect_warning'] = (results['expected']==False)&(results[predict_col])
results['missed_warning'] = (results['expected'])&(~results[predict_col])
print(results.groupby(['Client.ID','BEACH'])['incorrect_warning','correct_warning','missed_warning'].sum())
TP = results['correct_warning'].sum()
FP = results['incorrect_warning'].sum()
FN = results['missed_warning'].sum()
precision = TP/(TP+FP)
recall = TP/(TP+FN)
return precision, recall
def calibrateThreshold(target, predictions, FNR):
'''
Helper function to calibrate the decision threshold such that
False Negative Rate (FNR) should be values between 1.0 and 10
'''
countOfAllNeg = len(target[target<236])
cut = max(np.exp(predictions))
for firstcut in np.linspace(cut,50,10):
countOfCorrectNeg = len(target[(target<236)&(np.exp(predictions)<firstcut)])
specif = countOfCorrectNeg/countOfAllNeg
if specif < (1.0-FNR/100):
cut = firstcut + (max(np.exp(predictions0))-50)/9 #go back up one cut to begin next search
break
for secondcut in np.linspace(cut, cut/2,100):
countOfCorrectNeg = len(target[(target<236)&(np.exp(predictions)<secondcut)])
specif = countOfCorrectNeg/countOfAllNeg
if specif <= (1.0-FNR/100):
cut = secondcut
break
return cut
###############################################################################
###############################################################################
#### This builds the set of leave one out RF and GBM models ####
###############################################################################
###############################################################################
if __name__ == '__main__':
'''
This script will produce and put in a folder named model_<model_suffix>:
-- 18 .pkl files
- 9 Random Forest Classifier models
- 9 Gradient Boosting Regression models
-- 5 .csv files
- data saved from reading in via read_data()
- processed data ready for modeling
- accompaning meta data for modeling
- summary of precision, recall and tuned threshold values
- results of testing on 2015 data
-- 1 .txt file containing a list of predictors
'''
# Command Line Argument parsing
parser = argparse.ArgumentParser(description='Process beach data.')
parser.add_argument('-id', '--input_data', type=str,
metavar='data',
help='input pre-read data CSV filename')
parser.add_argument('-ip', '--input_processed', type=str,
metavar='processed',
help='input processed modeling data CSV filename')
parser.add_argument('-ip2', '--input_meta', type=str,
metavar='processed_meta',
help='input processed modeling metadata CSV filename')
parser.add_argument('-s', '--suffix', type=str,
metavar='model_suffix',
help='suffix to identify this model build results')
parser.add_argument('-v', '--verbose', action='count', default=1)
args = parser.parse_args()
if args.suffix:
model_suffix = args.suffix
else:
model_suffix = time.strftime("%d_%m_%Y")
directory = 'model_'+model_suffix
if not os.path.exists(directory):
os.makedirs(directory)
##########################
### Load the data
##########################
if args.input_data:
print('Loading data from {0}'.format(args.input_data))
df = pd.read_csv(args.input_data, parse_dates='Full_date', low_memory=False)
df['Full_date'] = rd.date_lookup(df['Full_date'])
else:
print('Reading and loading data. Saving to {}'.format(directory+'/all_data.csv'))
df = rd.read_data(read_weather_station=False, read_water_sensor=False, add_each_beach_data=True)
df.to_csv(directory+'/all_data.csv', index=False)
###############################
### Prepare Predictors
###############################
if args.input_processed:
print('Using Preprocessed data from {0} and {1}'.format(args.input_processed, args.input_meta ))
datafilename = args.input_processed
metadatafilename = args.input_meta
data_processed = pd.read_csv(datafilename)
meta_info = pd.read_csv(metadatafilename, parse_dates='Full_date')
meta_info['Full_date'] = rd.date_lookup(meta_info['Full_date'])
else:
print('Preparing data for modeling. Saving to {0} and {1}'.format(directory+'/processed.csv', directory+'/meta_processed.csv'))
data_processed, meta_info = prepare_data(df)
data_processed.to_csv(directory+'/processed.csv', index=False)
meta_info.to_csv(directory+'/meta_processed.csv', index=False)
f = open(directory+'/feature_list.txt', 'w')
f.write("\n".join(list(data_processed.columns) ) ) # For easy reference
f.close()
if args.verbose>=1:
print('Using the following columns as predictors:')
for c in data_processed.columns:
print('\t' + str(c))
##########################################################################
### Split data into Train/Validate (2006-2014) and Testing (2015)
##########################################################################
train_processed = data_processed[meta_info['Full_date'] < '1-1-2015'].copy()
test_processed = data_processed[meta_info['Full_date'] > '1-1-2015'].copy()
train_meta_info = meta_info[meta_info['Full_date'] < '1-1-2015'].copy()
test_meta_info = meta_info[meta_info['Full_date'] > '1-1-2015'].copy()
##########################################################################
### Setup Random Forest classifier and Gradient Boosting Regressor
##########################################################################
RF_reg = RandomForestRegressor(n_estimators=500,
max_depth=10,
max_features=0.8,
min_samples_split=10,
min_samples_leaf=4,
oob_score=True,
n_jobs=-1)
gbm_reg = GradientBoostingRegressor(loss='quantile',
learning_rate=0.025,
n_estimators=1500, # train longer, no concern of overfitting
subsample=0.8,
min_samples_split=10,
min_samples_leaf=4,
max_depth=10,
alpha=0.85)
##########################################################################
### Train models by holding one year out
### Validate and tune cutoff thresholds on held out year
##########################################################################
dataSeries = []
colIndexes = []
timestamps = train_meta_info['Full_date'].map(lambda x: x.year)
print('\nBegining training and validation of hold-one-year-out models\n')
for yr in range(2006, 2015):
### HOLD OUT YEAR
train_ind = np.array((timestamps != yr))
# Remove weekends from training b/c sampled under different conditions
train_data = train_processed.ix[train_ind & (train_meta_info['Weekday']!='Saturday')
& (train_meta_info['Weekday']!='Sunday')]
train_target = train_meta_info.ix[train_ind & (train_meta_info['Weekday']!='Saturday')
& (train_meta_info['Weekday']!='Sunday'),'Escherichia.coli']
# Leave weekends in held out validation data
test_data = train_processed.ix[~train_ind]
test_target = train_meta_info.ix[~train_ind,'Escherichia.coli']
### TRAIN Random Forest Regressor model and save as pickle file
startTime = time.time() ## This is only to keep track of training time
RF_reg.fit(train_data, np.log(train_target+1) )
filename = directory+'/RF_regress' + '_' +str(yr) +'.pkl'
joblib.dump(RF_reg, filename, compress=9)
### VALIDATE MODEL on held out year to calibarate cutoff threshold based on False Negative Rate
predictions0 = getattr(RF_reg, 'predict')(test_data)
# rescales to between 0 and 1 in order to use in precision_recall_curve()
predictionsX0= predictions0-predictions0.min()
predictionsX0= predictionsX0/(predictions0.max()-predictions0.min())
precisionV, recallV, threshV = precision_recall_curve(test_target>=236, predictionsX0)
threshV = np.exp(threshV*(predictions0.max()-predictions0.min())+predictions0.min()) # map back from [0,1] to origial scaling
RFthresh = calibrateThreshold(test_target, predictions0, 2.0) # FNR of 2%
threshIdx = (np.abs(threshV-RFthresh)).argmin()
RF_rec = recallV[threshIdx]
RF_prec = precisionV[threshIdx]
RFthreshAlt = calibrateThreshold(test_target, predictions0, 5.0) # FNR of 5%
threshIdx = (np.abs(threshV-RFthreshAlt)).argmin()
RF_recAlt = recallV[threshIdx]
RF_precAlt = precisionV[threshIdx]
# REPORT Results
print(' RF ensemble {0} model: thresh for 2% FNR = {1}, recall= {2}, precision = {3}'\
.format(yr,np.int(RFthresh),np.int(RF_rec*100+.4),np.int(RF_prec*100+.4) ))
print(' RF ensemble {0} model: thresh for 5% FNR = {1}, recall= {2}, precision = {3}'\
.format(yr,np.int(RFthreshAlt),np.int(RF_recAlt*100+.4),np.int(RF_precAlt*100+.4) ))
if args.verbose>=3:
print('\t runtime of building and testing RF model was {0} minutes'.format(np.round((time.time() - startTime)/60) ))
### TRAIN Gradient Boosting Regression model and save as pickle file
startTime = time.time()
gbm_reg.fit(train_data, np.log(train_target+1))
filename = directory+'/GBM_regress' + '_' + str(yr) +'.pkl'
joblib.dump(gbm_reg, filename, compress=9)
### VALIDATE MODEL on held out year to calibarate cutoff threshold based on False Negative Rate
predictions0 = getattr(gbm_reg, 'predict')(test_data)
# rescales to between 0 and 1 in order to use in precision_recall_curve()
predictionsX0= predictions0-predictions0.min()
predictionsX0= predictionsX0/(predictions0.max()-predictions0.min())
precisionV, recallV, threshV = precision_recall_curve(test_target>=236, predictionsX0)
threshV = np.exp(threshV*(predictions0.max()-predictions0.min())+predictions0.min()) # map back from [0,1] to origial scaling
GBMthresh = calibrateThreshold(test_target, predictions0, 2.0) # FNR of 2%
threshIdx = (np.abs(threshV-GBMthresh)).argmin()
GBM_rec = recallV[threshIdx]
GBM_prec = precisionV[threshIdx]
GBMthreshAlt = calibrateThreshold(test_target, predictions0, 5.0) # FNR of 5%
threshIdx = (np.abs(threshV-GBMthreshAlt)).argmin()
GBM_recAlt = recallV[threshIdx]
GBM_precAlt = precisionV[threshIdx]
# REPORT Results
print(' GBM ensemble {0} model: thresh for 2% FNR = {1}, recall= {2}, precision = {3}'\
.format(yr,np.int(GBMthresh),np.int(GBM_rec*100+.4),np.int(GBM_prec*100+.4) ))
print(' GBM ensemble {0} model: thresh for 5% FNR = {1}, recall= {2}, precision = {3}'\
.format(yr,np.int(GBMthreshAlt),np.int(GBM_recAlt*100+.4),np.int(GBM_precAlt*100+.4) ))
if args.verbose>=3:
print('\t runtime of building and testing GBM model was {0} minutes'.format(np.round((time.time() - startTime)/60)))
# SAVE the precision, recall, and tuned thresholds
d = { 'RF_precision2p':RF_prec, 'RF_recall2p':RF_rec, 'RF_thresh2p': RFthresh,
'RF_precision5p':RF_precAlt, 'RF_recall5p':RF_recAlt, 'RF_thresh5p': RFthreshAlt,
'GBM_precision2p':GBM_prec, 'GBM_recall2p':GBM_rec, 'GBM_thresh2p': GBMthresh,
'GBM_precision5p':GBM_precAlt, 'GBM_recall5p':GBM_recAlt, 'GBM_thresh5p': GBMthreshAlt
}
d = pd.Series(d, index = [ 'RF_precision2p', 'RF_recall2p', 'RF_thresh2p',
'RF_precision5p', 'RF_recall5p', 'RF_thresh5p',
'GBM_precision2p', 'GBM_recall2p', 'GBM_thresh2p',
'GBM_precision5p', 'GBM_recall5p', 'GBM_thresh5p'])
dataSeries = dataSeries + [ d ]
colIndexes = colIndexes + [yr]
summaryFrame = pd.DataFrame( dataSeries , index = colIndexes)
summaryFileName = directory+'/ValidationReport2.csv'
summaryFrame.to_csv(summaryFileName)
##########################################################################
### Test models on 2015 data
##########################################################################
print('\nTesting ensemble of models on 2015 data\n')
results = test_meta_info.copy()
results['expected'] = results['Escherichia.coli']>=235
results['predictedEPA'] = results['Drek_Prediction']>=235
RF_cols = []
GBM_cols = []
RF_bool_cols2p = []
RF_bool_cols5p = []
GBM_bool_cols2p = []
GBM_bool_cols5p = []
for yr in range(2006, 2015):
filename = directory+'/GBM_regress' + '_' + str(yr) +'.pkl'
gbmmodel = joblib.load(filename)
pred_col_name = 'GBM_' +str(yr)+ '_pred'
GBM_cols = GBM_cols + [pred_col_name]
results[pred_col_name] = np.exp(getattr(gbmmodel, 'predict')(test_processed))
results[pred_col_name+'_bool_2p'] = results[pred_col_name] > summaryFrame.ix[yr,'GBM_thresh2p']
results[pred_col_name+'_bool_5p'] = results[pred_col_name] > summaryFrame.ix[yr,'GBM_thresh5p']
GBM_bool_cols2p = GBM_bool_cols2p + [pred_col_name+'_bool_2p']
GBM_bool_cols5p = GBM_bool_cols5p + [pred_col_name+'_bool_5p']
for yr in range(2006, 2015):
filename = directory+'/RF_regress' + '_' +str(yr) +'.pkl'
RFmodel = joblib.load(filename)
pred_col_name = 'RF_' +str(yr)+ '_pred'
results[pred_col_name] = np.exp(getattr(RFmodel, 'predict')(test_processed))
RF_cols = RF_cols + [pred_col_name]
results[pred_col_name+'_bool_2p'] = results[pred_col_name] > summaryFrame.ix[yr,'RF_thresh2p']
results[pred_col_name+'_bool_5p'] = results[pred_col_name] > summaryFrame.ix[yr,'RF_thresh5p']
RF_cols = RF_cols + [pred_col_name]
RF_bool_cols2p = RF_bool_cols2p + [pred_col_name+'_bool_2p']
RF_bool_cols5p = RF_bool_cols5p + [pred_col_name+'_bool_5p']
results['mean_GBM'] = results[GBM_cols].mean(1)
results['max_GBM'] = results[GBM_cols].max(1)
results['min_GBM'] = results[GBM_cols].min(1)
results['mean_RF'] = results[RF_cols].mean(1)
results['max_RF'] = results[RF_cols].max(1)
results['min_RF'] = results[RF_cols].min(1)
# The above results could be interesting to drill down into to see how the
# different models are biased, and how much variance in the predictions.
# For now, the method of final prediction is to predict Ecoli_High == True
# IF ((any GBM predicts true) AND (any RF predicts true)) OR (EPA predicts true)
results['predict_RF2p'] = results[RF_bool_cols2p].sum(1) > 1
results['predict_GBM2p'] = results[GBM_bool_cols2p].sum(1) > 1
results['predict_Combo2p'] = (((results['predict_RF2p'])&(results['predict_GBM2p']))|(results['predictedEPA']) )
results['predict_RF5p'] = results[RF_bool_cols5p].sum(1) > 1
results['predict_GBM5p'] = results[GBM_bool_cols5p].sum(1) > 1
results['predict_Combo5p'] = (((results['predict_RF5p'])&(results['predict_GBM5p']))|(results['predictedEPA']) )
results.to_csv(directory+'/results_RF_GBM.csv', index=False)
# Look at performance of GMB ensemble at 5% FNR alone
prec, rec = display_predictions_by_beach(results, 'predict_GBM5p')
print('GBM ensemble model at 5% FNR: recall= {0}, precision = {1}\n'.format(np.int(rec*100),np.int(prec*100)))
# Look at performance of RF ensemble at 5% FNR alone
prec, rec = display_predictions_by_beach(results, 'predict_RF5p')
print('RF ensemble model at 5% FNR: recall= {0}, precision = {1}\n'.format(np.int(rec*100),np.int(prec*100)))
prec, rec = display_predictions_by_beach(results, 'predict_Combo5p')
print('Combo ensemble model variant at 5% FNR with AND: recall= {0}, precision = {1}\n'.format(np.int(rec*100),np.int(prec*100)))
# Try out some variants of putting models together
results['predict_Combo5p'] = (((results['predict_RF5p'])|(results['predict_GBM5p']))|(results['predictedEPA']) )
prec, rec = display_predictions_by_beach(results, 'predict_Combo5p')
print('Combo ensemble model variant at 5% FNR with OR: recall= {0}, precision = {1}\n'.format(np.int(rec*100),np.int(prec*100)))
prec, rec = display_predictions_by_beach(results, 'predict_Combo2p')
print('Combo ensemble model variant at 2% FNR with AND: recall= {0}, precision = {1}\n'.format(np.int(rec*100),np.int(prec*100)))
# Try out some variants of putting models together
results['predict_Combo2p'] = (((results['predict_RF2p'])|(results['predict_GBM2p']))|(results['predictedEPA']) )
prec, rec = display_predictions_by_beach(results, 'predict_Combo2p')
print('Combo ensemble model variant at 2% FNR with OR: recall= {0}, precision = {1}\n'.format(np.int(rec*100),np.int(prec*100)))
# Try out some variants of putting models together
results['predict_RF'] = results['mean_RF']> np.exp(summaryFrame.RF_thresh5p.min())
results['predict_GBM'] = results['mean_GBM']> np.exp(summaryFrame.GBM_thresh5p.min())
results['predict_Combo'] = (((results['predict_RF'])&(results['predict_GBM']))|(results['predictedEPA']) )
prec, rec = display_predictions_by_beach(results, 'predict_Combo')
print('Combo ensemble model variant with one threshold: recall= {0}, precision = {1}\n'.format(np.int(rec*100),np.int(prec*100)))
|
[
"sklearn.externals.joblib.dump",
"numpy.abs",
"argparse.ArgumentParser",
"pandas.read_csv",
"sklearn.ensemble.GradientBoostingRegressor",
"time.strftime",
"numpy.exp",
"read_data.read_data",
"pandas.DataFrame",
"read_data.date_lookup",
"os.path.exists",
"sklearn.preprocessing.LabelEncoder",
"numpy.int",
"numpy.linspace",
"sklearn.ensemble.RandomForestRegressor",
"sklearn.metrics.precision_recall_curve",
"pandas.Series",
"sklearn.externals.joblib.load",
"os.makedirs",
"numpy.log",
"read_data.add_column_prior_data",
"time.time",
"numpy.array"
] |
[((10726, 10750), 'numpy.linspace', 'np.linspace', (['cut', '(50)', '(10)'], {}), '(cut, 50, 10)\n', (10737, 10750), True, 'import numpy as np\n'), ((11068, 11098), 'numpy.linspace', 'np.linspace', (['cut', '(cut / 2)', '(100)'], {}), '(cut, cut / 2, 100)\n', (11079, 11098), True, 'import numpy as np\n'), ((12418, 12476), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process beach data."""'}), "(description='Process beach data.')\n", (12441, 12476), False, 'import argparse\n'), ((16215, 16359), 'sklearn.ensemble.RandomForestRegressor', 'RandomForestRegressor', ([], {'n_estimators': '(500)', 'max_depth': '(10)', 'max_features': '(0.8)', 'min_samples_split': '(10)', 'min_samples_leaf': '(4)', 'oob_score': '(True)', 'n_jobs': '(-1)'}), '(n_estimators=500, max_depth=10, max_features=0.8,\n min_samples_split=10, min_samples_leaf=4, oob_score=True, n_jobs=-1)\n', (16236, 16359), False, 'from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\n'), ((16607, 16780), 'sklearn.ensemble.GradientBoostingRegressor', 'GradientBoostingRegressor', ([], {'loss': '"""quantile"""', 'learning_rate': '(0.025)', 'n_estimators': '(1500)', 'subsample': '(0.8)', 'min_samples_split': '(10)', 'min_samples_leaf': '(4)', 'max_depth': '(10)', 'alpha': '(0.85)'}), "(loss='quantile', learning_rate=0.025,\n n_estimators=1500, subsample=0.8, min_samples_split=10,\n min_samples_leaf=4, max_depth=10, alpha=0.85)\n", (16632, 16780), False, 'from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\n'), ((23401, 23443), 'pandas.DataFrame', 'pd.DataFrame', (['dataSeries'], {'index': 'colIndexes'}), '(dataSeries, index=colIndexes)\n', (23413, 23443), True, 'import pandas as pd\n'), ((2635, 2677), 'numpy.linspace', 'np.linspace', (['(-19)', '(4)'], {'num': '(6)', 'dtype': 'np.int64'}), '(-19, 4, num=6, dtype=np.int64)\n', (2646, 2677), True, 'import numpy as np\n'), ((2713, 2755), 'numpy.linspace', 'np.linspace', (['(-19)', '(4)'], 
{'num': '(6)', 'dtype': 'np.int64'}), '(-19, 4, num=6, dtype=np.int64)\n', (2724, 2755), True, 'import numpy as np\n'), ((2806, 2848), 'numpy.linspace', 'np.linspace', (['(-19)', '(4)'], {'num': '(6)', 'dtype': 'np.int64'}), '(-19, 4, num=6, dtype=np.int64)\n', (2817, 2848), True, 'import numpy as np\n'), ((4628, 4751), 'read_data.add_column_prior_data', 'rd.add_column_prior_data', (['df', 'var', 'historical_columns[var]'], {'beach_col_name': '"""Client.ID"""', 'timestamp_col_name': '"""Full_date"""'}), "(df, var, historical_columns[var], beach_col_name=\n 'Client.ID', timestamp_col_name='Full_date')\n", (4652, 4751), True, 'import read_data as rd\n'), ((7838, 7859), 'numpy.array', 'np.array', (['(years != yr)'], {}), '(years != yr)\n', (7846, 7859), True, 'import numpy as np\n'), ((7877, 7898), 'numpy.array', 'np.array', (['(years == yr)'], {}), '(years == yr)\n', (7885, 7898), True, 'import numpy as np\n'), ((10684, 10703), 'numpy.exp', 'np.exp', (['predictions'], {}), '(predictions)\n', (10690, 10703), True, 'import numpy as np\n'), ((13427, 13452), 'time.strftime', 'time.strftime', (['"""%d_%m_%Y"""'], {}), "('%d_%m_%Y')\n", (13440, 13452), False, 'import time\n'), ((13514, 13539), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (13528, 13539), False, 'import os\n'), ((13550, 13572), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (13561, 13572), False, 'import os\n'), ((13774, 13845), 'pandas.read_csv', 'pd.read_csv', (['args.input_data'], {'parse_dates': '"""Full_date"""', 'low_memory': '(False)'}), "(args.input_data, parse_dates='Full_date', low_memory=False)\n", (13785, 13845), True, 'import pandas as pd\n'), ((13873, 13904), 'read_data.date_lookup', 'rd.date_lookup', (["df['Full_date']"], {}), "(df['Full_date'])\n", (13887, 13904), True, 'import read_data as rd\n'), ((14029, 14124), 'read_data.read_data', 'rd.read_data', ([], {'read_weather_station': '(False)', 'read_water_sensor': '(False)', 
'add_each_beach_data': '(True)'}), '(read_weather_station=False, read_water_sensor=False,\n add_each_beach_data=True)\n', (14041, 14124), True, 'import read_data as rd\n'), ((14557, 14582), 'pandas.read_csv', 'pd.read_csv', (['datafilename'], {}), '(datafilename)\n', (14568, 14582), True, 'import pandas as pd\n'), ((14604, 14658), 'pandas.read_csv', 'pd.read_csv', (['metadatafilename'], {'parse_dates': '"""Full_date"""'}), "(metadatafilename, parse_dates='Full_date')\n", (14615, 14658), True, 'import pandas as pd\n'), ((14700, 14738), 'read_data.date_lookup', 'rd.date_lookup', (["meta_info['Full_date']"], {}), "(meta_info['Full_date'])\n", (14714, 14738), True, 'import read_data as rd\n'), ((17789, 17815), 'numpy.array', 'np.array', (['(timestamps != yr)'], {}), '(timestamps != yr)\n', (17797, 17815), True, 'import numpy as np\n'), ((18608, 18619), 'time.time', 'time.time', ([], {}), '()\n', (18617, 18619), False, 'import time\n'), ((18800, 18841), 'sklearn.externals.joblib.dump', 'joblib.dump', (['RF_reg', 'filename'], {'compress': '(9)'}), '(RF_reg, filename, compress=9)\n', (18811, 18841), False, 'from sklearn.externals import joblib\n'), ((19269, 19326), 'sklearn.metrics.precision_recall_curve', 'precision_recall_curve', (['(test_target >= 236)', 'predictionsX0'], {}), '(test_target >= 236, predictionsX0)\n', (19291, 19326), False, 'from sklearn.metrics import precision_recall_curve\n'), ((20599, 20610), 'time.time', 'time.time', ([], {}), '()\n', (20608, 20610), False, 'import time\n'), ((20747, 20789), 'sklearn.externals.joblib.dump', 'joblib.dump', (['gbm_reg', 'filename'], {'compress': '(9)'}), '(gbm_reg, filename, compress=9)\n', (20758, 20789), False, 'from sklearn.externals import joblib\n'), ((21218, 21275), 'sklearn.metrics.precision_recall_curve', 'precision_recall_curve', (['(test_target >= 236)', 'predictionsX0'], {}), '(test_target >= 236, predictionsX0)\n', (21240, 21275), False, 'from sklearn.metrics import precision_recall_curve\n'), ((22970, 
23200), 'pandas.Series', 'pd.Series', (['d'], {'index': "['RF_precision2p', 'RF_recall2p', 'RF_thresh2p', 'RF_precision5p',\n 'RF_recall5p', 'RF_thresh5p', 'GBM_precision2p', 'GBM_recall2p',\n 'GBM_thresh2p', 'GBM_precision5p', 'GBM_recall5p', 'GBM_thresh5p']"}), "(d, index=['RF_precision2p', 'RF_recall2p', 'RF_thresh2p',\n 'RF_precision5p', 'RF_recall5p', 'RF_thresh5p', 'GBM_precision2p',\n 'GBM_recall2p', 'GBM_thresh2p', 'GBM_precision5p', 'GBM_recall5p',\n 'GBM_thresh5p'])\n", (22979, 23200), True, 'import pandas as pd\n'), ((24294, 24315), 'sklearn.externals.joblib.load', 'joblib.load', (['filename'], {}), '(filename)\n', (24305, 24315), False, 'from sklearn.externals import joblib\n'), ((24979, 25000), 'sklearn.externals.joblib.load', 'joblib.load', (['filename'], {}), '(filename)\n', (24990, 25000), False, 'from sklearn.externals import joblib\n'), ((18699, 18723), 'numpy.log', 'np.log', (['(train_target + 1)'], {}), '(train_target + 1)\n', (18705, 18723), True, 'import numpy as np\n'), ((20644, 20668), 'numpy.log', 'np.log', (['(train_target + 1)'], {}), '(train_target + 1)\n', (20650, 20668), True, 'import numpy as np\n'), ((27020, 27037), 'numpy.int', 'np.int', (['(rec * 100)'], {}), '(rec * 100)\n', (27026, 27037), True, 'import numpy as np\n'), ((27036, 27054), 'numpy.int', 'np.int', (['(prec * 100)'], {}), '(prec * 100)\n', (27042, 27054), True, 'import numpy as np\n'), ((27270, 27287), 'numpy.int', 'np.int', (['(rec * 100)'], {}), '(rec * 100)\n', (27276, 27287), True, 'import numpy as np\n'), ((27286, 27304), 'numpy.int', 'np.int', (['(prec * 100)'], {}), '(prec * 100)\n', (27292, 27304), True, 'import numpy as np\n'), ((27491, 27508), 'numpy.int', 'np.int', (['(rec * 100)'], {}), '(rec * 100)\n', (27497, 27508), True, 'import numpy as np\n'), ((27507, 27525), 'numpy.int', 'np.int', (['(prec * 100)'], {}), '(prec * 100)\n', (27513, 27525), True, 'import numpy as np\n'), ((27885, 27902), 'numpy.int', 'np.int', (['(rec * 100)'], {}), '(rec * 100)\n', 
(27891, 27902), True, 'import numpy as np\n'), ((27901, 27919), 'numpy.int', 'np.int', (['(prec * 100)'], {}), '(prec * 100)\n', (27907, 27919), True, 'import numpy as np\n'), ((28098, 28115), 'numpy.int', 'np.int', (['(rec * 100)'], {}), '(rec * 100)\n', (28104, 28115), True, 'import numpy as np\n'), ((28114, 28132), 'numpy.int', 'np.int', (['(prec * 100)'], {}), '(prec * 100)\n', (28120, 28132), True, 'import numpy as np\n'), ((28492, 28509), 'numpy.int', 'np.int', (['(rec * 100)'], {}), '(rec * 100)\n', (28498, 28509), True, 'import numpy as np\n'), ((28508, 28526), 'numpy.int', 'np.int', (['(prec * 100)'], {}), '(prec * 100)\n', (28514, 28526), True, 'import numpy as np\n'), ((29051, 29068), 'numpy.int', 'np.int', (['(rec * 100)'], {}), '(rec * 100)\n', (29057, 29068), True, 'import numpy as np\n'), ((29067, 29085), 'numpy.int', 'np.int', (['(prec * 100)'], {}), '(prec * 100)\n', (29073, 29085), True, 'import numpy as np\n'), ((6935, 6971), 'sklearn.preprocessing.LabelEncoder', 'sklearn.preprocessing.LabelEncoder', ([], {}), '()\n', (6969, 6971), False, 'import sklearn\n'), ((19566, 19592), 'numpy.abs', 'np.abs', (['(threshV - RFthresh)'], {}), '(threshV - RFthresh)\n', (19572, 19592), True, 'import numpy as np\n'), ((19789, 19818), 'numpy.abs', 'np.abs', (['(threshV - RFthreshAlt)'], {}), '(threshV - RFthreshAlt)\n', (19795, 19818), True, 'import numpy as np\n'), ((20070, 20086), 'numpy.int', 'np.int', (['RFthresh'], {}), '(RFthresh)\n', (20076, 20086), True, 'import numpy as np\n'), ((20087, 20113), 'numpy.int', 'np.int', (['(RF_rec * 100 + 0.4)'], {}), '(RF_rec * 100 + 0.4)\n', (20093, 20113), True, 'import numpy as np\n'), ((20109, 20136), 'numpy.int', 'np.int', (['(RF_prec * 100 + 0.4)'], {}), '(RF_prec * 100 + 0.4)\n', (20115, 20136), True, 'import numpy as np\n'), ((20259, 20278), 'numpy.int', 'np.int', (['RFthreshAlt'], {}), '(RFthreshAlt)\n', (20265, 20278), True, 'import numpy as np\n'), ((20279, 20308), 'numpy.int', 'np.int', (['(RF_recAlt * 100 + 
0.4)'], {}), '(RF_recAlt * 100 + 0.4)\n', (20285, 20308), True, 'import numpy as np\n'), ((20304, 20334), 'numpy.int', 'np.int', (['(RF_precAlt * 100 + 0.4)'], {}), '(RF_precAlt * 100 + 0.4)\n', (20310, 20334), True, 'import numpy as np\n'), ((21515, 21542), 'numpy.abs', 'np.abs', (['(threshV - GBMthresh)'], {}), '(threshV - GBMthresh)\n', (21521, 21542), True, 'import numpy as np\n'), ((21742, 21772), 'numpy.abs', 'np.abs', (['(threshV - GBMthreshAlt)'], {}), '(threshV - GBMthreshAlt)\n', (21748, 21772), True, 'import numpy as np\n'), ((22023, 22040), 'numpy.int', 'np.int', (['GBMthresh'], {}), '(GBMthresh)\n', (22029, 22040), True, 'import numpy as np\n'), ((22041, 22068), 'numpy.int', 'np.int', (['(GBM_rec * 100 + 0.4)'], {}), '(GBM_rec * 100 + 0.4)\n', (22047, 22068), True, 'import numpy as np\n'), ((22064, 22092), 'numpy.int', 'np.int', (['(GBM_prec * 100 + 0.4)'], {}), '(GBM_prec * 100 + 0.4)\n', (22070, 22092), True, 'import numpy as np\n'), ((22216, 22236), 'numpy.int', 'np.int', (['GBMthreshAlt'], {}), '(GBMthreshAlt)\n', (22222, 22236), True, 'import numpy as np\n'), ((22237, 22267), 'numpy.int', 'np.int', (['(GBM_recAlt * 100 + 0.4)'], {}), '(GBM_recAlt * 100 + 0.4)\n', (22243, 22267), True, 'import numpy as np\n'), ((22263, 22294), 'numpy.int', 'np.int', (['(GBM_precAlt * 100 + 0.4)'], {}), '(GBM_precAlt * 100 + 0.4)\n', (22269, 22294), True, 'import numpy as np\n'), ((10804, 10823), 'numpy.exp', 'np.exp', (['predictions'], {}), '(predictions)\n', (10810, 10823), True, 'import numpy as np\n'), ((11151, 11170), 'numpy.exp', 'np.exp', (['predictions'], {}), '(predictions)\n', (11157, 11170), True, 'import numpy as np\n'), ((10958, 10978), 'numpy.exp', 'np.exp', (['predictions0'], {}), '(predictions0)\n', (10964, 10978), True, 'import numpy as np\n'), ((20465, 20476), 'time.time', 'time.time', ([], {}), '()\n', (20474, 20476), False, 'import time\n'), ((22422, 22433), 'time.time', 'time.time', ([], {}), '()\n', (22431, 22433), False, 'import time\n')]
|
# coding=utf-8
import signal
import sys
import time
import RPi.GPIO as GPIO
# Piezo buzzer - pin 31 on Raspberry Pi corresponds to BCM GPIO 22
# NOTE(review): GPIO.BOARD numbering is used below, so 31 is the physical
# header pin; the BCM mapping in the comment above only applies in BCM
# mode -- confirm the intended pin.
BeepPin = 31
# SIGINT handler: silence the buzzer pin and exit cleanly on CTRL+C.
def close(signal, frame):
    GPIO.output(BeepPin, GPIO.LOW)
    sys.exit(0)
# to use Raspberry Pi board pin numbers
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
# install the CTRL+C handler defined above
signal.signal(signal.SIGINT, close)
# set up GPIO output channel
GPIO.setup(BeepPin, GPIO.OUT)
print('Press CTRL+C to exit.')
try:
    # Drive the pin with a fast square wave so the piezo element sounds.
    while True:
        GPIO.output(BeepPin, GPIO.HIGH)
        time.sleep(0.000002)
        GPIO.output(BeepPin, GPIO.LOW)
        time.sleep(0.000002)
finally:
    # Release the GPIO resources however the loop ends.
    GPIO.cleanup()
|
[
"RPi.GPIO.setmode",
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"RPi.GPIO.output",
"time.sleep",
"signal.signal",
"RPi.GPIO.setwarnings",
"sys.exit"
] |
[((279, 303), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BOARD'], {}), '(GPIO.BOARD)\n', (291, 303), True, 'import RPi.GPIO as GPIO\n'), ((304, 327), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', (['(False)'], {}), '(False)\n', (320, 327), True, 'import RPi.GPIO as GPIO\n'), ((329, 364), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'close'], {}), '(signal.SIGINT, close)\n', (342, 364), False, 'import signal\n'), ((395, 424), 'RPi.GPIO.setup', 'GPIO.setup', (['BeepPin', 'GPIO.OUT'], {}), '(BeepPin, GPIO.OUT)\n', (405, 424), True, 'import RPi.GPIO as GPIO\n'), ((190, 220), 'RPi.GPIO.output', 'GPIO.output', (['BeepPin', 'GPIO.LOW'], {}), '(BeepPin, GPIO.LOW)\n', (201, 220), True, 'import RPi.GPIO as GPIO\n'), ((225, 236), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (233, 236), False, 'import sys\n'), ((628, 642), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (640, 642), True, 'import RPi.GPIO as GPIO\n'), ((486, 517), 'RPi.GPIO.output', 'GPIO.output', (['BeepPin', 'GPIO.HIGH'], {}), '(BeepPin, GPIO.HIGH)\n', (497, 517), True, 'import RPi.GPIO as GPIO\n'), ((526, 543), 'time.sleep', 'time.sleep', (['(2e-06)'], {}), '(2e-06)\n', (536, 543), False, 'import time\n'), ((555, 585), 'RPi.GPIO.output', 'GPIO.output', (['BeepPin', 'GPIO.LOW'], {}), '(BeepPin, GPIO.LOW)\n', (566, 585), True, 'import RPi.GPIO as GPIO\n'), ((594, 611), 'time.sleep', 'time.sleep', (['(2e-06)'], {}), '(2e-06)\n', (604, 611), False, 'import time\n')]
|
# Hotspot 2.0 PPS MO tests
# Copyright (c) 2018, The Linux Foundation
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import logging
logger = logging.getLogger()
import os.path
import subprocess
import hostapd
from utils import HwsimSkip
from test_ap_hs20 import hs20_ap_params, interworking_select, interworking_connect, check_sp_type
from test_ap_eap import check_eap_capa
def check_hs20_osu_client():
    """Skip the test case if the hs20-osu-client binary was not built."""
    client_path = "../../hs20/client/hs20-osu-client"
    if os.path.exists(client_path):
        return
    raise HwsimSkip("No hs20-osu-client available")
def set_pps(pps_mo):
    """Install the given PPS MO file using the external hs20-osu-client.

    Parameters
    ----------
    pps_mo : str
        Path to the PPS MO XML file handed to the client binary.
    """
    # check_output() returns bytes on Python 3; decode before the string
    # concatenation in the log call to avoid a TypeError.
    res = subprocess.check_output(["../../hs20/client/hs20-osu-client",
                                   "set_pps", pps_mo]).decode()
    logger.info("set_pps result: " + res)
def test_hs20_pps_mo_1(dev, apdev):
    """Hotspot 2.0 PPS MO with username/password credential"""
    check_hs20_osu_client()
    check_eap_capa(dev[0], "MSCHAPV2")
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Advertise two NAI realms for interworking network selection.
    params['nai_realm'] = [ "0,w1.fi,13[5:6],21[2:4][5:7]",
                            "0,another.example.com" ]
    params['domain_name'] = "w1.fi"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].hs20_enable()
    # Install the credential via the external hs20-osu-client helper.
    set_pps("pps-mo-1.xml")
    # With the installed PPS MO the AP must be selected as a home SP and
    # the connection established with EAP-TTLS.
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "home")
|
[
"test_ap_hs20.check_sp_type",
"subprocess.check_output",
"hostapd.add_ap",
"test_ap_eap.check_eap_capa",
"test_ap_hs20.interworking_connect",
"utils.HwsimSkip",
"test_ap_hs20.hs20_ap_params",
"logging.getLogger",
"test_ap_hs20.interworking_select"
] |
[((199, 218), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (216, 218), False, 'import logging\n'), ((615, 700), 'subprocess.check_output', 'subprocess.check_output', (["['../../hs20/client/hs20-osu-client', 'set_pps', pps_mo]"], {}), "(['../../hs20/client/hs20-osu-client', 'set_pps',\n pps_mo])\n", (638, 700), False, 'import subprocess\n'), ((906, 940), 'test_ap_eap.check_eap_capa', 'check_eap_capa', (['dev[0]', '"""MSCHAPV2"""'], {}), "(dev[0], 'MSCHAPV2')\n", (920, 940), False, 'from test_ap_eap import check_eap_capa\n'), ((984, 1000), 'test_ap_hs20.hs20_ap_params', 'hs20_ap_params', ([], {}), '()\n', (998, 1000), False, 'from test_ap_hs20 import hs20_ap_params, interworking_select, interworking_connect, check_sp_type\n'), ((1191, 1223), 'hostapd.add_ap', 'hostapd.add_ap', (['apdev[0]', 'params'], {}), '(apdev[0], params)\n', (1205, 1223), False, 'import hostapd\n'), ((1282, 1337), 'test_ap_hs20.interworking_select', 'interworking_select', (['dev[0]', 'bssid', '"""home"""'], {'freq': '"""2412"""'}), "(dev[0], bssid, 'home', freq='2412')\n", (1301, 1337), False, 'from test_ap_hs20 import hs20_ap_params, interworking_select, interworking_connect, check_sp_type\n'), ((1342, 1385), 'test_ap_hs20.interworking_connect', 'interworking_connect', (['dev[0]', 'bssid', '"""TTLS"""'], {}), "(dev[0], bssid, 'TTLS')\n", (1362, 1385), False, 'from test_ap_hs20 import hs20_ap_params, interworking_select, interworking_connect, check_sp_type\n'), ((1390, 1419), 'test_ap_hs20.check_sp_type', 'check_sp_type', (['dev[0]', '"""home"""'], {}), "(dev[0], 'home')\n", (1403, 1419), False, 'from test_ap_hs20 import hs20_ap_params, interworking_select, interworking_connect, check_sp_type\n'), ((541, 582), 'utils.HwsimSkip', 'HwsimSkip', (['"""No hs20-osu-client available"""'], {}), "('No hs20-osu-client available')\n", (550, 582), False, 'from utils import HwsimSkip\n')]
|
"""
This is the main module for parsing the parameter file. There are two other
modules strongly linked to this one:
* options: contains definitions of the possible options of a parameter file
* option_parsing: provides the classes Section and Option for simpler parsing
and validation.
:Authors: <NAME>, <NAME>
"""
import os
import pathlib
import sys
import shutil
import configparser as cp
from hyvr.utils import print_to_stdout
from hyvr.input.options import options
from hyvr.input.options_deprecated import options as old_options
from hyvr.input.option_parsing import *
from hyvr.geo.model import Model
from hyvr.geo.contact_surface_utils import parse_contact_model
def setup_from_inifile(inifile, flag_ow):
    """
    Parse the parameter file and prepare the directory structure for a run.

    Parameters
    ----------
    inifile : path
        Path to the ini-file. A value of 0 selects the bundled MADE test
        case ini-file.
    flag_ow : bool
        Whether existing run directories may be overwritten.

    Returns
    -------
    run : dict
        HyVR run parameters (number of simulations, where to store results)
    model : Model object
        A HyVR model object created from the ini-file.
    hydraulics : dict
        Parsed hydraulics section of the ini-file
    """
    run, model_dict, strata_dict, hydraulics, flowtrans, elements = parameters(inifile)
    # Each architectural element is identified by the index of its entry in
    # the strata 'ae' list.
    for name, element in elements.items():
        element['ae_id'] = strata_dict['ae'].index(name)
    # Build the model object, then prepare the output directories.
    model = Model(model_dict, strata_dict, elements, flowtrans)
    set_up_directories(run, inifile, flag_ow)
    return run, model, hydraulics
def parameters(inifile):
    """
    Parses the inifile and returns all sections as dictionaries. Furthermore it
    sets the correct directory names for the run settings.

    Parameters
    ----------
    inifile : path
        Path to inifile. If ``inifile`` is set to 0, the ini-file for the MADE
        test case is used.

    Returns
    -------
    run : dict
        HyVR run settings (number of simulations, where to store results)
    model_dict : dict
        model setup parameters
    strata_dict : dict
        strata setup parameters
    hydraulics : dict
        Parsed hydraulics section of the ini-file
    flowtrans : dict
        Flow and transport settings for output
    elements : list of dicts
        List of dictionaries of settings for :class:`hyvr.model.ae_types.AEType`.
    """
    print_to_stdout("Reading parameter file")
    # test case
    if inifile == 0:
        from pkg_resources import resource_filename
        inifile = resource_filename(__name__, str(pathlib.Path('../made.ini')))
    # read file
    p = cp.ConfigParser()
    try:
        p.read(inifile, encoding='utf-8')
    except cp.MissingSectionHeaderError:
        # this is probably caused by a wrong encoding
        p.read(inifile, encoding='utf-8-sig')
    if len(p.sections()) == 0:
        raise FileNotFoundError("Parameter file {:s} not found!".format(inifile))
    # the 'dataoutputs' option only exists in the deprecated format
    old_format = 'dataoutputs' in dict(p['run'])
    if old_format:
        run, model, strata, hydraulics, flowtrans, elements = get_new_parameters_from_deprecated(*parse_deprecated_inifile(p))
    else:
        run, model, strata, hydraulics, flowtrans, elements = parse_inifile(p)
    # Runname and Modeldir
    # ====================
    # The following code sets
    # - runname: based on this, the output directory is named
    # - modeldir: the directory where to create the output directory
    # - rundir: the output directory
    # if runname is not given, it's the part of the ini-file before
    # "_parameters.ini" or ".ini"
    # in the test case runname is given
    ini_path = pathlib.Path(inifile).parent.resolve()
    ini_name = pathlib.Path(inifile).name
    if run['runname'] is None:
        if ini_name.endswith("autogenerated_backup.ini"):
            run['runname'] = ini_name[0:-25]
        else:
            run['runname'] = ".".join(ini_name.split('.')[0:-1])
    # separate task: find modeldir
    # modeldir is either the given option or the path of the .ini file
    # rundir is modeldir/runname
    # But:
    # If the .ini-filename ends with "autogenerated_backup.ini", the directory of the
    # ini-file is the rundir and modeldir is the directory above.
    # This should overwrite all other settings (except overwrite_old_output)
    if ini_name.endswith("_autogenerated_backup.ini"):
        run['modeldir'] = ini_path.parent.resolve()
        run['rundir'] = ini_path
    else:
        # either inipath or the chosen modeldir
        if run['modeldir'] is None:
            run['modeldir'] = ini_path
        if run['modeldir'] == 'select':
            # Bugfix: the prompt was missing a space between "to" and "save".
            directory = input(
                'Please input the model directory save path, or press <enter> to '
                'save in default directory:\n')
            # Bugfix: the old code tested len(run['modeldir']) -- which at this
            # point is always the 6-character string 'select' -- and fell back
            # to the undefined name 'inipath' (NameError). Test the user input
            # instead and fall back to the ini-file directory.
            if len(directory) == 0:
                run['modeldir'] = ini_path
            else:
                run['modeldir'] = pathlib.Path(directory).resolve()
        run['rundir'] = run['modeldir'] / run['runname']
    return run, model, strata, hydraulics, flowtrans, elements
def parse_inifile(p):
    """
    This function does the main work of parsing the input file.

    Parameters
    ----------
    p : ConfigParser
        A config parser that already read in the inifile.

    Returns
    -------
    run : dict
        HyVR run parameters (number of simulations, where to store results)
    model_dict : dict
        model setup parameters
    strata_dict : dict
        strata setup parameters
    hydraulics : dict
        Parsed hydraulics section of the ini-file
    flowtrans : dict
        Flow and transport settings for output
    elements : list of dicts
        List of dictionaries of settings for :class:`hyvr.model.ae_types.AEType`.

    Raises
    ------
    MissingSectionError
        If a mandatory section is missing from the ini-file.
    ValueError
        If a facies/geometry option references an unknown name.
    """
    # TODO: these names were at some time keywords for hyvr, but I don't know
    # what they describe and they are not implemented anymore
    # NOTE(review): str_values and section_parser are assigned but never used
    # in this function.
    str_values = 'k_trend', 'linear_acceleration'
    sections = p.sections()
    section_parser = {}
    must_haves = ['run', 'model', 'strata', 'hydraulics']
    for section in must_haves:
        if section not in sections:
            raise MissingSectionError(section)
    # run section
    # -----------
    run = Section('run', options['run']).parse(dict(p['run']))
    del sections[sections.index('run')]
    # hydraulics section
    # ------------------
    hydraulics = Section('hydraulics', options['hydraulics']).parse(dict(p['hydraulics']))
    del sections[sections.index('hydraulics')]
    # model section
    # -------------
    model = Section('model', options['model']).parse(dict(p['model']))
    del sections[sections.index('model')]
    # dy/dz default to dx if not given
    if model['dy'] == None:
        model['dy'] = model['dx']
    if model['dz'] == None:
        model['dz'] = model['dx']
    # strata section
    # --------------
    strata = Section('strata', options['strata']).parse(dict(p['strata']))
    del sections[sections.index('strata')]
    # n strata need exactly n-1 contact surfaces between them
    if len(strata['strata_contact_models']) != len(strata['strata']) - 1:
        raise ShapeError('strata_contact_models', 'strata')
    strata['contact_models'] = [
        parse_contact_model(model, depth=True) for model in strata['strata_contact_models']
    ]
    # map background facies names to their indices in the hydrofacies list
    # NOTE(review): the bare except also hides unrelated errors such as a
    # missing 'hydrofacies' key, not only bad facies names.
    try:
        strata['bg_facies'] = parse_facies(strata['bg_facies'], hydraulics['hydrofacies'])
    except:
        raise ValueError('Invalid facies string in strata section in option bg_facies: ' + strata['bg_facies'])
    # flowtrans section
    # ------------------
    # this section is only necessary for model output
    for output in ['mf', 'mf6', 'hgs']:
        if output in run['outputs']:
            if 'flowtrans' not in sections:
                raise MissingSectionError('flowtrans')
            break
    if 'flowtrans' in sections:
        flowtrans = Section('flowtrans', options['flowtrans']).parse(dict(p['flowtrans']))
        del sections[sections.index('flowtrans')]
    else:
        flowtrans = {}
    # remaining sections are architectural elements
    elements = {}
    for section in sections:
        dictionary = dict(p[section])
        assert_exists('geometry', dictionary, section)
        geometry = dictionary['geometry']
        if geometry not in ['trough', 'channel', 'sheet']:
            raise ValueError('Invalid geometry: ' + geometry + ' in section ' + section)
        # validate the element section against the option spec of its geometry
        elements[section] = Section(section, options[geometry]).parse(dict(p[section]))
        elements[section]['contact_model'] = parse_contact_model(elements[section]['contact_model'], depth=False)
        # get facies number
        try:
            elements[section]['facies'] = parse_facies(elements[section]['facies'], hydraulics['hydrofacies'])
        except:
            raise ValueError('Invalid facies string in section ' + section + ' in option facies!')
        # get altfacies
        if elements[section]['altfacies'] is not None:
            try:
                elements[section]['altfacies'] = [
                    parse_facies(facies_list, hydraulics['hydrofacies']) for facies_list in elements[section]['altfacies']
                ]
            except:
                raise ValueError('Invalid facies string in section ' + section + ' in option altfacies!')
        # get bg_facies number (-1 marks "no background facies")
        if elements[section]['bg_facies'] == None:
            elements[section]['bg_facies'] = -1
        else:
            try:
                elements[section]['bg_facies'] = hydraulics['hydrofacies'].index(elements[section]['bg_facies'])
            except:
                raise ValueError('Invalid facies in section ' + section + ' in option bg_facies: ', elements[section]['bg_facies'])
        # if structure is 'dip', require 'dipset_dist'
        if elements[section]['structure'] == 'dip' or elements[section]['structure'] == 'random':
            assert_exists('dipset_dist', dictionary, section)
        # if structure is 'bulb_sets', require 'bulbset_dist'
        if elements[section]['structure'] == 'bulb' or elements[section]['structure'] == 'random':
            assert_exists('bulbset_dist', dictionary, section)
        # lag surface facies (-1 marks "no lag facies")
        if geometry in ['trough', 'channel']:
            if elements[section]['lag_height'] != 0.0:
                try:
                    elements[section]['lag_facies'] = hydraulics['hydrofacies'].index(elements[section]['lag_facies'])
                except:
                    raise ValueError('lag_facies in section ' + section + ' is invalid!')
            else:
                elements[section]['lag_facies'] = -1
    return run, model, strata, hydraulics, flowtrans, elements
def get_new_parameters_from_deprecated(run, model, strata, hydraulics, flowtrans, elements):
    """
    This applies the necessary changes to parsed sections of an old-format inifile.

    The six dictionaries parsed from a deprecated ini-file are rewritten in
    place so that they use the same keys as the output of ``parse_inifile``
    and are returned in the same order.

    Raises
    ------
    ValueError
        If an option that is no longer supported (display, ae_table,
        save_aelu) is used.
    """
    # run section
    run['outputs'] = run['modeloutputs'] + run['dataoutputs']
    run['overwrite_old_output'] = run['flag_ow']
    # hydraulics section
    hydraulics['hydrofacies'] = hydraulics['hydro']
    # model section
    model['anisotropy'] = run['anisotropy']
    model['hydraulics'] = hydraulics['gen']
    model['heterogeneity'] = run['het']
    model['heterogeneity_level'] = model['hetlev']
    if model['display']:
        raise ValueError('display option is not supported anymore')
    # strata section: old 'bg' is (facies, azimuth, dip), replicated for
    # every stratum
    n_strata = len(strata['ssm'])
    strata['bg_facies'] = [int(strata['bg'][0])] * n_strata
    strata['bg_azim'] = [strata['bg'][1]] * n_strata
    strata['bg_dip'] = [strata['bg'][2]] * n_strata
    strata['strata'] = strata['ssm']
    if strata['ae_table'] is not None:
        raise ValueError('AE tables are not supported anymore!')
    if strata['save_aelu']:
        raise ValueError('save_aelu is not supported anymore!')
    contact_models = []
    for i in range(n_strata-1):
        cm = parse_old_strata_contact_model(strata, i)
        contact_models.append(cm)
    strata['contact_models'] = contact_models
    strata['ae_in_strata'] = strata['ssm_ae']
    # element sections
    for elem_name in elements:
        elem = elements[elem_name]
        elem['size_ztrend'] = elem['geo_ztrend']
        if elem['bg'] is not None:
            # old element 'bg' follows the strata layout (facies, azim, dip)
            elem['bg_facies'] = elem['bg'][0]
            elem['bg_azim'] = elem['bg'][1]
            # Bugfix: dip is the third entry of 'bg'; the old code reused
            # index 1 (the azimuth) here.
            elem['bg_dip'] = elem['bg'][2]
        else:
            elem['bg_facies'] = -1
            elem['bg_azim'] = float('NaN')
            elem['bg_dip'] = float('NaN')
        elem['dipset_dist'] = elem['dipset_d']
        if 'bulbset_d' in elem:
            elem['bulbset_dist'] = elem['bulbset_d']
        if elem['geometry'] == 'trunc_ellip':
            elem['geometry'] = 'trough'
            elem['trough_density'] = elem['el_z']
        elem['contact_model'] = parse_old_elem_contact_model(elem)
        if elem['geometry'] in ['trough', 'channel']:
            if elem['lag'] is not None:
                elem['lag_height'] = elem['lag'][0]
                elem['lag_facies'] = elem['lag'][1]
            else:
                elem['lag_height'] = 0.0
                elem['lag_facies'] = -1
    return run, model, strata, hydraulics, flowtrans, elements
def parse_facies(facies_list, hydrofacies):
    """
    Map a list of facies name strings to their indices in ``hydrofacies``.

    Raises a ValueError if a name is not contained in ``hydrofacies``.
    """
    indices = []
    for name in facies_list:
        indices.append(hydrofacies.index(name))
    return indices
def get_new_facies_list(facies_list, hydrofacies):
    """Map facies indices back to their names in ``hydrofacies``
    (inverse of ``parse_facies``)."""
    names = []
    for index in facies_list:
        names.append(hydrofacies[index])
    return names
def parse_old_strata_contact_model(strata, i):
    """
    Translate the i-th strata contact description of the deprecated
    ini-format into a new-style contact model dict.

    Parameters
    ----------
    strata : dict
        Parsed (old-format) strata section.
    i : int
        Index of the contact surface.

    Returns
    -------
    dict
        Contact model with keys 'mode' and 'z' and, for random contacts,
        'var', 'corlx' and 'corly'.
    """
    mode = strata['ssm_contact']
    contact = {'mode': mode, 'z': strata['ssm_top'][i]}
    lowered = mode.lower()
    if lowered == 'flat':
        return contact
    if lowered == 'random':
        params = strata['ssm_contact_model'][i]
        contact['var'] = params[0]
        contact['corlx'] = params[1]
        contact['corly'] = params[2]
        return contact
    raise ValueError('Unknown contact type in strata section: ' + mode)
def parse_old_elem_contact_model(elem):
    """
    Translate an architectural-element contact description of the deprecated
    ini-format into a new-style contact model dict.

    Parameters
    ----------
    elem : dict
        Parsed (old-format) element section; must contain 'contact' and,
        for random contacts, 'contact_model' (variance, corlx, corly).

    Returns
    -------
    dict
        Contact model with key 'mode' and, for random contacts, 'var',
        'corlx' and 'corly'.

    Raises
    ------
    ValueError
        If the contact mode is neither 'flat' nor 'random'.
    """
    mode = elem['contact']
    cm = {'mode':mode}
    if mode.lower() == 'flat':
        return cm
    elif mode.lower() == 'random':
        cm['var'] = elem['contact_model'][0]
        cm['corlx'] = elem['contact_model'][1]
        cm['corly'] = elem['contact_model'][2]
        return cm
    else:
        # Bugfix: the old message wrongly blamed the strata section although
        # this function only handles element sections.
        raise ValueError('Unknown contact type in element section: ' + mode)
def parse_deprecated_inifile(p):
    """Parse a config file in the deprecated (old) ini format.

    Parameters
    ----------
    p : configparser.ConfigParser
        Parser that has already read the ini-file.

    Returns
    -------
    tuple
        (run, model, strata, hydraulics, flowtrans, elements) parsed sections;
        ``elements`` maps section name -> parsed architectural element.

    NOTE(review): the ``ae_table`` check below references ``inifile``, which is
    not a parameter of this function -- presumably it resolves to a
    module-level name; confirm, otherwise this raises NameError whenever
    ``strata['ae_table']`` is set.
    """
    import warnings
    print()
    warnings.warn("You seem to be using the old ini-file format. We strongly recommend to use the new format as described in our documentation.", DeprecationWarning)
    print("------- Warning: You are using the old ini-file format ----------------------")
    print("We strongly recommend to use the new format as described in the documentation")
    print("-----------------------------------------------------------------------------")
    print()
    sections = p.sections()
    # NOTE(review): section_parser appears unused in this function.
    section_parser = {}
    # The following code is not very nice, with lots of repetitions. This could
    # be much nicer if the strata section had only one possible name.
    must_haves = ['run', 'model', 'strata', 'hydraulics', 'flowtrans']
    for section in must_haves:
        if section not in sections:
            raise MissingSectionError(section)
    run = Section('run', old_options['run']).parse(dict(p['run']))
    del sections[sections.index('run')]
    model = Section('model', old_options['model']).parse(dict(p['model']))
    del sections[sections.index('model')]
    # dy/dz default to dx when not given.
    if model['dy'] == None:
        model['dy'] = model['dx']
    if model['dz'] == None:
        model['dz'] = model['dx']
    strata = Section('strata', old_options['strata']).parse(dict(p['strata']))
    del sections[sections.index('strata')]
    if strata['ae_table'] is not None:
        # ae_table paths are interpreted relative to the ini-file location.
        ae_table = pathlib.Path(inifile).parent / strata['ae_table']
        if not ae_table.exists():
            raise FileNotFoundError('ae_table-file {:s} not found!'.format(ae_table))
    hydraulics = Section('hydraulics', old_options['hydraulics']).parse(dict(p['hydraulics']))
    del sections[sections.index('hydraulics')]
    flowtrans = Section('flowtrans', old_options['flowtrans']).parse(dict(p['flowtrans']))
    del sections[sections.index('flowtrans')]
    # remaining sections are architectural elements
    elements = {}
    for section in sections:
        dictionary = dict(p[section])
        assert_exists('geometry', dictionary, section)
        geometry = dictionary['geometry']
        elements[section] = Section(section, old_options[geometry]).parse(dict(p[section]))
    return run, model, strata, hydraulics, flowtrans, elements
def set_up_directories(run, inifile, overwrite_old_output=None):
    """
    Create the necessary directories (modeldir, rundir) and store a backup of
    the used ini-file in the rundir.

    Parameters:
        run (dict): parsed run-section of the config file; must contain
            'modeldir', 'rundir', 'runname' and 'overwrite_old_output'.
        inifile (str): path to config file, or 0 for the test case (a
            temporary run directory is created instead).
        overwrite_old_output (bool): Whether to overwrite the old run
            directory. If it is None, the option from the config file will be
            chosen instead (default in inifile: False).
    """
    # for the test case we just create a temporary run directory/output
    # directory
    if inifile == 0:
        import tempfile
        run['rundir'] = pathlib.Path(tempfile.mkdtemp())
        return
    p = cp.ConfigParser()
    try:
        p.read(inifile, encoding='utf-8')
    except cp.MissingSectionHeaderError:
        # this is probably caused by a wrong encoding
        p.read(inifile, encoding='utf-8-sig')
    # we're now done with parsing and can create the model directory.
    run['modeldir'].mkdir(parents=True, exist_ok=True)
    # Fall back to the config-file option only when no explicit flag was given.
    if overwrite_old_output is None:
        overwrite_old_output = run['overwrite_old_output']
    if run['rundir'].exists():
        # BUG FIX: this previously consulted run['overwrite_old_output']
        # again, silently ignoring an explicit `overwrite_old_output`
        # argument resolved above.
        if overwrite_old_output:
            # If it exists, we just delete everything in it
            for f in os.listdir(run['rundir']):
                path = run['rundir'] / f
                if path.is_file():
                    path.unlink()
                elif path.is_dir():
                    shutil.rmtree(path)
        else:
            raise FileExistsError(
                "Run directory already exists and overwrite flag is "
                "set to 'false'" ". Either change the runname, or "
                "change the overwrite flag ('overwrite_old_output') "
                "in the config file or run hyvr with --overwrite.")
    else:
        run['rundir'].mkdir(parents=True, exist_ok=True)
    # Keep a copy of the ini-file next to the results for reproducibility.
    backup_file = run['rundir'] / (run['runname'] + '_autogenerated_backup.ini')
    with open(backup_file, 'w') as f:
        p.write(f)
|
[
"os.listdir",
"hyvr.geo.model.Model",
"shutil.rmtree",
"pathlib.Path",
"hyvr.geo.contact_surface_utils.parse_contact_model",
"tempfile.mkdtemp",
"hyvr.utils.print_to_stdout",
"configparser.ConfigParser",
"os.path.join",
"warnings.warn"
] |
[((1598, 1649), 'hyvr.geo.model.Model', 'Model', (['model_dict', 'strata_dict', 'elements', 'flowtrans'], {}), '(model_dict, strata_dict, elements, flowtrans)\n', (1603, 1649), False, 'from hyvr.geo.model import Model\n'), ((2576, 2617), 'hyvr.utils.print_to_stdout', 'print_to_stdout', (['"""Reading parameter file"""'], {}), "('Reading parameter file')\n", (2591, 2617), False, 'from hyvr.utils import print_to_stdout\n'), ((2813, 2830), 'configparser.ConfigParser', 'cp.ConfigParser', ([], {}), '()\n', (2828, 2830), True, 'import configparser as cp\n'), ((14666, 14837), 'warnings.warn', 'warnings.warn', (['"""You seem to be using the old ini-file format. We strongly recommend to use the new format as described in our documentation."""', 'DeprecationWarning'], {}), "(\n 'You seem to be using the old ini-file format. We strongly recommend to use the new format as described in our documentation.'\n , DeprecationWarning)\n", (14679, 14837), False, 'import warnings\n'), ((17633, 17650), 'configparser.ConfigParser', 'cp.ConfigParser', ([], {}), '()\n', (17648, 17650), True, 'import configparser as cp\n'), ((3888, 3909), 'pathlib.Path', 'pathlib.Path', (['inifile'], {}), '(inifile)\n', (3900, 3909), False, 'import pathlib\n'), ((7331, 7369), 'hyvr.geo.contact_surface_utils.parse_contact_model', 'parse_contact_model', (['model'], {'depth': '(True)'}), '(model, depth=True)\n', (7350, 7369), False, 'from hyvr.geo.contact_surface_utils import parse_contact_model\n'), ((8666, 8734), 'hyvr.geo.contact_surface_utils.parse_contact_model', 'parse_contact_model', (["elements[section]['contact_model']"], {'depth': '(False)'}), "(elements[section]['contact_model'], depth=False)\n", (8685, 8734), False, 'from hyvr.geo.contact_surface_utils import parse_contact_model\n'), ((17589, 17607), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (17605, 17607), False, 'import tempfile\n'), ((18381, 18406), 'os.listdir', 'os.listdir', (["run['rundir']"], {}), "(run['rundir'])\n", (18391, 
18406), False, 'import os\n'), ((2758, 2785), 'pathlib.Path', 'pathlib.Path', (['"""../made.ini"""'], {}), "('../made.ini')\n", (2770, 2785), False, 'import pathlib\n'), ((3834, 3855), 'pathlib.Path', 'pathlib.Path', (['inifile'], {}), '(inifile)\n', (3846, 3855), False, 'import pathlib\n'), ((16033, 16054), 'pathlib.Path', 'pathlib.Path', (['inifile'], {}), '(inifile)\n', (16045, 16054), False, 'import pathlib\n'), ((18431, 18461), 'os.path.join', 'os.path.join', (["run['rundir']", 'f'], {}), "(run['rundir'], f)\n", (18443, 18461), False, 'import os\n'), ((5120, 5143), 'pathlib.Path', 'pathlib.Path', (['directory'], {}), '(directory)\n', (5132, 5143), False, 'import pathlib\n'), ((18628, 18647), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (18641, 18647), False, 'import shutil\n')]
|
# This function is copied from https://github.com/Rubikplayer/flame-fitting
'''
Copyright 2015 <NAME>, <NAME> and the Max Planck Gesellschaft. All rights reserved.
This software is provided for research purposes only.
By using this software you agree to the terms of the SMPL Model license here http://smpl.is.tue.mpg.de/license
More information about SMPL is available here http://smpl.is.tue.mpg.
For comments or questions, please email us at: <EMAIL>
About this file:
================
This module defines the mapping of joint-angles to pose-blendshapes.
Modules included:
- posemap:
computes the joint-to-pose blend shape mapping given a mapping type as input
'''
import chumpy as ch
import numpy as np
import cv2
class Rodrigues(ch.Ch):
    """Chumpy op turning the axis-angle vector ``rt`` into a 3x3 rotation
    matrix via OpenCV's Rodrigues formula."""
    dterms = 'rt'

    def compute_r(self):
        rotation, _jacobian = cv2.Rodrigues(self.rt.r)
        return rotation

    def compute_dr_wrt(self, wrt):
        # Only differentiable with respect to our own rotation vector.
        if wrt is self.rt:
            _rotation, jacobian = cv2.Rodrigues(self.rt.r)
            return jacobian.T
def lrotmin(p):
    """Map pose parameters ``p`` (concatenated axis-angle vectors) to the
    flattened relative rotations ``R - I``, skipping the first (root)
    rotation.

    Accepts a numpy array (returns a numpy vector) or chumpy objects
    (returns a chumpy expression usable in optimization).
    """
    if isinstance(p, np.ndarray):
        # Drop the first 3 entries (the root rotation), then convert each
        # remaining axis-angle triple to a rotation matrix minus identity.
        p = p.ravel()[3:]
        return np.concatenate(
            [(cv2.Rodrigues(np.array(pp))[0] - np.eye(3)).ravel() for pp in p.reshape((-1, 3))]).ravel()
    if p.ndim != 2 or p.shape[1] != 3:
        p = p.reshape((-1, 3))
    # Drop the first (root) rotation, matching the numpy branch above.
    p = p[1:]
    return ch.concatenate([(Rodrigues(pp) - ch.eye(3)).ravel() for pp in p]).ravel()
def posemap(s):
    """Return the joint-angle -> pose-blendshape mapping function named ``s``.

    Only the 'lrotmin' mapping is supported; anything else raises.
    """
    if s != 'lrotmin':
        raise Exception('Unknown posemapping: %s' % (str(s),))
    return lrotmin
|
[
"chumpy.eye",
"cv2.Rodrigues",
"numpy.array",
"numpy.eye"
] |
[((832, 856), 'cv2.Rodrigues', 'cv2.Rodrigues', (['self.rt.r'], {}), '(self.rt.r)\n', (845, 856), False, 'import cv2\n'), ((946, 970), 'cv2.Rodrigues', 'cv2.Rodrigues', (['self.rt.r'], {}), '(self.rt.r)\n', (959, 970), False, 'import cv2\n'), ((1329, 1338), 'chumpy.eye', 'ch.eye', (['(3)'], {}), '(3)\n', (1335, 1338), True, 'import chumpy as ch\n'), ((1139, 1148), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (1145, 1148), True, 'import numpy as np\n'), ((1120, 1132), 'numpy.array', 'np.array', (['pp'], {}), '(pp)\n', (1128, 1132), True, 'import numpy as np\n')]
|
import logging
import time
import can
import j1939
# Turn on verbose logging for both the j1939 stack and python-can.
for _noisy_logger in ('j1939', 'can'):
    logging.getLogger(_noisy_logger).setLevel(logging.DEBUG)
def on_message(priority, pgn, sa, timestamp, data):
    """Receive incoming messages from the bus and print a short summary.

    :param int priority:
        Priority of the message
    :param int pgn:
        Parameter Group Number of the message
    :param int sa:
        Source Address of the message
    :param int timestamp:
        Timestamp of the message
    :param bytearray data:
        Data of the PDU
    """
    summary = "PGN {} length {}".format(hex(pgn), len(data))
    print(summary)
def main():
    """Open an ECU on a PCAN interface, log traffic for two minutes, shut down."""
    print("Initializing")

    # One ElectronicControlUnit can hold multiple ControllerApplications.
    ecu = j1939.ElectronicControlUnit()

    # Connect to the CAN bus. Arguments are forwarded to python-can's
    # can.interface.Bus() constructor
    # (see https://python-can.readthedocs.io/en/stable/bus.html).
    # Alternative back-ends:
    #   ecu.connect(bustype='socketcan', channel='can0')
    #   ecu.connect(bustype='kvaser', channel=0, bitrate=250000)
    #   ecu.connect(bustype='ixxat', channel=0, bitrate=250000)
    #   ecu.connect(bustype='vector', app_name='CANalyzer', channel=0, bitrate=250000)
    #   ecu.connect(bustype='nican', channel='CAN0', bitrate=250000)
    ecu.connect(bustype='pcan', channel='PCAN_USBBUS1', bitrate=500000)

    # subscribe to all global and peer-to-peer messages with destination 0xFA
    ecu.subscribe(on_message, 0xFA)

    listen_seconds = 120
    time.sleep(listen_seconds)

    print("Deinitializing")
    ecu.disconnect()
# Run the demo only when executed as a script (not on import).
if __name__ == '__main__':
    main()
|
[
"j1939.ElectronicControlUnit",
"logging.getLogger",
"time.sleep"
] |
[((776, 805), 'j1939.ElectronicControlUnit', 'j1939.ElectronicControlUnit', ([], {}), '()\n', (803, 805), False, 'import j1939\n'), ((1516, 1531), 'time.sleep', 'time.sleep', (['(120)'], {}), '(120)\n', (1526, 1531), False, 'import time\n'), ((57, 83), 'logging.getLogger', 'logging.getLogger', (['"""j1939"""'], {}), "('j1939')\n", (74, 83), False, 'import logging\n'), ((109, 133), 'logging.getLogger', 'logging.getLogger', (['"""can"""'], {}), "('can')\n", (126, 133), False, 'import logging\n')]
|
import json
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.views.generic import DetailView, ListView, TemplateView
from django.views.decorators.cache import (
cache_page,
patch_cache_control,
)
from djangae.utils import on_production
from blog.models import Post, Photo
# Cache lifetime in seconds: effectively disabled (1s) outside production,
# 90s when on_production() reports a production environment.
MAX_AGE = 1 if not on_production() else 90
class CacheMixin(object):
    """View mixin caching GET responses for MAX_AGE seconds.

    Wraps ``get`` with Django's ``cache_page`` and marks the response as
    publicly cacheable (Cache-Control and Pragma headers).
    """

    @method_decorator(cache_page(MAX_AGE))
    def get(self, request, *args, **kwargs):
        response = super(CacheMixin, self).get(request, *args, **kwargs)
        # Allow shared caches (proxies/CDNs) to store the response.
        patch_cache_control(response, public=True, max_age=MAX_AGE)
        response['Pragma'] = 'Public'
        return response
class HomeView(CacheMixin, ListView):
    """Cached, paginated list of published blog posts."""
    model = Post
    paginate_by = 3
    template_name = 'blog/home.html'

    def get_queryset(self):
        # Only published posts are listed.
        return self.model.objects.published()


home = HomeView.as_view()
class PostView(CacheMixin, DetailView):
    """Detail page for a single published post.

    NOTE(review): this class overrides ``get`` without calling
    ``CacheMixin.get``, so the ``cache_page`` decorator from the mixin is
    never applied here -- confirm whether skipping per-post caching is
    intended.
    """
    model = Post
    template_name = 'blog/post.html'
    slug_field = 'slug'

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        # Unpublished posts are hidden: send the visitor back home.
        if not self.object.is_published:
            return redirect(reverse('blog:home'))
        context = self.get_context_data(object=self.object)
        return self.render_to_response(context)


post = PostView.as_view()
class PhotoListView(CacheMixin, ListView):
    """Cached, paginated grid of published photos."""
    model = Photo
    paginate_by = 12
    template_name = 'blog/photos.html'

    def get_queryset(self):
        # Only published photos are listed.
        return self.model.objects.published()


photos = PhotoListView.as_view()
class AboutView(CacheMixin, TemplateView):
    """Cached static "about" page."""
    template_name = 'blog/about.html'


about = AboutView.as_view()
|
[
"django.views.decorators.cache.patch_cache_control",
"djangae.utils.on_production",
"django.views.decorators.cache.cache_page",
"django.core.urlresolvers.reverse"
] |
[((437, 452), 'djangae.utils.on_production', 'on_production', ([], {}), '()\n', (450, 452), False, 'from djangae.utils import on_production\n'), ((658, 717), 'django.views.decorators.cache.patch_cache_control', 'patch_cache_control', (['response'], {'public': '(True)', 'max_age': 'MAX_AGE'}), '(response, public=True, max_age=MAX_AGE)\n', (677, 717), False, 'from django.views.decorators.cache import cache_page, patch_cache_control\n'), ((511, 530), 'django.views.decorators.cache.cache_page', 'cache_page', (['MAX_AGE'], {}), '(MAX_AGE)\n', (521, 530), False, 'from django.views.decorators.cache import cache_page, patch_cache_control\n'), ((1272, 1292), 'django.core.urlresolvers.reverse', 'reverse', (['"""blog:home"""'], {}), "('blog:home')\n", (1279, 1292), False, 'from django.core.urlresolvers import reverse\n')]
|
"""Resize an input image to a fixed set of standard sizes.

Usage: python script.py <image file>   (spaces in the name are allowed)

Writes one JPEG per entry in ``name_res``, named ``file_name + suffix``.
"""
import matplotlib.pyplot as plt
import cv2
import sys

if len(sys.argv) < 2:
    print('Please enter filename')
    sys.exit()

# Re-join argv so that file names containing spaces keep working.
file_path = ' '.join(sys.argv[1:])
extension = file_path.split('.')[-1]
file_name = '.'.join(file_path.split('.')[:-1])

image = cv2.imread(file_path)
# OpenCV loads BGR; convert so matplotlib saves correct colors.
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)

# Output suffix -> (height, width).
name_res = {
    '_placehold.jpg': (129, 230),
    '_lg.jpg': (1125, 1999),
    # BUG FIX: this key was previously `file_name + '.jpg'`, which produced a
    # doubled name (e.g. "photophoto.jpg") when concatenated below.
    '.jpg': (1080, 1920),
    '_thumb@2x.jpg': (602, 1070),
    '_md.jpg': (557, 991),
    '_xs.jpg': (323, 575),
    '_thumb.jpg': (301, 535),
    '_sm.jpg': (431, 767)
}

for suffix, size in name_res.items():
    # cv2.resize expects (width, height); the table stores (height, width).
    resized = cv2.resize(image, size[::-1])
    plt.imsave(file_name + suffix, resized)
|
[
"cv2.resize",
"cv2.cvtColor",
"cv2.imread",
"matplotlib.pyplot.imsave",
"sys.exit"
] |
[((357, 378), 'cv2.imread', 'cv2.imread', (['file_path'], {}), '(file_path)\n', (367, 378), False, 'import cv2\n'), ((388, 426), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2RGB'], {}), '(image, cv2.COLOR_BGR2RGB)\n', (400, 426), False, 'import cv2\n'), ((124, 134), 'sys.exit', 'sys.exit', ([], {}), '()\n', (132, 134), False, 'import sys\n'), ((749, 779), 'cv2.resize', 'cv2.resize', (['image', 'value[::-1]'], {}), '(image, value[::-1])\n', (759, 779), False, 'import cv2\n'), ((785, 817), 'matplotlib.pyplot.imsave', 'plt.imsave', (['(file_name + key)', 'img'], {}), '(file_name + key, img)\n', (795, 817), True, 'import matplotlib.pyplot as plt\n')]
|
# Generated by Django 2.1.1 on 2018-10-22 14:15
from django.db import migrations
from openwisp_users.migrations import create_default_groups
class Migration(migrations.Migration):
    """Populate the default openwisp user groups (reversal is a no-op)."""

    dependencies = [
        ('openwisp_users', '0003_default_organization'),
    ]

    operations = [
        migrations.RunPython(
            create_default_groups, reverse_code=migrations.RunPython.noop
        ),
    ]
|
[
"django.db.migrations.RunPython"
] |
[((295, 383), 'django.db.migrations.RunPython', 'migrations.RunPython', (['create_default_groups'], {'reverse_code': 'migrations.RunPython.noop'}), '(create_default_groups, reverse_code=migrations.\n RunPython.noop)\n', (315, 383), False, 'from django.db import migrations\n')]
|
from datetime import datetime
from freezegun import freeze_time
from django.urls import reverse
from api.cases.enums import CaseTypeEnum
from api.compliance.tests.factories import ComplianceSiteCaseFactory, OpenLicenceReturnsFactory
from api.licences.enums import LicenceStatus
from api.staticdata.statuses.enums import CaseStatusEnum
from api.staticdata.statuses.libraries.get_case_status import get_case_status_by_status
from test_helpers.clients import DataTestClient
def _assert_response_data(self, response_data, licence, completed_olr=False):
    """Assert that the licence list response contains exactly ``licence``."""
    self.assertEqual(len(response_data), 1)
    entry = response_data[0]
    self.assertEqual(entry["id"], str(licence.case_id))
    self.assertEqual(entry["reference_code"], licence.case.reference_code)
    self.assertEqual(entry["status"]["key"], licence.status)
    self.assertEqual(entry["status"]["value"], LicenceStatus.to_str(licence.status))

    goods = licence.case.goods_type.all()
    self.assertEqual(len(entry["flags"]), 2)
    # Each response flag mirrors the first flag of the corresponding good.
    for attr in ("name", "level", "priority"):
        for idx in (0, 1):
            self.assertEqual(
                entry["flags"][idx][attr], getattr(goods[idx].flags.all()[0], attr)
            )

    case_type = licence.case.case_type
    self.assertEqual(entry["case_type"]["id"], str(case_type.id))
    self.assertEqual(entry["case_type"]["reference"]["key"], case_type.reference)
    self.assertEqual(entry["case_type"]["type"]["key"], case_type.type)
    self.assertEqual(entry["case_type"]["sub_type"]["key"], case_type.sub_type)
    self.assertEqual(entry["has_open_licence_returns"], completed_olr)
class GetComplianceLicencesTests(DataTestClient):
    """Tests for the compliance licences list endpoint.

    The four original tests were near-identical copy-paste; the shared setup
    is factored into private helpers (test method names and behavior are
    unchanged).
    """

    def _create_compliance_case(self):
        # Open compliance site case for the test organisation's primary site.
        return ComplianceSiteCaseFactory(
            organisation=self.organisation,
            site=self.organisation.primary_site,
            status=get_case_status_by_status(CaseStatusEnum.OPEN),
        )

    def _create_issued_licence(self, case_type_id=None):
        # Open application (optionally with another case type) + issued licence.
        application = self.create_open_application_case(self.organisation)
        if case_type_id is not None:
            application.case_type_id = case_type_id
            application.save()
        return self.create_licence(application, status=LicenceStatus.ISSUED)

    def _get_licences(self, compliance_case):
        # GET the endpoint and return the paginated results list.
        url = reverse("compliance:licences", kwargs={"pk": compliance_case.id})
        response = self.client.get(url, **self.gov_headers)
        return response.json()["results"]

    def test_get_compliance_OIEL_licences_with_outstanding_olr(self):
        compliance_case = self._create_compliance_case()
        licence = self._create_issued_licence()
        response_data = self._get_licences(compliance_case)
        _assert_response_data(self, response_data, licence)

    def test_get_compliance_OICL_licences(self):
        compliance_case = self._create_compliance_case()
        licence = self._create_issued_licence(CaseTypeEnum.OICL.id)
        response_data = self._get_licences(compliance_case)
        _assert_response_data(self, response_data, licence)

    @freeze_time("2020-06-14")
    def test_get_compliance_OIEL_licences_with_completed_olr(self):
        compliance_case = self._create_compliance_case()
        licence = self._create_issued_licence(CaseTypeEnum.OIEL.id)
        # A return filed for last year counts as completed.
        olr = OpenLicenceReturnsFactory(organisation=self.organisation, year=datetime.now().year - 1)
        olr.licences.set([licence])
        response_data = self._get_licences(compliance_case)
        _assert_response_data(self, response_data, licence, completed_olr=True)

    @freeze_time("2020-06-14")
    def test_get_compliance_OIEL_licences_with_2_year_previous_olr(self):
        compliance_case = self._create_compliance_case()
        licence = self._create_issued_licence(CaseTypeEnum.OIEL.id)
        # A return from two years back does not count as completed.
        olr = OpenLicenceReturnsFactory(organisation=self.organisation, year=datetime.now().year - 2)
        olr.licences.set([licence])
        response_data = self._get_licences(compliance_case)
        _assert_response_data(self, response_data, licence, completed_olr=False)
|
[
"django.urls.reverse",
"api.licences.enums.LicenceStatus.to_str",
"freezegun.freeze_time",
"datetime.datetime.now",
"api.staticdata.statuses.libraries.get_case_status.get_case_status_by_status"
] |
[((3572, 3597), 'freezegun.freeze_time', 'freeze_time', (['"""2020-06-14"""'], {}), "('2020-06-14')\n", (3583, 3597), False, 'from freezegun import freeze_time\n'), ((4544, 4569), 'freezegun.freeze_time', 'freeze_time', (['"""2020-06-14"""'], {}), "('2020-06-14')\n", (4555, 4569), False, 'from freezegun import freeze_time\n'), ((881, 917), 'api.licences.enums.LicenceStatus.to_str', 'LicenceStatus.to_str', (['licence.status'], {}), '(licence.status)\n', (901, 917), False, 'from api.licences.enums import LicenceStatus\n'), ((2564, 2629), 'django.urls.reverse', 'reverse', (['"""compliance:licences"""'], {'kwargs': "{'pk': compliance_case.id}"}), "('compliance:licences', kwargs={'pk': compliance_case.id})\n", (2571, 2629), False, 'from django.urls import reverse\n'), ((3328, 3393), 'django.urls.reverse', 'reverse', (['"""compliance:licences"""'], {'kwargs': "{'pk': compliance_case.id}"}), "('compliance:licences', kwargs={'pk': compliance_case.id})\n", (3335, 3393), False, 'from django.urls import reverse\n'), ((4280, 4345), 'django.urls.reverse', 'reverse', (['"""compliance:licences"""'], {'kwargs': "{'pk': compliance_case.id}"}), "('compliance:licences', kwargs={'pk': compliance_case.id})\n", (4287, 4345), False, 'from django.urls import reverse\n'), ((5258, 5323), 'django.urls.reverse', 'reverse', (['"""compliance:licences"""'], {'kwargs': "{'pk': compliance_case.id}"}), "('compliance:licences', kwargs={'pk': compliance_case.id})\n", (5265, 5323), False, 'from django.urls import reverse\n'), ((2336, 2382), 'api.staticdata.statuses.libraries.get_case_status.get_case_status_by_status', 'get_case_status_by_status', (['CaseStatusEnum.OPEN'], {}), '(CaseStatusEnum.OPEN)\n', (2361, 2382), False, 'from api.staticdata.statuses.libraries.get_case_status import get_case_status_by_status\n'), ((3017, 3063), 'api.staticdata.statuses.libraries.get_case_status.get_case_status_by_status', 'get_case_status_by_status', (['CaseStatusEnum.OPEN'], {}), '(CaseStatusEnum.OPEN)\n', (3042, 
3063), False, 'from api.staticdata.statuses.libraries.get_case_status import get_case_status_by_status\n'), ((3831, 3877), 'api.staticdata.statuses.libraries.get_case_status.get_case_status_by_status', 'get_case_status_by_status', (['CaseStatusEnum.OPEN'], {}), '(CaseStatusEnum.OPEN)\n', (3856, 3877), False, 'from api.staticdata.statuses.libraries.get_case_status import get_case_status_by_status\n'), ((4809, 4855), 'api.staticdata.statuses.libraries.get_case_status.get_case_status_by_status', 'get_case_status_by_status', (['CaseStatusEnum.OPEN'], {}), '(CaseStatusEnum.OPEN)\n', (4834, 4855), False, 'from api.staticdata.statuses.libraries.get_case_status import get_case_status_by_status\n'), ((4204, 4218), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4216, 4218), False, 'from datetime import datetime\n'), ((5182, 5196), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5194, 5196), False, 'from datetime import datetime\n')]
|
from django.urls import path
from . import views
# URL routes for the plagiarism-check / search / vectorize endpoints.
urlpatterns = [
    # Check the document identified by `id` for plagiarism.
    path('plagiarism/check/<int:id>', views.check),
    # Search over forum content.
    path('forum/search', views.forumSearch),
    # Search over products.
    path('products/search', views.productSearch),
    # Compute vector representations (presumably for similarity search -- confirm).
    path('vectorize', views.vectorize),
]
|
[
"django.urls.path"
] |
[((70, 116), 'django.urls.path', 'path', (['"""plagiarism/check/<int:id>"""', 'views.check'], {}), "('plagiarism/check/<int:id>', views.check)\n", (74, 116), False, 'from django.urls import path\n'), ((122, 161), 'django.urls.path', 'path', (['"""forum/search"""', 'views.forumSearch'], {}), "('forum/search', views.forumSearch)\n", (126, 161), False, 'from django.urls import path\n'), ((167, 211), 'django.urls.path', 'path', (['"""products/search"""', 'views.productSearch'], {}), "('products/search', views.productSearch)\n", (171, 211), False, 'from django.urls import path\n'), ((217, 251), 'django.urls.path', 'path', (['"""vectorize"""', 'views.vectorize'], {}), "('vectorize', views.vectorize)\n", (221, 251), False, 'from django.urls import path\n')]
|
""" Customization of pyface functionality.
"""
import logging
from pyface.api import DirectoryDialog, FileDialog, OK
from pyface.message_dialog import MessageDialog
# Module-level logger (appears unused in this module -- kept for debugging).
local_logger = logging.getLogger(__name__)
def message_dialog(message, title='Error', severity='error'):
    """ Convenience function to show an error message dialog.

    Parameters
    ----------
    message : str
        The text of the message to display.

    title : str
        The text of the dialog title.

    severity : str
        Level of severity for the dialog. One of 'information', 'warning', and
        'error'.
    """
    MessageDialog(
        parent=None, message=message, title=title, severity=severity
    ).open()
class FileDialogWithMemory(FileDialog):
    """ Customized FileDialog to remember where the last file was loaded from.
    """

    #: Class attribute for all dialogs to share where last file was loaded from
    last_directory = ""

    def _default_directory_default(self):
        # Start in the last visited directory if there is one, otherwise in
        # the user's home folder.
        if not self.last_directory:
            from ...std_lib.filepath_utils import get_home_folder
            return get_home_folder()
        return self.last_directory

    def close(self):
        """ Remember the selected directory.
        """
        super(FileDialogWithMemory, self).close()
        if self.directory:
            # Keep file and directory dialogs in sync so either kind reopens
            # where the user last was.
            FileDialogWithMemory.last_directory = self.directory
            DirectoryDialogWithMemory.last_directory = self.directory
class DirectoryDialogWithMemory(DirectoryDialog):
    """ Customized DirectoryDialog to remember the last visited folder.
    """

    #: Class attribute for all dialogs to share where last file was loaded from
    last_directory = ""

    def _default_directory_default(self):
        # Start in the last visited directory if there is one, otherwise in
        # the user's home folder.
        if not self.last_directory:
            from ...std_lib.filepath_utils import get_home_folder
            return get_home_folder()
        return self.last_directory

    def close(self):
        """ Remember the selected directory.
        """
        super(DirectoryDialogWithMemory, self).close()
        if self.path:
            # Keep file and directory dialogs in sync so either kind reopens
            # where the user last was.
            DirectoryDialogWithMemory.last_directory = self.path
            FileDialogWithMemory.last_directory = self.path
request_file_docstring_template = \
""" Request file of type {0} using a pyface file dialog (with memory).
Parameters
----------
title : str
Title of the file dialog.
wildcard_text : str
Text describing {0} files.
action : str [OPTIONAL, default='open']
Type of dialog requested. By default, the action allows to select an
existing file. Set to 'save as' to create a new file.
Returns
-------
str or None
Path to the file selected or None if the dialog was cancelled.
"""
def generate_file_requester(file_desc, extension, action='open', title=""):
    """ Convert a file extension into a function prompting user for file path.

    Parameters
    ----------
    file_desc : str
        Description for the file requested.

    extension : str
        Extension of the file requested, for example ".csv" or ".png".

    action : str
        Type of file dialog: open an existing file (default), or 'save as' to
        target a new file.

    Examples
    --------
    >>> # Triggers a dialog asking the user for an existing python file.
    >>> prompter = generate_file_requester("Python", ".py")
    >>> prompter()
    """
    if not title:
        title = "Select {} file name".format(file_desc)
    wildcard_text = "{} files".format(file_desc)

    def request_file(title=title, wildcard_text=wildcard_text, action=action):
        wildcard = FileDialogWithMemory.create_wildcard(wildcard_text,
                                                        "*" + extension)
        dialog = FileDialogWithMemory(title=title, wildcard=wildcard,
                                      action=action)
        dialog.open()
        if dialog.return_code != OK:
            return None
        chosen = dialog.path
        # On Windows the dialog doesn't automatically append the extension
        # if missing
        if not chosen.endswith(extension):
            chosen += extension
        return chosen

    request_file.__doc__ = request_file_docstring_template.format(file_desc)
    return request_file
def request_folder(title=""):
    """Request folder using a pyface directory dialog (with memory).

    Parameters
    ----------
    title : str
        Title of the directory dialog.

    Returns
    -------
    str or None
        Returns the path selected for the folder, or None if the dialog was
        aborted.
    """
    dialog = DirectoryDialogWithMemory(title=title or 'Select folder')
    dialog.open()
    return dialog.path if dialog.return_code == OK else None
# Ready-made "open existing file" requesters for common formats.
request_python_file = generate_file_requester("Python", ".py")
request_jpeg_file = generate_file_requester("JPEG", ".jpg")
request_png_file = generate_file_requester("PNG", ".png")
request_csv_file = generate_file_requester("CSV", ".csv")
request_excel_file = generate_file_requester("XLSX", ".xlsx")
# "Save as" variants targeting a (possibly new) file.
to_csv_file_requester = generate_file_requester("CSV", ".csv",
                                                action="save as")
to_excel_file_requester = generate_file_requester("XLSX", ".xlsx",
                                                  action="save as")
to_png_file_requester = generate_file_requester("PNG", ".png",
                                                action="save as")
to_jpg_file_requester = generate_file_requester("JPEG", ".jpg",
                                                action="save as")
|
[
"pyface.message_dialog.MessageDialog",
"logging.getLogger"
] |
[((182, 209), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (199, 209), False, 'import logging\n'), ((621, 696), 'pyface.message_dialog.MessageDialog', 'MessageDialog', ([], {'parent': 'None', 'message': 'message', 'title': 'title', 'severity': 'severity'}), '(parent=None, message=message, title=title, severity=severity)\n', (634, 696), False, 'from pyface.message_dialog import MessageDialog\n')]
|
"""
There are a total of numCourses courses you have to take,
labeled from 0 to numCourses - 1.
You are given an array prerequisites where prerequisites[i] = [ai, bi]
indicates that you must take course bi first if you want to take course ai.
For example, the pair [0, 1], indicates that to take course 0 you have to first take course 1.
Return true if you can finish all courses. Otherwise, return false.
Example 1:
Input: numCourses = 2, prerequisites = [[1,0]]
Output: true
Explanation: There are a total of 2 courses to take.
To take course 1 you should have finished course 0. So it is possible.
Example 2:
Input: numCourses = 2, prerequisites = [[1,0],[0,1]]
Output: false
Explanation: There are a total of 2 courses to take.
To take course 1 you should have finished course 0, and to take course 0 you should also have finished course 1. So it is impossible.
Constraints:
1 <= numCourses <= 105
0 <= prerequisites.length <= 5000
prerequisites[i].length == 2
0 <= ai, bi < numCourses
All the pairs prerequisites[i] are unique.
"""
# V0
import collections
class Solution:
    def canFinish(self, numCourses, prerequisites):
        """Return True when all courses can be finished, i.e. the
        prerequisite graph contains no cycle.

        BUG FIX: the original signature annotated the parameters with
        ``List[List[int]]`` without importing ``List`` from ``typing``, so
        defining the class raised NameError. The annotations were removed;
        behavior is otherwise unchanged.
        """
        # Adjacency list: course -> list of its prerequisites.
        _graph = collections.defaultdict(list)
        for course, prereq in prerequisites:
            _graph[course].append(prereq)
        # 0 : unknown, 1 : visiting (on current DFS path), 2 : visited
        visited = [0] * numCourses
        for i in range(numCourses):
            if not self.dfs(_graph, visited, i):
                return False
        return True

    def dfs(self, _graph, visited, i):
        """DFS cycle check; False means a back edge (cycle) was found."""
        if visited[i] == 1:
            # Node is on the current DFS path -> cycle.
            return False
        if visited[i] == 2:
            # Already fully explored and known acyclic.
            return True
        visited[i] = 1
        for nxt in _graph[i]:
            if not self.dfs(_graph, visited, nxt):
                return False
        visited[i] = 2
        return True
# V0'
# IDEA : DFS + topological sort
import collections
class Solution(object):
    """Cycle detection by DFS with three-color marking
    (0 unknown / 1 visiting / 2 visited)."""

    def canFinish(self, N, prerequisites):
        adjacency = collections.defaultdict(list)
        for course, prereq in prerequisites:
            adjacency[course].append(prereq)
        state = [0] * N
        # All nodes must be addable to the visited set without a cycle.
        return all(self.dfs(adjacency, state, node) for node in range(N))

    # Can we add node i to visited successfully?
    def dfs(self, graph, visited, i):
        if visited[i] == 1:
            return False
        if visited[i] == 2:
            return True
        visited[i] = 1
        if any(not self.dfs(graph, visited, child) for child in graph[i]):
            return False
        visited[i] = 2
        return True
# V0'' (AGAIN!)
# IDEA : BFS + topological sort
class Solution(object):
    """Kahn-style elimination: repeatedly remove a zero in-degree course."""
    def canFinish(self, N, prerequisites):
        """
        :type N,: int
        :type prerequisites: List[List[int]]
        :rtype: bool
        """
        graph = collections.defaultdict(list)
        indegrees = collections.defaultdict(int)
        for course, prereq in prerequisites:
            graph[prereq].append(course)
            indegrees[course] += 1
        for _ in range(N):
            # pick any course whose prerequisites are all satisfied
            candidate = next((j for j in range(N) if indegrees[j] == 0), None)
            if candidate is None:
                return False              # only cyclic courses remain
            indegrees[candidate] = -1     # mark as taken
            for dependent in graph[candidate]:
                indegrees[dependent] -= 1
        return True
# V1
# https://blog.csdn.net/fuxuemingzhu/article/details/82951771
# diagram explaination:
# https://leetcode.com/problems/course-schedule/discuss/658379/Python-by-DFS-and-cycle-detection-w-Graph
# IDEA : DFS + topological sort
class Solution(object):
    """Recursive DFS cycle detection over the prerequisite graph."""
    def canFinish(self, N, prerequisites):
        """Return True when no prerequisite cycle exists among the N courses.

        :type N,: int
        :type prerequisites: List[List[int]]
        :rtype: bool
        """
        edges = collections.defaultdict(list)
        for u, v in prerequisites:
            edges[u].append(v)
        marks = [0] * N   # 0 = unknown, 1 = on current path, 2 = done
        for start in range(N):
            if not self.dfs(edges, marks, start):
                return False
        return True
    def dfs(self, graph, visited, i):
        """True iff node i's subtree is cycle-free."""
        if visited[i] == 2:
            return True
        if visited[i] == 1:
            return False
        visited[i] = 1
        for neighbour in graph[i]:
            if not self.dfs(graph, visited, neighbour):
                return False
        visited[i] = 2
        return True
# V1'
# https://www.jiuzhang.com/solution/course-schedule/#tag-highlight-lang-python
from collections import deque
class Solution:
    # @param {int} numCourses a total of n courses
    # @param {int[][]} prerequisites a list of prerequisite pairs
    # @return {boolean} true if can finish all courses or false
    def canFinish(self, numCourses, prerequisites):
        """Kahn's algorithm: BFS from zero in-degree courses; finishable iff all dequeued."""
        successors = {c: [] for c in range(numCourses)}
        indegree = [0] * numCourses
        for course, prereq in prerequisites:
            successors[prereq].append(course)
            indegree[course] += 1
        # seed the queue with every course that has no prerequisite
        ready = deque(c for c in range(numCourses) if indegree[c] == 0)
        taken = 0
        while ready:
            current = ready.popleft()
            taken += 1
            for nxt in successors[current]:
                indegree[nxt] -= 1
                if indegree[nxt] == 0:
                    ready.append(nxt)
        return taken == numCourses
# V1''
# https://blog.csdn.net/fuxuemingzhu/article/details/82951771
# IDEA : BFS + topological sort
class Solution(object):
    """Topological elimination with a linear scan per step (O(N^2) overall)."""
    def canFinish(self, N, prerequisites):
        """
        :type N,: int
        :type prerequisites: List[List[int]]
        :rtype: bool
        """
        graph = collections.defaultdict(list)
        indegrees = collections.defaultdict(int)
        for u, v in prerequisites:
            graph[v].append(u)
            indegrees[u] += 1
        removed = 0
        while removed < N:
            target = -1
            for j in range(N):
                if indegrees[j] == 0:
                    target = j
                    break
            if target < 0:
                return False      # no free course left -> cycle
            indegrees[target] = -1
            for node in graph[target]:
                indegrees[node] -= 1
            removed += 1
        return True
# V2
# Time:  O(|V| + |E|)
# Space: O(|E|)
from collections import defaultdict, deque
class Solution(object):
    def canFinish(self, numCourses, prerequisites):
        """Return True iff all courses are finishable (Kahn's topological sort).

        :type numCourses: int
        :type prerequisites: List[List[int]]
        :rtype: bool
        """
        zero_in_degree_queue = deque()
        in_degree, out_degree = defaultdict(set), defaultdict(set)
        for i, j in prerequisites:
            in_degree[i].add(j)
            out_degree[j].add(i)
        # FIX: the original used Python-2-only `xrange`, which raises
        # NameError on Python 3 (the rest of the file is Python 3).
        for i in range(numCourses):
            if i not in in_degree:
                zero_in_degree_queue.append(i)
        while zero_in_degree_queue:
            prerequisite = zero_in_degree_queue.popleft()
            if prerequisite in out_degree:
                for course in out_degree[prerequisite]:
                    in_degree[course].discard(prerequisite)
                    if not in_degree[course]:
                        zero_in_degree_queue.append(course)
                del out_degree[prerequisite]
        # any remaining out-edges belong to courses stuck on a cycle
        if out_degree:
            return False
        return True
|
[
"collections.defaultdict",
"collections.deque"
] |
[((1185, 1214), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (1208, 1214), False, 'import collections\n'), ((2010, 2039), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (2033, 2039), False, 'import collections\n'), ((2945, 2974), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (2968, 2974), False, 'import collections\n'), ((2995, 3023), 'collections.defaultdict', 'collections.defaultdict', (['int'], {}), '(int)\n', (3018, 3023), False, 'import collections\n'), ((3907, 3936), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (3930, 3936), False, 'import collections\n'), ((5898, 5927), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (5921, 5927), False, 'import collections\n'), ((5948, 5976), 'collections.defaultdict', 'collections.defaultdict', (['int'], {}), '(int)\n', (5971, 5976), False, 'import collections\n'), ((6755, 6762), 'collections.deque', 'deque', ([], {}), '()\n', (6760, 6762), False, 'from collections import defaultdict, deque\n'), ((5225, 5234), 'collections.deque', 'deque', (['[]'], {}), '([])\n', (5230, 5234), False, 'from collections import defaultdict, deque\n'), ((6795, 6811), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (6806, 6811), False, 'from collections import defaultdict, deque\n'), ((6813, 6829), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (6824, 6829), False, 'from collections import defaultdict, deque\n')]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'hash_verify_gui.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_hashDialog(object):
    """pyuic5-generated layout for the hash-verification dialog.

    NOTE(review): this file is auto-generated ("All changes made in this file
    will be lost!") — regenerate from hash_verify_gui.ui instead of editing.
    """
    def setupUi(self, hashDialog):
        """Create, position and style every child widget of *hashDialog*."""
        hashDialog.setObjectName("hashDialog")
        hashDialog.setEnabled(True)
        hashDialog.resize(548, 414)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(hashDialog.sizePolicy().hasHeightForWidth())
        hashDialog.setSizePolicy(sizePolicy)
        # --- file-selection row: browse button + path line edit + label ---
        self.browseButton = QtWidgets.QPushButton(hashDialog)
        self.browseButton.setGeometry(QtCore.QRect(450, 33, 80, 23))
        self.browseButton.setObjectName("browseButton")
        self.inputEdit = QtWidgets.QLineEdit(hashDialog)
        self.inputEdit.setGeometry(QtCore.QRect(20, 33, 411, 23))
        self.inputEdit.setObjectName("inputEdit")
        self.inputlabel = QtWidgets.QLabel(hashDialog)
        self.inputlabel.setGeometry(QtCore.QRect(20, 10, 71, 16))
        # palette for the input label (per-color-group text colors)
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(79, 78, 77))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        self.inputlabel.setPalette(palette)
        self.inputlabel.setObjectName("inputlabel")
        # --- original-hash row: label + line edit for the expected digest ---
        self.digestLabel = QtWidgets.QLabel(hashDialog)
        self.digestLabel.setGeometry(QtCore.QRect(20, 76, 231, 16))
        # palette for the digest label; each role set for Active/Inactive/Disabled
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(111, 111, 111))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(204, 147, 147))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(111, 111, 111))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(204, 147, 147))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
        brush = QtGui.QBrush(QtGui.QColor(79, 78, 77))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(79, 78, 77))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(111, 111, 111))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(204, 147, 147))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
        self.digestLabel.setPalette(palette)
        self.digestLabel.setObjectName("digestLabel")
        self.digestEdit = QtWidgets.QLineEdit(hashDialog)
        self.digestEdit.setGeometry(QtCore.QRect(20, 100, 511, 23))
        self.digestEdit.setObjectName("digestEdit")
        # --- output area: progress bar, results text box and their labels ---
        self.progressBar = QtWidgets.QProgressBar(hashDialog)
        self.progressBar.setGeometry(QtCore.QRect(130, 327, 301, 23))
        self.progressBar.setProperty("value", 0)
        self.progressBar.setObjectName("progressBar")
        self.resultsBox = QtWidgets.QTextBrowser(hashDialog)
        self.resultsBox.setGeometry(QtCore.QRect(20, 230, 511, 81))
        self.resultsBox.setObjectName("resultsBox")
        self.resultsLabel = QtWidgets.QLabel(hashDialog)
        self.resultsLabel.setGeometry(QtCore.QRect(20, 208, 59, 15))
        self.resultsLabel.setObjectName("resultsLabel")
        self.progressLabel = QtWidgets.QLabel(hashDialog)
        self.progressLabel.setGeometry(QtCore.QRect(20, 330, 59, 15))
        self.progressLabel.setObjectName("progressLabel")
        # --- start button with a green background palette ---
        self.startButton = QtWidgets.QPushButton(hashDialog)
        self.startButton.setGeometry(QtCore.QRect(210, 150, 131, 41))
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 255, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 255, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 255, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(239, 235, 231))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        self.startButton.setPalette(palette)
        self.startButton.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
        self.startButton.setObjectName("startButton")
        self.closeButton = QtWidgets.QPushButton(hashDialog)
        self.closeButton.setGeometry(QtCore.QRect(450, 370, 82, 25))
        self.closeButton.setObjectName("closeButton")
        self.resetButton = QtWidgets.QPushButton(hashDialog)
        self.resetButton.setGeometry(QtCore.QRect(360, 370, 82, 25))
        self.resetButton.setObjectName("resetButton")
        self.retranslateUi(hashDialog)
        # auto-connect on_<objectName>_<signal> slots by naming convention
        QtCore.QMetaObject.connectSlotsByName(hashDialog)
    def retranslateUi(self, hashDialog):
        """Set all user-visible (translatable) strings on the widgets."""
        _translate = QtCore.QCoreApplication.translate
        hashDialog.setWindowTitle(_translate("hashDialog", "Hash Sum Verification"))
        self.browseButton.setToolTip(_translate("hashDialog", "Browse for file"))
        self.browseButton.setText(_translate("hashDialog", "Browse"))
        self.inputlabel.setText(_translate("hashDialog", "Input File"))
        self.digestLabel.setText(_translate("hashDialog", "Original Hash Sum (paste or copy)"))
        self.resultsBox.setPlaceholderText(_translate("hashDialog", "Not started"))
        self.resultsLabel.setText(_translate("hashDialog", "Results"))
        self.progressLabel.setText(_translate("hashDialog", "Progress"))
        self.startButton.setToolTip(_translate("hashDialog", "Start the computation"))
        self.startButton.setText(_translate("hashDialog", "Start"))
        self.closeButton.setText(_translate("hashDialog", "Close"))
        self.resetButton.setText(_translate("hashDialog", "Reset"))
|
[
"PyQt5.QtWidgets.QTextBrowser",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QSizePolicy",
"PyQt5.QtWidgets.QProgressBar",
"PyQt5.QtCore.QRect",
"PyQt5.QtGui.QColor",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtGui.QPalette",
"PyQt5.QtCore.QMetaObject.connectSlotsByName"
] |
[((449, 541), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Preferred)\n', (470, 541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((776, 809), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['hashDialog'], {}), '(hashDialog)\n', (797, 809), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((960, 991), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['hashDialog'], {}), '(hashDialog)\n', (979, 991), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1134, 1162), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['hashDialog'], {}), '(hashDialog)\n', (1150, 1162), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1247, 1263), 'PyQt5.QtGui.QPalette', 'QtGui.QPalette', ([], {}), '()\n', (1261, 1263), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1937, 1965), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['hashDialog'], {}), '(hashDialog)\n', (1953, 1965), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2052, 2068), 'PyQt5.QtGui.QPalette', 'QtGui.QPalette', ([], {}), '()\n', (2066, 2068), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4424, 4455), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['hashDialog'], {}), '(hashDialog)\n', (4443, 4455), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4603, 4637), 'PyQt5.QtWidgets.QProgressBar', 'QtWidgets.QProgressBar', (['hashDialog'], {}), '(hashDialog)\n', (4625, 4637), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4837, 4871), 'PyQt5.QtWidgets.QTextBrowser', 'QtWidgets.QTextBrowser', (['hashDialog'], {}), '(hashDialog)\n', (4859, 4871), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5020, 5048), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['hashDialog'], {}), '(hashDialog)\n', (5036, 5048), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5203, 
5231), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['hashDialog'], {}), '(hashDialog)\n', (5219, 5231), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5387, 5420), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['hashDialog'], {}), '(hashDialog)\n', (5408, 5420), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5509, 5525), 'PyQt5.QtGui.QPalette', 'QtGui.QPalette', ([], {}), '()\n', (5523, 5525), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6816, 6849), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['hashDialog'], {}), '(hashDialog)\n', (6837, 6849), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7000, 7033), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['hashDialog'], {}), '(hashDialog)\n', (7021, 7033), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7205, 7254), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['hashDialog'], {}), '(hashDialog)\n', (7242, 7254), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((848, 877), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(450)', '(33)', '(80)', '(23)'], {}), '(450, 33, 80, 23)\n', (860, 877), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1027, 1056), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(33)', '(411)', '(23)'], {}), '(20, 33, 411, 23)\n', (1039, 1056), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1199, 1227), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(10)', '(71)', '(16)'], {}), '(20, 10, 71, 16)\n', (1211, 1227), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1293, 1314), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1305, 1314), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1474, 1495), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1486, 1495), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1657, 1681), 'PyQt5.QtGui.QColor', 
'QtGui.QColor', (['(79)', '(78)', '(77)'], {}), '(79, 78, 77)\n', (1669, 1681), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2003, 2032), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(76)', '(231)', '(16)'], {}), '(20, 76, 231, 16)\n', (2015, 2032), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2098, 2119), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (2110, 2119), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2279, 2306), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(255)', '(127)'], {}), '(255, 255, 127)\n', (2291, 2306), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2460, 2487), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(111)', '(111)', '(111)'], {}), '(111, 111, 111)\n', (2472, 2487), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2648, 2675), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(204)', '(147)', '(147)'], {}), '(204, 147, 147)\n', (2660, 2675), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2836, 2857), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (2848, 2857), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3019, 3046), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(255)', '(127)'], {}), '(255, 255, 127)\n', (3031, 3046), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3202, 3229), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(111)', '(111)', '(111)'], {}), '(111, 111, 111)\n', (3214, 3229), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3392, 3419), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(204)', '(147)', '(147)'], {}), '(204, 147, 147)\n', (3404, 3419), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3582, 3606), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(79)', '(78)', '(77)'], {}), '(79, 78, 77)\n', (3594, 3606), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3768, 3792), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(79)', '(78)', '(77)'], {}), '(79, 78, 
77)\n', (3780, 3792), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3948, 3975), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(111)', '(111)', '(111)'], {}), '(111, 111, 111)\n', (3960, 3975), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4138, 4165), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(204)', '(147)', '(147)'], {}), '(204, 147, 147)\n', (4150, 4165), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4492, 4522), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(100)', '(511)', '(23)'], {}), '(20, 100, 511, 23)\n', (4504, 4522), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4675, 4706), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(130)', '(327)', '(301)', '(23)'], {}), '(130, 327, 301, 23)\n', (4687, 4706), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4908, 4938), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(230)', '(511)', '(81)'], {}), '(20, 230, 511, 81)\n', (4920, 4938), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5087, 5116), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(208)', '(59)', '(15)'], {}), '(20, 208, 59, 15)\n', (5099, 5116), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5271, 5300), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(330)', '(59)', '(15)'], {}), '(20, 330, 59, 15)\n', (5283, 5300), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5458, 5489), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(210)', '(150)', '(131)', '(41)'], {}), '(210, 150, 131, 41)\n', (5470, 5489), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5555, 5578), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (5567, 5578), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5734, 5761), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (5746, 5761), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5915, 5938), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(255)', '(0)'], 
{}), '(0, 255, 0)\n', (5927, 5938), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6096, 6123), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (6108, 6123), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6279, 6302), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (6291, 6302), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6460, 6487), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(239)', '(235)', '(231)'], {}), '(239, 235, 231)\n', (6472, 6487), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6887, 6917), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(450)', '(370)', '(82)', '(25)'], {}), '(450, 370, 82, 25)\n', (6899, 6917), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7071, 7101), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(360)', '(370)', '(82)', '(25)'], {}), '(360, 370, 82, 25)\n', (7083, 7101), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
|
from pathFinders import Dijkstra
from pathFinders import Astar
from mazeGenerators import randomMaze
from pathFinders import Bfs
from pathFinders import Dfs
import tkinter as tk
import Grid
class main():
    """Tk front-end wiring the grid view, algorithm picker and maze controls."""
    def __init__(self, row, col, sizeCase):
        self.win = tk.Tk()
        self.win.title("pathfiding vizualiser")
        self.canGrid = tk.Canvas(self.win, height=600, width=900)
        self.canBt = tk.Canvas(self.win, height=600, width=900)
        self.canGrid.bind('<ButtonRelease>', self.clic)
        self.guiGrid = Grid.Gui(row, col, sizeCase, self.win, self.canGrid, self.canBt)
        self.caseOption = tk.StringVar()
        self.pathOption = tk.StringVar()
        self.mazeControl = [tk.Button()] * 1
        self.setBtn()
    def setBtn(self):
        """Build the two option menus and the maze-generation button."""
        case_choices = ("Start", "End", "Wall", "Void")
        self.caseOption.set(case_choices[0])
        case_menu = tk.OptionMenu(self.canBt, self.caseOption, *case_choices)
        case_menu.pack(side=tk.LEFT)
        algo_choices = ("Dijkstra", "A*", "BFS", "DFS")
        self.pathOption.set(algo_choices[0])
        self.pathOptions = tk.OptionMenu(self.canBt, self.pathOption, *algo_choices, command=self.changePath)
        self.pathOptions.pack(side=tk.RIGHT, anchor=tk.CENTER)
        self.mazeControl[0] = tk.Button(self.canBt, text='random maze',
                                       command=lambda gui=self: randomMaze.Generate(self.guiGrid))
        for button in self.mazeControl:
            button.pack(side=tk.LEFT, padx='150')
        self.canGrid.pack()
        self.canBt.pack()  # side=tk.LEFT
    def changePath(self, Q):
        """Run the selected path-finding algorithm and paint the result."""
        self.pathOptions.config(state="disable")
        self.guiGrid.paintGrid()
        solvers = {
            "Dijkstra": Dijkstra.Dijkstra,
            "A*": Astar.Astar,
            "BFS": Bfs.Bfs,
            "DFS": Dfs.Dfs,
        }
        path = False
        if self.guiGrid.start is not None and self.guiGrid.end is not None:
            solver = solvers.get(self.pathOption.get())
            if solver is not None:
                path = solver(self.guiGrid)
        if path:
            self.guiGrid.paintPath(path)
        self.pathOptions.config(state="normal")
    def clic(self, event):
        """Edit the clicked cell according to the selected case type."""
        col = event.x // self.guiGrid.sizeCase
        row = event.y // self.guiGrid.sizeCase
        grid = self.guiGrid
        mode = self.caseOption.get()
        if mode == "Void":
            grid.grid[row][col] = 0
            grid.colorCase(col, row, 'white')
        elif mode == "Wall":
            grid.grid[row][col] = -1
            grid.colorCase(col, row, 'dimgray')
        elif mode == "Start":
            if grid.start:
                grid.colorCase(*grid.start, 'white')
            grid.start = (col, row)
            grid.colorCase(col, row, 'deepskyblue')
        elif mode == "End":
            if grid.end:
                grid.colorCase(*grid.end, 'white')
            grid.end = (col, row)
            grid.colorCase(col, row, 'limegreen')
    def launch(self):
        """Enter the Tk main loop (blocks until the window is closed)."""
        self.win.mainloop()
|
[
"tkinter.StringVar",
"pathFinders.Dijkstra.Dijkstra",
"tkinter.Canvas",
"tkinter.Button",
"pathFinders.Dfs.Dfs",
"Grid.Gui",
"mazeGenerators.randomMaze.Generate",
"tkinter.OptionMenu",
"pathFinders.Bfs.Bfs",
"pathFinders.Astar.Astar",
"tkinter.Tk"
] |
[((266, 273), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (271, 273), True, 'import tkinter as tk\n'), ((346, 388), 'tkinter.Canvas', 'tk.Canvas', (['self.win'], {'height': '(600)', 'width': '(900)'}), '(self.win, height=600, width=900)\n', (355, 388), True, 'import tkinter as tk\n'), ((408, 450), 'tkinter.Canvas', 'tk.Canvas', (['self.win'], {'height': '(600)', 'width': '(900)'}), '(self.win, height=600, width=900)\n', (417, 450), True, 'import tkinter as tk\n'), ((536, 600), 'Grid.Gui', 'Grid.Gui', (['row', 'col', 'sizeCase', 'self.win', 'self.canGrid', 'self.canBt'], {}), '(row, col, sizeCase, self.win, self.canGrid, self.canBt)\n', (544, 600), False, 'import Grid\n'), ((628, 642), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (640, 642), True, 'import tkinter as tk\n'), ((669, 683), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (681, 683), True, 'import tkinter as tk\n'), ((895, 951), 'tkinter.OptionMenu', 'tk.OptionMenu', (['self.canBt', 'self.caseOption', '*caseOptions'], {}), '(self.canBt, self.caseOption, *caseOptions)\n', (908, 951), True, 'import tkinter as tk\n'), ((1118, 1204), 'tkinter.OptionMenu', 'tk.OptionMenu', (['self.canBt', 'self.pathOption', '*pathOptions'], {'command': 'self.changePath'}), '(self.canBt, self.pathOption, *pathOptions, command=self.\n changePath)\n', (1131, 1204), True, 'import tkinter as tk\n'), ((712, 723), 'tkinter.Button', 'tk.Button', ([], {}), '()\n', (721, 723), True, 'import tkinter as tk\n'), ((1850, 1881), 'pathFinders.Dijkstra.Dijkstra', 'Dijkstra.Dijkstra', (['self.guiGrid'], {}), '(self.guiGrid)\n', (1867, 1881), False, 'from pathFinders import Dijkstra\n'), ((1370, 1403), 'mazeGenerators.randomMaze.Generate', 'randomMaze.Generate', (['self.guiGrid'], {}), '(self.guiGrid)\n', (1389, 1403), False, 'from mazeGenerators import randomMaze\n'), ((1937, 1962), 'pathFinders.Astar.Astar', 'Astar.Astar', (['self.guiGrid'], {}), '(self.guiGrid)\n', (1948, 1962), False, 'from pathFinders import Astar\n'), 
((2019, 2040), 'pathFinders.Bfs.Bfs', 'Bfs.Bfs', (['self.guiGrid'], {}), '(self.guiGrid)\n', (2026, 2040), False, 'from pathFinders import Bfs\n'), ((2096, 2117), 'pathFinders.Dfs.Dfs', 'Dfs.Dfs', (['self.guiGrid'], {}), '(self.guiGrid)\n', (2103, 2117), False, 'from pathFinders import Dfs\n')]
|
import os
import csv
import time
import math
import pandas as pd
import torch
import torch.nn as nn
import torchvision.utils as vutils
from torch.optim.sgd import SGD
from torch.utils.data import DataLoader
from optims import OCGD, BCGD2
from train_utils import get_data, weights_init_d, weights_init_g, \
get_diff, save_checkpoint, lr_scheduler, generate_data, icrScheduler, get_model
from losses import get_loss
# seed = torch.randint(0, 1000000, (1,))
# Fixed seed for reproducibility (the commented line above drew a random one).
seed = 2020
torch.manual_seed(seed=seed)
print('random seed : %d' % seed)
def train_ocgd(epoch_num=10, optim_type='BCGD2',
               startPoint=None, logdir='test',
               update_min=True,
               z_dim=128, batchsize=64,
               loss_name='WGAN', model_name='dc',
               data_path='None', dataname='cifar10',
               device='cpu', gpu_num=1, collect_info=False):
    """Over-train one side of a GAN (D if update_min else G) with the OCGD optimizer.

    epoch_num   -- number of passes over the dataset
    startPoint  -- optional checkpoint path holding 'D' and 'G' state dicts
    logdir      -- directory passed to save_checkpoint for the final weights
    update_min  -- True: only the discriminator (min player) is updated
    z_dim       -- latent noise dimension fed to the generator
    NOTE(review): optim_type, gpu_num and collect_info are accepted but never
    read in this function; lr_d/lr_g below are likewise unused.
    """
    lr_d = 0.01
    lr_g = 0.01
    dataset = get_data(dataname=dataname, path='../datas/%s' % data_path)
    dataloader = DataLoader(dataset=dataset, batch_size=batchsize, shuffle=True,
                            num_workers=4)
    D, G = get_model(model_name=model_name, z_dim=z_dim)
    D.to(device)
    G.to(device)
    # optionally resume both networks from a saved checkpoint
    if startPoint is not None:
        chk = torch.load(startPoint)
        D.load_state_dict(chk['D'])
        G.load_state_dict(chk['G'])
        print('Start from %s' % startPoint)
    # NOTE(review): `udpate_min` (sic) matches OCGD's keyword as defined in
    # optims — do not "fix" the spelling here without changing optims.OCGD.
    optimizer = OCGD(max_params=G.parameters(), min_params=D.parameters(),
                     udpate_min=update_min, device=device)
    loss_list = []
    count = 0
    for e in range(epoch_num):
        for real_x in dataloader:
            real_x = real_x[0].to(device)  # dataloader yields (images, labels); labels unused
            d_real = D(real_x)
            z = torch.randn((real_x.shape[0], z_dim), device=device)
            fake_x = G(z)
            d_fake = D(fake_x)
            # discriminator-side loss only (g_loss=False)
            D_loss = get_loss(name=loss_name, g_loss=False, d_real=d_real, d_fake=d_fake)
            optimizer.zero_grad()
            optimizer.step(loss=D_loss)
            if count % 100 == 0:
                print('Iter %d, Loss: %.5f' % (count, D_loss.item()))
            loss_list.append(D_loss.item())
            count += 1
        print('epoch{%d/%d}' %(e, epoch_num))
    # checkpoint name records which player was over-trained
    name = 'overtrainD.pth' if update_min else 'overtrainG.pth'
    save_checkpoint(path=logdir, name=name, D=D, G=G)
    loss_data = pd.DataFrame(loss_list)
    loss_data.to_csv('logs/train_oneside.csv')
if __name__ == '__main__':
    # Let cuDNN pick the fastest kernels for the fixed input sizes.
    torch.backends.cudnn.benchmark = True
    if torch.cuda.is_available():
        device = torch.device('cuda:0')
    else:
        device = torch.device('cpu')
    print(device)
    checkpoint = 'checkpoints/0.00000MNIST-0.0100/SGD-0.01000_9000.pth'
    train_ocgd(epoch_num=10, startPoint=checkpoint,
               z_dim=96, update_min=True,
               data_path='mnist', dataname='MNIST',
               loss_name='JSD', model_name='mnist',
               batchsize=128, device=device)
|
[
"pandas.DataFrame",
"torch.utils.data.DataLoader",
"train_utils.save_checkpoint",
"torch.manual_seed",
"train_utils.get_model",
"torch.load",
"losses.get_loss",
"torch.randn",
"train_utils.get_data",
"torch.cuda.is_available"
] |
[((475, 503), 'torch.manual_seed', 'torch.manual_seed', ([], {'seed': 'seed'}), '(seed=seed)\n', (492, 503), False, 'import torch\n'), ((917, 976), 'train_utils.get_data', 'get_data', ([], {'dataname': 'dataname', 'path': "('../datas/%s' % data_path)"}), "(dataname=dataname, path='../datas/%s' % data_path)\n", (925, 976), False, 'from train_utils import get_data, weights_init_d, weights_init_g, get_diff, save_checkpoint, lr_scheduler, generate_data, icrScheduler, get_model\n'), ((994, 1072), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'dataset', 'batch_size': 'batchsize', 'shuffle': '(True)', 'num_workers': '(4)'}), '(dataset=dataset, batch_size=batchsize, shuffle=True, num_workers=4)\n', (1004, 1072), False, 'from torch.utils.data import DataLoader\n'), ((1112, 1157), 'train_utils.get_model', 'get_model', ([], {'model_name': 'model_name', 'z_dim': 'z_dim'}), '(model_name=model_name, z_dim=z_dim)\n', (1121, 1157), False, 'from train_utils import get_data, weights_init_d, weights_init_g, get_diff, save_checkpoint, lr_scheduler, generate_data, icrScheduler, get_model\n'), ((2260, 2309), 'train_utils.save_checkpoint', 'save_checkpoint', ([], {'path': 'logdir', 'name': 'name', 'D': 'D', 'G': 'G'}), '(path=logdir, name=name, D=D, G=G)\n', (2275, 2309), False, 'from train_utils import get_data, weights_init_d, weights_init_g, get_diff, save_checkpoint, lr_scheduler, generate_data, icrScheduler, get_model\n'), ((2326, 2349), 'pandas.DataFrame', 'pd.DataFrame', (['loss_list'], {}), '(loss_list)\n', (2338, 2349), True, 'import pandas as pd\n'), ((1237, 1259), 'torch.load', 'torch.load', (['startPoint'], {}), '(startPoint)\n', (1247, 1259), False, 'import torch\n'), ((1698, 1750), 'torch.randn', 'torch.randn', (['(real_x.shape[0], z_dim)'], {'device': 'device'}), '((real_x.shape[0], z_dim), device=device)\n', (1709, 1750), False, 'import torch\n'), ((1829, 1897), 'losses.get_loss', 'get_loss', ([], {'name': 'loss_name', 'g_loss': '(False)', 'd_real': 
'd_real', 'd_fake': 'd_fake'}), '(name=loss_name, g_loss=False, d_real=d_real, d_fake=d_fake)\n', (1837, 1897), False, 'from losses import get_loss\n'), ((2506, 2531), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2529, 2531), False, 'import torch\n')]
|
# Pytests to test the referenceSystem.json schema file
# TODO: test that "calendar" is "Gregorian" or a URI
import pytest
from jsonschema.exceptions import ValidationError
# Apply the referenceSystem schema fixture to every test in this module.
pytestmark = pytest.mark.schema("/schemas/referenceSystem")
def test_geographic_rs(validator):
    """A GeographicCRS validates with just a type and an id."""
    validator.validate({
        "type": "GeographicCRS",
        "id": "http://www.opengis.net/def/crs/OGC/1.3/CRS84",
    })
def test_projected_rs(validator):
    """A ProjectedCRS validates with just a type and an id."""
    validator.validate({
        "type": "ProjectedCRS",
        "id": "http://www.opengis.net/def/crs/EPSG/0/27700",
    })
def test_vertical_rs(validator):
    """A VerticalCRS validates with just a type and an id."""
    validator.validate({
        "type": "VerticalCRS",
        "id": "http://www.opengis.net/def/crs/EPSG/0/5703",
    })
def test_minimal_temporal_rs(validator):
    """A TemporalRS validates with just a type and a calendar."""
    validator.validate({
        "type": "TemporalRS",
        "calendar": "Gregorian",
    })
def test_identifier_rs(validator):
    """A fully-populated IdentifierRS example validates."""
    germany = {
        "id": "http://dbpedia.org/resource/Germany",
        "label": {"de": "Deutschland", "en": "Germany"},
    }
    united_kingdom = {
        "id": "http://dbpedia.org/resource/United_Kingdom",
        "label": {"de": "Vereinigtes Königreich", "en": "United Kingdom"},
    }
    crs = {
        "type": "IdentifierRS",
        "id": "https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2",
        "label": {"en": "ISO 3166-1 alpha-2 codes"},
        "targetConcept": {
            "id": "http://dbpedia.org/resource/Country",
            "label": {"en": "Country", "de": "Land"},
        },
        "identifiers": {"de": germany, "gb": united_kingdom},
    }
    validator.validate(crs)
def test_missing_type(validator):
    """An RS without a "type" member must be rejected."""
    crs = {"id": "http://www.opengis.net/def/crs/OGC/1.3/CRS84"}
    with pytest.raises(ValidationError):
        validator.validate(crs)
def test_missing_calendar(validator):
    """A TemporalRS without a "calendar" member must be rejected."""
    crs = {"type": "TemporalRS"}
    with pytest.raises(ValidationError):
        validator.validate(crs)
def test_missing_target_concept(validator):
    """An IdentifierRS without "targetConcept" must be rejected."""
    identifiers = {
        "de": {
            "id": "http://dbpedia.org/resource/Germany",
            "label": {"de": "Deutschland", "en": "Germany"},
        },
        "gb": {
            "id": "http://dbpedia.org/resource/United_Kingdom",
            "label": {"de": "Vereinigtes Königreich", "en": "United Kingdom"},
        },
    }
    crs = {
        "type": "IdentifierRS",
        "id": "https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2",
        "label": {"en": "ISO 3166-1 alpha-2 codes"},
        # "targetConcept" deliberately omitted
        "identifiers": identifiers,
    }
    with pytest.raises(ValidationError):
        validator.validate(crs)
def test_invalid_target_concept(validator):
    """An IdentifierRS whose targetConcept lacks a label must be rejected."""
    identifiers = {
        "de": {
            "id": "http://dbpedia.org/resource/Germany",
            "label": {"de": "Deutschland", "en": "Germany"},
        },
        "gb": {
            "id": "http://dbpedia.org/resource/United_Kingdom",
            "label": {"de": "Vereinigtes Königreich", "en": "United Kingdom"},
        },
    }
    crs = {
        "type": "IdentifierRS",
        "id": "https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2",
        "label": {"en": "ISO 3166-1 alpha-2 codes"},
        # targetConcept is missing its required "label"
        "targetConcept": {"id": "http://dbpedia.org/resource/Country"},
        "identifiers": identifiers,
    }
    with pytest.raises(ValidationError):
        validator.validate(crs)
def test_invalid_identifier(validator):
    """An IdentifierRS with an identifier missing its label must be rejected."""
    crs = {
        "type": "IdentifierRS",
        "id": "https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2",
        "label": {"en": "ISO 3166-1 alpha-2 codes"},
        "targetConcept": {
            "id": "http://dbpedia.org/resource/Country",
            "label": {"en": "Country", "de": "Land"},
        },
        "identifiers": {
            # the "de" entry is missing its required "label"
            "de": {"id": "http://dbpedia.org/resource/Germany"},
            "gb": {
                "id": "http://dbpedia.org/resource/United_Kingdom",
                "label": {"de": "Vereinigtes Königreich", "en": "United Kingdom"},
            },
        },
    }
    with pytest.raises(ValidationError):
        validator.validate(crs)
|
[
"pytest.raises",
"pytest.mark.schema"
] |
[((188, 234), 'pytest.mark.schema', 'pytest.mark.schema', (['"""/schemas/referenceSystem"""'], {}), "('/schemas/referenceSystem')\n", (206, 234), False, 'import pytest\n'), ((2058, 2088), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (2071, 2088), False, 'import pytest\n'), ((2263, 2293), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (2276, 2293), False, 'import pytest\n'), ((3000, 3030), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (3013, 3030), False, 'import pytest\n'), ((3848, 3878), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (3861, 3878), False, 'import pytest\n'), ((4677, 4707), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (4690, 4707), False, 'import pytest\n')]
|
# reimplementation of lecture2-segment2.py for better understanding.
class Food(object):
    """A menu item with a desirability value and a calorie cost."""

    def __init__(self, v, n, w):
        self.name = n
        self.value = v
        self.calories = w

    def getValue(self):
        """Desirability of this item."""
        return self.value

    def getName(self):
        """Display name of this item."""
        return self.name

    def getCost(self):
        """Calorie cost of this item."""
        return self.calories

    def ratio(self):
        """Value per calorie -- the greedy 'density' metric."""
        return self.getValue() / self.getCost()

    def __str__(self):
        return '{} : ({}, {})'.format(self.name, self.value, self.calories)
def menuBuilder(names, values, calories):
    """Build a list of Food objects from parallel lists.

    :param names: food names
    :param values: food values (same length as names)
    :param calories: food calorie costs (same length as names)
    :return: list of Food instances, one per index
    """
    return [Food(values[i], names[i], calories[i])
            for i in range(len(values))]
def maxVal(toConsider, avail):
    """Solve the 0/1 knapsack problem by exhaustive decision-tree search.

    Tracks the best solution found so far; does not *build* the tree.

    :param toConsider: items not yet decided on
    :param avail: remaining capacity (calories)
    :return: (total value of best solution, tuple of the items taken)
    """
    if toConsider == [] or avail == 0:
        return (0, ())
    first, rest = toConsider[0], toConsider[1:]
    if first.getCost() > avail:
        # Item cannot fit: only the "skip it" branch is feasible.
        return maxVal(rest, avail)
    # Left branch: take the item.
    take_val, take_items = maxVal(rest, avail - first.getCost())
    take_val += first.getValue()
    # Right branch: skip the item.
    skip_val, skip_items = maxVal(rest, avail)
    # Keep whichever branch yields more value.
    if take_val > skip_val:
        return (take_val, take_items + (first,))
    return (skip_val, skip_items)
def testMaxVal(foods, maxUnits, printItems = True):
    """Run the tree search on *foods* with a calorie budget and report results."""
    print('Use search tree to allocate', maxUnits,
          'calories')
    total, chosen = maxVal(foods, maxUnits)
    print('Total value of items taken =', total, '\n')
    if printItems:
        for food in chosen:
            print('   ', food)
# building large menus
import random
def buildLargeMenu(numItems, maxVal, maxCost):
    """Generate numItems random Foods with values in [1, maxVal] and
    calories in [1, maxCost].  (Note: the maxVal parameter shadows the
    module-level maxVal function inside this body.)
    """
    return [Food(random.randint(1, maxVal), str(i),
                 random.randint(1, maxCost))
            for i in range(numItems)]
# Exhaustive search explores a 2^n decision tree, so menus beyond ~40 items
# become impractically slow -- the motivation for dynamic programming below.
for numItems in (5, 10, 15, 20, 25, 30, 35, 40, 45):
    print('Try a menu with', numItems, 'items')
    items = buildLargeMenu(numItems, 90, 250)
    testMaxVal(items, 750, False)
# need for dynamic programming understood via comparision b/w
# normie and dank ways to compute nth term of a fibonaccci series
def normie_fibonacci(n):
    """Naive exponential-time Fibonacci (with fib(0) == fib(1) == 1)."""
    if n in (0, 1):
        return 1
    return normie_fibonacci(n - 1) + normie_fibonacci(n - 2)
def dank_fibonacci(n, mem=None):
    """Memoized Fibonacci (with fib(0) == fib(1) == 1).

    *mem* caches already-computed terms; a fresh dict is created per
    top-level call (never a mutable default argument).
    """
    if mem is None:
        mem = {}
    if n in (0, 1):
        return 1
    if n in mem:
        return mem[n]
    value = dank_fibonacci(n - 1, mem) + dank_fibonacci(n - 2, mem)
    mem[n] = value
    return value
print(normie_fibonacci(15))
print('\n')
print(dank_fibonacci(15))
# Swap dank_fibonacci for normie_fibonacci below to watch the runtime grow
# exponentially; the memoized version stays fast even up to fib(119).
for i in range(120):
    print('fib(' + str(i) + ') =', dank_fibonacci(i))
# thus DP is enormous win for fibonacci.
'''
Question : so when exactly should I use DP
Answer : According to its creator, it's useful when a program has
- optimal substructure
A problem has optimal substructure if a globally optimal solution can be
found by combining optimal solutions to local subproblems.
- overlapping subproblems
    A problem has overlapping subproblems if an optimal solution involves
    solving the same problem multiple times.
'''
|
[
"random.randint"
] |
[((2361, 2386), 'random.randint', 'random.randint', (['(1)', 'maxVal'], {}), '(1, maxVal)\n', (2375, 2386), False, 'import random\n'), ((2432, 2458), 'random.randint', 'random.randint', (['(1)', 'maxCost'], {}), '(1, maxCost)\n', (2446, 2458), False, 'import random\n')]
|
from ObstacleManager import ObstacleManager
import time
# Demo: run the obstacle for three seconds, printing its position each loop.
obstacle = ObstacleManager.get_instance()
obstacle.start_movement()
obstacle.set_period(0.05)

# Bug fix: time.clock() was deprecated since Python 3.3 and removed in 3.8;
# time.perf_counter() is the portable monotonic replacement.
start_time = time.perf_counter()
while time.perf_counter() - start_time < 3:
    print("Obstacle Position: (" + str(obstacle.xPosition) + ", " + str(obstacle.yPosition) + ")")

obstacle.stop_movement()
|
[
"time.clock",
"ObstacleManager.ObstacleManager.get_instance"
] |
[((69, 99), 'ObstacleManager.ObstacleManager.get_instance', 'ObstacleManager.get_instance', ([], {}), '()\n', (97, 99), False, 'from ObstacleManager import ObstacleManager\n'), ((166, 178), 'time.clock', 'time.clock', ([], {}), '()\n', (176, 178), False, 'import time\n'), ((185, 197), 'time.clock', 'time.clock', ([], {}), '()\n', (195, 197), False, 'import time\n')]
|
# Copyright (c) 2020 Trail of Bits, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
def strip_whole_config(filename):
    """Return the binary name for a '<name>[.<ext>].config' filename.

    Returns '' when *filename* is not a .config file.  Both the '.config'
    suffix and any remaining extension (e.g. '.elf') are removed.
    """
    if not filename.endswith(".config"):
        return ""
    # Bug fix: str.rstrip(".config") treats its argument as a *set of
    # characters* and would also eat trailing 'c'/'o'/'n'/'f'/'i'/'g' from
    # the real name (e.g. 'gcc.config' -> ''); slice the suffix off instead.
    filename = filename[:-len(".config")]
    basename, _ext = os.path.splitext(filename)
    return basename
def get_binaries(directory):
    """Return the set of binary names that have a .config file in *directory*."""
    names = (strip_whole_config(entry) for entry in os.listdir(directory))
    return {name for name in names if name}
def get_tags(config):
    """Read the tag list from a config file.

    The first line must look like 'TAGS: tag1 tag2 ...'; otherwise an
    empty list is returned.
    """
    with open(config, 'r') as f:
        header = f.readline().rstrip('\n')
    tokens = header.split(' ')
    if tokens[0] != 'TAGS:':
        return []
    return tokens[1:]
def get_bin2tags(directory):
    """Map each binary name in *directory* to the tags of its .config file(s).

    Returns {binary_name: [tag, ...]}.  When several config files strip to
    the same binary name, their tags are merged into one flat list.
    """
    result = {}
    for f in os.listdir(directory):
        filename = strip_whole_config(f)
        if not filename:
            continue
        tags = get_tags(os.path.join(directory, f))
        if filename not in result:
            result[filename] = tags
        else:
            # Bug fix: was `append(tags)`, which nested a whole list inside
            # the tag list (mixed str/list entries); extend keeps it flat.
            result[filename].extend(tags)
    return result
def get_cfg(directory, name):
    """Return the path of the .cfg file for *name* inside *directory*."""
    return os.path.join(directory, '{}.cfg'.format(name))
|
[
"os.listdir",
"os.path.join",
"os.path.splitext"
] |
[((865, 891), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (881, 891), False, 'import os\n'), ((974, 995), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (984, 995), False, 'import os\n'), ((1380, 1401), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (1390, 1401), False, 'import os\n'), ((1730, 1768), 'os.path.join', 'os.path.join', (['directory', "(name + '.cfg')"], {}), "(directory, name + '.cfg')\n", (1742, 1768), False, 'import os\n'), ((1515, 1541), 'os.path.join', 'os.path.join', (['directory', 'f'], {}), '(directory, f)\n', (1527, 1541), False, 'import os\n')]
|
from socialsent3.representations.embedding import SVDEmbedding, Embedding, GigaEmbedding, FullEmbedding
from socialsent3.representations.explicit import Explicit
def create_representation(rep_type, path, *args, **kwargs):
    """Factory for word-vector representations.

    :param rep_type: one of 'Explicit', 'SVD', 'GIGA', 'FULL'; anything else
        falls back to the generic Embedding loader.
    :param path: location of the stored representation
    :return: the loaded representation object
    """
    if rep_type == 'Explicit':
        return Explicit.load(path, *args, **kwargs)
    elif rep_type == 'SVD':
        return SVDEmbedding(path, *args, **kwargs)
    elif rep_type == 'GIGA':
        return GigaEmbedding(path, *args, **kwargs)
    elif rep_type == 'FULL':
        # Bug fix: was `*kwargs`, which unpacked only the dict *keys* as
        # positional arguments; `**kwargs` forwards them as keywords.
        return FullEmbedding(path, *args, **kwargs)
    else:
        return Embedding.load(path, *args, **kwargs)
|
[
"socialsent3.representations.embedding.FullEmbedding",
"socialsent3.representations.embedding.Embedding.load",
"socialsent3.representations.explicit.Explicit.load",
"socialsent3.representations.embedding.SVDEmbedding",
"socialsent3.representations.embedding.GigaEmbedding"
] |
[((276, 312), 'socialsent3.representations.explicit.Explicit.load', 'Explicit.load', (['path', '*args'], {}), '(path, *args, **kwargs)\n', (289, 312), False, 'from socialsent3.representations.explicit import Explicit\n'), ((358, 393), 'socialsent3.representations.embedding.SVDEmbedding', 'SVDEmbedding', (['path', '*args'], {}), '(path, *args, **kwargs)\n', (370, 393), False, 'from socialsent3.representations.embedding import SVDEmbedding, Embedding, GigaEmbedding, FullEmbedding\n'), ((440, 476), 'socialsent3.representations.embedding.GigaEmbedding', 'GigaEmbedding', (['path', '*args'], {}), '(path, *args, **kwargs)\n', (453, 476), False, 'from socialsent3.representations.embedding import SVDEmbedding, Embedding, GigaEmbedding, FullEmbedding\n'), ((523, 558), 'socialsent3.representations.embedding.FullEmbedding', 'FullEmbedding', (['path', '*args', '*kwargs'], {}), '(path, *args, *kwargs)\n', (536, 558), False, 'from socialsent3.representations.embedding import SVDEmbedding, Embedding, GigaEmbedding, FullEmbedding\n'), ((586, 623), 'socialsent3.representations.embedding.Embedding.load', 'Embedding.load', (['path', '*args'], {}), '(path, *args, **kwargs)\n', (600, 623), False, 'from socialsent3.representations.embedding import SVDEmbedding, Embedding, GigaEmbedding, FullEmbedding\n')]
|
import os
from pathlib import Path
import pandas as pd
import numpy as np
import math
from parsing import split_tmp, split_wnd, split_ceil, split_vis, split_liquid_precip, split_snow
def see_maps_location(lat, lon):
    """Print a Google Maps search URL for the given coordinates."""
    print('https://www.google.com.au/maps/search/{},{}'.format(lat, lon))
def get_complete_station_years(path):
    """
    Figure out which stations have complete histories

    Scans path/'raw'/<year>/ directories (expected layout: one CSV per
    station, named '<station_id>.csv') and keeps only stations that appear
    in every year found.

    Returns (stations, complete_station_years): the array of station ids
    with a full history, and a DataFrame of their (id, year) pairs sorted
    by id then year.
    """
    station_years = pd.DataFrame()
    years = os.listdir(path/'raw')
    for y in years:
        this_station_year = pd.DataFrame.from_dict({
            # drop the trailing '.csv' to recover the station id
            'id':[s[:-4] for s in os.listdir(path/'raw'/f'{y}')],
            'year':y
        })
        station_years = pd.concat([station_years, this_station_year])
    # a station is "complete" when it has one file for every distinct year
    files_per_station = station_years['id'].value_counts()
    stations_with_complete_history = files_per_station==len(station_years['year'].unique())
    is_complete_station_year = station_years['id'].isin(files_per_station[stations_with_complete_history].index)
    complete_station_years = station_years[is_complete_station_year].sort_values(['id','year'])
    complete_station_years.reset_index(inplace=True, drop=True)
    stations = complete_station_years['id'].unique()
    return stations, complete_station_years
def process_station_data(df):
    """
    Map the raw data from weather obs csv file to numeric columns in DataFrame

    Returns (metadata, slim): a one-row DataFrame with the station's
    identity/location fields, and the cleaned observation table
    (deduplicated by timestamp, special reports removed).
    """
    df.columns = map(str.lower, df.columns)
    timef = ['station','date','report_type']
    # parse out information from each of the relevant columns
    # data dictionary can be found at https://www.ncei.noaa.gov/data/global-hourly/doc/isd-format-document.pdf
    wnd, ceil, vis, tmp = split_wnd(df), split_ceil(df), split_vis(df), split_tmp(df)
    # NOTE(review): tmpf is assigned but never used below
    wndf, ceilf, visf, tmpf = ['wnd_speed'], ['ceil','ceil_height'], ['vis_distance'], ['tmp']
    rain = split_liquid_precip(df)
    snow = split_snow(df)
    # total precipitation = liquid rain + water-equivalent of snow
    df['total_precip'] = rain['liquid_precip_depth_dimension'] + snow['snow_equivalent_water_depth_dimension']
    slim = pd.concat([
        df[timef],
        tmp['tmp'],
        rain['liquid_precip_depth_dimension'], snow['snow_equivalent_water_depth_dimension'], df['total_precip'],
        wnd[wndf], ceil[ceilf], vis[visf],
    ] , axis=1)
    # remove "Airways special report" records, 'SY-SA' records
    slim = slim[slim['report_type'] != 'SAOSP']
    # remove duplicated records by time (keep the first observation)
    slim = slim[~slim.date.duplicated()]
    slim.drop(['report_type'], axis=1, inplace=True)
    metadata = df[['station','latitude','longitude','elevation','name']].head(1)
    return metadata, slim
def get_all_station_data(path, station, years):
    """
    Sift through all the years with this station included, read the data, clean it

    Reads path/'raw'/<year>/<station>.csv for every year, cleans each one
    via process_station_data, and concatenates + interpolates the result.

    Returns (metadata, station_data); station_data is None when any year
    had fewer than two observations per day (the station is rejected).
    NOTE(review): metadata comes from the last year read -- if *years* is
    empty this raises NameError; verify callers always pass >= 1 year.
    """
    station_dfs = list()
    for year in years:
        this_year = pd.read_csv(
            path/'raw'/f'{year}'/f'{station}.csv',
            encoding='utf-8',
            parse_dates=['DATE'],
            low_memory=False,
            dtype={'STATION': 'object', 'LATITUDE': np.float32,'LONGITUDE': np.float32,
                   'ELEVATION': np.float32, 'NAME': str, 'REPORT_TYPE':str,
                   'TMP': str,
                   },
        )
        # don't use this station if any of the years have less than two observations per day
        if this_year.shape[0] < 365 * 2:
            metadata, _ = process_station_data(this_year)
        else:
            metadata, cleaned_data = process_station_data(this_year)
            cleaned_data['year'] = year
            station_dfs.append(cleaned_data)
    if len(station_dfs) > 0:
        station_data = pd.concat(station_dfs)
        # time series interpolation only works with datetime index
        station_data.set_index('date', inplace=True, drop=False)
        station_data = interpolate_measurements(station_data)
        station_data.station = station
        station_data.reset_index(inplace=True, drop=True)
    else:
        # filter out stations with less reliable
        station_data = None
    return metadata, station_data
def interpolate_measurements(station_data):
    """
    Create a baseline frequency of measurements, fill in the gaps

    Reindexes the observations onto a complete hourly grid spanning the
    first through last calendar year in station_data.date, then fills
    each measurement column:

      * tmp / wnd_speed  -- time-based interpolation (both directions)
      * vis_distance     -- column median, or 0 when the column is all-null
      * all other columns -- 0

    station_data must carry a datetime index plus a 'date' column.
    Returns a new DataFrame with a 'date' column rebuilt from the grid.
    """
    # Hourly grid covering whole years; slicing off the final entry drops
    # the right endpoint -- equivalent to the old closed='left' keyword,
    # which pandas 2.0 removed.
    hourly_index = pd.date_range(
        start=str(min(station_data.date).year),
        end=str(max(station_data.date).year + 1),
        freq='H',
    )[:-1]
    base = pd.DataFrame(index=hourly_index)
    df = pd.merge(base, station_data, how='left', left_index=True, right_index=True)
    df['date'] = df.index.values
    df['tmp'] = df['tmp'].interpolate(method='time', limit_direction='both')
    if df['vis_distance'].isnull().sum() == df['vis_distance'].shape[0]:
        # Bug fix: fillna returns a new Series; the original discarded the
        # result here, leaving an all-NaN column.
        df['vis_distance'] = df['vis_distance'].fillna(0)
    else:
        df['vis_distance'] = df['vis_distance'].fillna(df['vis_distance'].median())
    df['wnd_speed'] = df['wnd_speed'].interpolate(method='time', limit_direction='both')
    df['ceil'] = df['ceil'].fillna(0)
    df['ceil_height'] = df['ceil_height'].fillna(0)
    df['liquid_precip_depth_dimension'] = df['liquid_precip_depth_dimension'].fillna(0)
    df['snow_equivalent_water_depth_dimension'] = df['snow_equivalent_water_depth_dimension'].fillna(0)
    df['total_precip'] = df['total_precip'].fillna(0)
    return df
def collect_data_from_csvs(PATH, sample_size=None, shuffle=True):
    """
    Read, clean and concatenate every complete station history under PATH.

    PATH        -- root data directory with a 'raw/<year>/<station>.csv' tree
    sample_size -- optional cap on the number of stations processed
    shuffle     -- randomize station order before sampling

    Returns (df, metadata): hourly observations for stations whose columns
    could be fully filled in, plus one metadata row per kept station.
    """
    stations, station_years = get_complete_station_years(Path(PATH))
    if shuffle: np.random.shuffle(stations)
    if sample_size is not None:
        # max(..., 1) keeps the progress modulo below from dividing by zero
        # when sample_size < 10
        g = max(int(sample_size / 10), 1)
        if sample_size < len(stations):
            station_iterator = stations[0:int(sample_size)]
        else:
            # Bug fix: the original left station_iterator undefined
            # (NameError) when sample_size >= len(stations)
            station_iterator = stations
    else:
        g = 100
        station_iterator = stations
    c = 0
    dfs = list()
    metas = list()
    print(f'Iterating through {len(station_iterator)} station file sets')
    for station in station_iterator:
        years = station_years['year'][station_years['id']==station]
        metadata, station_data = get_all_station_data(Path(PATH), station, years)
        if station_data is None:
            pass
        else:
            c += 1
            if c % g == 0:
                # periodic progress line: count + station metadata
                print(f'{c} - '+metadata.to_csv(None, header=False, index=False)[:-1])
            dfs.append(station_data)
            metas.append(metadata)
    metadata = pd.concat(metas)
    df = pd.concat(dfs)
    df = df.drop(['year'], axis=1)
    df.station = df.station.astype('category')
    metadata.station = metadata.station.astype('category')
    # get rid of stations with missing info (already having tried to interpolate)
    notnull_counts = df.groupby('station').apply(lambda col_df: col_df.notnull().sum())
    legit_stations = notnull_counts[(notnull_counts.apply(min, axis=1) == notnull_counts.apply(max).max())].index
    df = df[df.station.apply(lambda s: s in legit_stations)]
    metadata = metadata[metadata.station.apply(lambda s: s in legit_stations)]
    df.sort_values(['station','date'], inplace=True)
    df.reset_index(drop=True, inplace=True)
    metadata.sort_values(['station'], inplace=True)
    metadata.reset_index(drop=True, inplace=True)
    return df, metadata
def get_city_data(PATH, metadata, pop_threshold=1e6):
    """
    Load the world-cities CSV at PATH and return cities with population
    above pop_threshold, each annotated with its nearest weather station.

    metadata -- station metadata with 'latitude'/'longitude'/'station' columns.
    Returns a DataFrame with added 'station' and
    'closest_station_distance_km' columns.
    """
    print('Getting populous cities...')
    raw_cities = pd.read_csv(PATH,
            low_memory=False,
            encoding='utf-8',
            dtype={
                'Country':'category',
                'City': 'object',
                'AccentCity': 'object',
                'Region': 'category',
                'Population': 'float32',
                'Latitude': 'float32',
                'Longitude': 'float32',
            })
    pop = raw_cities[raw_cities.Population > pop_threshold].copy()
    pop.sort_values('Population', ascending=False, inplace=True)
    # drop duplicate coordinates, keeping the most populous entry
    cities = pop[~pop[['Latitude','Longitude']].duplicated()]
    cities.reset_index(drop=True, inplace=True)
    # nearest station (id, distance) per city
    clos = cities.apply(find_closest_station, metadata=metadata, axis=1).apply(pd.Series)
    clos.columns=['station','closest_station_distance_km']
    mrgd = pd.merge(cities, clos, left_index=True, right_index=True, how='left')
    return mrgd.copy()
def distance(origin, destination):
    """
    Haversine distance from https://gist.github.com/rochacbruno/2883505
    Returns the great-circle distance in kilometers between two
    (latitude, longitude) points given in degrees.
    """
    lat1, lon1 = origin
    lat2, lon2 = destination
    earth_radius_km = 6371

    dphi = math.radians(lat2 - lat1)
    dlambda = math.radians(lon2 - lon1)
    half_chord = (math.sin(dphi / 2) * math.sin(dphi / 2)
                  + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2))
                  * math.sin(dlambda / 2) * math.sin(dlambda / 2))
    central_angle = 2 * math.atan2(math.sqrt(half_chord), math.sqrt(1 - half_chord))
    return earth_radius_km * central_angle
def find_distance(m, coords):
    """Distance in km from a station row (with latitude/longitude) to *coords*."""
    station_coords = (m['latitude'], m['longitude'])
    return distance(station_coords, coords)
def find_closest_station(p, metadata):
    """Return (station id, distance km) of the metadata row nearest to city row *p*."""
    city_coords = (p.Latitude, p.Longitude)
    distances = metadata.apply(find_distance, axis=1, coords=city_coords)
    nearest = metadata.loc[distances.idxmin()]
    return nearest.station, min(distances)
if __name__ == '__main__':
    # Pipeline driver: sample stations, match each to its nearest populous
    # city, keep only stations that are some city's closest, and persist
    # the results as feather files under PATH.
    PATH = f'/home/ubuntu/climate-classification/data'
    SAMPLE_SIZE = 4000
    df, metadata = collect_data_from_csvs(PATH, sample_size=SAMPLE_SIZE, shuffle=True)
    cities = get_city_data('./data/worldcitiespop.csv', metadata)
    # for each station, the city it is closest to
    closest_cities = cities.groupby('station').apply(lambda d: d.closest_station_distance_km.idxmin())
    ma = cities.loc[closest_cities]
    slim = df[df.station.apply(lambda s: s in ma.station.values)].copy()
    # need to reset categories so .groupby().apply() doesn't pick up the old ones
    for d in (slim, ma, cities):
        d['station'] = d['station'].astype(str).astype('category')
        d.reset_index(drop=True, inplace=True)
    print('Saving...')
    # .to_feather requires a pyarrow installation at runtime
    slim.to_feather(f'{PATH}/df')
    ma.to_feather(f'{PATH}/metadata')
    cities.to_feather(f'{PATH}/cities')
    print('Finished')
|
[
"pandas.DataFrame",
"os.listdir",
"parsing.split_wnd",
"math.sqrt",
"pandas.read_csv",
"math.radians",
"pandas.merge",
"parsing.split_vis",
"parsing.split_tmp",
"parsing.split_liquid_precip",
"math.sin",
"parsing.split_ceil",
"pathlib.Path",
"parsing.split_snow",
"pandas.concat",
"numpy.random.shuffle"
] |
[((414, 428), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (426, 428), True, 'import pandas as pd\n'), ((441, 465), 'os.listdir', 'os.listdir', (["(path / 'raw')"], {}), "(path / 'raw')\n", (451, 465), False, 'import os\n'), ((1817, 1840), 'parsing.split_liquid_precip', 'split_liquid_precip', (['df'], {}), '(df)\n', (1836, 1840), False, 'from parsing import split_tmp, split_wnd, split_ceil, split_vis, split_liquid_precip, split_snow\n'), ((1852, 1866), 'parsing.split_snow', 'split_snow', (['df'], {}), '(df)\n', (1862, 1866), False, 'from parsing import split_tmp, split_wnd, split_ceil, split_vis, split_liquid_precip, split_snow\n'), ((1994, 2186), 'pandas.concat', 'pd.concat', (["[df[timef], tmp['tmp'], rain['liquid_precip_depth_dimension'], snow[\n 'snow_equivalent_water_depth_dimension'], df['total_precip'], wnd[wndf],\n ceil[ceilf], vis[visf]]"], {'axis': '(1)'}), "([df[timef], tmp['tmp'], rain['liquid_precip_depth_dimension'],\n snow['snow_equivalent_water_depth_dimension'], df['total_precip'], wnd[\n wndf], ceil[ceilf], vis[visf]], axis=1)\n", (2003, 2186), True, 'import pandas as pd\n'), ((4375, 4450), 'pandas.merge', 'pd.merge', (['base', 'station_data'], {'how': '"""left"""', 'left_index': '(True)', 'right_index': '(True)'}), "(base, station_data, how='left', left_index=True, right_index=True)\n", (4383, 4450), True, 'import pandas as pd\n'), ((6295, 6311), 'pandas.concat', 'pd.concat', (['metas'], {}), '(metas)\n', (6304, 6311), True, 'import pandas as pd\n'), ((6321, 6335), 'pandas.concat', 'pd.concat', (['dfs'], {}), '(dfs)\n', (6330, 6335), True, 'import pandas as pd\n'), ((7237, 7469), 'pandas.read_csv', 'pd.read_csv', (['PATH'], {'low_memory': '(False)', 'encoding': '"""utf-8"""', 'dtype': "{'Country': 'category', 'City': 'object', 'AccentCity': 'object', 'Region':\n 'category', 'Population': 'float32', 'Latitude': 'float32', 'Longitude':\n 'float32'}"}), "(PATH, low_memory=False, encoding='utf-8', dtype={'Country':\n 'category', 'City': 
'object', 'AccentCity': 'object', 'Region':\n 'category', 'Population': 'float32', 'Latitude': 'float32', 'Longitude':\n 'float32'})\n", (7248, 7469), True, 'import pandas as pd\n'), ((8068, 8137), 'pandas.merge', 'pd.merge', (['cities', 'clos'], {'left_index': '(True)', 'right_index': '(True)', 'how': '"""left"""'}), "(cities, clos, left_index=True, right_index=True, how='left')\n", (8076, 8137), True, 'import pandas as pd\n'), ((8431, 8456), 'math.radians', 'math.radians', (['(lat2 - lat1)'], {}), '(lat2 - lat1)\n', (8443, 8456), False, 'import math\n'), ((8466, 8491), 'math.radians', 'math.radians', (['(lon2 - lon1)'], {}), '(lon2 - lon1)\n', (8478, 8491), False, 'import math\n'), ((660, 705), 'pandas.concat', 'pd.concat', (['[station_years, this_station_year]'], {}), '([station_years, this_station_year])\n', (669, 705), True, 'import pandas as pd\n'), ((1651, 1664), 'parsing.split_wnd', 'split_wnd', (['df'], {}), '(df)\n', (1660, 1664), False, 'from parsing import split_tmp, split_wnd, split_ceil, split_vis, split_liquid_precip, split_snow\n'), ((1666, 1680), 'parsing.split_ceil', 'split_ceil', (['df'], {}), '(df)\n', (1676, 1680), False, 'from parsing import split_tmp, split_wnd, split_ceil, split_vis, split_liquid_precip, split_snow\n'), ((1682, 1695), 'parsing.split_vis', 'split_vis', (['df'], {}), '(df)\n', (1691, 1695), False, 'from parsing import split_tmp, split_wnd, split_ceil, split_vis, split_liquid_precip, split_snow\n'), ((1697, 1710), 'parsing.split_tmp', 'split_tmp', (['df'], {}), '(df)\n', (1706, 1710), False, 'from parsing import split_tmp, split_wnd, split_ceil, split_vis, split_liquid_precip, split_snow\n'), ((2792, 3067), 'pandas.read_csv', 'pd.read_csv', (["(path / 'raw' / f'{year}' / f'{station}.csv')"], {'encoding': '"""utf-8"""', 'parse_dates': "['DATE']", 'low_memory': '(False)', 'dtype': "{'STATION': 'object', 'LATITUDE': np.float32, 'LONGITUDE': np.float32,\n 'ELEVATION': np.float32, 'NAME': str, 'REPORT_TYPE': str, 'TMP': str}"}), 
"(path / 'raw' / f'{year}' / f'{station}.csv', encoding='utf-8',\n parse_dates=['DATE'], low_memory=False, dtype={'STATION': 'object',\n 'LATITUDE': np.float32, 'LONGITUDE': np.float32, 'ELEVATION': np.\n float32, 'NAME': str, 'REPORT_TYPE': str, 'TMP': str})\n", (2803, 3067), True, 'import pandas as pd\n'), ((3602, 3624), 'pandas.concat', 'pd.concat', (['station_dfs'], {}), '(station_dfs)\n', (3611, 3624), True, 'import pandas as pd\n'), ((5396, 5406), 'pathlib.Path', 'Path', (['PATH'], {}), '(PATH)\n', (5400, 5406), False, 'from pathlib import Path\n'), ((5424, 5451), 'numpy.random.shuffle', 'np.random.shuffle', (['stations'], {}), '(stations)\n', (5441, 5451), True, 'import numpy as np\n'), ((5967, 5977), 'pathlib.Path', 'Path', (['PATH'], {}), '(PATH)\n', (5971, 5977), False, 'from pathlib import Path\n'), ((8498, 8516), 'math.sin', 'math.sin', (['(dlat / 2)'], {}), '(dlat / 2)\n', (8506, 8516), False, 'import math\n'), ((8517, 8535), 'math.sin', 'math.sin', (['(dlat / 2)'], {}), '(dlat / 2)\n', (8525, 8535), False, 'import math\n'), ((8627, 8645), 'math.sin', 'math.sin', (['(dlon / 2)'], {}), '(dlon / 2)\n', (8635, 8645), False, 'import math\n'), ((8667, 8679), 'math.sqrt', 'math.sqrt', (['a'], {}), '(a)\n', (8676, 8679), False, 'import math\n'), ((8681, 8697), 'math.sqrt', 'math.sqrt', (['(1 - a)'], {}), '(1 - a)\n', (8690, 8697), False, 'import math\n'), ((8608, 8626), 'math.sin', 'math.sin', (['(dlon / 2)'], {}), '(dlon / 2)\n', (8616, 8626), False, 'import math\n'), ((572, 605), 'os.listdir', 'os.listdir', (["(path / 'raw' / f'{y}')"], {}), "(path / 'raw' / f'{y}')\n", (582, 605), False, 'import os\n'), ((8545, 8563), 'math.radians', 'math.radians', (['lat1'], {}), '(lat1)\n', (8557, 8563), False, 'import math\n'), ((8586, 8604), 'math.radians', 'math.radians', (['lat2'], {}), '(lat2)\n', (8598, 8604), False, 'import math\n')]
|
import time
import board

# Optional PCF8523 RTC support -- currently disabled; re-enable by
# uncommenting the I2C scan below and setting rtc_enabled accordingly.
# i2c = board.STEMMA_I2C()
rtc_enabled = False
# i2c.try_lock()
# i2c_scan_results = i2c.scan()
# i2c.unlock()
# rtc_enabled = False
# if 0x6b in i2c_scan_results:
#     rtc_enabled = True
#     import adafruit_pcf8523
#     rtc = adafruit_pcf8523.PCF8523(i2c)
# months = ("", "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December")
# months_short = ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
# days = ("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday")
# days_short = ("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat")
# # default_alarm = time.struct_time((2017, 1, 1, 0, 0, 0, 6, 1, -1))
# # if rtc.alarm_status == True:
# #     rtc.alarm_status = False
# # if rtc.alarm[0] != default_alarm:
# #     rtc.alarm = (default_alarm, None)
# startup_time = rtc.datetime
# def get_status_time(
#     td: time.struct_time,
#     status_time_string: str = "{:02d}/{:02d} {:02d}:{:02d}:{:02d}"):
#     return status_time_string.format(td.tm_mon, td.tm_mday, td.tm_hour, td.tm_min, td.tm_sec)

from hx711.hx711_pio import HX711_PIO

# HX711 load-cell amplifier driven via PIO on pins D25 (data) / D24 (clock);
# scalar converts raw counts to display units -- TODO confirm calibration value.
pio_data = board.D25
pio_clk = board.D24
hx = HX711_PIO(pio_data, pio_clk, tare=False, scalar=395.513)
import keypad

# Three momentary buttons; value_when_pressed=False + pull=True means each
# pin idles high and reads low when its button is pressed.
key_a = board.D11
key_b = board.D12
key_c = board.D13
buttons = keypad.Keys((key_a, key_b, key_c), value_when_pressed=False, pull=True)
# Reusable event object so the scan loop allocates nothing per poll.
event_buffer = keypad.Event()
# Canonical "pressed" events that incoming events are compared against.
BUTTON_A_PRESS = keypad.Event(0, True)
BUTTON_B_PRESS = keypad.Event(1, True)
BUTTON_C_PRESS = keypad.Event(2, True)
# Shared press-state dict (key number -> bool), refreshed by get_presses().
presses = {
    0: False,
    1: False,
    2: False
}
def get_presses(keys: keypad.Keys):
    """Drain the key event queue and record which buttons were pressed.

    Resets, mutates and returns the module-level `presses` dict
    (key number -> bool) based on every queued event since the last call.
    """
    for key_number in presses:
        presses[key_number] = False
    while keys.events.get_into(event_buffer):
        for index, press_event in enumerate(
                (BUTTON_A_PRESS, BUTTON_B_PRESS, BUTTON_C_PRESS)):
            if event_buffer == press_event:
                presses[index] = True
    return presses
def wait_for_keypress(keys: keypad.Keys):
    """Block until any of the three buttons is pressed, polling every 1 ms."""
    while True:
        latest = get_presses(keys)
        if latest[0] or latest[1] or latest[2]:
            return
        time.sleep(0.001)
import neopixel

# Single on-board NeoPixel used as a status blinker, at full brightness.
pixel = neopixel.NeoPixel(board.NEOPIXEL, 1)
pixel.brightness = 1.0
def keep_blinking(pixel, time_sec=1, delay=0.1, fill=(255, 255, 255), keys=None, epd=None):
    """Blink *pixel* with color *fill* until a stop condition is met.

    Stops after *time_sec* seconds, on any keypress (when *keys* is given),
    or once *epd* reports not-busy (when an e-paper display is given).
    A stop condition detected at the top of an iteration still lets that
    blink cycle finish before the loop exits.
    """
    start_time = time.monotonic()
    if epd:
        print("Blinking for EPD refresh, busy: {}".format(epd.busy))
    stop = False
    while not stop:
        # Check for keypress, if we need to
        if keys:
            latest = get_presses(keys)
            if latest[0] or latest[1] or latest[2]:
                stop = True
        # One on/off blink cycle.
        time.sleep(delay)
        pixel.fill(fill)
        time.sleep(delay)
        pixel.fill((0, 0, 0))
        elapsed = time.monotonic() - start_time
        if elapsed > time_sec:
            stop = True
        if epd and not epd.busy:
            print("Stopping EPD refresh blink, busy: {}, time: {}".format(epd.busy, elapsed))
            stop = True
# Green blink: scale and buttons are initialized.
keep_blinking(pixel, fill=(0, 255, 0))
import displayio
import terminalio
from adafruit_display_text import label
import adafruit_il0373
# E-ink FeatherWing pin assignments.
epd_sd_cs = board.D5
epd_sram_cs = board.D6
epd_cs = board.D9
epd_dc = board.D10
epd_busy = board.D4 # no dedicated reset pin on FeatherWing, solder BUSY to D4
spi = board.SPI()
displayio.release_displays()
# display_bus = displayio.FourWire(spi, command=epd_dc, chip_select=epd_cs, baudrate=1000000)
display_bus = displayio.FourWire(spi, command=epd_dc, chip_select=epd_cs, baudrate=1000000)
# Panel geometry and UI scaling.
WIDTH = 296
HEIGHT = 128
ROTATION = 270
TEXT_SCALE = 4
eink_refresh_delay = 180 # Advice is not to refresh more often than every 180 seconds, whew...
BG_COLOR = 0xFFFFFF # white background
TEXT_COLOR = 0x000000 # black text
HIGHLIGHT_COLOR = 0xFF0000
# Positioning typo eliminators :joy:
# (anchor_point tuples for adafruit_display_text labels)
TOP_LEFT = (0.0, 0.0)
TOP_CENTER = (0.5, 0.0)
TOP_RIGHT = (1.0, 0.0)
BOTTOM_LEFT = (0.0, 1.0)
BOTTOM_CENTER = (0.5, 1.0)
BOTTOM_RIGHT = (1.0, 1.0)
CENTER_CENTER = (0.5, 0.5)
# X positions of buttons along the top of the display for labels
XPOS_BUTTON_A = 47
XPOS_BUTTON_B = (WIDTH // 2) - 12
XPOS_BUTTON_C = (WIDTH - XPOS_BUTTON_A) - 25
BUTTON_WIDTH = 44
BUTTON_WIDTH_HALF = BUTTON_WIDTH // 2
# (x1, x2) border columns drawn around each button label in the top band.
BUTTON_BORDERS = [
    (XPOS_BUTTON_A - BUTTON_WIDTH_HALF, XPOS_BUTTON_A + BUTTON_WIDTH_HALF),
    (XPOS_BUTTON_B - BUTTON_WIDTH_HALF, XPOS_BUTTON_B + BUTTON_WIDTH_HALF),
    (XPOS_BUTTON_C - BUTTON_WIDTH_HALF, XPOS_BUTTON_C + BUTTON_WIDTH_HALF),
]
font = terminalio.FONT
# Build UI
splash = displayio.Group()
# Build display background
# NOTE(review): Bitmap value_count is 1 while palette index 1 is written
# below — confirm the driver accepts pixel value 1 with this bitmap depth.
bg_bitmap = displayio.Bitmap(WIDTH, HEIGHT, 1)
bg_palette = displayio.Palette(3)
bg_palette[0] = BG_COLOR
bg_palette[1] = TEXT_COLOR
bg_palette[2] = HIGHLIGHT_COLOR
bg_sprite = displayio.TileGrid(bg_bitmap, pixel_shader=bg_palette, x=0, y=0)
# Add UI lines: top/bottom title/status
for x in range(0, WIDTH):
    bg_bitmap[x, 14] = 1
    bg_bitmap[x, HEIGHT - 14] = 1
# Add UI lines: top button label markers
for y in range(0, 14):
    for x1, x2 in BUTTON_BORDERS:
        bg_bitmap[x1, y] = 1
        bg_bitmap[x2, y] = 1
splash.append(bg_sprite)
# Title on bottom line
title_text = "HX711 Filament Scale"
status_center = label.Label(
    font,
    text=title_text,
    color=HIGHLIGHT_COLOR,
    scale=1,
    anchor_point = (0.5, 1.0),
    anchored_position = (WIDTH / 2, HEIGHT)
)
splash.append(status_center)
# Buttons on top row
button_a_label = label.Label(
    font,
    text="WEIGH",
    color=TEXT_COLOR,
    anchor_point=TOP_CENTER,
    anchored_position=(XPOS_BUTTON_A, -1)
)
splash.append(button_a_label)
button_b_label = label.Label(
    font,
    text="DEBUG",
    color=TEXT_COLOR,
    anchor_point=TOP_CENTER,
    anchored_position=(XPOS_BUTTON_B, -1)
)
splash.append(button_b_label)
button_c_label = label.Label(
    font,
    text="TARE",
    color=TEXT_COLOR,
    anchor_point=TOP_CENTER,
    anchored_position=(XPOS_BUTTON_C, -1)
)
splash.append(button_c_label)
# rtc_enabled / startup_time presumably come from an RTC section earlier
# in the file — confirm.
if rtc_enabled:
    # Last-refresh-time on bottom left
    time_left = label.Label(
        font,
        text=get_status_time(startup_time),
        color=TEXT_COLOR,
        anchor_point=BOTTOM_LEFT,
        anchored_position=(1, HEIGHT - 15)
    )
    splash.append(time_left)
    time_left_label = label.Label(
        font,
        text="Last refresh:",
        color=TEXT_COLOR,
        anchor_point=BOTTOM_LEFT,
        anchored_position=(1, HEIGHT - 15 - 14)
    )
    splash.append(time_left_label)
# Debug data on corners
status_left = label.Label(
    font,
    text=" "*10,
    color=TEXT_COLOR,
    anchor_point=BOTTOM_LEFT,
    anchored_position=(1, HEIGHT)
)
splash.append(status_left)
status_right = label.Label(
    font,
    text=" "*10,
    color=TEXT_COLOR,
    anchor_point=BOTTOM_RIGHT,
    anchored_position=(WIDTH-1, HEIGHT)
)
splash.append(status_right)
# Center text
# NOTE(review): text_area_ypos is computed but unused — anchored_position
# below uses HEIGHT // 2 directly; likely intended to use text_area_ypos.
text_area_ypos = HEIGHT // 2
if rtc_enabled:
    text_area_ypos -= 14
text_area = label.Label(
    font,
    text="Hello World!",
    color=TEXT_COLOR,
    scale=TEXT_SCALE,
    anchor_point=CENTER_CENTER,
    anchored_position=(WIDTH // 2, (HEIGHT // 2)),
)
splash.append(text_area)
# Create the display object - the third color is red (0xff0000)
display = adafruit_il0373.IL0373(
    display_bus,
    width=WIDTH,
    height=HEIGHT,
    rotation=ROTATION,
    busy_pin=epd_busy,
    highlight_color=HIGHLIGHT_COLOR,
    seconds_per_frame=15,
)
display.show(splash)
# Display setup complete, grab our first scale read and refresh that puppy
pixel.fill((255, 255, 255))
# Two read/tare warm-up cycles before taking the first displayed reading.
hx.read(50)
hx.tare()
hx.read(50)
hx.tare()
reading = hx.read(50)
reading_raw = hx.read_raw()
text_area.text = "{:.2f} g".format(reading)
pixel.fill((0, 0, 0))
display.refresh()
time.sleep(1)
print("INIT: [{: 8.2f} g] [{: 8} raw] offset: {}, scalar: {}".format(
    reading, reading_raw, hx.offset, hx.scalar))
# days/months lookup tables presumably defined in the RTC section earlier
# in the file — confirm.
if rtc_enabled:
    print("INIT: Startup at {}, {} {}, {}:{}:{}".format(
        days[startup_time.tm_wday],
        months[startup_time.tm_mon],
        startup_time.tm_mday,
        startup_time.tm_hour,
        startup_time.tm_min,
        startup_time.tm_sec
    ))
# Blue blink until the e-ink refresh completes (epd busy gate).
keep_blinking(pixel, time_sec=eink_refresh_delay, fill=(0, 0, 255), epd=display)
# Final prep for loop
# wait_for_keypress(buttons)
debug = False
# Main event loop: poll the buttons and dispatch to weigh / debug / tare.
while True:
    new_presses = get_presses(buttons)
    # One-shot action flags for this pass (A=weigh, B=debug, C=tare).
    tare = False
    debug = False
    weigh = False
    if new_presses[0]:
        weigh = True
    elif new_presses[1]:
        debug = True
    elif new_presses[2]:
        tare = True
    if tare:
        # Visual feedback while taring: white = busy, off = done.
        pixel.fill((255, 255, 255))
        print("Tare requested, current offset: [{}]".format(hx.offset))
        # Double read/tare cycle to settle the load cell before re-zeroing.
        hx.read(50)
        hx.tare()
        hx.read(50)
        hx.tare()
        print("Tare completed, new offset: [{}]".format(hx.offset))
        pixel.fill((0, 0, 0))
        # A tare is followed by a fresh weigh so the display updates.
        weigh = True
        time.sleep(1)
    if debug:
        print("HX details, offset: [{}], scalar: [{}], gain: [{}]".format(
            hx.offset, hx.scalar, hx.gain
        ))
        # NOTE(review): this inversion clears debug again, so the branch
        # below that shows debug values on-screen never runs; debug is also
        # re-initialized to False at the top of every pass, which defeats
        # what looks like an intended toggle — confirm intent.
        debug = not debug
    if debug:
        button_b_label.color = HIGHLIGHT_COLOR
        status_left.text = "offset: {}".format(hx.offset)
        status_center.text = "scalar: {}".format(hx.scalar)
        status_right.text = "gain: {}".format(hx.gain)
    else:
        button_b_label.color = TEXT_COLOR
        status_left.text = ""
        status_right.text = ""
        status_center.text = title_text
        if rtc_enabled:
            time_left.text = get_status_time(startup_time)
        # NOTE(review): this refresh runs on every idle pass — confirm the
        # driver's seconds_per_frame throttling makes that safe for e-ink.
        display.refresh()
        keep_blinking(pixel, time_sec=eink_refresh_delay, fill=(0, 0, 255), epd=display)
    if weigh:
        pixel.fill((255, 255, 255))
        # Discard one averaged read, then take the reading we display.
        pre_read = hx.read(50)
        reading = hx.read(50)
        reading_raw = hx.read_raw()
        print(
            "Weighed: [{: 8.2f} g] [{: 8} raw] offset: {}, scalar: {}".format(
                reading, reading_raw, hx.offset, hx.scalar
            )
        )
        text_area.text = "{:.2f} g".format(reading)
        if rtc_enabled:
            time_left.text = get_status_time(startup_time)
        pixel.fill((0, 0, 0))
        # If a refresh is already in flight, blink until it finishes first.
        if display.busy:
            keep_blinking(pixel, time_sec=eink_refresh_delay, fill=(0, 0, 255), epd=display)
        display.refresh()
        keep_blinking(pixel, time_sec=eink_refresh_delay, fill=(0, 0, 255), epd=display)
|
[
"displayio.FourWire",
"displayio.Group",
"displayio.Bitmap",
"displayio.Palette",
"keypad.Event",
"keypad.Keys",
"time.sleep",
"hx711.hx711_pio.HX711_PIO",
"board.SPI",
"displayio.TileGrid",
"adafruit_il0373.IL0373",
"time.monotonic",
"adafruit_display_text.label.Label",
"neopixel.NeoPixel",
"displayio.release_displays"
] |
[((1233, 1289), 'hx711.hx711_pio.HX711_PIO', 'HX711_PIO', (['pio_data', 'pio_clk'], {'tare': '(False)', 'scalar': '(395.513)'}), '(pio_data, pio_clk, tare=False, scalar=395.513)\n', (1242, 1289), False, 'from hx711.hx711_pio import HX711_PIO\n'), ((1371, 1442), 'keypad.Keys', 'keypad.Keys', (['(key_a, key_b, key_c)'], {'value_when_pressed': '(False)', 'pull': '(True)'}), '((key_a, key_b, key_c), value_when_pressed=False, pull=True)\n', (1382, 1442), False, 'import keypad\n'), ((1459, 1473), 'keypad.Event', 'keypad.Event', ([], {}), '()\n', (1471, 1473), False, 'import keypad\n'), ((1492, 1513), 'keypad.Event', 'keypad.Event', (['(0)', '(True)'], {}), '(0, True)\n', (1504, 1513), False, 'import keypad\n'), ((1531, 1552), 'keypad.Event', 'keypad.Event', (['(1)', '(True)'], {}), '(1, True)\n', (1543, 1552), False, 'import keypad\n'), ((1570, 1591), 'keypad.Event', 'keypad.Event', (['(2)', '(True)'], {}), '(2, True)\n', (1582, 1591), False, 'import keypad\n'), ((2302, 2338), 'neopixel.NeoPixel', 'neopixel.NeoPixel', (['board.NEOPIXEL', '(1)'], {}), '(board.NEOPIXEL, 1)\n', (2319, 2338), False, 'import neopixel\n'), ((3504, 3515), 'board.SPI', 'board.SPI', ([], {}), '()\n', (3513, 3515), False, 'import board\n'), ((3517, 3545), 'displayio.release_displays', 'displayio.release_displays', ([], {}), '()\n', (3543, 3545), False, 'import displayio\n'), ((3654, 3731), 'displayio.FourWire', 'displayio.FourWire', (['spi'], {'command': 'epd_dc', 'chip_select': 'epd_cs', 'baudrate': '(1000000)'}), '(spi, command=epd_dc, chip_select=epd_cs, baudrate=1000000)\n', (3672, 3731), False, 'import displayio\n'), ((4718, 4735), 'displayio.Group', 'displayio.Group', ([], {}), '()\n', (4733, 4735), False, 'import displayio\n'), ((4776, 4810), 'displayio.Bitmap', 'displayio.Bitmap', (['WIDTH', 'HEIGHT', '(1)'], {}), '(WIDTH, HEIGHT, 1)\n', (4792, 4810), False, 'import displayio\n'), ((4824, 4844), 'displayio.Palette', 'displayio.Palette', (['(3)'], {}), '(3)\n', (4841, 4844), False, 'import 
displayio\n'), ((4941, 5005), 'displayio.TileGrid', 'displayio.TileGrid', (['bg_bitmap'], {'pixel_shader': 'bg_palette', 'x': '(0)', 'y': '(0)'}), '(bg_bitmap, pixel_shader=bg_palette, x=0, y=0)\n', (4959, 5005), False, 'import displayio\n'), ((5391, 5525), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': 'title_text', 'color': 'HIGHLIGHT_COLOR', 'scale': '(1)', 'anchor_point': '(0.5, 1.0)', 'anchored_position': '(WIDTH / 2, HEIGHT)'}), '(font, text=title_text, color=HIGHLIGHT_COLOR, scale=1,\n anchor_point=(0.5, 1.0), anchored_position=(WIDTH / 2, HEIGHT))\n', (5402, 5525), False, 'from adafruit_display_text import label\n'), ((5620, 5737), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': '"""WEIGH"""', 'color': 'TEXT_COLOR', 'anchor_point': 'TOP_CENTER', 'anchored_position': '(XPOS_BUTTON_A, -1)'}), "(font, text='WEIGH', color=TEXT_COLOR, anchor_point=TOP_CENTER,\n anchored_position=(XPOS_BUTTON_A, -1))\n", (5631, 5737), False, 'from adafruit_display_text import label\n'), ((5804, 5921), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': '"""DEBUG"""', 'color': 'TEXT_COLOR', 'anchor_point': 'TOP_CENTER', 'anchored_position': '(XPOS_BUTTON_B, -1)'}), "(font, text='DEBUG', color=TEXT_COLOR, anchor_point=TOP_CENTER,\n anchored_position=(XPOS_BUTTON_B, -1))\n", (5815, 5921), False, 'from adafruit_display_text import label\n'), ((5988, 6104), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': '"""TARE"""', 'color': 'TEXT_COLOR', 'anchor_point': 'TOP_CENTER', 'anchored_position': '(XPOS_BUTTON_C, -1)'}), "(font, text='TARE', color=TEXT_COLOR, anchor_point=TOP_CENTER,\n anchored_position=(XPOS_BUTTON_C, -1))\n", (5999, 6104), False, 'from adafruit_display_text import label\n'), ((6702, 6813), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': "(' ' * 10)", 'color': 'TEXT_COLOR', 'anchor_point': 'BOTTOM_LEFT', 'anchored_position': '(1, HEIGHT)'}), "(font, text=' ' 
* 10, color=TEXT_COLOR, anchor_point=BOTTOM_LEFT,\n anchored_position=(1, HEIGHT))\n", (6713, 6813), False, 'from adafruit_display_text import label\n'), ((6873, 6994), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': "(' ' * 10)", 'color': 'TEXT_COLOR', 'anchor_point': 'BOTTOM_RIGHT', 'anchored_position': '(WIDTH - 1, HEIGHT)'}), "(font, text=' ' * 10, color=TEXT_COLOR, anchor_point=\n BOTTOM_RIGHT, anchored_position=(WIDTH - 1, HEIGHT))\n", (6884, 6994), False, 'from adafruit_display_text import label\n'), ((7135, 7286), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': '"""Hello World!"""', 'color': 'TEXT_COLOR', 'scale': 'TEXT_SCALE', 'anchor_point': 'CENTER_CENTER', 'anchored_position': '(WIDTH // 2, HEIGHT // 2)'}), "(font, text='Hello World!', color=TEXT_COLOR, scale=TEXT_SCALE,\n anchor_point=CENTER_CENTER, anchored_position=(WIDTH // 2, HEIGHT // 2))\n", (7146, 7286), False, 'from adafruit_display_text import label\n'), ((7412, 7577), 'adafruit_il0373.IL0373', 'adafruit_il0373.IL0373', (['display_bus'], {'width': 'WIDTH', 'height': 'HEIGHT', 'rotation': 'ROTATION', 'busy_pin': 'epd_busy', 'highlight_color': 'HIGHLIGHT_COLOR', 'seconds_per_frame': '(15)'}), '(display_bus, width=WIDTH, height=HEIGHT, rotation=\n ROTATION, busy_pin=epd_busy, highlight_color=HIGHLIGHT_COLOR,\n seconds_per_frame=15)\n', (7434, 7577), False, 'import adafruit_il0373\n'), ((7906, 7919), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7916, 7919), False, 'import time\n'), ((2473, 2489), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (2487, 2489), False, 'import time\n'), ((6457, 6586), 'adafruit_display_text.label.Label', 'label.Label', (['font'], {'text': '"""Last refresh:"""', 'color': 'TEXT_COLOR', 'anchor_point': 'BOTTOM_LEFT', 'anchored_position': '(1, HEIGHT - 15 - 14)'}), "(font, text='Last refresh:', color=TEXT_COLOR, anchor_point=\n BOTTOM_LEFT, anchored_position=(1, HEIGHT - 15 - 14))\n", (6468, 6586), False, 'from 
adafruit_display_text import label\n'), ((2803, 2820), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (2813, 2820), False, 'import time\n'), ((2854, 2871), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (2864, 2871), False, 'import time\n'), ((2920, 2936), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (2934, 2936), False, 'import time\n'), ((9026, 9039), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (9036, 9039), False, 'import time\n'), ((2258, 2275), 'time.sleep', 'time.sleep', (['(0.001)'], {}), '(0.001)\n', (2268, 2275), False, 'import time\n')]
|
import FWCore.ParameterSet.Config as cms

# cmsRun configuration exercising the edmtest "global" module framework.
# Event bookkeeping: nEvtLumi events per lumi block, nEvtRun per run.
nEvtLumi = 4
nEvtRun = 2*nEvtLumi
nRuns = 64
nStreams = 4
nEvt = nRuns*nEvtRun

process = cms.Process("TESTGLOBALMODULES")

import FWCore.Framework.test.cmsExceptionsFatalOption_cff
process.options = cms.untracked.PSet(
    numberOfStreams = cms.untracked.uint32(nStreams),
    numberOfThreads = cms.untracked.uint32(nStreams)
)
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(nEvt)
)

process.source = cms.Source("EmptySource",
    timeBetweenEvents = cms.untracked.uint64(1000),
    firstTime = cms.untracked.uint64(1000000),
    numberEventsInRun = cms.untracked.uint32(nEvtRun),
    numberEventsInLuminosityBlock = cms.untracked.uint32(nEvtLumi)
)

# NOTE: all divisions below use // so the expected-transition counts stay
# integers under Python 3; cms.int32 does not accept the floats produced by
# true division. Every division here is exact, so the values are unchanged.
process.StreamIntProd = cms.EDProducer("edmtest::global::StreamIntProducer",
    transitions = cms.int32(nEvt+nStreams*(2*(nEvt//nEvtRun)+2*(nEvt//nEvtLumi)+2))
    ,cachevalue = cms.int32(1)
)

process.RunIntProd = cms.EDProducer("edmtest::global::RunIntProducer",
    transitions = cms.int32(2*(nEvt//nEvtRun))
    ,cachevalue = cms.int32(nEvtRun)
)

process.LumiIntProd = cms.EDProducer("edmtest::global::LumiIntProducer",
    transitions = cms.int32(2*(nEvt//nEvtLumi))
    ,cachevalue = cms.int32(nEvtLumi)
)

process.RunSumIntProd = cms.EDProducer("edmtest::global::RunSummaryIntProducer",
    transitions = cms.int32(nStreams*(nEvt//nEvtRun)+2*(nEvt//nEvtRun))
    ,cachevalue = cms.int32(nEvtRun)
)

process.LumiSumIntProd = cms.EDProducer("edmtest::global::LumiSummaryIntProducer",
    transitions = cms.int32(nStreams*(nEvt//nEvtLumi)+2*(nEvt//nEvtLumi))
    ,cachevalue = cms.int32(nEvtLumi)
)

process.TestBeginRunProd = cms.EDProducer("edmtest::global::TestBeginRunProducer",
    transitions = cms.int32((nEvt//nEvtRun))
)

process.TestEndRunProd = cms.EDProducer("edmtest::global::TestEndRunProducer",
    transitions = cms.int32((nEvt//nEvtRun))
)

process.TestBeginLumiBlockProd = cms.EDProducer("edmtest::global::TestBeginLumiBlockProducer",
    transitions = cms.int32((nEvt//nEvtLumi))
)

process.TestEndLumiBlockProd = cms.EDProducer("edmtest::global::TestEndLumiBlockProducer",
    transitions = cms.int32((nEvt//nEvtLumi))
)

process.StreamIntAn = cms.EDAnalyzer("edmtest::global::StreamIntAnalyzer",
    transitions = cms.int32(nEvt+nStreams*(2*(nEvt//nEvtRun)+2*(nEvt//nEvtLumi)+2))
    ,cachevalue = cms.int32(1)
)

process.RunIntAn= cms.EDAnalyzer("edmtest::global::RunIntAnalyzer",
    transitions = cms.int32(nEvt+2*(nEvt//nEvtRun))
    ,cachevalue = cms.int32(nEvtRun)
)

process.LumiIntAn = cms.EDAnalyzer("edmtest::global::LumiIntAnalyzer",
    transitions = cms.int32(nEvt+2*(nEvt//nEvtLumi))
    ,cachevalue = cms.int32(nEvtLumi)
)

process.RunSumIntAn = cms.EDAnalyzer("edmtest::global::RunSummaryIntAnalyzer",
    transitions = cms.int32(nEvt+nStreams*((nEvt//nEvtRun)+1)+2*(nEvt//nEvtRun))
    ,cachevalue = cms.int32(nEvtRun)
)

process.LumiSumIntAn = cms.EDAnalyzer("edmtest::global::LumiSummaryIntAnalyzer",
    transitions = cms.int32(nEvt+nStreams*((nEvt//nEvtLumi)+1)+2*(nEvt//nEvtLumi))
    ,cachevalue = cms.int32(nEvtLumi)
)

process.StreamIntFil = cms.EDFilter("edmtest::global::StreamIntFilter",
    transitions = cms.int32(nEvt+nStreams*(2*(nEvt//nEvtRun)+2*(nEvt//nEvtLumi)+2))
    ,cachevalue = cms.int32(1)
)

process.RunIntFil = cms.EDFilter("edmtest::global::RunIntFilter",
    transitions = cms.int32(nEvt+2*(nEvt//nEvtRun))
    ,cachevalue = cms.int32(nEvtRun)
)

process.LumiIntFil = cms.EDFilter("edmtest::global::LumiIntFilter",
    transitions = cms.int32(nEvt+2*(nEvt//nEvtLumi))
    ,cachevalue = cms.int32(nEvtLumi)
)

process.RunSumIntFil = cms.EDFilter("edmtest::global::RunSummaryIntFilter",
    transitions = cms.int32(nEvt+nStreams*((nEvt//nEvtRun)+1)+2*(nEvt//nEvtRun))
    ,cachevalue = cms.int32(nEvtRun)
)

process.LumiSumIntFil = cms.EDFilter("edmtest::global::LumiSummaryIntFilter",
    transitions = cms.int32(nEvt+nStreams*((nEvt//nEvtLumi)+1)+2*(nEvt//nEvtLumi))
    ,cachevalue = cms.int32(nEvtLumi)
)

process.TestBeginRunFil = cms.EDFilter("edmtest::global::TestBeginRunFilter",
    transitions = cms.int32((nEvt//nEvtRun))
)

process.TestEndRunFil = cms.EDFilter("edmtest::global::TestEndRunFilter",
    transitions = cms.int32((nEvt//nEvtRun))
)

process.TestBeginLumiBlockFil = cms.EDFilter("edmtest::global::TestBeginLumiBlockFilter",
    transitions = cms.int32((nEvt//nEvtLumi))
)

process.TestEndLumiBlockFil = cms.EDFilter("edmtest::global::TestEndLumiBlockFilter",
    transitions = cms.int32((nEvt//nEvtLumi))
)

process.TestAccumulator1 = cms.EDProducer("edmtest::global::TestAccumulator",
    expectedCount = cms.uint32(512)
)

process.TestAccumulator2 = cms.EDProducer("edmtest::global::TestAccumulator",
    expectedCount = cms.uint32(35)
)

process.testFilterModule = cms.EDFilter("TestFilterModule",
    acceptValue = cms.untracked.int32(5),
    onlyOne = cms.untracked.bool(False)
)

# TestAccumulator1 runs as an unscheduled task; everything else is on the path.
process.task = cms.Task(process.TestAccumulator1)

process.p = cms.Path(process.StreamIntProd+process.RunIntProd+process.LumiIntProd+process.RunSumIntProd+process.LumiSumIntProd+process.TestBeginRunProd+process.TestEndRunProd+process.TestBeginLumiBlockProd+process.TestEndLumiBlockProd+process.StreamIntAn+process.RunIntAn+process.LumiIntAn+process.RunSumIntAn+process.LumiSumIntAn+process.StreamIntFil+process.RunIntFil+process.LumiIntFil+process.RunSumIntFil+process.LumiSumIntFil+process.TestBeginRunFil+process.TestEndRunFil+process.TestBeginLumiBlockFil+process.TestEndLumiBlockFil+process.testFilterModule+process.TestAccumulator2, process.task)
|
[
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.Task",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.int32",
"FWCore.ParameterSet.Config.uint32",
"FWCore.ParameterSet.Config.untracked.uint64",
"FWCore.ParameterSet.Config.untracked.uint32",
"FWCore.ParameterSet.Config.Path"
] |
[((132, 164), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""TESTGLOBALMODULES"""'], {}), "('TESTGLOBALMODULES')\n", (143, 164), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4878, 4912), 'FWCore.ParameterSet.Config.Task', 'cms.Task', (['process.TestAccumulator1'], {}), '(process.TestAccumulator1)\n', (4886, 4912), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4927, 5606), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['(process.StreamIntProd + process.RunIntProd + process.LumiIntProd + process\n .RunSumIntProd + process.LumiSumIntProd + process.TestBeginRunProd +\n process.TestEndRunProd + process.TestBeginLumiBlockProd + process.\n TestEndLumiBlockProd + process.StreamIntAn + process.RunIntAn + process\n .LumiIntAn + process.RunSumIntAn + process.LumiSumIntAn + process.\n StreamIntFil + process.RunIntFil + process.LumiIntFil + process.\n RunSumIntFil + process.LumiSumIntFil + process.TestBeginRunFil +\n process.TestEndRunFil + process.TestBeginLumiBlockFil + process.\n TestEndLumiBlockFil + process.testFilterModule + process.TestAccumulator2)', 'process.task'], {}), '(process.StreamIntProd + process.RunIntProd + process.LumiIntProd +\n process.RunSumIntProd + process.LumiSumIntProd + process.\n TestBeginRunProd + process.TestEndRunProd + process.\n TestBeginLumiBlockProd + process.TestEndLumiBlockProd + process.\n StreamIntAn + process.RunIntAn + process.LumiIntAn + process.\n RunSumIntAn + process.LumiSumIntAn + process.StreamIntFil + process.\n RunIntFil + process.LumiIntFil + process.RunSumIntFil + process.\n LumiSumIntFil + process.TestBeginRunFil + process.TestEndRunFil +\n process.TestBeginLumiBlockFil + process.TestEndLumiBlockFil + process.\n testFilterModule + process.TestAccumulator2, process.task)\n', (4935, 5606), True, 'import FWCore.ParameterSet.Config as cms\n'), ((285, 315), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['nStreams'], {}), '(nStreams)\n', (305, 315), True, 'import 
FWCore.ParameterSet.Config as cms\n'), ((339, 369), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['nStreams'], {}), '(nStreams)\n', (359, 369), True, 'import FWCore.ParameterSet.Config as cms\n'), ((426, 451), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['nEvt'], {}), '(nEvt)\n', (445, 451), True, 'import FWCore.ParameterSet.Config as cms\n'), ((522, 548), 'FWCore.ParameterSet.Config.untracked.uint64', 'cms.untracked.uint64', (['(1000)'], {}), '(1000)\n', (542, 548), True, 'import FWCore.ParameterSet.Config as cms\n'), ((566, 595), 'FWCore.ParameterSet.Config.untracked.uint64', 'cms.untracked.uint64', (['(1000000)'], {}), '(1000000)\n', (586, 595), True, 'import FWCore.ParameterSet.Config as cms\n'), ((621, 650), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['nEvtRun'], {}), '(nEvtRun)\n', (641, 650), True, 'import FWCore.ParameterSet.Config as cms\n'), ((688, 718), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['nEvtLumi'], {}), '(nEvtLumi)\n', (708, 718), True, 'import FWCore.ParameterSet.Config as cms\n'), ((818, 897), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + nStreams * (2 * (nEvt / nEvtRun) + 2 * (nEvt / nEvtLumi) + 2))'], {}), '(nEvt + nStreams * (2 * (nEvt / nEvtRun) + 2 * (nEvt / nEvtLumi) + 2))\n', (827, 897), True, 'import FWCore.ParameterSet.Config as cms\n'), ((900, 912), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (909, 912), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1005, 1036), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(2 * (nEvt / nEvtRun))'], {}), '(2 * (nEvt / nEvtRun))\n', (1014, 1036), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1051, 1069), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtRun'], {}), '(nEvtRun)\n', (1060, 1069), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1164, 1196), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(2 * 
(nEvt / nEvtLumi))'], {}), '(2 * (nEvt / nEvtLumi))\n', (1173, 1196), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1211, 1230), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtLumi'], {}), '(nEvtLumi)\n', (1220, 1230), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1333, 1394), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nStreams * (nEvt / nEvtRun) + 2 * (nEvt / nEvtRun))'], {}), '(nStreams * (nEvt / nEvtRun) + 2 * (nEvt / nEvtRun))\n', (1342, 1394), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1403, 1421), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtRun'], {}), '(nEvtRun)\n', (1412, 1421), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1526, 1589), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nStreams * (nEvt / nEvtLumi) + 2 * (nEvt / nEvtLumi))'], {}), '(nStreams * (nEvt / nEvtLumi) + 2 * (nEvt / nEvtLumi))\n', (1535, 1589), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1598, 1617), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtLumi'], {}), '(nEvtLumi)\n', (1607, 1617), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1722, 1747), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtRun)'], {}), '(nEvt / nEvtRun)\n', (1731, 1747), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1848, 1873), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtRun)'], {}), '(nEvt / nEvtRun)\n', (1857, 1873), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1990, 2016), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtLumi)'], {}), '(nEvt / nEvtLumi)\n', (1999, 2016), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2129, 2155), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtLumi)'], {}), '(nEvt / nEvtLumi)\n', (2138, 2155), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2252, 2331), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + nStreams * (2 * (nEvt / nEvtRun) + 2 * (nEvt / nEvtLumi) + 
2))'], {}), '(nEvt + nStreams * (2 * (nEvt / nEvtRun) + 2 * (nEvt / nEvtLumi) + 2))\n', (2261, 2331), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2334, 2346), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (2343, 2346), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2436, 2474), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + 2 * (nEvt / nEvtRun))'], {}), '(nEvt + 2 * (nEvt / nEvtRun))\n', (2445, 2474), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2487, 2505), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtRun'], {}), '(nEvtRun)\n', (2496, 2505), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2598, 2637), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + 2 * (nEvt / nEvtLumi))'], {}), '(nEvt + 2 * (nEvt / nEvtLumi))\n', (2607, 2637), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2650, 2669), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtLumi'], {}), '(nEvtLumi)\n', (2659, 2669), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2770, 2842), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + nStreams * (nEvt / nEvtRun + 1) + 2 * (nEvt / nEvtRun))'], {}), '(nEvt + nStreams * (nEvt / nEvtRun + 1) + 2 * (nEvt / nEvtRun))\n', (2779, 2842), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2849, 2867), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtRun'], {}), '(nEvtRun)\n', (2858, 2867), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2970, 3044), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + nStreams * (nEvt / nEvtLumi + 1) + 2 * (nEvt / nEvtLumi))'], {}), '(nEvt + nStreams * (nEvt / nEvtLumi + 1) + 2 * (nEvt / nEvtLumi))\n', (2979, 3044), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3051, 3070), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtLumi'], {}), '(nEvtLumi)\n', (3060, 3070), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3164, 3243), 'FWCore.ParameterSet.Config.int32', 
'cms.int32', (['(nEvt + nStreams * (2 * (nEvt / nEvtRun) + 2 * (nEvt / nEvtLumi) + 2))'], {}), '(nEvt + nStreams * (2 * (nEvt / nEvtRun) + 2 * (nEvt / nEvtLumi) + 2))\n', (3173, 3243), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3246, 3258), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (3255, 3258), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3346, 3384), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + 2 * (nEvt / nEvtRun))'], {}), '(nEvt + 2 * (nEvt / nEvtRun))\n', (3355, 3384), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3397, 3415), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtRun'], {}), '(nEvtRun)\n', (3406, 3415), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3505, 3544), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + 2 * (nEvt / nEvtLumi))'], {}), '(nEvt + 2 * (nEvt / nEvtLumi))\n', (3514, 3544), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3557, 3576), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtLumi'], {}), '(nEvtLumi)\n', (3566, 3576), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3674, 3746), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + nStreams * (nEvt / nEvtRun + 1) + 2 * (nEvt / nEvtRun))'], {}), '(nEvt + nStreams * (nEvt / nEvtRun + 1) + 2 * (nEvt / nEvtRun))\n', (3683, 3746), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3753, 3771), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtRun'], {}), '(nEvtRun)\n', (3762, 3771), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3871, 3945), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt + nStreams * (nEvt / nEvtLumi + 1) + 2 * (nEvt / nEvtLumi))'], {}), '(nEvt + nStreams * (nEvt / nEvtLumi + 1) + 2 * (nEvt / nEvtLumi))\n', (3880, 3945), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3952, 3971), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['nEvtLumi'], {}), '(nEvtLumi)\n', (3961, 3971), True, 'import 
FWCore.ParameterSet.Config as cms\n'), ((4071, 4096), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtRun)'], {}), '(nEvt / nEvtRun)\n', (4080, 4096), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4192, 4217), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtRun)'], {}), '(nEvt / nEvtRun)\n', (4201, 4217), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4329, 4355), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtLumi)'], {}), '(nEvt / nEvtLumi)\n', (4338, 4355), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4463, 4489), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(nEvt / nEvtLumi)'], {}), '(nEvt / nEvtLumi)\n', (4472, 4489), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4589, 4604), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(512)'], {}), '(512)\n', (4599, 4604), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4704, 4718), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(35)'], {}), '(35)\n', (4714, 4718), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4798, 4820), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(5)'], {}), '(5)\n', (4817, 4820), True, 'import FWCore.ParameterSet.Config as cms\n'), ((4834, 4859), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(False)'], {}), '(False)\n', (4852, 4859), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import io
import logging
import subprocess
import sys
import tarfile
import tempfile
from pathlib import Path
from threading import Lock
from typing import Iterable, List
import numpy as np
from fasteners import InterProcessLock
from compiler_gym.datasets import Benchmark, BenchmarkSource, Dataset
from compiler_gym.datasets.benchmark import BenchmarkInitError, BenchmarkWithSource
from compiler_gym.datasets.dataset import DatasetInitError
from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation
from compiler_gym.util.decorators import memoized_property
from compiler_gym.util.download import download
from compiler_gym.util.runfiles_path import transient_cache_path
# The maximum value for the --seed argument to csmith (seeds are uint32).
UINT_MAX = (2 ** 32) - 1
# A lock for exclusive access to the Csmith build logic.
# NOTE(review): this guards threads within one process; cross-process
# exclusion presumably uses the InterProcessLock imported above — confirm.
_CSMITH_BUILD_LOCK = Lock()
class CsmithBenchmark(BenchmarkWithSource):
    """A benchmark whose bitcode was produced from a Csmith-generated C file."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Raw C source bytes; populated by create().
        self._src = None

    @classmethod
    def create(cls, uri: str, bitcode: bytes, src: bytes) -> Benchmark:
        """Build a benchmark from in-memory bitcode and its C source."""
        instance = cls.from_file_contents(uri, bitcode)
        instance._src = src  # pylint: disable=protected-access
        return instance

    @memoized_property
    def sources(self) -> Iterable[BenchmarkSource]:
        # A Csmith benchmark always has exactly one source file.
        single = BenchmarkSource(filename="source.c", contents=self._src)
        return [single]

    @property
    def source(self) -> str:
        """The single C source file, decoded as UTF-8 text."""
        return self._src.decode("utf-8")
class CsmithBuildError(DatasetInitError):
"""Error raised if :meth:`CsmithDataset.install()
<compiler_gym.datasets.CsmithDataset.install>` fails."""
def __init__(self, failing_stage: str, stdout: str, stderr: str):
install_instructions = {
"linux": "sudo apt install g++ m4",
"darwin": "brew install m4",
}[sys.platform]
super().__init__(
"\n".join(
[
f"Failed to build Csmith from source, `{failing_stage}` failed.",
"You may be missing installation dependencies. Install them using:",
f" {install_instructions}",
"See https://github.com/csmith-project/csmith#install-csmith for more details",
f"--- Start `{failing_stage}` logs: ---\n",
stdout,
stderr,
]
)
)
class CsmithDataset(Dataset):
"""A dataset which uses Csmith to generate programs.
Csmith is a tool that can generate random conformant C99 programs. It is
described in the publication:
<NAME>, <NAME>, <NAME>, and <NAME>. "Finding and
understanding bugs in C compilers." In Proceedings of the 32nd ACM
SIGPLAN conference on Programming Language Design and Implementation
(PLDI), pp. 283-294. 2011.
For up-to-date information about Csmith, see:
https://embed.cs.utah.edu/csmith/
Note that Csmith is a tool that is used to find errors in compilers. As
such, there is a higher likelihood that the benchmark cannot be used for an
environment and that :meth:`env.reset()
<compiler_gym.envs.CompilerEnv.reset>` will raise :class:`BenchmarkInitError
<compiler_gym.datasets.BenchmarkInitError>`.
Installation
------------
Using the CsmithDataset requires building the Csmith binary from source.
This is done automatically on the first call to :code:`install()`. Building
Csmith requires a working C++ toolchain. Install the required dependencies
using: :code:`sudo apt install -y g++ m4` on Linux, or :code:`brew install
m4` on macOS. :class:`DatasetInitError
<compiler_gym.datasets.DatasetInitError>` is raised if compilation fails.
See the `Csmith repo
<https://github.com/csmith-project/csmith#install-csmith>`_ for further
details.
"""
def __init__(self, site_data_base: Path, sort_order: int = 0):
super().__init__(
name="generator://csmith-v0",
description="Random conformant C99 programs",
references={
"Paper": "http://web.cse.ohio-state.edu/~rountev.1/5343/pdf/pldi11.pdf",
"Homepage": "https://embed.cs.utah.edu/csmith/",
},
license="BSD",
site_data_base=site_data_base,
sort_order=sort_order,
benchmark_class=CsmithBenchmark,
)
self.csmith_path = self.site_data_path / "bin" / "csmith"
csmith_include_dir = self.site_data_path / "include" / "csmith-2.3.0"
self._installed = False
self._build_lockfile = self.site_data_path / ".build.LOCK"
self._build_markerfile = self.site_data_path / ".built"
# The command that is used to compile an LLVM-IR bitcode file from a
# Csmith input. Reads from stdin, writes to stdout.
self.clang_compile_command: List[str] = ClangInvocation.from_c_file(
"-", # Read from stdin.
copt=[
"-xc",
"-ferror-limit=1", # Stop on first error.
"-w", # No warnings.
f"-I{csmith_include_dir}", # Include the Csmith headers.
],
).command(
outpath="-"
) # Write to stdout.
@property
def installed(self) -> bool:
# Fast path for repeated checks to 'installed' without a disk op.
if not self._installed:
self._installed = self._build_markerfile.is_file()
return self._installed
def install(self) -> None:
"""Download and build the Csmith binary."""
super().install()
if self.installed:
return
with _CSMITH_BUILD_LOCK, InterProcessLock(self._build_lockfile):
# Repeat the check to see if we have already installed the dataset
# now that we have acquired the lock.
if not self.installed:
self.logger.info("Downloading and building Csmith")
self._build_csmith(self.site_data_path, self.logger)
self._build_markerfile.touch()
@staticmethod
def _build_csmith(install_root: Path, logger: logging.Logger):
"""Download, build, and install Csmith to the given directory."""
tar_data = io.BytesIO(
download(
urls=[
"https://github.com/csmith-project/csmith/archive/refs/tags/csmith-2.3.0.tar.gz",
],
sha256="ba871c1e5a05a71ecd1af514fedba30561b16ee80b8dd5ba8f884eaded47009f",
)
)
# Csmith uses a standard `configure` + `make install` build process.
with tempfile.TemporaryDirectory(
dir=transient_cache_path("."), prefix="csmith-"
) as d:
with tarfile.open(fileobj=tar_data, mode="r:gz") as arc:
arc.extractall(d)
# The path of the extracted sources.
src_dir = Path(d) / "csmith-csmith-2.3.0"
logger.debug("Configuring Csmith at %s", d)
configure = subprocess.Popen(
["./configure", f"--prefix={install_root}"],
cwd=src_dir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
)
stdout, stderr = configure.communicate(timeout=600)
if configure.returncode:
raise CsmithBuildError("./configure", stdout, stderr)
logger.debug("Installing Csmith to %s", install_root)
make = subprocess.Popen(
["make", "-j", "install"],
cwd=src_dir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
)
stdout, stderr = make.communicate(timeout=600)
if make.returncode:
raise CsmithBuildError("make install", stdout, stderr)
@property
def size(self) -> float:
# Actually 2^32 - 1, but practically infinite for all intents and
# purposes.
return float("inf")
def benchmark_uris(self) -> Iterable[str]:
return (f"{self.name}/{i}" for i in range(UINT_MAX))
def benchmark(self, uri: str) -> CsmithBenchmark:
return self.benchmark_from_seed(int(uri.split("/")[-1]))
def _random_benchmark(self, random_state: np.random.Generator) -> Benchmark:
seed = random_state.integers(UINT_MAX)
return self.benchmark_from_seed(seed)
def benchmark_from_seed(self, seed: int) -> CsmithBenchmark:
"""Get a benchmark from a uint32 seed.
:param seed: A number in the range 0 <= n < 2^32.
:return: A benchmark instance.
"""
self.install()
# Run csmith with the given seed and pipe the output to clang to
# assemble a bitcode.
self.logger.debug("Exec csmith --seed %d", seed)
csmith = subprocess.Popen(
[str(self.csmith_path), "--seed", str(seed)],
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
)
# Generate the C source.
src, _ = csmith.communicate(timeout=300)
if csmith.returncode:
raise OSError(f"Csmith failed with seed {seed}")
# Compile to IR.
clang = subprocess.Popen(
self.clang_compile_command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
)
stdout, _ = clang.communicate(src, timeout=300)
if clang.returncode:
compile_cmd = " ".join(self.clang_compile_command)
raise BenchmarkInitError(
f"Compilation job failed!\n"
f"Csmith seed: {seed}\n"
f"Command: {compile_cmd}\n"
)
return self.benchmark_class.create(f"{self.name}/{seed}", stdout, src)
|
[
"subprocess.Popen",
"compiler_gym.datasets.BenchmarkSource",
"compiler_gym.envs.llvm.llvm_benchmark.ClangInvocation.from_c_file",
"compiler_gym.util.download.download",
"fasteners.InterProcessLock",
"threading.Lock",
"pathlib.Path",
"compiler_gym.util.runfiles_path.transient_cache_path",
"compiler_gym.datasets.benchmark.BenchmarkInitError",
"tarfile.open"
] |
[((1019, 1025), 'threading.Lock', 'Lock', ([], {}), '()\n', (1023, 1025), False, 'from threading import Lock\n'), ((9664, 9787), 'subprocess.Popen', 'subprocess.Popen', (['self.clang_compile_command'], {'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.DEVNULL'}), '(self.clang_compile_command, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE, stderr=subprocess.DEVNULL)\n', (9680, 9787), False, 'import subprocess\n'), ((1598, 1654), 'compiler_gym.datasets.BenchmarkSource', 'BenchmarkSource', ([], {'filename': '"""source.c"""', 'contents': 'self._src'}), "(filename='source.c', contents=self._src)\n", (1613, 1654), False, 'from compiler_gym.datasets import Benchmark, BenchmarkSource, Dataset\n'), ((6062, 6100), 'fasteners.InterProcessLock', 'InterProcessLock', (['self._build_lockfile'], {}), '(self._build_lockfile)\n', (6078, 6100), False, 'from fasteners import InterProcessLock\n'), ((6653, 6840), 'compiler_gym.util.download.download', 'download', ([], {'urls': "['https://github.com/csmith-project/csmith/archive/refs/tags/csmith-2.3.0.tar.gz'\n ]", 'sha256': '"""ba871c1e5a05a71ecd1af514fedba30561b16ee80b8dd5ba8f884eaded47009f"""'}), "(urls=[\n 'https://github.com/csmith-project/csmith/archive/refs/tags/csmith-2.3.0.tar.gz'\n ], sha256=\n 'ba871c1e5a05a71ecd1af514fedba30561b16ee80b8dd5ba8f884eaded47009f')\n", (6661, 6840), False, 'from compiler_gym.util.download import download\n'), ((7405, 7556), 'subprocess.Popen', 'subprocess.Popen', (["['./configure', f'--prefix={install_root}']"], {'cwd': 'src_dir', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'universal_newlines': '(True)'}), "(['./configure', f'--prefix={install_root}'], cwd=src_dir,\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)\n", (7421, 7556), False, 'import subprocess\n'), ((7905, 8039), 'subprocess.Popen', 'subprocess.Popen', (["['make', '-j', 'install']"], {'cwd': 'src_dir', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 
'universal_newlines': '(True)'}), "(['make', '-j', 'install'], cwd=src_dir, stdout=subprocess.\n PIPE, stderr=subprocess.PIPE, universal_newlines=True)\n", (7921, 8039), False, 'import subprocess\n'), ((10009, 10108), 'compiler_gym.datasets.benchmark.BenchmarkInitError', 'BenchmarkInitError', (['f"""Compilation job failed!\nCsmith seed: {seed}\nCommand: {compile_cmd}\n"""'], {}), '(\n f"""Compilation job failed!\nCsmith seed: {seed}\nCommand: {compile_cmd}\n""")\n', (10027, 10108), False, 'from compiler_gym.datasets.benchmark import BenchmarkInitError, BenchmarkWithSource\n'), ((5256, 5358), 'compiler_gym.envs.llvm.llvm_benchmark.ClangInvocation.from_c_file', 'ClangInvocation.from_c_file', (['"""-"""'], {'copt': "['-xc', '-ferror-limit=1', '-w', f'-I{csmith_include_dir}']"}), "('-', copt=['-xc', '-ferror-limit=1', '-w',\n f'-I{csmith_include_dir}'])\n", (5283, 5358), False, 'from compiler_gym.envs.llvm.llvm_benchmark import ClangInvocation\n'), ((7134, 7177), 'tarfile.open', 'tarfile.open', ([], {'fileobj': 'tar_data', 'mode': '"""r:gz"""'}), "(fileobj=tar_data, mode='r:gz')\n", (7146, 7177), False, 'import tarfile\n'), ((7292, 7299), 'pathlib.Path', 'Path', (['d'], {}), '(d)\n', (7296, 7299), False, 'from pathlib import Path\n'), ((7057, 7082), 'compiler_gym.util.runfiles_path.transient_cache_path', 'transient_cache_path', (['"""."""'], {}), "('.')\n", (7077, 7082), False, 'from compiler_gym.util.runfiles_path import transient_cache_path\n')]
|
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
from __future__ import absolute_import
import numpy
import pytest
import uproot
@pytest.mark.network
def test():
with uproot.open(
"https://starterkit.web.cern.ch/starterkit/data/advanced-python-2019/RD_distribution.root:tree"
) as f:
whole_branch = f["vchi2_b"].array(library="np")
assert whole_branch[0] == 5.234916687011719
assert whole_branch[-1] == 12.466843605041504
whole_branch = f["mu_pt_sum"].array(library="np")
assert whole_branch[0] == 26.4675350189209
assert whole_branch[-1] == 39.84319305419922
|
[
"uproot.open"
] |
[((211, 329), 'uproot.open', 'uproot.open', (['"""https://starterkit.web.cern.ch/starterkit/data/advanced-python-2019/RD_distribution.root:tree"""'], {}), "(\n 'https://starterkit.web.cern.ch/starterkit/data/advanced-python-2019/RD_distribution.root:tree'\n )\n", (222, 329), False, 'import uproot\n')]
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import logging
import re
import sys
import threading
from queue import Queue
from typing import Optional
from urllib.parse import urlparse
from pyre_extensions import none_throws
from torchx import specs
from torchx.cli.cmd_base import SubCommand
from torchx.runner import Runner, get_runner
from torchx.specs.api import make_app_handle
GREEN = "\033[32m"
ENDC = "\033[0m"
logger: logging.Logger = logging.getLogger(__name__)
def validate(job_identifier: str) -> None:
if not re.match(r"^\w+://[^/.]*/[^/.]+/[^/.]+(/(\d+,?)+)?$", job_identifier):
print(
f"{job_identifier} is not of the form SCHEDULER://[SESSION_NAME]/APP_ID/ROLE_NAME/[REPLICA_IDS,...]",
file=sys.stderr,
)
sys.exit(1)
def print_log_lines(
runner: Runner,
app_handle: str,
role_name: str,
replica_id: int,
regex: str,
should_tail: bool,
exceptions: "Queue[Exception]",
) -> None:
try:
for line in runner.log_lines(
app_handle, role_name, replica_id, regex, should_tail=should_tail
):
print(f"{GREEN}{role_name}/{replica_id}{ENDC} {line}")
except Exception as e:
exceptions.put(e)
raise
def get_logs(identifier: str, regex: Optional[str], should_tail: bool = False) -> None:
validate(identifier)
url = urlparse(identifier)
scheduler_backend = url.scheme
session_name = url.netloc or "default"
# path is of the form ["", "app_id", "master", "0"]
path = url.path.split("/")
app_id = path[1]
role_name = path[2]
runner = get_runner(name=session_name)
app_handle = make_app_handle(scheduler_backend, session_name, app_id)
app = none_throws(runner.describe(app_handle))
if len(path) == 4:
replica_ids = [int(id) for id in path[3].split(",") if id]
else:
# print all replicas for the role
num_replicas = find_role_replicas(app, role_name)
if num_replicas is None:
valid_ids = "\n".join(
[
f" {idx}: {scheduler_backend}://{app_id}/{role.name}"
for idx, role in enumerate(app.roles)
]
)
print(
f"No role [{role_name}] found for app: {app.name}."
f" Did you mean one of the following:\n{valid_ids}",
file=sys.stderr,
)
sys.exit(1)
replica_ids = list(range(0, num_replicas))
threads = []
exceptions = Queue()
for replica_id in replica_ids:
thread = threading.Thread(
target=print_log_lines,
args=(
runner,
app_handle,
role_name,
replica_id,
regex,
should_tail,
exceptions,
),
)
thread.daemon = True
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
# Retrieve all exceptions, print all except one and raise the first recorded exception
threads_exceptions = []
while not exceptions.empty():
threads_exceptions.append(exceptions.get())
if len(threads_exceptions) > 0:
for i in range(1, len(threads_exceptions)):
logger.error(threads_exceptions[i])
raise threads_exceptions[0]
def find_role_replicas(app: specs.AppDef, role_name: str) -> Optional[int]:
for role in app.roles:
if role_name == role.name:
return role.num_replicas
return None
class CmdLog(SubCommand):
def add_arguments(self, subparser: argparse.ArgumentParser) -> None:
subparser.add_argument(
"--regex",
type=str,
help="regex filter",
)
subparser.add_argument(
"-t",
"--tail",
action="store_true",
help="Tail logs",
)
subparser.add_argument(
"identifier",
type=str,
help="host identifier (scheduler_backend://[session_name]/app_id/role_name/replica_id)",
)
def run(self, args: argparse.Namespace) -> None:
get_logs(args.identifier, args.regex, args.tail)
|
[
"torchx.runner.get_runner",
"threading.Thread",
"re.match",
"logging.getLogger",
"torchx.specs.api.make_app_handle",
"sys.exit",
"queue.Queue",
"urllib.parse.urlparse"
] |
[((648, 675), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (665, 675), False, 'import logging\n'), ((1577, 1597), 'urllib.parse.urlparse', 'urlparse', (['identifier'], {}), '(identifier)\n', (1585, 1597), False, 'from urllib.parse import urlparse\n'), ((1823, 1852), 'torchx.runner.get_runner', 'get_runner', ([], {'name': 'session_name'}), '(name=session_name)\n', (1833, 1852), False, 'from torchx.runner import Runner, get_runner\n'), ((1870, 1926), 'torchx.specs.api.make_app_handle', 'make_app_handle', (['scheduler_backend', 'session_name', 'app_id'], {}), '(scheduler_backend, session_name, app_id)\n', (1885, 1926), False, 'from torchx.specs.api import make_app_handle\n'), ((2747, 2754), 'queue.Queue', 'Queue', ([], {}), '()\n', (2752, 2754), False, 'from queue import Queue\n'), ((732, 802), 're.match', 're.match', (['"""^\\\\w+://[^/.]*/[^/.]+/[^/.]+(/(\\\\d+,?)+)?$"""', 'job_identifier'], {}), "('^\\\\w+://[^/.]*/[^/.]+/[^/.]+(/(\\\\d+,?)+)?$', job_identifier)\n", (740, 802), False, 'import re\n'), ((979, 990), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (987, 990), False, 'import sys\n'), ((2807, 2933), 'threading.Thread', 'threading.Thread', ([], {'target': 'print_log_lines', 'args': '(runner, app_handle, role_name, replica_id, regex, should_tail, exceptions)'}), '(target=print_log_lines, args=(runner, app_handle,\n role_name, replica_id, regex, should_tail, exceptions))\n', (2823, 2933), False, 'import threading\n'), ((2648, 2659), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2656, 2659), False, 'import sys\n')]
|
from __future__ import print_function
from functools import wraps, partial
from strict_functions import overload, attempt
from inspect import getsource
import itertools
from itertools import permutations, islice
import sys
import os
import operator
import generators
from generators import iterable, consume, itemgetter, rps
class OrderError(Exception):
pass
class Generator:
def __init__(self, input_iterable):
#print('__init__ -', locals())
self._iterable = iter(input_iterable)
def __iter__(self):
return self._iterable
def __repr__(self):
return '<Generator with frickin laser beams at {}>'.format(hex(id(self)))
__str__ = __repr__
def to(self, you_want_me_to_wear_what):
''' use this function to convert the generator into another type '''
assert callable(you_want_me_to_wear_what), 'Generator.to needs a callable argument'
return you_want_me_to_wear_what(self)
@staticmethod
def __chainable_method__(fn):
@wraps(fn)
def wrapper(*a, **k):
out = fn(*a, **k)
if iterable(out):
return Generator(out)
else:
return out
return wrapper
@staticmethod
def __grab_first__(t, l):
if type(t)==type:
check = lambda i, t=t:isinstance(i, t)
elif callable(t):
check = t
else:
raise ValueError('t({}) is not a function or type'.format(t))
for i,v in enumerate(list(l)):
if check(v):
return l.pop(i)
raise OrderError()
@staticmethod
def __require_args__(count, args):
if count!=len(args):
raise ValueError(
'wrong arg count\nneeded\n {} args\nreceived\n {} args\nraw\n {}'.format(
count,
len(args),
repr(tuple(args))
)
)
@staticmethod
def __printable_fn__(fn):
fn_repr = repr(fn)
return attempt(
partial(getsource, fn),
fn_repr
) if '<lambda' in fn_repr else (
getattr(fn, '__name__', fn_repr)
)
@staticmethod
def __organize_args__(arg_pattern, args, fn=None, name=None):
_args = args[:]
Generator.__require_args__(len(arg_pattern), args)
try:
return list(map(
partial(
Generator.__grab_first__,
l=args[:]
),
arg_pattern
))
except OrderError:
if fn is not None:
# this is for clean debugging
_og_args = repr(tuple(args[:]))
# try other combinations
for i in permutations(args):
try:
return Generator.__organize_args__(
arg_pattern,
list(i),
)
except OrderError:
pass
raise ValueError(
'-\ncouldnt find a valid ordering for:\n Generator.{}\nrequired:\n {}\nrecieved:\n {}'.format(
name,
'\n '.join(
map(
Generator.__printable_fn__, arg_pattern
)
),
_og_args
)
)
else:
raise OrderError()
@staticmethod
def add_method(fn, arg_pattern, name=None, chainable=True):
if name is None:
name = fn.__name__
@wraps(fn)
def method(*args) -> Generator:
return fn(*Generator.__organize_args__(
arg_pattern,
list(args),
fn,
name=name
))
if chainable:
method = wraps(fn)(Generator.__chainable_method__(method))
if hasattr(Generator, name):
method = overload(
method,
getattr(Generator, name)
)
setattr(
Generator,
name,
method
)
@staticmethod
def add_methods(methods_to_add):
''' use this to bulk add new methods to Generator '''
for i in methods_to_add:
try:
Generator.add_method(*i)
except Exception as ex:
raise Exception('issue adding {} - {}'.format(repr(i), ex))
def __next__(self):
return next(self._iterable)
def next(self):
return next(self._iterable)
def print(self, before='', use_repr=False, **print_options):
return Generator(self.side_task((
lambda i:print('{}{}'.format(before, repr(i)), **print_options)
) if use_repr else (
lambda i:print('{}{}'.format(before, i), **print_options)
)))
def skip_errors(self, *, log=True, logger=None, ex_type=Exception):
return Generator(
generators.skip_errors(
self,
log=log,
logger=logger,
ex_type=ex_type
)
)
benchmark = rps
#def __slice__(self, s):
# raise NotImplementedError()
def __negative_slice__(self, s):
if s.step is None:
return {#start,stop
(None, False):lambda:(self.last(abs(s.stop))), # [:-1]
(False, None):lambda:(self.skip_last(abs(s.start))), # [-1:]
(True, False):lambda:(self.skip(s.start).skip_last(abs(s.stop))), # [5:-1]
(False, True):lambda:(self.last(abs(s.start)).first(s.stop)), # [-10:6]
(False,False):lambda:(self.last(abs(s.start)).skip_last(abs(s.stop))) # [-5:-1]
}[tuple(map((lambda i:None if i is None else i>0), [s.start, s.stop]))]()
else:
raise NotImplemented("This will be possible once I can map out all the possibilities")
def __getitem__(self, a):
if isinstance(a, slice):
return Generator((
islice(self, a.start, a.stop, a.step)
) if all(i is None or 0<i for i in (a.start, a.stop, a.step)) else (
self.__negative_slice__(a)
))
elif isinstance(a, int):
# get single item
if a == 0:
return next(self)
elif a > 0:
return next(self.skip(a-1))
elif a < 0:
return next(self.last(abs(a)))
elif isinstance(a, tuple): # multi-item slice
return Generator(generators.itemgetter(self, a))
else:
raise ValueError('invalid slice argument - {}'.format(repr(a)))
# add the stuff from generators
Generator.add_methods([
[generators.map, [Generator, callable]],
[generators.map, [Generator, callable, callable]],
[generators.map, [Generator, callable, callable, callable]],
[generators.map, [Generator, callable, callable, callable, callable]],
[generators.map_parallel, [Generator, callable, int]],
[generators.map_multithread, [Generator, callable, int]],
[generators.map_multicore, [Generator, callable, int]],
[generators.all_subslices, [Generator]],
[generators.all_substrings, [Generator]],
[generators.alternator, [Generator, iterable], 'alternate'],
[generators.alternator, [Generator, iterable, iterable], 'alternate'],
[generators.alternator, [Generator, iterable, iterable, iterable], 'alternate'],
[generators.alternator, [Generator, iterable, iterable, iterable, iterable], 'alternate'],
[generators.apply_to_last, [Generator, callable]],
[generators.chain, [Generator]],
[generators.chain, [Generator, iterable]],
[generators.chain, [Generator, iterable, iterable]],
[generators.chain, [Generator, iterable, iterable, iterable]],
[generators.chain, [Generator, iterable, iterable, iterable, iterable]],
[generators.chunks, [Generator, int], 'chunk'],
[generators.chunks, [Generator, int, int], 'chunk'],
[generators.chunks, [Generator, int, int, int], 'chunk'],
[generators.chunks, [Generator, int, int, int, int], 'chunk'],
[generators.chunk_on, [Generator, callable]],
[generators.consume, [Generator]], # leave this one here for backwards compatability
[generators.consume, [Generator], 'run'],
[generators.every_other, [Generator, int]],
[generators.first, [Generator]],
[generators.first, [Generator, int]],
[generators.fork, [Generator, int]],
[generators.iterable, [object]],
[generators.ifmap, [Generator, callable, callable]],
[generators.last, [Generator]],
[generators.last, [Generator, int]],
[generators.multi_ops, [Generator, callable]],
[generators.multi_ops, [Generator, callable, callable]],
[generators.multi_ops, [Generator, callable, callable, callable]],
[generators.multi_ops, [Generator, callable, callable, callable, callable]],
[generators.repeater, [Generator], 'repeat'],
[generators.repeater, [Generator, int], 'repeat'],
[generators.reverse, [Generator]],
[generators.side_task, [Generator, callable]],
[generators.side_task, [Generator, callable, callable]],
[generators.side_task, [Generator, callable, callable, callable]],
[generators.side_task, [Generator, callable, callable, callable, callable]],
[generators.skip, [Generator]],
[generators.skip, [Generator, int]],
[generators.skip_first, [Generator]],
[generators.skip_first, [Generator, int]],
[generators.skip_last, [Generator]],
[generators.skip_last, [Generator, int]],
[generators.split, [Generator, iterable]],
[generators.split, [Generator, iterable, bool]],
[generators.switch, [Generator, callable, dict]],
[generators.switch, [Generator, callable, dict, callable]],
[generators.tee, [Generator, str]],
[generators.tee, [Generator, str, callable]],
[generators.timed_pipe, [Generator], 'timed'],
[generators.timed_pipe, [Generator, int], 'timed'],
[generators.timed_pipe, [Generator], 'time_limit'],
[generators.timed_pipe, [Generator, int], 'time_limit'],
[generators.unfork, [Generator]],
[generators.uniq, [Generator]],
[generators.window, [Generator]],
[generators.window, [Generator, int]]
])
# add the stuff from builtins
Generator.add_methods([
[lambda g,k=None:(i for i in sorted(g, key=k)), [Generator], 'sort'],
[lambda g,k=None:(i for i in sorted(g, key=k)), [Generator, callable], 'sort'],
[max, [Generator], None, False],
[min, [Generator], None, False],
[sum, [Generator], None, False],
[enumerate, [Generator]]
])
def _accumulate(iterable, func=(lambda a,b:a+b)): # this was from the itertools documentation
'Return running totals'
# accumulate([1,2,3,4,5]) --> 1 3 6 10 15
# accumulate([1,2,3,4,5], operator.mul) --> 1 2 6 24 120
it = iter(iterable)
try:
total = next(it)
except StopIteration:
return
yield total
for element in it:
total = func(total, element)
yield total
# add stuff from itertools
Generator.add_methods([
[itertools.compress, [Generator, iterable]],
[itertools.dropwhile, [callable, Generator]],
[itertools.groupby, [Generator, callable]],
[itertools.takewhile, [callable, Generator]],
[itertools.permutations, [Generator]],
[itertools.combinations, [Generator]],
[itertools.permutations, [Generator, int]],
[itertools.combinations, [Generator, int]],
[itertools.combinations_with_replacement, [Generator, int]],
[(lambda g,r:itertools.product(g, repeat=r)), [Generator, int], 'product'],
[(lambda g,r:itertools.product(g, repeat=r)), [Generator], 'product'],
[filter, [callable, Generator]],
[getattr(itertools, 'accumulate', _accumulate), [Generator, callable], 'accumulate'],
[getattr(itertools, 'accumulate', _accumulate), [Generator], 'accumulate'],
[getattr(itertools, 'ifilter', filter), [callable, Generator], 'filter'],
[getattr(itertools, 'filterfalse', getattr(itertools, 'ifilterfalse', (lambda f,g:(i for i in g if f(i))))), [Generator, callable], 'filterfalse'],
[getattr(itertools, 'izip', zip), [Generator, int], 'zip']
])
# aliases for better tab completion discovery
Generator.parallel_map = Generator.map_parallel
Generator.multicore_map = Generator.map_multicore
Generator.multithread_map = Generator.map_multithread
if __name__ == '__main__':
g = Generator('hello')
print(g)
print(g.all_subslices().map(print, print).alternate('wwww', 'tttt').to(list))
print(Generator(range(10)).map(print).to(list))
print(Generator(range(10))[8])
print(Generator(range(10))[-8])
print(Generator(range(10))[1:5].to(list))
print(Generator(range(10))[4:].to(list))
print(Generator(range(10))[:4].to(list))
print(Generator(range(10))[-5:-1].to(list))
print(Generator(range(10))[-4:].to(list))
print(Generator(range(10))[:-4].to(list))
print(Generator(range(10))[1:5:2].to(list))
print(Generator(range(10))[4::2].to(list))
print(Generator(range(10))[:4:2].to(list))
print(Generator(range(10))[1,5,2].to(list))
print(Generator(range(10))[4,2].to(list))
print(Generator(range(10))[2,4].to(list))
print(Generator(range(10))[-1,-5,-2].to(list))
print(Generator(range(10))[-4,-2].to(list))
print(Generator(range(10))[-2,-4].to(list))
print(Generator(range(10))[-2,-4].max())
print(Generator(range(10))[-2,-4].min())
print(Generator(range(10)).print()[-2,-4].print().sum())
print(Generator(range(10)).print(end='\n-\n').consume())
print(Generator(range(10)).print(end='\n--\n').run())
|
[
"generators.iterable",
"generators.skip_errors",
"functools.partial",
"itertools.permutations",
"generators.itemgetter",
"functools.wraps",
"itertools.product",
"itertools.islice"
] |
[((1016, 1025), 'functools.wraps', 'wraps', (['fn'], {}), '(fn)\n', (1021, 1025), False, 'from functools import wraps, partial\n'), ((3754, 3763), 'functools.wraps', 'wraps', (['fn'], {}), '(fn)\n', (3759, 3763), False, 'from functools import wraps, partial\n'), ((1101, 1114), 'generators.iterable', 'iterable', (['out'], {}), '(out)\n', (1109, 1114), False, 'from generators import iterable, consume, itemgetter, rps\n'), ((5146, 5215), 'generators.skip_errors', 'generators.skip_errors', (['self'], {'log': 'log', 'logger': 'logger', 'ex_type': 'ex_type'}), '(self, log=log, logger=logger, ex_type=ex_type)\n', (5168, 5215), False, 'import generators\n'), ((2067, 2089), 'functools.partial', 'partial', (['getsource', 'fn'], {}), '(getsource, fn)\n', (2074, 2089), False, 'from functools import wraps, partial\n'), ((4017, 4026), 'functools.wraps', 'wraps', (['fn'], {}), '(fn)\n', (4022, 4026), False, 'from functools import wraps, partial\n'), ((11784, 11814), 'itertools.product', 'itertools.product', (['g'], {'repeat': 'r'}), '(g, repeat=r)\n', (11801, 11814), False, 'import itertools\n'), ((11864, 11894), 'itertools.product', 'itertools.product', (['g'], {'repeat': 'r'}), '(g, repeat=r)\n', (11881, 11894), False, 'import itertools\n'), ((2449, 2493), 'functools.partial', 'partial', (['Generator.__grab_first__'], {'l': 'args[:]'}), '(Generator.__grab_first__, l=args[:])\n', (2456, 2493), False, 'from functools import wraps, partial\n'), ((2814, 2832), 'itertools.permutations', 'permutations', (['args'], {}), '(args)\n', (2826, 2832), False, 'from itertools import permutations, islice\n'), ((6243, 6280), 'itertools.islice', 'islice', (['self', 'a.start', 'a.stop', 'a.step'], {}), '(self, a.start, a.stop, a.step)\n', (6249, 6280), False, 'from itertools import permutations, islice\n'), ((6762, 6792), 'generators.itemgetter', 'generators.itemgetter', (['self', 'a'], {}), '(self, a)\n', (6783, 6792), False, 'import generators\n')]
|
from utils import log
from utils import correlate
import random
class Predictor:
def __init__(self, history_db, orders_db):
self.history_db = history_db
self.orders_db = orders_db
def predict_simple(self, history_points, statistics):
correlations = [[], [], [], [], []]
guessedmain = []
guessedeuro = []
means, mins, maxs, distance_means, devianceplus, devianceminus, directions, distances, most_frequent = statistics
# highest correlation is always to the next one
for x in range(0, 5):
y = x + 1
while y<5:
corr = correlate(directions[x], directions[y])
correlations[x].append(corr)
log("Correlation: [%d -> %d] %f" % (x + 1, y + 1, corr))
y +=1
# First value
guessdir = 1
if directions[0][-1] >=5:
guessdir = -1
guessedmain.append(int(history_points[0][-1] + means[0]*guessdir) % 50)
# Other vals
for x in range(1, 5):
corr = abs(max(correlations[x-1]))
if corr < 0.5: # correlation higher than 50 % then we use previous direction
guessdir = 1
if directions[x][-1] >=5:
guessdir =-1
#print(guessed)
guessedmain.append(int(history_points[x][-1]+means[x]*guessdir) % 50)
# sanity checks
for x in range(len(guessedmain)):
guessedNumber = guessedmain.pop(x)
if guessedNumber <= 0 or guessedNumber in guessedmain:
value = random.randint(1, 49)
while value in guessedmain:
value = random.randint(1, 49)
guessedmain.append(value)
else:
guessedmain.append(guessedNumber)
# Euro vals
guessdir = 1
if directions[5][-1] >=5:
guessdir = -1
guessedeuro.append(int(history_points[5][-1] + means[5]*guessdir)%10)
guessedeuro.append(int(history_points[6][-1] + means[5]*guessdir)%10)
# sanity checks
for x in range(len(guessedeuro)):
if guessedeuro[x] <= 0:
value = random.randint(1, 10)
while value in guessedeuro:
value = random.randint(1, 10)
guessedeuro[x] = value
guessedmain.sort()
return guessedmain+guessedeuro
|
[
"random.randint",
"utils.log",
"utils.correlate"
] |
[((634, 673), 'utils.correlate', 'correlate', (['directions[x]', 'directions[y]'], {}), '(directions[x], directions[y])\n', (643, 673), False, 'from utils import correlate\n'), ((735, 791), 'utils.log', 'log', (["('Correlation: [%d -> %d] %f' % (x + 1, y + 1, corr))"], {}), "('Correlation: [%d -> %d] %f' % (x + 1, y + 1, corr))\n", (738, 791), False, 'from utils import log\n'), ((1608, 1629), 'random.randint', 'random.randint', (['(1)', '(49)'], {}), '(1, 49)\n', (1622, 1629), False, 'import random\n'), ((2220, 2241), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (2234, 2241), False, 'import random\n'), ((1702, 1723), 'random.randint', 'random.randint', (['(1)', '(49)'], {}), '(1, 49)\n', (1716, 1723), False, 'import random\n'), ((2314, 2335), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (2328, 2335), False, 'import random\n')]
|
from django import template
import random
register = template.Library()
@register.simple_tag
def pet_name():
pet_names = ['happy little shark', 'shimmery merperson', 'delicous ham sandwich',
'button-nosed field mouse', 'shiny little june bug', 'titan, you', 'sweet little croissant',
'lovely little poppy', 'graceful forrest moose', 'champion of the day', 'hero of tomorrow']
pet_name = random.choice(pet_names)
return pet_name
@register.simple_tag
def adjective():
adjectives = ['an unbelievably snazzy', 'a romp-roarin\'', 'a fresh-smelling', 'a fantastic',
'a stupendous', 'a shockingly awesome', 'a super terrific']
adjective = random.choice(adjectives)
return adjective
@register.simple_tag
def compliment():
compliments = ['amazing', 'spectacular', 'dashing, like a superhero', 'svelt',
'lovely', 'terribly attractive', 'so very smart',
'wise, like an owl', 'sparkly' ]
compliment = random.choice(compliments)
return compliment
|
[
"django.template.Library",
"random.choice"
] |
[((54, 72), 'django.template.Library', 'template.Library', ([], {}), '()\n', (70, 72), False, 'from django import template\n'), ((405, 429), 'random.choice', 'random.choice', (['pet_names'], {}), '(pet_names)\n', (418, 429), False, 'import random\n'), ((668, 693), 'random.choice', 'random.choice', (['adjectives'], {}), '(adjectives)\n', (681, 693), False, 'import random\n'), ((947, 973), 'random.choice', 'random.choice', (['compliments'], {}), '(compliments)\n', (960, 973), False, 'import random\n')]
|
import ipaddress
import re
import rdap
from rdap import RdapAsn
from rdap.exceptions import RdapException, RdapHTTPError, RdapNotFoundError
import requests
from django.utils.translation import ugettext_lazy as _
from peeringdb_server import settings
# RFC 5398 documentation asn range
ASN_RFC_5398_16BIT = (64496, 64511)
ASN_RFC_5398_32BIT = (65536, 65551)
# RFC 6996 private asn range
ASN_RFC_6996_16BIT = (64512, 65534)
ASN_RFC_6996_32BIT = (4200000000, 4294967294)
# RFC 7003 last asn
ASN_LAST_16BIT = (65535, 65535)
ASN_LAST_32BIT = (4294967295, 4294967295)
ASN_TRANS = (23456, 23456)
BOGON_ASN_RANGES = [
# RFC 5398 - documentation 16-bit
ASN_RFC_5398_16BIT,
# RFC 5398 - documentation 32-bit
ASN_RFC_5398_32BIT,
# RFC 6996 - private 16-bit
ASN_RFC_6996_16BIT,
# RFC 6996 - private 32-bit
ASN_RFC_6996_32BIT,
# RFC 7003 - last asn 16-bit
ASN_LAST_16BIT,
# RFC 7003 - last asn 32-bit
ASN_LAST_32BIT,
# trans
ASN_TRANS
]
# the following bogon asn ranges are allowed on envionments
# where TUTORIAL_MODE is set to True
TUTORIAL_ASN_RANGES = [
# RFC 5398 - documentation 16-bit
ASN_RFC_5398_16BIT,
# RFC 5398 - documentation 32-bit
ASN_RFC_5398_32BIT,
# RFC 6996 - private 16-bit
ASN_RFC_6996_16BIT,
# RFC 6996 - private 32-bit
ASN_RFC_6996_32BIT,
]
class BogonAsn(rdap.RdapAsn):
"""
On tutorial mode environments we will return an instance
of this to provide an rdapasn result for asns in the
private and documentation ranges
"""
def __init__(self, asn):
name = "AS{}".format(asn)
self._parsed = {
"name":name,
"org_name":name,
"org_address":None,
"emails":[]
}
class RdapLookup(rdap.RdapClient):
"""
Does RDAP lookups against defined URL.
"""
def __init__(self):
# create rdap config
config = dict(
bootstrap_url=settings.RDAP_URL.rstrip('/'),
lacnic_apikey=settings.RDAP_LACNIC_APIKEY,
)
super(RdapLookup, self).__init__(config)
def get_asn(self, asn):
"""
We handle asns that fall into the private/documentation ranges
manually - others are processed normally through rdap lookup
"""
if asn_is_bogon(asn):
if settings.TUTORIAL_MODE and asn_is_in_ranges(asn, TUTORIAL_ASN_RANGES):
return BogonAsn(asn)
else:
raise RdapException(_("ASNs in this range " \
"are not allowed in this environment"))
return super(RdapLookup, self).get_asn(asn)
def asn_is_bogon(asn):
"""
Test if an asn is bogon by being either in the documentation
or private asn ranges
Arguments:
- asn<int>
Return:
- bool: True if in bogon range
"""
return asn_is_in_ranges(asn, BOGON_ASN_RANGES)
def asn_is_in_ranges(asn, ranges):
"""
Test if an asn falls within any of the ranges provided
Arguments:
- asn<int>
- ranges<list[tuple(min,max)]>
Return:
- bool
"""
asn = int(asn)
for as_range in ranges:
if asn >= as_range[0] and asn <= as_range[1]:
return True
return False
def network_is_bogon(network):
"""
Returns if the passed ipaddress network is a bogon
Arguments:
- network <ipaddress.IPv4Network|ipaddress.IPv6Network>
Return:
- bool
"""
return not network.is_global or network.is_reserved
def network_is_pdb_valid(network):
"""
Return if the passed ipaddress network is in pdb valid
address space
Arguments:
- network <ipaddress.IPv4Network|ipaddress.IPv6Network>
Return:
- bool
"""
if network.is_multicast or network_is_bogon(network):
return False
if network.version == 4:
return True
# not allowed v6 blocks
v6_invalid = [
# 2002::/16 - RFC 3068 - 6to4 prefix
0x2002,
# 3ffe::/16 - RFC 5156 - used for the 6bone but was returned
0x3ffe,
# fec0::/10 - RFC 4291 - Reserved by IETF
0xfec0,
# ff00::/8 - RFC 4291 - Multicast
0xff00,
]
if int(network.network_address) >> 112 in v6_invalid:
return False
return True
def get_prefix_protocol(prefix):
"""
Takes a network address space prefix string and returns
a string describing the protocol
Will raise a ValueError if it cannot determine protocol
Returns:
str: IPv4 or IPv6
"""
try:
ipaddress.IPv4Network(prefix)
return "IPv4"
except ipaddress.AddressValueError:
try:
ipaddress.IPv6Network(prefix)
return "IPv6"
except ipaddress.AddressValueError:
raise ValueError("Prefix invalid")
|
[
"ipaddress.IPv6Network",
"django.utils.translation.ugettext_lazy",
"ipaddress.IPv4Network",
"peeringdb_server.settings.RDAP_URL.rstrip"
] |
[((4612, 4641), 'ipaddress.IPv4Network', 'ipaddress.IPv4Network', (['prefix'], {}), '(prefix)\n', (4633, 4641), False, 'import ipaddress\n'), ((1960, 1989), 'peeringdb_server.settings.RDAP_URL.rstrip', 'settings.RDAP_URL.rstrip', (['"""/"""'], {}), "('/')\n", (1984, 1989), False, 'from peeringdb_server import settings\n'), ((4729, 4758), 'ipaddress.IPv6Network', 'ipaddress.IPv6Network', (['prefix'], {}), '(prefix)\n', (4750, 4758), False, 'import ipaddress\n'), ((2507, 2566), 'django.utils.translation.ugettext_lazy', '_', (['"""ASNs in this range are not allowed in this environment"""'], {}), "('ASNs in this range are not allowed in this environment')\n", (2508, 2566), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
import torch
import numpy as np
import pandas as pd
import os
from RBM import RBM
from load_dataset import MNIST
import cv2
from PIL import Image
from matplotlib import pyplot as plt
def image_beautifier(names, final_name):
image_names = sorted(names)
images = [Image.open(x) for x in names]
widths, heights = zip(*(i.size for i in images))
total_width = sum(widths)
max_height = max(heights)
new_im = Image.new('RGB', (total_width, max_height))
x_offset = 0
for im in images:
new_im.paste(im, (x_offset,0))
x_offset += im.size[0]
new_im.save(final_name)
img = cv2.imread(final_name)
img = cv2.resize(img, (img.shape[1]//2, img.shape[0]//2))
cv2.imwrite(final_name, img)
def gen_displayable_images():
suffix = '_image.jpg'
for n in range(10):
prefix = './images_RBM/digitwise/'+str(n)+'_'
names = ['original', 'hidden', 'reconstructed']
names = [prefix+name+suffix for name in names]
image_beautifier(names, './images_RBM/'+str(n)+'.jpg')
if __name__ == '__main__':
mnist = MNIST()
train_x, train_y, test_x, test_y = mnist.load_dataset()
vn = train_x.shape[1]
hn = 2500
rbm = RBM(vn, hn)
rbm.load_rbm('mnist_trained_rbm.pt')
for n in range(10):
x = test_x[np.where(test_y==n)[0][0]]
x = x.unsqueeze(0)
hidden_image = []
gen_image = []
for k in range(rbm.k):
_, hk = rbm.sample_h(x)
_, vk = rbm.sample_v(hk)
gen_image.append(vk.numpy())
hidden_image.append(hk.numpy())
hidden_image = np.array(hidden_image)
hidden_image = np.mean(hidden_image, axis=0)
gen_image = np.array(gen_image)
gen_image = np.mean(gen_image, axis=0)
image = x.numpy()
image = mnist.inv_transform_normalizer(image)[0]
hidden_image = (hidden_image*255)[0]
gen_image = mnist.inv_transform_normalizer(gen_image)[0]
image = np.reshape(image, (28, 28))
hidden_image = np.reshape(hidden_image, (50, 50))
gen_image = np.reshape(gen_image, (28, 28))
image = image.astype(np.int)
hidden_image = hidden_image.astype(np.int)
gen_image = gen_image.astype(np.int)
print(image.shape, hidden_image.shape, gen_image.shape)
prefix = './images_RBM/digitwise/'+str(n)+'_'
suffix = '_image.jpg'
plt.cla()
plt.imshow(image, cmap="gray")
plt.title('original image')
plt.savefig(prefix+'original'+suffix)
plt.cla()
plt.imshow(hidden_image, cmap="gray")
plt.title('hidden image')
plt.savefig(prefix+'hidden'+suffix)
plt.cla()
plt.imshow(gen_image, cmap="gray")
plt.title('reconstructed image')
plt.savefig(prefix+'reconstructed'+suffix)
gen_displayable_images()
|
[
"matplotlib.pyplot.title",
"PIL.Image.new",
"matplotlib.pyplot.savefig",
"load_dataset.MNIST",
"RBM.RBM",
"cv2.imwrite",
"matplotlib.pyplot.imshow",
"PIL.Image.open",
"cv2.imread",
"numpy.mean",
"numpy.array",
"numpy.reshape",
"matplotlib.pyplot.cla",
"numpy.where",
"cv2.resize"
] |
[((410, 453), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(total_width, max_height)'], {}), "('RGB', (total_width, max_height))\n", (419, 453), False, 'from PIL import Image\n'), ((579, 601), 'cv2.imread', 'cv2.imread', (['final_name'], {}), '(final_name)\n', (589, 601), False, 'import cv2\n'), ((609, 664), 'cv2.resize', 'cv2.resize', (['img', '(img.shape[1] // 2, img.shape[0] // 2)'], {}), '(img, (img.shape[1] // 2, img.shape[0] // 2))\n', (619, 664), False, 'import cv2\n'), ((662, 690), 'cv2.imwrite', 'cv2.imwrite', (['final_name', 'img'], {}), '(final_name, img)\n', (673, 690), False, 'import cv2\n'), ((1007, 1014), 'load_dataset.MNIST', 'MNIST', ([], {}), '()\n', (1012, 1014), False, 'from load_dataset import MNIST\n'), ((1114, 1125), 'RBM.RBM', 'RBM', (['vn', 'hn'], {}), '(vn, hn)\n', (1117, 1125), False, 'from RBM import RBM\n'), ((266, 279), 'PIL.Image.open', 'Image.open', (['x'], {}), '(x)\n', (276, 279), False, 'from PIL import Image\n'), ((1449, 1471), 'numpy.array', 'np.array', (['hidden_image'], {}), '(hidden_image)\n', (1457, 1471), True, 'import numpy as np\n'), ((1489, 1518), 'numpy.mean', 'np.mean', (['hidden_image'], {'axis': '(0)'}), '(hidden_image, axis=0)\n', (1496, 1518), True, 'import numpy as np\n'), ((1533, 1552), 'numpy.array', 'np.array', (['gen_image'], {}), '(gen_image)\n', (1541, 1552), True, 'import numpy as np\n'), ((1567, 1593), 'numpy.mean', 'np.mean', (['gen_image'], {'axis': '(0)'}), '(gen_image, axis=0)\n', (1574, 1593), True, 'import numpy as np\n'), ((1775, 1802), 'numpy.reshape', 'np.reshape', (['image', '(28, 28)'], {}), '(image, (28, 28))\n', (1785, 1802), True, 'import numpy as np\n'), ((1820, 1854), 'numpy.reshape', 'np.reshape', (['hidden_image', '(50, 50)'], {}), '(hidden_image, (50, 50))\n', (1830, 1854), True, 'import numpy as np\n'), ((1869, 1900), 'numpy.reshape', 'np.reshape', (['gen_image', '(28, 28)'], {}), '(gen_image, (28, 28))\n', (1879, 1900), True, 'import numpy as np\n'), ((2156, 2165), 
'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (2163, 2165), True, 'from matplotlib import pyplot as plt\n'), ((2168, 2198), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image'], {'cmap': '"""gray"""'}), "(image, cmap='gray')\n", (2178, 2198), True, 'from matplotlib import pyplot as plt\n'), ((2201, 2228), 'matplotlib.pyplot.title', 'plt.title', (['"""original image"""'], {}), "('original image')\n", (2210, 2228), True, 'from matplotlib import pyplot as plt\n'), ((2231, 2272), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(prefix + 'original' + suffix)"], {}), "(prefix + 'original' + suffix)\n", (2242, 2272), True, 'from matplotlib import pyplot as plt\n'), ((2272, 2281), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (2279, 2281), True, 'from matplotlib import pyplot as plt\n'), ((2284, 2321), 'matplotlib.pyplot.imshow', 'plt.imshow', (['hidden_image'], {'cmap': '"""gray"""'}), "(hidden_image, cmap='gray')\n", (2294, 2321), True, 'from matplotlib import pyplot as plt\n'), ((2324, 2349), 'matplotlib.pyplot.title', 'plt.title', (['"""hidden image"""'], {}), "('hidden image')\n", (2333, 2349), True, 'from matplotlib import pyplot as plt\n'), ((2352, 2391), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(prefix + 'hidden' + suffix)"], {}), "(prefix + 'hidden' + suffix)\n", (2363, 2391), True, 'from matplotlib import pyplot as plt\n'), ((2391, 2400), 'matplotlib.pyplot.cla', 'plt.cla', ([], {}), '()\n', (2398, 2400), True, 'from matplotlib import pyplot as plt\n'), ((2403, 2437), 'matplotlib.pyplot.imshow', 'plt.imshow', (['gen_image'], {'cmap': '"""gray"""'}), "(gen_image, cmap='gray')\n", (2413, 2437), True, 'from matplotlib import pyplot as plt\n'), ((2440, 2472), 'matplotlib.pyplot.title', 'plt.title', (['"""reconstructed image"""'], {}), "('reconstructed image')\n", (2449, 2472), True, 'from matplotlib import pyplot as plt\n'), ((2475, 2521), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(prefix + 'reconstructed' + suffix)"], {}), "(prefix + 
'reconstructed' + suffix)\n", (2486, 2521), True, 'from matplotlib import pyplot as plt\n'), ((1200, 1221), 'numpy.where', 'np.where', (['(test_y == n)'], {}), '(test_y == n)\n', (1208, 1221), True, 'import numpy as np\n')]
|
import tkinter as tk
class ControlPanel():
def __init__(self, parent, logger):
self.logger = logger
self.backend = None
self.frame = tk.Frame(master=parent)
self.frame.pack(fill=tk.BOTH, expand=True)
self.create_track_labels(self.frame)
self.create_control_area(self.frame)
self.create_volume_bar(self.frame)
def create_track_labels(self, parent):
name_frame = tk.Frame(master=parent)
self.var_track_name = tk.StringVar()
self.var_track_name.set('Track: -')
tk.Label(master=name_frame, textvariable=self.var_track_name).pack(side=tk.LEFT)
name_frame.pack(fill=tk.X, side=tk.TOP)
arist_frame = tk.Frame(master=parent)
self.var_track_artist = tk.StringVar()
self.var_track_artist.set('Artist: -')
tk.Label(master=arist_frame, textvariable=self.var_track_artist).pack(side=tk.LEFT)
arist_frame.pack(fill=tk.X, side=tk.TOP)
def create_control_area(self, parent):
frame = tk.Frame(master=parent)
frame.pack(fill=tk.BOTH)
center_frame = tk.Frame(master=frame)
self.toggle_play_text = tk.StringVar()
self.toggle_play_text.set('Play')
tk.Button(master=center_frame, textvariable=self.toggle_play_text, command=self.toggle_play).pack(side=tk.LEFT, padx=5, pady=5)
tk.Button(master=center_frame, text='Stop', command=self.stop_current).pack(side=tk.LEFT, padx=5, pady=5)
tk.Button(master=center_frame, text='Skip', command=self.skip_current).pack(side=tk.LEFT, padx=5, pady=5)
tk.Button(master=center_frame, text='Clear', command=self.stop_and_clear).pack(side=tk.LEFT, padx=5, pady=5)
center_frame.pack(expand=True)
def create_volume_bar(self, parent):
frame = tk.Frame(master=parent)
frame.pack(fill=tk.X, side=tk.TOP)
tk.Label(master=frame, text='Volume').pack(side=tk.LEFT, padx=5)
self.volume_var = tk.DoubleVar()
self.volume_var.set(0.0)
self.volume_scale = tk.Scale(
master=frame, from_=0, to=100, orient=tk.HORIZONTAL,
variable=self.volume_var,
command=self.set_volume
)
self.volume_scale.pack(fill=tk.BOTH, side=tk.LEFT, expand=True)
def set_backend(self, backend):
if self.backend is not None:
self.backend.remove_state_changed_callback(self.refresh)
self.backend = backend
if self.backend is not None:
self.backend.add_state_changed_callback(self.refresh)
def refresh(self):
if self.backend is None:
return
self.toggle_play_text.set('Pause' if self.backend.is_playing() else 'Play')
track = self.backend.get_current_track()
if track is None:
self.var_track_name.set('Track: -')
self.var_track_artist.set('Artist: -')
else:
self.var_track_name.set(f'Track: {track.name}')
self.var_track_artist.set(f'Artist: {track.artist}')
self.volume_var.set(int(self.backend.get_volume() * 100))
def toggle_play(self):
if self.backend is not None:
self.backend.toggle_playback()
def stop_current(self):
if self.backend is not None:
self.backend.stop()
def skip_current(self):
if self.backend is not None:
self.backend.skip()
def stop_and_clear(self):
if self.backend is not None:
if not self.backend.is_playing() and self.backend.get_current_track() is not None:
self.backend.clear()
else:
self.backend.clear_current_on_next_stop()
self.backend.stop()
def set_volume(self, amount):
if self.backend is not None:
self.backend.set_volume(float(amount) / 100.0)
|
[
"tkinter.StringVar",
"tkinter.Button",
"tkinter.Scale",
"tkinter.Frame",
"tkinter.DoubleVar",
"tkinter.Label"
] |
[((146, 169), 'tkinter.Frame', 'tk.Frame', ([], {'master': 'parent'}), '(master=parent)\n', (154, 169), True, 'import tkinter as tk\n'), ((386, 409), 'tkinter.Frame', 'tk.Frame', ([], {'master': 'parent'}), '(master=parent)\n', (394, 409), True, 'import tkinter as tk\n'), ((434, 448), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (446, 448), True, 'import tkinter as tk\n'), ((631, 654), 'tkinter.Frame', 'tk.Frame', ([], {'master': 'parent'}), '(master=parent)\n', (639, 654), True, 'import tkinter as tk\n'), ((681, 695), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (693, 695), True, 'import tkinter as tk\n'), ((917, 940), 'tkinter.Frame', 'tk.Frame', ([], {'master': 'parent'}), '(master=parent)\n', (925, 940), True, 'import tkinter as tk\n'), ((986, 1008), 'tkinter.Frame', 'tk.Frame', ([], {'master': 'frame'}), '(master=frame)\n', (994, 1008), True, 'import tkinter as tk\n'), ((1036, 1050), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (1048, 1050), True, 'import tkinter as tk\n'), ((1628, 1651), 'tkinter.Frame', 'tk.Frame', ([], {'master': 'parent'}), '(master=parent)\n', (1636, 1651), True, 'import tkinter as tk\n'), ((1778, 1792), 'tkinter.DoubleVar', 'tk.DoubleVar', ([], {}), '()\n', (1790, 1792), True, 'import tkinter as tk\n'), ((1842, 1959), 'tkinter.Scale', 'tk.Scale', ([], {'master': 'frame', 'from_': '(0)', 'to': '(100)', 'orient': 'tk.HORIZONTAL', 'variable': 'self.volume_var', 'command': 'self.set_volume'}), '(master=frame, from_=0, to=100, orient=tk.HORIZONTAL, variable=self\n .volume_var, command=self.set_volume)\n', (1850, 1959), True, 'import tkinter as tk\n'), ((489, 550), 'tkinter.Label', 'tk.Label', ([], {'master': 'name_frame', 'textvariable': 'self.var_track_name'}), '(master=name_frame, textvariable=self.var_track_name)\n', (497, 550), True, 'import tkinter as tk\n'), ((739, 803), 'tkinter.Label', 'tk.Label', ([], {'master': 'arist_frame', 'textvariable': 'self.var_track_artist'}), '(master=arist_frame, 
textvariable=self.var_track_artist)\n', (747, 803), True, 'import tkinter as tk\n'), ((1089, 1186), 'tkinter.Button', 'tk.Button', ([], {'master': 'center_frame', 'textvariable': 'self.toggle_play_text', 'command': 'self.toggle_play'}), '(master=center_frame, textvariable=self.toggle_play_text, command=\n self.toggle_play)\n', (1098, 1186), True, 'import tkinter as tk\n'), ((1220, 1290), 'tkinter.Button', 'tk.Button', ([], {'master': 'center_frame', 'text': '"""Stop"""', 'command': 'self.stop_current'}), "(master=center_frame, text='Stop', command=self.stop_current)\n", (1229, 1290), True, 'import tkinter as tk\n'), ((1328, 1398), 'tkinter.Button', 'tk.Button', ([], {'master': 'center_frame', 'text': '"""Skip"""', 'command': 'self.skip_current'}), "(master=center_frame, text='Skip', command=self.skip_current)\n", (1337, 1398), True, 'import tkinter as tk\n'), ((1436, 1509), 'tkinter.Button', 'tk.Button', ([], {'master': 'center_frame', 'text': '"""Clear"""', 'command': 'self.stop_and_clear'}), "(master=center_frame, text='Clear', command=self.stop_and_clear)\n", (1445, 1509), True, 'import tkinter as tk\n'), ((1692, 1729), 'tkinter.Label', 'tk.Label', ([], {'master': 'frame', 'text': '"""Volume"""'}), "(master=frame, text='Volume')\n", (1700, 1729), True, 'import tkinter as tk\n')]
|
#
# Copyright 2021. Futurewei Technologies Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
import os
import json
def geticonsonly(name):
folder = str("/tmp/icons_"+str(int(time.time()*1000)))
print("fetching icons for "+name+" logo")
print("creating icons temp dir - " + folder)
os.system("mkdir " + folder)
geticonscmd = "node " + os.getenv('UTIL_PATH') + "getIconsOnly.js " + name + " " + folder
os.system(geticonscmd)
f = open(folder+"/iconUrls.json", "r")
data = json.load(f)
return data
|
[
"json.load",
"os.getenv",
"os.system",
"time.time"
] |
[((863, 891), 'os.system', 'os.system', (["('mkdir ' + folder)"], {}), "('mkdir ' + folder)\n", (872, 891), False, 'import os\n'), ((990, 1012), 'os.system', 'os.system', (['geticonscmd'], {}), '(geticonscmd)\n', (999, 1012), False, 'import os\n'), ((1067, 1079), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1076, 1079), False, 'import json\n'), ((744, 755), 'time.time', 'time.time', ([], {}), '()\n', (753, 755), False, 'import time\n'), ((920, 942), 'os.getenv', 'os.getenv', (['"""UTIL_PATH"""'], {}), "('UTIL_PATH')\n", (929, 942), False, 'import os\n')]
|
from binascii import hexlify
import responses
from pyrex.daemon import Daemon
from pyrex.backends.jsonrpc import JSONRPCDaemon
from pyrex.transaction import Transaction
from .base import JSONTestCase
class JSONRPCDaemonTestCase(JSONTestCase):
jsonrpc_url = 'http://1172.16.58.3:18081/json_rpc'
mempool_url = 'http://127.0.0.1:18081/get_transaction_pool'
data_subdir = 'test_jsonrpcdaemon'
def setUp(self):
self.daemon = Daemon(JSONRPCDaemon())
@responses.activate
def test_basic_info(self):
responses.add(responses.POST, self.jsonrpc_url,
json=self._read('test_basic_info-get_info.json'),
status=200)
responses.add(responses.POST, self.jsonrpc_url,
json=self._read('test_basic_info-get_info.json'),
status=200)
self.assertTrue(self.daemon.info())
self.assertEqual(self.daemon.height(), 294993)
@responses.activate
def test_mempool(self):
responses.add(responses.POST, self.mempool_url,
json=self._read('test_mempool-transactions.json'),
status=200)
txs = self.daemon.mempool()
self.assertEqual(len(txs), 2)
self.assertEqual(txs[0].confirmations, 0)
self.assertEqual(txs[1].confirmations, 0)
self.assertGreater(txs[0].fee, 0)
self.assertGreater(txs[1].fee, 0)
|
[
"pyrex.backends.jsonrpc.JSONRPCDaemon"
] |
[((455, 470), 'pyrex.backends.jsonrpc.JSONRPCDaemon', 'JSONRPCDaemon', ([], {}), '()\n', (468, 470), False, 'from pyrex.backends.jsonrpc import JSONRPCDaemon\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Callable, Optional
import torch
import torch.nn as nn
import torch.nn.functional as F
from egg.core.interaction import LoggingStrategy
class ContinuousLinearSender(nn.Module):
def __init__(
self,
agent: nn.Module,
encoder_input_size: int,
encoder_hidden_size: int = 64,
num_layers: int = 1,
activation: str = "relu",
):
super(ContinuousLinearSender, self).__init__()
self.agent = agent
activations = {
"relu": F.relu,
"tanh": F.tanh,
"leaky_relu": F.leaky_relu,
"identity": nn.Identity(),
}
self.activation = activations[activation.lower()]
encoder_hidden_sizes = [encoder_hidden_size] * num_layers
encoder_layer_dimensions = [(encoder_input_size, encoder_hidden_sizes[0])]
for i, hidden_size in enumerate(encoder_hidden_sizes[1:]):
hidden_shape = (self.encoder_hidden_sizes[i], hidden_size)
encoder_layer_dimensions.append(hidden_shape)
self.encoder_hidden_layers = nn.ModuleList(
[nn.Linear(*dimensions) for dimensions in encoder_layer_dimensions]
)
def forward(self, x, aux_input=None):
x = self.agent(x, aux_input)
for hidden_layer in self.encoder_hidden_layers[:-1]:
x = self.activation(hidden_layer(x))
sender_output = self.encoder_hidden_layers[-1](x)
return sender_output
class ContinuousLinearReceiver(nn.Module):
def __init__(
self,
agent: nn.Module,
):
super(ContinuousLinearReceiver, self).__init__()
self.agent = agent
def forward(self, message, input=None, aux_input=None):
agent_output = self.agent(message, input, aux_input)
return agent_output
class SenderReceiverContinuousCommunication(nn.Module):
def __init__(
self,
sender: nn.Module,
receiver: nn.Module,
loss: Callable,
train_logging_strategy: Optional[LoggingStrategy] = None,
test_logging_strategy: Optional[LoggingStrategy] = None,
):
"""
:param sender: Sender agent. sender.forward() has to output a continouos vector
:param receiver: Receiver agent. receiver.forward() has to accept two parameters:
message and receiver_input.
`message` is shaped as (batch_size, vocab_size).
:param loss: Callable that outputs differentiable loss, takes the following parameters:
* sender_input: input to Sender (comes from dataset)
* message: message sent from Sender
* receiver_input: input to Receiver from dataset
* receiver_output: output of Receiver
* labels: labels that come from dataset
:param train_logging_strategy, test_logging_strategy: specify what parts of interactions to persist for
later analysis in the callbacks.
"""
super(SenderReceiverContinuousCommunication, self).__init__()
self.sender = sender
self.receiver = receiver
self.loss = loss
self.train_logging_strategy = (
LoggingStrategy()
if train_logging_strategy is None
else train_logging_strategy
)
self.test_logging_strategy = (
LoggingStrategy()
if test_logging_strategy is None
else test_logging_strategy
)
def forward(self, sender_input, labels, receiver_input=None, aux_input=None):
message = self.sender(sender_input, aux_input)
receiver_output = self.receiver(message, receiver_input, aux_input)
loss, aux_info = self.loss(
sender_input, message, receiver_input, receiver_output, labels, aux_input
)
logging_strategy = (
self.train_logging_strategy if self.training else self.test_logging_strategy
)
interaction = logging_strategy.filtered_interaction(
sender_input=sender_input,
receiver_input=receiver_input,
labels=labels,
aux_input=aux_input,
receiver_output=receiver_output,
message=message.detach(),
message_length=torch.ones(message[0].size(0)),
aux=aux_info,
)
return loss.mean(), interaction
|
[
"egg.core.interaction.LoggingStrategy",
"torch.nn.Identity",
"torch.nn.Linear"
] |
[((803, 816), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (814, 816), True, 'import torch.nn as nn\n'), ((3334, 3351), 'egg.core.interaction.LoggingStrategy', 'LoggingStrategy', ([], {}), '()\n', (3349, 3351), False, 'from egg.core.interaction import LoggingStrategy\n'), ((3499, 3516), 'egg.core.interaction.LoggingStrategy', 'LoggingStrategy', ([], {}), '()\n', (3514, 3516), False, 'from egg.core.interaction import LoggingStrategy\n'), ((1299, 1321), 'torch.nn.Linear', 'nn.Linear', (['*dimensions'], {}), '(*dimensions)\n', (1308, 1321), True, 'import torch.nn as nn\n')]
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""The plugin of the pytest.
The pytest plugin hooks do not need to be imported into any test code, it will
load automatically when running pytest.
References:
https://docs.pytest.org/en/2.7.3/plugins.html
"""
import pytest
from rayvision_max.analyse_max import AnalyseMax
@pytest.fixture()
def analyze_info(tmpdir):
"""Get user info."""
cg_file = str(tmpdir.join('jh.max'))
with open(cg_file, "w"):
pass
return {
"cg_file": cg_file,
"workspace": str(tmpdir),
"software_version": "2018",
"project_name": "Project1",
"plugin_config": {}
}
@pytest.fixture()
def maya_analyze(analyze_info):
"""Create an Maya object."""
return AnalyseMax(**analyze_info)
|
[
"rayvision_max.analyse_max.AnalyseMax",
"pytest.fixture"
] |
[((332, 348), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (346, 348), False, 'import pytest\n'), ((667, 683), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (681, 683), False, 'import pytest\n'), ((760, 786), 'rayvision_max.analyse_max.AnalyseMax', 'AnalyseMax', ([], {}), '(**analyze_info)\n', (770, 786), False, 'from rayvision_max.analyse_max import AnalyseMax\n')]
|
#!/usr/bin/env python
"""
This program will generate a greyscale bitmap from a
CDED flavoured DEM file.
"""
from cdedtools import demparser
import argparse
import Image
# Command Line Options
parser = argparse.ArgumentParser(description="Generate a BMP from a DEM file.")
parser.add_argument("-c", "--colourize", dest="colourize", action="store_true", help="Add this flag to output blue-green instead of greyscale.")
parser.add_argument("-q", "--quality", dest="quality", default=1, help="The resolution of the resulting BMP. 1 will match the source resolution.")
parser.add_argument("sourcefile", help="Read data from SOURCEFILE", metavar="SOURCEFILE")
parser.add_argument("destination", help="Save the resultion Bitmap file as DESTINATION", metavar="DESTINATION")
args = parser.parse_args()
with open(args.sourcefile, 'r') as f:
percentComplete = 0
heightmap = demparser.read_data(f)
resolution = int(args.quality)**2
heightmap = heightmap[::resolution]
height = len(heightmap)
width = len(heightmap[0][::resolution])
img = Image.new('RGB', (width, height), '#00F')
for y in range(len(heightmap)):
if int(float(y) / len(heightmap) * 100) != percentComplete:
percentComplete = int(float(y) / len(heightmap) * 100)
print("Drawing Bitmap... {0}% Complete".format(percentComplete))
heightmap[y] = heightmap[y][::resolution]
heightmap[y] = heightmap[y][::-1]
for x in range(len(heightmap[y])):
colour = int(float(max(heightmap[y][x], 0)) / 500 * 255)
if(args.colourize):
img.putpixel((y,x), (0, colour, 100))
else:
img.putpixel((y,x), (colour, colour, colour))
img.save(args.destination, 'BMP')
print("Image saved as: {0}".format(args.destination))
|
[
"cdedtools.demparser.read_data",
"Image.new",
"argparse.ArgumentParser"
] |
[((205, 275), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate a BMP from a DEM file."""'}), "(description='Generate a BMP from a DEM file.')\n", (228, 275), False, 'import argparse\n'), ((892, 914), 'cdedtools.demparser.read_data', 'demparser.read_data', (['f'], {}), '(f)\n', (911, 914), False, 'from cdedtools import demparser\n'), ((1092, 1133), 'Image.new', 'Image.new', (['"""RGB"""', '(width, height)', '"""#00F"""'], {}), "('RGB', (width, height), '#00F')\n", (1101, 1133), False, 'import Image\n')]
|
'''
This is a script to convert a wikipedia2vec model file to word2vec.
Usage:
$ python3 convert_wiki2vec_glove.py [MODEL_NAME] [OUTPUT_FILE_NAME]
'''
from wikipedia2vec import Wikipedia2Vec
from wikipedia2vec.dictionary import Word
import sys
def save_text(wiki2vec, out_file):
with open(out_file, 'wb') as f:
for item_idx, item in enumerate(sorted(wiki2vec.dictionary, key=lambda o: o.doc_count, reverse=True)):
vec_str = ' '.join('%.4f' % v for v in wiki2vec.get_vector(item))
if isinstance(item, Word):
text = item.text.replace('\t', ' ')
else:
text = 'ENTITY/' + item.title.replace('\t', ' ')
ext = text.replace(' ', '_')
f.write(('%s %s\n' % (text, vec_str)).encode('utf-8'))
def main():
argvs = sys.argv
argc = len(argvs)
MODEL_FILE = argvs[1]
OUTPUT_FILE = argvs[2]
wiki2vec = Wikipedia2Vec.load(MODEL_FILE)
save_text(wiki2vec, OUTPUT_FILE)
if __name__ == "__main__":
main()
|
[
"wikipedia2vec.Wikipedia2Vec.load"
] |
[((916, 946), 'wikipedia2vec.Wikipedia2Vec.load', 'Wikipedia2Vec.load', (['MODEL_FILE'], {}), '(MODEL_FILE)\n', (934, 946), False, 'from wikipedia2vec import Wikipedia2Vec\n')]
|
"""
Base classes for Custom Authenticator to use OAuth with JupyterHub
Most of the code c/o <NAME> (@rgbkrk)
"""
import base64
import json
import os
from urllib.parse import quote, urlparse
import uuid
from tornado import gen, web
from tornado.log import app_log
from jupyterhub.handlers import BaseHandler
from jupyterhub.auth import Authenticator
from jupyterhub.utils import url_path_join
from traitlets import Unicode, Bool, List
def guess_callback_uri(protocol, host, hub_server_url):
return '{proto}://{host}{path}'.format(
proto=protocol,
host=host,
path=url_path_join(
hub_server_url,
'oauth_callback'
)
)
STATE_COOKIE_NAME = 'oauthenticator-state'
def _serialize_state(state):
"""Serialize OAuth state to a base64 string after passing through JSON"""
json_state = json.dumps(state)
return base64.urlsafe_b64encode(
json_state.encode('utf8')
).decode('ascii')
def _deserialize_state(b64_state):
"""Deserialize OAuth state as serialized in _serialize_state"""
if isinstance(b64_state, str):
b64_state = b64_state.encode('ascii')
try:
json_state = base64.urlsafe_b64decode(b64_state).decode('utf8')
except ValueError:
app_log.error("Failed to b64-decode state: %r", b64_state)
return {}
try:
return json.loads(json_state)
except ValueError:
app_log.error("Failed to json-decode state: %r", json_state)
return {}
class OAuthLoginHandler(BaseHandler):
"""Base class for OAuth login handler
Typically subclasses will need
"""
def set_state_cookie(self, state):
self.set_secure_cookie(STATE_COOKIE_NAME,
state, expires_days=1, httponly=True,
)
_state = None
def get_state(self):
next_url = original_next_url = self.get_argument('next', None)
if next_url:
# avoid browsers treating \ as /
next_url = next_url.replace('\\', quote('\\'))
# disallow hostname-having urls,
# force absolute path redirect
urlinfo = urlparse(next_url)
next_url = urlinfo._replace(
scheme='',
netloc='',
path='/' + urlinfo.path.lstrip('/'),
).geturl()
if next_url != original_next_url:
self.log.warning(
"Ignoring next_url %r, using %r",
original_next_url,
next_url,
)
if self._state is None:
self._state = _serialize_state({
'state_id': uuid.uuid4().hex,
'next_url': next_url,
})
return self._state
def get(self):
redirect_uri = self.authenticator.get_callback_url(self)
self.log.info('OAuth redirect: %r', redirect_uri)
state = self.get_state()
self.set_state_cookie(state)
self.authorize_redirect(
redirect_uri=redirect_uri,
client_id=self.authenticator.client_id,
scope=self.authenticator.scope,
extra_params={'state': state},
response_type='code')
class OAuthCallbackHandler(BaseHandler):
"""Basic handler for OAuth callback. Calls authenticator to verify username."""
_state_cookie = None
def get_state_cookie(self):
"""Get OAuth state from cookies
To be compared with the value in redirect URL
"""
if self._state_cookie is None:
self._state_cookie = (self.get_secure_cookie(STATE_COOKIE_NAME) or b'').decode('utf8', 'replace')
self.clear_cookie(STATE_COOKIE_NAME)
return self._state_cookie
def get_state_url(self):
"""Get OAuth state from URL parameters
to be compared with the value in cookies
"""
return self.get_argument("state")
def check_state(self):
"""Verify OAuth state
compare value in cookie with redirect url param
"""
cookie_state = self.get_state_cookie()
url_state = self.get_state_url()
if not cookie_state:
raise web.HTTPError(400, "OAuth state missing from cookies")
if not url_state:
raise web.HTTPError(400, "OAuth state missing from URL")
if cookie_state != url_state:
self.log.warning("OAuth state mismatch: %s != %s", cookie_state, url_state)
raise web.HTTPError(400, "OAuth state mismatch")
def check_error(self):
"""Check the OAuth code"""
error = self.get_argument("error", False)
if error:
message = self.get_argument("error_description", error)
raise web.HTTPError(400, "OAuth error: %s" % message)
def check_code(self):
"""Check the OAuth code"""
if not self.get_argument("code", False):
raise web.HTTPError(400, "OAuth callback made without a code")
def check_arguments(self):
"""Validate the arguments of the redirect
Default:
- check for oauth-standard error, error_description arguments
- check that there's a code
- check that state matches
"""
self.check_error()
self.check_code()
self.check_state()
def get_next_url(self, user=None):
"""Get the redirect target from the state field"""
state = self.get_state_url()
if state:
next_url = _deserialize_state(state).get('next_url')
if next_url:
return next_url
# JupyterHub 0.8 adds default .get_next_url for a fallback
if hasattr(BaseHandler, 'get_next_url'):
return super().get_next_url(user)
return url_path_join(self.hub.server.base_url, 'home')
@gen.coroutine
def _login_user_pre_08(self):
"""login_user simplifies the login+cookie+auth_state process in JupyterHub 0.8
_login_user_07 is for backward-compatibility with JupyterHub 0.7
"""
user_info = yield self.authenticator.get_authenticated_user(self, None)
if user_info is None:
return
if isinstance(user_info, dict):
username = user_info['name']
else:
username = user_info
user = self.user_from_username(username)
self.set_login_cookie(user)
return user
if not hasattr(BaseHandler, 'login_user'):
# JupyterHub 0.7 doesn't have .login_user
login_user = _login_user_pre_08
@gen.coroutine
def get(self):
self.check_arguments()
user = yield self.login_user()
if user is None:
# todo: custom error page?
raise web.HTTPError(403)
self.redirect(self.get_next_url(user))
class OAuthenticator(Authenticator):
"""Base class for OAuthenticators
Subclasses must override:
login_service (string identifying the service provider)
login_handler (likely a subclass of OAuthLoginHandler)
authenticate (method takes one arg - the request handler handling the oauth callback)
"""
scope = List(Unicode(), config=True,
help="""The OAuth scopes to request.
See the OAuth documentation of your OAuth provider for options.
For GitHub in particular, you can see github_scopes.md in this repo.
"""
)
login_service = 'override in subclass'
oauth_callback_url = Unicode(
os.getenv('OAUTH_CALLBACK_URL', ''),
config=True,
help="""Callback URL to use.
Typically `https://{host}/hub/oauth_callback`"""
)
client_id_env = ''
client_id = Unicode(config=True)
def _client_id_default(self):
if self.client_id_env:
client_id = os.getenv(self.client_id_env, '')
if client_id:
return client_id
return os.getenv('OAUTH_CLIENT_ID', '')
client_secret_env = ''
client_secret = Unicode(config=True)
def _client_secret_default(self):
if self.client_secret_env:
client_secret = os.getenv(self.client_secret_env, '')
if client_secret:
return client_secret
return os.getenv('OAUTH_CLIENT_SECRET', '')
validate_server_cert_env = 'OAUTH_TLS_VERIFY'
validate_server_cert = Bool(config=True)
def _validate_server_cert_default(self):
env_value = os.getenv(self.validate_server_cert_env, '')
if env_value == '0':
return False
else:
return True
def login_url(self, base_url):
return url_path_join(base_url, 'oauth_login')
login_handler = "Specify login handler class in subclass"
callback_handler = OAuthCallbackHandler
def get_callback_url(self, handler=None):
"""Get my OAuth redirect URL
Either from config or guess based on the current request.
"""
if self.oauth_callback_url:
return self.oauth_callback_url
elif handler:
return guess_callback_uri(
handler.request.protocol,
handler.request.host,
handler.hub.server.base_url
)
else:
raise ValueError("Specify callback oauth_callback_url or give me a handler to guess with")
def get_handlers(self, app):
return [
(r'/oauth_login', self.login_handler),
(r'/oauth_callback', self.callback_handler),
]
@gen.coroutine
def authenticate(self, handler, data=None):
raise NotImplementedError()
|
[
"traitlets.Bool",
"tornado.web.HTTPError",
"uuid.uuid4",
"json.loads",
"urllib.parse.urlparse",
"json.dumps",
"traitlets.Unicode",
"tornado.log.app_log.error",
"urllib.parse.quote",
"base64.urlsafe_b64decode",
"os.getenv",
"jupyterhub.utils.url_path_join"
] |
[((855, 872), 'json.dumps', 'json.dumps', (['state'], {}), '(state)\n', (865, 872), False, 'import json\n'), ((7635, 7655), 'traitlets.Unicode', 'Unicode', ([], {'config': '(True)'}), '(config=True)\n', (7642, 7655), False, 'from traitlets import Unicode, Bool, List\n'), ((7934, 7954), 'traitlets.Unicode', 'Unicode', ([], {'config': '(True)'}), '(config=True)\n', (7941, 7954), False, 'from traitlets import Unicode, Bool, List\n'), ((8291, 8308), 'traitlets.Bool', 'Bool', ([], {'config': '(True)'}), '(config=True)\n', (8295, 8308), False, 'from traitlets import Unicode, Bool, List\n'), ((1365, 1387), 'json.loads', 'json.loads', (['json_state'], {}), '(json_state)\n', (1375, 1387), False, 'import json\n'), ((5739, 5786), 'jupyterhub.utils.url_path_join', 'url_path_join', (['self.hub.server.base_url', '"""home"""'], {}), "(self.hub.server.base_url, 'home')\n", (5752, 5786), False, 'from jupyterhub.utils import url_path_join\n'), ((7115, 7124), 'traitlets.Unicode', 'Unicode', ([], {}), '()\n', (7122, 7124), False, 'from traitlets import Unicode, Bool, List\n'), ((7437, 7472), 'os.getenv', 'os.getenv', (['"""OAUTH_CALLBACK_URL"""', '""""""'], {}), "('OAUTH_CALLBACK_URL', '')\n", (7446, 7472), False, 'import os\n'), ((7853, 7885), 'os.getenv', 'os.getenv', (['"""OAUTH_CLIENT_ID"""', '""""""'], {}), "('OAUTH_CLIENT_ID', '')\n", (7862, 7885), False, 'import os\n'), ((8176, 8212), 'os.getenv', 'os.getenv', (['"""OAUTH_CLIENT_SECRET"""', '""""""'], {}), "('OAUTH_CLIENT_SECRET', '')\n", (8185, 8212), False, 'import os\n'), ((8374, 8418), 'os.getenv', 'os.getenv', (['self.validate_server_cert_env', '""""""'], {}), "(self.validate_server_cert_env, '')\n", (8383, 8418), False, 'import os\n'), ((8562, 8600), 'jupyterhub.utils.url_path_join', 'url_path_join', (['base_url', '"""oauth_login"""'], {}), "(base_url, 'oauth_login')\n", (8575, 8600), False, 'from jupyterhub.utils import url_path_join\n'), ((597, 644), 'jupyterhub.utils.url_path_join', 'url_path_join', (['hub_server_url', 
'"""oauth_callback"""'], {}), "(hub_server_url, 'oauth_callback')\n", (610, 644), False, 'from jupyterhub.utils import url_path_join\n'), ((1264, 1322), 'tornado.log.app_log.error', 'app_log.error', (['"""Failed to b64-decode state: %r"""', 'b64_state'], {}), "('Failed to b64-decode state: %r', b64_state)\n", (1277, 1322), False, 'from tornado.log import app_log\n'), ((1419, 1479), 'tornado.log.app_log.error', 'app_log.error', (['"""Failed to json-decode state: %r"""', 'json_state'], {}), "('Failed to json-decode state: %r', json_state)\n", (1432, 1479), False, 'from tornado.log import app_log\n'), ((2124, 2142), 'urllib.parse.urlparse', 'urlparse', (['next_url'], {}), '(next_url)\n', (2132, 2142), False, 'from urllib.parse import quote, urlparse\n'), ((4164, 4218), 'tornado.web.HTTPError', 'web.HTTPError', (['(400)', '"""OAuth state missing from cookies"""'], {}), "(400, 'OAuth state missing from cookies')\n", (4177, 4218), False, 'from tornado import gen, web\n'), ((4263, 4313), 'tornado.web.HTTPError', 'web.HTTPError', (['(400)', '"""OAuth state missing from URL"""'], {}), "(400, 'OAuth state missing from URL')\n", (4276, 4313), False, 'from tornado import gen, web\n'), ((4458, 4500), 'tornado.web.HTTPError', 'web.HTTPError', (['(400)', '"""OAuth state mismatch"""'], {}), "(400, 'OAuth state mismatch')\n", (4471, 4500), False, 'from tornado import gen, web\n'), ((4718, 4765), 'tornado.web.HTTPError', 'web.HTTPError', (['(400)', "('OAuth error: %s' % message)"], {}), "(400, 'OAuth error: %s' % message)\n", (4731, 4765), False, 'from tornado import gen, web\n'), ((4895, 4951), 'tornado.web.HTTPError', 'web.HTTPError', (['(400)', '"""OAuth callback made without a code"""'], {}), "(400, 'OAuth callback made without a code')\n", (4908, 4951), False, 'from tornado import gen, web\n'), ((6705, 6723), 'tornado.web.HTTPError', 'web.HTTPError', (['(403)'], {}), '(403)\n', (6718, 6723), False, 'from tornado import gen, web\n'), ((7745, 7778), 'os.getenv', 'os.getenv', 
(['self.client_id_env', '""""""'], {}), "(self.client_id_env, '')\n", (7754, 7778), False, 'import os\n'), ((8056, 8093), 'os.getenv', 'os.getenv', (['self.client_secret_env', '""""""'], {}), "(self.client_secret_env, '')\n", (8065, 8093), False, 'import os\n'), ((1182, 1217), 'base64.urlsafe_b64decode', 'base64.urlsafe_b64decode', (['b64_state'], {}), '(b64_state)\n', (1206, 1217), False, 'import base64\n'), ((2001, 2012), 'urllib.parse.quote', 'quote', (['"""\\\\"""'], {}), "('\\\\')\n", (2006, 2012), False, 'from urllib.parse import quote, urlparse\n'), ((2640, 2652), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2650, 2652), False, 'import uuid\n')]
|
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import numpy.distutils.misc_util
import argparse
import sys, os
import numpy as np
print(sys.argv)
parser = argparse.ArgumentParser(description='Build Cython Extension for CPU')
parser.add_argument('-n', dest="n", default=0, help="The device id")
args, unknown = parser.parse_known_args()
dev = args.n
sys.argv = ['cpu_setup.py', 'build_ext', '--inplace']
#Check if Cython is installed
try:
from Cython.Distutils import build_ext
except:
print("You don't seem to have Cython installed")
sys.exit(1)
KOKKOS_DIR=os.environ["KOKKOS_DIR"]
os.environ["CC"] = "gcc -fopenmp"
os.environ["CXX"] = "g++ -fopenmp"
#include directories
inc_dirs = numpy.distutils.misc_util.get_numpy_include_dirs()
inc_dirs = inc_dirs + [KOKKOS_DIR]
inc_dirs = inc_dirs + [np.get_include()]
inc_dirs = inc_dirs + [KOKKOS_DIR+'/lib/include']
inc_dirs = inc_dirs + [KOKKOS_DIR+'/cpu_build/lib/include/']
inc_dirs = inc_dirs + [KOKKOS_DIR+'/cpu_build/core/']
# hmlp library directory
lib_dirs = [KOKKOS_DIR]
lib_dirs = lib_dirs + [KOKKOS_DIR+'/cpu_build/lib/lib64']
lib_dirs = lib_dirs + [KOKKOS_DIR+'/cpu_build/lib/lib']
def scandir(dir, files=[]):
for file in os.listdir(dir):
path = os.path.join(dir, file)
if os.path.isfile(path) and path.endswith(".pyx"):
files.append(path.replace(os.path.sep, ".")[:-4])
elif os.path.isdir(path):
scandir(path, files)
return files
def makeExtension(extName):
extPath = extName.replace(".", os.path.sep)+".pyx"
return Extension(
extName,
[extPath],
include_dirs = inc_dirs,
language='c++',
libraries = ['kokkoscore'],
library_dirs = lib_dirs,
runtime_library_dirs = lib_dirs,
extra_compile_args=["-std=c++11","-O3", "-Wno-sign-compare", "-w"],
extra_link_args=["-lkokkoscore", "-Wl,--no-as-needed", "-ldl", "-lpthread"]
)
extNames = scandir("kokkos/cpu")
print(extNames)
extensions = [makeExtension(name) for name in extNames]
print(extensions)
setup(
name="kokkos_cpu",
packages=["kokkos_cpu"],
ext_modules=extensions,
package_data={
'':['*.pxd']
},
zip_safe=False,
include_package_data=True,
cmdclass = {'build_ext': build_ext}
)
|
[
"setuptools.setup",
"argparse.ArgumentParser",
"os.path.isdir",
"setuptools.extension.Extension",
"os.path.isfile",
"numpy.get_include",
"os.path.join",
"os.listdir",
"sys.exit"
] |
[((231, 300), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Build Cython Extension for CPU"""'}), "(description='Build Cython Extension for CPU')\n", (254, 300), False, 'import argparse\n'), ((2139, 2328), 'setuptools.setup', 'setup', ([], {'name': '"""kokkos_cpu"""', 'packages': "['kokkos_cpu']", 'ext_modules': 'extensions', 'package_data': "{'': ['*.pxd']}", 'zip_safe': '(False)', 'include_package_data': '(True)', 'cmdclass': "{'build_ext': build_ext}"}), "(name='kokkos_cpu', packages=['kokkos_cpu'], ext_modules=extensions,\n package_data={'': ['*.pxd']}, zip_safe=False, include_package_data=True,\n cmdclass={'build_ext': build_ext})\n", (2144, 2328), False, 'from setuptools import setup, find_packages\n'), ((1277, 1292), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (1287, 1292), False, 'import sys, os\n'), ((1633, 1944), 'setuptools.extension.Extension', 'Extension', (['extName', '[extPath]'], {'include_dirs': 'inc_dirs', 'language': '"""c++"""', 'libraries': "['kokkoscore']", 'library_dirs': 'lib_dirs', 'runtime_library_dirs': 'lib_dirs', 'extra_compile_args': "['-std=c++11', '-O3', '-Wno-sign-compare', '-w']", 'extra_link_args': "['-lkokkoscore', '-Wl,--no-as-needed', '-ldl', '-lpthread']"}), "(extName, [extPath], include_dirs=inc_dirs, language='c++',\n libraries=['kokkoscore'], library_dirs=lib_dirs, runtime_library_dirs=\n lib_dirs, extra_compile_args=['-std=c++11', '-O3', '-Wno-sign-compare',\n '-w'], extra_link_args=['-lkokkoscore', '-Wl,--no-as-needed', '-ldl',\n '-lpthread'])\n", (1642, 1944), False, 'from setuptools.extension import Extension\n'), ((624, 635), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (632, 635), False, 'import sys, os\n'), ((884, 900), 'numpy.get_include', 'np.get_include', ([], {}), '()\n', (898, 900), True, 'import numpy as np\n'), ((1309, 1332), 'os.path.join', 'os.path.join', (['dir', 'file'], {}), '(dir, file)\n', (1321, 1332), False, 'import sys, os\n'), ((1344, 1364), 
'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1358, 1364), False, 'import sys, os\n'), ((1467, 1486), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (1480, 1486), False, 'import sys, os\n')]
|
#!/usr/bin/env/python
import roslib; roslib.load_manifest('myBot')
import rospy
from geometry_msgs.msg import PoseWithCovarianceStamped
def main():
pub = rospy.Publisher('/initialpose', PoseWithCovarianceStamped)
rospy.sleep(0.3)
pose = PoseWithCovarianceStamped()
pose.header.frame_id = '/map'
pose.pose.pose.position.x = 1.0
pose.pose.pose.position.y = 1.0
pose.pose.pose.orientation.w = 1.0
rospy.loginfo('Publishing:\n' + str(pose))
pub.publish(pose)
if __name__ == '__main__':
rospy.init_node('stupid_initial_pose_node')
main()
|
[
"rospy.Publisher",
"rospy.sleep",
"rospy.init_node",
"geometry_msgs.msg.PoseWithCovarianceStamped",
"roslib.load_manifest"
] |
[((38, 67), 'roslib.load_manifest', 'roslib.load_manifest', (['"""myBot"""'], {}), "('myBot')\n", (58, 67), False, 'import roslib\n'), ((160, 218), 'rospy.Publisher', 'rospy.Publisher', (['"""/initialpose"""', 'PoseWithCovarianceStamped'], {}), "('/initialpose', PoseWithCovarianceStamped)\n", (175, 218), False, 'import rospy\n'), ((223, 239), 'rospy.sleep', 'rospy.sleep', (['(0.3)'], {}), '(0.3)\n', (234, 239), False, 'import rospy\n'), ((251, 278), 'geometry_msgs.msg.PoseWithCovarianceStamped', 'PoseWithCovarianceStamped', ([], {}), '()\n', (276, 278), False, 'from geometry_msgs.msg import PoseWithCovarianceStamped\n'), ((525, 568), 'rospy.init_node', 'rospy.init_node', (['"""stupid_initial_pose_node"""'], {}), "('stupid_initial_pose_node')\n", (540, 568), False, 'import rospy\n')]
|