hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a06b1df56785e99084d56d614ec2e371dbd7d04
| 979
|
py
|
Python
|
journal.py
|
joeryan/pytest-practice
|
45b485cdc654f8d03608a57bbf0b26fcc9b5b82e
|
[
"MIT"
] | null | null | null |
journal.py
|
joeryan/pytest-practice
|
45b485cdc654f8d03608a57bbf0b26fcc9b5b82e
|
[
"MIT"
] | null | null | null |
journal.py
|
joeryan/pytest-practice
|
45b485cdc654f8d03608a57bbf0b26fcc9b5b82e
|
[
"MIT"
] | null | null | null |
"""Simple console journal: load saved entries, list/add interactively, save."""
import sys
from pathlib import Path

jfile = Path('./data/default.jrn')

print('-' * 40 + "\n")
print("\t\tJOURNAL APP")
print('-' * 40 + "\n")

journal = []
if jfile.is_file():
    print("... loading journal from ./data/default.jrn ...")
    with open(jfile) as jf:
        # Normalize each stored line (drop trailing newline/whitespace).
        journal = [line.strip() for line in jf]
    print("... loaded {c} journal entries ...".format(c=len(journal)))


def list_entries():
    """Print all journal entries, numbered from 1."""
    for num, item in enumerate(journal, start=1):
        print(str(num) + ".\t" + item)


while True:
    choice = input("What do you want to do? [L]ist, [A]dd, or E[x]it: ").upper()
    if choice == "L":
        list_entries()
    elif choice == "A":
        # Strip the new entry so it matches the normalization applied on load.
        journal.append(input("Enter your journal entry:\n").strip())
    elif choice == "X":
        break

print("... saving journal to ./data/default.jrn ...")
# Ensure ./data exists so saving works on a fresh checkout.
jfile.parent.mkdir(parents=True, exist_ok=True)
with open(jfile, 'w') as jf:
    jf.writelines(entry + "\n" for entry in journal)
print("... saved {c} journal entries ...".format(c=len(journal)))
| 23.309524
| 69
| 0.618999
|
4a06b2994c123438ce3bc83c102ddcf6de8d822e
| 40,202
|
py
|
Python
|
cloudbridge/providers/aws/resources.py
|
FabioRosado/cloudbridge
|
bf11839158d2e2fda6fc651f94075c5315aad8f6
|
[
"MIT"
] | null | null | null |
cloudbridge/providers/aws/resources.py
|
FabioRosado/cloudbridge
|
bf11839158d2e2fda6fc651f94075c5315aad8f6
|
[
"MIT"
] | null | null | null |
cloudbridge/providers/aws/resources.py
|
FabioRosado/cloudbridge
|
bf11839158d2e2fda6fc651f94075c5315aad8f6
|
[
"MIT"
] | 1
|
2021-11-17T02:55:37.000Z
|
2021-11-17T02:55:37.000Z
|
"""
DataTypes used by this provider
"""
import hashlib
import inspect
import logging
from botocore.exceptions import ClientError
import tenacity
from cloudbridge.base.resources import BaseAttachmentInfo
from cloudbridge.base.resources import BaseBucket
from cloudbridge.base.resources import BaseBucketObject
from cloudbridge.base.resources import BaseDnsRecord
from cloudbridge.base.resources import BaseDnsZone
from cloudbridge.base.resources import BaseFloatingIP
from cloudbridge.base.resources import BaseInstance
from cloudbridge.base.resources import BaseInternetGateway
from cloudbridge.base.resources import BaseKeyPair
from cloudbridge.base.resources import BaseLaunchConfig
from cloudbridge.base.resources import BaseMachineImage
from cloudbridge.base.resources import BaseNetwork
from cloudbridge.base.resources import BasePlacementZone
from cloudbridge.base.resources import BaseRegion
from cloudbridge.base.resources import BaseRouter
from cloudbridge.base.resources import BaseSnapshot
from cloudbridge.base.resources import BaseSubnet
from cloudbridge.base.resources import BaseVMFirewall
from cloudbridge.base.resources import BaseVMFirewallRule
from cloudbridge.base.resources import BaseVMType
from cloudbridge.base.resources import BaseVolume
from cloudbridge.interfaces.resources import GatewayState
from cloudbridge.interfaces.resources import InstanceState
from cloudbridge.interfaces.resources import MachineImageState
from cloudbridge.interfaces.resources import NetworkState
from cloudbridge.interfaces.resources import RouterState
from cloudbridge.interfaces.resources import SnapshotState
from cloudbridge.interfaces.resources import SubnetState
from cloudbridge.interfaces.resources import VolumeState
from .helpers import find_tag_value
from .helpers import trim_empty_params
from .subservices import AWSBucketObjectSubService
from .subservices import AWSDnsRecordSubService
from .subservices import AWSFloatingIPSubService
from .subservices import AWSGatewaySubService
from .subservices import AWSSubnetSubService
from .subservices import AWSVMFirewallRuleSubService
log = logging.getLogger(__name__)
class AWSMachineImage(BaseMachineImage):
    """An EC2 AMI exposed through the cloudbridge machine-image interface."""

    IMAGE_STATE_MAP = {
        'pending': MachineImageState.PENDING,
        'transient': MachineImageState.PENDING,
        'available': MachineImageState.AVAILABLE,
        'deregistered': MachineImageState.PENDING,
        'failed': MachineImageState.ERROR,
        'error': MachineImageState.ERROR,
        'invalid': MachineImageState.ERROR
    }

    def __init__(self, provider, image):
        super(AWSMachineImage, self).__init__(provider)
        if isinstance(image, AWSMachineImage):
            # Unwrap an already-wrapped image rather than nesting wrappers.
            # pylint:disable=protected-access
            self._ec2_image = image._ec2_image
        else:
            self._ec2_image = image

    @property
    def id(self):
        """The AMI id."""
        return self._ec2_image.id

    @property
    def name(self):
        """The AMI name, or None if it cannot be retrieved."""
        try:
            return self._ec2_image.name
        except (AttributeError, ClientError) as e:
            # log.warn is a deprecated alias; use log.warning.
            # Falls through to an implicit None return.
            log.warning(
                "Cannot get name for image {0}: {1}".format(self.id, e))

    @property
    # pylint:disable=arguments-differ
    def label(self):
        """
        .. note:: an instance must have a (case sensitive) tag ``Name``
        """
        return find_tag_value(self._ec2_image.tags, 'Name')

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried because tags on a freshly-created image may not be
        # settable immediately (eventual consistency).
        self._ec2_image.create_tags(Tags=[{'Key': 'Name',
                                           'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    @property
    def description(self):
        """The AMI description, or None when the attribute is absent."""
        try:
            return self._ec2_image.description
        except AttributeError:
            return None

    @property
    def min_disk(self):
        """Size (GB) of the root EBS volume, or None if not determinable."""
        vols = [bdm.get('Ebs', {}) for bdm in
                self._ec2_image.block_device_mappings if
                bdm.get('DeviceName') == self._ec2_image.root_device_name]
        if vols:
            return vols[0].get('VolumeSize')
        else:
            return None

    def delete(self):
        """Deregister the AMI and delete its root-device snapshot, if any."""
        snapshot_ids = [
            bdm.get('Ebs', {}).get('SnapshotId')
            for bdm in self._ec2_image.block_device_mappings
            if bdm.get('DeviceName') == self._ec2_image.root_device_name]
        self._ec2_image.deregister()
        self.wait_for([MachineImageState.UNKNOWN, MachineImageState.ERROR])
        # Guard: instance-store images have no matching root EBS mapping,
        # so the list may be empty (original code raised IndexError here).
        if snapshot_ids and snapshot_ids[0]:
            snapshot = self._provider.storage.snapshots.get(snapshot_ids[0])
            if snapshot:
                snapshot.delete()

    @property
    def state(self):
        """Mapped MachineImageState; UNKNOWN on any query failure."""
        try:
            return AWSMachineImage.IMAGE_STATE_MAP.get(
                self._ec2_image.state, MachineImageState.UNKNOWN)
        except Exception:
            # Ignore all exceptions when querying state
            return MachineImageState.UNKNOWN

    def refresh(self):
        """Re-fetch this image's attributes from EC2."""
        self._ec2_image.reload()
class AWSPlacementZone(BasePlacementZone):
    """An AWS availability zone within a region."""

    def __init__(self, provider, zone, region):
        super(AWSPlacementZone, self).__init__(provider)
        wrapped = isinstance(zone, AWSPlacementZone)
        # pylint:disable=protected-access
        self._aws_zone = zone._aws_zone if wrapped else zone
        self._aws_region = zone._aws_region if wrapped else region

    @property
    def id(self):
        """The availability-zone identifier."""
        return self._aws_zone

    @property
    def name(self):
        """Same as the id for AWS zones."""
        return self.id

    @property
    def region_name(self):
        """Name of the region this zone belongs to."""
        return self._aws_region
class AWSVMType(BaseVMType):
    """A VM (instance) type described by an EC2 instance-type dict."""

    def __init__(self, provider, instance_dict):
        super(AWSVMType, self).__init__(provider)
        self._inst_dict = instance_dict

    @property
    def id(self):
        """The instance-type name (e.g. the ``InstanceType`` field)."""
        return str(self._inst_dict.get('InstanceType'))

    @property
    def name(self):
        """Same as the id."""
        return self.id

    @property
    def family(self):
        # Limited to whether CurrentGeneration or not
        return ('CurrentGeneration'
                if self._inst_dict.get('CurrentGeneration') else None)

    @property
    def vcpus(self):
        """Default vCPU count, 0 when not reported."""
        return (self._inst_dict.get('VCpuInfo') or {}).get('DefaultVCpus', 0)

    @property
    def ram(self):
        """Memory in GiB (MiB / 1024), 0 when not reported."""
        mem_info = self._inst_dict.get('MemoryInfo')
        if not mem_info:
            return 0
        return mem_info.get('SizeInMiB', 0) / 1024

    @property
    def size_root_disk(self):
        """Root disk size is not reported by this API; always 0."""
        return 0

    @property
    def size_ephemeral_disks(self):
        """Total instance-store size in GB, 0 when none."""
        return (self._inst_dict.get('InstanceStorageInfo') or {}).get(
            'TotalSizeInGB', 0)

    @property
    def num_ephemeral_disks(self):
        """Total count of instance-store disks, 0 when none."""
        storage_info = self._inst_dict.get('InstanceStorageInfo')
        if not storage_info:
            return 0
        return sum(d.get("Count", 0) for d in storage_info.get("Disks", []))

    @property
    def extra_data(self):
        """All remaining fields not surfaced via dedicated properties."""
        hidden = ("InstanceType", "VCpuInfo", "MemoryInfo")
        return {k: v for k, v in self._inst_dict.items() if k not in hidden}
class AWSInstance(BaseInstance):
    """An EC2 virtual machine instance."""

    # ref:
    # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-lifecycle.html
    INSTANCE_STATE_MAP = {
        'pending': InstanceState.PENDING,
        'running': InstanceState.RUNNING,
        'shutting-down': InstanceState.CONFIGURING,
        'terminated': InstanceState.DELETED,
        'stopping': InstanceState.CONFIGURING,
        'stopped': InstanceState.STOPPED
    }

    def __init__(self, provider, ec2_instance):
        super(AWSInstance, self).__init__(provider)
        self._ec2_instance = ec2_instance
        # Set to True when a reload fails (instance gone); forces UNKNOWN state.
        self._unknown_state = False

    @property
    def id(self):
        """The EC2 instance id."""
        return self._ec2_instance.id

    @property
    def name(self):
        """Same as the id for AWS instances."""
        return self.id

    @property
    # pylint:disable=arguments-differ
    def label(self):
        """
        .. note:: an instance must have a (case sensitive) tag ``Name``
        """
        return find_tag_value(self._ec2_instance.tags, 'Name')

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried: tagging can fail transiently right after instance creation.
        self._ec2_instance.create_tags(Tags=[{'Key': 'Name',
                                              'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    @property
    def public_ips(self):
        """List with the public IP, or an empty list when none assigned."""
        return ([self._ec2_instance.public_ip_address]
                if self._ec2_instance.public_ip_address else [])

    @property
    def private_ips(self):
        """List with the private IP, or an empty list when none assigned."""
        return ([self._ec2_instance.private_ip_address]
                if self._ec2_instance.private_ip_address else [])

    @property
    def vm_type_id(self):
        """The raw EC2 instance-type string."""
        return self._ec2_instance.instance_type

    @property
    def vm_type(self):
        """Resolve this instance's type via the provider's vm_types service."""
        return self._provider.compute.vm_types.find(
            name=self._ec2_instance.instance_type)[0]

    def reboot(self):
        """Request a reboot of this instance."""
        self._ec2_instance.reboot()

    def start(self):
        """Start the instance; True if the API reports pending/running."""
        response = self._ec2_instance.start()
        states = ['pending', 'running']
        if response['StartingInstances'][0]['CurrentState']['Name'] in states:
            return True
        else:
            return False

    def stop(self):
        """Stop the instance; True if the API reports stopping/stopped."""
        response = self._ec2_instance.stop()
        states = ['stopping', 'stopped']
        if response['StoppingInstances'][0]['CurrentState']['Name'] in states:
            return True
        else:
            return False

    @property
    def image_id(self):
        """Id of the AMI this instance was launched from."""
        return self._ec2_instance.image_id

    @property
    def zone_id(self):
        """Availability zone the instance is placed in."""
        return self._ec2_instance.placement.get('AvailabilityZone')

    @property
    def subnet_id(self):
        """Id of the subnet the instance is attached to."""
        return self._ec2_instance.subnet_id

    @property
    def vm_firewalls(self):
        """Firewall (security group) objects attached to this instance."""
        return [
            self._provider.security.vm_firewalls.get(fw_id)
            for fw_id in self.vm_firewall_ids
        ]

    @property
    def vm_firewall_ids(self):
        """De-duplicated list of attached security-group ids."""
        return list(set([
            group.get('GroupId') for group in
            self._ec2_instance.security_groups
        ]))

    @property
    def key_pair_id(self):
        """Name of the SSH key pair associated with the instance."""
        return self._ec2_instance.key_name

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _wait_for_image(self, image):
        # Block until the newly-created image is visible to the EC2 API.
        self._provider.ec2_conn.meta.client.get_waiter('image_exists').wait(
            ImageIds=[image.id])

    def create_image(self, label):
        """Create an AMI from this instance and label it.

        Returns the refreshed :class:`AWSMachineImage`.
        """
        self.assert_valid_resource_label(label)
        name = self._generate_name_from_label(label, 'cb-img')
        image = AWSMachineImage(self._provider,
                                self._ec2_instance.create_image(Name=name))
        # Wait for the image to exist
        self._wait_for_image(image)
        # Add image label
        image.label = label
        # Return the image
        image.refresh()
        return image

    def _get_fip(self, floating_ip):
        """Get a floating IP object based on the supplied allocation ID."""
        return self._provider.networking._floating_ips.get(None, floating_ip)

    def add_floating_ip(self, floating_ip):
        """Associate a floating (elastic) IP with this instance.

        Accepts either an AWSFloatingIP object or an allocation id.
        """
        fip = (floating_ip if isinstance(floating_ip, AWSFloatingIP)
               else self._get_fip(floating_ip))
        # EC2-VPC uses AllocationId; EC2-Classic uses PublicIp, so only one
        # of the two parameters is sent (the other is trimmed as None).
        # pylint:disable=protected-access
        params = trim_empty_params({
            'InstanceId': self.id,
            'PublicIp': None if self._ec2_instance.vpc_id else
            fip.public_ip,
            'AllocationId': fip._ip.allocation_id})
        self._provider.ec2_conn.meta.client.associate_address(**params)
        self.refresh()

    def remove_floating_ip(self, floating_ip):
        """Disassociate a floating (elastic) IP from this instance."""
        fip = (floating_ip if isinstance(floating_ip, AWSFloatingIP)
               else self._get_fip(floating_ip))
        # Same VPC/Classic parameter split as in add_floating_ip.
        # pylint:disable=protected-access
        params = trim_empty_params({
            'PublicIp': None if self._ec2_instance.vpc_id else
            fip.public_ip,
            'AssociationId': fip._ip.association_id})
        self._provider.ec2_conn.meta.client.disassociate_address(**params)
        self.refresh()

    def add_vm_firewall(self, firewall):
        """Attach an additional security group to this instance."""
        self._ec2_instance.modify_attribute(
            Groups=self.vm_firewall_ids + [firewall.id])

    def remove_vm_firewall(self, firewall):
        """Detach a security group from this instance."""
        self._ec2_instance.modify_attribute(
            Groups=([fw_id for fw_id in self.vm_firewall_ids
                     if fw_id != firewall.id]))

    @property
    def state(self):
        """Mapped InstanceState; UNKNOWN if the instance is gone or errors."""
        if self._unknown_state:
            return InstanceState.UNKNOWN
        try:
            return AWSInstance.INSTANCE_STATE_MAP.get(
                self._ec2_instance.state['Name'], InstanceState.UNKNOWN)
        except Exception:
            # Ignore all exceptions when querying state
            return InstanceState.UNKNOWN

    def refresh(self):
        """Re-fetch instance attributes, flagging UNKNOWN if it no longer exists."""
        try:
            self._ec2_instance.reload()
            self._unknown_state = False
        except ClientError:
            # The instance no longer exists and cannot be refreshed.
            # set the state to unknown
            self._unknown_state = True

    # pylint:disable=unused-argument
    def _wait_till_exists(self, timeout=None, interval=None):
        # timeout/interval are accepted for interface compatibility but the
        # boto3 waiter uses its own polling configuration.
        self._ec2_instance.wait_until_exists()
        # refresh again to make sure instance status is in sync
        self._ec2_instance.reload()
class AWSVolume(BaseVolume):
    """An EBS volume exposed through the cloudbridge volume interface."""

    # Ref:
    # http://docs.aws.amazon.com/AWSEC2/latest/CommandLineReference/
    # ApiReference-cmd-DescribeVolumes.html
    VOLUME_STATE_MAP = {
        'creating': VolumeState.CREATING,
        'available': VolumeState.AVAILABLE,
        'in-use': VolumeState.IN_USE,
        'deleting': VolumeState.CONFIGURING,
        'deleted': VolumeState.DELETED,
        'error': VolumeState.ERROR
    }

    def __init__(self, provider, volume):
        super(AWSVolume, self).__init__(provider)
        self._volume = volume
        # Set when a reload fails (volume gone); forces state -> UNKNOWN.
        self._unknown_state = False

    @property
    def id(self):
        """The EBS volume id."""
        return self._volume.id

    @property
    def name(self):
        """Same as the id for AWS volumes."""
        return self.id

    @property
    # pylint:disable=arguments-differ
    def label(self):
        """Label stored in the (case sensitive) ``Name`` tag, or None."""
        try:
            return find_tag_value(self._volume.tags, 'Name')
        except ClientError as e:
            # log.warn is a deprecated alias; use log.warning.
            log.warning(
                "Cannot get label for volume {0}: {1}".format(self.id, e))

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried: tagging can fail transiently right after volume creation.
        self._volume.create_tags(Tags=[{'Key': 'Name', 'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    @property
    def description(self):
        """Description stored in the ``Description`` tag, or None."""
        return find_tag_value(self._volume.tags, 'Description')

    @description.setter
    def description(self, value):
        self._volume.create_tags(Tags=[{'Key': 'Description',
                                        'Value': value or ""}])

    @property
    def size(self):
        """Volume size in GB."""
        return self._volume.size

    @property
    def create_time(self):
        """Timestamp when the volume was created."""
        return self._volume.create_time

    @property
    def zone_id(self):
        """Availability zone this volume lives in."""
        return self._volume.availability_zone

    @property
    def source(self):
        """The snapshot this volume was created from, or None."""
        if self._volume.snapshot_id:
            return self._provider.storage.snapshots.get(
                self._volume.snapshot_id)
        return None

    @property
    def attachments(self):
        """A BaseAttachmentInfo for the first attachment, or None.

        .. note:: despite the plural name, at most one attachment is
           reported (an EBS volume attaches to a single instance).
        """
        if not self._volume.attachments:
            return None
        attachment = self._volume.attachments[0]
        return BaseAttachmentInfo(self,
                                  attachment.get('InstanceId'),
                                  attachment.get('Device'))

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(Exception),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _wait_till_volume_attached(self, instance_id):
        # Poll (via retry-on-raise) until the attachment is visible.
        self.refresh()
        if not self.attachments.instance_id == instance_id:
            raise Exception(f"Volume {self.id} is not yet attached to"
                            f"instance {instance_id}")

    def attach(self, instance, device):
        """Attach this volume to ``instance`` (object or id) at ``device``."""
        instance_id = instance.id if isinstance(
            instance,
            AWSInstance) else instance
        self._volume.attach_to_instance(InstanceId=instance_id,
                                        Device=device)
        self._wait_till_volume_attached(instance_id)

    def detach(self, force=False):
        """Detach this volume from its instance, optionally forcing."""
        a = self.attachments
        if a:
            self._volume.detach_from_instance(
                InstanceId=a.instance_id,
                Device=a.device,
                Force=force)

    def create_snapshot(self, label, description=None):
        """Snapshot this volume, label it, and wait until it is ready."""
        self.assert_valid_resource_label(label)
        snap = AWSSnapshot(
            self._provider,
            self._volume.create_snapshot(
                TagSpecifications=[{'ResourceType': 'snapshot',
                                    'Tags': [{'Key': 'Name',
                                              'Value': label}]}],
                Description=description or ""))
        snap.wait_till_ready()
        return snap

    @property
    def state(self):
        """Mapped VolumeState; UNKNOWN if the volume is gone or errors."""
        if self._unknown_state:
            return VolumeState.UNKNOWN
        try:
            return AWSVolume.VOLUME_STATE_MAP.get(
                self._volume.state, VolumeState.UNKNOWN)
        except Exception:
            # Ignore all exceptions when querying state
            return VolumeState.UNKNOWN

    def refresh(self):
        """Re-fetch volume attributes, flagging UNKNOWN if it no longer exists."""
        try:
            self._volume.reload()
            self._unknown_state = False
        except ClientError:
            # The volume no longer exists and cannot be refreshed.
            # set the status to unknown
            self._unknown_state = True
class AWSSnapshot(BaseSnapshot):
    """An EBS snapshot exposed through the cloudbridge snapshot interface."""

    # Ref: http://docs.aws.amazon.com/AWSEC2/latest/CommandLineReference/
    # ApiReference-cmd-DescribeSnapshots.html
    SNAPSHOT_STATE_MAP = {
        'pending': SnapshotState.PENDING,
        'deleting': SnapshotState.PENDING,
        'completed': SnapshotState.AVAILABLE,
        'error': SnapshotState.ERROR
    }

    def __init__(self, provider, snapshot):
        super(AWSSnapshot, self).__init__(provider)
        self._snapshot = snapshot
        # Set when a reload fails (snapshot gone); forces state -> UNKNOWN.
        self._unknown_state = False

    @property
    def id(self):
        """The EBS snapshot id."""
        return self._snapshot.id

    @property
    def name(self):
        """Same as the id for AWS snapshots."""
        return self.id

    @property
    # pylint:disable=arguments-differ
    def label(self):
        """Label stored in the (case sensitive) ``Name`` tag, or None."""
        try:
            return find_tag_value(self._snapshot.tags, 'Name')
        except ClientError as e:
            # log.warn is a deprecated alias; use log.warning.
            log.warning(
                "Cannot get label for snap {0}: {1}".format(self.id, e))

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried: tagging can fail transiently right after snapshot creation.
        self._snapshot.create_tags(Tags=[{'Key': 'Name',
                                          'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    @property
    def description(self):
        """Description stored in the ``Description`` tag, or None."""
        return find_tag_value(self._snapshot.tags, 'Description')

    @description.setter
    def description(self, value):
        self._snapshot.create_tags(Tags=[{
            'Key': 'Description', 'Value': value or ""}])

    @property
    def size(self):
        """Size (GB) of the source volume."""
        return self._snapshot.volume_size

    @property
    def volume_id(self):
        """Id of the volume this snapshot was taken from."""
        return self._snapshot.volume_id

    @property
    def create_time(self):
        """Timestamp when the snapshot was started."""
        return self._snapshot.start_time

    @property
    def state(self):
        """Mapped SnapshotState; UNKNOWN if the snapshot is gone or errors."""
        if self._unknown_state:
            return SnapshotState.UNKNOWN
        try:
            return AWSSnapshot.SNAPSHOT_STATE_MAP.get(
                self._snapshot.state, SnapshotState.UNKNOWN)
        except Exception:
            # Ignore all exceptions when querying state
            return SnapshotState.UNKNOWN

    def refresh(self):
        """Re-fetch snapshot attributes, flagging UNKNOWN if it no longer exists."""
        try:
            self._snapshot.reload()
            self._unknown_state = False
        except ClientError:
            # The snapshot no longer exists and cannot be refreshed.
            # set the status to unknown
            self._unknown_state = True

    def create_volume(self, size=None, volume_type=None, iops=None):
        """Create a new volume from this snapshot and wait until ready.

        NOTE(review): ``volume_type`` and ``iops`` are accepted for interface
        compatibility but are not forwarded to the create call — confirm
        whether the volumes service supports them before relying on them.
        """
        label = "from-snap-{0}".format(self.label or self.id)
        cb_vol = self._provider.storage.volumes.create(
            label=label,
            size=size,
            snapshot=self.id)
        cb_vol.wait_till_ready()
        return cb_vol
class AWSKeyPair(BaseKeyPair):
    """An AWS SSH key pair; all behavior comes from BaseKeyPair."""

    def __init__(self, provider, key_pair):
        super().__init__(provider, key_pair)
class AWSVMFirewall(BaseVMFirewall):
    """An EC2 security group exposed through the cloudbridge firewall interface."""

    def __init__(self, provider, _vm_firewall):
        super(AWSVMFirewall, self).__init__(provider, _vm_firewall)
        # Sub-service through which this firewall's rules are managed.
        self._rule_container = AWSVMFirewallRuleSubService(provider, self)

    @property
    def name(self):
        """
        Return the name of this VM firewall.
        """
        return self._vm_firewall.group_name

    @property
    def label(self):
        """Label stored in the (case sensitive) ``Name`` tag, or None."""
        try:
            return find_tag_value(self._vm_firewall.tags, 'Name')
        except ClientError:
            return None

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried: tagging can fail transiently right after group creation.
        self._vm_firewall.create_tags(Tags=[{'Key': 'Name',
                                             'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    @property
    def description(self):
        """Description stored in the ``Description`` tag, or None."""
        try:
            return find_tag_value(self._vm_firewall.tags, 'Description')
        except ClientError:
            return None

    @description.setter
    # pylint:disable=arguments-differ
    def description(self, value):
        self._vm_firewall.create_tags(Tags=[{'Key': 'Description',
                                             'Value': value or ""}])

    @property
    def network_id(self):
        """Id of the VPC this security group belongs to."""
        return self._vm_firewall.vpc_id

    @property
    def rules(self):
        """Sub-service for listing/creating this firewall's rules."""
        return self._rule_container

    def refresh(self):
        """Re-fetch this security group's attributes from EC2."""
        self._vm_firewall.reload()

    def to_json(self):
        """Serialize public properties (plus rules) to a plain dict."""
        # Collect all non-callable, public attributes via introspection.
        attr = inspect.getmembers(self, lambda a: not inspect.isroutine(a))
        js = {k: v for (k, v) in attr if not k.startswith('_')}
        json_rules = [r.to_json() for r in self.rules]
        js['rules'] = json_rules
        if js.get('network_id'):
            js.pop('network_id')  # Omit for consistency across cloud providers
        return js
class AWSVMFirewallRule(BaseVMFirewallRule):
    """A single ingress/egress rule belonging to an AWSVMFirewall."""

    def __init__(self, parent_fw, direction, rule):
        self._direction = direction
        super(AWSVMFirewallRule, self).__init__(parent_fw, rule)
        # Cache a stable id derived from the rule's canonical name.
        self._id = hashlib.md5(self._name.encode('ascii')).hexdigest()

    @property
    def id(self):
        """Cached md5-based rule id."""
        return self._id

    @property
    def direction(self):
        """Traffic direction this rule applies to."""
        return self._direction

    @property
    def protocol(self):
        """IP protocol of the rule (e.g. tcp/udp/icmp)."""
        return self._rule.get('IpProtocol')

    @property
    def from_port(self):
        """Start of the port range."""
        return self._rule.get('FromPort')

    @property
    def to_port(self):
        """End of the port range."""
        return self._rule.get('ToPort')

    @property
    def cidr(self):
        """First CIDR range of the rule, or None when none present."""
        ip_ranges = self._rule.get('IpRanges') or []
        return ip_ranges[0].get('CidrIp') if ip_ranges else None

    @property
    def src_dest_fw_id(self):
        """Id of the referenced source/destination group, or None."""
        group_pairs = self._rule.get('UserIdGroupPairs') or []
        return group_pairs[0]['GroupId'] if group_pairs else None

    @property
    def src_dest_fw(self):
        """Wrapped source/destination firewall object, or None."""
        fw_id = self.src_dest_fw_id
        if not fw_id:
            return None
        return AWSVMFirewall(self._provider,
                             self._provider.ec2_conn.SecurityGroup(fw_id))

    @staticmethod
    def _construct_ip_perms(protocol, from_port, to_port, cidr,
                            src_dest_fw_id):
        """Build an EC2 IpPermissions dict, leaving unused fields as None."""
        perms = {
            'IpProtocol': protocol,
            'FromPort': from_port,
            'ToPort': to_port,
            'IpRanges': None,
            'UserIdGroupPairs': None,
        }
        if cidr:
            perms['IpRanges'] = [{'CidrIp': cidr}]
        if src_dest_fw_id:
            perms['UserIdGroupPairs'] = [{'GroupId': src_dest_fw_id}]
        return perms
class AWSBucketObject(BaseBucketObject):
    """An object (key) stored in an S3 bucket."""

    class BucketObjIterator():
        """Iterate over an S3 streaming body in fixed-size chunks."""
        CHUNK_SIZE = 4096

        def __init__(self, body):
            self.body = body

        def __iter__(self):
            # Yield chunks until the stream is exhausted.
            while True:
                data = self.read(self.CHUNK_SIZE)
                if data:
                    yield data
                else:
                    break

        def read(self, length):
            return self.body.read(amt=length)

        def close(self):
            return self.body.close()

    def __init__(self, provider, obj):
        super(AWSBucketObject, self).__init__(provider)
        # May be either an s3.Object or an s3.ObjectSummary (see `size`).
        self._obj = obj

    @property
    def id(self):
        """The object key."""
        return self._obj.key

    @property
    def name(self):
        """Same as the key."""
        return self.id

    @property
    def size(self):
        """Object size in bytes, for both Object and ObjectSummary."""
        try:
            return self._obj.content_length
        except AttributeError:  # we're dealing with s3.ObjectSummary
            return self._obj.size

    @property
    def last_modified(self):
        """Last-modified timestamp formatted as an ISO-like string."""
        return self._obj.last_modified.strftime("%Y-%m-%dT%H:%M:%S.%f")

    def iter_content(self):
        """Return a chunked iterator over this object's content stream."""
        return self.BucketObjIterator(self._obj.get().get('Body'))

    def upload(self, data):
        """Upload in-memory data as this object's content."""
        self._obj.put(Body=data)

    def upload_from_file(self, path):
        """Upload the file at ``path`` as this object's content."""
        self._obj.upload_file(path)

    def delete(self):
        """Delete this object from its bucket."""
        self._obj.delete()

    def generate_url(self, expires_in):
        """Return a presigned GET URL valid for ``expires_in`` seconds."""
        return self._provider.s3_conn.meta.client.generate_presigned_url(
            'get_object',
            Params={'Bucket': self._obj.bucket_name, 'Key': self.id},
            ExpiresIn=expires_in)

    def refresh(self):
        """Re-fetch this object's metadata from S3."""
        self._obj.load()
class AWSBucket(BaseBucket):
    """An S3 bucket exposed through the cloudbridge bucket interface."""

    def __init__(self, provider, bucket):
        super(AWSBucket, self).__init__(provider)
        self._bucket = bucket
        # Sub-service through which this bucket's objects are accessed.
        self._object_container = AWSBucketObjectSubService(provider, self)

    @property
    def id(self):
        """Bucket names are globally unique, so the name serves as the id."""
        return self._bucket.name

    @property
    def name(self):
        """Same as the id."""
        return self.id

    @property
    def objects(self):
        """Sub-service for listing/creating objects in this bucket."""
        return self._object_container
class AWSRegion(BaseRegion):
    """An AWS region, backed by a ``DescribeRegions`` result dict."""

    def __init__(self, provider, aws_region):
        super(AWSRegion, self).__init__(provider)
        self._aws_region = aws_region

    @property
    def id(self):
        """The region name (e.g. ``us-east-1``)."""
        return self._aws_region.get('RegionName')

    @property
    def name(self):
        """Same as the id."""
        return self.id

    @property
    def zones(self):
        """Availability zones within this region."""
        if self.id == self._provider.region_name:  # optimisation
            conn = self._provider.ec2_conn
        else:
            # pylint:disable=protected-access
            conn = self._provider._connect_ec2_region(region_name=self.id)
        response = conn.meta.client.describe_availability_zones()
        return [AWSPlacementZone(self._provider, z.get('ZoneName'), self.id)
                for z in response.get('AvailabilityZones', [])]
class AWSNetwork(BaseNetwork):
    """A VPC exposed through the cloudbridge network interface."""

    # Ref:
    # docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpcs.html
    _NETWORK_STATE_MAP = {
        'pending': NetworkState.PENDING,
        'available': NetworkState.AVAILABLE,
    }

    def __init__(self, provider, network):
        super(AWSNetwork, self).__init__(provider)
        self._vpc = network
        # Set when a reload fails (VPC gone); forces state -> UNKNOWN.
        self._unknown_state = False
        self._gtw_container = AWSGatewaySubService(provider, self)
        self._subnet_svc = AWSSubnetSubService(provider, self)

    @property
    def id(self):
        """The VPC id."""
        return self._vpc.id

    @property
    def name(self):
        """Same as the id for AWS networks."""
        return self.id

    @property
    def label(self):
        """Label stored in the (case sensitive) ``Name`` tag, or None."""
        return find_tag_value(self._vpc.tags, 'Name')

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried: tagging can fail transiently right after VPC creation.
        self._vpc.create_tags(Tags=[{'Key': 'Name', 'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    @property
    def external(self):
        """
        For AWS, all VPC networks can be connected to the Internet so always
        return ``True``.
        """
        return True

    @property
    def state(self):
        """Mapped NetworkState; UNKNOWN if the VPC is gone or errors."""
        if self._unknown_state:
            return NetworkState.UNKNOWN
        try:
            return AWSNetwork._NETWORK_STATE_MAP.get(
                self._vpc.state, NetworkState.UNKNOWN)
        except Exception:
            # Ignore all exceptions when querying state
            return NetworkState.UNKNOWN

    @property
    def cidr_block(self):
        """The VPC's primary CIDR block."""
        return self._vpc.cidr_block

    @property
    def subnets(self):
        """Sub-service for listing/creating subnets in this network."""
        return self._subnet_svc

    def refresh(self):
        """Re-fetch VPC attributes, flagging UNKNOWN if it no longer exists."""
        try:
            self._vpc.reload()
            self._unknown_state = False
        except ClientError:
            # The network no longer exists and cannot be refreshed.
            # set the status to unknown
            self._unknown_state = True

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _wait_for_vpc(self):
        # Two-stage wait: first for existence, then for availability.
        self._vpc.wait_until_exists()
        self._vpc.wait_until_available()

    def wait_till_ready(self, timeout=None, interval=None):
        """Block until the VPC is available, then sync local state.

        timeout/interval are accepted for interface compatibility; the
        boto3 waiters use their own polling configuration.
        """
        self._wait_for_vpc()
        self.refresh()

    @property
    def gateways(self):
        """Sub-service for this network's internet gateways."""
        return self._gtw_container
class AWSSubnet(BaseSubnet):
    """A VPC subnet exposed through the cloudbridge subnet interface."""

    # http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html
    _SUBNET_STATE_MAP = {
        'pending': SubnetState.PENDING,
        'available': SubnetState.AVAILABLE,
    }

    def __init__(self, provider, subnet):
        super(AWSSubnet, self).__init__(provider)
        self._subnet = subnet
        # Set when a reload fails (subnet gone); forces state -> UNKNOWN.
        self._unknown_state = False

    @property
    def id(self):
        """The subnet id."""
        return self._subnet.id

    @property
    def name(self):
        """Same as the id for AWS subnets."""
        return self.id

    @property
    def label(self):
        """Label stored in the (case sensitive) ``Name`` tag, or None."""
        return find_tag_value(self._subnet.tags, 'Name')

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried: tagging can fail transiently right after subnet creation.
        self._subnet.create_tags(Tags=[{'Key': 'Name', 'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    @property
    def cidr_block(self):
        """The subnet's CIDR block."""
        return self._subnet.cidr_block

    @property
    def network_id(self):
        """Id of the VPC containing this subnet."""
        return self._subnet.vpc_id

    @property
    def zone(self):
        """The placement zone this subnet lives in."""
        return AWSPlacementZone(self._provider, self._subnet.availability_zone,
                                self._provider.region_name)

    @property
    def state(self):
        """Mapped SubnetState; UNKNOWN if the subnet is gone or errors."""
        if self._unknown_state:
            return SubnetState.UNKNOWN
        try:
            return self._SUBNET_STATE_MAP.get(
                self._subnet.state, SubnetState.UNKNOWN)
        except Exception:
            # Ignore all exceptions when querying state
            return SubnetState.UNKNOWN

    def refresh(self):
        """Re-fetch subnet attributes, flagging UNKNOWN if it no longer exists."""
        try:
            self._subnet.reload()
            self._unknown_state = False
        except ClientError:
            # subnet no longer exists
            self._unknown_state = True
class AWSFloatingIP(BaseFloatingIP):
    """An elastic IP address exposed as a cloudbridge floating IP."""

    def __init__(self, provider, floating_ip):
        super(AWSFloatingIP, self).__init__(provider)
        self._ip = floating_ip

    @property
    def id(self):
        """The elastic IP allocation id."""
        return self._ip.allocation_id

    @property
    def public_ip(self):
        """The public address of this elastic IP."""
        return self._ip.public_ip

    @property
    def private_ip(self):
        """Private address it maps to when associated, else None."""
        return self._ip.private_ip_address

    @property
    def in_use(self):
        """Whether this elastic IP is currently associated with anything."""
        # Idiomatic truthiness test instead of `True if x else False`.
        return bool(self._ip.association_id)

    def refresh(self):
        """Re-fetch this address's attributes from EC2."""
        self._ip.reload()
class AWSRouter(BaseRouter):
    """A VPC route table exposed through the cloudbridge router interface."""

    def __init__(self, provider, route_table):
        super(AWSRouter, self).__init__(provider)
        self._route_table = route_table

    @property
    def id(self):
        """The route table id."""
        return self._route_table.id

    @property
    def name(self):
        """Same as the id for AWS routers."""
        return self.id

    @property
    def label(self):
        """Label stored in the (case sensitive) ``Name`` tag, or None."""
        return find_tag_value(self._route_table.tags, 'Name')

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(ClientError),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _set_label(self, value):
        # Retried: tagging can fail transiently right after creation.
        self._route_table.create_tags(Tags=[{'Key': 'Name',
                                             'Value': value or ""}])

    @label.setter
    # pylint:disable=arguments-differ
    def label(self, value):
        """Validate then store the label in the ``Name`` tag."""
        self.assert_valid_resource_label(value)
        self._set_label(value)

    def refresh(self):
        """Re-fetch route table attributes; clear associations if it is gone."""
        try:
            self._route_table.reload()
        except ClientError:
            # Route table no longer exists; clearing associations makes
            # `state` report DETACHED.
            self._route_table.associations = None

    @property
    def state(self):
        """ATTACHED when any subnet association exists, else DETACHED."""
        if self._route_table.associations:
            return RouterState.ATTACHED
        return RouterState.DETACHED

    @property
    def network_id(self):
        """Id of the VPC this route table belongs to."""
        return self._route_table.vpc_id

    @tenacity.retry(stop=tenacity.stop_after_attempt(5),
                    retry=tenacity.retry_if_exception_type(Exception),
                    wait=tenacity.wait_fixed(5),
                    reraise=True)
    def _wait_till_subnet_attached(self, subnet_id):
        # Poll (via retry-on-raise) until the association becomes visible.
        self.refresh()
        association = [a for a in self._route_table.associations
                       if a.subnet_id == subnet_id]
        if not association:
            raise Exception(
                f"Subnet {subnet_id} not attached to route table {self.id}")

    def attach_subnet(self, subnet):
        """Associate a subnet (object or id) with this route table."""
        subnet_id = subnet.id if isinstance(subnet, AWSSubnet) else subnet
        self._route_table.associate_with_subnet(SubnetId=subnet_id)
        self._wait_till_subnet_attached(subnet_id)

    def detach_subnet(self, subnet):
        """Remove all of this route table's associations with a subnet."""
        subnet_id = subnet.id if isinstance(subnet, AWSSubnet) else subnet
        associations = [a for a in self._route_table.associations
                        if a.subnet_id == subnet_id]
        for a in associations:
            a.delete()
        self.refresh()

    @property
    def subnets(self):
        """Subnets currently associated with this route table."""
        return [AWSSubnet(self._provider, rta.subnet)
                for rta in self._route_table.associations if rta.subnet]

    def attach_gateway(self, gateway):
        """Add a default route (0.0.0.0/0) through the given gateway."""
        gw_id = (gateway.id if isinstance(gateway, AWSInternetGateway)
                 else gateway)
        if self._route_table.create_route(
                DestinationCidrBlock='0.0.0.0/0', GatewayId=gw_id):
            return True
        return False

    def detach_gateway(self, gateway):
        """Detach the given internet gateway from this router's VPC."""
        gw_id = (gateway.id if isinstance(gateway, AWSInternetGateway)
                 else gateway)
        return self._provider.ec2_conn.meta.client.detach_internet_gateway(
            InternetGatewayId=gw_id, VpcId=self._route_table.vpc_id)
class AWSInternetGateway(BaseInternetGateway):
    """AWS implementation of an internet gateway resource."""

    def __init__(self, provider, gateway):
        super(AWSInternetGateway, self).__init__(provider)
        self._gateway = gateway
        # Boto gateways carry no state attribute; seed one so ``state``
        # and ``refresh`` have a field to work with.
        self._gateway.state = ''
        self._fips_container = AWSFloatingIPSubService(provider, self)

    @property
    def id(self):
        """Native EC2 id of the gateway."""
        return self._gateway.id

    @property
    def name(self):
        """Value of the gateway's ``Name`` tag."""
        return find_tag_value(self._gateway.tags, 'Name')

    def refresh(self):
        """Re-read gateway state; mark UNKNOWN if the reload fails."""
        try:
            self._gateway.reload()
        except ClientError:
            self._gateway.state = GatewayState.UNKNOWN

    @property
    def state(self):
        """UNKNOWN after a failed refresh, AVAILABLE otherwise."""
        is_unknown = self._gateway.state == GatewayState.UNKNOWN
        return GatewayState.UNKNOWN if is_unknown else GatewayState.AVAILABLE

    @property
    def network_id(self):
        """Id of the attached VPC, or None when unattached."""
        attachments = self._gateway.attachments
        if not attachments:
            return None
        return attachments[0].get('VpcId')

    @property
    def floating_ips(self):
        """Sub-service for floating IPs bound to this gateway."""
        return self._fips_container
class AWSLaunchConfig(BaseLaunchConfig):
    """AWS launch configuration; all behaviour comes from the base class."""

    def __init__(self, provider):
        super(AWSLaunchConfig, self).__init__(provider)
class AWSDnsZone(BaseDnsZone):
    """AWS (Route53) implementation of a DNS zone.

    Route53 hosted-zone ids contain a slash (e.g. ``/hostedzone/ABC``);
    the public ``id`` escapes that slash so the value is safe to pass
    around, while ``aws_id`` keeps the native form.
    """

    def __init__(self, provider, dns_zone):
        super(AWSDnsZone, self).__init__(provider)
        self._dns_zone = dns_zone
        self._dns_record_container = AWSDnsRecordSubService(provider, self)

    @property
    def id(self):
        # The ID contains a slash, do not allow this
        return self.escape_zone_id(self.aws_id)

    @property
    def aws_id(self):
        """Native Route53 hosted-zone id (contains slashes)."""
        return self._dns_zone.get('Id')

    @staticmethod
    def escape_zone_id(value):
        """Replace slashes so the id can be used as a single token."""
        return value.replace("/", "-") if value else None

    @staticmethod
    def unescape_zone_id(value):
        """Inverse of ``escape_zone_id``.

        NOTE(review): assumes the native id itself contains no dashes;
        Route53 ids are alphanumeric so this holds in practice.
        """
        return value.replace("-", "/") if value else None

    @property
    def name(self):
        """Fully-qualified zone name."""
        return self._dns_zone.get('Name')

    @property
    def admin_email(self):
        """Admin email encoded in the zone comment as ``admin_email=<addr>``.

        Returns None when no comment is set or the comment does not
        carry the ``admin_email=`` marker.
        """
        comment = self._dns_zone.get('Config', {}).get('Comment')
        if not comment:
            return None
        email_field = comment.split(",")[0].split("=")
        # Guard the index: a comment of exactly "admin_email" (no '=')
        # used to raise IndexError here.
        if email_field[0] == "admin_email" and len(email_field) > 1:
            return email_field[1]
        return None

    @property
    def records(self):
        """Sub-service for managing this zone's DNS records."""
        return self._dns_record_container
class AWSDnsRecord(BaseDnsRecord):
    """AWS (Route53) implementation of a single DNS record set."""

    def __init__(self, provider, dns_zone, dns_record):
        super(AWSDnsRecord, self).__init__(provider)
        self._dns_zone = dns_zone
        self._dns_rec = dns_record

    @property
    def id(self):
        """Composite id of record name and type (e.g. ``www.x.:A``)."""
        return self._dns_rec.get('Name') + ":" + self._dns_rec.get('Type')

    @property
    def name(self):
        """Record set name."""
        return self._dns_rec.get('Name')

    @property
    def zone_id(self):
        """Escaped id of the zone this record belongs to."""
        return self._dns_zone.id

    @property
    def type(self):
        """Record type (A, CNAME, ...)."""
        return self._dns_rec.get('Type')

    @property
    def data(self):
        """List of record values.

        Record sets without a ``ResourceRecords`` key (e.g. Route53
        alias records) used to make this raise ``TypeError``; fall back
        to an empty list instead.
        """
        return [rec.get('Value') for rec in
                self._dns_rec.get('ResourceRecords') or []]

    @property
    def ttl(self):
        """Record TTL in seconds (None for alias records)."""
        return self._dns_rec.get('TTL')

    def delete(self):
        """Delete this record via the provider's DNS record service."""
        # pylint:disable=protected-access
        return self._provider.dns._records.delete(self._dns_zone, self)
| 29.97912
| 84
| 0.621959
|
4a06b3bd536ded5eb24bce08c4ec1816d01fee52
| 805
|
py
|
Python
|
awx/main/migrations/0061_v350_track_native_credentialtype_source.py
|
Avinesh/awx
|
6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf
|
[
"Apache-2.0"
] | 17
|
2021-04-03T01:40:17.000Z
|
2022-03-03T11:45:20.000Z
|
awx/main/migrations/0061_v350_track_native_credentialtype_source.py
|
Avinesh/awx
|
6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf
|
[
"Apache-2.0"
] | 24
|
2021-05-18T21:13:35.000Z
|
2022-03-29T10:23:52.000Z
|
awx/main/migrations/0061_v350_track_native_credentialtype_source.py
|
Avinesh/awx
|
6310a2edd890d6062a9f6bcdeb2b46c4b876c2bf
|
[
"Apache-2.0"
] | 24
|
2020-11-27T08:37:35.000Z
|
2021-03-08T13:27:15.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-19 04:27
from __future__ import unicode_literals
from django.db import migrations, models
from awx.main.models import CredentialType
from awx.main.utils.common import set_current_apps
def migrate_to_static_inputs(apps, schema_editor):
    """Re-seed the Tower-managed default CredentialTypes.

    Hands the migration-time ``apps`` registry to awx via
    ``set_current_apps`` before invoking the model helper.
    """
    set_current_apps(apps)
    CredentialType.setup_tower_managed_defaults()
class Migration(migrations.Migration):
    """Add ``CredentialType.namespace`` and backfill managed credential
    types so they populate the new column."""
    dependencies = [
        ('main', '0060_v350_update_schedule_uniqueness_constraint'),
    ]
    operations = [
        migrations.AddField(
            model_name='credentialtype',
            name='namespace',
            field=models.CharField(default=None, editable=False, max_length=1024, null=True),
        ),
        # Data migration (no reverse function, so it is irreversible):
        # re-creates the Tower-managed default credential types.
        migrations.RunPython(migrate_to_static_inputs)
    ]
| 26.833333
| 93
| 0.711801
|
4a06b4111c09991f5b3ba8b06f3c5f2afd640272
| 1,956
|
py
|
Python
|
geotrek/api/v2/views/__init__.py
|
numahell/Geotrek-admin
|
e279875b0b06ef60928c049d51533f76716c902a
|
[
"BSD-2-Clause"
] | 1
|
2019-12-11T11:04:05.000Z
|
2019-12-11T11:04:05.000Z
|
geotrek/api/v2/views/__init__.py
|
numahell/Geotrek-admin
|
e279875b0b06ef60928c049d51533f76716c902a
|
[
"BSD-2-Clause"
] | null | null | null |
geotrek/api/v2/views/__init__.py
|
numahell/Geotrek-admin
|
e279875b0b06ef60928c049d51533f76716c902a
|
[
"BSD-2-Clause"
] | null | null | null |
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from rest_framework import response, permissions
from rest_framework.views import APIView
from django.conf import settings
from django.contrib.gis.geos import Polygon
from .authent import StructureViewSet # noqa
from .common import TargetPortalViewSet, ThemeViewSet, SourceViewSet, ReservationSystemViewSet, LabelViewSet # noqa
if 'geotrek.core' in settings.INSTALLED_APPS:
from .core import PathViewSet # noqa
if 'geotrek.trekking' in settings.INSTALLED_APPS:
from .trekking import TrekViewSet, TourViewSet, POIViewSet, POITypeViewSet, AccessibilityViewSet, RouteViewSet, DifficultyViewSet, NetworksViewSet, PracticeViewSet # noqa
if 'geotrek.sensitivity' in settings.INSTALLED_APPS:
from .sensitivity import SensitiveAreaViewSet # noqa
from .sensitivity import SportPracticeViewSet # noqa
if 'geotrek.tourism' in settings.INSTALLED_APPS:
from .tourism import TouristicContentViewSet, InformationDeskViewSet # noqa
if 'geotrek.zoning' in settings.INSTALLED_APPS:
from .zoning import CityViewSet, DistrictViewSet # noqa
if 'geotrek.outdoor' in settings.INSTALLED_APPS:
from .outdoor import SiteViewSet, OutdoorPracticeViewSet, SiteTypeViewSet # noqa
# drf-yasg view serving the auto-generated OpenAPI schema for API v2;
# open to anonymous users and scoped to the v2 urlconf.
schema_view = get_schema_view(
    openapi.Info(
        title="Geotrek API v2",
        default_version='v2',
        description="New Geotrek API.",
    ),
    urlconf='geotrek.api.v2.urls',
    public=True,
    permission_classes=(permissions.AllowAny,),
)
class ConfigView(APIView):
    """Expose the bounding box configured for this Geotrek instance.

    The configured SPATIAL_EXTENT (in the instance SRID) is reprojected
    to the public API SRID before being returned.
    """
    permission_classes = [permissions.AllowAny]

    def get(self, request, *args, **kwargs):
        extent_polygon = Polygon.from_bbox(settings.SPATIAL_EXTENT)
        extent_polygon.srid = settings.SRID
        extent_polygon.transform(settings.API_SRID)
        return response.Response({'bbox': extent_polygon.extent})
| 39.12
| 175
| 0.754601
|
4a06b620ecef0174df64d32bf3cce002500f78c6
| 15,366
|
py
|
Python
|
apps/reports/tests/case.py
|
commtrack/commtrack-old-to-del
|
cc9c22754ac192a45483cef609bdcf09aa990340
|
[
"BSD-3-Clause"
] | 1
|
2017-05-19T07:23:00.000Z
|
2017-05-19T07:23:00.000Z
|
apps/reports/tests/case.py
|
commtrack/commtrack-old-to-del
|
cc9c22754ac192a45483cef609bdcf09aa990340
|
[
"BSD-3-Clause"
] | null | null | null |
apps/reports/tests/case.py
|
commtrack/commtrack-old-to-del
|
cc9c22754ac192a45483cef609bdcf09aa990340
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
import os
from receiver.models import Submission, Attachment
from reports.models import *
from xformmanager.tests.util import *
from xformmanager.models import *
from xformmanager.storageutility import StorageUtility
from hq.models import Domain
#from django.test import TestCase
class CaseTestCase(unittest.TestCase):
    """Exercises the Case / FormIdentifier / CaseFormIdentifier reporting
    models against three populated pathfinder schemas (registration,
    follow-up, referral-completed).

    setUp builds two cases: a simple one keyed on ``meta_username`` and a
    "complex" one keyed on ``meta_username|<chw_id column>``.
    """
    def setUp(self):
        """Register the pathfinder schemas, load sample submissions and
        wire up the simple and complex case fixtures."""
        # clean up, in case some other tests left some straggling
        # form data.
        clear_data()
        path = os.path.dirname(__file__)
        # register some schemas
        create_xsd_and_populate("data/pf_followup.xsd", "data/pf_followup_1.xml", path=path)
        for i in range(2, 6):
            populate("data/pf_followup_%s.xml" % i, path=path)
        create_xsd_and_populate("data/pf_new_reg.xsd", "data/pf_new_reg_1.xml", path=path)
        populate("data/pf_new_reg_2.xml", path=path)
        create_xsd_and_populate("data/pf_ref_completed.xsd", "data/pf_ref_completed_1.xml", path=path)
        populate("data/pf_ref_completed_2.xml", path=path)
        # get the three forms
        self.reg_form = FormDefModel.objects.get(form_name="schema_pathfinder_pathfinder_cc_registration_0_0_2")
        self.follow_form = FormDefModel.objects.get(form_name="schema_pathfinder_pathfinder_cc_follow_0_0_2")
        self.close_form = FormDefModel.objects.get(form_name="schema_pathfinder_pathfinder_cc_resolution_0_0_2")
        # make some objects for these to build our case
        self.reg_fid = FormIdentifier.objects.create(form=self.reg_form, identity_column="meta_username")
        self.follow_fid = FormIdentifier.objects.create(form=self.follow_form, identity_column="meta_username",
                                                        sorting_column="meta_timeend", sort_descending=True)
        self.close_fid = FormIdentifier.objects.create(form=self.close_form, identity_column="meta_username")
        # and our complex case, which uses multiple columns as keys
        self.reg_complex_fid = FormIdentifier.objects.create(form=self.reg_form, identity_column="meta_username|pathfinder_registration_meta_chw_id")
        self.follow_complex_fid = FormIdentifier.objects.create(form=self.follow_form, identity_column="meta_username|pathfinder_followup_meta_chw_id",
                                                                sorting_column="meta_timeend", sort_descending=True)
        self.close_complex_fid = FormIdentifier.objects.create(form=self.close_form, identity_column="meta_username|pathfinder_referral_meta_chw_id")
        pf_domain = Domain.objects.all()[0]
        self.pf_case = Case.objects.create(name="pathfinder cases", domain=pf_domain)
        self.complex_case = Case.objects.create(name="pathfinder complex cases", domain=pf_domain)
        self.reg_cfi = CaseFormIdentifier.objects.create(form_identifier=self.reg_fid, case=self.pf_case, sequence_id=1, form_type="open")
        self.follow_cfi = CaseFormIdentifier.objects.create(form_identifier=self.follow_fid, case=self.pf_case, sequence_id=2, form_type="follow")
        self.close_cfi = CaseFormIdentifier.objects.create(form_identifier=self.close_fid, case=self.pf_case, sequence_id=3, form_type="close")
        self.reg_complex_cfi = CaseFormIdentifier.objects.create(form_identifier=self.reg_complex_fid, case=self.complex_case, sequence_id=1, form_type="open")
        self.follow_complex_cfi = CaseFormIdentifier.objects.create(form_identifier=self.follow_complex_fid, case=self.complex_case, sequence_id=2, form_type="follow")
        self.close_complex_cfi = CaseFormIdentifier.objects.create(form_identifier=self.close_complex_fid, case=self.complex_case, sequence_id=3, form_type="close")
    def tearDown(self):
        """Wipe all schema and submission data so later tests start clean."""
        # clean up, in case some other tests left some straggling
        # form data. Do this in setup and teardown because we want
        # to start with a clean slate and leave a clean slate.
        su = StorageUtility()
        su.clear()
        Submission.objects.all().delete()
        Attachment.objects.all().delete()
    def testFormIdentifier(self):
        """get_uniques() returns the distinct identity values per form."""
        uniques = self.reg_fid.get_uniques()
        self.assertEqual(1, len(uniques))
        self.assertEqual("mary", uniques[0])
        uniques = self.follow_fid.get_uniques()
        self.assertEqual(2, len(uniques))
        self.assertEqual("demo_user", uniques[0])
        self.assertEqual("mary", uniques[1])
        uniques = self.close_fid.get_uniques()
        self.assertEqual(1, len(uniques))
        self.assertEqual("demo_user", uniques[0])
    def testFormIdentifierComplex(self):
        """get_uniques() with a multi-column key yields 'user|chw_id' values."""
        uniques = self.reg_complex_fid.get_uniques()
        self.assertEqual(2, len(uniques))
        self.assertEqual("mary|0", uniques[0])
        self.assertEqual("mary|1", uniques[1])
        uniques = self.follow_complex_fid.get_uniques()
        self.assertEqual(4, len(uniques))
        self.assertEqual("demo_user|0", uniques[0])
        self.assertEqual("demo_user|1", uniques[1])
        self.assertEqual("mary|0", uniques[2])
        self.assertEqual("mary|2", uniques[3])
        uniques = self.close_complex_fid.get_uniques()
        self.assertEqual(2, len(uniques))
        self.assertEqual("demo_user|0", uniques[0])
        self.assertEqual("demo_user|2", uniques[1])
    def testGetFormUtilities(self):
        """Case.forms / form_identifiers follow CaseFormIdentifier sequence order."""
        pf_forms = self.pf_case.forms
        self.assertEqual(3, len(pf_forms))
        self.assertEqual(self.reg_form, pf_forms[0])
        self.assertEqual(self.follow_form, pf_forms[1])
        self.assertEqual(self.close_form, pf_forms[2])
        pf_form_ids = self.pf_case.form_identifiers
        self.assertEqual(3, len(pf_form_ids))
        self.assertEqual(self.reg_fid, pf_form_ids[0])
        self.assertEqual(self.follow_fid, pf_form_ids[1])
        self.assertEqual(self.close_fid, pf_form_ids[2])
        # change around the sequence ids and make sure they come back in the right order
        self.follow_cfi.sequence_id = 4
        self.follow_cfi.save()
        pf_forms = self.pf_case.forms
        self.assertEqual(3, len(pf_forms))
        self.assertEqual(self.reg_form, pf_forms[0])
        self.assertEqual(self.close_form, pf_forms[1])
        self.assertEqual(self.follow_form, pf_forms[2])
        pf_form_ids = self.pf_case.form_identifiers
        self.assertEqual(3, len(pf_form_ids))
        self.assertEqual(self.reg_fid, pf_form_ids[0])
        self.assertEqual(self.close_fid, pf_form_ids[1])
        self.assertEqual(self.follow_fid, pf_form_ids[2])
    def testGetUniqueIds(self):
        """Case.get_unique_ids() unions identities across all member forms."""
        uniques = self.pf_case.get_unique_ids()
        self.assertEqual(2, len(uniques))
        # for now, we don't know what order these will come back in
        if uniques[0] == "mary":
            self.assertEqual("demo_user", uniques[1])
        elif uniques[0] == "demo_user":
            self.assertEqual("mary", uniques[1])
        else:
            self.fail("Get uniques returned wrong first value: %s" % uniques[0])
    def testGetUniqueIdsComplex(self):
        """Multi-column case yields every user|chw_id combination seen."""
        uniques = self.complex_case.get_unique_ids()
        self.assertEqual(6, len(uniques))
        # for now, we don't know what order these will come back in
        for user in ["mary", "demo_user"]:
            for id in ["0", "1", "2"]:
                key = "%s|%s" % (user, id)
                self.assertTrue(key in uniques, "%s is in list of unique ids" % key)
    def testGetColumnNames(self):
        """Case columns = case_id + each member form's columns, in order."""
        reg_cols = self.reg_fid.get_column_names()
        follow_cols = self.follow_fid.get_column_names()
        close_cols = self.close_fid.get_column_names()
        # start with a base count of 1 for the "case_id" column
        total_cols = 1 + len(reg_cols) + len(follow_cols) + len(close_cols)
        case_cols = self.pf_case.get_column_names()
        self.assertEqual(total_cols, len(case_cols))
        # walk through the list of columns in order and
        # ensure that each table's columns match up.
        count = 1
        for col in reg_cols:
            self.assertTrue(col in case_cols[count])
            count += 1
        for col in follow_cols:
            self.assertTrue(col in case_cols[count])
            count += 1
        for col in close_cols:
            self.assertTrue(col in case_cols[count])
            count += 1
    def testGetColumnNamesComplex(self):
        """Same column layout check for the multi-column-key case."""
        reg_cols = self.reg_complex_fid.get_column_names()
        follow_cols = self.follow_complex_fid.get_column_names()
        close_cols = self.close_complex_fid.get_column_names()
        # start with a base count of 1 for the "case_id" column
        total_cols = 1 + len(reg_cols) + len(follow_cols) + len(close_cols)
        case_cols = self.complex_case.get_column_names()
        self.assertEqual(total_cols, len(case_cols))
        # walk through the list of columns in order and
        # ensure that each table's columns match up.
        count = 1
        for col in reg_cols:
            self.assertTrue(col in case_cols[count])
            count += 1
        for col in follow_cols:
            self.assertTrue(col in case_cols[count])
            count += 1
        for col in close_cols:
            self.assertTrue(col in case_cols[count])
            count += 1
    def testGetDataFromFormIdentifier(self):
        """get_data_maps() groups rows by identity and honours sort settings."""
        followup_data = self.follow_fid.get_data_maps()
        self.assertEqual(2, len(followup_data))
        self.assertEqual(3, len(followup_data["demo_user"]))
        self.assertEqual(2, len(followup_data["mary"]))
        for id, list in followup_data.items():
            dict = list[0]
            self.assertEqual(id, dict[self.follow_fid.identity_column])
            # add the sorting checks based on the knowledge of the form.
            # This is done by manually setting the device ids in the forms
            if id == "demo_user":
                self.assertEqual("device3", dict["meta_deviceid"])
            elif id == "mary":
                self.assertEqual("device5", dict["meta_deviceid"])
            else:
                self.fail("unexpected identity: %s" % id)
        # change the sort order and make sure it works
        self.follow_fid.sort_descending = False
        self.follow_fid.save()
        followup_data = self.follow_fid.get_data_maps()
        self.assertEqual(2, len(followup_data))
        self.assertEqual(3, len(followup_data["demo_user"]))
        self.assertEqual(2, len(followup_data["mary"]))
        for id, list in followup_data.items():
            dict = list[0]
            self.assertEqual(id, dict[self.follow_fid.identity_column])
            if id == "demo_user":
                self.assertEqual("device1", dict["meta_deviceid"])
            elif id == "mary":
                self.assertEqual("device4", dict["meta_deviceid"])
            else:
                self.fail("unexpected identity: %s" % id)
        # change the sorting column and do it one more time
        self.follow_fid.sorting_column = "meta_timestart"
        self.follow_fid.save()
        followup_data = self.follow_fid.get_data_maps()
        self.assertEqual(2, len(followup_data))
        self.assertEqual(3, len(followup_data["demo_user"]))
        self.assertEqual(2, len(followup_data["mary"]))
        for id, list in followup_data.items():
            dict = list[0]
            self.assertEqual(id, dict[self.follow_fid.identity_column])
            if id == "demo_user":
                self.assertEqual("device3", dict["meta_deviceid"])
                self.assertEqual(3, len(list))
            elif id == "mary":
                self.assertEqual("device5", dict["meta_deviceid"])
                self.assertEqual(2, len(list))
            else:
                self.fail("unexpected identity: %s" % id)
    def testGetDataFromFormIdentifierComplex(self):
        """get_data_maps() keyed on user|chw_id splits rows per combination."""
        followup_data = self.follow_complex_fid.get_data_maps()
        self.assertEqual(4, len(followup_data))
        self.assertEqual(2, len(followup_data["demo_user|0"]))
        for id in ["demo_user|1", "mary|0", "mary|2"]:
            self.assertEqual(1, len(followup_data[id]))
    def testGetTopmostData(self):
        """get_topmost_data_maps() exposes only each identity's latest row
        per form, with None for forms the identity never submitted."""
        data = self.pf_case.get_topmost_data_maps()
        self.assertEqual(2, len(data))
        for id, col_map in data.items():
            if id == "demo_user":
                self.assertEqual("device3",
                                 col_map["meta_deviceid_%s" % self.follow_cfi.sequence_id])
                # demo_user has a close form but no reg
                # the id below is copied from the xml form
                self.assertEqual("7WM8SPBUWGXTDRO4TAJVR6MA0",
                                 col_map["meta_uid_%s" % self.close_cfi.sequence_id])
                self.assertEqual(None,
                                 col_map["meta_uid_%s" % self.reg_cfi.sequence_id])
            elif id == "mary":
                self.assertEqual("device5",
                                 col_map["meta_deviceid_%s" % self.follow_cfi.sequence_id])
                # mary has a reg, but no close form
                # the id below is copied from the xml form
                self.assertEqual("NFLFYINTDW16XPMOY0QXVXSH1",
                                 col_map["meta_uid_%s" % self.reg_cfi.sequence_id])
                self.assertEqual(None,
                                 col_map["meta_uid_%s" % self.close_cfi.sequence_id])
            else:
                self.fail("unexpected identity: %s" % id)
    def testGetCaseData(self):
        """get_data_for_case() returns per-form row lists for one identity."""
        data = self.pf_case.get_data_for_case("demo_user")
        self.assertEqual(3, len(data))
        self.assertEqual(0, len(data[self.reg_fid]))
        self.assertEqual(3, len(data[self.follow_fid]))
        self.assertEqual(2, len(data[self.close_fid]))
        data = self.pf_case.get_data_for_case("mary")
        self.assertEqual(3, len(data))
        self.assertEqual(2, len(data[self.reg_fid]))
        self.assertEqual(2, len(data[self.follow_fid]))
        self.assertEqual(0, len(data[self.close_fid]))
    def testGetAllData(self):
        """get_all_data_maps() returns every identity's rows, sorted per
        the follow-up form's descending meta_timeend ordering."""
        all_data = self.pf_case.get_all_data_maps()
        self.assertEqual(2, len(all_data))
        data = all_data["demo_user"]
        self.assertEqual(3, len(data))
        self.assertEqual(0, len(data[self.reg_fid]))
        self.assertEqual(3, len(data[self.follow_fid]))
        self.assertEqual("device3",
                         data[self.follow_fid][0]["meta_deviceid"])
        self.assertEqual("device2",
                         data[self.follow_fid][1]["meta_deviceid"])
        self.assertEqual("device1",
                         data[self.follow_fid][2]["meta_deviceid"])
        self.assertEqual(2, len(data[self.close_fid]))
        data = all_data["mary"]
        self.assertEqual(3, len(data))
        self.assertEqual(2, len(data[self.reg_fid]))
        self.assertEqual(2, len(data[self.follow_fid]))
        self.assertEqual("device5",
                         data[self.follow_fid][0]["meta_deviceid"])
        self.assertEqual("device4",
                         data[self.follow_fid][1]["meta_deviceid"])
        self.assertEqual(0, len(data[self.close_fid]))
| 48.626582
| 167
| 0.626904
|
4a06b647ae810cc83badaf9bc15d9958891a2a0c
| 30,941
|
py
|
Python
|
ryu/ofproto/nicira_ext.py
|
yyd01245/ryu
|
2ed74efd26bc6bb32ca9292ee7c22c7600ee89d2
|
[
"Apache-2.0"
] | null | null | null |
ryu/ofproto/nicira_ext.py
|
yyd01245/ryu
|
2ed74efd26bc6bb32ca9292ee7c22c7600ee89d2
|
[
"Apache-2.0"
] | null | null | null |
ryu/ofproto/nicira_ext.py
|
yyd01245/ryu
|
2ed74efd26bc6bb32ca9292ee7c22c7600ee89d2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Nicira extensions
# Many of these definitions are common among OpenFlow versions.
import sys
from struct import calcsize
from ryu.lib import type_desc
from ryu.ofproto.ofproto_common import OFP_HEADER_SIZE
from ryu.ofproto import oxm_fields
# Action subtypes
NXAST_RESUBMIT = 1
NXAST_SET_TUNNEL = 2
NXAST_DROP_SPOOFED_ARP__OBSOLETE = 3
NXAST_SET_QUEUE = 4
NXAST_POP_QUEUE = 5
NXAST_REG_MOVE = 6
NXAST_REG_LOAD = 7
NXAST_NOTE = 8
NXAST_SET_TUNNEL64 = 9
NXAST_MULTIPATH = 10
NXAST_AUTOPATH = 11
NXAST_BUNDLE = 12
NXAST_BUNDLE_LOAD = 13
NXAST_RESUBMIT_TABLE = 14
NXAST_OUTPUT_REG = 15
NXAST_LEARN = 16
NXAST_EXIT = 17
NXAST_DEC_TTL = 18
NXAST_FIN_TIMEOUT = 19
NXAST_CONTROLLER = 20
NXAST_DEC_TTL_CNT_IDS = 21
NXAST_PUSH_MPLS = 23
NXAST_POP_MPLS = 24
NXAST_SET_MPLS_TTL = 25
NXAST_DEC_MPLS_TTL = 26
NXAST_STACK_PUSH = 27
NXAST_STACK_POP = 28
NXAST_SAMPLE = 29
NXAST_SET_MPLS_LABEL = 30
NXAST_SET_MPLS_TC = 31
NXAST_OUTPUT_REG2 = 32
NXAST_REG_LOAD2 = 33
NXAST_CONJUNCTION = 34
NXAST_CT = 35
NXAST_NAT = 36
NXAST_CONTROLLER2 = 37
NXAST_SAMPLE2 = 38
NXAST_OUTPUT_TRUNC = 39
NXAST_DEC_NSH_TTL = 48
NX_ACTION_RESUBMIT_PACK_STR = '!HHIHHB3x'
NX_ACTION_RESUBMIT_SIZE = 16
assert calcsize(NX_ACTION_RESUBMIT_PACK_STR) == NX_ACTION_RESUBMIT_SIZE
NX_ACTION_SET_TUNNEL_PACK_STR = '!HHIH2xI'
NX_ACTION_SET_TUNNEL_SIZE = 16
assert calcsize(NX_ACTION_SET_TUNNEL_PACK_STR) == NX_ACTION_SET_TUNNEL_SIZE
NX_ACTION_SET_QUEUE_PACK_STR = '!HHIH2xI'
NX_ACTION_SET_QUEUE_SIZE = 16
assert calcsize(NX_ACTION_SET_QUEUE_PACK_STR) == NX_ACTION_SET_QUEUE_SIZE
NX_ACTION_POP_QUEUE_PACK_STR = '!HHIH6x'
NX_ACTION_POP_QUEUE_SIZE = 16
assert calcsize(NX_ACTION_POP_QUEUE_PACK_STR) == NX_ACTION_POP_QUEUE_SIZE
NX_ACTION_REG_MOVE_PACK_STR = '!HHIHHHHII'
NX_ACTION_REG_MOVE_SIZE = 24
assert calcsize(NX_ACTION_REG_MOVE_PACK_STR) == NX_ACTION_REG_MOVE_SIZE
NX_ACTION_REG_LOAD_PACK_STR = '!HHIHHIQ'
NX_ACTION_REG_LOAD_SIZE = 24
assert calcsize(NX_ACTION_REG_LOAD_PACK_STR) == NX_ACTION_REG_LOAD_SIZE
NX_ACTION_SET_TUNNEL64_PACK_STR = '!HHIH6xQ'
NX_ACTION_SET_TUNNEL64_SIZE = 24
assert calcsize(NX_ACTION_SET_TUNNEL64_PACK_STR) == NX_ACTION_SET_TUNNEL64_SIZE
NX_ACTION_MULTIPATH_PACK_STR = '!HHIHHH2xHHI2xHI'
NX_ACTION_MULTIPATH_SIZE = 32
assert calcsize(NX_ACTION_MULTIPATH_PACK_STR) == NX_ACTION_MULTIPATH_SIZE
NX_ACTION_NOTE_PACK_STR = '!HHIH6B'
NX_ACTION_NOTE_SIZE = 16
assert calcsize(NX_ACTION_NOTE_PACK_STR) == NX_ACTION_NOTE_SIZE
NX_ACTION_BUNDLE_PACK_STR = '!HHIHHHHIHHI4x'
NX_ACTION_BUNDLE_SIZE = 32
NX_ACTION_BUNDLE_0_SIZE = 24
assert calcsize(NX_ACTION_BUNDLE_PACK_STR) == NX_ACTION_BUNDLE_SIZE
NX_ACTION_AUTOPATH_PACK_STR = '!HHIHHII4x'
NX_ACTION_AUTOPATH_SIZE = 24
assert calcsize(NX_ACTION_AUTOPATH_PACK_STR) == NX_ACTION_AUTOPATH_SIZE
NX_ACTION_OUTPUT_REG_PACK_STR = '!HHIHHIH6x'
NX_ACTION_OUTPUT_REG_SIZE = 24
assert calcsize(NX_ACTION_OUTPUT_REG_PACK_STR) == NX_ACTION_OUTPUT_REG_SIZE
NX_ACTION_LEARN_PACK_STR = '!HHIHHHHQHBxHH'
NX_ACTION_LEARN_SIZE = 32
assert calcsize(NX_ACTION_LEARN_PACK_STR) == NX_ACTION_LEARN_SIZE
NX_ACTION_CONTROLLER_PACK_STR = '!HHIHHHBB'
NX_ACTION_CONTROLLER_SIZE = 16
assert calcsize(NX_ACTION_CONTROLLER_PACK_STR) == NX_ACTION_CONTROLLER_SIZE
NX_ACTION_FIN_TIMEOUT_PACK_STR = '!HHIHHH2x'
NX_ACTION_FIN_TIMEOUT_SIZE = 16
assert calcsize(NX_ACTION_FIN_TIMEOUT_PACK_STR) == NX_ACTION_FIN_TIMEOUT_SIZE
NX_ACTION_HEADER_PACK_STR = '!HHIH6x'
NX_ACTION_HEADER_SIZE = 16
NX_ACTION_HEADER_0_SIZE = 2
assert calcsize(NX_ACTION_HEADER_PACK_STR) == NX_ACTION_HEADER_SIZE
# Messages
NXT_ROLE_REQUEST = 10
NXT_ROLE_REPLY = 11
NXT_SET_FLOW_FORMAT = 12
NXT_FLOW_MOD = 13
NXT_FLOW_REMOVED = 14
NXT_FLOW_MOD_TABLE_ID = 15
NXT_SET_PACKET_IN_FORMAT = 16
NXT_PACKET_IN = 17
NXT_FLOW_AGE = 18
NXT_SET_ASYNC_CONFIG = 19
NXT_SET_CONTROLLER_ID = 20
# enum nx_role
NX_ROLE_OTHER = 0
NX_ROLE_MASTER = 1
NX_ROLE_SLAVE = 2
# enum nx_flow_format
NXFF_OPENFLOW10 = 0
NXFF_NXM = 2
# enum nx_packet_in_format
NXPIF_OPENFLOW10 = 0
NXPIF_NXM = 1
# enum nx_stats_types
NXST_FLOW = 0
NXST_AGGREGATE = 1
NXST_FLOW_MONITOR = 2
# enum nx_action_controller2_prop_type
NXAC2PT_MAX_LEN = 0
NXAC2PT_CONTROLLER_ID = 1
NXAC2PT_REASON = 2
NXAC2PT_USERDATA = 3
NXAC2PT_PAUSE = 4
NICIRA_HEADER_PACK_STR = '!II'
NICIRA_HEADER_SIZE = 16
assert (calcsize(NICIRA_HEADER_PACK_STR) +
OFP_HEADER_SIZE == NICIRA_HEADER_SIZE)
NX_ROLE_PACK_STR = '!I'
NX_ROLE_SIZE = 20
assert (calcsize(NX_ROLE_PACK_STR) +
NICIRA_HEADER_SIZE == NX_ROLE_SIZE)
NX_FLOW_MOD_PACK_STR = '!Q4HI3H6x'
NX_FLOW_MOD_SIZE = 48
assert (calcsize(NX_FLOW_MOD_PACK_STR) +
NICIRA_HEADER_SIZE == NX_FLOW_MOD_SIZE)
NX_SET_FLOW_FORMAT_PACK_STR = '!I'
NX_SET_FLOW_FORMAT_SIZE = 20
assert (calcsize(NX_SET_FLOW_FORMAT_PACK_STR) +
NICIRA_HEADER_SIZE == NX_SET_FLOW_FORMAT_SIZE)
NX_FLOW_REMOVED_PACK_STR = '!QHBxIIHHQQ'
NX_FLOW_REMOVED_SIZE = 56
assert (calcsize(NX_FLOW_REMOVED_PACK_STR) +
NICIRA_HEADER_SIZE == NX_FLOW_REMOVED_SIZE)
NX_FLOW_MOD_TABLE_ID_PACK_STR = '!B7x'
NX_FLOW_MOD_TABLE_ID_SIZE = 24
assert (calcsize(NX_FLOW_MOD_TABLE_ID_PACK_STR) +
NICIRA_HEADER_SIZE == NX_FLOW_MOD_TABLE_ID_SIZE)
NX_SET_PACKET_IN_FORMAT_PACK_STR = '!I'
NX_SET_PACKET_IN_FORMAT_SIZE = 20
assert (calcsize(NX_SET_PACKET_IN_FORMAT_PACK_STR) +
NICIRA_HEADER_SIZE == NX_SET_PACKET_IN_FORMAT_SIZE)
NX_PACKET_IN_PACK_STR = '!IHBBQH6x'
NX_PACKET_IN_SIZE = 40
assert (calcsize(NX_PACKET_IN_PACK_STR) +
NICIRA_HEADER_SIZE == NX_PACKET_IN_SIZE)
NX_ASYNC_CONFIG_PACK_STR = '!IIIIII'
NX_ASYNC_CONFIG_SIZE = 40
assert (calcsize(NX_ASYNC_CONFIG_PACK_STR) +
NICIRA_HEADER_SIZE == NX_ASYNC_CONFIG_SIZE)
NX_CONTROLLER_ID_PACK_STR = '!6xH'
NX_CONTROLLER_ID_SIZE = 24
assert (calcsize(NX_CONTROLLER_ID_PACK_STR) +
NICIRA_HEADER_SIZE == NX_CONTROLLER_ID_SIZE)
NX_STATS_MSG_PACK_STR = '!I4x'
NX_STATS_MSG0_SIZE = 8
assert calcsize(NX_STATS_MSG_PACK_STR) == NX_STATS_MSG0_SIZE
NX_STATS_MSG_SIZE = 24
_OFP_VENDOR_STATS_MSG_SIZE = 16
assert (calcsize(NX_STATS_MSG_PACK_STR) + _OFP_VENDOR_STATS_MSG_SIZE ==
NX_STATS_MSG_SIZE)
NX_FLOW_STATS_REQUEST_PACK_STR = '!2HB3x'
NX_FLOW_STATS_REQUEST_SIZE = 8
assert (calcsize(NX_FLOW_STATS_REQUEST_PACK_STR) ==
NX_FLOW_STATS_REQUEST_SIZE)
NX_FLOW_STATS_PACK_STR = '!HBxIIHHHHHHQQQ'
NX_FLOW_STATS_SIZE = 48
assert calcsize(NX_FLOW_STATS_PACK_STR) == NX_FLOW_STATS_SIZE
NX_AGGREGATE_STATS_REQUEST_PACK_STR = '!2HB3x'
NX_AGGREGATE_STATS_REQUEST_SIZE = 8
assert (calcsize(NX_AGGREGATE_STATS_REQUEST_PACK_STR) ==
NX_AGGREGATE_STATS_REQUEST_SIZE)
NX_AGGREGATE_STATS_REPLY_PACK_STR = '!QQI4x'
NX_AGGREGATE_STATS_REPLY_SIZE = 24
assert (calcsize(NX_AGGREGATE_STATS_REPLY_PACK_STR) ==
NX_AGGREGATE_STATS_REPLY_SIZE)
# enum nx_hash_fields
NX_HASH_FIELDS_ETH_SRC = 0
NX_HASH_FIELDS_SYMMETRIC_L4 = 1
# enum nx_mp_algorithm
NX_MP_ALG_MODULO_N = 0
NX_MP_ALG_HASH_THRESHOLD = 1
NX_MP_ALG_HRW = 2
NX_MP_ALG_ITER_HASH = 3
# enum nx_bd_algorithm
NX_BD_ALG_ACTIVE_BACKUP = 0
NX_BD_ALG_HRW = 1
# nx_learn constants
NX_LEARN_N_BITS_MASK = 0x3ff
NX_LEARN_SRC_FIELD = 0 << 13 # Copy from field.
NX_LEARN_SRC_IMMEDIATE = 1 << 13 # Copy from immediate value.
NX_LEARN_SRC_MASK = 1 << 13
NX_LEARN_DST_MATCH = 0 << 11 # Add match criterion.
NX_LEARN_DST_LOAD = 1 << 11 # Add NXAST_REG_LOAD action
NX_LEARN_DST_OUTPUT = 2 << 11 # Add OFPAT_OUTPUT action.
NX_LEARN_DST_RESERVED = 3 << 11 # Not yet defined.
NX_LEARN_DST_MASK = 3 << 11
# nx_ct constants
NX_CT_F_COMMIT = 1 << 0
NX_CT_F_FORCE = 1 << 1
NX_CT_RECIRC_NONE = 0xff # OFPTT_ALL
# nx_nat constants
NX_NAT_RANGE_IPV4_MIN = 1 << 0
NX_NAT_RANGE_IPV4_MAX = 1 << 1
NX_NAT_RANGE_IPV6_MIN = 1 << 2
NX_NAT_RANGE_IPV6_MAX = 1 << 3
NX_NAT_RANGE_PROTO_MIN = 1 << 4
NX_NAT_RANGE_PROTO_MAX = 1 << 5
# nx ip_frag constants
FLOW_NW_FRAG_ANY = 1 << 0 # Set for any IP frag.
FLOW_NW_FRAG_LATER = 1 << 1 # Set for IP frag with nonzero offset.
FLOW_NW_FRAG_MASK = FLOW_NW_FRAG_ANY | FLOW_NW_FRAG_LATER
# nx ip_frag match values
NXM_IP_FRAG_NO = (0, FLOW_NW_FRAG_MASK)
NXM_IP_FRAG_YES = (FLOW_NW_FRAG_ANY, FLOW_NW_FRAG_ANY)
NXM_IP_FRAG_FIRST = (FLOW_NW_FRAG_ANY, FLOW_NW_FRAG_MASK)
NXM_IP_FRAG_LATER = (FLOW_NW_FRAG_LATER, FLOW_NW_FRAG_LATER)
NXM_IP_FRAG_NOT_LATER = (0, FLOW_NW_FRAG_LATER)
def ofs_nbits(start, end):
    """Encode a bit range ``field[start..end]`` as an NXM/OXM ``ofs_nbits``.

    The start offset is packed into the upper bits and the span length
    into the low 6 bits::

        ofs_nbits = (start << 6) + (end - start)

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    start            Start bit for OXM/NXM field
    end              End bit for OXM/NXM field
    ================ ======================================================
    """
    n_bits = end - start
    return (start << 6) + n_bits
def nxm_header__(vendor, field, hasmask, length):
    """Assemble a raw 32-bit NXM header word from its four bit fields:
    vendor (bits 16+), field (bits 9-15), hasmask (bit 8), length (low byte)."""
    header = vendor << 16
    header |= field << 9
    header |= hasmask << 8
    header |= length
    return header
def nxm_header(vendor, field, length):
    """NXM header for a match field without a mask (hasmask bit clear)."""
    return nxm_header__(vendor, field, 0, length)
def nxm_header_w(vendor, field, length):
    """NXM header for a masked ("wildcarded") match field; the payload
    length doubles because the mask follows the value."""
    return nxm_header__(vendor, field, 1, (length) * 2)
NXM_OF_IN_PORT = nxm_header(0x0000, 0, 2)
NXM_OF_ETH_DST = nxm_header(0x0000, 1, 6)
NXM_OF_ETH_DST_W = nxm_header_w(0x0000, 1, 6)
NXM_OF_ETH_SRC = nxm_header(0x0000, 2, 6)
NXM_OF_ETH_SRC_W = nxm_header_w(0x0000, 2, 6)
NXM_OF_ETH_TYPE = nxm_header(0x0000, 3, 2)
NXM_OF_VLAN_TCI = nxm_header(0x0000, 4, 2)
NXM_OF_VLAN_TCI_W = nxm_header_w(0x0000, 4, 2)
NXM_OF_IP_TOS = nxm_header(0x0000, 5, 1)
NXM_OF_IP_PROTO = nxm_header(0x0000, 6, 1)
NXM_OF_IP_SRC = nxm_header(0x0000, 7, 4)
NXM_OF_IP_SRC_W = nxm_header_w(0x0000, 7, 4)
NXM_OF_IP_DST = nxm_header(0x0000, 8, 4)
NXM_OF_IP_DST_W = nxm_header_w(0x0000, 8, 4)
NXM_OF_TCP_SRC = nxm_header(0x0000, 9, 2)
NXM_OF_TCP_SRC_W = nxm_header_w(0x0000, 9, 2)
NXM_OF_TCP_DST = nxm_header(0x0000, 10, 2)
NXM_OF_TCP_DST_W = nxm_header_w(0x0000, 10, 2)
NXM_OF_UDP_SRC = nxm_header(0x0000, 11, 2)
NXM_OF_UDP_SRC_W = nxm_header_w(0x0000, 11, 2)
NXM_OF_UDP_DST = nxm_header(0x0000, 12, 2)
NXM_OF_UDP_DST_W = nxm_header_w(0x0000, 12, 2)
NXM_OF_ICMP_TYPE = nxm_header(0x0000, 13, 1)
NXM_OF_ICMP_CODE = nxm_header(0x0000, 14, 1)
NXM_OF_ARP_OP = nxm_header(0x0000, 15, 2)
NXM_OF_ARP_SPA = nxm_header(0x0000, 16, 4)
NXM_OF_ARP_SPA_W = nxm_header_w(0x0000, 16, 4)
NXM_OF_ARP_TPA = nxm_header(0x0000, 17, 4)
NXM_OF_ARP_TPA_W = nxm_header_w(0x0000, 17, 4)
NXM_NX_TUN_ID = nxm_header(0x0001, 16, 8)
NXM_NX_TUN_ID_W = nxm_header_w(0x0001, 16, 8)
NXM_NX_TUN_IPV4_SRC = nxm_header(0x0001, 31, 4)
NXM_NX_TUN_IPV4_SRC_W = nxm_header_w(0x0001, 31, 4)
NXM_NX_TUN_IPV4_DST = nxm_header(0x0001, 32, 4)
NXM_NX_TUN_IPV4_DST_W = nxm_header_w(0x0001, 32, 4)
NXM_NX_ARP_SHA = nxm_header(0x0001, 17, 6)
NXM_NX_ARP_THA = nxm_header(0x0001, 18, 6)
NXM_NX_IPV6_SRC = nxm_header(0x0001, 19, 16)
NXM_NX_IPV6_SRC_W = nxm_header_w(0x0001, 19, 16)
NXM_NX_IPV6_DST = nxm_header(0x0001, 20, 16)
NXM_NX_IPV6_DST_W = nxm_header_w(0x0001, 20, 16)
NXM_NX_ICMPV6_TYPE = nxm_header(0x0001, 21, 1)
NXM_NX_ICMPV6_CODE = nxm_header(0x0001, 22, 1)
NXM_NX_ND_TARGET = nxm_header(0x0001, 23, 16)
NXM_NX_ND_TARGET_W = nxm_header_w(0x0001, 23, 16)
NXM_NX_ND_SLL = nxm_header(0x0001, 24, 6)
NXM_NX_ND_TLL = nxm_header(0x0001, 25, 6)
NXM_NX_IP_FRAG = nxm_header(0x0001, 26, 1)
NXM_NX_IP_FRAG_W = nxm_header_w(0x0001, 26, 1)
NXM_NX_IPV6_LABEL = nxm_header(0x0001, 27, 4)
NXM_NX_IP_ECN = nxm_header(0x0001, 28, 1)
NXM_NX_IP_TTL = nxm_header(0x0001, 29, 1)
NXM_NX_PKT_MARK = nxm_header(0x0001, 33, 4)
NXM_NX_PKT_MARK_W = nxm_header_w(0x0001, 33, 4)
NXM_NX_TCP_FLAGS = nxm_header(0x0001, 34, 2)
NXM_NX_TCP_FLAGS_W = nxm_header_w(0x0001, 34, 2)
def nxm_nx_reg(idx):
    # Exact-match NXM header for Nicira register <idx> (class 0x0001, 4-byte field).
    return nxm_header(0x0001, idx, 4)
def nxm_nx_reg_w(idx):
    # Maskable (wildcardable) variant of the register header above.
    return nxm_header_w(0x0001, idx, 4)
# struct format for one NXM header: a single 32-bit big-endian unsigned int.
NXM_HEADER_PACK_STRING = '!I'
#
# The following are implementations for OpenFlow 1.2+
#
sys.modules[__name__].__doc__ = """
The API of this class is the same as ``OFPMatch``.
You can define the flow match by the keyword arguments.
The following arguments are available.
================ =============== ==============================================
Argument Value Description
================ =============== ==============================================
in_port_nxm Integer 16bit OpenFlow port number.
eth_dst_nxm MAC address Ethernet destination address.
eth_src_nxm MAC address Ethernet source address.
eth_type_nxm Integer 16bit Ethernet type. Needed to support Nicira
extensions that require the eth_type to
be set. (i.e. tcp_flags_nxm)
vlan_tci Integer 16bit VLAN TCI. Basically same as vlan_vid plus
vlan_pcp.
nw_tos Integer 8bit IP ToS or IPv6 traffic class field dscp.
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
ip_proto_nxm Integer 8bit IP protocol. Needed to support Nicira
extensions that require the ip_proto to
be set. (i.e. tcp_flags_nxm)
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
ipv4_src_nxm IPv4 address IPv4 source address.
Requires setting fields:
eth_type_nxm = 0x0800 (IPv4)
ipv4_dst_nxm IPv4 address IPv4 destination address.
Requires setting fields:
eth_type_nxm = 0x0800 (IPv4)
tcp_src_nxm Integer 16bit TCP source port.
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
and ip_proto_nxm = 6 (TCP)
tcp_dst_nxm Integer 16bit TCP destination port.
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
and ip_proto_nxm = 6 (TCP)
udp_src_nxm Integer 16bit UDP source port.
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
and ip_proto_nxm = 17 (UDP)
udp_dst_nxm      Integer 16bit   UDP destination port.
                                 Requires setting fields:
                                 eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
                                 and ip_proto_nxm = 17 (UDP)
icmpv4_type_nxm Integer 8bit Type matches the ICMP type and code matches
the ICMP code.
Requires setting fields:
eth_type_nxm = 0x0800 (IPv4) and
ip_proto_nxm = 1 (ICMP)
icmpv4_code_nxm Integer 8bit Type matches the ICMP type and code matches
the ICMP code.
Requires setting fields:
eth_type_nxm = 0x0800 (IPv4) and
ip_proto_nxm = 1 (ICMP)
arp_op_nxm Integer 16bit Only ARP opcodes between 1 and 255 should be
specified for matching.
Requires setting fields:
eth_type_nxm = 0x0806 (ARP)
arp_spa_nxm IPv4 address An address may be specified as an IP address
or host name.
Requires setting fields:
eth_type_nxm = 0x0806 (ARP)
arp_tpa_nxm IPv4 address An address may be specified as an IP address
or host name.
Requires setting fields:
eth_type_nxm = 0x0806 (ARP)
tunnel_id_nxm Integer 64bit Tunnel identifier.
arp_sha_nxm MAC address An address is specified as 6 pairs of
hexadecimal digits delimited by colons.
Requires setting fields:
eth_type_nxm = 0x0806 (ARP)
arp_tha_nxm MAC address An address is specified as 6 pairs of
hexadecimal digits delimited by colons.
Requires setting fields:
eth_type_nxm = 0x0806 (ARP)
ipv6_src_nxm IPv6 address IPv6 source address.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6)
ipv6_dst_nxm IPv6 address IPv6 destination address.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6)
icmpv6_type_nxm Integer 8bit Type matches the ICMP type and code matches
the ICMP code.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6) and
ip_proto_nxm = 58 (ICMP for IPv6)
icmpv6_code_nxm Integer 8bit Type matches the ICMP type and code matches
the ICMP code.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6) and
ip_proto_nxm = 58 (ICMP for IPv6)
nd_target IPv6 address The target address ipv6.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6) and
ip_proto_nxm = 58 (ICMP for IPv6)
nd_sll MAC address The source link-layer address option.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6) and
ip_proto_nxm = 58 (ICMP for IPv6) and
icmpv6_type_nxm = 135 (Neighbor solicitation)
nd_tll MAC address The target link-layer address option.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6) and
ip_proto_nxm = 58 (ICMP for IPv6) and
icmpv6_type_nxm = 136 (Neighbor advertisement)
ip_frag Integer 8bit frag_type specifies what kind of IP fragments
or non-fragments to match.
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
ipv6_label Integer 32bit Matches IPv6 flow label.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6)
ip_ecn_nxm Integer 8bit Matches ecn bits in IP ToS or IPv6 traffic
class fields.
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
nw_ttl Integer 8bit IP TTL or IPv6 hop limit value ttl.
Requires setting fields:
eth_type_nxm = [0x0800 (IPv4)|0x86dd (IPv6)]
mpls_ttl Integer 8bit The TTL of the outer MPLS label stack entry
of a packet.
Requires setting fields:
eth_type_nxm = 0x8847 (MPLS Unicast)
tun_ipv4_src IPv4 address Tunnel IPv4 source address.
Requires setting fields:
eth_type_nxm = 0x0800 (IPv4)
tun_ipv4_dst IPv4 address Tunnel IPv4 destination address.
Requires setting fields:
eth_type_nxm = 0x0800 (IPv4)
pkt_mark Integer 32bit Packet metadata mark.
tcp_flags_nxm Integer 16bit TCP Flags. Requires setting fields:
eth_type_nxm = [0x0800 (IP)|0x86dd (IPv6)] and
ip_proto_nxm = 6 (TCP)
conj_id Integer 32bit Conjunction ID used only with
the conjunction action
tun_gbp_id Integer 16bit The group policy identifier in the
VXLAN header.
tun_gbp_flags Integer 8bit The group policy flags in the
VXLAN header.
tun_flags Integer 16bit Flags indicating various aspects of
the tunnel encapsulation.
ct_state Integer 32bit Conntrack state.
ct_zone Integer 16bit Conntrack zone.
ct_mark Integer 32bit Conntrack mark.
ct_label Integer 128bit Conntrack label.
tun_ipv6_src IPv6 address Tunnel IPv6 source address.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6)
tun_ipv6_dst IPv6 address Tunnel IPv6 destination address.
Requires setting fields:
eth_type_nxm = 0x86dd (IPv6)
_recirc_id Integer 32bit ID for recirculation.
_dp_hash Integer 32bit Flow hash computed in Datapath.
nsh_flags Integer 8bit Flags field in NSH Base Header.
Requires eth_type_nxm = 0x894f (NSH).
Since OpenFlow 1.3 and OVS v2.8.
nsh_mdtype Integer 8bit Metadata Type in NSH Base Header.
Requires eth_type_nxm = 0x894f (NSH).
Since OpenFlow 1.3 and OVS v2.8.
nsh_np Integer 8bit Next Protocol type in NSH Base Header.
Requires eth_type_nxm = 0x894f (NSH).
Since OpenFlow 1.3 and OVS v2.8.
nsh_spi Integer 32bit Service Path Identifier in NSH Service Path
Header.
Requires eth_type_nxm = 0x894f (NSH).
Since OpenFlow 1.3 and OVS v2.8.
nsh_si Integer 8bit Service Index in NSH Service Path Header.
Requires eth_type_nxm = 0x894f (NSH).
Since OpenFlow 1.3 and OVS v2.8.
nsh_c<N> Integer 32bit Context fields in NSH Context Header.
<N> is a number of 1-4.
Requires eth_type_nxm = 0x894f (NSH).
Since OpenFlow 1.3 and OVS v2.8.
nsh_ttl Integer 8bit TTL field in NSH Base Header.
Requires eth_type_nxm = 0x894f (NSH).
Since OpenFlow 1.3 and OVS v2.9.
reg<idx> Integer 32bit Packet register.
<idx> is register number 0-15.
xxreg<idx> Integer 128bit Packet extended-extended register.
<idx> is register number 0-3.
================ =============== ==============================================
.. Note::
Setting the TCP flags via the nicira extensions.
This is required when using OVS version < 2.4.
When using the nxm fields, you need to use any nxm prereq
fields as well or you will receive a OFPBMC_BAD_PREREQ error
Example::
# WILL NOT work
flag = tcp.TCP_ACK
match = parser.OFPMatch(
tcp_flags_nxm=(flag, flag),
ip_proto=inet.IPPROTO_TCP,
eth_type=eth_type)
# Works
flag = tcp.TCP_ACK
match = parser.OFPMatch(
tcp_flags_nxm=(flag, flag),
ip_proto_nxm=inet.IPPROTO_TCP,
eth_type_nxm=eth_type)
"""
# Nicira experimenter OXM field definitions: (match-field name, field id,
# value codec). These entries feed Ryu's generic OXM machinery; the
# NiciraExtended0/1 and *Experimenter classes (imported above this chunk)
# select the vendor/experimenter wire encoding for each field.
oxm_types = [
    # OFPXMC_NXM_0
    oxm_fields.NiciraExtended0('in_port_nxm', 0, type_desc.Int2),
    oxm_fields.NiciraExtended0('eth_dst_nxm', 1, type_desc.MacAddr),
    oxm_fields.NiciraExtended0('eth_src_nxm', 2, type_desc.MacAddr),
    oxm_fields.NiciraExtended0('eth_type_nxm', 3, type_desc.Int2),
    oxm_fields.NiciraExtended0('vlan_tci', 4, type_desc.Int2),
    oxm_fields.NiciraExtended0('nw_tos', 5, type_desc.Int1),
    oxm_fields.NiciraExtended0('ip_proto_nxm', 6, type_desc.Int1),
    oxm_fields.NiciraExtended0('ipv4_src_nxm', 7, type_desc.IPv4Addr),
    oxm_fields.NiciraExtended0('ipv4_dst_nxm', 8, type_desc.IPv4Addr),
    oxm_fields.NiciraExtended0('tcp_src_nxm', 9, type_desc.Int2),
    oxm_fields.NiciraExtended0('tcp_dst_nxm', 10, type_desc.Int2),
    oxm_fields.NiciraExtended0('udp_src_nxm', 11, type_desc.Int2),
    oxm_fields.NiciraExtended0('udp_dst_nxm', 12, type_desc.Int2),
    oxm_fields.NiciraExtended0('icmpv4_type_nxm', 13, type_desc.Int1),
    oxm_fields.NiciraExtended0('icmpv4_code_nxm', 14, type_desc.Int1),
    oxm_fields.NiciraExtended0('arp_op_nxm', 15, type_desc.Int2),
    oxm_fields.NiciraExtended0('arp_spa_nxm', 16, type_desc.IPv4Addr),
    oxm_fields.NiciraExtended0('arp_tpa_nxm', 17, type_desc.IPv4Addr),
    # OFPXMC_NXM_1
    oxm_fields.NiciraExtended1('tunnel_id_nxm', 16, type_desc.Int8),
    oxm_fields.NiciraExtended1('arp_sha_nxm', 17, type_desc.MacAddr),
    oxm_fields.NiciraExtended1('arp_tha_nxm', 18, type_desc.MacAddr),
    oxm_fields.NiciraExtended1('ipv6_src_nxm', 19, type_desc.IPv6Addr),
    oxm_fields.NiciraExtended1('ipv6_dst_nxm', 20, type_desc.IPv6Addr),
    oxm_fields.NiciraExtended1('icmpv6_type_nxm', 21, type_desc.Int1),
    oxm_fields.NiciraExtended1('icmpv6_code_nxm', 22, type_desc.Int1),
    oxm_fields.NiciraExtended1('nd_target', 23, type_desc.IPv6Addr),
    oxm_fields.NiciraExtended1('nd_sll', 24, type_desc.MacAddr),
    oxm_fields.NiciraExtended1('nd_tll', 25, type_desc.MacAddr),
    oxm_fields.NiciraExtended1('ip_frag', 26, type_desc.Int1),
    oxm_fields.NiciraExtended1('ipv6_label', 27, type_desc.Int4),
    oxm_fields.NiciraExtended1('ip_ecn_nxm', 28, type_desc.Int1),
    oxm_fields.NiciraExtended1('nw_ttl', 29, type_desc.Int1),
    oxm_fields.NiciraExtended1('mpls_ttl', 30, type_desc.Int1),
    oxm_fields.NiciraExtended1('tun_ipv4_src', 31, type_desc.IPv4Addr),
    oxm_fields.NiciraExtended1('tun_ipv4_dst', 32, type_desc.IPv4Addr),
    oxm_fields.NiciraExtended1('pkt_mark', 33, type_desc.Int4),
    oxm_fields.NiciraExtended1('tcp_flags_nxm', 34, type_desc.Int2),
    oxm_fields.NiciraExtended1('conj_id', 37, type_desc.Int4),
    oxm_fields.NiciraExtended1('tun_gbp_id', 38, type_desc.Int2),
    oxm_fields.NiciraExtended1('tun_gbp_flags', 39, type_desc.Int1),
    oxm_fields.NiciraExtended1('tun_flags', 104, type_desc.Int2),
    oxm_fields.NiciraExtended1('ct_state', 105, type_desc.Int4),
    oxm_fields.NiciraExtended1('ct_zone', 106, type_desc.Int2),
    oxm_fields.NiciraExtended1('ct_mark', 107, type_desc.Int4),
    oxm_fields.NiciraExtended1('ct_label', 108, type_desc.Int16),
    oxm_fields.NiciraExtended1('tun_ipv6_src', 109, type_desc.IPv6Addr),
    oxm_fields.NiciraExtended1('tun_ipv6_dst', 110, type_desc.IPv6Addr),
    # Prefix the name with '_' to indicate this is not intended to be used
    # in wild.
    # Because the following definitions are supposed to be internal use only
    # in OVS.
    oxm_fields.NiciraExtended1('_recirc_id', 36, type_desc.Int4),
    # The following definition is merely for testing 64-bit experimenter OXMs.
    # Following Open vSwitch, we use dp_hash for this purpose.
    # Prefix the name with '_' to indicate this is not intended to be used
    # in wild.
    oxm_fields.NiciraExperimenter('_dp_hash', 0, type_desc.Int4),
    # Nicira Experimenter for Network Service Header
    oxm_fields.NiciraNshExperimenter('nsh_flags', 1, type_desc.Int1),
    oxm_fields.NiciraNshExperimenter('nsh_mdtype', 2, type_desc.Int1),
    oxm_fields.NiciraNshExperimenter('nsh_np', 3, type_desc.Int1),
    # aka "nsp"
    oxm_fields.NiciraNshExperimenter('nsh_spi', 4, type_desc.Int4),
    # aka "nsi"
    oxm_fields.NiciraNshExperimenter('nsh_si', 5, type_desc.Int1),
    # aka "nshc<N>"
    oxm_fields.NiciraNshExperimenter('nsh_c1', 6, type_desc.Int4),
    oxm_fields.NiciraNshExperimenter('nsh_c2', 7, type_desc.Int4),
    oxm_fields.NiciraNshExperimenter('nsh_c3', 8, type_desc.Int4),
    oxm_fields.NiciraNshExperimenter('nsh_c4', 9, type_desc.Int4),
    oxm_fields.NiciraNshExperimenter('nsh_ttl', 10, type_desc.Int1),
    # Support for matching/setting NX registers 0-15
    oxm_fields.NiciraExtended1('reg0', 0, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg1', 1, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg2', 2, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg3', 3, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg4', 4, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg5', 5, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg6', 6, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg7', 7, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg8', 8, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg9', 9, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg10', 10, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg11', 11, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg12', 12, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg13', 13, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg14', 14, type_desc.Int4),
    oxm_fields.NiciraExtended1('reg15', 15, type_desc.Int4),
    # Support for matching/setting NX extended-extended registers 0-3
    oxm_fields.NiciraExtended1('xxreg0', 111, type_desc.Int16),
    oxm_fields.NiciraExtended1('xxreg1', 112, type_desc.Int16),
    oxm_fields.NiciraExtended1('xxreg2', 113, type_desc.Int16),
    oxm_fields.NiciraExtended1('xxreg3', 114, type_desc.Int16),
]
| 41.199734
| 79
| 0.654762
|
4a06b74c49584a8700af25bb344579795873a8ee
| 246
|
py
|
Python
|
setup.py
|
florianwittkamp/nicegui
|
4c054f4e5b82e4ac56db93b73d5fb5ffcd480d06
|
[
"MIT"
] | 30
|
2021-06-16T15:46:45.000Z
|
2022-03-27T03:14:18.000Z
|
setup.py
|
florianwittkamp/nicegui
|
4c054f4e5b82e4ac56db93b73d5fb5ffcd480d06
|
[
"MIT"
] | 11
|
2021-05-24T17:05:22.000Z
|
2022-02-19T07:13:18.000Z
|
setup.py
|
florianwittkamp/nicegui
|
4c054f4e5b82e4ac56db93b73d5fb5ffcd480d06
|
[
"MIT"
] | 7
|
2021-07-22T05:51:04.000Z
|
2022-01-31T19:39:37.000Z
|
#!/usr/bin/env python3
from distutils.core import setup
# Minimal package metadata for the NiceGUI distribution.
# NOTE(review): distutils is deprecated (removed from the stdlib in Python
# 3.12) — consider migrating to setuptools / pyproject.toml.
setup(
    name='NiceGUI',
    version='1.0',
    author='Zauberzeug',
    author_email='info@zauberzeug.com',
    url='https://github.com/zauberzeug/nicegui/',
    packages=['nicegui'],
)
| 20.5
| 49
| 0.662602
|
4a06b7b17f15a557d0cd06a942bb026540c53821
| 362
|
py
|
Python
|
Daily Programmer/Easy Challenges/0016 - Character Removal/Solution.py
|
MJVL/Daily-Programmer-Challenges
|
7db19b46754436189dbcf980348fe47708b44c7e
|
[
"MIT"
] | 2
|
2018-03-28T18:08:51.000Z
|
2018-06-09T20:27:52.000Z
|
Daily Programmer/Easy Challenges/0016 - Character Removal/Solution.py
|
MJVL/Programming-Challenges
|
7db19b46754436189dbcf980348fe47708b44c7e
|
[
"MIT"
] | null | null | null |
Daily Programmer/Easy Challenges/0016 - Character Removal/Solution.py
|
MJVL/Programming-Challenges
|
7db19b46754436189dbcf980348fe47708b44c7e
|
[
"MIT"
] | null | null | null |
def remove_characters(str_one, str_two):
    """Return *str_one* with every occurrence of each character of *str_two* removed."""
    # str.translate with a deletion table strips all the characters in a
    # single C-level pass, equivalent to chaining .replace(c, "") per char.
    return str_one.translate(str.maketrans("", "", str_two))
def main():
    """Prompt for a string and a character set, then print the string with
    those characters removed."""
    # input() already returns str in Python 3 — the str() wrappers around it
    # were redundant and have been dropped.
    str_one = input("Enter the string: ")
    characters = input("Enter characters to remove: ")
    print("Modified string: %s" % remove_characters(str_one, characters))
if __name__ == '__main__':
    main()
| 24.133333
| 73
| 0.649171
|
4a06b947056dbce421714238d115bf0ad07ac1cc
| 7,718
|
py
|
Python
|
ROS/rufus_remote/src/bras_teleop.py
|
Beam-create/S4_projet_QUADRUS_V2
|
304b434b552a4a5377dd2f49c4bb4c8e9fcb714b
|
[
"MIT"
] | null | null | null |
ROS/rufus_remote/src/bras_teleop.py
|
Beam-create/S4_projet_QUADRUS_V2
|
304b434b552a4a5377dd2f49c4bb4c8e9fcb714b
|
[
"MIT"
] | 52
|
2022-02-12T17:12:17.000Z
|
2022-03-30T00:39:18.000Z
|
ROS/rufus_remote/src/bras_teleop.py
|
Beam-create/S4_projet_QUADRUS_V2
|
304b434b552a4a5377dd2f49c4bb4c8e9fcb714b
|
[
"MIT"
] | 2
|
2022-01-19T15:44:53.000Z
|
2022-01-20T21:44:41.000Z
|
#!/usr/bin/env python3
"""
File to convert joy messages to joint angles messages
Subscriber : joy
Publisher : rufus/bras_teleop
"""
import rospy
from sensor_msgs.msg import Joy
from rufus_master.msg import bras_commands
from geometry_msgs.msg import Vector3
from sympy import *
class bras_teleop:
    """Teleoperation bridge for the robot arm.

    Converts gamepad input (``Joy``) and camera ball positions (``Vector3``)
    into ``bras_commands`` messages published on ``rufus/bras_arduino``.
    """
    def __init__(self):
        """
        Initialize subscriber, publisher and node
        """
        # Command message object (reused and mutated on every publish).
        self.commands = bras_commands()
        self.joy_sub = rospy.Subscriber("joy", Joy, self.cb_joy)
        self.cam_sub = rospy.Subscriber("/camera/Ball_pos", Vector3, self.cb_cam)
        self.comm_pub = rospy.Publisher("rufus/bras_arduino", bras_commands, queue_size=1)
        # Initial values of bras_command message
        self.commands.q1 = 0.0
        self.commands.q2 = 90.0
        self.commands.q3 = 0.0
        self.commands.gimbalAng = 90.0
        self.commands.mode = False
        self.commands.effector = False
        # Arm link dimensions used by the inverse kinematics.
        self.L1 = 9.5 #cm
        self.L2 = 16.0 #cm
        self.L3 = 18.0 #cm
        self.L4y = 9.8 #cm
        self.L4x = 3.5 #cm
        # Camera offset from the arm base — presumably cm too; TODO confirm.
        self.camx = 12.48
        self.camy = 10.87
        # True once a ball position inside the workspace limits was received.
        self.isGood = False
        # NOTE(review): initialized as a plain list, but cb_cam replaces it
        # with a Vector3 and inverseKinematics reads .x/.y/.z — IK is only
        # safe after the first valid camera message (auto mode is gated on
        # isGood in cb_joy).
        self.ball_position = [0] * 3
        # Joint increment applied per control tick (degrees, judging by the
        # joint limits below and the degree outputs of the IK).
        self.ang_inc = 0.5
        # One latch per button-driven motion direction.
        self.flags = {
            "q1+": False,
            "q1-": False,
            "q2+": False,
            "q2-": False,
            "q3+": False,
            "q3-": False,
            "gim+": False,
            "gim-": False,
        }
        # Workspace (x/y/z) and joint/gimbal angle limits.
        self.lim = {
            "x_min":0.0,
            "x_max":200.0,
            "y_min":0.0,
            "y_max":200.0,
            "z_min":-200.0,
            "z_max":200.0,
            "q1_min":-45.0,
            "q1_max":45.0,
            "q2_min":30.0,
            "q2_max":130.0,
            "q3_min":-15.0,
            "q3_max":60.0,
            "gim_max":90.0,
            "gim_min":20.0
        }
    ##################### Class methods #######################
    def verify_camLimits(self, array):
        # Returns True (and latches isGood) iff [x, y, z] lies strictly
        # inside the workspace limits; boundary values are rejected.
        if array[0] <= self.lim['x_min'] or array[0] >= self.lim['x_max'] or array[1] <= self.lim['y_min'] or array[1] >= self.lim['y_max'] or array[2] <= self.lim['z_min'] or array[2] >= self.lim['z_max']:
            self.isGood = False
            return False
        else:
            self.isGood = True
            return True
    def inverseKinematics(self):
        """
        Function to find the inverse kinematics for the robot arm
        :param x: Position of the object on the 'x' axis
        :param y: Position of the object on the 'y' axis
        :param z: Position of the object on the 'z' axis
        :return: q1, q2, q3 -> Corresponding joint angles in degrees
        """
        x = self.ball_position.x
        y = self.ball_position.y
        z = self.ball_position.z
        pi = 3.14159265359
        ik_angles = [0] * 3 # Init empty array of size 3
        # Find the value for the first angle
        q1 = atan2(z, x)
        #Solving equation for q2 and q3
        a = Symbol('a') # Angle q2
        b = Symbol('b') # Angle q3
        ########## Final solution for solving the inverse kinematics #############
        e1 = Eq(cos(q1)*(self.L2*cos(a) + self.L3*cos(b) + self.L4x) - x - self.camx, 0.0) #x equation
        e2 = Eq(self.camy + self.L1 + self.L2*sin(a) - self.L3*sin(b) - self.L4y - y, 0.0) #y equation
        sol = nsolve([e1, e2], [a, b], [pi/2, 0]) #pi/2 rad is the initial q2 angle and 0 rad is q3
        Angle_q2 = float(sol[0])*180/pi
        Angle_q3 = float(sol[1])*180/pi
        # Angles converted to degrees.
        ik_angles[0] = round(float(q1)*180 / pi, 2)
        ik_angles[1] = round(Angle_q2,2)
        ik_angles[2] = round(Angle_q3,2)
        return ik_angles
    ################## Callback functions ###################
    def cb_cam(self, data):
        """
        Function callback from camera topic and verifies limits of received message
        :param x: Position of the object on the 'x' axis
        :param y: Position of the object on the 'y' axis
        :param z: Position of the object on the 'z' axis
        :return: void
        """
        # Only store positions inside the workspace; out-of-range readings
        # keep the previous ball_position and clear isGood.
        if self.verify_camLimits([data.x, data.y, data.z]):
            self.ball_position = data
    def cb_joy(self, data):
        # Check the gamepad configuration to assign the angles.q* values.
        # Force manual mode when the home button (8) is pressed.
        if (data.buttons[8]):#data.buttons[16] or data.buttons[15] or data.buttons[13] or data.buttons[14] or data.buttons[0] or data.buttons[2] or data.buttons[5] or data.buttons[4] or
            self.commands.mode = False
        #q1
        self.flags["q1+"] = True if data.buttons[16] else False
        self.flags["q1-"] = True if data.buttons[15] else False
        #q2
        self.flags["q2+"] = True if data.buttons[13] else False
        self.flags["q2-"] = True if data.buttons[14] else False
        #q3
        self.flags["q3+"] = True if data.buttons[0] else False
        self.flags["q3-"] = True if data.buttons[2] else False
        #gimbal
        self.flags["gim+"] = True if data.buttons[7] else False
        self.flags["gim-"] = True if data.buttons[6] else False
        #effector
        if(data.buttons[5]):
            self.commands.effector = True
        if(data.buttons[4]):
            self.commands.effector = False
        # Go to home
        if(data.buttons[8]):
            self.commands.q1 = 0.0
            self.commands.q2 = 90.0
            self.commands.q3 = 0.0
        #mode Auto
        # Auto (IK) mode is only allowed once a valid ball position exists.
        if(data.buttons[1] and self.isGood):
            self.commands.mode = True
    def controllerCommands(self):
        """
        Function to set command message according to user inputs
        :return: void
        """
        # q1
        if(self.flags["q1+"]):
            self.commands.q1 = self.lim["q1_max"] if (self.commands.q1 + self.ang_inc >= self.lim["q1_max"]) else self.commands.q1 + self.ang_inc
        if(self.flags["q1-"]):
            self.commands.q1 = self.lim["q1_min"] if (self.commands.q1 - self.ang_inc <= self.lim["q1_min"]) else self.commands.q1 - self.ang_inc
        #q2
        if(self.flags["q2+"]): #limit checking is done elsewhere (original note truncated) — TODO confirm
            self.commands.q2 = self.commands.q2 + self.ang_inc
        if(self.flags["q2-"]):
            self.commands.q2 = self.commands.q2 - self.ang_inc
        #q3
        if(self.flags["q3+"]):
            self.commands.q3 = self.commands.q3 + self.ang_inc
        if(self.flags["q3-"]):
            self.commands.q3 = self.commands.q3 - self.ang_inc
        # Gimbal control
        if(self.flags["gim+"]):
            self.commands.gimbalAng = self.lim["gim_max"] if (self.commands.gimbalAng + self.ang_inc >= self.lim["gim_max"]) else self.commands.gimbalAng + self.ang_inc
        if(self.flags["gim-"]):
            self.commands.gimbalAng = self.lim["gim_min"] if (self.commands.gimbalAng - self.ang_inc <= self.lim["gim_min"]) else self.commands.gimbalAng - self.ang_inc
        # IK mode
        if(self.commands.mode):
            try:
                Angles = self.inverseKinematics()
                self.commands.q1 = Angles[0]
                self.commands.q2 = Angles[1]
                self.commands.q3 = Angles[2]
            # NOTE(review): bare except silently drops sympy solver failures
            # (keeps the last commanded angles); consider narrowing + logging.
            except:
                pass
if __name__=='__main__':
    # Messages are published at a rate of 22Hz to bras_commands
    try:
        # rospy requires init_node() to run before any Publisher/Subscriber
        # is created; the original constructed bras_teleop (which registers
        # both) first, which rospy rejects with a ROSInitException.
        rospy.init_node('bras_teleop', anonymous=True)
        bras_t = bras_teleop()
        rate = rospy.Rate(22)
        while not rospy.is_shutdown():
            bras_t.controllerCommands()
            bras_t.comm_pub.publish(bras_t.commands)
            rate.sleep()
    except rospy.ROSInterruptException:
        # Normal exit path when ROS shuts the node down mid-sleep.
        pass
| 32.842553
| 206
| 0.549754
|
4a06ba244aa65eeebc0bd9877f86082732bae47b
| 482
|
py
|
Python
|
examples/sync_client.py
|
william-wambua/rpc.py
|
af43ccc9734c751ab49171c3a4c004a996612d73
|
[
"Apache-2.0"
] | 152
|
2020-07-11T07:56:35.000Z
|
2022-03-23T07:06:19.000Z
|
examples/sync_client.py
|
william-wambua/rpc.py
|
af43ccc9734c751ab49171c3a4c004a996612d73
|
[
"Apache-2.0"
] | 14
|
2020-07-16T02:16:21.000Z
|
2022-03-30T13:54:59.000Z
|
examples/sync_client.py
|
william-wambua/rpc.py
|
af43ccc9734c751ab49171c3a4c004a996612d73
|
[
"Apache-2.0"
] | 11
|
2020-11-26T14:23:05.000Z
|
2021-12-02T18:36:29.000Z
|
from typing import Generator
import httpx
from rpcpy.client import Client
from rpcpy.typing import TypedDict
# Remote-call client: each decorated stub mirrors a server-side function.
# The local bodies are never executed — rpc.py serializes the call over HTTP.
app = Client(httpx.Client(), base_url="http://127.0.0.1:65432/")
@app.remote_call
def none() -> None:
    ...
@app.remote_call
def sayhi(name: str) -> str:
    ...
@app.remote_call
def yield_data(max_num: int) -> Generator[int, None, None]:
    # `yield` makes this a generator function so the client streams results.
    yield
# Functional TypedDict form is required because "other-key" is not a valid
# Python identifier.
D = TypedDict("D", {"key": str, "other-key": str})
@app.remote_call
def query_dict(value: str) -> D:
    ...
| 15.0625
| 64
| 0.661826
|
4a06bb62e8deb7cd306af483b416feb6f18be2dd
| 8,314
|
py
|
Python
|
asposewordscloud/models/style_update.py
|
rizwanniazigroupdocs/aspose-words-cloud-python
|
b943384a1e3c0710cc84df74119e6edf7356037e
|
[
"MIT"
] | null | null | null |
asposewordscloud/models/style_update.py
|
rizwanniazigroupdocs/aspose-words-cloud-python
|
b943384a1e3c0710cc84df74119e6edf7356037e
|
[
"MIT"
] | null | null | null |
asposewordscloud/models/style_update.py
|
rizwanniazigroupdocs/aspose-words-cloud-python
|
b943384a1e3c0710cc84df74119e6edf7356037e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="style_update.py">
# Copyright (c) 2020 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
import json
class StyleUpdate(object):
    """Represents a single document style properties to update.

    Attributes:
        swagger_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    swagger_types = {
        'base_style_name': 'str',
        'is_quick_style': 'bool',
        'name': 'str',
        'next_paragraph_style_name': 'str'
    }

    attribute_map = {
        'base_style_name': 'BaseStyleName',
        'is_quick_style': 'IsQuickStyle',
        'name': 'Name',
        'next_paragraph_style_name': 'NextParagraphStyleName'
    }

    def __init__(self, base_style_name=None, is_quick_style=None, name=None, next_paragraph_style_name=None):  # noqa: E501
        """StyleUpdate - a model defined in Swagger"""  # noqa: E501
        self._base_style_name = None
        self._is_quick_style = None
        self._name = None
        self._next_paragraph_style_name = None
        self.discriminator = None

        # Run the property setters only for explicitly supplied values.
        if base_style_name is not None:
            self.base_style_name = base_style_name
        if is_quick_style is not None:
            self.is_quick_style = is_quick_style
        if name is not None:
            self.name = name
        if next_paragraph_style_name is not None:
            self.next_paragraph_style_name = next_paragraph_style_name

    @property
    def base_style_name(self):
        """str: Gets or sets the name of the style this style is based on."""
        return self._base_style_name

    @base_style_name.setter
    def base_style_name(self, base_style_name):
        """Sets the base_style_name of this StyleUpdate.

        :param base_style_name: the name of the style this style is based on.
        :type: str
        """
        self._base_style_name = base_style_name

    @property
    def is_quick_style(self):
        """bool: Gets or sets a value indicating whether this style is shown
        in the Quick Style gallery inside MS Word UI."""
        return self._is_quick_style

    @is_quick_style.setter
    def is_quick_style(self, is_quick_style):
        """Sets the is_quick_style of this StyleUpdate.

        :param is_quick_style: whether the style appears in the Quick Style gallery.
        :type: bool
        """
        self._is_quick_style = is_quick_style

    @property
    def name(self):
        """str: Gets or sets the name of the style."""
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this StyleUpdate.

        :param name: the name of the style.
        :type: str
        """
        self._name = name

    @property
    def next_paragraph_style_name(self):
        """str: Gets or sets the name of the style to be applied automatically
        to a new paragraph inserted after a paragraph formatted with the
        specified style."""
        return self._next_paragraph_style_name

    @next_paragraph_style_name.setter
    def next_paragraph_style_name(self, next_paragraph_style_name):
        """Sets the next_paragraph_style_name of this StyleUpdate.

        :param next_paragraph_style_name: style applied to the following paragraph.
        :type: str
        """
        self._next_paragraph_style_name = next_paragraph_style_name

    @staticmethod
    def _serialize(value):
        """Recursively convert a value to plain Python data.

        Nested models (anything exposing ``to_dict``) are expanded; lists and
        dicts are walked one level, exactly as the original generated code did
        in both to_dict() and to_json(). Factored out to remove duplication.
        """
        if isinstance(value, list):
            return [item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value]
        if hasattr(value, "to_dict"):
            return value.to_dict()
        if isinstance(value, dict):
            return {k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()}
        return value

    def to_dict(self):
        """Returns the model properties as a dict keyed by attribute name."""
        # Native dict iteration replaces six.iteritems (Python 2 shim);
        # insertion order of swagger_types is preserved (Python 3.7+).
        return {attr: self._serialize(getattr(self, attr))
                for attr in self.swagger_types}

    def to_json(self):
        """Returns the model properties as a JSON string keyed by the JSON
        names from attribute_map (the original docstring wrongly said dict)."""
        return json.dumps({self.attribute_map[attr]: self._serialize(getattr(self, attr))
                           for attr in self.swagger_types})

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, StyleUpdate):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 36.147826
| 166
| 0.610536
|
4a06bbd5e0b3d04198b83195c6b3c4ee8d0741e4
| 489
|
py
|
Python
|
user/migrations/0002_auto_20171101_1602.py
|
Ilmanfordinner/registration
|
d729f9a990d6f63dc29e14c1657dba87c3233971
|
[
"MIT"
] | null | null | null |
user/migrations/0002_auto_20171101_1602.py
|
Ilmanfordinner/registration
|
d729f9a990d6f63dc29e14c1657dba87c3233971
|
[
"MIT"
] | null | null | null |
user/migrations/0002_auto_20171101_1602.py
|
Ilmanfordinner/registration
|
d729f9a990d6f63dc29e14c1657dba87c3233971
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-11-01 23:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: redefines User.nickname as a
    # 255-char CharField labeled "Preferred name".
    dependencies = [
        ('user', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='nickname',
            field=models.CharField(max_length=255, verbose_name='Preferred name'),
        ),
    ]
| 23.285714
| 83
| 0.593047
|
4a06bbfc90ce78ff69e3b34567739468b5c5deba
| 2,295
|
py
|
Python
|
services/dy-3dvis/src/3d-viewer/utils/state_manager.py
|
GitHK/osparc-services-forked
|
a8ab08ff7c32de8f1abde015c1515e8cf61426c0
|
[
"MIT"
] | 1
|
2019-07-26T02:04:44.000Z
|
2019-07-26T02:04:44.000Z
|
services/dy-3dvis/src/3d-viewer/utils/state_manager.py
|
GitHK/osparc-services-forked
|
a8ab08ff7c32de8f1abde015c1515e8cf61426c0
|
[
"MIT"
] | null | null | null |
services/dy-3dvis/src/3d-viewer/utils/state_manager.py
|
GitHK/osparc-services-forked
|
a8ab08ff7c32de8f1abde015c1515e8cf61426c0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
""" Tries to pull the node data from S3. Will return error code unless the --silent flag is on and only a warning will be output.
Usage python state_puller.py PATH_OR_FILE --silent
:return: error code
"""
import argparse
import asyncio
import logging
import os
import sys
import time
from enum import IntEnum
from pathlib import Path
from simcore_sdk.node_data import data_manager
from simcore_sdk.node_ports import exceptions
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__file__ if __name__ == "__main__" else __name__)
class ExitCode(IntEnum):
SUCCESS = 0
FAIL = 1
def state_path() -> Path:
path = os.environ.get("SIMCORE_NODE_APP_STATE_PATH", "undefined")
assert path != "undefined", "SIMCORE_NODE_APP_STATE_PATH is not defined!"
return Path(path)
def main(args=None) -> int:
try:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--path", help="The folder or file to get for the node",
type=Path, default=state_path(), required=False)
parser.add_argument("--silent", help="The script will silently fail if the flag is on",
default=False, const=True, action="store_const", required=False)
parser.add_argument("type", help="push or pull",
choices=["push", "pull"])
options = parser.parse_args(args)
loop = asyncio.get_event_loop()
# push or pull state
start_time = time.clock()
loop.run_until_complete(getattr(data_manager, options.type)(options.path))
end_time = time.clock()
log.info("time to %s: %.2fseconds", options.type, end_time - start_time)
return ExitCode.SUCCESS
except exceptions.S3InvalidPathError:
if options.silent:
log.warning("Could not %s state from S3 for %s", options.type, options.path)
return ExitCode.SUCCESS
log.exception("Could not %s state from S3 for %s", options.type, options.path)
return ExitCode.FAIL
except: # pylint: disable=bare-except
log.exception("Unexpected error when %s state from/to S3 for %s", options.type, options.path)
return ExitCode.FAIL
if __name__ == "__main__":
sys.exit(main())
| 33.26087
| 129
| 0.67146
|
4a06bc0478fdfb66443a29a5adbad3df630a4f94
| 10,343
|
py
|
Python
|
script/train.py
|
Wei-Jyu/Deep-Hierarchical-Interest-Network-for-Click-Through-Rate-Prediction
|
e7348f4e3e1297f85ca68b006890cbd62ca2cb8b
|
[
"MIT"
] | null | null | null |
script/train.py
|
Wei-Jyu/Deep-Hierarchical-Interest-Network-for-Click-Through-Rate-Prediction
|
e7348f4e3e1297f85ca68b006890cbd62ca2cb8b
|
[
"MIT"
] | null | null | null |
script/train.py
|
Wei-Jyu/Deep-Hierarchical-Interest-Network-for-Click-Through-Rate-Prediction
|
e7348f4e3e1297f85ca68b006890cbd62ca2cb8b
|
[
"MIT"
] | 1
|
2020-11-20T07:14:30.000Z
|
2020-11-20T07:14:30.000Z
|
import tensorflow as tf
import numpy
from data_iterator import DataIterator
from model import *
#import time
import argparse
import random
import sys
from utils import *
parser = argparse.ArgumentParser()
parser.add_argument('-task', type=str, default='train', help='train | test')
parser.add_argument('--model_type', type=str, default='none', help='DHIN | DIEN | ..')
EMBEDDING_DIM = 16
HIDDEN_SIZE = 16 * 2
ATTENTION_SIZE = 16 * 2
best_auc = 0.0
def prepare_data(input, target, maxlen = None, return_neg = False):
# x: a list of sentences
seqs_mid = [inp[3] for inp in input]
seqs_cat = [inp[4] for inp in input]
lengths_x = [len(s[4]) for s in input]
noclk_seqs_mid = [inp[5] for inp in input]
noclk_seqs_cat = [inp[6] for inp in input]
if maxlen is not None:
new_seqs_mid = []
new_seqs_cat = []
new_noclk_seqs_mid = []
new_noclk_seqs_cat = []
new_lengths_x = []
for l_x, inp in zip(lengths_x, input):
if l_x > maxlen:
new_seqs_mid.append(inp[3][l_x - maxlen:])
new_seqs_cat.append(inp[4][l_x - maxlen:])
new_noclk_seqs_mid.append(inp[5][l_x - maxlen:])
new_noclk_seqs_cat.append(inp[6][l_x - maxlen:])
new_lengths_x.append(maxlen)
else:
new_seqs_mid.append(inp[3])
new_seqs_cat.append(inp[4])
new_noclk_seqs_mid.append(inp[5])
new_noclk_seqs_cat.append(inp[6])
new_lengths_x.append(l_x)
lengths_x = new_lengths_x
seqs_mid = new_seqs_mid
seqs_cat = new_seqs_cat
noclk_seqs_mid = new_noclk_seqs_mid
noclk_seqs_cat = new_noclk_seqs_cat
if len(lengths_x) < 1:
return None, None, None, None
n_samples = len(seqs_mid)
maxlen_x = numpy.max(lengths_x)
neg_samples = len(noclk_seqs_mid[0][0])
mid_his = numpy.zeros((n_samples, maxlen_x)).astype('int64')
cat_his = numpy.zeros((n_samples, maxlen_x)).astype('int64')
noclk_mid_his = numpy.zeros((n_samples, maxlen_x, neg_samples)).astype('int64')
noclk_cat_his = numpy.zeros((n_samples, maxlen_x, neg_samples)).astype('int64')
mid_mask = numpy.zeros((n_samples, maxlen_x)).astype('float32')
for idx, [s_x, s_y, no_sx, no_sy] in enumerate(zip(seqs_mid, seqs_cat, noclk_seqs_mid, noclk_seqs_cat)):
mid_mask[idx, :lengths_x[idx]] = 1.
mid_his[idx, :lengths_x[idx]] = s_x
cat_his[idx, :lengths_x[idx]] = s_y
noclk_mid_his[idx, :lengths_x[idx], :] = no_sx
noclk_cat_his[idx, :lengths_x[idx], :] = no_sy
uids = numpy.array([inp[0] for inp in input])
mids = numpy.array([inp[1] for inp in input])
cats = numpy.array([inp[2] for inp in input])
if return_neg:
return uids, mids, cats, mid_his, cat_his, mid_mask, numpy.array(target), numpy.array(lengths_x), noclk_mid_his, noclk_cat_his
else:
return uids, mids, cats, mid_his, cat_his, mid_mask, numpy.array(target), numpy.array(lengths_x)
def eval(sess, test_data, model, model_path):
loss_sum = 0.
accuracy_sum = 0.
aux_loss_sum = 0.
nums = 0
stored_arr = []
for source, target in test_data:
nums += 1
uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, noclk_mids, noclk_cats = prepare_data(source, target, return_neg=True)
prob, loss, acc, aux_loss = model.calculate(sess, [uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, noclk_mids, noclk_cats])
loss_sum += loss
aux_loss_sum = aux_loss
accuracy_sum += acc
prob_1 = prob[:, 0].tolist()
target_1 = target[:, 0].tolist()
for p ,t in zip(prob_1, target_1):
stored_arr.append([p, t])
test_auc = calc_auc(stored_arr)
accuracy_sum = accuracy_sum / nums
loss_sum = loss_sum / nums
aux_loss_sum / nums
global best_auc
if best_auc < test_auc:
best_auc = test_auc
model.save(sess, model_path)
return test_auc, loss_sum, accuracy_sum, aux_loss_sum
def train(
train_file = "local_train_splitByUser",
test_file = "local_test_splitByUser",
uid_voc = "uid_voc.pkl",
mid_voc = "mid_voc.pkl",
cat_voc = "cat_voc.pkl",
batch_size = 128,
maxlen = 100,
test_iter = 50,
save_iter = 300,
model_type = 'DNN',
seed = 2,
):
model_path = "dnn_save_path/ckpt_noshuff" + model_type + str(seed)
best_model_path = "dnn_best_model/ckpt_noshuff" + model_type + str(seed)
gpu_options = tf.GPUOptions(allow_growth=True)
with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:
train_data = DataIterator(train_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen, shuffle_each_epoch=False)
test_data = DataIterator(test_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen)
n_uid, n_mid, n_cat = train_data.get_n()
if model_type == 'DNN':
model = Model_DNN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'PNN':
model = Model_PNN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DIN':
model = Model_DIN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DIEN':
model = Model_DIEN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DIEN_with_InnerAtt':
model = Model_DIEN_with_InnerAtt(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DHIN_without_InnerAtt':
model = Model_DHIN_without_InnerAtt(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DHIN':
model = Model_DHIN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
else:
print ("Invalid model_type : %s", model_type)
return
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
sys.stdout.flush()
print(' test_auc: %.4f --- test_loss: %.4f --- test_acc: %.4f --- test_aux_loss: %.4f' % eval(sess, test_data, model, best_model_path))
sys.stdout.flush()
#start_time = time.time()
iter = 0
lr = 0.001
for itr in range(3):
loss_sum = 0.0
accuracy_sum = 0.
aux_loss_sum = 0.
for source, target in train_data:
uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, noclk_mids, noclk_cats = prepare_data(source, target, maxlen, return_neg=True)
loss, acc, aux_loss = model.train(sess, [uids, mids, cats, mid_his, cat_his, mid_mask, target, sl, lr, noclk_mids, noclk_cats])
loss_sum += loss
accuracy_sum += acc
aux_loss_sum += aux_loss
iter += 1
sys.stdout.flush()
if (iter % test_iter) == 0:
print('iter: %d ---> train_loss: %.4f --- train_acc: %.4f --- tran_aux_loss: %.4f' % \
(iter, loss_sum / test_iter, accuracy_sum / test_iter, aux_loss_sum / test_iter))
print(' test_auc: %.4f ---test_loss: %.4f --- test_acc: %.4f --- test_aux_loss: %.4f' % eval(sess, test_data, model, best_model_path))
loss_sum = 0.0
accuracy_sum = 0.0
aux_loss_sum = 0.0
if (iter % save_iter) == 0:
print('save model iter: %d' %(iter))
model.save(sess, model_path+"--"+str(iter))
lr *= 0.5
def test(
train_file = "local_train_splitByUser",
test_file = "local_test_splitByUser",
uid_voc = "uid_voc.pkl",
mid_voc = "mid_voc.pkl",
cat_voc = "cat_voc.pkl",
batch_size = 128,
maxlen = 100,
model_type = 'DNN',
seed = 2
):
model_path = "dnn_best_model/ckpt_noshuff" + model_type + str(seed)
gpu_options = tf.GPUOptions(allow_growth=True)
with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:
train_data = DataIterator(train_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen)
test_data = DataIterator(test_file, uid_voc, mid_voc, cat_voc, batch_size, maxlen)
n_uid, n_mid, n_cat = train_data.get_n()
if model_type == 'DNN':
model = Model_DNN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'PNN':
model = Model_PNN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DIN':
model = Model_DIN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DIEN':
model = Model_DIN_V2_Gru_Vec_attGru_Neg(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DIEN_with_InnerAtt':
model = Model_DIEN_with_InnerAtt(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DHIN_without_InnerAtt':
model = Model_DHIN_without_InnerAtt(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
elif model_type == 'DHIN':
model = Model_DHIN(n_uid, n_mid, n_cat, EMBEDDING_DIM, HIDDEN_SIZE, ATTENTION_SIZE)
else:
print ("Invalid model_type : %s", model_type)
return
model.restore(sess, model_path)
print('test_auc: %.4f ---test_loss: %.4f --- test_acc: %.4f --- test_aux_loss: %.4f' % eval(sess, test_data, model, model_path))
if __name__ == '__main__':
print sys.argv
args = parser.parse_args()
Model_type = args.model_type
SEED = 3
tf.set_random_seed(SEED)
numpy.random.seed(SEED)
random.seed(SEED)
if args.task == 'train':
train(model_type=Model_type, seed=SEED)
elif args.task == 'test':
test(model_type=Model_type, seed=SEED)
else:
print('Please Choose Task Type and Model Type!')
| 43.826271
| 236
| 0.614909
|
4a06bc69d5c850fa9f7c4861bc6b3acca3905056
| 34,448
|
py
|
Python
|
src/pip/_vendor/urllib3/contrib/securetransport.py
|
Harry-Hopkinson/pip
|
4fe127ebde383235a5d5c30e801b2440e057780d
|
[
"MIT"
] | 1
|
2022-03-06T13:06:33.000Z
|
2022-03-06T13:06:33.000Z
|
src/pip/_vendor/urllib3/contrib/securetransport.py
|
Harry-Hopkinson/pip
|
4fe127ebde383235a5d5c30e801b2440e057780d
|
[
"MIT"
] | null | null | null |
src/pip/_vendor/urllib3/contrib/securetransport.py
|
Harry-Hopkinson/pip
|
4fe127ebde383235a5d5c30e801b2440e057780d
|
[
"MIT"
] | null | null | null |
"""
SecureTranport support for urllib3 via ctypes.
This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.
We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.
This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.
To use this module, simply import and inject it::
import pip._vendor.urllib3.contrib.securetransport as securetransport
securetransport.inject_into_urllib3()
Happy TLSing!
This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
.. code-block::
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
from pip._vendor import six
from .. import util
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
from ._securetransport.low_level import (
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
try: # Platform-specific: Python 2
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works
HAS_SNI = True
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
# This dictionary is used by the read callback to obtain a handle to the
# calling wrapped socket. This is a pretty silly approach, but for now it'll
# do. I feel like I should be able to smuggle a handle to the wrapped socket
# directly in the SSLConnectionRef, but for now this approach will work I
# guess.
#
# We need to lock around this structure for inserts, but we don't do it for
# reads/writes in the callbacks. The reasoning here goes as follows:
#
# 1. It is not possible to call into the callbacks before the dictionary is
# populated, so once in the callback the id must be in the dictionary.
# 2. The callbacks don't mutate the dictionary, they only read from it, and
# so cannot conflict with any of the insertions.
#
# This is good: if we had to lock in the callbacks we'd drastically slow down
# the performance of this code.
_connection_refs = weakref.WeakValueDictionary()
_connection_ref_lock = threading.Lock()
# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
# for no better reason than we need *a* limit, and this one is right there.
SSL_WRITE_BLOCKSIZE = 16384
# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_AES_256_GCM_SHA384,
SecurityConst.TLS_AES_128_GCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_AES_128_CCM_8_SHA256,
SecurityConst.TLS_AES_128_CCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
]
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+
_protocol_to_min_max = {
util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
SecurityConst.kSSLProtocol2,
SecurityConst.kSSLProtocol2,
)
if hasattr(ssl, "PROTOCOL_SSLv3"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
SecurityConst.kSSLProtocol3,
SecurityConst.kSSLProtocol3,
)
if hasattr(ssl, "PROTOCOL_TLSv1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
SecurityConst.kTLSProtocol1,
SecurityConst.kTLSProtocol1,
)
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
SecurityConst.kTLSProtocol11,
SecurityConst.kTLSProtocol11,
)
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
SecurityConst.kTLSProtocol12,
SecurityConst.kTLSProtocol12,
)
def inject_into_urllib3():
"""
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
"""
util.SSLContext = SecureTransportContext
util.ssl_.SSLContext = SecureTransportContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_SECURETRANSPORT = True
util.ssl_.IS_SECURETRANSPORT = True
def extract_from_urllib3():
"""
Undo monkey-patching by :func:`inject_into_urllib3`.
"""
util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_SECURETRANSPORT = False
util.ssl_.IS_SECURETRANSPORT = False
def _read_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport read callback. This is called by ST to request that data
be returned from the socket.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
requested_length = data_length_pointer[0]
timeout = wrapped_socket.gettimeout()
error = None
read_count = 0
try:
while read_count < requested_length:
if timeout is None or timeout >= 0:
if not util.wait_for_read(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
remaining = requested_length - read_count
buffer = (ctypes.c_char * remaining).from_address(
data_buffer + read_count
)
chunk_size = base_socket.recv_into(buffer, remaining)
read_count += chunk_size
if not chunk_size:
if not read_count:
return SecurityConst.errSSLClosedGraceful
break
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = read_count
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = read_count
if read_count != requested_length:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
def _write_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport write callback. This is called by ST to request that data
actually be sent on the network.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
bytes_to_write = data_length_pointer[0]
data = ctypes.string_at(data_buffer, bytes_to_write)
timeout = wrapped_socket.gettimeout()
error = None
sent = 0
try:
while sent < bytes_to_write:
if timeout is None or timeout >= 0:
if not util.wait_for_write(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
chunk_sent = base_socket.send(data)
sent += chunk_sent
# This has some needless copying here, but I'm not sure there's
# much value in optimising this data path.
data = data[chunk_sent:]
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = sent
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = sent
if sent != bytes_to_write:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
# We need to keep these two objects references alive: if they get GC'd while
# in use then SecureTransport could attempt to call a function that is in freed
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
class WrappedSocket(object):
"""
API-compatibility wrapper for Python's OpenSSL wrapped socket object.
Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
collector of PyPy.
"""
def __init__(self, socket):
self.socket = socket
self.context = None
self._makefile_refs = 0
self._closed = False
self._exception = None
self._keychain = None
self._keychain_dir = None
self._client_cert_chain = None
# We save off the previously-configured timeout and then set it to
# zero. This is done because we use select and friends to handle the
# timeouts, but if we leave the timeout set on the lower socket then
# Python will "kindly" call select on that socket again for us. Avoid
# that by forcing the timeout to zero.
self._timeout = self.socket.gettimeout()
self.socket.settimeout(0)
@contextlib.contextmanager
def _raise_on_error(self):
"""
A context manager that can be used to wrap calls that do I/O from
SecureTransport. If any of the I/O callbacks hit an exception, this
context manager will correctly propagate the exception after the fact.
This avoids silently swallowing those exceptions.
It also correctly forces the socket closed.
"""
self._exception = None
# We explicitly don't catch around this yield because in the unlikely
# event that an exception was hit in the block we don't want to swallow
# it.
yield
if self._exception is not None:
exception, self._exception = self._exception, None
self.close()
raise exception
def _set_ciphers(self):
"""
Sets up the allowed ciphers. By default this matches the set in
util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
custom and doesn't allow changing at this time, mostly because parsing
OpenSSL cipher strings is going to be a freaking nightmare.
"""
ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
result = Security.SSLSetEnabledCiphers(
self.context, ciphers, len(CIPHER_SUITES)
)
_assert_no_error(result)
def _set_alpn_protocols(self, protocols):
"""
Sets up the ALPN protocols on the context.
"""
if not protocols:
return
protocols_arr = _create_cfstring_array(protocols)
try:
result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
_assert_no_error(result)
finally:
CoreFoundation.CFRelease(protocols_arr)
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
Raises an SSLError if the connection is not trusted.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed,
)
try:
trust_result = self._evaluate_trust(trust_bundle)
if trust_result in successes:
return
reason = "error code: %d" % (trust_result,)
except Exception as e:
# Do not trust on error
reason = "exception: %r" % (e,)
# SecureTransport does not send an alert nor shuts down the connection.
rec = _build_tls_unknown_ca_alert(self.version())
self.socket.sendall(rec)
# close the connection immediately
# l_onoff = 1, activate linger
# l_linger = 0, linger for 0 seoncds
opts = struct.pack("ii", 1, 0)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
self.close()
raise ssl.SSLError("certificate verify failed, %s" % reason)
def _evaluate_trust(self, trust_bundle):
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, "rb") as f:
trust_bundle = f.read()
cert_array = None
trust = Security.SecTrustRef()
try:
# Get a CFArray that contains the certs we want.
cert_array = _cert_array_from_pem(trust_bundle)
# Ok, now the hard part. We want to get the SecTrustRef that ST has
# created for this connection, shove our CAs into it, tell ST to
# ignore everything else it knows, and then ask if it can build a
# chain. This is a buuuunch of code.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
raise ssl.SSLError("Failed to copy trust reference")
result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
_assert_no_error(result)
result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
_assert_no_error(result)
trust_result = Security.SecTrustResultType()
result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
_assert_no_error(result)
finally:
if trust:
CoreFoundation.CFRelease(trust)
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
return trust_result.value
def handshake(
self,
server_hostname,
verify,
trust_bundle,
min_version,
max_version,
client_cert,
client_key,
client_key_passphrase,
alpn_protocols,
):
"""
Actually performs the TLS handshake. This is run automatically by
wrapped socket, and shouldn't be needed in user code.
"""
# First, we do the initial bits of connection setup. We need to create
# a context, set its I/O funcs, and set the connection reference.
self.context = Security.SSLCreateContext(
None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
)
result = Security.SSLSetIOFuncs(
self.context, _read_callback_pointer, _write_callback_pointer
)
_assert_no_error(result)
# Here we need to compute the handle to use. We do this by taking the
# id of self modulo 2**31 - 1. If this is already in the dictionary, we
# just keep incrementing by one until we find a free space.
with _connection_ref_lock:
handle = id(self) % 2147483647
while handle in _connection_refs:
handle = (handle + 1) % 2147483647
_connection_refs[handle] = self
result = Security.SSLSetConnection(self.context, handle)
_assert_no_error(result)
# If we have a server hostname, we should set that too.
if server_hostname:
if not isinstance(server_hostname, bytes):
server_hostname = server_hostname.encode("utf-8")
result = Security.SSLSetPeerDomainName(
self.context, server_hostname, len(server_hostname)
)
_assert_no_error(result)
# Setup the ciphers.
self._set_ciphers()
# Setup the ALPN protocols.
self._set_alpn_protocols(alpn_protocols)
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
_assert_no_error(result)
# If there's a trust DB, we need to use it. We do that by telling
# SecureTransport to break on server auth. We also do that if we don't
# want to validate the certs at all: we just won't actually do any
# authing in that case.
if not verify or trust_bundle is not None:
result = Security.SSLSetSessionOption(
self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
)
_assert_no_error(result)
# If there's a client cert, we need to use it.
if client_cert:
self._keychain, self._keychain_dir = _temporary_keychain()
self._client_cert_chain = _load_client_cert_chain(
self._keychain, client_cert, client_key
)
result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
_assert_no_error(result)
while True:
with self._raise_on_error():
result = Security.SSLHandshake(self.context)
if result == SecurityConst.errSSLWouldBlock:
raise socket.timeout("handshake timed out")
elif result == SecurityConst.errSSLServerAuthCompleted:
self._custom_validate(verify, trust_bundle)
continue
else:
_assert_no_error(result)
break
def fileno(self):
return self.socket.fileno()
# Copy-pasted from Python 3.5 source code
def _decref_socketios(self):
if self._makefile_refs > 0:
self._makefile_refs -= 1
if self._closed:
self.close()
def recv(self, bufsiz):
buffer = ctypes.create_string_buffer(bufsiz)
bytes_read = self.recv_into(buffer, bufsiz)
data = buffer[:bytes_read]
return data
def recv_into(self, buffer, nbytes=None):
# Read short on EOF.
if self._closed:
return 0
if nbytes is None:
nbytes = len(buffer)
buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLRead(
self.context, buffer, nbytes, ctypes.byref(processed_bytes)
)
# There are some result codes that we want to treat as "not always
# errors". Specifically, those are errSSLWouldBlock,
# errSSLClosedGraceful, and errSSLClosedNoNotify.
if result == SecurityConst.errSSLWouldBlock:
# If we didn't process any bytes, then this was just a time out.
# However, we can get errSSLWouldBlock in situations when we *did*
# read some data, and in those cases we should just read "short"
# and return.
if processed_bytes.value == 0:
# Timed out, no data read.
raise socket.timeout("recv timed out")
elif result in (
SecurityConst.errSSLClosedGraceful,
SecurityConst.errSSLClosedNoNotify,
):
# The remote peer has closed this connection. We should do so as
# well. Note that we don't actually return here because in
# principle this could actually be fired along with return data.
# It's unlikely though.
self.close()
else:
_assert_no_error(result)
# Ok, we read and probably succeeded. We should return whatever data
# was actually read.
return processed_bytes.value
def settimeout(self, timeout):
self._timeout = timeout
def gettimeout(self):
return self._timeout
def send(self, data):
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLWrite(
self.context, data, len(data), ctypes.byref(processed_bytes)
)
if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
# Timed out
raise socket.timeout("send timed out")
else:
_assert_no_error(result)
# We sent, and probably succeeded. Tell them how much we sent.
return processed_bytes.value
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
    def shutdown(self):
        """Send the TLS close_notify alert to the peer via SSLClose."""
        with self._raise_on_error():
            Security.SSLClose(self.context)
    def close(self):
        """Drop one makefile reference; release TLS state on the last one.

        While file objects created by makefile() are still alive this only
        decrements the reference count; the real teardown (releasing the
        SecureTransport context, client cert chain, temporary keychain and
        its on-disk directory, then closing the raw socket) happens once the
        count reaches zero.
        """
        # TODO: should I do clean shutdown here? Do I have to?
        if self._makefile_refs < 1:
            self._closed = True
            if self.context:
                CoreFoundation.CFRelease(self.context)
                self.context = None
            if self._client_cert_chain:
                CoreFoundation.CFRelease(self._client_cert_chain)
                self._client_cert_chain = None
            if self._keychain:
                # Delete the temporary keychain holding the client cert, then
                # release the handle and remove its backing directory.
                Security.SecKeychainDelete(self._keychain)
                CoreFoundation.CFRelease(self._keychain)
                shutil.rmtree(self._keychain_dir)
                self._keychain = self._keychain_dir = None
            return self.socket.close()
        else:
            self._makefile_refs -= 1
    def getpeercert(self, binary_form=False):
        """Return the peer's leaf certificate as DER bytes.

        Returns None when no certificate is available (e.g. before the
        handshake). Raises ValueError unless binary_form is True, since
        decoding the certificate dict is deliberately unsupported here.
        """
        # Urgh, annoying.
        #
        # Here's how we do this:
        #
        # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
        #    connection.
        # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
        # 3. To get the CN, call SecCertificateCopyCommonName and process that
        #    string so that it's of the appropriate type.
        # 4. To get the SAN, we need to do something a bit more complex:
        #    a. Call SecCertificateCopyValues to get the data, requesting
        #       kSecOIDSubjectAltName.
        #    b. Mess about with this dictionary to try to get the SANs out.
        #
        # This is gross. Really gross. It's going to be a few hundred LoC extra
        # just to repeat something that SecureTransport can *already do*. So my
        # operating assumption at this time is that what we want to do is
        # instead to just flag to urllib3 that it shouldn't do its own hostname
        # validation when using SecureTransport.
        if not binary_form:
            raise ValueError("SecureTransport only supports dumping binary certs")
        trust = Security.SecTrustRef()
        certdata = None
        der_bytes = None
        try:
            # Grab the trust store.
            result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
            _assert_no_error(result)
            if not trust:
                # Probably we haven't done the handshake yet. No biggie.
                return None
            cert_count = Security.SecTrustGetCertificateCount(trust)
            if not cert_count:
                # Also a case that might happen if we haven't handshaked.
                # Handshook? Handshaken?
                return None
            leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
            assert leaf
            # Ok, now we want the DER bytes.
            certdata = Security.SecCertificateCopyData(leaf)
            assert certdata
            data_length = CoreFoundation.CFDataGetLength(certdata)
            data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
            der_bytes = ctypes.string_at(data_buffer, data_length)
        finally:
            # Release CoreFoundation objects even if extraction failed above.
            if certdata:
                CoreFoundation.CFRelease(certdata)
            if trust:
                CoreFoundation.CFRelease(trust)
        return der_bytes
def version(self):
protocol = Security.SSLProtocol()
result = Security.SSLGetNegotiatedProtocolVersion(
self.context, ctypes.byref(protocol)
)
_assert_no_error(result)
if protocol.value == SecurityConst.kTLSProtocol13:
raise ssl.SSLError("SecureTransport does not support TLS 1.3")
elif protocol.value == SecurityConst.kTLSProtocol12:
return "TLSv1.2"
elif protocol.value == SecurityConst.kTLSProtocol11:
return "TLSv1.1"
elif protocol.value == SecurityConst.kTLSProtocol1:
return "TLSv1"
elif protocol.value == SecurityConst.kSSLProtocol3:
return "SSLv3"
elif protocol.value == SecurityConst.kSSLProtocol2:
return "SSLv2"
else:
raise ssl.SSLError("Unknown TLS version: %r" % protocol)
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
# Python 2/3 compatibility shim: choose the appropriate makefile
# implementation and attach it to WrappedSocket after its class body
# (the choice depends on whether the legacy socket._fileobject exists).
if _fileobject: # Platform-specific: Python 2
    def makefile(self, mode, bufsize=-1):
        # Each file object takes a reference so close() defers until all drop.
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
    def makefile(self, mode="r", buffering=None, *args, **kwargs):
        # We disable buffering with SecureTransport because it conflicts with
        # the buffering that ST does internally (see issue #1153 for more).
        buffering = 0
        return backport_makefile(self, mode, buffering, *args, **kwargs)
WrappedSocket.makefile = makefile
class SecureTransportContext(object):
    """
    I am a wrapper class for the SecureTransport library, to translate the
    interface of the standard library ``SSLContext`` object to calls into
    SecureTransport.
    """

    def __init__(self, protocol):
        """Initialise context state for *protocol* (an ssl.PROTOCOL_* value)."""
        self._min_version, self._max_version = _protocol_to_min_max[protocol]
        self._options = 0
        self._verify = False
        self._trust_bundle = None
        self._client_cert = None
        self._client_key = None
        self._client_key_passphrase = None
        self._alpn_protocols = None

    @property
    def check_hostname(self):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        return True

    @check_hostname.setter
    def check_hostname(self, value):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        pass

    @property
    def options(self):
        # TODO: Well, crap.
        #
        # So this is the bit of the code that is the most likely to cause us
        # trouble. Essentially we need to enumerate all of the SSL options that
        # users might want to use and try to see if we can sensibly translate
        # them, or whether we should just ignore them.
        return self._options

    @options.setter
    def options(self, value):
        # TODO: Update in line with above.
        self._options = value

    @property
    def verify_mode(self):
        """Map the internal verify flag onto ssl.CERT_REQUIRED/CERT_NONE."""
        return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE

    @verify_mode.setter
    def verify_mode(self, value):
        self._verify = True if value == ssl.CERT_REQUIRED else False

    def set_default_verify_paths(self):
        # So, this has to do something a bit weird. Specifically, what it does
        # is nothing.
        #
        # This means that, if we had previously had load_verify_locations
        # called, this does not undo that. We need to do that because it turns
        # out that the rest of the urllib3 code will attempt to load the
        # default verify paths if it hasn't been told about any paths, even if
        # the context itself was sometime earlier. We resolve that by just
        # ignoring it.
        pass

    def load_default_certs(self):
        """Alias for set_default_verify_paths(), mirroring ssl.SSLContext."""
        return self.set_default_verify_paths()

    def set_ciphers(self, ciphers):
        # For now, we just require the default cipher string.
        if ciphers != util.ssl_.DEFAULT_CIPHERS:
            raise ValueError("SecureTransport doesn't support custom cipher strings")

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        """Record CA material for later use.

        Only cafile/cadata are supported; a ValueError is raised for capath,
        and a missing cafile raises the usual open() error immediately.
        """
        # OK, we only really support cadata and cafile.
        if capath is not None:
            raise ValueError("SecureTransport does not support cert directories")
        # Raise if cafile does not exist.
        if cafile is not None:
            with open(cafile):
                pass
        self._trust_bundle = cafile or cadata

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        """Record the client certificate, private key and key passphrase.

        These are consumed by wrap_socket() during the handshake.
        """
        self._client_cert = certfile
        self._client_key = keyfile
        # Bug fix: this previously assigned self._client_cert_passphrase, an
        # attribute nothing reads. wrap_socket() forwards
        # self._client_key_passphrase to the handshake, so any password
        # supplied here was silently discarded.
        self._client_key_passphrase = password

    def set_alpn_protocols(self, protocols):
        """
        Sets the ALPN protocols that will later be set on the context.
        Raises a NotImplementedError if ALPN is not supported.
        """
        if not hasattr(Security, "SSLSetALPNProtocols"):
            raise NotImplementedError(
                "SecureTransport supports ALPN only in macOS 10.12+"
            )
        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]

    def wrap_socket(
        self,
        sock,
        server_side=False,
        do_handshake_on_connect=True,
        suppress_ragged_eofs=True,
        server_hostname=None,
    ):
        """Wrap *sock* in a WrappedSocket and perform the TLS handshake."""
        # So, what do we do here? Firstly, we assert some properties. This is a
        # stripped down shim, so there is some functionality we don't support.
        # See PEP 543 for the real deal.
        assert not server_side
        assert do_handshake_on_connect
        assert suppress_ragged_eofs
        # Ok, we're good to go. Now we want to create the wrapped socket object
        # and store it in the appropriate place.
        wrapped_socket = WrappedSocket(sock)
        # Now we can handshake
        wrapped_socket.handshake(
            server_hostname,
            self._verify,
            self._trust_bundle,
            self._min_version,
            self._max_version,
            self._client_cert,
            self._client_key,
            self._client_key_passphrase,
            self._alpn_protocols,
        )
        return wrapped_socket
| 37.362256
| 86
| 0.658761
|
4a06bdc2fdb67c9b6050fa93c154bff2a347980b
| 1,320
|
py
|
Python
|
src/python/pants/init/load_backends_integration_test.py
|
thamenato/pants
|
bc4a8fb3f07f6145649f02b06a1e5599aa28b36c
|
[
"Apache-2.0"
] | null | null | null |
src/python/pants/init/load_backends_integration_test.py
|
thamenato/pants
|
bc4a8fb3f07f6145649f02b06a1e5599aa28b36c
|
[
"Apache-2.0"
] | null | null | null |
src/python/pants/init/load_backends_integration_test.py
|
thamenato/pants
|
bc4a8fb3f07f6145649f02b06a1e5599aa28b36c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pathlib import Path
from typing import List
from pants.testutil.pants_integration_test import run_pants
def discover_backends() -> List[str]:
    """Find every backend package (a dir containing register.py) under src/python.

    Backends that Pants always activates are excluded, since loading them
    separately would be redundant.
    """
    backend_names = set()
    for register_py in Path().glob("src/python/**/register.py"):
        package = str(register_py.parent).replace("src/python/", "").replace("/", ".")
        backend_names.add(package)
    always_activated = {"pants.core", "pants.backend.project_info", "pants.backend.pants_info"}
    return sorted(backend_names - always_activated)
def assert_backends_load(backends: List[str]) -> None:
    """Run `pants --version` with the given backends enabled; expect success."""
    config = {"GLOBAL": {"backend_packages": backends}}
    result = run_pants(["--no-verify-config", "--version"], config=config)
    result.assert_success(f"Failed to load: {backends}")
def test_no_backends_loaded() -> None:
    """Sanity check: Pants still starts with an empty backend list."""
    assert_backends_load([])
def test_all_backends_loaded() -> None:
    """This should catch all ambiguity issues."""
    assert_backends_load(discover_backends())
def test_each_distinct_backend_loads() -> None:
    """This should catch graph incompleteness errors, i.e. when a required rule is not
    registered."""
    for backend_name in discover_backends():
        assert_backends_load([backend_name])
| 32.195122
| 95
| 0.709091
|
4a06be2576920b10ad2b3cba4b3ba8c3a1be628a
| 135
|
py
|
Python
|
gloomhaven/__init__.py
|
Softyy/gloomhaven-campaign-manager
|
0d704c248184edfe62b95d286203d58febb011c5
|
[
"MIT"
] | null | null | null |
gloomhaven/__init__.py
|
Softyy/gloomhaven-campaign-manager
|
0d704c248184edfe62b95d286203d58febb011c5
|
[
"MIT"
] | null | null | null |
gloomhaven/__init__.py
|
Softyy/gloomhaven-campaign-manager
|
0d704c248184edfe62b95d286203d58febb011c5
|
[
"MIT"
] | null | null | null |
from .server import *
from .components.cyto_reactor import *
from .components.modal import *
from .components.campaign_modal import *
| 22.5
| 40
| 0.792593
|
4a06beca153a0de848847edbe29e4186a26303fd
| 813
|
py
|
Python
|
manage.py
|
crod93/Woodward-Monitoring-Wells
|
ebea1e3f902701e18a6a91bb3480ad633f9b3104
|
[
"MIT"
] | null | null | null |
manage.py
|
crod93/Woodward-Monitoring-Wells
|
ebea1e3f902701e18a6a91bb3480ad633f9b3104
|
[
"MIT"
] | null | null | null |
manage.py
|
crod93/Woodward-Monitoring-Wells
|
ebea1e3f902701e18a6a91bb3480ad633f9b3104
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before anything imports them.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "monitoringwells.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django itself is importable, so the original failure came from
        # inside django.core.management — re-raise it unmodified.
        raise
    execute_from_command_line(sys.argv)
| 35.347826
| 79
| 0.645756
|
4a06bedb7891380e9d6f6d5184bde9d3798f3f6b
| 723
|
py
|
Python
|
pages/admin.py
|
wonderbeyond/ezlog
|
6c0673d5ef0c90a5f977d410f3cc20587e239815
|
[
"BSD-2-Clause"
] | 31
|
2015-02-19T03:23:52.000Z
|
2021-03-30T09:38:27.000Z
|
pages/admin.py
|
rouqiu/ezlog
|
6c0673d5ef0c90a5f977d410f3cc20587e239815
|
[
"BSD-2-Clause"
] | null | null | null |
pages/admin.py
|
rouqiu/ezlog
|
6c0673d5ef0c90a5f977d410f3cc20587e239815
|
[
"BSD-2-Clause"
] | 37
|
2015-01-13T07:42:58.000Z
|
2018-04-08T08:06:28.000Z
|
# coding=utf-8
from django.contrib import admin
from mptt.admin import MPTTModelAdmin
from pages.models import *
class PageAdmin(MPTTModelAdmin):
    """Tree-aware admin for Page with a CKEditor-backed content editor."""

    mptt_level_indent = 20
    fields = (
        'title',
        'parent',
        'public',
        'in_navigation',
        'allow_comment',
        'content',
    )
    list_display = (
        'title',
        'created',
        'modified',
        'public',
        'allow_comment',
        'in_navigation',
    )
    list_filter = ('created', 'modified', 'public')
    search_fields = ('title', 'content')
    change_list_template = 'admin/pages/page/change_list.html'

    class Media:
        # Load CKEditor and the glue script that attaches it to the
        # content textarea.
        js = (
            'ckeditor/ckeditor.js',
            'ckeditor/config.js',
            'js/ckeditor-setup.js',
        )


admin.site.register(Page, PageAdmin)
| 30.125
| 76
| 0.633472
|
4a06bfc5b97ddab9e27e05820ab72e5e9f474d9b
| 8,150
|
py
|
Python
|
autofit/graphical/declarative/graph.py
|
caoxiaoyue/PyAutoFit
|
819cd2acc8d4069497a161c3bb6048128e44d828
|
[
"MIT"
] | 39
|
2019-01-24T10:45:23.000Z
|
2022-03-18T09:37:59.000Z
|
autofit/graphical/declarative/graph.py
|
caoxiaoyue/PyAutoFit
|
819cd2acc8d4069497a161c3bb6048128e44d828
|
[
"MIT"
] | 260
|
2018-11-27T12:56:33.000Z
|
2022-03-31T16:08:59.000Z
|
autofit/graphical/declarative/graph.py
|
caoxiaoyue/PyAutoFit
|
819cd2acc8d4069497a161c3bb6048128e44d828
|
[
"MIT"
] | 13
|
2018-11-30T16:49:05.000Z
|
2022-01-21T17:39:29.000Z
|
from abc import ABC, abstractmethod
from typing import List, cast, Optional, Union
from autofit.graphical.declarative.factor.prior import PriorFactor
from autofit.graphical.expectation_propagation.ep_mean_field import EPMeanField
from autofit.graphical.factor_graphs.factor import Factor
from autofit.graphical.factor_graphs.graph import FactorGraph
from autofit.mapper.prior.abstract import Prior
from autofit.mapper.prior_model.abstract import AbstractPriorModel
from autofit.mapper.variable import Variable
from autofit.text.formatter import TextFormatter
class DeclarativeGraphFormatter(ABC):
    """Renders a DeclarativeFactorGraph as human-readable text.

    Subclasses choose how each variable is displayed (e.g. its string
    description vs. its fitted mean) by implementing variable_formatter.
    """
    def __init__(
        self,
        graph: "DeclarativeFactorGraph"
    ):
        """Store the graph whose description will be generated."""
        self.graph = graph
    @abstractmethod
    def variable_formatter(self, variable):
        """Return the value displayed for *variable*; subclass hook."""
        pass
    @property
    def info(self) -> str:
        """
        Describes the graph. Output in graph.info
        """
        prior_factor_info = "\n".join(
            map(
                self.info_for_prior_factor,
                self.graph.prior_factors
            )
        )
        analysis_factor_info = "\n\n".join(
            map(
                self.info_for_analysis_factor,
                self.graph.analysis_factors
            )
        )
        hierarchical_factor_info = "\n\n".join(
            map(
                self.info_for_hierarchical_factor,
                self.graph.hierarchical_factors
            )
        )
        string = f"""PriorFactors\n\n{prior_factor_info}\n\nAnalysisFactors\n\n{analysis_factor_info}"""
        # The hierarchical section is omitted entirely when the graph has no
        # hierarchical factors.
        if len(self.graph.hierarchical_factors) > 0:
            string = f"{string}\n\nHierarchicalFactors\n\n{hierarchical_factor_info}"
        return string
    def _related_factor_names(
        self,
        variable: Variable,
        excluded_factor: Optional[Factor] = None
    ) -> str:
        """
        Create a comma separated string describing factor names associated with
        the variable.
        Hierarchical factors are grouped.
        Parameters
        ----------
        variable
            A variable in the graph
        excluded_factor
            A factor which should not be included. e.g. the factor
            for which the variable is being checked.
        Returns
        -------
        A string describing the other factor's relationship to the variable.
        """
        from autofit.graphical.declarative.factor.hierarchical import _HierarchicalFactor
        related_factors = self.graph.related_factors(
            variable,
            excluded_factor=excluded_factor
        )
        # Use a set so duplicate names (e.g. several draws from the same
        # hierarchical distribution model) collapse to one entry.
        names = set()
        for factor in related_factors:
            if isinstance(
                factor,
                _HierarchicalFactor
            ):
                names.add(
                    factor.distribution_model.name
                )
            else:
                names.add(
                    factor.name_for_variable(
                        variable
                    )
                )
        return ", ".join(sorted(names))
    def info_for_prior_factor(
        self,
        prior_factor: PriorFactor
    ) -> str:
        """
        A string describing a given PriorFactor in the context of this graph.
        """
        related_factor_names = self._related_factor_names(
            variable=prior_factor.variable,
            excluded_factor=prior_factor
        )
        formatter = TextFormatter()
        formatter.add(
            (f"{prior_factor.name} ({related_factor_names})",),
            self.variable_formatter(
                prior_factor.variable
            )
        )
        return formatter.text
    def info_for_analysis_factor(
        self,
        analysis_factor
    ) -> str:
        """
        A string describing a given AnalysisFactor in the context of this graph.
        """
        model = analysis_factor.prior_model
        formatter = TextFormatter()
        for path, prior in model.path_instance_tuples_for_class(
            Prior,
            ignore_children=True
        ):
            name = path[-1]
            # Annotate each prior's name with the other factors that share it.
            related_factor_names = self._related_factor_names(
                prior,
                excluded_factor=analysis_factor
            )
            if len(related_factor_names) > 0:
                name = f"{name} ({related_factor_names})"
            path = path[:-1] + (name,)
            formatter.add(
                path,
                self.variable_formatter(
                    prior
                )
            )
        return f"{analysis_factor.name}\n\n{formatter.text}"
    def info_for_hierarchical_factor(
        self,
        hierarchical_factor
    ):
        """Describe a hierarchical factor: its distribution model followed by
        the variables drawn from it."""
        distribution_model_info = self.info_for_analysis_factor(
            hierarchical_factor
        )
        formatter = TextFormatter()
        for factor in hierarchical_factor.factors:
            related_factor_names = self._related_factor_names(
                variable=factor.variable,
                excluded_factor=factor
            )
            formatter.add(
                (related_factor_names,),
                self.variable_formatter(
                    factor.variable
                )
            )
        return f"{distribution_model_info}\n\nDrawn Variables\n\n{formatter.text}"
class GraphInfoFormatter(DeclarativeGraphFormatter):
    """Formatter producing the static text shown in ``graph.info``."""

    def variable_formatter(self, variable: Variable):
        """Render *variable* as its plain string description."""
        description = str(variable)
        return description
class ResultsFormatter(DeclarativeGraphFormatter):
    """Formatter that renders fitted values for ``make_results_text``."""

    def __init__(self, graph: "DeclarativeFactorGraph", model_approx: EPMeanField):
        """
        Formats a string that is output to graph.results

        Parameters
        ----------
        graph
            A factor graph comprising AnalysisFactors
        model_approx
            An approximation of the model at the point of output
        """
        self.model_approx = model_approx
        super().__init__(graph)

    def variable_formatter(self, variable: Variable):
        """Return the mean of the mean-field message for *variable*."""
        mean_field = self.model_approx.mean_field
        return mean_field[variable].mean
class DeclarativeFactorGraph(FactorGraph):
    """FactorGraph with helpers for declaratively-built models."""

    @property
    def analysis_factors(self):
        """
        Analysis factors associated with this graph.
        """
        from .factor.analysis import AnalysisFactor
        return cast(
            List[AnalysisFactor],
            self._factors_with_type(AnalysisFactor),
        )

    @property
    def prior_factors(self) -> List[PriorFactor]:
        """
        Prior factors associated with this graph.
        """
        return cast(
            List[PriorFactor],
            self._factors_with_type(PriorFactor),
        )

    @property
    def hierarchical_factors(self):
        """
        Hierarchical factors associated with this graph, one entry per
        distinct distribution model.
        """
        from autofit.graphical.declarative.factor.hierarchical import _HierarchicalFactor
        distribution_models = {
            cast(_HierarchicalFactor, factor).distribution_model
            for factor in self._factors_with_type(_HierarchicalFactor)
        }
        return sorted(distribution_models)

    @property
    def info(self) -> str:
        """
        Describes the graph. Output in graph.info
        """
        return GraphInfoFormatter(self).info

    def make_results_text(
        self,
        model_approx: Union[EPMeanField, AbstractPriorModel]
    ) -> str:
        """
        Generate text describing the graph w.r.t. a given model approximation
        """
        return ResultsFormatter(self, model_approx).info
| 28.006873
| 104
| 0.563558
|
4a06bfc9e647686d165497bc22ed4de731c042d4
| 5,960
|
py
|
Python
|
ccaches.py
|
btidor/mailto
|
fbc3dee3a9d4f0aaf12c3ddf85b0cb1d455b444b
|
[
"MIT"
] | 2
|
2015-09-15T17:21:38.000Z
|
2016-01-13T23:46:40.000Z
|
ccaches.py
|
garywang/subscribe
|
7aff399122f6f43aaf460837b079d5388b25eb16
|
[
"MIT"
] | null | null | null |
ccaches.py
|
garywang/subscribe
|
7aff399122f6f43aaf460837b079d5388b25eb16
|
[
"MIT"
] | null | null | null |
"""
Utilities to convert Webathena tickets into a ccache; make_ccache is the
interesting one (it takes r.session from the client and returns a ccache)
Drawn from davidben's shellinabox example in the Webathena source tree.
https://github.com/davidben/webathena.
"""
import base64
import struct
# Some DER encoding stuff. Bleh. This is because the ccache contains a
# DER-encoded krb5 Ticket structure, whereas Webathena deserializes
# into the various fields. Re-encoding in the client would be easy as
# there is already an ASN.1 implementation, but in the interest of
# limiting MIT Kerberos's exposure to malformed ccaches, encode it
# ourselves. To that end, here's the laziest DER encoder ever.
def der_encode_length(l):
    """DER-encode a length: short form for values <= 127, long form above."""
    if l <= 127:
        return chr(l)
    digits = ""
    while l > 0:
        digits = chr(l & 0xff) + digits
        l >>= 8
    # Long form: the first octet is 0x80 | number-of-length-octets.
    return chr(len(digits) | 0x80) + digits
def der_encode_tlv(tag, value):
    """Assemble a DER TLV triplet: tag octet, encoded length, then value."""
    return "".join([chr(tag), der_encode_length(len(value)), value])
def der_encode_integer_value(val):
    """Return the minimal two's-complement content octets for a DER INTEGER.

    NOTE: Python 2 only — the type check references the `long` builtin.
    """
    if not isinstance(val, (int, long)):
        raise TypeError("int")
    # base 256, MSB first, two's complement, minimum number of octets
    # necessary. This has a number of annoying edge cases:
    # * 0 and -1 are 0x00 and 0xFF, not the empty string.
    # * 255 is 0x00 0xFF, not 0xFF
    # * -256 is 0xFF 0x00, not 0x00
    # Special-case to avoid an empty encoding.
    if val == 0:
        return "\x00"
    sign = 0 # What you would get if you sign-extended the current high bit.
    out = ""
    # We can stop once sign-extension matches the remaining value.
    while val != sign:
        byte = val & 0xff
        out = chr(byte) + out
        sign = -1 if byte & 0x80 == 0x80 else 0
        val >>= 8
    return out
def der_encode_integer(val):
    """DER-encode *val* as an INTEGER (tag 0x02)."""
    content = der_encode_integer_value(val)
    return der_encode_tlv(0x02, content)
def der_encode_int32(val):
    """DER-encode an integer after checking it fits in signed 32 bits."""
    if not -2147483648 <= val <= 2147483647:
        raise ValueError("Bad value")
    return der_encode_integer(val)
def der_encode_uint32(val):
    """DER-encode an integer after checking it fits in unsigned 32 bits."""
    if not 0 <= val <= 4294967295:
        raise ValueError("Bad value")
    return der_encode_integer(val)
def der_encode_string(val):
    """DER-encode a text string (tag 0x1b, GeneralString) as UTF-8 bytes.

    NOTE: Python 2 only — the type check references the `unicode` builtin.
    """
    if not isinstance(val, unicode):
        raise TypeError("unicode")
    return der_encode_tlv(0x1b, val.encode("utf-8"))
def der_encode_octet_string(val):
    """DER-encode a byte string as an OCTET STRING (tag 0x04)."""
    if not isinstance(val, str):
        raise TypeError("str")
    return der_encode_tlv(0x04, val)
def der_encode_sequence(tlvs, tagged=True):
    """DER-encode a SEQUENCE (tag 0x30).

    None/empty entries are skipped (absent OPTIONAL fields). With
    tagged=True, each element is wrapped in a context-specific tag [i]
    by position — Kerberos-style explicit tagging.
    """
    parts = [
        der_encode_tlv(0xa0 | index, element) if tagged else element
        for index, element in enumerate(tlvs)
        if element
    ]
    return der_encode_tlv(0x30, "".join(parts))
def der_encode_ticket(tkt):
    """DER-encode a krb5 Ticket (APPLICATION 1, tag 0x61) from its dict form."""
    # PrincipalName: name-type plus the untagged sequence of components.
    sname = der_encode_sequence(
        [der_encode_int32(tkt["sname"]["nameType"]),
         der_encode_sequence([der_encode_string(c)
                              for c in tkt["sname"]["nameString"]],
                             tagged=False)])
    # EncryptedData: etype, optional kvno, base64-decoded ciphertext.
    enc_part = der_encode_sequence(
        [der_encode_int32(tkt["encPart"]["etype"]),
         (der_encode_uint32(tkt["encPart"]["kvno"])
          if "kvno" in tkt["encPart"]
          else None),
         der_encode_octet_string(
             base64.b64decode(tkt["encPart"]["cipher"]))])
    return der_encode_tlv(
        0x61, # Ticket
        der_encode_sequence(
            [der_encode_integer(5), # tktVno
             der_encode_string(tkt["realm"]),
             sname,
             enc_part]))
# Kerberos ccache writing code. Using format documentation from here:
# http://www.gnu.org/software/shishi/manual/html_node/The-Credential-Cache-Binary-File-Format.html
def ccache_counted_octet_string(data):
    """Encode a ccache counted_octet_string: 32-bit big-endian length + data."""
    if not isinstance(data, str):
        raise TypeError("str")
    length_prefix = struct.pack("!I", len(data))
    return length_prefix + data
def ccache_principal(name, realm):
    """Encode a ccache principal: name-type, component count, realm, components."""
    header = struct.pack("!II", name["nameType"], len(name["nameString"]))
    components = "".join(
        ccache_counted_octet_string(c.encode("utf-8"))
        for c in name["nameString"])
    return header + ccache_counted_octet_string(realm.encode("utf-8")) + components
def ccache_key(key):
    """Encode a ccache keyblock: 16-bit keytype followed by the key octets."""
    keytype = struct.pack("!H", key["keytype"])
    key_octets = base64.b64decode(key["keyvalue"])
    return keytype + ccache_counted_octet_string(key_octets)
def flags_to_uint32(flags):
    """Pack a sequence of booleans (MSB first) into a 32-bit flag word."""
    return sum(1 << (31 - position) for position, flag in enumerate(flags) if flag)
def ccache_credential(cred):
    """Serialize one credential record in ccache binary layout.

    Field order: client principal, server principal, keyblock, four times,
    is_skey, ticket flags, address/authdata counts, ticket, second_ticket.
    """
    out = ccache_principal(cred["cname"], cred["crealm"])
    out += ccache_principal(cred["sname"], cred["srealm"])
    out += ccache_key(cred["key"])
    # Timestamps arrive in milliseconds; the ccache stores whole seconds.
    out += struct.pack("!IIII",
                       cred["authtime"] // 1000,
                       cred.get("starttime", cred["authtime"]) // 1000,
                       cred["endtime"] // 1000,
                       cred.get("renewTill", 0) // 1000)
    # is_skey byte — always 0 here (not a user-to-user ticket).
    out += struct.pack("!B", 0)
    out += struct.pack("!I", flags_to_uint32(cred["flags"]))
    # TODO: Care about addrs or authdata? Former is "caddr" key.
    out += struct.pack("!II", 0, 0)
    out += ccache_counted_octet_string(der_encode_ticket(cred["ticket"]))
    # No second_ticket.
    out += ccache_counted_octet_string("")
    return out
def make_ccache(cred):
    """Build a complete v0x0504 ccache file holding a single credential."""
    # Do we need a DeltaTime header? The ccache I get just puts zero
    # in there, so do the same.
    out = struct.pack("!HHHHII",
                      0x0504, # file_format_version
                      12, # headerlen
                      1, # tag (DeltaTime)
                      8, # taglen (two uint32_ts)
                      0, 0, # time_offset / usec_offset
                      )
    # Default principal, then the credential itself.
    out += ccache_principal(cred["cname"], cred["crealm"])
    out += ccache_credential(cred)
    return out
| 36.790123
| 98
| 0.60906
|
4a06bfdbe5f9f073066685af3536ae8c892d9894
| 6,185
|
py
|
Python
|
test/function_test.py
|
TaekedeHaan/rqt_ez_publisher
|
80f1a26f738beca02c520ebaf77019441cd564e5
|
[
"BSD-3-Clause"
] | 26
|
2015-01-22T13:11:48.000Z
|
2022-02-23T15:07:23.000Z
|
test/function_test.py
|
TaekedeHaan/rqt_ez_publisher
|
80f1a26f738beca02c520ebaf77019441cd564e5
|
[
"BSD-3-Clause"
] | 15
|
2015-03-22T04:39:18.000Z
|
2020-12-01T01:42:11.000Z
|
test/function_test.py
|
TaekedeHaan/rqt_ez_publisher
|
80f1a26f738beca02c520ebaf77019441cd564e5
|
[
"BSD-3-Clause"
] | 14
|
2015-11-12T16:07:07.000Z
|
2021-08-17T13:58:23.000Z
|
#!/usr/bin/env python
import unittest
import geometry_msgs.msg as geo_msgs
import rqt_ez_publisher.ez_publisher_model as ez_model
from rqt_ez_publisher import quaternion_module
PKG='rqt_ez_publisher'
class FunctionTest(unittest.TestCase):
    """Unit tests for ez_publisher_model helpers: topic-string generation,
    topic-name parsing (topic / attribute path / array index), and slot-type
    resolution for ROS message fields."""
    def test_make_topic_strings(self):
        strings = ez_model.make_topic_strings(geo_msgs.Twist(), '/cmd_vel')
        self.assertEqual(len(strings), 6)
        self.assertTrue('/cmd_vel/linear/x' in strings)
        self.assertTrue('/cmd_vel/linear/y' in strings)
        self.assertTrue('/cmd_vel/linear/z' in strings)
        self.assertTrue('/cmd_vel/angular/x' in strings)
        self.assertTrue('/cmd_vel/angular/y' in strings)
        self.assertTrue('/cmd_vel/angular/z' in strings)
    def test_make_topic_strings_with_header(self):
        strings = ez_model.make_topic_strings(geo_msgs.PointStamped(),
                                              '/cmd_vel')
        self.assertEqual(len(strings), 7)
        self.assertTrue('/cmd_vel/header/seq' in strings)
        self.assertTrue('/cmd_vel/header/stamp/secs' in strings)
        self.assertTrue('/cmd_vel/header/stamp/nsecs' in strings)
        self.assertTrue('/cmd_vel/header/frame_id' in strings)
        self.assertTrue('/cmd_vel/point/x' in strings)
        self.assertTrue('/cmd_vel/point/y' in strings)
        self.assertTrue('/cmd_vel/point/z' in strings)
    def test_flatten(self):
        flattened = ez_model.flatten([0, [[1, 2], 3, 4], [5, 6], [7], 8])
        self.assertEqual(len(flattened), 9)
    def test_find_topic_name_found(self):
        topic, attr, index = ez_model.find_topic_name(
            '/hoge/data', {'/hoge': 'type_a', '/hoga': 'type_b'})
        self.assertEqual(topic, '/hoge')
        self.assertEqual(attr, ['data'])
        self.assertEqual(index, None)
    def test_find_topic_name_found_topic_only(self):
        topic, attr, index = ez_model.find_topic_name(
            '/hoge', {'/hoge': 'type_a', '/hoga': 'type_b'})
        self.assertEqual(topic, '/hoge')
        self.assertEqual(attr, None)
        self.assertEqual(index, None)
    def test_find_topic_name_found_topic_only_nested(self):
        topic, attr, index = ez_model.find_topic_name(
            '/hoge', {'/hoge': 'type_a', '/hoga': 'type_b', '/hoge/nested': 'type_c'})
        self.assertEqual(topic, '/hoge')
        self.assertEqual(attr, None)
        self.assertEqual(index, None)
    def test_find_topic_name_found_topic_only_nested_by_nested(self):
        # The longest matching registered topic should win over its prefix.
        topic, attr, index = ez_model.find_topic_name(
            '/some/topic/again/data',
            {'/some/topic/again': 'std_msgs/Float32', '/some/topic': 'std_msgs/Float32', '/this/works': 'std_msgs/Float32'})
        self.assertEqual(topic, '/some/topic/again')
        # NOTE(review): attr assertion intentionally disabled — the expected
        # attribute value for this nested case is unverified.
        # self.assertEqual(attr, None)
        self.assertEqual(index, None)
    def test_find_topic_name_found_topic_only_nested_by_nested2(self):
        topic, attr, index = ez_model.find_topic_name(
            '/some/topic/again',
            {'/some/topic/again': 'std_msgs/Float32', '/some/topic': 'std_msgs/Float32', '/this/works': 'std_msgs/Float32'})
        self.assertEqual(topic, '/some/topic/again')
        self.assertEqual(attr, None)
        self.assertEqual(index, None)
    def test_find_topic_name_found_with_index(self):
        topic, attr, index = ez_model.find_topic_name(
            '/hoge/data[2]', {'/hoge': 'type_a', '/hoga': 'type_b'})
        self.assertEqual(topic, '/hoge')
        self.assertEqual(attr, ['data'])
        self.assertEqual(index, 2)
    def test_find_topic_name_not_found(self):
        topic, attr, index = ez_model.find_topic_name(
            '/hoge/data', {'/hogi': 'type_a', '/hoga': 'type_b'})
        self.assertEqual(topic, None)
        self.assertEqual(attr, None)
        self.assertEqual(index, None)
    def test_find_topic_name_repeated(self):
        # The attribute segment may repeat the final topic segment ("goal").
        topic, attr, index = ez_model.find_topic_name(
            '/hoge/goal/goal', {'/hoge/goal': 'type_a', '/hoga': 'type_b'})
        self.assertEqual(topic, '/hoge/goal')
        self.assertEqual(attr, ['goal'])
        self.assertEqual(index, None)
    def test_get_value_type(self):
        type, is_array = ez_model.get_value_type(
            'geometry_msgs/Twist', ['linear', 'x'])
        self.assertEqual(type, float)
        self.assertEqual(is_array, False)
    def test_get_value_type_header(self):
        type, is_array = ez_model.get_value_type(
            'geometry_msgs/PointStamped', ['header', 'frame_id'])
        self.assertEqual(type, str)
        self.assertEqual(is_array, False)
    def test_get_value_type_not_found(self):
        # A non-leaf attribute ('linear' is a sub-message) has no value type.
        type, is_array = ez_model.get_value_type(
            'geometry_msgs/Twist', ['linear'])
        self.assertEqual(type, None)
        self.assertEqual(is_array, False)
    def test_get_value_type_array(self):
        type, is_array = ez_model.get_value_type(
            'geometry_msgs/TwistWithCovariance', ['covariance'])
        self.assertEqual(type, float)
        self.assertEqual(is_array, True)
    def test_get_value_type_non_builtin_array(self):
        # Indexing into an array of sub-messages resolves the element's slot.
        type, is_array = ez_model.get_value_type(
            'geometry_msgs/Polygon', ['points[0]', 'x'])
        self.assertEqual(type, float)
        self.assertEqual(is_array, False)
    def test_make_test(self):
        text = ez_model.make_text('/cmd_vel', ['linear', 'x'], None)
        self.assertEqual(text, '/cmd_vel/linear/x')
    def test_make_test_array(self):
        text = ez_model.make_text('/cmd_vel', ['linear', 'x'], 2)
        self.assertEqual(text, '/cmd_vel/linear/x[2]')
    def test_get_value_type_quaternion(self):
        # Quaternions resolve only when the QuaternionModule plugin is given.
        msg_type, is_array = ez_model.get_value_type('geometry_msgs/Pose', ['orientation'])
        self.assertEqual(msg_type, None)
        self.assertEqual(is_array, False)
        msg_type, is_array = ez_model.get_value_type(
            'geometry_msgs/Pose', ['orientation'],
            modules=[quaternion_module.QuaternionModule()])
        self.assertEqual(msg_type, 'geometry_msgs/Quaternion')
        self.assertEqual(is_array, False)
# Allow running this suite directly as a ROS unit test node.
if __name__ == '__main__':
    import rosunit
    rosunit.unitrun(PKG, 'function_test', FunctionTest)
| 41.510067
| 124
| 0.648343
|
4a06c1258d3e7e8c1bf15b4b051ad02a26127397
| 7,658
|
py
|
Python
|
tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py
|
hefen1/chromium
|
52f0b6830e000ca7c5e9aa19488af85be792cc88
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py
|
hefen1/chromium
|
52f0b6830e000ca7c5e9aa19488af85be792cc88
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py
|
hefen1/chromium
|
52f0b6830e000ca7c5e9aa19488af85be792cc88
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2020-04-04T13:34:56.000Z
|
2020-11-04T07:17:52.000Z
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Finds android browsers that can be controlled by telemetry."""
import logging
import os
from telemetry import decorators
from telemetry.core import browser
from telemetry.core import exceptions
from telemetry.core import possible_browser
from telemetry.core import platform
from telemetry.core import util
from telemetry.core.backends import adb_commands
from telemetry.core.backends import android_browser_backend_settings
from telemetry.core.backends.chrome import android_browser_backend
from telemetry.core.platform import android_device
# Map of telemetry browser-type string ->
# [android package name, backend-settings class, local apk filename or None].
# A None apk filename means the browser cannot be installed from a local build.
CHROME_PACKAGE_NAMES = {
    'android-content-shell':
        ['org.chromium.content_shell_apk',
         android_browser_backend_settings.ContentShellBackendSettings,
         'ContentShell.apk'],
    'android-chrome-shell':
        ['org.chromium.chrome.shell',
         android_browser_backend_settings.ChromeShellBackendSettings,
         'ChromeShell.apk'],
    'android-webview':
        ['org.chromium.telemetry_shell',
         android_browser_backend_settings.WebviewBackendSettings,
         None],
    'android-webview-shell':
        ['org.chromium.android_webview.shell',
         android_browser_backend_settings.WebviewShellBackendSettings,
         'AndroidWebView.apk'],
    'android-chrome':
        ['com.google.android.apps.chrome',
         android_browser_backend_settings.ChromeBackendSettings,
         'Chrome.apk'],
    'android-chrome-work':
        ['com.chrome.work',
         android_browser_backend_settings.ChromeBackendSettings,
         None],
    'android-chrome-beta':
        ['com.chrome.beta',
         android_browser_backend_settings.ChromeBackendSettings,
         None],
    'android-chrome-dev':
        ['com.google.android.apps.chrome_dev',
         android_browser_backend_settings.ChromeBackendSettings,
         None],
    'android-chrome-canary':
        ['com.chrome.canary',
         android_browser_backend_settings.ChromeBackendSettings,
         None],
    'android-jb-system-chrome':
        ['com.android.chrome',
         android_browser_backend_settings.ChromeBackendSettings,
         None]
}
class PossibleAndroidBrowser(possible_browser.PossibleBrowser):
    """A launchable android browser instance."""

    def __init__(self, browser_type, finder_options, android_platform,
                 backend_settings, apk_name):
        """Bind a browser type to a platform and locate a local APK if any.

        For browser_type 'exact', apk_name is an absolute path that must
        exist; otherwise apk_name is searched under the Chromium build
        output directories and the newest match wins.
        """
        super(PossibleAndroidBrowser, self).__init__(
            browser_type, 'android', backend_settings.supports_tab_control)
        assert browser_type in FindAllBrowserTypes(finder_options), (
            'Please add %s to android_browser_finder.FindAllBrowserTypes' %
            browser_type)
        self._platform = android_platform
        self._platform_backend = (
            android_platform._platform_backend)  # pylint: disable=W0212
        self._backend_settings = backend_settings
        self._local_apk = None
        if browser_type == 'exact':
            if not os.path.exists(apk_name):
                raise exceptions.PathMissingError(
                    'Unable to find exact apk %s specified by --browser-executable' %
                    apk_name)
            self._local_apk = apk_name
        elif apk_name:
            chrome_root = util.GetChromiumSrcDir()
            candidate_apks = []
            for build_dir, build_type in util.GetBuildDirectories():
                apk_full_name = os.path.join(chrome_root, build_dir, build_type, 'apks',
                                             apk_name)
                if os.path.exists(apk_full_name):
                    last_changed = os.path.getmtime(apk_full_name)
                    candidate_apks.append((last_changed, apk_full_name))
            if candidate_apks:
                # Find the candidate .apk with the latest modification time.
                newest_apk_path = sorted(candidate_apks)[-1][1]
                self._local_apk = newest_apk_path

    def __repr__(self):
        return 'PossibleAndroidBrowser(browser_type=%s)' % self.browser_type

    def _InitPlatformIfNeeded(self):
        # Platform was fully initialized in __init__; nothing to do lazily.
        pass

    def Create(self, finder_options):
        """Instantiate a controllable Browser backed by this android target."""
        self._InitPlatformIfNeeded()
        browser_backend = android_browser_backend.AndroidBrowserBackend(
            self._platform_backend,
            finder_options.browser_options, self._backend_settings,
            output_profile_path=finder_options.output_profile_path,
            extensions_to_load=finder_options.extensions_to_load,
            target_arch=finder_options.target_arch)
        return browser.Browser(
            browser_backend, self._platform_backend, self._credentials_path)

    def SupportsOptions(self, finder_options):
        # Extensions are not supported on Android browsers.
        if len(finder_options.extensions_to_load) != 0:
            return False
        return True

    def HaveLocalAPK(self):
        """True when a local .apk was located and still exists on disk."""
        return self._local_apk and os.path.exists(self._local_apk)

    @decorators.Cache
    def UpdateExecutableIfNeeded(self):
        # Cached so the (slow) install happens at most once per run.
        if self.HaveLocalAPK():
            logging.warn('Installing %s on device if needed.' % self._local_apk)
            self.platform.InstallApplication(self._local_apk)

    def last_modification_time(self):
        """mtime of the local APK, or -1 when there is none (sorts oldest)."""
        if self.HaveLocalAPK():
            return os.path.getmtime(self._local_apk)
        return -1
def SelectDefaultBrowser(possible_browsers):
    """Return the newest possible browser.

    "Newest" means the largest last_modification_time(); browsers without a
    local APK report -1 and therefore lose to any locally built one.
    """
    if not possible_browsers:
        return None
    newest = max(possible_browsers, key=lambda b: b.last_modification_time())
    return newest
def CanFindAvailableBrowsers():
    # Browsers are findable iff at least one Android device is discoverable.
    return android_device.CanDiscoverDevices()
def CanPossiblyHandlePath(target_path):
    """Return True when target_path names an .apk file (case-insensitive)."""
    _, extension = os.path.splitext(target_path.lower())
    return extension == '.apk'
def FindAllBrowserTypes(_options):
    """Return every supported browser-type string, plus the special 'exact'.

    Fix: the original `CHROME_PACKAGE_NAMES.keys() + ['exact']` raises
    TypeError on Python 3 (dict views do not support `+`).  `list(d)` yields
    the same key list on Python 2 and works on Python 3.
    """
    return list(CHROME_PACKAGE_NAMES) + ['exact']
def _FindAllPossibleBrowsers(finder_options, android_platform):
    """Testable version of FindAllAvailableBrowsers."""
    if not android_platform:
        return []
    possible_browsers = []
    # Add the exact APK if given.
    if (finder_options.browser_executable and
            CanPossiblyHandlePath(finder_options.browser_executable)):
        normalized_path = os.path.expanduser(finder_options.browser_executable)
        # The APK's manifest decides which backend settings apply to it.
        exact_package = adb_commands.GetPackageName(normalized_path)
        if not exact_package:
            raise exceptions.PackageDetectionError(
                'Unable to find package for %s specified by --browser-executable' %
                normalized_path)
        # NOTE: .itervalues()/.iteritems() below are Python 2 only.
        package_info = next((info for info in CHROME_PACKAGE_NAMES.itervalues()
                             if info[0] == exact_package), None)
        if package_info:
            [package, backend_settings, _] = package_info
            possible_browsers.append(
                PossibleAndroidBrowser(
                    'exact',
                    finder_options,
                    android_platform,
                    backend_settings(package),
                    normalized_path))
        else:
            raise exceptions.UnknownPackageError(
                '%s specified by --browser-executable has an unknown package: %s' %
                (normalized_path, exact_package))
    # Add every known browser that is either installed on the device or
    # available as a locally built APK.
    for name, package_info in CHROME_PACKAGE_NAMES.iteritems():
        package, backend_settings, local_apk = package_info
        b = PossibleAndroidBrowser(name,
                                   finder_options,
                                   android_platform,
                                   backend_settings(package),
                                   local_apk)
        if b.platform.CanLaunchApplication(package) or b.HaveLocalAPK():
            possible_browsers.append(b)
    return possible_browsers
def FindAllAvailableBrowsers(finder_options, device):
    """Finds all the possible browsers on one device.

    The device is either the only device on the host platform,
    or |finder_options| specifies a particular device.
    """
    if isinstance(device, android_device.AndroidDevice):
        android_platform = platform.GetPlatformForDevice(device, finder_options)
        return _FindAllPossibleBrowsers(finder_options, android_platform)
    return []
| 35.953052
| 80
| 0.719509
|
4a06c1716260e1f13d2b189f34f1d31dcd968fb8
| 2,624
|
py
|
Python
|
core/groups.py
|
PIESBOYBRO/MusicPlayer-1
|
da36513888318d3ce6031b9cf3e94f2a4d0d9de9
|
[
"MIT"
] | 1
|
2022-02-12T11:45:27.000Z
|
2022-02-12T11:45:27.000Z
|
core/groups.py
|
PIESBOYBRO/MusicPlayer-1
|
da36513888318d3ce6031b9cf3e94f2a4d0d9de9
|
[
"MIT"
] | null | null | null |
core/groups.py
|
PIESBOYBRO/MusicPlayer-1
|
da36513888318d3ce6031b9cf3e94f2a4d0d9de9
|
[
"MIT"
] | 1
|
2022-02-08T03:31:31.000Z
|
2022-02-08T03:31:31.000Z
|
"""
Music Player, Telegram Voice Chat Bot
Copyright (c) 2021-present PIESBOY BRO <https://github.com/PIESBOYBRO>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>
"""
from config import config
from core.queue import Queue
from pyrogram.types import Message
from typing import Any, Dict, Union
from pyrogram.raw.functions.channels import GetFullChannel
from pyrogram.raw.functions.phone import EditGroupCallTitle
GROUPS: Dict[int, Dict[str, Any]] = {}
def all_groups():
    """Return a view over every chat id that currently has registered state."""
    return GROUPS.keys()
def set_default(chat_id: int) -> None:
    """(Re)initialise the per-chat state dictionary with default settings."""
    GROUPS[chat_id] = {
        "is_playing": False,
        "now_playing": None,
        "stream_mode": config.STREAM_MODE,
        "admins_only": config.ADMINS_ONLY,
        "loop": False,
        "lang": config.LANGUAGE,
        "queue": Queue(),
    }
def get_group(chat_id) -> Dict[str, Any]:
    """Fetch a chat's state dict, lazily creating defaults on first access."""
    if chat_id not in GROUPS:
        set_default(chat_id)
    return GROUPS[chat_id]
def set_group(chat_id: int, **kwargs) -> None:
    """Overwrite individual state entries for an already-registered chat."""
    GROUPS[chat_id].update(kwargs)
async def set_title(message_or_chat_id: Union[Message, int], title: str, **kw):
    """Rename the chat's active group voice call.

    Accepts either a pyrogram Message (client and chat id are taken from it)
    or a raw chat id (client must then be passed via the ``client`` keyword).
    Renaming is cosmetic, so all failures are swallowed (best-effort).
    """
    if isinstance(message_or_chat_id, Message):
        client = message_or_chat_id._client
        chat_id = message_or_chat_id.chat.id
    elif isinstance(message_or_chat_id, int):
        client = kw.get("client")
        chat_id = message_or_chat_id
    try:
        # Resolve the channel, fetch its ongoing call, and retitle it.
        peer = await client.resolve_peer(chat_id)
        chat = await client.send(GetFullChannel(channel=peer))
        await client.send(EditGroupCallTitle(call=chat.full_chat.call, title=title))
    except BaseException:
        # NOTE(review): this broad catch also hides the NameError raised when
        # neither isinstance branch matched (client/chat_id unbound).
        pass
def get_queue(chat_id: int) -> Queue:
    """Return the playback queue of a chat (chat must be registered)."""
    return GROUPS[chat_id]["queue"]
def clear_queue(chat_id: int) -> None:
    """Empty a chat's playback queue in place."""
    GROUPS[chat_id]["queue"].clear()
def shuffle_queue(chat_id: int) -> Queue:
    """Shuffle a chat's playback queue and return the shuffled queue."""
    return GROUPS[chat_id]["queue"].shuffle()
| 30.870588
| 84
| 0.720274
|
4a06c183589102dcce3df81650858654b12561b4
| 91
|
py
|
Python
|
tests/roots/test-ext-autosectionlabel/conf.py
|
zhsj/sphinx
|
169297d0b76bf0b503033dadeb14f9a2b735e422
|
[
"BSD-2-Clause"
] | 3
|
2019-06-11T09:42:08.000Z
|
2020-03-10T15:57:09.000Z
|
tests/roots/test-ext-autosectionlabel/conf.py
|
zhsj/sphinx
|
169297d0b76bf0b503033dadeb14f9a2b735e422
|
[
"BSD-2-Clause"
] | 12
|
2019-01-09T15:43:57.000Z
|
2020-01-21T10:46:30.000Z
|
tests/roots/test-ext-autosectionlabel/conf.py
|
zhsj/sphinx
|
169297d0b76bf0b503033dadeb14f9a2b735e422
|
[
"BSD-2-Clause"
] | 10
|
2019-02-04T11:49:35.000Z
|
2020-03-21T13:32:20.000Z
|
# -*- coding: utf-8 -*-
# Minimal Sphinx configuration for the autosectionlabel extension test root.
extensions = ['sphinx.ext.autosectionlabel']
master_doc = 'index'
| 18.2
| 44
| 0.659341
|
4a06c29ecc1685e747b5e654f75ffa8576749972
| 109
|
py
|
Python
|
django_gotolong/broker/zerodha/zsum/apps.py
|
ParikhKadam/gotolong
|
839beb8aa37055a2078eaa289b8ae05b62e8905e
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 15
|
2019-12-06T16:19:45.000Z
|
2021-08-20T13:22:22.000Z
|
django_gotolong/broker/zerodha/zsum/apps.py
|
ParikhKadam/gotolong
|
839beb8aa37055a2078eaa289b8ae05b62e8905e
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 14
|
2020-12-08T10:45:05.000Z
|
2021-09-21T17:23:45.000Z
|
django_gotolong/broker/zerodha/zsum/apps.py
|
ParikhKadam/gotolong
|
839beb8aa37055a2078eaa289b8ae05b62e8905e
|
[
"BSD-2-Clause",
"BSD-3-Clause"
] | 9
|
2020-01-01T03:04:29.000Z
|
2021-04-18T08:42:30.000Z
|
from django.apps import AppConfig
class BrokerZerodhaSumConfig(AppConfig):
    """Django AppConfig for the Zerodha broker summary app."""
    name = 'broker_zerodha_sum'
| 18.166667
| 40
| 0.798165
|
4a06c3e9b2ddec02e0144a635e5eb195ca560130
| 241
|
py
|
Python
|
LABORATORIO 3/numero suerte.py
|
msolivera/Phyton
|
1322fa2ff4bb06a17350fefa7e5268c0969e5b53
|
[
"bzip2-1.0.6"
] | null | null | null |
LABORATORIO 3/numero suerte.py
|
msolivera/Phyton
|
1322fa2ff4bb06a17350fefa7e5268c0969e5b53
|
[
"bzip2-1.0.6"
] | null | null | null |
LABORATORIO 3/numero suerte.py
|
msolivera/Phyton
|
1322fa2ff4bb06a17350fefa7e5268c0969e5b53
|
[
"bzip2-1.0.6"
] | null | null | null |
def numero_suerte(n):
    """Print whether the digits of n add up to 21 ("numero de la suerte").

    Bug fix: the original divided n by 10 *before* taking n % 10, so the
    last digit was never counted and every following remainder came from
    the already-truncated number (993 summed to 18 instead of 21).
    """
    suma = 0
    while n != 0:
        resto = n % 10   # take the current last digit first
        n = n // 10      # then drop it
        suma += resto
    if suma == 21:
        print("numero de la suerte")
    else:
        print("numero cualquiera")
numero_suerte(993)
| 16.066667
| 37
| 0.497925
|
4a06c3ff4e2a48afd8f0175e367ab7cd0b420ccb
| 5,279
|
py
|
Python
|
Tools/mavlink_direct.py
|
clearbothk/PX4-Autopilot
|
235845bb5c879d3ede6d7a0d042826a87c08be7f
|
[
"BSD-3-Clause"
] | null | null | null |
Tools/mavlink_direct.py
|
clearbothk/PX4-Autopilot
|
235845bb5c879d3ede6d7a0d042826a87c08be7f
|
[
"BSD-3-Clause"
] | null | null | null |
Tools/mavlink_direct.py
|
clearbothk/PX4-Autopilot
|
235845bb5c879d3ede6d7a0d042826a87c08be7f
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
"""
Open a shell over MAVLink.
@author: Beat Kueng (beat-kueng@gmx.net)
"""
from __future__ import print_function
import sys, select
import termios
from timeit import default_timer as timer
from argparse import ArgumentParser
import os
try:
from pymavlink import mavutil
except ImportError as e:
print("Failed to import pymavlink: " + str(e))
print("")
print("You may need to install it with:")
print(" pip3 install --user pymavlink")
print("")
sys.exit(1)
try:
import serial
except ImportError as e:
print("Failed to import pyserial: " + str(e))
print("")
print("You may need to install it with:")
print(" pip3 install --user pyserial")
print("")
sys.exit(1)
class MavlinkSerialPort():
    '''an object that looks like a serial port, but
    transmits using mavlink SERIAL_CONTROL packets'''

    def __init__(self, portname, baudrate, devnum=0, debug=0):
        """Open a MAVLink connection and wait for the first heartbeat."""
        self.baudrate = 0
        self._debug = debug
        self.buf = ''  # receive buffer of decoded characters
        self.port = devnum
        self.debug("Connecting with MAVLink to %s ..." % portname)
        self.mav = mavutil.mavlink_connection(portname, autoreconnect=True, baud=baudrate)
        self.mav.mav.heartbeat_send(mavutil.mavlink.MAV_TYPE_GENERIC, mavutil.mavlink.MAV_AUTOPILOT_INVALID, 0, 0, 0)
        self.mav.wait_heartbeat()
        self.debug("HEARTBEAT OK\n")
        self.debug("Locked serial device\n")

    def debug(self, s, level=1):
        '''write some debug text'''
        if self._debug >= level:
            print(s)

    def write(self, b):
        '''write some bytes'''
        self.debug("sending '%s' (0x%02x) of len %u\n" % (b, ord(b[0]), len(b)), 2)
        while len(b) > 0:
            # SERIAL_CONTROL payloads carry at most 70 bytes; chunk the data.
            n = len(b)
            if n > 70:
                n = 70
            buf = [ord(x) for x in b[:n]]
            buf.extend([0]*(70-len(buf)))  # zero-pad to the fixed 70-byte field
            self.mav.mav.serial_control_send(self.port,
                                             mavutil.mavlink.SERIAL_CONTROL_FLAG_EXCLUSIVE |
                                             mavutil.mavlink.SERIAL_CONTROL_FLAG_RESPOND,
                                             0,
                                             0,
                                             n,
                                             buf)
            b = b[n:]

    def close(self):
        # A zero-flag, zero-length packet releases the exclusive lock.
        self.mav.mav.serial_control_send(self.port, 0, 0, 0, 0, [0]*70)

    def sendMsg(self):
        # NOTE(review): the declared length 38 does not match the 37-character
        # string below, and the payload is a str rather than a padded byte
        # list as in write() — confirm against the pymavlink API.
        self.mav.mav.serial_control_send(self.port,
                                         mavutil.mavlink.SERIAL_CONTROL_FLAG_EXCLUSIVE |
                                         mavutil.mavlink.SERIAL_CONTROL_FLAG_RESPOND,
                                         0,
                                         0,
                                         38,
                                         "micrortps_client start -d /dev/ttyS2\n")

    def _recv(self):
        '''read some bytes into self.buf'''
        m = self.mav.recv_match(condition='SERIAL_CONTROL.count!=0',
                                type='SERIAL_CONTROL', blocking=True,
                                timeout=0.03)
        if m is not None:
            if self._debug > 2:
                print(m)
            data = m.data[:m.count]
            self.buf += ''.join(str(chr(x)) for x in data)

    def read(self, n):
        '''read some bytes'''
        if len(self.buf) == 0:
            self._recv()
        if len(self.buf) > 0:
            if n > len(self.buf):
                n = len(self.buf)
            ret = self.buf[:n]
            self.buf = self.buf[n:]
            if self._debug >= 2:
                for b in ret:
                    self.debug("read 0x%x" % ord(b), 2)
            return ret
        return ''
def main():
    """Parse CLI args, auto-detect a serial port if needed, and start the
    micrortps client on the vehicle over MAVLink SERIAL_CONTROL."""
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('port', metavar='PORT', nargs='?', default = None,
            help='Mavlink port name: serial: DEVICE[,BAUD], udp: IP:PORT, tcp: tcp:IP:PORT. Eg: \
/dev/ttyUSB0 or 0.0.0.0:14550. Auto-detect serial if not given.')
    parser.add_argument("--baudrate", "-b", dest="baudrate", type=int,
                        help="Mavlink port baud rate (default=57600)", default=57600)
    args = parser.parse_args()

    if args.port == None:
        if sys.platform == "darwin":
            # macOS: assume the standard PX4 USB modem device name.
            args.port = "/dev/tty.usbmodem01"
        else:
            # Scan for known autopilot USB serial adapters.
            serial_list = mavutil.auto_detect_serial(preferred_list=['*FTDI*',
                "*Arduino_Mega_2560*", "*3D_Robotics*", "*USB_to_UART*", '*PX4*', '*FMU*', "*Gumstix*"])
            if len(serial_list) == 0:
                print("Error: no serial connection found")
                return
            if len(serial_list) > 1:
                print('Auto-detected serial ports are:')
                for port in serial_list:
                    print(" {:}".format(port))
            print('Using port {:}'.format(serial_list[0]))
            args.port = serial_list[0].device

    print("Connecting to MAVLINK...")
    mav_serialport = MavlinkSerialPort(args.port, args.baudrate, devnum=10)
    mav_serialport.write('\n')  # make sure the shell is started
    mav_serialport.write('micrortps_client start -d /dev/ttyS2\n')
if __name__ == '__main__':
main()
| 34.503268
| 117
| 0.520743
|
4a06c4b83cfdb697b35e6e573c927b8f40c19ea0
| 2,420
|
py
|
Python
|
forkit/tests/diff.py
|
bartee/django-forkit
|
a9f236c5ff969d4e2dbc12b0670587b82edcf446
|
[
"BSD-3-Clause"
] | 1
|
2018-11-09T11:09:26.000Z
|
2018-11-09T11:09:26.000Z
|
forkit/tests/diff.py
|
bartee/django-forkit
|
a9f236c5ff969d4e2dbc12b0670587b82edcf446
|
[
"BSD-3-Clause"
] | 1
|
2018-11-09T11:04:47.000Z
|
2018-11-15T12:54:17.000Z
|
forkit/tests/diff.py
|
bartee/django-forkit
|
a9f236c5ff969d4e2dbc12b0670587b82edcf446
|
[
"BSD-3-Clause"
] | 3
|
2016-12-02T22:15:00.000Z
|
2018-11-08T13:33:57.000Z
|
from django.test import TestCase
from forkit.tests.models import Author, Post, Blog, Tag, C
__all__ = ('DiffModelObjectTestCase',)
class DiffModelObjectTestCase(TestCase):
    """Exercises forkit's Model.diff() across FK, M2M and O2O relations."""
    fixtures = ['test_data.json']

    def setUp(self):
        # One instance of each related model from the fixture.
        self.author = Author.objects.get(pk=1)
        self.post = Post.objects.get(pk=1)
        self.blog = Blog.objects.get(pk=1)
        self.tag = Tag.objects.get(pk=1)

    def test_empty_shallow_diff(self):
        """Diffing against a blank instance reports every populated field."""
        diff = self.author.diff(Author())
        self.assertEqual(diff, {
            'first_name': '',
            'last_name': '',
            'posts': [],
        })
        diff = self.blog.diff(Blog())
        self.assertEqual(diff, {
            'name': '',
            'author': None,
        })
        diff = self.post.diff(Post())
        self.assertEqual(diff, {
            'blog': None,
            'authors': [],
            'tags': [],
            'title': '',
        })
        diff = self.tag.diff(Tag())
        self.assertEqual(diff, {
            'name': '',
            'post_set': [],
        })

    def test_fork_shallow_diff(self):
        """An uncommitted fork diffs clean against its source (except O2O)."""
        # even without the commit, the diff is clean. related objects are
        # compared against the _related dict
        fork = self.author.fork(commit=False)
        diff = fork.diff(self.author)
        self.assertEqual(diff, {})
        fork = self.post.fork(commit=False)
        diff = fork.diff(self.post)
        self.assertEqual(diff, {})
        # since Author is a OneToOneField and this is not a deep fork, it
        # still does not have a value
        fork = self.blog.fork(commit=False)
        diff = fork.diff(self.blog)
        self.assertEqual(diff, {
            'author': self.author
        })
        # Direction matters: the fork lacks the author, the source has it.
        diff = self.blog.diff(fork)
        self.assertEqual(diff, {
            'author': None
        })
        fork = self.tag.fork(commit=False)
        diff = self.tag.diff(fork)
        self.assertEqual(diff, {})

    def test_deep_diff(self):
        """Deep diff recurses into related objects and reports nested changes."""
        # only simple data models are currently supported
        c = C.objects.get(pk=1)
        # need to commit, since lists are not yet handled..
        fork = c.fork(commit=True, deep=True)
        diff = c.diff(fork, deep=True)
        self.assertEqual(diff, {})
        fork.b.title = 'foobar'
        self.assertEqual(c.diff(fork, deep=True), {
            'b': {
                'title': 'foobar',
            }
        })
| 27.816092
| 73
| 0.533471
|
4a06c55e9dc9956778fd62ed6903b15aad18b22d
| 1,654
|
py
|
Python
|
userbot/plugins/design.py
|
justteen/BUZZ-USERBOT
|
55651cce150e1d04d2c61efb2565ef9f46b42933
|
[
"BSL-1.0"
] | null | null | null |
userbot/plugins/design.py
|
justteen/BUZZ-USERBOT
|
55651cce150e1d04d2c61efb2565ef9f46b42933
|
[
"BSL-1.0"
] | null | null | null |
userbot/plugins/design.py
|
justteen/BUZZ-USERBOT
|
55651cce150e1d04d2c61efb2565ef9f46b42933
|
[
"BSL-1.0"
] | null | null | null |
""".admin Plugin for @UniBorg"""
from telethon.tl.types import ChannelParticipantsAdmins
from userbot.utils import lightning_cmd
@borg.on(lightning_cmd("join"))
async def _(event):
    """Reply with ASCII-art for the .join command, then delete the trigger."""
    if event.fwd_from:
        return
    mentions = "`━━━━━┓ \n┓┓┓┓┓┃\n┓┓┓┓┓┃ ヽ○ノ ⇦ Me When You Joined \n┓┓┓┓┓┃. / \n┓┓┓┓┓┃ ノ) \n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃\n┓┓┓┓┓┃`"
    chat = await event.get_input_chat()
    # NOTE(review): this loop appends only empty strings — it iterates admins
    # but adds nothing to the message; presumably leftover mention code.
    async for x in borg.iter_participants(chat, filter=ChannelParticipantsAdmins):
        mentions += f""
    reply_message = None
    if event.reply_to_msg_id:
        # Attach the art to the message being replied to.
        reply_message = await event.get_reply_message()
        await reply_message.reply(mentions)
    else:
        await event.reply(mentions)
    await event.delete()
@borg.on(lightning_cmd("pay"))
async def _(event):
    """Reply with QR-code-style ASCII-art for the .pay command, then delete the trigger."""
    if event.fwd_from:
        return
    mentions = "`█▀▀▀▀▀█░▀▀░░░█░░░░█▀▀▀▀▀█\n█░███░█░█▄░█▀▀░▄▄░█░███░█\n█░▀▀▀░█░▀█▀▀▄▀█▀▀░█░▀▀▀░█\n▀▀▀▀▀▀▀░▀▄▀▄▀▄█▄▀░▀▀▀▀▀▀▀\n█▀█▀▄▄▀░█▄░░░▀▀░▄█░▄▀█▀░▀\n░█▄▀░▄▀▀░░░▄▄▄█░▀▄▄▄▀▄▄▀▄\n░░▀█░▀▀▀▀▀▄█░▄░████ ██▀█▄\n▄▀█░░▄▀█▀█▀░█▄▀░▀█▄██▀░█▄\n░░▀▀▀░▀░█▄▀▀▄▄░▄█▀▀▀█░█▀▀\n█▀▀▀▀▀█░░██▀█░░▄█░▀░█▄░██\n█░███░█░▄▀█▀██▄▄▀▀█▀█▄░▄▄\n█░▀▀▀░█░█░░▀▀▀░█░▀▀▀▀▄█▀░\n▀▀▀▀▀▀▀░▀▀░░▀░▀░░░▀▀░▀▀▀▀`"
    chat = await event.get_input_chat()
    # NOTE(review): appends only empty strings — leftover mention code.
    async for x in borg.iter_participants(chat, filter=ChannelParticipantsAdmins):
        mentions += f""
    reply_message = None
    if event.reply_to_msg_id:
        reply_message = await event.get_reply_message()
        await reply_message.reply(mentions)
    else:
        await event.reply(mentions)
    await event.delete()
| 42.410256
| 368
| 0.484281
|
4a06c594fab888587b59e4ea2457c0aff53db122
| 4,296
|
py
|
Python
|
hyperactive/hyperactive.py
|
nevinadalal/Hyperactive
|
3232ffeda70c5d4853b9e71aaf5d1e761c0db9c2
|
[
"MIT"
] | null | null | null |
hyperactive/hyperactive.py
|
nevinadalal/Hyperactive
|
3232ffeda70c5d4853b9e71aaf5d1e761c0db9c2
|
[
"MIT"
] | null | null | null |
hyperactive/hyperactive.py
|
nevinadalal/Hyperactive
|
3232ffeda70c5d4853b9e71aaf5d1e761c0db9c2
|
[
"MIT"
] | null | null | null |
# Author: Simon Blanke
# Email: simon.blanke@yahoo.com
# License: MIT License
import multiprocessing as mp
from tqdm import tqdm
from .optimizers import RandomSearchOptimizer
from .run_search import run_search
from .results import Results
from .print_results import PrintResults
from .search_space import SearchSpace
class Hyperactive:
    """Front-end that collects optimization searches and runs them in parallel."""

    def __init__(
        self,
        verbosity=["progress_bar", "print_results", "print_times"],
        distribution="multiprocessing",
        n_processes="auto",
    ):
        # NOTE(review): mutable default list for `verbosity` — benign here
        # because it is never mutated, but worth replacing with None.
        super().__init__()
        if verbosity is False:
            verbosity = []
        self.verbosity = verbosity
        self.distribution = distribution
        self.n_processes = n_processes
        # nth_process -> optimizer; one entry per job of every added search.
        self.opt_pros = {}

    def _create_shared_memory(self, new_opt):
        """Give new_opt a memory dict shared with compatible earlier searches."""
        if new_opt.memory == "share":
            if len(self.opt_pros) == 0:
                manager = mp.Manager()
                new_opt.memory = manager.dict()
            for opt in self.opt_pros.values():
                # Searches share memory only when they evaluate the same
                # objective over a search space of the same dimensionality.
                same_obj_func = (
                    opt.objective_function.__name__
                    == new_opt.objective_function.__name__
                )
                same_ss_length = len(opt.s_space()) == len(new_opt.s_space())
                if same_obj_func and same_ss_length:
                    new_opt.memory = opt.memory  # get same manager.dict
                else:
                    # NOTE(review): this runs once per non-matching optimizer,
                    # creating a fresh Manager each time and possibly
                    # overwriting an earlier match — confirm intended.
                    manager = mp.Manager()  # get new manager.dict
                    new_opt.memory = manager.dict()

    @staticmethod
    def _default_opt(optimizer):
        # "default" resolves to a plain random search.
        if isinstance(optimizer, str):
            if optimizer == "default":
                optimizer = RandomSearchOptimizer()
        return optimizer

    @staticmethod
    def _default_search_id(search_id, objective_function):
        # Fall back to the objective function's name as the search id.
        if not search_id:
            search_id = objective_function.__name__
        return search_id

    @staticmethod
    def check_list(search_space):
        """Warn (without raising) when a search-space value is not a list."""
        for key in search_space.keys():
            search_dim = search_space[key]
            error_msg = (
                "Value in '{}' of search space dictionary must be of type list".format(
                    key
                )
            )
            if not isinstance(search_dim, list):
                print("Warning", error_msg)
                # raise ValueError(error_msg)

    def add_search(
        self,
        objective_function,
        search_space,
        n_iter,
        search_id=None,
        optimizer="default",
        n_jobs=1,
        initialize={"grid": 4, "random": 2, "vertices": 4},
        max_score=None,
        early_stopping=None,
        random_state=None,
        memory="share",
        memory_warm_start=None,
    ):
        """Register one search (objective + space + budget) to be run later.

        n_jobs == -1 uses one job per CPU core; each job gets its own
        process slot but shares the same configured optimizer object.
        """
        self.check_list(search_space)
        optimizer = self._default_opt(optimizer)
        search_id = self._default_search_id(search_id, objective_function)
        s_space = SearchSpace(search_space)
        optimizer.setup_search(
            objective_function,
            s_space,
            n_iter,
            initialize,
            max_score,
            early_stopping,
            random_state,
            memory,
            memory_warm_start,
            self.verbosity,
        )
        if memory == "share":
            self._create_shared_memory(optimizer)
        if n_jobs == -1:
            n_jobs = mp.cpu_count()
        for _ in range(n_jobs):
            nth_process = len(self.opt_pros)
            self.opt_pros[nth_process] = optimizer

    def _print_info(self):
        # Print a per-process result summary, respecting verbosity flags.
        print_res = PrintResults(self.opt_pros, self.verbosity)
        for results in self.results_list:
            nth_process = results["nth_process"]
            print_res.print_process(results, nth_process)

    def run(self, max_time=None):
        """Execute all registered searches; blocks until every job finishes."""
        for opt in self.opt_pros.values():
            opt.max_time = max_time
        self.results_list = run_search(
            self.opt_pros, self.distribution, self.n_processes
        )
        self.results_ = Results(self.results_list, self.opt_pros)
        self._print_info()

    def best_para(self, id_):
        """Best parameter set found for the search identified by id_."""
        return self.results_.best_para(id_)

    def best_score(self, id_):
        """Best objective score found for the search identified by id_."""
        return self.results_.best_score(id_)

    def search_data(self, id_):
        """Full evaluation history for the search identified by id_."""
        return self.results_.search_data(id_)
| 28.263158
| 87
| 0.58473
|
4a06c5ca3032b728d78967c36e56de39ff4025ea
| 218
|
py
|
Python
|
pymatgen/io/exciting/__init__.py
|
Crivella/pymatgen
|
dd3737011e76520da1347d5db75db3a3f87e520f
|
[
"MIT"
] | 1
|
2021-11-02T21:10:11.000Z
|
2021-11-02T21:10:11.000Z
|
pymatgen/io/exciting/__init__.py
|
Crivella/pymatgen
|
dd3737011e76520da1347d5db75db3a3f87e520f
|
[
"MIT"
] | 5
|
2018-08-07T23:00:23.000Z
|
2021-01-05T22:46:23.000Z
|
pymatgen/io/exciting/__init__.py
|
Crivella/pymatgen
|
dd3737011e76520da1347d5db75db3a3f87e520f
|
[
"MIT"
] | 6
|
2019-04-26T18:50:41.000Z
|
2020-03-29T17:58:34.000Z
|
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This package containes classes to parse input files from the exciting
code package.
"""
from .inputs import * # noqa
| 19.818182
| 69
| 0.743119
|
4a06c6688768d0b399a05d294ea0946512424bc5
| 687
|
py
|
Python
|
python/admin/tables/orders_table/accept_order_by_user.py
|
OSAMAMOHAMED1234/E-Commerce_Blueprint
|
eca5d0c2eb22a0e6a30bfd2499e85775b43ef919
|
[
"MIT"
] | 1
|
2019-05-04T11:52:49.000Z
|
2019-05-04T11:52:49.000Z
|
python/admin/tables/orders_table/accept_order_by_user.py
|
osama-mohamed/E-Commerce_Blueprint
|
eca5d0c2eb22a0e6a30bfd2499e85775b43ef919
|
[
"MIT"
] | null | null | null |
python/admin/tables/orders_table/accept_order_by_user.py
|
osama-mohamed/E-Commerce_Blueprint
|
eca5d0c2eb22a0e6a30bfd2499e85775b43ef919
|
[
"MIT"
] | null | null | null |
from python.admin.login.login_check import *
from python.database.flask_database import *
accept_order_by_user_admin = Blueprint('accept_order_by_user_admin', __name__)
# admin accept orders for user
@accept_order_by_user_admin.route('/admin/accept_order_user/<username>/<id>', methods=['post', 'get'])
@is_admin_logged_in
def accept_order_user(username, id):
    """Mark a user's buy order as accepted (admin action), then redirect.

    Fix: the status was passed as the list ['Accepted']; a %s placeholder
    expects a scalar, so pass the plain string 'Accepted'.
    """
    cur = mysql.connection.cursor()
    # Parameterized query keeps the id/username inputs injection-safe.
    cur.execute(
        "UPDATE buy_orders SET status = %s WHERE id = %s AND user_name = %s",
        ('Accepted', id, username),
    )
    mysql.connection.commit()
    cur.close()
    flash('You have accepted the order Successfully!', 'success')
    return redirect(url_for('dashboard.admin_dashboard'))
| 36.157895
| 115
| 0.746725
|
4a06c6be0f8080efc2cdca274d38545714c2cf34
| 280
|
py
|
Python
|
examples/listDoubt.py
|
mnishitha/INF502-Fall2020
|
63c93219ba593fe50accfb24ea04752be2a62ddb
|
[
"Unlicense"
] | 8
|
2020-08-07T00:04:02.000Z
|
2020-12-14T21:43:54.000Z
|
examples/listDoubt.py
|
mnishitha/INF502-Fall2020
|
63c93219ba593fe50accfb24ea04752be2a62ddb
|
[
"Unlicense"
] | 34
|
2020-09-05T04:13:37.000Z
|
2020-10-13T21:57:29.000Z
|
examples/listDoubt.py
|
mnishitha/INF502-Fall2020
|
63c93219ba593fe50accfb24ea04752be2a62ddb
|
[
"Unlicense"
] | 34
|
2020-08-18T22:42:39.000Z
|
2020-09-11T00:25:14.000Z
|
def list_the_numbers(list1):
    """Print the element count of list1, then each element on its own line."""
    print("This list contains ", len(list1), " elements, as follows:")
    for item in list1:
        print(item)
my_list=[1,2,3,4,5]
my_list_float=[1.1,2.3,4.5]
my_list_string=['Igor','Maria','Andreia','Enzo']
list_the_numbers(my_list_string)
| 31.111111
| 70
| 0.692857
|
4a06c6f8a38e232110c2dd2e26af5536e9fb35c5
| 737
|
py
|
Python
|
beginning-python-3ed-master/Chapter13/listing13-1.py
|
zhuxinkai/python3-book-practice
|
a0d297df0755eaf239398f138c3b38746c32ac99
|
[
"MIT"
] | null | null | null |
beginning-python-3ed-master/Chapter13/listing13-1.py
|
zhuxinkai/python3-book-practice
|
a0d297df0755eaf239398f138c3b38746c32ac99
|
[
"MIT"
] | 1
|
2020-06-19T05:47:38.000Z
|
2020-06-19T05:47:46.000Z
|
beginning-python-3ed-master/Chapter13/listing13-1.py
|
zhuxinkai/python3-book-practice
|
a0d297df0755eaf239398f138c3b38746c32ac99
|
[
"MIT"
] | null | null | null |
import sqlite3
def convert(value):
    """Convert one ABBREV.txt field: '~'-quoted values stay strings with the
    tildes stripped; anything else becomes a float, with '' treated as 0."""
    if value.startswith('~'):
        return value.strip('~')
    return float(value or '0')
# Build the food database: create the table, then load every '^'-separated
# record from ABBREV.txt (first 10 fields only).
conn = sqlite3.connect('food.db')
curs = conn.cursor()
curs.execute('''
CREATE TABLE food (
id TEXT PRIMARY KEY,
desc TEXT,
water FLOAT,
kcal FLOAT,
protein FLOAT,
fat FLOAT,
ash FLOAT,
carbs FLOAT,
fiber FLOAT,
sugar FLOAT
)
''')
# Parameterized insert, one row per source line.
query = 'INSERT INTO food VALUES (?,?,?,?,?,?,?,?,?,?)'
field_count = 10
for line in open('ABBREV.txt'):
    fields = line.split('^')
    vals = [convert(f) for f in fields[:field_count]]
    curs.execute(query, vals)
conn.commit()
conn.close()
| 19.918919
| 55
| 0.552239
|
4a06c707d3ac8b0757e64f628d710388e55f63d1
| 4,718
|
py
|
Python
|
train1.py
|
lilinlin001/YOLOv3
|
00ee3c5832a73ef4e01b71db2502f83968c8008e
|
[
"MIT"
] | null | null | null |
train1.py
|
lilinlin001/YOLOv3
|
00ee3c5832a73ef4e01b71db2502f83968c8008e
|
[
"MIT"
] | null | null | null |
train1.py
|
lilinlin001/YOLOv3
|
00ee3c5832a73ef4e01b71db2502f83968c8008e
|
[
"MIT"
] | null | null | null |
"""
Retrain the YOLO model for your own dataset.
"""
import numpy as np
import keras.backend as K
from keras.layers import Input, Lambda
from keras.models import Model
from keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping
from yolo3.model import preprocess_true_boxes, yolo_body, tiny_yolo_body, yolo_loss
from yolo3.utils import get_random_data
def _main():
    """Wire up paths, anchors and classes, then train YOLOv3 on VOC data."""
    annotation_path = 'model_data/2007_train.txt'
    log_dir = 'model_data/logs/'
    classes_path = 'model_data/my_class.txt'
    anchors_path = 'model_data/yolo_anchors.txt'
    class_names = get_classes(classes_path)
    anchors = get_anchors(anchors_path)
    input_shape = (416,416)  # multiple of 32, hw
    model = create_model(input_shape, anchors, len(class_names) )
    train(model, annotation_path, input_shape, anchors, len(class_names), log_dir=log_dir)
def train(model, annotation_path, input_shape, anchors, num_classes, log_dir='logs/'):
    """Fit the composed YOLO-loss model on the annotation file's samples.

    The Lambda loss layer IS the model output, so Keras's loss is just the
    identity on y_pred; y_true dummies are zeros.
    """
    model.compile(optimizer='adam', loss={
        'yolo_loss': lambda y_true, y_pred: y_pred})
    logging = TensorBoard(log_dir=log_dir)
    # Checkpoint keeps only the best weights (by validation loss) per epoch.
    checkpoint = ModelCheckpoint(log_dir + "ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5",
        monitor='val_loss', save_weights_only=True, save_best_only=True, period=1)
    batch_size = 1
    val_split = 0.5  # half the annotations are held out for validation
    with open(annotation_path) as f:
        lines = f.readlines()
    np.random.shuffle(lines)
    num_val = int(len(lines)*val_split)
    num_train = len(lines) - num_val
    print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))
    # NOTE(review): `logging` and `checkpoint` callbacks are created but never
    # passed to fit_generator — confirm whether that is intentional.
    model.fit_generator(data_generator_wrap(lines[:num_train], batch_size, input_shape, anchors, num_classes),
            steps_per_epoch=max(1, num_train//batch_size),
            validation_data=data_generator_wrap(lines[num_train:], batch_size, input_shape, anchors, num_classes),
            validation_steps=max(1, num_val//batch_size),
            epochs=100,
            initial_epoch=0)
    model.save_weights(log_dir + 'trained_weights.h5')
def get_classes(classes_path):
    """Read class names from a text file, one name per line, stripped."""
    with open(classes_path) as handle:
        return [line.strip() for line in handle.readlines()]
def get_anchors(anchors_path):
    """Load YOLO anchors from a single comma-separated line.

    Returns an (N, 2) numpy array of width/height pairs.
    """
    with open(anchors_path) as handle:
        raw = handle.readline()
    values = [float(token) for token in raw.split(',')]
    return np.array(values).reshape(-1, 2)
def create_model(input_shape, anchors, num_classes, load_pretrained=False, freeze_body=False,
            weights_path='model_data/yolo_weights.h5'):
    """Build a trainable YOLOv3 graph whose single output is the loss tensor.

    y_true placeholders are created at the three detection scales
    (stride 32/16/8); the Lambda layer wires model outputs + y_true into
    yolo_loss so Keras can train on an identity loss.
    """
    K.clear_session() # get a new session
    image_input = Input(shape=(None, None, 3))
    h, w = input_shape
    num_anchors = len(anchors)
    # One y_true input per detection scale; each scale gets a third of anchors.
    y_true = [Input(shape=(h//{0:32, 1:16, 2:8}[l], w//{0:32, 1:16, 2:8}[l], \
        num_anchors//3, num_classes+5)) for l in range(3)]
    model_body = yolo_body(image_input, num_anchors//3, num_classes)
    print('Create YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))
    if load_pretrained:
        model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)
        print('Load weights {}.'.format(weights_path))
        if freeze_body:
            # Do not freeze 3 output layers.
            num = len(model_body.layers)-7
            for i in range(num): model_body.layers[i].trainable = False
            print('Freeze the first {} layers of total {} layers.'.format(num, len(model_body.layers)))
    model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',
        arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.5})(
        [*model_body.output, *y_true])
    model = Model([model_body.input, *y_true], model_loss)
    return model
def data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes):
    """Endless generator yielding augmented (inputs, dummy-zeros) batches.

    Each yielded element is ([images, *y_true], zeros) to match the
    identity-loss model built by create_model().
    """
    n = len(annotation_lines)
    np.random.shuffle(annotation_lines)
    i = 0
    while True:
        image_data = []
        box_data = []
        for b in range(batch_size):
            i %= n  # wrap around the dataset indefinitely
            # random=True applies data augmentation per sample.
            image, box = get_random_data(annotation_lines[i], input_shape, random=True)
            image_data.append(image)
            box_data.append(box)
            i += 1
        image_data = np.array(image_data)
        box_data = np.array(box_data)
        y_true = preprocess_true_boxes(box_data, input_shape, anchors, num_classes)
        yield [image_data, *y_true], np.zeros(batch_size)
def data_generator_wrap(annotation_lines, batch_size, input_shape, anchors, num_classes):
    """Validated front-end for data_generator.

    Returns None when there is nothing to generate (no annotations or a
    non-positive batch size); otherwise delegates to data_generator.
    """
    if len(annotation_lines) == 0 or batch_size <= 0:
        return None
    return data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes)
# Script entry point: run the training pipeline (_main is defined elsewhere
# in this file).
if __name__ == '__main__':
    _main()
| 42.890909
| 114
| 0.687368
|
4a06c81783fcb4e35bab25411155a9e9b0e24b3f
| 909
|
py
|
Python
|
setup.py
|
Raguggg/pywhatsbomb
|
5f43083e9487f8e7867fac531635a26204f80c93
|
[
"MIT"
] | null | null | null |
setup.py
|
Raguggg/pywhatsbomb
|
5f43083e9487f8e7867fac531635a26204f80c93
|
[
"MIT"
] | null | null | null |
setup.py
|
Raguggg/pywhatsbomb
|
5f43083e9487f8e7867fac531635a26204f80c93
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
def readme() -> str:
    """Return the contents of README.txt for use as the long description.

    Reads with an explicit UTF-8 encoding so the result does not depend on
    the platform's default locale encoding (which differs on Windows and
    previously could raise UnicodeDecodeError for non-ASCII content).
    """
    with open("README.txt", encoding="utf-8") as readme_file:
        return readme_file.read()
# Trove classifiers describing the supported environments; the full list of
# valid strings is maintained on PyPI.
classifiers = [
    'Development Status :: 5 - Production/Stable',
    'Intended Audience :: Education',
    'Operating System :: Microsoft :: Windows :: Windows 10',
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python :: 3'
]
# Package metadata consumed by setuptools when building/installing.
# NOTE(review): long_description_content_type says markdown but readme()
# reads README.txt — confirm the README's actual format.
setup(
    name='pywhatsbomb',
    version='2.1.1.5',
    description='It is a power full tool for whatsapp prank',
    long_description=readme(),
    long_description_content_type="text/markdown",
    url='',
    author='Ragu G ',
    author_email='ragu19062002@gmail.com',
    license='MIT',
    classifiers=classifiers,
    keywords='whatsapp,whatsappauto,auto,whatsappbot,bot,whatsapp tool',
    packages=find_packages(),
    install_requires=['PyAutoGUI']
)
| 25.971429
| 73
| 0.636964
|
4a06c86195e5c818166f2c12d7a8542ae3a9fb6a
| 1,530
|
py
|
Python
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_endpoint_service_list_endpoints_sync.py
|
sakagarwal/python-aiplatform
|
62b4a1ea589235910c6e87f027899a29bf1bacb1
|
[
"Apache-2.0"
] | 1
|
2022-03-30T05:23:29.000Z
|
2022-03-30T05:23:29.000Z
|
samples/generated_samples/aiplatform_generated_aiplatform_v1_endpoint_service_list_endpoints_sync.py
|
sakagarwal/python-aiplatform
|
62b4a1ea589235910c6e87f027899a29bf1bacb1
|
[
"Apache-2.0"
] | null | null | null |
samples/generated_samples/aiplatform_generated_aiplatform_v1_endpoint_service_list_endpoints_sync.py
|
sakagarwal/python-aiplatform
|
62b4a1ea589235910c6e87f027899a29bf1bacb1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListEndpoints
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1_EndpointService_ListEndpoints_sync]
from google.cloud import aiplatform_v1
def sample_list_endpoints():
    """Generated sample: list Vertex AI endpoints under a parent resource.

    Pages through ListEndpoints results and prints each endpoint.
    """
    # Create a client
    client = aiplatform_v1.EndpointServiceClient()
    # Initialize request argument(s)
    request = aiplatform_v1.ListEndpointsRequest(
        parent="parent_value",  # placeholder; supply the real parent resource name
    )
    # Make the request
    page_result = client.list_endpoints(request=request)
    # Handle the response
    for response in page_result:
        print(response)
# [END aiplatform_generated_aiplatform_v1_EndpointService_ListEndpoints_sync]
| 32.553191
| 85
| 0.763399
|
4a06c8ae565632ac79d897d3726955f8f6103433
| 5,226
|
py
|
Python
|
mlonmcu/flow/tvm/backend/tvmrt.py
|
tum-ei-eda/mlonmcu
|
0d5c114b85f2ae9e48e7d815bfce8df04c2bdb46
|
[
"Apache-2.0"
] | 3
|
2022-03-07T09:38:12.000Z
|
2022-03-24T09:28:36.000Z
|
mlonmcu/flow/tvm/backend/tvmrt.py
|
tum-ei-eda/mlonmcu
|
0d5c114b85f2ae9e48e7d815bfce8df04c2bdb46
|
[
"Apache-2.0"
] | 24
|
2022-03-07T16:09:32.000Z
|
2022-03-31T08:08:51.000Z
|
mlonmcu/flow/tvm/backend/tvmrt.py
|
tum-ei-eda/mlonmcu
|
0d5c114b85f2ae9e48e7d815bfce8df04c2bdb46
|
[
"Apache-2.0"
] | 1
|
2022-03-07T09:38:17.000Z
|
2022-03-07T09:38:17.000Z
|
#
# Copyright (c) 2022 TUM Department of Electrical and Computer Engineering.
#
# This file is part of MLonMCU.
# See https://github.com/tum-ei-eda/mlonmcu.git for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import tempfile
# import json
import tarfile
from pathlib import Path
from .backend import TVMBackend
from .wrapper import generate_tvmrt_wrapper, generate_wrapper_header
from mlonmcu.flow.backend import main
from mlonmcu.artifact import Artifact, ArtifactFormat
class TVMRTBackend(TVMBackend):
    """MLonMCU backend using TVM's graph runtime (tvmrt).

    Compiles the model via tvmc into an MLF archive and generates a C
    wrapper around the graph executor with a statically sized arena.
    """

    FEATURES = [
        *TVMBackend.FEATURES,
        "debug_arena",
    ]
    DEFAULTS = {
        **TVMBackend.DEFAULTS,
        # Cannot be determined automatically, so default to a very large 1 MiB.
        "arena_size": 2**20,
        # TODO: arena size warning!
    }
    name = "tvmrt"

    @property
    def arena_size(self):
        """Configured workspace arena size in bytes, or None if unset/zero."""
        size = self.config["arena_size"]
        return int(size) if size else None

    def get_tvmc_compile_args(self):
        """Extend the base tvmc args for the graph executor with a CRT system lib
        and without linking parameters into the executor."""
        return super().get_tvmc_compile_args("graph") + [
            "--runtime-crt-system-lib",
            str(1),
            "--executor-graph-link-params",
            str(0),
        ]

    def get_graph_and_params_from_mlf(self, path):
        """Extract the executor graph JSON (text) and parameter blob (bytes)
        from an unpacked Model Library Format directory at *path*."""
        graph = None
        with open(Path(path) / "executor-config" / "graph" / "graph.json", "r") as handle:
            graph = handle.read()
        params = None
        with open(Path(path) / "parameters" / "default.params", "rb") as handle:
            params = handle.read()
        return graph, params

    def generate_code(self, verbose=False):
        """Compile the model with tvmc and collect the resulting artifacts
        (MLF archive, optional dumps, wrapper sources, workspace size, log)
        into self.artifacts."""
        artifacts = []
        assert self.model is not None
        full = False  # Required due to bug in TVM
        dump = ["c", "relay"] if full else []
        with tempfile.TemporaryDirectory() as temp_dir:
            out_path = Path(temp_dir) / f"{self.prefix}.tar"
            out = self.invoke_tvmc_compile(out_path, dump=dump, verbose=verbose)
            mlf_path = Path(temp_dir) / "mlf"
            tarfile.open(out_path).extractall(mlf_path)
            # with open(mlf_path / "metadata.json") as handle:
            #     metadata = json.load(handle)
            # metadata_txt = json.dumps(metadata)
            with open(out_path, "rb") as handle:
                mlf_data = handle.read()
            artifacts.append(
                Artifact(
                    f"{self.prefix}.tar",
                    raw=mlf_data,
                    fmt=ArtifactFormat.MLF,
                    archive=True,
                )
            )
            if full:
                # Optional human-readable dumps produced next to the archive.
                with open(str(out_path) + ".c", "r") as handle:
                    mod_src = handle.read()
                artifacts.append(
                    Artifact(
                        f"{self.prefix}.c",
                        content=mod_src,
                        fmt=ArtifactFormat.SOURCE,
                        optional=True,
                    )
                )
                with open(str(out_path) + ".relay", "r") as handle:
                    mod_txt = handle.read()
                artifacts.append(
                    Artifact(
                        f"{self.prefix}.relay",
                        content=mod_txt,
                        fmt=ArtifactFormat.TEXT,
                        optional=True,
                    )
                )
            generate_wrapper = True
            if generate_wrapper:
                workspace_size = self.arena_size
                assert workspace_size >= 0
                graph, params = self.get_graph_and_params_from_mlf(mlf_path)
                # C wrapper that instantiates the graph executor with the arena.
                wrapper_src = generate_tvmrt_wrapper(graph, params, self.model_info, workspace_size)
                artifacts.append(Artifact("rt_wrapper.c", content=wrapper_src, fmt=ArtifactFormat.SOURCE))
                header_src = generate_wrapper_header()
                artifacts.append(Artifact("tvm_wrapper.h", content=header_src, fmt=ArtifactFormat.SOURCE))
        workspace_size_artifact = Artifact(
            "tvmrt_workspace_size.txt", content=f"{workspace_size}", fmt=ArtifactFormat.TEXT
        )
        artifacts.append(workspace_size_artifact)
        stdout_artifact = Artifact(
            "tvmc_compile_out.log", content=out, fmt=ArtifactFormat.TEXT
        )  # TODO: rename to tvmrt_out.log?
        artifacts.append(stdout_artifact)
        # prepare -> common?
        # invoke_tvmc -> common?
        # generate_wrapper()
        self.artifacts = artifacts
# CLI entry point: run this backend standalone through the shared backend main().
if __name__ == "__main__":
    sys.exit(
        main(
            TVMRTBackend,
            args=sys.argv[1:],
        )
    )  # pragma: no cover
| 36.041379
| 106
| 0.561041
|
4a06c9179a1d83d35ee9319306557e56d4cc15e0
| 6,671
|
py
|
Python
|
main/bot.py
|
Balakae/fishing-bot
|
5d75a8371e00eed1f8e0aa91fc150e12182e25d7
|
[
"MIT"
] | null | null | null |
main/bot.py
|
Balakae/fishing-bot
|
5d75a8371e00eed1f8e0aa91fc150e12182e25d7
|
[
"MIT"
] | null | null | null |
main/bot.py
|
Balakae/fishing-bot
|
5d75a8371e00eed1f8e0aa91fc150e12182e25d7
|
[
"MIT"
] | null | null | null |
from cv2 import cv2
import mss
import numpy as np
import os
import time
#from pynput.mouse import Button, Controller
from pynput import mouse, keyboard
import random
import pydirectinput
class Fisher:
    """Screen-scraping automation bot that fishes in a game via template
    matching (OpenCV) and synthetic mouse/keyboard input (pydirectinput).

    Assumes a 1920x1080 game window; all click coordinates are hard-coded
    for that layout.
    """

    def __init__(self):
        self.stc = mss.mss()
        # Template images live in <repo root>/img next to this package.
        path = os.path.dirname(os.path.dirname(__file__))
        self.img_path = os.path.join(path, 'img')
        self.mouse = mouse.Controller()
        self.keyboard = keyboard.Controller()
        self.bar_top = 0
        self.bar_left = 0
        # Increase this limit if you have a larger basket
        self.fish_count = 0
        self.fish_limit = 6
        self.keep_fishing = True
        # Adding spot to update sell thresholds!
        self.sell_threshold = .8

    def fish(self):
        """Main loop: cast, collect caught fish, and sell when the basket
        reaches fish_limit. Runs until keep_fishing is set False."""
        while self.keep_fishing:
            if self.close_caught_fish():
                # We caught a fish
                self.fish_count += 1
                print(f"Fish Count: {self.fish_count}")
            if self.is_bobber():
                # Bobber still on screen: line is already out, just wait.
                print("FISH on SLEEPING!")
                time.sleep(10)
                continue
            if self.fish_count >= self.fish_limit:
                self.Sell_Fish()
                continue
            # Reset click with small random offsets so input looks less robotic.
            jitter = random.randint(-25, 25)
            cast_jitter = random.random()
            pydirectinput.click(800 + jitter,800 + jitter)
            time.sleep(1)
            # Hold duration controls the cast distance.
            self.Click_Location(800 + jitter,800 + jitter,.2 + cast_jitter)
            print("Throwing line")
            time.sleep(11)
            self.Click_Location(800 + jitter,800 + jitter,.5)
            time.sleep(.5)

    def is_bobber(self):
        """Return True if the bobber template is found on screen with >0.9
        confidence (i.e. a line is currently cast)."""
        img = self.Screen_Shot()
        bobber_img = cv2.imread(os.path.join(self.img_path, 'bobber.jpg'), cv2.IMREAD_UNCHANGED)
        result_try = cv2.matchTemplate(img, bobber_img, cv2.TM_CCOEFF_NORMED)
        _, max_val, _, max_loc = cv2.minMaxLoc(result_try)
        if max_val > .9:
            return True
        else:
            return False

    def Set_Bobber(self):
        """Repeatedly cast until the bobber template is located; return its
        (left, top) screen position with a small vertical offset."""
        while True:
            print("Reset Click.")
            pydirectinput.click(800,800)
            time.sleep(.6)
            self.Click_Location(800,800,1)
            time.sleep(11)
            pydirectinput.click(800,800)
            time.sleep(.6)
            print("finding Bobber")
            img = self.Screen_Shot()
            bobber_img = cv2.imread(os.path.join(self.img_path, 'bobber.jpg'), cv2.IMREAD_UNCHANGED)
            result_try = cv2.matchTemplate(img, bobber_img, cv2.TM_CCOEFF_NORMED)
            _, max_val, _, max_loc = cv2.minMaxLoc(result_try)
            if max_val > .9:
                print("Found it!!")
                new_max = max_loc
                bar_top = new_max[1] - 20
                bar_left = new_max[0]
                return bar_left, bar_top
            print(f"Current Max: {max_val} sleeping")

    def close_caught_fish(self):
        """Dismiss the catch dialog if present by clicking its yellow X.
        Returns True when a fish was caught (dialog was found)."""
        max_loc, max_val = self.Template_Match("YellowX.jpg", self.Screen_Shot())
        if max_val > .9:
            print("Pushing YellowX")
            # Two slightly offset clicks to be sure the dialog closes.
            self.Click_Location(max_loc[0] + 10, max_loc[1] + 10)
            self.Click_Location(max_loc[0] + 5, max_loc[1] + 5)
            # Means we caught a fish
            return True
        return False

    def Sell_Fish(self):
        """Walk to the shop, sell the basket's contents via the sell dialog,
        reset fish_count, and walk back to the fishing spot."""
        # Get to store if we are not there...
        self.keyboard.press(keyboard.Key.up)
        time.sleep(8)
        self.keyboard.release(keyboard.Key.up)
        self.keyboard.press(keyboard.Key.space)
        time.sleep(1)
        self.keyboard.release(keyboard.Key.space)
        max_loc, max_val = self.Template_Match("SellBox.jpg", self.Screen_Shot())
        if max_val > self.sell_threshold:
            print("We got fish to sell!")
            self.Click_Location(max_loc[0] + 20, max_loc[1] + 30)
            # Look for sell button
            time.sleep(1)
            print("Looking to for sell")
            max_loc, max_val = self.Template_Match("SellFor.jpg", self.Screen_Shot())
            if max_val > self.sell_threshold:
                print("Pushing Sell")
                self.Click_Location(max_loc[0] + 40, max_loc[1] + 10)
                time.sleep(1)
                print("Looking to for sell Green")
                max_loc, max_val = self.Template_Match("Sell.jpg", self.Screen_Shot())
                while max_val > self.sell_threshold:
                    print("Pushing Sell Green")
                    self.Click_Location(max_loc[0] + 10, max_loc[1] + 10)
                    # Get all the way through we return True for sold something
                    time.sleep(1)
                    max_loc, max_val = self.Template_Match("Sell.jpg", self.Screen_Shot())
        time.sleep(1)
        self.fish_count = 0
        # Close the shop UI.
        self.Click_Location(200,500)
        self.Click_Location(200,500)
        time.sleep(1)
        self.Click_Location(100,500)
        # Go back fishing...
        self.keyboard.press(keyboard.Key.down)
        time.sleep(8)
        self.keyboard.release(keyboard.Key.down)
        self.keyboard.press(keyboard.Key.down)
        time.sleep(2)
        self.keyboard.release(keyboard.Key.down)

    def Screen_Shot(self, left=0, top=0, width=1920, height=1080):
        """Grab a region of the screen and return it as a numpy image.

        NOTE(review): a fresh mss instance is created per call even though
        self.stc exists — presumably for thread safety; confirm.
        """
        stc = mss.mss()
        scr = stc.grab({
            'left': left,
            'top': top,
            'width': width,
            'height': height
        })
        img = np.array(scr)
        # NOTE(review): cv2.IMREAD_COLOR is an imread flag, not a color
        # conversion code — cv2.COLOR_BGRA2BGR may have been intended; the
        # numeric value happens to select some conversion. Confirm.
        img = cv2.cvtColor(img, cv2.IMREAD_COLOR)
        return img

    # Compare to images return max value / location
    def Template_Match(self, needle, haystack):
        """Match template image file *needle* (from img_path) against the
        *haystack* screenshot; return (best_location, best_score)."""
        sell_box_img = cv2.imread(os.path.join(self.img_path, needle), cv2.IMREAD_UNCHANGED)
        result_try = cv2.matchTemplate(haystack, sell_box_img, cv2.TM_CCOEFF_NORMED)
        _, max_val, _, max_loc = cv2.minMaxLoc(result_try)
        return (max_loc, max_val)

    def Click_Location(self, x, y, wait=0):
        """Press-and-hold the left mouse button at (x, y) for *wait* seconds."""
        pydirectinput.moveTo(x, y)
        pydirectinput.mouseDown()
        time.sleep(wait)
        pydirectinput.mouseUp()

    def start_fresh(self):
        """Reload the game client (Ctrl+R) and confirm with Enter."""
        time.sleep(5)
        self.keyboard.press(keyboard.Key.ctrl)
        self.keyboard.press('r')
        time.sleep(1)
        self.keyboard.release(keyboard.Key.ctrl)
        self.keyboard.release('r')
        time.sleep(1)
        self.keyboard.press(keyboard.Key.enter)
        self.keyboard.release(keyboard.Key.enter)
# Test our classes and functions
# Manual smoke-test entry point; main.py is the normal launcher.
if __name__ == "__main__":
    print("Unless your testing run main.py")
    fisher = Fisher()
    time.sleep(5)
    fisher.Sell_Fish()
# pip install -r requirements.txt
| 34.386598
| 100
| 0.570529
|
4a06c96834cebd3d034658fbb6fe392d1ef00835
| 978
|
py
|
Python
|
ferkeeRun.py
|
pyrasun/ferkee
|
8c9986c748662aa58c95b05cb2ce43a1ad4897a6
|
[
"BSD-2-Clause"
] | null | null | null |
ferkeeRun.py
|
pyrasun/ferkee
|
8c9986c748662aa58c95b05cb2ce43a1ad4897a6
|
[
"BSD-2-Clause"
] | 6
|
2017-12-02T01:24:17.000Z
|
2018-01-02T18:10:37.000Z
|
ferkeeRun.py
|
pyrasun/ferkee
|
8c9986c748662aa58c95b05cb2ce43a1ad4897a6
|
[
"BSD-2-Clause"
] | null | null | null |
# Command-line launcher for the ferkee Scrapy crawler (crawls ferc.gov).
import sys
import pprint
# NOTE(review): Python 2 style module name; on Python 3 this is `configparser`
# — confirm the target interpreter version.
import ConfigParser as configparser
import argparse
import ferkee_props
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from scrapy.utils.log import configure_logging
# CLI: path to the properties file, plus switches that disable database
# writes and outgoing email.
parser = argparse.ArgumentParser()
parser.add_argument("-p", "--properties", help="Path to ferkee properties file", action="store")
parser.add_argument("--nodb", action="store_true")
parser.add_argument("--noemail", action="store_true")
args = parser.parse_args()
# Load the [Ferkee] section into the shared module-level property dict,
# then layer the CLI toggles on top.
config = configparser.RawConfigParser()
config.read(args.properties)
ferkee_props.props = dict(config.items("Ferkee"))
ferkee_props.props['noDBMode'] = args.nodb
ferkee_props.props['noEmail'] = args.noemail
configure_logging({'LOG_FORMAT': '%(levelname)s: %(message)s'})
process = CrawlerProcess(get_project_settings())
ferkee_props.dump_props()
process.crawl('ferkee', domain='ferc.gov')
process.start() # the script will block here until the crawling is finished
| 29.636364
| 96
| 0.787321
|
4a06c9a0bd7c32adce14488cdce87d23e954ba55
| 2,454
|
py
|
Python
|
covid/views.py
|
CMU-TRP/podd-api
|
6eb5c4598f848f75d131287163cd9babf2a0a0fc
|
[
"MIT"
] | null | null | null |
covid/views.py
|
CMU-TRP/podd-api
|
6eb5c4598f848f75d131287163cd9babf2a0a0fc
|
[
"MIT"
] | null | null | null |
covid/views.py
|
CMU-TRP/podd-api
|
6eb5c4598f848f75d131287163cd9babf2a0a0fc
|
[
"MIT"
] | 1
|
2021-11-30T15:25:40.000Z
|
2021-11-30T15:25:40.000Z
|
from datetime import timedelta, date
import pytz
from django.shortcuts import render
from rest_framework.authentication import TokenAuthentication, SessionAuthentication
from rest_framework.decorators import api_view, authentication_classes, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from accounts.models import Authority
from common.utils import thai_strftime
from covid.models import MonitoringReport, DailySummary, DailySummaryByVillage
from covid.serializers import MonitoringReportSerializer, DailySummaryByVillageSerializer, DailySummarySerializer
from datetime import datetime
@api_view(['GET'])
@authentication_classes((TokenAuthentication, SessionAuthentication))
@permission_classes((IsAuthenticated,))
def list_monitoring(request):
    """Return active monitoring reports whose window has not yet expired.

    Query params:
        all: when present/truthy, include reports for every authority the
             requesting user belongs to; otherwise only reports created by
             the requesting user.

    Returns a DRF Response with serialized MonitoringReport objects.
    """
    all_flag = request.GET.get('all')
    # NOTE: filtering uses the server's local date; the previously created
    # (and unused) Asia/Bangkok timezone object was removed.
    today = date.today()
    user = request.user
    reports = MonitoringReport.objects.filter(
        active=True,
        until__gte=today,
    ).prefetch_related("report")
    if all_flag:
        reports = reports.filter(authority__in=user.authority_users.all())
    else:
        reports = reports.filter(reporter_id=user.id)
    serializer = MonitoringReportSerializer(reports, many=True)
    return Response(serializer.data)
def daily_summary(request, authority_id):
    """Render the COVID daily-summary page for one authority on one date.

    Expects a ``date`` query parameter in YYYY-MM-DD format; totals across
    all villages are accumulated in a single pass over the queryset.
    """
    requested_date = datetime.strptime(request.GET.get('date'), "%Y-%m-%d").date()
    summary = DailySummary.objects.get(authority_id=authority_id, date=requested_date)
    village_rows = DailySummaryByVillage.objects.filter(
        authority_id=authority_id, date=requested_date
    ).order_by('village_no')
    authority = Authority.default_manager.get(pk=authority_id)
    totals = {"low": 0, "medium": 0, "high": 0, "all": 0}
    for row in village_rows:
        totals["low"] += row.low_risk
        totals["medium"] += row.medium_risk
        totals["high"] += row.high_risk
        totals["all"] += row.total
    return render(request, 'covid/daily_summary.html', {
        "daily_summary": summary,
        "daily_summary_by_village": village_rows,
        "date": requested_date,
        "th_date": thai_strftime(requested_date),
        "authority": authority,
        "total_low_risk": totals["low"],
        "total_medium_risk": totals["medium"],
        "total_high_risk": totals["high"],
        "total_total": totals["all"],
    })
| 36.626866
| 115
| 0.749796
|
4a06c9e3bca9fe78bdc94f0f2bc7af5e3342b5d0
| 5,164
|
py
|
Python
|
tests/unit/test_validation_error_collection.py
|
futureironman/braintree_python
|
26bb8a857bc29322a8bca2e8e0fe6d99cfe6a1ac
|
[
"MIT"
] | 182
|
2015-01-09T05:26:46.000Z
|
2022-03-16T14:10:06.000Z
|
tests/unit/test_validation_error_collection.py
|
futureironman/braintree_python
|
26bb8a857bc29322a8bca2e8e0fe6d99cfe6a1ac
|
[
"MIT"
] | 95
|
2015-02-24T23:29:56.000Z
|
2022-03-13T03:27:58.000Z
|
tests/unit/test_validation_error_collection.py
|
futureironman/braintree_python
|
26bb8a857bc29322a8bca2e8e0fe6d99cfe6a1ac
|
[
"MIT"
] | 93
|
2015-02-19T17:59:06.000Z
|
2022-03-19T17:01:25.000Z
|
from tests.test_helper import *
class TestValidationErrorCollection(unittest.TestCase):
    """Unit tests for ValidationErrorCollection: indexing, nested access via
    for_object/on, deep_size/deep_errors traversal, and len semantics."""

    def test_it_builds_an_array_of_errors_given_an_array_of_hashes(self):
        """Top-level errors are exposed by index with attribute/code/message."""
        test_hash = {"errors": [{"attribute": "some model attribute", "code": 1, "message": "bad juju"}]}
        errors = ValidationErrorCollection(test_hash)
        error = errors[0]
        self.assertEqual("some model attribute", error.attribute)
        self.assertEqual(1, error.code)
        self.assertEqual("bad juju", error.message)

    def test_for_object_provides_access_to_nested_attributes(self):
        """for_object(name).on(attr) drills into a nested error collection."""
        test_hash = {
            "errors": [{"attribute": "some model attribute", "code": 1, "message": "bad juju"}],
            "nested": {
                "errors": [{"attribute": "number", "code": 2, "message": "badder juju"}]
            }
        }
        errors = ValidationErrorCollection(test_hash)
        error = errors.for_object("nested").on("number")[0]
        self.assertEqual("number", error.attribute)
        self.assertEqual(2, error.code)
        self.assertEqual("badder juju", error.message)

    def test_deep_size_non_nested(self):
        """deep_size counts all errors at a single level."""
        test_hash = {
            "errors": [
                {"attribute": "one", "code": 1, "message": "is too long"},
                {"attribute": "two", "code": 2, "message": "contains invalid chars"},
                {"attribute": "thr", "code": 3, "message": "is invalid"}
            ]
        }
        self.assertEqual(3, ValidationErrorCollection(test_hash).deep_size)

    def test_deep_size_nested(self):
        """deep_size includes errors from one nested level."""
        test_hash = {
            "errors": [{"attribute": "one", "code": 1, "message": "is too long"}],
            "nested": {
                "errors": [{"attribute": "two", "code": 2, "message": "contains invalid chars"}]
            }
        }
        self.assertEqual(2, ValidationErrorCollection(test_hash).deep_size)

    def test_deep_size_multiple_nestings(self):
        """deep_size recurses through arbitrarily deep nesting."""
        test_hash = {
            "errors": [{"attribute": "one", "code": 1, "message": "is too long"}],
            "nested": {
                "errors": [{"attribute": "two", "code": 2, "message": "contains invalid chars"}],
                "nested_again": {
                    "errors": [
                        {"attribute": "three", "code": 3, "message": "super nested"},
                        {"attribute": "four", "code": 4, "message": "super nested 2"}
                    ]
                }
            }
        }
        self.assertEqual(4, ValidationErrorCollection(test_hash).deep_size)

    def test_len_multiple_nestings(self):
        """len() reports only the current level's errors, not nested ones."""
        test_hash = {
            "errors": [{"attribute": "one", "code": 1, "message": "is too long"}],
            "nested": {
                "errors": [{"attribute": "two", "code": 2, "message": "contains invalid chars"}],
                "nested_again": {
                    "errors": [
                        {"attribute": "three", "code": 3, "message": "super nested"},
                        {"attribute": "four", "code": 4, "message": "super nested 2"}
                    ]
                }
            }
        }
        validation_error_collection = ValidationErrorCollection(test_hash)
        self.assertEqual(1, len(validation_error_collection))
        self.assertEqual(1, len(validation_error_collection.for_object("nested")))
        self.assertEqual(2, len(validation_error_collection.for_object("nested").for_object("nested_again")))

    def test_deep_errors(self):
        """deep_errors flattens every error from all nesting levels, in order."""
        test_hash = {
            "errors": [{"attribute": "one", "code": 1, "message": "is too long"}],
            "nested": {
                "errors": [{"attribute": "two", "code": 2, "message": "contains invalid chars"}],
                "nested_again": {
                    "errors": [
                        {"attribute": "three", "code": 3, "message": "super nested"},
                        {"attribute": "four", "code": 4, "message": "super nested 2"}
                    ]
                }
            }
        }
        validation_error_collection = ValidationErrorCollection(test_hash)
        self.assertEqual([1, 2, 3, 4], [error.code for error in validation_error_collection.deep_errors])

    def test_errors(self):
        """errors yields only the current level's errors at each depth."""
        test_hash = {
            "errors": [{"attribute": "one", "code": 1, "message": "is too long"}],
            "nested": {
                "errors": [{"attribute": "two", "code": 2, "message": "contains invalid chars"}],
                "nested_again": {
                    "errors": [
                        {"attribute": "three", "code": 3, "message": "super nested"},
                        {"attribute": "four", "code": 4, "message": "super nested 2"}
                    ]
                }
            }
        }
        validation_error_collection = ValidationErrorCollection(test_hash)
        self.assertEqual([1], [error.code for error in validation_error_collection.errors])
        self.assertEqual([2], [error.code for error in validation_error_collection.for_object("nested").errors])
        self.assertEqual([3, 4], [error.code for error in validation_error_collection.for_object("nested").for_object("nested_again").errors])
| 44.517241
| 142
| 0.537374
|
4a06ca0f124311ce6fb2e8458823b029bce2c684
| 6,870
|
py
|
Python
|
util.py
|
Fanta007/Graph-U-Nets
|
96fcd45fd1f486d2e9e45cde169b7cadaca86247
|
[
"MIT"
] | 1
|
2021-07-12T18:14:14.000Z
|
2021-07-12T18:14:14.000Z
|
util.py
|
Fanta007/Graph-U-Nets
|
96fcd45fd1f486d2e9e45cde169b7cadaca86247
|
[
"MIT"
] | null | null | null |
util.py
|
Fanta007/Graph-U-Nets
|
96fcd45fd1f486d2e9e45cde169b7cadaca86247
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import numpy as np
import networkx as nx
import argparse
from sklearn.model_selection import StratifiedKFold
# added by Guan
import matplotlib.pyplot as plt
# Module-level argument parsing: parsed once on import so every module can
# read hyperparameters from `cmd_args`.
cmd_opt = argparse.ArgumentParser(description='Argparser for graph_classification')
cmd_opt.add_argument('-mode', default='cpu', help='cpu/gpu')
cmd_opt.add_argument('-data', default=None, help='data folder name')
#cmd_opt.add_argument('-batch_size', type=int, default=50, help='minibatch size')
cmd_opt.add_argument('-batch_size', type=int, default= 1, help='minibatch size')
cmd_opt.add_argument('-seed', type=int, default=1, help='seed')
cmd_opt.add_argument('-feat_dim', type=int, default=0, help='dimension of discrete node feature (maximum node tag)')
cmd_opt.add_argument('-num_class', type=int, default=0, help='#classes')
cmd_opt.add_argument('-fold', type=int, default=1, help='fold (1..10)')
cmd_opt.add_argument('-test_number', type=int, default=0, help='if specified, will overwrite -fold and use the last -test_number graphs as testing data')
#cmd_opt.add_argument('-num_epochs', type=int, default=1000, help='number of epochs')
cmd_opt.add_argument('-num_epochs', type=int, default=10, help='number of epochs')
cmd_opt.add_argument('-latent_dim', type=str, default='64', help='dimension(s) of latent layers')
cmd_opt.add_argument('-sortpooling_k', type=float, default=30, help='number of nodes kept after SortPooling')
cmd_opt.add_argument('-out_dim', type=int, default=1024, help='s2v output size')
cmd_opt.add_argument('-hidden', type=int, default=100, help='dimension of regression')
cmd_opt.add_argument('-max_lv', type=int, default=4, help='max rounds of message passing')
cmd_opt.add_argument('-learning_rate', type=float, default=0.0001, help='init learning_rate')
cmd_opt.add_argument('-dropout', type=bool, default=False, help='whether add dropout after dense layer')
cmd_opt.add_argument('-printAUC', type=bool, default=False, help='whether to print AUC (for binary classification only)')
cmd_opt.add_argument('-extract_features', type=bool, default=False, help='whether to extract final graph features')
# parse_known_args tolerates extra flags from embedding frameworks.
cmd_args, _ = cmd_opt.parse_known_args()
# '-latent_dim' accepts dash-separated sizes, e.g. "64-32"; a single value is
# unwrapped from the list for backward compatibility.
cmd_args.latent_dim = [int(x) for x in cmd_args.latent_dim.split('-')]
if len(cmd_args.latent_dim) == 1:
    cmd_args.latent_dim = cmd_args.latent_dim[0]
class S2VGraph(object):
    """Lightweight container holding one graph sample for the model.

    Stores the node count, integer node tags, an optional node-feature
    matrix, per-node degrees, and a flattened int32 array of edge endpoint
    pairs laid out as [src0, dst0, src1, dst1, ...].
    """

    def __init__(self, g, label, node_tags=None, node_features=None):
        """
        g: a networkx graph
        label: an integer graph label
        node_tags: a list of integer node tags
        node_features: a numpy array of continuous node features
        """
        self.num_nodes = len(node_tags)
        self.node_tags = node_tags
        self.label = label
        # numpy array of shape (num_nodes, feature_dim), or None
        self.node_features = node_features
        self.degs = list(dict(g.degree).values())
        edges = list(g.edges())
        if edges:
            src_nodes, dst_nodes = zip(*edges)
            self.num_edges = len(src_nodes)
            pairs = np.empty((self.num_edges, 2), dtype=np.int32)
            pairs[:, 0] = src_nodes
            pairs[:, 1] = dst_nodes
            self.edge_pairs = pairs.flatten()
        else:
            self.num_edges = 0
            self.edge_pairs = np.array([])
def load_data():
    """Parse the graph-classification dataset file and return a stratified
    (train_graphs, test_graphs) split.

    File format: first line = number of graphs; per graph, a header line
    "num_nodes label" followed by one line per node:
    "tag num_neighbors neighbor_ids... [float attributes...]".
    Also populates cmd_args.num_class / feat_dim / attr_dim as side effects.
    """
    print('loading data')
    g_list = []
    label_dict = {}   # maps raw graph labels -> contiguous 0..C-1 ids
    feat_dict = {}    # maps raw node tags -> contiguous ids
    # with open('data/%s/%s.txt' % (cmd_args.data, cmd_args.data), 'r') as f:
    # NOTE(review): dataset name is hard-coded to 'DD' here, ignoring
    # cmd_args.data (original line kept commented above) — confirm intended.
    with open('data/%s/%s.txt' % ('DD', 'DD'), 'r') as f:
        n_g = int(f.readline().strip())
        for i in range(n_g):
            row = f.readline().strip().split()
            n, l = [int(w) for w in row]
            if not l in label_dict:
                mapped = len(label_dict)
                label_dict[l] = mapped
            g = nx.Graph()
            node_tags = []
            node_features = []
            n_edges = 0
            for j in range(n):
                g.add_node(j)
                row = f.readline().strip().split()
                # tmp = index just past the neighbor list (tag + count + ids)
                tmp = int(row[1]) + 2
                if tmp == len(row):
                    # no node attributes
                    row = [int(w) for w in row]
                    attr = None
                else:
                    row, attr = [int(w) for w in row[:tmp]], np.array([float(w) for w in row[tmp:]])
                if not row[0] in feat_dict:
                    mapped = len(feat_dict)
                    feat_dict[row[0]] = mapped
                node_tags.append(feat_dict[row[0]])
                # NOTE(review): after the branches above len(row) == tmp in both
                # cases, so this condition appears never true and node attributes
                # are never collected — looks like it should test whether attr is
                # not None; confirm against datasets that carry attributes.
                if tmp > len(row):
                    node_features.append(attr)
                n_edges += row[1]
                for k in range(2, len(row)):
                    g.add_edge(j, row[k])
            if node_features != []:
                node_features = np.stack(node_features)
                node_feature_flag = True
            else:
                node_features = None
                node_feature_flag = False
            # Here we draw the graph g
            # if i < 10:
            #     position = nx.spring_layout(g)
            #     plt.figure()
            #     nx.draw(g, pos = position, node_size = 50)
            #     plt.show()
            #assert len(g.edges()) * 2 == n_edges (some graphs in COLLAB have self-loops, ignored here)
            assert len(g) == n
            g_list.append(S2VGraph(g, l, node_tags, node_features))
    # Remap every graph's label to its contiguous id.
    for g in g_list:
        g.label = label_dict[g.label]
    cmd_args.num_class = len(label_dict)
    cmd_args.feat_dim = len(feat_dict) # maximum node label (tag)
    # NOTE(review): flag/features from the *last* graph in the loop decide
    # attr_dim for the whole dataset — assumes attribute presence is uniform.
    if node_feature_flag == True:
        cmd_args.attr_dim = node_features.shape[1] # dim of node features (attributes)
    else:
        cmd_args.attr_dim = 0
    print('# classes: %d' % cmd_args.num_class)
    print('# maximum node tag: %d' % cmd_args.feat_dim)
    return sep_data(g_list, cmd_args.fold-1)
    # Dead code below: the original fold-index-file based split, superseded
    # by sep_data's StratifiedKFold.
    # if cmd_args.test_number == 0:
    #     train_idxes = np.loadtxt('data/%s/10fold_idx/train_idx-%d.txt' % (cmd_args.data, cmd_args.fold), dtype=np.int32).tolist()
    #     test_idxes = np.loadtxt('data/%s/10fold_idx/test_idx-%d.txt' % (cmd_args.data, cmd_args.fold), dtype=np.int32).tolist()
    #     return [g_list[i] for i in train_idxes], [g_list[i] for i in test_idxes]
    # else:
    #     return g_list[: n_g - cmd_args.test_number], g_list[n_g - cmd_args.test_number :]
def sep_data(graph_list, fold_idx, seed=0):
    """Split *graph_list* into train/test lists using stratified 10-fold CV.

    Folds are stratified on each graph's .label; *fold_idx* (0..9) selects
    which fold becomes the test set. *seed* fixes the shuffle for
    reproducibility.
    """
    splitter = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)
    labels = [graph.label for graph in graph_list]
    folds = list(splitter.split(np.zeros(len(labels)), labels))
    train_idx, test_idx = folds[fold_idx]
    train_graphs = [graph_list[i] for i in train_idx]
    test_graphs = [graph_list[i] for i in test_idx]
    return train_graphs, test_graphs
| 43.757962
| 153
| 0.615866
|
4a06ca29f8d3167c6502cb66747c9e193d4a9e43
| 8
|
py
|
Python
|
test/integration/Value/infinity.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 30
|
2019-10-29T12:47:50.000Z
|
2022-02-12T06:41:39.000Z
|
test/integration/Value/infinity.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 247
|
2017-09-21T17:11:18.000Z
|
2019-10-08T12:59:07.000Z
|
test/integration/Value/infinity.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 17
|
2017-10-01T16:53:20.000Z
|
2018-11-28T07:20:35.000Z
|
#
inf
#
| 2
| 3
| 0.375
|
4a06ca80ff0955f5cb1c4f3bc6a59780aca5d74c
| 3,113
|
py
|
Python
|
p4a/pythonforandroid/recipes/android/__init__.py
|
Janith96/lbry-android
|
b44770c77fc6103a7f366fac3446366365a74fea
|
[
"MIT"
] | 4
|
2019-07-09T17:50:46.000Z
|
2019-12-07T08:37:58.000Z
|
p4a/pythonforandroid/recipes/android/__init__.py
|
Janith96/lbry-android
|
b44770c77fc6103a7f366fac3446366365a74fea
|
[
"MIT"
] | 4
|
2020-07-17T01:37:37.000Z
|
2020-07-21T14:21:08.000Z
|
p4a/pythonforandroid/recipes/android/__init__.py
|
Janith96/lbry-android
|
b44770c77fc6103a7f366fac3446366365a74fea
|
[
"MIT"
] | 3
|
2020-02-21T04:34:20.000Z
|
2021-03-19T22:32:38.000Z
|
from __future__ import unicode_literals
from pythonforandroid.recipe import CythonRecipe, IncludedFilesBehaviour
from pythonforandroid.util import current_directory
from pythonforandroid.patching import will_build
from pythonforandroid import logger
from os.path import join
class AndroidRecipe(IncludedFilesBehaviour, CythonRecipe):
    """python-for-android recipe for the `android` support module.

    At prebuild time it writes bootstrap-specific configuration constants
    into config.pxi / config.h / config.py inside the recipe's `android`
    source directory, so Cython, C and Python code all see the same values.
    """

    # name = 'android'
    version = None
    url = None
    src_filename = 'src'
    depends = [('pygame', 'sdl2', 'genericndkbuild'),
               'pyjnius']
    # Filled during prebuild_arch; exported to the build environment.
    config_env = {}

    def get_recipe_env(self, arch):
        """Extend the base build environment with the generated config values."""
        env = super(AndroidRecipe, self).get_recipe_env(arch)
        env.update(self.config_env)
        return env

    def prebuild_arch(self, arch):
        """Generate per-bootstrap config files before building for *arch*."""
        super(AndroidRecipe, self).prebuild_arch(arch)
        ctx_bootstrap = self.ctx.bootstrap.name
        # define macros for Cython, C, Python
        tpxi = 'DEF {} = {}\n'
        th = '#define {} {}\n'
        tpy = '{} = {}\n'
        # make sure bootstrap name is in unicode
        if isinstance(ctx_bootstrap, bytes):
            ctx_bootstrap = ctx_bootstrap.decode('utf-8')
        bootstrap = bootstrap_name = ctx_bootstrap
        is_sdl2 = bootstrap_name in ('sdl2', 'sdl2python3', 'sdl2_gradle')
        is_pygame = bootstrap_name in ('pygame',)
        is_webview = bootstrap_name in ('webview',)
        # Pick the Java/JNI namespace matching the bootstrap; abort the build
        # for any unsupported bootstrap.
        if is_sdl2 or is_webview:
            if is_sdl2:
                bootstrap = 'sdl2'
            java_ns = u'org.kivy.android'
            jni_ns = u'org/kivy/android'
        elif is_pygame:
            java_ns = u'org.renpy.android'
            jni_ns = u'org/renpy/android'
        else:
            logger.error((
                'unsupported bootstrap for android recipe: {}'
                ''.format(bootstrap_name)
            ))
            exit(1)
        config = {
            'BOOTSTRAP': bootstrap,
            'IS_SDL2': int(is_sdl2),
            'IS_PYGAME': int(is_pygame),
            'PY2': int(will_build('python2')(self)),
            'JAVA_NAMESPACE': java_ns,
            'JNI_NAMESPACE': jni_ns,
        }
        # create config files for Cython, C and Python
        with (
                current_directory(self.get_build_dir(arch.arch))), (
                open(join('android', 'config.pxi'), 'w')) as fpxi, (
                open(join('android', 'config.h'), 'w')) as fh, (
                open(join('android', 'config.py'), 'w')) as fpy:
            for key, value in config.items():
                fpxi.write(tpxi.format(key, repr(value)))
                fpy.write(tpy.format(key, repr(value)))
                # C header gets ints bare and strings quoted.
                fh.write(th.format(
                    key,
                    value if isinstance(value, int) else '"{}"'.format(value)
                ))
                self.config_env[key] = str(value)
            if is_sdl2:
                fh.write('JNIEnv *SDL_AndroidGetJNIEnv(void);\n')
                fh.write(
                    '#define SDL_ANDROID_GetJNIEnv SDL_AndroidGetJNIEnv\n'
                )
            elif is_pygame:
                fh.write('JNIEnv *SDL_ANDROID_GetJNIEnv(void);\n')
recipe = AndroidRecipe()
| 32.427083
| 77
| 0.558946
|
4a06cb111ee2e3131d70354ca9e65f4eb223a7dc
| 2,159
|
py
|
Python
|
Prediction and Control with Function Approximation/Week 3/Notebook: Function Approximation and Control/random_agent.py
|
Shreyas-Donki/Reinforcement-Learning-Specialization-master
|
6568b237e29b3dc19f419d0825d09354fcc71011
|
[
"MIT"
] | 83
|
2019-10-23T04:16:43.000Z
|
2022-03-31T01:31:16.000Z
|
Prediction and Control with Function Approximation/Week 3/Notebook: Function Approximation and Control/random_agent.py
|
Shreyas-Donki/Reinforcement-Learning-Specialization-master
|
6568b237e29b3dc19f419d0825d09354fcc71011
|
[
"MIT"
] | 1
|
2020-08-31T23:52:00.000Z
|
2020-08-31T23:52:00.000Z
|
Prediction and Control with Function Approximation/Week 3/Notebook: Function Approximation and Control/random_agent.py
|
Shreyas-Donki/Reinforcement-Learning-Specialization-master
|
6568b237e29b3dc19f419d0825d09354fcc71011
|
[
"MIT"
] | 76
|
2019-12-22T19:05:12.000Z
|
2022-03-23T01:48:41.000Z
|
#!/usr/bin/env python
from agent import BaseAgent
import numpy as np
class Agent(BaseAgent):
"""agent does *no* learning, selects action 0 always"""
def __init__(self):
self.last_action = None
def agent_init(self, agent_info={}):
"""Setup for the agent called when the experiment first starts."""
if "actions" in agent_info:
self.actions = agent_info["actions"]
if "state_array" in agent_info:
self.q_values = agent_info["state_array"]
self.last_action = 0
def agent_start(self, observation):
"""The first method called when the experiment starts, called after
the environment starts.
Args:
observation (Numpy array): the state observation from the
environment's evn_start function.
Returns:
The first action the agent takes.
"""
self.last_action = np.random.choice(3) # set first action to 0
return self.last_action
def agent_step(self, reward, observation):
"""A step taken by the agent.
Args:
reward (float): the reward received for taking the last action taken
observation (Numpy array): the state observation from the
environment's step based, where the agent ended up after the
last step
Returns:
The action the agent is taking.
"""
# local_action = 0 # choose the action here
self.last_action = np.random.choice(3)
return self.last_action
def agent_end(self, reward):
"""Run when the agent terminates.
Args:
reward (float): the reward the agent received for entering the
terminal state.
"""
pass
def agent_cleanup(self):
"""Cleanup done after the agent ends."""
pass
def agent_message(self, message):
"""A function used to pass information from the agent to the experiment.
Args:
message: The message passed to the agent.
Returns:
The response (or answer) to the message.
"""
pass
| 30.842857
| 80
| 0.600278
|
4a06cbbf2edcd0735283593c7fb6684010cdda69
| 1,486
|
py
|
Python
|
reviewboard/admin/tests.py
|
Khan/reviewboard
|
51ec4261e67b8bf4e2cfa9a0894a97b16509ad33
|
[
"MIT"
] | 1
|
2015-09-11T15:50:17.000Z
|
2015-09-11T15:50:17.000Z
|
reviewboard/admin/tests.py
|
Khan/reviewboard
|
51ec4261e67b8bf4e2cfa9a0894a97b16509ad33
|
[
"MIT"
] | null | null | null |
reviewboard/admin/tests.py
|
Khan/reviewboard
|
51ec4261e67b8bf4e2cfa9a0894a97b16509ad33
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.test import TestCase
from reviewboard.admin import checks
class UpdateTests(TestCase):
"""Tests for update required pages"""
def tearDown(self):
# Make sure we don't break further tests by resetting this fully.
checks.reset_check_cache()
def testManualUpdatesRequired(self):
"""Testing check_updates_required with valid configuration"""
# NOTE: This is assuming the install is fine. It should be given
# that we set things like the uploaded path correctly to
# a known good directory before starting unit tests.
updates_required = checks.check_updates_required()
self.assertEqual(len(updates_required), 0)
def testManualUpdatesRequiredBadMediaDirs(self):
"""Testing check_updates_required with bad media directories"""
old_media_root = settings.MEDIA_ROOT
settings.MEDIA_ROOT = "/"
checks.reset_check_cache()
updates_required = checks.check_updates_required()
settings.MEDIA_ROOT = old_media_root
# Should complain about the upload directory.
self.assertEqual(len(updates_required), 1)
url, data = updates_required[0]
self.assertEqual(url, "admin/manual-updates/media-upload-dir.html")
response = self.client.get("/")
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "admin/manual_updates_required.html")
| 36.243902
| 79
| 0.700538
|
4a06cbddc86b56e32735fbb7b3ce81c102a06aac
| 2,086
|
py
|
Python
|
applications/robin/test.py
|
beckerrh/simfemsrc
|
d857eb6f6f8627412d4f9d89a871834c756537db
|
[
"MIT"
] | null | null | null |
applications/robin/test.py
|
beckerrh/simfemsrc
|
d857eb6f6f8627412d4f9d89a871834c756537db
|
[
"MIT"
] | 1
|
2019-01-31T10:59:11.000Z
|
2019-01-31T10:59:11.000Z
|
applications/robin/test.py
|
beckerrh/simfemsrc
|
d857eb6f6f8627412d4f9d89a871834c756537db
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import sys, os, shutil
from argparse import ArgumentParser
sys.path.append("@simfempythonpath@")
sys.path.append("@libpythonpath@")
import tools.plot
import tools.testerrors
import simfempy
import simfemrobin
#---------------------------------------------------------#
def main():
testtype = 'hmean'
# testtype = 'robin'
if len(sys.argv)>1:
if sys.argv[1] == "1D":
meshtypes = ["LineMesh"]
elif sys.argv[1] == "2D":
meshtypes = ["TriangleMesh"]
elif sys.argv[1] == "3D":
meshtypes = ["TetrahedralMesh"]
else:
raise KeyError("unknwon argument", sys.argv[1])
meshtypes = ["LineMesh", "TriangleMesh", "TetrahedralMesh"]
meshtypes = ["TriangleMesh"]
paramdict = {}
paramdict["fem"] = "P1"
paramdict["application"] = "quadratic"
paramdict["application"] = "cosinus"
paramdict["diff"] = 1.0
paramdict["alpha"] = 1.0
paramdict["gamma"] = 0.1
paramdict["robin"] = 100.
paramdict["beta"] = "zero"
methods=["traditional", "nitsche"]
methods=["nitsche", "traditional"]
# methods=["traditional"]
# methods=["nitsche"]
if testtype == 'hmean':
hmean = None
params=[0.5*0.5**i for i in range(4)]
elif testtype == 'robin':
hmean=0.02
params=[0.001* 5.0**i for i in range(15)]
errs = ["L2", "Linf", "H1"]
simfemplot = tools.plot.SimFemPlot(methods, params=params, param=testtype)
simfemplot.methodsnames["traditional"] = "trad"
simfemplot.methodsnames["nitsche"] = "nit"
solver = simfemrobin.Solver()
if testtype == 'robin':
simfemplot.paramname = r"1/$\varepsilon$"
for meshtype in meshtypes:
simfemtesterrors = tools.testerrors.SimFemTestErrors(solver=solver, errs=errs, plotter=simfemplot, meshtype=meshtype, paramdict=paramdict, methods=methods, param=testtype, params=params, hmean=hmean)
simfemtesterrors.run()
#---------------------------------------------------------#
if __name__ == '__main__':
main()
| 33.645161
| 207
| 0.59396
|
4a06cc790959accdabadf078313607a5149863ba
| 1,350
|
py
|
Python
|
pygears/cookbook/trr_dist.py
|
Risto97/pygears
|
19393e85101a16762cb3bbbf3010946ef69217f2
|
[
"MIT"
] | null | null | null |
pygears/cookbook/trr_dist.py
|
Risto97/pygears
|
19393e85101a16762cb3bbbf3010946ef69217f2
|
[
"MIT"
] | null | null | null |
pygears/cookbook/trr_dist.py
|
Risto97/pygears
|
19393e85101a16762cb3bbbf3010946ef69217f2
|
[
"MIT"
] | null | null | null |
from pygears import gear
from pygears.conf import gear_log
from pygears.svgen.inst import SVGenInstPlugin
from pygears.svgen.svmod import SVModuleGen
from pygears.typing import Queue
def trr_dist_type(dtype):
return Queue[dtype[0]]
@gear
async def trr_dist(din: Queue, *,
dout_num) -> b'(trr_dist_type(din), ) * dout_num':
t_din = din.dtype
for i in range(dout_num):
out_res = [None] * dout_num
val = t_din((0, 0, 0))
while (val.eot[0] == 0):
async with din as val:
out_res[i] = val[:-1]
gear_log().debug(
f'Trr_dist yielding on output {i} value {out_res[i]}')
yield tuple(out_res)
if val.eot == int('1' * t_din.lvl, 2):
gear_log().debug(f'Trr_dist reset to first output')
break
class SVGenTrrDist(SVModuleGen):
@property
def is_generated(self):
return True
def get_module(self, template_env):
context = {
'module_name': self.sv_module_name,
'intfs': list(self.sv_port_configs())
}
return template_env.render_local(__file__, "trr_dist.j2", context)
class SVGenTrrDistPlugin(SVGenInstPlugin):
@classmethod
def bind(cls):
cls.registry['svgen']['module_namespace'][trr_dist] = SVGenTrrDist
| 27
| 74
| 0.606667
|
4a06cc82521bed749b2fa0ce8fa02cd8644265bd
| 3,064
|
py
|
Python
|
pyscf/scf/test/test_diis.py
|
QuESt-Calculator/pyscf
|
0ed03633b699505c7278f1eb501342667d0aa910
|
[
"Apache-2.0"
] | 501
|
2018-12-06T23:48:17.000Z
|
2022-03-31T11:53:18.000Z
|
pyscf/scf/test/test_diis.py
|
QuESt-Calculator/pyscf
|
0ed03633b699505c7278f1eb501342667d0aa910
|
[
"Apache-2.0"
] | 710
|
2018-11-26T22:04:52.000Z
|
2022-03-30T03:53:12.000Z
|
pyscf/scf/test/test_diis.py
|
QuESt-Calculator/pyscf
|
0ed03633b699505c7278f1eb501342667d0aa910
|
[
"Apache-2.0"
] | 273
|
2018-11-26T10:10:24.000Z
|
2022-03-30T12:25:28.000Z
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import tempfile
import numpy
from pyscf import gto
from pyscf import scf
from pyscf.scf import diis
class KnownValues(unittest.TestCase):
def test_addis_minimize(self):
numpy.random.seed(1)
ds = numpy.random.random((4,2,2))
fs = numpy.random.random((4,2,2))
es = numpy.random.random(4)
v, x = diis.adiis_minimize(ds, fs, -1)
self.assertAlmostEqual(v, -0.44797757916272785, 9)
def test_eddis_minimize(self):
numpy.random.seed(1)
ds = numpy.random.random((4,2,2))
fs = numpy.random.random((4,2,2))
es = numpy.random.random(4)
v, x = diis.ediis_minimize(es, ds, fs)
self.assertAlmostEqual(v, 0.31551563100606295, 9)
def test_input_diis(self):
mol = gto.M(
verbose = 7,
output = '/dev/null',
atom = '''
O 0 0 0
H 0 -0.757 0.587
H 0 0.757 0.587''',
basis = '631g',
)
mf1 = scf.RHF(mol)
mf1.DIIS = diis.EDIIS
mf1.max_cycle = 4
e = mf1.kernel()
self.assertAlmostEqual(e, -75.983875341696987, 9)
mol.stdout.close()
def test_roll_back(self):
mol = gto.M(
verbose = 7,
output = '/dev/null',
atom = '''
O 0 0 0
H 0 -1.757 1.587
H 0 1.757 1.587''',
basis = '631g',
)
mf1 = scf.RHF(mol)
mf1.diis_space = 4
mf1.diis_space_rollback = True
mf1.max_cycle = 10
e = mf1.kernel()
self.assertAlmostEqual(e, -75.446749864901321, 9)
mol.stdout.close()
def test_diis_restart(self):
mol = gto.M(
verbose = 7,
output = '/dev/null',
atom = '''
O 0 0 0
H 0 -1.757 1.587
H 0 1.757 1.587''',
basis = '631g',
)
tmpf = tempfile.NamedTemporaryFile()
mf = scf.RHF(mol)
mf.diis_file = tmpf.name
eref = mf.kernel()
self.assertAlmostEqual(eref, -75.44606939063496, 9)
mf = scf.RHF(mol)
mf.diis = scf.diis.DIIS().restore(tmpf.name)
mf.max_cycle = 3
e = mf.kernel()
self.assertAlmostEqual(e, eref, 9)
if __name__ == "__main__":
print("Full Tests for DIIS")
unittest.main()
| 30.039216
| 74
| 0.558094
|
4a06ccb621b6d05c246cb10d482762e47f67009b
| 670
|
py
|
Python
|
data/quantum/species/bR/species.py
|
goldmanm/butanol_paper_data
|
20d25a8287de63a762ac18d66d660c153325a2c7
|
[
"MIT"
] | null | null | null |
data/quantum/species/bR/species.py
|
goldmanm/butanol_paper_data
|
20d25a8287de63a762ac18d66d660c153325a2c7
|
[
"MIT"
] | null | null | null |
data/quantum/species/bR/species.py
|
goldmanm/butanol_paper_data
|
20d25a8287de63a762ac18d66d660c153325a2c7
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
spinMultiplicity = 2
opticalIsomers = 1
externalSymmetry = 2
energy = {
'CBS-QB3': GaussianLog('p5.log'),
'CCSD(T)-F12/cc-pVTZ-F12': MolproLog('p5_f12.out'),
}
frequencies = GaussianLog('p5freq.log')
rotors = [HinderedRotor(scanLog=ScanLog('scan_0.log'), pivots=[5,1], top=[1,2,3,4], symmetry=3),
HinderedRotor(scanLog=ScanLog('scan_1.log'), pivots=[5,6], top=[6,7,8,9], symmetry=3),
HinderedRotor(scanLog=ScanLog('scan_2.log'), pivots=[13,10], top=[13,14], symmetry=1),
HinderedRotor(scanLog=ScanLog('scan_3.log'), pivots=[5,10], top=[10,11,12,13,14], symmetry=1)]
| 37.222222
| 105
| 0.631343
|
4a06cdbe13ac566c4f884acb7c05fef7e9188e1d
| 5,276
|
py
|
Python
|
src/firebot/settings/base.py
|
zipmex/fire
|
a41bbdbc86085c055ae4706fadea4f142e881a85
|
[
"Apache-2.0"
] | 52
|
2017-03-15T16:25:14.000Z
|
2022-03-01T16:50:14.000Z
|
src/firebot/settings/base.py
|
zipmex/fire
|
a41bbdbc86085c055ae4706fadea4f142e881a85
|
[
"Apache-2.0"
] | 239
|
2017-03-16T17:10:22.000Z
|
2022-03-06T07:24:24.000Z
|
src/firebot/settings/base.py
|
zipmex/fire
|
a41bbdbc86085c055ae4706fadea4f142e881a85
|
[
"Apache-2.0"
] | 8
|
2017-03-15T17:45:18.000Z
|
2022-01-26T14:51:03.000Z
|
import json
import os
###############################################################################
# Django
###############################################################################
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
SITE_ID = 1
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.github',
'django_extensions',
'rest_framework',
'firebot.apps.FireBotConfig',
'fb_emails.apps.FbEmailsConfig',
'fb_github.apps.FbGithubConfig',
)
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'firebot.middleware.XForwardedForMiddleware',
]
ROOT_URLCONF = 'firebot.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'firebot.context_processors.firebot_context_processor',
],
},
},
]
WSGI_APPLICATION = 'firebot.wsgi.application'
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.abspath(os.path.join(BASE_DIR, '..', 'staticfiles'))
STATIC_FRONTEND_ROOT = os.path.join(STATIC_ROOT, 'frontend')
STATICFILES_DIRS = [
os.path.abspath(os.path.join(BASE_DIR, '..', 'dist-frontend'))
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
###############################################################################
# Allauth
###############################################################################
LOGIN_REDIRECT_URL = '/'
ACCOUNT_LOGOUT_ON_GET = True
SOCIALACCOUNT_PROVIDERS = {
'github': {
'SCOPE': [
'user:email',
],
'VERIFIED_EMAIL': True,
}
}
###############################################################################
# Django Rest Framework
###############################################################################
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_RENDERER_CLASSES': [
'rest_framework.renderers.JSONRenderer',
],
}
###############################################################################
# Celery
###############################################################################
CELERY_IGNORE_RESULT = True
from firebot.settings.tasks import * # noqa
###############################################################################
# Firebot
###############################################################################
FIREBOT_BANNED_EMAIL_DOMAINS = []
# Support banning disposable email domains based on a list grabbed from
# https://github.com/groundcat/disposable-email-domain-list
if os.environ.get('FIREBOT_BAN_DISPOSABLE_DOMAINS') == '1':
FIREBOT_BANNED_EMAIL_DOMAINS += json.load(open(os.path.join(
BASE_DIR,
'disposable-domains-list.json',
)))
print('Ignoring {} email domains in base.py'.format(len(FIREBOT_BANNED_EMAIL_DOMAINS)))
FIREBOT_BANNED_EMAIL_SLUGS = (
'abuse',
'admin',
'administrator',
'billing',
'bot',
'catalyst',
'compliance',
'dev',
'devnull',
'dns',
'fc',
'fire',
'firebot',
'fire-bot',
'ftp',
'fundersclub',
'help',
'hostmaster',
'info',
'inoc',
'ispfeedback',
'ispsupport',
'issues',
'list',
'list-request',
'maildaemon',
'no-reply',
'noc',
'noreply',
'null',
'phish',
'phishing',
'postmaster',
'privacy',
'registrar',
'root',
'security',
'spam',
'support',
'sysadmin',
'tech',
'undisclosed-recipients',
'unsubscribe',
'usenet',
'uucp',
'webmaster',
'www',
)
| 25.990148
| 91
| 0.554966
|
4a06cdf788cb0e18ef66e9a1f3f50e0a25414d72
| 818
|
py
|
Python
|
django_alive/tests/test_middleware.py
|
lincolnloop/django-alive
|
608fd0a1d94f90f51a48a943348e9038f402a504
|
[
"MIT"
] | 25
|
2018-08-22T12:42:14.000Z
|
2022-01-18T19:03:05.000Z
|
django_alive/tests/test_middleware.py
|
lincolnloop/django-alive
|
608fd0a1d94f90f51a48a943348e9038f402a504
|
[
"MIT"
] | 7
|
2019-11-05T23:52:49.000Z
|
2020-07-01T18:03:32.000Z
|
django_alive/tests/test_middleware.py
|
lincolnloop/django-alive
|
608fd0a1d94f90f51a48a943348e9038f402a504
|
[
"MIT"
] | null | null | null |
from django.core.exceptions import DisallowedHost
from django.test import RequestFactory, TestCase, override_settings
from django.urls import reverse
from ..middleware import healthcheck_bypass_host_check
class MiddlewareTestCase(TestCase):
def setUp(self):
self.middleware = healthcheck_bypass_host_check(lambda r: r)
@override_settings(ALLOWED_HOSTS=["not-testserver"])
def test_bypass(self):
request = RequestFactory().get(reverse("alive_alive"))
passed_request = self.middleware(request)
self.assertEqual(request, passed_request)
@override_settings(ALLOWED_HOSTS=["not-testserver"])
def test_disallowed(self):
request = RequestFactory().get("/")
request = self.middleware(request)
self.assertRaises(DisallowedHost, request.get_host)
| 35.565217
| 68
| 0.744499
|
4a06ce3e37af80377adea147a10b95bc7483c91a
| 680
|
py
|
Python
|
python_auto/ex02.py
|
eatmore/python_practice
|
c6a773c8d24182b23a86fd9b66b27b5ff948b258
|
[
"MIT"
] | null | null | null |
python_auto/ex02.py
|
eatmore/python_practice
|
c6a773c8d24182b23a86fd9b66b27b5ff948b258
|
[
"MIT"
] | null | null | null |
python_auto/ex02.py
|
eatmore/python_practice
|
c6a773c8d24182b23a86fd9b66b27b5ff948b258
|
[
"MIT"
] | 1
|
2020-03-12T06:05:38.000Z
|
2020-03-12T06:05:38.000Z
|
import re
beginsWithHello = re.compile(r'Hello')
mo1 = beginsWithHello.search('Hello world')
mo2 = beginsWithHello.search('He said Hello')
print(mo1.group(),mo2.group())
endsWithNumber = re.compile(r'\d$')
mo3 = endsWithNumber.search('Your number is 42')
print(mo3.group())
atRegex = re.compile(r'.at')
mo4 = atRegex.findall('The cat in the hat sat on the flat mat')
print(mo4)
nameRegex = re.compile(r'First Name: (.*) Last Name: (.*)')
mo5 = nameRegex.search('First Name: Al Last Name: Swei')
print(mo5.group(1))
noNewlineRegex = re.compile('.*', re.DOTALL)
mo6 = noNewlineRegex.search('Serve the public trust. \nProduct the innocent. \nUpload the law.')
print(mo6.group())
| 30.909091
| 96
| 0.713235
|
4a06ce733a9b6ee4257a3099591c4861e7636260
| 125
|
py
|
Python
|
ncssl_api_client/api/commands/retry_dcv_command.py
|
antonku/ncssl_api_client
|
c463b000960d50368d39bde2a180499f1da3a29a
|
[
"MIT"
] | 8
|
2017-11-28T11:05:52.000Z
|
2021-11-16T13:52:45.000Z
|
ncssl_api_client/api/commands/retry_dcv_command.py
|
antonku/ncssl_api_client
|
c463b000960d50368d39bde2a180499f1da3a29a
|
[
"MIT"
] | 4
|
2018-12-23T14:52:11.000Z
|
2019-08-09T21:01:44.000Z
|
ncssl_api_client/api/commands/retry_dcv_command.py
|
antonku/ncssl_api_client
|
c463b000960d50368d39bde2a180499f1da3a29a
|
[
"MIT"
] | 2
|
2017-11-28T14:38:24.000Z
|
2017-11-29T09:03:20.000Z
|
from ncssl_api_client.api.commands.abstract_command import AbstractCommand
class RetryDcvCommand(AbstractCommand):
pass
| 25
| 74
| 0.856
|
4a06ceb204228cfe2b9058e43ee2d638c24a11f1
| 2,871
|
py
|
Python
|
Day 08/problem 02.py
|
RealTigerCZ/AoC2021
|
22d8a05a15fec4abab6713e678f8c29400ee56a3
|
[
"MIT"
] | null | null | null |
Day 08/problem 02.py
|
RealTigerCZ/AoC2021
|
22d8a05a15fec4abab6713e678f8c29400ee56a3
|
[
"MIT"
] | null | null | null |
Day 08/problem 02.py
|
RealTigerCZ/AoC2021
|
22d8a05a15fec4abab6713e678f8c29400ee56a3
|
[
"MIT"
] | null | null | null |
path = "input.txt"
file = open(path)
input = file.readlines()
file.close()
#Logic solution
"""
∩ = intersection of sets
/ = without
A = !4 ∩ 7 = 7 / 1
B = !3 ∩ 4
---------
C = 1 ∩ 2 = !6
D = !0
E = !9
F = !2 ∩ 1
G =
--------- not needed
C, F = 7 / A
B, D = 4 / 1
problematic:
0, 6, 9
2, 3, 5
2 ∩ 1 != 1
3 ∩ 1 = 1
6 ∩ 1 != 1
9 ∩ 1 = 1
0 ∩ 1 = 1
6 ∩ 1 != 1
-------------------
Actual solving method / aglorithm
A -> 7 / 1
3 -> len = 5 and 3 ∩ 1 = 1
B -> !3 ∩ 4
5 -> contain b and len = 5
2 -> len = 5 and is ont 5 or 3
D -> B, D = 4 / 1 (we know B)
0 -> !D
6 -> 6 ∩ 1 != 1 and len = 6
9 -> len = 6 and is not 6 or 0
MAPPING IS DONE MANUALLY TO NUMBERS WITH THAT ALGORITHM
ITS NOT NEEDED TO MAP ALL SEGMENTS (for this problem)
"""
def sort(a): #sorts string
return "".join(sorted(a))
def without(a, b):
for item in b:
if item in a:
a = a.replace(item, "")
return a
def intersect(a, b):
toReturn = ""
for item in a:
if item in b:
toReturn += item
return toReturn
def not_code(a):
return(without("abcdefg", a))
nums = []
for line in input:
line = line.split("|")
zero_six_nine = []
two_three_five = []
numbers = [0 for _ in range(10)]
for element in line[0].strip().split(" "):
element = sort(element)
if len(element) == 2:
numbers[1] = element
elif len(element) == 3:
numbers[7] = element
elif len(element) == 4:
numbers[4] = element
elif len(element) == 5:
two_three_five.append(element)
elif len(element) == 6:
zero_six_nine.append(element)
elif len(element) == 7:
numbers[8] = element
else:
assert False, "Unreachable!"
A = without(numbers[7], numbers[1])
for n in two_three_five: #finding 3
if intersect(n, numbers[1]) == numbers[1]:
two_three_five.remove(n)
numbers[3] = n
break
B = intersect(not_code(numbers[3]), numbers[4])
if B in two_three_five[0]: #finding 5 and 2
numbers[5] = two_three_five[0]
numbers[2] = two_three_five[1]
else:
numbers[5] = two_three_five[1]
numbers[2] = two_three_five[0]
D = without(without(numbers[4], numbers[1]), B)
numbers[0] = not_code(D)
zero_six_nine.remove(numbers[0])
if intersect(zero_six_nine[0], numbers[1]) == numbers[1]:
numbers[9] = zero_six_nine[0]
numbers[6] = zero_six_nine[1]
else:
numbers[9] = zero_six_nine[1]
numbers[6] = zero_six_nine[0]
num_dict = {}
for idx, num in enumerate(numbers):
num_dict[num] = str(idx)
ret_num = ""
for element in line[1].strip().split(" "):
element = sort(element)
ret_num += num_dict[element]
nums.append(int(ret_num))
print(sum(nums))
| 21.425373
| 61
| 0.540578
|
4a06cf11a992fc08950163fd5e207b562d7ca738
| 9,783
|
py
|
Python
|
models/algorithms/daal_trees.py
|
mdolinski-equinix/driverlessai-recipes
|
51e991eacb716746da88390bcf7960dda3d7427b
|
[
"Apache-2.0"
] | null | null | null |
models/algorithms/daal_trees.py
|
mdolinski-equinix/driverlessai-recipes
|
51e991eacb716746da88390bcf7960dda3d7427b
|
[
"Apache-2.0"
] | null | null | null |
models/algorithms/daal_trees.py
|
mdolinski-equinix/driverlessai-recipes
|
51e991eacb716746da88390bcf7960dda3d7427b
|
[
"Apache-2.0"
] | null | null | null |
"""Binary Classification and Regression for Decision Forest and Gradient Boosting based on Intel DAAL"""
import datatable as dt
from h2oaicore.models import CustomModel
import numpy as np
from h2oaicore.systemutils import arch_type
from sklearn.preprocessing import LabelEncoder
if arch_type != 'ppc64le':
import daal4py as d4p
class DaalBaseModel(object):
_regression = True
_binary = True # FIXME: but returns class, not probabilities
_multiclass = False # FIXME: shape issue
_can_use_gpu = False
_is_reproducible = False
@staticmethod
def is_enabled():
from h2oaicore.systemutils import arch_type
return not (arch_type == "ppc64le")
def dt_to_numpy(self, X, y=None):
if isinstance(X, dt.Frame):
X = X.to_numpy()
dtype = np.float32 if self.params['fptype'] == 'float' else np.float64
X = np.ascontiguousarray(X, dtype=dtype)
if y is not None:
y = np.ascontiguousarray(y, dtype=dtype).reshape(X.shape[0], 1)
else:
raise
return X, y
def fit(self, X, y, sample_weight=None, eval_set=None, sample_weight_eval_set=None, **kwargs):
if self.num_classes > 1:
lb = LabelEncoder()
lb.fit(self.labels)
y = lb.transform(y)
X_features = list(X.names)
X, y = self.dt_to_numpy(X, y)
if self.num_classes == 1:
train_func = self._train_func_regress
else:
train_func = self._train_func_class
train_algo = train_func(**self.params)
train_result = train_algo.compute(X, y)
model_tuple = (train_result, self.num_classes, self.params['fptype'])
if hasattr(train_result, 'variableImportance'):
importances = train_result.variableImportance.tolist()[0]
else:
importances = np.ones(len(X_features))
self.set_model_properties(model=model_tuple,
features=X_features,
importances=importances,
iterations=self.params.get('nTrees', self.params.get('maxIterations', 100)))
def predict(self, X, **kwargs):
model_tuple, _, _, _ = self.get_model_properties()
train_result = model_tuple[0]
nClasses = model_tuple[1]
fptype = model_tuple[2]
if self.num_classes == 1:
predict_func = self._predict_func_regress
other_kwargs = {}
else:
predict_func = self._predict_func_class
other_kwargs = {'nClasses': nClasses}
predict_algo = predict_func(fptype=fptype, **other_kwargs)
X, _ = self.dt_to_numpy(X, None)
# This is not optimal at the moment because it returns the 0/1 label and not a probability.
# So the ROC curve in DAI looks very jagged. A future version of DAAL Decision Forest will
# support predicting probabilities as well as the label.
if self.num_classes <= 2:
result = predict_algo.compute(X, train_result.model).prediction.ravel()
else:
result = predict_algo.compute(X, train_result.model).prediction
return result
class DaalTreeModel(DaalBaseModel, CustomModel):
_display_name = "DaalTree"
_description = "Decision Tree Model based on Intel DAAL (https://intelpython.github.io/daal4py/algorithms.html)"
if arch_type != 'ppc64le':
_train_func_class = d4p.gbt_classification_training
_predict_func_class = d4p.gbt_classification_prediction
_train_func_regress = d4p.gbt_regression_training
_predict_func_regress = d4p.gbt_regression_prediction
else:
_train_func_class = None
_predict_func_class = None
_train_func_regress = None
_predict_func_regress = None
def set_default_params(self, accuracy=None, time_tolerance=None, interpretability=None, **kwargs):
self.params = {
'nClasses': self.num_classes,
'fptype': 'float',
'maxIterations': 200,
'maxTreeDepth': 6,
'minSplitLoss': 0.1,
'shrinkage': 0.3,
'observationsPerTreeFraction': 1,
'lambda_': 1,
'maxBins': 256,
'featuresPerNode': 0,
'minBinSize': 5,
'memorySavingMode': False,
'minObservationsInLeafNode': 1
}
if self.num_classes == 1:
self.params.pop('nClasses', None)
self.params.pop('nTrees', None)
self.params.pop('maxIterations', None)
class DaalForestModel(DaalBaseModel, CustomModel):
_display_name = "DaalForest"
_description = "Decision Forest Model based on Intel DAAL (https://intelpython.github.io/daal4py/algorithms.html)"
if arch_type != 'ppc64le':
_train_func_class = d4p.decision_forest_classification_training
_predict_func_class = d4p.decision_forest_classification_prediction
_train_func_regress = d4p.decision_forest_regression_training
_predict_func_regress = d4p.decision_forest_regression_prediction
else:
_train_func_class = None
_predict_func_class = None
_train_func_regress = None
_predict_func_regress = None
def set_default_params(self, accuracy=None, time_tolerance=None, interpretability=None, **kwargs):
self.params = dict(nClasses=self.num_classes,
fptype='float',
varImportance='MDI',
nTrees=100)
if self.num_classes == 1:
self.params.pop('nClasses', None)
self.params.pop('nTrees', None)
self.params.pop('maxIterations', None)
def _setup_recipe():
    """Download and unpack Intel DAAL / daal4py into the DAI contrib env.

    Idempotent: a marker file ("daal_is_installed") is written after the
    first successful install, and the whole download/extract step is
    skipped when the marker is present.  Returns True on success (and on
    PPC under CI, where the install is skipped entirely).
    """
    # for DAI 1.7.0 one is required to run this function manually
    # in DAI >=1.7.1, this function will be run by DAI itself
    import os
    from h2oaicore.systemutils_more import extract, download
    from h2oaicore.systemutils import config, remove
    from h2oaicore.systemutils import user_dir
    import shutil
    from h2oaicore.systemutils import arch_type  # don't remove this import, setup_recipe parsed-out separately
    # DAAL has no ppc64le binaries; bail out early on that architecture.
    if arch_type == "ppc64le":
        if config.hard_asserts:
            # in CI testing just ignore
            return True
        else:
            # for user use, raise
            raise RuntimeError("Cannot use daal on PPC")
    daal_is_installed_path = os.path.join(user_dir(), config.contrib_env_relative_directory, "daal")
    daal_is_installed_file = os.path.join(daal_is_installed_path, "daal_is_installed")
    if not os.path.isfile(daal_is_installed_file):
        daal_temp_path = os.path.join(user_dir(), config.contrib_relative_directory, "daal")
        os.makedirs(daal_temp_path, exist_ok=True)
        prefix = "https://anaconda.org/intel"
        # NOTE(review): bare `except` — any failure (including KeyboardInterrupt)
        # triggers the S3 mirror fallback below.
        try:
            file1 = download("%s/daal4py/2019.4/download/linux-64/daal4py-2019.4-py36h7b7c402_6.tar.bz2" % prefix,
                             dest_path=daal_temp_path)
            file2 = download("%s/impi_rt/2019.4/download/linux-64/impi_rt-2019.4-intel_243.tar.bz2" % prefix,
                             dest_path=daal_temp_path)
            file3 = download("%s/daal/2019.4/download/linux-64/daal-2019.4-intel_243.tar.bz2" % prefix,
                             dest_path=daal_temp_path)
            file4 = download("https://github.com/intel/daal/releases/download/2019_u4/l_daal_oss_p_2019.4.007.tgz",
                             dest_path=daal_temp_path)
        except:
            # Fallback mirrors of the same four artifacts.
            file1 = download("https://0xdata-public.s3.amazonaws.com/daal4py-2019.4-py36h7b7c402_6.tar.bz2",
                             dest_path=daal_temp_path)
            file2 = download("https://0xdata-public.s3.amazonaws.com/impi_rt-2019.4-intel_243.tar.bz2",
                             dest_path=daal_temp_path)
            file3 = download("https://0xdata-public.s3.amazonaws.com/daal-2019.4-intel_243.tar.bz2",
                             dest_path=daal_temp_path)
            file4 = download("https://0xdata-public.s3.amazonaws.com/l_daal_oss_p_2019.4.007.tgz",
                             dest_path=daal_temp_path)
        temp_path = os.path.join(user_dir(), config.contrib_env_relative_directory, "info")
        os.makedirs(temp_path, exist_ok=True)
        # Unpack all four archives into the contrib env directory.
        python_site_packages_path = os.path.join(user_dir(), config.contrib_env_relative_directory)
        extract(file1, python_site_packages_path)
        python_site_packages_path2 = os.path.join(user_dir(), config.contrib_env_relative_directory)
        extract(file2, python_site_packages_path2)
        extract(file3, python_site_packages_path2)
        extract(file4, python_site_packages_path2, "gz")
        # Copy libfabric shared objects next to the env's lib/ directory.
        other_path = os.path.join(python_site_packages_path2, "lib/libfabric/")
        import glob
        for file in glob.glob(os.path.join(other_path, "*.so*")):
            new_file = os.path.join(python_site_packages_path2, "lib", os.path.basename(file))
            if not os.path.isfile(new_file):
                shutil.copy(file, new_file)
        # Same for the bundled TBB shared objects.
        other_path = os.path.join(python_site_packages_path2,
                                  "l_daal_oss_p_2019.4.007/daal_prebuild/linux/tbb/lib/intel64_lin/gcc4.4/")
        import glob  # NOTE(review): redundant re-import; glob is already in scope
        for file in glob.glob(os.path.join(other_path, "*.so*")):
            new_file = os.path.join(python_site_packages_path2, "lib", os.path.basename(file))
            if not os.path.isfile(new_file):
                shutil.copy(file, new_file)
        # Write the marker so subsequent calls skip the install.
        os.makedirs(daal_is_installed_path, exist_ok=True)
        with open(daal_is_installed_file, "wt") as f:
            f.write("DONE")
        # Clean up the downloaded archives.
        remove(file1)
        remove(file2)
        remove(file3)
        remove(file4)
    return True
| 44.671233
| 118
| 0.637841
|
4a06cf761af3b973b83ed6b56c53d4ad1ab83c79
| 451
|
py
|
Python
|
freezing/model/migrations/versions/41c3e58a61aa_bookkeeping.py
|
freezingsaddles/freezing-model
|
3bb03739d5bdff418bcf17707a52c9994c45e52f
|
[
"Apache-2.0"
] | 2
|
2020-01-02T01:23:00.000Z
|
2022-01-03T20:57:39.000Z
|
freezing/model/migrations/versions/41c3e58a61aa_bookkeeping.py
|
freezingsaddles/freezing-model
|
3bb03739d5bdff418bcf17707a52c9994c45e52f
|
[
"Apache-2.0"
] | 8
|
2018-01-19T14:36:05.000Z
|
2021-11-24T19:22:19.000Z
|
freezing/model/migrations/versions/41c3e58a61aa_bookkeeping.py
|
freezingsaddles/freezing-model
|
3bb03739d5bdff418bcf17707a52c9994c45e52f
|
[
"Apache-2.0"
] | 1
|
2018-10-28T16:09:51.000Z
|
2018-10-28T16:09:51.000Z
|
"""bookkeeping
Revision ID: 41c3e58a61aa
Revises: 17b73a90925d
Create Date: 2016-01-29 21:58:13.978603
"""
# revision identifiers, used by Alembic.
revision = "41c3e58a61aa"
down_revision = "17b73a90925d"
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the `detail_fetched` bookkeeping flag to the rides table."""
    detail_fetched = sa.Column(
        "detail_fetched", sa.Boolean, nullable=False, default=False
    )
    op.add_column("rides", detail_fetched)
def downgrade():
    """Drop the `detail_fetched` column added by this revision."""
    op.drop_column("rides", "detail_fetched")
| 18.04
| 87
| 0.720621
|
4a06cfac135b2b72f0f46068047a38a6889f2a25
| 33
|
py
|
Python
|
python/print/print/src/print/print.py
|
bg1bgst333/Sample
|
68e3a5c26c6d9bc1906ce0cca2fd586f0790fa52
|
[
"MIT"
] | 9
|
2016-12-22T20:24:09.000Z
|
2021-05-08T08:48:24.000Z
|
python/print/print/src/print/print.py
|
bg1bgst333/Sample
|
68e3a5c26c6d9bc1906ce0cca2fd586f0790fa52
|
[
"MIT"
] | 36
|
2018-08-16T06:43:36.000Z
|
2022-03-25T19:01:34.000Z
|
python/print/print/src/print/print.py
|
bg1bgst333/Sample
|
68e3a5c26c6d9bc1906ce0cca2fd586f0790fa52
|
[
"MIT"
] | 9
|
2016-09-03T02:57:31.000Z
|
2021-09-09T02:42:26.000Z
|
#!/usr/bin/python
# Use the function form of print: it behaves identically on Python 2
# (parentheses around a single argument) and is required on Python 3,
# where the original `print "ABCDE"` statement is a SyntaxError.
print("ABCDE")
| 8.25
| 17
| 0.666667
|
4a06cfcff23b4c046c45b3a7a948fb7198fa8158
| 3,056
|
py
|
Python
|
simple LSTM.py
|
55708r/ANCM-Mini-Project
|
4c66ef03e88c9cbfa5468192221f8c00c286d591
|
[
"MIT"
] | 2
|
2020-12-01T13:25:04.000Z
|
2022-01-21T02:20:30.000Z
|
simple LSTM.py
|
55708r/ANCM-Mini-Project
|
4c66ef03e88c9cbfa5468192221f8c00c286d591
|
[
"MIT"
] | null | null | null |
simple LSTM.py
|
55708r/ANCM-Mini-Project
|
4c66ef03e88c9cbfa5468192221f8c00c286d591
|
[
"MIT"
] | 1
|
2020-12-10T13:01:36.000Z
|
2020-12-10T13:01:36.000Z
|
# Simple LSTM demo: train a stacked LSTM on a random integer sequence and
# report RMSE.  The script intends to sweep dropout values, but see the
# NOTE(review) comments below — the sweep machinery is broken.
#Import the libraries
import math
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, LSTM
import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')
# Synthetic data: 100 random ints in [0, 10), one feature column.
features = (np.random.randint(10, size=(100, 1)))
print(features.shape)
# 75% of samples for training.
training_dataset_length = math.ceil(len(features) * .75)
print(training_dataset_length)
#Scale the all of the data to be values between 0 and 1
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(features)
train_data = scaled_data[0:training_dataset_length , : ]
#Splitting the data: sliding windows of 10 steps predict the next value.
x_train=[]
y_train = []
for i in range(10, len(train_data)):
    x_train.append(train_data[i-10:i,0])
    y_train.append(train_data[i,0])
#Convert to numpy arrays
x_train, y_train = np.array(x_train), np.array(y_train)
#Reshape the data into 3-D array (samples, timesteps, features) for Keras.
x_train = np.reshape(x_train, (x_train.shape[0],x_train.shape[1],1))
from keras.layers import Dropout
# Initialising the RNN
model = Sequential()
drop = 0
drops = []
# NOTE(review): np.insert returns a NEW array and the result is discarded,
# so `drops` remains an empty list after this loop; only `drop` advances
# to its final value (~0.9).  Presumably `drops.append(drop)` was intended.
for l in range (0,9):
    np.insert(drops,l,drop)
    drop = drop +0.1
# NOTE(review): the model is built once, outside any sweep loop, so every
# Dropout layer below uses the single final value of `drop` (~0.9), not a
# swept range — confirm against the plotting code at the bottom.
model.add(LSTM(units = 50, return_sequences = True, input_shape = (x_train.shape[1], 1)))
model.add(Dropout(drop))
# Adding a second LSTM layer and Dropout layer
model.add(LSTM(units = 50, return_sequences = True))
model.add(Dropout(drop))
# Adding a third LSTM layer and Dropout layer
model.add(LSTM(units = 50, return_sequences = True))
model.add(Dropout(drop))
# Adding a fourth LSTM layer and and Dropout layer
model.add(LSTM(units = 50, return_sequences= True))
model.add(Dropout(drop))
## Adding the output layer
# For Full connection layer we use dense
# As the output is 1D so we use unit=1
model.add(Dense(units = 1))
##Adding linear layer with dropout
#model.add(units = )
#check model properties
model.summary()
#compile and fit the model on 30 epochs
model.compile(optimizer = 'adam', loss = 'mean_squared_error')
model.fit(x_train, y_train, epochs = 30, batch_size = 50)
#Test data set: include the last 10 training points so the first test
#window has a full 10-step history.
test_data = scaled_data[training_dataset_length - 10: , : ]
#splitting the x_test and y_test data sets
x_test = []
y_test = features[training_dataset_length : , : ]
for i in range(10,len(test_data)):
    x_test.append(test_data[i-10:i,0])
#Convert x_test to a numpy array
x_test = np.array(x_test)
#Reshape the data into 3-D array
x_test = np.reshape(x_test, (x_test.shape[0],x_test.shape[1],1))
#check predicted values
predictions = model.predict(x_test)
#Undo scaling
predictions = scaler.inverse_transform(predictions)
#Calculate RMSE score
rmse = np.sqrt(np.mean(((predictions- y_test)**2)))
rmses = []
# NOTE(review): `l` is the stale loop variable from the earlier range loop
# (value 8); only a single rmse is ever inserted.
rmses = np.insert(rmses,l,rmse)
## PLOTTING
#Import plotting package
import matplotlib.pyplot as plt
#plot rmse to dropout value
# NOTE(review): `drops` is empty (see above) while `rmses` has one entry,
# so this plot call will fail or draw nothing — verify before relying on it.
plt.plot(drops,rmses)
plt.xlabel('dropout values')
plt.ylabel('Root mean square error (rsme)')
| 26.807018
| 91
| 0.703534
|
4a06d07806e754ba517f028ac5aa705c8eab3efe
| 3,871
|
py
|
Python
|
libs/rtm_dispatcher.py
|
Arthuchaut/clue
|
95f215f28adc5550e0c5a5f1470f36f07214bffe
|
[
"MIT"
] | null | null | null |
libs/rtm_dispatcher.py
|
Arthuchaut/clue
|
95f215f28adc5550e0c5a5f1470f36f07214bffe
|
[
"MIT"
] | null | null | null |
libs/rtm_dispatcher.py
|
Arthuchaut/clue
|
95f215f28adc5550e0c5a5f1470f36f07214bffe
|
[
"MIT"
] | null | null | null |
"""The Slack bot commands dispatcher class.
"""
from typing import Dict, List, Awaitable, Any
import asyncio
import logging
import slack
from config.custom_type import (
EventLoop,
RTMClient,
WebClient
)
class RTMDispatcher:
    """Dispatch Slack RTM `message` events to registered command tasks."""

    def __init__(self, token: str) -> None:
        """The RTMDispatcher constructor.

        Args:
            token: Slack API token used by the RTM client.
        """
        # Maps each command word to the coroutine handling it.
        self.__stack: Dict[str, Awaitable[Dict[str, Any]]] = {}
        self.__loop: EventLoop = asyncio.get_event_loop()
        self.__rtm_client: RTMClient = slack.RTMClient(
            token=token,
            run_async=True
        )

    def impl(
        self,
        *,
        command: str,
        task: Awaitable[Dict[str, Any]]
    ) -> None:
        """The task stacking method.

        Create a new task associated with a specific command.
        When the command is sent from the slack emitter,
        the task will be called.

        Args:
            command (str): The command to listen.
            task (Awaitable[Dict[str, Any]]): The task to call.

        Raises:
            RTMDispatcherStackKeyError:
                If the command is already in the stack.
        """
        # Fix: membership test directly on the dict; `.keys()` was redundant.
        if command in self.__stack:
            raise RTMDispatcherStackKeyError(
                f'Command `{command}` already in the stack.'
            )
        self.__stack[command] = task

    def run(self) -> None:
        """Run the RTM Slack API asynchronously and listen for messages.

        When a message event is caught, this method dispatches it.
        """
        slack.RTMClient.on(event='message', callback=self.__dispatch)
        self.__loop.run_until_complete(self.__rtm_client.start())

    async def __dispatch(self, **kwargs: Dict[str, Any]) -> None:
        """The handler of the RTM message event.

        Tests whether the first word of the message is the bot mention;
        if so, awaits the task registered for the second word (the command).

        Args:
            **kwargs (Dict[str, Any]): Data from the RTM event handler.
                Notable keys: `data` (message payload — `text`, `user`,
                `channel`, `ts`, team ids...), `rtm_client` (RTMClient
                websocket pipe), `web_client` (WebClient Slack API).
        """
        web_client: WebClient = kwargs['web_client']
        argv: List[str] = kwargs['data']['text'].split()
        self_id: str = (await web_client.api_call('auth.test')).data['user_id']
        if argv[0] == f'<@{self_id}>':
            kwargs['data']['argv'] = argv
            kwargs['data']['self_id'] = self_id
            command: str = argv[1] if len(argv) > 1 else None
            # Fix: look the task up first instead of wrapping the await in
            # `except KeyError` — the old form silently swallowed KeyErrors
            # raised *inside* the task itself.
            task = self.__stack.get(command)
            if task is None:
                logging.debug(f'Command `{command}` doesn\'t exist in stack.')
            else:
                await task(**kwargs)
class RTMDispatcherStackKeyError(Exception):
    """Raised when a command is already registered on the dispatcher stack.

    ``Exception.__init__`` already stores the message in ``args``, so no
    explicit constructor is required; behavior is identical to passing the
    message through ``super().__init__``.
    """
| 31.217742
| 79
| 0.573754
|
4a06d0821d5a4970caddf87d61fc7ebe786a6ee1
| 3,229
|
py
|
Python
|
models/addatt_RNN.py
|
caokyhieu/DELAFO
|
1c84e5f4ef9690e453973b57d1f0feb194a55a8c
|
[
"MIT"
] | 17
|
2020-05-04T08:54:14.000Z
|
2022-03-28T08:43:09.000Z
|
models/addatt_RNN.py
|
caokyhieu/DELAFO
|
1c84e5f4ef9690e453973b57d1f0feb194a55a8c
|
[
"MIT"
] | 2
|
2020-11-03T01:07:24.000Z
|
2021-08-06T16:56:18.000Z
|
models/addatt_RNN.py
|
caokyhieu/DELAFO
|
1c84e5f4ef9690e453973b57d1f0feb194a55a8c
|
[
"MIT"
] | 10
|
2020-06-11T15:59:32.000Z
|
2022-03-22T14:00:04.000Z
|
from keras.layers import Input, Activation, Dense,Flatten, BatchNormalization, Add, Conv2D
from keras.layers import MaxPooling2D,AveragePooling2D,Permute,Reshape,LSTM,Lambda,GRU,Bidirectional,BatchNormalization,Concatenate
from keras import regularizers
from keras.optimizers import Adam
from models.attention_layer import *
from keras.models import Model
from utils import sharpe_ratio_loss,sharpe_ratio
###############################
# additive attention RNN models
###############################
def _build_add_att_model(params, recurrent_cls):
    """Shared builder for the additive-attention RNN portfolio models.

    The LSTM and GRU builders were byte-for-byte duplicates except for the
    recurrent layer class, so the construction lives here once.

    Args:
        params: dict with keys 'units', 'activation', 'l2', 'l2_1',
            'l2_2' and 'input_shape' (tickers, timesteps, features).
        recurrent_cls: Keras recurrent layer class (LSTM or GRU).

    Returns:
        A compiled Keras Model optimizing the Sharpe-ratio loss.
    """
    units = params['units']
    activation = params['activation']
    reg1 = params['l2']
    reg2 = params['l2_1']
    # NOTE(review): the 'l2_2' key name suggests a regularization weight but
    # the value is used as the Adam learning rate — confirm upstream config.
    lr = params['l2_2']
    input_shape = params['input_shape']
    ts = input_shape[1]
    tickers = input_shape[0]

    input = Input(shape=input_shape)
    # Move the time axis first, then flatten (tickers, features) per step.
    reshape_inp = Lambda(lambda x: K.permute_dimensions(x, pattern=(0, 2, 1, 3)))(input)
    reshape_inp = Reshape((ts, -1))(reshape_inp)
    batch_norm = BatchNormalization()(reshape_inp)
    recurrent_layer = recurrent_cls(units=units,
                                    activation=activation,
                                    kernel_regularizer=regularizers.l2(reg1))(batch_norm)
    batch_norm_2 = BatchNormalization()(recurrent_layer)
    ##ATTENTION LAYER: additive attention over the normalized inputs,
    ##queried by the recurrent state.
    contxt_layer = AdditiveAttentionLayer(name='Att', latent_dim=32,
                                          kernel_regularizer=regularizers.l2(0.01))([batch_norm, batch_norm_2])
    merge = Concatenate()([batch_norm_2, contxt_layer])
    out = Dense(units, kernel_regularizer=regularizers.l2(reg2), activation='tanh')(merge)
    batch_norm_3 = BatchNormalization()(out)
    out = Dense(tickers, kernel_regularizer=regularizers.l2(reg2))(batch_norm_3)
    out = Activation('sigmoid')(out)

    model = Model([input], [out])
    optimizer = Adam(lr=lr)
    model.compile(loss=sharpe_ratio_loss, optimizer=optimizer, metrics=[sharpe_ratio])
    return model


def build_add_att_lstm_model(params):
    """Build the additive-attention LSTM model (see _build_add_att_model)."""
    return _build_add_att_model(params, LSTM)


def build_add_att_gru_model(params):
    """Build the additive-attention GRU model (see _build_add_att_model)."""
    return _build_add_att_model(params, GRU)
| 33.989474
| 135
| 0.69681
|
4a06d0862fd3797185603a3f77a10e4abd1f28f9
| 6,319
|
py
|
Python
|
phase_diagrammer/lennard-jones_diameter_overlay_mono.py
|
kolbt/whingdingdilly
|
4c17b594ebc583750fe7565d6414f08678ea7882
|
[
"BSD-3-Clause"
] | 4
|
2017-09-04T14:36:57.000Z
|
2022-03-28T23:24:58.000Z
|
phase_diagrammer/lennard-jones_diameter_overlay_mono.py
|
kolbt/whingdingdilly
|
4c17b594ebc583750fe7565d6414f08678ea7882
|
[
"BSD-3-Clause"
] | null | null | null |
phase_diagrammer/lennard-jones_diameter_overlay_mono.py
|
kolbt/whingdingdilly
|
4c17b594ebc583750fe7565d6414f08678ea7882
|
[
"BSD-3-Clause"
] | null | null | null |
'''
# This is an 80 character line #
This file:
1. Reads in data for diameter from textfiles
2. Computes the LJ force for given distances
3. Plots this data
'''
# Imports and loading the .gsd file
import sys
import numpy as np
import math
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
# Function that'll grab my parameters from the filenames
def getFromTxt(fname, first, last):
    '''Return the float embedded in *fname* between substrings *first* and *last*.'''
    begin = fname.index(first) + len(first)
    stop = fname.index(last, begin)
    return float(fname[begin:stop])
def ljForce(r):
    '''Return the Lennard-Jones force at center-to-center distance r.

    F(r) = 4 * epsilon * (12 * sigma**12 / r**13 - 6 * sigma**6 / r**7)

    Fix: the attractive term previously used sigma**12 instead of the
    correct sigma**6; the bug was numerically masked because sigma == 1.0,
    but the formula is now right for any sigma.
    '''
    repulsive = 12 * (sigma ** 12) * (r ** -13)
    attractive = 6 * (sigma ** 6) * (r ** -7)
    return 4 * epsilon * (repulsive - attractive)

# LJ length and energy scales used by ljForce (module-level on purpose).
sigma = 1.0
# The old model
epsilon = 1.0
# Grab the command line arguments
txtFiles = sys.argv[2:]  # start at 2 to avoid early arguments
# Parse out the activities and fractions from the filenames
# NOTE(review): np.int is deprecated and removed in NumPy >= 1.24;
# these should become plain `int` when the code is next touched.
peA = np.zeros_like(txtFiles, dtype=np.int)
peB = np.zeros_like(txtFiles, dtype=np.int)
xA = np.zeros_like(txtFiles, dtype=np.float64)
ep = np.zeros_like(txtFiles, dtype=np.int)
# First try filenames containing an `_ep` field; fall back to the older
# naming scheme (xa ends at .txt) with epsilon defaulted to 1.
try:
    for i in range(0, len(txtFiles)):
        peA[i] = getFromTxt(txtFiles[i], "pa", "_pb")
        peB[i] = getFromTxt(txtFiles[i], "pb", "_xa")
        xA[i] = getFromTxt(txtFiles[i], "xa", "_ep")
        ep[i] = getFromTxt(txtFiles[i], "ep", ".txt")
except:
    for i in range(0, len(txtFiles)):
        peA[i] = getFromTxt(txtFiles[i], "pa", "_pb")
        peB[i] = getFromTxt(txtFiles[i], "pb", "_xa")
        xA[i] = getFromTxt(txtFiles[i], "xa", ".txt")
        ep[i] = 1
try:
    peR = peA.astype(float) / peB.astype(float)  # Compute activity ratio
except:
    # Division failed (e.g. peB all zeros): fall back to zeros.
    peR = np.zeros(len(txtFiles))
# Instantiate arrays I'd like to plot
phaseSep = np.zeros(len(txtFiles), dtype=np.int)
ALL = np.zeros(len(txtFiles), dtype=np.float64)
AA = np.zeros(len(txtFiles), dtype=np.float64)
AB = np.zeros(len(txtFiles), dtype=np.float64)
BB = np.zeros(len(txtFiles), dtype=np.float64)
phiAvg = np.zeros(len(txtFiles), dtype=np.float64)
# Loop through each data series
for i in range(0, len(txtFiles)):
    # Import data into arrays (one column per quantity in each text file)
    tst, \
    gasA, \
    gasB, \
    gasTot, \
    denA, \
    denB, \
    denTot, \
    lgClust, \
    MCS, \
    sigALL, \
    sigAA, \
    sigAB, \
    sigBB, \
    phiEff, \
    lC_Area, \
    totC_Area, \
    lC_density, \
    denDen, \
    gasDen = np.loadtxt(txtFiles[i], skiprows=1, unpack=True)
    # Requirement to be consider phase separated
    partNum = gasTot[0]  # everything starts in a gas
    frames = len(tst)
    sizeMin = partNum * 0.25  # 25% of particles in single cluster
    timeMin = frames * 0.50  # cluster present for half of all frames
    count = 0
    # Get last 10% of simulation
    numAvg = (0.10 * len(lgClust))
    avgTime = len(lgClust) - numAvg
    for j in range(0, len(lgClust)):
        # Accumulate diameters/packing over the final 10% of frames only
        if j >= avgTime:
            ALL[i] += sigALL[j]
            AA[i] += sigAA[j]
            AB[i] += sigAB[j]
            BB[i] += sigBB[j]
            phiAvg[i] += phiEff[j]
        if lgClust[j] >= sizeMin:
            count += 1
    # Average diameter values
    ALL[i] /= numAvg
    AA[i] /= numAvg
    AB[i] /= numAvg
    BB[i] /= numAvg
    phiAvg[i] /= numAvg
    if count >= timeMin:
        # NOTE(review): this rebinds the phaseSep ARRAY to the scalar 1,
        # discarding per-file results; presumably `phaseSep[i] = 1` was
        # intended.  phaseSep is not used below, so plots are unaffected.
        phaseSep = 1
# Now everything is in an array, sort them (for lines)
# NOTE(review): hand-rolled O(n^2) co-sort of all arrays by descending peA;
# np.argsort on peA would do this in one pass.
for i in range(0, len(txtFiles)):
    for j in range(0, len(txtFiles)):
        # Values need to be swapped
        if peA[i] > peA[j] and i < j:
            # Swap A activity
            tmp = peA[j]
            peA[j] = peA[i]
            peA[i] = tmp
            # Swap total diameter
            tmp = ALL[j]
            ALL[j] = ALL[i]
            ALL[i] = tmp
            # Swap AA diameter
            tmp = AA[j]
            AA[j] = AA[i]
            AA[i] = tmp
            # Swap AB diameter
            tmp = AB[j]
            AB[j] = AB[i]
            AB[i] = tmp
            # Swap BB diameter
            tmp = BB[j]
            BB[j] = BB[i]
            BB[i] = tmp
            # Swap phi
            tmp = phiAvg[j]
            phiAvg[j] = phiAvg[i]
            phiAvg[i] = tmp
# Plot the data
plt.plot(peA, ALL, marker='o', c='k', label='Emergent Diameter')
# Get theory on fine r-scale: invert by sampling force over the observed
# diameter range
ys = np.arange(min(ALL), max(ALL), 0.001)
xs = np.zeros_like(ys)
for i in range(0, len(xs)):
    xs[i] = ljForce(ys[i])
# Plot theory curves for Pe, 2Pe, 3Pe balancing the LJ force
plt.plot(xs, ys, c='g', label=r'$Pe=F_{LJ}$')
plt.plot(0.5 * xs, ys, c='r', label=r'$2Pe=F_{LJ}$')
plt.plot(0.3 * xs, ys, c='b', label=r'$3Pe=F_{LJ}$')
# Axes limits
plt.xlim(min(peA), max(peA))
plt.ylim(min(ALL), max(ALL))
# Labels
plt.xlabel(r'Activity $(Pe)$')
plt.ylabel(r'Center-to-center Distance $(\sigma_{Eff}$)')
# Get information for legend
plt.legend()
# Plot :)
plt.savefig('data_LJ_overlay_monodisperse.png', bbox_inches='tight', dpi=1000)
plt.close()
# # This is an example that works for using multiple axes
# # Instantiate figure
# fig = plt.figure()
# ax1 = fig.add_subplot(111)
# ax2 = ax1.twiny()
# fig.subplots_adjust(bottom=0.2)
#
# # Plot the data
# data = ax1.plot(peA, ALL, c='k', label='Emergent Diameter')
#
# # Get theory on fine r-scale
# ys = np.arange(min(ALL), max(ALL), 0.001)
# xs = np.zeros_like(ys)
# for i in range(0, len(xs)):
# xs[i] = ljForce(ys[i])
#
# # Plot theory
# first = ax1.plot(xs, ys, c='g', label=r'$Pe=F_{LJ}$')
# second = ax2.plot(xs, ys, c='r', label=r'$2Pe=F_{LJ}$')
# third = ax1.plot(0.45 * xs, ys, c='b', label=r'$Pe2=F_{LJ}$')
#
# # Additional plot restrictions
#
# # Axes limits
# ax1.set_xlim(min(peA), max(peA))
# ax2.set_xlim(2 * min(peA), 2 * max(peA))
# plt.ylim(min(ALL), max(ALL))
# # Labels
# ax1.set_xlabel(r'Activity $(Pe)$')
# ax2.set_xlabel(r'Twice Activity $(2Pe)$')
# plt.ylabel(r'Center-to-center Distance $(\sigma_{Eff}$)')
# # Move second axis to bottom
# ax2.xaxis.set_ticks_position("bottom")
# ax2.xaxis.set_label_position("bottom")
# ax2.spines["bottom"].set_position(("axes", -0.15))
# # Get information for legend
# lns = data + first + second + third
# labs = [l.get_label() for l in lns]
# plt.legend(lns, labs)
# # Plot :)
# plt.show()
| 27.120172
| 97
| 0.582212
|
4a06d0d54b31fc7d8aabfed18cf80960d0026b9e
| 19,541
|
py
|
Python
|
ax/service/utils/with_db_settings_base.py
|
trsvchn/Ax
|
0b430641c6b33920757dd09ae4318ea487fb4136
|
[
"MIT"
] | null | null | null |
ax/service/utils/with_db_settings_base.py
|
trsvchn/Ax
|
0b430641c6b33920757dd09ae4318ea487fb4136
|
[
"MIT"
] | null | null | null |
ax/service/utils/with_db_settings_base.py
|
trsvchn/Ax
|
0b430641c6b33920757dd09ae4318ea487fb4136
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import time
from logging import INFO
from typing import List, Optional, Tuple, Type
from ax.core.base_trial import BaseTrial
from ax.core.experiment import Experiment
from ax.core.generator_run import GeneratorRun
from ax.exceptions.core import UnsupportedError
from ax.modelbridge.generation_strategy import GenerationStrategy
from ax.utils.common.executils import retry_on_exception
from ax.utils.common.logger import _round_floats_for_logging, get_logger
from ax.utils.common.typeutils import not_none
RETRY_EXCEPTION_TYPES: Tuple[Type[Exception], ...] = ()
try: # We don't require SQLAlchemy by default.
from ax.storage.sqa_store.db import init_engine_and_session_factory
from ax.storage.sqa_store.decoder import Decoder
from ax.storage.sqa_store.encoder import Encoder
from ax.storage.sqa_store.load import (
_get_experiment_id,
_get_generation_strategy_id,
_load_experiment,
_load_generation_strategy_by_experiment_name,
)
from ax.storage.sqa_store.save import (
_save_or_update_trials,
_save_experiment,
_save_generation_strategy,
_update_generation_strategy,
update_properties_on_experiment,
)
from ax.storage.sqa_store.sqa_config import SQAConfig
from ax.storage.sqa_store.structs import DBSettings
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm.exc import StaleDataError
# We retry on `OperationalError` if saving to DB.
RETRY_EXCEPTION_TYPES = (OperationalError, StaleDataError)
except ModuleNotFoundError: # pragma: no cover
DBSettings = None
Decoder = None
Encoder = None
SQAConfig = None
STORAGE_MINI_BATCH_SIZE = 50
LOADING_MINI_BATCH_SIZE = 10000
logger = get_logger(__name__)
class WithDBSettingsBase:
"""Helper class providing methods for saving changes made to an experiment
if `db_settings` property is set to a non-None value on the instance.
"""
_db_settings: Optional[DBSettings] = None
def __init__(
self,
db_settings: Optional[DBSettings] = None,
logging_level: int = INFO,
suppress_all_errors: bool = False,
) -> None:
if db_settings and (not DBSettings or not isinstance(db_settings, DBSettings)):
raise ValueError(
"`db_settings` argument should be of type ax.storage.sqa_store."
f"(Got: {db_settings} of type {type(db_settings)}. "
"structs.DBSettings. To use `DBSettings`, you will need SQLAlchemy "
"installed in your environment (can be installed through pip)."
)
self._db_settings = db_settings
self._suppress_all_errors = suppress_all_errors
if self.db_settings_set:
init_engine_and_session_factory(
creator=self.db_settings.creator, url=self.db_settings.url
)
logger.setLevel(logging_level)
@property
def db_settings_set(self) -> bool:
"""Whether non-None DB settings are set on this instance."""
return self._db_settings is not None
@property
def db_settings(self) -> DBSettings:
"""DB settings set on this instance; guaranteed to be non-None."""
if self._db_settings is None:
raise ValueError("No DB settings are set on this instance.")
return not_none(self._db_settings)
def _get_experiment_and_generation_strategy_db_id(
self, experiment_name: str
) -> Tuple[Optional[int], Optional[int]]:
"""Retrieve DB ids of experiment by the given name and the associated
generation strategy. Each ID is None if corresponding object is not
found.
"""
if not self.db_settings_set:
return None, None
exp_id = _get_experiment_id(
experiment_name=experiment_name, config=self.db_settings.decoder.config
)
if not exp_id:
return None, None
gs_id = _get_generation_strategy_id(
experiment_name=experiment_name, decoder=self.db_settings.decoder
)
return exp_id, gs_id
def _maybe_save_experiment_and_generation_strategy(
self, experiment: Experiment, generation_strategy: GenerationStrategy
) -> Tuple[bool, bool]:
"""If DB settings are set on this `WithDBSettingsBase` instance, checks
whether given experiment and generation strategy are already saved and
saves them, if not.
Returns:
Tuple of two booleans: whether experiment was saved in the course of
this function's execution and whether generation strategy was
saved.
"""
saved_exp, saved_gs = False, False
if self.db_settings_set:
if experiment._name is None:
raise ValueError(
"Experiment must specify a name to use storage functionality."
)
exp_name = not_none(experiment.name)
exp_id, gs_id = self._get_experiment_and_generation_strategy_db_id(
experiment_name=exp_name
)
if exp_id: # Experiment in DB.
logger.info(f"Experiment {exp_name} is in DB, updating it.")
self._save_experiment_to_db_if_possible(experiment=experiment)
saved_exp = True
else: # Experiment not yet in DB.
logger.info(f"Experiment {exp_name} is not yet in DB, storing it.")
self._save_experiment_to_db_if_possible(experiment=experiment)
saved_exp = True
if gs_id and generation_strategy._db_id != gs_id:
raise UnsupportedError(
"Experiment was associated with generation strategy in DB, "
f"but a new generation strategy {generation_strategy.name} "
"was provided. To use the generation strategy currently in DB,"
" instantiate scheduler via: `Scheduler.from_stored_experiment`."
)
if not gs_id or generation_strategy._db_id is None:
# There is no GS associated with experiment or the generation
# strategy passed in is different from the one associated with
# experiment currently.
logger.info(
f"Generation strategy {generation_strategy.name} is not yet in DB, "
"storing it."
)
# If generation strategy does not yet have an experiment attached,
# attach the current experiment to it, as otherwise it will not be
# possible to retrieve by experiment name.
if generation_strategy._experiment is None:
generation_strategy.experiment = experiment
self._save_generation_strategy_to_db_if_possible(
generation_strategy=generation_strategy
)
saved_gs = True
return saved_exp, saved_gs
    def _load_experiment_and_generation_strategy(
        self,
        experiment_name: str,
        reduced_state: bool = False,
    ) -> Tuple[Optional[Experiment], Optional[GenerationStrategy]]:
        """Loads experiment and its corresponding generation strategy from database
        if DB settings are set on this `WithDBSettingsBase` instance.

        Args:
            experiment_name: Name of the experiment to load, used as unique
                identifier by which to find the experiment.
            reduced_state: Whether to load experiment and generation strategy
                with a slightly reduced state (without abandoned arms on experiment
                and model state on each generator run in experiment and generation
                strategy; last generator run on generation strategy will still
                have its model state).

        Returns:
            - Tuple of `None` and `None` if `DBSettings` are set and no experiment
              exists by the given name.
            - Tuple of `Experiment` and `None` if experiment exists but does not
              have a generation strategy attached to it.
            - Tuple of `Experiment` and `GenerationStrategy` if experiment exists
              and has a generation strategy attached to it.

        Raises:
            ValueError: If no DB settings are set, if the loaded object is not
                a plain `Experiment`, or if generation-strategy loading fails
                for a reason other than the experiment simply lacking one.
        """
        if not self.db_settings_set:
            raise ValueError("Cannot load from DB in absence of DB settings.")
        logger.info(
            "Loading experiment and generation strategy (with reduced state: "
            f"{reduced_state})..."
        )
        start_time = time.time()
        experiment = _load_experiment(
            experiment_name,
            decoder=self.db_settings.decoder,
            reduced_state=reduced_state,
            load_trials_in_batches_of_size=LOADING_MINI_BATCH_SIZE,
        )
        if not isinstance(experiment, Experiment) or experiment.is_simple_experiment:
            raise ValueError("Service API only supports `Experiment`.")
        logger.info(
            f"Loaded experiment {experiment_name} in "
            f"{_round_floats_for_logging(time.time() - start_time)} seconds, "
            f"loading trials in mini-batches of {LOADING_MINI_BATCH_SIZE}."
        )
        try:
            start_time = time.time()
            generation_strategy = _load_generation_strategy_by_experiment_name(
                experiment_name=experiment_name,
                decoder=self.db_settings.decoder,
                experiment=experiment,
                reduced_state=reduced_state,
            )
            logger.info(
                f"Loaded generation strategy for experiment {experiment_name} in "
                f"{_round_floats_for_logging(time.time() - start_time)} seconds."
            )
        except ValueError as err:
            # A missing generation strategy is an expected state, signalled by
            # this specific message; anything else is a real error.
            if "does not have a generation strategy" in str(err):
                return experiment, None
            raise  # `ValueError` here could signify more than just absence of GS.
        return experiment, generation_strategy
    def _save_experiment_to_db_if_possible(self, experiment: Experiment) -> bool:
        """Saves attached experiment and generation strategy if DB settings are
        set on this `WithDBSettingsBase` instance.

        Args:
            experiment: Experiment to save new trials in DB.

        Returns:
            bool: Whether the experiment was saved.
        """
        if self.db_settings_set:
            # NOTE(review): this calls a module-level function with the same
            # name as this method — it is not among the imports visible here,
            # so it is presumably defined elsewhere in this module; confirm.
            _save_experiment_to_db_if_possible(
                experiment=experiment,
                encoder=self.db_settings.encoder,
                decoder=self.db_settings.decoder,
                suppress_all_errors=self._suppress_all_errors,
            )
            return True
        return False
def _save_or_update_trials_and_generation_strategy_if_possible(
self,
experiment: Experiment,
trials: List[BaseTrial],
generation_strategy: GenerationStrategy,
new_generator_runs: List[GeneratorRun],
) -> None:
"""Saves new trials (and updates existing ones) on given experiment
and updates the given generation strategy, if DB settings are set on
this `WithDBSettingsBase` instance.
Args:
experiment: Experiment, on which to save new trials in DB.
trials: Newly added or updated trials to save or update in DB.
generation_strategy: Generation strategy to update in DB.
new_generator_runs: Generator runs to add to generation strategy.
"""
logger.debug(f"Saving or updating {len(trials)} trials in DB.")
self._save_or_update_trials_in_db_if_possible(
experiment=experiment, trials=trials
)
logger.debug(
"Updating generation strategy in DB with "
f"{len(new_generator_runs)} generator runs."
)
self._update_generation_strategy_in_db_if_possible(
generation_strategy=generation_strategy,
new_generator_runs=new_generator_runs,
)
return
# No retries needed, covered in `self._save_or_update_trials_in_db_if_possible`
def _save_or_update_trial_in_db_if_possible(
self,
experiment: Experiment,
trial: BaseTrial,
) -> bool:
"""Saves new trial on given experiment if DB settings are set on this
`WithDBSettingsBase` instance.
Args:
experiment: Experiment, on which to save new trial in DB.
trials: Newly added trial to save.
Returns:
bool: Whether the trial was saved.
"""
return self._save_or_update_trials_in_db_if_possible(
experiment=experiment,
trials=[trial],
)
def _save_or_update_trials_in_db_if_possible(
self,
experiment: Experiment,
trials: List[BaseTrial],
) -> bool:
"""Saves new trials or update existing trials on given experiment if DB
settings are set on this `WithDBSettingsBase` instance.
Args:
experiment: Experiment, on which to save new trials in DB.
trials: Newly added trials to save.
Returns:
bool: Whether the trials were saved.
"""
if self.db_settings_set:
_save_or_update_trials_in_db_if_possible(
experiment=experiment,
trials=trials,
encoder=self.db_settings.encoder,
decoder=self.db_settings.decoder,
suppress_all_errors=self._suppress_all_errors,
)
return True
return False
def _save_generation_strategy_to_db_if_possible(
self, generation_strategy: GenerationStrategy, suppress_all_errors: bool = False
) -> bool:
"""Saves given generation strategy if DB settings are set on this
`WithDBSettingsBase` instance.
Args:
generation_strategy: Generation strategy to save in DB.
Returns:
bool: Whether the generation strategy was saved.
"""
if self.db_settings_set:
_save_generation_strategy_to_db_if_possible(
generation_strategy=generation_strategy,
encoder=self.db_settings.encoder,
decoder=self.db_settings.decoder,
suppress_all_errors=self._suppress_all_errors,
)
return True
return False
def _update_generation_strategy_in_db_if_possible(
self,
generation_strategy: GenerationStrategy,
new_generator_runs: List[GeneratorRun],
) -> bool:
"""Updates the given generation strategy with new generator runs (and with
new current generation step if applicable) if DB settings are set
on this `WithDBSettingsBase` instance.
Args:
generation_strategy: Generation strategy to update in DB.
new_generator_runs: New generator runs of this generation strategy
since its last save.
Returns:
bool: Whether the experiment was saved.
"""
if self.db_settings_set:
_update_generation_strategy_in_db_if_possible(
generation_strategy=generation_strategy,
new_generator_runs=new_generator_runs,
encoder=self.db_settings.encoder,
decoder=self.db_settings.decoder,
suppress_all_errors=self._suppress_all_errors,
)
return True
return False
def _update_experiment_properties_in_db(
self,
experiment_with_updated_properties: Experiment,
) -> bool:
exp = experiment_with_updated_properties
if self.db_settings_set:
_update_experiment_properties_in_db(
experiment_with_updated_properties=exp,
sqa_config=self.db_settings.encoder.config,
suppress_all_errors=self._suppress_all_errors,
)
return True
return False
# ------------- Utils for storage that assume `DBSettings` are provided --------
@retry_on_exception(
    retries=3,
    default_return_on_suppression=False,
    exception_types=RETRY_EXCEPTION_TYPES,
)
def _save_experiment_to_db_if_possible(
    experiment: Experiment,
    encoder: Encoder,
    decoder: Decoder,
    suppress_all_errors: bool,
) -> None:
    """Save `experiment` to the DB and log how long the save took.

    Retries up to 3 times on `RETRY_EXCEPTION_TYPES` via the decorator.
    `suppress_all_errors` is not read in the body; presumably it is consumed
    by `retry_on_exception` — TODO confirm.
    """
    start_time = time.time()
    _save_experiment(
        experiment,
        encoder=encoder,
        decoder=decoder,
    )
    logger.debug(
        f"Saved experiment {experiment.name} in "
        f"{_round_floats_for_logging(time.time() - start_time)} seconds."
    )
@retry_on_exception(
    retries=3,
    default_return_on_suppression=False,
    exception_types=RETRY_EXCEPTION_TYPES,
)
def _save_or_update_trials_in_db_if_possible(
    experiment: Experiment,
    trials: List[BaseTrial],
    encoder: Encoder,
    decoder: Decoder,
    suppress_all_errors: bool,
) -> None:
    """Save/update `trials` on `experiment` in mini-batches and log the timing.

    Retries up to 3 times on `RETRY_EXCEPTION_TYPES` via the decorator.
    `suppress_all_errors` is not read in the body; presumably it is consumed
    by `retry_on_exception` — TODO confirm.
    """
    start_time = time.time()
    _save_or_update_trials(
        experiment=experiment,
        trials=trials,
        encoder=encoder,
        decoder=decoder,
        batch_size=STORAGE_MINI_BATCH_SIZE,
    )
    logger.debug(
        f"Saved or updated trials {[trial.index for trial in trials]} in "
        f"{_round_floats_for_logging(time.time() - start_time)} seconds "
        f"in mini-batches of {STORAGE_MINI_BATCH_SIZE}."
    )
@retry_on_exception(
    retries=3,
    default_return_on_suppression=False,
    exception_types=RETRY_EXCEPTION_TYPES,
)
def _save_generation_strategy_to_db_if_possible(
    generation_strategy: GenerationStrategy,
    encoder: Encoder,
    decoder: Decoder,
    suppress_all_errors: bool,
) -> None:
    """Save `generation_strategy` to the DB and log how long the save took.

    Retries up to 3 times on `RETRY_EXCEPTION_TYPES` via the decorator.
    `suppress_all_errors` is not read in the body; presumably it is consumed
    by `retry_on_exception` — TODO confirm.
    """
    start_time = time.time()
    _save_generation_strategy(
        generation_strategy=generation_strategy,
        encoder=encoder,
        decoder=decoder,
    )
    logger.debug(
        f"Saved generation strategy {generation_strategy.name} in "
        f"{_round_floats_for_logging(time.time() - start_time)} seconds."
    )
@retry_on_exception(
    retries=3,
    default_return_on_suppression=False,
    exception_types=RETRY_EXCEPTION_TYPES,
)
def _update_generation_strategy_in_db_if_possible(
    generation_strategy: GenerationStrategy,
    new_generator_runs: List[GeneratorRun],
    encoder: Encoder,
    decoder: Decoder,
    suppress_all_errors: bool,
) -> None:
    """Append `new_generator_runs` to the stored generation strategy in
    mini-batches and log the timing.

    Retries up to 3 times on `RETRY_EXCEPTION_TYPES` via the decorator.
    `suppress_all_errors` is not read in the body; presumably it is consumed
    by `retry_on_exception` — TODO confirm.
    """
    start_time = time.time()
    _update_generation_strategy(
        generation_strategy=generation_strategy,
        generator_runs=new_generator_runs,
        encoder=encoder,
        decoder=decoder,
        batch_size=STORAGE_MINI_BATCH_SIZE,
    )
    logger.debug(
        f"Updated generation strategy {generation_strategy.name} in "
        f"{_round_floats_for_logging(time.time() - start_time)} seconds in "
        f"mini-batches of {STORAGE_MINI_BATCH_SIZE} generator runs."
    )
@retry_on_exception(
    retries=3,
    default_return_on_suppression=False,
    exception_types=RETRY_EXCEPTION_TYPES,
)
def _update_experiment_properties_in_db(
    experiment_with_updated_properties: Experiment,
    sqa_config: SQAConfig,
    suppress_all_errors: bool,
) -> None:
    """Persist the updated `_properties` of the given experiment.

    Retries up to 3 times on `RETRY_EXCEPTION_TYPES` via the decorator.
    `suppress_all_errors` is not read in the body; presumably it is consumed
    by `retry_on_exception` — TODO confirm.
    """
    update_properties_on_experiment(
        experiment_with_updated_properties=experiment_with_updated_properties,
        config=sqa_config,
    )
| 37.506718
| 88
| 0.654265
|
4a06d0ea15c87f2bc930a15b51b282d1669874ca
| 6,609
|
gyp
|
Python
|
binding.gyp
|
naturalatlas/node-mapnik
|
02e5d8eb0488128e1c51ad7d9ea9e5becb57dbb9
|
[
"BSD-3-Clause"
] | null | null | null |
binding.gyp
|
naturalatlas/node-mapnik
|
02e5d8eb0488128e1c51ad7d9ea9e5becb57dbb9
|
[
"BSD-3-Clause"
] | null | null | null |
binding.gyp
|
naturalatlas/node-mapnik
|
02e5d8eb0488128e1c51ad7d9ea9e5becb57dbb9
|
[
"BSD-3-Clause"
] | null | null | null |
# GYP build definition for the node-mapnik native addon.
{
  'includes': [ 'common.gypi' ],
  'variables': {
    'ENABLE_GLIBC_WORKAROUND%':'false', # can be overriden by a command line variable because of the % sign
    'napi_build_version%':'3', # can be overriden by a command line variable because of the % sign
    'enable_sse%':'true'
  },
  'targets': [
    {
      # Main addon target: node-mapnik sources plus the bundled
      # mapnik-vector-tile sources, built into <(module_name) at <(module_path).
      'target_name': '<(module_name)',
      'product_dir': '<(module_path)',
      'sources': [
        "src/mapnik_logger.cpp",
        "src/node_mapnik.cpp",
        "src/blend.cpp",
        "src/mapnik_map.cpp",
        "src/mapnik_map_load.cpp",
        "src/mapnik_map_from_string.cpp",
        "src/mapnik_map_render.cpp",
        "src/mapnik_map_query_point.cpp",
        "src/mapnik_color.cpp",
        "src/mapnik_geometry.cpp",
        "src/mapnik_feature.cpp",
        "src/mapnik_image.cpp",
        "src/mapnik_image_encode.cpp",
        "src/mapnik_image_open.cpp",
        "src/mapnik_image_fill.cpp",
        "src/mapnik_image_save.cpp",
        "src/mapnik_image_from_bytes.cpp",
        "src/mapnik_image_from_svg.cpp",
        "src/mapnik_image_solid.cpp",
        "src/mapnik_image_multiply.cpp",
        "src/mapnik_image_clear.cpp",
        "src/mapnik_image_copy.cpp",
        "src/mapnik_image_resize.cpp",
        "src/mapnik_image_compositing.cpp",
        "src/mapnik_image_filter.cpp",
        "src/mapnik_image_view.cpp",
        "src/mapnik_grid.cpp",
        "src/mapnik_grid_view.cpp",
        "src/mapnik_palette.cpp",
        "src/mapnik_projection.cpp",
        "src/mapnik_layer.cpp",
        "src/mapnik_datasource.cpp",
        "src/mapnik_featureset.cpp",
        "src/mapnik_expression.cpp",
        "src/mapnik_cairo_surface.cpp",
        "src/mapnik_vector_tile.cpp",
        "src/mapnik_vector_tile_data.cpp",
        "src/mapnik_vector_tile_query.cpp",
        "src/mapnik_vector_tile_json.cpp",
        "src/mapnik_vector_tile_info.cpp",
        "src/mapnik_vector_tile_simple_valid.cpp",
        "src/mapnik_vector_tile_render.cpp",
        "src/mapnik_vector_tile_clear.cpp",
        "src/mapnik_vector_tile_image.cpp",
        "src/mapnik_vector_tile_composite.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_compression.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_datasource_pbf.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_featureset_pbf.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_geometry_decoder.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_geometry_encoder_pbf.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_layer.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_processor.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_raster_clipper.cpp",
        "node_modules/mapnik-vector-tile/src/vector_tile_tile.cpp"
      ],
      'include_dirs': [
        './mason_packages/.link/include/',
        './mason_packages/.link/include/freetype2',
        './mason_packages/.link/include/cairo',
        './mason_packages/.link/include/mapnik',
        './src',
        "<!@(node -p \"require('node-addon-api').include\")",
        # TODO: move these to mason packages once we have a minimal windows client for mason (@springmeyer)
        # https://github.com/mapbox/mason/issues/396
        "./deps/geometry/include/",
        "./deps/protozero/include/",
        "./deps/wagyu/include/",
        "<!(node -e \"require('mapnik-vector-tile')\")"
      ],
      'defines': [
        'NAPI_VERSION=<(napi_build_version)',
        'MAPNIK_GIT_REVISION="<!@(mapnik-config --git-describe)"',
        'MAPNIK_VECTOR_TILE_LIBRARY=1',
      ],
      # Per-platform compile/link settings; the OS=="win" condition carries
      # an else-branch used for all non-Windows platforms.
      'conditions': [
        ['OS=="mac"', {
          'cflags+': ['-fvisibility=hidden'],
          'xcode_settings': {
            'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
          }
        }],
        ['ENABLE_GLIBC_WORKAROUND != "false"', {
          'sources': [
            "src/glibc_workaround.cpp"
          ]
        }],
        ['OS=="win"',
          {
            'include_dirs':[
              '<!@(mapnik-config --includes)',
              '<!@(mapnik-config --dep-includes)'
            ],
            'defines': ['NOMINMAX','<!@(mapnik-config --defines)'],
            'defines!': ["_HAS_EXCEPTIONS=0"],
            'libraries': [
              '<!@(mapnik-config --libs)',
              'mapnik-wkt.lib',
              'mapnik-json.lib',
              '<!@(mapnik-config --dep-libs)',
            ],
            'msvs_disabled_warnings': [ 4244,4005,4506,4345,4804,4805 ],
            'msvs_settings': {
              'VCLinkerTool': {
                'AdditionalLibraryDirectories': [
                  '<!@(mapnik-config --ldflags)'
                ],
              },
            }
          },
          {
            # Non-Windows: re-enable RTTI/exceptions and pull flags from
            # mapnik-config.
            'cflags_cc!': ['-fno-rtti', '-fno-exceptions'],
            'cflags_cc' : [
              '<!@(mapnik-config --cflags)',
            ],
            'libraries':[
              '<!@(mapnik-config --libs)',
              '-lmapnik-wkt',
              '-lmapnik-json',
              '<!@(mapnik-config --ldflags)',
              '<!@(mapnik-config --dep-libs)'
            ],
            'ldflags': [
              '-Wl,-z,now',
              "-Wl,-z,origin",
              "-Wl,-rpath=\$$ORIGIN/lib"
            ],
            'xcode_settings': {
              'OTHER_CPLUSPLUSFLAGS':[
                '<!@(mapnik-config --cflags)',
              ],
              'OTHER_CFLAGS':[
                '<!@(mapnik-config --cflags)'
              ],
              'OTHER_LDFLAGS':[
                '-Wl,-bind_at_load'
              ],
              'GCC_ENABLE_CPP_RTTI': 'YES',
              'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
              'MACOSX_DEPLOYMENT_TARGET':'10.8',
              'CLANG_CXX_LIBRARY': 'libc++',
              'CLANG_CXX_LANGUAGE_STANDARD':'c++14',
              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0'
            }
          },
        ],
        ['enable_sse == "true"', {
          'defines' : [ 'SSE_MATH' ]
        }]
      ]
    },
    {
      # Helper target: runs scripts/postinstall.sh after the main build
      # (non-Windows only).
      'target_name': 'action_after_build',
      'type': 'none',
      'dependencies': [ '<(module_name)' ],
      'hard_dependency': 1,
      'conditions': [
        ['OS!="win"',
          {
            'actions': [
              {
                'action_name': 'postinstall',
                'inputs': ['./scripts/postinstall.sh'],
                'outputs': ['./lib/binding'],
                'action': ['./scripts/postinstall.sh', '<(module_path)']
              }
            ]
          }
        ]
      ]
    },
  ]
}
| 35.724324
| 109
| 0.525042
|
4a06d12199b0d758b9fe073ced5f8c72fa752574
| 3,867
|
py
|
Python
|
Day19/first.py
|
PulakIIIT/AOC-2021
|
3f54b4bbce5e84d604d4718ba479beb566fc17d1
|
[
"MIT"
] | null | null | null |
Day19/first.py
|
PulakIIIT/AOC-2021
|
3f54b4bbce5e84d604d4718ba479beb566fc17d1
|
[
"MIT"
] | null | null | null |
Day19/first.py
|
PulakIIIT/AOC-2021
|
3f54b4bbce5e84d604d4718ba479beb566fc17d1
|
[
"MIT"
] | null | null | null |
from collections import defaultdict
import sys
import pprint
from itertools import permutations
def subt(pt1, pt2):
    """Component-wise difference of two 3-D points, returned as a tuple."""
    return tuple(a - b for a, b in zip(pt1, pt2))
def add(pt1, pt2):
    """Component-wise sum of two 3-D points, returned as a tuple."""
    return tuple(a + b for a, b in zip(pt1, pt2))
def rotate(pt, permut):
    """Reorder the coordinates of `pt` according to the index permutation."""
    return tuple(pt[axis] for axis in permut)
def change_sign(pt, sign):
    """Multiply each coordinate of `pt` by the matching entry of `sign`."""
    return tuple(coord * flip for coord, flip in zip(pt, sign))
def get_relative_dist(pts1, pts2):
    """Search for the rigid transform mapping scanner-2 beacons onto scanner-1.

    Tries every axis permutation and per-axis sign flip (48 candidate
    orientations, reflections included) with every pairing of one point from
    each set as the anchor.  If some orientation + translation makes at
    least 12 beacons coincide, returns that (diff, permut, sign) transform;
    otherwise returns None.
    """
    max_len = 0
    pts_old = set(pts1)
    # All 8 combinations of per-axis sign flips.
    signs = [
        (1, 1, 1),
        (1, 1, -1),
        (1, -1, 1),
        (1, -1, -1),
        (-1, 1, 1),
        (-1, 1, -1),
        (-1, -1, 1),
        (-1, -1, -1),
    ]
    for pt in pts1:
        for pt2 in pts2:
            for permut in permutations((0, 1, 2)):
                for sign in signs:
                    # Hypothesis: reoriented pt2 is the same beacon as pt;
                    # diff is then the implied scanner offset.
                    pt2_rotated = rotate(pt2, permut)
                    pt2_signed = change_sign(pt2_rotated, sign)
                    diff = subt(pt, pt2_signed)
                    new_pts = []
                    for pt3 in pts2:
                        new_pts.append(
                            add(change_sign(rotate(pt3, permut), sign), diff)
                        )
                    new_pts = set(new_pts)
                    common = new_pts.intersection(pts_old)
                    if len(common) > max_len:
                        max_len = len(common)
                        # 12 overlapping beacons is the puzzle's threshold
                        # for declaring two scanners overlapping.
                        if max_len >= 12:
                            pprint.pprint(common)
                            print("Returning", diff, permut, sign)
                            return diff, permut, sign
    return None
def transform(pts, diff, permut, sign):
    """Apply orientation (permut, sign) then translation diff to every point."""
    return [add(change_sign(rotate(pt, permut), sign), diff) for pt in pts]
def main():
    """Parse scanner reports, find pairwise overlaps, then count unique beacons."""
    with open(sys.argv[1], "r") as f:
        text = f.read()
    # Scanner blocks are separated by blank lines; each block's first line
    # is the scanner header, the rest are "x,y,z" beacon coordinates.
    scanner = text.split("\n\n")
    pts = []
    for line in scanner:
        inputs = line.split("\n")
        st_pts = []
        for st in inputs[1:]:
            if st == "":
                continue
            x, y, z = st.split(",")
            st_pts.append((int(x), int(y), int(z)))
        pts.append(st_pts)
    # edge_map[(i, j)]: transform expressing scanner j's points in scanner
    # i's frame; adj_map is the corresponding adjacency list.
    edge_map = {}
    adj_map = defaultdict(lambda: [])
    for i in range(0, len(pts)):
        for j in range(0, len(pts)):
            if i == j:
                continue
            res = get_relative_dist(pts[i], pts[j])
            if res is not None:
                edge_map[(i, j)] = res
                adj_map[i].append(j)
                print(f"{i} relative to {j} is ", res)
    from collections import deque
    # BFS from each scanner i towards scanner 0, then compose the chain of
    # edge transforms so scanner i's beacons land in scanner 0's frame.
    total = set(pts[0])
    for i in range(1, len(pts)):
        q = deque()
        q.append(i)
        visited = set()
        visited.add(i)
        mp = {}
        while q:
            curr = q.popleft()
            if curr == 0:
                # Reconstruct the path 0 -> ... -> i via BFS parents.
                dts = [0]
                while curr != i:
                    curr = mp[curr]
                    dts.append(curr)
                from copy import deepcopy
                print("Found a way to reach 0 from", i)
                print(dts)
                ptts = deepcopy(pts[i])
                # Apply the edge transforms from i back towards 0.
                for j in range(len(dts) - 1, 0, -1):
                    edge = (dts[j - 1], dts[j])
                    diff, permut, sign = edge_map[edge]
                    print("Applied edge", edge)
                    print("Got", diff, permut, sign)
                    ptts = transform(ptts, diff, permut, sign)
                pprint.pprint(ptts)
                total.update(ptts)
                break
            for x in adj_map[curr]:
                if x in visited:
                    continue
                visited.add(x)
                q.append(x)
                mp[x] = curr
    print("answer is", total)
    print("final answer is", len(total))
if __name__ == "__main__":
main()
| 26.128378
| 77
| 0.438583
|
4a06d1352fbea5c2fe7f2006a82b5c821b642310
| 195
|
py
|
Python
|
appengine_config.py
|
avishekhbharati/GcloudPythonSimpleLoginApp
|
7299b5825216521cc3949be70ea3372350158481
|
[
"Apache-2.0"
] | null | null | null |
appengine_config.py
|
avishekhbharati/GcloudPythonSimpleLoginApp
|
7299b5825216521cc3949be70ea3372350158481
|
[
"Apache-2.0"
] | null | null | null |
appengine_config.py
|
avishekhbharati/GcloudPythonSimpleLoginApp
|
7299b5825216521cc3949be70ea3372350158481
|
[
"Apache-2.0"
] | null | null | null |
from gaesessions import SessionMiddleware
def webapp_add_wsgi_middleware(app):
    """App Engine hook: wrap the WSGI app with gae-sessions middleware.

    NOTE(review): the session cookie key is hard-coded in source; it should
    live in configuration/secret storage instead.
    """
    return SessionMiddleware(app, cookie_key="jkdhgfdjkghfjdkghfdgjhdfkjghdfjgnmxbvxcmnvbitueotuirt")
| 39
| 100
| 0.846154
|
4a06d25e2fcbd1b5bed452dbaf1f0689d0ad2995
| 5,131
|
py
|
Python
|
tests/unit/test_timeslive_crawler.py
|
CodeForAfrica/mma-dexter
|
10d7f0c51bb935399c708a432699e06418049a33
|
[
"Apache-2.0"
] | 12
|
2015-06-14T05:50:39.000Z
|
2021-09-12T17:03:47.000Z
|
tests/unit/test_timeslive_crawler.py
|
CodeForAfrica/mma-dexter
|
10d7f0c51bb935399c708a432699e06418049a33
|
[
"Apache-2.0"
] | 32
|
2019-07-25T06:17:31.000Z
|
2019-08-05T02:41:42.000Z
|
tests/unit/test_timeslive_crawler.py
|
CodeForAfricaLabs/mma-dexter
|
10d7f0c51bb935399c708a432699e06418049a33
|
[
"Apache-2.0"
] | 9
|
2016-04-04T03:08:22.000Z
|
2020-02-19T09:55:45.000Z
|
import unittest
from dexter.models import Document, db
from dexter.models.seeds import seed_db
from dexter.processing.crawlers import TimesLiveCrawler
class TestTimesliveCrawler(unittest.TestCase):
def setUp(self):
self.crawler = TimesLiveCrawler()
self.db = db
self.db.drop_all()
self.db.create_all()
seed_db(db)
def tearDown(self):
self.db.session.remove()
self.db.drop_all()
def test_extract(self):
html = """
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
<title>TimesLIVE - Print Article</title>
<link rel="stylesheet" href="http://www.timeslive.co.za/template/common/css/print.css" type="text/css" media="print" />
<link type="text/css" rel="stylesheet" href="http://www.timeslive.co.za/template/common/css/uniform.default.css" />
<script type="text/javascript">
function printpage()
{
window.print();
}
</script>
</head>
<body onload="printpage()">
<a href="#" onclick="javascript:print();">
Print this page
</a><br />
<div class="container">
<div class="clear"></div>
<div id="content">
<div class="span-24 column">
<div class="articleheader">
<h1>IEC's Tlakula must resign: opposition parties</h1>
<div> Apr 1, 2014 | Sapa</div>
<h3>Several opposition parties met in Pretoria on Tuesday to discuss Public Protector Thuli Madonsela's finding on the Nkandla upgrades and the controversy around IEC chairwoman Pansy Tlakula.</h3>
</div>
<div class="articlerelated">
<div class="image">
<img width="290px" title="" alt="" src="http://www.timeslive.co.za/migration_catalog/ST/2009/09/10/26869_499542.jpg/RESIZED/Small/26869_499542.jpg">
IEC chairwoman Pansy Tlakula</div>
<br/>
<div class="quote">
<h3>
<span>"</span>
<p> </p><span class="end">"</span>
</h3>
<div class="clear"></div>
</div>
</div>
<p>Chairman of the multi-party forum Bantu Holomisa, who also heads the United Democratic Movement (UDM), said the opposition parties resolved to push for Tlakula's resignation.</p><p>"All the political parties present, with the exception of the Democratic Alliance and the Freedom Front Plus, agreed that advocate Tlakula must resign immediately.</p><p>"Should she refuse to resign, the parties who are in agreement will pursue legal action," said Holomisa.</p><p>The forum included the African Christian Democratic Party, AgangSA, Azapo, Economic Freedom Fighters, FF Plus, Inkatha Freedom Party, United Christian Democratic Party, and Holomisa's UDM.</p><p>Regarding Nkandla, the parties resolved to convene another summit after President Jacob Zuma had reacted to Parliament as ordered by Madonsela.</p><br/>
<br/>
<center>
~ o O o ~
</center>
</div>
</div>
</div>
</body>
</html>
"""
doc = Document()
doc.url = 'http://www.timeslive.co.za/politics/2014/04/01/iec-s-tlakula-must-resign-opposition-parties'
self.crawler.extract(doc, html)
self.assertEqual(doc.title, u"IEC's Tlakula must resign: opposition parties")
self.assertEqual(doc.summary, u"Several opposition parties met in Pretoria on Tuesday to discuss Public Protector Thuli Madonsela's finding on the Nkandla upgrades and the controversy around IEC chairwoman Pansy Tlakula.")
self.assertEqual(doc.published_at.strftime('%d %m %Y'), '01 04 2014')
self.assertEqual(doc.author.name, "Sapa")
self.assertEqual(doc.medium.name, 'Times')
self.assertEqual(doc.text, u'Several opposition parties met in Pretoria on Tuesday to discuss Public Protector Thuli Madonsela\'s finding on the Nkandla upgrades and the controversy around IEC chairwoman Pansy Tlakula.\n\nChairman of the multi-party forum Bantu Holomisa, who also heads the United Democratic Movement (UDM), said the opposition parties resolved to push for Tlakula\'s resignation.\n\n"All the political parties present, with the exception of the Democratic Alliance and the Freedom Front Plus, agreed that advocate Tlakula must resign immediately.\n\n"Should she refuse to resign, the parties who are in agreement will pursue legal action," said Holomisa.\n\nThe forum included the African Christian Democratic Party, AgangSA, Azapo, Economic Freedom Fighters, FF Plus, Inkatha Freedom Party, United Christian Democratic Party, and Holomisa\'s UDM.\n\nRegarding Nkandla, the parties resolved to convene another summit after President Jacob Zuma had reacted to Parliament as ordered by Madonsela.')
| 60.364706
| 1,022
| 0.634184
|
4a06d27a66e5e8ae6a75f59aa2b5e6685afe7a9e
| 378
|
py
|
Python
|
app/main/forms/Compra_forms.py
|
amandapersampa/Franguinho
|
940b6601a821ab4857de7f0a5a0ac53f6f54a564
|
[
"MIT"
] | null | null | null |
app/main/forms/Compra_forms.py
|
amandapersampa/Franguinho
|
940b6601a821ab4857de7f0a5a0ac53f6f54a564
|
[
"MIT"
] | 8
|
2017-03-14T11:55:07.000Z
|
2017-04-03T00:53:32.000Z
|
app/main/forms/Compra_forms.py
|
amandapersampa/MicroGerencia
|
940b6601a821ab4857de7f0a5a0ac53f6f54a564
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from flask_wtf import FlaskForm
from wtforms import *
from wtforms.validators import DataRequired
class Compra_forms(FlaskForm):
    """Form for registering a purchase: product, quantity, value and date."""
    produto = SelectField('produto')
    quantidade = IntegerField("quantidade", validators=[DataRequired()])
    # Label fixed: this field was mislabeled "quantidade" (copy-paste bug).
    valor = FloatField("valor", validators=[DataRequired()])
    data = DateField("date", validators=[DataRequired()])
| 37.8
| 72
| 0.753968
|
4a06d2807a057e15a676c6c0c426bc1b7957e993
| 2,417
|
py
|
Python
|
redash/cli/__init__.py
|
aladdinwang/redash
|
934b340ff7dab7caa3c91618ffdbe109dfdb31f5
|
[
"BSD-2-Clause"
] | 1
|
2019-10-25T07:30:26.000Z
|
2019-10-25T07:30:26.000Z
|
redash/cli/__init__.py
|
aladdinwang/redash
|
934b340ff7dab7caa3c91618ffdbe109dfdb31f5
|
[
"BSD-2-Clause"
] | null | null | null |
redash/cli/__init__.py
|
aladdinwang/redash
|
934b340ff7dab7caa3c91618ffdbe109dfdb31f5
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import print_function
import click
import simplejson
from flask import current_app
from flask.cli import FlaskGroup, run_command
from redash import __version__, create_app, settings
from redash.cli import data_sources, database, groups, organization, queries, users
from redash.monitor import get_status
def create(group):
    """Flask CLI app factory: binds the app to the CLI group and registers a
    shell context exposing `models` and `settings`."""
    app = current_app or create_app()
    group.app = app
    @app.shell_context_processor
    def shell_context():
        from redash import models, settings
        return {
            'models': models,
            'settings': settings,
        }
    return app
@click.group(cls=FlaskGroup, create_app=create)
def manager():
    """Management script for Redash"""
# Register command sub-groups and alias the dev server as "runserver".
manager.add_command(database.manager, "database")
manager.add_command(users.manager, "users")
manager.add_command(groups.manager, "groups")
manager.add_command(data_sources.manager, "ds")
manager.add_command(organization.manager, "org")
manager.add_command(queries.manager, "queries")
manager.add_command(run_command, "runserver")
@manager.command()
def version():
    """Displays Redash version."""
    # Bare version string only, so the output is easy to consume in scripts.
    print(__version__)
@manager.command()
def status():
    """Print Redash runtime status (from `get_status`) as indented JSON."""
    print(simplejson.dumps(get_status(), indent=2))
@manager.command()
def check_settings():
    """Show the settings as Redash sees them (useful for debugging)."""
    # Dump every Flask config key/value pair, one per line.
    for name, item in current_app.config.items():
        print("{} = {}".format(name, item))
@manager.command()
@click.argument('email', default=settings.MAIL_DEFAULT_SENDER, required=False)
def send_test_mail(email=None):
    """
    Send test message to EMAIL (default: the address you defined in MAIL_DEFAULT_SENDER)
    """
    from redash import mail
    from flask_mail import Message
    # click already defaults the argument; this extra None check also covers
    # direct (non-CLI) invocation of the function.
    if email is None:
        email = settings.MAIL_DEFAULT_SENDER
    mail.send(Message(subject="Test Message from Redash", recipients=[email],
                      body="Test message."))
@manager.command()
def ipython():
    """Starts IPython shell instead of the default Python shell."""
    import sys
    import IPython
    from flask.globals import _app_ctx_stack
    app = _app_ctx_stack.top.app
    # Banner mirrors the stock Flask shell header, plus the Redash version.
    banner = 'Python %s on %s\nIPython: %s\nRedash version: %s\n' % (
        sys.version,
        sys.platform,
        IPython.__version__,
        __version__
    )
    # Seed the interactive namespace with the app's shell context.
    ctx = {}
    ctx.update(app.make_shell_context())
    IPython.embed(banner1=banner, user_ns=ctx)
| 25.712766
| 88
| 0.6988
|
4a06d28f0b85231426ffbef9115775f9a8cf8b81
| 4,092
|
py
|
Python
|
pychron/graph/tools/point_inspector.py
|
ael-noblegas/pychron
|
6ebbbb1f66a614972b62b7a9be4c784ae61b5d62
|
[
"Apache-2.0"
] | 1
|
2019-02-27T21:57:44.000Z
|
2019-02-27T21:57:44.000Z
|
pychron/graph/tools/point_inspector.py
|
ael-noblegas/pychron
|
6ebbbb1f66a614972b62b7a9be4c784ae61b5d62
|
[
"Apache-2.0"
] | 80
|
2018-07-17T20:10:20.000Z
|
2021-08-17T15:38:24.000Z
|
pychron/graph/tools/point_inspector.py
|
AGESLDEO/pychron
|
1a81e05d9fba43b797f335ceff6837c016633bcf
|
[
"Apache-2.0"
] | null | null | null |
# ===============================================================================
# Copyright 2012 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from numpy import where, vstack
from traits.api import Callable
from pychron.core.helpers.formatting import floatfmt
from pychron.graph.tools.info_inspector import InfoInspector, InfoOverlay
from pychron.pychron_constants import PLUSMINUS
from six.moves import zip
class PointInspector(InfoInspector):
    """Inspector tool that assembles hover-overlay text for the data point(s)
    under the cursor on a chaco plot component."""
    # Optional callable converting a raw index (x) value for display,
    # e.g. timestamp -> formatted string.
    convert_index = Callable
    # Optional callable returning extra info line(s) for a point; may accept
    # (index, plot id) or just (index) — see assemble_lines.
    additional_info = Callable
    # If True, use the component's own single-point hit test; otherwise
    # collect every point within the threshold radius.
    single_point = True
    id = None
    def get_selected_index(self):
        # Return indices of points within `hittest_threshold` screen pixels
        # of the cursor (a single-element list when single_point is set).
        threshold = self.hittest_threshold
        if self.single_point:
            idx = self.component.map_index(self.current_position, threshold=threshold)
            if idx is not None:
                return [idx]
        else:
            xs = self.component.index.get_data()
            ys = self.component.value.get_data()
            d = vstack((xs, ys)).T
            spts = self.component.map_screen(d)
            cx, cy = self.current_position
            # Euclidean screen-space distance from the cursor to every point.
            distances = ((spts[:, 0] - cx) ** 2 + (spts[:, 1] - cy) ** 2) ** 0.5
            return where(distances <= threshold)[0]
    def percent_error(self, s, e):
        # Format |e/s| as a percent string; "(Inf%)" when s == 0.
        v = '(Inf%)'
        try:
            return '({:0.2f}%)'.format(abs(e / s) * 100)
        except ZeroDivisionError:
            pass
        return v
    def assemble_lines(self):
        # Build the list of text lines shown in the hover overlay.
        pt = self.current_position
        if pt:
            comp = self.component
            inds = self.get_selected_index()
            lines = []
            convert_index = self.convert_index
            if inds is not None and len(inds):
                he = hasattr(self.component, 'yerror')
                ys = comp.value.get_data()[inds]
                xs = comp.index.get_data()[inds]
                for i, x, y in zip(inds, xs, ys):
                    if he:
                        # Value with its error and percent error attached.
                        ye = comp.yerror.get_data()[i]
                        pe = self.percent_error(y, ye)
                        ye = floatfmt(ye, n=6, s=3)
                        sy = u'{} {}{} ({})'.format(y, PLUSMINUS, ye, pe)
                    else:
                        sy = floatfmt(y, n=6, s=3)
                    if convert_index:
                        x = convert_index(x)
                    else:
                        x = '{:0.5f}'.format(x)
                    lines.extend([u'pt={:03d}, x= {}, y= {}'.format(i + 1, x, sy)])
                    if hasattr(comp, 'display_index'):
                        x = comp.display_index.get_data()[i]
                        lines.append('{}'.format(x))
                    if self.additional_info is not None:
                        try:
                            # Prefer the (index, id) signature; fall back to
                            # the single-argument form.
                            ad = self.additional_info(i, self.id)
                        except BaseException:
                            ad = self.additional_info(i)
                        if isinstance(ad, (list, tuple)):
                            lines.extend(ad)
                        else:
                            lines.append(ad)
                return lines
            else:
                return []
class PointInspectorOverlay(InfoOverlay):
    """Overlay companion to `PointInspector`; behavior is inherited unchanged
    from `InfoOverlay`."""
    pass
# print comp
# ============= EOF =============================================
| 35.275862
| 86
| 0.501222
|
4a06d2ebb02786c2a9c7d23f89d327d48dc27c44
| 12,854
|
py
|
Python
|
tests/clients/test_pika.py
|
rlopes-ki/python-sensor
|
07e827f9982b2a0c482e8eab82d1a420923efd5e
|
[
"MIT"
] | 61
|
2017-09-27T02:50:17.000Z
|
2022-03-22T12:13:37.000Z
|
tests/clients/test_pika.py
|
rlopes-ki/python-sensor
|
07e827f9982b2a0c482e8eab82d1a420923efd5e
|
[
"MIT"
] | 82
|
2017-07-11T13:47:33.000Z
|
2022-03-22T10:10:38.000Z
|
tests/clients/test_pika.py
|
rlopes-ki/python-sensor
|
07e827f9982b2a0c482e8eab82d1a420923efd5e
|
[
"MIT"
] | 27
|
2017-09-11T16:22:32.000Z
|
2022-03-11T17:21:49.000Z
|
# (c) Copyright IBM Corp. 2021
# (c) Copyright Instana Inc. 2021
from __future__ import absolute_import
import os
import pika
import unittest
import mock
import threading
import time
from ..helpers import testenv
from instana.singletons import tracer
class _TestPika(unittest.TestCase):
    """Shared fixture for pika instrumentation tests: builds a mocked
    connection and the object under test, clearing recorded spans per test."""
    @staticmethod
    @mock.patch('pika.connection.Connection')
    def _create_connection(connection_class_mock=None):
        # Instantiate the patched (mock) Connection class.
        return connection_class_mock()
    def _create_obj(self):
        # Subclasses build the pika object under test (e.g. a Channel).
        raise NotImplementedError()
    def setUp(self):
        self.recorder = tracer.recorder
        self.recorder.clear_spans()
        self.connection = self._create_connection()
        self._on_openok_callback = mock.Mock()
        self.obj = self._create_obj()
    def tearDown(self):
        del self.connection
        del self._on_openok_callback
        del self.obj
class TestPikaChannel(_TestPika):
    def _create_obj(self):
        # Object under test: a pika Channel on the mocked connection.
        return pika.channel.Channel(self.connection, 1, self._on_openok_callback)
    @mock.patch('pika.spec.Basic.Publish')
    @mock.patch('pika.channel.Channel._send_method')
    def test_basic_publish(self, send_method, _unused):
        """basic_publish inside an active span must record an exit span and
        inject Instana trace headers into the published message."""
        self.obj._set_state(self.obj.OPEN)
        with tracer.start_active_span("testing"):
            self.obj.basic_publish("test.exchange", "test.queue", "Hello!")
        spans = self.recorder.queued_spans()
        self.assertEqual(2, len(spans))
        rabbitmq_span = spans[0]
        test_span = spans[1]
        self.assertIsNone(tracer.active_span)
        # Same traceId
        self.assertEqual(test_span.t, rabbitmq_span.t)
        # Parent relationships
        self.assertEqual(rabbitmq_span.p, test_span.s)
        # Error logging
        self.assertIsNone(test_span.ec)
        self.assertIsNone(rabbitmq_span.ec)
        # Span tags
        self.assertEqual("test.exchange", rabbitmq_span.data["rabbitmq"]["exchange"])
        self.assertEqual('publish', rabbitmq_span.data["rabbitmq"]["sort"])
        self.assertIsNotNone(rabbitmq_span.data["rabbitmq"]["address"])
        self.assertEqual("test.queue", rabbitmq_span.data["rabbitmq"]["key"])
        self.assertIsNotNone(rabbitmq_span.stack)
        self.assertTrue(type(rabbitmq_span.stack) is list)
        self.assertGreater(len(rabbitmq_span.stack), 0)
        # The wire call must carry the injected X-INSTANA-* headers.
        send_method.assert_called_once_with(
            pika.spec.Basic.Publish(
                exchange="test.exchange",
                routing_key="test.queue"), (pika.spec.BasicProperties(headers={
                    "X-INSTANA-T": rabbitmq_span.t,
                    "X-INSTANA-S": rabbitmq_span.s,
                    "X-INSTANA-L": "1"
                }), b"Hello!"))
    @mock.patch('pika.spec.Basic.Publish')
    @mock.patch('pika.channel.Channel._send_method')
    def test_basic_publish_with_headers(self, send_method, _unused):
        """User-supplied headers must be preserved alongside the injected
        Instana trace headers."""
        self.obj._set_state(self.obj.OPEN)
        with tracer.start_active_span("testing"):
            self.obj.basic_publish("test.exchange",
                                   "test.queue",
                                   "Hello!",
                                   pika.BasicProperties(headers={
                                       "X-Custom-1": "test"
                                   }))
        spans = self.recorder.queued_spans()
        self.assertEqual(2, len(spans))
        rabbitmq_span = spans[0]
        test_span = spans[1]
        send_method.assert_called_once_with(
            pika.spec.Basic.Publish(
                exchange="test.exchange",
                routing_key="test.queue"), (pika.spec.BasicProperties(headers={
                    "X-Custom-1": "test",
                    "X-INSTANA-T": rabbitmq_span.t,
                    "X-INSTANA-S": rabbitmq_span.s,
                    "X-INSTANA-L": "1"
                }), b"Hello!"))
    @mock.patch('pika.spec.Basic.Get')
    def test_basic_get(self, _unused):
        """basic_get without incoming trace headers starts a new root
        consume span and still invokes the user callback."""
        self.obj._set_state(self.obj.OPEN)
        body = "Hello!"
        properties = pika.BasicProperties()
        method_frame = pika.frame.Method(1, pika.spec.Basic.GetOk)
        header_frame = pika.frame.Header(1, len(body), properties)
        cb = mock.Mock()
        self.obj.basic_get("test.queue", cb)
        # Simulate the broker delivering the GetOk response.
        self.obj._on_getok(method_frame, header_frame, body)
        spans = self.recorder.queued_spans()
        self.assertEqual(1, len(spans))
        rabbitmq_span = spans[0]
        self.assertIsNone(tracer.active_span)
        # A new span has been started
        self.assertIsNotNone(rabbitmq_span.t)
        self.assertIsNone(rabbitmq_span.p)
        self.assertIsNotNone(rabbitmq_span.s)
        # Error logging
        self.assertIsNone(rabbitmq_span.ec)
        # Span tags
        self.assertIsNone(rabbitmq_span.data["rabbitmq"]["exchange"])
        self.assertEqual("consume", rabbitmq_span.data["rabbitmq"]["sort"])
        self.assertIsNotNone(rabbitmq_span.data["rabbitmq"]["address"])
        self.assertEqual("test.queue", rabbitmq_span.data["rabbitmq"]["queue"])
        self.assertIsNotNone(rabbitmq_span.stack)
        self.assertTrue(type(rabbitmq_span.stack) is list)
        self.assertGreater(len(rabbitmq_span.stack), 0)
        cb.assert_called_once_with(self.obj, pika.spec.Basic.GetOk, properties, body)
    @mock.patch('pika.spec.Basic.Get')
    def test_basic_get_with_trace_context(self, _unused):
        """basic_get must continue an incoming trace context from the
        X-INSTANA-* message headers."""
        self.obj._set_state(self.obj.OPEN)
        body = "Hello!"
        properties = pika.BasicProperties(headers={
            "X-INSTANA-T": "0000000000000001",
            "X-INSTANA-S": "0000000000000002",
            "X-INSTANA-L": "1"
        })
        method_frame = pika.frame.Method(1, pika.spec.Basic.GetOk)
        header_frame = pika.frame.Header(1, len(body), properties)
        cb = mock.Mock()
        self.obj.basic_get("test.queue", cb)
        self.obj._on_getok(method_frame, header_frame, body)
        spans = self.recorder.queued_spans()
        self.assertEqual(1, len(spans))
        rabbitmq_span = spans[0]
        self.assertIsNone(tracer.active_span)
        # Trace context propagation
        self.assertEqual("0000000000000001", rabbitmq_span.t)
        self.assertEqual("0000000000000002", rabbitmq_span.p)
        # A new span has been started
        self.assertIsNotNone(rabbitmq_span.s)
        self.assertNotEqual(rabbitmq_span.p, rabbitmq_span.s)
@mock.patch('pika.spec.Basic.Consume')
def test_basic_consume(self, _unused):
self.obj._set_state(self.obj.OPEN)
body = "Hello!"
properties = pika.BasicProperties()
method_frame = pika.frame.Method(1, pika.spec.Basic.Deliver(consumer_tag="test"))
header_frame = pika.frame.Header(1, len(body), properties)
cb = mock.Mock()
self.obj.basic_consume("test.queue", cb, consumer_tag="test")
self.obj._on_deliver(method_frame, header_frame, body)
spans = self.recorder.queued_spans()
self.assertEqual(1, len(spans))
rabbitmq_span = spans[0]
self.assertIsNone(tracer.active_span)
# A new span has been started
self.assertIsNotNone(rabbitmq_span.t)
self.assertIsNone(rabbitmq_span.p)
self.assertIsNotNone(rabbitmq_span.s)
# Error logging
self.assertIsNone(rabbitmq_span.ec)
# Span tags
self.assertIsNone(rabbitmq_span.data["rabbitmq"]["exchange"])
self.assertEqual("consume", rabbitmq_span.data["rabbitmq"]["sort"])
self.assertIsNotNone(rabbitmq_span.data["rabbitmq"]["address"])
self.assertEqual("test.queue", rabbitmq_span.data["rabbitmq"]["queue"])
self.assertIsNotNone(rabbitmq_span.stack)
self.assertTrue(type(rabbitmq_span.stack) is list)
self.assertGreater(len(rabbitmq_span.stack), 0)
cb.assert_called_once_with(self.obj, method_frame.method, properties, body)
@mock.patch('pika.spec.Basic.Consume')
def test_basic_consume_with_trace_context(self, _unused):
self.obj._set_state(self.obj.OPEN)
body = "Hello!"
properties = pika.BasicProperties(headers={
"X-INSTANA-T": "0000000000000001",
"X-INSTANA-S": "0000000000000002",
"X-INSTANA-L": "1"
})
method_frame = pika.frame.Method(1, pika.spec.Basic.Deliver(consumer_tag="test"))
header_frame = pika.frame.Header(1, len(body), properties)
cb = mock.Mock()
self.obj.basic_consume(queue="test.queue", on_message_callback=cb, consumer_tag="test")
self.obj._on_deliver(method_frame, header_frame, body)
spans = self.recorder.queued_spans()
self.assertEqual(1, len(spans))
rabbitmq_span = spans[0]
self.assertIsNone(tracer.active_span)
# Trace context propagation
self.assertEqual("0000000000000001", rabbitmq_span.t)
self.assertEqual("0000000000000002", rabbitmq_span.p)
# A new span has been started
self.assertIsNotNone(rabbitmq_span.s)
self.assertNotEqual(rabbitmq_span.p, rabbitmq_span.s)
class TestPikaBlockingChannel(_TestPika):
    # Exercises the instrumented BlockingChannel on top of a mocked
    # low-level pika.channel.Channel, so no broker connection is required.
    @mock.patch('pika.channel.Channel', spec=pika.channel.Channel)
    def _create_obj(self, channel_impl):
        # Build the BlockingChannel around a mock impl; channel_number is
        # needed by the adapter's bookkeeping.
        self.impl = channel_impl()
        self.impl.channel_number = 1
        return pika.adapters.blocking_connection.BlockingChannel(self.impl, self.connection)
    def _generate_delivery(self, consumer_tag, properties, body):
        # Inject a fake broker delivery into the blocking channel's internal
        # consumer-generator event queue, as if it arrived over the wire.
        from pika.adapters.blocking_connection import _ConsumerDeliveryEvt
        # Wait until queue consumer is initialized
        while self.obj._queue_consumer_generator is None:
            time.sleep(0.25)
        method = pika.spec.Basic.Deliver(consumer_tag=consumer_tag)
        self.obj._on_consumer_generator_event(_ConsumerDeliveryEvt(method, properties, body))
    def test_consume(self):
        # Consuming a delivery without trace headers must start a new trace.
        consumed_deliveries = []
        def __consume():
            # Runs on a worker thread; consume() blocks until a delivery
            # (or the inactivity timeout) arrives.
            for delivery in self.obj.consume("test.queue", inactivity_timeout=3.0):
                # Skip deliveries generated due to inactivity
                if delivery is not None and any(delivery):
                    consumed_deliveries.append(delivery)
                    break
        consumer_tag = "test.consumer"
        self.impl.basic_consume.return_value = consumer_tag
        self.impl._generate_consumer_tag.return_value = consumer_tag
        self.impl._consumers = {}
        # Order matters: start the consumer thread first, then inject the
        # delivery, then wait for the thread to observe it.
        t = threading.Thread(target=__consume)
        t.start()
        self._generate_delivery(consumer_tag, pika.BasicProperties(), "Hello!")
        t.join(timeout=5.0)
        spans = self.recorder.queued_spans()
        self.assertEqual(1, len(spans))
        rabbitmq_span = spans[0]
        self.assertIsNone(tracer.active_span)
        # A new span has been started
        self.assertIsNotNone(rabbitmq_span.t)
        self.assertIsNone(rabbitmq_span.p)
        self.assertIsNotNone(rabbitmq_span.s)
        # Error logging
        self.assertIsNone(rabbitmq_span.ec)
        # Span tags
        self.assertIsNone(rabbitmq_span.data["rabbitmq"]["exchange"])
        self.assertEqual("consume", rabbitmq_span.data["rabbitmq"]["sort"])
        self.assertIsNotNone(rabbitmq_span.data["rabbitmq"]["address"])
        self.assertEqual("test.queue", rabbitmq_span.data["rabbitmq"]["queue"])
        self.assertIsNotNone(rabbitmq_span.stack)
        self.assertTrue(type(rabbitmq_span.stack) is list)
        self.assertGreater(len(rabbitmq_span.stack), 0)
        self.assertEqual(1, len(consumed_deliveries))
    def test_consume_with_trace_context(self):
        # Same flow as test_consume, but the injected delivery carries
        # X-INSTANA-* headers, so the span must continue that trace.
        consumed_deliveries = []
        def __consume():
            for delivery in self.obj.consume("test.queue", inactivity_timeout=3.0):
                # Skip deliveries generated due to inactivity
                if delivery is not None and any(delivery):
                    consumed_deliveries.append(delivery)
                    break
        consumer_tag = "test.consumer"
        self.impl.basic_consume.return_value = consumer_tag
        self.impl._generate_consumer_tag.return_value = consumer_tag
        self.impl._consumers = {}
        t = threading.Thread(target=__consume)
        t.start()
        self._generate_delivery(consumer_tag, pika.BasicProperties(headers={
            "X-INSTANA-T": "0000000000000001",
            "X-INSTANA-S": "0000000000000002",
            "X-INSTANA-L": "1"
        }), "Hello!")
        t.join(timeout=5.0)
        spans = self.recorder.queued_spans()
        self.assertEqual(1, len(spans))
        rabbitmq_span = spans[0]
        self.assertIsNone(tracer.active_span)
        # Trace context propagation
        self.assertEqual("0000000000000001", rabbitmq_span.t)
        self.assertEqual("0000000000000002", rabbitmq_span.p)
        # A new span has been started
        self.assertIsNotNone(rabbitmq_span.s)
        self.assertNotEqual(rabbitmq_span.p, rabbitmq_span.s)
| 34.005291
| 95
| 0.642991
|
4a06d43eead8313b5334d2c7d9a287b29ece4515
| 11,618
|
py
|
Python
|
tensorflow/contrib/data/python/ops/readers.py
|
dantkz/tensorflow
|
5333bbeb3142af2a06f1ebd971061fc4e28da743
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/contrib/data/python/ops/readers.py
|
dantkz/tensorflow
|
5333bbeb3142af2a06f1ebd971061fc4e28da743
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/contrib/data/python/ops/readers.py
|
dantkz/tensorflow
|
5333bbeb3142af2a06f1ebd971061fc4e28da743
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python wrappers for reader Datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.data.python.ops import dataset_ops as contrib_dataset_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import readers
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import gfile
from tensorflow.python.util import deprecation
class TextLineDataset(contrib_dataset_ops.Dataset):
  """Deprecated contrib wrapper yielding the lines of one or more text files."""

  @deprecation.deprecated(None, "Use `tf.data.TextLineDataset`.")
  def __init__(self, filenames, compression_type=None, buffer_size=None):
    """Creates a `TextLineDataset`.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
      compression_type: (Optional.) A `tf.string` scalar evaluating to one of
        `""` (no compression), `"ZLIB"`, or `"GZIP"`.
      buffer_size: (Optional.) A `tf.int64` scalar denoting the number of
        bytes to buffer; 0 selects defaults based on the compression type.
    """
    # Delegate entirely to the core implementation; this class only exists
    # to keep the deprecated contrib API importable.
    wrapped = readers.TextLineDataset(filenames, compression_type, buffer_size)
    super(TextLineDataset, self).__init__(wrapped)
class TFRecordDataset(contrib_dataset_ops.Dataset):
  """Deprecated contrib wrapper yielding records from TFRecord files."""

  @deprecation.deprecated(None, "Use `tf.data.TFRecordDataset`.")
  def __init__(self, filenames, compression_type=None, buffer_size=None):
    """Creates a `TFRecordDataset`.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
      compression_type: (Optional.) A `tf.string` scalar evaluating to one of
        `""` (no compression), `"ZLIB"`, or `"GZIP"`.
      buffer_size: (Optional.) A `tf.int64` scalar number of bytes in the
        read buffer; 0 disables buffering.
    """
    # Thin delegation to the core reader; kept for contrib compatibility.
    wrapped = readers.TFRecordDataset(filenames, compression_type, buffer_size)
    super(TFRecordDataset, self).__init__(wrapped)
class FixedLengthRecordDataset(contrib_dataset_ops.Dataset):
  """Deprecated contrib wrapper over fixed-length binary record files."""

  @deprecation.deprecated(None, "Use `tf.data.FixedLengthRecordDataset`.")
  def __init__(self,
               filenames,
               record_bytes,
               header_bytes=None,
               footer_bytes=None,
               buffer_size=None):
    """Creates a `FixedLengthRecordDataset`.

    Args:
      filenames: A `tf.string` tensor containing one or more filenames.
      record_bytes: A `tf.int64` scalar number of bytes per record.
      header_bytes: (Optional.) A `tf.int64` scalar number of bytes to skip
        at the start of each file.
      footer_bytes: (Optional.) A `tf.int64` scalar number of bytes to ignore
        at the end of each file.
      buffer_size: (Optional.) A `tf.int64` scalar number of bytes to buffer
        while reading.
    """
    # Delegate straight to the core implementation.
    super(FixedLengthRecordDataset, self).__init__(
        readers.FixedLengthRecordDataset(filenames, record_bytes, header_bytes,
                                         footer_bytes, buffer_size))
def read_batch_features(file_pattern,
                        batch_size,
                        features,
                        reader,
                        reader_args=None,
                        randomize_input=True,
                        num_epochs=None,
                        capacity=10000):
  """Reads and parses batches of serialized `Example` protos.

  Builds an input pipeline that globs `file_pattern`, reads serialized
  `Example` records with `reader`, optionally repeats and shuffles them,
  batches `batch_size` records at a time, and parses each batch with
  `tf.parse_example` according to `features`. Dense features come back as
  `Tensor`s and variable-length ones as `SparseTensor`s, e.g. a
  `VarLenFeature` of strings yields a `SparseTensor` whose values are the
  strings across the batch.

  Args:
    file_pattern: List of files or patterns of file paths containing
      `Example` records. See `tf.gfile.Glob` for pattern rules.
    batch_size: An int, the number of consecutive records per batch.
    features: A `dict` mapping feature keys to `FixedLenFeature` or
      `VarLenFeature` values. See `tf.parse_example`.
    reader: A function or class called with a `filenames` tensor and
      (optional) `reader_args`, returning a `Dataset` of serialized
      Examples (or of (key, serialized-Example) string pairs).
    reader_args: Additional arguments to pass to the reader class.
    randomize_input: Whether the input should be shuffled.
    num_epochs: Integer number of passes over the data; `None` cycles
      through the dataset forever.
    capacity: Buffer size of the shuffle; larger values shuffle better at
      the cost of memory and startup time.

  Returns:
    A dict from keys in `features` to `Tensor` or `SparseTensor` objects.

  Raises:
    TypeError: If `reader` does not produce `tf.string` elements or
      `(tf.string, tf.string)` key-value pairs.
  """
  filenames = _get_file_names(file_pattern, randomize_input)
  if reader_args:
    dataset = reader(filenames, *reader_args)
  else:
    dataset = reader(filenames)

  if dataset.output_types == (dtypes.string, dtypes.string):
    # Key/value readers: discard the key, keep the serialized proto.
    dataset = dataset.map(lambda unused_k, v: v)
  elif dataset.output_types != dtypes.string:
    raise TypeError("`reader` must be a dataset of `tf.string` values, "
                    "or `(tf.string, tf.string)` key-value pairs.")

  if num_epochs != 1:
    dataset = dataset.repeat(num_epochs)
  if randomize_input:
    dataset = dataset.shuffle(capacity)
  dataset = dataset.batch(batch_size)
  dataset = dataset.map(lambda serialized: _parse_example(serialized, features))

  # `_parse_example` flattens the parsed features (ordered by sorted key)
  # into a tuple: one tensor per dense feature, three per sparse feature.
  flat_outputs = dataset.make_one_shot_iterator().get_next()
  result = {}
  cursor = 0
  for key in sorted(features):
    if isinstance(features[key], parsing_ops.FixedLenFeature):
      result[key] = flat_outputs[cursor]
      cursor += 1
    else:
      result[key] = sparse_tensor_lib.SparseTensor(
          indices=flat_outputs[cursor],
          values=flat_outputs[cursor + 1],
          dense_shape=flat_outputs[cursor + 2])
      cursor += 3
  return result
def _get_file_names(file_pattern, randomize_input):
  """Expands `file_pattern` into a list of matching file names.

  Args:
    file_pattern: File glob pattern, or list of glob patterns.
    randomize_input: Whether the caller intends to shuffle; when False the
      names are sorted so results are deterministic (e.g. for unit tests).

  Returns:
    List of file names matching `file_pattern`.

  Raises:
    ValueError: If `file_pattern` is empty, or the pattern matches no files.
  """
  if isinstance(file_pattern, list):
    if not file_pattern:
      raise ValueError("File pattern is empty.")
    file_names = [name
                  for pattern in file_pattern
                  for name in gfile.Glob(pattern)]
  else:
    file_names = list(gfile.Glob(file_pattern))
  if not file_names:
    raise ValueError("No files match %s." % file_pattern)
  if not randomize_input:
    file_names = sorted(file_names)
  return file_names
def _parse_example(serialized, features):
  """Parses a batch of serialized Examples into a flat tuple of tensors.

  The tuple is ordered by sorted feature key; each dense feature contributes
  one tensor and each sparse feature contributes its
  (indices, values, dense_shape) triple, mirroring the unpacking done in
  `read_batch_features`.
  """
  parsed = parsing_ops.parse_example(serialized, features)
  flat = []
  for key in sorted(features):
    value = parsed[key]
    if isinstance(value, sparse_tensor_lib.SparseTensor):
      flat += [value.indices, value.values, value.dense_shape]
    else:
      flat.append(value)
  return tuple(flat)
class SqlDataset(contrib_dataset_ops.Dataset):
  """Contrib-facing wrapper around `_SqlDataset` (SQL query results)."""

  def __init__(self, driver_name, data_source_name, query, output_types):
    # Simply wrap the core implementation in the contrib Dataset shim.
    super(SqlDataset, self).__init__(
        _SqlDataset(driver_name, data_source_name, query, output_types))
class _SqlDataset(dataset_ops.Dataset):
  """A `Dataset` consisting of the results from a SQL query."""

  def __init__(self, driver_name, data_source_name, query, output_types):
    """Creates a `_SqlDataset`.

    Allows reading each row of a SQL query's result set as a dataset
    element, e.g.:

    ```python
    dataset = tf.contrib.data.SqlDataset("sqlite", "/foo/bar.sqlite3",
                                         "SELECT name, age FROM people",
                                         (tf.string, tf.int32))
    ```

    Args:
      driver_name: A 0-D `tf.string` tensor containing the database type.
        Currently, the only supported value is 'sqlite'.
      data_source_name: A 0-D `tf.string` tensor containing a connection
        string to connect to the database.
      query: A 0-D `tf.string` tensor containing the SQL query to execute.
      output_types: A tuple of `tf.DType` objects representing the types of
        the columns returned by `query`.
    """
    super(_SqlDataset, self).__init__()

    def _as_string_tensor(value, name):
      # All three kernel inputs are scalar strings.
      return ops.convert_to_tensor(value, dtype=dtypes.string, name=name)

    self._driver_name = _as_string_tensor(driver_name, "driver_name")
    self._data_source_name = _as_string_tensor(data_source_name,
                                               "data_source_name")
    self._query = _as_string_tensor(query, "query")
    self._output_types = output_types

  def _as_variant_tensor(self):
    # Hand the flattened type/shape signatures to the SQL dataset kernel.
    return gen_dataset_ops.sql_dataset(self._driver_name,
                                       self._data_source_name, self._query,
                                       nest.flatten(self.output_types),
                                       nest.flatten(self.output_shapes))

  @property
  def output_shapes(self):
    # Each column of a result row is a scalar.
    return nest.map_structure(lambda _: tensor_shape.TensorShape([]),
                              self._output_types)

  @property
  def output_types(self):
    return self._output_types
| 37.477419
| 81
| 0.669909
|
4a06d4aa0e577ae6e113d2b4003bd5f44efb648a
| 388
|
py
|
Python
|
docs/hh_guide/hh_guide.py
|
notsue/HaHa
|
c7969bb3b54121f117f96ffacb96661f7d11eecc
|
[
"MIT"
] | null | null | null |
docs/hh_guide/hh_guide.py
|
notsue/HaHa
|
c7969bb3b54121f117f96ffacb96661f7d11eecc
|
[
"MIT"
] | null | null | null |
docs/hh_guide/hh_guide.py
|
notsue/HaHa
|
c7969bb3b54121f117f96ffacb96661f7d11eecc
|
[
"MIT"
] | null | null | null |
import sys

# Make the in-repo `hh` package importable when running from docs/hh_guide.
sys.path.append('../../')
import hh

# Build configuration for the guide document; keys follow the hh config
# schema (source/output files, TOC depth, rendering toggles, CSS, locale).
config = {
    'sourcefile': 'hh_guide.hh',
    'outputfile': None,
    'toc': 2,
    'h1': 0,
    'numbers': 1,
    'includes': 3,
    'x3d': 1,
    'css': 'default',
    'katex': 1,
    'highlight': 1,
    'highlight-css': 'github.min.css',
    'dir': 'ltr',
    'lang': 'en',
    'class': 'hh',
    'view': 0,
}

document = hh.Document(config)
print(document.dublin_core['dc.title'])
print('DONE')
| 32.333333
| 140
| 0.582474
|
4a06d54179b4c5cc94564d42c84bb597a12961f7
| 301,861
|
py
|
Python
|
meerk40t/svgelements.py
|
apbarratt/meerk40t
|
7049ab7693d482bd96ff3a298a9fc164836a750d
|
[
"MIT"
] | 118
|
2019-09-09T14:15:28.000Z
|
2022-03-17T18:54:27.000Z
|
meerk40t/svgelements.py
|
apbarratt/meerk40t
|
7049ab7693d482bd96ff3a298a9fc164836a750d
|
[
"MIT"
] | 656
|
2019-07-21T03:34:12.000Z
|
2022-03-30T18:10:31.000Z
|
meerk40t/svgelements.py
|
apbarratt/meerk40t
|
7049ab7693d482bd96ff3a298a9fc164836a750d
|
[
"MIT"
] | 48
|
2019-12-22T23:05:27.000Z
|
2022-03-17T18:54:25.000Z
|
# -*- coding: ISO-8859-1 -*-
import re
try:
from collections.abc import MutableSequence # noqa
except ImportError:
from collections import MutableSequence # noqa
from copy import copy
from math import (
acos,
atan,
atan2,
ceil,
cos,
degrees,
hypot,
log,
radians,
sin,
sqrt,
tan,
)
from xml.etree.ElementTree import iterparse
try:
from math import tau
except ImportError:
from math import pi
tau = pi * 2
"""
The path elements are derived from regebro's svg.path project ( https://github.com/regebro/svg.path ) with
some of the math from mathandy's svgpathtools project ( https://github.com/mathandy/svgpathtools ).
The goal is to provide svg like path objects and structures. The svg standard 1.1 and elements of 2.0 will
be used to provide much of the decisions within path objects. Such that if there is a question on
implementation if the SVG documentation has a methodology it should be used.
Though not required the Image class acquires new functionality if provided with PIL/Pillow as an import
and the Arc can do exact arc calculations if scipy is installed.
"""
SVGELEMENTS_VERSION = "1.6.4"
# Recursion depth / numeric tolerance used by curve calculations.
MIN_DEPTH = 5
ERROR = 1e-12
# NOTE(review): module-level mutable; presumably tracks the deepest
# recursion reached by adaptive routines — confirm against usages below.
max_depth = 0
# SVG STATIC VALUES
DEFAULT_PPI = 96.0
SVG_NAME_TAG = "svg"
SVG_ATTR_VERSION = "version"
SVG_VALUE_VERSION = "1.1"
SVG_ATTR_XMLNS = "xmlns"
SVG_VALUE_XMLNS = "http://www.w3.org/2000/svg"
SVG_ATTR_XMLNS_LINK = "xmlns:xlink"
SVG_VALUE_XLINK = "http://www.w3.org/1999/xlink"
SVG_ATTR_XMLNS_EV = "xmlns:ev"
SVG_VALUE_XMLNS_EV = "http://www.w3.org/2001/xml-events"
XLINK_HREF = "{http://www.w3.org/1999/xlink}href"
SVG_HREF = "href"
SVG_ATTR_WIDTH = "width"
SVG_ATTR_HEIGHT = "height"
SVG_ATTR_VIEWBOX = "viewBox"
SVG_VIEWBOX_TRANSFORM = "viewbox_transform"
# Element tag names.
SVG_TAG_PATH = "path"
SVG_TAG_GROUP = "g"
SVG_TAG_RECT = "rect"
SVG_TAG_CIRCLE = "circle"
SVG_TAG_ELLIPSE = "ellipse"
SVG_TAG_LINE = "line"
SVG_TAG_POLYLINE = "polyline"
SVG_TAG_POLYGON = "polygon"
SVG_TAG_TEXT = "text"
SVG_TAG_TSPAN = "tspan"
SVG_TAG_IMAGE = "image"
SVG_TAG_DESC = "desc"
SVG_TAG_TITLE = "title"
SVG_TAG_METADATA = "metadata"
SVG_TAG_STYLE = "style"
SVG_TAG_DEFS = "defs"
SVG_TAG_USE = "use"
SVG_TAG_CLIPPATH = "clipPath"
SVG_TAG_PATTERN = "pattern"
SVG_STRUCT_ATTRIB = "attributes"
# Attribute names.
SVG_ATTR_ID = "id"
SVG_ATTR_DATA = "d"
SVG_ATTR_DISPLAY = "display"
SVG_ATTR_COLOR = "color"
SVG_ATTR_FILL = "fill"
SVG_ATTR_FILL_OPACITY = "fill-opacity"
SVG_ATTR_STROKE = "stroke"
SVG_ATTR_STROKE_OPACITY = "stroke-opacity"
SVG_ATTR_STROKE_WIDTH = "stroke-width"
SVG_ATTR_TRANSFORM = "transform"
SVG_ATTR_STYLE = "style"
SVG_ATTR_CLASS = "class"
SVG_ATTR_CLIP_PATH = "clip-path"
SVG_ATTR_CLIP_RULE = "clip-rule"
SVG_ATTR_CLIP_UNIT_TYPE = "clipPathUnits"
SVG_ATTR_CENTER_X = "cx"
SVG_ATTR_CENTER_Y = "cy"
SVG_ATTR_RADIUS_X = "rx"
SVG_ATTR_RADIUS_Y = "ry"
SVG_ATTR_RADIUS = "r"
SVG_ATTR_POINTS = "points"
SVG_ATTR_PRESERVEASPECTRATIO = "preserveAspectRatio"
SVG_ATTR_X = "x"
SVG_ATTR_Y = "y"
SVG_ATTR_X0 = "x0"
SVG_ATTR_Y0 = "y0"
SVG_ATTR_X1 = "x1"
SVG_ATTR_Y1 = "y1"
SVG_ATTR_X2 = "x2"
SVG_ATTR_Y2 = "y2"
SVG_ATTR_DX = "dx"
SVG_ATTR_DY = "dy"
SVG_ATTR_TAG = "tag"
SVG_ATTR_FONT = "font"
SVG_ATTR_FONT_FAMILY = "font-family"  # Serif, sans-serif, cursive, fantasy, monospace
SVG_ATTR_FONT_FACE = "font-face"
SVG_ATTR_FONT_SIZE = "font-size"
SVG_ATTR_FONT_WEIGHT = "font-weight"  # normal, bold, bolder, lighter, 100-900
SVG_ATTR_TEXT_ANCHOR = "text-anchor"
SVG_ATTR_PATTERN_CONTENT_UNITS = "patternContentUnits"
SVG_ATTR_PATTERN_TRANSFORM = "patternTransform"
SVG_ATTR_PATTERN_UNITS = "patternUnits"
SVG_ATTR_VECTOR_EFFECT = "vector-effect"
SVG_UNIT_TYPE_USERSPACEONUSE = "userSpaceOnUse"
SVG_UNIT_TYPE_OBJECTBOUNDINGBOX = "objectBoundingBox"
SVG_RULE_NONZERO = "nonzero"
SVG_RULE_EVENODD = "evenodd"
# Transform function names (lowercased forms as matched by the parser).
SVG_TRANSFORM_MATRIX = "matrix"
SVG_TRANSFORM_TRANSLATE = "translate"
SVG_TRANSFORM_SCALE = "scale"
SVG_TRANSFORM_ROTATE = "rotate"
SVG_TRANSFORM_SKEW_X = "skewx"
SVG_TRANSFORM_SKEW_Y = "skewy"
SVG_TRANSFORM_SKEW = "skew"
SVG_TRANSFORM_TRANSLATE_X = "translatex"
SVG_TRANSFORM_TRANSLATE_Y = "translatey"
SVG_TRANSFORM_SCALE_X = "scalex"
SVG_TRANSFORM_SCALE_Y = "scaley"
SVG_VALUE_NONE = "none"
SVG_VALUE_CURRENT_COLOR = "currentColor"
SVG_VALUE_NON_SCALING_STROKE = "non-scaling-stroke"
# Regex building blocks for parsing SVG numeric/unit grammars.
PATTERN_WS = r"[\s\t\n]*"
PATTERN_COMMA = r"(?:\s*,\s*|\s+|(?=-))"
PATTERN_COMMAWSP = r"[ ,\t\n\x09\x0A\x0C\x0D]+"
PATTERN_FLOAT = r"[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?"
PATTERN_LENGTH_UNITS = "cm|mm|Q|in|pt|pc|px|em|cx|ch|rem|vw|vh|vmin|vmax"
PATTERN_ANGLE_UNITS = "deg|grad|rad|turn"
PATTERN_TIME_UNITS = "s|ms"
PATTERN_FREQUENCY_UNITS = "Hz|kHz"
PATTERN_RESOLUTION_UNITS = "dpi|dpcm|dppx"
PATTERN_PERCENT = "%"
PATTERN_TRANSFORM = (
    SVG_TRANSFORM_MATRIX
    + "|"
    + SVG_TRANSFORM_TRANSLATE
    + "|"
    + SVG_TRANSFORM_TRANSLATE_X
    + "|"
    + SVG_TRANSFORM_TRANSLATE_Y
    + "|"
    + SVG_TRANSFORM_SCALE
    + "|"
    + SVG_TRANSFORM_SCALE_X
    + "|"
    + SVG_TRANSFORM_SCALE_Y
    + "|"
    + SVG_TRANSFORM_ROTATE
    + "|"
    + SVG_TRANSFORM_SKEW
    + "|"
    + SVG_TRANSFORM_SKEW_X
    + "|"
    + SVG_TRANSFORM_SKEW_Y
)
PATTERN_TRANSFORM_UNITS = (
    PATTERN_LENGTH_UNITS + "|" + PATTERN_ANGLE_UNITS + "|" + PATTERN_PERCENT
)
# Compiled regexes used throughout parsing.
REGEX_IRI = re.compile(r"url\(#?(.*)\)")
REGEX_DATA_URL = re.compile(r"^data:([^,]*),(.*)")
REGEX_FLOAT = re.compile(PATTERN_FLOAT)
REGEX_COORD_PAIR = re.compile(
    "(%s)%s(%s)" % (PATTERN_FLOAT, PATTERN_COMMA, PATTERN_FLOAT)
)
REGEX_TRANSFORM_TEMPLATE = re.compile(
    r"(?u)(%s)%s\(([^)]+)\)" % (PATTERN_TRANSFORM, PATTERN_WS)
)
REGEX_TRANSFORM_PARAMETER = re.compile(
    "(%s)%s(%s)?" % (PATTERN_FLOAT, PATTERN_WS, PATTERN_TRANSFORM_UNITS)
)
REGEX_COLOR_HEX = re.compile(r"^#?([0-9A-Fa-f]{3,8})$")
REGEX_COLOR_RGB = re.compile(
    r"rgba?\(\s*(%s)\s*,\s*(%s)\s*,\s*(%s)\s*(?:,\s*(%s)\s*)?\)"
    % (PATTERN_FLOAT, PATTERN_FLOAT, PATTERN_FLOAT, PATTERN_FLOAT)
)
REGEX_COLOR_RGB_PERCENT = re.compile(
    r"rgba?\(\s*(%s)%%\s*,\s*(%s)%%\s*,\s*(%s)%%\s*(?:,\s*(%s)\s*)?\)"
    % (PATTERN_FLOAT, PATTERN_FLOAT, PATTERN_FLOAT, PATTERN_FLOAT)
)
REGEX_COLOR_HSL = re.compile(
    r"hsla?\(\s*(%s)\s*,\s*(%s)%%\s*,\s*(%s)%%\s*(?:,\s*(%s)\s*)?\)"
    % (PATTERN_FLOAT, PATTERN_FLOAT, PATTERN_FLOAT, PATTERN_FLOAT)
)
REGEX_LENGTH = re.compile(r"(%s)([A-Za-z%%]*)" % PATTERN_FLOAT)
REGEX_CSS_STYLE = re.compile(r"([^{]+)\s*\{\s*([^}]+)\s*\}")
REGEX_CSS_FONT = re.compile(
    r"(?:(normal|italic|oblique)\s|(normal|small-caps)\s|(normal|bold|bolder|lighter|\d{3})\s|(normal|ultra-condensed|extra-condensed|condensed|semi-condensed|semi-expanded|expanded|extra-expanded|ultra-expanded)\s)*\s*(xx-small|x-small|small|medium|large|x-large|xx-large|larger|smaller|\d+(?:em|pt|pc|px|%))(?:/(xx-small|x-small|small|medium|large|x-large|xx-large|larger|smaller|\d+(?:em|pt|pc|px|%)))?\s*(.*),?\s+(serif|sans-serif|cursive|fantasy|monospace);?"
)
# Tokenizers for SVG path-data: commands, numbers (with inline close), flags.
svg_parse = [("COMMAND", r"[MmZzLlHhVvCcSsQqTtAa]"), ("SKIP", PATTERN_COMMAWSP)]
svg_re = re.compile("|".join("(?P<%s>%s)" % pair for pair in svg_parse))
num_parse = [("FLOAT", PATTERN_FLOAT), ("CLOSE", r"[Zz]"), ("SKIP", PATTERN_COMMAWSP)]
num_re = re.compile("|".join("(?P<%s>%s)" % pair for pair in num_parse))
flag_parse = [("FLAG", r"[01]"), ("SKIP", PATTERN_COMMAWSP)]
flag_re = re.compile("|".join("(?P<%s>%s)" % pair for pair in flag_parse))
class SVGLexicalParser:
    """Tokenizing parser for SVG path-data ("d" attribute) strings.

    Scans the string with the module-level `svg_re`, `num_re` and `flag_re`
    tokenizers and forwards each decoded command to a handler object
    (`parser`) exposing start/move/line/cubic/quad/arc/closed/end calls.
    """
    def __init__(self):
        # parser: the command handler; pathd: the string being scanned.
        self.parser = None
        self.pathd = None
        # pos/limit: current scan offset and string length.
        self.pos = 0
        self.limit = 0
        # Holds an inline "z"/"Z" token seen while scanning numbers.
        self.inline_close = None
    def _command(self):
        # Return the next single-letter path command, or None at end of input.
        while self.pos < self.limit:
            match = svg_re.match(self.pathd, self.pos)
            if match is None:
                return None  # Did not match at command sequence.
            self.pos = match.end()
            kind = match.lastgroup
            if kind == "SKIP":
                continue
            return match.group()
        return None
    def _more(self):
        # Peek (without consuming a number): True if another number follows.
        # An inline close token is recorded in self.inline_close -> False.
        while self.pos < self.limit:
            match = num_re.match(self.pathd, self.pos)
            if match is None:
                return False
            kind = match.lastgroup
            if kind == "CLOSE":
                self.inline_close = match.group()
                return False
            if kind == "SKIP":
                # move skipped elements forward.
                self.pos = match.end()
                continue
            return True
        return None
    def _number(self):
        # Consume and return the next float; None at end or at inline close.
        while self.pos < self.limit:
            match = num_re.match(self.pathd, self.pos)
            if match is None:
                break  # No more matches.
            kind = match.lastgroup
            if kind == "CLOSE":
                # Inline Close
                self.inline_close = match.group()
                return None
            self.pos = match.end()
            if kind == "SKIP":
                continue
            return float(match.group())
        return None
    def _flag(self):
        # Consume a single arc flag ("0" or "1") and return it as a bool.
        while self.pos < self.limit:
            match = flag_re.match(self.pathd, self.pos)
            if match is None:
                break  # No more matches.
            self.pos = match.end()
            kind = match.lastgroup
            if kind == "SKIP":
                continue
            return bool(int(match.group()))
        return None
    def _coord(self):
        # Consume an absolute (x, y) pair; None if no x, ValueError if y missing.
        x = self._number()
        if x is None:
            return None
        y = self._number()
        if y is None:
            raise ValueError
        return x, y
    def _rcoord(self):
        # Consume a coordinate pair offset by the handler's current point.
        position = self._coord()
        if position is None:
            return None
        current_pos = self.parser.current_point
        if current_pos is None:
            return position
        return position[0] + current_pos.x, position[1] + current_pos.y
    def parse(self, parser, pathd):
        """Parse `pathd`, emitting commands to `parser` after `parser.start()`."""
        self.parser = parser
        self.parser.start()
        self.pathd = pathd
        self.pos = 0
        self.limit = len(pathd)
        while True:
            cmd = self._command()
            if cmd is None:
                # NOTE(review): returning here means the trailing
                # `self.parser.end()` below never runs — confirm intended.
                return
            elif cmd == "z" or cmd == "Z":
                # Close-path accepts no arguments.
                if self._more():
                    raise ValueError
                self.parser.closed(relative=cmd.islower())
                self.inline_close = None
                continue
            elif cmd == "m":
                # Relative moveto; extra pairs are implicit relative linetos.
                if not self._more():
                    raise ValueError
                coord = self._rcoord()
                self.parser.move(coord, relative=True)
                while self._more():
                    coord = self._rcoord()
                    self.parser.line(coord, relative=True)
            elif cmd == "M":
                # Absolute moveto; extra pairs are implicit absolute linetos.
                if not self._more():
                    raise ValueError
                coord = self._coord()
                self.parser.move(coord, relative=False)
                while self._more():
                    coord = self._coord()
                    self.parser.line(coord, relative=False)
            elif cmd == "l":
                # NOTE(review): when the coordinate is missing, the inline
                # close token (a string) is passed through to the handler —
                # presumably handled downstream; verify.
                while True:
                    coord = self._rcoord()
                    if coord is None:
                        coord = self.inline_close
                        if coord is None:
                            raise ValueError
                    self.parser.line(coord, relative=True)
                    if not self._more():
                        break
            elif cmd == "L":
                while True:
                    coord = self._coord()
                    if coord is None:
                        coord = self.inline_close
                        if coord is None:
                            raise ValueError
                    self.parser.line(coord, relative=False)
                    if not self._more():
                        break
            elif cmd == "t":
                # Smooth quadratic (reflected control point), relative.
                while True:
                    coord = self._rcoord()
                    if coord is None:
                        coord = self.inline_close
                        if coord is None:
                            raise ValueError
                    self.parser.smooth_quad(coord, relative=True)
                    if not self._more():
                        break
            elif cmd == "T":
                while True:
                    coord = self._coord()
                    if coord is None:
                        coord = self.inline_close
                        if coord is None:
                            raise ValueError
                    self.parser.smooth_quad(coord, relative=False)
                    if not self._more():
                        break
            elif cmd == "h":
                while True:
                    value = self._number()
                    self.parser.horizontal(value, relative=True)
                    if not self._more():
                        break
            elif cmd == "H":
                while True:
                    value = self._number()
                    self.parser.horizontal(value, relative=False)
                    if not self._more():
                        break
            elif cmd == "v":
                while True:
                    value = self._number()
                    self.parser.vertical(value, relative=True)
                    if not self._more():
                        break
            elif cmd == "V":
                # NOTE(review): unlike h/H/v, this pre-checks _more() and so
                # silently accepts a bare "V" with no numbers — confirm.
                while self._more():
                    value = self._number()
                    self.parser.vertical(value, relative=False)
            elif cmd == "c":
                # Cubic Bézier: two control points plus endpoint, relative.
                while True:
                    coord1, coord2, coord3 = (
                        self._rcoord(),
                        self._rcoord(),
                        self._rcoord(),
                    )
                    if coord1 is None:
                        coord1 = self.inline_close
                        if coord1 is None:
                            raise ValueError
                    if coord2 is None:
                        coord2 = self.inline_close
                        if coord2 is None:
                            raise ValueError
                    if coord3 is None:
                        coord3 = self.inline_close
                        if coord3 is None:
                            raise ValueError
                    self.parser.cubic(coord1, coord2, coord3, relative=True)
                    if not self._more():
                        break
            elif cmd == "C":
                while True:
                    coord1, coord2, coord3 = self._coord(), self._coord(), self._coord()
                    if coord1 is None:
                        coord1 = self.inline_close
                        if coord1 is None:
                            raise ValueError
                    if coord2 is None:
                        coord2 = self.inline_close
                        if coord2 is None:
                            raise ValueError
                    if coord3 is None:
                        coord3 = self.inline_close
                        if coord3 is None:
                            raise ValueError
                    self.parser.cubic(coord1, coord2, coord3, relative=False)
                    if not self._more():
                        break
            elif cmd == "q":
                # Quadratic Bézier: one control point plus endpoint, relative.
                while True:
                    coord1, coord2 = self._rcoord(), self._rcoord()
                    if coord1 is None:
                        coord1 = self.inline_close
                        if coord1 is None:
                            raise ValueError
                    if coord2 is None:
                        coord2 = self.inline_close
                        if coord2 is None:
                            raise ValueError
                    self.parser.quad(coord1, coord2, relative=True)
                    if not self._more():
                        break
            elif cmd == "Q":
                while True:
                    coord1, coord2 = self._coord(), self._coord()
                    if coord1 is None:
                        coord1 = self.inline_close
                        if coord1 is None:
                            raise ValueError
                    if coord2 is None:
                        coord2 = self.inline_close
                        if coord2 is None:
                            raise ValueError
                    self.parser.quad(coord1, coord2, relative=False)
                    if not self._more():
                        break
            elif cmd == "s":
                # Smooth cubic (reflected first control point), relative.
                while True:
                    coord1, coord2 = self._rcoord(), self._rcoord()
                    if coord1 is None:
                        coord1 = self.inline_close
                        if coord1 is None:
                            raise ValueError
                    if coord2 is None:
                        coord2 = self.inline_close
                        if coord2 is None:
                            raise ValueError
                    self.parser.smooth_cubic(coord1, coord2, relative=True)
                    if not self._more():
                        break
            elif cmd == "S":
                while True:
                    coord1, coord2 = self._coord(), self._coord()
                    if coord1 is None:
                        coord1 = self.inline_close
                        if coord1 is None:
                            raise ValueError
                    if coord2 is None:
                        coord2 = self.inline_close
                        if coord2 is None:
                            raise ValueError
                    self.parser.smooth_cubic(coord1, coord2, relative=False)
                    if not self._more():
                        break
            elif cmd == "a":
                # Elliptical arc: rx ry rotation large-arc-flag sweep-flag x y.
                while self._more():
                    rx, ry, rotation, arc, sweep, coord = (
                        self._number(),
                        self._number(),
                        self._number(),
                        self._flag(),
                        self._flag(),
                        self._rcoord(),
                    )
                    if sweep is None:
                        raise ValueError
                    if coord is None:
                        coord = self.inline_close
                        if coord is None:
                            raise ValueError
                    self.parser.arc(rx, ry, rotation, arc, sweep, coord, relative=True)
            elif cmd == "A":
                # NOTE(review): no `sweep is None` guard here, unlike "a" —
                # asymmetry preserved; confirm against upstream intent.
                while self._more():
                    rx, ry, rotation, arc, sweep, coord = (
                        self._number(),
                        self._number(),
                        self._number(),
                        self._flag(),
                        self._flag(),
                        self._coord(),
                    )
                    if coord is None:
                        coord = self.inline_close
                        if coord is None:
                            raise ValueError
                    self.parser.arc(rx, ry, rotation, arc, sweep, coord, relative=False)
        self.parser.end()
class Length(object):
    """
    SVGLength as used in SVG

    Length class is lazy when solving values. Several conversion values are unknown by default and length simply
    stores that ambiguity. So we can have a length of 50% and without calling .value(relative_length=3000) it will
    simply store as 50%. Likewise you can have absolute values like 30cm or 20in which are not knowable in pixels
    unless a PPI value is supplied. We can say .value(relative_length=30cm, PPI=96) and solve this for a value like
    12%. We can also convert values between knowable lengths. So 30cm is 300mm regardless whether we know how to
    convert this to pixels. 0% is 0 in any units or relative values. We can convert pixels to pc and pt without issue.
    We can convert vh, vw, vmax, vmin values if we know viewbox values. We can convert em values if we know the font_size.
    We can add values together if they are convertible units e.g. Length("20in") + Length("3cm").
    If .value() cannot solve for the value with the given information then it will return a Length value. If it can
    be solved it will return a float.
    """

    def __init__(self, *args, **kwargs):
        """
        Construct from a single parseable value — ``None``, a number, or a
        string such as ``"20mm"`` — or from an explicit ``(amount, units)``
        pair. Anything unparseable falls through to ``0.0`` with no units.
        """
        if len(args) == 1:
            value = args[0]
            if value is None:
                # A None length: amount and units are both undefined.
                self.amount = None
                self.units = None
                return
            s = str(value)
            # REGEX_LENGTH yields (number, unit) pairs; only the first match is used.
            for m in REGEX_LENGTH.findall(s):
                self.amount = float(m[0])
                self.units = m[1]
                return
        elif len(args) == 2:
            self.amount = args[0]
            self.units = args[1]
            return
        self.amount = 0.0
        self.units = ""

    def __float__(self):
        """
        Return the value scaled to pixels for px/pt/pc; other units return the
        raw amount.

        NOTE(review): returns None when amount is None, which makes float()
        raise TypeError at the call site — kept for upstream compatibility.
        """
        if self.amount is None:
            return None
        if self.units == "pt":
            return self.amount * 1.3333  # 1pt = 4/3 px
        elif self.units == "pc":
            return self.amount * 16.0  # 1pc = 16 px
        return self.amount

    def __imul__(self, other):
        """In-place multiply by a scalar, a percentage Length, or a same-unit Length."""
        if isinstance(other, (int, float)):
            self.amount *= other
            return self
        if self.amount == 0.0:
            return 0.0
        if isinstance(other, str):
            other = Length(other)
        if isinstance(other, Length):
            if other.amount == 0.0:
                self.amount = 0.0
                return self
            if self.units == other.units:
                self.amount *= other.amount
                return self
            if self.units == "%":
                # 50% * 30mm -> 15mm: the percentage adopts the other's units.
                self.units = other.units
                self.amount = self.amount * other.amount / 100.0
                return self
            elif other.units == "%":
                self.amount = self.amount * other.amount / 100.0
                return self
        raise ValueError

    def __iadd__(self, other):
        """
        In-place addition of a convertible Length.

        px/pt/pc convert among themselves; cm/mm/in convert among themselves.
        Mixing the two families (or any other units) raises ValueError.
        """
        if not isinstance(other, Length):
            other = Length(other)
        if self.units == other.units:
            self.amount += other.amount
            return self
        if self.amount == 0:
            # Zero is unit-agnostic: adopt the other operand's value entirely.
            self.amount = other.amount
            self.units = other.units
            return self
        if other.amount == 0:
            return self
        if self.units == "px" or self.units == "":
            if other.units == "px" or other.units == "":
                self.amount += other.amount
            elif other.units == "pt":
                self.amount += other.amount * 1.3333
            elif other.units == "pc":
                self.amount += other.amount * 16.0
            else:
                raise ValueError
            return self
        if self.units == "pt":
            if other.units == "px" or other.units == "":
                self.amount += other.amount / 1.3333
            elif other.units == "pc":
                self.amount += other.amount * 12.0  # 1pc = 12pt
            else:
                raise ValueError
            return self
        elif self.units == "pc":
            if other.units == "px" or other.units == "":
                self.amount += other.amount / 16.0
            elif other.units == "pt":
                self.amount += other.amount / 12.0
            else:
                raise ValueError
            return self
        elif self.units == "cm":
            if other.units == "mm":
                self.amount += other.amount / 10.0
            elif other.units == "in":
                self.amount += other.amount / 0.393701  # 1in = 2.54cm
            else:
                raise ValueError
            return self
        elif self.units == "mm":
            if other.units == "cm":
                self.amount += other.amount * 10.0
            elif other.units == "in":
                self.amount += other.amount / 0.0393701
            else:
                raise ValueError
            return self
        elif self.units == "in":
            if other.units == "cm":
                self.amount += other.amount * 0.393701
            elif other.units == "mm":
                self.amount += other.amount * 0.0393701
            else:
                raise ValueError
            return self
        raise ValueError("%s units were not determined." % self.units)

    def __abs__(self):
        """Return a copy with a non-negative amount."""
        c = self.__copy__()
        c.amount = abs(c.amount)
        return c

    def __truediv__(self, other):
        """
        Divide by a scalar (returning a Length) or by a convertible Length
        (returning a unitless float ratio). Raises ValueError for
        inconvertible units.
        """
        if isinstance(other, (int, float)):
            c = self.__copy__()
            c.amount /= other
            return c
        if self.amount == 0.0:
            return 0.0
        if isinstance(other, str):
            other = Length(other)
        if isinstance(other, Length):
            if self.units == other.units:
                q = self.amount / other.amount
                return q  # no units
            if self.units == "px" or self.units == "":
                if other.units == "px" or other.units == "":
                    return self.amount / other.amount
                elif other.units == "pt":
                    return self.amount / (other.amount * 1.3333)
                elif other.units == "pc":
                    return self.amount / (other.amount * 16.0)
                else:
                    raise ValueError
            if self.units == "pt":
                if other.units == "px" or other.units == "":
                    return self.amount / (other.amount / 1.3333)
                elif other.units == "pc":
                    return self.amount / (other.amount * 12.0)
                else:
                    raise ValueError
            if self.units == "pc":
                if other.units == "px" or other.units == "":
                    return self.amount / (other.amount / 16.0)
                elif other.units == "pt":
                    return self.amount / (other.amount / 12.0)
                else:
                    raise ValueError
            if self.units == "cm":
                if other.units == "mm":
                    return self.amount / (other.amount / 10.0)
                elif other.units == "in":
                    return self.amount / (other.amount / 0.393701)
                else:
                    raise ValueError
            if self.units == "mm":
                if other.units == "cm":
                    return self.amount / (other.amount * 10.0)
                elif other.units == "in":
                    return self.amount / (other.amount / 0.0393701)
                else:
                    raise ValueError
            if self.units == "in":
                if other.units == "cm":
                    return self.amount / (other.amount * 0.393701)
                elif other.units == "mm":
                    return self.amount / (other.amount * 0.0393701)
                else:
                    raise ValueError
        raise ValueError

    __floordiv__ = __truediv__
    __div__ = __truediv__

    def __lt__(self, other):
        return (self - other).amount < 0.0

    def __le__(self, other):
        return (self - other).amount <= 0.0

    def __gt__(self, other):
        return (self - other).amount > 0.0

    def __ge__(self, other):
        return (self - other).amount >= 0.0

    def __ne__(self, other):
        return not self.__eq__(other)

    def __add__(self, other):
        if isinstance(other, (str, float, int)):
            other = Length(other)
        c = self.__copy__()
        c += other
        return c

    __radd__ = __add__

    def __mul__(self, other):
        c = copy(self)
        c *= other
        return c

    def __rdiv__(self, other):
        # NOTE(review): assumes `other` has an .amount attribute; a bare
        # number on the left side would raise AttributeError — kept as-is.
        c = copy(self)
        c *= 1.0 / other.amount
        return c

    def __neg__(self):
        s = self.__copy__()
        s.amount = -s.amount
        return s

    def __isub__(self, other):
        if isinstance(other, (str, float, int)):
            other = Length(other)
        self += -other
        return self

    def __sub__(self, other):
        s = self.__copy__()
        s -= other
        return s

    def __rsub__(self, other):
        if isinstance(other, (str, float, int)):
            other = Length(other)
        return (-self) + other

    def __copy__(self):
        return Length(self.amount, self.units)

    __rmul__ = __mul__

    def __repr__(self):
        return "Length('%s')" % (str(self))

    def __str__(self):
        if self.amount is None:
            return SVG_VALUE_NONE
        return "%s%s" % (Length.str(self.amount), self.units)

    def __eq__(self, other):
        """
        Equality within ERROR tolerance. Numbers compare against the pixel
        value; Lengths compare exactly, then via pixels, then via inches.
        """
        if other is None:
            return False
        s = self.in_pixels()
        if isinstance(other, (float, int)):
            if s is not None:
                return abs(s - other) <= ERROR
            else:
                return other == 0 and self.amount == 0
        if isinstance(other, str):
            other = Length(other)
        if self.amount == other.amount and self.units == other.units:
            return True
        if s is not None:
            # BUGFIX: must compare against *other*'s pixel value; previously
            # this re-read self.in_pixels(), making the test trivially true.
            o = other.in_pixels()
            if o is not None and abs(s - o) <= ERROR:
                return True
        s = self.in_inches()
        if s is not None:
            # BUGFIX: likewise, compare against other.in_inches(), not self's.
            o = other.in_inches()
            if o is not None and abs(s - o) <= ERROR:
                return True
        return False

    @property
    def value_in_units(self):
        """The raw numeric amount, in whatever units this Length carries."""
        return self.amount

    def in_pixels(self):
        """
        Return the amount in pixels for px/pt/pc units, else None.

        NOTE(review): pt/pc divide here while value()/__float__ multiply
        (1pt = 4/3px) — this matches the upstream library; confirm before
        changing, since __eq__ relies on it symmetrically for both operands.
        """
        if self.units == "px" or self.units == "":
            return self.amount
        if self.units == "pt":
            return self.amount / 1.3333
        if self.units == "pc":
            return self.amount / 16.0
        return None

    def in_inches(self):
        """Return the amount in inches for mm/cm/in units, else None."""
        if self.units == "mm":
            return self.amount * 0.0393701
        if self.units == "cm":
            return self.amount * 0.393701
        if self.units == "in":
            return self.amount
        return None

    def to_mm(
        self,
        ppi=DEFAULT_PPI,
        relative_length=None,
        font_size=None,
        font_height=None,
        viewbox=None,
    ):
        """Solve this length and return it expressed as a millimeter Length."""
        value = self.value(
            ppi=ppi,
            relative_length=relative_length,
            font_size=font_size,
            font_height=font_height,
            viewbox=viewbox,
        )
        v = value / (ppi * 0.0393701)
        return Length("%smm" % (Length.str(v)))

    def to_cm(
        self,
        ppi=DEFAULT_PPI,
        relative_length=None,
        font_size=None,
        font_height=None,
        viewbox=None,
    ):
        """Solve this length and return it expressed as a centimeter Length."""
        value = self.value(
            ppi=ppi,
            relative_length=relative_length,
            font_size=font_size,
            font_height=font_height,
            viewbox=viewbox,
        )
        v = value / (ppi * 0.393701)
        return Length("%scm" % (Length.str(v)))

    def to_inch(
        self,
        ppi=DEFAULT_PPI,
        relative_length=None,
        font_size=None,
        font_height=None,
        viewbox=None,
    ):
        """Solve this length and return it expressed as an inch Length."""
        value = self.value(
            ppi=ppi,
            relative_length=relative_length,
            font_size=font_size,
            font_height=font_height,
            viewbox=viewbox,
        )
        v = value / ppi
        return Length("%sin" % (Length.str(v)))

    def value(
        self,
        ppi=None,
        relative_length=None,
        font_size=None,
        font_height=None,
        viewbox=None,
        **kwargs,
    ):
        """
        Solve the length to a float in pixels given the supplied context
        (ppi, relative_length, font metrics, viewbox). If the needed context
        is missing, return self unsolved (lazy evaluation).
        """
        if self.amount is None:
            return None
        if self.units == "%":
            if relative_length is None:
                return self
            fraction = self.amount / 100.0
            if isinstance(relative_length, (float, int)):
                return fraction * relative_length
            elif isinstance(relative_length, (str, Length)):
                length = relative_length * self
                if isinstance(length, Length):
                    return length.value(
                        ppi=ppi,
                        font_size=font_size,
                        font_height=font_height,
                        viewbox=viewbox,
                    )
                return length
            return self
        if self.units == "mm":
            if ppi is None:
                return self
            return self.amount * ppi * 0.0393701
        if self.units == "cm":
            if ppi is None:
                return self
            return self.amount * ppi * 0.393701
        if self.units == "in":
            if ppi is None:
                return self
            return self.amount * ppi
        if self.units == "px" or self.units == "":
            return self.amount
        if self.units == "pt":
            return self.amount * 1.3333
        if self.units == "pc":
            return self.amount * 16.0
        if self.units == "em":
            if font_size is None:
                return self
            return self.amount * float(font_size)
        if self.units == "ex":
            if font_height is None:
                return self
            return self.amount * float(font_height)
        if self.units == "vw":
            if viewbox is None:
                return self
            v = Viewbox(viewbox)
            return self.amount * v.width / 100.0
        if self.units == "vh":
            if viewbox is None:
                return self
            v = Viewbox(viewbox)
            return self.amount * v.height / 100.0
        if self.units == "vmin":
            if viewbox is None:
                return self
            v = Viewbox(viewbox)
            # BUGFIX: vmin is the smaller of width and height;
            # previously compared height against itself.
            m = min(v.height, v.width)
            return self.amount * m / 100.0
        if self.units == "vmax":
            if viewbox is None:
                return self
            v = Viewbox(viewbox)
            # BUGFIX: vmax is the larger of width and height;
            # previously compared height against itself.
            m = max(v.height, v.width)
            return self.amount * m / 100.0
        try:
            return float(self)
        except ValueError:
            return self

    @staticmethod
    def str(s):
        """
        Format a number (or Length) compactly: trailing zeros and a trailing
        decimal point are stripped; a unit-bearing Length is quoted.
        """
        if s is None:
            return "n/a"
        if isinstance(s, Length):
            if s.units == "":
                s = s.amount
            else:
                a = "%.12f" % s.amount
                if "." in a:
                    a = a.rstrip("0").rstrip(".")
                return "'%s%s'" % (a, s.units)
        try:
            s = "%.12f" % s
        except TypeError:
            return str(s)
        if "." in s:
            s = s.rstrip("0").rstrip(".")
        return s
class Color(object):
    """
    SVG Color Parsing

    Parses different forms of defining colors.
    Including keyword: https://www.w3.org/TR/SVG11/types.html#ColorKeywords

    The color is stored as a single integer in ``value`` laid out as
    0xRRGGBBAA — red in the top byte, alpha in the bottom byte.
    """

    # SVG 1.1 recognized color keywords -> (r, g, b[, opacity]).
    # "transparent" carries an explicit 0.0 opacity.
    # BUGFIX: "aliceblue" previously matched a typoed (250, 248, 255) branch,
    # shadowing the correct value; the SVG spec value is (240, 248, 255).
    _COLOR_KEYWORDS = {
        "transparent": (0, 0, 0, 0.0),
        "aliceblue": (240, 248, 255),
        "antiquewhite": (250, 235, 215),
        "aqua": (0, 255, 255),
        "aquamarine": (127, 255, 212),
        "azure": (240, 255, 255),
        "beige": (245, 245, 220),
        "bisque": (255, 228, 196),
        "black": (0, 0, 0),
        "blanchedalmond": (255, 235, 205),
        "blue": (0, 0, 255),
        "blueviolet": (138, 43, 226),
        "brown": (165, 42, 42),
        "burlywood": (222, 184, 135),
        "cadetblue": (95, 158, 160),
        "chartreuse": (127, 255, 0),
        "chocolate": (210, 105, 30),
        "coral": (255, 127, 80),
        "cornflowerblue": (100, 149, 237),
        "cornsilk": (255, 248, 220),
        "crimson": (220, 20, 60),
        "cyan": (0, 255, 255),
        "darkblue": (0, 0, 139),
        "darkcyan": (0, 139, 139),
        "darkgoldenrod": (184, 134, 11),
        "darkgray": (169, 169, 169),
        "darkgreen": (0, 100, 0),
        "darkgrey": (169, 169, 169),
        "darkkhaki": (189, 183, 107),
        "darkmagenta": (139, 0, 139),
        "darkolivegreen": (85, 107, 47),
        "darkorange": (255, 140, 0),
        "darkorchid": (153, 50, 204),
        "darkred": (139, 0, 0),
        "darksalmon": (233, 150, 122),
        "darkseagreen": (143, 188, 143),
        "darkslateblue": (72, 61, 139),
        "darkslategray": (47, 79, 79),
        "darkslategrey": (47, 79, 79),
        "darkturquoise": (0, 206, 209),
        "darkviolet": (148, 0, 211),
        "deeppink": (255, 20, 147),
        "deepskyblue": (0, 191, 255),
        "dimgray": (105, 105, 105),
        "dimgrey": (105, 105, 105),
        "dodgerblue": (30, 144, 255),
        "firebrick": (178, 34, 34),
        "floralwhite": (255, 250, 240),
        "forestgreen": (34, 139, 34),
        "fuchsia": (255, 0, 255),
        "gainsboro": (220, 220, 220),
        "ghostwhite": (248, 248, 255),
        "gold": (255, 215, 0),
        "goldenrod": (218, 165, 32),
        "gray": (128, 128, 128),
        "grey": (128, 128, 128),
        "green": (0, 128, 0),
        "greenyellow": (173, 255, 47),
        "honeydew": (240, 255, 240),
        "hotpink": (255, 105, 180),
        "indianred": (205, 92, 92),
        "indigo": (75, 0, 130),
        "ivory": (255, 255, 240),
        "khaki": (240, 230, 140),
        "lavender": (230, 230, 250),
        "lavenderblush": (255, 240, 245),
        "lawngreen": (124, 252, 0),
        "lemonchiffon": (255, 250, 205),
        "lightblue": (173, 216, 230),
        "lightcoral": (240, 128, 128),
        "lightcyan": (224, 255, 255),
        "lightgoldenrodyellow": (250, 250, 210),
        "lightgray": (211, 211, 211),
        "lightgreen": (144, 238, 144),
        "lightgrey": (211, 211, 211),
        "lightpink": (255, 182, 193),
        "lightsalmon": (255, 160, 122),
        "lightseagreen": (32, 178, 170),
        "lightskyblue": (135, 206, 250),
        "lightslategray": (119, 136, 153),
        "lightslategrey": (119, 136, 153),
        "lightsteelblue": (176, 196, 222),
        "lightyellow": (255, 255, 224),
        "lime": (0, 255, 0),
        "limegreen": (50, 205, 50),
        "linen": (250, 240, 230),
        "magenta": (255, 0, 255),
        "maroon": (128, 0, 0),
        "mediumaquamarine": (102, 205, 170),
        "mediumblue": (0, 0, 205),
        "mediumorchid": (186, 85, 211),
        "mediumpurple": (147, 112, 219),
        "mediumseagreen": (60, 179, 113),
        "mediumslateblue": (123, 104, 238),
        "mediumspringgreen": (0, 250, 154),
        "mediumturquoise": (72, 209, 204),
        "mediumvioletred": (199, 21, 133),
        "midnightblue": (25, 25, 112),
        "mintcream": (245, 255, 250),
        "mistyrose": (255, 228, 225),
        "moccasin": (255, 228, 181),
        "navajowhite": (255, 222, 173),
        "navy": (0, 0, 128),
        "oldlace": (253, 245, 230),
        "olive": (128, 128, 0),
        "olivedrab": (107, 142, 35),
        "orange": (255, 165, 0),
        "orangered": (255, 69, 0),
        "orchid": (218, 112, 214),
        "palegoldenrod": (238, 232, 170),
        "palegreen": (152, 251, 152),
        "paleturquoise": (175, 238, 238),
        "palevioletred": (219, 112, 147),
        "papayawhip": (255, 239, 213),
        "peachpuff": (255, 218, 185),
        "peru": (205, 133, 63),
        "pink": (255, 192, 203),
        "plum": (221, 160, 221),
        "powderblue": (176, 224, 230),
        "purple": (128, 0, 128),
        "red": (255, 0, 0),
        "rosybrown": (188, 143, 143),
        "royalblue": (65, 105, 225),
        "saddlebrown": (139, 69, 19),
        "salmon": (250, 128, 114),
        "sandybrown": (244, 164, 96),
        "seagreen": (46, 139, 87),
        "seashell": (255, 245, 238),
        "sienna": (160, 82, 45),
        "silver": (192, 192, 192),
        "skyblue": (135, 206, 235),
        "slateblue": (106, 90, 205),
        "slategray": (112, 128, 144),
        "slategrey": (112, 128, 144),
        "snow": (255, 250, 250),
        "springgreen": (0, 255, 127),
        "steelblue": (70, 130, 180),
        "tan": (210, 180, 140),
        "teal": (0, 128, 128),
        "thistle": (216, 191, 216),
        "tomato": (255, 99, 71),
        "turquoise": (64, 224, 208),
        "violet": (238, 130, 238),
        "wheat": (245, 222, 179),
        "white": (255, 255, 255),
        "whitesmoke": (245, 245, 245),
        "yellow": (255, 255, 0),
        "yellowgreen": (154, 205, 50),
    }

    def __init__(self, *args, **kwargs):
        """
        Accepts Color(other), Color(int), Color(string), Color(v, opacity),
        Color(r, g, b), or Color(r, g, b, a255). Keyword arguments
        (red/green/blue/alpha/opacity/r/g/b/rgb/argb/rgba/h/s/l/hue/
        saturation/lightness) are applied afterwards and override the
        positional value.
        """
        self.value = 0
        arglen = len(args)
        if arglen == 1:
            v = args[0]
            if isinstance(v, Color):
                self.value = v.value
            elif isinstance(v, int):
                self.rgb = v  # bare int is treated as 0xRRGGBB, fully opaque
            else:
                self.value = Color.parse(v)
        elif arglen == 2:
            # (color, opacity) form.
            v = args[0]
            if isinstance(v, Color):
                self.value = v.value
            elif isinstance(v, int):
                self.rgb = v
            else:
                self.value = Color.parse(v)
            self.opacity = float(args[1])
        elif arglen == 3:
            r = args[0]
            g = args[1]
            b = args[2]
            self.value = Color.rgb_to_int(r, g, b)
        elif arglen == 4:
            r = args[0]
            g = args[1]
            b = args[2]
            opacity = args[3] / 255.0  # fourth positional is alpha in 0..255
            self.value = Color.rgb_to_int(r, g, b, opacity)
        if "red" in kwargs:
            self.red = kwargs["red"]
        if "green" in kwargs:
            self.green = kwargs["green"]
        if "blue" in kwargs:
            self.blue = kwargs["blue"]
        if "alpha" in kwargs:
            self.alpha = kwargs["alpha"]
        if "opacity" in kwargs:
            self.opacity = kwargs["opacity"]
        if "r" in kwargs:
            self.red = kwargs["r"]
        if "g" in kwargs:
            self.green = kwargs["g"]
        if "b" in kwargs:
            self.blue = kwargs["b"]
        if "rgb" in kwargs:
            self.rgb = kwargs["rgb"]
        if "argb" in kwargs:
            self.argb = kwargs["argb"]
        if "rgba" in kwargs:
            self.rgba = kwargs["rgba"]
        if "h" in kwargs:
            self.hue = kwargs["h"]
        if "s" in kwargs:
            self.saturation = kwargs["s"]
        if "l" in kwargs:
            self.lightness = kwargs["l"]
        if "hue" in kwargs:
            self.hue = kwargs["hue"]
        if "saturation" in kwargs:
            self.saturation = kwargs["saturation"]
        if "lightness" in kwargs:
            self.lightness = kwargs["lightness"]

    def __int__(self):
        return self.value

    def __str__(self):
        if self.value is None:
            return str(self.value)
        return self.hex

    def __repr__(self):
        return "Color('%s')" % str(self)

    def __eq__(self, other):
        """Equal when the 32-bit RGBA values match; strings are parsed first."""
        if self is other:
            return True
        first = self.value
        second = other
        if isinstance(second, str):
            second = Color(second)
        if isinstance(second, Color):
            second = second.value
        if first is None:
            return second is None
        if second is None:
            return first is None
        return first & 0xFFFFFFFF == second & 0xFFFFFFFF

    def __ne__(self, other):
        return not self == other

    def __abs__(self):
        # Return opaque color.
        if self.value is None:
            return Color(self.value)
        return Color(self.red, self.green, self.blue)

    @staticmethod
    def rgb_to_int(r, g, b, opacity=1.0):
        """Pack r, g, b (0..255) and an opacity fraction into 0xRRGGBBAA."""
        if opacity > 1:
            opacity = 1.0
        if opacity < 0:
            opacity = 0
        r = Color.crimp(r)
        g = Color.crimp(g)
        b = Color.crimp(b)
        a = int(round(opacity * 255.0))
        a = Color.crimp(a)
        r <<= 24
        g <<= 16
        b <<= 8
        c = r | g | b | a
        return c

    @staticmethod
    def hsl_to_int(h, s, l, opacity=1.0):
        """Convert hue (in turns), saturation, lightness to an 0xRRGGBBAA int."""

        def hue_2_rgb(v1, v2, vh):
            # Standard HSL-to-RGB channel helper (wraps hue into [0, 1)).
            if vh < 0:
                vh += 1
            if vh > 1:
                vh -= 1
            if 6.0 * vh < 1.0:
                return v1 + (v2 - v1) * 6.0 * vh
            if 2.0 * vh < 1:
                return v2
            if 3 * vh < 2.0:
                return v1 + (v2 - v1) * ((2.0 / 3.0) - vh) * 6.0
            return v1

        if s == 0.0:
            # Achromatic: all channels equal the lightness.
            r = 255.0 * l
            g = 255.0 * l
            b = 255.0 * l
        else:
            if l < 0.5:
                v2 = l * (1.0 + s)
            else:
                v2 = (l + s) - (s * l)
            v1 = 2 * l - v2
            r = 255.0 * hue_2_rgb(v1, v2, h + (1.0 / 3.0))
            g = 255.0 * hue_2_rgb(v1, v2, h)
            b = 255.0 * hue_2_rgb(v1, v2, h - (1.0 / 3.0))
        value = Color.rgb_to_int(r, g, b, opacity=opacity)
        return value

    @staticmethod
    def parse(color_string):
        """Parse SVG color, will return a set value."""
        if color_string is None or color_string == SVG_VALUE_NONE:
            return None
        match = REGEX_COLOR_HEX.match(color_string)
        if match:
            return Color.parse_color_hex(color_string)
        match = REGEX_COLOR_RGB.match(color_string)
        if match:
            return Color.parse_color_rgb(match.groups())
        match = REGEX_COLOR_RGB_PERCENT.match(color_string)
        if match:
            return Color.parse_color_rgbp(match.groups())
        match = REGEX_COLOR_HSL.match(color_string)
        if match:
            return Color.parse_color_hsl(match.groups())
        return Color.parse_color_lookup(color_string)

    @staticmethod
    def parse_color_lookup(v):
        """Parse SVG Color by Keyword on dictionary lookup"""
        if not isinstance(v, str):
            return Color.rgb_to_int(0, 0, 0)
        v = v.replace(" ", "").lower()
        rgb = Color._COLOR_KEYWORDS.get(v)
        if rgb is not None:
            return Color.rgb_to_int(*rgb)
        # Not a keyword: accept a bare integer string, else default to black.
        try:
            return int(v)
        except ValueError:
            return Color.rgb_to_int(0, 0, 0)

    @staticmethod
    def parse_color_hex(hex_string):
        """Parse SVG Color by Hex String (#RGB, #RGBA, #RRGGBB, #RRGGBBAA)."""
        h = hex_string.lstrip("#")
        size = len(h)
        if size == 8:
            return int(h[:8], 16)
        elif size == 6:
            # No alpha nibble pair given: fully opaque.
            s = "{0}FF".format(h[:6])
            v = int(s, 16)
            return v
        elif size == 4:
            # #RGBA shorthand: double each nibble.
            s = h[0] + h[0] + h[1] + h[1] + h[2] + h[2] + h[3] + h[3]
            return int(s, 16)
        elif size == 3:
            # #RGB shorthand: double each nibble, fully opaque.
            s = "{0}{0}{1}{1}{2}{2}FF".format(h[0], h[1], h[2])
            v = int(s, 16)
            return v
        return Color.rgb_to_int(0, 0, 0)

    @staticmethod
    def parse_color_rgb(values):
        """Parse SVG Color, RGB value declarations"""
        r = int(values[0])
        g = int(values[1])
        b = int(values[2])
        if values[3] is not None:
            opacity = float(values[3])
        else:
            opacity = 1
        return Color.rgb_to_int(r, g, b, opacity)

    @staticmethod
    def parse_color_rgbp(values):
        """Parse SVG color, RGB percent value declarations"""
        ratio = 255.0 / 100.0
        r = round(float(values[0]) * ratio)
        g = round(float(values[1]) * ratio)
        b = round(float(values[2]) * ratio)
        if values[3] is not None:
            opacity = float(values[3])
        else:
            opacity = 1
        return Color.rgb_to_int(r, g, b, opacity)

    @staticmethod
    def parse_color_hsl(values):
        """Parse SVG color, HSL value declarations"""
        h = Angle.parse(values[0])
        h = h.as_turns
        s = float(values[1]) / 100.0
        if s > 1:
            s = 1.0
        if s < 0:
            s = 0.0
        l = float(values[2]) / 100.0
        if l > 1:
            l = 1.0
        if l < 0:
            l = 0.0
        if values[3] is not None:
            opacity = float(values[3])
        else:
            opacity = 1
        return Color.hsl_to_int(h, s, l, opacity)

    @classmethod
    def distinct(cls, index):
        """
        Produces a deterministic distinct color for the given index.
        """

        def _pattern(pattern: int):
            # Decompose the index into three base-n digits around a cube root.
            n = int(pattern ** (1.0 / 3.0))
            pattern -= n * n * n
            p = [n] * 3
            if pattern == 0:
                return p
            pattern -= 1
            v = int(pattern % 3)
            pattern = int(pattern // 3)
            if pattern < n:
                p[v] = pattern % n
                return p
            pattern -= n
            p[v] = pattern // n
            v += 1
            p[v % 3] = pattern % n
            return p

        def _8bit_reverse(r: int):
            # Reverse the bit order of (r - 1) within 8 bits, spreading
            # consecutive indices across the brightness range.
            value = r - 1
            v = 0
            for i in range(0, 8):
                v = v | (value & 1)
                v <<= 1
                value >>= 1
            v >>= 1
            return v & 0xFF

        p = _pattern(index)
        return Color(
            _8bit_reverse(p[0]),
            _8bit_reverse(p[1]),
            _8bit_reverse(p[2]),
        )

    @property
    def rgb(self):
        """24-bit 0xRRGGBB view of this color (alpha dropped), or None."""
        if self.value is None:
            return None
        return self.value >> 8

    @rgb.setter
    def rgb(self, rgb):
        rgb <<= 8
        rgb |= 0xFF  # assigning rgb makes the color fully opaque
        self.value = rgb

    @property
    def rgba(self):
        """Full 32-bit 0xRRGGBBAA value."""
        return self.value

    @rgba.setter
    def rgba(self, rgba):
        self.value = rgba

    @property
    def argb(self):
        """32-bit 0xAARRGGBB view of this color, or None."""
        if self.value is None:
            return None
        return ((self.value >> 8) & 0xFFFFFF) | (self.alpha << 24)

    @argb.setter
    def argb(self, argb):
        self.value = ((argb << 8) & 0xFFFFFF00) | (argb >> 24 & 0xFF)

    @property
    def opacity(self):
        """Alpha expressed as a 0.0–1.0 fraction, or None."""
        return self.alpha / 255.0 if self.value is not None else None

    @opacity.setter
    def opacity(self, opacity):
        if self.value is None:
            raise ValueError
        a = int(round(opacity * 255.0))
        a = Color.crimp(a)
        self.alpha = a

    @property
    def alpha(self):
        """Alpha channel 0–255, or None."""
        return self.value & 0xFF if self.value is not None else None

    @alpha.setter
    def alpha(self, a):
        if self.value is None:
            raise ValueError
        a = Color.crimp(a)
        self.value &= ~0xFF
        self.value |= a

    @property
    def red(self):
        """Red channel 0–255, or None."""
        return (self.value >> 24) & 0xFF if self.value is not None else None

    @red.setter
    def red(self, r):
        if self.value is None:
            raise ValueError
        r = Color.crimp(r)
        self.value &= ~0xFF000000
        self.value |= r << 24

    @property
    def green(self):
        """Green channel 0–255, or None."""
        return (self.value >> 16) & 0xFF if self.value is not None else None

    @green.setter
    def green(self, g):
        if self.value is None:
            raise ValueError
        g = Color.crimp(g)
        self.value &= ~0xFF0000
        self.value |= g << 16

    @property
    def blue(self):
        """Blue channel 0–255, or None."""
        return (self.value >> 8) & 0xFF if self.value is not None else None

    @blue.setter
    def blue(self, b):
        if self.value is None:
            raise ValueError
        b = Color.crimp(b)
        self.value &= ~0xFF00
        self.value |= b << 8

    @property
    def hexa(self):
        """Eight-digit #rrggbbaa hex string, or None."""
        if self.value is None:
            return None
        return "#%02x%02x%02x%02x" % (self.red, self.green, self.blue, self.alpha)

    @property
    def hexrgb(self):
        """Six-digit #rrggbb hex string (alpha ignored), or None."""
        if self.value is None:
            return None
        return "#%02x%02x%02x" % (self.red, self.green, self.blue)

    @property
    def hex(self):
        """Shortest faithful hex form: #rrggbb if opaque, else #rrggbbaa."""
        if self.value is None:
            return None
        if self.alpha == 0xFF:
            return self.hexrgb
        else:
            return self.hexa

    @property
    def hue(self):
        """HSL hue in degrees, 0 for achromatic colors, or None."""
        if self.value is None:
            return None
        r = self.red / 255.0
        g = self.green / 255.0
        b = self.blue / 255.0
        var_min = min(r, g, b)
        var_max = max(r, g, b)
        delta_max = var_max - var_min
        if delta_max == 0:
            return 0
        dr = (((var_max - r) / 6.0) + delta_max / 2.0) / delta_max
        dg = (((var_max - g) / 6.0) + delta_max / 2.0) / delta_max
        db = (((var_max - b) / 6.0) + delta_max / 2.0) / delta_max
        if r == var_max:
            h = db - dg
        elif g == var_max:
            h = (1.0 / 3.0) + dr - db
        else:  # db == max_v
            h = (2.0 / 3.0) + dg - dr
        if h < 0:
            h += 1
        if h > 1:
            h -= 1
        return Angle.turns(h).as_degrees

    @hue.setter
    def hue(self, v):
        if self.value is None:
            raise ValueError
        h, s, l = self.hsl
        self.hsl = v, s, l

    @property
    def saturation(self):
        """HSL saturation 0.0–1.0, or None."""
        if self.value is None:
            return None
        r = self.red / 255.0
        g = self.green / 255.0
        b = self.blue / 255.0
        min_v = min(r, g, b)
        max_v = max(r, g, b)
        delta = max_v - min_v
        if max_v == min_v:
            return 0.0
        if (max_v + min_v) < 1:
            return delta / (max_v + min_v)
        else:
            return delta / (2.0 - max_v - min_v)

    @saturation.setter
    def saturation(self, v):
        if self.value is None:
            raise ValueError
        h, s, l = self.hsl
        self.hsl = h, v, l

    @property
    def lightness(self):
        """HSL lightness 0.0–1.0, or None."""
        if self.value is None:
            return None
        r = self.red / 255.0
        g = self.green / 255.0
        b = self.blue / 255.0
        min_v = min(r, g, b)
        max_v = max(r, g, b)
        return (max_v + min_v) / 2.0

    @lightness.setter
    def lightness(self, v):
        if self.value is None:
            raise ValueError
        h, s, l = self.hsl
        self.hsl = h, s, v

    @property
    def intensity(self):
        """Mean of the channels as a 0.0–1.0 fraction, or None."""
        if self.value is None:
            return None
        r = self.red
        g = self.green
        b = self.blue
        return (r + b + g) / 768.0

    @property
    def brightness(self):
        """HSV value: largest channel as a 0.0–1.0 fraction, or None."""
        if self.value is None:
            return None
        r = self.red
        g = self.green
        b = self.blue
        cmax = max(r, g, b)
        return cmax / 255.0

    @property
    def blackness(self):
        """1.0 minus brightness, or None."""
        if self.value is None:
            return None
        return 1.0 - self.brightness

    @property
    def luminance(self):
        """Rec. 601 weighted luminance (0.3R + 0.59G + 0.11B), or None."""
        if self.value is None:
            return None
        r = self.red / 255.0
        g = self.green / 255.0
        b = self.blue / 255.0
        return r * 0.3 + g * 0.59 + b * 0.11

    @property
    def luma(self):
        """Rec. 709 weighted luma (0.2126R + 0.7152G + 0.0722B), or None."""
        if self.value is None:
            return None
        r = self.red / 255.0
        g = self.green / 255.0
        b = self.blue / 255.0
        return r * 0.2126 + g * 0.7152 + b * 0.0722

    @staticmethod
    def over(c1, c2):
        """
        Porter Duff Alpha compositing operation over.
        Returns c1 over c2. This is the standard painter algorithm.
        """
        if isinstance(c1, str):
            c1 = Color.parse(c1)
        elif isinstance(c1, int):
            c1 = Color(c1)
        if isinstance(c2, str):
            c2 = Color.parse(c2)
        elif isinstance(c2, int):
            c2 = Color(c2)
        r1 = c1.red
        g1 = c1.green
        b1 = c1.blue
        a1 = c1.alpha
        if a1 == 255:
            # Fully opaque top layer completely hides the bottom.
            return c1.value
        if a1 == 0:
            # Fully transparent top layer contributes nothing.
            return c2.value
        r2 = c2.red
        g2 = c2.green
        b2 = c2.blue
        a2 = c2.alpha
        q = 255.0 - a1
        sr = r1 * a1 * 255.0 + r2 * a2 * q
        sg = g1 * a1 * 255.0 + g2 * a2 * q
        sb = b1 * a1 * 255.0 + b2 * a2 * q
        sa = a1 * 255.0 + a2 * q
        sr /= sa
        sg /= sa
        sb /= sa
        sa /= 255.0 * 255.0
        return Color.rgb_to_int(sr, sg, sb, sa)

    @staticmethod
    def distance(c1, c2):
        """Redmean color distance between two colors (see distance_sq)."""
        return sqrt(Color.distance_sq(c1, c2))

    @staticmethod
    def distance_sq(c1, c2):
        """
        Function returns the square of colordistance. The square of the color distance will always be closer than the
        square of another color distance.
        Rather than naive Euclidean distance we use Compuphase's Redmean color distance.
        https://www.compuphase.com/cmetric.htm
        It's computationally simple, and empirical tests finds it to be on par with LabDE2000.
        :param c1: first color
        :param c2: second color
        :return: square of color distance
        """
        if isinstance(c1, str):
            c1 = Color(c1)
        elif isinstance(c1, int):
            c1 = Color(c1)
        if isinstance(c2, str):
            c2 = Color(c2)
        elif isinstance(c2, int):
            c2 = Color(c2)
        red_mean = int((c1.red + c2.red) / 2.0)
        r = c1.red - c2.red
        g = c1.green - c2.green
        b = c1.blue - c2.blue
        return (
            (((512 + red_mean) * r * r) >> 8)
            + (4 * g * g)
            + (((767 - red_mean) * b * b) >> 8)
        )

    @staticmethod
    def crimp(v):
        """Clamp a channel value into the integer range 0–255."""
        if v > 255:
            return 255
        if v < 0:
            return 0
        return int(v)

    @property
    def hsl(self):
        """(hue, saturation, lightness) tuple, or None."""
        if self.value is None:
            return None
        return self.hue, self.saturation, self.lightness

    @hsl.setter
    def hsl(self, value):
        if not isinstance(value, (tuple, list)):
            return
        h = value[0]
        s = value[1]
        l = value[2]
        self.value = Color.hsl_to_int(h, s, l, 1.0)

    def distance_to(self, other):
        """Redmean color distance from this color to another."""
        return Color.distance(self, other)

    def blend(self, other, opacity=None):
        """
        Blends the given color with the current color.
        """
        if opacity is None:
            self.value = Color.over(other, self)
        else:
            color = Color(other)
            color.opacity = opacity
            self.value = Color.over(color, self)
class Point:
    """Point is a general subscriptable point class with .x and .y as well as [0] and [1]

    For compatibility with regebro svg.path we accept complex numbers as points x + yj,
    and provide .real and .imag as properties. As well as float and integer values as (v,0) elements.
    With regard to SVG 7.15.1 defining SVGPoint this class provides for matrix transformations.
    Points are only positions in real Euclidean space. This class is not intended to interact with
    the Length class.
    """

    def __init__(self, x, y=None):
        # Single-argument forms: "x,y" string, an object with .x/.y, a
        # subscriptable pair, or a complex-like with .real/.imag (which also
        # covers plain ints/floats as (v, 0)).
        if x is not None and y is None:
            if isinstance(x, str):
                string_x, string_y = REGEX_COORD_PAIR.findall(x)[0]
                self.x = float(string_x)
                self.y = float(string_y)
                return
            try:  # Try .x .y
                self.y = x.y
                self.x = x.x
                return
            except AttributeError:
                pass
            try:  # try subscription.
                self.y = x[1]
                self.x = x[0]
                return
            except TypeError:
                pass
            try:  # try .imag .real complex values.
                self.y = x.imag
                self.x = x.real
                return
            except AttributeError:
                # Unknown.
                raise TypeError
        self.x = x
        self.y = y

    def __key(self):
        # Hash key; note hashing a mutable point is only safe while unchanged.
        return (self.x, self.y)

    def __hash__(self):
        return hash(self.__key())

    def __eq__(self, other):
        """Points are equal when both coordinates agree within the module-level
        ERROR tolerance. Non-point-coercible operands compare unequal."""
        if other is None:
            return False
        try:
            if not isinstance(other, Point):
                other = Point(other)
        except Exception:
            return NotImplemented
        return abs(self.x - other.x) <= ERROR and abs(self.y - other.y) <= ERROR

    def __ne__(self, other):
        return not self == other

    def __len__(self):
        return 2

    def __getitem__(self, item):
        if item == 0:
            return self.x
        elif item == 1:
            return self.y
        else:
            raise IndexError

    def __setitem__(self, key, value):
        if key == 0:
            self.x = value
        elif key == 1:
            self.y = value
        else:
            raise IndexError

    def __repr__(self):
        x_str = Length.str(self.x)
        y_str = Length.str(self.y)
        return "Point(%s,%s)" % (x_str, y_str)

    def __copy__(self):
        return Point(self.x, self.y)

    def __str__(self):
        # Compact "x,y" form with trailing zeros/periods stripped.
        try:
            x_str = "%.12G" % self.x
        except TypeError:
            return self.__repr__()
        if "." in x_str:
            x_str = x_str.rstrip("0").rstrip(".")
        y_str = "%.12G" % self.y
        if "." in y_str:
            y_str = y_str.rstrip("0").rstrip(".")
        return "%s,%s" % (x_str, y_str)

    def __imul__(self, other):
        # Matrix (or transform string) applies an affine map; any other
        # operand is treated as complex multiplication.
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            v = other.point_in_matrix_space(self)
            self.x = v.x
            self.y = v.y
            return self
        try:
            c = complex(self) * complex(other.x, other.y)
            self.x = c.real
            self.y = c.imag
            return self
        except AttributeError:
            pass
        try:
            c = complex(self) * complex(other[0], other[1])
            self.x = c.real
            self.y = c.imag
            return self
        except (TypeError, IndexError):
            pass
        try:
            c = complex(self) * complex(other.real, other.imag)
            self.x = c.real
            self.y = c.imag
            return self
        except AttributeError:
            pass
        try:
            self.x *= other
            self.y *= other
            return self
        except Exception:
            return NotImplemented

    def __mul__(self, other):
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            return other.point_in_matrix_space(self)
        try:
            return Point(complex(self) * complex(other.x, other.y))
        except AttributeError:
            pass
        try:
            return Point(complex(self) * complex(other[0], other[1]))
        except (TypeError, IndexError):
            pass
        try:
            return Point(complex(self) * complex(other.real, other.imag))
        except AttributeError:
            pass
        try:
            return Point(self.x * other, self.y * other)
        except Exception:
            return NotImplemented

    __rmul__ = __mul__

    def __iadd__(self, other):
        # Accepts Point-like (.x/.y), subscriptable, complex-like, or scalar
        # (scalar adds to x only, mirroring complex addition).
        try:
            self.x += other.x
            self.y += other.y
            return self
        except AttributeError:
            pass
        try:
            self.y += other[1]
            self.x += other[0]
            return self
        except (TypeError, IndexError):
            pass
        try:
            self.x += other.real
            self.y += other.imag
            return self
        except AttributeError:
            pass
        try:
            self.x += other
            return self
        except Exception:
            return NotImplemented

    def __add__(self, other):
        try:
            x = self.x + other.x
            y = self.y + other.y
            return Point(x, y)
        except AttributeError:
            pass
        try:
            y = self.y + other[1]
            x = self.x + other[0]
            return Point(x, y)
        except (TypeError, IndexError):
            pass
        try:
            x = self.x + other.real
            y = self.y + other.imag
            return Point(x, y)
        except AttributeError:
            pass
        if isinstance(other, (float, int)):
            x = self.x + other
            return Point(x, self.y)
        return NotImplemented

    __radd__ = __add__

    def __isub__(self, other):
        try:
            self.x -= other.x
            self.y -= other.y
            return self
        except AttributeError:
            pass
        try:
            self.y -= other[1]
            self.x -= other[0]
            return self
        except (TypeError, IndexError):
            pass
        try:
            self.x -= other.real
            self.y -= other.imag
            return self
        except AttributeError:
            pass
        try:
            self.x -= other
            return self
        except Exception:
            return NotImplemented

    def __sub__(self, other):
        try:
            x = self.x - other.x
            y = self.y - other.y
            return Point(x, y)
        except AttributeError:
            pass
        try:
            y = self.y - other[1]
            x = self.x - other[0]
            return Point(x, y)
        except (TypeError, IndexError):
            pass
        try:
            x = self.x - other.real
            y = self.y - other.imag
            return Point(x, y)
        except AttributeError:
            pass
        if isinstance(other, (float, int)):
            x = self.x - other
            return Point(x, self.y)
        return NotImplemented

    def __rsub__(self, other):
        try:
            x = other.x - self.x
            y = other.y - self.y
            return Point(x, y)
        except AttributeError:
            pass
        try:
            y = other[1] - self.y
            x = other[0] - self.x
            return Point(x, y)
        except (TypeError, IndexError):
            pass
        try:
            x = other.real - self.x
            y = other.imag - self.y
            return Point(x, y)
        except AttributeError:
            pass
        if isinstance(other, (float, int)):
            x = other - self.x
            return Point(x, self.y)
        return NotImplemented

    def __complex__(self):
        return self.x + self.y * 1j

    def __abs__(self):
        return hypot(self.x, self.y)

    def __pow__(self, other):
        # Complex exponentiation in polar form. NOTE: the real/imag parts are
        # rounded, so this is only exact for integer-lattice results.
        r_raised = abs(self) ** other
        argz_multiplied = self.argz() * other
        real_part = round(r_raised * cos(argz_multiplied))
        imag_part = round(r_raised * sin(argz_multiplied))
        return self.__class__(real_part, imag_part)

    def conjugate(self):
        return self.__class__(self.real, -self.imag)

    def argz(self):
        """Return the argument (angle) of this point viewed as a complex number.

        BUG FIX: the original ``atan(self.imag / self.real)`` raised
        ZeroDivisionError for points on the imaginary axis (real == 0) and
        returned the wrong quadrant for negative real parts; atan2 handles
        both correctly.
        """
        return atan2(self.imag, self.real)

    @property
    def real(self):
        """Emulate svg.path use of complex numbers"""
        return self.x

    @property
    def imag(self):
        """Emulate svg.path use of complex numbers"""
        return self.y

    def matrix_transform(self, matrix):
        self *= matrix
        return self

    def move_towards(self, p2, amount=1):
        # Linear interpolation in place; amount=1 moves fully onto p2.
        if not isinstance(p2, Point):
            p2 = Point(p2)
        self += amount * (p2 - self)

    def distance_to(self, p2):
        return abs(self - p2)

    def angle_to(self, p2):
        p = p2 - self
        return Angle.radians(atan2(p.y, p.x))

    def polar_to(self, angle, distance):
        # In-place polar offset from the current position.
        q = Point.polar(self, angle, distance)
        self.x = q.x
        self.y = q.y
        return self

    def reflected_across(self, p):
        return p + (p - self)

    @staticmethod
    def orientation(p, q, r):
        """Determine the clockwise, linear, or counterclockwise orientation of the given points"""
        val = (q[1] - p[1]) * (r[0] - q[0]) - (q[0] - p[0]) * (r[1] - q[1])
        if val == 0:
            return 0
        elif val > 0:
            return 1
        else:
            return 2

    @staticmethod
    def convex_hull(pts):
        """Generate the convex hull of pts (gift-wrapping), yielding Points."""
        if len(pts) == 0:
            return
        points = sorted(set(pts), key=lambda p: p[0])
        first_point_on_hull = points[0]
        point_on_hull = first_point_on_hull
        while True:
            yield point_on_hull
            endpoint = point_on_hull
            for t in points:
                if (
                    point_on_hull is endpoint
                    or Point.orientation(point_on_hull, t, endpoint) == 2
                ):
                    endpoint = t
            point_on_hull = endpoint
            if first_point_on_hull is point_on_hull:
                break

    @staticmethod
    def distance(p1, p2):
        """Euclidean distance between two subscriptable points."""
        dx = p1[0] - p2[0]
        dy = p1[1] - p2[1]
        dx *= dx
        dy *= dy
        return sqrt(dx + dy)

    @staticmethod
    def polar(p1, angle, r):
        """Point at distance r from p1 in direction angle (radians)."""
        dx = cos(angle) * r
        dy = sin(angle) * r
        return Point(p1[0] + dx, p1[1] + dy)

    @staticmethod
    def angle(p1, p2):
        """Angle of the vector from p1 to p2."""
        return Angle.radians(atan2(p2[1] - p1[1], p2[0] - p1[0]))

    @staticmethod
    def towards(p1, p2, amount):
        """Linear interpolation between p1 and p2 by the given amount."""
        tx = amount * (p2[0] - p1[0]) + p1[0]
        ty = amount * (p2[1] - p1[1]) + p1[1]
        return Point(tx, ty)
class Angle(float):
    """CSS Angle defined as used in SVG/CSS, stored as a float in radians."""

    def __repr__(self):
        return "Angle(%.12f)" % self

    def __copy__(self):
        return Angle(self)

    def __eq__(self, other):
        """Angles are equal when congruent modulo tau, within 1e-11.

        BUG FIXES vs. original: returns NotImplemented instead of raising
        TypeError when the operand does not support modulo, and treats values
        just across the 0/tau seam (e.g. 1e-12 vs tau - 1e-12) as equal.
        """
        try:
            d = abs((self % tau) - (other % tau))
        except TypeError:
            return NotImplemented
        return d <= 1e-11 or tau - d <= 1e-11

    # Defining __eq__ sets __hash__ to None, which made Angle unhashable
    # (unusable in sets/dicts) — restore the float hash. NOTE: congruent but
    # numerically different angles hash differently; do not key dicts on
    # modular equality.
    __hash__ = float.__hash__

    def normalized(self):
        """Return this angle normalized into [0, tau)."""
        return Angle(self % tau)

    @classmethod
    def parse(cls, angle_string):
        """Parse a CSS/SVG angle string: deg, grad, rad, turn, %, or bare degrees."""
        if not isinstance(angle_string, str):
            return
        angle_string = angle_string.lower()
        if angle_string.endswith("deg"):
            return Angle.degrees(float(angle_string[:-3]))
        if angle_string.endswith("grad"):
            return Angle.gradians(float(angle_string[:-4]))
        if angle_string.endswith(
            "rad"
        ):  # Must be after 'grad' since 'grad' ends with 'rad' too.
            return Angle.radians(float(angle_string[:-3]))
        if angle_string.endswith("turn"):
            return Angle.turns(float(angle_string[:-4]))
        if angle_string.endswith("%"):
            return Angle.turns(float(angle_string[:-1]) / 100.0)
        return Angle.degrees(float(angle_string))

    @classmethod
    def radians(cls, radians):
        return cls(radians)

    @classmethod
    def degrees(cls, degrees):
        return cls(tau * degrees / 360.0)

    @classmethod
    def gradians(cls, gradians):
        return cls(tau * gradians / 400.0)

    @classmethod
    def turns(cls, turns):
        return cls(tau * turns)

    @property
    def as_radians(self):
        return self

    @property
    def as_degrees(self):
        return self * 360.0 / tau

    @property
    def as_positive_degrees(self):
        v = self.as_degrees
        while v < 0:
            v += 360.0
        return v

    @property
    def as_gradians(self):
        return self * 400.0 / tau

    @property
    def as_turns(self):
        return self / tau

    def is_orthogonal(self):
        # Exact float test: only true for values that are exact multiples
        # of a quarter turn.
        return (self % (tau / 4.0)) == 0
class Matrix:
    """
    Provides svg matrix interfacing.

    SVG 7.15.3 defines the matrix form as:
    [a c e]
    [b d f]

    While e and f are defined as floats, they can be for limited periods defined as a Length.
    With regard to CSS, it's reasonable to perform operations like 'transform(20cm, 20cm)' and
    expect these to be treated consistently. Performing other matrix operations in a consistent
    way. However, render must be called to change these parameters into float locations prior to
    any operation which might be used to transform a point or polyline or path object.
    """

    def __init__(self, *components, **kwargs):
        """Initialize as identity (no args), from a transform string, from a
        single 6-sequence, or from six scalar components a, b, c, d, e, f."""
        self.a = 1.0
        self.b = 0.0
        self.c = 0.0
        self.d = 1.0
        self.e = 0.0
        self.f = 0.0
        len_args = len(components)
        if len_args == 0:
            pass
        elif len_args == 1:
            m = components[0]
            if isinstance(m, str):
                self.parse(m)
                self.render(**kwargs)
            else:
                self.a = m[0]
                self.b = m[1]
                self.c = m[2]
                self.d = m[3]
                self.e = m[4]
                self.f = m[5]
        else:
            self.a = components[0]
            self.b = components[1]
            self.c = components[2]
            self.d = components[3]
            self.e = components[4]
            self.f = components[5]
            self.render(**kwargs)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        """Component-wise comparison within 1e-12; transform strings are
        parsed before comparison."""
        if other is None:
            return False
        if isinstance(other, str):
            other = Matrix(other)
        if not isinstance(other, Matrix):
            return False
        if abs(self.a - other.a) > 1e-12:
            return False
        if abs(self.b - other.b) > 1e-12:
            return False
        if abs(self.c - other.c) > 1e-12:
            return False
        if abs(self.d - other.d) > 1e-12:
            return False
        # e/f may still be Length objects; try identity first so unrendered
        # Lengths compare without arithmetic.
        if self.e != other.e and abs(self.e - other.e) > 1e-12:
            return False
        if self.f != other.f and abs(self.f - other.f) > 1e-12:
            return False
        return True

    def __len__(self):
        return 6

    def __invert__(self):
        m = self.__copy__()
        return m.inverse()

    def __matmul__(self, other):
        m = copy(self)
        m.__imatmul__(other)
        return m

    def __rmatmul__(self, other):
        m = copy(other)
        m.__imatmul__(self)
        return m

    def __imatmul__(self, other):
        if isinstance(other, str):
            other = Matrix(other)
        self.a, self.b, self.c, self.d, self.e, self.f = Matrix.matrix_multiply(
            self, other
        )
        return self

    __mul__ = __matmul__
    __rmul__ = __rmatmul__
    __imul__ = __imatmul__

    def __getitem__(self, item):
        # a-d are coerced to float; e/f are returned as-is because they may
        # still be unrendered Length values.
        if item == 0:
            return float(self.a)
        elif item == 1:
            return float(self.b)
        elif item == 2:
            return float(self.c)
        elif item == 3:
            return float(self.d)
        elif item == 4:
            return self.e
        elif item == 5:
            return self.f
        # BUG FIX: the original fell through and returned None for item >= 6,
        # which (with __len__ == 6 and no __iter__) made sequence-protocol
        # iteration such as list(matrix) loop forever. Raising IndexError
        # also matches Point.__getitem__.
        raise IndexError(item)

    def __setitem__(self, key, value):
        if key == 0:
            self.a = value
        elif key == 1:
            self.b = value
        elif key == 2:
            self.c = value
        elif key == 3:
            self.d = value
        elif key == 4:
            self.e = value
        elif key == 5:
            self.f = value
        else:
            # BUG FIX: reject out-of-range indices instead of silently
            # discarding the assignment; matches Point.__setitem__.
            raise IndexError(key)

    def __repr__(self):
        return "Matrix(%s, %s, %s, %s, %s, %s)" % (
            Length.str(self.a),
            Length.str(self.b),
            Length.str(self.c),
            Length.str(self.d),
            Length.str(self.e),
            Length.str(self.f),
        )

    def __copy__(self):
        return Matrix(self.a, self.b, self.c, self.d, self.e, self.f)

    def __str__(self):
        """
        Many of SVG's graphics operations utilize 2x3:
        :returns string representation of matrix.
        """
        return "[%3f, %3f,\n %3f, %3f, %s, %s]" % (
            self.a,
            self.c,
            self.b,
            self.d,
            self.e,
            self.f,
        )

    def parse(self, transform_str):
        """Parses the svg transform string.

        Transforms from SVG 1.1 have a smaller complete set of operations. Whereas in SVG 2.0 they gain
        the CSS transforms and the additional functions and parsing that go with that. This parse is
        compatible with SVG 1.1 and the SVG 2.0 which includes the CSS 2d superset.

        CSS transforms have scalex() scaley() translatex(), translatey(), and skew() (deprecated).
        2D CSS angles haves units: "deg" tau / 360, "rad" tau/tau, "grad" tau/400, "turn" tau.
        2D CSS distances have length/percentages: "px", "cm", "mm", "in", "pt", etc. (+|-)?d+%
        In the case of percentages there must be a known height and width to properly create a matrix out of that.
        """
        if not transform_str:
            return
        if not isinstance(transform_str, str):
            raise TypeError("Must provide a string to parse")
        for sub_element in REGEX_TRANSFORM_TEMPLATE.findall(transform_str.lower()):
            name = sub_element[0]
            params = tuple(REGEX_TRANSFORM_PARAMETER.findall(sub_element[1]))
            params = [mag + units for mag, units in params]
            # Each operation is pre-concatenated so the transforms compose in
            # document order (left-to-right as written in the attribute).
            if SVG_TRANSFORM_MATRIX == name:
                params = map(float, params)
                self.pre_cat(*params)
            elif SVG_TRANSFORM_TRANSLATE == name:
                try:
                    x_param = Length(params[0]).value()
                except IndexError:
                    continue
                try:
                    y_param = Length(params[1]).value()
                    self.pre_translate(x_param, y_param)
                except IndexError:
                    self.pre_translate(x_param)
            elif SVG_TRANSFORM_TRANSLATE_X == name:
                self.pre_translate(Length(params[0]).value(), 0)
            elif SVG_TRANSFORM_TRANSLATE_Y == name:
                self.pre_translate(0, Length(params[0]).value())
            elif SVG_TRANSFORM_SCALE == name:
                params = map(float, params)
                self.pre_scale(*params)
            elif SVG_TRANSFORM_SCALE_X == name:
                self.pre_scale(float(params[0]), 1)
            elif SVG_TRANSFORM_SCALE_Y == name:
                self.pre_scale(1, float(params[0]))
            elif SVG_TRANSFORM_ROTATE == name:
                angle = Angle.parse(params[0])
                try:
                    x_param = Length(params[1]).value()
                except IndexError:
                    self.pre_rotate(angle)
                    continue
                try:
                    y_param = Length(params[2]).value()
                    self.pre_rotate(angle, x_param, y_param)
                except IndexError:
                    self.pre_rotate(angle, x_param)
            elif SVG_TRANSFORM_SKEW == name:
                angle_a = Angle.parse(params[0])
                try:
                    angle_b = Angle.parse(params[1])
                except IndexError:  # this isn't valid.
                    continue
                try:
                    x_param = Length(params[2]).value()
                except IndexError:
                    self.pre_skew(angle_a, angle_b)
                    continue
                try:
                    y_param = Length(params[3]).value()
                    self.pre_skew(angle_a, angle_b, x_param, y_param)
                except IndexError:
                    self.pre_skew(angle_a, angle_b, x_param)
            elif SVG_TRANSFORM_SKEW_X == name:
                angle_a = Angle.parse(params[0])
                try:
                    x_param = Length(params[1]).value()
                except IndexError:
                    self.pre_skew_x(angle_a)
                    continue
                try:
                    y_param = Length(params[2]).value()
                    self.pre_skew_x(angle_a, x_param, y_param)
                except IndexError:
                    self.pre_skew_x(angle_a, x_param)
            elif SVG_TRANSFORM_SKEW_Y == name:
                angle_b = Angle.parse(params[0])
                try:
                    x_param = Length(params[1]).value()
                except IndexError:
                    self.pre_skew_y(angle_b)
                    continue
                try:
                    y_param = Length(params[2]).value()
                    self.pre_skew_y(angle_b, x_param, y_param)
                except IndexError:
                    self.pre_skew_y(angle_b, x_param)
        return self

    def render(
        self,
        ppi=None,
        relative_length=None,
        width=None,
        height=None,
        font_size=None,
        font_height=None,
        viewbox=None,
        **kwargs,
    ):
        """
        Provides values to turn trans_x and trans_y values into user units floats rather
        than Lengths by giving the required information to perform the conversions.
        """
        if isinstance(self.e, Length):
            if width is None and relative_length is not None:
                width = relative_length
            self.e = self.e.value(
                ppi=ppi,
                relative_length=width,
                font_size=font_size,
                font_height=font_height,
                viewbox=viewbox,
            )
        if isinstance(self.f, Length):
            if height is None and relative_length is not None:
                height = relative_length
            self.f = self.f.value(
                ppi=ppi,
                relative_length=height,
                font_size=font_size,
                font_height=font_height,
                viewbox=viewbox,
            )
        return self

    @property
    def determinant(self):
        return self.a * self.d - self.c * self.b

    def value_trans_x(self):
        return self.e

    def value_trans_y(self):
        return self.f

    def value_scale_x(self):
        return float(self.a)

    def value_scale_y(self):
        return float(self.d)

    def value_skew_x(self):
        return float(self.b)

    def value_skew_y(self):
        return float(self.c)

    def reset(self):
        """Resets matrix to identity."""
        self.a = 1.0
        self.b = 0.0
        self.c = 0.0
        self.d = 1.0
        self.e = 0.0
        self.f = 0.0

    def inverse(self):
        """
        Invert this matrix in place (affine inverse).

        SVG Matrix:
        [a c e]
        [b d f]
        """
        m00 = self.a
        m01 = self.c
        m02 = self.e
        m10 = self.b
        m11 = self.d
        m12 = self.f
        determinant = m00 * m11 - m01 * m10
        # NOTE: raises ZeroDivisionError for singular (determinant == 0)
        # matrices, as the original did.
        inverse_determinant = 1.0 / determinant
        self.a = m11 * inverse_determinant
        self.c = -m01 * inverse_determinant
        self.b = -m10 * inverse_determinant
        self.d = m00 * inverse_determinant
        self.e = (m01 * m12 - m02 * m11) * inverse_determinant
        self.f = (m10 * m02 - m00 * m12) * inverse_determinant
        return self

    def vector(self):
        """
        provide the matrix suitable for multiplying vectors. This will be the matrix with the same rotation and scale
        aspects but with no translation. This matrix is for multiplying vector elements where the position doesn't
        matter but the scaling and rotation do.
        :return:
        """
        return Matrix(self.a, self.b, self.c, self.d, 0.0, 0.0)

    def is_identity(self):
        return (
            self.a == 1
            and self.b == 0
            and self.c == 0
            and self.d == 1
            and self.e == 0
            and self.f == 0
        )

    def post_cat(self, *components):
        """Post-concatenate (apply after the current transform)."""
        mx = Matrix(*components)
        self.__imatmul__(mx)

    def post_scale(self, sx=1.0, sy=None, x=0.0, y=0.0):
        """Post-scale about (x, y); sy defaults to sx (uniform scale)."""
        if sy is None:
            sy = sx
        if x is None:
            x = 0.0
        if y is None:
            y = 0.0
        if x == 0 and y == 0:
            self.post_cat(Matrix.scale(sx, sy))
        else:
            # Scale about an arbitrary point: shift to origin, scale, shift back.
            self.post_translate(-x, -y)
            self.post_scale(sx, sy)
            self.post_translate(x, y)

    def post_scale_x(self, sx=1.0, x=0.0, y=0.0):
        self.post_scale(sx, 1, x, y)

    def post_scale_y(self, sy=1.0, x=0.0, y=0.0):
        self.post_scale(1, sy, x, y)

    def post_translate(self, tx=0.0, ty=0.0):
        self.post_cat(Matrix.translate(tx, ty))

    def post_translate_x(self, tx=0.0):
        self.post_translate(tx, 0.0)

    def post_translate_y(self, ty=0.0):
        self.post_translate(0.0, ty)

    def post_rotate(self, angle, x=0.0, y=0.0):
        """Post-rotate by angle (radians) about (x, y)."""
        if x is None:
            x = 0.0
        if y is None:
            y = 0.0
        if x == 0 and y == 0:
            self.post_cat(Matrix.rotate(angle))  # self %= self.get_rotate(theta)
        else:
            matrix = Matrix()
            matrix.post_translate(-x, -y)
            matrix.post_cat(Matrix.rotate(angle))
            matrix.post_translate(x, y)
            self.post_cat(matrix)

    def post_skew(self, angle_a=0.0, angle_b=0.0, x=0.0, y=0.0):
        """Post-skew by angle_a (x-skew) and angle_b (y-skew) about (x, y)."""
        if x is None:
            x = 0
        if y is None:
            y = 0
        if x == 0 and y == 0:
            self.post_cat(Matrix.skew(angle_a, angle_b))
        else:
            self.post_translate(-x, -y)
            self.post_skew(angle_a, angle_b)
            self.post_translate(x, y)

    def post_skew_x(self, angle_a=0.0, x=0.0, y=0.0):
        self.post_skew(angle_a, 0.0, x, y)

    def post_skew_y(self, angle_b=0.0, x=0.0, y=0.0):
        self.post_skew(0.0, angle_b, x, y)

    def pre_cat(self, *components):
        """Pre-concatenate (apply before the current transform)."""
        mx = Matrix(*components)
        self.a, self.b, self.c, self.d, self.e, self.f = Matrix.matrix_multiply(
            mx, self
        )

    def pre_scale(self, sx=1.0, sy=None, x=0.0, y=0.0):
        if sy is None:
            sy = sx
        if x is None:
            x = 0.0
        if y is None:
            y = 0.0
        if x == 0 and y == 0:
            self.pre_cat(Matrix.scale(sx, sy))
        else:
            # Pre-ops compose in the reverse order of the post-ops above.
            self.pre_translate(x, y)
            self.pre_scale(sx, sy)
            self.pre_translate(-x, -y)

    def pre_scale_x(self, sx=1.0, x=0.0, y=0.0):
        self.pre_scale(sx, 1, x, y)

    def pre_scale_y(self, sy=1.0, x=0.0, y=0.0):
        self.pre_scale(1, sy, x, y)

    def pre_translate(self, tx=0.0, ty=0.0):
        self.pre_cat(Matrix.translate(tx, ty))

    def pre_translate_x(self, tx=0.0):
        self.pre_translate(tx, 0.0)

    def pre_translate_y(self, ty=0.0):
        self.pre_translate(0.0, ty)

    def pre_rotate(self, angle, x=0.0, y=0.0):
        if x is None:
            x = 0
        if y is None:
            y = 0
        if x == 0 and y == 0:
            self.pre_cat(Matrix.rotate(angle))
        else:
            self.pre_translate(x, y)
            self.pre_rotate(angle)
            self.pre_translate(-x, -y)

    def pre_skew(self, angle_a=0.0, angle_b=0.0, x=0.0, y=0.0):
        if x is None:
            x = 0
        if y is None:
            y = 0
        if x == 0 and y == 0:
            self.pre_cat(Matrix.skew(angle_a, angle_b))
        else:
            self.pre_translate(x, y)
            self.pre_skew(angle_a, angle_b)
            self.pre_translate(-x, -y)

    def pre_skew_x(self, angle_a=0.0, x=0.0, y=0.0):
        self.pre_skew(angle_a, 0, x, y)

    def pre_skew_y(self, angle_b=0.0, x=0.0, y=0.0):
        self.pre_skew(0.0, angle_b, x, y)

    def point_in_inverse_space(self, v0):
        """Map a point through the inverse of this matrix."""
        inverse = Matrix(self)
        inverse.inverse()
        return inverse.point_in_matrix_space(v0)

    def point_in_matrix_space(self, v0):
        """Map a subscriptable point through this matrix, returning a Point."""
        return Point(
            v0[0] * self.a + v0[1] * self.c + 1 * self.e,
            v0[0] * self.b + v0[1] * self.d + 1 * self.f,
        )

    def transform_point(self, v):
        """Transform a mutable point-like in place (with translation)."""
        nx = v[0] * self.a + v[1] * self.c + 1 * self.e
        ny = v[0] * self.b + v[1] * self.d + 1 * self.f
        v[0] = nx
        v[1] = ny
        return v

    def transform_vector(self, v):
        """
        Applies the transformation without the translation.
        """
        nx = v[0] * self.a + v[1] * self.c
        ny = v[0] * self.b + v[1] * self.d
        v[0] = nx
        v[1] = ny
        return v

    @classmethod
    def scale(cls, sx=1.0, sy=None):
        if sy is None:
            sy = sx
        return cls(sx, 0, 0, sy, 0, 0)

    @classmethod
    def scale_x(cls, sx=1.0):
        return cls.scale(sx, 1.0)

    @classmethod
    def scale_y(cls, sy=1.0):
        return cls.scale(1.0, sy)

    @classmethod
    def translate(cls, tx=0.0, ty=0.0):
        """SVG Matrix:
        [a c e]
        [b d f]
        """
        return cls(1.0, 0.0, 0.0, 1.0, tx, ty)

    @classmethod
    def translate_x(cls, tx=0.0):
        return cls.translate(tx, 0)

    @classmethod
    def translate_y(cls, ty=0.0):
        return cls.translate(0.0, ty)

    @classmethod
    def rotate(cls, angle=0.0):
        """Rotation matrix for angle in radians."""
        ct = cos(angle)
        st = sin(angle)
        return cls(ct, st, -st, ct, 0.0, 0.0)

    @classmethod
    def skew(cls, angle_a=0.0, angle_b=0.0):
        """Skew matrix; angle_a skews x, angle_b skews y (radians)."""
        aa = tan(angle_a)
        bb = tan(angle_b)
        return cls(1.0, bb, aa, 1.0, 0.0, 0.0)

    @classmethod
    def skew_x(cls, angle=0.0):
        return cls.skew(angle, 0.0)

    @classmethod
    def skew_y(cls, angle=0.0):
        return cls.skew(0.0, angle)

    @classmethod
    def identity(cls):
        """
        1, 0, 0,
        0, 1, 0,
        """
        return cls()

    @staticmethod
    def matrix_multiply(m, s):
        """
        [a c e]   [a c e]   [a b 0]
        [b d f] % [b d f] = [c d 0]
        [0 0 1]   [0 0 1]   [e f 1]

        :param m: matrix operand
        :param s: matrix operand
        :return: multiplied matrix.
        """
        r0 = (
            s.a * m.a + s.c * m.b + s.e * 0,
            s.a * m.c + s.c * m.d + s.e * 0,
            s.a * m.e + s.c * m.f + s.e * 1,
        )
        r1 = (
            s.b * m.a + s.d * m.b + s.f * 0,
            s.b * m.c + s.d * m.d + s.f * 0,
            s.b * m.e + s.d * m.f + s.f * 1,
        )
        return float(r0[0]), float(r1[0]), float(r0[1]), float(r1[1]), r0[2], r1[2]
class Viewbox:
    # Models the SVG viewBox/preserveAspectRatio pair and produces the
    # equivalent transform string (see SVG 2 section 8.2).
    def __init__(self, *args, **kwargs):
        """
        Viewbox controls the scaling between the drawing size view that is observing that drawing.

        :param viewbox: either values or viewbox attribute or a Viewbox object
        :param preserveAspectRatio or preserve_aspect_ratio: preserveAspectRatio
        """
        self.x = None
        self.y = None
        self.width = None
        self.height = None
        self.preserve_aspect_ratio = None
        # Accepted forms: (dict), (Viewbox), ("x y w h" string), any of the
        # former plus a preserveAspectRatio string, or four scalars x, y, w, h.
        if args and len(args) <= 2:
            viewbox = args[0]
            if isinstance(viewbox, dict):
                self.property_by_values(viewbox)
            elif isinstance(viewbox, Viewbox):
                self.property_by_object(viewbox)
            else:
                self.set_viewbox(viewbox)
            if len(args) == 2:
                self.preserve_aspect_ratio = args[1]
        elif len(args) == 4:
            self.x = float(args[0])
            self.y = float(args[1])
            self.width = float(args[2])
            self.height = float(args[3])
        if kwargs:
            # Keyword arguments override anything set by positional args.
            self.property_by_values(dict(kwargs))
    def __eq__(self, other):
        # Strict field-by-field equality; no numeric tolerance.
        if not isinstance(other, Viewbox):
            return False
        if self.x != other.x:
            return False
        if self.y != other.y:
            return False
        if self.width != other.width:
            return False
        if self.height != other.height:
            return False
        return self.preserve_aspect_ratio == other.preserve_aspect_ratio
    def __str__(self):
        # Renders as the "min-x min-y width height" viewBox attribute value.
        return "%s %s %s %s" % (
            Length.str(self.x),
            Length.str(self.y),
            Length.str(self.width),
            Length.str(self.height),
        )
    def __repr__(self):
        values = []
        if self.x is not None:
            values.append("%s=%s" % (SVG_ATTR_X, Length.str(self.x)))
        if self.y is not None:
            values.append("%s=%s" % (SVG_ATTR_Y, Length.str(self.y)))
        if self.width is not None:
            values.append("%s=%s" % (SVG_ATTR_WIDTH, Length.str(self.width)))
        if self.height is not None:
            values.append("%s=%s" % (SVG_ATTR_HEIGHT, Length.str(self.height)))
        if self.preserve_aspect_ratio is not None:
            values.append(
                "%s='%s'" % (SVG_ATTR_PRESERVEASPECTRATIO, self.preserve_aspect_ratio)
            )
        params = ", ".join(values)
        return "Viewbox(%s)" % params
    def property_by_object(self, obj):
        # Copy all fields from another Viewbox.
        self.x = obj.x
        self.y = obj.y
        self.width = obj.width
        self.height = obj.height
        self.preserve_aspect_ratio = obj.preserve_aspect_ratio
    def property_by_values(self, values):
        # NOTE(review): individual x/y/width/height are stored as-is (no float
        # coercion here, unlike set_viewbox) — presumably rendered later;
        # confirm callers convert before arithmetic.
        if SVG_ATTR_VIEWBOX in values:
            self.set_viewbox(values[SVG_ATTR_VIEWBOX])
        if SVG_ATTR_X in values:
            self.x = values[SVG_ATTR_X]
        if SVG_ATTR_Y in values:
            self.y = values[SVG_ATTR_Y]
        if SVG_ATTR_WIDTH in values:
            self.width = values[SVG_ATTR_WIDTH]
        if SVG_ATTR_HEIGHT in values:
            self.height = values[SVG_ATTR_HEIGHT]
        if "preserve_aspect_ratio" in values:
            self.preserve_aspect_ratio = values["preserve_aspect_ratio"]
        if SVG_ATTR_PRESERVEASPECTRATIO in values:
            self.preserve_aspect_ratio = values[SVG_ATTR_PRESERVEASPECTRATIO]
    def set_viewbox(self, viewbox):
        # Parse an "x y w h" string; if fewer than four numbers are found the
        # partial/garbage viewBox is silently ignored.
        if viewbox is not None:
            dims = list(REGEX_FLOAT.findall(viewbox))
            try:
                self.x = float(dims[0])
                self.y = float(dims[1])
                self.width = float(dims[2])
                self.height = float(dims[3])
            except IndexError:
                pass
    def transform(self, element):
        # Transform string mapping this viewbox into the element's viewport.
        return Viewbox.viewbox_transform(
            element.x,
            element.y,
            element.width,
            element.height,
            self.x,
            self.y,
            self.width,
            self.height,
            self.preserve_aspect_ratio,
        )
    @staticmethod
    def viewbox_transform(
        e_x, e_y, e_width, e_height, vb_x, vb_y, vb_width, vb_height, aspect
    ):
        """
        SVG 1.1 7.2, SVG 2.0 8.2 equivalent transform of an SVG viewport.
        With regards to https://github.com/w3c/svgwg/issues/215 use 8.2 version.

        It creates transform commands equal to that viewport expected.

        Let e-x, e-y, e-width, e-height be the position and size of the element respectively.
        Let vb-x, vb-y, vb-width, vb-height be the min-x, min-y, width and height values of the viewBox attribute
        respectively.

        Let align be the align value of preserveAspectRatio, or 'xMidYMid' if preserveAspectRatio is not defined.
        Let meetOrSlice be the meetOrSlice value of preserveAspectRatio, or 'meet' if preserveAspectRatio is not defined
        or if meetOrSlice is missing from this value.

        :param e_x: element_x value
        :param e_y: element_y value
        :param e_width: element_width value
        :param e_height: element_height value
        :param vb_x: viewbox_x value
        :param vb_y: viewbox_y value
        :param vb_width: viewbox_width value
        :param vb_height: viewbox_height value
        :param aspect: preserve aspect ratio value
        :return: string of the SVG transform commands to account for the viewbox.
        """
        # Any missing dimension makes the transform undefined; emit nothing.
        if (
            e_x is None
            or e_y is None
            or e_width is None
            or e_height is None
            or vb_x is None
            or vb_y is None
            or vb_width is None
            or vb_height is None
        ):
            return ""
        if aspect is not None:
            aspect_slice = aspect.split(" ")
            try:
                align = aspect_slice[0]
            except IndexError:
                align = "xMidyMid"
            try:
                meet_or_slice = aspect_slice[1]
            except IndexError:
                meet_or_slice = "meet"
        else:
            # Spec defaults: align='xMidYMid', meetOrSlice='meet'. The casing
            # here is irrelevant because align is lowercased before matching.
            align = "xMidyMid"
            meet_or_slice = "meet"
        # Initialize scale-x to e-width/vb-width.
        scale_x = e_width / vb_width
        # Initialize scale-y to e-height/vb-height.
        scale_y = e_height / vb_height
        # If align is not 'none' and meetOrSlice is 'meet', set the larger of scale-x and scale-y to the smaller.
        if align != SVG_VALUE_NONE and meet_or_slice == "meet":
            scale_x = scale_y = min(scale_x, scale_y)
        # Otherwise, if align is not 'none' and meetOrSlice is 'slice', set the smaller of scale-x and scale-y to the larger
        elif align != SVG_VALUE_NONE and meet_or_slice == "slice":
            scale_x = scale_y = max(scale_x, scale_y)
        # Initialize translate-x to e-x - (vb-x * scale-x).
        translate_x = e_x - (vb_x * scale_x)
        # Initialize translate-y to e-y - (vb-y * scale-y)
        translate_y = e_y - (vb_y * scale_y)
        # If align contains 'xMid', add (e-width - vb-width * scale-x) / 2 to translate-x.
        align = align.lower()
        if "xmid" in align:
            translate_x += (e_width - vb_width * scale_x) / 2.0
        # If align contains 'xMax', add (e-width - vb-width * scale-x) to translate-x.
        if "xmax" in align:
            translate_x += e_width - vb_width * scale_x
        # If align contains 'yMid', add (e-height - vb-height * scale-y) / 2 to translate-y.
        if "ymid" in align:
            translate_y += (e_height - vb_height * scale_y) / 2.0
        # If align contains 'yMax', add (e-height - vb-height * scale-y) to translate-y.
        if "ymax" in align:
            translate_y += e_height - vb_height * scale_y
        # The transform applied to content contained by the element is given by:
        # translate(translate-x, translate-y) scale(scale-x, scale-y)
        # Unrendered Length scales indicate the caller forgot to render first.
        if isinstance(scale_x, Length) or isinstance(scale_y, Length):
            raise ValueError
        if translate_x == 0 and translate_y == 0:
            if scale_x == 1 and scale_y == 1:
                return ""  # Nothing happens.
            else:
                return "scale(%s, %s)" % (Length.str(scale_x), Length.str(scale_y))
        else:
            if scale_x == 1 and scale_y == 1:
                return "translate(%s, %s)" % (
                    Length.str(translate_x),
                    Length.str(translate_y),
                )
            else:
                return "translate(%s, %s) scale(%s, %s)" % (
                    Length.str(translate_x),
                    Length.str(translate_y),
                    Length.str(scale_x),
                    Length.str(scale_y),
                )
class SVGElement(object):
    """Base for every element in the SVG namespace.

    Construction is seeded in priority order: if the first positional
    argument is a dict it provides initial values (kwargs override it); if
    it is another SVGElement its properties are copied wholesale. Otherwise
    the kwargs alone form the value dictionary. Remaining positional
    arguments are forwarded to property_by_args.
    """

    def __init__(self, *args, **kwargs):
        self.id = None
        self.values = None
        if args:
            seed, *rest = args
            if isinstance(seed, dict):
                # Dict seed: kwargs take precedence over the dict's entries.
                args = rest
                merged = dict(seed)
                merged.update(kwargs)
                self.values = merged
            elif isinstance(seed, SVGElement):
                # Element seed: copy its properties and stop early.
                self.property_by_object(seed)
                self.property_by_args(*rest)
                return
        if self.values is None:
            self.values = dict(kwargs)
        self.property_by_values(self.values)
        if args:
            self.property_by_args(*args)

    def property_by_args(self, *args):
        # Hook for subclasses that accept positional construction arguments.
        pass

    def property_by_object(self, obj):
        """Copy the identity and value dictionary from another SVGElement."""
        self.id = obj.id
        self.values = dict(obj.values)

    def property_by_values(self, values):
        """Initialize attributes from the value dictionary."""
        self.id = values.get(SVG_ATTR_ID)

    def render(self, **kwargs):
        """
        Render changes any length/percent values or attributes into real usable limits if
        given the information required to change such parameters.

        :param kwargs: various other properties to be rendered with.
        :return:
        """
        pass

    def set(self, key, value):
        """Store an attribute value; returns self to allow chaining."""
        self.values[key] = value
        return self
class Transformable:
    """Any element that is transformable and has a transform property.

    Mixin: expects ``Matrix``, ``Point``, ``Angle`` and ``copy`` from this
    module; ``reify``/``bbox`` are specialized by concrete shapes.
    """

    def __init__(self, *args, **kwargs):
        self._length = None
        self._lengths = None
        self.transform = None
        self.apply = None

    def property_by_object(self, s):
        """Copy transform state from another Transformable."""
        self.transform = Matrix(s.transform)
        self.apply = s.apply

    def property_by_values(self, values):
        """Initialize transform state from an attribute dictionary."""
        self.transform = Matrix(values.get(SVG_ATTR_TRANSFORM, ""))
        self.apply = bool(values.get("apply", True))

    def __mul__(self, other):
        if isinstance(other, (Matrix, str)):
            n = copy(self)
            n *= other
            return n
        return NotImplemented

    __rmul__ = __mul__

    def __imul__(self, other):
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            self.transform *= other
            return self
        # BUG FIX: the original fell off the end here and implicitly returned
        # None, so ``element *= unsupported_operand`` silently rebound the
        # name to None. Returning NotImplemented lets Python raise a proper
        # TypeError instead.
        return NotImplemented

    def __abs__(self):
        """
        The absolute value is taken to be the actual shape transformed.
        :return: transformed version of the given shape.
        """
        m = copy(self)
        m.reify()
        return m

    def reify(self):
        """
        Realizes the transform to the attributes. Such that the attributes become actualized and the transform
        simplifies towards the identity matrix. In many cases it will become the identity matrix. In other cases the
        transformed shape cannot be represented through the properties alone. And shall keep those parts of the
        transform required preserve equivalency.

        The default method will be called by submethods but will only scale properties like stroke_width which should
        scale with the transform.
        """
        # Invalidate cached length computations; subclasses do the real work.
        self._lengths = None
        self._length = None

    def render(self, **kwargs):
        """
        Renders the transformable by performing any required length conversion operations into pixels. The element
        will be the pixel-length form.
        """
        if self.transform is not None:
            self.transform.render(**kwargs)
        return self

    def bbox(self, transformed=True, with_stroke=False):
        """
        Returns the bounding box of the given object.

        :param transformed: whether this is the transformed bounds or default.
        :param with_stroke: should the stroke-width be included in the bounds.
        :return: bounding box of the given element
        """
        raise NotImplementedError

    @property
    def rotation(self):
        """Net rotation of the transform: the angle from the image of the
        origin to the image of (1, 0); zero when apply is falsy."""
        if not self.apply:
            return Angle.degrees(0)
        prx = Point(1, 0)
        prx *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.angle_to(prx)
class GraphicObject:
    """Any drawn element."""
    # Mixin holding paint properties (stroke, fill, stroke_width). Some
    # methods (implicit_stroke_width, reify) read self.apply, self.transform
    # and self.values, which are provided by sibling mixins when composed
    # into Shape; missing attributes are tolerated via AttributeError.
    def __init__(self, *args, **kwargs):
        self.stroke = None
        self.fill = None
        self.stroke_width = None
    def property_by_object(self, s):
        # Copy paint properties from another GraphicObject; stroke_width is
        # rendered to a plain value via Length.
        self.fill = Color(s.fill) if s.fill is not None else None
        self.stroke = Color(s.stroke) if s.stroke is not None else None
        self.stroke_width = (
            Length(s.stroke_width).value() if s.stroke_width is not None else None
        )
    def property_by_values(self, values):
        # Initialize stroke/fill (with optional opacity) and stroke_width
        # from an attribute dictionary. The SVG_ATTR_* spelling, when present,
        # overrides the snake_case spelling.
        stroke = values.get(SVG_ATTR_STROKE)
        self.stroke = Color(stroke) if stroke is not None else None
        stroke_opacity = values.get("stroke_opacity")
        stroke_opacity = values.get(SVG_ATTR_STROKE_OPACITY, stroke_opacity)
        if (
            stroke_opacity is not None
            and self.stroke is not None
            and self.stroke.value is not None
        ):
            try:
                self.stroke.opacity = float(stroke_opacity)
            except ValueError:
                # Non-numeric opacity strings are ignored.
                pass
        fill = values.get(SVG_ATTR_FILL)
        self.fill = Color(fill) if fill is not None else None
        fill_opacity = values.get("fill_opacity")
        fill_opacity = values.get(SVG_ATTR_FILL_OPACITY, fill_opacity)
        if (
            fill_opacity is not None
            and self.fill is not None
            and self.fill.value is not None
        ):
            try:
                self.fill.opacity = float(fill_opacity)
            except ValueError:
                pass
        self.stroke_width = Length(values.get("stroke_width", 1.0)).value()
        self.stroke_width = Length(
            values.get(SVG_ATTR_STROKE_WIDTH, self.stroke_width)
        ).value()
    def render(self, **kwargs):
        # Resolve a Length stroke_width into a float; percentage widths are
        # resolved against the viewport diagonal below.
        if isinstance(self.stroke_width, Length):
            width = kwargs.get("width", kwargs.get("relative_length"))
            height = kwargs.get("height", kwargs.get("relative_length"))
            try:
                del kwargs["relative_length"]
            except KeyError:
                pass
            self.stroke_width = self.stroke_width.value(
                relative_length=sqrt(width * width + height * height), **kwargs
            )
            # A percentage stroke_width is always computed as a percentage of the normalized viewBox diagonal length.
    def reify(self):
        """
        Realizes the transform to the attributes. Such that the attributes become actualized and the transform
        simplifies towards the identity matrix. In many cases it will become the identity matrix. In other cases the
        transformed shape cannot be represented through the properties alone. And shall keep those parts of the
        transform required preserve equivalency.
        """
        self.stroke_width = self.implicit_stroke_width
        return self
    @property
    def implicit_stroke_width(self):
        # Stroke width as scaled by the element's transform (sqrt(|det|)),
        # unless apply is falsy or a non-scaling-stroke vector-effect is set.
        try:
            if not self.apply:
                return self.stroke_width
            if self.stroke_width is not None:
                if (
                    hasattr(self, "values")
                    and SVG_ATTR_VECTOR_EFFECT in self.values
                    and SVG_VALUE_NON_SCALING_STROKE
                    in self.values[SVG_ATTR_VECTOR_EFFECT]
                ):
                    return self.stroke_width  # we are not to scale the stroke.
                width = self.stroke_width
                det = self.transform.determinant
                return width * sqrt(abs(det))
        except AttributeError:
            # apply/transform not provided (mixin used standalone): fall back
            # to the raw stroke_width.
            return self.stroke_width
    def is_degenerate(self):
        # Default: shapes override this when they can collapse to nothing.
        return False
class Shape(SVGElement, GraphicObject, Transformable):
    """
    SVG Shapes are several SVG items defined in SVG 1.1 9.1
    https://www.w3.org/TR/SVG11/shapes.html
    These shapes are circle, ellipse, line, polyline, polygon, and path.

    All shapes have methods:
    d(relative, transform): provides path_d string for the shape.
    reify(): Applies transform of the shape to modify the shape attributes.
    render(): Ensure that the shape properties have real space values.
    bbox(transformed): Provides the bounding box for the given shape.

    All shapes must implement:
    __repr__(), with a call to _repr_shape()
    __copy__()

    All shapes have attributes:
    id: SVG ID attributes. (SVGElement)
    transform: SVG Matrix to apply to this shape. (Transformable)
    apply: Determine whether transform should be applied. (Transformable)
    fill: SVG color of the shape fill. (GraphicObject)
    stroke: SVG color of the shape stroke. (GraphicObject)
    stroke_width: Stroke width of the stroke. (GraphicObject)
    """

    def __init__(self, *args, **kwargs):
        self._strict = True
        Transformable.__init__(self, *args, **kwargs)
        GraphicObject.__init__(self, *args, **kwargs)
        SVGElement.__init__(
            self, *args, **kwargs
        )  # Must go last, triggers, by_object, by_value, by_arg functions.

    def property_by_object(self, s):
        """Copy properties from another shape via each mixin in turn."""
        SVGElement.property_by_object(self, s)
        Transformable.property_by_object(self, s)
        GraphicObject.property_by_object(self, s)

    def property_by_values(self, values):
        """Set properties from a parsed attribute dictionary via each mixin."""
        SVGElement.property_by_values(self, values)
        Transformable.property_by_values(self, values)
        GraphicObject.property_by_values(self, values)

    def render(self, **kwargs):
        """Render all mixin properties into real (pixel) space values."""
        SVGElement.render(self, **kwargs)
        Transformable.render(self, **kwargs)
        GraphicObject.render(self, **kwargs)

    def __eq__(self, other):
        # Shapes are equal when their paints match and their path forms match.
        if not isinstance(other, Shape):
            return NotImplemented
        if self.fill != other.fill or self.stroke != other.stroke:
            return False
        first = self
        if not isinstance(first, Path):
            first = Path(first)
        second = other
        if not isinstance(second, Path):
            second = Path(second)
        return first == second

    def __ne__(self, other):
        if not isinstance(other, Shape):
            return NotImplemented
        return not self == other

    def __iadd__(self, other):
        # Shape concatenation produces a combined Path.
        if isinstance(other, Shape):
            return Path(self) + Path(other)
        return NotImplemented

    __add__ = __iadd__

    def __matmul__(self, other):
        m = copy(self)
        m.__imatmul__(other)
        return m

    def __rmatmul__(self, other):
        m = copy(other)
        m.__imatmul__(self)
        return m

    def __imatmul__(self, other):
        """
        The @ operation with a matrix works much like multiplication except that it automatically reifies the shape.
        """
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            self.transform *= other
        self.reify()
        return self

    def _calc_lengths(self, error=ERROR, min_depth=MIN_DEPTH, segments=None):
        """
        Calculate the length values for the segments of the Shape.

        :param error: error permitted for length calculations.
        :param min_depth: minimum depth for the length calculation.
        :param segments: optional segments to use.
        :return:
        """
        if segments is None:
            segments = self.segments(False)
        if self._length is not None:
            return  # Already computed and cached.
        lengths = [each.length(error=error, min_depth=min_depth) for each in segments]
        self._length = sum(lengths)
        if self._length == 0:
            self._lengths = lengths
        else:
            # Cache each segment's fraction of the total length.
            self._lengths = [each / self._length for each in lengths]

    def npoint(self, positions, error=ERROR):
        """
        Find a points between 0 and 1 within the shape. Numpy acceleration allows points to be an array of floats.
        """
        try:
            import numpy as np
        except ImportError:
            return [self.point(pos) for pos in positions]

        segments = self.segments(False)
        if len(segments) == 0:
            return None
        # Shortcuts
        if self._length is None:
            self._calc_lengths(error=error, segments=segments)
        xy = np.empty((len(positions), 2), dtype=float)
        if self._length == 0:
            # NOTE(review): this branch applies round() to `positions`, which
            # only works for scalar-like values, not arrays — confirm callers
            # before relying on zero-length shapes here.
            i = int(round(positions * (len(segments) - 1)))
            point = segments[i].point(0.0)
            xy[:] = point
            return xy

        # Find which segment the point we search for is located on:
        segment_start = 0
        for index, segment in enumerate(segments):
            segment_end = segment_start + self._lengths[index]
            position_subset = (segment_start <= positions) & (positions < segment_end)
            v0 = positions[position_subset]
            if not len(v0):
                continue  # Nothing matched.
            d = segment_end - segment_start
            if d == 0:  # This segment is 0 length.
                segment_pos = 0.0
            else:
                segment_pos = (v0 - segment_start) / d
            c = segment.npoint(segment_pos)
            xy[position_subset] = c[:]
            segment_start = segment_end

        # the loop above will miss position == 1
        xy[positions == 1] = np.array(list(segments[-1].end))
        return xy

    def point(self, position, error=ERROR):
        """
        Find a point between 0 and 1 within the Shape, going through the shape with regard to position.

        :param position: value between 0 and 1 within the shape.
        :param error: Length error permitted.
        :return: Point at the given location.
        """
        segments = self.segments(False)
        if len(segments) == 0:
            return None
        # Shortcuts
        try:
            if position <= 0.0:
                return segments[0].point(position)
            if position >= 1.0:
                return segments[-1].point(position)
        except ValueError:
            # Comparison raised (e.g. array-valued position): delegate to npoint.
            return self.npoint([position], error=error)[0]
        if self._length is None:
            self._calc_lengths(error=error, segments=segments)

        if self._length == 0:
            i = int(round(position * (len(segments) - 1)))
            return segments[i].point(0.0)
        # Find which segment the point we search for is located on:
        segment_start = 0
        segment_pos = 0
        segment = segments[0]
        for index, segment in enumerate(segments):
            segment_end = segment_start + self._lengths[index]
            if segment_end >= position:
                # This is the segment! How far in on the segment is the point?
                segment_pos = (position - segment_start) / (segment_end - segment_start)
                break
            segment_start = segment_end
        return segment.point(segment_pos)

    def length(self, error=ERROR, min_depth=MIN_DEPTH):
        """Total length of the shape's segments (cached after first call)."""
        self._calc_lengths(error, min_depth)
        return self._length

    def segments(self, transformed=True):
        """
        Returns PathSegments which correctly produce this shape.

        This should be implemented by subclasses.
        """
        raise NotImplementedError

    def d(self, relative=False, transformed=True):
        """
        Returns the path_d string of the shape.

        :param relative: Returns path_d in relative form.
        :param transformed: Return path_d, with applied transform.
        :return: path_d string
        """
        return Path(self.segments(transformed=transformed)).d(relative=relative)

    def bbox(self, transformed=True, with_stroke=False):
        """
        Get the bounding box for the given shape.

        :param transformed: whether this is the transformed bounds or default.
        :param with_stroke: should the stroke-width be included in the bounds.
        :return: bounding box of the given element
        """
        # Close and Move segments contribute no drawn geometry and are
        # excluded. (Fixed: the original condition `isinstance(Close, Move)`
        # compared the two classes to each other, which is always False, so
        # the filter never excluded anything.)
        bbs = [
            seg.bbox()
            for seg in self.segments(transformed=transformed)
            if not isinstance(seg, (Close, Move))
        ]

        try:
            xmins, ymins, xmaxs, ymaxs = list(zip(*bbs))
        except ValueError:
            return None  # No bounding box items existed. So no bounding box.

        if with_stroke and self.stroke_width is not None:
            if transformed:
                delta = float(self.implicit_stroke_width) / 2.0
            else:
                delta = float(self.stroke_width) / 2.0
        else:
            delta = 0.0

        return (
            min(xmins) - delta,
            min(ymins) - delta,
            max(xmaxs) + delta,
            max(ymaxs) + delta,
        )

    def _init_shape(self, *args):
        """
        Generic SVG parsing of args. In those cases where the shape accepts finite elements we can process the last
        four elements of the shape with this code. This will happen in simpleline, roundshape, and rect. It will not
        happen in polyshape or paths since these can accept infinite arguments.
        """

        arg_length = len(args)

        if arg_length >= 1:
            if args[0] is not None:
                self.transform = Matrix(args[0])
        if arg_length >= 2:
            if args[1] is not None:
                self.stroke = Color(args[1])
        if arg_length >= 3:
            if args[2] is not None:
                self.fill = Color(args[2])
        if arg_length >= 4:
            if args[3] is not None:
                self.apply = bool(args[3])

    def _repr_shape(self, values):
        """
        Generic pieces of repr shape.
        """
        # Cannot use SVG_ATTR_* for some attributes in repr because they contain hyphens
        if self.stroke is not None and self.stroke.rgb is not None:
            values.append("%s='%s'" % (SVG_ATTR_STROKE, self.stroke.hexrgb))
            if self.stroke.opacity != 1.0:
                values.append("%s=%s" % ("stroke_opacity", str(self.stroke.opacity)))
        if self.fill is not None and self.fill.rgb is not None:
            values.append("%s='%s'" % (SVG_ATTR_FILL, self.fill.hexrgb))
            if self.fill.opacity != 1.0:
                values.append("%s=%s" % ("fill_opacity", str(self.fill.opacity)))
        if self.stroke_width is not None and self.stroke_width != 1.0:
            values.append("stroke_width=%s" % str(self.stroke_width))
        if not self.transform.is_identity():
            values.append("%s=%s" % (SVG_ATTR_TRANSFORM, repr(self.transform)))
        if self.apply is not None and not self.apply:
            values.append("apply=%s" % self.apply)
        if self.id is not None:
            values.append("%s='%s'" % (SVG_ATTR_ID, self.id))

    def _str_shape(self, values):
        """
        Generic pieces of str shape.
        """
        if self.stroke is not None and self.stroke.rgb is not None:
            values.append("%s='%s'" % (SVG_ATTR_STROKE, self.stroke.hexrgb))
            if self.stroke.opacity != 1.0:
                values.append(
                    "%s=%s" % (SVG_ATTR_STROKE_OPACITY, str(self.stroke.opacity))
                )
        if self.fill is not None and self.fill.rgb is not None:
            values.append("%s='%s'" % (SVG_ATTR_FILL, self.fill.hexrgb))
            if self.fill.opacity != 1.0:
                values.append("%s=%s" % (SVG_ATTR_FILL_OPACITY, str(self.fill.opacity)))
        if self.stroke_width is not None and self.stroke_width != 1.0:
            values.append("%s=%s" % (SVG_ATTR_STROKE_WIDTH, str(self.stroke_width)))
        if not self.transform.is_identity():
            values.append("%s=%s" % (SVG_ATTR_TRANSFORM, repr(self.transform)))
        if self.apply is not None and not self.apply:
            values.append("apply=%s" % self.apply)
        if self.id is not None:
            values.append("%s='%s'" % (SVG_ATTR_ID, self.id))

    def _name(self):
        # Class name used by subclasses when rendering repr/str forms.
        return self.__class__.__name__
class PathSegment:
    """
    Path Segments are the base class for all the segment within a Path.
    These are defined in SVG 1.1 8.3 and SVG 2.0 9.3
    https://www.w3.org/TR/SVG11/paths.html#PathData
    https://www.w3.org/TR/SVG2/paths.html#PathElement
    These segments define a 1:1 relationship with the path_d or path data attribute, denoted in
    SVG by the 'd' attribute. These are moveto, closepath, lineto, and the curves which are cubic
    bezier curves, quadratic bezier curves, and elliptical arc. These are classed as Move, Close,
    Line, CubicBezier, QuadraticBezier, and Arc. And in path_d are denoted as M, Z, L, C, Q, A.

    There are lowercase versions of these commands. And for C, and Q there are S and T which are
    smooth versions. For lines there are also V and H commands which denote vertical and horizontal
    versions of the line command.

    The major difference between paths in 1.1 and 2.0 is the use of Z to truncate a command to close.
    "M0,0C 0,100 100,0 z is valid in 2.0 since the last z replaces the 0,0. These are read by
    svg.elements but they are not written.
    """

    def __init__(self, **kwargs):
        # relative: whether this segment renders as a lowercase (relative) command.
        try:
            self.relative = bool(kwargs["relative"])
        except (KeyError, ValueError):
            self.relative = False
        # smooth: whether curve segments may render as smooth (S/T) commands.
        try:
            self.smooth = bool(kwargs["smooth"])
        except (KeyError, ValueError):
            self.smooth = True
        self.start = None
        self.end = None

    def __repr__(self):
        values = []
        s = self.start
        if s is not None:
            values.append("start=%s" % repr(s))
        e = self.end
        if e is not None:
            values.append("end=%s" % repr(e))
        return "%s(%s)" % (self.__class__.__name__, ", ".join(values))

    def __mul__(self, other):
        # Multiplying by a Matrix (or matrix string) returns a transformed copy.
        if isinstance(other, (Matrix, str)):
            n = copy(self)
            n *= other
            return n
        return NotImplemented

    __rmul__ = __mul__

    def __iadd__(self, other):
        # Adding another segment, or a path string, produces a Path.
        if isinstance(other, PathSegment):
            path = Path(self, other)
            return path
        elif isinstance(other, str):
            path = Path(self) + other
            return path
        return NotImplemented

    __add__ = __iadd__

    def __str__(self):
        """
        This defines an individual path segment string. Since this isn't part of a Path it appends a pseudo-Move
        command to correctly provide the starting position.

        :return: string representation of the object.
        """
        d = self.d()
        if self.start is not None:
            if self.relative:
                return "m %s %s" % (self.start, d)
            else:
                return "M %s %s" % (self.start, d)
        return d

    def __iter__(self):
        # Iterates over the segment's indexable points (start, controls, end).
        self.n = -1
        return self

    def __next__(self):
        self.n += 1
        try:
            val = self[self.n]
            if val is None:
                # Skip a single None point (e.g. a Move with no start).
                self.n += 1
                val = self[self.n]
            return val
        except IndexError:
            raise StopIteration

    next = __next__  # Python 2 iterator-protocol alias.

    @staticmethod
    def segment_length(
        curve,
        start=0.0,
        end=1.0,
        start_point=None,
        end_point=None,
        error=ERROR,
        min_depth=MIN_DEPTH,
        depth=0,
    ):
        """Recursively approximates the length by straight lines"""
        if start_point is None:
            start_point = curve.point(start)
        if end_point is None:
            end_point = curve.point(end)
        mid = (start + end) / 2.0
        mid_point = curve.point(mid)
        length = abs(end_point - start_point)
        first_half = abs(mid_point - start_point)
        second_half = abs(end_point - mid_point)

        length2 = first_half + second_half
        if (length2 - length > error) or (depth < min_depth):
            # Subdivision still improves the estimate: recurse on each half.
            depth += 1
            return PathSegment.segment_length(
                curve, start, mid, start_point, mid_point, error, min_depth, depth
            ) + PathSegment.segment_length(
                curve, mid, end, mid_point, end_point, error, min_depth, depth
            )
        # This is accurate enough.
        return length2

    def _line_length(self, start=0.0, end=1.0, error=ERROR, min_depth=MIN_DEPTH):
        # Convenience wrapper around segment_length for this segment.
        return PathSegment.segment_length(
            self, start, end, error=error, min_depth=min_depth
        )

    def bbox(self):
        """returns the bounding box for the segment.
        xmin, ymin, xmax, ymax
        """
        xs = [p.x for p in self if p is not None]
        ys = [p.y for p in self if p is not None]
        xmin = min(xs)
        xmax = max(xs)
        ymin = min(ys)
        ymax = max(ys)
        return xmin, ymin, xmax, ymax

    def reverse(self):
        """
        Reverses the current path segment.
        """
        end = self.end
        self.end = self.start
        self.start = end

    def point(self, position):
        """
        Returns the point at a given amount through the path segment.

        :param position: t value between 0 and 1
        :return: Point instance
        """
        return Point(self.npoint([position])[0])

    def npoint(self, positions):
        """
        Returns the points at given positions along the path segment

        :param positions: N-sized sequence of t value between 0 and 1
        :return: N-sized sequence of 2-sized sequence of float
        """
        # Base implementation: every position resolves to the end point.
        return [self.end] * len(positions)

    def length(self, error=ERROR, min_depth=MIN_DEPTH):
        """
        Returns the length of this path segment.

        :param error: permitted error in the length approximation.
        :param min_depth: minimum subdivision depth for the approximation.
        :return: length of this segment (0 for the non-drawn base class).
        """
        return 0

    def d(self, current_point=None, relative=None, smooth=None):
        """Returns the fragment path_d value for the current path segment.

        For a relative segment the current_point must be provided. If it is omitted then only an absolute segment
        can be returned."""
        raise NotImplementedError
class Move(PathSegment):
    """Represents move commands. Moves to a new location without any path distance.
    Paths that consist of only move commands, are valid.

    Move serve to make discontinuous paths into continuous linked paths segments
    with non-drawn sections.
    """

    def __init__(self, *args, **kwargs):
        """
        Move commands most importantly go to a place. So if one location is given, that's the end point.
        If two locations are given then first is the start location.

        For many Move commands it is not necessary to have an original start location. The start point provides a
        linked locations for some elements that may require it. If known it can be provided.

        Move(p) where p is the End point.
        Move(s,e) where s is the Start point, e is the End point.
        Move(p, start=s) where p is End point, s is the Start point.
        Move(p, end=e) where p is the Start point, e is the End point.
        Move(start=s, end=e) where s is the Start point, e is the End point.
        """
        PathSegment.__init__(self, **kwargs)
        self.end = None
        self.start = None
        if len(args) == 0:
            # Keyword-only form: Move(start=s, end=e)
            if "end" in kwargs:
                self.end = kwargs["end"]
            if "start" in kwargs:
                self.start = kwargs["start"]
        elif len(args) == 1:
            if len(kwargs) == 0:
                # Move(p): a lone positional argument is the end point.
                self.end = args[0]
            else:
                if "end" in kwargs:
                    # Move(s, end=e): the positional becomes the start.
                    self.start = args[0]
                    self.end = kwargs["end"]
                elif "start" in kwargs:
                    # Move(p, start=s): the positional is the end.
                    self.start = kwargs["start"]
                    self.end = args[0]
        elif len(args) == 2:
            # Move(s, e)
            self.start = args[0]
            self.end = args[1]
        # Normalize whatever was provided into Point instances.
        if self.start is not None:
            self.start = Point(self.start)
        if self.end is not None:
            self.end = Point(self.end)

    def __imul__(self, other):
        # In-place matrix multiplication transforms both endpoints.
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            if self.start is not None:
                self.start *= other
            if self.end is not None:
                self.end *= other
        return self

    def __copy__(self):
        return Move(self.start, self.end, relative=self.relative)

    def __eq__(self, other):
        if not isinstance(other, Move):
            return NotImplemented
        return self.start == other.start and self.end == other.end

    def __ne__(self, other):
        if not isinstance(other, Move):
            return NotImplemented
        return not self == other

    def __len__(self):
        # A Move exposes two indexable points: start and end.
        return 2

    def __getitem__(self, item):
        if item == 0:
            return self.start
        elif item == 1:
            return self.end
        else:
            raise IndexError

    def d(self, current_point=None, relative=None, smooth=None):
        """Return the path_d fragment ("M"/"m") for this move."""
        if (
            current_point is None
            or (relative is None and self.relative)
            or (relative is not None and not relative)
        ):
            return "M %s" % self.end
        return "m %s" % (self.end - current_point)
class Curve(PathSegment):
    """Base class for curve commands (quadratic/cubic Bezier, arcs)."""

    def __init__(self, start=None, end=None, **kwargs):
        """Initialize the shared start/end endpoints of a curve segment."""
        PathSegment.__init__(self, **kwargs)
        self.start = None if start is None else Point(start)
        self.end = None if end is None else Point(end)
class Linear(PathSegment):
    """Represents line commands."""

    def __init__(self, start=None, end=None, **kwargs):
        PathSegment.__init__(self, **kwargs)
        self.start = Point(start) if start is not None else None
        self.end = Point(end) if end is not None else None

    def __copy__(self):
        # Uses self.__class__ so subclasses (Line, Close) copy to their own type.
        return self.__class__(self.start, self.end, relative=self.relative)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.start == other.start and self.end == other.end

    def __ne__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return not self == other

    def __imul__(self, other):
        # In-place matrix multiplication transforms both endpoints.
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            if self.start is not None:
                self.start *= other
            if self.end is not None:
                self.end *= other
        return self

    def __len__(self):
        return 2

    def __getitem__(self, item):
        if item == 0:
            return self.start
        elif item == 1:
            return self.end
        else:
            raise IndexError

    def npoint(self, positions):
        """Interpolate points along the line; numpy-accelerated when available."""
        try:
            import numpy as np

            xy = np.empty(shape=(len(positions), 2), dtype=float)
            xy[:, 0] = np.interp(positions, [0, 1], [self.start.x, self.end.x])
            xy[:, 1] = np.interp(positions, [0, 1], [self.start.y, self.end.y])
            return xy
        except ImportError:
            return [Point.towards(self.start, self.end, pos) for pos in positions]

    def length(self, error=None, min_depth=None):
        """Exact Euclidean length of the line (0 if either endpoint is missing)."""
        if self.start is not None and self.end is not None:
            return Point.distance(self.end, self.start)
        else:
            return 0

    def closest_segment_point(self, p, respect_bounds=True):
        """Gives the point on the line closest to the given point.

        :param p: the point (indexable x, y) to project onto this line.
        :param respect_bounds: clamp the projection to the segment's extent.
        :return: Point on the line closest to p.
        """
        a = self.start
        b = self.end
        v_ap_x = p[0] - a.x
        v_ap_y = p[1] - a.y
        v_ab_x = b.x - a.x
        v_ab_y = b.y - a.y
        sq_distance_ab = v_ab_x * v_ab_x + v_ab_y * v_ab_y
        ab_ap_product = v_ab_x * v_ap_x + v_ab_y * v_ap_y
        if sq_distance_ab == 0:
            # Degenerate (zero-length) line: the closest point is the line's
            # own location. (Fixed: previously returned the int 0, which
            # violated the documented Point return contract and crashed any
            # caller accessing .x/.y on the result.)
            return Point(a)
        amount = ab_ap_product / float(sq_distance_ab)
        if respect_bounds:
            if amount > 1:
                amount = 1
            if amount < 0:
                amount = 0
        return self.point(amount)

    def d(self, current_point=None, relative=None, smooth=None):
        # Concrete subclasses (Line, Close) supply the path_d fragment.
        raise NotImplementedError
class Close(Linear):
    """Represents closepath commands.

    A Close at the end of a shape closes that shape. A single closed flag is
    insufficient in general: multi-part shapes may close (or not close)
    several times, so the close is modeled as its own segment.
    """

    def d(self, current_point=None, relative=None, smooth=None):
        """Return the path_d fragment ("Z"/"z") for this closepath."""
        use_absolute = (
            current_point is None
            or (relative is None and self.relative)
            or (relative is not None and not relative)
        )
        return "Z" if use_absolute else "z"
class Line(Linear):
    """Represents lineto commands."""

    def d(self, current_point=None, relative=None, smooth=None):
        """Return the path_d fragment ("L"/"l") for this line."""
        use_absolute = (
            current_point is None
            or (relative is None and self.relative)
            or (relative is not None and not relative)
        )
        if use_absolute:
            return "L %s" % self.end
        return "l %s" % (self.end - current_point)
class QuadraticBezier(Curve):
    """Represents Quadratic Bezier commands."""

    def __init__(self, start, control, end, **kwargs):
        Curve.__init__(self, start, end, **kwargs)
        # Single control Point of the quadratic curve.
        self.control = Point(control) if control is not None else None

    def __repr__(self):
        return "QuadraticBezier(start=%s, control=%s, end=%s)" % (
            repr(self.start),
            repr(self.control),
            repr(self.end),
        )

    def __copy__(self):
        return QuadraticBezier(
            self.start,
            self.control,
            self.end,
            relative=self.relative,
            smooth=self.smooth,
        )

    def __eq__(self, other):
        if not isinstance(other, QuadraticBezier):
            return NotImplemented
        return (
            self.start == other.start
            and self.end == other.end
            and self.control == other.control
        )

    def __ne__(self, other):
        if not isinstance(other, QuadraticBezier):
            return NotImplemented
        return not self == other

    def __imul__(self, other):
        # In-place matrix multiplication transforms all three points.
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            if self.start is not None:
                self.start *= other
            if self.control is not None:
                self.control *= other
            if self.end is not None:
                self.end *= other
        return self

    def __len__(self):
        # Three indexable points: start, control, end.
        return 3

    def __getitem__(self, item):
        if item == 0:
            return self.start
        elif item == 1:
            return self.control
        elif item == 2:
            return self.end
        raise IndexError

    def npoint(self, positions):
        """Calculate the x,y position at a certain position of the path. `pos` may be a
        float or a NumPy array."""
        x0, y0 = self.start
        x1, y1 = self.control
        x2, y2 = self.end

        def _compute_point(position):
            # compute factors
            n_pos = 1 - position
            pos_2 = position * position
            n_pos_2 = n_pos * n_pos
            n_pos_pos = n_pos * position
            # Quadratic Bezier basis: B(t) = (1-t)^2 P0 + 2(1-t)t P1 + t^2 P2
            return (
                n_pos_2 * x0 + 2 * n_pos_pos * x1 + pos_2 * x2,
                n_pos_2 * y0 + 2 * n_pos_pos * y1 + pos_2 * y2,
            )

        try:
            import numpy as np

            xy = np.empty(shape=(len(positions), 2))
            xy[:, 0], xy[:, 1] = _compute_point(np.array(positions))
            return xy
        except ImportError:
            # numpy unavailable: compute pointwise.
            return [Point(*_compute_point(position)) for position in positions]

    def bbox(self):
        """
        Returns the bounding box for the quadratic bezier curve.
        """
        # The x-extremum is where dB/dt = 0: t = (x0 - x1) / (x0 - 2*x1 + x2).
        n = self.start.x - self.control.x
        d = self.start.x - 2 * self.control.x + self.end.x
        if d != 0:
            t = n / float(d)
        else:
            t = 0.5
        if 0 < t < 1:
            x_values = [self.start.x, self.end.x, self.point(t).x]
        else:
            x_values = [self.start.x, self.end.x]
        # Same derivation for the y-extremum.
        n = self.start.y - self.control.y
        d = self.start.y - 2 * self.control.y + self.end.y
        if d != 0:
            t = n / float(d)
        else:
            t = 0.5
        if 0 < t < 1:
            y_values = [self.start.y, self.end.y, self.point(t).y]
        else:
            y_values = [self.start.y, self.end.y]
        return min(x_values), min(y_values), max(x_values), max(y_values)

    def length(self, error=None, min_depth=None):
        """Calculate the length of the path up to a certain position"""
        a = self.start - 2 * self.control + self.end
        b = 2 * (self.control - self.start)
        try:
            # For an explanation of this case, see
            # http://www.malczak.info/blog/quadratic-bezier-curve-length/
            A = 4 * (a.real * a.real + a.imag * a.imag)
            B = 4 * (a.real * b.real + a.imag * b.imag)
            C = b.real * b.real + b.imag * b.imag

            Sabc = 2 * sqrt(A + B + C)
            A2 = sqrt(A)
            A32 = 2 * A * A2
            C2 = 2 * sqrt(C)
            BA = B / A2

            s = (
                A32 * Sabc
                + A2 * B * (Sabc - C2)
                + (4 * C * A - B * B) * log((2 * A2 + BA + Sabc) / (BA + C2))
            ) / (4 * A32)
        except (ZeroDivisionError, ValueError):
            # Closed form failed (degenerate/near-linear curve); use the
            # approximations below instead.
            # a_dot_b = a.real * b.real + a.imag * b.imag
            if abs(a) < 1e-10:
                s = abs(b)
            else:
                k = abs(b) / abs(a)
                if k >= 2:
                    s = abs(b) - abs(a)
                else:
                    s = abs(a) * (k * k / 2 - k + 1)
        return s

    def is_smooth_from(self, previous):
        """Checks if this segment would be a smooth segment following the previous"""
        if isinstance(previous, QuadraticBezier):
            return self.start == previous.end and (self.control - self.start) == (
                previous.end - previous.control
            )
        else:
            return self.control == self.start

    def d(self, current_point=None, relative=None, smooth=None):
        # Smooth (T/t) form omits the control point; otherwise Q/q is emitted.
        if (smooth is None and self.smooth) or (smooth is not None and smooth):
            if (
                current_point is None
                or (relative is None and self.relative)
                or (relative is not None and not relative)
            ):
                return "T %s" % self.end
            else:
                return "t %s" % (self.end - current_point)
        else:
            if (
                current_point is None
                or (relative is None and self.relative)
                or (relative is not None and not relative)
            ):
                return "Q %s %s" % (self.control, self.end)
            else:
                return "q %s %s" % (
                    self.control - current_point,
                    self.end - current_point,
                )
class CubicBezier(Curve):
    """Represents Cubic Bezier commands."""

    def __init__(self, start, control1, control2, end, **kwargs):
        """
        :param start: start Point of the curve.
        :param control1: first control Point.
        :param control2: second control Point.
        :param end: end Point of the curve.
        """
        Curve.__init__(self, start, end, **kwargs)
        self.control1 = Point(control1) if control1 is not None else None
        # Fixed: the None-guard previously tested control1, so constructing
        # with control2=None and a non-None control1 crashed inside Point().
        self.control2 = Point(control2) if control2 is not None else None

    def __repr__(self):
        return "CubicBezier(start=%s, control1=%s, control2=%s, end=%s)" % (
            repr(self.start),
            repr(self.control1),
            repr(self.control2),
            repr(self.end),
        )

    def __copy__(self):
        return CubicBezier(
            self.start,
            self.control1,
            self.control2,
            self.end,
            relative=self.relative,
            smooth=self.smooth,
        )

    def __eq__(self, other):
        if not isinstance(other, CubicBezier):
            return NotImplemented
        return (
            self.start == other.start
            and self.end == other.end
            and self.control1 == other.control1
            and self.control2 == other.control2
        )

    def __ne__(self, other):
        if not isinstance(other, CubicBezier):
            return NotImplemented
        return not self == other

    def __imul__(self, other):
        # In-place matrix multiplication transforms all four points.
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            if self.start is not None:
                self.start *= other
            if self.control1 is not None:
                self.control1 *= other
            if self.control2 is not None:
                self.control2 *= other
            if self.end is not None:
                self.end *= other
        return self

    def __len__(self):
        # Four indexable points: start, control1, control2, end.
        return 4

    def __getitem__(self, item):
        if item == 0:
            return self.start
        elif item == 1:
            return self.control1
        elif item == 2:
            return self.control2
        elif item == 3:
            return self.end
        else:
            raise IndexError

    def reverse(self):
        """Reverse the curve; the control points must swap as well."""
        PathSegment.reverse(self)
        c2 = self.control2
        self.control2 = self.control1
        self.control1 = c2

    def npoint(self, positions):
        """Calculate the x,y position at a certain position of the path. `pos` may be a
        float or a NumPy array."""
        x0, y0 = self.start
        x1, y1 = self.control1
        x2, y2 = self.control2
        x3, y3 = self.end

        def _compute_point(position):
            # compute factors
            # Cubic Bezier basis:
            # B(t) = (1-t)^3 P0 + 3(1-t)^2 t P1 + 3(1-t)t^2 P2 + t^3 P3
            pos_3 = position * position * position
            n_pos = 1 - position
            n_pos_3 = n_pos * n_pos * n_pos
            pos_2_n_pos = position * position * n_pos
            n_pos_2_pos = n_pos * n_pos * position
            return (
                n_pos_3 * x0 + 3 * (n_pos_2_pos * x1 + pos_2_n_pos * x2) + pos_3 * x3,
                n_pos_3 * y0 + 3 * (n_pos_2_pos * y1 + pos_2_n_pos * y2) + pos_3 * y3,
            )

        try:
            import numpy as np

            xy = np.empty(shape=(len(positions), 2))
            xy[:, 0], xy[:, 1] = _compute_point(np.array(positions))
            return xy
        except ImportError:
            # numpy unavailable: compute pointwise.
            return [Point(*_compute_point(position)) for position in positions]

    def bbox(self):
        """returns the tight fitting bounding box of the bezier curve.
        Code by:
        https://github.com/mathandy/svgpathtools
        """
        xmin, xmax = self._real_minmax(0)
        ymin, ymax = self._real_minmax(1)
        return xmin, ymin, xmax, ymax

    def _real_minmax(self, v):
        """returns the minimum and maximum for a real cubic bezier, with a non-zero denom
        Code by:
        https://github.com/mathandy/svgpathtools

        :param v: coordinate index (0 for x, 1 for y).
        """
        local_extremizers = [0, 1]
        a = [c[v] for c in self]
        denom = a[0] - 3 * a[1] + 3 * a[2] - a[3]
        if abs(denom) >= 1e-12:
            # Solve dB/dt = 0 via the quadratic formula for interior extrema.
            delta = (
                a[1] * a[1] - (a[0] + a[1]) * a[2] + a[2] * a[2] + (a[0] - a[1]) * a[3]
            )
            if delta >= 0:  # otherwise no local extrema
                sqdelta = sqrt(delta)
                tau = a[0] - 2 * a[1] + a[2]
                r1 = (tau + sqdelta) / denom
                r2 = (tau - sqdelta) / denom
                if 0 < r1 < 1:
                    local_extremizers.append(r1)
                if 0 < r2 < 1:
                    local_extremizers.append(r2)
        else:
            # Degenerate (effectively quadratic) derivative: sample midpoint.
            local_extremizers.append(0.5)
        local_extrema = [self.point(t)[v] for t in local_extremizers]
        return min(local_extrema), max(local_extrema)

    def _length_scipy(self, error=ERROR):
        # Integrate |B'(t)| over [0, 1] for the exact arc length.
        from scipy.integrate import quad

        p0 = complex(*self.start)
        p1 = complex(*self.control1)
        p2 = complex(*self.control2)
        p3 = complex(*self.end)

        def _abs_derivative(t):
            return abs(
                3 * (p1 - p0) * (1 - t) ** 2
                + 6 * (p2 - p1) * (1 - t) * t
                + 3 * (p3 - p2) * t ** 2
            )

        return quad(_abs_derivative, 0.0, 1.0, epsabs=error, limit=1000)[0]

    def _length_default(self, error=ERROR, min_depth=MIN_DEPTH):
        # Recursive straight-line approximation (no scipy required).
        return self._line_length(0, 1, error, min_depth)

    def length(self, error=ERROR, min_depth=MIN_DEPTH):
        """Calculate the length of the path up to a certain position"""
        try:
            return self._length_scipy(error)
        except Exception:  # Fallback on any failure, e.g. scipy not installed.
            return self._length_default(error, min_depth)

    def is_smooth_from(self, previous):
        """Checks if this segment would be a smooth segment following the previous"""
        if isinstance(previous, CubicBezier):
            return self.start == previous.end and (self.control1 - self.start) == (
                previous.end - previous.control2
            )
        else:
            return self.control1 == self.start

    def d(self, current_point=None, relative=None, smooth=None):
        # Smooth (S/s) form omits control1; otherwise C/c is emitted.
        if (smooth is None and self.smooth) or (smooth is not None and smooth):
            if (
                current_point is None
                or (relative is None and self.relative)
                or (relative is not None and not relative)
            ):
                return "S %s %s" % (self.control2, self.end)
            else:
                return "s %s %s" % (
                    self.control2 - current_point,
                    self.end - current_point,
                )
        else:
            if (
                current_point is None
                or (relative is None and self.relative)
                or (relative is not None and not relative)
            ):
                return "C %s %s %s" % (self.control1, self.control2, self.end)
            else:
                return "c %s %s %s" % (
                    self.control1 - current_point,
                    self.control2 - current_point,
                    self.end - current_point,
                )
class Arc(Curve):
    def __init__(self, *args, **kwargs):
        """
        Represents Arc commands.
        Arc objects can take different parameters to create arcs.
        Since we expect taking in SVG parameters. We accept SVG parameterization which is:
        start, rx, ry, rotation, arc_flag, sweep_flag, end.
        To do matrix transitions, the native parameterization is start, end, center, prx, pry, sweep
        'start, end, center, prx, pry' are points and sweep amount is a t value in tau radians.
        If points are modified by an affine transformation, the arc is transformed.
        There is a special case for when the scale factor inverts, it inverts the sweep.
        Note: t-values are not angles from center in elliptical arcs. These are the same thing in
        circular arcs. But, here t is a parameterization around the ellipse, as if it were a circle.
        The position on the arc is (a * cos(t), b * sin(t)). If r-major was 0 for example. The
        positions would all fall on the x-axis. And the angle from center would all be either 0 or
        tau/2. However, since t is the parameterization we can conceptualize it as a position on a
        circle which is then scaled and rotated by a matrix.
        prx is the point at t 0 in the ellipse.
        pry is the point at t tau/4 in the ellipse.
        prx -> center -> pry should form a right triangle.
        The rotation can be defined as the angle from center to prx. Since prx is located at
        t(0) its deviation can only be the result of a rotation.
        Sweep is a value in t.
        The sweep angle can be a value greater than tau and less than -tau.
        However if this is the case, conversion back to Path.d() is expected to fail.
        We can denote these arc events but not as a single command.
        start_t + sweep = end_t
        """
        Curve.__init__(self, **kwargs)
        self.center = None
        self.prx = None
        self.pry = None
        self.sweep = None
        if len(args) == 6 and isinstance(args[1], complex):
            self._svg_complex_parameterize(*args)
            return
        elif len(kwargs) == 6 and "rotation" in kwargs:
            self._svg_complex_parameterize(**kwargs)
            return
        elif len(args) == 7:
            # This is an svg parameterized call.
            # A: rx ry x-axis-rotation large-arc-flag sweep-flag x y
            self._svg_parameterize(
                args[0], args[1], args[2], args[3], args[4], args[5], args[6]
            )
            return
        if (
            "left" in kwargs
            and "right" in kwargs
            and "top" in kwargs
            and "bottom" in kwargs
        ):
            # Axis-aligned bounding box form: derive center and axis points.
            left = kwargs["left"]
            right = kwargs["right"]
            top = kwargs["top"]
            bottom = kwargs["bottom"]
            self.center = Point((left + right) / 2.0, (top + bottom) / 2.0)
            rx = (right - left) / 2.0
            ry = (bottom - top) / 2.0
            self.prx = Point(self.center.x + rx, self.center.y)
            self.pry = Point(self.center.x, self.center.y + ry)
        # Positional native parameterization:
        # start, end, center, prx, pry, sweep.
        len_args = len(args)
        if len_args > 0:
            if args[0] is not None:
                self.start = Point(args[0])
        if len_args > 1:
            if args[1] is not None:
                self.end = Point(args[1])
        if len_args > 2:
            if args[2] is not None:
                self.center = Point(args[2])
        if len_args > 3:
            if args[3] is not None:
                self.prx = Point(args[3])
        if len_args > 4:
            if args[4] is not None:
                self.pry = Point(args[4])
        if len_args > 5:
            self.sweep = args[5]
            return  # The args gave us everything.
        if "start" in kwargs:
            self.start = Point(kwargs["start"])
        if "end" in kwargs:
            self.end = Point(kwargs["end"])
        if "center" in kwargs:
            self.center = Point(kwargs["center"])
        if "prx" in kwargs:
            self.prx = Point(kwargs["prx"])
        if "pry" in kwargs:
            self.pry = Point(kwargs["pry"])
        if "sweep" in kwargs:
            self.sweep = kwargs["sweep"]
        cw = True  # Clockwise default. (sometimes needed)
        if self.start is not None and self.end is not None and self.center is None:
            # Start and end, but no center.
            # Solutions require a radius, a control point, or a bulge
            control = None
            sagitta = None
            if "bulge" in kwargs:
                bulge = float(kwargs["bulge"])
                sagitta = bulge * self.start.distance_to(self.end) / 2.0
            elif "sagitta" in kwargs:
                sagitta = float(kwargs["sagitta"])
            if sagitta is not None:
                # Sagitta gives a third on-arc point perpendicular to the chord midpoint.
                control = Point.towards(self.start, self.end, 0.5)
                angle = self.start.angle_to(self.end)
                control = control.polar_to(angle - tau / 4.0, sagitta)
            if "control" in kwargs:  # Control is any additional point on the arc.
                control = Point(kwargs["control"])
            if control is not None:
                # Circumcenter of (start, control, end) by intersecting the
                # perpendicular bisectors of the two chords.
                delta_a = control - self.start
                delta_b = self.end - control
                if abs(delta_a.x) > 1e-12:
                    slope_a = delta_a.y / delta_a.x
                else:
                    slope_a = float("inf")
                if abs(delta_b.x) > 1e-12:
                    slope_b = delta_b.y / delta_b.x
                else:
                    slope_b = float("inf")
                ab_mid = Point.towards(self.start, control, 0.5)
                bc_mid = Point.towards(control, self.end, 0.5)
                if abs(delta_a.y) < 1e-12:  # slope_a == 0
                    cx = ab_mid.x
                    if abs(delta_b.x) < 1e-12:  # slope_b == inf
                        cy = bc_mid.y
                    else:
                        cy = bc_mid.y + (bc_mid.x - cx) / slope_b
                elif abs(delta_b.y) < 1e-12:  # slope_b == 0
                    cx = bc_mid.x
                    # Fix: test delta_a.x (slope_a == inf), mirroring the
                    # first branch; the old delta_a.y test was unreachable
                    # here and only worked via division by float("inf").
                    if abs(delta_a.x) < 1e-12:  # slope_a == inf
                        cy = ab_mid.y
                    else:
                        cy = ab_mid.y + (ab_mid.x - cx) / slope_a
                elif abs(delta_a.x) < 1e-12:  # slope_a == inf
                    cy = ab_mid.y
                    cx = slope_b * (bc_mid.y - cy) + bc_mid.x
                elif abs(delta_b.x) < 1e-12:  # slope_b == inf
                    cy = bc_mid.y
                    cx = slope_a * (ab_mid.y - cy) + ab_mid.x
                elif abs(slope_a - slope_b) < 1e-12:
                    # Degenerate: collinear points, no unique circle.
                    cx = ab_mid.x
                    cy = ab_mid.y
                else:
                    cx = (
                        slope_a * slope_b * (ab_mid.y - bc_mid.y)
                        - slope_a * bc_mid.x
                        + slope_b * ab_mid.x
                    ) / (slope_b - slope_a)
                    cy = ab_mid.y - (cx - ab_mid.x) / slope_a
                self.center = Point(cx, cy)
                cw = bool(Point.orientation(self.start, control, self.end) == 2)
            elif "r" in kwargs:
                # Radius form: two candidate centers; pick by orientation/ccw flag.
                r = kwargs["r"]
                mid = Point(
                    (self.start.x + self.end.x) / 2.0, (self.start.y + self.end.y) / 2.0
                )
                q = Point.distance(self.start, self.end)
                hq = q / 2.0
                if r < hq:
                    kwargs["r"] = r = hq  # Correct potential math domain error.
                self.center = Point(
                    mid.x + sqrt(r * r - hq * hq) * (self.start.y - self.end.y) / q,
                    mid.y + sqrt(r * r - hq * hq) * (self.end.x - self.start.x) / q,
                )
                cw = bool(Point.orientation(self.start, self.center, self.end) == 1)
                if ("ccw" in kwargs and kwargs["ccw"] and cw) or not cw:
                    # ccw arg exists, is true, and we found the cw center, or we didn't find the cw center.
                    self.center = Point(
                        mid.x - sqrt(r * r - hq * hq) * (self.start.y - self.end.y) / q,
                        mid.y - sqrt(r * r - hq * hq) * (self.end.x - self.start.x) / q,
                    )
            elif "rx" in kwargs and "ry" in kwargs:
                # This formulation will assume p1 and p2 are both axis aligned.
                # rx = kwargs["rx"]
                # ry = kwargs["ry"]
                # We will assume rx == abs(self.start.x - self.end.x)
                self.sweep = tau / 4.0
                self.center = Point(self.start.x, self.end.y)
                cw = bool(Point.orientation(self.start, self.center, self.end) == 1)
                if "scooped" in kwargs and kwargs["scooped"]:
                    self.sweep = -self.sweep
                    cw = not cw
                if ("ccw" in kwargs and kwargs["ccw"] and cw) or not cw:
                    self.center = Point(self.end.x, self.start.y)
        if self.center is None:
            raise ValueError("Not enough values to solve for center.")
        # Derive the axis points prx/pry from whatever radii were given.
        if "r" in kwargs:
            r = kwargs["r"]
            if self.prx is None:
                self.prx = Point(self.center.x + r, self.center.y)
            if self.pry is None:
                self.pry = Point(self.center.x, self.center.y + r)
        if "rx" in kwargs:
            rx = kwargs["rx"]
            if self.prx is None:
                if "rotation" in kwargs:
                    theta = kwargs["rotation"]
                    self.prx = Point.polar(self.center, theta, rx)
                else:
                    self.prx = Point(self.center.x + rx, self.center.y)
        if "ry" in kwargs:
            ry = kwargs["ry"]
            if self.pry is None:
                if "rotation" in kwargs:
                    theta = kwargs["rotation"]
                    theta += tau / 4.0
                    self.pry = Point.polar(self.center, theta, ry)
                else:
                    self.pry = Point(self.center.x, self.center.y + ry)
        # No explicit radii: fall back to circular radii from start/end.
        if self.start is not None and (self.prx is None or self.pry is None):
            radius_s = Point.distance(self.center, self.start)
            self.prx = Point(self.center.x + radius_s, self.center.y)
            self.pry = Point(self.center.x, self.center.y + radius_s)
        if self.end is not None and (self.prx is None or self.pry is None):
            radius_e = Point.distance(self.center, self.end)
            self.prx = Point(self.center.x + radius_e, self.center.y)
            self.pry = Point(self.center.x, self.center.y + radius_e)
        if self.sweep is None and self.start is not None and self.end is not None:
            start_t = self.get_start_t()
            end_t = self.get_end_t()
            self.sweep = end_t - start_t
            if "ccw" in kwargs:
                cw = not bool(kwargs["ccw"])
            if cw and self.sweep < 0:
                self.sweep += tau
            if not cw and self.sweep > 0:
                self.sweep -= tau
        if self.sweep is not None and self.start is not None and self.end is None:
            start_t = self.get_start_t()
            end_t = start_t + self.sweep
            self.end = self.point_at_t(end_t)
        if self.sweep is not None and self.start is None and self.end is not None:
            end_t = self.get_end_t()
            start_t = end_t - self.sweep
            # Fix: solve for the missing *start* point. Previously this
            # overwrote self.end, leaving start as None.
            self.start = self.point_at_t(start_t)
def __repr__(self):
return "Arc(start=%s, end=%s, center=%s, prx=%s, pry=%s, sweep=%s)" % (
repr(self.start),
repr(self.end),
repr(self.center),
repr(self.prx),
repr(self.pry),
self.sweep,
)
def __copy__(self):
return Arc(
self.start,
self.end,
self.center,
self.prx,
self.pry,
self.sweep,
relative=self.relative,
)
def __eq__(self, other):
if not isinstance(other, Arc):
return NotImplemented
return (
self.start == other.start
and self.end == other.end
and self.prx == other.prx
and self.pry == other.pry
and self.center == other.center
and self.sweep == other.sweep
)
    def __ne__(self, other):
        # Inverse of __eq__; NotImplemented lets Python try the reflected op.
        if not isinstance(other, Arc):
            return NotImplemented
        return not self == other
    def __imul__(self, other):
        """In-place affine transform: maps all five defining points.

        A negative scale on either axis inverts the sweep direction; a
        negative scale on both axes flips it twice, leaving it unchanged.
        """
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            if self.start is not None:
                self.start *= other
            if self.center is not None:
                self.center *= other
            if self.end is not None:
                self.end *= other
            if self.prx is not None:
                self.prx *= other
            if self.pry is not None:
                self.pry *= other
            if other.value_scale_x() < 0:
                self.sweep = -self.sweep
            if other.value_scale_y() < 0:
                self.sweep = -self.sweep
        return self
    def __len__(self):
        # Fixed arity: (start, end, center, prx, pry) — see __getitem__.
        return 5
    def __getitem__(self, item):
        """Index the arc like the 5-tuple (start, end, center, prx, pry)."""
        if item == 0:
            return self.start
        elif item == 1:
            return self.end
        elif item == 2:
            return self.center
        elif item == 3:
            return self.prx
        elif item == 4:
            return self.pry
        # Note: unlike tuples, negative indices are not supported.
        raise IndexError
    @property
    def theta(self):
        """legacy property: start t-angle of the arc as positive degrees."""
        return Angle.radians(self.get_start_t()).as_positive_degrees
    @property
    def delta(self):
        """legacy property: sweep amount of the arc in degrees."""
        return Angle.radians(self.sweep).as_degrees
    def reverse(self):
        # Base reversal (presumably swaps start/end — see PathSegment.reverse),
        # then negate the sweep so the arc traverses the same points backwards.
        PathSegment.reverse(self)
        self.sweep = -self.sweep
    def npoint(self, positions):
        """Return points on the arc at the given [0, 1] curve positions.

        Uses the vectorized numpy path when numpy is importable; otherwise
        evaluates each position in pure python, pinning the exact endpoints
        for positions 0 and 1.
        """
        try:
            import numpy as np
            return self._points_numpy(np.array(positions))
        except ImportError:
            if self.start == self.end and self.sweep == 0:
                # This is equivalent of omitting the segment
                return [self.start] * len(positions)
            start_t = self.get_start_t()
            return [
                self.start
                if pos == 0
                else self.end
                if pos == 1
                else self.point_at_t(start_t + self.sweep * pos)
                for pos in positions
            ]
    def _points_numpy(self, positions):
        """Vectorized version of `point()`.
        :param positions: 1D numpy array of float in [0, 1]
        :return: 1D numpy array of complex
        """
        import numpy as np
        xy = np.empty((len(positions), 2), dtype=float)
        if self.start == self.end and self.sweep == 0:
            # Degenerate arc: every position maps to the start point.
            xy[:, 0], xy[:, 1] = self.start
        else:
            # Parametric ellipse: rotate (a*cos t, b*sin t) and offset by center.
            t = self.get_start_t() + self.sweep * positions
            rotation = self.get_rotation()
            a = self.rx
            b = self.ry
            cx = self.center.x
            cy = self.center.y
            cos_rot = cos(rotation)
            sin_rot = sin(rotation)
            cos_t = np.cos(t)
            sin_t = np.sin(t)
            xy[:, 0] = cx + a * cos_t * cos_rot - b * sin_t * sin_rot
            xy[:, 1] = cy + a * cos_t * sin_rot + b * sin_t * cos_rot
            # ensure clean endings
            xy[positions == 0, :] = list(self.start)
            xy[positions == 1, :] = list(self.end)
        return xy
    def _integral_length(self):
        # Pure-python arc length: Riemann sum of the ellipse arc-length
        # integrand b * sqrt(1 - (1 - a^2/b^2) sin^2 t) over the sweep.
        def ellipse_part_integral(t1, t2, a, b, n=100000):
            # function to integrate
            def f(t):
                sint = sin(t)
                return sqrt(1 - (1 - (a * a) / (b * b)) * sint * sint)
            start = min(t1, t2)
            seg_len = abs(t1 - t2) / n
            return b * sum(f(start + seg_len * i) * seg_len for i in range(1, n + 1))
        start_angle = self.get_start_t()
        end_angle = start_angle + self.sweep
        return ellipse_part_integral(start_angle, end_angle, self.rx, self.ry)
def _exact_length(self):
"""scipy is not a dependency. However, if scipy exists this function will find the
exact arc length. By default .length() delegates to here and on failure uses the
fallback method."""
from scipy.special import ellipeinc
a = self.rx
b = self.ry
adb = a / b
m = 1 - adb * adb
phi = self.get_start_t()
d1 = ellipeinc(phi, m)
phi = phi + self.sweep
d2 = ellipeinc(phi, m)
return b * abs(d2 - d1)
def length(self, error=ERROR, min_depth=MIN_DEPTH):
"""The length of an elliptical arc segment requires numerical
integration, and in that case it's simpler to just do a geometric
approximation, as for cubic bezier curves.
"""
if self.sweep == 0:
return 0
if self.start == self.end and self.sweep == 0:
# This is equivalent of omitting the segment
return 0
a = self.rx
b = self.ry
d = abs(a - b)
if d < ERROR: # This is a circle.
return abs(self.rx * self.sweep)
try:
return self._exact_length()
except: # Fallback on any failure
return self._line_length(error=error, min_depth=min_depth)
def _svg_complex_parameterize(
self, start, radius, rotation, arc_flag, sweep_flag, end
):
"""Parameterization with complex radius and having rotation factors."""
self._svg_parameterize(
Point(start),
radius.real,
radius.imag,
rotation,
arc_flag,
sweep_flag,
Point(end),
)
    def _svg_parameterize(
        self, start, rx, ry, rotation, large_arc_flag, sweep_flag, end
    ):
        """Conversion from svg parameterization to our chosen native form.
        Follows the endpoint-to-center conversion in:
        http://www.w3.org/TR/SVG/implnote.html#ArcImplementationNotes"""
        large_arc_flag = bool(large_arc_flag)
        sweep_flag = bool(sweep_flag)
        start = Point(start)
        self.start = start
        end = Point(end)
        self.end = end
        if start == end or rx == 0 or ry == 0:
            # If start is equal to end, there are infinite number of circles so these void out.
            # We still permit this kind of arc, but SVG parameterization cannot be used to achieve it.
            self.sweep = 0
            self.prx = Point(start)
            self.pry = Point(start)
            self.center = Point(start)
            return
        # Step 1 (F.6.5.1): transform midpoint into the rotated frame.
        cosr = cos(radians(rotation))
        sinr = sin(radians(rotation))
        dx = (start.real - end.real) / 2
        dy = (start.imag - end.imag) / 2
        x1prim = cosr * dx + sinr * dy
        x1prim_sq = x1prim * x1prim
        y1prim = -sinr * dx + cosr * dy
        y1prim_sq = y1prim * y1prim
        rx_sq = rx * rx
        ry_sq = ry * ry
        # Correct out of range radii
        radius_check = (x1prim_sq / rx_sq) + (y1prim_sq / ry_sq)
        if radius_check > 1:
            rx *= sqrt(radius_check)
            ry *= sqrt(radius_check)
            rx_sq = rx * rx
            ry_sq = ry * ry
        # Step 2 (F.6.5.2): compute the transformed center (cxprim, cyprim).
        t1 = rx_sq * y1prim_sq
        t2 = ry_sq * x1prim_sq
        c = sqrt(abs((rx_sq * ry_sq - t1 - t2) / (t1 + t2)))
        if large_arc_flag == sweep_flag:
            c = -c
        cxprim = c * rx * y1prim / ry
        cyprim = -c * ry * x1prim / rx
        # Step 3 (F.6.5.3): transform the center back to the original frame.
        center = Point(
            (cosr * cxprim - sinr * cyprim) + ((start.real + end.real) / 2),
            (sinr * cxprim + cosr * cyprim) + ((start.imag + end.imag) / 2),
        )
        # Step 4 (F.6.5.5/6): angle between the start and end vectors.
        ux = (x1prim - cxprim) / rx
        uy = (y1prim - cyprim) / ry
        vx = (-x1prim - cxprim) / rx
        vy = (-y1prim - cyprim) / ry
        n = sqrt(ux * ux + uy * uy)
        p = ux
        # theta = degrees(acos(p / n))
        # if uy < 0:
        #     theta = -theta
        # theta = theta % 360
        n = sqrt((ux * ux + uy * uy) * (vx * vx + vy * vy))
        p = ux * vx + uy * vy
        d = p / n
        # In certain cases the above calculation can, through inaccuracies,
        # become just slightly out of range, f ex -1.0000000000000002.
        if d > 1.0:
            d = 1.0
        elif d < -1.0:
            d = -1.0
        delta = degrees(acos(d))
        if (ux * vy - uy * vx) < 0:
            delta = -delta
        delta = delta % 360
        if not sweep_flag:
            delta -= 360
        # built parameters, delta, theta, center
        rotate_matrix = Matrix()
        rotate_matrix.post_rotate(
            Angle.degrees(rotation).as_radians, center.x, center.y
        )
        self.center = center
        self.prx = Point(center.x + rx, center.y)
        self.pry = Point(center.x, center.y + ry)
        self.prx.matrix_transform(rotate_matrix)
        self.pry.matrix_transform(rotate_matrix)
        self.sweep = Angle.degrees(delta).as_radians
    def as_quad_curves(self, arc_required=None):
        """Yield QuadraticBezier approximations of this arc.

        :param arc_required: number of subdivisions; defaults to one curve
            per tau/12 of sweep.
        """
        if arc_required is None:
            sweep_limit = tau / 12.0
            arc_required = int(ceil(abs(self.sweep) / sweep_limit))
            if arc_required == 0:
                return
        t_slice = self.sweep / float(arc_required)
        current_t = self.get_start_t()
        p_start = self.start
        theta = self.get_rotation()
        cos_theta = cos(theta)
        sin_theta = sin(theta)
        a = self.rx
        b = self.ry
        cx = self.center.x
        cy = self.center.y
        for i in range(0, arc_required):
            next_t = current_t + t_slice
            mid_t = (next_t + current_t) / 2
            p_end = self.point_at_t(next_t)
            if i == arc_required - 1:
                # Pin the final piece to the exact arc end point.
                p_end = self.end
            cos_mid_t = cos(mid_t)
            sin_mid_t = sin(mid_t)
            # Control point scaled so the quadratic passes near the arc midpoint.
            alpha = (4.0 - cos(t_slice)) / 3.0
            px = cx + alpha * (a * cos_mid_t * cos_theta - b * sin_mid_t * sin_theta)
            py = cy + alpha * (a * cos_mid_t * sin_theta + b * sin_mid_t * cos_theta)
            yield QuadraticBezier(p_start, (px, py), p_end)
            p_start = p_end
            current_t = next_t
    def as_cubic_curves(self, arc_required=None):
        """Yield CubicBezier approximations of this arc.

        :param arc_required: number of subdivisions; defaults to one curve
            per tau/12 of sweep.
        """
        if arc_required is None:
            sweep_limit = tau / 12.0
            arc_required = int(ceil(abs(self.sweep) / sweep_limit))
            if arc_required == 0:
                return
        t_slice = self.sweep / float(arc_required)
        theta = self.get_rotation()
        rx = self.rx
        ry = self.ry
        p_start = self.start
        current_t = self.get_start_t()
        x0 = self.center.x
        y0 = self.center.y
        cos_theta = cos(theta)
        sin_theta = sin(theta)
        for i in range(0, arc_required):
            next_t = current_t + t_slice
            # Standard control-point scale for cubic approximation of an arc slice.
            alpha = sin(t_slice) * (sqrt(4 + 3 * pow(tan(t_slice / 2.0), 2)) - 1) / 3.0
            cos_start_t = cos(current_t)
            sin_start_t = sin(current_t)
            # Ellipse derivative at the slice start (tangent direction).
            ePrimen1x = -rx * cos_theta * sin_start_t - ry * sin_theta * cos_start_t
            ePrimen1y = -rx * sin_theta * sin_start_t + ry * cos_theta * cos_start_t
            cos_end_t = cos(next_t)
            sin_end_t = sin(next_t)
            p2En2x = x0 + rx * cos_end_t * cos_theta - ry * sin_end_t * sin_theta
            p2En2y = y0 + rx * cos_end_t * sin_theta + ry * sin_end_t * cos_theta
            p_end = (p2En2x, p2En2y)
            if i == arc_required - 1:
                # Pin the final piece to the exact arc end point.
                p_end = self.end
            # Ellipse derivative at the slice end.
            ePrimen2x = -rx * cos_theta * sin_end_t - ry * sin_theta * cos_end_t
            ePrimen2y = -rx * sin_theta * sin_end_t + ry * cos_theta * cos_end_t
            p_c1 = (p_start[0] + alpha * ePrimen1x, p_start[1] + alpha * ePrimen1y)
            p_c2 = (p_end[0] - alpha * ePrimen2x, p_end[1] - alpha * ePrimen2y)
            yield CubicBezier(p_start, p_c1, p_c2, p_end)
            p_start = Point(p_end)
            current_t = next_t
def is_circular(self):
a = self.rx
b = self.ry
return a == b
    @property
    def radius(self):
        """Legacy complex radius property
        Point will work like a complex for legacy reasons.
        """
        return Point(self.rx, self.ry)
    @property
    def rx(self):
        """Semi-axis length from center to prx (the t=0 axis point)."""
        return Point.distance(self.center, self.prx)
    @property
    def ry(self):
        """Semi-axis length from center to pry (the t=tau/4 axis point)."""
        return Point.distance(self.center, self.pry)
    def get_rotation(self):
        """Rotation of the ellipse: angle from center to prx (t=0 point)."""
        return Point.angle(self.center, self.prx)
    def get_start_angle(self):
        """
        :return: Angle from the center point to start point.
        """
        return self.angle_at_point(self.start)
    def get_end_angle(self):
        """
        :return: Angle from the center point to end point.
        """
        return self.angle_at_point(self.end)
    def get_start_t(self):
        """
        start t value in the ellipse.
        :return: t parameter of start point.
        """
        return self.t_at_point(self.point_at_angle(self.get_start_angle()))
    def get_end_t(self):
        """
        end t value in the ellipse.
        :return: t parameter of end point.
        """
        return self.t_at_point(self.point_at_angle(self.get_end_angle()))
    def point_at_angle(self, angle):
        """
        find the point on the ellipse from the center at the given angle.
        Note: For non-circular arcs this is different than point(t).
        :param angle: angle from center to find point
        :return: point found
        """
        angle -= self.get_rotation()
        a = self.rx
        b = self.ry
        if a == b:
            # Circular: angle and t parameter coincide.
            return self.point_at_t(angle)
        # Convert the geometric angle to the ellipse t parameter;
        # atan2 wraps to (-tau/4, tau/4], so shift by a half-turn in the
        # second/third quadrants.
        t = atan2(a * tan(angle), b)
        tau_1_4 = tau / 4.0
        tau_3_4 = 3 * tau_1_4
        if tau_3_4 >= abs(angle) % tau > tau_1_4:
            t += tau / 2.0
        return self.point_at_t(t)
    def angle_at_point(self, p):
        """
        find the angle to the point.
        :param p: point
        :return: angle to given point.
        """
        return self.center.angle_to(p)
    def t_at_point(self, p):
        """
        find the t parameter to at the point.
        :param p: point
        :return: t parameter to the given point.
        """
        angle = self.angle_at_point(p)
        angle -= self.get_rotation()
        a = self.rx
        b = self.ry
        # Same angle-to-t conversion as point_at_angle: atan2 wraps to
        # (-tau/4, tau/4], so shift by a half-turn for the far quadrants.
        t = atan2(a * tan(angle), b)
        tau_1_4 = tau / 4.0
        tau_3_4 = 3 * tau_1_4
        if tau_3_4 >= abs(angle) % tau > tau_1_4:
            t += tau / 2.0
        return t
def point_at_t(self, t):
"""
find the point that corresponds to given value t.
Where t=0 is the first point and t=tau is the final point.
In the case of a circle: t = angle.
:param t:
:return:
"""
rotation = self.get_rotation()
a = self.rx
b = self.ry
cx = self.center.x
cy = self.center.y
cos_rot = cos(rotation)
sin_rot = sin(rotation)
cos_t = cos(t)
sin_t = sin(t)
px = cx + a * cos_t * cos_rot - b * sin_t * sin_rot
py = cy + a * cos_t * sin_rot + b * sin_t * cos_rot
return Point(px, py)
    def get_ellipse(self):
        """Return the full Ellipse this arc lies on."""
        return Ellipse(self.center, self.rx, self.ry, self.get_rotation())
    def bbox(self):
        """Find the bounding box of a arc.
        Code from: https://github.com/mathandy/svgpathtools
        """
        if self.sweep == 0:
            return self.start.x, self.start.y, self.end.x, self.end.y
        phi = self.get_rotation().as_radians
        # Angles where the rotated ellipse reaches its x and y extrema.
        if cos(phi) == 0:
            atan_x = tau / 4.0
            atan_y = 0
        elif sin(phi) == 0:
            atan_x = 0
            atan_y = tau / 4.0
        else:
            rx, ry = self.rx, self.ry
            atan_x = atan(-(ry / rx) * tan(phi))
            atan_y = atan((ry / rx) / tan(phi))
        def angle_inv(ang, k):  # inverse of angle from Arc.derivative()
            return ((ang + (tau / 2.0) * k) * (360 / tau) - self.theta) / self.delta
        xtrema = [self.start.x, self.end.x]
        ytrema = [self.start.y, self.end.y]
        # Include every candidate extremum whose parameter falls inside the arc.
        for k in range(-4, 5):
            tx = angle_inv(atan_x, k)
            ty = angle_inv(atan_y, k)
            if 0 <= tx <= 1:
                xtrema.append(self.point(tx).x)
            if 0 <= ty <= 1:
                ytrema.append(self.point(ty).y)
        return min(xtrema), min(ytrema), max(xtrema), max(ytrema)
    def d(self, current_point=None, relative=None, smooth=None):
        """Return the SVG 'A'/'a' path-data command for this arc.

        Large-arc flag is set when the sweep exceeds a half turn; sweep flag
        reflects the sweep sign. ``smooth`` is accepted for interface parity
        but unused (arcs have no smooth form).
        """
        if (
            current_point is None
            or (relative is None and self.relative)
            or (relative is not None and not relative)
        ):
            return "A %G,%G %G %d,%d %s" % (
                self.rx,
                self.ry,
                self.get_rotation().as_degrees,
                int(abs(self.sweep) > (tau / 2.0)),
                int(self.sweep >= 0),
                self.end,
            )
        else:
            return "a %G,%G %G %d,%d %s" % (
                self.rx,
                self.ry,
                self.get_rotation().as_degrees,
                int(abs(self.sweep) > (tau / 2.0)),
                int(self.sweep >= 0),
                self.end - current_point,
            )
class Path(Shape, MutableSequence):
    """
    A Path is a Mutable sequence of path segments
    It is a generalized shape which can map out all the other shapes.
    Each PathSegment object maps a particular command. Each one exists only once in each path and every point contained
    within the object is also unique. We attempt to internally maintain some validity. Each end point should link
    to the following segments start point. And each close point should connect from the preceding segments endpoint to
    the last Move command.
    These are soft checks made only at the time of addition and some manipulations. Modifying the points of the segments
    can and will cause path invalidity. Some SVG invalid operations are permitted such as arcs longer than tau radians
    or beginning sequences without a move. The expectation is that these will eventually be used as part of a valid path
    so these fragment paths are permitted. In some cases these invalid paths will still have consistent path_d values,
    in other cases, there will be no valid methods to reproduce these.
    Instantiation formats:
    Path("d-string", keywords)
    Path(pathsegment1,...)
    Path(d="d-string", other keywords)
    """
    def __init__(self, *args, **kwargs):
        Shape.__init__(self, *args, **kwargs)
        # Cached total length and per-segment lengths; invalidated on mutation.
        self._length = None
        self._lengths = None
        self._segments = list()
        if len(args) != 1:
            # Multiple positional args: each must already be a PathSegment.
            for segment in args:
                if not isinstance(segment, PathSegment):
                    raise ValueError(
                        "Object not PathSegment when instantiating a Path: %s"
                        % segment.__class__.__name__
                    )
            self._segments.extend(args)
        else:
            # Single argument: dispatch on its type.
            s = args[0]
            if isinstance(s, Subpath):
                self._segments.extend(s.segments(transformed=False))
                Shape.__init__(self, s._path)
            elif isinstance(s, Shape):
                self._segments.extend(s.segments(transformed=False))
            elif isinstance(s, str):
                self._segments = list()
                self.parse(s)
            elif isinstance(s, tuple):
                # We have no guarantee of the validity of the source data
                self._segments.extend(s)
                self.validate_connections()
            elif isinstance(s, list):
                # We have no guarantee of the validity of the source data
                self._segments.extend(s)
                self.validate_connections()
            elif isinstance(s, PathSegment):
                self._segments.append(s)
        if SVG_ATTR_DATA in self.values:
            # Not sure what the purpose of pathd_loaded is.
            # It is only set and checked here and you cannot have "d" attribute more than once anyway
            if not self.values.get("pathd_loaded", False):
                self.parse(self.values[SVG_ATTR_DATA])
                self.values["pathd_loaded"] = True
def __copy__(self):
path = Path(self)
segs = path._segments
for i in range(0, len(segs)):
segs[i] = copy(segs[i])
return path
    def __getitem__(self, index):
        # Direct segment access; supports ints and slices like a list.
        return self._segments[index]
    def _validate_subpath(self, index):
        """ensure the subpath containing this index is valid."""
        if index < 0 or index + 1 >= len(self._segments):
            return  # This connection doesn't exist.
        # Scan forward for the subpath terminator.
        for j in range(index, len(self._segments)):
            close_search = self._segments[j]
            if isinstance(close_search, Move):
                return  # Not a closed path, subpath is valid.
            if isinstance(close_search, Close):
                # Re-point the Close at the subpath's opening Move.
                for k in range(index, -1, -1):
                    move_search = self._segments[k]
                    if isinstance(move_search, Move):
                        self._segments[j].end = Point(move_search.end)
                        return
                # No Move found: close back to the first segment's end.
                self._segments[j].end = Point(self._segments[0].end)
                return
    def _validate_move(self, index):
        """ensure the next closed point from this index points to a valid location."""
        for i in range(index + 1, len(self._segments)):
            segment = self._segments[i]
            if isinstance(segment, Move):
                return  # Not a closed path, the move is valid.
            if isinstance(segment, Close):
                # The first Close after this Move must return to it.
                segment.end = Point(self._segments[index].end)
                return
    def _validate_close(self, index):
        """ensure the close element at this position correctly links to the previous move"""
        for i in range(index, -1, -1):
            segment = self._segments[i]
            if isinstance(segment, Move):
                self._segments[index].end = Point(segment.end)
                return
        self._segments[index].end = (
            Point(self._segments[0].end) if self._segments[0].end is not None else None
        )
        # If move is never found, just the end point of the first element. Unless that's not a thing.
    def _validate_connection(self, index, prefer_second=False):
        """
        Validates the connection at the index.
        Connection 0 is the connection between getitem(0) and getitem(1)
        prefer_second is for those cases where failing the connection requires replacing
        a existing value. It will prefer the authority of right side, second value.
        """
        if index < 0 or index + 1 >= len(self._segments):
            return  # This connection doesn't exist.
        first = self._segments[index]
        second = self._segments[index + 1]
        # Fill a missing endpoint from the neighboring segment.
        if first.end is not None and second.start is None:
            second.start = Point(first.end)
        elif first.end is None and second.start is not None:
            first.end = Point(second.start)
        elif first.end != second.start:
            # The two values exist but are not equal. One must replace the other.
            if prefer_second:
                first.end = Point(second.start)
            else:
                second.start = Point(first.end)
    def __setitem__(self, index, new_element):
        """Replace segment(s) at index; accepts a d-string, segment, or slice assignment."""
        if isinstance(new_element, str):
            new_element = Path(new_element)
            if len(new_element) == 0:
                return
            new_element = new_element.segments()
            if isinstance(index, int):
                if len(new_element) > 1:
                    raise ValueError  # Cannot insert multiple items into a single space. Requires slice.
                new_element = new_element[0]
        self._segments[index] = new_element
        # Invalidate cached lengths; segment geometry changed.
        self._length = None
        self._lengths = None
        if isinstance(index, slice):
            self.validate_connections()
        else:
            self._validate_connection(index - 1)
            self._validate_connection(index)
            if isinstance(new_element, Move):
                self._validate_move(index)
            if isinstance(new_element, Close):
                self._validate_close(index)
def __delitem__(self, index):
original_element = self._segments[index]
del self._segments[index]
self._length = None
if isinstance(index, slice):
self.validate_connections()
else:
self._validate_connection(index - 1)
if isinstance(original_element, (Close, Move)):
self._validate_subpath(index)
def __iadd__(self, other):
if isinstance(other, str):
self.parse(other)
elif isinstance(other, (Path, Subpath)):
self.extend(map(copy, list(other)))
elif isinstance(other, Shape):
self.parse(other.d())
elif isinstance(other, PathSegment):
self.append(other)
else:
return NotImplemented
return self
def __add__(self, other):
if isinstance(other, (str, Path, Subpath, Shape, PathSegment)):
n = copy(self)
n += other
return n
return NotImplemented
def __radd__(self, other):
if isinstance(other, str):
path = Path(other)
path.extend(map(copy, self._segments))
return path
elif isinstance(other, PathSegment):
path = copy(self)
path.insert(0, other)
return path
else:
return NotImplemented
    def __len__(self):
        # Number of path segments.
        return len(self._segments)
    def __str__(self):
        # String form is the SVG path-data ("d") string.
        return self.d()
def __repr__(self):
values = []
if len(self) > 0:
values.append(", ".join(repr(x) for x in self._segments))
self._repr_shape(values)
params = ", ".join(values)
return "%s(%s)" % (self.__class__.__name__, params)
    def __eq__(self, other):
        """Paths are equal if their transformed segments and stroke widths match."""
        if isinstance(other, str):
            return self.__eq__(Path(other))
        if not isinstance(other, Path):
            return NotImplemented
        if len(self) != len(other):
            return False
        # Compare in absolute (transform-applied) form.
        p = abs(self)
        q = abs(other)
        for s, o in zip(q._segments, p._segments):
            if not s == o:
                return False
        if p.stroke_width != q.stroke_width:
            return False
        return True
    def __ne__(self, other):
        # Inverse of __eq__; NotImplemented lets Python try the reflected op.
        if not isinstance(other, (Path, str)):
            return NotImplemented
        return not self == other
    def parse(self, pathdef):
        """Parses the SVG path."""
        # The lexical parser drives this Path as its sink, presumably via the
        # move/line/arc builder methods — TODO confirm against SVGLexicalParser.
        tokens = SVGLexicalParser()
        tokens.parse(self, pathdef)
    def validate_connections(self):
        """
        Force validate all connections.
        This will scan path connections and link any adjacent elements together by replacing any None points or causing
        the start position of the next element to equal the end position of the previous. This should only be needed
        when combining paths and elements together. Close elements are always connected to the last Move element or to
        the end position of the first element in the list. The start element of the first segment may or may not be
        None.
        """
        zpoint = None  # destination of the most recent Move (close target)
        last_segment = None
        for segment in self._segments:
            if zpoint is None or isinstance(segment, Move):
                zpoint = segment.end
            if last_segment is not None:
                # Link adjacent segments, filling Nones; on conflict the
                # previous segment's end wins.
                if segment.start is None and last_segment.end is not None:
                    segment.start = Point(last_segment.end)
                elif last_segment.end is None and segment.start is not None:
                    last_segment.end = Point(segment.start)
                elif last_segment.end != segment.start:
                    segment.start = Point(last_segment.end)
            if (
                isinstance(segment, Close)
                and zpoint is not None
                and segment.end != zpoint
            ):
                segment.end = Point(zpoint)
            last_segment = segment
    def _is_valid(self):
        """
        Checks validation of all connections.
        Paths are valid if all end points match the start of the next point and all close
        commands return to the last valid move command.
        This does not check for incongruent path validity. Path fragments without initial moves
        double closed paths, may all pass this check.
        """
        zpoint = None  # destination of the most recent Move (close target)
        last_segment = None
        for segment in self._segments:
            if zpoint is None or isinstance(segment, Move):
                zpoint = segment.end
            if last_segment is not None:
                # Any missing or mismatched junction point invalidates the path.
                if segment.start is None:
                    return False
                elif last_segment.end is None:
                    return False
                elif last_segment.end != segment.start:
                    return False
            if (
                isinstance(segment, Close)
                and zpoint is not None
                and segment.end != zpoint
            ):
                return False
            last_segment = segment
        return True
@property
def first_point(self):
"""First point along the Path. This is the start point of the first segment unless it starts
with a Move command with a None start in which case first point is that Move's destination."""
if len(self._segments) == 0:
return None
if self._segments[0].start is not None:
return Point(self._segments[0].start)
return (
Point(self._segments[0].end) if self._segments[0].end is not None else None
)
@property
def current_point(self):
if len(self._segments) == 0:
return None
return (
Point(self._segments[-1].end)
if self._segments[-1].end is not None
else None
)
    @property
    def z_point(self):
        """
        Z is the destination of the last Move. It can mean, but doesn't necessarily mean the first_point in the path.
        This behavior of Z is defined in svg spec:
        http://www.w3.org/TR/SVG/paths.html#PathDataClosePathCommand
        """
        end_pos = None
        for segment in reversed(self._segments):
            if isinstance(segment, Move):
                end_pos = segment.end
                break
        if end_pos is None:
            # No Move found: fall back to the first segment's end point.
            try:
                end_pos = self._segments[0].end
            except IndexError:
                pass  # entire path is "z".
        return end_pos
    @property
    def smooth_point(self):
        """Returns the smoothing control point for the smooth commands.
        With regards to the SVG standard if the last command was a curve the smooth
        control point is the reflection of the previous control point.
        If the last command was not a curve, the smooth_point is coincident with the current.
        https://www.w3.org/TR/SVG/paths.html#PathDataCubicBezierCommands
        """
        if len(self._segments) == 0:
            return None
        start_pos = self.current_point
        last_segment = self._segments[-1]
        if isinstance(last_segment, QuadraticBezier):
            previous_control = last_segment.control
            return previous_control.reflected_across(start_pos)
        elif isinstance(last_segment, CubicBezier):
            previous_control = last_segment.control2
            return previous_control.reflected_across(start_pos)
        return start_pos
    def start(self):
        """Subpath-start hook; intentionally a no-op for a full Path."""
        pass
    def end(self):
        """Subpath-end hook; intentionally a no-op for a full Path."""
        pass
def move(self, *points, **kwargs):
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
end_pos = points[0]
if end_pos in ("z", "Z"):
end_pos = self.z_point
segment = Move(start_pos, end_pos)
segment.relative = relative
self.append(segment)
if len(points) > 1:
self.line(*points[1:], relative=relative)
return self
def line(self, *points, **kwargs):
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
end_pos = points[0]
if end_pos in ("z", "Z"):
end_pos = self.z_point
segment = Line(start_pos, end_pos)
segment.relative = relative
self.append(segment)
if len(points) > 1:
self.line(*points[1:])
return self
def vertical(self, *y_points, **kwargs):
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
if relative:
segment = Line(start_pos, Point(start_pos.x, start_pos.y + y_points[0]))
else:
segment = Line(start_pos, Point(start_pos.x, y_points[0]))
segment.relative = relative
self.append(segment)
if len(y_points) > 1:
self.vertical(*y_points[1:], relative=relative)
return self
def horizontal(self, *x_points, **kwargs):
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
if relative:
segment = Line(start_pos, Point(start_pos.x + x_points[0], start_pos.y))
segment.relative = relative
else:
segment = Line(start_pos, Point(x_points[0], start_pos.y))
segment.relative = relative
self.append(segment)
if len(x_points) > 1:
self.horizontal(*x_points[1:], relative=relative)
return self
def smooth_quad(self, *points, **kwargs):
"""Smooth curve. First control point is the "reflection" of
the second control point in the previous path."""
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
control1 = self.smooth_point
end_pos = points[0]
if end_pos in ("z", "Z"):
end_pos = self.z_point
segment = QuadraticBezier(start_pos, control1, end_pos)
segment.relative = relative
segment.smooth = True
self.append(segment)
if len(points) > 1:
self.smooth_quad(*points[1:])
return self
def quad(self, *points, **kwargs):
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
control = points[0]
if control in ("z", "Z"):
control = self.z_point
end_pos = points[1]
if end_pos in ("z", "Z"):
end_pos = self.z_point
segment = QuadraticBezier(start_pos, control, end_pos)
segment.relative = relative
segment.smooth = False
self.append(segment)
if len(points) > 2:
self.quad(*points[2:])
return self
def smooth_cubic(self, *points, **kwargs):
"""Smooth curve. First control point is the "reflection" of
the second control point in the previous path."""
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
control1 = self.smooth_point
control2 = points[0]
if control2 in ("z", "Z"):
control2 = self.z_point
end_pos = points[1]
if end_pos in ("z", "Z"):
end_pos = self.z_point
segment = CubicBezier(start_pos, control1, control2, end_pos)
segment.relative = relative
segment.smooth = True
self.append(segment)
if len(points) > 2:
self.smooth_cubic(*points[2:])
return self
def cubic(self, *points, **kwargs):
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
control1 = points[0]
if control1 in ("z", "Z"):
control1 = self.z_point
control2 = points[1]
if control2 in ("z", "Z"):
control2 = self.z_point
end_pos = points[2]
if end_pos in ("z", "Z"):
end_pos = self.z_point
segment = CubicBezier(start_pos, control1, control2, end_pos)
segment.relative = relative
segment.smooth = False
self.append(segment)
if len(points) > 3:
self.cubic(*points[3:])
return self
def arc(self, *arc_args, **kwargs):
relative = kwargs["relative"] if "relative" in kwargs else False
start_pos = self.current_point
rx = arc_args[0]
ry = arc_args[1]
if rx < 0:
rx = abs(rx)
if ry < 0:
ry = abs(ry)
rotation = arc_args[2]
arc = arc_args[3]
sweep = arc_args[4]
end_pos = arc_args[5]
if end_pos in ("z", "Z"):
end_pos = self.z_point
segment = Arc(start_pos, rx, ry, rotation, arc, sweep, end_pos)
segment.relative = relative
self.append(segment)
if len(arc_args) > 6:
self.arc(*arc_args[6:])
return self
def closed(self, relative=False):
start_pos = self.current_point
end_pos = self.z_point
segment = Close(start_pos, end_pos)
segment.relative = relative
self.append(segment)
return self
    def append(self, value):
        """Append one segment (or parse a path-d string, delegating multi-segment
        strings to extend). Invalidates the cached length and revalidates the
        new joint; a Close additionally gets its z-destination validated."""
        if isinstance(value, str):
            value = Path(value)
            if len(value) == 0:
                return
            if len(value) > 1:
                # Multi-segment strings are handled by extend() instead.
                self.extend(value)
                return
            value = value[0]
        self._length = None  # cached length is now stale
        index = len(self._segments) - 1  # joint between old tail and new segment
        self._segments.append(value)
        self._validate_connection(index)
        if isinstance(value, Close):
            self._validate_close(index + 1)
    def insert(self, index, value):
        """Insert one segment at index (strings are parsed; only the first
        parsed segment is inserted). Revalidates both joints around the
        insertion point, plus Move/Close-specific validation."""
        if isinstance(value, str):
            value = Path(value)
            if len(value) == 0:
                return
            value = value[0]
        self._length = None  # cached length is now stale
        self._segments.insert(index, value)
        # Re-validate the joint before and after the inserted segment.
        self._validate_connection(index - 1)
        self._validate_connection(index)
        if isinstance(value, Move):
            self._validate_move(index)
        if isinstance(value, Close):
            self._validate_close(index)
    def extend(self, iterable):
        """Extend with an iterable of segments (or a path-d string).
        Revalidates the joint to the old tail and the appended subpath."""
        if isinstance(iterable, str):
            iterable = Path(iterable)
        self._length = None  # cached length is now stale
        index = len(self._segments) - 1  # joint between old tail and new segments
        self._segments.extend(iterable)
        self._validate_connection(index)
        self._validate_subpath(index)
    def direct_close(self):
        """Forces close operations to be zero length by introducing a direct
        line to operation just before any non-zero length close.
        This is helpful because for some operations like reverse() because the
        close must located at the very end of the path sequence. But, if it's
        in effect a line-to and close, the line-to would need to start the sequence.
        But, for some operations this won't matter since it will still result in
        a closed shape with reversed ordering. But, if the final point in the
        sequence must exactly switch with the first point in the sequence. The
        close segments must be direct and zero length.

        :return: self; NOTE(review): returns None (falls through) when the
            path is empty — confirm callers never chain on an empty path.
        """
        if len(self._segments) == 0:
            return
        # Iterate backwards so inserting a Line does not shift the indexes of
        # segments not yet visited.
        for i in range(len(self._segments) - 1, -1, -1):
            segment = self._segments[i]
            if isinstance(segment, Close):
                if segment.length() != 0:
                    # Replace the travel of the Close with an explicit Line,
                    # then collapse the Close to zero length at its end.
                    line = Line(segment.start, segment.end)
                    segment.start = Point(segment.end)
                    self.insert(i, line)
        return self
    def reverse(self):
        """Reverse the path in place: each subpath is reversed, then the
        subpaths are emitted in reverse order.

        :return: self; NOTE(review): returns None (falls through) when the
            path is empty — confirm callers never chain on an empty path.
        """
        if len(self._segments) == 0:
            return
        # Temporarily detach the initial start point so subpath reversal does
        # not disturb it; it is restored onto the new first segment below.
        prepoint = self._segments[0].start
        self._segments[0].start = None
        p = Path()
        subpaths = list(self.as_subpaths())
        for subpath in subpaths:
            subpath.reverse()
        for subpath in reversed(subpaths):
            p += subpath
        self._segments = p._segments
        self._segments[0].start = prepoint
        return self
def subpath(self, index):
subpaths = list(self.as_subpaths())
return subpaths[index]
def count_subpaths(self):
subpaths = list(self.as_subpaths())
return len(subpaths)
def as_subpaths(self):
last = 0
for current, seg in enumerate(self):
if current != last and isinstance(seg, Move):
yield Subpath(self, last, current - 1)
last = current
yield Subpath(self, last, len(self) - 1)
def as_points(self):
"""Returns the list of defining points within path"""
for seg in self:
for p in seg:
if not isinstance(p, Point):
yield Point(p)
else:
yield p
    def reify(self):
        """
        Realizes the transform to the shape properties.
        Path objects reify perfectly.

        :return: self, with every segment multiplied through by the matrix
            and the transform reset to identity.
        """
        GraphicObject.reify(self)
        Transformable.reify(self)
        if isinstance(self.transform, Matrix):
            for e in self._segments:
                e *= self.transform  # bake the matrix into each segment
        self.transform.reset()
        return self
    @staticmethod
    def svg_d(segments, relative=None, smooth=None):
        """Serialize segments into an SVG path-data string.

        :param segments: iterable of path segments to serialize
        :param relative: None uses each segment's own relative flag; a bool
            forces absolute/relative for every command.
        :param smooth: None uses each bezier's own smooth flag; a bool forces
            smooth (S/T) or explicit (C/Q) serialization for all beziers.
        :return: SVG path-data string, "" for no segments.
        """
        if len(segments) == 0:
            return ""
        parts = []
        previous_segment = None
        p = Point(0)  # running current-point used for relative serialization
        if smooth is None:
            override_smooth = False
            smooth_set_value = True
        else:
            override_smooth = True
            smooth_set_value = bool(smooth)
        if relative is not None:
            # Forced relative/absolute serialization for every segment.
            for segment in segments:
                if isinstance(segment, (Move, Line, Arc, Close)):
                    parts.append(segment.d(p, relative=relative))
                elif isinstance(segment, (CubicBezier, QuadraticBezier)):
                    if (override_smooth and smooth_set_value) or (
                        not override_smooth and segment.smooth
                    ):
                        parts.append(
                            segment.d(
                                p,
                                relative=relative,
                                smooth=segment.is_smooth_from(previous_segment),
                            )
                        )
                    else:
                        parts.append(segment.d(p, relative=relative, smooth=False))
                previous_segment = segment
                p = previous_segment.end
        else:
            # Honor each segment's own relative flag.
            for segment in segments:
                if isinstance(segment, (Move, Line, Arc, Close)):
                    parts.append(segment.d(p, relative=segment.relative))
                elif isinstance(segment, (CubicBezier, QuadraticBezier)):
                    if (override_smooth and smooth_set_value) or (
                        not override_smooth and segment.smooth
                    ):
                        parts.append(
                            segment.d(
                                p,
                                relative=segment.relative,
                                smooth=segment.is_smooth_from(previous_segment),
                            )
                        )
                    else:
                        parts.append(
                            segment.d(p, relative=segment.relative, smooth=False)
                        )
                previous_segment = segment
                p = previous_segment.end
        return " ".join(parts)
def d(self, relative=None, transformed=True, smooth=None):
path = self
if transformed:
path = abs(path)
return Path.svg_d(path._segments, relative=relative, smooth=smooth)
def segments(self, transformed=True):
if transformed and not self.transform.is_identity():
return [s * self.transform for s in self._segments]
return self._segments
def approximate_arcs_with_cubics(self, error=0.1):
"""
Iterates through this path and replaces any Arcs with cubic bezier curves.
"""
sweep_limit = tau * error
for s in range(len(self) - 1, -1, -1):
segment = self[s]
if isinstance(segment, Arc):
arc_required = int(ceil(abs(segment.sweep) / sweep_limit))
self[s : s + 1] = list(segment.as_cubic_curves(arc_required))
def approximate_arcs_with_quads(self, error=0.1):
"""
Iterates through this path and replaces any Arcs with quadratic bezier curves.
"""
sweep_limit = tau * error
for s in range(len(self) - 1, -1, -1):
segment = self[s]
if isinstance(segment, Arc):
arc_required = int(ceil(abs(segment.sweep) / sweep_limit))
self[s : s + 1] = list(segment.as_quad_curves(arc_required))
class Rect(Shape):
    """
    SVG Rect shapes are defined in SVG2 10.2
    https://www.w3.org/TR/SVG2/shapes.html#RectElement
    These have geometric properties x, y, width, height, rx, ry
    Geometric properties can be Length values.
    Rect(x, y, width, height)
    Rect(x, y, width, height, rx, ry)
    Rect(x, y, width, height, rx, ry, matrix)
    Rect(x, y, width, height, rx, ry, matrix, stroke, fill)
    Rect(dict): dictionary values read from svg.
    """

    def __init__(self, *args, **kwargs):
        """Initialize geometry to None, then dispatch to Shape's
        property-by-object/values/args initialization and validate radii."""
        self.x = None
        self.y = None
        self.width = None
        self.height = None
        self.rx = None
        self.ry = None
        Shape.__init__(self, *args, **kwargs)
        self._validate_rect()

    def property_by_object(self, s):
        """Copy geometric properties from another Rect-like object."""
        Shape.property_by_object(self, s)
        self.x = s.x
        self.y = s.y
        self.width = s.width
        self.height = s.height
        self.rx = s.rx
        self.ry = s.ry
        self._validate_rect()

    def property_by_values(self, values):
        """Read geometric properties from an SVG attribute dictionary."""
        Shape.property_by_values(self, values)
        self.x = Length(values.get(SVG_ATTR_X, 0)).value()
        self.y = Length(values.get(SVG_ATTR_Y, 0)).value()
        self.width = Length(values.get(SVG_ATTR_WIDTH, 1)).value()
        self.height = Length(values.get(SVG_ATTR_HEIGHT, 1)).value()
        self.rx = Length(values.get(SVG_ATTR_RADIUS_X, None)).value()
        self.ry = Length(values.get(SVG_ATTR_RADIUS_Y, None)).value()

    def property_by_args(self, *args):
        """Read geometry from positional args: x, y, width, height, rx, ry,
        then any remaining args go to the generic shape initializer."""
        arg_length = len(args)
        if arg_length >= 1:
            self.x = Length(args[0]).value()
        if arg_length >= 2:
            self.y = Length(args[1]).value()
        if arg_length >= 3:
            self.width = Length(args[2]).value()
        if arg_length >= 4:
            self.height = Length(args[3]).value()
        if arg_length >= 5:
            self.rx = Length(args[4]).value()
        if arg_length >= 6:
            self.ry = Length(args[5]).value()
        if arg_length >= 7:
            self._init_shape(*args[6:])

    def _validate_rect(self):
        """None is 'auto' for values."""
        rx = self.rx
        ry = self.ry
        if rx is None and ry is None:
            # Both auto: no rounding.
            rx = ry = 0
        if rx is not None and ry is None:
            # Auto ry mirrors rx (SVG auto-radius rule).
            rx = Length(rx).value(relative_length=self.width)
            ry = rx
        elif ry is not None and rx is None:
            # Auto rx mirrors ry.
            ry = Length(ry).value(relative_length=self.height)
            rx = ry
        elif rx is not None and ry is not None:
            rx = Length(rx).value(relative_length=self.width)
            ry = Length(ry).value(relative_length=self.height)
        if rx == 0 or ry == 0:
            # A zero radius in either axis disables rounding entirely.
            rx = ry = 0
        else:
            # Clamp radii to half the rect's dimensions per SVG spec.
            try:
                rx = min(rx, self.width / 2.0)
                ry = min(ry, self.height / 2.0)
            except ValueError:
                pass  # If width is in inches and rx is in units, this is unsolvable without knowing the ppi
        self.rx = rx
        self.ry = ry

    def _attrs(self, values):
        """Append non-default attribute strings for repr/str output."""
        if self.x != 0:
            values.append("%s=%s" % (SVG_ATTR_X, Length.str(self.x)))
        if self.y != 0:
            values.append("%s=%s" % (SVG_ATTR_Y, Length.str(self.y)))
        if self.width != 0:
            values.append("%s=%s" % (SVG_ATTR_WIDTH, Length.str(self.width)))
        if self.height != 0:
            values.append("%s=%s" % (SVG_ATTR_HEIGHT, Length.str(self.height)))
        if self.rx != 0:
            values.append("%s=%s" % (SVG_ATTR_RADIUS_X, Length.str(self.rx)))
        if self.ry != 0:
            values.append("%s=%s" % (SVG_ATTR_RADIUS_Y, Length.str(self.ry)))

    def __repr__(self):
        values = []
        self._attrs(values)
        self._repr_shape(values)
        params = ", ".join(values)
        return "Rect(%s)" % params

    def __str__(self):
        values = []
        self._attrs(values)
        self._str_shape(values)
        params = ", ".join(values)
        return "Rect(%s)" % params

    def __copy__(self):
        return Rect(self)

    @property
    def implicit_position(self):
        """(x, y) corner mapped through the transform when apply is set."""
        if not self.apply:
            return Point(self.x, self.y)
        point = Point(self.x, self.y)
        point *= self.transform
        return point

    @property
    def implicit_x(self):
        if not self.apply:
            return self.x
        return self.implicit_position[0]

    @property
    def implicit_y(self):
        if not self.apply:
            return self.y
        return self.implicit_position[1]

    @property
    def implicit_width(self):
        """Width as transformed distance from origin along the x-axis."""
        if not self.apply:
            return self.width
        p = Point(self.width, 0)
        p *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(p)

    @property
    def implicit_height(self):
        """Height as transformed distance from origin along the y-axis."""
        if not self.apply:
            return self.height
        p = Point(0, self.height)
        p *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(p)

    @property
    def implicit_rx(self):
        """x-radius as transformed distance from origin along the x-axis."""
        if not self.apply:
            return self.rx
        p = Point(self.rx, 0)
        p *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(p)

    @property
    def implicit_ry(self):
        """y-radius as transformed distance from origin along the y-axis."""
        if not self.apply:
            return self.ry
        p = Point(0, self.ry)
        p *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(p)

    def segments(self, transformed=True):
        """
        Rect decomposition is given in SVG 2.0 10.2
        Rect:
        * perform an absolute moveto operation to location (x,y);
        * perform an absolute horizontal lineto with parameter x+width;
        * perform an absolute vertical lineto parameter y+height;
        * perform an absolute horizontal lineto parameter x;
        * ( close the path)
        Rounded Rect:
        rx and ry are used as the equivalent parameters to the elliptical arc command,
        the x-axis-rotation and large-arc-flag are set to zero, the sweep-flag is set to one
        * perform an absolute moveto operation to location (x+rx,y);
        * perform an absolute horizontal lineto with parameter x+width-rx;
        * perform an absolute elliptical arc operation to coordinate (x+width,y+ry)
        * perform an absolute vertical lineto parameter y+height-ry;
        * perform an absolute elliptical arc operation to coordinate (x+width-rx,y+height)
        * perform an absolute horizontal lineto parameter x+rx;
        * perform an absolute elliptical arc operation to coordinate (x,y+height-ry)
        * perform an absolute vertical lineto parameter y+ry
        * perform an absolute elliptical arc operation with a segment-completing close path operation
        :param transformed: provide the reified version.
        :return: path_d of shape.
        """
        scooped = False
        x = self.x
        y = self.y
        width = self.width
        height = self.height
        if self.is_degenerate():
            return ()  # a computed value of zero for either dimension disables rendering.
        rx = self.rx
        ry = self.ry
        if not self._strict:
            # Non-strict mode: negative radii "scoop" the corners inward.
            if rx < 0 < width and ry < 0 < height:
                scooped = True
                rx = abs(rx)
                ry = abs(ry)
        if rx < 0 < width or ry < 0 < height:
            # Mixed-sign radii are invalid; fall back to square corners.
            rx = 0
            ry = 0
        if rx == ry == 0:
            segments = (
                Move(None, (x, y)),
                Line((x, y), (x + width, y)),
                Line((x + width, y), (x + width, y + height)),
                Line((x + width, y + height), (x, y + height)),
                Close((x, y + height), (x, y)),
            )
        else:
            segments = (
                Move(None, (x + rx, y)),
                Line((x + rx, y), (x + width - rx, y)),
                Arc(
                    (x + width - rx, y),
                    (x + width, y + ry),
                    rx=rx,
                    ry=ry,
                    scooped=scooped,
                ),
                Line((x + width, y + ry), (x + width, y + height - ry)),
                Arc(
                    (x + width, y + height - ry),
                    (x + width - rx, y + height),
                    rx=rx,
                    ry=ry,
                    scooped=scooped,
                ),
                Line((x + width - rx, y + height), (x + rx, y + height)),
                Arc(
                    (x + rx, y + height),
                    (x, y + height - ry),
                    rx=rx,
                    ry=ry,
                    scooped=scooped,
                ),
                Line((x, y + height - ry), (x, y + ry)),
                Arc((x, y + ry), (x + rx, y), rx=rx, ry=ry, scooped=scooped),
                Close((x + rx, y), (x + rx, y)),
            )
        if not transformed or self.transform.is_identity():
            return segments
        else:
            return [s * self.transform for s in segments]

    def reify(self):
        """
        Realizes the transform to the shape properties.
        If the realized shape can be properly represented as a rectangle with an identity matrix
        it will be, otherwise the properties will approximate the implied values.
        Skewed and Rotated rectangles cannot be reified.
        """
        scale_x = self.transform.value_scale_x()
        scale_y = self.transform.value_scale_y()
        if scale_x * scale_y < 0:
            return self  # No reification of negative values, gives negative dims.
        translate_x = self.transform.value_trans_x()
        translate_y = self.transform.value_trans_y()
        if (
            self.transform.value_skew_x() == 0
            and self.transform.value_skew_y() == 0
            and scale_x != 0
            and scale_y != 0
        ):
            GraphicObject.reify(self)
            Transformable.reify(self)
            # Fold translation into x/y, then cancel it out of the matrix.
            self.x *= scale_x
            self.y *= scale_y
            self.x += translate_x
            self.y += translate_y
            self.transform *= Matrix.translate(-translate_x, -translate_y)
            # Fold scale into the dimensions, then cancel it out of the matrix.
            self.rx = scale_x * self.rx
            self.ry = scale_y * self.ry
            self.width = scale_x * self.width
            self.height = scale_y * self.height
            self.transform *= Matrix.scale(1.0 / scale_x, 1.0 / scale_y)
        return self

    def render(self, **kwargs):
        """Resolve any Length-typed properties into plain numbers using the
        supplied rendering context (width/height/relative_length, ppi, etc.)."""
        Shape.render(self, **kwargs)
        width = kwargs.get("width", kwargs.get("relative_length"))
        height = kwargs.get("height", kwargs.get("relative_length"))
        try:
            del kwargs["relative_length"]
        except KeyError:
            pass
        if isinstance(self.x, Length):
            self.x = self.x.value(relative_length=width, **kwargs)
        if isinstance(self.y, Length):
            self.y = self.y.value(relative_length=height, **kwargs)
        if isinstance(self.width, Length):
            self.width = self.width.value(relative_length=width, **kwargs)
        if isinstance(self.height, Length):
            self.height = self.height.value(relative_length=height, **kwargs)
        if isinstance(self.rx, Length):
            self.rx = self.rx.value(relative_length=width, **kwargs)
        if isinstance(self.ry, Length):
            self.ry = self.ry.value(relative_length=height, **kwargs)
        return self

    def is_degenerate(self):
        """True when either dimension is zero or unset (disables rendering)."""
        return (
            self.width == 0
            or self.height == 0
            or self.width is None
            or self.height is None
        )
class _RoundShape(Shape):
    """Shared base for Ellipse and Circle: center (cx, cy) with radii (rx, ry)."""

    def __init__(self, *args, **kwargs):
        """Initialize geometry to None, then dispatch to Shape initialization."""
        self.cx = None
        self.cy = None
        self.rx = None
        self.ry = None
        Shape.__init__(self, *args, **kwargs)

    def property_by_object(self, s):
        """Copy geometric properties from another round-shape object."""
        Shape.property_by_object(self, s)
        self.cx = s.cx
        self.cy = s.cy
        self.rx = s.rx
        self.ry = s.ry

    def property_by_values(self, values):
        """Read geometry from an SVG attribute dictionary. An 'r' attribute
        overrides rx/ry; missing radii default to 1, missing center to 0."""
        Shape.property_by_values(self, values)
        self.cx = Length(values.get(SVG_ATTR_CENTER_X)).value()
        self.cy = Length(values.get(SVG_ATTR_CENTER_Y)).value()
        self.rx = Length(values.get(SVG_ATTR_RADIUS_X)).value()
        self.ry = Length(values.get(SVG_ATTR_RADIUS_Y)).value()
        r = Length(values.get(SVG_ATTR_RADIUS, None)).value()
        if r is not None:
            self.rx = r
            self.ry = r
        else:
            if self.rx is None:
                self.rx = 1
            if self.ry is None:
                self.ry = 1
        center = values.get("center", None)
        if center is not None:
            self.cx, self.cy = Point(center)
        if self.cx is None:
            self.cx = 0
        if self.cy is None:
            self.cy = 0

    def property_by_args(self, *args):
        """Read geometry from positional args: cx, cy, rx, (ry). With only
        three args, ry mirrors rx (circle)."""
        arg_length = len(args)
        if arg_length >= 1:
            self.cx = Length(args[0]).value()
        if arg_length >= 2:
            self.cy = Length(args[1]).value()
        if arg_length >= 3:
            self.rx = Length(args[2]).value()
            if arg_length >= 4:
                self.ry = Length(args[3]).value()
            else:
                self.ry = self.rx
        if arg_length >= 5:
            self._init_shape(*args[4:])

    def _attrs(self, values):
        """Append attribute strings for repr/str; equal radii emit a single r."""
        if self.cx is not None:
            values.append("%s=%s" % (SVG_ATTR_CENTER_X, Length.str(self.cx)))
        if self.cy is not None:
            values.append("%s=%s" % (SVG_ATTR_CENTER_Y, Length.str(self.cy)))
        if self.rx == self.ry or self.ry is None:
            values.append("%s=%s" % (SVG_ATTR_RADIUS, Length.str(self.rx)))
        else:
            values.append("%s=%s" % (SVG_ATTR_RADIUS_X, Length.str(self.rx)))
            values.append("%s=%s" % (SVG_ATTR_RADIUS_Y, Length.str(self.ry)))

    def __repr__(self):
        values = []
        self._attrs(values)
        self._repr_shape(values)
        params = ", ".join(values)
        return "%s(%s)" % (self.__class__.__name__, params)

    def __str__(self):
        values = []
        self._attrs(values)
        self._str_shape(values)
        params = ", ".join(values)
        return "%s(%s)" % (self.__class__.__name__, params)

    @property
    def implicit_rx(self):
        """x-radius as transformed distance from origin along the x-axis."""
        if not self.apply:
            return self.rx
        prx = Point(self.rx, 0)
        prx *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(prx)

    @property
    def implicit_ry(self):
        """y-radius as transformed distance from origin along the y-axis."""
        if not self.apply:
            return self.ry
        pry = Point(0, self.ry)
        pry *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(pry)

    # Alias: for circles the single radius is the implicit x-radius.
    implicit_r = implicit_rx

    @property
    def implicit_center(self):
        """Center point, mapped through the transform when apply is set."""
        center = Point(self.cx, self.cy)
        if not self.apply:
            return center
        center *= self.transform
        return center

    def segments(self, transformed=True):
        """
        SVG path decomposition is given in SVG 2.0 10.3, 10.4.
        A move-to command to the point cx+rx,cy;
        arc to cx,cy+ry;
        arc to cx-rx,cy;
        arc to cx,cy-ry;
        arc with a segment-completing close path operation.
        Converts the parameters from an ellipse or a circle to a string for a
        Path object d-attribute"""
        original = self.apply
        self.apply = transformed  # temporarily toggle so implicit_* reflect request
        path = Path()
        steps = 4
        step_size = tau / steps
        if (
            transformed
            and self.transform.value_scale_x() * self.transform.value_scale_y() < 0
        ):
            # A flip reverses the winding direction of the arcs.
            step_size = -step_size
        t_start = 0
        t_end = step_size
        # zero for either dimension, or a computed value of auto for both dimensions, disables rendering of the element.
        rx = self.implicit_rx
        ry = self.implicit_ry
        if self.is_degenerate():
            return ()
        center = self.implicit_center
        path.move((self.point_at_t(0)))
        for i in range(steps):
            path += Arc(
                self.point_at_t(t_start),
                self.point_at_t(t_end),
                center,
                rx=rx,
                ry=ry,
                rotation=self.rotation,
                sweep=step_size,
            )
            t_start = t_end
            t_end += step_size
        path.closed()
        self.apply = original  # restore caller-visible state
        return path.segments(transformed)

    def reify(self):
        """
        Realizes the transform to the shape properties.
        Skewed and Rotated roundshapes cannot be reified.
        """
        scale_x = self.transform.value_scale_x()
        scale_y = self.transform.value_scale_y()
        if scale_y * scale_x < 0:
            return self  # No reification of flipped values.
        translate_x = self.transform.value_trans_x()
        translate_y = self.transform.value_trans_y()
        if (
            self.transform.value_skew_x() == 0
            and self.transform.value_skew_y() == 0
            and scale_x != 0
            and scale_y != 0
        ):
            GraphicObject.reify(self)
            Transformable.reify(self)
            # Fold translation into the center, then cancel it from the matrix.
            self.cx *= scale_x
            self.cy *= scale_y
            self.cx += translate_x
            self.cy += translate_y
            self.transform *= Matrix.translate(-translate_x, -translate_y)
            # Fold scale into the radii, then cancel it from the matrix.
            self.rx = scale_x * self.rx
            self.ry = scale_y * self.ry
            self.transform *= Matrix.scale(1.0 / scale_x, 1.0 / scale_y)
        return self

    def render(self, **kwargs):
        """Resolve any Length-typed properties into plain numbers using the
        supplied rendering context (width/height/relative_length, ppi, etc.)."""
        Shape.render(self, **kwargs)
        width = kwargs.get("width", kwargs.get("relative_length"))
        height = kwargs.get("height", kwargs.get("relative_length"))
        try:
            del kwargs["relative_length"]
        except KeyError:
            pass
        if isinstance(self.cx, Length):
            self.cx = self.cx.value(relative_length=width, **kwargs)
        if isinstance(self.cy, Length):
            self.cy = self.cy.value(relative_length=height, **kwargs)
        if isinstance(self.rx, Length):
            self.rx = self.rx.value(relative_length=width, **kwargs)
        if isinstance(self.ry, Length):
            self.ry = self.ry.value(relative_length=height, **kwargs)
        return self

    def is_degenerate(self):
        """True when either implicit radius is zero (disables rendering)."""
        rx = self.implicit_rx
        ry = self.implicit_ry
        return rx == 0 or ry == 0

    def unit_matrix(self):
        """
        return the unit matrix which could would transform the unit circle into this ellipse.
        One of the valid parameterizations for ellipses is that they are all affine transforms of the unit circle.
        This provides exactly such a matrix.

        :return: matrix
        """
        m = Matrix()
        m.post_scale(self.implicit_rx, self.implicit_ry)
        m.post_rotate(self.rotation)
        center = self.implicit_center
        m.post_translate(center.x, center.y)
        return m

    def arc_t(self, t0, t1):
        """
        return the arc found between the given values of t on the ellipse.

        :param t0: t start
        :param t1: t end
        :return: arc
        """
        return Arc(
            self.point_at_t(t0),
            self.point_at_t(t1),
            self.implicit_center,
            rx=self.implicit_rx,
            ry=self.implicit_ry,
            rotation=self.rotation,
            sweep=t1 - t0,
        )

    def arc_angle(self, a0, a1, ccw=None):
        """
        return the arc found between the given angles on the ellipse.

        :param a0: start angle
        :param a1: end angle
        :param ccw: optional flag to force clockwise or counter-clockwise arc-angles, default is smaller angle
        :return: arc
        """
        if ccw is None:
            ccw = a0 > a1
        return Arc(
            self.point_at_angle(a0),
            self.point_at_angle(a1),
            self.implicit_center,
            rx=self.implicit_rx,
            ry=self.implicit_ry,
            rotation=self.rotation,
            ccw=ccw,
        )

    def point_at_angle(self, angle):
        """
        find the point on the ellipse from the center at the given angle.
        Note: For non-circular arcs this is different than point(t).

        :param angle: angle from center to find point
        :return: point found
        """
        a = self.implicit_rx
        b = self.implicit_ry
        if a == b:
            # Circle: angle and t coincide.
            return self.point_at_t(angle)
        angle -= self.rotation
        t = atan2(a * tan(angle), b)
        tau_1_4 = tau / 4.0
        tau_3_4 = 3 * tau_1_4
        # atan2 folds into (-tau/4, tau/4]; shift into the correct half.
        if tau_3_4 >= abs(angle) % tau > tau_1_4:
            t += tau / 2.0
        return self.point_at_t(t)

    def angle_at_point(self, p):
        """
        find the angle to the point.

        :param p: point
        :return: angle to given point.
        """
        if self.apply and not self.transform.is_identity():
            return self.implicit_center.angle_to(p)
        else:
            center = Point(self.cx, self.cy)
            return center.angle_to(p)

    def t_at_point(self, p):
        """
        find the t parameter to at the point.

        :param p: point
        :return: t parameter to the given point.
        """
        angle = self.angle_at_point(p)
        angle -= self.rotation
        a = self.implicit_rx
        b = self.implicit_ry
        t = atan2(a * tan(angle), b)
        tau_1_4 = tau / 4.0
        tau_3_4 = 3 * tau_1_4
        # atan2 folds into (-tau/4, tau/4]; shift into the correct half.
        if tau_3_4 >= abs(angle) % tau > tau_1_4:
            t += tau / 2.0
        return t

    def point_at_t(self, t):
        """
        find the point that corresponds to given value t.
        Where t=0 is the first point and t=tau is the final point.
        In the case of a circle: t = angle.

        :param t:
        :return:
        """
        rotation = self.rotation
        a = self.implicit_rx
        b = self.implicit_ry
        center = self.implicit_center
        cx = center.x
        cy = center.y
        cos_theta = cos(rotation)
        sin_theta = sin(rotation)
        cos_t = cos(t)
        sin_t = sin(t)
        # Standard rotated-ellipse parameterization.
        px = cx + a * cos_t * cos_theta - b * sin_t * sin_theta
        py = cy + a * cos_t * sin_theta + b * sin_t * cos_theta
        return Point(px, py)

    def point(self, position, error=ERROR):
        """
        find the point that corresponds to given value [0,1].
        Where t=0 is the first point and t=1 is the final point.

        :param position: position value between 0,1 where value equals the amount through the shape
        :param error: error permitted in determining point value (unused for this shape)
        :return: point at t
        """
        return self.point_at_t(tau * position)

    def _ramanujan_length(self):
        """Ramanujan's approximation of the ellipse perimeter."""
        a = self.implicit_rx
        b = self.implicit_ry
        if b > a:
            a, b = b, a
        h = ((a - b) * (a - b)) / ((a + b) * (a + b))
        return pi * (a + b) * (1 + (3 * h / (10 + sqrt(4 - 3 * h))))
class Ellipse(_RoundShape):
    """
    SVG Ellipse shapes are defined in SVG2 10.4
    https://www.w3.org/TR/SVG2/shapes.html#EllipseElement
    These have geometric properties cx, cy, rx, ry
    """

    def __init__(self, *args, **kwargs):
        """Construct via the shared round-shape initializer."""
        super().__init__(*args, **kwargs)

    def __copy__(self):
        """Copy through the object-initialization path."""
        return Ellipse(self)

    def _name(self):
        return type(self).__name__
class Circle(_RoundShape):
    """
    SVG Circle shapes are defined in SVG2 10.3
    https://www.w3.org/TR/SVG2/shapes.html#CircleElement
    These have geometric properties cx, cy, r
    """

    def __init__(self, *args, **kwargs):
        """Construct via the shared round-shape initializer."""
        super().__init__(*args, **kwargs)

    def __copy__(self):
        """Copy through the object-initialization path."""
        return Circle(self)

    def _name(self):
        return type(self).__name__
class SimpleLine(Shape):
    """
    SVG Line shapes are defined in SVG2 10.5
    https://www.w3.org/TR/SVG2/shapes.html#LineElement
    These have geometric properties x1, y1, x2, y2
    These are called Line in SVG but that name is already used for Line(PathSegment)
    """

    def __init__(self, *args, **kwargs):
        """Initialize endpoints to None, then dispatch to Shape initialization."""
        self.x1 = None
        self.y1 = None
        self.x2 = None
        self.y2 = None
        Shape.__init__(self, *args, **kwargs)

    def property_by_object(self, s):
        """Copy endpoint properties from another SimpleLine-like object."""
        Shape.property_by_object(self, s)
        self.x1 = s.x1
        self.y1 = s.y1
        self.x2 = s.x2
        self.y2 = s.y2

    def property_by_values(self, values):
        """Read endpoints from an SVG attribute dictionary; default 0."""
        Shape.property_by_values(self, values)
        self.x1 = Length(values.get(SVG_ATTR_X1, 0)).value()
        self.y1 = Length(values.get(SVG_ATTR_Y1, 0)).value()
        self.x2 = Length(values.get(SVG_ATTR_X2, 0)).value()
        self.y2 = Length(values.get(SVG_ATTR_Y2, 0)).value()

    def property_by_args(self, *args):
        """Read endpoints from positional args: x1, y1, x2, y2, then shape args."""
        arg_length = len(args)
        if arg_length >= 1:
            self.x1 = Length(args[0]).value()
        if arg_length >= 2:
            self.y1 = Length(args[1]).value()
        if arg_length >= 3:
            self.x2 = Length(args[2]).value()
        if arg_length >= 4:
            self.y2 = Length(args[3]).value()
        self._init_shape(*args[4:])

    def _attrs(self, values):
        """Append non-None endpoint attribute strings for repr/str output."""
        if self.x1 is not None:
            values.append("%s=%s" % (SVG_ATTR_X1, str(self.x1)))
        if self.y1 is not None:
            values.append("%s=%s" % (SVG_ATTR_Y1, str(self.y1)))
        if self.x2 is not None:
            values.append("%s=%s" % (SVG_ATTR_X2, str(self.x2)))
        if self.y2 is not None:
            values.append("%s=%s" % (SVG_ATTR_Y2, str(self.y2)))

    def __repr__(self):
        values = []
        self._attrs(values)
        self._repr_shape(values)
        params = ", ".join(values)
        return "SimpleLine(%s)" % params

    def __str__(self):
        values = []
        self._attrs(values)
        self._str_shape(values)
        params = ", ".join(values)
        return "SimpleLine(%s)" % params

    def __copy__(self):
        return SimpleLine(self)

    @property
    def implicit_x1(self):
        """x of the first endpoint mapped through the transform."""
        point = Point(self.x1, self.y1)
        point *= self.transform
        return point.x

    @property
    def implicit_y1(self):
        """y of the first endpoint mapped through the transform."""
        point = Point(self.x1, self.y1)
        point *= self.transform
        return point.y

    @property
    def implicit_x2(self):
        """x of the second endpoint mapped through the transform."""
        point = Point(self.x2, self.y2)
        point *= self.transform
        return point.x

    @property
    def implicit_y2(self):
        """y of the second endpoint mapped through the transform."""
        point = Point(self.x2, self.y2)
        point *= self.transform
        return point.y

    def segments(self, transformed=True):
        """
        SVG path decomposition is given in SVG 2.0 10.5.
        perform an absolute moveto operation to absolute location (x1,y1)
        perform an absolute lineto operation to absolute location (x2,y2)

        :returns Path_d path for line.
        """
        start = Point(self.x1, self.y1)
        end = Point(self.x2, self.y2)
        if transformed:
            start *= self.transform
            end *= self.transform
        return Move(None, start), Line(start, end)

    def reify(self):
        """
        Realizes the transform to the shape properties.
        SimpleLines are perfectly reified.
        """
        GraphicObject.reify(self)
        Transformable.reify(self)
        matrix = self.transform
        p = Point(self.x1, self.y1)
        p *= matrix
        self.x1 = p.x
        self.y1 = p.y
        p = Point(self.x2, self.y2)
        p *= matrix
        self.x2 = p.x
        self.y2 = p.y
        matrix.reset()
        return self

    def render(self, **kwargs):
        """Resolve any Length-typed endpoints into plain numbers using the
        supplied rendering context (width/height/relative_length, ppi, etc.)."""
        Shape.render(self, **kwargs)
        width = kwargs.get("width", kwargs.get("relative_length"))
        height = kwargs.get("height", kwargs.get("relative_length"))
        try:
            del kwargs["relative_length"]
        except KeyError:
            pass
        if isinstance(self.x1, Length):
            self.x1 = self.x1.value(relative_length=width, **kwargs)
        if isinstance(self.y1, Length):
            self.y1 = self.y1.value(relative_length=height, **kwargs)
        if isinstance(self.x2, Length):
            self.x2 = self.x2.value(relative_length=width, **kwargs)
        if isinstance(self.y2, Length):
            self.y2 = self.y2.value(relative_length=height, **kwargs)
        return self
class _Polyshape(Shape):
    """Base form of Polygon and Polyline since the objects are nearly the same."""

    def __init__(self, *args, **kwargs):
        # Populated by _init_points() during Shape initialization.
        self.points = list()
        Shape.__init__(self, *args, **kwargs)

    def property_by_object(self, s):
        Shape.property_by_object(self, s)
        self._init_points(s.points)

    def property_by_values(self, values):
        Shape.property_by_values(self, values)
        self._init_points(values)

    def property_by_args(self, *args):
        self._init_points(args)

    def _init_points(self, points):
        """Initialize self.points from any accepted form: None, a values dict
        (via the SVG points attribute), an SVG points string, a flat sequence
        of coordinates, or a sequence of point-like objects."""
        if len(self.points) != 0:
            # Already initialized; never overwrite existing points.
            return
        if points is None:
            self.points = list()
            return
        if isinstance(points, dict):
            # Values dict: pull out the SVG points attribute if present.
            if SVG_ATTR_POINTS in points:
                points = points[SVG_ATTR_POINTS]
            else:
                self.points = list()
                return
        try:
            # Unwrap a single-element container, e.g. args == ("0,0 1,1",).
            if len(points) == 1:
                points = points[0]
        except TypeError:
            pass
        if isinstance(points, str):
            # SVG points string: parse whitespace/comma separated pairs.
            findall = REGEX_COORD_PAIR.findall(points)
            self.points = [Point(float(j), float(k)) for j, k in findall]
        elif isinstance(points, (list, tuple)):
            if len(points) == 0:
                self.points = list()
            else:
                first_point = points[0]
                if isinstance(first_point, (float, int)):
                    # Flat coordinate list: pair up consecutive values.
                    self.points = list(map(Point, zip(*[iter(points)] * 2)))
                elif isinstance(first_point, (list, tuple, complex, str, Point)):
                    self.points = list(map(Point, points))
                else:
                    self.points = list()

    def __repr__(self):
        values = []
        if self.points is not None:
            s = " ".join(map(str, self.points))
            values.append("%s='%s'" % (SVG_ATTR_POINTS, s))
        self._repr_shape(values)
        params = ", ".join(values)
        return "%s(%s)" % (self.__class__.__name__, params)

    def __str__(self):
        values = []
        if self.points is not None:
            s = " ".join(map(str, self.points))
            values.append("%s='%s'" % (SVG_ATTR_POINTS, s))
        self._str_shape(values)
        params = ", ".join(values)
        return "%s(%s)" % (self.__class__.__name__, params)

    def __len__(self):
        return len(self.points)

    def __getitem__(self, item):
        return self.points[item]

    def segments(self, transformed=True):
        """
        Polyline and Polygon decomposition is given in SVG2. 10.6 and 10.7
        * perform an absolute moveto operation to the first coordinate pair in the list of points
        * for each subsequent coordinate pair, perform an absolute lineto operation to that coordinate pair.
        * (Polygon-only) perform a closepath command
        Note: For a polygon/polyline made from n points, the resulting path will
        be composed of n lines (even if some of these lines have length zero).
        """
        if self.transform.is_identity() or not transformed:
            points = self.points
        else:
            points = list(map(self.transform.point_in_matrix_space, self.points))
        if self.is_degenerate():
            return []
        segments = [Move(None, points[0])]
        last = points[0]
        for i in range(1, len(points)):
            current = points[i]
            segments.append(Line(last, current))
            last = current
        if isinstance(self, Polygon):
            # Polygons close back to the first point; polylines do not.
            segments.append(Close(last, points[0]))
        return segments

    def reify(self):
        """
        Realizes the transform to the shape properties.
        Polyshapes are perfectly reified.
        """
        GraphicObject.reify(self)
        Transformable.reify(self)
        matrix = self.transform
        # Bake the matrix into every point in place, then reset it.
        for p in self:
            p *= matrix
        matrix.reset()
        return self

    def is_degenerate(self):
        """A polyshape with no points is degenerate and draws nothing."""
        return len(self.points) == 0
class Polyline(_Polyshape):
    """SVG polyline shape, SVG2 section 10.6.

    https://www.w3.org/TR/SVG2/shapes.html#PolylineElement
    Geometric property: points.
    """

    def __init__(self, *args, **kwargs):
        _Polyshape.__init__(self, *args, **kwargs)

    def __copy__(self):
        return Polyline(self)

    def _name(self):
        return type(self).__name__
class Polygon(_Polyshape):
    """SVG polygon shape, SVG2 section 10.7.

    https://www.w3.org/TR/SVG2/shapes.html#PolygonElement
    Geometric property: points.
    """

    def __init__(self, *args, **kwargs):
        _Polyshape.__init__(self, *args, **kwargs)

    def __copy__(self):
        return Polygon(self)

    def _name(self):
        return type(self).__name__
class Subpath:
    """
    Subpath is a Path-backed window implementation. It does not store a list of segments but rather
    stores a Path, start position, end position. When a function is called on a subpath, the result of
    those events is performed on the backing Path. When the backing Path is modified the behavior is
    undefined."""

    def __init__(self, path, start, end):
        """
        :param path: backing Path object
        :param start: first index of the window into the backing path
        :param end: last index of the window (inclusive)
        """
        self._path = path
        self._start = start
        self._end = end

    def __copy__(self):
        return Subpath(Path(self._path), self._start, self._end)

    def __getitem__(self, index):
        return self._path[self.index_to_path_index(index)]

    def __setitem__(self, index, value):
        self._path[self.index_to_path_index(index)] = value

    def __delitem__(self, index):
        del self._path[self.index_to_path_index(index)]
        self._end -= 1

    def __iadd__(self, other):
        # Appended material is inserted into the backing path at the window end.
        if isinstance(other, str):
            p = Path(other)
            self._path[self._end : self._end] = p
        elif isinstance(other, Path):
            p = copy(other)
            self._path[self._end : self._end] = p
        elif isinstance(other, PathSegment):
            self._path.insert(self._end, other)
        else:
            return NotImplemented
        return self

    def __add__(self, other):
        if isinstance(other, (str, Path, PathSegment)):
            n = copy(self)
            n += other
            return n
        return NotImplemented

    def __radd__(self, other):
        if isinstance(other, str):
            path = Path(other)
            path.extend(map(copy, self._path))
            return path
        elif isinstance(other, PathSegment):
            path = Path(self)
            path.insert(0, other)
            return path
        else:
            return NotImplemented

    def __imul__(self, other):
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            for e in self:
                e *= other
        return self

    def __mul__(self, other):
        if isinstance(other, (Matrix, str)):
            n = copy(self)
            n *= other
            return n
        return NotImplemented

    __rmul__ = __mul__

    def __iter__(self):
        # Iterate the backing path between _start and _end inclusive.
        class Iterator:
            def __init__(self, subpath):
                self.n = subpath._start - 1
                self.subpath = subpath

            def __next__(self):
                self.n += 1
                try:
                    if self.n > self.subpath._end:
                        raise StopIteration
                    return self.subpath._path[self.n]
                except IndexError:
                    raise StopIteration

            next = __next__

        return Iterator(self)

    def __len__(self):
        return self._end - self._start + 1

    def __str__(self):
        return self.d()

    def __repr__(self):
        # NOTE(review): intentionally prints "Path(...)" — a Subpath's repr
        # reproduces the equivalent Path; kept for backward compatibility.
        return "Path(%s)" % (", ".join(repr(x) for x in self))

    def __eq__(self, other):
        if isinstance(other, str):
            return self.__eq__(Path(other))
        if not isinstance(other, (Path, Subpath)):
            return NotImplemented
        if len(self) != len(other):
            return False
        for s, o in zip(self, other):
            if not s == o:
                return False
        return True

    def __ne__(self, other):
        if not isinstance(other, (Path, Subpath, str)):
            return NotImplemented
        return not self == other

    def segments(self, transformed=True):
        """Return the windowed segments, optionally with the backing path's
        transform applied."""
        path = self._path
        if transformed:
            return [
                s * path.transform for s in path._segments[self._start : self._end + 1]
            ]
        return path._segments[self._start : self._end + 1]

    def _numeric_index(self, index):
        # Map a window-relative integer index to a backing-path index;
        # negative indexes count back from the window end.
        if index < 0:
            return self._end + index + 1
        else:
            return self._start + index

    def index_to_path_index(self, index):
        """Convert a subpath-relative index or slice into backing-path terms."""
        if isinstance(index, slice):
            start = index.start
            stop = index.stop
            step = index.step
            if start is None:
                start = 0
            start = self._numeric_index(start)
            if stop is None:
                stop = len(self)
            stop = self._numeric_index(stop)
            return slice(start, stop, step)
        return self._numeric_index(index)

    def bbox(self, transformed=True, with_stroke=False):
        """returns a bounding box for the subpath"""
        if transformed:
            return Path(self).bbox(transformed=transformed, with_stroke=with_stroke)
        segments = self._path._segments[self._start : self._end + 1]
        # Fix: the original filter tested isinstance(Close, Move) — a
        # class-vs-class check that is always False, so every segment was
        # included. Move/Close segments add no drawn geometry; exclude them.
        bbs = [seg.bbox() for seg in segments if not isinstance(seg, (Close, Move))]
        try:
            xmins, ymins, xmaxs, ymaxs = list(zip(*bbs))
        except ValueError:
            return None  # No bounding box items existed. So no bounding box.
        if with_stroke and self._path.stroke_width is not None:
            delta = float(self._path.stroke_width) / 2.0
        else:
            delta = 0.0
        return (
            min(xmins) - delta,
            min(ymins) - delta,
            max(xmaxs) + delta,
            max(ymaxs) + delta,
        )

    def d(self, relative=None, smooth=None):
        """Return the SVG path_d string for this subpath window.

        Fix: `smooth` is now forwarded to Path.svg_d — the original always
        passed smooth=None, silently discarding the caller's argument.
        """
        segments = self._path._segments[self._start : self._end + 1]
        return Path.svg_d(segments, relative=relative, smooth=smooth)

    def _reverse_segments(self, start, end):
        """Reverses segments between the given indexes in the subpath space."""
        segments = self._path._segments  # must avoid path validation.
        s = self.index_to_path_index(start)
        e = self.index_to_path_index(end)
        while s <= e:
            start_segment = segments[s]
            end_segment = segments[e]
            start_segment.reverse()
            if start_segment is not end_segment:
                end_segment.reverse()
                segments[s] = end_segment
                segments[e] = start_segment
            s += 1
            e -= 1
        start = self.index_to_path_index(start)
        end = self.index_to_path_index(end)
        self._path._validate_connection(start - 1, prefer_second=True)
        self._path._validate_connection(end)

    def reverse(self):
        """Reverse the subpath in place, preserving any leading Move and
        trailing Close semantics."""
        size = len(self)
        if size == 0:
            return
        start = 0
        end = size - 1
        if isinstance(self[-1], Close):
            end -= 1
        if isinstance(
            self[0], Move
        ):  # Move remains in place but references next element.
            start += 1
        self._reverse_segments(start, end)
        if size > 1:
            if isinstance(self[0], Move):
                self[0].end = Point(self[1].start)
        last = self[-1]
        if isinstance(last, Close):
            last.reverse()
            if last.start != self[-2].end:
                last.start = Point(self[-2].end)
            if last.end != self[0].end:
                last.end = Point(self[0].end)
        return self
class Group(SVGElement, Transformable, list):
    """
    Group Container element can have children.
    SVG 2.0 <g> are defined in:
    5.2. Grouping: the g element
    """

    def __init__(self, *args, **kwargs):
        Transformable.__init__(self, *args, **kwargs)
        list.__init__(self)
        if len(args) >= 1:
            s = args[0]
            if isinstance(s, Group):
                # Copy-construct: duplicate the source group's children.
                self.extend(list(map(copy, s)))
        SVGElement.__init__(self, *args, **kwargs)

    def __imul__(self, other):
        if isinstance(other, str):
            other = Matrix(other)
        if isinstance(other, Matrix):
            self.transform *= other
        # Children are multiplied regardless; they apply the matrix themselves.
        for e in self:
            e *= other
        return self

    def render(self, **kwargs):
        # NOTE(review): unlike the shape classes, render here returns None —
        # callers should not chain it. Confirm before relying on its result.
        Transformable.render(self, **kwargs)

    def __copy__(self):
        return Group(self)

    def property_by_object(self, s):
        Transformable.property_by_object(self, s)
        SVGElement.property_by_object(self, s)

    def property_by_values(self, values):
        Transformable.property_by_values(self, values)
        SVGElement.property_by_values(self, values)

    def select(self, conditional=None):
        """
        Finds all flattened subobjects of this group for which the conditional returns
        true.
        :param conditional: function taking element and returns True to include or False if exclude
        """
        if conditional is None:
            # No filter: yield every element, recursing into nested groups.
            for subitem in self:
                yield subitem
                if isinstance(subitem, Group):
                    for s in subitem.select(conditional):
                        yield s
        else:
            for subitem in self:
                if conditional(subitem):
                    yield subitem
                # Descend into child groups even if the group itself was excluded.
                if isinstance(subitem, Group):
                    for s in subitem.select(conditional):
                        yield s

    def reify(self):
        # Only the group's own transform is reified; children keep theirs.
        Transformable.reify(self)

    @staticmethod
    def union_bbox(elements, transformed=True, with_stroke=False):
        """
        Returns the union of the bounding boxes for the elements within the iterator.
        :param transformed: Should the children of this object be properly transformed.
        :param with_stroke: should the stroke-width be included in the bounds of the elements
        :return: union of all bounding boxes of elements within the iterable.
        """
        boundary_points = []
        for e in elements:
            if not hasattr(e, "bbox"):
                continue
            # Take each element's untransformed box, then map its corners
            # through the element's own matrix if requested.
            box = e.bbox(transformed=False, with_stroke=with_stroke)
            if box is None:
                continue
            top_left = (box[0], box[1])
            top_right = (box[2], box[1])
            bottom_left = (box[0], box[3])
            bottom_right = (box[2], box[3])
            if transformed:
                top_left = e.transform.point_in_matrix_space(top_left)
                top_right = e.transform.point_in_matrix_space(top_right)
                bottom_left = e.transform.point_in_matrix_space(bottom_left)
                bottom_right = e.transform.point_in_matrix_space(bottom_right)
            boundary_points.append(top_left)
            boundary_points.append(top_right)
            boundary_points.append(bottom_left)
            boundary_points.append(bottom_right)
        if len(boundary_points) == 0:
            return None
        xmin = min([e[0] for e in boundary_points])
        ymin = min([e[1] for e in boundary_points])
        xmax = max([e[0] for e in boundary_points])
        ymax = max([e[1] for e in boundary_points])
        return xmin, ymin, xmax, ymax

    def bbox(self, transformed=True, with_stroke=False):
        """
        Returns the bounding box of the given object.
        In the case of groups this is the union of all the bounding boxes of all bound children.
        Setting transformed to false, may yield unexpected results if subitems are transformed in non-uniform
        ways.
        :param transformed: bounding box of the properly transformed children.
        :param with_stroke: should the stroke-width be included in the bounds.
        :return: bounding box of the given element
        """
        return Group.union_bbox(
            self.select(),
            transformed=transformed,
            with_stroke=with_stroke,
        )
class ClipPath(SVGElement, list):
    """Container for clip-path content (SVG 1.1 section 14.3.5,
    https://www.w3.org/TR/SVG11/masking.html#ClipPathElement).

    A clipPath conceptually defines a 1-bit mask. These elements usually live
    inside defs blocks; they are not rendered directly but are attached to
    other elements by IRI reference.
    """

    def __init__(self, *args, **kwargs):
        list.__init__(self)
        # Default clip unit type per the SVG specification.
        self.unit_type = SVG_UNIT_TYPE_USERSPACEONUSE
        SVGElement.__init__(self, *args, **kwargs)

    def property_by_object(self, s):
        SVGElement.property_by_object(self, s)
        self.unit_type = s.unit_type

    def property_by_values(self, values):
        SVGElement.property_by_values(self, values)
        unit = self.values.get(SVG_ATTR_CLIP_UNIT_TYPE, SVG_UNIT_TYPE_USERSPACEONUSE)
        self.unit_type = unit
class Pattern(SVGElement, list):
    """SVG pattern element: a tiled paint-server container with position,
    size, optional viewbox, and pattern-specific unit/transform attributes."""

    def __init__(self, *args, **kwargs):
        self.viewbox = None
        self.preserve_aspect_ratio = None
        self.x = None
        self.y = None
        self.width = None
        self.height = None
        self.href = None
        self.pattern_content_units = None  # UserSpaceOnUse default
        self.pattern_transform = None
        self.pattern_units = None
        SVGElement.__init__(self, *args, **kwargs)

    def __int__(self):
        return 0

    @property
    def viewbox_transform(self):
        """Transform string mapping the viewbox onto this pattern's extent."""
        if self.viewbox is None:
            return ""
        return self.viewbox.transform(self)

    def property_by_object(self, s):
        SVGElement.property_by_object(self, s)
        self.viewbox = s.viewbox
        self.preserve_aspect_ratio = s.preserve_aspect_ratio
        self.x = s.x
        self.y = s.y
        self.width = s.width
        self.height = s.height
        self.href = s.href
        # Fix: the original read s.pattern_contents_units (typo), which raised
        # AttributeError when copy-constructing — the attribute defined in
        # __init__ is pattern_content_units.
        self.pattern_content_units = s.pattern_content_units
        self.pattern_transform = (
            Matrix(s.pattern_transform) if s.pattern_transform is not None else None
        )
        self.pattern_units = s.pattern_units

    def property_by_values(self, values):
        SVGElement.property_by_values(self, values)
        if XLINK_HREF in values:
            self.href = values[XLINK_HREF]
        elif SVG_HREF in values:
            self.href = values[SVG_HREF]
        viewbox = values.get(SVG_ATTR_VIEWBOX)
        if viewbox is not None:
            self.viewbox = Viewbox(viewbox)
        if SVG_ATTR_PRESERVEASPECTRATIO in values:
            self.preserve_aspect_ratio = values[SVG_ATTR_PRESERVEASPECTRATIO]
        self.x = Length(values.get(SVG_ATTR_X, 0)).value()
        self.y = Length(values.get(SVG_ATTR_Y, 0)).value()
        self.width = Length(values.get(SVG_ATTR_WIDTH, "100%")).value()
        self.height = Length(values.get(SVG_ATTR_HEIGHT, "100%")).value()
        if SVG_ATTR_PATTERN_CONTENT_UNITS in values:
            self.pattern_content_units = values[SVG_ATTR_PATTERN_CONTENT_UNITS]
        if SVG_ATTR_PATTERN_TRANSFORM in values:
            self.pattern_transform = Matrix(values[SVG_ATTR_PATTERN_TRANSFORM])
        if SVG_ATTR_PATTERN_UNITS in values:
            self.pattern_units = values[SVG_ATTR_PATTERN_UNITS]

    def render(self, **kwargs):
        """Resolve Length-valued properties against the rendered dimensions."""
        if self.pattern_transform is not None:
            self.pattern_transform.render(**kwargs)
        width = kwargs.get("width", kwargs.get("relative_length"))
        height = kwargs.get("height", kwargs.get("relative_length"))
        try:
            del kwargs["relative_length"]
        except KeyError:
            pass
        if isinstance(self.x, Length):
            self.x = self.x.value(relative_length=width, **kwargs)
        if isinstance(self.y, Length):
            self.y = self.y.value(relative_length=height, **kwargs)
        if isinstance(self.width, Length):
            self.width = self.width.value(relative_length=width, **kwargs)
        if isinstance(self.height, Length):
            self.height = self.height.value(relative_length=height, **kwargs)
        return self
class Text(SVGElement, GraphicObject, Transformable):
    """
    SVG Text are defined in SVG 2.0 Chapter 11
    No methods are implemented to perform a text to path conversion.
    However, if such a method exists the assumption is that the results will be
    placed in the .path attribute, and functions like bbox() will check if such
    a value exists.
    """

    def __init__(self, *args, **kwargs):
        if len(args) >= 1:
            self.text = args[0]
        else:
            self.text = ""
        self.width = 0
        self.height = 0
        self.x = 0
        self.y = 0
        self.dx = 0
        self.dy = 0
        self.anchor = "start"  # start, middle, end.
        # NOTE(review): "san-serif" looks like a typo for "sans-serif";
        # kept as-is for backward compatibility — confirm before changing.
        self.font_family = "san-serif"
        self.font_size = 16.0  # 16 point font 'normal'
        self.font_weight = 400.0  # Thin=100, Normal=400, Bold=700
        self.font_face = ""
        self.path = None
        Transformable.__init__(self, *args, **kwargs)
        GraphicObject.__init__(self, *args, **kwargs)
        SVGElement.__init__(self, *args, **kwargs)

    def __str__(self):
        values = list()
        values.append("'%s'" % self.text)
        values.append("%s='%s'" % (SVG_ATTR_FONT_FAMILY, self.font_family))
        if self.font_face:
            values.append("%s=%s" % (SVG_ATTR_FONT_FACE, self.font_face))
        values.append("%s=%d" % (SVG_ATTR_FONT_SIZE, self.font_size))
        values.append("%s='%s'" % (SVG_ATTR_FONT_WEIGHT, str(self.font_weight)))
        values.append("%s='%s'" % (SVG_ATTR_TEXT_ANCHOR, self.anchor))
        if self.x != 0 or self.y != 0:
            values.append("%s=%s" % (SVG_ATTR_X, self.x))
            values.append("%s=%s" % (SVG_ATTR_Y, self.y))
        if self.dx != 0 or self.dy != 0:
            values.append("%s=%s" % (SVG_ATTR_DX, self.dx))
            values.append("%s=%s" % (SVG_ATTR_DY, self.dy))
        if self.stroke is not None:
            values.append("%s='%s'" % (SVG_ATTR_STROKE, self.stroke))
        if self.fill is not None:
            values.append("%s='%s'" % (SVG_ATTR_FILL, self.fill))
        if self.stroke_width is not None and self.stroke_width != 1.0:
            values.append("%s=%s" % (SVG_ATTR_STROKE_WIDTH, str(self.stroke_width)))
        if not self.transform.is_identity():
            values.append("%s=%s" % (SVG_ATTR_TRANSFORM, repr(self.transform)))
        if self.id is not None:
            values.append("%s='%s'" % (SVG_ATTR_ID, self.id))
        return "Text(%s)" % (", ".join(values))

    def __repr__(self):
        # Cannot use SVG_ATTR_FONT_* or SVG_ATTR_TEXT_ANCHOR for repr because they contain hyphens
        values = list()
        values.append("'%s'" % self.text)
        values.append("font_family='%s'" % self.font_family)
        if self.font_face:
            values.append("font_face=%s" % self.font_face)
        values.append("font_size=%d" % self.font_size)
        values.append("font_weight='%s'" % str(self.font_weight))
        values.append("text_anchor='%s'" % self.anchor)
        if self.x != 0 or self.y != 0:
            values.append("%s=%s" % (SVG_ATTR_X, self.x))
            values.append("%s=%s" % (SVG_ATTR_Y, self.y))
        if self.dx != 0 or self.dy != 0:
            values.append("%s=%s" % (SVG_ATTR_DX, self.dx))
            values.append("%s=%s" % (SVG_ATTR_DY, self.dy))
        if self.stroke is not None:
            values.append("%s='%s'" % (SVG_ATTR_STROKE, self.stroke))
        if self.fill is not None:
            values.append("%s='%s'" % (SVG_ATTR_FILL, self.fill))
        if self.stroke_width is not None and self.stroke_width != 1.0:
            values.append(
                "stroke_width=%s" % str(self.stroke_width)
            )  # Cannot use SVG_ATTR_STROKE_WIDTH for repr because it contains a hyphen
        if not self.transform.is_identity():
            values.append("%s=%s" % (SVG_ATTR_TRANSFORM, repr(self.transform)))
        if self.id is not None:
            values.append("%s='%s'" % (SVG_ATTR_ID, self.id))
        return "Text(%s)" % (", ".join(values))

    def __eq__(self, other):
        if not isinstance(other, Text):
            return NotImplemented
        if self.text != other.text:
            return False
        if self.width != other.width:
            return False
        if self.height != other.height:
            return False
        if self.x != other.x:
            return False
        if self.y != other.y:
            return False
        if self.dx != other.dx:
            return False
        if self.dy != other.dy:
            return False
        if self.anchor != other.anchor:
            return False
        if self.font_family != other.font_family:
            return False
        if self.font_size != other.font_size:
            return False
        if self.font_weight != other.font_weight:
            return False
        return self.font_face == other.font_face

    def __ne__(self, other):
        if not isinstance(other, Text):
            return NotImplemented
        return not self == other

    def property_by_object(self, s):
        Transformable.property_by_object(self, s)
        GraphicObject.property_by_object(self, s)
        SVGElement.property_by_object(self, s)
        self.text = s.text
        self.x = s.x
        self.y = s.y
        self.width = s.width
        self.height = s.height
        self.dx = s.dx
        self.dy = s.dy
        self.anchor = s.anchor
        self.font_family = s.font_family
        self.font_size = s.font_size
        self.font_weight = s.font_weight
        self.font_face = s.font_face

    def parse_font(self, font):
        """
        CSS Fonts 3 has a shorthand font property which serves to provide a single location to define:
        `font-style`, `font-variant`, `font-weight`, `font-stretch`, `font-size`, `line-height`, and `font-family`
        font-style: normal | italic | oblique
        font-variant: normal | small-caps
        font-weight: normal | bold | bolder | lighter | 100 | 200 | 300 | 400 | 500 | 600 | 700 | 800 | 900
        font-stretch: normal | ultra-condensed | extra-condensed | condensed | semi-condensed | semi-expanded | expanded | extra-expanded | ultra-expanded
        font-size: <absolute-size> | <relative-size> | <length-percentage>
        line-height: '/' <`line-height`>
        font-family: [ <family-name> | <generic-family> ] #
        generic-family: `serif`, `sans-serif`, `cursive`, `fantasy`, and `monospace`
        """
        # https://www.w3.org/TR/css-fonts-3/#font-prop
        font_elements = list(*re.findall(REGEX_CSS_FONT, font))
        font_style = font_elements[0]
        font_variant = font_elements[1]
        font_weight = font_elements[2]
        font_stretch = font_elements[3]
        font_size = font_elements[4]
        line_height = font_elements[5]
        font_face = font_elements[6]
        font_family = font_elements[7]
        if len(font_weight) > 0:
            self.font_weight = self.parse_font_weight(font_weight)
        if len(font_size) > 0:
            self.font_size = Length(font_size).value()
        if len(font_face) > 0:
            if font_face.endswith(","):
                font_face = font_face[:-1]
            self.font_face = font_face
        if len(font_family) > 0:
            self.font_family = font_family

    def parse_font_weight(self, weight):
        """Map a CSS font-weight token to a numeric weight (default 400)."""
        if weight == "bold":
            return 700
        if weight == "normal":
            return 400
        try:
            return int(weight)
        except (ValueError, TypeError):
            # Fix: int() raises ValueError (or TypeError for e.g. None), never
            # KeyError as the original caught — non-numeric weights previously
            # propagated an unhandled ValueError instead of defaulting to 400.
            return 400

    def property_by_values(self, values):
        Transformable.property_by_values(self, values)
        GraphicObject.property_by_values(self, values)
        self.anchor = values.get(SVG_ATTR_TEXT_ANCHOR, self.anchor)
        # Fix: keep the current default when "font_face" is absent — the
        # original values.get("font_face") reset it to None, unlike every
        # other attribute below which defaults to its current value.
        self.font_face = values.get("font_face", self.font_face)
        self.font_face = values.get(SVG_ATTR_FONT_FACE, self.font_face)
        self.font_family = values.get("font_family", self.font_family)
        self.font_family = values.get(SVG_ATTR_FONT_FAMILY, self.font_family)
        self.font_size = Length(values.get("font_size", self.font_size)).value()
        self.font_size = Length(values.get(SVG_ATTR_FONT_SIZE, self.font_size)).value()
        self.font_weight = values.get("font_weight", self.font_weight)
        self.font_weight = values.get(SVG_ATTR_FONT_WEIGHT, self.font_weight)
        self.font_weight = self.parse_font_weight(self.font_weight)
        self.anchor = values.get("text_anchor", self.anchor)
        self.anchor = values.get(SVG_ATTR_TEXT_ANCHOR, self.anchor)
        font = values.get(SVG_ATTR_FONT, None)
        if font is not None:
            self.parse_font(font)
        self.text = values.get(SVG_TAG_TEXT, self.text)
        self.x = Length(values.get(SVG_ATTR_X, self.x)).value()
        self.y = Length(values.get(SVG_ATTR_Y, self.y)).value()
        self.dx = Length(values.get(SVG_ATTR_DX, self.dx)).value()
        self.dy = Length(values.get(SVG_ATTR_DY, self.dy)).value()

    def reify(self):
        GraphicObject.reify(self)
        Transformable.reify(self)

    def render(self, **kwargs):
        """Resolve Length-valued coordinates against rendered width/height."""
        GraphicObject.render(self, **kwargs)
        Transformable.render(self, **kwargs)
        width = kwargs.get("width", kwargs.get("relative_length"))
        height = kwargs.get("height", kwargs.get("relative_length"))
        try:
            del kwargs["relative_length"]
        except KeyError:
            pass
        if isinstance(self.x, Length):
            self.x = self.x.value(relative_length=width, **kwargs)
        if isinstance(self.y, Length):
            self.y = self.y.value(relative_length=height, **kwargs)
        if isinstance(self.dx, Length):
            self.dx = self.dx.value(relative_length=width, **kwargs)
        if isinstance(self.dy, Length):
            self.dy = self.dy.value(relative_length=height, **kwargs)
        return self

    def __copy__(self):
        return Text(self)

    def bbox(self, transformed=True, with_stroke=False):
        """
        Get the bounding box for the given text object.
        :param transformed: whether this is the transformed bounds or default.
        :param with_stroke: should the stroke-width be included in the bounds.
        :return: bounding box of the given element
        """
        if self.path is not None:
            return (self.path * self.transform).bbox(
                transformed=True,
                with_stroke=with_stroke,
            )
        width = self.width
        height = self.height
        xmin = self.x
        ymin = self.y - height
        xmax = self.x + width
        ymax = self.y
        # Shift the box horizontally by the text anchor alignment.
        if not hasattr(self, "anchor") or self.anchor == "start":
            pass
        elif self.anchor == "middle":
            xmin -= width / 2
            xmax -= width / 2
        elif self.anchor == "end":
            xmin -= width
            xmax -= width
        if transformed:
            p0 = self.transform.transform_point([xmin, ymin])
            p1 = self.transform.transform_point([xmin, ymax])
            p2 = self.transform.transform_point([xmax, ymin])
            p3 = self.transform.transform_point([xmax, ymax])
            xmin = min(p0[0], p1[0], p2[0], p3[0])
            ymin = min(p0[1], p1[1], p2[1], p3[1])
            xmax = max(p0[0], p1[0], p2[0], p3[0])
            ymax = max(p0[1], p1[1], p2[1], p3[1])
        if with_stroke and self.stroke_width is not None:
            if transformed:
                delta = float(self.implicit_stroke_width) / 2.0
            else:
                delta = float(self.stroke_width) / 2.0
        else:
            delta = 0.0
        return (
            xmin - delta,
            ymin - delta,
            xmax + delta,
            ymax + delta,
        )
SVGText = Text  # Backward-compatible alias for the Text class.
class Image(SVGElement, GraphicObject, Transformable):
    """
    SVG Images are defined in SVG 2.0 12.3
    This class is called SVG Image rather than image as a guard against many Image objects
    which are quite useful and would be ideal for reading the linked or contained data.
    """

    def __init__(self, *args, **kwargs):
        self.url = None
        self.data = None
        self.media_type = None
        self.viewbox = None
        self.preserve_aspect_ratio = None
        self.x = None
        self.y = None
        self.width = None
        self.height = None
        self.image = None  # Optional decoded image (e.g. PIL Image), if loaded.
        self.image_width = None
        self.image_height = None
        Transformable.__init__(self, *args, **kwargs)
        GraphicObject.__init__(self, *args, **kwargs)
        SVGElement.__init__(
            self, *args, **kwargs
        )  # Dataurl requires this be processed first.
        if self.url is not None:
            match = REGEX_DATA_URL.match(self.url)
            if match:
                # Data URL
                # group(1): media-type/parameters; group(2): the payload.
                self.media_type = match.group(1).split(";")
                self.data = match.group(2)
                if "base64" in self.media_type:
                    from base64 import b64decode

                    self.data = b64decode(self.data)
                else:
                    from urllib.parse import unquote_to_bytes

                    self.data = unquote_to_bytes(self.data)

    def __repr__(self):
        values = []
        if self.x != 0:
            values.append("%s=%s" % (SVG_ATTR_X, Length.str(self.x)))
        if self.y != 0:
            values.append("%s=%s" % (SVG_ATTR_Y, Length.str(self.y)))
        if self.width != "100%":
            values.append("%s=%s" % (SVG_ATTR_WIDTH, Length.str(self.width)))
        if self.height != "100%":
            values.append("%s=%s" % (SVG_ATTR_HEIGHT, Length.str(self.height)))
        if self.preserve_aspect_ratio is not None:
            values.append(
                "%s=%s" % (SVG_ATTR_PRESERVEASPECTRATIO, self.preserve_aspect_ratio)
            )
        if self.viewbox is not None:
            values.append("%s='%s'" % (SVG_ATTR_VIEWBOX, str(self.viewbox)))
        if self.url is not None:
            values.append("%s='%s'" % (SVG_HREF, self.url))
        if not self.transform.is_identity():
            values.append("transform=%s" % repr(self.transform))
        params = ", ".join(values)
        return "Image(%s)" % params

    def __eq__(self, other):
        if not isinstance(other, Image):
            return NotImplemented
        if self.url != other.url:
            return False
        if self.data != other.data:
            return False
        if self.width != other.width:
            return False
        if self.height != other.height:
            return False
        if self.x != other.x:
            return False
        if self.y != other.y:
            return False
        if self.image != other.image:
            return False
        if self.viewbox != other.viewbox:
            return False
        return self.preserve_aspect_ratio == other.preserve_aspect_ratio

    def __ne__(self, other):
        if not isinstance(other, Image):
            return NotImplemented
        return not self == other

    def property_by_object(self, s):
        SVGElement.property_by_object(self, s)
        Transformable.property_by_object(self, s)
        GraphicObject.property_by_object(self, s)
        self.url = s.url
        self.data = s.data
        self.viewbox = s.viewbox
        self.preserve_aspect_ratio = s.preserve_aspect_ratio
        self.x = s.x
        self.y = s.y
        self.width = s.width
        self.height = s.height
        self.image = s.image
        self.image_width = s.image_width
        self.image_height = s.image_height

    def property_by_values(self, values):
        SVGElement.property_by_values(self, values)
        Transformable.property_by_values(self, values)
        GraphicObject.property_by_values(self, values)
        if XLINK_HREF in values:
            self.url = values[XLINK_HREF]
        elif SVG_HREF in values:
            self.url = values[SVG_HREF]
        viewbox = values.get(SVG_ATTR_VIEWBOX)
        if viewbox is not None:
            self.viewbox = Viewbox(viewbox)
        if SVG_ATTR_PRESERVEASPECTRATIO in values:
            # "none" is normalized to Python None.
            if values[SVG_ATTR_PRESERVEASPECTRATIO] == SVG_VALUE_NONE:
                self.preserve_aspect_ratio = None
            else:
                self.preserve_aspect_ratio = values[SVG_ATTR_PRESERVEASPECTRATIO]
        self.x = Length(values.get(SVG_ATTR_X, 0)).value()
        self.y = Length(values.get(SVG_ATTR_Y, 0)).value()
        self.width = Length(values.get(SVG_ATTR_WIDTH, "100%")).value()
        self.height = Length(values.get(SVG_ATTR_HEIGHT, "100%")).value()
        if "image" in values:
            # Pre-decoded image object supplied directly in the values dict.
            self.image = values["image"]
            self.image_width, self.image_height = self.image.size

    def render(self, **kwargs):
        # Resolve Length-valued position/size against rendered width/height.
        GraphicObject.render(self, **kwargs)
        Transformable.render(self, **kwargs)
        width = kwargs.get("width", kwargs.get("relative_length"))
        height = kwargs.get("height", kwargs.get("relative_length"))
        try:
            del kwargs["relative_length"]
        except KeyError:
            pass
        if isinstance(self.x, Length):
            self.x = self.x.value(relative_length=width, **kwargs)
        if isinstance(self.y, Length):
            self.y = self.y.value(relative_length=height, **kwargs)
        if isinstance(self.width, Length):
            self.width = self.width.value(relative_length=width, **kwargs)
        if isinstance(self.height, Length):
            self.height = self.height.value(relative_length=height, **kwargs)
        return self

    def __copy__(self):
        """
        Copy of Image. This will not copy the .image subobject in a deep manner
        since it's optional that that object will exist or not. As such if using PIL it would
        be required to either say self.image = self.image.copy() or call .load() again.
        """
        return Image(self)

    @property
    def viewbox_transform(self):
        """Transform string mapping the viewbox onto this image's extent."""
        if self.viewbox is None:
            return ""
        return self.viewbox.transform(self)

    def load(self, directory=None):
        # Best-effort: requires PIL/Pillow; silently does nothing without it.
        try:
            from PIL import Image as PILImage

            if self.data is not None:
                self.load_data()
            elif self.url is not None:
                self.load_file(directory)
            self.set_values_by_image()
        except ImportError:
            pass

    def load_data(self):
        try:
            # This code will not activate without PIL/Pillow installed.
            from PIL import Image as PILImage

            if self.data is not None:
                from io import BytesIO

                self.image = PILImage.open(BytesIO(self.data))
            else:
                return
        except ImportError:
            # PIL/Pillow not found, decoding data is most we can do.
            pass

    def load_file(self, directory):
        try:
            # This code will not activate without PIL/Pillow installed.
            from PIL import Image as PILImage

            if self.url is not None:
                try:
                    self.image = PILImage.open(self.url)
                except IOError:
                    try:
                        # Retry the url relative to the given directory.
                        if directory is not None:
                            from os.path import join

                            relpath = join(directory, self.url)
                            self.image = PILImage.open(relpath)
                    except IOError:
                        return
        except ImportError:
            # PIL/Pillow not found, decoding data is most we can do.
            pass

    def set_values_by_image(self):
        """Sync width/height/viewbox/transform from the decoded image."""
        if self.image is None:
            return  # No image to set values by.
        self.image_width = self.image.width
        self.image_height = self.image.height
        self.viewbox = Viewbox(
            "0 0 %d %d" % (self.image_width, self.image_height),
            self.preserve_aspect_ratio,
        )
        self.render(width=self.image_width, height=self.image_height)
        self.transform = Matrix(self.viewbox_transform) * self.transform

    def bbox(self, transformed=True, with_stroke=False):
        """
        Get the bounding box for the given image object
        :param transformed: whether this is the transformed bounds or default.
        :param with_stroke: There is no stroke for an image so with_stroke is ignored
        :return: bounding box of the given element
        """
        if self.image_width is None or self.image_height is None:
            # Unloaded image: collapse to the transformed origin point.
            p = Point(0, 0)
            p *= self.transform
            return p.x, p.y, p.x, p.y
        width = self.image_width
        height = self.image_height
        if transformed:
            p = (
                Point(0, 0) * self.transform,
                Point(width, 0) * self.transform,
                Point(width, height) * self.transform,
                Point(0, height) * self.transform,
            )
        else:
            p = (Point(0, 0), Point(width, 0), Point(width, height), Point(0, height))
        x_vals = list(s.x for s in p)
        y_vals = list(s.y for s in p)
        min_x = min(x_vals)
        min_y = min(y_vals)
        max_x = max(x_vals)
        max_y = max(y_vals)
        return min_x, min_y, max_x, max_y
SVGImage = Image  # Backward-compatible alias for the Image class.
class Desc(SVGElement):
    """SVG <desc> element: carries a free-text description of its parent."""
    def __init__(self, *args, **values):
        self.desc = None  # Description text, set via args or values.
        if values is None:
            values = dict()
        SVGElement.__init__(self, *args, **values)
    def __eq__(self, other):
        # Only another Desc with identical text compares equal.
        return isinstance(other, Desc) and self.desc == other.desc
    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self.desc)
    def property_by_args(self, *args):
        # A lone positional argument is the description text itself.
        if len(args) == 1:
            self.desc = args[0]
    def property_by_object(self, obj):
        """Copy description state from another Desc."""
        SVGElement.property_by_object(self, obj)
        self.desc = obj.desc
    def property_by_values(self, values):
        """Pull the description out of a parsed attribute dictionary."""
        SVGElement.property_by_values(self, values)
        if SVG_TAG_DESC in values:
            self.desc = values[SVG_TAG_DESC]
SVGDesc = Desc  # Backwards-compatible alias for the <desc> element class.
class Title(SVGElement):
    """SVG <title> element: carries the accessible title of its parent."""
    def __init__(self, *args, **values):
        self.title = None  # Title text, set via args or values.
        if values is None:
            values = dict()
        SVGElement.__init__(self, *args, **values)
    def __eq__(self, other):
        # Only another Title with identical text compares equal.
        return isinstance(other, Title) and self.title == other.title
    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self.title)
    def property_by_args(self, *args):
        # A lone positional argument is the title text itself.
        if len(args) == 1:
            self.title = args[0]
    def property_by_object(self, obj):
        """Copy title state from another Title."""
        SVGElement.property_by_object(self, obj)
        self.title = obj.title
    def property_by_values(self, values):
        """Pull the title out of a parsed attribute dictionary."""
        SVGElement.property_by_values(self, values)
        if SVG_TAG_TITLE in values:
            self.title = values[SVG_TAG_TITLE]
class SVG(Group):
    """
    SVG Document and Parsing.
    SVG is the SVG main object and also the embedded SVGs within it. It's a subtype of Group. The SVG has a viewbox,
    and parsing methods which can be used if given a stream, path, or svg string.
    """
    def __init__(self, *args, **kwargs):
        # Map of element id -> parsed element, used to resolve url(#id) refs.
        self.objects = {}
        self.x = None
        self.y = None
        self.width = None
        self.height = None
        self.viewbox = None
        Group.__init__(self, *args, **kwargs)
    @property
    def implicit_position(self):
        """Origin of this SVG; transformed when the apply flag is set."""
        if not self.apply:
            return Point(self.x, self.y)
        point = Point(self.x, self.y)
        point *= self.transform
        return point
    @property
    def implicit_x(self):
        """X coordinate of the (possibly transformed) origin."""
        if not self.apply:
            return self.x
        return self.implicit_position[0]
    @property
    def implicit_y(self):
        """Y coordinate of the (possibly transformed) origin."""
        if not self.apply:
            return self.y
        return self.implicit_position[1]
    @property
    def implicit_width(self):
        """Width measured under the transform: distance from transformed origin to transformed (width, 0)."""
        if not self.apply:
            return self.width
        p = Point(self.width, 0)
        p *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(p)
    @property
    def implicit_height(self):
        """Height measured under the transform: distance from transformed origin to transformed (0, height)."""
        if not self.apply:
            return self.height
        p = Point(0, self.height)
        p *= self.transform
        origin = Point(0, 0)
        origin *= self.transform
        return origin.distance_to(p)
    def property_by_object(self, s):
        """Copy SVG-specific geometry (x, y, width, height, viewbox) from another SVG."""
        Group.property_by_object(self, s)
        self.x = s.x
        self.y = s.y
        self.width = s.width
        self.height = s.height
        self.viewbox = Viewbox(s.viewbox) if s.viewbox is not None else None
    def property_by_values(self, values):
        """Initialize geometry from a parsed attribute dictionary; width/height default to 100%."""
        Group.property_by_values(self, values)
        self.x = Length(values.get(SVG_ATTR_X, 0)).value()
        self.y = Length(values.get(SVG_ATTR_Y, 0)).value()
        self.width = Length(values.get(SVG_ATTR_WIDTH, "100%")).value()
        self.height = Length(values.get(SVG_ATTR_HEIGHT, "100%")).value()
        viewbox = values.get(SVG_ATTR_VIEWBOX)
        par = values.get(SVG_ATTR_PRESERVEASPECTRATIO)
        self.viewbox = Viewbox(viewbox, par) if viewbox is not None else None
    def get_element_by_id(self, id):
        """Return the parsed element registered under *id*, or None."""
        return self.objects.get(id)
    def get_element_by_url(self, url):
        """Resolve a url(#id) IRI reference to its parsed element (first match only)."""
        for _id in REGEX_IRI.findall(url):
            return self.get_element_by_id(_id)
    def render(self, **kwargs):
        """Resolve x/y/width/height Length values against the given viewport context."""
        Group.render(self, **kwargs)
        width = kwargs.get("width", kwargs.get("relative_length"))
        height = kwargs.get("height", kwargs.get("relative_length"))
        try:
            del kwargs["relative_length"]
        except KeyError:
            pass
        # Percent lengths resolve against the respective viewport dimension.
        self.width = Length(self.width).value(relative_length=width, **kwargs)
        self.height = Length(self.height).value(relative_length=height, **kwargs)
        self.x = Length(self.x).value(relative_length=width, **kwargs)
        self.y = Length(self.y).value(relative_length=height, **kwargs)
    def elements(self, conditional=None):
        """Yield self and all descendants that satisfy *conditional* (or everything when None)."""
        if conditional is None or conditional(self):
            yield self
        for q in self.select(conditional):
            yield q
    @property
    def viewbox_transform(self):
        """Transform string mapping this SVG's viewbox onto its viewport; empty when no viewbox."""
        if self.viewbox is None:
            return ""
        return self.viewbox.transform(self)
    @staticmethod
    def _shadow_iter(tag, elem, children):
        """Yield (tag, event, elem) start/end pairs for a shadow-tree node and its children, depth-first."""
        yield tag, "start", elem
        try:
            for t, e, c in children:
                for shadow_tag, shadow_event, shadow_elem in SVG._shadow_iter(t, e, c):
                    yield shadow_tag, shadow_event, shadow_elem
        except ValueError:
            """
            Strictly speaking it is possible to reference use from other use objects. If this is an infinite loop
            we should not block the rendering. Just say we finished. See: W3C, struct-use-12-f
            """
            pass
        yield tag, "end", elem
    @staticmethod
    def _use_structure_parse(source):
        """
        SVG structure pass: parses the svg file such that it creates the structure implied by reused objects in a
        generalized context. Objects ids are read and put into an unparsed shadow tree. <use> objects seamlessly contain
        their definitions.
        """
        defs = {}
        parent = None  # Define Root Node.
        children = list()
        for event, elem in iterparse(source, events=("start", "end", "start-ns")):
            try:
                tag = elem.tag
                if tag.startswith("{http://www.w3.org/2000/svg"):
                    tag = tag[28:]  # Removing namespace. http://www.w3.org/2000/svg:
            except AttributeError:
                # Comments/PIs have no string tag; pass them through unchanged.
                yield None, event, elem
                continue
            if event == "start":
                attributes = elem.attrib
                # Create new node.
                siblings = children  # Parent's children are now my siblings.
                parent = (parent, children)  # parent is now previous node context
                children = list()  # new node has no children.
                node = (tag, elem, children)  # define this node.
                siblings.append(node)  # siblings now includes this node.
                if SVG_TAG_USE == tag:
                    url = None
                    if XLINK_HREF in attributes:
                        url = attributes[XLINK_HREF]
                    if SVG_HREF in attributes:
                        url = attributes[SVG_HREF]
                    if url is not None:
                        # Per spec, x/y on <use> become an extra translate on
                        # the referenced content; fold them into transform.
                        transform = False
                        try:
                            x = attributes[SVG_ATTR_X]
                            del attributes[SVG_ATTR_X]
                            transform = True
                        except KeyError:
                            x = "0"
                        try:
                            y = attributes[SVG_ATTR_Y]
                            del attributes[SVG_ATTR_Y]
                            transform = True
                        except KeyError:
                            y = "0"
                        if transform:
                            try:
                                attributes[
                                    SVG_ATTR_TRANSFORM
                                ] = "%s translate(%s, %s)" % (
                                    attributes[SVG_ATTR_TRANSFORM],
                                    x,
                                    y,
                                )
                            except KeyError:
                                attributes[SVG_ATTR_TRANSFORM] = "translate(%s, %s)" % (
                                    x,
                                    y,
                                )
                        yield tag, event, elem
                        try:
                            # Replay the referenced definition as shadow children.
                            shadow_node = defs[url[1:]]
                            children.append(
                                shadow_node
                            )  # Shadow children are children of the use.
                            for n in SVG._shadow_iter(*shadow_node):
                                yield n
                        except KeyError:
                            pass  # Failed to find link.
                else:
                    yield tag, event, elem
                if SVG_ATTR_ID in attributes:  # If we have an ID, we save the node.
                    defs[attributes[SVG_ATTR_ID]] = node  # store node value in defs.
            elif event == "end":
                yield tag, event, elem
                # event is 'end', pop values.
                parent, children = parent  # Parent is now node.
    @staticmethod
    def parse(
        source,
        reify=True,
        ppi=DEFAULT_PPI,
        width=None,
        height=None,
        color="black",
        transform=None,
        context=None,
    ):
        """
        Parses the SVG file. All attributes are things which the SVG document itself could not be aware of, such as
        the real size of pixels and the size of the viewport (as opposed to the viewbox).
        :param source: Source svg file or stream.
        :param reify: Should the Geometry sized or have lazy matrices.
        :param ppi: How many physical pixels per inch are there in this view.
        :param width: The physical width of the viewport
        :param height: The physical height of the viewport
        :param color: the `currentColor` value from outside the current scope.
        :param transform: Any required transformations to be pre-applied to this document
        :param context: Any existing document context.
        :return:
        """
        clip = 0  # Depth counter for nested <clipPath> scopes.
        root = context
        styles = {}  # CSS selector -> declaration text, from <style> blocks.
        stack = []  # Saved (context, values) per open element.
        values = {
            SVG_ATTR_COLOR: color,
            SVG_ATTR_FILL: "black",
            SVG_ATTR_STROKE: "none",
        }
        if transform is not None:
            values[SVG_ATTR_TRANSFORM] = transform
        for tag, event, elem in SVG._use_structure_parse(source):
            """
            SVG element parsing parses the job compiling any parsed elements into their compiled object forms.
            """
            if event == "start":
                stack.append((context, values))
                if (
                    SVG_ATTR_DISPLAY in values
                    and values[SVG_ATTR_DISPLAY].lower() == SVG_VALUE_NONE
                ):
                    continue  # Values has a display=none. Do not render anything. No Shadow Dom.
                current_values = values
                values = {}
                values.update(current_values)  # copy of dictionary
                # Non-propagating values.
                if SVG_ATTR_PRESERVEASPECTRATIO in values:
                    del values[SVG_ATTR_PRESERVEASPECTRATIO]
                if SVG_ATTR_VIEWBOX in values:
                    del values[SVG_ATTR_VIEWBOX]
                if SVG_ATTR_ID in values:
                    del values[SVG_ATTR_ID]
                if SVG_ATTR_CLIP_PATH in values:
                    del values[SVG_ATTR_CLIP_PATH]
                attributes = elem.attrib  # priority; lowest
                attributes[SVG_ATTR_TAG] = tag
                # Split any Style block elements into parts; priority medium
                style = ""
                if "*" in styles:  # Select all.
                    style += styles["*"]
                if tag in styles:  # selector type
                    style += styles[tag]
                if SVG_ATTR_ID in attributes:  # Selector id #id
                    svg_id = attributes[SVG_ATTR_ID]
                    css_tag = "#%s" % svg_id
                    if css_tag in styles:
                        if len(style) != 0:
                            style += ";"
                        style += styles[css_tag]
                if SVG_ATTR_CLASS in attributes:  # Selector class .class
                    for svg_class in attributes[SVG_ATTR_CLASS].split(" "):
                        css_tag = ".%s" % svg_class
                        if css_tag in styles:
                            if len(style) != 0:
                                style += ";"
                            style += styles[css_tag]
                        css_tag = "%s.%s" % (
                            tag,
                            svg_class,
                        )  # Selector type/class type.class
                        if css_tag in styles:
                            if len(style) != 0:
                                style += ";"
                            style += styles[css_tag]
                # Split style element into parts; priority highest
                if SVG_ATTR_STYLE in attributes:
                    style += attributes[SVG_ATTR_STYLE]
                # Process style tag left to right.
                for equate in style.split(";"):
                    equal_item = equate.split(":")
                    if len(equal_item) == 2:
                        key = str(equal_item[0]).strip()
                        value = str(equal_item[1]).strip()
                        attributes[key] = value
                # Resolve the CSS `currentColor` keyword for fill and stroke.
                if (
                    SVG_ATTR_FILL in attributes
                    and attributes[SVG_ATTR_FILL] == SVG_VALUE_CURRENT_COLOR
                ):
                    if SVG_ATTR_COLOR in attributes:
                        attributes[SVG_ATTR_FILL] = attributes[SVG_ATTR_COLOR]
                    else:
                        attributes[SVG_ATTR_FILL] = values[SVG_ATTR_COLOR]
                if (
                    SVG_ATTR_STROKE in attributes
                    and attributes[SVG_ATTR_STROKE] == SVG_VALUE_CURRENT_COLOR
                ):
                    if SVG_ATTR_COLOR in attributes:
                        attributes[SVG_ATTR_STROKE] = attributes[SVG_ATTR_COLOR]
                    else:
                        attributes[SVG_ATTR_STROKE] = values[SVG_ATTR_COLOR]
                if SVG_ATTR_TRANSFORM in attributes:
                    # If transform is already in values, append the new value.
                    if SVG_ATTR_TRANSFORM in values:
                        attributes[SVG_ATTR_TRANSFORM] = (
                            values[SVG_ATTR_TRANSFORM]
                            + " "
                            + attributes[SVG_ATTR_TRANSFORM]
                        )
                    else:
                        attributes[SVG_ATTR_TRANSFORM] = attributes[SVG_ATTR_TRANSFORM]
                # All class and attribute properties are compiled.
                values.update(attributes)
                values[SVG_STRUCT_ATTRIB] = attributes
                if (
                    SVG_ATTR_DISPLAY in values
                    and values[SVG_ATTR_DISPLAY].lower() == SVG_VALUE_NONE
                ):
                    continue  # If the attributes flags our values to display=none, stop rendering.
                if SVG_NAME_TAG == tag:
                    # The ordering for transformations on the SVG object are:
                    # explicit transform, parent transforms, attribute transforms, viewport transforms
                    s = SVG(values)
                    if width is None:
                        # If a dim was not provided but a viewbox was, use the viewbox dim as physical size, else 1000
                        width = (
                            s.viewbox.width if s.viewbox is not None else 1000
                        )  # 1000 default no information.
                    if height is None:
                        height = s.viewbox.height if s.viewbox is not None else 1000
                    s.render(ppi=ppi, width=width, height=height)
                    height, width = s.width, s.height
                    if s.viewbox is not None:
                        try:
                            if s.height == 0 or s.width == 0:
                                return s
                            viewport_transform = s.viewbox_transform
                        except ZeroDivisionError:
                            # The width or height was zero.
                            # https://www.w3.org/TR/SVG11/struct.html#SVGElementWidthAttribute
                            # "A value of zero disables rendering of the element."
                            return s  # No more parsing will be done.
                        if SVG_ATTR_TRANSFORM in values:
                            # transform on SVG element applied as if svg had parent with transform.
                            values[SVG_ATTR_TRANSFORM] += " " + viewport_transform
                        else:
                            values[SVG_ATTR_TRANSFORM] = viewport_transform
                        width, height = s.viewbox.width, s.viewbox.height
                    if context is None:
                        stack[-1] = (context, values)
                    if context is not None:
                        context.append(s)
                    context = s
                    if root is None:
                        root = s
                elif SVG_TAG_GROUP == tag:
                    s = Group(values)
                    context.append(s)
                    context = s
                    s.render(ppi=ppi, width=width, height=height)
                elif SVG_TAG_DEFS == tag:
                    s = Group(values)
                    context = s  # Non-Rendered
                    s.render(ppi=ppi, width=width, height=height)
                elif SVG_TAG_CLIPPATH == tag:
                    s = ClipPath(values)
                    context = s  # Non-Rendered
                    s.render(ppi=ppi, width=width, height=height)
                    clip += 1
                elif SVG_TAG_PATTERN == tag:
                    s = Pattern(values)
                    context = s  # Non-rendered
                    s.render(ppi=ppi, width=width, height=height)
                elif tag in (
                    SVG_TAG_PATH,
                    SVG_TAG_CIRCLE,
                    SVG_TAG_ELLIPSE,
                    SVG_TAG_LINE,  # Shapes
                    SVG_TAG_POLYLINE,
                    SVG_TAG_POLYGON,
                    SVG_TAG_RECT,
                    SVG_TAG_IMAGE,
                ):
                    try:
                        if SVG_TAG_PATH == tag:
                            s = Path(values)
                        elif SVG_TAG_CIRCLE == tag:
                            s = Circle(values)
                        elif SVG_TAG_ELLIPSE == tag:
                            s = Ellipse(values)
                        elif SVG_TAG_LINE == tag:
                            s = SimpleLine(values)
                        elif SVG_TAG_POLYLINE == tag:
                            s = Polyline(values)
                        elif SVG_TAG_POLYGON == tag:
                            s = Polygon(values)
                        elif SVG_TAG_RECT == tag:
                            s = Rect(values)
                        else:  # SVG_TAG_IMAGE == tag:
                            s = Image(values)
                    except ValueError:
                        # Malformed shape attributes; skip the element.
                        continue
                    s.render(ppi=ppi, width=width, height=height)
                    if reify:
                        s.reify()
                    if s.is_degenerate():
                        continue
                    context.append(s)
                elif tag in (
                    SVG_TAG_STYLE,
                    SVG_TAG_TEXT,
                    SVG_TAG_DESC,
                    SVG_TAG_TITLE,
                    SVG_TAG_TSPAN,
                ):
                    # <style>, <text>, <desc>, <title>
                    continue
                else:
                    s = SVGElement(values)  # SVG Unknown object return as element.
                    context.append(s)
                # Assign optional linked properties.
                try:
                    clip_path_url = s.values.get(SVG_ATTR_CLIP_PATH, None)
                    if clip_path_url is not None:
                        clip_path = root.get_element_by_url(clip_path_url)
                        s.clip_path = clip_path
                except AttributeError:
                    pass
                if clip != 0:
                    try:
                        clip_rule = s.values.get(SVG_ATTR_CLIP_RULE, SVG_RULE_NONZERO)
                        if clip_rule is not None:
                            s.clip_rule = clip_rule
                    except AttributeError:
                        pass
                if SVG_ATTR_ID in attributes and root is not None:
                    root.objects[attributes[SVG_ATTR_ID]] = s
            elif event == "end":  # End event.
                # The iterparse spec makes it clear that internal text data is undefined except at the end.
                s = None
                if tag in (
                    SVG_TAG_TEXT,
                    SVG_TAG_TSPAN,
                    SVG_TAG_DESC,
                    SVG_TAG_TITLE,
                    SVG_TAG_STYLE,
                ):
                    attributes = elem.attrib
                    if SVG_ATTR_ID in attributes and root is not None:
                        root.objects[attributes[SVG_ATTR_ID]] = s
                if tag in (SVG_TAG_TEXT, SVG_TAG_TSPAN):
                    s = Text(values, text=elem.text)
                    s.render(ppi=ppi, width=width, height=height)
                    if reify:
                        s.reify()
                    context.append(s)
                elif SVG_TAG_DESC == tag:
                    s = Desc(values, desc=elem.text)
                    context.append(s)
                elif SVG_TAG_TITLE == tag:
                    s = Title(values, title=elem.text)
                    context.append(s)
                elif SVG_TAG_STYLE == tag:
                    # Merge CSS declarations into the selector table.
                    assignments = list(re.findall(REGEX_CSS_STYLE, elem.text))
                    for key, value in assignments:
                        key = key.strip()
                        value = value.strip()
                        for selector in key.split(","):  # Can comma select subitems.
                            sel = selector.strip()
                            if sel not in styles:
                                styles[sel] = value
                            else:
                                if not styles[sel].endswith(";"):
                                    styles[sel] += ";"
                                styles[sel] += value
                elif SVG_TAG_CLIPPATH == tag:
                    clip -= 1
                if s is not None:
                    # Assign optional linked properties.
                    try:
                        clip_path_url = s.values.get(SVG_ATTR_CLIP_PATH, None)
                        if clip_path_url is not None:
                            clip_path = root.get_element_by_url(clip_path_url)
                            s.clip_path = clip_path
                    except AttributeError:
                        pass
                    if clip != 0:
                        try:
                            clip_rule = s.values.get(
                                SVG_ATTR_CLIP_RULE, SVG_RULE_NONZERO
                            )
                            if clip_rule is not None:
                                s.clip_rule = clip_rule
                        except AttributeError:
                            pass
                context, values = stack.pop()
            elif event == "start-ns":
                # Record namespace prefixes so unknown-namespace attrs resolve.
                if elem[0] != SVG_ATTR_DATA:
                    values[elem[0]] = elem[1]
        return root
| 34.333599
| 464
| 0.533749
|
4a06d712c63678d73594b560ac852ba069e92509
| 8,426
|
py
|
Python
|
SeanCody.bundle/Contents/Code/__init__.py
|
legosteve/plex-gay-metadata-agent
|
83571b942ac104906ed3c1915572410b112f25cb
|
[
"MIT"
] | null | null | null |
SeanCody.bundle/Contents/Code/__init__.py
|
legosteve/plex-gay-metadata-agent
|
83571b942ac104906ed3c1915572410b112f25cb
|
[
"MIT"
] | null | null | null |
SeanCody.bundle/Contents/Code/__init__.py
|
legosteve/plex-gay-metadata-agent
|
83571b942ac104906ed3c1915572410b112f25cb
|
[
"MIT"
] | null | null | null |
# SeanCody
import re, os, platform, simplejson as json
PLUGIN_LOG_TITLE = 'Sean Cody' # Log Title
VERSION_NO = '2017.07.26.0'
# Delay used when requesting HTML, may be good to have to prevent being
# banned from the site
REQUEST_DELAY = 0
# URLS
BASE_URL = 'https://www.seancody.com%s'
# Example Tour URL
# http://www.seancody.com/tour/movie/9291/brodie-cole-bareback/trailer/
BASE_TOUR_MOVIE_URL = 'http://www.seancody.com/tour/movie/%s/%s/trailer'
# File names to match for this agent.
# NOTE: Prefs is a Plex plugin framework global; this compiles the
# user-configured filename regex at module load time.
movie_pattern = re.compile(Prefs['regex'])
def Start():
    """Plex plugin entry point: set HTTP cache lifetime and a desktop user agent."""
    HTTP.CacheTime = CACHE_1WEEK
    HTTP.Headers['User-agent'] = 'Mozilla/4.0 (compatible; MSIE 8.0; ' \
        'Windows NT 6.2; Trident/4.0; SLCC2; .NET CLR 2.0.50727; ' \
        '.NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0)'
class SeanCody(Agent.Movies):
    """Plex metadata agent that scrapes seancody.com tour pages for movie metadata."""
    name = 'Sean Cody'
    languages = [Locale.Language.NoLanguage, Locale.Language.English]
    primary_provider = False
    fallback_agent = ['com.plexapp.agents.gayporncollector']
    contributes_to = ['com.plexapp.agents.cockporn']

    def Log(self, message, *args):
        """Log with the plugin title prefix, but only when the debug pref is enabled."""
        if Prefs['debug']:
            Log(PLUGIN_LOG_TITLE + ' - ' + message, *args)

    def search(self, results, media, lang, manual):
        """Locate the tour page matching a local file and append a search result."""
        self.Log('-----------------------------------------------------------------------')
        self.Log('SEARCH CALLED v.%s', VERSION_NO)
        self.Log('SEARCH - Platform: %s %s', platform.system(), platform.release())
        self.Log('SEARCH - media.title - %s', media.title)
        self.Log('SEARCH - media.items[0].parts[0].file - %s', media.items[0].parts[0].file)
        self.Log('SEARCH - media.primary_metadata.title - %s', media.primary_metadata.title)
        self.Log('SEARCH - media.items - %s', media.items)
        self.Log('SEARCH - media.filename - %s', media.filename)
        self.Log('SEARCH - lang - %s', lang)
        self.Log('SEARCH - manual - %s', manual)
        self.Log('SEARCH - Prefs->cover - %s', Prefs['cover'])
        self.Log('SEARCH - Prefs->folders - %s', Prefs['folders'])
        self.Log('SEARCH - Prefs->regex - %s', Prefs['regex'])
        if not media.items[0].parts[0].file:
            return
        path_and_file = media.items[0].parts[0].file.lower()
        self.Log('SEARCH - File Path: %s', path_and_file)
        (file_dir, basename) = os.path.split(os.path.splitext(path_and_file)[0])
        final_dir = os.path.split(file_dir)[1]
        self.Log('SEARCH - Enclosing Folder: %s', final_dir)
        # Honor the user's folder whitelist unless it is the wildcard "*".
        if Prefs['folders'] != "*":
            folder_list = re.split(',\s*', Prefs['folders'].lower())
            if final_dir not in folder_list:
                self.Log('SEARCH - Skipping %s because the folder %s is not in the acceptable folders list: %s', basename, final_dir, ','.join(folder_list))
                return
        m = movie_pattern.search(basename)
        if not m:
            self.Log('SEARCH - Skipping %s because the file name is not in the expected format.', basename)
            return
        self.Log('SEARCH - File Name: %s' % basename)
        self.Log('SEARCH - Split File Name: %s' % basename.split(' '))
        groups = m.groupdict()
        # movie_url_name not required to provide valid movie_url so it is not
        # computed; the site resolves the trailer URL from the clip number alone.
        movie_url = BASE_TOUR_MOVIE_URL % (groups['clip_number'] , 'x')
        self.Log('SEARCH - Video URL: %s' % movie_url)
        try:
            html = HTML.ElementFromURL(movie_url, sleep=REQUEST_DELAY)
        except:
            self.Log("SEARCH - Title not found: %s" % movie_url)
            return
        # BUG FIX: this line previously read `def fetch_movie_name = ...`,
        # a SyntaxError; the title is simply extracted into movie_name.
        movie_name = html.xpath('//*[@id="player-wrapper"]/div/h1/text()')[0]
        self.Log('SEARCH - title: %s' % movie_name)
        results.Append(MetadataSearchResult(id=movie_url, name=movie_name, score=100, lang=lang))
        return

    def fetch_summary(self, html, metadata):
        """Populate metadata.summary from the page's description block."""
        raw_about_text = html.xpath('//*[@id="description"]/p')
        self.Log('UPDATE - About Text - RAW %s', raw_about_text)
        about_text = ' '.join(str(x.text_content().strip()) for x in raw_about_text)
        metadata.summary = about_text

    def fetch_release_date(self, html, metadata):
        """Populate metadata release date and year from the page's <time> element."""
        release_date = html.xpath('//*[@id="player-wrapper"]/div/span/time/text()')[0].strip()
        self.Log('UPDATE - Release Date - New: %s' % release_date)
        metadata.originally_available_at = Datetime.ParseDate(release_date).date()
        metadata.year = metadata.originally_available_at.year

    def fetch_roles(self, html, metadata):
        """Replace metadata.roles with the cast names scraped from the page."""
        metadata.roles.clear()
        htmlcast = html.xpath('//*[@id="scroll"]/div[2]/ul[2]/li/a/span/text()')
        self.Log('UPDATE - cast: "%s"' % htmlcast)
        for cast in htmlcast:
            cname = cast.strip()
            if (len(cname) > 0):
                role = metadata.roles.new()
                role.name = cname

    def fetch_genre(self, html, metadata):
        """Replace metadata.genres with the genre tags scraped from the page."""
        metadata.genres.clear()
        genres = html.xpath('//*[@id="scroll"]/div[2]/ul[1]/li/a/text()')
        self.Log('UPDATE - video_genres: "%s"' % genres)
        for genre in genres:
            genre = genre.strip()
            if (len(genre) > 0):
                metadata.genres.add(genre)

    def fetch_gallery(self, html, metadata):
        """Download gallery posters (up to the 'cover' pref) and return valid poster URLs."""
        i = 0
        # convert the gallery source variable to parseable JSON and then
        # grab the useful bits out of it
        gallery_info = json.loads(html.xpath('/html/body/div[1]/div/div/section[2]/div/script/text()')[0].
                                  replace('\n', '').
                                  replace('var gallerySource = ', '').
                                  replace('};', '}'))
        try:
            coverPrefs = int(Prefs['cover'])
        except ValueError:
            # an absurdly high number means "download all the things"
            coverPrefs = 10000
        thumb_path = gallery_info['thumb']['path']
        thumb_hash = gallery_info['thumb']['hash']
        poster_path = gallery_info['fullsize']['path']
        poster_hash = gallery_info['fullsize']['hash']
        gallery_length = int(gallery_info['length'])
        valid_image_names = []
        for i in xrange(1, gallery_length + 1):
            if i > coverPrefs:
                break
            thumb_url = "%s%02d.jpg%s" % (thumb_path, i, thumb_hash)
            poster_url = "%s%02d.jpg%s" % (poster_path, i, poster_hash)
            valid_image_names.append(poster_url)
            if poster_url not in metadata.posters:
                try:
                    # NOTE(review): the increment makes sort_order lag one
                    # ahead of the gallery index; preserved from the original.
                    i += 1
                    metadata.posters[poster_url] = Proxy.Preview(HTTP.Request(thumb_url), sort_order=i)
                except:
                    pass
        return valid_image_names

    def update(self, metadata, media, lang, force=False):
        """Fill in metadata (title, summary, date, cast, genres, posters) from the tour page."""
        self.Log('UPDATE CALLED')
        if not media.items[0].parts[0].file:
            return
        file_path = media.items[0].parts[0].file
        self.Log('UPDATE - File Path: %s', file_path)
        self.Log('UPDATE - Video URL: %s', metadata.id)
        # Fetch HTML
        html = HTML.ElementFromURL(metadata.id, sleep=REQUEST_DELAY)
        # Set tagline to URL
        metadata.tagline = metadata.id
        # The Title
        video_title = html.xpath('//*[@id="player-wrapper"]/div/h1/text()')[0]
        # BUG FIX: previously logged the undefined name `movie_name` before the
        # title was fetched (NameError); log the fetched title instead.
        self.Log('UPDATE - Video Title: %s', video_title)
        # Try to get description text
        try:
            self.fetch_summary(html, metadata)
        except:
            pass
        # Try to get release date
        try:
            self.fetch_release_date(html, metadata)
        except:
            pass
        # Try to get and process the video cast
        try:
            self.fetch_roles(html, metadata)
        except:
            pass
        # Try to get and process the video genres
        # BUG FIX: this called the misspelled `fetch_genres`; the AttributeError
        # was silently swallowed so genres were never populated.
        try:
            self.fetch_genre(html, metadata)
        except:
            pass
        valid_image_names = self.fetch_gallery(html, metadata)
        metadata.posters.validate_keys(valid_image_names)
        metadata.content_rating = 'X'
        metadata.title = video_title
        metadata.studio = "Sean Cody"
| 38.474886
| 157
| 0.570377
|
4a06d730bf35f3c2ae36de268f8d757fca89df03
| 557
|
py
|
Python
|
conftest.py
|
bob-r/crudlfap
|
7034718bbd9e18bb0fccae03638fed04243c9fe4
|
[
"BSD-4-Clause"
] | 19
|
2017-09-21T18:29:49.000Z
|
2021-06-10T21:57:54.000Z
|
conftest.py
|
jpic/crudlfap
|
8d569973f9abba6dcb3bcca0e58731afdee27414
|
[
"BSD-4-Clause"
] | 81
|
2017-10-29T15:50:23.000Z
|
2018-10-05T05:59:32.000Z
|
conftest.py
|
jpic/crudlfap
|
8d569973f9abba6dcb3bcca0e58731afdee27414
|
[
"BSD-4-Clause"
] | 10
|
2017-10-26T10:21:01.000Z
|
2022-02-19T13:21:56.000Z
|
import pytest
from django.contrib.sessions.backends.base import SessionBase
from django.test.client import RequestFactory as drf
class RequestFactory(drf):
    """Django test RequestFactory that stamps every request with a session and a fixed user."""
    def __init__(self, user):
        self.user = user
        super().__init__()
    def generic(self, *args, **kwargs):
        # Decorate the base request with the session/user attributes views expect.
        req = super().generic(*args, **kwargs)
        req.session = SessionBase()
        req.user = self.user
        return req
@pytest.fixture
def srf():
    """Fixture: a session-aware request factory bound to an anonymous user."""
    from django.contrib.auth.models import AnonymousUser
    return RequestFactory(AnonymousUser())
| 25.318182
| 61
| 0.691203
|
4a06d8701835e3b2b9d08dd92573742fa03c0f03
| 603
|
py
|
Python
|
tests/functional/test_bd.py
|
AbacherlyGabriel/tech.academy
|
ff2c3155397da61d9dfbb729b776c1725b63b327
|
[
"MIT"
] | 3
|
2020-09-20T22:04:05.000Z
|
2020-09-25T04:35:08.000Z
|
tests/functional/test_bd.py
|
AbacherlyGabriel/tech.academy
|
ff2c3155397da61d9dfbb729b776c1725b63b327
|
[
"MIT"
] | null | null | null |
tests/functional/test_bd.py
|
AbacherlyGabriel/tech.academy
|
ff2c3155397da61d9dfbb729b776c1725b63b327
|
[
"MIT"
] | null | null | null |
from src.dao import UserDao
def test_dao_read():
    """Valid credentials are accepted by UserDao.read."""
    assert UserDao().read("admin@admin.com", "pass") is True
def test_dao_read_wrong_password():
    """A wrong password is rejected by UserDao.read."""
    assert UserDao().read("admin@admin.com", "wrong") is False
def test_dao_create_error_different_passwords():
    """Creation fails when the password and its confirmation differ."""
    assert UserDao().create_user("admin@admin.com", "admin", "pass", "wrong") is False
def test_dao_create_error_same_name():
    """Creation fails when the user name already exists."""
    assert UserDao().create_user("admin@admin.com", "admin", "pass", "pass") is False
def test_dao_create_error_same_key():
    """Creation fails when the email key already exists."""
    assert UserDao().create_user("admin@admin.com", "test", "pass", "pass") is False
| 27.409091
| 86
| 0.719735
|
4a06d90fb1ed9c33cc25272467c30ced3cb8a0e7
| 1,369
|
py
|
Python
|
discovery/api/catalog.py
|
amenezes/discovery-client
|
9c41456d1cc14f4aab34628ad4e13423e00bc4be
|
[
"Apache-2.0"
] | 2
|
2019-07-18T22:43:49.000Z
|
2020-03-09T03:27:41.000Z
|
discovery/api/catalog.py
|
amenezes/discovery-client
|
9c41456d1cc14f4aab34628ad4e13423e00bc4be
|
[
"Apache-2.0"
] | 20
|
2019-02-27T19:08:03.000Z
|
2021-06-22T16:47:32.000Z
|
discovery/api/catalog.py
|
amenezes/discovery-client
|
9c41456d1cc14f4aab34628ad4e13423e00bc4be
|
[
"Apache-2.0"
] | null | null | null |
from discovery.api.abc import Api
class Catalog(Api):
    """Async client for the Consul ``/catalog`` HTTP API endpoints."""
    def __init__(self, endpoint: str = "/catalog", **kwargs):
        super().__init__(endpoint=endpoint, **kwargs)
    async def register(self, data: dict, **kwargs):
        """PUT a node/service registration into the catalog."""
        return await self.client.put(f"{self.url}/register", data=data, **kwargs)
    async def deregister(self, data, **kwargs):
        """PUT a node/service removal request to the catalog."""
        return await self.client.put(f"{self.url}/deregister", data=data, **kwargs)
    async def datacenters(self, **kwargs):
        """GET the list of known datacenters."""
        return await self.client.get(f"{self.url}/datacenters", **kwargs)
    async def nodes(self, **kwargs):
        """GET the registered nodes."""
        return await self.client.get(f"{self.url}/nodes", **kwargs)
    async def services(self, **kwargs):
        """GET the registered services."""
        return await self.client.get(f"{self.url}/services", **kwargs)
    async def service(self, name, **kwargs):
        """GET the nodes providing the named service."""
        return await self.client.get(f"{self.url}/service/{name}", **kwargs)
    async def connect(self, service, **kwargs):
        """GET the Connect-capable nodes providing the given service."""
        return await self.client.get(f"{self.url}/connect/{service}", **kwargs)
    async def node(self, node, **kwargs):
        """GET the services registered on the given node."""
        return await self.client.get(f"{self.url}/node/{node}", **kwargs)
| 35.102564
| 87
| 0.636231
|
4a06d989ce2457797a6ea5a2ae31da44cb4c1891
| 23,244
|
py
|
Python
|
tests/components/media_player/test_universal.py
|
sbidoul/home-assistant
|
75adb7ff46e64e510c206d2b1f141253bbc4997a
|
[
"MIT"
] | 1
|
2017-01-25T15:31:20.000Z
|
2017-01-25T15:31:20.000Z
|
tests/components/media_player/test_universal.py
|
bytebility/home-assistant
|
6015274ee2486f797fd6ee8f5f2074a601953e03
|
[
"MIT"
] | null | null | null |
tests/components/media_player/test_universal.py
|
bytebility/home-assistant
|
6015274ee2486f797fd6ee8f5f2074a601953e03
|
[
"MIT"
] | 2
|
2018-10-22T17:05:47.000Z
|
2021-09-22T10:52:31.000Z
|
"""The tests for the Universal Media player platform."""
from copy import copy
import unittest
from homeassistant.const import (
STATE_OFF, STATE_ON, STATE_UNKNOWN, STATE_PLAYING, STATE_PAUSED)
import homeassistant.components.switch as switch
import homeassistant.components.input_slider as input_slider
import homeassistant.components.input_select as input_select
import homeassistant.components.media_player as media_player
import homeassistant.components.media_player.universal as universal
from tests.common import mock_service, get_test_home_assistant
class MockMediaPlayer(media_player.MediaPlayerDevice):
    """Mock media player for testing."""
    def __init__(self, hass, name):
        """Initialize the media player."""
        self.hass = hass
        self._name = name
        self.entity_id = media_player.ENTITY_ID_FORMAT.format(name)
        self._state = STATE_OFF
        self._volume_level = 0
        self._is_volume_muted = False
        self._media_title = None
        self._supported_media_commands = 0
        self._source = None
        self._tracks = 12
        self._media_image_url = None
        # One mocked service call recorder per media_player service, so tests
        # can assert which services were invoked.
        self.service_calls = {
            'turn_on': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_TURN_ON),
            'turn_off': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_TURN_OFF),
            'mute_volume': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_MUTE),
            'set_volume_level': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET),
            'media_play': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PLAY),
            'media_pause': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_PAUSE),
            'media_previous_track': mock_service(
                hass, media_player.DOMAIN,
                media_player.SERVICE_MEDIA_PREVIOUS_TRACK),
            'media_next_track': mock_service(
                hass, media_player.DOMAIN,
                media_player.SERVICE_MEDIA_NEXT_TRACK),
            'media_seek': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_MEDIA_SEEK),
            'play_media': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_PLAY_MEDIA),
            'volume_up': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_UP),
            'volume_down': mock_service(
                hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_DOWN),
            'media_play_pause': mock_service(
                hass, media_player.DOMAIN,
                media_player.SERVICE_MEDIA_PLAY_PAUSE),
            'select_source': mock_service(
                hass, media_player.DOMAIN,
                media_player.SERVICE_SELECT_SOURCE),
            'clear_playlist': mock_service(
                hass, media_player.DOMAIN,
                media_player.SERVICE_CLEAR_PLAYLIST),
        }
    @property
    def name(self):
        """Return the name of player."""
        return self._name
    @property
    def state(self):
        """Return the state of the player."""
        return self._state
    @property
    def volume_level(self):
        """The volume level of player."""
        return self._volume_level
    @property
    def is_volume_muted(self):
        """Return true if the media player is muted."""
        return self._is_volume_muted
    @property
    def supported_media_commands(self):
        """Supported media commands flag."""
        return self._supported_media_commands
    @property
    def media_image_url(self):
        """Image url of current playing media."""
        return self._media_image_url
    def turn_on(self):
        """Mock turn_on function."""
        self._state = STATE_UNKNOWN
    def turn_off(self):
        """Mock turn_off function."""
        self._state = STATE_OFF
    def mute_volume(self, mute=None):
        """Mock mute function.

        BUG FIX: the original signature took no `mute` argument (the
        media_player interface passes one, causing a TypeError when called
        through the service layer) and used `~` on a bool, producing -1/-2
        instead of a boolean. Toggles when no explicit state is given.
        """
        if mute is None:
            self._is_volume_muted = not self._is_volume_muted
        else:
            self._is_volume_muted = mute
    def set_volume_level(self, volume):
        """Mock set volume level."""
        self._volume_level = volume
    def media_play(self):
        """Mock play."""
        self._state = STATE_PLAYING
    def media_pause(self):
        """Mock pause."""
        self._state = STATE_PAUSED
    def select_source(self, source):
        """Set the input source."""
        self._state = source
    def clear_playlist(self):
        """Clear players playlist."""
        self._tracks = 0
class TestMediaPlayer(unittest.TestCase):
"""Test the media_player module."""
    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        # Two mock child players the universal player will wrap.
        self.mock_mp_1 = MockMediaPlayer(self.hass, 'mock1')
        self.mock_mp_1.update_ha_state()
        self.mock_mp_2 = MockMediaPlayer(self.hass, 'mock2')
        self.mock_mp_2.update_ha_state()
        # Helper entities used as attribute sources for the universal player.
        self.mock_mute_switch_id = switch.ENTITY_ID_FORMAT.format('mute')
        self.hass.states.set(self.mock_mute_switch_id, STATE_OFF)
        self.mock_state_switch_id = switch.ENTITY_ID_FORMAT.format('state')
        self.hass.states.set(self.mock_state_switch_id, STATE_OFF)
        self.mock_volume_id = input_slider.ENTITY_ID_FORMAT.format(
            'volume_level')
        self.hass.states.set(self.mock_volume_id, 0)
        self.mock_source_list_id = input_select.ENTITY_ID_FORMAT.format(
            'source_list')
        self.hass.states.set(self.mock_source_list_id, ['dvd', 'htpc'])
        self.mock_source_id = input_select.ENTITY_ID_FORMAT.format('source')
        self.hass.states.set(self.mock_source_id, 'dvd')
        # Baseline platform configs reused across the config-validation tests.
        self.config_children_only = {
            'name': 'test', 'platform': 'universal',
            'children': [media_player.ENTITY_ID_FORMAT.format('mock1'),
                         media_player.ENTITY_ID_FORMAT.format('mock2')]
        }
        self.config_children_and_attr = {
            'name': 'test', 'platform': 'universal',
            'children': [media_player.ENTITY_ID_FORMAT.format('mock1'),
                         media_player.ENTITY_ID_FORMAT.format('mock2')],
            'attributes': {
                'is_volume_muted': self.mock_mute_switch_id,
                'volume_level': self.mock_volume_id,
                'source': self.mock_source_id,
                'source_list': self.mock_source_list_id,
                'state': self.mock_state_switch_id
            }
        }
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_config_children_only(self):
"""Check config with only children."""
config_start = copy(self.config_children_only)
del config_start['platform']
config_start['commands'] = {}
config_start['attributes'] = {}
response = universal.validate_config(self.config_children_only)
self.assertTrue(response)
self.assertEqual(config_start, self.config_children_only)
def test_config_children_and_attr(self):
"""Check config with children and attributes."""
config_start = copy(self.config_children_and_attr)
del config_start['platform']
config_start['commands'] = {}
response = universal.validate_config(self.config_children_and_attr)
self.assertTrue(response)
self.assertEqual(config_start, self.config_children_and_attr)
def test_config_no_name(self):
"""Check config with no Name entry."""
response = universal.validate_config({'platform': 'universal'})
self.assertFalse(response)
def test_config_bad_children(self):
"""Check config with bad children entry."""
config_no_children = {'name': 'test', 'platform': 'universal'}
config_bad_children = {'name': 'test', 'children': {},
'platform': 'universal'}
response = universal.validate_config(config_no_children)
self.assertTrue(response)
self.assertEqual([], config_no_children['children'])
response = universal.validate_config(config_bad_children)
self.assertTrue(response)
self.assertEqual([], config_bad_children['children'])
def test_config_bad_commands(self):
"""Check config with bad commands entry."""
config = {'name': 'test', 'commands': [], 'platform': 'universal'}
response = universal.validate_config(config)
self.assertTrue(response)
self.assertEqual({}, config['commands'])
def test_config_bad_attributes(self):
"""Check config with bad attributes."""
config = {'name': 'test', 'attributes': [], 'platform': 'universal'}
response = universal.validate_config(config)
self.assertTrue(response)
self.assertEqual({}, config['attributes'])
def test_config_bad_key(self):
"""Check config with bad key."""
config = {'name': 'test', 'asdf': 5, 'platform': 'universal'}
response = universal.validate_config(config)
self.assertTrue(response)
self.assertFalse('asdf' in config)
def test_platform_setup(self):
"""Test platform setup."""
config = {'name': 'test', 'platform': 'universal'}
bad_config = {'platform': 'universal'}
entities = []
def add_devices(new_entities):
"""Add devices to list."""
for dev in new_entities:
entities.append(dev)
universal.setup_platform(self.hass, bad_config, add_devices)
self.assertEqual(0, len(entities))
universal.setup_platform(self.hass, config, add_devices)
self.assertEqual(1, len(entities))
self.assertEqual('test', entities[0].name)
def test_master_state(self):
"""Test master state property."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual(None, ump.master_state)
def test_master_state_with_attrs(self):
"""Test master state property."""
config = self.config_children_and_attr
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual(STATE_OFF, ump.master_state)
self.hass.states.set(self.mock_state_switch_id, STATE_ON)
self.assertEqual(STATE_ON, ump.master_state)
def test_master_state_with_bad_attrs(self):
"""Test master state property."""
config = self.config_children_and_attr
config['attributes']['state'] = 'bad.entity_id'
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual(STATE_OFF, ump.master_state)
def test_active_child_state(self):
"""Test active child state property."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.assertEqual(None, ump._child_state)
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1.update_ha_state()
ump.update()
self.assertEqual(self.mock_mp_1.entity_id,
ump._child_state.entity_id)
self.mock_mp_2._state = STATE_PLAYING
self.mock_mp_2.update_ha_state()
ump.update()
self.assertEqual(self.mock_mp_1.entity_id,
ump._child_state.entity_id)
self.mock_mp_1._state = STATE_OFF
self.mock_mp_1.update_ha_state()
ump.update()
self.assertEqual(self.mock_mp_2.entity_id,
ump._child_state.entity_id)
def test_name(self):
"""Test name property."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual(config['name'], ump.name)
def test_polling(self):
"""Test should_poll property."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual(False, ump.should_poll)
def test_state_children_only(self):
"""Test media player state with only children."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.assertTrue(ump.state, STATE_OFF)
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1.update_ha_state()
ump.update()
self.assertEqual(STATE_PLAYING, ump.state)
def test_state_with_children_and_attrs(self):
"""Test media player with children and master state."""
config = self.config_children_and_attr
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.assertEqual(STATE_OFF, ump.state)
self.hass.states.set(self.mock_state_switch_id, STATE_ON)
ump.update()
self.assertEqual(STATE_ON, ump.state)
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1.update_ha_state()
ump.update()
self.assertEqual(STATE_PLAYING, ump.state)
self.hass.states.set(self.mock_state_switch_id, STATE_OFF)
ump.update()
self.assertEqual(STATE_OFF, ump.state)
def test_volume_level(self):
"""Test volume level property."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.assertEqual(None, ump.volume_level)
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1.update_ha_state()
ump.update()
self.assertEqual(0, ump.volume_level)
self.mock_mp_1._volume_level = 1
self.mock_mp_1.update_ha_state()
ump.update()
self.assertEqual(1, ump.volume_level)
def test_media_image_url(self):
"""Test media_image_url property."""
TEST_URL = "test_url"
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.assertEqual(None, ump.media_image_url)
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1._media_image_url = TEST_URL
self.mock_mp_1.update_ha_state()
ump.update()
# mock_mp_1 will convert the url to the api proxy url. This test
# ensures ump passes through the same url without an additional proxy.
self.assertEqual(self.mock_mp_1.entity_picture, ump.entity_picture)
def test_is_volume_muted_children_only(self):
"""Test is volume muted property w/ children only."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.assertFalse(ump.is_volume_muted)
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1.update_ha_state()
ump.update()
self.assertFalse(ump.is_volume_muted)
self.mock_mp_1._is_volume_muted = True
self.mock_mp_1.update_ha_state()
ump.update()
self.assertTrue(ump.is_volume_muted)
def test_source_list_children_and_attr(self):
"""Test source list property w/ children and attrs."""
config = self.config_children_and_attr
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual("['dvd', 'htpc']", ump.source_list)
self.hass.states.set(self.mock_source_list_id, ['dvd', 'htpc', 'game'])
self.assertEqual("['dvd', 'htpc', 'game']", ump.source_list)
def test_source_children_and_attr(self):
"""Test source property w/ children and attrs."""
config = self.config_children_and_attr
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual('dvd', ump.source)
self.hass.states.set(self.mock_source_id, 'htpc')
self.assertEqual('htpc', ump.source)
def test_volume_level_children_and_attr(self):
"""Test volume level property w/ children and attrs."""
config = self.config_children_and_attr
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertEqual('0', ump.volume_level)
self.hass.states.set(self.mock_volume_id, 100)
self.assertEqual('100', ump.volume_level)
def test_is_volume_muted_children_and_attr(self):
"""Test is volume muted property w/ children and attrs."""
config = self.config_children_and_attr
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
self.assertFalse(ump.is_volume_muted)
self.hass.states.set(self.mock_mute_switch_id, STATE_ON)
self.assertTrue(ump.is_volume_muted)
def test_supported_media_commands_children_only(self):
"""Test supported media commands with only children."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.assertEqual(0, ump.supported_media_commands)
self.mock_mp_1._supported_media_commands = 512
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1.update_ha_state()
ump.update()
self.assertEqual(512, ump.supported_media_commands)
def test_supported_media_commands_children_and_cmds(self):
"""Test supported media commands with children and attrs."""
config = self.config_children_and_attr
universal.validate_config(config)
config['commands']['turn_on'] = 'test'
config['commands']['turn_off'] = 'test'
config['commands']['volume_up'] = 'test'
config['commands']['volume_down'] = 'test'
config['commands']['volume_mute'] = 'test'
config['commands']['volume_set'] = 'test'
config['commands']['select_source'] = 'test'
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.mock_mp_1._state = STATE_PLAYING
self.mock_mp_1.update_ha_state()
ump.update()
check_flags = universal.SUPPORT_TURN_ON | universal.SUPPORT_TURN_OFF \
| universal.SUPPORT_VOLUME_STEP | universal.SUPPORT_VOLUME_MUTE \
| universal.SUPPORT_SELECT_SOURCE
self.assertEqual(check_flags, ump.supported_media_commands)
def test_service_call_no_active_child(self):
"""Test a service call to children with no active child."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.mock_mp_1._state = STATE_OFF
self.mock_mp_1.update_ha_state()
self.mock_mp_2._state = STATE_OFF
self.mock_mp_2.update_ha_state()
ump.update()
ump.turn_off()
self.assertEqual(0, len(self.mock_mp_1.service_calls['turn_off']))
self.assertEqual(0, len(self.mock_mp_2.service_calls['turn_off']))
def test_service_call_to_child(self):
"""Test service calls that should be routed to a child."""
config = self.config_children_only
universal.validate_config(config)
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.mock_mp_2._state = STATE_PLAYING
self.mock_mp_2.update_ha_state()
ump.update()
ump.turn_off()
self.assertEqual(1, len(self.mock_mp_2.service_calls['turn_off']))
ump.turn_on()
self.assertEqual(1, len(self.mock_mp_2.service_calls['turn_on']))
ump.mute_volume(True)
self.assertEqual(1, len(self.mock_mp_2.service_calls['mute_volume']))
ump.set_volume_level(0.5)
self.assertEqual(
1, len(self.mock_mp_2.service_calls['set_volume_level']))
ump.media_play()
self.assertEqual(1, len(self.mock_mp_2.service_calls['media_play']))
ump.media_pause()
self.assertEqual(1, len(self.mock_mp_2.service_calls['media_pause']))
ump.media_previous_track()
self.assertEqual(
1, len(self.mock_mp_2.service_calls['media_previous_track']))
ump.media_next_track()
self.assertEqual(
1, len(self.mock_mp_2.service_calls['media_next_track']))
ump.media_seek(100)
self.assertEqual(1, len(self.mock_mp_2.service_calls['media_seek']))
ump.play_media('movie', 'batman')
self.assertEqual(1, len(self.mock_mp_2.service_calls['play_media']))
ump.volume_up()
self.assertEqual(1, len(self.mock_mp_2.service_calls['volume_up']))
ump.volume_down()
self.assertEqual(1, len(self.mock_mp_2.service_calls['volume_down']))
ump.media_play_pause()
self.assertEqual(
1, len(self.mock_mp_2.service_calls['media_play_pause']))
ump.select_source('dvd')
self.assertEqual(
1, len(self.mock_mp_2.service_calls['select_source']))
ump.clear_playlist()
self.assertEqual(
1, len(self.mock_mp_2.service_calls['clear_playlist']))
def test_service_call_to_command(self):
"""Test service call to command."""
config = self.config_children_only
config['commands'] = {'turn_off': {
'service': 'test.turn_off', 'data': {}}}
universal.validate_config(config)
service = mock_service(self.hass, 'test', 'turn_off')
ump = universal.UniversalMediaPlayer(self.hass, **config)
ump.entity_id = media_player.ENTITY_ID_FORMAT.format(config['name'])
ump.update()
self.mock_mp_2._state = STATE_PLAYING
self.mock_mp_2.update_ha_state()
ump.update()
ump.turn_off()
self.assertEqual(1, len(service))
| 36.1493
| 79
| 0.654276
|
4a06da0a84fe5683e7da64ac74a1fba9cff87852
| 10,722
|
py
|
Python
|
src/mbed_os_tools/test/host_tests_runner/mbed_base.py
|
noralsydmp/mbed-os-tools
|
5a14958aa49eb5764afba8e1dc3208cae2955cd7
|
[
"Apache-2.0"
] | null | null | null |
src/mbed_os_tools/test/host_tests_runner/mbed_base.py
|
noralsydmp/mbed-os-tools
|
5a14958aa49eb5764afba8e1dc3208cae2955cd7
|
[
"Apache-2.0"
] | null | null | null |
src/mbed_os_tools/test/host_tests_runner/mbed_base.py
|
noralsydmp/mbed-os-tools
|
5a14958aa49eb5764afba8e1dc3208cae2955cd7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2018, Arm Limited and affiliates.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
from time import sleep
from .. import host_tests_plugins as ht_plugins
from ... import detect
from .. import DEFAULT_BAUD_RATE
from ..host_tests_logger import HtrunLogger
class Mbed:
    """! Base class for a host driven test
    @details This class stores information about things like disk, port, serial speed etc.
    Class is also responsible for manipulation of serial port between host and mbed device
    """
    def __init__(self, options):
        """ ctor
        @param options Parsed command-line / API options object; fields such as
               port, micro, disk, target_id, image_path etc. are copied here.
        """
        # For compatibility with old mbed. We can use command line options for Mbed object
        # or we can pass options directly from .
        self.options = options
        self.logger = HtrunLogger('MBED')
        # Options related to copy / reset mbed device
        self.port = self.options.port
        self.mcu = self.options.micro
        self.disk = self.options.disk
        self.target_id = self.options.target_id
        self.image_path = self.options.image_path.strip('"') if self.options.image_path is not None else ''
        self.copy_method = self.options.copy_method
        self.retry_copy = self.options.retry_copy
        self.program_cycle_s = float(self.options.program_cycle_s if self.options.program_cycle_s is not None else 2.0)
        self.polling_timeout = self.options.polling_timeout
        # Serial port settings
        self.serial_baud = DEFAULT_BAUD_RATE
        self.serial_timeout = 1
        # Users can use command to pass port speeds together with port name. E.g. COM4:115200:1
        # Format if PORT:SPEED:TIMEOUT
        port_config = self.port.split(':') if self.port else ''
        if len(port_config) == 2:
            # -p COM4:115200
            self.port = port_config[0]
            self.serial_baud = int(port_config[1])
        elif len(port_config) == 3:
            # -p COM4:115200:0.5
            self.port = port_config[0]
            self.serial_baud = int(port_config[1])
            self.serial_timeout = float(port_config[2])
        # Overriding baud rate value with command line specified value
        self.serial_baud = self.options.baud_rate if self.options.baud_rate else self.serial_baud
        # Test configuration in JSON format
        self.test_cfg = None
        if self.options.json_test_configuration is not None:
            # We need to normalize path before we open file
            json_test_configuration_path = self.options.json_test_configuration.strip("\"'")
            try:
                self.logger.prn_inf("Loading test configuration from '%s'..." % json_test_configuration_path)
                with open(json_test_configuration_path) as data_file:
                    self.test_cfg = json.load(data_file)
            except IOError as e:
                self.logger.prn_err("Test configuration JSON file '{0}' I/O error({1}): {2}"
                                    .format(json_test_configuration_path, e.errno, e.strerror))
            except Exception as e:
                # Fix: the original used a bare 'except:' and referenced 'e',
                # which was unbound here and raised a NameError that masked
                # the real error (e.g. a JSONDecodeError).
                self.logger.prn_err("Test configuration JSON Unexpected error: %s" % str(e))
                raise
    def copy_image(self, image_path=None, disk=None, copy_method=None, port=None, mcu=None, retry_copy=5):
        """! Closure for copy_image_raw() method.
        @param image_path Path to the binary to flash (defaults to self.image_path)
        @param disk Mount point of the target (defaults to self.disk)
        @param copy_method Copy plugin name (defaults to self.copy_method)
        @param port Serial port name (defaults to self.port)
        @param mcu Target MCU name (defaults to self.mcu)
        @param retry_copy Maximum number of flash attempts
        @return Returns result from copy plugin
        """
        def get_remount_count(disk_path, tries=2):
            """! Get the remount count from 'DETAILS.TXT' file
            @return Returns count, None if not-available
            """
            # In case of no disk path, nothing to do
            if disk_path is None:
                return None
            for cur_try in range(1, tries + 1):
                try:
                    files_on_disk = [x.upper() for x in os.listdir(disk_path)]
                    if 'DETAILS.TXT' in files_on_disk:
                        with open(os.path.join(disk_path, 'DETAILS.TXT'), 'r') as details_txt:
                            for line in details_txt.readlines():
                                if 'Remount count:' in line:
                                    return int(line.replace('Remount count: ', ''))
                        # Remount count not found in file
                        return None
                    # 'DETAILS.TXT' file not found
                    else:
                        return None
                except OSError as e:
                    self.logger.prn_err("Failed to get remount count due to OSError.", str(e))
                    self.logger.prn_inf("Retrying in 1 second (try %s of %s)" % (cur_try, tries))
                    sleep(1)
            # Failed to get remount count
            return None
        def check_flash_error(target_id, disk, initial_remount_count):
            """! Check for flash errors
            @return Returns false if FAIL.TXT present, else true
            """
            if not target_id:
                self.logger.prn_wrn("Target ID not found: Skipping flash check and retry")
                return True
            bad_files = set(['FAIL.TXT'])
            # Re-try at max 5 times with 0.5 sec in delay
            for i in range(5):
                # mbed_os_tools.detect.create() should be done inside the loop. Otherwise it will loop on same data.
                mbeds = detect.create()
                mbed_list = mbeds.list_mbeds() # list of mbeds present
                # get first item in list with a matching target_id, if present
                mbed_target = next((x for x in mbed_list if x['target_id'] == target_id), None)
                if mbed_target is not None:
                    if 'mount_point' in mbed_target and mbed_target['mount_point'] is not None:
                        # Wait until the device has actually remounted before
                        # trusting the file listing.
                        if not initial_remount_count is None:
                            new_remount_count = get_remount_count(disk)
                            if not new_remount_count is None and new_remount_count == initial_remount_count:
                                sleep(0.5)
                                continue
                        common_items = []
                        try:
                            items = set([x.upper() for x in os.listdir(mbed_target['mount_point'])])
                            common_items = bad_files.intersection(items)
                        except OSError:
                            print("Failed to enumerate disk files, retrying")
                            continue
                        for common_item in common_items:
                            full_path = os.path.join(mbed_target['mount_point'], common_item)
                            self.logger.prn_err("Found %s" % (full_path))
                            bad_file_contents = "[failed to read bad file]"
                            try:
                                with open(full_path, "r") as bad_file:
                                    bad_file_contents = bad_file.read()
                            except IOError as error:
                                self.logger.prn_err("Error opening '%s': %s" % (full_path, error))
                            self.logger.prn_err("Error file contents:\n%s" % bad_file_contents)
                        if common_items:
                            return False
                sleep(0.5)
            return True
        # Set-up closure environment
        if not image_path:
            image_path = self.image_path
        if not disk:
            disk = self.disk
        if not copy_method:
            copy_method = self.copy_method
        if not port:
            port = self.port
        if not mcu:
            mcu = self.mcu
        if not retry_copy:
            retry_copy = self.retry_copy
        target_id = self.target_id
        if not image_path:
            self.logger.prn_err("Error: image path not specified")
            return False
        if not os.path.isfile(image_path):
            self.logger.prn_err("Error: image file (%s) not found" % image_path)
            return False
        # Fix: initialize result so we never return an unbound name when
        # retry_copy resolves to 0 (e.g. self.retry_copy is also falsy).
        result = False
        for count in range(0, retry_copy):
            initial_remount_count = get_remount_count(disk)
            # Call proper copy method
            result = self.copy_image_raw(image_path, disk, copy_method, port, mcu)
            sleep(self.program_cycle_s)
            if not result:
                continue
            result = check_flash_error(target_id, disk, initial_remount_count)
            if result:
                break
        return result
    def copy_image_raw(self, image_path=None, disk=None, copy_method=None, port=None, mcu=None):
        """! Copy file depending on method you want to use. Handles exception
             and return code from shell copy commands.
        @return Returns result from copy plugin
        @details Method which is actually copying image to mbed
        """
        # image_path - Where is binary with target's firmware
        # Select copy_method
        # We override 'default' method with 'shell' method
        copy_method = {
            None: 'shell',
            'default': 'shell',
        }.get(copy_method, copy_method)
        result = ht_plugins.call_plugin('CopyMethod',
                                        copy_method,
                                        image_path=image_path,
                                        mcu=mcu,
                                        serial=port,
                                        destination_disk=disk,
                                        target_id=self.target_id,
                                        pooling_timeout=self.polling_timeout)
        return result
    def hw_reset(self):
        """
        Performs hardware reset of target ned device.
        On success, refreshes self.port / self.disk from the re-enumerated
        device info returned by the reset plugin.
        :return: result of the 'power_cycle' reset plugin
        """
        device_info = {}
        result = ht_plugins.call_plugin('ResetMethod',
                                        'power_cycle',
                                        target_id=self.target_id,
                                        device_info=device_info)
        if result:
            self.port = device_info['serial_port']
            self.disk = device_info['mount_point']
        return result
| 44.305785
| 119
| 0.556333
|
4a06db6e827c46cc63841deee2d6fbb96949412e
| 27,345
|
py
|
Python
|
qcodes/utils/helpers.py
|
Dominik-Vogel/Qcodes
|
b4cf7d58bc1bf3be97af6bf48f57cb6b87d588bb
|
[
"MIT"
] | 1
|
2019-12-07T01:25:49.000Z
|
2019-12-07T01:25:49.000Z
|
qcodes/utils/helpers.py
|
Dominik-Vogel/Qcodes
|
b4cf7d58bc1bf3be97af6bf48f57cb6b87d588bb
|
[
"MIT"
] | null | null | null |
qcodes/utils/helpers.py
|
Dominik-Vogel/Qcodes
|
b4cf7d58bc1bf3be97af6bf48f57cb6b87d588bb
|
[
"MIT"
] | null | null | null |
import io
import json
import logging
import math
import numbers
import time
import os
from pathlib import Path
from collections.abc import Iterator, Sequence, Mapping
from copy import deepcopy
from typing import (Dict, Any, TypeVar, Type, List, Tuple, Union, Optional,
cast, Callable, SupportsAbs)
from typing import Sequence as TSequence
from contextlib import contextmanager
from asyncio import iscoroutinefunction
from inspect import signature
from functools import partial
from collections import OrderedDict
import numpy as np
import qcodes
from qcodes.utils.deprecate import deprecate
QCODES_USER_PATH_ENV = 'QCODES_USER_PATH'
_tprint_times: Dict[str, float] = {}
log = logging.getLogger(__name__)
class NumpyJSONEncoder(json.JSONEncoder):
    """
    JSON encoder that can serialize types the built-in ``json`` module does
    not handle out-of-the-box; see :meth:`default` for the conversion rules.
    """
    def default(self, obj):
        """
        Convert ``obj`` to something the base encoder can serialize.

        Conversion rules:

        * ``numpy.generic`` scalars (all integer, floating, and other types,
          except complex ones) become their python equivalent via ``item()``
          (see https://docs.scipy.org/doc/numpy/reference/arrays.scalars.html).
        * ``numpy.ndarray`` becomes a python list via ``tolist()``.
        * Complex numbers (``numbers.Complex`` that are not ``numbers.Real``)
          become a dict with float fields ``re`` and ``im`` plus a
          ``__dtype__`` field equal to ``'complex'``.
        * Objects with a ``_JSONEncoder`` method are replaced by that
          method's return value.
        * Objects supporting the pickle protocol are encoded from the data
          that protocol provides.
        * Anything else falls back to its ``str`` representation.
        """
        if isinstance(obj, np.generic) \
                and not isinstance(obj, np.complexfloating):
            # numpy scalar -> native python scalar
            return obj.item()
        if isinstance(obj, np.ndarray):
            # numpy array -> (nested) python list
            return obj.tolist()
        if (isinstance(obj, numbers.Complex) and
                not isinstance(obj, numbers.Real)):
            return {
                '__dtype__': 'complex',
                're': float(obj.real),
                'im': float(obj.imag)
            }
        if hasattr(obj, '_JSONEncoder'):
            # The object supplies its own JSON representation
            return obj._JSONEncoder()
        try:
            return super().default(obj)
        except TypeError:
            # See if the object supports the pickle protocol.
            # If so, we should be able to use that to serialize.
            if hasattr(obj, '__getnewargs__'):
                return {
                    '__class__': type(obj).__name__,
                    '__args__': obj.__getnewargs__()
                }
            # We cannot convert the object to JSON, just take a string
            return str(obj)
def tprint(string, dt=1, tag='default'):
    """Print progress of a loop every ``dt`` seconds.

    Per-tag timestamps of the last print are kept in the module-level
    ``_tprint_times`` dict; a tag prints again only once ``dt`` seconds
    have elapsed since its previous print.
    """
    last_printed = _tprint_times.get(tag, 0)
    if time.time() - last_printed > dt:
        print(string)
        _tprint_times[tag] = time.time()
def is_sequence(obj):
    """
    Test if an object is a sequence.

    We do not consider strings or unordered collections like sets to be
    sequences, but we do accept iterators (such as generators).
    """
    # Strings, bytes and file-like objects are iterable but are explicitly
    # excluded from what we call a "sequence".
    if isinstance(obj, (str, bytes, io.IOBase)):
        return False
    return isinstance(obj, (Iterator, Sequence, np.ndarray))
def is_sequence_of(obj: Any,
                   types: Optional[Union[Type[object],
                                         Tuple[Type[object], ...]]] = None,
                   depth: Optional[int] = None,
                   shape: Optional[TSequence[int]] = None
                   ) -> bool:
    """
    Test if object is a sequence of entirely certain class(es).
    Args:
        obj: The object to test.
        types: Allowed type(s). If omitted, we just test the depth/shape.
        depth: Level of nesting, ie if ``depth=2`` we expect a sequence of
            sequences. Default 1 unless ``shape`` is supplied.
        shape: The shape of the sequence, ie its length in each dimension.
            If ``depth`` is omitted, but ``shape`` included, we set
            ``depth = len(shape)``.
    Returns:
        bool: ``True`` if every item in ``obj`` matches ``types``.
    """
    # Not a sequence at all (per is_sequence) -> immediate failure.
    if not is_sequence(obj):
        return False
    if shape is None or shape == ():
        # No shape constraint: only depth (default 1) is checked below.
        next_shape: Optional[Tuple[int, ...]] = None
        if depth is None:
            depth = 1
    else:
        if depth is None:
            depth = len(shape)
        elif depth != len(shape):
            raise ValueError('inconsistent depth and shape')
        # The outermost dimension must match the first shape entry.
        if len(obj) != shape[0]:
            return False
        # Remaining dimensions are checked recursively on each item.
        next_shape = cast(Tuple[int], shape[1:])
    for item in obj:
        if depth > 1:
            # Recurse one nesting level down with the tail of the shape.
            if not is_sequence_of(item, types, depth=depth - 1,
                                  shape=next_shape):
                return False
        elif types is not None and not isinstance(item, types):
            return False
    return True
def is_function(f: Callable, arg_count: int, coroutine: bool = False) -> bool:
    """
    Check and require a function that can accept the specified number of
    positional arguments, which either is or is not a coroutine
    type casting "functions" are allowed, but only in the 1-argument form.
    Args:
        f: Function to check.
        arg_count: Number of argument f should accept.
        coroutine: Is a coroutine.
    Return:
        bool: is function and accepts the specified number of arguments.
    """
    if not isinstance(arg_count, int) or arg_count < 0:
        raise TypeError('arg_count must be a non-negative integer')
    # Must be callable, and its coroutine-ness must match the request.
    if not callable(f):
        return False
    if bool(coroutine) is not iscoroutinefunction(f):
        return False
    if isinstance(f, type):
        # For type casting functions, eg int, str, float:
        # only support the one-parameter form of these,
        # otherwise the user should make an explicit function.
        return arg_count == 1
    try:
        sig = signature(f)
    except ValueError:
        # Some built-in functions/methods don't describe themselves to
        # inspect; we already know it's a callable and coroutine is correct.
        return True
    try:
        sig.bind(*([0] * arg_count))
    except TypeError:
        return False
    return True
def full_class(obj):
    """The full importable path to an object's class."""
    cls = type(obj)
    return '{}.{}'.format(cls.__module__, cls.__name__)
def named_repr(obj):
    """Enhance the standard repr() with the object's name attribute."""
    # !s forces str() on the name, matching str(obj.name) semantics.
    return (f'<{obj.__module__}.{type(obj).__name__}: '
            f'{obj.name!s} at {id(obj)}>')
def deep_update(dest, update):
    """
    Recursively update one JSON structure with another.

    Only dives into nested dicts; lists get replaced completely.
    If the original value is a dictionary and the new value is not, or vice
    versa, we also replace the value completely. Returns ``dest`` (mutated
    in place).
    """
    for key, new_val in update.items():
        old_val = dest.get(key)
        if isinstance(new_val, Mapping) and isinstance(old_val, Mapping):
            # Both sides are mappings: merge recursively.
            deep_update(old_val, new_val)
        else:
            # Otherwise replace wholesale with a deep copy.
            dest[key] = deepcopy(new_val)
    return dest
# could use numpy.arange here, but
# a) we don't want to require that as a dep so low level
# b) I'd like to be more flexible with the sign of step
def permissive_range(start: Union[int, float], stop: Union[int, float],
                     step: Union[int, float]) -> List[Union[int, float]]:
    """
    Returns a range (as a list of values) with floating point steps.
    Always starts at start and moves toward stop, regardless of the
    sign of step.

    Args:
        start: The starting value of the range.
        stop: The end value of the range.
        step: Spacing between the values; must be non-zero. Its sign is
            ignored — the direction is taken from ``start``/``stop``.

    Returns:
        List of values from ``start`` toward ``stop`` (``stop`` excluded).
        Note: the original annotation claimed ``np.ndarray``, but this
        function has always returned a plain list.
    """
    # Direction is determined by start/stop, not by the sign of step.
    signed_step = abs(step) * (1 if stop > start else -1)
    # Take off a tiny bit for rounding errors
    step_count = math.ceil((stop - start) / signed_step - 1e-10)
    return [start + i * signed_step for i in range(step_count)]
# This is very much related to the permissive_range but more
# strict on the input, start and endpoints are always included,
# and a sweep is only created if the step matches an integer
# number of points.
# numpy is a dependency anyways.
# Furthermore the sweep allows to take a number of points and generates
# an array with endpoints included, which is more intuitive to use in a sweep.
def make_sweep(start: Union[int, float], stop: Union[int, float],
               step: Optional[Union[int, float]] = None,
               num: Optional[int] = None) -> List[float]:
    """
    Generate numbers over a specified interval.
    Requires ``start`` and ``stop`` and (``step`` or ``num``).
    The sign of ``step`` is not relevant.

    Args:
        start: The starting value of the sequence.
        stop: The end value of the sequence.
        step: Spacing between values.
        num: Number of values to generate.

    Returns:
        A list of evenly spaced numbers with both endpoints included
        (``numpy.linspace(...).tolist()``). Note: the original annotation
        claimed ``np.ndarray``, but this function has always returned a list.

    Raises:
        AttributeError: If both ``step`` and ``num`` are supplied.
        ValueError: If neither is supplied, or if ``step`` does not divide
            the interval into an integer number of points.

    Examples:
        >>> make_sweep(0, 10, num=5)
        [0.0, 2.5, 5.0, 7.5, 10.0]
        >>> make_sweep(5, 10, step=1)
        [5.0, 6.0, 7.0, 8.0, 9.0, 10.0]
        >>> make_sweep(15, 10.5, step=1.5)
        [15.0, 13.5, 12.0, 10.5]
    """
    # Explicit None checks: the old truthiness test ('if step and num')
    # let step=0 together with num slip through to a ZeroDivisionError.
    if step is not None and num is not None:
        raise AttributeError('Don\'t use `step` and `num` at the same time.')
    if (step is None) and (num is None):
        raise ValueError('If you really want to go from `start` to '
                         '`stop` in one step, specify `num=2`.')
    if step is not None:
        steps = abs((stop - start) / step)
        tolerance = 1e-10
        # Accept steps that are within tolerance of an integer count.
        steps_lo = int(np.floor(steps + tolerance))
        steps_hi = int(np.ceil(steps - tolerance))
        if steps_lo != steps_hi:
            raise ValueError(
                'Could not find an integer number of points for '
                'the given `start`, `stop`, and `step` '
                'values. \nNumber of points is {:d} or {:d}.'
                .format(steps_lo + 1, steps_hi + 1))
        num = steps_lo + 1
    return np.linspace(start, stop, num=num).tolist()
def wait_secs(finish_clock):
    """
    Calculate the number of seconds until a given clock time.

    The clock time should be the result of ``time.perf_counter()``.
    Does NOT wait for this time. A finish time already in the past is
    clamped to 0 (with a warning).
    """
    remaining = finish_clock - time.perf_counter()
    if remaining < 0:
        logging.warning('negative delay {:.6f} sec'.format(remaining))
        return 0
    return remaining
class LogCapture():
    """
    Context manager to grab all log messages, optionally
    from a specific logger.
    usage::
        with LogCapture() as logs:
            code_that_makes_logs(...)
        log_str = logs.value
    """
    @deprecate(reason="The logging infrastructure has moved to `qcodes.utils.logger`",
               alternative="`qcodes.utils.logger.LogCapture`")
    def __init__(self, logger=logging.getLogger()):
        # NOTE(review): the default argument is evaluated once at function
        # definition time, binding the root logger that existed then.
        self.logger = logger
        # Existing handlers are detached already at construction time (not
        # in __enter__) and restored in __exit__.
        self.stashed_handlers = self.logger.handlers[:]
        for handler in self.stashed_handlers:
            self.logger.removeHandler(handler)
    def __enter__(self):
        # Route everything (DEBUG and up) into an in-memory buffer.
        self.log_capture = io.StringIO()
        self.string_handler = logging.StreamHandler(self.log_capture)
        self.string_handler.setLevel(logging.DEBUG)
        self.logger.addHandler(self.string_handler)
        return self
    def __exit__(self, type, value, tb):
        # Snapshot the captured text into .value before closing the buffer,
        # then put the original handlers back.
        self.logger.removeHandler(self.string_handler)
        self.value = self.log_capture.getvalue()
        self.log_capture.close()
        for handler in self.stashed_handlers:
            self.logger.addHandler(handler)
@deprecate(
    reason="This method is no longer being used in QCoDeS.")
def make_unique(s, existing):
    """
    Make string ``s`` unique, able to be added to a sequence ``existing`` of
    existing names without duplication, by ``appending _<int>`` to it if needed.
    """
    taken = set(existing)
    candidate = s
    suffix = 1
    # First collision retries with '_2', matching the original numbering.
    while candidate in taken:
        suffix += 1
        candidate = '{}_{}'.format(s, suffix)
    return candidate
class DelegateAttributes:
    """
    Mixin class to create attributes of this object by
    delegating them to one or more dictionaries and/or objects.

    Also fixes ``__dir__`` so the delegated attributes will show up
    in ``dir()`` and ``autocomplete``.

    Attributes:
        delegate_attr_dicts (list): A list of names (strings) of dictionaries
            which are (or will be) attributes of ``self``, whose keys should
            be treated as attributes of ``self``.
        delegate_attr_objects (list): A list of names (strings) of objects
            which are (or will be) attributes of ``self``, whose attributes
            should be passed through to ``self``.
        omit_delegate_attrs (list): A list of attribute names (strings)
            to *not* delegate to any other dictionary or object.

    Any ``None`` entry is ignored.

    Attribute resolution order:
        1. Real attributes of this object.
        2. Keys of each dictionary in ``delegate_attr_dicts`` (in order).
        3. Attributes of each object in ``delegate_attr_objects`` (in order).
    """
    # Class-level defaults; subclasses override these to opt in to delegation.
    delegate_attr_dicts: List[str] = []
    delegate_attr_objects: List[str] = []
    omit_delegate_attrs: List[str] = []

    def __getattr__(self, key):
        # Only invoked when normal lookup fails, so real attributes of
        # self always win (resolution order step 1).
        if key in self.omit_delegate_attrs:
            raise AttributeError("'{}' does not delegate attribute {}".format(
                self.__class__.__name__, key))

        for name in self.delegate_attr_dicts:
            if key == name:
                # needed to prevent infinite loops!
                raise AttributeError(
                    "dict '{}' has not been created in object '{}'".format(
                        key, self.__class__.__name__))
            try:
                d = getattr(self, name, None)
                if d is not None:
                    return d[key]
            except KeyError:
                # Key absent from this dict; fall through to the next source.
                pass

        for name in self.delegate_attr_objects:
            if key == name:
                # Same infinite-loop guard as for the dict delegates above.
                raise AttributeError(
                    "object '{}' has not been created in object '{}'".format(
                        key, self.__class__.__name__))
            try:
                obj = getattr(self, name, None)
                if obj is not None:
                    return getattr(obj, key)
            except AttributeError:
                # Delegate object lacks this attribute; try the next one.
                pass

        raise AttributeError(
            "'{}' object and its delegates have no attribute '{}'".format(
                self.__class__.__name__, key))

    def __dir__(self):
        # Extend the regular dir() with every delegated name so that
        # autocomplete works on the delegating object.
        names = super().__dir__()
        for name in self.delegate_attr_dicts:
            d = getattr(self, name, None)
            if d is not None:
                names += [k for k in d.keys()
                          if k not in self.omit_delegate_attrs]

        for name in self.delegate_attr_objects:
            obj = getattr(self, name, None)
            if obj is not None:
                names += [k for k in dir(obj)
                          if k not in self.omit_delegate_attrs]

        return sorted(set(names))
def strip_attrs(obj, whitelist=()):
    """
    Irreversibly remove all direct instance attributes of object, to help with
    disposal, breaking circular references.

    Deletion is best-effort: failure to delete a single attribute, or to
    access ``__dict__`` at all (e.g. for objects using ``__slots__``), is
    silently ignored so that teardown never raises.

    Args:
        obj: Object to be stripped.
        whitelist (list): List of names that are not stripped from the object.
    """
    try:
        # Snapshot the names first: __dict__ is mutated while we delete.
        names = set(obj.__dict__) - set(whitelist)
        for name in names:
            try:
                del obj.__dict__[name]
            except Exception:
                # Narrowed from the original bare except (its own TODO):
                # still best-effort, but no longer swallows SystemExit etc.
                pass
    except Exception:
        pass
def compare_dictionaries(dict_1: Dict, dict_2: Dict,
                         dict_1_name: str = 'd1',
                         dict_2_name: str = 'd2',
                         path: str = "") -> Tuple[bool, str]:
    """
    Compare two dictionaries recursively to find non matching elements.

    Args:
        dict_1: First dictionary to compare.
        dict_2: Second dictionary to compare.
        dict_1_name: Optional name of the first dictionary used in the
            differences string.
        dict_2_name: Optional name of the second dictionary used in the
            differences string.
        path: Nested-key prefix, used internally by the recursion.

    Returns:
        Tuple: Are the dicts equal and the difference rendered as
        a string.
    """
    err = ''
    key_err = ''
    value_err = ''
    old_path = path
    for k in dict_1.keys():
        path = old_path + "[%s]" % k
        if k not in dict_2.keys():
            key_err += "Key {}{} not in {}\n".format(
                dict_1_name, path, dict_2_name)
        else:
            if isinstance(dict_1[k], dict) and isinstance(dict_2[k], dict):
                # Recurse into nested dicts; only the rendered differences
                # (second tuple element) are accumulated.
                err += compare_dictionaries(dict_1[k], dict_2[k],
                                            dict_1_name, dict_2_name, path)[1]
            else:
                match = (dict_1[k] == dict_2[k])

                # if values are equal-length numpy arrays, the result of
                # "==" is a bool array, so we need to 'all' it.
                # In any other case "==" returns a bool
                # TODO(alexcjohnson): actually, if *one* is a numpy array
                # and the other is another sequence with the same entries,
                # this will compare them as equal. Do we want this, or should
                # we require exact type match?
                if hasattr(match, 'all'):
                    match = match.all()

                if not match:
                    value_err += (
                        'Value of "{}{}" ("{}", type"{}") not same as\n'
                        '  "{}{}" ("{}", type"{}")\n\n').format(
                        dict_1_name, path, dict_1[k], type(dict_1[k]),
                        dict_2_name, path, dict_2[k], type(dict_2[k]))

    for k in dict_2.keys():
        path = old_path + "[{}]".format(k)
        if k not in dict_1.keys():
            key_err += "Key {}{} not in {}\n".format(
                dict_2_name, path, dict_1_name)

    dict_differences = key_err + value_err + err
    # Equal iff no differences were rendered anywhere.
    dicts_equal = len(dict_differences) == 0
    return dicts_equal, dict_differences
def warn_units(class_name, instance):
    """Log a deprecation warning for the legacy ``units`` attribute."""
    message = ('`units` is deprecated for the `' + class_name +
               '` class, use `unit` instead. ' + repr(instance))
    logging.warning(message)
def foreground_qt_window(window):
    """
    Try as hard as possible to bring a qt window to the front. This
    will use pywin32 if installed and running on windows as this
    seems to be the only reliable way to foreground a window. The
    build-in qt functions often doesn't work. Note that to use this
    with pyqtgraphs remote process you should use the ref in that module
    as in the example below.

    Args:
        window: Handle to qt window to foreground.

    Examples:
        >>> Qtplot.qt_helpers.foreground_qt_window(plot.win)
    """
    try:
        import win32con
        from win32gui import SetWindowPos
    except ImportError:
        # pywin32 unavailable (non-Windows or not installed): fall through
        # to the plain Qt calls below.
        pass
    else:
        # use the idea from
        # https://stackoverflow.com/questions/12118939/how-to-make-a-pyqt4-window-jump-to-the-front
        flags = win32con.SWP_NOMOVE | win32con.SWP_NOSIZE | win32con.SWP_SHOWWINDOW
        # HWND_TOPMOST = always on top. only reliable way to bring it to
        # the front on windows
        SetWindowPos(window.winId(), win32con.HWND_TOPMOST, 0, 0, 0, 0, flags)
        # disable the always on top, but leave window at its top position
        SetWindowPos(window.winId(), win32con.HWND_NOTOPMOST, 0, 0, 0, 0, flags)
    window.show()
    window.raise_()
    window.activateWindow()
def add_to_spyder_UMR_excludelist(modulename: str):
    """
    Spyder tries to reload any user module. This does not work well for
    qcodes because it overwrites Class variables. QCoDeS uses these to
    store global attributes such as default station, monitor and list of
    instruments. This "feature" can be disabled by the
    gui. Unfortunately this cannot be disabled in a natural way
    programmatically so in this hack we replace the global ``__umr__`` instance
    with a new one containing the module we want to exclude. This will do
    nothing if Spyder is not found.
    TODO is there a better way to detect if we are in spyder?
    """
    # Heuristic Spyder detection: any SPYDER* environment variable present.
    if any('SPYDER' in name for name in os.environ):

        sitecustomize_found = False
        try:
            # Try the spyder.utils.site location of the UMR module first.
            from spyder.utils.site import sitecustomize
        except ImportError:
            pass
        else:
            sitecustomize_found = True
        if sitecustomize_found is False:
            try:
                # Fall back to the spyder_kernels location.
                from spyder_kernels.customize import spydercustomize as sitecustomize
            except ImportError:
                pass
            else:
                print("found kernels site")
                sitecustomize_found = True
        if sitecustomize_found is False:
            # Neither location importable: silently do nothing.
            return
        excludednamelist = os.environ.get('SPY_UMR_NAMELIST',
                                          '').split(',')
        if modulename not in excludednamelist:
            log.info("adding {} to excluded modules".format(modulename))
            excludednamelist.append(modulename)
            # Replace the global reloader and persist the new exclude list
            # in the environment for future lookups.
            sitecustomize.__umr__ = sitecustomize.UserModuleReloader(namelist=excludednamelist)
            os.environ['SPY_UMR_NAMELIST'] = ','.join(excludednamelist)
@contextmanager
def attribute_set_to(object_: Any, attribute_name: str, new_value: Any):
    """
    Temporarily change an attribute of an object.

    On entering the context the attribute ``attribute_name`` of ``object_``
    is set to ``new_value``; on exit the saved original value is restored,
    even if the body raises.

    Args:
        object_: The object which attribute value is to be changed.
        attribute_name: The name of the attribute that is to be changed.
        new_value: The new value to which the attribute of the object is
            to be changed.
    """
    saved_value = getattr(object_, attribute_name)
    setattr(object_, attribute_name, new_value)
    try:
        yield
    finally:
        setattr(object_, attribute_name, saved_value)
def partial_with_docstring(func: Callable, docstring: str, **kwargs):
    """
    We want to have a partial function which will allow us access the docstring
    through the python built-in help function. This is particularly important
    for client-facing driver methods, whose arguments might not be obvious.

    Consider the follow example why this is needed:

    >>> from functools import partial
    >>> def f():
    >>> ... pass
    >>> g = partial(f)
    >>> g.__doc__ = "bla"
    >>> help(g) # this will print an unhelpful message

    Args:
        func: A function that its docstring will be accessed.
        docstring: The docstring of the corresponding function.

    Returns:
        A wrapper with ``__doc__`` set to ``docstring`` that forwards
        calls (and the wrapped function's return value) to
        ``partial(func, **kwargs)``.
    """
    ex = partial(func, **kwargs)

    def inner(*inner_args, **inner_kwargs):
        # Fixes: the original wrapper dropped the return value and did not
        # forward positional arguments.
        return ex(*inner_args, **inner_kwargs)

    inner.__doc__ = docstring

    return inner
def create_on_off_val_mapping(on_val: Any = True, off_val: Any = False
                              ) -> Dict:
    """
    Returns a value mapping which maps inputs which reasonably mean "on"/"off"
    to the specified ``on_val``/``off_val`` which are to be sent to the
    instrument. This value mapping is such that, when inverted,
    ``on_val``/``off_val`` are mapped to boolean ``True``/``False``.
    """
    # Textual aliases that "reasonably" mean on/off.  True/False come last
    # in each group so that, after inversion, they are the only remaining
    # keys of the inverted mapping.
    # NOTE that 1/0 integer inputs also work implicitly because
    # hash(True) == hash(1) and hash(False) == hash(0), so they need not be
    # listed explicitly.
    on_keys: Tuple[Union[str, bool], ...] = ('On', 'ON', 'on', '1', True)
    off_keys: Tuple[Union[str, bool], ...] = ('Off', 'OFF', 'off', '0', False)

    mapping = OrderedDict((key, on_val) for key in on_keys)
    mapping.update((key, off_val) for key in off_keys)
    return mapping
def abstractmethod(funcobj: Callable) -> Callable:
    """
    A decorator indicating abstract methods.

    Heavily inspired by the decorator of the same name in the ABC standard
    library, but unlike it the owning class remains instantiable; the
    marker attribute set here is used elsewhere to detect that the method
    is abstract and must be overridden.
    """
    # Marker checked by QCoDeS instead of abc's instantiation ban.
    funcobj.__qcodes_is_abstract_method__ = True  # type: ignore[attr-defined]
    return funcobj
def _ruamel_importer():
    # Resolve the YAML class from whichever ruamel distribution is
    # installed, preferring the `ruamel_yaml` package name and falling
    # back to `ruamel.yaml`.
    try:
        from ruamel_yaml import YAML
    except ImportError:
        try:
            from ruamel.yaml import YAML
        except ImportError:
            raise ImportError('No ruamel module found. Please install '
                              'either ruamel.yaml or ruamel_yaml.')
    return YAML


# YAML module to be imported. Resolves naming issues of YAML from pypi and
# anaconda
YAML = _ruamel_importer()
def get_qcodes_path(*subfolder: str) -> str:
    """
    Return full file path of the QCoDeS module. Additional arguments will be
    appended as subfolder.
    """
    # Drop the filename component of qcodes.__file__ to get the package dir.
    path_parts = qcodes.__file__.split(os.sep)[:-1]
    package_dir = os.sep.join(path_parts)
    return os.path.join(package_dir, *subfolder) + os.sep
def get_qcodes_user_path(*file_parts: str) -> str:
    """
    Get ``~/.qcodes`` path or if defined the path defined in the
    ``QCODES_USER_PATH`` environment variable.

    Returns:
        path to the user qcodes directory
    """
    default_base = os.path.join(Path.home(), '.qcodes')
    base = os.environ.get(QCODES_USER_PATH_ENV, default_base)
    # Ensure the parent directory exists before handing the path out.
    os.makedirs(os.path.dirname(base), exist_ok=True)
    return os.path.join(base, *file_parts)
def checked_getattr(
        instance: Any,
        attribute: str,
        expected_type: Union[type, Tuple[type, ...]]
) -> Any:
    """
    Like ``getattr`` but raises type error if not of expected type.

    Args:
        instance: Object to read the attribute from.
        attribute: Name of the attribute to fetch.
        expected_type: Type (or tuple of types) the attribute must match.

    Returns:
        The attribute value.

    Raises:
        AttributeError: If ``instance`` has no such attribute.
        TypeError: If the attribute is not of ``expected_type``.
    """
    attr: Any = getattr(instance, attribute)
    if not isinstance(attr, expected_type):
        # Include context in the message (the original raised a bare,
        # message-less TypeError, which is hard to diagnose).
        raise TypeError('Attribute {!r} is of type {}, expected {}.'.format(
            attribute, type(attr).__name__, expected_type))
    return attr
| 35.012804
| 116
| 0.604717
|
4a06dbeee5a2ebfc3240b782941f56b7845bdf2e
| 4,790
|
py
|
Python
|
enaBrowserTools/enaDataGet.py
|
esteinig/enaBrowserTools
|
f2f69acee61039872243b28dbab0ef32ee651726
|
[
"Apache-2.0"
] | null | null | null |
enaBrowserTools/enaDataGet.py
|
esteinig/enaBrowserTools
|
f2f69acee61039872243b28dbab0ef32ee651726
|
[
"Apache-2.0"
] | null | null | null |
enaBrowserTools/enaDataGet.py
|
esteinig/enaBrowserTools
|
f2f69acee61039872243b28dbab0ef32ee651726
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# enaDataGet.py
#
#
# Copyright 2017 EMBL-EBI, Hinxton outstation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import sys
import sequenceGet
import assemblyGet
import readGet
import utils
def set_parser():
    """Build the command-line argument parser for the enaDataGet tool."""
    p = argparse.ArgumentParser(
        prog='enaDataGet',
        description='Download data for a given accession')
    p.add_argument('accession', help="""Sequence, coding, assembly, run, experiment or
                        analysis accession or WGS prefix (LLLLVV) to download """)
    p.add_argument('-f', '--format', default=None,
                   choices=['embl', 'fasta', 'submitted', 'fastq', 'sra'],
                   help="""File format required. Format requested must be permitted for
                              data type selected. sequence, assembly and wgs accessions: embl(default) and fasta formats.
                              read group: submitted, fastq and sra formats. analysis group: submitted only.""")
    p.add_argument('-d', '--dest', default='.',
                   help='Destination directory (default is current running directory)')
    p.add_argument('-w', '--wgs', action='store_true',
                   help='Download WGS set for each assembly if available (default is false)')
    p.add_argument('-m', '--meta', action='store_true',
                   help='Download read or analysis XML in addition to data files (default is false)')
    p.add_argument('-i', '--index', action='store_true',
                   help="""Download CRAM index files with submitted CRAM files, if any (default is false).
                              This flag is ignored for fastq and sra format options. """)
    p.add_argument('-a', '--aspera', action='store_true',
                   help='Use the aspera command line client to download, instead of FTP.')
    p.add_argument('-as', '--aspera-settings', default=None,
                   help="""Use the provided settings file, will otherwise check
                              for environment variable or default settings file location.""")
    p.add_argument('-v', '--version', action='version', version='%(prog)s 1.4.1')
    return p
if __name__ == '__main__':
    # Parse command-line options.
    parser = set_parser()
    args = parser.parse_args()

    accession = args.accession
    output_format = args.format
    dest_dir = args.dest
    fetch_wgs = args.wgs
    fetch_meta = args.meta
    fetch_index = args.index
    aspera = args.aspera
    aspera_settings = args.aspera_settings

    # Supplying a settings file implies aspera transfer; resolve settings
    # (falls back to environment/default location inside utils.set_aspera).
    if aspera or aspera_settings is not None:
        aspera = utils.set_aspera(aspera_settings)

    try:
        # Dispatch on accession type; each branch validates the requested
        # output format before downloading.
        if utils.is_wgs_set(accession):
            if output_format is not None:
                sequenceGet.check_format(output_format)
            sequenceGet.download_wgs(dest_dir, accession, output_format)
        elif not utils.is_available(accession):
            sys.stderr.write('ERROR: Record does not exist or is not available for accession provided\n')
            sys.exit(1)
        elif utils.is_sequence(accession):
            if output_format is not None:
                sequenceGet.check_format(output_format)
            sequenceGet.download_sequence(dest_dir, accession, output_format)
        elif utils.is_analysis(accession):
            if output_format is not None:
                readGet.check_read_format(output_format)
            readGet.download_files(accession, output_format, dest_dir, fetch_index, fetch_meta, aspera)
        elif utils.is_run(accession) or utils.is_experiment(accession):
            if output_format is not None:
                readGet.check_read_format(output_format)
            readGet.download_files(accession, output_format, dest_dir, fetch_index, fetch_meta, aspera)
        elif utils.is_assembly(accession):
            if output_format is not None:
                assemblyGet.check_format(output_format)
            assemblyGet.download_assembly(dest_dir, accession, output_format, fetch_wgs)
        else:
            sys.stderr.write('ERROR: Invalid accession provided\n')
            sys.exit(1)
        print ('Completed')
    except Exception:
        # Any download failure is reported via utils and exits non-zero.
        utils.print_error()
        sys.exit(1)
| 45.188679
| 121
| 0.646555
|
4a06dd3d00d3e3378737bbd5701a9e2ac32904a7
| 465
|
py
|
Python
|
exceptions.py
|
spausanc/astr-119-hw-1
|
f2e17dbea70f0eebdd3555718285cafce2ac3cf4
|
[
"MIT"
] | null | null | null |
exceptions.py
|
spausanc/astr-119-hw-1
|
f2e17dbea70f0eebdd3555718285cafce2ac3cf4
|
[
"MIT"
] | 1
|
2018-10-25T23:42:12.000Z
|
2018-10-25T23:42:12.000Z
|
exceptions.py
|
spausanc/astr-119-hw-2
|
f2e17dbea70f0eebdd3555718285cafce2ac3cf4
|
[
"MIT"
] | 1
|
2018-10-18T01:53:25.000Z
|
2018-10-18T01:53:25.000Z
|
# Python exceptions let you deal with
# unexpected results
try:
    print(a)  # This will throw an exception since a is not defined
except:
    print("a is not defined!")

# There are specific errors to help with cases;
# the first matching except clause handles the exception
try:
    print(a)  # This will throw an exception since a is not defined
except NameError:
    print("a is still not defined!")
except:
    print("Something else went wrong.")

# This will break our program (uncaught NameError)
# since a is not defined:
print(a)
| 23.25
| 65
| 0.707527
|
4a06dd8b676947adc1c1600e0b2663840b1a36fc
| 8,253
|
py
|
Python
|
nemo/collections/nlp/modules/common/tokenizer_utils.py
|
madhukarkm/NeMo
|
648c97f076147684bee6aaada209f2f20adcaf5d
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/modules/common/tokenizer_utils.py
|
madhukarkm/NeMo
|
648c97f076147684bee6aaada209f2f20adcaf5d
|
[
"Apache-2.0"
] | 1
|
2021-11-10T14:59:54.000Z
|
2021-11-12T16:42:14.000Z
|
nemo/collections/nlp/modules/common/tokenizer_utils.py
|
madhukarkm/NeMo
|
648c97f076147684bee6aaada209f2f20adcaf5d
|
[
"Apache-2.0"
] | 2
|
2021-11-04T14:39:00.000Z
|
2021-11-04T14:56:30.000Z
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
from dataclasses import MISSING, dataclass
from os import path
from typing import Dict, List, Optional
import nemo
from nemo.collections.common.tokenizers.bytelevel_tokenizers import ByteLevelTokenizer
from nemo.collections.common.tokenizers.char_tokenizer import CharTokenizer
from nemo.collections.common.tokenizers.huggingface.auto_tokenizer import AutoTokenizer
from nemo.collections.common.tokenizers.word_tokenizer import WordTokenizer
from nemo.collections.common.tokenizers.youtokentome_tokenizer import YouTokenToMeTokenizer
from nemo.collections.nlp.modules.common.huggingface.huggingface_utils import get_huggingface_pretrained_lm_models_list
from nemo.collections.nlp.modules.common.lm_utils import get_pretrained_lm_models_list
from nemo.collections.nlp.parts.nlp_overrides import HAVE_APEX
from nemo.utils import logging
try:
    from nemo.collections.nlp.modules.common.megatron.megatron_utils import get_megatron_tokenizer

    HAVE_APEX = True
except (ImportError, ModuleNotFoundError):
    # NOTE(review): this rebinds the HAVE_APEX imported from
    # nemo.collections.nlp.parts.nlp_overrides above — confirm intended.
    HAVE_APEX = False

__all__ = ['get_tokenizer', 'get_tokenizer_list']

# Maps Megatron tokenizer class names to the corresponding pretrained
# Megatron model names (used by get_nmt_tokenizer's 'megatron' branch).
megatron_tokenizer_model_map = {
    'BertWordPieceLowerCase': 'megatron-bert-345m-uncased',
    'BertWordPieceCase': 'megatron-bert-345m-cased',
    'GPT2BPETokenizer': 'megatron-gpt-345m',
}
def get_tokenizer_list() -> List[str]:
    """
    Returns all supported tokenizer names
    """
    names = set(get_pretrained_lm_models_list())
    names.update(get_huggingface_pretrained_lm_models_list(include_external=True))
    return ["sentencepiece", "char", "word"] + list(names)
@dataclass
class TokenizerConfig:
    """Configuration schema for constructing a tokenizer.

    Mirrors the keyword arguments accepted by the tokenizer factories in
    this module; ``library`` is mandatory (dataclass ``MISSING`` sentinel).
    """
    library: str = MISSING  # tokenizer backend, e.g. 'yttm', 'huggingface', 'sentencepiece'
    tokenizer_model: Optional[str] = None  # sentencepiece/youtokentome model file
    vocab_size: Optional[int] = None
    vocab_file: Optional[str] = None  # path to vocabulary file
    special_tokens: Optional[Dict[str, str]] = None
    bpe_dropout: Optional[float] = 0.0  # YTTM-only BPE dropout probability
    coverage: Optional[float] = 0.999
    training_sample_size: Optional[int] = None
    r2l: Optional[bool] = False  # return subword IDs right-to-left (YTTM)
def get_tokenizer(
    tokenizer_name: str,
    tokenizer_model: Optional[str] = None,
    vocab_file: Optional[str] = None,
    merges_file: Optional[str] = None,
    special_tokens: Optional[Dict[str, str]] = None,
    use_fast: Optional[bool] = False,
    bpe_dropout: Optional[float] = 0.0,
):
    """
    Args:
        tokenizer_name: sentencepiece or pretrained model from the hugging face list,
            for example: bert-base-cased
            To see the list of all HuggingFace pretrained models, use: nemo_nlp.modules.common.get_huggingface_pretrained_lm_models_list()
        tokenizer_model: tokenizer model file of sentencepiece or youtokentome
        special_tokens: dict of special tokens
        vocab_file: path to vocab file
        merges_file: path to merges file, forwarded to the HuggingFace AutoTokenizer
        use_fast: (only for HuggingFace AutoTokenizer) set to True to use fast HuggingFace tokenizer
        bpe_dropout: (only supported by YTTM tokenizer) BPE dropout tries to corrupt the standard segmentation procedure of BPE to help
            model better learn word compositionality and become robust to segmentation errors.
            It has emperically been shown to improve inference time BLEU scores.

    Returns:
        An initialized tokenizer; any ``tokenizer_name`` not matched below
        falls through to a HuggingFace ``AutoTokenizer``.
    """
    if special_tokens is None:
        special_tokens_dict = {}
    else:
        special_tokens_dict = special_tokens

    # Megatron tokenizers require Apex; vocab/merges files are downloaded if
    # not supplied, then the name is rewritten to the underlying tokenizer.
    if 'megatron' in tokenizer_name:
        if not HAVE_APEX:
            raise RuntimeError("Apex required to use megatron.")
        if vocab_file is None:
            vocab_file = nemo.collections.nlp.modules.common.megatron.megatron_utils.get_megatron_vocab_file(
                tokenizer_name
            )
            merges_file = nemo.collections.nlp.modules.common.megatron.megatron_utils.get_megatron_merges_file(
                tokenizer_name
            )
        tokenizer_name = get_megatron_tokenizer(tokenizer_name)

    if tokenizer_name == 'sentencepiece':
        return nemo.collections.common.tokenizers.sentencepiece_tokenizer.SentencePieceTokenizer(
            model_path=tokenizer_model, special_tokens=special_tokens, legacy=True
        )
    elif tokenizer_name == 'yttm':
        return YouTokenToMeTokenizer(model_path=tokenizer_model, bpe_dropout=bpe_dropout)
    elif tokenizer_name == 'word':
        return WordTokenizer(vocab_file=vocab_file, **special_tokens_dict)
    elif tokenizer_name == 'char':
        return CharTokenizer(vocab_file=vocab_file, **special_tokens_dict)

    # Default: treat any other name as a HuggingFace pretrained model.
    logging.info(
        f"Getting HuggingFace AutoTokenizer with pretrained_model_name: {tokenizer_name}, vocab_file: {vocab_file}, special_tokens_dict: {special_tokens_dict}, and use_fast: {use_fast}"
    )
    return AutoTokenizer(
        pretrained_model_name=tokenizer_name,
        vocab_file=vocab_file,
        merges_file=merges_file,
        **special_tokens_dict,
        use_fast=use_fast,
    )
def get_nmt_tokenizer(
    library: str = 'yttm',
    model_name: Optional[str] = None,
    tokenizer_model: Optional[str] = None,
    vocab_file: Optional[str] = None,
    merges_file: Optional[str] = None,
    special_tokens: Optional[Dict[str, str]] = None,
    use_fast: Optional[bool] = False,
    bpe_dropout: Optional[float] = 0.0,
    r2l: Optional[bool] = False,
):
    """
    Args:
        library: tokenizer backend: 'yttm', 'huggingface', 'sentencepiece',
            'byte-level' or 'megatron'
        model_name: if using a pretrained model from NeMo, HuggingFace, or Megatron
        tokenizer_model: tokenizer model file of sentencepiece or youtokentome
        special_tokens: dict of special tokens
        vocab_file: path to vocab file
        merges_file: path to merges file, forwarded to HuggingFace/Megatron tokenizers
        use_fast: (only for HuggingFace AutoTokenizer) set to True to use fast HuggingFace tokenizer
        bpe_dropout: (only supported by YTTM tokenizer) BPE dropout tries to corrupt the standard segmentation procedure
            of BPE to help model better learn word compositionality and become robust to segmentation errors.
            It has empirically been shown to improve inference time BLEU scores.
        r2l: Whether to return subword IDs from right to left

    Raises:
        NotImplementedError: If ``library`` is not one of the supported backends.
    """
    if special_tokens is None:
        special_tokens_dict = {}
    else:
        special_tokens_dict = special_tokens

    if library == 'yttm':
        logging.info(f'Getting YouTokenToMeTokenizer with model: {tokenizer_model} with r2l: {r2l}.')
        return YouTokenToMeTokenizer(model_path=tokenizer_model, bpe_dropout=bpe_dropout, r2l=r2l)
    elif library == 'huggingface':
        logging.info(f'Getting HuggingFace AutoTokenizer with pretrained_model_name: {model_name}')
        return AutoTokenizer(
            pretrained_model_name=model_name,
            vocab_file=vocab_file,
            merges_file=merges_file,
            **special_tokens_dict,
            use_fast=use_fast,
        )
    elif library == 'sentencepiece':
        logging.info(f'Getting SentencePiece with model: {tokenizer_model}')
        return nemo.collections.common.tokenizers.sentencepiece_tokenizer.SentencePieceTokenizer(
            model_path=tokenizer_model, special_tokens=special_tokens_dict
        )
    elif library == 'byte-level':
        logging.info(f'Using byte-level tokenization')
        return ByteLevelTokenizer()
    elif library == 'megatron':
        # Megatron tokenizer class names are first mapped to pretrained
        # model names, then resolved through get_tokenizer().
        if model_name in megatron_tokenizer_model_map:
            model_name = megatron_tokenizer_model_map[model_name]
        logging.info(
            f'Getting Megatron tokenizer for pretrained model name: {model_name} and custom vocab file: {vocab_file}'
        )
        return get_tokenizer(tokenizer_name=model_name, vocab_file=vocab_file, merges_file=merges_file)
    else:
        raise NotImplementedError(
            'Currently we only support "yttm", "huggingface", "sentencepiece", "megatron", and "byte-level" tokenizer'
            'libraries.'
        )
| 42.541237
| 185
| 0.723131
|
4a06de353f751b7923da3d442bdbeba1268b3324
| 1,704
|
py
|
Python
|
pdfparser/modules/rolls/daman/english.py
|
in-rolls/parse_searchable_rolls
|
f8f41ac4f848c6a29be1d98dcfb00d1549c16e43
|
[
"MIT"
] | 3
|
2020-12-16T09:00:59.000Z
|
2021-07-11T01:17:18.000Z
|
pdfparser/modules/rolls/daman/english.py
|
in-rolls/parse_elex_rolls
|
f8f41ac4f848c6a29be1d98dcfb00d1549c16e43
|
[
"MIT"
] | 2
|
2018-07-09T16:01:26.000Z
|
2019-07-08T05:35:19.000Z
|
pdfparser/modules/rolls/daman/english.py
|
in-rolls/parse_searchable_rolls
|
f8f41ac4f848c6a29be1d98dcfb00d1549c16e43
|
[
"MIT"
] | 3
|
2021-08-24T10:49:50.000Z
|
2022-01-22T01:11:36.000Z
|
from ..base import Reader, Patterns, Elector
__all__ = ['DamanPDF']
class DamanPatterns(Patterns):
    """Regex pattern set for parsing English-language Daman electoral rolls.

    Overrides the base ``Patterns`` groups; a ``None`` entry means no
    pattern is defined for that section in this format (presumably the
    section does not occur — confirm against the base ``Patterns``/``Reader``).
    """
    # Spans of the roll's parts within the extracted page text.
    part_patterns = dict(
        main=r'(?<=Page2)(.*?)(?=\nSUMMARY OF ELECTORS\n)',
        addition=None,
        correction=None,
        deletion=None,
    )
    # One box per elector, anchored on the numbered "F./H./M. Name :" label.
    box_patterns = dict(
        main=r'(\d+\n[FHM]\. Name :(.*?(?=\n\d+\n[FHM]\. Name :)|.*))',
        deletion=None
    )
    # Header / summary fields captured once per roll part.
    general_patterns = dict(
        state=r'(?<=Photo Electoral Roll - \d{4}, )([^\n]+)',
        acName=r'([^\n]+)$',
        partNo=r'^[^\n]+\n(\d+)',
        year=r'Year of Revision +: (\d+)',
        mainTown=r'(?<=Main Village Town\n:\n)([^\na-z]+)',
        policeStation=r'([^\na-z]+)(?=\n\d{6}\n)',
        district=r'(?<=Main Village Town\n:\n)[^\n]+\n([^\na-z]+)',
        stationName=r'\d+ ,.*,([^a-z]+)(?=\n\d+\nPhoto Electoral)',
        stationAddress=r'\d+ ,(.*)(?=\n\d+\nPhoto Electoral)',
        netMale=r'^[^\n]+\n\d+\n\d+\n(\d+)',
        netFemale=r'^[^\n]+\n\d+\n\d+\n\d+\n(\d+)',
        netThird=r'^[^\n]+\n\d+\n\d+\n\d+\n\d+\n\d+\n\d+\n(\d+)',
        netTotal=r'^[^\n]+\n\d+\n\d+\n\d+\n\d+\n(\d+)'
    )
    # Per-elector fields captured within each box.
    elector_patterns = dict(
        number=r'^(\d+)',
        name=r'(?<=Name :- )([^\n]+)',
        relativeName=r'(?<=Name :- )[^\n]+\n(.*?)(?=$|\n)',
        house=r'(?<=[FHM]\. Name :\n)[^\n]+\n([^\n]+)(?=\n[A-Z]{1,5}[0-9\/]+)',
        age=r'(?<=Age:\n)(\d+)',
        sex=r'(?<=[FHM]\. Name :\n)([^\n]+)(?=\n)',
        relativeType=r'(?<=\n)([FHM])(?=\. Name :)'
    )
class DamanElector(Elector):
    # 'H' is the relative-type code for husband entries in this roll format
    # (see the [FHM] alternation in DamanPatterns.elector_patterns).
    husband = 'H'
class DamanPDF(Reader):
    """Reader for Daman electoral-roll PDFs, wired to the Daman-specific
    pattern set and elector class."""
    # Pattern set consulted by the base Reader machinery.
    pat = DamanPatterns()

    def __init__(self, path):
        # Delegate to the base Reader, binding the Daman elector class.
        super(DamanPDF, self).__init__(path, cls_elector=DamanElector)
| 30.981818
| 79
| 0.460681
|
4a06de62ac332216a2cc44f25165b334686fa0e7
| 2,168
|
py
|
Python
|
sa/profiles/HP/1910/get_arp.py
|
xUndero/noc
|
9fb34627721149fcf7064860bd63887e38849131
|
[
"BSD-3-Clause"
] | 1
|
2019-09-20T09:36:48.000Z
|
2019-09-20T09:36:48.000Z
|
sa/profiles/HP/1910/get_arp.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
sa/profiles/HP/1910/get_arp.py
|
ewwwcha/noc
|
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# HP.1910.get_arp
# ---------------------------------------------------------------------
# Copyright (C) 2007-2013 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetarp import IGetARP
class Script(BaseScript):
    """Collect the ARP table from an HP 1910 switch.

    SNMP retrieval is currently disabled (see the commented block in
    ``execute``); entries are parsed from ``display arp`` CLI output.
    """
    name = "HP.1910.get_arp"
    interface = IGetARP
    cache = True

    # One ARP entry per line: IP, MAC, a number, interface name, a number
    # and a single trailing non-space character.  The unnamed numeric
    # fields are presumably aging time and VLAN id — confirm against real
    # 'display arp' output.
    rx_line = re.compile(
        r"^(?P<ip>\S+)\s+(?P<mac>\S+)\s+\d+\s+(?P<interface>\S+)\s+\d+\s+\S$", re.MULTILINE
    )

    def execute(self):
        """Return a list of {ip, mac, interface} dicts for the ARP table."""
        r = []
        # Try SNMP first
        """
        # working but give vlan interface instead port name
        if self.has_snmp():
            try:
                for v in self.snmp.get_tables(
                    ["1.3.6.1.2.1.4.22.1.1", "1.3.6.1.2.1.4.22.1.2",
                    "1.3.6.1.2.1.4.22.1.3"], bulk=True):
                    iface = self.snmp.get("1.3.6.1.2.1.31.1.1.1.1." + v[1],
                        cached=True)  # IF-MIB
                    mac = ":".join(["%02x" % ord(c) for c in v[2]])
                    ip = ["%02x" % ord(c) for c in v[3]]
                    ip = ".".join(str(int(c, 16)) for c in ip)
                    r.append({"ip": ip,
                        "mac": mac,
                        "interface": iface,
                        })
                return r
            except self.snmp.TimeOutError:
                pass
        """
        # Fallback to CLI
        for match in self.rx_line.finditer(self.cli("display arp", cached=True)):
            mac = match.group("mac")
            if mac.lower() == "incomplete":
                # Unresolved entry: keep the IP but report no MAC/interface.
                r.append({"ip": match.group("ip"), "mac": None, "interface": None})
            else:
                iface = match.group("interface")
                # Expand the switch's short interface names.
                iface = iface.replace("GE", "Gi ")
                iface = iface.replace("BAGG", "Po ")
                r.append({"ip": match.group("ip"), "mac": match.group("mac"), "interface": iface})
        return r
| 35.540984
| 98
| 0.416052
|
4a06de876a031af3ee0ba354f126a3faa66b0c84
| 3,034
|
py
|
Python
|
Guess_a_number/guess_a_num.py
|
chineidu/Python_projects
|
6d1909338e964ea36a58d3318eeaf9cc796ecb67
|
[
"MIT"
] | null | null | null |
Guess_a_number/guess_a_num.py
|
chineidu/Python_projects
|
6d1909338e964ea36a58d3318eeaf9cc796ecb67
|
[
"MIT"
] | null | null | null |
Guess_a_number/guess_a_num.py
|
chineidu/Python_projects
|
6d1909338e964ea36a58d3318eeaf9cc796ecb67
|
[
"MIT"
] | null | null | null |
import random
def guess_num(num: int) -> None:
    """ Interactively guess a randomly chosen number between 1 and ``num``.

    Arguments:
    ----------
    num -- An integer that's greater than 1.

    Returns:
    --------
    None
    """
    secret = random.randint(1, num)

    while True:
        guess = int(input(f"Guess a number between 1 and {num}: "))

        if guess < secret:
            print("The number is too low!")
        elif guess > secret:
            print("The number is too high!")
        else:
            print(f"Congrats fam! You guessed the number {secret} correctly!.")
            break
def guess_num_mod(num: int) -> None:
    """ This function is used to guess the computer's number given 3 turns.

    Non-numeric input is re-prompted instead of crashing with a ValueError
    (the original called ``int(input())`` unguarded) and does not consume
    a turn.

    Arguments:
    ----------
    num -- An integer that's greater than 1.

    Returns:
    --------
    None
    """
    random_num = random.randint(1, num)
    counter = 0
    threshold = 3

    while counter < threshold:
        try:
            guess = int(input(f"Guess a number between 1 and {num}: "))
        except ValueError:
            # Robustness fix: invalid input no longer raises.
            print("Please enter a whole number!")
            continue
        counter += 1

        if guess != random_num:
            if guess < random_num:
                print("The number is too low!")
            elif guess > random_num:
                print("The number is too high!")

            if counter == threshold - 1:
                print("\nLast chance!")

            if counter == threshold:
                print(f"Gameover Loser! The number was: '{random_num}'")
                break
        else:
            print(f"Congrats fam! You guessed the number '{random_num}' correctly!.")
            break
def computer_guess_my_num() -> None:
    """ This function is used by the computer to guess the user's number given 3 turns.
    Arguments:
    ----------
    Returns:
    --------
    None
    """
    # Local import: time is only needed for the dramatic pause below.
    import time
    # Binary-search window the computer narrows based on the user's H/L hints.
    low = 1
    high = 15
    counter = 0
    threshold = 4
    msg = "Enter a number between 1 and 15: "
    # NOTE(review): the unconditional `break` at the bottom means this outer loop
    # runs at most one full pass; it only serves to re-prompt on non-digit input.
    while counter < threshold:
        num = input(msg)
        if not num.isdigit():
            # Re-prompt without consuming a turn.
            continue
        while True:
            num = int(num)
            counter += 1
            print("Guess the number in my mind: ")
            time.sleep(1)
            # Computer guesses uniformly within the current window.
            guess = random.randint(low, high)
            print(f"Computer guessed ==> {guess}")
            if guess != num:
                prompt = input(f"Enter 'H' if {guess} is too high or 'L' if it's too low. \n").upper()
                if prompt == "H":
                    high = guess - 1
                elif prompt == "L":
                    low = guess + 1
            if counter == threshold and guess != num:
                print(f"Gameover Loser! The number was: '{num}'")
                break
            elif guess == num:
                print(f"Congrats fam! You guessed the number '{num}' correctly!.")
                break
        break
if __name__ == "__main__":
    # Play the 3-turn variant with an upper bound of 15.
    num = 15
    guess_num_mod(num)
    # computer_guess_my_num()
| 25.711864
| 102
| 0.515491
|
4a06df79a6acb87bef0dffe0ac8eb8ae06eb888b
| 1,930
|
py
|
Python
|
Users/RemindPendingMembers.py
|
dropbox/DropboxBusinessScripts
|
4f4c32ddd488b29e7fd16a40966761e70a758239
|
[
"Apache-2.0"
] | 48
|
2015-11-09T20:05:14.000Z
|
2021-12-17T23:35:12.000Z
|
Users/RemindPendingMembers.py
|
dropbox/DropboxBusinessScripts
|
4f4c32ddd488b29e7fd16a40966761e70a758239
|
[
"Apache-2.0"
] | null | null | null |
Users/RemindPendingMembers.py
|
dropbox/DropboxBusinessScripts
|
4f4c32ddd488b29e7fd16a40966761e70a758239
|
[
"Apache-2.0"
] | 24
|
2015-11-18T16:15:18.000Z
|
2022-03-04T10:35:22.000Z
|
import urllib
import urllib2
import json
import argparse
import sys
from collections import Counter
reload(sys)
sys.setdefaultencoding('UTF8')
parser = argparse.ArgumentParser(description='Send reminder emails to all invited (but not joined) members.')
args = parser.parse_args()
dfbToken = raw_input('Enter your Dropbox Business API App token (Team Member Management permission): ')
# Get all DfB members, paging through member list if necessary
def getDfbMembers(cursor):
    """Return all team members via /2/team/members/list, recursing on the
    pagination cursor until has_more is false. Pass cursor=None for the
    first page. Exits the program via parser.error on HTTP failure."""
    data = {"limit":100}
    if cursor is not None:
        data["cursor"] = cursor
    request = urllib2.Request('https://api.dropboxapi.com/2/team/members/list', json.dumps(data))
    request.add_header("Authorization", "Bearer "+dfbToken)
    request.add_header("Content-type", 'application/json')
    try:
        response = json.loads(urllib2.urlopen(request).read())
        members = response["members"]
        if response["has_more"]:
            # Recurse to fetch the next page and append it.
            members = members + getDfbMembers(cursor=response["cursor"])
        return members
    # Exit on error here. Probably bad OAuth token. Show DfB response.
    except urllib2.HTTPError, error:
        parser.error(error.read())
# Sends a reminder
def remind(memberId):
    """Re-send the welcome email to one invited member, identified by
    team_member_id. Exits the program via parser.error on HTTP failure."""
    params = {'.tag':'team_member_id','team_member_id':memberId}
    request = urllib2.Request('https://api.dropboxapi.com/2/team/members/send_welcome_email', data=json.dumps(params))
    request.add_header("Authorization", "Bearer "+dfbToken)
    request.add_header("Content-type", 'application/json')
    try:
        urllib2.urlopen(request).read()
    except urllib2.HTTPError, error:
        parser.error(error.read())
# Fetch the whole roster, then ping everyone still in "invited" state.
members = getDfbMembers(None)
print "Reminding invited members.."
for member in members:
    if member["profile"]["status"][".tag"] == "invited":
        print " reminding "+member["profile"]["email"]
        remind(member["profile"]["team_member_id"])
print "Done"
| 31.639344
| 118
| 0.690155
|
4a06dfd9a26e7533b319c9cdc7b2bf3c1868a57d
| 238
|
py
|
Python
|
service/service/settings/local.py
|
gitter-badger/share-analytics
|
2a1a6d2d2ba412627750517a6b02088456c04c23
|
[
"Apache-2.0"
] | null | null | null |
service/service/settings/local.py
|
gitter-badger/share-analytics
|
2a1a6d2d2ba412627750517a6b02088456c04c23
|
[
"Apache-2.0"
] | null | null | null |
service/service/settings/local.py
|
gitter-badger/share-analytics
|
2a1a6d2d2ba412627750517a6b02088456c04c23
|
[
"Apache-2.0"
] | null | null | null |
# Django database configuration for local development (PostgreSQL).
# NOTE(review): credentials are hardcoded here — fine for a throwaway local
# settings file, but they must never be reused in a deployed environment.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'public',
        'USER': 'admin',
        'PASSWORD': 'itsover9000',
        'HOST': '127.0.0.1',
        'PORT': '5432',
    }
}
| 21.636364
| 59
| 0.47479
|
4a06e17c2f375b54b22dc567d85674d1d73ae970
| 280
|
py
|
Python
|
tests/test_key_transform.py
|
dana/python-message-transform
|
f505ae814c2430b523751ef86e6772af27593797
|
[
"MIT"
] | null | null | null |
tests/test_key_transform.py
|
dana/python-message-transform
|
f505ae814c2430b523751ef86e6772af27593797
|
[
"MIT"
] | null | null | null |
tests/test_key_transform.py
|
dana/python-message-transform
|
f505ae814c2430b523751ef86e6772af27593797
|
[
"MIT"
] | null | null | null |
import sys
sys.path.append('..')
sys.path.append('.')
from message_transform import mtransform # noqa: E402
def test_simple_key_transform():
    """mtransform mutates `message` in place; this checks that the value of the
    original key 'a' ends up reachable under key 'b' after the transform.
    (Presumably the spec maps key names via the value at 'a' — verify against
    the message_transform docs.)"""
    message = {'a': 'b'}
    mtransform(message, {' specials/$message->{a}': ' specials/$message->{a}'})
    assert message['b'] == 'b'
| 25.454545
| 79
| 0.646429
|
4a06e312c3d1739d7287797f019839de0a837882
| 858
|
py
|
Python
|
pydov/search/bodemdiepteinterval.py
|
DOV-Vlaanderen/dov-pydownloader
|
126b17f4ad870d9fae5cb2c4b868c564cf7cd1b3
|
[
"MIT"
] | null | null | null |
pydov/search/bodemdiepteinterval.py
|
DOV-Vlaanderen/dov-pydownloader
|
126b17f4ad870d9fae5cb2c4b868c564cf7cd1b3
|
[
"MIT"
] | 4
|
2016-12-23T10:36:05.000Z
|
2017-01-03T11:38:39.000Z
|
pydov/search/bodemdiepteinterval.py
|
DOV-Vlaanderen/dov-pydownloader
|
126b17f4ad870d9fae5cb2c4b868c564cf7cd1b3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Module containing the search classes to retrieve DOV
bodemdiepteinterval data."""
from ..types.bodemdiepteinterval import Bodemdiepteinterval
from .abstract import AbstractSearch
class BodemdiepteintervalSearch(AbstractSearch):
    """Search class to retrieve information about bodemdiepteintervallen
    (soil depth intervals) from the DOV 'bodem:bodemdiepteintervallen'
    WFS layer."""
    def __init__(self, objecttype=Bodemdiepteinterval):
        """Initialisation.
        Parameters
        ----------
        objecttype : subclass of pydov.types.abstract.AbstractDovType
            Reference to a class representing the Bodemdiepteinterval type.
            Optional: defaults to the Bodemdiepteinterval type containing the
            fields described in the documentation.
        """
        # Bind this search instance to the fixed DOV layer name.
        super(BodemdiepteintervalSearch, self).__init__(
            'bodem:bodemdiepteintervallen', objecttype)
| 35.75
| 77
| 0.712121
|
4a06e331ec2e457386e696ab5958f864f4a212c5
| 4,027
|
py
|
Python
|
alipay/aop/api/request/KoubeiServindustryExerciseRecordDeleteRequest.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 213
|
2018-08-27T16:49:32.000Z
|
2021-12-29T04:34:12.000Z
|
alipay/aop/api/request/KoubeiServindustryExerciseRecordDeleteRequest.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 29
|
2018-09-29T06:43:00.000Z
|
2021-09-02T03:27:32.000Z
|
alipay/aop/api/request/KoubeiServindustryExerciseRecordDeleteRequest.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 59
|
2018-08-27T16:59:26.000Z
|
2022-03-25T10:08:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.KoubeiServindustryExerciseRecordDeleteModel import KoubeiServindustryExerciseRecordDeleteModel
class KoubeiServindustryExerciseRecordDeleteRequest(object):
    """Generated request wrapper for the Alipay OpenAPI method
    ``koubei.servindustry.exercise.record.delete``.

    Holds the request payload (``biz_model``/``biz_content``) plus common
    gateway parameters (version, terminal info, callback URLs, udf params)
    and serializes them via :meth:`get_params`.
    """
    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False
    @property
    def biz_model(self):
        return self._biz_model
    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value
    @property
    def biz_content(self):
        return self._biz_content
    @biz_content.setter
    def biz_content(self, value):
        # Accept either the typed model or a plain dict; coerce the latter.
        if isinstance(value, KoubeiServindustryExerciseRecordDeleteModel):
            self._biz_content = value
        else:
            self._biz_content = KoubeiServindustryExerciseRecordDeleteModel.from_alipay_dict(value)
    @property
    def version(self):
        return self._version
    @version.setter
    def version(self, value):
        self._version = value
    @property
    def terminal_type(self):
        return self._terminal_type
    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value
    @property
    def terminal_info(self):
        return self._terminal_info
    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value
    @property
    def prod_code(self):
        return self._prod_code
    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value
    @property
    def notify_url(self):
        return self._notify_url
    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value
    @property
    def return_url(self):
        return self._return_url
    @return_url.setter
    def return_url(self, value):
        self._return_url = value
    @property
    def udf_params(self):
        return self._udf_params
    @udf_params.setter
    def udf_params(self, value):
        # Silently ignore non-dict values (generated-SDK convention).
        if not isinstance(value, dict):
            return
        self._udf_params = value
    @property
    def need_encrypt(self):
        return self._need_encrypt
    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value
    def add_other_text_param(self, key, value):
        """Attach one extra text parameter to be merged into the gateway params."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value
    def get_params(self):
        """Serialize all set fields into the flat dict sent to the gateway.
        ``biz_model``/``biz_content`` are JSON-encoded with sorted keys and
        compact separators so the signature is stable."""
        params = dict()
        params[P_METHOD] = 'koubei.servindustry.exercise.record.delete'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        if self.terminal_type:
            params['terminal_type'] = self.terminal_type
        if self.terminal_info:
            params['terminal_info'] = self.terminal_info
        if self.prod_code:
            params['prod_code'] = self.prod_code
        if self.notify_url:
            params['notify_url'] = self.notify_url
        if self.return_url:
            params['return_url'] = self.return_url
        if self.udf_params:
            params.update(self.udf_params)
        return params
    def get_multipart_params(self):
        """No file uploads for this API; always returns an empty dict."""
        multipart_params = dict()
        return multipart_params
| 27.772414
| 148
| 0.649615
|
4a06e404db3ecfde274f00856070f7394b9946fa
| 25,423
|
py
|
Python
|
ncov_ism/_pickism.py
|
EESI/ncov_ism
|
9ef8971eb59b5fd3086c13876818bda94660f0ad
|
[
"BSD-3-Clause"
] | 1
|
2021-12-24T19:36:38.000Z
|
2021-12-24T19:36:38.000Z
|
ncov_ism/_pickism.py
|
EESI/ncov_ism
|
9ef8971eb59b5fd3086c13876818bda94660f0ad
|
[
"BSD-3-Clause"
] | null | null | null |
ncov_ism/_pickism.py
|
EESI/ncov_ism
|
9ef8971eb59b5fd3086c13876818bda94660f0ad
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
import json
import datetime
import pandas as pd
import numpy as np
import itertools
from collections import Counter, OrderedDict
from math import log2, ceil
from Bio import SeqIO
logging.basicConfig(format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
def base_entropy_masked(seq_list, base_set, base_idx):
    """
    Compute masked entropy for a position in the sequences
    Parameters
    ----------
    seq_list: list
        a list of aligned sequences
    base_set: set
        a set of unique characters (unused; kept for interface compatibility)
    base_idx: int
        a position in the sequences
    Returns
    -------
    H: float
        Shannon entropy (base 2) over the unmasked bases at this position;
        0.0 when every base is masked
    masked_pct: float
        percentage of '-' and 'N'
    """
    # entropy analysis
    base_list = [seq[base_idx] for seq in seq_list]
    freq_dict = Counter(base_list)
    mask_list = ('-', 'N')
    total_masked = sum(freq_dict[base] for base in mask_list)
    n_seq = len(base_list) - total_masked
    masked_pct = total_masked / len(base_list)
    # Fix: the original divided by n_seq unconditionally, raising
    # ZeroDivisionError for columns made up entirely of '-'/'N'.
    if n_seq == 0:
        return 0.0, masked_pct
    H = 0.0
    for base, count in freq_dict.items():
        if base in mask_list:
            continue
        P = count / n_seq
        H -= log2(P) * P
    return H, masked_pct
def entropy_analysis(data_df):
    """
    Masked Shannon entropy analysis for sequences
    Parameters
    ----------
    data_df: pandas.DataFrame
        merged Pandas dataframe
    Returns
    -------
    H_list: list
        entropy values for all positions
    null_freq_list: list
        masked percentage for all positions
    """
    sequences = data_df['sequence'].values.tolist()
    # Collect the full alphabet observed across all sequences.
    alphabet = set()
    for sequence in sequences:
        alphabet |= set(sequence)
    genome_length = len(sequences[0])
    step = ceil(genome_length / 10)
    entropies = []
    masked_fractions = []
    for position in range(genome_length):
        # Coarse progress report roughly every 10% of positions.
        if position % step == 0:
            logging.info('Entropy analysis in progress: {}% completed.'.format(10 * position // step))
        entropy, masked = base_entropy_masked(sequences, alphabet, position)
        entropies.append(entropy)
        masked_fractions.append(masked)
    logging.info('Entropy analysis in progress: DONE.')
    return entropies, masked_fractions
def entropy_analysis_customized(data_df, positions):
    """
    Masked Shannon entropy analysis restricted to the given positions
    Parameters
    ----------
    data_df: pandas.DataFrame
        merged Pandas dataframe
    positions: list
        positions to analyse
    Returns
    -------
    H_list: dict
        entropy value per requested position
    null_freq_list: dict
        masked percentage per requested position
    """
    sequences = data_df['sequence'].values.tolist()
    # Collect the full alphabet observed across all sequences.
    alphabet = set()
    for sequence in sequences:
        alphabet |= set(sequence)
    step = ceil(len(positions) / 10)
    entropy_by_pos = {}
    masked_by_pos = {}
    for counter, position in enumerate(positions):
        # Coarse progress report roughly every 10% of requested positions.
        if counter % step == 0:
            logging.info('Entropy analysis in progress: {}% completed.'.format(10 * counter // step))
        entropy, masked = base_entropy_masked(sequences, alphabet, position)
        entropy_by_pos[position] = entropy
        masked_by_pos[position] = masked
    logging.info('Entropy analysis in progress: DONE.')
    return entropy_by_pos, masked_by_pos
def pick_ISM_spots(H_list, null_freq_list, en_thres=0.2, null_thres=0.25):
    """
    Pick Informative Subtype Markers based on masked Shannon entropy
    Parameters
    ----------
    H_list: list
        entropy values for all positions
    null_freq_list: list
        masked percentage for all positions
    en_thres: float
        Threshold for entropy values, entropy values greater than the threshold are valid
    null_thres: float
        Threshold for masked percentages, masked percentages lower than the threshold are valid
    Returns
    -------
    position_list: list
        selected position in a tuple: (postion, entropy values)
    """
    entropies = np.array(H_list)
    masked = np.array(null_freq_list)
    # A position qualifies when it is both informative and rarely masked.
    qualifies = (entropies > en_thres) & (masked < null_thres)
    position_list = [(idx, entropies[idx]) for idx in np.where(qualifies)[0]]
    logging.info('Pick Informative Subtype Markers: {} ISMs picked.'.format(len(position_list)))
    return position_list
def translate(seq):
    '''
    Convert a given sequence of DNA into its Protein equivalent.
    Adapted from https://www.geeksforgeeks.org/dna-protein-python-3/
    Parameters
    ----------
    seq: str
        DNA sequence to translate; must be a whole number of codons,
        otherwise an empty string is returned
    Returns
    -------
    protein: str
        resultant prontein sequence ('X' for unrecognised codons,
        '_' for stop codons)
    '''
    CODON_TABLE = {
        'ATA':'I', 'ATC':'I', 'ATT':'I', 'ATG':'M',
        'ACA':'T', 'ACC':'T', 'ACG':'T', 'ACT':'T',
        'AAC':'N', 'AAT':'N', 'AAA':'K', 'AAG':'K',
        'AGC':'S', 'AGT':'S', 'AGA':'R', 'AGG':'R',
        'CTA':'L', 'CTC':'L', 'CTG':'L', 'CTT':'L',
        'CCA':'P', 'CCC':'P', 'CCG':'P', 'CCT':'P',
        'CAC':'H', 'CAT':'H', 'CAA':'Q', 'CAG':'Q',
        'CGA':'R', 'CGC':'R', 'CGG':'R', 'CGT':'R',
        'GTA':'V', 'GTC':'V', 'GTG':'V', 'GTT':'V',
        'GCA':'A', 'GCC':'A', 'GCG':'A', 'GCT':'A',
        'GAC':'D', 'GAT':'D', 'GAA':'E', 'GAG':'E',
        'GGA':'G', 'GGC':'G', 'GGG':'G', 'GGT':'G',
        'TCA':'S', 'TCC':'S', 'TCG':'S', 'TCT':'S',
        'TTC':'F', 'TTT':'F', 'TTA':'L', 'TTG':'L',
        'TAC':'Y', 'TAT':'Y', 'TAA':'_', 'TAG':'_',
        'TGC':'C', 'TGT':'C', 'TGA':'_', 'TGG':'W',
    }
    if len(seq) % 3 != 0:
        # Partial codons cannot be translated; mirror the original behaviour.
        return ""
    return "".join(CODON_TABLE.get(seq[i:i + 3], 'X') for i in range(0, len(seq), 3))
def find_SNP(position, gene_dict, reference_raw):
    '''
    by Zhengqiao Zhao, v.0.2
    this function will take the position of a SNP as input,
    find the gene associated with this SNP if any
    and output the corresponding codon in reference.
    Parameters
    ----------
    position: int
        1-indexed position of SNP
    gene_dict: dict
        a dictionary of gene sequences, key: (start, end), value: (name, amino acid sequence)
    reference_raw: str
        the nucleotide reference sequence
    Returns
    -------
    codon: str
        codon corresponding to the position (None if no gene matched or the
        codon index falls past the translated protein)
    codon_idx: int
        0-based position of the SNP within its codon
    gene_name: str
        corresponding protein if valid
    codon_pos: int
        1-based index of the codon within the protein
    '''
    for key in gene_dict:
        # iterate over all genes and find the related gene;
        # only genes spanning a whole number of codons are considered.
        if position > key[0] and position <= key[1] and (key[1] - key[0]) % 3 == 0:
            start = key[0]
            end = key[1]
            # extract the nucleotide gene sequence from the reference
            cDNA = reference_raw[start:end]
            # find the codon
            # python 0 indexed position
            delta = position -1 - key[0]
            condon_idx = delta % 3
            condon_pos = delta // 3 + 1
            full_codon = delta - condon_idx
            name, seq = gene_dict[(start, end)]
            # Guard: codon index beyond the annotated protein (e.g. stop codon).
            if int(full_codon/3) >= len(seq):
                return None, None, name, None
            codon = cDNA[full_codon:full_codon+3]
            return codon, condon_idx, name, condon_pos
    # Position falls outside every annotated gene.
    return None, None, None, None
def load_gene_dict(reference_genbank_name="data/covid-19-genbank.gb"):
    """
    Load gene annotations from reference genbank file
    Parameters
    ----------
    reference_genbank_name: str
        path to the reference genbank file
    Returns
    -------
    gene_dict: dict
        dictionary keyed by (start, end) nucleotide coordinates, with values
        ('protein_id: product', amino-acid sequence); only CDS features that
        carry a single 'translation' qualifier are included
    """
    recs = [rec for rec in SeqIO.parse(reference_genbank_name, "genbank")]
    gene_dict = {}
    for rec in recs:
        feats = [feat for feat in rec.features if feat.type == "CDS"]
        for feat in feats:
            content = '{}: {}'.format(feat.qualifiers['protein_id'][0], feat.qualifiers['product'][0])
            # Skip ORF1a; presumably its span is covered by ORF1ab — confirm.
            if feat.qualifiers['product'][0] == 'ORF1a polyprotein':
                continue
            if feat.location_operator == 'join':
                # Compound (joined) locations: record each part separately
                # under the same protein annotation.
                for item in feat.location.parts:
                    key = (item.start.position, item.end.position)
                    if 'translation' in feat.qualifiers:
                        seq = feat.qualifiers['translation']
                        if len(seq) == 1:
                            amino_acid_seq = seq[0]
                            gene_dict[key] = (content, amino_acid_seq)
            else:
                key = (feat.location.start.position, feat.location.end.position)
                if 'translation' in feat.qualifiers:
                    seq = feat.qualifiers['translation']
                    if len(seq) == 1:
                        amino_acid_seq = seq[0]
                        gene_dict[key] = (content, amino_acid_seq)
    return gene_dict
def annotate_ISM(data_df, REFERENCE, position_list, reference_genbank_name="data/covid-19-genbank.gb"):
    """
    Annotate Informative Subtype Markers
    Parameters
    ----------
    data_df: pandas.DataFrame
        merged Pandas dataframe
    REFERENCE: tuple
        tuple containing reference accession number and aligned reference genome
    position_list: list
        selected position in a tuple: (postion, entropy values)
    reference_genbank_name: str
        path to the reference genbank file
    Returns
    -------
    annotation_df: pandas.DataFrame
        Pandas dataframe containing annotaiton information
    """
    seq_list = data_df['sequence'].values.tolist()
    # Build a map from alignment coordinates to (1-based) ungapped reference
    # coordinates: gaps in the aligned reference repeat the current index.
    seq_index = []
    index = 0
    for base in REFERENCE[1]:
        if base == '-':
            seq_index.append(index)
        else:
            index += 1
            seq_index.append(index)
    reference_local_index_map = np.array(seq_index)
    mapped_reference_index = []
    for index, entropy in position_list:
        mapped_reference_index.append((index, reference_local_index_map[index], entropy))
    REFERENCE_ISM = ''.join([REFERENCE[1][item[0]] for item in position_list])
    logging.info('Reference ISM: {}.'.format(REFERENCE_ISM))
    gene_dict = load_gene_dict(reference_genbank_name)
    reference_raw = REFERENCE[1].replace('-', '')
    res = OrderedDict()
    res['Ref position'] = []
    res['Entropy'] = []
    res['Gene'] = []
    res['Is silent'] = []
    res['AA position'] = []
    for align_index, ref_index, entropy in mapped_reference_index:
        codon, codon_idx, name, codon_pos = find_SNP(ref_index, gene_dict, reference_raw)
        # Most common base at this column that differs from the reference.
        base_freq = Counter([item[align_index] for item in seq_list]).most_common()
        for alt_base, count in base_freq:
            if alt_base != reference_raw[ref_index-1]:
                break
        if codon is None:
            # Non-coding (or unresolvable) position: treated as silent.
            if_silence = True
        else:
            # Substitute the alternative base into the reference codon and
            # compare translations to decide synonymy.
            alt_codon = list(codon)
            alt_codon[codon_idx] = alt_base
            alt_codon = ''.join(alt_codon)
            ref_aa = translate(codon)
            ism_aa = translate(alt_codon)
            if ref_aa == ism_aa:
                if_silence = True
            else:
                if_silence = False
        res['Ref position'].append(ref_index)
        res['Entropy'].append(entropy)
        if name is None:
            name = 'Non-coding'
        res['Gene'].append(name)
        res['Is silent'].append(if_silence)
        # codon_pos is None exactly when codon is None, so ref_aa/ism_aa are
        # only referenced on the branch where they were assigned.
        if codon_pos is None:
            res['AA position'].append('NaN')
        else:
            res['AA position'].append('{}{}{}'.format(ref_aa, codon_pos, ism_aa))
    annotation_df = pd.DataFrame.from_dict(res)
    return annotation_df
# ambiguous bases correction
def is_same(error, target, mask, ISM_LEN):
    """
    Check if a masked ambiguous ISM is the same as a non-ambiguous ISM
    Parameters
    ----------
    error: str
        an ambiguous ISM
    target: str
        a non-ambiguous ISM
    mask: list
        boolean list, True at positions to be ignored in the comparison
    ISM_LEN: int
        length of ISM
    Returns
    -------
    res: boolean
        True when every position is either masked or identical in both ISMs
    """
    per_position_match = np.array(list(error)) == np.array(list(target))
    agreement = np.logical_or(mask, per_position_match)
    return agreement.sum() == ISM_LEN
def error_correction(error, ambiguous_base, base_to_ambiguous, ISM_list, ISM_LEN, THRESHOLD = 0.9):
    """
    Correct ISM by replacing ambiguous bases in an ISM by the similar non-ambiguous ISMs
    Parameters
    ----------
    error: str
        an ambiguous ISM
    ambiguous_base: dict
        maps each ambiguous base to the set of concrete bases it can stand for
    base_to_ambiguous: dictionary
        maps a sorted string of concrete bases back to its ambiguous code
    ISM_list: list
        list containing all ISMs
    ISM_LEN: int
        length of ISM
    THRESHOLD: float
        percentage of non-ambiguous supporting instances
    Returns
    -------
    FLAG: boolean
        if fully corrected
    corrected: str
        corrected ISM
    """
    # Support set: every ISM that agrees with `error` everywhere outside the
    # ambiguous (masked) positions.
    mask = [True if base in ambiguous_base else False for base in error]
    support_ISM = []
    for target_ISM in ISM_list:
        if is_same(error, target_ISM, mask, ISM_LEN):
            support_ISM.append(target_ISM)
    partial_correction = list(error)
    FLAG = True
    # Try to resolve each ambiguous position independently.
    for position_idx in list(np.where(mask)[0]):
        # Candidate replacement bases seen in the support set (ignoring the
        # fully-uninformative 'N'/'-' and the base being corrected).
        possible_bases = set([candid_ISM[position_idx] for candid_ISM in support_ISM])
        possible_bases.discard('N')
        possible_bases.discard(error[position_idx])
        possible_bases.discard('-')
        non_ambiguous_set = set([])
        ambiguous_set = set([])
        for base in possible_bases:
            if base not in ambiguous_base:
                non_ambiguous_set.add(base)
            else:
                ambiguous_set.add(base)
        if len(ambiguous_set) == 0:
            # All candidates are concrete bases.
            if len(non_ambiguous_set) == 0:
                continue
            bases = ''.join(sorted(non_ambiguous_set))
            if len(bases) == 1:
                # Unique concrete candidate: accept only with enough support
                # and if it is compatible with the original ambiguity code.
                num_support = len([candid_ISM[position_idx] for candid_ISM in support_ISM if candid_ISM[position_idx] == bases])
                non_support = set([candid_ISM[position_idx] for candid_ISM in support_ISM if candid_ISM[position_idx] != bases])
                if num_support/len(support_ISM) > THRESHOLD and bases in ambiguous_base[error[position_idx]]:
                    partial_correction[position_idx] = bases
                else:
                    FLAG = False
                    logging.debug('Error Correction DEBUG: one-base-correction failed because no enough support: {}/{}: {}->{}'.format(num_support, len(support_ISM), non_support, bases))
            elif bases in base_to_ambiguous:
                # Several concrete candidates: fall back to their ambiguity code.
                FLAG = False
                partial_correction[position_idx] = base_to_ambiguous[bases]
            else:
                FLAG = False
                logging.debug("Error Correction DEBUG: can't find: {}".format(bases))
        else:
            # Some candidates are themselves ambiguous: expand them to the
            # union/intersection of the concrete bases they may represent.
            bases_from_ambiguous_set = set([])
            ambiguous_bases_intersection = ambiguous_base[error[position_idx]].copy()
            for base in ambiguous_set:
                bases_from_ambiguous_set = bases_from_ambiguous_set.union(ambiguous_base[base])
                ambiguous_bases_intersection = ambiguous_bases_intersection.intersection(ambiguous_base[base])
            if bases_from_ambiguous_set.issubset(ambiguous_base[error[position_idx]]) is False:
                logging.debug('Error Correction DEBUG: new bases {} conflict with or are not as good as original bases {}'.format(bases_from_ambiguous_set, ambiguous_base[error[position_idx]]))
                bases_from_ambiguous_set = ambiguous_base[error[position_idx]]
            bases_from_ambiguous_set = ''.join(sorted(bases_from_ambiguous_set))
            bases = ''.join(sorted(non_ambiguous_set))
            if len(bases) == 0:
                bases = bases_from_ambiguous_set
            if len(bases) == 1 and bases in bases_from_ambiguous_set:
                num_support = len([candid_ISM[position_idx] for candid_ISM in support_ISM if candid_ISM[position_idx] == bases])
                non_support = set([candid_ISM[position_idx] for candid_ISM in support_ISM if candid_ISM[position_idx] != bases])
                if num_support/len(support_ISM) > THRESHOLD and bases in ambiguous_bases_intersection:
                    partial_correction[position_idx] = bases
                else:
                    if bases not in ambiguous_bases_intersection:
                        logging.debug('Error Correction DEBUG: conflicts dected between proposed correct and all supporting ISMs')
                        # NOTE(review): set.add() returns None, so this join raises
                        # TypeError if ever reached; `base` is also stale from the
                        # loop above. Likely intended:
                        # bases = ''.join(sorted(ambiguous_bases_intersection | {bases}))
                        bases = ''.join(ambiguous_bases_intersection.add(base))
                    if bases in base_to_ambiguous and set(bases).issubset(ambiguous_base[error[position_idx]]):
                        FLAG = False
                        partial_correction[position_idx] = base_to_ambiguous[bases]
                    else:
                        FLAG = False
                        logging.debug('Error Correction DEBUG: one-base-correction failed because no enough support: {}/{}: {}->{}'.format(num_support, len(support_ISM), non_support, bases))
            else:
                bases = ''.join(sorted(set(bases_from_ambiguous_set + bases)))
                if bases in base_to_ambiguous and set(bases).issubset(ambiguous_base[error[position_idx]]):
                    FLAG = False
                    partial_correction[position_idx] = base_to_ambiguous[bases]
                else:
                    FLAG = False
                    logging.debug('Error Correction DEBUG: new bases {} conflict with or are not as good as original bases {}'.format(bases, ambiguous_base[error[position_idx]]))
    return FLAG, ''.join(partial_correction)
def check_completeness(ISM):
    """
    Check if an ISM is fully corrected (no ambiguous bases)
    Parameters
    ----------
    ISM: str
        an ISM of interest
    Returns
    -------
    FLAG: boolean
        if fully corrected
    """
    # An ISM is complete when every character is a concrete base or a gap.
    return all(base in ('A', 'T', 'C', 'G', '-') for base in ISM)
def ISM_disambiguation(ISM_df, THRESHOLD=0):
    """
    Correct all ISMs by replacing ambiguous bases in an ISM by the similar non-ambiguous ISMs
    Ambiguous bases can be found in https://www.bioinformatics.nl/molbi/SCLResources/sequence_notation.htm
    Parameters
    ----------
    ISM_df: pandas.DataFrame
        Pandas dataframe containing an ISM column to be cleaned
    THRESHOLD: float
        percentage of non-ambiguous supporting instances required to accept a
        correction (forwarded to error_correction)
    Returns
    -------
    ISM_error_correction_partial: dict
        maps each ambiguous ISM to its (possibly partially) corrected form
    ISM_error_correction_full: dict
        maps each ambiguous ISM to its fully corrected form, when achieved
    """
    # IUPAC ambiguity codes -> sets of concrete bases they can stand for.
    ambiguous_base = {'B': set(['C', 'G', 'T']),
                     'D': set(['A', 'G', 'T']),
                     'H': set(['A', 'C', 'T']),
                     'K': set(['G', 'T']),
                     'M': set(['A', 'C']),
                     'N': set(['A', 'C', 'G', 'T']),
                     'R': set(['A', 'G']),
                     'S': set(['C', 'G']),
                     'V': set(['A', 'C', 'G']),
                     'W': set(['A', 'T']),
                     'Y': set(['C', 'T'])}
    # Reverse lookup: sorted string of concrete bases -> ambiguity code.
    base_to_ambiguous = {}
    for base in ambiguous_base:
        bases = ''.join(sorted(ambiguous_base[base]))
        base_to_ambiguous[bases] = base
    ISM_list = list(ISM_df['ISM'].values)
    # Unique ISMs that contain at least one ambiguity code.
    error_ISM_list = list(ISM_df[ISM_df.apply(lambda x,
                            ambiguous_base=ambiguous_base: True if len(set(x['ISM']).intersection(ambiguous_base)) > 0 else False,
                            axis = 1)]['ISM'].unique())
    # Number of records carrying each ambiguous ISM (for reporting).
    ERR_DICT = {}
    for ISM in error_ISM_list:
        ERR_DICT[ISM] = ISM_df[ISM_df['ISM'] == ISM].shape[0]
    ISM_LEN = len(ISM_list[0])
    partial_ISM = 0
    partial_subj = 0
    full_ISM = 0
    full_subj = 0
    total_ISM = len(error_ISM_list)
    total_subj = sum([ERR_DICT[item] for item in ERR_DICT])
    ISM_error_correction_partial = {}
    ISM_error_correction_full = {}
    STEP = ceil(len(error_ISM_list) / 10)
    for ISM_idx, error in enumerate(error_ISM_list):
        # Coarse progress report roughly every 10% of ambiguous ISMs.
        if ISM_idx % STEP == 0:
            logging.info('ISM Disambiguation in progress: {}% completed.'.format(10 * ISM_idx // STEP))
        FLAG, correction = error_correction(error, ambiguous_base, base_to_ambiguous, ISM_list, ISM_LEN, THRESHOLD)
        # Re-check completeness directly rather than trusting FLAG.
        FLAG = check_completeness(correction)
        if error != correction:
            ISM_error_correction_partial[error] = correction
            partial_ISM += 1
            partial_subj += ERR_DICT[error]
        if FLAG and error != correction:
            ISM_error_correction_full[error] = correction
            full_ISM += 1
            full_subj += ERR_DICT[error]
    logging.info('ISM Disambiguation in progress: DONE.')
    logging.info('ISM Disambiguation: percentage of unique ISMs partially corrected: {}'.format(partial_ISM/total_ISM))
    logging.info('ISM Disambiguation: percentage of unique ISMs completely corrected: {}'.format(full_ISM/total_ISM))
    logging.info('ISM Disambiguation: percentage of records (submissions) partially corrected: {}'.format(partial_subj/total_subj))
    logging.info('ISM Disambiguation: percentage of records (submissions) completely corrected: {}'.format(full_subj/total_subj))
    return ISM_error_correction_partial, ISM_error_correction_full
def ISM_disambiguate_fast(ism_list):
    '''Returns a dictionary mapping each distinct ISM to the corresponding disambiguated ISM, or if
    no disambiguation could be identified or it was already unambiguous, itself.
    If ism_list is a list and not a set, then it instead returns a list of corrected ISMs,
    corresponding in order to original list of ISMs.
    '''
    return_list = type(ism_list) is list
    ism_set = set(ism_list)
    ismlen = len(next(iter(ism_list)))  # assume all ISMs are the same length
    # Create mask vectors for each ISM that are True at unambiguous positions
    # (ACTG or gap) and False at ambiguous bases, and separate ISMs into the
    # ones that are clean and the ones that need cleaning.
    ism_clean = set()
    ism_toclean = set()
    ism_mask_map = {}
    for ism in ism_set:
        ism_mask = [base in 'ACTG-' for base in ism]
        # BUGFIX: was `if any(ism_mask)` -> toclean, which sent every ISM with
        # at least one clean base through the O(n^2) pairwise-distance stage
        # and left fully-ambiguous ISMs uncorrected. An ISM is clean only when
        # *all* of its positions are unambiguous.
        if all(ism_mask):
            ism_clean.add(ism)
        else:
            ism_toclean.add(ism)
        ism_mask_map[ism] = ism_mask
    # Clean ISMs translate to themselves.
    ism_disamb = {ism: ism for ism in ism_clean}
    def hamming_distance(string1, string2):
        # Hamming distance between two equal-length strings.
        return sum(c1 != c2 for c1, c2 in zip(string1, string2))
    # Distances from each ambiguous ISM to every other ISM (ambiguous or clean):
    # *any* ISM, including an ambiguous one, may serve as support.
    HDist_Map = {ism: {} for ism in ism_toclean}
    # First, distances among the ISMs to be cleaned.
    for ism_a, ism_b in itertools.combinations(ism_toclean, 2):
        d = hamming_distance(ism_a, ism_b)
        HDist_Map[ism_a][ism_b] = d
        HDist_Map[ism_b][ism_a] = d
    # Second, distances from each ISM to be cleaned to each clean ISM.
    for ism in ism_toclean:
        for ismref in ism_clean:
            HDist_Map[ism][ismref] = hamming_distance(ism, ismref)
    for ism in ism_toclean:
        if not HDist_Map[ism]:
            # No other ISM exists to support a correction; keep it unchanged
            # (previously this raised ValueError from min() on an empty dict).
            ism_disamb[ism] = ism
            continue
        # Support set: all ISMs at minimal Hamming distance.
        min_h_dist = min(HDist_Map[ism].values())
        support = {cand for cand, dist in HDist_Map[ism].items() if dist == min_h_dist}
        # For each ambiguous position, replace the base only when every support
        # ISM that is unambiguous there agrees on a single base.
        ism_new = ism
        for ntpos in (pos for pos, clean in enumerate(ism_mask_map[ism]) if not clean):
            supportbases = [s[ntpos] for s in support if ism_mask_map[s][ntpos]]
            if supportbases and supportbases.count(supportbases[0]) == len(supportbases):
                ism_new = ism_new[:ntpos] + supportbases[0] + ism_new[ntpos + 1:]
        ism_disamb[ism] = ism_new
    if return_list:
        return [ism_disamb[ism] for ism in ism_list]
    else:
        return ism_disamb
| 39.910518
| 193
| 0.6052
|
4a06e4c4ca2e92b339c224058c2e4c7bdd891119
| 14,504
|
py
|
Python
|
legacy/PPchan.py
|
andreagalle/pigcasso
|
e9b60fb595ba6bd2c402a5b4b16665d4d41fa748
|
[
"MIT"
] | null | null | null |
legacy/PPchan.py
|
andreagalle/pigcasso
|
e9b60fb595ba6bd2c402a5b4b16665d4d41fa748
|
[
"MIT"
] | 4
|
2020-06-13T09:17:15.000Z
|
2020-06-17T16:26:03.000Z
|
legacy/PPchan.py
|
andreagalle/pigcasso
|
e9b60fb595ba6bd2c402a5b4b16665d4d41fa748
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
PPchan.py
"""
import modUtils as util
import os, sys, shutil
import draft as fct
import vtk
import numpy as np
import scipy.integrate
import toolCase as toca
import time
from scipy.interpolate import griddata
import matplotlib
import matplotlib.pyplot as plt
"############################ 3D CHANNEL .vtu #####################################"
# calculation of the main quantities to be PP, this function retrieve mean profiles from the .vtu file
def getChannel(workdir, chan, model): # workdir='string/path', chan='string.3Dfile.vtu'
workdir = workdir + '/%s_cc/vtu/'%model
resdir = workdir + '/../../PP_results/profiles_plot/'
check_dir = util.fct_dir_exist(resdir)
if check_dir == False:
os.makedirs(resdir) # make sure the results directoru exists
file_d = workdir + chan
ori = [3,0,0]
norm = [0,0,1]
out_f = fct.getOutputVTKwithPointDataFromFile(file_d)
Slice_out = fct.getSlice(out_f, ori, norm)
ori1 = [3,0,0]
norm1 = [1,0,0]
"############################ GET THE FIELDS #####################################"
gamma = 1.4
R = 0.714286
# scalar fields and bulk data calculation
# [y_coo, P] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'P')
[y_coo, T] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'T')
[y_coo, Rho] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'RHO')
[y_coo, Mu] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'MU_LAM') # not avg because it's almost constant
[y_coo, Mu_sgs] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'MU_SGS_AVG')
Rhoinf = scipy.integrate.simps(Rho[:], y_coo)/2
Muinf = scipy.integrate.simps(Mu[:], y_coo)/2
Tinf = scipy.integrate.simps(T[:], y_coo)/2
a = np.sqrt(gamma*R*Tinf)
# velocity fields and bulk velocity calculation
[y_coo, Urms] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'U_RMS')
[y_coo, Urey] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'U_REY')
[y_coo, U] = util.getVarDataAtPositionNoWall(Slice_out, ori1, norm1, 'U_AVG')
"############################ NORMALIZATION #####################################"
Ut = np.sum(U*norm1, axis=1) ; Ut[-1]=0 ; Ut[0]=0 # tang. velocity (more general than the 'along x' one)
Uinf = scipy.integrate.simps(U[:,0], y_coo)/2 ; print 'Uinf classica : ', Uinf
Ma = Uinf/a ; print 'calculated Mach number (%s) : Ma = '%model, Ma
# wall density calculation to get tau_wall and normalize in wall unit
ori2 = [3., -0.99999, 0.]
norm2 = [0,1,0]
[x_coo, Rhow] = toca.getVarDataAtWall(Slice_out, ori2, norm2, 'RHO')
Rhowall = scipy.integrate.simps(Rhow[:], x_coo)/6
# print 'Rhowall = ', Rhowall, 'and the first Rho at y=0 : ', Rho[0] # to compare the density at the wall and the bulk one
# wall shear stress to calculate the Reynolds wall number
Tw = 0.5*(Ut[5]/y_coo[5]+Ut[-6]/(2.-y_coo[-6]))*Muinf # averaged to be more precise
Utau = np.sqrt(Tw/Rhowall)
# Cf = 2*Tw/(Rhoinf*Uinf**2) # useless in this case
# errorCf = (Cf-0.00667)/0.00667*100 # useless in this case
""" NOTE : Retau and 1/Lw (reciprocal of the friction length) are hereafter confused
because of the channel half height h = 1, but they are not in general the same!!! """
Retau = Utau*Rhowall/Muinf
yp = y_coo*Retau
Up = Ut/Utau # wall units velocity
Mu_sgsp = Mu_sgs.T/Muinf # normalized SGS viscosity
Urms = Urms.T/Utau
Urey = Urey.T/Utau**2 # normalized with the square of the friction velocity
#calculation of the grid size, which is not reliable. It is better to use the ruler on Paraview.
"""
"############################ GRID DIMENSIONS ################################"
# print 'Normalized grid dimensions : '
# ori3 = [3,-0.99,0]
# norm3 = [0,1,0]
# mean_planeZ = fct.getSlice(out_f, ori1, norm1)
# [z_coo, Rhowz] = toca.getVarDataAtWall_GridZ(mean_planeZ, ori3, norm3, 'RHO')
# mean_planeX = fct.getSlice(out_f, ori, norm)
# [x_coo, Rhowz] = toca.getVarDataAtWall(mean_planeX, ori3, norm2, 'RHO')
# # print len(x_coo), len(z_coo)
# # plt.figure()
# # plt.plot(x_coo,z_coo)
# # plt.show()
# print 'deltaYp', np.ediff1d(y_coo)*Retau, '\n deltaXp', np.ediff1d(x_coo)*Retau, '\n deltaZp', np.ediff1d(z_coo)*Retau
# a test to verify the validity of the linear rang near the wall
"############################ BEGIN OF THE SLOPE TEST #####################################"
fig = plt.figure(figsize=(10, 6))
ax = fig.add_subplot(111)
ax.plot(yp,dudy,'b-.',marker='v',label=r'$slopetest$')#,posd[:,0],Cflog*1E3,'k--',pos[:,0],Cfpower*1E3,'b--')
# ax.plot(yp_VRE[1:],(yp_VRE[1:])**3,ls='-.',color='0.5')
ax.set_xlabel(r'$y^+$')
ax.set_ylabel(r'$(du/dy)$')
ax.set_xlim([min(y_coo),7]) # max(y_coo)])
# ax.set_ylim([0,30])
# ax.set_xscale('log')
# ax.set_yscale('log')
plt.legend(loc='best')
plt.grid(which='both', axis='both',color='darkgrey')
fig.savefig(resdir+'/slope_%s.eps'%model)
fig = plt.figure(figsize=(10, 6))
ax = fig.add_subplot(111)
ax.plot(yp,Ut,'b-.',marker='v',label=r'$data$')
ax.plot(yp,Tw/Muinf*y_coo,'r-.',label=r'$regression$')
# ax.plot(yp_VRE[1:],(yp_VRE[1:])**3,ls='-.',color='0.5')
ax.set_xlabel(r'$y^+$')
ax.set_ylabel(r'$(U_t)$')
ax.set_xlim([min(yp),5]) # max(y_coo)])
ax.set_ylim([0,Ut[15]])
# ax.set_xscale('log')
# ax.set_yscale('log')
plt.legend(loc='best')
plt.grid(which='both', axis='both',color='darkgrey')
fig.savefig(resdir+'/linear_range_%s.eps'%model)
print 'check the first y = ', y_coo[:15]
print 'check the velocity graient du/dy = ', dudy[:15]
print 'check the first y+ = ', yp[:15]
"############################ END OF THE SLOPE TEST #####################################"
"""
# print of some of the results obtained
yp_slice = np.array([5, 10, 98, 392]) # where we want to calculate the spectra (depending on th DNS data avalaibility)
y_coo_slice = yp_slice / Retau - np.ones(4)
y_coo_slice_sym = -y_coo_slice
print '--> calculated Bulk density (%s): Rhoinf = '%model, Rhoinf
print '--> calculated Bulk viscosity (%s): Muinf = '%model, Muinf
print '--> calculated Bulk velocity (%s): Uinf = '%model, Uinf
print ''
print '--> calculated wall density (%s): Rhowall = '%model, Rhowall
print '--> calculated wall velocity (%s): u_tau = '%model, Utau,
# print '--> calculated viscosity: nu= ', Muinf/Rhoinf
print '--> calculated wall Reynolds number (%s): Re_tau = '%model, Retau
print '--> calculated friction length (%s): l_w = '%model, 1/Retau
print '--> calculated bulk Reynolds number (%s): Re_b = '%model, Retau*Uinf/Utau
print '--> calculated friction coefficient (%s): Cf = '%model, Cf, ' with an error respect to DNS of : ', errorCf, '%'
print ''
# print of the altitude where to slice, because the plane files are not 2D
# print '--> calculated positions where to slice (%s): y = '%model, np.array_str(y_coo_slice)
# print '--> the symmetric ones to average these (%s): y = '%model, np.array_str(y_coo_slice_sym)
"############################ SAVE DATA #####################################"
np.savetxt(resdir+'/Profiles_%s.dat'%model,np.vstack([y_coo,yp,Up,Urms[0],Urms[1],Urms[2],Urey[2],Mu_sgsp]).T,fmt="%-13e",
header=' y y+ u_mean+ u_rms+ v_rms+ w_rms+ -uv+ mu_sgs+', footer='', comments='# ')
# np.savetxt(resdir+'/wall_distance_%s.dat'%model,np.vstack([y_coo,yp]).T,fmt="%-13e",
# header=' y y+', footer='', comments='# ')
return [yp, Up, Mu_sgsp, y_coo, Ut, Urms, Urey, Uinf, Utau, Retau]
"############################ PROBES #####################################"
# reading of the probes data
def getProbes(workdir, model, Uinf):
    """Read probe signals for a model and convert sample times to flow-through units."""
    probe = '/%s_cc/probes/'%model
    # probe positions are read but only the probe count is used downstream
    positions, n_probes = toca.read_probes_pos(workdir, probe)
    # streamwise averaged velocity and SGS viscosity time series
    ux_signal, t_ux = toca.read_probes_data(workdir, probe, 'U_AVG-X', start=0)
    mu_signal, t_mu = toca.read_probes_data(workdir, probe, 'MU_SGS_AVG', start=0)
    # one flow-through time = (Xmax - Xmin)/Uinf, with a channel length of 6
    flowthrough_ux = Uinf*t_ux/6
    flowthrough_mu = Uinf*t_mu/6
    return [flowthrough_ux, ux_signal, flowthrough_mu, mu_signal, n_probes]
"############################ 2D PLANES .vtu #####################################"
# calculation of the spectra to be PP, this function uses instantaneous velocity fields from the .vtu file
def getSpectra(workdir, model):
    """Compute streamwise and spanwise velocity spectra at 4 wall distances.

    For each of the 4 plane positions, all instantaneous plane .vtu files
    (pairs A/B at symmetric y) are interpolated on a regular grid, their
    spectra computed per velocity component, and averaged over snapshots.
    Results are written to PP_results/spectra_plot/plane_<id>/ and returned
    as [coordX, spectraX, coordZ, spectraZ] (one entry per plane).
    """
    workdir = workdir + '/%s_cc/vtu/planes/'%model
    # this function calculates the spectra at the 4 different position, making averages of
    # symmetric planes thaks to the homogeneous directions
    ID = ['1', '2', '3', '4']
    # y locations of the A planes and of their symmetric B counterparts
    sliceA = [-0.98877096, -0.97754192, -0.77991079, -0.11964314]
    sliceB = [0.98877096, 0.97754192, 0.77991079, 0.11964314]
    coordX = [] ; coordZ = []
    spectraX = [] ; spectraZ = []
    for plane in range(4):
        idp = ID[plane]
        resdir = workdir + '/../../../PP_results/spectra_plot/plane_%s'%idp
        check_dir = util.fct_dir_exist(resdir)
        if check_dir == False:
            # os.mkdir(resdir)
            os.makedirs(resdir)
        file_d = [] # ; cnt = 0  # NOTE(review): cnt is (re)initialized later, before the comp loop
        # collect all snapshot files for this plane pair (A and B)
        for name in os.listdir(workdir): # list entries in workdir in arbitrary order.
            if os.path.isfile(os.path.join(workdir, name)): # it should not be an "is True:" ?
                if name.startswith("cc.%s_Plane%s"%(model,idp)) is True:
                    if name.endswith(".vtu") is True:
                        file_d.append(name) # file_d is a list of planar .vtu files
        print '--> number of planes %sA and %sB founded: '%(idp,idp), len(file_d) # number of files to be Post Processed?
        norm = [0,0,1]
        ori = [0,0,1.5]
        "###############################################################################"
        modX = [] ; modZ = []
        specX = [] ; specZ = []
        # (disabled: custom pi-based axis ticks/labels for the debug plots below)
        time1 = time.time() ; cnt = 0
        for comp in range(3):
            SpanSpectrum = [] ; SpanK = []
            StreamSpectrum = [] ; StreamK = []
            for t in range(len(file_d)): # loop over all the planes (at y=fix) to be PP and then averaged
                out_f = fct.getOutputVTKwithPointDataFromFile(workdir+file_d[t])
                # slice at the A or B symmetric position depending on the file name
                if file_d[t].startswith("cc.%s_Plane%sA"%(model,idp)) is True: # "A", 13) is True:
                    out_f = fct.getSlice(out_f, [0, sliceA[plane], 0], [0, 1, 0])
                    [coord, U] = fct.getArrayFromPointData(out_f, 'U')
                elif file_d[t].startswith("cc.%s_Plane%sB"%(model,idp)) is True: # "B", 13) is True:
                    out_f = fct.getSlice(out_f, [0, sliceB[plane], 0], [0, 1, 0])
                    [coord, U] = fct.getArrayFromPointData(out_f, 'U')
                "# to be COMMENTED when the symmetric planes are available <--------------------- #"
                # out_f = fct.getSlice(out_f, [0, sliceA[plane], 0], [0, 1, 0])
                # [coord, U] = fct.getArrayFromPointData(out_f, 'U')
                "# to be COMMENTED when the symmetric planes are available <--------------------- #"
                # estimate a uniform grid resolution from the largest point spacing;
                # domain is 6 long in x and 3 wide in z
                x = set(coord.T[0])
                nx = int(6./(1*np.ediff1d(np.sort(coord.T[0])).max()))+1
                # print nx
                z = set(coord.T[2])
                nz = int(3./(1*np.ediff1d(np.sort(coord.T[2])).max()))+1
                # print nz
                line_x = np.linspace(0.,6.,nx)
                line_z = np.linspace(-1.5,1.5,nz)
                grid_x, grid_z = np.meshgrid(line_x,line_z) # coord of the 2D domain: array of x for each z and viceversa
                grid_data = griddata((coord[:,0],coord[:,2]), U[:,comp], (grid_x, grid_z), method='cubic')
                # drop grid rows where cubic interpolation produced NaNs (outside hull)
                index = []
                for i in range(nz):
                    if np.isnan(grid_data[i]).any():
                        index.append(i)
                grid_data = np.delete(grid_data, index, axis=0)
                grid_x = np.delete(grid_x, index, axis=0)
                grid_z = np.delete(grid_z, index, axis=0)
                nz = nz - len(index)
                """ # better if commented if not the calculation is much more slow # """
                # (disabled: debug figure comparing original scattered data with the
                # cubic interpolation, saved as U<comp>_plane<id>_<model>.eps)
                "########################## SPECTRA #################################"
                [KX, FFTX, KZ, FFTZ] = toca.computeSpectrum(nx,nz,grid_x,grid_z,grid_data)
                # average each snapshot's spectrum over the homogeneous direction
                SpanK.append(np.mean(KZ,axis=0))
                SpanSpectrum.append(np.mean(FFTZ,axis=0))
                StreamK.append(np.mean(KX,axis=0))
                StreamSpectrum.append(np.mean(FFTX,axis=0))
                # progress report over all snapshots x 3 components
                txt_min,seconds = util.time_stat(time1)
                percent = (cnt+1)*100/((len(file_d)-1)*3) # (len(file_d)-1) if we don't average erase -1
                cmd = ' --> Processing: ' + str(percent) + '%' + ' (elapsed time: %s %2.2f seconds)'%(txt_min,seconds)
                util.fct_progress_bar(cmd,percent/2,50)
                cnt += 1
            # average over snapshots, one entry per velocity component
            modX.append(np.mean(StreamK,axis=0))
            specX.append(np.mean(StreamSpectrum,axis=0))
            modZ.append(np.mean(SpanK,axis=0))
            specZ.append(np.mean(SpanSpectrum,axis=0))
        modX = np.array(modX).real ; modZ = np.array(modZ).real
        specX = np.array(specX).real ; specZ = np.array(specZ).real
        "############################ SAVE DATA #####################################"
        np.savetxt(resdir+'/SpanSpectrum_plane%s_%s.dat'%(idp,model),np.vstack([modZ[0],specZ[0],specZ[1],specZ[2]]).T,fmt="%-13e",
        header='     k            UU            VV            WW', footer='', comments='# ')
        np.savetxt(resdir+'/StreamSpectrum_plane%s_%s.dat'%(idp,model),np.vstack([modX[0],specX[0],specX[1],specX[2]]).T,fmt="%-13e",
        header='     k            UU            VV            WW', footer='', comments='# ')
        coordX.append(modX) ; coordZ.append(modZ)
        spectraX.append(specX) ; spectraZ.append(specZ)
    return [coordX, spectraX, coordZ, spectraZ]
| 39.306233
| 153
| 0.601489
|
4a06e515a6c37fc2503a95598a02935f95c24aa4
| 3,750
|
py
|
Python
|
lib/galaxy/datatypes/xml.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 2
|
2016-02-23T00:09:14.000Z
|
2019-02-11T07:48:44.000Z
|
lib/galaxy/datatypes/xml.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | null | null | null |
lib/galaxy/datatypes/xml.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 6
|
2015-05-27T13:09:50.000Z
|
2019-02-11T07:48:46.000Z
|
"""
XML format classes
"""
import data
import logging
from galaxy.datatypes.sniff import *
log = logging.getLogger(__name__)
class GenericXml( data.Text ):
    """Base format class for any XML file."""
    file_ext = "xml"

    def set_peek( self, dataset, is_multi_byte=False ):
        """Set the peek and blurb text"""
        if not dataset.dataset.purged:
            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
            dataset.blurb = 'XML data'
        else:
            dataset.peek = 'file does not exist'
            dataset.blurb = 'file purged from disk'

    def sniff( self, filename ):
        """
        Determines whether the file is XML or not

        >>> fname = get_test_fname( 'megablast_xml_parser_test1.blastxml' )
        >>> GenericXml().sniff( fname )
        True
        >>> fname = get_test_fname( 'interval.interval' )
        >>> GenericXml().sniff( fname )
        False
        """
        # FIX: resolved the old TODO — use a context manager so the handle is
        # closed even if readline() raises.
        with open(filename) as handle:
            line = handle.readline()
        #TODO - Is there a more robust way to do this?
        return line.startswith('<?xml ')

    def merge(split_files, output_file):
        """Merging multiple XML files is non-trivial and must be done in subclasses."""
        if len(split_files) > 1:
            raise NotImplementedError("Merging multiple XML files is non-trivial and must be implemented for each XML type")
        #For one file only, use base class method (move/copy)
        data.Text.merge(split_files, output_file)
    merge = staticmethod(merge)
class MEMEXml( GenericXml ):
    """MEME XML Output data"""
    file_ext = "memexml"

    def set_peek( self, dataset, is_multi_byte=False ):
        """Set the peek and blurb text"""
        if dataset.dataset.purged:
            dataset.peek = 'file does not exist'
            dataset.blurb = 'file purged from disk'
        else:
            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
            dataset.blurb = 'MEME XML data'

    def sniff( self, filename ):
        # Never auto-detected; the datatype must be assigned explicitly.
        return False
class CisML( GenericXml ):
    """CisML XML data""" #see: http://www.ncbi.nlm.nih.gov/pubmed/15001475
    file_ext = "cisml"

    def set_peek( self, dataset, is_multi_byte=False ):
        """Set the peek and blurb text"""
        if dataset.dataset.purged:
            dataset.peek = 'file does not exist'
            dataset.blurb = 'file purged from disk'
        else:
            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
            dataset.blurb = 'CisML data'

    def sniff( self, filename ):
        # Never auto-detected; the datatype must be assigned explicitly.
        return False
class Phyloxml( GenericXml ):
    """Format for defining phyloxml data http://www.phyloxml.org/"""
    file_ext = "phyloxml"

    def set_peek( self, dataset, is_multi_byte=False ):
        """Set the peek and blurb text"""
        if not dataset.dataset.purged:
            dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte )
            dataset.blurb = 'Phyloxml data'
        else:
            dataset.peek = 'file does not exist'
            dataset.blurb = 'file purged from disk'

    def sniff( self, filename ):
        """Checking for keyword - 'phyloxml' always in lowercase in the first few lines"""
        # BUGFIX: the old readlines(5) passed a *byte* sizehint, so effectively
        # only the first line was inspected despite the docstring; explicitly
        # read the first 5 lines instead (readline() returns '' past EOF).
        f = open( filename, "r" )
        try:
            firstlines = "".join( [ f.readline() for _ in range(5) ] )
        finally:
            f.close()
        return "phyloxml" in firstlines

    def get_visualizations( self, dataset ):
        """
        Returns a list of visualizations for datatype.
        """
        return [ 'phyloviz' ]
| 34.722222
| 124
| 0.610933
|
4a06e597c3608a1bf79d6a651f864ad6f8162aca
| 8,828
|
py
|
Python
|
homeassistant/components/mill/climate.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 3
|
2021-11-22T22:37:43.000Z
|
2022-03-17T00:55:28.000Z
|
homeassistant/components/mill/climate.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 25
|
2021-11-24T06:24:10.000Z
|
2022-03-31T06:23:06.000Z
|
homeassistant/components/mill/climate.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 3
|
2022-01-02T18:49:54.000Z
|
2022-01-25T02:03:54.000Z
|
"""Support for mill wifi-enabled home heaters."""
import mill
import voluptuous as vol
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
FAN_ON,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_IP_ADDRESS,
CONF_USERNAME,
PRECISION_WHOLE,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_AWAY_TEMP,
ATTR_COMFORT_TEMP,
ATTR_ROOM_NAME,
ATTR_SLEEP_TEMP,
CLOUD,
CONNECTION_TYPE,
DOMAIN,
LOCAL,
MANUFACTURER,
MAX_TEMP,
MIN_TEMP,
SERVICE_SET_ROOM_TEMP,
)
# Payload schema for the mill.set_room_temp service: a required room name plus
# any combination of the three temperature presets (positive integers).
SET_ROOM_TEMP_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ROOM_NAME): cv.string,
        vol.Optional(ATTR_AWAY_TEMP): cv.positive_int,
        vol.Optional(ATTR_COMFORT_TEMP): cv.positive_int,
        vol.Optional(ATTR_SLEEP_TEMP): cv.positive_int,
    }
)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up the Mill climate."""
    if entry.data.get(CONNECTION_TYPE) == LOCAL:
        # Local (LAN) connection: one coordinator per device IP, one entity.
        mill_data_coordinator = hass.data[DOMAIN][LOCAL][entry.data[CONF_IP_ADDRESS]]

        async_add_entities([LocalMillHeater(mill_data_coordinator)])
        return

    # Cloud connection: one coordinator per account; add an entity for every
    # heater device it reports (coordinator data may contain other device types).
    mill_data_coordinator = hass.data[DOMAIN][CLOUD][entry.data[CONF_USERNAME]]

    entities = [
        MillHeater(mill_data_coordinator, mill_device)
        for mill_device in mill_data_coordinator.data.values()
        if isinstance(mill_device, mill.Heater)
    ]
    async_add_entities(entities)

    async def set_room_temp(service: ServiceCall) -> None:
        """Set room temp."""
        room_name = service.data.get(ATTR_ROOM_NAME)
        sleep_temp = service.data.get(ATTR_SLEEP_TEMP)
        comfort_temp = service.data.get(ATTR_COMFORT_TEMP)
        away_temp = service.data.get(ATTR_AWAY_TEMP)
        await mill_data_coordinator.mill_data_connection.set_room_temperatures_by_name(
            room_name, sleep_temp, comfort_temp, away_temp
        )

    # NOTE: the room-temperature service is registered only for cloud entries
    # (local devices return early above).
    hass.services.async_register(
        DOMAIN, SERVICE_SET_ROOM_TEMP, set_room_temp, schema=SET_ROOM_TEMP_SCHEMA
    )
class MillHeater(CoordinatorEntity, ClimateEntity):
    """Representation of a Mill Thermostat device."""

    # "Fan off" is represented by HVAC_MODE_OFF rather than a dedicated FAN_OFF
    # constant; _update_attr mirrors the same convention below.
    _attr_fan_modes = [FAN_ON, HVAC_MODE_OFF]
    _attr_max_temp = MAX_TEMP
    _attr_min_temp = MIN_TEMP
    _attr_target_temperature_step = PRECISION_WHOLE
    _attr_temperature_unit = TEMP_CELSIUS

    def __init__(self, coordinator, heater):
        """Initialize the thermostat."""
        super().__init__(coordinator)
        self._available = False

        self._id = heater.device_id
        self._attr_unique_id = heater.device_id
        self._attr_name = heater.name
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, heater.device_id)},
            manufacturer=MANUFACTURER,
            model=f"Generation {heater.generation}",
            name=self.name,
        )
        if heater.is_gen1:
            # Generation 1 heaters cannot be switched off remotely (see
            # async_set_hvac_mode, which also guards on is_gen1).
            self._attr_hvac_modes = [HVAC_MODE_HEAT]
        else:
            self._attr_hvac_modes = [HVAC_MODE_HEAT, HVAC_MODE_OFF]
        if heater.generation < 3:
            self._attr_supported_features = (
                SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
            )
        else:
            # Generation 3+ devices expose no controllable fan here.
            self._attr_supported_features = SUPPORT_TARGET_TEMPERATURE

        self._update_attr(heater)

    async def async_set_temperature(self, **kwargs):
        """Set new target temperature."""
        if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
            return
        await self.coordinator.mill_data_connection.set_heater_temp(
            self._id, int(temperature)
        )
        await self.coordinator.async_request_refresh()

    async def async_set_fan_mode(self, fan_mode):
        """Set new target fan mode."""
        fan_status = 1 if fan_mode == FAN_ON else 0
        await self.coordinator.mill_data_connection.heater_control(
            self._id, fan_status=fan_status
        )
        await self.coordinator.async_request_refresh()

    async def async_set_hvac_mode(self, hvac_mode):
        """Set new target hvac mode."""
        heater = self.coordinator.data[self._id]
        if hvac_mode == HVAC_MODE_HEAT:
            await self.coordinator.mill_data_connection.heater_control(
                self._id, power_status=1
            )
            await self.coordinator.async_request_refresh()
        elif hvac_mode == HVAC_MODE_OFF and not heater.is_gen1:
            # Gen-1 heaters ignore power-off requests, so skip the call for them.
            await self.coordinator.mill_data_connection.heater_control(
                self._id, power_status=0
            )
            await self.coordinator.async_request_refresh()

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        # Combine the coordinator's availability with the device-reported one.
        return super().available and self._available

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._update_attr(self.coordinator.data[self._id])
        self.async_write_ha_state()

    @callback
    def _update_attr(self, heater):
        """Refresh cached entity attributes from a heater snapshot."""
        self._available = heater.available

        self._attr_extra_state_attributes = {
            "open_window": heater.open_window,
            "heating": heater.is_heating,
            "controlled_by_tibber": heater.tibber_control,
            "heater_generation": heater.generation,
        }
        if heater.room:
            self._attr_extra_state_attributes["room"] = heater.room.name
            self._attr_extra_state_attributes["avg_room_temp"] = heater.room.avg_temp
        else:
            self._attr_extra_state_attributes["room"] = "Independent device"

        self._attr_target_temperature = heater.set_temp
        self._attr_current_temperature = heater.current_temp
        # Same fan-off convention as _attr_fan_modes above.
        self._attr_fan_mode = FAN_ON if heater.fan_status == 1 else HVAC_MODE_OFF
        if heater.is_heating == 1:
            self._attr_hvac_action = CURRENT_HVAC_HEAT
        else:
            self._attr_hvac_action = CURRENT_HVAC_IDLE
        # Gen-1 heaters are always considered heating-capable (cannot be off).
        if heater.is_gen1 or heater.power_status == 1:
            self._attr_hvac_mode = HVAC_MODE_HEAT
        else:
            self._attr_hvac_mode = HVAC_MODE_OFF
class LocalMillHeater(CoordinatorEntity, ClimateEntity):
    """Representation of a Mill Thermostat device."""

    # Locally-connected (generation 3) heaters only expose heat mode and
    # target-temperature control.
    _attr_hvac_mode = HVAC_MODE_HEAT
    _attr_hvac_modes = [HVAC_MODE_HEAT]
    _attr_max_temp = MAX_TEMP
    _attr_min_temp = MIN_TEMP
    _attr_supported_features = SUPPORT_TARGET_TEMPERATURE
    _attr_target_temperature_step = PRECISION_WHOLE
    _attr_temperature_unit = TEMP_CELSIUS

    def __init__(self, coordinator):
        """Initialize the thermostat."""
        super().__init__(coordinator)
        self._attr_name = coordinator.mill_data_connection.name
        # Only register a unique id / device entry when a MAC address is known.
        if mac := coordinator.mill_data_connection.mac_address:
            self._attr_unique_id = mac
            self._attr_device_info = DeviceInfo(
                connections={(CONNECTION_NETWORK_MAC, mac)},
                configuration_url=self.coordinator.mill_data_connection.url,
                manufacturer=MANUFACTURER,
                model="Generation 3",
                name=coordinator.mill_data_connection.name,
                sw_version=coordinator.mill_data_connection.version,
            )

        self._update_attr()

    async def async_set_temperature(self, **kwargs):
        """Set new target temperature."""
        if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
            return
        await self.coordinator.mill_data_connection.set_target_temperature(
            int(temperature)
        )
        await self.coordinator.async_request_refresh()

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._update_attr()
        self.async_write_ha_state()

    @callback
    def _update_attr(self) -> None:
        """Refresh cached entity attributes from the coordinator's data dict."""
        data = self.coordinator.data
        self._attr_target_temperature = data["set_temperature"]
        self._attr_current_temperature = data["ambient_temperature"]

        # Any non-zero power draw is treated as actively heating.
        if data["current_power"] > 0:
            self._attr_hvac_action = CURRENT_HVAC_HEAT
        else:
            self._attr_hvac_action = CURRENT_HVAC_IDLE
| 35.312
| 87
| 0.684073
|
4a06e5de2da165a36c6b311bf97fa8f2e1be6e98
| 95,575
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/appservice/_help.py
|
sudokylumaster/azure-cli
|
5bbb75e4860dcd086ca8b4c413a64acb9a6dbb2f
|
[
"MIT"
] | 1
|
2020-08-25T00:21:50.000Z
|
2020-08-25T00:21:50.000Z
|
src/azure-cli/azure/cli/command_modules/appservice/_help.py
|
sudokylumaster/azure-cli
|
5bbb75e4860dcd086ca8b4c413a64acb9a6dbb2f
|
[
"MIT"
] | null | null | null |
src/azure-cli/azure/cli/command_modules/appservice/_help.py
|
sudokylumaster/azure-cli
|
5bbb75e4860dcd086ca8b4c413a64acb9a6dbb2f
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.help_files import helps # pylint: disable=unused-import
# pylint: disable=line-too-long, too-many-lines
# Help text for the top-level `az appservice` group and the
# hybrid-connection key management command.
# Fix: the example placeholder was misspelled "HybridConectionNamespace".
helps['appservice'] = """
type: group
short-summary: Manage App Service plans.
"""
helps['appservice hybrid-connection'] = """
type: group
short-summary: a method that sets the key a hybrid-connection uses
"""
helps['appservice hybrid-connection set-key'] = """
type: command
short-summary: set the key that all apps in an appservice plan use to connect to the hybrid-connections in that appservice plan
examples:
- name: set the key that all apps in an appservice plan use to connect to the hybrid-connections in that appservice plan
text: az appservice hybrid-connection set-key -g MyResourceGroup --plan MyAppServicePlan --namespace [HybridConnectionNamespace] --hybrid-connection [HybridConnectionName] --key-type ["primary"/"secondary"]
"""
# Help text for `az appservice list-locations` and the
# `az appservice plan` create/delete/list/show commands.
helps['appservice list-locations'] = """
type: command
short-summary: List regions where a plan sku is available.
examples:
- name: List regions where a plan sku is available. (autogenerated)
text: az appservice list-locations --sku F1
crafted: true
"""
helps['appservice plan'] = """
type: group
short-summary: Manage app service plans.
"""
helps['appservice plan create'] = """
type: command
short-summary: Create an app service plan.
examples:
- name: Create a basic app service plan.
text: >
az appservice plan create -g MyResourceGroup -n MyPlan
- name: Create a standard app service plan with four Linux workers.
text: >
az appservice plan create -g MyResourceGroup -n MyPlan \\
--is-linux --number-of-workers 4 --sku S1
- name: Create a Windows container app service plan.
text: >
az appservice plan create -g MyResourceGroup -n MyPlan \\
--hyper-v --sku P1V3
- name: Create an app service plan for app service environment.
text: >
az appservice plan create -g MyResourceGroup -n MyPlan \\
--app-service-environment MyAppServiceEnvironment --sku I1
"""
helps['appservice plan delete'] = """
type: command
short-summary: Delete an app service plan.
examples:
- name: Delete an app service plan. (autogenerated)
text: az appservice plan delete --name MyAppServicePlan --resource-group MyResourceGroup
crafted: true
"""
helps['appservice plan list'] = """
type: command
short-summary: List app service plans.
examples:
- name: List all free tier App Service plans.
text: >
az appservice plan list --query "[?sku.tier=='Free']"
- name: List all App Service plans for an App Service environment.
text: >
az appservice plan list --query "[?hostingEnvironmentProfile.name=='<ase-name>']"
"""
helps['appservice plan show'] = """
type: command
short-summary: Get the app service plans for a resource group or a set of resource groups.
examples:
- name: Get the app service plans for a resource group or a set of resource groups. (autogenerated)
text: az appservice plan show --name MyAppServicePlan --resource-group MyResourceGroup
crafted: true
"""
# Help text for `az appservice plan update`.
# Fix: the fwlink URL was malformed ("https:///" with a triple slash).
helps['appservice plan update'] = """
type: command
short-summary: Update an app service plan.
long-summary: See https://go.microsoft.com/fwlink/?linkid=2133856 to learn more.
examples:
- name: Update an app service plan. (autogenerated)
text: az appservice plan update --name MyAppServicePlan --resource-group MyResourceGroup --sku F1
crafted: true
"""
# Help text for `az appservice vnet-integration`, the top-level
# `az functionapp` group, and the functionapp config
# access-restriction / appsettings commands.
helps['appservice vnet-integration'] = """
type: group
short-summary: a method that lists the virtual network integrations used in an appservice plan
"""
helps['appservice vnet-integration list'] = """
type: command
short-summary: list the virtual network integrations used in an appservice plan
examples:
- name: list the virtual network integrations used in an appservice plan
text: az appservice vnet-integration list -g MyResourceGroup --plan MyAppServicePlan
"""
helps['functionapp'] = """
type: group
short-summary: Manage function apps. To install the Azure Functions Core tools see https://github.com/Azure/azure-functions-core-tools
"""
helps['functionapp config'] = """
type: group
short-summary: Configure a function app.
"""
helps['functionapp config access-restriction'] = """
type: group
short-summary: Methods that show, set, add, and remove access restrictions on a functionapp
"""
helps['functionapp config access-restriction add'] = """
type: command
short-summary: Adds an Access Restriction to the functionapp
examples:
- name: Add Access Restriction opening (Allow) named developers for IPv4 address 130.220.0.0/27 with priority 200 to main site.
text: az functionapp config access-restriction add -g ResourceGroup -n AppName --rule-name developers --action Allow --ip-address 130.220.0.0/27 --priority 200
- name: Add Access Restriction opening (Allow) named build_server for IPv4 address 192.168.0.0/27 with priority 250 to scm site.
text: az functionapp config access-restriction add -g ResourceGroup -n AppName --rule-name build_server --action Allow --ip-address 192.168.0.0/27 --priority 250 --scm-site true
- name: Add Access Restriction opening (Allow) named app_gateway for Subnet app_gw in vNet core_weu with priority 300 to main site.
text: az functionapp config access-restriction add -g ResourceGroup -n AppName --rule-name app_gateway --action Allow --vnet-name core_weu --subnet app_gateway --priority 300
- name: Add Access Restriction opening (Allow) named internal_agents for Subnet build_agents in vNet corp01 with priority 500 to scm site; and ignore service endpoint registration on the Subnet.
text: az functionapp config access-restriction add -g ResourceGroup -n AppName --rule-name internal_agents --action Allow --vnet-name corp01 --subnet build_agents --priority 500 --scm-site true --ignore-missing-endpoint true
- name: Add Access Restriction opening (Allow) named remote_agents in vNet 'corp01' in rg 'vnets' with subnet 'agents'
text: az functionapp config access-restriction add -g ResourceGroup -n AppName --rule-name remote_agents --action Allow --vnet-name corp01 --subnet agents --priority 500 --vnet-resource-group vnets
- name: Add Access Restriction opening (Allow) named agents in vNet 'corp01' in rg 'vnets' with subnet 'agents' (using subnet resource id)
text: az functionapp config access-restriction add -g ResourceGroup -n AppName --rule-name remote_agents --action Allow --priority 800 --subnet '/subscriptions/<subscription-id>/resourceGroups/vnets/providers/Microsoft.Network/virtualNetworks/corp01/subnets/agents'
"""
helps['functionapp config access-restriction remove'] = """
type: command
short-summary: Removes an Access Restriction from the functionapp.
examples:
- name: Remove Access Restriction named developers from the main site.
text: az functionapp config access-restriction remove -g ResourceGroup -n AppName --rule-name developers
- name: Remove Access Restriction named internal_agents from the scm site.
text: az functionapp config access-restriction remove -g ResourceGroup -n AppName --rule-name internal_agents --scm-site true
"""
helps['functionapp config access-restriction set'] = """
type: command
short-summary: Sets if SCM site is using the same restrictions as the main site.
examples:
- name: Enable SCM site to use same access restrictions as main site.
text: az functionapp config access-restriction set -g ResourceGroup -n AppName --use-same-restrictions-for-scm-site true
"""
helps['functionapp config access-restriction show'] = """
type: command
short-summary: Show Access Restriction settings for functionapp.
examples:
- name: Get Access Restriction settings for a functionapp.
text: az functionapp config access-restriction show -g ResourceGroup -n AppName
"""
helps['functionapp config appsettings'] = """
type: group
short-summary: Configure function app settings.
"""
helps['functionapp config appsettings delete'] = """
type: command
short-summary: Delete a function app's settings.
examples:
- name: Delete a function app's settings. (autogenerated)
text: az functionapp config appsettings delete --name MyFunctionApp --resource-group MyResourceGroup --setting-names {setting-names}
crafted: true
"""
# Help text for functionapp config appsettings/container/hostname
# commands, the generic config set/show commands, and the SSL
# certificate management commands.
helps['functionapp config appsettings list'] = """
type: command
short-summary: Show settings for a function app.
examples:
- name: Show settings for a function app. (autogenerated)
text: az functionapp config appsettings list --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config appsettings set'] = """
type: command
short-summary: Update a function app's settings.
examples:
- name: Update a function app's settings.
text: |
az functionapp config appsettings set --name MyFunctionApp --resource-group MyResourceGroup --settings "AzureWebJobsStorage=$storageConnectionString"
"""
helps['functionapp config container'] = """
type: group
short-summary: Manage function app container settings.
"""
helps['functionapp config container delete'] = """
type: command
short-summary: Delete a function app container's settings.
"""
helps['functionapp config container set'] = """
type: command
short-summary: Set a function app container's settings.
examples:
- name: Set a function app container's settings. (autogenerated)
text: az functionapp config container set --docker-custom-image-name MyDockerCustomImage --docker-registry-server-password StrongPassword --docker-registry-server-url https://{azure-container-registry-name}.azurecr.io --docker-registry-server-user DockerUserId --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config container show'] = """
type: command
short-summary: Get details of a function app container's settings.
examples:
- name: Get details of a function app container's settings. (autogenerated)
text: az functionapp config container show --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config hostname'] = """
type: group
short-summary: Configure hostnames for a function app.
"""
helps['functionapp config hostname add'] = """
type: command
short-summary: Bind a hostname to a function app.
examples:
- name: Bind a hostname to a function app. (autogenerated)
text: az functionapp config hostname add --hostname www.yourdomain.com --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config hostname delete'] = """
type: command
short-summary: Unbind a hostname from a function app.
"""
helps['functionapp config hostname get-external-ip'] = """
type: command
short-summary: Get the external-facing IP address for a function app.
examples:
- name: Get the external-facing IP address for a function app. (autogenerated)
text: az functionapp config hostname get-external-ip --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config hostname list'] = """
type: command
short-summary: List all hostname bindings for a function app.
examples:
- name: List all hostname bindings for a function app. (autogenerated)
text: az functionapp config hostname list --resource-group MyResourceGroup --webapp-name MyWebapp
crafted: true
"""
helps['functionapp config set'] = """
type: command
short-summary: Set the function app's configuration.
examples:
- name: Set the function app's configuration. (autogenerated)
text: az functionapp config set --always-on true --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config show'] = """
type: command
short-summary: Get the details of a function app's configuration.
examples:
- name: Get the details of a web app's configuration. (autogenerated)
text: az functionapp config show --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config ssl'] = """
type: group
short-summary: Configure SSL certificates.
"""
helps['functionapp config ssl bind'] = """
type: command
short-summary: Bind an SSL certificate to a function app.
examples:
- name: Bind an SSL certificate to a function app. (autogenerated)
text: az functionapp config ssl bind --certificate-thumbprint {certificate-thumbprint} --name MyFunctionApp --resource-group MyResourceGroup --ssl-type SNI
crafted: true
"""
helps['functionapp config ssl delete'] = """
type: command
short-summary: Delete an SSL certificate from a function app.
"""
helps['functionapp config ssl list'] = """
type: command
short-summary: List SSL certificates for a function app.
examples:
- name: List SSL certificates for a function app. (autogenerated)
text: az functionapp config ssl list --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config ssl show'] = """
type: command
short-summary: Show the details of an SSL certificate for a function app.
examples:
- name: Show the details of an SSL certificate for a function app. (autogenerated)
text: az functionapp config ssl show --resource-group MyResourceGroup --certificate-name cname.mycustomdomain.com
crafted: true
"""
helps['functionapp config ssl unbind'] = """
type: command
short-summary: Unbind an SSL certificate from a function app.
"""
helps['functionapp config ssl upload'] = """
type: command
short-summary: Upload an SSL certificate to a function app.
examples:
- name: Upload an SSL certificate to a function app. (autogenerated)
text: az functionapp config ssl upload --certificate-file {certificate-file} --certificate-password {certificate-password} --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp config ssl import'] = """
type: command
short-summary: Import an SSL certificate to a function app from Key Vault.
examples:
- name: Import an SSL certificate to a function app from Key Vault.
text: az functionapp config ssl import --resource-group MyResourceGroup --name MyFunctionApp --key-vault MyKeyVault --key-vault-certificate-name MyCertificateName
- name: Import an SSL certificate to a function app from Key Vault using resource id (typically if Key Vault is in another subscription).
text: az functionapp config ssl import --resource-group MyResourceGroup --name MyFunctionApp --key-vault '/subscriptions/[sub id]/resourceGroups/[rg]/providers/Microsoft.KeyVault/vaults/[vault name]' --key-vault-certificate-name MyCertificateName
"""
helps['functionapp config ssl create'] = """
type: command
short-summary: Create a Managed Certificate for a hostname in a function app.
examples:
- name: Create a Managed Certificate for cname.mycustomdomain.com.
text: az functionapp config ssl create --resource-group MyResourceGroup --name MyWebapp --hostname cname.mycustomdomain.com
"""
helps['functionapp cors'] = """
type: group
short-summary: Manage Cross-Origin Resource Sharing (CORS)
"""
# Help text for functionapp CORS commands, app create/delete, and the
# deployment (container, credentials, slots, source-control) commands.
helps['functionapp cors add'] = """
type: command
short-summary: Add allowed origins
examples:
- name: add a new allowed origin
text: >
az functionapp cors add -g {myRG} -n {myAppName} --allowed-origins https://myapps.com
"""
helps['functionapp cors remove'] = """
type: command
short-summary: Remove allowed origins
examples:
- name: remove an allowed origin
text: >
az functionapp cors remove -g {myRG} -n {myAppName} --allowed-origins https://myapps.com
- name: remove all allowed origins
text: >
az functionapp cors remove -g {myRG} -n {myAppName} --allowed-origins
"""
helps['functionapp cors show'] = """
type: command
short-summary: show allowed origins
examples:
- name: show allowed origins (autogenerated)
text: az functionapp cors show --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp create'] = """
type: command
short-summary: Create a function app.
long-summary: The function app's name must be able to produce a unique FQDN as AppName.azurewebsites.net.
examples:
- name: Create a basic function app.
text: >
az functionapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName -s MyStorageAccount
- name: Create a function app. (autogenerated)
text: az functionapp create --consumption-plan-location westus --name MyUniqueAppName --os-type Windows --resource-group MyResourceGroup --runtime dotnet --storage-account MyStorageAccount
crafted: true
- name: Create a function app using a private ACR image.
text: >
az functionapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName --runtime node --storage-account MyStorageAccount --deployment-container-image-name myacr.azurecr.io/myimage:tag --docker-registry-server-password passw0rd --docker-registry-server-user MyUser
"""
helps['functionapp delete'] = """
type: command
short-summary: Delete a function app.
examples:
- name: Delete a function app. (autogenerated)
text: az functionapp delete --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment'] = """
type: group
short-summary: Manage function app deployments.
"""
helps['functionapp deployment container'] = """
type: group
short-summary: Manage container-based continuous deployment.
"""
helps['functionapp deployment container config'] = """
type: command
short-summary: Configure continuous deployment via containers.
examples:
- name: Configure continuous deployment via containers (autogenerated)
text: az functionapp deployment container config --enable-cd true --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment container show-cd-url'] = """
type: command
short-summary: Get the URL which can be used to configure webhooks for continuous deployment.
examples:
- name: Get the URL which can be used to configure webhooks for continuous deployment. (autogenerated)
text: az functionapp deployment container show-cd-url --ids {ids}
crafted: true
- name: Get the URL which can be used to configure webhooks for continuous deployment. (autogenerated)
text: az functionapp deployment container show-cd-url --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment list-publishing-credentials'] = """
type: command
short-summary: Get the details for available function app publishing credentials.
examples:
- name: Get the details for available function app deployment publishing credentials.
text: az functionapp deployment list-publishing-credentials --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment list-publishing-profiles'] = """
type: command
short-summary: Get the details for available function app deployment profiles.
examples:
- name: Get the details for available function app deployment profiles. (autogenerated)
text: az functionapp deployment list-publishing-profiles --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment slot'] = """
type: group
short-summary: Manage function app deployment slots.
"""
helps['functionapp deployment slot auto-swap'] = """
type: command
short-summary: Configure deployment slot auto swap.
"""
helps['functionapp deployment slot create'] = """
type: command
short-summary: Create a deployment slot.
examples:
- name: Create a deployment slot. (autogenerated)
text: az functionapp deployment slot create --name MyFunctionapp --resource-group MyResourceGroup --slot staging
crafted: true
"""
helps['functionapp deployment slot delete'] = """
type: command
short-summary: Delete a deployment slot.
examples:
- name: Delete a deployment slot. (autogenerated)
text: az functionapp deployment slot delete --name MyFunctionapp --resource-group MyResourceGroup --slot staging
crafted: true
"""
helps['functionapp deployment slot list'] = """
type: command
short-summary: List all deployment slots.
examples:
- name: List all deployment slots. (autogenerated)
text: az functionapp deployment slot list --name MyFunctionapp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment slot swap'] = """
type: command
short-summary: Change deployment slots for a function app.
examples:
- name: Swap a staging slot into production for the MyUniqueApp function app.
text: >
az functionapp deployment slot swap -g MyResourceGroup -n MyUniqueApp --slot staging \\
--target-slot production
"""
helps['functionapp deployment source'] = """
type: group
short-summary: Manage function app deployment via source control.
"""
helps['functionapp deployment source config'] = """
type: command
short-summary: Manage deployment from git or Mercurial repositories.
examples:
- name: Manage deployment from git or Mercurial repositories. (autogenerated)
text: az functionapp deployment source config --branch master --manual-integration --name MyFunctionApp --repo-url https://github.com/Azure-Samples/function-image-upload-resize --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment source config-local-git'] = """
type: command
short-summary: Get a URL for a git repository endpoint to clone and push to for function app deployment.
examples:
- name: Get an endpoint and add it as a git remote.
text: >
az functionapp deployment source config-local-git \\
-g MyResourceGroup -n MyUniqueApp
git remote add azure \\
https://{deploy_user_name}@MyUniqueApp.scm.azurewebsites.net/MyUniqueApp.git
"""
# Help text for zip deployment, deployment user credentials,
# DevOps pipeline integration, and per-function commands (show,
# delete, function keys).
helps['functionapp deployment source config-zip'] = """
type: command
short-summary: Perform deployment using the kudu zip push deployment for a function app.
long-summary: >
By default Kudu assumes that zip deployments do not require any build-related actions like
npm install or dotnet publish. This can be overridden by including an .deployment file in your
zip file with the following content '[config] SCM_DO_BUILD_DURING_DEPLOYMENT = true',
to enable Kudu detection logic and build script generation process.
See https://github.com/projectkudu/kudu/wiki/Configurable-settings#enabledisable-build-actions-preview.
Alternately the setting can be enabled using the az functionapp config appsettings set command.
examples:
- name: Perform deployment by using zip file content.
text: >
az functionapp deployment source config-zip \\
-g {myRG} -n {myAppName} \\
--src {zipFilePathLocation}
"""
helps['functionapp deployment source delete'] = """
type: command
short-summary: Delete a source control deployment configuration.
examples:
- name: Delete a source control deployment configuration. (autogenerated)
text: az functionapp deployment source delete --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment source show'] = """
type: command
short-summary: Get the details of a source control deployment configuration.
examples:
- name: Get the details of a source control deployment configuration. (autogenerated)
text: az functionapp deployment source show --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment source sync'] = """
type: command
short-summary: Synchronize from the repository. Only needed under manual integration mode.
examples:
- name: Synchronize from the repository. Only needed under manual integration mode. (autogenerated)
text: az functionapp deployment source sync --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp deployment user'] = """
type: group
short-summary: Manage user credentials for deployment.
"""
helps['functionapp deployment user set'] = """
type: command
short-summary: Update deployment credentials.
long-summary: All function and web apps in the subscription will be impacted since they share the same deployment credentials.
examples:
- name: Set FTP and git deployment credentials for all apps.
text: >
az functionapp deployment user set --user-name MyUserName
"""
helps['functionapp devops-pipeline'] = """
type: group
short-summary: Azure Function specific integration with Azure DevOps. Please visit https://aka.ms/functions-azure-devops for more information.
"""
helps['functionapp devops-pipeline create'] = """
type: command
short-summary: Create an Azure DevOps pipeline for a function app.
examples:
- name: create an Azure Pipeline to a function app.
text: >
az functionapp devops-pipeline create --functionapp-name FunctionApp
- name: create an Azure Pipeline from a Github function app repository.
text: >
az functionapp devops-pipeline create --github-repository GithubOrganization/GithubRepository --github-pat GithubPersonalAccessToken
- name: create an Azure Pipeline with specific Azure DevOps organization and project
text: >
az functionapp devops-pipeline create --organization-name AzureDevOpsOrganization --project-name AzureDevOpsProject
"""
helps['functionapp function'] = """
type: group
short-summary: Manage function app functions.
"""
helps['functionapp function show'] = """
type: command
short-summary: Get the details of a function.
examples:
- name: Show function details.
text: >
az functionapp function show -g MyResourceGroup -n MyFunctionAppName --function-name MyFunctionName
crafted: true
"""
helps['functionapp function delete'] = """
type: command
short-summary: Delete a function.
examples:
- name: Delete a function.
text: >
az functionapp function delete -g MyResourceGroup -n MyFunctionAppName --function-name MyFunctionName
crafted: true
"""
helps['functionapp function keys'] = """
type: group
short-summary: Manage function keys.
"""
helps['functionapp function keys set'] = """
type: command
short-summary: Create or update a function key.
examples:
- name: Create a function key.
text: >
az functionapp function keys set -g MyResourceGroup -n MyFunctionAppName --function-name MyFunctionName --key-name MyKeyName --key-value MyKeyValue
crafted: true
"""
helps['functionapp function keys list'] = """
type: command
short-summary: List all function keys.
examples:
- name: List all function keys.
text: >
az functionapp function keys list -g MyResourceGroup -n MyFunctionAppName --function-name MyFunctionName
crafted: true
"""
helps['functionapp function keys delete'] = """
type: command
short-summary: Delete a function key.
examples:
- name: Delete a function key.
text: >
az functionapp function keys delete -g MyResourceGroup -n MyFunctionAppName --function-name MyFunctionName --key-name MyKeyName
crafted: true
"""
# Help text for functionapp hybrid-connection, managed identity,
# host keys, app listing, and the functionapp plan create command.
helps['functionapp hybrid-connection'] = """
type: group
short-summary: methods that list, add and remove hybrid-connections from functionapp
"""
helps['functionapp hybrid-connection add'] = """
type: command
short-summary: add a hybrid-connection to a functionapp
examples:
- name: add a hybrid-connection to a functionapp
text: az functionapp hybrid-connection add -g MyResourceGroup -n MyWebapp --namespace [HybridConnectionNamespace] --hybrid-connection [HybridConnectionName] -s [slot]
"""
helps['functionapp hybrid-connection list'] = """
type: command
short-summary: list the hybrid-connections on a functionapp
examples:
- name: list the hybrid-connections on a functionapp
text: az functionapp hybrid-connection list -g MyResourceGroup -n MyWebapp -s [slot]
"""
helps['functionapp hybrid-connection remove'] = """
type: command
short-summary: remove a hybrid-connection from a functionapp
examples:
- name: remove a hybrid-connection from a functionapp
text: az functionapp hybrid-connection remove -g MyResourceGroup -n MyWebapp --namespace [HybridConnectionNamespace] --hybrid-connection [HybridConnectionName] -s [slot]
"""
helps['functionapp identity'] = """
type: group
short-summary: manage web app's managed service identity
"""
helps['functionapp identity assign'] = """
type: command
short-summary: assign managed service identity to the web app
examples:
- name: assign local identity and assign a reader role to the current resource group.
text: >
az functionapp identity assign -g MyResourceGroup -n MyUniqueApp --role reader --scope /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/MyResourceGroup
- name: enable identity for the web app.
text: >
az functionapp identity assign -g MyResourceGroup -n MyUniqueApp
- name: assign local identity and a user assigned identity to a function app.
text: >
az functionapp identity assign -g MyResourceGroup -n MyUniqueApp --identities [system] myAssignedId
"""
helps['functionapp identity remove'] = """
type: command
short-summary: Disable web app's managed service identity
examples:
- name: Disable web app's system managed identity
text: az functionapp identity remove --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
- name: Disable web app's system managed identity and a user managed identity
text: az functionapp identity remove --name MyFunctionApp --resource-group MyResourceGroup --identities [system] myAssignedId
"""
helps['functionapp identity show'] = """
type: command
short-summary: display web app's managed service identity
examples:
- name: display functionapp's managed service identity (autogenerated)
text: az functionapp identity show --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp keys'] = """
type: group
short-summary: Manage function app keys.
"""
helps['functionapp keys set'] = """
type: command
short-summary: Create or update a function app key.
examples:
- name: Create a function key for an Azure Function app.
text: >
az functionapp keys set -g MyResourceGroup -n MyFunctionAppName --key-type functionKeys --key-name MyKeyName --key-value MyKeyValue
crafted: true
"""
helps['functionapp keys list'] = """
type: command
short-summary: List all function app keys.
examples:
- name: List all keys for an Azure Function app.
text: >
az functionapp keys list -g MyResourceGroup -n MyFunctionAppName
crafted: true
"""
helps['functionapp keys delete'] = """
type: command
short-summary: Delete a function app key.
examples:
- name: Delete a master key for an Azure Function app.
text: >
az functionapp keys delete -g MyResourceGroup -n MyFunctionAppName --key-type masterKey --key-name MyKeyName
crafted: true
"""
helps['functionapp list'] = """
type: command
short-summary: List function apps.
examples:
- name: List default host name and state for all function apps.
text: >
az functionapp list --query "[].{hostName: defaultHostName, state: state}"
- name: List all running function apps.
text: >
az functionapp list --query "[?state=='Running']"
"""
helps['functionapp list-consumption-locations'] = """
type: command
short-summary: List available locations for running function apps.
"""
helps['functionapp plan'] = """
type: group
short-summary: Manage App Service Plans for an Azure Function
"""
helps['functionapp plan create'] = """
type: command
short-summary: Create an App Service Plan for an Azure Function.
examples:
- name: Create an elastic premium app service plan with burst out capability up to 10 instances.
text: >
az functionapp plan create -g MyResourceGroup -n MyPlan --min-instances 1 --max-burst 10 --sku EP1
- name: Create a basic app service plan.
text: >
az functionapp plan create -g MyResourceGroup -n MyPlan --sku B1
- name: Create an App Service Plan for an Azure Function. (autogenerated)
text: az functionapp plan create --location westus2 --name MyPlan --number-of-workers 1 --resource-group MyResourceGroup --sku B1
crafted: true
"""
# Help text for the remaining functionapp plan commands, the app
# lifecycle commands (restart/show/start/stop/update), and the
# vnet-integration command group.
helps['functionapp plan delete'] = """
type: command
short-summary: Delete an App Service Plan.
"""
helps['functionapp plan list'] = """
type: command
short-summary: List App Service Plans.
examples:
- name: List all Elastic Premium 1 tier App Service plans.
text: >
az functionapp plan list --query "[?sku.tier=='EP1']"
"""
helps['functionapp plan show'] = """
type: command
short-summary: Get the App Service Plans for a resource group or a set of resource groups.
examples:
- name: Get the app service plans for a resource group or a set of resource groups. (autogenerated)
text: az functionapp plan show --name MyAppServicePlan --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp plan update'] = """
type: command
short-summary: Update an App Service plan for an Azure Function.
examples:
- name: Update an app service plan to EP2 sku with twenty maximum workers.
text: >
az functionapp plan update -g MyResourceGroup -n MyPlan --max-burst 20 --sku EP2
"""
helps['functionapp restart'] = """
type: command
short-summary: Restart a function app.
examples:
- name: Restart a function app. (autogenerated)
text: az functionapp restart --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp show'] = """
type: command
short-summary: Get the details of a function app.
examples:
- name: Get the details of a function app. (autogenerated)
text: az functionapp show --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp start'] = """
type: command
short-summary: Start a function app.
examples:
- name: Start a function app. (autogenerated)
text: az functionapp start --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp stop'] = """
type: command
short-summary: Stop a function app.
examples:
- name: Stop a function app. (autogenerated)
text: az functionapp stop --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp update'] = """
type: command
short-summary: Update a function app.
examples:
- name: Update a function app. (autogenerated)
text: az functionapp update --name MyFunctionApp --resource-group MyResourceGroup
crafted: true
"""
helps['functionapp vnet-integration'] = """
type: group
short-summary: methods that list, add, and remove virtual networks integrations from a functionapp
"""
helps['functionapp vnet-integration add'] = """
type: command
short-summary: Add a regional virtual network integration to a functionapp
long-summary: >
If there are multiple vnets of the same name across different resource groups, use vnet resource id to specify
which vnet to use. If vnet name is used, by default, the vnet in the same resource group as the functionapp will be used.
examples:
- name: Add a regional virtual network integration to a functionapp
text: az functionapp vnet-integration add -g MyResourceGroup -n MyFunctionapp --vnet MyVnetName --subnet MySubnetName -s [slot]
- name: Add a regional virtual network integration to a functionapp using vnet resource id
text: az functionapp vnet-integration add -g MyResourceGroup -n MyFunctionapp --vnet '/subscriptions/[sub id]/resourceGroups/[rg]/providers/Microsoft.Network/virtualNetworks/[virtual network name]' --subnet MySubnetName -s [slot]
"""
helps['functionapp vnet-integration list'] = """
type: command
short-summary: list the virtual network integrations on a functionapp
examples:
- name: list the virtual networks integrations on a functionapp
text: az functionapp vnet-integration list -g MyResourceGroup -n MyFunctionapp -s [slot]
"""
helps['functionapp vnet-integration remove'] = """
type: command
short-summary: remove a regional virtual network integration from functionapp
examples:
- name: remove a regional virtual network integration from functionapp
text: az functionapp vnet-integration remove -g MyResourceGroup -n MyFunctionapp -s [slot]
"""
helps['webapp'] = """
type: group
short-summary: Manage web apps.
"""
helps['webapp auth'] = """
type: group
short-summary: Manage webapp authentication and authorization
"""
# Fixed misspelling "authentification" -> "authentication" in user-facing help text.
helps['webapp auth show'] = """
type: command
short-summary: Show the authentication settings for the webapp.
examples:
  - name: Show the authentication settings for the webapp. (autogenerated)
    text: az webapp auth show --name MyWebApp --resource-group MyResourceGroup
    crafted: true
"""
helps['webapp auth update'] = """
type: command
short-summary: Update the authentication settings for the webapp.
examples:
- name: Enable AAD by enabling authentication and setting AAD-associated parameters. Default provider is set to AAD. Must have created a AAD service principal beforehand.
text: >
az webapp auth update -g myResourceGroup -n myUniqueApp --enabled true \\
--action LoginWithAzureActiveDirectory \\
--aad-allowed-token-audiences https://webapp_name.azurewebsites.net/.auth/login/aad/callback \\
--aad-client-id ecbacb08-df8b-450d-82b3-3fced03f2b27 --aad-client-secret very_secret_password \\
--aad-token-issuer-url https://sts.windows.net/54826b22-38d6-4fb2-bad9-b7983a3e9c5a/
- name: Allow Facebook authentication by setting FB-associated parameters and turning on public-profile and email scopes; allow anonymous users
text: >
az webapp auth update -g myResourceGroup -n myUniqueApp --action AllowAnonymous \\
--facebook-app-id my_fb_id --facebook-app-secret my_fb_secret \\
--facebook-oauth-scopes public_profile email
"""
helps['webapp browse'] = """
type: command
short-summary: Open a web app in a browser.
examples:
- name: Open a web app in a browser. (autogenerated)
text: az webapp browse --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config'] = """
type: group
short-summary: Configure a web app.
"""
helps['webapp config access-restriction'] = """
type: group
short-summary: Methods that show, set, add, and remove access restrictions on a webapp
"""
helps['webapp config access-restriction add'] = """
type: command
short-summary: Adds an Access Restriction to the webapp.
examples:
- name: Add Access Restriction opening (Allow) named developers for IPv4 address 130.220.0.0/27 with priority 200 to main site.
text: az webapp config access-restriction add -g ResourceGroup -n AppName --rule-name developers --action Allow --ip-address 130.220.0.0/27 --priority 200
- name: Add Access Restriction opening (Allow) named build_server for IPv4 address 192.168.0.0/27 with priority 250 to scm site.
text: az webapp config access-restriction add -g ResourceGroup -n AppName --rule-name build_server --action Allow --ip-address 192.168.0.0/27 --priority 250 --scm-site true
- name: Add Access Restriction opening (Allow) named app_gateway for Subnet app_gw in vNet core_weu with priority 300 to main site.
text: az webapp config access-restriction add -g ResourceGroup -n AppName --rule-name app_gateway --action Allow --vnet-name core_weu --subnet app_gateway --priority 300
- name: Add Access Restriction opening (Allow) named internal_agents for Subnet build_agents in vNet corp01 with priority 500 to scm site; and ignore service endpoint registration on the Subnet.
text: az webapp config access-restriction add -g ResourceGroup -n AppName --rule-name internal_agents --action Allow --vnet-name corp01 --subnet build_agents --priority 500 --scm-site true --ignore-missing-endpoint true
- name: Add Access Restriction opening (Allow) named remote_agents in vNet 'corp01' in rg 'vnets' with subnet 'agents'
text: az webapp config access-restriction add -g ResourceGroup -n AppName --rule-name remote_agents --action Allow --vnet-name corp01 --subnet agents --priority 500 --vnet-resource-group vnets
- name: Add Access Restriction opening (Allow) named agents in vNet 'corp01' in rg 'vnets' with subnet 'agents' (using subnet resource id)
text: az webapp config access-restriction add -g ResourceGroup -n AppName --rule-name remote_agents --action Allow --priority 800 --subnet '/subscriptions/<subscription-id>/resourceGroups/vnets/providers/Microsoft.Network/virtualNetworks/corp01/subnets/agents'
"""
helps['webapp config access-restriction remove'] = """
type: command
short-summary: Removes an Access Restriction from the webapp.
examples:
- name: Remove Access Restriction named developers from the main site.
text: az webapp config access-restriction remove -g ResourceGroup -n AppName --rule-name developers
- name: Remove Access Restriction named internal_agents from the scm site.
text: az webapp config access-restriction remove -g ResourceGroup -n AppName --rule-name internal_agents --scm-site true
"""
helps['webapp config access-restriction set'] = """
type: command
short-summary: Sets if SCM site is using the same restrictions as the main site.
examples:
- name: Enable SCM site to use same access restrictions as main site.
text: az webapp config access-restriction set -g ResourceGroup -n AppName --use-same-restrictions-for-scm-site true
"""
helps['webapp config access-restriction show'] = """
type: command
short-summary: Show Access Restriction settings for webapp.
examples:
- name: Get Access Restriction settings for a webapp.
text: az webapp config access-restriction show -g ResourceGroup -n AppName
"""
helps['webapp config appsettings'] = """
type: group
short-summary: Configure web app settings. Updating or removing application settings will cause an app recycle.
"""
helps['webapp config appsettings delete'] = """
type: command
short-summary: Delete web app settings.
examples:
- name: Delete web app settings. (autogenerated)
text: az webapp config appsettings delete --name MyWebApp --resource-group MyResourceGroup --setting-names {setting-names}
crafted: true
"""
helps['webapp config appsettings list'] = """
type: command
short-summary: Get the details of a web app's settings.
examples:
- name: Get the details of a web app's settings. (autogenerated)
text: az webapp config appsettings list --name MyWebapp --resource-group MyResourceGroup --subscription MySubscription
crafted: true
"""
helps['webapp config appsettings set'] = """
type: command
short-summary: Set a web app's settings.
examples:
- name: Set the default NodeJS version to 6.9.1 for a web app.
text: >
az webapp config appsettings set -g MyResourceGroup -n MyUniqueApp --settings WEBSITE_NODE_DEFAULT_VERSION=6.9.1
- name: Set using both key-value pair and a json file with more settings.
text: >
az webapp config appsettings set -g MyResourceGroup -n MyUniqueApp --settings mySetting=value @moreSettings.json
parameters:
- name: --settings
short-summary: Space-separated appsettings in KEY=VALUE format. Use @{file} to load from a file.
- name: --slot-settings
short-summary: Space-separated slot appsettings in KEY=VALUE format. Use @{file} to load from a file.
"""
helps['webapp config backup'] = """
type: group
short-summary: Manage backups for web apps.
"""
helps['webapp config backup create'] = """
type: command
short-summary: Create a backup of a web app.
examples:
- name: Create a backup of a web app. (autogenerated)
text: az webapp config backup create --container-url {container-url} --resource-group MyResourceGroup --webapp-name MyWebapp
crafted: true
"""
helps['webapp config backup list'] = """
type: command
short-summary: List backups of a web app.
examples:
- name: List backups of a web app. (autogenerated)
text: az webapp config backup list --resource-group MyResourceGroup --webapp-name MyWebapp
crafted: true
"""
helps['webapp config backup restore'] = """
type: command
short-summary: Restore a web app from a backup.
"""
helps['webapp config backup show'] = """
type: command
short-summary: Show the backup schedule for a web app.
examples:
- name: Show the backup schedule for a web app. (autogenerated)
text: az webapp config backup show --resource-group MyResourceGroup --webapp-name MyWebapp
crafted: true
"""
helps['webapp config backup update'] = """
type: command
short-summary: Configure a new backup schedule for a web app.
"""
helps['webapp config connection-string'] = """
type: group
short-summary: Manage a web app's connection strings.
"""
helps['webapp config connection-string delete'] = """
type: command
short-summary: Delete a web app's connection strings.
examples:
- name: Delete a web app's connection strings. (autogenerated)
text: az webapp config connection-string delete --name MyWebApp --resource-group MyResourceGroup --setting-names {setting-names}
crafted: true
"""
helps['webapp config connection-string list'] = """
type: command
short-summary: Get a web app's connection strings.
examples:
- name: Get a web app's connection strings. (autogenerated)
text: az webapp config connection-string list --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config connection-string set'] = """
type: command
short-summary: Update a web app's connection strings.
examples:
- name: Add a mysql connection string.
text: >
az webapp config connection-string set -g MyResourceGroup -n MyUniqueApp -t mysql \\
--settings mysql1='Server=myServer;Database=myDB;Uid=myUser;Pwd=myPwd;'
"""
helps['webapp config container'] = """
type: group
short-summary: Manage web app container settings.
"""
helps['webapp config container delete'] = """
type: command
short-summary: Delete a web app container's settings.
examples:
- name: Delete a web app container's settings. (autogenerated)
text: az webapp config container delete --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config container set'] = """
type: command
short-summary: Set a web app container's settings.
examples:
- name: Set a web app container's settings. (autogenerated)
text: az webapp config container set --docker-custom-image-name MyDockerCustomImage --docker-registry-server-password StrongPassword --docker-registry-server-url https://{azure-container-registry-name}.azurecr.io --docker-registry-server-user DockerUserId --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config container show'] = """
type: command
short-summary: Get details of a web app container's settings.
examples:
- name: Get details of a web app container's settings. (autogenerated)
text: az webapp config container show --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config hostname'] = """
type: group
short-summary: Configure hostnames for a web app.
"""
helps['webapp config hostname add'] = """
type: command
short-summary: Bind a hostname to a web app.
examples:
- name: Bind a hostname to a web app. (autogenerated)
text: az webapp config hostname add --hostname cname.mycustomdomain.com --resource-group MyResourceGroup --webapp-name MyWebapp
crafted: true
"""
helps['webapp config hostname delete'] = """
type: command
short-summary: Unbind a hostname from a web app.
"""
helps['webapp config hostname get-external-ip'] = """
type: command
short-summary: Get the external-facing IP address for a web app.
examples:
- name: Get the external-facing IP address for a web app. (autogenerated)
text: az webapp config hostname get-external-ip --resource-group MyResourceGroup --webapp-name MyWebapp
crafted: true
"""
helps['webapp config hostname list'] = """
type: command
short-summary: List all hostname bindings for a web app.
examples:
- name: List all hostname bindings for a web app. (autogenerated)
text: az webapp config hostname list --resource-group MyResourceGroup --webapp-name MyWebapp
crafted: true
"""
helps['webapp config set'] = """
type: command
short-summary: Set a web app's configuration.
examples:
- name: turn on "alwaysOn"
text: >
az webapp config set -g MyResourceGroup -n MyUniqueApp --always-on true
- name: turn on "alwaysOn" through a json with content "{"alwaysOn", true}"
text: >
az webapp config set -g MyResourceGroup -n MyUniqueApp --generic-configurations "{"alwaysOn": true}"
"""
helps['webapp config show'] = """
type: command
short-summary: Get the details of a web app's configuration.
examples:
- name: Get the details of a web app's configuration. (autogenerated)
text: az webapp config show --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config snapshot'] = """
type: group
short-summary: Manage web app snapshots.
"""
helps['webapp config snapshot list'] = """
type: command
short-summary: List the restorable snapshots for a web app.
examples:
- name: List the restorable snapshots for a web app. (autogenerated)
text: az webapp config snapshot list --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config snapshot restore'] = """
type: command
short-summary: Restore a web app snapshot.
examples:
- name: Restore web app files from a snapshot. Overwrites the web app's current files and settings.
text: >
az webapp config snapshot restore -g MyResourceGroup -n MySite --time 2018-12-11T23:34:16.8388367
- name: Restore a snapshot of web app SourceApp to web app TargetApp. Use --restore-content-only to not restore app settings. Overwrites TargetApp's files.
text: >
az webapp config snapshot restore -g TargetResourceGroup -n TargetApp --source-name SourceApp --source-resource-group OriginalResourceGroup --time 2018-12-11T23:34:16.8388367 --restore-content-only
"""
helps['webapp config ssl'] = """
type: group
short-summary: Configure SSL certificates for web apps.
"""
helps['webapp config ssl bind'] = """
type: command
short-summary: Bind an SSL certificate to a web app.
examples:
- name: Bind an SSL certificate to a web app. (autogenerated)
text: az webapp config ssl bind --certificate-thumbprint {certificate-thumbprint} --name MyWebapp --resource-group MyResourceGroup --ssl-type SNI
crafted: true
"""
helps['webapp config ssl delete'] = """
type: command
short-summary: Delete an SSL certificate from a web app.
examples:
- name: Delete an SSL certificate from a web app. (autogenerated)
text: az webapp config ssl delete --certificate-thumbprint {certificate-thumbprint} --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config ssl list'] = """
type: command
short-summary: List SSL certificates for a web app.
examples:
- name: List SSL certificates for a web app. (autogenerated)
text: az webapp config ssl list --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config ssl show'] = """
type: command
short-summary: Show the details of an SSL certificate for a web app.
examples:
- name: Show the details of an SSL certificate for a web app. (autogenerated)
text: az webapp config ssl show --resource-group MyResourceGroup --certificate-name cname.mycustomdomain.com
crafted: true
"""
helps['webapp config ssl unbind'] = """
type: command
short-summary: Unbind an SSL certificate from a web app.
"""
helps['webapp config ssl upload'] = """
type: command
short-summary: Upload an SSL certificate to a web app.
examples:
- name: Upload an SSL certificate to a web app. (autogenerated)
text: az webapp config ssl upload --certificate-file {certificate-file} --certificate-password {certificate-password} --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config ssl import'] = """
type: command
short-summary: Import an SSL or App Service Certificate to a web app from Key Vault.
examples:
- name: Import an SSL or App Service Certificate certificate to a web app from Key Vault.
text: az webapp config ssl import --resource-group MyResourceGroup --name MyWebapp --key-vault MyKeyVault --key-vault-certificate-name MyCertificateName
- name: Import an SSL or App Service Certificate to a web app from Key Vault using resource id (typically if Key Vault is in another subscription).
text: az webapp config ssl import --resource-group MyResourceGroup --name MyWebapp --key-vault '/subscriptions/[sub id]/resourceGroups/[rg]/providers/Microsoft.KeyVault/vaults/[vault name]' --key-vault-certificate-name MyCertificateName
"""
# Fixed redundant wording "webapp app" -> "web app" in user-facing help text.
helps['webapp config ssl create'] = """
type: command
short-summary: Create a Managed Certificate for a hostname in a web app.
examples:
  - name: Create a Managed Certificate for cname.mycustomdomain.com.
    text: az webapp config ssl create --resource-group MyResourceGroup --name MyWebapp --hostname cname.mycustomdomain.com
"""
helps['webapp config storage-account'] = """
type: group
short-summary: Manage a web app's Azure storage account configurations. (Linux Web Apps and Windows Containers Web Apps Only)
"""
helps['webapp config storage-account add'] = """
type: command
short-summary: Add an Azure storage account configuration to a web app. (Linux Web Apps and Windows Containers Web Apps Only)
examples:
- name: Add a connection to the Azure Files file share called MyShare in the storage account named MyStorageAccount.
text: >
az webapp config storage-account add -g MyResourceGroup -n MyUniqueApp \\
--custom-id CustomId \\
--storage-type AzureFiles \\
--account-name MyStorageAccount \\
--share-name MyShare \\
--access-key MyAccessKey \\
--mount-path /path/to/mount
"""
helps['webapp config storage-account delete'] = """
type: command
short-summary: Delete a web app's Azure storage account configuration. (Linux Web Apps and Windows Containers Web Apps Only)
examples:
- name: Delete a web app's Azure storage account configuration. (Linux Web Apps and Windows Containers Web Apps Only) (autogenerated)
text: az webapp config storage-account delete --custom-id CustomId --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config storage-account list'] = """
type: command
short-summary: Get a web app's Azure storage account configurations. (Linux Web Apps and Windows Containers Web Apps Only)
examples:
- name: Get a web app's Azure storage account configurations. (Linux Web Apps and Windows Containers Web Apps Only) (autogenerated)
text: az webapp config storage-account list --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp config storage-account update'] = """
type: command
short-summary: Update an existing Azure storage account configuration on a web app. (Linux Web Apps and Windows Containers Web Apps Only)
examples:
- name: Update the mount path for a connection to the Azure Files file share with the ID MyId.
text: >
az webapp config storage-account update -g MyResourceGroup -n MyUniqueApp \\
--custom-id CustomId \\
--mount-path /path/to/new/mount
- name: Update an existing Azure storage account configuration on a web app. (Linux Web Apps and Windows Containers Web Apps Only) (autogenerated)
text: az webapp config storage-account update --access-key MyAccessKey --account-name MyAccount --custom-id CustomId --mount-path /path/to/new/mount --name MyUniqueApp --resource-group MyResourceGroup --share-name MyShare --storage-type AzureFiles
crafted: true
"""
helps['webapp cors'] = """
type: group
short-summary: Manage Cross-Origin Resource Sharing (CORS)
"""
helps['webapp cors add'] = """
type: command
short-summary: Add allowed origins
examples:
- name: add a new allowed origin
text: >
az webapp cors add -g {myRG} -n {myAppName} --allowed-origins https://myapps.com
- name: Add allowed origins (autogenerated)
text: az webapp cors add --allowed-origins https://myapps.com --name MyWebApp --resource-group MyResourceGroup --subscription MySubscription
crafted: true
"""
helps['webapp cors remove'] = """
type: command
short-summary: Remove allowed origins
examples:
- name: remove an allowed origin
text: >
az webapp cors remove -g {myRG} -n {myAppName} --allowed-origins https://myapps.com
- name: remove all allowed origins
text: >
az webapp cors remove -g {myRG} -n {myAppName} --allowed-origins
"""
helps['webapp cors show'] = """
type: command
short-summary: show allowed origins
examples:
- name: show allowed origins (autogenerated)
text: az webapp cors show --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp create'] = """
type: command
short-summary: Create a web app.
long-summary: The web app's name must be able to produce a unique FQDN as AppName.azurewebsites.net.
examples:
- name: Create a web app with the default configuration.
text: >
az webapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName
- name: Create a web app with a java|11|Java SE|8 runtime using '|' delimiter.
text: >
az webapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName --runtime "java|11|Java SE|8"
- name: Create a web app with a java|11|Java SE|8 runtime using ':' delimiter.
text: >
az webapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName --runtime "java:11:Java SE:8"
- name: Create a web app with a NodeJS 10.14 runtime and deployed from a local git repository.
text: >
az webapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName --runtime "node|10.14" --deployment-local-git
- name: Create a web app with an image from DockerHub.
text: >
az webapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName -i nginx
- name: Create a web app with an image from a private DockerHub registry.
text: >
az webapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName -i MyImageName -s username -w password
- name: Create a web app with an image from a private Azure Container Registry.
text: >
az webapp create -g MyResourceGroup -p MyPlan -n MyUniqueAppName -i myregistry.azurecr.io/docker-image:tag
- name: create a WebApp using shared App Service Plan that is in a different resource group.
text: >
AppServicePlanID=$(az appservice plan show -n SharedAppServicePlan -g MyResourceGroup --query "id" --out tsv)
az webapp create -g MyResourceGroup -p "$AppServicePlanID" -n MyUniqueAppName
"""
helps['webapp create-remote-connection'] = """
type: command
short-summary: Creates a remote connection using a tcp tunnel to your web app
"""
helps['webapp delete'] = """
type: command
short-summary: Delete a web app.
examples:
- name: Delete a web app. (autogenerated)
text: az webapp delete --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp deleted'] = """
type: group
short-summary: Manage deleted web apps.
"""
helps['webapp deleted list'] = """
type: command
short-summary: List web apps that have been deleted.
"""
helps['webapp deleted restore'] = """
type: command
short-summary: Restore a deleted web app.
long-summary: Restores the files and settings of a deleted web app to the specified web app.
examples:
- name: Restore a deleted app to the Staging slot of MySite.
text: >
az webapp deleted restore -g MyResourceGroup -n MySite -s Staging --deleted-id /subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Web/locations/location/deletedSites/1234
- name: Restore a deleted app to the app MySite. Do not restore the deleted app's settings.
text: >
az webapp deleted restore -g MyResourceGroup -n MySite --deleted-id /subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Web/locations/location/deletedSites/1234 --restore-content-only
- name: Restore a deleted web app. (autogenerated)
text: az webapp deleted restore --deleted-id /subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Web/deletedSites/1234 --name MySite --resource-group MyResourceGroup --subscription MySubscription
crafted: true
"""
helps['webapp deployment'] = """
type: group
short-summary: Manage web app deployments.
"""
helps['webapp deployment container'] = """
type: group
short-summary: Manage container-based continuous deployment.
"""
helps['webapp deployment container config'] = """
type: command
short-summary: Configure continuous deployment via containers.
examples:
- name: Configure continuous deployment via containers. (autogenerated)
text: az webapp deployment container config --enable-cd true --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp deployment container show-cd-url'] = """
type: command
short-summary: Get the URL which can be used to configure webhooks for continuous deployment.
examples:
- name: Get the URL which can be used to configure webhooks for continuous deployment (autogenerated)
text: az webapp deployment container show-cd-url --name MyWebApp --resource-group MyResourceGroup --slot staging
crafted: true
"""
helps['webapp deployment list-publishing-credentials'] = """
type: command
short-summary: Get the details for available web app publishing credentials
examples:
- name: Get the details for available web app publishing credentials (autogenerated)
text: az webapp deployment list-publishing-credentials --name MyWebapp --resource-group MyResourceGroup --subscription MySubscription
crafted: true
"""
helps['webapp deployment list-publishing-profiles'] = """
type: command
short-summary: Get the details for available web app deployment profiles.
examples:
- name: Get the details for available web app deployment profiles. (autogenerated)
text: az webapp deployment list-publishing-profiles --name MyWebapp --resource-group MyResourceGroup --subscription MySubscription
crafted: true
"""
helps['webapp deployment slot'] = """
type: group
short-summary: Manage web app deployment slots.
"""
helps['webapp deployment slot auto-swap'] = """
type: command
short-summary: Configure deployment slot auto swap.
examples:
- name: Configure deployment slot auto swap. (autogenerated)
text: az webapp deployment slot auto-swap --name MyWebapp --resource-group MyResourceGroup --slot staging
crafted: true
"""
helps['webapp deployment slot create'] = """
type: command
short-summary: Create a deployment slot.
examples:
- name: Create a deployment slot. (autogenerated)
text: az webapp deployment slot create --name MyWebapp --resource-group MyResourceGroup --slot staging
crafted: true
"""
helps['webapp deployment slot delete'] = """
type: command
short-summary: Delete a deployment slot.
examples:
- name: Delete a deployment slot. (autogenerated)
text: az webapp deployment slot delete --name MyWebapp --resource-group MyResourceGroup --slot staging
crafted: true
"""
helps['webapp deployment slot list'] = """
type: command
short-summary: List all deployment slots.
examples:
- name: List all deployment slots. (autogenerated)
text: az webapp deployment slot list --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp deployment slot swap'] = """
type: command
short-summary: Change deployment slots for a web app.
examples:
- name: Swap a staging slot into production for the MyUniqueApp web app.
text: >
az webapp deployment slot swap -g MyResourceGroup -n MyUniqueApp --slot staging \\
--target-slot production
"""
helps['webapp deployment source'] = """
type: group
short-summary: Manage web app deployment via source control.
"""
helps['webapp deployment source config'] = """
type: command
short-summary: Manage deployment from git or Mercurial repositories.
examples:
- name: Manage deployment from git or Mercurial repositories. (autogenerated)
text: az webapp deployment source config --branch master --manual-integration --name MyWebApp --repo-url https://github.com/Azure-Samples/function-image-upload-resize --resource-group MyResourceGroup
crafted: true
"""
helps['webapp deployment source config-local-git'] = """
type: command
short-summary: Get a URL for a git repository endpoint to clone and push to for web app deployment.
examples:
- name: Get an endpoint and add it as a git remote.
text: >
az webapp deployment source config-local-git \\
-g MyResourceGroup -n MyUniqueApp
git remote add azure \\
https://{deploy_user_name}@MyUniqueApp.scm.azurewebsites.net/MyUniqueApp.git
"""
helps['webapp deployment source config-zip'] = """
type: command
short-summary: Perform deployment using the kudu zip push deployment for a web app.
long-summary: >
By default Kudu assumes that zip deployments do not require any build-related actions like
npm install or dotnet publish. This can be overridden by including a .deployment file in your
zip file with the following content '[config] SCM_DO_BUILD_DURING_DEPLOYMENT = true',
to enable Kudu detection logic and build script generation process.
See https://github.com/projectkudu/kudu/wiki/Configurable-settings#enabledisable-build-actions-preview.
Alternately the setting can be enabled using the az webapp config appsettings set command.
examples:
- name: Perform deployment by using zip file content.
text: >
az webapp deployment source config-zip \\
-g {myRG} -n {myAppName} \\
--src {zipFilePathLocation}
"""
helps['webapp deployment source delete'] = """
type: command
short-summary: Delete a source control deployment configuration.
examples:
- name: Delete a source control deployment configuration. (autogenerated)
text: az webapp deployment source delete --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp deployment source show'] = """
type: command
short-summary: Get the details of a source control deployment configuration.
examples:
- name: Get the details of a source control deployment configuration. (autogenerated)
text: az webapp deployment source show --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp deployment source sync'] = """
type: command
short-summary: Synchronize from the repository. Only needed under manual integration mode.
examples:
- name: Synchronize from the repository. Only needed under manual integration mode. (autogenerated)
text: az webapp deployment source sync --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp deployment user'] = """
type: group
short-summary: Manage user credentials for deployment.
"""
helps['webapp deployment user set'] = """
type: command
short-summary: Update deployment credentials.
long-summary: All function and web apps in the subscription will be impacted since they share the same deployment credentials.
examples:
- name: Set FTP and git deployment credentials for all apps.
text: >
az webapp deployment user set --user-name MyUserName
"""
helps['webapp deployment user show'] = """
type: command
short-summary: Get deployment publishing user.
examples:
- name: Get publishing user information.
text: >
az webapp deployment user show
"""
helps['webapp hybrid-connection'] = """
type: group
short-summary: methods that list, add and remove hybrid-connections from webapps
"""
helps['webapp hybrid-connection add'] = """
type: command
short-summary: add a hybrid-connection to a webapp
examples:
- name: add a hybrid-connection to a webapp
text: az webapp hybrid-connection add -g MyResourceGroup -n MyWebapp --namespace [HybridConnectionNamespace] --hybrid-connection [HybridConnectionName] -s [slot]
"""
helps['webapp hybrid-connection list'] = """
type: command
short-summary: list the hybrid-connections on a webapp
examples:
- name: list the hybrid-connections on a webapp
text: az webapp hybrid-connection list -g MyResourceGroup -n MyWebapp -s [slot]
"""
helps['webapp hybrid-connection remove'] = """
type: command
short-summary: remove a hybrid-connection from a webapp
examples:
- name: remove a hybrid-connection from a webapp
text: az webapp hybrid-connection remove -g MyResourceGroup -n MyWebapp --namespace [HybridConnectionNamespace] --hybrid-connection [HybridConnectionName] -s [slot]
"""
helps['webapp identity'] = """
type: group
short-summary: manage web app's managed service identity
"""
helps['webapp identity assign'] = """
type: command
short-summary: assign managed service identity to the web app
examples:
- name: assign local identity and assign a reader role to the current resource group.
text: >
az webapp identity assign -g MyResourceGroup -n MyUniqueApp --role reader --scope /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/MyResourceGroup
- name: enable identity for the web app.
text: >
az webapp identity assign -g MyResourceGroup -n MyUniqueApp
- name: assign local identity and a user assigned identity to a webapp.
text: >
az webapp identity assign -g MyResourceGroup -n MyUniqueApp --identities [system] myAssignedId
"""
helps['webapp identity remove'] = """
type: command
short-summary: Disable web app's managed service identity
examples:
- name: Disable web app's system managed service identity
text: az webapp identity remove --name MyWebApp --resource-group MyResourceGroup
crafted: true
- name: Disable web app's system managed service identity and a user managed identity
text: az webapp identity remove --name MyWebApp --resource-group MyResourceGroup --identities [system] myAssignedId
"""
helps['webapp identity show'] = """
type: command
short-summary: display web app's managed service identity
examples:
- name: display webapp's managed service identity (autogenerated)
text: az webapp identity show --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp list'] = """
type: command
short-summary: List web apps.
examples:
- name: List default host name and state for all web apps.
text: >
az webapp list --query "[].{hostName: defaultHostName, state: state}"
- name: List all running web apps.
text: >
az webapp list --query "[?state=='Running']"
"""
helps['webapp list-instances'] = """
type: command
short-summary: List all scaled out instances of a web app or web app slot.
"""
helps['webapp list-runtimes'] = """
type: command
short-summary: List available built-in stacks which can be used for web apps.
"""
helps['webapp log'] = """
type: group
short-summary: Manage web app logs.
"""
helps['webapp log config'] = """
type: command
short-summary: Configure logging for a web app.
examples:
- name: Configure logging for a web app. (autogenerated)
text: az webapp log config --name MyWebapp --resource-group MyResourceGroup --web-server-logging off
crafted: true
- name: Configure logging for a web app. (autogenerated)
text: az webapp log config --docker-container-logging off --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp log download'] = """
type: command
short-summary: Download a web app's log history as a zip file.
long-summary: This command may not work with web apps running on Linux.
examples:
- name: Download a web app's log history as a zip file. (autogenerated)
text: az webapp log download --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp log show'] = """
type: command
short-summary: Get the details of a web app's logging configuration.
examples:
- name: Get the details of a web app's logging configuration. (autogenerated)
text: az webapp log show --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp log tail'] = """
type: command
short-summary: Start live log tracing for a web app.
long-summary: This command may not work with web apps running on Linux.
"""
helps['webapp log deployment'] = """
type: group
short-summary: Manage web app deployment logs.
"""
helps['webapp log deployment show'] = """
type: command
short-summary: Show deployment logs of the latest deployment, or a specific deployment if deployment-id is specified.
examples:
- name: Show the deployment logs of the latest deployment
text: az webapp log deployment show --name MyWebApp --resource-group MyResourceGroup
- name: Show the deployment logs of a particular deployment
text: az webapp log deployment show --name MyWebApp --resource-group MyResourceGroup --deployment-id MyDeploymentId
"""
helps['webapp log deployment list'] = """
type: command
short-summary: List deployments associated with web app
examples:
- name: List the deployment logs
text: az webapp log deployment list --name MyWebApp --resource-group MyResourceGroup
"""
helps['functionapp log deployment'] = """
type: group
short-summary: Manage function app deployment logs.
"""
helps['functionapp log deployment show'] = """
type: command
short-summary: Show deployment logs of the latest deployment, or a specific deployment if deployment-id is specified.
examples:
- name: Show the deployment logs of the latest deployment
text: az functionapp log deployment show --name MyFunctionApp --resource-group MyResourceGroup
- name: Show the deployment logs of a particular deployment
text: az functionapp log deployment show --name MyFunctionApp --resource-group MyResourceGroup --deployment-id MyDeploymentId
"""
helps['functionapp log deployment list'] = """
type: command
short-summary: List deployment logs of the deployments associated with function app
examples:
- name: List the deployment logs
text: az functionapp log deployment list --name MyFunctionApp --resource-group MyResourceGroup
"""
helps['webapp restart'] = """
type: command
short-summary: Restart a web app.
examples:
- name: Restart a web app. (autogenerated)
text: az webapp restart --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp show'] = """
type: command
short-summary: Get the details of a web app.
examples:
- name: Get the details of a web app. (autogenerated)
text: az webapp show --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp ssh'] = """
type: command
short-summary: SSH command establishes a ssh session to the web container and developer would get a shell terminal remotely.
examples:
- name: ssh into a web app
text: >
az webapp ssh -n MyUniqueAppName -g MyResourceGroup
"""
helps['webapp start'] = """
type: command
short-summary: Start a web app.
examples:
- name: Start a web app. (autogenerated)
text: az webapp start --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp stop'] = """
type: command
short-summary: Stop a web app.
examples:
- name: Stop a web app. (autogenerated)
text: az webapp stop --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp traffic-routing'] = """
type: group
short-summary: Manage traffic routing for web apps.
"""
helps['webapp traffic-routing clear'] = """
type: command
short-summary: Clear the routing rules and send all traffic to production.
examples:
- name: Clear the routing rules and send all traffic to production. (autogenerated)
text: az webapp traffic-routing clear --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp traffic-routing set'] = """
type: command
short-summary: Configure routing traffic to deployment slots.
examples:
- name: Configure routing traffic to deployment slots. (autogenerated)
text: az webapp traffic-routing set --distribution staging=50 --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp traffic-routing show'] = """
type: command
short-summary: Display the current distribution of traffic across slots.
examples:
- name: Display the current distribution of traffic across slots. (autogenerated)
text: az webapp traffic-routing show --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp up'] = """
type: command
short-summary: >
Create a webapp and deploy code from a local workspace to the app. The command is required to run from the folder
where the code is present. Current support includes Node, Python, .NET Core and ASP.NET. Node,
Python apps are created as Linux apps. .Net Core, ASP.NET, and static HTML apps are created as Windows apps.
Append the html flag to deploy as a static HTML app.
examples:
- name: View the details of the app that will be created, without actually running the operation
text: >
az webapp up --dryrun
- name: Create a web app with the default configuration, by running the command from the folder where the code to be deployed exists.
text: >
az webapp up
- name: Create a web app with a specified name
text: >
az webapp up -n MyUniqueAppName
- name: Create a web app with a specified name and a java|11|Java SE|8 runtime using '|' delimiter
text: >
az webapp up -n MyUniqueAppName --runtime "java|11|Java SE|8"
- name: Create a web app with a specified name and a java|11|Java SE|8 runtime using ':' delimiter
text: >
az webapp up -n MyUniqueAppName --runtime "java:11:Java SE:8"
- name: Create a web app in a specific region, by running the command from the folder where the code to be deployed exists.
text: >
az webapp up -l locationName
- name: Create a web app and enable log streaming after the deployment operation is complete. This will enable the default configuration required to enable log streaming.
text: >
az webapp up --logs
- name: Create a web app and deploy as a static HTML app.
text: >
az webapp up --html
"""
helps['webapp update'] = """
type: command
short-summary: Update a web app.
examples:
- name: Update the tags of a web app.
text: >
az webapp update -g MyResourceGroup -n MyAppName --set tags.tagName=tagValue
- name: Update a web app. (autogenerated)
text: az webapp update --https-only true --name MyAppName --resource-group MyResourceGroup
crafted: true
"""
helps['webapp vnet-integration'] = """
type: group
short-summary: methods that list, add, and remove virtual network integrations from a webapp
"""
helps['webapp vnet-integration add'] = """
type: command
short-summary: Add a regional virtual network integration to a webapp
long-summary: >
If there are multiple vnets of the same name across different resource groups, use vnet resource id to specify
which vnet to use. If vnet name is used, by default, the vnet in the same resource group as the webapp will be used.
examples:
- name: Add a regional virtual network integration to a webapp
text: az webapp vnet-integration add -g MyResourceGroup -n MyWebapp --vnet MyVnetName --subnet MySubnetName -s [slot]
- name: Add a regional virtual network integration to a webapp using vnet resource id
text: az webapp vnet-integration add -g MyResourceGroup -n MyWebapp --vnet '/subscriptions/[sub id]/resourceGroups/[rg]/providers/Microsoft.Network/virtualNetworks/[virtual network name]' --subnet MySubnetName -s [slot]
"""
helps['webapp vnet-integration list'] = """
type: command
short-summary: list the virtual network integrations on a webapp
examples:
- name: list the virtual network integrations on a webapp
text: az webapp vnet-integration list -g MyResourceGroup -n MyWebapp -s [slot]
"""
helps['webapp vnet-integration remove'] = """
type: command
short-summary: remove a regional virtual network integration from webapp
examples:
- name: remove a regional virtual network integration from webapp
text: az webapp vnet-integration remove -g MyResourceGroup -n MyWebapp -s [slot]
"""
helps['webapp webjob'] = """
type: group
short-summary: Allows management operations for webjobs on a web app.
"""
helps['webapp webjob continuous'] = """
type: group
short-summary: Allows management operations of continuous webjobs on a web app.
"""
helps['webapp webjob continuous list'] = """
type: command
short-summary: List all continuous webjobs on a selected web app.
examples:
- name: List all continuous webjobs on a selected webapp. (autogenerated)
text: az webapp webjob continuous list --name MyWebapp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp webjob continuous remove'] = """
type: command
short-summary: Delete a specific continuous webjob.
examples:
- name: Delete a specific continuous webjob. (autogenerated)
text: az webapp webjob continuous remove --name MyWebApp --resource-group MyResourceGroup --webjob-name MyWebjob
crafted: true
"""
helps['webapp webjob continuous start'] = """
type: command
short-summary: Start a specific continuous webjob on a selected web app.
examples:
- name: Start a specific continuous webjob on a selected web app. (autogenerated)
text: az webapp webjob continuous start --name MyWebApp --resource-group MyResourceGroup --webjob-name MyWebjob
crafted: true
"""
helps['webapp webjob continuous stop'] = """
type: command
short-summary: Stop a specific continuous webjob.
examples:
- name: Stop a specific continuous webjob. (autogenerated)
text: az webapp webjob continuous stop --name MyWebApp --resource-group MyResourceGroup --webjob-name MyWebjob
crafted: true
"""
helps['webapp webjob triggered'] = """
type: group
short-summary: Allows management operations of triggered webjobs on a web app.
"""
helps['webapp webjob triggered list'] = """
type: command
short-summary: List all triggered webjobs hosted on a web app.
examples:
- name: List all triggered webjobs hosted on a web app. (autogenerated)
text: az webapp webjob triggered list --name MyWebApp --resource-group MyResourceGroup
crafted: true
"""
helps['webapp webjob triggered log'] = """
type: command
short-summary: Get history of a specific triggered webjob hosted on a web app.
examples:
- name: Get history of a specific triggered webjob hosted on a web app. (autogenerated)
text: az webapp webjob triggered log --name MyWebApp --resource-group MyResourceGroup --subscription MySubscription --webjob-name MyWebjob
crafted: true
"""
helps['webapp webjob triggered remove'] = """
type: command
short-summary: Delete a specific triggered webjob hosted on a web app.
examples:
- name: Delete a specific triggered webjob hosted on a web app. (autogenerated)
text: az webapp webjob triggered remove --name MyWebApp --resource-group MyResourceGroup --webjob-name MyWebjob
crafted: true
"""
helps['webapp webjob triggered run'] = """
type: command
short-summary: Run a specific triggered webjob hosted on a web app.
examples:
- name: Run a specific triggered webjob hosted on a web app. (autogenerated)
text: az webapp webjob triggered run --name MyWebApp --resource-group MyResourceGroup --webjob-name MyWebjob
crafted: true
"""
helps['appservice ase'] = """
type: group
short-summary: Manage App Service Environments v2
"""
helps['appservice ase list'] = """
type: command
short-summary: List app service environments.
examples:
- name: List all app service environments in subscription.
text: az appservice ase list
- name: List all app service environment in resource group.
text: az appservice ase list --resource-group MyResourceGroup
"""
helps['appservice ase show'] = """
type: command
short-summary: Show details of an app service environment.
examples:
- name: Show app service environment.
text: az appservice ase show --name MyAseName
"""
helps['appservice ase list-addresses'] = """
type: command
short-summary: List VIPs associated with an app service environment.
examples:
- name: List VIPs for an app service environments.
text: az appservice ase list-addresses --name MyAseName
"""
helps['appservice ase list-plans'] = """
type: command
short-summary: List app service plans associated with an app service environment.
examples:
- name: List app service plans for an app service environments.
text: az appservice ase list-plans --name MyAseName
"""
helps['appservice ase create'] = """
type: command
short-summary: Create app service environment.
examples:
- name: Create Resource Group, vNet and app service environment v2 with default values.
text: |
az group create -g MyResourceGroup --location westeurope
az network vnet create -g MyResourceGroup -n MyVirtualNetwork \\
--address-prefixes 10.0.0.0/16 --subnet-name MyAseSubnet --subnet-prefixes 10.0.0.0/24
az appservice ase create -n MyAseName -g MyResourceGroup --vnet-name MyVirtualNetwork \\
--subnet MyAseSubnet
- name: Create External app service environments v2 with large front-ends and scale factor of 10 in existing resource group and vNet.
text: |
az appservice ase create -n MyAseName -g MyResourceGroup --vnet-name MyVirtualNetwork \\
--subnet MyAseSubnet --front-end-sku I3 --front-end-scale-factor 10 \\
--virtual-ip-type External
- name: Create vNet and app service environment v2, but do not create network security group and route table in existing resource group.
text: |
az network vnet create -g MyResourceGroup -n MyVirtualNetwork \\
--address-prefixes 10.0.0.0/16 --subnet-name MyAseSubnet --subnet-prefixes 10.0.0.0/24
az appservice ase create -n MyAseName -g MyResourceGroup --vnet-name MyVirtualNetwork \\
--subnet MyAseSubnet --ignore-network-security-group --ignore-route-table
- name: Create vNet and app service environment v2 in a smaller than recommended subnet in existing resource group.
text: |
az network vnet create -g MyResourceGroup -n MyVirtualNetwork \\
--address-prefixes 10.0.0.0/16 --subnet-name MyAseSubnet --subnet-prefixes 10.0.0.0/26
az appservice ase create -n MyAseName -g MyResourceGroup --vnet-name MyVirtualNetwork \\
--subnet MyAseSubnet --ignore-subnet-size-validation
- name: Create Resource Group, vNet and app service environment v3 with default values.
text: |
az group create -g ASEv3ResourceGroup --location westeurope
az network vnet create -g ASEv3ResourceGroup -n MyASEv3VirtualNetwork \\
--address-prefixes 10.0.0.0/16 --subnet-name Inbound --subnet-prefixes 10.0.0.0/24
az network vnet subnet create -g ASEv3ResourceGroup --vnet-name MyASEv3VirtualNetwork \\
--name Outbound --address-prefixes 10.0.1.0/24
az appservice ase create -n MyASEv3Name -g ASEv3ResourceGroup \\
--vnet-name MyASEv3VirtualNetwork --subnet Outbound --kind asev3
"""
helps['appservice ase create-inbound-services'] = """
type: command
short-summary: Create the inbound services needed in preview for ASEv3 (private endpoint and DNS) or Private DNS Zone for Internal ASEv2.
examples:
- name: Create private endpoint, Private DNS Zone, A records and ensure subnet network policy.
text: |
az appservice ase create-inbound-services -n MyASEName -g ASEResourceGroup \\
--vnet-name MyASEVirtualNetwork --subnet MyAseSubnet
- name: Create private endpoint and ensure subnet network policy (ASEv3), but do not create DNS Zone and records.
text: |
az appservice ase create-inbound-services -n MyASEv3Name -g ASEv3ResourceGroup \\
--vnet-name MyASEv3VirtualNetwork --subnet Inbound --skip-dns
"""
helps['appservice ase update'] = """
type: command
short-summary: Update app service environment.
examples:
- name: Update app service environment with medium front-ends and scale factor of 10.
text: |
az appservice ase update -n MyAseName -g MyResourceGroup --front-end-sku I2 \\
--front-end-scale-factor 10
"""
helps['appservice ase delete'] = """
type: command
short-summary: Delete app service environment.
examples:
- name: Delete app service environment.
text: az appservice ase delete -n MyAseName
"""
helps['appservice domain'] = """
type: group
short-summary: Manage custom domains.
"""
helps['appservice domain create'] = """
type: command
short-summary: Create and purchase a custom domain.
examples:
- name: Accept the legal terms for purchasing and creating MyCustomDomain.com, then purchase and create domain.
text: az appservice domain create -g MyResourceGroup --hostname MyCustomDomain.com --contact-info=@'C:/path_to_contact_info.json' --accept-terms
- name: View the details of the domain that will be purchased and created, without actually running the operation
text: az appservice domain create -g MyResourceGroup --hostname MyCustomDomain.com --contact-info=@'C:/path_to_contact_info.json' --dryrun
"""
helps['appservice domain show-terms'] = """
type: command
short-summary: Show the legal terms for purchasing and creating a custom domain.
examples:
- name: Show the legal terms for purchasing and creating MyCustomDomain.com
text: az appservice domain show-terms --hostname MyCustomDomain.com
"""
helps['staticwebapp'] = """
type: group
short-summary: Manage static apps.
"""
helps['staticwebapp list'] = """
type: command
short-summary: List all static app resources in a subscription, or in resource group if provided
examples:
- name: List static apps in a subscription.
text: az staticwebapp list
"""
helps['staticwebapp browse'] = """
type: command
short-summary: Show details of a static app.
examples:
- name: Show static app in a subscription.
text: az staticwebapp browse -n MyStaticAppName
"""
helps['staticwebapp create'] = """
type: command
short-summary: Create a static app with content from GitHubRepository URL provided in source on provided branch. Return the app created.
examples:
- name: Create static app in a subscription.
text: az staticwebapp create -n MyStaticAppName -g MyExistingRg
-s https://github.com/JohnDoe/my-first-static-web-app -l WestUs2 -b master
"""
helps['staticwebapp disconnect'] = """
type: command
short-summary: Disconnect source control to enable connecting to a different repo.
examples:
- name: Disconnect static app.
text: az staticwebapp disconnect -n MyStaticAppName
"""
helps['staticwebapp reconnect'] = """
type: command
short-summary: Connect to a repo and branch following a disconnect command.
examples:
- name: Connect a repo and branch to static app.
text: az staticwebapp reconnect -n MyStaticAppName --source MyGitHubRepo -b master --token MyAccessToken
"""
helps['staticwebapp delete'] = """
type: command
short-summary: Delete a static app.
examples:
- name: Delete a static app.
text: az staticwebapp delete -n MyStaticAppName -g MyRg
"""
helps['staticwebapp environment'] = """
type: group
short-summary: Manage environment of the static app.
"""
helps['staticwebapp environment list'] = """
type: command
short-summary: List all environment of the static app including production.
examples:
- name: List static app environment.
text: az staticwebapp environment list -n MyStaticAppName
"""
helps['staticwebapp environment show'] = """
type: command
short-summary: Show information about the production environment or the specified environment.
examples:
- name: Show a static app environment.
text: az staticwebapp environment show -n MyStaticAppName
"""
helps['staticwebapp environment functions'] = """
type: command
short-summary: Show information about functions.
examples:
- name: Show static app functions.
text: az staticwebapp environment functions -n MyStaticAppName
"""
helps['staticwebapp hostname'] = """
type: group
short-summary: Manage custom hostnames of Functions of the static app.
"""
helps['staticwebapp hostname list'] = """
type: command
short-summary: List custom hostnames of the static app.
examples:
- name: List custom hostnames of the static app.
text: az staticwebapp hostname list -n MyStaticAppName
"""
helps['staticwebapp hostname set'] = """
type: command
short-summary: Set given sub-domain hostname to the static app. Please configure a CNAME record with your DNS provider.
examples:
- name: Set hostname to the static app.
text: az staticwebapp hostname set -n MyStaticAppName --hostname www.example.com
"""
helps['staticwebapp hostname delete'] = """
type: command
short-summary: Delete given hostname of the static app.
examples:
- name: Delete given hostname of the static app.
text: az staticwebapp hostname delete -n MyStaticAppName --hostname HostnameToDelete
"""
helps['staticwebapp appsettings'] = """
type: group
short-summary: Manage app settings of Functions of the static app.
"""
helps['staticwebapp appsettings list'] = """
type: command
short-summary: List function app settings of the static app. A function must first be deployed to use this method.
examples:
- name: List function app settings of the static app.
text: az staticwebapp appsettings list -n MyStaticAppName
"""
helps['staticwebapp appsettings set'] = """
type: command
short-summary: Set (replace) function app settings of the static app.
examples:
- name: Set (replace) function app settings of the static app.
text: az staticwebapp appsettings set -n MyStaticAppName --setting-names key1=val1 key2=val2
"""
helps['staticwebapp appsettings delete'] = """
type: command
short-summary: Delete function app settings with given keys of the static app.
examples:
- name: Delete given app settings of the static app.
text: az staticwebapp appsettings delete -n MyStaticAppName --setting-names key1 key2
"""
helps['staticwebapp users'] = """
type: group
short-summary: Manage users of the static app.
"""
helps['staticwebapp users list'] = """
type: command
short-summary: Lists users and assigned roles, limited to users who accepted their invites.
examples:
- name: Lists users and assigned roles.
text: az staticwebapp users list -n MyStaticAppName
"""
helps['staticwebapp users invite'] = """
type: command
short-summary: Create invitation link for specified user to the static app.
examples:
- name: Create invitation link for specified user to the static app.
text: az staticwebapp users invite -n MyStaticAppName --authentication-provider GitHub --user-details JohnDoe
--role Contributor --domain static-app-001.azurestaticapps.net --invitation-expiration-in-hours 1
"""
helps['staticwebapp users update'] = """
type: command
short-summary: Updates a user entry with the listed roles. Either user details or user id is required.
examples:
- name: Updates a user entry with the listed roles.
text: az staticwebapp users update -n MyStaticAppName --user-details JohnDoe --role Contributor
"""
| 39.347468
| 318
| 0.734742
|
4a06e623e59ce627420942321afc12d25246ec22
| 1,800
|
py
|
Python
|
tests/unit/small_text/integrations/transformers/classifiers/test_base.py
|
chschroeder/small-text
|
ef28e91ba0c94fe938dde4f16253aa8695ea13b7
|
[
"MIT"
] | 218
|
2021-05-26T16:38:53.000Z
|
2022-03-30T09:48:54.000Z
|
tests/unit/small_text/integrations/transformers/classifiers/test_base.py
|
chschroeder/small-text
|
ef28e91ba0c94fe938dde4f16253aa8695ea13b7
|
[
"MIT"
] | 9
|
2021-10-16T23:23:02.000Z
|
2022-02-22T15:23:11.000Z
|
tests/unit/small_text/integrations/transformers/classifiers/test_base.py
|
chschroeder/small-text
|
ef28e91ba0c94fe938dde4f16253aa8695ea13b7
|
[
"MIT"
] | 21
|
2021-06-24T11:19:44.000Z
|
2022-03-12T16:29:53.000Z
|
import pytest
import unittest
import numpy as np
from small_text.integrations.pytorch.exceptions import PytorchNotFoundError
try:
import torch
from small_text.integrations.transformers.datasets import TransformersDataset
from small_text.integrations.transformers.classifiers import TransformerBasedClassification
from small_text.integrations.transformers.classifiers.classification import TransformerModelArguments
except (ModuleNotFoundError, PytorchNotFoundError):
pass
class _TransformerClassifierBaseFunctionalityTest(object):
    """Shared behavioral checks for transformer-based classifiers.

    Mixin intended to be combined with ``unittest.TestCase``; concrete
    subclasses must implement ``_get_clf`` to supply the classifier under
    test. The checks below verify graceful handling of empty datasets
    without requiring a (costly) ``fit`` call.
    """

    def _get_clf(self):
        """Return the classifier instance to test. Must be overridden."""
        raise NotImplementedError()

    def test_predict_on_empty_data(self):
        """predict() on an empty dataset yields an empty integer array."""
        test_set = TransformersDataset([], None)

        clf = self._get_clf()
        # here would be a clf.fit call, which we omit due to the runtime costs
        predictions = clf.predict(test_set)
        self.assertEqual(0, predictions.shape[0])
        self.assertTrue(np.issubdtype(predictions.dtype, np.integer))

    def test_predict_proba_on_empty_data(self):
        """predict_proba() on an empty dataset yields empty label/proba arrays."""
        test_set = TransformersDataset([], None)

        clf = self._get_clf()
        # here would be a clf.fit call, which we omit due to the runtime costs
        predictions, proba = clf.predict_proba(test_set)
        self.assertEqual(0, predictions.shape[0])
        self.assertTrue(np.issubdtype(predictions.dtype, np.integer))

        self.assertEqual(0, proba.shape[0])
        # np.float was deprecated in NumPy 1.20 and removed in 1.24;
        # np.floating is the correct abstract dtype for this check.
        self.assertTrue(np.issubdtype(proba.dtype, np.floating))
@pytest.mark.pytorch
class TransformerBasedClassificationBaseFunctionalityTest(
        unittest.TestCase, _TransformerClassifierBaseFunctionalityTest):
    """Runs the shared base-functionality checks against
    ``TransformerBasedClassification`` using a tiny test model.
    """

    def _get_clf(self):
        # Small checkpoint keeps the test lightweight; binary classification.
        model_args = TransformerModelArguments('sshleifer/tiny-distilroberta-base')
        return TransformerBasedClassification(model_args, 2)
| 34.615385
| 121
| 0.737778
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.