# Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: Apache-2.0
import logging
import salt.exceptions
import salt.utils.data
import saltext.vmware.utils.common as utils_common
import saltext.vmware.utils.esxi as utils_esxi
from salt.defaults import DEFAULT_TARGET_DELIM
from saltext.vmware.utils.connect import get_service_instance
log = logging.getLogger(__name__)
try:
from pyVmomi import vmodl, vim, VmomiSupport
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
__virtualname__ = "vmware_esxi"
def __virtual__():
if not HAS_PYVMOMI:
return False, "Unable to import pyVmomi module."
return __virtualname__
def get_lun_ids(service_instance=None):
"""
Return a list of LUN (Logical Unit Number) NAA (Network Addressing Authority) IDs.
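    .. code-block:: bash
        salt '*' vmware_esxi.get_lun_ids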
"""
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(service_instance=service_instance, get_all_hosts=True)
ids = []
for host in hosts:
for datastore in host.datastore:
for extent in datastore.info.vmfs.extent:
ids.append(extent.diskName)
return ids
def _get_capability_attribs(host):
ret = {}
for attrib in dir(host.capability):
if attrib.startswith("_") or attrib.lower() == "array":
continue
val = getattr(host.capability, attrib)
# Convert all pyvmomi str[], bool[] and int[] to list.
if isinstance(val, list):
val = list(val)
ret.update({utils_common.camel_to_snake_case(attrib): val})
return ret
def get_capabilities(service_instance=None):
"""
Return ESXi host's capability information.
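    .. code-block:: bash
        salt '*' vmware_esxi.get_capabilities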
"""
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(service_instance=service_instance, get_all_hosts=True)
capabilities = {}
for host in hosts:
capabilities[host.name] = _get_capability_attribs(host)
return capabilities
def power_state(
datacenter_name=None, cluster_name=None, host_name=None, state=None, timeout=600, force=True
):
"""
Manage the power state of the ESXi host.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname whose power state needs to be managed (optional).
state
Sets the ESXi host to this power state. Valid values: "reboot", "standby", "poweron", "shutdown".
timeout
Timeout when transitioning power state to standby / poweron. Default: 600 seconds
force
Force power state transition. Default: True
.. code-block:: bash
salt '*' vmware_esxi.power_state datacenter_name=dc1 cluster_name=cl1 host_name=host1 state=shutdown
"""
ret = None
task = None
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
if state == "reboot":
task = h.RebootHost_Task(force)
elif state == "standby":
task = h.PowerDownHostToStandBy_Task(timeout, force)
elif state == "poweron":
task = h.PowerUpHostFromStandBy_Task(timeout)
elif state == "shutdown":
task = h.ShutdownHost_Task(force)
if task:
utils_common.wait_for_task(task, h.name, "PowerStateTask")
ret = True
except (vmodl.fault.NotSupported, salt.exceptions.VMwareApiError) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def manage_service(
service_name,
datacenter_name=None,
cluster_name=None,
host_name=None,
state=None,
startup_policy=None,
service_instance=None,
):
"""
    Manage the state of the service running on the ESXi host.
service_name
Service that needs to be managed.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
        Filter by this ESXi hostname whose service needs to be managed (optional)
state
Sets the service running on the ESXi host to this state. Valid values: "start", "stop", "restart".
startup_policy
        Sets the service startup policy. If unspecified, no changes are made. Valid values: "on", "off", "automatic".
- on: Start and stop with host
- off: Start and stop manually
- automatic: Start automatically if any ports are open, and stop when all ports are closed
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional)
.. code-block:: bash
salt '*' vmware_esxi.manage_service sshd datacenter_name=dc1 cluster_name=cl1 host_name=host1 state=restart startup_policy=on
"""
log.debug("Running vmware_esxi.manage_service")
ret = None
task = None
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_service = h.configManager.serviceSystem
if not host_service:
continue
if state:
if state == "start":
host_service.StartService(id=service_name)
elif state == "stop":
host_service.StopService(id=service_name)
elif state == "restart":
host_service.RestartService(id=service_name)
else:
raise salt.exceptions.SaltException("Unknown state - {}".format(state))
if startup_policy is not None:
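                # salt converts command line input "on" and "off" to True and False. Handle explicitly.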
if startup_policy is True:
startup_policy = "on"
elif startup_policy is False:
startup_policy = "off"
host_service.UpdateServicePolicy(id=service_name, policy=startup_policy)
ret = True
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def list_services(
service_name=None,
datacenter_name=None,
cluster_name=None,
host_name=None,
state=None,
startup_policy=None,
service_instance=None,
):
"""
    List the state of services running on matching ESXi hosts.
service_name
Filter by this service name. (optional)
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
state
Filter by this service state. Valid values: "running", "stopped"
startup_policy
        Filter by this service startup policy. Valid values: "on", "off", "automatic".
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.list_services
"""
log.debug("Running vmware_esxi.list_services")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_service = h.configManager.serviceSystem
ret[h.name] = {}
if not host_service:
continue
if startup_policy is not None:
# salt converts command line input "on" and "off" to True and False. Handle explicitly.
if startup_policy is True:
startup_policy = "on"
elif startup_policy is False:
startup_policy = "off"
services = host_service.serviceInfo.service
for service in services or []:
if service_name and service.key != service_name:
continue
if startup_policy and service.policy != startup_policy:
continue
if state and state == "running" and not service.running:
continue
if state and state == "stopped" and service.running:
continue
ret[h.name][service.key] = {
"state": "running" if service.running else "stopped",
"startup_policy": service.policy,
}
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def get_acceptance_level(
datacenter_name=None,
cluster_name=None,
host_name=None,
acceptance_level=None,
service_instance=None,
):
"""
    Get acceptance level on matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
acceptance_level
Filter by this acceptance level. Valid values: "community", "partner", "vmware_accepted", "vmware_certified". (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get_acceptance_level
Returns:
.. code-block:: json
{
"host1": "partner",
"host2": "partner"
}
"""
log.debug("Running vmware_esxi.get_acceptance_level")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_config_manager = h.configManager.imageConfigManager
if not host_config_manager:
continue
host_acceptance_level = host_config_manager.HostImageConfigGetAcceptance()
if acceptance_level and host_acceptance_level != acceptance_level:
continue
ret[h.name] = host_acceptance_level
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def set_acceptance_level(
acceptance_level,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Set acceptance level on matching ESXi hosts.
acceptance_level
Set to this acceptance level. Valid values: "community", "partner", "vmware_accepted", "vmware_certified".
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
        salt '*' vmware_esxi.set_acceptance_level acceptance_level=partner
Returns:
.. code-block:: json
{
"host1": "partner",
"host2": "partner"
}
"""
log.debug("Running vmware_esxi.set_acceptance_level")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_config_manager = h.configManager.imageConfigManager
if not host_config_manager:
continue
host_config_manager.UpdateHostImageAcceptanceLevel(newAcceptanceLevel=acceptance_level)
ret[h.name] = acceptance_level
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def get_advanced_config(
datacenter_name=None,
cluster_name=None,
host_name=None,
config_name=None,
service_instance=None,
):
"""
    Get advanced config on matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
config_name
Filter by this config_name. (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get_advanced_config
"""
log.debug("Running vmware_esxi.get_advanced_config")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
config_manager = h.configManager.advancedOption
ret[h.name] = {}
if not config_manager:
continue
for opt in config_manager.QueryOptions(config_name):
ret[h.name][opt.key] = opt.value
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def set_advanced_configs(
config_dict,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Set multiple advanced configurations on matching ESXi hosts.
config_dict
        Set each configuration key to its configuration value, e.g. {"Annotations.WelcomeMessage": "Hello"}
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
        salt '*' vmware_esxi.set_advanced_configs config_dict='{"Annotations.WelcomeMessage": "Hello"}'
Returns:
.. code-block:: json
        {
            "host1": {
                "Annotations.WelcomeMessage": "Hello"
            }
        }
"""
log.debug("Running vmware_esxi.set_advanced_configs")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
config_manager = h.configManager.advancedOption
ret[h.name] = {}
if not config_manager:
continue
supported_configs = {}
for opt in config_manager.supportedOption:
if opt.key not in config_dict:
continue
supported_configs[opt.key] = opt.optionType
advanced_configs = []
for opt in config_dict:
opt_type = supported_configs[opt]
val = config_dict[opt]
if isinstance(opt_type, vim.option.BoolOption) and not isinstance(val, bool):
val = val.lower() == "true"
elif isinstance(opt_type, vim.option.LongOption):
val = VmomiSupport.vmodlTypes["long"](val)
elif isinstance(opt_type, vim.option.IntOption):
val = VmomiSupport.vmodlTypes["int"](val)
advanced_configs.append(vim.option.OptionValue(key=opt, value=val))
ret[h.name][opt] = config_dict[opt]
config_manager.UpdateOptions(changedValue=advanced_configs)
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def set_advanced_config(
config_name,
config_value,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Set a single advanced configuration on matching ESXi hosts.
config_name
Name of the advanced configuration to be set.
config_value
Set the advanced configuration to this value.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.set_advanced_config config_name=Annotations.WelcomeMessage config_value=Hello
Returns:
.. code-block:: json
        {
            "host1": {
                "Annotations.WelcomeMessage": "Hello"
            }
        }
"""
log.debug("Running vmware_esxi.set_advanced_config")
return set_advanced_configs(
config_dict={config_name: config_value},
datacenter_name=datacenter_name,
cluster_name=cluster_name,
host_name=host_name,
service_instance=service_instance,
)
def get_dns_config(
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    Get DNS configuration on matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get_dns_config
"""
log.debug("Running vmware_esxi.get_dns_config")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
dns_config = h.config.network.dnsConfig
if not dns_config:
continue
ret[h.name] = {}
ret[h.name]["dhcp"] = dns_config.dhcp
ret[h.name]["virtual_nic"] = dns_config.virtualNicDevice
ret[h.name]["host_name"] = dns_config.hostName
ret[h.name]["domain_name"] = dns_config.domainName
ret[h.name]["ip"] = list(dns_config.address)
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
return ret
def connect(host, service_instance=None):
"""
Connect an ESXi instance to a vCenter instance.
host
Name of ESXi instance in vCenter.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
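    .. code-block:: bash
        salt '*' vmware_esxi.connect host=host1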
"""
log.debug(f"Connect ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.reconnect_host(host, service_instance)
return {"state": state}
def disconnect(host, service_instance=None):
"""
Disconnect an ESXi instance.
host
Name of ESXi instance in vCenter.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
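    .. code-block:: bash
        salt '*' vmware_esxi.disconnect host=host1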
"""
log.debug(f"Disconnect ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.disconnect_host(host, service_instance)
return {"state": state}
def remove(host, service_instance=None):
"""
Remove an ESXi instance from a vCenter instance.
host
Name of ESXi instance in vCenter.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
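    .. code-block:: bash
        salt '*' vmware_esxi.remove host=host1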
"""
log.debug(f"Remove ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.remove_host(host, service_instance)
return {"state": state}
def move(host, cluster_name, service_instance=None):
"""
Move an ESXi instance to a different cluster.
host
Name of ESXi instance in vCenter.
cluster_name
Name of cluster to move host to.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
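    .. code-block:: bash
        salt '*' vmware_esxi.move host=host1 cluster_name=cl1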
"""
log.debug(f"Move ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.move_host(host, cluster_name, service_instance)
return {"state": state}
def add(
host,
root_user,
password,
cluster_name,
datacenter_name,
verify_host_cert=True,
connect=True,
service_instance=None,
):
"""
Add an ESXi instance to a vCenter instance.
host
IP address or hostname of ESXi instance.
root_user
Username with root privilege to ESXi instance.
password
Password to root user.
cluster_name
Name of cluster ESXi host is being added to.
    datacenter_name
Datacenter that contains cluster that ESXi instance is being added to.
verify_host_cert
        Validates that the host's SSL certificate is signed by a CA and that the hostname in the certificate matches the host. Defaults to True.
connect
Specifies whether host should be connected after being added. Defaults to True.
service_instance
The Service Instance from which to obtain managed object references. (Optional)
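    .. code-block:: bash
        salt '*' vmware_esxi.add host=host1 root_user=root password=secret cluster_name=cl1 datacenter_name=dc1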
"""
log.debug(f"Adding ESXi instance {host}.")
if service_instance is None:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
state = utils_esxi.add_host(
host,
root_user,
password,
cluster_name,
datacenter_name,
verify_host_cert,
connect,
service_instance,
)
return {"state": state}
def list_pkgs(
pkg_name=None,
datacenter_name=None,
cluster_name=None,
host_name=None,
service_instance=None,
):
"""
    List the packages installed on matching ESXi hosts.
Note: Appropriate filters are recommended for large installations.
pkg_name
Filter by this package name. (optional)
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.list_pkgs
"""
log.debug("Running vmware_esxi.list_pkgs")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
host_pkg_manager = h.configManager.imageConfigManager
if not host_pkg_manager:
continue
ret[h.name] = {}
pkgs = host_pkg_manager.FetchSoftwarePackages()
for pkg in pkgs:
if pkg_name and pkg.name != pkg_name:
continue
ret[h.name][pkg.name] = {
"version": pkg.version,
"vendor": pkg.vendor,
"summary": pkg.summary,
"description": pkg.description,
"acceptance_level": pkg.acceptanceLevel,
"maintenance_mode_required": pkg.maintenanceModeRequired,
"creation_date": pkg.creationDate,
}
return ret
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
def get(
datacenter_name=None,
cluster_name=None,
host_name=None,
key=None,
default="",
delimiter=DEFAULT_TARGET_DELIM,
service_instance=None,
):
"""
    Get configuration information for matching ESXi hosts.
datacenter_name
Filter by this datacenter name (required when cluster is specified)
cluster_name
Filter by this cluster name (optional)
host_name
Filter by this ESXi hostname (optional)
key
        Attempt to retrieve the named value from the ESXi host configuration data; if the named
        value is not available, return the passed default. The default return is an empty string.
Follows the grains.get filter semantics. (optional)
The value can also represent a value in a nested dict using a ":" delimiter
for the dict. This means that if a dict in ESXi host configuration looks like this:
{'vsan': {'health': 'good'}}
        To retrieve the value associated with the health key in the vsan dict, this
key can be passed:
vsan:health
delimiter
Specify an alternate delimiter to use when traversing a nested dict.
This is useful for when the desired key contains a colon. (optional)
service_instance
Use this vCenter service connection instance instead of creating a new one. (optional).
.. code-block:: bash
salt '*' vmware_esxi.get dc1 cl1
"""
log.debug("Running vmware_esxi.get")
ret = {}
if not service_instance:
service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)
hosts = utils_esxi.get_hosts(
service_instance=service_instance,
host_names=[host_name] if host_name else None,
cluster_name=cluster_name,
datacenter_name=datacenter_name,
get_all_hosts=host_name is None,
)
try:
for h in hosts:
ret[h.name] = {}
ret[h.name]["vsan"] = {}
vsan_manager = h.configManager.vsanSystem
if vsan_manager:
vsan = vsan_manager.QueryHostStatus()
ret[h.name]["vsan"]["cluster_uuid"] = vsan.uuid
ret[h.name]["vsan"]["node_uuid"] = vsan.nodeUuid
ret[h.name]["vsan"]["health"] = vsan.health
ret[h.name]["datastores"] = {}
for store in h.datastore:
ret[h.name]["datastores"][store.name] = {}
ret[h.name]["datastores"][store.name]["capacity"] = store.summary.capacity
ret[h.name]["datastores"][store.name]["free_space"] = store.summary.freeSpace
ret[h.name]["nics"] = {}
for nic in h.config.network.vnic:
ret[h.name]["nics"][nic.device] = {}
ret[h.name]["nics"][nic.device]["ip_address"] = nic.spec.ip.ipAddress
ret[h.name]["nics"][nic.device]["subnet_mask"] = nic.spec.ip.subnetMask
ret[h.name]["nics"][nic.device]["mac"] = nic.spec.mac
ret[h.name]["nics"][nic.device]["mtu"] = nic.spec.mtu
ret[h.name]["cpu_model"] = h.summary.hardware.cpuModel
ret[h.name]["num_cpu_cores"] = h.summary.hardware.numCpuCores
ret[h.name]["num_cpu_pkgs"] = h.summary.hardware.numCpuPkgs
ret[h.name]["num_cpu_threads"] = h.summary.hardware.numCpuThreads
ret[h.name]["memory_size"] = h.summary.hardware.memorySize
ret[h.name]["overall_memory_usage"] = h.summary.quickStats.overallMemoryUsage
ret[h.name]["product_name"] = h.config.product.name
ret[h.name]["product_version"] = h.config.product.version
ret[h.name]["product_build"] = h.config.product.build
ret[h.name]["product_os_type"] = h.config.product.osType
ret[h.name]["host_name"] = h.summary.config.name
ret[h.name]["system_vendor"] = h.hardware.systemInfo.vendor
ret[h.name]["system_model"] = h.hardware.systemInfo.model
ret[h.name]["bios_release_date"] = h.hardware.biosInfo.releaseDate
ret[h.name]["bios_release_version"] = h.hardware.biosInfo.biosVersion
ret[h.name]["uptime"] = h.summary.quickStats.uptime
ret[h.name]["in_maintenance_mode"] = h.runtime.inMaintenanceMode
ret[h.name]["system_uuid"] = h.hardware.systemInfo.uuid
for info in h.hardware.systemInfo.otherIdentifyingInfo:
ret[h.name].update(
{
utils_common.camel_to_snake_case(
info.identifierType.key
): info.identifierValue
}
)
ret[h.name]["capabilities"] = _get_capability_attribs(host=h)
if key:
ret[h.name] = salt.utils.data.traverse_dict_and_list(
ret[h.name], key, default, delimiter
)
return ret
except (
vim.fault.InvalidState,
vim.fault.NotFound,
vim.fault.HostConfigFault,
vmodl.fault.InvalidArgument,
salt.exceptions.VMwareApiError,
) as exc:
raise salt.exceptions.SaltException(str(exc))
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Revision $Id$
"""Internal use: Topic-specific extensions for TCPROS support"""
import socket
import threading
import time
try:
from xmlrpc.client import ServerProxy # Python 3.x
except ImportError:
from xmlrpclib import ServerProxy # Python 2.x
from rospy.core import logwarn, logerr, logdebug, rospyerr
import rospy.exceptions
import rospy.names
import rospy.impl.registration
import rospy.impl.transport
from rospy.impl.tcpros_base import TCPROSTransport, TCPROSTransportProtocol, \
get_tcpros_server_address, start_tcpros_server,\
DEFAULT_BUFF_SIZE, TCPROS
class TCPROSSub(TCPROSTransportProtocol):
"""
Subscription transport implementation for receiving topic data via
peer-to-peer TCP/IP sockets
"""
def __init__(self, resolved_name, recv_data_class, queue_size=None, \
buff_size=DEFAULT_BUFF_SIZE, tcp_nodelay=False):
"""
ctor.
@param resolved_name: resolved subscription name
@type resolved_name: str
@param recv_data_class: class to instantiate to receive
messages
@type recv_data_class: L{rospy.Message}
@param queue_size: maximum number of messages to
deserialize from newly read data off socket
@type queue_size: int
@param buff_size: recv buffer size
@type buff_size: int
@param tcp_nodelay: If True, request TCP_NODELAY from publisher
@type tcp_nodelay: bool
"""
super(TCPROSSub, self).__init__(resolved_name, recv_data_class, queue_size, buff_size)
self.direction = rospy.impl.transport.INBOUND
self.tcp_nodelay = tcp_nodelay
def get_header_fields(self):
"""
@return: dictionary of subscriber fields
@rtype: dict
"""
return {'topic': self.resolved_name,
'message_definition': self.recv_data_class._full_text,
'tcp_nodelay': '1' if self.tcp_nodelay else '0',
'md5sum': self.recv_data_class._md5sum,
'type': self.recv_data_class._type,
'callerid': rospy.names.get_caller_id(),
'node_type': "rospy"}
# Separate method for easier testing
def _configure_pub_socket(sock, is_tcp_nodelay):
"""
Configure socket options on a new publisher socket.
    @param sock: socket to configure
    @type sock: socket.socket
    @param is_tcp_nodelay: if True, TCP_NODELAY will be set on outgoing socket if available
    @type is_tcp_nodelay: bool
"""
# #956: low latency, TCP_NODELAY support
if is_tcp_nodelay:
if hasattr(socket, 'TCP_NODELAY'):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
else:
logwarn("WARNING: cannot enable TCP_NODELAY as its not supported on this platform")
#TODO:POLLING: TCPROSPub currently doesn't actually do anything -- not until polling is implemented
class TCPROSPub(TCPROSTransportProtocol):
"""
Publisher transport implementation for publishing topic data via
peer-to-peer TCP/IP sockets.
"""
def __init__(self, resolved_name, pub_data_class, is_latch=False, headers=None):
"""
ctor.
@param resolved_name: resolved topic name
@type resolved_name: str
        @param pub_data_class: class to instantiate to receive messages
@type pub_data_class: L{rospy.Message} class
@param is_latch: If True, Publisher is latching
@type is_latch: bool
"""
# very small buffer size for publishers as the messages they receive are very small
super(TCPROSPub, self).__init__(resolved_name, None, queue_size=None, buff_size=128)
self.pub_data_class = pub_data_class
self.direction = rospy.impl.transport.OUTBOUND
self.is_latch = is_latch
self.headers = headers if headers else {}
def get_header_fields(self):
base = {'topic': self.resolved_name,
'type': self.pub_data_class._type,
'latching': '1' if self.is_latch else '0',
'message_definition': self.pub_data_class._full_text,
'md5sum': self.pub_data_class._md5sum,
'callerid': rospy.names.get_caller_id(),
'node_type': "rospy"}
# this implementation allows the user to override builtin
# fields. this could potentially enable some interesting
# features... or it could be really bad.
if self.headers:
base.update(self.headers)
return base
def robust_connect_subscriber(conn, dest_addr, dest_port, pub_uri, receive_cb, resolved_topic_name):
"""
    Keeps trying to create a connection for the subscriber, then passes off to receive_loop once connected.
"""
# kwc: this logic is not very elegant. I am waiting to rewrite
# the I/O loop with async i/o to clean this up.
# timeout is really generous. for now just choosing one that is large but not infinite
interval = 0.5
while conn.socket is None and not conn.done and not rospy.is_shutdown():
try:
conn.connect(dest_addr, dest_port, pub_uri, timeout=60.)
except rospy.exceptions.TransportInitError as e:
# if the connection was closed intentionally
# because of an unknown error, stop trying
if conn.protocol is None:
conn.done = True
break
rospyerr("unable to create subscriber transport: %s. Will try again in %ss", e, interval)
interval = interval * 2
time.sleep(interval)
# check to see if publisher state has changed
conn.done = not check_if_still_publisher(resolved_topic_name, pub_uri)
if not conn.done:
conn.receive_loop(receive_cb)
def check_if_still_publisher(resolved_topic_name, pub_uri):
try:
s = ServerProxy(pub_uri)
code, msg, val = s.getPublications(rospy.names.get_name())
if code == 1:
return len([t for t in val if t[0] == resolved_topic_name]) > 0
else:
return False
    except Exception:
return False
class TCPROSHandler(rospy.impl.transport.ProtocolHandler):
"""
ROS Protocol handler for TCPROS. Accepts both TCPROS topic
connections as well as ROS service connections over TCP. TCP server
socket is run once start_server() is called -- this is implicitly
called during init_publisher().
"""
def __init__(self):
"""ctor"""
self.tcp_nodelay_map = {} # { topic : tcp_nodelay}
def set_tcp_nodelay(self, resolved_name, tcp_nodelay):
"""
@param resolved_name: resolved topic name
@type resolved_name: str
@param tcp_nodelay: If True, sets TCP_NODELAY on publisher's
socket (disables Nagle algorithm). This results in lower
latency publishing at the cost of efficiency.
@type tcp_nodelay: bool
"""
self.tcp_nodelay_map[resolved_name] = tcp_nodelay
def shutdown(self):
"""
stops the TCP/IP server responsible for receiving inbound connections
"""
pass
def create_transport(self, resolved_name, pub_uri, protocol_params):
"""
Connect to topic resolved_name on Publisher pub_uri using TCPROS.
@param resolved_name str: resolved topic name
@type resolved_name: str
@param pub_uri: XML-RPC URI of publisher
@type pub_uri: str
@param protocol_params: protocol parameters to use for connecting
@type protocol_params: [XmlRpcLegal]
@return: code, message, debug
@rtype: (int, str, int)
"""
#Validate protocol params = [TCPROS, address, port]
        if not isinstance(protocol_params, list) or len(protocol_params) != 3:
return 0, "ERROR: invalid TCPROS parameters", 0
if protocol_params[0] != TCPROS:
return 0, "INTERNAL ERROR: protocol id is not TCPROS: %s"%id, 0
        _, dest_addr, dest_port = protocol_params
sub = rospy.impl.registration.get_topic_manager().get_subscriber_impl(resolved_name)
#Create connection
protocol = TCPROSSub(resolved_name, sub.data_class, \
queue_size=sub.queue_size, buff_size=sub.buff_size,
tcp_nodelay=sub.tcp_nodelay)
conn = TCPROSTransport(protocol, resolved_name)
        conn.set_endpoint_id(pub_uri)
        t = threading.Thread(name=resolved_name, target=robust_connect_subscriber, args=(conn, dest_addr, dest_port, pub_uri, sub.receive_callback, resolved_topic_name))
# don't enable this just yet, need to work on this logic
#rospy.core._add_shutdown_thread(t)
# Attach connection to _SubscriberImpl
if sub.add_connection(conn): #pass tcp connection to handler
# since the thread might cause the connection to close
# it should only be started after the connection has been added to the subscriber
# https://github.com/ros/ros_comm/issues/544
t.start()
return 1, "Connected topic[%s]. Transport impl[%s]"%(resolved_name, conn.__class__.__name__), dest_port
else:
# _SubscriberImpl already closed or duplicate subscriber created
conn.close()
return 0, "ERROR: Race condition failure creating topic subscriber [%s]"%(resolved_name), 0
def supports(self, protocol):
"""
@param protocol: name of protocol
@type protocol: str
@return: True if protocol is supported
@rtype: bool
"""
return protocol == TCPROS
def get_supported(self):
"""
Get supported protocols
"""
return [[TCPROS]]
def init_publisher(self, resolved_name, protocol):
"""
        Initialize this node to receive an inbound TCP connection,
        i.e. start up a TCP server if one is not already running.
        @param resolved_name: topic name
        @type resolved_name: str
@param protocol: negotiated protocol
parameters. protocol[0] must be the string 'TCPROS'
@type protocol: [str, value*]
@return: (code, msg, [TCPROS, addr, port])
@rtype: (int, str, list)
"""
if protocol[0] != TCPROS:
return 0, "Internal error: protocol does not match TCPROS: %s"%protocol, []
start_tcpros_server()
addr, port = get_tcpros_server_address()
return 1, "ready on %s:%s"%(addr, port), [TCPROS, addr, port]
def topic_connection_handler(self, sock, client_addr, header):
"""
Process incoming topic connection. Reads in topic name from
handshake and creates the appropriate L{TCPROSPub} handler for the
connection.
@param sock: socket connection
@type sock: socket.socket
@param client_addr: client address
@type client_addr: (str, int)
@param header: key/value pairs from handshake header
@type header: dict
@return: error string or None
@rtype: str
"""
if rospy.core.is_shutdown_requested():
return "Node is shutting down"
for required in ['topic', 'md5sum', 'callerid']:
            if required not in header:
return "Missing required '%s' field"%required
else:
resolved_topic_name = header['topic']
md5sum = header['md5sum']
tm = rospy.impl.registration.get_topic_manager()
topic = tm.get_publisher_impl(resolved_topic_name)
if not topic:
return "[%s] is not a publisher of [%s]. Topics are %s"%(rospy.names.get_caller_id(), resolved_topic_name, tm.get_publications())
elif not topic.data_class or topic.closed:
return "Internal error processing topic [%s]"%(resolved_topic_name)
elif md5sum != rospy.names.TOPIC_ANYTYPE and md5sum != topic.data_class._md5sum:
data_class = topic.data_class
actual_type = data_class._type
# check to see if subscriber sent 'type' header. If they did, check that
# types are same first as this provides a better debugging message
if 'type' in header:
requested_type = header['type']
if requested_type != actual_type:
return "topic types do not match: [%s] vs. [%s]"%(requested_type, actual_type)
else:
# defaults to actual type
requested_type = actual_type
return "Client [%s] wants topic [%s] to have datatype/md5sum [%s/%s], but our version has [%s/%s] Dropping connection."%(header['callerid'], resolved_topic_name, requested_type, md5sum, actual_type, data_class._md5sum)
else:
#TODO:POLLING if polling header is present, have to spin up receive loop as well
# #1334: tcp_nodelay support from subscriber option
if 'tcp_nodelay' in header:
tcp_nodelay = True if header['tcp_nodelay'].strip() == '1' else False
else:
tcp_nodelay = self.tcp_nodelay_map.get(resolved_topic_name, False)
_configure_pub_socket(sock, tcp_nodelay)
protocol = TCPROSPub(resolved_topic_name, topic.data_class, is_latch=topic.is_latch, headers=topic.headers)
transport = TCPROSTransport(protocol, resolved_topic_name)
transport.set_socket(sock, header['callerid'])
transport.remote_endpoint = client_addr
transport.write_header()
topic.add_connection(transport)
class QueuedConnection(object):
"""
    Wraps a Transport instance and behaves like one, but queues the data
    written to it and relays it asynchronously to the wrapped instance.
"""
def __init__(self, connection, queue_size):
"""
ctor.
@param connection: the wrapped transport instance
@type connection: Transport
@param queue_size: the maximum size of the queue, zero means infinite
@type queue_size: int
"""
super(QueuedConnection, self).__init__()
self._connection = connection
self._queue_size = queue_size
self._lock = threading.Lock()
self._cond_data_available = threading.Condition(self._lock)
self._connection.set_cleanup_callback(self._closed_connection_callback)
self._queue = []
self._error = None
self._thread = threading.Thread(target=self._run)
self._thread.start()
def _closed_connection_callback(self, connection):
with self._lock:
self._cond_data_available.notify()
def __getattr__(self, name):
if name.startswith('__'):
raise AttributeError(name)
return getattr(self._connection, name)
def write_data(self, data):
with self._lock:
# if there was previously an error within the dispatch thread raise it
if self._error:
error = self._error
self._error = None
raise error
# pop oldest data if queue limit is reached
if self._queue_size > 0 and len(self._queue) == self._queue_size:
del self._queue[0]
self._queue.append(data)
self._cond_data_available.notify()
# effectively yields the rest of the thread quantum
time.sleep(0)
return True
def _run(self):
while not self._connection.done:
queue = []
with self._lock:
# wait for available data
while not self._queue and not self._connection.done:
self._cond_data_available.wait()
# take all data from queue for processing outside of the lock
if self._queue:
queue = self._queue
self._queue = []
# relay all data
for data in queue:
try:
self._connection.write_data(data)
except Exception as e:
with self._lock:
self._error = e
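# Minimal usage sketch (assumes `transport` is any connected Transport-like
# object, e.g. a TCPROSTransport, exposing write_data/set_cleanup_callback):
#   queued = QueuedConnection(transport, queue_size=10)
#   queued.write_data(data)  # returns quickly; the worker thread relays the data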
a = int(input())
b = int(input())
if a <= b:
    for i in range(a, b + 1):
        for n in range(1, 11):
            print(f'{i} x {n} = {i * n}')
        print('-' * 10)
else:
    print('No multiplication table in the interval!')
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 6 07:51:51 2018
@author: tuheenahmmed
"""
def getWordScore(word, n):
"""
Returns the score for a word. Assumes the word is a valid word.
The score for a word is the sum of the points for letters in the
word, multiplied by the length of the word, PLUS 50 points if all n
letters are used on the first turn.
Letters are scored as in Scrabble; A is worth 1, B is worth 3, C is
worth 3, D is worth 2, E is worth 1, and so on (see SCRABBLE_LETTER_VALUES)
word: string (lowercase letters)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
returns: int >= 0
"""
    score = 0
    if not word:
        return score
    for letter in word:
        if letter in SCRABBLE_LETTER_VALUES:
            score += SCRABBLE_LETTER_VALUES[letter]
    score *= len(word)
    if len(word) == n:
        score += 50
    return score
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
"""
Snowflake
"""
# Original: http://www.cyberforum.ru/pascalabc/thread994987.html
# uses graphABC;
# const k=8;
# var x,y:integer;
# procedure snow (x0,y0,r,n:integer);
# const t=2*pi/k;
# var i,x,y:integer;
# begin
# for i:=1 to k do
# begin
# x:=x0+round(r*cos(i*t));
# y:=y0-round(r*sin(i*t));
# line(x0,y0,x,y);
# if n>1 then snow(x,y,r div 5,n-1);
# end;
# end;
# begin
# SetWindowSize(500,500);
# SetWindowCaption('Fractals: something resembling a snowflake');
# x:=windowwidth div 2;
# y:=windowheight div 2;
# snow(x,y,180,4);
# end.
from math import cos, pi, sin
def draw_snowflake(draw_by_image, width, height, count):
def draw(x0, y0, r, n):
t = 2 * pi / count
for i in range(count):
x = x0 + r * cos(i * t)
y = y0 - r * sin(i * t)
draw_by_image.line((x0, y0, x, y), 'black')
if n > 1:
draw(x, y, r // 5, n-1)
x = width // 2
y = height // 2
draw(x, y, 180, 4)
if __name__ == '__main__':
from PIL import Image, ImageDraw
img = Image.new("RGB", (500, 500), "white")
    # Number of repetitions (rays per level)
count = 8
draw_snowflake(ImageDraw.Draw(img), img.width, img.height, count)
img.save('img.png')
##
# Copyright (c) 2007-2016 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from caldavclientlibrary.browser.baseshell import BaseShell
from caldavclientlibrary.browser.command import Command
from caldavclientlibrary.client.account import CalDAVAccount
from getpass import getpass
from caldavclientlibrary.protocol.url import URL
import caldavclientlibrary.browser.commands
import atexit
import getopt
import sys
import urlparse
class Shell(BaseShell):
def __init__(self, server, path, user, pswd, logging, noHostRedirect=False, afunix=None):
super(Shell, self).__init__("caldav_client")
self.prefix = self.wd = "/"
self.server = server
self.user = user
self.pswd = pswd
self.registerCommands()
# Create the account
ssl = server.startswith("https://")
server = server[8:] if ssl else server[7:]
self.account = CalDAVAccount(server, ssl=ssl, afunix=afunix, user=self.user, pswd=self.pswd, root=path, principal=None, logging=logging, noHostRedirect=noHostRedirect)
atexit.register(self.saveHistory)
def registerCommands(self):
module = caldavclientlibrary.browser.commands
for item in module.__all__:
mod = __import__("caldavclientlibrary.browser.commands." + item, globals(), locals(), ["Cmd", ])
cmd_class = mod.Cmd
if type(cmd_class) is type and issubclass(cmd_class, Command):
self.registerCommand(cmd_class())
def setWD(self, newwd):
# Check that the new one exists
resource = (newwd if newwd.endswith("/") else newwd + "/")
if not self.account.session.testResource(URL(url=resource)):
return False
self.prefix = self.wd = newwd
return True
def setUserPswd(self, user, pswd):
self.user = user
self.pswd = pswd
self.account.setUserPswd(user, pswd)
def usage():
return """Usage: shell [OPTIONS]
Options:
-l start with HTTP logging on.
--server=HOST url of the server include http/https scheme and port [REQUIRED].
--unix=PATH path to unix socket to connect to server [OPTIONAL]
--user=USER user name to login as - will be prompted if not present [OPTIONAL].
--pswd=PSWD password for user - will be prompted if not present [OPTIONAL].
--no-host-redirect Don't allow the hostname to change when an HTTP redirect occurs [OPTIONAL]
"""
def runit():
logging = False
server = None
afunix = None
user = None
pswd = None
noHostRedirect = False
opts, _ignore_args = getopt.getopt(sys.argv[1:], 'lh', ["help", "server=", "unix=", "user=", "pswd=", "no-host-redirect"])
for name, value in opts:
if name == "-l":
logging = True
elif name == "--server":
server = value
elif name == "--unix":
afunix = value
elif name == "--user":
user = value
elif name == "--pswd":
pswd = value
elif name == "--no-host-redirect":
noHostRedirect = True
else:
print usage()
raise SystemExit()
if not server or not (server.startswith("http://") or server.startswith("https://")):
print usage()
raise SystemExit()
splits = urlparse.urlsplit(server)
server = splits.scheme + "://" + splits.netloc
path = splits.path
if not path:
path = "/"
if not user:
user = raw_input("User: ")
if not pswd:
pswd = getpass("Password: ")
shell = Shell(server, path, user, pswd, logging, noHostRedirect=noHostRedirect, afunix=afunix)
shell.run()
if __name__ == '__main__':
runit()
'''Author: Brandon Trabucco, Copyright 2019
Helper functions to display and run a simple game'''
from game_engine.colors import *
from game_engine.tiles import *
from game_engine.stacks import *
from game_engine.drawable import Drawable
import random
random.seed(12345)
#####################################
# lets make a game board to play on #
#####################################
class Board(Drawable):
def __init__(self, name, tiles):
super(Board, self).__init__(len(tiles), len(tiles[0]))
        assert all(len(t) == self.width for t in tiles)
self.name = name
self.tiles = tiles
self.entities = []
def place_tile(self, tile, x, y):
if not (x >= 0 and x < self.width and y >= 0 and y < self.height):
return False
self.tiles[y][x].place_tile(tile)
return True
def remove_tile(self, x, y):
if not (x >= 0 and x < self.width and y >= 0 and y < self.height):
return Null()
return self.tiles[y][x].remove_tile()
def add_entity(self, e):
if not (isinstance(e, Entity)):
return -1
self.entities.append(e)
if not (e.x >= 0 and e.x < self.width and e.y >= 0 and e.y < self.height):
return -1
self.tiles[e.y][e.x].place_tile(e)
return len(self.entities) - 1
def in_front_of(self, which_entity):
e = self.entities[which_entity]
dx, dy = [(0, -1), (1, 0), (0, 1), (-1, 0)][e.z]
if not (e.x + dx >= 0 and e.x + dx < self.width and e.y + dy >= 0 and e.y + dy < self.height):
return Null()
return self.tiles[e.y + dy][e.x + dx].first_tile
def break_in_front_of(self, which_entity):
e = self.entities[which_entity]
dx, dy = [(0, -1), (1, 0), (0, 1), (-1, 0)][e.z]
return self.remove_tile(e.x + dx, e.y + dy)
def face_entity(self, which_entity, dx, dy):
e = self.entities[which_entity]
e.face(dx, dy)
def shift_entity(self, which_entity, dx, dy):
e = self.entities[which_entity]
if isinstance(self.tiles[e.y][e.x].first_tile, Entity):
if not (e.x >= 0 and e.x < self.width and e.y >= 0 and e.y < self.height):
return False
self.tiles[e.y][e.x].remove_tile()
e.move(dx, dy)
if not (e.x >= 0 and e.x < self.width and e.y >= 0 and e.y < self.height):
return False
self.tiles[e.y][e.x].place_tile(e)
return True
def shift_destination(self, which_entity, dx, dy):
e = self.entities[which_entity]
x, y = e.x + dx, e.y + dy
if not (x >= 0 and x < self.width and y >= 0 and y < self.height):
return Null()
return self.tiles[y][x].first_tile
def move_entity(self, which_entity, x, y):
e = self.entities[which_entity]
dx, dy = x - e.x, y - e.y
self.shift_entity(which_entity, dx, dy)
def move_destination(self, which_entity, x, y):
e = self.entities[which_entity]
dx, dy = x - e.x, y - e.y
return self.shift_destination(which_entity, dx, dy)
def draw(self, canvas):
for x in range(self.width):
for y in range(self.height):
self.tiles[y][x].draw(canvas, x, y)
def undraw(self, canvas):
for x in range(self.width):
for y in range(self.height):
self.tiles[y][x].undraw(canvas)
class House(Board):
def __init__(self, name, height, width, offset):
tiles = [[None for x in range(width + 2 * offset)]
for y in range(height + 2 * offset)]
for x in range(width + 2 * offset):
for y in range(height + 2 * offset):
next_stack = Indoor()
if (x < offset or x >= width + offset or
y < offset or y >= height + offset ):
next_stack = Forest()
if random.random() > 0.6:
next_stack.remove_tile()
elif (x == offset or x == width + offset - 1 or
y == offset or y == height + offset - 1 ):
next_stack = Building()
tiles[y][x] = next_stack
super(House, self).__init__(name, tiles)
import rospy
from geometry_msgs.msg import Twist
from std_msgs.msg import String
import time
import sys, select, termios, tty
import donkeycar as dk
# import keyboard # using module keyboard
from pynput import keyboard
#############################################################################################
# keyboard
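# moveBindings maps pressed key combinations to (linear direction, angular direction).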
moveBindings = {
'w': (1, 0),
'a': (0, -1),
's': (-1.2, 0),
'd': (0, 1),
'wa': (1, -1),
'wd': (1, 1),
'sa': (-1.2, -1),
'sd': (-1.2, 1)
}
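# speedBindings maps a key to (linear factor, angular factor): 'z'/'x' scale only
# the linear speed and 'c'/'v' only the angular speed, by 10% per press.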
speedBindings={
'z': (1.1, 1),
'x': (.9, 1),
'c': (1, 1.1),
'v': (1, .9)
}
msg = '''
Reading from the keyboard and Publishing to Twist!
---------------------------
Moving around:
w
a s d
anything else : stop
z/x : increase/decrease only linear speed by 10%
c/v : increase/decrease only angular speed by 10%
CTRL-C to quit
'''
class KeyboardRun:
key_w = False
key_a = False
key_s = False
key_d = False
key_z = False
key_x = False
key_c = False
key_v = False
key_esc = False
cmd_controller_pub_str = 'keyboard'
def __init__(self):
# settings = termios.tcgetattr(sys.stdin)
rospy.init_node('keyboard_node')
self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
self.cmd_controller_pub = rospy.Publisher('cmd_controller', String, queue_size=1)
self.speed = rospy.get_param("~speed", 0.19)
self.turn = rospy.get_param("~turn", 0.7)
self.x = 0
self.y = 0
self.z = 0
self.th = 0
self.status = 0
self.switch = False
def run(self):
self.listener = keyboard.Listener(
on_press=self.on_press,
on_release=self.on_release)
self.listener.start()
self.switch = True
try:
print(msg)
print(self.vels(self.speed, self.turn))
            while self.listener.is_alive():
if self.key_w == True and self.key_a == False and self.key_d == False:
self.x, self.th = moveBindings['w']
elif self.key_s == True and self.key_a == False and self.key_d == False:
self.x, self.th = moveBindings['s']
elif self.key_w == True and self.key_a == True:
self.x, self.th = moveBindings['wa']
elif self.key_w == True and self.key_d == True:
self.x, self.th = moveBindings['wd']
elif self.key_s == True and self.key_a == True:
self.x, self.th = moveBindings['sa']
elif self.key_s == True and self.key_d == True:
self.x, self.th = moveBindings['sd']
elif self.key_d == True:
self.x, self.th = moveBindings['d']
elif self.key_a == True:
self.x, self.th = moveBindings['a']
                elif self.key_z == True:
                    self.speed *= speedBindings['z'][0]
                    self.turn *= speedBindings['z'][1]
                    print(self.vels(self.speed, self.turn))
                    if (self.status == 14):
                        print(msg)
                    self.status = (self.status + 1) % 15
                elif self.key_x == True:
                    self.speed *= speedBindings['x'][0]
                    self.turn *= speedBindings['x'][1]
                    print(self.vels(self.speed, self.turn))
                    if (self.status == 14):
                        print(msg)
                    self.status = (self.status + 1) % 15
                elif self.key_c == True:
                    self.speed *= speedBindings['c'][0]
                    self.turn *= speedBindings['c'][1]
                    print(self.vels(self.speed, self.turn))
                    if (self.status == 14):
                        print(msg)
                    self.status = (self.status + 1) % 15
                elif self.key_v == True:
                    self.speed *= speedBindings['v'][0]
                    self.turn *= speedBindings['v'][1]
                    print(self.vels(self.speed, self.turn))
                    if (self.status == 14):
                        print(msg)
                    self.status = (self.status + 1) % 15
                elif self.key_esc == True:
                    print('exit')
                    self.switch = False
                    self.listener.stop()
                    break
else:
self.x = 0
self.y = 0
self.z = 0
self.th = 0
twist = Twist()
twist.linear.x = self.x * self.speed
twist.linear.y = 0
twist.linear.z = 0
twist.angular.x = 0
twist.angular.y = 0
twist.angular.z = self.th * self.turn
self.cmd_controller_pub.publish(self.cmd_controller_pub_str)
self.cmd_vel_pub.publish(twist)
print('twist value : ',twist.linear.x,twist.angular.z)
time.sleep(0.1)
except Exception as e:
print(e)
def on_press(self, key):
try:
print('alphanumeric key {0} pressed'.format(key.char))
_key = key.char
if _key == 'w':
self.key_w = True
elif _key == 'a':
self.key_a = True
elif _key == 's':
self.key_s = True
elif _key == 'd':
self.key_d = True
elif _key == 'z':
self.key_z = True
elif _key == 'x':
self.key_x = True
elif _key == 'c':
self.key_c = True
elif _key == 'v':
self.key_v = True
        except AttributeError:
            # special keys (e.g. Esc) have no .char attribute
            if key == keyboard.Key.esc:
                self.key_esc = True
            print('special key {0} pressed'.format(key))
def on_release(self, key):
try:
print('{0} released'.format(key))
_key = key.char
if _key == 'w':
self.key_w = False
elif _key == 'a':
self.key_a = False
elif _key == 's':
self.key_s = False
elif _key == 'd':
self.key_d = False
elif _key == 'z':
self.key_z = False
elif _key == 'x':
self.key_x = False
elif _key == 'c':
self.key_c = False
elif _key == 'v':
self.key_v = False
        except AttributeError:
            print('special key {0} released'.format(key))
def vels(self, speed, turn):
return "currently:\tspeed %s\tturn %s " % (speed, turn)
if __name__ == "__main__":
    cfg = dk.load_config()
    keyboardRun = KeyboardRun()
    keyboardRun.run()
from os import getcwd
from re import findall
from re import match
def parseStep(line):
state = match(r"(on|off)", line).groups()[0]
x1, x2, y1, y2, z1, z2 = map(int, findall(r"(-?\d+)", line))
return state, (x1, x2), (y1, y2), (z1, z2)
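# e.g. parseStep("on x=10..12,y=10..12,z=-12..-10")
#   -> ("on", (10, 12), (10, 12), (-12, -10))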
def main():
with open(f"{getcwd()}/2021/day22/input.txt") as file:
file = file.readlines()
steps = list(map(parseStep, file))
    # lists of each axis's critical values (values that lie on the boundaries of regions)
critX = []
critY = []
critZ = []
for step in steps:
_, x, y, z = step
critX.append(x[0])
critX.append(x[1] + 1)
critY.append(y[0])
critY.append(y[1] + 1)
critZ.append(z[0])
critZ.append(z[1] + 1)
steps.reverse()
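    # steps are reversed so that, for each cell, the first matching step found
    # below is the most recently applied instruction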
critX.sort()
critY.sort()
critZ.sort()
# now we iterate over all regions, adding the volume of each "on" region
total = 0
for x1, x2 in zip(critX, critX[1:]):
print(f"Tallying x={x1} .. {x2}")
xRegs = [v for v in steps if v[1][0] <= x1 <= v[1][1]]
for y1, y2 in zip(critY, critY[1:]):
yRegs = [v for v in xRegs if v[2][0] <= y1 <= v[2][1]]
for z1, z2 in zip(critZ, critZ[1:]):
if next((s == "on" for s, _, _, z in yRegs if z[0] <= z1 <= z[1]), False):
total += (x2 - x1) * (y2 - y1) * (z2 - z1)
print(f"{total} cubes are left on")
if __name__ == "__main__":
main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
" Help code for sl training "
import traceback
import numpy as np
import torch
import torch.nn as nn
from pysc2.lib.actions import RAW_FUNCTIONS as F
from alphastarmini.core.arch.agent import Agent
from alphastarmini.core.sl.feature import Feature
from alphastarmini.core.sl.label import Label
from alphastarmini.lib.hyper_parameters import StarCraft_Hyper_Parameters as SCHP
from alphastarmini.lib.hyper_parameters import Label_Size as LS
from alphastarmini.lib.hyper_parameters import Arch_Hyper_Parameters as AHP
from alphastarmini.lib.sc2 import raw_actions_mapping_protoss as RAMP
debug = False
def obs2feature(obs):
s = Agent.get_state_and_action_from_pickle(obs)
feature = Feature.state2feature(s)
print("feature:", feature) if debug else None
print("feature.shape:", feature.shape) if debug else None
print("begin a:") if debug else None
func_call = obs['func_call']
action = Agent.func_call_to_action(func_call).toTenser()
#tag_list = agent.get_tag_list(obs)
print('action.get_shape:', action.get_shape()) if debug else None
logits = action.toLogits()
print('logits.shape:', logits) if debug else None
label = Label.action2label(logits)
print("label:", label) if debug else None
print("label.shape:", label.shape) if debug else None
return feature, label
def obs2feature_numpy(obs):
s = Agent.get_state_and_action_from_pickle_numpy(obs)
feature = Feature.state2feature_numpy(s)
print("feature:", feature) if debug else None
print("feature.shape:", feature.shape) if debug else None
print("begin a:") if debug else None
func_call = obs['func_call']
action = Agent.func_call_to_action(func_call).toArray()
#tag_list = agent.get_tag_list(obs)
print('action.get_shape:', action.get_shape()) if debug else None
logits = action.toLogits_numpy()
print('logits.shape:', logits) if debug else None
label = Label.action2label_numpy(logits)
print("label:", label) if debug else None
print("label.shape:", label.shape) if debug else None
return feature, label
def obsToTensor(obs, final_index_list, seq_len, index=0):
# index (assumed default 0): starting frame offset of this slice within the full
# trajectory; the final-index checks below compare each final index against it
feature_list = []
label_list = []
for value in obs:
feature, label = obs2feature(value)
feature_list.append(feature)
label_list.append(label)
features = torch.cat(feature_list, dim=0)
print("features.shape:", features.shape) if debug else None
labels = torch.cat(label_list, dim=0)
print("labels.shape:", labels.shape) if debug else None
is_final = torch.zeros([features.shape[0], 1])
# consider is_final: mark the rows whose frame ends an episode
print('begin', index) if debug else None
print('end', index + seq_len) if debug else None
for j in final_index_list:
print('j', j) if debug else None
if j >= index and j < index + seq_len:
if debug:
print('in it!')
print('begin', index)
print('end', index + seq_len)
print('j', j)
is_final[j - index, 0] = 1
else:
pass
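# each row of one_traj is [feature | label | is_final] concatenated along dim=1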
one_traj = torch.cat([features, labels, is_final], dim=1)
print("one_traj.shape:", one_traj.shape) if debug else None
return one_traj
def get_mask_by_raw_action_id(raw_action_id):
need_args = F[raw_action_id].args
# mask slots: [action_type, delay, queued, unit_tags, target_unit_tag, world];
# action type and delay are always enabled
action_mask = [1, 1, 0, 0, 0, 0]
for arg in need_args:
print("arg:", arg) if debug else None
if arg.name == 'queued':
action_mask[2] = 1
elif arg.name == 'unit_tags':
action_mask[3] = 1
elif arg.name == 'target_unit_tag':
action_mask[4] = 1
elif arg.name == 'world':
action_mask[5] = 1
print('action_mask:', action_mask) if debug else None
return action_mask
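# e.g. for an action whose only argument is `world` (as with raw_move_camera,
# assuming its pysc2 RAW_FUNCTIONS signature), this returns [1, 1, 0, 0, 0, 1]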
def get_one_way_mask_in_SL(action_type_gt, device):
# only consider the ground truth
# the action_type_gt is a one-hot embedding
ground_truth_raw_action_id = torch.nonzero(action_type_gt, as_tuple=True)[-1]
mask_list = []
for raw_action_id in ground_truth_raw_action_id:
mask_list.append(get_mask_by_raw_action_id(raw_action_id.item()))
mask_tensor = torch.tensor(mask_list)
mask_tensor = mask_tensor.to(device)
return mask_tensor
def get_two_way_mask_in_SL(action_type_gt, action_pred, device, strict_comparsion=True):
# consider the ground truth and the predicted
ground_truth_raw_action_id = torch.nonzero(action_type_gt, as_tuple=True)[-1]
action_pred = action_pred.reshape(-1)
mask_list = []
mask_list_2 = []
print('ground_truth.shape', ground_truth_raw_action_id.shape) if debug else None
print('ground_truth', ground_truth_raw_action_id) if debug else None
print('action_pred.shape', action_pred.shape) if debug else None
print('action_pred', action_pred) if debug else None
for raw_action_id, action_id in zip(ground_truth_raw_action_id, action_pred):
mask_raw = get_mask_by_raw_action_id(raw_action_id.item())
mask_predict = get_mask_by_raw_action_id(action_id.item())
if strict_comparsion:
if raw_action_id.item() == action_id.item():
mask_list.append(mask_raw)
mask_list_2.append(mask_predict)
else:
zero_mask = [1, 1, 0, 0, 0, 0]
mask_list.append(zero_mask)
mask_list_2.append(zero_mask)
else:
mask_list.append(mask_raw)
mask_list_2.append(mask_predict)
mask_tensor = torch.tensor(mask_list)
mask_tensor_2 = torch.tensor(mask_list_2)
print('mask_tensor', mask_tensor) if debug else None
print('mask_tensor_2', mask_tensor_2) if debug else None
mask_tensor_return = mask_tensor * mask_tensor_2
print('mask_tensor_return', mask_tensor_return) if debug else None
mask_tensor_return = mask_tensor_return.to(device)
return mask_tensor_return
def get_move_camera_weight_in_SL(action_type_gt, action_pred, device,
decrease_smart_opertaion=False, only_consider_small=False):
# consider the ground truth and the predicted
ground_truth_raw_action_id = torch.nonzero(action_type_gt, as_tuple=True)[-1]
mask_list = []
MOVE_CAMERA_ID = F.raw_move_camera.id
Smart_pt_id = F.Smart_pt.id
Smart_unit_id = F.Smart_unit.id
# Note: in SC2 replays, move_camera accounts for roughly 50% of all actions.
# Assuming every other action occurs at the same rate, move_camera gets a
# weight of 1, and the non-move_camera weight is MAX_ACTIONS / 2. / alpha.
MOVE_CAMERA_WEIGHT = 1.  # 1. / LS.action_type_encoding * 2.
alpha = 40.
NON_MOVE_CAMERA_WEIGHT = LS.action_type_encoding / 2. / alpha
SMALL_IMPORTANT_WEIGHT = NON_MOVE_CAMERA_WEIGHT * 5
# Note: human replays contain many operations like Smart_pt and Smart_unit.
# These actions carry little signal (it is hard to filter unit types for
# selection from them), so we also choose to decrease their weight.
if decrease_smart_opertaion:
# TODO: change these ids to Func.id
SMART_WEIGHT = 1.5
else:
SMART_WEIGHT = NON_MOVE_CAMERA_WEIGHT
print('ground_truth_raw_action_id', ground_truth_raw_action_id) if debug else None
for raw_action_id in ground_truth_raw_action_id:
aid = raw_action_id.item()
if not only_consider_small:
if aid == MOVE_CAMERA_ID:
mask_list.append([MOVE_CAMERA_WEIGHT])
elif aid == Smart_pt_id:
mask_list.append([SMART_WEIGHT])
elif aid == Smart_unit_id:
mask_list.append([SMART_WEIGHT])
else:
# func_name = F[aid].name
# select, _, _ = RAMP.SMALL_MAPPING.get(func_name, [None, None, 1])
if aid in RAMP.SMALL_LIST:
mask_list.append([SMALL_IMPORTANT_WEIGHT])
else:
mask_list.append([NON_MOVE_CAMERA_WEIGHT])
else:
if aid in RAMP.SMALL_LIST:
mask_list.append([SMALL_IMPORTANT_WEIGHT])
else:
mask_list.append([1.])
mask_tensor = torch.tensor(mask_list)
print('mask_tensor', mask_tensor) if debug else None
# optionally also weight by the predicted action (currently disabled)
if False:
mask_list_2 = []
for action_id in action_pred:
if action_id.item() == MOVE_CAMERA_ID:
mask_list_2.append([MOVE_CAMERA_WEIGHT])
else:
mask_list_2.append([NON_MOVE_CAMERA_WEIGHT])
mask_tensor_2 = torch.tensor(mask_list_2)
mask_tensor = mask_tensor * mask_tensor_2
mask_tensor = mask_tensor.to(device)
return mask_tensor
def get_selected_units_accuracy(ground_truth, predict, select_units_num, action_equal_mask,
device, strict_comparsion=True, use_strict_order=False):
all_num, correct_num, gt_num, pred_num = 0, 0, 1, 0
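# note: gt_num starts at 1, presumably to guard a later division by zero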
if strict_comparsion:
action_equal_index = action_equal_mask.nonzero(as_tuple=True)[0]
ground_truth = ground_truth[action_equal_index]
predict = predict[action_equal_index]
if ground_truth.shape[0] > 0:
size = ground_truth.shape[0]
NONE_INDEX = AHP.max_entities - 1
for i in range(size):
ground_truth_sample = ground_truth[i]
ground_truth_new = torch.nonzero(ground_truth_sample, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.cpu().detach().numpy().tolist()
print('ground_truth units', ground_truth_new) if debug else None
predict_sample = predict[i].reshape(-1)
print('predict_sample units', predict_sample) if debug else None
select_units_num_sample = select_units_num[i].item()
print('select_units_num_sample units', select_units_num_sample) if debug else None
for j in range(select_units_num_sample):
pred = predict_sample[j].item()
gt = ground_truth_new[j]
if gt != NONE_INDEX: # the last index is the None index
gt_num += 1
if use_strict_order:
if pred == gt and pred != NONE_INDEX:
correct_num += 1
else:
if pred in ground_truth_new and pred != NONE_INDEX:
correct_num += 1
pred_num += 1
all_num += AHP.max_selected
print('get_selected_units_accuracy', [correct_num, gt_num, pred_num, all_num])
return [correct_num, gt_num, pred_num, all_num]
def get_target_unit_accuracy(ground_truth, predict, action_equal_mask, device,
strict_comparsion=True, remove_none=True):
right_num, all_num = 0, 0
if strict_comparsion:
action_equal_index = action_equal_mask.nonzero(as_tuple=True)[0]
ground_truth = ground_truth[action_equal_index]
predict = predict[action_equal_index]
if ground_truth.shape[0] > 0:
print('ground_truth target_unit', ground_truth)
ground_truth_new = torch.nonzero(ground_truth, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.to(device)
print('ground_truth_new target_unit', ground_truth_new) if debug else None
predict_new = predict.reshape(-1)
print('predict_new target_unit', predict_new)
NONE_ID = AHP.max_entities - 1
if remove_none:
effect_index = (ground_truth_new != NONE_ID).nonzero(as_tuple=True)[0]
ground_truth_new = ground_truth_new[effect_index]
predict_new = predict_new[effect_index]
right_num, all_num = get_right_and_all_num(ground_truth_new, predict_new)
print('get_target_unit_accuracy', [right_num, all_num])
return [right_num, all_num]
def get_location_accuracy(ground_truth, predict, action_equal_mask, device, strict_comparsion=True):
all_nums = ground_truth.shape[0]
effect_nums = 0  # counted when the location argument is applied in both ground_truth and predict
correct_nums = 0
distance_loss = 0.
if strict_comparsion:
action_equal_index = action_equal_mask.nonzero(as_tuple=True)[0]
ground_truth = ground_truth[action_equal_index]
predict = predict[action_equal_index]
if ground_truth.shape[0] > 0:
ground_truth = ground_truth.reshape(ground_truth.shape[0], -1)
ground_truth_new = torch.nonzero(ground_truth, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.to(device)
print('ground_truth location', ground_truth_new) if debug else None
output_map_size = SCHP.world_size
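# ground-truth locations are one-hot over the flattened world map, so dividing
# the flat index by the map width recovers (row = y, col = x)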
for i, idx in enumerate(ground_truth_new):
row_number = idx // output_map_size
col_number = idx - output_map_size * row_number
gt_location_y = row_number
gt_location_x = col_number
print("gt_location_y, gt_location_x", gt_location_y, gt_location_x) if debug else None
[predict_x, predict_y] = predict[i]
print("predict_x, predict_y", predict_x, predict_y) if debug else None
x_diff_square = (predict_x.item() - gt_location_x.item()) ** 2
y_diff_square = (predict_y.item() - gt_location_y.item()) ** 2
print('x_diff_square', x_diff_square) if debug else None
print('y_diff_square', y_diff_square) if debug else None
# pos(output_map_size-1, output_map_size-1) is treated as a flag meaning this argument
# is not applied for this action; e.g., we will hardly ever choose or see a point at
# pos(output_map_size-1, output_map_size-1)
if not (gt_location_y.item() == output_map_size - 1 and gt_location_x.item() == output_map_size - 1):  # the last index is the None index
if not (predict_x.item() == 0 and predict_y.item() == 0):
effect_nums += 1
diff_square = x_diff_square + y_diff_square
distance_loss += diff_square
if diff_square == 0:
correct_nums += 1
print('get_location_accuracy', [correct_nums, effect_nums, all_nums, distance_loss]) if debug else None
return [correct_nums, effect_nums, all_nums, distance_loss]
def get_accuracy(ground_truth, predict, device, return_important=False):
accuracy = 0.
ground_truth_new = torch.nonzero(ground_truth, as_tuple=True)[-1]
ground_truth_new = ground_truth_new.to(device)
print('ground_truth action_type', ground_truth_new) if debug else None
predict_new = predict.reshape(-1)
print('predict_new', predict_new) if debug else None
# shape: [batch_size]
action_equal_mask = (ground_truth_new == predict_new)
# count how many of the actions are move_camera; its id in raw_actions is 168
MOVE_CAMERA_ID = 168
#camera_num_action_type = torch.sum(MOVE_CAMERA_ID == ground_truth_new)
move_camera_index = (ground_truth_new == MOVE_CAMERA_ID).nonzero(as_tuple=True)[0]
non_camera_index = (ground_truth_new != MOVE_CAMERA_ID).nonzero(as_tuple=True)[0]
short_important_list = []
for j in RAMP.SMALL_MAPPING.keys():
aid = F[j].id.value
print('aid', aid) if debug else None
short_index = (ground_truth_new == aid).nonzero(as_tuple=True)[0]
print('short_index', short_index) if debug else None
short_important_list.append(short_index)
short_important_index = torch.cat(short_important_list)
print('short_important_index', short_important_index) if debug else None
print('move_camera_index', move_camera_index) if debug else None
print('non_camera_index', non_camera_index) if debug else None
print('for any type action') if debug else None
right_num, all_num = get_right_and_all_num(ground_truth_new, predict_new)
print('for move_camera action') if debug else None
camera_ground_truth_new = ground_truth_new[move_camera_index]
camera_predict_new = predict_new[move_camera_index]
camera_right_num, camera_all_num = get_right_and_all_num(camera_ground_truth_new, camera_predict_new)
print('for non-camera action') if debug else None
non_camera_ground_truth_new = ground_truth_new[non_camera_index]
non_camera_predict_new = predict_new[non_camera_index]
non_camera_right_num, non_camera_all_num = get_right_and_all_num(non_camera_ground_truth_new, non_camera_predict_new)
print('for short-important action') if debug else None
short_important_ground_truth_new = ground_truth_new[short_important_index]
short_important_predict_new = predict_new[short_important_index]
short_important_right_num, short_important_all_num = get_right_and_all_num(short_important_ground_truth_new, short_important_predict_new)
acc_list = [right_num, all_num, camera_right_num, camera_all_num, non_camera_right_num,
non_camera_all_num, short_important_right_num, short_important_all_num]
return acc_list, action_equal_mask
def get_right_and_all_num(gt, pred):
acc_num_action_type = torch.sum(pred == gt)
print('acc_num_action_type', acc_num_action_type) if debug else None
right_num = acc_num_action_type.item()
print('right_num', right_num) if debug else None
all_num = gt.shape[0]
print('all_num', all_num) if debug else None
accuracy = right_num / (all_num + 1e-9)
print('accuracy', accuracy) if debug else None
return right_num, all_num
|
nilq/baby-python
|
python
|
from ..catalogs.in_memory import Catalog
from .dataframe import DataFrameAdapter
import dask.dataframe
import pandas
class ExcelReader(Catalog):
"""
Read the sheets in an Excel file.
This maps the Excel file, which may contain one or more spreadsheets,
onto a "Catalog" of tabular structures.
Examples
--------
Given a file path
>>> ExcelReader.from_file("path/to/excel_file.xlsx")
Given a file object
>>> file = open("path/to/excel_file.xlsx")
>>> ExcelReader.from_file(file)
Given a pandas.ExcelFile object
>>> import pandas
>>> ef = pandas.ExcelFile(file)
>>> ExcelReader.from_file(ef)
"""
@classmethod
def from_file(cls, file):
if isinstance(file, pandas.ExcelFile):
excel_file = file
else:
excel_file = pandas.ExcelFile(file)
mapping = {
sheet_name: DataFrameAdapter(
dask.dataframe.from_pandas(
excel_file.parse(sheet_name),
npartitions=1, # TODO Be smarter about this.
)
)
for sheet_name in excel_file.sheet_names
}
return cls(mapping)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tool to manage local 3D Beacon install
.. currentmodule:: bio3dbeacon
.. moduleauthor:: Ian Sillitoe <i.sillitoe@ucl.ac.uk>
"""
from .version import __version__, __release__ # noqa
|
nilq/baby-python
|
python
|
import mido
import cv2
#Color library: a dictionary mapping RGB values to Launchpad MK2 velocity values
from ClearLaunchpad import RemoveNotes, ClearScreen
from FirstMido import FillNotes
cap = cv2.imread("Velocity2RGB.png")
Complete = cap.copy()
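# Flow: threshold the reference image, find the contour of each color cell,
# then mark each cell's centroid so its (x, y) grid position can be read off.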
while(True):
Mat = cv2.inRange(cap, (0, 0, 0), (254, 254, 254))
cv2.imshow("Mat", Mat)
contours, hierarchy = cv2.findContours(Mat, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
for i, c in enumerate(contours):
if hierarchy[0][i][2] == -1 or hierarchy[0][i][2] > 1:
if cv2.contourArea(c) < 60000 and cv2.contourArea(c) > 1000:
M = cv2.moments(c)  # compute the contour moments once
try:
    cX = int(M["m10"] / M["m00"])
except ZeroDivisionError:
    cX = 0
try:
    cY = int(M["m01"] / M["m00"])
except ZeroDivisionError:
    cY = 0
points = cv2.circle(Complete, (cX, cY), 0, (255, 255, 255), -1)
print(cX, cY)
cv2.imshow("Final", Complete)
k = cv2.waitKey(32)
if k == 32:
break
# cap came from cv2.imread (a plain image array), so there is no capture to release
cv2.destroyAllWindows()
#Each center on the x axis is spaced out by 45 units, starting with 20 as the first center point (left to right); min of 20, max of 335.
#As for y, the first center is at 21 and also moves by 45 units (we can go with 20 and it will still be the same); min of 20, max of 756.
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2012, Rui Carmo
Description: In-process job management
License: MIT (see LICENSE.md for details)
"""
import os, sys, logging, time, traceback, multiprocessing, gc
from cPickle import loads, dumps
from Queue import PriorityQueue, Empty
from threading import Thread, Semaphore
from uuid import uuid4
from functools import partial
from collections import defaultdict
log = logging.getLogger(__name__)
default_priority = 0
max_workers = multiprocessing.cpu_count() * 2
class Pool:
"""Represents a thread pool"""
def __init__(self, workers = max_workers, rate_limit = 1000):
self.max_workers = workers
self.mutex = Semaphore()
self.results = {}
self.retries = defaultdict(int)
self.queue = PriorityQueue()
self.threads = []
self.rate_limit = rate_limit
def _tick(self):
time.sleep(1.0/self.rate_limit)
# clean up finished threads
self.threads = [t for t in self.threads if t.isAlive()]
return (not self.queue.empty()) or (len(self.threads) > 0)
def _loop(self):
"""Handle task submissions"""
def run_task(priority, f, uuid, retries, args, kwargs):
"""Run a single task"""
try:
t.name = getattr(f, '__name__', None)
result = f(*args, **kwargs)
except Exception as e:
# Retry the task if applicable
if log:
log.error(traceback.format_exc())
if retries > 0:
with self.mutex:
self.retries[uuid] += 1
# re-queue the task with a lower (i.e., higher-valued) priority
self.queue.put((priority+1, dumps((f, uuid, retries - 1, args, kwargs))))
self.queue.task_done()
return
result = e
with self.mutex:
self.results[uuid] = dumps(result)
self.retries[uuid] += 1
self.queue.task_done()
while self._tick():
# spawn more threads to fill free slots
log.warn("Running %d/%d threads" % (len(self.threads),self.max_workers))
if len(self.threads) < self.max_workers:
log.debug("Queue Length: %d" % self.queue.qsize())
try:
priority, data = self.queue.get(True, 1.0/self.rate_limit)
except Empty:
continue
f, uuid, retries, args, kwargs = loads(data)
t = Thread(target=run_task, args=[priority, f, uuid, retries, args, kwargs])
t.setDaemon(True)
self.threads.append(t)
t.start()
log.debug("Exited loop.")
for t in self.threads:
t.join()
def stop(self):
"""Flush the job queue"""
self.queue = PriorityQueue()
def start(self, daemonize=False):
"""Pool entry point"""
self.results = {}
self.retries = defaultdict(int)
if daemonize:
t = Thread(target=self._loop)  # _loop is a bound method; passing self again would raise a TypeError
t.setDaemon(True)
t.start()
return
else:
self._loop()
default_pool = Pool()
class Deferred(object):
"""Allows lookup of task results and status"""
def __init__(self, pool, uuid):
self.uuid = uuid
self.pool = pool
self._result = None
@property
def result(self):
if self._result is None:
with self.pool.mutex:
if self.uuid in self.pool.results.keys():
self._result = loads(self.pool.results[self.uuid])
return self._result
@property
def retries(self):
return self.pool.retries[self.uuid]
def task(func=None, pool=None, max_retries=0, priority=default_priority):
"""Task decorator - setus up a .delay() attribute in the task function"""
if func is None:
return partial(task, pool=pool, max_retries=max_retries)
if pool is None:
pool = default_pool
def delay(*args, **kwargs):
uuid = str(uuid4()) # one for each task
pool.queue.put((priority,dumps((func, uuid, max_retries, args, kwargs))))
return Deferred(pool, uuid)
func.delay = delay
func.pool = pool
return func
def start(daemonize = False):
default_pool.start(daemonize = daemonize)
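# Minimal usage sketch (hypothetical `fetch` task; assumes the defaults above):
#
#   @task(max_retries=2)
#   def fetch(url):
#       return url.upper()
#
#   d = fetch.delay("http://example.com")  # enqueue; returns a Deferred
#   start()                                # drain the queue in the foreground
#   print(d.result)                        # unpickled result once the task ran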
|
nilq/baby-python
|
python
|
# Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import cirq
import cirq.testing
def assert_dirac_notation_numpy(vec, expected, decimals=2):
assert cirq.dirac_notation(np.array(vec), decimals=decimals) == expected
def assert_dirac_notation_python(vec, expected, decimals=2):
assert cirq.dirac_notation(vec, decimals=decimals) == expected
def assert_valid_density_matrix(matrix, num_qubits=None, qid_shape=None):
if qid_shape is None and num_qubits is None:
num_qubits = 1
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
matrix, num_qubits=num_qubits, qid_shape=qid_shape, dtype=matrix.dtype
),
matrix,
)
def test_quantum_state():
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex128)
state_tensor_1 = np.reshape(state_vector_1, (2, 2))
density_matrix_1 = np.outer(state_vector_1, np.conj(state_vector_1))
state = cirq.QuantumState(state_vector_1)
assert state.data is state_vector_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
np.testing.assert_array_equal(state.state_vector(), state_vector_1)
np.testing.assert_array_equal(state.state_tensor(), state_tensor_1)
np.testing.assert_array_equal(state.density_matrix(), density_matrix_1)
np.testing.assert_array_equal(state.state_vector_or_density_matrix(), state_vector_1)
state = cirq.QuantumState(state_tensor_1, qid_shape=(2, 2))
assert state.data is state_tensor_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
np.testing.assert_array_equal(state.state_vector(), state_vector_1)
np.testing.assert_array_equal(state.state_tensor(), state_tensor_1)
np.testing.assert_array_equal(state.density_matrix(), density_matrix_1)
np.testing.assert_array_equal(state.state_vector_or_density_matrix(), state_vector_1)
state = cirq.QuantumState(density_matrix_1, qid_shape=(2, 2))
assert state.data is density_matrix_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
assert state.state_vector() is None
assert state.state_tensor() is None
np.testing.assert_array_equal(state.density_matrix(), density_matrix_1)
np.testing.assert_array_equal(state.state_vector_or_density_matrix(), density_matrix_1)
def test_quantum_state_quantum_state():
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex128)
quantum_state = cirq.QuantumState(state_vector_1)
state = cirq.quantum_state(quantum_state)
assert state is quantum_state
assert state.data is quantum_state.data
assert state.dtype == np.complex128
state = cirq.quantum_state(quantum_state, copy=True)
assert state is not quantum_state
assert state.data is not quantum_state.data
assert state.dtype == np.complex128
state = cirq.quantum_state(quantum_state, dtype=np.complex64)
assert state is not quantum_state
assert state.data is not quantum_state.data
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='qid shape'):
state = cirq.quantum_state(quantum_state, qid_shape=(4,))
def test_quantum_state_computational_basis_state():
state = cirq.quantum_state(7, qid_shape=(3, 4))
np.testing.assert_allclose(state.data, cirq.one_hot(index=7, shape=(12,), dtype=np.complex64))
assert state.qid_shape == (3, 4)
assert state.dtype == np.complex64
state = cirq.quantum_state((0, 1, 2, 3), qid_shape=(1, 2, 3, 4), dtype=np.complex128)
np.testing.assert_allclose(
state.data, cirq.one_hot(index=(0, 1, 2, 3), shape=(1, 2, 3, 4), dtype=np.complex64)
)
assert state.qid_shape == (1, 2, 3, 4)
assert state.dtype == np.complex128
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state(7)
with pytest.raises(ValueError, match='out of range'):
_ = cirq.quantum_state(7, qid_shape=(2, 2))
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state((0, 1, 2, 3))
with pytest.raises(ValueError, match='out of bounds'):
_ = cirq.quantum_state((0, 1, 2, 3), qid_shape=(2, 2, 2, 2))
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state((0, 0, 1, 1), qid_shape=(1, 1, 2, 2))
def test_quantum_state_state_vector_state_tensor():
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex128)
state_tensor_1 = np.reshape(state_vector_1, (2, 2))
state = cirq.quantum_state(state_vector_1, dtype=np.complex64)
np.testing.assert_array_equal(state.data, state_vector_1)
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex64
state = cirq.quantum_state(state_tensor_1, qid_shape=(2, 2))
assert state.data is state_tensor_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex128
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.quantum_state(state_tensor_1)
with pytest.raises(ValueError, match='not compatible'):
_ = cirq.quantum_state(state_tensor_1, qid_shape=(2, 3))
def test_quantum_state_density_matrix():
density_matrix_1 = np.eye(4, dtype=np.complex64) / 4
state = cirq.quantum_state(density_matrix_1, qid_shape=(4,), copy=True)
assert state.data is not density_matrix_1
np.testing.assert_array_equal(state.data, density_matrix_1)
assert state.qid_shape == (4,)
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='not compatible'):
_ = cirq.quantum_state(density_matrix_1, qid_shape=(8,))
def test_quantum_state_product_state():
q0, q1, q2 = cirq.LineQubit.range(3)
product_state_1 = cirq.KET_PLUS(q0) * cirq.KET_PLUS(q1) * cirq.KET_ONE(q2)
state = cirq.quantum_state(product_state_1)
np.testing.assert_allclose(state.data, product_state_1.state_vector())
assert state.qid_shape == (2, 2, 2)
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='qid shape'):
_ = cirq.quantum_state(product_state_1, qid_shape=(2, 2))
def test_density_matrix():
density_matrix_1 = np.eye(4, dtype=np.complex64) / 4
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex64)
state = cirq.density_matrix(density_matrix_1)
assert state.data is density_matrix_1
assert state.qid_shape == (2, 2)
assert state.dtype == np.complex64
with pytest.raises(ValueError, match='square'):
_ = cirq.density_matrix(state_vector_1)
def test_infer_qid_shape():
computational_basis_state_1 = [0, 0, 0, 1]
computational_basis_state_2 = [0, 1, 2, 3]
computational_basis_state_3 = [0, 1, 2, 4]
computational_basis_state_4 = 9
computational_basis_state_5 = [0, 1, 2, 4, 5]
state_vector_1 = cirq.one_hot(shape=(4,), dtype=np.complex64)
state_vector_2 = cirq.one_hot(shape=(24,), dtype=np.complex64)
state_tensor_1 = np.reshape(state_vector_1, (2, 2))
state_tensor_2 = np.reshape(state_vector_2, (1, 2, 3, 4))
density_matrix_1 = np.eye(4, dtype=np.complex64) / 4
density_matrix_2 = np.eye(24, dtype=np.complex64) / 24
q0, q1 = cirq.LineQubit.range(2)
product_state_1 = cirq.KET_PLUS(q0) * cirq.KET_PLUS(q1)
assert cirq.qis.infer_qid_shape(
computational_basis_state_1,
state_vector_1,
state_tensor_1,
density_matrix_1,
product_state_1,
) == (2, 2)
assert cirq.qis.infer_qid_shape(
product_state_1,
density_matrix_1,
state_tensor_1,
state_vector_1,
computational_basis_state_1,
) == (2, 2)
assert cirq.qis.infer_qid_shape(
computational_basis_state_1,
computational_basis_state_2,
computational_basis_state_4,
state_tensor_2,
) == (1, 2, 3, 4)
assert cirq.qis.infer_qid_shape(
state_vector_2, density_matrix_2, computational_basis_state_4
) == (24,)
assert cirq.qis.infer_qid_shape(state_tensor_2, density_matrix_2) == (1, 2, 3, 4)
assert cirq.qis.infer_qid_shape(computational_basis_state_4) == (10,)
assert cirq.qis.infer_qid_shape(15, 7, 22, 4) == (23,)
with pytest.raises(ValueError, match='No states were specified'):
_ = cirq.qis.infer_qid_shape()
with pytest.raises(ValueError, match='Failed'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_1, computational_basis_state_5)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_1)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(state_tensor_1)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(density_matrix_1)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_1, computational_basis_state_2)
with pytest.raises(ValueError, match='Failed'):
_ = cirq.qis.infer_qid_shape(state_vector_1, computational_basis_state_4)
with pytest.raises(ValueError, match='Failed to infer'):
_ = cirq.qis.infer_qid_shape(state_vector_1, state_vector_2)
with pytest.raises(ValueError, match='Failed to infer'):
_ = cirq.qis.infer_qid_shape(computational_basis_state_3, state_tensor_2)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_zero_state(global_phase):
zero_state = global_phase * np.array([1, 0])
bloch = cirq.bloch_vector_from_state_vector(zero_state, 0)
desired_simple = np.array([0, 0, 1])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_one_state(global_phase):
one_state = global_phase * np.array([0, 1])
bloch = cirq.bloch_vector_from_state_vector(one_state, 0)
desired_simple = np.array([0, 0, -1])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_plus_state(global_phase):
sqrt = np.sqrt(0.5)
plus_state = global_phase * np.array([sqrt, sqrt])
bloch = cirq.bloch_vector_from_state_vector(plus_state, 0)
desired_simple = np.array([1, 0, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_minus_state(global_phase):
sqrt = np.sqrt(0.5)
minus_state = global_phase * np.array([-1.0j * sqrt, 1.0j * sqrt])
bloch = cirq.bloch_vector_from_state_vector(minus_state, 0)
desired_simple = np.array([-1, 0, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_iplus_state(global_phase):
sqrt = np.sqrt(0.5)
iplus_state = global_phase * np.array([sqrt, 1j * sqrt])
bloch = cirq.bloch_vector_from_state_vector(iplus_state, 0)
desired_simple = np.array([0, 1, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
@pytest.mark.parametrize('global_phase', (1, 1j, np.exp(1j)))
def test_bloch_vector_iminus_state(global_phase):
sqrt = np.sqrt(0.5)
iminus_state = global_phase * np.array([sqrt, -1j * sqrt])
bloch = cirq.bloch_vector_from_state_vector(iminus_state, 0)
desired_simple = np.array([0, -1, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
def test_bloch_vector_simple_th_zero():
sqrt = np.sqrt(0.5)
# State TH|0>.
th_state = np.array([sqrt, 0.5 + 0.5j])
bloch = cirq.bloch_vector_from_state_vector(th_state, 0)
desired_simple = np.array([sqrt, sqrt, 0])
np.testing.assert_array_almost_equal(bloch, desired_simple)
def test_bloch_vector_equal_sqrt3():
sqrt3 = 1 / np.sqrt(3)
test_state = np.array([0.888074, 0.325058 + 0.325058j])
bloch = cirq.bloch_vector_from_state_vector(test_state, 0)
desired_simple = np.array([sqrt3, sqrt3, sqrt3])
np.testing.assert_array_almost_equal(bloch, desired_simple)
def test_bloch_vector_multi_pure():
plus_plus_state = np.array([0.5, 0.5, 0.5, 0.5])
bloch_0 = cirq.bloch_vector_from_state_vector(plus_plus_state, 0)
bloch_1 = cirq.bloch_vector_from_state_vector(plus_plus_state, 1)
desired_simple = np.array([1, 0, 0])
np.testing.assert_array_almost_equal(bloch_1, desired_simple)
np.testing.assert_array_almost_equal(bloch_0, desired_simple)
def test_bloch_vector_multi_mixed():
sqrt = np.sqrt(0.5)
# Bell state 1/sqrt(2)(|00>+|11>)
phi_plus = np.array([sqrt, 0.0, 0.0, sqrt])
bloch_0 = cirq.bloch_vector_from_state_vector(phi_plus, 0)
bloch_1 = cirq.bloch_vector_from_state_vector(phi_plus, 1)
zero = np.zeros(3)
np.testing.assert_array_almost_equal(bloch_0, zero)
np.testing.assert_array_almost_equal(bloch_1, zero)
rcnot_state = np.array([0.90612745, -0.07465783j, -0.37533028j, 0.18023996])
bloch_mixed_0 = cirq.bloch_vector_from_state_vector(rcnot_state, 0)
bloch_mixed_1 = cirq.bloch_vector_from_state_vector(rcnot_state, 1)
true_mixed_0 = np.array([0.0, -0.6532815, 0.6532815])
true_mixed_1 = np.array([0.0, 0.0, 0.9238795])
np.testing.assert_array_almost_equal(true_mixed_0, bloch_mixed_0)
np.testing.assert_array_almost_equal(true_mixed_1, bloch_mixed_1)
def test_bloch_vector_multi_big():
five_qubit_plus_state = np.array([0.1767767] * 32)
desired_simple = np.array([1, 0, 0])
for qubit in range(0, 5):
bloch_i = cirq.bloch_vector_from_state_vector(five_qubit_plus_state, qubit)
np.testing.assert_array_almost_equal(bloch_i, desired_simple)
def test_bloch_vector_invalid():
with pytest.raises(ValueError):
_ = cirq.bloch_vector_from_state_vector(np.array([0.5, 0.5, 0.5]), 0)
with pytest.raises(IndexError):
_ = cirq.bloch_vector_from_state_vector(np.array([0.5, 0.5, 0.5, 0.5]), -1)
with pytest.raises(IndexError):
_ = cirq.bloch_vector_from_state_vector(np.array([0.5, 0.5, 0.5, 0.5]), 2)
def test_density_matrix_from_state_vector():
test_state = np.array(
[
0.0 - 0.35355339j,
0.0 + 0.35355339j,
0.0 - 0.35355339j,
0.0 + 0.35355339j,
0.0 + 0.35355339j,
0.0 - 0.35355339j,
0.0 + 0.35355339j,
0.0 - 0.35355339j,
]
)
full_rho = cirq.density_matrix_from_state_vector(test_state)
np.testing.assert_array_almost_equal(full_rho, np.outer(test_state, np.conj(test_state)))
rho_one = cirq.density_matrix_from_state_vector(test_state, [1])
true_one = np.array([[0.5 + 0.0j, 0.5 + 0.0j], [0.5 + 0.0j, 0.5 + 0.0j]])
np.testing.assert_array_almost_equal(rho_one, true_one)
rho_two_zero = cirq.density_matrix_from_state_vector(test_state, [0, 2])
true_two_zero = np.array(
[
[0.25 + 0.0j, -0.25 + 0.0j, -0.25 + 0.0j, 0.25 + 0.0j],
[-0.25 + 0.0j, 0.25 + 0.0j, 0.25 + 0.0j, -0.25 + 0.0j],
[-0.25 + 0.0j, 0.25 + 0.0j, 0.25 + 0.0j, -0.25 + 0.0j],
[0.25 + 0.0j, -0.25 + 0.0j, -0.25 + 0.0j, 0.25 + 0.0j],
]
)
np.testing.assert_array_almost_equal(rho_two_zero, true_two_zero)
# two and zero will have same single qubit density matrix.
rho_two = cirq.density_matrix_from_state_vector(test_state, [2])
true_two = np.array([[0.5 + 0.0j, -0.5 + 0.0j], [-0.5 + 0.0j, 0.5 + 0.0j]])
np.testing.assert_array_almost_equal(rho_two, true_two)
rho_zero = cirq.density_matrix_from_state_vector(test_state, [0])
np.testing.assert_array_almost_equal(rho_zero, true_two)
def test_density_matrix_invalid():
bad_state = np.array([0.5, 0.5, 0.5])
good_state = np.array([0.5, 0.5, 0.5, 0.5])
with pytest.raises(ValueError):
_ = cirq.density_matrix_from_state_vector(bad_state)
with pytest.raises(ValueError):
_ = cirq.density_matrix_from_state_vector(bad_state, [0, 1])
with pytest.raises(IndexError):
_ = cirq.density_matrix_from_state_vector(good_state, [-1, 0, 1])
with pytest.raises(IndexError):
_ = cirq.density_matrix_from_state_vector(good_state, [-1])
def test_dirac_notation():
sqrt = np.sqrt(0.5)
exp_pi_2 = 0.5 + 0.5j
assert_dirac_notation_numpy([0, 0], "0")
assert_dirac_notation_python([1], "|⟩")
assert_dirac_notation_numpy([sqrt, sqrt], "0.71|0⟩ + 0.71|1⟩")
assert_dirac_notation_python([-sqrt, sqrt], "-0.71|0⟩ + 0.71|1⟩")
assert_dirac_notation_numpy([sqrt, -sqrt], "0.71|0⟩ - 0.71|1⟩")
assert_dirac_notation_python([-sqrt, -sqrt], "-0.71|0⟩ - 0.71|1⟩")
assert_dirac_notation_numpy([sqrt, 1j * sqrt], "0.71|0⟩ + 0.71j|1⟩")
assert_dirac_notation_python([sqrt, exp_pi_2], "0.71|0⟩ + (0.5+0.5j)|1⟩")
assert_dirac_notation_numpy([exp_pi_2, -sqrt], "(0.5+0.5j)|0⟩ - 0.71|1⟩")
assert_dirac_notation_python([exp_pi_2, 0.5 - 0.5j], "(0.5+0.5j)|0⟩ + (0.5-0.5j)|1⟩")
assert_dirac_notation_numpy([0.5, 0.5, -0.5, -0.5], "0.5|00⟩ + 0.5|01⟩ - 0.5|10⟩ - 0.5|11⟩")
assert_dirac_notation_python([0.71j, 0.71j], "0.71j|0⟩ + 0.71j|1⟩")
def test_dirac_notation_partial_state():
sqrt = np.sqrt(0.5)
exp_pi_2 = 0.5 + 0.5j
assert_dirac_notation_numpy([1, 0], "|0⟩")
assert_dirac_notation_python([1j, 0], "1j|0⟩")
assert_dirac_notation_numpy([0, 1], "|1⟩")
assert_dirac_notation_python([0, 1j], "1j|1⟩")
assert_dirac_notation_numpy([sqrt, 0, 0, sqrt], "0.71|00⟩ + 0.71|11⟩")
assert_dirac_notation_python([sqrt, sqrt, 0, 0], "0.71|00⟩ + 0.71|01⟩")
assert_dirac_notation_numpy([exp_pi_2, 0, 0, exp_pi_2], "(0.5+0.5j)|00⟩ + (0.5+0.5j)|11⟩")
assert_dirac_notation_python([0, 0, 0, 1], "|11⟩")
def test_dirac_notation_precision():
sqrt = np.sqrt(0.5)
assert_dirac_notation_numpy([sqrt, sqrt], "0.7|0⟩ + 0.7|1⟩", decimals=1)
assert_dirac_notation_python([sqrt, sqrt], "0.707|0⟩ + 0.707|1⟩", decimals=3)
def test_to_valid_state_vector():
with pytest.raises(ValueError, match='Computational basis state is out of range'):
cirq.to_valid_state_vector(2, 1)
np.testing.assert_almost_equal(
cirq.to_valid_state_vector(np.array([1.0, 0.0, 0.0, 0.0], dtype=np.complex64), 2),
np.array([1.0, 0.0, 0.0, 0.0]),
)
np.testing.assert_almost_equal(
cirq.to_valid_state_vector(np.array([0.0, 1.0, 0.0, 0.0], dtype=np.complex64), 2),
np.array([0.0, 1.0, 0.0, 0.0]),
)
np.testing.assert_almost_equal(cirq.to_valid_state_vector(0, 2), np.array([1.0, 0.0, 0.0, 0.0]))
np.testing.assert_almost_equal(cirq.to_valid_state_vector(1, 2), np.array([0.0, 1.0, 0.0, 0.0]))
v = cirq.to_valid_state_vector([0, 1, 2, 0], qid_shape=(3, 3, 3, 3))
assert v.shape == (3**4,)
assert v[6 + 9] == 1
v = cirq.to_valid_state_vector([False, True, False, False], num_qubits=4)
assert v.shape == (16,)
assert v[4] == 1
v = cirq.to_valid_state_vector([0, 1, 0, 0], num_qubits=2)
assert v.shape == (4,)
assert v[1] == 1
v = cirq.to_valid_state_vector(np.array([1, 0], dtype=np.complex64), qid_shape=(2, 1))
assert v.shape == (2,)
assert v[0] == 1
def test_to_valid_state_vector_creates_new_copy():
state = np.array([1.0, 0.0, 0.0, 0.0], dtype=np.complex64)
out = cirq.to_valid_state_vector(state, 2)
assert out is not state
def test_invalid_to_valid_state_vector():
with pytest.raises(ValueError, match="Please specify"):
_ = cirq.to_valid_state_vector(np.array([1]))
with pytest.raises(ValueError):
_ = cirq.to_valid_state_vector(np.array([1.0, 0.0], dtype=np.complex64), 2)
with pytest.raises(ValueError):
_ = cirq.to_valid_state_vector(-1, 2)
with pytest.raises(ValueError):
_ = cirq.to_valid_state_vector(5, 2)
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector('0000', 2)
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector('not an int', 2)
with pytest.raises(ValueError, match=r'num_qubits != len\(qid_shape\)'):
_ = cirq.to_valid_state_vector(0, 5, qid_shape=(1, 2, 3))
with pytest.raises(ValueError, match='out of bounds'):
_ = cirq.to_valid_state_vector([3], qid_shape=(3,))
with pytest.raises(ValueError, match='out of bounds'):
_ = cirq.to_valid_state_vector([-1], qid_shape=(3,))
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector([], qid_shape=(3,))
with pytest.raises(ValueError, match='Invalid quantum state'):
_ = cirq.to_valid_state_vector([0, 1], num_qubits=3)
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.to_valid_state_vector([1, 0], qid_shape=(2, 1))
with pytest.raises(ValueError, match='ambiguous'):
_ = cirq.to_valid_state_vector(np.array([1, 0], dtype=np.int64), qid_shape=(2, 1))
def test_validate_normalized_state():
cirq.validate_normalized_state_vector(cirq.testing.random_superposition(2), qid_shape=(2,))
cirq.validate_normalized_state_vector(
np.array([0.5, 0.5, 0.5, 0.5], dtype=np.complex64), qid_shape=(2, 2)
)
with pytest.raises(ValueError, match='invalid dtype'):
cirq.validate_normalized_state_vector(
np.array([1, 1], dtype=np.complex64), qid_shape=(2, 2), dtype=np.complex128
)
with pytest.raises(ValueError, match='incorrect size'):
cirq.validate_normalized_state_vector(
np.array([1, 1], dtype=np.complex64), qid_shape=(2, 2)
)
with pytest.raises(ValueError, match='not normalized'):
cirq.validate_normalized_state_vector(
np.array([1.0, 0.2, 0.0, 0.0], dtype=np.complex64), qid_shape=(2, 2)
)
def test_validate_density_matrix():
cirq.validate_density_matrix(cirq.testing.random_density_matrix(2), qid_shape=(2,))
with pytest.raises(ValueError, match='dtype'):
cirq.to_valid_density_matrix(
np.array([[1, 0], [0, 0]], dtype=np.complex64), qid_shape=(2,), dtype=np.complex128
)
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.array([[1, 0]]), qid_shape=(2,))
with pytest.raises(ValueError, match='hermitian'):
cirq.to_valid_density_matrix(np.array([[1, 0.1], [0, 0]]), qid_shape=(2,))
with pytest.raises(ValueError, match='trace 1'):
cirq.to_valid_density_matrix(np.array([[1, 0], [0, 0.1]]), qid_shape=(2,))
with pytest.raises(ValueError, match='positive semidefinite'):
cirq.to_valid_density_matrix(
np.array([[1.1, 0], [0, -0.1]], dtype=np.complex64), qid_shape=(2,)
)
def test_to_valid_density_matrix_from_density_matrix():
assert_valid_density_matrix(np.array([[1, 0], [0, 0]]))
assert_valid_density_matrix(np.array([[0.5, 0], [0, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.5], [0.5, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.2], [0.2, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.5j], [-0.5j, 0.5]]))
assert_valid_density_matrix(np.array([[0.5, 0.2 - 0.2j], [0.2 + 0.2j, 0.5]]))
assert_valid_density_matrix(np.eye(4) / 4.0, num_qubits=2)
assert_valid_density_matrix(np.diag([1, 0, 0, 0]), num_qubits=2)
assert_valid_density_matrix(np.ones([4, 4]) / 4.0, num_qubits=2)
assert_valid_density_matrix(np.diag([0.2, 0.8, 0, 0]), num_qubits=2)
assert_valid_density_matrix(
np.array([[0.2, 0, 0, 0.2 - 0.3j], [0, 0, 0, 0], [0, 0, 0, 0], [0.2 + 0.3j, 0, 0, 0.8]]),
num_qubits=2,
)
assert_valid_density_matrix(np.array([[1, 0, 0]] + [[0, 0, 0]] * 2), qid_shape=(3,))
assert_valid_density_matrix(
np.array([[0, 0, 0], [0, 0.5, 0.5j], [0, -0.5j, 0.5]]), qid_shape=(3,)
)
assert_valid_density_matrix(np.eye(9) / 9.0, qid_shape=(3, 3))
assert_valid_density_matrix(np.eye(12) / 12.0, qid_shape=(3, 4))
assert_valid_density_matrix(np.ones([9, 9]) / 9.0, qid_shape=(3, 3))
assert_valid_density_matrix(np.diag([0.2, 0.8, 0, 0]), qid_shape=(4,))
def test_to_valid_density_matrix_from_density_matrix_tensor():
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
cirq.one_hot(shape=(2, 2, 2, 2, 2, 2), dtype=np.complex64), num_qubits=3
),
cirq.one_hot(shape=(8, 8), dtype=np.complex64),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
cirq.one_hot(shape=(2, 3, 4, 2, 3, 4), dtype=np.complex64), qid_shape=(2, 3, 4)
),
cirq.one_hot(shape=(24, 24), dtype=np.complex64),
)
def test_to_valid_density_matrix_not_square():
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.array([[1], [0]]), num_qubits=1)
def test_to_valid_density_matrix_size_mismatch_num_qubits():
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.array([[[1, 0], [0, 0]], [[0, 0], [0, 0]]]), num_qubits=2)
with pytest.raises(ValueError, match='shape'):
cirq.to_valid_density_matrix(np.eye(4) / 4.0, num_qubits=1)
def test_to_valid_density_matrix_not_hermitian():
with pytest.raises(ValueError, match='hermitian'):
cirq.to_valid_density_matrix(np.array([[0.5, 0.5j], [0.5, 0.5j]]), num_qubits=1)
with pytest.raises(ValueError, match='hermitian'):
cirq.to_valid_density_matrix(
np.array(
[[0.2, 0, 0, -0.2 - 0.3j], [0, 0, 0, 0], [0, 0, 0, 0], [0.2 + 0.3j, 0, 0, 0.8]]
),
num_qubits=2,
)
def test_to_valid_density_matrix_mismatched_qid_shape():
with pytest.raises(ValueError, match=r'num_qubits != len\(qid_shape\)'):
cirq.to_valid_density_matrix(np.eye(4) / 4, num_qubits=1, qid_shape=(2, 2))
with pytest.raises(ValueError, match=r'num_qubits != len\(qid_shape\)'):
cirq.to_valid_density_matrix(np.eye(4) / 4, num_qubits=2, qid_shape=(4,))
with pytest.raises(ValueError, match='Both were None'):
cirq.to_valid_density_matrix(np.eye(4) / 4)
def test_to_valid_density_matrix_not_unit_trace():
with pytest.raises(ValueError, match='trace 1'):
cirq.to_valid_density_matrix(np.array([[1, 0], [0, -0.1]]), num_qubits=1)
with pytest.raises(ValueError, match='trace 1'):
cirq.to_valid_density_matrix(np.zeros([2, 2]), num_qubits=1)
def test_to_valid_density_matrix_not_positive_semidefinite():
with pytest.raises(ValueError, match='positive semidefinite'):
cirq.to_valid_density_matrix(
np.array([[0.6, 0.5], [0.5, 0.4]], dtype=np.complex64), num_qubits=1
)
def test_to_valid_density_matrix_wrong_dtype():
with pytest.raises(ValueError, match='dtype'):
cirq.to_valid_density_matrix(
np.array([[1, 0], [0, 0]], dtype=np.complex64), num_qubits=1, dtype=np.complex128
)
def test_to_valid_density_matrix_from_state_vector():
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([1, 0], dtype=np.complex64), num_qubits=1
),
np.array([[1, 0], [0, 0]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([np.sqrt(0.3), np.sqrt(0.7)], dtype=np.complex64),
num_qubits=1,
),
np.array([[0.3, np.sqrt(0.3 * 0.7)], [np.sqrt(0.3 * 0.7), 0.7]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([np.sqrt(0.5), np.sqrt(0.5) * 1j], dtype=np.complex64),
num_qubits=1,
),
np.array([[0.5, -0.5j], [0.5j, 0.5]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array([0.5] * 4, dtype=np.complex64), num_qubits=2
),
0.25 * np.ones((4, 4)),
)
def test_to_valid_density_matrix_from_state_vector_tensor():
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(
density_matrix_rep=np.array(np.full((2, 2), 0.5), dtype=np.complex64), num_qubits=2
),
0.25 * np.ones((4, 4)),
)
def test_to_valid_density_matrix_from_state_invalid_state():
with pytest.raises(ValueError, match="Invalid quantum state"):
cirq.to_valid_density_matrix(np.array([1, 0, 0]), num_qubits=2)
def test_to_valid_density_matrix_from_computational_basis():
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=0, num_qubits=1), np.array([[1, 0], [0, 0]])
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=1, num_qubits=1), np.array([[0, 0], [0, 1]])
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=2, num_qubits=2),
np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]]),
)
np.testing.assert_almost_equal(
cirq.to_valid_density_matrix(density_matrix_rep=0, num_qubits=0), np.array([[1]])
)
def test_to_valid_density_matrix_from_state_invalid_computational_basis():
with pytest.raises(ValueError, match="out of range"):
cirq.to_valid_density_matrix(-1, num_qubits=2)
def test_one_hot():
result = cirq.one_hot(shape=4, dtype=np.int32)
assert result.dtype == np.int32
np.testing.assert_array_equal(result, [1, 0, 0, 0])
np.testing.assert_array_equal(
cirq.one_hot(shape=[2, 3], dtype=np.complex64), [[1, 0, 0], [0, 0, 0]]
)
np.testing.assert_array_equal(
cirq.one_hot(shape=[2, 3], dtype=np.complex64, index=(0, 2)), [[0, 0, 1], [0, 0, 0]]
)
np.testing.assert_array_equal(
cirq.one_hot(shape=5, dtype=np.complex128, index=3), [0, 0, 0, 1, 0]
)
def test_eye_tensor():
assert np.all(cirq.eye_tensor((), dtype=int) == np.array(1))
assert np.all(cirq.eye_tensor((1,), dtype=int) == np.array([[1]]))
assert np.all(cirq.eye_tensor((2,), dtype=int) == np.array([[1, 0], [0, 1]])) # yapf: disable
assert np.all(
cirq.eye_tensor((2, 2), dtype=int)
== np.array([[[[1, 0], [0, 0]], [[0, 1], [0, 0]]], [[[0, 0], [1, 0]], [[0, 0], [0, 1]]]])
) # yapf: disable
assert np.all(
cirq.eye_tensor((2, 3), dtype=int)
== np.array(
[
[[[1, 0, 0], [0, 0, 0]], [[0, 1, 0], [0, 0, 0]], [[0, 0, 1], [0, 0, 0]]],
[[[0, 0, 0], [1, 0, 0]], [[0, 0, 0], [0, 1, 0]], [[0, 0, 0], [0, 0, 1]]],
]
)
) # yapf: disable
assert np.all(
cirq.eye_tensor((3, 2), dtype=int)
== np.array(
[
[[[1, 0], [0, 0], [0, 0]], [[0, 1], [0, 0], [0, 0]]],
[[[0, 0], [1, 0], [0, 0]], [[0, 0], [0, 1], [0, 0]]],
[[[0, 0], [0, 0], [1, 0]], [[0, 0], [0, 0], [0, 1]]],
]
)
) # yapf: disable
|
nilq/baby-python
|
python
|
#!/usr/bin/python
#
# Copyright 2019 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon sdk
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.14.4
Contact: contact@polyaxon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import polyaxon_sdk
from polyaxon_sdk.api.git_accesses_v1_api import GitAccessesV1Api # noqa: E501
from polyaxon_sdk.rest import ApiException
class TestGitAccessesV1Api(unittest.TestCase):
"""GitAccessesV1Api unit test stubs"""
def setUp(self):
self.api = polyaxon_sdk.api.git_accesses_v1_api.GitAccessesV1Api() # noqa: E501
def tearDown(self):
pass
def test_create_git_access(self):
"""Test case for create_git_access
Create git access  # noqa: E501
"""
pass
def test_delete_git_access(self):
"""Test case for delete_git_access
Delete git access  # noqa: E501
"""
pass
def test_get_git_access(self):
"""Test case for get_git_access
Get git access  # noqa: E501
"""
pass
def test_list_git_access_names(self):
"""Test case for list_git_access_names
List git access names  # noqa: E501
"""
pass
def test_list_git_accesses(self):
"""Test case for list_git_accesses
List git accesses  # noqa: E501
"""
pass
def test_patch_git_access(self):
"""Test case for patch_git_access
Patch git access  # noqa: E501
"""
pass
def test_update_git_access(self):
"""Test case for update_git_access
Update git access  # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
nilq/baby-python
|
python
|
grk_tws = [['G946'],
['G5206'],
['G3428', 'G3429', 'G3430', 'G3431', 'G3432'],
['G3841'],
['G1041', 'G2379'],
['G281'],
['G32', 'G743', 'G2465'],
['G32', 'G743', 'G218', 'G1472', 'G2025', 'G3462', 'G5545', 'G5548'],
['G500'],
['G651', 'G652', 'G2491', 'G5376', 'G5570'],
['G322', 'G606', 'G1299', 'G1303', 'G1935', 'G2525', 'G2749', 'G4287', 'G4384', 'G4929', 'G5021', 'G5087'],
['G2787'],
['G2643'],
['G2435'],
['G831', 'G1413', 'G1849', 'G1850', 'G2003', 'G2715', 'G5247'],
['G907'],
['G544', 'G569', 'G571', 'G3982', 'G4100', 'G4102', 'G4103', 'G4135'],
['G4100', 'G4103'],
['G25', 'G27', 'G5207'],
['G4415'],
['G273', 'G274', 'G298', 'G338', 'G410', 'G423'],
['G987', 'G988', 'G989'],
['G1757', 'G2127', 'G2128', 'G2129', 'G3106', 'G3107', 'G3108', 'G6050'],
['G129', 'G130', 'G131', 'G1420'],
['G212', 'G213', 'G2620', 'G2744', 'G2745', 'G2746', 'G3166'],
['G4430', 'G4954', 'G4983', 'G5559'],
['G254', 'G331', 'G332', 'G1195', 'G1196', 'G1198', 'G1199', 'G1210', 'G1397', 'G1398', 'G1401', 'G1402', 'G2611', 'G2615', 'G3734', 'G3784', 'G3814', 'G4019', 'G4029', 'G4385', 'G4886', 'G4887', 'G5265'],
['G313', 'G509', 'G1080', 'G3824'],
['G80', 'G81', 'G2385', 'G2455', 'G2500', 'G4613', 'G5360', 'G5569'],
['G154', 'G363', 'G1458', 'G1528', 'G1941', 'G1951', 'G2028', 'G2046', 'G2564', 'G2821', 'G2822', 'G2840', 'G2919', 'G3004', 'G3106', 'G3333', 'G3343', 'G3603', 'G3686', 'G3687', 'G4316', 'G4341', 'G4377', 'G4779', 'G4867', 'G5455', 'G5537', 'G5581'],
['G1543', 'G2760'],
['G730', 'G815', 'G1025', 'G1064', 'G1471', 'G3439', 'G3515', 'G3516', 'G3808', 'G3812', 'G3813', 'G3816', 'G5040', 'G5041', 'G5042', 'G5043', 'G5044', 'G5206', 'G5207', 'G5388'],
['G3323', 'G5547'],
['G5546'],
['G1577'],
['G203', 'G1986', 'G4059', 'G4061'],
['G2511', 'G2512', 'G2513', 'G3689'],
['G1263', 'G1291', 'G1296', 'G1297', 'G1299', 'G1690', 'G1778', 'G1781', 'G1785', 'G2003', 'G2004', 'G2008', 'G2036', 'G2753', 'G3056', 'G3726', 'G3852', 'G3853', 'G4367', 'G4483', 'G4487', 'G5506'],
['G1653', 'G3356', 'G3627', 'G4697', 'G4834', 'G4835'],
['G176', 'G843', 'G2607', 'G2613', 'G2631', 'G2632', 'G2633', 'G2917', 'G2919', 'G2920', 'G5272', 'G6048'],
['G1843', 'G3670', 'G3671'],
['G4893'],
['G1457', 'G5048'],
['G204', 'G1137', 'G2776', 'G3037'],
['G802', 'G1242', 'G4934'],
['G4716'],
['G388', 'G4362', 'G4717', 'G4957'],
['G331', 'G332', 'G685', 'G1944', 'G2551', 'G2652', 'G2653', 'G2671', 'G2672', 'G6035'],
['G2250', 'G2962'],
['G1249'],
['G169', 'G1139', 'G1140', 'G1141', 'G1142', 'G4190', 'G4151', 'G4152', 'G4189'],
['G1139'],
['G3100', 'G3101', 'G3102'],
['G2304', 'G2999'],
['G2634', 'G2904', 'G2961', 'G2963'],
['G138', 'G140', 'G1586', 'G1588', 'G1589', 'G1951', 'G4400', 'G4401', 'G4758', 'G4899', 'G5500'],
['G126', 'G165', 'G166'],
['G2134', 'G2135'],
['G2099'],
['G92', 'G113', 'G459', 'G932', 'G987', 'G988', 'G1426', 'G2549', 'G2551', 'G2554', 'G2555', 'G2556', 'G2557', 'G2559', 'G2560', 'G2635', 'G2636', 'G4151', 'G4189', 'G4190', 'G4191', 'G5337'],
['G1869', 'G5229', 'G5251', 'G5311', 'G5312'],
['G3867', 'G3870', 'G3874', 'G4389'],
['G1680', 'G3640', 'G4102', 'G6066'],
['G4103'],
['G571'],
['G2299'],
['G1184', 'G3685', 'G4380', 'G5485', 'G5486'],
['G870', 'G1167', 'G1168', 'G1169', 'G1630', 'G1719', 'G2124', 'G2125', 'G2962', 'G5398', 'G5399', 'G5400', 'G5401'],
['G2842', 'G2844', 'G3352', 'G4790'],
['G40', 'G4130', 'G4137', 'G4151'],
['G2907', 'G4559', 'G4560', 'G4561'],
['G453', 'G454', 'G781', 'G801', 'G877', 'G878', 'G3471', 'G3472', 'G3473', 'G3474', 'G3912'],
['G165', 'G166', 'G1336'],
['G859', 'G863', 'G5483'],
['G646', 'G657', 'G863', 'G1459', 'G2641'],
['G1096', 'G4138'],
['G1482', 'G1484', 'G1672'],
['G334', 'G1390', 'G1394', 'G1431', 'G1434', 'G1435', 'G3311', 'G5486'],
['G1392', 'G1740', 'G4888'],
['G1391', 'G1741', 'G2620', 'G2744', 'G2745', 'G2746', 'G2755', 'G2811'],
['G112', 'G516', 'G932', 'G935', 'G1096', 'G1140', 'G2098', 'G2124', 'G2128', 'G2150', 'G2152', 'G2153', 'G2299', 'G2304', 'G2305', 'G2312', 'G2313', 'G2314', 'G2315', 'G2316', 'G2317', 'G2318', 'G2319', 'G2320', 'G3361', 'G3785', 'G4151', 'G5207', 'G5377', 'G5463', 'G5537', 'G5538'],
['G516', 'G2124', 'G2150', 'G2152', 'G2153', 'G2316', 'G2317'],
['G3962'],
['G14', 'G15', 'G18', 'G19', 'G515', 'G744', 'G865', 'G979', 'G1380', 'G2095', 'G2097', 'G2106', 'G2107', 'G2108', 'G2109', 'G2114', 'G2115', 'G2133', 'G2140', 'G2162', 'G2163', 'G2174', 'G2293', 'G2565', 'G2567', 'G2570', 'G2573', 'G2887', 'G2986', 'G3140', 'G3617', 'G3776', 'G4147', 'G4632', 'G4674', 'G4851', 'G5223', 'G5224', 'G5358', 'G5542', 'G5543', 'G5544'],
['G2097', 'G2098', 'G4283'],
['G2143', 'G5485', 'G5543'],
['G338', 'G1777', 'G3784', 'G5267'],
['G86'],
['G674', 'G1282', 'G1271', 'G2133', 'G2588', 'G2589', 'G4641', 'G4698', 'G5590'],
['G932', 'G2032', 'G3321', 'G3770', 'G3771', 'G3772'],
['G1444', 'G1445', 'G1446', 'G1447'],
['G86', 'G439', 'G440', 'G1067', 'G3041', 'G4442', 'G4443', 'G4447', 'G4448', 'G5020', 'G5394', 'G5457'],
['G748', 'G749'],
['G37', 'G38', 'G39', 'G40', 'G41', 'G42', 'G1859', 'G2150', 'G2412', 'G2413', 'G3741', 'G3742'],
['G40', 'G3741'],
['G39', 'G40', 'G3485', 'G5117'],
['G40', 'G4151'],
['G820', 'G1391', 'G1392', 'G1784', 'G2151', 'G2570', 'G3170', 'G4411', 'G4586', 'G5091', 'G5092', 'G5093', 'G5399'],
['G91', 'G560', 'G1679', 'G1680', 'G2070'],
['G2316', 'G3624'],
['G858', 'G4236', 'G4239', 'G4240', 'G5011', 'G5012', 'G5013', 'G5391'],
['G505', 'G5272', 'G5273'],
['G504', 'G179'],
['G1519', 'G2962', 'G5547'],
['G2816', 'G2817', 'G2819', 'G2820'],
['G92', 'G93', 'G458', 'G3892', 'G4189'],
['G121'],
['G1783', 'G1793', 'G5241'],
['G935', 'G2474', 'G2475'],
['G2205', 'G3863'],
['G2424', 'G5547'],
['G2450', 'G2451', 'G2452', 'G2453', 'G2454'],
['G144', 'G350', 'G968', 'G1106', 'G1252', 'G1341', 'G1345', 'G1348', 'G1349', 'G2917', 'G2919', 'G2920', 'G2922', 'G2923', 'G4232'],
['G2250', 'G2920', 'G2962'],
['G1342', 'G1344', 'G1345', 'G1346', 'G1347', 'G1738'],
['G1344', 'G1345', 'G1347'],
['G932', 'G2316', 'G3772'],
['G935', 'G2453'],
['G721', 'G2316'],
['G2354', 'G2355', 'G2870', 'G2875'],
['G2078', 'G2250'],
['G2316', 'G3551', 'G3565'],
['G198', 'G222', 'G227', 'G806', 'G590'],
['G203', 'G634', 'G962'],
['G1203', 'G2962'],
['G1173', 'G2960'],
['G2316', 'G2962'],
['G25', 'G26', 'G5360', 'G5361', 'G5362', 'G5363', 'G5365', 'G5367', 'G5368', 'G5369', 'G5377', 'G5381', 'G5382', 'G5383', 'G5388'],
['G3168', 'G3172'],
['G3131'],
['G1653', 'G1655', 'G1656', 'G2433', 'G2436', 'G3628', 'G3629', 'G3741', 'G4698'],
['G1247', 'G1248', 'G1249', 'G2023', 'G2038', 'G2418', 'G3008', 'G3009', 'G3010', 'G3011', 'G3930', 'G5256', 'G5257', 'G5524'],
['G880', 'G1213', 'G1229', 'G1411', 'G1569', 'G1718', 'G1770', 'G1839', 'G2285', 'G2296', 'G2297', 'G3167', 'G3902', 'G4591', 'G4592', 'G5059'],
['G5310'],
['G3464', 'G4666', 'G4669'],
['G2564', 'G3686', 'G3687', 'G5122'],
['G3850', 'G3942'],
['G3957'],
['G4166'],
['G4005'],
['G2316', 'G2992'],
['G599', 'G622', 'G684', 'G853', 'G1311', 'G2704', 'G4881', 'G5356'],
['G5330'],
['G1411', 'G1415', 'G1756', 'G1849', 'G1850', 'G2478', 'G2479', 'G2904', 'G3168'],
['G154', 'G1162', 'G1189', 'G1783', 'G2065', 'G2171', 'G2172', 'G3870', 'G4335', 'G4336'],
['G4309'],
['G748', 'G749', 'G2405', 'G2406', 'G2407', 'G2409', 'G2420'],
['G1843', 'G1860', 'G1861', 'G1862', 'G3670', 'G4279'],
['G2495', 'G4394', 'G4395', 'G4396', 'G4397', 'G4398', 'G5578'],
['G2434', 'G2435'],
['G5567', 'G5568'],
['G48', 'G49', 'G53', 'G54', 'G1506', 'G2511', 'G2512', 'G2513', 'G2514'],
['G4461'],
['G487', 'G3083'],
['G604', 'G1259', 'G2433', 'G2643', 'G2644'],
['G59', 'G629', 'G1805', 'G3084', 'G3085'],
['G2640', 'G3005', 'G3062'],
['G278', 'G3338', 'G3340', 'G3341'],
['G600', 'G2675'],
['G386', 'G1454', 'G1815'],
['G601', 'G602', 'G5537'],
['G1341', 'G1342', 'G1343', 'G1344', 'G1345', 'G1346', 'G2118'],
['G1188'],
['G4315', 'G4521'],
['G4523'],
['G40'],
['G4991', 'G4992'],
['G37', 'G38'],
['G39'],
['G1139', 'G1140', 'G1141', 'G1142', 'G1228', 'G4190', 'G4566', 'G4567'],
['G803', 'G804', 'G806', 'G1295', 'G1508', 'G4982', 'G4991', 'G5198'],
['G4990'],
['G1122'],
['G37', 'G38', 'G40', 'G873'],
['G364', 'G880', 'G1213', 'G1229', 'G1718', 'G1730', 'G1732', 'G1770', 'G3902', 'G4102', 'G4591', 'G4592', 'G4953', 'G4973', 'G5280'],
['G264', 'G265', 'G266', 'G268', 'G361', 'G3781', 'G3900', 'G4258'],
['G3816', 'G5043', 'G5207'],
['G2316', 'G5207'],
['G444', 'G5207'],
['G2316', 'G5043', 'G5207'],
['G5590'],
['G4151', 'G4152', 'G4153', 'G5326', 'G5427'],
['G2642', 'G2991', 'G3034', 'G3035', 'G3036', 'G3037', 'G4074', 'G4348', 'G5586'],
['G656', 'G752', 'G4864'],
['G4633', 'G4634', 'G4636', 'G4638'],
['G1493', 'G2411', 'G3485'],
['G551', 'G1598', 'G3985', 'G3986', 'G3987'],
['G1242', 'G1263', 'G1303', 'G1957', 'G3140', 'G3141', 'G3142', 'G3143', 'G4303', 'G4828', 'G6020'],
['G1263', 'G1957', 'G3140', 'G3141', 'G3142', 'G3143', 'G4303', 'G4828', 'G6020'],
['G5075', 'G5076'],
['G1427', 'G1733'],
['G458', 'G459', 'G3845', 'G3847', 'G3848', 'G3928'],
['G264', 'G3900'],
['G225', 'G226', 'G227', 'G228', 'G230', 'G1103', 'G3303', 'G3483', 'G3689', 'G4103', 'G4137'],
['G1679', 'G3872', 'G3982', 'G4006', 'G4100', 'G4276'],
['G543', 'G544', 'G570', 'G571'],
['G203', 'G564'],
['G167', 'G169', 'G2839', 'G2840', 'G3394'],
['G763', 'G764', 'G765'],
['G462', 'G2839'],
['G91', 'G93', 'G94'],
['G111', 'G459'],
['G106'],
['G93', 'G94', 'G458'],
['G3716', 'G3717'],
['G2171'],
['G1012', 'G1013', 'G2307', 'G2308', 'G2309', 'G2596'],
['G4678', 'G4679', 'G4680', 'G4920', 'G5428', 'G5429', 'G5430'],
['G267', 'G1263', 'G2649', 'G3140', 'G3141', 'G3142', 'G3144', 'G4828', 'G4901', 'G5575', 'G5576', 'G5577'],
['G3759'],
['G3056', 'G4487'],
['G3056', 'G4487'],
['G2041'],
['G1093', 'G2886', 'G2889', 'G3625'],
['G1391', 'G1479', 'G2151', 'G2318', 'G2323', 'G2356', 'G3000', 'G3511', 'G4352', 'G4353', 'G4573', 'G4574', 'G4576'],
['G514', 'G515', 'G516', 'G2425', 'G2661', 'G2735'],
['G2372', 'G3709', 'G3949', 'G3950'],
['G2205', 'G2206', 'G2207', 'G6041'],
['G2'],
['G6'],
['G8'],
['G7'],
['G11'],
['G76'],
['G138'],
['G406'],
['G452'],
['G491'],
['G625'],
['G207'],
['G688', 'G690'],
['G689'],
['G108'],
['G773'],
['G896'],
['G897'],
['G903'],
['G912'],
['G921'],
['G918'],
['G1263'],
['G954'],
['G958'],
['G960'],
['G963'],
['G965'],
['G2541'],
['G2542', 'G5376'],
['G2533'],
['G2535'],
['G2580'],
['G2581', 'G5478'],
['G2584'],
['G5466'],
['G2791'],
['G1138', 'G4172'],
['G2857', 'G2858'],
['G2881', 'G2882'],
['G2883'],
['G2912', 'G2914'],
['G2953', 'G2954'],
['G2956', 'G2957'],
['G1154'],
['G1158'],
['G1138'],
['G2401'],
['G124', 'G125'],
['G1639'],
['G1648'],
['G1662'],
['G2243'],
['G1665'],
['G1802'],
['G2179', 'G2180', 'G2181'],
['G2187'],
['G2269'],
['G128'],
['G2166'],
['G2096'],
['G1043'],
['G1045'],
['G1053', 'G1054'],
['G1056', 'G1057'],
['G1048'],
['G1068'],
['G1115'],
['G1671'],
['G1672', 'G1673', 'G1674', 'G1675', 'G1676'],
['G2264', 'G2265', 'G2267'],
['G2266'],
['G2264'],
['G1478'],
['G5617'],
['G1138', 'G3624'],
['G2430'],
['G2464'],
['G2268'],
['G2466'],
['G2384'],
['G2385'],
['G2385'],
['G2385'],
['G2496'],
['G2498'],
['G2408'],
['G2410'],
['G2414', 'G2415', 'G2419'],
['G2421'],
['G2403'],
['G2492'],
['G2493'],
['G2491', 'G3138'],
['G2491'],
['G910', 'G2491'],
['G2495'],
['G2445'],
['G2496'],
['G2446'],
['G2501'],
['G2500', 'G2501'],
['G2424'],
['G2502'],
['G2455', 'G2469'],
['G2455'],
['G2453'],
['G2748', 'G5493'],
['G2984'],
['G2976'],
['G3017', 'G3018', 'G3019', 'G3020'],
['G3091'],
['G3082'],
['G3109', 'G3110'],
['G1217'],
['G3128'],
['G444', 'G2316'],
['G3136'],
['G3137'],
['G3094', 'G3137'],
['G3137'],
['G3017', 'G3156'],
['G3370'],
['G3318'],
['G3413'],
['G3434'],
['G3475'],
['G3735', 'G1636'],
['G3497'],
['G3493'],
['G3486'],
['G3508'],
['G3481'],
['G3478', 'G3479', 'G3480'],
['G3535', 'G3536'],
['G3575'],
['G3972', 'G4569'],
['G2786', 'G4074', 'G4613'],
['G5328'],
['G5376'],
['G5374', 'G5375'],
['H776', 'H6429', 'H6430'],
['G4949', 'G5403'],
['G4091', 'G4194'],
['G4193', 'G4195'],
['G4478'],
['G4460'],
['G4471'],
['G2063', 'G2281'],
['G4497'],
['G4502'],
['G4514', 'G4516'],
['G4503'],
['G4540', 'G4541', 'G4542'],
['G4546'],
['G4545'],
['G4564'],
['G4549'],
['G1056', 'G1082', 'G2281', 'G3041', 'G5085'],
['G4589'],
['G4590'],
['G4605', 'G4606'],
['G4609', 'G4610'],
['G4826'],
['G2208', 'G2581', 'G4613'],
['G3735', 'G4614'],
['G4670'],
['G4672'],
['G4736'],
['G4947', 'G4948'],
['G5018', 'G5019'],
['G2331', 'G2332'],
['G2381'],
['G5095'],
['G5103'],
['G5174'],
['G5190'],
['G5183', 'G5184'],
['G3774'],
['G2195'],
['G4524'],
['G2199'],
['G2194'],
['G2197'],
['G2216'],
['G1427', 'G2474', 'G5443'],
['G12', 'G5421'],
['G1458', 'G2147', 'G2596', 'G2724'],
['G1492', 'G1921', 'G3670'],
['G2941'],
['G3560', 'G3867', 'G5537'],
['G476', 'G480', 'G2189', 'G2190', 'G4567', 'G5227'],
['G2346', 'G2347', 'G2552', 'G2553', 'G2561', 'G3804', 'G4777', 'G4778', 'G5003'],
['G165', 'G1074'],
['G1654'],
['G2368', 'G2379'],
['G639', 'G1568', 'G1569', 'G1605', 'G1611', 'G1839', 'G2284', 'G2285', 'G2296', 'G2297', 'G2298', 'G3167', 'G4023', 'G4423', 'G4592', 'G5059'],
['G4243'],
['G23', 'G1758', 'G2371', 'G2372', 'G3164', 'G3709', 'G3710', 'G3711', 'G3947', 'G3949', 'G5520'],
['G928', 'G3600', 'G4928'],
['G3696', 'G3833'],
['G2868', 'G4700', 'G5077', 'G5522'],
['G1577', 'G1997', 'G3831', 'G4863', 'G4864', 'G4871', 'G4905'],
['G3307'],
['G4105'],
['G1349', 'G1556', 'G1557', 'G1558', 'G2917', 'G3709'],
['G2124'],
['G513'],
['G1173', 'G1403'],
['G2915', 'G2916'],
['G692', 'G4723'],
['G2894', 'G3426', 'G4553', 'G4711'],
['G142', 'G399', 'G430', 'G503', 'G941', 'G1080', 'G1627', 'G2592', 'G3114', 'G3140', 'G4064', 'G4160', 'G4722', 'G4828', 'G4901', 'G5041', 'G5088', 'G5297', 'G5342', 'G5409', 'G5576'],
['G715'],
['G2226', 'G2341', 'G2342', 'G2934', 'G4968', 'G5074'],
['G154', 'G1871', 'G4319', 'G4434', 'G6075'],
['G3860', 'G4273'],
['G2250'],
['G5610'],
['G3376'],
['G5438'],
['G4521'],
['G1763', 'G2094'],
['G3470'],
['G2210'],
['G631', 'G1591', 'G1813'],
['G662', 'G2292', 'G3618', 'G3954', 'G3955', 'G5111', 'G5112'],
['G976', 'G2222'],
['G1120', 'G2578', 'G2827', 'G4781', 'G4794'],
['G5115'],
['G106', 'G740', 'G4286'],
['G2382'],
['G1709', 'G1720', 'G4157'],
['G5260'],
['G3565'],
['G3566'],
['G5470', 'G5474', 'G5475'],
['G4', 'G916', 'G922', 'G1117', 'G2347', 'G2599', 'G2655', 'G5413'],
['G3646'],
['G1779', 'G1780', 'G2290', 'G4916', 'G5027'],
['G2574'],
['G161', 'G162', 'G163', 'G164', 'G2221'],
['G1544'],
['G726'],
['G582', 'G583'],
['G892'],
['G716', 'G4480'],
['G5502'],
['G749', 'G750', 'G754', 'G4410', 'G4413', 'G5506'],
['G749'],
['G4175', 'G4177', 'G4847'],
['G294', 'G1463', 'G1562', 'G1737', 'G1742', 'G1746', 'G1902', 'G2066', 'G2439', 'G2440', 'G3608', 'G4016', 'G4470', 'G4616', 'G4683', 'G4749', 'G5509', 'G6005'],
['G302', 'G2174', 'G3870', 'G3874', 'G3875', 'G3888', 'G3890', 'G3931'],
['G5506'],
['G264', 'G2038', 'G2716', 'G3429', 'G3431', 'G3860', 'G3872', 'G3908', 'G4102', 'G4160', 'G4203'],
['G2844', 'G3353', 'G4791', 'G4898', 'G4904'],
['G1080', 'G1722', 'G2602', 'G2845', 'G4815'],
['G1340', 'G2292', 'G3954', 'G3982', 'G4006', 'G5287'],
['G950', 'G951', 'G1991', 'G2964', 'G3315', 'G4300', 'G4972'],
['G355', 'G1159', 'G2618', 'G2654', 'G2719', 'G5315', 'G5723'],
['G1848'],
['G853', 'G862', 'G1311', 'G1312', 'G2585', 'G2704', 'G4550', 'G4595', 'G5349', 'G5351', 'G5356'],
['G1010', 'G4824', 'G4892'],
['G1010', 'G1011', 'G1012', 'G1106', 'G4823', 'G4824', 'G4825'],
['G2114', 'G2115', 'G2174', 'G2292', 'G2293', 'G2294', 'G3870', 'G3874', 'G3954', 'G4389', 'G4837', 'G5111'],
['G2681', 'G833', 'G933', 'G4259'],
['G1016', 'G1151', 'G2353', 'G2934', 'G3447', 'G3448', 'G4165', 'G5022'],
['G2041', 'G2602', 'G2675', 'G2936', 'G2937', 'G2939', 'G4160', 'G5480'],
['G2226', 'G2937', 'G2938'],
['G156', 'G1462', 'G2556', 'G2557', 'G4467'],
['G1238', 'G4735', 'G4737'],
['G310', 'G349', 'G863', 'G994', 'G995', 'G1916', 'G2019', 'G2799', 'G2805', 'G2896', 'G2905', 'G2906', 'G2929', 'G4377', 'G5455'],
['G2665'],
['G609', 'G851', 'G1581', 'G2407', 'G5257'],
['G2217', 'G4652', 'G4653', 'G4655', 'G4656'],
['G336', 'G337', 'G520', 'G581', 'G599', 'G615', 'G622', 'G684', 'G1634', 'G1935', 'G2079', 'G2253', 'G2286', 'G2287', 'G2288', 'G2289', 'G2348', 'G2837', 'G2966', 'G3498', 'G3499', 'G3500', 'G4430', 'G4880', 'G4881', 'G5053', 'G5054'],
['G538', 'G539', 'G1386', 'G1387', 'G1388', 'G1389', 'G1818', 'G3884', 'G4105', 'G4106', 'G4108', 'G5422', 'G5423'],
['G312', 'G394', 'G518', 'G669', 'G1107', 'G1213', 'G1229', 'G1335', 'G1344', 'G1555', 'G1718', 'G1732', 'G1834', 'G2097', 'G2511', 'G2605', 'G2607', 'G3140', 'G3670', 'G3724', 'G3822', 'G3853', 'G3870', 'G3955', 'G5319', 'G5419'],
['G1378'],
['G1456', 'G1457'],
['G733', 'G2839', 'G2840', 'G3392', 'G3435', 'G4696', 'G5351'],
['G325', 'G525', 'G629', 'G859', 'G1080', 'G1325', 'G1560', 'G1659', 'G1807', 'G1929', 'G2673', 'G3086', 'G3860', 'G4506', 'G4991', 'G5088', 'G5483'],
['G1074', 'G1085', 'G4690'],
['G953'],
['G2047', 'G2048'],
['G2048', 'G2049', 'G2050', 'G3443'],
['G5056', 'G5087'],
['G3645'],
['G946', 'G947', 'G948', 'G4767', 'G5723', 'G3404'],
['G2068', 'G2666', 'G2719', 'G5315'],
['G350', 'G1252', 'G1253', 'G1381', 'G2924'],
['G149', 'G819', 'G3680', 'G3856'],
['G818', 'G819', 'G820', 'G987', 'G2617'],
['G506', 'G543', 'G544', 'G545', 'G3847', 'G3876'],
['G1287', 'G1290', 'G4650'],
['G4436'],
['G630', 'G647', 'G863'],
['G1319', 'G1322', 'G2085'],
['G3678', 'G3688', 'G5268'],
['G4058'],
['G1797', 'G1798', 'G3677'],
['G3178', 'G3182', 'G3183', 'G3184', 'G3630', 'G3632'],
['G906', 'G4657'],
['G105'],
['G1093', 'G1919', 'G2709', 'G2886', 'G3625', 'G3749', 'G4578', 'G5517'],
['G1087', 'G3187', 'G4244', 'G4245', 'G4850'],
['G430', 'G907', 'G1526', 'G2005', 'G2076', 'G2553', 'G2594', 'G3114', 'G3306', 'G4722', 'G5278', 'G5281', 'G5297', 'G5342'],
['G1398', 'G1402', 'G2615'],
['G866', 'G1937', 'G2205', 'G2206', 'G3713', 'G3788', 'G4123', 'G4124', 'G4190', 'G5354', 'G5355', 'G5366'],
['G93', 'G458', 'G2038', 'G2040', 'G2555'],
['G3927'],
['G3799', 'G4383', 'G4750'],
['G5578'],
['G1965', 'G3144', 'G5571', 'G5575', 'G5576', 'G5577'],
['G1085', 'G3614', 'G3624', 'G3965'],
['G3042'],
['G777', 'G3521', 'G3522', 'G3523'],
['G256', 'G540', 'G1080', 'G2495', 'G3737', 'G3962', 'G3964', 'G3966', 'G3967', 'G3970', 'G3971', 'G3995', 'G4245', 'G4269', 'G4613'],
['G26', 'G755', 'G1062', 'G1173', 'G1403', 'G1456', 'G1858', 'G1859', 'G2165', 'G3521', 'G4910'],
['G1456', 'G1858', 'G1859'],
['G3653', 'G4808', 'G4810'],
['G439', 'G440', 'G1067', 'G2741', 'G4442', 'G4443', 'G4447', 'G4448', 'G4451', 'G5394', 'G5457'],
['G4416', 'G5207'],
['G536'],
['G231', 'G1903'],
['G34', 'G4167', 'G4168'],
['G2627', 'G4132', 'G4215', 'G4216'],
['G832', 'G834', 'G836'],
['G4228', 'G5286'],
['G241', 'G245', 'G526', 'G915', 'G1854', 'G3581', 'G3927', 'G3941'],
['G4267', 'G4268'],
['G1608', 'G4202', 'G4203'],
['G2310', 'G2311', 'G2602'],
['G242', 'G305', 'G393', 'G985', 'G1530', 'G1816', 'G4077', 'G4855', 'G5453'],
['G3030'],
['G425', 'G525', 'G558', 'G572', 'G629', 'G630', 'G859', 'G1344', 'G1432', 'G1657', 'G1658', 'G1659', 'G1849', 'G2010', 'G3032', 'G3089', 'G3955', 'G4174', 'G4506', 'G5483', 'G5486'],
['G1081', 'G2590', 'G2592', 'G2593', 'G3703', 'G5052', 'G5352', 'G6013'],
['G2575'],
['G2374', 'G4439', 'G4440'],
['G328', 'G1241', 'G2224', 'G4024'],
['G122', 'G2055', 'G2056', 'G5131'],
['G5552', 'G5553', 'G5554', 'G5557'],
['G2636', 'G2637', 'G5397'],
['G445', 'G446', 'G746', 'G1481', 'G2232', 'G2233', 'G2230', 'G4232'],
['G248', 'G2590', 'G3450', 'G4621', 'G4719'],
['G288', 'G4718'],
['G1690', 'G4726', 'G4727', 'G4959'],
['G5463', 'G5464'],
['G710', 'G1188', 'G1448', 'G1451', 'G1764', 'G2021', 'G2092', 'G2176', 'G2902', 'G4084', 'G4474', 'G4475', 'G5495', 'G5496', 'G5497'],
['G519'],
['G917', 'G1419', 'G1421', 'G1422', 'G1423', 'G1425', 'G2205', 'G2532', 'G2553', 'G2872', 'G2873', 'G3425', 'G3433', 'G4053', 'G4183', 'G4456', 'G4457', 'G4641', 'G4642', 'G4643', 'G4645', 'G4912', 'G4927'],
['G2788', 'G2789', 'G2790'],
['G2326', 'G6013'],
['G5244'],
['G346', 'G755', 'G2775', 'G2776', 'G4719'],
['G1295', 'G1743', 'G2322', 'G2323', 'G2386', 'G2390', 'G2392', 'G2511', 'G3647', 'G4982', 'G4991', 'G5198', 'G5199'],
['G2816', 'G2818', 'G2820', 'G4789'],
['G40', 'G4172'],
['G2781', 'G3192', 'G3193'],
['G2768'],
['G2462'],
['G2460'],
['G5610'],
['G3609', 'G3613', 'G3614', 'G3624'],
['G2322', 'G3609', 'G3614', 'G3615', 'G3616', 'G3623', 'G3624'],
['G2617', 'G5014'],
['G1493', 'G1494', 'G1495', 'G1496', 'G1497', 'G2712'],
['G1504', 'G5179', 'G5481'],
['G1096', 'G2596', 'G3401', 'G3402', 'G4160'],
['G2368', 'G2369', 'G2370', 'G2379', 'G3031'],
['G1830'],
['G1256', 'G1299', 'G1319', 'G1321', 'G1378', 'G1781', 'G1785', 'G2322', 'G2727', 'G2753', 'G3559', 'G3560', 'G3614', 'G3615', 'G3624', 'G3811', 'G3852', 'G3853', 'G4264', 'G4367', 'G4822'],
['G4587'],
['G1252', 'G1328', 'G1329', 'G1381', 'G1955', 'G2058', 'G3177', 'G4793'],
['G2453'],
['G20', 'G21', 'G2167', 'G2744', 'G3685', 'G4640', 'G5463', 'G5479'],
['G2454'],
['G350', 'G1252', 'G1348', 'G2919', 'G2922', 'G2923'],
['G4773'],
['G1085', 'G5449'],
['G935', 'G936'],
['G932'],
['G2705', 'G5368', 'G5370'],
['G50', 'G56', 'G1097', 'G1107', 'G1108', 'G1231', 'G1492', 'G1921', 'G1922', 'G1987', 'G2467', 'G2589', 'G3877', 'G4267', 'G4894'],
['G75', 'G2038', 'G2040', 'G2041', 'G2872', 'G2873', 'G4704', 'G4866', 'G4904', 'G5389'],
['G3449', 'G4944', 'G5088', 'G5604', 'G5605'],
['G2985', 'G3088'],
['G3087'],
['G1785', 'G3548', 'G3551', 'G4747'],
['G111', 'G459', 'G1832', 'G3545'],
['G113', 'G266', 'G458', 'G459'],
['G3097'],
['G3014', 'G3015'],
['G1121', 'G1989', 'G1992'],
['G681', 'G796', 'G1645', 'G2985', 'G3088', 'G5338', 'G5457', 'G5458', 'G5460', 'G5462'],
['G1503', 'G1504', 'G2509', 'G2531', 'G2596', 'G3664', 'G3665', 'G3666', 'G3667', 'G3668', 'G3669', 'G3697', 'G4833', 'G5108', 'G5613', 'G5615', 'G5616', 'G5618', 'G5619'],
['G3023'],
['G200'],
['G3751'],
['G2624', 'G2819', 'G2975', 'G3091'],
['G865', 'G866', 'G5358', 'G5366', 'G5367', 'G5369', 'G5377', 'G5381', 'G5382'],
['G5011', 'G5012', 'G5014'],
['G766', 'G1937', 'G1938', 'G1939', 'G1971', 'G2237', 'G3715', 'G3806'],
['G3097'],
['G758', 'G3980', 'G4755'],
['G1392', 'G3170'],
['G2012', 'G3621', 'G3623'],
['G3312', 'G3316'],
['G3191', 'G4304'],
['G4235', 'G4236', 'G4239', 'G4240'],
['G3089', 'G5080'],
['G1010', 'G3196', 'G3609'],
['G3422'],
['G32', 'G652'],
['G972', 'G1411', 'G1413', 'G1414', 'G1415', 'G1498', 'G1752', 'G1754', 'G2159', 'G2478', 'G2479', 'G2900', 'G2904', 'G3168', 'G3173', 'G5082'],
['G1271', 'G1374', 'G3328', 'G3525', 'G3540', 'G3563', 'G4993', 'G5590'],
['G1592', 'G1701', 'G1702', 'G1703', 'G2301', 'G2606', 'G3456', 'G5512'],
['G4110', 'G4111'],
['G2354', 'G2875', 'G3602', 'G3996', 'G3997'],
['G4052', 'G4129'],
['G3466'],
['G246', 'G1074', 'G1085', 'G1484'],
['G1069', 'G2087', 'G4040', 'G4139'],
['G3376', 'G3561'],
['G937', 'G2104', 'G2903'],
['G332', 'G3660', 'G3727', 'G3728'],
['G191', 'G544', 'G3980', 'G3982', 'G4198', 'G5083', 'G5084', 'G5218', 'G5219', 'G5255', 'G5292', 'G5293', 'G5442'],
['G1081', 'G1085'],
['G1637', 'G3464'],
['G65', 'G1636', 'G1637', 'G2565'],
['G1722', 'G5308', 'G5310', 'G5311'],
['G2616', 'G2669'],
['G1299', 'G2525', 'G2680', 'G3724', 'G4270', 'G4282', 'G4309', 'G5021', 'G5500'],
['G1296', 'G1345', 'G1378', 'G1379', 'G2937', 'G3862'],
['G1983', 'G1984', 'G1985'],
['G2638', 'G2983'],
['G1484', 'G1494'],
['G833', 'G933', 'G4232'],
['G5404'],
['G991', 'G1519', 'G2983', 'G4299', 'G4382', 'G4383'],
['G420', 'G463', 'G1933', 'G3114', 'G3115', 'G3116', 'G5278', 'G5281'],
['G3966'],
['G269', 'G425', 'G1514', 'G1515', 'G1516', 'G1517', 'G1518', 'G2272'],
['G246', 'G1074', 'G1085', 'G1218', 'G1484', 'G2560', 'G2992', 'G3793'],
['G195', 'G197', 'G199', 'G739', 'G1295', 'G2005', 'G2675', 'G2676', 'G2677', 'G3647', 'G5046', 'G5047', 'G5048', 'G5050', 'G5052'],
['G1375', 'G1376', 'G1377', 'G1559', 'G2347'],
['G3115', 'G4343', 'G5281'],
['G654', 'G1294', 'G3344', 'G3346', 'G3859', 'G4106'],
['G1330', 'G1338', 'G1574', 'G2660', 'G3572', 'G4044', 'G4138'],
['G5519'],
['G4769'],
['G12', 'G999', 'G5421'],
['G3061', 'G3148', 'G4127'],
['G1189', 'G1793', 'G2065', 'G3870'],
['G728'],
['G722', 'G723'],
['G1139', 'G2192', 'G2697', 'G2722', 'G2932', 'G2933', 'G2935', 'G4047', 'G5224', 'G5564'],
['G1229', 'G1256', 'G2097', 'G2605', 'G2782', 'G2783', 'G2784', 'G2980', 'G3853', 'G3955', 'G4283', 'G4296'],
['G1229', 'G1256', 'G2097', 'G2605', 'G2782', 'G2783', 'G2784', 'G2980', 'G3955', 'G4283', 'G4296'],
['G927', 'G1784', 'G2472', 'G4185', 'G4186', 'G5092', 'G5093'],
['G747', 'G758', 'G1413', 'G2232', 'G3175'],
['G1198', 'G1199', 'G1200', 'G1201', 'G1202', 'G1210', 'G2252', 'G3612', 'G4788', 'G4869', 'G5083', 'G5084', 'G5438', 'G5439'],
['G518', 'G591', 'G1229', 'G1861', 'G2097', 'G2605', 'G2782', 'G2784', 'G2980', 'G3142', 'G3853', 'G4135'],
['G952', 'G953'],
['G147', 'G1281', 'G2585', 'G2770', 'G2771', 'G3408', 'G4297', 'G4298', 'G4851', 'G5539', 'G5622', 'G5623', 'G5624'],
['G2137'],
['G4204'],
['G4098'],
['G212', 'G1391', 'G1392', 'G2744', 'G2745', 'G2746', 'G3173', 'G5187', 'G5229', 'G5243', 'G5244', 'G5308', 'G5309', 'G5426', 'G5450'],
['G3850', 'G3942'],
['G1885'],
['G653', 'G2042', 'G3863', 'G3893', 'G3947', 'G3948', 'G3949', 'G4292'],
['G4908', 'G5428'],
['G5229', 'G5448'],
['G1349', 'G1556', 'G1557', 'G2849', 'G3811', 'G5097'],
['G4209', 'G4210', 'G4211'],
['G683', 'G4261'],
['G938'],
['G762', 'G4570'],
['G1693', 'G2830', 'G3710', 'G5433'],
['G305', 'G386', 'G393', 'G450', 'G1096', 'G1326', 'G1453', 'G1525', 'G1817', 'G1825', 'G1892', 'G1999', 'G4891'],
['G270', 'G2325', 'G2327'],
['G3893', 'G4955'],
['G298', 'G299', 'G1649', 'G1651', 'G1969', 'G2008', 'G3679'],
['G308', 'G324', 'G353', 'G354', 'G568', 'G588', 'G618', 'G1183', 'G1209', 'G1523', 'G1653', 'G1926', 'G2210', 'G2865', 'G2983', 'G3028', 'G3335', 'G3336', 'G3549', 'G3858', 'G3880', 'G3970', 'G4327', 'G4355', 'G4356', 'G4687', 'G4732', 'G5264', 'G5274', 'G5562'],
['G2063', 'G2563'],
['G757', 'G936', 'G2231', 'G4821'],
['G96', 'G114', 'G483', 'G550', 'G579', 'G580', 'G593', 'G683', 'G720', 'G1609', 'G3868'],
['G21', 'G2165', 'G2620', 'G2744', 'G2745', 'G4796', 'G4913', 'G5463'],
['G189', 'G191', 'G312', 'G518', 'G987', 'G1225', 'G1310', 'G1426', 'G1834', 'G2036', 'G2162', 'G2163', 'G3004', 'G3056', 'G3140', 'G3141', 'G3377'],
['G410', 'G423', 'G819', 'G3059', 'G3679', 'G3680', 'G3681', 'G5195', 'G5196', 'G5484'],
['G372', 'G373', 'G425', 'G1515', 'G1879', 'G1954', 'G1981', 'G2270', 'G2663', 'G2664', 'G2681', 'G2838', 'G3062', 'G4520'],
['G344', 'G360', 'G390', 'G1877', 'G1880', 'G1994', 'G5290'],
['G127', 'G1788', 'G2125', 'G2412', 'G5399', 'G5401'],
['G469', 'G514', 'G591', 'G2603', 'G3405', 'G3406', 'G3408'],
['G1746', 'G2066', 'G2067', 'G2440', 'G4749', 'G4016', 'G5511'],
['G2563', 'G4463', 'G4464'],
['G933', 'G934', 'G937'],
['G2679', 'G2692', 'G3639', 'G4485'],
['G746', 'G752', 'G755', 'G757', 'G758', 'G932', 'G936', 'G1018', 'G1203', 'G1299', 'G1778', 'G1785', 'G1849', 'G2232', 'G2233', 'G2525', 'G2583', 'G2888', 'G2961', 'G3545', 'G3841', 'G4165', 'G4173', 'G4291'],
['G413', 'G1377', 'G1601', 'G1530', 'G1532', 'G1632', 'G1998', 'G2027', 'G2701', 'G3729', 'G4063', 'G4370', 'G4390', 'G4890', 'G4936', 'G5143', 'G5240', 'G5295', 'G5302', 'G5343'],
['G4526'],
['G2413'],
['G266', 'G334', 'G1049', 'G1435', 'G1494', 'G2378', 'G2380', 'G3646', 'G4376', 'G5485'],
['G4547', 'G5266'],
['G4464'],
['G974', 'G975'],
['G2696', 'G4972', 'G4973'],
['G4615', 'G4687', 'G4690', 'G4701', 'G4703'],
['G327', 'G1567', 'G1934', 'G2052', 'G2212'],
['G724', 'G1949', 'G2638', 'G2902', 'G2983', 'G4815', 'G4884'],
['G192', 'G193', 'G1466', 'G1467', 'G1468', 'G4997'],
['G782', 'G375', 'G630', 'G649', 'G652', 'G657', 'G1026', 'G1032', 'G1544', 'G1599', 'G1821', 'G3333', 'G3343', 'G3936', 'G3992', 'G4311', 'G4341', 'G4369', 'G4842', 'G4882'],
['G2191', 'G2062', 'G3789'],
['G1249', 'G1401', 'G1402', 'G2324', 'G3407', 'G3411', 'G3610', 'G3816', 'G4983', 'G5257'],
['G1247', 'G1248', 'G1398', 'G1402', 'G1438', 'G1983', 'G2064', 'G2212', 'G2323', 'G2999', 'G3000', 'G3009', 'G4337', 'G4342', 'G4754', 'G5087', 'G5256'],
['G1097'],
['G644', 'G1982', 'G2683', 'G4639'],
['G127', 'G149', 'G152', 'G153', 'G422', 'G808', 'G818', 'G819', 'G821', 'G1788', 'G1791', 'G1870', 'G2617', 'G3856', 'G5195'],
['G4165', 'G4262', 'G4263'],
['G750', 'G4165', 'G4166'],
['G2375'],
['G5429'],
['G693', 'G694', 'G695', 'G696', 'G1406'],
['G27', 'G79'],
['G337', 'G615', 'G1315', 'G2380', 'G2695', 'G4968', 'G4969', 'G5407'],
['G337', 'G615', 'G1315', 'G2380', 'G2695', 'G4968', 'G4969', 'G5407'],
['G987', 'G988', 'G1228', 'G1426', 'G2636', 'G2637', 'G3059', 'G3060', 'G6022'],
['G2871', 'G4967', 'G4969', 'G5408'],
['G879', 'G1852', 'G1853', 'G2518', 'G2837', 'G5258'],
['G64', 'G1029', 'G2339', 'G2340', 'G3802', 'G3803', 'G3985', 'G4625'],
['G5510'],
['G3095', 'G3096', 'G3097', 'G5331', 'G5332', 'G5333'],
['G4687', 'G4703', 'G5300', 'G5452', 'G6037'],
['G3057'],
['G2563', 'G3586', 'G4464'],
['G483', 'G4644', 'G4645'],
['G596'],
['G461', 'G772', 'G950', 'G1411', 'G1412', 'G1743', 'G1765', 'G1840', 'G1849', 'G1991', 'G2479', 'G2480', 'G2901', 'G2904', 'G3619', 'G3756', 'G4599', 'G4732', 'G4733', 'G4741'],
['G485', 'G2052', 'G2054', 'G3055', 'G3163', 'G5379'],
['G4608'],
['G3794'],
['G679', 'G4348', 'G4350', 'G4417', 'G4624', 'G4625'],
['G3037', 'G4349', 'G4625'],
['G350', 'G1379', 'G1396', 'G1777', 'G3663', 'G5292', 'G5293'],
['G5226', 'G5293'],
['G91', 'G941', 'G971', 'G2210', 'G2346', 'G2347', 'G3804', 'G3958', 'G4310', 'G4778', 'G4841', 'G5004', 'G5723'],
['G2303'],
['G4216', 'G4563', 'G4951'],
['G3162', 'G4501'],
['G583', 'G5411'],
['G5057', 'G5058'],
['G1317', 'G1321', 'G1322', 'G2085', 'G2605', 'G2727', 'G3100', 'G2312', 'G2567', 'G3811', 'G4994'],
['G1320', 'G2567', 'G3547', 'G5572'],
['G586', 'G1181', 'G1183'],
['G1629', 'G1630', 'G2258', 'G4422', 'G4426', 'G5401'],
['G727', 'G1888', 'G2417', 'G2812', 'G3027'],
['G173', 'G174', 'G4647', 'G5146'],
['G248'],
['G968', 'G2362'],
['G744', 'G530', 'G1074', 'G1208', 'G1441', 'G1597', 'G1626', 'G1909', 'G2034', 'G2119', 'G2121', 'G2235', 'G2250', 'G2540', 'G3379', 'G3461', 'G3568', 'G3763', 'G3764', 'G3819', 'G3956', 'G3999', 'G4178', 'G4181', 'G4183', 'G4218', 'G4277', 'G4287', 'G4340', 'G4455', 'G5119', 'G5151', 'G5305', 'G5550', 'G5551', 'G5610'],
['G86', 'G2750', 'G3418', 'G3419', 'G5028'],
['G1100', 'G1258', 'G1447', 'G2084'],
['G928', 'G929', 'G930', 'G931', 'G2558', 'G2851', 'G3600'],
['G3862', 'G3970'],
['G2662', 'G3961'],
['G1611'],
['G1719', 'G1790', 'G5141', 'G5156', 'G5425'],
['G178', 'G1382', 'G1383', 'G2919', 'G3984', 'G3986', 'G4451'],
['G1429', 'G5443'],
['G2346', 'G2347'],
['G1323', 'G2778', 'G5411'],
['G387', 'G1298', 'G1613', 'G1776', 'G2346', 'G2347', 'G2350', 'G2360', 'G2553', 'G2873', 'G3636', 'G3926', 'G3930', 'G3986', 'G4423', 'G4660', 'G5015', 'G5016', 'G5182'],
['G4536', 'G4537', 'G4538'],
['G5509'],
['G344', 'G387', 'G402', 'G576', 'G654', 'G665', 'G868', 'G1294', 'G1578', 'G1612', 'G1624', 'G1994', 'G2827', 'G3179', 'G3313', 'G3329', 'G3344', 'G3346', 'G4762', 'G5077', 'G5157', 'G5290', 'G6060'],
['G50', 'G145', 'G191', 'G801', 'G1097', 'G1107', 'G1108', 'G1271', 'G1921', 'G1922', 'G1987', 'G1990', 'G2657', 'G3129', 'G3539', 'G3563', 'G3877', 'G4441', 'G4907', 'G4908', 'G4920', 'G5424', 'G5428', 'G5429', 'G6063'],
['G255', 'G512', 'G888', 'G889', 'G890'],
['G945', 'G1432', 'G1500', 'G2755', 'G2756', 'G2757', 'G2758', 'G2761', 'G3150', 'G3151', 'G3152', 'G3153', 'G3154', 'G3155'],
['G2665'],
['G288', 'G290', 'G1009', 'G1092'],
['G289', 'G290'],
['G3932', 'G3933'],
['G3701', 'G3705', 'G3706'],
['G2906', 'G5456', 'G5586'],
['G1330', 'G1704', 'G3716', 'G4043', 'G4198', 'G4748'],
['G4686', 'G4753', 'G4754', 'G4757', 'G4758', 'G4961'],
['G684', 'G1287', 'G2049', 'G2673', 'G4199'],
['G69', 'G70', 'G991', 'G1127', 'G1492', 'G2334', 'G2892', 'G3525', 'G3708', 'G3906', 'G4337', 'G4648', 'G5083', 'G5438'],
['G4444'],
['G504', 'G4215', 'G4222', 'G5202', 'G5204'],
['G4077', 'G5421'],
['G4621'],
['G1098', 'G3631', 'G3820', 'G3943'],
['G3025', 'G5276'],
['G4425', 'G4617'],
['G4680'],
['G3074'],
['G1064', 'G2836', 'G3388'],
['G518', 'G1024', 'G3050', 'G3054', 'G3055', 'G3056', 'G4086', 'G4487', 'G4935', 'G5023', 'G5542'],
['G1125'],
['G91', 'G92', 'G93', 'G95', 'G264', 'G824', 'G983', 'G984', 'G1536', 'G1626', 'G1651', 'G1727', 'G1908', 'G2556', 'G2558', 'G2559', 'G2607', 'G3076', 'G3077', 'G3762', 'G4122', 'G5195', 'G5196'],
['G106', 'G2219', 'G2220'],
['G2086', 'G2201', 'G2218', 'G4805'],
]
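# The Strong's-number list closed above and `eng_tws` below appear to be
# parallel arrays: entry i of the list above holds the lexicon numbers
# (G = Greek, H = Hebrew) behind the English translation-word group at
# eng_tws[i] (for instance, a ['G2'] entry would pair with ['Aaron'], since
# Strong's G2 is the Greek form of Aaron). A minimal joining sketch follows;
# `strongs_tws` is a hypothetical name for the list above (its binding sits
# outside this excerpt), and the strip()/truthiness guards cover the stray
# leading spaces still present in the raw eng_tws strings.
def build_strongs_lookup(strongs_tws, eng_tws):
    """Map each Strong's number (e.g. 'G2316') to its word groups."""
    lookup = {}
    for numbers, words in zip(strongs_tws, eng_tws):
        group = [w.strip() for w in words]
        for number in numbers:
            number = number.strip()
            if number:  # guard against empty-string artifacts in the data
                lookup.setdefault(number, []).append(group)
    return lookup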
eng_tws = [['abomination', ' abominations', ' abominable', ' foul', ' disgusting thing'],
['adoption', ' adopt', ' adopted'],
['adultery', ' adulterous', ' adulterer', ' adulteress', ' adulterers', ' adulteresses'],
['Almighty'],
['altar', ' altars'],
['amen', ' truly'],
['angel', ' angels', ' archangel'],
['anoint', ' anointed', ' anointing', ' sons of fresh olive oil'],
['antichrist', ' antichrists'],
['apostle', ' apostles', ' apostleship'],
['appoint', ' appoints', ' appointed', ' appointment', ' ordered', ' desired', ' directed', ' selected', ' reserved'],
['ark'],
['ark of the covenant', ' ark of Yahweh'],
['atonement', ' atone', ' atones', ' atoned'],
['atonement lid'],
['authority', ' authorities'],
['baptize', ' baptized', ' baptism'],
['believe', ' believes', ' believed', ' believer', ' belief', ' unbeliever', ' unbelievers', ' unbelief'],
['believer'],
['beloved', ' treasured'],
['birthright'],
['blameless'],
['blasphemy', ' blaspheme', ' blasphemes', ' blasphemed', ' blasphemous', ' blasphemies'],
['bless', ' blessed', ' blessing', ' blessings'],
['blood'],
['boast', ' boasts', ' boastful'],
['body', ' bodies'],
['bind', ' binding', ' bond', ' bonds', ' bound', ' is bound', ' tie', ' tying', ' tie up', ' tied', ' tied up', ' put on', ' puts on', ' gird up', ' fetters', ' chains'],
['born again', ' born of God', ' new birth'],
['brother', ' brothers'],
['call', ' calls', ' calling', ' called', ' invite'],
['centurion', ' centurions'],
['children', ' child'],
['Christ', ' Messiah'],
['Christian'],
['church', ' churches', ' Church'],
['circumcise', ' circumcised', ' circumcision', ' uncircumcised', ' uncircumcision'],
['clean', ' cleans', ' cleaned', ' cleanse', ' cleansed', ' cleansing', ' wash', ' washing', ' washed', ' washes', ' unclean'],
['command', ' commands', ' commanded', ' commandment', ' commandments'],
['compassion', ' compassionate', ' deal gently with'],
['condemn', ' condemns', ' condemned', ' condemnation', ' sentence of condemnation', ' sentenced to death'],
['confess', ' confessed', ' confesses', ' confession'],
['conscience', ' consciences'],
['consecrate', ' consecrated', ' consecration'],
['cornerstone', ' cornerstones'],
['covenant', ' covenants', ' new covenant', ' agreed'],
['covenant faithfulness', ' covenant loyalty', ' loving kindness', ' unfailing love', ' steadfast love'],
['cross'],
['crucify', ' crucified', ' nailing to a cross'],
['curse', ' cursed', ' curses', ' cursing', ' speaks evil'],
['daughter of Zion'],
['day of the Lord', ' day of Yahweh'],
['deacon', ' deacons'],
['demon', ' evil spirit', ' unclean spirit', ' demonic'],
['demon-possessed', ' possessed by a demon', ' possessed by demons'],
['disciple', ' disciples'],
['discipline', ' disciplines', ' disciplined', ' self-discipline', ' no discipline'],
['divine', ' divine nature'],
['dominion', ' subjugate', ' control'],
['elect', ' choose', ' chose', ' chosen', ' chosen one', ' chosen ones', ' chosen people', ' Chosen One'],
['ephod'],
['eternity', ' everlasting', ' eternal', ' forever'],
['eunuch', ' eunuchs'],
['evangelist', ' evangelists'],
['evil', ' wicked', ' wickedness', ' wickedly'],
['exalt', ' exalted', ' exalts', ' exaltation', ' high position'],
['exhort', ' exhortation', ' urge'],
['faith'],
['faithful', ' faithfulness', ' unfaithful', ' unfaithfulness', ' breach of faith', ' show self faithful', ' faithful one', ' faithful people'],
['faithless', ' faithlessness'],
['god', ' false god', ' gods', ' goddess', ' idol', ' idols', ' idolater', ' idolaters', ' idolatrous', ' idolatry', ' disgusting figure', ' goat idols'],
['favor', ' favors', ' favorable', ' favoritism'],
['fear', ' fears', ' afraid'],
['fellowship', ' be participants with', ' associate with', ' be allied with'],
['filled with the Holy Spirit'],
['flesh', ' meat', ' humans', ' living creatures'],
['fool', ' fools', ' foolish', ' folly', ' out of mind', ' unwise', ' stupid', ' doing what is repulsive', ' blame'],
['forever'],
['forgive', ' forgives', ' forgiven', ' forgiveness', ' pardon', ' pardoned'],
['forsake', ' forsakes', ' forsaken', ' forsook', ' abandon', ' abandoned', ' give up', ' has left', ' have left'],
['fulfill', ' fulfilled', ' carried out'],
['Gentile', ' Gentiles'],
['gift', ' gifts'],
['glorify'],
['glory', ' glorious', ' glorify', ' glorified', ' glorifies'],
['God'],
['godly', ' godliness', ' ungodly', ' godless', ' ungodliness', ' godlessness', ' godless actions', ' godly life'],
['God the Father', ' heavenly Father', ' Father'],
['good', ' goodness', ' pleasing'],
['good news', ' gospel'],
['grace', ' gracious', ' kind'],
['guilt', ' guilty', ' held accountable'],
['Hades', ' Sheol'],
['heart', ' hearts', ' inner self', ' myself'],
['heaven', ' sky', ' skies', ' heavens', ' heavenly', ' overhead', ' in midair'],
['Hebrew', ' Hebrews'],
['hell', ' lake of fire', ' handed down to Tartarus'],
['high priest'],
['holy', ' holiness', ' unholy', ' sacred'],
['Holy One'],
['holy place'],
['Holy Spirit', ' Spirit of God', ' Spirit of the Lord', ' Spirit'],
['honor', ' honors'],
['hope', ' hoped', ' hopes'],
['house of God', ' Yahwehs house'],
['humble', ' humbles', ' humbled', ' humility'],
['hypocrite', ' hypocrites', ' hypocrisy'],
['image of God'],
['in Christ', ' in Jesus', ' in the Lord', ' in him'],
['inherit', ' inheritance', ' heritage'],
['iniquity', ' iniquities'],
['innocent', ' innocence', ' guiltless'],
['intercede', ' intercedes', ' intercession'],
['Israel', ' Israelites'],
['jealous', ' jealousy'],
['Jesus', ' Jesus Christ', ' Christ Jesus'],
['Jew', ' Jewish', ' Jews'],
['judge', ' judges', ' judgment', ' judgments', ' decision'],
['judgment day', ' that day'],
['just', ' justice', ' unjust', ' unjustly', ' injustice', ' justly', ' justify', ' justification'],
['justify'],
['kingdom of God', ' kingdom of heaven'],
['King of the Jews', ' king of the Jews'],
['lamb', ' Lamb of God'],
['lament', ' laments', ' lamentation', ' wail'],
['last day', ' last days', ' latter days'],
['law of Moses', ' Gods law', ' law of Yahweh', ' the law', ' expert in the law'],
['life', ' live', ' lived', ' lives', ' living', ' alive'],
['lord', ' lords', ' Lord', ' master', ' masters', ' sir', ' sirs'],
['lordgod'],
['Lords Supper'],
['Lord Yahweh', ' Yahweh God'],
['love', ' loves', ' loving', ' loved'],
['majesty'],
['manna'],
['mercy', ' merciful'],
['to minister', ' ministry', ' offer as a priest', ' performs service', ' attending to needs'],
['miracle', ' miracles'],
['Most High'],
['myrrh'],
['name', ' names', ' named'],
['Nazirite', ' Nazirites', ' Nazirite vow'],
['newcovenant'],
['parable', ' parables'],
['Passover'],
['pastor', ' pastors'],
['Pentecost', ' Festival of Weeks'],
['people of God', ' my people'],
['perish', ' perished', ' perishing', ' perishable'],
['Pharisee', ' Pharisees'],
['power', ' powers', ' powerful', ' powerfully', ' be mastered'],
['pray', ' prayer', ' prayers', ' prayed'],
['predestine', ' predestined'],
['priest', ' priests', ' priesthood'],
['promise', ' promises', ' promised'],
['Promised Land'],
['prophet', ' prophets', ' prophecy', ' prophesy', ' seer', ' prophetess'],
['propitiation'],
['psalm', ' psalms'],
['pure', ' purify', ' purity', ' purification', ' sincere'],
['Rabbi'],
['ransom', ' ransomed'],
['reconcile', ' reconciles', ' reconciled', ' reconciliation'],
['redeem', ' redeems', ' redemption', ' redeemer'],
['remnant'],
['repent', ' repents', ' repented', ' repentance'],
['restore', ' restores', ' restored', ' restoration', ' bring back'],
['resurrection'],
['reveal', ' reveals', ' revealed', ' revelation'],
['righteous', ' righteousness', ' unrighteous', ' unrighteousness', ' upright', ' uprightness', ' do what is right'],
['right hand', ' right hands'],
['Sabbath'],
['Sadducee', ' Sadducees'],
['saint', ' saints', ' Gods holy people', ' holy ones', ' holy people'],
['salvation'],
['sanctify', ' sanctifies', ' sanctification'],
['sanctuary'],
['Satan', ' devil', ' evil one'],
['save', ' saves', ' saved', ' safe', ' salvation'],
['Savior', ' savior'],
['scribe', ' scribes'],
['set apart'],
['sign', ' signs', ' proof', ' remind', ' reminds', ' reminded', ' reminder', ' reminders', ' reminding'],
['sin', ' sins', ' sinned', ' sinful', ' sinner', ' sinning'],
['son', ' sons'],
['Son of God', ' Son'],
['Son of Man', ' son of man'],
['sons of God', ' children of God'],
['soul', ' souls'],
['spirit', ' spirits', ' spiritual', ' ghost'],
['stone', ' stones', ' stoning'],
['synagogue'],
['tabernacle'],
['temple'],
['tempt', ' temptation'],
['test', ' tests', ' tested', ' testing', ' testing in the fire'],
['testimony', ' testify', ' testified', ' witness', ' witnesses', ' eyewitness', ' eyewitnesses'],
['tetrarch'],
['the twelve', ' the eleven'],
['transgress', ' transgresses', ' transgression'],
['trespass', ' trespasses', ' trespassed'],
['true', ' truth', ' truths', ' truly'],
['trust', ' trusts', ' trusted', ' trustworthy', ' trustworthiness'],
['unbeliever'],
['uncircumcised'],
['unclean'],
['unfaithful'],
['ungodly'],
['unholy'],
['unjust'],
['unlawful'],
['unleavened bread'],
['unrighteous'],
['upright'],
['vow', ' vows', ' vowed'],
['will of God', ' his will', ' he wishes'],
['wise', ' wisdom'],
['witness'],
['woe'],
['word of God', ' words of God', ' word of Yahweh', ' word of the Lord', ' word of truth', ' scripture', ' scriptures'],
['wordoftruth'],
['works', ' deeds', ' work', ' acts'],
['world', ' worldly'],
['worship', ' worshiping'],
['worthy', ' worth', ' unworthy', ' worthless', ' value', ' valuation', ' full price'],
['wrath', ' fury'],
['Yahweh'],
['Yahweh of hosts', ' God of hosts', ' host of heaven', ' host of the heavens', ' Lord of hosts'],
['zeal', ' zealous'],
['Zion', ' Mount Zion'],
['Aaron'],
['Abel'],
['Abiathar'],
['Abijah'],
['Abimelech'],
['Abner'],
['Abraham', ' Abram'],
['Absalom'],
['Adam'],
['Adonijah'],
['Ahab'],
['Ahasuerus'],
['Ahaz'],
['Ahaziah'],
['Ahijah'],
['Ai'],
['Amalek', ' Amalekite', ' Amalekites'],
['Amaziah'],
['Ammon', ' Ammonite', ' Ammonites'],
['Amnon'],
['Amorite', ' Amorites'],
['Amos'],
['Amoz'],
['Andrew'],
['Annas'],
['Antioch'],
['Apollos'],
['Aquila'],
['Arabah'],
['Arabia', ' Arabian', ' Arabians'],
['Aram', ' Aramean', ' Arameans', ' Aramaic', ' Aram of Damascus'],
['Ararat'],
['Artaxerxes'],
['Asa'],
['Asaph'],
['Ashdod', ' Azotus'],
['Asher'],
['Asherah', ' Asherah pole', ' Asherah poles', ' Ashtoreth', ' Ashtoreths'],
['Ashkelon'],
['Asia'],
['Assyria', ' Assyrian', ' Assyrians', ' Assyrian Empire'],
['Athaliah'],
['Azariah'],
['Baal'],
['Baasha'],
['Babel'],
['Babylon', ' Babylonia', ' Babylonian', ' Babylonians'],
['Balaam'],
['Barabbas'],
['Barnabas'],
['Bartholomew'],
['Baruch'],
['Bashan'],
['Bathsheba'],
['Beelzebul'],
['Beersheba'],
['Benaiah'],
['Benjamin', ' Benjamite', ' Benjamites'],
['Berea'],
['Bethany'],
['Bethel'],
['Bethlehem'],
['Beth Shemesh'],
['Bethuel'],
['Boaz'],
['Caesar'],
['Caesarea', ' Caesarea Philippi'],
['Caiaphas'],
['Cain'],
['Caleb'],
['Cana'],
['Canaan', ' Canaanite', ' Canaanites'],
['Capernaum'],
['Carmel', ' Mount Carmel'],
['Chaldea', ' Chaldean', ' Chaldeans'],
['Kerethites'],
['Cilicia'],
['city of David'],
['Colossae', ' Colossians'],
['Corinth', ' Corinthians'],
['Cornelius'],
['Crete', ' Cretan', ' Cretans'],
['Cush'],
['Cyprus'],
['Cyrene'],
['Cyrus'],
['Damascus'],
['Dan'],
['Daniel'],
['Darius'],
['David'],
['Delilah'],
['Eden', ' garden of Eden'],
['Edom', ' Edomite', ' Edomites', ' Idumea'],
['Egypt', ' Egyptian', ' Egyptians'],
['Ekron', ' Ekronites'],
['Elam', ' Elamites'],
['Eleazar'],
['Eliakim'],
['Elijah'],
['Elisha'],
['Elizabeth'],
['En Gedi'],
['Enoch'],
['Ephesus', ' Ephesian', ' Ephesians'],
['Ephraim', ' Ephraimite', ' Ephraimites'],
['Ephrath', ' Ephrathah', ' Ephrathite', ' Ephrathites'],
['Esau'],
['Esther'],
['Ethiopia', ' Ethiopian'],
['Euphrates River', ' the River'],
['Eve'],
['Ezekiel'],
['Ezra'],
['Gabriel'],
['Gad'],
['Galatia', ' Galatians'],
['Galilee', ' Galilean', ' Galileans'],
['Gath', ' Gittite', ' Gittites'],
['Gaza'],
['Gerar'],
['Geshur', ' Geshurites'],
['Gethsemane'],
['Gibeah'],
['Gibeon', ' Gibeonite', ' Gibeonites'],
['Gideon'],
['Gilead', ' Gileadite', ' Gileadites'],
['Gilgal'],
['Girgashites'],
['Golgotha'],
['Goliath'],
['Gomorrah'],
['Goshen'],
['Greece'],
['Greek', ' Grecian'],
['Habakkuk'],
['Hagar'],
['Haggai'],
['Ham'],
['Hamath', ' Hamathites', ' Lebo Hamath'],
['Hamor'],
['Hananiah'],
['Hannah'],
['Haran'],
['Hebron'],
['Herod', ' Herod Antipas'],
['Herodias'],
['Herod', ' Herod the Great'],
['Hezekiah'],
['Hilkiah'],
['Hittite', ' Hittites'],
['Hivite', ' Hivites'],
['Horeb'],
['Hosea'],
['Hoshea'],
['house of David'],
['Iconium'],
['Isaac'],
['Isaiah'],
['Ishmael', ' Ishmaelite', ' Ishmaelites'],
['Issachar'],
['Israel', ' Israelite', ' Israelites', ' Jacob'],
['James (brother of Jesus)'],
['James (son of Alphaeus)'],
['James (son of Zebedee)'],
['Japheth'],
['Jebus', ' Jebusite', ' Jebusites'],
['Jehoiachin'],
['Jehoiada'],
['Jehoiakim'],
['Jehoram', ' Joram'],
['Jehoshaphat'],
['Jehu'],
['Jephthah'],
['Jeremiah'],
['Jericho'],
['Jeroboam'],
['Jerusalem'],
['Jesse'],
['Jethro', ' Reuel'],
['Jezebel'],
['Jezreel', ' Jezreelite'],
['Joab'],
['Joash'],
['Job'],
['Joel'],
['John Mark'],
['John (the apostle)'],
['John (the Baptist)'],
['Jonah'],
['Jonathan'],
['Joppa'],
['Joram'],
['Jordan River', ' Jordan'],
['Joseph (NT)'],
['Joseph (OT)'],
['Joshua'],
['Josiah'],
['Jotham'],
['Judah'],
['Judas Iscariot'],
['Judas son of James'],
['Judea'],
['Kadesh', ' Kadesh-Barnea', ' Meribah Kadesh'],
['Kedar'],
['Kedesh'],
['Kidron Valley'],
['Israel', ' kingdom of Israel'],
['Judah', ' kingdom of Judah'],
['Korah', ' Korahite', ' Korahites'],
['Laban'],
['Lamech'],
['Lazarus'],
['Leah'],
['Lebanon'],
['Leviathan'],
['Levi', ' Levite', ' Levites', ' Levitical'],
['Lot'],
['Luke'],
['Lystra'],
['Maacah'],
['Macedonia'],
['Maker'],
['Malachi'],
['Manasseh'],
['man of God'],
['Martha'],
['Mary', ' the mother of Jesus'],
['Mary Magdalene'],
['Mary (sister of Martha)'],
['Matthew', ' Levi'],
['Mede', ' Medes', ' Media'],
['the sea', ' the Great Sea', ' the western sea', ' Mediterranean Sea'],
['Melchizedek'],
['Memphis'],
['Meshech'],
['Mesopotamia', ' Aram Naharaim'],
['Micah'],
['Michael'],
['Midian', ' Midianite', ' Midianites'],
['Miriam'],
['Mishael'],
['Mizpah'],
['Moab', ' Moabite', ' Moabitess'],
['Molech', ' Moloch'],
['Mordecai'],
['Moses'],
['Mount Hermon'],
['Mount of Olives'],
['Naaman'],
['Nahor'],
['Nahum'],
['Naphtali'],
['Nathan'],
['Nazareth', ' Nazarene'],
['Nebuchadnezzar'],
['Negev'],
['Nehemiah'],
['Nile River', ' River of Egypt', ' the Nile'],
['Nineveh', ' Ninevite'],
['Noah'],
['Obadiah'],
['Omri'],
['Paddan Aram'],
['Paran'],
['Paul', ' Saul'],
['Peor', ' Mount Peor', ' Baal Peor'],
['Perizzite'],
['Persia', ' Persians'],
['Peter', ' Simon Peter', ' Cephas'],
['Pharaoh', ' king of Egypt'],
['Philip', ' the evangelist'],
['Philippi', ' Philippians'],
['Philip', ' the apostle'],
['Philistia'],
['Philistines'],
['Phinehas'],
['Phoenicia', ' Syrophoenician'],
['Pilate'],
['Pontus'],
['Potiphar'],
['Priscilla', ' Prisca'],
['Rabbah'],
['Rachel'],
['Rahab'],
['Ramah'],
['Ramoth'],
['Rebekah'],
['Sea of Reeds', ' Red Sea'],
['Rehoboam'],
['Reuben'],
['Rimmon'],
['Rome', ' Roman'],
['Ruth'],
['Salt Sea', ' Dead Sea'],
['Samaria', ' Samaritan'],
['Samson'],
['Samuel'],
['Sarah', ' Sarai'],
['Saul'],
['Sea of Galilee', ' Sea of Kinnereth', ' lake of Gennesaret', ' Sea of Tiberias'],
['Sennacherib'],
['Seth'],
['Sharon', ' Plain of Sharon'],
['Sheba'],
['Shechem'],
['Shem'],
['Shiloh'],
['Shimei'],
['Shinar'],
['Sidon', ' Sidonians'],
['Silas', ' Silvanus'],
['Simeon'],
['Simon the Zealot'],
['Sinai', ' Mount Sinai'],
['Sodom'],
['Solomon'],
['Stephen'],
['Succoth'],
['Syria', ' Ashur'],
['Tamar'],
['Tarshish'],
['Tarsus'],
['Terah'],
['Thessalonica', ' Thessalonian', ' Thessalonians'],
['Thomas'],
['Timothy'],
['Tirzah'],
['Titus'],
['Troas'],
['Tubal'],
['Tychicus'],
['Tyre', ' Tyrians'],
['Ur'],
['Uriah'],
['Uzziah'],
['Vashti'],
['Zacchaeus'],
['Zadok'],
['Zebedee'],
['Zebulun'],
['Zechariah (NT)'],
['Zechariah (OT)'],
['Zedekiah'],
['Zephaniah'],
['Zerubbabel'],
['Zoar'],
['twelve tribes of Israel', ' twelve tribes of the children of Israel', ' twelve tribes'],
['abyss', ' bottomless pit'],
['acacia'],
['accuse', ' accuses', ' accused', ' accusing', ' accuser', ' accusers', ' accusation', ' accusations'],
['acknowledge', ' acknowledges', ' acknowledged', ' admit', ' admitted'],
['acquit', ' acquits', ' acquitted'],
['administration', ' administrator', ' administrators', ' administered', ' administering'],
['admonish', ' warned', ' aware', ' have been a witness against'],
['adversary', ' adversaries', ' enemy', ' enemies'],
['afflicted', ' afflict', ' afflicting', ' affliction', ' afflictions'],
['age', ' ages', ' aged'],
['alarm', ' alarms', ' alarmed'],
['alms'],
['altar of incense', ' incense altar'],
['amazed', ' amazement', ' astonished', ' marvel', ' marveled', ' marveling', ' marvelous', ' wonder', ' wonders', ' dumbfounded', ' confusion'],
['ambassador', ' ambassadors', ' representative', ' representatives'],
['anger', ' angered', ' angry'],
['anguish'],
['archer', ' archers'],
['armor', ' armory'],
['arrogant', ' arrogantly', ' arrogance'],
['ash', ' ashes', ' dust'],
['assembly', ' assemblies', ' assemble', ' assembled', ' congregation', ' meeting'],
['assign', ' assigned', ' assigning', ' assignment', ' assignments', ' reassign'],
['astray', ' go astray', ' went astray', ' lead astray', ' led astray', ' stray', ' strayed', ' strays'],
['avenge', ' avenges', ' avenged', ' avenging', ' avenger', ' revenge', ' vengeance'],
['awe', ' awesome'],
['ax', ' axes'],
['banquet'],
['barley'],
['barren', ' dry'],
['basket', ' baskets', ' basketfuls'],
['bear', ' bears', ' bearing', ' bearer', ' carry', ' carried'],
['bear', ' bears'],
['beast', ' beasts', ' animal', ' animals'],
['beg', ' begged', ' begging', ' beggar', ' needy'],
['betray', ' betrays', ' betrayed', ' betraying', ' betrayer', ' betrayers'],
['day', ' days'],
['hour', ' hours', ' moment', ' immediately', ' for a while'],
['month', ' months', ' monthly'],
['watch (biblical time)', ' watches'],
['week', ' weeks'],
['year', ' years'],
['blemish', ' blemishes', ' unblemished', ' defect'],
['bloodshed'],
['blot out', ' blots out', ' blotted out', ' wipe out', ' wipes out', ' wiped out'],
['bold', ' boldly', ' boldness', ' emboldened'],
['Book of Life'],
['bow', ' bows', ' bowed', ' bowing', ' bow down', ' bows down', ' bowed down', ' bowing down', ' knelt', ' are bent', ' have bent', ' bend', ' bend the knee', ' will bend'],
['bow and arrow', ' bows and arrows', ' a bow', ' arrows'],
['bread'],
['breastplate', ' breastplates', ' breastpiece'],
['breath', ' breathe', ' breathes', ' breathed', ' breathing'],
['bribe', ' bribes', ' bribed', ' bribery', ' payoffs'],
['bride', ' brides', ' bridal'],
['bridegroom', ' bridegrooms'],
['bronze'],
['burden', ' burdens', ' burdened', ' burdensome', ' heavy', ' utterances'],
['burnt offering', ' burnt offerings', ' offering by fire'],
['bury', ' buries', ' buried', ' burying', ' burial'],
['camel', ' camels'],
['captive', ' captives', ' captivate', ' captivated', ' captivity', ' catch', ' captured'],
['cast out', ' casting out', ' driving out', ' throw out', ' throwing out'],
['caught up', ' caught up with', ' catch up with'],
['cedar', ' cedars', ' cedarwood'],
['census'],
['chaff'],
['chariot', ' chariots', ' charioteers'],
['cherub', ' cherubim', ' cherubs'],
['chief', ' chiefs', ' officers'],
['chief priests'],
['chronicles'],
['citizen', ' citizens', ' citizenship'],
['clan', ' clans'],
['clothe', ' clothed', ' clothes', ' clothing', ' unclothed', ' garments'],
['comfort', ' comforts', ' comforted', ' comforting', ' comforter', ' comforters', ' uncomforted'],
['commander', ' commanders', ' leader', ' leaders'],
['commit', ' commits', ' committed', ' committing', ' commitment'],
['companion', ' companions', ' fellow worker', ' fellow workers', ' friend'],
['conceive', ' conceives', ' conceived', ' conception'],
['concubine', ' concubines'],
['confidence', ' confident', ' confidently'],
['confirm', ' confirms', ' confirmed', ' confirmation', ' guaranteed'],
['consume', ' consumes', ' consumed', ' consuming'],
['contempt', ' contemptible', ' not worth listening to'],
['corrupt', ' corrupts', ' corrupted', ' corrupting', ' corruption', ' corruptly', ' incorruptibility', ' depraved'],
['council', ' councils'],
['advice', ' advise', ' advised', ' advisor', ' advisors', ' counsel', ' counselor', ' counselors', ' counsels'],
['courage', ' courageous', ' encourage', ' encouraged', ' encouragement', ' take courage', ' discourage', ' discouraged', ' discouragement', ' discouraging', ' bravest'],
['court', ' courts', ' courtyard', ' courtyards'],
['cow', ' cows', ' bull', ' bulls', ' calf', ' calves', ' cattle', ' heifer', ' ox', ' oxen'],
['create', ' creates', ' created', ' creation', ' creator'],
['creature', ' creatures'],
['crime', ' crimes', ' criminal', ' criminals'],
['crown', ' crowns', ' crowned'],
['cry', ' cries', ' cried', ' crying', ' cry out', ' cries out', ' cried out', ' outcry', ' outcries', ' shouted', ' shouts'],
['cupbearer', ' cupbearers'],
['curtain', ' curtains'],
['cut off', ' cuts off', ' cutting off'],
['cypress'],
['darkness'],
['die', ' dies', ' died', ' dead', ' deadly', ' deadness', ' death', ' deaths', ' deathly'],
['deceive', ' deceives', ' deceived', ' deceiving', ' deceit', ' deceiver', ' deceivers', ' deceitful', ' deceitfully', ' deceitfulness', ' deception', ' deceptive', ' illusions'],
['declare', ' declares', ' declared', ' declaring', ' declaration', ' declarations', ' proclaim', ' proclaimed', ' proclaims', ' proclaiming', ' proclamation', ' proclamations', ' announce', ' announces', ' make clear'],
['decree', ' decrees', ' decreed'],
['dedicate', ' dedicates', ' dedicated', ' dedication', ' established', ' devoted'],
['deer', ' doe', ' does', ' fawns', ' roebuck', ' roebucks'],
['defile', ' defiles', ' defiled', ' defiling', ' be defiled', ' are defiled', ' was defiled', ' were defiled'],
['delight', ' delights', ' delighted', ' delightful', ' took pleasure'],
['deliver', ' delivers', ' delivered', ' delivering', ' deliverer', ' deliverance', ' hand over', ' handed over', ' turn over', ' released', ' rescue', ' rescues', ' rescued', ' be rescued', ' permit to escape', ' defended'],
['descend', ' descends', ' descended', ' descending', ' descendant', ' descendants', ' clans descended'],
['desecrate', ' desecrated', ' desecrating'],
['desert', ' deserts', ' wilderness', ' wildernesses'],
['desolate', ' desolation', ' desolations', ' alone', ' left all alone', ' deserted'],
['destiny'],
['destroy', ' destroys', ' destroyed', ' destroyer', ' destroyers', ' destroying', ' destruction'],
['detest', ' detested', ' detestable'],
['devastate', ' devastated', ' devastating', ' devastation', ' devastations'],
['devour', ' devours', ' devoured', ' devouring'],
['discern', ' discerned', ' discerning', ' discernment', ' distinguish', ' distinguishing'],
['disgrace', ' disgraces', ' disgraced', ' disgraceful'],
['dishonor', ' dishonors', ' dishonored', ' dishonorable'],
['disobey', ' disobeys', ' disobeyed', ' disobedience', ' disobedient'],
['disperse', ' dispersion', ' scatter', ' scattered', ' scatters', ' distributed'],
['divination', ' diviner', ' soothsaying', ' soothsayer'],
['divorce'],
['doctrine', ' beliefs'],
['donkey', ' mule'],
['doom'],
['doorpost'],
['dove', ' pigeon'],
['dream'],
['drink offering'],
['drunk', ' drunkard'],
['dung', ' manure'],
['eagle', ' eagles'],
['earth', ' earthen', ' earthly'],
['elder', ' elders', ' older'],
['endure', ' endures', ' endured', ' enduring', ' endurance'],
['enslave', ' enslaves', ' enslaved', ' bondage'],
['envy', ' covet', ' greedy'],
['evildoer', ' evildoers', ' evildoing'],
['exile', ' exiles', ' exiled'],
['exult', ' exults', ' exulting', ' exultant'],
['face', ' faces', ' faced', ' facing', ' facial', ' facedown', ' before'],
['false prophet', ' false prophets'],
['corrupt witness', ' false report', ' false testimony', ' false witness', ' false witnesses'],
['family', ' families'],
['famine', ' famines'],
['fast', ' fasts', ' fasted', ' fasting', ' fastings'],
['ancestor', ' ancestors', ' father', ' fathers', ' fathered', ' fathering', ' forefather', ' forefathers', ' grandfather'],
['feast', ' feasts', ' feasting'],
['fellowship offering', ' fellowship offerings'],
['festival', ' festivals'],
['fig', ' figs'],
['fir', ' firs'],
['fire', ' fires', ' firebrands', ' firepans', ' fireplaces', ' firepot', ' firepots'],
['firstborn'],
['firstfruits'],
['fishermen', ' fishers'],
['flocks', ' flock', ' flocking', ' herd', ' herds'],
['flood'],
['flute', ' flutes', ' pipe', ' pipes'],
['footstool'],
['alien', ' alienates', ' alienated', ' foreign', ' foreigner', ' foreigners'],
['foreknew', ' foreknowledge'],
['sexual immorality', ' immorality', ' immoral', ' fornication'],
['found', ' founded', ' founder', ' foundation', ' foundations'],
['fountain', ' fountains', ' source', ' sources', ' spring', ' springs'],
['frankincense'],
['free', ' frees', ' freed', ' freeing', ' freedom', ' freely', ' freeman', ' freewill', ' liberty'],
['freewill offering', ' freewill offerings'],
['fruit', ' fruits', ' fruitful', ' unfruitful'],
['furnace'],
['gate', ' gates', ' gate bars', ' gatekeeper', ' gatekeepers', ' gateposts', ' gateway', ' gateways'],
['generation'],
['giant', ' giants'],
['gird', ' girded', ' wrapped around', ' tied up', ' belt', ' tuck in belt', ' tucked in belt', ' put belt around', ' put belt on', ' had sashes around', ' fastening waistband around'],
['glean', ' gleans', ' gleaned', ' gleanings'],
['goat', ' goats', ' goatskins', ' scapegoat', ' kids'],
['gold', ' golden'],
['gossip', ' gossips', ' gossiper', ' talk nonsense'],
['govern', ' government', ' governments', ' governor', ' governors', ' provincial governors', ' proconsul', ' proconsuls'],
['grain', ' grains', ' grainfields'],
['grain offering', ' grain offerings'],
['grape', ' grapes', ' grapevine'],
['groan', ' groans', ' groaned', ' groaning', ' groanings'],
['guilt offering', ' guilt offerings'],
['hail', ' hails', ' hailstones', ' hailstorm'],
['hand', ' hands', ' handed', ' handing', ' by the hand of', ' lay a hand on', ' lays his hand on', ' from the hand of'],
['hang', ' hangs', ' hanged', ' hanging', ' hangings', ' hung'],
['hard', ' harder', ' hardest', ' harden', ' hardens', ' hardened', ' hardening', ' hardness'],
['harp', ' harps', ' harpist', ' harpists'],
['harvest', ' harvests', ' harvested', ' harvesting', ' harvester', ' harvesters'],
['haughty', ' lofty'],
['head', ' heads', ' forehead', ' foreheads', ' baldhead', ' headfirst', ' headbands', ' headscarves', ' beheaded'],
['cure', ' cured', ' heal', ' heals', ' healed', ' healing', ' healings', ' healer', ' health', ' healthy', ' unhealthy'],
['heir', ' heirs'],
['high place', ' high places'],
['holy city', ' holy cities'],
['honey', ' honeycomb'],
['hoof', ' hoofs', ' hooves'],
['horn', ' horns', ' horned'],
['horror', ' horrors', ' horrible', ' horribly', ' horrified', ' horrifying', ' source of horror', ' object of horror'],
['horse', ' horses', ' warhorse', ' warhorses', ' horseback'],
['horseman', ' horsemen'],
['hour', ' hours'],
['house', ' houses', ' housetop', ' housetops', ' housekeepers', ' dwelling'],
['household', ' household members', ' members of household', ' households'],
['humiliate', ' humiliated', ' humiliation'],
['idol'],
['image', ' images', ' carved image', ' carved images', ' cast metal images', ' figure', ' figures', ' carved figure', ' carved figures', ' cast metal figure', ' cast metal figures', ' statue'],
['imitate', ' imitator', ' imitators'],
['incense', ' incenses'],
['inquire', ' inquires', ' inquired', ' inquiries'],
['instruct', ' instructs', ' instructed', ' instructing', ' instruction', ' instructions', ' instructors'],
['integrity'],
['interpret', ' interprets', ' interpreted', ' interpreting', ' interpretation', ' interpretations', ' interpreter'],
['Jewish authorities', ' Jewish leader'],
['joy', ' joyful', ' joyfully', ' joyfulness', ' enjoy', ' enjoys', ' enjoyed', ' enjoying', ' enjoyment', ' rejoice', ' gladness', ' rejoices', ' rejoiced', ' rejoicing'],
['Judaism', ' Jewish religion'],
['judge', ' judges'],
['kin', ' kinfolk', ' kindred', ' kinsfolk', ' kinsman', ' kinsmen'],
['kind', ' kinds', ' kindness', ' kindnesses'],
['king', ' kings', ' kingship', ' kingly'],
['kingdom', ' kingdoms'],
['kiss', ' kisses', ' kissed', ' kissing'],
['know', ' knows', ' knew', ' knowing', ' knowledge', ' known', ' make known', ' makes known', ' made known', ' unknown', ' seeing', ' saw'],
['labor', ' labors', ' labored', ' laborer', ' laborers', ' work', ' worked', ' hard work'],
['labor pains', ' in labor', ' birth pains', ' pains of childbirth', ' labors in pain together'],
['lamp', ' lamps', ' torch', ' torches'],
['lampstand', ' lampstands'],
['law', ' laws', ' lawgiver', ' lawbreaker', ' lawbreakers', ' lawsuit', ' lawyer', ' principle', ' principled', ' principles'],
['lawful', ' lawfully', ' unlawful', ' not lawful', ' lawless', ' lawlessness'],
['lawless'],
['learned men', ' astrologers'],
['leopard', ' leopards'],
['leper', ' lepers', ' leprosy', ' leprous'],
['epistle', ' letter', ' letters'],
['light', ' lights', ' lighting', ' lightning', ' daylight', ' sunlight', ' twilight', ' enlighten', ' enlightened'],
['like', ' liken', ' likeness', ' likenesses', ' likewise', ' alike', ' unlike', ' as if'],
['lions', ' lion', ' lioness', ' lionesses'],
['livestock'],
['locust', ' locusts'],
['loins', ' were descendants', ' waist'],
['lots', ' casting lots'],
['lover', ' lovers'],
['lowly', ' lowliest', ' lowliness'],
['lust', ' lusts', ' lusted', ' lusting', ' lustful', ' passions', ' desires'],
['lute', ' lyre', ' lyres'],
['magic', ' magical', ' magician', ' magicians', ' who talk with spirits', ' who talks with spirits'],
['magistrate', ' magistrates'],
['magnify'],
['manager', ' managers', ' steward', ' stewards', ' stewardship'],
['mealoffering'],
['mediator'],
['meditate', ' meditates', ' meditation', ' thoughts'],
['meek', ' meekness'],
['melt', ' melted', ' melting', ' melts', ' molten'],
['member', ' members', ' body parts'],
['memorial', ' memorial offering'],
['messenger', ' messengers'],
['might', ' mighty', ' mightier', ' mightily', ' mighty works', ' mighty host'],
['mind', ' minds', ' minded', ' mindful', ' likeminded'],
['mock', ' mocks', ' mocked', ' mocking', ' mocker', ' mockers', ' mockery', ' ridicule', ' ridiculed', ' scoff at', ' scoffed at', ' taunting song', ' laughingstock'],
['mold', ' molds', ' molded', ' molding', ' molder', ' moldy'],
['mourn', ' mourns', ' mourned', ' mourning', ' mourner', ' mourners', ' mournful', ' mournfully', ' weep', ' weeping'],
['multiply', ' multiplies', ' multiplied', ' multiplying', ' multiplication', ' increase'],
['mystery', ' mysteries', ' hidden truth', ' hidden truths'],
['nation', ' nations'],
['neighbor', ' neighbors', ' neighborhood', ' neighboring'],
['new moon', ' new moons'],
['noble', ' nobles', ' nobleman', ' noblemen', ' nobility', ' royal official'],
['oak', ' oaks'],
['oath', ' oaths', ' swear', ' swears', ' swearing', ' swear by', ' swears by'],
['obey', ' obeys', ' obeyed', ' obeying', ' obedience', ' obedient', ' obediently'],
['offspring'],
['oil'],
['olive', ' olives'],
['on high', ' in the highest'],
['oppress', ' oppresses', ' oppressed', ' oppressing', ' oppression', ' oppressive', ' oppressor', ' oppressors'],
['ordain', ' ordained', ' ordinary', ' ordination', ' planned long ago', ' set up', ' prepared', ' bring about', ' marked out'],
['ordinance', ' ordinances', ' regulation', ' offering', ' regulations', ' requirements', ' strict law', ' permanent things', ' customs'],
['oversee', ' oversees', ' overseen', ' overseer', ' overseers'],
['overtake', ' overtakes', ' overtaken', ' overtook'],
['pagan', ' pagans'],
['palace', ' palaces'],
['palm', ' palms'],
['pardon'],
['partial', ' be partial', ' partiality'],
['patient', ' patiently', ' patience', ' impatient'],
['patriarch', ' patriarchs'],
['peace', ' peaceful', ' peacefully', ' peaceable', ' peacemakers'],
['peace offering', ' peace offerings'],
['people group', ' peoples', ' the people', ' a people', ' people'],
['perfect', ' perfected', ' perfecter', ' perfection', ' perfectly', ' complete'],
['persecute', ' persecuted', ' persecuting', ' persecution', ' persecutions', ' persecutor', ' persecutors', ' chase', ' pursuers'],
['persevere', ' perseverance'],
['perverse', ' perversely', ' perversion', ' perversions', ' perversities', ' pervert', ' perverts', ' perverted', ' perverting', ' malicious', ' maliciously', ' devious', ' dishonest', ' crooked ways', ' distortion'],
['pierce', ' pierces', ' pierced', ' piercing'],
['pig', ' pigs', ' pork', ' swine'],
['column', ' columns', ' pillar', ' pillars'],
['pit', ' pits', ' pitfall', ' trenches'],
['plague', ' plagues'],
['plea', ' pleas', ' plead', ' pleads', ' pleaded', ' pleading', ' pleadings'],
['pledge', ' pledged', ' pledges'],
['plow', ' plows', ' plowed', ' plowing', ' plowers', ' plowman', ' plowmen', ' plowshares', ' unplowed'],
['pomegranate', ' pomegranates'],
['possess', ' possesses', ' possessed', ' possessing', ' possession', ' possessions', ' dispossess', ' owned title'],
['praise', ' praises', ' praised', ' praising', ' praiseworthy'],
['preach', ' preached', ' preaching', ' preacher'],
['precious', ' valuable', ' expensive', ' fine'],
['prey', ' prey on'],
['prince', ' princes', ' princess', ' princesses', ' officials', ' high officials'],
['prison', ' prisoner', ' prisoners', ' prisons', ' imprison', ' imprisons', ' imprisoned', ' imprisonment', ' imprisonments'],
['proclaim'],
['profane', ' profaned', ' profaning'],
['profit', ' profits', ' profitable', ' unprofitable', ' useful', ' progress', ' benefit'],
['prosper', ' prospered', ' prospering', ' prosperity', ' prosperous'],
['prostitute', ' prostituted', ' prostitutes', ' harlot', ' whored'],
['prostrate', ' prostrated'],
['proud', ' proudly', ' pride', ' prideful'],
['proverb', ' proverbs'],
['province', ' provinces', ' provincial'],
['provoke', ' provokes', ' provoked', ' provoking', ' provocation'],
['prudence', ' prudent', ' prudently'],
['puffed up', ' puffs up'],
['punish', ' punishes', ' punished', ' punishing', ' punishment', ' unpunished'],
['purple'],
['push', ' pushed', ' pushing'],
['qualify', ' qualified', ' disqualified'],
['queen', ' queens'],
['quench', ' quenched', ' unquenchable'],
['rage', ' rages', ' raged', ' raging', ' enrage'],
['raise', ' raises', ' raised', ' rise', ' risen', ' arise', ' arose', ' got up', ' stir up', ' stirring up', ' stirred up'],
['reap', ' reaps', ' reaped', ' reaper', ' reapers', ' reaping'],
['rebel', ' rebels', ' rebelled', ' rebelling', ' rebellion', ' rebellious', ' rebelliousness'],
['rebuke', ' rebukes', ' rebuked'],
['receive', ' receives', ' received', ' receiving', ' receiver', ' welcome', ' welcomed', ' taken up', ' acceptance'],
['reed', ' reeds'],
['refuge', ' refugee', ' refugees', ' shelter', ' shelters', ' sheltered', ' sheltering'],
['reign', ' reigns', ' reigned', ' reigning'],
['reject', ' rejects', ' rejected', ' rejecting', ' rejection'],
['rejoice'],
['renown', ' renowned', ' famous'],
['report', ' reports', ' reported', ' reputation'],
['reproach', ' reproaches', ' reproached', ' reproaching', ' reproachfully', ' insult', ' insults', ' insulted'],
['rest', ' rests', ' rested', ' resting', ' restless', ' relief'],
['return', ' returns', ' returned', ' returning', ' turning back', ' return back'],
['revere', ' revered', ' reverence', ' reverences', ' reverent', ' respect'],
['reward', ' rewards', ' rewarded', ' rewarding', ' rewarder', ' prize', ' deserve', ' future', ' payment', ' wages'],
['robe', ' robes', ' robed'],
['rod', ' rods'],
['royal', ' royalty', ' kings'],
['ruin', ' ruins', ' ruined'],
['rule', ' rules', ' ruled', ' ruler', ' rulers', ' ruling', ' rulings', ' overrules', ' overruled'],
['run', ' runs', ' ran', ' runner', ' runners', ' running', ' rushed', ' quickly went', ' spilling over', ' be spilled', ' flows', ' leap', ' climb', ' moving swiftly', ' flow'],
['sackcloth'],
['sacred'],
['sacrifice', ' sacrifices', ' sacrificed', ' sacrificing', ' offering', ' offerings'],
['sandal', ' sandals'],
['scepter', ' scepters'],
['scroll', ' scrolls'],
['seacow'],
['seal', ' seals', ' sealed', ' sealing', ' unsealed'],
['seed', ' semen'],
['seek', ' seeks', ' seeking', ' sought', ' look for', ' searches for', ' seek advice'],
['seize', ' seizes', ' seized', ' seizure'],
['selah'],
['self-control', ' self-controlled', ' controlled self'],
['send', ' sends', ' sent', ' sending', ' send out', ' sends out', ' sent out', ' sending out'],
['serpent', ' serpents', ' snake', ' snakes', ' viper', ' vipers', ' reptile'],
['servant', ' servants', ' hired servant', ' hired servants', ' female servant', ' female servants', ' servant girl', ' servant girls', ' slave', ' slaves', ' slave girl', ' slaved', ' slavery', ' maidservants', ' serve', ' serves', ' served', ' serving', ' service', ' services', ' eyeservice'],
['serve'],
['had relations with', ' lovemaking', ' sleep with', ' sleeps with', ' slept with', ' sleeping with'],
['shadow', ' shadows', ' overshadow', ' overshadowed', ' shade'],
['shame', ' shames', ' shamed', ' shameful', ' shamefully', ' shameless', ' shamelessly', ' ashamed', ' unashamed', ' causing to mock'],
['ewe', ' ewes', ' ram', ' rams', ' sheep', ' sheepfold', ' sheepfolds', ' sheepshearers', ' sheepskins'],
['shepherd', ' shepherds', ' shepherded', ' shepherding', ' chief shepherd'],
['shield', ' shields', ' shielded'],
['shrewd', ' shrewdly'],
['siege', ' besiege', ' besieged', ' besiegers', ' besieging', ' siegeworks'],
['silver'],
['sin offering', ' sin offerings'],
['sister', ' sisters'],
['skull'],
['slay', ' slain', ' kill', ' killed', ' murder', ' murdered', ' murders'],
['slander', ' slanders', ' slandered', ' slanderers', ' slandering', ' slanderous'],
['slaughter', ' slaughters', ' slaughtered', ' slaughtering'],
['asleep', ' fall asleep', ' fell asleep', ' fallen asleep', ' sleep', ' sleeps', ' slept', ' sleeping', ' sleeper', ' sleepless', ' sleepy'],
['snare', ' snares', ' ensnare', ' ensnares', ' ensnared', ' entrap', ' trap', ' traps', ' trapped'],
['snow', ' snowed', ' snowing'],
['sorcerer', ' sorcerers', ' sorceress', ' sorcery', ' sorceries', ' witchcraft', ' someone who talked with the dead'],
['plant', ' plants', ' planted', ' planting', ' implanted', ' replanted', ' transplanted', ' sow', ' sows', ' sowed', ' sown', ' sowing'],
['spear', ' spears', ' spearmen'],
['splendor', ' splendid'],
['staff', ' staffs', ' clubs'],
['statute', ' statutes'],
['stiff-necked', ' stubborn', ' stubbornly', ' stubbornness'],
['storehouse', ' storehouses'],
['strength', ' strengthen', ' strengthens', ' strengthened', ' strengthening', ' strong', ' valor', ' influence'],
['strife', ' disputes', ' quarrel', ' arguing', ' conflict', ' conflicts'],
['strong drink', ' strong drinks'],
['stronghold', ' strongholds', ' fortifications', ' fortified', ' fortress', ' fortresses'],
['stumble', ' stumbles', ' stumbled', ' stumbling', ' reeling'],
['stumbling block', ' stumbling blocks', ' stone of stumbling'],
['subject', ' subjects', ' subjected', ' subject to', ' be subject to', ' subjection', ' be subjected', ' are subjected', ' was subjected', ' were subjected', ' in subjection to', ' subdue', ' forced to become slaves'],
['submit', ' submits', ' submitted', ' submitting', ' submission', ' in submission'],
['suffer', ' suffers', ' suffered', ' suffering', ' sufferings'],
['sulfur', ' sulfurous'],
['sweep', ' sweeps', ' swept', ' sweeping', ' pursued'],
['sword', ' swords', ' swordsmen'],
['tax', ' taxes', ' taxed', ' taxing', ' taxation', ' taxpayers', ' tax collector', ' tax collectors'],
['taxcollector'],
['teach', ' teaches', ' taught', ' teaching', ' teachings', ' untaught'],
['teacher', ' teachers', ' Teacher'],
['Ten Commandments'],
['tent', ' tents', ' tentmakers'],
['tenth', ' tenths', ' tithe', ' tithes'],
['tent of meeting'],
['terror', ' terrorize', ' terrorized', ' terrors', ' terrify', ' terrified', ' terrifying', ' frightened', ' panic', ' terrifying events'],
['thief', ' thieves', ' rob', ' robs', ' robbed', ' robber', ' robbers', ' robbery', ' robbing', ' bandits', ' violent'],
['thorn', ' thornbush', ' thornbushes', ' thorns', ' thistle', ' thistles'],
['thresh', ' threshes', ' threshed', ' threshing'],
['threshold', ' thresholds'],
['throne', ' thrones', ' enthroned'],
['time', ' timely', ' times', ' untimely', ' date'],
['grave', ' gravediggers', ' graves', ' tomb', ' tombs', ' burial place'],
['tongue', ' tongues', ' language'],
['torment', ' tormented', ' tormenting', ' tormentors', ' agony', ' torturers'],
['tradition', ' traditions'],
['trample', ' tramples', ' trampled', ' trampling'],
['trance'],
['tremble', ' trembles', ' trembled', ' trembling', ' staggering'],
['trial', ' trials', ' proving'],
['tribe', ' tribes', ' tribal', ' tribesmen'],
['tribulation', ' distresses'],
['tribute', ' contribution', ' fined'],
['trouble', ' troubles', ' troubled', ' troubling', ' troublemaker', ' troublesome', ' disturbing', ' upset', ' shaken', ' hardship'],
['trumpet', ' trumpets', ' trumpeters'],
['tunic', ' tunics'],
['turn', ' turns', ' turn away', ' turns away', ' turn back', ' turns back', ' turned', ' turned away', ' turned back', ' turning', ' turning away', ' direct'],
['understand', ' understands', ' understood', ' understanding', ' thinking'],
['unprofitable'],
['vain', ' vanity', ' futile', ' empty', ' useless', ' meaningless'],
['veil', ' veils', ' veiled', ' unveiled'],
['vine', ' vines'],
['vineyard', ' vineyards'],
['virgin', ' virgins', ' virginity'],
['vision', ' visions', ' envision'],
['voice', ' voices'],
['walk', ' walks', ' walked', ' walking'],
['soldier', ' soldiers', ' warrior', ' warriors', ' army'],
['waste', ' wastes', ' wasted', ' wasting', ' wasteland', ' wastelands', ' becomes weak', ' devastates'],
['watch', ' watches', ' watched', ' watching', ' watchman', ' watchmen', ' watchful', ' take heed', ' beware', ' watch out', ' guard'],
['watchtower', ' watchtowers', ' tower'],
['water', ' waters', ' watered', ' watering'],
['cistern', ' cisterns', ' well', ' wells'],
['wheat'],
['wine', ' wines', ' wineskin', ' wineskins', ' new wine'],
['winepress'],
['winnow', ' winnows', ' winnowed', ' winnowing', ' sift', ' sifting'],
['wise men'],
['wolf', ' wolves', ' wild dogs'],
['womb', ' wombs'],
['word', ' words', ' speech'],
['it is written'],
['wrong', ' wrongs', ' wronged', ' wrongly', ' wrongfully', ' wrongdoer', ' wrongdoing', ' mistreat', ' mistreated', ' hurt', ' hurts', ' hurting', ' hurtful', ' injury', ' harm', ' harmful'],
['yeast', ' leaven', ' leavens', ' leavened', ' unleavened'],
['yoke', ' yokes', ' yoked'],
]
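# A minimal usage sketch (assumptions: the per-language lists in this module
# appear to be index-aligned with the English list above, so entry i of each
# list holds the variants of the same term; `build_term_map` is illustrative
# and not part of the original data).
def build_term_map(source_tws, target_tws):
    """Map each source-term variant to the aligned target-language variants."""
    term_map = {}
    for source_variants, target_variants in zip(source_tws, target_tws):
        for variant in source_variants:
            # Variants after the first carry a leading space in this data,
            # so strip before using them as lookup keys.
            term_map[variant.strip()] = [t.strip() for t in target_variants]
    return term_map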
asm_tws = []
ben_tws = []
guj_tws = []
hin_tws = [['เคเฅเคฃเคพ', ' เคเคฟเคจเฅเคจเฅ', ' เคเคฟเคจเฅเคจเคพ'],
['เคฒเฅเคชเคพเคฒเค', ' เคเฅเคฆ เคฒเฅเคจเคพ', ' เคฆเคคเฅเคคเค'],
['เคตเฅเคฏเคญเคฟเคเคพเคฐ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเฅ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเฅ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเคฟเคฃเฅ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเฅ', 'เคตเฅเคฏเคญเคฟเคเคพเคฐเคฟเคฃเคฟเคฏเฅเค'],
['เคธเคฐเฅเคตเคถเคเฅเคคเคฟเคฎเคพเคจ'],
['เคตเฅเคฆเฅ', ' เคตเฅเคฆเคฟเคฏเฅเค'],
['เคเคฎเฅเคจ', ' เคธเค เคฎเฅเค'],
['เคธเฅเคตเคฐเฅเคเคฆเฅเคค', ' เคธเฅเคตเคฐเฅเคเคฆเฅเคคเฅเค', ' เคชเฅเคฐเคงเคพเคจ เคธเฅเคตเคฐเฅเคเคฆเฅเคค'],
['เคเคญเคฟเคทเฅเค เคเคฐเคจเคพ', ' เคเคญเคฟเคทเคฟเคเฅเคค', ' เคเคญเคฟเคทเฅเค'],
['เคฎเคธเฅเคน เคเคพ เคตเคฟเคฐเฅเคงเฅ', ' เคฎเคธเฅเคน เคเฅ เคตเคฟเคฐเฅเคงเฅ'],
['เคชเฅเคฐเฅเคฐเคฟเคค', ' เคชเฅเคฐเฅเคฐเคฟเคคเฅเค', ' เคชเฅเคฐเฅเคฐเคฟเคคเคพเค'],
['เค เคนเคฐเคพเค', ' เคจเคฟเคฏเฅเคเฅเคค เคเคฐเคจเคพ', ' เคจเคฟเฅเคฏเคเฅเคค เคเคฟเคฏเคพ'],
['เคเคนเคพเฅ'],
['เคตเคพเคเคพ เคเคพ เคธเคจเฅเคฆเฅเค', ' เคฏเคนเฅเคตเคพ เคเคพ เคธเคจเฅเคฆเฅเค'],
['เคชเฅเคฐเคพเคฏเคถเฅเคเคฟเคค', ' เคชเฅเคฐเคพเคฏเคถเฅเคเคฟเคค เคเคฐเคจเคพ', ' เคชเฅเคฐเคพเคฏเคถเฅเคเคฟเคค เคเคฟเคฏเคพ เคเคพเค', ' เคชเฅเคฐเคพเคฏเคถเฅเคเคฟเคค เคเคฟเคฏเคพ'],
['เคชเฅเคฐเคพเคฏเคถเฅเคเคฟเคค เคเคพ เคขเคเคจเคพ'],
['เคเคงเคฟเคเคพเคฐเฅ', ' เคเคงเคฟเคเคพเคฐเคฟเคฏเฅเค'],
['เคฌเคชเคคเคฟเคธเฅเคฎเคพ เคฆเฅเคจเคพ', ' เคฌเคชเคคเคฟเคธเฅเคฎเคพ เคฒเคฟเคฏเคพ', ' เคฌเคชเคคเคฟเคธเฅเคฎเคพ'],
['เคตเคฟเคถเฅเคตเคพเคธ', ' เคตเคฟเคถเฅเคตเคพเคธ เคเคฐเฅ', ' เคตเคฟเคถเฅเคตเคพเคธ เคเคฟเคฏเคพ', ' เคตเคฟเคถเฅเคตเคพเคธ'],
['เคตเคฟเคถเฅเคตเคพเคธเฅ'],
['เคชเฅเคฐเคฟเคฏ'],
['เคชเคนเคฟเคฒเฅเค เฅ เคเคพ เคเคงเคฟเคเคพเคฐ'],
['เคจเคฟเคฐเฅเคฆเฅเคท'],
['เคจเคฟเคจเฅเคฆเคพ', ' เคจเคฟเคจเฅเคฆเคพ', ' เคจเคฟเคจเฅเคฆเคพ เคเฅ', ' เคจเคฟเคจเฅเคฆเคพ เคเคฐเคจเคพ', ' เคจเคฟเคจเฅเคฆเค'],
['เคเคถเฅเคท', ' เคงเคจเฅเคฏ', ' เคเคถเฅเคฐเฅเคตเคพเคฆ'],
['เคฒเคนเฅ'],
['เคเคฎเคฃเฅเคก', ' เคเคฎเคฃเฅเคก เคเคฐเคจเคพ', ' เคเคฎเคฃเฅเคกเฅ'],
['เคฆเฅเคน', ' เคถเคฐเฅเคฐเฅเค'],
['เคฌเคพเคเคงเคจเคพ', ' เคฌเคจเฅเคงเคจ', ' เคฌเคพเคเคงเคพ'],
['เคจเค เคธเคฟเคฐเฅ เคธเฅ เคเคจเฅเคฎ เคฒเฅเคจเคพ', ' เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคธเฅ เคเคจเฅเคฎเคพ เคนเฅ', ' เคจเค เคเคจเฅเคฎ'],
['เคญเคพเค', ' เคญเคพเคเคฏเฅเค'],
['เคฌเฅเคฒเคพเคจเคพ', ' เคชเฅเคเคพเคฐเฅ', ' เคชเฅเคเคพเคฐเคจเคพ', ' เคเคนเคฒเคพเคคเคพ'],
['เคธเฅเคฌเฅเคฆเคพเคฐ', ' เคธเฅเคฌเฅเคฆเคพเคฐเฅเค'],
['เคฌเคเฅเคเฅ', ' เคฌเคพเคฒเค'],
['เคฎเคธเฅเคน', ' เคฎเคธเฅเคนเคพ'],
['เคฎเคธเฅเคนเฅ'],
['เคเคฒเฅเคธเคฟเคฏเคพ', ' เคเคฒเฅเคธเคฟเคฏเคพเคเค', ' เคเคฒเฅเคธเคฟเคฏเคพ'],
['เคเคคเคจเคพ เคเคฐเคจเคพ', ' เคเคคเคจเคพ เคเคฟเคฏเคพ', ' เคเคคเคจเคพ'],
['เคถเฅเคฆเฅเคง', ' เคถเฅเคฆเฅเคง เคเคฐเฅเคเคพ', ' เคถเฅเคฆเฅเคง เคเคฟเคฏเคพ', ' เคถเฅเคฆเฅเคง เคเคฐเคจเคพ', ' เคถเฅเคฆเฅเคง', ' เคถเฅเคฆเฅเคง เคนเฅเคจเฅ', ' เคงเฅเคฒเคพเค', ' เคงเฅเคฒเคพเค', ' เคงเฅเคฏเคพ', ' เคงเฅเคฏเคพ'],
['เคเคเฅเคเคพ', ' เคเคเฅเคเคพเคเค', ' เคเคเฅเคเคพ เคฆเฅ', ' เคเคเฅเคเคพ', ' เคเคเฅเคเคพเคเค'],
['เคคเคฐเคธ', ' เคฆเคฏเคพเคฒเฅ'],
['เคฆเฅเคท เคฒเคเคพเคจเคพ', ' เคฆเฅเคทเฅ', ' เคจเคฟเคจเฅเคฆเคพ', ' เคฆเคฃเฅเคก เคเฅ เคเคเฅเคเคพ'],
['เคฎเคพเคจ เคฒเฅเคเคพ', ' เคฎเคพเคจเคเคฐ', ' เคฎเคพเคจ เคฒเฅเคเคพ', ' เคเคเคเฅเคเคพเคฐ'],
['เคตเคฟเคตเฅเค', ' เคตเคฟเคตเฅเค'],
['เคชเคตเคฟเคคเฅเคฐ เคเคฐเคจเคพ', ' เคชเคตเคฟเคคเฅเคฐ เค เคนเคฐเฅเคเคพ', ' เคธเคเคธเฅเคเคพเคฐ'],
['เคเฅเคจเฅ เคเคพ เคชเคคเฅเคฅเคฐ', ' เคชเฅเคฐเคงเคพเคจ'],
['เคตเคพเคเคพ', ' เคตเคพเคเคพเคเค'],
['เคเคฐเฅเคฃเคพ'],
['เคเฅเคฐเฅเคธ'],
['เคเฅเคฐเฅเคธ เคชเคฐ เคเคขเคผเคพ', ' เคเฅเคฐเฅเคธ เคชเคฐ เคเคขเคผเคพเคฏเคพ'],
['เคถเฅเคฐเคพเคช', ' เคถเฅเคฐเคพเคชเคฟเคค', ' เคถเฅเคฐเคพเคช เคฆเฅ', ' เคเฅเคธเคคเคพ เคนเฅ'],
['เคธเคฟเคฏเฅเคฏเฅเคจ เคเฅ เคฌเฅเคเฅ'],
['เคชเฅเคฐเคญเฅ เคเคพ เคฆเคฟเคจ', ' เคฏเคนเฅเคตเคพ เคเคพ เคฆเคฟเคจ'],
['เคธเฅเคตเค', ' เคธเฅเคตเคเฅเค'],
['เคฆเฅเคทเฅเคเคพเคคเฅเคฎเคพ', ' เคฆเฅเคทเฅเค เคเคคเฅเคฎเคพ', ' เคเคถเฅเคฆเฅเคง เคเคคเฅเคฎเคพ'],
['เคฆเฅเคทเฅเคเคพเคคเฅเคฎเคพเคเค เคฅเฅเค'],
['เคเฅเคฒเคพ', ' เคเฅเคฒเฅ'],
['เคคเคพเคกเคผเคจเคพ', ' เคคเคพเฅเคจเคพ เคฆเฅเคคเคพ', ' เคคเคพเคกเคผเคจเคพ เคเคฐเคคเฅ', ' เคเคคเฅเคฎ เคธเคเคฏเคฎ'],
['เคเคถเฅเคตเคฐเฅเคฏ'],
['เคชเฅเคฐเคญเฅเคคเคพ'],
['เคเฅเคจเคพ เคนเฅเค', ' เคเฅเคจเฅ เคนเฅเค', ' เคเฅเคจเคจเคพ', ' เคเฅเคจเฅ เคฒเฅเค', ' เคเฅเคจเคพ เคนเฅเค', ' เคเฅเคจเคจเคพเฅค'],
['เคเคชเฅเคฆ'],
['เคธเคจเคพเคคเคจ', ' เคเคจเคจเฅเคค', ' เคเคจเคเคค เคเคพเคฒ'],
['เคเฅเคเฅ', ' เคเฅเคเฅเค'],
['เคธเฅเคธเคฎเคพเคเคพเคฐ เคชเฅเคฐเคเคพเคฐเค', ' เคธเฅเคธเคฎเคพเคเคพเคฐ เคธเฅเคจเคพเคจเฅเคตเคพเคฒเฅ'],
['เคฌเฅเคฐเคพเค', ' เคฆเฅเคทเฅเค', ' เคฆเฅเคทเฅเคเคคเคพ'],
['เคเคเคเคพ', ' เคเคเคเคพ เคเคฟเคฏเคพ', ' เคฌเคขเคผเคพเคคเคพ', ' เคเคจเคจเฅเคฆ'],
['เคธเคฎเคเคพ', ' เคเคชเคฆเฅเคถ'],
['เคตเคฟเคถเฅเคตเคพเคธ'],
['เคตเคฟเคถเฅเคตเคพเคธเคฏเฅเคเฅเคฏ', ' เคตเคฟเคถเฅเคตเคพเคธเคฏเฅเคเฅเคฏเคคเคพ'],
['เคญเคเคเคจเฅเคตเคพเคฒเฅ', ' เคตเคฟเคถเฅเคตเคพเคธเคเคพเคค'],
['เคฆเฅเคต', ' เคฆเฅเคตเฅเค', ' เคฆเฅเคตเฅ'],
['เคเคจเฅเคเฅเคฐเคน', ' เคชเคเฅเคท', ' เคชเคเฅเคทเคชเคพเคค', ' เคชเคเฅเคท'],
['เคกเคฐ', ' เคญเคฏ', ' เคกเคฐเคจเคพ'],
['เคธเคนเคญเคพเคเคฟเคคเคพ'],
['เคชเคตเคฟเคคเฅเคฐ เคเคคเฅเคฎเคพ เคธเฅ เคญเคฐ เคเค'],
['เคฎเคพเคเคธ'],
['เคฎเฅเคฐเฅเค', ' เคฎเฅเคฐเฅเค เคฒเฅเค', ' เคฎเฅเคฐเฅเค', ' เคฎเฅเคฐเฅเคเคคเคพ'],
['เคธเคฐเฅเคตเคฆเคพ'],
['เคเฅเคทเคฎเคพ เคเคฐ', ' เคเฅเคทเคฎเคพ เคเคฐเคคเคพ', ' เคเฅเคทเคฎเคพ เคเคฟเคฏเคพ', ' เคเฅเคทเคฎเคพ'],
['เคเฅเคกเคผเคจเคพ', ' เคเฅเคกเคผ เคฆเฅเคคเคพ', ' เคเฅเคกเคผ เคฆเคฟเคฏเคพ', ' เคคเฅเคฏเคพเค เคเคฐ'],
['เคชเฅเคฐเคพ เคเคฐ', ' เคชเฅเคฐเคพ เคนเฅเค'],
['เคเคจเฅเคฏเคเคพเคคเคฟ', ' เคเคจเฅเคฏเคเคพเคคเคฟเคฏเฅเค'],
['เคฆเคพเคจ', ' เคญเฅเคเคเฅเค'],
['เคฎเคนเคฟเคฎเคพ เคเคฐเฅ', ' เคฎเคนเคฟเคฎเคพ เคนเฅเคคเฅ เคนเฅ'],
['เคฎเคนเคฟเคฎเคพ', ' เคฎเคนเคฟเคฎเคพเคฎเคฏ'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ'],
['เคงเคฐเฅเคฎเฅ', ' เคญเคเฅเคคเคฟ'],
['เคชเคฐเคฎเฅเคถเฅเคตเคฐ เคชเคฟเคคเคพ', ' เคธเฅเคตเคฐเฅเคเฅเคฏ เคชเคฟเคคเคพ', ' เคชเคฟเคคเคพ'],
['เคเคเฅเคเคพ', ' เคญเคฒเคพเค'],
['เคถเฅเคญ เคธเคฎเคพเคเคพเคฐ', ' เคธเฅเคธเคฎเคพเคเคพเคฐ'],
['เคเคจเฅเคเฅเคฐเคน', ' เคเคจเฅเคเฅเคฐเคนเคเคพเคฐเฅ'],
['เคฆเฅเคท', ' เคฆเฅเคทเฅ เค เคนเคฐเคพ'],
['เคเคงเฅเคฒเฅเค', ' เคเคฅเคพเคน-เคเฅเคฃเฅเคก'],
['เคนเฅเคฆเคฏ', ' เคฎเคจ'],
['เคธเฅเคตเคฐเฅเค', ' เคเคเคพเคถ', ' เคเคเคพเคถเคฎเคฃเฅเคกเคฒ', ' เคธเฅเคตเคฐเฅเคเฅเคฏ'],
['เคเคฌเฅเคฐเคพเคจเฅ', ' เคเคฌเฅเคฐเคพเคจเคฟเคฏเฅเค'],
['เคจเคฐเค', ' เคเค เคเฅ เคเฅเคฒ'],
['เคฎเคนเคพเคฏเคพเคเค'],
['เคชเคตเคฟเคคเฅเคฐ', ' เคชเคตเคฟเคคเฅเคฐเคคเคพ'],
['เคชเคตเคฟเคคเฅเคฐ'],
['เคชเคตเคฟเคคเฅเคฐเคธเฅ\u200dเคฅเคพเคจ'],
['เคชเคตเคฟเคคเฅเคฐ เคเคคเฅเคฎเคพ', ' เคชเคฐเคฎเฅเคถเฅเคตเคฐ เคเฅ เคเคคเฅเคฎเคพ', ' เคชเฅเคฐเคญเฅ เคเฅ เคเคคเฅเคฎเคพ', ' เคเคคเฅเคฎเคพ'],
['เคเคฆเคฐ', ' เคเคฆเคฐ เคเคฐเคคเฅ เคนเฅเค', ' เคเคฆเคฐ เคเคฐเคจเคพ'],
['เคเคถเคพ', ' เคเคถเคพ', ' เคเคถเคพเคเค'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเฅ เคญเคตเคจ', ' เคฏเคนเฅเคตเคพ เคเฅ เคญเคตเคจ'],
['เคฆเฅเคจ', ' เคตเคฟเคจเคฎเฅเคฐ', ' เคจเคฎเฅเคฐ เคฌเคจเคพเคฏเคพ', ' เคจเคฎเฅเคฐเคคเคพ'],
['เคเคชเคเฅ', ' เคเคชเคเคฟเคฏเฅเค', ' เคเคชเค'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเคพ เคชเฅเคฐเคคเคฟเคฐเฅเคช', ' เคธเฅเคตเคฐเฅเคช'],
['เคฎเคธเฅเคน เคฎเฅเค', ' เคฏเฅเคถเฅ เคฎเฅเค', ' เคชเฅเคฐเคญเฅ เคฎเฅเค', ' เคเคธเคฎเฅเค'],
['เคเคงเคฟเคเคพเคฐเฅ เคนเฅเคจเคพ', ' เคตเคเคถ', ' เคญเคพเค', ' เคตเคพเคฐเคฟเคธ'],
['เคเคงเคฐเฅเคฎ', ' เคเคงเคฐเฅเคฎเฅเค'],
['เคจเคฟเคฐเฅเคฆเฅเคท'],
['เคชเฅเคฐเคพเคฐเฅเคฅเคจเคพ เคเคฟเคฏเคพ', ' เคฎเคงเฅเคฏเคธเฅเคฅเคคเคพ เคเฅ', ' เคฎเคงเฅเคฏเคธเฅเคฅเคคเคพ'],
['เคเคธเฅเคฐเคพเคเคฒ', ' เคเคธเฅเคฐเคพเคเคฒเฅ'],
['เคเคฒเคจ', ' เคเคฐเฅเคทเฅเคฏเคพ'],
['เคฏเฅเคถเฅ', ' เคฏเฅเคถเฅ เคฎเคธเฅเคน', ' เคฎเคธเฅเคน เคฏเฅเคถเฅ'],
['เคฏเคนเฅเคฆเฅ', ' เคฏเคนเฅเคฆเคฟเคฏเฅเค เคเคพ', ' เคฏเคนเฅเคฆเคฟเคฏเฅเค'],
['เคจเฅเคฏเคพเคฏเฅ', ' เคจเฅเคฏเคพเคฏ เคเคฐเคคเคพ', ' เคจเฅเคฏเคพเคฏ', ' เคจเคฟเคฐเฅเคฃเคฏ'],
['เคฆเคฃเฅเคก เคเฅ เคฆเคฟเคจ'],
['เคธเคเฅเคเคพ', ' เคจเฅเคฏเคพเคฏ', ' เคจเฅเคฏเคพเคฏ เคธเฅ'],
['เคงเคฐเฅเคฎเฅ เค เคนเคฐเคพเคเคเคพ', ' เคงเคพเคฐเฅเคฎเคฟเคเคคเคพ'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเคพ เคฐเคพเคเฅเคฏ', ' เคธเฅเคตเคฐเฅเค เคเคพ เคฐเคพเคเฅเคฏ'],
['เคฏเคนเฅเคฆเคฟเคฏเฅเค เคเคพ เคฐเคพเคเคพ', ' เคฏเคนเฅเคฆเคฟเคฏเฅเค เคเคพ เคฐเคพเคเคพ'],
['เคฎเฅเคฎเฅ\u200dเคจเคพ', ' เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเคพ เคฎเฅเคฎเฅ\u200dเคจเคพ'],
['เคตเคฟเคฒเคพเคช', ' เคตเคฟเคฒเคพเคช เคเคฐเคจเคพ', ' เคตเคฟเคฒเคพเคช เคเคฟเคฏเคพ'],
['เคเคเคฐเฅ เคฆเคฟเคจ', ' เคเคจเฅเคคเคฟเคฎ เคฆเคฟเคจเฅเค', ' เคเคจเฅเคค เคเฅ เคฆเคฟเคจเฅเค'],
['เคตเฅเคฏเคตเคธเฅเคฅเคพ', ' เคฎเฅเคธเคพ เคเฅ เคตเฅเคฏเคตเคธเฅเคฅเคพ', ' เคชเคฐเคฎเฅเคถเฅเคตเคฐ เคเฅ เคตเฅเคฏเคตเคธเฅเคฅเคพ', ' เคฏเคนเฅเคตเคพ เคเฅ เคตเฅเคฏเคตเคธเฅเคฅเคพ'],
['เคเฅเคตเคจ', ' เคเฅเคจเคพ', ' เคฐเคนเคคเฅ เคฅเฅ', ' เคเฅเคตเคจ', 'เคเฅเคตเคคเฅ', ' เคเฅเคตเคฟเคค'],
['เคชเฅเคฐเคญเฅ', ' เคชเฅเคฐเคญเฅเคเค', ' เคเฅเคฐเฅ', ' เคธเฅเคตเคพเคฎเฅ', ' เคธเฅเคตเคพเคฎเคฟเคฏเฅเค', ' เคถเฅเคฐเฅเคฎเคพเคจ', ' เคฎเคนเฅเคฆเคฏ'],
['เคชเฅเคฐเคญเฅ'],
['เคชเฅเคฐเคญเฅ เคญเฅเค'],
['เคชเฅเคฐเคญเฅ เคฏเคนเฅเคตเคพ', ' เคฏเคนเฅเคตเคพ เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ'],
['เคชเฅเคฐเฅเคฎ', ' เคชเฅเคฐเฅเคฎ เคเคฐเคคเคพ เคนเฅ', ' เคชเฅเคฐเคฟเคฏ', ' เคชเฅเคฐเฅเคฎ เคเคฟเคฏเคพ'],
['เคฎเคนเคพเคฎเคนเคฟเคฎเคจเฅ'],
['เคฎเคจเฅเคจเคพ'],
['เคฆเคฏเคพ', ' เคฆเคฏเคพเคฒเฅ'],
['เคธเฅเคตเคพ เคเคฐเคจเคพ', ' เคธเฅเคตเคเคพเค'],
['เคเคฎเคคเฅเคเคพเคฐ', ' เคเคถเฅเคเคฐเฅเคฏเคเคฐเฅเคฎเฅเค', ' เคเคฆเฅเคญเฅเคค', ' เคเคถเฅเคเคฐเฅเคฏ เคเฅ เคเคพเคฎเฅเค', ' เคเคฟเคจเฅเคน', ' เคเคฟเคจเฅเคนเฅเค'],
['เคชเคฐเคฎเคชเฅเคฐเคงเคพเคจ'],
['เคเคจเฅเคงเคฐเคธ'],
['เคจเคพเคฎ', ' เคจเคพเคฎ', ' เคจเคพเคฎ เคชเคฐ'],
['เคจเคพเคเคผเฅเคฐ', ' เคจเคพเฅเฅเคฐเฅเค', ' เคจเคพเคเคผเฅเคฐ เคถเคชเคฅ'],
['เคจเค เคตเคพเคเคพ'],
['เคฆเฅเคทเฅเคเคพเคจเฅเคค', ' เคฆเฅเคทเฅเคเคพเคจเฅเคคเฅเค'],
['เคซเคธเคน'],
['เคฐเคเคตเคพเคฒเคพ', ' เคฐเคเคตเคพเคฒเฅ'],
['เคชเคฟเคจเฅเคคเฅเคเฅเคธเฅเคค', ' เคธเคชเฅเคคเคพเคนเฅเค เคเคพ เคชเคฐเฅเคต'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเฅ เคชเฅเคฐเคเคพ', ' เคฎเฅเคฐเฅ เคชเฅเคฐเคเคพ'],
['เคจเคพเคถ เคนเฅ', ' เคจเคพเคถ เคนเฅเค', ' เคจเคพเคถ เคนเฅ เคฐเคนเฅ', ' เคจเคพเคถเคตเคพเคจ'],
['เคซเคฐเฅเคธเฅ', ' เคซเคฐเฅเคธเคฟเคฏเฅเค'],
['เคธเคพเคฎเคฐเฅเคฅเฅเคฏ', ' เคถเคเฅเคคเคฟเคฏเคพเค'],
['เคชเฅเคฐเคพเคฐเฅเคฅเคจเคพ เคเคฐ', ' เคชเฅเคฐเคพเคฐเฅเคฅเคจเคพ', ' เคชเฅเคฐเคพเคฐเฅเคฅเคจเคพเคเค', ' เคชเฅเคฐเคพเคฐเฅเคฅเคจเคพ เคเฅ'],
['เคชเคนเคฒเฅ เคธเฅ เค เคนเคฐเคพเคจเคพ', ' เคชเคนเคฒเฅ เคธเฅ เค เคนเคฐเคพเคฏเคพ'],
['เคฏเคพเคเค', ' เคฏเคพเคเคเฅเค', ' เคฏเคพเคเค เคชเคฆ'],
['เคชเฅเคฐเคคเคฟเคเฅเคเคพ', ' เคชเฅเคฐเคคเคฟเคเฅเคเคพเคเค', ' เคชเฅเคฐเคคเคฟเคเฅเคเคพ เคเคฟเคฏเคพ'],
['เคชเฅเคฐเคคเคฟเคเฅเคเคพ เคเคพ เคฆเฅเคถ'],
['เคญเคตเคฟเคทเฅเคฏเคฆเฅเคตเคเฅเคคเคพ', ' เคญเคตเคฟเคทเฅเคฏเคฆเฅเคตเคเฅเคคเคพ', ' เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ', ' เคญเคตเคฟเคทเฅเคฏเคฆเฅเคตเคพเคฃเฅ', ' เคญเคตเคฟเคทเฅเคฏเคฆเฅเคตเคเฅเคคเคพ', ' เคญเคตเคฟเคทเฅเคฏเคฆเฅเคตเคเฅเคคเคฟเคจ'],
['เคชเฅเคฐเคพเคฏเคถเฅเคเคฟเคค'],
['เคญเคเคจ', ' เคญเคเคจ'],
['เคถเฅเคฆเฅเคง', ' เคถเฅเคฆเฅเคงเคฟ', ' เคถเฅเคฆเฅเคงเคฟเคเคฐเคฃ'],
['เคฐเคฌเฅเคฌเฅ'],
['เคเฅเคเคเคพเคฐเฅ เคเฅ เคฒเคฟเคฏเฅ', ' เคเฅเคกเคผเคพ เคฒเคฟเคฏเคพ'],
['เคฎเฅเคฒ เคเคฐเคจเคพ', ' เคฎเฅเคฒ-เคฎเคฟเคฒเคพเคช', ' เคฎเฅเคฒ เคฎเคฟเคฒเคพเคช เคเคฐ เคฒเคฟเคฏเคพ', ' เคฎเคฟเคฒเคพเคช'],
['เคเฅเคกเคผเคพ เคฒเฅ', ' เคเฅเคเคเคพเคฐเคพ', ' เคเฅเคเคเคพเคฐเคพ', ' เคเฅเคเคเคพเคฐเคพ เคฆเคฟเคฒเคพเคจเฅเคตเคพเคฒเคพ'],
['เคฌเคเฅ เคนเฅเค'],
['เคฎเคจ เคซเคฟเคฐเคพเคเคฐ', ' เคชเคถเฅเคเคพเคคเคพเคช', ' เคซเคฟเคฐเคพเคฏเคพ', ' เคซเคฟเคฐเคพเคต'],
['เคเฅเคฏเฅเค เคเคพ เคคเฅเคฏเฅเค เคเคฐเคจเคพ', ' เคฆเฅเฅ เคเคฐเคจเคพ', ' เคชเฅเคจเค เคธเฅเคฅเคพเคชเคจ', ' เคชเฅเคจเค เคธเฅเคฅเคพเคชเคจ'],
['เคเฅ เคเค เคจเฅ'],
['เคชเฅเคฐเคเค เคเคฐเคจเคพ', ' เคชเฅเคฐเคเค เคเคฐเคจเคพ', ' เคชเฅเคฐเคเค เคเคฟเคฏเคพ', ' เคชเฅเคฐเคเคพเคถเคจ'],
['เคงเคฐเฅเคฎเฅ', ' เคงเคพเคฐเฅเคฎเคฟเค'],
['เคฆเคพเคนเคฟเคจเคพ เคนเคพเคฅ'],
['เคธเคฌเฅเคค'],
['เคธเคฆเฅเคเฅ', ' เคธเคฆเฅเคเคฟเคฏเฅเค'],
['เคธเคเคค', ' เคชเคตเคฟเคคเฅเคฐ เคเคจ'],
['เคเคฆเฅเคงเคพเคฐ'],
['เคชเคตเคฟเคคเฅเคฐ เคเคฐเคจเคพ', ' เคชเคตเคฟเคคเฅเคฐ เคเคฐเคจเคพ', ' เคชเคตเคฟเคคเฅเคฐเคคเคพ'],
['เคชเคตเคฟเคคเฅเคฐเคธเฅเคฅเคพเคจ'],
['เคถเฅเคคเคพเคจ', ' เคถเฅเคคเคพเคจ', ' เคฆเฅเคทเฅเค'],
['เคฌเคเคพเคจเคพ', ' เคฌเคเคพเคคเคพ เคนเฅ', ' เคเคฆเฅเคงเคพเคฐ', ' เคธเฅเคฐเคเฅเคทเคพ'],
['เคเคฆเฅเคงเคพเคฐเคเคฐเฅเคคเคพ', ' เคฌเคเคพเคจเฅ เคตเคพเคฒเคพ'],
['เคถเคพเคธเฅเคคเฅเคฐเฅ', ' เคถเคพเคธเฅเคคเฅเคฐเคฟเคฏเฅเค'],
['เคชเฅเคฅเค เคเคฐเคจเคพ'],
['เคเคฟเคจเฅเคน', ' เคชเฅเคฐเคฎเคพเคฃ', ' เคธเฅเคฎเคฐเคฃ เคเคฐเคพเคจเฅ เคตเคพเคฒเฅ เคฌเคพเคค'],
['เคชเคพเคช', ' เคชเคพเคชเฅ', ' เคชเคพเคช เคเคฐเคจเคพ', ' เคชเคพเคชเคฎเคฏ', ' เคชเคพเคชเฅ', ' เคชเคพเคช เคเคฐเคคเฅ เคฐเคนเคจเคพ'],
['เคชเฅเคคเฅเคฐ', ' เคชเฅเคคเฅเคฐเฅเค'],
['เคชเคฐเคฎเฅเคถเฅเคตเคฐ เคเคพ เคชเฅเคคเฅเคฐ', ' เคชเฅเคคเฅเคฐ'],
['เคฎเคจเฅเคทเฅเคฏ เคเคพ เคชเฅเคคเฅเคฐ', ' เคฎเคจเฅเคทเฅเคฏ เคเคพ เคชเฅเคคเฅเคฐ'],
['เคชเคฐเคฎเฅเคถเฅเคตเคฐ เคเฅ เคธเคจเฅเคคเคพเคจ'],
['เคชเฅเคฐเคพเคฃ', ' เคชเฅเคฐเคพเคฃ'],
['เคเคคเฅเคฎเคพ', ' เคเคคเฅเคฎเคพ', ' เคเคคเฅเคฎเคฟเค'],
['เคชเคคเฅเคฅเคฐ', ' เคชเคคเฅเคฅเคฐ', ' เคชเคคเฅเคฅเคฐ'],
['เคเคฐเคพเคงเคจเคพเคฒเคฏ'],
['เคจเคฟเคตเคพเคธเคธเฅเคฅเคพเคจ'],
['เคฎเคจเฅเคฆเคฟเคฐ'],
['เคชเคฐเฅเคเฅเคทเคพ เคเคฐเคจเฅ', ' เคชเคฐเฅเคเฅเคทเคพ'],
['เคชเคฐเฅเคเฅเคทเคพ', ' เคชเคฐเฅเคเฅเคทเคฃ', ' เคชเคฐเฅเคเฅเคทเคฃ'],
['เคเคตเคพเคนเฅ', ' เคเคตเคพเคนเฅ เคฆเฅเคจเคพ'],
['เคเฅเคฅเคพเค เคฆเฅเคถ เคเฅ เคฐเคพเคเคพ'],
['เคฌเคพเคฐเคนเฅเค', ' เคเฅเคฏเคพเคฐเคนเฅเค'],
['เคเคพเคฒเคจเคพ', ' เคเคฒเฅเคฒเคเคเคจ', ' เคเคชเคฐเคพเคง'],
['เคเคชเคฐเคพเคง', ' เคเคชเคฐเคพเคงเฅเค', ' เคตเคฟเคถเฅเคตเคพเคธเคเคพเคค เคเคฟเคฏเคพ'],
['เคธเคเฅเคเคพ', ' เคธเคเฅเคเคพเค', ' เคธเคคเฅเคฏ'],
['เคตเคฟเคถเฅเคตเคพเคธ', ' เคญเคฐเฅเคธเคพ', ' เคตเคฟเคถเฅเคตเคธเคจเฅเคฏ', ' เคญเคฐเฅเคธเฅเคฎเคเคฆ', ' เคตเคฟเคถเฅเคตเคธเคจเฅเคฏเคคเคพ'],
['เคเคตเคฟเคถเฅเคตเคพเคธเฅ', ' เคเคตเคฟเคถเฅเคตเคพเคธเคฟเคฏเฅเค', ' เคเคตเคฟเคถเฅเคตเคพเคธ'],
['เคเคคเคจเคพเคฐเคนเคฟเคค', ' เคเคคเคจเคพเคนเฅเคจ'],
['เคเคถเฅเคฆเฅเคง'],
['เคตเคฟเคถเฅเคตเคพเคธเคเคพเคคเฅ', ' เคตเคฟเคถเฅเคตเคพเคธเคเคพเคค เคเคฟเคฏเคพ'],
['เคเคญเคเฅเคค', ' เคเคงเคฐเฅเคฎเฅ', ' เคเคญเคเฅเคคเคฟ', ' เคเคงเคฐเฅเคฎเฅ'],
['เคเคชเคตเคฟเคคเฅเคฐ'],
['เคเคงเคฐเฅเคฎเฅ', ' เคเคจเฅเคฏเคพเคฏ เคธเฅ', ' เคเคจเฅเคฏเคพเคฏ'],
['เคเคเคฟเคค เคจเคนเฅเค', ' เคเคงเคฐเฅเคฎ'],
['เคเฅเคฎเฅเคฐเฅ เคฐเฅเคเฅ'],
['เคเคงเคฐเฅเคฎเฅ', ' เคเคงเคฐเฅเคฎ'],
['เคธเฅเคงเฅ', ' เคธเคฟเคงเคพเค'],
['เคฎเคจเฅเคจเคค', ' เคฎเคจเฅเคจเคคเฅเค', ' เคถเคชเคฅ เคเคพเค'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเฅ เคเคเฅเคเคพ'],
['เคฌเฅเคฆเฅเคงเคฟเคฎเคพเคจ', ' เคฌเฅเคฆเฅเคงเคฟ'],
['เคธเคพเคเฅเคทเฅ', ' เคเคตเคพเคนเฅเค', ' เคเคตเคพเคน', ' เคฌเคพเคคเฅเค เคเฅ เคฆเฅเคเคจเฅเคตเคพเคฒเฅ'],
['เคนเคพเคฏ'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเฅ เคตเคเคจ', ' เคชเคฐเคฎเฅเคถเฅเคตเคฐ เคเฅ เคตเคเคจเฅเค', ' เคฏเคนเฅเคตเคพ เคเฅ เคตเคเคจ', ' เคชเฅเคฐเคญเฅ เคเคพ เคตเคเคจ', ' เคชเคตเคฟเคคเฅเคฐเคถเคพเคธเฅเคคเฅเคฐ', ' เคชเคตเคฟเคคเฅเคฐเคถเคพเคธเฅเคคเฅเคฐ'],
['เคธเคคเฅเคฏ เคเคพ เคตเคเคจ'],
['เคเคพเคฎ', ' เคเคฐเฅเคฎ', ' เคเคพเคฐเฅเคฏ', ' เคเฅเคคเฅเคฏ'],
['เคธเคเคธเคพเคฐ', ' เคธเคพเคเคธเคพเคฐเคฟเค'],
['เคเคชเคพเคธเคจเคพ'],
['เคฏเฅเคเฅเคฏ', ' เคฎเฅเคฒเฅเคฏเคตเคพเคจ', ' เคเคฏเฅเคเฅเคฏ', ' เคจเคฟเคเคฎเฅเคฎเคพ'],
['เคเฅเคฐเฅเคง', ' เคฐเฅเคท'],
['เคฏเคนเฅเคตเคพ'],
['เคธเฅเคจเคพเคเค เคเฅ เคฏเคนเฅเคตเคพ', ' เคธเฅเคจเคพเคเค เคเฅ เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ', ' เคเคเคพเคถ เคเฅ เคเคฃ', ' เคเคเคพเคถ เคเคพ เคธเคพเคฐเคพ เคคเคพเคฐเคพเคเคฃ', ' เคธเฅเคจเคพเคเค เคเคพ เคชเฅเคฐเคญเฅ'],
['เคงเฅเคจ', ' เคเคคเฅเคคเฅเคเคฟเคค'],
['เคธเคฟเคฏเฅเคฏเฅเคจ', ' เคธเคฟเคฏเฅเคฏเฅเคจ เคชเคฐเฅเคตเคค'],
['เคนเคพเคฐเฅเคจ'],
['เคนเคพเคฌเคฟเคฒ'],
['เคเคฌเฅเคฏเคพเคคเคพเคฐ'],
['เคเคฌเคฟเคฏเฅเคฏเคพเคน'],
['เคเคฌเฅเคฎเฅเคฒเฅเค'],
['เคเคฌเฅเคจเฅเคฐ'],
['เคเคฌเฅเคฐเคพเคนเคฎ', ' เคเคฌเฅเคฐเคพเคฎ'],
['เคเคฌเคถเคพเคฒเฅเคฎ'],
['เคเคฆเคฎ'],
['เคเคฆเฅเคจเคฟเคฏเฅเคฏเคพเคน'],
['เคเคนเคพเคฌ'],
['เคเฅเคทเคฏเคฐเฅเคท'],
['เคเคนเคพเค'],
['เคเคนเคเฅเคฏเคพเคน'],
['เคเคนเคฟเคฏเฅเคฏเคพเคน'],
['เคเค'],
['เคเคฎเคพเคฒเฅเค', ' เคเคฎเคพเคฒเฅเคเฅ', ' เคเคฎเคพเคฒเฅเคเคฟเคฏเฅเค'],
['เคเคฎเคธเฅเคฏเคพเคน'],
['เคเคฎเฅเคฎเฅเคจ', ' เคเคฎเฅเคฎเฅเคจเฅ', ' เคเคฎเฅเคฎเฅเคจเคฟเคฏเฅเค'],
['เคเคฎเฅเคจเฅเคจ'],
['เคเคฎเฅเคฐเฅ', ' เคเคฎเฅเคฐเคฟเคฏเฅเค'],
['เคเคฎเฅเคธ'],
['เคเคฎเฅเคธ'],
['เคเคจเฅเคฆเฅเคฐเคฟเคฏเคพเคธ'],
['เคนเคจเฅเคจเคพ'],
['เคเคจเฅเคคเคพเคเคฟเคฏเคพ'],
['เคเคชเฅเคชเฅเคฒเฅเคฒเฅเคธ'],
['เคเคเฅเคตเคฟเคฒเคพ'],
['เคเคฐเคพเคฌเคพ'],
['เคเคฐเคฌ', ' เคเคฐเคฌเฅ', ' เคเคฐเคฌเคฟเคฏเฅเค'],
['เคเคฐเคพเคฎ', ' เคเคฐเคพเคฎเฅ', ' เคเคฐเคพเคฎเคฟเคฏเฅเค', ' เคเคฐเคพเคฎเฅ เคญเคพเคทเคพ'],
['เคเคฐเคพเคฐเคพเคค'],
['เคเคฐเฅเคคเคเฅเคทเคคเฅเคฐ'],
['เคเคธเคพ'],
['เคเคธเคพเคช'],
['เคเคถเฅเคฆเฅเคฆ', ' เคเฅเฅเคคเคธ'],
['เคเคถเฅเคฐ'],
['เคเคถเฅเคฐเคพ', ' เคเคถเฅเคฐเคพ เคเฅ เคฒเคฟเค เคฎเฅเคฐเคค', ' เคเคถเฅเคฐเคพ เคจเคพเคฎเค เคฎเฅเคฐเฅเคคเคฟเคฏเฅเค', ' เคเคถเฅเคคเฅเคฐเฅเคค'],
['เคเคถเฅเคเคฒเฅเคจ'],
['เคเคธเคฟเคฏเคพ'],
['เคเคถเฅเคถเฅเคฐ', ' เคเคถเฅเคถเฅเคฐเฅ', ' เคเคถเฅเคถเฅเคฐเคฟเคฏเฅเค', ' เคเคถเฅเคถเฅเคฐ เคฐเคพเคเฅเคฏ'],
['เคเคคเคฒเฅเคฏเคพเคน'],
['เคเคเคฐเฅเคฏเคพเคน'],
['เคฌเคพเคฒ'],
['เคฌเคพเคถเคพ'],
['เคฌเคพเคฌเฅเคฒ'],
['เคฌเคพเคฌเฅเคฒ', ' เคฌเคพเคฌเฅเคฒ', ' เคฌเคพเคฌเฅเคฒ', ' เคฌเคพเคฌเฅเคฒเฅ'],
['เคฌเคฟเคฒเคพเคฎ'],
['เคฌเคฐเคเคฌเฅเคฌเคพ'],
['เคฌเคฐเคจเคฌเคพเคธ'],
['เคฌเคฐเคคเฅเคฒเฅเคฎเฅ'],
['เคฌเคพเคฐเฅเค'],
['เคฌเคพเคถเคพเคจ'],
['เคฌเคคเคถเฅเคฌเคพ'],
['เคฌเคพเคฒเคเคฌเฅเคฒ(เคถเฅเคคเคพเคจ)'],
['เคฌเฅเคฐเฅเคถเฅเคฌเคพ'],
['เคฌเคจเคพเคฏเคพเคน'],
['เคฌเคฟเคจเฅเคฏเคพเคฎเฅเคจ', ' เคฌเคฟเคจเฅเคฏเคพเคฎเฅเคจเฅ', ' เคฌเคฟเคจเฅเคฏเคพเคฎเฅเคจเคฟเคฏเฅเค'],
['เคฌเคฟเคฐเฅเคฏเคพ'],
['เคฌเฅเคคเคจเคฟเคฏเฅเคฏเคพเคน'],
['เคฌเฅเคคเฅเคฒ'],
['เคฌเฅเคคเคฒเคนเคฎ', ' เคเคชเฅเคฐเคพเคค'],
['เคฌเฅเคคเคถเฅเคฎเฅเคถ'],
['เคฌเคคเฅเคเคฒ'],
['เคฌเฅเคเคเคผ'],
['เคเฅเคธเคฐ'],
['เคเฅเคธเคฐเคฟเคฏเคพ', ' เคเฅเคธเคฐเคฟเคฏเคพ เคซเคฟเคฒเคฟเคชเฅเคชเฅ'],
['เคเฅเคซเคพ'],
['เคเฅเคจ'],
['เคเคพเคฒเฅเคฌ'],
['เคเคพเคจเคพ'],
['เคเคจเคพเคจ', ' เคเคจเคพเคจเฅ', ' เคเคจเคพเคจเคฟเคฏเฅเค'],
['เคเคซเคฐเคจเคนเฅเคฎ'],
['เคเคฐเฅเคฎเฅเคฒ', ' เคเคฐเฅเคฎเฅเคฒ เคชเคนเคพเฅ'],
['เคเคธเคฆเฅ', ' เคเคธเคฆเฅ', ' เคเคธเคฆเคฟเคฏเฅเค'],
['เคเคฐเฅเคคเคฟเคฏเฅเค'],
['เคเคฟเคฒเคฟเคเคฟเคฏเคพ'],
['เคฆเคพเคเคฆ เคเฅ เคจเคเคฐ'],
['เคเฅเคฒเฅเคธเฅเคธเฅ', ' เคเฅเคฒเฅเคธเฅเคธเคฟเคฏเฅเค'],
['เคเฅเคฐเคฟเคจเฅเคฅเฅเคธ', ' เคเฅเคฐเคฟเคจเฅเคฅเคตเคพเคธเฅ'],
['เคเฅเคฐเคจเฅเคฒเคฟเคฏเฅเคธ'],
['เคเฅเคฐเฅเคคเฅ', ' เคเฅเคฐเฅเคคเฅเคตเคพเคธเฅ', ' เคเฅเคฐเฅเคคเฅเคตเคพเคธเคฟเคฏเฅเค'],
['เคเฅเคถ'],
['เคธเคพเคเคชเฅเคฐเคธ'],
['เคเฅเคฐเฅเคจเฅ'],
['เคเฅเคธเฅเคฐเฅ'],
['เคฆเคฎเคฟเคถเฅเค'],
['เคฆเคพเคจ'],
['เคฆเคพเคจเคฟเคฏเฅเคฏเฅเคฒ'],
['เคฆเคพเคฐเคพ'],
['เคฆเคพเคเคฆ'],
['เคฆเคฒเฅเคฒเคพ'],
['เคเคฆเคจ', ' เคเคฆเคจ เคเฅ เคตเคพเคเคฟเคเคพ'],
['เคเคฆเฅเคฎ', ' เคเคฆเฅเคฎเฅ', ' เคเคฆเฅเคฎเคฟเคฏเฅเค', ' เคเคฆเฅเคฎเคฟเคฏเคพ'],
['เคฎเคฟเคธเฅเคฐ', ' เคฎเคฟเคธเฅเคฐเฅ', ' เคฎเคฟเคธเฅเคฐเคฟเคฏเฅเค'],
['เคเคเฅเคฐเฅเคจ', ' เคเคเฅเคฐเฅเคจเฅ'],
['เคเคฒเคพเคฎ', ' เคเคฒเคพเคฎ เคฒเฅเค'],
['เคเคฒเฅเคเคเคพเคฐ'],
['เคเคฒเคฏเคพเคเฅเคฎ'],
['เคเคฒเคฟเคฏเฅเคฏเคพเคน'],
['เคเคฒเฅเคถเคพ'],
['เคเคฒเฅเคถเคฟเคฌเคพ'],
['เคเคจเคเคฆเฅ'],
['เคนเคจเฅเค'],
['เคเคซเคฟเคธเฅเคธ', ' เคเคซเคฟเคธเฅเคธ เคตเคพเคธเฅ', ' เคเคซเคฟเคธเคฟเคฏเฅเค'],
['เคเคชเฅเคฐเฅเคฎ', ' เคเคชเฅเคฐเฅเคฎเฅ', ' เคเคชเฅเคฐเฅเคฎเคฟเคฏเฅเค'],
['เคเคชเฅเคฐเคพเคค', ' เคเคชเฅเคฐเคพเคค', ' เคเคชเฅเคฐเคพเคคเฅ', ' เคเคชเฅเคฐเคพเคคเฅ'],
['เคเคธเคพเคต'],
['เคเคธเฅเคคเฅเคฐ'],
['เคเฅเคถ', ' เคเฅเคถเฅ'],
['เคซเคฐเคพเคค เคฎเคนเคพเคจเคฆ', ' เคฎเคนเคพเคจเคฆ'],
['เคนเคตเฅเคตเคพ'],
['เคฏเคนเฅเคเคเฅเคฒ'],
['เคเคเฅเคฐเคพ'],
['เคเคฟเคฌเฅเคฐเคพเคเคฒ'],
['เคเคพเคฆ'],
['เคเคฒเคพเคคเคฟเคฏเคพ', ' เคเคฒเคพเคคเคฟเคฏเฅเค'],
['เคเคฒเฅเคฒ', ' เคเคฒเฅเคฒเฅ', ' เคเคฒเฅเคฒเคฟเคฏเฅเค'],
['เคเคค', ' เคเคคเคตเคพเคธเฅ', ' เคเคคเฅ'],
['เคเคพเฅเคพ'],
['เคเคฐเคพเคฐ'],
['เคเคถเฅเคฐ', ' เคเคถเฅเคฐเคฟเคฏเฅเค'],
['เคเคคเคธเคฎเคจเฅ'],
['เคเคฟเคฌเคพ'],
['เคเคฟเคฌเฅเคจ', ' เคเคฟเคฌเฅเคจเฅ', ' เคเคฟเคฌเฅเคจเคฟเคฏเฅเค'],
['เคเคฟเคฆเฅเคจ'],
['เคเคฟเคฒเคพเคฆ', ' เคเคฟเคฒเคพเคฆเฅ', ' เคเคฟเคฒเคพเคฆเคฟเคฏเฅเค'],
['เคเคฟเคฒเคเคพเคฒ'],
['เคเคฟเคฐเฅเคเคพเคถเคฟเคฏเฅเค'],
['เคเฅเคฒเคเฅเคคเคพ'],
['เคเฅเคฒเคฟเคฏเคค'],
['เคเคฎเฅเคฐเคพ'],
['เคเฅเคถเฅเคจ'],
['เคฏเฅเคจเคพเคจ', ' เคฏเฅเคจเคพเคจเฅ'],
['เคนเคฌเคเฅเคเฅเค'],
['เคนเคพเคเคฟเคฐเคพ'],
['เคนเคพเคเฅเคเฅ'],
['เคนเคพเคฎ'],
['เคนเคพเคฎเคพเคค', ' เคนเคฎเคพเคคเฅ', ' เคฒเฅเคฌเฅ เคนเคพเคฎเคพเคค'],
['เคนเคพเคฎเฅเคฐ'],
['เคนเคจเคจเฅเคฏเคพเคน'],
['เคนเคจเฅเคจเคพ'],
['เคนเคพเคฐเคพเคจ'],
['เคนเฅเคฌเฅเคฐเฅเคจ'],
['เคนเฅเคฐเฅเคฆเฅเคธ เคเคจเฅเคคเคฟเคชเคพเคธ'],
['เคนเฅเคฐเฅเคฆเคฟเคฏเคพเคธ'],
['เคนเฅเคฐเฅเคฆเฅเคธ เคฎเคนเคพเคจ'],
['เคนเคฟเคเคเคฟเคฏเฅเคฏเคพเคน'],
['เคนเคฟเคฒเฅเคเคฟเคฏเฅเคฏเคพเคน'],
['เคนเคฟเคคเฅเคคเฅ', ' เคนเคฟเคคเฅเคคเคฟเคฏเฅเค'],
['เคนเคฟเคตเฅเคตเฅ', ' เคนเคฟเคตเฅเคตเคฟเคฏเฅเค'],
['เคนเฅเคฐเฅเคฌ'],
['เคนเฅเคถเฅ'],
['เคนเฅเคถเฅ'],
['เคฆเคพเคเคฆ เคเฅ เคเคฐเคพเคจเฅ'],
['เคเคเฅเคจเคฟเคฏเฅเคฎ'],
['เคเคธเคนเคพเค'],
['เคฏเคถเคพเคฏเคพเคน'],
['เคเคถเฅเคฎเคพเคเคฒ', ' เคเคถเฅเคฎเคพเคเคฒเฅ', ' เคเคถเฅเคฎเคพเคเคฒเคฟเคฏเฅเค'],
['เคเคธเฅเคธเคพเคเคพเคฐ'],
['เคเคธเฅเคฐเคพเคเคฒ', ' เคเคธเฅเคฐเคพเคเคฒเฅ', ' เคเคธเฅเคฐเคพเคเคฒเคฟเคฏเฅเค', ' เคฏเคพเคเฅเคฌ'],
['เคฏเคพเคเฅเคฌ(เคนเคฒเคซเคเคธ เคเคพ เคชเฅเคคเฅเคฐ)'],
['เคฏเคพเคเฅเคฌ (เคเคฌเฅเคฆเฅ เคเคพ เคชเฅเคคเฅเคฐ)'],
['เคฏเฅเคชเฅเคค'],
['เคฏเคฌเฅเคธ', ' เคฏเคฌเฅเคธเฅ', ' เคฏเคฌเฅเคธเคฟเคฏเฅเค'],
['เคฏเคนเฅเคฏเคพเคเฅเคจ'],
['เคฏเคนเฅเคฏเคพเคฆเคพ'],
['เคฏเคนเฅเคฏเคพเคเฅเคฎ'],
['เคฏเคนเฅเคฐเคพเคฎ', ' เคฏเฅเคฐเคพเคฎ'],
['เคฏเคนเฅเคถเคพเคชเคพเคค'],
['เคฏเฅเคนเฅ'],
['เคฏเคฟเคชเฅเคคเคน'],
['เคฏเคฟเคฐเฅเคฎเคฏเคพเคน'],
['เคฏเคฐเฅเคนเฅ'],
['เคฏเคพเคฐเฅเคฌเคพเคฎ'],
['เคฏเคฐเฅเคถเคฒเฅเคฎ'],
['เคฏเคฟเคถเฅ'],
['เคฏเคฟเคคเฅเคฐเฅ', ' เคฐเฅเคเคฒ'],
['เคเคเฅเคฌเฅเคฒ'],
['เคฏเคฟเคเฅเคฐเฅเคฒ', ' เคฏเคฟเคเฅเคฐเฅเคฒเฅ'],
['เคฏเฅเคเคฌ'],
['เคฏเฅเคเคถ'],
['เคเคฏเฅเคฏเฅเคฌ'],
['เคฏเฅเคเคฒ'],
['เคฏเฅเคนเคจเฅเคจเคพ เคฎเคฐเคเฅเคธ'],
['เคฏเฅเคนเคจเฅเคจเคพ (เคชเฅเคฐเฅเคฐเคฟเคค)'],
['เคฏเฅเคนเคจเฅเคจเคพ (เคฌเคชเคคเคฟเคธเฅเคฎเคพ เคฆเฅเคจเฅเคตเคพเคฒเคพ)'],
['เคฏเฅเคจเคพ'],
['เคฏเฅเคจเคพเคคเคพเคจ'],
['เคฏเคพเคซเคพ'],
['เคฏเฅเคฐเคพเคฎ'],
['เคฏเคฐเคฆเคจ เคจเคฆเฅ', ' เคฏเคฐเคฆเคจ'],
['เคฏเฅเคธเฅเคซ (เคจเคฏเคพ เคจเคฟเคฏเคฎ)'],
['เคฏเฅเคธเฅเคซ (เคชเฅเคฐเคพเคจเคพ เคจเคฟเคฏเคฎ)'],
['เคฏเคนเฅเคถเฅ'],
['เคฏเฅเคถเคฟเคฏเฅเคฏเคพเคน'],
['เคฏเฅเคคเคพเคฎ'],
['เคฏเคนเฅเคฆเคพ'],
['เคฏเคนเฅเคฆเคพ เคเคธเฅเคเคฐเคฟเคฏเฅเคคเฅ'],
['เคฏเคพเคเฅเคฌ เคเคพ เคชเฅเคคเฅเคฐ เคฏเคนเฅเคฆเคพ'],
['เคฏเคนเฅเคฆเคฟเคฏเคพ'],
['เคเคพเคฆเฅเคถ', ' เคเคพเคฆเฅเคถเคฌเคฐเฅเคจเฅ', ' เคเคพเคฆเฅเคถ เคเฅ เคฎเคฐเฅเคฌเฅเคค'],
['เคเฅเคฆเคพเคฐ'],
['เคเฅเคฆเฅเคถ'],
['เคเคฟเคฆเฅเคฐเฅเคจ เคจเคพเคฒเฅ'],
['เคเคธเฅเคฐเคพเคเคฒ เคเฅ เคฐเคพเคเฅเคฏ'],
['เคฏเคนเฅเคฆเคพ', ' เคฏเคนเฅเคฆเคพ เคเคพ เคฐเคพเคเฅเคฏ'],
['เคเฅเคฐเคน', ' เคเฅเคฐเฅ', ' เคเฅเคฐเคนเคฟเคฏเฅเค'],
['เคฒเคพเคฌเคพเคจ'],
['เคฒเฅเคฎเฅเค'],
['เคฒเคพเฅเคฐ'],
['เคฒเคฟเค:'],
['เคฒเคฌเคพเคจเฅเคจ'],
['เคฒเคฟเคตเฅเคฏเคพเคคเคพเคจ'],
['เคฒเฅเคตเฅ', ' เคฒเฅเคตเฅเคฏ', ' เคฒเฅเคตเคฟเคฏเฅเค', ' เคฒเฅเคตเฅเคฏ'],
['เคฒเฅเคค'],
['เคฒเฅเคเคพ'],
['เคฒเฅเคธเฅเคคเฅเคฐเคพ'],
['เคฎเคพเคเคพ'],
['เคฎเคเคฟเคฆเฅเคจเคฟเคฏเคพ'],
['เคธเฅเคเคจเคนเคพเคฐ'],
['เคฎเคฒเคพเคเฅ'],
['เคฎเคจเคถเฅเคถเฅ'],
['เคชเคฐเคฎเฅเคถเฅ\u200dเคตเคฐ เคเคพ เคเคจ'],
['เคฎเคพเคฐเฅเคฅเคพ'],
['เคฎเคฐเคฟเคฏเคฎ', ' เคฏเฅเคถเฅ เคเฅ เคฎเคพเคคเคพ'],
['เคฎเคฐเคฟเคฏเคฎ เคฎเคเคฆเคฒเฅเคจเฅ'],
['เคฎเคคเฅเคคเฅ', ' เคฒเฅเคตเฅ'],
['เคฎเคพเคฆเคฟเคฏเฅเค', ' เคฎเคพเคฆเฅ'],
['เคธเคฎเฅเคฆเฅเคฐ', ' เคฎเคนเคพเคธเคฎเฅเคฆเฅเคฐ', ' เคชเคถเฅเคเคฟเคฎ เคเฅ เคธเคฎเฅเคฆเฅเคฐ', ' เคญเฅเคฎเคงเฅเคฏ เคธเคพเคเคฐ'],
['เคฎเฅเคฒเคฟเคเคฟเคธเคฟเคฆเค'],
['เคจเฅเคช'],
['เคฎเฅเคถเฅเค'],
['เคฎเฅเคธเฅเคชเฅเคเคพเคฎเคฟเคฏเคพ', 'เคเคฐเคฎเฅเคจเคนเคฐเฅเคฎ'],
['เคฎเฅเคเคพ'],
['เคฎเฅเคเคพเคเคฒ'],
['เคฎเคฟเคฆเฅเคฏเคพเคจ', ' เคฎเคฟเคฆเฅเคฏเคพเคจเฅ', ' เคฎเคฟเคฆเฅเคฏเคพเคจเคฟเคฏเฅเค'],
['เคฎเคฟเคฐเฅเคฏเคพเคฎ'],
['เคฎเฅเคถเคพเคเคฒ'],
['เคฎเคฟเคธเฅเคชเคพ'],
['เคฎเฅเคเคฌ', ' เคฎเฅเคเคฌเฅ', ' เคฎเฅเคเคฌเคฟเคจ'],
['เคฎเฅเคฒเฅเค', ' Moloch'],
['เคฎเฅเคฐเฅเคฆเคเฅ'],
['เคฎเฅเคธเคพ'],
['เคนเฅเคฐเฅเคฎเฅเคจ\xa0เคชเคฐเฅเคตเคค'],
['เคเฅเคคเฅเคจ เคเฅ เคชเคนเคพเคกเคผ'],
['เคจเคพเคฎเคพเคจ'],
['เคจเคพเคนเฅเคฐ'],
['เคจเคนเฅเคฎ'],
['เคจเคชเฅเคคเคพเคฒเฅ'],
['เคจเคพเคคเคพเคจ'],
['เคจเคพเคธเคฐเคค', ' เคจเคพเคธเคฐเคฟเคฏเฅเค'],
['เคจเคฌเฅเคเคฆเคจเฅเคธเฅเคธเคฐ'],
['เคฆเคเฅเคทเคฟเคฃ เคฆเฅเคถ'],
['เคจเคนเฅเคฎเฅเคฏเคพเคน'],
['เคจเฅเคฒ เคจเคฆเฅ', ' เคฎเคฟเคธเฅเคฐ เคเฅ เคจเคฆเฅ', ' เคจเฅเคฒ เคจเคฆเฅ'],
['เคจเฅเคจเคตเฅ', ' เคจเฅเคจเคตเฅ เคเฅ เคฒเฅเคเฅเค'],
['เคจเฅเคน'],
['เคเคฌเคฆเฅเคฏเคพเคน'],
['เคเคฎเฅเคฐเฅ'],
['เคชเคฆเฅเคฆเคจเคฐเคพเคฎ'],
['เคชเคพเคฐเคพเคจ'],
['เคชเฅเคฒเฅเคธ', ' เคถเคพเคเคฒ'],
['เคชเฅเคฐ', ' เคชเฅเคฐ เคชเคฐเฅเคตเคค', ' เคฌเคพเคฒเคชเฅเคฐ'],
['เคชเคฐเคฟเคเฅเคเฅ'],
['เคซเคพเคฐเคธ', ' เคซเคพเคฐเคธเคฟเคฏเฅเค'],
['เคชเคคเคฐเคธ', ' เคถเคฎเฅเคจ เคชเคคเคฐเคธ', ' เคเฅเคซเคพ'],
['เคซเคผเคฟเคฐเฅเคจ', ' เคฎเคฟเคธเฅเคฐ เคเฅ เคฐเคพเคเคพ'],
['เคซเคฟเคฒเคฟเคชเฅเคชเฅเคธ', ' เคธเฅเคธเคฎเคพเคเคพเคฐ เคชเฅเคฐเคเคพเคฐเค'],
['เคซเคฟเคฒเคฟเคชเฅเคชเฅ', ' เคซเคฟเคฒเคฟเคชเฅเคชเคฟเคฏเฅเค'],
['เคซเคฟเคฒเคฟเคชเฅเคชเฅเคธ', ' เคชเฅเคฐเฅเคฐเคฟเคค'],
['เคชเคฒเคฟเคถเฅเคคเฅเคจ'],
['เคชเคฒเคฟเคถเฅเคคเคฟเคฏเฅเค'],
['เคชเฅเคจเคนเคพเคธ'],
['เคซเฅเคจเฅเคเฅ'],
['เคชเคฟเคฒเคพเคคเฅเคธ'],
['เคชเฅเคจเฅเคคเฅเคธ'],
['เคชเฅเคคเฅเคชเคฐ'],
['เคชเฅเคฐเคฟเคธเฅเคเคฟเคฒเฅเคฒเคพ'],
['เคฐเคฌเฅเคฌเคพ'],
['เคฐเคพเคนเฅเคฒ'],
['เคฐเคพเคนเคพเคฌ'],
['เคฐเคพเคฎเคพเคน'],
['เคฐเคพเคฎเฅเคค'],
['เคฐเคฟเคฌเคเคพ'],
['เคจเฅ เคธเคพเคเคฐ', ' เคฒเคพเคฒ เคธเคฎเฅเคฆเฅเคฐ'],
['เคฐเคนเฅเคฌเคฟเคฏเคพเคฎ'],
['เคฐเฅเคฌเฅเคจ'],
['เคฐเคฟเคฎเฅเคฎเฅเคจ'],
['เคฐเฅเคฎ', ' เคฐเฅเคฎเฅ'],
['เคฐเฅเคค'],
['เคเคพเคฐเคพ เคคเคพเคฒ', ' เคฎเฅเคค เคธเคพเคเคฐ'],
['เคธเคพเคฎเคฐเคฟเคฏเคพ', ' เคธเคพเคฎเคฐเฅ'],
['เคถเคฟเคฎเคถเฅเคจ'],
['เคถเคฎเฅเคเคฒ'],
['เคธเคพเคฐเคพ', ' เคธเคพเคฐเฅ'],
['เคถเคพเคเคฒ (เคชเฅเคฐเคพเคจเคพ เคจเคฟเคฏเคฎ)'],
['เคเคฒเฅเคฒ เคธเคพเคเคฐ', ' เคเคฟเคจเฅเคจเฅเคฐเฅเคค เคเฅ เคธเคพเคเคฐ', ' เคเคจเฅเคจเฅเคธเคฐเคค เคเฅ เคเฅเคฒ', ' เคคเคฟเคฌเคฟเคฐเคฟเคฏเฅเคธ เคเฅ เคเฅ'],
['เคธเคจเฅเคนเฅเคฐเฅเคฌ'],
['เคถเฅเคค'],
['เคถเคพเคฐเฅเคจ', ' เคถเคพเคฐเฅเคจ เคเคพ เคฎเฅเคฆเคพเคจ'],
['เคถเฅเคฌเคพ'],
['เคถเฅเคเฅเคฎ'],
['เคถเฅเคฎ'],
['เคถเฅเคฒเฅ'],
['เคถเคฟเคฎเฅ'],
['เคถเคฟเคจเคพเคฐ'],
['เคธเฅเคฆเฅเคจ', ' เคธเฅเคฆเฅเคจเคฟเคฏเฅเค'],
['เคธเฅเคฒเคพเคธ', ' เคธเคฟเคฒเฅเคตเคพเคจเฅเคธ'],
['เคถเคฎเฅเคจ'],
['เคถเคฎเฅเคจ เคเคจเคพเคจเฅ'],
['เคธเฅเคจเฅ', ' เคธเฅเคจเฅ เคชเคฐเฅเคตเคค'],
['เคธเคฆเฅเคฎ'],
['เคธเฅเคฒเฅเคฎเคพเคจ'],
['เคธเฅเคคเคฟเคซเคจเฅเคธ'],
['เคธเฅเคเฅเคเฅเคค'],
['เคธเฅเคฐเคฟเคฏเคพ'],
['เคคเคพเคฎเคพเคฐ'],
['เคคเคฐเฅเคถเฅเคถ'],
['เคคเคฐเคธเฅเคธ'],
['เคคเฅเคฐเคน'],
['เคฅเคฟเคธเฅเคธเคฒเฅเคจเฅเคเฅ', ' เคฅเคฟเคธเฅเคธเคฒเฅเคจเฅเคเคฟเคฏเฅเค', ' เคฅเคฟเคธเฅเคธเคฒเฅเคจเฅเคเคฟเคฏเฅเค'],
['เคฅเฅเคฎเคพ'],
['เคคเฅเคฎเฅเคฅเคฟเคฏเฅเคธ'],
['เคคเคฟเคฐเฅเคธเคพ'],
['เคคเฅเคคเฅเคธ'],
['เคคเฅเคฐเฅเคเคธ'],
['เคคเฅเคฌเคฒ'],
['เคคเฅเคเคฟเคเฅเคธ'],
['เคธเฅเคฐ', ' เคธเฅเคฐ เคเฅ เคฒเฅเค'],
['เคเคฐ'],
['เคเคฐเคฟเคฏเฅเคฏเคพเคน'],
['เคเคเฅเคเคฟเคฏเคพเคน', ' เคเคเคฐเฅเคฏเคพเคน'],
['เคตเคถเคคเฅ'],
['เคเคเฅเคเค'],
['เคธเคพเคฆเฅเค'],
['เคเคฌเฅเคฆเฅ'],
['เคเคฌเฅเคฒเฅเคจ'],
['เคเคเคฐเฅเคฏเคพเคน (เคจเคฏเคพ เคจเคฟเคฏเคฎ)'],
['เคเคเคฐเฅเคฏเคพเคน (เคชเฅเคฐเคพเคจเคพ เคจเคฟเคฏเคฎ)'],
['เคธเคฟเคฆเคเคฟเคฏเฅเคฏเคพเคน'],
['เคธเคชเคจเฅเคฏเคพเคน'],
['เคเคฐเฅเคฌเฅเคฌเคพเคฌเฅเคฒ'],
['เคธเฅเคเคฐ'],
['เคเคธเฅเคฐเคพเคเคฒ เคเฅ เคฌเคพเคฐเคน เคเฅเคคเฅเคฐ', ' เคเคธเฅเคฐเคพเคเคฒ เคเฅ เคฌเคพเคฐเคน เคเฅเคคเฅเคฐ', ' เคฌเคพเคฐเคน เคเฅเคคเฅเคฐ'],
['เคเคฅเคพเคน เคเฅเคฃเฅเคก', ' เคเคฅเคพเคน เคเคกเฅเคขเฅ'],
['เคฌเคฌเฅเคฒ'],
['เคฆเฅเคท เคฒเคเคพเคจเคพ', ' เคฆเฅเคทเคพ เคฒเคเคพเคคเคพ เคนเฅ', ' เคฆเฅเคทเฅ', ' เคฆเฅเคท เคฒเคเคพ เคฐเคนเฅ เคนเฅ', ' เคเคฐเฅเคช เคฒเคเคพเคจเฅ เคตเคพเคฒเคพ', ' เคเคฐเฅเคช เคฒเคเคพเคจเฅ เคตเคพเคฒเฅ', ' เคเคฐเฅเคช', ' เคเคฐเฅเคชเฅเค'],
['เคฎเคพเคจเคจเคพ', ' เคฎเคพเคจ เคฒเฅเคคเคพ เคนเฅ', ' เคฎเคพเคจเคเคฐ', ' เคฎเคพเคจ เคฒเฅเคคเคพ', ' เคฎเคพเคจ เคฒเคฟเคฏเคพ'],
['เคจเคฟเคฐเฅเคฆเฅเคท', ' เคจเคฟเคฐเฅเคฆเฅเคท เค เคนเคฐเคพเคจเคพ', ' เคเฅเค เคเคพเคคเคพ'],
['เคชเฅเคฐเคถเคพเคธเคจ', ' เคชเฅเคฐเคถเคพเคธเค', ' เคชเฅเคฐเคถเคพเคธเค', ' เคชเฅเคฐเคถเคพเคธเคฟเคค', ' เคชเฅเคฐเคถเคพเคธเคจ'],
['เคเคฟเคคเคพเคจเคพ'],
['เคตเคฟเคฐเฅเคงเฅ', ' เคฆเฅเคฐเฅเคนเคฟเคฏเฅเค', ' เคฌเฅเคฐเฅ', ' เคถเคคเฅเคฐเฅเคเค'],
['เคฆเฅเคเคเคฟเคค', ' เคเฅเคฒเฅเคถ เคฆเฅเคจเคพ', ' เคฆเฅเคเคเคฟเคค', ' เคฎเคพเคฐเฅเคเคพ', ' เคฆเฅเคเค', ' เคเฅเคฒเฅเคถ'],
['เคเคฏเฅ', ' เคฏเฅเคเฅเค', ' เคตเฅเคฆเฅเคง'],
['เคตเฅเคฏเคพเคเฅเคฒ', ' เคฒเคฒเคเคพเคฐเคจเฅ', ' เคเคฌเคฐเคพ เคเคฏเคพ'],
['เคฆเคพเคจ'],
['เคงเฅเคช เคเฅ เคตเฅเคฆเฅ', ' เคงเฅเคช เคตเฅเคฆเฅ'],
['เคเคเคฟเคค', ' เคตเคฟเคธเฅเคฎเคฏ', ' เคเคเคฎเฅเคญเคพ เคเคฟเคฏเคพ', ' เคเคเคฎเฅเคญเคพ', ' เคเคเคฎเฅเคญเคพ เคเคฐเคเฅ', ' เคเคเคฎเฅเคญเฅ เคฎเฅเค เค เคเคพเคจเคพ', ' เคเคถเฅเคเคฐเฅเคฏเคเคฐเฅเคฎเฅเค', ' เคเคฎเคคเฅเคเคพเคฐ', ' เคเคฎเคคเฅเคเคพเคฐเฅเค'],
['เคฆเฅเคค', ' เคฆเฅเคคเฅเค', ' เคชเฅเคฐเคคเคฟเคจเคฟเคงเคฟ', ' เคฐเคพเคเคฆเฅเคค'],
['เคเฅเคฐเฅเคง', ' เคเฅเคฐเฅเคงเคฟเคค เคนเฅเค', ' เคเฅเคฐเฅเคงเคฟเคค'],
['เคชเฅเฅเคพ'],
['เคงเคจเฅเคฐเฅเคงเคพเคฐเฅ', ' เคงเคจเฅเคฐเฅเคงเคพเคฐเคฟเคฏเฅเค'],
['เคนเคฅเคฟเคฏเคพเคฐ', ' เคถเคธเฅเคคเฅเคฐเฅเค เคเคพ เคเคฐ'],
['เคเคนเคเคเคพเคฐเฅ', ' เคเคญเคฟเคฎเคพเคจ เคเคฐเคเฅ', ' เคเคนเคเคเคพเคฐ'],
['เคฐเคพเค', ' เคฐเคพเค', ' เคงเฅเคฒ'],
['เคฎเคฃเฅเคกเคฒเฅ', ' เคธเคญเคพเคเค', ' เคเคเคเฅเค เคพ เคเคฐเคจเคพ', ' เคเคเคเฅเค เคพ เคเคฟเคฏเคพ'],
['เคจเคฟเคฏเฅเคเฅเคค เคเคฐเคจเคพ', ' เคฌเคพเคเคเคพ', ' เค เคนเคฐเคพเค', ' เคญเคพเค', ' เคญเคพเคเฅเค', ' เคซเคฟเคฐ เคฆเฅ เคฆเฅเคจเคพ'],
['เคญเคเค', ' เคญเคเค เคเคพเคคเฅ เคนเฅเค', ' เคญเคเค เคเค', ' เคญเคเคเคพ เคฆเฅเคจเคพ', ' เคญเคเคเคพ เคฆเคฟเคฏเคพ', ' เคญเคเคเคจเคพ', ' เคญเคเคเคพ เคฆเคฟเคฏเคพ', ' เคญเคเคเคจเคพ'],
['เคฌเคฆเคฒเคพ เคฒเฅเคจเคพ', ' เคชเคฒเคเคพ เคฒเฅเคจเฅเคตเคพเคฒเคพ', ' เคชเคฒเคเคพ เคฒเคฟเคฏเคพ', ' เคฌเคฆเคฒเคพ เคฒเฅเคจเฅ', ' เคชเคฒเคเคพ เคฒเฅเคจเฅเคตเคพเคฒเคพ', ' เคฌเคฆเคฒเคพ', ' เคชเคฒเคเคพ เคฒเฅเคจเคพ'],
['เคญเคฏ', ' เคญเคฏเคฏเฅเคเฅเคฏ'],
['เคเฅเคฒเฅเคนเคพเคกเคผเคพ', ' เคเฅเคฒเฅเคนเคพเคกเคผเฅ'],
['เคญเฅเค'],
['เคเฅ'],
['เคเคเคพเคกเคผ'],
['เคเฅเคเคฐเฅ', ' เคเฅเคเคฐเคฟเคฏเคพเค', ' เคเฅเคเคฐเคฟเคฏเคพเค เคญเคฐเคเคฐ'],
['เคธเคนเคจเคพ', ' เคธเคน เคฒเฅเคคเคพ เคนเฅ', ' เคเค เคพเค', ' เคขเฅเคจเฅเคตเคพเคฒเคพ'],
['เคฐเฅเค', ' เคฐเฅเคเคจเคฟเคฏเฅเค'],
['เคชเคถเฅ', ' เคชเคถเฅเคเค'],
['เคตเคฟเคจเคคเฅ', ' เคตเคฟเคจเคคเฅ เคเฅ', ' เคตเคฟเคจเคคเฅ เคเฅ', ' เคเคเคเคพเคฒ'],
['เคชเคเฅเคตเคพเคจเคพ', ' เคชเคเคกเคผเคตเคพเคจเฅเคตเคพเคฒเฅ', ' เคชเคเคกเคผเคตเคพเคฏเคพ', ' เคตเคฟเคถเฅเคตเคพเคธเคเคพเคค เคเคฟเคฏเคพ', ' เคชเคเคกเคผเคจเฅเคตเคพเคฒเคพ', ' เคชเคเคกเคผเคตเคพเคจเฅเคตเคพเคฒเฅ'],
['เคฆเคฟเคจ', ' เคฆเคฟเคจเฅเค'],
['เคชเคนเคฐ', ' เคเคเคเฅ'],
['เคฎเคนเฅเคจเฅ', ' เคฎเคนเฅเคจเฅเค', ' เคฎเคนเฅเคจเฅ เคเฅ'],
['เคชเคนเคฐ(เคฌเคพเคเคฌเคฒ เคเคพ เคธเคฎเคฏ)'],
['เคธเคชเฅเคคเคพเคน', ' เคธเคชเฅเคคเคพเคนเฅเค'],
['เคตเคฐเฅเคท', ' เคตเคฐเฅเคทเฅเค'],
['เคฆเฅเคท', ' เคฆเฅเคท', ' เคจเคฟเคฐเฅเคฆเฅเคท'],
['เคฒเคนเฅ เคฌเคนเคพเคจเคพ'],
['เคฎเคฟเคเคพ เคฆเฅ', ' เคฎเคฟเคเคพ เคฆเฅเคคเคพ', ' เคฎเคฟเคเคพเคฏเคพ เคเคพเคคเคพ', ' เคฎเคฟเคเคพ เคกเคพเคฒเฅ', ' เคฎเคฟเคเคพ', ' เคฎเคฟเค เคเค'],
['เคจเคฟเคกเคฐ', ' เคจเคฟเคกเคฐ เคนเฅเคเคฐ', ' เคธเคพเคนเคธ', ' เคธเคพเคนเคธเฅ'],
['เคเฅเคตเคจ เคเฅ เคชเฅเคธเฅเคคเค'],
['เคฆเคฃเฅเคกเคตเคคเฅ', ' เคเฅเค เคเคฏเคพ', ' เคฆเคฃเฅเคกเคตเคคเฅ เคเคฟเคฏเคพ', ' เคเฅเคเคจเฅ', ' เคฆเคฃเฅเคกเคตเคคเฅ เคเคฐเคจเคพ', ' เคฆเคฃเฅเคกเคตเคคเฅ เคเคฐเฅ', ' เคฆเคฃเฅเคกเคตเคคเฅ เคเคฟเคฏเคพ', ' เคฆเคฃเฅเคกเคตเคคเฅ เคเคฐเคคเฅ เคฐเคนเฅ'],
['เคงเคจเฅเคท เคเคฐ เคคเฅเคฐ', ' เคงเคจเฅเคท เคเคฐ เคคเฅเคฐ'],
['เคฐเฅเคเฅ'],
['เคเคฟเคฒเคฎ', ' เคเคฟเคฒเคฎเฅเค', ' เคเคชเคฐเคพเคธ'],
['เคถเฅเคตเคพเคเคธ', ' เคถเฅเคตเคพเคเคธ เคซเฅเคเคเคจเคพ', ' เคธเคพเคเคธ เคฒเฅเคคเคพ เคนเฅ', ' เคถเฅเคตเคพเคเคธ เคซเฅเคเค เคฆเคฟเคฏเคพ', ' เคธเคพเคเคธ เคฒเฅเคจเคพ'],
['เคเฅเคธ', ' เคเฅเคธ', ' เคเฅเคธ เคฆเคฟเคฏเคพ', ' เคเฅเคธ เคฒเฅเคคเฅ เคนเฅเค'],
['เคฆเฅเคฒเฅเคนเคจ', ' เคตเคงเฅ', ' เคตเคฟเคตเคพเคน'],
['เคฆเฅเคฒเฅเคนเคพ', ' เคฆเฅเคฒเฅเคนเฅ'],
['เคชเฅเคคเคฒ'],
['เคฌเฅเค', ' เคฌเฅเค', ' เคฌเฅเค เคธเฅ เคฆเคฌเฅ', ' เคญเคพเคฐเฅ'],
['เคนเฅเคฎเคฌเคฒเคฟ', ' เคนเคตเคจ', ' เคเคเฅเคจเคฟเคฆเคพเคจ'],
['เคฎเคฟเคเฅเคเฅ เคฆเฅเคจเคพ', ' เคฆเคฌเคพ เคฆเฅเคคเคพ เคนเฅ', ' เคเคพเคกเคผเฅ เคเค', ' เคฎเคฟเคเฅเคเฅ เคฆเฅ', ' เคฎเคฟเคเฅเคเฅ เคฆเฅเคจเฅ'],
['เคเคเค', ' เคเคเคเฅเค'],
['เคฌเคจเฅเคฆเฅ เคฌเคจเคพเคจเคพ', ' เคฌเคจเฅเคฆเฅ', ' เคตเคถ เคฎเฅเค เคเคฐเคจเคพ', ' เคฎเฅเคนเคฟเคค', ' เคฌเคเคงเฅเคเค'],
['เคจเคฟเคเคพเคฒเคจเคพ', ' เคจเคฟเคเคพเคฒ เคฆเคฟเคฏเคพ', ' เคฌเคพเคนเคฐ เคจเคฟเคเคพเคฒเคจเฅ', ' เคซเฅเคเค เคฆเฅเคจเคพ', ' เคซเฅเคเคเคเคฐ'],
['เคเค เคพ เคฒเคฟเคฏเคพ', ' เคเคพ เคฒเคฟเคฏเคพ', ' เคฆเฅเฅ เคเคฏเคพ'],
['เคฆเฅเคตเคฆเคพเคฐเฅ', ' เคฆเฅเคตเคฆเคพเคฐเฅเค', ' เคฆเฅเคตเคฆเคพเคฐเฅ เคเฅ เคฒเคเคกเคผเฅ'],
['เคจเคพเคฎ เคฒเคฟเคเคพเค'],
['เคญเฅเคธเฅ'],
['เคฐเคฅ', ' เคฐเคฅเฅเค', ' เคฐเคฅเคฟเคฏเฅเค'],
['เคเคฐเฅเคฌ', ' เคเคฐเฅเคฌเฅเค', ' เคเคฐเฅเคฌเฅเค'],
['เคชเฅเคฐเคงเคพเคจ', ' เคชเฅเคฐเคงเคพเคจเฅเค'],
['เคชเฅเคฐเคงเคพเคจ เคฏเคพเคเคเฅเค'],
['เคเคคเคฟเคนเคพเคธ'],
['เคจเคฟเคตเคพเคธเฅ', ' เคจเคฟเคตเคพเคธเคฟเคฏเฅเค', ' citizenship'],
['เคเฅเคฒ', ' เคเฅเคฒเฅเค'],
['เคชเคนเคจเคพเคจเคพ', ' เคชเคนเคจเคจเคพ', ' เคตเคธเฅเคคเฅเคฐ', ' เคเคเคเคฐเคเคพ', ' เคเคคเคพเคฐเคจเคพ'],
['เคถเคพเคจเฅเคคเคฟ', ' เคถเคพเคจเฅเคคเคฟ', ' เคถเคพเคจเฅเคคเคฟ เคฆเฅ', ' เคถเคพเคจเฅเคคเคฟเคฆเคพเคฏเค', ' เคถเคพเคจเฅเคคเคฟ เคฆเฅเคจเฅเคตเคพเคฒเคพ', ' เคถเคพเคจเฅเคคเคฟ เคฆเฅเคจเฅเคตเคพเคฒเฅ', ' เคถเคพเคจเฅเคคเคฟ เคจเคนเฅเค เคฎเคฟเคฒเฅ'],
['เคธเฅเคจเคพเคชเคคเคฟ', ' เคธเคฐเคฆเคพเคฐเฅเค'],
['เคเคฐเคจเคพ', ' เคเคฐเคคเคพ เคนเฅ', ' เคเคฟเคฏเคพ เคนเฅ', ' เคเคฐเคคเฅ', ' เคชเฅเคฐเคคเคฟเคเฅเคเคพ'],
['เคธเคพเคฅเฅ', ' เคธเคเคเฅ'],
['เคเคฐเฅเคญ เคงเคพเคฐเคฃ', ' เคเคฐเฅเคญเคตเคคเฅ', ' เคเคฐเฅเคญเคตเคคเฅ', ' เคเคฐเฅเคญเคตเคคเฅ เคนเฅเคจเคพ'],
['เคฐเคเฅเคฒ', ' เคฐเคเฅเคฒเคฟเคฏเฅเค'],
['เคญเคฐเฅเคธเคพ', ' เคญเคฐเฅเคธเคพ เคเคฐเคจเคพ', ' เคเคคเฅเคฎเคตเคฟเคถเฅเคตเคพเคธ เคธเฅ'],
['เคฆเฅเคขเคผ เคเคฐเคจเฅ', ' เคฆเฅเคขเคผ เคเคฐเคคเคพ', ' เคชเคเฅเคเฅ เคเฅ', ' เคชเฅเคฐเคฎเคพเคฃ เคฆเฅเคจเฅ'],
['เคญเคธเฅเคฎ เคเคฐ เคฆเฅเคเคพ', ' เคเคพเค', ' เคญเคธเฅเคฎ เคเคฟเคฏเคพ', ' เคญเคธเฅเคฎ เคเคฐเคคเฅ เคเคพเคเคเฅ'],
['เคคเฅเคเฅเค เคเคพเคจเฅ', ' เคคเฅเคเฅเค'],
['เคฌเคฟเคเคกเคผเคเคฐ', ' เคจเคพเคถ เคนเฅเคคเฅ เคนเฅ', ' เคฌเคฟเคเคกเคผ เคเค', ' เคญเฅเคฐเคทเฅเค', ' เคธเคกเคผเคพเคนเค', ' เคฌเคฟเคเคกเคผ เคเค'],
['เคฎเคนเคพเคธเคญเคพ', ' เคธเคญเคพเคเค'],
['เคธเคฎเฅเคฎเคคเคฟ', ' เคธเคฎเฅเคฎเคคเคฟ', ' เคธเคฎเฅเคฎเคคเคฟ เคฆเฅ', ' เคฎเคเคคเฅเคฐเฅ', ' เคธเคฎเฅเคฎเคคเคฟ เคฆเฅเคจเฅเคตเคพเคฒเฅเค', ' เคฏเฅเคเฅเคคเคฟ ', ' เคฏเฅเคเฅเคคเคฟ เคเคฐเคจเฅเคตเคพเคฒเคพ', ' เคฎเคเคคเฅเคฐเคฟเคฏเฅเค', ' เคธเคฎเฅเคฎเคคเคฟ เคฆเฅ'],
['เคนเคฟเคฏเคพเคต', ' เคนเคฟเคฏเคพเคต เคฌเคพเคเคงเฅ', ' เคชเฅเคฐเฅเคคเฅเคธเคพเคนเคฟเคค', ' เคชเฅเคฐเฅเคคเฅเคธเคพเคนเคจ', ' เคขเคพเคขเคผเคธ เคฌเคพเคเคงเฅ', ' เคเคฆเคพเคธ', ' เคจเคฟเคฐเฅเคคเฅเคธเคพเคนเคฟเคค', ' เคเคฆเคพเคธ เคเคฐเคจเคพ', ' discouraging'],
['เคเคเคเคจ', ' เคเคเคเคจเฅเค', ' เคเคเคเคจ', ' เคเคเคเคจเฅเค'],
['เคเคพเคฏ', ' เคเคพเคฏเฅเค', ' เคฌเฅเคฒ', ' เคฌเฅเคฒเฅเค', ' เคฌเคเคกเคผเคพ', ' เคฌเคเคกเคผเฅเค', ' เคชเคถเฅเคเค', ' เคฌเคเคฟเคฏเคพ', ' เคฌเฅเคฒ', ' เคฌเฅเคฒเฅเค'],
['เคเคคเฅ\u200dเคชเคจเฅ\u200dเคจ', ' เคธเคฐเฅเคเคจ เคเคฐเคจเคพ', ' เคธเฅเคทเฅเคเคฟ เคเฅ', ' เคธเฅเคทเฅเคเคฟ', ' เคธเฅเคเคจเคนเคพเคฐ'],
['เคชเฅเคฐเคพเคฃเฅ', ' เคชเฅเคฐเคพเคฃเคฟเคฏเฅเค'],
['เคฆเฅเคท', ' เคเคชเคฐเคพเคงเฅเค', ' เคเฅเคเคฐเฅเคฎเฅ', ' เคเฅเคเคฟเคฒ เคเคจ'],
['เคฎเฅเคเฅเค', ' เคฎเฅเคเฅเค', ' เคฎเฅเคเฅเค เคชเคนเคจเคพเคจเคพ', ' เคฎเฅเคเฅเค เคฐเคเคพ'],
['เคชเฅเคเคพเคฐ', ' เคเคฟเคฒเฅเคฒเคพเคนเค', ' เคชเฅเคเคพเคฐเคเคฐ', ' เคฐเฅเคจเคพ', ' เคฆเฅเคนเคพเค', ' เคฆเฅเคนเคพเค', ' เคชเฅเคเคพเคฐเคเคฐ', ' เคเคฟเคฒเฅเคฒเคพเคนเค', ' เคเคฏ-เคเคฏเคเคพเคฐ'],
['เคชเคฟเคฒเคพเคจเฅเคนเคพเคฐเคพ', ' เคชเคฟเคฒเคพเคจเฅเคตเคพเคฒเฅ'],
['เคชเคฐเคฆเคพ', ' เคชเคฐเคฆเฅ'],
['เคจเคพเคถ เคเคฟเคฏเคพ', ' เคจเคพเคถ เคเคฐเฅ', ' เคเคพเคเคเคฐ'],
['เคธเคจเฅเคฌเคฐ'],
['เคเคเคงเคฟเคฏเคพเคฐเคพ'],
['เคฎเคฐเฅ', ' เคฎเคฐ เคเคพเค', ' เคฎเคฐ เคเคฏเคพ', ' เคฎเคฐเฅ เคนเฅเคเค', ' เคชเฅเคฐเคพเคฃเคจเคพเคถเค', ' เคฎเคฐเฅ เคนเฅเค', ' เคฎเฅเคคเฅเคฏเฅ', ' เคฎเคฐเฅเคเคเฅ', ' เคฎเฅเคคเฅเคฏเฅ เคเฅ เคญเคฏ'],
['เคงเฅเคเคพ', ' เคงเฅเคเคพ', ' เคงเฅเคเคพ เคฆเคฟเคฏเคพ', ' เคเคฒเคคเคพ', ' เคเคฒเฅ', ' เคงเฅเคเฅเคฌเคพเค', ' เคงเฅเคเฅเคฌเคพเคเฅเค', ' เคเคฒเคชเฅเคฐเฅเคฃ', ' เคงเฅเคฐเฅเคคเคคเคพ เคธเฅ', ' เคเคฒ เคฎเฅเค', ' เคเคฒเคคเคพ', ' เคเคฒเฅ'],
['เคตเคฐเฅเคฃเคจ', ' เคตเคพเคฃเฅ', ' เค เคนเคฐเคพเคฏเคพ', ' เคเคนเคคเคพ เคนเฅ', ' เคตเคพเคฃเฅ', ' เคเฅเคทเคฃเคพเคเค'],
['เคเคเฅเคเคพ', ' เคเคเฅเคเคพเคเค', ' เคเคฆเฅเคถ เคฆเฅ'],
['เคธเคฎเคฐเฅเคชเคฃ เคเคฐเฅ', ' เคธเคฎเคฐเฅเคชเคฃ เคเคฐเคจเคพ', ' เคธเคฎเคฐเฅเคชเคฟเคค', ' เคธเคฎเคฐเฅเคชเคฃ'],
['เคนเคฟเคฐเคจ', ' เคนเคฟเคฐเคจเฅ', ' เคนเคฟเคฐเคจเคฟเคฏเคพเค', ' เคฎเฅเคเคจเฅ', ' เคฎเฅเค', ' เคฎเฅเคเฅเค'],
['เคเคถเฅเคฆเฅเคง', ' เคเคถเฅเคฆเฅเคง', ' เคเคถเฅเคฆเฅเคง เคเคฐ เคกเคพเคฒเคพ', ' เคเคชเคตเคฟเคคเฅเคฐ เคเคฐเคจเฅ', ' เคเคชเคตเคฟเคคเฅเคฐ เคเคฟเค', ' เคเคถเฅเคฆเฅเคง เคนเฅเค', ' เคเคถเฅเคฆเฅเคง เคนเฅ เคเคฏเคพ', ' เคเคถเฅเคฆเฅเคง เคนเฅ เคเค'],
['เคเคจเคจเฅเคฆ', ' เคชเฅเคฐเคธเคจเฅ\u200dเคจ', ' เคธเฅเคเฅ', ' เคฎเคจเฅเคนเคฐ'],
['เคเฅเฅเคพเคจเคพ', ' เคเฅเฅเคพเคจเคพ', ' เคเฅเคกเคผเคพเคฏเคพ ', 'เคเฅเคกเคผเคพเคฏเคพ เคเคพเคจเคพ', ' เคเฅเคเคเคพเคฐเคพ เคฆเคฟเคฒเคพเคจเฅ เคตเคพเคฒเคพ ', 'เคเฅเคเคเคพเคฐเคพ'],
['เคเคคเคฐเฅเคเคพ', ' เคขเคฒเคพเคจ', ' เคเคคเคฐ เคเคฏเคพ', ' เคเคคเคฐเคคเฅ', ' เคตเคเคถ', ' เคตเคเคถ'],
['เคเคถเฅเคฆเฅเคง เคเคฐเคจเคพ', ' เคเคถเฅเคฆเฅเคง เคเคฟเคฏเคพ', ' เคเคชเคตเคฟเคคเฅเคฐ'],
['เคเคเคเคฒ', ' เคเฅเคกเคผเคเคฐ ', ' เคธเฅเคจเคธเคพเคจ', ' เคเฅเคกเคผ เคฆเฅเคจเคพ', ' เคเคเคเคฒ', ' เคเคเคเคฒเฅเค'],
['เคเคเคกเคผ', ' เคเคเคพเคกเคผ', ' เคเคเคกเคผเฅ'],
['เค เคนเคฐเคพเคฏเคพ', ' เคจเคฟเคฏเฅเคเฅเคค', ' เคฆเคถเคพ', ' เคชเคนเคฒเฅ เคธเฅ เค เคนเคฐเคพเคฏเคพ'],
['เคจเคพเคถ', ' เคจเคพเคถ เคเคฐเคคเคพ', ' เคจเคพเคถ เคเคฟเคฏเคพ', ' เคจเคพเคถ เคเคฐเคจเฅเคตเคพเคฒเคพ', ' เคจเคพเคถ เคเคฐเคจเฅเคตเคพเคฒเฅ', ' เคธเคคเฅเคฏเคพเคจเคพเคถ เคเคฐเคจเคพ'],
['เคเฅเคฃเคพ', ' เคเฅเคฃเคพ เคเคฐเคพ', ' เคเฅเคฃเคฟเคค'],
['เคจเคพเคถ เคนเฅเคเคพ', ' เคเคเคพเคกเคผ เคฆเคฟเคฏเคพ', ' เคจเคพเคถ เคเคฐเคคเคพ', ' เคเคเคพเคกเคผ', ' เคเคเคพเคกเคผ'],
['เคญเคธเฅเคฎ เคนเฅ เคเคพเคเคเคพ', ' เคญเคธเฅเคฎ เคเคฐเคคเฅ', ' เคญเคธเฅเคฎ เคนเฅเค', ' เคญเคธเฅเคฎ เคเคฐเคจเฅเคตเคพเคฒเฅ'],
['เคชเคนเคเคพเคจ', ' เคเคพเคจ เคธเคเคคเคพ', ' เคธเคฎเคเคฆเคพเคฐ', ' เคตเคฟเคตเฅเค-เคถเคเฅเคคเคฟ'],
['เคเคชเคฎเคพเคจ', ' เคเคชเคตเคฟเคคเฅเคฐ เค เคนเคฐเคพเคคเฅ', ' เคเคชเคตเคฟเคคเฅเคฐ เค เคนเคฐเคพเคฏเคพ', ' เคฒเคเฅเคเคพ เคเฅ เคฌเคพเคค'],
['เคจเคฟเคฐเคพเคฆเคฐ', ' เคเคชเคฎเคพเคจ เคเคฐเคคเคพ', ' เคเคจเคพเคฆเคฐ เคเคฟเคฏเคพ', ' เคจเฅเค'],
['เคเคตเคเฅเคเคพ', ' เคเคตเคเฅเคเคพ', ' เคเคตเคเฅเคเคพ เคเคฟเคฏเคพ', ' เคเคเฅเคเคพ เคเคพ เคเคฒเฅเคฒเคเคเคจ', ' เคเคตเคเฅเคเคพเคเคพเคฐเฅ'],
['เคเคฟเคคเคฐเคพ', ' เคคเคฟเคคเคฐ-เคฌเคฟเคคเคฐ เคนเฅเคเคฐ'],
['เคญเคพเคตเฅ เคเคนเคจเฅ', ' เคญเคพเคตเฅ เคเคนเคจเฅเคตเคพเคฒเฅเค', ' เคญเฅเคค เคธเคฟเคฆเฅเคงเคฟเคตเคพเคฒเฅเค', ' เคเฅเคฏเฅเคคเคฟเคทเฅ'],
['เคคเฅเคฏเคพเคเคจเคพ'],
['เคงเคฐเฅเคฎเฅเคชเคฆเฅเคถ'],
['เคเคฆเคนเคพ', ' เคเคเฅเคเคฐ'],
['เคฆเคฃเฅเคก'],
['เคเฅเคเค'],
['เคชเคฃเฅเคกเฅเคเฅ', ' เคเคฌเฅเคคเคฐ'],
['เคธเฅเคตเคชเฅเคจ'],
['เคเคฐเฅเค'],
['เคฎเคคเคตเคพเคฒเฅ', ' เคชเคฟเคฏเคเฅเคเคกเคผ'],
['เคเฅเคฌเคฐ', ' เคเคพเคฆ'],
['เคเคเคพเคฌ', ' เคเคเคพเคฌเฅเค'],
['เคชเฅเคฅเฅเคตเฅ', ' เคฎเคฟเคเฅเคเฅ เคเคพ', ' เคชเคพเคฐเฅเคฅเคฟเคต'],
['เคชเฅเคฐเคพเคเฅเคจ', ' เคชเฅเคฐเคพเคเฅเคจเฅเค'],
['เคฌเคจเคพ เคฐเคนเฅเคเคพ', ' เคงเฅเคฐเค เคงเคฐเฅเคเคพ', ' เคธเคนเคคเฅ', ' เคธเฅเคฅเคฟเคฐ เคฐเคนเคคเคพ', ' เคงเฅเคฐเค'],
['เคธเฅเคตเคพ เคเคฐเคจเคพ', ' เคฆเคพเคธ เคฌเคจเคพเคจเคพ', ' เคฆเคพเคธเคคเฅเคต', ' เคฌเคจเฅเคงเคจ', ' เคตเคถ เคฎเฅเค', ' เคฌเคจเฅเคงเคจ', ' เคฌเคพเคเคงเคพ'],
['เคกเคพเคน', ' เคฒเคพเคฒเค'],
['เคเฅเคเคฐเฅเคฎเฅ', ' เคเฅเคเคฐเฅเคฎเคฟเคฏเฅเค', ' เคฌเฅเคฐเคพเค'],
['เคฌเคเคงเฅเคเค', ' เคฌเคจเฅเคงเฅเคเค', ' เคฌเคจเฅเคฆเฅ เคเคฐเคเฅ'],
['เคชเฅเคฐเคซเฅเคฒเฅเคฒเคฟเคค', ' เคฎเคเคจ', ' เคชเฅเคฐเคธเคจเฅ\u200dเคจ', ' เคเคจเคจเฅเคฆ เคเคฐเคจเฅเคตเคพเคฒเฅ'],
['เคฎเฅเคเคน', ' เคฎเฅเคเคน', ' เคเฅ เคธเคพเคฎเคจเฅ', ' เคเฅ เคธเคพเคฎเคจเฅ', ' เคเฅเคนเคฐเฅ', ' เคฎเฅเคเคน เคเฅ เคฌเคฒ เคเคฟเคฐเฅ'],
['เคเฅเค เคพ เคญเคตเคฟเคทเฅเคฏเคฆเฅเคตเคเฅเคคเคพ', ' เคเฅเค เฅ เคญเคตเคฟเคทเฅเคฏเคฆเฅเคตเคเฅเคคเคพเคเค'],
['เคเคงเคฐเฅเคฎเฅ เคธเคพเคเฅเคทเฅ', ' เคเคจเฅเคฏเคพเคฏเฅ เคธเคพเคเฅเคทเฅ', ' เคเฅเค เฅ เคเคตเคพเคนเฅ', ' เคเฅเค เคพ เคธเคพเคเฅเคทเฅ', ' เคเฅเค เฅ เคเคตเคพเคน'],
['เคชเคฐเคฟเคตเคพเคฐ', ' เคเคฐเคพเคจเฅเค'],
['เคเคเคพเคฒ', ' เคเคเคพเคฒ'],
['เคเคชเคตเคพเคธ', ' เคเคชเคตเคพเคธ', ' เคเคชเคตเคพเคธ เคเคฟเคฏเคพ', ' เคเคชเคตเคพเคธ เคเคฐเคจเคพ', ' เคเคชเคตเคพเคธ เคเคฐเคจเคพ'],
['เคฎเฅเคฒเคชเคฟเคคเคพ', ' เคชเฅเคฐเฅเคตเคเฅเค', ' เคชเคฟเคคเคพ', ' เคฌเคพเคชเคฆเคพเคฆเฅ', ' เคเคจเฅเคฎเคพ', ' เคเคจเฅเคฎเคพเคคเคพ', ' เคชเฅเคฐเฅเคตเค', ' เคชเฅเคฐเฅเคตเคเฅเค', ' เคชเฅเคฐเคเคพเคเค'],
['เคชเคฐเฅเคต', ' เคชเคฐเฅเคตเฅเค', ' เคญเฅเค'],
['เคฎเฅเคฒเคฌเคฒเคฟ', ' เคฎเฅเคฒเคฌเคฒเคฟเคฏเฅเค'],
['เคชเคฐเฅเคต', ' เคชเคฐเฅเคต'],
['เคเคเคเฅเคฐ', ' เคเคเคเฅเคฐเฅเค'],
['เคธเคจเฅเคฌเคฐ', ' เคธเคจเฅเคฌเคฐ'],
['เคเค', ' เคเค', ' เคฒเฅเคเคเคฟเคฏเฅเค', ' เคเคฐเคเฅเค', ' เคเคฟเคฎเคจเคฟเคฏเฅเค', ' เคญเคเฅเค เคพ', ' เคเคเคเฅเค เคฟเคฏเคพเค'],
['เคชเคนเคฒเฅเค เฅ'],
['เคชเคนเคฒเฅ เคเคชเค'],
['เคฎเคเฅเค', ' เคชเคเคกเคผเคจเฅเคตเคพเคฒเฅ'],
['', ' เคญเฅเคกเคผ-เคฌเคเคฐเคฟเคฏเฅเค', ' เคเฅเคฃเฅเคก', ' เคฎเคฃเฅเคกเคฒเฅ', ' เคเฅเคฃเฅเคก', ' เคเคพเคฏ-เคฌเฅเคฒเฅเค'],
['เคเคฒ-เคชเฅเคฐเคฒเคฏ', ' เคฌเคพเคขเคผเฅเค', ' เคเคฒ เคฎเฅเค เคกเฅเคฌ เคเคฐ', ' เคฌเคพเคขเคผ', ' เคเคฒ เคธเฅ เคกเฅเคฌ เคเคพเคเคเคพ'],
['เคฌเคพเคเคธเฅเคฐเฅ', ' เคฌเคพเคเคธเฅเคฐเฅ', ' เคฌเคพเคเคธเฅเคฐเฅ', ' เคธเฅเคเฅ เคฌเคเคพเคจเฅ เคเคพ เคฏเคเคคเฅเคฐ'],
['เคชเคพเคเคตเฅเค เคเฅ เคเฅเคเฅ'],
['เคชเคฐเคฆเฅเคถเฅ', ' เคซเฅเค เคเคฐเคพ', ' เคเคฒเค เคเคฟเค', ' เคชเคฐเคพเค', ' เคชเคฐเคฆเฅเคถเฅ', ' เคชเคฐเคฆเฅเคถเคฟเคฏเฅเค'],
['เคชเคนเคฒเฅ เคธเฅ เคเคพเคจ เคฒเคฟเคฏเคพ', ' เคชเฅเคฐเฅเคต เคเฅเคเคพเคจ'],
['เคตเฅเคฏเคญเคฟเคเคพเคฐ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐ', ' เคเคจเฅเคคเคฟเค', ' เคชเคฐเคธเฅเคคเฅเคฐเฅเคเคฎเคจ'],
['เคฎเคฟเคฒเคพ', ' เคจเฅเคเคต เคกเคพเคฒเฅ', ' เคชเฅเคฐเคคเคฟเคทเฅเค เคพเคชเค', ' เคจเฅเคเคต', ' เคจเฅเคตเฅเค'],
['เคธเฅเคคเคพ', ' เคธเฅเคคเฅ', ' เคเคฐเคจเคพ', ' เคเคฐเคจเฅ', ' เคเคฎเคกเคผเคคเคพ'],
['เคฒเฅเคฌเคพเคจ'],
['เคธเฅเคตเคคเคเคคเฅเคฐ', ' เคเฅเคกเคผเคพเคคเคพ', ' เคเฅเค เคเคฏเคพ', ' เคฎเฅเคเฅเคค', ' เคธเฅเคตเคคเคเคคเฅเคฐเคคเคพ', ' เคธเฅเคเคค-เคฎเฅเคเคค', ' เคธเฅเคตเคคเคเคคเฅเคฐ เคฎเคจเฅเคทเฅเคฏ', ' เคธเฅเคตเฅเคเฅเคเคพ', ' เคธเฅเคตเคคเคเคคเฅเคฐเคคเคพ'],
['เคธเฅเคตเฅเคเฅเคเคพเคฌเคฒเคฟ', ' เคธเฅเคตเฅเคเฅเคเคพเคฌเคฒเคฟเคฏเฅเค'],
['เคซเคฒ', ' เคซเคฒเฅเค', ' เคซเคฒเคตเคจเฅเคค', ' เคจเคฟเคทเฅเคซเคฒ'],
['เคญเคเฅเค เคพ'],
['เคซเคพเคเค', ' เคซเคพเคเคเฅเค', ' เคฌเฅเคเคกเคผเฅเค', ' เคฆเฅเคตเคพเคฐเคชเคพเคฒ', ' เคเฅเคเฅเคฆเคพเคฐเฅเค', ' เคฆเฅเคตเคพเคฐ เคเฅ เคเคฎเฅเคญเฅ', ' เคฆเฅเคตเคพเคฐ', ' เคฆเฅเคตเคพเคฐ'],
['เคชเฅเคขเคผเฅ'],
['เคฐเคชเคพเค', ' เคฆเคพเคจเคต'],
['เคฌเคพเคเคงเฅ', ' เคฌเคพเคเคงเคพ เคนเฅเค'],
['เคฌเฅเคจเคจเฅ', ' เคฌเฅเคจเคคเคพ', ' เคฌเฅเคจเคพ เคนเฅเค', ' เคเคเคเฅเค เคพ เคเคฐเคจเคพ'],
['เคฌเคเคฐเคพ', ' เคฌเคเคฐเคฟเคฏเฅเค', ' เคฌเคเคฐเคฟเคฏเฅเค เคเฅ เคเคพเคฒเฅเค', ' scapegoat', ' เคฌเคเฅเคเฅ'],
['เคธเฅเคจเคพ', ' เคธเฅเคจเฅ'],
['เคเคพเคจเคพเคซเฅเคธเฅ', ' เคเคชเคถเคช', ' เคเคพเคจเคพเคซเฅเคธเฅ เคเคฐเคจเฅเคตเคพเคฒเคพ'],
['เคถเคพเคธเคจ เคเคฐเคจเคพ', ' เคชเฅเคฐเคญเฅเคคเคพ', ' เคชเฅเคฐเคงเคพเคจเคคเคพเคเค', ' เคฐเคพเคเฅเคฏเคชเคพเคฒ', ' เคฐเคพเคเฅเคฏเคชเคพเคฒเฅเค', ' เคนเคพเคเคฟเคฎ', ' เคธเฅเคฌเฅเคฆเคพเคฐเฅเค'],
['เคเคจเฅเคจ', ' เคเคฟเคจเคเฅเค', ' เคเฅเคคเฅเค'],
['เคเคจเฅเคจเคฌเคฒเคฟ', ' เคเคจเฅเคจเคฌเคฒเคฟเคฏเฅเค'],
['เคเคเคเฅเคฐ', ' เคฆเคพเค', ' เคฆเคพเค เคเฅ เคฒเคคเคพ'],
['เคเคฐเคพเคนเคคเฅ', ' เคฐเฅเคจเคพ', ' เคเคฐเคพเคนเคจเคพ'],
['เคฆเฅเคทเคฌเคฒเคฟ', ' เคฆเฅเคทเคฌเคฒเคฟเคฏเฅเค'],
['เคเคฒเฅเค', ' เคเคฒเฅ เคเคฟเคฐเคจเคพ', ' เคเคฒเคพ-เคตเฅเคทเฅเคเคฟ', ' เคเคฒเฅเค เคเคพ เคคเฅเคซเคผเคพเคจ'],
['เคนเคพเคฅ', ' เคนเคพเคฅเฅเค', ' เคนเคพเคฅ', ' เคธเฅเคชเคจเคพ', ' เคเฅ เคฆเฅเคตเคพเคฐเคพ', ' เคชเคฐ เคนเคพเคฅ เคฐเคเคจเคพ', ' เคชเคฐ เคนเคพเคฅ เคฒเคเคพเคคเคพ', ' เคฆเคพเคนเคฟเคจเคพ เคนเคพเคฅ', ' เคฆเคพเคนเคฟเคจเฅ เคนเคพเคฅ', ' เคเฅ เคนเคพเคฅ เคธเฅ'],
['เคฒเคเคเคพ เคนเฅเค', ' เคฒเคเคเคพเค', ' เคฒเคเคเคพเคฏเคพ เคเคฏเคพ', ' เคฒเคเคเคพเคเคฐ', ' เคชเคฐเฅเคฆเฅเค', ' เคฒเคเคเคพ เคฆเคฟเคฏเคพ'],
['เคเค เคฟเคจ', ' เคเคกเคผเคพ', ' เคธเคฌเคธเฅ เคเค เคฟเคจ', ' เคเค เฅเคฐ', ' เคเค เฅเคฐ เคเคฐ เคฒเฅเคคเคพ เคนเฅ', ' เคนเค เฅเคฒเคพ', ' เคเค เฅเคฐ เคฐเคนเฅเคเคพ', ' เคเค เฅเคฐเคคเคพ'],
['เคตเฅเคฃเคพ', ' เคตเฅเคฃเคพเคเค', ' เคฌเคเคพเคจเฅเคตเคพเคฒเคพ', ' เคตเฅเคฃเคพ เคฌเคเคพเคจเฅเคตเคพเคฒเฅ'],
['เคเคชเค', ' เคซเคธเคฒ', ' เคฒเคตเคจเฅ', ' เคเคเคจเฅ', ' เคฒเคตเคจเฅเคตเคพเคฒเคพ', ' เคฒเคตเคจเฅเคตเคพเคฒเฅเค'],
['เคเคฎเคฃเฅเคก'],
['เคธเคฟเคฐ', ' เคธเคฟเคฐเฅเค', ' เคฎเคพเคฅเฅ', ' เคฎเคพเคฅเฅเค', ' เคเคจเฅเคฆเฅเค', ' เคเฅเคชเคฟเคฏเคพเค', ' เคเฅเคฒเฅเคฌเคเคฆ', ' เคธเคฟเคฐ เคเคเคตเคพ เคฆเคฟเคฏเคพ'],
['เคเคเคเคพ', ' เคเคเคเคพ เคเคฟเคฏเคพ', ' เคเคเคเคพ เคเคฐเคจเคพ', ' เคเคเคเคพ เคนเฅ เคเคฏเคพ', ' เคเคเคเคพ เคเคฐเคจเฅ', ' เคเคเคเคพ เคเคฐเคจเฅเคตเคพเคฒเคพ', ' เคธเฅเคนเคค', ' เคฌเฅเคฎเคพเคฐ'],
['เคเคคเฅเคคเคฐเคพเคงเคฟเคเคพเคฐเฅ', ' เคตเคพเคฐเคฟเคธ'],
['เคเคเคเฅ เคธเฅเคฅเคพเคจ', ' เคเคเคเฅ เคธเฅเคฅเคพเคจเฅเค'],
['เคชเคตเคฟเคคเฅเคฐ เคจเคเคฐ', ' เคชเคตเคฟเคคเฅเคฐ เคจเคเคฐเฅเค'],
['เคฎเคงเฅ', ' เคฎเคงเฅ เคเคพ เคเคคเฅเคคเคพ'],
['เคเฅเคฐ', ' เคเฅเคฐเฅเค', ' เคเคพเคชเฅเค'],
['เคธเฅเคเค', ' เคธเฅเคเคเฅเค', ' เคธเฅเคเค เคตเคพเคฒเฅ'],
['เคญเคฏ', ' เคเคเคฟเคค', ' เคฌเฅเคฐเฅ', ' เคฌเฅเคฐเฅ เคคเคฐเคน เคธเฅ', ' เคญเคฏเคพเคคเฅเคฐ', ' เคญเคฏเคพเคจเค'],
['เคเฅเคกเคผเคพ', ' เคเฅเคกเคผเฅ', ' เคฏเฅเคฆเฅเคง เคเคพ เคเฅเคกเคพ', ' เคฏเฅเคฆเฅเคง เคเฅ เคเฅเคกเคผเฅเค', ' เคธเคตเคพเคฐ เคนเฅเคเคฐ'],
['เคธเคตเคพเคฐ', ' เคธเคตเคพเคฐเฅเค'],
['เคเคกเคผเฅ', ' เคเคเคเฅ'],
['เคเคฐ', ' เคเคฐเฅเค', ' เคเคค เคเฅ เคเคชเคฐ', ' เคเคคเฅเค', ' เคญเคฃเฅเคกเคพเคฐ', ' เคญเคฃเฅเคกเคพเคฐเฅเค', ' เคเคฐ เคเคพ เคเคพเคฐเคฌเคพเคฐ เคเคฐเคจเฅเคตเคพเคฒเฅ'],
['เคเฅเคเฅเคฎเฅเคฌ', ' เคเคฐเคพเคจเคพ'],
['เคฒเคเฅเคเคฟเคค เคเคฐเคจเคพ', ' เคเคชเคฎเคพเคจเคฟเคค', ' เคฆเฅเคจเคคเคพ'],
['เคฎเฅเคฐเฅเคคเคฟ', ' เคฎเฅเคฐเคคเฅเค', ' เคฎเฅเคฐเฅเคคเคฟเคชเฅเคเค', ' เคฎเฅเคฐเฅเคคเคฟเคชเฅเคเคเฅเค', ' เคฎเฅเคฐเฅเคคเคฟ-เคชเฅเคเค', ' เคฎเฅเคฐเฅเคคเคฟ เคชเฅเคเคพ'],
['เคฎเฅเคฐเคค', ' เคฎเฅเคฐเคคเฅเค', ' เคฎเฅเคฐเฅเคคเคฟ เคเฅเคฆเคเคฐ', ' เคเฅเคฆเฅ เคนเฅเค เคฎเฅเคฐเคคเฅเค', ' เคงเคพเคคเฅ เคเฅ เคฎเฅเคฐเคคเฅเค เคขเคพเคฒเคเคฐ', ' เคฎเฅเคฐเฅเคคเคฟ', ' เคฎเฅเคฐเคคเฅเค', ' เคเฅเคฆเฅ เคนเฅเค เคฎเฅเคฐเคค', ' เคเฅเคฆเฅ เคนเฅเค เคฎเฅเคฐเฅเคคเคฟเคฏเฅเค', ' เคงเคพเคคเฅ เคเฅ เคเฅเคฆเฅ เคนเฅเค เคฎเฅเคฐเคค', ' เคขเคฒเฅ เคนเฅเค เคฎเฅเคฐเฅเคคเคฟเคฏเคพเค'],
['เคเคจเฅเคเคฐเคฃ เคเคฐเคจเคพ', ' เคเฅ เคธเคฎเคพเคจ เคเคพเคฒ เคเคฒเคจเฅ', ' เคเฅเคธเฅ เคเคพเคฒ เคเคฒเฅ'],
['เคงเฅเคช'],
['เคชเฅเคเคจเคพ', ' เคเคพเคเค เคเคฐเคจเคพ', ' เคเคพเคเค-เคชเคกเคผเคคเคพเคฒ', ' เคชเฅเค-เคคเคพเค'],
['เคจเคฟเคฐเฅเคฆเฅเคถ', ' เคจเคฟเคฐเฅเคฆเฅเคถ', ' เคจเคฟเคฐเฅเคฆเฅเคถ เคฆเคฟเค', ' เคจเคฟเคฐเฅเคฆเฅเคถ เคฆเฅเคคเฅ เคฐเคนเคจเคพ', ' เคเคจเฅเคฆเฅเคถ', ' เคจเคฟเคฐเฅเคฆเฅเคถ', ' เคจเคฟเคฐเฅเคฆเฅเคถเค'],
['เคเคฐเคพเค'],
['เคญเฅเคฆ เคเคฐเคจเคพ', ' เคเคจเฅเคตเคพเคฆ เคเคฐเคจเคพ', ' เคธเคฎเคเคพ เคฆเคฟเคฏเคพ', ' เคตเฅเคฏเคพเคเฅเคฏเคพ', ' เคเคฐเฅเคฅ เคฌเคคเคพเคจเคพ', ' เคตเฅเคฏเคพเคเฅเคฏเคพเคเค', ' เคซเคฒ เคเคพ เคฌเคคเคพเคจเฅเคตเคพเคฒเคพ'],
['เคฏเคนเฅเคฆเคฟเคฏเฅเค', ' เคฏเคนเฅเคฆเฅ'],
['เคเคจเคจเฅเคฆ', ' เคเคจเคจเฅเคฆเคฟเคค', ' เคเคจเคจเฅเคฆ เคธเฅ', ' เคเคจเคจเฅเคฆ', ' เคเคจเคจเฅเคฆเคฎเคฏ ', ' เคเคจเคเคฆ เคฒเคฟเคฏเคพ', ' เคธเฅเค-เคตเคฟเคฒเคพเคธ'],
['เคฏเคนเฅเคฆเฅ เคงเคฐเฅเคฎ', ' เคฏเคนเฅเคฆเฅ เคฎเคค'],
['เคจเฅเคฏเคพเคฏเฅ', ' เคจเฅเคฏเคพเคฏ'],
['เคเฅเคเฅเคฎเฅเคฌเฅ', ' เคญเคพเคเคฏเฅเค', ' เคเคจเฅเคฎ-เคญเฅเคฎเคฟ', ' เคเฅเคเฅเคฎเฅเคฌเฅ', ' เคเฅเคเฅเคฎเฅเคฌเคฟเคฏเฅเค'],
['เคชเฅเคฐเคเคพเคฐ', ' เคญเคพเคเคคเคฟ-เคญเคพเคเคคเคฟ', ' เคเคฐเฅเคฃเคพ', ' เคเคชเคเคพเคฐ'],
['เคฐเคพเคเคพ', ' เคฐเคพเคเคพเคเค', ' เคฐเคพเคเฅเคฏ', ' เคฐเคพเคเฅเคฏเฅเค', ' เคฐเคพเคเฅเคฏ', ' เคฐเคพเคเคธเฅ'],
['เคฐเคพเคเฅเคฏ', ' เคฐเคพเคเฅเคฏเฅเค'],
['เคเฅเคฎเฅเคฌเคจ', ' เคเฅเคฎเฅเคฌเคจเฅเค', ' เคเฅเคฎเคพ', ' เคเฅเคฎเคจเคพ'],
['เคเคพเคจเคจเคพ', ' เคเคพเคจเคคเคพ เคนเฅ', ' เคเคพเคจเคคเคพ เคฅเคพ', ' เคเคพเคจเคจเคพ', ' เคเฅเคเคพเคจ', ' เคเฅเคเคพเคค', ' เคชเฅเคฐเคเค เคเคฐเคจเคพ', ' เคเฅเคเคพเคค เคเคฐเคคเคพ เคนเฅ', ' เคเฅเคเคพเคค เคเคฐเคคเคพ เคนเฅ', ' เคเคเฅเคเคพเคค', ' เคชเคนเคฒเฅ เคธเฅ เคเคพเคจเคจเคพ', ' เคชเฅเคฐเฅเคตเคเฅเคเคพเคจ'],
['เคชเคฐเคฟเคถเฅเคฐเคฎ', ' เคชเคฐเคฟเคถเฅเคฐเคฎ เคเคฐเฅ', ' เคชเคฐเคฟเคถเฅเคฐเคฎ เคเคฟเคฏเคพ', ' เคฎเคเคฆเฅเคฐ', ' เคฎเคเคฆเฅเคฐเฅเค'],
['เคชเฅเคฐเคธเคต', ' เคเคเฅเคเคพ เคเฅ เคธเฅ', ' เคเคเฅเคเคพ เคเฅ เคธเฅ เคชเฅเคกเคผเคพเคเค'],
['เคฆเฅเคชเค', ' เคฆเฅเคชเคเฅเค'],
['เคฆเฅเคตเค', ' เคฆเฅเคตเคเฅเค'],
['เคตเฅเคฏเคตเคธเฅเคฅเคพ', ' เคตเฅเคฏเคตเคธเฅเคฅเคพเคเค', ' เคตเฅเคฏเคตเคธเฅเคฅเคพ เคฆเฅเคจเฅเคตเคพเคฒเคพ', ' เคเคชเคฐเคพเคงเฅ', ' เคเคชเคฐเคพเคงเคฟเคฏเฅเค', ' เคฎเฅเคเคฆเคฎเคพ', ' เคตเคเฅเคฒ', ' เคธเคฟเคฆเฅเคงเคพเคเคค', ' เคธเฅเคฆเฅเคงเคพเคเคคเคฟเค', ' เคธเคฟเคฆเฅเคงเคพเคเคค'],
['เคเคเคฟเคค', ' เคตเฅเคฏเคตเคธเฅเคฅเคพ เคเฅ เคฐเฅเคคเคฟ เคชเคฐ', ' เคเคเคฟเคค เคจเคนเฅเค'],
['เคเคงเคฐเฅเคฎเฅ', ' เคเคงเคฐเฅเคฎ'],
['เคเฅเคฏเฅเคคเคฟเคทเฅ', ' เคญเคพเคตเฅ เคฌเคคเคพเคจเฅเคตเคพเคฒเฅเค'],
['เคเฅเคคเคพ', ' เคเฅเคคเฅเค'],
['เคเฅเคขเคผเฅ', ' เคเฅเฅเคฟเคฏเฅเค', ' เคเฅเคขเคผ', ' เคเฅเคขเคผ'],
['เคชเคคเฅเคฐเฅ', ' เคชเคคเฅเคฐ', ' เคเคฟเคเฅเค เคฟเคฏเคพเค'],
['เคเคเคฟเคฏเคพเคฒเคพ', ' เคเฅเคฏเฅเคคเคฟเคฏเฅเค', ' เคฌเคฟเคเคฒเฅ', ' เคฌเคฟเคเคฒเคฟเคฏเคพเค', ' เคเคเคฟเคฏเคพเคฒเฅ', ' เคธเฅเคฐเฅเคฏ เคเฅ เคเคเคฟเคฏเคพเคฒเฅ', ' เคธเคพเคเค', ' เคชเฅเคฐเคเคพเคถเคฟเคค', ' เคเฅเคฏเฅเคคเคฟเคฐเฅเคฎเคฏ'],
['เคเฅ เคธเคฎเคพเคจ', ' เคเค เคฎเคจ', ' เคธเคฆเฅเคถ เคเคฐเคจเคพ', ' เคธเคฎเคพเคจเคคเคพ', ' เคธเคฎเคคเคพ', ' เคตเฅเคธเฅ เคนเฅ', ' เคฌเคฐเคพเคฌเคฐ', ' เคธเฅ เคเคฒเค'],
['เคธเคฟเคเคนเฅเค', ' เคธเคฟเคเคน', ' เคธเคฟเคเคนเคจเฅ', ' เคธเคฟเคเคนเคจเฅ'],
['เคชเคถเฅ'],
['เคเคฟเคกเฅเคกเฅ', ' เคเคฟเคกเฅเคกเคฟเคฏเคพเค'],
['เคเคฎเคฐ'],
['เคเคฟเคเฅเค เคฟเคฏเคพเค', ' เคเคฟเคเฅเค เคฟเคฏเคพเค เคกเคพเคฒเคเคฐ'],
['เคชเฅเคฐเฅเคฎเฅ', ' เคฏเคพเคฐเฅเค'],
['เคฆเฅเคจ', ' เคเฅเคเคพ เคนเฅเคเคพ', ' เคฆเฅเคจเคคเคพ'],
['เคฒเคพเคฒเค', ' เคเคญเคฟเคฒเคพเคทเคพเคเค', ' เคฎเฅเคนเคฟเคค', ' เคฒเคพเคฒเคธเคพ เคเคฐเคจเคพ', ' เคฒเฅเคเคชเคจ'],
['เคตเฅเคฃเคพ', ' เคคเคพเคฐ เคตเคพเคฒเคพ เคฌเคพเคเคพ', ' เคธเคพเคฐเคเคเคฟเคฏเคพเค'],
['เคเคพเคฆเฅ', ' เคเคพเคฆเฅ เคเฅเคจเคพ', ' เคคเคพเคเคคเฅเคฐเคฟเค', ' เคเคพเคฆเฅเคเคฐเฅเค'],
['เคจเฅเคฏเคพเคฏเคพเคงเฅเคถ', ' เคนเคพเคเคฟเคฎเฅเค'],
['เคฌเคกเคผเคพ เค เคนเคฐเคพเคเคเคพ'],
['เคชเฅเคฐเคฌเคเคงเค เคชเฅเคฐเคฌเคเคงเค เคชเฅเคฐเคฌเคเคงเคเฅเค', ' เคญเคฃเฅเคกเคพเคฐเฅ', ' เคญเคฃเฅเคกเคพเคฐเคฟเคฏเฅเค', ' เคญเคเคกเคพเคฐเฅเคชเคจ'],
['เคเคจเฅเคจเคฌเคฒเคฟ'],
['เคฎเคงเฅเคฏเคธเฅเคฅ'],
['เคงเฅเคฏเคพเคจ', ' เคงเฅเคฏเคพเคจ เคเคฐเคคเคพ', ' เคงเฅเคฏเคพเคจ'],
['เคจเคฎเฅเคฐ', ' เคจเคฎเฅเคฐเคคเคพ'],
['เคชเคฟเคเคฒเคจเคพ', ' เคชเคฟเคเคฒ เคเคฏเคพ', ' เคชเคฟเคเคฒเคพเค', ' เคชเคฟเคเคฒ เคเคพเคคเคพ', ' เคขเคพเคฒเคเคฐ'],
['เคเคเค', ' เคเคเคเฅเค'],
['เคธเฅเคฎเคฐเคฃ เคฆเคฟเคฒเคพเคจเฅเคตเคพเคฒเฅ', ' เคฆเคพเคจ เคธเฅเคฎเคฐเคฃ เคเฅ เคฒเคฟเคฏเฅ'],
['เคฆเฅเคค', ' เคฆเฅเคคเฅเค'],
['เคธเคพเคฎเคฐเฅเคฅเฅเคฏ', ' เคธเคพเคฎเคฐเฅเคฅเฅ', ' เคฌเคนเฅเคค เคธเคพเคฎเคฐเฅเคฅเฅ', ' เคชเคฐเคพเคเฅเคฐเคฎ เคธเฅ'],
['เคฎเคจ', ' เคฎเคจเฅเค', ' เคฎเคจ เคฎเฅเค เคฒเฅเคจเคพ', ' เคธเฅเคงเคฟ เคฒเฅเคจเคพ', ' เคธเฅเคงเคฟ เคฆเคฟเคฒเคพ', ' เคฏเคพเคฆ เคฆเคฟเคฒเคพเคคเคพ เคนเฅ', ' เคธเฅเคฎเคฐเคฃ เคเคฟเค เคเค', ' เคธเฅเคฎเคฐเคฃ', ' เคธเฅเคฎเคฐเคฃ เคฆเคฟเคฒเคพเคจเฅ เคตเคพเคฒเคพ', ' เคธเฅเคฎเคฐเคฃ เคฆเคฟเคฒเคพเคจเคพ', ' เคเค เคฎเคจ'],
['เคจเคฟเคจเฅเคฆเคพ', ' เค เคเฅเค เคพ เคเคฐเคคเคพ', ' เคเคชเคนเคพเคธ เคเคฐเคเฅ', ' เคเคชเคนเคพเคธ', ' เค เคเฅเค เคพ เคเคฐเคจเฅเคตเคพเคฒเคพ', ' เค เคเฅเคเคพ เคเคฐเคจเฅเคตเคพเคฒเฅเค', ' เคนเคเคธเฅ เคเฅเคพเคจเฅ', ' เคเคฒเคเค เคฒเคเคพเคคเฅ', ' เคเคชเคนเคพเคธ เคเคฐเคจเฅ เคฒเคเฅ', ' เคเฅเคฃเคพ เคเคฐเคคเฅ', ' เค เคเฅเค เฅ เคฎเฅเค เคเคกเคผเคพเคฏเคพ'],
['เคขเคพเคฒเคจเคพ', ' เคธเคพเคเคเคพ', ' เคขเคพเคฒเคเคฐ', ' เคฌเคจเคพ เคฐเคนเคพ เคฅเคพ', ' เคฌเคจเคพเคจเฅเคตเคพเคฒเคพ', ' เคซเคซเฅเคเคฆเฅ'],
['เคถเฅเค เคเคฐเคจเคพ', ' เคตเคฟเคฒเคพเคช เคเคฐ เคฐเคนเคพ เคนเฅ', ' เคตเคฟเคฒเคพเคช เคเคฐเคจเฅ เคฒเคเฅ', ' เคถเฅเค เคเคฐเคคเคพ เคนเฅเค', ' เคถเฅเค เคเคฐเคจเฅเคตเคพเคฒเคพ', ' เคตเคฟเคฒเคพเคช เคเคฐเคจเฅเคตเคพเคฒเฅเค', ' เคเคฆเคพเคธเฅ', ' เคถเฅเค'],
['เคฌเคขเคผเคพเคเคเคพ', ' เคฌเคขเคผเคพเคคเคพ เคฐเคนเคคเคพ', ' เคฌเคขเคผ เคเค', ' เคฌเฅเคจเฅ เคฒเคเฅ', ' เคฌเคขเคผเคคเฅ เคเคฐเฅเคเคพ'],
['เคญเฅเคฆ', ' เคญเฅเคฆเฅเค', ' เคญเฅเคฆ', ' เคญเฅเคฆเฅเค'],
['เคเคพเคคเคฟ', ' เคเคพเคคเคฟเคฏเฅเค'],
['เคชเคกเคผเฅเคธเฅ', ' เคชเคกเคผเฅเคธเคฟเคฏเฅเค', ' เคชเคกเฅเคธ', ' เคเคธ เคชเคพเคธ เคเฅ'],
['เคจเคฏเคพ เคเคพเคเคฆ', ' เคจเคฏเฅ เคเคพเคเคฆ'],
['เคชเฅเคฐเคคเคฟเคทเฅเค เคฟเคค', ' เคฐเคเคธเฅเค', ' เคงเคจเฅ เคฎเคจเฅเคทเฅเคฏ', ' เคชเฅเคฐเคงเคพเคจ เคฒเฅเค'],
['เคฌเคพเคเค เคตเฅเคเฅเคท', ' เคฌเคพเคเค เคตเฅเคเฅเคทเฅเค'],
['เคถเคชเคฅ', ' เคถเคชเคฅ', ' เคถเคชเคฅ เคเคพ', ' เคถเคชเคฅ เคเคพเค', ' เคถเคชเคฅ เคเคพเคเคฐ', ' เคถเคชเคฅ เคเคพเคเฅ', ' เคเฅ เคถเคชเคฅ เคเคพเคคเคพ เคนเฅ'],
['เคเคเฅเคเคพ เคฎเคพเคจเคจเคพ', ' เคเคเฅเคเคพ เคฎเคพเคจเคจเคพ', ' เคเคเฅเคเคพ เคฆเคฟเคฏเคพ', ' เคเคเฅเคเคพเคเคพเคฐเฅ', ' เคเคเฅเคเคพเคเคพเคฐเฅ', ' เคเคเฅเคเคพเคเคพเคฐเฅ', ' เคเคเฅเคเคพเคเคพเคฐเฅ', ' เคเคตเคเฅเคเคพ', ' เคเคตเคเฅเคเคพ', ' เคเคตเคเฅเคเคพ', ' เคเคตเคเฅเคเคพ', ' เคเคตเคเฅเคเคพเคเคพเคฐเฅ'],
['เคตเคเคถ'],
['เคคเฅเคฒ'],
['เคเฅเคคเฅเคจ'],
['เคเคเคเฅ เคธเฅเคฅเคพเคจ เคชเคฐ', ' เคเคเคพเคถ เคฎเฅเค'],
['เคเคคเฅเคฏเคพเคเคพเคฐ เคเคฐเคจเฅ', ' เคเคเคงเฅเคฐ เคเคฐเคคเคพ', ' เคธเคคเคพเค เคนเฅเค', ' เคเคเคงเฅเคฐ เคเคฐเคคเฅ', ' เคเคเคงเฅเคฐ', ' เคคเคพเคกเคผเคจเคพ ', ' เคเคเคงเฅเคฐ เคเคฐเคจเฅเคตเคพเคฒเฅ', ' เคเคคเฅเคฏเคพเคเคพเคฐ เคเคฐเคจเฅเคตเคพเคฒเฅเค'],
['เคจเคฟเคฏเฅเคเฅเคค', ' เค เคนเคฐเคพเคจเคพ', ' เคธเคพเคฎเคพเคจเฅเคฏ', ' เคจเคฟเคฏเฅเคเฅเคคเคฟ'],
['เคตเคฟเคงเคฟ', ' เคตเคฟเคงเคฟเคฏเฅเค'],
['เคฆเฅเคเคฐเฅเค', ' เคชเคฐเฅเคฏเคตเฅเคเฅเคทเคฃ', ' เคชเคฐเฅเคฏเคตเฅเคเฅเคทเค', ' เคเคงเฅเคฏเคเฅเคท', ' เคชเคฐเฅเคฏเคตเฅเคเฅเคทเค'],
['เค เคชเฅเฅ', ' เคชเคเฅ เคฒเฅเคจเคพ', ' เค เคชเคเคกเคผเคพ', ' เคชเคเคกเคผ เคฒเคฟเคฏเคพ'],
['เคเคจเฅเคฏเคเคพเคคเคฟ', ' เคเคจเฅเคฏเคเคพเคคเคฟเคฏเคพเค'],
['เคฎเคนเคฒ', ' เคฎเคนเคฒเฅเค'],
['เคเคเฅเคฐ', ' เคนเคฅเฅเคฒเคฟเคฏเฅเค'],
['เคเฅเคทเคฎเคพ เคเคฐ', ' เคเฅเคทเคฎเคพ'],
['เคเคเคถเคฟเค', ' เคชเคเฅเคท เคเคฐเคจเคพ', ' เคชเคเฅเคทเคชเคพเคค'],
['เคงเฅเคฐเคเคตเคจเฅเคค', ' เคงเฅเคฐเค เคธเฅ', ' เคธเคนเคจเคถเฅเคฒเคคเคพ', ' เคเคงเฅเคฐ'],
['เคเฅเคฒเคชเคคเคฟ', ' เคเฅเคฒเคชเคคเคฟเคฏเฅเค'],
['เคถเคพเคจเฅเคคเคฟ', ' เคถเคพเคเคคเคฟเคชเฅเคฐเฅเคฃ', ' เคถเคพเคเคคเคฟเคชเฅเคฐเฅเคตเค', ' เคถเคเคคเคฟเคฏเฅเคเฅเคฏ', ' เคถเคพเคเคคเคฟ เคฌเคจเคพเคจเฅ เคตเคพเคฒเฅ'],
['เคฎเฅเคฒเคฌเคฒเคฟ', ' เคฎเฅเคฒเคฌเคฒเคฟเคฏเฅเค'],
['เคเคพเคคเคฟ', ' เคฒเฅเคเฅเค', ' เคฒเฅเค', ' เคชเฅเคฐเคเคพ'],
['เคธเคฟเคฆเฅเคง', ' เคธเคฟเคฆเฅเคง เคเคฐเคพเค', ' เคธเคฟเคฆเฅเคง เคเคฐเคจเฅเคตเคพเคฒเฅ', ' เคธเคฟเคฆเฅเคงเคคเคพ', ' เคเคฐเฅ'],
['เคธเคคเคพเคเค', ' เคธเคคเคพเค เคเคพเคคเฅ', ' เคธเคคเคพเคคเคพ', ' เคเคชเคฆเฅเคฐเคต', ' เคเคคเฅเคชเฅเคกเคผเคจ', ' เคธเคคเคพเคจเฅเคตเคพเคฒเคพ', ' เคชเฅเคเคพ เคเคฐเคจเฅเคตเคพเคฒเฅ'],
['เคงเฅเคฐเค เคงเคฐเคจเคพ ', ' เคงเฅเคฐเค'],
['เคเฅเคขเคผเฅ', ' เคเฅเคเคฟเคฒเคคเคพ', ' เคตเคฟเคเฅเคคเคฟ', ' เคเคจเฅเคฏเคพเคฏ', ' เคเคฒเค เคซเฅเคฐ', ' เคฌเคฟเคเคพเคกเคผเคจเฅ', ' เคเฅเคขเคผเฅ-เคฎเฅเคขเคผเฅ', ' เคเคฒเค-เคชเฅเคฒเค เคเคฐ เคฆเคฟเคฏเคพ', ' เคฌเคนเคเคพเคคเฅ'],
['เคเฅเคฆเคจเคพ', ' เคฌเฅเคงเคพเคคเคพ', ' เคฌเฅเคงเคพ', ' เคญเฅเคฆเคคเคพ เคนเฅเค'],
['เคธเฅเคเคฐ', ' เคธเฅเคเคฐเฅเค', ' เคธเฅเคเคฐ เคเคพ เคฎเคพเคเคธ', ' เคธเฅเคเคฐ'],
['เคธเฅเคคเคเคญ', ' เคฒเคพเคเฅเค', ' เคเคฎเฅเคญเคพ', ' เคเคฎเฅเคญเฅเค'],
['เคเคกเฅเคขเคพ', ' เคเคกเฅเคขเฅ', ' เคซเคเคฆเฅเค'],
['เคฎเคฐเฅ', ' เคตเคฟเคชเคคเฅเคคเคฟเคฏเฅเค'],
['เคเคฟเคกเคผเคเคฟเคกเคผเคพเคจเคพ', ' เคตเคฟเคจเคคเฅ', ' เคฎเฅเคเคฆเฅเคฆเคฎเคพ', ' เคตเคพเคฆ เคตเคฟเคตเคพเคฆ เคเคฐเคจเคพ', ' เคเคฟเคกเคผเคเคฟเคกเคผเคพเคเคฐ', ' เคจเคฟเคตเฅเคฆเคจ เคเคฐเคคเคพ', ' เคจเคฟเคตเฅเคฆเคจ'],
['เคฐเฅเคนเคจ', ' เคเคพ เคตเคเคจ เคฆเคฟเคฏเคพ', ' เคชเฅเคฐเคคเคฟเคเฅเคเคพเคเค'],
['เคนเคฒ', ' เคนเคฒ เคเคฒเคพเคจเคพ', ' เคนเคฒ เคเคฒเคพเคฏเคพ', ' เคนเคฒ เคเฅเคคเคจเฅ', ' เคนเคฒเคตเคพเคนเฅเค', ' เคเฅเคคเคจเฅเคตเคพเคฒเคพ', ' เคเคฟเคธเคพเคจ', ' เคนเคฒ เคเฅ เคซเคพเคฒ', ' เคเคเฅเคค'],
['เคเคจเคพเคฐ', ' เคเคจเคพเคฐเฅเค'],
['เคเคงเคฟเคเคพเคฐ เคฎเฅเค เคฒเฅเคจเคพ', ' เคฎเฅเคฒ เคฒเคฟเคฏเคพ', ' เคเคฌเฅเคเคผเคพ เคฅเคพ', ' เคเคงเคฟเคเคพเคฐ เคฎเฅเค เคฐเคเคจเคพ', ' เคเคงเคฟเคเคพเคฐ', ' เคธเคฎเฅเคชเคคเคฟ', ' เคจเคฟเคเคพเคฒ เคฆเฅเคจเคพ'],
['เคธเฅเคคเฅเคคเคฟ', ' เคญเคเคจ', ' เคธเฅเคคเฅเคคเคฟ เคเฅ', ' เคธเฅเคคเฅเคคเคฟ เคเคฐเคคเฅ', ' เคฌเคกเคผเคพเค เคเฅ เคฌเคพเคค'],
['เคชเฅเคฐเคเคพเคฐ เคเคฐเคจเคพ', ' เคชเฅเคฐเคเคพเคฐ เคเคฟเคฏเคพ', ' เคชเฅเคฐเคเคพเคฐ', ' เคชเฅเคฐเคเคพเคฐเค'],
['เคเคจเคฎเฅเคฒ'],
['เคเคนเฅเคฐ', ' เคเคนเฅเคฐ เคเคฐเคจเคพ'],
['เคฐเคพเคเคเฅเคฎเคพเคฐ', ' เคฐเคพเคเคเฅเคฎเคพเคฐเฅ', ' เคฐเคพเคเคเฅเคฎเคพเคฐเฅ', ' เคฐเคพเคเคเฅเคฎเคพเคฐเคฟเคฏเคพเค'],
['เคฌเคจเฅเคฆเฅเคเฅเคน', ' เคฌเคจเฅเคฆเฅ', ' เคฌเคจเฅเคฆเฅ', ' เคฌเคจเฅเคฆเฅเคเฅเคน', ' เคเฅเคฆ เคฎเฅเค', ' เคเฅเคฆ เคฎเฅเค', ' เคฌเคจเฅเคฆเฅ เคฌเคจเคพเคจเคพ', ' เคฌเคจเฅเคฆเฅ เคฌเคจเคจเคพ', ' เคฌเคจเฅเคฆเฅ เคฌเคจเคจเคพ'],
['เคชเฅเคฐเคเคพเคฐ เคเคฐเคจเคพ', ' เคชเฅเคฐเคเคพเคฐ เคเคฐเคจเคพ', ' เคชเฅเคฐเคเคพเคฐ', ' เคเฅเคทเคฃเคพ', ' เคเคฆเฅเคเฅเคทเคฃเคพ', ' เคเคฆเฅเคเฅเคทเคฃเคพ'],
['เคเคถเฅเคฆเฅเคง เคเคฐเคจเคพ', ' เคเคชเคตเคฟเคคเฅเคฐ', ' เคเคถเฅเคฆเฅเคง เคเคฐเคจเคพ'],
['เคฒเคพเคญ', ' เคฒเคพเคญ', ' เคฒเคพเคญเคฆเคพเคฏเค'],
['เคธเคฎเฅเคฆเฅเคง เคนเฅเคจเคพ', ' เคธเคฎเฅเคฆเฅเคงเคฟ', ' เคธเคฎเฅเคฆเฅเคง', ' เคธเคฎเฅเคฆเฅเคงเคฟ', ' เคธเคฎเฅเคฆเฅเคง'],
['เคตเฅเคถเฅเคฏเคพ', ' เคตเฅเคถเฅเคฏเคพเคตเฅเคคเฅเคคเคฟ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเคฟเคฃเฅ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเคฟเคฃเฅ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเคฟเคฃเฅ', ' เคตเฅเคฏเคญเคฟเคเคพเคฐเคฟเคฃเฅ '],
['เคฆเคฃเฅเคกเคตเคคเฅ เคเคฐเคจเคพ', ' เคฆเคฃเฅเคกเคตเคคเฅ เคเคฟเคฏเคพ'],
['เคเคฎเคฃเฅเคก', ' เคเคฎเคฃเฅเคก เคธเฅ', ' เคฌเคกเคผเคพเค', ' เคเคฎเคฃเฅเคก เคญเคฐเฅ'],
['เคจเฅเคคเคฟเคตเคเคจ', ' เคจเฅเคคเคฟเคตเคเคจ'],
['เคชเฅเคฐเคพเคเคค', ' เคชเฅเคฐเคพเคเคค', ' เคชเฅเคฐเคพเคฆเฅเคถเคฟเค'],
['เคญเฅเคเคพเคจเคพ', ' เคญเฅเคเคพเคจเคพ', ' เคญเฅเคเคพเคจเคพ', ' เคญเฅเคเคพเคจเคพ', ' เคญเฅเคเคพเคจเคพ'],
['เคซเฅเคฒ เคเคพเคจเคพ', ' เคซเฅเคฒ เคเคพเคจเคพ'],
['เคฆเคฃเฅเคก เคฆเฅเคจเคพ', ' เคฆเคเคกเคฟเคค เคเคฐเคจเฅ', ' เคฆเคเคกเคฟเคค เคเคฐเคจเฅ', ' เคฆเคเคกเคฟเคค เคเคฐเคจเฅ', ' เคฆเคเคก', ' เคเคฆเคเคกเคฟเคค'],
['เคฌเฅเคเคเคจเฅ'],
['เคงเคเฅเคเคพ เคฆเฅเคจเคพ', ' เคงเคเฅเคเคพ เคฆเคฟเคฏเคพ', ' เคงเคเฅเคเคพ'],
['เคฏเฅเคเฅเคฏ เคเคฐเคจเคพ', ' เคฏเฅเคเฅเคฏ', ' เคจเคฟเคเคฎเฅเคฎเคพ เค เคนเคฐเคพ'],
['เคฐเคพเคจเฅ', ' เคฐเคพเคจเคฟเคฏเคพเค'],
['เคฌเฅเคเคพเคจเคพ', ' เคฌเฅเคเฅเคเฅ', ' เคจเคนเฅเค เคฌเฅเคเคคเฅ'],
['เคเฅเคฐเฅเคง', ' เคเฅเคฐเฅเคงเคฟเคค', ' เคญเคกเคผเคเคพ', ' เคเฅเคฐเฅเคง เคญเคกเคผเคเคพเคคเคพ'],
['เคเฅเคพ เคเคฐเคจเคพ', ' เคเค เคพเคจเคพ', ' เคเค เคพเคฏเคพ', ' เคเฅเคพ เคนเฅเคจเคพ', ' เคเค เคจเคพ', ' เคเค เคพ', ' เคเค เคพ เคฅเคพ'],
['เคฒเคตเคจเฅ', ' เคฒเคตเคจเฅเคตเคพเคฒเคพ', ' เคเคพเคเคพ', ' เคฒเคตเคจเฅเคตเคพเคฒเฅ', ' เคฒเคตเคจเฅเคตเคพเคฒเฅเค', ' เคฒเคตเคคเฅ'],
['เคฌเคฒเคตเคพ ', ' เคฌเคฒเคตเคพ', ' เคฌเคฒเคตเคพ เคเคฟเคฏเคพ', ' เคตเคฟเคฆเฅเคฐเฅเคน', ' เคตเคฟเคฆเฅเคฐเฅเคน', ' เคฌเคฒเคตเคพเค', ' เคตเคฟเคฆเฅเคฐเฅเคน เคถเฅเคฒเคคเคพ'],
['เคเคฟเฅเคเคจเคพ', ' เคเฅเคกเคผเคเคจเฅ', ' เคกเคพเคเคเคพ'],
['เคจเคฐเคเค', ' เคเคพเคเคธเฅ'],
['เคถเคฐเคฃ', ' เคถเคฐเคฃเคพเคฐเฅเคฅเฅ', ' เคถเคฐเคฃเคพเคฐเฅเคฅเคฟเคฏเฅเค', ' เคฎเคฃเฅเคกเคช', ' เคฎเคฃเฅเคกเคชเฅเค', ' เคเคกเคผ', ' เคเคกเคผ เคฆเฅเคจเคพ'],
['เคฐเคพเค เคเคฐเคจเคพ', ' เคฐเคพเคเฅเคฏ เคเคฐเคคเคพ เคนเฅ', ' เคฐเคพเคเฅเคฏ เคเคฐเคคเคพ', ' เคฐเคพเคเฅเคฏ เคเคฐ เคฐเคนเคพ เคนเฅ'],
['เคชเคฐเคฟเคคเฅเคฏเคพเค', ' เคชเคฐเคฟเคคเฅเคฏเคพเค', ' เคชเคฐเคฟเคคเฅเคฏเคพเค', ' เคชเคฐเคฟเคคเฅเคฏเคพเค', ' เคชเคฐเคฟเคคเฅเคฏเคพเค'],
['เคเคจเคจเฅเคฆ', ' เคเคจเคจเฅเคฆ เคฎเคจเคพเคจเคพ', ' เคเคจเคจเฅเคฆเคฟเคค เคนเฅเค', ' เคเคจเคจเฅเคฆ เคเคฐเฅเค'],
['เคเฅเคฐเฅเคคเคฟ', ' เคเฅเคฐเฅเคคเคฟเคฎเคพเคจ'],
['เคธเคฎเคพเคเคพเคฐ', ' เคธเคฎเคพเคเคพเคฐเฅเค', ' เคธเคฎเคพเคเคพเคฐ เคฆเคฟเคฏเคพ'],
['เคจเคฟเคจเฅเคฆเคพ', ' เคเคชเคฎเคพเคจ', ' เคจเคฟเคจเฅเคฆเคพ', ' เคจเคฟเคจเฅเคฆเคพ', ' เคจเคพเคฎเคงเคฐเคพเค'],
['เคตเคฟเคถเฅเคฐเคพเคฎ เคเคฐเคจเคพ', ' เคตเคฟเคถเฅเคฐเคพเคฎ', ' เคตเคฟเคถเฅเคฐเคพเคฎ เคเคฟเคฏเคพ', ' เคตเคฟเคถเฅเคฐเคพเคฎ', ' เคตเคฟเคถเฅเคฐเคพเคฎ เคฐเคนเคฟเคค'],
['เคฒเฅเค เคเคจเคพ', ' เคฒเฅเค เคเคจเคพ', ' เคฒเฅเคเคเคฐ', ' เคฒเฅเค เคฐเคนเฅ'],
['เคถเฅเคฐเคฆเฅเคงเคพ', ' เคญเคฏ เคฎเคพเคจเคพ', ' เคญเคฏ เคฎเคพเคจเคเคฐ', ' เคธเคฎเฅเคฎเคพเคจเฅเค', ' เคญเคเฅเคคเคฟเคฏเฅเคเฅเคค'],
['เคชเฅเคฐเคคเคฟเคซเคฒ', ' เคชเฅเคฐเคคเคฟเคซเคฒ', ' เคชเฅเคฐเคคเคฟเคซเคฒ เคฆเฅเคจเคพ', ' เคชเฅเคฐเคธเฅเคเฅเคค', ' เคชเฅเคฐเคคเคฟเคซเคฒ เคฆเฅเคจเฅ เคฌเคพเคฒเคพ'],
['เคเฅเคเคพ', ' เคตเคธเฅเคคเฅเคฐ', ' เคเฅเคเคพ เคชเคนเคจเคพเคจเคพ'],
['เคฒเคพเค เฅ', ' เคเคกเคผเฅเค'],
['เคฐเคพเคเคเฅเคฏ', ' เคฐเคพเคเคธเฅ เคเฅเคฐเคต'],
['เคจเคทเฅเค เคเคฐเคจเคพ', ' เคจเคทเฅเค เคเคฐเคจเคพ', ' เคฌเคฐเฅเคฌเคพเคฆ'],
['เคถเคพเคธเคจ', ' เคจเคฟเคฏเคฎ', ' เคถเคพเคธเคฟเคค', ' เคถเคพเคธเค', ' เคถเคพเคธเคเฅเค', ' เคจเคฟเคฐเฅเคฃเคฏเฅเค', ' เคซเฅเคธเคฒเฅเค', ' เคเคคเคฟเคฐเคเคเคฟเคค'],
['เคฆเฅเคกเคผเคจเคพ', ' เคฆเฅเคกเคผเคจเคพ', ' เคฆเฅเคกเคผเคจเคพ', ' เคฆเฅเคกเคผเคจเคพ', ' เคฆเฅเคกเคผเคจเคพ'],
['เคเคพเค'],
['เคชเคตเคฟเคคเฅเคฐ'],
['เคฌเคฒเคฟเคฆเคพเคจ เคเคฐเคจเคพ', ' เคฌเคฒเคฟ', ' เคฌเคฒเคฟเคฆเคพเคจ เคเคฟเคฏเฅ', ' เคฌเคฒเคฟเคฆเคพเคจ เคเคฐเคจเคพ', ' เคญเฅเคเค', ' เคญเฅเคเค เคเฅ เคตเคธเฅเคคเฅเคเค'],
['เคเฅเคคเคพ', ' เคเฅเคคเคฟเคฏเคพเค'],
['เคฐเคพเคเคฆเคฃเฅเคก', ' เคฐเคพเคเคฆเคฃเฅเคกเฅเค'],
['เคชเฅเคธเฅเคคเค', ' เคฆเคธเฅเคคเคพเคตเฅเคเคผเฅเค'],
['เคฎเฅเคขเคผเฅเค\xa0เคเคฐ เคธเคฎเฅเคฆเฅเคฐเฅ เคเคพเคฏ'],
['เคฎเฅเคนเคฐ', ' เคฎเฅเคนเคฐ', ' เคฎเฅเคนเคฐ เคฒเคเคพเคจเคพ', ' เคเฅเคฒเฅ'],
['เคฌเฅเค', ' เคตเฅเคฐเฅเคฏ'],
['เคขเฅเคเคขเคผเฅ', ' เคขเฅเคเคขเคผเคคเฅ เคนเฅเค', ' เคเฅเคเคจเคพ', ' เคฎเคพเคเคเคพ'],
['เคฌเคจเฅเคฆเฅ เคฌเคจเคพเคจเคพ', ' เคฌเคจเฅเคฆเฅ', ' เคเคฌเฅเคค', ' เคเคฌเฅเคเคพ'],
['เคธเฅเคฒเคพ'],
['เคธเคเคฏเคฎ', ' เคธเคเคฏเคฎ', ' เคเคคเฅเคฎ เคจเคฟเคฏเคเคคเฅเคฐเคฟเคค', ''],
['เคญเฅเคเคจเคพ', ' เคญเฅเคเคพ เคเคพเคคเคพ', ' เคญเฅเคเคพ เคเคฏเคพ', ' เคญเฅเคเคพ เคเคพเคจเคพ', ' เคฌเคพเคนเคฐ เคญเฅเคเคจเคพ', ' เคฌเคพเคนเคฐ เคญเฅเคเคจเคพ', ' เคฌเคพเคนเคฐ เคญเฅเคเคพ เคเคฏเคพ', ' เคฌเคพเคนเคฐ เคญเฅเคเคพ เคเคพเคจเคพ'],
['เคธเคฐเฅเคช', ' เคธเคพเคเคชเฅเค', ' เคธเคพเคเคช', ' เคธเคพเคเคช', ' เคธเคพเคเคช', ' เคธเคพเคเคชเฅเค'],
['เคธเฅเคตเคพ เคเคฐเคจเคพ', ' เคฆเคพเคธ เคฌเคจเคพเคจเคพ', ' เคฆเคพเคธ เคฌเคจเคพ เคฆเคฟเคฏเคพ', ' เคธเฅเคตเค', ' เคธเฅเคตเคเฅเค', ' เคฆเคพเคธ', ' เคฆเคพเคธเฅเค', ' เคธเฅเคตเคพ เคเคฐเคจเคพ', ' เคฆเคพเคธเคคเฅเคต', ' เคฆเคพเคธเฅ'],
['เคธเฅเคตเคพ เคเคฐเคจเคพ', ' เคธเฅเคตเคพ เคเคฐเคจเคพ', ' เคธเฅเคตเคพ เคเคฟเคฏเคพ', ' เคธเฅเคตเคพ เคเคฐ เคฐเคนเคพ เคนเฅ', ' เคธเฅเคตเคพ', ' เคธเฅเคตเคพ', ' เคฎเคเคฌเฅเคฐ เคนเฅเคเคฐ เคเคพเคฎ'],
['เคเฅ เคธเคพเคฅ เคธเคเคฌเคเคง เคฅเฅ', ' เคชเฅเคฏเคพเคฐ เคฎเฅเค เคฐเคค เคนเฅเคจเคพ', ' เคธเคพเคฅ เคธเฅเคจเคพ', ' เคธเคพเคฅ เคธเฅเคคเคพ เคนเฅ', ' เคเฅ เคธเคพเคฅ เคธเฅเคฏเคพ', ' เคเฅ เคธเคพเคฅ เคธเฅเคจเคพ'],
['เคเคพเคฏเคพ', ' เคเคพเคฏเคพ', ' เคเคเฅเคเคพเคฆเคจ', ' เคเคเฅเคเคพเคฆเคจ'],
['เคจเคพเคฎเคงเคฐเคพเค', ' เคฒเคเฅเคเคพ', ' เคถเคฐเฅเคฎ', ' เคถเคฐเฅเคฎเคชเฅเคฐเฅเคฃ', ' เคถเคฐเฅเคฎเคชเฅเคฐเฅเคฃเคคเคพ เคธเฅ', ' เคถเคฐเฅเคฎเคนเฅเคจเคคเคพ', ' เคถเคฐเฅเคฎเคนเฅเคจเคคเคพ เคธเฅ', 'เคฒเคเฅเคเคฟเคค', ' เคฒเคเฅเคเคฟเคค เคจเคนเฅเค'],
['เคญเฅเฅ', ' เคญเฅเคกเคผเฅเค', ' เคฎเฅเคเคขเคผเคพ', ' เคฎเฅเคเคขเคผเคพ', ' เคญเฅเฅ', ' เคญเฅเฅเคถเคพเคฒเคพ', ' เคญเฅเฅเคถเคพเคฒเคพ', ' เคญเฅเคกเคผ-เคฌเคเคฐเฅ', ' เคญเฅเคกเคผ-เคเคพเค'],
['เคเคฐเคตเคพเคนเฅ', ' เคเคฐเคตเคพเคนเคพ', ' เคเคฐเคตเคพเคนเคพ', ' เคเคฐเคตเคพเคนเฅ'],
['เคขเคพเคฒ', ' เคขเคพเคฒ', ' เคขเคพเคฒ'],
['เคงเฅเคฐเฅเคค ', ' เคเคคเฅเคฐเคคเคพ'],
['เคเฅเคฐ', ' เคเฅเคฐ เคฒเฅเคจเคพ', ' เคเฅเคฐ เคฒเคฟเคฏเคพ', ' เคเฅเคฐ เคเคฐเคจเฅเคตเคพเคฒเคพ', ' เคเฅเคฐ เคฒเฅเคจเคพ', ' เคฎเฅเคฐเฅเคเคพ เคฌเคพเคเคงเคจเคพ'],
['เคเคพเคเคฆเฅ'],
['เคชเคพเคชเคฌเคฒเคฟ', ' เคชเคพเคชเคฌเคฒเคฟ'],
['เคฌเคนเคจ', ' เคฌเคนเคจเฅเค'],
['เคเฅเคชเฅเฅ'],
['เคเคพเคค เคเคฐเคจเคพ', ' เคเคพเคค เคเคฟเค เคเค'],
['เคฌเคฆเคจเคพเคฎเฅ', ' เคฌเคฆเคจเคพเคฎเฅ', ' เคฌเคฆเคจเคพเคฎ', ' เคจเคฟเคเคฆเค', ' เคจเคฟเคเคฆเคพ', ' เคจเฅเคถเคเคธ'],
['เคตเคง เคเคฐเคจเคพ', ' เคตเคง เคเคฐเคจเคพ', ' เคตเคง เคเคฐเคจเคพ', ' เคตเคง เคเคฐเคจเคพ'],
['เคจเฅเคเคฆ', ' เคธเฅ เคเคพเคจเคพ', ' เคธเฅ เคเค เคฅเฅ', ' เคธเฅเคจเคพ', ' เคธเฅเคจเคพ', ' โเคเคธเฅ เคจเฅเคเคฆ เค เคเคโ', ' เคธเฅเคจเคพ', ' เคธเฅเคจเคพ', ' เคจเฅเคเคฆ เคจเคพ เคเคจเคพ', ' เคจเฅเคเคฆ'],
['เคซเคเคฆเคพ', ' เคซเคเคฆเฅ', ' เคซเคเคธเคพเคจเคพ', ' เคซเคเคธเคพเคจเคพ', ' เคซเคเคธเคจเคพ', ' เคซเคเคธเคพเคจเคพ', ' เคเคพเคฒ', ' เคเคพเคฒเฅเค', ' เคซเคเคธ เคเค'],
['เคนเคฟเคฎ', ' เคนเคฟเคฎ เคชเคกเคผเคพ', ' เคฌเคฐเฅเคซ เคเคฟเคฐเคจเฅ เคเฅ เคธเคฎเคฏ'],
['เคเคพเคฆเฅเคเคฐ', ' เคเคพเคฆเฅเคเคฐ', ' เคเคพเคฆเฅเคเคฐ', ' เคเคพเคฆเฅเคเคฐ', ' เคเคพเคฆเฅเคเคฐ', ' เคเคพเคฆเฅ เคเฅเคจเคพ'],
['เคชเฅเคงเฅ', ' เคชเฅเคงเฅ', ' เคฒเคเคพเค เคเค', ' เคฐเฅเคชเคฃ', ' เคชเฅเคฐเคคเฅเคฏเคพเคฐเฅเคชเคฟเคค', ' เคชเฅเคจเคฐเฅเคจเคพเคฎเคฟเคค', ' เคชเฅเคฐเคคเฅเคฏเคพเคฐเฅเคชเคฟเคค', ' เคฌเฅเคจเคพ', ' เคฌเฅเค', ' เคฌเฅเคฏเคพ', ' เคฌเฅเคฏเคพ', ' เคฌเฅเคตเคพเค'],
['เคญเคพเคฒเคพ', ' เคญเคพเคฒเฅ', ' เคญเคพเคฒเคพ เคงเคพเคฐเคฃ เคเคฐเคจเฅเคตเคพเคฒเคพ เคธเคฟเคชเคพเคนเฅ'],
['เคตเฅเคญเคต'],
['เคฒเคพเค เฅ', ' เคฒเคพเค เฅ'],
['เคตเคฟเคงเคฟ', ' เคตเคฟเคงเคฟเคฏเคพเค'],
['เคนเค เฅเคฒเฅ', ' เคนเค เฅเคฒเคพ', ' เคเค เฅเคฐ เคนเฅเคเคฐ', ' เคเค เฅเคฐเคคเคพ'],
['เคญเคฃเฅเคกเคพเคฐ', ' เคญเคฃเฅเคกเคพเคฐ'],
['เคฌเคฒ', ' เคฌเคฒเคตเคจเฅเคค เคเคฐเคจเคพ', ' เคฆเฅเคขเคผ เคเคฟเคฏเคพ', ' เคฎเคเคฌเฅเคค', ' เคนเคฟเคฏเคพเคต เคฌเคพเคเคงเคพ'],
['เคเคฒเคน'],
['เคฆเคพเคเคฎเคงเฅ', ' เคฆเคพเคเคฎเคงเฅ'],
['เคเฅ', ' เคเคขเคผเฅเค', ' เคฎเคเคฌเฅเคค เคเฅ', ' เคฆเฅเคขเคผ', ' เคฎเคเคฌเฅเคค เคเฅ', ' เคเคขเคผเฅเค'],
['เค เฅเคเคฐ', ' เค เฅเคเคฐ เคเคพเค', ' เค เฅเคเคฐ เคเคพเคฏเคพ', ' เค เฅเคเคฐ เคเคพเคคเคพ'],
['เค เฅเคเคฐ', ' เค เฅเคเคฐ เคเคพ เคเคพเคฐเคฃ', ' เค เฅเคเคฐ เคเฅ เคเคพเคฐเคฃ', ' เค เฅเคเคฐ เคเคพ เคชเคคเฅเคฅเคฐ'],
['เคเคงเฅเคจ', ' เคเคงเฅเคจ', ' เคเคงเฅเคจ', ' เคเคงเฅเคจ', ' เคเคงเฅเคจ', ' เคเคงเฅเคจเคคเคพ', ' เคเคงเฅเคจ', ' เคเคงเฅเคจ', ' เคเคงเฅเคจ เคฅเคพ', ' เคเคงเฅเคจ เคฅเฅ', ' เคเคงเฅเคจ'],
['เคเคงเฅเคจ เคนเฅเคจเคพ', ' เคเคงเฅเคจ เคฐเคนเคจเคพ', ' เคเคงเฅเคจ เคนเฅเค', ' เคเคงเฅเคจ เคฐเคนเคจเคพ', ' เคเคงเฅเคจเคคเคพ', 'เคเคงเฅเคจเคคเคพ เคฎเฅเค'],
['เคฆเฅเคเค เคเค เคพเค', ' เคฆเฅเคเค เคเค เคพเคจเคพ', ' เคฆเฅเคเค เคเค เคพเคฏเคพ', ' เคฆเฅเคเค เคเค เคคเคพ', ' เคฆเฅเคเค เคเค เคพเคคเคพ'],
['เคเคจเฅเคงเค', ' เคเคจเฅเคงเค'],
['เคเคพเฅเคจเคพ', ' เคเคกเคผเคพ เคฒเฅ เคเคพเคจเคพ', ' เคเคพเคกเคผเคพ-เคฌเฅเคนเคพเคฐเคพ', ' เคเคพเฅเคจ'],
['เคคเคฒเคตเคพเคฐ', ' เคคเคฒเคตเคพเคฐเฅเค', ' เคคเคฒเคตเคพเคฐ เคฐเคเคจเฅเคตเคพเคฒเฅ'],
['เคเคฐ', ' เคเคฐเฅเค', ' เคเคฐ เคฒเคเคพเคฏเคพ', ' เคเคฐ เคฒเคเคพเคจเคพ', ' เคเคฐเคฆเคพเคคเคพเคเค'],
['เคเฅเคเคเฅ เคฒเฅเคจเฅเคตเคพเคฒเคพ', ' เคเฅเคเคเฅ เคฒเฅเคจเฅเคตเคพเคฒเฅเค'],
['เคธเคฟเคเคพเคจเคพ', ' เคธเคฟเคเคพเคคเคพ เคนเฅ', ' เคชเคขเคผเคพเคฏเคพ เคเคฏเคพ', ' เคถเคฟเคเฅเคทเคพ', ' เคถเคฟเคเฅเคทเคพเคฏเฅเค', ' เคเคถเคฟเคเฅเคทเคฟเคค'],
['เคเฅเคฐเฅ', ' เคเฅเคฐเฅเคเค', ' เคเคชเคฆเฅเคถเค'],
['เคฆเคธ เคเคเฅเคเคพเคเค'],
['เคคเคฎเฅเคฌเฅ', ' เคคเคฎเฅเคฌเฅเคเค', ' เคคเคฎเฅเคฌเฅ เคฌเคจเคพเคจเฅ เคตเคพเคฒเคพ'],
['เคฆเคถเคฎเคพเคเคถ', ' เคฆเคธเคตเฅเค เคเคเคถ', ' เคฆเคถเคฎเคพเคเคถ', ' เคฆเคธเคตเคพเค'],
['เคฎเคฟเคฒเคพเคชเคตเคพเคฒเคพ เคคเคฎเฅเคฌเฅ'],
['เคกเคฐ', ' เคกเคฐเคพเคเคเคเฅ', ' เคฆเคนเคถเคค', ' เคญเคฏเคเคเคฐ', ' เคกเคฐเคพเคเค', ' เคเคฌเคฐเคพ เคเค', ' เคญเคฏเคพเคจเค'],
['เคเฅเคฐ', ' เคเฅเคฐ', ' เคฒเฅเคเคจเฅ', ' เคฒเฅเคเคจเฅ', ' เคฒเฅเคเคจเฅ', ' เคกเคพเคเฅ', ' เคฒเฅเคเฅเคฐเฅ', ' เคกเคเฅเคคเฅ', ' เคฒเฅเค'],
['เคเคเฅเคฒเฅ', ' เคเคกเคผเคฌเฅเคฐเฅ', ' เคเคพเฅเคฟเคฏเฅเค', ' เคเคพเคเคเฅเค', ' เคเคพเฅเฅ', ' เคเคเคเคเคเคพเคฐเฅ'],
['เคฆเคพเคเคตเคจเคพ', ' เคฆเคพเคเคตเคจเคพ', ' เคฆเคพเคเค เคนเฅเค', ' เคฆเคพเคเคตเคจเฅ'],
['เคกเฅเคตเคขเคผเฅ', ' เคกเฅเคตเคขเคผเคฟเคฏเฅเค'],
['เคธเคฟเคเคนเคพเคธเคจ', ' เคธเคฟเคเคนเคพเคธเคจเฅเค', ' เคตเคฟเคฐเคพเคเคฎเคพเคจ'],
['เคธเคฎเคฏ', ' เคธเคฎเคฏเคพเคจเฅเคเฅเคฒ', ' เคธเคฎเคฏ', ' เคเคธเคพเคงเคพเคฐเคฃ'],
['เคเคฌเฅเคฐ', ' เคฎเคฟเคเฅเคเฅ เคฆเฅเคจเฅเคตเคพเคฒเฅ', ' เคเคฌเฅเคฐเฅเค', ' เคเคฌเฅเคฐ', ' เคเคฌเฅเคฐเฅเค', ' เคเคฌเฅเคฐเคฟเคธเฅเคคเคพเคจ'],
['เคเฅเคญ', ' เคเฅเคญเฅเค'],
['เคฆเฅเคเค เคฆเฅเคจเฅ', ' เคธเคคเคพเคฏเคพ', ' เคเคเคงเฅเคฐ เคเคฐเคจเคพ', ' เคฆเฅเคเค เคฆเฅเคจเฅเคตเคพเคฒเฅเค'],
['เคชเคฐเคฎเฅเคชเคฐเคพ', ' เคชเคฐเคฎเฅเคชเคฐเคพเคเค'],
['เคฐเฅเคเคฆเฅ', ' เคฐเฅเคเคฆเฅเคเคพ', ' เคฐเฅเคเคฆเคพ', ' เคฐเฅเคเคฆเคจเคพ'],
['เคฌเฅเคธเฅเคง เคนเฅเคเคฐ'],
['เคเคพเคเคชเคจเคพ', ' เคเคพเคเคช เคเค เคจเคพ', ' เคเคพเคเคชเคเคฐ', ' เคฅเคฐเคฅเคฐเคพเคคเฅ เคนเฅเค'],
['เคชเคฐเฅเคเฅเคทเคพ', ' เคฆเฅเคเค'],
['เคเฅเคคเฅเคฐ', ' เคเฅเคคเฅเคฐเฅเค', ' เคเฅเคคเฅเคฐเฅเค', ' เคญเคพเคเคฏเฅเค'],
['เคเฅเคฒเฅเคถ'],
['tribute'],
['เคฆเฅเคเค', ' เคเฅเคฒเฅเคถ', ' เคชเคฐเฅเคถเคพเคจ เคนเฅเคจเคพ', ' เคธเคคเคพเคจเคพ', ' เคธเคคเคพเคจเฅเคตเคพเคฒเฅ', ' เคเคชเคฆเฅเคฐเคตเฅ'],
['เคคเฅเคฐเคนเฅ', ' เคคเฅเคฐเคนเคฟเคฏเคพเค', ' เคคเฅเคฐเคนเฅ เคซเฅเคเคเคจเฅเคตเคพเคฒเฅเค'],
['เคเฅเคฐเฅเคคเคพ', ' เคเคเคเคฐเคเฅเค'],
['เคฎเฅเฅ', ' เคฎเฅเคกเคผเคคเคพ', ' เคฒเฅเคเคจเคพ', ' เคชเฅเคเฅ เคฎเฅเคกเคผเคคเคพ เคนเฅ', ' เคตเคพเคชเคธ เคฎเฅเคกเคผเคคเคพ เคนเฅ', ' เคตเคพเคชเคธ เคฎเฅเฅเคจเคพ', ' เคตเคพเคชเคธ เคฎเฅเฅเคพ', ' เคฎเฅเฅ เคเคพเคจเคพ', ' เคตเคพเคชเคธ เคฎเฅเฅเคพ', 'เคฎเฅเคกเคผ', ' เคฎเฅเฅ เคเคฐ เคฆเฅเคฐ เคเคพ เคฐเคนเคพ เคนเฅ', ' เคฒเฅเคเคคเคพ เคนเฅ', ' เคตเคพเคชเคธ เคฒเฅเคเคพเคฏเคพ', ' เคตเคพเคชเคธ เคฒเฅเค เคฐเคนเคพ เคนเฅ', ' เคตเคพเคชเคธ เคฒเฅเค เคเคพเคคเคพ เคนเฅ'],
['เคธเคฎเคเคจเคพ', ' เคธเคฎเคเคคเคพ เคนเฅ', ' เคธเคฎเค เคฒเคฟเคฏเคพ', ' เคธเคฎเค'],
['เคจเคฟเคทเฅเคซเคฒ'],
['เคตเฅเคฏเคฐเฅเคฅ', ' เคเคจเคฐเฅเคฅ'],
['เคชเคฐเคฆเคพ', ' เคเฅเคเคเคเฅเค', ' เคชเคฐเคฆเคพ เคชเคกเคผเคพ', ' เคเคเคพเคกเคผเฅ'],
['เคฆเคพเคเคฒเคคเคพ', ' เคฆเคพเคเคฒเคคเคพเคเค'],
['เคฆเคพเค เคเฅ เคฌเคพเคฐเฅ', ' เคฆเคพเค เคเฅ เคฌเคพเคฐเคฟเคฏเฅเค'],
['เคเฅเคเคตเคพเคฐเฅ', ' เคเฅเคฎเคพเคฐเคฟเคฏเฅเค', ' เคเฅเคเคตเคพเคฐเฅเคชเคจ'],
['เคฆเคฐเฅเคถเคจ', ' เคฆเคฐเฅเคถเคจเฅเค', ' เคฆเคฐเฅเคถเคจ'],
['เคถเคฌเฅเคฆ', ' เคธเฅเคตเคฐ'],
['เคเคฒเฅ', ' เคเคฒเคคเคพ', ' เคเคฒเคพ', ' เคเคฒเคคเคพ'],
['เคธเฅเคจเคฟเค', ' เคธเคฟเคชเคพเคนเคฟเคฏเฅเค', ' เคฏเฅเคฆเฅเคงเคพ', ' เคถเฅเคฐเคตเฅเคฐเฅเค'],
['เคธเคคเฅเคฏเคพเคจเคพเคถ', ' เคจเคพเคถ', ' เคจเคพเคถ เคนเฅ เคเคฏเคพ', ' เคจเคพเคถ เคเคฐ', ' เคเคเคพเคกเคผ', ' เคเคฃเฅเคกเคนเคฐเฅเค'],
['เคเฅเคเคธ', ' เคคเคพเคเคคเคพ', ' เคฆเฅเคเคพ', ' เคฆเฅเค เคฐเคนเคพ เคฅเคพ', ' เคฆเฅเคตเคพเคฐเคชเคพเคฒ', ' เคชเคนเคฐเฅเคเค', ' เคเคพเคเคคเฅ เคฐเคนเฅ'],
['เคเฅเคฎเฅเคฎเค', ' เคชเคนเคฐเฅ เคเฅ เคฎเคฟเคจเคพเคฐเฅเค', ' เคเฅเคฎเฅเคฎเค'],
['เคชเคพเคจเฅ', ' เคเคฒ', ' เคชเคพเคจเฅ เคชเคฟเคฒเคพเคฏเคพ', ' เคชเคพเคจเฅ เคฆเฅเคจเคพ'],
['เคเคกเฅเคขเฅ', ' เคเฅเคเค', ' เคเฅเคเค', ' เคเฅเคเค'],
['เคเฅเคนเฅเค'],
['เคฆเคพเคเคฐเคธ', ' เคเฅเคฃเฅเคก', ' เคเฅเคฃเฅเคกเฅเค', ' เคฆเคพเคเคฐเคธ', ' เคฎเคถเค', ' เคฎเคถเคเฅเค', ' เคจเค เคฆเคพเคเคฐเคธ'],
['เคฆเคพเคเคฐเคธ เคเฅ เคเฅเคฃเฅเคก'],
['เคซเคเคเคจเคพ', ' เคซเคเคเคคเคพ', ' เคซเคเคเคพ', ' เคซเคเคเฅเคเคพ', ' เคซเคเคเฅ', ' เคเคพเคจเคจเคพ'],
['เคฌเฅเคฆเฅเคงเคฟเคฎเคพเคจเฅเค'],
['เคญเฅเฅเคฟเคฏเคพ', ' เคญเฅเฅเคฟเค', ' เคเคเคเคฒเฅ เคเฅเคคเฅเคคเฅ'],
['เคเคฐเฅเคญ'],
['เคตเคเคจ', ' เคถเคฌเฅเคฆ'],
['เคฒเคฟเคเคพ เคเคฏเคพ'],
['เคเคฒเคค', ' เคเคฒเคคเคฟเคฏเคพเค', ' เคเคฒเคค เคเคฐเคจเคพ', ' เคเคฒเคค เคคเคฐเฅเคเฅ เคธเฅ', ' เคเคฒเคค เคคเคฐเฅเคเฅ เคธเฅ', ' เคเคฒเคค เคเคฐเคจเฅเคตเคพเคฒเฅ', ' เคเคฒเคค', ' เคฆเฅเคฐเฅเคตเฅเคฏเคตเคนเคพเคฐ', ' เคฆเฅเคฐเฅเคตเฅเคฏเคตเคนเคพเคฐ', ' เคธเคคเคพเคฏเคพ เคนเฅเค', ' เคฆเคฐเฅเคฆ', ' เคเฅเค เคชเคนเฅเคเคเคพเคจเคพ', ' เคฆเคฐเฅเคฆเคจเคพเค'],
['เคเคผเคฎเฅเคฐ', ' เคเคผเคฎเฅเคฐเฅ', ' เคเคผเคฎเฅเคฐ', ' เคเคผเคฎเฅเคฐ เคฌเคจเคพเคจเคพ', ' เคเฅเคฎเฅเคฐเฅ'],
['เคเฅเค', ' เคเฅเค', ' เคเฅเค เคฎเฅเค'],
]
kan_tws = []
mal_tws = []
mar_tws = []
pun_tws = []
odi_tws = []
tam_tws = []
tel_tws = []
urd_tws = []
heb_tws = []
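# A hedged sketch (not part of the original script): the per-language lists
# above are parallel translation-word tables, so they could be zipped against
# an English key list defined elsewhere in this file. The names `eng_tws` and
# `hin_tws` below are hypothetical placeholders for those variables.
#
#   tw_lookup = dict(zip(eng_tws, hin_tws))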
|
nilq/baby-python
|
python
|
import os
# os is only used for finding a dynamic absolute path to the I/O files
absolute_path = os.path.dirname(os.path.abspath(__file__))
inn = absolute_path + '/rom.in'
outt = absolute_path + '/rom.out'
# Open input files
fin = open(inn)
fout = open(outt, 'w')
# Read every input line; the first line gives the number of equations
lines = fin.readlines()
# Create an array sized to the equation count from the first line
nums = [None] * int(lines[0])
# Load the array with the equations that follow the count line
for i in range(len(nums)):
    nums[i] = str(lines[i + 1]).strip()
# Loop through each equation
for i in range(len(nums)):
# Init/reset values after each iteration of equation
romanValue = 0
first = 0
second = 0
print(nums[i], end='')
fout.write(nums[i])
# Loop through each character of the equation
for j in range(len(nums[i])):
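        # Note: rebinding j inside this loop body does not change the for-loop's
        # own iteration sequence; the inner while below advances j manually.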
# Get sum
romanSum = first + second
currentChar = nums[i][j]
# When current character is not the last, assign next character
if currentChar != '=':
nextChar = nums[i][j + 1]
# While not on the last character go through each character and increment a variable based on the values of the input
while currentChar != '=':
if currentChar == 'M':
romanValue += 1000
elif currentChar == 'D':
romanValue += 500
elif currentChar == 'C' and nextChar == 'M':
romanValue += 900
j += 1
elif currentChar == 'C' and nextChar == 'D':
romanValue += 400
j += 1
elif currentChar == 'C':
romanValue += 100
elif currentChar == 'L':
romanValue += 50
elif currentChar == 'X' and nextChar == 'C':
romanValue += 90
j += 1
elif currentChar == 'X' and nextChar == 'L':
romanValue += 40
j += 1
elif currentChar == 'X':
romanValue += 10
elif currentChar == 'V':
romanValue += 5
elif currentChar == 'I' and nextChar == 'X':
romanValue += 9
j += 1
elif currentChar == 'I' and nextChar == 'V':
romanValue += 4
j += 1
elif currentChar == 'I':
romanValue += 1
# When the plus is reached
else:
# Get value for first and second part of the sum equation
if first == 0:
first = romanValue
else:
second = romanValue
romanValue = 0
j += 1
currentChar = nums[i][j]
if currentChar != '=':
nextChar = nums[i][j + 1]
# Once the sum of the equation is determined, turn it back into a roman numeral
while romanSum > 0:
if romanSum > 1000:
romanSum = 0
print('CONCORDIA CUM VERITATE', end='')
fout.write('CONCORDIA CUM VERITATE')
elif romanSum == 1000:
romanSum -= 1000
print('M', end='')
fout.write('M')
elif romanSum - 900 >= 0:
romanSum -= 900
print('CM', end='')
fout.write('CM')
elif romanSum - 500 >= 0:
romanSum -= 500
print('D', end='')
fout.write('D')
elif romanSum - 400 >= 0:
romanSum -= 400
print('CD', end='')
fout.write('CD')
elif romanSum - 100 >= 0:
romanSum -= 100
print('C', end='')
fout.write('C')
elif romanSum - 90 >= 0:
romanSum -= 90
print('XC', end='')
fout.write('XC')
elif romanSum - 50 >= 0:
romanSum -= 50
print('L', end='')
fout.write('L')
elif romanSum - 40 >= 0:
romanSum -= 40
print('XL', end='')
fout.write('XL')
elif romanSum - 10 >= 0:
romanSum -= 10
print('X', end='')
fout.write('X')
elif romanSum - 9 >= 0:
romanSum -= 9
print('IX', end='')
fout.write('IX')
elif romanSum - 5 >= 0:
romanSum -= 5
print('V', end='')
fout.write('V')
elif romanSum - 4 >= 0:
romanSum -= 4
print('IV', end='')
fout.write('IV')
else:
romanSum -= 1
print('I', end='')
fout.write('I')
print()
fout.write('\n')
fin.close()
fout.close()
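
# A compact, hedged sketch of the same conversion (not part of the original
# submission): a value-map parser that handles subtractive notation by
# comparing each symbol's value with the value of the symbol that follows it.
ROMAN_VALUES = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}

def roman_to_int(numeral):
    """Convert a Roman numeral string such as 'XIV' to an integer."""
    total = 0
    for k, ch in enumerate(numeral):
        value = ROMAN_VALUES[ch]
        # A symbol is subtracted when a larger symbol follows it (e.g. IX = 9).
        if k + 1 < len(numeral) and ROMAN_VALUES[numeral[k + 1]] > value:
            total -= value
        else:
            total += value
    return total

def roman_sum(equation):
    """Evaluate an 'A+B=' equation in the same format as the rom.in lines."""
    left, right = equation.rstrip('=').split('+')
    return roman_to_int(left) + roman_to_int(right)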
|
nilq/baby-python
|
python
|
import factory
from karp.domain.model import Entry, Resource
class ResourceFactory(factory.Factory):
class Meta:
model = Resource
entity_id = factory.
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# Copyright 2020 Christian Henning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @title :hnets/structured_hmlp_examples.py
# @author :ch
# @contact :henningc@ethz.ch
# @created :05/02/2020
# @version :1.0
# @python_version :3.6.10
"""
Example Instantiations of a Structured Chunked MLP - Hypernetwork
-----------------------------------------------------------------
The module :mod:`hnets.structured_hmlp_examples` provides helpers for example
instantiations of :class:`hnets.structured_mlp_hnet.StructuredHMLP`.
Functions in this module typically take a given main network and produce the
constructor arguments ``chunk_shapes``, ``num_per_chunk`` and ``assembly_fct``
of class :class:`hnets.structured_mlp_hnet.StructuredHMLP`.
Note:
These examples should be used with care. They are meant as inspiration and
        might not cover all possible use cases.
.. autosummary::
hnets.structured_hmlp_examples.resnet_chunking
hnets.structured_hmlp_examples.wrn_chunking
"""
import math
import numpy as np
import torch
from warnings import warn
from mnets.resnet import ResNet
from mnets.wide_resnet import WRN
def resnet_chunking(net, gcd_chunking=False):
r"""Design a structured chunking for a ResNet.
A resnet as implemented in class :class:`mnets.resnet.ResNet` consists
roughly of 5 parts:
- An input convolutional layer with weight shape ``[C_1, C_in, 3, 3]``
- 3 blocks of ``2*n`` convolutional layers each where the first layer has
shape ``[C_i, C_j, 3, 3]`` with :math:`i \in \{2, 3, 4\}` and
:math:`j \equiv i-1` and the remaining ``2*n-1`` layers have a weight
shape of ``[C_i, C_i, 3, 3]``.
- A final fully connected layer of shape ``[n_classes, n_hidden]``.
Each layer may additionally have a bias vector and (if batch normalization
is used) a scale and shift vector.
For instance, if a resnet with biases and batchnorm is used and the first
layer will be produced as one structured chunk, then the first chunk shape
(see return value ``chunk_shapes``) will be:
``[[C_1, C_in, 3, 3], [C_1], [C_1], [C_1]]``.
This function will chunk layer wise (i.e., a chunk always comprises up to
4 elements: weights tensor, bias vector, batchnorm scale and shift). By
default, layers with the same shape are grouped together. Hence, the
standard return value contains 8 chunk shapes (input layer, first layer of
each block, remaining layers of each block (which all have the same shape)
and the fully-connected output layer). Therefore, the return value
``num_per_chunk`` would be as follows:
``[1, 1, 2*n-1, 1, 2*n-1, 1, 2*n-1, 1]``.
Args:
net (mnets.resnet.ResNet): The network for which the structured chunking
should be devised.
gcd_chunking (bool): If ``True``, the layers within the 3 resnet blocks
will be produced by 4 chunks. Therefore, the greatest common divisor
(gcd) of the feature sizes ``C_1, C_2, C_3, C_4`` is computed and
the 6 middle ``chunk_shapes`` produced by default are replaced by 4
chunk shapes ``[[C_gcd, C_i, 3, 3], [C_gcd]]`` (assuming no
batchnorm is used). Note, the first and last entry of
``chunk_shapes`` will remain unchanged by this option.
Hence, ``len(num_per_chunk) = 6`` in this case.
Returns:
(tuple): Tuple containing the following arguments that can be passed
to the constructor of class
:class:`hnets.structured_mlp_hnet.StructuredHMLP`.
- **chunk_shapes** (list)
- **num_per_chunk** (list)
- **assembly_fct** (func)
"""
if not isinstance(net, ResNet):
        raise ValueError('Function expects resnet as argument "net".')
if net._use_context_mod:
raise NotImplementedError('This function doesn\'t handle context-mod ' +
'layers yet!')
if net._param_shapes_meta is not None:
warn('Note, at the time of implementation of this function, the ' +
'resnet attribute "param_shapes_meta" was not yet implemented. ' +
'Hence, this function implementation should be updated.')
has_bn = net._use_batch_norm
has_bias = net.has_bias
n = net._n
filter_sizes = net._filter_sizes
num_layers = 6*n + 2
factor = 1
sub = 0
if has_bias:
factor += 1
if has_bn:
factor += 2
sub = 2
assert len(net.param_shapes) == factor * num_layers - sub
if gcd_chunking:
# Note, each of the `6*n` layers in the middle can be made up of
# several chunks. We know that 1 layer has `C1` as input channel
# dimension, 2n layers have `C2` and `C3` as input channel dimension and
# 2n-1 layers have `C4` as input channel dimension. Though, depending on
# the gcd, multiple chunks are required to produce the weights of 1
# layer.
num_per_chunk = [1, None, None, None, None, 1]
else:
num_per_chunk = [1, 1, 2*n-1, 1, 2*n-1, 1, 2*n-1, 1]
chunk_shapes = []
assembly_fct = None
# Note, if batchnorm is used, then the first 2 * (6*n+1) weights belong to
# batch normalization.
bn_start = 0
w_start = 2 * (6*n+1) if has_bn else 0
### First layer ###
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
if has_bn:
cs.extend(net.param_shapes[:2])
chunk_shapes.append(cs)
bn_start += 2
w_start += 2 if has_bias else 1
### Resnet blocks ###
c_div_gcd = None
if gcd_chunking:
gcd = math.gcd(filter_sizes[0], filter_sizes[1])
gcd = math.gcd(gcd, filter_sizes[2])
gcd = math.gcd(gcd, filter_sizes[3])
# The first block is made up of layers requiring `C1//gcd` chunks each,
# and so on ...
fsl = filter_sizes
c_div_gcd = [fsl[1] // gcd, fsl[2] // gcd, fsl[3] // gcd]
for i, fs in enumerate(filter_sizes):
if i == 0:
#n_layers = 1
n_chunks = c_div_gcd[0]
elif i == 1:
#n_layers = 2 * n
n_chunks = c_div_gcd[0] * (2*n-1) + c_div_gcd[1]
elif i == 2:
#n_layers = 2 * n
n_chunks = c_div_gcd[1] * (2*n-1) + c_div_gcd[2]
else:
#n_layers = 2 * n - 1
n_chunks = c_div_gcd[2] * (2*n-1)
num_per_chunk[1+i] = n_chunks
cs = []
cs.append([gcd, fs, *net._kernel_size])
if has_bias:
cs.append([gcd])
if has_bn:
cs.extend([[gcd], [gcd]])
chunk_shapes.append(cs)
bn_start += 2 * (6*n)
w_start += (2 if has_bias else 1) * (6*n)
else:
for i in range(3): # For each resnet block
# FIXME If two consecutive filter sizes are identical, we could
# add one chunk shape for this block rather than 2.
# First layer of block.
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
if has_bn:
cs.extend(net.param_shapes[bn_start:bn_start+2])
chunk_shapes.append(cs)
bn_start += 2
w_start += 2 if has_bias else 1
# Remaining 2*n-1 layers of block.
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
if has_bn:
cs.extend(net.param_shapes[bn_start:bn_start+2])
chunk_shapes.append(cs)
bn_start += 2
w_start += 2 if has_bias else 1
for ii in range(2*n-2):
assert len(cs[0]) == 4
assert np.all(np.equal(net.param_shapes[w_start], cs[0]))
if has_bias:
assert len(cs[1]) == 1
assert np.all(np.equal(net.param_shapes[w_start+1], cs[1]))
if has_bn:
o = 2 if has_bias else 1
assert len(cs[o]) == 1 and len(cs[o+1]) == 1
assert np.all(np.equal(net.param_shapes[bn_start], cs[o]))
assert np.all(np.equal(net.param_shapes[bn_start+1],
cs[o+1]))
bn_start += 2
w_start += 2 if has_bias else 1
### Final layer ###
cs = []
cs.append(net.param_shapes[w_start])
if has_bias:
cs.append(net.param_shapes[w_start+1])
# No batchnorm for last layer!
chunk_shapes.append(cs)
assert len(chunk_shapes) == len(num_per_chunk)
assembly_fct = lambda x : _resnet_chunking_afct(x, net, chunk_shapes,
num_per_chunk, gcd_chunking, c_div_gcd)
return chunk_shapes, num_per_chunk, assembly_fct
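# Hedged usage sketch (not from the original module; the constructor arguments
# shown for ResNet and StructuredHMLP are assumptions, not verified API):
#
#     net = ResNet(no_weights=True)
#     chunk_shapes, num_per_chunk, assembly_fct = resnet_chunking(net)
#     hnet = StructuredHMLP(net.param_shapes, chunk_shapes, num_per_chunk,
#                           assembly_fct=assembly_fct)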
def _resnet_chunking_afct(list_of_chunks, net, chunk_shapes, num_per_chunk,
gcd_chunking, c_div_gcd):
"""The ``assembly_fct`` function required by function
:func:`resnet_chunking`.
"""
assert len(list_of_chunks) == np.sum(num_per_chunk)
has_bn = net._use_batch_norm
has_bias = net.has_bias
n = net._n
bn_weights = []
layer_weights = []
cind = 0
### First layer ###
layer_weights.append(list_of_chunks[cind][0])
if has_bias:
layer_weights.append(list_of_chunks[cind][1])
if has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
### Resnet blocks ###
if gcd_chunking:
# Number of layers per channel size.
n_per_c = [1, 2*n, 2*n, 2*n-1]
layer_ind = 0
for i, n_layer in enumerate(n_per_c):
for l in range(n_layer):
# Out of how many chunks does this layer consist?
n_c = c_div_gcd[layer_ind // (2*n)]
layer_ind += 1
chunks = list_of_chunks[cind:cind+n_c]
cind += n_c
layer_weights.append(torch.cat([c[0] for c in chunks], dim=0))
if has_bias:
layer_weights.append(torch.cat([c[1] for c in chunks],
dim=0))
if has_bn:
bn_weights.append(torch.cat([c[-2] for c in chunks], dim=0))
bn_weights.append(torch.cat([c[-1] for c in chunks], dim=0))
else:
for i in range(3): # For each block.
# First layer in block.
layer_weights.append(list_of_chunks[cind][0])
if has_bias:
layer_weights.append(list_of_chunks[cind][1])
if has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
# Remaining layers in block.
for _ in range(2*n-1):
layer_weights.append(list_of_chunks[cind][0])
if has_bias:
layer_weights.append(list_of_chunks[cind][1])
if has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
### Last layer ###
# No batchnorm for last layer!
layer_weights.append(list_of_chunks[-1][0])
if has_bias:
layer_weights.append(list_of_chunks[-1][1])
return bn_weights + layer_weights
def wrn_chunking(net, ignore_bn_weights=True, ignore_out_weights=True,
gcd_chunking=False):
r"""Design a structured chunking for a Wide-ResNet (WRN).
This function is in principle similar to function :func:`resnet_chunking`,
but with the goal to provide a chunking scheme that is identical to the one
proposed in (accessed August 18th, 2020):
Sacramento et al., "Economical ensembles with hypernetworks", 2020
https://arxiv.org/abs/2007.12927
Therefore, a WRN as implemented in class :class:`mnets.wide_resnet.WRN`
is required. For instance, a `WRN-28-10-B(3,3)` can be instantiated as
follows, using batchnorm but no biases in all convolutional layers:
.. code-block:: python
wrn = WRN(in_shape=(32, 32, 3), num_classes=10, n=4, k=10,
num_feature_maps=(16, 16, 32, 64), use_bias=False,
use_fc_bias=True, no_weights=False, use_batch_norm=True)
We denote channel sizes by ``[C_in, C_1, C_2, C_3, C_4]``, where ``C_in`` is
the number of input channels and the remaining ``C_1, C_2, C_3, C_4`` denote
the channel size per convolutional group. The widening factor is denoted by
``k``.
In general, there will be up to 11 `layer groups`, which will be realized
by separate hypernetworks (cmp table S1 in
`Sacramento et al. <https://arxiv.org/pdf/2007.12927.pdf>`_):
- ``0``: Input layer weights. If the network's convolutional layers have
biases and batchnorm layers while ``ignore_bn_weights=False``, then this
hypernet will produce weights of shape
``[[C_1, C_in, 3, 3], [C_1], [C_1], [C_1]]``. However, without
convolutional bias terms and with ``ignore_bn_weights=True``, the hypernet
will only produce weights of shape ``[[C_1, C_in, 3, 3]]``. This
specification applies to all layer groups generating convolutional layers.
- ``1``: This layer group will generate the weights of the first
convolutional layer in the first convolutional group, e.g.,
``[[k*C_2, C_1, 3, 3]]``. Let's define
``r = max(k*C_2/C_1, C_1/k*C_2)``. If ``r=1`` or ``r=2`` or
``gcd_chunking=True``, then this group is merged with layer group ``2``.
- ``2``: The remaining convolutional layer of the first convolutional group.
If ``r=1``, ``r=2`` or ``gcd_chunking=True``, then all convolutional
layers of the first group are generated. However, if biases or batch norm
weights have to be generated, then this form of chunking leads to
redundancy. Imagine bias terms are used and that the first layer in this
convolutional group has weights ``[[160, 16, 3, 3], [160]]``, while the
remaining layers have shape ``[[160, 160, 3, 3], [160]]``. If that's the
case, the hypernetwork output will be of shape
``[[160, 16, 3, 3], [160]]``, meaning that 10 chunks have to be produced
for each except the first layer. However, this means that per
convolutional layer 10 bias vectors are generated, while only one is
needed and therefore the other 9 will go to waste.
- ``3``: Same as ``1`` for the first layer in the second convolutional
group.
- ``4`` (labelled as ``3`` in the paper): Same as ``2`` for all
convolutional layers (potentially excluding the first) in the second
convolutional group.
- ``5``: Same as ``1`` for the first layer in the third convolutional
group.
- ``6`` (labelled as ``4`` in the paper): Same as ``2`` for all
convolutional layers (potentially excluding the first) in the third
convolutional group.
- ``7`` (labelled as ``5`` in the paper): If existing, this hypernetwork
produces the 1x1 convolutional layer realizing the residual connection
connecting the first and second residual block in the first convolutional
group.
- ``8`` (labelled as ``6`` in the paper): Same as ``7`` but for the first
residual connection in the second convolutional group.
- ``9`` (labelled as ``7`` in the paper): Same as ``7`` but for the first
residual connection in the third convolutional group.
- ``10``: This hypernetwork will produce the weights of the fully connected
output layer, if ``ignore_out_weights=False``.
Thus, the WRN weights would maximally be produced by 11 different sub-
hypernetworks.
Note:
There is currently an implementation mismatch, such that the
implementation provided here does not 100% mimic the architecture
described in
`Sacramento et al. <https://arxiv.org/pdf/2007.12927.pdf>`_.
To be specific, given the ``wrn`` generated above, the hypernetwork
output for layer group ``2`` will be of shape ``[160, 160, 3, 3]``,
while the paper expects a vertical chunking with a hypernet output of
shape ``[160, 80, 3, 3]``.
Args:
net (mnets.wide_resnet.WRN): The network for which the structured
chunking should be devised.
ignore_bn_weights (bool): If ``True``, even if the given ``net`` has
batchnorm weights, they will be ignored by this function.
ignore_out_weights (bool): If ``True``, output weights (layer group
``10``) will be ignored by this function.
gcd_chunking (bool): If ``True``, layer groups ``1``, ``3`` and ``5``
are ignored. Instead, the greatest common divisor (gcd) of input and
output feature size in a convolutional group is computed and weight
tensors within a convolutional group (i.e., layer groups ``2``,
``4`` and ``6``) are chunked according to this value. However, note
that this will cause the generation of unused bias and batchnorm
weights if existing (cp. description of layer group ``2``).
Returns:
(tuple): Tuple containing the following arguments that can be passed
to the constructor of class
:class:`hnets.structured_mlp_hnet.StructuredHMLP`.
- **chunk_shapes** (list)
- **num_per_chunk** (list)
- **assembly_fct** (func)
"""
if not isinstance(net, WRN):
        raise ValueError('Function expects WRN as argument "net".')
if net._use_context_mod:
raise NotImplementedError('This function doesn\'t handle context-mod ' +
'layers yet!')
assert net.param_shapes_meta is not None
has_bn = net.batchnorm_layers is not None and len(net.batchnorm_layers) > 0
has_conv_bias = net._use_bias
has_fc_bias = net._use_fc_bias
n = net._n
filter_sizes = net._filter_sizes
#n_conv_layers = 1 + 6 * n + np.sum(net._group_has_1x1)
    ### Group parameter shapes according to their meaning ###
bn_shapes = None
if has_bn:
bn_shapes = net.param_shapes[:2*len(net.batchnorm_layers)]
assert len(net.batchnorm_layers) == 6 * n + 1
for i, meta in enumerate(net.param_shapes_meta[:len(bn_shapes)]):
assert meta['name'].startswith('bn_')
if i % 2 == 1:
assert meta['layer'] == net.param_shapes_meta[i-1]['layer']
elif i > 1:
assert meta['layer'] > net.param_shapes_meta[i-2]['layer']
conv_1x1_shapes = []
pind = 0 if bn_shapes is None else len(bn_shapes)
for g_has_1x1 in net._group_has_1x1:
if g_has_1x1:
conv_1x1_shapes.append(net.param_shapes[pind])
pind += 1
assert len(conv_1x1_shapes[-1]) == 4 and \
conv_1x1_shapes[-1][-1] == 1
else:
conv_1x1_shapes.append(None)
conv_layers = []
conv_biases = [] if has_conv_bias else None
for i in range(2*(1+6*n) if has_conv_bias else 1+6*n):
shape = net.param_shapes[pind]
meta = net.param_shapes_meta[pind]
if has_conv_bias and i % 2 == 1:
assert meta['name'] == 'bias'
conv_biases.append(shape)
else:
assert meta['name'] == 'weight'
conv_layers.append(shape)
pind += 1
    assert pind == len(net.param_shapes) - (2 if has_fc_bias else 1)
assert net.has_fc_out and net.mask_fc_out
if has_fc_bias:
fc_w_shape = net.param_shapes[-2]
fc_b_shape = net.param_shapes[-1]
else:
fc_w_shape = net.param_shapes[-1]
fc_b_shape = None
### Decide on chunking strategy ###
use_lg_135 = [True, True, True] # Use layer group 1, 3 or 5?
conv_group_gcd = [-1, -1, -1]
for i in range(1, 4):
fs_prev = filter_sizes[i-1]
fs_curr = filter_sizes[i]
# In this case, we always chunk.
if max(fs_prev, fs_curr) / min(fs_prev, fs_curr) in [1, 2]:
use_lg_135[i-1] = False
conv_group_gcd[i-1] = min(fs_prev, fs_curr)
elif gcd_chunking:
use_lg_135[i-1] = False
conv_group_gcd[i-1] = math.gcd(fs_prev, fs_curr)
### Prepare chunking for each layer group ###
layer_groups = [True] * 11
# Which layer group actually exist?
if not use_lg_135[0]:
layer_groups[1] = False
if not use_lg_135[1]:
layer_groups[3] = False
if not use_lg_135[2]:
layer_groups[5] = False
# 7, 8, 9 are the 1x1 layer groups.
for i, val in enumerate(net._group_has_1x1):
if not val:
layer_groups[7+i] = False
if ignore_out_weights:
layer_groups[-1] = False
chunk_shapes = []
num_per_chunk = []
# Layer group 0.
num_per_chunk.append(1)
chunk_shapes.append([])
chunk_shapes[-1].append(conv_layers[0])
if has_conv_bias:
chunk_shapes[-1].append(conv_biases[0])
if not ignore_bn_weights and has_bn:
chunk_shapes[-1].extend(bn_shapes[:2])
# Layer groups 1 to 6.
for g in range(3): # For each conv group.
# Input layer to convolutional group.
if layer_groups[1+2*g]:
num_per_chunk.append(1)
chunk_shapes.append([])
chunk_shapes[-1].append(conv_layers[1+2*n*g])
if has_conv_bias:
chunk_shapes[-1].append(conv_biases[1+2*n*g])
if not ignore_bn_weights and has_bn:
chunk_shapes[-1].extend(bn_shapes[2*(1+2*n*g):2*(1+2*n*g)+2])
# Remaining layers of convolutional group.
fs_prev = filter_sizes[g]
fs_curr = filter_sizes[g+1]
assert not has_conv_bias or np.all(np.equal([a[0] for a in \
conv_biases[1+2*n*g:1+2*n*(g+1)]], fs_curr))
assert not has_bn or np.all(np.equal([a[0] for a in \
bn_shapes[2*(1+2*n*g):2*(1+2*n*(g+1))]], fs_curr))
if layer_groups[1+2*g]:
num_per_chunk.append(2*n-1) # 1 chunk per conv layer.
chunk_shapes.append([])
chunk_shapes[-1].append(conv_layers[1+2*n*g+1])
else:
gcd = conv_group_gcd[g]
num_per_chunk.append(fs_prev//gcd + (2*n-1) * fs_curr//gcd)
chunk_shapes.append([[fs_curr, gcd, 3, 3]])
if has_conv_bias:
chunk_shapes[-1].append([fs_curr])
if not ignore_bn_weights and has_bn:
chunk_shapes[-1].extend([[fs_curr], [fs_curr]])
# Layer group 7 - 9.
for i in range(7, 10):
if layer_groups[i]:
num_per_chunk.append(1)
chunk_shapes.append([conv_1x1_shapes[i-7]])
# Layer group 10.
if not ignore_out_weights:
num_per_chunk.append(1)
chunk_shapes.append([])
chunk_shapes[-1].append(fc_w_shape)
if has_fc_bias:
chunk_shapes[-1].append(fc_b_shape)
### Get assembly function ###
assembly_fct = lambda x : _wrn_chunking_afct(x, chunk_shapes, num_per_chunk,
layer_groups, conv_group_gcd, has_conv_bias, has_fc_bias, has_bn,
ignore_bn_weights, ignore_out_weights, n, filter_sizes)
return chunk_shapes, num_per_chunk, assembly_fct
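# Hedged usage sketch mirroring the docstring example above (only the WRN
# instantiation is taken from the docstring; the StructuredHMLP call is an
# assumption, not verified API):
#
#     wrn = WRN(in_shape=(32, 32, 3), num_classes=10, n=4, k=10,
#               num_feature_maps=(16, 16, 32, 64), use_bias=False,
#               use_fc_bias=True, no_weights=False, use_batch_norm=True)
#     chunk_shapes, num_per_chunk, assembly_fct = wrn_chunking(wrn)
#     hnet = StructuredHMLP(wrn.param_shapes, chunk_shapes, num_per_chunk,
#                           assembly_fct=assembly_fct)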
def _wrn_chunking_afct(list_of_chunks, chunk_shapes, num_per_chunk,
layer_groups, conv_group_gcd, has_conv_bias, has_fc_bias,
has_bn, ignore_bn_weights, ignore_out_weights, n,
filter_sizes):
"""The ``assembly_fct`` function required by function :func:`wrn_chunking`.
"""
assert len(list_of_chunks) == np.sum(num_per_chunk)
bn_weights = []
conv_layer_weights = []
res_1x1_layer_weights = []
last_layer_weights = []
cind = 0
### First layer ###
conv_layer_weights.append(list_of_chunks[cind][0])
if has_conv_bias:
conv_layer_weights.append(list_of_chunks[cind][1])
if not ignore_bn_weights and has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
### Resnet blocks ###
for g in range(3): # For each block.
# First layer in block.
if layer_groups[1+2*g]:
conv_layer_weights.append(list_of_chunks[cind][0])
if has_conv_bias:
conv_layer_weights.append(list_of_chunks[cind][1])
if not ignore_bn_weights and has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
# Remaining layers in block.
fs_prev = filter_sizes[g]
fs_curr = filter_sizes[g+1]
if layer_groups[1+2*g]:
for _ in range(2*n-1): # 1 chunk per layer
conv_layer_weights.append(list_of_chunks[cind][0])
if has_conv_bias:
conv_layer_weights.append(list_of_chunks[cind][1])
if not ignore_bn_weights and has_bn:
bn_weights.extend(list_of_chunks[cind][-2:])
cind += 1
else:
num_chunks_first = fs_prev // conv_group_gcd[g]
num_chunks_rem = fs_curr // conv_group_gcd[g]
# Important: Bias and batchnorm weights are always taken from the
# first chunk of a layer (corresponding weights in remaining layers
# are ignored). Weight tensors are concatenated across chunks.
n_per_l = [num_chunks_first] + [num_chunks_rem] * (2*n-1)
for n_c in n_per_l:
chunks = list_of_chunks[cind:cind+n_c]
cind += n_c
conv_layer_weights.append(torch.cat([c[0] for c in chunks],
dim=1))
if has_conv_bias:
conv_layer_weights.append(chunks[0][1])
if not ignore_bn_weights and has_bn:
bn_weights.append(chunks[0][-2])
bn_weights.append(chunks[0][-1])
### 1x1 residual connections ###
for i in range(3):
if layer_groups[7+i]:
res_1x1_layer_weights.append(list_of_chunks[cind][0])
cind += 1
### Last layer ###
# No batchnorm for last layer!
if not ignore_out_weights:
last_layer_weights.append(list_of_chunks[-1][0])
if has_fc_bias:
last_layer_weights.append(list_of_chunks[-1][1])
return bn_weights + res_1x1_layer_weights + conv_layer_weights + \
last_layer_weights
if __name__ == '__main__':
pass
|
nilq/baby-python
|
python
|
import os

import pandas as pd
import pytz
from dateutil import parser
fp = "C:\\Users\\Robert\\Documents\\Uni\\SOLARNET\\HomogenizationCampaign\\rome\\"
file = os.path.join(fp, "data.csv")
data = pd.read_csv(file, delimiter=" ")
print(data)
converted_data = []
for fits_file, ut in zip(data.file, data.UT):
    # The observation date is encoded in the file name; combine it with the UT column
    time = parser.parse(fits_file[-15:-7] + "T" + ut)
    time = pytz.utc.localize(time)
    obs_type = fits_file[9:14]  # renamed to avoid shadowing the builtin `type`
    if obs_type != "CaIIK":
        print(fits_file)
    converted_data.append([fits_file, time, obs_type, 1])
converted_data = pd.DataFrame(converted_data, columns=["file", "date", "type", "quality"])
converted_data.to_csv(os.path.join(fp, "converted_ds.csv"))
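# Note (an assumption, not verified against the Rome archive): the slicing
# above presumes file names shaped roughly like "<prefix><type>..._YYYYMMDD...",
# so that fits_file[9:14] yields the observation type (e.g. "CaIIK") and
# fits_file[-15:-7] the YYYYMMDD date that is combined with the UT column.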
|
nilq/baby-python
|
python
|
from test.common_test_util import expected_result
from test.hquery.hquery_test_util import query_html_doc
def test_escapes_work_in_string_literals():
    assert query_html_doc('', '"foo\\nbar"') == expected_result("""
foo
bar""")
    assert query_html_doc('', "'foo\\nbar'") == expected_result("""
foo
bar""")
    assert query_html_doc('', '`foo\\nbar`') == expected_result("""
foo
bar""")
|
nilq/baby-python
|
python
|
import argparse
import sys
parser = argparse.ArgumentParser(description='Extract gold entities conll file.')
parser.add_argument('--input_file')
args = parser.parse_args()
# State machine over blank-line-separated blocks: 0 = sentence lines,
# 1 = an intermediate block, 2 = annotated lines whose second tab-separated
# field is the gold label
reading = 0
golds = []
sentences = []
with open(args.input_file, 'r') as i_file:
for line in i_file:
line = line.strip()
if line and reading == 0:
sentences.append(line)
elif line and reading == 2:
parts = line.split("\t")
golds.append(parts[1])
if not line and (reading == 0 or reading == 1):
reading += 1
elif not line:
reading = 0
print("\t".join(golds))
golds = []
sentences = []
# Flush a trailing block that is not terminated by a blank line
if len(sentences) > 0:
    print("\n".join(golds), end="")
|
nilq/baby-python
|
python
|
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.contrib.auth.models import User
from django.http import Http404
# Create your models here.
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
pic = models.ImageField(upload_to='profile_photo/', blank=True, default='profile_photo/defaultprofile_Wk2PTL2.jpg')
bio = models.CharField(max_length=265, blank=True)
contact_info = models.CharField(max_length=255, blank=True)
def __str__(self):
return self.bio
@classmethod
def get_profiles(cls):
profile = cls.objects.all()
return profile
class Projects(models.Model):
image = models.ImageField(upload_to='project_folder')
title = models.CharField(max_length=255)
description = models.TextField()
link = models.CharField(max_length=200)
post_date = models.DateTimeField(auto_now_add=True)
profile = models.ForeignKey(Profile, on_delete=models.CASCADE, default='1')
author = models.ForeignKey(User, on_delete=models.CASCADE, default='1')
def __str__(self):
return f'{self.profile.user.username}'
class Meta:
ordering = ['-post_date']
@classmethod
def get_project_by_id(cls, id):
try:
proj = Projects.objects.get(pk=id)
except ObjectDoesNotExist:
raise Http404()
return proj
@classmethod
def get_projects(cls):
project = cls.objects.all()
return project
@classmethod
def search_by_title(cls, search_term):
projects = cls.objects.filter(title__icontains=search_term)
return projects
class Reviews(models.Model):
design = models.PositiveSmallIntegerField(default=0)
usability = models.PositiveSmallIntegerField(default=0)
content = models.PositiveSmallIntegerField(default=0)
author = models.ForeignKey(User, on_delete=models.CASCADE, default='1')
    project = models.ForeignKey(Projects, on_delete=models.CASCADE, default='1')
def __str__(self):
return f'{self.design}'
#
# class Comment(models.Model):
# number = models.IntegerField(default=0)
# comment = models.CharField(max_length=200)
# date = models.DateTimeField(auto_now_add=True)
# author = models.ForeignKey(User, on_delete=models.CASCADE, default='1')
# project = models.ForeignKey(Projects, on_delete=models.CASCADE, default='project_folder/responsive.jpg')
#
# def __str__(self):
# return f'{self.username}'
#
# class Meta:
# ordering = ['-date']
#
# @classmethod
# def get_all_comments(cls):
# comments = Comment.objects.all()
# return comments
|
nilq/baby-python
|
python
|
import torch
import torch.utils.data
import os
import numpy as np
from PIL import Image
# Explicit imports of the helpers this module actually uses (was a star import)
from utils.util import (list_reader, list_reader_2, list_reader_all,
                        load_pickle, process_image)
def loaderAndResize(path):
return Image.open(path).resize((128, 128))
def loader(path):
return Image.open(path)
class GlassandAttrFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, list_wo_g, list_w_g,
transform = None, list_reader = list_reader_all,
loader = loader):
self.face_img_root = face_img_root
self.face_list_wo_g = list_reader(list_wo_g)
self.face_list_w_g = list_reader(list_w_g)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
none_occ_attr_list = self.face_list_wo_g[index][1 : ]
face_name = self.face_list_wo_g[index][0]
none_occ_img = self.loader(os.path.join(self.face_img_root, face_name))
none_occ_attr = [int(none_occ_attr_list[0]), int(none_occ_attr_list[16]),
int(none_occ_attr_list[22]), int(none_occ_attr_list[24]),
int(none_occ_attr_list[30]), int(none_occ_attr_list[20]),
int(none_occ_attr_list[39])]
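        # Assumption: the annotation row follows the CelebA 40-attribute order,
        # so indices 0, 16, 22, 24, 30, 20 and 39 select specific face
        # attributes; their -1/1 labels are remapped to 0/1 below.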
for i in range(len(none_occ_attr)):
if none_occ_attr[i] == -1:
none_occ_attr[i] = 0
idx2 = np.random.randint(0, len(self.face_list_w_g))
occ_attr_list = self.face_list_w_g[idx2][1 : ]
occ_name = self.face_list_w_g[idx2][0]
occ_img = self.loader(os.path.join(self.face_img_root, occ_name))
occ_attr = [int(occ_attr_list[0]), int(occ_attr_list[16]),
int(occ_attr_list[22]), int(occ_attr_list[24]),
int(occ_attr_list[30]), int(occ_attr_list[20]),
int(occ_attr_list[39])]
for i in range(len(occ_attr)):
if occ_attr[i] == -1:
occ_attr[i] = 0
if self.transform is not None:
occ_img = self.transform(occ_img)
none_occ_img = self.transform(none_occ_img)
sample = {'none_occ_img': none_occ_img,
'occ_img': occ_img,
'occ_attr': torch.from_numpy(np.array(occ_attr)),
'none_occ_attr': torch.from_numpy(np.array(none_occ_attr)),
}
return sample
def __len__(self):
return len(self.face_list_wo_g)
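# Hedged usage sketch (paths, list files and the transform are placeholders):
#
#     from torchvision import transforms
#     dataset = GlassandAttrFaceImageLoader('data/faces', 'attrs_wo_glasses.txt',
#                                           'attrs_w_glasses.txt',
#                                           transform=transforms.ToTensor())
#     loader = torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)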
class RandomFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, img_list,
transform = None, list_reader = list_reader_2,
loader = loader):
self.face_img_root = face_img_root
self.face_list, self.label = list_reader(img_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_name = self.face_list[index]
face_label_ori = self.label[index]
face_label_des = 1
img = self.loader(os.path.join(self.face_img_root, face_name))
if self.transform is not None:
img = self.transform(img)
sample = {'img': img,
'label_ori': face_label_ori,
'label_des': face_label_des,
}
return sample
def __len__(self):
return len(self.face_list)
class GlassFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, list_wo_g, list_w_g,
transform = None, list_reader = list_reader,
loader = loader):
self.face_img_root = face_img_root
self.face_list_wo_g = list_reader(list_wo_g)
self.face_list_w_g = list_reader(list_w_g)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_name = self.face_list_wo_g[index]
none_occ_img = self.loader(os.path.join(self.face_img_root, face_name))
occ_name = self.face_list_w_g[np.random.randint(0, len(self.face_list_w_g))]
occ_img = self.loader(os.path.join(self.face_img_root, occ_name))
if self.transform is not None:
occ_img = self.transform(occ_img)
none_occ_img = self.transform(none_occ_img)
sample = {'none_occ_img': none_occ_img,
'occ_img': occ_img
}
return sample
def __len__(self):
return len(self.face_list_wo_g)
class OccFaceImageLoader(torch.utils.data.Dataset):
def __init__(self, face_img_root, face_name_list, occ_img_root,
occ_name_list, transform = None, list_reader = list_reader_all,
loader = loader):
self.face_img_root = face_img_root
self.face_list = list_reader(face_name_list)
self.occ_img_root = occ_img_root
self.occ_list = list_reader(occ_name_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
none_occ_attr_list = self.face_list[index][1 : ]
face_name = self.face_list[index][0]
none_occ_img = self.loader(os.path.join(self.face_img_root, face_name))
none_occ_attr = [int(none_occ_attr_list[0]), int(none_occ_attr_list[16]),
int(none_occ_attr_list[22]), int(none_occ_attr_list[24]),
int(none_occ_attr_list[30]), int(none_occ_attr_list[20]),
int(none_occ_attr_list[39])]
for i in range(len(none_occ_attr)):
if none_occ_attr[i] == -1:
none_occ_attr[i] = 0
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_img_root, occ_name))
if occ_name[0] == 'm':
occ_type = occ_name.split()[0]
else:
occ_type = occ_name.split('_')[0]
occ_face_img = process_image(none_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_face_img = self.transform(occ_face_img)
none_occ_img = self.transform(none_occ_img)
sample = {'none_occ_img': none_occ_img,
'occ_img': occ_face_img,
'occ_attr': torch.from_numpy(np.array(none_occ_attr)),
'none_occ_attr': torch.from_numpy(np.array(none_occ_attr)),
}
return sample
def __len__(self):
return len(self.face_list)
class OccFaceImageMixLoader(torch.utils.data.Dataset):
def __init__(self, face_wo_occ_root, face_wo_occ_list,
occ_root, occ_list,
face_w_occ_root, face_w_occ_list,
transform = None, loader = loader):
self.face_wo_occ_root = face_wo_occ_root
self.face_wo_occ_list = load_pickle(face_wo_occ_list)
self.occ_root = occ_root
self.occ_list = load_pickle(occ_list)
self.face_w_occ_root = face_w_occ_root
self.face_w_occ_list = load_pickle(face_w_occ_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_wo_occ_attr_list = self.face_wo_occ_list[index][1 : ]
face_wo_occ_img = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_wo_occ_img = face_wo_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_wo_occ_attr = [int(face_wo_occ_attr_list[0]), int(face_wo_occ_attr_list[16]),
int(face_wo_occ_attr_list[22]), int(face_wo_occ_attr_list[24]),
int(face_wo_occ_attr_list[30]), int(face_wo_occ_attr_list[20]),
int(face_wo_occ_attr_list[39])]
index1 = np.random.randint(0, len(self.face_w_occ_list))
face_w_occ_attr_list = self.face_w_occ_list[index1][1 : ]
face_w_occ_img = self.loader(
os.path.join(self.face_w_occ_root, self.face_w_occ_list[index1][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_w_occ_img = face_w_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr = [int(face_w_occ_attr_list[0]), int(face_w_occ_attr_list[16]),
int(face_w_occ_attr_list[22]), int(face_w_occ_attr_list[24]),
int(face_w_occ_attr_list[30]), int(face_w_occ_attr_list[20]),
int(face_w_occ_attr_list[39])]
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_root, occ_name))
occ_type = occ_name.split('_')[0]
occ_img_syn = process_image(face_wo_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_img_syn = self.transform(occ_img_syn)
face_wo_occ_img = self.transform(face_wo_occ_img)
face_w_occ_img = self.transform(face_w_occ_img)
sample = {'face_wo_occ_img': face_wo_occ_img,
'occ_img_syn': occ_img_syn,
'face_wo_occ_attr': torch.from_numpy(np.array(face_wo_occ_attr)),
'face_w_occ_img': face_w_occ_img,
'face_w_occ_attr': torch.from_numpy(np.array(face_w_occ_attr)),
'name': self.face_w_occ_list[index1][0],
}
return sample
def __len__(self):
return len(self.face_w_occ_list)
class OccFaceImageMixLoader_test(torch.utils.data.Dataset):
def __init__(self, face_wo_occ_root, face_wo_occ_list,
occ_root, occ_list,
face_w_occ_root, face_w_occ_list,
transform = None, loader = loader):
self.face_wo_occ_root = face_wo_occ_root
self.face_wo_occ_list = load_pickle(face_wo_occ_list)
self.occ_root = occ_root
self.occ_list = load_pickle(occ_list)
self.face_w_occ_root = face_w_occ_root
self.face_w_occ_list = load_pickle(face_w_occ_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
face_wo_occ_attr_list = self.face_wo_occ_list[index][1 : ]
face_wo_occ_img = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index][0]))
top_x = 8
top_y = 8
face_wo_occ_img = face_wo_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_wo_occ_attr = [int(face_wo_occ_attr_list[0]), int(face_wo_occ_attr_list[16]),
int(face_wo_occ_attr_list[22]), int(face_wo_occ_attr_list[24]),
int(face_wo_occ_attr_list[30]), int(face_wo_occ_attr_list[20]),
int(face_wo_occ_attr_list[39])]
index1 = np.random.randint(0, len(self.face_w_occ_list))
face_w_occ_attr_list = self.face_w_occ_list[index1][1 : ]
face_w_occ_img = self.loader(
os.path.join(self.face_w_occ_root, self.face_w_occ_list[index1][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_w_occ_img = face_w_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr = [int(face_w_occ_attr_list[0]), int(face_w_occ_attr_list[16]),
int(face_w_occ_attr_list[22]), int(face_w_occ_attr_list[24]),
int(face_w_occ_attr_list[30]), int(face_w_occ_attr_list[20]),
int(face_w_occ_attr_list[39])]
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_root, occ_name))
occ_type = occ_name.split('_')[0]
occ_img_syn = process_image(face_wo_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_img_syn = self.transform(occ_img_syn)
face_wo_occ_img = self.transform(face_wo_occ_img)
face_w_occ_img = self.transform(face_w_occ_img)
sample = {'face_wo_occ_img': face_wo_occ_img,
'occ_img_syn': occ_img_syn,
'face_wo_occ_attr': torch.from_numpy(np.array(face_wo_occ_attr)),
'face_w_occ_img': face_w_occ_img,
'face_w_occ_attr': torch.from_numpy(np.array(face_w_occ_attr)),
'name': self.face_w_occ_list[index1][0],
}
return sample
def __len__(self):
return len(self.face_w_occ_list)
class OccFaceImageMixLoaderV2(torch.utils.data.Dataset):
def __init__(self, face_wo_occ_root, face_wo_occ_list,
occ_root, occ_list,
face_w_occ_root, face_w_occ_list,
transform = None, loader = loader):
self.face_wo_occ_root = face_wo_occ_root
self.face_wo_occ_list = load_pickle(face_wo_occ_list)
self.occ_root = occ_root
self.occ_list = load_pickle(occ_list)
self.face_w_occ_root = face_w_occ_root
self.face_w_occ_list = load_pickle(face_w_occ_list)
self.loader = loader
self.transform = transform
def __getitem__(self, index):
####################
face_wo_occ_attr_list = self.face_wo_occ_list[index][1 : ]
face_wo_occ_img = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_wo_occ_img = face_wo_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_wo_occ_attr = [int(face_wo_occ_attr_list[0]), int(face_wo_occ_attr_list[16]),
int(face_wo_occ_attr_list[22]), int(face_wo_occ_attr_list[24]),
int(face_wo_occ_attr_list[30]), int(face_wo_occ_attr_list[20]),
int(face_wo_occ_attr_list[39])]
####################
index1 = np.random.randint(0, len(self.face_w_occ_list))
face_w_occ_attr_list = self.face_w_occ_list[index1][1 : ]
face_w_occ_img = self.loader(
os.path.join(self.face_w_occ_root, self.face_w_occ_list[index1][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_w_occ_img = face_w_occ_img.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr = [int(face_w_occ_attr_list[0]), int(face_w_occ_attr_list[16]),
int(face_w_occ_attr_list[22]), int(face_w_occ_attr_list[24]),
int(face_w_occ_attr_list[30]), int(face_w_occ_attr_list[20]),
int(face_w_occ_attr_list[39])]
####################
index2 = np.random.randint(0, len(self.face_wo_occ_list))
face_w_occ_attr_adv_list = self.face_wo_occ_list[index2][1 : ]
face_wo_occ_img_adv = self.loader(
os.path.join(self.face_wo_occ_root, self.face_wo_occ_list[index2][0]))
top_x = np.random.randint(0, 16)
top_y = np.random.randint(0, 16)
face_wo_occ_img_adv = face_wo_occ_img_adv.crop((top_x, top_y, top_x + 128, top_y + 128))
face_w_occ_attr_adv = [int(face_w_occ_attr_adv_list[0]), int(face_w_occ_attr_adv_list[16]),
int(face_w_occ_attr_adv_list[22]), int(face_w_occ_attr_adv_list[24]),
int(face_w_occ_attr_adv_list[30]), int(face_w_occ_attr_adv_list[20]),
int(face_w_occ_attr_adv_list[39])]
###################
occ_name = self.occ_list[np.random.randint(0, len(self.occ_list))][0]
occ_img = self.loader(os.path.join(self.occ_root, occ_name))
occ_type = occ_name.split('_')[0]
occ_img_syn = process_image(face_wo_occ_img, occ_img, occ_type)
if self.transform is not None:
occ_img_syn = self.transform(occ_img_syn)
face_wo_occ_img = self.transform(face_wo_occ_img)
face_w_occ_img = self.transform(face_w_occ_img)
face_wo_occ_img_adv = self.transform(face_wo_occ_img_adv)
sample = {
'face_w_syn_occ_img': occ_img_syn,
'face_w_syn_occ_attr': torch.from_numpy(np.array(face_wo_occ_attr)),
'face_w_syn_occ_img_GT': face_wo_occ_img,
'face_wo_occ_img_adv': face_wo_occ_img_adv,
'face_wo_occ_attr_adv': torch.from_numpy(np.array(face_w_occ_attr_adv)),
'face_w_occ_img': face_w_occ_img,
'face_w_occ_attr': torch.from_numpy(np.array(face_w_occ_attr)),
}
return sample
def __len__(self):
return len(self.face_w_occ_list)
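# Minimal usage sketch (file names are hypothetical; assumes the pickled
# lists have the [name, attr, ...] row layout consumed above). Note that
# __getitem__ indexes face_wo_occ_list while __len__ reports
# len(face_w_occ_list), so the two lists are presumably equal in length.
# import torchvision.transforms as T
# dataset = OccFaceImageMixLoader('data/faces', 'wo_occ.pkl',
#                                 'data/occluders', 'occ.pkl',
#                                 'data/faces_occ', 'w_occ.pkl',
#                                 transform=T.ToTensor())
# loader = torch.utils.data.DataLoader(dataset, batch_size=16, shuffle=True)
# batch = next(iter(loader))  # dict of image tensors and attribute vectors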
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import pytest
from pytest import approx
import igrf13
time = "2010-07-12"
def test_igrf13():
mag = igrf13.igrf(time, 65, 85, 0, model=12)
assert mag.north.item() == approx(9295.100256)
assert mag.east.item() == approx(2560.199706)
assert mag.down.item() == approx(59670.251893)
assert mag.total.item() == approx(60444.126863)
assert mag.incl.item() == approx(80.821738)
assert mag.decl.item() == approx(15.399442)
# def test_igrf11():
#
# mag = igrf11.igrf(time, 65, 85, 0, model=11)
#
# assert mag.north.item() == approx(9301.523160)
# assert mag.east.item() == approx(2563.450424)
# assert mag.down.item() == approx(59666.132881)
# assert mag.total.item() == approx(60441.186489)
#
# assert mag.incl.item() == approx(80.814513)
# assert mag.decl.item() == approx(15.407924)
if __name__ == "__main__":
pytest.main([__file__])
|
nilq/baby-python
|
python
|
import logging
import sdk_cmd
import sdk_tasks
import shakedown
from tests import config
log = logging.getLogger(__name__)
def broker_count_check(count, service_name=config.SERVICE_NAME):
def fun():
try:
if len(sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'broker list', json=True)) == count:
return True
except Exception:
pass
return False
shakedown.wait_for(fun)
def restart_broker_pods(service_name=config.SERVICE_NAME):
for i in range(config.DEFAULT_BROKER_COUNT):
pod_name = '{}-{}'.format(config.DEFAULT_POD_TYPE, i)
task_name = '{}-{}'.format(pod_name, config.DEFAULT_TASK_NAME)
broker_id = sdk_tasks.get_task_ids(service_name, task_name)
restart_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'pod restart {}'.format(pod_name), json=True)
assert len(restart_info) == 2
assert restart_info['tasks'][0] == task_name
sdk_tasks.check_tasks_updated(service_name, task_name, broker_id)
sdk_tasks.check_running(service_name, config.DEFAULT_BROKER_COUNT)
def replace_broker_pod(service_name=config.SERVICE_NAME):
pod_name = '{}-0'.format(config.DEFAULT_POD_TYPE)
task_name = '{}-{}'.format(pod_name, config.DEFAULT_TASK_NAME)
broker_0_id = sdk_tasks.get_task_ids(service_name, task_name)
sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'pod replace {}'.format(pod_name))
sdk_tasks.check_tasks_updated(service_name, task_name, broker_0_id)
sdk_tasks.check_running(service_name, config.DEFAULT_BROKER_COUNT)
# wait till all brokers register
broker_count_check(config.DEFAULT_BROKER_COUNT, service_name=service_name)
def create_topic(topic_name, service_name=config.SERVICE_NAME):
# Get the list of topics that exist before we create a new topic
topic_list_before = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic list', json=True)
create_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic create {}'.format(topic_name), json=True)
log.info(create_info)
assert ('Created topic "%s".\n' % topic_name in create_info['message'])
if '.' in topic_name or '_' in topic_name:
assert ("topics with a period ('.') or underscore ('_') could collide." in create_info['message'])
topic_list_after = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic list', json=True)
new_topics = set(topic_list_after) - set(topic_list_before)
assert topic_name in new_topics
topic_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic describe {}'.format(topic_name), json=True)
assert len(topic_info) == 1
assert len(topic_info['partitions']) == config.DEFAULT_PARTITION_COUNT
def delete_topic(topic_name, service_name=config.SERVICE_NAME):
delete_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic delete {}'.format(topic_name), json=True)
assert len(delete_info) == 1
assert delete_info['message'].startswith('Output: Topic {} is marked for deletion'.format(topic_name))
topic_info = sdk_cmd.svc_cli(config.PACKAGE_NAME, service_name, 'topic describe {}'.format(topic_name), json=True)
assert len(topic_info) == 1
assert len(topic_info['partitions']) == config.DEFAULT_PARTITION_COUNT
def assert_topic_lists_are_equal_without_automatic_topics(expected, actual):
"""Check for equality in topic lists after filtering topics that start with
an underscore."""
filtered_actual = list(filter(lambda x: not x.startswith('_'), actual))
assert expected == filtered_actual
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
""" testunit ๅบ็ก็ฑป
@Time : 2020/4/10 ไธๅ1:03
@File : testbase.py
@author : pchaos
@license : Copyright(C), pchaos
@Contact : p19992003#gmail.com
"""
import unittest
import datetime
import QUANTAXIS as qa
from .testbase import TestingBase
class qaTestingBase(TestingBase):
"""unittest base class for QA
"""
@classmethod
def userInit(cls):
"""็จๆทๅๅงๅ
"""
cls.code = '000300'
dateStart = datetime.date(2005, 3, 1)
dateEnd = datetime.date(2017, 3, 31)
cls.dataFrame = qa.QA_fetch_index_day_adv(cls.code, start=dateStart, end=dateEnd)
@classmethod
def userEnd(cls):
"""class็ปๆ๏ผ็จๆท้ๆพ่ตๆบ
"""
if cls.dataFrame is not None:
cls.dataFrame = None
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import pytest
import os
import csv
import tempfile
from datetime import datetime
from nart.writer.builtins.csvwriter import CSVWriter
from nart.model.nartdata import NartData
from nart.model.nartitem import NartItem
@pytest.fixture
def csvwriter_fixture():
"""
Creates a file path for the CSV repo and removes it after the test finishes.
:return: str. filepath. The file path for the CSV repo.
"""
fd, filepath = tempfile.mkstemp()
os.close(fd)
yield filepath
if os.path.exists(filepath):
os.remove(filepath)
def test_csvwriter_success(csvwriter_fixture):
"""
Success-path test for CSVWriter.
:param csvwriter_fixture: the fixture.
"""
filepath = csvwriter_fixture
rank1 = NartItem(1, 'test1')
rank2 = NartItem(2, 'test2')
keywords = NartData(datetime.now(), [rank1, rank2])
writer = CSVWriter(path=filepath, append_if_exist=True)
writer.write(keywords)
assert os.path.exists(writer.path)
with open(writer.path, mode='r', encoding='utf-8') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
reader_count = 0
for row in reader:
reader_count = reader_count + 1
assert row[1] == 'test1'
assert row[2] == 'test2'
assert reader_count == 1
|
nilq/baby-python
|
python
|
import torchvision.transforms as transforms
config = {
'params': {
"backbone": {
"kernel_size": 3,
"output_dim": 128,
"input_dim": 3,
"stride": 2,
"padding": 1,
"out_img_size": 16
},
"primary_capsules": {
"kernel_size": 1,
"stride": 1,
"input_dim": 128,
"caps_dim": 36,
"num_caps": 32,
"padding": 0,
"out_img_size": 16
},
"capsules": [{
"type": "CONV",
"num_caps": 32,
"caps_dim": 36,
"kernel_size": 3,
"stride": 2,
"matrix_pose": True,
"out_img_size": 7
}, {
"type": "CONV",
"num_caps": 32,
"caps_dim": 36,
"kernel_size": 3,
"stride": 1,
"matrix_pose": True,
"out_img_size": 5
}, {
"type": "FC",
"num_caps": 20,
"caps_dim": 36,
"matrix_pose": True
}],
"class_capsules": {
"num_caps": 100,
"caps_dim": 36,
"matrix_pose": True
}
},
"transform_train":
transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465),
(0.2023, 0.1994, 0.2010)),
]),
"transform_test":
transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465),
(0.2023, 0.1994, 0.2010)),
])
}
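# Note: the normalization constants above are the commonly used CIFAR channel
# statistics, and the 100 class capsules suggest a CIFAR-100 target (an
# inference from the values, not stated in this file).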
|
nilq/baby-python
|
python
|
"""
This file is part of tendril
See the COPYING, README, and INSTALL files for more information
"""
import os
import imp
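# NOTE: the stdlib `imp` module used below is deprecated (removed in
# Python 3.12); importlib provides the modern equivalents.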
dirname, fname = os.path.split(os.path.abspath(__file__))
def import_(filename):
(path, name) = os.path.split(filename)
(name, ext) = os.path.splitext(name)
(f, filename, data) = imp.find_module(name, [path])
return imp.load_module(name, f, filename, data)
def get_test_object(testst, offline=False):
if '.' in testst:
modname, clsname = testst.rsplit('.', 1)
elif ':' in testst:
modname, clsname = testst.split(':')
clsname = 'Test' + clsname
else:
modname = testst
clsname = 'Test' + testst
try:
mod = import_(os.path.join(dirname, modname))
cls = getattr(mod, clsname)
instance = cls(offline=offline)
return instance
except ImportError:
raise ValueError("Test Unrecognized :" + testst)
|
nilq/baby-python
|
python
|
# Copyright (C) 2021, Raffaello Bonghi <raffaello@rnext.it>
# All rights reserved
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import launch
from launch.actions import IncludeLaunchDescription
from launch.substitutions import Command, LaunchConfiguration
from launch.launch_description_sources import PythonLaunchDescriptionSource
import launch_ros
import os
def generate_launch_description():
pkg_share = launch_ros.substitutions.FindPackageShare(package='nanosaur_description').find('nanosaur_description')
default_rviz_config_path = os.path.join(pkg_share, 'rviz/urdf.rviz')
joint_state_publisher_node = launch_ros.actions.Node(
package='joint_state_publisher',
executable='joint_state_publisher',
name='joint_state_publisher',
condition=launch.conditions.UnlessCondition(LaunchConfiguration('gui'))
)
joint_state_publisher_gui_node = launch_ros.actions.Node(
package='joint_state_publisher_gui',
executable='joint_state_publisher_gui',
name='joint_state_publisher_gui',
condition=launch.conditions.IfCondition(LaunchConfiguration('gui'))
)
rviz_node = launch_ros.actions.Node(
package='rviz2',
executable='rviz2',
name='rviz2',
output='screen',
arguments=['-d', LaunchConfiguration('rvizconfig')],
)
return launch.LaunchDescription([
launch.actions.DeclareLaunchArgument(name='gui', default_value='True',
description='Flag to enable joint_state_publisher_gui'),
launch.actions.DeclareLaunchArgument(name='rvizconfig', default_value=default_rviz_config_path,
description='Absolute path to rviz config file'),
# Nanosaur description launch
# https://answers.ros.org/question/306935/ros2-include-a-launch-file-from-a-launch-file/
IncludeLaunchDescription(
PythonLaunchDescriptionSource(
[pkg_share, '/launch/description.launch.py'])),
joint_state_publisher_node,
joint_state_publisher_gui_node,
rviz_node
])
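# Example invocation (launch-file name here is hypothetical):
# ros2 launch nanosaur_description display.launch.py gui:=False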
# EOF
|
nilq/baby-python
|
python
|
import logging
from typing import TYPE_CHECKING, Optional
from web3.types import BlockIdentifier
from rotkehlchen.assets.asset import Asset
from rotkehlchen.constants.assets import A_ALETH, A_ETH, A_WETH
from rotkehlchen.constants.ethereum import SADDLE_ALETH_POOL
from rotkehlchen.constants.misc import EXP18
from rotkehlchen.errors.price import PriceQueryUnsupportedAsset
from rotkehlchen.inquirer import Inquirer
from rotkehlchen.interfaces import CurrentPriceOracleInterface
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.types import Price
if TYPE_CHECKING:
from rotkehlchen.chain.ethereum.manager import EthereumManager
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
class SaddleOracle(CurrentPriceOracleInterface):
"""
Provides logic to use saddle as oracle for certain assets
"""
def __init__(self, eth_manager: 'EthereumManager'):
super().__init__(oracle_name='saddle')
self.eth_manager = eth_manager
def rate_limited_in_last(
self,
seconds: Optional[int] = None, # pylint: disable=unused-argument
) -> bool:
return False
def get_price(
self,
from_asset: Asset,
to_asset: Asset,
block_identifier: BlockIdentifier,
) -> Price:
"""
NOTE: At the moment this function can only be used for ALETH.
The reason is how Saddle pools are engineered and the lack of an
automated way to discover them. ALETH was chosen because this is
the only place where its price can be queried.
The code queries the pool for the ALETH -> ETH swap rate and then
fetches the ETH price to derive the ALETH price.
"""
log.debug(f'Querying saddle for price of {from_asset} to {to_asset}')
if from_asset != A_ALETH:
raise PriceQueryUnsupportedAsset(
f'{from_asset} is not a valid asset for the Saddle oracle',
)
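# Sketch of the call semantics below (assuming the standard Saddle swap ABI
# calculateSwap(tokenIndexFrom, tokenIndexTo, dx)): it quotes the ETH amount
# received for 1 ALETH (1e18 wei); dividing by EXP18 normalizes the result.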
aleth_eth_price = SADDLE_ALETH_POOL.call(
ethereum=self.eth_manager,
method_name='calculateSwap',
arguments=[1, 0, 1000000000000000000],
block_identifier=block_identifier,
)
aleth_eth_price /= EXP18
if to_asset not in (A_WETH, A_ETH):
eth_price = Inquirer().find_price(A_ETH, to_asset)
return aleth_eth_price * eth_price
return aleth_eth_price
def query_current_price(self, from_asset: Asset, to_asset: Asset) -> Price:
"""At the moment until more pools get implemented this function is limited to ALETH
Refer to the docstring of `get_price`.
"""
return self.get_price(
from_asset=from_asset,
to_asset=to_asset,
block_identifier='latest',
)
|
nilq/baby-python
|
python
|
from .version import __version__ # scTenifoldXct.__version__
from scTenifoldXct.core import scTenifoldXct
from scTenifoldXct.visualization import get_Xct_pairs, plot_XNet
from scTenifoldXct.merge import merge_scTenifoldXct
|
nilq/baby-python
|
python
|
# Copyright 2014-2017 Lionheart Software LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import operator
import urllib.request, urllib.parse, urllib.error
import logging
from . import exceptions
PINBOARD_API_ENDPOINT = "https://api.pinboard.in/v1/"
PINBOARD_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
PINBOARD_ALTERNATE_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
PINBOARD_DATE_FORMAT = "%Y-%m-%d"
class Bookmark(object):
def __init__(self, payload, token):
self.description = payload['description']
self.extended = payload['extended']
self.url = payload['href']
self.meta = payload['meta']
self.hash = payload['hash']
self.shared = payload['shared'] == "yes"
self.toread = payload['toread'] == "yes"
self.tags = payload['tags'].split(' ')
self.time = Pinboard.datetime_from_string(payload['time'])
self.token = token
def __eq__(self, other):
return other.hash == self.hash
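# NOTE: __eq__ compares the content hash while __ne__ (below) compares the
# `meta` change signature, so `a != b` is not simply `not (a == b)`.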
def __ne__(self, other):
return other.meta != self.meta
def __gt__(self, other):
return self.time > other.time
def __lt__(self, other):
return self.time < other.time
def __ge__(self, other):
return self.time >= other.time
def __le__(self, other):
return self.time <= other.time
@property
def pinboard(self):
return Pinboard(self.token)
def __repr__(self):
parse_result = urllib.parse.urlparse(self.url)
return "<Bookmark description=\"{}\" url=\"{}\">".format(self.description, parse_result.netloc)
def save(self, update_time=False):
params = {
'url': self.url,
'description': self.description,
'extended': self.extended,
'tags': self.tags,
'shared': "yes" if self.shared else "no",
'toread': "yes" if self.toread else "no",
}
if update_time:
params['dt'] = self.time
return self.pinboard.posts.add(**params)
def delete(self):
return self.pinboard.posts.delete(url=self.url)
class Tag(object):
def __init__(self, key, value):
self.name = key
self.count = int(value)
def __repr__(self):
return "<Tag name=\"{}\" count={}>".format(self.name, self.count)
class Pinboard(object):
DATE_FIELDS = ["dt", "date", "update_time", "created_at", "updated_at"]
BOOLEAN_FIELDS = ["replace", "shared", "toread"]
SPACE_DELIMITED_FIELDS = ["tag", "tags"]
def __init__(self, token):
self.token = token
def __getattr__(self, k):
return PinboardCall(self.token, k)
@staticmethod
def date_from_string(value):
return datetime.datetime.strptime(value, PINBOARD_DATE_FORMAT).date()
@staticmethod
def string_from_date(d):
return d.strftime(PINBOARD_DATE_FORMAT)
@staticmethod
def datetime_from_string(value):
try:
return datetime.datetime.strptime(value, PINBOARD_DATETIME_FORMAT)
except ValueError:
return datetime.datetime.strptime(value, PINBOARD_ALTERNATE_DATETIME_FORMAT)
@staticmethod
def string_from_datetime(dt):
return dt.strftime(PINBOARD_DATETIME_FORMAT)
class PinboardCall(object):
def __init__(self, token, path):
self.token = token
self.components = [path]
def __getattr__(self, k):
self.components.append(k)
return self
def __getitem__(self, k):
self.components.append(k)
return self
def __call__(self, *args, **kwargs):
url = "{}{}".format(PINBOARD_API_ENDPOINT, "/".join(self.components))
parse_response = kwargs.get('parse_response', True)
if 'parse_response' in kwargs:
del kwargs['parse_response']
params = kwargs.copy()
for field in Pinboard.DATE_FIELDS:
if field in kwargs:
try:
params[field] = Pinboard.string_from_datetime(kwargs[field])
except Exception:
params[field] = kwargs[field]
for field in Pinboard.BOOLEAN_FIELDS:
if field in kwargs:
if isinstance(kwargs[field], bool):
params[field] = "yes" if kwargs[field] else "no"
else:
params[field] = kwargs[field]
for field in Pinboard.SPACE_DELIMITED_FIELDS:
if field in kwargs:
if isinstance(kwargs[field], list):
params[field] = ' '.join(kwargs[field])
else:
params[field] = kwargs[field]
params['format'] = "json"
params['auth_token'] = self.token
if 'meta' in params:
params['meta'] = 1 if kwargs['meta'] else 0
query_string = urllib.parse.urlencode(params)
final_url = "{}?{}".format(url, query_string)
try:
request = urllib.request.Request(final_url)
opener = urllib.request.build_opener(urllib.request.HTTPSHandler)
response = opener.open(request)
except urllib.error.HTTPError as e:
error_mappings = {
401: exceptions.PinboardAuthenticationError,
403: exceptions.PinboardForbiddenError,
500: exceptions.PinboardServerError,
503: exceptions.PinboardServiceUnavailable,
}
if e.code in error_mappings:
Error = error_mappings[e.code]
raise Error(e.url, e.code, e.msg, e.hdrs, e.fp)
raise
else:
if parse_response:
json_response = json.load(response)
for field in Pinboard.DATE_FIELDS:
if field in json_response:
json_response[field] = Pinboard.datetime_from_string(json_response[field])
if self.components == ["posts", "all"]:
return [Bookmark(k, self.token) for k in json_response]
elif self.components in [["posts", "get"], ["posts", "recent"]]:
json_response['posts'] = [Bookmark(k, self.token) for k in json_response['posts']]
elif self.components == ["posts", "dates"]:
json_response['dates'] = {Pinboard.date_from_string(k): int(v) \
for k, v in list(json_response['dates'].items())}
elif self.components == ["posts", "update"]:
return json_response['update_time']
elif self.components == ["tags", "get"]:
tags = [Tag(k, v) for k, v in list(json_response.items())]
tags.sort(key=operator.attrgetter('name'))
return tags
elif self.components == ["notes", "list"]:
for note in json_response['notes']:
for field in Pinboard.DATE_FIELDS:
if field in note:
note[field] = Pinboard.datetime_from_string(note[field])
elif 'result_code' in json_response:
if json_response['result_code'] == "done":
return True
else:
raise exceptions.PinboardError(json_response['result_code'])
return json_response
else:
return response
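# Minimal usage sketch (token format "user:HEXTOKEN"; values hypothetical):
# pb = Pinboard("username:A1B2C3D4E5")
# for bookmark in pb.posts.recent(count=5)['posts']:
#     print(bookmark.description, bookmark.url)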
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# check_apcaccess.py - a script for checking a APC UPS
# using the apcaccess utility
#
# 2016 By Christian Stankowic
# <info at stankowic hyphen development dot net>
# https://github.com/stdevel
#
# Enhanced and error corrections by Chris Johnston 2017
# Tested on BX1300G & RS 1500G but should work on most APC UPS
# <HireChrisJohnston at g mail>
#
#> Added detection of TONBATT
#> Corrected misc errors
#> Enhanced charting & messaging
from optparse import OptionParser, OptionGroup
import os
import subprocess
import logging
import re
#set logger
LOGGER = logging.getLogger("check_apcaccess")
#global variables
ups_info={}
state=0
def check_value(val, desc, warn, crit, reverse=False):
#compares value to thresholds and sets codes
LOGGER.debug("Comparing '{0}' ({1}) to warning/critical thresholds {2}/{3} (reverse: {4})".format(val, desc, warn, crit, reverse))
snip=""
if reverse == False:
if val > crit:
#critical
snip="{0} *Critical* ({1})".format(desc, val)
set_code(2)
elif val > warn:
#warning
snip="{0} -Warning ({1})".format(desc, val)
set_code(1)
else: snip="{0}: {1}".format(desc, val)
else:
if val < crit:
#critical
snip="{0} *Critical* ({1})".format(desc, val)
set_code(2)
elif val < warn:
#warning
snip="{0} -Warning ({1})".format(desc, val)
set_code(1)
else: snip="{0}: {1}".format(desc, val)
return snip
def set_code(code):
#set result code
global state
if code > state: state = code
def get_return_str():
#get return string
if state == 3: return "UNKNOWN"
elif state == 2: return "CRITICAL"
elif state == 1: return "WARNING"
else: return "OK"
def get_value(key, isFloat=False):
#get value from apcaccess information
if isFloat:
temp = re.findall(r'[-+]?[0-9]*\.?[0-9]*', ups_info[key])
return float(temp[0])
else: return ups_info[key]
def calc_consumption():
#calculate power consumption
load = get_value('LOADPCT', True)
out = get_value('NOMPOWER', True)
power_cons = int(out*(load/100))
LOGGER.debug("MATH says, based on the information provided, it is assumed that the power consumption might be ~{0} watts".format(power_cons))
return power_cons
def check_ups():
#check UPS
global state
#get _all_ the values
starttime = get_value('STARTTIME')
status = get_value('STATUS')
battv = get_value('BATTV', True)
LOGGER.debug("BattV: {0}".format(battv))
load = get_value('LOADPCT', True)
batt = get_value('BCHARGE', True)
xfers = get_value('NUMXFERS')
tot_onbat = get_value('CUMONBATT')
on_bat = get_value('TONBATT')
linev = get_value('LINEV')
if options.time_warn and options.time_crit: time = get_value('TIMELEFT', True)
power_cons = calc_consumption()
#Check if line level is high
curr_line_level = get_value('LINEV', True)
if options.line_level > curr_line_level:
snip_line_level = " Line Level low {2} {0} for {1}".format(status,on_bat,linev)
set_code(1)
else: snip_line_level = status
#check Batt V
snip_battv = check_value(battv, "Voltage", options.battv_warn, options.battv_crit, True) +'v'
#check load
snip_load = check_value(load, "Load", options.load_warn, options.load_crit)+ '%'
#check battery charge
snip_batt = check_value(batt, "Charge", options.bat_warn, options.bat_crit, True) +'%'
#check battery time (optional)
if options.time_warn and options.time_crit:
snip_time = check_value(time, "Time Left", options.time_warn, options.time_crit, True) + 'min'
else: snip_time=""
#check power consumption (optional)
if options.consum_warn and options.consum_crit:
snip_consum = check_value(power_cons, "Power consumption", options.consum_warn, options.consum_crit) +'w'
else: snip_consum=""
# get detail
snip_detail ="(Total On Battery: " + tot_onbat + " / #Xfers: " + xfers + " since "+starttime+")"
#get performance data
if options.show_perfdata:
#initialize perfdata
perfdata=" |"
#power consumption
if options.consum_warn and options.consum_crit: perfdata = "{0} 'Consumption'={1}w;{2};{3};;".format(perfdata, power_cons, float(options.consum_warn), float(options.consum_crit))
else: perfdata = "{0} 'Consumption'={1}w;;;".format(perfdata, power_cons)
#voltage
perfdata = "{0} 'Voltage'={1}v;{2};{3};{4};{5}".format(perfdata, battv, float(options.battv_warn), float(options.battv_crit), 11.0, 27.3)
#load
perfdata = "{0} 'Load'={1}%;{2};{3};{4};{5}".format(perfdata, load, float(options.load_warn), float(options.load_crit), 0.0, 100.0)
#battery charge
perfdata = "{0} 'Battery_Charge'={1}%;{2};{3};{4};{5}".format(perfdata, batt, float(options.bat_warn), float(options.bat_crit), 0.0, 100.0)
#battery time left only if user specified the warning and critical values
if options.time_warn and options.time_crit:
perfdata = "{0} 'Battery_Time_Left'={1};{2};{3};;".format(perfdata, time, float(options.time_warn), float(options.time_crit))
else: perfdata=""
#return result
snips = [x for x in [snip_line_level,snip_battv, snip_batt, snip_load,snip_consum,snip_time,snip_detail ] if x != ""]
print "{0}: {1}{2}".format(get_return_str(), str(", ".join(snips)), perfdata)
exit(state)
def run_cmd(cmd=""):
#run the command, it's tricky!
output = subprocess.Popen("LANG=C {0}".format(cmd), shell=True, stdout=subprocess.PIPE, universal_newlines=True).stdout.read()
LOGGER.debug("Output of '{0}' => '{1}'".format(cmd, output))
return output
def get_apcaccess_data():
#get output of apcaccess
global ups_info
raw_data = run_cmd("apcaccess -h" + options.host)
raw_data = raw_data.splitlines()
for line in raw_data:
#parse lines to key/value dict
key=line[:line.find(":")].strip()
value=line[line.find(":")+1:].strip()
LOGGER.debug("Found key '{0}' with value '{1}'".format(key, value))
ups_info[key]=value
if __name__ == "__main__":
#define description, version and load parser
desc='''%prog is used to check a APC UPS using the apcaccess utility.
https://github.com/HireChrisJohnston/nagios-apcupsd'''
parser = OptionParser(description=desc,version="%prog version 1.0.0")
gen_opts = OptionGroup(parser, "Generic options")
mon_opts = OptionGroup(parser, "Monitoring options")
thres_opts = OptionGroup(parser, "Threshold options")
parser.add_option_group(gen_opts)
parser.add_option_group(mon_opts)
parser.add_option_group(thres_opts)
#-d / --debug
gen_opts.add_option("-d", "--debug", dest="debug", default=False, action="store_true", help="enable debugging outputs")
#-P / --enable-perfdata
mon_opts.add_option("-P", "--enable-perfdata", dest="show_perfdata", default=False, action="store_true", help="enables performance data (default: no)")
#-w / --battv-warning
thres_opts.add_option("-w", "--battv-warning", dest="battv_warn", default=24, type=float, metavar="VOLTS", action="store", help="Defines battery voltage warning threshold (default: 24)")
#-W / --battv-critical
thres_opts.add_option("-W", "--battv-critical", dest="battv_crit", default=23.3, type=float, metavar="VOLTS", action="store", help="Defines battery voltage critical threshold (default: 23.3)")
#-c / --temp-critical
#thres_opts.add_option("-c", "--temp-critical", dest="temp_crit", default=55, type=float, metavar="TEMP", action="store", help="Defines temprature critical threshold(defalt: 55)")
#-l / --load-warning
thres_opts.add_option("-l", "--load-warning", dest="load_warn", default=50, type=int, metavar="PERCENT", action="store", help="Defines load warning threshold in percent (default: 50%)")
#-L / --load-critical
thres_opts.add_option("-L", "--load-critical", dest="load_crit", default=80, type=int, metavar="PERCENT", action="store", help="Defines load critical threshold in percent (default: 80%)")
#-b / --battery-warning
thres_opts.add_option("-b", "--battery-warning", dest="bat_warn", default=30, type=int, metavar="PERCENT", action="store", help="Defines battery load warning threshold in percent (default: 30%)")
#-B / --battery-critical
thres_opts.add_option("-B", "--battery-critical", dest="bat_crit", default=15, type=int, metavar="PERCENT", action="store", help="Defines battery load critical threshold in percent (default: 15%)")
#-t / --time-warning
thres_opts.add_option("-t", "--time-warning", dest="time_warn", type=int, metavar="TIME", action="store", help="Defines battery time left warning threshold in minutes (default: empty). If defined you must also define time-critical")
#-T / --time-critical
thres_opts.add_option("-T", "--time-critical", dest="time_crit", type=int, metavar="TIME", action="store", help="Defines battery time left critical threshold in minutes (default: empty). If defined you must also define time-warning")
#-u / --consumption-warning
thres_opts.add_option("-u", "--consumption-warning", dest="consum_warn", type=int, metavar="WATTS", action="store", help="Defines power consumption warning threshold in watts (default: empty)")
#-U / --consumption-critical
thres_opts.add_option("-U", "--consumption-critical", dest="consum_crit", type=int, metavar="WATTS", action="store", help="Defines power consumption critical threshold in watts (default: empty)")
#-H / --host
gen_opts.add_option("-H", "--host", dest="host", type="string", action="store", default="127.0.0.1", help="host of appcupsd")
#-X / --line-level
gen_opts.add_option("-X", "--line-level", dest="line_level", type="int", action="store", default="110", help="Volts of power outlet to detect no power if less than the line level")
#parse arguments
(options, args) = parser.parse_args()
#set logger level
if options.debug:
logging.basicConfig(level=logging.DEBUG)
LOGGER.setLevel(logging.DEBUG)
else:
logging.basicConfig()
LOGGER.setLevel(logging.INFO)
#debug outputs
LOGGER.debug("OPTIONS: {0}".format(options))
#get information
get_apcaccess_data()
#check UPS
check_ups()
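#example invocation (host is hypothetical):
#./check_apcaccess.py -H 192.168.1.50 -P -t 10 -T 5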
|
nilq/baby-python
|
python
|
from tiltfile_runner import run_tiltfile_func
from unittest.mock import Mock
import unittest
import pytest
import yaml
class DockerLocalTest(unittest.TestCase):
def test_delegates_to_local_resource_for_build(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context")
local_resource.assert_any_call(
"my_image_build", "docker build -t my_image -f Dockerfile ././path/to/build/context")
def test_delegates_to_local_resource_for_run(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context")
local_resource.assert_called_with("my_image",
"docker run --rm my_image",
resource_deps=["my_image_build"])
def test_adds_optional_resource_deps_to_run(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context",
runtime_deps=["something", "else"])
local_resource.assert_called_with(
"my_image",
"docker run --rm my_image",
resource_deps=["something", "else", "my_image_build"])
def test_adds_optional_env_vars_to_run(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context",
env_vars={
"DOG": 1,
"CAT": "two"
})
local_resource.assert_called_with(
"my_image",
'docker run --rm -e DOG="1" -e CAT="two" my_image',
resource_deps=["my_image_build"])
def test_adds_optional_run_command_array(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
context="././path/to/build/context",
run_cmd=["sh", "echo", "hi"])
local_resource.assert_called_with(
"my_image",
'docker run --rm my_image sh echo hi',
resource_deps=["my_image_build"])
def test_overrides_dockerfile_for_build(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_local",
mocks={'local_resource': local_resource},
ref="my_image",
dockerfile="another.Dockerfile",
context="././path/to/build/context")
local_resource.assert_any_call(
"my_image_build", "docker build -t my_image -f another.Dockerfile ././path/to/build/context")
class DockerRemoteTest(unittest.TestCase):
def test_delegates_to_docker_build_for_build(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="./path/to/build/context",
readiness_probe=None)
docker_build.assert_called_with("my-image", "./path/to/build/context", dockerfile="Dockerfile")
def test_overrides_dockerfile_for_build(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
dockerfile="another.Dockerfile",
build_context="./path/to/build/context",
readiness_probe=None)
docker_build.assert_called_with("my-image", "./path/to/build/context", dockerfile="another.Dockerfile")
def test_uses_repository_instead_if_provided(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
docker_repo="my.aws/repo",
build_context="././path/to/build/context",
readiness_probe=None)
docker_build.assert_called_with("my.aws/repo", "././path/to/build/context", dockerfile="Dockerfile")
def test_generates_k8_yaml_job_with_defaults_for_image(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context")
expected_spec = yaml.safe_load("""
apiVersion: batch/v1
kind: Job
metadata:
name: my-image
spec:
parallelism: 1
completions: 1
backoffLimit: 0
template:
metadata:
annotations:
sidecar.istio.io/inject: "false"
spec:
containers:
- name: main
image: my-image
readinessProbe:
exec:
command:
- 'false'
initialDelaySeconds: 120
periodSeconds: 120
resources:
requests:
cpu: 1
memory: 2056Mi
limits:
cpu: 1
memory: 2056Mi
restartPolicy: Never
""")
assert k8s_yaml.call_count == 1
print(k8s_yaml.call_args[0][0])
assert yaml.safe_load(k8s_yaml.call_args[0][0]) == expected_spec
def test_can_overwrite_resource_requirements(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
docker_repo="my.aws/repo",
build_context="././path/to/build/context",
cpu="2000m",
memory="4Gi",
readiness_probe=None)
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"resources"] == yaml.safe_load("""
requests:
cpu: 2000m
memory: 4Gi
limits:
cpu: 2000m
memory: 4Gi
""")
def test_includes_image_repo_if_provided(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
docker_repo="my.aws/repo",
build_context="././path/to/build/context",
readiness_probe=None)
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"image"] == "my.aws/repo"
def test_defines_k8_job_namespace_if_provided(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None)
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["metadata"]["namespace"] == "somewhere"
def test_creates_dependent_k8s_resource_for_yaml(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None,
runtime_deps=["a", "b"])
assert k8s_resource.call_count == 1
k8s_resource.assert_called_with("my-image", resource_deps=["a", "b"])
def test_passes_env_vars_to_container_spec(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None,
env_vars={
"DOG": 1,
"CAT": "two"
})
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"env"] == yaml.safe_load("""
- name: DOG
value: 1
- name: CAT
value: two
""")
def test_creates_specified_readiness_probe(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe={"httpGet": {
"path": "/health"
}})
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0][
"readinessProbe"] == yaml.safe_load("""
httpGet:
path: /health
""")
def test_passes_command_array_to_container_spec(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
readiness_probe=None,
run_cmd=["bloop", "--something", "--another-thing"])
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["spec"]["containers"][0]["args"] == [
"bloop", "--something", "--another-thing"
]
def test_adds_custom_pod_annotations(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
build_context="././path/to/build/context",
namespace="somewhere",
annotations={
'custom': 'annotation'
})
assert k8s_yaml.call_count == 1
job = yaml.safe_load(k8s_yaml.call_args[0][0])
assert job["spec"]["template"]["metadata"]["annotations"] == {
"sidecar.istio.io/inject": "false",
'custom': 'annotation'
}
def test_errors_if_resource_name_contains_invalid_char(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
with pytest.raises(Exception):
run_tiltfile_func(
"docker_task/Tiltfile",
"docker_remote",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my_image",
build_context="././path/to/build/context",
)
class DockerTaskTest(unittest.TestCase):
def test_delegates_to_local_resource_for_build(self):
local_resource = Mock()
k8s_yaml = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_task",
mocks={
'local_resource': local_resource,
'k8s_yaml': k8s_yaml
},
ref="my-image",
build_context="././path/to/build/context",
run_remote=False)
local_resource.assert_any_call(
"my-image_build", "docker build -t my-image -f Dockerfile ././path/to/build/context")
assert k8s_yaml.call_count == 0
def test_strips_out_non_local_args(self):
local_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_task",
mocks={'local_resource': local_resource},
ref="my-image",
build_context="././path/to/build/context",
run_remote=False,
namespace="dave",
docker_repo="test",
readiness_probe="1234")
local_resource.assert_any_call(
"my-image_build", "docker build -t my-image -f Dockerfile ././path/to/build/context")
def test_runs_on_remote(self):
docker_build = Mock()
k8s_yaml = Mock()
k8s_resource = Mock()
run_tiltfile_func("docker_task/Tiltfile",
"docker_task",
mocks={
'docker_build': docker_build,
"k8s_yaml": k8s_yaml,
"k8s_resource": k8s_resource
},
ref="my-image",
run_remote=True,
build_context="././path/to/build/context",
readiness_probe=None)
assert k8s_yaml.call_count == 1
|
nilq/baby-python
|
python
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# File : embedding.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 10/03/2018
#
# This file is part of NSCL-PyTorch.
# Distributed under terms of the MIT license.
import torch
import torch.nn as nn
__all__ = ['LearnedPositionalEmbedding']
class LearnedPositionalEmbedding(nn.Embedding):
"""This module learns positional embeddings up to a fixed maximum size.
Padding symbols are ignored, but it is necessary to specify whether padding
is added on the left side (left_pad=True) or right side (left_pad=False).
Adapted from: https://github.com/pytorch/fairseq/blob/master/fairseq/modules/learned_positional_embedding.py.
"""
def __init__(self, num_embeddings, embedding_dim, padding_idx=0, left_pad=False):
super().__init__(num_embeddings, embedding_dim, padding_idx)
self.left_pad = left_pad
def forward(self, input, incremental_state=None):
"""Input is expected to be of size [bsz x seqlen]."""
if incremental_state is not None:
# positions is the same for every token when decoding a single step
positions = input.data.new(1, 1).fill_(self.padding_idx + input.size(1))
else:
positions = make_positions(input.data, self.padding_idx, self.left_pad)
return super().forward(positions)
def max_positions(self):
"""Maximum number of supported positions."""
return self.num_embeddings - self.padding_idx - 1
def make_positions(tensor, padding_idx, left_pad):
"""Replace non-padding symbols with their position numbers.
Position numbers begin at padding_idx+1.
Padding symbols are ignored, but it is necessary to specify whether padding
is added on the left side (left_pad=True) or right side (left_pad=False).
"""
max_pos = padding_idx + 1 + tensor.size(1)
if not hasattr(make_positions, 'range_buf'):
make_positions.range_buf = tensor.new()
make_positions.range_buf = make_positions.range_buf.type_as(tensor)
if make_positions.range_buf.numel() < max_pos:
torch.arange(padding_idx + 1, max_pos, out=make_positions.range_buf)
mask = tensor.ne(padding_idx)
positions = make_positions.range_buf[:tensor.size(1)].expand_as(tensor)
if left_pad:
positions = positions - mask.size(1) + mask.long().sum(dim=1).unsqueeze(1)
return tensor.clone().masked_scatter_(mask, positions[mask])
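# Minimal usage sketch (hypothetical sizes; token ids right-padded with 0):
# emb = LearnedPositionalEmbedding(num_embeddings=128, embedding_dim=16)
# tokens = torch.tensor([[5, 9, 3, 0, 0]])
# out = emb(tokens)  # positions resolve to [[1, 2, 3, 0, 0]]; shape [1, 5, 16]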
|
nilq/baby-python
|
python
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.plant_heating_and_cooling_equipment import HeatPumpWaterToWaterEquationFitHeating
log = logging.getLogger(__name__)
class TestHeatPumpWaterToWaterEquationFitHeating(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_heatpumpwatertowaterequationfitheating(self):
pyidf.validation_level = ValidationLevel.error
obj = HeatPumpWaterToWaterEquationFitHeating()
# alpha
var_name = "Name"
obj.name = var_name
# node
var_source_side_inlet_node_name = "node|Source Side Inlet Node Name"
obj.source_side_inlet_node_name = var_source_side_inlet_node_name
# node
var_source_side_outlet_node_name = "node|Source Side Outlet Node Name"
obj.source_side_outlet_node_name = var_source_side_outlet_node_name
# node
var_load_side_inlet_node_name = "node|Load Side Inlet Node Name"
obj.load_side_inlet_node_name = var_load_side_inlet_node_name
# node
var_load_side_outlet_node_name = "node|Load Side Outlet Node Name"
obj.load_side_outlet_node_name = var_load_side_outlet_node_name
# real
var_rated_load_side_flow_rate = 0.0001
obj.rated_load_side_flow_rate = var_rated_load_side_flow_rate
# real
var_rated_source_side_flow_rate = 0.0001
obj.rated_source_side_flow_rate = var_rated_source_side_flow_rate
# real
var_rated_heating_capacity = 0.0001
obj.rated_heating_capacity = var_rated_heating_capacity
# real
var_rated_heating_power_consumption = 0.0001
obj.rated_heating_power_consumption = var_rated_heating_power_consumption
# real
var_heating_capacity_coefficient_1 = 10.1
obj.heating_capacity_coefficient_1 = var_heating_capacity_coefficient_1
# real
var_heating_capacity_coefficient_2 = 11.11
obj.heating_capacity_coefficient_2 = var_heating_capacity_coefficient_2
# real
var_heating_capacity_coefficient_3 = 12.12
obj.heating_capacity_coefficient_3 = var_heating_capacity_coefficient_3
# real
var_heating_capacity_coefficient_4 = 13.13
obj.heating_capacity_coefficient_4 = var_heating_capacity_coefficient_4
# real
var_heating_capacity_coefficient_5 = 14.14
obj.heating_capacity_coefficient_5 = var_heating_capacity_coefficient_5
# real
var_heating_compressor_power_coefficient_1 = 15.15
obj.heating_compressor_power_coefficient_1 = var_heating_compressor_power_coefficient_1
# real
var_heating_compressor_power_coefficient_2 = 16.16
obj.heating_compressor_power_coefficient_2 = var_heating_compressor_power_coefficient_2
# real
var_heating_compressor_power_coefficient_3 = 17.17
obj.heating_compressor_power_coefficient_3 = var_heating_compressor_power_coefficient_3
# real
var_heating_compressor_power_coefficient_4 = 18.18
obj.heating_compressor_power_coefficient_4 = var_heating_compressor_power_coefficient_4
# real
var_heating_compressor_power_coefficient_5 = 19.19
obj.heating_compressor_power_coefficient_5 = var_heating_compressor_power_coefficient_5
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].name, var_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].source_side_inlet_node_name, var_source_side_inlet_node_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].source_side_outlet_node_name, var_source_side_outlet_node_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].load_side_inlet_node_name, var_load_side_inlet_node_name)
self.assertEqual(idf2.heatpumpwatertowaterequationfitheatings[0].load_side_outlet_node_name, var_load_side_outlet_node_name)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_load_side_flow_rate, var_rated_load_side_flow_rate)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_source_side_flow_rate, var_rated_source_side_flow_rate)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_heating_capacity, var_rated_heating_capacity)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].rated_heating_power_consumption, var_rated_heating_power_consumption)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_1, var_heating_capacity_coefficient_1)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_2, var_heating_capacity_coefficient_2)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_3, var_heating_capacity_coefficient_3)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_4, var_heating_capacity_coefficient_4)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_capacity_coefficient_5, var_heating_capacity_coefficient_5)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_1, var_heating_compressor_power_coefficient_1)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_2, var_heating_compressor_power_coefficient_2)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_3, var_heating_compressor_power_coefficient_3)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_4, var_heating_compressor_power_coefficient_4)
self.assertAlmostEqual(idf2.heatpumpwatertowaterequationfitheatings[0].heating_compressor_power_coefficient_5, var_heating_compressor_power_coefficient_5)
|
nilq/baby-python
|
python
|
#!/usr/bin/python
"""
Runs every day as crontab task to pull down previous day's log from
Google app engine, and uploads it to S3.
"""
import os
import sys
import shutil
import subprocess
import string
from pytz import timezone
from datetime import datetime, timedelta
settings = {
'appcfg' : '<path to gae sdk>/bin/appcfg.sh',
'email' : '<gae account>',
'pwd' : '<gae account password>',
'outdir' : '/tmp/sortbox',
'repo' : 'git@github.com:mustpax/sortbox.git',
'bucket' : '<S3 bucket for logs>',
'access_key' : '',
'secret_key' : '',
}
outdir = settings['outdir']
sortboxdir = os.path.join(outdir, 'sortbox')
logdir = os.path.join(outdir, 'logs')
pacific_tz = timezone('US/Pacific')
def cleanup():
    """
    Deletes the tmp dir if it exists.
    """
    if os.path.exists(outdir):
        shutil.rmtree(outdir)
        print "Deleted %s" % outdir
def clone_repo():
"""
Clones the remote sortbox repository.
"""
cleanup()
subprocess.call("git clone %s %s" % (settings['repo'], sortboxdir), shell=True)
def build_war():
def touch(fname, times=None):
"""
Equivalent to unix touch command
"""
        with open(fname, 'a'):
os.utime(fname, times)
os.chdir(sortboxdir)
# Switch to prod branch
subprocess.call("git checkout prod", shell=True)
# Create secret.conf
secret = os.path.join(sortboxdir, 'conf', 'secret.conf')
touch(secret)
print "Make all"
# Build all
subprocess.call("make all", shell=True)
war_path = os.path.join(outdir, "sortbox.war")
print "Build war file"
# Build war file
subprocess.call("play war -o %s" % war_path, shell=True)
if not os.path.exists(war_path):
print "Failed to create war file"
exit(2)
def export_log():
"""
    Exports the previous day's logs from GAE.
"""
os.chdir(outdir)
if not os.path.exists(logdir):
os.mkdir(logdir)
target = os.path.join(logdir, "raw.txt")
    # Export the log window from GAE (size controlled by --num_days)
subprocess.call("echo %s | %s --num_days=1 --email=%s --severity=1 request_logs sortbox.war %s" \
% (settings['pwd'], settings['appcfg'], settings['email'], target), shell=True)
logfile = os.path.join(logdir, 'raw.txt')
if not os.path.exists(logfile):
print "Failed to download log file"
exit(2)
print "Saved exported log as %s" % logfile
def format_date(date):
format = "%d/%b/%Y"
return date.strftime(format)
def preprocess_log():
os.chdir(logdir)
today = format_date(datetime.now(pacific_tz))
    # Remove today's entries so only the previous day's remain
subprocess.call("grep -va %s raw.txt > log.tmp.txt" % today, shell=True)
# Replace null byte delimiters with new line character
subprocess.call("tr '\\0' '\n' < log.tmp.txt > log.tmp2.txt", shell=True);
    # Remove all lines that start with ':'
subprocess.call("sed '/^:/d' log.tmp2.txt > log.txt", shell=True);
print "Saved preprocessed log as %s" % os.path.join(logdir, 'log.txt')
def upload_log():
"""
Uploads log file to S3.
"""
from boto.s3.connection import S3Connection
from boto.s3.key import Key
yesterday = datetime.now(pacific_tz) - timedelta(1)
logfile = "log_%s.txt" % string.replace(format_date(yesterday), '/', '_')
conn = S3Connection(settings['access_key'], settings['secret_key'])
bucket = conn.create_bucket(settings['bucket'])
k = bucket.get_key(logfile)
if not k:
k = Key(bucket)
k.key = logfile
os.chdir(logdir)
k.set_contents_from_filename('log.txt')
bucket.set_acl('public-read', k)
print "Uploaded log file as %s to S3" % k.name
else:
print "Log file already uploaded."
def pull_log():
now = datetime.now()
print "Start log export: ", now
clone_repo()
build_war()
export_log()
preprocess_log()
upload_log()
cleanup()
def main():
import time
start = time.time()
pull_log()
duration = time.time() - start
print "Finished in %d second(s)." % duration
if __name__ == "__main__":
main()
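# Deployment note (assumption for illustration; the actual path and schedule are
# not specified in the original): the module docstring says this runs daily from
# cron, e.g. an entry such as
#   0 1 * * * /usr/bin/python /path/to/this_script.py >> /var/log/gae_log_pull.log 2>&1
# would run it at 01:00 every day.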
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# Copyright (c) 2017, 2018 Michael De La Rue
# Copyright (c) 2017, 2018 Will Thames
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: rds_instance_facts
version_added: "2.6"
short_description: obtain facts about one or more RDS instances
description:
- obtain facts about one or more RDS instances
options:
db_instance_identifier:
description:
- The RDS instance's unique identifier.
required: false
aliases:
- id
filters:
description:
- A filter that specifies one or more DB instances to describe.
See U(https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBInstances.html)
requirements:
- "python >= 2.7"
- "boto3"
author:
- "Will Thames (@willthames)"
- "Michael De La Rue (@mikedlr)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Get facts about an instance
- rds_instance_facts:
db_instance_identifier: new-database
register: new_database_facts
# Get all RDS instances
- rds_instance_facts:
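# Get facts about instances matching a DescribeDBInstances filter
# (illustrative addition; db-instance-id is a filter name documented by the AWS API)
- rds_instance_facts:
    filters:
      db-instance-id: new-database
  register: filtered_database_facts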
'''
RETURN = '''
instances:
description: List of RDS instances
returned: always
type: complex
contains:
allocated_storage:
description: Gigabytes of storage allocated to the database
returned: always
type: int
sample: 10
auto_minor_version_upgrade:
description: Whether minor version upgrades happen automatically
returned: always
type: bool
sample: true
availability_zone:
description: Availability Zone in which the database resides
returned: always
type: str
sample: us-west-2b
backup_retention_period:
description: Days for which backups are retained
returned: always
type: int
sample: 7
ca_certificate_identifier:
description: ID for the CA certificate
returned: always
type: str
sample: rds-ca-2015
copy_tags_to_snapshot:
description: Whether DB tags should be copied to the snapshot
returned: always
type: bool
sample: false
db_instance_arn:
description: ARN of the database instance
returned: always
type: str
sample: arn:aws:rds:us-west-2:111111111111:db:helloworld-rds
db_instance_class:
description: Instance class of the database instance
returned: always
type: str
sample: db.t2.small
db_instance_identifier:
description: Database instance identifier
returned: always
type: str
sample: helloworld-rds
db_instance_port:
description: Port used by the database instance
returned: always
type: int
sample: 0
db_instance_status:
description: Status of the database instance
returned: always
type: str
sample: available
db_name:
description: Name of the database
returned: always
type: str
sample: management
db_parameter_groups:
description: List of database parameter groups
returned: always
type: complex
contains:
db_parameter_group_name:
description: Name of the database parameter group
returned: always
type: str
sample: psql-pg-helloworld
parameter_apply_status:
description: Whether the parameter group has been applied
returned: always
type: str
sample: in-sync
db_security_groups:
description: List of security groups used by the database instance
returned: always
type: list
sample: []
db_subnet_group:
description: list of subnet groups
returned: always
type: complex
contains:
db_subnet_group_description:
description: Description of the DB subnet group
returned: always
type: str
sample: My database subnet group
db_subnet_group_name:
description: Name of the database subnet group
returned: always
type: str
sample: my-subnet-group
subnet_group_status:
description: Subnet group status
returned: always
type: str
sample: Complete
subnets:
description: List of subnets in the subnet group
returned: always
type: complex
contains:
subnet_availability_zone:
description: Availability zone of the subnet
returned: always
type: complex
contains:
name:
description: Name of the availability zone
returned: always
type: str
sample: us-west-2c
subnet_identifier:
description: Subnet ID
returned: always
type: str
sample: subnet-abcd1234
subnet_status:
description: Subnet status
returned: always
type: str
sample: Active
vpc_id:
description: VPC id of the subnet group
returned: always
type: str
sample: vpc-abcd1234
dbi_resource_id:
description: AWS Region-unique, immutable identifier for the DB instance
returned: always
type: str
sample: db-AAAAAAAAAAAAAAAAAAAAAAAAAA
domain_memberships:
description: List of domain memberships
returned: always
type: list
sample: []
endpoint:
description: Database endpoint
returned: always
type: complex
contains:
address:
description: Database endpoint address
returned: always
type: str
sample: helloworld-rds.ctrqpe3so1sf.us-west-2.rds.amazonaws.com
hosted_zone_id:
description: Route53 hosted zone ID
returned: always
type: str
sample: Z1PABCD0000000
port:
description: Database endpoint port
returned: always
type: int
sample: 5432
engine:
description: Database engine
returned: always
type: str
sample: postgres
engine_version:
description: Database engine version
returned: always
type: str
sample: 9.5.10
iam_database_authentication_enabled:
description: Whether database authentication through IAM is enabled
returned: always
type: bool
sample: false
instance_create_time:
description: Date and time the instance was created
returned: always
type: str
sample: '2017-10-10T04:00:07.434000+00:00'
kms_key_id:
description: KMS Key ID
returned: always
type: str
sample: arn:aws:kms:us-west-2:111111111111:key/abcd1234-0000-abcd-1111-0123456789ab
latest_restorable_time:
description: Latest time to which a database can be restored with point-in-time restore
returned: always
type: str
sample: '2018-05-17T00:03:56+00:00'
license_model:
description: License model
returned: always
type: str
sample: postgresql-license
master_username:
description: Database master username
returned: always
type: str
sample: dbadmin
monitoring_interval:
description: Interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB instance
returned: always
type: int
sample: 0
multi_az:
description: Whether Multi-AZ is on
returned: always
type: bool
sample: false
option_group_memberships:
description: List of option groups
returned: always
type: complex
contains:
option_group_name:
description: Option group name
returned: always
type: str
sample: default:postgres-9-5
status:
description: Status of option group
returned: always
type: str
sample: in-sync
pending_modified_values:
description: Modified values pending application
returned: always
type: complex
contains: {}
performance_insights_enabled:
description: Whether performance insights are enabled
returned: always
type: bool
sample: false
preferred_backup_window:
description: Preferred backup window
returned: always
type: str
sample: 04:00-05:00
preferred_maintenance_window:
description: Preferred maintenance window
returned: always
type: str
sample: mon:05:00-mon:05:30
publicly_accessible:
description: Whether the DB is publicly accessible
returned: always
type: bool
sample: false
read_replica_db_instance_identifiers:
description: List of database instance read replicas
returned: always
type: list
sample: []
storage_encrypted:
description: Whether the storage is encrypted
returned: always
type: bool
sample: true
storage_type:
description: Storage type of the Database instance
returned: always
type: str
sample: gp2
tags:
description: Tags used by the database instance
returned: always
type: complex
contains: {}
vpc_security_groups:
description: List of VPC security groups
returned: always
type: complex
contains:
status:
description: Status of the VPC security group
returned: always
type: str
sample: active
vpc_security_group_id:
description: VPC Security Group ID
returned: always
type: str
sample: sg-abcd1234
'''
from ansible.module_utils.aws.core import AnsibleAWSModule, is_boto3_error_code
from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, boto3_tag_list_to_ansible_dict, AWSRetry, camel_dict_to_snake_dict
try:
import botocore
except ImportError:
pass # handled by AnsibleAWSModule
def instance_facts(module, conn):
instance_name = module.params.get('db_instance_identifier')
filters = module.params.get('filters')
params = dict()
if instance_name:
params['DBInstanceIdentifier'] = instance_name
if filters:
params['Filters'] = ansible_dict_to_boto3_filter_list(filters)
paginator = conn.get_paginator('describe_db_instances')
try:
results = paginator.paginate(**params).build_full_result()['DBInstances']
except is_boto3_error_code('DBInstanceNotFound'):
results = []
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e: # pylint: disable=duplicate-except
module.fail_json_aws(e, "Couldn't get instance information")
for instance in results:
try:
instance['Tags'] = boto3_tag_list_to_ansible_dict(conn.list_tags_for_resource(ResourceName=instance['DBInstanceArn'],
aws_retry=True)['TagList'])
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, "Couldn't get tags for instance %s" % instance['DBInstanceIdentifier'])
return dict(changed=False, instances=[camel_dict_to_snake_dict(instance, ignore_list=['Tags']) for instance in results])
def main():
argument_spec = dict(
db_instance_identifier=dict(aliases=['id']),
filters=dict(type='dict')
)
module = AnsibleAWSModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
conn = module.client('rds', retry_decorator=AWSRetry.jittered_backoff(retries=10))
module.exit_json(**instance_facts(module, conn))
if __name__ == '__main__':
main()
|
nilq/baby-python
|
python
|
# window.py
#
# Copyright 2020 Herpiko Dwi Aguno
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import time
from gi.repository import Gtk
from gi.repository import Gio
from gi.repository import GLib
import threading
@Gtk.Template(resource_path='/org/blankon/blankonWelcome/window.ui')
class BlankonWelcomeWindow(Gtk.ApplicationWindow):
__gtype_name__ = 'BlankonWelcomeWindow'
SkipBackButton = Gtk.Template.Child()
NextButton = Gtk.Template.Child()
Stacks = Gtk.Template.Child()
SpinnerBox = Gtk.Template.Child()
WelcomeBox = Gtk.Template.Child()
MainBox = Gtk.Template.Child()
SeeingBox = Gtk.Template.Child()
HearingBox = Gtk.Template.Child()
TypingBox = Gtk.Template.Child()
PointingBox = Gtk.Template.Child()
SeeingButton = Gtk.Template.Child()
HearingButton = Gtk.Template.Child()
TypingButton = Gtk.Template.Child()
PointingButton = Gtk.Template.Child()
SeeingMagnifierSwitch = Gtk.Template.Child()
SeeingLargeTextSwitch = Gtk.Template.Child()
SeeingHighContrastSwitch = Gtk.Template.Child()
currentView = "welcome"
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.Stacks.set_visible_child(self.WelcomeBox)
self.NextButton.connect("clicked", self.a11y)
self.SkipBackButton.connect("clicked", self.do_skip_back)
self.SeeingButton.connect("clicked", self.show_seeing_box)
self.HearingButton.connect("clicked", self.show_hearing_box)
self.TypingButton.connect("clicked", self.show_typing_box)
self.PointingButton.connect("clicked", self.show_pointing_box)
self.SeeingMagnifierSwitch.connect("state-set", self.toggle_magnifier)
self.SeeingLargeTextSwitch.connect("state-set", self.toggle_large_text)
self.SeeingHighContrastSwitch.connect("state-set", self.toggle_high_contrast)
# Set default values
setting = Gio.Settings.new("org.gnome.desktop.interface")
current_value = setting.get_value("gtk-theme")
print(current_value)
if ("Contrast" in current_value.get_string()):
self.SeeingHighContrastSwitch.set_active(True)
def do_skip_back(self, button):
if self.currentView == "welcome" or self.currentView == "a11y":
self.NextButton.hide()
self.SkipBackButton.hide()
self.Stacks.set_visible_child(self.SpinnerBox)
# Use threading to avoid blocking UI
thread = threading.Thread(target=self.send_analytic)
thread.daemon = True
thread.start()
else:
self.Stacks.set_visible_child(self.MainBox)
self.SkipBackButton.set_label("Finish")
self.currentView = "a11y"
def a11y(self, button):
self.NextButton.hide()
self.Stacks.set_visible_child(self.MainBox)
self.SkipBackButton.set_label("Finish")
self.currentView = "a11y"
def show_seeing_box(self, button):
self.SkipBackButton.show()
self.currentView = "seeing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.SeeingBox)
def show_hearing_box(self, button):
self.SkipBackButton.show()
self.currentView = "hearing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.HearingBox)
def show_typing_box(self, button):
self.SkipBackButton.show()
self.currentView = "typing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.TypingBox)
def show_pointing_box(self, button):
self.SkipBackButton.show()
self.currentView = "pointing"
self.SkipBackButton.set_label("Back")
self.Stacks.set_visible_child(self.PointingBox)
def toggle_magnifier(self, switch, state):
setting = Gio.Settings.new("org.gnome.desktop.a11y.applications")
bool_value = GLib.Variant("b", state)
setting.set_value("screen-magnifier-enabled", bool_value)
def toggle_high_contrast(self, switch, state):
setting = Gio.Settings.new("org.gnome.desktop.interface")
default_value = setting.get_default_value("gtk-theme")
current_value = setting.get_value("gtk-theme")
high_contrast_value = GLib.Variant("s", "HighContrast")
print(state)
print(default_value)
print(current_value)
if (state):
print(high_contrast_value)
setting.set_value("gtk-theme", high_contrast_value)
else:
setting.set_value("gtk-theme", default_value)
def toggle_large_text(self, switch, state):
setting = Gio.Settings.new("org.gnome.desktop.interface")
scale_value = GLib.Variant("d", 1.0)
if (state):
scale_value = GLib.Variant("d", 1.5)
setting.set_value("text-scaling-factor", scale_value)
def send_analytic(self):
print("Send analytic data...")
time.sleep(1)
print("Data sent")
self.close()
|
nilq/baby-python
|
python
|
'''
Module for performing stable matching;
it solves an instance of the stable marriage problem.
This is used as a utility for Text-Media Matching.
'''
class StableMatcher:
    ''' Class to implement stable matching.
    This class implements stable matching
    using the Gale-Shapley algorithm.
    '''
def __init__(
self,
media_preference_for_sentence,
sentence_preference_for_media,
set_size):
self.set_size = set_size
self.media_preference_for_sentence = media_preference_for_sentence
self.sentence_preference_for_media = sentence_preference_for_media
def get_matching(self):
''' returns the matching as a list of 2-tuples'''
return self.__gale_shapley_matching()
def __media_rank_in_sentence_preference(self, sentence_index, media_index):
return self.media_preference_for_sentence[sentence_index].index(
media_index)
def __sentence_has_better_preference(
self, sentence_index, unmatched_index):
return self.__media_rank_in_sentence_preference(
sentence_index, unmatched_index) < \
self.__media_rank_in_sentence_preference(
sentence_index,
self.media_matched_for_sentence[sentence_index])
def __gale_shapley_matching(self):
'''
Finds the stable matching between the text and media
Given two (n,n) matrices of preferences for the set of Sentences, Media
finds the stable matching by running the gale-shapley
matching algorithm
Returns : a list of tuples where each tuple (x,y) means
x = index of sentence
y = index of media
Thus, it returns the indices matched as a list of tuples
'''
# Make the matching optimal for the Media
# -1 denotes it is currently unmatched
self.sentence_matched_for_media = [-1] * self.set_size
self.media_matched_for_sentence = [-1] * self.set_size
self.count_of_unmatched_media = self.set_size
while self.count_of_unmatched_media > 0:
unmatched_media_index = -1 # no index found currently
for i in range(self.set_size):
if self.sentence_matched_for_media[i] == -1:
unmatched_media_index = i
break
for i in self.sentence_preference_for_media[unmatched_media_index]:
# the sentence is unmatched
if self.media_matched_for_sentence[i] == -1:
# we can match the sentence directly
self.media_matched_for_sentence[i] = unmatched_media_index
self.sentence_matched_for_media[unmatched_media_index] = i
self.count_of_unmatched_media -= 1
break
if self.__sentence_has_better_preference(
i,
unmatched_media_index):
# i prefers the current media better
# unmatch media currently matched for sentence i
self.sentence_matched_for_media[
self.media_matched_for_sentence[i]] = -1
self.sentence_matched_for_media[unmatched_media_index] = i
self.media_matched_for_sentence[i] = unmatched_media_index
break
matchings = [(self.sentence_matched_for_media[i], i)
for i in range(self.set_size)]
return matchings
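# Illustrative usage sketch (not part of the original file): a 2x2 instance
# where preferences are lists of indices, most preferred first.
#
#   media_pref = [[0, 1], [1, 0]]     # media ranking for each sentence
#   sentence_pref = [[0, 1], [1, 0]]  # sentence ranking for each media
#   StableMatcher(media_pref, sentence_pref, 2).get_matching()
#   # -> [(0, 0), (1, 1)], i.e. (sentence index, media index) pairs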
|
nilq/baby-python
|
python
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
csv_files = os.listdir(os.getcwd())
csv_files = [f for f in csv_files if "Line" in f and ".csv" in f]
# Determine whether a point is significant:
# |log2 fold change| >= xthr and -log10(adjusted p-value) >= ythr
def isSignificant(xval, yval, xthr=1, ythr=2):
    return abs(xval) >= xthr and abs(yval) >= ythr
# Read Entrez -> Name map
entrezToName = pd.read_csv("EntrezToNameMap.csv", header=0)
for csv_file in csv_files:
print("Processing file {}".format(csv_file))
df = pd.read_csv(csv_file, header=0)
df = df.rename(columns={"Unnamed: 0":"gename"})
x = df['log2FoldChange'].values
    y = df['padj'].values + 1e-5  # small pseudocount to avoid log10(0)
y = -np.log10(y)
significant_idx = [i for i in range(len(x)) if isSignificant(x[i],y[i])]
nonsignificant_idx = [i for i in range(len(x)) if not isSignificant(x[i],y[i])]
# Plot Volcano Plot
plt.figure(figsize=(8,8))
plt.scatter(x[significant_idx], y[significant_idx], c='red', alpha=0.35, label='Significant')
plt.scatter(x[nonsignificant_idx], y[nonsignificant_idx], c='blue', alpha=0.35, label='Nonsignificant')
plt.vlines(-1, 0, 5, linestyles='dashed')
plt.vlines(1, 0, 5, linestyles='dashed')
plt.hlines(2, min(x), max(x), linestyles='dashed')
plt.xlabel('Log2 Fold Change')
plt.ylabel('-log10 (adjusted p-value)')
plt.legend()
    plt.savefig(csv_file.replace(".csv","_volcanoPlot.pdf"))
    plt.close()  # release the figure before the next file
# Save names of significant differentially expressed genes
tmp_df = df.iloc[significant_idx,:].reset_index(drop=True)
final_df = pd.merge(entrezToName, tmp_df, on="gename")
final_df['keggGeneName'] = ["cge:" + str(id) for id in list(final_df['geneid'])] # Required for pathway analysis with ROntoTools
final_df.to_csv(csv_file.replace(".csv","_SignificantGenes.csv"), index=False)
|
nilq/baby-python
|
python
|
x, y = map(int, input().split(" "))
if x == 0 and y == 0:
print("origem")
elif x > 0 and y > 0:
print("1 quadrante")
elif x < 0 and y > 0:
print("2 quadrante")
elif x < 0 and y < 0:
print("3 quadrante")
elif x > 0 and y < 0:
print("4 quadrante")
elif x == 0 and y != 0:
print("Eixo y")
elif x != 0 and y == 0:
print("Eixo x")
else:
print("ta errado")
|
nilq/baby-python
|
python
|
from django.conf.urls import url
from web.views import get_index, fetch
urlpatterns = [
url(r'^$', get_index),
url(r'^fetch/$', fetch),
]
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
def part1(numbers, cards):
for number in numbers:
for card in cards:
mark(card, number)
if has_won(card):
return score(number, card)
def part2(numbers, cards):
for number in numbers:
iter_cards = cards.copy()
for card in iter_cards:
mark(card, number)
if has_won(card) and len(cards) > 1:
cards.remove(card)
elif has_won(card) and len(cards) == 1:
return score(number, card)
def score(number, card):
result = 0
for line in card:
for cell in line:
if cell != 'X':
result += cell
return result * number
def mark(card, value):
for y, line in enumerate(card):
for x, number in enumerate(line):
if number == value:
card[y][x] = 'X'
def has_won(card):
for line in card:
if all([cell == 'X' for cell in line]):
return True
for col in range(0, len(card[0])):
if all([line[col] == 'X' for line in card]):
return True
return False
def parse():
with open("../input/input04.txt") as f:
numbers = [int(word) for word in f.readline().split(',')]
cards = []
for card in f.read().split("\n\n"):
new_card = [line.split() for line in card.strip().split('\n')]
for y, line in enumerate(new_card):
for x, cell in enumerate(line):
new_card[y][x] = int(cell)
cards.append(new_card)
return (numbers, cards)
if __name__ == '__main__':
    (numbers, cards) = parse()
    print("part1 =", part1(numbers, cards))
    # part1 marks cards in place, so re-parse for a fresh set before part2
    (numbers, cards) = parse()
    print("part2 =", part2(numbers, cards))
|
nilq/baby-python
|
python
|
import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from consts.district_type import DistrictType
from consts.event_type import EventType
from datetime import datetime
from database.district_query import DistrictsInYearQuery
from database.event_query import DistrictEventsQuery
from google.appengine.ext import ndb
from database.team_query import DistrictTeamsQuery
from helpers.district_helper import DistrictHelper
from helpers.event_helper import EventHelper
from helpers.model_to_dict import ModelToDict
from models import team
from models.district import District
from models.district_team import DistrictTeam
from models.event import Event
from models.event_team import EventTeam
from models.team import Team
class ApiDistrictControllerBase(ApiBaseController):
def _set_district(self, district, year):
self.district_abbrev = district
self.year = year
@property
def _validators(self):
return [("district_id_validator", "{}{}".format(self.year, self.district_abbrev))]
class ApiDistrictListController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_list_controller_{}" # year
CACHE_VERSION = 3
CACHE_HEADER_LENGTH = 60 * 60 * 24
def __init__(self, *args, **kw):
super(ApiDistrictListController, self).__init__(*args, **kw)
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.year)
@property
def _validators(self):
'''
No validators for this endpoint
'''
return []
def _track_call(self, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/list', year)
def _render(self, year=None):
all_districts = DistrictsInYearQuery(self.year).fetch()
districts = list()
for district in all_districts:
dictionary = dict()
dictionary["key"] = district.abbreviation
dictionary["name"] = district.display_name
districts.append(dictionary)
return json.dumps(districts, ensure_ascii=True)
class ApiDistrictEventsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_events_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60 * 24
def __init__(self, *args, **kw):
super(ApiDistrictEventsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/events', '{}{}'.format(year, district_abbrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev, self.year)
events = DistrictEventsQuery('{}{}'.format(self.year, self.district_abbrev)).fetch()
events = [ModelToDict.eventConverter(event) for event in events]
return json.dumps(events, ensure_ascii=True)
class ApiDistrictRankingsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_rankings_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 61
def __init__(self, *args, **kw):
super(ApiDistrictRankingsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/rankings', '{}{}'.format(year, district_abbrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev, self.year)
if self.year < 2009:
return json.dumps([], ensure_ascii=True)
events_future = DistrictEventsQuery(District.renderKeyName(self.year, district_abbrev)).fetch_async()
        district_teams_future = DistrictTeamsQuery("{}{}".format(self.year, district_abbrev)).fetch_async()
events = events_future.get_result()
if not events:
return json.dumps([], ensure_ascii=True)
EventHelper.sort_events(events)
team_totals = DistrictHelper.calculate_rankings(events, district_teams_future.get_result(), self.year)
rankings = []
current_rank = 1
for key, points in team_totals:
point_detail = {}
point_detail["rank"] = current_rank
point_detail["team_key"] = key
point_detail["event_points"] = {}
for event in points["event_points"]:
event_key = event[0].key_name
point_detail["event_points"][event_key] = event[1]
event_details = Event.get_by_id(event_key)
point_detail["event_points"][event[0].key_name]['district_cmp'] = True if event_details.event_type_enum == EventType.DISTRICT_CMP else False
if "rookie_bonus" in points:
point_detail["rookie_bonus"] = points["rookie_bonus"]
else:
point_detail["rookie_bonus"] = 0
point_detail["point_total"] = points["point_total"]
rankings.append(point_detail)
current_rank += 1
return json.dumps(rankings)
class ApiDistrictTeamsController(ApiDistrictControllerBase):
CACHE_KEY_FORMAT = "apiv2_district_teams_controller_{}_{}" # (district_short, year)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60 * 24
def __init__(self, *args, **kw):
super(ApiDistrictTeamsController, self).__init__(*args, **kw)
self.district_abbrev = self.request.route_kwargs["district_abbrev"]
self.year = int(self.request.route_kwargs["year"] or datetime.now().year)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.district_abbrev, self.year)
def _track_call(self, district_abbrev, year=None):
if year is None:
year = datetime.now().year
self._track_call_defer('district/teams', '{}{}'.format(year, district_abbrev))
def _render(self, district_abbrev, year=None):
self._set_district(district_abbrev, self.year)
district_teams = DistrictTeamsQuery('{}{}'.format(self.year, self.district_abbrev)).fetch()
district_teams_dict = [ModelToDict.teamConverter(team) for team in district_teams]
return json.dumps(district_teams_dict, ensure_ascii=True)
|
nilq/baby-python
|
python
|
# coding=utf-8
from sys import exit
from pytun import *
from scapy.all import *
from MANGLE import *
from FenrirFangs import *
from Autoconf import *
import socket
import select
import time
from struct import *
from binascii import hexlify,unhexlify
class FENRIR:
def __init__(self):
if os.geteuid() != 0:
exit("You need root privileges to play with sockets !")
self.isRunning = False
self.tap = None
self.s = None
self.MANGLE = None
self.hostip = '10.0.0.5'
self.hostmac = '\x5c\x26\x0a\x13\x77\x8a'
#self.hostmac = '\x00\x1d\xe6\xd8\x6f\x02'
self.hostmacStr = '5c:26:0a:13:77:8a'
#self.hostmacStr = "00:1d:e6:d8:6f:02"
self.verbosity = 3
self.scksnd1 = None
self.scksnd2 = None
self.Autoconf = Autoconf()
self.FenrirFangs = FenrirFangs(self.verbosity) #FenrirFangs instance
self.pktsCount = 0
self.LhostIface = 'em1'
self.switchIface = 'eth0'
def createTap(self):
self.tap = TunTapDevice(flags=IFF_TAP|IFF_NO_PI, name='FENRIR')
self.tap.addr = "10.0.0.42"
self.tap.netmask = '255.0.0.0'
self.tap.mtu = 1500
self.tap.hwaddr = '\x00\x11\x22\x33\x44\x55'
self.hwaddrStr = "00:11:22:33:44:55"
self.tap.persist(True)
self.tap.up()
def downTap(self):
if self.tap != None:
self.tap.down()
def bindAllIface(self):
self.s = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.ntohs(0x0003))
def setAttribute(self, attributeName, attributeValue):
if attributeName == "host_ip":
self.hostip = attributeValue
elif attributeName == "host_mac":
self.hostmac = attributeValue
tempStr = hexlify(attributeValue).decode('ascii')
self.hostmacStr = tempStr[:2] + ":" + tempStr[2:4] + ":" + tempStr[4:6] + ":" + tempStr[6:8] + ":" + tempStr[8:10] + ":" + tempStr[-2:]
elif attributeName == "verbosity":
if attributeValue >= 0 and attributeValue <= 3:
self.verbosity = attributeValue
self.FenrirFangs.changeVerbosity(self.verbosity)
else:
return False
elif attributeName == "netIface":
self.switchIface = str(attributeValue)
self.Autoconf.sockNetwork = self.switchIface
elif attributeName == "hostIface":
self.LhostIface = str(attributeValue)
self.Autoconf.ifaceHost = self.LhostIface
else:
return False
def chooseIface(self,pkt) :
if pkt[Ether].dst == self.hwaddrStr :
return 'FENRIR'
elif pkt[Ether].dst == self.hostmacStr or ((pkt[Ether].dst == 'ff:ff:ff:ff:ff:ff' or pkt[Ether].dst == '01:80:c2:00:00:03') and pkt[Ether].src != self.hostmacStr) :
#elif pkt[Ether].dst == 'f8:ca:b8:31:c0:2c' or ((pkt[Ether].dst == 'ff:ff:ff:ff:ff:ff' or pkt[Ether].dst == '01:80:c2:00:00:03') and pkt[Ether].src != 'f8:ca:b8:31:c0:2c') :
return self.LhostIface
else :
return self.switchIface
def sendeth2(self, raw, interface):
self.scksnd1 = socket.socket(socket.AF_PACKET, socket.SOCK_RAW)
self.scksnd2 = socket.socket(socket.AF_PACKET, socket.SOCK_RAW)
self.scksnd1.bind((self.LhostIface, 0))
self.scksnd2.bind((self.switchIface, 0))
if interface == self.LhostIface:
# This is a dirty hotfix for the fragmentation problem; will be fixed later
try:
self.scksnd1.send(raw)
except:
pass
else :
try:
self.scksnd2.send(raw)
except:
pass
return
def initAutoconf(self):
self.hostip, self.hostmacStr = self.Autoconf.startAutoconf()
def initMANGLE(self, stop_event):
self.bindAllIface()
inputs = [self.s, self.tap]
last_mangled_request = []
        mycount = 1  # DECOMMISSIONED: disables the NBT-NS/LLMNR/SMB special cases below
self.MANGLE = MANGLE(self.hostip, self.tap.addr, self.hostmacStr, self.hwaddrStr, self.verbosity) # MANGLE instance init # ip host, ip rogue, mac host, mac rogue
while(not stop_event.is_set()):
try:
inputready,outputready,exceptready = select.select(inputs, [], [])
except select.error, e:
break
except socket.error, e:
break
for socketReady in inputready :
roundstart_time = time.time()
###ย FROM NETWORK ###
if socketReady == self.s :
packet = self.s.recvfrom(1600)
raw_pkt = packet[0]
                    if raw_pkt not in last_mangled_request:  # avoid re-processing packets we already mangled
self.pktsCount += 1
pkt = Ether(packet[0])
if self.FenrirFangs.checkRules(pkt) == True:
if 'IP' in pkt and pkt[IP].dst != '224.0.0.252' and pkt[IP].dst != '10.0.0.255':
self.MANGLE.pktRewriter(pkt, pkt[IP].src, self.MANGLE.rogue, pkt[Ether].src, self.MANGLE.mrogue)
last_mangled_request.append(str(pkt))
#print("PKT in rules")
self.tap.write(str(pkt))
break
elif 'ARP' in pkt and (pkt[Ether].src == self.tap.hwaddr or pkt[ARP].pdst == self.hostip or pkt[ARP].psrc == self.hostip) :
epkt = pkt
elif 'IP' in pkt and (pkt[Ether].src == self.tap.hwaddr or pkt[IP].dst == self.hostip or pkt[IP].src == self.hostip or pkt[IP].dst == '224.0.0.252') :
epkt = pkt
elif 'EAPOL' in pkt :
epkt = pkt
elif 'BOOTP' in pkt :
epkt = pkt
else:
break
                        ##### NBT-NS
                        if not mycount and 'IP' in epkt and (epkt[IP].dst == '10.0.0.255' and epkt[IP].dport == 137) :
                            print "---------- UDP Packet NBT-NS"
                            last_mangled_request.append(str(epkt))
                            self.tap.write(str(epkt))
                        ##### LLMNR
                        elif not mycount and 'IP' in epkt and (epkt[IP].dst == '224.0.0.252' and epkt[IP].dport == 5355) :
                            print "---------- UDP Packet LLMNR"
                            last_mangled_request.append(str(epkt))
                            self.tap.write(str(epkt))
                        ##### end LLMNR / NBT-NS
                        elif not mycount and 'IP' in epkt and epkt[IP].dport == 445 :
                            print "---------- UDP Packet SMB (port 445)"
                            self.MANGLE.pktRewriter(epkt, epkt[IP].src, self.MANGLE.rogue, epkt[Ether].src, self.MANGLE.mrogue)
                            last_mangled_request.append(str(epkt))
                            self.tap.write(str(epkt))
else :
mangled_request = self.MANGLE.Fenrir_Address_Translation(epkt)
ifaceToBeUsed = self.chooseIface(mangled_request)
if ifaceToBeUsed == 'FENRIR' :
self.tap.write(str(mangled_request))
else :
#mangled_request.show2()
last_mangled_request.append(str(mangled_request))
self.sendeth2(str(mangled_request), ifaceToBeUsed)
else :
last_mangled_request.remove(raw_pkt)
                ### FROM FENRIR ###
elif socketReady == self.tap :
self.pktsCount += 1
                    buf = self.tap.read(self.tap.mtu)  # packet coming from the rogue (tap) interface
                    epkt = Ether(buf)  # same de-duplication handling as above
if epkt not in last_mangled_request:
mangled_request = self.MANGLE.Fenrir_Address_Translation(epkt)
ifaceToBeUsed = self.chooseIface(mangled_request)
                        ########### begin LLMNR
#print str(mangled_request.summary()) + " ----------- IN tap socket loop (after MANGLE)"
if 'LLMNRQuery' in mangled_request :
print("IN")
mangled_request[LLMNRQuery].an.rdata = '10.0.0.5'
del mangled_request[IP].chksum
if 'UDP' in mangled_request:
del mangled_request[UDP].chksum
mangled_request = mangled_request.__class__(str(mangled_request))
#ls(mangled_request)
                        ########### end LLMNR
#print(ifaceToBeUsed)
if ifaceToBeUsed == 'FENRIR':
self.tap.write(str(mangled_request))
last_mangled_request.append(mangled_request)
else :
#mangled_request.show2()
###
                            if 'IP' in mangled_request and 1 == 2:  # fragmentation path intentionally disabled (condition is never true)
print("before frag")
frags=fragment(mangled_request, fragsize=500)
print("after frags")
for frag in frags:
frag = frag.__class__(str(frag))
last_mangled_request.append(str(frag))
self.sendeth2(str(frag), ifaceToBeUsed)
#send(frag, iface=ifaceToBeUsed)
else:
if 'IP' in mangled_request:
del mangled_request[IP].len
#mangled_request = mangled_request.__class__(str(mangled_request))
#if 'TCP' in mangled_request:
# new_mangled_request = self.MANGLE.changeSessID(mangled_request)
# mangled_request = new_mangled_request
last_mangled_request.append(str(mangled_request))
#if 'TCP' in mangled_request:
# #print("[[[")
# print(str(mangled_request[TCP].seq) + " : " + str(mangled_request[IP].len))
# print("]]]")
self.sendeth2(str(mangled_request), ifaceToBeUsed)
###
# last_mangled_request.append(str(mangled_request))
# self.sendeth2(str(mangled_request), ifaceToBeUsed)
else:
self.tap.write(str(epkt))
last_mangled_request.remove(epkt)
else :
                    exit('Unexpected socket returned by select()')
|
nilq/baby-python
|
python
|
from click.testing import CliRunner
import unittest
from mock import patch, Mock, PropertyMock
from floyd.cli.version import upgrade
class TestFloydVersion(unittest.TestCase):
"""
Tests cli utils helper functions
"""
def setUp(self):
self.runner = CliRunner()
@patch('floyd.cli.version.pip_upgrade')
@patch('floyd.cli.version.conda_upgrade')
@patch('floyd.cli.utils.sys')
def test_floyd_upgrade_with_standard_python(self, mock_sys, conda_upgrade, pip_upgrade):
mock_sys.version = '2.7.13 (default, Jan 19 2017, 14:48:08) \n[GCC 6.3.0 20170118]'
self.runner.invoke(upgrade)
conda_upgrade.assert_not_called()
pip_upgrade.assert_called_once()
@patch('floyd.cli.version.pip_upgrade')
@patch('floyd.cli.version.conda_upgrade')
@patch('floyd.cli.utils.sys')
def test_floyd_upgrade_with_anaconda_python(self, mock_sys, conda_upgrade, pip_upgrade):
mock_sys.version = '3.6.3 |Anaconda, Inc.| (default, Oct 13 2017, 12:02:49) \n[GCC 7.2.0]'
self.runner.invoke(upgrade)
pip_upgrade.assert_not_called()
conda_upgrade.assert_called_once()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
EnigmaLight Plugin by Speedy1985, 2014
https://github.com/speedy1985
Parts of the code is from DonDavici (c) 2012 and other plugins:
all credits to the coders :-)
EnigmaLight is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
EnigmaLight is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with EnigmaLight. If not, see <http://www.gnu.org/licenses/>.
"""
from enigma import eListboxPythonMultiContent, gFont, RT_HALIGN_LEFT, RT_VALIGN_CENTER
from threading import Thread, Timer
from Screens.Standby import TryQuitMainloop
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.config import config, getConfigListEntry
from Components.Label import Label
from Components.Pixmap import Pixmap
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.HelpMenu import HelpableScreen
from EL_Check import EL_Screen_Check
from __common__ import EnigmaLight_log as log, showMessage, validIP, testDaemonConnectivity, setSymbolic
from __init__ import getCrashFilePath, _ # _ is translation
from EL_PathSelector import EL_Screen_PathSelector
from threading import currentThread
from EL_ThreadHelper import callOnMainThread
import os
#===============================================================================
#
#===============================================================================
class EL_Screen_Settings(Screen, ConfigListScreen, HelpableScreen):
_hasChanged = False
_session = None
skins = None
def __init__(self, session):
log("",self,"Settings Opened succesfull..")
Screen.__init__(self, session)
HelpableScreen.__init__(self)
self.cfglist = []
ConfigListScreen.__init__(self, self.cfglist, session, on_change = self._changed)
self._session = session
self._hasChanged = False
self._hasNetworkChanged = False
self._binTypeChanged = False
self._restartBinary = False
self.controller = None
self.selected = None
self["txt_green"] = Label()
self["btn_green"] = Pixmap()
self["statusbar"] = Pixmap()
self["txt_statusbar"] = Label()
self["txt_statusbar_info"] = Label()
self["help"] = StaticText()
self["setupActions"] = ActionMap(["SetupActions", "ColorActions", "EL_Settings"],
{
"green": self.keySave,
"red": self.keyCancel,
"cancel": self.keyCancel,
"ok": self.ok,
"left": self.keyLeft,
"right": self.keyRight,
"bouquet_up": self.keyBouquetUp,
"bouquet_down": self.keyBouquetDown,
}, -2)
self["txt_green"].setText(_("Save"))
self.arm_box = False
arch = os.popen("uname -m").read()
if 'armv7l' in arch:
self.arm_box = True
self.createSetup()
log("",self,"Finisch layout...")
self["config"].onSelectionChanged.append(self.updateHelp)
self.onLayoutFinish.append(self.finishLayout)
#===========================================================================
#
#===========================================================================
def finishLayout(self):
log("",self,"Layout finisched..")
self.setTitle(_("Settings"))
if not config.plugins.enigmalight.showstatusbar.getValue():
self["statusbar"].hide()
self["txt_statusbar"].hide()
self["txt_statusbar_info"].hide()
else:
self["statusbar"].show()
self["txt_statusbar"].show()
self["txt_statusbar_info"].show()
#===========================================================================
#
#===========================================================================
def setController(self, controller):
self.controller = controller
self.controller.setSession(self.session)
#==========================================================================
# Functions for use from others thread
#==========================================================================
def handleFromThread(self,func,*args):
if args:
callOnMainThread(func,args[0])
else:
callOnMainThread(func)
def printWithThread(self,res):
print "%s :: {%s}" %(res, currentThread().getName())
def setStatusBarInfo(self,text):
#self.printWithThread("setStatusBarInfo())")
self["txt_statusbar_info"].setText(text)
def setStatusBarTxt(self,text):
#self.printWithThread("setStatusBarTxt()")
self["txt_statusbar"].setText(text)
def showStatusBar(self,value):
if value:
self["statusbar"].hide()
self["txt_statusbar_info"].hide()
self["txt_statusbar"].hide()
else:
self["statusbar"].show()
self["txt_statusbar_info"].show()
self["txt_statusbar"].show()
#===========================================================================
#
#===========================================================================
def createSetup(self):
log("",self)
self.cfglist = []
# GENERAL SETTINGS
self.cfglist.append(getConfigListEntry(_("[ General Settings ]"), config.plugins.enigmalight.about, _(" ")))
if self.arm_box:
self.cfglist.append(getConfigListEntry(_('- Type of EnigmaLight binary:'),config.plugins.enigmalight.bintype_arm, _(" ")))
#self.cfglist.append(getConfigListEntry(_('- Type of EnigmaLight binary:'),config.plugins.enigmalight.bintype, _("Here you can select the type of enigmalight, the most receivers can use the fpu version but some receivers can't. For then use the normal version")))
self.configfilepath = getConfigListEntry(_("- Configuration File"), config.plugins.enigmalight.configfilepath, _("Select your configfile, default /etc/enigmalight.conf will be used "))
self.cfglist.append(self.configfilepath)
self.cfglist.append(getConfigListEntry(_('- Run EnigmaLight as server when lights are off:'),config.plugins.enigmalight.server, _("Run EnigmaLight as Server for Boblight or other clients ")))
#self.cfglist.append(getConfigListEntry(_('- Check for update, press OK\r'),config.plugins.enigmalight.clickOK, _("Press OK to check for update.. "))),
self.cfglist.append(getConfigListEntry(_('- Show message when turn on/off lights:'),config.plugins.enigmalight.message_onoff, _("Show a messagebox when you turn on/off the lights ")))
        self.cfglist.append(getConfigListEntry(_('- Enable lights on boot:'),config.plugins.enigmalight.autostart, _("Automatically turn the lights on at boot ")))
        self.cfglist.append(getConfigListEntry(_('- Cluster Leds:'),config.plugins.enigmalight.cluster, _("Cluster [X] LEDs as one LED.\nBy default each LED has its own color; with this option you can bundle/cluster 2 -> 10 LEDs.")))
        self.cfglist.append(getConfigListEntry(_('- Delay:'), config.plugins.enigmalight.delay, _("Some TVs are slower than the lights. With this option you can delay the output by 1 -> 20 frames.")))
        self.cfglist.append(getConfigListEntry(_('- Interval:'), config.plugins.enigmalight.interval, _("How fast EnigmaLight will run.\n0.01 = 15 -> 40fps | 0.10 = 10fps | 0.20 = 5fps: ")))
self.cfglist.append(getConfigListEntry(_('- 3D Mode:'), config.plugins.enigmalight.m_3dmode, _("Turn on/off 3D Mode, SBS or TAB")))
self.cfglist.append(getConfigListEntry(_('- Default lightmode:'),config.plugins.enigmalight.mode, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Standby Mode:'),config.plugins.enigmalight.standbymode, _("Turn off lights or use moodlamp in standby ")))
self.cfglist.append(getConfigListEntry(_('- Color order:'), config.plugins.enigmalight.color_order, _(" Set the order as given in enigmalight.conf.")))
self.cfglist.append(getConfigListEntry(_("[ Blackbars ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Remove Blackbars top and bottom:'),config.plugins.enigmalight.blackbar_h, _("Remove horizontal blackbars from lights.")))
self.cfglist.append(getConfigListEntry(_('- Remove Blackbars left and right:'),config.plugins.enigmalight.blackbar_v, _("Remove vertical blackbars from lights.")))
        self.cfglist.append(getConfigListEntry(_('- Delay before remove:'), config.plugins.enigmalight.blackbar_f, _("Count from 0 to the given number;\nif the blackbars are still there, remove them.\nIf EnigmaLight runs at 10fps and you want to wait 10s before removal, set it to 100")))
#getConfigListEntry(_('Switch on/off lights when TV turns on/off:'), config.plugins.enigmalight.hdmicec_enabled),
#Network
self.cfglist.append(getConfigListEntry(_("[ Network Settings ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Enable network mode (connect with other daemon):'), config.plugins.enigmalight.network_onoff, _("Use enigmalight as client and connect with other daemon over network (not for local use)")))
if config.plugins.enigmalight.network_onoff.value is True:
self.cfglist.append(getConfigListEntry(_('- Host ipaddress:'), config.plugins.enigmalight.address, _(" ")))
self.cfglist.append(getConfigListEntry(_('- Daemon port:'), config.plugins.enigmalight.port, _(" ")))
#Timer
self.cfglist.append(getConfigListEntry(_("[ Timer Settings ]"), config.plugins.enigmalight.about, _(" ")))
        self.cfglist.append(getConfigListEntry(_('- Use Timer:'), config.plugins.enigmalight.timer_onoff, _("Turn the lights on/off at a given time ")))
if config.plugins.enigmalight.timer_onoff.value is True:
self.cfglist.append(getConfigListEntry(_('- Don\'t turn lights off/on in standby:'), config.plugins.enigmalight.timer_standby_onoff, _("Disable timer function in standbymode ")))
self.cfglist.append(getConfigListEntry(_("- Enable lights:"), config.plugins.enigmalight.time_start, _("Time when lights go on ")))
self.cfglist.append(getConfigListEntry(_("- Disable lights:"), config.plugins.enigmalight.time_end, _("Time when lights go off ")))
#server
self.cfglist.append(getConfigListEntry(_("[ Remote ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_("- Use remoteserver:"), config.plugins.enigmalight.remote_server, _("Control EnigmaLight from browser")))
if config.plugins.enigmalight.remote_server.value:
self.cfglist.append(getConfigListEntry(_("- Remoteserver Port:"), config.plugins.enigmalight.remote_port, _("Show status at bottomscreen fps, cpu usage and currentmode")))
#Debug
self.cfglist.append(getConfigListEntry(_("[ Misc ]"), config.plugins.enigmalight.about, _(" ")))
self.cfglist.append(getConfigListEntry(_("- Show statusbar on bottom of screen:"), config.plugins.enigmalight.showstatusbar, _("Show status at bottomscreen fps, currentmode and other info")))
if config.plugins.enigmalight.showstatusbar.getValue():
self.cfglist.append(getConfigListEntry(_("- Remove statusbar from tuningscreen:"), config.plugins.enigmalight.showstatusbar_tuning, _("Remove the statusbar from colortuning screen")))
self.cfglist.append(getConfigListEntry(_("- Show errormessages:"), config.plugins.enigmalight.message_error_onoff, _("Turn on if you want to see error information")))
self.cfglist.append(getConfigListEntry(_("- Debug-Logging > /tmp/enigmalight_gui.log:"), config.plugins.enigmalight.EnableEventLog, ""))
# self.cfglist.append(getConfigListEntry(_("- Log folder path:"), config.plugins.enigmalight.logfolderpath, _("Default log wil be saved at /tmp/enigmalight_gui.log")))
# self.cfglist.append(self.logfolderpath)
self["config"].list = self.cfglist
#self["config"].l.setList(self.cfglist)
#===========================================================================
#
#===========================================================================
def _changed(self):
self._hasChanged = True
self.controller.changeValue(self["config"].getCurrent()[1])
if self["config"].getCurrent()[1] == config.plugins.enigmalight.address or self["config"].getCurrent()[1] == config.plugins.enigmalight.port or self["config"].getCurrent()[1] == config.plugins.enigmalight.network_onoff:
self._hasNetworkChanged = True
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.EnableEventLog:
self._hasNetworkChanged = False
self.saveAll()
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.remote_server or self["config"].getCurrent()[1] == config.plugins.enigmalight.remote_port:
if config.plugins.enigmalight.remote_server.value:
self.controller.StartServer()
else:
self.controller.StopServer()
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.bintype_arm:
self.saveAll()
self._binTypeChanged = True
#===========================================================================
#
#===========================================================================
def updateHelp(self):
cur = self["config"].getCurrent()
self["help"].text = cur and cur[2] or " "
#===========================================================================
#
#===========================================================================
def ok(self):
cur = self["config"].getCurrent()
if cur == self.configfilepath:
self.session.openWithCallback(self.savePathConfig,EL_Screen_PathSelector,self.configfilepath[1].value, "configfile", "Select configfile")
elif self["config"].getCurrent()[1] == config.plugins.enigmalight.clickOK:
EL_Screen_Check(self.session).checkForUpdate(self.controller)
self.controller.setStatusBarInfo(_("Check for update..."))
self.controller.checkedForUpdates = True
#===========================================================================
#
#===========================================================================
def savePathConfig(self, pathValue, myType):
log("",self)
log("",self,"pathValue: " + str(pathValue))
log("",self,"type: " + str(myType))
if pathValue is not None:
if myType == "configfile":
self.configfilepath[1].value = pathValue
self._restartBinary = True
if pathValue != None:
message = self.session.openWithCallback(self.restartEnigmaLight,MessageBox,_("To reload the configfile EnigmaLight needs a restart, restart now ?"), MessageBox.TYPE_YESNO)
message.setTitle(_("Reload configfile ?"))
config.plugins.enigmalight.save()
def restartEnigmaLight(self,answer):
log("",self)
#first kill enigmalight
if answer:
self.controller.killEnigmalight(None,self.KillEnigmaLightDone)
def restartEnigma2(self,answer):
log("",self)
#first kill enigmalight
if answer:
self.session.open(TryQuitMainloop, 3)
def KillEnigmaLightDone(self):
log("",self)
setSymbolic() #set new symbolic if needed
self.controller.Control("grabber","start")
self.close(None)
#===========================================================================
#
#===========================================================================
def keySave(self):
log("",self)
#check ip if network is true, before save
if config.plugins.enigmalight.network_onoff.getValue():
#check ip
if not validIP(str(config.plugins.enigmalight.address.getText())):
showMessage(self.session,_("Ip address %s is not accepted, check your input and try again.") %(str(config.plugins.enigmalight.address.getText())),"W")
else:
#check connection
if not testDaemonConnectivity(config.plugins.enigmalight.address.getText(),config.plugins.enigmalight.port.value):
showMessage(self.session,_("Enigmalight can't connect with %s:%s,\ncheck your input and try again.") %(str(config.plugins.enigmalight.address.getText()),str(config.plugins.enigmalight.port.getValue())),"W")
else:
showMessage(self.session,_("Test Connection with %s:%s, succesfull!") %(str(config.plugins.enigmalight.address.getText()),str(config.plugins.enigmalight.port.getValue())),"I")
self.saveAll()
message = self.session.openWithCallback(self.startClient,MessageBox,_("Do you want to (re)start the client and connect with %s:%s ?") %(str(config.plugins.enigmalight.address.getText()),str(config.plugins.enigmalight.port.getValue())), MessageBox.TYPE_YESNO)
message.setTitle(_("(Re)start client ?"))
else:
self.saveAll()
if self._hasNetworkChanged:
self._hasNetworkChanged = False
if self.controller.lightsEnabled:
self.controller.killEnigmalight(None,None)
message = self.session.openWithCallback(self.startGrabber,MessageBox,_("Do you want to (re)start the client ?"), MessageBox.TYPE_YESNO)
message.setTitle(_("(Re)start client ?"))
elif self._binTypeChanged:
message = self.session.openWithCallback(self.restartEnigmaLight,MessageBox,_("Type of enigmalight has changed, Start this type of Enigmalight ?"), MessageBox.TYPE_YESNO)
message.setTitle(_("Start ?"))
else:
self.close(None)
#===========================================================================
#
#===========================================================================
def startClient(self, answer):
log("",self)
if answer is True:
            self.controller.killEnigmalight(None,self.controller.switchtoNetwork)  # pass the callback itself; calling it here would reconnect before the daemon is killed
else:
self.close()
def startGrabber(self, answer):
log("",self)
if answer is True:
self.controller.Control("grabber","start")
else:
self.close()
#===========================================================================
#
#===========================================================================
def keyLeft(self):
log("",self)
ConfigListScreen.keyLeft(self)
self.createSetup()
#===========================================================================
#
#===========================================================================
def keyRight(self):
log("",self)
ConfigListScreen.keyRight(self)
self.createSetup()
#===========================================================================
#
#===========================================================================
def keyBouquetUp(self):
log("",self)
self["config"].instance.moveSelection(self["config"].instance.pageUp)
#===========================================================================
#
#===========================================================================
def keyBouquetDown(self):
log("",self)
self["config"].instance.moveSelection(self["config"].instance.pageDown)
|
nilq/baby-python
|
python
|
#! /usr/bin/env python3
import os, sys, time, re
pid = os.getpid()
os.write(1, ("About to fork (pid:%d)\n" % pid).encode())
rc = os.fork()
if rc < 0:
os.write(2, ("fork failed, returning %d\n" % rc).encode())
sys.exit(1)
elif rc == 0: # child
os.write(1, ("Child: My pid==%d. Parent's pid=%d\n" %
(os.getpid(), pid)).encode())
args = ["wc", "p3-exec.py"]
    for directory in re.split(":", os.environ['PATH']): # try each directory in the path
        program = "%s/%s" % (directory, args[0])
os.write(1, ("Child: ...trying to exec %s\n" % program).encode())
try:
os.execve(program, args, os.environ) # try to exec program
except FileNotFoundError: # ...expected
pass # ...fail quietly
os.write(2, ("Child: Could not exec %s\n" % args[0]).encode())
sys.exit(1) # terminate with error
else: # parent (forked ok)
os.write(1, ("Parent: My pid=%d. Child's pid=%d\n" %
(pid, rc)).encode())
    childPid, status = os.wait()  # wait() returns (pid, raw status word)
    os.write(1, ("Parent: Child %d terminated with exit code %d\n" %
                 (childPid, os.WEXITSTATUS(status))).encode())
|
nilq/baby-python
|
python
|
import sys
import math
import random
class leds:
def __init__(self, call):
self.call = call
    # Shared lookup table for the 4-byte command frames used below.
    COLOR_CODES = {"white": 0x01, "red": 0x02, "yellow": 0x03, "green": 0x04,
                   "blue": 0x05, "purple": 0x06, "black": 0x07}

    def _show(self, command, color, index):
        # Frame layout: [0x18, command, color code, LED index].
        # index may arrive as an int or a numeric string; "random" picks 1-7.
        data = [0x18, command, 0x05, 0x02]
        data[2] = self.COLOR_CODES.get(color, data[2])
        data[3] = random.randint(1, 7) if index == "random" else int(index)
        self.call.blewrite(data)
        self.call.blewait()

    def show_next(self, color, index):
        self._show(0x05, color, index)

    def show_previous(self, color, index):
        self._show(0x04, color, index)

    def show_all(self, color, index):
        self._show(0x02, color, index)
def show_single(self, index, r, g, b):
data = [0x18, 0x08, 0x00, 0x00, 0x00, 0x00]
data[2] = int(index)-1
data[3] = r
data[4] = g
data[5] = b
self.call.blewrite(data)
self.call.blewait()
def color(self, value):
digit = list(map(str, range(10))) + list("abcdef")
if isinstance(value, tuple):
string = '#'
for i in value:
a1 = i // 16
a2 = i % 16
string += digit[a1] + digit[a2]
return string
elif isinstance(value, str):
a1 = digit.index(value[1]) * 16 + digit.index(value[2])
a2 = digit.index(value[3]) * 16 + digit.index(value[4])
a3 = digit.index(value[5]) * 16 + digit.index(value[6])
return [a1, a2, a3]
    def trun_ring(self, buf, col):
        # Append the (r, g, b) triplet for `col` to the frame buffer.
        buf.extend(self.color(col))
        return buf
    def show_ring(self, led1, led2, led3, led4, led5, led6, led7, led8, led9, led10, led11, led12):
        data = [0x18, 0x07]
        for led in (led1, led2, led3, led4, led5, led6,
                    led7, led8, led9, led10, led11, led12):
            data = self.trun_ring(data, led)
        self.call.blewrite(data)
        self.call.blewait()
def clear(self):
data = [0x18, 0x03, 0x00, 0x00, 0x00]
self.call.blewrite(data)
self.call.blewait()
    ANIMATIONS = {"spoondrift": 0x01, "meteor": 0x02, "rainbow": 0x03,
                  "firefly": 0x04, "colorwipe": 0x05, "breathe": 0x06}

    def show_animation(self, mode):
        data = [0x18, 0x06, 0x00]
        if mode == "random":
            data[2] = random.randint(1, 6)
        else:
            data[2] = self.ANIMATIONS.get(mode, data[2])
        self.call.blewrite(data)
        self.call.blewait()
def show_all_hex(self, color):
self.show_ring(color, color, color, color, color, color,
color, color, color, color, color, color)
def show_single_hex(self, index, color):
if(math.isinf(index)):
index = 0
elif(math.isnan(index)):
index = 0
else:
index = int(index)
if(index == 0):
data = [0x18, 0x03]
data = self.trun_ring(data, color)
self.call.blewrite(data)
self.call.blewait()
else:
if(index > 0):
index = index - 1
index = index % 12
if(index < 0):
index = 13 + index
index = index % 12
data = [0x18, 0x08, index]
data = self.trun_ring(data, color)
self.call.blewrite(data)
self.call.blewait()
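# --- Usage sketch (hypothetical transport) ------------------------------------
# The `call` argument is only assumed to expose blewrite(list_of_ints) and
# blewait(), as used above; a stub stands in for the real BLE transport so the
# class can be exercised without hardware.
class _StubCall:
    def blewrite(self, data):
        print("BLE write:", [hex(b) for b in data])

    def blewait(self):
        pass


if __name__ == "__main__":
    ring = leds(_StubCall())
    ring.show_all("blue", 3)            # all LEDs blue at index 3
    ring.show_single_hex(1, "#ff0000")  # first LED red from a hex color string
    ring.clear()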
|
nilq/baby-python
|
python
|
#!/usr/bin/python
from UcsSdk import *
import time
# This script shows how to monitor UCS Manager events and define your own call back to take specific action on the respective events.
ucsm_ip = '0.0.0.0'
user = 'username'
password = 'password'
def callback_all(mce):
print 'Received a New Event with ClassId: ' + str(mce.mo.classId)
print "ChangeList: ", mce.changeList
print "EventId: ", mce.eventId
def callback_lsServer(mce):
print 'Received a New Service Profile Event: ' + str(mce.mo.classId)
print "ChangeList: ", mce.changeList
print "EventId: ", mce.eventId
try:
handle = UcsHandle()
    handle.Login(ucsm_ip, user, password)
    # Add an event handle "ev_all" to monitor the events generated by UCS Manager for any of the ClassIds
ev_all = handle.AddEventHandler()
# Get the list of active event handles.
handle.GetEventHandlers()
# Remove an event handle "ev_all"
handle.RemoveEventHandler(ev_all)
# Use your own callback method to take specific action on respective events.
ev_all_callback = handle.AddEventHandler(callBack = callback_all)
handle.RemoveEventHandler(ev_all_callback)
# Add an event handle to filter events based on classId = lsServer
ev_lsServer = handle.AddEventHandler(classId = "LsServer", callBack = callback_lsServer)
handle.RemoveEventHandler(ev_lsServer)
# loop that keeps the script running for us to get events/callbacks
while True:
time.sleep(5)
handle.Logout()
except Exception, err:
print "Exception:", str(err)
import traceback, sys
print '-'*60
traceback.print_exc(file=sys.stdout)
print '-'*60
handle.Logout()
|
nilq/baby-python
|
python
|
import yaml
import sys
import os
import time
import re
import copy
import pprint
"""
For each possible rule path
"""
class ParserError(ValueError):
pass
class Context(object):
def __init__(self,level,name,parent=None):
self.level = level
self.name = name
self.parent = parent
if self.parent:
self.url = self.parent.url+'.'+self.name
else:
self.url = self.name
    def debug(self, msg):
        # Debug output is disabled; remove the `return` below to re-enable it.
        return
        print "{}{}: {}".format(" "*self.level,self.name,msg)
class State(object):
def __init__(self,s, pos=0):
self.s = s
self.parent = None
self.store = {}
self.result = None
self.current_node = []
self.root = self.current_node
self.pos = pos
@property
def line(self):
return len(self.s[:self.pos].split("\n"))
@property
def col(self):
return len(self.s[:self.pos].split("\n")[-1])+1
def copy(self,):
state = State(self.s, self.pos)
state.parent = self
state.store = copy.deepcopy(self.store)
state.current_node = self.current_node
state.root = self.root
return state
@property
def value(self):
return self.s[self.pos:]
def advance(self, n):
old_pos = self.pos
self.pos += n
return old_pos
def go_to(self, pos):
old_pos = self.pos
self.pos = pos
return old_pos
from collections import defaultdict
encountered_contexts = defaultdict(dict)
class Iterator(object):
def __init__(self,generator, parent=None):
self.generator = generator
self.parent = parent
self.list = []
self.pos = 0
def __iter__(self):
return self
def get(self,pos):
if self.parent:
return self.parent.get(pos)
while pos >= len(self.list):
value = next(self.generator)
self.list.append(value)
return self.list[pos]
def next(self):
self.pos+=1
return self.get(self.pos-1)
def copy(self):
if self.parent:
return Iterator(None,parent=self.parent)
return Iterator(None,parent=self)
def parser(name, url):
"""
Simplifying parser rules
* or
"""
    def dec(f):
        # The early return disables the tracing/memoization wrapper defined
        # below; it is kept here as a debugging aid for heavy grammars.
        return f
def decorated_function(state, context, *args, **kwargs):
if False and url is not None and url in encountered_contexts[state.pos]:
# print url,state.pos
return encountered_contexts[state.pos][url].copy()
print("{}{} {}:{}".format(" "*context.level,context.name,state.line,state.col))
new_context = Context(context.level+1,name,context)
result = f(state, new_context, *args, **kwargs)
if url is not None:
encountered_contexts[state.pos][url] = Iterator(result)
return encountered_contexts[state.pos][url]
return result
return decorated_function
return dec
class ParserGenerator(object):
"""
    Generating an abstract syntax tree is done implicitly by each rule
"""
def __init__(self, grammar):
self.grammar = grammar
self.parsers = {}
def compile_regex(self, regex, url):
compiled_regex = re.compile('^{}'.format(regex))
@parser('regex', url)
def regex_parser(state, context):
context.debug(regex)
match = compiled_regex.match(state.value)
if match:
s = match.group(0)
context.debug("match!")
new_state = state.copy()
new_state.result = s
new_state.advance(len(s))
yield new_state
else:
raise ParserError("Regex not matched: {}".format(regex))
return regex_parser
    def compile_ref(self, key, url):
        def ref_parser(state, context):
            # Look up a previously stored value; consumes no input.
            new_state = state.copy()
            new_state.result = state.store.get(key)
            yield new_state
        return ref_parser
def compile_ast_list(self, props, url):
name = props.get('name')
rule_parser = self._compile_rule(props['value'], url+'.ast-list')
@parser('ast-list', url)
def ast_list_parser(state, context):
l = []
current_node = state.current_node
state.current_node = l
try:
for new_state in rule_parser(state, context):
if isinstance(current_node,dict) and name:
new_current_node = new_state.current_node
new_state.current_node = current_node.copy()
new_state.current_node[name] = new_current_node
yield new_state
finally:
state.current_node = current_node
return ast_list_parser
def compile_ast_prop(self, props, url):
name = props.get('name')
value_parser = self._compile_rule(props['value'], url+'.ast-prop')
@parser('ast-prop', url)
def ast_prop_parser(state, context):
for new_state in value_parser(state, context):
current_node = new_state.current_node
if isinstance(current_node,dict):
current_node[name] = new_state.result
yield new_state
return ast_prop_parser
def compile_ast_node(self, props, url):
"""
Create a new AST node.
* If the current node is a list, appends the new node to it
* If the current node is a dict, puts the new node in the key given by name (if provided)
* If none of these things match, does nothing
"""
rule_parser = self._compile_rule(props['value'], url+'.ast-node')
name = props.get('name')
@parser('ast-node', url)
def ast_node_parser(state, context):
d = {}
d.update(props.get('props',{}))
current_node = state.current_node
state.current_node = d
try:
for new_state in rule_parser(state, context):
new_current_node = new_state.current_node
if isinstance(current_node,list):
new_state.current_node = current_node[:]
new_state.current_node.append(new_current_node)
elif isinstance(current_node,dict):
new_state.current_node = current_node.copy()
if name:
new_state.current_node[name] = new_current_node
else:
new_state.current_node.update(new_current_node)
yield new_state
finally:
state.current_node = current_node
return ast_node_parser
def compile_repeat(self, rule, url):
rule_parser = self._compile_rule(rule, url+'.repeat')
@parser('repeat', url)
def repeat_parser(state, context):
cnt=0
current_state = state
states_to_repeat=[state]
states_to_yield = []
productions = []
while states_to_repeat or states_to_yield or productions:
if states_to_repeat:
current_state=states_to_repeat.pop()
states_to_yield.append(current_state)
try:
production=rule_parser(current_state, context)
new_state = next(production)
#if the production does not advance the state, we reject it...
if new_state.pos == current_state.pos:
continue
productions.append(production)
states_to_repeat.append(new_state)
                except (ParserError, StopIteration):
continue
elif states_to_yield:
state_to_yield = states_to_yield.pop()
cnt +=1
if state_to_yield != state:
yield state_to_yield
elif productions:
production = productions[-1]
try:
new_state = next(production)
states_to_yield.append(new_state)
except (ParserError,StopIteration):
productions.pop()
if cnt==0:
raise ParserError("Not matched!")
return repeat_parser
def compile_optional(self, rule, url):
rule_parser = self._compile_rule(rule, url+'.optional')
@parser('optional', url)
def optional_parser(state, context):
try:
for new_state in rule_parser(state, context):
yield new_state
except ParserError as me:
pass
yield state
return optional_parser
    def compile_store(self, args, url):
        name = args['name']
        value = args['value']
        value_parser = self._compile_rule(value, url+'.store')
        @parser('store', url)
        def store_parser(state, context):
            for ns in value_parser(state, context):
                new_state = ns.copy()
                # persist the parsed value under `name` so `ref` rules can read it back
                new_state.store[name] = ns.result
                new_state.result = ns.result
                yield new_state
        return store_parser
def compile_literal(self, value, url):
if isinstance(value, dict):
value = self._compile_rule(value, url+'.literal')
@parser('literal', url)
def literal_parser(state, context):
context.debug(value)
if callable(value):
v = value(state, context)
else:
v = value
found_value = state.value[:len(v)]
if found_value != v:
raise ParserError("Expected {}, but found '{}'".format(value, found_value))
context.debug(v)
new_state = state.copy()
new_state.advance(len(v))
new_state.result = v
yield new_state
return literal_parser
def compile_python_code(self, code, url):
gv = globals().copy()
gv['url'] = url
exec(code,gv,gv)
return gv['parser']
def compile_or(self, alternatives, url):
alternative_parsers = []
for i,alternative in enumerate(alternatives):
alternative_parsers.append((alternative,self._compile_rule(alternative, url+'.or.{}'.format(i))))
@parser('or', url)
def or_parser(state, context):
"""
Pass in context object that contains information about the following things:
* Which rule has called this one?
*
"""
found = False
alternative_productions = []
for params,alternative_parser in alternative_parsers:
try:
alternative_productions.append(alternative_parser(state, context))
except ParserError as me:
continue
i = 0
while alternative_productions:
production = alternative_productions[i%len(alternative_productions)]
try:
new_state = next(production)
found = True
yield new_state
i+=1
except (ParserError,StopIteration):
alternative_productions.remove(production)
if not found:
raise ParserError("No alternative matched!")
return or_parser
def compile_sequence(self, rules, url):
"""
Increase the level by one for each element in the sequence
"""
parsers = []
for i,rule in enumerate(rules):
ps = self._compile_rule(rule, url+'.seq.{}'.format(i))
if ps is None:
raise AttributeError
parsers.append(ps)
@parser('sequence', url)
def sequence_parser(state, context):
"""
* Execute the first parser on the state
* For each returned state, execute the second parser
* For each returned state, execute the third parser...
"""
def parse_sequence(state, parsers):
parser = parsers.pop(0)
for new_state in parser(state, context):
if parsers:
try:
for new_new_state in parse_sequence(new_state, parsers[:]):
yield new_new_state
except ParserError:
continue
else:
yield new_state
for new_state in parse_sequence(state, parsers[:]):
yield new_state
return sequence_parser
def compile(self, debug=True):
self.parsers = {}
return self._compile_rule('start', '')
def _compile_rule(self, name_or_rule, url):
"""
Takes a YAML grammar as input and returns a Python parser function that can be
called with a Stream instance and a state as arguments.
"""
name = None
if isinstance(name_or_rule,(str,unicode)):
name = name_or_rule
if name in self.parsers:
return self.parsers[name]
rule = self.grammar[name]
else:
rule = name_or_rule
if name:
new_url = url+'.'+name
else:
new_url = url
def parse_subrule(rule, name=None):
rule_name = rule.keys()[0]
args = rule.values()[0]
if rule_name == '$python':
result = self.compile_python_code(args, url+'.{}'.format(name))
if name:
self.parsers[name] = result
return result
try:
func = getattr(self,'compile_{}'.format(rule_name.replace('-','_')))
except AttributeError:
raise ParserError("Unknown rule: {}".format(rule_name))
subparser = func(args, new_url)
@parser(rule_name, None)
def subrule_parser(state, context):
for result in subparser(state, context):
yield result
if name:
@parser(name, None)
def name_parser(state, context):
for result in subrule_parser(state, context):
yield result
self.parsers[name] = name_parser
return name_parser
return subrule_parser
#this allows definition of recursive parsing rules via a simple function call
if name:
#this will lead to infinite recursion if the parser is not replaced!
@parser(name, url)
def subrule_parser(state, context):
for result in self.parsers[name](state, context):
yield result
self.parsers[name] = subrule_parser
if isinstance(rule,(list,tuple)):
sequence_parser = self.compile_sequence(rule, new_url)
if name:
@parser(name, None)
def subrule_parser(state, context):
for result in sequence_parser(state, context):
yield result
self.parsers[name] = subrule_parser
return subrule_parser
return sequence_parser
elif isinstance(rule,dict) and len(rule) == 1:
return parse_subrule(rule, name=name)
elif isinstance(rule,(str,unicode)):
new_new_url = new_url+'.'+rule
ps = self._compile_rule(rule, new_new_url)
@parser(name, None)
def subrule_parser(state, context):
for result in ps(state, context):
yield result
self.parsers[name] = subrule_parser
return subrule_parser
raise ParserError("Unknown rule: {}".format(name or name_or_rule or '(no name given)'))
if __name__ == '__main__':
import sys
sys.setrecursionlimit(100000)
if len(sys.argv) < 3:
sys.stderr.write("Usage: {} [grammar filename] [code filename]\n".format(os.path.basename(__file__)))
exit(-1)
grammar_filename = sys.argv[1]
code_filename = sys.argv[2]
with open(grammar_filename,'r') as grammar_file:
        grammar = yaml.safe_load(grammar_file.read())  # plain-data grammars do not need the full loader
with open(code_filename,'r') as code_file:
code = code_file.read()
parser_generator = ParserGenerator(grammar)
parser = parser_generator.compile()
state = State(code)
start = time.time()
results = parser(state, Context(0,'root',None))
for result in results:
print result.line,result.col
if result.value.strip():
print "Parsing failed in line {}, column {}:\n\n{}...".format(result.line,result.col,result.value[:20])
else:
print "Parsing succeeded!"
pprint.pprint(result.current_node)
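# --- Example grammar (hypothetical) --------------------------------------------
# A minimal grammar file this generator can compile: a rule given as a list is a
# sequence, and single-key dict rules dispatch to the compile_* handlers above.
#
# start:
#   - number
#   - repeat:
#       - literal: "+"
#       - number
# number:
#   regex: "[0-9]+"
#
# Saved as e.g. sums.yml, it parses inputs like "1+2+3" via:
#   python <this file> sums.yml input.txt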
|
nilq/baby-python
|
python
|
from typing import ClassVar, List, Optional, Tuple
from urllib.parse import quote as http_quote
from .base import BaseProtocol
from ..types import IPAddressType
# HTTP 1.1 only
class HTTPProtocol(BaseProtocol[IPAddressType]):
ports: ClassVar[Tuple[int, ...]] = (80,)
_TYPES: ClassVar[List[bytes]] = [
b"*/*; q=0.300", b"text/html; q=0.999", b"application/xhtml+xml; q=1.000",
b"application/xml; q=0.900", b"text/xml; q=0.900", b"application/json; q=0.650",
b"application/pdf; q=0.800", b"image/*; q=0.700", b"image/png; q=0.775",
b"image/gif; q=0.750", b"image/jpeg; q=0.725", b"text/*; q=0.500", b"video/*; q=0.100",
b"audio/*; q=0.200", b"application/rtf; q=0.675", b"text/markdown; q=0.600",
b"text/plain; q=0.400", b"application/atom+xml; q=0.900"
]
_CHARSETS: ClassVar[List[bytes]] = [
b"*; q=0.200", b"utf-8; q=1.000", b"us-ascii; q=0.100",
b"utf-16le; q=0.900", b"utf-16; q=0.850", b"utf-16be; q=0.800",
b"utf-32le; q=0.700", b"utf-32; q=0.650", b"utf-32be; q=0.600",
b"iso-8859-15; q=0.500", b"windows-1252; q=0.400", b"iso-8859-1; q=0.300"
]
_LANGS: ClassVar[List[bytes]] = [
b"*; q=0.100", b"en-US; q=1.000", b"en; q=0.900", b"en-GB; q=0.850", b"en-CA; q=0.950",
b"en-AU; q=0.800", b"de-DE; q=0.600", b"de; q=0.500", b"de-CH; q=0.550", b"de-AT; q=0.450",
b"es; q=0.300", b"es-MX; q=0.350", b"es-ES; q=0.250", b"pt; q=0.200", b"fr; q=0.200"
]
_ENCS: ClassVar[List[bytes]] = [b"identity; q=1.000", b"*; q=0.000"]
assert _TYPES and _CHARSETS and _LANGS and _ENCS
__slots__ = ()
    def pull_data(self, length_hint: Optional[int] = None) -> bytes:
res = bytearray(b"GET / HTTP/1.1\r\nHost: ")
if self._dst.host is not None:
res += http_quote(self._dst.host).encode("ascii")
res += b"\r\nUser-Agent: tcpreq (TCP research scan)\r\n"
if length_hint is None or length_hint - len(res) <= 2:
return res + b"\r\n"
# Caching
res += b"Cache-Control: max-age=3600, max-stale=1600, no-transform\r\n"
if length_hint - len(res) <= 2:
return res + b"\r\n"
# Pragma
res += b"Pragma: no-cache\r\n"
if length_hint - len(res) <= 2:
return res + b"\r\n"
# Referer
res += b"Referer: about:blank\r\n"
if length_hint - len(res) <= 2:
return res + b"\r\n"
# Accept, Accept-Charset, Accept-Language, Accept-Encoding
for name, vals in ((b"Accept: ", self._TYPES), (b"Accept-Charset: ", self._CHARSETS),
(b"Accept-Language: ", self._LANGS), (b"Accept-Encoding: ", self._ENCS)):
rem = length_hint - (len(res) + len(name))
ret = True
for idx, v in enumerate(vals):
rem -= len(v) + 2 # Add 2 for next separator (", ")
if rem <= 2:
break
else:
ret = False
res += name
res += b", ".join(vals[:idx + 1])
res += b"\r\n"
if ret:
break
return res + b"\r\n"
# No need to implement push_data: HTTP is stateless (except for cookies), response can be ignored
|
nilq/baby-python
|
python
|
'''
Read the start hour and the end hour of a game. Then compute the game's
duration, knowing that it can start on one day and end on the next, with a
minimum duration of 1 hour and a maximum of 24 hours.
| Input Sample | Output Samples          |
| ------------ | ----------------------- |
| 16 2         | O JOGO DUROU 10 HORA(S) |
| 0 0          | O JOGO DUROU 24 HORA(S) |
| 2 16         | O JOGO DUROU 14 HORA(S) |
'''
hora = input().split()
inicio = int(hora[0])
fim = int(hora[1])
'''
result = fim - inicio
if result == 0:
print("O JOGO DUROU 24 HORA(S)")
else:
print("O JOGO DUROU {} HORA(S)".format(result))
'''
if inicio < fim:
result = fim - inicio
else:
result = (24 - inicio) + fim
print("O JOGO DUROU {} HORA(S)".format(result))
|
nilq/baby-python
|
python
|
# stdlib imports
import logging
from datetime import datetime, timedelta
# third party imports
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import (Column, Integer, Float, String,
DateTime, ForeignKey, Boolean)
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy_utils import database_exists, create_database
# declarative_base() dynamically creates the base class our table classes inherit from
Base = declarative_base()
TIMEFMT = '%Y-%m-%dT%H:%M:%S'
MYSQL_TIMEOUT = 30
# association algorithm - any peak with:
# time > origin - TMIN and time < origin + TMAX
# AND
# distance < DISTANCE
TMIN = 60
TMAX = 180
DISTANCE = 500
P_TRAVEL_TIME = 4.2
class IncorrectDataTypesException(Exception):
pass
class IncompleteConstructorException(Exception):
pass
def get_session(url='sqlite:///:memory:', create_db=True):
"""Get a SQLAlchemy Session instance for input database URL.
:param url:
SQLAlchemy URL for database, described here:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls.
:param create_db:
Boolean indicating whether to create database from scratch.
:returns:
Sqlalchemy Session instance.
"""
# Create a sqlite in-memory database engine
if not database_exists(url):
if create_db:
create_database(url)
else:
msg = ('Database does not exist, will not create without '
'create_db turned on.')
logging.error(msg)
return None
connect_args = {}
if 'mysql' in url.lower():
connect_args = {'connect_timeout': MYSQL_TIMEOUT}
engine = create_engine(url, echo=False, connect_args=connect_args)
Base.metadata.create_all(engine)
# create a session object that we can use to insert and
# extract information from the database
Session = sessionmaker(bind=engine, autoflush=False)
session = Session()
return session
class Event(Base):
"""Class representing the "event" table in the database.
"""
EVENT = {'eventid': String(64),
'netid': String(32),
'time': DateTime(),
'lat': Float(),
'lon': Float(),
'depth': Float(),
'magnitude': Float(),
'locstring': String(1024),
'lastrun': DateTime(),
}
__tablename__ = 'event'
id = Column(Integer, primary_key=True)
eventid = Column(EVENT['eventid'], index=True)
netid = Column(EVENT['netid'])
time = Column(EVENT['time'])
lat = Column(EVENT['lat'])
lon = Column(EVENT['lon'])
depth = Column(EVENT['depth'])
magnitude = Column(EVENT['magnitude'])
locstring = Column(EVENT['locstring'])
lastrun = Column(EVENT['lastrun'])
queued_events = relationship("Queued", back_populates="event",
cascade="all, delete, delete-orphan")
@property
def is_running(self):
        for queue in self.queued_events:
if queue.is_running:
return True
return False
@property
def age_in_days(self):
return (datetime.utcnow() - self.time) / timedelta(days=1)
def __init__(self, **kwargs):
"""Instantiate an Event object from scratch (i.e., not from a query).
Note: Although keyword arguments, all arguments below must be supplied.
Args:
eventid (str): Event ID of the form "us2020abcd".
netid (str): The network code at the beginning of the eventid.
time (datetime): Origin time, UTC.
lat (float): Origin latitude.
lon (float): Origin longitude.
depth (float): Origin depth.
magnitude (float): Origin magnitude.
locstring (str): Description of earthquake location.
lastrun (datetime): Set this to something like datetime(1900,1,1).
Returns:
Event: Instance of the Event object.
"""
validate_inputs(self.EVENT, kwargs)
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return (f'Event: {self.eventid}')
class Queued(Base):
"""Class representing the "queued" table in the database.
"""
__tablename__ = 'queued'
QUEUED = {'event_id': Integer(),
'run_time': DateTime(),
}
id = Column(Integer, primary_key=True)
event_id = Column(QUEUED['event_id'], ForeignKey('event.id'))
run_time = Column(QUEUED['run_time'])
event = relationship("Event", back_populates="queued_events")
running_events = relationship("Running",
back_populates="queued_event",
cascade="all, delete, delete-orphan")
def __init__(self, **kwargs):
"""Instantiate a Queued object from scratch (i.e., not from a query).
Note: Although keyword arguments, all arguments below must be supplied.
Args:
event_id (int): ID of an existing (committed) Event object.
run_time (datetime): Time (UTC) when event is scheduled to be run.
Returns:
Queued: Instance of the Queued object.
"""
validate_inputs(self.QUEUED, kwargs)
for key, value in kwargs.items():
setattr(self, key, value)
@property
def is_running(self):
return len(self.running_events) > 0
def __repr__(self):
return (f'Queued: {self.event.eventid} {self.run_time}')
class Running(Base):
"""Class representing the "running" table in the database.
"""
__tablename__ = 'running'
RUNNING = {'queued_id': Integer(),
'start_time': DateTime(),
'success': Boolean(),
}
id = Column(Integer, primary_key=True)
queued_id = Column(RUNNING['queued_id'], ForeignKey('queued.id'))
start_time = Column(RUNNING['start_time'])
success = Column(RUNNING['success'])
queued_event = relationship("Queued", back_populates="running_events")
def __init__(self, **kwargs):
"""Instantiate a Running object from scratch (i.e., not from a query).
Note: Although keyword arguments, all arguments below must be supplied.
Args:
queued_id (int): ID of an existing (committed) Queued object.
start_time (datetime): Time (UTC) when event began running.
success (bool): Indicates whether the event has finished running successfully.
Returns:
Running: Instance of the Running object.
"""
validate_inputs(self.RUNNING, kwargs)
for key, value in kwargs.items():
setattr(self, key, value)
@property
def minutes_running(self):
# return running time in minutes
return (datetime.utcnow() - self.start_time) / timedelta(seconds=60)
def __repr__(self):
msg = (f'Running: {self.queued_event.event.eventid} '
f'started at {self.start_time}')
return (msg)
def validate_inputs(defdict, kwdict):
"""Validate all init() inputs against the python types of table columns.
Args:
defdict (dict): Dictionary containing the column
names/SQLAlchemy types.
kwdict (dict): Dictionary containing the init() kwargs.
Raises:
IncompleteConstructorException: Not all kwargs are set.
IncorrectDataTypesException: At least one of the kwargs is
of the wrong type.
"""
# first check that all required parameters are being set
if not set(defdict.keys()) <= set(kwdict.keys()):
        msg = ('In constructor, all of the following values must be set: '
               f'{str(list(defdict.keys()))}')
raise IncompleteConstructorException(msg)
errors = []
for key, value in kwdict.items():
ktype = defdict[key].python_type
if not isinstance(value, ktype):
errors.append(f'{key} must be of type {ktype}')
if len(errors):
msg = '\n'.join(errors)
raise IncorrectDataTypesException(msg)
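# --- Usage sketch (in-memory SQLite, hypothetical event values) ---------------
if __name__ == '__main__':
    session = get_session()  # defaults to sqlite:///:memory:
    event = Event(eventid='us2020abcd', netid='us',
                  time=datetime(2020, 1, 1, 12, 0, 0),
                  lat=35.0, lon=-118.0, depth=10.0, magnitude=5.4,
                  locstring='Somewhere, CA', lastrun=datetime(1900, 1, 1))
    session.add(event)
    session.commit()
    session.add(Queued(event_id=event.id, run_time=datetime.utcnow()))
    session.commit()
    print(event, event.is_running)  # Event: us2020abcd False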
|
nilq/baby-python
|
python
|
# Copyright (c) Xidian University and Xi'an University of Posts & Telecommunications. All Rights Reserved
import random
from .nasbench_101_cell import Cell as Cell_101
from .nasbench_201_cell import Cell as Cell_201
from gnn_lib.data import Data
from nas_lib.utils.utils_data import nas2graph
from nas_lib.utils.predictive_comparision import convert_arch_to_seq
from nas_lib.utils.utils_data import nasbench2graph_reverse
def build_datasets(args):
if args.search_space == "nasbench_101":
from nas_lib.data.nasbench_101 import NASBench101
return NASBench101(args.search_space)
elif args.search_space == 'nasbench_201':
from nas_lib.data.nasbench_201 import NASBench201
return NASBench201(args)
elif args.search_space == 'darts':
from nas_lib.data.darts import DataSetDarts
return DataSetDarts(args)
else:
raise ValueError("This architecture datasets does not support!")
def dataset_split(args, nas_dataset, budget=None):
total_keys = nas_dataset.total_keys
total_archs = nas_dataset.total_archs
if budget:
train_keys = random.sample(total_keys, budget)
else:
train_keys = random.sample(total_keys, args.search_budget)
test_keys = [key for key in total_keys if key not in train_keys]
    flag = args.search_space == 'nasbench_101'

    def build_record(k):
        # Encode a single sampled architecture into the record format used below.
        arch = total_archs[k]
        if args.search_space == 'nasbench_101':
            cell_inst = Cell_101(matrix=arch['matrix'], ops=arch['ops'])
        elif args.search_space == 'nasbench_201':
            cell_inst = Cell_201(matrix=arch[0][0], ops=arch[0][1])
        else:
            raise NotImplementedError()
        return {
            'matrix': arch['matrix'] if flag else arch[0][0],
            'ops': arch['ops'] if flag else arch[0][1],
            'pe_adj_enc_vec': cell_inst.get_encoding('adj_enc_vec', args.seq_len),
            'pe_path_enc_vec': cell_inst.get_encoding('path_enc_vec', args.seq_len),
            'pe_path_enc_aware_vec': cell_inst.get_encoding('path_enc_aware_vec', args.seq_len),
            'val_acc': arch['val'] if flag else (100 - arch[4]) * 0.01,
            'test_acc': arch['test'] if flag else (100 - arch[5]) * 0.01
        }

    train_data = [build_record(k) for k in train_keys]
    test_data = [build_record(k) for k in test_keys]
    return train_data, test_data
def dataset_all(args, nas_dataset):
total_keys = nas_dataset.total_keys
total_archs = nas_dataset.total_archs
all_archs = []
flag = args.search_space == 'nasbench_101'
for k in total_keys:
arch = total_archs[k]
if args.search_space == 'nasbench_101':
cell_inst = Cell_101(matrix=arch['matrix'], ops=arch['ops'])
edge_index, node_f = nas2graph(args.search_space, (arch['matrix'], arch['ops']))
g_data = Data(edge_index=edge_index.long(), x=node_f.float())
seminas_vec = convert_arch_to_seq(arch['o_matrix'], arch['o_ops'])
edge_index_reverse, node_f_reverse = nasbench2graph_reverse((arch['matrix'], arch['ops']), reverse=True)
g_data_reverse = Data(edge_index=edge_index_reverse.long(), x=node_f_reverse.float())
if len(seminas_vec) < 27:
padding = 27 - len(seminas_vec)
seminas_vec = seminas_vec + [0 for _ in range(padding)]
all_archs.append(
{
'matrix': arch['matrix'] if flag else arch[0][0],
'ops': arch['ops'] if flag else arch[0][1],
'pe_adj_enc_vec': cell_inst.get_encoding('adj_enc_vec', args.seq_len),
'pe_path_enc_vec': cell_inst.get_encoding('path_enc_vec', args.seq_len),
'pe_path_enc_aware_vec': cell_inst.get_encoding('path_enc_aware_vec', args.seq_len),
'val_acc': arch['val'] if flag else (100 - arch[4]) * 0.01,
'test_acc': arch['test'] if flag else (100 - arch[5]) * 0.01,
'g_data': g_data,
'arch_k': k,
'seminas_vec': seminas_vec,
'edge_idx': edge_index,
'node_f': node_f,
'edge_idx_reverse': edge_index_reverse,
'node_f_reverse': node_f_reverse,
'g_data_reverse': g_data_reverse
}
)
elif args.search_space == 'nasbench_201':
cell_inst = Cell_201(matrix=arch[0][0], ops=arch[0][1])
edge_index, node_f = nas2graph(args.search_space, (arch[0][0], arch[0][1]))
edge_index_reverse, node_f_reverse = nas2graph(args.search_space, (arch[0][0], arch[0][1]), reverse=True)
g_data_reverse = Data(edge_index=edge_index_reverse.long(), x=node_f_reverse.float())
all_archs.append(
{
'matrix': arch['matrix'] if flag else arch[0][0],
'ops': arch['ops'] if flag else arch[0][1],
'pe_adj_enc_vec': cell_inst.get_encoding('adj_enc_vec', args.seq_len),
'pe_path_enc_vec': cell_inst.get_encoding('path_enc_vec', args.seq_len),
'pe_path_enc_aware_vec': cell_inst.get_encoding('path_enc_aware_vec', args.seq_len),
'val_acc': arch['val'] if flag else (100 - arch[4]) * 0.01,
'test_acc': arch['test'] if flag else (100 - arch[5]) * 0.01,
'g_data': Data(edge_index=edge_index.long(), x=node_f.float()),
'arch_k': k,
'edge_idx': edge_index,
'node_f': node_f,
'edge_idx_reverse': edge_index_reverse,
'node_f_reverse': node_f_reverse,
'g_data_reverse': g_data_reverse
}
)
else:
raise NotImplementedError()
return all_archs
def split_data_from_all_data(all_data, idxs, train_data, budget, last_budget):
train_data_new = []
counter = 0
while len(train_data_new) < (budget - last_budget):
if idxs[last_budget+counter] < len(all_data):
train_data_new.append(all_data.pop(idxs[last_budget+counter]))
counter += 1
else:
counter += 1
continue
train_data.extend(train_data_new)
return train_data, all_data
def dataset_split_idx(all_data, budget=None):
idxs = list(range(len(all_data)))
random.shuffle(idxs)
train_data = [all_data[k] for k in idxs[:budget]]
test_data = [all_data[kt] for kt in idxs[budget:]]
return train_data, test_data
def dataset_split_idx_predictive_comparison(all_data, budget=None):
idxs = list(range(len(all_data)))
random.shuffle(idxs)
train_data = [all_data[k] for k in idxs[:int(budget)]]
test_data = [all_data[kt] for kt in idxs[int(budget):]]
return train_data, test_data
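# --- Usage sketch for the index-based split (framework independent) -----------
# dataset_split_idx only needs a list, so a dummy stands in for real archs here.
if __name__ == '__main__':
    dummy = [{'val_acc': i * 0.01} for i in range(100)]
    train, test = dataset_split_idx(dummy, budget=20)
    print(len(train), len(test))  # 20 80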
|
nilq/baby-python
|
python
|
'''
Utility module.
'''
import yaml
import numpy as np
swap = lambda x1, x2: (x2, x1) if x1 > x2 else (x1, x2)
square = lambda x: x**2
def read_params(file) -> dict:
'''
Read yaml file.
Args:
file (str): Path to the yaml file.
Returns:
dict: Contents of the yaml file.
'''
with open(file, 'r') as yaml_file:
parameters = yaml.full_load(yaml_file)
return parameters
def dim_number(params: dict) -> int:
    '''
    Gets the number of dimensions of the optimized function.
    Args:
        params (dict): Algorithm parameters.
    Returns:
        int: Number of dimensions.
    '''
    function = params['function']
    if function in (1, 2):
        return 1
    elif function in (3, 4):
        return 2
    return None  # unknown function id
def chromosome_length(params: dict) -> int:
'''
Calculates a chromosome's length to be generated.
Args:
params (dict): Algorithm parameters.
    Returns:
int: Length of a chromosome.
'''
lower_bound = params['searchDomain']['lowerBound']
upper_bound = params['searchDomain']['upperBound']
precision = float(params['searchDomain']['precision'])
length = (upper_bound - lower_bound) / precision
length = int(np.ceil(np.log2(length)))
return length
def roulette_wheel(cum_probs: np.ndarray) -> int:
'''
Randomly selects an index given cumulative probabilities.
Args:
cum_probs (np.ndarray): Cumulative probabilities.
Returns:
int: Selected index.
'''
index = None
r = np.random.uniform()
for i, prob in enumerate(cum_probs):
if r <= prob:
index = i
break
return index
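# --- Usage sketch --------------------------------------------------------------
if __name__ == '__main__':
    probs = np.cumsum([0.2, 0.3, 0.5])  # cumulative selection probabilities
    print(roulette_wheel(probs))        # a random index in {0, 1, 2}
    params = {'searchDomain': {'lowerBound': -5, 'upperBound': 5,
                               'precision': '1e-3'}}
    print(chromosome_length(params))    # ceil(log2(10000)) = 14 bits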
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Example code showing how to control Thorlabs TDC Motors using PyAPT
V1.2
20141125 V1.0 First working version
20141201 V1.0a Updated to short notation
20150324 V1.1 Added more descriptions
20150417 V1.2 Implemented motor without serial
Michael Leung
mcleung@stanford.edu
"""
# Import APTMotor class from PyAPT
from PyAPT import APTMotor
import time
# Create object corresponding to the motor.
Motor1 = APTMotor(83828393, HWTYPE=31) # The number should correspond to the serial number.
# Use help APTMotor to obtain full list of hardware (HW) supported.
# Note: You can control multiple motors by creating more APTMotor Objects
# Obtain current position of motor
print(Motor1.getPos())
# Serial numbers can be added later by using setSerialNumber and initializeHardwareDevice
# This functionality is particularly useful in the GUI setup.
Motor2 = APTMotor()
Motor2.setSerialNumber(83828393)
Motor2.initializeHardwareDevice()
print(Motor2.getPos())
# Move motor forward by 1mm, wait half a second, and return to original position.
# mRel is move relative. mAbs is move absolute (go to position xxx)
Motor1.mRel(1) # advance 1mm
time.sleep(.5)
Motor1.mRel(-1) # retract 1mm
time.sleep(1)
# Move motor forward by 1mm, wait half a second, and return to original position, at a velocity of 0.5mm/sec
motVel = 0.5 #motor velocity, in mm/sec
Motor1.mcRel(1, motVel) # advance 1mm
time.sleep(.5)
Motor1.mcRel(-1, motVel) # retract 1mm
# Clean up APT object, free up memory
Motor1.cleanUpAPT()
|
nilq/baby-python
|
python
|
'''
Complete the given code so that it prints the sum of the squares of the
elements of the list numbers.
numbers = [1, 78, 23, -65, 99, 9089, 34, -32, 0, -67, 1, 11, 111]
'''
numbers = [1, 78, 23, -65, 99, 9089, 34, -32, 0, -67, 1, 11, 111]
numbers2 = []
for i in range(len(numbers)):
numbers2.append(numbers[i] ** 2)
print(sum(numbers2))
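# Equivalent one-liner using a generator expression:
# print(sum(n ** 2 for n in numbers))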
|
nilq/baby-python
|
python
|
'''
Copyright (C) 2015 Ryan Gonzalez
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
g_backup = globals().copy()
__version__ = '0.8'
__all__ = ['overload', 'RuntimeModule', 'switch', 'tail_recurse', 'copyfunc',
'set_docstring', 'annotate', 'safe_unpack', 'modify_function',
'assign', 'fannotate', 'compare_and_swap', 'is_main',
'call_if_main', 'run_main']
import sys, inspect, types, functools
def _targspec(func, specs, attr='__orig_arg__'):
if hasattr(func, '__is_overload__') and func.__is_overload__:
return getattr(func, attr)
return specs(func)
def set_docstring(doc):
'''A simple decorator to set docstrings.
:param doc: The docstring to tie to the function.
Example::
@set_docstring('This is a docstring')
def myfunc(x):
pass'''
def wrap(f):
f.__doc__ = doc
return f
return wrap
_modify_function_doc = '''
Creates a copy of a function, changing its attributes.
:param globals: Will be added to the function's globals.
:param name: The new function name. Set to ``None`` to use the function's original name.
:param code: The new function code object. Set to ``None`` to use the function's original code object.
:param defaults: The new function defaults. Set to ``None`` to use the function's original defaults.
:param closure: The new function closure. Set to ``None`` to use the function's original closure.
.. warning:: This function can be potentially dangerous.
'''
def copyfunc(f):
    '''Copies a function.
:param f: The function to copy.
:return: The copied function.
.. deprecated:: 0.4
Use :func:`modify_function` instead.
'''
return modify_function(f)
if sys.version_info.major == 3:
@set_docstring(_modify_function_doc)
def modify_function(f, globals={}, name=None, code=None, defaults=None,
closure=None):
if code is None: code = f.__code__
if name is None: name = f.__name__
if defaults is None: defaults = f.__defaults__
if closure is None: closure = f.__closure__
newf = types.FunctionType(code, dict(f.__globals__, **globals), name=name,
argdefs=defaults, closure=closure)
newf.__dict__.update(f.__dict__)
return newf
argspec = inspect.getfullargspec
ofullargspec = inspect.getfullargspec
def _fullargspec(func):
return _targspec(func, ofullargspec)
inspect.getfullargspec = _fullargspec
def _exec(m,g): exec(m,g)
else:
@set_docstring(_modify_function_doc)
def modify_function(f, globals={}, name=None, code=None, defaults=None,
closure=None):
if code is None: code = f.func_code
if name is None: name = f.__name__
if defaults is None: defaults = f.func_defaults
if closure is None: closure = f.func_closure
newf = types.FunctionType(code, dict(f.func_globals, **globals),
name=name, argdefs=defaults, closure=closure)
newf.__dict__.update(f.__dict__)
return newf
argspec = inspect.getargspec
eval(compile('def _exec(m,g): exec m in g', '<exec>', 'exec'))
def _gettypes(args):
return tuple(map(type, args))
oargspec = inspect.getargspec
def _argspec(func):
return _targspec(func, oargspec)
inspect.getargspec = _argspec
try:
import IPython
except ImportError:
IPython = None
else:
# Replace IPython's argspec
oipyargspec = IPython.core.oinspect.getargspec
def _ipyargspec(func):
return _targspec(func, oipyargspec, '__orig_arg_ipy__')
IPython.core.oinspect.getargspec = _ipyargspec
class overload(object):
'''Simple function overloading in Python.'''
@classmethod
def argc(self, argc=None):
'''Overloads a function based on the specified argument count.
:param argc: The argument count. Defaults to ``None``. If ``None`` is given, automatically compute the argument count from the given function.
.. note::
Keyword argument counts are NOT checked! In addition, when the argument count is automatically calculated, the keyword argument count is also ignored!
Example::
@overload.argc()
def func(a):
print 'Function 1 called'
@overload.argc()
def func(a, b):
print 'Function 2 called'
func(1) # Calls first function
func(1, 2) # Calls second function
func() # Raises error
'''
# Python 2 UnboundLocalError fix
argc = {'argc': argc}
def wrap(f):
if argc['argc'] is None:
argc['argc'] = len(argspec(f).args)
try:
st = inspect.stack()[1][0]
oldf = dict(st.f_globals, **st.f_locals)[f.__name__]
except KeyError: pass
else:
if hasattr(oldf, '__pyext_overload_basic__'):
globls = oldf.__globals__ if sys.version_info.major == 3\
else oldf.func_globals
globls['overloads'][argc['argc']] = f
return oldf
@functools.wraps(f)
def newf(*args, **kwargs):
if len(args) not in overloads:
raise TypeError(
"No overload of function '%s' that takes %d args" % (
f.__name__, len(args)))
return overloads[len(args)](*args, **kwargs)
overloads = {}
overloads[argc['argc']] = f
newf = modify_function(newf, globals={'overloads': overloads})
newf.__pyext_overload_basic__ = None
newf.__orig_arg__ = argspec(f)
if IPython:
newf.__orig_arg_ipy__ = IPython.core.oinspect.getargspec(f)
return newf
return wrap
@classmethod
def args(self, *argtypes, **kw):
'''Overload a function based on the specified argument types.
:param argtypes: The argument types. If None is given, get the argument types from the function annotations(Python 3 only)
:param kw: Can only contain 1 argument, `is_cls`. If True, the function is assumed to be part of a class.
Example::
@overload.args(str)
def func(s):
print 'Got string'
@overload.args(int, str)
def func(i, s):
print 'Got int and string'
@overload.args()
def func(i:int): # A function annotation example
print 'Got int'
func('s')
func(1)
func(1, 's')
func(True) # Raises error
'''
# XXX: some of this should be moved to a utility class
# It's duplicated from overload.argc
# Python 2 UnboundLocalError fix...again!
argtypes = {'args': tuple(argtypes)}
def wrap(f):
if len(argtypes['args']) == 1 and argtypes['args'][0] is None:
aspec = argspec(f)
argtypes['args'] = tuple(map(lambda x: x[1], sorted(
aspec.annotations.items(),
key=lambda x: aspec.args.index(x[0]))))
try:
st = inspect.stack()[1][0]
oldf = dict(st.f_globals, **st.f_locals)[f.__name__]
except KeyError: pass
else:
if hasattr(oldf, '__pyext_overload_args__'):
globls = oldf.__globals__ if sys.version_info.major == 3\
else oldf.func_globals
globls['overloads'][argtypes['args']] = f
return oldf
@functools.wraps(f)
def newf(*args):
if len(kw) == 0:
cargs = args
elif len(kw) == 1 and 'is_cls' in kw and kw['is_cls']:
cargs = args[1:]
else:
raise ValueError('Invalid keyword args specified')
types = _gettypes(cargs)
if types not in overloads:
raise TypeError(\
"No overload of function '%s' that takes: %s" % (
f.__name__, types))
return overloads[types](*args)
overloads = {}
overloads[argtypes['args']] = f
newf = modify_function(newf, globals={'overloads': overloads})
newf.__pyext_overload_args__ = None
newf.__orig_arg__ = argspec(f)
if IPython:
newf.__orig_arg_ipy__ = IPython.core.oinspect.getargspec(f)
return newf
return wrap
class _RuntimeModule(object):
    'Create a module object at runtime and insert it into sys.modules. If called, same as :py:func:`from_objects`.'
def __call__(self, *args, **kwargs):
return self.from_objects(*args, **kwargs)
@staticmethod
@overload.argc(1)
def from_objects(name, **d):
return _RuntimeModule.from_objects(name, '', **d)
@staticmethod
@overload.argc(2)
def from_objects(name, docstring, **d):
'''Create a module at runtime from `d`.
:param name: The module name.
:param docstring: Optional. The module's docstring.
:param \*\*d: All the keyword args, mapped from name->value.
Example: ``RuntimeModule.from_objects('name', 'doc', a=1, b=2)``'''
module = types.ModuleType(name, docstring)
module.__dict__.update(d)
module.__file__ = '<runtime_module>'
sys.modules[name] = module
return module
@staticmethod
@overload.argc(2)
def from_string(name, s):
return _RuntimeModule.from_string(name, '', s)
@staticmethod
@overload.argc(3)
def from_string(name, docstring, s):
        '''Create a module at runtime from `s`.
:param name: The module name.
:param docstring: Optional. The module docstring.
:param s: A string containing the module definition.'''
g = {}
_exec(s, g)
return _RuntimeModule.from_objects(name, docstring,
**dict(filter(lambda x: x[0] not in g_backup, g.items())))
RuntimeModule = _RuntimeModule()
class CaseObject(object):
'The object returned by a switch statement. When called, it will return True if the given argument equals its value, else False. It can be called with multiple parameters, in which case it checks if its value equals any of the arguments.'
def __init__(self, value, cstyle):
self.value = value
self.did_match = False
self.cstyle = cstyle
self.did_pass = not cstyle
def __call__(self, *args):
if not self.cstyle and self.did_match: return False
if assign('res', not (self.did_pass and self.cstyle) and\
self.value in args):
self.did_match = True
return res
def quit(self):
        'Forces all other calls to return False. Equivalent of a ``break`` statement.'
self.did_pass = True
def default(self):
"Executed if ``quit`` wasn't called."
return not self.did_match and (not self.did_pass if self.cstyle else True)
def __iter__(self):
yield self
def __enter__(self):
return self
def __exit__(self, *args):
pass
def switch(value, cstyle=False):
'''A Python switch statement implementation that is used with a ``with`` statement.
:param value: The value to "switch".
:param cstyle: If ``True``, then cases will automatically fall through to the next one until ``case.quit()`` is encountered.
``with`` statement example::
with switch('x'):
if case(1): print 'Huh?'
if case('x'): print 'It works!!!'
.. warning:: If you modify a variable named "case" in the same scope that you use the ``with`` statement version, you will get an UnboundLocalError. The soluction is to use ``with switch('x') as case:`` instead of ``with switch('x'):``.'''
res = CaseObject(value, cstyle)
inspect.stack()[1][0].f_globals['case'] = res
return res
def tail_recurse(spec=None):
'''Remove tail recursion from a function.
    :param spec: A function that, when given the arguments, returns a bool indicating whether or not to exit. If ``None``, tail recursion is always called unless the function returns a value.
.. note::
This function has a slight overhead that is noticable when using timeit. Only use it if the function has a possibility of going over the recursion limit.
.. warning::
This function will BREAK any code that either uses any recursion other than tail recursion or calls itself multiple times. For example, ``def x(): return x()+1`` will fail.
Example::
@tail_recurse()
def add(a, b):
if a == 0: return b
return add(a-1, b+1)
add(10000000, 1) # Doesn't max the recursion limit.
'''
def _wrap(f):
class TailRecursion(Exception):
def __init__(self, args, kwargs):
self.args = args
self.kwargs = kwargs
def _newf(*args, **kwargs):
if inspect.stack()[1][3] == f.__name__:
if (spec and spec(args)) or not spec:
raise TailRecursion(args, kwargs)
while True:
try:
res = f(*args, **kwargs)
except TailRecursion as ex:
args = ex.args
kwargs = ex.kwargs
continue
else:
return res
_newf.__doc__ = f.__doc__
return _newf
return _wrap
def annotate(*args, **kwargs):
'''Set function annotations using decorators.
:param args: This is a list of annotations for the function, in the order of the function's parameters. For example, ``annotate('Annotation 1', 'Annotation 2')`` will set the annotations of parameter 1 of the function to ``Annotation 1``.
:param kwargs: This is a mapping of argument names to annotations. Note that these are applied *after* the argument list, so any args set that way will be overriden by this mapping. If there is a key named `ret`, that will be the annotation for the function's return value.
.. deprecated:: 0.5
Use :func:`fannotate` instead.
'''
def _wrap(f):
if not hasattr(f, '__annotations__'):
f.__annotations__ = {}
if 'ret' in kwargs:
f.__annotations__['return'] = kwargs.pop('ret')
f.__annotations__.update(dict(zip(argspec(f).args, args)))
f.__annotations__.update(kwargs)
return f
return _wrap
def fannotate(*args, **kwargs):
'''Set function annotations using decorators.
:param \*args: The first positional argument is used for the function's return value; all others are discarded.
:param \**kwargs: This is a mapping of argument names to annotations.
Example::
@fannotate('This for the return value', a='Parameter a', b='Parameter b')
def x(a, b):
pass
'''
def wrap(f):
if not hasattr(f, '__annotations__'):
f.__annotations__ = {}
if len(args) >= 1:
f.__annotations__['return'] = args[0]
f.__annotations__.update(kwargs)
return f
return wrap
def safe_unpack(seq, ln, fill=None):
'''Safely unpack a sequence to length `ln`, without raising ValueError. Based on Lua's method of unpacking. Empty values will be filled in with `fill`, while any extra values will be cut off.
:param seq: The sequence to unpack.
:param ln: The expected length of the sequence.
:param fill: The value to substitute if the sequence is too small. Defaults to ``None``.
Example::
s = 'a:b'
a, b = safe_unpack(s.split(':'), 2)
# a = 'a'
# b = 'b'
s = 'a'
a, b = safe_unpack(s.split(':'), 2)
# a = 'a'
# b = None'''
if len(seq) > ln:
return seq[:ln]
elif len(seq) < ln:
return seq + type(seq)([fill]*(ln-len(seq)))
else:
return seq
def assign(varname, value):
'''Assign `value` to `varname` and return it. If `varname` is an attribute and the instance name it belongs to is not defined, a NameError is raised.
This can be used to emulate assignment as an expression. For example, this::
if assign('x', 7): ...
    is equivalent to this C code::
if (x = 7) ...
.. warning::
When assigning an attribute, the instance it belongs to MUST be declared as global prior to the assignment. Otherwise, the assignment will not work.
'''
fd = inspect.stack()[1][0].f_globals
fl = inspect.stack()[1][0].f_locals
if '.' not in varname:
fd[varname] = value
else:
vsplit = list(map(str.strip, varname.split('.')))
fvars = dict(fd, **fl)
if vsplit[0] not in fvars:
raise NameError('Unknown object: %s' % vsplit[0])
base = fvars[vsplit[0]]
for x in vsplit[1:-1]:
base = getattr(base, x)
setattr(base, vsplit[-1], value)
return value
def is_main(frame=1):
"Return if the caller is main. Equilavent to ``__name__ == '__main__'``."
return inspect.stack()[frame][0].f_globals['__name__'] == '__main__'
def _call_if_main(frame, f, args):
if is_main(frame): return f(*args)
def call_if_main(f,*args):
"Call the `f` with `args` if the caller's module is main."
return _call_if_main(3,f,args)
def run_main(f,*args):
"Call `f` with the `args` and terminate the program with its return code if the caller's module is main."
sys.exit(_call_if_main(3,f,args))
def compare_and_swap(var, compare, new):
"If `var` is equal to `compare`, set it to `new`."
if assign('v', inspect.stack()[1][0].f_globals)[var] == compare:
v[var] = new
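# Hedged usage sketch at module scope (`counter` is an illustrative name):
# assign('counter', 0)               # injects counter into the caller's globals
# compare_and_swap('counter', 0, 1)  # counter == 0, so it becomes 1
# counter                            # -> 1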
|
nilq/baby-python
|
python
|
# Generated by Django 2.2.24 on 2021-08-16 09:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('landing', '0103_policyarticle'),
]
operations = [
migrations.AddField(
model_name='section',
name='prefix',
field=models.TextField(blank=True, verbose_name='prefix text'),
),
]
|
nilq/baby-python
|
python
|
class Config:
"""Discriminator configurations.
"""
def __init__(self, steps: int):
"""Initializer.
Args:
steps: diffusion steps.
"""
self.steps = steps
# embedding
self.pe = 128
self.embeddings = 512
self.mappers = 2
# block
self.channels = 64
self.kernels = 3
self.layers = 10
self.leak = 0.2
|
nilq/baby-python
|
python
|
'''
Simple program to gather all the internal and external links. NOT to be confused with inlinks and outlinks.
Internal links are those links that point to another website within the same domain
External links are those links that point to another website that does NOT share the same domain
Reference link: https://www.thepythoncode.com/article/extract-all-website-links-python
'''
# First step is to import all the basic libraries required.
import requests
from urllib.parse import urlparse, urljoin
from bs4 import BeautifulSoup
import colorama
import pandas as pd
import numpy as np
import time
#Colorama is a simple tool used to display different colors on the terminal
colorama.init()
GREEN = colorama.Fore.GREEN
GRAY = colorama.Fore.LIGHTBLACK_EX
RESET = colorama.Fore.RESET
YELLOW = colorama.Fore.YELLOW
# max_urls defines the maximum number of URLs to crawl before stopping
# The internal and external URLs are put into sets to prevent redundancy
max_urls = 10
internal_urls = set()
external_urls = set()
# Basic method to check whether a given URL is valid or not
def is_valid(url):
"""
Checks whether `url` is a valid URL.
"""
parsed = urlparse(url)
return bool(parsed.netloc) and bool(parsed.scheme)
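# For example:
# is_valid("https://example.com/page")  -> True  (has scheme and netloc)
# is_valid("example.com/page")          -> False (no scheme)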
# The function below gathers all the links; as a first pass we only consider <a> tags.
def get_all_website_links(url):
"""
    Returns all URLs found on `url` that belong to the same website
"""
# all URLs of `url`
urls = set()
# domain name of the URL without the protocol
domain_name = urlparse(url).netloc
soup = BeautifulSoup(requests.get(url).content, "html.parser")
for a_tag in soup.findAll("a"):
href = a_tag.attrs.get("href")
if href == "" or href is None:
# href empty tag
continue
href = urljoin(url, href)
parsed_href = urlparse(href)
# remove URL GET parameters, URL fragments, etc.
href = parsed_href.scheme + "://" + parsed_href.netloc + parsed_href.path
if not is_valid(href):
# not a valid URL
continue
if href in internal_urls:
# already in the set
continue
if domain_name not in href:
# external link
if href not in external_urls:
print(f"{GRAY}[!] External link: {href}{RESET}")
external_urls.add(href)
continue
print(f"{GREEN}[*] Internal link: {href}{RESET}")
urls.add(href)
internal_urls.add(href)
return urls
total_urls_visited = 0
def crawl(url, max_urls):
"""
Crawls a web page and extracts all links.
You'll find all links in `external_urls` and `internal_urls` global set variables.
params:
        max_urls (int): maximum number of URLs to crawl; default is 10.
"""
global total_urls_visited
total_urls_visited += 1
print(f"{YELLOW}[*] Crawling: {url}{RESET}")
links = get_all_website_links(url)
for link in links:
if total_urls_visited > max_urls:
break
crawl(link, max_urls=max_urls)
Sample1 = pd.read_csv('whoisLegi.csv').sample(10)
start = time.time()
count = 1
print("Program starting, for any URLs taking more than 10s press ctrl+c")
for i in Sample1.URL:
print("URL:",count)
count +=1
try:
crawl(i, max_urls)
print("[+] Total Internal links:", len(internal_urls))
print("[+] Total External links:", len(external_urls))
print("[+] Total URLs:", len(external_urls) + len(internal_urls))
print("[+] Total crawled URLs:", max_urls)
    except Exception:
        print("Failed to crawl this URL\n")
continue
end = time.time()
print("Finished execution in:",end-start,"seconds")
|
nilq/baby-python
|
python
|
from setuptools import setup
setup(
name='simplejira',
version='1.0',
description='simplejira',
author='Brandon Squizzato',
author_email='bsquizza@redhat.com',
url='https://www.github.com/bsquizz/simplejira',
packages=['simplejira'],
install_requires=[
'jira',
'pyyaml',
'prompter',
'python-editor',
'attrs',
'prettytable',
'cmd2',
'iso8601',
'six',
'pykerberos',
'python-dateutil',
'requests',
'pbr',
'requests-kerberos',
],
scripts=['bin/simplejira']
)
|
nilq/baby-python
|
python
|
import requests
import jwt
import binascii
from base58 import b58decode_check
from ecdsa import SECP256k1, VerifyingKey, SigningKey
def submitTransaction(signedTransactionHex, nodeURL):
endpointURL = nodeURL + "submit-transaction"
payload = {'TransactionHex': signedTransactionHex}
response = requests.post(endpointURL, json=payload)
return response
def appendExtraData(transactionHex, derivedKey, nodeURL):
payload = {"TransactionHex": transactionHex,
"ExtraData": {"DerivedPublicKey": derivedKey}}
endpoint = nodeURL + "append-extra-data"
response = requests.post(endpoint, json=payload)
return response
def validateJWT(JWT, publicKey):
    # this method is used for public key validation
try:
rawPublicKeyHex = b58decode_check(publicKey)[3:].hex()
public_key = bytes(rawPublicKeyHex, 'utf-8')
public_key = binascii.unhexlify(public_key)
key = VerifyingKey.from_string(public_key, curve=SECP256k1)
key = key.to_pem()
decoded = jwt.decode(JWT, key, algorithms=['ES256'])
return {"isValid": True, "decodedJWT": decoded}
except Exception as e:
return {"isValid": False, "error": str(e)}
def getUserJWT(seedHex):
    # returns the user's JWT token, which supports public key validation in the backend
private_key = bytes(seedHex, 'utf-8')
private_key = binascii.unhexlify(private_key)
key = SigningKey.from_string(private_key, curve=SECP256k1)
key = key.to_pem()
encoded_jwt = jwt.encode({}, key, algorithm="ES256")
return encoded_jwt
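if __name__ == "__main__":
    # Hedged round-trip sketch: generate a throwaway SECP256k1 key pair and
    # check that a JWT signed with the seed validates against the matching
    # public key. The 3-byte base58 prefix is arbitrary here, since
    # validateJWT strips the first three decoded bytes regardless.
    from base58 import b58encode_check
    sk = SigningKey.generate(curve=SECP256k1)
    seedHex = sk.to_string().hex()
    publicKey = b58encode_check(b'\x00\x00\x00' + sk.get_verifying_key().to_string()).decode()
    token = getUserJWT(seedHex)
    print(validateJWT(token, publicKey))  # expect {'isValid': True, ...}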
|
nilq/baby-python
|
python
|
"""A module that provides methods for accessing the Auth API and providing the logged in user details."""
import http
import json
import logging
import fastapi
import fastapi.security
import fastapi.security.http
import requests
from pydantic import BaseModel # pylint:disable=no-name-in-module
import config
logger = logging.getLogger(__name__)
bearer_scheme = fastapi.security.HTTPBearer()
def check_auth_response(response: requests.Response):
"""Review the response from the external API and throw an error if it was forbidden or unauthorized."""
if response.status_code in [http.HTTPStatus.UNAUTHORIZED, http.HTTPStatus.FORBIDDEN]:
try:
body = response.json()
description = body['description'] if 'description' in body else None
except json.decoder.JSONDecodeError:
description = None
raise fastapi.HTTPException(
status_code=response.status_code, detail=description
)
def get_user_from_auth(auth: fastapi.security.http.HTTPAuthorizationCredentials = fastapi.Depends(bearer_scheme)):
"""Make a request to Auth API and return the response body."""
auth_response = requests.get('{}/users/@me'.format(config.AUTH_API_URL),
headers={'Authorization': '{} {}'.format(auth.scheme, auth.credentials)})
check_auth_response(auth_response)
if not auth_response: # status_code is unsuccessful
logger.error('Get User call failed unexpectedly with status {}. Response body: {}'.format(
auth_response.status_code, auth_response.text))
raise fastapi.HTTPException(status_code=http.HTTPStatus.INTERNAL_SERVER_ERROR)
return auth_response.json()
def get_current_user(auth_api_user: dict = fastapi.Depends(get_user_from_auth), account_id: str = fastapi.Header(None)):
"""Parse the provided dict into a User instance."""
return User(user_id=auth_api_user['keycloakGuid'], user_name=auth_api_user['username'], account_id=account_id)
class User(BaseModel):
"""Represents the minimal user details provided by the Auth API."""
user_id: str
user_name: str
account_id: str = None
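# Hedged usage sketch: wiring get_current_user into a route. The FastAPI app
# instance and route path below are illustrative, not part of this module.
# app = fastapi.FastAPI()
#
# @app.get('/whoami')
# def whoami(user: User = fastapi.Depends(get_current_user)):
#     return user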
|
nilq/baby-python
|
python
|
"""
Exercise 7 - Sequence Slicing
Question: List slicing is important in various data manipulation activities. Let's do a few more exercises on that.
Please complete the script so that it prints out the first three items of list letters.
letters = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
Expected output:
['a', 'b', 'c']
"""
letters = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
print(letters[:3])
# ['a', 'b', 'c'] -> 1 point
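# Other slice forms worth knowing: letters[0:3] is equivalent, and
# letters[-3:] gives the last three items (['h', 'i', 'j']).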
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# (C) 2021 gomachssm
import datetime
__copyright__ = f'(C) {datetime.date.today().year} gomachssm'
__version__ = 'dummy' # get from tag, matches v([0-9]+\.[0-9]+\.[0-9]+).
__license__ = 'Apache License, Version 2.0'
__author__ = 'gomachssm'
__url__ = 'https://github.com/gomachssm/twsqlparser'
|
nilq/baby-python
|
python
|
import os
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import standard.analysis as sa
from tools import nicename
import tools
import task
import settings
mpl.rcParams['font.size'] = 7
mpl.rcParams['pdf.fonttype'] = 42
mpl.rcParams['ps.fonttype'] = 42
mpl.rcParams['font.family'] = 'arial'
use_torch = settings.use_torch
def load_activity_tf(save_path, lesion_kwargs=None):
"""Load model activity.
Returns:
"""
import tensorflow as tf
from model import SingleLayerModel, FullModel, NormalizedMLP
# # Reload the network and analyze activity
config = tools.load_config(save_path)
config.label_type = 'sparse'
# Load dataset
train_x, train_y, val_x, val_y = task.load_data(config.data_dir)
tf.reset_default_graph()
if config.model == 'full':
CurrentModel = FullModel
elif config.model == 'singlelayer':
CurrentModel = SingleLayerModel
elif config.model == 'normmlp':
CurrentModel = NormalizedMLP
else:
raise ValueError('Unknown model type ' + str(config.model))
# Build validation model
val_x_ph = tf.placeholder(val_x.dtype, val_x.shape)
val_y_ph = tf.placeholder(val_y.dtype, val_y.shape)
model = CurrentModel(val_x_ph, val_y_ph, config=config, training=False)
# model.save_path = rootpath + model.save_path[1:]
model.save_path = save_path
tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
with tf.Session(config=tf_config) as sess:
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
model.load()
if lesion_kwargs:
model.lesion_units(**lesion_kwargs)
# Validation
glo_out, glo_in, kc_in, kc_out, logits = sess.run(
[model.glo, model.glo_in, model.kc_in, model.kc, model.logits],
{val_x_ph: val_x, val_y_ph: val_y})
# results = sess.run(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES))
return {'glo_in': glo_in, 'glo': glo_out,
'kc_in': kc_in, 'kc': kc_out}
def load_activity_torch(save_path, lesion_kwargs=None):
import torch
from torchmodel import get_model
# Reload the network and analyze activity
config = tools.load_config(save_path)
# Load dataset
train_x, train_y, val_x, val_y = task.load_data(config.data_dir)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
with torch.no_grad():
model = get_model(config)
model.load()
model.to(device)
model.readout()
if lesion_kwargs is not None:
for key, val in lesion_kwargs.items():
model.lesion_units(key, val)
# validation
val_data = torch.from_numpy(val_x).float().to(device)
val_target = torch.from_numpy(val_y).long().to(device)
model.eval()
results = model(val_data, val_target)
for key, val in results.items():
try:
results[key] = val.cpu().numpy()
except AttributeError:
pass
results[key] = np.array(results[key])
return results
def load_activity(save_path, lesion_kwargs=None):
if use_torch:
return load_activity_torch(save_path, lesion_kwargs)
else:
return load_activity_tf(save_path, lesion_kwargs)
def plot_activity(save_path):
results = load_activity(save_path)
save_name = save_path.split('/')[-1]
plt.figure()
plt.hist(results['glo'].flatten(), bins=100)
plt.title('Glo activity distribution')
tools.save_fig(save_path, save_name + '_pn_activity')
plt.figure()
plt.hist(results['kc'].flatten(), bins=100)
plt.title('KC activity distribution')
tools.save_fig(save_path, save_name + '_kc_activity')
def image_activity(save_path, arg, sort_columns=True, sort_rows=True):
def _image(data, zticks, name, xlabel='', ylabel=''):
rect = [0.2, 0.15, 0.6, 0.65]
rect_cb = [0.82, 0.15, 0.02, 0.65]
fig = plt.figure(figsize=(2.6, 2.6))
ax = fig.add_axes(rect)
cm = 'Reds'
im = ax.imshow(data, cmap=cm, vmin=zticks[0], vmax=zticks[1], interpolation='none')
plt.axis('tight')
ax.set_ylabel(nicename(ylabel))
ax.set_xlabel(nicename(xlabel))
ax.spines["right"].set_visible(False)
ax.spines["top"].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.tick_params('both', length=0)
ax.set_xticks([0, data.shape[1]])
ax.set_yticks([0, data.shape[0]])
ax = fig.add_axes(rect_cb)
cb = plt.colorbar(im, cax=ax)
cb.set_ticks(zticks)
cb.outline.set_linewidth(0.5)
cb.set_label('Activity', fontsize=7, labelpad=5)
plt.tick_params(axis='both', which='major', labelsize=7)
cb.ax.tick_params('both', length=0)
plt.axis('tight')
tools.save_fig(save_path, '_' + name, pdf=False)
dirs = tools.get_modeldirs(save_path)
for i, d in enumerate(dirs):
results = load_activity(d)
data = results[arg]
if arg == 'glo_in':
xlabel = 'PN Input'
zticks = [0, 4]
elif arg == 'glo':
xlabel = 'PN'
zticks = [0, 4]
elif arg == 'kc':
xlabel = 'KC'
zticks = [0, 1]
else:
raise ValueError('data type not recognized for image plotting: {}'.format(arg))
if sort_columns:
data = np.sort(data, axis=1)[:,::-1]
if sort_rows:
ix = np.argsort(np.sum(data, axis=1))
data = data[ix,:]
_image(data, zticks=zticks, name = 'image_' + arg + '_' + str(i), xlabel=xlabel, ylabel='Odors')
def _distribution(data, save_path, name, xlabel, ylabel, xrange=None,
title=None, density=False):
fig = plt.figure(figsize=(1.5, 1.5))
ax = fig.add_axes((0.3, 0.25, 0.6, 0.6))
plt.hist(data, bins=30, range=xrange, density=density, align='left')
plt.ticklabel_format(axis="y", style="sci", scilimits=(0, 2))
# xticks = np.linspace(xrange[0], xrange[1], 5)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
if xrange is not None:
plt.xlim(xrange)
# ax.set_xticks(xticks)
plt.locator_params(axis='x', nbins=3)
plt.locator_params(axis='y', nbins=3)
if title is not None:
plt.title(title, fontsize=7)
# ax.set_yticks(np.linspace(0, yrange, 3))
# plt.ylim([0, yrange])
ax.spines["right"].set_visible(False)
ax.spines["top"].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
tools.save_fig(save_path, '_' + name, pdf=True)
def distribution_activity(save_path, var_names=None):
dirs = tools.get_modeldirs(save_path)
if var_names is None:
var_names = ['kc', 'glo']
elif isinstance(var_names, str):
var_names = [var_names]
for d in dirs:
results = load_activity(d)
for var_name in var_names:
data = results[var_name].flatten()
xlabel = tools.nicename(var_name)
ylabel = 'Distribution'
name = 'dist_' + var_name + '_' + tools.get_model_name(d)
figpath = tools.get_experiment_name(d)
_distribution(data, figpath, name=name, density=True,
xlabel=xlabel, ylabel=ylabel)
def sparseness_activity(save_path, var_names, activity_threshold=0.,
lesion_kwargs=None, titlekey=None, figname=None):
"""Plot the sparseness of activity.
    Args:
        save_path: model path, or a list of model directories
        var_names: str or list of str, the activity variable(s) to plot
    """
if isinstance(save_path, str):
dirs = tools.get_modeldirs(save_path)
else:
dirs = save_path
if figname is None:
figname = ''
if isinstance(var_names, str):
var_names = [var_names]
for d in dirs:
results = load_activity(d, lesion_kwargs)
config = tools.load_config(d)
for var_name in var_names:
data = results[var_name]
xrange = [-0.05, 1.05]
if var_name == 'glo':
name = 'PN'
elif var_name == 'kc':
name = 'KC'
else:
raise ValueError('Unknown var name', var_name)
figpath = tools.get_experiment_name(d)
data1 = np.mean(data > activity_threshold, axis=1)
if titlekey is None:
title = None
else:
title = tools.nicename(titlekey) + ' '
title = title + tools.nicename(getattr(config, titlekey),
mode=titlekey)
fname = figname + 'spars_' + var_name + '_' + tools.get_model_name(d)
_distribution(data1, figpath, name=fname, density=False,
xlabel='% of Active '+name+'s', title=title,
ylabel='Number of Odors', xrange=xrange)
data2 = np.mean(data > activity_threshold, axis=0)
fname = figname + 'spars_' + var_name + '2_' + tools.get_model_name(d)
_distribution(data2, figpath, name=fname, density=False,
xlabel='% of Odors', title=title,
ylabel='Number of '+name+'s', xrange=xrange)
def plot_mean_activity_sparseness(save_path, arg, xkey,
loop_key=None, select_dict=None):
dirs = tools.get_modeldirs(save_path)
mean_sparseness = []
for i, d in enumerate(dirs):
results = load_activity(d)
data = results[arg]
activity_threshold = 0
data = np.count_nonzero(data > activity_threshold, axis=1) / data.shape[1]
mean_sparseness.append(data.mean())
for i, d in enumerate(dirs):
config = tools.load_config(d)
setattr(config, arg + '_sparse_mean', mean_sparseness[i])
tools.save_config(config, d)
    sa.plot_results(save_path, xkey=xkey, ykey=arg + '_sparse_mean',
                    ax_args={'yticks': [0, .2, .4, .6, .8]},
figsize=(1.5, 1.5), ax_box=(0.27, 0.25, 0.65, 0.65),
loop_key=loop_key,
select_dict=select_dict)
|
nilq/baby-python
|
python
|
from django.shortcuts import render, reverse
from django.http import HttpResponseRedirect
# Create your views here.
def home(request):
    return render(request, 'home.html')
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Root __init__
"""
__author__ = "Samuel Marks"
__version__ = "0.0.7"
__description__ = "CLI to replace HTTP GET on GitHub API with clones"
|
nilq/baby-python
|
python
|
import logging
from clx.analytics import detector_utils as du
log = logging.getLogger(__name__)
class DetectorDataset(object):
"""
    Wrapper class used to hold the partitioned dataframes and the total number of records across all partitions.
"""
def __init__(self, df, batch_size):
"""This function instantiates partitioned datframes and number of the records in all partitions.
:param df: domains dataframe.
:type df: cudf.DataFrame
:param batch_size: Number of records in the dataframe.
:type batch_size: int
"""
self.__partitioned_dfs, self.__dataset_len = self.__get_partitioned_dfs(
df, batch_size
)
@property
def partitioned_dfs(self):
return self.__partitioned_dfs
@property
def dataset_len(self):
return self.__dataset_len
# https://github.com/rapidsai/cudf/issues/2861
# https://github.com/rapidsai/cudf/issues/1473
# Workaround for partitioning dataframe into small batches
def __get_partitioned_dfs(self, df, batch_size):
"""Partition one dataframe to multiple small dataframes based on a given batch size.
:param df: Contains domains and it's types.
:type df: cudf.DataFrame
:param batch_size: Number of records has to be in each partitioned dataframe.
:type batch_size: int
"""
dataset_len = df["domain"].count()
df = du.str2ascii(df, dataset_len)
prev_chunk_offset = 0
partitioned_dfs = []
while prev_chunk_offset < dataset_len:
curr_chunk_offset = prev_chunk_offset + batch_size
chunk = df.iloc[prev_chunk_offset:curr_chunk_offset:1]
partitioned_dfs.append(chunk)
prev_chunk_offset = curr_chunk_offset
return partitioned_dfs, dataset_len
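# Hedged usage sketch (requires a cudf environment; the "domain" column name
# matches what __get_partitioned_dfs expects):
# import cudf
# df = cudf.DataFrame({"domain": ["example.com", "test.org", "foo.net"]})
# dataset = DetectorDataset(df, batch_size=2)
# dataset.dataset_len           # 3
# len(dataset.partitioned_dfs)  # 2 (one batch of 2 records, one of 1)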
|
nilq/baby-python
|
python
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
from dataclasses import dataclass
import os
from datasets import registry as datasets_registry
from foundations import desc
from foundations import hparams
from foundations.step import Step
from lottery.desc import LotteryDesc
from platforms.platform import get_platform
@dataclass
class TrainingDesc(desc.Desc):
"""The hyperparameters necessary to describe a training run."""
model_hparams: hparams.ModelHparams
dataset_hparams: hparams.DatasetHparams
training_hparams: hparams.TrainingHparams
@staticmethod
def name_prefix(): return 'train'
@staticmethod
def add_args(parser: argparse.ArgumentParser, defaults: LotteryDesc = None):
hparams.DatasetHparams.add_args(parser, defaults=defaults.dataset_hparams if defaults else None)
hparams.ModelHparams.add_args(parser, defaults=defaults.model_hparams if defaults else None)
hparams.TrainingHparams.add_args(parser, defaults=defaults.training_hparams if defaults else None)
@staticmethod
def create_from_args(args: argparse.Namespace) -> 'TrainingDesc':
dataset_hparams = hparams.DatasetHparams.create_from_args(args)
model_hparams = hparams.ModelHparams.create_from_args(args)
training_hparams = hparams.TrainingHparams.create_from_args(args)
return TrainingDesc(model_hparams, dataset_hparams, training_hparams)
def str_to_step(self, s: str) -> Step:
return Step.from_str(s, datasets_registry.iterations_per_epoch(self.dataset_hparams))
@property
def end_step(self):
iterations_per_epoch = datasets_registry.iterations_per_epoch(self.dataset_hparams)
return Step.from_str(self.training_hparams.training_steps, iterations_per_epoch)
@property
def train_outputs(self):
        return datasets_registry.num_classes(self.dataset_hparams)
def run_path(self, replicate, experiment='main'):
return os.path.join(get_platform().root, self.hashname, f'replicate_{replicate}', experiment)
@property
def display(self):
return '\n'.join([self.dataset_hparams.display, self.model_hparams.display, self.training_hparams.display])
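# Hedged usage sketch of the argparse round trip this dataclass supports
# (hparams registries and the platform root come from the wider codebase):
# parser = argparse.ArgumentParser()
# TrainingDesc.add_args(parser)
# desc = TrainingDesc.create_from_args(parser.parse_args())
# print(desc.display)
# print(desc.run_path(replicate=1))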
|
nilq/baby-python
|
python
|
import logging
import numpy as np
import paddle
from ..common import get_logger
from .var_group import *
from .pruning_plan import *
from .filter_pruner import FilterPruner
__all__ = ['L1NormFilterPruner']
_logger = get_logger(__name__, logging.INFO)
class L1NormFilterPruner(FilterPruner):
def __init__(self, model, input_shape, sen_file=None):
super(L1NormFilterPruner, self).__init__(
model, input_shape, sen_file=sen_file)
def cal_mask(self, var_name, pruned_ratio, group):
value = group[var_name]['value']
pruned_dims = group[var_name]['pruned_dims']
reduce_dims = [
i for i in range(len(value.shape)) if i not in pruned_dims
]
l1norm = np.mean(np.abs(value), axis=tuple(reduce_dims))
sorted_idx = l1norm.argsort()
pruned_num = int(round(len(sorted_idx) * pruned_ratio))
pruned_idx = sorted_idx[:pruned_num]
mask_shape = [value.shape[i] for i in pruned_dims]
mask = np.ones(mask_shape, dtype="int32")
mask[pruned_idx] = 0
return mask
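# Minimal numpy sketch of the masking rule in cal_mask above: rank filters by
# the mean absolute value of their weights and zero out the lowest-ranked
# fraction. The shapes and 0.5 ratio are illustrative.
# value = np.random.rand(8, 4, 3, 3)           # [out_ch, in_ch, k, k]
# l1norm = np.abs(value).mean(axis=(1, 2, 3))  # one score per filter
# pruned_idx = l1norm.argsort()[: int(round(8 * 0.5))]
# mask = np.ones(8, dtype="int32")
# mask[pruned_idx] = 0                         # 0 marks filters to prune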
|
nilq/baby-python
|
python
|
# Component to translate alias values into strings
import os, re, logging, json
from pathlib import Path, PosixPath, WindowsPath
_ValAliases = {} # Keeps track of classes which have been registered
def addAliasClass(aliasClass, tag=None):
'''Add a class to the supported alias list so that the ValAlias.makeAlias
method can generate the aliases as needed from text.
Args:
- aliasClass (type): The class to add to the list of supported aliases.
Must be a subclass of the ValAlias class
- [tag=None (str|None)]: The text string (should be lowercase) which
indicates this class. If not specified, the TAG value of the
aliasClass is used instead
Raises:
- ValueError:
- The provided aliasClass is not a subclass of a ValAlias
            - An alias class is already associated with the given tag
- No tag was specified
'''
# Ensure the alias class is indeed an alias class
if not isinstance(aliasClass, type):
raise ValueError('Provided alias class is not a class')
if not issubclass(aliasClass, ValAlias):
raise ValueError('Provided alias class "{0:s}" is not a subclass of a ValAlias'.format(str(aliasClass)))
# If we do not have a tag, get the tag of the alias class
if tag is None:
try:
tag = aliasClass.TAG
except AttributeError:
raise ValueError('No tag provided when adding alias class "{0:s}"'.format(str(aliasClass)))
# Convert tag to string
tag = str(tag)
# Ensure we are not adding a duplicate entry
if tag in _ValAliases:
raise ValueError('Cannot track alias class "{0:s}" with tag "{1:s}" as another alias class ("{2:s}") with that tag already exists'.format(
str(aliasClass), tag, str(_ValAliases[tag])))
# Add the entry
logging.info('Adding alias class {0:s} under tag "{1:s}"'.format(aliasClass.__name__, tag))
_ValAliases[tag] = aliasClass
class ValAlias(object):
'''Class to handle parsing an alias spec in a value string
Alias specs follow the given format:
{alias_name:options}
Where
- alias_name (str): The name of the alias to use
- options (str): An options string specifying the options for the alias
    Please note that escaped braces like \\{ and \\} will be ignored and treated
    as brace literals
Attributes:
- type (str): The main type of the alias
- subtypes (list<str>): Qualifiers to the main type
- parameters (dict<str,*>): Dictionary of parameters the alias uses to
evaluate itself
- file (Path): The file which defined this alias
'''
@staticmethod
def makeAliasDictionary(file):
'''Create an alias dictionary from an alias dictionary .json file
Args:
- file (str|Path): The path to the file to create the dictionary
from
Returns:
- dict<ValAlias>: The aliases contained in the file, indexed by
their names
Raises:
- FileNotFoundError: The provided file could not be found
- ValueError:
- The provided file format does not match an alias dictionary
file
- One or more alias definitions is not formed correctly
'''
# Ensure we have the file
file = Path(file)
if not file.is_file():
raise FileNotFoundError('Could not find file "{0:s}" for making an alias dictionary'.format(str(file)))
# Open and parse the file
ret = {}
with open(file, 'r') as json_file:
aliases = json.load(json_file)
for alias_name in aliases:
ret[alias_name] = ValAlias.makeAlias(aliases[alias_name], file)
# Return output dictionary
return ret
@staticmethod
def makeAlias(definition, file):
'''Create an alias from a given definition
Args:
- definition (dict<str,str>): The definition of the alias. Can
contain:
- 'type': (str) (required) The main type of the alias
- 'subtypes': (list<str>) (optional) Qualifiers to the main
type for the alias
- other: (*) (optional) Parameters the alias uses to evaluate
itself
- file (str|Path): The absolute file path to the file the alias is
defined in.
Return:
- ValAlias: An alias object
Raises:
- ValueError:
- No alias of the given type has been found
- The alias cannot be created from the given dictionary
'''
# Ensure we can find the relevant class
if not 'type' in definition:
raise ValueError('Definition for alias does not specify type')
if not definition['type'] in _ValAliases:
raise ValueError('Could not find alias of type "{0:s}" in list of supported aliases'.format(definition['type']))
# Make and return the alias
return _ValAliases[definition['type']](definition, file)
@staticmethod
def evaluateAliases(value, aliases, parents=None):
'''Parse a value string, replacing aliases with their desired values
Args:
- value (str): The string to replace aliases for
- aliases (list<dict<str,ValAlias>>): The alias dictionaries to use.
If an alias is used across multiple dictionaries, the last
dictionary to define that alias will be used.
- parents=[] (list<str>): A history of aliases whose evaluation
depends on the given value being evaluated
Return:
- (str): The value string with aliases replaced
Raises:
- ValueError: An issue occurred trying to evaluate an alias
- NotImplementedError: One of the provided aliases cannot be
evaluated
'''
# Need to make this list so we can traverse it backwards
alias_iterations = []
if parents is None:
parents = []
# Debug log
parentstr = ''
for parent in parents:
parentstr += parent + ' '
parentstr = parentstr[:-1]
        logging.debug('Evaluating alias "{0:s}" [{1:s}]'.format(value, parentstr))
# Get each alias option and iterate though
alias_pattern = re.compile(r"(?<!\\)\{([^:\}]+)(:([^\{\}]+))?(?<!\\)\}", re.MULTILINE)
alias_strings = alias_pattern.finditer(value)
for alias_m in alias_strings:
# Get the name / options from the match
alias_name = value[alias_m.start(1):alias_m.end(1)]
alias_options = value[alias_m.start(3):alias_m.end(3)]
# Check for circular dependency
if alias_name in parents:
raise ValueError('A circular dependency exists while evaluating alias "{0:s}"'.format(alias_name))
new_parents = parents.copy()
new_parents.append(alias_name)
# Find the matching alias
alias = None
for alias_dict in aliases:
if alias_name in alias_dict:
alias = alias_dict[alias_name]
# Throw exception if we could not find the right alias
if alias is None:
raise ValueError('Could not find alias named "{0:s}" in provided alias dictionaries'.format(alias_name))
# Otherwise evaluate the alias
converted_value = alias.evaluate(alias_options, aliases, new_parents)
# Add it to the list
alias_iterations.append((converted_value, alias_m))
# Going backwards, replace the values in the string
for (converted_value,alias_m) in reversed(alias_iterations):
value = value[:alias_m.start(0)] + converted_value + value[alias_m.end(0):]
logging.debug(value)
# Return
logging.debug(value)
return value.replace('\\{', '{').replace('\\}', '}')
def __init__(self, definition, file):
'''Fill out the basic attributes of the alias
Args:
- definition (dict<str,str>): The definition of the alias. Can
contain:
- 'type': (str) (required) The main type of the alias
- 'subtypes': (list<str>) (optional) Qualifiers to the main
type for the alias
- other: (*) (optional) Parameters the alias uses to evaluate
itself
- file (str|Path): The absolute file path to the file the alias is
defined in.
Raises:
- ValueError: The provided dictionary does not have a 'type' entry
'''
# Save the file
self.file = Path(file)
# Ensure we have a type
if not 'type' in definition:
raise ValueError('Cannot make alias without a specified type')
self.type = definition['type']
# Save details
self.subtypes = []
self.parameters = {}
for parameter in definition:
pvalue = definition[parameter]
if parameter == 'type':
# We've already saved the type
continue
elif parameter == 'subtypes':
# Get the subtypes; either append (for a single value) or copy list
if not isinstance(pvalue, list):
self.subtypes.append(pvalue)
else:
self.subtypes = pvalue
else:
# Copy over remaining parameters
self.parameters[parameter] = pvalue
def evaluate(self, options, other_aliases, parents):
'''Extract the string value an alias evaluates to
TODO: Check to make sure that infinite alias recursion is not possible.
Arguments:
- options (str): The string text for a given option
- other_aliases (list<dict<str,ValAlias>>): The other aliases
available for use when defining values. This is useful for
recursive aliases.
- parents (list<str>): A history of aliases whose evaluation depends
on the given value being evaluated
Return:
- str: The value the alias evaluates to
Raises:
- ValueError:
- The alias does not have enough options to evaluate
- The alias's options are not in the correct format for the
alias
- An issue occurred evaluating the alias
- A circular dependency exists in this evaluation
- NotImplementedError: this type of alias cannot be evaluated
'''
raise NotImplementedError('Cannot evaluate alias of base value alias class')
class StringAlias(ValAlias):
'''Class to handle a basic string replacement alias
Attributes:
- value (str): The string value this alias evaluates to
'''
TAG = 'string'
def __init__(self, definition, file):
'''Fill out the basic attributes of the alias
Subtypes:
Parameters:
- value (str): The value
Args:
- definition (dict<str,str>): The definition of the alias. Can
contain:
- 'type': (str) (required) The main type of the alias
- 'subtypes': (list<str>) (optional) Qualifiers to the main
type for the alias
- other: (*) (optional) Parameters the alias uses to evaluate
itself
- file (str|Path): The absolute file path to the file the alias is
defined in.
Raises:
- ValueError: The alias cannot be created from the given dictionary
'''
# Call the base constructor
super(StringAlias, self).__init__(definition, file)
# Ensure we have a value
if not 'value' in self.parameters:
raise ValueError('Cannot make a string alias without a value parameter')
# Set the value
self.value = self.parameters['value']
def evaluate(self, options, other_aliases, parents):
'''Extract the string value an alias evaluates to
TODO: Check to make sure that infinite alias recursion is not possible.
Arguments:
- options (str): The string text for a given option
- other_aliases (list<dict<str,ValAlias>>): The other aliases
available for use when defining values. This is useful for
recursive aliases.
- parents (list<str>): A history of aliases whose evaluation depends
on the given value being evaluated
Return:
- str: The value the alias evaluates to
Raises:
- ValueError:
- The alias does not have enough options to evaluate
- The alias's options are not in the correct format for the
alias
- An issue occurred evaluating the alias
- A circular dependency exists in this evaluation
- NotImplementedError: this type of alias cannot be evaluated
'''
return ValAlias.evaluateAliases(self.value, other_aliases, parents)
addAliasClass(StringAlias)
class PathAlias(ValAlias):
'''Alias to represent a path
Attributes:
- path (Path): The path to use for evaluation
'''
TAG = 'path'
def __init__(self, definition, file):
'''Fill out the basic attributes of the alias
Subtypes:
abs|rel, [dir|file]
Where the subtypes mean
- abs: The path should be evaluated as an absolute file
- rel: The path should be treated as a relative path
- dir: The path is a directory
- file: The path is a file
Parameters:
- value (str): The string value of the path
Args:
- definition (dict<str,str>): The definition of the alias. Can
contain:
- 'type': (str) (required) The main type of the alias
- 'subtypes': (list<str>) (optional) Qualifiers to the main
type for the alias
- other: (*) (optional) Parameters the alias uses to evaluate
itself
- file (str|Path): The absolute file path to the file the alias is
defined in.
Raises:
- ValueError: The alias cannot be created from the given dictionary
'''
# Call the base constructor
super(PathAlias, self).__init__(definition, file)
# Ensure we have a value
if not 'value' in self.parameters:
raise ValueError('Cannot make a path alias without a value parameter')
# Get the path
self.path = Path(self.parameters['value'])
# Make absolute if required, evaluated relative to definition file
if 'abs' in self.subtypes:
if not self.path.is_absolute():
self.path = self.file.parent.joinpath(self.path)
def evaluate(self, options, other_aliases, parents):
'''Extract the string value an alias evaluates to
TODO: Check to make sure that infinite alias recursion is not possible.
Arguments:
- options (str): The string text for a given option
- other_aliases (list<dict<str,ValAlias>>): The other aliases
available for use when defining values. This is useful for
recursive aliases.
- parents (list<str>): A history of aliases whose evaluation depends
on the given value being evaluated
Return:
- str: The value the alias evaluates to
Raises:
- ValueError:
- The alias does not have enough options to evaluate
- The alias's options are not in the correct format for the
alias
- An issue occurred evaluating the alias
- A circular dependency exists in this evaluation
- NotImplementedError: this type of alias cannot be evaluated
'''
# Get the output string
output = ValAlias.evaluateAliases(str(self.path), other_aliases, parents)
# Add slash for directories if a slash is not already there
if 'dir' in self.subtypes:
if not output[-1] == '/' and not output[-1] == '\\':
output += '/' if isinstance(self.path, PosixPath) else '\\'
return output
addAliasClass(PathAlias)
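# Hedged usage sketch: aliases are normally loaded from a .json dictionary
# file via makeAliasDictionary, but makeAlias also accepts in-memory
# definitions (the names and paths below are illustrative):
# aliases = {
#     'greet': ValAlias.makeAlias({'type': 'string', 'value': 'hello'}, __file__),
#     'root': ValAlias.makeAlias({'type': 'path', 'subtypes': ['dir'], 'value': '/opt/app'}, __file__),
# }
# ValAlias.evaluateAliases('{greet} from {root}', [aliases])
# # -> 'hello from /opt/app/' on POSIX (trailing slash added for the 'dir' subtype)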
|
nilq/baby-python
|
python
|
import json
import multiprocessing
import os
import shutil
from typing import Dict, List, Tuple
import cv2
import numpy as np
from flask import request
from tensorpack.utils import logger
from tqdm import tqdm
from werkzeug import FileStorage
from werkzeug.utils import secure_filename
from zipfile import ZipFile
from mot.object_detection.query_server import \
localizer_tensorflow_serving_inference
from mot.tracker.object_tracking import ObjectTracking
from mot.tracker.video_utils import read_folder, split_video
SERVING_URL = "http://localhost:8501" # the url where the tf-serving container exposes the model
UPLOAD_FOLDER = 'tmp' # folder used to store images or videos when sending files
FPS = 4
RESOLUTION = (1024, 768)
CLASS_NAMES = ["bottles", "others", "fragments"]
SUM_THRESHOLD = 0.6 # the sum of scores for all classes must be greater than this value
# for the prediction to be kept
CLASS_TO_THRESHOLD = {"bottles": 0.4, "others": 0.3, "fragments": 0.3}
CPU_COUNT = min(int(multiprocessing.cpu_count() / 2), 32)
def handle_post_request(upload_folder: str = UPLOAD_FOLDER) -> Dict[str, np.array]:
"""This method is the first one to be called when a POST request is coming. It analyzes the incoming
format (file or JSON) and then call the appropiate methods to do the prediction.
If you want to make a prediction by sending the data as a JSON, it has to be in this format:
```json
{"image":[[[0,0,0],[0,0,0]],[[0,0,0],[0,0,0]]]}
```
or
```json
{"video": TODO}
```
Arguments:
    - *upload_folder*: Where the files are temporarily stored
Returns:
- *Dict[str, np.array]*: The predictions of the TF serving module
Raises:
- *NotImplementedError*: If the format of data isn't handled yet
"""
if "file" in request.files:
return handle_file(request.files['file'], upload_folder, **request.form)
data = json.loads(request.data.decode("utf-8"))
if "image" in data:
image = np.array(data["image"])
return {"detected_trash": predict_and_format_image(image)}
if "video" in data:
raise NotImplementedError("video")
raise ValueError(
"Error during the reading of JSON. Keys {} aren't valid ones.".format(data.keys()) +
"For an image, send a JSON such as {'image': [0, 0, 0]}." +
"Sending videos over JSON isn't implemented yet."
)
def handle_file(
file: FileStorage,
upload_folder: str = UPLOAD_FOLDER,
fps: int = FPS,
resolution: Tuple[int, int] = RESOLUTION,
**kwargs
) -> Dict[str, np.array]:
"""Make the prediction if the data is coming from an uploaded file.
Arguments:
- *file*: The file, can be either an image or a video, or a zipped folder
    - *upload_folder*: Where the files are temporarily stored
Returns:
- for an image: a json of format
```json
{
"image": filename,
"detected_trash":
[
{
"box": [1, 1, 2, 20],
"label": "fragments",
"score": 0.92
}, {
"box": [10, 10, 25, 20],
"label": "bottles",
"score": 0.75
}
]
}
```
- for a video or a zipped file: a json of format
```json
{
"video_length": 132,
"fps": 2,
"video_id": "GOPRO1234.mp4",
"detected_trash":
[
{
"label": "bottles",
"id": 0,
"frame_to_box": {
23: [0, 0, 1, 10],
24: [1, 1, 4, 13]
}
}, {
"label": "fragments",
"id": 1,
"frame_to_box": {
12: [10, 8, 9, 15]
}
}
]
}
```
Raises:
- *NotImplementedError*: If the format of data isn't handled yet
"""
if kwargs:
logger.warning("Unused kwargs: {}".format(kwargs))
filename = secure_filename(file.filename)
full_filepath = os.path.join(upload_folder, filename)
if not os.path.isdir(upload_folder):
os.mkdir(upload_folder)
if os.path.isfile(full_filepath):
os.remove(full_filepath)
file.save(full_filepath)
file_type = file.mimetype.split("/")[0]
# mimetype is for example 'image/png' and we only want the image
if file_type == "image":
image = cv2.imread(full_filepath) # cv2 opens in BGR
os.remove(full_filepath) # remove it as we don't need it anymore
try:
detected_trash = predict_and_format_image(image)
except ValueError as e:
return {"error": str(e)}
return {"image": filename, "detected_trash": detected_trash}
elif file_type in ["video", "application"]:
folder = None
if file.mimetype == "application/zip":
            # zip case: extract every member, remembering the directory name
            dirname = None
            with ZipFile(full_filepath, 'r') as zipObj:
                for fileName in zipObj.namelist():
                    dirname = os.path.dirname(fileName)
                    zipObj.extract(fileName, upload_folder)
            folder = os.path.join(upload_folder, dirname)
else:
# video case: splitting video and saving frames
folder = os.path.join(upload_folder, "{}_split".format(filename))
if os.path.isdir(folder):
shutil.rmtree(folder)
os.mkdir(folder)
logger.info("Splitting video {} to {}.".format(full_filepath, folder))
split_video(full_filepath, folder, fps=fps, resolution=resolution)
print("folder:", folder, "uplaod_folder:", upload_folder, "file.filename:", file.filename)
image_paths = read_folder(folder)
if len(image_paths) == 0:
raise ValueError("No output image")
# making inference on frames
logger.info("{} images to analyze on {} CPUs.".format(len(image_paths), CPU_COUNT))
try:
with multiprocessing.Pool(CPU_COUNT) as p:
inference_outputs = list(
tqdm(
p.imap(process_image, image_paths),
total=len(image_paths),
)
)
except ValueError as e:
return {"error": str(e)}
logger.info("Finish analyzing video {}.".format(full_filepath))
# tracking objects
logger.info("Starting tracking.")
object_tracker = ObjectTracking(filename, image_paths, inference_outputs, fps=fps)
tracks = object_tracker.compute_tracks()
logger.info("Tracking finished.")
return object_tracker.json_result(tracks)
else:
raise NotImplementedError(file_type)
def process_image(image_path: str) -> Dict[str, object]:
"""Function used to open and predict on an image. It is suposed to be used in multiprocessing.
Arguments:
- *image_path*
Returns:
- *Dict[str, object]*: Predictions for this image path
```python
predictions = {
'output/boxes:0': [[0, 0, 1, 1], [0, 0, 10, 10], [10, 10, 15, 100]],
'output/labels:0': [3, 1, 2], # the labels start at 1 since 0 is for background
'output/scores:0': [0.98, 0.87, 0.76] # sorted in descending order
}
```
"""
image = cv2.imread(image_path) # cv2 opens in BGR
return localizer_tensorflow_serving_inference(image, SERVING_URL, return_all_scores=True)
def predict_and_format_image(
image: np.ndarray,
class_names: List[str] = CLASS_NAMES,
class_to_threshold: Dict[str, float] = CLASS_TO_THRESHOLD
) -> List[Dict[str, object]]:
"""Make prediction on an image and return them in a human readable format.
Arguments:
- *image*: An numpy array in BGR
- *class_names*: The list of class names without background
- *class_to_threshold*: A dict assigning class names to threshold. If a class name isn't in
this dict, no threshold will be applied, which means that all predictions for this class
will be kept.
Returns:
- *List[Dict[str, object]]*: List of dicts such as:
```python3
{
"box": [1, 1, 2, 20],
"label": "fragments",
"score": 0.92
}
```
"""
class_names = ["BG"] + class_names
outputs = localizer_tensorflow_serving_inference(image, SERVING_URL, return_all_scores=False)
detected_trash = []
for box, label, score in zip(
outputs["output/boxes:0"], outputs["output/labels:0"], outputs["output/scores:0"]
):
if keep_prediction(class_names, label, class_to_threshold, score):
trash_json = {
"box": [round(coord, 2) for coord in box],
"label": class_names[label],
"score": score,
}
detected_trash.append(trash_json)
return detected_trash
def keep_prediction(class_names, label, class_to_threshold, score):
if isinstance(score, list): # we have scores for all classes
if np.array(score).sum() < SUM_THRESHOLD:
return False
return True
return class_names[label] not in class_to_threshold or score >= class_to_threshold[
class_names[label]]
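# Examples of the thresholding rule above:
# keep_prediction(["BG"] + CLASS_NAMES, 1, CLASS_TO_THRESHOLD, 0.5)
# # -> True  (single score 0.5 >= 0.4, the "bottles" threshold)
# keep_prediction(["BG"] + CLASS_NAMES, 1, CLASS_TO_THRESHOLD, [0.1, 0.1, 0.1])
# # -> False (per-class scores sum to 0.3 < SUM_THRESHOLD of 0.6)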
|
nilq/baby-python
|
python
|
import numpy
def print_table(table, path):
f = open(path, 'w')
for row in range(len(table)):
for col in range(len(table[row])):
f.write(str(table[row][col]))
f.write(' ')
print(table[row][col], end=' ')
if col == len(table[row])-1:
print("\n")
f.write('\n')
S0_Box = (
(0x3e,0x72,0x5b,0x47,0xca,0xe0,0x00,0x33,0x04,0xd1,0x54,0x98,0x09,0xb9,0x6d,0xcb),
(0x7b,0x1b,0xf9,0x32,0xaf,0x9d,0x6a,0xa5,0xb8,0x2d,0xfc,0x1d,0x08,0x53,0x03,0x90),
(0x4d,0x4e,0x84,0x99,0xe4,0xce,0xd9,0x91,0xdd,0xb6,0x85,0x48,0x8b,0x29,0x6e,0xac),
(0xcd,0xc1,0xf8,0x1e,0x73,0x43,0x69,0xc6,0xb5,0xbd,0xfd,0x39,0x63,0x20,0xd4,0x38),
(0x76,0x7d,0xb2,0xa7,0xcf,0xed,0x57,0xc5,0xf3,0x2c,0xbb,0x14,0x21,0x06,0x55,0x9b),
(0xe3,0xef,0x5e,0x31,0x4f,0x7f,0x5a,0xa4,0x0d,0x82,0x51,0x49,0x5f,0xba,0x58,0x1c),
(0x4a,0x16,0xd5,0x17,0xa8,0x92,0x24,0x1f,0x8c,0xff,0xd8,0xae,0x2e,0x01,0xd3,0xad),
(0x3b,0x4b,0xda,0x46,0xeb,0xc9,0xde,0x9a,0x8f,0x87,0xd7,0x3a,0x80,0x6f,0x2f,0xc8),
(0xb1,0xb4,0x37,0xf7,0x0a,0x22,0x13,0x28,0x7c,0xcc,0x3c,0x89,0xc7,0xc3,0x96,0x56),
(0x07,0xbf,0x7e,0xf0,0x0b,0x2b,0x97,0x52,0x35,0x41,0x79,0x61,0xa6,0x4c,0x10,0xfe),
(0xbc,0x26,0x95,0x88,0x8a,0xb0,0xa3,0xfb,0xc0,0x18,0x94,0xf2,0xe1,0xe5,0xe9,0x5d),
(0xd0,0xdc,0x11,0x66,0x64,0x5c,0xec,0x59,0x42,0x75,0x12,0xf5,0x74,0x9c,0xaa,0x23),
(0x0e,0x86,0xab,0xbe,0x2a,0x02,0xe7,0x67,0xe6,0x44,0xa2,0x6c,0xc2,0x93,0x9f,0xf1),
(0xf6,0xfa,0x36,0xd2,0x50,0x68,0x9e,0x62,0x71,0x15,0x3d,0xd6,0x40,0xc4,0xe2,0x0f),
(0x8e,0x83,0x77,0x6b,0x25,0x05,0x3f,0x0c,0x30,0xea,0x70,0xb7,0xa1,0xe8,0xa9,0x65),
(0x8d,0x27,0x1a,0xdb,0x81,0xb3,0xa0,0xf4,0x45,0x7a,0x19,0xdf,0xee,0x78,0x34,0x60)
)
S1_Box = (
(0x55,0xc2,0x63,0x71,0x3b,0xc8,0x47,0x86,0x9f,0x3c,0xda,0x5b,0x29,0xaa,0xfd,0x77),
(0x8c,0xc5,0x94,0x0c,0xa6,0x1a,0x13,0x00,0xe3,0xa8,0x16,0x72,0x40,0xf9,0xf8,0x42),
(0x44,0x26,0x68,0x96,0x81,0xd9,0x45,0x3e,0x10,0x76,0xc6,0xa7,0x8b,0x39,0x43,0xe1),
(0x3a,0xb5,0x56,0x2a,0xc0,0x6d,0xb3,0x05,0x22,0x66,0xbf,0xdc,0x0b,0xfa,0x62,0x48),
(0xdd,0x20,0x11,0x06,0x36,0xc9,0xc1,0xcf,0xf6,0x27,0x52,0xbb,0x69,0xf5,0xd4,0x87),
(0x7f,0x84,0x4c,0xd2,0x9c,0x57,0xa4,0xbc,0x4f,0x9a,0xdf,0xfe,0xd6,0x8d,0x7a,0xeb),
(0x2b,0x53,0xd8,0x5c,0xa1,0x14,0x17,0xfb,0x23,0xd5,0x7d,0x30,0x67,0x73,0x08,0x09),
(0xee,0xb7,0x70,0x3f,0x61,0xb2,0x19,0x8e,0x4e,0xe5,0x4b,0x93,0x8f,0x5d,0xdb,0xa9),
(0xad,0xf1,0xae,0x2e,0xcb,0x0d,0xfc,0xf4,0x2d,0x46,0x6e,0x1d,0x97,0xe8,0xd1,0xe9),
(0x4d,0x37,0xa5,0x75,0x5e,0x83,0x9e,0xab,0x82,0x9d,0xb9,0x1c,0xe0,0xcd,0x49,0x89),
(0x01,0xb6,0xbd,0x58,0x24,0xa2,0x5f,0x38,0x78,0x99,0x15,0x90,0x50,0xb8,0x95,0xe4),
(0xd0,0x91,0xc7,0xce,0xed,0x0f,0xb4,0x6f,0xa0,0xcc,0xf0,0x02,0x4a,0x79,0xc3,0xde),
(0xa3,0xef,0xea,0x51,0xe6,0x6b,0x18,0xec,0x1b,0x2c,0x80,0xf7,0x74,0xe7,0xff,0x21),
(0x5a,0x6a,0x54,0x1e,0x41,0x31,0x92,0x35,0xc4,0x33,0x07,0x0a,0xba,0x7e,0x0e,0x34),
(0x88,0xb1,0x98,0x7c,0xf3,0x3d,0x60,0x6c,0x7b,0xca,0xd3,0x1f,0x32,0x65,0x04,0x28),
(0x64,0xbe,0x85,0x9b,0x2f,0x59,0x8a,0xd7,0xb0,0x25,0xac,0xaf,0x12,0x03,0xe2,0xf2)
)
def LAT_dot( a , b ):
a = "{0:08b}".format(a)
b = "{0:08b}".format(b)
out = 0
if a[0]=='1' and b[0]=='1':
out = 1
for i in range(1,8):
if a[i]=='1' and b[i]=='1':
out = out^1
else:
out = out ^ 0
return out
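# Note: LAT_dot(a, b) is the GF(2) inner product of the 8-bit vectors a and b,
# i.e. the parity of the bitwise AND. A compact equivalent:
# bin(a & b).count('1') % 2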
LAT0 = numpy.zeros( (256,256) )
LAT0 = LAT0.astype(int)
LAT1 = numpy.zeros( (256,256) )
LAT1 = LAT1.astype(int)
def compute_LAT(s_box, LAT):
DOT = numpy.zeros( (256,256) )
DOT = DOT.astype(int)
sbox_val = []
for p2 in range(256):
col = p2 >> 4
row = p2 & 15
sbox_val.append( s_box[row][col] )
for p1 in range(256):
for p2 in range(256):
DOT[p1][p2] = LAT_dot(p1,p2)
for a in range(256):
for b in range(256):
for i in range(256):
LAT[a][b] += DOT[a][i]^(DOT[b,sbox_val[i]])
LAT[a][b] = 256 - LAT[a][b]
LAT[a][b] = LAT[a][b] - 128
#compute S0 LAT
print('*************************ZUC S0 LAT******************')
compute_LAT(S0_Box, LAT0)
print_table(LAT0, './ZUC_S0_LAT.txt')
print('\n')
#compute S1 LAT
print('*************************ZUC S1 LAT******************')
compute_LAT(S1_Box, LAT1)
print_table(LAT1, './ZUC_S1_LAT.txt')
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# created: 2021-06-30
# creator: liguopeng@liguopeng.net
import asyncio
import logging
import threading
from abc import abstractmethod
from datetime import datetime
import paho.mqtt.client as mqtt
from gcommon.server.server_config import ServerConfig
from gcommon.utils import gtime
logger = logging.getLogger("mqtt")
class MqttConfig(ServerConfig):
pass
class MqttObserverBase(object):
mqtt_listener = None
def set_mqtt_listener(self, listener):
self.mqtt_listener = listener
@abstractmethod
def on_mqtt_connected(self, _client, _user_data, _flags, rc):
print(_client)
    def on_mqtt_message(self, _client, _user_data, message):
print(message.payload)
class MqttListener(threading.Thread):
def __init__(self, config: MqttConfig, observer: MqttObserverBase):
threading.Thread.__init__(self)
        # daemon thread, so the program can exit after Ctrl-C
self.daemon = True
self.observer = observer
self.config = config
client_id = "rcs" + gtime.date_str_by_minute()
self.client = mqtt.Client(client_id=client_id)
# asyncio loop
self.loop = asyncio.get_running_loop()
def run(self) -> None:
"""ๆณจๆ๏ผๆๆๅ่ฐๅฝๆฐ้ฝๅจ็ฌ็ซ็บฟ็จไธญๆง่ก"""
self.client.on_connect = self.on_connect
self.client.on_message = self.on_message
self.client.on_subscribe = self.on_subscribe
# ๅปบ็ซ่ฟๆฅ
if self.config.enable_ssl:
self.client.tls_set()
self.client.connect(self.config.server_address, self.config.server_port, 60)
self.client.username_pw_set(self.config.username, self.config.password)
self.client.loop_forever()
def on_subscribe_v5(self, client, userdata, mid, reasonCodes, properties):
pass
def on_subscribe(self, client, userdata, mid, granted_qos):
pass
def on_connect(self, client, userdata, flags, rc):
logger.info('Connected with result code: %s, msg: %s',
str(rc), mqtt.error_string(rc))
if rc != mqtt.MQTT_ERR_SUCCESS:
return
# client.subscribe('robot/')
assert client == self.client
# self.client.subscribe("robot/+/topic/task_status")
self.loop.call_soon_threadsafe(self.observer.on_mqtt_connected, client, userdata, flags, rc)
def subscribe(self, topic, qos=0, options=None, properties=None):
result, mid = self.client.subscribe(topic, qos, options, properties)
if result != mqtt.MQTT_ERR_SUCCESS:
logger.error('cannot subscribe topic: %s, code: %s, msg: %s',
topic, result, mqtt.error_string(result))
return False
return True
def unsubscribe(self, topic, properties=None):
self.client.unsubscribe(topic, properties)
@abstractmethod
def on_message(self, client, userdata, message):
logger.info(message.topic + " " + str(message.payload))
self.loop.call_soon_threadsafe(self.observer.on_mqtt_message, client, userdata, message)
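# Hedged usage sketch (MqttConfig construction depends on ServerConfig and is
# only indicated here; MyObserver is an illustrative subclass):
# class MyObserver(MqttObserverBase):
#     def on_mqtt_connected(self, client, user_data, flags, rc):
#         self.mqtt_listener.subscribe('some/topic')
#
# observer = MyObserver()
# listener = MqttListener(config, observer)  # must be created inside a running asyncio loop
# observer.set_mqtt_listener(listener)
# listener.start()                           # paho network loop runs in the daemon thread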
|
nilq/baby-python
|
python
|
from scipy.optimize import minimize
from numpy.random import random
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm, ticker
from matplotlib.colors import LogNorm
import numpy as np
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from matplotlib import pyplot
import timeit
def beale(x):
f1 = 1.5 - x[0] * ( 1.0 - x[1] )
f2 = 2.25 - x[0] * ( 1.0 - x[1] ** 2 )
f3 = 2.625 - x[0] * ( 1.0 - x[1] ** 3 )
f = f1 ** 2 + f2 ** 2 + f3 ** 2
return f
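# Beale's function has its global minimum f(3, 0.5) = 0, which is where the
# optimizer traces plotted below converge.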
# Plot the function
fig = plt.figure()
ax = Axes3D(fig, azim = -128, elev = 43)
s = .1
X = np.arange(-5, 5.+s, s)
Y = np.arange(-5, 5.+s, s)
X, Y = np.meshgrid(X, Y)
Z = beale([X, Y])
ax.plot_surface(X, Y, Z, rstride = 1, cstride = 1, norm=LogNorm(), cmap = cm.jet, linewidth=0, edgecolor='none')
plt.xlabel("x")
plt.ylabel("y")
plt.title("Beale's")
plt.savefig('beale.png')
#########################################
x0s = []
for i in range(0, 30):
x0 = (random(2)-1)*20
x0s.append(x0)
iters = []
feval = []
sol = []
objective = []
times= []
for i in range(0, 30):
start_time = timeit.default_timer()
output = minimize(beale, x0s[i], method='L-BFGS-B', options= {'disp': True})
times.append(timeit.default_timer() - start_time)
iters.append(output.nit)
feval.append(output.nfev)
sol.append(output.x)
objective.append(output.fun)
#####################################
delta = 0.05
s = 0.05
X = np.arange(-3, 5, delta)
Y = np.arange(-3, 3, delta)
X, Y = np.meshgrid(X, Y)
Z = beale([X, Y])
levels = np.arange(10, 300, 10)
#plt.contour(X, Y, Z, levels=levels, norm=LogNorm())
plt.contour(X, Y, Z, levels=[0.1, 0.2, 0.3, 0.5, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 30, 50, 60, 63, 66, 70, 75, 80, 100])
plt.title('Isolines')
plt.xlabel('X1')
plt.ylabel('X2')
xs = []
ys = []
def bstop(xk):
xs.append(np.copy(xk))
ys.append(beale(xk))
xs = [np.array([-1, -1])]
ys = [beale(xs[0])]
minimize(beale, [-1, -1], method='BFGS', callback=bstop, options= {'disp': True})
linex = [-1]
liney = [-1]
for i in xs:
linex.append(i[0])
liney.append(i[1])
bfgs_y = list(ys)
bfgs, = plt.plot(linex, liney, '-o', label='BFGS')
xs = [np.array([-1, -1])]
ys = [beale(xs[0])]
minimize(beale, [-1, -1], method='L-BFGS-B', callback=bstop, options= {'disp': True})
linex = [-1]
liney = [-1]
for i in xs:
linex.append(i[0])
liney.append(i[1])
lbfgsb_y = list(ys)
lbfgsb, = plt.plot(linex, liney, '-s', label='L-BFGS-B')
xs = [
np.array([-1, -1]),
np.array([0, -2.076923e-01]),
np.array([1.101268e+00, -9.677930e-01]),
np.array([8.970397e-01, -5.260371e-01]),
np.array([1.085339e+00, -5.058077e-01]),
np.array([1.832440e+00, -2.907016e-01]),
np.array([2.198566e+00, -5.155961e-02]),
np.array([2.692337e+00, 3.684094e-01]),
np.array([2.789503e+00, 4.511403e-01]),
np.array([2.795133e+00, 4.487888e-01]),
np.array([2.818547e+00, 4.483392e-01]),
np.array([2.840796e+00, 4.519267e-01]),
np.array([2.885289e+00, 4.612113e-01]),
np.array([2.923265e+00, 4.707860e-01]),
np.array([2.980495e+00, 4.865466e-01]),
np.array([3.024381e+00, 4.997452e-01]),
np.array([3.043476e+00, 5.064746e-01]),
np.array([3.047318e+00, 5.090894e-01]),
np.array([3.042225e+00, 5.097113e-01]),
np.array([3.030713e+00, 5.080590e-01]),
np.array([3.016008e+00, 5.050824e-01]),
np.array([3.006359e+00, 5.026518e-01]),
np.array([2.999553e+00, 5.005949e-01]),
np.array([2.997714e+00, 4.997436e-01]),
np.array([2.998416e+00, 4.996591e-01]),
np.array([2.999443e+00, 4.998514e-01]),
np.array([2.999928e+00, 4.999741e-01]),
np.array([3.000001e+00, 4.999987e-01])
]
ys = [
3.870312e+01,
1.420312e+01,
5.474402e+00,
5.132615e+00,
4.056161e+00,
1.634935e+00,
8.440893e-01,
5.062609e-02,
1.015695e-02,
8.785395e-03,
6.671388e-03,
5.511229e-03,
3.959797e-03,
2.900633e-03,
1.691332e-03,
1.011259e-03,
6.995383e-04,
4.831696e-04,
2.805545e-04,
1.529509e-04,
7.094062e-05,
3.357103e-05,
1.152309e-05,
3.063215e-06,
4.650093e-07,
5.222919e-08,
2.294078e-09,
4.511352e-11,
1.837179e-13
]
linex = []
liney = []
for i in xs:
linex.append(i[0])
liney.append(i[1])
powell_y = list(ys)
powell, = plt.plot(linex, liney, '-^', label='DFP')
plt.legend(handles=[bfgs, lbfgsb, powell])
plt.title('Isolines')
plt.xlabel('x1')
plt.ylabel('x2')
plt.figure()
b, = plt.plot(bfgs_y, '-o', label='BFGS')
l, = plt.plot(lbfgsb_y, '-s', label='L-BFGS-B')
p, = plt.plot(powell_y, '-^', label='DFP')
pyplot.yscale('log')
plt.grid(True)
plt.title('Objective')
plt.legend(handles=[b, l, p])
plt.xlabel('Number of Iterations')
plt.ylabel('Objective')
|
nilq/baby-python
|
python
|
# Generated by Django 3.0.11 on 2020-11-11 20:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posts', '0003_auto_20201111_1505'),
]
operations = [
migrations.AlterField(
model_name='language',
name='listing',
field=models.ManyToManyField(blank=True, related_name='languages', to='posts.Post'),
),
migrations.AlterField(
model_name='tool',
name='listing',
field=models.ManyToManyField(blank=True, related_name='tools', to='posts.Post'),
),
]
|
nilq/baby-python
|
python
|
## Written by Daniel Buscombe,
## MARDA Science
## daniel@mardascience.com
##> Release v1.3 (July 2020)
###===================================================
# import libraries
import sys, getopt, json, os
# set to False if you wish to use cpu (not recommended)
##True or False
USE_GPU = True
# PREDICT = False
#
# ##OS
# if PREDICT == True:
# os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
if USE_GPU == True:
##use the first available GPU
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
else:
## to use the CPU (not recommended):
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
from numpy import any as npany
from sedinet_infer import *
#==============================================================
if __name__ == '__main__':
argv = sys.argv[1:]
try:
opts, args = getopt.getopt(argv,"h:c:")
except getopt.GetoptError:
print('python sedinet_train.py -c configfile.json')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print(
'Example usage: python sedinet_train.py -c config/config_9percentiles.json'
)
sys.exit()
elif opt in ("-c"):
configfile = arg
# load the user configs
with open(os.getcwd()+os.sep+configfile) as f:
config = json.load(f)
###===================================================
## user-defined variables read from the config file
train_csvfile = config["train_csvfile"]
#csvfile containing image names and class values
test_csvfile = config["test_csvfile"]
#csvfile containing image names and class values
res_folder = config["res_folder"]
#folder containing csv file and that will contain model outputs
name = config["name"]
#name prefix for output files
dropout = config["dropout"]
#dropout factor
scale = config["scale"] #do scaling on variable
try:
    numclass = config['numclass']
except KeyError:
    numclass = 0
#convert imagery to greyscale or not (optional; defaults to True)
try:
    greyscale = config['greyscale']
except KeyError:
    greyscale = True
#output variables
vars = [k for k in config.keys() if not npany([k.startswith('base'),
k.startswith('MIN_LR'), k.startswith('DO_AUG'), k.startswith('SHALLOW'), k.startswith('MAX_LR'),
k.startswith('res_folder'), k.startswith('train_csvfile'), k.startswith('csvfile'),
k.startswith('test_csvfile'), k.startswith('name'),
k.startswith('greyscale'), k.startswith('aux_in'),
k.startswith('dropout'), k.startswith('N'),
k.startswith('scale'), k.startswith('numclass')])]
vars = sorted(vars)
auxin = [k for k in config.keys() if k.startswith('aux_in')]
if len(auxin) > 0:
auxin = config[auxin[0]]
##at least for now, just one 'auxiliary'
## (numerical/categorical) input in addition to imagery
if len(vars) ==1:
mode = 'miso'
elif len(vars) >1:
mode = 'mimo'
else:
if len(vars) ==1:
mode = 'siso'
elif len(vars) >1:
mode = 'simo'
print("Mode: %s" % (mode))
###==================================================
train_csvfile = res_folder+os.sep+train_csvfile
test_csvfile = res_folder+os.sep+test_csvfile
if (mode=='siso' or mode=='simo'):
run_training_siso_simo(vars, train_csvfile, test_csvfile,
name, res_folder, mode, greyscale,
dropout, numclass, scale)
# if (mode=='miso' or mode=='mimo'):
# run_training_miso_mimo(vars, train_csvfile, test_csvfile,
# name, res_folder, mode, greyscale,
# auxin, dropout, numclass, scale)
|
nilq/baby-python
|
python
|
"""Expected errors."""
import inspect
import sys
UNREPRODUCIBLE_SUGGESTION_TEXT = (
'Here are things you can try:\n'
'- Run outside XVFB (e.g. you will be able to see the launched program '
'on screen.) with `--disable-xvfb`, which is especially useful for '
'Chrome.\n'
'- Run with the downloaded build by adding `--build download`.\n'
'- Run `build/install-build-deps.sh` to ensure all dependencies are '
'installed.\n'
'- Run with more number of trials by adding `-i 10`, '
'which is especially good for gesture-related testcases.\n'
'- Use gdb to debug by adding `--enable-debug`.')
def get_class(exit_code):
"""Get class name given an exit code."""
code_to_klass = {}
for _, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and obj != ExpectedException:
if obj.EXIT_CODE not in code_to_klass:
code_to_klass[obj.EXIT_CODE] = obj
else:
raise Exception(
'%s and %s have the same exit code.' % (
code_to_klass[obj.EXIT_CODE].__name__, obj.__name__))
return code_to_klass.get(exit_code, UnknownExitCodeError)
class ExpectedException(Exception):
"""A general Exception to extend from."""
def __init__(self, message, exit_code, extras=None):
super(ExpectedException, self).__init__(message)
self.extras = extras
self.exit_code = exit_code
class UnknownExitCodeError(ExpectedException):
"""Represents an unknown exit code error."""
EXIT_CODE = 256
class MinimizationNotFinishedError(ExpectedException):
"""Raise when the minimize_task failed or hasn't finished yet. When the
minimization is not finished, we won't find 'Running command: ' in the
stacktrace."""
MESSAGE = (
'The testcase hasn\'t been minimized yet or cannot be minimized.\n'
'If the testcase is new, please wait for a few more hours.\n'
'If we can\'t minimize the testcase, it means the testcase is '
'unreproducible and, thus, not supported by this tool.\n'
'If this testcase was found by AFL or libFuzzer, you can use the "-f" '
'flag to force this tool to try to reproduce the testcase.'
)
EXIT_CODE = 42
def __init__(self):
super(MinimizationNotFinishedError, self).__init__(
self.MESSAGE, self.EXIT_CODE)
class SanitizerNotProvidedError(ExpectedException):
"""An error to notify when a sanitizer isn't passed to a Definition"""
MESSAGE = 'A sanitizer must be provided with each Definition.'
EXIT_CODE = 43
def __init__(self):
super(SanitizerNotProvidedError, self).__init__(
self.MESSAGE, self.EXIT_CODE)
class ClusterFuzzError(ExpectedException):
"""An exception to deal with clusterfuzz.com's errors.
Makes the response dict available for inspection later on when
the exception is dealt with."""
MESSAGE = (
"Error calling clusterfuzz.com's API.\n"
'User: {identity}\n'
"Response: {response}")
EXIT_CODE = 44
def __init__(self, status_code, response, identity):
super(ClusterFuzzError, self).__init__(
self.MESSAGE.format(response=str(response), identity=identity),
self.EXIT_CODE)
self.status_code = status_code
self.response = response
self.identity = identity
class PermissionsTooPermissiveError(ExpectedException):
"""An exception to deal with file permissions errors.
Stores the filename and the current permissions."""
MESSAGE = ('File permissions too permissive to open {filename}\n'
'Current permissions: {permission}\nExpected user access only'
'\nYou can run "chmod 600 {filename}" to fix this issue')
EXIT_CODE = 45
def __init__(self, filename, current_permissions):
super(PermissionsTooPermissiveError, self).__init__(
self.MESSAGE.format(filename=filename, permission=current_permissions),
self.EXIT_CODE)
self.filename = filename
self.current_permissions = current_permissions
class GomaNotInstalledError(ExpectedException):
"""An exception to tell people GOMA isn not installed."""
MESSAGE = ('Either goma is not installed, or $GOMA_DIR is not set.'
' Please set up goma before continuing. '
'See go/ma to learn more.\n\n'
"If you wouldn't like to use goma, "
'please re-run with --disable-goma.')
EXIT_CODE = 46
def __init__(self):
super(GomaNotInstalledError, self).__init__(self.MESSAGE, self.EXIT_CODE)
class JobTypeNotSupportedError(ExpectedException):
"""An exception raised when user tries to run an unsupported build type."""
# pylint: disable=line-too-long
MESSAGE = (
'Unfortunately, the job {job_type} is not yet supported. '
'If you believe that the crash will occur on Linux as well, please go '
'to https://clusterfuzz.com/upload-testcase?upload=true&testcaseId={testcase_id} '
'and choose a corresponding Linux job type. Ask us for help at '
'clusterfuzz-dev@chromium.org.')
# pylint: enable=line-too-long
EXIT_CODE = 47
def __init__(self, job_type, testcase_id):
super(JobTypeNotSupportedError, self).__init__(
self.MESSAGE.format(job_type=job_type, testcase_id=testcase_id),
self.EXIT_CODE)
class NotInstalledError(ExpectedException):
"""An exception raised to tell the user to install the required binary."""
MESSAGE = (
'{binary} is not found. Please install it or ensure the path is '
'correct.\n'
'Most of the time you can install it with `apt-get install {binary}`.')
EXIT_CODE = 48
def __init__(self, binary):
super(NotInstalledError, self).__init__(
self.MESSAGE.format(binary=binary), self.EXIT_CODE)
class GsutilNotInstalledError(ExpectedException):
"""An exception raised to tell the user to install the required binary."""
MESSAGE = (
'gsutil is not installed. Please install it. See: '
'https://cloud.google.com/storage/docs/gsutil_install')
EXIT_CODE = 49
def __init__(self):
super(GsutilNotInstalledError, self).__init__(self.MESSAGE, self.EXIT_CODE)
class BadJobTypeDefinitionError(ExpectedException):
"""An exception raised when a job type description is malformed."""
MESSAGE = (
'The definition for the {job_type} job type is incorrectly formatted or'
' missing crucial information.')
EXIT_CODE = 50
def __init__(self, job_type):
super(BadJobTypeDefinitionError, self).__init__(
self.MESSAGE.format(job_type=job_type), self.EXIT_CODE)
class UnreproducibleError(ExpectedException):
"""An exception raised when the crash cannot be reproduced."""
MESSAGE = (
'The crash cannot be reproduced after trying {count} times.\n'
+ UNREPRODUCIBLE_SUGGESTION_TEXT)
EXIT_CODE = 51
def __init__(self, count, crash_signatures):
crash_signatures = [
{'type': s.crash_type, 'state': s.crash_state_lines,
'output': s.output[:100000]}
for s in list(crash_signatures)[:10]
]
super(UnreproducibleError, self).__init__(
message=self.MESSAGE.format(count=count),
exit_code=self.EXIT_CODE,
extras={'signatures': crash_signatures})
class DirtyRepoError(ExpectedException):
"""An exception raised when the repo is dirty. Therefore, we cannot checkout
to a wanted sha."""
MESSAGE = (
"We can't run the checkout command because {source_dir} has "
'uncommitted changes.\n'
'Please commit or stash these changes and re-run this tool.')
EXIT_CODE = 52
def __init__(self, source_dir):
super(DirtyRepoError, self).__init__(
self.MESSAGE.format(source_dir=source_dir), self.EXIT_CODE)
class CommandFailedError(ExpectedException):
"""An exception raised when the command doesn't return 0."""
MESSAGE = '`{cmd}` failed with the return code {returncode}.'
EXIT_CODE = 53
def __init__(self, command, returncode, stderr):
super(CommandFailedError, self).__init__(
self.MESSAGE.format(cmd=command, returncode=returncode),
self.EXIT_CODE,
extras={'stderr': stderr[:100000]})
class KillProcessFailedError(ExpectedException):
"""An exception raised when the process cannot be killed."""
MESSAGE = '`{command}` (pid={pid}) cannot be killed.'
EXIT_CODE = 54
def __init__(self, command, pid):
super(KillProcessFailedError, self).__init__(
self.MESSAGE.format(command=command, pid=pid),
self.EXIT_CODE)
class UserRespondingNoError(ExpectedException):
"""An exception raised when the user decides not to proceed."""
MESSAGE = 'User responding "no" to "{question}"'
EXIT_CODE = 55
def __init__(self, question):
super(UserRespondingNoError, self).__init__(
self.MESSAGE.format(question=question),
self.EXIT_CODE)
class InvalidTestcaseIdError(ExpectedException):
"""An exception when the testcase id is invalid."""
MESSAGE = (
'The testcase ID ({testcase_id}) is invalid.\n'
"Please double-check if there's a typo.\n"
'Also, can you access '
'https://clusterfuzz.com/testcase-detail/{testcase_id} ?')
EXIT_CODE = 56
def __init__(self, testcase_id):
super(InvalidTestcaseIdError, self).__init__(
self.MESSAGE.format(testcase_id=str(testcase_id)), self.EXIT_CODE)
class UnauthorizedError(ExpectedException):
"""An exception when the user cannot access the testcase."""
MESSAGE = (
"You ({identity}) aren't allowed to access the testcase ID "
'({testcase_id}). Can you access '
'https://clusterfuzz.com/testcase-detail/{testcase_id} ?')
EXIT_CODE = 57
def __init__(self, testcase_id, identity):
super(UnauthorizedError, self).__init__(
self.MESSAGE.format(identity=identity, testcase_id=str(testcase_id)),
self.EXIT_CODE)
class DifferentStacktraceError(ExpectedException):
"""An exception raised when the resulting crash is different."""
MESSAGE = (
'The original crash cannot be reproduced after trying {count} times.\n'
'But it seems we get a different stacktrace. Could you check if the '
'stacktrace is good enough?\n\n' + UNREPRODUCIBLE_SUGGESTION_TEXT)
EXIT_CODE = 58
def __init__(self, count, crash_signatures):
crash_signatures = [
{'type': s.crash_type, 'state': s.crash_state_lines,
'output': s.output[:50000]}
for s in list(crash_signatures)[:10]
]
super(DifferentStacktraceError, self).__init__(
message=self.MESSAGE.format(count=count),
exit_code=self.EXIT_CODE,
extras={'signatures': crash_signatures})
class GdbNotSupportedOnAndroidError(ExpectedException):
"""An exception raised when debug is enabled on Android."""
MESSAGE = "--enable-debug (or gdb) isn't supported in Android."
EXIT_CODE = 59
def __init__(self):
super(GdbNotSupportedOnAndroidError, self).__init__(
message=self.MESSAGE, exit_code=self.EXIT_CODE)
class BootFailed(ExpectedException):
"""An exception is raised after device failed to complete boot."""
MESSAGE = (
'Device failed to finish boot. Please inspect logcat output to '
'identify the issue.')
EXIT_CODE = 60
def __init__(self):
super(BootFailed, self).__init__(
message=self.MESSAGE, exit_code=self.EXIT_CODE)
class NoAndroidDeviceIdError(ExpectedException):
"""An exception is raised after installing ASAN on Android"""
MESSAGE = 'Please set the target Android device ID as the env {env_name}.'
EXIT_CODE = 61
def __init__(self, env_name):
super(NoAndroidDeviceIdError, self).__init__(
message=self.MESSAGE.format(env_name=env_name),
exit_code=self.EXIT_CODE)
class GclientManagedEnabledException(ExpectedException):
"""An exception is raised when .gclient contains managed=True."""
# pylint: disable=line-too-long
MESSAGE = (
'Please disable `managed` in {dot_gclient_path}. `managed=True` has '
'been deprecated, and it checks out the repo at a wrong SHA. See: '
'https://www.chromium.org/developers/how-tos/get-the-code/gclient-managed-mode'
)
# pylint: enable=line-too-long
EXIT_CODE = 62
def __init__(self, dot_gclient_path):
super(GclientManagedEnabledException, self).__init__(
message=self.MESSAGE.format(dot_gclient_path=dot_gclient_path),
exit_code=self.EXIT_CODE)
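# Minimal usage sketch (added for illustration; not part of the original
# module): get_class maps a recorded exit code back to its exception class,
# with unknown codes falling back to UnknownExitCodeError.
if __name__ == '__main__':
    assert get_class(45) is PermissionsTooPermissiveError
    assert get_class(999) is UnknownExitCodeError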
|
nilq/baby-python
|
python
|
import json
import os
import fire
import numpy as np
import tensorflow as tf
from tensorflow.contrib.training import HParams
import model, sample, encoder
def interact_model(
model_name='124M',
seed=None,
nsamples=1,
batch_size=1,
length=None,
temperature=1,
top_k=0,
top_p=1,
models_dir='models',
):
"""
Interactively run the model
:model_name=124M : String, which model to use
:seed=None : Integer seed for random number generators, fix seed to reproduce
results
:nsamples=1 : Number of samples to return total
:batch_size=1 : Number of batches (only affects speed/memory). Must divide nsamples.
:length=None : Number of tokens in generated text, if None (default), is
determined by model hyperparameters
:temperature=1 : Float value controlling randomness in Boltzmann
distribution. Lower temperature results in less random completions. As the
temperature approaches zero, the model will become deterministic and
repetitive. Higher temperature results in more random completions.
:top_k=0 : Integer value controlling diversity. 1 means only 1 word is
considered for each step (token), resulting in deterministic completions,
while 40 means 40 words are considered at each step. 0 (default) is a
special setting meaning no restrictions. 40 generally is a good value.
:top_p=1 : Float value for nucleus sampling; only the smallest set of
tokens whose cumulative probability reaches top_p is considered. 1
(default) disables the restriction.
:models_dir : path to parent folder containing model subfolders
(i.e. contains the <model_name> folder)
"""
models_dir = os.path.expanduser(os.path.expandvars(models_dir))
if batch_size is None:
batch_size = 1
assert nsamples % batch_size == 0
enc = encoder.get_encoder(model_name, models_dir)
hparams = model.default_hparams()
with open(os.path.join(models_dir, model_name, 'hparams.json')) as f:
hparams.override_from_dict(json.load(f))
if length is None:
length = hparams.n_ctx // 2
elif length > hparams.n_ctx:
raise ValueError("Can't get samples longer than window size: %s" % hparams.n_ctx)
with tf.Session(graph=tf.Graph()) as sess:
context = tf.placeholder(tf.int32, [batch_size, None])
np.random.seed(seed)
tf.set_random_seed(seed)
output = sample.sample_sequence(
hparams=hparams, length=length,
context=context,
batch_size=batch_size,
temperature=temperature, top_k=top_k, top_p=top_p
)
print("hparams :",hparams,'\n',
"length :", length,'\n',
"context :", context,'\n',
"batch_size :",batch_size,'\n',
"temperature :", temperature,'\n',
"top_k :",top_k,'\n',
"top_p :",top_p,'\n')
'''
hparams : [('n_ctx', 1024), ('n_embd', 768), ('n_head', 12), ('n_layer', 12), ('n_vocab', 50257)]
length : 512
context : Tensor("Placeholder:0", shape=(1, ?), dtype=int32)
batch_size : 1
temperature : 1
top_k : 0
top_p : 1
'''
saver = tf.train.Saver()
ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name))
saver.restore(sess, ckpt)
while True:
raw_text = input("Model prompt >>> ")
while not raw_text:
print('Prompt should not be empty!')
raw_text = input("Model prompt >>> ")
context_tokens = enc.encode(raw_text)
generated = 0
for _ in range(nsamples // batch_size):
out = sess.run(output, feed_dict={
context: [context_tokens for _ in range(batch_size)]
})[:, len(context_tokens):]
for i in range(batch_size):
generated += 1
text = enc.decode(out[i])
print("=" * 40 + " SAMPLE " + str(generated) + " " + "=" * 40)
print(text)
print("=" * 80)
if __name__ == '__main__':
fire.Fire(interact_model)
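# Example invocation (illustrative; the script name is hypothetical):
#   python interact.py --model_name=124M --top_k=40 --temperature=0.9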
################################################################################
def default_hparams():
return HParams(
n_vocab=0,
n_ctx=1024,
n_embd=768,
n_head=12,
n_layer=12,
)
def shape_list(x):
"""Deal with dynamic shape in tensorflow cleanly."""
static = x.shape.as_list()
dynamic = tf.shape(x)
return [dynamic[i] if s is None else s for i, s in enumerate(static)]
def softmax(x, axis=-1):
x = x - tf.reduce_max(x, axis=axis, keepdims=True)
ex = tf.exp(x)
return ex / tf.reduce_sum(ex, axis=axis, keepdims=True)
def gelu(x):
return 0.5*x*(1+tf.tanh(np.sqrt(2/np.pi)*(x+0.044715*tf.pow(x, 3))))
def norm(x, scope, *, axis=-1, epsilon=1e-5):
"""Normalize to mean = 0, std = 1, then do a diagonal affine transform."""
with tf.variable_scope(scope):
n_state = x.shape[-1].value
g = tf.get_variable('g', [n_state], initializer=tf.constant_initializer(1))
b = tf.get_variable('b', [n_state], initializer=tf.constant_initializer(0))
u = tf.reduce_mean(x, axis=axis, keepdims=True)
s = tf.reduce_mean(tf.square(x-u), axis=axis, keepdims=True)
x = (x - u) * tf.rsqrt(s + epsilon)
x = x*g + b
return x
def split_states(x, n):
"""Reshape the last dimension of x into [n, x.shape[-1]/n]."""
*start, m = shape_list(x)
return tf.reshape(x, start + [n, m//n])
def merge_states(x):
"""Smash the last two dimensions of x into a single dimension."""
*start, a, b = shape_list(x)
return tf.reshape(x, start + [a*b])
def conv1d(x, scope, nf, *, w_init_stdev=0.02):
with tf.variable_scope(scope):
*start, nx = shape_list(x)
w = tf.get_variable('w', [1, nx, nf], initializer=tf.random_normal_initializer(stddev=w_init_stdev))
b = tf.get_variable('b', [nf], initializer=tf.constant_initializer(0))
c = tf.reshape(tf.matmul(tf.reshape(x, [-1, nx]), tf.reshape(w, [-1, nf]))+b, start+[nf])
return c
def attention_mask(nd, ns, *, dtype):
"""1's in the lower triangle, counting from the lower right corner.
Same as tf.matrix_band_part(tf.ones([nd, ns]), -1, ns-nd), but doesn't produce garbage on TPUs.
"""
i = tf.range(nd)[:,None]
j = tf.range(ns)
m = i >= j - ns + nd
return tf.cast(m, dtype)
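# Worked example (illustrative, assuming nd=2, ns=3): attention_mask(2, 3)
# yields [[1, 1, 0],
#         [1, 1, 1]]
# i.e. each of the 2 destination positions may attend to itself and to all
# earlier source positions, counting from the lower-right corner.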
def attn(x, scope, n_state, *, past, hparams):
assert x.shape.ndims == 3 # Should be [batch, sequence, features]
assert n_state % hparams.n_head == 0
if past is not None:
assert past.shape.ndims == 5 # Should be [batch, 2, heads, sequence, features], where 2 is [k, v]
def split_heads(x):
# From [batch, sequence, features] to [batch, heads, sequence, features]
return tf.transpose(split_states(x, hparams.n_head), [0, 2, 1, 3])
def merge_heads(x):
# Reverse of split_heads
return merge_states(tf.transpose(x, [0, 2, 1, 3]))
def mask_attn_weights(w):
# w has shape [batch, heads, dst_sequence, src_sequence], where information flows from src to dst.
_, _, nd, ns = shape_list(w)
b = attention_mask(nd, ns, dtype=w.dtype)
b = tf.reshape(b, [1, 1, nd, ns])
w = w*b - tf.cast(1e10, w.dtype)*(1-b)
return w
def multihead_attn(q, k, v):
# q, k, v have shape [batch, heads, sequence, features]
w = tf.matmul(q, k, transpose_b=True)
w = w * tf.rsqrt(tf.cast(v.shape[-1].value, w.dtype))
w = mask_attn_weights(w)
w = softmax(w)
a = tf.matmul(w, v)
return a
with tf.variable_scope(scope):
c = conv1d(x, 'c_attn', n_state*3)
q, k, v = map(split_heads, tf.split(c, 3, axis=2))
present = tf.stack([k, v], axis=1)
if past is not None:
pk, pv = tf.unstack(past, axis=1)
k = tf.concat([pk, k], axis=-2)
v = tf.concat([pv, v], axis=-2)
a = multihead_attn(q, k, v)
a = merge_heads(a)
a = conv1d(a, 'c_proj', n_state)
return a, present
def mlp(x, scope, n_state, *, hparams):
with tf.variable_scope(scope):
nx = x.shape[-1].value
h = gelu(conv1d(x, 'c_fc', n_state))
h2 = conv1d(h, 'c_proj', nx)
return h2
def block(x, scope, *, past, hparams):
with tf.variable_scope(scope):
nx = x.shape[-1].value
a, present = attn(norm(x, 'ln_1'), 'attn', nx, past=past, hparams=hparams)
x = x + a
m = mlp(norm(x, 'ln_2'), 'mlp', nx*4, hparams=hparams)
x = x + m
return x, present
def past_shape(*, hparams, batch_size=None, sequence=None):
return [batch_size, hparams.n_layer, 2, hparams.n_head, sequence, hparams.n_embd // hparams.n_head]
def expand_tile(value, size):
"""Add a new axis of given size."""
value = tf.convert_to_tensor(value, name='value')
ndims = value.shape.ndims
return tf.tile(tf.expand_dims(value, axis=0), [size] + [1]*ndims)
def positions_for(tokens, past_length):
batch_size = tf.shape(tokens)[0]
nsteps = tf.shape(tokens)[1]
return expand_tile(past_length + tf.range(nsteps), batch_size)
def model(hparams, X, past=None, scope='model', reuse=False):
with tf.variable_scope(scope, reuse=reuse):
results = {}
batch, sequence = shape_list(X)
wpe = tf.get_variable('wpe', [hparams.n_ctx, hparams.n_embd],
initializer=tf.random_normal_initializer(stddev=0.01))
wte = tf.get_variable('wte', [hparams.n_vocab, hparams.n_embd],
initializer=tf.random_normal_initializer(stddev=0.02))
past_length = 0 if past is None else tf.shape(past)[-2]
h = tf.gather(wte, X) + tf.gather(wpe, positions_for(X, past_length))
# Transformer
presents = []
pasts = tf.unstack(past, axis=1) if past is not None else [None] * hparams.n_layer
assert len(pasts) == hparams.n_layer
for layer, past in enumerate(pasts):
h, present = block(h, 'h%d' % layer, past=past, hparams=hparams)
presents.append(present)
results['present'] = tf.stack(presents, axis=1)
h = norm(h, 'ln_f')
# Language model loss. Do tokens <n predict token n?
h_flat = tf.reshape(h, [batch*sequence, hparams.n_embd])
logits = tf.matmul(h_flat, wte, transpose_b=True)
print(logits)
logits = tf.reshape(logits, [batch, sequence, hparams.n_vocab])
results['logits'] = logits
return results
###########################################################################
def top_k_logits(logits, k):
if k == 0:
# k == 0 disables top-k truncation
return logits
def _top_k():
values, _ = tf.nn.top_k(logits, k=k)
min_values = values[:, -1, tf.newaxis]
print(min_values)
# tf.newaxis adds a trailing axis so min_values broadcasts against logits
return tf.where(
    # tf.where(condition, x, y): take x where the condition is True, else y;
    # called with only a condition it instead returns coordinates of True cells
    logits < min_values,
    tf.ones_like(logits, dtype=logits.dtype) * -1e10,  # effectively -infinity
    # tf.ones_like returns a ones tensor with the same shape and dtype as logits
    logits,
)
# tf.cond: when k == 0 the logits pass through unchanged; otherwise _top_k() runs
return tf.cond(
tf.equal(k, 0), # k == 0
lambda: logits,
lambda: _top_k(),
)
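# Worked example (illustrative): with logits [[1.0, 3.0, 2.0]] and k=2, the
# two largest logits (3.0 and 2.0) survive and 1.0 is pushed down to -1e10,
# so sampling is effectively restricted to the top-2 tokens.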
def top_p_logits(logits, p):
"""ํต์ฌ sampling"""
batch, _ = logits.shape.as_list()
sorted_logits = tf.sort(logits, direction='DESCENDING', axis=-1)
# sort the logits in descending order along the last axis
cumulative_probs = tf.cumsum(tf.nn.softmax(sorted_logits, axis=-1), axis=-1)
# tf.cumsum computes running totals, e.g. [a, b, c] -> [a, a+b, a+b+c]
indices = tf.stack([
    # tf.stack packs N tensors of shape (a, b, c) into one tensor:
    # axis=0 yields (N, a, b, c), axis=1 yields (a, N, b, c), and so on
    tf.range(0, batch),
    # number of indices to include: count how many cumulative probabilities
    # stay <= p (cast the booleans to int and reduce_sum them), subtract 1
    # for the cutoff position, and clamp at 0 with tf.maximum
    tf.maximum(tf.reduce_sum(tf.cast(cumulative_probs <= p, tf.int32), axis=-1) - 1, 0),
], axis=-1)
min_values = tf.gather_nd(sorted_logits, indices)
return tf.where(
logits < min_values,
tf.ones_like(logits) * -1e10,
logits,
)
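# Worked example (illustrative): with sorted probabilities [0.5, 0.3, 0.2]
# and p=0.7, the cumulative sums are [0.5, 0.8, 1.0]; exactly one entry is
# <= p, so the cutoff index is max(1 - 1, 0) = 0 and only logits at least
# as large as the biggest one survive, i.e. the single most probable token.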
def sample_sequence(*, hparams, length, start_token=None, batch_size=None, context=None, temperature=1, top_k=0, top_p=1):
if start_token is None:
    # exactly one of start_token and context must be supplied
    assert context is not None, 'Specify exactly one of start_token and context!'
else:
    assert context is None, 'Specify exactly one of start_token and context!'
    # fill a [batch_size, 1] tensor with the start token
    context = tf.fill([batch_size, 1], start_token)
print("context:", context)
def step(hparams, tokens, past=None):
lm_output = model.model(hparams=hparams, X=tokens, past=past, reuse=tf.AUTO_REUSE)
# reuse=tf.AUTO_REUSE: create variables if they don't exist yet, otherwise reuse them
logits = lm_output['logits'][:, :, :hparams.n_vocab]
presents = lm_output['present']
presents.set_shape(model.past_shape(hparams=hparams, batch_size=batch_size))
return {
'logits': logits,
'presents': presents,
}
with tf.name_scope('sample_sequence'):  # group the sampling ops under one name scope
def body(past, prev, output):
next_outputs = step(hparams, prev, past=past)
logits = next_outputs['logits'][:, -1, :] / tf.to_float(temperature)
logits = top_k_logits(logits, k=top_k)
logits = top_p_logits(logits, p=top_p)
samples = tf.multinomial(logits, num_samples=1, output_dtype=tf.int32)
# tf.multinomial draws one sample from the categorical distribution over the logits
return [
next_outputs['presents'] if past is None else tf.concat([past, next_outputs['presents']], axis=-2),
samples,
tf.concat([output, samples], axis=1)
]
past, prev, output = body(None, context, context)
def cond(*args):
    # always True: the loop is bounded by maximum_iterations instead
    return True
_, _, tokens = tf.while_loop(
cond=cond, body=body,
maximum_iterations=length - 1,
loop_vars=[
past,
prev,
output
],
shape_invariants=[
tf.TensorShape(model.past_shape(hparams=hparams, batch_size=batch_size)),
tf.TensorShape([batch_size, None]),
tf.TensorShape([batch_size, None]),
],
back_prop=False,
)
return tokens
# Illustration of the arguments that sample_sequence receives in the run
# above (copied from the printed trace; not directly runnable as written):
# sample_sequence(
#     hparams=[('n_ctx', 1024), ('n_embd', 768), ('n_head', 12), ('n_layer', 12), ('n_vocab', 50257)],
#     length=512,
#     context=tf.Tensor("Placeholder:0", shape=(1, ?), dtype=int32),
#     batch_size=1,
#     temperature=1,
#     top_k=0,
#     top_p=1)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.datacatalog_v1.types import common
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.datacatalog.v1",
manifest={"SearchResultType", "SearchCatalogResult",},
)
class SearchResultType(proto.Enum):
r"""The different types of resources that can be returned in
search.
"""
SEARCH_RESULT_TYPE_UNSPECIFIED = 0
ENTRY = 1
TAG_TEMPLATE = 2
ENTRY_GROUP = 3
class SearchCatalogResult(proto.Message):
r"""A result that appears in the response of a search request.
Each result captures details of one entry that matches the
search.
Attributes:
search_result_type (google.cloud.datacatalog_v1.types.SearchResultType):
Type of the search result. This field can be
used to determine which Get method to call to
fetch the full resource.
search_result_subtype (str):
Sub-type of the search result. This is a dot-delimited
description of the resource's full type, and is the same as
the value callers would provide in the "type" search facet.
Examples: ``entry.table``, ``entry.dataStream``,
``tagTemplate``.
relative_resource_name (str):
The relative resource name of the resource in URL format.
Examples:
- ``projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``
- ``projects/{project_id}/tagTemplates/{tag_template_id}``
linked_resource (str):
The full name of the cloud resource the entry belongs to.
See:
https://cloud.google.com/apis/design/resource_names#full_resource_name.
Example:
- ``//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId``
modify_time (google.protobuf.timestamp_pb2.Timestamp):
Last-modified timestamp of the entry from the
managing system.
integrated_system (google.cloud.datacatalog_v1.types.IntegratedSystem):
Output only. This field indicates the entry's
source system that Data Catalog integrates with,
such as BigQuery or Cloud Pub/Sub.
user_specified_system (str):
This field indicates the entry's source
system that Data Catalog does not integrate
with.
fully_qualified_name (str):
Fully Qualified Name of the resource. There are two main
forms of FQNs: {system}:{project}.{dot-separated path to
resource} for non-regionalized resources
{system}:{project}.{location id}.{dot-separated path to
resource} for regionalized resources Examples:
- dataproc_metastore:projectId.locationId.instanceId.databaseId.tableId
- bigquery:table.project_id.dataset_id.table_id
"""
search_result_type = proto.Field(proto.ENUM, number=1, enum="SearchResultType",)
search_result_subtype = proto.Field(proto.STRING, number=2,)
relative_resource_name = proto.Field(proto.STRING, number=3,)
linked_resource = proto.Field(proto.STRING, number=4,)
modify_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,)
integrated_system = proto.Field(
proto.ENUM, number=8, oneof="system", enum=common.IntegratedSystem,
)
user_specified_system = proto.Field(proto.STRING, number=9, oneof="system",)
fully_qualified_name = proto.Field(proto.STRING, number=10,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
nilq/baby-python
|
python
|
from datetime import date, datetime
from typing import Any, Dict
from dateutil import relativedelta
from django.db.models import Sum, Count
from em.models import Account, Transaction
class AccountHelper(object):
date_fmt = '%m-%Y'
day_fmt = '%Y-%m-%d'
@staticmethod
def get_spendings(overall_expns, **filters):
spendings = list()
account_labels = list()
account_values = list()
# print(Transaction.objects.filter(date=date.today()).annotate(Sum('amount'))) #.aggregate(expense=Sum('amount')).get('expense'))
# print(Transaction.objects.values('account__name', 'account').filter(date=date.today()).annotate(Sum('amount'))) #.aggregate(expense=Sum('amount')).get('expense'))
spendings = Transaction.objects.values('account__name', 'account').filter(**filters).annotate(spendings=Sum('amount'))
if spendings:
for spending in spendings:
account_labels.append(spending.get('account__name'))
account_values.append(spending.get('spendings'))
return spendings, account_labels, account_values
# if overall_expns:
# for account in Account.objects.all():
# amount = Transaction.objects\
# .filter(account=account, **filters)\
# .aggregate(amount=Sum('amount')).get('amount')
# if amount:
# spendings.append({
# 'account': account,
# 'spendings': amount,
# 'percentage': int((amount / overall_expns) * 100)
# })
# account_labels.append(account.name)
# account_values.append(amount)
# return spendings, account_labels, account_values
@staticmethod
def get_account_details(context: Dict[Any, Any], **kwargs):
ref_month = kwargs.get('ref_month')
account: Account = context.get('account')
# today = date.today()
# print(today.day, account.statement_date, today.day > account.statement_date)
filters = dict()
dt = datetime.strptime(ref_month, AccountHelper.date_fmt) if ref_month else date.today()
if kwargs.get('from_dt') and kwargs.get('to_dt'):
from_dt, to_dt = (
datetime.strptime(kwargs.get('from_dt'), AccountHelper.day_fmt),
datetime.strptime(kwargs.get('to_dt'), AccountHelper.day_fmt)
)
filters.update(date__range = [from_dt, to_dt])
context['selected_range'] = f"{kwargs.get('from_dt')} - {kwargs.get('to_dt')}"
else:
context['selected_range'] = datetime.strftime(dt, '%m-%Y')
filters = dict(
date__month=dt.month,
date__year=dt.year
)
context['prev_month'] = dt - relativedelta.relativedelta(months=1)
context['next_month'] = dt + relativedelta.relativedelta(months=1)
context['cur_month'] = dt
context['spendings'] = Transaction.objects\
.filter(account=account, **filters)\
.aggregate(spendings=Sum('amount'))\
.get('spendings')
context['transactions'] = Transaction.objects.filter(account=account, **filters).order_by("-date")
return context
@staticmethod
def get_act_statments(account):
statement_dates = {
"Kotak": 15,
"Citi Credit": 20,
"HDFC Nayana": 20,
}
dt = statement_dates.get(account)
statments = dict()
if dt:
# st_dt = date.today() - relativedelta.relativedelta(days=i)
for i in range(5):
ref_dt = date.today() - relativedelta.relativedelta(months=i)
to_dt = date(ref_dt.year, ref_dt.month, dt)
from_dt = to_dt - relativedelta.relativedelta(months=1)
from_dt = from_dt + relativedelta.relativedelta(days=1)
# ?fromDate=2021-03-16&toDate=2021-04-14
qp = f'?fromDate={from_dt.strftime("%Y-%m-%d")}&toDate={to_dt.strftime("%Y-%m-%d")}'
statments[f'{from_dt.strftime("%b")}-{to_dt.strftime("%b")}'] = qp
return statments
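# Illustrative walk-through (hypothetical dates; actual output depends on
# today's date): for account 'Kotak' (statement day 15), each of the five
# windows spans the 16th of one month to the 15th of the next, emitted as a
# query-string fragment such as '?fromDate=2021-03-16&toDate=2021-04-15'.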
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from .type_convert import anything_to_string
from .type_convert import anything_to_bytes
from .spamc_header import SpamcHeader
from .spamc_body import SpamcBody
class SpamcProtocol(SpamcHeader, SpamcBody):
def __init__(self):
super().__init__()
def create_request(self):
request_body = b''
if self.is_have_header(b'Compress') == True:
request_body = self.zlib_compress_data(self.body)
else:
request_body = self.body
self.set_content_length(len(request_body))
request = (
    b'%(headers)b\r\n'
    b'%(body)b'
)
return request % {
    b'headers': self.create_header_request(),
    b'body': request_body
}
def create_simple_request(self, input_method, input_message):
self.set_method(input_method)
self.body = input_message
return self.create_request()
@staticmethod
def split_spamd_message(input_message):
try:
bytes_header, sep, bytes_body = input_message.partition(b'\r\n\r\n')
return bytes_header, bytes_body
except Exception as err:
raise RuntimeError('Protocol Error')
def load_from_response(self, input_message):
bytes_header, bytes_body = self.split_spamd_message(input_message)
self.parse_header_bytes(bytes_header)
if self.get_content_length() != len(bytes_body):
return False
if self.is_have_header(b'Compress') == True:
response_body = self.zlib_decompress_data(bytes_body)
else:
response_body = bytes_body
self.body = response_body
return True
def is_full_response(self, input_message, is_check_length=True):
if input_message.startswith(b'SPAMD') == False:
raise RuntimeError('Protocol Error')
try:
bytes_header, bytes_body = self.split_spamd_message(input_message)
self.parse_header_bytes(bytes_header)
if is_check_length == True:
if self.is_have_header(b'Content-length') == False:
return False
if self.get_content_length() != len(bytes_body):
return False
return True
except Exception as err:
return False
if __name__ == '__main__':
pass
|
nilq/baby-python
|
python
|
#!/usr/bin/python
#
# coveragePlot.py
#
# This program generates genomic coverage plots
# Chiu Laboratory
# University of California, San Francisco
# January, 2014
#
# Copyright (C) 2014 Charles Y Chiu - All Rights Reserved
# SURPI has been released under a modified BSD license.
# Please see license file for details.
import matplotlib
matplotlib.use('Agg')
from pylab import *
from pylab import figure, show, legend
from matplotlib import pyplot as plt
from distutils.version import LooseVersion
import numpy as np
import sys, os
import re
def smart_truncate1(text, max_length=100, suffix='...'):
"""Returns a string of at most `max_length` characters, cutting
only at word-boundaries. If the string was truncated, `suffix`
will be appended.
"""
if len(text) > max_length:
pattern = r'^(.{0,%d}\S)\s.*' % (max_length-len(suffix)-1)
return re.sub(pattern, r'\1' + suffix, text)
else:
return text
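# Worked example (illustrative):
# smart_truncate1("the quick brown fox jumps", max_length=15) -> "the quick..."
# (cut at the last word boundary that keeps the result within 15 characters)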
if len(sys.argv) < 4:
    print "usage: coveragePlot.py <data file .map/.report> <title of plot> <log y-axes Y/N/B=both>"
    sys.exit(-1)
dataFile = sys.argv[1]
mpl_version=matplotlib.__version__
# print "Installed version is: %s." % mpl_version
#load function is deprecated as of matplotlib v1.3.1, replaced with np.loadtxt
if (LooseVersion(mpl_version) >= LooseVersion('1.3.1') ):
data = np.loadtxt(dataFile)
else:
data = mlab.load(dataFile)
outputFile = os.path.splitext(dataFile)[0]+".ps"
reportFile = os.path.splitext(dataFile)[0]+".report"
with open(reportFile) as f:
reportContent = f.readlines()
reportText = ""
logPlot = sys.argv[3]
for line in reportContent:
stripped_line = line.rstrip('\r\n\t ')
reportText = reportText + smart_truncate1(stripped_line, max_length=100, suffix='...') + "\n"
print "Loaded " + dataFile
hold(True)
if logPlot=='N':
fig=plt.figure(figsize=[8.5,4.5])
ax = fig.add_subplot(111)
fig.text(0.1,0.0,reportText, fontsize=9)
color ='k-'
plot(data[:,0],data[:,1],color)
xlabel("base position",fontsize=8)
ylabel("fold coverage",fontsize=8)
title_text = sys.argv[2]
suptitle(title_text,fontsize=9)
xMin, xMax, yMin, yMax = min(data[:,0]),max(data[:,0]),min(data[:,1]),max(data[:,1])
# add a 10% buffer to yMax
yMax *= 1.1
axis([xMin,xMax,yMin,yMax])
gcf().subplots_adjust(bottom=0.60)
plt.show()
if logPlot=='B':
fig=plt.figure(figsize=[8.5,4.5])
ax1 = fig.add_subplot(211)
color ='k-'
plot(data[:,0],data[:,1],color)
xlabel("base position",fontsize=8)
ylabel("fold coverage",fontsize=8)
xMin, xMax, yMin, yMax = min(data[:,0]),max(data[:,0]),min(data[:,1]),max(data[:,1])
yMax *= 1.1
axis([xMin,xMax,yMin,yMax])
plt.show()
ax2 = fig.add_subplot(212)
ax2.set_yscale('symlog')
fig.text(0.1,0.0,reportText, fontsize=9)
color ='k-'
plot(data[:,0],data[:,1],color)
xlabel("base position",fontsize=8)
ylabel("fold coverage",fontsize=8)
title_text = sys.argv[2]
suptitle(title_text,fontsize=9)
xMin, xMax, yMin, yMax = min(data[:,0]),max(data[:,0]),min(data[:,1]),max(data[:,1])
yMax *= 1.1
axis([xMin,xMax,yMin,yMax])
gcf().subplots_adjust(bottom=0.40)
plt.show()
if logPlot=='Y':
fig=plt.figure(figsize=[8.5,4.5])
ax = fig.add_subplot(111)
ax.set_yscale('symlog')
fig.text(0.1,0.0,reportText, fontsize=9)
color ='k-'
plot(data[:,0],data[:,1],color)
xlabel("base position",fontsize=8)
ylabel("fold coverage",fontsize=8)
title_text = sys.argv[2]
suptitle(title_text,fontsize=9)
xMin, xMax, yMin, yMax = min(data[:,0]),max(data[:,0]),min(data[:,1]),max(data[:,1])
yMax *= 1.1
axis([xMin,xMax,yMin,yMax])
gcf().subplots_adjust(bottom=0.60)
plt.show()
savefig(outputFile)
|
nilq/baby-python
|
python
|
# *******************************************************************************
# Copyright (C) 2020-2021 INAF
#
# This software is distributed under the terms of the BSD-3-Clause license
#
# Authors:
# Ambra Di Piano <ambra.dipiano@inaf.it>
# *******************************************************************************
import os
from os.path import isfile, expandvars
from sagsci.tools.utils import *
from sagsci.tools.photometry import *
# observation and target
obs_crab = 'data/crab_test_sim.fits'
target = {'ra': 83.6331, 'dec': 22.0145}
pointing = {'ra': 83.6331, 'dec': 22.5145}
# configuration
erange = [(0.03, 50)]
trange = [0, 100] # livetime in seconds (s)
radius = 0.2 # photometry region in degrees (deg)
spectral_index = -2.48 # slope of the power-law spectrum
irf = expandvars('$CTOOLS/share/caldb/data/cta/prod3b-v2/bcf/South_z20_0.5h/irf_file.fits')
# we need to add "radius" to the target dictionary
target['rad'] = radius
# init photometry
phm = Photometrics({'events_filename': obs_crab})
# remove duplicate files
offregionsfile = obs_crab.replace('.fits', '_off.reg')
if isfile(offregionsfile):
os.remove(obs_crab.replace('.fits', '_off.reg'))
# compute regions
off_regions = phm.find_off_regions(algo='cross', src=target, pnt=pointing, rad=target['rad'], save=offregionsfile)
for e in erange:
print(f'Target = {target}')
print(f'Energy range = {e} TeV')
print(f'Time range = {trange} s')
on, off, alpha, excess, sigma, err_note = phm.counting(src=target, rad=target['rad'], off_regions=off_regions, e_min=e[0], e_max=e[1], t_min=trange[0], t_max=trange[1], draconian=False)
print(f'on counts = {on} cts')
print(f'excess counts = {excess} cts')
print(f'significance = {sigma} sigma')
exposure = get_aeff_in_region(target=target, pointing=pointing, trange=trange, erange=e, irf=irf, index=spectral_index)
print(f'aeff = {exposure} cm2')
livetime = trange[1]-trange[0]
print(f'livetime = {livetime} s')
# compute flux
flux = on / exposure / livetime
print(f'flux = {flux} ph/cm2/s')
print(f'\n{"-"*50}\n')
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
class Calculator:
def __init__(self, number1, number2):
self.number1 = int(number1)
self.number2 = int(number2)
def add(self):
print(self.number1 + self.number2)
return self.number1 + self.number2
def subtract(self):
print(self.number1 - self.number2)
return self.number1 - self.number2
def multiply(self):
print(self.number1 * self.number2)
return self.number1 * self.number2
def divide(self):
assert self.number2 != 0, "I am not smart enough to divide by zero"
print(self.number1 / self.number2)
return self.number1 / self.number2
def execute(self, operation):
    if operation == 'sumar':
        return self.add()
    elif operation == 'restar':
        return self.subtract()
    elif operation == 'multiplicar':
        return self.multiply()
    elif operation == 'dividir':
        return self.divide()
    else:
        print("Invalid operation")
if __name__ == "__main__":
operation = sys.argv[1]
number1 = sys.argv[2]
number2 = sys.argv[3]
calculator = Calculator(number1,number2)
calculator.execute(operation)
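# Example invocation (illustrative; the script name is hypothetical):
#   python calculator.py sumar 2 3   -> prints 5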
|
nilq/baby-python
|
python
|
from flask import Blueprint
bp = Blueprint('auth', __name__)
from diploma.auth import auth, emails, forms, routes
|
nilq/baby-python
|
python
|
#
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
""" This is the example of input record for the test_tranform_data. """
input_test_data = [
{
"targetingCriteria": {
"include": {
"and": [
{
"or": {
"urn:li:adTargetingFacet:titles": [
"urn:li:title:100",
"urn:li:title:10326",
"urn:li:title:10457",
"urn:li:title:10738",
"urn:li:title:10966",
"urn:li:title:11349",
"urn:li:title:1159",
]
}
},
{"or": {"urn:li:adTargetingFacet:locations": ["urn:li:geo:103644278"]}},
{"or": {"urn:li:adTargetingFacet:interfaceLocales": ["urn:li:locale:en_US"]}},
]
},
"exclude": {
"or": {
"urn:li:adTargetingFacet:facet_Key1": [
"facet_test1",
"facet_test2",
],
"urn:li:adTargetingFacet:facet_Key2": [
"facet_test3",
"facet_test4",
],
}
},
},
"changeAuditStamps": {
"created": {"time": 1629581275000},
"lastModified": {"time": 1629664544760},
},
"dateRange": {
"start": {"month": 8, "day": 13, "year": 2021},
"end": {"month": 8, "day": 13, "year": 2021},
},
"variables": {
"data": {
"com.linkedin.ads.SponsoredUpdateCreativeVariables": {
"activity": "urn:li:activity:1234",
"directSponsoredContent": 0,
"share": "urn:li:share:1234",
}
}
},
}
]
""" This is the expected output from the `transform_data` method. """
output_test_data = [
{
"targetingCriteria": {
"include": {
"and": [
{
"type": "urn:li:adTargetingFacet:titles",
"values": [
"urn:li:title:100",
"urn:li:title:10326",
"urn:li:title:10457",
"urn:li:title:10738",
"urn:li:title:10966",
"urn:li:title:11349",
"urn:li:title:1159",
],
},
{
"type": "urn:li:adTargetingFacet:locations",
"values": ["urn:li:geo:103644278"],
},
{
"type": "urn:li:adTargetingFacet:interfaceLocales",
"values": ["urn:li:locale:en_US"],
},
]
},
"exclude": {
"or": [
{
"type": "urn:li:adTargetingFacet:facet_Key1",
"values": ["facet_test1", "facet_test2"],
},
{
"type": "urn:li:adTargetingFacet:facet_Key2",
"values": ["facet_test3", "facet_test4"],
},
]
},
},
"variables": {
"type": "com.linkedin.ads.SponsoredUpdateCreativeVariables",
"values": [
{"key": "activity", "value": "urn:li:activity:1234"},
{"key": "directSponsoredContent", "value": 0},
{"key": "share", "value": "urn:li:share:1234"},
],
},
"created": "2021-08-21 21:27:55",
"lastModified": "2021-08-22 20:35:44",
"start_date": "2021-08-13",
"end_date": "2021-08-13",
}
]
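# Hedged sketch (illustrative; not the connector's actual implementation):
# the reshaping visible between the two fixtures above turns each
# {facet_urn: [values]} mapping inside an "or" block into a list of
# {"type": ..., "values": ...} records.
def _flatten_or(or_dict):
    return [{"type": key, "values": values} for key, values in or_dict.items()]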
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from mm.utils.opengl import Render
from mm.utils.mesh import generateFace
from mm.models import MeshModel
import numpy as np
import matplotlib.pyplot as plt
from skimage import io, img_as_float
if __name__ == '__main__':
# Load the first image from the video
frame = 0
img = io.imread('../data/obama/orig/%05d.png' % (frame + 1))
img = img_as_float(img)
width = img.shape[1]
height = img.shape[0]
# Load the 3DMM parameters that fit the 3DMM to each video frame
param = np.load('../data/obama/paramRTS2Orig.npy')
# Load the mesh model
m = MeshModel('../models/bfm2017.npz')
# Generate the vertex coordinates from the mesh model and the parameters
vertexCoords = generateFace(param[frame, :], m).T
# Use the mean vertex colors just for illustrative purposes
vertexColors = m.texMean.T
# Concatenate the vertex coordinates and colors-- this is how they will be inputted into the Render object
meshData = np.r_[vertexCoords, vertexColors]
# Initialize an OpenGL Render object and render the 3DMM with the corresponding video frame in the background
r = Render(width, height, meshData, m.face, indexed = False, img = img)
r.render()
# Grab the rendering from the video card
rendering = r.grabRendering()
# You can also get other parameters from the video card: the pixels the 3DMM
# is rendered on, the index of the triangular face contributing to each such
# pixel's color, and the barycentric coordinates within that face, such that
# the barycentric combination of its three vertex attributes (e.g. color)
# forms the color of the rendered pixel
rendering, pixelCoord, pixelFaces, pixelBarycentricCoords = r.grabRendering(return_info = True)
# Plot the rendering
plt.figure()
plt.imshow(rendering)
# Loop through some frames in the video to render some more 3DMMs
for frame in range(1, 52, 10):
img = io.imread('../data/obama/orig/%05d.png' % (frame + 1))
img = img_as_float(img)
vertexCoords = generateFace(param[frame, :], m).T
meshData = np.r_[vertexCoords, vertexColors]
# Update the video card with the new mesh data for the current frame
r.updateVertexBuffer(meshData)
# Erase the current rendering to prepare for the new rendering
r.resetFramebufferObject()
# And then render and plot the rendering
r.render()
rendering = r.grabRendering()
plt.figure()
plt.imshow(rendering)
|
nilq/baby-python
|
python
|
import copy
from cantoolz.module import CANModule
class ecu_switch(CANModule):
name = "CAN Switch"
help = """
This module emulates a CAN switch.
Init params (example):
{
'Cabin': { # From Cabin interface
'OBD2':[ # To OBD2 allowed next ID
0x81, # Left door status
0x82 # Right door status
],
},
'Engine': {
'OBD2': [
0x79,
0x709
],
'Cabin':[
0x79
]
},
'OBD2': {
'Engine':[
0x701
],
}
}
"""
_active = True
def do_init(self, params):
self._rules = params
# Effect (could be fuzz operation, sniff, filter or whatever)
def do_effect(self, can_msg, args):
current_rule = self._rules.get(args['pipe'], {})
if can_msg.CANData and args['action'] == "read": # READ
for route_to, allowed_id in current_rule.items():
if can_msg.CANFrame.frame_id in allowed_id:
buffer = self._rules[route_to].get('buffer', [])
buffer.append(copy.deepcopy(can_msg.CANFrame))
self._rules[route_to].update({'buffer': buffer})
elif args['action'] == "write" and not can_msg.CANData: # Write
buffer_len = len(current_rule.get('buffer', []))
if buffer_len > 0:
can_msg.CANFrame = self._rules[args['pipe']]['buffer'].pop(0)
can_msg.CANData = True
can_msg.bus = self._bus
return can_msg
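# Illustrative routing walk-through (frame IDs taken from the init example
# in the help text above): a frame with ID 0x81 read on the 'Cabin' pipe is
# deep-copied into the OBD2 buffer; a later "write" action on the 'OBD2'
# pipe pops it from that buffer and forwards it on the bus.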
|
nilq/baby-python
|
python
|
from collections import defaultdict
start, end = 357253, 892942
num_digits = 6
def solve(start, end, strict=False):
length = end - start
count = 0
for i in range(length):
number = start + i
previous = number % 10
consecutives = defaultdict(int)
for j in range(1, num_digits):
p = 10 ** j
digit = number // p % 10
if digit > previous:
break
if previous == digit:
consecutives[digit] += 1
previous = digit
else:
if (strict and 1 in consecutives.values()) or (not strict and consecutives):
count += 1
return count
if __name__ == "__main__":
# Part I
print(solve(start, end))
# Part II
print(solve(start, end, strict=True))
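# Worked example (illustrative): solve(111110, 111112) scans 111110 and
# 111111; only 111111 has monotonically non-decreasing digits plus a repeat,
# so it returns 1, while solve(..., strict=True) returns 0 because the run
# of six equal digits never forms a group of exactly two.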
|
nilq/baby-python
|
python
|
# -----------------------------------------------------------------------------
# NDN Repo getfile client.
#
# @Author jonnykong@cs.ucla.edu
# @Date 2019-10-24
# -----------------------------------------------------------------------------
import os
import sys
sys.path.insert(1, os.path.join(sys.path[0], '..'))
import asyncio as aio
import logging
from ndn.app import NDNApp
from ndn.encoding import Name, NonStrictName
from ..utils.concurrent_fetcher import concurrent_fetcher
class GetfileClient(object):
"""
This client fetches a file from the repo, and save it to working directory.
"""
def __init__(self, app: NDNApp, repo_name):
"""
A client to retrieve files from the remote repo.
:param app: NDNApp.
:param repo_name: NonStrictName. Routable name to remote repo.
"""
self.app = app
self.repo_name = repo_name
async def fetch_file(self, name_at_repo: NonStrictName):
"""
Fetch a file from remote repo, and write to the current working directory.
:param name_at_repo: NonStrictName. The name with which this file is stored in the repo.
"""
semaphore = aio.Semaphore(10)
b_array = bytearray()
async for (_, _, content, _) in concurrent_fetcher(self.app, name_at_repo, 0, None, semaphore):
b_array.extend(content)
if len(b_array) > 0:
filename = Name.to_str(name_at_repo)
filename = filename.strip().split('/')[-1]
logging.info(f'Fetching completed, writing to file {filename}')
with open(filename, 'wb') as f:
f.write(b_array)
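# Minimal usage sketch (illustrative; names are hypothetical):
# app = NDNApp()
# client = GetfileClient(app, '/example/repo')
# app.run_forever(after_start=client.fetch_file('/example/repo/file1.txt'))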
|
nilq/baby-python
|
python
|
import attr
import logging
from typing import Callable
from mmds.exceptions import PackageNotFoundError
try:
from PIL import Image
except ImportError:
raise PackageNotFoundError("pillow", by="rgbs modality.")
from .ts import TimeSeriesModality
logger = logging.getLogger(__name__)
dumb_image = Image.new("RGB", (32, 32))
@attr.define
class RgbsModality(TimeSeriesModality):
"""A rgb sequence modality for video."""
transform: Callable
aggragate: Callable
@property
def duration(self):
return len(self.paths) / self.sample_rate
def _fetch_impl(self, *, info={}):
paths = self._slice(self.paths, info.get("t0"), info.get("t1"))
frames = list(map(self.transform, map(self._load_pil, paths)))
return self.aggragate(frames)
def _pad(self, x, n):
return x + [None] * n
@staticmethod
def _load_pil(path):
if path is None:
return dumb_image
try:
image = Image.open(path)
except Exception:
logger.warning(f"Open {path} failed, use an empty picture instead.")
image = dumb_image
return image
|
nilq/baby-python
|
python
|
all_teams = ["ANC", "APO", "CSU", "GUC", "LTI", "MIN", "MRL", "NAI", "POS", "RI1", "RAK", "SOS", "ZJU"]
semi_teams = ["APO", "CSU", "GUC", "MIN", "MRL", "POS", "SOS", "ZJU"]
team_names = {
# "BAS" : "Baseline (no agents)",
"ANC" : "anct_rescue2013",
"APO" : "Apollo-Rescue",
"CSU" : "CSU-YUNLU",
"GUC" : "GUC_ArtSapience",
"LTI" : "LTI-Agent-Rescue",
"MIN" : "MinERS",
"MRL" : "MRL",
"NAI" : "NAITO-Rescue2013",
"POS" : "Poseidon",
"RI1" : "Ri-one",
"RAK" : "RoboAKUT",
"SOS" : "S.O.S.",
"ZJU" : "ZJUBase"
}
day1 = {'name' : "Day 1",
'shortname' : "Day1",
'maps' : ["Berlin1", "Eindhoven1", "Kobe1", "Paris1", "VC1"],
'teams' : all_teams}
day2 = {'name' : "Day 2",
'shortname' : "Day2",
'maps' : ["Mexico1", "Kobe2", "Eindhoven2", "Istanbul1", "Paris2"],
'teams' : all_teams,
'merge_with' : day1,
'highlight' : 8}
semi = {'name' : "Semifinals",
'shortname' : "Semifinals",
'maps' : ["VC2", "Berlin2", "Kobe3", "Istanbul2", "Mexico2", "Eindhoven3", "Paris3", "Eindhoven4"],
'teams' : semi_teams,
'highlight' : 4}
# final = {'name' : "Finals",
# 'shortname' : "final",
# 'maps' : ["Eindhoven1"],
# 'teams' : all_teams,
# 'merge_with' : day3,
# 'show_ranks' : 1}
rounds = [day1, day2, semi]
# semi_teams = ["RAK", "SBC", "POS", "IAM", "MRL", "RI1", "SEU", "RMA"]
# final_teams = ["POS", "IAM", "SEU", "RMA"]
# day1 = {'name' : "Preliminaries Day 1",
# 'shortname' : "Preliminary1",
# 'maps' : ["VC1", "Paris1", "Kobe1", "Berlin1", "Istanbul1"],
# 'teams' : all_teams}
# day2 = {'name' : "Preliminaries Day 2",
# 'shortname' : "Preliminary2",
# 'maps' : ["Kobe2", "Paris2", "Istanbul2", "Berlin2", "VC2"],
# 'teams' : all_teams
# 'merge_with' : day1
# 'highlight' : 8}
# semi = {'name' : "Semifinals",
# 'shortname' : "Semifinals",
# 'maps' : ["Kobe2", "Paris2", "Istanbul2", "Berlin2", "VC2"],
# 'teams' : semi_teams,
# 'highlight' : 4}
# final = {'name' : "Finals",
# 'shortname' : "Finals",
# 'maps' : ["Kobe2", "Paris2", "Istanbul2", "Berlin2", "VC2"],
# 'teams' : ["Paris5", "Berlin5", "Kobe4", "Istanbul5", "VC5"],
# 'show_ranks' : 3}
# rounds = [day1, day2, semi, final]
log_location = "logs/2013"
add_downloads = True
|
nilq/baby-python
|
python
|